diff --git a/.github/.OwlBot.lock.yaml b/.github/.OwlBot.lock.yaml index 233603ea5440..4672c02d043a 100644 --- a/.github/.OwlBot.lock.yaml +++ b/.github/.OwlBot.lock.yaml @@ -13,4 +13,4 @@ # limitations under the License. docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python-mono-repo:latest - digest: sha256:1f42c1d6b70210540f55110662ae80e22b03dfb897782b09e546148599d3336c + digest: sha256:93fe03a099be9aa70157b95f5600473602d53b34983488ccdcd37cecb6a9ba3e diff --git a/.github/ISSUE_TEMPLATE/bug_report.md b/.github/ISSUE_TEMPLATE/bug_report.md deleted file mode 100644 index d1dca77307fa..000000000000 --- a/.github/ISSUE_TEMPLATE/bug_report.md +++ /dev/null @@ -1,11 +0,0 @@ ---- -name: Bug report -about: Create a report to help us improve - ---- - -**PLEASE READ**: If you have a support contract with Google, please create an issue in the [support console](https://cloud.google.com/support/) instead of filing on GitHub. This will ensure a timely response. - -**NOTE**: [Google Cloud Python](https://cloud.google.com/python/docs/reference) client libraries are no longer maintained inside this repository. Please visit the python-API repository (e.g., https://github.com/googleapis/python-pubsub/issues) for faster response times. - -See all published libraries in the [README](https://github.com/googleapis/google-cloud-python/blob/main/README.rst). diff --git a/.github/ISSUE_TEMPLATE/bug_report.yaml b/.github/ISSUE_TEMPLATE/bug_report.yaml new file mode 100644 index 000000000000..8a649dece1a6 --- /dev/null +++ b/.github/ISSUE_TEMPLATE/bug_report.yaml @@ -0,0 +1,93 @@ +name: Bug report +description: Create a report to help us improve +labels: ["type: bug","triage me"] +body: +- type: markdown + attributes: + value: | + _**PLEASE READ**: If you have a support contract with Google, please create an issue in the [support console](https://cloud.google.com/support/) instead of filing on GitHub. This will ensure a timely response._ +- type: checkboxes + attributes: + label: Determine this is the right repository + description: Please follow the steps in [the SUPPORT guide](https://github.com/googleapis/google-cloud-python/blob/issue-templates/SUPPORT.md) and determine that this is likely a client library issue and this is the correct repository in which to report it. + options: + - label: I determined this is the correct repository in which to report this bug. + required: true +- type: textarea + attributes: + label: Summary of the issue + description: Describe succinctly what you expected to happen, and what actually happened. + placeholder: Describe the issue you encountered + value: | + **Context** + _e.g. I was trying to use the new method Foo_ + + **Expected Behavior:** + _e.g. I expected method Foo to paginate_ + + **Actual Behavior:** + _e.g. Method Foo did not paginate_ +- type: input + attributes: + label: API client name and version + description: Try `pip list | grep google`, which should give you a name corresponding to one of the packages listed [here](https://github.com/googleapis/google-cloud-python/tree/main/packages). + placeholder: e.g. 
google-cloud-vision v3.7.2 +- type: textarea + attributes: + label: 'Reproduction steps: code' + description: Include a minimal but complete set of Python files that will reproduce the problem. + value: | + file: main.py + ```python + def reproduce(): + # complete code here + ``` + + +- type: textarea + attributes: + label: 'Reproduction steps: supporting files' + description: Include any files needed for the code above to work. + value: | + file: mydata.csv + ``` + alpha,1,3 + beta,2,5 + ``` + + +- type: textarea + attributes: + label: 'Reproduction steps: actual results' + description: Include the results you got when running your code, such as output files. + value: | + file: output.txt + ``` + Calculated: foo + ``` +- type: textarea + attributes: + label: 'Reproduction steps: expected results' + description: Include the results you expected to get when running your code. + value: | + file: output.txt + ``` + Calculated: bar + ``` +- type: input + attributes: + label: OS & version + platform + placeholder: e.g. Debian 6.6 on GKE +- type: input + attributes: + label: Python environment + description: Try `python3 --version`. + placeholder: e.g. Python 3.11.8 +- type: textarea + attributes: + label: Python dependencies + description: Try `pip list`. + placeholder: | + pip list + Package Version + --------------------------- ------------------------ diff --git a/.github/ISSUE_TEMPLATE/feature_request.md b/.github/ISSUE_TEMPLATE/feature_request.md deleted file mode 100644 index fa234a6a8348..000000000000 --- a/.github/ISSUE_TEMPLATE/feature_request.md +++ /dev/null @@ -1,11 +0,0 @@ ---- -name: Feature request -about: Suggest an idea for this library - ---- - -**PLEASE READ**: If you have a support contract with Google, please create an issue in the [support console](https://cloud.google.com/support/) instead of filing on GitHub. This will ensure a timely response. - -**NOTE**: [Google Cloud Python](https://cloud.google.com/python/docs/reference) client libraries are no longer maintained inside this repository. Please visit the python-API repository (e.g., https://github.com/googleapis/python-pubsub/issues) for faster response times. - -See all published libraries in the [README](https://github.com/googleapis/google-cloud-python/blob/main/README.rst). diff --git a/.github/ISSUE_TEMPLATE/feature_request.yaml b/.github/ISSUE_TEMPLATE/feature_request.yaml new file mode 100644 index 000000000000..2f1d8e129aa5 --- /dev/null +++ b/.github/ISSUE_TEMPLATE/feature_request.yaml @@ -0,0 +1,47 @@ +name: Feature request +description: Request a feature to help us make the libraries more useful +labels: ["type: feature request","triage me"] +body: +- type: markdown + attributes: + value: | + _**PLEASE READ**: If you have a support contract with Google, please create an issue in the [support console](https://cloud.google.com/support/) instead of filing on GitHub.
This will ensure a timely response._ +- type: checkboxes + attributes: + label: Determine this is the right repository + description: Please follow the steps in [the SUPPORT guide](https://github.com/googleapis/google-cloud-python/blob/issue-templates/SUPPORT.md) and determine that this is likely a client library issue and this is the correct repository in which to report it. + options: + - label: I determined this is the correct repository in which to report this feature request. + required: true +- type: textarea + attributes: + label: Summary of the feature request + description: Describe succinctly what feature you would like the client library to add. + placeholder: Feature request summary + value: | + _e.g. I would like to easily be able to do FOO by simply coding BAR._ +- type: textarea + attributes: + label: 'Desired code experience' + description: Include a minimal code snippet of how your requested feature might look in actual code, including how you would use the results. + value: | + file: main.py + ```python + def my_ideal_code(): + ... + some_data = my_new_feature() + do_something_with(some_data) + ``` + + +- type: textarea + attributes: + label: 'Expected results' + description: Describe the results you would expect with the code above. + value: | + _e.g. `my_new_feature()` should return FOO_ +- type: input + attributes: + label: API client name and version + description: If there's a particular API client for which this feature would be helpful, enter it here. Try `pip list | grep google`, which should give you a name corresponding to one of the packages listed [here](https://github.com/googleapis/google-cloud-python/tree/main/packages). + placeholder: e.g. google-cloud-vision v3.7.2 diff --git a/.github/workflows/docs.yml b/.github/workflows/docs.yml index 41eaf0cbc231..fe001c57451b 100644 --- a/.github/workflows/docs.yml +++ b/.github/workflows/docs.yml @@ -12,7 +12,64 @@ permissions: contents: read jobs: + # The two jobs "docs" and "docfx" are marked as required checks + # (and reset as such periodically) elsewhere in our + # automation. Since we don't want to block non-release PRs on docs + # failures, we want these checks to always show up as succeeded for + # those PRs. For release PRs, we do want the checks to run and block + # merge on failure. + # + # We accomplish this by using an "if:" conditional. Jobs + # thus skipped via a conditional (i.e. a false condition) show as + # having succeeded. See: + # https://docs.github.com/en/pull-requests/collaborating-with-pull-requests/collaborating-on-repositories-with-code-quality-features/troubleshooting-required-status-checks#handling-skipped-but-required-checks + # + # Since we want advance notice of docs errors, we also have two + # corresponding non-required checks, the jobs "docs-warnings" and + # "docfx-warnings", that run for all non-release PRs (i.e., when the + # "docs" and "docfx" jobs don't run). + # + # + # PLEASE ENSURE THE FOLLOWING AT ALL TIMES: + # + # - the "*-warnings" checks remain NON-REQUIRED in the repo + # settings. + # + # - the steps for the jobs "docs" and "docfx" are identical to the + # ones in "docs-warnings" and "docfx-warnings", respectively.
We + # will be able to avoid config duplication once GitHub actions + # support YAML anchors (see + # https://github.com/actions/runner/issues/1182) docs: + if: github.actor == 'release-please[bot]' + runs-on: ubuntu-latest + steps: + - name: Checkout + uses: actions/checkout@v4 + # Use a fetch-depth of 2 to avoid error `fatal: origin/main...HEAD: no merge base` + # See https://github.com/googleapis/google-cloud-python/issues/12013 + # and https://github.com/actions/checkout#checkout-head. + with: + fetch-depth: 2 + - name: Setup Python + uses: actions/setup-python@v5 + with: + python-version: "3.10" + - name: Install nox + run: | + python -m pip install --upgrade setuptools pip wheel + python -m pip install nox + - name: Run docs + env: + BUILD_TYPE: presubmit + TEST_TYPE: docs + PY_VERSION: "3.10" + run: | + ci/run_conditional_tests.sh + docs-warnings: + if: github.actor != 'release-please[bot]' + name: "Docs warnings: will block release" + continue-on-error: true runs-on: ubuntu-latest steps: - name: Checkout @@ -38,6 +95,35 @@ jobs: run: | ci/run_conditional_tests.sh docfx: + if: github.actor == 'release-please[bot]' + runs-on: ubuntu-latest + steps: + - name: Checkout + uses: actions/checkout@v4 + # Use a fetch-depth of 2 to avoid error `fatal: origin/main...HEAD: no merge base` + # See https://github.com/googleapis/google-cloud-python/issues/12013 + # and https://github.com/actions/checkout#checkout-head. + with: + fetch-depth: 2 + - name: Setup Python + uses: actions/setup-python@v5 + with: + python-version: "3.10" + - name: Install nox + run: | + python -m pip install --upgrade setuptools pip wheel + python -m pip install nox + - name: Run docfx + env: + BUILD_TYPE: presubmit + TEST_TYPE: docfx + PY_VERSION: "3.10" + run: | + ci/run_conditional_tests.sh + docfx-warnings: + if: github.actor != 'release-please[bot]' + name: "Docfx warnings: will block release" + continue-on-error: true runs-on: ubuntu-latest steps: - name: Checkout diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index 88f2d6d9f02e..b06087ccb849 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -20,6 +20,7 @@ on: # * is a special character in YAML so you have to quote this string # Run this Github Action every Tuesday at 7 AM UTC - cron: '0 7 * * 2' + workflow_dispatch: permissions: read-all diff --git a/.kokoro/requirements.txt b/.kokoro/requirements.txt index 3e5c11a26f47..7ab92ad9119f 100644 --- a/.kokoro/requirements.txt +++ b/.kokoro/requirements.txt @@ -444,9 +444,9 @@ readme-renderer==42.0 \ --hash=sha256:13d039515c1f24de668e2c93f2e877b9dbe6c6c32328b90a40a49d8b2b85f36d \ --hash=sha256:2d55489f83be4992fe4454939d1a051c33edbab778e82761d060c9fc6b308cd1 # via twine -requests==2.31.0 \ - --hash=sha256:58cd2187c01e70e6e26505bca751777aa9f2ee0b7f4300988b709f44e013003f \ - --hash=sha256:942c5a758f98d790eaed1a29cb6eefc7ffb0d1cf7af05c3d2791656dbd6ad1e1 +requests==2.32.2 \ + --hash=sha256:dd951ff5ecf3e3b3aa26b40703ba77495dab41da839ae72ef3c8e5d8e2433289 \ + --hash=sha256:fc06670dd0ed212426dfeb94fc1b983d917c4f9847c863f313c9dfaaffb7c23c # via # gcp-releasetool # google-api-core @@ -487,9 +487,9 @@ typing-extensions==4.8.0 \ --hash=sha256:8f92fc8806f9a6b641eaa5318da32b44d401efaac0f6678c9bc448ba3605faa0 \ 
--hash=sha256:df8e4339e9cb77357558cbdbceca33c303714cf861d1eef15e1070055ae8b7ef # via -r requirements.in -urllib3==2.0.7 \ - --hash=sha256:c97dfde1f7bd43a71c8d2a58e369e9b2bf692d1334ea9f9cae55add7d0dd0f84 \ - --hash=sha256:fdb6d215c776278489906c2f8916e6e7d4f5a9b602ccbcfdf7f016fc8da0596e +urllib3==2.2.2 \ + --hash=sha256:a448b2f64d686155468037e1ace9f2d2199776e17f0a46610480d311f73e3472 \ + --hash=sha256:dd505485549a7a552833da5e6063639d0d177c04f23bc3864e41e5dc5f612168 # via # requests # twine diff --git a/.pinned-metadata.yaml b/.pinned-metadata.yaml new file mode 100644 index 000000000000..723dbb2e1284 --- /dev/null +++ b/.pinned-metadata.yaml @@ -0,0 +1,4 @@ +packages/google-maps-places: + release_level: preview + _reason: "The protos contain messages and fields marked 'experimental'" + diff --git a/.release-please-manifest.json b/.release-please-manifest.json index b9e7232f502e..7f394b168eb2 100644 --- a/.release-please-manifest.json +++ b/.release-please-manifest.json @@ -1,10 +1,10 @@ { "packages/google-ads-admanager": "0.1.0", - "packages/google-ai-generativelanguage": "0.6.3", + "packages/google-ai-generativelanguage": "0.6.6", "packages/google-analytics-admin": "0.22.7", "packages/google-analytics-data": "0.18.8", "packages/google-apps-card": "0.1.2", - "packages/google-apps-chat": "0.1.6", + "packages/google-apps-chat": "0.1.7", "packages/google-apps-events-subscriptions": "0.1.0", "packages/google-apps-meet": "0.1.6", "packages/google-apps-script-type": "0.3.8", @@ -24,9 +24,9 @@ "packages/google-cloud-asset": "3.26.1", "packages/google-cloud-assured-workloads": "1.12.3", "packages/google-cloud-automl": "2.13.3", - "packages/google-cloud-backupdr": "0.1.0", + "packages/google-cloud-backupdr": "0.1.1", "packages/google-cloud-bare-metal-solution": "1.7.3", - "packages/google-cloud-batch": "0.17.20", + "packages/google-cloud-batch": "0.17.21", "packages/google-cloud-beyondcorp-appconnections": "0.4.9", "packages/google-cloud-beyondcorp-appconnectors": "0.4.9", "packages/google-cloud-beyondcorp-appgateways": "0.4.9", @@ -37,17 +37,17 @@ "packages/google-cloud-bigquery-connection": "1.15.3", "packages/google-cloud-bigquery-data-exchange": "0.5.11", "packages/google-cloud-bigquery-datapolicies": "0.6.6", - "packages/google-cloud-bigquery-datatransfer": "3.15.2", + "packages/google-cloud-bigquery-datatransfer": "3.15.3", "packages/google-cloud-bigquery-logging": "1.4.3", "packages/google-cloud-bigquery-migration": "0.11.7", "packages/google-cloud-bigquery-reservation": "1.13.3", - "packages/google-cloud-billing": "1.13.3", + "packages/google-cloud-billing": "1.13.4", "packages/google-cloud-billing-budgets": "1.14.3", "packages/google-cloud-binary-authorization": "1.10.3", "packages/google-cloud-build": "3.24.0", "packages/google-cloud-certificate-manager": "1.7.0", "packages/google-cloud-channel": "1.18.3", - "packages/google-cloud-cloudcontrolspartner": "0.1.0", + "packages/google-cloud-cloudcontrolspartner": "0.1.1", "packages/google-cloud-cloudquotas": "0.1.8", "packages/google-cloud-commerce-consumer-procurement": "0.1.5", "packages/google-cloud-common": "1.3.3", @@ -55,7 +55,7 @@ "packages/google-cloud-confidentialcomputing": "0.4.8", "packages/google-cloud-config": "0.1.9", "packages/google-cloud-contact-center-insights": "1.17.3", - "packages/google-cloud-container": "2.45.0", + "packages/google-cloud-container": "2.47.0", "packages/google-cloud-containeranalysis": "2.14.3", "packages/google-cloud-contentwarehouse": "0.7.7", "packages/google-cloud-data-fusion": "1.10.3", @@ -65,56 
+65,59 @@ "packages/google-cloud-dataflow-client": "0.8.10", "packages/google-cloud-dataform": "0.5.9", "packages/google-cloud-datalabeling": "1.10.3", - "packages/google-cloud-dataplex": "2.0.0", - "packages/google-cloud-dataproc": "5.9.3", + "packages/google-cloud-dataplex": "2.2.0", + "packages/google-cloud-dataproc": "5.10.0", "packages/google-cloud-dataproc-metastore": "1.15.3", "packages/google-cloud-datastream": "1.9.3", "packages/google-cloud-deploy": "1.19.0", + "packages/google-cloud-developerconnect": "0.1.0", "packages/google-cloud-dialogflow": "2.30.0", - "packages/google-cloud-dialogflow-cx": "1.33.0", - "packages/google-cloud-discoveryengine": "0.11.11", - "packages/google-cloud-dlp": "3.17.0", + "packages/google-cloud-dialogflow-cx": "1.34.0", + "packages/google-cloud-discoveryengine": "0.11.13", + "packages/google-cloud-dlp": "3.18.0", "packages/google-cloud-dms": "1.9.3", - "packages/google-cloud-documentai": "2.27.1", + "packages/google-cloud-documentai": "2.29.1", "packages/google-cloud-domains": "1.7.3", "packages/google-cloud-edgecontainer": "0.5.9", - "packages/google-cloud-edgenetwork": "0.1.7", + "packages/google-cloud-edgenetwork": "0.1.8", "packages/google-cloud-enterpriseknowledgegraph": "0.3.9", "packages/google-cloud-essential-contacts": "1.7.3", "packages/google-cloud-eventarc": "1.11.3", "packages/google-cloud-eventarc-publishing": "0.6.9", "packages/google-cloud-filestore": "1.9.3", "packages/google-cloud-functions": "1.16.3", - "packages/google-cloud-gke-backup": "0.5.8", + "packages/google-cloud-gdchardwaremanagement": "0.1.1", + "packages/google-cloud-gke-backup": "0.5.9", "packages/google-cloud-gke-connect-gateway": "0.8.9", - "packages/google-cloud-gke-hub": "1.13.1", + "packages/google-cloud-gke-hub": "1.14.0", "packages/google-cloud-gke-multicloud": "0.6.10", "packages/google-cloud-gsuiteaddons": "0.3.8", "packages/google-cloud-iam": "2.15.0", "packages/google-cloud-iam-logging": "1.3.3", "packages/google-cloud-iap": "1.13.3", "packages/google-cloud-ids": "1.7.3", - "packages/google-cloud-kms": "2.23.0", + "packages/google-cloud-kms": "2.24.0", "packages/google-cloud-kms-inventory": "0.2.6", "packages/google-cloud-language": "2.13.3", - "packages/google-cloud-life-sciences": "0.9.9", + "packages/google-cloud-life-sciences": "0.9.10", "packages/google-cloud-managed-identities": "1.9.3", + "packages/google-cloud-managedkafka": "0.1.0", "packages/google-cloud-media-translation": "0.11.9", "packages/google-cloud-memcache": "1.9.3", "packages/google-cloud-migrationcenter": "0.1.7", - "packages/google-cloud-monitoring": "2.21.0", + "packages/google-cloud-monitoring": "2.22.0", "packages/google-cloud-monitoring-dashboards": "2.15.1", "packages/google-cloud-monitoring-metrics-scopes": "1.6.3", - "packages/google-cloud-netapp": "0.3.9", + "packages/google-cloud-netapp": "0.3.10", "packages/google-cloud-network-connectivity": "2.4.3", "packages/google-cloud-network-management": "1.17.1", "packages/google-cloud-network-security": "0.9.9", - "packages/google-cloud-network-services": "0.5.10", + "packages/google-cloud-network-services": "0.5.12", "packages/google-cloud-notebooks": "1.10.3", "packages/google-cloud-optimization": "1.8.3", "packages/google-cloud-orchestration-airflow": "1.12.1", "packages/google-cloud-os-config": "1.17.3", - "packages/google-cloud-os-login": "2.14.3", + "packages/google-cloud-os-login": "2.14.4", "packages/google-cloud-parallelstore": "0.2.0", "packages/google-cloud-phishing-protection": "1.11.3", 
"packages/google-cloud-policy-troubleshooter": "1.11.3", @@ -124,20 +127,20 @@ "packages/google-cloud-private-catalog": "0.9.9", "packages/google-cloud-public-ca": "0.3.10", "packages/google-cloud-rapidmigrationassessment": "0.1.7", - "packages/google-cloud-recaptcha-enterprise": "1.20.0", + "packages/google-cloud-recaptcha-enterprise": "1.21.0", "packages/google-cloud-recommendations-ai": "0.10.10", "packages/google-cloud-recommender": "2.15.3", "packages/google-cloud-redis": "2.15.3", - "packages/google-cloud-redis-cluster": "0.1.5", + "packages/google-cloud-redis-cluster": "0.1.6", "packages/google-cloud-resource-manager": "1.12.3", - "packages/google-cloud-resource-settings": "1.9.3", - "packages/google-cloud-retail": "1.20.1", + "packages/google-cloud-resource-settings": "1.9.4", + "packages/google-cloud-retail": "1.21.0", "packages/google-cloud-run": "0.10.5", "packages/google-cloud-scheduler": "2.13.3", "packages/google-cloud-secret-manager": "2.20.0", "packages/google-cloud-securesourcemanager": "0.1.5", - "packages/google-cloud-securitycenter": "1.31.0", - "packages/google-cloud-securitycentermanagement": "0.1.8", + "packages/google-cloud-securitycenter": "1.32.0", + "packages/google-cloud-securitycentermanagement": "0.1.12", "packages/google-cloud-service-control": "1.12.1", "packages/google-cloud-service-directory": "1.11.4", "packages/google-cloud-service-management": "1.8.3", @@ -146,7 +149,7 @@ "packages/google-cloud-shell": "1.9.3", "packages/google-cloud-source-context": "1.5.3", "packages/google-cloud-speech": "2.26.0", - "packages/google-cloud-storage-control": "0.2.0", + "packages/google-cloud-storage-control": "1.0.1", "packages/google-cloud-storage-transfer": "1.11.3", "packages/google-cloud-storageinsights": "0.1.8", "packages/google-cloud-support": "0.1.7", @@ -158,7 +161,7 @@ "packages/google-cloud-trace": "1.13.3", "packages/google-cloud-translate": "3.15.3", "packages/google-cloud-video-live-stream": "1.7.3", - "packages/google-cloud-video-stitcher": "0.7.9", + "packages/google-cloud-video-stitcher": "0.7.10", "packages/google-cloud-video-transcoder": "1.12.3", "packages/google-cloud-videointelligence": "2.13.3", "packages/google-cloud-vision": "3.7.2", @@ -174,18 +177,22 @@ "packages/google-maps-addressvalidation": "0.3.11", "packages/google-maps-fleetengine": "0.2.0", "packages/google-maps-fleetengine-delivery": "0.2.0", - "packages/google-maps-mapsplatformdatasets": "0.3.7", - "packages/google-maps-places": "0.1.14", + "packages/google-maps-mapsplatformdatasets": "0.4.0", + "packages/google-maps-places": "0.1.15", "packages/google-maps-routeoptimization": "0.1.0", "packages/google-maps-routing": "0.6.8", "packages/google-maps-solar": "0.1.0", - "packages/google-shopping-css": "0.1.4", - "packages/google-shopping-merchant-conversions": "0.1.0", - "packages/google-shopping-merchant-inventories": "0.1.6", - "packages/google-shopping-merchant-lfp": "0.1.0", + "packages/google-shopping-css": "0.1.5", + "packages/google-shopping-merchant-accounts": "0.1.1", + "packages/google-shopping-merchant-conversions": "0.1.1", + "packages/google-shopping-merchant-datasources": "0.1.0", + "packages/google-shopping-merchant-inventories": "0.1.7", + "packages/google-shopping-merchant-lfp": "0.1.1", "packages/google-shopping-merchant-notifications": "0.1.0", + "packages/google-shopping-merchant-products": "0.1.0", + "packages/google-shopping-merchant-promotions": "0.1.0", "packages/google-shopping-merchant-quota": "0.1.0", - "packages/google-shopping-merchant-reports": "0.1.6", + 
"packages/google-shopping-merchant-reports": "0.1.7", "packages/google-shopping-type": "0.1.6", "packages/grafeas": "1.10.0" } diff --git a/CHANGELOG.md b/CHANGELOG.md index ea96b132e734..ab9880c9b2f5 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -3,11 +3,11 @@ Please refer to each API's `CHANGELOG.md` file under the `packages/` directory Changelogs ----- - [google-ads-admanager==0.1.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-ads-admanager/CHANGELOG.md) -- [google-ai-generativelanguage==0.6.3](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-ai-generativelanguage/CHANGELOG.md) +- [google-ai-generativelanguage==0.6.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-ai-generativelanguage/CHANGELOG.md) - [google-analytics-admin==0.22.7](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-analytics-admin/CHANGELOG.md) - [google-analytics-data==0.18.8](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-analytics-data/CHANGELOG.md) - [google-apps-card==0.1.2](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-apps-card/CHANGELOG.md) -- [google-apps-chat==0.1.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-apps-chat/CHANGELOG.md) +- [google-apps-chat==0.1.6](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-apps-chat/CHANGELOG.md) - [google-apps-events-subscriptions==0.1.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-apps-events-subscriptions/CHANGELOG.md) - [google-apps-meet==0.1.6](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-apps-meet/CHANGELOG.md) - [google-apps-script-type==0.3.8](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-apps-script-type/CHANGELOG.md) @@ -27,9 +27,9 @@ Changelogs - [google-cloud-asset==3.26.1](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-asset/CHANGELOG.md) - [google-cloud-assured-workloads==1.12.3](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-assured-workloads/CHANGELOG.md) - [google-cloud-automl==2.13.3](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-automl/CHANGELOG.md) -- [google-cloud-backupdr==0.1.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-backupdr/CHANGELOG.md) +- [google-cloud-backupdr==0.1.1](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-backupdr/CHANGELOG.md) - 
[google-cloud-bare-metal-solution==1.7.3](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-bare-metal-solution/CHANGELOG.md) -- [google-cloud-batch==0.17.19](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-batch/CHANGELOG.md) +- [google-cloud-batch==0.17.21](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-batch/CHANGELOG.md) - [google-cloud-beyondcorp-appconnections==0.4.9](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-beyondcorp-appconnections/CHANGELOG.md) - [google-cloud-beyondcorp-appconnectors==0.4.9](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-beyondcorp-appconnectors/CHANGELOG.md) - [google-cloud-beyondcorp-appgateways==0.4.9](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-beyondcorp-appgateways/CHANGELOG.md) @@ -40,25 +40,25 @@ Changelogs - [google-cloud-bigquery-connection==1.15.3](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-bigquery-connection/CHANGELOG.md) - [google-cloud-bigquery-data-exchange==0.5.11](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-bigquery-data-exchange/CHANGELOG.md) - [google-cloud-bigquery-datapolicies==0.6.6](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-bigquery-datapolicies/CHANGELOG.md) -- [google-cloud-bigquery-datatransfer==3.15.2](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-bigquery-datatransfer/CHANGELOG.md) +- [google-cloud-bigquery-datatransfer==3.15.3](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-bigquery-datatransfer/CHANGELOG.md) - [google-cloud-bigquery-logging==1.4.3](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-bigquery-logging/CHANGELOG.md) - [google-cloud-bigquery-migration==0.11.7](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-bigquery-migration/CHANGELOG.md) - [google-cloud-bigquery-reservation==1.13.3](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-bigquery-reservation/CHANGELOG.md) - [google-cloud-billing-budgets==1.14.3](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-billing-budgets/CHANGELOG.md) -- [google-cloud-billing==1.13.3](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-billing/CHANGELOG.md) +- 
[google-cloud-billing==1.13.4](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-billing/CHANGELOG.md) - [google-cloud-binary-authorization==1.10.3](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-binary-authorization/CHANGELOG.md) - [google-cloud-build==3.24.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-build/CHANGELOG.md) - [google-cloud-certificate-manager==1.7.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-certificate-manager/CHANGELOG.md) - [google-cloud-channel==1.18.3](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-channel/CHANGELOG.md) -- [google-cloud-cloudcontrolspartner==0.1.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-cloudcontrolspartner/CHANGELOG.md) +- [google-cloud-cloudcontrolspartner==0.1.1](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-cloudcontrolspartner/CHANGELOG.md) - [google-cloud-cloudquotas==0.1.8](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-cloudquotas/CHANGELOG.md) - [google-cloud-commerce-consumer-procurement==0.1.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-commerce-consumer-procurement/CHANGELOG.md) - [google-cloud-common==1.3.3](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-common/CHANGELOG.md) -- [google-cloud-compute==1.18.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-compute/CHANGELOG.md) +- [google-cloud-compute==1.19.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-compute/CHANGELOG.md) - [google-cloud-confidentialcomputing==0.4.8](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-confidentialcomputing/CHANGELOG.md) - [google-cloud-config==0.1.9](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-config/CHANGELOG.md) - [google-cloud-contact-center-insights==1.17.3](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-contact-center-insights/CHANGELOG.md) -- [google-cloud-container==2.45.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-container/CHANGELOG.md) +- [google-cloud-container==2.47.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-container/CHANGELOG.md) - 
[google-cloud-containeranalysis==2.14.3](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-containeranalysis/CHANGELOG.md) - [google-cloud-contentwarehouse==0.7.7](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-contentwarehouse/CHANGELOG.md) - [google-cloud-data-fusion==1.10.3](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-data-fusion/CHANGELOG.md) @@ -68,17 +68,18 @@ Changelogs - [google-cloud-dataflow-client==0.8.10](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-dataflow-client/CHANGELOG.md) - [google-cloud-dataform==0.5.9](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-dataform/CHANGELOG.md) - [google-cloud-datalabeling==1.10.3](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-datalabeling/CHANGELOG.md) -- [google-cloud-dataplex==1.13.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-dataplex/CHANGELOG.md) +- [google-cloud-dataplex==2.1.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-dataplex/CHANGELOG.md) - [google-cloud-dataproc-metastore==1.15.3](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-dataproc-metastore/CHANGELOG.md) - [google-cloud-dataproc==5.9.3](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-dataproc/CHANGELOG.md) - [google-cloud-datastream==1.9.3](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-datastream/CHANGELOG.md) -- [google-cloud-deploy==1.18.1](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-deploy/CHANGELOG.md) +- [google-cloud-deploy==1.19.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-deploy/CHANGELOG.md) +- [google-cloud-developerconnect==0.1.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-developerconnect/CHANGELOG.md) - [google-cloud-dialogflow-cx==1.33.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-dialogflow-cx/CHANGELOG.md) - [google-cloud-dialogflow==2.30.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-dialogflow/CHANGELOG.md) -- [google-cloud-discoveryengine==0.11.11](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-discoveryengine/CHANGELOG.md) -- 
[google-cloud-dlp==3.17.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-dlp/CHANGELOG.md) +- [google-cloud-discoveryengine==0.11.13](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-discoveryengine/CHANGELOG.md) +- [google-cloud-dlp==3.18.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-dlp/CHANGELOG.md) - [google-cloud-dms==1.9.3](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-dms/CHANGELOG.md) -- [google-cloud-documentai==2.27.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-documentai/CHANGELOG.md) +- [google-cloud-documentai==2.29.1](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-documentai/CHANGELOG.md) - [google-cloud-domains==1.7.3](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-domains/CHANGELOG.md) - [google-cloud-edgecontainer==0.5.9](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-edgecontainer/CHANGELOG.md) - [google-cloud-edgenetwork==0.1.7](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-edgenetwork/CHANGELOG.md) @@ -88,68 +89,70 @@ Changelogs - [google-cloud-eventarc==1.11.3](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-eventarc/CHANGELOG.md) - [google-cloud-filestore==1.9.3](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-filestore/CHANGELOG.md) - [google-cloud-functions==1.16.3](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-functions/CHANGELOG.md) -- [google-cloud-gke-backup==0.5.8](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-gke-backup/CHANGELOG.md) +- [google-cloud-gdchardwaremanagement==null](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-gdchardwaremanagement/CHANGELOG.md) +- [google-cloud-gke-backup==0.5.9](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-gke-backup/CHANGELOG.md) - [google-cloud-gke-connect-gateway==0.8.9](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-gke-connect-gateway/CHANGELOG.md) -- [google-cloud-gke-hub==1.13.1](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-gke-hub/CHANGELOG.md) -- 
[google-cloud-gke-multicloud==0.6.9](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-gke-multicloud/CHANGELOG.md) +- [google-cloud-gke-hub==1.14.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-gke-hub/CHANGELOG.md) +- [google-cloud-gke-multicloud==0.6.10](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-gke-multicloud/CHANGELOG.md) - [google-cloud-gsuiteaddons==0.3.8](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-gsuiteaddons/CHANGELOG.md) - [google-cloud-iam-logging==1.3.3](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-iam-logging/CHANGELOG.md) - [google-cloud-iam==2.15.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-iam/CHANGELOG.md) - [google-cloud-iap==1.13.3](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-iap/CHANGELOG.md) - [google-cloud-ids==1.7.3](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-ids/CHANGELOG.md) - [google-cloud-kms-inventory==0.2.6](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-kms-inventory/CHANGELOG.md) -- [google-cloud-kms==2.22.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-kms/CHANGELOG.md) +- [google-cloud-kms==2.24.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-kms/CHANGELOG.md) - [google-cloud-language==2.13.3](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-language/CHANGELOG.md) - [google-cloud-life-sciences==0.9.9](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-life-sciences/CHANGELOG.md) - [google-cloud-managed-identities==1.9.3](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-managed-identities/CHANGELOG.md) +- [google-cloud-managedkafka==0.1.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-managedkafka/CHANGELOG.md) - [google-cloud-media-translation==0.11.9](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-media-translation/CHANGELOG.md) - [google-cloud-memcache==1.9.3](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-memcache/CHANGELOG.md) - [google-cloud-migrationcenter==0.1.7](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-migrationcenter/CHANGELOG.md) - 
[google-cloud-monitoring-dashboards==2.15.1](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-monitoring-dashboards/CHANGELOG.md) - [google-cloud-monitoring-metrics-scopes==1.6.3](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-monitoring-metrics-scopes/CHANGELOG.md) -- [google-cloud-monitoring==2.21.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-monitoring/CHANGELOG.md) -- [google-cloud-netapp==0.3.9](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-netapp/CHANGELOG.md) +- [google-cloud-monitoring==2.22.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-monitoring/CHANGELOG.md) +- [google-cloud-netapp==0.3.10](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-netapp/CHANGELOG.md) - [google-cloud-network-connectivity==2.4.3](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-network-connectivity/CHANGELOG.md) - [google-cloud-network-management==1.17.1](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-network-management/CHANGELOG.md) - [google-cloud-network-security==0.9.9](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-network-security/CHANGELOG.md) -- [google-cloud-network-services==0.5.10](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-network-services/CHANGELOG.md) +- [google-cloud-network-services==0.5.12](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-network-services/CHANGELOG.md) - [google-cloud-notebooks==1.10.3](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-notebooks/CHANGELOG.md) - [google-cloud-optimization==1.8.3](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-optimization/CHANGELOG.md) - [google-cloud-orchestration-airflow==1.12.1](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-orchestration-airflow/CHANGELOG.md) - [google-cloud-os-config==1.17.3](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-os-config/CHANGELOG.md) - [google-cloud-os-login==2.14.3](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-os-login/CHANGELOG.md) -- [google-cloud-parallelstore==0.1.2](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-parallelstore/CHANGELOG.md) +- 
[google-cloud-parallelstore==0.2.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-parallelstore/CHANGELOG.md) - [google-cloud-phishing-protection==1.11.3](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-phishing-protection/CHANGELOG.md) - [google-cloud-policy-troubleshooter==1.11.3](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-policy-troubleshooter/CHANGELOG.md) - [google-cloud-policysimulator==0.1.6](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-policysimulator/CHANGELOG.md) - [google-cloud-policytroubleshooter-iam==0.1.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-policytroubleshooter-iam/CHANGELOG.md) - [google-cloud-private-ca==1.12.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-private-ca/CHANGELOG.md) - [google-cloud-private-catalog==0.9.9](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-private-catalog/CHANGELOG.md) -- [google-cloud-public-ca==0.3.9](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-public-ca/CHANGELOG.md) +- [google-cloud-public-ca==0.3.10](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-public-ca/CHANGELOG.md) - [google-cloud-rapidmigrationassessment==0.1.7](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-rapidmigrationassessment/CHANGELOG.md) - [google-cloud-recaptcha-enterprise==1.20.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-recaptcha-enterprise/CHANGELOG.md) - [google-cloud-recommendations-ai==0.10.10](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-recommendations-ai/CHANGELOG.md) - [google-cloud-recommender==2.15.3](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-recommender/CHANGELOG.md) -- [google-cloud-redis-cluster==0.1.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-redis-cluster/CHANGELOG.md) +- [google-cloud-redis-cluster==0.1.6](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-redis-cluster/CHANGELOG.md) - [google-cloud-redis==2.15.3](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-redis/CHANGELOG.md) - [google-cloud-resource-manager==1.12.3](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-resource-manager/CHANGELOG.md) -- 
[google-cloud-resource-settings==1.9.3](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-resource-settings/CHANGELOG.md) -- [google-cloud-retail==1.20.1](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-retail/CHANGELOG.md) +- [google-cloud-resource-settings==1.9.4](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-resource-settings/CHANGELOG.md) +- [google-cloud-retail==1.21.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-retail/CHANGELOG.md) - [google-cloud-run==0.10.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-run/CHANGELOG.md) - [google-cloud-scheduler==2.13.3](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-scheduler/CHANGELOG.md) - [google-cloud-secret-manager==2.20.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-secret-manager/CHANGELOG.md) - [google-cloud-securesourcemanager==0.1.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-securesourcemanager/CHANGELOG.md) - [google-cloud-securitycenter==1.31.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-securitycenter/CHANGELOG.md) -- [google-cloud-securitycentermanagement==0.1.8](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-securitycentermanagement/CHANGELOG.md) +- [google-cloud-securitycentermanagement==0.1.12](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-securitycentermanagement/CHANGELOG.md) - [google-cloud-service-control==1.12.1](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-service-control/CHANGELOG.md) -- [google-cloud-service-directory==1.11.3](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-service-directory/CHANGELOG.md) +- [google-cloud-service-directory==1.11.4](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-service-directory/CHANGELOG.md) - [google-cloud-service-management==1.8.3](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-service-management/CHANGELOG.md) - [google-cloud-service-usage==1.10.3](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-service-usage/CHANGELOG.md) - [google-cloud-servicehealth==0.1.4](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-servicehealth/CHANGELOG.md) - 
[google-cloud-shell==1.9.3](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-shell/CHANGELOG.md) - [google-cloud-source-context==1.5.3](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-source-context/CHANGELOG.md) - [google-cloud-speech==2.26.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-speech/CHANGELOG.md) -- [google-cloud-storage-control==0.2.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-storage-control/CHANGELOG.md) +- [google-cloud-storage-control==1.0.1](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-storage-control/CHANGELOG.md) - [google-cloud-storage-transfer==1.11.3](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-storage-transfer/CHANGELOG.md) - [google-cloud-storageinsights==0.1.8](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-storageinsights/CHANGELOG.md) - [google-cloud-support==0.1.7](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-support/CHANGELOG.md) @@ -161,11 +164,11 @@ Changelogs - [google-cloud-trace==1.13.3](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-trace/CHANGELOG.md) - [google-cloud-translate==3.15.3](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-translate/CHANGELOG.md) - [google-cloud-video-live-stream==1.7.3](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-video-live-stream/CHANGELOG.md) -- [google-cloud-video-stitcher==0.7.9](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-video-stitcher/CHANGELOG.md) +- [google-cloud-video-stitcher==0.7.10](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-video-stitcher/CHANGELOG.md) - [google-cloud-video-transcoder==1.12.3](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-video-transcoder/CHANGELOG.md) - [google-cloud-videointelligence==2.13.3](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-videointelligence/CHANGELOG.md) - [google-cloud-vision==3.7.2](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-vision/CHANGELOG.md) -- [google-cloud-visionai==0.0.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-visionai/CHANGELOG.md) +- 
[google-cloud-visionai==0.1.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-visionai/CHANGELOG.md) - [google-cloud-vm-migration==1.8.3](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-vm-migration/CHANGELOG.md) - [google-cloud-vmwareengine==1.4.3](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-vmwareengine/CHANGELOG.md) - [google-cloud-vpc-access==1.10.3](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-vpc-access/CHANGELOG.md) @@ -175,20 +178,24 @@ Changelogs - [google-cloud-workstations==0.5.6](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-workstations/CHANGELOG.md) - [google-geo-type==0.3.7](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-geo-type/CHANGELOG.md) - [google-maps-addressvalidation==0.3.11](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-maps-addressvalidation/CHANGELOG.md) -- [google-maps-fleetengine-delivery==0.1.8](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-maps-fleetengine-delivery/CHANGELOG.md) -- [google-maps-fleetengine==0.1.9](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-maps-fleetengine/CHANGELOG.md) -- [google-maps-mapsplatformdatasets==0.3.7](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-maps-mapsplatformdatasets/CHANGELOG.md) -- [google-maps-places==0.1.14](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-maps-places/CHANGELOG.md) -- [google-maps-routeoptimization==0.0.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-maps-routeoptimization/CHANGELOG.md) +- [google-maps-fleetengine-delivery==0.2.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-maps-fleetengine-delivery/CHANGELOG.md) +- [google-maps-fleetengine==0.2.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-maps-fleetengine/CHANGELOG.md) +- [google-maps-mapsplatformdatasets==0.4.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-maps-mapsplatformdatasets/CHANGELOG.md) +- [google-maps-places==0.1.15](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-maps-places/CHANGELOG.md) +- [google-maps-routeoptimization==0.1.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-maps-routeoptimization/CHANGELOG.md) - 
[google-maps-routing==0.6.8](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-maps-routing/CHANGELOG.md) - [google-maps-solar==0.1.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-maps-solar/CHANGELOG.md) -- [google-shopping-css==0.1.4](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-shopping-css/CHANGELOG.md) -- [google-shopping-merchant-conversions==0.1.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-shopping-merchant-conversions/CHANGELOG.md) -- [google-shopping-merchant-inventories==0.1.6](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-shopping-merchant-inventories/CHANGELOG.md) -- [google-shopping-merchant-lfp==0.1.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-shopping-merchant-lfp/CHANGELOG.md) +- [google-shopping-css==0.1.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-shopping-css/CHANGELOG.md) +- [google-shopping-merchant-accounts==0.1.1](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-shopping-merchant-accounts/CHANGELOG.md) +- [google-shopping-merchant-conversions==0.1.1](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-shopping-merchant-conversions/CHANGELOG.md) +- [google-shopping-merchant-datasources==0.1.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-shopping-merchant-datasources/CHANGELOG.md) +- [google-shopping-merchant-inventories==0.1.7](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-shopping-merchant-inventories/CHANGELOG.md) +- [google-shopping-merchant-lfp==0.1.1](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-shopping-merchant-lfp/CHANGELOG.md) - [google-shopping-merchant-notifications==0.1.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-shopping-merchant-notifications/CHANGELOG.md) +- [google-shopping-merchant-products==0.1.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-shopping-merchant-products/CHANGELOG.md) +- [google-shopping-merchant-promotions==0.1.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-shopping-merchant-promotions/CHANGELOG.md) - [google-shopping-merchant-quota==0.1.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-shopping-merchant-quota/CHANGELOG.md) -- 
[google-shopping-merchant-reports==0.1.6](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-shopping-merchant-reports/CHANGELOG.md) +- [google-shopping-merchant-reports==0.1.7](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-shopping-merchant-reports/CHANGELOG.md) - [google-shopping-type==0.1.6](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-shopping-type/CHANGELOG.md) - [grafeas==1.10.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/grafeas/CHANGELOG.md) diff --git a/README.rst b/README.rst index 3c4d9cc7aacd..afed928d5568 100644 --- a/README.rst +++ b/README.rst @@ -70,202 +70,1536 @@ Libraries * - Client - Release Level - Version + - API Issues + - File an API Issue + - Client Library Issues + * - `A python wrapper of the C library 'CRC32C' `_ + - stable + - |PyPI-google-crc32c| + - + - + - `Client Library Issues `_ * - `AI Platform `_ - - |stable| + - stable - |PyPI-google-cloud-aiplatform| - * - `App Engine Admin `_ - - |stable| + - `API Issues `_ + - `File an API Issue `_ + - `Client Library Issues `_ + * - `AI Platform Notebooks `_ + - stable + - |PyPI-google-cloud-notebooks| + - + - + - `Client Library Issues `_ + * - `API Gateway `_ + - stable + - |PyPI-google-cloud-api-gateway| + - + - + - `Client Library Issues `_ + * - `Access Approval `_ + - stable + - |PyPI-google-cloud-access-approval| + - + - + - `Client Library Issues `_ + * - `Apigee Connect `_ + - stable + - |PyPI-google-cloud-apigee-connect| + - + - + - `Client Library Issues `_ + * - `App Engine Admin `_ + - stable - |PyPI-google-cloud-appengine-admin| - * - `Asset Inventory `_ - - |stable| + - + - + - `Client Library Issues `_ + * - `App Engine Logging Protos `_ + - stable + - |PyPI-google-cloud-appengine-logging| + - + - + - `Client Library Issues `_ + * - `Artifact Registry `_ + - stable + - |PyPI-google-cloud-artifact-registry| + - + - + - `Client Library Issues `_ + * - `Asset Inventory `_ + - stable - |PyPI-google-cloud-asset| - * - `AutoML `_ - - |stable| + - `API Issues `_ + - `File an API Issue `_ + - `Client Library Issues `_ + * - `Assured Workloads for Government `_ + - stable + - |PyPI-google-cloud-assured-workloads| + - + - + - `Client Library Issues `_ + * - `AutoML `_ + - stable - |PyPI-google-cloud-automl| + - `API Issues `_ + - `File an API Issue `_ + - `Client Library Issues `_ + * - `Bare Metal Solution `_ + - stable + - |PyPI-google-cloud-bare-metal-solution| + - + - + - `Client Library Issues `_ * - `BigQuery `_ - - |stable| + - stable - |PyPI-google-cloud-bigquery| + - `API Issues `_ + - `File an API Issue `_ + - `Client Library Issues `_ + * - `BigQuery Connection `_ + - stable + - |PyPI-google-cloud-bigquery-connection| + - + - + - `Client Library Issues `_ + * - `BigQuery Data Transfer `_ + - stable + - |PyPI-google-cloud-bigquery-datatransfer| + - `API Issues `_ + - `File an API Issue `_ + - `Client Library Issues `_ + * - `BigQuery Logging Protos `_ + - stable + - |PyPI-google-cloud-bigquery-logging| + - + - + - `Client Library Issues `_ + * - `BigQuery Reservation `_ + - stable + - |PyPI-google-cloud-bigquery-reservation| + - + - + - `Client Library Issues `_ * - `BigQuery Storage `_ - - |stable| + - stable - |PyPI-google-cloud-bigquery-storage| + - `API Issues `_ + - `File 
an API Issue `_ + - `Client Library Issues `_ + * - `BigQuery connector for Jupyter and IPython `_ + - stable + - |PyPI-bigquery-magics| + - + - + - `Client Library Issues `_ * - `Bigtable `_ - - |stable| + - stable - |PyPI-google-cloud-bigtable| - * - `Binary Authorization `_ - - |stable| + - `API Issues `_ + - `File an API Issue `_ + - `Client Library Issues `_ + * - `Billing `_ + - stable + - |PyPI-google-cloud-billing| + - + - + - `Client Library Issues `_ + * - `Billing Budget `_ + - stable + - |PyPI-google-cloud-billing-budgets| + - `API Issues `_ + - `File an API Issue `_ + - `Client Library Issues `_ + * - `Binary Authorization `_ + - stable - |PyPI-google-cloud-binary-authorization| - * - `Build `_ - - |stable| + - + - + - `Client Library Issues `_ + * - `Build `_ + - stable - |PyPI-google-cloud-build| - * - `Common `_ - - |stable| + - `API Issues `_ + - `File an API Issue `_ + - `Client Library Issues `_ + * - `Certificate Manager `_ + - stable + - |PyPI-google-cloud-certificate-manager| + - + - + - `Client Library Issues `_ + * - `Channel Services `_ + - stable + - |PyPI-google-cloud-channel| + - + - + - `Client Library Issues `_ + * - `Common `_ + - stable - |PyPI-google-cloud-common| - * - `Compute Engine `_ - - |stable| + - + - + - `Client Library Issues `_ + * - `Composer `_ + - stable + - |PyPI-google-cloud-orchestration-airflow| + - + - + - `Client Library Issues `_ + * - `Compute Engine `_ + - stable - |PyPI-google-cloud-compute| - * - `Container Analysis `_ - - |stable| + - `API Issues `_ + - `File an API Issue `_ + - `Client Library Issues `_ + * - `Contact Center AI Insights `_ + - stable + - |PyPI-google-cloud-contact-center-insights| + - + - + - `Client Library Issues `_ + * - `Container Analysis `_ + - stable - |PyPI-google-cloud-containeranalysis| + - `API Issues `_ + - `File an API Issue `_ + - `Client Library Issues `_ + * - `Data Catalog `_ + - stable + - |PyPI-google-cloud-datacatalog| + - + - + - `Client Library Issues `_ + * - `Data Fusion `_ + - stable + - |PyPI-google-cloud-data-fusion| + - + - + - `Client Library Issues `_ + * - `Data Loss Prevention `_ + - stable + - |PyPI-google-cloud-dlp| + - + - + - `Client Library Issues `_ + * - `Database Migration Service `_ + - stable + - |PyPI-google-cloud-dms| + - + - + - `Client Library Issues `_ + * - `Dataplex `_ + - stable + - |PyPI-google-cloud-dataplex| + - + - + - `Client Library Issues `_ + * - `Dataproc `_ + - stable + - |PyPI-google-cloud-dataproc| + - `API Issues `_ + - `File an API Issue `_ + - `Client Library Issues `_ + * - `Dataproc Metastore `_ + - stable + - |PyPI-google-cloud-dataproc-metastore| + - + - + - `Client Library Issues `_ * - `Datastore `_ - - |stable| + - stable - |PyPI-google-cloud-datastore| - * - `Filestore `_ - - |stable| + - `API Issues `_ + - `File an API Issue `_ + - `Client Library Issues `_ + * - `Datastream `_ + - stable + - |PyPI-google-cloud-datastream| + - + - + - `Client Library Issues `_ + * - `Deploy `_ + - stable + - |PyPI-google-cloud-deploy| + - + - + - `Client Library Issues `_ + * - `Dialogflow `_ + - stable + - |PyPI-google-cloud-dialogflow| + - `API Issues `_ + - `File an API Issue `_ + - `Client Library Issues `_ + * - `Dialogflow CX `_ + - stable + - |PyPI-google-cloud-dialogflow-cx| + - `API Issues `_ + - `File an API Issue `_ + - `Client Library Issues `_ + * - `Document AI `_ + - stable + - |PyPI-google-cloud-documentai| + - + - + - `Client Library Issues `_ + * - `Domains `_ + - stable + - |PyPI-google-cloud-domains| + - + - + - `Client Library Issues `_ 
+ * - `Essential Contacts `_ + - stable + - |PyPI-google-cloud-essential-contacts| + - + - + - `Client Library Issues `_ + * - `Eventarc `_ + - stable + - |PyPI-google-cloud-eventarc| + - + - + - `Client Library Issues `_ + * - `Filestore `_ + - stable - |PyPI-google-cloud-filestore| + - + - + - `Client Library Issues `_ * - `Firestore `_ - - |stable| + - stable - |PyPI-google-cloud-firestore| - * - `GKE Hub `_ - - |stable| + - `API Issues `_ + - `File an API Issue `_ + - `Client Library Issues `_ + * - `Functions `_ + - stable + - |PyPI-google-cloud-functions| + - `API Issues `_ + - `File an API Issue `_ + - `Client Library Issues `_ + * - `GKE Hub `_ + - stable - |PyPI-google-cloud-gke-hub| - * - `Grafeas `_ - - |stable| + - + - + - `Client Library Issues `_ + * - `Grafeas `_ + - stable - |PyPI-grafeas| + - + - + - `Client Library Issues `_ + * - `IAM Logging Protos `_ + - stable + - |PyPI-google-cloud-iam-logging| + - + - + - `Client Library Issues `_ + * - `IAM Policy Troubleshooter API `_ + - stable + - |PyPI-google-cloud-policy-troubleshooter| + - + - + - `Client Library Issues `_ + * - `IDS `_ + - stable + - |PyPI-google-cloud-ids| + - + - + - `Client Library Issues `_ * - `Identity and Access Management `_ - - |stable| + - stable - |PyPI-grpc-google-iam-v1| - * - `Key Management Service `_ - - |stable| + - `API Issues `_ + - `File an API Issue `_ + - `Client Library Issues `_ + * - `Identity and Access Management `_ + - stable + - |PyPI-google-cloud-iam| + - `API Issues `_ + - `File an API Issue `_ + - `Client Library Issues `_ + * - `Identity-Aware Proxy `_ + - stable + - |PyPI-google-cloud-iap| + - + - + - `Client Library Issues `_ + * - `Key Management Service `_ + - stable - |PyPI-google-cloud-kms| + - `API Issues `_ + - `File an API Issue `_ + - `Client Library Issues `_ + * - `Kubernetes Engine `_ + - stable + - |PyPI-google-cloud-container| + - `API Issues `_ + - `File an API Issue `_ + - `Client Library Issues `_ + * - `Live Stream `_ + - stable + - |PyPI-google-cloud-video-live-stream| + - + - + - `Client Library Issues `_ * - `Logging `_ - - |stable| + - stable - |PyPI-google-cloud-logging| - * - `Monitoring Dashboards `_ - - |stable| + - `API Issues `_ + - `File an API Issue `_ + - `Client Library Issues `_ + * - `Managed Service for Microsoft Active Directory `_ + - stable + - |PyPI-google-cloud-managed-identities| + - + - + - `Client Library Issues `_ + * - `Memorystore for Memcached `_ + - stable + - |PyPI-google-cloud-memcache| + - + - + - `Client Library Issues `_ + * - `Metrics Scopes `_ + - stable + - |PyPI-google-cloud-monitoring-metrics-scopes| + - `API Issues `_ + - `File an API Issue `_ + - `Client Library Issues `_ + * - `Monitoring Dashboards `_ + - stable - |PyPI-google-cloud-monitoring-dashboards| + - `API Issues `_ + - `File an API Issue `_ + - `Client Library Issues `_ * - `NDB Client Library for Datastore `_ - - |stable| + - stable - |PyPI-google-cloud-ndb| - * - `OS Login `_ - - |stable| + - + - + - `Client Library Issues `_ + * - `Natural Language `_ + - stable + - |PyPI-google-cloud-language| + - `API Issues `_ + - `File an API Issue `_ + - `Client Library Issues `_ + * - `Network Connectivity Center `_ + - stable + - |PyPI-google-cloud-network-connectivity| + - + - + - `Client Library Issues `_ + * - `Network Management `_ + - stable + - |PyPI-google-cloud-network-management| + - + - + - `Client Library Issues `_ + * - `OS Config `_ + - stable + - |PyPI-google-cloud-os-config| + - + - + - `Client Library Issues `_ + * - `OS Login `_ + - stable - 
|PyPI-google-cloud-os-login| + - `API Issues `_ + - `File an API Issue `_ + - `Client Library Issues `_ + * - `Optimization `_ + - stable + - |PyPI-google-cloud-optimization| + - + - + - `Client Library Issues `_ + * - `Organization Policy `_ + - stable + - |PyPI-google-cloud-org-policy| + - + - + - `Client Library Issues `_ * - `Pandas Data Types for SQL systems (BigQuery, Spanner) `_ - - |stable| + - stable - |PyPI-db-dtypes| + - + - + - `Client Library Issues `_ + * - `Private Certificate Authority `_ + - stable + - |PyPI-google-cloud-private-ca| + - + - + - `Client Library Issues `_ * - `Pub/Sub `_ - - |stable| + - stable - |PyPI-google-cloud-pubsub| + - `API Issues `_ + - `File an API Issue `_ + - `Client Library Issues `_ * - `Pub/Sub Lite `_ - - |stable| + - stable - |PyPI-google-cloud-pubsublite| - * - `Service Management `_ - - |stable| + - + - + - `Client Library Issues `_ + * - `Recommender `_ + - stable + - |PyPI-google-cloud-recommender| + - + - + - `Client Library Issues `_ + * - `Redis `_ + - stable + - |PyPI-google-cloud-redis| + - `API Issues `_ + - `File an API Issue `_ + - `Client Library Issues `_ + * - `Resource Manager `_ + - stable + - |PyPI-google-cloud-resource-manager| + - `API Issues `_ + - `File an API Issue `_ + - `Client Library Issues `_ + * - `Resource Settings `_ + - stable + - |PyPI-google-cloud-resource-settings| + - + - + - `Client Library Issues `_ + * - `Retail `_ + - stable + - |PyPI-google-cloud-retail| + - + - + - `Client Library Issues `_ + * - `Scheduler `_ + - stable + - |PyPI-google-cloud-scheduler| + - `API Issues `_ + - `File an API Issue `_ + - `Client Library Issues `_ + * - `Secret Manager `_ + - stable + - |PyPI-google-cloud-secret-manager| + - + - + - `Client Library Issues `_ + * - `Security Command Center `_ + - stable + - |PyPI-google-cloud-securitycenter| + - `API Issues `_ + - `File an API Issue `_ + - `Client Library Issues `_ + * - `Security Scanner `_ + - stable + - |PyPI-google-cloud-websecurityscanner| + - `API Issues `_ + - `File an API Issue `_ + - `Client Library Issues `_ + * - `Service Control `_ + - stable + - |PyPI-google-cloud-service-control| + - + - + - `Client Library Issues `_ + * - `Service Directory `_ + - stable + - |PyPI-google-cloud-service-directory| + - + - + - `Client Library Issues `_ + * - `Service Management `_ + - stable - |PyPI-google-cloud-service-management| + - + - + - `Client Library Issues `_ + * - `Service Usage `_ + - stable + - |PyPI-google-cloud-service-usage| + - + - + - `Client Library Issues `_ + * - `Shell `_ + - stable + - |PyPI-google-cloud-shell| + - + - + - `Client Library Issues `_ + * - `Source Context `_ + - stable + - |PyPI-google-cloud-source-context| + - + - + - `Client Library Issues `_ * - `Spanner `_ - - |stable| + - stable - |PyPI-google-cloud-spanner| + - `API Issues `_ + - `File an API Issue `_ + - `Client Library Issues `_ * - `Spanner Django `_ - - |stable| + - stable - |PyPI-django-google-spanner| - * - `Speech `_ - - |stable| + - `API Issues `_ + - `File an API Issue `_ + - `Client Library Issues `_ + * - `Speech `_ + - stable - |PyPI-google-cloud-speech| - * - `Stackdriver Monitoring `_ - - |stable| + - `API Issues `_ + - `File an API Issue `_ + - `Client Library Issues `_ + * - `Stackdriver Monitoring `_ + - stable - |PyPI-google-cloud-monitoring| + - `API Issues `_ + - `File an API Issue `_ + - `Client Library Issues `_ * - `Storage `_ - - |stable| + - stable - |PyPI-google-cloud-storage| - * - `Trace `_ - - |stable| + - `API Issues `_ + - `File an API Issue `_ + - 
`Client Library Issues `_ + * - `Storage Control API `_ + - stable + - |PyPI-google-cloud-storage-control| + - `API Issues `_ + - `File an API Issue `_ + - `Client Library Issues `_ + * - `Storage Transfer Service `_ + - stable + - |PyPI-google-cloud-storage-transfer| + - + - + - `Client Library Issues `_ + * - `TPU `_ + - stable + - |PyPI-google-cloud-tpu| + - + - + - `Client Library Issues `_ + * - `Talent Solution `_ + - stable + - |PyPI-google-cloud-talent| + - `API Issues `_ + - `File an API Issue `_ + - `Client Library Issues `_ + * - `Tasks `_ + - stable + - |PyPI-google-cloud-tasks| + - `API Issues `_ + - `File an API Issue `_ + - `Client Library Issues `_ + * - `Text-to-Speech `_ + - stable + - |PyPI-google-cloud-texttospeech| + - `API Issues `_ + - `File an API Issue `_ + - `Client Library Issues `_ + * - `Trace `_ + - stable - |PyPI-google-cloud-trace| - * - `Translation `_ - - |stable| + - `API Issues `_ + - `File an API Issue `_ + - `Client Library Issues `_ + * - `Transcoder `_ + - stable + - |PyPI-google-cloud-video-transcoder| + - + - + - `Client Library Issues `_ + * - `Translation `_ + - stable - |PyPI-google-cloud-translate| - * - `Vision `_ - - |stable| + - `API Issues `_ + - `File an API Issue `_ + - `Client Library Issues `_ + * - `VM Migration `_ + - stable + - |PyPI-google-cloud-vm-migration| + - + - + - `Client Library Issues `_ + * - `Video Intelligence `_ + - stable + - |PyPI-google-cloud-videointelligence| + - `API Issues `_ + - `File an API Issue `_ + - `Client Library Issues `_ + * - `Virtual Private Cloud `_ + - stable + - |PyPI-google-cloud-vpc-access| + - + - + - `Client Library Issues `_ + * - `Vision `_ + - stable - |PyPI-google-cloud-vision| + - + - + - `Client Library Issues `_ + * - `Web Risk `_ + - stable + - |PyPI-google-cloud-webrisk| + - + - + - `Client Library Issues `_ + * - `Workflows `_ + - stable + - |PyPI-google-cloud-workflows| + - `API Issues `_ + - `File an API Issue `_ + - `Client Library Issues `_ + * - `reCAPTCHA Enterprise `_ + - stable + - |PyPI-google-cloud-recaptcha-enterprise| + - + - + - `Client Library Issues `_ * - `A unified Python API in BigQuery `_ - - |preview| + - preview - |PyPI-bigframes| - * - `Analytics Admin `_ - - |preview| + - + - + - `Client Library Issues `_ + * - `API Keys `_ + - preview + - |PyPI-google-cloud-api-keys| + - + - + - `Client Library Issues `_ + * - `Access Context Manager `_ + - preview + - |PyPI-google-cloud-access-context-manager| + - + - + - `Client Library Issues `_ + * - `Ad Manager `_ + - preview + - |PyPI-google-ads-admanager| + - `API Issues `_ + - `File an API Issue `_ + - `Client Library Issues `_ + * - `Address Validation API `_ + - preview + - |PyPI-google-maps-addressvalidation| + - + - + - `Client Library Issues `_ + * - `Advisory Notifications `_ + - preview + - |PyPI-google-cloud-advisorynotifications| + - + - + - `Client Library Issues `_ + * - `AlloyDB `_ + - preview + - |PyPI-google-cloud-alloydb| + - + - + - `Client Library Issues `_ + * - `AlloyDB connectors `_ + - preview + - |PyPI-google-cloud-alloydb-connectors| + - `API Issues `_ + - `File an API Issue `_ + - `Client Library Issues `_ + * - `Analytics Admin `_ + - preview - |PyPI-google-analytics-admin| - * - `Analytics Data `_ - - |preview| + - `API Issues `_ + - `File an API Issue `_ + - `Client Library Issues `_ + * - `Analytics Data `_ + - preview - |PyPI-google-analytics-data| + - `API Issues `_ + - `File an API Issue `_ + - `Client Library Issues `_ + * - `Anthos Multicloud `_ + - preview + - 
|PyPI-google-cloud-gke-multicloud| + - + - + - `Client Library Issues `_ + * - `Apache Kafka for BigQuery API `_ + - preview + - |PyPI-google-cloud-managedkafka| + - `API Issues `_ + - `File an API Issue `_ + - `Client Library Issues `_ + * - `Apigee Registry API `_ + - preview + - |PyPI-google-cloud-apigee-registry| + - + - + - `Client Library Issues `_ + * - `App Hub API `_ + - preview + - |PyPI-google-cloud-apphub| + - `API Issues `_ + - `File an API Issue `_ + - `Client Library Issues `_ + * - `Apps Card Protos `_ + - preview + - |PyPI-google-apps-card| + - + - + - `Client Library Issues `_ + * - `Apps Script Type Protos `_ + - preview + - |PyPI-google-apps-script-type| + - + - + - `Client Library Issues `_ + * - `Area 120 Tables `_ + - preview + - |PyPI-google-area120-tables| + - + - + - `Client Library Issues `_ * - `Audit Log `_ - - |preview| + - preview - |PyPI-google-cloud-audit-log| + - + - + - `Client Library Issues `_ + * - `Backup and DR Service API `_ + - preview + - |PyPI-google-cloud-backupdr| + - `API Issues `_ + - `File an API Issue `_ + - `Client Library Issues `_ + * - `Backup for GKE `_ + - preview + - |PyPI-google-cloud-gke-backup| + - + - + - `Client Library Issues `_ + * - `Batch `_ + - preview + - |PyPI-google-cloud-batch| + - + - + - `Client Library Issues `_ + * - `BeyondCorp AppConnections `_ + - preview + - |PyPI-google-cloud-beyondcorp-appconnections| + - + - + - `Client Library Issues `_ + * - `BeyondCorp AppConnectors `_ + - preview + - |PyPI-google-cloud-beyondcorp-appconnectors| + - + - + - `Client Library Issues `_ + * - `BeyondCorp AppGateways `_ + - preview + - |PyPI-google-cloud-beyondcorp-appgateways| + - + - + - `Client Library Issues `_ + * - `BeyondCorp ClientConnectorServices `_ + - preview + - |PyPI-google-cloud-beyondcorp-clientconnectorservices| + - + - + - `Client Library Issues `_ + * - `BeyondCorp ClientGateways `_ + - preview + - |PyPI-google-cloud-beyondcorp-clientgateways| + - + - + - `Client Library Issues `_ + * - `BigLake API `_ + - preview + - |PyPI-google-cloud-bigquery-biglake| + - `API Issues `_ + - `File an API Issue `_ + - `Client Library Issues `_ + * - `BigQuery Analytics Hub `_ + - preview + - |PyPI-google-cloud-bigquery-analyticshub| + - + - + - `Client Library Issues `_ + * - `BigQuery Analytics Hub `_ + - preview + - |PyPI-google-cloud-bigquery-data-exchange| + - + - + - `Client Library Issues `_ + * - `BigQuery Data Policy `_ + - preview + - |PyPI-google-cloud-bigquery-datapolicies| + - + - + - `Client Library Issues `_ + * - `BigQuery Migration `_ + - preview + - |PyPI-google-cloud-bigquery-migration| + - `API Issues `_ + - `File an API Issue `_ + - `Client Library Issues `_ * - `BigQuery connector for pandas `_ - - |preview| + - preview - |PyPI-pandas-gbq| + - + - + - `Client Library Issues `_ + * - `CSS API `_ + - preview + - |PyPI-google-shopping-css| + - `API Issues `_ + - `File an API Issue `_ + - `Client Library Issues `_ + * - `Chat API `_ + - preview + - |PyPI-google-apps-chat| + - + - + - `Client Library Issues `_ + * - `Commerce Consumer Procurement API `_ + - preview + - |PyPI-google-cloud-commerce-consumer-procurement| + - `API Issues `_ + - `File an API Issue `_ + - `Client Library Issues `_ + * - `Confidential Computing API `_ + - preview + - |PyPI-google-cloud-confidentialcomputing| + - `API Issues `_ + - `File an API Issue `_ + - `Client Library Issues `_ + * - `Controls Partner API `_ + - preview + - |PyPI-google-cloud-cloudcontrolspartner| + - + - + - `Client Library Issues `_ * - `DNS `_ - - |preview| + 
- preview - |PyPI-google-cloud-dns| - * - `Dataflow `_ - - |preview| + - `API Issues `_ + - `File an API Issue `_ + - `Client Library Issues `_ + * - `Data Labeling `_ + - preview + - |PyPI-google-cloud-datalabeling| + - + - + - `Client Library Issues `_ + * - `Data Lineage API `_ + - preview + - |PyPI-google-cloud-datacatalog-lineage| + - + - + - `Client Library Issues `_ + * - `Data QnA `_ + - preview + - |PyPI-google-cloud-data-qna| + - + - + - `Client Library Issues `_ + * - `Dataflow `_ + - preview - |PyPI-google-cloud-dataflow-client| + - + - + - `Client Library Issues `_ + * - `Dataform `_ + - preview + - |PyPI-google-cloud-dataform| + - + - + - `Client Library Issues `_ + * - `Developer Connect API `_ + - preview + - |PyPI-google-cloud-developerconnect| + - `API Issues `_ + - `File an API Issue `_ + - `Client Library Issues `_ + * - `Discovery Engine API `_ + - preview + - |PyPI-google-cloud-discoveryengine| + - + - + - `Client Library Issues `_ + * - `Distributed Edge Container `_ + - preview + - |PyPI-google-cloud-edgecontainer| + - + - + - `Client Library Issues `_ + * - `Distributed Edge Network API `_ + - preview + - |PyPI-google-cloud-edgenetwork| + - `API Issues `_ + - `File an API Issue `_ + - `Client Library Issues `_ * - `Document AI Toolbox `_ - - |preview| + - preview - |PyPI-google-cloud-documentai-toolbox| + - + - + - `Client Library Issues `_ + * - `Document AI Warehouse `_ + - preview + - |PyPI-google-cloud-contentwarehouse| + - + - + - `Client Library Issues `_ + * - `Enterprise Knowledge Graph `_ + - preview + - |PyPI-google-cloud-enterpriseknowledgegraph| + - + - + - `Client Library Issues `_ * - `Error Reporting `_ - - |preview| + - preview - |PyPI-google-cloud-error-reporting| - * - `Run `_ - - |preview| + - `API Issues `_ + - `File an API Issue `_ + - `Client Library Issues `_ + * - `Eventarc Publishing `_ + - preview + - |PyPI-google-cloud-eventarc-publishing| + - + - + - `Client Library Issues `_ + * - `GDC Hardware Management API `_ + - preview + - |PyPI-google-cloud-gdchardwaremanagement| + - `API Issues `_ + - `File an API Issue `_ + - `Client Library Issues `_ + * - `GKE Connect Gateway `_ + - preview + - |PyPI-google-cloud-gke-connect-gateway| + - + - + - `Client Library Issues `_ + * - `Generative Language API `_ + - preview + - |PyPI-google-ai-generativelanguage| + - + - + - `Client Library Issues `_ + * - `Geo Type Protos `_ + - preview + - |PyPI-google-geo-type| + - + - + - `Client Library Issues `_ + * - `Infrastructure Manager API `_ + - preview + - |PyPI-google-cloud-config| + - `API Issues `_ + - `File an API Issue `_ + - `Client Library Issues `_ + * - `KMS Inventory API `_ + - preview + - |PyPI-google-cloud-kms-inventory| + - `API Issues `_ + - `File an API Issue `_ + - `Client Library Issues `_ + * - `Last Mile Fleet Solution Delivery API `_ + - preview + - |PyPI-google-maps-fleetengine-delivery| + - + - + - `Client Library Issues `_ + * - `Life Sciences `_ + - preview + - |PyPI-google-cloud-life-sciences| + - + - + - `Client Library Issues `_ + * - `Local Rides and Deliveries API `_ + - preview + - |PyPI-google-maps-fleetengine| + - + - + - `Client Library Issues `_ + * - `Maps Platform Datasets API `_ + - preview + - |PyPI-google-maps-mapsplatformdatasets| + - + - + - `Client Library Issues `_ + * - `Maps Routing `_ + - preview + - |PyPI-google-maps-routing| + - + - + - `Client Library Issues `_ + * - `Media Translation `_ + - preview + - |PyPI-google-cloud-media-translation| + - + - + - `Client Library Issues `_ + * - `Meet API `_ + - 
preview + - |PyPI-google-apps-meet| + - `API Issues `_ + - `File an API Issue `_ + - `Client Library Issues `_ + * - `Memorystore for Redis API `_ + - preview + - |PyPI-google-cloud-redis-cluster| + - `API Issues `_ + - `File an API Issue `_ + - `Client Library Issues `_ + * - `Merchant API `_ + - preview + - |PyPI-google-shopping-merchant-accounts| + - + - + - `Client Library Issues `_ + * - `Merchant API `_ + - preview + - |PyPI-google-shopping-merchant-conversions| + - + - + - `Client Library Issues `_ + * - `Merchant API `_ + - preview + - |PyPI-google-shopping-merchant-datasources| + - + - + - `Client Library Issues `_ + * - `Merchant API `_ + - preview + - |PyPI-google-shopping-merchant-lfp| + - + - + - `Client Library Issues `_ + * - `Merchant API `_ + - preview + - |PyPI-google-shopping-merchant-notifications| + - + - + - `Client Library Issues `_ + * - `Merchant API `_ + - preview + - |PyPI-google-shopping-merchant-products| + - + - + - `Client Library Issues `_ + * - `Merchant API `_ + - preview + - |PyPI-google-shopping-merchant-promotions| + - + - + - `Client Library Issues `_ + * - `Merchant Inventories API `_ + - preview + - |PyPI-google-shopping-merchant-inventories| + - `API Issues `_ + - `File an API Issue `_ + - `Client Library Issues `_ + * - `Merchant Reports API `_ + - preview + - |PyPI-google-shopping-merchant-reports| + - `API Issues `_ + - `File an API Issue `_ + - `Client Library Issues `_ + * - `Migration Center API `_ + - preview + - |PyPI-google-cloud-migrationcenter| + - + - + - `Client Library Issues `_ + * - `NetApp API `_ + - preview + - |PyPI-google-cloud-netapp| + - `API Issues `_ + - `File an API Issue `_ + - `Client Library Issues `_ + * - `Network Security `_ + - preview + - |PyPI-google-cloud-network-security| + - + - + - `Client Library Issues `_ + * - `Network Services `_ + - preview + - |PyPI-google-cloud-network-services| + - + - + - `Client Library Issues `_ + * - `Parallelstore API `_ + - preview + - |PyPI-google-cloud-parallelstore| + - + - + - `Client Library Issues `_ + * - `Phishing Protection `_ + - preview + - |PyPI-google-cloud-phishing-protection| + - + - + - `Client Library Issues `_ + * - `Places API `_ + - preview + - |PyPI-google-maps-places| + - + - + - `Client Library Issues `_ + * - `Policy Simulator API `_ + - preview + - |PyPI-google-cloud-policysimulator| + - + - + - `Client Library Issues `_ + * - `Policy Troubleshooter API `_ + - preview + - |PyPI-google-cloud-policytroubleshooter-iam| + - `API Issues `_ + - `File an API Issue `_ + - `Client Library Issues `_ + * - `Private Catalog `_ + - preview + - |PyPI-google-cloud-private-catalog| + - + - + - `Client Library Issues `_ + * - `Public Certificate Authority `_ + - preview + - |PyPI-google-cloud-public-ca| + - + - + - `Client Library Issues `_ + * - `Quotas API `_ + - preview + - |PyPI-google-cloud-cloudquotas| + - `API Issues `_ + - `File an API Issue `_ + - `Client Library Issues `_ + * - `Rapid Migration Assessment API `_ + - preview + - |PyPI-google-cloud-rapidmigrationassessment| + - + - + - `Client Library Issues `_ + * - `Recommendations AI `_ + - preview + - |PyPI-google-cloud-recommendations-ai| + - + - + - `Client Library Issues `_ + * - `Route Optimization API `_ + - preview + - |PyPI-google-maps-routeoptimization| + - `API Issues `_ + - `File an API Issue `_ + - `Client Library Issues `_ + * - `Run `_ + - preview - |PyPI-google-cloud-run| + - + - + - `Client Library Issues `_ * - `Runtime Configurator `_ - - |preview| + - preview - |PyPI-google-cloud-runtimeconfig| 
+ - `API Issues `_ + - `File an API Issue `_ + - `Client Library Issues `_ * - `SQLAlchemy dialect for BigQuery `_ - - |preview| + - preview - |PyPI-sqlalchemy-bigquery| + - + - + - `Client Library Issues `_ + * - `Secure Source Manager API `_ + - preview + - |PyPI-google-cloud-securesourcemanager| + - + - + - `Client Library Issues `_ + * - `Security Center Management API `_ + - preview + - |PyPI-google-cloud-securitycentermanagement| + - + - + - `Client Library Issues `_ + * - `Service Health API `_ + - preview + - |PyPI-google-cloud-servicehealth| + - `API Issues `_ + - `File an API Issue `_ + - `Client Library Issues `_ + * - `Shopping Merchant Quota `_ + - preview + - |PyPI-google-shopping-merchant-quota| + - `API Issues `_ + - `File an API Issue `_ + - `Client Library Issues `_ + * - `Shopping Type Protos `_ + - preview + - |PyPI-google-shopping-type| + - + - + - `Client Library Issues `_ + * - `Solar API `_ + - preview + - |PyPI-google-maps-solar| + - `API Issues `_ + - `File an API Issue `_ + - `Client Library Issues `_ + * - `Storage Insights API `_ + - preview + - |PyPI-google-cloud-storageinsights| + - `API Issues `_ + - `File an API Issue `_ + - `Client Library Issues `_ + * - `Support API `_ + - preview + - |PyPI-google-cloud-support| + - + - + - `Client Library Issues `_ + * - `Telco Automation API `_ + - preview + - |PyPI-google-cloud-telcoautomation| + - `API Issues `_ + - `File an API Issue `_ + - `Client Library Issues `_ + * - `VMware Engine `_ + - preview + - |PyPI-google-cloud-vmwareengine| + - + - + - `Client Library Issues `_ + * - `Video Stitcher `_ + - preview + - |PyPI-google-cloud-video-stitcher| + - + - + - `Client Library Issues `_ + * - `Vision AI API `_ + - preview + - |PyPI-google-cloud-visionai| + - `API Issues `_ + - `File an API Issue `_ + - `Client Library Issues `_ + * - `Workspace Add-ons API `_ + - preview + - |PyPI-google-cloud-gsuiteaddons| + - + - + - `Client Library Issues `_ + * - `Workspace Events API `_ + - preview + - |PyPI-google-apps-events-subscriptions| + - + - + - `Client Library Issues `_ + * - `Workstations `_ + - preview + - |PyPI-google-cloud-workstations| + - + - + - `Client Library Issues `_ +.. |PyPI-google-crc32c| image:: https://img.shields.io/pypi/v/google-crc32c.svg + :target: https://pypi.org/project/google-crc32c .. |PyPI-google-cloud-aiplatform| image:: https://img.shields.io/pypi/v/google-cloud-aiplatform.svg :target: https://pypi.org/project/google-cloud-aiplatform +.. |PyPI-google-cloud-notebooks| image:: https://img.shields.io/pypi/v/google-cloud-notebooks.svg + :target: https://pypi.org/project/google-cloud-notebooks +.. |PyPI-google-cloud-api-gateway| image:: https://img.shields.io/pypi/v/google-cloud-api-gateway.svg + :target: https://pypi.org/project/google-cloud-api-gateway +.. |PyPI-google-cloud-access-approval| image:: https://img.shields.io/pypi/v/google-cloud-access-approval.svg + :target: https://pypi.org/project/google-cloud-access-approval +.. 
|PyPI-google-cloud-apigee-connect| image:: https://img.shields.io/pypi/v/google-cloud-apigee-connect.svg + :target: https://pypi.org/project/google-cloud-apigee-connect .. |PyPI-google-cloud-appengine-admin| image:: https://img.shields.io/pypi/v/google-cloud-appengine-admin.svg :target: https://pypi.org/project/google-cloud-appengine-admin +.. |PyPI-google-cloud-appengine-logging| image:: https://img.shields.io/pypi/v/google-cloud-appengine-logging.svg + :target: https://pypi.org/project/google-cloud-appengine-logging +.. |PyPI-google-cloud-artifact-registry| image:: https://img.shields.io/pypi/v/google-cloud-artifact-registry.svg + :target: https://pypi.org/project/google-cloud-artifact-registry .. |PyPI-google-cloud-asset| image:: https://img.shields.io/pypi/v/google-cloud-asset.svg :target: https://pypi.org/project/google-cloud-asset +.. |PyPI-google-cloud-assured-workloads| image:: https://img.shields.io/pypi/v/google-cloud-assured-workloads.svg + :target: https://pypi.org/project/google-cloud-assured-workloads .. |PyPI-google-cloud-automl| image:: https://img.shields.io/pypi/v/google-cloud-automl.svg :target: https://pypi.org/project/google-cloud-automl +.. |PyPI-google-cloud-bare-metal-solution| image:: https://img.shields.io/pypi/v/google-cloud-bare-metal-solution.svg + :target: https://pypi.org/project/google-cloud-bare-metal-solution .. |PyPI-google-cloud-bigquery| image:: https://img.shields.io/pypi/v/google-cloud-bigquery.svg :target: https://pypi.org/project/google-cloud-bigquery +.. |PyPI-google-cloud-bigquery-connection| image:: https://img.shields.io/pypi/v/google-cloud-bigquery-connection.svg + :target: https://pypi.org/project/google-cloud-bigquery-connection +.. |PyPI-google-cloud-bigquery-datatransfer| image:: https://img.shields.io/pypi/v/google-cloud-bigquery-datatransfer.svg + :target: https://pypi.org/project/google-cloud-bigquery-datatransfer +.. |PyPI-google-cloud-bigquery-logging| image:: https://img.shields.io/pypi/v/google-cloud-bigquery-logging.svg + :target: https://pypi.org/project/google-cloud-bigquery-logging +.. 
|PyPI-google-cloud-bigquery-reservation| image:: https://img.shields.io/pypi/v/google-cloud-bigquery-reservation.svg + :target: https://pypi.org/project/google-cloud-bigquery-reservation .. |PyPI-google-cloud-bigquery-storage| image:: https://img.shields.io/pypi/v/google-cloud-bigquery-storage.svg :target: https://pypi.org/project/google-cloud-bigquery-storage +.. |PyPI-bigquery-magics| image:: https://img.shields.io/pypi/v/bigquery-magics.svg + :target: https://pypi.org/project/bigquery-magics .. |PyPI-google-cloud-bigtable| image:: https://img.shields.io/pypi/v/google-cloud-bigtable.svg :target: https://pypi.org/project/google-cloud-bigtable +.. |PyPI-google-cloud-billing| image:: https://img.shields.io/pypi/v/google-cloud-billing.svg + :target: https://pypi.org/project/google-cloud-billing +.. |PyPI-google-cloud-billing-budgets| image:: https://img.shields.io/pypi/v/google-cloud-billing-budgets.svg + :target: https://pypi.org/project/google-cloud-billing-budgets .. |PyPI-google-cloud-binary-authorization| image:: https://img.shields.io/pypi/v/google-cloud-binary-authorization.svg :target: https://pypi.org/project/google-cloud-binary-authorization .. |PyPI-google-cloud-build| image:: https://img.shields.io/pypi/v/google-cloud-build.svg :target: https://pypi.org/project/google-cloud-build +.. |PyPI-google-cloud-certificate-manager| image:: https://img.shields.io/pypi/v/google-cloud-certificate-manager.svg + :target: https://pypi.org/project/google-cloud-certificate-manager +.. |PyPI-google-cloud-channel| image:: https://img.shields.io/pypi/v/google-cloud-channel.svg + :target: https://pypi.org/project/google-cloud-channel .. |PyPI-google-cloud-common| image:: https://img.shields.io/pypi/v/google-cloud-common.svg :target: https://pypi.org/project/google-cloud-common +.. |PyPI-google-cloud-orchestration-airflow| image:: https://img.shields.io/pypi/v/google-cloud-orchestration-airflow.svg + :target: https://pypi.org/project/google-cloud-orchestration-airflow .. |PyPI-google-cloud-compute| image:: https://img.shields.io/pypi/v/google-cloud-compute.svg :target: https://pypi.org/project/google-cloud-compute +.. 
|PyPI-google-cloud-contact-center-insights| image:: https://img.shields.io/pypi/v/google-cloud-contact-center-insights.svg + :target: https://pypi.org/project/google-cloud-contact-center-insights .. |PyPI-google-cloud-containeranalysis| image:: https://img.shields.io/pypi/v/google-cloud-containeranalysis.svg :target: https://pypi.org/project/google-cloud-containeranalysis +.. |PyPI-google-cloud-datacatalog| image:: https://img.shields.io/pypi/v/google-cloud-datacatalog.svg + :target: https://pypi.org/project/google-cloud-datacatalog +.. |PyPI-google-cloud-data-fusion| image:: https://img.shields.io/pypi/v/google-cloud-data-fusion.svg + :target: https://pypi.org/project/google-cloud-data-fusion +.. |PyPI-google-cloud-dlp| image:: https://img.shields.io/pypi/v/google-cloud-dlp.svg + :target: https://pypi.org/project/google-cloud-dlp +.. |PyPI-google-cloud-dms| image:: https://img.shields.io/pypi/v/google-cloud-dms.svg + :target: https://pypi.org/project/google-cloud-dms +.. |PyPI-google-cloud-dataplex| image:: https://img.shields.io/pypi/v/google-cloud-dataplex.svg + :target: https://pypi.org/project/google-cloud-dataplex +.. |PyPI-google-cloud-dataproc| image:: https://img.shields.io/pypi/v/google-cloud-dataproc.svg + :target: https://pypi.org/project/google-cloud-dataproc +.. |PyPI-google-cloud-dataproc-metastore| image:: https://img.shields.io/pypi/v/google-cloud-dataproc-metastore.svg + :target: https://pypi.org/project/google-cloud-dataproc-metastore .. |PyPI-google-cloud-datastore| image:: https://img.shields.io/pypi/v/google-cloud-datastore.svg :target: https://pypi.org/project/google-cloud-datastore +.. |PyPI-google-cloud-datastream| image:: https://img.shields.io/pypi/v/google-cloud-datastream.svg + :target: https://pypi.org/project/google-cloud-datastream +.. |PyPI-google-cloud-deploy| image:: https://img.shields.io/pypi/v/google-cloud-deploy.svg + :target: https://pypi.org/project/google-cloud-deploy +.. |PyPI-google-cloud-dialogflow| image:: https://img.shields.io/pypi/v/google-cloud-dialogflow.svg + :target: https://pypi.org/project/google-cloud-dialogflow +.. 
|PyPI-google-cloud-dialogflow-cx| image:: https://img.shields.io/pypi/v/google-cloud-dialogflow-cx.svg + :target: https://pypi.org/project/google-cloud-dialogflow-cx +.. |PyPI-google-cloud-documentai| image:: https://img.shields.io/pypi/v/google-cloud-documentai.svg + :target: https://pypi.org/project/google-cloud-documentai +.. |PyPI-google-cloud-domains| image:: https://img.shields.io/pypi/v/google-cloud-domains.svg + :target: https://pypi.org/project/google-cloud-domains +.. |PyPI-google-cloud-essential-contacts| image:: https://img.shields.io/pypi/v/google-cloud-essential-contacts.svg + :target: https://pypi.org/project/google-cloud-essential-contacts +.. |PyPI-google-cloud-eventarc| image:: https://img.shields.io/pypi/v/google-cloud-eventarc.svg + :target: https://pypi.org/project/google-cloud-eventarc .. |PyPI-google-cloud-filestore| image:: https://img.shields.io/pypi/v/google-cloud-filestore.svg :target: https://pypi.org/project/google-cloud-filestore .. |PyPI-google-cloud-firestore| image:: https://img.shields.io/pypi/v/google-cloud-firestore.svg :target: https://pypi.org/project/google-cloud-firestore +.. |PyPI-google-cloud-functions| image:: https://img.shields.io/pypi/v/google-cloud-functions.svg + :target: https://pypi.org/project/google-cloud-functions .. |PyPI-google-cloud-gke-hub| image:: https://img.shields.io/pypi/v/google-cloud-gke-hub.svg :target: https://pypi.org/project/google-cloud-gke-hub .. |PyPI-grafeas| image:: https://img.shields.io/pypi/v/grafeas.svg :target: https://pypi.org/project/grafeas +.. |PyPI-google-cloud-iam-logging| image:: https://img.shields.io/pypi/v/google-cloud-iam-logging.svg + :target: https://pypi.org/project/google-cloud-iam-logging +.. |PyPI-google-cloud-policy-troubleshooter| image:: https://img.shields.io/pypi/v/google-cloud-policy-troubleshooter.svg + :target: https://pypi.org/project/google-cloud-policy-troubleshooter +.. |PyPI-google-cloud-ids| image:: https://img.shields.io/pypi/v/google-cloud-ids.svg + :target: https://pypi.org/project/google-cloud-ids .. 
|PyPI-grpc-google-iam-v1| image:: https://img.shields.io/pypi/v/grpc-google-iam-v1.svg :target: https://pypi.org/project/grpc-google-iam-v1 +.. |PyPI-google-cloud-iam| image:: https://img.shields.io/pypi/v/google-cloud-iam.svg + :target: https://pypi.org/project/google-cloud-iam +.. |PyPI-google-cloud-iap| image:: https://img.shields.io/pypi/v/google-cloud-iap.svg + :target: https://pypi.org/project/google-cloud-iap .. |PyPI-google-cloud-kms| image:: https://img.shields.io/pypi/v/google-cloud-kms.svg :target: https://pypi.org/project/google-cloud-kms +.. |PyPI-google-cloud-container| image:: https://img.shields.io/pypi/v/google-cloud-container.svg + :target: https://pypi.org/project/google-cloud-container +.. |PyPI-google-cloud-video-live-stream| image:: https://img.shields.io/pypi/v/google-cloud-video-live-stream.svg + :target: https://pypi.org/project/google-cloud-video-live-stream .. |PyPI-google-cloud-logging| image:: https://img.shields.io/pypi/v/google-cloud-logging.svg :target: https://pypi.org/project/google-cloud-logging +.. |PyPI-google-cloud-managed-identities| image:: https://img.shields.io/pypi/v/google-cloud-managed-identities.svg + :target: https://pypi.org/project/google-cloud-managed-identities +.. |PyPI-google-cloud-memcache| image:: https://img.shields.io/pypi/v/google-cloud-memcache.svg + :target: https://pypi.org/project/google-cloud-memcache +.. |PyPI-google-cloud-monitoring-metrics-scopes| image:: https://img.shields.io/pypi/v/google-cloud-monitoring-metrics-scopes.svg + :target: https://pypi.org/project/google-cloud-monitoring-metrics-scopes .. |PyPI-google-cloud-monitoring-dashboards| image:: https://img.shields.io/pypi/v/google-cloud-monitoring-dashboards.svg :target: https://pypi.org/project/google-cloud-monitoring-dashboards .. |PyPI-google-cloud-ndb| image:: https://img.shields.io/pypi/v/google-cloud-ndb.svg :target: https://pypi.org/project/google-cloud-ndb +.. |PyPI-google-cloud-language| image:: https://img.shields.io/pypi/v/google-cloud-language.svg + :target: https://pypi.org/project/google-cloud-language +.. 
|PyPI-google-cloud-network-connectivity| image:: https://img.shields.io/pypi/v/google-cloud-network-connectivity.svg + :target: https://pypi.org/project/google-cloud-network-connectivity +.. |PyPI-google-cloud-network-management| image:: https://img.shields.io/pypi/v/google-cloud-network-management.svg + :target: https://pypi.org/project/google-cloud-network-management +.. |PyPI-google-cloud-os-config| image:: https://img.shields.io/pypi/v/google-cloud-os-config.svg + :target: https://pypi.org/project/google-cloud-os-config .. |PyPI-google-cloud-os-login| image:: https://img.shields.io/pypi/v/google-cloud-os-login.svg :target: https://pypi.org/project/google-cloud-os-login +.. |PyPI-google-cloud-optimization| image:: https://img.shields.io/pypi/v/google-cloud-optimization.svg + :target: https://pypi.org/project/google-cloud-optimization +.. |PyPI-google-cloud-org-policy| image:: https://img.shields.io/pypi/v/google-cloud-org-policy.svg + :target: https://pypi.org/project/google-cloud-org-policy .. |PyPI-db-dtypes| image:: https://img.shields.io/pypi/v/db-dtypes.svg :target: https://pypi.org/project/db-dtypes +.. |PyPI-google-cloud-private-ca| image:: https://img.shields.io/pypi/v/google-cloud-private-ca.svg + :target: https://pypi.org/project/google-cloud-private-ca .. |PyPI-google-cloud-pubsub| image:: https://img.shields.io/pypi/v/google-cloud-pubsub.svg :target: https://pypi.org/project/google-cloud-pubsub .. |PyPI-google-cloud-pubsublite| image:: https://img.shields.io/pypi/v/google-cloud-pubsublite.svg :target: https://pypi.org/project/google-cloud-pubsublite +.. |PyPI-google-cloud-recommender| image:: https://img.shields.io/pypi/v/google-cloud-recommender.svg + :target: https://pypi.org/project/google-cloud-recommender +.. |PyPI-google-cloud-redis| image:: https://img.shields.io/pypi/v/google-cloud-redis.svg + :target: https://pypi.org/project/google-cloud-redis +.. |PyPI-google-cloud-resource-manager| image:: https://img.shields.io/pypi/v/google-cloud-resource-manager.svg + :target: https://pypi.org/project/google-cloud-resource-manager +.. 
|PyPI-google-cloud-resource-settings| image:: https://img.shields.io/pypi/v/google-cloud-resource-settings.svg + :target: https://pypi.org/project/google-cloud-resource-settings +.. |PyPI-google-cloud-retail| image:: https://img.shields.io/pypi/v/google-cloud-retail.svg + :target: https://pypi.org/project/google-cloud-retail +.. |PyPI-google-cloud-scheduler| image:: https://img.shields.io/pypi/v/google-cloud-scheduler.svg + :target: https://pypi.org/project/google-cloud-scheduler +.. |PyPI-google-cloud-secret-manager| image:: https://img.shields.io/pypi/v/google-cloud-secret-manager.svg + :target: https://pypi.org/project/google-cloud-secret-manager +.. |PyPI-google-cloud-securitycenter| image:: https://img.shields.io/pypi/v/google-cloud-securitycenter.svg + :target: https://pypi.org/project/google-cloud-securitycenter +.. |PyPI-google-cloud-websecurityscanner| image:: https://img.shields.io/pypi/v/google-cloud-websecurityscanner.svg + :target: https://pypi.org/project/google-cloud-websecurityscanner +.. |PyPI-google-cloud-service-control| image:: https://img.shields.io/pypi/v/google-cloud-service-control.svg + :target: https://pypi.org/project/google-cloud-service-control +.. |PyPI-google-cloud-service-directory| image:: https://img.shields.io/pypi/v/google-cloud-service-directory.svg + :target: https://pypi.org/project/google-cloud-service-directory .. |PyPI-google-cloud-service-management| image:: https://img.shields.io/pypi/v/google-cloud-service-management.svg :target: https://pypi.org/project/google-cloud-service-management +.. |PyPI-google-cloud-service-usage| image:: https://img.shields.io/pypi/v/google-cloud-service-usage.svg + :target: https://pypi.org/project/google-cloud-service-usage +.. |PyPI-google-cloud-shell| image:: https://img.shields.io/pypi/v/google-cloud-shell.svg + :target: https://pypi.org/project/google-cloud-shell +.. |PyPI-google-cloud-source-context| image:: https://img.shields.io/pypi/v/google-cloud-source-context.svg + :target: https://pypi.org/project/google-cloud-source-context .. 
|PyPI-google-cloud-spanner| image:: https://img.shields.io/pypi/v/google-cloud-spanner.svg :target: https://pypi.org/project/google-cloud-spanner .. |PyPI-django-google-spanner| image:: https://img.shields.io/pypi/v/django-google-spanner.svg @@ -276,36 +1610,254 @@ Libraries :target: https://pypi.org/project/google-cloud-monitoring .. |PyPI-google-cloud-storage| image:: https://img.shields.io/pypi/v/google-cloud-storage.svg :target: https://pypi.org/project/google-cloud-storage +.. |PyPI-google-cloud-storage-control| image:: https://img.shields.io/pypi/v/google-cloud-storage-control.svg + :target: https://pypi.org/project/google-cloud-storage-control +.. |PyPI-google-cloud-storage-transfer| image:: https://img.shields.io/pypi/v/google-cloud-storage-transfer.svg + :target: https://pypi.org/project/google-cloud-storage-transfer +.. |PyPI-google-cloud-tpu| image:: https://img.shields.io/pypi/v/google-cloud-tpu.svg + :target: https://pypi.org/project/google-cloud-tpu +.. |PyPI-google-cloud-talent| image:: https://img.shields.io/pypi/v/google-cloud-talent.svg + :target: https://pypi.org/project/google-cloud-talent +.. |PyPI-google-cloud-tasks| image:: https://img.shields.io/pypi/v/google-cloud-tasks.svg + :target: https://pypi.org/project/google-cloud-tasks +.. |PyPI-google-cloud-texttospeech| image:: https://img.shields.io/pypi/v/google-cloud-texttospeech.svg + :target: https://pypi.org/project/google-cloud-texttospeech .. |PyPI-google-cloud-trace| image:: https://img.shields.io/pypi/v/google-cloud-trace.svg :target: https://pypi.org/project/google-cloud-trace +.. |PyPI-google-cloud-video-transcoder| image:: https://img.shields.io/pypi/v/google-cloud-video-transcoder.svg + :target: https://pypi.org/project/google-cloud-video-transcoder .. |PyPI-google-cloud-translate| image:: https://img.shields.io/pypi/v/google-cloud-translate.svg :target: https://pypi.org/project/google-cloud-translate +.. |PyPI-google-cloud-vm-migration| image:: https://img.shields.io/pypi/v/google-cloud-vm-migration.svg + :target: https://pypi.org/project/google-cloud-vm-migration +.. 
|PyPI-google-cloud-videointelligence| image:: https://img.shields.io/pypi/v/google-cloud-videointelligence.svg + :target: https://pypi.org/project/google-cloud-videointelligence +.. |PyPI-google-cloud-vpc-access| image:: https://img.shields.io/pypi/v/google-cloud-vpc-access.svg + :target: https://pypi.org/project/google-cloud-vpc-access .. |PyPI-google-cloud-vision| image:: https://img.shields.io/pypi/v/google-cloud-vision.svg :target: https://pypi.org/project/google-cloud-vision +.. |PyPI-google-cloud-webrisk| image:: https://img.shields.io/pypi/v/google-cloud-webrisk.svg + :target: https://pypi.org/project/google-cloud-webrisk +.. |PyPI-google-cloud-workflows| image:: https://img.shields.io/pypi/v/google-cloud-workflows.svg + :target: https://pypi.org/project/google-cloud-workflows +.. |PyPI-google-cloud-recaptcha-enterprise| image:: https://img.shields.io/pypi/v/google-cloud-recaptcha-enterprise.svg + :target: https://pypi.org/project/google-cloud-recaptcha-enterprise .. |PyPI-bigframes| image:: https://img.shields.io/pypi/v/bigframes.svg :target: https://pypi.org/project/bigframes +.. |PyPI-google-cloud-api-keys| image:: https://img.shields.io/pypi/v/google-cloud-api-keys.svg + :target: https://pypi.org/project/google-cloud-api-keys +.. |PyPI-google-cloud-access-context-manager| image:: https://img.shields.io/pypi/v/google-cloud-access-context-manager.svg + :target: https://pypi.org/project/google-cloud-access-context-manager +.. |PyPI-google-ads-admanager| image:: https://img.shields.io/pypi/v/google-ads-admanager.svg + :target: https://pypi.org/project/google-ads-admanager +.. |PyPI-google-maps-addressvalidation| image:: https://img.shields.io/pypi/v/google-maps-addressvalidation.svg + :target: https://pypi.org/project/google-maps-addressvalidation +.. |PyPI-google-cloud-advisorynotifications| image:: https://img.shields.io/pypi/v/google-cloud-advisorynotifications.svg + :target: https://pypi.org/project/google-cloud-advisorynotifications +.. |PyPI-google-cloud-alloydb| image:: https://img.shields.io/pypi/v/google-cloud-alloydb.svg + :target: https://pypi.org/project/google-cloud-alloydb +.. 
|PyPI-google-cloud-alloydb-connectors| image:: https://img.shields.io/pypi/v/google-cloud-alloydb-connectors.svg + :target: https://pypi.org/project/google-cloud-alloydb-connectors .. |PyPI-google-analytics-admin| image:: https://img.shields.io/pypi/v/google-analytics-admin.svg :target: https://pypi.org/project/google-analytics-admin .. |PyPI-google-analytics-data| image:: https://img.shields.io/pypi/v/google-analytics-data.svg :target: https://pypi.org/project/google-analytics-data +.. |PyPI-google-cloud-gke-multicloud| image:: https://img.shields.io/pypi/v/google-cloud-gke-multicloud.svg + :target: https://pypi.org/project/google-cloud-gke-multicloud +.. |PyPI-google-cloud-managedkafka| image:: https://img.shields.io/pypi/v/google-cloud-managedkafka.svg + :target: https://pypi.org/project/google-cloud-managedkafka +.. |PyPI-google-cloud-apigee-registry| image:: https://img.shields.io/pypi/v/google-cloud-apigee-registry.svg + :target: https://pypi.org/project/google-cloud-apigee-registry +.. |PyPI-google-cloud-apphub| image:: https://img.shields.io/pypi/v/google-cloud-apphub.svg + :target: https://pypi.org/project/google-cloud-apphub +.. |PyPI-google-apps-card| image:: https://img.shields.io/pypi/v/google-apps-card.svg + :target: https://pypi.org/project/google-apps-card +.. |PyPI-google-apps-script-type| image:: https://img.shields.io/pypi/v/google-apps-script-type.svg + :target: https://pypi.org/project/google-apps-script-type +.. |PyPI-google-area120-tables| image:: https://img.shields.io/pypi/v/google-area120-tables.svg + :target: https://pypi.org/project/google-area120-tables .. |PyPI-google-cloud-audit-log| image:: https://img.shields.io/pypi/v/google-cloud-audit-log.svg :target: https://pypi.org/project/google-cloud-audit-log +.. |PyPI-google-cloud-backupdr| image:: https://img.shields.io/pypi/v/google-cloud-backupdr.svg + :target: https://pypi.org/project/google-cloud-backupdr +.. |PyPI-google-cloud-gke-backup| image:: https://img.shields.io/pypi/v/google-cloud-gke-backup.svg + :target: https://pypi.org/project/google-cloud-gke-backup +.. 
|PyPI-google-cloud-batch| image:: https://img.shields.io/pypi/v/google-cloud-batch.svg + :target: https://pypi.org/project/google-cloud-batch +.. |PyPI-google-cloud-beyondcorp-appconnections| image:: https://img.shields.io/pypi/v/google-cloud-beyondcorp-appconnections.svg + :target: https://pypi.org/project/google-cloud-beyondcorp-appconnections +.. |PyPI-google-cloud-beyondcorp-appconnectors| image:: https://img.shields.io/pypi/v/google-cloud-beyondcorp-appconnectors.svg + :target: https://pypi.org/project/google-cloud-beyondcorp-appconnectors +.. |PyPI-google-cloud-beyondcorp-appgateways| image:: https://img.shields.io/pypi/v/google-cloud-beyondcorp-appgateways.svg + :target: https://pypi.org/project/google-cloud-beyondcorp-appgateways +.. |PyPI-google-cloud-beyondcorp-clientconnectorservices| image:: https://img.shields.io/pypi/v/google-cloud-beyondcorp-clientconnectorservices.svg + :target: https://pypi.org/project/google-cloud-beyondcorp-clientconnectorservices +.. |PyPI-google-cloud-beyondcorp-clientgateways| image:: https://img.shields.io/pypi/v/google-cloud-beyondcorp-clientgateways.svg + :target: https://pypi.org/project/google-cloud-beyondcorp-clientgateways +.. |PyPI-google-cloud-bigquery-biglake| image:: https://img.shields.io/pypi/v/google-cloud-bigquery-biglake.svg + :target: https://pypi.org/project/google-cloud-bigquery-biglake +.. |PyPI-google-cloud-bigquery-analyticshub| image:: https://img.shields.io/pypi/v/google-cloud-bigquery-analyticshub.svg + :target: https://pypi.org/project/google-cloud-bigquery-analyticshub +.. |PyPI-google-cloud-bigquery-data-exchange| image:: https://img.shields.io/pypi/v/google-cloud-bigquery-data-exchange.svg + :target: https://pypi.org/project/google-cloud-bigquery-data-exchange +.. |PyPI-google-cloud-bigquery-datapolicies| image:: https://img.shields.io/pypi/v/google-cloud-bigquery-datapolicies.svg + :target: https://pypi.org/project/google-cloud-bigquery-datapolicies +.. |PyPI-google-cloud-bigquery-migration| image:: https://img.shields.io/pypi/v/google-cloud-bigquery-migration.svg + :target: https://pypi.org/project/google-cloud-bigquery-migration .. |PyPI-pandas-gbq| image:: https://img.shields.io/pypi/v/pandas-gbq.svg :target: https://pypi.org/project/pandas-gbq +.. 
|PyPI-google-shopping-css| image:: https://img.shields.io/pypi/v/google-shopping-css.svg + :target: https://pypi.org/project/google-shopping-css +.. |PyPI-google-apps-chat| image:: https://img.shields.io/pypi/v/google-apps-chat.svg + :target: https://pypi.org/project/google-apps-chat +.. |PyPI-google-cloud-commerce-consumer-procurement| image:: https://img.shields.io/pypi/v/google-cloud-commerce-consumer-procurement.svg + :target: https://pypi.org/project/google-cloud-commerce-consumer-procurement +.. |PyPI-google-cloud-confidentialcomputing| image:: https://img.shields.io/pypi/v/google-cloud-confidentialcomputing.svg + :target: https://pypi.org/project/google-cloud-confidentialcomputing +.. |PyPI-google-cloud-cloudcontrolspartner| image:: https://img.shields.io/pypi/v/google-cloud-cloudcontrolspartner.svg + :target: https://pypi.org/project/google-cloud-cloudcontrolspartner .. |PyPI-google-cloud-dns| image:: https://img.shields.io/pypi/v/google-cloud-dns.svg :target: https://pypi.org/project/google-cloud-dns +.. |PyPI-google-cloud-datalabeling| image:: https://img.shields.io/pypi/v/google-cloud-datalabeling.svg + :target: https://pypi.org/project/google-cloud-datalabeling +.. |PyPI-google-cloud-datacatalog-lineage| image:: https://img.shields.io/pypi/v/google-cloud-datacatalog-lineage.svg + :target: https://pypi.org/project/google-cloud-datacatalog-lineage +.. |PyPI-google-cloud-data-qna| image:: https://img.shields.io/pypi/v/google-cloud-data-qna.svg + :target: https://pypi.org/project/google-cloud-data-qna .. |PyPI-google-cloud-dataflow-client| image:: https://img.shields.io/pypi/v/google-cloud-dataflow-client.svg :target: https://pypi.org/project/google-cloud-dataflow-client +.. |PyPI-google-cloud-dataform| image:: https://img.shields.io/pypi/v/google-cloud-dataform.svg + :target: https://pypi.org/project/google-cloud-dataform +.. |PyPI-google-cloud-developerconnect| image:: https://img.shields.io/pypi/v/google-cloud-developerconnect.svg + :target: https://pypi.org/project/google-cloud-developerconnect +.. 
|PyPI-google-cloud-discoveryengine| image:: https://img.shields.io/pypi/v/google-cloud-discoveryengine.svg + :target: https://pypi.org/project/google-cloud-discoveryengine +.. |PyPI-google-cloud-edgecontainer| image:: https://img.shields.io/pypi/v/google-cloud-edgecontainer.svg + :target: https://pypi.org/project/google-cloud-edgecontainer +.. |PyPI-google-cloud-edgenetwork| image:: https://img.shields.io/pypi/v/google-cloud-edgenetwork.svg + :target: https://pypi.org/project/google-cloud-edgenetwork .. |PyPI-google-cloud-documentai-toolbox| image:: https://img.shields.io/pypi/v/google-cloud-documentai-toolbox.svg :target: https://pypi.org/project/google-cloud-documentai-toolbox +.. |PyPI-google-cloud-contentwarehouse| image:: https://img.shields.io/pypi/v/google-cloud-contentwarehouse.svg + :target: https://pypi.org/project/google-cloud-contentwarehouse +.. |PyPI-google-cloud-enterpriseknowledgegraph| image:: https://img.shields.io/pypi/v/google-cloud-enterpriseknowledgegraph.svg + :target: https://pypi.org/project/google-cloud-enterpriseknowledgegraph .. |PyPI-google-cloud-error-reporting| image:: https://img.shields.io/pypi/v/google-cloud-error-reporting.svg :target: https://pypi.org/project/google-cloud-error-reporting +.. |PyPI-google-cloud-eventarc-publishing| image:: https://img.shields.io/pypi/v/google-cloud-eventarc-publishing.svg + :target: https://pypi.org/project/google-cloud-eventarc-publishing +.. |PyPI-google-cloud-gdchardwaremanagement| image:: https://img.shields.io/pypi/v/google-cloud-gdchardwaremanagement.svg + :target: https://pypi.org/project/google-cloud-gdchardwaremanagement +.. |PyPI-google-cloud-gke-connect-gateway| image:: https://img.shields.io/pypi/v/google-cloud-gke-connect-gateway.svg + :target: https://pypi.org/project/google-cloud-gke-connect-gateway +.. |PyPI-google-ai-generativelanguage| image:: https://img.shields.io/pypi/v/google-ai-generativelanguage.svg + :target: https://pypi.org/project/google-ai-generativelanguage +.. |PyPI-google-geo-type| image:: https://img.shields.io/pypi/v/google-geo-type.svg + :target: https://pypi.org/project/google-geo-type +.. 
|PyPI-google-cloud-config| image:: https://img.shields.io/pypi/v/google-cloud-config.svg + :target: https://pypi.org/project/google-cloud-config +.. |PyPI-google-cloud-kms-inventory| image:: https://img.shields.io/pypi/v/google-cloud-kms-inventory.svg + :target: https://pypi.org/project/google-cloud-kms-inventory +.. |PyPI-google-maps-fleetengine-delivery| image:: https://img.shields.io/pypi/v/google-maps-fleetengine-delivery.svg + :target: https://pypi.org/project/google-maps-fleetengine-delivery +.. |PyPI-google-cloud-life-sciences| image:: https://img.shields.io/pypi/v/google-cloud-life-sciences.svg + :target: https://pypi.org/project/google-cloud-life-sciences +.. |PyPI-google-maps-fleetengine| image:: https://img.shields.io/pypi/v/google-maps-fleetengine.svg + :target: https://pypi.org/project/google-maps-fleetengine +.. |PyPI-google-maps-mapsplatformdatasets| image:: https://img.shields.io/pypi/v/google-maps-mapsplatformdatasets.svg + :target: https://pypi.org/project/google-maps-mapsplatformdatasets +.. |PyPI-google-maps-routing| image:: https://img.shields.io/pypi/v/google-maps-routing.svg + :target: https://pypi.org/project/google-maps-routing +.. |PyPI-google-cloud-media-translation| image:: https://img.shields.io/pypi/v/google-cloud-media-translation.svg + :target: https://pypi.org/project/google-cloud-media-translation +.. |PyPI-google-apps-meet| image:: https://img.shields.io/pypi/v/google-apps-meet.svg + :target: https://pypi.org/project/google-apps-meet +.. |PyPI-google-cloud-redis-cluster| image:: https://img.shields.io/pypi/v/google-cloud-redis-cluster.svg + :target: https://pypi.org/project/google-cloud-redis-cluster +.. |PyPI-google-shopping-merchant-accounts| image:: https://img.shields.io/pypi/v/google-shopping-merchant-accounts.svg + :target: https://pypi.org/project/google-shopping-merchant-accounts +.. |PyPI-google-shopping-merchant-conversions| image:: https://img.shields.io/pypi/v/google-shopping-merchant-conversions.svg + :target: https://pypi.org/project/google-shopping-merchant-conversions +.. 
|PyPI-google-shopping-merchant-datasources| image:: https://img.shields.io/pypi/v/google-shopping-merchant-datasources.svg + :target: https://pypi.org/project/google-shopping-merchant-datasources +.. |PyPI-google-shopping-merchant-lfp| image:: https://img.shields.io/pypi/v/google-shopping-merchant-lfp.svg + :target: https://pypi.org/project/google-shopping-merchant-lfp +.. |PyPI-google-shopping-merchant-notifications| image:: https://img.shields.io/pypi/v/google-shopping-merchant-notifications.svg + :target: https://pypi.org/project/google-shopping-merchant-notifications +.. |PyPI-google-shopping-merchant-products| image:: https://img.shields.io/pypi/v/google-shopping-merchant-products.svg + :target: https://pypi.org/project/google-shopping-merchant-products +.. |PyPI-google-shopping-merchant-promotions| image:: https://img.shields.io/pypi/v/google-shopping-merchant-promotions.svg + :target: https://pypi.org/project/google-shopping-merchant-promotions +.. |PyPI-google-shopping-merchant-inventories| image:: https://img.shields.io/pypi/v/google-shopping-merchant-inventories.svg + :target: https://pypi.org/project/google-shopping-merchant-inventories +.. |PyPI-google-shopping-merchant-reports| image:: https://img.shields.io/pypi/v/google-shopping-merchant-reports.svg + :target: https://pypi.org/project/google-shopping-merchant-reports +.. |PyPI-google-cloud-migrationcenter| image:: https://img.shields.io/pypi/v/google-cloud-migrationcenter.svg + :target: https://pypi.org/project/google-cloud-migrationcenter +.. |PyPI-google-cloud-netapp| image:: https://img.shields.io/pypi/v/google-cloud-netapp.svg + :target: https://pypi.org/project/google-cloud-netapp +.. |PyPI-google-cloud-network-security| image:: https://img.shields.io/pypi/v/google-cloud-network-security.svg + :target: https://pypi.org/project/google-cloud-network-security +.. |PyPI-google-cloud-network-services| image:: https://img.shields.io/pypi/v/google-cloud-network-services.svg + :target: https://pypi.org/project/google-cloud-network-services +.. |PyPI-google-cloud-parallelstore| image:: https://img.shields.io/pypi/v/google-cloud-parallelstore.svg + :target: https://pypi.org/project/google-cloud-parallelstore +.. 
|PyPI-google-cloud-phishing-protection| image:: https://img.shields.io/pypi/v/google-cloud-phishing-protection.svg + :target: https://pypi.org/project/google-cloud-phishing-protection +.. |PyPI-google-maps-places| image:: https://img.shields.io/pypi/v/google-maps-places.svg + :target: https://pypi.org/project/google-maps-places +.. |PyPI-google-cloud-policysimulator| image:: https://img.shields.io/pypi/v/google-cloud-policysimulator.svg + :target: https://pypi.org/project/google-cloud-policysimulator +.. |PyPI-google-cloud-policytroubleshooter-iam| image:: https://img.shields.io/pypi/v/google-cloud-policytroubleshooter-iam.svg + :target: https://pypi.org/project/google-cloud-policytroubleshooter-iam +.. |PyPI-google-cloud-private-catalog| image:: https://img.shields.io/pypi/v/google-cloud-private-catalog.svg + :target: https://pypi.org/project/google-cloud-private-catalog +.. |PyPI-google-cloud-public-ca| image:: https://img.shields.io/pypi/v/google-cloud-public-ca.svg + :target: https://pypi.org/project/google-cloud-public-ca +.. |PyPI-google-cloud-cloudquotas| image:: https://img.shields.io/pypi/v/google-cloud-cloudquotas.svg + :target: https://pypi.org/project/google-cloud-cloudquotas +.. |PyPI-google-cloud-rapidmigrationassessment| image:: https://img.shields.io/pypi/v/google-cloud-rapidmigrationassessment.svg + :target: https://pypi.org/project/google-cloud-rapidmigrationassessment +.. |PyPI-google-cloud-recommendations-ai| image:: https://img.shields.io/pypi/v/google-cloud-recommendations-ai.svg + :target: https://pypi.org/project/google-cloud-recommendations-ai +.. |PyPI-google-maps-routeoptimization| image:: https://img.shields.io/pypi/v/google-maps-routeoptimization.svg + :target: https://pypi.org/project/google-maps-routeoptimization .. |PyPI-google-cloud-run| image:: https://img.shields.io/pypi/v/google-cloud-run.svg :target: https://pypi.org/project/google-cloud-run .. |PyPI-google-cloud-runtimeconfig| image:: https://img.shields.io/pypi/v/google-cloud-runtimeconfig.svg :target: https://pypi.org/project/google-cloud-runtimeconfig .. 
|PyPI-sqlalchemy-bigquery| image:: https://img.shields.io/pypi/v/sqlalchemy-bigquery.svg :target: https://pypi.org/project/sqlalchemy-bigquery +.. |PyPI-google-cloud-securesourcemanager| image:: https://img.shields.io/pypi/v/google-cloud-securesourcemanager.svg + :target: https://pypi.org/project/google-cloud-securesourcemanager +.. |PyPI-google-cloud-securitycentermanagement| image:: https://img.shields.io/pypi/v/google-cloud-securitycentermanagement.svg + :target: https://pypi.org/project/google-cloud-securitycentermanagement +.. |PyPI-google-cloud-servicehealth| image:: https://img.shields.io/pypi/v/google-cloud-servicehealth.svg + :target: https://pypi.org/project/google-cloud-servicehealth +.. |PyPI-google-shopping-merchant-quota| image:: https://img.shields.io/pypi/v/google-shopping-merchant-quota.svg + :target: https://pypi.org/project/google-shopping-merchant-quota +.. |PyPI-google-shopping-type| image:: https://img.shields.io/pypi/v/google-shopping-type.svg + :target: https://pypi.org/project/google-shopping-type +.. |PyPI-google-maps-solar| image:: https://img.shields.io/pypi/v/google-maps-solar.svg + :target: https://pypi.org/project/google-maps-solar +.. |PyPI-google-cloud-storageinsights| image:: https://img.shields.io/pypi/v/google-cloud-storageinsights.svg + :target: https://pypi.org/project/google-cloud-storageinsights +.. |PyPI-google-cloud-support| image:: https://img.shields.io/pypi/v/google-cloud-support.svg + :target: https://pypi.org/project/google-cloud-support +.. |PyPI-google-cloud-telcoautomation| image:: https://img.shields.io/pypi/v/google-cloud-telcoautomation.svg + :target: https://pypi.org/project/google-cloud-telcoautomation +.. |PyPI-google-cloud-vmwareengine| image:: https://img.shields.io/pypi/v/google-cloud-vmwareengine.svg + :target: https://pypi.org/project/google-cloud-vmwareengine +.. |PyPI-google-cloud-video-stitcher| image:: https://img.shields.io/pypi/v/google-cloud-video-stitcher.svg + :target: https://pypi.org/project/google-cloud-video-stitcher +.. 
|PyPI-google-cloud-visionai| image:: https://img.shields.io/pypi/v/google-cloud-visionai.svg + :target: https://pypi.org/project/google-cloud-visionai +.. |PyPI-google-cloud-gsuiteaddons| image:: https://img.shields.io/pypi/v/google-cloud-gsuiteaddons.svg + :target: https://pypi.org/project/google-cloud-gsuiteaddons +.. |PyPI-google-apps-events-subscriptions| image:: https://img.shields.io/pypi/v/google-apps-events-subscriptions.svg + :target: https://pypi.org/project/google-apps-events-subscriptions +.. |PyPI-google-cloud-workstations| image:: https://img.shields.io/pypi/v/google-cloud-workstations.svg + :target: https://pypi.org/project/google-cloud-workstations .. API_TABLE_END @@ -324,11 +1876,11 @@ Example Applications ******************** - `getting-started-python`_ - A sample and `tutorial`_ that demonstrates how to build a complete web application using Cloud Datastore, Cloud Storage, and Cloud Pub/Sub and deploy it to Google App Engine or Google Compute Engine. -- `google-cloud-python-expenses-demo`_ - A sample expenses demo using Cloud Datastore and Cloud Storage. +- `python-docs-samples`_ - Python samples for Google Cloud Platform products. .. _getting-started-python: https://github.com/GoogleCloudPlatform/getting-started-python .. _tutorial: https://cloud.google.com/python -.. _google-cloud-python-expenses-demo: https://github.com/GoogleCloudPlatform/google-cloud-python-expenses-demo +.. _python-docs-samples: https://github.com/GoogleCloudPlatform/python-docs-samples Authentication diff --git a/SUPPORT.md b/SUPPORT.md new file mode 100644 index 000000000000..13c64a683c75 --- /dev/null +++ b/SUPPORT.md @@ -0,0 +1,59 @@ +**PLEASE READ**: If you have a support contract with Google, please create an issue in the [support console](https://cloud.google.com/support/) instead of filing it on GitHub. This will ensure a timely response. + +To help us resolve your issue as soon as possible, please follow these steps: + + + + +# Check for already opened issues: + +- In `google-cloud-python`: + + + + +# *BEFORE* reporting an issue here + +- Please determine whether it's a client library issue or an underlying API service issue. + - **Bugs**: Try reproducing the issue without the client library; if you can, then it's an API issue. + - **Features**: Determine whether you want something different about the data or how it's changed in Google services (rather than how you access or manipulate it locally); if so, it's an API issue. +- To make these determinations, visit [Google APIs Explorer](https://developers.google.com/apis-explorer): + 1. Find your API on the [APIs Explorer list](https://developers.google.com/apis-explorer), and follow the link. + 2. 
Click on the API method corresponding to your issue.
+  3. In the page that appears:
+     - You can use the APIs Explorer sidebar (“Try this Method”), fill in your request parameters, and see whether your request succeeds or causes the same error you were getting initially.
+     - Alternatively, you can use the `HTTP Request` listed on that page and any information on the `Request body` to construct a request you can send from the command line using a tool like `curl`.
+  4. If your request in step 3 above succeeds and is as you expect, then it's likely the resolution centers on the client library; [file a client library issue](#orge13c134) as detailed below. If your request failed or returned unexpected results, then it's likely the resolution centers on the API service; [file an API service issue](#orgb8af98c) instead, as detailed below.
+
+
+
+
+# Filing an API service issue
+
+*(if the issue DID re-occur [above](#orge74b7aa) without using the client library)*
+
+Use the appropriate API service issue tracker. The maintainers of this repo are not experts or contributors on individual API services; we work on generating usable and idiomatic client libraries for many APIs.
+
+1. Consider using [Google Cloud Customer Care](https://cloud.google.com/support/?hl=en) (paid) to get more dedicated support for your issue. Otherwise, continue with the steps below.
+2. Find your API in [the client list at the top of this repository](https://github.com/googleapis/google-cloud-python/tree/main?tab=readme-ov-file#libraries).
+3. Check the “API Issues” column to see whether someone else has reported the same issue. If they have, the filed issue may have some useful information; feel free to add more details or context of your own.
+4. If your issue has not been filed, you can click on “File an API Issue” from the same [client list](https://github.com/googleapis/google-cloud-python/tree/main?tab=readme-ov-file#libraries) to notify the API service team. Be as complete yet succinct as you can!
+5. If you don't see the "API Issues" column for this API (we're in the process of filling out the table), go ahead and file an issue in this repository and make a note that it's likely a service-side issue. We will route it to the right service team.
+
+
+
+
+# Filing a client library issue
+
+*(if the issue DID NOT re-occur [above](#orge74b7aa) without using the client library)*
+
+Consider using [Google Cloud Customer Care](https://cloud.google.com/support/?hl=en) (paid) to get more dedicated support for your issue. Otherwise, continue with the steps below.
+
+- Determine the right repository in which to file:
+  - If you fetched the API-specific PyPI package, you are probably using one of the modern Python [Cloud Client Libraries](https://cloud.google.com/apis/docs/cloud-client-libraries). Go to [the client list at the top of this repository](https://github.com/googleapis/google-cloud-python/tree/main?tab=readme-ov-file#libraries), which lists them all, and click on the name of your client. That will take you to the correct repository in which to file an issue (for the many APIs listed under [`packages/`](https://github.com/googleapis/google-cloud-python/tree/main/packages), it's this repo, `google-cloud-python`).
+  - If you're using the package `googleapiclient` in your code, you are using the older Python [Google API Client Libraries](https://developers.google.com/api-client-library/), whose source code is hosted in the repository [google-api-python-client](https://github.com/googleapis/google-api-python-client); please file an issue there for those.
+  - For more information on the difference between Cloud Client Libraries and Google API Client Libraries, see [Client Libraries Explained](https://cloud.google.com/apis/docs/client-libraries-explained).
+- Go to the correct repository, as identified above
+  1. Search for issues already opened (in this repository, those would be in ). If you find your problem already filed, just add any more context or details that seem appropriate.
+  2. If your problem has not been previously filed, file an issue. If you're filing in this repository, choose either the [bug report](https://github.com/googleapis/google-cloud-python/issues/new?template=bug_report.yaml) or [feature request](https://github.com/googleapis/google-cloud-python/issues/new?template=feature_request.yaml) template, fill in the details, and submit the issue with an informative title!
+
diff --git a/packages/google-ai-generativelanguage/CHANGELOG.md b/packages/google-ai-generativelanguage/CHANGELOG.md
index 7470266e8256..a0b75c980b4e 100644
--- a/packages/google-ai-generativelanguage/CHANGELOG.md
+++ b/packages/google-ai-generativelanguage/CHANGELOG.md
@@ -1,5 +1,43 @@
 # Changelog
+## [0.6.6](https://github.com/googleapis/google-cloud-python/compare/google-ai-generativelanguage-v0.6.5...google-ai-generativelanguage-v0.6.6) (2024-06-26)
+
+
+### Features
+
+* [google-ai-generativelanguage] Add code execution ([#12843](https://github.com/googleapis/google-cloud-python/issues/12843)) ([e4fcb00](https://github.com/googleapis/google-cloud-python/commit/e4fcb0097b4f6debbcf584bd3d35a8281a954cfd))
+
+## [0.6.5](https://github.com/googleapis/google-cloud-python/compare/google-ai-generativelanguage-v0.6.4...google-ai-generativelanguage-v0.6.5) (2024-06-11)
+
+
+### Features
+
+* Add cached_content_token_count to CountTokensResponse ([09c7fae](https://github.com/googleapis/google-cloud-python/commit/09c7fae459bc3eb91bfcb795384245e7fa4bf7ff))
+* Add cached_content_token_count to generative_service's UsageMetadata ([09c7fae](https://github.com/googleapis/google-cloud-python/commit/09c7fae459bc3eb91bfcb795384245e7fa4bf7ff))
+* Add content caching ([09c7fae](https://github.com/googleapis/google-cloud-python/commit/09c7fae459bc3eb91bfcb795384245e7fa4bf7ff))
+
+
+### Documentation
+
+* Small fixes
([09c7fae](https://github.com/googleapis/google-cloud-python/commit/09c7fae459bc3eb91bfcb795384245e7fa4bf7ff)) + +## [0.6.4](https://github.com/googleapis/google-cloud-python/compare/google-ai-generativelanguage-v0.6.3...google-ai-generativelanguage-v0.6.4) (2024-05-16) + + +### Features + +* **v1:** Add generate_content_request to CountTokensRequest ([e5dd7ed](https://github.com/googleapis/google-cloud-python/commit/e5dd7eddcea1f3f22a023a42e0cc80d93a06ccfc)) +* **v1:** Add usage metadata to GenerateContentResponse ([e5dd7ed](https://github.com/googleapis/google-cloud-python/commit/e5dd7eddcea1f3f22a023a42e0cc80d93a06ccfc)) +* **v1beta:** Add video metadata to files API ([e5dd7ed](https://github.com/googleapis/google-cloud-python/commit/e5dd7eddcea1f3f22a023a42e0cc80d93a06ccfc)) +* **v1beta:** Update timeouts for generate content ([e5dd7ed](https://github.com/googleapis/google-cloud-python/commit/e5dd7eddcea1f3f22a023a42e0cc80d93a06ccfc)) +* **v1:** Update timeouts ([e5dd7ed](https://github.com/googleapis/google-cloud-python/commit/e5dd7eddcea1f3f22a023a42e0cc80d93a06ccfc)) + + +### Documentation + +* **v1beta:** Minor updates ([e5dd7ed](https://github.com/googleapis/google-cloud-python/commit/e5dd7eddcea1f3f22a023a42e0cc80d93a06ccfc)) +* **v1:** Minor updates ([e5dd7ed](https://github.com/googleapis/google-cloud-python/commit/e5dd7eddcea1f3f22a023a42e0cc80d93a06ccfc)) + ## [0.6.3](https://github.com/googleapis/google-cloud-python/compare/google-ai-generativelanguage-v0.6.2...google-ai-generativelanguage-v0.6.3) (2024-05-07) diff --git a/packages/google-ai-generativelanguage/docs/generativelanguage_v1beta/cache_service.rst b/packages/google-ai-generativelanguage/docs/generativelanguage_v1beta/cache_service.rst new file mode 100644 index 000000000000..c9aa05ee3edd --- /dev/null +++ b/packages/google-ai-generativelanguage/docs/generativelanguage_v1beta/cache_service.rst @@ -0,0 +1,10 @@ +CacheService +------------------------------ + +.. automodule:: google.ai.generativelanguage_v1beta.services.cache_service + :members: + :inherited-members: + +.. automodule:: google.ai.generativelanguage_v1beta.services.cache_service.pagers + :members: + :inherited-members: diff --git a/packages/google-ai-generativelanguage/docs/generativelanguage_v1beta/services_.rst b/packages/google-ai-generativelanguage/docs/generativelanguage_v1beta/services_.rst index 0b7705eebe0c..24e6184e8b0e 100644 --- a/packages/google-ai-generativelanguage/docs/generativelanguage_v1beta/services_.rst +++ b/packages/google-ai-generativelanguage/docs/generativelanguage_v1beta/services_.rst @@ -3,6 +3,7 @@ Services for Google Ai Generativelanguage v1beta API .. 
toctree:: :maxdepth: 2 + cache_service discuss_service file_service generative_service diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage/__init__.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage/__init__.py index ec4ca1f058e5..c69803d506d6 100644 --- a/packages/google-ai-generativelanguage/google/ai/generativelanguage/__init__.py +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage/__init__.py @@ -18,6 +18,12 @@ __version__ = package_version.__version__ +from google.ai.generativelanguage_v1beta.services.cache_service.async_client import ( + CacheServiceAsyncClient, +) +from google.ai.generativelanguage_v1beta.services.cache_service.client import ( + CacheServiceClient, +) from google.ai.generativelanguage_v1beta.services.discuss_service.async_client import ( DiscussServiceAsyncClient, ) @@ -60,13 +66,25 @@ from google.ai.generativelanguage_v1beta.services.text_service.client import ( TextServiceClient, ) +from google.ai.generativelanguage_v1beta.types.cache_service import ( + CreateCachedContentRequest, + DeleteCachedContentRequest, + GetCachedContentRequest, + ListCachedContentsRequest, + ListCachedContentsResponse, + UpdateCachedContentRequest, +) +from google.ai.generativelanguage_v1beta.types.cached_content import CachedContent from google.ai.generativelanguage_v1beta.types.citation import ( CitationMetadata, CitationSource, ) from google.ai.generativelanguage_v1beta.types.content import ( Blob, + CodeExecution, + CodeExecutionResult, Content, + ExecutableCode, FileData, FunctionCall, FunctionCallingConfig, @@ -89,7 +107,7 @@ Message, MessagePrompt, ) -from google.ai.generativelanguage_v1beta.types.file import File +from google.ai.generativelanguage_v1beta.types.file import File, VideoMetadata from google.ai.generativelanguage_v1beta.types.file_service import ( CreateFileRequest, CreateFileResponse, @@ -213,6 +231,8 @@ ) __all__ = ( + "CacheServiceClient", + "CacheServiceAsyncClient", "DiscussServiceClient", "DiscussServiceAsyncClient", "FileServiceClient", @@ -227,10 +247,20 @@ "RetrieverServiceAsyncClient", "TextServiceClient", "TextServiceAsyncClient", + "CreateCachedContentRequest", + "DeleteCachedContentRequest", + "GetCachedContentRequest", + "ListCachedContentsRequest", + "ListCachedContentsResponse", + "UpdateCachedContentRequest", + "CachedContent", "CitationMetadata", "CitationSource", "Blob", + "CodeExecution", + "CodeExecutionResult", "Content", + "ExecutableCode", "FileData", "FunctionCall", "FunctionCallingConfig", @@ -251,6 +281,7 @@ "Message", "MessagePrompt", "File", + "VideoMetadata", "CreateFileRequest", "CreateFileResponse", "DeleteFileRequest", diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage/gapic_version.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage/gapic_version.py index 558c8aab67c5..1a1d9a4de546 100644 --- a/packages/google-ai-generativelanguage/google/ai/generativelanguage/gapic_version.py +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "0.0.0" # {x-release-please-version} +__version__ = "0.6.6" # {x-release-please-version} diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/gapic_version.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/gapic_version.py index 558c8aab67c5..1a1d9a4de546 100644 --- a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/gapic_version.py +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.0.0" # {x-release-please-version} +__version__ = "0.6.6" # {x-release-please-version} diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/services/generative_service/async_client.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/services/generative_service/async_client.py index 1a88e871be3f..7f380977c2b7 100644 --- a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/services/generative_service/async_client.py +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/services/generative_service/async_client.py @@ -283,6 +283,12 @@ async def generate_content( r"""Generates a response from the model given an input ``GenerateContentRequest``. + Input capabilities differ between models, including tuned + models. See the `model + guide `__ and `tuning + guide `__ for + details. + .. code-block:: python # This snippet has been automatically generated and should be regarded as a @@ -847,8 +853,9 @@ async def sample_count_tokens(): on the ``request`` instance; if ``request`` is provided, this should not be set. contents (:class:`MutableSequence[google.ai.generativelanguage_v1.types.Content]`): - Required. The input given to the - model as a prompt. + Optional. The input given to the model as a prompt. This + field is ignored when ``generate_content_request`` is + set. This corresponds to the ``contents`` field on the ``request`` instance; if ``request`` is provided, this diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/services/generative_service/client.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/services/generative_service/client.py index ed5ea95c0b5f..886dfd3c9953 100644 --- a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/services/generative_service/client.py +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/services/generative_service/client.py @@ -696,6 +696,12 @@ def generate_content( r"""Generates a response from the model given an input ``GenerateContentRequest``. + Input capabilities differ between models, including tuned + models. See the `model + guide `__ and `tuning + guide `__ for + details. + .. code-block:: python # This snippet has been automatically generated and should be regarded as a @@ -1248,8 +1254,9 @@ def sample_count_tokens(): on the ``request`` instance; if ``request`` is provided, this should not be set. contents (MutableSequence[google.ai.generativelanguage_v1.types.Content]): - Required. The input given to the - model as a prompt. + Optional. The input given to the model as a prompt. This + field is ignored when ``generate_content_request`` is + set. 
This corresponds to the ``contents`` field on the ``request`` instance; if ``request`` is provided, this diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/services/generative_service/transports/base.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/services/generative_service/transports/base.py index 4abecde6aa9e..42aababa203f 100644 --- a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/services/generative_service/transports/base.py +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/services/generative_service/transports/base.py @@ -136,9 +136,9 @@ def _prep_wrapped_messages(self, client_info): predicate=retries.if_exception_type( core_exceptions.ServiceUnavailable, ), - deadline=60.0, + deadline=600.0, ), - default_timeout=60.0, + default_timeout=600.0, client_info=client_info, ), self.stream_generate_content: gapic_v1.method.wrap_method( @@ -150,9 +150,9 @@ def _prep_wrapped_messages(self, client_info): predicate=retries.if_exception_type( core_exceptions.ServiceUnavailable, ), - deadline=60.0, + deadline=600.0, ), - default_timeout=60.0, + default_timeout=600.0, client_info=client_info, ), self.embed_content: gapic_v1.method.wrap_method( diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/services/generative_service/transports/grpc.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/services/generative_service/transports/grpc.py index 72bee49af9db..bf48bd2a84b0 100644 --- a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/services/generative_service/transports/grpc.py +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/services/generative_service/transports/grpc.py @@ -247,6 +247,12 @@ def generate_content( Generates a response from the model given an input ``GenerateContentRequest``. + Input capabilities differ between models, including tuned + models. See the `model + guide `__ and `tuning + guide `__ for + details. + Returns: Callable[[~.GenerateContentRequest], ~.GenerateContentResponse]: diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/services/generative_service/transports/grpc_asyncio.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/services/generative_service/transports/grpc_asyncio.py index 7fe91cc26e39..717d2ec4f14c 100644 --- a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/services/generative_service/transports/grpc_asyncio.py +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/services/generative_service/transports/grpc_asyncio.py @@ -251,6 +251,12 @@ def generate_content( Generates a response from the model given an input ``GenerateContentRequest``. + Input capabilities differ between models, including tuned + models. See the `model + guide `__ and `tuning + guide `__ for + details. 
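Editorial aside, not part of the diff: the transport hunks above raise the wrapped-method deadline and default timeout for `generate_content` and `stream_generate_content` from 60 s to 600 s. Callers can still pass a per-call timeout, as in this sketch; the model name is an illustrative assumption, not taken from the diff:

```python
# Hedged sketch (not part of the diff): an explicit per-call timeout still
# takes precedence over the new 600.0 second wrapped-method default.
from google.ai import generativelanguage_v1 as glm_v1

client = glm_v1.GenerativeServiceClient()
response = client.generate_content(
    model="models/gemini-1.0-pro",  # illustrative model name, not from the diff
    contents=[glm_v1.Content(parts=[glm_v1.Part(text="Ping")], role="user")],
    timeout=30.0,  # overrides the 600.0 second default for this call only
)
print(response.candidates[0].content.parts[0].text)
```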
+ Returns: Callable[[~.GenerateContentRequest], Awaitable[~.GenerateContentResponse]]: @@ -401,9 +407,9 @@ def _prep_wrapped_messages(self, client_info): predicate=retries.if_exception_type( core_exceptions.ServiceUnavailable, ), - deadline=60.0, + deadline=600.0, ), - default_timeout=60.0, + default_timeout=600.0, client_info=client_info, ), self.stream_generate_content: gapic_v1.method_async.wrap_method( @@ -415,9 +421,9 @@ def _prep_wrapped_messages(self, client_info): predicate=retries.if_exception_type( core_exceptions.ServiceUnavailable, ), - deadline=60.0, + deadline=600.0, ), - default_timeout=60.0, + default_timeout=600.0, client_info=client_info, ), self.embed_content: gapic_v1.method_async.wrap_method( diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/types/content.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/types/content.py index 72b1c5dd6fac..d6fdd8587224 100644 --- a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/types/content.py +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/types/content.py @@ -109,10 +109,13 @@ class Blob(proto.Message): Attributes: mime_type (str): - The IANA standard MIME type of the source - data. Accepted types include: "image/png", - "image/jpeg", "image/heic", "image/heif", - "image/webp". + The IANA standard MIME type of the source data. Examples: + + - image/png + - image/jpeg If an unsupported MIME type is provided, an + error will be returned. For a complete list of supported + types, see `Supported file + formats `__. data (bytes): Raw bytes for media formats. """ diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/types/generative_service.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/types/generative_service.py index 40547e9ba32d..e1121ef229db 100644 --- a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/types/generative_service.py +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/types/generative_service.py @@ -273,6 +273,9 @@ class GenerateContentResponse(proto.Message): prompt_feedback (google.ai.generativelanguage_v1.types.GenerateContentResponse.PromptFeedback): Returns the prompt's feedback related to the content filters. + usage_metadata (google.ai.generativelanguage_v1.types.GenerateContentResponse.UsageMetadata): + Output only. Metadata on the generation + requests' token usage. """ class PromptFeedback(proto.Message): @@ -319,6 +322,33 @@ class BlockReason(proto.Enum): message=safety.SafetyRating, ) + class UsageMetadata(proto.Message): + r"""Metadata on the generation request's token usage. + + Attributes: + prompt_token_count (int): + Number of tokens in the prompt. + candidates_token_count (int): + Total number of tokens across the generated + candidates. + total_token_count (int): + Total token count for the generation request + (prompt + candidates). 
+ """ + + prompt_token_count: int = proto.Field( + proto.INT32, + number=1, + ) + candidates_token_count: int = proto.Field( + proto.INT32, + number=2, + ) + total_token_count: int = proto.Field( + proto.INT32, + number=3, + ) + candidates: MutableSequence["Candidate"] = proto.RepeatedField( proto.MESSAGE, number=1, @@ -329,6 +359,11 @@ class BlockReason(proto.Enum): number=2, message=PromptFeedback, ) + usage_metadata: UsageMetadata = proto.Field( + proto.MESSAGE, + number=3, + message=UsageMetadata, + ) class Candidate(proto.Message): @@ -458,8 +493,9 @@ class EmbedContentRequest(proto.Message): output_dimensionality (int): Optional. Optional reduced dimension for the output embedding. If set, excessive values in the output embedding - are truncated from the end. Supported by - ``models/text-embedding-latest``. + are truncated from the end. Supported by newer models since + 2024, and the earlier model (``models/embedding-001``) + cannot specify this value. This field is a member of `oneof`_ ``_output_dimensionality``. """ @@ -584,8 +620,12 @@ class CountTokensRequest(proto.Message): Format: ``models/{model}`` contents (MutableSequence[google.ai.generativelanguage_v1.types.Content]): - Required. The input given to the model as a - prompt. + Optional. The input given to the model as a prompt. This + field is ignored when ``generate_content_request`` is set. + generate_content_request (google.ai.generativelanguage_v1.types.GenerateContentRequest): + Optional. The overall input given to the + model. CountTokens will count prompt, function + calling, etc. """ model: str = proto.Field( @@ -597,6 +637,11 @@ class CountTokensRequest(proto.Message): number=2, message=gag_content.Content, ) + generate_content_request: "GenerateContentRequest" = proto.Field( + proto.MESSAGE, + number=3, + message="GenerateContentRequest", + ) class CountTokensResponse(proto.Message): diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/types/safety.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/types/safety.py index b04a40ed1606..100fc75977da 100644 --- a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/types/safety.py +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/types/safety.py @@ -144,7 +144,7 @@ class SafetySetting(proto.Message): r"""Safety setting, affecting the safety-blocking behavior. Passing a safety setting for a category changes the allowed - proability that content is blocked. + probability that content is blocked. 
Attributes: category (google.ai.generativelanguage_v1.types.HarmCategory): diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/__init__.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/__init__.py index ac287aabd4d3..c692fa7725c9 100644 --- a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/__init__.py +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/__init__.py @@ -18,6 +18,7 @@ __version__ = package_version.__version__ +from .services.cache_service import CacheServiceAsyncClient, CacheServiceClient from .services.discuss_service import DiscussServiceAsyncClient, DiscussServiceClient from .services.file_service import FileServiceAsyncClient, FileServiceClient from .services.generative_service import ( @@ -34,10 +35,22 @@ RetrieverServiceClient, ) from .services.text_service import TextServiceAsyncClient, TextServiceClient +from .types.cache_service import ( + CreateCachedContentRequest, + DeleteCachedContentRequest, + GetCachedContentRequest, + ListCachedContentsRequest, + ListCachedContentsResponse, + UpdateCachedContentRequest, +) +from .types.cached_content import CachedContent from .types.citation import CitationMetadata, CitationSource from .types.content import ( Blob, + CodeExecution, + CodeExecutionResult, Content, + ExecutableCode, FileData, FunctionCall, FunctionCallingConfig, @@ -60,7 +73,7 @@ Message, MessagePrompt, ) -from .types.file import File +from .types.file import File, VideoMetadata from .types.file_service import ( CreateFileRequest, CreateFileResponse, @@ -184,6 +197,7 @@ ) __all__ = ( + "CacheServiceAsyncClient", "DiscussServiceAsyncClient", "FileServiceAsyncClient", "GenerativeServiceAsyncClient", @@ -202,11 +216,15 @@ "BatchUpdateChunksRequest", "BatchUpdateChunksResponse", "Blob", + "CacheServiceClient", + "CachedContent", "Candidate", "Chunk", "ChunkData", "CitationMetadata", "CitationSource", + "CodeExecution", + "CodeExecutionResult", "Condition", "Content", "ContentEmbedding", @@ -218,6 +236,7 @@ "CountTextTokensResponse", "CountTokensRequest", "CountTokensResponse", + "CreateCachedContentRequest", "CreateChunkRequest", "CreateCorpusRequest", "CreateDocumentRequest", @@ -228,6 +247,7 @@ "CreateTunedModelRequest", "CustomMetadata", "Dataset", + "DeleteCachedContentRequest", "DeleteChunkRequest", "DeleteCorpusRequest", "DeleteDocumentRequest", @@ -242,6 +262,7 @@ "EmbedTextResponse", "Embedding", "Example", + "ExecutableCode", "File", "FileData", "FileServiceClient", @@ -259,6 +280,7 @@ "GenerateTextResponse", "GenerationConfig", "GenerativeServiceClient", + "GetCachedContentRequest", "GetChunkRequest", "GetCorpusRequest", "GetDocumentRequest", @@ -271,6 +293,8 @@ "GroundingPassages", "HarmCategory", "Hyperparameters", + "ListCachedContentsRequest", + "ListCachedContentsResponse", "ListChunksRequest", "ListChunksResponse", "ListCorporaRequest", @@ -320,9 +344,11 @@ "TuningSnapshot", "TuningTask", "Type", + "UpdateCachedContentRequest", "UpdateChunkRequest", "UpdateCorpusRequest", "UpdateDocumentRequest", "UpdatePermissionRequest", "UpdateTunedModelRequest", + "VideoMetadata", ) diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/gapic_metadata.json b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/gapic_metadata.json index 0f1a60c1cbd4..24a3b2565007 100644 --- a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/gapic_metadata.json +++ 
b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/gapic_metadata.json @@ -5,6 +5,100 @@ "protoPackage": "google.ai.generativelanguage.v1beta", "schema": "1.0", "services": { + "CacheService": { + "clients": { + "grpc": { + "libraryClient": "CacheServiceClient", + "rpcs": { + "CreateCachedContent": { + "methods": [ + "create_cached_content" + ] + }, + "DeleteCachedContent": { + "methods": [ + "delete_cached_content" + ] + }, + "GetCachedContent": { + "methods": [ + "get_cached_content" + ] + }, + "ListCachedContents": { + "methods": [ + "list_cached_contents" + ] + }, + "UpdateCachedContent": { + "methods": [ + "update_cached_content" + ] + } + } + }, + "grpc-async": { + "libraryClient": "CacheServiceAsyncClient", + "rpcs": { + "CreateCachedContent": { + "methods": [ + "create_cached_content" + ] + }, + "DeleteCachedContent": { + "methods": [ + "delete_cached_content" + ] + }, + "GetCachedContent": { + "methods": [ + "get_cached_content" + ] + }, + "ListCachedContents": { + "methods": [ + "list_cached_contents" + ] + }, + "UpdateCachedContent": { + "methods": [ + "update_cached_content" + ] + } + } + }, + "rest": { + "libraryClient": "CacheServiceClient", + "rpcs": { + "CreateCachedContent": { + "methods": [ + "create_cached_content" + ] + }, + "DeleteCachedContent": { + "methods": [ + "delete_cached_content" + ] + }, + "GetCachedContent": { + "methods": [ + "get_cached_content" + ] + }, + "ListCachedContents": { + "methods": [ + "list_cached_contents" + ] + }, + "UpdateCachedContent": { + "methods": [ + "update_cached_content" + ] + } + } + } + } + }, "DiscussService": { "clients": { "grpc": { diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/gapic_version.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/gapic_version.py index 558c8aab67c5..1a1d9a4de546 100644 --- a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/gapic_version.py +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.0.0" # {x-release-please-version} +__version__ = "0.6.6" # {x-release-please-version} diff --git a/packages/google-maps-mapsplatformdatasets/google/maps/mapsplatformdatasets_v1alpha/types/maps_platform_datasets_alpha_service.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/cache_service/__init__.py similarity index 77% rename from packages/google-maps-mapsplatformdatasets/google/maps/mapsplatformdatasets_v1alpha/types/maps_platform_datasets_alpha_service.py rename to packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/cache_service/__init__.py index a28a6b139780..2f8bc2e1ba03 100644 --- a/packages/google-maps-mapsplatformdatasets/google/maps/mapsplatformdatasets_v1alpha/types/maps_platform_datasets_alpha_service.py +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/cache_service/__init__.py @@ -13,12 +13,10 @@ # See the License for the specific language governing permissions and # limitations under the License. 
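Editorial aside, not part of the diff: the gapic_metadata.json hunk above registers grpc, grpc-asyncio, and rest clients for the new CacheService. A short sketch of selecting a transport explicitly at construction time:

```python
# Hedged sketch (not part of the diff): choosing a transport for CacheService.
from google.ai import generativelanguage_v1beta as glm_beta

rest_client = glm_beta.CacheServiceClient(transport="rest")
grpc_client = glm_beta.CacheServiceClient()  # default transport is gRPC
```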
# -import proto # type: ignore +from .async_client import CacheServiceAsyncClient +from .client import CacheServiceClient -__protobuf__ = proto.module( - package="google.maps.mapsplatformdatasets.v1alpha", - manifest={}, +__all__ = ( + "CacheServiceClient", + "CacheServiceAsyncClient", ) - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/cache_service/async_client.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/cache_service/async_client.py new file mode 100644 index 000000000000..e3954cce5f92 --- /dev/null +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/cache_service/async_client.py @@ -0,0 +1,799 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +import functools +import re +from typing import ( + Callable, + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, +) + +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry_async as retries +from google.api_core.client_options import ClientOptions +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.ai.generativelanguage_v1beta import gapic_version as package_version + +try: + OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.AsyncRetry, object, None] # type: ignore + +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import duration_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore + +from google.ai.generativelanguage_v1beta.services.cache_service import pagers +from google.ai.generativelanguage_v1beta.types import ( + cached_content as gag_cached_content, +) +from google.ai.generativelanguage_v1beta.types import cache_service +from google.ai.generativelanguage_v1beta.types import cached_content +from google.ai.generativelanguage_v1beta.types import content + +from .client import CacheServiceClient +from .transports.base import DEFAULT_CLIENT_INFO, CacheServiceTransport +from .transports.grpc_asyncio import CacheServiceGrpcAsyncIOTransport + + +class CacheServiceAsyncClient: + """API for managing cache of content (CachedContent resources) + that can be used in GenerativeService requests. This way + generate content requests can benefit from preprocessing work + being done earlier, possibly lowering their computational cost. + It is intended to be used with large contexts. + """ + + _client: CacheServiceClient + + # Copy defaults from the synchronous client for use here. 
+ # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. + DEFAULT_ENDPOINT = CacheServiceClient.DEFAULT_ENDPOINT + DEFAULT_MTLS_ENDPOINT = CacheServiceClient.DEFAULT_MTLS_ENDPOINT + _DEFAULT_ENDPOINT_TEMPLATE = CacheServiceClient._DEFAULT_ENDPOINT_TEMPLATE + _DEFAULT_UNIVERSE = CacheServiceClient._DEFAULT_UNIVERSE + + cached_content_path = staticmethod(CacheServiceClient.cached_content_path) + parse_cached_content_path = staticmethod( + CacheServiceClient.parse_cached_content_path + ) + model_path = staticmethod(CacheServiceClient.model_path) + parse_model_path = staticmethod(CacheServiceClient.parse_model_path) + common_billing_account_path = staticmethod( + CacheServiceClient.common_billing_account_path + ) + parse_common_billing_account_path = staticmethod( + CacheServiceClient.parse_common_billing_account_path + ) + common_folder_path = staticmethod(CacheServiceClient.common_folder_path) + parse_common_folder_path = staticmethod(CacheServiceClient.parse_common_folder_path) + common_organization_path = staticmethod(CacheServiceClient.common_organization_path) + parse_common_organization_path = staticmethod( + CacheServiceClient.parse_common_organization_path + ) + common_project_path = staticmethod(CacheServiceClient.common_project_path) + parse_common_project_path = staticmethod( + CacheServiceClient.parse_common_project_path + ) + common_location_path = staticmethod(CacheServiceClient.common_location_path) + parse_common_location_path = staticmethod( + CacheServiceClient.parse_common_location_path + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + CacheServiceAsyncClient: The constructed client. + """ + return CacheServiceClient.from_service_account_info.__func__(CacheServiceAsyncClient, info, *args, **kwargs) # type: ignore + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + CacheServiceAsyncClient: The constructed client. + """ + return CacheServiceClient.from_service_account_file.__func__(CacheServiceAsyncClient, filename, *args, **kwargs) # type: ignore + + from_service_account_json = from_service_account_file + + @classmethod + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[ClientOptions] = None + ): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. 
+ (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + return CacheServiceClient.get_mtls_endpoint_and_cert_source(client_options) # type: ignore + + @property + def transport(self) -> CacheServiceTransport: + """Returns the transport used by the client instance. + + Returns: + CacheServiceTransport: The transport used by the client instance. + """ + return self._client.transport + + @property + def api_endpoint(self): + """Return the API endpoint used by the client instance. + + Returns: + str: The API endpoint used by the client instance. + """ + return self._client._api_endpoint + + @property + def universe_domain(self) -> str: + """Return the universe domain used by the client instance. + + Returns: + str: The universe domain used + by the client instance. + """ + return self._client._universe_domain + + get_transport_class = functools.partial( + type(CacheServiceClient).get_transport_class, type(CacheServiceClient) + ) + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[ + Union[str, CacheServiceTransport, Callable[..., CacheServiceTransport]] + ] = "grpc_asyncio", + client_options: Optional[ClientOptions] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the cache service async client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Optional[Union[str,CacheServiceTransport,Callable[..., CacheServiceTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport to use. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the CacheServiceTransport constructor. + If set to None, a transport is chosen automatically. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): + Custom options for the client. + + 1. The ``api_endpoint`` property can be used to override the + default endpoint provided by the client when ``transport`` is + not explicitly provided. Only if this property is not set and + ``transport`` was not explicitly provided, the endpoint is + determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment + variable, which have one of the following values: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto-switch to the + default mTLS endpoint if client certificate is present; this is + the default value). + + 2. 
If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide a client certificate for mTLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + 3. The ``universe_domain`` property can be used to override the + default "googleapis.com" universe. Note that ``api_endpoint`` + property still takes precedence; and ``universe_domain`` is + currently not supported for mTLS. + + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + """ + self._client = CacheServiceClient( + credentials=credentials, + transport=transport, + client_options=client_options, + client_info=client_info, + ) + + async def list_cached_contents( + self, + request: Optional[Union[cache_service.ListCachedContentsRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListCachedContentsAsyncPager: + r"""Lists CachedContents. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.ai import generativelanguage_v1beta + + async def sample_list_cached_contents(): + # Create a client + client = generativelanguage_v1beta.CacheServiceAsyncClient() + + # Initialize request argument(s) + request = generativelanguage_v1beta.ListCachedContentsRequest( + ) + + # Make the request + page_result = client.list_cached_contents(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.ai.generativelanguage_v1beta.types.ListCachedContentsRequest, dict]]): + The request object. Request to list CachedContents. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.ai.generativelanguage_v1beta.services.cache_service.pagers.ListCachedContentsAsyncPager: + Response with CachedContents list. + + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, cache_service.ListCachedContentsRequest): + request = cache_service.ListCachedContentsRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = self._client._transport._wrapped_methods[ + self._client._transport.list_cached_contents + ] + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListCachedContentsAsyncPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def create_cached_content( + self, + request: Optional[Union[cache_service.CreateCachedContentRequest, dict]] = None, + *, + cached_content: Optional[gag_cached_content.CachedContent] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> gag_cached_content.CachedContent: + r"""Creates CachedContent resource. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.ai import generativelanguage_v1beta + + async def sample_create_cached_content(): + # Create a client + client = generativelanguage_v1beta.CacheServiceAsyncClient() + + # Initialize request argument(s) + request = generativelanguage_v1beta.CreateCachedContentRequest( + ) + + # Make the request + response = await client.create_cached_content(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.ai.generativelanguage_v1beta.types.CreateCachedContentRequest, dict]]): + The request object. Request to create CachedContent. + cached_content (:class:`google.ai.generativelanguage_v1beta.types.CachedContent`): + Required. The cached content to + create. + + This corresponds to the ``cached_content`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.ai.generativelanguage_v1beta.types.CachedContent: + Content that has been preprocessed + and can be used in subsequent request to + GenerativeService. + + Cached content can be only used with + model it was created for. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([cached_content]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
+ if not isinstance(request, cache_service.CreateCachedContentRequest): + request = cache_service.CreateCachedContentRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if cached_content is not None: + request.cached_content = cached_content + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.create_cached_content + ] + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_cached_content( + self, + request: Optional[Union[cache_service.GetCachedContentRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> cached_content.CachedContent: + r"""Reads CachedContent resource. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.ai import generativelanguage_v1beta + + async def sample_get_cached_content(): + # Create a client + client = generativelanguage_v1beta.CacheServiceAsyncClient() + + # Initialize request argument(s) + request = generativelanguage_v1beta.GetCachedContentRequest( + name="name_value", + ) + + # Make the request + response = await client.get_cached_content(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.ai.generativelanguage_v1beta.types.GetCachedContentRequest, dict]]): + The request object. Request to read CachedContent. + name (:class:`str`): + Required. The resource name referring to the content + cache entry. Format: ``cachedContents/{id}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.ai.generativelanguage_v1beta.types.CachedContent: + Content that has been preprocessed + and can be used in subsequent request to + GenerativeService. + + Cached content can be only used with + model it was created for. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
+ if not isinstance(request, cache_service.GetCachedContentRequest): + request = cache_service.GetCachedContentRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.get_cached_content + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def update_cached_content( + self, + request: Optional[Union[cache_service.UpdateCachedContentRequest, dict]] = None, + *, + cached_content: Optional[gag_cached_content.CachedContent] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> gag_cached_content.CachedContent: + r"""Updates CachedContent resource (only expiration is + updatable). + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.ai import generativelanguage_v1beta + + async def sample_update_cached_content(): + # Create a client + client = generativelanguage_v1beta.CacheServiceAsyncClient() + + # Initialize request argument(s) + request = generativelanguage_v1beta.UpdateCachedContentRequest( + ) + + # Make the request + response = await client.update_cached_content(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.ai.generativelanguage_v1beta.types.UpdateCachedContentRequest, dict]]): + The request object. Request to update CachedContent. + cached_content (:class:`google.ai.generativelanguage_v1beta.types.CachedContent`): + Required. The content cache entry to + update + + This corresponds to the ``cached_content`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): + The list of fields to update. + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.ai.generativelanguage_v1beta.types.CachedContent: + Content that has been preprocessed + and can be used in subsequent request to + GenerativeService. + + Cached content can be only used with + model it was created for. 
+ + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([cached_content, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, cache_service.UpdateCachedContentRequest): + request = cache_service.UpdateCachedContentRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if cached_content is not None: + request.cached_content = cached_content + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.update_cached_content + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("cached_content.name", request.cached_content.name),) + ), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def delete_cached_content( + self, + request: Optional[Union[cache_service.DeleteCachedContentRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes CachedContent resource. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.ai import generativelanguage_v1beta + + async def sample_delete_cached_content(): + # Create a client + client = generativelanguage_v1beta.CacheServiceAsyncClient() + + # Initialize request argument(s) + request = generativelanguage_v1beta.DeleteCachedContentRequest( + name="name_value", + ) + + # Make the request + await client.delete_cached_content(request=request) + + Args: + request (Optional[Union[google.ai.generativelanguage_v1beta.types.DeleteCachedContentRequest, dict]]): + The request object. Request to delete CachedContent. + name (:class:`str`): + Required. The resource name referring to the content + cache entry Format: ``cachedContents/{id}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, cache_service.DeleteCachedContentRequest): + request = cache_service.DeleteCachedContentRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.delete_cached_content + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def __aenter__(self) -> "CacheServiceAsyncClient": + return self + + async def __aexit__(self, exc_type, exc, tb): + await self.transport.close() + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("CacheServiceAsyncClient",) diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/cache_service/client.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/cache_service/client.py new file mode 100644 index 000000000000..d40789994272 --- /dev/null +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/cache_service/client.py @@ -0,0 +1,1215 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +import os +import re +from typing import ( + Callable, + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, + cast, +) +import warnings + +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.ai.generativelanguage_v1beta import gapic_version as package_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import duration_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore + +from google.ai.generativelanguage_v1beta.services.cache_service import pagers +from google.ai.generativelanguage_v1beta.types import ( + cached_content as gag_cached_content, +) +from google.ai.generativelanguage_v1beta.types import cache_service +from google.ai.generativelanguage_v1beta.types import cached_content +from google.ai.generativelanguage_v1beta.types import content + +from .transports.base import DEFAULT_CLIENT_INFO, CacheServiceTransport +from .transports.grpc import CacheServiceGrpcTransport +from .transports.grpc_asyncio import CacheServiceGrpcAsyncIOTransport +from .transports.rest import CacheServiceRestTransport + + +class CacheServiceClientMeta(type): + """Metaclass for the CacheService client. + + This provides class-level methods for building and retrieving + support objects (e.g. transport) without polluting the client instance + objects. + """ + + _transport_registry = OrderedDict() # type: Dict[str, Type[CacheServiceTransport]] + _transport_registry["grpc"] = CacheServiceGrpcTransport + _transport_registry["grpc_asyncio"] = CacheServiceGrpcAsyncIOTransport + _transport_registry["rest"] = CacheServiceRestTransport + + def get_transport_class( + cls, + label: Optional[str] = None, + ) -> Type[CacheServiceTransport]: + """Returns an appropriate transport class. + + Args: + label: The name of the desired transport. If none is + provided, then the first transport in the registry is used. + + Returns: + The transport class to use. + """ + # If a specific transport is requested, return that one. + if label: + return cls._transport_registry[label] + + # No transport is requested; return the default (that is, the first one + # in the dictionary). + return next(iter(cls._transport_registry.values())) + + +class CacheServiceClient(metaclass=CacheServiceClientMeta): + """API for managing cache of content (CachedContent resources) + that can be used in GenerativeService requests. This way + generate content requests can benefit from preprocessing work + being done earlier, possibly lowering their computational cost. + It is intended to be used with large contexts. + """ + + @staticmethod + def _get_default_mtls_endpoint(api_endpoint): + """Converts api endpoint to mTLS endpoint. 
+ + Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to + "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. + Args: + api_endpoint (Optional[str]): the api endpoint to convert. + Returns: + str: converted mTLS api endpoint. + """ + if not api_endpoint: + return api_endpoint + + mtls_endpoint_re = re.compile( + r"(?P[^.]+)(?P\.mtls)?(?P\.sandbox)?(?P\.googleapis\.com)?" + ) + + m = mtls_endpoint_re.match(api_endpoint) + name, mtls, sandbox, googledomain = m.groups() + if mtls or not googledomain: + return api_endpoint + + if sandbox: + return api_endpoint.replace( + "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" + ) + + return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") + + # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. + DEFAULT_ENDPOINT = "generativelanguage.googleapis.com" + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore + DEFAULT_ENDPOINT + ) + + _DEFAULT_ENDPOINT_TEMPLATE = "generativelanguage.{UNIVERSE_DOMAIN}" + _DEFAULT_UNIVERSE = "googleapis.com" + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + CacheServiceClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + CacheServiceClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_file(filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> CacheServiceTransport: + """Returns the transport used by the client instance. + + Returns: + CacheServiceTransport: The transport used by the client + instance. 
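Editorial aside, not part of the diff: the static resource-path helpers defined just below build and parse resource names without touching the network. A brief sketch:

```python
# Hedged sketch (not part of the diff): resource name helpers on the client.
from google.ai import generativelanguage_v1beta as glm_beta

name = glm_beta.CacheServiceClient.cached_content_path("abc123")
assert name == "cachedContents/abc123"
assert glm_beta.CacheServiceClient.parse_cached_content_path(name) == {"id": "abc123"}

model_name = glm_beta.CacheServiceClient.model_path("gemini-1.5-flash-001")
assert model_name == "models/gemini-1.5-flash-001"
```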
+ """ + return self._transport + + @staticmethod + def cached_content_path( + id: str, + ) -> str: + """Returns a fully-qualified cached_content string.""" + return "cachedContents/{id}".format( + id=id, + ) + + @staticmethod + def parse_cached_content_path(path: str) -> Dict[str, str]: + """Parses a cached_content path into its component segments.""" + m = re.match(r"^cachedContents/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def model_path( + model: str, + ) -> str: + """Returns a fully-qualified model string.""" + return "models/{model}".format( + model=model, + ) + + @staticmethod + def parse_model_path(path: str) -> Dict[str, str]: + """Parses a model path into its component segments.""" + m = re.match(r"^models/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_billing_account_path( + billing_account: str, + ) -> str: + """Returns a fully-qualified billing_account string.""" + return "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + + @staticmethod + def parse_common_billing_account_path(path: str) -> Dict[str, str]: + """Parse a billing_account path into its component segments.""" + m = re.match(r"^billingAccounts/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_folder_path( + folder: str, + ) -> str: + """Returns a fully-qualified folder string.""" + return "folders/{folder}".format( + folder=folder, + ) + + @staticmethod + def parse_common_folder_path(path: str) -> Dict[str, str]: + """Parse a folder path into its component segments.""" + m = re.match(r"^folders/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_organization_path( + organization: str, + ) -> str: + """Returns a fully-qualified organization string.""" + return "organizations/{organization}".format( + organization=organization, + ) + + @staticmethod + def parse_common_organization_path(path: str) -> Dict[str, str]: + """Parse a organization path into its component segments.""" + m = re.match(r"^organizations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_project_path( + project: str, + ) -> str: + """Returns a fully-qualified project string.""" + return "projects/{project}".format( + project=project, + ) + + @staticmethod + def parse_common_project_path(path: str) -> Dict[str, str]: + """Parse a project path into its component segments.""" + m = re.match(r"^projects/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_location_path( + project: str, + location: str, + ) -> str: + """Returns a fully-qualified location string.""" + return "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) + + @staticmethod + def parse_common_location_path(path: str) -> Dict[str, str]: + """Parse a location path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @classmethod + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[client_options_lib.ClientOptions] = None + ): + """Deprecated. Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. 
+ (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + + warnings.warn( + "get_mtls_endpoint_and_cert_source is deprecated. Use the api_endpoint property instead.", + DeprecationWarning, + ) + if client_options is None: + client_options = client_options_lib.ClientOptions() + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_client_cert not in ("true", "false"): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError( + "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Figure out the client cert source to use. + client_cert_source = None + if use_client_cert == "true": + if client_options.client_cert_source: + client_cert_source = client_options.client_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + + # Figure out which api endpoint to use. + if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + elif use_mtls_endpoint == "always" or ( + use_mtls_endpoint == "auto" and client_cert_source + ): + api_endpoint = cls.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = cls.DEFAULT_ENDPOINT + + return api_endpoint, client_cert_source + + @staticmethod + def _read_environment_variables(): + """Returns the environment variables used by the client. + + Returns: + Tuple[bool, str, str]: returns the GOOGLE_API_USE_CLIENT_CERTIFICATE, + GOOGLE_API_USE_MTLS_ENDPOINT, and GOOGLE_CLOUD_UNIVERSE_DOMAIN environment variables. + + Raises: + ValueError: If GOOGLE_API_USE_CLIENT_CERTIFICATE is not + any of ["true", "false"]. + google.auth.exceptions.MutualTLSChannelError: If GOOGLE_API_USE_MTLS_ENDPOINT + is not any of ["auto", "never", "always"]. 
+ """ + use_client_cert = os.getenv( + "GOOGLE_API_USE_CLIENT_CERTIFICATE", "false" + ).lower() + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto").lower() + universe_domain_env = os.getenv("GOOGLE_CLOUD_UNIVERSE_DOMAIN") + if use_client_cert not in ("true", "false"): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError( + "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + return use_client_cert == "true", use_mtls_endpoint, universe_domain_env + + @staticmethod + def _get_client_cert_source(provided_cert_source, use_cert_flag): + """Return the client cert source to be used by the client. + + Args: + provided_cert_source (bytes): The client certificate source provided. + use_cert_flag (bool): A flag indicating whether to use the client certificate. + + Returns: + bytes or None: The client cert source to be used by the client. + """ + client_cert_source = None + if use_cert_flag: + if provided_cert_source: + client_cert_source = provided_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + return client_cert_source + + @staticmethod + def _get_api_endpoint( + api_override, client_cert_source, universe_domain, use_mtls_endpoint + ): + """Return the API endpoint used by the client. + + Args: + api_override (str): The API endpoint override. If specified, this is always + the return value of this function and the other arguments are not used. + client_cert_source (bytes): The client certificate source used by the client. + universe_domain (str): The universe domain used by the client. + use_mtls_endpoint (str): How to use the mTLS endpoint, which depends also on the other parameters. + Possible values are "always", "auto", or "never". + + Returns: + str: The API endpoint to be used by the client. + """ + if api_override is not None: + api_endpoint = api_override + elif use_mtls_endpoint == "always" or ( + use_mtls_endpoint == "auto" and client_cert_source + ): + _default_universe = CacheServiceClient._DEFAULT_UNIVERSE + if universe_domain != _default_universe: + raise MutualTLSChannelError( + f"mTLS is not supported in any universe other than {_default_universe}." + ) + api_endpoint = CacheServiceClient.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = CacheServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=universe_domain + ) + return api_endpoint + + @staticmethod + def _get_universe_domain( + client_universe_domain: Optional[str], universe_domain_env: Optional[str] + ) -> str: + """Return the universe domain used by the client. + + Args: + client_universe_domain (Optional[str]): The universe domain configured via the client options. + universe_domain_env (Optional[str]): The universe domain configured via the "GOOGLE_CLOUD_UNIVERSE_DOMAIN" environment variable. + + Returns: + str: The universe domain to be used by the client. + + Raises: + ValueError: If the universe domain is an empty string. 
+ """ + universe_domain = CacheServiceClient._DEFAULT_UNIVERSE + if client_universe_domain is not None: + universe_domain = client_universe_domain + elif universe_domain_env is not None: + universe_domain = universe_domain_env + if len(universe_domain.strip()) == 0: + raise ValueError("Universe Domain cannot be an empty string.") + return universe_domain + + @staticmethod + def _compare_universes( + client_universe: str, credentials: ga_credentials.Credentials + ) -> bool: + """Returns True iff the universe domains used by the client and credentials match. + + Args: + client_universe (str): The universe domain configured via the client options. + credentials (ga_credentials.Credentials): The credentials being used in the client. + + Returns: + bool: True iff client_universe matches the universe in credentials. + + Raises: + ValueError: when client_universe does not match the universe in credentials. + """ + + default_universe = CacheServiceClient._DEFAULT_UNIVERSE + credentials_universe = getattr(credentials, "universe_domain", default_universe) + + if client_universe != credentials_universe: + raise ValueError( + "The configured universe domain " + f"({client_universe}) does not match the universe domain " + f"found in the credentials ({credentials_universe}). " + "If you haven't configured the universe domain explicitly, " + f"`{default_universe}` is the default." + ) + return True + + def _validate_universe_domain(self): + """Validates client's and credentials' universe domains are consistent. + + Returns: + bool: True iff the configured universe domain is valid. + + Raises: + ValueError: If the configured universe domain is not valid. + """ + self._is_universe_domain_valid = ( + self._is_universe_domain_valid + or CacheServiceClient._compare_universes( + self.universe_domain, self.transport._credentials + ) + ) + return self._is_universe_domain_valid + + @property + def api_endpoint(self): + """Return the API endpoint used by the client instance. + + Returns: + str: The API endpoint used by the client instance. + """ + return self._api_endpoint + + @property + def universe_domain(self) -> str: + """Return the universe domain used by the client instance. + + Returns: + str: The universe domain used by the client instance. + """ + return self._universe_domain + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[ + Union[str, CacheServiceTransport, Callable[..., CacheServiceTransport]] + ] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the cache service client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Optional[Union[str,CacheServiceTransport,Callable[..., CacheServiceTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the CacheServiceTransport constructor. + If set to None, a transport is chosen automatically. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): + Custom options for the client. + + 1. 
The ``api_endpoint`` property can be used to override the + default endpoint provided by the client when ``transport`` is + not explicitly provided. Only if this property is not set and + ``transport`` was not explicitly provided, the endpoint is + determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment + variable, which have one of the following values: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto-switch to the + default mTLS endpoint if client certificate is present; this is + the default value). + + 2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide a client certificate for mTLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + 3. The ``universe_domain`` property can be used to override the + default "googleapis.com" universe. Note that the ``api_endpoint`` + property still takes precedence; and ``universe_domain`` is + currently not supported for mTLS. + + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + """ + self._client_options = client_options + if isinstance(self._client_options, dict): + self._client_options = client_options_lib.from_dict(self._client_options) + if self._client_options is None: + self._client_options = client_options_lib.ClientOptions() + self._client_options = cast( + client_options_lib.ClientOptions, self._client_options + ) + + universe_domain_opt = getattr(self._client_options, "universe_domain", None) + + ( + self._use_client_cert, + self._use_mtls_endpoint, + self._universe_domain_env, + ) = CacheServiceClient._read_environment_variables() + self._client_cert_source = CacheServiceClient._get_client_cert_source( + self._client_options.client_cert_source, self._use_client_cert + ) + self._universe_domain = CacheServiceClient._get_universe_domain( + universe_domain_opt, self._universe_domain_env + ) + self._api_endpoint = None # updated below, depending on `transport` + + # Initialize the universe domain validation. + self._is_universe_domain_valid = False + + api_key_value = getattr(self._client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError( + "client_options.api_key and credentials are mutually exclusive" + ) + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + transport_provided = isinstance(transport, CacheServiceTransport) + if transport_provided: + # transport is a CacheServiceTransport instance. + if credentials or self._client_options.credentials_file or api_key_value: + raise ValueError( + "When providing a transport instance, " + "provide its credentials directly." + ) + if self._client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." 
+ ) + self._transport = cast(CacheServiceTransport, transport) + self._api_endpoint = self._transport.host + + self._api_endpoint = self._api_endpoint or CacheServiceClient._get_api_endpoint( + self._client_options.api_endpoint, + self._client_cert_source, + self._universe_domain, + self._use_mtls_endpoint, + ) + + if not transport_provided: + import google.auth._default # type: ignore + + if api_key_value and hasattr( + google.auth._default, "get_api_key_credentials" + ): + credentials = google.auth._default.get_api_key_credentials( + api_key_value + ) + + transport_init: Union[ + Type[CacheServiceTransport], Callable[..., CacheServiceTransport] + ] = ( + type(self).get_transport_class(transport) + if isinstance(transport, str) or transport is None + else cast(Callable[..., CacheServiceTransport], transport) + ) + # initialize with the provided callable or the passed in class + self._transport = transport_init( + credentials=credentials, + credentials_file=self._client_options.credentials_file, + host=self._api_endpoint, + scopes=self._client_options.scopes, + client_cert_source_for_mtls=self._client_cert_source, + quota_project_id=self._client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + api_audience=self._client_options.api_audience, + ) + + def list_cached_contents( + self, + request: Optional[Union[cache_service.ListCachedContentsRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListCachedContentsPager: + r"""Lists CachedContents. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.ai import generativelanguage_v1beta + + def sample_list_cached_contents(): + # Create a client + client = generativelanguage_v1beta.CacheServiceClient() + + # Initialize request argument(s) + request = generativelanguage_v1beta.ListCachedContentsRequest( + ) + + # Make the request + page_result = client.list_cached_contents(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.ai.generativelanguage_v1beta.types.ListCachedContentsRequest, dict]): + The request object. Request to list CachedContents. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.ai.generativelanguage_v1beta.services.cache_service.pagers.ListCachedContentsPager: + Response with CachedContents list. + + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
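+        # (A plain dict is also accepted for ``request``; the constructor call
+        #  below converts it into a typed ``ListCachedContentsRequest``.)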
+ if not isinstance(request, cache_service.ListCachedContentsRequest): + request = cache_service.ListCachedContentsRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_cached_contents] + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListCachedContentsPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def create_cached_content( + self, + request: Optional[Union[cache_service.CreateCachedContentRequest, dict]] = None, + *, + cached_content: Optional[gag_cached_content.CachedContent] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> gag_cached_content.CachedContent: + r"""Creates CachedContent resource. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.ai import generativelanguage_v1beta + + def sample_create_cached_content(): + # Create a client + client = generativelanguage_v1beta.CacheServiceClient() + + # Initialize request argument(s) + request = generativelanguage_v1beta.CreateCachedContentRequest( + ) + + # Make the request + response = client.create_cached_content(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.ai.generativelanguage_v1beta.types.CreateCachedContentRequest, dict]): + The request object. Request to create CachedContent. + cached_content (google.ai.generativelanguage_v1beta.types.CachedContent): + Required. The cached content to + create. + + This corresponds to the ``cached_content`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.ai.generativelanguage_v1beta.types.CachedContent: + Content that has been preprocessed + and can be used in subsequent request to + GenerativeService. + + Cached content can be only used with + model it was created for. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([cached_content]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
+ if not isinstance(request, cache_service.CreateCachedContentRequest): + request = cache_service.CreateCachedContentRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if cached_content is not None: + request.cached_content = cached_content + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_cached_content] + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_cached_content( + self, + request: Optional[Union[cache_service.GetCachedContentRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> cached_content.CachedContent: + r"""Reads CachedContent resource. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.ai import generativelanguage_v1beta + + def sample_get_cached_content(): + # Create a client + client = generativelanguage_v1beta.CacheServiceClient() + + # Initialize request argument(s) + request = generativelanguage_v1beta.GetCachedContentRequest( + name="name_value", + ) + + # Make the request + response = client.get_cached_content(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.ai.generativelanguage_v1beta.types.GetCachedContentRequest, dict]): + The request object. Request to read CachedContent. + name (str): + Required. The resource name referring to the content + cache entry. Format: ``cachedContents/{id}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.ai.generativelanguage_v1beta.types.CachedContent: + Content that has been preprocessed + and can be used in subsequent request to + GenerativeService. + + Cached content can be only used with + model it was created for. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
+ if not isinstance(request, cache_service.GetCachedContentRequest): + request = cache_service.GetCachedContentRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_cached_content] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def update_cached_content( + self, + request: Optional[Union[cache_service.UpdateCachedContentRequest, dict]] = None, + *, + cached_content: Optional[gag_cached_content.CachedContent] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> gag_cached_content.CachedContent: + r"""Updates CachedContent resource (only expiration is + updatable). + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.ai import generativelanguage_v1beta + + def sample_update_cached_content(): + # Create a client + client = generativelanguage_v1beta.CacheServiceClient() + + # Initialize request argument(s) + request = generativelanguage_v1beta.UpdateCachedContentRequest( + ) + + # Make the request + response = client.update_cached_content(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.ai.generativelanguage_v1beta.types.UpdateCachedContentRequest, dict]): + The request object. Request to update CachedContent. + cached_content (google.ai.generativelanguage_v1beta.types.CachedContent): + Required. The content cache entry to + update + + This corresponds to the ``cached_content`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + The list of fields to update. + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.ai.generativelanguage_v1beta.types.CachedContent: + Content that has been preprocessed + and can be used in subsequent request to + GenerativeService. + + Cached content can be only used with + model it was created for. + + """ + # Create or coerce a protobuf request object. 
+ # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([cached_content, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, cache_service.UpdateCachedContentRequest): + request = cache_service.UpdateCachedContentRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if cached_content is not None: + request.cached_content = cached_content + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update_cached_content] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("cached_content.name", request.cached_content.name),) + ), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def delete_cached_content( + self, + request: Optional[Union[cache_service.DeleteCachedContentRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes CachedContent resource. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.ai import generativelanguage_v1beta + + def sample_delete_cached_content(): + # Create a client + client = generativelanguage_v1beta.CacheServiceClient() + + # Initialize request argument(s) + request = generativelanguage_v1beta.DeleteCachedContentRequest( + name="name_value", + ) + + # Make the request + client.delete_cached_content(request=request) + + Args: + request (Union[google.ai.generativelanguage_v1beta.types.DeleteCachedContentRequest, dict]): + The request object. Request to delete CachedContent. + name (str): + Required. The resource name referring to the content + cache entry Format: ``cachedContents/{id}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, cache_service.DeleteCachedContentRequest): + request = cache_service.DeleteCachedContentRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_cached_content] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def __enter__(self) -> "CacheServiceClient": + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! + """ + self.transport.close() + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("CacheServiceClient",) diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/cache_service/pagers.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/cache_service/pagers.py new file mode 100644 index 000000000000..a73278101f3d --- /dev/null +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/cache_service/pagers.py @@ -0,0 +1,155 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import ( + Any, + AsyncIterator, + Awaitable, + Callable, + Iterator, + Optional, + Sequence, + Tuple, +) + +from google.ai.generativelanguage_v1beta.types import cache_service, cached_content + + +class ListCachedContentsPager: + """A pager for iterating through ``list_cached_contents`` requests. + + This class thinly wraps an initial + :class:`google.ai.generativelanguage_v1beta.types.ListCachedContentsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``cached_contents`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListCachedContents`` requests and continue to iterate + through the ``cached_contents`` field on the + corresponding responses. 
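+
+    For illustration, assuming a ``CacheServiceClient`` instance named
+    ``client``, a typical usage pattern looks like::
+
+        pager = client.list_cached_contents()
+        for cached in pager:
+            print(cached.name)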
+ + All the usual :class:`google.ai.generativelanguage_v1beta.types.ListCachedContentsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., cache_service.ListCachedContentsResponse], + request: cache_service.ListCachedContentsRequest, + response: cache_service.ListCachedContentsResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.ai.generativelanguage_v1beta.types.ListCachedContentsRequest): + The initial request object. + response (google.ai.generativelanguage_v1beta.types.ListCachedContentsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = cache_service.ListCachedContentsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[cache_service.ListCachedContentsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[cached_content.CachedContent]: + for page in self.pages: + yield from page.cached_contents + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListCachedContentsAsyncPager: + """A pager for iterating through ``list_cached_contents`` requests. + + This class thinly wraps an initial + :class:`google.ai.generativelanguage_v1beta.types.ListCachedContentsResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``cached_contents`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListCachedContents`` requests and continue to iterate + through the ``cached_contents`` field on the + corresponding responses. + + All the usual :class:`google.ai.generativelanguage_v1beta.types.ListCachedContentsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., Awaitable[cache_service.ListCachedContentsResponse]], + request: cache_service.ListCachedContentsRequest, + response: cache_service.ListCachedContentsResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.ai.generativelanguage_v1beta.types.ListCachedContentsRequest): + The initial request object. + response (google.ai.generativelanguage_v1beta.types.ListCachedContentsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = cache_service.ListCachedContentsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[cache_service.ListCachedContentsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + + def __aiter__(self) -> AsyncIterator[cached_content.CachedContent]: + async def async_generator(): + async for page in self.pages: + for response in page.cached_contents: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/cache_service/transports/__init__.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/cache_service/transports/__init__.py new file mode 100644 index 000000000000..cef091cd23ab --- /dev/null +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/cache_service/transports/__init__.py @@ -0,0 +1,36 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +from typing import Dict, Type + +from .base import CacheServiceTransport +from .grpc import CacheServiceGrpcTransport +from .grpc_asyncio import CacheServiceGrpcAsyncIOTransport +from .rest import CacheServiceRestInterceptor, CacheServiceRestTransport + +# Compile a registry of transports. +_transport_registry = OrderedDict() # type: Dict[str, Type[CacheServiceTransport]] +_transport_registry["grpc"] = CacheServiceGrpcTransport +_transport_registry["grpc_asyncio"] = CacheServiceGrpcAsyncIOTransport +_transport_registry["rest"] = CacheServiceRestTransport + +__all__ = ( + "CacheServiceTransport", + "CacheServiceGrpcTransport", + "CacheServiceGrpcAsyncIOTransport", + "CacheServiceRestTransport", + "CacheServiceRestInterceptor", +) diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/cache_service/transports/base.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/cache_service/transports/base.py new file mode 100644 index 000000000000..3e4f6ea5bb4b --- /dev/null +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/cache_service/transports/base.py @@ -0,0 +1,230 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import abc +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union + +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.oauth2 import service_account # type: ignore +from google.protobuf import empty_pb2 # type: ignore + +from google.ai.generativelanguage_v1beta import gapic_version as package_version +from google.ai.generativelanguage_v1beta.types import ( + cached_content as gag_cached_content, +) +from google.ai.generativelanguage_v1beta.types import cache_service +from google.ai.generativelanguage_v1beta.types import cached_content + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +class CacheServiceTransport(abc.ABC): + """Abstract transport class for CacheService.""" + + AUTH_SCOPES = () + + DEFAULT_HOST: str = "generativelanguage.googleapis.com" + + def __init__( + self, + *, + host: str = DEFAULT_HOST, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + **kwargs, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'generativelanguage.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A list of scopes. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + """ + + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} + + # Save the scopes. + self._scopes = scopes + + # If no credentials are provided, then determine the appropriate + # defaults. 
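+        # Note: ``credentials`` and ``credentials_file`` are mutually exclusive;
+        # if neither is supplied, application default credentials are used.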
+ if credentials and credentials_file: + raise core_exceptions.DuplicateCredentialArgs( + "'credentials_file' and 'credentials' are mutually exclusive" + ) + + if credentials_file is not None: + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, **scopes_kwargs, quota_project_id=quota_project_id + ) + elif credentials is None: + credentials, _ = google.auth.default( + **scopes_kwargs, quota_project_id=quota_project_id + ) + # Don't apply audience if the credentials file passed from user. + if hasattr(credentials, "with_gdch_audience"): + credentials = credentials.with_gdch_audience( + api_audience if api_audience else host + ) + + # If the credentials are service account credentials, then always try to use self signed JWT. + if ( + always_use_jwt_access + and isinstance(credentials, service_account.Credentials) + and hasattr(service_account.Credentials, "with_always_use_jwt_access") + ): + credentials = credentials.with_always_use_jwt_access(True) + + # Save the credentials. + self._credentials = credentials + + # Save the hostname. Default to port 443 (HTTPS) if none is specified. + if ":" not in host: + host += ":443" + self._host = host + + @property + def host(self): + return self._host + + def _prep_wrapped_messages(self, client_info): + # Precompute the wrapped methods. + self._wrapped_methods = { + self.list_cached_contents: gapic_v1.method.wrap_method( + self.list_cached_contents, + default_timeout=None, + client_info=client_info, + ), + self.create_cached_content: gapic_v1.method.wrap_method( + self.create_cached_content, + default_timeout=None, + client_info=client_info, + ), + self.get_cached_content: gapic_v1.method.wrap_method( + self.get_cached_content, + default_timeout=None, + client_info=client_info, + ), + self.update_cached_content: gapic_v1.method.wrap_method( + self.update_cached_content, + default_timeout=None, + client_info=client_info, + ), + self.delete_cached_content: gapic_v1.method.wrap_method( + self.delete_cached_content, + default_timeout=None, + client_info=client_info, + ), + } + + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! 
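+
+        For illustration: using the client as a context manager
+        (``with CacheServiceClient() as client: ...``) closes the transport
+        automatically on exit.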
+ """ + raise NotImplementedError() + + @property + def list_cached_contents( + self, + ) -> Callable[ + [cache_service.ListCachedContentsRequest], + Union[ + cache_service.ListCachedContentsResponse, + Awaitable[cache_service.ListCachedContentsResponse], + ], + ]: + raise NotImplementedError() + + @property + def create_cached_content( + self, + ) -> Callable[ + [cache_service.CreateCachedContentRequest], + Union[ + gag_cached_content.CachedContent, + Awaitable[gag_cached_content.CachedContent], + ], + ]: + raise NotImplementedError() + + @property + def get_cached_content( + self, + ) -> Callable[ + [cache_service.GetCachedContentRequest], + Union[cached_content.CachedContent, Awaitable[cached_content.CachedContent]], + ]: + raise NotImplementedError() + + @property + def update_cached_content( + self, + ) -> Callable[ + [cache_service.UpdateCachedContentRequest], + Union[ + gag_cached_content.CachedContent, + Awaitable[gag_cached_content.CachedContent], + ], + ]: + raise NotImplementedError() + + @property + def delete_cached_content( + self, + ) -> Callable[ + [cache_service.DeleteCachedContentRequest], + Union[empty_pb2.Empty, Awaitable[empty_pb2.Empty]], + ]: + raise NotImplementedError() + + @property + def kind(self) -> str: + raise NotImplementedError() + + +__all__ = ("CacheServiceTransport",) diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/cache_service/transports/grpc.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/cache_service/transports/grpc.py new file mode 100644 index 000000000000..2f3c6572bc62 --- /dev/null +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/cache_service/transports/grpc.py @@ -0,0 +1,394 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, grpc_helpers +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore +import grpc # type: ignore + +from google.ai.generativelanguage_v1beta.types import ( + cached_content as gag_cached_content, +) +from google.ai.generativelanguage_v1beta.types import cache_service +from google.ai.generativelanguage_v1beta.types import cached_content + +from .base import DEFAULT_CLIENT_INFO, CacheServiceTransport + + +class CacheServiceGrpcTransport(CacheServiceTransport): + """gRPC backend transport for CacheService. + + API for managing cache of content (CachedContent resources) + that can be used in GenerativeService requests. 
This way + generate content requests can benefit from preprocessing work + being done earlier, possibly lowering their computational cost. + It is intended to be used with large contexts. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + + _stubs: Dict[str, Callable] + + def __init__( + self, + *, + host: str = "generativelanguage.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'generativelanguage.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if a ``channel`` instance is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if a ``channel`` instance is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if a ``channel`` instance is provided. + channel (Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]]): + A ``Channel`` instance through which to make calls, or a Callable + that constructs and returns one. If set to None, ``self.create_channel`` + is used to create the channel. If a Callable is given, it will be called + with the same arguments as used in ``self.create_channel``. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if a ``channel`` instance is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. 
+ client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if isinstance(channel, grpc.Channel): + # Ignore credentials if a channel was passed. + credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + # initialize with the provided callable or the default channel + channel_init = channel or type(self).create_channel + self._grpc_channel = channel_init( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @classmethod + def create_channel( + cls, + host: str = "generativelanguage.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> grpc.Channel: + """Create and return a gRPC channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. 
If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + grpc.Channel: A gRPC channel object. + + Raises: + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + + return grpc_helpers.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs, + ) + + @property + def grpc_channel(self) -> grpc.Channel: + """Return the channel designed to connect to this service.""" + return self._grpc_channel + + @property + def list_cached_contents( + self, + ) -> Callable[ + [cache_service.ListCachedContentsRequest], + cache_service.ListCachedContentsResponse, + ]: + r"""Return a callable for the list cached contents method over gRPC. + + Lists CachedContents. + + Returns: + Callable[[~.ListCachedContentsRequest], + ~.ListCachedContentsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_cached_contents" not in self._stubs: + self._stubs["list_cached_contents"] = self.grpc_channel.unary_unary( + "/google.ai.generativelanguage.v1beta.CacheService/ListCachedContents", + request_serializer=cache_service.ListCachedContentsRequest.serialize, + response_deserializer=cache_service.ListCachedContentsResponse.deserialize, + ) + return self._stubs["list_cached_contents"] + + @property + def create_cached_content( + self, + ) -> Callable[ + [cache_service.CreateCachedContentRequest], gag_cached_content.CachedContent + ]: + r"""Return a callable for the create cached content method over gRPC. + + Creates CachedContent resource. + + Returns: + Callable[[~.CreateCachedContentRequest], + ~.CachedContent]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_cached_content" not in self._stubs: + self._stubs["create_cached_content"] = self.grpc_channel.unary_unary( + "/google.ai.generativelanguage.v1beta.CacheService/CreateCachedContent", + request_serializer=cache_service.CreateCachedContentRequest.serialize, + response_deserializer=gag_cached_content.CachedContent.deserialize, + ) + return self._stubs["create_cached_content"] + + @property + def get_cached_content( + self, + ) -> Callable[ + [cache_service.GetCachedContentRequest], cached_content.CachedContent + ]: + r"""Return a callable for the get cached content method over gRPC. + + Reads CachedContent resource. 
+ + Returns: + Callable[[~.GetCachedContentRequest], + ~.CachedContent]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_cached_content" not in self._stubs: + self._stubs["get_cached_content"] = self.grpc_channel.unary_unary( + "/google.ai.generativelanguage.v1beta.CacheService/GetCachedContent", + request_serializer=cache_service.GetCachedContentRequest.serialize, + response_deserializer=cached_content.CachedContent.deserialize, + ) + return self._stubs["get_cached_content"] + + @property + def update_cached_content( + self, + ) -> Callable[ + [cache_service.UpdateCachedContentRequest], gag_cached_content.CachedContent + ]: + r"""Return a callable for the update cached content method over gRPC. + + Updates CachedContent resource (only expiration is + updatable). + + Returns: + Callable[[~.UpdateCachedContentRequest], + ~.CachedContent]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_cached_content" not in self._stubs: + self._stubs["update_cached_content"] = self.grpc_channel.unary_unary( + "/google.ai.generativelanguage.v1beta.CacheService/UpdateCachedContent", + request_serializer=cache_service.UpdateCachedContentRequest.serialize, + response_deserializer=gag_cached_content.CachedContent.deserialize, + ) + return self._stubs["update_cached_content"] + + @property + def delete_cached_content( + self, + ) -> Callable[[cache_service.DeleteCachedContentRequest], empty_pb2.Empty]: + r"""Return a callable for the delete cached content method over gRPC. + + Deletes CachedContent resource. + + Returns: + Callable[[~.DeleteCachedContentRequest], + ~.Empty]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_cached_content" not in self._stubs: + self._stubs["delete_cached_content"] = self.grpc_channel.unary_unary( + "/google.ai.generativelanguage.v1beta.CacheService/DeleteCachedContent", + request_serializer=cache_service.DeleteCachedContentRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs["delete_cached_content"] + + def close(self): + self.grpc_channel.close() + + @property + def kind(self) -> str: + return "grpc" + + +__all__ = ("CacheServiceGrpcTransport",) diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/cache_service/transports/grpc_asyncio.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/cache_service/transports/grpc_asyncio.py new file mode 100644 index 000000000000..931c69f90d0b --- /dev/null +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/cache_service/transports/grpc_asyncio.py @@ -0,0 +1,428 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1, grpc_helpers_async +from google.api_core import retry_async as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore +import grpc # type: ignore +from grpc.experimental import aio # type: ignore + +from google.ai.generativelanguage_v1beta.types import ( + cached_content as gag_cached_content, +) +from google.ai.generativelanguage_v1beta.types import cache_service +from google.ai.generativelanguage_v1beta.types import cached_content + +from .base import DEFAULT_CLIENT_INFO, CacheServiceTransport +from .grpc import CacheServiceGrpcTransport + + +class CacheServiceGrpcAsyncIOTransport(CacheServiceTransport): + """gRPC AsyncIO backend transport for CacheService. + + API for managing cache of content (CachedContent resources) + that can be used in GenerativeService requests. This way + generate content requests can benefit from preprocessing work + being done earlier, possibly lowering their computational cost. + It is intended to be used with large contexts. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + + _grpc_channel: aio.Channel + _stubs: Dict[str, Callable] = {} + + @classmethod + def create_channel( + cls, + host: str = "generativelanguage.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> aio.Channel: + """Create and return a gRPC AsyncIO channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + aio.Channel: A gRPC AsyncIO channel object. 
+ """ + + return grpc_helpers_async.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs, + ) + + def __init__( + self, + *, + host: str = "generativelanguage.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[Union[aio.Channel, Callable[..., aio.Channel]]] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'generativelanguage.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if a ``channel`` instance is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if a ``channel`` instance is provided. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + channel (Optional[Union[aio.Channel, Callable[..., aio.Channel]]]): + A ``Channel`` instance through which to make calls, or a Callable + that constructs and returns one. If set to None, ``self.create_channel`` + is used to create the channel. If a Callable is given, it will be called + with the same arguments as used in ``self.create_channel``. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if a ``channel`` instance is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. 
+ Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if isinstance(channel, aio.Channel): + # Ignore credentials if a channel was passed. + credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + # initialize with the provided callable or the default channel + channel_init = channel or type(self).create_channel + self._grpc_channel = channel_init( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @property + def grpc_channel(self) -> aio.Channel: + """Create the channel designed to connect to this service. + + This property caches on the instance; repeated calls return + the same channel. + """ + # Return the channel from cache. + return self._grpc_channel + + @property + def list_cached_contents( + self, + ) -> Callable[ + [cache_service.ListCachedContentsRequest], + Awaitable[cache_service.ListCachedContentsResponse], + ]: + r"""Return a callable for the list cached contents method over gRPC. + + Lists CachedContents. + + Returns: + Callable[[~.ListCachedContentsRequest], + Awaitable[~.ListCachedContentsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_cached_contents" not in self._stubs: + self._stubs["list_cached_contents"] = self.grpc_channel.unary_unary( + "/google.ai.generativelanguage.v1beta.CacheService/ListCachedContents", + request_serializer=cache_service.ListCachedContentsRequest.serialize, + response_deserializer=cache_service.ListCachedContentsResponse.deserialize, + ) + return self._stubs["list_cached_contents"] + + @property + def create_cached_content( + self, + ) -> Callable[ + [cache_service.CreateCachedContentRequest], + Awaitable[gag_cached_content.CachedContent], + ]: + r"""Return a callable for the create cached content method over gRPC. + + Creates CachedContent resource. + + Returns: + Callable[[~.CreateCachedContentRequest], + Awaitable[~.CachedContent]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_cached_content" not in self._stubs: + self._stubs["create_cached_content"] = self.grpc_channel.unary_unary( + "/google.ai.generativelanguage.v1beta.CacheService/CreateCachedContent", + request_serializer=cache_service.CreateCachedContentRequest.serialize, + response_deserializer=gag_cached_content.CachedContent.deserialize, + ) + return self._stubs["create_cached_content"] + + @property + def get_cached_content( + self, + ) -> Callable[ + [cache_service.GetCachedContentRequest], Awaitable[cached_content.CachedContent] + ]: + r"""Return a callable for the get cached content method over gRPC. + + Reads CachedContent resource. + + Returns: + Callable[[~.GetCachedContentRequest], + Awaitable[~.CachedContent]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_cached_content" not in self._stubs: + self._stubs["get_cached_content"] = self.grpc_channel.unary_unary( + "/google.ai.generativelanguage.v1beta.CacheService/GetCachedContent", + request_serializer=cache_service.GetCachedContentRequest.serialize, + response_deserializer=cached_content.CachedContent.deserialize, + ) + return self._stubs["get_cached_content"] + + @property + def update_cached_content( + self, + ) -> Callable[ + [cache_service.UpdateCachedContentRequest], + Awaitable[gag_cached_content.CachedContent], + ]: + r"""Return a callable for the update cached content method over gRPC. + + Updates CachedContent resource (only expiration is + updatable). + + Returns: + Callable[[~.UpdateCachedContentRequest], + Awaitable[~.CachedContent]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "update_cached_content" not in self._stubs: + self._stubs["update_cached_content"] = self.grpc_channel.unary_unary( + "/google.ai.generativelanguage.v1beta.CacheService/UpdateCachedContent", + request_serializer=cache_service.UpdateCachedContentRequest.serialize, + response_deserializer=gag_cached_content.CachedContent.deserialize, + ) + return self._stubs["update_cached_content"] + + @property + def delete_cached_content( + self, + ) -> Callable[ + [cache_service.DeleteCachedContentRequest], Awaitable[empty_pb2.Empty] + ]: + r"""Return a callable for the delete cached content method over gRPC. + + Deletes CachedContent resource. + + Returns: + Callable[[~.DeleteCachedContentRequest], + Awaitable[~.Empty]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_cached_content" not in self._stubs: + self._stubs["delete_cached_content"] = self.grpc_channel.unary_unary( + "/google.ai.generativelanguage.v1beta.CacheService/DeleteCachedContent", + request_serializer=cache_service.DeleteCachedContentRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs["delete_cached_content"] + + def _prep_wrapped_messages(self, client_info): + """Precompute the wrapped methods, overriding the base class method to use async wrappers.""" + self._wrapped_methods = { + self.list_cached_contents: gapic_v1.method_async.wrap_method( + self.list_cached_contents, + default_timeout=None, + client_info=client_info, + ), + self.create_cached_content: gapic_v1.method_async.wrap_method( + self.create_cached_content, + default_timeout=None, + client_info=client_info, + ), + self.get_cached_content: gapic_v1.method_async.wrap_method( + self.get_cached_content, + default_timeout=None, + client_info=client_info, + ), + self.update_cached_content: gapic_v1.method_async.wrap_method( + self.update_cached_content, + default_timeout=None, + client_info=client_info, + ), + self.delete_cached_content: gapic_v1.method_async.wrap_method( + self.delete_cached_content, + default_timeout=None, + client_info=client_info, + ), + } + + def close(self): + return self.grpc_channel.close() + + +__all__ = ("CacheServiceGrpcAsyncIOTransport",) diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/cache_service/transports/rest.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/cache_service/transports/rest.py new file mode 100644 index 000000000000..9f0b2708177e --- /dev/null +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/cache_service/transports/rest.py @@ -0,0 +1,821 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
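The REST transport defined in this new file mirrors the gRPC transports above: every RPC is exposed as a callable stub that transcodes the request to an HTTP/1.1 call carrying JSON-encoded protos. A minimal usage sketch follows (assuming application default credentials are configured; in normal use the transport is selected through ``CacheServiceClient`` rather than instantiated directly):

```python
from google.ai.generativelanguage_v1beta.services.cache_service.transports.rest import (
    CacheServiceRestTransport,
)
from google.ai.generativelanguage_v1beta.types import cache_service

# Instantiate the REST transport; credentials are resolved from the environment.
transport = CacheServiceRestTransport()

# Each RPC is a property returning a callable stub; invoking it sends the HTTP request.
response = transport.list_cached_contents(
    cache_service.ListCachedContentsRequest(page_size=10)
)
for cached in response.cached_contents:
    print(cached.name, cached.model)
```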
+# + +import dataclasses +import json # type: ignore +import re +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, path_template, rest_helpers, rest_streaming +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.transport.requests import AuthorizedSession # type: ignore +from google.protobuf import json_format +import grpc # type: ignore +from requests import __version__ as requests_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + + +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore + +from google.ai.generativelanguage_v1beta.types import ( + cached_content as gag_cached_content, +) +from google.ai.generativelanguage_v1beta.types import cache_service +from google.ai.generativelanguage_v1beta.types import cached_content + +from .base import CacheServiceTransport +from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) + + +class CacheServiceRestInterceptor: + """Interceptor for CacheService. + + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. + Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the CacheServiceRestTransport. + + .. 
code-block:: python + class MyCustomCacheServiceInterceptor(CacheServiceRestInterceptor): + def pre_create_cached_content(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_create_cached_content(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_delete_cached_content(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def pre_get_cached_content(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_cached_content(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_cached_contents(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_cached_contents(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_update_cached_content(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_update_cached_content(self, response): + logging.log(f"Received response: {response}") + return response + + transport = CacheServiceRestTransport(interceptor=MyCustomCacheServiceInterceptor()) + client = CacheServiceClient(transport=transport) + + + """ + + def pre_create_cached_content( + self, + request: cache_service.CreateCachedContentRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[cache_service.CreateCachedContentRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for create_cached_content + + Override in a subclass to manipulate the request or metadata + before they are sent to the CacheService server. + """ + return request, metadata + + def post_create_cached_content( + self, response: gag_cached_content.CachedContent + ) -> gag_cached_content.CachedContent: + """Post-rpc interceptor for create_cached_content + + Override in a subclass to manipulate the response + after it is returned by the CacheService server but before + it is returned to user code. + """ + return response + + def pre_delete_cached_content( + self, + request: cache_service.DeleteCachedContentRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[cache_service.DeleteCachedContentRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for delete_cached_content + + Override in a subclass to manipulate the request or metadata + before they are sent to the CacheService server. + """ + return request, metadata + + def pre_get_cached_content( + self, + request: cache_service.GetCachedContentRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[cache_service.GetCachedContentRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_cached_content + + Override in a subclass to manipulate the request or metadata + before they are sent to the CacheService server. + """ + return request, metadata + + def post_get_cached_content( + self, response: cached_content.CachedContent + ) -> cached_content.CachedContent: + """Post-rpc interceptor for get_cached_content + + Override in a subclass to manipulate the response + after it is returned by the CacheService server but before + it is returned to user code. 
+ """ + return response + + def pre_list_cached_contents( + self, + request: cache_service.ListCachedContentsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[cache_service.ListCachedContentsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_cached_contents + + Override in a subclass to manipulate the request or metadata + before they are sent to the CacheService server. + """ + return request, metadata + + def post_list_cached_contents( + self, response: cache_service.ListCachedContentsResponse + ) -> cache_service.ListCachedContentsResponse: + """Post-rpc interceptor for list_cached_contents + + Override in a subclass to manipulate the response + after it is returned by the CacheService server but before + it is returned to user code. + """ + return response + + def pre_update_cached_content( + self, + request: cache_service.UpdateCachedContentRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[cache_service.UpdateCachedContentRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for update_cached_content + + Override in a subclass to manipulate the request or metadata + before they are sent to the CacheService server. + """ + return request, metadata + + def post_update_cached_content( + self, response: gag_cached_content.CachedContent + ) -> gag_cached_content.CachedContent: + """Post-rpc interceptor for update_cached_content + + Override in a subclass to manipulate the response + after it is returned by the CacheService server but before + it is returned to user code. + """ + return response + + +@dataclasses.dataclass +class CacheServiceRestStub: + _session: AuthorizedSession + _host: str + _interceptor: CacheServiceRestInterceptor + + +class CacheServiceRestTransport(CacheServiceTransport): + """REST backend transport for CacheService. + + API for managing cache of content (CachedContent resources) + that can be used in GenerativeService requests. This way + generate content requests can benefit from preprocessing work + being done earlier, possibly lowering their computational cost. + It is intended to be used with large contexts. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends JSON representations of protocol buffers over HTTP/1.1 + + """ + + def __init__( + self, + *, + host: str = "generativelanguage.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", + interceptor: Optional[CacheServiceRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'generativelanguage.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. 
+ This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. It is ignored + if ``channel`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. + """ + # Run the base constructor + # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. + # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the + # credentials object + maybe_url_match = re.match("^(?Phttp(?:s)?://)?(?P.*)$", host) + if maybe_url_match is None: + raise ValueError( + f"Unexpected hostname structure: {host}" + ) # pragma: NO COVER + + url_match_items = maybe_url_match.groupdict() + + host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host + + super().__init__( + host=host, + credentials=credentials, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + self._session = AuthorizedSession( + self._credentials, default_host=self.DEFAULT_HOST + ) + if client_cert_source_for_mtls: + self._session.configure_mtls_channel(client_cert_source_for_mtls) + self._interceptor = interceptor or CacheServiceRestInterceptor() + self._prep_wrapped_messages(client_info) + + class _CreateCachedContent(CacheServiceRestStub): + def __hash__(self): + return hash("CreateCachedContent") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: cache_service.CreateCachedContentRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> gag_cached_content.CachedContent: + r"""Call the create cached content method over HTTP. + + Args: + request (~.cache_service.CreateCachedContentRequest): + The request object. Request to create CachedContent. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.gag_cached_content.CachedContent: + Content that has been preprocessed + and can be used in subsequent request to + GenerativeService. + + Cached content can be only used with + model it was created for. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1beta/cachedContents", + "body": "cached_content", + }, + ] + request, metadata = self._interceptor.pre_create_cached_content( + request, metadata + ) + pb_request = cache_service.CreateCachedContentRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = gag_cached_content.CachedContent() + pb_resp = gag_cached_content.CachedContent.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_create_cached_content(resp) + return resp + + class _DeleteCachedContent(CacheServiceRestStub): + def __hash__(self): + return hash("DeleteCachedContent") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: cache_service.DeleteCachedContentRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ): + r"""Call the delete cached content method over HTTP. + + Args: + request (~.cache_service.DeleteCachedContentRequest): + The request object. Request to delete CachedContent. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v1beta/{name=cachedContents/*}", + }, + ] + request, metadata = self._interceptor.pre_delete_cached_content( + request, metadata + ) + pb_request = cache_service.DeleteCachedContentRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + class _GetCachedContent(CacheServiceRestStub): + def __hash__(self): + return hash("GetCachedContent") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: cache_service.GetCachedContentRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> cached_content.CachedContent: + r"""Call the get cached content method over HTTP. + + Args: + request (~.cache_service.GetCachedContentRequest): + The request object. Request to read CachedContent. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.cached_content.CachedContent: + Content that has been preprocessed + and can be used in subsequent request to + GenerativeService. + + Cached content can be only used with + model it was created for. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1beta/{name=cachedContents/*}", + }, + ] + request, metadata = self._interceptor.pre_get_cached_content( + request, metadata + ) + pb_request = cache_service.GetCachedContentRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = cached_content.CachedContent() + pb_resp = cached_content.CachedContent.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_cached_content(resp) + return resp + + class _ListCachedContents(CacheServiceRestStub): + def __hash__(self): + return hash("ListCachedContents") + + def __call__( + self, + request: cache_service.ListCachedContentsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> cache_service.ListCachedContentsResponse: + r"""Call the list cached contents method over HTTP. + + Args: + request (~.cache_service.ListCachedContentsRequest): + The request object. Request to list CachedContents. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.cache_service.ListCachedContentsResponse: + Response with CachedContents list. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1beta/cachedContents", + }, + ] + request, metadata = self._interceptor.pre_list_cached_contents( + request, metadata + ) + pb_request = cache_service.ListCachedContentsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = cache_service.ListCachedContentsResponse() + pb_resp = cache_service.ListCachedContentsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_cached_contents(resp) + return resp + + class _UpdateCachedContent(CacheServiceRestStub): + def __hash__(self): + return hash("UpdateCachedContent") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: cache_service.UpdateCachedContentRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> gag_cached_content.CachedContent: + r"""Call the update cached content method over HTTP. + + Args: + request (~.cache_service.UpdateCachedContentRequest): + The request object. Request to update CachedContent. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.gag_cached_content.CachedContent: + Content that has been preprocessed + and can be used in subsequent request to + GenerativeService. + + Cached content can be only used with + model it was created for. + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "patch", + "uri": "/v1beta/{cached_content.name=cachedContents/*}", + "body": "cached_content", + }, + ] + request, metadata = self._interceptor.pre_update_cached_content( + request, metadata + ) + pb_request = cache_service.UpdateCachedContentRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = gag_cached_content.CachedContent() + pb_resp = gag_cached_content.CachedContent.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_update_cached_content(resp) + return resp + + @property + def create_cached_content( + self, + ) -> Callable[ + [cache_service.CreateCachedContentRequest], gag_cached_content.CachedContent + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._CreateCachedContent(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete_cached_content( + self, + ) -> Callable[[cache_service.DeleteCachedContentRequest], empty_pb2.Empty]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._DeleteCachedContent(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_cached_content( + self, + ) -> Callable[ + [cache_service.GetCachedContentRequest], cached_content.CachedContent + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetCachedContent(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_cached_contents( + self, + ) -> Callable[ + [cache_service.ListCachedContentsRequest], + cache_service.ListCachedContentsResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListCachedContents(self._session, self._host, self._interceptor) # type: ignore + + @property + def update_cached_content( + self, + ) -> Callable[ + [cache_service.UpdateCachedContentRequest], gag_cached_content.CachedContent + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._UpdateCachedContent(self._session, self._host, self._interceptor) # type: ignore + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__ = ("CacheServiceRestTransport",) diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/file_service/async_client.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/file_service/async_client.py index b053cf0c1562..ba4e975df277 100644 --- a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/file_service/async_client.py +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/file_service/async_client.py @@ -45,6 +45,7 @@ from google.longrunning import operations_pb2 # type: ignore from google.protobuf import timestamp_pb2 # type: ignore +from google.rpc import status_pb2 # type: ignore from google.ai.generativelanguage_v1beta.services.file_service import pagers from google.ai.generativelanguage_v1beta.types import file, file_service diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/file_service/client.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/file_service/client.py index c90c1a2766c2..a3c61caa6edd 100644 --- a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/file_service/client.py +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/file_service/client.py @@ -50,6 +50,7 @@ from google.longrunning import operations_pb2 # type: ignore from google.protobuf import timestamp_pb2 # type: ignore +from google.rpc import status_pb2 # type: ignore from google.ai.generativelanguage_v1beta.services.file_service import pagers from google.ai.generativelanguage_v1beta.types import file, file_service diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/generative_service/async_client.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/generative_service/async_client.py index 5d8ab987fdde..dc9ba6241807 100644 --- a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/generative_service/async_client.py +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/generative_service/async_client.py @@ -70,6 +70,10 @@ class GenerativeServiceAsyncClient: _DEFAULT_ENDPOINT_TEMPLATE = GenerativeServiceClient._DEFAULT_ENDPOINT_TEMPLATE _DEFAULT_UNIVERSE = GenerativeServiceClient._DEFAULT_UNIVERSE + cached_content_path = staticmethod(GenerativeServiceClient.cached_content_path) + parse_cached_content_path = staticmethod( + GenerativeServiceClient.parse_cached_content_path + ) model_path = staticmethod(GenerativeServiceClient.model_path) parse_model_path = staticmethod(GenerativeServiceClient.parse_model_path) common_billing_account_path = staticmethod( @@ -1018,8 +1022,9 @@ async def sample_count_tokens(): on the ``request`` instance; if ``request`` is provided, this should not be set. contents (:class:`MutableSequence[google.ai.generativelanguage_v1beta.types.Content]`): - Optional. The input given to the - model as a prompt. + Optional. The input given to the model as a prompt. This + field is ignored when ``generate_content_request`` is + set. 
This corresponds to the ``contents`` field on the ``request`` instance; if ``request`` is provided, this diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/generative_service/client.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/generative_service/client.py index 5f67920cadaa..0dc4b14b681f 100644 --- a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/generative_service/client.py +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/generative_service/client.py @@ -189,6 +189,21 @@ def transport(self) -> GenerativeServiceTransport: """ return self._transport + @staticmethod + def cached_content_path( + id: str, + ) -> str: + """Returns a fully-qualified cached_content string.""" + return "cachedContents/{id}".format( + id=id, + ) + + @staticmethod + def parse_cached_content_path(path: str) -> Dict[str, str]: + """Parses a cached_content path into its component segments.""" + m = re.match(r"^cachedContents/(?P.+?)$", path) + return m.groupdict() if m else {} + @staticmethod def model_path( model: str, @@ -1416,8 +1431,9 @@ def sample_count_tokens(): on the ``request`` instance; if ``request`` is provided, this should not be set. contents (MutableSequence[google.ai.generativelanguage_v1beta.types.Content]): - Optional. The input given to the - model as a prompt. + Optional. The input given to the model as a prompt. This + field is ignored when ``generate_content_request`` is + set. This corresponds to the ``contents`` field on the ``request`` instance; if ``request`` is provided, this diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/generative_service/transports/base.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/generative_service/transports/base.py index 47b4f4fa55cd..da37ba9527bd 100644 --- a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/generative_service/transports/base.py +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/generative_service/transports/base.py @@ -136,9 +136,9 @@ def _prep_wrapped_messages(self, client_info): predicate=retries.if_exception_type( core_exceptions.ServiceUnavailable, ), - deadline=60.0, + deadline=600.0, ), - default_timeout=60.0, + default_timeout=600.0, client_info=client_info, ), self.generate_answer: gapic_v1.method.wrap_method( @@ -164,9 +164,9 @@ def _prep_wrapped_messages(self, client_info): predicate=retries.if_exception_type( core_exceptions.ServiceUnavailable, ), - deadline=60.0, + deadline=600.0, ), - default_timeout=60.0, + default_timeout=600.0, client_info=client_info, ), self.embed_content: gapic_v1.method.wrap_method( diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/generative_service/transports/grpc_asyncio.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/generative_service/transports/grpc_asyncio.py index b538c20c274d..6c885a4515b4 100644 --- a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/generative_service/transports/grpc_asyncio.py +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/generative_service/transports/grpc_asyncio.py @@ -437,9 +437,9 @@ def _prep_wrapped_messages(self, client_info): predicate=retries.if_exception_type( core_exceptions.ServiceUnavailable, ), 
- deadline=60.0, + deadline=600.0, ), - default_timeout=60.0, + default_timeout=600.0, client_info=client_info, ), self.generate_answer: gapic_v1.method_async.wrap_method( @@ -465,9 +465,9 @@ def _prep_wrapped_messages(self, client_info): predicate=retries.if_exception_type( core_exceptions.ServiceUnavailable, ), - deadline=60.0, + deadline=600.0, ), - default_timeout=60.0, + default_timeout=600.0, client_info=client_info, ), self.embed_content: gapic_v1.method_async.wrap_method( diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/types/__init__.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/types/__init__.py index 5f0610f5f019..89b4f8ad01b8 100644 --- a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/types/__init__.py +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/types/__init__.py @@ -13,10 +13,22 @@ # See the License for the specific language governing permissions and # limitations under the License. # +from .cache_service import ( + CreateCachedContentRequest, + DeleteCachedContentRequest, + GetCachedContentRequest, + ListCachedContentsRequest, + ListCachedContentsResponse, + UpdateCachedContentRequest, +) +from .cached_content import CachedContent from .citation import CitationMetadata, CitationSource from .content import ( Blob, + CodeExecution, + CodeExecutionResult, Content, + ExecutableCode, FileData, FunctionCall, FunctionCallingConfig, @@ -39,7 +51,7 @@ Message, MessagePrompt, ) -from .file import File +from .file import File, VideoMetadata from .file_service import ( CreateFileRequest, CreateFileResponse, @@ -163,10 +175,20 @@ ) __all__ = ( + "CreateCachedContentRequest", + "DeleteCachedContentRequest", + "GetCachedContentRequest", + "ListCachedContentsRequest", + "ListCachedContentsResponse", + "UpdateCachedContentRequest", + "CachedContent", "CitationMetadata", "CitationSource", "Blob", + "CodeExecution", + "CodeExecutionResult", "Content", + "ExecutableCode", "FileData", "FunctionCall", "FunctionCallingConfig", @@ -187,6 +209,7 @@ "Message", "MessagePrompt", "File", + "VideoMetadata", "CreateFileRequest", "CreateFileResponse", "DeleteFileRequest", diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/types/cache_service.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/types/cache_service.py new file mode 100644 index 000000000000..359f7b820889 --- /dev/null +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/types/cache_service.py @@ -0,0 +1,167 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
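The request and response messages for the cache service are defined below as proto-plus messages, with ``CachedContent`` itself defined in the sibling ``cached_content.py`` module. A minimal construction sketch (the model name and TTL are illustrative values only):

```python
from google.protobuf import duration_pb2

from google.ai.generativelanguage_v1beta.types import cache_service, cached_content

# Build a CachedContent entry; `expiration` is a oneof, so set either `ttl` or `expire_time`.
entry = cached_content.CachedContent(
    model="models/gemini-1.5-flash-001",  # illustrative model name, format `models/{model}`
    display_name="shared-context",
    ttl=duration_pb2.Duration(seconds=3600),
)

create_request = cache_service.CreateCachedContentRequest(cached_content=entry)

# Page through cached contents 100 at a time.
list_request = cache_service.ListCachedContentsRequest(page_size=100)
```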
+# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +from google.protobuf import field_mask_pb2 # type: ignore +import proto # type: ignore + +from google.ai.generativelanguage_v1beta.types import ( + cached_content as gag_cached_content, +) + +__protobuf__ = proto.module( + package="google.ai.generativelanguage.v1beta", + manifest={ + "ListCachedContentsRequest", + "ListCachedContentsResponse", + "CreateCachedContentRequest", + "GetCachedContentRequest", + "UpdateCachedContentRequest", + "DeleteCachedContentRequest", + }, +) + + +class ListCachedContentsRequest(proto.Message): + r"""Request to list CachedContents. + + Attributes: + page_size (int): + Optional. The maximum number of cached + contents to return. The service may return fewer + than this value. If unspecified, some default + (under maximum) number of items will be + returned. The maximum value is 1000; values + above 1000 will be coerced to 1000. + page_token (str): + Optional. A page token, received from a previous + ``ListCachedContents`` call. Provide this to retrieve the + subsequent page. + + When paginating, all other parameters provided to + ``ListCachedContents`` must match the call that provided the + page token. + """ + + page_size: int = proto.Field( + proto.INT32, + number=1, + ) + page_token: str = proto.Field( + proto.STRING, + number=2, + ) + + +class ListCachedContentsResponse(proto.Message): + r"""Response with CachedContents list. + + Attributes: + cached_contents (MutableSequence[google.ai.generativelanguage_v1beta.types.CachedContent]): + List of cached contents. + next_page_token (str): + A token, which can be sent as ``page_token`` to retrieve the + next page. If this field is omitted, there are no subsequent + pages. + """ + + @property + def raw_page(self): + return self + + cached_contents: MutableSequence[ + gag_cached_content.CachedContent + ] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=gag_cached_content.CachedContent, + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + + +class CreateCachedContentRequest(proto.Message): + r"""Request to create CachedContent. + + Attributes: + cached_content (google.ai.generativelanguage_v1beta.types.CachedContent): + Required. The cached content to create. + """ + + cached_content: gag_cached_content.CachedContent = proto.Field( + proto.MESSAGE, + number=1, + message=gag_cached_content.CachedContent, + ) + + +class GetCachedContentRequest(proto.Message): + r"""Request to read CachedContent. + + Attributes: + name (str): + Required. The resource name referring to the content cache + entry. Format: ``cachedContents/{id}`` + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class UpdateCachedContentRequest(proto.Message): + r"""Request to update CachedContent. + + Attributes: + cached_content (google.ai.generativelanguage_v1beta.types.CachedContent): + Required. The content cache entry to update + update_mask (google.protobuf.field_mask_pb2.FieldMask): + The list of fields to update. + """ + + cached_content: gag_cached_content.CachedContent = proto.Field( + proto.MESSAGE, + number=1, + message=gag_cached_content.CachedContent, + ) + update_mask: field_mask_pb2.FieldMask = proto.Field( + proto.MESSAGE, + number=2, + message=field_mask_pb2.FieldMask, + ) + + +class DeleteCachedContentRequest(proto.Message): + r"""Request to delete CachedContent. + + Attributes: + name (str): + Required. 
The resource name referring to the content cache + entry Format: ``cachedContents/{id}`` + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/types/cached_content.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/types/cached_content.py new file mode 100644 index 000000000000..35eda8feb849 --- /dev/null +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/types/cached_content.py @@ -0,0 +1,182 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +from google.protobuf import duration_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +import proto # type: ignore + +from google.ai.generativelanguage_v1beta.types import content + +__protobuf__ = proto.module( + package="google.ai.generativelanguage.v1beta", + manifest={ + "CachedContent", + }, +) + + +class CachedContent(proto.Message): + r"""Content that has been preprocessed and can be used in + subsequent request to GenerativeService. + + Cached content can be only used with model it was created for. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + expire_time (google.protobuf.timestamp_pb2.Timestamp): + Timestamp in UTC of when this resource is considered + expired. This is *always* provided on output, regardless of + what was sent on input. + + This field is a member of `oneof`_ ``expiration``. + ttl (google.protobuf.duration_pb2.Duration): + Input only. New TTL for this resource, input + only. + + This field is a member of `oneof`_ ``expiration``. + name (str): + Optional. Identifier. The resource name referring to the + cached content. Format: ``cachedContents/{id}`` + + This field is a member of `oneof`_ ``_name``. + display_name (str): + Optional. Immutable. The user-generated + meaningful display name of the cached content. + Maximum 128 Unicode characters. + + This field is a member of `oneof`_ ``_display_name``. + model (str): + Required. Immutable. The name of the ``Model`` to use for + cached content Format: ``models/{model}`` + + This field is a member of `oneof`_ ``_model``. + system_instruction (google.ai.generativelanguage_v1beta.types.Content): + Optional. Input only. Immutable. Developer + set system instruction. Currently text only. + + This field is a member of `oneof`_ ``_system_instruction``. 
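The request and response messages above give the CacheService a conventional CRUD surface. A minimal sketch of listing existing cache entries, assuming the generated `CacheServiceClient` and the standard GAPIC pager behavior used by the samples later in this diff:

```python
# A minimal sketch, assuming the generated CacheServiceClient; the pager
# follows next_page_token from ListCachedContentsResponse automatically.
from google.ai import generativelanguage_v1beta

client = generativelanguage_v1beta.CacheServiceClient()

pager = client.list_cached_contents(
    request=generativelanguage_v1beta.ListCachedContentsRequest(page_size=100)
)
for cached_content in pager:
    # expire_time is always populated on output, even when the entry was
    # created with a TTL.
    print(cached_content.name, cached_content.expire_time)
```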
+ contents (MutableSequence[google.ai.generativelanguage_v1beta.types.Content]): + Optional. Input only. Immutable. The content + to cache. + tools (MutableSequence[google.ai.generativelanguage_v1beta.types.Tool]): + Optional. Input only. Immutable. A list of ``Tools`` the + model may use to generate the next response + tool_config (google.ai.generativelanguage_v1beta.types.ToolConfig): + Optional. Input only. Immutable. Tool config. + This config is shared for all tools. + + This field is a member of `oneof`_ ``_tool_config``. + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. Creation time of the cache + entry. + update_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. When the cache entry was last + updated in UTC time. + usage_metadata (google.ai.generativelanguage_v1beta.types.CachedContent.UsageMetadata): + Output only. Metadata on the usage of the + cached content. + """ + + class UsageMetadata(proto.Message): + r"""Metadata on the usage of the cached content. + + Attributes: + total_token_count (int): + Total number of tokens that the cached + content consumes. + """ + + total_token_count: int = proto.Field( + proto.INT32, + number=1, + ) + + expire_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=9, + oneof="expiration", + message=timestamp_pb2.Timestamp, + ) + ttl: duration_pb2.Duration = proto.Field( + proto.MESSAGE, + number=10, + oneof="expiration", + message=duration_pb2.Duration, + ) + name: str = proto.Field( + proto.STRING, + number=1, + optional=True, + ) + display_name: str = proto.Field( + proto.STRING, + number=11, + optional=True, + ) + model: str = proto.Field( + proto.STRING, + number=2, + optional=True, + ) + system_instruction: content.Content = proto.Field( + proto.MESSAGE, + number=3, + optional=True, + message=content.Content, + ) + contents: MutableSequence[content.Content] = proto.RepeatedField( + proto.MESSAGE, + number=4, + message=content.Content, + ) + tools: MutableSequence[content.Tool] = proto.RepeatedField( + proto.MESSAGE, + number=5, + message=content.Tool, + ) + tool_config: content.ToolConfig = proto.Field( + proto.MESSAGE, + number=6, + optional=True, + message=content.ToolConfig, + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=7, + message=timestamp_pb2.Timestamp, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=8, + message=timestamp_pb2.Timestamp, + ) + usage_metadata: UsageMetadata = proto.Field( + proto.MESSAGE, + number=12, + message=UsageMetadata, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/types/content.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/types/content.py index c8a48e464db9..c5e42c09d5d4 100644 --- a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/types/content.py +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/types/content.py @@ -28,7 +28,10 @@ "Part", "Blob", "FileData", + "ExecutableCode", + "CodeExecutionResult", "Tool", + "CodeExecution", "ToolConfig", "FunctionCallingConfig", "FunctionDeclaration", @@ -145,6 +148,15 @@ class Part(proto.Message): file_data (google.ai.generativelanguage_v1beta.types.FileData): URI based data. + This field is a member of `oneof`_ ``data``. + executable_code (google.ai.generativelanguage_v1beta.types.ExecutableCode): + Code generated by the model that is meant to + be executed. 
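With the `CachedContent` message above in place, creating a cache entry amounts to filling the immutable inputs and one member of the `expiration` oneof. A minimal sketch, assuming the generated `CacheServiceClient`; the model name is illustrative and must be a model that supports cached content:

```python
# A minimal sketch, assuming the generated CacheServiceClient.
# 'ttl' and 'expire_time' form a oneof, so only one of them may be set.
from google.ai import generativelanguage_v1beta
from google.protobuf import duration_pb2

client = generativelanguage_v1beta.CacheServiceClient()

cached = generativelanguage_v1beta.CachedContent(
    model="models/gemini-1.5-flash-001",   # illustrative model name
    display_name="support-docs",           # at most 128 Unicode characters
    system_instruction=generativelanguage_v1beta.Content(
        parts=[generativelanguage_v1beta.Part(text="Answer only from the cached corpus.")],
    ),
    contents=[
        generativelanguage_v1beta.Content(
            role="user",
            parts=[generativelanguage_v1beta.Part(text="<large corpus to cache>")],
        ),
    ],
    ttl=duration_pb2.Duration(seconds=3600),
)

response = client.create_cached_content(
    request=generativelanguage_v1beta.CreateCachedContentRequest(cached_content=cached),
)
print(response.name, response.usage_metadata.total_token_count)
```

The returned resource name (``cachedContents/{id}``) is what the generative service expects in the new ``cached_content`` request field shown further down in this diff.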
+ + This field is a member of `oneof`_ ``data``. + code_execution_result (google.ai.generativelanguage_v1beta.types.CodeExecutionResult): + Result of executing the ``ExecutableCode``. + This field is a member of `oneof`_ ``data``. """ @@ -177,6 +189,18 @@ class Part(proto.Message): oneof="data", message="FileData", ) + executable_code: "ExecutableCode" = proto.Field( + proto.MESSAGE, + number=9, + oneof="data", + message="ExecutableCode", + ) + code_execution_result: "CodeExecutionResult" = proto.Field( + proto.MESSAGE, + number=10, + oneof="data", + message="CodeExecutionResult", + ) class Blob(proto.Message): @@ -228,6 +252,94 @@ class FileData(proto.Message): ) +class ExecutableCode(proto.Message): + r"""Code generated by the model that is meant to be executed, and the + result returned to the model. + + Only generated when using the ``CodeExecution`` tool, in which the + code will be automatically executed, and a corresponding + ``CodeExecutionResult`` will also be generated. + + Attributes: + language (google.ai.generativelanguage_v1beta.types.ExecutableCode.Language): + Required. Programming language of the ``code``. + code (str): + Required. The code to be executed. + """ + + class Language(proto.Enum): + r"""Supported programming languages for the generated code. + + Values: + LANGUAGE_UNSPECIFIED (0): + Unspecified language. This value should not + be used. + PYTHON (1): + Python >= 3.10, with numpy and simpy + available. + """ + LANGUAGE_UNSPECIFIED = 0 + PYTHON = 1 + + language: Language = proto.Field( + proto.ENUM, + number=1, + enum=Language, + ) + code: str = proto.Field( + proto.STRING, + number=2, + ) + + +class CodeExecutionResult(proto.Message): + r"""Result of executing the ``ExecutableCode``. + + Only generated when using the ``CodeExecution``, and always follows + a ``part`` containing the ``ExecutableCode``. + + Attributes: + outcome (google.ai.generativelanguage_v1beta.types.CodeExecutionResult.Outcome): + Required. Outcome of the code execution. + output (str): + Optional. Contains stdout when code execution + is successful, stderr or other description + otherwise. + """ + + class Outcome(proto.Enum): + r"""Enumeration of possible outcomes of the code execution. + + Values: + OUTCOME_UNSPECIFIED (0): + Unspecified status. This value should not be + used. + OUTCOME_OK (1): + Code execution completed successfully. + OUTCOME_FAILED (2): + Code execution finished but with a failure. ``stderr`` + should contain the reason. + OUTCOME_DEADLINE_EXCEEDED (3): + Code execution ran for too long, and was + cancelled. There may or may not be a partial + output present. + """ + OUTCOME_UNSPECIFIED = 0 + OUTCOME_OK = 1 + OUTCOME_FAILED = 2 + OUTCOME_DEADLINE_EXCEEDED = 3 + + outcome: Outcome = proto.Field( + proto.ENUM, + number=1, + enum=Outcome, + ) + output: str = proto.Field( + proto.STRING, + number=2, + ) + + class Tool(proto.Message): r"""Tool details that the model may use to generate response. @@ -250,6 +362,9 @@ class Tool(proto.Message): [FunctionResponse][content.part.function_response] with the [content.role] "function" generation context for the next model turn. + code_execution (google.ai.generativelanguage_v1beta.types.CodeExecution): + Optional. Enables the model to execute code + as part of generation. 
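Together, `ExecutableCode`, `CodeExecutionResult`, and the new `Tool.code_execution` field let a caller opt in to model-side code execution and read back both the generated code and its result. A minimal sketch, assuming the generated `GenerativeServiceClient`; the model name is illustrative:

```python
# A minimal sketch, assuming the generated GenerativeServiceClient.
# The model name is illustrative; it must support the CodeExecution tool.
from google.ai import generativelanguage_v1beta

client = generativelanguage_v1beta.GenerativeServiceClient()

request = generativelanguage_v1beta.GenerateContentRequest(
    model="models/gemini-1.5-flash-001",  # illustrative model name
    contents=[
        generativelanguage_v1beta.Content(
            role="user",
            parts=[generativelanguage_v1beta.Part(
                text="Sum the first 50 primes; write and run code to do it."
            )],
        ),
    ],
    # Opting in: CodeExecution carries no options of its own.
    tools=[generativelanguage_v1beta.Tool(
        code_execution=generativelanguage_v1beta.CodeExecution()
    )],
)

response = client.generate_content(request=request)

for part in response.candidates[0].content.parts:
    # The 'data' oneof now also carries executable_code / code_execution_result.
    which = generativelanguage_v1beta.Part.pb(part).WhichOneof("data")
    if which == "executable_code":
        print("generated code:\n", part.executable_code.code)
    elif which == "code_execution_result":
        print("outcome:", part.code_execution_result.outcome.name)
        print(part.code_execution_result.output)
    elif which == "text":
        print(part.text)
```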
""" function_declarations: MutableSequence["FunctionDeclaration"] = proto.RepeatedField( @@ -257,6 +372,21 @@ class Tool(proto.Message): number=1, message="FunctionDeclaration", ) + code_execution: "CodeExecution" = proto.Field( + proto.MESSAGE, + number=3, + message="CodeExecution", + ) + + +class CodeExecution(proto.Message): + r"""Tool that executes code generated by the model, and automatically + returns the result to the model. + + See also ``ExecutableCode`` and ``CodeExecutionResult`` which are + only generated when using this tool. + + """ class ToolConfig(proto.Message): @@ -304,7 +434,7 @@ class Mode(proto.Enum): AUTO (1): Default model behavior, model decides to predict either a function call or a natural - language repspose. + language response. ANY (2): Model is constrained to always predicting a function call only. If "allowed_function_names" are set, the predicted diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/types/file.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/types/file.py index c1ae9a6ebcaf..387d00aafbf7 100644 --- a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/types/file.py +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/types/file.py @@ -17,13 +17,16 @@ from typing import MutableMapping, MutableSequence +from google.protobuf import duration_pb2 # type: ignore from google.protobuf import timestamp_pb2 # type: ignore +from google.rpc import status_pb2 # type: ignore import proto # type: ignore __protobuf__ = proto.module( package="google.ai.generativelanguage.v1beta", manifest={ "File", + "VideoMetadata", }, ) @@ -31,7 +34,13 @@ class File(proto.Message): r"""A file uploaded to the API. + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + Attributes: + video_metadata (google.ai.generativelanguage_v1beta.types.VideoMetadata): + Output only. Metadata for a video. + + This field is a member of `oneof`_ ``metadata``. name (str): Immutable. Identifier. The ``File`` resource name. The ID (name excluding the "files/" prefix) can contain up to 40 @@ -62,6 +71,9 @@ class File(proto.Message): Output only. The uri of the ``File``. state (google.ai.generativelanguage_v1beta.types.File.State): Output only. Processing state of the File. + error (google.rpc.status_pb2.Status): + Output only. Error status if File processing + failed. """ class State(proto.Enum): @@ -85,6 +97,12 @@ class State(proto.Enum): ACTIVE = 2 FAILED = 10 + video_metadata: "VideoMetadata" = proto.Field( + proto.MESSAGE, + number=12, + oneof="metadata", + message="VideoMetadata", + ) name: str = proto.Field( proto.STRING, number=1, @@ -129,6 +147,26 @@ class State(proto.Enum): number=10, enum=State, ) + error: status_pb2.Status = proto.Field( + proto.MESSAGE, + number=11, + message=status_pb2.Status, + ) + + +class VideoMetadata(proto.Message): + r"""Metadata for a video ``File``. + + Attributes: + video_duration (google.protobuf.duration_pb2.Duration): + Duration of the video. 
+ """ + + video_duration: duration_pb2.Duration = proto.Field( + proto.MESSAGE, + number=1, + message=duration_pb2.Duration, + ) __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/types/generative_service.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/types/generative_service.py index 7fa328ee4da8..a6cbe296b63c 100644 --- a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/types/generative_service.py +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/types/generative_service.py @@ -143,6 +143,14 @@ class GenerateContentRequest(proto.Message): generation and outputs. This field is a member of `oneof`_ ``_generation_config``. + cached_content (str): + Optional. The name of the cached content used as context to + serve the prediction. Note: only used in explicit caching, + where users can have control over caching (e.g. what content + to cache) and enjoy guaranteed cost savings. Format: + ``cachedContents/{cachedContent}`` + + This field is a member of `oneof`_ ``_cached_content``. """ model: str = proto.Field( @@ -181,6 +189,11 @@ class GenerateContentRequest(proto.Message): optional=True, message="GenerationConfig", ) + cached_content: str = proto.Field( + proto.STRING, + number=9, + optional=True, + ) class GenerationConfig(proto.Message): @@ -418,7 +431,7 @@ class BlockReason(proto.Enum): ``safety_ratings`` to understand which safety category blocked it. OTHER (2): - Prompt was blocked due to unknown reaasons. + Prompt was blocked due to unknown reasons. """ BLOCK_REASON_UNSPECIFIED = 0 SAFETY = 1 @@ -442,7 +455,12 @@ class UsageMetadata(proto.Message): Attributes: prompt_token_count (int): - Number of tokens in the prompt. + Number of tokens in the prompt. When cached_content is set, + this is still the total effective prompt size. I.e. this + includes the number of tokens in the cached content. + cached_content_token_count (int): + Number of tokens in the cached part of the + prompt, i.e. in the cached content. candidates_token_count (int): Total number of tokens across the generated candidates. @@ -455,6 +473,10 @@ class UsageMetadata(proto.Message): proto.INT32, number=1, ) + cached_content_token_count: int = proto.Field( + proto.INT32, + number=4, + ) candidates_token_count: int = proto.Field( proto.INT32, number=2, @@ -1094,8 +1116,8 @@ class CountTokensRequest(proto.Message): Format: ``models/{model}`` contents (MutableSequence[google.ai.generativelanguage_v1beta.types.Content]): - Optional. The input given to the model as a - prompt. + Optional. The input given to the model as a prompt. This + field is ignored when ``generate_content_request`` is set. generate_content_request (google.ai.generativelanguage_v1beta.types.GenerateContentRequest): Optional. The overall input given to the model. CountTokens will count prompt, function @@ -1128,13 +1150,22 @@ class CountTokensResponse(proto.Message): The number of tokens that the ``model`` tokenizes the ``prompt`` into. - Always non-negative. + Always non-negative. When cached_content is set, this is + still the total effective prompt size. I.e. this includes + the number of tokens in the cached content. + cached_content_token_count (int): + Number of tokens in the cached part of the + prompt, i.e. in the cached content. 
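The new `cached_content` request field and the `cached_content_token_count` usage fields tie the generation and token-counting paths back to a cache entry created through the CacheService. A minimal sketch, assuming the generated `GenerativeServiceClient`; the model and cache names are illustrative, and the model generally has to match the one the cached content was created for:

```python
# A minimal sketch, assuming the generated GenerativeServiceClient and a
# previously created cache entry. Model and cache names are illustrative.
from google.ai import generativelanguage_v1beta

client = generativelanguage_v1beta.GenerativeServiceClient()

request = generativelanguage_v1beta.GenerateContentRequest(
    model="models/gemini-1.5-flash-001",            # illustrative
    cached_content="cachedContents/example-cache",  # illustrative cache name
    contents=[
        generativelanguage_v1beta.Content(
            role="user",
            parts=[generativelanguage_v1beta.Part(text="Summarize the cached corpus.")],
        ),
    ],
)

response = client.generate_content(request=request)
usage = response.usage_metadata
# prompt_token_count remains the total effective prompt size; the cached
# portion is now reported separately.
print("prompt tokens:", usage.prompt_token_count)
print("of which cached:", usage.cached_content_token_count)

# CountTokens reports the same split before any generation is attempted.
tokens = client.count_tokens(
    request=generativelanguage_v1beta.CountTokensRequest(
        model="models/gemini-1.5-flash-001",
        generate_content_request=request,
    )
)
print("total:", tokens.total_tokens, "cached:", tokens.cached_content_token_count)
```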
""" total_tokens: int = proto.Field( proto.INT32, number=1, ) + cached_content_token_count: int = proto.Field( + proto.INT32, + number=5, + ) __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/types/model.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/types/model.py index 52aa43206bd0..82dbdd515047 100644 --- a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/types/model.py +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/types/model.py @@ -76,14 +76,18 @@ class Model(proto.Message): temperature (float): Controls the randomness of the output. - Values can range over ``[0.0,1.0]``, inclusive. A value - closer to ``1.0`` will produce responses that are more - varied, while a value closer to ``0.0`` will typically - result in less surprising responses from the model. This - value specifies default to be used by the backend while - making the call to the model. + Values can range over ``[0.0,max_temperature]``, inclusive. + A higher value will produce responses that are more varied, + while a value closer to ``0.0`` will typically result in + less surprising responses from the model. This value + specifies default to be used by the backend while making the + call to the model. This field is a member of `oneof`_ ``_temperature``. + max_temperature (float): + The maximum temperature this model can use. + + This field is a member of `oneof`_ ``_max_temperature``. top_p (float): For Nucleus sampling. @@ -142,6 +146,11 @@ class Model(proto.Message): number=9, optional=True, ) + max_temperature: float = proto.Field( + proto.FLOAT, + number=13, + optional=True, + ) top_p: float = proto.Field( proto.FLOAT, number=10, diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/types/safety.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/types/safety.py index e72adacd4779..113590701d4b 100644 --- a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/types/safety.py +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/types/safety.py @@ -223,7 +223,7 @@ class SafetySetting(proto.Message): r"""Safety setting, affecting the safety-blocking behavior. Passing a safety setting for a category changes the allowed - proability that content is blocked. + probability that content is blocked. Attributes: category (google.ai.generativelanguage_v1beta.types.HarmCategory): diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta2/gapic_version.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta2/gapic_version.py index 558c8aab67c5..1a1d9a4de546 100644 --- a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta2/gapic_version.py +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta2/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "0.0.0" # {x-release-please-version} +__version__ = "0.6.6" # {x-release-please-version} diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta3/gapic_version.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta3/gapic_version.py index 558c8aab67c5..1a1d9a4de546 100644 --- a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta3/gapic_version.py +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta3/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.0.0" # {x-release-please-version} +__version__ = "0.6.6" # {x-release-please-version} diff --git a/packages/google-ai-generativelanguage/noxfile.py b/packages/google-ai-generativelanguage/noxfile.py index 1e6cd48d0529..67b7265f7586 100644 --- a/packages/google-ai-generativelanguage/noxfile.py +++ b/packages/google-ai-generativelanguage/noxfile.py @@ -160,14 +160,28 @@ def install_unittest_dependencies(session, *constraints): session.install("-e", ".", *constraints) -def default(session): +@nox.session(python=UNIT_TEST_PYTHON_VERSIONS) +@nox.parametrize( + "protobuf_implementation", + ["python", "upb", "cpp"], +) +def unit(session, protobuf_implementation): # Install all test dependencies, then install this package in-place. + if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12"): + session.skip("cpp implementation is not supported in python 3.11+") + constraints_path = str( CURRENT_DIRECTORY / "testing" / f"constraints-{session.python}.txt" ) install_unittest_dependencies(session, "-c", constraints_path) + # TODO(https://github.com/googleapis/synthtool/issues/1976): + # Remove the 'cpp' implementation once support for Protobuf 3.x is dropped. + # The 'cpp' implementation requires Protobuf<4. + if protobuf_implementation == "cpp": + session.install("protobuf<4") + # Run py.test against the unit tests. session.run( "py.test", @@ -181,15 +195,12 @@ def default(session): "--cov-fail-under=0", os.path.join("tests", "unit"), *session.posargs, + env={ + "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation, + }, ) -@nox.session(python=UNIT_TEST_PYTHON_VERSIONS) -def unit(session): - """Run the unit test suite.""" - default(session) - - def install_systemtest_dependencies(session, *constraints): # Use pre-release gRPC for system tests. # Exclude version 1.52.0rc1 which has a known issue. @@ -358,9 +369,16 @@ def docfx(session): @nox.session(python="3.12") -def prerelease_deps(session): +@nox.parametrize( + "protobuf_implementation", + ["python", "upb", "cpp"], +) +def prerelease_deps(session, protobuf_implementation): """Run all tests with prerelease versions of dependencies installed.""" + if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12"): + session.skip("cpp implementation is not supported in python 3.11+") + # Install all dependencies session.install("-e", ".[all, tests, tracing]") unit_deps_all = UNIT_TEST_STANDARD_DEPENDENCIES + UNIT_TEST_EXTERNAL_DEPENDENCIES @@ -397,9 +415,9 @@ def prerelease_deps(session): "protobuf", # dependency of grpc "six", + "grpc-google-iam-v1", "googleapis-common-protos", - # Exclude version 1.52.0rc1 which has a known issue. 
See https://github.com/grpc/grpc/issues/32163 - "grpcio!=1.52.0rc1", + "grpcio", "grpcio-status", "google-api-core", "google-auth", @@ -425,4 +443,10 @@ def prerelease_deps(session): session.run("python", "-c", "import grpc; print(grpc.__version__)") session.run("python", "-c", "import google.auth; print(google.auth.__version__)") - session.run("py.test", "tests/unit") + session.run( + "py.test", + "tests/unit", + env={ + "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation, + }, + ) diff --git a/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta_generated_cache_service_create_cached_content_async.py b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta_generated_cache_service_create_cached_content_async.py new file mode 100644 index 000000000000..25b7532104c9 --- /dev/null +++ b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta_generated_cache_service_create_cached_content_async.py @@ -0,0 +1,51 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateCachedContent +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-ai-generativelanguage + + +# [START generativelanguage_v1beta_generated_CacheService_CreateCachedContent_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.ai import generativelanguage_v1beta + + +async def sample_create_cached_content(): + # Create a client + client = generativelanguage_v1beta.CacheServiceAsyncClient() + + # Initialize request argument(s) + request = generativelanguage_v1beta.CreateCachedContentRequest( + ) + + # Make the request + response = await client.create_cached_content(request=request) + + # Handle the response + print(response) + +# [END generativelanguage_v1beta_generated_CacheService_CreateCachedContent_async] diff --git a/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta_generated_cache_service_create_cached_content_sync.py b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta_generated_cache_service_create_cached_content_sync.py new file mode 100644 index 000000000000..3a84e200791a --- /dev/null +++ b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta_generated_cache_service_create_cached_content_sync.py @@ -0,0 +1,51 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateCachedContent +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-ai-generativelanguage + + +# [START generativelanguage_v1beta_generated_CacheService_CreateCachedContent_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.ai import generativelanguage_v1beta + + +def sample_create_cached_content(): + # Create a client + client = generativelanguage_v1beta.CacheServiceClient() + + # Initialize request argument(s) + request = generativelanguage_v1beta.CreateCachedContentRequest( + ) + + # Make the request + response = client.create_cached_content(request=request) + + # Handle the response + print(response) + +# [END generativelanguage_v1beta_generated_CacheService_CreateCachedContent_sync] diff --git a/packages/google-maps-mapsplatformdatasets/samples/generated_samples/mapsplatformdatasets_v1alpha_generated_maps_platform_datasets_v1_alpha_delete_dataset_sync.py b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta_generated_cache_service_delete_cached_content_async.py similarity index 71% rename from packages/google-maps-mapsplatformdatasets/samples/generated_samples/mapsplatformdatasets_v1alpha_generated_maps_platform_datasets_v1_alpha_delete_dataset_sync.py rename to packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta_generated_cache_service_delete_cached_content_async.py index d7c7b8426c7c..41053a2461b4 100644 --- a/packages/google-maps-mapsplatformdatasets/samples/generated_samples/mapsplatformdatasets_v1alpha_generated_maps_platform_datasets_v1_alpha_delete_dataset_sync.py +++ b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta_generated_cache_service_delete_cached_content_async.py @@ -15,15 +15,15 @@ # # Generated code. DO NOT EDIT! # -# Snippet for DeleteDataset +# Snippet for DeleteCachedContent # NOTE: This snippet has been automatically generated for illustrative purposes only. # It may require modifications to work in your environment. # To install the latest published package dependency, execute the following: -# python3 -m pip install google-maps-mapsplatformdatasets +# python3 -m pip install google-ai-generativelanguage -# [START mapsplatformdatasets_v1alpha_generated_MapsPlatformDatasetsV1Alpha_DeleteDataset_sync] +# [START generativelanguage_v1beta_generated_CacheService_DeleteCachedContent_async] # This snippet has been automatically generated and should be regarded as a # code template only. 
# It will require modifications to work: @@ -31,20 +31,20 @@ # - It may require specifying regional endpoints when creating the service # client as shown in: # https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.maps import mapsplatformdatasets_v1alpha +from google.ai import generativelanguage_v1beta -def sample_delete_dataset(): +async def sample_delete_cached_content(): # Create a client - client = mapsplatformdatasets_v1alpha.MapsPlatformDatasetsV1AlphaClient() + client = generativelanguage_v1beta.CacheServiceAsyncClient() # Initialize request argument(s) - request = mapsplatformdatasets_v1alpha.DeleteDatasetRequest( + request = generativelanguage_v1beta.DeleteCachedContentRequest( name="name_value", ) # Make the request - client.delete_dataset(request=request) + await client.delete_cached_content(request=request) -# [END mapsplatformdatasets_v1alpha_generated_MapsPlatformDatasetsV1Alpha_DeleteDataset_sync] +# [END generativelanguage_v1beta_generated_CacheService_DeleteCachedContent_async] diff --git a/packages/google-maps-mapsplatformdatasets/samples/generated_samples/mapsplatformdatasets_v1alpha_generated_maps_platform_datasets_v1_alpha_delete_dataset_version_sync.py b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta_generated_cache_service_delete_cached_content_sync.py similarity index 69% rename from packages/google-maps-mapsplatformdatasets/samples/generated_samples/mapsplatformdatasets_v1alpha_generated_maps_platform_datasets_v1_alpha_delete_dataset_version_sync.py rename to packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta_generated_cache_service_delete_cached_content_sync.py index 42a6dd4fd7d1..791c6c47380b 100644 --- a/packages/google-maps-mapsplatformdatasets/samples/generated_samples/mapsplatformdatasets_v1alpha_generated_maps_platform_datasets_v1_alpha_delete_dataset_version_sync.py +++ b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta_generated_cache_service_delete_cached_content_sync.py @@ -15,15 +15,15 @@ # # Generated code. DO NOT EDIT! # -# Snippet for DeleteDatasetVersion +# Snippet for DeleteCachedContent # NOTE: This snippet has been automatically generated for illustrative purposes only. # It may require modifications to work in your environment. # To install the latest published package dependency, execute the following: -# python3 -m pip install google-maps-mapsplatformdatasets +# python3 -m pip install google-ai-generativelanguage -# [START mapsplatformdatasets_v1alpha_generated_MapsPlatformDatasetsV1Alpha_DeleteDatasetVersion_sync] +# [START generativelanguage_v1beta_generated_CacheService_DeleteCachedContent_sync] # This snippet has been automatically generated and should be regarded as a # code template only. 
# It will require modifications to work: @@ -31,20 +31,20 @@ # - It may require specifying regional endpoints when creating the service # client as shown in: # https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.maps import mapsplatformdatasets_v1alpha +from google.ai import generativelanguage_v1beta -def sample_delete_dataset_version(): +def sample_delete_cached_content(): # Create a client - client = mapsplatformdatasets_v1alpha.MapsPlatformDatasetsV1AlphaClient() + client = generativelanguage_v1beta.CacheServiceClient() # Initialize request argument(s) - request = mapsplatformdatasets_v1alpha.DeleteDatasetVersionRequest( + request = generativelanguage_v1beta.DeleteCachedContentRequest( name="name_value", ) # Make the request - client.delete_dataset_version(request=request) + client.delete_cached_content(request=request) -# [END mapsplatformdatasets_v1alpha_generated_MapsPlatformDatasetsV1Alpha_DeleteDatasetVersion_sync] +# [END generativelanguage_v1beta_generated_CacheService_DeleteCachedContent_sync] diff --git a/packages/google-maps-mapsplatformdatasets/samples/generated_samples/mapsplatformdatasets_v1alpha_generated_maps_platform_datasets_v1_alpha_get_dataset_sync.py b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta_generated_cache_service_get_cached_content_async.py similarity index 72% rename from packages/google-maps-mapsplatformdatasets/samples/generated_samples/mapsplatformdatasets_v1alpha_generated_maps_platform_datasets_v1_alpha_get_dataset_sync.py rename to packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta_generated_cache_service_get_cached_content_async.py index 96b00610df7e..7e0885453511 100644 --- a/packages/google-maps-mapsplatformdatasets/samples/generated_samples/mapsplatformdatasets_v1alpha_generated_maps_platform_datasets_v1_alpha_get_dataset_sync.py +++ b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta_generated_cache_service_get_cached_content_async.py @@ -15,15 +15,15 @@ # # Generated code. DO NOT EDIT! # -# Snippet for GetDataset +# Snippet for GetCachedContent # NOTE: This snippet has been automatically generated for illustrative purposes only. # It may require modifications to work in your environment. # To install the latest published package dependency, execute the following: -# python3 -m pip install google-maps-mapsplatformdatasets +# python3 -m pip install google-ai-generativelanguage -# [START mapsplatformdatasets_v1alpha_generated_MapsPlatformDatasetsV1Alpha_GetDataset_sync] +# [START generativelanguage_v1beta_generated_CacheService_GetCachedContent_async] # This snippet has been automatically generated and should be regarded as a # code template only. 
# It will require modifications to work: @@ -31,22 +31,22 @@ # - It may require specifying regional endpoints when creating the service # client as shown in: # https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.maps import mapsplatformdatasets_v1alpha +from google.ai import generativelanguage_v1beta -def sample_get_dataset(): +async def sample_get_cached_content(): # Create a client - client = mapsplatformdatasets_v1alpha.MapsPlatformDatasetsV1AlphaClient() + client = generativelanguage_v1beta.CacheServiceAsyncClient() # Initialize request argument(s) - request = mapsplatformdatasets_v1alpha.GetDatasetRequest( + request = generativelanguage_v1beta.GetCachedContentRequest( name="name_value", ) # Make the request - response = client.get_dataset(request=request) + response = await client.get_cached_content(request=request) # Handle the response print(response) -# [END mapsplatformdatasets_v1alpha_generated_MapsPlatformDatasetsV1Alpha_GetDataset_sync] +# [END generativelanguage_v1beta_generated_CacheService_GetCachedContent_async] diff --git a/packages/google-maps-mapsplatformdatasets/samples/generated_samples/mapsplatformdatasets_v1alpha_generated_maps_platform_datasets_v1_alpha_get_dataset_async.py b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta_generated_cache_service_get_cached_content_sync.py similarity index 71% rename from packages/google-maps-mapsplatformdatasets/samples/generated_samples/mapsplatformdatasets_v1alpha_generated_maps_platform_datasets_v1_alpha_get_dataset_async.py rename to packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta_generated_cache_service_get_cached_content_sync.py index 203e604a872c..04e62cb22f25 100644 --- a/packages/google-maps-mapsplatformdatasets/samples/generated_samples/mapsplatformdatasets_v1alpha_generated_maps_platform_datasets_v1_alpha_get_dataset_async.py +++ b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta_generated_cache_service_get_cached_content_sync.py @@ -15,15 +15,15 @@ # # Generated code. DO NOT EDIT! # -# Snippet for GetDataset +# Snippet for GetCachedContent # NOTE: This snippet has been automatically generated for illustrative purposes only. # It may require modifications to work in your environment. # To install the latest published package dependency, execute the following: -# python3 -m pip install google-maps-mapsplatformdatasets +# python3 -m pip install google-ai-generativelanguage -# [START mapsplatformdatasets_v1alpha_generated_MapsPlatformDatasetsV1Alpha_GetDataset_async] +# [START generativelanguage_v1beta_generated_CacheService_GetCachedContent_sync] # This snippet has been automatically generated and should be regarded as a # code template only. 
# It will require modifications to work: @@ -31,22 +31,22 @@ # - It may require specifying regional endpoints when creating the service # client as shown in: # https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.maps import mapsplatformdatasets_v1alpha +from google.ai import generativelanguage_v1beta -async def sample_get_dataset(): +def sample_get_cached_content(): # Create a client - client = mapsplatformdatasets_v1alpha.MapsPlatformDatasetsV1AlphaAsyncClient() + client = generativelanguage_v1beta.CacheServiceClient() # Initialize request argument(s) - request = mapsplatformdatasets_v1alpha.GetDatasetRequest( + request = generativelanguage_v1beta.GetCachedContentRequest( name="name_value", ) # Make the request - response = await client.get_dataset(request=request) + response = client.get_cached_content(request=request) # Handle the response print(response) -# [END mapsplatformdatasets_v1alpha_generated_MapsPlatformDatasetsV1Alpha_GetDataset_async] +# [END generativelanguage_v1beta_generated_CacheService_GetCachedContent_sync] diff --git a/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta_generated_cache_service_list_cached_contents_async.py b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta_generated_cache_service_list_cached_contents_async.py new file mode 100644 index 000000000000..91ae2d0fce04 --- /dev/null +++ b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta_generated_cache_service_list_cached_contents_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListCachedContents +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-ai-generativelanguage + + +# [START generativelanguage_v1beta_generated_CacheService_ListCachedContents_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.ai import generativelanguage_v1beta + + +async def sample_list_cached_contents(): + # Create a client + client = generativelanguage_v1beta.CacheServiceAsyncClient() + + # Initialize request argument(s) + request = generativelanguage_v1beta.ListCachedContentsRequest( + ) + + # Make the request + page_result = client.list_cached_contents(request=request) + + # Handle the response + async for response in page_result: + print(response) + +# [END generativelanguage_v1beta_generated_CacheService_ListCachedContents_async] diff --git a/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta_generated_cache_service_list_cached_contents_sync.py b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta_generated_cache_service_list_cached_contents_sync.py new file mode 100644 index 000000000000..f98eced78c74 --- /dev/null +++ b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta_generated_cache_service_list_cached_contents_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListCachedContents +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-ai-generativelanguage + + +# [START generativelanguage_v1beta_generated_CacheService_ListCachedContents_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.ai import generativelanguage_v1beta + + +def sample_list_cached_contents(): + # Create a client + client = generativelanguage_v1beta.CacheServiceClient() + + # Initialize request argument(s) + request = generativelanguage_v1beta.ListCachedContentsRequest( + ) + + # Make the request + page_result = client.list_cached_contents(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END generativelanguage_v1beta_generated_CacheService_ListCachedContents_sync] diff --git a/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta_generated_cache_service_update_cached_content_async.py b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta_generated_cache_service_update_cached_content_async.py new file mode 100644 index 000000000000..ccbdfbfda614 --- /dev/null +++ b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta_generated_cache_service_update_cached_content_async.py @@ -0,0 +1,51 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateCachedContent +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-ai-generativelanguage + + +# [START generativelanguage_v1beta_generated_CacheService_UpdateCachedContent_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.ai import generativelanguage_v1beta + + +async def sample_update_cached_content(): + # Create a client + client = generativelanguage_v1beta.CacheServiceAsyncClient() + + # Initialize request argument(s) + request = generativelanguage_v1beta.UpdateCachedContentRequest( + ) + + # Make the request + response = await client.update_cached_content(request=request) + + # Handle the response + print(response) + +# [END generativelanguage_v1beta_generated_CacheService_UpdateCachedContent_async] diff --git a/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta_generated_cache_service_update_cached_content_sync.py b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta_generated_cache_service_update_cached_content_sync.py new file mode 100644 index 000000000000..84a365b14236 --- /dev/null +++ b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta_generated_cache_service_update_cached_content_sync.py @@ -0,0 +1,51 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateCachedContent +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-ai-generativelanguage + + +# [START generativelanguage_v1beta_generated_CacheService_UpdateCachedContent_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.ai import generativelanguage_v1beta + + +def sample_update_cached_content(): + # Create a client + client = generativelanguage_v1beta.CacheServiceClient() + + # Initialize request argument(s) + request = generativelanguage_v1beta.UpdateCachedContentRequest( + ) + + # Make the request + response = client.update_cached_content(request=request) + + # Handle the response + print(response) + +# [END generativelanguage_v1beta_generated_CacheService_UpdateCachedContent_sync] diff --git a/packages/google-ai-generativelanguage/samples/generated_samples/snippet_metadata_google.ai.generativelanguage.v1.json b/packages/google-ai-generativelanguage/samples/generated_samples/snippet_metadata_google.ai.generativelanguage.v1.json index d6c3fe4c5051..3b1eb2b9811d 100644 --- a/packages/google-ai-generativelanguage/samples/generated_samples/snippet_metadata_google.ai.generativelanguage.v1.json +++ b/packages/google-ai-generativelanguage/samples/generated_samples/snippet_metadata_google.ai.generativelanguage.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-ai-generativelanguage", - "version": "0.1.0" + "version": "0.6.6" }, "snippets": [ { diff --git a/packages/google-ai-generativelanguage/samples/generated_samples/snippet_metadata_google.ai.generativelanguage.v1beta.json b/packages/google-ai-generativelanguage/samples/generated_samples/snippet_metadata_google.ai.generativelanguage.v1beta.json index 9d212987934d..0bf5b8649f4b 100644 --- a/packages/google-ai-generativelanguage/samples/generated_samples/snippet_metadata_google.ai.generativelanguage.v1beta.json +++ b/packages/google-ai-generativelanguage/samples/generated_samples/snippet_metadata_google.ai.generativelanguage.v1beta.json @@ -8,9 +8,808 @@ ], "language": "PYTHON", "name": "google-ai-generativelanguage", - "version": "0.1.0" + "version": "0.6.6" }, "snippets": [ + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.ai.generativelanguage_v1beta.CacheServiceAsyncClient", + "shortName": "CacheServiceAsyncClient" + }, + "fullName": "google.ai.generativelanguage_v1beta.CacheServiceAsyncClient.create_cached_content", + "method": { + "fullName": "google.ai.generativelanguage.v1beta.CacheService.CreateCachedContent", + "service": { + "fullName": "google.ai.generativelanguage.v1beta.CacheService", + "shortName": "CacheService" + }, + "shortName": "CreateCachedContent" + }, + "parameters": [ + { + "name": "request", + "type": "google.ai.generativelanguage_v1beta.types.CreateCachedContentRequest" + }, + { + "name": "cached_content", + "type": "google.ai.generativelanguage_v1beta.types.CachedContent" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.ai.generativelanguage_v1beta.types.CachedContent", + "shortName": "create_cached_content" + }, + "description": "Sample for CreateCachedContent", + "file": "generativelanguage_v1beta_generated_cache_service_create_cached_content_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "generativelanguage_v1beta_generated_CacheService_CreateCachedContent_async", + "segments": [ + { + "end": 50, + "start": 27, + "type": "FULL" + }, + 
{ + "end": 50, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 51, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "generativelanguage_v1beta_generated_cache_service_create_cached_content_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.ai.generativelanguage_v1beta.CacheServiceClient", + "shortName": "CacheServiceClient" + }, + "fullName": "google.ai.generativelanguage_v1beta.CacheServiceClient.create_cached_content", + "method": { + "fullName": "google.ai.generativelanguage.v1beta.CacheService.CreateCachedContent", + "service": { + "fullName": "google.ai.generativelanguage.v1beta.CacheService", + "shortName": "CacheService" + }, + "shortName": "CreateCachedContent" + }, + "parameters": [ + { + "name": "request", + "type": "google.ai.generativelanguage_v1beta.types.CreateCachedContentRequest" + }, + { + "name": "cached_content", + "type": "google.ai.generativelanguage_v1beta.types.CachedContent" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.ai.generativelanguage_v1beta.types.CachedContent", + "shortName": "create_cached_content" + }, + "description": "Sample for CreateCachedContent", + "file": "generativelanguage_v1beta_generated_cache_service_create_cached_content_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "generativelanguage_v1beta_generated_CacheService_CreateCachedContent_sync", + "segments": [ + { + "end": 50, + "start": 27, + "type": "FULL" + }, + { + "end": 50, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 51, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "generativelanguage_v1beta_generated_cache_service_create_cached_content_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.ai.generativelanguage_v1beta.CacheServiceAsyncClient", + "shortName": "CacheServiceAsyncClient" + }, + "fullName": "google.ai.generativelanguage_v1beta.CacheServiceAsyncClient.delete_cached_content", + "method": { + "fullName": "google.ai.generativelanguage.v1beta.CacheService.DeleteCachedContent", + "service": { + "fullName": "google.ai.generativelanguage.v1beta.CacheService", + "shortName": "CacheService" + }, + "shortName": "DeleteCachedContent" + }, + "parameters": [ + { + "name": "request", + "type": "google.ai.generativelanguage_v1beta.types.DeleteCachedContentRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "shortName": "delete_cached_content" + }, + "description": "Sample for DeleteCachedContent", + "file": "generativelanguage_v1beta_generated_cache_service_delete_cached_content_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "generativelanguage_v1beta_generated_CacheService_DeleteCachedContent_async", + 
"segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "generativelanguage_v1beta_generated_cache_service_delete_cached_content_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.ai.generativelanguage_v1beta.CacheServiceClient", + "shortName": "CacheServiceClient" + }, + "fullName": "google.ai.generativelanguage_v1beta.CacheServiceClient.delete_cached_content", + "method": { + "fullName": "google.ai.generativelanguage.v1beta.CacheService.DeleteCachedContent", + "service": { + "fullName": "google.ai.generativelanguage.v1beta.CacheService", + "shortName": "CacheService" + }, + "shortName": "DeleteCachedContent" + }, + "parameters": [ + { + "name": "request", + "type": "google.ai.generativelanguage_v1beta.types.DeleteCachedContentRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "shortName": "delete_cached_content" + }, + "description": "Sample for DeleteCachedContent", + "file": "generativelanguage_v1beta_generated_cache_service_delete_cached_content_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "generativelanguage_v1beta_generated_CacheService_DeleteCachedContent_sync", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "generativelanguage_v1beta_generated_cache_service_delete_cached_content_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.ai.generativelanguage_v1beta.CacheServiceAsyncClient", + "shortName": "CacheServiceAsyncClient" + }, + "fullName": "google.ai.generativelanguage_v1beta.CacheServiceAsyncClient.get_cached_content", + "method": { + "fullName": "google.ai.generativelanguage.v1beta.CacheService.GetCachedContent", + "service": { + "fullName": "google.ai.generativelanguage.v1beta.CacheService", + "shortName": "CacheService" + }, + "shortName": "GetCachedContent" + }, + "parameters": [ + { + "name": "request", + "type": "google.ai.generativelanguage_v1beta.types.GetCachedContentRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.ai.generativelanguage_v1beta.types.CachedContent", + "shortName": "get_cached_content" + }, + "description": "Sample for GetCachedContent", + "file": "generativelanguage_v1beta_generated_cache_service_get_cached_content_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "generativelanguage_v1beta_generated_CacheService_GetCachedContent_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 
51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "generativelanguage_v1beta_generated_cache_service_get_cached_content_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.ai.generativelanguage_v1beta.CacheServiceClient", + "shortName": "CacheServiceClient" + }, + "fullName": "google.ai.generativelanguage_v1beta.CacheServiceClient.get_cached_content", + "method": { + "fullName": "google.ai.generativelanguage.v1beta.CacheService.GetCachedContent", + "service": { + "fullName": "google.ai.generativelanguage.v1beta.CacheService", + "shortName": "CacheService" + }, + "shortName": "GetCachedContent" + }, + "parameters": [ + { + "name": "request", + "type": "google.ai.generativelanguage_v1beta.types.GetCachedContentRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.ai.generativelanguage_v1beta.types.CachedContent", + "shortName": "get_cached_content" + }, + "description": "Sample for GetCachedContent", + "file": "generativelanguage_v1beta_generated_cache_service_get_cached_content_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "generativelanguage_v1beta_generated_CacheService_GetCachedContent_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "generativelanguage_v1beta_generated_cache_service_get_cached_content_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.ai.generativelanguage_v1beta.CacheServiceAsyncClient", + "shortName": "CacheServiceAsyncClient" + }, + "fullName": "google.ai.generativelanguage_v1beta.CacheServiceAsyncClient.list_cached_contents", + "method": { + "fullName": "google.ai.generativelanguage.v1beta.CacheService.ListCachedContents", + "service": { + "fullName": "google.ai.generativelanguage.v1beta.CacheService", + "shortName": "CacheService" + }, + "shortName": "ListCachedContents" + }, + "parameters": [ + { + "name": "request", + "type": "google.ai.generativelanguage_v1beta.types.ListCachedContentsRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.ai.generativelanguage_v1beta.services.cache_service.pagers.ListCachedContentsAsyncPager", + "shortName": "list_cached_contents" + }, + "description": "Sample for ListCachedContents", + "file": "generativelanguage_v1beta_generated_cache_service_list_cached_contents_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "generativelanguage_v1beta_generated_CacheService_ListCachedContents_async", + "segments": [ + { + "end": 51, + "start": 27, + 
"type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "generativelanguage_v1beta_generated_cache_service_list_cached_contents_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.ai.generativelanguage_v1beta.CacheServiceClient", + "shortName": "CacheServiceClient" + }, + "fullName": "google.ai.generativelanguage_v1beta.CacheServiceClient.list_cached_contents", + "method": { + "fullName": "google.ai.generativelanguage.v1beta.CacheService.ListCachedContents", + "service": { + "fullName": "google.ai.generativelanguage.v1beta.CacheService", + "shortName": "CacheService" + }, + "shortName": "ListCachedContents" + }, + "parameters": [ + { + "name": "request", + "type": "google.ai.generativelanguage_v1beta.types.ListCachedContentsRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.ai.generativelanguage_v1beta.services.cache_service.pagers.ListCachedContentsPager", + "shortName": "list_cached_contents" + }, + "description": "Sample for ListCachedContents", + "file": "generativelanguage_v1beta_generated_cache_service_list_cached_contents_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "generativelanguage_v1beta_generated_CacheService_ListCachedContents_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "generativelanguage_v1beta_generated_cache_service_list_cached_contents_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.ai.generativelanguage_v1beta.CacheServiceAsyncClient", + "shortName": "CacheServiceAsyncClient" + }, + "fullName": "google.ai.generativelanguage_v1beta.CacheServiceAsyncClient.update_cached_content", + "method": { + "fullName": "google.ai.generativelanguage.v1beta.CacheService.UpdateCachedContent", + "service": { + "fullName": "google.ai.generativelanguage.v1beta.CacheService", + "shortName": "CacheService" + }, + "shortName": "UpdateCachedContent" + }, + "parameters": [ + { + "name": "request", + "type": "google.ai.generativelanguage_v1beta.types.UpdateCachedContentRequest" + }, + { + "name": "cached_content", + "type": "google.ai.generativelanguage_v1beta.types.CachedContent" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.ai.generativelanguage_v1beta.types.CachedContent", + "shortName": "update_cached_content" + }, + "description": "Sample for UpdateCachedContent", + "file": 
"generativelanguage_v1beta_generated_cache_service_update_cached_content_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "generativelanguage_v1beta_generated_CacheService_UpdateCachedContent_async", + "segments": [ + { + "end": 50, + "start": 27, + "type": "FULL" + }, + { + "end": 50, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 51, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "generativelanguage_v1beta_generated_cache_service_update_cached_content_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.ai.generativelanguage_v1beta.CacheServiceClient", + "shortName": "CacheServiceClient" + }, + "fullName": "google.ai.generativelanguage_v1beta.CacheServiceClient.update_cached_content", + "method": { + "fullName": "google.ai.generativelanguage.v1beta.CacheService.UpdateCachedContent", + "service": { + "fullName": "google.ai.generativelanguage.v1beta.CacheService", + "shortName": "CacheService" + }, + "shortName": "UpdateCachedContent" + }, + "parameters": [ + { + "name": "request", + "type": "google.ai.generativelanguage_v1beta.types.UpdateCachedContentRequest" + }, + { + "name": "cached_content", + "type": "google.ai.generativelanguage_v1beta.types.CachedContent" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.ai.generativelanguage_v1beta.types.CachedContent", + "shortName": "update_cached_content" + }, + "description": "Sample for UpdateCachedContent", + "file": "generativelanguage_v1beta_generated_cache_service_update_cached_content_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "generativelanguage_v1beta_generated_CacheService_UpdateCachedContent_sync", + "segments": [ + { + "end": 50, + "start": 27, + "type": "FULL" + }, + { + "end": 50, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 51, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "generativelanguage_v1beta_generated_cache_service_update_cached_content_sync.py" + }, { "canonical": true, "clientMethod": { diff --git a/packages/google-ai-generativelanguage/samples/generated_samples/snippet_metadata_google.ai.generativelanguage.v1beta2.json b/packages/google-ai-generativelanguage/samples/generated_samples/snippet_metadata_google.ai.generativelanguage.v1beta2.json index 5b7d0a0509b4..3bdb3d57561c 100644 --- a/packages/google-ai-generativelanguage/samples/generated_samples/snippet_metadata_google.ai.generativelanguage.v1beta2.json +++ b/packages/google-ai-generativelanguage/samples/generated_samples/snippet_metadata_google.ai.generativelanguage.v1beta2.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-ai-generativelanguage", - "version": "0.1.0" + "version": "0.6.6" }, "snippets": [ { diff --git 
a/packages/google-ai-generativelanguage/samples/generated_samples/snippet_metadata_google.ai.generativelanguage.v1beta3.json b/packages/google-ai-generativelanguage/samples/generated_samples/snippet_metadata_google.ai.generativelanguage.v1beta3.json index 91de9e353f90..7cbc8b24aef3 100644 --- a/packages/google-ai-generativelanguage/samples/generated_samples/snippet_metadata_google.ai.generativelanguage.v1beta3.json +++ b/packages/google-ai-generativelanguage/samples/generated_samples/snippet_metadata_google.ai.generativelanguage.v1beta3.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-ai-generativelanguage", - "version": "0.1.0" + "version": "0.6.6" }, "snippets": [ { diff --git a/packages/google-ai-generativelanguage/scripts/fixup_generativelanguage_v1_keywords.py b/packages/google-ai-generativelanguage/scripts/fixup_generativelanguage_v1_keywords.py index fbce05700724..d399a440b86b 100644 --- a/packages/google-ai-generativelanguage/scripts/fixup_generativelanguage_v1_keywords.py +++ b/packages/google-ai-generativelanguage/scripts/fixup_generativelanguage_v1_keywords.py @@ -40,7 +40,7 @@ class generativelanguageCallTransformer(cst.CSTTransformer): CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata') METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { 'batch_embed_contents': ('model', 'requests', ), - 'count_tokens': ('model', 'contents', ), + 'count_tokens': ('model', 'contents', 'generate_content_request', ), 'embed_content': ('model', 'content', 'task_type', 'title', 'output_dimensionality', ), 'generate_content': ('model', 'contents', 'safety_settings', 'generation_config', ), 'get_model': ('name', ), diff --git a/packages/google-ai-generativelanguage/scripts/fixup_generativelanguage_v1beta_keywords.py b/packages/google-ai-generativelanguage/scripts/fixup_generativelanguage_v1beta_keywords.py index 8e06b053b00d..dcb5cdfbb55c 100644 --- a/packages/google-ai-generativelanguage/scripts/fixup_generativelanguage_v1beta_keywords.py +++ b/packages/google-ai-generativelanguage/scripts/fixup_generativelanguage_v1beta_keywords.py @@ -47,12 +47,14 @@ class generativelanguageCallTransformer(cst.CSTTransformer): 'count_message_tokens': ('model', 'prompt', ), 'count_text_tokens': ('model', 'prompt', ), 'count_tokens': ('model', 'contents', 'generate_content_request', ), + 'create_cached_content': ('cached_content', ), 'create_chunk': ('parent', 'chunk', ), 'create_corpus': ('corpus', ), 'create_document': ('parent', 'document', ), 'create_file': ('file', ), 'create_permission': ('parent', 'permission', ), 'create_tuned_model': ('tuned_model', 'tuned_model_id', ), + 'delete_cached_content': ('name', ), 'delete_chunk': ('name', ), 'delete_corpus': ('name', 'force', ), 'delete_document': ('name', 'force', ), @@ -62,9 +64,10 @@ class generativelanguageCallTransformer(cst.CSTTransformer): 'embed_content': ('model', 'content', 'task_type', 'title', 'output_dimensionality', ), 'embed_text': ('model', 'text', ), 'generate_answer': ('model', 'contents', 'answer_style', 'inline_passages', 'semantic_retriever', 'safety_settings', 'temperature', ), - 'generate_content': ('model', 'contents', 'system_instruction', 'tools', 'tool_config', 'safety_settings', 'generation_config', ), + 'generate_content': ('model', 'contents', 'system_instruction', 'tools', 'tool_config', 'safety_settings', 'generation_config', 'cached_content', ), 'generate_message': ('model', 'prompt', 'temperature', 'candidate_count', 'top_p', 'top_k', ), 'generate_text': ('model', 'prompt', 'temperature', 'candidate_count', 
'max_output_tokens', 'top_p', 'top_k', 'safety_settings', 'stop_sequences', ), + 'get_cached_content': ('name', ), 'get_chunk': ('name', ), 'get_corpus': ('name', ), 'get_document': ('name', ), @@ -72,6 +75,7 @@ class generativelanguageCallTransformer(cst.CSTTransformer): 'get_model': ('name', ), 'get_permission': ('name', ), 'get_tuned_model': ('name', ), + 'list_cached_contents': ('page_size', 'page_token', ), 'list_chunks': ('parent', 'page_size', 'page_token', ), 'list_corpora': ('page_size', 'page_token', ), 'list_documents': ('parent', 'page_size', 'page_token', ), @@ -81,8 +85,9 @@ class generativelanguageCallTransformer(cst.CSTTransformer): 'list_tuned_models': ('page_size', 'page_token', 'filter', ), 'query_corpus': ('name', 'query', 'metadata_filters', 'results_count', ), 'query_document': ('name', 'query', 'results_count', 'metadata_filters', ), - 'stream_generate_content': ('model', 'contents', 'system_instruction', 'tools', 'tool_config', 'safety_settings', 'generation_config', ), + 'stream_generate_content': ('model', 'contents', 'system_instruction', 'tools', 'tool_config', 'safety_settings', 'generation_config', 'cached_content', ), 'transfer_ownership': ('name', 'email_address', ), + 'update_cached_content': ('cached_content', 'update_mask', ), 'update_chunk': ('chunk', 'update_mask', ), 'update_corpus': ('corpus', 'update_mask', ), 'update_document': ('document', 'update_mask', ), diff --git a/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1/test_generative_service.py b/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1/test_generative_service.py index a4bd49c38e9f..e2af954292b9 100644 --- a/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1/test_generative_service.py +++ b/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1/test_generative_service.py @@ -4498,15 +4498,7 @@ def test_count_tokens_rest_unset_required_fields(): ) unset_fields = transport.count_tokens._get_unset_required_fields({}) - assert set(unset_fields) == ( - set(()) - & set( - ( - "model", - "contents", - ) - ) - ) + assert set(unset_fields) == (set(()) & set(("model",))) @pytest.mark.parametrize("null_interceptor", [True, False]) diff --git a/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1/test_model_service.py b/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1/test_model_service.py index ea0cbfb1af6d..77d033fb830d 100644 --- a/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1/test_model_service.py +++ b/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1/test_model_service.py @@ -1866,10 +1866,10 @@ def test_list_models_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () + expected_metadata = () pager = client.list_models(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 diff --git a/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1beta/test_cache_service.py b/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1beta/test_cache_service.py new file mode 100644 index 000000000000..ff2e6c3ba723 --- /dev/null +++ b/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1beta/test_cache_service.py @@ -0,0 +1,5568 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 
2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import os + +# try/except added for compatibility with python < 3.8 +try: + from unittest import mock + from unittest.mock import AsyncMock # pragma: NO COVER +except ImportError: # pragma: NO COVER + import mock + +from collections.abc import Iterable +import json +import math + +from google.api_core import gapic_v1, grpc_helpers, grpc_helpers_async, path_template +from google.api_core import api_core_version, client_options +from google.api_core import exceptions as core_exceptions +import google.auth +from google.auth import credentials as ga_credentials +from google.auth.exceptions import MutualTLSChannelError +from google.longrunning import operations_pb2 # type: ignore +from google.oauth2 import service_account +from google.protobuf import duration_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import json_format +from google.protobuf import struct_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +import grpc +from grpc.experimental import aio +from proto.marshal.rules import wrappers +from proto.marshal.rules.dates import DurationRule, TimestampRule +import pytest +from requests import PreparedRequest, Request, Response +from requests.sessions import Session + +from google.ai.generativelanguage_v1beta.services.cache_service import ( + CacheServiceAsyncClient, + CacheServiceClient, + pagers, + transports, +) +from google.ai.generativelanguage_v1beta.types import ( + cached_content as gag_cached_content, +) +from google.ai.generativelanguage_v1beta.types import cache_service +from google.ai.generativelanguage_v1beta.types import cached_content +from google.ai.generativelanguage_v1beta.types import content + + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + + +# If default endpoint is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. +def modify_default_endpoint(client): + return ( + "foo.googleapis.com" + if ("localhost" in client.DEFAULT_ENDPOINT) + else client.DEFAULT_ENDPOINT + ) + + +# If default endpoint template is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint template so the client can produce a different +# mtls endpoint for endpoint testing purposes. 
+def modify_default_endpoint_template(client): + return ( + "test.{UNIVERSE_DOMAIN}" + if ("localhost" in client._DEFAULT_ENDPOINT_TEMPLATE) + else client._DEFAULT_ENDPOINT_TEMPLATE + ) + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert CacheServiceClient._get_default_mtls_endpoint(None) is None + assert ( + CacheServiceClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint + ) + assert ( + CacheServiceClient._get_default_mtls_endpoint(api_mtls_endpoint) + == api_mtls_endpoint + ) + assert ( + CacheServiceClient._get_default_mtls_endpoint(sandbox_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + CacheServiceClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) + == sandbox_mtls_endpoint + ) + assert CacheServiceClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi + + +def test__read_environment_variables(): + assert CacheServiceClient._read_environment_variables() == (False, "auto", None) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + assert CacheServiceClient._read_environment_variables() == (True, "auto", None) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + assert CacheServiceClient._read_environment_variables() == (False, "auto", None) + + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError) as excinfo: + CacheServiceClient._read_environment_variables() + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + assert CacheServiceClient._read_environment_variables() == ( + False, + "never", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + assert CacheServiceClient._read_environment_variables() == ( + False, + "always", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}): + assert CacheServiceClient._read_environment_variables() == (False, "auto", None) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + CacheServiceClient._read_environment_variables() + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + with mock.patch.dict(os.environ, {"GOOGLE_CLOUD_UNIVERSE_DOMAIN": "foo.com"}): + assert CacheServiceClient._read_environment_variables() == ( + False, + "auto", + "foo.com", + ) + + +def test__get_client_cert_source(): + mock_provided_cert_source = mock.Mock() + mock_default_cert_source = mock.Mock() + + assert CacheServiceClient._get_client_cert_source(None, False) is None + assert ( + CacheServiceClient._get_client_cert_source(mock_provided_cert_source, False) + is None + ) + assert ( + CacheServiceClient._get_client_cert_source(mock_provided_cert_source, True) + == mock_provided_cert_source + ) + + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", return_value=True + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=mock_default_cert_source, + ): + assert ( + 
CacheServiceClient._get_client_cert_source(None, True) + is mock_default_cert_source + ) + assert ( + CacheServiceClient._get_client_cert_source( + mock_provided_cert_source, "true" + ) + is mock_provided_cert_source + ) + + +@mock.patch.object( + CacheServiceClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(CacheServiceClient), +) +@mock.patch.object( + CacheServiceAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(CacheServiceAsyncClient), +) +def test__get_api_endpoint(): + api_override = "foo.com" + mock_client_cert_source = mock.Mock() + default_universe = CacheServiceClient._DEFAULT_UNIVERSE + default_endpoint = CacheServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=default_universe + ) + mock_universe = "bar.com" + mock_endpoint = CacheServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=mock_universe + ) + + assert ( + CacheServiceClient._get_api_endpoint( + api_override, mock_client_cert_source, default_universe, "always" + ) + == api_override + ) + assert ( + CacheServiceClient._get_api_endpoint( + None, mock_client_cert_source, default_universe, "auto" + ) + == CacheServiceClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + CacheServiceClient._get_api_endpoint(None, None, default_universe, "auto") + == default_endpoint + ) + assert ( + CacheServiceClient._get_api_endpoint(None, None, default_universe, "always") + == CacheServiceClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + CacheServiceClient._get_api_endpoint( + None, mock_client_cert_source, default_universe, "always" + ) + == CacheServiceClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + CacheServiceClient._get_api_endpoint(None, None, mock_universe, "never") + == mock_endpoint + ) + assert ( + CacheServiceClient._get_api_endpoint(None, None, default_universe, "never") + == default_endpoint + ) + + with pytest.raises(MutualTLSChannelError) as excinfo: + CacheServiceClient._get_api_endpoint( + None, mock_client_cert_source, mock_universe, "auto" + ) + assert ( + str(excinfo.value) + == "mTLS is not supported in any universe other than googleapis.com." + ) + + +def test__get_universe_domain(): + client_universe_domain = "foo.com" + universe_domain_env = "bar.com" + + assert ( + CacheServiceClient._get_universe_domain( + client_universe_domain, universe_domain_env + ) + == client_universe_domain + ) + assert ( + CacheServiceClient._get_universe_domain(None, universe_domain_env) + == universe_domain_env + ) + assert ( + CacheServiceClient._get_universe_domain(None, None) + == CacheServiceClient._DEFAULT_UNIVERSE + ) + + with pytest.raises(ValueError) as excinfo: + CacheServiceClient._get_universe_domain("", None) + assert str(excinfo.value) == "Universe Domain cannot be an empty string." + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (CacheServiceClient, transports.CacheServiceGrpcTransport, "grpc"), + (CacheServiceClient, transports.CacheServiceRestTransport, "rest"), + ], +) +def test__validate_universe_domain(client_class, transport_class, transport_name): + client = client_class( + transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) + ) + assert client._validate_universe_domain() == True + + # Test the case when universe is already validated. + assert client._validate_universe_domain() == True + + if transport_name == "grpc": + # Test the case where credentials are provided by the + # `local_channel_credentials`. The default universes in both match. 
+ channel = grpc.secure_channel( + "http://localhost/", grpc.local_channel_credentials() + ) + client = client_class(transport=transport_class(channel=channel)) + assert client._validate_universe_domain() == True + + # Test the case where credentials do not exist: e.g. a transport is provided + # with no credentials. Validation should still succeed because there is no + # mismatch with non-existent credentials. + channel = grpc.secure_channel( + "http://localhost/", grpc.local_channel_credentials() + ) + transport = transport_class(channel=channel) + transport._credentials = None + client = client_class(transport=transport) + assert client._validate_universe_domain() == True + + # TODO: This is needed to cater for older versions of google-auth + # Make this test unconditional once the minimum supported version of + # google-auth becomes 2.23.0 or higher. + google_auth_major, google_auth_minor = [ + int(part) for part in google.auth.__version__.split(".")[0:2] + ] + if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): + credentials = ga_credentials.AnonymousCredentials() + credentials._universe_domain = "foo.com" + # Test the case when there is a universe mismatch from the credentials. + client = client_class(transport=transport_class(credentials=credentials)) + with pytest.raises(ValueError) as excinfo: + client._validate_universe_domain() + assert ( + str(excinfo.value) + == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." + ) + + # Test the case when there is a universe mismatch from the client. + # + # TODO: Make this test unconditional once the minimum supported version of + # google-api-core becomes 2.15.0 or higher. + api_core_major, api_core_minor = [ + int(part) for part in api_core_version.__version__.split(".")[0:2] + ] + if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): + client = client_class( + client_options={"universe_domain": "bar.com"}, + transport=transport_class( + credentials=ga_credentials.AnonymousCredentials(), + ), + ) + with pytest.raises(ValueError) as excinfo: + client._validate_universe_domain() + assert ( + str(excinfo.value) + == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." 
+ ) + + # Test that ValueError is raised if universe_domain is provided via client options and credentials is None + with pytest.raises(ValueError): + client._compare_universes("foo.bar", None) + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (CacheServiceClient, "grpc"), + (CacheServiceAsyncClient, "grpc_asyncio"), + (CacheServiceClient, "rest"), + ], +) +def test_cache_service_client_from_service_account_info(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_info" + ) as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info, transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + "generativelanguage.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://generativelanguage.googleapis.com" + ) + + +@pytest.mark.parametrize( + "transport_class,transport_name", + [ + (transports.CacheServiceGrpcTransport, "grpc"), + (transports.CacheServiceGrpcAsyncIOTransport, "grpc_asyncio"), + (transports.CacheServiceRestTransport, "rest"), + ], +) +def test_cache_service_client_service_account_always_use_jwt( + transport_class, transport_name +): + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) + + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=False) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (CacheServiceClient, "grpc"), + (CacheServiceAsyncClient, "grpc_asyncio"), + (CacheServiceClient, "rest"), + ], +) +def test_cache_service_client_from_service_account_file(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_file" + ) as factory: + factory.return_value = creds + client = client_class.from_service_account_file( + "dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + client = client_class.from_service_account_json( + "dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + "generativelanguage.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://generativelanguage.googleapis.com" + ) + + +def test_cache_service_client_get_transport_class(): + transport = CacheServiceClient.get_transport_class() + available_transports = [ + transports.CacheServiceGrpcTransport, + transports.CacheServiceRestTransport, + ] + assert transport in available_transports + + transport = CacheServiceClient.get_transport_class("grpc") + assert transport == transports.CacheServiceGrpcTransport + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ 
+ (CacheServiceClient, transports.CacheServiceGrpcTransport, "grpc"), + ( + CacheServiceAsyncClient, + transports.CacheServiceGrpcAsyncIOTransport, + "grpc_asyncio", + ), + (CacheServiceClient, transports.CacheServiceRestTransport, "rest"), + ], +) +@mock.patch.object( + CacheServiceClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(CacheServiceClient), +) +@mock.patch.object( + CacheServiceAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(CacheServiceAsyncClient), +) +def test_cache_service_client_client_options( + client_class, transport_class, transport_name +): + # Check that if channel is provided we won't create a new one. + with mock.patch.object(CacheServiceClient, "get_transport_class") as gtc: + transport = transport_class(credentials=ga_credentials.AnonymousCredentials()) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object(CacheServiceClient, "get_transport_class") as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. + options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name, client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + client = client_class(transport=transport_name) + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError) as excinfo: + client = client_class(transport=transport_name) + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + # Check the case api_endpoint is provided + options = client_options.ClientOptions( + api_audience="https://language.googleapis.com" + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience="https://language.googleapis.com", + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,use_client_cert_env", + [ + (CacheServiceClient, transports.CacheServiceGrpcTransport, "grpc", "true"), + ( + CacheServiceAsyncClient, + transports.CacheServiceGrpcAsyncIOTransport, + "grpc_asyncio", + "true", + ), + (CacheServiceClient, transports.CacheServiceGrpcTransport, "grpc", "false"), + ( + CacheServiceAsyncClient, + transports.CacheServiceGrpcAsyncIOTransport, + "grpc_asyncio", + "false", + ), + (CacheServiceClient, transports.CacheServiceRestTransport, "rest", "true"), + (CacheServiceClient, transports.CacheServiceRestTransport, "rest", "false"), + ], +) +@mock.patch.object( + CacheServiceClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(CacheServiceClient), +) +@mock.patch.object( + CacheServiceAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(CacheServiceAsyncClient), +) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_cache_service_client_mtls_env_auto( + client_class, transport_class, transport_name, use_client_cert_env +): + # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. + + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + options = client_options.ClientOptions( + client_cert_source=client_cert_source_callback + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ) + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=client_cert_source_callback, + ): + if use_client_cert_env == "false": + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ) + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case client_cert_source and ADC client cert are not provided. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize("client_class", [CacheServiceClient, CacheServiceAsyncClient]) +@mock.patch.object( + CacheServiceClient, "DEFAULT_ENDPOINT", modify_default_endpoint(CacheServiceClient) +) +@mock.patch.object( + CacheServiceAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(CacheServiceAsyncClient), +) +def test_cache_service_client_get_mtls_endpoint_and_cert_source(client_class): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=mock_client_cert_source, + ): + ( + api_endpoint, + cert_source, + ) = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + client_class.get_mtls_endpoint_and_cert_source() + + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError) as excinfo: + client_class.get_mtls_endpoint_and_cert_source() + + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + + +@pytest.mark.parametrize("client_class", [CacheServiceClient, CacheServiceAsyncClient]) +@mock.patch.object( + CacheServiceClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(CacheServiceClient), +) +@mock.patch.object( + CacheServiceAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(CacheServiceAsyncClient), +) +def test_cache_service_client_client_api_endpoint(client_class): + mock_client_cert_source = client_cert_source_callback + api_override = "foo.com" + default_universe = CacheServiceClient._DEFAULT_UNIVERSE + default_endpoint = CacheServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=default_universe + ) + mock_universe = "bar.com" + mock_endpoint = CacheServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=mock_universe + ) + + # If ClientOptions.api_endpoint is set and GOOGLE_API_USE_CLIENT_CERTIFICATE="true", + # use ClientOptions.api_endpoint as the api endpoint regardless. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ): + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=api_override + ) + client = client_class( + client_options=options, + credentials=ga_credentials.AnonymousCredentials(), + ) + assert client.api_endpoint == api_override + + # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="never", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + client = client_class(credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == default_endpoint + + # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="always", + # use the DEFAULT_MTLS_ENDPOINT as the api endpoint. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + client = client_class(credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + + # If ClientOptions.api_endpoint is not set, GOOGLE_API_USE_MTLS_ENDPOINT="auto" (default), + # GOOGLE_API_USE_CLIENT_CERTIFICATE="false" (default), default cert source doesn't exist, + # and ClientOptions.universe_domain="bar.com", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with universe domain as the api endpoint. 
+ options = client_options.ClientOptions() + universe_exists = hasattr(options, "universe_domain") + if universe_exists: + options = client_options.ClientOptions(universe_domain=mock_universe) + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + else: + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + assert client.api_endpoint == ( + mock_endpoint if universe_exists else default_endpoint + ) + assert client.universe_domain == ( + mock_universe if universe_exists else default_universe + ) + + # If ClientOptions does not have a universe domain attribute and GOOGLE_API_USE_MTLS_ENDPOINT="never", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. + options = client_options.ClientOptions() + if hasattr(options, "universe_domain"): + delattr(options, "universe_domain") + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + assert client.api_endpoint == default_endpoint + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (CacheServiceClient, transports.CacheServiceGrpcTransport, "grpc"), + ( + CacheServiceAsyncClient, + transports.CacheServiceGrpcAsyncIOTransport, + "grpc_asyncio", + ), + (CacheServiceClient, transports.CacheServiceRestTransport, "rest"), + ], +) +def test_cache_service_client_client_options_scopes( + client_class, transport_class, transport_name +): + # Check the case scopes are provided. + options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + CacheServiceClient, + transports.CacheServiceGrpcTransport, + "grpc", + grpc_helpers, + ), + ( + CacheServiceAsyncClient, + transports.CacheServiceGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + (CacheServiceClient, transports.CacheServiceRestTransport, "rest", None), + ], +) +def test_cache_service_client_client_options_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. 
+ options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +def test_cache_service_client_client_options_from_dict(): + with mock.patch( + "google.ai.generativelanguage_v1beta.services.cache_service.transports.CacheServiceGrpcTransport.__init__" + ) as grpc_transport: + grpc_transport.return_value = None + client = CacheServiceClient(client_options={"api_endpoint": "squid.clam.whelk"}) + grpc_transport.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + CacheServiceClient, + transports.CacheServiceGrpcTransport, + "grpc", + grpc_helpers, + ), + ( + CacheServiceAsyncClient, + transports.CacheServiceGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + ], +) +def test_cache_service_client_create_channel_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. + options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # test that the credentials from file are saved and used as the credentials. 
+ with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel" + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + file_creds = ga_credentials.AnonymousCredentials() + load_creds.return_value = (file_creds, None) + adc.return_value = (creds, None) + client = client_class(client_options=options, transport=transport_name) + create_channel.assert_called_with( + "generativelanguage.googleapis.com:443", + credentials=file_creds, + credentials_file=None, + quota_project_id=None, + default_scopes=(), + scopes=None, + default_host="generativelanguage.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "request_type", + [ + cache_service.ListCachedContentsRequest, + dict, + ], +) +def test_list_cached_contents(request_type, transport: str = "grpc"): + client = CacheServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_cached_contents), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = cache_service.ListCachedContentsResponse( + next_page_token="next_page_token_value", + ) + response = client.list_cached_contents(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = cache_service.ListCachedContentsRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListCachedContentsPager) + assert response.next_page_token == "next_page_token_value" + + +def test_list_cached_contents_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = CacheServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_cached_contents), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.list_cached_contents() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cache_service.ListCachedContentsRequest() + + +def test_list_cached_contents_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = CacheServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = cache_service.ListCachedContentsRequest( + page_token="page_token_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
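+ # The explicit page_token above must survive unchanged; only unset UUID4
+ # fields are candidates for auto-population under AIP-4235.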
+ with mock.patch.object( + type(client.transport.list_cached_contents), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.list_cached_contents(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cache_service.ListCachedContentsRequest( + page_token="page_token_value", + ) + + +def test_list_cached_contents_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = CacheServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.list_cached_contents in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.list_cached_contents + ] = mock_rpc + request = {} + client.list_cached_contents(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_cached_contents(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_list_cached_contents_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = CacheServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_cached_contents), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
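+ # FakeUnaryUnaryCall wraps the response so the mocked stub can be awaited
+ # like a real asyncio unary-unary call.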
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cache_service.ListCachedContentsResponse( + next_page_token="next_page_token_value", + ) + ) + response = await client.list_cached_contents() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cache_service.ListCachedContentsRequest() + + +@pytest.mark.asyncio +async def test_list_cached_contents_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = CacheServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.list_cached_contents + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[ + client._client._transport.list_cached_contents + ] = mock_object + + request = {} + await client.list_cached_contents(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + await client.list_cached_contents(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + +@pytest.mark.asyncio +async def test_list_cached_contents_async( + transport: str = "grpc_asyncio", + request_type=cache_service.ListCachedContentsRequest, +): + client = CacheServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_cached_contents), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cache_service.ListCachedContentsResponse( + next_page_token="next_page_token_value", + ) + ) + response = await client.list_cached_contents(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = cache_service.ListCachedContentsRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListCachedContentsAsyncPager) + assert response.next_page_token == "next_page_token_value" + + +@pytest.mark.asyncio +async def test_list_cached_contents_async_from_dict(): + await test_list_cached_contents_async(request_type=dict) + + +def test_list_cached_contents_pager(transport_name: str = "grpc"): + client = CacheServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_cached_contents), "__call__" + ) as call: + # Set the response to a series of pages. 
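+ # Four pages (3 + 0 + 1 + 2 = 6 items); the trailing RuntimeError only
+ # fires if the pager tries to fetch past the final page.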
+ call.side_effect = ( + cache_service.ListCachedContentsResponse( + cached_contents=[ + cached_content.CachedContent(), + cached_content.CachedContent(), + cached_content.CachedContent(), + ], + next_page_token="abc", + ), + cache_service.ListCachedContentsResponse( + cached_contents=[], + next_page_token="def", + ), + cache_service.ListCachedContentsResponse( + cached_contents=[ + cached_content.CachedContent(), + ], + next_page_token="ghi", + ), + cache_service.ListCachedContentsResponse( + cached_contents=[ + cached_content.CachedContent(), + cached_content.CachedContent(), + ], + ), + RuntimeError, + ) + + expected_metadata = () + pager = client.list_cached_contents(request={}) + + assert pager._metadata == expected_metadata + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, cached_content.CachedContent) for i in results) + + +def test_list_cached_contents_pages(transport_name: str = "grpc"): + client = CacheServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_cached_contents), "__call__" + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + cache_service.ListCachedContentsResponse( + cached_contents=[ + cached_content.CachedContent(), + cached_content.CachedContent(), + cached_content.CachedContent(), + ], + next_page_token="abc", + ), + cache_service.ListCachedContentsResponse( + cached_contents=[], + next_page_token="def", + ), + cache_service.ListCachedContentsResponse( + cached_contents=[ + cached_content.CachedContent(), + ], + next_page_token="ghi", + ), + cache_service.ListCachedContentsResponse( + cached_contents=[ + cached_content.CachedContent(), + cached_content.CachedContent(), + ], + ), + RuntimeError, + ) + pages = list(client.list_cached_contents(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_list_cached_contents_async_pager(): + client = CacheServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_cached_contents), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. 
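+ # Same four-page sequence as the synchronous pager test, consumed here
+ # via `async for`.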
+ call.side_effect = ( + cache_service.ListCachedContentsResponse( + cached_contents=[ + cached_content.CachedContent(), + cached_content.CachedContent(), + cached_content.CachedContent(), + ], + next_page_token="abc", + ), + cache_service.ListCachedContentsResponse( + cached_contents=[], + next_page_token="def", + ), + cache_service.ListCachedContentsResponse( + cached_contents=[ + cached_content.CachedContent(), + ], + next_page_token="ghi", + ), + cache_service.ListCachedContentsResponse( + cached_contents=[ + cached_content.CachedContent(), + cached_content.CachedContent(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_cached_contents( + request={}, + ) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, cached_content.CachedContent) for i in responses) + + +@pytest.mark.asyncio +async def test_list_cached_contents_async_pages(): + client = CacheServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_cached_contents), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + cache_service.ListCachedContentsResponse( + cached_contents=[ + cached_content.CachedContent(), + cached_content.CachedContent(), + cached_content.CachedContent(), + ], + next_page_token="abc", + ), + cache_service.ListCachedContentsResponse( + cached_contents=[], + next_page_token="def", + ), + cache_service.ListCachedContentsResponse( + cached_contents=[ + cached_content.CachedContent(), + ], + next_page_token="ghi", + ), + cache_service.ListCachedContentsResponse( + cached_contents=[ + cached_content.CachedContent(), + cached_content.CachedContent(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_cached_contents(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + cache_service.CreateCachedContentRequest, + dict, + ], +) +def test_create_cached_content(request_type, transport: str = "grpc"): + client = CacheServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_cached_content), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = gag_cached_content.CachedContent( + name="name_value", + display_name="display_name_value", + model="model_value", + ) + response = client.create_cached_content(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = cache_service.CreateCachedContentRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, gag_cached_content.CachedContent) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.model == "model_value" + + +def test_create_cached_content_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = CacheServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_cached_content), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.create_cached_content() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cache_service.CreateCachedContentRequest() + + +def test_create_cached_content_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = CacheServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = cache_service.CreateCachedContentRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_cached_content), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.create_cached_content(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cache_service.CreateCachedContentRequest() + + +def test_create_cached_content_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = CacheServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.create_cached_content + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.create_cached_content + ] = mock_rpc + request = {} + client.create_cached_content(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.create_cached_content(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_create_cached_content_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = CacheServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_cached_content), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + gag_cached_content.CachedContent( + name="name_value", + display_name="display_name_value", + model="model_value", + ) + ) + response = await client.create_cached_content() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cache_service.CreateCachedContentRequest() + + +@pytest.mark.asyncio +async def test_create_cached_content_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = CacheServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.create_cached_content + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[ + client._client._transport.create_cached_content + ] = mock_object + + request = {} + await client.create_cached_content(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + await client.create_cached_content(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + +@pytest.mark.asyncio +async def test_create_cached_content_async( + transport: str = "grpc_asyncio", + request_type=cache_service.CreateCachedContentRequest, +): + client = CacheServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_cached_content), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + gag_cached_content.CachedContent( + name="name_value", + display_name="display_name_value", + model="model_value", + ) + ) + response = await client.create_cached_content(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = cache_service.CreateCachedContentRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, gag_cached_content.CachedContent) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.model == "model_value" + + +@pytest.mark.asyncio +async def test_create_cached_content_async_from_dict(): + await test_create_cached_content_async(request_type=dict) + + +def test_create_cached_content_flattened(): + client = CacheServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_cached_content), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = gag_cached_content.CachedContent() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.create_cached_content( + cached_content=gag_cached_content.CachedContent( + expire_time=timestamp_pb2.Timestamp(seconds=751) + ), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].cached_content + mock_val = gag_cached_content.CachedContent( + expire_time=timestamp_pb2.Timestamp(seconds=751) + ) + assert arg == mock_val + + +def test_create_cached_content_flattened_error(): + client = CacheServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_cached_content( + cache_service.CreateCachedContentRequest(), + cached_content=gag_cached_content.CachedContent( + expire_time=timestamp_pb2.Timestamp(seconds=751) + ), + ) + + +@pytest.mark.asyncio +async def test_create_cached_content_flattened_async(): + client = CacheServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_cached_content), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = gag_cached_content.CachedContent() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + gag_cached_content.CachedContent() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.create_cached_content( + cached_content=gag_cached_content.CachedContent( + expire_time=timestamp_pb2.Timestamp(seconds=751) + ), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].cached_content + mock_val = gag_cached_content.CachedContent( + expire_time=timestamp_pb2.Timestamp(seconds=751) + ) + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_create_cached_content_flattened_error_async(): + client = CacheServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
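+ # Supplying both is ambiguous, so the client raises ValueError before any
+ # RPC is attempted.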
+ with pytest.raises(ValueError): + await client.create_cached_content( + cache_service.CreateCachedContentRequest(), + cached_content=gag_cached_content.CachedContent( + expire_time=timestamp_pb2.Timestamp(seconds=751) + ), + ) + + +@pytest.mark.parametrize( + "request_type", + [ + cache_service.GetCachedContentRequest, + dict, + ], +) +def test_get_cached_content(request_type, transport: str = "grpc"): + client = CacheServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_cached_content), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = cached_content.CachedContent( + name="name_value", + display_name="display_name_value", + model="model_value", + ) + response = client.get_cached_content(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = cache_service.GetCachedContentRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, cached_content.CachedContent) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.model == "model_value" + + +def test_get_cached_content_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = CacheServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_cached_content), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.get_cached_content() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cache_service.GetCachedContentRequest() + + +def test_get_cached_content_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = CacheServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = cache_service.GetCachedContentRequest( + name="name_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_cached_content), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.get_cached_content(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cache_service.GetCachedContentRequest( + name="name_value", + ) + + +def test_get_cached_content_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = CacheServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.get_cached_content in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.get_cached_content + ] = mock_rpc + request = {} + client.get_cached_content(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.get_cached_content(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_get_cached_content_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = CacheServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_cached_content), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cached_content.CachedContent( + name="name_value", + display_name="display_name_value", + model="model_value", + ) + ) + response = await client.get_cached_content() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cache_service.GetCachedContentRequest() + + +@pytest.mark.asyncio +async def test_get_cached_content_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = CacheServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.get_cached_content + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[ + client._client._transport.get_cached_content + ] = mock_object + + request = {} + await client.get_cached_content(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_object.call_count == 1 + + await client.get_cached_content(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + +@pytest.mark.asyncio +async def test_get_cached_content_async( + transport: str = "grpc_asyncio", request_type=cache_service.GetCachedContentRequest +): + client = CacheServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_cached_content), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cached_content.CachedContent( + name="name_value", + display_name="display_name_value", + model="model_value", + ) + ) + response = await client.get_cached_content(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = cache_service.GetCachedContentRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, cached_content.CachedContent) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.model == "model_value" + + +@pytest.mark.asyncio +async def test_get_cached_content_async_from_dict(): + await test_get_cached_content_async(request_type=dict) + + +def test_get_cached_content_field_headers(): + client = CacheServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cache_service.GetCachedContentRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_cached_content), "__call__" + ) as call: + call.return_value = cached_content.CachedContent() + client.get_cached_content(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_cached_content_field_headers_async(): + client = CacheServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cache_service.GetCachedContentRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_cached_content), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cached_content.CachedContent() + ) + await client.get_cached_content(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
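+ # The resource name travels in the x-goog-request-params metadata entry so
+ # the backend can route the request.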
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_get_cached_content_flattened(): + client = CacheServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_cached_content), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = cached_content.CachedContent() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_cached_content( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_get_cached_content_flattened_error(): + client = CacheServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_cached_content( + cache_service.GetCachedContentRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_get_cached_content_flattened_async(): + client = CacheServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_cached_content), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = cached_content.CachedContent() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cached_content.CachedContent() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_cached_content( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_get_cached_content_flattened_error_async(): + client = CacheServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.get_cached_content( + cache_service.GetCachedContentRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + cache_service.UpdateCachedContentRequest, + dict, + ], +) +def test_update_cached_content(request_type, transport: str = "grpc"): + client = CacheServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_cached_content), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = gag_cached_content.CachedContent( + name="name_value", + display_name="display_name_value", + model="model_value", + ) + response = client.update_cached_content(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = cache_service.UpdateCachedContentRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, gag_cached_content.CachedContent) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.model == "model_value" + + +def test_update_cached_content_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = CacheServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_cached_content), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.update_cached_content() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cache_service.UpdateCachedContentRequest() + + +def test_update_cached_content_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = CacheServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = cache_service.UpdateCachedContentRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_cached_content), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.update_cached_content(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cache_service.UpdateCachedContentRequest() + + +def test_update_cached_content_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = CacheServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.update_cached_content + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.update_cached_content + ] = mock_rpc + request = {} + client.update_cached_content(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.update_cached_content(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_update_cached_content_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = CacheServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_cached_content), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + gag_cached_content.CachedContent( + name="name_value", + display_name="display_name_value", + model="model_value", + ) + ) + response = await client.update_cached_content() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cache_service.UpdateCachedContentRequest() + + +@pytest.mark.asyncio +async def test_update_cached_content_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = CacheServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.update_cached_content + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[ + client._client._transport.update_cached_content + ] = mock_object + + request = {} + await client.update_cached_content(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + await client.update_cached_content(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + +@pytest.mark.asyncio +async def test_update_cached_content_async( + transport: str = "grpc_asyncio", + request_type=cache_service.UpdateCachedContentRequest, +): + client = CacheServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_cached_content), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + gag_cached_content.CachedContent( + name="name_value", + display_name="display_name_value", + model="model_value", + ) + ) + response = await client.update_cached_content(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = cache_service.UpdateCachedContentRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, gag_cached_content.CachedContent) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.model == "model_value" + + +@pytest.mark.asyncio +async def test_update_cached_content_async_from_dict(): + await test_update_cached_content_async(request_type=dict) + + +def test_update_cached_content_field_headers(): + client = CacheServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cache_service.UpdateCachedContentRequest() + + request.cached_content.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_cached_content), "__call__" + ) as call: + call.return_value = gag_cached_content.CachedContent() + client.update_cached_content(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "cached_content.name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_update_cached_content_field_headers_async(): + client = CacheServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cache_service.UpdateCachedContentRequest() + + request.cached_content.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_cached_content), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + gag_cached_content.CachedContent() + ) + await client.update_cached_content(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "cached_content.name=name_value", + ) in kw["metadata"] + + +def test_update_cached_content_flattened(): + client = CacheServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_cached_content), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = gag_cached_content.CachedContent() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.update_cached_content( + cached_content=gag_cached_content.CachedContent( + expire_time=timestamp_pb2.Timestamp(seconds=751) + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].cached_content + mock_val = gag_cached_content.CachedContent( + expire_time=timestamp_pb2.Timestamp(seconds=751) + ) + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + assert arg == mock_val + + +def test_update_cached_content_flattened_error(): + client = CacheServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_cached_content( + cache_service.UpdateCachedContentRequest(), + cached_content=gag_cached_content.CachedContent( + expire_time=timestamp_pb2.Timestamp(seconds=751) + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +@pytest.mark.asyncio +async def test_update_cached_content_flattened_async(): + client = CacheServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_cached_content), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = gag_cached_content.CachedContent() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + gag_cached_content.CachedContent() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.update_cached_content( + cached_content=gag_cached_content.CachedContent( + expire_time=timestamp_pb2.Timestamp(seconds=751) + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].cached_content + mock_val = gag_cached_content.CachedContent( + expire_time=timestamp_pb2.Timestamp(seconds=751) + ) + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_update_cached_content_flattened_error_async(): + client = CacheServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.update_cached_content( + cache_service.UpdateCachedContentRequest(), + cached_content=gag_cached_content.CachedContent( + expire_time=timestamp_pb2.Timestamp(seconds=751) + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +@pytest.mark.parametrize( + "request_type", + [ + cache_service.DeleteCachedContentRequest, + dict, + ], +) +def test_delete_cached_content(request_type, transport: str = "grpc"): + client = CacheServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_cached_content), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
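+ # Delete has no response payload; the client surfaces it as None, which
+ # the assertion below relies on.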
+ call.return_value = None + response = client.delete_cached_content(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = cache_service.DeleteCachedContentRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_cached_content_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = CacheServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_cached_content), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.delete_cached_content() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cache_service.DeleteCachedContentRequest() + + +def test_delete_cached_content_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = CacheServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = cache_service.DeleteCachedContentRequest( + name="name_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_cached_content), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.delete_cached_content(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cache_service.DeleteCachedContentRequest( + name="name_value", + ) + + +def test_delete_cached_content_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = CacheServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.delete_cached_content + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.delete_cached_content + ] = mock_rpc + request = {} + client.delete_cached_content(request) + + # Establish that the underlying gRPC stub method was called. 
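+ # A second call below must reuse the cached wrapper; wrap_method should
+ # not run again.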
+ assert mock_rpc.call_count == 1 + + client.delete_cached_content(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_delete_cached_content_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = CacheServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_cached_content), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_cached_content() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cache_service.DeleteCachedContentRequest() + + +@pytest.mark.asyncio +async def test_delete_cached_content_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = CacheServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.delete_cached_content + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[ + client._client._transport.delete_cached_content + ] = mock_object + + request = {} + await client.delete_cached_content(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + await client.delete_cached_content(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + +@pytest.mark.asyncio +async def test_delete_cached_content_async( + transport: str = "grpc_asyncio", + request_type=cache_service.DeleteCachedContentRequest, +): + client = CacheServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_cached_content), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_cached_content(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = cache_service.DeleteCachedContentRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert response is None + + +@pytest.mark.asyncio +async def test_delete_cached_content_async_from_dict(): + await test_delete_cached_content_async(request_type=dict) + + +def test_delete_cached_content_field_headers(): + client = CacheServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cache_service.DeleteCachedContentRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_cached_content), "__call__" + ) as call: + call.return_value = None + client.delete_cached_content(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_delete_cached_content_field_headers_async(): + client = CacheServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cache_service.DeleteCachedContentRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_cached_content), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_cached_content(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_delete_cached_content_flattened(): + client = CacheServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_cached_content), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = None + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.delete_cached_content( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_delete_cached_content_flattened_error(): + client = CacheServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_cached_content( + cache_service.DeleteCachedContentRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_delete_cached_content_flattened_async(): + client = CacheServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+    with mock.patch.object(
+        type(client.transport.delete_cached_content), "__call__"
+    ) as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.delete_cached_content(
+            name="name_value",
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].name
+        mock_val = "name_value"
+        assert arg == mock_val
+
+
+@pytest.mark.asyncio
+async def test_delete_cached_content_flattened_error_async():
+    client = CacheServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.delete_cached_content(
+            cache_service.DeleteCachedContentRequest(),
+            name="name_value",
+        )
+
+
+@pytest.mark.parametrize(
+    "request_type",
+    [
+        cache_service.ListCachedContentsRequest,
+        dict,
+    ],
+)
+def test_list_cached_contents_rest(request_type):
+    client = CacheServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(type(client.transport._session), "request") as req:
+        # Designate an appropriate value for the returned response.
+        return_value = cache_service.ListCachedContentsResponse(
+            next_page_token="next_page_token_value",
+        )
+
+        # Wrap the value into a proper Response obj
+        response_value = Response()
+        response_value.status_code = 200
+        # Convert return value to protobuf type
+        return_value = cache_service.ListCachedContentsResponse.pb(return_value)
+        json_return_value = json_format.MessageToJson(return_value)
+
+        response_value._content = json_return_value.encode("UTF-8")
+        req.return_value = response_value
+        response = client.list_cached_contents(request)
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, pagers.ListCachedContentsPager)
+    assert response.next_page_token == "next_page_token_value"
+
+
+def test_list_cached_contents_rest_use_cached_wrapped_rpc():
+    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+    # instead of constructing them on each call
+    with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
+        client = CacheServiceClient(
+            credentials=ga_credentials.AnonymousCredentials(),
+            transport="rest",
+        )
+
+        # Should wrap all calls on client creation
+        assert wrapper_fn.call_count > 0
+        wrapper_fn.reset_mock()
+
+        # Ensure method has been cached
+        assert (
+            client._transport.list_cached_contents in client._transport._wrapped_methods
+        )
+
+        # Replace cached wrapped function with mock
+        mock_rpc = mock.Mock()
+        mock_rpc.return_value.name = (
+            "foo"  # operation_request.operation in compute client(s) expect a string.
+        )
+        client._transport._wrapped_methods[
+            client._transport.list_cached_contents
+        ] = mock_rpc
+
+        request = {}
+        client.list_cached_contents(request)
+
+        # Establish that the underlying gRPC stub method was called.
+ assert mock_rpc.call_count == 1 + + client.list_cached_contents(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_cached_contents_rest_interceptors(null_interceptor): + transport = transports.CacheServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.CacheServiceRestInterceptor(), + ) + client = CacheServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.CacheServiceRestInterceptor, "post_list_cached_contents" + ) as post, mock.patch.object( + transports.CacheServiceRestInterceptor, "pre_list_cached_contents" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = cache_service.ListCachedContentsRequest.pb( + cache_service.ListCachedContentsRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = cache_service.ListCachedContentsResponse.to_json( + cache_service.ListCachedContentsResponse() + ) + + request = cache_service.ListCachedContentsRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = cache_service.ListCachedContentsResponse() + + client.list_cached_contents( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_cached_contents_rest_bad_request( + transport: str = "rest", request_type=cache_service.ListCachedContentsRequest +): + client = CacheServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_cached_contents(request) + + +def test_list_cached_contents_rest_pager(transport: str = "rest"): + client = CacheServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + cache_service.ListCachedContentsResponse( + cached_contents=[ + cached_content.CachedContent(), + cached_content.CachedContent(), + cached_content.CachedContent(), + ], + next_page_token="abc", + ), + cache_service.ListCachedContentsResponse( + cached_contents=[], + next_page_token="def", + ), + cache_service.ListCachedContentsResponse( + cached_contents=[ + cached_content.CachedContent(), + ], + next_page_token="ghi", + ), + cache_service.ListCachedContentsResponse( + cached_contents=[ + cached_content.CachedContent(), + cached_content.CachedContent(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + cache_service.ListCachedContentsResponse.to_json(x) for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {} + + pager = client.list_cached_contents(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, cached_content.CachedContent) for i in results) + + pages = list(client.list_cached_contents(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + cache_service.CreateCachedContentRequest, + dict, + ], +) +def test_create_cached_content_rest(request_type): + client = CacheServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {} + request_init["cached_content"] = { + "expire_time": {"seconds": 751, "nanos": 543}, + "ttl": {"seconds": 751, "nanos": 543}, + "name": "name_value", + "display_name": "display_name_value", + "model": "model_value", + "system_instruction": { + "parts": [ + { + "text": "text_value", + "inline_data": { + "mime_type": "mime_type_value", + "data": b"data_blob", + }, + "function_call": {"name": "name_value", "args": {"fields": {}}}, + "function_response": {"name": "name_value", "response": {}}, + "file_data": { + "mime_type": "mime_type_value", + "file_uri": "file_uri_value", + }, + "executable_code": {"language": 1, "code": "code_value"}, + "code_execution_result": {"outcome": 1, "output": "output_value"}, + } + ], + "role": "role_value", + }, + "contents": {}, + "tools": [ + { + "function_declarations": [ + { + "name": "name_value", + "description": "description_value", + "parameters": { + "type_": 1, + "format_": "format__value", + "description": "description_value", + "nullable": True, + "enum": ["enum_value1", "enum_value2"], + "items": {}, + "properties": {}, + "required": ["required_value1", "required_value2"], + }, + } + ], + "code_execution": {}, + } + ], + "tool_config": { + "function_calling_config": { + "mode": 1, + "allowed_function_names": [ + "allowed_function_names_value1", + "allowed_function_names_value2", + ], + } + }, + "create_time": {}, + "update_time": {}, + "usage_metadata": {"total_token_count": 1836}, + } + # The version of a generated dependency at test runtime may differ from the version used during generation. 
+ # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = cache_service.CreateCachedContentRequest.meta.fields["cached_content"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["cached_content"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["cached_content"][field])): + del request_init["cached_content"][field][i][subfield] + else: + del request_init["cached_content"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = gag_cached_content.CachedContent( + name="name_value", + display_name="display_name_value", + model="model_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = gag_cached_content.CachedContent.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.create_cached_content(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, gag_cached_content.CachedContent) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.model == "model_value" + + +def test_create_cached_content_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = CacheServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.create_cached_content + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.create_cached_content + ] = mock_rpc + + request = {} + client.create_cached_content(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.create_cached_content(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_create_cached_content_rest_required_fields( + request_type=cache_service.CreateCachedContentRequest, +): + transport_class = transports.CacheServiceRestTransport + + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_cached_content._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_cached_content._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + + client = CacheServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = gag_cached_content.CachedContent() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = gag_cached_content.CachedContent.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.create_cached_content(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_create_cached_content_rest_unset_required_fields(): + transport = transports.CacheServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.create_cached_content._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("cachedContent",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_cached_content_rest_interceptors(null_interceptor): + transport = transports.CacheServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.CacheServiceRestInterceptor(), + ) + client = CacheServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.CacheServiceRestInterceptor, "post_create_cached_content" + ) as post, mock.patch.object( + transports.CacheServiceRestInterceptor, "pre_create_cached_content" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = cache_service.CreateCachedContentRequest.pb( + cache_service.CreateCachedContentRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = gag_cached_content.CachedContent.to_json( + gag_cached_content.CachedContent() + ) + + request = cache_service.CreateCachedContentRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = gag_cached_content.CachedContent() + + client.create_cached_content( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_create_cached_content_rest_bad_request( + transport: str = "rest", request_type=cache_service.CreateCachedContentRequest +): + client = CacheServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.create_cached_content(request) + + +def test_create_cached_content_rest_flattened(): + client = CacheServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = gag_cached_content.CachedContent() + + # get arguments that satisfy an http rule for this method + sample_request = {} + + # get truthy value for each flattened field + mock_args = dict( + cached_content=gag_cached_content.CachedContent( + expire_time=timestamp_pb2.Timestamp(seconds=751) + ), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = gag_cached_content.CachedContent.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.create_cached_content(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1beta/cachedContents" % client.transport._host, args[1] + ) + + +def test_create_cached_content_rest_flattened_error(transport: str = "rest"): + client = CacheServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_cached_content( + cache_service.CreateCachedContentRequest(), + cached_content=gag_cached_content.CachedContent( + expire_time=timestamp_pb2.Timestamp(seconds=751) + ), + ) + + +def test_create_cached_content_rest_error(): + client = CacheServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + cache_service.GetCachedContentRequest, + dict, + ], +) +def test_get_cached_content_rest(request_type): + client = CacheServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "cachedContents/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = cached_content.CachedContent( + name="name_value", + display_name="display_name_value", + model="model_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = cached_content.CachedContent.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_cached_content(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, cached_content.CachedContent) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.model == "model_value" + + +def test_get_cached_content_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = CacheServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.get_cached_content in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.get_cached_content + ] = mock_rpc + + request = {} + client.get_cached_content(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.get_cached_content(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_get_cached_content_rest_required_fields( + request_type=cache_service.GetCachedContentRequest, +): + transport_class = transports.CacheServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_cached_content._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_cached_content._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = CacheServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = cached_content.CachedContent() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = cached_content.CachedContent.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_cached_content(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_cached_content_rest_unset_required_fields(): + transport = transports.CacheServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_cached_content._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_cached_content_rest_interceptors(null_interceptor): + transport = transports.CacheServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.CacheServiceRestInterceptor(), + ) + client = CacheServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.CacheServiceRestInterceptor, "post_get_cached_content" + ) as post, mock.patch.object( + transports.CacheServiceRestInterceptor, "pre_get_cached_content" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = cache_service.GetCachedContentRequest.pb( + cache_service.GetCachedContentRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = cached_content.CachedContent.to_json( + cached_content.CachedContent() + ) + + request = cache_service.GetCachedContentRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = cached_content.CachedContent() + + client.get_cached_content( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_cached_content_rest_bad_request( + transport: str = "rest", request_type=cache_service.GetCachedContentRequest +): + client = CacheServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "cachedContents/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_cached_content(request) + + +def test_get_cached_content_rest_flattened(): + client = CacheServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = cached_content.CachedContent() + + # get arguments that satisfy an http rule for this method + sample_request = {"name": "cachedContents/sample1"} + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = cached_content.CachedContent.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_cached_content(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1beta/{name=cachedContents/*}" % client.transport._host, args[1] + ) + + +def test_get_cached_content_rest_flattened_error(transport: str = "rest"): + client = CacheServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get_cached_content( + cache_service.GetCachedContentRequest(), + name="name_value", + ) + + +def test_get_cached_content_rest_error(): + client = CacheServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + cache_service.UpdateCachedContentRequest, + dict, + ], +) +def test_update_cached_content_rest(request_type): + client = CacheServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"cached_content": {"name": "cachedContents/sample1"}} + request_init["cached_content"] = { + "expire_time": {"seconds": 751, "nanos": 543}, + "ttl": {"seconds": 751, "nanos": 543}, + "name": "cachedContents/sample1", + "display_name": "display_name_value", + "model": "model_value", + "system_instruction": { + "parts": [ + { + "text": "text_value", + "inline_data": { + "mime_type": "mime_type_value", + "data": b"data_blob", + }, + "function_call": {"name": "name_value", "args": {"fields": {}}}, + "function_response": {"name": "name_value", "response": {}}, + "file_data": { + "mime_type": "mime_type_value", + "file_uri": "file_uri_value", + }, + "executable_code": {"language": 1, "code": "code_value"}, + "code_execution_result": {"outcome": 1, "output": "output_value"}, + } + ], + "role": "role_value", + }, + "contents": {}, + "tools": [ + { + "function_declarations": [ + { + "name": "name_value", + "description": "description_value", + "parameters": { + "type_": 1, + "format_": "format__value", + "description": "description_value", + "nullable": True, + "enum": ["enum_value1", "enum_value2"], + "items": {}, + "properties": {}, + "required": ["required_value1", "required_value2"], + }, + } + ], + "code_execution": {}, + } + ], + "tool_config": { + "function_calling_config": { + "mode": 1, + "allowed_function_names": [ + "allowed_function_names_value1", + "allowed_function_names_value2", + ], + } + }, + "create_time": {}, + "update_time": {}, + "usage_metadata": {"total_token_count": 1836}, + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = cache_service.UpdateCachedContentRequest.meta.fields["cached_content"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["cached_content"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["cached_content"][field])): + del request_init["cached_content"][field][i][subfield] + else: + del request_init["cached_content"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = gag_cached_content.CachedContent( + name="name_value", + display_name="display_name_value", + model="model_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = gag_cached_content.CachedContent.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.update_cached_content(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, gag_cached_content.CachedContent) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.model == "model_value" + + +def test_update_cached_content_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = CacheServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.update_cached_content + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.update_cached_content + ] = mock_rpc + + request = {} + client.update_cached_content(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.update_cached_content(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_update_cached_content_rest_required_fields( + request_type=cache_service.UpdateCachedContentRequest, +): + transport_class = transports.CacheServiceRestTransport + + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_cached_content._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_cached_content._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("update_mask",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + + client = CacheServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = gag_cached_content.CachedContent() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "patch", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = gag_cached_content.CachedContent.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.update_cached_content(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_update_cached_content_rest_unset_required_fields(): + transport = transports.CacheServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.update_cached_content._get_unset_required_fields({}) + assert set(unset_fields) == (set(("updateMask",)) & set(("cachedContent",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_cached_content_rest_interceptors(null_interceptor): + transport = transports.CacheServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.CacheServiceRestInterceptor(), + ) + client = CacheServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.CacheServiceRestInterceptor, "post_update_cached_content" + ) as post, mock.patch.object( + transports.CacheServiceRestInterceptor, "pre_update_cached_content" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = cache_service.UpdateCachedContentRequest.pb( + cache_service.UpdateCachedContentRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = gag_cached_content.CachedContent.to_json( + gag_cached_content.CachedContent() + ) + + request = cache_service.UpdateCachedContentRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = gag_cached_content.CachedContent() + + client.update_cached_content( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_update_cached_content_rest_bad_request( + transport: str = "rest", request_type=cache_service.UpdateCachedContentRequest +): + client = CacheServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"cached_content": {"name": "cachedContents/sample1"}} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.update_cached_content(request) + + +def test_update_cached_content_rest_flattened(): + client = CacheServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = gag_cached_content.CachedContent() + + # get arguments that satisfy an http rule for this method + sample_request = {"cached_content": {"name": "cachedContents/sample1"}} + + # get truthy value for each flattened field + mock_args = dict( + cached_content=gag_cached_content.CachedContent( + expire_time=timestamp_pb2.Timestamp(seconds=751) + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = gag_cached_content.CachedContent.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.update_cached_content(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1beta/{cached_content.name=cachedContents/*}" % client.transport._host, + args[1], + ) + + +def test_update_cached_content_rest_flattened_error(transport: str = "rest"): + client = CacheServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_cached_content( + cache_service.UpdateCachedContentRequest(), + cached_content=gag_cached_content.CachedContent( + expire_time=timestamp_pb2.Timestamp(seconds=751) + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +def test_update_cached_content_rest_error(): + client = CacheServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + cache_service.DeleteCachedContentRequest, + dict, + ], +) +def test_delete_cached_content_rest(request_type): + client = CacheServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "cachedContents/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = None + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.delete_cached_content(request) + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_cached_content_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = CacheServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.delete_cached_content + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.delete_cached_content + ] = mock_rpc + + request = {} + client.delete_cached_content(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.delete_cached_content(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_delete_cached_content_rest_required_fields( + request_type=cache_service.DeleteCachedContentRequest, +): + transport_class = transports.CacheServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_cached_content._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_cached_content._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = CacheServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = None + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "delete", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.delete_cached_content(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_delete_cached_content_rest_unset_required_fields(): + transport = transports.CacheServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.delete_cached_content._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_cached_content_rest_interceptors(null_interceptor): + transport = transports.CacheServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.CacheServiceRestInterceptor(), + ) + client = CacheServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.CacheServiceRestInterceptor, "pre_delete_cached_content" + ) as pre: + pre.assert_not_called() + pb_message = cache_service.DeleteCachedContentRequest.pb( + cache_service.DeleteCachedContentRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + + request = cache_service.DeleteCachedContentRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + + client.delete_cached_content( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + + +def test_delete_cached_content_rest_bad_request( + transport: str = "rest", request_type=cache_service.DeleteCachedContentRequest +): + client = CacheServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "cachedContents/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_cached_content(request) + + +def test_delete_cached_content_rest_flattened(): + client = CacheServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = None + + # get arguments that satisfy an http rule for this method + sample_request = {"name": "cachedContents/sample1"} + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.delete_cached_content(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1beta/{name=cachedContents/*}" % client.transport._host, args[1] + ) + + +def test_delete_cached_content_rest_flattened_error(transport: str = "rest"): + client = CacheServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_cached_content( + cache_service.DeleteCachedContentRequest(), + name="name_value", + ) + + +def test_delete_cached_content_rest_error(): + client = CacheServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.CacheServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = CacheServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.CacheServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = CacheServiceClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. + transport = transports.CacheServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = CacheServiceClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = CacheServiceClient( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.CacheServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = CacheServiceClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.CacheServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = CacheServiceClient(transport=transport) + assert client.transport is transport + + +def test_transport_get_channel(): + # A client may be instantiated with a custom transport instance. 
+ transport = transports.CacheServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + transport = transports.CacheServiceGrpcAsyncIOTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.CacheServiceGrpcTransport, + transports.CacheServiceGrpcAsyncIOTransport, + transports.CacheServiceRestTransport, + ], +) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "rest", + ], +) +def test_transport_kind(transport_name): + transport = CacheServiceClient.get_transport_class(transport_name)( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert transport.kind == transport_name + + +def test_transport_grpc_default(): + # A client should use the gRPC transport by default. + client = CacheServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert isinstance( + client.transport, + transports.CacheServiceGrpcTransport, + ) + + +def test_cache_service_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.CacheServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json", + ) + + +def test_cache_service_base_transport(): + # Instantiate the base transport. + with mock.patch( + "google.ai.generativelanguage_v1beta.services.cache_service.transports.CacheServiceTransport.__init__" + ) as Transport: + Transport.return_value = None + transport = transports.CacheServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. + methods = ( + "list_cached_contents", + "create_cached_content", + "get_cached_content", + "update_cached_content", + "delete_cached_content", + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + with pytest.raises(NotImplementedError): + transport.close() + + # Catch all for all remaining methods and properties + remainder = [ + "kind", + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + + +def test_cache_service_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch( + "google.ai.generativelanguage_v1beta.services.cache_service.transports.CacheServiceTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.CacheServiceTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with( + "credentials.json", + scopes=None, + default_scopes=(), + quota_project_id="octopus", + ) + + +def test_cache_service_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. 
+ with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( + "google.ai.generativelanguage_v1beta.services.cache_service.transports.CacheServiceTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.CacheServiceTransport() + adc.assert_called_once() + + +def test_cache_service_auth_adc(): + # If no credentials are provided, we should use ADC credentials. + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + CacheServiceClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=(), + quota_project_id=None, + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.CacheServiceGrpcTransport, + transports.CacheServiceGrpcAsyncIOTransport, + ], +) +def test_cache_service_transport_auth_adc(transport_class): + # If credentials and host are not provided, the transport class should use + # ADC credentials. + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + adc.assert_called_once_with( + scopes=["1", "2"], + default_scopes=(), + quota_project_id="octopus", + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.CacheServiceGrpcTransport, + transports.CacheServiceGrpcAsyncIOTransport, + transports.CacheServiceRestTransport, + ], +) +def test_cache_service_transport_auth_gdch_credentials(transport_class): + host = "https://language.com" + api_audience_tests = [None, "https://language2.com"] + api_audience_expect = [host, "https://language2.com"] + for t, e in zip(api_audience_tests, api_audience_expect): + with mock.patch.object(google.auth, "default", autospec=True) as adc: + gdch_mock = mock.MagicMock() + type(gdch_mock).with_gdch_audience = mock.PropertyMock( + return_value=gdch_mock + ) + adc.return_value = (gdch_mock, None) + transport_class(host=host, api_audience=t) + gdch_mock.with_gdch_audience.assert_called_once_with(e) + + +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.CacheServiceGrpcTransport, grpc_helpers), + (transports.CacheServiceGrpcAsyncIOTransport, grpc_helpers_async), + ], +) +def test_cache_service_transport_create_channel(transport_class, grpc_helpers): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
+ with mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + + create_channel.assert_called_with( + "generativelanguage.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + default_scopes=(), + scopes=["1", "2"], + default_host="generativelanguage.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "transport_class", + [transports.CacheServiceGrpcTransport, transports.CacheServiceGrpcAsyncIOTransport], +) +def test_cache_service_grpc_transport_client_cert_source_for_mtls(transport_class): + cred = ga_credentials.AnonymousCredentials() + + # Check ssl_channel_credentials is used if provided. + with mock.patch.object(transport_class, "create_channel") as mock_create_channel: + mock_ssl_channel_creds = mock.Mock() + transport_class( + host="squid.clam.whelk", + credentials=cred, + ssl_channel_credentials=mock_ssl_channel_creds, + ) + mock_create_channel.assert_called_once_with( + "squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_channel_creds, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls + # is used. + with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): + with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: + transport_class( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback, + ) + expected_cert, expected_key = client_cert_source_callback() + mock_ssl_cred.assert_called_once_with( + certificate_chain=expected_cert, private_key=expected_key + ) + + +def test_cache_service_http_transport_client_cert_source_for_mtls(): + cred = ga_credentials.AnonymousCredentials() + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ) as mock_configure_mtls_channel: + transports.CacheServiceRestTransport( + credentials=cred, client_cert_source_for_mtls=client_cert_source_callback + ) + mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + "rest", + ], +) +def test_cache_service_host_no_port(transport_name): + client = CacheServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="generativelanguage.googleapis.com" + ), + transport=transport_name, + ) + assert client.transport._host == ( + "generativelanguage.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://generativelanguage.googleapis.com" + ) + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + "rest", + ], +) +def test_cache_service_host_with_port(transport_name): + client = CacheServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="generativelanguage.googleapis.com:8000" + ), + transport=transport_name, + ) + assert 
client.transport._host == ( + "generativelanguage.googleapis.com:8000" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://generativelanguage.googleapis.com:8000" + ) + + +@pytest.mark.parametrize( + "transport_name", + [ + "rest", + ], +) +def test_cache_service_client_transport_session_collision(transport_name): + creds1 = ga_credentials.AnonymousCredentials() + creds2 = ga_credentials.AnonymousCredentials() + client1 = CacheServiceClient( + credentials=creds1, + transport=transport_name, + ) + client2 = CacheServiceClient( + credentials=creds2, + transport=transport_name, + ) + session1 = client1.transport.list_cached_contents._session + session2 = client2.transport.list_cached_contents._session + assert session1 != session2 + session1 = client1.transport.create_cached_content._session + session2 = client2.transport.create_cached_content._session + assert session1 != session2 + session1 = client1.transport.get_cached_content._session + session2 = client2.transport.get_cached_content._session + assert session1 != session2 + session1 = client1.transport.update_cached_content._session + session2 = client2.transport.update_cached_content._session + assert session1 != session2 + session1 = client1.transport.delete_cached_content._session + session2 = client2.transport.delete_cached_content._session + assert session1 != session2 + + +def test_cache_service_grpc_transport_channel(): + channel = grpc.secure_channel("http://localhost/", grpc.local_channel_credentials()) + + # Check that channel is used if provided. + transport = transports.CacheServiceGrpcTransport( + host="squid.clam.whelk", + channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None + + +def test_cache_service_grpc_asyncio_transport_channel(): + channel = aio.secure_channel("http://localhost/", grpc.local_channel_credentials()) + + # Check that channel is used if provided. + transport = transports.CacheServiceGrpcAsyncIOTransport( + host="squid.clam.whelk", + channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. 
+@pytest.mark.parametrize( + "transport_class", + [transports.CacheServiceGrpcTransport, transports.CacheServiceGrpcAsyncIOTransport], +) +def test_cache_service_transport_channel_mtls_with_client_cert_source(transport_class): + with mock.patch( + "grpc.ssl_channel_credentials", autospec=True + ) as grpc_ssl_channel_cred: + with mock.patch.object( + transport_class, "create_channel" + ) as grpc_create_channel: + mock_ssl_cred = mock.Mock() + grpc_ssl_channel_cred.return_value = mock_ssl_cred + + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + + cred = ga_credentials.AnonymousCredentials() + with pytest.warns(DeprecationWarning): + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (cred, None) + transport = transport_class( + host="squid.clam.whelk", + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=client_cert_source_callback, + ) + adc.assert_called_once() + + grpc_ssl_channel_cred.assert_called_once_with( + certificate_chain=b"cert bytes", private_key=b"key bytes" + ) + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + assert transport._ssl_channel_credentials == mock_ssl_cred + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. +@pytest.mark.parametrize( + "transport_class", + [transports.CacheServiceGrpcTransport, transports.CacheServiceGrpcAsyncIOTransport], +) +def test_cache_service_transport_channel_mtls_with_adc(transport_class): + mock_ssl_cred = mock.Mock() + with mock.patch.multiple( + "google.auth.transport.grpc.SslCredentials", + __init__=mock.Mock(return_value=None), + ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), + ): + with mock.patch.object( + transport_class, "create_channel" + ) as grpc_create_channel: + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + mock_cred = mock.Mock() + + with pytest.warns(DeprecationWarning): + transport = transport_class( + host="squid.clam.whelk", + credentials=mock_cred, + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=None, + ) + + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=mock_cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + + +def test_cached_content_path(): + id = "squid" + expected = "cachedContents/{id}".format( + id=id, + ) + actual = CacheServiceClient.cached_content_path(id) + assert expected == actual + + +def test_parse_cached_content_path(): + expected = { + "id": "clam", + } + path = CacheServiceClient.cached_content_path(**expected) + + # Check that the path construction is reversible. 
+ actual = CacheServiceClient.parse_cached_content_path(path) + assert expected == actual + + +def test_model_path(): + model = "whelk" + expected = "models/{model}".format( + model=model, + ) + actual = CacheServiceClient.model_path(model) + assert expected == actual + + +def test_parse_model_path(): + expected = { + "model": "octopus", + } + path = CacheServiceClient.model_path(**expected) + + # Check that the path construction is reversible. + actual = CacheServiceClient.parse_model_path(path) + assert expected == actual + + +def test_common_billing_account_path(): + billing_account = "oyster" + expected = "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + actual = CacheServiceClient.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "nudibranch", + } + path = CacheServiceClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. + actual = CacheServiceClient.parse_common_billing_account_path(path) + assert expected == actual + + +def test_common_folder_path(): + folder = "cuttlefish" + expected = "folders/{folder}".format( + folder=folder, + ) + actual = CacheServiceClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "mussel", + } + path = CacheServiceClient.common_folder_path(**expected) + + # Check that the path construction is reversible. + actual = CacheServiceClient.parse_common_folder_path(path) + assert expected == actual + + +def test_common_organization_path(): + organization = "winkle" + expected = "organizations/{organization}".format( + organization=organization, + ) + actual = CacheServiceClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "nautilus", + } + path = CacheServiceClient.common_organization_path(**expected) + + # Check that the path construction is reversible. + actual = CacheServiceClient.parse_common_organization_path(path) + assert expected == actual + + +def test_common_project_path(): + project = "scallop" + expected = "projects/{project}".format( + project=project, + ) + actual = CacheServiceClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "abalone", + } + path = CacheServiceClient.common_project_path(**expected) + + # Check that the path construction is reversible. + actual = CacheServiceClient.parse_common_project_path(path) + assert expected == actual + + +def test_common_location_path(): + project = "squid" + location = "clam" + expected = "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) + actual = CacheServiceClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "whelk", + "location": "octopus", + } + path = CacheServiceClient.common_location_path(**expected) + + # Check that the path construction is reversible. 
+ actual = CacheServiceClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object( + transports.CacheServiceTransport, "_prep_wrapped_messages" + ) as prep: + client = CacheServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object( + transports.CacheServiceTransport, "_prep_wrapped_messages" + ) as prep: + transport_class = CacheServiceClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + +@pytest.mark.asyncio +async def test_transport_close_async(): + client = CacheServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + with mock.patch.object( + type(getattr(client.transport, "grpc_channel")), "close" + ) as close: + async with client: + close.assert_not_called() + close.assert_called_once() + + +def test_transport_close(): + transports = { + "rest": "_session", + "grpc": "_grpc_channel", + } + + for transport, close_name in transports.items(): + client = CacheServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + with mock.patch.object( + type(getattr(client.transport, close_name)), "close" + ) as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +def test_client_ctx(): + transports = [ + "rest", + "grpc", + ] + for transport in transports: + client = CacheServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + # Test client calls underlying transport. 
+ with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() + + +@pytest.mark.parametrize( + "client_class,transport_class", + [ + (CacheServiceClient, transports.CacheServiceGrpcTransport), + (CacheServiceAsyncClient, transports.CacheServiceGrpcAsyncIOTransport), + ], +) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) diff --git a/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1beta/test_file_service.py b/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1beta/test_file_service.py index 41f8708f621d..2ebbcdf08ec0 100644 --- a/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1beta/test_file_service.py +++ b/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1beta/test_file_service.py @@ -34,8 +34,11 @@ from google.auth.exceptions import MutualTLSChannelError from google.longrunning import operations_pb2 # type: ignore from google.oauth2 import service_account +from google.protobuf import any_pb2 # type: ignore +from google.protobuf import duration_pb2 # type: ignore from google.protobuf import json_format from google.protobuf import timestamp_pb2 # type: ignore +from google.rpc import status_pb2 # type: ignore import grpc from grpc.experimental import aio from proto.marshal.rules import wrappers @@ -1556,10 +1559,10 @@ def test_list_files_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () + expected_metadata = () pager = client.list_files(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 diff --git a/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1beta/test_generative_service.py b/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1beta/test_generative_service.py index 0dcaade9f213..62d9f9022ac3 100644 --- a/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1beta/test_generative_service.py +++ b/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1beta/test_generative_service.py @@ -1226,6 +1226,7 @@ def test_generate_content_non_empty_request_with_auto_populated_field(): # if they meet the requirements of AIP 4235. request = generative_service.GenerateContentRequest( model="model_value", + cached_content="cached_content_value", ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -1238,6 +1239,7 @@ def test_generate_content_non_empty_request_with_auto_populated_field(): _, args, _ = call.mock_calls[0] assert args[0] == generative_service.GenerateContentRequest( model="model_value", + cached_content="cached_content_value", ) @@ -2025,6 +2027,7 @@ def test_stream_generate_content_non_empty_request_with_auto_populated_field(): # if they meet the requirements of AIP 4235. request = generative_service.GenerateContentRequest( model="model_value", + cached_content="cached_content_value", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -2039,6 +2042,7 @@ def test_stream_generate_content_non_empty_request_with_auto_populated_field(): _, args, _ = call.mock_calls[0] assert args[0] == generative_service.GenerateContentRequest( model="model_value", + cached_content="cached_content_value", ) @@ -3142,6 +3146,7 @@ def test_count_tokens(request_type, transport: str = "grpc"): # Designate an appropriate return value for the call. call.return_value = generative_service.CountTokensResponse( total_tokens=1303, + cached_content_token_count=2746, ) response = client.count_tokens(request) @@ -3154,6 +3159,7 @@ def test_count_tokens(request_type, transport: str = "grpc"): # Establish that the response is the type that we expect. assert isinstance(response, generative_service.CountTokensResponse) assert response.total_tokens == 1303 + assert response.cached_content_token_count == 2746 def test_count_tokens_empty_call(): @@ -3253,6 +3259,7 @@ async def test_count_tokens_empty_call_async(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( generative_service.CountTokensResponse( total_tokens=1303, + cached_content_token_count=2746, ) ) response = await client.count_tokens() @@ -3326,6 +3333,7 @@ async def test_count_tokens_async( call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( generative_service.CountTokensResponse( total_tokens=1303, + cached_content_token_count=2746, ) ) response = await client.count_tokens(request) @@ -3339,6 +3347,7 @@ async def test_count_tokens_async( # Establish that the response is the type that we expect. assert isinstance(response, generative_service.CountTokensResponse) assert response.total_tokens == 1303 + assert response.cached_content_token_count == 2746 @pytest.mark.asyncio @@ -5115,6 +5124,7 @@ def test_count_tokens_rest(request_type): # Designate an appropriate value for the returned response. return_value = generative_service.CountTokensResponse( total_tokens=1303, + cached_content_token_count=2746, ) # Wrap the value into a proper Response obj @@ -5131,6 +5141,7 @@ def test_count_tokens_rest(request_type): # Establish that the response is the type that we expect. assert isinstance(response, generative_service.CountTokensResponse) assert response.total_tokens == 1303 + assert response.cached_content_token_count == 2746 def test_count_tokens_rest_use_cached_wrapped_rpc(): @@ -5946,8 +5957,28 @@ def test_generative_service_transport_channel_mtls_with_adc(transport_class): assert transport.grpc_channel == mock_grpc_channel +def test_cached_content_path(): + id = "squid" + expected = "cachedContents/{id}".format( + id=id, + ) + actual = GenerativeServiceClient.cached_content_path(id) + assert expected == actual + + +def test_parse_cached_content_path(): + expected = { + "id": "clam", + } + path = GenerativeServiceClient.cached_content_path(**expected) + + # Check that the path construction is reversible. 
+ actual = GenerativeServiceClient.parse_cached_content_path(path) + assert expected == actual + + def test_model_path(): - model = "squid" + model = "whelk" expected = "models/{model}".format( model=model, ) @@ -5957,7 +5988,7 @@ def test_model_path(): def test_parse_model_path(): expected = { - "model": "clam", + "model": "octopus", } path = GenerativeServiceClient.model_path(**expected) @@ -5967,7 +5998,7 @@ def test_parse_model_path(): def test_common_billing_account_path(): - billing_account = "whelk" + billing_account = "oyster" expected = "billingAccounts/{billing_account}".format( billing_account=billing_account, ) @@ -5977,7 +6008,7 @@ def test_common_billing_account_path(): def test_parse_common_billing_account_path(): expected = { - "billing_account": "octopus", + "billing_account": "nudibranch", } path = GenerativeServiceClient.common_billing_account_path(**expected) @@ -5987,7 +6018,7 @@ def test_parse_common_billing_account_path(): def test_common_folder_path(): - folder = "oyster" + folder = "cuttlefish" expected = "folders/{folder}".format( folder=folder, ) @@ -5997,7 +6028,7 @@ def test_common_folder_path(): def test_parse_common_folder_path(): expected = { - "folder": "nudibranch", + "folder": "mussel", } path = GenerativeServiceClient.common_folder_path(**expected) @@ -6007,7 +6038,7 @@ def test_parse_common_folder_path(): def test_common_organization_path(): - organization = "cuttlefish" + organization = "winkle" expected = "organizations/{organization}".format( organization=organization, ) @@ -6017,7 +6048,7 @@ def test_common_organization_path(): def test_parse_common_organization_path(): expected = { - "organization": "mussel", + "organization": "nautilus", } path = GenerativeServiceClient.common_organization_path(**expected) @@ -6027,7 +6058,7 @@ def test_parse_common_organization_path(): def test_common_project_path(): - project = "winkle" + project = "scallop" expected = "projects/{project}".format( project=project, ) @@ -6037,7 +6068,7 @@ def test_common_project_path(): def test_parse_common_project_path(): expected = { - "project": "nautilus", + "project": "abalone", } path = GenerativeServiceClient.common_project_path(**expected) @@ -6047,8 +6078,8 @@ def test_parse_common_project_path(): def test_common_location_path(): - project = "scallop" - location = "abalone" + project = "squid" + location = "clam" expected = "projects/{project}/locations/{location}".format( project=project, location=location, @@ -6059,8 +6090,8 @@ def test_common_location_path(): def test_parse_common_location_path(): expected = { - "project": "squid", - "location": "clam", + "project": "whelk", + "location": "octopus", } path = GenerativeServiceClient.common_location_path(**expected) diff --git a/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1beta/test_model_service.py b/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1beta/test_model_service.py index ffed72e6d193..b3c21ee2f8d4 100644 --- a/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1beta/test_model_service.py +++ b/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1beta/test_model_service.py @@ -1137,6 +1137,7 @@ def test_get_model(request_type, transport: str = "grpc"): output_token_limit=1967, supported_generation_methods=["supported_generation_methods_value"], temperature=0.1198, + max_temperature=0.16190000000000002, top_p=0.546, top_k=541, ) @@ -1161,6 +1162,7 @@ def test_get_model(request_type, transport: str = 
"grpc"): "supported_generation_methods_value" ] assert math.isclose(response.temperature, 0.1198, rel_tol=1e-6) + assert math.isclose(response.max_temperature, 0.16190000000000002, rel_tol=1e-6) assert math.isclose(response.top_p, 0.546, rel_tol=1e-6) assert response.top_k == 541 @@ -1270,6 +1272,7 @@ async def test_get_model_empty_call_async(): output_token_limit=1967, supported_generation_methods=["supported_generation_methods_value"], temperature=0.1198, + max_temperature=0.16190000000000002, top_p=0.546, top_k=541, ) @@ -1351,6 +1354,7 @@ async def test_get_model_async( output_token_limit=1967, supported_generation_methods=["supported_generation_methods_value"], temperature=0.1198, + max_temperature=0.16190000000000002, top_p=0.546, top_k=541, ) @@ -1376,6 +1380,7 @@ async def test_get_model_async( "supported_generation_methods_value" ] assert math.isclose(response.temperature, 0.1198, rel_tol=1e-6) + assert math.isclose(response.max_temperature, 0.16190000000000002, rel_tol=1e-6) assert math.isclose(response.top_p, 0.546, rel_tol=1e-6) assert response.top_k == 541 @@ -1879,10 +1884,10 @@ def test_list_models_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () + expected_metadata = () pager = client.list_models(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -2804,10 +2809,10 @@ def test_list_tuned_models_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () + expected_metadata = () pager = client.list_tuned_models(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -4177,6 +4182,7 @@ def test_get_model_rest(request_type): output_token_limit=1967, supported_generation_methods=["supported_generation_methods_value"], temperature=0.1198, + max_temperature=0.16190000000000002, top_p=0.546, top_k=541, ) @@ -4205,6 +4211,7 @@ def test_get_model_rest(request_type): "supported_generation_methods_value" ] assert math.isclose(response.temperature, 0.1198, rel_tol=1e-6) + assert math.isclose(response.max_temperature, 0.16190000000000002, rel_tol=1e-6) assert math.isclose(response.top_p, 0.546, rel_tol=1e-6) assert response.top_k == 541 diff --git a/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1beta/test_permission_service.py b/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1beta/test_permission_service.py index 2db14ddb7a61..afa1e1f85b8e 100644 --- a/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1beta/test_permission_service.py +++ b/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1beta/test_permission_service.py @@ -2369,13 +2369,13 @@ def test_list_permissions_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_permissions(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 diff --git a/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1beta/test_retriever_service.py b/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1beta/test_retriever_service.py index 2b48ce526182..0837cef9f258 100644 --- 
a/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1beta/test_retriever_service.py +++ b/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1beta/test_retriever_service.py @@ -2816,10 +2816,10 @@ def test_list_corpora_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () + expected_metadata = () pager = client.list_corpora(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -5137,13 +5137,13 @@ def test_list_documents_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_documents(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -8344,13 +8344,13 @@ def test_list_chunks_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_chunks(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 diff --git a/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1beta2/test_model_service.py b/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1beta2/test_model_service.py index caab40dbdef1..c7150a72b9e8 100644 --- a/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1beta2/test_model_service.py +++ b/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1beta2/test_model_service.py @@ -1865,10 +1865,10 @@ def test_list_models_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () + expected_metadata = () pager = client.list_models(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 diff --git a/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1beta3/test_model_service.py b/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1beta3/test_model_service.py index 48d9c2f84748..bfbda0cb5313 100644 --- a/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1beta3/test_model_service.py +++ b/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1beta3/test_model_service.py @@ -1879,10 +1879,10 @@ def test_list_models_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () + expected_metadata = () pager = client.list_models(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -2802,10 +2802,10 @@ def test_list_tuned_models_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () + expected_metadata = () pager = client.list_tuned_models(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 diff --git a/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1beta3/test_permission_service.py 
b/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1beta3/test_permission_service.py index d1d5cc9aa73e..204f74a70968 100644 --- a/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1beta3/test_permission_service.py +++ b/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1beta3/test_permission_service.py @@ -2369,13 +2369,13 @@ def test_list_permissions_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_permissions(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 diff --git a/packages/google-analytics-admin/tests/unit/gapic/admin_v1alpha/test_analytics_admin_service.py b/packages/google-analytics-admin/tests/unit/gapic/admin_v1alpha/test_analytics_admin_service.py index c9b9eb2be2f1..f07c20a08ed9 100644 --- a/packages/google-analytics-admin/tests/unit/gapic/admin_v1alpha/test_analytics_admin_service.py +++ b/packages/google-analytics-admin/tests/unit/gapic/admin_v1alpha/test_analytics_admin_service.py @@ -1852,10 +1852,10 @@ def test_list_accounts_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () + expected_metadata = () pager = client.list_accounts(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -3264,10 +3264,10 @@ def test_list_account_summaries_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () + expected_metadata = () pager = client.list_account_summaries(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -4090,10 +4090,10 @@ def test_list_properties_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () + expected_metadata = () pager = client.list_properties(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -6614,13 +6614,13 @@ def test_list_firebase_links_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_firebase_links(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -8813,13 +8813,13 @@ def test_list_google_ads_links_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_google_ads_links(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -10227,13 +10227,13 @@ def test_list_measurement_protocol_secrets_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = 
client.list_measurement_protocol_secrets(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -13975,13 +13975,13 @@ def test_list_sk_ad_network_conversion_value_schemas_pager( RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_sk_ad_network_conversion_value_schemas(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -14492,13 +14492,13 @@ def test_search_change_history_events_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("account", ""),)), ) pager = client.search_change_history_events(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -17558,13 +17558,13 @@ def test_list_conversion_events_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_conversion_events(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -18554,13 +18554,13 @@ def test_list_display_video360_advertiser_links_pager(transport_name: str = "grp RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_display_video360_advertiser_links(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -20814,13 +20814,13 @@ def test_list_display_video360_advertiser_link_proposals_pager( RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_display_video360_advertiser_link_proposals(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -23784,13 +23784,13 @@ def test_list_custom_dimensions_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_custom_dimensions(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -26050,13 +26050,13 @@ def test_list_custom_metrics_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_custom_metrics(request={}) - assert pager._metadata == metadata + assert pager._metadata == 
expected_metadata results = list(pager) assert len(results) == 6 @@ -29513,13 +29513,13 @@ def test_list_data_streams_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_data_streams(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -30855,13 +30855,13 @@ def test_list_audiences_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_audiences(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -32936,13 +32936,13 @@ def test_list_search_ads360_links_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_search_ads360_links(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -37491,13 +37491,13 @@ def test_list_access_bindings_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_access_bindings(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -39687,13 +39687,13 @@ def test_list_expanded_data_sets_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_expanded_data_sets(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -41902,13 +41902,13 @@ def test_list_channel_groups_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_channel_groups(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -44614,13 +44614,13 @@ def test_list_big_query_links_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_big_query_links(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -48225,13 +48225,13 @@ def test_list_ad_sense_links_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + 
expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_ad_sense_links(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -49221,13 +49221,13 @@ def test_list_event_create_rules_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_event_create_rules(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -52737,13 +52737,13 @@ def test_list_calculated_metrics_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_calculated_metrics(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -54779,13 +54779,13 @@ def test_list_rollup_property_source_links_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_rollup_property_source_links(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -57216,13 +57216,13 @@ def test_list_subproperty_event_filters_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_subproperty_event_filters(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 diff --git a/packages/google-analytics-admin/tests/unit/gapic/admin_v1beta/test_analytics_admin_service.py b/packages/google-analytics-admin/tests/unit/gapic/admin_v1beta/test_analytics_admin_service.py index 6c442938a9e0..1d3c5be88174 100644 --- a/packages/google-analytics-admin/tests/unit/gapic/admin_v1beta/test_analytics_admin_service.py +++ b/packages/google-analytics-admin/tests/unit/gapic/admin_v1beta/test_analytics_admin_service.py @@ -1839,10 +1839,10 @@ def test_list_accounts_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () + expected_metadata = () pager = client.list_accounts(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -3251,10 +3251,10 @@ def test_list_account_summaries_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () + expected_metadata = () pager = client.list_account_summaries(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -4077,10 +4077,10 @@ def test_list_properties_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () + expected_metadata = () pager = client.list_properties(request={}) - assert 
pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -6601,13 +6601,13 @@ def test_list_firebase_links_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_firebase_links(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -8403,13 +8403,13 @@ def test_list_google_ads_links_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_google_ads_links(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -9817,13 +9817,13 @@ def test_list_measurement_protocol_secrets_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_measurement_protocol_secrets(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -11844,13 +11844,13 @@ def test_search_change_history_events_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("account", ""),)), ) pager = client.search_change_history_events(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -14086,13 +14086,13 @@ def test_list_conversion_events_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_conversion_events(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -15531,13 +15531,13 @@ def test_list_custom_dimensions_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_custom_dimensions(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -17797,13 +17797,13 @@ def test_list_custom_metrics_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_custom_metrics(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -21260,13 +21260,13 @@ def 
test_list_data_streams_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_data_streams(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 diff --git a/packages/google-analytics-data/tests/unit/gapic/data_v1alpha/test_alpha_analytics_data.py b/packages/google-analytics-data/tests/unit/gapic/data_v1alpha/test_alpha_analytics_data.py index 39d319f7db75..a71b4530d0b2 100644 --- a/packages/google-analytics-data/tests/unit/gapic/data_v1alpha/test_alpha_analytics_data.py +++ b/packages/google-analytics-data/tests/unit/gapic/data_v1alpha/test_alpha_analytics_data.py @@ -3540,13 +3540,13 @@ def test_list_audience_lists_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_audience_lists(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -4977,13 +4977,13 @@ def test_list_recurring_audience_lists_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_recurring_audience_lists(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -6733,13 +6733,13 @@ def test_list_report_tasks_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_report_tasks(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 diff --git a/packages/google-analytics-data/tests/unit/gapic/data_v1beta/test_beta_analytics_data.py b/packages/google-analytics-data/tests/unit/gapic/data_v1beta/test_beta_analytics_data.py index 6fd7006faa6a..2ee8cfd491fc 100644 --- a/packages/google-analytics-data/tests/unit/gapic/data_v1beta/test_beta_analytics_data.py +++ b/packages/google-analytics-data/tests/unit/gapic/data_v1beta/test_beta_analytics_data.py @@ -4998,13 +4998,13 @@ def test_list_audience_exports_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_audience_exports(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 diff --git a/packages/google-apps-chat/CHANGELOG.md b/packages/google-apps-chat/CHANGELOG.md index 59e95dc1994b..d4e720465a30 100644 --- a/packages/google-apps-chat/CHANGELOG.md +++ b/packages/google-apps-chat/CHANGELOG.md @@ -1,5 +1,14 @@ # Changelog +## 
[0.1.7](https://github.com/googleapis/google-cloud-python/compare/google-apps-chat-v0.1.6...google-apps-chat-v0.1.7) (2024-06-27) + + +### Documentation + +* Update doc for `CreateMembership` in service `ChatService` to support group members ([5c8eaae](https://github.com/googleapis/google-cloud-python/commit/5c8eaae2289427f56c730bbf3e7e78b15a35580a)) +* Update doc for field `group_member` in message `google.chat.v1.Membership` ([5c8eaae](https://github.com/googleapis/google-cloud-python/commit/5c8eaae2289427f56c730bbf3e7e78b15a35580a)) +* Update doc for SetUpSpace in service ChatService to support group members ([5c8eaae](https://github.com/googleapis/google-cloud-python/commit/5c8eaae2289427f56c730bbf3e7e78b15a35580a)) + ## [0.1.6](https://github.com/googleapis/google-cloud-python/compare/google-apps-chat-v0.1.5...google-apps-chat-v0.1.6) (2024-05-16) diff --git a/packages/google-apps-chat/google/apps/chat/gapic_version.py b/packages/google-apps-chat/google/apps/chat/gapic_version.py index 51d2795b9d6b..cf5493b86bbc 100644 --- a/packages/google-apps-chat/google/apps/chat/gapic_version.py +++ b/packages/google-apps-chat/google/apps/chat/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.1.6" # {x-release-please-version} +__version__ = "0.1.7" # {x-release-please-version} diff --git a/packages/google-apps-chat/google/apps/chat_v1/gapic_version.py b/packages/google-apps-chat/google/apps/chat_v1/gapic_version.py index 51d2795b9d6b..cf5493b86bbc 100644 --- a/packages/google-apps-chat/google/apps/chat_v1/gapic_version.py +++ b/packages/google-apps-chat/google/apps/chat_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.1.6" # {x-release-please-version} +__version__ = "0.1.7" # {x-release-please-version} diff --git a/packages/google-apps-chat/google/apps/chat_v1/services/chat_service/async_client.py b/packages/google-apps-chat/google/apps/chat_v1/services/chat_service/async_client.py index 41cc6e33682d..260b253f63b5 100644 --- a/packages/google-apps-chat/google/apps/chat_v1/services/chat_service/async_client.py +++ b/packages/google-apps-chat/google/apps/chat_v1/services/chat_service/async_client.py @@ -1785,6 +1785,18 @@ async def set_up_space( the space by setting the ``membership.member.name`` to ``users/user@example.com`` or ``users/123456789``. + To specify the Google groups to add, add memberships with the + appropriate ``membership.group_member.name``. To add or invite a + Google group, use ``groups/{group}``, where ``{group}`` is the + ``id`` for the group from the Cloud Identity Groups API. For + example, you can use `Cloud Identity Groups lookup + API `__ + to retrieve the ID ``123456789`` for group email + ``group@example.com``, then you can add the group to the space + by setting the ``membership.group_member.name`` to + ``groups/123456789``. Group email is not supported, and Google + groups can only be added as members in named spaces. 
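For orientation, the space-setup flow described in the documentation added above might look roughly like the sketch below. It is not part of this diff; the display name and group ID are placeholders, and it assumes the `google-apps-chat` client is already configured with user authentication.

```python
from google.apps import chat_v1

# Minimal sketch: create a named space and invite a Google group to it.
# "Release planning" and "groups/123456789" are illustrative values only.
client = chat_v1.ChatServiceClient()

request = chat_v1.SetUpSpaceRequest(
    space=chat_v1.Space(
        space_type=chat_v1.Space.SpaceType.SPACE,
        display_name="Release planning",
    ),
    memberships=[
        # Google groups can only be added when Space.spaceType is SPACE.
        chat_v1.Membership(
            group_member=chat_v1.Group(name="groups/123456789"),
        ),
    ],
)

space = client.set_up_space(request=request)
print(space.name)
```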
+ For a named space or group chat, if the caller blocks, or is blocked by some members, or doesn't have permission to add some members, then those members aren't added to the created space. @@ -2367,7 +2379,8 @@ async def create_membership( authentication `__. To specify the member to add, set the ``membership.member.name`` - for the human or app member. + for the human or app member, or set the + ``membership.group_member.name`` for the group member. - To add the calling app to a space or a direct message between two human users, use ``users/app``. Unable to add other apps @@ -2383,6 +2396,18 @@ async def create_membership( ``membership.member.name`` to ``users/user@example.com`` or ``users/123456789``. + - To add or invite a Google group in a named space, use + ``groups/{group}``, where ``{group}`` is the ``id`` for the + group from the Cloud Identity Groups API. For example, you + can use `Cloud Identity Groups lookup + API `__ + to retrieve the ID ``123456789`` for group email + ``group@example.com``, then you can add or invite the group + to a named space by setting the + ``membership.group_member.name`` to ``groups/123456789``. + Group email is not supported, and Google groups can only be + added as members in named spaces. + .. code-block:: python # This snippet has been automatically generated and should be regarded as a diff --git a/packages/google-apps-chat/google/apps/chat_v1/services/chat_service/client.py b/packages/google-apps-chat/google/apps/chat_v1/services/chat_service/client.py index b009e74fc474..793c9f70ac4e 100644 --- a/packages/google-apps-chat/google/apps/chat_v1/services/chat_service/client.py +++ b/packages/google-apps-chat/google/apps/chat_v1/services/chat_service/client.py @@ -2297,6 +2297,18 @@ def set_up_space( the space by setting the ``membership.member.name`` to ``users/user@example.com`` or ``users/123456789``. + To specify the Google groups to add, add memberships with the + appropriate ``membership.group_member.name``. To add or invite a + Google group, use ``groups/{group}``, where ``{group}`` is the + ``id`` for the group from the Cloud Identity Groups API. For + example, you can use `Cloud Identity Groups lookup + API `__ + to retrieve the ID ``123456789`` for group email + ``group@example.com``, then you can add the group to the space + by setting the ``membership.group_member.name`` to + ``groups/123456789``. Group email is not supported, and Google + groups can only be added as members in named spaces. + For a named space or group chat, if the caller blocks, or is blocked by some members, or doesn't have permission to add some members, then those members aren't added to the created space. @@ -2867,7 +2879,8 @@ def create_membership( authentication `__. To specify the member to add, set the ``membership.member.name`` - for the human or app member. + for the human or app member, or set the + ``membership.group_member.name`` for the group member. - To add the calling app to a space or a direct message between two human users, use ``users/app``. Unable to add other apps @@ -2883,6 +2896,18 @@ def create_membership( ``membership.member.name`` to ``users/user@example.com`` or ``users/123456789``. + - To add or invite a Google group in a named space, use + ``groups/{group}``, where ``{group}`` is the ``id`` for the + group from the Cloud Identity Groups API. 
For example, you + can use `Cloud Identity Groups lookup + API `__ + to retrieve the ID ``123456789`` for group email + ``group@example.com``, then you can add or invite the group + to a named space by setting the + ``membership.group_member.name`` to ``groups/123456789``. + Group email is not supported, and Google groups can only be + added as members in named spaces. + .. code-block:: python # This snippet has been automatically generated and should be regarded as a diff --git a/packages/google-apps-chat/google/apps/chat_v1/services/chat_service/transports/grpc.py b/packages/google-apps-chat/google/apps/chat_v1/services/chat_service/transports/grpc.py index d1cc702756dc..91b0dfd24735 100644 --- a/packages/google-apps-chat/google/apps/chat_v1/services/chat_service/transports/grpc.py +++ b/packages/google-apps-chat/google/apps/chat_v1/services/chat_service/transports/grpc.py @@ -700,6 +700,18 @@ def set_up_space(self) -> Callable[[space_setup.SetUpSpaceRequest], space.Space] the space by setting the ``membership.member.name`` to ``users/user@example.com`` or ``users/123456789``. + To specify the Google groups to add, add memberships with the + appropriate ``membership.group_member.name``. To add or invite a + Google group, use ``groups/{group}``, where ``{group}`` is the + ``id`` for the group from the Cloud Identity Groups API. For + example, you can use `Cloud Identity Groups lookup + API `__ + to retrieve the ID ``123456789`` for group email + ``group@example.com``, then you can add the group to the space + by setting the ``membership.group_member.name`` to + ``groups/123456789``. Group email is not supported, and Google + groups can only be added as members in named spaces. + For a named space or group chat, if the caller blocks, or is blocked by some members, or doesn't have permission to add some members, then those members aren't added to the created space. @@ -907,7 +919,8 @@ def create_membership( authentication `__. To specify the member to add, set the ``membership.member.name`` - for the human or app member. + for the human or app member, or set the + ``membership.group_member.name`` for the group member. - To add the calling app to a space or a direct message between two human users, use ``users/app``. Unable to add other apps @@ -923,6 +936,18 @@ def create_membership( ``membership.member.name`` to ``users/user@example.com`` or ``users/123456789``. + - To add or invite a Google group in a named space, use + ``groups/{group}``, where ``{group}`` is the ``id`` for the + group from the Cloud Identity Groups API. For example, you + can use `Cloud Identity Groups lookup + API `__ + to retrieve the ID ``123456789`` for group email + ``group@example.com``, then you can add or invite the group + to a named space by setting the + ``membership.group_member.name`` to ``groups/123456789``. + Group email is not supported, and Google groups can only be + added as members in named spaces. 
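A corresponding sketch for the `CreateMembership` behavior documented above (again illustrative, not taken from this change; the space and group resource names are placeholders):

```python
from google.apps import chat_v1

# Minimal sketch: add a Google group to an existing named space.
# "spaces/AAAAAAAAAAA" and "groups/123456789" are placeholder resource names.
client = chat_v1.ChatServiceClient()

request = chat_v1.CreateMembershipRequest(
    parent="spaces/AAAAAAAAAAA",
    membership=chat_v1.Membership(
        group_member=chat_v1.Group(name="groups/123456789"),
    ),
)

membership = client.create_membership(request=request)
print(membership.name)
```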
+ Returns: Callable[[~.CreateMembershipRequest], ~.Membership]: diff --git a/packages/google-apps-chat/google/apps/chat_v1/services/chat_service/transports/grpc_asyncio.py b/packages/google-apps-chat/google/apps/chat_v1/services/chat_service/transports/grpc_asyncio.py index 4fe301802dba..029860bf1f2c 100644 --- a/packages/google-apps-chat/google/apps/chat_v1/services/chat_service/transports/grpc_asyncio.py +++ b/packages/google-apps-chat/google/apps/chat_v1/services/chat_service/transports/grpc_asyncio.py @@ -714,6 +714,18 @@ def set_up_space( the space by setting the ``membership.member.name`` to ``users/user@example.com`` or ``users/123456789``. + To specify the Google groups to add, add memberships with the + appropriate ``membership.group_member.name``. To add or invite a + Google group, use ``groups/{group}``, where ``{group}`` is the + ``id`` for the group from the Cloud Identity Groups API. For + example, you can use `Cloud Identity Groups lookup + API `__ + to retrieve the ID ``123456789`` for group email + ``group@example.com``, then you can add the group to the space + by setting the ``membership.group_member.name`` to + ``groups/123456789``. Group email is not supported, and Google + groups can only be added as members in named spaces. + For a named space or group chat, if the caller blocks, or is blocked by some members, or doesn't have permission to add some members, then those members aren't added to the created space. @@ -927,7 +939,8 @@ def create_membership( authentication `__. To specify the member to add, set the ``membership.member.name`` - for the human or app member. + for the human or app member, or set the + ``membership.group_member.name`` for the group member. - To add the calling app to a space or a direct message between two human users, use ``users/app``. Unable to add other apps @@ -943,6 +956,18 @@ def create_membership( ``membership.member.name`` to ``users/user@example.com`` or ``users/123456789``. + - To add or invite a Google group in a named space, use + ``groups/{group}``, where ``{group}`` is the ``id`` for the + group from the Cloud Identity Groups API. For example, you + can use `Cloud Identity Groups lookup + API `__ + to retrieve the ID ``123456789`` for group email + ``group@example.com``, then you can add or invite the group + to a named space by setting the + ``membership.group_member.name`` to ``groups/123456789``. + Group email is not supported, and Google groups can only be + added as members in named spaces. + Returns: Callable[[~.CreateMembershipRequest], Awaitable[~.Membership]]: diff --git a/packages/google-apps-chat/google/apps/chat_v1/types/membership.py b/packages/google-apps-chat/google/apps/chat_v1/types/membership.py index 0fafc7201bc3..c3bea5d64d30 100644 --- a/packages/google-apps-chat/google/apps/chat_v1/types/membership.py +++ b/packages/google-apps-chat/google/apps/chat_v1/types/membership.py @@ -73,9 +73,7 @@ class Membership(proto.Message): This field is a member of `oneof`_ ``memberType``. group_member (google.apps.chat_v1.types.Group): The Google Group the membership corresponds - to. Only supports read operations. Other - operations, like creating or updating a - membership, aren't currently supported. + to. This field is a member of `oneof`_ ``memberType``. 
create_time (google.protobuf.timestamp_pb2.Timestamp): diff --git a/packages/google-apps-chat/google/apps/chat_v1/types/space_setup.py b/packages/google-apps-chat/google/apps/chat_v1/types/space_setup.py index 8f7fd902d1af..6f5bcc7a3029 100644 --- a/packages/google-apps-chat/google/apps/chat_v1/types/space_setup.py +++ b/packages/google-apps-chat/google/apps/chat_v1/types/space_setup.py @@ -69,8 +69,9 @@ class SetUpSpaceRequest(proto.Message): Chat app with a different authenticated user returns an error. memberships (MutableSequence[google.apps.chat_v1.types.Membership]): - Optional. The Google Chat users to invite to join the space. - Omit the calling user, as they are added automatically. + Optional. The Google Chat users or groups to invite to join + the space. Omit the calling user, as they are added + automatically. The set currently allows up to 20 memberships (in addition to the caller). @@ -86,6 +87,11 @@ class SetUpSpaceRequest(proto.Message): users from external Google Workspace domains, user's email must be used for ``{user}``. + For Google group membership, the ``Membership.group_member`` + field must contain a ``group`` with ``name`` populated + (format ``groups/{group}``). You can only add Google groups + when setting ``Space.spaceType`` to ``SPACE``. + Optional when setting ``Space.spaceType`` to ``SPACE``. Required when setting ``Space.spaceType`` to ``GROUP_CHAT``, diff --git a/packages/google-apps-chat/noxfile.py b/packages/google-apps-chat/noxfile.py index 1e6cd48d0529..67b7265f7586 100644 --- a/packages/google-apps-chat/noxfile.py +++ b/packages/google-apps-chat/noxfile.py @@ -160,14 +160,28 @@ def install_unittest_dependencies(session, *constraints): session.install("-e", ".", *constraints) -def default(session): +@nox.session(python=UNIT_TEST_PYTHON_VERSIONS) +@nox.parametrize( + "protobuf_implementation", + ["python", "upb", "cpp"], +) +def unit(session, protobuf_implementation): # Install all test dependencies, then install this package in-place. + if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12"): + session.skip("cpp implementation is not supported in python 3.11+") + constraints_path = str( CURRENT_DIRECTORY / "testing" / f"constraints-{session.python}.txt" ) install_unittest_dependencies(session, "-c", constraints_path) + # TODO(https://github.com/googleapis/synthtool/issues/1976): + # Remove the 'cpp' implementation once support for Protobuf 3.x is dropped. + # The 'cpp' implementation requires Protobuf<4. + if protobuf_implementation == "cpp": + session.install("protobuf<4") + # Run py.test against the unit tests. session.run( "py.test", @@ -181,15 +195,12 @@ def default(session): "--cov-fail-under=0", os.path.join("tests", "unit"), *session.posargs, + env={ + "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation, + }, ) -@nox.session(python=UNIT_TEST_PYTHON_VERSIONS) -def unit(session): - """Run the unit test suite.""" - default(session) - - def install_systemtest_dependencies(session, *constraints): # Use pre-release gRPC for system tests. # Exclude version 1.52.0rc1 which has a known issue. 
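The noxfile changes above parametrize the test sessions over the protobuf runtime via the `PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION` environment variable. A quick way to confirm which backend is active inside such a session is sketched below; it relies on an internal protobuf helper, so treat it as a diagnostic aid rather than a stable API.

```python
import os

# The variable must be set before google.protobuf is imported for it to take
# effect; the parametrized nox sessions pass it through the session env.
os.environ.setdefault("PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION", "upb")

from google.protobuf.internal import api_implementation

# Expected to print "upb", "python", or "cpp" depending on the session.
print(api_implementation.Type())
```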
@@ -358,9 +369,16 @@ def docfx(session): @nox.session(python="3.12") -def prerelease_deps(session): +@nox.parametrize( + "protobuf_implementation", + ["python", "upb", "cpp"], +) +def prerelease_deps(session, protobuf_implementation): """Run all tests with prerelease versions of dependencies installed.""" + if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12"): + session.skip("cpp implementation is not supported in python 3.11+") + # Install all dependencies session.install("-e", ".[all, tests, tracing]") unit_deps_all = UNIT_TEST_STANDARD_DEPENDENCIES + UNIT_TEST_EXTERNAL_DEPENDENCIES @@ -397,9 +415,9 @@ def prerelease_deps(session): "protobuf", # dependency of grpc "six", + "grpc-google-iam-v1", "googleapis-common-protos", - # Exclude version 1.52.0rc1 which has a known issue. See https://github.com/grpc/grpc/issues/32163 - "grpcio!=1.52.0rc1", + "grpcio", "grpcio-status", "google-api-core", "google-auth", @@ -425,4 +443,10 @@ def prerelease_deps(session): session.run("python", "-c", "import grpc; print(grpc.__version__)") session.run("python", "-c", "import google.auth; print(google.auth.__version__)") - session.run("py.test", "tests/unit") + session.run( + "py.test", + "tests/unit", + env={ + "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation, + }, + ) diff --git a/packages/google-apps-chat/samples/generated_samples/snippet_metadata_google.chat.v1.json b/packages/google-apps-chat/samples/generated_samples/snippet_metadata_google.chat.v1.json index dd8b86f02e43..a6989b0c4963 100644 --- a/packages/google-apps-chat/samples/generated_samples/snippet_metadata_google.chat.v1.json +++ b/packages/google-apps-chat/samples/generated_samples/snippet_metadata_google.chat.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-apps-chat", - "version": "0.1.6" + "version": "0.1.7" }, "snippets": [ { diff --git a/packages/google-apps-chat/tests/unit/gapic/chat_v1/test_chat_service.py b/packages/google-apps-chat/tests/unit/gapic/chat_v1/test_chat_service.py index 176bbdd70f9f..e2cdc7d4d3fe 100644 --- a/packages/google-apps-chat/tests/unit/gapic/chat_v1/test_chat_service.py +++ b/packages/google-apps-chat/tests/unit/gapic/chat_v1/test_chat_service.py @@ -1957,13 +1957,13 @@ def test_list_messages_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_messages(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -2522,13 +2522,13 @@ def test_list_memberships_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_memberships(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -5159,10 +5159,10 @@ def test_list_spaces_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () + expected_metadata = () pager = client.list_spaces(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -9748,13 +9748,13 @@ def 
test_list_reactions_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_reactions(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 diff --git a/packages/google-apps-events-subscriptions/tests/unit/gapic/events_subscriptions_v1/test_subscriptions_service.py b/packages/google-apps-events-subscriptions/tests/unit/gapic/events_subscriptions_v1/test_subscriptions_service.py index 4d47f716d4a3..0d6a58ad1daf 100644 --- a/packages/google-apps-events-subscriptions/tests/unit/gapic/events_subscriptions_v1/test_subscriptions_service.py +++ b/packages/google-apps-events-subscriptions/tests/unit/gapic/events_subscriptions_v1/test_subscriptions_service.py @@ -2728,10 +2728,10 @@ def test_list_subscriptions_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () + expected_metadata = () pager = client.list_subscriptions(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 diff --git a/packages/google-apps-meet/tests/unit/gapic/meet_v2/test_conference_records_service.py b/packages/google-apps-meet/tests/unit/gapic/meet_v2/test_conference_records_service.py index 8808e658da30..8407eab0d994 100644 --- a/packages/google-apps-meet/tests/unit/gapic/meet_v2/test_conference_records_service.py +++ b/packages/google-apps-meet/tests/unit/gapic/meet_v2/test_conference_records_service.py @@ -1877,10 +1877,10 @@ def test_list_conference_records_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () + expected_metadata = () pager = client.list_conference_records(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -2839,13 +2839,13 @@ def test_list_participants_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_participants(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -3825,13 +3825,13 @@ def test_list_participant_sessions_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_participant_sessions(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -4762,13 +4762,13 @@ def test_list_recordings_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_recordings(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -5695,13 +5695,13 @@ def test_list_transcripts_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - 
metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_transcripts(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -6687,13 +6687,13 @@ def test_list_transcript_entries_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_transcript_entries(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 diff --git a/packages/google-apps-meet/tests/unit/gapic/meet_v2beta/test_conference_records_service.py b/packages/google-apps-meet/tests/unit/gapic/meet_v2beta/test_conference_records_service.py index 552123c80f71..fd62f9332932 100644 --- a/packages/google-apps-meet/tests/unit/gapic/meet_v2beta/test_conference_records_service.py +++ b/packages/google-apps-meet/tests/unit/gapic/meet_v2beta/test_conference_records_service.py @@ -1874,10 +1874,10 @@ def test_list_conference_records_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () + expected_metadata = () pager = client.list_conference_records(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -2836,13 +2836,13 @@ def test_list_participants_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_participants(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -3822,13 +3822,13 @@ def test_list_participant_sessions_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_participant_sessions(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -4759,13 +4759,13 @@ def test_list_recordings_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_recordings(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -5692,13 +5692,13 @@ def test_list_transcripts_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_transcripts(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -6684,13 +6684,13 @@ def test_list_transcript_entries_pager(transport_name: str = "grpc"): 
RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_transcript_entries(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 diff --git a/packages/google-area120-tables/tests/unit/gapic/tables_v1alpha1/test_tables_service.py b/packages/google-area120-tables/tests/unit/gapic/tables_v1alpha1/test_tables_service.py index aa02e7fce7b5..1b00d66422f9 100644 --- a/packages/google-area120-tables/tests/unit/gapic/tables_v1alpha1/test_tables_service.py +++ b/packages/google-area120-tables/tests/unit/gapic/tables_v1alpha1/test_tables_service.py @@ -1752,10 +1752,10 @@ def test_list_tables_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () + expected_metadata = () pager = client.list_tables(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -2535,10 +2535,10 @@ def test_list_workspaces_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () + expected_metadata = () pager = client.list_workspaces(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -3456,13 +3456,13 @@ def test_list_rows_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_rows(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 diff --git a/packages/google-cloud-access-approval/tests/unit/gapic/accessapproval_v1/test_access_approval.py b/packages/google-cloud-access-approval/tests/unit/gapic/accessapproval_v1/test_access_approval.py index 3b689d82117b..7cb8b234acf4 100644 --- a/packages/google-cloud-access-approval/tests/unit/gapic/accessapproval_v1/test_access_approval.py +++ b/packages/google-cloud-access-approval/tests/unit/gapic/accessapproval_v1/test_access_approval.py @@ -1554,13 +1554,13 @@ def test_list_approval_requests_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_approval_requests(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 diff --git a/packages/google-cloud-advisorynotifications/tests/unit/gapic/advisorynotifications_v1/test_advisory_notifications_service.py b/packages/google-cloud-advisorynotifications/tests/unit/gapic/advisorynotifications_v1/test_advisory_notifications_service.py index 3a8d6fb7e160..c340440e6004 100644 --- a/packages/google-cloud-advisorynotifications/tests/unit/gapic/advisorynotifications_v1/test_advisory_notifications_service.py +++ b/packages/google-cloud-advisorynotifications/tests/unit/gapic/advisorynotifications_v1/test_advisory_notifications_service.py @@ -1648,13 +1648,13 @@ def test_list_notifications_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + 
expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_notifications(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 diff --git a/packages/google-cloud-alloydb/tests/unit/gapic/alloydb_v1/test_alloy_db_admin.py b/packages/google-cloud-alloydb/tests/unit/gapic/alloydb_v1/test_alloy_db_admin.py index b8bf54909da5..ce5a1b329b5e 100644 --- a/packages/google-cloud-alloydb/tests/unit/gapic/alloydb_v1/test_alloy_db_admin.py +++ b/packages/google-cloud-alloydb/tests/unit/gapic/alloydb_v1/test_alloy_db_admin.py @@ -1530,13 +1530,13 @@ def test_list_clusters_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_clusters(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -4766,13 +4766,13 @@ def test_list_instances_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_instances(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -8760,13 +8760,13 @@ def test_list_backups_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_backups(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -10910,13 +10910,13 @@ def test_list_supported_database_flags_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_supported_database_flags(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -12289,13 +12289,13 @@ def test_list_users_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_users(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 diff --git a/packages/google-cloud-alloydb/tests/unit/gapic/alloydb_v1alpha/test_alloy_db_admin.py b/packages/google-cloud-alloydb/tests/unit/gapic/alloydb_v1alpha/test_alloy_db_admin.py index 547dd5afab29..b5707760ee55 100644 --- a/packages/google-cloud-alloydb/tests/unit/gapic/alloydb_v1alpha/test_alloy_db_admin.py +++ b/packages/google-cloud-alloydb/tests/unit/gapic/alloydb_v1alpha/test_alloy_db_admin.py @@ -1530,13 +1530,13 @@ def test_list_clusters_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = 
tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_clusters(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -4776,13 +4776,13 @@ def test_list_instances_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_instances(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -8785,13 +8785,13 @@ def test_list_backups_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_backups(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -10945,13 +10945,13 @@ def test_list_supported_database_flags_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_supported_database_flags(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -12346,13 +12346,13 @@ def test_list_users_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_users(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -14436,13 +14436,13 @@ def test_list_databases_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_databases(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 diff --git a/packages/google-cloud-alloydb/tests/unit/gapic/alloydb_v1beta/test_alloy_db_admin.py b/packages/google-cloud-alloydb/tests/unit/gapic/alloydb_v1beta/test_alloy_db_admin.py index 1b94c97902b6..983259a9e6f2 100644 --- a/packages/google-cloud-alloydb/tests/unit/gapic/alloydb_v1beta/test_alloy_db_admin.py +++ b/packages/google-cloud-alloydb/tests/unit/gapic/alloydb_v1beta/test_alloy_db_admin.py @@ -1530,13 +1530,13 @@ def test_list_clusters_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_clusters(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -4771,13 +4771,13 @@ def 
test_list_instances_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_instances(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -8775,13 +8775,13 @@ def test_list_backups_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_backups(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -10930,13 +10930,13 @@ def test_list_supported_database_flags_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_supported_database_flags(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -12331,13 +12331,13 @@ def test_list_users_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_users(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -14421,13 +14421,13 @@ def test_list_databases_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_databases(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 diff --git a/packages/google-cloud-api-gateway/tests/unit/gapic/apigateway_v1/test_api_gateway_service.py b/packages/google-cloud-api-gateway/tests/unit/gapic/apigateway_v1/test_api_gateway_service.py index cdd95a9326ad..325bf4db1305 100644 --- a/packages/google-cloud-api-gateway/tests/unit/gapic/apigateway_v1/test_api_gateway_service.py +++ b/packages/google-cloud-api-gateway/tests/unit/gapic/apigateway_v1/test_api_gateway_service.py @@ -1584,13 +1584,13 @@ def test_list_gateways_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_gateways(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -3672,13 +3672,13 @@ def test_list_apis_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_apis(request={}) - assert pager._metadata == 
metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -5751,13 +5751,13 @@ def test_list_api_configs_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_api_configs(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 diff --git a/packages/google-cloud-api-keys/tests/unit/gapic/api_keys_v2/test_api_keys.py b/packages/google-cloud-api-keys/tests/unit/gapic/api_keys_v2/test_api_keys.py index 87e3cfce77b5..87b9aaef5bfb 100644 --- a/packages/google-cloud-api-keys/tests/unit/gapic/api_keys_v2/test_api_keys.py +++ b/packages/google-cloud-api-keys/tests/unit/gapic/api_keys_v2/test_api_keys.py @@ -1862,13 +1862,13 @@ def test_list_keys_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_keys(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 diff --git a/packages/google-cloud-apigee-connect/tests/unit/gapic/apigeeconnect_v1/test_connection_service.py b/packages/google-cloud-apigee-connect/tests/unit/gapic/apigeeconnect_v1/test_connection_service.py index 1fc86070827c..c6df215fef3f 100644 --- a/packages/google-cloud-apigee-connect/tests/unit/gapic/apigeeconnect_v1/test_connection_service.py +++ b/packages/google-cloud-apigee-connect/tests/unit/gapic/apigeeconnect_v1/test_connection_service.py @@ -1526,13 +1526,13 @@ def test_list_connections_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_connections(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 diff --git a/packages/google-cloud-apigee-registry/tests/unit/gapic/apigee_registry_v1/test_registry.py b/packages/google-cloud-apigee-registry/tests/unit/gapic/apigee_registry_v1/test_registry.py index 9e883ece9ef7..f8aef2ee7f91 100644 --- a/packages/google-cloud-apigee-registry/tests/unit/gapic/apigee_registry_v1/test_registry.py +++ b/packages/google-cloud-apigee-registry/tests/unit/gapic/apigee_registry_v1/test_registry.py @@ -1477,13 +1477,13 @@ def test_list_apis_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_apis(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -3606,13 +3606,13 @@ def test_list_api_versions_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_api_versions(request={}) - assert 
pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -5777,13 +5777,13 @@ def test_list_api_specs_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_api_specs(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -8633,13 +8633,13 @@ def test_list_api_spec_revisions_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("name", ""),)), ) pager = client.list_api_spec_revisions(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -10006,13 +10006,13 @@ def test_list_api_deployments_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_api_deployments(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -12565,13 +12565,13 @@ def test_list_api_deployment_revisions_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("name", ""),)), ) pager = client.list_api_deployment_revisions(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -13916,13 +13916,13 @@ def test_list_artifacts_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_artifacts(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 diff --git a/packages/google-cloud-appengine-admin/tests/unit/gapic/appengine_admin_v1/test_authorized_certificates.py b/packages/google-cloud-appengine-admin/tests/unit/gapic/appengine_admin_v1/test_authorized_certificates.py index 74d0b92e8877..43e715b518ee 100644 --- a/packages/google-cloud-appengine-admin/tests/unit/gapic/appengine_admin_v1/test_authorized_certificates.py +++ b/packages/google-cloud-appengine-admin/tests/unit/gapic/appengine_admin_v1/test_authorized_certificates.py @@ -1540,13 +1540,13 @@ def test_list_authorized_certificates_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_authorized_certificates(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 diff --git 
a/packages/google-cloud-appengine-admin/tests/unit/gapic/appengine_admin_v1/test_authorized_domains.py b/packages/google-cloud-appengine-admin/tests/unit/gapic/appengine_admin_v1/test_authorized_domains.py index 58f46bac3230..a5b9d49160f2 100644 --- a/packages/google-cloud-appengine-admin/tests/unit/gapic/appengine_admin_v1/test_authorized_domains.py +++ b/packages/google-cloud-appengine-admin/tests/unit/gapic/appengine_admin_v1/test_authorized_domains.py @@ -1505,13 +1505,13 @@ def test_list_authorized_domains_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_authorized_domains(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 diff --git a/packages/google-cloud-appengine-admin/tests/unit/gapic/appengine_admin_v1/test_domain_mappings.py b/packages/google-cloud-appengine-admin/tests/unit/gapic/appengine_admin_v1/test_domain_mappings.py index ec4cba840df7..80b0be445740 100644 --- a/packages/google-cloud-appengine-admin/tests/unit/gapic/appengine_admin_v1/test_domain_mappings.py +++ b/packages/google-cloud-appengine-admin/tests/unit/gapic/appengine_admin_v1/test_domain_mappings.py @@ -1479,13 +1479,13 @@ def test_list_domain_mappings_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_domain_mappings(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 diff --git a/packages/google-cloud-appengine-admin/tests/unit/gapic/appengine_admin_v1/test_firewall.py b/packages/google-cloud-appengine-admin/tests/unit/gapic/appengine_admin_v1/test_firewall.py index fc385c9c1d39..3f93afd4e539 100644 --- a/packages/google-cloud-appengine-admin/tests/unit/gapic/appengine_admin_v1/test_firewall.py +++ b/packages/google-cloud-appengine-admin/tests/unit/gapic/appengine_admin_v1/test_firewall.py @@ -1413,13 +1413,13 @@ def test_list_ingress_rules_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_ingress_rules(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 diff --git a/packages/google-cloud-appengine-admin/tests/unit/gapic/appengine_admin_v1/test_instances.py b/packages/google-cloud-appengine-admin/tests/unit/gapic/appengine_admin_v1/test_instances.py index cb19e9e37b41..aec1c8c20e03 100644 --- a/packages/google-cloud-appengine-admin/tests/unit/gapic/appengine_admin_v1/test_instances.py +++ b/packages/google-cloud-appengine-admin/tests/unit/gapic/appengine_admin_v1/test_instances.py @@ -1414,13 +1414,13 @@ def test_list_instances_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_instances(request={}) - assert 
pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 diff --git a/packages/google-cloud-appengine-admin/tests/unit/gapic/appengine_admin_v1/test_services.py b/packages/google-cloud-appengine-admin/tests/unit/gapic/appengine_admin_v1/test_services.py index 0361f4994bdc..204652336ccf 100644 --- a/packages/google-cloud-appengine-admin/tests/unit/gapic/appengine_admin_v1/test_services.py +++ b/packages/google-cloud-appengine-admin/tests/unit/gapic/appengine_admin_v1/test_services.py @@ -1404,13 +1404,13 @@ def test_list_services_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_services(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 diff --git a/packages/google-cloud-appengine-admin/tests/unit/gapic/appengine_admin_v1/test_versions.py b/packages/google-cloud-appengine-admin/tests/unit/gapic/appengine_admin_v1/test_versions.py index 5334d172d3cc..5c9ed576ac00 100644 --- a/packages/google-cloud-appengine-admin/tests/unit/gapic/appengine_admin_v1/test_versions.py +++ b/packages/google-cloud-appengine-admin/tests/unit/gapic/appengine_admin_v1/test_versions.py @@ -1406,13 +1406,13 @@ def test_list_versions_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_versions(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 diff --git a/packages/google-cloud-apphub/tests/unit/gapic/apphub_v1/test_app_hub.py b/packages/google-cloud-apphub/tests/unit/gapic/apphub_v1/test_app_hub.py index 696e4fe9893e..566e0fc90f1c 100644 --- a/packages/google-cloud-apphub/tests/unit/gapic/apphub_v1/test_app_hub.py +++ b/packages/google-cloud-apphub/tests/unit/gapic/apphub_v1/test_app_hub.py @@ -1904,13 +1904,13 @@ def test_list_service_project_attachments_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_service_project_attachments(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -4135,13 +4135,13 @@ def test_list_discovered_services_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_discovered_services(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -5501,13 +5501,13 @@ def test_list_services_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = 
client.list_services(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -7630,13 +7630,13 @@ def test_list_discovered_workloads_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_discovered_workloads(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -8996,13 +8996,13 @@ def test_list_workloads_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_workloads(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -11121,13 +11121,13 @@ def test_list_applications_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_applications(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 diff --git a/packages/google-cloud-artifact-registry/tests/unit/gapic/artifactregistry_v1/test_artifact_registry.py b/packages/google-cloud-artifact-registry/tests/unit/gapic/artifactregistry_v1/test_artifact_registry.py index ae49ea0f8d10..a2c672d5e00c 100644 --- a/packages/google-cloud-artifact-registry/tests/unit/gapic/artifactregistry_v1/test_artifact_registry.py +++ b/packages/google-cloud-artifact-registry/tests/unit/gapic/artifactregistry_v1/test_artifact_registry.py @@ -1612,13 +1612,13 @@ def test_list_docker_images_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_docker_images(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -2594,13 +2594,13 @@ def test_list_maven_artifacts_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_maven_artifacts(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -3594,13 +3594,13 @@ def test_list_npm_packages_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_npm_packages(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -4565,13 +4565,13 @@ def test_list_python_packages_pager(transport_name: str = "grpc"): 
RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_python_packages(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -6170,13 +6170,13 @@ def test_list_repositories_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_repositories(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -8419,13 +8419,13 @@ def test_list_packages_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_packages(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -9721,13 +9721,13 @@ def test_list_versions_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_versions(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -11425,13 +11425,13 @@ def test_list_files_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_files(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -12359,13 +12359,13 @@ def test_list_tags_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_tags(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 diff --git a/packages/google-cloud-artifact-registry/tests/unit/gapic/artifactregistry_v1beta2/test_artifact_registry.py b/packages/google-cloud-artifact-registry/tests/unit/gapic/artifactregistry_v1beta2/test_artifact_registry.py index 243ae43a8ad3..707d07f60eb7 100644 --- a/packages/google-cloud-artifact-registry/tests/unit/gapic/artifactregistry_v1beta2/test_artifact_registry.py +++ b/packages/google-cloud-artifact-registry/tests/unit/gapic/artifactregistry_v1beta2/test_artifact_registry.py @@ -2214,13 +2214,13 @@ def test_list_repositories_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_repositories(request={}) - assert pager._metadata == metadata + 
assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -4423,13 +4423,13 @@ def test_list_packages_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_packages(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -5725,13 +5725,13 @@ def test_list_versions_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_versions(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -7025,13 +7025,13 @@ def test_list_files_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_files(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -7959,13 +7959,13 @@ def test_list_tags_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_tags(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 diff --git a/packages/google-cloud-asset/tests/unit/gapic/asset_v1/test_asset_service.py b/packages/google-cloud-asset/tests/unit/gapic/asset_v1/test_asset_service.py index fbc39d3636b1..d7e30e55ddfe 100644 --- a/packages/google-cloud-asset/tests/unit/gapic/asset_v1/test_asset_service.py +++ b/packages/google-cloud-asset/tests/unit/gapic/asset_v1/test_asset_service.py @@ -1803,13 +1803,13 @@ def test_list_assets_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_assets(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -4575,13 +4575,13 @@ def test_search_all_resources_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("scope", ""),)), ) pager = client.search_all_resources(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -5182,13 +5182,13 @@ def test_search_all_iam_policies_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("scope", ""),)), ) pager = 
client.search_all_iam_policies(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -7778,13 +7778,13 @@ def test_list_saved_queries_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_saved_queries(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -9480,13 +9480,13 @@ def test_analyze_org_policies_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("scope", ""),)), ) pager = client.analyze_org_policies(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -10103,13 +10103,13 @@ def test_analyze_org_policy_governed_containers_pager(transport_name: str = "grp RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("scope", ""),)), ) pager = client.analyze_org_policy_governed_containers(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -10732,13 +10732,13 @@ def test_analyze_org_policy_governed_assets_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("scope", ""),)), ) pager = client.analyze_org_policy_governed_assets(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 diff --git a/packages/google-cloud-asset/tests/unit/gapic/asset_v1p1beta1/test_asset_service.py b/packages/google-cloud-asset/tests/unit/gapic/asset_v1p1beta1/test_asset_service.py index 63ec17c594f7..d421305e1342 100644 --- a/packages/google-cloud-asset/tests/unit/gapic/asset_v1p1beta1/test_asset_service.py +++ b/packages/google-cloud-asset/tests/unit/gapic/asset_v1p1beta1/test_asset_service.py @@ -1551,13 +1551,13 @@ def test_search_all_resources_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("scope", ""),)), ) pager = client.search_all_resources(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -2156,13 +2156,13 @@ def test_search_all_iam_policies_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("scope", ""),)), ) pager = client.search_all_iam_policies(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 diff --git 
a/packages/google-cloud-asset/tests/unit/gapic/asset_v1p5beta1/test_asset_service.py b/packages/google-cloud-asset/tests/unit/gapic/asset_v1p5beta1/test_asset_service.py index 4d47204eb305..a093eb7f1bb1 100644 --- a/packages/google-cloud-asset/tests/unit/gapic/asset_v1p5beta1/test_asset_service.py +++ b/packages/google-cloud-asset/tests/unit/gapic/asset_v1p5beta1/test_asset_service.py @@ -1503,13 +1503,13 @@ def test_list_assets_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_assets(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 diff --git a/packages/google-cloud-assured-workloads/tests/unit/gapic/assuredworkloads_v1/test_assured_workloads_service.py b/packages/google-cloud-assured-workloads/tests/unit/gapic/assuredworkloads_v1/test_assured_workloads_service.py index cf70a81b883b..43d367655a83 100644 --- a/packages/google-cloud-assured-workloads/tests/unit/gapic/assuredworkloads_v1/test_assured_workloads_service.py +++ b/packages/google-cloud-assured-workloads/tests/unit/gapic/assuredworkloads_v1/test_assured_workloads_service.py @@ -3512,13 +3512,13 @@ def test_list_workloads_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_workloads(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -4014,10 +4014,10 @@ def test_list_violations_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () + expected_metadata = () pager = client.list_violations(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 diff --git a/packages/google-cloud-assured-workloads/tests/unit/gapic/assuredworkloads_v1beta1/test_assured_workloads_service.py b/packages/google-cloud-assured-workloads/tests/unit/gapic/assuredworkloads_v1beta1/test_assured_workloads_service.py index 283e9b348bad..46fb11dae5ed 100644 --- a/packages/google-cloud-assured-workloads/tests/unit/gapic/assuredworkloads_v1beta1/test_assured_workloads_service.py +++ b/packages/google-cloud-assured-workloads/tests/unit/gapic/assuredworkloads_v1beta1/test_assured_workloads_service.py @@ -3657,10 +3657,10 @@ def test_list_workloads_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () + expected_metadata = () pager = client.list_workloads(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 diff --git a/packages/google-cloud-automl/tests/unit/gapic/automl_v1/test_auto_ml.py b/packages/google-cloud-automl/tests/unit/gapic/automl_v1/test_auto_ml.py index 36e14ddd4657..b5c28f3276f0 100644 --- a/packages/google-cloud-automl/tests/unit/gapic/automl_v1/test_auto_ml.py +++ b/packages/google-cloud-automl/tests/unit/gapic/automl_v1/test_auto_ml.py @@ -2273,13 +2273,13 @@ def test_list_datasets_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + 
( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_datasets(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -5601,13 +5601,13 @@ def test_list_models_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_models(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -8530,13 +8530,13 @@ def test_list_model_evaluations_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_model_evaluations(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 diff --git a/packages/google-cloud-automl/tests/unit/gapic/automl_v1beta1/test_auto_ml.py b/packages/google-cloud-automl/tests/unit/gapic/automl_v1beta1/test_auto_ml.py index 7aa6cc90183a..514f6592909d 100644 --- a/packages/google-cloud-automl/tests/unit/gapic/automl_v1beta1/test_auto_ml.py +++ b/packages/google-cloud-automl/tests/unit/gapic/automl_v1beta1/test_auto_ml.py @@ -2299,13 +2299,13 @@ def test_list_datasets_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_datasets(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -5227,13 +5227,13 @@ def test_list_table_specs_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_table_specs(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -6601,13 +6601,13 @@ def test_list_column_specs_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_column_specs(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -8348,13 +8348,13 @@ def test_list_models_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_models(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -11274,13 +11274,13 @@ def test_list_model_evaluations_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + 
expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_model_evaluations(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 diff --git a/packages/google-cloud-backupdr/CHANGELOG.md b/packages/google-cloud-backupdr/CHANGELOG.md index 5e01c888ad47..5597b23ac5e4 100644 --- a/packages/google-cloud-backupdr/CHANGELOG.md +++ b/packages/google-cloud-backupdr/CHANGELOG.md @@ -1,5 +1,14 @@ # Changelog +## [0.1.1](https://github.com/googleapis/google-cloud-python/compare/google-cloud-backupdr-v0.1.0...google-cloud-backupdr-v0.1.1) (2024-06-24) + + +### Features + +* A new field `satisfies_pzi` is added ([9e20534](https://github.com/googleapis/google-cloud-python/commit/9e205344d6b24d6cedced1d9c177be7652f54267)) +* A new field `satisfies_pzs` is added ([9e20534](https://github.com/googleapis/google-cloud-python/commit/9e205344d6b24d6cedced1d9c177be7652f54267)) +* Updated documentation URI ([9e20534](https://github.com/googleapis/google-cloud-python/commit/9e205344d6b24d6cedced1d9c177be7652f54267)) + ## 0.1.0 (2024-04-15) diff --git a/packages/google-cloud-backupdr/README.rst b/packages/google-cloud-backupdr/README.rst index 68361e42609c..9a5d0c3be9b3 100644 --- a/packages/google-cloud-backupdr/README.rst +++ b/packages/google-cloud-backupdr/README.rst @@ -15,7 +15,7 @@ Python Client for Backup and DR Service API .. |versions| image:: https://img.shields.io/pypi/pyversions/google-cloud-backupdr.svg :target: https://pypi.org/project/google-cloud-backupdr/ .. _Backup and DR Service API: https://cloud.google.com/backup-disaster-recovery/docs/concepts/backup-dr -.. _Client Library Documentation: https://cloud.google.com/python/docs/reference/google-cloud-backupdr/latest/summary_overview +.. _Client Library Documentation: https://cloud.google.com/python/docs/reference/backupdr/latest .. _Product Documentation: https://cloud.google.com/backup-disaster-recovery/docs/concepts/backup-dr Quick Start diff --git a/packages/google-cloud-backupdr/docs/index.rst b/packages/google-cloud-backupdr/docs/index.rst index 845a3db4bfd7..54f18b02ce65 100644 --- a/packages/google-cloud-backupdr/docs/index.rst +++ b/packages/google-cloud-backupdr/docs/index.rst @@ -25,4 +25,4 @@ For a list of all ``google-cloud-backupdr`` releases: .. toctree:: :hidden: - summary_overview.md \ No newline at end of file + summary_overview.md diff --git a/packages/google-cloud-backupdr/google/cloud/backupdr/gapic_version.py b/packages/google-cloud-backupdr/google/cloud/backupdr/gapic_version.py index 558c8aab67c5..0c7cc68730c4 100644 --- a/packages/google-cloud-backupdr/google/cloud/backupdr/gapic_version.py +++ b/packages/google-cloud-backupdr/google/cloud/backupdr/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "0.0.0" # {x-release-please-version} +__version__ = "0.1.1" # {x-release-please-version} diff --git a/packages/google-cloud-backupdr/google/cloud/backupdr_v1/gapic_version.py b/packages/google-cloud-backupdr/google/cloud/backupdr_v1/gapic_version.py index 558c8aab67c5..0c7cc68730c4 100644 --- a/packages/google-cloud-backupdr/google/cloud/backupdr_v1/gapic_version.py +++ b/packages/google-cloud-backupdr/google/cloud/backupdr_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.0.0" # {x-release-please-version} +__version__ = "0.1.1" # {x-release-please-version} diff --git a/packages/google-cloud-backupdr/google/cloud/backupdr_v1/services/backup_dr/async_client.py b/packages/google-cloud-backupdr/google/cloud/backupdr_v1/services/backup_dr/async_client.py index 929e1db9d19d..af0b32a1b1da 100644 --- a/packages/google-cloud-backupdr/google/cloud/backupdr_v1/services/backup_dr/async_client.py +++ b/packages/google-cloud-backupdr/google/cloud/backupdr_v1/services/backup_dr/async_client.py @@ -51,6 +51,7 @@ from google.longrunning import operations_pb2 # type: ignore from google.protobuf import empty_pb2 # type: ignore from google.protobuf import timestamp_pb2 # type: ignore +from google.protobuf import wrappers_pb2 # type: ignore from google.cloud.backupdr_v1.services.backup_dr import pagers from google.cloud.backupdr_v1.types import backupdr diff --git a/packages/google-cloud-backupdr/google/cloud/backupdr_v1/services/backup_dr/client.py b/packages/google-cloud-backupdr/google/cloud/backupdr_v1/services/backup_dr/client.py index 17975d31958a..51c24c83cd15 100644 --- a/packages/google-cloud-backupdr/google/cloud/backupdr_v1/services/backup_dr/client.py +++ b/packages/google-cloud-backupdr/google/cloud/backupdr_v1/services/backup_dr/client.py @@ -56,6 +56,7 @@ from google.longrunning import operations_pb2 # type: ignore from google.protobuf import empty_pb2 # type: ignore from google.protobuf import timestamp_pb2 # type: ignore +from google.protobuf import wrappers_pb2 # type: ignore from google.cloud.backupdr_v1.services.backup_dr import pagers from google.cloud.backupdr_v1.types import backupdr diff --git a/packages/google-cloud-backupdr/google/cloud/backupdr_v1/services/backup_dr/transports/rest.py b/packages/google-cloud-backupdr/google/cloud/backupdr_v1/services/backup_dr/transports/rest.py index 38d3abe2fa38..621153ce0574 100644 --- a/packages/google-cloud-backupdr/google/cloud/backupdr_v1/services/backup_dr/transports/rest.py +++ b/packages/google-cloud-backupdr/google/cloud/backupdr_v1/services/backup_dr/transports/rest.py @@ -1256,11 +1256,6 @@ def __call__( "uri": "/v1/{resource=projects/*/locations/*/managementServers/*}:testIamPermissions", "body": "*", }, - { - "method": "post", - "uri": "/v1/{resource=projects/*/locations/*/backupVaults/*}:testIamPermissions", - "body": "*", - }, ] request, metadata = self._interceptor.pre_test_iam_permissions( diff --git a/packages/google-cloud-backupdr/google/cloud/backupdr_v1/types/backupdr.py b/packages/google-cloud-backupdr/google/cloud/backupdr_v1/types/backupdr.py index a1aa16c5f3a0..07ad09a753bc 100644 --- a/packages/google-cloud-backupdr/google/cloud/backupdr_v1/types/backupdr.py +++ b/packages/google-cloud-backupdr/google/cloud/backupdr_v1/types/backupdr.py @@ -18,6 +18,7 @@ from typing import MutableMapping, MutableSequence from google.protobuf import timestamp_pb2 # type: ignore +from google.protobuf import 
wrappers_pb2 # type: ignore import proto # type: ignore __protobuf__ = proto.module( @@ -204,6 +205,10 @@ class ManagementServer(proto.Message): Output only. The hostname or ip address of the exposed AGM endpoints, used by BAs to connect to BA proxy. + satisfies_pzs (google.protobuf.wrappers_pb2.BoolValue): + Output only. Reserved for future use. + satisfies_pzi (bool): + Output only. Reserved for future use. """ class InstanceType(proto.Enum): @@ -325,6 +330,15 @@ class InstanceState(proto.Enum): proto.STRING, number=18, ) + satisfies_pzs: wrappers_pb2.BoolValue = proto.Field( + proto.MESSAGE, + number=19, + message=wrappers_pb2.BoolValue, + ) + satisfies_pzi: bool = proto.Field( + proto.BOOL, + number=20, + ) class ListManagementServersRequest(proto.Message): diff --git a/packages/google-cloud-backupdr/samples/generated_samples/snippet_metadata_google.cloud.backupdr.v1.json b/packages/google-cloud-backupdr/samples/generated_samples/snippet_metadata_google.cloud.backupdr.v1.json index ff879435143f..e1e9b3de673d 100644 --- a/packages/google-cloud-backupdr/samples/generated_samples/snippet_metadata_google.cloud.backupdr.v1.json +++ b/packages/google-cloud-backupdr/samples/generated_samples/snippet_metadata_google.cloud.backupdr.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-backupdr", - "version": "0.1.0" + "version": "0.1.1" }, "snippets": [ { diff --git a/packages/google-cloud-backupdr/tests/unit/gapic/backupdr_v1/test_backup_dr.py b/packages/google-cloud-backupdr/tests/unit/gapic/backupdr_v1/test_backup_dr.py index 2c25609353fe..062194e161f4 100644 --- a/packages/google-cloud-backupdr/tests/unit/gapic/backupdr_v1/test_backup_dr.py +++ b/packages/google-cloud-backupdr/tests/unit/gapic/backupdr_v1/test_backup_dr.py @@ -50,6 +50,7 @@ from google.protobuf import empty_pb2 # type: ignore from google.protobuf import json_format from google.protobuf import timestamp_pb2 # type: ignore +from google.protobuf import wrappers_pb2 # type: ignore import grpc from grpc.experimental import aio from proto.marshal.rules import wrappers @@ -1518,13 +1519,13 @@ def test_list_management_servers_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_management_servers(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -1707,6 +1708,7 @@ def test_get_management_server(request_type, transport: str = "grpc"): etag="etag_value", oauth2_client_id="oauth2_client_id_value", ba_proxy_uri=["ba_proxy_uri_value"], + satisfies_pzi=True, ) response = client.get_management_server(request) @@ -1725,6 +1727,7 @@ def test_get_management_server(request_type, transport: str = "grpc"): assert response.etag == "etag_value" assert response.oauth2_client_id == "oauth2_client_id_value" assert response.ba_proxy_uri == ["ba_proxy_uri_value"] + assert response.satisfies_pzi is True def test_get_management_server_empty_call(): @@ -1841,6 +1844,7 @@ async def test_get_management_server_empty_call_async(): etag="etag_value", oauth2_client_id="oauth2_client_id_value", ba_proxy_uri=["ba_proxy_uri_value"], + satisfies_pzi=True, ) ) response = await client.get_management_server() @@ -1922,6 +1926,7 @@ async def test_get_management_server_async( etag="etag_value", oauth2_client_id="oauth2_client_id_value", 
ba_proxy_uri=["ba_proxy_uri_value"], + satisfies_pzi=True, ) ) response = await client.get_management_server(request) @@ -1941,6 +1946,7 @@ async def test_get_management_server_async( assert response.etag == "etag_value" assert response.oauth2_client_id == "oauth2_client_id_value" assert response.ba_proxy_uri == ["ba_proxy_uri_value"] + assert response.satisfies_pzi is True @pytest.mark.asyncio @@ -3323,6 +3329,7 @@ def test_get_management_server_rest(request_type): etag="etag_value", oauth2_client_id="oauth2_client_id_value", ba_proxy_uri=["ba_proxy_uri_value"], + satisfies_pzi=True, ) # Wrap the value into a proper Response obj @@ -3345,6 +3352,7 @@ def test_get_management_server_rest(request_type): assert response.etag == "etag_value" assert response.oauth2_client_id == "oauth2_client_id_value" assert response.ba_proxy_uri == ["ba_proxy_uri_value"] + assert response.satisfies_pzi is True def test_get_management_server_rest_use_cached_wrapped_rpc(): @@ -3653,6 +3661,8 @@ def test_create_management_server_rest(request_type): "third_party_oauth2_client_id": "third_party_oauth2_client_id_value", }, "ba_proxy_uri": ["ba_proxy_uri_value1", "ba_proxy_uri_value2"], + "satisfies_pzs": {"value": True}, + "satisfies_pzi": True, } # The version of a generated dependency at test runtime may differ from the version used during generation. # Delete any fields which are not present in the current runtime dependency diff --git a/packages/google-cloud-bare-metal-solution/tests/unit/gapic/bare_metal_solution_v2/test_bare_metal_solution.py b/packages/google-cloud-bare-metal-solution/tests/unit/gapic/bare_metal_solution_v2/test_bare_metal_solution.py index 59a0b41c4194..1a1587f0ac7f 100644 --- a/packages/google-cloud-bare-metal-solution/tests/unit/gapic/bare_metal_solution_v2/test_bare_metal_solution.py +++ b/packages/google-cloud-bare-metal-solution/tests/unit/gapic/bare_metal_solution_v2/test_bare_metal_solution.py @@ -1602,13 +1602,13 @@ def test_list_instances_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_instances(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -5662,13 +5662,13 @@ def test_list_ssh_keys_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_ssh_keys(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -6975,13 +6975,13 @@ def test_list_volumes_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_volumes(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -9630,13 +9630,13 @@ def test_list_networks_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( 
gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_networks(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -13005,13 +13005,13 @@ def test_list_volume_snapshots_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_volume_snapshots(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -13988,13 +13988,13 @@ def test_list_luns_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_luns(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -15318,13 +15318,13 @@ def test_list_nfs_shares_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_nfs_shares(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -17443,13 +17443,13 @@ def test_list_provisioning_quotas_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_provisioning_quotas(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -20175,13 +20175,13 @@ def test_list_os_images_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_os_images(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 diff --git a/packages/google-cloud-batch/CHANGELOG.md b/packages/google-cloud-batch/CHANGELOG.md index 7231d8c364c9..13a9520187db 100644 --- a/packages/google-cloud-batch/CHANGELOG.md +++ b/packages/google-cloud-batch/CHANGELOG.md @@ -1,5 +1,12 @@ # Changelog +## [0.17.21](https://github.com/googleapis/google-cloud-python/compare/google-cloud-batch-v0.17.20...google-cloud-batch-v0.17.21) (2024-06-05) + + +### Documentation + +* Documentation improvements ([7e19b0e](https://github.com/googleapis/google-cloud-python/commit/7e19b0e6a16ce47b588613fa806ee6cb7f2fcb86)) + ## [0.17.20](https://github.com/googleapis/google-cloud-python/compare/google-cloud-batch-v0.17.19...google-cloud-batch-v0.17.20) (2024-05-16) diff --git a/packages/google-cloud-batch/README.rst b/packages/google-cloud-batch/README.rst index 
8b821ed45b7f..9104cf250e84 100644 --- a/packages/google-cloud-batch/README.rst +++ b/packages/google-cloud-batch/README.rst @@ -15,7 +15,7 @@ Python Client for Cloud Batch .. |versions| image:: https://img.shields.io/pypi/pyversions/google-cloud-batch.svg :target: https://pypi.org/project/google-cloud-batch/ .. _Cloud Batch: https://cloud.google.com/batch/docs -.. _Client Library Documentation: https://cloud.google.com/python/docs/reference/batch/latest +.. _Client Library Documentation: https://cloud.google.com/python/docs/reference/batch/latest/summary_overview .. _Product Documentation: https://cloud.google.com/batch/docs Quick Start diff --git a/packages/google-cloud-batch/docs/index.rst b/packages/google-cloud-batch/docs/index.rst index 9d3b4fd20106..612800979775 100644 --- a/packages/google-cloud-batch/docs/index.rst +++ b/packages/google-cloud-batch/docs/index.rst @@ -32,3 +32,8 @@ For a list of all ``google-cloud-batch`` releases: :maxdepth: 2 CHANGELOG + +.. toctree:: + :hidden: + + summary_overview.md diff --git a/packages/google-cloud-batch/docs/summary_overview.md b/packages/google-cloud-batch/docs/summary_overview.md new file mode 100644 index 000000000000..856f7d27e51c --- /dev/null +++ b/packages/google-cloud-batch/docs/summary_overview.md @@ -0,0 +1,22 @@ +[ +This is a templated file. Adding content to this file may result in it being +reverted. Instead, if you want to place additional content, create an +"overview_content.md" file in `docs/` directory. The Sphinx tool will +pick up on the content and merge the content. +]: # + +# Cloud Batch API + +Overview of the APIs available for Cloud Batch API. + +## All entries + +Classes, methods and properties & attributes for +Cloud Batch API. + +[classes](https://cloud.google.com/python/docs/reference/batch/latest/summary_class.html) + +[methods](https://cloud.google.com/python/docs/reference/batch/latest/summary_method.html) + +[properties and +attributes](https://cloud.google.com/python/docs/reference/batch/latest/summary_property.html) diff --git a/packages/google-cloud-batch/google/cloud/batch/gapic_version.py b/packages/google-cloud-batch/google/cloud/batch/gapic_version.py index 62461ee0e7a5..558c8aab67c5 100644 --- a/packages/google-cloud-batch/google/cloud/batch/gapic_version.py +++ b/packages/google-cloud-batch/google/cloud/batch/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.17.20" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-batch/google/cloud/batch_v1/gapic_version.py b/packages/google-cloud-batch/google/cloud/batch_v1/gapic_version.py index 62461ee0e7a5..558c8aab67c5 100644 --- a/packages/google-cloud-batch/google/cloud/batch_v1/gapic_version.py +++ b/packages/google-cloud-batch/google/cloud/batch_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "0.17.20" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-batch/google/cloud/batch_v1/types/job.py b/packages/google-cloud-batch/google/cloud/batch_v1/types/job.py index 1dd92c658cf7..6777c2c1b6f8 100644 --- a/packages/google-cloud-batch/google/cloud/batch_v1/types/job.py +++ b/packages/google-cloud-batch/google/cloud/batch_v1/types/job.py @@ -342,11 +342,19 @@ class JobNotification(proto.Message): Attributes: pubsub_topic (str): - The Pub/Sub topic where notifications like the job state - changes will be published. The topic must exist in the same - project as the job and billings will be charged to this - project. If not specified, no Pub/Sub messages will be sent. - Topic format: ``projects/{project}/topics/{topic}``. + The Pub/Sub topic where notifications for the job, like + state changes, will be published. If undefined, no Pub/Sub + notifications are sent for this job. + + Specify the topic using the following format: + ``projects/{project}/topics/{topic}``. Notably, if you want + to specify a Pub/Sub topic that is in a different project + than the job, your administrator must grant your project's + Batch service agent permission to publish to that topic. + + For more information about configuring Pub/Sub notifications + for a job, see + https://cloud.google.com/batch/docs/enable-notifications. message (google.cloud.batch_v1.types.JobNotification.Message): The attribute requirements of messages to be sent to this Pub/Sub topic. Without this field, @@ -556,7 +564,10 @@ class Disk(proto.Message): Disk type as shown in ``gcloud compute disk-types list``. For example, local SSD uses type "local-ssd". Persistent disks and boot disks use "pd-balanced", "pd-extreme", - "pd-ssd" or "pd-standard". + "pd-ssd" or "pd-standard". If not specified, "pd-standard" + will be used as the default type for non-boot disks, + "pd-balanced" will be used as the default type for boot + disks. size_gb (int): Disk size in GB. @@ -787,16 +798,20 @@ class InstancePolicyOrTemplate(proto.Message): This field is a member of `oneof`_ ``policy_template``. install_gpu_drivers (bool): - Set this field true if users want Batch to help fetch - drivers from a third party location and install them for - GPUs specified in policy.accelerators or instance_template - on their behalf. Default is false. + Set this field true if you want Batch to help fetch drivers + from a third party location and install them for GPUs + specified in ``policy.accelerators`` or + ``instance_template`` on your behalf. Default is false. For Container-Optimized Image cases, Batch will install the accelerator driver following milestones of https://cloud.google.com/container-optimized-os/docs/release-notes. For non Container-Optimized Image cases, following https://github.com/GoogleCloudPlatform/compute-gpu-installation/blob/main/linux/install_gpu_driver.py. + install_ops_agent (bool): + Optional. Set this field true if you want + Batch to install Ops Agent on your behalf. + Default is false. """ policy: "AllocationPolicy.InstancePolicy" = proto.Field( @@ -814,6 +829,10 @@ class InstancePolicyOrTemplate(proto.Message): proto.BOOL, number=3, ) + install_ops_agent: bool = proto.Field( + proto.BOOL, + number=4, + ) class NetworkInterface(proto.Message): r"""A network interface. 
diff --git a/packages/google-cloud-batch/google/cloud/batch_v1/types/task.py b/packages/google-cloud-batch/google/cloud/batch_v1/types/task.py index 0c9500c3ed13..770048a9d2d0 100644 --- a/packages/google-cloud-batch/google/cloud/batch_v1/types/task.py +++ b/packages/google-cloud-batch/google/cloud/batch_v1/types/task.py @@ -157,20 +157,19 @@ class TaskExecution(proto.Message): exit_code (int): The exit code of a finished task. - If the task succeeded, the exit code will be 0. - If the task failed but not due to the following - reasons, the exit code will be 50000. + If the task succeeded, the exit code will be 0. If the task + failed but not due to the following reasons, the exit code + will be 50000. Otherwise, it can be from different sources: - - Batch known failures as - https://cloud.google.com/batch/docs/troubleshooting#reserved-exit-codes. - - Batch runnable execution failures: You can - rely on Batch logs for further diagnose: - https://cloud.google.com/batch/docs/analyze-job-using-logs. - If there are multiple runnables failures, - Batch only exposes the first error caught for - now. + - Batch known failures: + https://cloud.google.com/batch/docs/troubleshooting#reserved-exit-codes. + - Batch runnable execution failures; you can rely on Batch + logs to further diagnose: + https://cloud.google.com/batch/docs/analyze-job-using-logs. + If there are multiple runnables failures, Batch only + exposes the first error. """ exit_code: int = proto.Field( @@ -564,10 +563,16 @@ class TaskSpec(proto.Message): compute_resource (google.cloud.batch_v1.types.ComputeResource): ComputeResource requirements. max_run_duration (google.protobuf.duration_pb2.Duration): - Maximum duration the task should run. The task will be - killed and marked as FAILED if over this limit. The valid - value range for max_run_duration in seconds is [0, - 315576000000.999999999], + Maximum duration the task should run before being + automatically retried (if enabled) or automatically failed. + Format the value of this field as a time limit in seconds + followed by ``s``—for example, ``3600s`` for 1 hour. The + field accepts any value between 0 and the maximum listed for + the ``Duration`` field type at + https://protobuf.dev/reference/protobuf/google.protobuf/#duration; + however, the actual maximum run time for a job will be + limited to the maximum run time for a job listed at + https://cloud.google.com/batch/quotas#max-job-duration. max_retry_count (int): Maximum number of retries on failures. The default, 0, which means never retry. The valid value range is [0, 10]. diff --git a/packages/google-cloud-batch/google/cloud/batch_v1/types/volume.py b/packages/google-cloud-batch/google/cloud/batch_v1/types/volume.py index 3d37e85ab4fd..897bc0da585c 100644 --- a/packages/google-cloud-batch/google/cloud/batch_v1/types/volume.py +++ b/packages/google-cloud-batch/google/cloud/batch_v1/types/volume.py @@ -62,19 +62,21 @@ class Volume(proto.Message): The mount path for the volume, e.g. /mnt/disks/share. 
mount_options (MutableSequence[str]): - For Google Cloud Storage (GCS), mount options - are the options supported by the gcsfuse tool - (https://github.com/GoogleCloudPlatform/gcsfuse). - For existing persistent disks, mount options - provided by the mount command - (https://man7.org/linux/man-pages/man8/mount.8.html) - except writing are supported. This is due to - restrictions of multi-writer mode - (https://cloud.google.com/compute/docs/disks/sharing-disks-between-vms). - For other attached disks and Network File System - (NFS), mount options are these supported by the - mount command - (https://man7.org/linux/man-pages/man8/mount.8.html). + Mount options vary based on the type of storage volume: + + - For a Cloud Storage bucket, all the mount options + provided by the ```gcsfuse`` + tool `__ + are supported. + - For an existing persistent disk, all mount options + provided by the ```mount`` + command `__ + except writing are supported. This is due to restrictions + of `multi-writer + mode `__. + - For any other disk or a Network File System (NFS), all + the mount options provided by the ``mount`` command are + supported. """ nfs: "NFS" = proto.Field( diff --git a/packages/google-cloud-batch/google/cloud/batch_v1alpha/gapic_version.py b/packages/google-cloud-batch/google/cloud/batch_v1alpha/gapic_version.py index 62461ee0e7a5..558c8aab67c5 100644 --- a/packages/google-cloud-batch/google/cloud/batch_v1alpha/gapic_version.py +++ b/packages/google-cloud-batch/google/cloud/batch_v1alpha/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.17.20" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-batch/google/cloud/batch_v1alpha/services/batch_service/async_client.py b/packages/google-cloud-batch/google/cloud/batch_v1alpha/services/batch_service/async_client.py index 145a1137ff6c..b38df06ca804 100644 --- a/packages/google-cloud-batch/google/cloud/batch_v1alpha/services/batch_service/async_client.py +++ b/packages/google-cloud-batch/google/cloud/batch_v1alpha/services/batch_service/async_client.py @@ -675,7 +675,7 @@ async def sample_update_job(): The request object. UpdateJob Request. job (:class:`google.cloud.batch_v1alpha.types.Job`): Required. The Job to update. Only fields specified in - ``update_mask`` are updated. + ``updateMask`` are updated. This corresponds to the ``job`` field on the ``request`` instance; if ``request`` is provided, this @@ -683,9 +683,25 @@ async def sample_update_job(): update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): Required. Mask of fields to update. - UpdateJob request now only supports update on - ``task_count`` field in a job's first task group. Other - fields will be ignored. + The ``jobs.patch`` method can only be used while a job + is in the ``QUEUED``, ``SCHEDULED``, or ``RUNNING`` + state and currently only supports increasing the value + of the first ``taskCount`` field in the job's + ``taskGroups`` field. Therefore, you must set the value + of ``updateMask`` to ``taskGroups``. Any other job + fields in the update request will be ignored. 
+ + For example, to update a job's ``taskCount`` to ``2``, + set ``updateMask`` to ``taskGroups`` and use the + following request body: + + :: + + { + "taskGroups":[{ + "taskCount": 2 + }] + } This corresponds to the ``update_mask`` field on the ``request`` instance; if ``request`` is provided, this diff --git a/packages/google-cloud-batch/google/cloud/batch_v1alpha/services/batch_service/client.py b/packages/google-cloud-batch/google/cloud/batch_v1alpha/services/batch_service/client.py index faf6b930412c..e26c7d3f4cfe 100644 --- a/packages/google-cloud-batch/google/cloud/batch_v1alpha/services/batch_service/client.py +++ b/packages/google-cloud-batch/google/cloud/batch_v1alpha/services/batch_service/client.py @@ -1151,7 +1151,7 @@ def sample_update_job(): The request object. UpdateJob Request. job (google.cloud.batch_v1alpha.types.Job): Required. The Job to update. Only fields specified in - ``update_mask`` are updated. + ``updateMask`` are updated. This corresponds to the ``job`` field on the ``request`` instance; if ``request`` is provided, this @@ -1159,9 +1159,25 @@ def sample_update_job(): update_mask (google.protobuf.field_mask_pb2.FieldMask): Required. Mask of fields to update. - UpdateJob request now only supports update on - ``task_count`` field in a job's first task group. Other - fields will be ignored. + The ``jobs.patch`` method can only be used while a job + is in the ``QUEUED``, ``SCHEDULED``, or ``RUNNING`` + state and currently only supports increasing the value + of the first ``taskCount`` field in the job's + ``taskGroups`` field. Therefore, you must set the value + of ``updateMask`` to ``taskGroups``. Any other job + fields in the update request will be ignored. + + For example, to update a job's ``taskCount`` to ``2``, + set ``updateMask`` to ``taskGroups`` and use the + following request body: + + :: + + { + "taskGroups":[{ + "taskCount": 2 + }] + } This corresponds to the ``update_mask`` field on the ``request`` instance; if ``request`` is provided, this diff --git a/packages/google-cloud-batch/google/cloud/batch_v1alpha/types/batch.py b/packages/google-cloud-batch/google/cloud/batch_v1alpha/types/batch.py index f3331469d388..7cbfb53f4ef7 100644 --- a/packages/google-cloud-batch/google/cloud/batch_v1alpha/types/batch.py +++ b/packages/google-cloud-batch/google/cloud/batch_v1alpha/types/batch.py @@ -176,13 +176,29 @@ class UpdateJobRequest(proto.Message): Attributes: job (google.cloud.batch_v1alpha.types.Job): Required. The Job to update. Only fields specified in - ``update_mask`` are updated. + ``updateMask`` are updated. update_mask (google.protobuf.field_mask_pb2.FieldMask): Required. Mask of fields to update. - UpdateJob request now only supports update on ``task_count`` - field in a job's first task group. Other fields will be - ignored. + The ``jobs.patch`` method can only be used while a job is in + the ``QUEUED``, ``SCHEDULED``, or ``RUNNING`` state and + currently only supports increasing the value of the first + ``taskCount`` field in the job's ``taskGroups`` field. + Therefore, you must set the value of ``updateMask`` to + ``taskGroups``. Any other job fields in the update request + will be ignored. + + For example, to update a job's ``taskCount`` to ``2``, set + ``updateMask`` to ``taskGroups`` and use the following + request body: + + :: + + { + "taskGroups":[{ + "taskCount": 2 + }] + } request_id (str): Optional. An optional request ID to identify requests. 
Specify a unique request ID so that if diff --git a/packages/google-cloud-batch/google/cloud/batch_v1alpha/types/job.py b/packages/google-cloud-batch/google/cloud/batch_v1alpha/types/job.py index 10596cfee4de..80bb9f2925a4 100644 --- a/packages/google-cloud-batch/google/cloud/batch_v1alpha/types/job.py +++ b/packages/google-cloud-batch/google/cloud/batch_v1alpha/types/job.py @@ -445,11 +445,19 @@ class JobNotification(proto.Message): Attributes: pubsub_topic (str): - The Pub/Sub topic where notifications like the job state - changes will be published. The topic must exist in the same - project as the job and billings will be charged to this - project. If not specified, no Pub/Sub messages will be sent. - Topic format: ``projects/{project}/topics/{topic}``. + The Pub/Sub topic where notifications for the job, like + state changes, will be published. If undefined, no Pub/Sub + notifications are sent for this job. + + Specify the topic using the following format: + ``projects/{project}/topics/{topic}``. Notably, if you want + to specify a Pub/Sub topic that is in a different project + than the job, your administrator must grant your project's + Batch service agent permission to publish to that topic. + + For more information about configuring Pub/Sub notifications + for a job, see + https://cloud.google.com/batch/docs/enable-notifications. message (google.cloud.batch_v1alpha.types.JobNotification.Message): The attribute requirements of messages to be sent to this Pub/Sub topic. Without this field, @@ -676,7 +684,10 @@ class Disk(proto.Message): Disk type as shown in ``gcloud compute disk-types list``. For example, local SSD uses type "local-ssd". Persistent disks and boot disks use "pd-balanced", "pd-extreme", - "pd-ssd" or "pd-standard". + "pd-ssd" or "pd-standard". If not specified, "pd-standard" + will be used as the default type for non-boot disks, + "pd-balanced" will be used as the default type for boot + disks. size_gb (int): Disk size in GB. @@ -913,16 +924,20 @@ class InstancePolicyOrTemplate(proto.Message): This field is a member of `oneof`_ ``policy_template``. install_gpu_drivers (bool): - Set this field true if users want Batch to help fetch - drivers from a third party location and install them for - GPUs specified in policy.accelerators or instance_template - on their behalf. Default is false. + Set this field true if you want Batch to help fetch drivers + from a third party location and install them for GPUs + specified in ``policy.accelerators`` or + ``instance_template`` on your behalf. Default is false. For Container-Optimized Image cases, Batch will install the accelerator driver following milestones of https://cloud.google.com/container-optimized-os/docs/release-notes. For non Container-Optimized Image cases, following https://github.com/GoogleCloudPlatform/compute-gpu-installation/blob/main/linux/install_gpu_driver.py. + install_ops_agent (bool): + Optional. Set this field true if you want + Batch to install Ops Agent on your behalf. + Default is false. """ policy: "AllocationPolicy.InstancePolicy" = proto.Field( @@ -940,6 +955,10 @@ class InstancePolicyOrTemplate(proto.Message): proto.BOOL, number=3, ) + install_ops_agent: bool = proto.Field( + proto.BOOL, + number=4, + ) class NetworkInterface(proto.Message): r"""A network interface. 
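The rewritten `UpdateJobRequest` documentation above (in `batch.py`, `client.py`, and `async_client.py`) states that `jobs.patch` only works while a job is `QUEUED`, `SCHEDULED`, or `RUNNING`, only supports increasing the first task group's `taskCount`, and requires `updateMask` to be set to `taskGroups`. A minimal, hypothetical client-side sketch of that call against the `batch_v1alpha` surface (the job name is a placeholder):

```python
# Minimal sketch (placeholder job name): the only update jobs.patch currently
# supports, per the docstring above, is raising taskCount on the job's first
# task group with the update mask limited to task_groups.
from google.cloud import batch_v1alpha
from google.protobuf import field_mask_pb2


def bump_task_count(job_name: str, new_task_count: int = 2) -> batch_v1alpha.Job:
    client = batch_v1alpha.BatchServiceClient()
    job = batch_v1alpha.Job(
        name=job_name,
        task_groups=[batch_v1alpha.TaskGroup(task_count=new_task_count)],
    )
    # Protobuf form of updateMask=taskGroups; in JSON the request body is
    # {"taskGroups": [{"taskCount": 2}]} as shown in the docstring.
    update_mask = field_mask_pb2.FieldMask(paths=["task_groups"])
    return client.update_job(job=job, update_mask=update_mask)
```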
diff --git a/packages/google-cloud-batch/google/cloud/batch_v1alpha/types/task.py b/packages/google-cloud-batch/google/cloud/batch_v1alpha/types/task.py index 0e513bffe7b5..625ed05e889f 100644 --- a/packages/google-cloud-batch/google/cloud/batch_v1alpha/types/task.py +++ b/packages/google-cloud-batch/google/cloud/batch_v1alpha/types/task.py @@ -166,20 +166,19 @@ class TaskExecution(proto.Message): exit_code (int): The exit code of a finished task. - If the task succeeded, the exit code will be 0. - If the task failed but not due to the following - reasons, the exit code will be 50000. + If the task succeeded, the exit code will be 0. If the task + failed but not due to the following reasons, the exit code + will be 50000. Otherwise, it can be from different sources: - - Batch known failures as - https://cloud.google.com/batch/docs/troubleshooting#reserved-exit-codes. - - Batch runnable execution failures: You can - rely on Batch logs for further diagnose: - https://cloud.google.com/batch/docs/analyze-job-using-logs. - If there are multiple runnables failures, - Batch only exposes the first error caught for - now. + - Batch known failures: + https://cloud.google.com/batch/docs/troubleshooting#reserved-exit-codes. + - Batch runnable execution failures; you can rely on Batch + logs to further diagnose: + https://cloud.google.com/batch/docs/analyze-job-using-logs. + If there are multiple runnables failures, Batch only + exposes the first error. stderr_snippet (str): Optional. The tail end of any content written to standard error by the task execution. This @@ -604,10 +603,16 @@ class TaskSpec(proto.Message): compute_resource (google.cloud.batch_v1alpha.types.ComputeResource): ComputeResource requirements. max_run_duration (google.protobuf.duration_pb2.Duration): - Maximum duration the task should run. The task will be - killed and marked as FAILED if over this limit. The valid - value range for max_run_duration in seconds is [0, - 315576000000.999999999], + Maximum duration the task should run before being + automatically retried (if enabled) or automatically failed. + Format the value of this field as a time limit in seconds + followed by ``s``—for example, ``3600s`` for 1 hour. The + field accepts any value between 0 and the maximum listed for + the ``Duration`` field type at + https://protobuf.dev/reference/protobuf/google.protobuf/#duration; + however, the actual maximum run time for a job will be + limited to the maximum run time for a job listed at + https://cloud.google.com/batch/quotas#max-job-duration. max_retry_count (int): Maximum number of retries on failures. The default, 0, which means never retry. The valid value range is [0, 10]. diff --git a/packages/google-cloud-batch/google/cloud/batch_v1alpha/types/volume.py b/packages/google-cloud-batch/google/cloud/batch_v1alpha/types/volume.py index ec3b028c75ba..5577d4a6696f 100644 --- a/packages/google-cloud-batch/google/cloud/batch_v1alpha/types/volume.py +++ b/packages/google-cloud-batch/google/cloud/batch_v1alpha/types/volume.py @@ -67,19 +67,21 @@ class Volume(proto.Message): The mount path for the volume, e.g. /mnt/disks/share. 
mount_options (MutableSequence[str]): - For Google Cloud Storage (GCS), mount options - are the options supported by the gcsfuse tool - (https://github.com/GoogleCloudPlatform/gcsfuse). - For existing persistent disks, mount options - provided by the mount command - (https://man7.org/linux/man-pages/man8/mount.8.html) - except writing are supported. This is due to - restrictions of multi-writer mode - (https://cloud.google.com/compute/docs/disks/sharing-disks-between-vms). - For other attached disks and Network File System - (NFS), mount options are these supported by the - mount command - (https://man7.org/linux/man-pages/man8/mount.8.html). + Mount options vary based on the type of storage volume: + + - For a Cloud Storage bucket, all the mount options + provided by the ```gcsfuse`` + tool `__ + are supported. + - For an existing persistent disk, all mount options + provided by the ```mount`` + command `__ + except writing are supported. This is due to restrictions + of `multi-writer + mode `__. + - For any other disk or a Network File System (NFS), all + the mount options provided by the ``mount`` command are + supported. """ nfs: "NFS" = proto.Field( diff --git a/packages/google-cloud-batch/noxfile.py b/packages/google-cloud-batch/noxfile.py index 1e6cd48d0529..67b7265f7586 100644 --- a/packages/google-cloud-batch/noxfile.py +++ b/packages/google-cloud-batch/noxfile.py @@ -160,14 +160,28 @@ def install_unittest_dependencies(session, *constraints): session.install("-e", ".", *constraints) -def default(session): +@nox.session(python=UNIT_TEST_PYTHON_VERSIONS) +@nox.parametrize( + "protobuf_implementation", + ["python", "upb", "cpp"], +) +def unit(session, protobuf_implementation): # Install all test dependencies, then install this package in-place. + if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12"): + session.skip("cpp implementation is not supported in python 3.11+") + constraints_path = str( CURRENT_DIRECTORY / "testing" / f"constraints-{session.python}.txt" ) install_unittest_dependencies(session, "-c", constraints_path) + # TODO(https://github.com/googleapis/synthtool/issues/1976): + # Remove the 'cpp' implementation once support for Protobuf 3.x is dropped. + # The 'cpp' implementation requires Protobuf<4. + if protobuf_implementation == "cpp": + session.install("protobuf<4") + # Run py.test against the unit tests. session.run( "py.test", @@ -181,15 +195,12 @@ def default(session): "--cov-fail-under=0", os.path.join("tests", "unit"), *session.posargs, + env={ + "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation, + }, ) -@nox.session(python=UNIT_TEST_PYTHON_VERSIONS) -def unit(session): - """Run the unit test suite.""" - default(session) - - def install_systemtest_dependencies(session, *constraints): # Use pre-release gRPC for system tests. # Exclude version 1.52.0rc1 which has a known issue. 
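The reworked ``unit`` session above runs the test suite once per protobuf implementation and forwards the choice to pytest through the ``PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION`` environment variable (with ``cpp`` skipped on Python 3.11+ and pinned to ``protobuf<4``). As a rough illustration only, not part of the noxfile, the variable has to be set before ``google.protobuf`` is first imported, and the active backend can be inspected with an internal protobuf helper:

```python
# Illustration: PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION selects the protobuf
# backend ("python", "upb", or "cpp") and must be set before google.protobuf
# is imported, which is why the nox session passes it via env= to py.test.
import os

os.environ.setdefault("PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION", "upb")

from google.protobuf.internal import api_implementation  # noqa: E402

print(api_implementation.Type())  # prints the active backend, e.g. "upb"
```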
@@ -358,9 +369,16 @@ def docfx(session): @nox.session(python="3.12") -def prerelease_deps(session): +@nox.parametrize( + "protobuf_implementation", + ["python", "upb", "cpp"], +) +def prerelease_deps(session, protobuf_implementation): """Run all tests with prerelease versions of dependencies installed.""" + if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12"): + session.skip("cpp implementation is not supported in python 3.11+") + # Install all dependencies session.install("-e", ".[all, tests, tracing]") unit_deps_all = UNIT_TEST_STANDARD_DEPENDENCIES + UNIT_TEST_EXTERNAL_DEPENDENCIES @@ -397,9 +415,9 @@ def prerelease_deps(session): "protobuf", # dependency of grpc "six", + "grpc-google-iam-v1", "googleapis-common-protos", - # Exclude version 1.52.0rc1 which has a known issue. See https://github.com/grpc/grpc/issues/32163 - "grpcio!=1.52.0rc1", + "grpcio", "grpcio-status", "google-api-core", "google-auth", @@ -425,4 +443,10 @@ def prerelease_deps(session): session.run("python", "-c", "import grpc; print(grpc.__version__)") session.run("python", "-c", "import google.auth; print(google.auth.__version__)") - session.run("py.test", "tests/unit") + session.run( + "py.test", + "tests/unit", + env={ + "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation, + }, + ) diff --git a/packages/google-cloud-batch/samples/generated_samples/snippet_metadata_google.cloud.batch.v1.json b/packages/google-cloud-batch/samples/generated_samples/snippet_metadata_google.cloud.batch.v1.json index fca7601a81e9..e2df1067e4dd 100644 --- a/packages/google-cloud-batch/samples/generated_samples/snippet_metadata_google.cloud.batch.v1.json +++ b/packages/google-cloud-batch/samples/generated_samples/snippet_metadata_google.cloud.batch.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-batch", - "version": "0.17.20" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-batch/samples/generated_samples/snippet_metadata_google.cloud.batch.v1alpha.json b/packages/google-cloud-batch/samples/generated_samples/snippet_metadata_google.cloud.batch.v1alpha.json index ee0aa4c50bf9..11f4e7d8d3bf 100644 --- a/packages/google-cloud-batch/samples/generated_samples/snippet_metadata_google.cloud.batch.v1alpha.json +++ b/packages/google-cloud-batch/samples/generated_samples/snippet_metadata_google.cloud.batch.v1alpha.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-batch", - "version": "0.17.20" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-batch/tests/unit/gapic/batch_v1/test_batch_service.py b/packages/google-cloud-batch/tests/unit/gapic/batch_v1/test_batch_service.py index 283c5c0fe29b..c6f4e4b9a5e1 100644 --- a/packages/google-cloud-batch/tests/unit/gapic/batch_v1/test_batch_service.py +++ b/packages/google-cloud-batch/tests/unit/gapic/batch_v1/test_batch_service.py @@ -2666,13 +2666,13 @@ def test_list_jobs_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_jobs(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -3595,13 +3595,13 @@ def test_list_tasks_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + 
expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_tasks(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -3879,6 +3879,7 @@ def test_create_job_rest(request_type): }, "instance_template": "instance_template_value", "install_gpu_drivers": True, + "install_ops_agent": True, } ], "service_account": { diff --git a/packages/google-cloud-batch/tests/unit/gapic/batch_v1alpha/test_batch_service.py b/packages/google-cloud-batch/tests/unit/gapic/batch_v1alpha/test_batch_service.py index 156a31ceab16..3272519fde67 100644 --- a/packages/google-cloud-batch/tests/unit/gapic/batch_v1alpha/test_batch_service.py +++ b/packages/google-cloud-batch/tests/unit/gapic/batch_v1alpha/test_batch_service.py @@ -3075,13 +3075,13 @@ def test_list_jobs_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_jobs(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -4006,13 +4006,13 @@ def test_list_tasks_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_tasks(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -5843,13 +5843,13 @@ def test_list_resource_allowances_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_resource_allowances(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -6582,6 +6582,7 @@ def test_create_job_rest(request_type): "policy": {}, "instance_template": "instance_template_value", "install_gpu_drivers": True, + "install_ops_agent": True, } ], "instance_templates": [ @@ -7692,6 +7693,7 @@ def test_update_job_rest(request_type): "policy": {}, "instance_template": "instance_template_value", "install_gpu_drivers": True, + "install_ops_agent": True, } ], "instance_templates": [ diff --git a/packages/google-cloud-beyondcorp-appconnections/tests/unit/gapic/beyondcorp_appconnections_v1/test_app_connections_service.py b/packages/google-cloud-beyondcorp-appconnections/tests/unit/gapic/beyondcorp_appconnections_v1/test_app_connections_service.py index 3d54d17365be..36ff63b2cc8d 100644 --- a/packages/google-cloud-beyondcorp-appconnections/tests/unit/gapic/beyondcorp_appconnections_v1/test_app_connections_service.py +++ b/packages/google-cloud-beyondcorp-appconnections/tests/unit/gapic/beyondcorp_appconnections_v1/test_app_connections_service.py @@ -1645,13 +1645,13 @@ def test_list_app_connections_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( 
gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_app_connections(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -3881,13 +3881,13 @@ def test_resolve_app_connections_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.resolve_app_connections(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 diff --git a/packages/google-cloud-beyondcorp-appconnectors/tests/unit/gapic/beyondcorp_appconnectors_v1/test_app_connectors_service.py b/packages/google-cloud-beyondcorp-appconnectors/tests/unit/gapic/beyondcorp_appconnectors_v1/test_app_connectors_service.py index 897ddf65b92b..23d9ca3a18d1 100644 --- a/packages/google-cloud-beyondcorp-appconnectors/tests/unit/gapic/beyondcorp_appconnectors_v1/test_app_connectors_service.py +++ b/packages/google-cloud-beyondcorp-appconnectors/tests/unit/gapic/beyondcorp_appconnectors_v1/test_app_connectors_service.py @@ -1648,13 +1648,13 @@ def test_list_app_connectors_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_app_connectors(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 diff --git a/packages/google-cloud-beyondcorp-appgateways/tests/unit/gapic/beyondcorp_appgateways_v1/test_app_gateways_service.py b/packages/google-cloud-beyondcorp-appgateways/tests/unit/gapic/beyondcorp_appgateways_v1/test_app_gateways_service.py index da600e1e94d3..e2a4e3d29cd4 100644 --- a/packages/google-cloud-beyondcorp-appgateways/tests/unit/gapic/beyondcorp_appgateways_v1/test_app_gateways_service.py +++ b/packages/google-cloud-beyondcorp-appgateways/tests/unit/gapic/beyondcorp_appgateways_v1/test_app_gateways_service.py @@ -1614,13 +1614,13 @@ def test_list_app_gateways_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_app_gateways(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 diff --git a/packages/google-cloud-beyondcorp-clientconnectorservices/tests/unit/gapic/beyondcorp_clientconnectorservices_v1/test_client_connector_services_service.py b/packages/google-cloud-beyondcorp-clientconnectorservices/tests/unit/gapic/beyondcorp_clientconnectorservices_v1/test_client_connector_services_service.py index ffcc7cbb588c..95504f8425d9 100644 --- a/packages/google-cloud-beyondcorp-clientconnectorservices/tests/unit/gapic/beyondcorp_clientconnectorservices_v1/test_client_connector_services_service.py +++ b/packages/google-cloud-beyondcorp-clientconnectorservices/tests/unit/gapic/beyondcorp_clientconnectorservices_v1/test_client_connector_services_service.py @@ -1691,13 +1691,13 @@ def test_list_client_connector_services_pager(transport_name: str = "grpc"): RuntimeError, ) - 
metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_client_connector_services(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 diff --git a/packages/google-cloud-beyondcorp-clientgateways/tests/unit/gapic/beyondcorp_clientgateways_v1/test_client_gateways_service.py b/packages/google-cloud-beyondcorp-clientgateways/tests/unit/gapic/beyondcorp_clientgateways_v1/test_client_gateways_service.py index beec72ecdc4b..dbace9134ab1 100644 --- a/packages/google-cloud-beyondcorp-clientgateways/tests/unit/gapic/beyondcorp_clientgateways_v1/test_client_gateways_service.py +++ b/packages/google-cloud-beyondcorp-clientgateways/tests/unit/gapic/beyondcorp_clientgateways_v1/test_client_gateways_service.py @@ -1644,13 +1644,13 @@ def test_list_client_gateways_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_client_gateways(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 diff --git a/packages/google-cloud-bigquery-analyticshub/tests/unit/gapic/bigquery_analyticshub_v1/test_analytics_hub_service.py b/packages/google-cloud-bigquery-analyticshub/tests/unit/gapic/bigquery_analyticshub_v1/test_analytics_hub_service.py index 3be034725199..efe5a591efd4 100644 --- a/packages/google-cloud-bigquery-analyticshub/tests/unit/gapic/bigquery_analyticshub_v1/test_analytics_hub_service.py +++ b/packages/google-cloud-bigquery-analyticshub/tests/unit/gapic/bigquery_analyticshub_v1/test_analytics_hub_service.py @@ -1587,13 +1587,13 @@ def test_list_data_exchanges_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_data_exchanges(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -2180,13 +2180,13 @@ def test_list_org_data_exchanges_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("organization", ""),)), ) pager = client.list_org_data_exchanges(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -4401,13 +4401,13 @@ def test_list_listings_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_listings(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -8198,13 +8198,13 @@ def test_list_subscriptions_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + 
expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_subscriptions(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -8791,13 +8791,13 @@ def test_list_shared_resource_subscriptions_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("resource", ""),)), ) pager = client.list_shared_resource_subscriptions(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 diff --git a/packages/google-cloud-bigquery-biglake/tests/unit/gapic/bigquery_biglake_v1/test_metastore_service.py b/packages/google-cloud-bigquery-biglake/tests/unit/gapic/bigquery_biglake_v1/test_metastore_service.py index 781c98efb777..401e7de48088 100644 --- a/packages/google-cloud-bigquery-biglake/tests/unit/gapic/bigquery_biglake_v1/test_metastore_service.py +++ b/packages/google-cloud-bigquery-biglake/tests/unit/gapic/bigquery_biglake_v1/test_metastore_service.py @@ -2676,13 +2676,13 @@ def test_list_catalogs_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_catalogs(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -4793,13 +4793,13 @@ def test_list_databases_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_databases(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -7315,13 +7315,13 @@ def test_list_tables_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_tables(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 diff --git a/packages/google-cloud-bigquery-biglake/tests/unit/gapic/bigquery_biglake_v1alpha1/test_metastore_service.py b/packages/google-cloud-bigquery-biglake/tests/unit/gapic/bigquery_biglake_v1alpha1/test_metastore_service.py index fef779ae19f5..3a8c310b6e1c 100644 --- a/packages/google-cloud-bigquery-biglake/tests/unit/gapic/bigquery_biglake_v1alpha1/test_metastore_service.py +++ b/packages/google-cloud-bigquery-biglake/tests/unit/gapic/bigquery_biglake_v1alpha1/test_metastore_service.py @@ -2676,13 +2676,13 @@ def test_list_catalogs_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_catalogs(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata 
results = list(pager) assert len(results) == 6 @@ -4793,13 +4793,13 @@ def test_list_databases_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_databases(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -7315,13 +7315,13 @@ def test_list_tables_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_tables(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -8987,13 +8987,13 @@ def test_list_locks_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_locks(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 diff --git a/packages/google-cloud-bigquery-connection/tests/unit/gapic/bigquery_connection_v1/test_connection_service.py b/packages/google-cloud-bigquery-connection/tests/unit/gapic/bigquery_connection_v1/test_connection_service.py index 6bc9a263f81e..89e5333b9c59 100644 --- a/packages/google-cloud-bigquery-connection/tests/unit/gapic/bigquery_connection_v1/test_connection_service.py +++ b/packages/google-cloud-bigquery-connection/tests/unit/gapic/bigquery_connection_v1/test_connection_service.py @@ -2404,13 +2404,13 @@ def test_list_connections_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_connections(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 diff --git a/packages/google-cloud-bigquery-data-exchange/tests/unit/gapic/bigquery_data_exchange_v1beta1/test_analytics_hub_service.py b/packages/google-cloud-bigquery-data-exchange/tests/unit/gapic/bigquery_data_exchange_v1beta1/test_analytics_hub_service.py index 0643b9eb75c4..535e09e3959e 100644 --- a/packages/google-cloud-bigquery-data-exchange/tests/unit/gapic/bigquery_data_exchange_v1beta1/test_analytics_hub_service.py +++ b/packages/google-cloud-bigquery-data-exchange/tests/unit/gapic/bigquery_data_exchange_v1beta1/test_analytics_hub_service.py @@ -1576,13 +1576,13 @@ def test_list_data_exchanges_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_data_exchanges(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -2169,13 +2169,13 @@ def test_list_org_data_exchanges_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = 
tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("organization", ""),)), ) pager = client.list_org_data_exchanges(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -4390,13 +4390,13 @@ def test_list_listings_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_listings(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 diff --git a/packages/google-cloud-bigquery-datapolicies/tests/unit/gapic/bigquery_datapolicies_v1/test_data_policy_service.py b/packages/google-cloud-bigquery-datapolicies/tests/unit/gapic/bigquery_datapolicies_v1/test_data_policy_service.py index b85cb5291b76..01976bf26f95 100644 --- a/packages/google-cloud-bigquery-datapolicies/tests/unit/gapic/bigquery_datapolicies_v1/test_data_policy_service.py +++ b/packages/google-cloud-bigquery-datapolicies/tests/unit/gapic/bigquery_datapolicies_v1/test_data_policy_service.py @@ -3609,13 +3609,13 @@ def test_list_data_policies_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_data_policies(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 diff --git a/packages/google-cloud-bigquery-datapolicies/tests/unit/gapic/bigquery_datapolicies_v1beta1/test_data_policy_service.py b/packages/google-cloud-bigquery-datapolicies/tests/unit/gapic/bigquery_datapolicies_v1beta1/test_data_policy_service.py index 6ad9b49e8af0..2d55b3909f8d 100644 --- a/packages/google-cloud-bigquery-datapolicies/tests/unit/gapic/bigquery_datapolicies_v1beta1/test_data_policy_service.py +++ b/packages/google-cloud-bigquery-datapolicies/tests/unit/gapic/bigquery_datapolicies_v1beta1/test_data_policy_service.py @@ -3149,13 +3149,13 @@ def test_list_data_policies_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_data_policies(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 diff --git a/packages/google-cloud-bigquery-datatransfer/CHANGELOG.md b/packages/google-cloud-bigquery-datatransfer/CHANGELOG.md index 3ca3be93dc33..1dc7d3c11720 100644 --- a/packages/google-cloud-bigquery-datatransfer/CHANGELOG.md +++ b/packages/google-cloud-bigquery-datatransfer/CHANGELOG.md @@ -4,6 +4,13 @@ [1]: https://pypi.org/project/google-cloud-bigquery-datatransfer/#history +## [3.15.3](https://github.com/googleapis/google-cloud-python/compare/google-cloud-bigquery-datatransfer-v3.15.2...google-cloud-bigquery-datatransfer-v3.15.3) (2024-05-29) + + +### Documentation + +* [google-cloud-bigquery-datatransfer] update OAuth 
links in `CreateTransferConfigRequest` and `UpdateTransferConfigRequest` ([#12738](https://github.com/googleapis/google-cloud-python/issues/12738)) ([0a9f0d7](https://github.com/googleapis/google-cloud-python/commit/0a9f0d76fb9b6765123a2f9070ce99ea23d01e0d)) + ## [3.15.2](https://github.com/googleapis/google-cloud-python/compare/google-cloud-bigquery-datatransfer-v3.15.1...google-cloud-bigquery-datatransfer-v3.15.2) (2024-04-15) diff --git a/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer_v1/types/datatransfer.py b/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer_v1/types/datatransfer.py index 2b86f76c3161..1138fb6ff3f5 100644 --- a/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer_v1/types/datatransfer.py +++ b/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer_v1/types/datatransfer.py @@ -502,7 +502,7 @@ class CreateTransferConfigRequest(proto.Message): .. raw:: html
-                https://www.gstatic.com/bigquerydatatransfer/oauthz/auth?redirect_uri=urn:ietf:wg:oauth:2.0:oob&response_type=authorization_code&client_id=client_id&scope=data_source_scopes
+                https://bigquery.cloud.google.com/datatransfer/oauthz/auth?redirect_uri=urn:ietf:wg:oauth:2.0:oob&response_type=authorization_code&client_id=client_id&scope=data_source_scopes

- The client_id is the OAuth client_id of the a data source @@ -523,7 +523,7 @@ class CreateTransferConfigRequest(proto.Message): .. raw:: html
-                https://www.gstatic.com/bigquerydatatransfer/oauthz/auth?redirect_uri=urn:ietf:wg:oauth:2.0:oob&response_type=version_info&client_id=client_id&scope=data_source_scopes
+                https://bigquery.cloud.google.com/datatransfer/oauthz/auth?redirect_uri=urn:ietf:wg:oauth:2.0:oob&response_type=version_info&client_id=client_id&scope=data_source_scopes

- The client_id is the OAuth client_id of the a data source @@ -589,7 +589,7 @@ class UpdateTransferConfigRequest(proto.Message): .. raw:: html
-                https://www.gstatic.com/bigquerydatatransfer/oauthz/auth?redirect_uri=urn:ietf:wg:oauth:2.0:oob&response_type=authorization_code&client_id=client_id&scope=data_source_scopes
+                https://bigquery.cloud.google.com/datatransfer/oauthz/auth?redirect_uri=urn:ietf:wg:oauth:2.0:oob&response_type=authorization_code&client_id=client_id&scope=data_source_scopes

- The client_id is the OAuth client_id of the a data source @@ -613,7 +613,7 @@ class UpdateTransferConfigRequest(proto.Message): .. raw:: html
-                https://www.gstatic.com/bigquerydatatransfer/oauthz/auth?redirect_uri=urn:ietf:wg:oauth:2.0:oob&response_type=version_info&client_id=client_id&scope=data_source_scopes
+                https://bigquery.cloud.google.com/datatransfer/oauthz/auth?redirect_uri=urn:ietf:wg:oauth:2.0:oob&response_type=version_info&client_id=client_id&scope=data_source_scopes

- The client_id is the OAuth client_id of the a data source diff --git a/packages/google-cloud-bigquery-datatransfer/tests/unit/gapic/bigquery_datatransfer_v1/test_data_transfer_service.py b/packages/google-cloud-bigquery-datatransfer/tests/unit/gapic/bigquery_datatransfer_v1/test_data_transfer_service.py index b06511b416d2..f979154be997 100644 --- a/packages/google-cloud-bigquery-datatransfer/tests/unit/gapic/bigquery_datatransfer_v1/test_data_transfer_service.py +++ b/packages/google-cloud-bigquery-datatransfer/tests/unit/gapic/bigquery_datatransfer_v1/test_data_transfer_service.py @@ -2077,13 +2077,13 @@ def test_list_data_sources_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_data_sources(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -4392,13 +4392,13 @@ def test_list_transfer_configs_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_transfer_configs(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -6459,13 +6459,13 @@ def test_list_transfer_runs_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_transfer_runs(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -7050,13 +7050,13 @@ def test_list_transfer_logs_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_transfer_logs(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 diff --git a/packages/google-cloud-bigquery-migration/tests/unit/gapic/bigquery_migration_v2/test_migration_service.py b/packages/google-cloud-bigquery-migration/tests/unit/gapic/bigquery_migration_v2/test_migration_service.py index 38e2fa237b4f..c5446cc0b85f 100644 --- a/packages/google-cloud-bigquery-migration/tests/unit/gapic/bigquery_migration_v2/test_migration_service.py +++ b/packages/google-cloud-bigquery-migration/tests/unit/gapic/bigquery_migration_v2/test_migration_service.py @@ -2367,13 +2367,13 @@ def test_list_migration_workflows_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_migration_workflows(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -4131,13 +4131,13 @@ def test_list_migration_subtasks_pager(transport_name: str = "grpc"): RuntimeError, ) - 
metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_migration_subtasks(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 diff --git a/packages/google-cloud-bigquery-migration/tests/unit/gapic/bigquery_migration_v2alpha/test_migration_service.py b/packages/google-cloud-bigquery-migration/tests/unit/gapic/bigquery_migration_v2alpha/test_migration_service.py index 2af94a9ba2be..e9a36c810087 100644 --- a/packages/google-cloud-bigquery-migration/tests/unit/gapic/bigquery_migration_v2alpha/test_migration_service.py +++ b/packages/google-cloud-bigquery-migration/tests/unit/gapic/bigquery_migration_v2alpha/test_migration_service.py @@ -2369,13 +2369,13 @@ def test_list_migration_workflows_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_migration_workflows(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -4133,13 +4133,13 @@ def test_list_migration_subtasks_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_migration_subtasks(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 diff --git a/packages/google-cloud-bigquery-reservation/tests/unit/gapic/bigquery_reservation_v1/test_reservation_service.py b/packages/google-cloud-bigquery-reservation/tests/unit/gapic/bigquery_reservation_v1/test_reservation_service.py index 915a208d24e2..2134e9f63c33 100644 --- a/packages/google-cloud-bigquery-reservation/tests/unit/gapic/bigquery_reservation_v1/test_reservation_service.py +++ b/packages/google-cloud-bigquery-reservation/tests/unit/gapic/bigquery_reservation_v1/test_reservation_service.py @@ -2035,13 +2035,13 @@ def test_list_reservations_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_reservations(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -4255,13 +4255,13 @@ def test_list_capacity_commitments_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_capacity_commitments(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -7297,13 +7297,13 @@ def test_list_assignments_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( 
gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_assignments(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -8267,13 +8267,13 @@ def test_search_assignments_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.search_assignments(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -8872,13 +8872,13 @@ def test_search_all_assignments_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.search_all_assignments(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 diff --git a/packages/google-cloud-billing-budgets/tests/unit/gapic/budgets_v1/test_budget_service.py b/packages/google-cloud-billing-budgets/tests/unit/gapic/budgets_v1/test_budget_service.py index 70fa43cf27d2..f291af0268e6 100644 --- a/packages/google-cloud-billing-budgets/tests/unit/gapic/budgets_v1/test_budget_service.py +++ b/packages/google-cloud-billing-budgets/tests/unit/gapic/budgets_v1/test_budget_service.py @@ -2668,13 +2668,13 @@ def test_list_budgets_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_budgets(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 diff --git a/packages/google-cloud-billing-budgets/tests/unit/gapic/budgets_v1beta1/test_budget_service.py b/packages/google-cloud-billing-budgets/tests/unit/gapic/budgets_v1beta1/test_budget_service.py index 2a745fb9ac7b..23a52447cc4b 100644 --- a/packages/google-cloud-billing-budgets/tests/unit/gapic/budgets_v1beta1/test_budget_service.py +++ b/packages/google-cloud-billing-budgets/tests/unit/gapic/budgets_v1beta1/test_budget_service.py @@ -2303,13 +2303,13 @@ def test_list_budgets_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_budgets(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 diff --git a/packages/google-cloud-billing/CHANGELOG.md b/packages/google-cloud-billing/CHANGELOG.md index 926b95c761e0..f7176d828341 100644 --- a/packages/google-cloud-billing/CHANGELOG.md +++ b/packages/google-cloud-billing/CHANGELOG.md @@ -1,5 +1,12 @@ # Changelog +## [1.13.4](https://github.com/googleapis/google-cloud-python/compare/google-cloud-billing-v1.13.3...google-cloud-billing-v1.13.4) (2024-06-24) + + +### Documentation + +* Genereal documentation improvements 
([73dd30d](https://github.com/googleapis/google-cloud-python/commit/73dd30dd4eac8721b5db7e664df1c885f5b3d65c)) + ## [1.13.3](https://github.com/googleapis/google-cloud-python/compare/google-cloud-billing-v1.13.2...google-cloud-billing-v1.13.3) (2024-03-05) diff --git a/packages/google-cloud-billing/google/cloud/billing/gapic_version.py b/packages/google-cloud-billing/google/cloud/billing/gapic_version.py index 558c8aab67c5..43798cc8b36d 100644 --- a/packages/google-cloud-billing/google/cloud/billing/gapic_version.py +++ b/packages/google-cloud-billing/google/cloud/billing/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.0.0" # {x-release-please-version} +__version__ = "1.13.4" # {x-release-please-version} diff --git a/packages/google-cloud-billing/google/cloud/billing_v1/gapic_version.py b/packages/google-cloud-billing/google/cloud/billing_v1/gapic_version.py index 558c8aab67c5..43798cc8b36d 100644 --- a/packages/google-cloud-billing/google/cloud/billing_v1/gapic_version.py +++ b/packages/google-cloud-billing/google/cloud/billing_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.0.0" # {x-release-please-version} +__version__ = "1.13.4" # {x-release-please-version} diff --git a/packages/google-cloud-billing/google/cloud/billing_v1/services/cloud_catalog/async_client.py b/packages/google-cloud-billing/google/cloud/billing_v1/services/cloud_catalog/async_client.py index 0d80a84171a5..6800c05144ba 100644 --- a/packages/google-cloud-billing/google/cloud/billing_v1/services/cloud_catalog/async_client.py +++ b/packages/google-cloud-billing/google/cloud/billing_v1/services/cloud_catalog/async_client.py @@ -390,7 +390,7 @@ async def sample_list_skus(): The request object. Request message for ``ListSkus``. parent (:class:`str`): Required. The name of the service. - Example: "services/DA34-426B-A397" + Example: "services/6F81-5844-456A" This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this diff --git a/packages/google-cloud-billing/google/cloud/billing_v1/services/cloud_catalog/client.py b/packages/google-cloud-billing/google/cloud/billing_v1/services/cloud_catalog/client.py index 285a927d6676..a03c50d43167 100644 --- a/packages/google-cloud-billing/google/cloud/billing_v1/services/cloud_catalog/client.py +++ b/packages/google-cloud-billing/google/cloud/billing_v1/services/cloud_catalog/client.py @@ -815,7 +815,7 @@ def sample_list_skus(): The request object. Request message for ``ListSkus``. parent (str): Required. The name of the service. - Example: "services/DA34-426B-A397" + Example: "services/6F81-5844-456A" This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this diff --git a/packages/google-cloud-billing/google/cloud/billing_v1/types/cloud_catalog.py b/packages/google-cloud-billing/google/cloud/billing_v1/types/cloud_catalog.py index 4979df350ef2..4bc62f4311d4 100644 --- a/packages/google-cloud-billing/google/cloud/billing_v1/types/cloud_catalog.py +++ b/packages/google-cloud-billing/google/cloud/billing_v1/types/cloud_catalog.py @@ -45,10 +45,10 @@ class Service(proto.Message): Attributes: name (str): The resource name for the service. - Example: "services/DA34-426B-A397". 
+ Example: "services/6F81-5844-456A". service_id (str): The identifier for the service. - Example: "DA34-426B-A397". + Example: "6F81-5844-456A". display_name (str): A human readable display name for this service. @@ -77,16 +77,16 @@ class Service(proto.Message): class Sku(proto.Message): - r"""Encapsulates a single SKU in Google Cloud Platform + r"""Encapsulates a single SKU in Google Cloud Attributes: name (str): The resource name for the SKU. Example: - "services/DA34-426B-A397/skus/AA95-CD31-42FE". + "services/6F81-5844-456A/skus/D041-B8A1-6E0B". sku_id (str): The identifier for the SKU. - Example: "AA95-CD31-42FE". + Example: "D041-B8A1-6E0B". description (str): A human readable description of the SKU, has a maximum length of 256 characters. @@ -515,7 +515,7 @@ class ListSkusRequest(proto.Message): Attributes: parent (str): Required. The name of the service. - Example: "services/DA34-426B-A397". + Example: "services/6F81-5844-456A". start_time (google.protobuf.timestamp_pb2.Timestamp): Optional inclusive start time of the time range for which the pricing versions will be returned. Timestamps in the diff --git a/packages/google-cloud-billing/samples/generated_samples/snippet_metadata_google.cloud.billing.v1.json b/packages/google-cloud-billing/samples/generated_samples/snippet_metadata_google.cloud.billing.v1.json index 94a6ddeb90f9..1e61d35edd97 100644 --- a/packages/google-cloud-billing/samples/generated_samples/snippet_metadata_google.cloud.billing.v1.json +++ b/packages/google-cloud-billing/samples/generated_samples/snippet_metadata_google.cloud.billing.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-billing", - "version": "0.1.0" + "version": "1.13.4" }, "snippets": [ { diff --git a/packages/google-cloud-billing/tests/unit/gapic/billing_v1/test_cloud_billing.py b/packages/google-cloud-billing/tests/unit/gapic/billing_v1/test_cloud_billing.py index 1c9140ec95c2..b6755ec77c69 100644 --- a/packages/google-cloud-billing/tests/unit/gapic/billing_v1/test_cloud_billing.py +++ b/packages/google-cloud-billing/tests/unit/gapic/billing_v1/test_cloud_billing.py @@ -1884,10 +1884,10 @@ def test_list_billing_accounts_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () + expected_metadata = () pager = client.list_billing_accounts(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -3255,13 +3255,13 @@ def test_list_project_billing_info_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("name", ""),)), ) pager = client.list_project_billing_info(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 diff --git a/packages/google-cloud-billing/tests/unit/gapic/billing_v1/test_cloud_catalog.py b/packages/google-cloud-billing/tests/unit/gapic/billing_v1/test_cloud_catalog.py index 627c7e8d21a3..bfec6b5e37ba 100644 --- a/packages/google-cloud-billing/tests/unit/gapic/billing_v1/test_cloud_catalog.py +++ b/packages/google-cloud-billing/tests/unit/gapic/billing_v1/test_cloud_catalog.py @@ -1361,10 +1361,10 @@ def test_list_services_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () + expected_metadata = () pager = client.list_services(request={}) - assert pager._metadata == metadata + assert 
pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -1919,13 +1919,13 @@ def test_list_skus_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_skus(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 diff --git a/packages/google-cloud-binary-authorization/tests/unit/gapic/binaryauthorization_v1/test_binauthz_management_service_v1.py b/packages/google-cloud-binary-authorization/tests/unit/gapic/binaryauthorization_v1/test_binauthz_management_service_v1.py index 14555487a66b..ec9bf75eacab 100644 --- a/packages/google-cloud-binary-authorization/tests/unit/gapic/binaryauthorization_v1/test_binauthz_management_service_v1.py +++ b/packages/google-cloud-binary-authorization/tests/unit/gapic/binaryauthorization_v1/test_binauthz_management_service_v1.py @@ -3497,13 +3497,13 @@ def test_list_attestors_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_attestors(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 diff --git a/packages/google-cloud-binary-authorization/tests/unit/gapic/binaryauthorization_v1beta1/test_binauthz_management_service_v1_beta1.py b/packages/google-cloud-binary-authorization/tests/unit/gapic/binaryauthorization_v1beta1/test_binauthz_management_service_v1_beta1.py index 689950612796..d91770e954fb 100644 --- a/packages/google-cloud-binary-authorization/tests/unit/gapic/binaryauthorization_v1beta1/test_binauthz_management_service_v1_beta1.py +++ b/packages/google-cloud-binary-authorization/tests/unit/gapic/binaryauthorization_v1beta1/test_binauthz_management_service_v1_beta1.py @@ -3519,13 +3519,13 @@ def test_list_attestors_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_attestors(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 diff --git a/packages/google-cloud-build/tests/unit/gapic/cloudbuild_v1/test_cloud_build.py b/packages/google-cloud-build/tests/unit/gapic/cloudbuild_v1/test_cloud_build.py index f482d810ad62..6ef4542d4de2 100644 --- a/packages/google-cloud-build/tests/unit/gapic/cloudbuild_v1/test_cloud_build.py +++ b/packages/google-cloud-build/tests/unit/gapic/cloudbuild_v1/test_cloud_build.py @@ -2218,10 +2218,10 @@ def test_list_builds_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () + expected_metadata = () pager = client.list_builds(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -4685,10 +4685,10 @@ def test_list_build_triggers_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () + expected_metadata = () pager = client.list_build_triggers(request={}) - assert pager._metadata == metadata + assert 
pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -8147,10 +8147,10 @@ def test_list_worker_pools_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () + expected_metadata = () pager = client.list_worker_pools(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 diff --git a/packages/google-cloud-build/tests/unit/gapic/cloudbuild_v2/test_repository_manager.py b/packages/google-cloud-build/tests/unit/gapic/cloudbuild_v2/test_repository_manager.py index b9036fc1032f..8b09b40b1f0d 100644 --- a/packages/google-cloud-build/tests/unit/gapic/cloudbuild_v2/test_repository_manager.py +++ b/packages/google-cloud-build/tests/unit/gapic/cloudbuild_v2/test_repository_manager.py @@ -2376,13 +2376,13 @@ def test_list_connections_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_connections(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -4949,13 +4949,13 @@ def test_list_repositories_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_repositories(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -6611,13 +6611,13 @@ def test_fetch_linkable_repositories_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("connection", ""),)), ) pager = client.fetch_linkable_repositories(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 diff --git a/packages/google-cloud-certificate-manager/tests/unit/gapic/certificate_manager_v1/test_certificate_manager.py b/packages/google-cloud-certificate-manager/tests/unit/gapic/certificate_manager_v1/test_certificate_manager.py index 0777e46cbb1b..4f5ab31ad9b8 100644 --- a/packages/google-cloud-certificate-manager/tests/unit/gapic/certificate_manager_v1/test_certificate_manager.py +++ b/packages/google-cloud-certificate-manager/tests/unit/gapic/certificate_manager_v1/test_certificate_manager.py @@ -1619,13 +1619,13 @@ def test_list_certificates_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_certificates(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -3815,13 +3815,13 @@ def test_list_certificate_maps_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = 
client.list_certificate_maps(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -6021,13 +6021,13 @@ def test_list_certificate_map_entries_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_certificate_map_entries(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -8259,13 +8259,13 @@ def test_list_dns_authorizations_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_dns_authorizations(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -10494,13 +10494,13 @@ def test_list_certificate_issuance_configs_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_certificate_issuance_configs(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -12360,13 +12360,13 @@ def test_list_trust_configs_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_trust_configs(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 diff --git a/packages/google-cloud-channel/tests/unit/gapic/channel_v1/test_cloud_channel_reports_service.py b/packages/google-cloud-channel/tests/unit/gapic/channel_v1/test_cloud_channel_reports_service.py index 81db42641fe3..587a05f26a01 100644 --- a/packages/google-cloud-channel/tests/unit/gapic/channel_v1/test_cloud_channel_reports_service.py +++ b/packages/google-cloud-channel/tests/unit/gapic/channel_v1/test_cloud_channel_reports_service.py @@ -1889,13 +1889,13 @@ def test_fetch_report_results_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("report_job", ""),)), ) pager = client.fetch_report_results(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -2458,13 +2458,13 @@ def test_list_reports_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_reports(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 diff --git 
a/packages/google-cloud-channel/tests/unit/gapic/channel_v1/test_cloud_channel_service.py b/packages/google-cloud-channel/tests/unit/gapic/channel_v1/test_cloud_channel_service.py index 30b6b15612f6..1adefcbba17f 100644 --- a/packages/google-cloud-channel/tests/unit/gapic/channel_v1/test_cloud_channel_service.py +++ b/packages/google-cloud-channel/tests/unit/gapic/channel_v1/test_cloud_channel_service.py @@ -1490,13 +1490,13 @@ def test_list_customers_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_customers(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -4313,13 +4313,13 @@ def test_list_entitlements_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_entitlements(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -4827,13 +4827,13 @@ def test_list_transferable_skus_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_transferable_skus(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -5343,13 +5343,13 @@ def test_list_transferable_offers_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_transferable_offers(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -9239,13 +9239,13 @@ def test_list_channel_partner_links_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_channel_partner_links(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -11211,13 +11211,13 @@ def test_list_customer_repricing_configs_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_customer_repricing_configs(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -13384,13 +13384,13 @@ def test_list_channel_partner_repricing_configs_pager(transport_name: str = "grp RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( 
gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_channel_partner_repricing_configs(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -15140,13 +15140,13 @@ def test_list_sku_groups_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_sku_groups(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -15727,13 +15727,13 @@ def test_list_sku_group_billable_skus_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_sku_group_billable_skus(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -16443,10 +16443,10 @@ def test_list_products_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () + expected_metadata = () pager = client.list_products(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -16921,13 +16921,13 @@ def test_list_skus_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_skus(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -17404,13 +17404,13 @@ def test_list_offers_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_offers(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -17906,13 +17906,13 @@ def test_list_purchasable_skus_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("customer", ""),)), ) pager = client.list_purchasable_skus(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -18414,13 +18414,13 @@ def test_list_purchasable_offers_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("customer", ""),)), ) pager = client.list_purchasable_offers(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -19825,13 +19825,13 @@ def test_list_subscribers_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( 
+ expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("account", ""),)), ) pager = client.list_subscribers(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -20413,13 +20413,13 @@ def test_list_entitlement_changes_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_entitlement_changes(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 diff --git a/packages/google-cloud-cloudcontrolspartner/CHANGELOG.md b/packages/google-cloud-cloudcontrolspartner/CHANGELOG.md index 2229c9e09772..4af33ed9f449 100644 --- a/packages/google-cloud-cloudcontrolspartner/CHANGELOG.md +++ b/packages/google-cloud-cloudcontrolspartner/CHANGELOG.md @@ -1,5 +1,12 @@ # Changelog +## [0.1.1](https://github.com/googleapis/google-cloud-python/compare/google-cloud-cloudcontrolspartner-v0.1.0...google-cloud-cloudcontrolspartner-v0.1.1) (2024-06-24) + + +### Documentation + +* Mark the accessApprovalRequests.list method as deprecated ([0520183](https://github.com/googleapis/google-cloud-python/commit/052018375c98534aca234c479e28d0bf1bd03857)) + ## 0.1.0 (2024-03-05) diff --git a/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner/gapic_version.py b/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner/gapic_version.py index 558c8aab67c5..0c7cc68730c4 100644 --- a/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner/gapic_version.py +++ b/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.0.0" # {x-release-please-version} +__version__ = "0.1.1" # {x-release-please-version} diff --git a/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner_v1/gapic_version.py b/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner_v1/gapic_version.py index 558c8aab67c5..0c7cc68730c4 100644 --- a/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner_v1/gapic_version.py +++ b/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "0.0.0" # {x-release-please-version} +__version__ = "0.1.1" # {x-release-please-version} diff --git a/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner_v1/services/cloud_controls_partner_core/async_client.py b/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner_v1/services/cloud_controls_partner_core/async_client.py index fa31b4a7d970..ecfc67fc555a 100644 --- a/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner_v1/services/cloud_controls_partner_core/async_client.py +++ b/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner_v1/services/cloud_controls_partner_core/async_client.py @@ -28,6 +28,7 @@ Type, Union, ) +import warnings from google.api_core import exceptions as core_exceptions from google.api_core import gapic_v1 @@ -979,7 +980,8 @@ async def list_access_approval_requests( timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListAccessApprovalRequestsAsyncPager: - r"""Lists access requests associated with a workload + r"""Deprecated: Only returns access approval requests + directly associated with an assured workload folder. .. code-block:: python @@ -1034,6 +1036,11 @@ async def sample_list_access_approval_requests(): automatically. """ + warnings.warn( + "CloudControlsPartnerCoreAsyncClient.list_access_approval_requests is deprecated", + DeprecationWarning, + ) + # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. diff --git a/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner_v1/services/cloud_controls_partner_core/client.py b/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner_v1/services/cloud_controls_partner_core/client.py index c5d0e17a8def..b48d43f2baf7 100644 --- a/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner_v1/services/cloud_controls_partner_core/client.py +++ b/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner_v1/services/cloud_controls_partner_core/client.py @@ -1471,7 +1471,8 @@ def list_access_approval_requests( timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListAccessApprovalRequestsPager: - r"""Lists access requests associated with a workload + r"""Deprecated: Only returns access approval requests + directly associated with an assured workload folder. .. code-block:: python @@ -1526,6 +1527,11 @@ def sample_list_access_approval_requests(): automatically. """ + warnings.warn( + "CloudControlsPartnerCoreClient.list_access_approval_requests is deprecated", + DeprecationWarning, + ) + # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
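Editor's note: the hunks above add a `DeprecationWarning` to `list_access_approval_requests` on both the sync and async v1 clients. The following is a minimal sketch of how a caller might record that warning during migration; it assumes the `CloudControlsPartnerCoreClient` surface shown in this diff, and the `parent` value and print-based logging are placeholders, not part of this change.

```python
import warnings

from google.cloud import cloudcontrolspartner_v1


def fetch_access_approval_requests(parent: str):
    # "parent" is a placeholder resource name, e.g.
    # "organizations/{organization}/locations/{location}/customers/{customer}/workloads/{workload}".
    client = cloudcontrolspartner_v1.CloudControlsPartnerCoreClient()
    with warnings.catch_warnings(record=True) as caught:
        # Capture the DeprecationWarning emitted by the deprecated method
        # instead of letting it fall through to the default filter.
        warnings.simplefilter("always", DeprecationWarning)
        results = list(client.list_access_approval_requests(parent=parent))
    for warning in caught:
        # Surface the deprecation so the call site shows up in migration audits.
        print(f"deprecated call: {warning.message}")
    return results
```

Filtering on the `DeprecationWarning` category rather than the message text keeps such a shim stable even if the warning wording changes in a later release.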
diff --git a/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner_v1/services/cloud_controls_partner_core/transports/grpc.py b/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner_v1/services/cloud_controls_partner_core/transports/grpc.py index 48dde9254329..c1dcbb133eae 100644 --- a/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner_v1/services/cloud_controls_partner_core/transports/grpc.py +++ b/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner_v1/services/cloud_controls_partner_core/transports/grpc.py @@ -414,7 +414,8 @@ def list_access_approval_requests( ]: r"""Return a callable for the list access approval requests method over gRPC. - Lists access requests associated with a workload + Deprecated: Only returns access approval requests + directly associated with an assured workload folder. Returns: Callable[[~.ListAccessApprovalRequestsRequest], diff --git a/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner_v1/services/cloud_controls_partner_core/transports/grpc_asyncio.py b/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner_v1/services/cloud_controls_partner_core/transports/grpc_asyncio.py index 5af62630566e..606bc2661495 100644 --- a/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner_v1/services/cloud_controls_partner_core/transports/grpc_asyncio.py +++ b/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner_v1/services/cloud_controls_partner_core/transports/grpc_asyncio.py @@ -423,7 +423,8 @@ def list_access_approval_requests( ]: r"""Return a callable for the list access approval requests method over gRPC. - Lists access requests associated with a workload + Deprecated: Only returns access approval requests + directly associated with an assured workload folder. Returns: Callable[[~.ListAccessApprovalRequestsRequest], diff --git a/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner_v1beta/gapic_version.py b/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner_v1beta/gapic_version.py index 558c8aab67c5..0c7cc68730c4 100644 --- a/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner_v1beta/gapic_version.py +++ b/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner_v1beta/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.0.0" # {x-release-please-version} +__version__ = "0.1.1" # {x-release-please-version} diff --git a/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner_v1beta/services/cloud_controls_partner_core/async_client.py b/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner_v1beta/services/cloud_controls_partner_core/async_client.py index a014aeb179cc..520836e24862 100644 --- a/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner_v1beta/services/cloud_controls_partner_core/async_client.py +++ b/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner_v1beta/services/cloud_controls_partner_core/async_client.py @@ -28,6 +28,7 @@ Type, Union, ) +import warnings from google.api_core import exceptions as core_exceptions from google.api_core import gapic_v1 @@ -348,8 +349,7 @@ async def sample_get_workload(): workload. name (:class:`str`): Required. 
Format: - - organizations/{organization}/locations/{location}/customers/{customer}/workloads/{workload} + ``organizations/{organization}/locations/{location}/customers/{customer}/workloads/{workload}`` This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this @@ -455,10 +455,8 @@ async def sample_list_workloads(): request (Optional[Union[google.cloud.cloudcontrolspartner_v1beta.types.ListWorkloadsRequest, dict]]): The request object. Request to list customer workloads. parent (:class:`str`): - Required. Parent resource - Format: - - organizations/{organization}/locations/{location}/customers/{customer} + Required. Parent resource Format: + ``organizations/{organization}/locations/{location}/customers/{customer}`` This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this @@ -575,8 +573,7 @@ async def sample_get_customer(): The request object. Message for getting a customer name (:class:`str`): Required. Format: - - organizations/{organization}/locations/{location}/customers/{customer} + ``organizations/{organization}/locations/{location}/customers/{customer}`` This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this @@ -682,9 +679,8 @@ async def sample_list_customers(): request (Optional[Union[google.cloud.cloudcontrolspartner_v1beta.types.ListCustomersRequest, dict]]): The request object. Request to list customers parent (:class:`str`): - Required. Parent resource - Format: - organizations/{organization}/locations/{location} + Required. Parent resource Format: + ``organizations/{organization}/locations/{location}`` This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this @@ -802,8 +798,7 @@ async def sample_get_ekm_connections(): connections associated with a workload name (:class:`str`): Required. Format: - - organizations/{organization}/locations/{location}/customers/{customer}/workloads/{workload}/ekmConnections + ``organizations/{organization}/locations/{location}/customers/{customer}/workloads/{workload}/ekmConnections`` This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this @@ -910,9 +905,8 @@ async def sample_get_partner_permissions(): The request object. Request for getting the partner permissions granted for a workload name (:class:`str`): - Required. Name of the resource to get - in the format: - organizations/{organization}/locations/{location}/customers/{customer}/workloads/{workload}/partnerPermissions + Required. Name of the resource to get in the format: + ``organizations/{organization}/locations/{location}/customers/{customer}/workloads/{workload}/partnerPermissions`` This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this @@ -986,7 +980,8 @@ async def list_access_approval_requests( timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListAccessApprovalRequestsAsyncPager: - r"""Lists access requests associated with a workload + r"""Deprecated: Only returns access approval requests + directly associated with an assured workload folder. .. code-block:: python @@ -1020,10 +1015,8 @@ async def sample_list_access_approval_requests(): The request object. Request for getting the access requests associated with a workload. parent (:class:`str`): - Required. 
Parent resource - Format: - - organizations/{organization}/locations/{location}/customers/{customer}/workloads/{workload} + Required. Parent resource Format: + ``organizations/{organization}/locations/{location}/customers/{customer}/workloads/{workload}`` This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this @@ -1043,6 +1036,11 @@ async def sample_list_access_approval_requests(): automatically. """ + warnings.warn( + "CloudControlsPartnerCoreAsyncClient.list_access_approval_requests is deprecated", + DeprecationWarning, + ) + # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. @@ -1144,7 +1142,7 @@ async def sample_get_partner(): The request object. Message for getting a Partner name (:class:`str`): Required. Format: - organizations/{organization}/locations/{location}/partner + ``organizations/{organization}/locations/{location}/partner`` This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this diff --git a/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner_v1beta/services/cloud_controls_partner_core/client.py b/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner_v1beta/services/cloud_controls_partner_core/client.py index 34262752babc..edd49f5553d0 100644 --- a/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner_v1beta/services/cloud_controls_partner_core/client.py +++ b/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner_v1beta/services/cloud_controls_partner_core/client.py @@ -858,8 +858,7 @@ def sample_get_workload(): workload. name (str): Required. Format: - - organizations/{organization}/locations/{location}/customers/{customer}/workloads/{workload} + ``organizations/{organization}/locations/{location}/customers/{customer}/workloads/{workload}`` This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this @@ -962,10 +961,8 @@ def sample_list_workloads(): request (Union[google.cloud.cloudcontrolspartner_v1beta.types.ListWorkloadsRequest, dict]): The request object. Request to list customer workloads. parent (str): - Required. Parent resource - Format: - - organizations/{organization}/locations/{location}/customers/{customer} + Required. Parent resource Format: + ``organizations/{organization}/locations/{location}/customers/{customer}`` This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this @@ -1079,8 +1076,7 @@ def sample_get_customer(): The request object. Message for getting a customer name (str): Required. Format: - - organizations/{organization}/locations/{location}/customers/{customer} + ``organizations/{organization}/locations/{location}/customers/{customer}`` This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this @@ -1183,9 +1179,8 @@ def sample_list_customers(): request (Union[google.cloud.cloudcontrolspartner_v1beta.types.ListCustomersRequest, dict]): The request object. Request to list customers parent (str): - Required. Parent resource - Format: - organizations/{organization}/locations/{location} + Required. 
Parent resource Format: + ``organizations/{organization}/locations/{location}`` This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this @@ -1300,8 +1295,7 @@ def sample_get_ekm_connections(): connections associated with a workload name (str): Required. Format: - - organizations/{organization}/locations/{location}/customers/{customer}/workloads/{workload}/ekmConnections + ``organizations/{organization}/locations/{location}/customers/{customer}/workloads/{workload}/ekmConnections`` This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this @@ -1405,9 +1399,8 @@ def sample_get_partner_permissions(): The request object. Request for getting the partner permissions granted for a workload name (str): - Required. Name of the resource to get - in the format: - organizations/{organization}/locations/{location}/customers/{customer}/workloads/{workload}/partnerPermissions + Required. Name of the resource to get in the format: + ``organizations/{organization}/locations/{location}/customers/{customer}/workloads/{workload}/partnerPermissions`` This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this @@ -1478,7 +1471,8 @@ def list_access_approval_requests( timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListAccessApprovalRequestsPager: - r"""Lists access requests associated with a workload + r"""Deprecated: Only returns access approval requests + directly associated with an assured workload folder. .. code-block:: python @@ -1512,10 +1506,8 @@ def sample_list_access_approval_requests(): The request object. Request for getting the access requests associated with a workload. parent (str): - Required. Parent resource - Format: - - organizations/{organization}/locations/{location}/customers/{customer}/workloads/{workload} + Required. Parent resource Format: + ``organizations/{organization}/locations/{location}/customers/{customer}/workloads/{workload}`` This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this @@ -1535,6 +1527,11 @@ def sample_list_access_approval_requests(): automatically. """ + warnings.warn( + "CloudControlsPartnerCoreClient.list_access_approval_requests is deprecated", + DeprecationWarning, + ) + # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. @@ -1635,7 +1632,7 @@ def sample_get_partner(): The request object. Message for getting a Partner name (str): Required. 
Format: - organizations/{organization}/locations/{location}/partner + ``organizations/{organization}/locations/{location}/partner`` This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this diff --git a/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner_v1beta/services/cloud_controls_partner_core/transports/grpc.py b/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner_v1beta/services/cloud_controls_partner_core/transports/grpc.py index b7a9c0943b44..8fb7e775b06a 100644 --- a/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner_v1beta/services/cloud_controls_partner_core/transports/grpc.py +++ b/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner_v1beta/services/cloud_controls_partner_core/transports/grpc.py @@ -414,7 +414,8 @@ def list_access_approval_requests( ]: r"""Return a callable for the list access approval requests method over gRPC. - Lists access requests associated with a workload + Deprecated: Only returns access approval requests + directly associated with an assured workload folder. Returns: Callable[[~.ListAccessApprovalRequestsRequest], diff --git a/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner_v1beta/services/cloud_controls_partner_core/transports/grpc_asyncio.py b/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner_v1beta/services/cloud_controls_partner_core/transports/grpc_asyncio.py index 3c590bdc8be1..a7140ab751fd 100644 --- a/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner_v1beta/services/cloud_controls_partner_core/transports/grpc_asyncio.py +++ b/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner_v1beta/services/cloud_controls_partner_core/transports/grpc_asyncio.py @@ -423,7 +423,8 @@ def list_access_approval_requests( ]: r"""Return a callable for the list access approval requests method over gRPC. - Lists access requests associated with a workload + Deprecated: Only returns access approval requests + directly associated with an assured workload folder. Returns: Callable[[~.ListAccessApprovalRequestsRequest], diff --git a/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner_v1beta/services/cloud_controls_partner_monitoring/async_client.py b/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner_v1beta/services/cloud_controls_partner_monitoring/async_client.py index ec3a078c7131..d2c9d031d1d0 100644 --- a/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner_v1beta/services/cloud_controls_partner_monitoring/async_client.py +++ b/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner_v1beta/services/cloud_controls_partner_monitoring/async_client.py @@ -327,9 +327,8 @@ async def sample_list_violations(): The request object. Message for requesting list of Violations parent (:class:`str`): - Required. Parent resource - Format - organizations/{organization}/locations/{location}/customers/{customer}/workloads/{workload} + Required. Parent resource Format + ``organizations/{organization}/locations/{location}/customers/{customer}/workloads/{workload}`` This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this @@ -446,8 +445,7 @@ async def sample_get_violation(): The request object. Message for getting a Violation name (:class:`str`): Required. 
Format: - - organizations/{organization}/locations/{location}/customers/{customer}/workloads/{workload}/violations/{violation} + ``organizations/{organization}/locations/{location}/customers/{customer}/workloads/{workload}/violations/{violation}`` This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this diff --git a/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner_v1beta/services/cloud_controls_partner_monitoring/client.py b/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner_v1beta/services/cloud_controls_partner_monitoring/client.py index ec551ae02030..52ead189fd6b 100644 --- a/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner_v1beta/services/cloud_controls_partner_monitoring/client.py +++ b/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner_v1beta/services/cloud_controls_partner_monitoring/client.py @@ -755,9 +755,8 @@ def sample_list_violations(): The request object. Message for requesting list of Violations parent (str): - Required. Parent resource - Format - organizations/{organization}/locations/{location}/customers/{customer}/workloads/{workload} + Required. Parent resource Format + ``organizations/{organization}/locations/{location}/customers/{customer}/workloads/{workload}`` This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this @@ -871,8 +870,7 @@ def sample_get_violation(): The request object. Message for getting a Violation name (str): Required. Format: - - organizations/{organization}/locations/{location}/customers/{customer}/workloads/{workload}/violations/{violation} + ``organizations/{organization}/locations/{location}/customers/{customer}/workloads/{workload}/violations/{violation}`` This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this diff --git a/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner_v1beta/types/access_approval_requests.py b/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner_v1beta/types/access_approval_requests.py index be3da5672a5c..5a9acd74ffa2 100644 --- a/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner_v1beta/types/access_approval_requests.py +++ b/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner_v1beta/types/access_approval_requests.py @@ -37,7 +37,7 @@ class AccessApprovalRequest(proto.Message): Attributes: name (str): Identifier. Format: - organizations/{organization}/locations/{location}/customers/{customer}/workloads/{workload}/accessApprovalRequests/{access_approval_request}. + ``organizations/{organization}/locations/{location}/customers/{customer}/workloads/{workload}/accessApprovalRequests/{access_approval_request}`` request_time (google.protobuf.timestamp_pb2.Timestamp): The time at which approval was requested. requested_reason (google.cloud.cloudcontrolspartner_v1beta.types.AccessReason): @@ -77,10 +77,8 @@ class ListAccessApprovalRequestsRequest(proto.Message): Attributes: parent (str): - Required. Parent resource - Format: - - organizations/{organization}/locations/{location}/customers/{customer}/workloads/{workload} + Required. Parent resource Format: + ``organizations/{organization}/locations/{location}/customers/{customer}/workloads/{workload}`` page_size (int): Optional. The maximum number of access requests to return. 
The service may return fewer diff --git a/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner_v1beta/types/customer_workloads.py b/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner_v1beta/types/customer_workloads.py index fd766f002783..c456c7ce49ea 100644 --- a/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner_v1beta/types/customer_workloads.py +++ b/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner_v1beta/types/customer_workloads.py @@ -45,8 +45,7 @@ class Workload(proto.Message): Attributes: name (str): Identifier. Format: - - organizations/{organization}/locations/{location}/customers/{customer}/workloads/{workload} + ``organizations/{organization}/locations/{location}/customers/{customer}/workloads/{workload}`` folder_id (int): Output only. Folder id this workload is associated with @@ -145,10 +144,8 @@ class ListWorkloadsRequest(proto.Message): Attributes: parent (str): - Required. Parent resource - Format: - - organizations/{organization}/locations/{location}/customers/{customer} + Required. Parent resource Format: + ``organizations/{organization}/locations/{location}/customers/{customer}`` page_size (int): The maximum number of workloads to return. The service may return fewer than this value. If @@ -224,8 +221,7 @@ class GetWorkloadRequest(proto.Message): Attributes: name (str): Required. Format: - - organizations/{organization}/locations/{location}/customers/{customer}/workloads/{workload} + ``organizations/{organization}/locations/{location}/customers/{customer}/workloads/{workload}`` """ name: str = proto.Field( diff --git a/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner_v1beta/types/customers.py b/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner_v1beta/types/customers.py index b6d24095bd61..dae25231d4f0 100644 --- a/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner_v1beta/types/customers.py +++ b/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner_v1beta/types/customers.py @@ -43,8 +43,7 @@ class Customer(proto.Message): Attributes: name (str): Identifier. Format: - - organizations/{organization}/locations/{location}/customers/{customer} + ``organizations/{organization}/locations/{location}/customers/{customer}`` display_name (str): The customer organization's display name. E.g. "google.com". @@ -79,9 +78,8 @@ class ListCustomersRequest(proto.Message): Attributes: parent (str): - Required. Parent resource - Format: - organizations/{organization}/locations/{location} + Required. Parent resource Format: + ``organizations/{organization}/locations/{location}`` page_size (int): The maximum number of Customers to return. The service may return fewer than this value. If @@ -157,8 +155,7 @@ class GetCustomerRequest(proto.Message): Attributes: name (str): Required. 
Format: - - organizations/{organization}/locations/{location}/customers/{customer} + ``organizations/{organization}/locations/{location}/customers/{customer}`` """ name: str = proto.Field( diff --git a/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner_v1beta/types/ekm_connections.py b/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner_v1beta/types/ekm_connections.py index 49ab2625cedc..c62fabf8bbcf 100644 --- a/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner_v1beta/types/ekm_connections.py +++ b/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner_v1beta/types/ekm_connections.py @@ -35,8 +35,7 @@ class EkmConnections(proto.Message): Attributes: name (str): Identifier. Format: - - organizations/{organization}/locations/{location}/customers/{customer}/workloads/{workload}/ekmConnections + ``organizations/{organization}/locations/{location}/customers/{customer}/workloads/{workload}/ekmConnections`` ekm_connections (MutableSequence[google.cloud.cloudcontrolspartner_v1beta.types.EkmConnection]): The EKM connections associated with the workload @@ -60,8 +59,7 @@ class GetEkmConnectionsRequest(proto.Message): Attributes: name (str): Required. Format: - - organizations/{organization}/locations/{location}/customers/{customer}/workloads/{workload}/ekmConnections + ``organizations/{organization}/locations/{location}/customers/{customer}/workloads/{workload}/ekmConnections`` """ name: str = proto.Field( diff --git a/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner_v1beta/types/partner_permissions.py b/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner_v1beta/types/partner_permissions.py index ead46bbbb6de..d94dff633d35 100644 --- a/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner_v1beta/types/partner_permissions.py +++ b/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner_v1beta/types/partner_permissions.py @@ -34,8 +34,7 @@ class PartnerPermissions(proto.Message): Attributes: name (str): Identifier. Format: - - organizations/{organization}/locations/{location}/customers/{customer}/workloads/{workload}/partnerPermissions + ``organizations/{organization}/locations/{location}/customers/{customer}/workloads/{workload}/partnerPermissions`` partner_permissions (MutableSequence[google.cloud.cloudcontrolspartner_v1beta.types.PartnerPermissions.Permission]): The partner permissions granted for the workload @@ -82,9 +81,8 @@ class GetPartnerPermissionsRequest(proto.Message): Attributes: name (str): - Required. Name of the resource to get in the - format: - organizations/{organization}/locations/{location}/customers/{customer}/workloads/{workload}/partnerPermissions + Required. 
Name of the resource to get in the format: + ``organizations/{organization}/locations/{location}/customers/{customer}/workloads/{workload}/partnerPermissions`` """ name: str = proto.Field( diff --git a/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner_v1beta/types/partners.py b/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner_v1beta/types/partners.py index b4fa5045e626..60daac5eb67b 100644 --- a/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner_v1beta/types/partners.py +++ b/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner_v1beta/types/partners.py @@ -36,9 +36,8 @@ class Partner(proto.Message): Attributes: name (str): - Identifier. The resource name of the partner. - Format: - organizations/{organization}/locations/{location}/partner + Identifier. The resource name of the partner. Format: + ``organizations/{organization}/locations/{location}/partner`` Example: "organizations/123456/locations/us-central1/partner". skus (MutableSequence[google.cloud.cloudcontrolspartner_v1beta.types.Sku]): @@ -103,7 +102,7 @@ class GetPartnerRequest(proto.Message): Attributes: name (str): Required. Format: - organizations/{organization}/locations/{location}/partner + ``organizations/{organization}/locations/{location}/partner`` """ name: str = proto.Field( diff --git a/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner_v1beta/types/violations.py b/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner_v1beta/types/violations.py index cf63f1c18a66..7302b6add4d4 100644 --- a/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner_v1beta/types/violations.py +++ b/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner_v1beta/types/violations.py @@ -38,8 +38,7 @@ class Violation(proto.Message): Attributes: name (str): Identifier. Format: - - organizations/{organization}/locations/{location}/customers/{customer}/workloads/{workload}/violations/{violation} + ``organizations/{organization}/locations/{location}/customers/{customer}/workloads/{workload}/violations/{violation}`` description (str): Output only. Description for the Violation. e.g. OrgPolicy gcp.resourceLocations has non @@ -64,9 +63,9 @@ class Violation(proto.Message): Output only. Immutable. Name of the OrgPolicy which was modified with non-compliant change and resulted this violation. Format: - projects/{project_number}/policies/{constraint_name} - folders/{folder_id}/policies/{constraint_name} - organizations/{organization_id}/policies/{constraint_name} + ``projects/{project_number}/policies/{constraint_name}`` + ``folders/{folder_id}/policies/{constraint_name}`` + ``organizations/{organization_id}/policies/{constraint_name}`` folder_id (int): The folder_id of the violation remediation (google.cloud.cloudcontrolspartner_v1beta.types.Violation.Remediation): @@ -288,9 +287,8 @@ class ListViolationsRequest(proto.Message): Attributes: parent (str): - Required. Parent resource - Format - organizations/{organization}/locations/{location}/customers/{customer}/workloads/{workload} + Required. Parent resource Format + ``organizations/{organization}/locations/{location}/customers/{customer}/workloads/{workload}`` page_size (int): Optional. The maximum number of customers row to return. The service may return fewer than @@ -378,8 +376,7 @@ class GetViolationRequest(proto.Message): Attributes: name (str): Required. 
Format: - - organizations/{organization}/locations/{location}/customers/{customer}/workloads/{workload}/violations/{violation} + ``organizations/{organization}/locations/{location}/customers/{customer}/workloads/{workload}/violations/{violation}`` """ name: str = proto.Field( diff --git a/packages/google-cloud-cloudcontrolspartner/samples/generated_samples/snippet_metadata_google.cloud.cloudcontrolspartner.v1.json b/packages/google-cloud-cloudcontrolspartner/samples/generated_samples/snippet_metadata_google.cloud.cloudcontrolspartner.v1.json index 5a93fc370b33..200110f94ed2 100644 --- a/packages/google-cloud-cloudcontrolspartner/samples/generated_samples/snippet_metadata_google.cloud.cloudcontrolspartner.v1.json +++ b/packages/google-cloud-cloudcontrolspartner/samples/generated_samples/snippet_metadata_google.cloud.cloudcontrolspartner.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-cloudcontrolspartner", - "version": "0.1.0" + "version": "0.1.1" }, "snippets": [ { diff --git a/packages/google-cloud-cloudcontrolspartner/samples/generated_samples/snippet_metadata_google.cloud.cloudcontrolspartner.v1beta.json b/packages/google-cloud-cloudcontrolspartner/samples/generated_samples/snippet_metadata_google.cloud.cloudcontrolspartner.v1beta.json index 642805220b5c..eb9ef4093266 100644 --- a/packages/google-cloud-cloudcontrolspartner/samples/generated_samples/snippet_metadata_google.cloud.cloudcontrolspartner.v1beta.json +++ b/packages/google-cloud-cloudcontrolspartner/samples/generated_samples/snippet_metadata_google.cloud.cloudcontrolspartner.v1beta.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-cloudcontrolspartner", - "version": "0.1.0" + "version": "0.1.1" }, "snippets": [ { diff --git a/packages/google-cloud-cloudcontrolspartner/tests/unit/gapic/cloudcontrolspartner_v1/test_cloud_controls_partner_core.py b/packages/google-cloud-cloudcontrolspartner/tests/unit/gapic/cloudcontrolspartner_v1/test_cloud_controls_partner_core.py index 92e02550f8bd..da322c990c73 100644 --- a/packages/google-cloud-cloudcontrolspartner/tests/unit/gapic/cloudcontrolspartner_v1/test_cloud_controls_partner_core.py +++ b/packages/google-cloud-cloudcontrolspartner/tests/unit/gapic/cloudcontrolspartner_v1/test_cloud_controls_partner_core.py @@ -2025,13 +2025,13 @@ def test_list_workloads_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_workloads(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -2970,13 +2970,13 @@ def test_list_customers_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_customers(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -4372,13 +4372,13 @@ def test_list_access_approval_requests_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = 
client.list_access_approval_requests(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 diff --git a/packages/google-cloud-cloudcontrolspartner/tests/unit/gapic/cloudcontrolspartner_v1/test_cloud_controls_partner_monitoring.py b/packages/google-cloud-cloudcontrolspartner/tests/unit/gapic/cloudcontrolspartner_v1/test_cloud_controls_partner_monitoring.py index 605fa1db9812..de2c6ce420dc 100644 --- a/packages/google-cloud-cloudcontrolspartner/tests/unit/gapic/cloudcontrolspartner_v1/test_cloud_controls_partner_monitoring.py +++ b/packages/google-cloud-cloudcontrolspartner/tests/unit/gapic/cloudcontrolspartner_v1/test_cloud_controls_partner_monitoring.py @@ -1632,13 +1632,13 @@ def test_list_violations_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_violations(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 diff --git a/packages/google-cloud-cloudcontrolspartner/tests/unit/gapic/cloudcontrolspartner_v1beta/test_cloud_controls_partner_core.py b/packages/google-cloud-cloudcontrolspartner/tests/unit/gapic/cloudcontrolspartner_v1beta/test_cloud_controls_partner_core.py index b63d351062dc..b2eecf90fb4d 100644 --- a/packages/google-cloud-cloudcontrolspartner/tests/unit/gapic/cloudcontrolspartner_v1beta/test_cloud_controls_partner_core.py +++ b/packages/google-cloud-cloudcontrolspartner/tests/unit/gapic/cloudcontrolspartner_v1beta/test_cloud_controls_partner_core.py @@ -2025,13 +2025,13 @@ def test_list_workloads_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_workloads(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -2970,13 +2970,13 @@ def test_list_customers_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_customers(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -4372,13 +4372,13 @@ def test_list_access_approval_requests_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_access_approval_requests(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 diff --git a/packages/google-cloud-cloudcontrolspartner/tests/unit/gapic/cloudcontrolspartner_v1beta/test_cloud_controls_partner_monitoring.py b/packages/google-cloud-cloudcontrolspartner/tests/unit/gapic/cloudcontrolspartner_v1beta/test_cloud_controls_partner_monitoring.py index 435ed4182b45..b405a28d823e 100644 --- 
a/packages/google-cloud-cloudcontrolspartner/tests/unit/gapic/cloudcontrolspartner_v1beta/test_cloud_controls_partner_monitoring.py +++ b/packages/google-cloud-cloudcontrolspartner/tests/unit/gapic/cloudcontrolspartner_v1beta/test_cloud_controls_partner_monitoring.py @@ -1632,13 +1632,13 @@ def test_list_violations_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_violations(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 diff --git a/packages/google-cloud-cloudquotas/tests/unit/gapic/cloudquotas_v1/test_cloud_quotas.py b/packages/google-cloud-cloudquotas/tests/unit/gapic/cloudquotas_v1/test_cloud_quotas.py index 38ff09d972af..05d7f5823171 100644 --- a/packages/google-cloud-cloudquotas/tests/unit/gapic/cloudquotas_v1/test_cloud_quotas.py +++ b/packages/google-cloud-cloudquotas/tests/unit/gapic/cloudquotas_v1/test_cloud_quotas.py @@ -1492,13 +1492,13 @@ def test_list_quota_infos_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_quota_infos(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -2518,13 +2518,13 @@ def test_list_quota_preferences_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_quota_preferences(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 diff --git a/packages/google-cloud-commerce-consumer-procurement/tests/unit/gapic/commerce_consumer_procurement_v1/test_consumer_procurement_service.py b/packages/google-cloud-commerce-consumer-procurement/tests/unit/gapic/commerce_consumer_procurement_v1/test_consumer_procurement_service.py index d9093caa15fa..224e2fd662ef 100644 --- a/packages/google-cloud-commerce-consumer-procurement/tests/unit/gapic/commerce_consumer_procurement_v1/test_consumer_procurement_service.py +++ b/packages/google-cloud-commerce-consumer-procurement/tests/unit/gapic/commerce_consumer_procurement_v1/test_consumer_procurement_service.py @@ -2288,13 +2288,13 @@ def test_list_orders_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_orders(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 diff --git a/packages/google-cloud-commerce-consumer-procurement/tests/unit/gapic/commerce_consumer_procurement_v1alpha1/test_consumer_procurement_service.py b/packages/google-cloud-commerce-consumer-procurement/tests/unit/gapic/commerce_consumer_procurement_v1alpha1/test_consumer_procurement_service.py index 23f1028f5c21..6934de212878 100644 --- 
a/packages/google-cloud-commerce-consumer-procurement/tests/unit/gapic/commerce_consumer_procurement_v1alpha1/test_consumer_procurement_service.py +++ b/packages/google-cloud-commerce-consumer-procurement/tests/unit/gapic/commerce_consumer_procurement_v1alpha1/test_consumer_procurement_service.py @@ -2288,13 +2288,13 @@ def test_list_orders_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_orders(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 diff --git a/packages/google-cloud-compute/google/cloud/compute/gapic_version.py b/packages/google-cloud-compute/google/cloud/compute/gapic_version.py index f1337c609ff8..558c8aab67c5 100644 --- a/packages/google-cloud-compute/google/cloud/compute/gapic_version.py +++ b/packages/google-cloud-compute/google/cloud/compute/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "1.19.0" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/gapic_version.py b/packages/google-cloud-compute/google/cloud/compute_v1/gapic_version.py index f1337c609ff8..558c8aab67c5 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/gapic_version.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "1.19.0" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-compute/samples/generated_samples/snippet_metadata_google.cloud.compute.v1.json b/packages/google-cloud-compute/samples/generated_samples/snippet_metadata_google.cloud.compute.v1.json index dd9b34147f0f..8a5a8ea62778 100644 --- a/packages/google-cloud-compute/samples/generated_samples/snippet_metadata_google.cloud.compute.v1.json +++ b/packages/google-cloud-compute/samples/generated_samples/snippet_metadata_google.cloud.compute.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-compute", - "version": "1.19.0" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-config/tests/unit/gapic/config_v1/test_config.py b/packages/google-cloud-config/tests/unit/gapic/config_v1/test_config.py index f21dd24ced01..8cd327d711e8 100644 --- a/packages/google-cloud-config/tests/unit/gapic/config_v1/test_config.py +++ b/packages/google-cloud-config/tests/unit/gapic/config_v1/test_config.py @@ -1486,13 +1486,13 @@ def test_list_deployments_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_deployments(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -3739,13 +3739,13 @@ def test_list_revisions_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_revisions(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -5123,13 +5123,13 @@ def test_list_resources_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_resources(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -9015,13 +9015,13 @@ def test_list_previews_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_previews(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -10277,13 +10277,13 @@ def test_list_terraform_versions_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_terraform_versions(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 diff --git a/packages/google-cloud-contact-center-insights/tests/unit/gapic/contact_center_insights_v1/test_contact_center_insights.py 
b/packages/google-cloud-contact-center-insights/tests/unit/gapic/contact_center_insights_v1/test_contact_center_insights.py index 548735e905c5..5373e6f514ca 100644 --- a/packages/google-cloud-contact-center-insights/tests/unit/gapic/contact_center_insights_v1/test_contact_center_insights.py +++ b/packages/google-cloud-contact-center-insights/tests/unit/gapic/contact_center_insights_v1/test_contact_center_insights.py @@ -3229,13 +3229,13 @@ def test_list_conversations_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_conversations(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -4921,13 +4921,13 @@ def test_list_analyses_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_analyses(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -12965,13 +12965,13 @@ def test_list_phrase_matchers_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_phrase_matchers(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -16221,13 +16221,13 @@ def test_list_views_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_views(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 diff --git a/packages/google-cloud-container/CHANGELOG.md b/packages/google-cloud-container/CHANGELOG.md index 227923307c78..ed54512ce230 100644 --- a/packages/google-cloud-container/CHANGELOG.md +++ b/packages/google-cloud-container/CHANGELOG.md @@ -4,6 +4,53 @@ [1]: https://pypi.org/project/google-cloud-container/#history +## [2.47.0](https://github.com/googleapis/google-cloud-python/compare/google-cloud-container-v2.46.0...google-cloud-container-v2.47.0) (2024-06-10) + + +### Features + +* A new field `accelerators` is added to message `.google.container.v1.UpdateNodePoolRequest` ([0d738fa](https://github.com/googleapis/google-cloud-python/commit/0d738fa1a8751a1cee2071c7af187e2d08b1a889)) +* A new field `additive_vpc_scope_dns_domain` is added to message `.google.container.v1.DNSConfig` ([0d738fa](https://github.com/googleapis/google-cloud-python/commit/0d738fa1a8751a1cee2071c7af187e2d08b1a889)) +* A new field `containerd_config` is added to message `.google.container.v1.NodeConfig` 
([0d738fa](https://github.com/googleapis/google-cloud-python/commit/0d738fa1a8751a1cee2071c7af187e2d08b1a889)) +* A new field `containerd_config` is added to message `.google.container.v1.NodeConfigDefaults` ([0d738fa](https://github.com/googleapis/google-cloud-python/commit/0d738fa1a8751a1cee2071c7af187e2d08b1a889)) +* A new field `containerd_config` is added to message `.google.container.v1.UpdateNodePoolRequest` ([0d738fa](https://github.com/googleapis/google-cloud-python/commit/0d738fa1a8751a1cee2071c7af187e2d08b1a889)) +* A new field `desired_containerd_config` is added to message `.google.container.v1.ClusterUpdate` ([0d738fa](https://github.com/googleapis/google-cloud-python/commit/0d738fa1a8751a1cee2071c7af187e2d08b1a889)) +* A new field `desired_node_kubelet_config` is added to message `.google.container.v1.ClusterUpdate` ([0d738fa](https://github.com/googleapis/google-cloud-python/commit/0d738fa1a8751a1cee2071c7af187e2d08b1a889)) +* A new field `desired_node_pool_auto_config_kubelet_config` is added to message `.google.container.v1.ClusterUpdate` ([0d738fa](https://github.com/googleapis/google-cloud-python/commit/0d738fa1a8751a1cee2071c7af187e2d08b1a889)) +* A new field `enable_nested_virtualization` is added to message `.google.container.v1.AdvancedMachineFeatures` ([0d738fa](https://github.com/googleapis/google-cloud-python/commit/0d738fa1a8751a1cee2071c7af187e2d08b1a889)) +* A new field `hugepages` is added to message `.google.container.v1.LinuxNodeConfig` ([0d738fa](https://github.com/googleapis/google-cloud-python/commit/0d738fa1a8751a1cee2071c7af187e2d08b1a889)) +* A new field `node_kubelet_config` is added to message `.google.container.v1.NodeConfigDefaults` ([0d738fa](https://github.com/googleapis/google-cloud-python/commit/0d738fa1a8751a1cee2071c7af187e2d08b1a889)) +* A new field `node_kubelet_config` is added to message `.google.container.v1.NodePoolAutoConfig` ([0d738fa](https://github.com/googleapis/google-cloud-python/commit/0d738fa1a8751a1cee2071c7af187e2d08b1a889)) +* A new field `satisfies_pzi` is added to message `.google.container.v1.Cluster` ([0d738fa](https://github.com/googleapis/google-cloud-python/commit/0d738fa1a8751a1cee2071c7af187e2d08b1a889)) +* A new field `satisfies_pzs` is added to message `.google.container.v1.Cluster` ([0d738fa](https://github.com/googleapis/google-cloud-python/commit/0d738fa1a8751a1cee2071c7af187e2d08b1a889)) +* A new message `ContainerdConfig` is added ([0d738fa](https://github.com/googleapis/google-cloud-python/commit/0d738fa1a8751a1cee2071c7af187e2d08b1a889)) +* A new message `HugepagesConfig` is added ([0d738fa](https://github.com/googleapis/google-cloud-python/commit/0d738fa1a8751a1cee2071c7af187e2d08b1a889)) +* A new method_signature `parent` is added to method `ListOperations` in 
service `ClusterManager` ([0d738fa](https://github.com/googleapis/google-cloud-python/commit/0d738fa1a8751a1cee2071c7af187e2d08b1a889)) +* A new value `CADVISOR` is added to enum `Component` ([0d738fa](https://github.com/googleapis/google-cloud-python/commit/0d738fa1a8751a1cee2071c7af187e2d08b1a889)) +* A new value `ENTERPRISE` is added to enum `Mode` ([0d738fa](https://github.com/googleapis/google-cloud-python/commit/0d738fa1a8751a1cee2071c7af187e2d08b1a889)) +* A new value `KUBELET` is added to enum `Component` ([0d738fa](https://github.com/googleapis/google-cloud-python/commit/0d738fa1a8751a1cee2071c7af187e2d08b1a889)) +* A new value `MPS` is added to enum `GPUSharingStrategy` ([0d738fa](https://github.com/googleapis/google-cloud-python/commit/0d738fa1a8751a1cee2071c7af187e2d08b1a889)) +* Enable REST transport for google/container/v1 ([0d738fa](https://github.com/googleapis/google-cloud-python/commit/0d738fa1a8751a1cee2071c7af187e2d08b1a889)) + + +### Documentation + +* A comment for field `desired_private_cluster_config` in message `.google.container.v1.ClusterUpdate` is changed ([0d738fa](https://github.com/googleapis/google-cloud-python/commit/0d738fa1a8751a1cee2071c7af187e2d08b1a889)) +* A comment for field `in_transit_encryption_config` in message `.google.container.v1.NetworkConfig` is changed ([0d738fa](https://github.com/googleapis/google-cloud-python/commit/0d738fa1a8751a1cee2071c7af187e2d08b1a889)) + +## [2.46.0](https://github.com/googleapis/google-cloud-python/compare/google-cloud-container-v2.45.0...google-cloud-container-v2.46.0) (2024-05-29) + + +### Features + +* A new message `HugepagesConfig` is added ([e0c6241](https://github.com/googleapis/google-cloud-python/commit/e0c6241e3c93cba3529288744fd73cc1cd1dfcb0)) + + +### Documentation + +* A comment for field `desired_in_transit_encryption_config` in message `.google.container.v1beta1.ClusterUpdate` is changed ([e0c6241](https://github.com/googleapis/google-cloud-python/commit/e0c6241e3c93cba3529288744fd73cc1cd1dfcb0)) +* A comment for field `desired_private_cluster_config` in message `.google.container.v1beta1.ClusterUpdate` is changed ([e0c6241](https://github.com/googleapis/google-cloud-python/commit/e0c6241e3c93cba3529288744fd73cc1cd1dfcb0)) + ## [2.45.0](https://github.com/googleapis/google-cloud-python/compare/google-cloud-container-v2.44.0...google-cloud-container-v2.45.0) (2024-03-27) diff --git a/packages/google-cloud-container/google/cloud/container/__init__.py b/packages/google-cloud-container/google/cloud/container/__init__.py index 032b7b38ef3c..24c059070a7e 100644 --- a/packages/google-cloud-container/google/cloud/container/__init__.py +++ b/packages/google-cloud-container/google/cloud/container/__init__.py @@ -52,6 +52,7 @@ CompleteNodePoolUpgradeRequest, ConfidentialNodes, ConfigConnectorConfig, + ContainerdConfig, 
CostManagementConfig, CreateClusterRequest, CreateNodePoolRequest, @@ -225,6 +226,7 @@ "CompleteNodePoolUpgradeRequest", "ConfidentialNodes", "ConfigConnectorConfig", + "ContainerdConfig", "CostManagementConfig", "CreateClusterRequest", "CreateNodePoolRequest", diff --git a/packages/google-cloud-container/google/cloud/container/gapic_version.py b/packages/google-cloud-container/google/cloud/container/gapic_version.py index 558c8aab67c5..4411a7dddf3d 100644 --- a/packages/google-cloud-container/google/cloud/container/gapic_version.py +++ b/packages/google-cloud-container/google/cloud/container/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.0.0" # {x-release-please-version} +__version__ = "2.47.0" # {x-release-please-version} diff --git a/packages/google-cloud-container/google/cloud/container_v1/__init__.py b/packages/google-cloud-container/google/cloud/container_v1/__init__.py index c2400f9eed51..bc633a615e54 100644 --- a/packages/google-cloud-container/google/cloud/container_v1/__init__.py +++ b/packages/google-cloud-container/google/cloud/container_v1/__init__.py @@ -47,6 +47,7 @@ CompleteNodePoolUpgradeRequest, ConfidentialNodes, ConfigConnectorConfig, + ContainerdConfig, CostManagementConfig, CreateClusterRequest, CreateNodePoolRequest, @@ -220,6 +221,7 @@ "CompleteNodePoolUpgradeRequest", "ConfidentialNodes", "ConfigConnectorConfig", + "ContainerdConfig", "CostManagementConfig", "CreateClusterRequest", "CreateNodePoolRequest", diff --git a/packages/google-cloud-container/google/cloud/container_v1/gapic_metadata.json b/packages/google-cloud-container/google/cloud/container_v1/gapic_metadata.json index 85cb487b948d..0183acedf64a 100644 --- a/packages/google-cloud-container/google/cloud/container_v1/gapic_metadata.json +++ b/packages/google-cloud-container/google/cloud/container_v1/gapic_metadata.json @@ -356,6 +356,181 @@ ] } } + }, + "rest": { + "libraryClient": "ClusterManagerClient", + "rpcs": { + "CancelOperation": { + "methods": [ + "cancel_operation" + ] + }, + "CheckAutopilotCompatibility": { + "methods": [ + "check_autopilot_compatibility" + ] + }, + "CompleteIPRotation": { + "methods": [ + "complete_ip_rotation" + ] + }, + "CompleteNodePoolUpgrade": { + "methods": [ + "complete_node_pool_upgrade" + ] + }, + "CreateCluster": { + "methods": [ + "create_cluster" + ] + }, + "CreateNodePool": { + "methods": [ + "create_node_pool" + ] + }, + "DeleteCluster": { + "methods": [ + "delete_cluster" + ] + }, + "DeleteNodePool": { + "methods": [ + "delete_node_pool" + ] + }, + "GetCluster": { + "methods": [ + "get_cluster" + ] + }, + "GetJSONWebKeys": { + "methods": [ + "get_json_web_keys" + ] + }, + "GetNodePool": { + "methods": [ + "get_node_pool" + ] + }, + "GetOperation": { + "methods": [ + "get_operation" + ] + }, + "GetServerConfig": { + "methods": [ + "get_server_config" + ] + }, + "ListClusters": { + "methods": [ + "list_clusters" + ] + }, + "ListNodePools": { + "methods": [ + "list_node_pools" + ] + }, + "ListOperations": { + "methods": [ + "list_operations" + ] + }, + "ListUsableSubnetworks": { + "methods": [ + "list_usable_subnetworks" + ] + }, + "RollbackNodePoolUpgrade": { + "methods": [ + "rollback_node_pool_upgrade" + ] + }, + "SetAddonsConfig": { + "methods": [ + "set_addons_config" + ] + }, + "SetLabels": { + "methods": [ + "set_labels" + ] + }, + "SetLegacyAbac": { + "methods": [ + "set_legacy_abac" + ] + }, + "SetLocations": { + "methods": [ + "set_locations" + ] 
+ }, + "SetLoggingService": { + "methods": [ + "set_logging_service" + ] + }, + "SetMaintenancePolicy": { + "methods": [ + "set_maintenance_policy" + ] + }, + "SetMasterAuth": { + "methods": [ + "set_master_auth" + ] + }, + "SetMonitoringService": { + "methods": [ + "set_monitoring_service" + ] + }, + "SetNetworkPolicy": { + "methods": [ + "set_network_policy" + ] + }, + "SetNodePoolAutoscaling": { + "methods": [ + "set_node_pool_autoscaling" + ] + }, + "SetNodePoolManagement": { + "methods": [ + "set_node_pool_management" + ] + }, + "SetNodePoolSize": { + "methods": [ + "set_node_pool_size" + ] + }, + "StartIPRotation": { + "methods": [ + "start_ip_rotation" + ] + }, + "UpdateCluster": { + "methods": [ + "update_cluster" + ] + }, + "UpdateMaster": { + "methods": [ + "update_master" + ] + }, + "UpdateNodePool": { + "methods": [ + "update_node_pool" + ] + } + } } } } diff --git a/packages/google-cloud-container/google/cloud/container_v1/gapic_version.py b/packages/google-cloud-container/google/cloud/container_v1/gapic_version.py index 558c8aab67c5..4411a7dddf3d 100644 --- a/packages/google-cloud-container/google/cloud/container_v1/gapic_version.py +++ b/packages/google-cloud-container/google/cloud/container_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.0.0" # {x-release-please-version} +__version__ = "2.47.0" # {x-release-please-version} diff --git a/packages/google-cloud-container/google/cloud/container_v1/services/cluster_manager/async_client.py b/packages/google-cloud-container/google/cloud/container_v1/services/cluster_manager/async_client.py index e81399d59198..83493ddbf1e7 100644 --- a/packages/google-cloud-container/google/cloud/container_v1/services/cluster_manager/async_client.py +++ b/packages/google-cloud-container/google/cloud/container_v1/services/cluster_manager/async_client.py @@ -2109,6 +2109,7 @@ async def list_operations( *, project_id: Optional[str] = None, zone: Optional[str] = None, + parent: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), @@ -2165,6 +2166,15 @@ async def sample_list_operations(): This corresponds to the ``zone`` field on the ``request`` instance; if ``request`` is provided, this should not be set. + parent (:class:`str`): + The parent (project and location) where the operations + will be listed. Specified in the format + ``projects/*/locations/*``. Location "-" matches all + zones and all regions. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -2180,7 +2190,7 @@ async def sample_list_operations(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([project_id, zone]) + has_flattened_params = any([project_id, zone, parent]) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -2198,6 +2208,8 @@ async def sample_list_operations(): request.project_id = project_id if zone is not None: request.zone = zone + if parent is not None: + request.parent = parent # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. diff --git a/packages/google-cloud-container/google/cloud/container_v1/services/cluster_manager/client.py b/packages/google-cloud-container/google/cloud/container_v1/services/cluster_manager/client.py index 9339a7dc2a78..3f0f77619d98 100644 --- a/packages/google-cloud-container/google/cloud/container_v1/services/cluster_manager/client.py +++ b/packages/google-cloud-container/google/cloud/container_v1/services/cluster_manager/client.py @@ -56,6 +56,7 @@ from .transports.base import DEFAULT_CLIENT_INFO, ClusterManagerTransport from .transports.grpc import ClusterManagerGrpcTransport from .transports.grpc_asyncio import ClusterManagerGrpcAsyncIOTransport +from .transports.rest import ClusterManagerRestTransport class ClusterManagerClientMeta(type): @@ -71,6 +72,7 @@ class ClusterManagerClientMeta(type): ) # type: Dict[str, Type[ClusterManagerTransport]] _transport_registry["grpc"] = ClusterManagerGrpcTransport _transport_registry["grpc_asyncio"] = ClusterManagerGrpcAsyncIOTransport + _transport_registry["rest"] = ClusterManagerRestTransport def get_transport_class( cls, @@ -2487,6 +2489,7 @@ def list_operations( *, project_id: Optional[str] = None, zone: Optional[str] = None, + parent: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), @@ -2543,6 +2546,15 @@ def sample_list_operations(): This corresponds to the ``zone`` field on the ``request`` instance; if ``request`` is provided, this should not be set. + parent (str): + The parent (project and location) where the operations + will be listed. Specified in the format + ``projects/*/locations/*``. Location "-" matches all + zones and all regions. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -2558,7 +2570,7 @@ def sample_list_operations(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([project_id, zone]) + has_flattened_params = any([project_id, zone, parent]) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -2575,6 +2587,8 @@ def sample_list_operations(): request.project_id = project_id if zone is not None: request.zone = zone + if parent is not None: + request.parent = parent # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. 
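The hunks above give `list_operations` a flattened `parent` argument as an alternative to `project_id`/`zone`. Below is a minimal usage sketch (not part of the diff): the project and location values are placeholders, and per the new docstring a location of `"-"` matches all zones and regions.

```python
# Minimal sketch of the new flattened `parent` argument; "my-project" and
# "us-central1" are placeholder values, and a full `request` object cannot be
# combined with `parent` per the has_flattened_params check above.
from google.cloud import container_v1

client = container_v1.ClusterManagerClient()

response = client.list_operations(parent="projects/my-project/locations/us-central1")

for operation in response.operations:
    print(operation.name, operation.status)
```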
diff --git a/packages/google-cloud-container/google/cloud/container_v1/services/cluster_manager/transports/__init__.py b/packages/google-cloud-container/google/cloud/container_v1/services/cluster_manager/transports/__init__.py index eb804a437220..baa849672506 100644 --- a/packages/google-cloud-container/google/cloud/container_v1/services/cluster_manager/transports/__init__.py +++ b/packages/google-cloud-container/google/cloud/container_v1/services/cluster_manager/transports/__init__.py @@ -19,14 +19,18 @@ from .base import ClusterManagerTransport from .grpc import ClusterManagerGrpcTransport from .grpc_asyncio import ClusterManagerGrpcAsyncIOTransport +from .rest import ClusterManagerRestInterceptor, ClusterManagerRestTransport # Compile a registry of transports. _transport_registry = OrderedDict() # type: Dict[str, Type[ClusterManagerTransport]] _transport_registry["grpc"] = ClusterManagerGrpcTransport _transport_registry["grpc_asyncio"] = ClusterManagerGrpcAsyncIOTransport +_transport_registry["rest"] = ClusterManagerRestTransport __all__ = ( "ClusterManagerTransport", "ClusterManagerGrpcTransport", "ClusterManagerGrpcAsyncIOTransport", + "ClusterManagerRestTransport", + "ClusterManagerRestInterceptor", ) diff --git a/packages/google-cloud-container/google/cloud/container_v1/services/cluster_manager/transports/rest.py b/packages/google-cloud-container/google/cloud/container_v1/services/cluster_manager/transports/rest.py new file mode 100644 index 000000000000..c7301b3c4773 --- /dev/null +++ b/packages/google-cloud-container/google/cloud/container_v1/services/cluster_manager/transports/rest.py @@ -0,0 +1,4698 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +import dataclasses +import json # type: ignore +import re +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, path_template, rest_helpers, rest_streaming +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.transport.requests import AuthorizedSession # type: ignore +from google.protobuf import json_format +import grpc # type: ignore +from requests import __version__ as requests_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + + +from google.protobuf import empty_pb2 # type: ignore + +from google.cloud.container_v1.types import cluster_service + +from .base import ClusterManagerTransport +from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) + + +class ClusterManagerRestInterceptor: + """Interceptor for ClusterManager. + + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. + Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the ClusterManagerRestTransport. + + .. 
code-block:: python + class MyCustomClusterManagerInterceptor(ClusterManagerRestInterceptor): + def pre_cancel_operation(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def pre_check_autopilot_compatibility(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_check_autopilot_compatibility(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_complete_ip_rotation(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_complete_ip_rotation(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_complete_node_pool_upgrade(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def pre_create_cluster(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_create_cluster(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_create_node_pool(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_create_node_pool(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_delete_cluster(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_delete_cluster(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_delete_node_pool(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_delete_node_pool(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_cluster(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_cluster(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_json_web_keys(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_json_web_keys(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_node_pool(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_node_pool(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_operation(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_operation(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_server_config(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_server_config(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_clusters(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_clusters(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_node_pools(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_node_pools(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_operations(self, request, metadata): + 
logging.log(f"Received request: {request}") + return request, metadata + + def post_list_operations(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_usable_subnetworks(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_usable_subnetworks(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_rollback_node_pool_upgrade(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_rollback_node_pool_upgrade(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_set_addons_config(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_set_addons_config(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_set_labels(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_set_labels(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_set_legacy_abac(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_set_legacy_abac(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_set_locations(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_set_locations(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_set_logging_service(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_set_logging_service(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_set_maintenance_policy(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_set_maintenance_policy(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_set_master_auth(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_set_master_auth(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_set_monitoring_service(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_set_monitoring_service(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_set_network_policy(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_set_network_policy(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_set_node_pool_autoscaling(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_set_node_pool_autoscaling(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_set_node_pool_management(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_set_node_pool_management(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_set_node_pool_size(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def 
post_set_node_pool_size(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_start_ip_rotation(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_start_ip_rotation(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_update_cluster(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_update_cluster(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_update_master(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_update_master(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_update_node_pool(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_update_node_pool(self, response): + logging.log(f"Received response: {response}") + return response + + transport = ClusterManagerRestTransport(interceptor=MyCustomClusterManagerInterceptor()) + client = ClusterManagerClient(transport=transport) + + + """ + + def pre_cancel_operation( + self, + request: cluster_service.CancelOperationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[cluster_service.CancelOperationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for cancel_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the ClusterManager server. + """ + return request, metadata + + def pre_check_autopilot_compatibility( + self, + request: cluster_service.CheckAutopilotCompatibilityRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + cluster_service.CheckAutopilotCompatibilityRequest, Sequence[Tuple[str, str]] + ]: + """Pre-rpc interceptor for check_autopilot_compatibility + + Override in a subclass to manipulate the request or metadata + before they are sent to the ClusterManager server. + """ + return request, metadata + + def post_check_autopilot_compatibility( + self, response: cluster_service.CheckAutopilotCompatibilityResponse + ) -> cluster_service.CheckAutopilotCompatibilityResponse: + """Post-rpc interceptor for check_autopilot_compatibility + + Override in a subclass to manipulate the response + after it is returned by the ClusterManager server but before + it is returned to user code. + """ + return response + + def pre_complete_ip_rotation( + self, + request: cluster_service.CompleteIPRotationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[cluster_service.CompleteIPRotationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for complete_ip_rotation + + Override in a subclass to manipulate the request or metadata + before they are sent to the ClusterManager server. + """ + return request, metadata + + def post_complete_ip_rotation( + self, response: cluster_service.Operation + ) -> cluster_service.Operation: + """Post-rpc interceptor for complete_ip_rotation + + Override in a subclass to manipulate the response + after it is returned by the ClusterManager server but before + it is returned to user code. 
+ """ + return response + + def pre_complete_node_pool_upgrade( + self, + request: cluster_service.CompleteNodePoolUpgradeRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + cluster_service.CompleteNodePoolUpgradeRequest, Sequence[Tuple[str, str]] + ]: + """Pre-rpc interceptor for complete_node_pool_upgrade + + Override in a subclass to manipulate the request or metadata + before they are sent to the ClusterManager server. + """ + return request, metadata + + def pre_create_cluster( + self, + request: cluster_service.CreateClusterRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[cluster_service.CreateClusterRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for create_cluster + + Override in a subclass to manipulate the request or metadata + before they are sent to the ClusterManager server. + """ + return request, metadata + + def post_create_cluster( + self, response: cluster_service.Operation + ) -> cluster_service.Operation: + """Post-rpc interceptor for create_cluster + + Override in a subclass to manipulate the response + after it is returned by the ClusterManager server but before + it is returned to user code. + """ + return response + + def pre_create_node_pool( + self, + request: cluster_service.CreateNodePoolRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[cluster_service.CreateNodePoolRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for create_node_pool + + Override in a subclass to manipulate the request or metadata + before they are sent to the ClusterManager server. + """ + return request, metadata + + def post_create_node_pool( + self, response: cluster_service.Operation + ) -> cluster_service.Operation: + """Post-rpc interceptor for create_node_pool + + Override in a subclass to manipulate the response + after it is returned by the ClusterManager server but before + it is returned to user code. + """ + return response + + def pre_delete_cluster( + self, + request: cluster_service.DeleteClusterRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[cluster_service.DeleteClusterRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for delete_cluster + + Override in a subclass to manipulate the request or metadata + before they are sent to the ClusterManager server. + """ + return request, metadata + + def post_delete_cluster( + self, response: cluster_service.Operation + ) -> cluster_service.Operation: + """Post-rpc interceptor for delete_cluster + + Override in a subclass to manipulate the response + after it is returned by the ClusterManager server but before + it is returned to user code. + """ + return response + + def pre_delete_node_pool( + self, + request: cluster_service.DeleteNodePoolRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[cluster_service.DeleteNodePoolRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for delete_node_pool + + Override in a subclass to manipulate the request or metadata + before they are sent to the ClusterManager server. + """ + return request, metadata + + def post_delete_node_pool( + self, response: cluster_service.Operation + ) -> cluster_service.Operation: + """Post-rpc interceptor for delete_node_pool + + Override in a subclass to manipulate the response + after it is returned by the ClusterManager server but before + it is returned to user code. 
+ """ + return response + + def pre_get_cluster( + self, + request: cluster_service.GetClusterRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[cluster_service.GetClusterRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_cluster + + Override in a subclass to manipulate the request or metadata + before they are sent to the ClusterManager server. + """ + return request, metadata + + def post_get_cluster( + self, response: cluster_service.Cluster + ) -> cluster_service.Cluster: + """Post-rpc interceptor for get_cluster + + Override in a subclass to manipulate the response + after it is returned by the ClusterManager server but before + it is returned to user code. + """ + return response + + def pre_get_json_web_keys( + self, + request: cluster_service.GetJSONWebKeysRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[cluster_service.GetJSONWebKeysRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_json_web_keys + + Override in a subclass to manipulate the request or metadata + before they are sent to the ClusterManager server. + """ + return request, metadata + + def post_get_json_web_keys( + self, response: cluster_service.GetJSONWebKeysResponse + ) -> cluster_service.GetJSONWebKeysResponse: + """Post-rpc interceptor for get_json_web_keys + + Override in a subclass to manipulate the response + after it is returned by the ClusterManager server but before + it is returned to user code. + """ + return response + + def pre_get_node_pool( + self, + request: cluster_service.GetNodePoolRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[cluster_service.GetNodePoolRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_node_pool + + Override in a subclass to manipulate the request or metadata + before they are sent to the ClusterManager server. + """ + return request, metadata + + def post_get_node_pool( + self, response: cluster_service.NodePool + ) -> cluster_service.NodePool: + """Post-rpc interceptor for get_node_pool + + Override in a subclass to manipulate the response + after it is returned by the ClusterManager server but before + it is returned to user code. + """ + return response + + def pre_get_operation( + self, + request: cluster_service.GetOperationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[cluster_service.GetOperationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the ClusterManager server. + """ + return request, metadata + + def post_get_operation( + self, response: cluster_service.Operation + ) -> cluster_service.Operation: + """Post-rpc interceptor for get_operation + + Override in a subclass to manipulate the response + after it is returned by the ClusterManager server but before + it is returned to user code. + """ + return response + + def pre_get_server_config( + self, + request: cluster_service.GetServerConfigRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[cluster_service.GetServerConfigRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_server_config + + Override in a subclass to manipulate the request or metadata + before they are sent to the ClusterManager server. 
+ """ + return request, metadata + + def post_get_server_config( + self, response: cluster_service.ServerConfig + ) -> cluster_service.ServerConfig: + """Post-rpc interceptor for get_server_config + + Override in a subclass to manipulate the response + after it is returned by the ClusterManager server but before + it is returned to user code. + """ + return response + + def pre_list_clusters( + self, + request: cluster_service.ListClustersRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[cluster_service.ListClustersRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_clusters + + Override in a subclass to manipulate the request or metadata + before they are sent to the ClusterManager server. + """ + return request, metadata + + def post_list_clusters( + self, response: cluster_service.ListClustersResponse + ) -> cluster_service.ListClustersResponse: + """Post-rpc interceptor for list_clusters + + Override in a subclass to manipulate the response + after it is returned by the ClusterManager server but before + it is returned to user code. + """ + return response + + def pre_list_node_pools( + self, + request: cluster_service.ListNodePoolsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[cluster_service.ListNodePoolsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_node_pools + + Override in a subclass to manipulate the request or metadata + before they are sent to the ClusterManager server. + """ + return request, metadata + + def post_list_node_pools( + self, response: cluster_service.ListNodePoolsResponse + ) -> cluster_service.ListNodePoolsResponse: + """Post-rpc interceptor for list_node_pools + + Override in a subclass to manipulate the response + after it is returned by the ClusterManager server but before + it is returned to user code. + """ + return response + + def pre_list_operations( + self, + request: cluster_service.ListOperationsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[cluster_service.ListOperationsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_operations + + Override in a subclass to manipulate the request or metadata + before they are sent to the ClusterManager server. + """ + return request, metadata + + def post_list_operations( + self, response: cluster_service.ListOperationsResponse + ) -> cluster_service.ListOperationsResponse: + """Post-rpc interceptor for list_operations + + Override in a subclass to manipulate the response + after it is returned by the ClusterManager server but before + it is returned to user code. + """ + return response + + def pre_list_usable_subnetworks( + self, + request: cluster_service.ListUsableSubnetworksRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[cluster_service.ListUsableSubnetworksRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_usable_subnetworks + + Override in a subclass to manipulate the request or metadata + before they are sent to the ClusterManager server. + """ + return request, metadata + + def post_list_usable_subnetworks( + self, response: cluster_service.ListUsableSubnetworksResponse + ) -> cluster_service.ListUsableSubnetworksResponse: + """Post-rpc interceptor for list_usable_subnetworks + + Override in a subclass to manipulate the response + after it is returned by the ClusterManager server but before + it is returned to user code. 
+ """ + return response + + def pre_rollback_node_pool_upgrade( + self, + request: cluster_service.RollbackNodePoolUpgradeRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + cluster_service.RollbackNodePoolUpgradeRequest, Sequence[Tuple[str, str]] + ]: + """Pre-rpc interceptor for rollback_node_pool_upgrade + + Override in a subclass to manipulate the request or metadata + before they are sent to the ClusterManager server. + """ + return request, metadata + + def post_rollback_node_pool_upgrade( + self, response: cluster_service.Operation + ) -> cluster_service.Operation: + """Post-rpc interceptor for rollback_node_pool_upgrade + + Override in a subclass to manipulate the response + after it is returned by the ClusterManager server but before + it is returned to user code. + """ + return response + + def pre_set_addons_config( + self, + request: cluster_service.SetAddonsConfigRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[cluster_service.SetAddonsConfigRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for set_addons_config + + Override in a subclass to manipulate the request or metadata + before they are sent to the ClusterManager server. + """ + return request, metadata + + def post_set_addons_config( + self, response: cluster_service.Operation + ) -> cluster_service.Operation: + """Post-rpc interceptor for set_addons_config + + Override in a subclass to manipulate the response + after it is returned by the ClusterManager server but before + it is returned to user code. + """ + return response + + def pre_set_labels( + self, + request: cluster_service.SetLabelsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[cluster_service.SetLabelsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for set_labels + + Override in a subclass to manipulate the request or metadata + before they are sent to the ClusterManager server. + """ + return request, metadata + + def post_set_labels( + self, response: cluster_service.Operation + ) -> cluster_service.Operation: + """Post-rpc interceptor for set_labels + + Override in a subclass to manipulate the response + after it is returned by the ClusterManager server but before + it is returned to user code. + """ + return response + + def pre_set_legacy_abac( + self, + request: cluster_service.SetLegacyAbacRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[cluster_service.SetLegacyAbacRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for set_legacy_abac + + Override in a subclass to manipulate the request or metadata + before they are sent to the ClusterManager server. + """ + return request, metadata + + def post_set_legacy_abac( + self, response: cluster_service.Operation + ) -> cluster_service.Operation: + """Post-rpc interceptor for set_legacy_abac + + Override in a subclass to manipulate the response + after it is returned by the ClusterManager server but before + it is returned to user code. + """ + return response + + def pre_set_locations( + self, + request: cluster_service.SetLocationsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[cluster_service.SetLocationsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for set_locations + + Override in a subclass to manipulate the request or metadata + before they are sent to the ClusterManager server. 
+ """ + return request, metadata + + def post_set_locations( + self, response: cluster_service.Operation + ) -> cluster_service.Operation: + """Post-rpc interceptor for set_locations + + Override in a subclass to manipulate the response + after it is returned by the ClusterManager server but before + it is returned to user code. + """ + return response + + def pre_set_logging_service( + self, + request: cluster_service.SetLoggingServiceRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[cluster_service.SetLoggingServiceRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for set_logging_service + + Override in a subclass to manipulate the request or metadata + before they are sent to the ClusterManager server. + """ + return request, metadata + + def post_set_logging_service( + self, response: cluster_service.Operation + ) -> cluster_service.Operation: + """Post-rpc interceptor for set_logging_service + + Override in a subclass to manipulate the response + after it is returned by the ClusterManager server but before + it is returned to user code. + """ + return response + + def pre_set_maintenance_policy( + self, + request: cluster_service.SetMaintenancePolicyRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[cluster_service.SetMaintenancePolicyRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for set_maintenance_policy + + Override in a subclass to manipulate the request or metadata + before they are sent to the ClusterManager server. + """ + return request, metadata + + def post_set_maintenance_policy( + self, response: cluster_service.Operation + ) -> cluster_service.Operation: + """Post-rpc interceptor for set_maintenance_policy + + Override in a subclass to manipulate the response + after it is returned by the ClusterManager server but before + it is returned to user code. + """ + return response + + def pre_set_master_auth( + self, + request: cluster_service.SetMasterAuthRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[cluster_service.SetMasterAuthRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for set_master_auth + + Override in a subclass to manipulate the request or metadata + before they are sent to the ClusterManager server. + """ + return request, metadata + + def post_set_master_auth( + self, response: cluster_service.Operation + ) -> cluster_service.Operation: + """Post-rpc interceptor for set_master_auth + + Override in a subclass to manipulate the response + after it is returned by the ClusterManager server but before + it is returned to user code. + """ + return response + + def pre_set_monitoring_service( + self, + request: cluster_service.SetMonitoringServiceRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[cluster_service.SetMonitoringServiceRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for set_monitoring_service + + Override in a subclass to manipulate the request or metadata + before they are sent to the ClusterManager server. + """ + return request, metadata + + def post_set_monitoring_service( + self, response: cluster_service.Operation + ) -> cluster_service.Operation: + """Post-rpc interceptor for set_monitoring_service + + Override in a subclass to manipulate the response + after it is returned by the ClusterManager server but before + it is returned to user code. 
+ """ + return response + + def pre_set_network_policy( + self, + request: cluster_service.SetNetworkPolicyRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[cluster_service.SetNetworkPolicyRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for set_network_policy + + Override in a subclass to manipulate the request or metadata + before they are sent to the ClusterManager server. + """ + return request, metadata + + def post_set_network_policy( + self, response: cluster_service.Operation + ) -> cluster_service.Operation: + """Post-rpc interceptor for set_network_policy + + Override in a subclass to manipulate the response + after it is returned by the ClusterManager server but before + it is returned to user code. + """ + return response + + def pre_set_node_pool_autoscaling( + self, + request: cluster_service.SetNodePoolAutoscalingRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + cluster_service.SetNodePoolAutoscalingRequest, Sequence[Tuple[str, str]] + ]: + """Pre-rpc interceptor for set_node_pool_autoscaling + + Override in a subclass to manipulate the request or metadata + before they are sent to the ClusterManager server. + """ + return request, metadata + + def post_set_node_pool_autoscaling( + self, response: cluster_service.Operation + ) -> cluster_service.Operation: + """Post-rpc interceptor for set_node_pool_autoscaling + + Override in a subclass to manipulate the response + after it is returned by the ClusterManager server but before + it is returned to user code. + """ + return response + + def pre_set_node_pool_management( + self, + request: cluster_service.SetNodePoolManagementRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[cluster_service.SetNodePoolManagementRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for set_node_pool_management + + Override in a subclass to manipulate the request or metadata + before they are sent to the ClusterManager server. + """ + return request, metadata + + def post_set_node_pool_management( + self, response: cluster_service.Operation + ) -> cluster_service.Operation: + """Post-rpc interceptor for set_node_pool_management + + Override in a subclass to manipulate the response + after it is returned by the ClusterManager server but before + it is returned to user code. + """ + return response + + def pre_set_node_pool_size( + self, + request: cluster_service.SetNodePoolSizeRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[cluster_service.SetNodePoolSizeRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for set_node_pool_size + + Override in a subclass to manipulate the request or metadata + before they are sent to the ClusterManager server. + """ + return request, metadata + + def post_set_node_pool_size( + self, response: cluster_service.Operation + ) -> cluster_service.Operation: + """Post-rpc interceptor for set_node_pool_size + + Override in a subclass to manipulate the response + after it is returned by the ClusterManager server but before + it is returned to user code. + """ + return response + + def pre_start_ip_rotation( + self, + request: cluster_service.StartIPRotationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[cluster_service.StartIPRotationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for start_ip_rotation + + Override in a subclass to manipulate the request or metadata + before they are sent to the ClusterManager server. 
+ """ + return request, metadata + + def post_start_ip_rotation( + self, response: cluster_service.Operation + ) -> cluster_service.Operation: + """Post-rpc interceptor for start_ip_rotation + + Override in a subclass to manipulate the response + after it is returned by the ClusterManager server but before + it is returned to user code. + """ + return response + + def pre_update_cluster( + self, + request: cluster_service.UpdateClusterRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[cluster_service.UpdateClusterRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for update_cluster + + Override in a subclass to manipulate the request or metadata + before they are sent to the ClusterManager server. + """ + return request, metadata + + def post_update_cluster( + self, response: cluster_service.Operation + ) -> cluster_service.Operation: + """Post-rpc interceptor for update_cluster + + Override in a subclass to manipulate the response + after it is returned by the ClusterManager server but before + it is returned to user code. + """ + return response + + def pre_update_master( + self, + request: cluster_service.UpdateMasterRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[cluster_service.UpdateMasterRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for update_master + + Override in a subclass to manipulate the request or metadata + before they are sent to the ClusterManager server. + """ + return request, metadata + + def post_update_master( + self, response: cluster_service.Operation + ) -> cluster_service.Operation: + """Post-rpc interceptor for update_master + + Override in a subclass to manipulate the response + after it is returned by the ClusterManager server but before + it is returned to user code. + """ + return response + + def pre_update_node_pool( + self, + request: cluster_service.UpdateNodePoolRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[cluster_service.UpdateNodePoolRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for update_node_pool + + Override in a subclass to manipulate the request or metadata + before they are sent to the ClusterManager server. + """ + return request, metadata + + def post_update_node_pool( + self, response: cluster_service.Operation + ) -> cluster_service.Operation: + """Post-rpc interceptor for update_node_pool + + Override in a subclass to manipulate the response + after it is returned by the ClusterManager server but before + it is returned to user code. + """ + return response + + +@dataclasses.dataclass +class ClusterManagerRestStub: + _session: AuthorizedSession + _host: str + _interceptor: ClusterManagerRestInterceptor + + +class ClusterManagerRestTransport(ClusterManagerTransport): + """REST backend transport for ClusterManager. + + Google Kubernetes Engine Cluster Manager v1 + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. 
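+
+ As a usage sketch, a custom interceptor can be wired in by constructing
+ this transport explicitly and handing it to the client (assumes the
+ public ``ClusterManagerClient`` exported by ``google.cloud.container_v1``;
+ ``MyInterceptor`` stands for any ``ClusterManagerRestInterceptor``
+ subclass)::
+
+     from google.cloud import container_v1
+
+     transport = ClusterManagerRestTransport(interceptor=MyInterceptor())
+     client = container_v1.ClusterManagerClient(transport=transport)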
+
+ It sends JSON representations of protocol buffers over HTTP/1.1
+
+ """
+
+ def __init__(
+ self,
+ *,
+ host: str = "container.googleapis.com",
+ credentials: Optional[ga_credentials.Credentials] = None,
+ credentials_file: Optional[str] = None,
+ scopes: Optional[Sequence[str]] = None,
+ client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
+ quota_project_id: Optional[str] = None,
+ client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+ always_use_jwt_access: Optional[bool] = False,
+ url_scheme: str = "https",
+ interceptor: Optional[ClusterManagerRestInterceptor] = None,
+ api_audience: Optional[str] = None,
+ ) -> None:
+ """Instantiate the transport.
+
+ Args:
+ host (Optional[str]):
+ The hostname to connect to (default: 'container.googleapis.com').
+ credentials (Optional[google.auth.credentials.Credentials]): The
+ authorization credentials to attach to requests. These
+ credentials identify the application to the service; if none
+ are specified, the client will attempt to ascertain the
+ credentials from the environment.
+
+ credentials_file (Optional[str]): A file with credentials that can
+ be loaded with :func:`google.auth.load_credentials_from_file`.
+ This argument is ignored if ``channel`` is provided.
+ scopes (Optional(Sequence[str])): A list of scopes. This argument is
+ ignored if ``channel`` is provided.
+ client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client
+ certificate to configure mutual TLS HTTP channel. It is ignored
+ if ``channel`` is provided.
+ quota_project_id (Optional[str]): An optional project to use for billing
+ and quota.
+ client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+ The client info used to send a user-agent string along with
+ API requests. If ``None``, then default info will be used.
+ Generally, you only need to set this if you are developing
+ your own client library.
+ always_use_jwt_access (Optional[bool]): Whether self signed JWT should
+ be used for service account credentials.
+ url_scheme: the protocol scheme for the API endpoint. Normally
+ "https", but for testing or local servers,
+ "http" can be specified.
+ """
+ # Run the base constructor
+ # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc.
+ # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the
+ # credentials object
+ maybe_url_match = re.match("^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host)
+ if maybe_url_match is None:
+ raise ValueError(
+ f"Unexpected hostname structure: {host}"
+ ) # pragma: NO COVER
+
+ url_match_items = maybe_url_match.groupdict()
+
+ host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host
+
+ super().__init__(
+ host=host,
+ credentials=credentials,
+ client_info=client_info,
+ always_use_jwt_access=always_use_jwt_access,
+ api_audience=api_audience,
+ )
+ self._session = AuthorizedSession(
+ self._credentials, default_host=self.DEFAULT_HOST
+ )
+ if client_cert_source_for_mtls:
+ self._session.configure_mtls_channel(client_cert_source_for_mtls)
+ self._interceptor = interceptor or ClusterManagerRestInterceptor()
+ self._prep_wrapped_messages(client_info)
+
+ class _CancelOperation(ClusterManagerRestStub):
+ def __hash__(self):
+ return hash("CancelOperation")
+
+ def __call__(
+ self,
+ request: cluster_service.CancelOperationRequest,
+ *,
+ retry: OptionalRetry = gapic_v1.method.DEFAULT,
+ timeout: Optional[float] = None,
+ metadata: Sequence[Tuple[str, str]] = (),
+ ):
+ r"""Call the cancel operation method over HTTP.
+ + Args: + request (~.cluster_service.CancelOperationRequest): + The request object. CancelOperationRequest cancels a + single operation. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{name=projects/*/locations/*/operations/*}:cancel", + "body": "*", + }, + { + "method": "post", + "uri": "/v1/projects/{project_id}/zones/{zone}/operations/{operation_id}:cancel", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_cancel_operation( + request, metadata + ) + pb_request = cluster_service.CancelOperationRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + class _CheckAutopilotCompatibility(ClusterManagerRestStub): + def __hash__(self): + return hash("CheckAutopilotCompatibility") + + def __call__( + self, + request: cluster_service.CheckAutopilotCompatibilityRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> cluster_service.CheckAutopilotCompatibilityResponse: + r"""Call the check autopilot + compatibility method over HTTP. + + Args: + request (~.cluster_service.CheckAutopilotCompatibilityRequest): + The request object. CheckAutopilotCompatibilityRequest + requests getting the blockers for the + given operation in the cluster. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.cluster_service.CheckAutopilotCompatibilityResponse: + CheckAutopilotCompatibilityResponse + has a list of compatibility issues. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/clusters/*}:checkAutopilotCompatibility", + }, + ] + request, metadata = self._interceptor.pre_check_autopilot_compatibility( + request, metadata + ) + pb_request = cluster_service.CheckAutopilotCompatibilityRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = cluster_service.CheckAutopilotCompatibilityResponse() + pb_resp = cluster_service.CheckAutopilotCompatibilityResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_check_autopilot_compatibility(resp) + return resp + + class _CompleteIPRotation(ClusterManagerRestStub): + def __hash__(self): + return hash("CompleteIPRotation") + + def __call__( + self, + request: cluster_service.CompleteIPRotationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> cluster_service.Operation: + r"""Call the complete ip rotation method over HTTP. + + Args: + request (~.cluster_service.CompleteIPRotationRequest): + The request object. CompleteIPRotationRequest moves the + cluster master back into single-IP mode. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.cluster_service.Operation: + This operation resource represents + operations that may have happened or are + happening on the cluster. All fields are + output only. 
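+
+ As a usage sketch through the public client (``client`` is assumed to be
+ a ``ClusterManagerClient`` and the resource name is a placeholder), an
+ IP rotation is started first and completed once the nodes have been
+ recreated::
+
+     name = "projects/my-project/locations/us-central1/clusters/my-cluster"
+     client.start_ip_rotation(request={"name": name})
+     # ... wait for node recreation to finish ...
+     client.complete_ip_rotation(request={"name": name})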
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{name=projects/*/locations/*/clusters/*}:completeIpRotation", + "body": "*", + }, + { + "method": "post", + "uri": "/v1/projects/{project_id}/zones/{zone}/clusters/{cluster_id}:completeIpRotation", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_complete_ip_rotation( + request, metadata + ) + pb_request = cluster_service.CompleteIPRotationRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = cluster_service.Operation() + pb_resp = cluster_service.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_complete_ip_rotation(resp) + return resp + + class _CompleteNodePoolUpgrade(ClusterManagerRestStub): + def __hash__(self): + return hash("CompleteNodePoolUpgrade") + + def __call__( + self, + request: cluster_service.CompleteNodePoolUpgradeRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ): + r"""Call the complete node pool + upgrade method over HTTP. + + Args: + request (~.cluster_service.CompleteNodePoolUpgradeRequest): + The request object. CompleteNodePoolUpgradeRequest sets + the name of target node pool to complete + upgrade. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{name=projects/*/locations/*/clusters/*/nodePools/*}:completeUpgrade", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_complete_node_pool_upgrade( + request, metadata + ) + pb_request = cluster_service.CompleteNodePoolUpgradeRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + class _CreateCluster(ClusterManagerRestStub): + def __hash__(self): + return hash("CreateCluster") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: cluster_service.CreateClusterRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> cluster_service.Operation: + r"""Call the create cluster method over HTTP. + + Args: + request (~.cluster_service.CreateClusterRequest): + The request object. CreateClusterRequest creates a + cluster. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.cluster_service.Operation: + This operation resource represents + operations that may have happened or are + happening on the cluster. All fields are + output only. 
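+
+ Callers normally reach this stub through the public client rather than
+ invoking it directly (a sketch; ``client`` is assumed to be a
+ ``ClusterManagerClient`` and the names are placeholders)::
+
+     op = client.create_cluster(
+         request={
+             "parent": "projects/my-project/locations/us-central1",
+             "cluster": {"name": "my-cluster", "initial_node_count": 1},
+         }
+     )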
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{parent=projects/*/locations/*}/clusters", + "body": "*", + }, + { + "method": "post", + "uri": "/v1/projects/{project_id}/zones/{zone}/clusters", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_create_cluster(request, metadata) + pb_request = cluster_service.CreateClusterRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = cluster_service.Operation() + pb_resp = cluster_service.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_create_cluster(resp) + return resp + + class _CreateNodePool(ClusterManagerRestStub): + def __hash__(self): + return hash("CreateNodePool") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: cluster_service.CreateNodePoolRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> cluster_service.Operation: + r"""Call the create node pool method over HTTP. + + Args: + request (~.cluster_service.CreateNodePoolRequest): + The request object. CreateNodePoolRequest creates a node + pool for a cluster. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.cluster_service.Operation: + This operation resource represents + operations that may have happened or are + happening on the cluster. All fields are + output only. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{parent=projects/*/locations/*/clusters/*}/nodePools", + "body": "*", + }, + { + "method": "post", + "uri": "/v1/projects/{project_id}/zones/{zone}/clusters/{cluster_id}/nodePools", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_create_node_pool( + request, metadata + ) + pb_request = cluster_service.CreateNodePoolRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = cluster_service.Operation() + pb_resp = cluster_service.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_create_node_pool(resp) + return resp + + class _DeleteCluster(ClusterManagerRestStub): + def __hash__(self): + return hash("DeleteCluster") + + def __call__( + self, + request: cluster_service.DeleteClusterRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> cluster_service.Operation: + r"""Call the delete cluster method over HTTP. + + Args: + request (~.cluster_service.DeleteClusterRequest): + The request object. DeleteClusterRequest deletes a + cluster. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.cluster_service.Operation: + This operation resource represents + operations that may have happened or are + happening on the cluster. All fields are + output only. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v1/{name=projects/*/locations/*/clusters/*}", + }, + { + "method": "delete", + "uri": "/v1/projects/{project_id}/zones/{zone}/clusters/{cluster_id}", + }, + ] + request, metadata = self._interceptor.pre_delete_cluster(request, metadata) + pb_request = cluster_service.DeleteClusterRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = cluster_service.Operation() + pb_resp = cluster_service.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_delete_cluster(resp) + return resp + + class _DeleteNodePool(ClusterManagerRestStub): + def __hash__(self): + return hash("DeleteNodePool") + + def __call__( + self, + request: cluster_service.DeleteNodePoolRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> cluster_service.Operation: + r"""Call the delete node pool method over HTTP. + + Args: + request (~.cluster_service.DeleteNodePoolRequest): + The request object. DeleteNodePoolRequest deletes a node + pool for a cluster. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.cluster_service.Operation: + This operation resource represents + operations that may have happened or are + happening on the cluster. All fields are + output only. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v1/{name=projects/*/locations/*/clusters/*/nodePools/*}", + }, + { + "method": "delete", + "uri": "/v1/projects/{project_id}/zones/{zone}/clusters/{cluster_id}/nodePools/{node_pool_id}", + }, + ] + request, metadata = self._interceptor.pre_delete_node_pool( + request, metadata + ) + pb_request = cluster_service.DeleteNodePoolRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = cluster_service.Operation() + pb_resp = cluster_service.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_delete_node_pool(resp) + return resp + + class _GetCluster(ClusterManagerRestStub): + def __hash__(self): + return hash("GetCluster") + + def __call__( + self, + request: cluster_service.GetClusterRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> cluster_service.Cluster: + r"""Call the get cluster method over HTTP. + + Args: + request (~.cluster_service.GetClusterRequest): + The request object. GetClusterRequest gets the settings + of a cluster. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.cluster_service.Cluster: + A Google Kubernetes Engine cluster. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/clusters/*}", + }, + { + "method": "get", + "uri": "/v1/projects/{project_id}/zones/{zone}/clusters/{cluster_id}", + }, + ] + request, metadata = self._interceptor.pre_get_cluster(request, metadata) + pb_request = cluster_service.GetClusterRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
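+ # (For example, a 404 response surfaces as google.api_core.exceptions.NotFound,
+ # so callers can catch the specific error type.)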
+ if response.status_code >= 400:
+ raise core_exceptions.from_http_response(response)
+
+ # Return the response
+ resp = cluster_service.Cluster()
+ pb_resp = cluster_service.Cluster.pb(resp)
+
+ json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True)
+ resp = self._interceptor.post_get_cluster(resp)
+ return resp
+
+ class _GetJSONWebKeys(ClusterManagerRestStub):
+ def __hash__(self):
+ return hash("GetJSONWebKeys")
+
+ def __call__(
+ self,
+ request: cluster_service.GetJSONWebKeysRequest,
+ *,
+ retry: OptionalRetry = gapic_v1.method.DEFAULT,
+ timeout: Optional[float] = None,
+ metadata: Sequence[Tuple[str, str]] = (),
+ ) -> cluster_service.GetJSONWebKeysResponse:
+ r"""Call the get json web keys method over HTTP.
+
+ Args:
+ request (~.cluster_service.GetJSONWebKeysRequest):
+ The request object. GetJSONWebKeysRequest gets the public component of the
+ keys used by the cluster to sign token requests. This
+ will be the jwks_uri for the discovery document returned
+ by getOpenIDConfig. See the OpenID Connect Discovery 1.0
+ specification for details.
+ retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ should be retried.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, str]]): Strings which should be
+ sent along with the request as metadata.
+
+ Returns:
+ ~.cluster_service.GetJSONWebKeysResponse:
+ GetJSONWebKeysResponse is a valid
+ JSON Web Key Set as specified in
+ RFC 7517.
+
+ """
+
+ http_options: List[Dict[str, str]] = [
+ {
+ "method": "get",
+ "uri": "/v1/{parent=projects/*/locations/*/clusters/*}/jwks",
+ },
+ ]
+ request, metadata = self._interceptor.pre_get_json_web_keys(
+ request, metadata
+ )
+ pb_request = cluster_service.GetJSONWebKeysRequest.pb(request)
+ transcoded_request = path_template.transcode(http_options, pb_request)
+
+ uri = transcoded_request["uri"]
+ method = transcoded_request["method"]
+
+ # Jsonify the query params
+ query_params = json.loads(
+ json_format.MessageToJson(
+ transcoded_request["query_params"],
+ use_integers_for_enums=True,
+ )
+ )
+
+ query_params["$alt"] = "json;enum-encoding=int"
+
+ # Send the request
+ headers = dict(metadata)
+ headers["Content-Type"] = "application/json"
+ response = getattr(self._session, method)(
+ "{host}{uri}".format(host=self._host, uri=uri),
+ timeout=timeout,
+ headers=headers,
+ params=rest_helpers.flatten_query_params(query_params, strict=True),
+ )
+
+ # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception
+ # subclass.
+ if response.status_code >= 400:
+ raise core_exceptions.from_http_response(response)
+
+ # Return the response
+ resp = cluster_service.GetJSONWebKeysResponse()
+ pb_resp = cluster_service.GetJSONWebKeysResponse.pb(resp)
+
+ json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True)
+ resp = self._interceptor.post_get_json_web_keys(resp)
+ return resp
+
+ class _GetNodePool(ClusterManagerRestStub):
+ def __hash__(self):
+ return hash("GetNodePool")
+
+ def __call__(
+ self,
+ request: cluster_service.GetNodePoolRequest,
+ *,
+ retry: OptionalRetry = gapic_v1.method.DEFAULT,
+ timeout: Optional[float] = None,
+ metadata: Sequence[Tuple[str, str]] = (),
+ ) -> cluster_service.NodePool:
+ r"""Call the get node pool method over HTTP.
+
+ Args:
+ request (~.cluster_service.GetNodePoolRequest):
+ The request object. GetNodePoolRequest retrieves a node
+ pool for a cluster.
+ retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ should be retried.
+ timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.cluster_service.NodePool: + NodePool contains the name and + configuration for a cluster's node pool. + Node pools are a set of nodes (i.e. + VM's), with a common configuration and + specification, under the control of the + cluster master. They may have a set of + Kubernetes labels applied to them, which + may be used to reference them during pod + scheduling. They may also be resized up + or down, to accommodate the workload. + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/clusters/*/nodePools/*}", + }, + { + "method": "get", + "uri": "/v1/projects/{project_id}/zones/{zone}/clusters/{cluster_id}/nodePools/{node_pool_id}", + }, + ] + request, metadata = self._interceptor.pre_get_node_pool(request, metadata) + pb_request = cluster_service.GetNodePoolRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = cluster_service.NodePool() + pb_resp = cluster_service.NodePool.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_node_pool(resp) + return resp + + class _GetOperation(ClusterManagerRestStub): + def __hash__(self): + return hash("GetOperation") + + def __call__( + self, + request: cluster_service.GetOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> cluster_service.Operation: + r"""Call the get operation method over HTTP. + + Args: + request (~.cluster_service.GetOperationRequest): + The request object. GetOperationRequest gets a single + operation. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.cluster_service.Operation: + This operation resource represents + operations that may have happened or are + happening on the cluster. All fields are + output only. 
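+
+ Callers typically poll this method until the operation finishes (a
+ sketch; ``client`` is assumed to be a ``ClusterManagerClient``, the
+ operation name is a placeholder, and ``time`` must be imported)::
+
+     op_name = "projects/my-project/locations/us-central1/operations/operation-123"
+     op = client.get_operation(request={"name": op_name})
+     while op.status != cluster_service.Operation.Status.DONE:
+         time.sleep(5)
+         op = client.get_operation(request={"name": op_name})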
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1/projects/{project_id}/zones/{zone}/operations/{operation_id}", + }, + ] + request, metadata = self._interceptor.pre_get_operation(request, metadata) + pb_request = cluster_service.GetOperationRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = cluster_service.Operation() + pb_resp = cluster_service.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_operation(resp) + return resp + + class _GetServerConfig(ClusterManagerRestStub): + def __hash__(self): + return hash("GetServerConfig") + + def __call__( + self, + request: cluster_service.GetServerConfigRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> cluster_service.ServerConfig: + r"""Call the get server config method over HTTP. + + Args: + request (~.cluster_service.GetServerConfigRequest): + The request object. Gets the current Kubernetes Engine + service configuration. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.cluster_service.ServerConfig: + Kubernetes Engine service + configuration. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*}/serverConfig", + }, + { + "method": "get", + "uri": "/v1/projects/{project_id}/zones/{zone}/serverconfig", + }, + ] + request, metadata = self._interceptor.pre_get_server_config( + request, metadata + ) + pb_request = cluster_service.GetServerConfigRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = cluster_service.ServerConfig() + pb_resp = cluster_service.ServerConfig.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_server_config(resp) + return resp + + class _ListClusters(ClusterManagerRestStub): + def __hash__(self): + return hash("ListClusters") + + def __call__( + self, + request: cluster_service.ListClustersRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> cluster_service.ListClustersResponse: + r"""Call the list clusters method over HTTP. + + Args: + request (~.cluster_service.ListClustersRequest): + The request object. ListClustersRequest lists clusters. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.cluster_service.ListClustersResponse: + ListClustersResponse is the result of + ListClustersRequest. + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{parent=projects/*/locations/*}/clusters", + }, + { + "method": "get", + "uri": "/v1/projects/{project_id}/zones/{zone}/clusters", + }, + ] + request, metadata = self._interceptor.pre_list_clusters(request, metadata) + pb_request = cluster_service.ListClustersRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = cluster_service.ListClustersResponse() + pb_resp = cluster_service.ListClustersResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_clusters(resp) + return resp + + class _ListNodePools(ClusterManagerRestStub): + def __hash__(self): + return hash("ListNodePools") + + def __call__( + self, + request: cluster_service.ListNodePoolsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> cluster_service.ListNodePoolsResponse: + r"""Call the list node pools method over HTTP. + + Args: + request (~.cluster_service.ListNodePoolsRequest): + The request object. ListNodePoolsRequest lists the node + pool(s) for a cluster. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.cluster_service.ListNodePoolsResponse: + ListNodePoolsResponse is the result + of ListNodePoolsRequest. + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{parent=projects/*/locations/*/clusters/*}/nodePools", + }, + { + "method": "get", + "uri": "/v1/projects/{project_id}/zones/{zone}/clusters/{cluster_id}/nodePools", + }, + ] + request, metadata = self._interceptor.pre_list_node_pools(request, metadata) + pb_request = cluster_service.ListNodePoolsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = cluster_service.ListNodePoolsResponse() + pb_resp = cluster_service.ListNodePoolsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_node_pools(resp) + return resp + + class _ListOperations(ClusterManagerRestStub): + def __hash__(self): + return hash("ListOperations") + + def __call__( + self, + request: cluster_service.ListOperationsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> cluster_service.ListOperationsResponse: + r"""Call the list operations method over HTTP. + + Args: + request (~.cluster_service.ListOperationsRequest): + The request object. ListOperationsRequest lists + operations. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.cluster_service.ListOperationsResponse: + ListOperationsResponse is the result + of ListOperationsRequest. + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{parent=projects/*/locations/*}/operations", + }, + { + "method": "get", + "uri": "/v1/projects/{project_id}/zones/{zone}/operations", + }, + ] + request, metadata = self._interceptor.pre_list_operations(request, metadata) + pb_request = cluster_service.ListOperationsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = cluster_service.ListOperationsResponse() + pb_resp = cluster_service.ListOperationsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_operations(resp) + return resp + + class _ListUsableSubnetworks(ClusterManagerRestStub): + def __hash__(self): + return hash("ListUsableSubnetworks") + + def __call__( + self, + request: cluster_service.ListUsableSubnetworksRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> cluster_service.ListUsableSubnetworksResponse: + r"""Call the list usable subnetworks method over HTTP. + + Args: + request (~.cluster_service.ListUsableSubnetworksRequest): + The request object. ListUsableSubnetworksRequest requests + the list of usable subnetworks available + to a user for creating clusters. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.cluster_service.ListUsableSubnetworksResponse: + ListUsableSubnetworksResponse is the + response of + ListUsableSubnetworksRequest. 
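+
+ Through the public client this listing is paged; iterating the returned
+ pager fetches follow-up pages transparently (a sketch; ``client`` is
+ assumed to be a ``ClusterManagerClient``)::
+
+     request = {"parent": "projects/my-project"}
+     for subnetwork in client.list_usable_subnetworks(request=request):
+         print(subnetwork.subnetwork)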
+
+ """
+
+ http_options: List[Dict[str, str]] = [
+ {
+ "method": "get",
+ "uri": "/v1/{parent=projects/*}/aggregated/usableSubnetworks",
+ },
+ ]
+ request, metadata = self._interceptor.pre_list_usable_subnetworks(
+ request, metadata
+ )
+ pb_request = cluster_service.ListUsableSubnetworksRequest.pb(request)
+ transcoded_request = path_template.transcode(http_options, pb_request)
+
+ uri = transcoded_request["uri"]
+ method = transcoded_request["method"]
+
+ # Jsonify the query params
+ query_params = json.loads(
+ json_format.MessageToJson(
+ transcoded_request["query_params"],
+ use_integers_for_enums=True,
+ )
+ )
+
+ query_params["$alt"] = "json;enum-encoding=int"
+
+ # Send the request
+ headers = dict(metadata)
+ headers["Content-Type"] = "application/json"
+ response = getattr(self._session, method)(
+ "{host}{uri}".format(host=self._host, uri=uri),
+ timeout=timeout,
+ headers=headers,
+ params=rest_helpers.flatten_query_params(query_params, strict=True),
+ )
+
+ # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception
+ # subclass.
+ if response.status_code >= 400:
+ raise core_exceptions.from_http_response(response)
+
+ # Return the response
+ resp = cluster_service.ListUsableSubnetworksResponse()
+ pb_resp = cluster_service.ListUsableSubnetworksResponse.pb(resp)
+
+ json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True)
+ resp = self._interceptor.post_list_usable_subnetworks(resp)
+ return resp
+
+ class _RollbackNodePoolUpgrade(ClusterManagerRestStub):
+ def __hash__(self):
+ return hash("RollbackNodePoolUpgrade")
+
+ def __call__(
+ self,
+ request: cluster_service.RollbackNodePoolUpgradeRequest,
+ *,
+ retry: OptionalRetry = gapic_v1.method.DEFAULT,
+ timeout: Optional[float] = None,
+ metadata: Sequence[Tuple[str, str]] = (),
+ ) -> cluster_service.Operation:
+ r"""Call the rollback node pool
+ upgrade method over HTTP.
+
+ Args:
+ request (~.cluster_service.RollbackNodePoolUpgradeRequest):
+ The request object. RollbackNodePoolUpgradeRequest
+ rolls back the previously Aborted or
+ Failed NodePool upgrade. This will be a
+ no-op if the last upgrade successfully
+ completed.
+ retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ should be retried.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, str]]): Strings which should be
+ sent along with the request as metadata.
+
+ Returns:
+ ~.cluster_service.Operation:
+ This operation resource represents
+ operations that may have happened or are
+ happening on the cluster. All fields are
+ output only.
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{name=projects/*/locations/*/clusters/*/nodePools/*}:rollback", + "body": "*", + }, + { + "method": "post", + "uri": "/v1/projects/{project_id}/zones/{zone}/clusters/{cluster_id}/nodePools/{node_pool_id}:rollback", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_rollback_node_pool_upgrade( + request, metadata + ) + pb_request = cluster_service.RollbackNodePoolUpgradeRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = cluster_service.Operation() + pb_resp = cluster_service.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_rollback_node_pool_upgrade(resp) + return resp + + class _SetAddonsConfig(ClusterManagerRestStub): + def __hash__(self): + return hash("SetAddonsConfig") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: cluster_service.SetAddonsConfigRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> cluster_service.Operation: + r"""Call the set addons config method over HTTP. + + Args: + request (~.cluster_service.SetAddonsConfigRequest): + The request object. SetAddonsConfigRequest sets the + addons associated with the cluster. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.cluster_service.Operation: + This operation resource represents + operations that may have happened or are + happening on the cluster. All fields are + output only. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{name=projects/*/locations/*/clusters/*}:setAddons", + "body": "*", + }, + { + "method": "post", + "uri": "/v1/projects/{project_id}/zones/{zone}/clusters/{cluster_id}/addons", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_set_addons_config( + request, metadata + ) + pb_request = cluster_service.SetAddonsConfigRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = cluster_service.Operation() + pb_resp = cluster_service.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_set_addons_config(resp) + return resp + + class _SetLabels(ClusterManagerRestStub): + def __hash__(self): + return hash("SetLabels") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: cluster_service.SetLabelsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> cluster_service.Operation: + r"""Call the set labels method over HTTP. + + Args: + request (~.cluster_service.SetLabelsRequest): + The request object. SetLabelsRequest sets the Google + Cloud Platform labels on a Google + Container Engine cluster, which will in + turn set them for Google Compute Engine + resources used by that cluster + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.cluster_service.Operation: + This operation resource represents + operations that may have happened or are + happening on the cluster. All fields are + output only. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{name=projects/*/locations/*/clusters/*}:setResourceLabels", + "body": "*", + }, + { + "method": "post", + "uri": "/v1/projects/{project_id}/zones/{zone}/clusters/{cluster_id}/resourceLabels", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_set_labels(request, metadata) + pb_request = cluster_service.SetLabelsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = cluster_service.Operation() + pb_resp = cluster_service.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_set_labels(resp) + return resp + + class _SetLegacyAbac(ClusterManagerRestStub): + def __hash__(self): + return hash("SetLegacyAbac") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: cluster_service.SetLegacyAbacRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> cluster_service.Operation: + r"""Call the set legacy abac method over HTTP. + + Args: + request (~.cluster_service.SetLegacyAbacRequest): + The request object. SetLegacyAbacRequest enables or + disables the ABAC authorization + mechanism for a cluster. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.cluster_service.Operation: + This operation resource represents + operations that may have happened or are + happening on the cluster. All fields are + output only. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{name=projects/*/locations/*/clusters/*}:setLegacyAbac", + "body": "*", + }, + { + "method": "post", + "uri": "/v1/projects/{project_id}/zones/{zone}/clusters/{cluster_id}/legacyAbac", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_set_legacy_abac(request, metadata) + pb_request = cluster_service.SetLegacyAbacRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = cluster_service.Operation() + pb_resp = cluster_service.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_set_legacy_abac(resp) + return resp + + class _SetLocations(ClusterManagerRestStub): + def __hash__(self): + return hash("SetLocations") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: cluster_service.SetLocationsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> cluster_service.Operation: + r"""Call the set locations method over HTTP. + + Args: + request (~.cluster_service.SetLocationsRequest): + The request object. SetLocationsRequest sets the + locations of the cluster. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.cluster_service.Operation: + This operation resource represents + operations that may have happened or are + happening on the cluster. All fields are + output only. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{name=projects/*/locations/*/clusters/*}:setLocations", + "body": "*", + }, + { + "method": "post", + "uri": "/v1/projects/{project_id}/zones/{zone}/clusters/{cluster_id}/locations", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_set_locations(request, metadata) + pb_request = cluster_service.SetLocationsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = cluster_service.Operation() + pb_resp = cluster_service.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_set_locations(resp) + return resp + + class _SetLoggingService(ClusterManagerRestStub): + def __hash__(self): + return hash("SetLoggingService") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: cluster_service.SetLoggingServiceRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> cluster_service.Operation: + r"""Call the set logging service method over HTTP. + + Args: + request (~.cluster_service.SetLoggingServiceRequest): + The request object. SetLoggingServiceRequest sets the + logging service of a cluster. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.cluster_service.Operation: + This operation resource represents + operations that may have happened or are + happening on the cluster. All fields are + output only. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{name=projects/*/locations/*/clusters/*}:setLogging", + "body": "*", + }, + { + "method": "post", + "uri": "/v1/projects/{project_id}/zones/{zone}/clusters/{cluster_id}/logging", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_set_logging_service( + request, metadata + ) + pb_request = cluster_service.SetLoggingServiceRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = cluster_service.Operation() + pb_resp = cluster_service.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_set_logging_service(resp) + return resp + + class _SetMaintenancePolicy(ClusterManagerRestStub): + def __hash__(self): + return hash("SetMaintenancePolicy") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: cluster_service.SetMaintenancePolicyRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> cluster_service.Operation: + r"""Call the set maintenance policy method over HTTP. + + Args: + request (~.cluster_service.SetMaintenancePolicyRequest): + The request object. SetMaintenancePolicyRequest sets the + maintenance policy for a cluster. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.cluster_service.Operation: + This operation resource represents + operations that may have happened or are + happening on the cluster. All fields are + output only. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{name=projects/*/locations/*/clusters/*}:setMaintenancePolicy", + "body": "*", + }, + { + "method": "post", + "uri": "/v1/projects/{project_id}/zones/{zone}/clusters/{cluster_id}:setMaintenancePolicy", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_set_maintenance_policy( + request, metadata + ) + pb_request = cluster_service.SetMaintenancePolicyRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = cluster_service.Operation() + pb_resp = cluster_service.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_set_maintenance_policy(resp) + return resp + + class _SetMasterAuth(ClusterManagerRestStub): + def __hash__(self): + return hash("SetMasterAuth") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: cluster_service.SetMasterAuthRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> cluster_service.Operation: + r"""Call the set master auth method over HTTP. + + Args: + request (~.cluster_service.SetMasterAuthRequest): + The request object. SetMasterAuthRequest updates the + admin password of a cluster. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.cluster_service.Operation: + This operation resource represents + operations that may have happened or are + happening on the cluster. All fields are + output only. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{name=projects/*/locations/*/clusters/*}:setMasterAuth", + "body": "*", + }, + { + "method": "post", + "uri": "/v1/projects/{project_id}/zones/{zone}/clusters/{cluster_id}:setMasterAuth", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_set_master_auth(request, metadata) + pb_request = cluster_service.SetMasterAuthRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = cluster_service.Operation() + pb_resp = cluster_service.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_set_master_auth(resp) + return resp + + class _SetMonitoringService(ClusterManagerRestStub): + def __hash__(self): + return hash("SetMonitoringService") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: cluster_service.SetMonitoringServiceRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> cluster_service.Operation: + r"""Call the set monitoring service method over HTTP. + + Args: + request (~.cluster_service.SetMonitoringServiceRequest): + The request object. SetMonitoringServiceRequest sets the + monitoring service of a cluster. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.cluster_service.Operation: + This operation resource represents + operations that may have happened or are + happening on the cluster. All fields are + output only. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{name=projects/*/locations/*/clusters/*}:setMonitoring", + "body": "*", + }, + { + "method": "post", + "uri": "/v1/projects/{project_id}/zones/{zone}/clusters/{cluster_id}/monitoring", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_set_monitoring_service( + request, metadata + ) + pb_request = cluster_service.SetMonitoringServiceRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = cluster_service.Operation() + pb_resp = cluster_service.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_set_monitoring_service(resp) + return resp + + class _SetNetworkPolicy(ClusterManagerRestStub): + def __hash__(self): + return hash("SetNetworkPolicy") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: cluster_service.SetNetworkPolicyRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> cluster_service.Operation: + r"""Call the set network policy method over HTTP. + + Args: + request (~.cluster_service.SetNetworkPolicyRequest): + The request object. SetNetworkPolicyRequest + enables/disables network policy for a + cluster. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.cluster_service.Operation: + This operation resource represents + operations that may have happened or are + happening on the cluster. All fields are + output only. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{name=projects/*/locations/*/clusters/*}:setNetworkPolicy", + "body": "*", + }, + { + "method": "post", + "uri": "/v1/projects/{project_id}/zones/{zone}/clusters/{cluster_id}:setNetworkPolicy", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_set_network_policy( + request, metadata + ) + pb_request = cluster_service.SetNetworkPolicyRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = cluster_service.Operation() + pb_resp = cluster_service.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_set_network_policy(resp) + return resp + + class _SetNodePoolAutoscaling(ClusterManagerRestStub): + def __hash__(self): + return hash("SetNodePoolAutoscaling") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: cluster_service.SetNodePoolAutoscalingRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> cluster_service.Operation: + r"""Call the set node pool autoscaling method over HTTP. + + Args: + request (~.cluster_service.SetNodePoolAutoscalingRequest): + The request object. SetNodePoolAutoscalingRequest sets + the autoscaler settings of a node pool. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.cluster_service.Operation: + This operation resource represents + operations that may have happened or are + happening on the cluster. All fields are + output only. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{name=projects/*/locations/*/clusters/*/nodePools/*}:setAutoscaling", + "body": "*", + }, + { + "method": "post", + "uri": "/v1/projects/{project_id}/zones/{zone}/clusters/{cluster_id}/nodePools/{node_pool_id}/autoscaling", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_set_node_pool_autoscaling( + request, metadata + ) + pb_request = cluster_service.SetNodePoolAutoscalingRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = cluster_service.Operation() + pb_resp = cluster_service.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_set_node_pool_autoscaling(resp) + return resp + + class _SetNodePoolManagement(ClusterManagerRestStub): + def __hash__(self): + return hash("SetNodePoolManagement") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: cluster_service.SetNodePoolManagementRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> cluster_service.Operation: + r"""Call the set node pool management method over HTTP. + + Args: + request (~.cluster_service.SetNodePoolManagementRequest): + The request object. SetNodePoolManagementRequest sets the + node management properties of a node + pool. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.cluster_service.Operation: + This operation resource represents + operations that may have happened or are + happening on the cluster. All fields are + output only. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{name=projects/*/locations/*/clusters/*/nodePools/*}:setManagement", + "body": "*", + }, + { + "method": "post", + "uri": "/v1/projects/{project_id}/zones/{zone}/clusters/{cluster_id}/nodePools/{node_pool_id}/setManagement", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_set_node_pool_management( + request, metadata + ) + pb_request = cluster_service.SetNodePoolManagementRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = cluster_service.Operation() + pb_resp = cluster_service.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_set_node_pool_management(resp) + return resp + + class _SetNodePoolSize(ClusterManagerRestStub): + def __hash__(self): + return hash("SetNodePoolSize") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: cluster_service.SetNodePoolSizeRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> cluster_service.Operation: + r"""Call the set node pool size method over HTTP. + + Args: + request (~.cluster_service.SetNodePoolSizeRequest): + The request object. SetNodePoolSizeRequest sets the size + of a node pool. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.cluster_service.Operation: + This operation resource represents + operations that may have happened or are + happening on the cluster. All fields are + output only. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{name=projects/*/locations/*/clusters/*/nodePools/*}:setSize", + "body": "*", + }, + { + "method": "post", + "uri": "/v1/projects/{project_id}/zones/{zone}/clusters/{cluster_id}/nodePools/{node_pool_id}/setSize", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_set_node_pool_size( + request, metadata + ) + pb_request = cluster_service.SetNodePoolSizeRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = cluster_service.Operation() + pb_resp = cluster_service.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_set_node_pool_size(resp) + return resp + + class _StartIPRotation(ClusterManagerRestStub): + def __hash__(self): + return hash("StartIPRotation") + + def __call__( + self, + request: cluster_service.StartIPRotationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> cluster_service.Operation: + r"""Call the start ip rotation method over HTTP. + + Args: + request (~.cluster_service.StartIPRotationRequest): + The request object. StartIPRotationRequest creates a new + IP for the cluster and then performs a + node upgrade on each node pool to point + to the new IP. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.cluster_service.Operation: + This operation resource represents + operations that may have happened or are + happening on the cluster. All fields are + output only. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{name=projects/*/locations/*/clusters/*}:startIpRotation", + "body": "*", + }, + { + "method": "post", + "uri": "/v1/projects/{project_id}/zones/{zone}/clusters/{cluster_id}:startIpRotation", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_start_ip_rotation( + request, metadata + ) + pb_request = cluster_service.StartIPRotationRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = cluster_service.Operation() + pb_resp = cluster_service.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_start_ip_rotation(resp) + return resp + + class _UpdateCluster(ClusterManagerRestStub): + def __hash__(self): + return hash("UpdateCluster") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: cluster_service.UpdateClusterRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> cluster_service.Operation: + r"""Call the update cluster method over HTTP. + + Args: + request (~.cluster_service.UpdateClusterRequest): + The request object. UpdateClusterRequest updates the + settings of a cluster. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.cluster_service.Operation: + This operation resource represents + operations that may have happened or are + happening on the cluster. All fields are + output only. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "put", + "uri": "/v1/{name=projects/*/locations/*/clusters/*}", + "body": "*", + }, + { + "method": "put", + "uri": "/v1/projects/{project_id}/zones/{zone}/clusters/{cluster_id}", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_update_cluster(request, metadata) + pb_request = cluster_service.UpdateClusterRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = cluster_service.Operation() + pb_resp = cluster_service.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_update_cluster(resp) + return resp + + class _UpdateMaster(ClusterManagerRestStub): + def __hash__(self): + return hash("UpdateMaster") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: cluster_service.UpdateMasterRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> cluster_service.Operation: + r"""Call the update master method over HTTP. + + Args: + request (~.cluster_service.UpdateMasterRequest): + The request object. UpdateMasterRequest updates the + master of the cluster. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.cluster_service.Operation: + This operation resource represents + operations that may have happened or are + happening on the cluster. All fields are + output only. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{name=projects/*/locations/*/clusters/*}:updateMaster", + "body": "*", + }, + { + "method": "post", + "uri": "/v1/projects/{project_id}/zones/{zone}/clusters/{cluster_id}/master", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_update_master(request, metadata) + pb_request = cluster_service.UpdateMasterRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = cluster_service.Operation() + pb_resp = cluster_service.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_update_master(resp) + return resp + + class _UpdateNodePool(ClusterManagerRestStub): + def __hash__(self): + return hash("UpdateNodePool") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: cluster_service.UpdateNodePoolRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> cluster_service.Operation: + r"""Call the update node pool method over HTTP. + + Args: + request (~.cluster_service.UpdateNodePoolRequest): + The request object. UpdateNodePoolRequests update a node + pool's image and/or version. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.cluster_service.Operation: + This operation resource represents + operations that may have happened or are + happening on the cluster. All fields are + output only. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "put", + "uri": "/v1/{name=projects/*/locations/*/clusters/*/nodePools/*}", + "body": "*", + }, + { + "method": "post", + "uri": "/v1/projects/{project_id}/zones/{zone}/clusters/{cluster_id}/nodePools/{node_pool_id}/update", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_update_node_pool( + request, metadata + ) + pb_request = cluster_service.UpdateNodePoolRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = cluster_service.Operation() + pb_resp = cluster_service.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_update_node_pool(resp) + return resp + + @property + def cancel_operation( + self, + ) -> Callable[[cluster_service.CancelOperationRequest], empty_pb2.Empty]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._CancelOperation(self._session, self._host, self._interceptor) # type: ignore + + @property + def check_autopilot_compatibility( + self, + ) -> Callable[ + [cluster_service.CheckAutopilotCompatibilityRequest], + cluster_service.CheckAutopilotCompatibilityResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._CheckAutopilotCompatibility(self._session, self._host, self._interceptor) # type: ignore + + @property + def complete_ip_rotation( + self, + ) -> Callable[ + [cluster_service.CompleteIPRotationRequest], cluster_service.Operation + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._CompleteIPRotation(self._session, self._host, self._interceptor) # type: ignore + + @property + def complete_node_pool_upgrade( + self, + ) -> Callable[[cluster_service.CompleteNodePoolUpgradeRequest], empty_pb2.Empty]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._CompleteNodePoolUpgrade(self._session, self._host, self._interceptor) # type: ignore + + @property + def create_cluster( + self, + ) -> Callable[[cluster_service.CreateClusterRequest], cluster_service.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._CreateCluster(self._session, self._host, self._interceptor) # type: ignore + + @property + def create_node_pool( + self, + ) -> Callable[[cluster_service.CreateNodePoolRequest], cluster_service.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._CreateNodePool(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete_cluster( + self, + ) -> Callable[[cluster_service.DeleteClusterRequest], cluster_service.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._DeleteCluster(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete_node_pool( + self, + ) -> Callable[[cluster_service.DeleteNodePoolRequest], cluster_service.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._DeleteNodePool(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_cluster( + self, + ) -> Callable[[cluster_service.GetClusterRequest], cluster_service.Cluster]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetCluster(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_json_web_keys( + self, + ) -> Callable[ + [cluster_service.GetJSONWebKeysRequest], cluster_service.GetJSONWebKeysResponse + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetJSONWebKeys(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_node_pool( + self, + ) -> Callable[[cluster_service.GetNodePoolRequest], cluster_service.NodePool]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetNodePool(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_operation( + self, + ) -> Callable[[cluster_service.GetOperationRequest], cluster_service.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetOperation(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_server_config( + self, + ) -> Callable[ + [cluster_service.GetServerConfigRequest], cluster_service.ServerConfig + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._GetServerConfig(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_clusters( + self, + ) -> Callable[ + [cluster_service.ListClustersRequest], cluster_service.ListClustersResponse + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListClusters(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_node_pools( + self, + ) -> Callable[ + [cluster_service.ListNodePoolsRequest], cluster_service.ListNodePoolsResponse + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListNodePools(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_operations( + self, + ) -> Callable[ + [cluster_service.ListOperationsRequest], cluster_service.ListOperationsResponse + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListOperations(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_usable_subnetworks( + self, + ) -> Callable[ + [cluster_service.ListUsableSubnetworksRequest], + cluster_service.ListUsableSubnetworksResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListUsableSubnetworks(self._session, self._host, self._interceptor) # type: ignore + + @property + def rollback_node_pool_upgrade( + self, + ) -> Callable[ + [cluster_service.RollbackNodePoolUpgradeRequest], cluster_service.Operation + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._RollbackNodePoolUpgrade(self._session, self._host, self._interceptor) # type: ignore + + @property + def set_addons_config( + self, + ) -> Callable[[cluster_service.SetAddonsConfigRequest], cluster_service.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._SetAddonsConfig(self._session, self._host, self._interceptor) # type: ignore + + @property + def set_labels( + self, + ) -> Callable[[cluster_service.SetLabelsRequest], cluster_service.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._SetLabels(self._session, self._host, self._interceptor) # type: ignore + + @property + def set_legacy_abac( + self, + ) -> Callable[[cluster_service.SetLegacyAbacRequest], cluster_service.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._SetLegacyAbac(self._session, self._host, self._interceptor) # type: ignore + + @property + def set_locations( + self, + ) -> Callable[[cluster_service.SetLocationsRequest], cluster_service.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._SetLocations(self._session, self._host, self._interceptor) # type: ignore + + @property + def set_logging_service( + self, + ) -> Callable[ + [cluster_service.SetLoggingServiceRequest], cluster_service.Operation + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._SetLoggingService(self._session, self._host, self._interceptor) # type: ignore + + @property + def set_maintenance_policy( + self, + ) -> Callable[ + [cluster_service.SetMaintenancePolicyRequest], cluster_service.Operation + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._SetMaintenancePolicy(self._session, self._host, self._interceptor) # type: ignore + + @property + def set_master_auth( + self, + ) -> Callable[[cluster_service.SetMasterAuthRequest], cluster_service.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._SetMasterAuth(self._session, self._host, self._interceptor) # type: ignore + + @property + def set_monitoring_service( + self, + ) -> Callable[ + [cluster_service.SetMonitoringServiceRequest], cluster_service.Operation + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._SetMonitoringService(self._session, self._host, self._interceptor) # type: ignore + + @property + def set_network_policy( + self, + ) -> Callable[[cluster_service.SetNetworkPolicyRequest], cluster_service.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._SetNetworkPolicy(self._session, self._host, self._interceptor) # type: ignore + + @property + def set_node_pool_autoscaling( + self, + ) -> Callable[ + [cluster_service.SetNodePoolAutoscalingRequest], cluster_service.Operation + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._SetNodePoolAutoscaling(self._session, self._host, self._interceptor) # type: ignore + + @property + def set_node_pool_management( + self, + ) -> Callable[ + [cluster_service.SetNodePoolManagementRequest], cluster_service.Operation + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._SetNodePoolManagement(self._session, self._host, self._interceptor) # type: ignore + + @property + def set_node_pool_size( + self, + ) -> Callable[[cluster_service.SetNodePoolSizeRequest], cluster_service.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._SetNodePoolSize(self._session, self._host, self._interceptor) # type: ignore + + @property + def start_ip_rotation( + self, + ) -> Callable[[cluster_service.StartIPRotationRequest], cluster_service.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._StartIPRotation(self._session, self._host, self._interceptor) # type: ignore + + @property + def update_cluster( + self, + ) -> Callable[[cluster_service.UpdateClusterRequest], cluster_service.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._UpdateCluster(self._session, self._host, self._interceptor) # type: ignore + + @property + def update_master( + self, + ) -> Callable[[cluster_service.UpdateMasterRequest], cluster_service.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._UpdateMaster(self._session, self._host, self._interceptor) # type: ignore + + @property + def update_node_pool( + self, + ) -> Callable[[cluster_service.UpdateNodePoolRequest], cluster_service.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._UpdateNodePool(self._session, self._host, self._interceptor) # type: ignore + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__ = ("ClusterManagerRestTransport",) diff --git a/packages/google-cloud-container/google/cloud/container_v1/types/__init__.py b/packages/google-cloud-container/google/cloud/container_v1/types/__init__.py index 2f6b2945117e..58e9a257455d 100644 --- a/packages/google-cloud-container/google/cloud/container_v1/types/__init__.py +++ b/packages/google-cloud-container/google/cloud/container_v1/types/__init__.py @@ -41,6 +41,7 @@ CompleteNodePoolUpgradeRequest, ConfidentialNodes, ConfigConnectorConfig, + ContainerdConfig, CostManagementConfig, CreateClusterRequest, CreateNodePoolRequest, @@ -212,6 +213,7 @@ "CompleteNodePoolUpgradeRequest", "ConfidentialNodes", "ConfigConnectorConfig", + "ContainerdConfig", "CostManagementConfig", "CreateClusterRequest", "CreateNodePoolRequest", diff --git a/packages/google-cloud-container/google/cloud/container_v1/types/cluster_service.py b/packages/google-cloud-container/google/cloud/container_v1/types/cluster_service.py index e5923fc53673..ec79353763fc 100644 --- a/packages/google-cloud-container/google/cloud/container_v1/types/cluster_service.py +++ b/packages/google-cloud-container/google/cloud/container_v1/types/cluster_service.py @@ -47,6 +47,7 @@ "GcfsConfig", "ReservationAffinity", "SoleTenantConfig", + "ContainerdConfig", "NodeTaint", "NodeTaints", "NodeLabels", @@ -336,6 +337,8 @@ class InTransitEncryptionConfig(proto.Enum): class LinuxNodeConfig(proto.Message): r"""Parameters that can be configured on Linux nodes. + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + Attributes: sysctls (MutableMapping[str, str]): The Linux kernel parameters to be applied to the nodes and @@ -351,6 +354,10 @@ class LinuxNodeConfig(proto.Message): cgroup_mode (google.cloud.container_v1.types.LinuxNodeConfig.CgroupMode): cgroup_mode specifies the cgroup mode to be used on the node. + hugepages (google.cloud.container_v1.types.LinuxNodeConfig.HugepagesConfig): + Optional. Amounts for 2M and 1G hugepages + + This field is a member of `oneof`_ ``_hugepages``. 
""" class CgroupMode(proto.Enum): @@ -372,6 +379,33 @@ class CgroupMode(proto.Enum): CGROUP_MODE_V1 = 1 CGROUP_MODE_V2 = 2 + class HugepagesConfig(proto.Message): + r"""Hugepages amount in both 2m and 1g size + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + hugepage_size2m (int): + Optional. Amount of 2M hugepages + + This field is a member of `oneof`_ ``_hugepage_size2m``. + hugepage_size1g (int): + Optional. Amount of 1G hugepages + + This field is a member of `oneof`_ ``_hugepage_size1g``. + """ + + hugepage_size2m: int = proto.Field( + proto.INT32, + number=1, + optional=True, + ) + hugepage_size1g: int = proto.Field( + proto.INT32, + number=2, + optional=True, + ) + sysctls: MutableMapping[str, str] = proto.MapField( proto.STRING, proto.STRING, @@ -382,6 +416,12 @@ class CgroupMode(proto.Enum): number=2, enum=CgroupMode, ) + hugepages: HugepagesConfig = proto.Field( + proto.MESSAGE, + number=3, + optional=True, + message=HugepagesConfig, + ) class WindowsNodeConfig(proto.Message): @@ -714,6 +754,8 @@ class NodeConfig(proto.Message): sole_tenant_config (google.cloud.container_v1.types.SoleTenantConfig): Parameters for node pools to be backed by shared sole tenant node groups. + containerd_config (google.cloud.container_v1.types.ContainerdConfig): + Parameters for containerd customization. resource_manager_tags (google.cloud.container_v1.types.ResourceManagerTags): A map of resource manager tag keys and values to be attached to the nodes. @@ -886,6 +928,11 @@ class NodeConfig(proto.Message): number=42, message="SoleTenantConfig", ) + containerd_config: "ContainerdConfig" = proto.Field( + proto.MESSAGE, + number=43, + message="ContainerdConfig", + ) resource_manager_tags: "ResourceManagerTags" = proto.Field( proto.MESSAGE, number=45, @@ -924,6 +971,11 @@ class AdvancedMachineFeatures(proto.Message): processor is assumed. This field is a member of `oneof`_ ``_threads_per_core``. + enable_nested_virtualization (bool): + Whether or not to enable nested + virtualization (defaults to false). + + This field is a member of `oneof`_ ``_enable_nested_virtualization``. """ threads_per_core: int = proto.Field( @@ -931,6 +983,11 @@ class AdvancedMachineFeatures(proto.Message): number=1, optional=True, ) + enable_nested_virtualization: bool = proto.Field( + proto.BOOL, + number=2, + optional=True, + ) class NodeNetworkConfig(proto.Message): @@ -1357,6 +1414,97 @@ class Operator(proto.Enum): ) +class ContainerdConfig(proto.Message): + r"""ContainerdConfig contains configuration to customize + containerd. + + Attributes: + private_registry_access_config (google.cloud.container_v1.types.ContainerdConfig.PrivateRegistryAccessConfig): + PrivateRegistryAccessConfig is used to + configure access configuration for private + container registries. + """ + + class PrivateRegistryAccessConfig(proto.Message): + r"""PrivateRegistryAccessConfig contains access configuration for + private container registries. + + Attributes: + enabled (bool): + Private registry access is enabled. + certificate_authority_domain_config (MutableSequence[google.cloud.container_v1.types.ContainerdConfig.PrivateRegistryAccessConfig.CertificateAuthorityDomainConfig]): + Private registry access configuration. + """ + + class CertificateAuthorityDomainConfig(proto.Message): + r"""CertificateAuthorityDomainConfig configures one or more fully + qualified domain names (FQDN) to a specific certificate. + + + .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + fqdns (MutableSequence[str]): + List of fully qualified domain names (FQDN). + Specifying port is supported. + Wilcards are NOT supported. + Examples: + + - my.customdomain.com + - 10.0.1.2:5000 + gcp_secret_manager_certificate_config (google.cloud.container_v1.types.ContainerdConfig.PrivateRegistryAccessConfig.CertificateAuthorityDomainConfig.GCPSecretManagerCertificateConfig): + Google Secret Manager (GCP) certificate + configuration. + + This field is a member of `oneof`_ ``certificate_config``. + """ + + class GCPSecretManagerCertificateConfig(proto.Message): + r"""GCPSecretManagerCertificateConfig configures a secret from `Google + Secret Manager `__. + + Attributes: + secret_uri (str): + Secret URI, in the form + "projects/$PROJECT_ID/secrets/$SECRET_NAME/versions/$VERSION". + Version can be fixed (e.g. "2") or "latest". + """ + + secret_uri: str = proto.Field( + proto.STRING, + number=1, + ) + + fqdns: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=1, + ) + gcp_secret_manager_certificate_config: "ContainerdConfig.PrivateRegistryAccessConfig.CertificateAuthorityDomainConfig.GCPSecretManagerCertificateConfig" = proto.Field( + proto.MESSAGE, + number=2, + oneof="certificate_config", + message="ContainerdConfig.PrivateRegistryAccessConfig.CertificateAuthorityDomainConfig.GCPSecretManagerCertificateConfig", + ) + + enabled: bool = proto.Field( + proto.BOOL, + number=1, + ) + certificate_authority_domain_config: MutableSequence[ + "ContainerdConfig.PrivateRegistryAccessConfig.CertificateAuthorityDomainConfig" + ] = proto.RepeatedField( + proto.MESSAGE, + number=2, + message="ContainerdConfig.PrivateRegistryAccessConfig.CertificateAuthorityDomainConfig", + ) + + private_registry_access_config: PrivateRegistryAccessConfig = proto.Field( + proto.MESSAGE, + number=1, + message=PrivateRegistryAccessConfig, + ) + + class NodeTaint(proto.Message): r"""Kubernetes taint is composed of three fields: key, value, and effect. Effect can only be one of three types: NoSchedule, @@ -2748,6 +2896,14 @@ class Cluster(proto.Message): Beta APIs Config enterprise_config (google.cloud.container_v1.types.EnterpriseConfig): GKE Enterprise Configuration. + satisfies_pzs (bool): + Output only. Reserved for future use. + + This field is a member of `oneof`_ ``_satisfies_pzs``. + satisfies_pzi (bool): + Output only. Reserved for future use. + + This field is a member of `oneof`_ ``_satisfies_pzi``. """ class Status(proto.Enum): @@ -3094,6 +3250,16 @@ class Status(proto.Enum): number=149, message="EnterpriseConfig", ) + satisfies_pzs: bool = proto.Field( + proto.BOOL, + number=152, + optional=True, + ) + satisfies_pzi: bool = proto.Field( + proto.BOOL, + number=153, + optional=True, + ) class K8sBetaAPIConfig(proto.Message): @@ -3143,10 +3309,14 @@ class Mode(proto.Enum): BASIC (2): Applies Security Posture features on the cluster. + ENTERPRISE (3): + Applies the Security Posture off cluster + Enterprise level features. """ MODE_UNSPECIFIED = 0 DISABLED = 1 BASIC = 2 + ENTERPRISE = 3 class VulnerabilityMode(proto.Enum): r"""VulnerabilityMode defines enablement mode for vulnerability @@ -3202,6 +3372,12 @@ class NodePoolAutoConfig(proto.Message): attached to the nodes for managing Compute Engine firewalls using Network Firewall Policies. 
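The `ContainerdConfig` message added earlier in this hunk nests its registry settings three levels deep. A sketch of wiring a private-registry CA certificate (stored in Secret Manager) into the new `NodeConfig.containerd_config` field; the project, secret and domain values are placeholders, and the local alias `PRAC` is introduced here purely for readability:

```python
from google.cloud import container_v1

# Alias the deeply nested private-registry message for readability.
PRAC = container_v1.ContainerdConfig.PrivateRegistryAccessConfig

containerd_config = container_v1.ContainerdConfig(
    private_registry_access_config=PRAC(
        enabled=True,
        certificate_authority_domain_config=[
            PRAC.CertificateAuthorityDomainConfig(
                # FQDNs may include a port; wildcards are not supported.
                fqdns=["my.customdomain.com", "10.0.1.2:5000"],
                gcp_secret_manager_certificate_config=(
                    PRAC.CertificateAuthorityDomainConfig.GCPSecretManagerCertificateConfig(
                        secret_uri="projects/my-project/secrets/my-ca-cert/versions/latest",
                    )
                ),
            ),
        ],
    ),
)

node_config = container_v1.NodeConfig(containerd_config=containerd_config)
```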
+ node_kubelet_config (google.cloud.container_v1.types.NodeKubeletConfig): + NodeKubeletConfig controls the defaults for autoprovisioned + node-pools. + + Currently only ``insecure_kubelet_readonly_port_enabled`` + can be set here. """ network_tags: "NetworkTags" = proto.Field( @@ -3214,6 +3390,11 @@ class NodePoolAutoConfig(proto.Message): number=2, message="ResourceManagerTags", ) + node_kubelet_config: "NodeKubeletConfig" = proto.Field( + proto.MESSAGE, + number=3, + message="NodeKubeletConfig", + ) class NodePoolDefaults(proto.Message): @@ -3241,6 +3422,13 @@ class NodeConfigDefaults(proto.Message): known as Riptide) options. logging_config (google.cloud.container_v1.types.NodePoolLoggingConfig): Logging configuration for node pools. + containerd_config (google.cloud.container_v1.types.ContainerdConfig): + Parameters for containerd customization. + node_kubelet_config (google.cloud.container_v1.types.NodeKubeletConfig): + NodeKubeletConfig controls the defaults for new node-pools. + + Currently only ``insecure_kubelet_readonly_port_enabled`` + can be set here. """ gcfs_config: "GcfsConfig" = proto.Field( @@ -3253,6 +3441,16 @@ class NodeConfigDefaults(proto.Message): number=3, message="NodePoolLoggingConfig", ) + containerd_config: "ContainerdConfig" = proto.Field( + proto.MESSAGE, + number=4, + message="ContainerdConfig", + ) + node_kubelet_config: "NodeKubeletConfig" = proto.Field( + proto.MESSAGE, + number=6, + message="NodeKubeletConfig", + ) class ClusterUpdate(proto.Message): @@ -3367,6 +3565,11 @@ class ClusterUpdate(proto.Message): configuration. desired_private_cluster_config (google.cloud.container_v1.types.PrivateClusterConfig): The desired private cluster configuration. + master_global_access_config is the only field that can be + changed via this field. See also + [ClusterUpdate.desired_enable_private_endpoint][google.container.v1.ClusterUpdate.desired_enable_private_endpoint] + for modifying other fields within + [PrivateClusterConfig][google.container.v1.PrivateClusterConfig]. desired_intra_node_visibility_config (google.cloud.container_v1.types.IntraNodeVisibilityConfig): The desired config of Intra-node visibility. desired_default_snat_status (google.cloud.container_v1.types.DefaultSnatStatus): @@ -3473,6 +3676,9 @@ class ClusterUpdate(proto.Message): the autopilot cluster. desired_k8s_beta_apis (google.cloud.container_v1.types.K8sBetaAPIConfig): Desired Beta APIs to be enabled for cluster. + desired_containerd_config (google.cloud.container_v1.types.ContainerdConfig): + The desired containerd config for the + cluster. desired_enable_multi_networking (bool): Enable/Disable Multi-Networking for the cluster @@ -3492,6 +3698,14 @@ class ClusterUpdate(proto.Message): Policy for the cluster. This field is a member of `oneof`_ ``_desired_enable_cilium_clusterwide_network_policy``. + desired_node_kubelet_config (google.cloud.container_v1.types.NodeKubeletConfig): + The desired node kubelet config for the + cluster. + desired_node_pool_auto_config_kubelet_config (google.cloud.container_v1.types.NodeKubeletConfig): + The desired node kubelet config for all + auto-provisioned node pools in autopilot + clusters and node auto-provisioning enabled + clusters. 
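The two `ClusterUpdate` attributes documented just above feed kubelet defaults to existing and auto-provisioned node pools; per those docstrings, only `insecure_kubelet_readonly_port_enabled` is honoured today. A sketch of the corresponding `update_cluster` call, with a placeholder cluster name and ambient (ADC) credentials assumed:

```python
from google.cloud import container_v1

client = container_v1.ClusterManagerClient()

# Disable the kubelet read-only port both cluster-wide and for
# auto-provisioned node pools.
kubelet_defaults = container_v1.NodeKubeletConfig(
    insecure_kubelet_readonly_port_enabled=False,
)
operation = client.update_cluster(
    name="projects/my-project/locations/us-central1/clusters/my-cluster",
    update=container_v1.ClusterUpdate(
        desired_node_kubelet_config=kubelet_defaults,
        desired_node_pool_auto_config_kubelet_config=kubelet_defaults,
    ),
)
```

`update_cluster` returns an `Operation` message that can be polled with `get_operation` until it completes.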
""" desired_node_version: str = proto.Field( @@ -3733,6 +3947,11 @@ class ClusterUpdate(proto.Message): number=131, message="K8sBetaAPIConfig", ) + desired_containerd_config: "ContainerdConfig" = proto.Field( + proto.MESSAGE, + number=134, + message="ContainerdConfig", + ) desired_enable_multi_networking: bool = proto.Field( proto.BOOL, number=135, @@ -3756,6 +3975,16 @@ class ClusterUpdate(proto.Message): number=138, optional=True, ) + desired_node_kubelet_config: "NodeKubeletConfig" = proto.Field( + proto.MESSAGE, + number=141, + message="NodeKubeletConfig", + ) + desired_node_pool_auto_config_kubelet_config: "NodeKubeletConfig" = proto.Field( + proto.MESSAGE, + number=142, + message="NodeKubeletConfig", + ) class AdditionalPodRangesConfig(proto.Message): @@ -4439,6 +4668,11 @@ class UpdateNodePoolRequest(proto.Message): windows_node_config (google.cloud.container_v1.types.WindowsNodeConfig): Parameters that can be configured on Windows nodes. + accelerators (MutableSequence[google.cloud.container_v1.types.AcceleratorConfig]): + A list of hardware accelerators to be + attached to each node. See + https://cloud.google.com/compute/docs/gpus for + more information about support for GPUs. machine_type (str): Optional. The desired `Google Compute Engine machine type `__ @@ -4463,6 +4697,10 @@ class UpdateNodePoolRequest(proto.Message): Engine firewalls using Network Firewall Policies. Existing tags will be replaced with new values. + containerd_config (google.cloud.container_v1.types.ContainerdConfig): + The desired containerd config for nodes in + the node pool. Initiates an upgrade operation + that recreates the nodes with the new config. queued_provisioning (google.cloud.container_v1.types.NodePool.QueuedProvisioning): Specifies the configuration of queued provisioning. @@ -4579,6 +4817,11 @@ class UpdateNodePoolRequest(proto.Message): number=34, message="WindowsNodeConfig", ) + accelerators: MutableSequence["AcceleratorConfig"] = proto.RepeatedField( + proto.MESSAGE, + number=35, + message="AcceleratorConfig", + ) machine_type: str = proto.Field( proto.STRING, number=36, @@ -4596,6 +4839,11 @@ class UpdateNodePoolRequest(proto.Message): number=39, message="ResourceManagerTags", ) + containerd_config: "ContainerdConfig" = proto.Field( + proto.MESSAGE, + number=40, + message="ContainerdConfig", + ) queued_provisioning: "NodePool.QueuedProvisioning" = proto.Field( proto.MESSAGE, number=42, @@ -7271,9 +7519,13 @@ class GPUSharingStrategy(proto.Enum): Default value. TIME_SHARING (1): GPUs are time-shared between containers. + MPS (2): + GPUs are shared between containers with + NVIDIA MPS. """ GPU_SHARING_STRATEGY_UNSPECIFIED = 0 TIME_SHARING = 1 + MPS = 2 max_shared_clients_per_gpu: int = proto.Field( proto.INT64, @@ -7593,6 +7845,7 @@ class NetworkConfig(proto.Message): This field is a member of `oneof`_ ``_enable_fqdn_network_policy``. in_transit_encryption_config (google.cloud.container_v1.types.InTransitEncryptionConfig): Specify the details of in-transit encryption. + Now named inter-node transparent encryption. This field is a member of `oneof`_ ``_in_transit_encryption_config``. enable_cilium_clusterwide_network_policy (bool): @@ -8152,6 +8405,9 @@ class DNSConfig(proto.Message): cluster_dns_domain (str): cluster_dns_domain is the suffix used for all cluster service records. + additive_vpc_scope_dns_domain (str): + Optional. The domain used in Additive VPC + scope. 
""" class Provider(proto.Enum): @@ -8206,6 +8462,10 @@ class DNSScope(proto.Enum): proto.STRING, number=3, ) + additive_vpc_scope_dns_domain: str = proto.Field( + proto.STRING, + number=5, + ) class MaxPodsConstraint(proto.Message): @@ -9280,6 +9540,10 @@ class Component(proto.Enum): Deployment STATEFULSET (12): Statefulset + CADVISOR (13): + CADVISOR + KUBELET (14): + KUBELET """ COMPONENT_UNSPECIFIED = 0 SYSTEM_COMPONENTS = 1 @@ -9292,6 +9556,8 @@ class Component(proto.Enum): DAEMONSET = 10 DEPLOYMENT = 11 STATEFULSET = 12 + CADVISOR = 13 + KUBELET = 14 enable_components: MutableSequence[Component] = proto.RepeatedField( proto.ENUM, diff --git a/packages/google-cloud-container/google/cloud/container_v1beta1/__init__.py b/packages/google-cloud-container/google/cloud/container_v1beta1/__init__.py index fa5d29eb858a..29e7e85fa5d7 100644 --- a/packages/google-cloud-container/google/cloud/container_v1beta1/__init__.py +++ b/packages/google-cloud-container/google/cloud/container_v1beta1/__init__.py @@ -47,8 +47,10 @@ ClusterUpdate, CompleteIPRotationRequest, CompleteNodePoolUpgradeRequest, + CompliancePostureConfig, ConfidentialNodes, ConfigConnectorConfig, + ContainerdConfig, CostManagementConfig, CreateClusterRequest, CreateNodePoolRequest, @@ -237,8 +239,10 @@ "ClusterUpdate", "CompleteIPRotationRequest", "CompleteNodePoolUpgradeRequest", + "CompliancePostureConfig", "ConfidentialNodes", "ConfigConnectorConfig", + "ContainerdConfig", "CostManagementConfig", "CreateClusterRequest", "CreateNodePoolRequest", diff --git a/packages/google-cloud-container/google/cloud/container_v1beta1/gapic_version.py b/packages/google-cloud-container/google/cloud/container_v1beta1/gapic_version.py index 558c8aab67c5..4411a7dddf3d 100644 --- a/packages/google-cloud-container/google/cloud/container_v1beta1/gapic_version.py +++ b/packages/google-cloud-container/google/cloud/container_v1beta1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "0.0.0" # {x-release-please-version} +__version__ = "2.47.0" # {x-release-please-version} diff --git a/packages/google-cloud-container/google/cloud/container_v1beta1/types/__init__.py b/packages/google-cloud-container/google/cloud/container_v1beta1/types/__init__.py index 0db35ac6a899..355ff531f469 100644 --- a/packages/google-cloud-container/google/cloud/container_v1beta1/types/__init__.py +++ b/packages/google-cloud-container/google/cloud/container_v1beta1/types/__init__.py @@ -41,8 +41,10 @@ ClusterUpdate, CompleteIPRotationRequest, CompleteNodePoolUpgradeRequest, + CompliancePostureConfig, ConfidentialNodes, ConfigConnectorConfig, + ContainerdConfig, CostManagementConfig, CreateClusterRequest, CreateNodePoolRequest, @@ -229,8 +231,10 @@ "ClusterUpdate", "CompleteIPRotationRequest", "CompleteNodePoolUpgradeRequest", + "CompliancePostureConfig", "ConfidentialNodes", "ConfigConnectorConfig", + "ContainerdConfig", "CostManagementConfig", "CreateClusterRequest", "CreateNodePoolRequest", diff --git a/packages/google-cloud-container/google/cloud/container_v1beta1/types/cluster_service.py b/packages/google-cloud-container/google/cloud/container_v1beta1/types/cluster_service.py index 74eeabc26294..4339c41e2b9f 100644 --- a/packages/google-cloud-container/google/cloud/container_v1beta1/types/cluster_service.py +++ b/packages/google-cloud-container/google/cloud/container_v1beta1/types/cluster_service.py @@ -50,6 +50,7 @@ "GcfsConfig", "ReservationAffinity", "SoleTenantConfig", + "ContainerdConfig", "HostMaintenancePolicy", "NodeTaint", "NodeTaints", @@ -85,6 +86,7 @@ "AuthenticatorGroupsConfig", "ClusterTelemetry", "Cluster", + "CompliancePostureConfig", "K8sBetaAPIConfig", "WorkloadConfig", "ProtectConfig", @@ -336,6 +338,8 @@ class InTransitEncryptionConfig(proto.Enum): class LinuxNodeConfig(proto.Message): r"""Parameters that can be configured on Linux nodes. + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + Attributes: sysctls (MutableMapping[str, str]): The Linux kernel parameters to be applied to the nodes and @@ -351,6 +355,10 @@ class LinuxNodeConfig(proto.Message): cgroup_mode (google.cloud.container_v1beta1.types.LinuxNodeConfig.CgroupMode): cgroup_mode specifies the cgroup mode to be used on the node. + hugepages (google.cloud.container_v1beta1.types.LinuxNodeConfig.HugepagesConfig): + Optional. Amounts for 2M and 1G hugepages + + This field is a member of `oneof`_ ``_hugepages``. """ class CgroupMode(proto.Enum): @@ -372,6 +380,33 @@ class CgroupMode(proto.Enum): CGROUP_MODE_V1 = 1 CGROUP_MODE_V2 = 2 + class HugepagesConfig(proto.Message): + r"""Hugepages amount in both 2m and 1g size + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + hugepage_size2m (int): + Optional. Amount of 2M hugepages + + This field is a member of `oneof`_ ``_hugepage_size2m``. + hugepage_size1g (int): + Optional. Amount of 1G hugepages + + This field is a member of `oneof`_ ``_hugepage_size1g``. 
+ """ + + hugepage_size2m: int = proto.Field( + proto.INT32, + number=1, + optional=True, + ) + hugepage_size1g: int = proto.Field( + proto.INT32, + number=2, + optional=True, + ) + sysctls: MutableMapping[str, str] = proto.MapField( proto.STRING, proto.STRING, @@ -382,6 +417,12 @@ class CgroupMode(proto.Enum): number=2, enum=CgroupMode, ) + hugepages: HugepagesConfig = proto.Field( + proto.MESSAGE, + number=3, + optional=True, + message=HugepagesConfig, + ) class WindowsNodeConfig(proto.Message): @@ -718,6 +759,8 @@ class NodeConfig(proto.Message): sole_tenant_config (google.cloud.container_v1beta1.types.SoleTenantConfig): Parameters for node pools to be backed by shared sole tenant node groups. + containerd_config (google.cloud.container_v1beta1.types.ContainerdConfig): + Parameters for containerd customization. host_maintenance_policy (google.cloud.container_v1beta1.types.HostMaintenancePolicy): HostMaintenancePolicy contains the desired maintenance policy for the Google Compute Engine @@ -899,6 +942,11 @@ class NodeConfig(proto.Message): number=42, message="SoleTenantConfig", ) + containerd_config: "ContainerdConfig" = proto.Field( + proto.MESSAGE, + number=43, + message="ContainerdConfig", + ) host_maintenance_policy: "HostMaintenancePolicy" = proto.Field( proto.MESSAGE, number=44, @@ -942,6 +990,11 @@ class AdvancedMachineFeatures(proto.Message): processor is assumed. This field is a member of `oneof`_ ``_threads_per_core``. + enable_nested_virtualization (bool): + Whether or not to enable nested + virtualization (defaults to false). + + This field is a member of `oneof`_ ``_enable_nested_virtualization``. """ threads_per_core: int = proto.Field( @@ -949,6 +1002,11 @@ class AdvancedMachineFeatures(proto.Message): number=1, optional=True, ) + enable_nested_virtualization: bool = proto.Field( + proto.BOOL, + number=2, + optional=True, + ) class NodeNetworkConfig(proto.Message): @@ -1497,6 +1555,97 @@ class Operator(proto.Enum): ) +class ContainerdConfig(proto.Message): + r"""ContainerdConfig contains configuration to customize + containerd. + + Attributes: + private_registry_access_config (google.cloud.container_v1beta1.types.ContainerdConfig.PrivateRegistryAccessConfig): + PrivateRegistryAccessConfig is used to + configure access configuration for private + container registries. + """ + + class PrivateRegistryAccessConfig(proto.Message): + r"""PrivateRegistryAccessConfig contains access configuration for + private container registries. + + Attributes: + enabled (bool): + Private registry access is enabled. + certificate_authority_domain_config (MutableSequence[google.cloud.container_v1beta1.types.ContainerdConfig.PrivateRegistryAccessConfig.CertificateAuthorityDomainConfig]): + Private registry access configuration. + """ + + class CertificateAuthorityDomainConfig(proto.Message): + r"""CertificateAuthorityDomainConfig configures one or more fully + qualified domain names (FQDN) to a specific certificate. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + fqdns (MutableSequence[str]): + List of fully qualified domain names (FQDN). + Specifying port is supported. + Wilcards are NOT supported. 
+ Examples: + + - my.customdomain.com + - 10.0.1.2:5000 + gcp_secret_manager_certificate_config (google.cloud.container_v1beta1.types.ContainerdConfig.PrivateRegistryAccessConfig.CertificateAuthorityDomainConfig.GCPSecretManagerCertificateConfig): + Google Secret Manager (GCP) certificate + configuration. + + This field is a member of `oneof`_ ``certificate_config``. + """ + + class GCPSecretManagerCertificateConfig(proto.Message): + r"""GCPSecretManagerCertificateConfig configures a secret from `Google + Secret Manager `__. + + Attributes: + secret_uri (str): + Secret URI, in the form + "projects/$PROJECT_ID/secrets/$SECRET_NAME/versions/$VERSION". + Version can be fixed (e.g. "2") or "latest". + """ + + secret_uri: str = proto.Field( + proto.STRING, + number=1, + ) + + fqdns: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=1, + ) + gcp_secret_manager_certificate_config: "ContainerdConfig.PrivateRegistryAccessConfig.CertificateAuthorityDomainConfig.GCPSecretManagerCertificateConfig" = proto.Field( + proto.MESSAGE, + number=2, + oneof="certificate_config", + message="ContainerdConfig.PrivateRegistryAccessConfig.CertificateAuthorityDomainConfig.GCPSecretManagerCertificateConfig", + ) + + enabled: bool = proto.Field( + proto.BOOL, + number=1, + ) + certificate_authority_domain_config: MutableSequence[ + "ContainerdConfig.PrivateRegistryAccessConfig.CertificateAuthorityDomainConfig" + ] = proto.RepeatedField( + proto.MESSAGE, + number=2, + message="ContainerdConfig.PrivateRegistryAccessConfig.CertificateAuthorityDomainConfig", + ) + + private_registry_access_config: PrivateRegistryAccessConfig = proto.Field( + proto.MESSAGE, + number=1, + message=PrivateRegistryAccessConfig, + ) + + class HostMaintenancePolicy(proto.Message): r"""HostMaintenancePolicy contains the maintenance policy for the hosts on which the GKE VMs run on. @@ -2783,7 +2932,8 @@ class PolicyBinding(proto.Message): Attributes: name (str): The relative resource name of the binauthz platform policy - to audit. GKE platform policies have the following format: + to evaluate. GKE platform policies have the following + format: ``projects/{project_number}/platforms/gke/policies/{policy_id}``. This field is a member of `oneof`_ ``_name``. @@ -3247,6 +3397,17 @@ class Cluster(proto.Message): GKE Enterprise Configuration. secret_manager_config (google.cloud.container_v1beta1.types.SecretManagerConfig): Secret CSI driver configuration. + compliance_posture_config (google.cloud.container_v1beta1.types.CompliancePostureConfig): + Enable/Disable Compliance Posture features + for the cluster. + satisfies_pzs (bool): + Output only. Reserved for future use. + + This field is a member of `oneof`_ ``_satisfies_pzs``. + satisfies_pzi (bool): + Output only. Reserved for future use. + + This field is a member of `oneof`_ ``_satisfies_pzi``. """ class Status(proto.Enum): @@ -3641,6 +3802,86 @@ class Status(proto.Enum): number=150, message="SecretManagerConfig", ) + compliance_posture_config: "CompliancePostureConfig" = proto.Field( + proto.MESSAGE, + number=151, + message="CompliancePostureConfig", + ) + satisfies_pzs: bool = proto.Field( + proto.BOOL, + number=152, + optional=True, + ) + satisfies_pzi: bool = proto.Field( + proto.BOOL, + number=153, + optional=True, + ) + + +class CompliancePostureConfig(proto.Message): + r"""CompliancePostureConfig defines the settings needed to + enable/disable features for the Compliance Posture. + + + .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + mode (google.cloud.container_v1beta1.types.CompliancePostureConfig.Mode): + Defines the enablement mode for Compliance + Posture. + + This field is a member of `oneof`_ ``_mode``. + compliance_standards (MutableSequence[google.cloud.container_v1beta1.types.CompliancePostureConfig.ComplianceStandard]): + List of enabled compliance standards. + """ + + class Mode(proto.Enum): + r"""Mode defines enablement mode for Compliance Posture. + + Values: + MODE_UNSPECIFIED (0): + Default value not specified. + DISABLED (1): + Disables Compliance Posture features on the + cluster. + ENABLED (2): + Enables Compliance Posture features on the + cluster. + """ + MODE_UNSPECIFIED = 0 + DISABLED = 1 + ENABLED = 2 + + class ComplianceStandard(proto.Message): + r"""Defines the details of a compliance standard. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + standard (str): + Name of the compliance standard. + + This field is a member of `oneof`_ ``_standard``. + """ + + standard: str = proto.Field( + proto.STRING, + number=1, + optional=True, + ) + + mode: Mode = proto.Field( + proto.ENUM, + number=1, + optional=True, + enum=Mode, + ) + compliance_standards: MutableSequence[ComplianceStandard] = proto.RepeatedField( + proto.MESSAGE, + number=2, + message=ComplianceStandard, + ) class K8sBetaAPIConfig(proto.Message): @@ -3794,10 +4035,14 @@ class Mode(proto.Enum): BASIC (2): Applies Security Posture features on the cluster. + ENTERPRISE (3): + Applies the Security Posture off cluster + Enterprise level features. """ MODE_UNSPECIFIED = 0 DISABLED = 1 BASIC = 2 + ENTERPRISE = 3 class VulnerabilityMode(proto.Enum): r"""VulnerabilityMode defines enablement mode for vulnerability @@ -3860,10 +4105,17 @@ class NodeConfigDefaults(proto.Message): known as Riptide) options. logging_config (google.cloud.container_v1beta1.types.NodePoolLoggingConfig): Logging configuration for node pools. + containerd_config (google.cloud.container_v1beta1.types.ContainerdConfig): + Parameters for containerd customization. host_maintenance_policy (google.cloud.container_v1beta1.types.HostMaintenancePolicy): HostMaintenancePolicy contains the desired maintenance policy for the Google Compute Engine hosts. + node_kubelet_config (google.cloud.container_v1beta1.types.NodeKubeletConfig): + NodeKubeletConfig controls the defaults for new node-pools. + + Currently only ``insecure_kubelet_readonly_port_enabled`` + can be set here. """ gcfs_config: "GcfsConfig" = proto.Field( @@ -3876,11 +4128,21 @@ class NodeConfigDefaults(proto.Message): number=3, message="NodePoolLoggingConfig", ) + containerd_config: "ContainerdConfig" = proto.Field( + proto.MESSAGE, + number=4, + message="ContainerdConfig", + ) host_maintenance_policy: "HostMaintenancePolicy" = proto.Field( proto.MESSAGE, number=5, message="HostMaintenancePolicy", ) + node_kubelet_config: "NodeKubeletConfig" = proto.Field( + proto.MESSAGE, + number=6, + message="NodeKubeletConfig", + ) class NodePoolAutoConfig(proto.Message): @@ -3901,6 +4163,12 @@ class NodePoolAutoConfig(proto.Message): attached to the nodes for managing Compute Engine firewalls using Network Firewall Policies. 
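The v1beta1 surface adds `CompliancePostureConfig`, defined in full earlier in this hunk. A sketch of attaching it to a cluster; the standard name and cluster name are placeholders, not values taken from this diff:

```python
from google.cloud import container_v1beta1

# Enable Compliance Posture and subscribe the cluster to one named standard.
compliance = container_v1beta1.CompliancePostureConfig(
    mode=container_v1beta1.CompliancePostureConfig.Mode.ENABLED,
    compliance_standards=[
        container_v1beta1.CompliancePostureConfig.ComplianceStandard(
            standard="example-standard",  # placeholder standard name
        ),
    ],
)
cluster = container_v1beta1.Cluster(
    name="my-cluster",
    compliance_posture_config=compliance,
)
```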
+ node_kubelet_config (google.cloud.container_v1beta1.types.NodeKubeletConfig): + NodeKubeletConfig controls the defaults for autoprovisioned + node-pools. + + Currently only ``insecure_kubelet_readonly_port_enabled`` + can be set here. """ network_tags: "NetworkTags" = proto.Field( @@ -3913,6 +4181,11 @@ class NodePoolAutoConfig(proto.Message): number=2, message="ResourceManagerTags", ) + node_kubelet_config: "NodeKubeletConfig" = proto.Field( + proto.MESSAGE, + number=3, + message="NodeKubeletConfig", + ) class ClusterUpdate(proto.Message): @@ -4016,6 +4289,11 @@ class ClusterUpdate(proto.Message): configuration. desired_private_cluster_config (google.cloud.container_v1beta1.types.PrivateClusterConfig): The desired private cluster configuration. + master_global_access_config is the only field that can be + changed via this field. See also + [ClusterUpdate.desired_enable_private_endpoint][google.container.v1beta1.ClusterUpdate.desired_enable_private_endpoint] + for modifying other fields within + [PrivateClusterConfig][google.container.v1beta1.PrivateClusterConfig]. desired_intra_node_visibility_config (google.cloud.container_v1beta1.types.IntraNodeVisibilityConfig): The desired config of Intra-node visibility. desired_default_snat_status (google.cloud.container_v1beta1.types.DefaultSnatStatus): @@ -4026,6 +4304,11 @@ class ClusterUpdate(proto.Message): cluster. desired_release_channel (google.cloud.container_v1beta1.types.ReleaseChannel): The desired release channel configuration. + private_cluster_config (google.cloud.container_v1beta1.types.PrivateClusterConfig): + The desired private cluster configuration. Has no effect. + Use + [desired_private_cluster_config][google.container.v1beta1.ClusterUpdate.desired_private_cluster_config] + instead. desired_tpu_config (google.cloud.container_v1beta1.types.TpuConfig): The desired Cloud TPU configuration. desired_l4ilb_subsetting_config (google.cloud.container_v1beta1.types.ILBSubsettingConfig): @@ -4162,6 +4445,9 @@ class ClusterUpdate(proto.Message): HostMaintenancePolicy contains the desired maintenance policy for the Google Compute Engine hosts. + desired_containerd_config (google.cloud.container_v1beta1.types.ContainerdConfig): + The desired containerd config for the + cluster. desired_enable_multi_networking (bool): Enable/Disable Multi-Networking for the cluster @@ -4174,6 +4460,7 @@ class ClusterUpdate(proto.Message): clusters. desired_in_transit_encryption_config (google.cloud.container_v1beta1.types.InTransitEncryptionConfig): Specify the details of in-transit encryption. + Now named inter-node transparent encryption. This field is a member of `oneof`_ ``_desired_in_transit_encryption_config``. desired_enable_cilium_clusterwide_network_policy (bool): @@ -4185,6 +4472,14 @@ class ClusterUpdate(proto.Message): Enable/Disable Secret Manager Config. This field is a member of `oneof`_ ``_desired_secret_manager_config``. + desired_node_kubelet_config (google.cloud.container_v1beta1.types.NodeKubeletConfig): + The desired node kubelet config for the + cluster. + desired_node_pool_auto_config_kubelet_config (google.cloud.container_v1beta1.types.NodeKubeletConfig): + The desired node kubelet config for all + auto-provisioned node pools in autopilot + clusters and node auto-provisioning enabled + clusters. 
""" desired_node_version: str = proto.Field( @@ -4278,6 +4573,11 @@ class ClusterUpdate(proto.Message): number=31, message="ReleaseChannel", ) + private_cluster_config: "PrivateClusterConfig" = proto.Field( + proto.MESSAGE, + number=37, + message="PrivateClusterConfig", + ) desired_tpu_config: "TpuConfig" = proto.Field( proto.MESSAGE, number=38, @@ -4467,6 +4767,11 @@ class ClusterUpdate(proto.Message): number=132, message="HostMaintenancePolicy", ) + desired_containerd_config: "ContainerdConfig" = proto.Field( + proto.MESSAGE, + number=134, + message="ContainerdConfig", + ) desired_enable_multi_networking: bool = proto.Field( proto.BOOL, number=135, @@ -4496,6 +4801,16 @@ class ClusterUpdate(proto.Message): optional=True, message="SecretManagerConfig", ) + desired_node_kubelet_config: "NodeKubeletConfig" = proto.Field( + proto.MESSAGE, + number=141, + message="NodeKubeletConfig", + ) + desired_node_pool_auto_config_kubelet_config: "NodeKubeletConfig" = proto.Field( + proto.MESSAGE, + number=142, + message="NodeKubeletConfig", + ) class AdditionalPodRangesConfig(proto.Message): @@ -5178,6 +5493,11 @@ class UpdateNodePoolRequest(proto.Message): windows_node_config (google.cloud.container_v1beta1.types.WindowsNodeConfig): Parameters that can be configured on Windows nodes. + accelerators (MutableSequence[google.cloud.container_v1beta1.types.AcceleratorConfig]): + A list of hardware accelerators to be + attached to each node. See + https://cloud.google.com/compute/docs/gpus for + more information about support for GPUs. machine_type (str): Optional. The desired machine type for nodes in the node pool. Initiates an upgrade operation @@ -5199,6 +5519,10 @@ class UpdateNodePoolRequest(proto.Message): Engine firewalls using Network Firewall Policies. Existing tags will be replaced with new values. + containerd_config (google.cloud.container_v1beta1.types.ContainerdConfig): + The desired containerd config for nodes in + the node pool. Initiates an upgrade operation + that recreates the nodes with the new config. queued_provisioning (google.cloud.container_v1beta1.types.NodePool.QueuedProvisioning): Specifies the configuration of queued provisioning. @@ -5315,6 +5639,11 @@ class UpdateNodePoolRequest(proto.Message): number=34, message="WindowsNodeConfig", ) + accelerators: MutableSequence["AcceleratorConfig"] = proto.RepeatedField( + proto.MESSAGE, + number=35, + message="AcceleratorConfig", + ) machine_type: str = proto.Field( proto.STRING, number=36, @@ -5332,6 +5661,11 @@ class UpdateNodePoolRequest(proto.Message): number=39, message="ResourceManagerTags", ) + containerd_config: "ContainerdConfig" = proto.Field( + proto.MESSAGE, + number=40, + message="ContainerdConfig", + ) queued_provisioning: "NodePool.QueuedProvisioning" = proto.Field( proto.MESSAGE, number=42, @@ -8124,9 +8458,13 @@ class GPUSharingStrategy(proto.Enum): Default value. TIME_SHARING (1): GPUs are time-shared between containers. + MPS (2): + GPUs are shared between containers with + NVIDIA MPS. """ GPU_SHARING_STRATEGY_UNSPECIFIED = 0 TIME_SHARING = 1 + MPS = 2 max_shared_clients_per_gpu: int = proto.Field( proto.INT64, @@ -9031,6 +9369,9 @@ class DNSConfig(proto.Message): cluster_dns_domain (str): cluster_dns_domain is the suffix used for all cluster service records. + additive_vpc_scope_dns_domain (str): + Optional. The domain used in Additive VPC + scope. 
""" class Provider(proto.Enum): @@ -9085,6 +9426,10 @@ class DNSScope(proto.Enum): proto.STRING, number=3, ) + additive_vpc_scope_dns_domain: str = proto.Field( + proto.STRING, + number=5, + ) class MaxPodsConstraint(proto.Message): @@ -10430,6 +10775,10 @@ class Component(proto.Enum): Deployment STATEFULSET (12): Statefulset + CADVISOR (13): + CADVISOR + KUBELET (14): + KUBELET """ COMPONENT_UNSPECIFIED = 0 SYSTEM_COMPONENTS = 1 @@ -10443,6 +10792,8 @@ class Component(proto.Enum): DAEMONSET = 10 DEPLOYMENT = 11 STATEFULSET = 12 + CADVISOR = 13 + KUBELET = 14 enable_components: MutableSequence[Component] = proto.RepeatedField( proto.ENUM, diff --git a/packages/google-cloud-container/samples/generated_samples/snippet_metadata_google.container.v1.json b/packages/google-cloud-container/samples/generated_samples/snippet_metadata_google.container.v1.json index 477de1eee441..1764b6d883e6 100644 --- a/packages/google-cloud-container/samples/generated_samples/snippet_metadata_google.container.v1.json +++ b/packages/google-cloud-container/samples/generated_samples/snippet_metadata_google.container.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-container", - "version": "0.1.0" + "version": "2.47.0" }, "snippets": [ { @@ -2716,6 +2716,10 @@ "name": "zone", "type": "str" }, + { + "name": "parent", + "type": "str" + }, { "name": "retry", "type": "google.api_core.retry.Retry" @@ -2800,6 +2804,10 @@ "name": "zone", "type": "str" }, + { + "name": "parent", + "type": "str" + }, { "name": "retry", "type": "google.api_core.retry.Retry" diff --git a/packages/google-cloud-container/samples/generated_samples/snippet_metadata_google.container.v1beta1.json b/packages/google-cloud-container/samples/generated_samples/snippet_metadata_google.container.v1beta1.json index afdb562b267e..0944e4c0d47b 100644 --- a/packages/google-cloud-container/samples/generated_samples/snippet_metadata_google.container.v1beta1.json +++ b/packages/google-cloud-container/samples/generated_samples/snippet_metadata_google.container.v1beta1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-container", - "version": "0.1.0" + "version": "2.47.0" }, "snippets": [ { diff --git a/packages/google-cloud-container/scripts/fixup_container_v1_keywords.py b/packages/google-cloud-container/scripts/fixup_container_v1_keywords.py index a38456954a02..6140b6dbba6c 100644 --- a/packages/google-cloud-container/scripts/fixup_container_v1_keywords.py +++ b/packages/google-cloud-container/scripts/fixup_container_v1_keywords.py @@ -72,7 +72,7 @@ class containerCallTransformer(cst.CSTTransformer): 'start_ip_rotation': ('project_id', 'zone', 'cluster_id', 'name', 'rotate_credentials', ), 'update_cluster': ('update', 'project_id', 'zone', 'cluster_id', 'name', ), 'update_master': ('master_version', 'project_id', 'zone', 'cluster_id', 'name', ), - 'update_node_pool': ('node_version', 'image_type', 'project_id', 'zone', 'cluster_id', 'node_pool_id', 'name', 'locations', 'workload_metadata_config', 'upgrade_settings', 'tags', 'taints', 'labels', 'linux_node_config', 'kubelet_config', 'node_network_config', 'gcfs_config', 'confidential_nodes', 'gvnic', 'etag', 'fast_socket', 'logging_config', 'resource_labels', 'windows_node_config', 'machine_type', 'disk_type', 'disk_size_gb', 'resource_manager_tags', 'queued_provisioning', ), + 'update_node_pool': ('node_version', 'image_type', 'project_id', 'zone', 'cluster_id', 'node_pool_id', 'name', 'locations', 'workload_metadata_config', 'upgrade_settings', 'tags', 'taints', 'labels', 
'linux_node_config', 'kubelet_config', 'node_network_config', 'gcfs_config', 'confidential_nodes', 'gvnic', 'etag', 'fast_socket', 'logging_config', 'resource_labels', 'windows_node_config', 'accelerators', 'machine_type', 'disk_type', 'disk_size_gb', 'resource_manager_tags', 'containerd_config', 'queued_provisioning', ), } def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode: diff --git a/packages/google-cloud-container/scripts/fixup_container_v1beta1_keywords.py b/packages/google-cloud-container/scripts/fixup_container_v1beta1_keywords.py index 5566c87180f2..b44a3331c2dc 100644 --- a/packages/google-cloud-container/scripts/fixup_container_v1beta1_keywords.py +++ b/packages/google-cloud-container/scripts/fixup_container_v1beta1_keywords.py @@ -73,7 +73,7 @@ class containerCallTransformer(cst.CSTTransformer): 'start_ip_rotation': ('project_id', 'zone', 'cluster_id', 'name', 'rotate_credentials', ), 'update_cluster': ('project_id', 'zone', 'cluster_id', 'update', 'name', ), 'update_master': ('project_id', 'zone', 'cluster_id', 'master_version', 'name', ), - 'update_node_pool': ('project_id', 'zone', 'cluster_id', 'node_pool_id', 'node_version', 'image_type', 'locations', 'workload_metadata_config', 'name', 'upgrade_settings', 'tags', 'taints', 'labels', 'linux_node_config', 'kubelet_config', 'node_network_config', 'gcfs_config', 'confidential_nodes', 'gvnic', 'etag', 'fast_socket', 'logging_config', 'resource_labels', 'windows_node_config', 'machine_type', 'disk_type', 'disk_size_gb', 'resource_manager_tags', 'queued_provisioning', ), + 'update_node_pool': ('project_id', 'zone', 'cluster_id', 'node_pool_id', 'node_version', 'image_type', 'locations', 'workload_metadata_config', 'name', 'upgrade_settings', 'tags', 'taints', 'labels', 'linux_node_config', 'kubelet_config', 'node_network_config', 'gcfs_config', 'confidential_nodes', 'gvnic', 'etag', 'fast_socket', 'logging_config', 'resource_labels', 'windows_node_config', 'accelerators', 'machine_type', 'disk_type', 'disk_size_gb', 'resource_manager_tags', 'containerd_config', 'queued_provisioning', ), } def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode: diff --git a/packages/google-cloud-container/tests/unit/gapic/container_v1/test_cluster_manager.py b/packages/google-cloud-container/tests/unit/gapic/container_v1/test_cluster_manager.py index 81531a9ad8d2..6e1bce74c740 100644 --- a/packages/google-cloud-container/tests/unit/gapic/container_v1/test_cluster_manager.py +++ b/packages/google-cloud-container/tests/unit/gapic/container_v1/test_cluster_manager.py @@ -22,6 +22,8 @@ except ImportError: # pragma: NO COVER import mock +from collections.abc import Iterable +import json import math from google.api_core import gapic_v1, grpc_helpers, grpc_helpers_async, path_template @@ -32,6 +34,7 @@ from google.auth.exceptions import MutualTLSChannelError from google.oauth2 import service_account from google.protobuf import duration_pb2 # type: ignore +from google.protobuf import json_format from google.protobuf import timestamp_pb2 # type: ignore from google.protobuf import wrappers_pb2 # type: ignore from google.rpc import code_pb2 # type: ignore @@ -41,6 +44,8 @@ from proto.marshal.rules import wrappers from proto.marshal.rules.dates import DurationRule, TimestampRule import pytest +from requests import PreparedRequest, Request, Response +from requests.sessions import Session from google.cloud.container_v1.services.cluster_manager import ( ClusterManagerAsyncClient, @@ -298,6 +303,7 @@ def 
test__get_universe_domain(): "client_class,transport_class,transport_name", [ (ClusterManagerClient, transports.ClusterManagerGrpcTransport, "grpc"), + (ClusterManagerClient, transports.ClusterManagerRestTransport, "rest"), ], ) def test__validate_universe_domain(client_class, transport_class, transport_name): @@ -378,6 +384,7 @@ def test__validate_universe_domain(client_class, transport_class, transport_name [ (ClusterManagerClient, "grpc"), (ClusterManagerAsyncClient, "grpc_asyncio"), + (ClusterManagerClient, "rest"), ], ) def test_cluster_manager_client_from_service_account_info(client_class, transport_name): @@ -391,7 +398,11 @@ def test_cluster_manager_client_from_service_account_info(client_class, transpor assert client.transport._credentials == creds assert isinstance(client, client_class) - assert client.transport._host == ("container.googleapis.com:443") + assert client.transport._host == ( + "container.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://container.googleapis.com" + ) @pytest.mark.parametrize( @@ -399,6 +410,7 @@ def test_cluster_manager_client_from_service_account_info(client_class, transpor [ (transports.ClusterManagerGrpcTransport, "grpc"), (transports.ClusterManagerGrpcAsyncIOTransport, "grpc_asyncio"), + (transports.ClusterManagerRestTransport, "rest"), ], ) def test_cluster_manager_client_service_account_always_use_jwt( @@ -424,6 +436,7 @@ def test_cluster_manager_client_service_account_always_use_jwt( [ (ClusterManagerClient, "grpc"), (ClusterManagerAsyncClient, "grpc_asyncio"), + (ClusterManagerClient, "rest"), ], ) def test_cluster_manager_client_from_service_account_file(client_class, transport_name): @@ -444,13 +457,18 @@ def test_cluster_manager_client_from_service_account_file(client_class, transpor assert client.transport._credentials == creds assert isinstance(client, client_class) - assert client.transport._host == ("container.googleapis.com:443") + assert client.transport._host == ( + "container.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://container.googleapis.com" + ) def test_cluster_manager_client_get_transport_class(): transport = ClusterManagerClient.get_transport_class() available_transports = [ transports.ClusterManagerGrpcTransport, + transports.ClusterManagerRestTransport, ] assert transport in available_transports @@ -467,6 +485,7 @@ def test_cluster_manager_client_get_transport_class(): transports.ClusterManagerGrpcAsyncIOTransport, "grpc_asyncio", ), + (ClusterManagerClient, transports.ClusterManagerRestTransport, "rest"), ], ) @mock.patch.object( @@ -626,6 +645,8 @@ def test_cluster_manager_client_client_options( "grpc_asyncio", "false", ), + (ClusterManagerClient, transports.ClusterManagerRestTransport, "rest", "true"), + (ClusterManagerClient, transports.ClusterManagerRestTransport, "rest", "false"), ], ) @mock.patch.object( @@ -940,6 +961,7 @@ def test_cluster_manager_client_client_api_endpoint(client_class): transports.ClusterManagerGrpcAsyncIOTransport, "grpc_asyncio", ), + (ClusterManagerClient, transports.ClusterManagerRestTransport, "rest"), ], ) def test_cluster_manager_client_client_options_scopes( @@ -982,6 +1004,7 @@ def test_cluster_manager_client_client_options_scopes( "grpc_asyncio", grpc_helpers_async, ), + (ClusterManagerClient, transports.ClusterManagerRestTransport, "rest", None), ], ) def 
test_cluster_manager_client_client_options_credentials_file( @@ -1542,6 +1565,8 @@ def test_get_cluster(request_type, transport: str = "grpc"): tpu_ipv4_cidr_block="tpu_ipv4_cidr_block_value", id="id_value", etag="etag_value", + satisfies_pzs=True, + satisfies_pzi=True, ) response = client.get_cluster(request) @@ -1583,6 +1608,8 @@ def test_get_cluster(request_type, transport: str = "grpc"): assert response.tpu_ipv4_cidr_block == "tpu_ipv4_cidr_block_value" assert response.id == "id_value" assert response.etag == "etag_value" + assert response.satisfies_pzs is True + assert response.satisfies_pzi is True def test_get_cluster_empty_call(): @@ -1717,6 +1744,8 @@ async def test_get_cluster_empty_call_async(): tpu_ipv4_cidr_block="tpu_ipv4_cidr_block_value", id="id_value", etag="etag_value", + satisfies_pzs=True, + satisfies_pzi=True, ) ) response = await client.get_cluster() @@ -1819,6 +1848,8 @@ async def test_get_cluster_async( tpu_ipv4_cidr_block="tpu_ipv4_cidr_block_value", id="id_value", etag="etag_value", + satisfies_pzs=True, + satisfies_pzi=True, ) ) response = await client.get_cluster(request) @@ -1861,6 +1892,8 @@ async def test_get_cluster_async( assert response.tpu_ipv4_cidr_block == "tpu_ipv4_cidr_block_value" assert response.id == "id_value" assert response.etag == "etag_value" + assert response.satisfies_pzs is True + assert response.satisfies_pzi is True @pytest.mark.asyncio @@ -7206,6 +7239,7 @@ def test_list_operations_flattened(): client.list_operations( project_id="project_id_value", zone="zone_value", + parent="parent_value", ) # Establish that the underlying call was made with the expected @@ -7218,6 +7252,9 @@ def test_list_operations_flattened(): arg = args[0].zone mock_val = "zone_value" assert arg == mock_val + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val def test_list_operations_flattened_error(): @@ -7232,6 +7269,7 @@ def test_list_operations_flattened_error(): cluster_service.ListOperationsRequest(), project_id="project_id_value", zone="zone_value", + parent="parent_value", ) @@ -7254,6 +7292,7 @@ async def test_list_operations_flattened_async(): response = await client.list_operations( project_id="project_id_value", zone="zone_value", + parent="parent_value", ) # Establish that the underlying call was made with the expected @@ -7266,6 +7305,9 @@ async def test_list_operations_flattened_async(): arg = args[0].zone mock_val = "zone_value" assert arg == mock_val + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val @pytest.mark.asyncio @@ -7281,6 +7323,7 @@ async def test_list_operations_flattened_error_async(): cluster_service.ListOperationsRequest(), project_id="project_id_value", zone="zone_value", + parent="parent_value", ) @@ -15301,13 +15344,13 @@ def test_list_usable_subnetworks_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_usable_subnetworks(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -15767,165 +15810,9385 @@ async def test_check_autopilot_compatibility_field_headers_async(): ) in kw["metadata"] -def test_credentials_transport_error(): - # It is an error to provide credentials and a transport instance. 
- transport = transports.ClusterManagerGrpcTransport( +@pytest.mark.parametrize( + "request_type", + [ + cluster_service.ListClustersRequest, + dict, + ], +) +def test_list_clusters_rest(request_type): + client = ClusterManagerClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) - with pytest.raises(ValueError): + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = cluster_service.ListClustersResponse( + missing_zones=["missing_zones_value"], + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = cluster_service.ListClustersResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_clusters(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, cluster_service.ListClustersResponse) + assert response.missing_zones == ["missing_zones_value"] + + +def test_list_clusters_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: client = ClusterManagerClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) - # It is an error to provide a credentials file and a transport instance. - transport = transports.ClusterManagerGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = ClusterManagerClient( - client_options={"credentials_file": "credentials.json"}, - transport=transport, + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_clusters in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. ) + client._transport._wrapped_methods[client._transport.list_clusters] = mock_rpc - # It is an error to provide an api_key and a transport instance. - transport = transports.ClusterManagerGrpcTransport( + request = {} + client.list_clusters(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.list_clusters(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_clusters_rest_interceptors(null_interceptor): + transport = transports.ClusterManagerRestTransport( credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.ClusterManagerRestInterceptor(), ) - options = client_options.ClientOptions() - options.api_key = "api_key" - with pytest.raises(ValueError): - client = ClusterManagerClient( - client_options=options, - transport=transport, + client = ClusterManagerClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ClusterManagerRestInterceptor, "post_list_clusters" + ) as post, mock.patch.object( + transports.ClusterManagerRestInterceptor, "pre_list_clusters" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = cluster_service.ListClustersRequest.pb( + cluster_service.ListClustersRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = cluster_service.ListClustersResponse.to_json( + cluster_service.ListClustersResponse() ) - # It is an error to provide an api_key and a credential. - options = client_options.ClientOptions() - options.api_key = "api_key" - with pytest.raises(ValueError): - client = ClusterManagerClient( - client_options=options, credentials=ga_credentials.AnonymousCredentials() - ) + request = cluster_service.ListClustersRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = cluster_service.ListClustersResponse() - # It is an error to provide scopes and a transport instance. - transport = transports.ClusterManagerGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = ClusterManagerClient( - client_options={"scopes": ["1", "2"]}, - transport=transport, + client.list_clusters( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], ) + pre.assert_called_once() + post.assert_called_once() -def test_transport_instance(): - # A client may be instantiated with a custom transport instance. - transport = transports.ClusterManagerGrpcTransport( + +def test_list_clusters_rest_bad_request( + transport: str = "rest", request_type=cluster_service.ListClustersRequest +): + client = ClusterManagerClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - client = ClusterManagerClient(transport=transport) - assert client.transport is transport + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) -def test_transport_get_channel(): - # A client may be instantiated with a custom transport instance. - transport = transports.ClusterManagerGrpcTransport( + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_clusters(request) + + +def test_list_clusters_rest_flattened(): + client = ClusterManagerClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) - channel = transport.grpc_channel - assert channel - transport = transports.ClusterManagerGrpcAsyncIOTransport( + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = cluster_service.ListClustersResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + project_id="project_id_value", + zone="zone_value", + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = cluster_service.ListClustersResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.list_clusters(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*}/clusters" % client.transport._host, + args[1], + ) + + +def test_list_clusters_rest_flattened_error(transport: str = "rest"): + client = ClusterManagerClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - channel = transport.grpc_channel - assert channel + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_clusters( + cluster_service.ListClustersRequest(), + project_id="project_id_value", + zone="zone_value", + parent="parent_value", + ) -@pytest.mark.parametrize( - "transport_class", - [ - transports.ClusterManagerGrpcTransport, - transports.ClusterManagerGrpcAsyncIOTransport, - ], -) -def test_transport_adc(transport_class): - # Test default credentials are used if not provided. - with mock.patch.object(google.auth, "default") as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class() - adc.assert_called_once() + +def test_list_clusters_rest_error(): + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) @pytest.mark.parametrize( - "transport_name", + "request_type", [ - "grpc", + cluster_service.GetClusterRequest, + dict, ], ) -def test_transport_kind(transport_name): - transport = ClusterManagerClient.get_transport_class(transport_name)( - credentials=ga_credentials.AnonymousCredentials(), - ) - assert transport.kind == transport_name - - -def test_transport_grpc_default(): - # A client should use the gRPC transport by default. 
+def test_get_cluster_rest(request_type): client = ClusterManagerClient( credentials=ga_credentials.AnonymousCredentials(), - ) - assert isinstance( - client.transport, - transports.ClusterManagerGrpcTransport, + transport="rest", ) + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/clusters/sample3"} + request = request_type(**request_init) -def test_cluster_manager_base_transport_error(): - # Passing both a credentials object and credentials_file should raise an error - with pytest.raises(core_exceptions.DuplicateCredentialArgs): - transport = transports.ClusterManagerTransport( - credentials=ga_credentials.AnonymousCredentials(), - credentials_file="credentials.json", + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = cluster_service.Cluster( + name="name_value", + description="description_value", + initial_node_count=1911, + logging_service="logging_service_value", + monitoring_service="monitoring_service_value", + network="network_value", + cluster_ipv4_cidr="cluster_ipv4_cidr_value", + subnetwork="subnetwork_value", + locations=["locations_value"], + enable_kubernetes_alpha=True, + label_fingerprint="label_fingerprint_value", + self_link="self_link_value", + zone="zone_value", + endpoint="endpoint_value", + initial_cluster_version="initial_cluster_version_value", + current_master_version="current_master_version_value", + current_node_version="current_node_version_value", + create_time="create_time_value", + status=cluster_service.Cluster.Status.PROVISIONING, + status_message="status_message_value", + node_ipv4_cidr_size=1955, + services_ipv4_cidr="services_ipv4_cidr_value", + instance_group_urls=["instance_group_urls_value"], + current_node_count=1936, + expire_time="expire_time_value", + location="location_value", + enable_tpu=True, + tpu_ipv4_cidr_block="tpu_ipv4_cidr_block_value", + id="id_value", + etag="etag_value", + satisfies_pzs=True, + satisfies_pzi=True, ) + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = cluster_service.Cluster.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) -def test_cluster_manager_base_transport(): - # Instantiate the base transport. - with mock.patch( - "google.cloud.container_v1.services.cluster_manager.transports.ClusterManagerTransport.__init__" - ) as Transport: - Transport.return_value = None - transport = transports.ClusterManagerTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_cluster(request) - # Every method on the transport should just blindly - # raise NotImplementedError. - methods = ( - "list_clusters", - "get_cluster", - "create_cluster", - "update_cluster", - "update_node_pool", - "set_node_pool_autoscaling", - "set_logging_service", - "set_monitoring_service", - "set_addons_config", - "set_locations", - "update_master", - "set_master_auth", - "delete_cluster", - "list_operations", - "get_operation", - "cancel_operation", - "get_server_config", - "get_json_web_keys", - "list_node_pools", - "get_node_pool", - "create_node_pool", - "delete_node_pool", + # Establish that the response is the type that we expect. 
+ assert isinstance(response, cluster_service.Cluster) + assert response.name == "name_value" + assert response.description == "description_value" + assert response.initial_node_count == 1911 + assert response.logging_service == "logging_service_value" + assert response.monitoring_service == "monitoring_service_value" + assert response.network == "network_value" + assert response.cluster_ipv4_cidr == "cluster_ipv4_cidr_value" + assert response.subnetwork == "subnetwork_value" + assert response.locations == ["locations_value"] + assert response.enable_kubernetes_alpha is True + assert response.label_fingerprint == "label_fingerprint_value" + assert response.self_link == "self_link_value" + assert response.zone == "zone_value" + assert response.endpoint == "endpoint_value" + assert response.initial_cluster_version == "initial_cluster_version_value" + assert response.current_master_version == "current_master_version_value" + assert response.current_node_version == "current_node_version_value" + assert response.create_time == "create_time_value" + assert response.status == cluster_service.Cluster.Status.PROVISIONING + assert response.status_message == "status_message_value" + assert response.node_ipv4_cidr_size == 1955 + assert response.services_ipv4_cidr == "services_ipv4_cidr_value" + assert response.instance_group_urls == ["instance_group_urls_value"] + assert response.current_node_count == 1936 + assert response.expire_time == "expire_time_value" + assert response.location == "location_value" + assert response.enable_tpu is True + assert response.tpu_ipv4_cidr_block == "tpu_ipv4_cidr_block_value" + assert response.id == "id_value" + assert response.etag == "etag_value" + assert response.satisfies_pzs is True + assert response.satisfies_pzi is True + + +def test_get_cluster_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_cluster in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get_cluster] = mock_rpc + + request = {} + client.get_cluster(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get_cluster(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_cluster_rest_interceptors(null_interceptor): + transport = transports.ClusterManagerRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.ClusterManagerRestInterceptor(), + ) + client = ClusterManagerClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ClusterManagerRestInterceptor, "post_get_cluster" + ) as post, mock.patch.object( + transports.ClusterManagerRestInterceptor, "pre_get_cluster" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = cluster_service.GetClusterRequest.pb( + cluster_service.GetClusterRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = cluster_service.Cluster.to_json( + cluster_service.Cluster() + ) + + request = cluster_service.GetClusterRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = cluster_service.Cluster() + + client.get_cluster( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_cluster_rest_bad_request( + transport: str = "rest", request_type=cluster_service.GetClusterRequest +): + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/clusters/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_cluster(request) + + +def test_get_cluster_rest_flattened(): + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = cluster_service.Cluster() + + # get arguments that satisfy an http rule for this method + sample_request = {"name": "projects/sample1/locations/sample2/clusters/sample3"} + + # get truthy value for each flattened field + mock_args = dict( + project_id="project_id_value", + zone="zone_value", + cluster_id="cluster_id_value", + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = cluster_service.Cluster.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_cluster(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/clusters/*}" % client.transport._host, + args[1], + ) + + +def test_get_cluster_rest_flattened_error(transport: str = "rest"): + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_cluster( + cluster_service.GetClusterRequest(), + project_id="project_id_value", + zone="zone_value", + cluster_id="cluster_id_value", + name="name_value", + ) + + +def test_get_cluster_rest_error(): + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + cluster_service.CreateClusterRequest, + dict, + ], +) +def test_create_cluster_rest(request_type): + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = cluster_service.Operation( + name="name_value", + zone="zone_value", + operation_type=cluster_service.Operation.Type.CREATE_CLUSTER, + status=cluster_service.Operation.Status.PENDING, + detail="detail_value", + status_message="status_message_value", + self_link="self_link_value", + target_link="target_link_value", + location="location_value", + start_time="start_time_value", + end_time="end_time_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = cluster_service.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.create_cluster(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, cluster_service.Operation) + assert response.name == "name_value" + assert response.zone == "zone_value" + assert response.operation_type == cluster_service.Operation.Type.CREATE_CLUSTER + assert response.status == cluster_service.Operation.Status.PENDING + assert response.detail == "detail_value" + assert response.status_message == "status_message_value" + assert response.self_link == "self_link_value" + assert response.target_link == "target_link_value" + assert response.location == "location_value" + assert response.start_time == "start_time_value" + assert response.end_time == "end_time_value" + + +def test_create_cluster_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.create_cluster in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.create_cluster] = mock_rpc + + request = {} + client.create_cluster(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.create_cluster(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_create_cluster_rest_required_fields( + request_type=cluster_service.CreateClusterRequest, +): + transport_class = transports.ClusterManagerRestTransport + + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_cluster._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_cluster._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = cluster_service.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = cluster_service.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.create_cluster(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_create_cluster_rest_unset_required_fields(): + transport = transports.ClusterManagerRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.create_cluster._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("cluster",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_cluster_rest_interceptors(null_interceptor): + transport = transports.ClusterManagerRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.ClusterManagerRestInterceptor(), + ) + client = ClusterManagerClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ClusterManagerRestInterceptor, "post_create_cluster" + ) as post, mock.patch.object( + transports.ClusterManagerRestInterceptor, "pre_create_cluster" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = cluster_service.CreateClusterRequest.pb( + cluster_service.CreateClusterRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = cluster_service.Operation.to_json( + cluster_service.Operation() + ) + + request = cluster_service.CreateClusterRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = cluster_service.Operation() + + client.create_cluster( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_create_cluster_rest_bad_request( + transport: str = "rest", request_type=cluster_service.CreateClusterRequest +): + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.create_cluster(request) + + +def test_create_cluster_rest_flattened(): + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = cluster_service.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + project_id="project_id_value", + zone="zone_value", + cluster=cluster_service.Cluster(name="name_value"), + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = cluster_service.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.create_cluster(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*}/clusters" % client.transport._host, + args[1], + ) + + +def test_create_cluster_rest_flattened_error(transport: str = "rest"): + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_cluster( + cluster_service.CreateClusterRequest(), + project_id="project_id_value", + zone="zone_value", + cluster=cluster_service.Cluster(name="name_value"), + parent="parent_value", + ) + + +def test_create_cluster_rest_error(): + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + cluster_service.UpdateClusterRequest, + dict, + ], +) +def test_update_cluster_rest(request_type): + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/clusters/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = cluster_service.Operation( + name="name_value", + zone="zone_value", + operation_type=cluster_service.Operation.Type.CREATE_CLUSTER, + status=cluster_service.Operation.Status.PENDING, + detail="detail_value", + status_message="status_message_value", + self_link="self_link_value", + target_link="target_link_value", + location="location_value", + start_time="start_time_value", + end_time="end_time_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = cluster_service.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.update_cluster(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, cluster_service.Operation) + assert response.name == "name_value" + assert response.zone == "zone_value" + assert response.operation_type == cluster_service.Operation.Type.CREATE_CLUSTER + assert response.status == cluster_service.Operation.Status.PENDING + assert response.detail == "detail_value" + assert response.status_message == "status_message_value" + assert response.self_link == "self_link_value" + assert response.target_link == "target_link_value" + assert response.location == "location_value" + assert response.start_time == "start_time_value" + assert response.end_time == "end_time_value" + + +def test_update_cluster_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.update_cluster in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.update_cluster] = mock_rpc + + request = {} + client.update_cluster(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.update_cluster(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_update_cluster_rest_required_fields( + request_type=cluster_service.UpdateClusterRequest, +): + transport_class = transports.ClusterManagerRestTransport + + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_cluster._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_cluster._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = cluster_service.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "put", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = cluster_service.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.update_cluster(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_update_cluster_rest_unset_required_fields(): + transport = transports.ClusterManagerRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.update_cluster._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("update",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_cluster_rest_interceptors(null_interceptor): + transport = transports.ClusterManagerRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.ClusterManagerRestInterceptor(), + ) + client = ClusterManagerClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ClusterManagerRestInterceptor, "post_update_cluster" + ) as post, mock.patch.object( + transports.ClusterManagerRestInterceptor, "pre_update_cluster" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = cluster_service.UpdateClusterRequest.pb( + cluster_service.UpdateClusterRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = cluster_service.Operation.to_json( + cluster_service.Operation() + ) + + request = cluster_service.UpdateClusterRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = cluster_service.Operation() + + client.update_cluster( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_update_cluster_rest_bad_request( + transport: str = "rest", request_type=cluster_service.UpdateClusterRequest +): + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/clusters/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.update_cluster(request) + + +def test_update_cluster_rest_flattened(): + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = cluster_service.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {"name": "projects/sample1/locations/sample2/clusters/sample3"} + + # get truthy value for each flattened field + mock_args = dict( + project_id="project_id_value", + zone="zone_value", + cluster_id="cluster_id_value", + update=cluster_service.ClusterUpdate( + desired_node_version="desired_node_version_value" + ), + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = cluster_service.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.update_cluster(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/clusters/*}" % client.transport._host, + args[1], + ) + + +def test_update_cluster_rest_flattened_error(transport: str = "rest"): + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_cluster( + cluster_service.UpdateClusterRequest(), + project_id="project_id_value", + zone="zone_value", + cluster_id="cluster_id_value", + update=cluster_service.ClusterUpdate( + desired_node_version="desired_node_version_value" + ), + name="name_value", + ) + + +def test_update_cluster_rest_error(): + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + cluster_service.UpdateNodePoolRequest, + dict, + ], +) +def test_update_node_pool_rest(request_type): + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/clusters/sample3/nodePools/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = cluster_service.Operation( + name="name_value", + zone="zone_value", + operation_type=cluster_service.Operation.Type.CREATE_CLUSTER, + status=cluster_service.Operation.Status.PENDING, + detail="detail_value", + status_message="status_message_value", + self_link="self_link_value", + target_link="target_link_value", + location="location_value", + start_time="start_time_value", + end_time="end_time_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = cluster_service.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.update_node_pool(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, cluster_service.Operation) + assert response.name == "name_value" + assert response.zone == "zone_value" + assert response.operation_type == cluster_service.Operation.Type.CREATE_CLUSTER + assert response.status == cluster_service.Operation.Status.PENDING + assert response.detail == "detail_value" + assert response.status_message == "status_message_value" + assert response.self_link == "self_link_value" + assert response.target_link == "target_link_value" + assert response.location == "location_value" + assert response.start_time == "start_time_value" + assert response.end_time == "end_time_value" + + +def test_update_node_pool_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.update_node_pool in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.update_node_pool + ] = mock_rpc + + request = {} + client.update_node_pool(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.update_node_pool(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_update_node_pool_rest_required_fields( + request_type=cluster_service.UpdateNodePoolRequest, +): + transport_class = transports.ClusterManagerRestTransport + + request_init = {} + request_init["node_version"] = "" + request_init["image_type"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_node_pool._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["nodeVersion"] = "node_version_value" + jsonified_request["imageType"] = "image_type_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_node_pool._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "nodeVersion" in jsonified_request + assert jsonified_request["nodeVersion"] == "node_version_value" + assert "imageType" in jsonified_request + assert jsonified_request["imageType"] == "image_type_value" + + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = cluster_service.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "put", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = cluster_service.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.update_node_pool(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_update_node_pool_rest_unset_required_fields(): + transport = transports.ClusterManagerRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.update_node_pool._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(()) + & set( + ( + "nodeVersion", + "imageType", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_node_pool_rest_interceptors(null_interceptor): + transport = transports.ClusterManagerRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.ClusterManagerRestInterceptor(), + ) + client = ClusterManagerClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ClusterManagerRestInterceptor, "post_update_node_pool" + ) as post, mock.patch.object( + transports.ClusterManagerRestInterceptor, "pre_update_node_pool" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = cluster_service.UpdateNodePoolRequest.pb( + cluster_service.UpdateNodePoolRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = cluster_service.Operation.to_json( + cluster_service.Operation() + ) + + request = cluster_service.UpdateNodePoolRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = cluster_service.Operation() + + client.update_node_pool( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_update_node_pool_rest_bad_request( + transport: str = "rest", request_type=cluster_service.UpdateNodePoolRequest +): + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/clusters/sample3/nodePools/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.update_node_pool(request) + + +def test_update_node_pool_rest_error(): + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + cluster_service.SetNodePoolAutoscalingRequest, + dict, + ], +) +def test_set_node_pool_autoscaling_rest(request_type): + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/clusters/sample3/nodePools/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = cluster_service.Operation( + name="name_value", + zone="zone_value", + operation_type=cluster_service.Operation.Type.CREATE_CLUSTER, + status=cluster_service.Operation.Status.PENDING, + detail="detail_value", + status_message="status_message_value", + self_link="self_link_value", + target_link="target_link_value", + location="location_value", + start_time="start_time_value", + end_time="end_time_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = cluster_service.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.set_node_pool_autoscaling(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, cluster_service.Operation) + assert response.name == "name_value" + assert response.zone == "zone_value" + assert response.operation_type == cluster_service.Operation.Type.CREATE_CLUSTER + assert response.status == cluster_service.Operation.Status.PENDING + assert response.detail == "detail_value" + assert response.status_message == "status_message_value" + assert response.self_link == "self_link_value" + assert response.target_link == "target_link_value" + assert response.location == "location_value" + assert response.start_time == "start_time_value" + assert response.end_time == "end_time_value" + + +def test_set_node_pool_autoscaling_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.set_node_pool_autoscaling + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client._transport._wrapped_methods[ + client._transport.set_node_pool_autoscaling + ] = mock_rpc + + request = {} + client.set_node_pool_autoscaling(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.set_node_pool_autoscaling(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_set_node_pool_autoscaling_rest_required_fields( + request_type=cluster_service.SetNodePoolAutoscalingRequest, +): + transport_class = transports.ClusterManagerRestTransport + + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).set_node_pool_autoscaling._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).set_node_pool_autoscaling._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = cluster_service.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = cluster_service.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.set_node_pool_autoscaling(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_set_node_pool_autoscaling_rest_unset_required_fields(): + transport = transports.ClusterManagerRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.set_node_pool_autoscaling._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("autoscaling",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_set_node_pool_autoscaling_rest_interceptors(null_interceptor): + transport = transports.ClusterManagerRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.ClusterManagerRestInterceptor(), + ) + client = ClusterManagerClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ClusterManagerRestInterceptor, "post_set_node_pool_autoscaling" + ) as post, mock.patch.object( + transports.ClusterManagerRestInterceptor, "pre_set_node_pool_autoscaling" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = cluster_service.SetNodePoolAutoscalingRequest.pb( + cluster_service.SetNodePoolAutoscalingRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = cluster_service.Operation.to_json( + cluster_service.Operation() + ) + + request = cluster_service.SetNodePoolAutoscalingRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = cluster_service.Operation() + + client.set_node_pool_autoscaling( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_set_node_pool_autoscaling_rest_bad_request( + transport: str = "rest", request_type=cluster_service.SetNodePoolAutoscalingRequest +): + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/clusters/sample3/nodePools/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.set_node_pool_autoscaling(request) + + +def test_set_node_pool_autoscaling_rest_error(): + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + cluster_service.SetLoggingServiceRequest, + dict, + ], +) +def test_set_logging_service_rest(request_type): + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/clusters/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = cluster_service.Operation( + name="name_value", + zone="zone_value", + operation_type=cluster_service.Operation.Type.CREATE_CLUSTER, + status=cluster_service.Operation.Status.PENDING, + detail="detail_value", + status_message="status_message_value", + self_link="self_link_value", + target_link="target_link_value", + location="location_value", + start_time="start_time_value", + end_time="end_time_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = cluster_service.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.set_logging_service(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, cluster_service.Operation) + assert response.name == "name_value" + assert response.zone == "zone_value" + assert response.operation_type == cluster_service.Operation.Type.CREATE_CLUSTER + assert response.status == cluster_service.Operation.Status.PENDING + assert response.detail == "detail_value" + assert response.status_message == "status_message_value" + assert response.self_link == "self_link_value" + assert response.target_link == "target_link_value" + assert response.location == "location_value" + assert response.start_time == "start_time_value" + assert response.end_time == "end_time_value" + + +def test_set_logging_service_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.set_logging_service in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client._transport._wrapped_methods[ + client._transport.set_logging_service + ] = mock_rpc + + request = {} + client.set_logging_service(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.set_logging_service(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_set_logging_service_rest_required_fields( + request_type=cluster_service.SetLoggingServiceRequest, +): + transport_class = transports.ClusterManagerRestTransport + + request_init = {} + request_init["logging_service"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).set_logging_service._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["loggingService"] = "logging_service_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).set_logging_service._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "loggingService" in jsonified_request + assert jsonified_request["loggingService"] == "logging_service_value" + + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = cluster_service.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = cluster_service.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.set_logging_service(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_set_logging_service_rest_unset_required_fields(): + transport = transports.ClusterManagerRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.set_logging_service._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("loggingService",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_set_logging_service_rest_interceptors(null_interceptor): + transport = transports.ClusterManagerRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.ClusterManagerRestInterceptor(), + ) + client = ClusterManagerClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ClusterManagerRestInterceptor, "post_set_logging_service" + ) as post, mock.patch.object( + transports.ClusterManagerRestInterceptor, "pre_set_logging_service" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = cluster_service.SetLoggingServiceRequest.pb( + cluster_service.SetLoggingServiceRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = cluster_service.Operation.to_json( + cluster_service.Operation() + ) + + request = cluster_service.SetLoggingServiceRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = cluster_service.Operation() + + client.set_logging_service( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_set_logging_service_rest_bad_request( + transport: str = "rest", request_type=cluster_service.SetLoggingServiceRequest +): + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/clusters/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.set_logging_service(request) + + +def test_set_logging_service_rest_flattened(): + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = cluster_service.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {"name": "projects/sample1/locations/sample2/clusters/sample3"} + + # get truthy value for each flattened field + mock_args = dict( + project_id="project_id_value", + zone="zone_value", + cluster_id="cluster_id_value", + logging_service="logging_service_value", + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = cluster_service.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.set_logging_service(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/clusters/*}:setLogging" + % client.transport._host, + args[1], + ) + + +def test_set_logging_service_rest_flattened_error(transport: str = "rest"): + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.set_logging_service( + cluster_service.SetLoggingServiceRequest(), + project_id="project_id_value", + zone="zone_value", + cluster_id="cluster_id_value", + logging_service="logging_service_value", + name="name_value", + ) + + +def test_set_logging_service_rest_error(): + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + cluster_service.SetMonitoringServiceRequest, + dict, + ], +) +def test_set_monitoring_service_rest(request_type): + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/clusters/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = cluster_service.Operation( + name="name_value", + zone="zone_value", + operation_type=cluster_service.Operation.Type.CREATE_CLUSTER, + status=cluster_service.Operation.Status.PENDING, + detail="detail_value", + status_message="status_message_value", + self_link="self_link_value", + target_link="target_link_value", + location="location_value", + start_time="start_time_value", + end_time="end_time_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = cluster_service.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.set_monitoring_service(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, cluster_service.Operation) + assert response.name == "name_value" + assert response.zone == "zone_value" + assert response.operation_type == cluster_service.Operation.Type.CREATE_CLUSTER + assert response.status == cluster_service.Operation.Status.PENDING + assert response.detail == "detail_value" + assert response.status_message == "status_message_value" + assert response.self_link == "self_link_value" + assert response.target_link == "target_link_value" + assert response.location == "location_value" + assert response.start_time == "start_time_value" + assert response.end_time == "end_time_value" + + +def test_set_monitoring_service_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.set_monitoring_service + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.set_monitoring_service + ] = mock_rpc + + request = {} + client.set_monitoring_service(request) + + # Establish that the underlying gRPC stub method was called. 
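+ # The call above should have been routed through the cached wrapped method, which was swapped for mock_rpc; no real HTTP request is issued.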
+ assert mock_rpc.call_count == 1 + + client.set_monitoring_service(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_set_monitoring_service_rest_required_fields( + request_type=cluster_service.SetMonitoringServiceRequest, +): + transport_class = transports.ClusterManagerRestTransport + + request_init = {} + request_init["monitoring_service"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).set_monitoring_service._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["monitoringService"] = "monitoring_service_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).set_monitoring_service._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "monitoringService" in jsonified_request + assert jsonified_request["monitoringService"] == "monitoring_service_value" + + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = cluster_service.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
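+ # The stubbed result below mirrors the dict shape returned by path_template.transcode(): 'uri', 'method', 'query_params', and an optional 'body'.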
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = cluster_service.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.set_monitoring_service(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_set_monitoring_service_rest_unset_required_fields(): + transport = transports.ClusterManagerRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.set_monitoring_service._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("monitoringService",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_set_monitoring_service_rest_interceptors(null_interceptor): + transport = transports.ClusterManagerRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.ClusterManagerRestInterceptor(), + ) + client = ClusterManagerClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ClusterManagerRestInterceptor, "post_set_monitoring_service" + ) as post, mock.patch.object( + transports.ClusterManagerRestInterceptor, "pre_set_monitoring_service" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = cluster_service.SetMonitoringServiceRequest.pb( + cluster_service.SetMonitoringServiceRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = cluster_service.Operation.to_json( + cluster_service.Operation() + ) + + request = cluster_service.SetMonitoringServiceRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = cluster_service.Operation() + + client.set_monitoring_service( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_set_monitoring_service_rest_bad_request( + transport: str = "rest", request_type=cluster_service.SetMonitoringServiceRequest +): + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/clusters/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.set_monitoring_service(request) + + +def test_set_monitoring_service_rest_flattened(): + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = cluster_service.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {"name": "projects/sample1/locations/sample2/clusters/sample3"} + + # get truthy value for each flattened field + mock_args = dict( + project_id="project_id_value", + zone="zone_value", + cluster_id="cluster_id_value", + monitoring_service="monitoring_service_value", + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = cluster_service.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.set_monitoring_service(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/clusters/*}:setMonitoring" + % client.transport._host, + args[1], + ) + + +def test_set_monitoring_service_rest_flattened_error(transport: str = "rest"): + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.set_monitoring_service( + cluster_service.SetMonitoringServiceRequest(), + project_id="project_id_value", + zone="zone_value", + cluster_id="cluster_id_value", + monitoring_service="monitoring_service_value", + name="name_value", + ) + + +def test_set_monitoring_service_rest_error(): + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + cluster_service.SetAddonsConfigRequest, + dict, + ], +) +def test_set_addons_config_rest(request_type): + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/clusters/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = cluster_service.Operation( + name="name_value", + zone="zone_value", + operation_type=cluster_service.Operation.Type.CREATE_CLUSTER, + status=cluster_service.Operation.Status.PENDING, + detail="detail_value", + status_message="status_message_value", + self_link="self_link_value", + target_link="target_link_value", + location="location_value", + start_time="start_time_value", + end_time="end_time_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = cluster_service.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.set_addons_config(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, cluster_service.Operation) + assert response.name == "name_value" + assert response.zone == "zone_value" + assert response.operation_type == cluster_service.Operation.Type.CREATE_CLUSTER + assert response.status == cluster_service.Operation.Status.PENDING + assert response.detail == "detail_value" + assert response.status_message == "status_message_value" + assert response.self_link == "self_link_value" + assert response.target_link == "target_link_value" + assert response.location == "location_value" + assert response.start_time == "start_time_value" + assert response.end_time == "end_time_value" + + +def test_set_addons_config_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.set_addons_config in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.set_addons_config + ] = mock_rpc + + request = {} + client.set_addons_config(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.set_addons_config(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_set_addons_config_rest_required_fields( + request_type=cluster_service.SetAddonsConfigRequest, +): + transport_class = transports.ClusterManagerRestTransport + + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).set_addons_config._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).set_addons_config._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = cluster_service.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = cluster_service.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.set_addons_config(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_set_addons_config_rest_unset_required_fields(): + transport = transports.ClusterManagerRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.set_addons_config._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("addonsConfig",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_set_addons_config_rest_interceptors(null_interceptor): + transport = transports.ClusterManagerRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.ClusterManagerRestInterceptor(), + ) + client = ClusterManagerClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ClusterManagerRestInterceptor, "post_set_addons_config" + ) as post, mock.patch.object( + transports.ClusterManagerRestInterceptor, "pre_set_addons_config" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = cluster_service.SetAddonsConfigRequest.pb( + cluster_service.SetAddonsConfigRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = cluster_service.Operation.to_json( + cluster_service.Operation() + ) + + request = cluster_service.SetAddonsConfigRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = cluster_service.Operation() + + client.set_addons_config( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_set_addons_config_rest_bad_request( + transport: str = "rest", request_type=cluster_service.SetAddonsConfigRequest +): + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/clusters/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.set_addons_config(request) + + +def test_set_addons_config_rest_flattened(): + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = cluster_service.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {"name": "projects/sample1/locations/sample2/clusters/sample3"} + + # get truthy value for each flattened field + mock_args = dict( + project_id="project_id_value", + zone="zone_value", + cluster_id="cluster_id_value", + addons_config=cluster_service.AddonsConfig( + http_load_balancing=cluster_service.HttpLoadBalancing(disabled=True) + ), + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = cluster_service.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.set_addons_config(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/clusters/*}:setAddons" + % client.transport._host, + args[1], + ) + + +def test_set_addons_config_rest_flattened_error(transport: str = "rest"): + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.set_addons_config( + cluster_service.SetAddonsConfigRequest(), + project_id="project_id_value", + zone="zone_value", + cluster_id="cluster_id_value", + addons_config=cluster_service.AddonsConfig( + http_load_balancing=cluster_service.HttpLoadBalancing(disabled=True) + ), + name="name_value", + ) + + +def test_set_addons_config_rest_error(): + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + cluster_service.SetLocationsRequest, + dict, + ], +) +def test_set_locations_rest(request_type): + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/clusters/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = cluster_service.Operation( + name="name_value", + zone="zone_value", + operation_type=cluster_service.Operation.Type.CREATE_CLUSTER, + status=cluster_service.Operation.Status.PENDING, + detail="detail_value", + status_message="status_message_value", + self_link="self_link_value", + target_link="target_link_value", + location="location_value", + start_time="start_time_value", + end_time="end_time_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = cluster_service.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.set_locations(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, cluster_service.Operation) + assert response.name == "name_value" + assert response.zone == "zone_value" + assert response.operation_type == cluster_service.Operation.Type.CREATE_CLUSTER + assert response.status == cluster_service.Operation.Status.PENDING + assert response.detail == "detail_value" + assert response.status_message == "status_message_value" + assert response.self_link == "self_link_value" + assert response.target_link == "target_link_value" + assert response.location == "location_value" + assert response.start_time == "start_time_value" + assert response.end_time == "end_time_value" + + +def test_set_locations_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.set_locations in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.set_locations] = mock_rpc + + request = {} + client.set_locations(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.set_locations(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_set_locations_rest_required_fields( + request_type=cluster_service.SetLocationsRequest, +): + transport_class = transports.ClusterManagerRestTransport + + request_init = {} + request_init["locations"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).set_locations._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["locations"] = "locations_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).set_locations._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "locations" in jsonified_request + assert jsonified_request["locations"] == "locations_value" + + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = cluster_service.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = cluster_service.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.set_locations(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_set_locations_rest_unset_required_fields(): + transport = transports.ClusterManagerRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.set_locations._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("locations",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_set_locations_rest_interceptors(null_interceptor): + transport = transports.ClusterManagerRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.ClusterManagerRestInterceptor(), + ) + client = ClusterManagerClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ClusterManagerRestInterceptor, "post_set_locations" + ) as post, mock.patch.object( + transports.ClusterManagerRestInterceptor, "pre_set_locations" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = cluster_service.SetLocationsRequest.pb( + cluster_service.SetLocationsRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = cluster_service.Operation.to_json( + cluster_service.Operation() + ) + + request = cluster_service.SetLocationsRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = cluster_service.Operation() + + client.set_locations( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_set_locations_rest_bad_request( + transport: str = "rest", request_type=cluster_service.SetLocationsRequest +): + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/clusters/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.set_locations(request) + + +def test_set_locations_rest_flattened(): + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = cluster_service.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {"name": "projects/sample1/locations/sample2/clusters/sample3"} + + # get truthy value for each flattened field + mock_args = dict( + project_id="project_id_value", + zone="zone_value", + cluster_id="cluster_id_value", + locations=["locations_value"], + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = cluster_service.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.set_locations(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/clusters/*}:setLocations" + % client.transport._host, + args[1], + ) + + +def test_set_locations_rest_flattened_error(transport: str = "rest"): + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.set_locations( + cluster_service.SetLocationsRequest(), + project_id="project_id_value", + zone="zone_value", + cluster_id="cluster_id_value", + locations=["locations_value"], + name="name_value", + ) + + +def test_set_locations_rest_error(): + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + cluster_service.UpdateMasterRequest, + dict, + ], +) +def test_update_master_rest(request_type): + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/clusters/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = cluster_service.Operation( + name="name_value", + zone="zone_value", + operation_type=cluster_service.Operation.Type.CREATE_CLUSTER, + status=cluster_service.Operation.Status.PENDING, + detail="detail_value", + status_message="status_message_value", + self_link="self_link_value", + target_link="target_link_value", + location="location_value", + start_time="start_time_value", + end_time="end_time_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = cluster_service.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.update_master(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, cluster_service.Operation) + assert response.name == "name_value" + assert response.zone == "zone_value" + assert response.operation_type == cluster_service.Operation.Type.CREATE_CLUSTER + assert response.status == cluster_service.Operation.Status.PENDING + assert response.detail == "detail_value" + assert response.status_message == "status_message_value" + assert response.self_link == "self_link_value" + assert response.target_link == "target_link_value" + assert response.location == "location_value" + assert response.start_time == "start_time_value" + assert response.end_time == "end_time_value" + + +def test_update_master_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.update_master in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.update_master] = mock_rpc + + request = {} + client.update_master(request) + + # Establish that the underlying gRPC stub method was called. 
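+ # The .name string assigned to mock_rpc.return_value above only keeps callers that read an operation name happy; its value is never asserted.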
+ assert mock_rpc.call_count == 1 + + client.update_master(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_update_master_rest_required_fields( + request_type=cluster_service.UpdateMasterRequest, +): + transport_class = transports.ClusterManagerRestTransport + + request_init = {} + request_init["master_version"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_master._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["masterVersion"] = "master_version_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_master._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "masterVersion" in jsonified_request + assert jsonified_request["masterVersion"] == "master_version_value" + + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = cluster_service.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = cluster_service.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.update_master(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_update_master_rest_unset_required_fields(): + transport = transports.ClusterManagerRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.update_master._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("masterVersion",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_master_rest_interceptors(null_interceptor): + transport = transports.ClusterManagerRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.ClusterManagerRestInterceptor(), + ) + client = ClusterManagerClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ClusterManagerRestInterceptor, "post_update_master" + ) as post, mock.patch.object( + transports.ClusterManagerRestInterceptor, "pre_update_master" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = cluster_service.UpdateMasterRequest.pb( + cluster_service.UpdateMasterRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = cluster_service.Operation.to_json( + cluster_service.Operation() + ) + + request = cluster_service.UpdateMasterRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = cluster_service.Operation() + + client.update_master( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_update_master_rest_bad_request( + transport: str = "rest", request_type=cluster_service.UpdateMasterRequest +): + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/clusters/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.update_master(request) + + +def test_update_master_rest_flattened(): + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = cluster_service.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {"name": "projects/sample1/locations/sample2/clusters/sample3"} + + # get truthy value for each flattened field + mock_args = dict( + project_id="project_id_value", + zone="zone_value", + cluster_id="cluster_id_value", + master_version="master_version_value", + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = cluster_service.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.update_master(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/clusters/*}:updateMaster" + % client.transport._host, + args[1], + ) + + +def test_update_master_rest_flattened_error(transport: str = "rest"): + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_master( + cluster_service.UpdateMasterRequest(), + project_id="project_id_value", + zone="zone_value", + cluster_id="cluster_id_value", + master_version="master_version_value", + name="name_value", + ) + + +def test_update_master_rest_error(): + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + cluster_service.SetMasterAuthRequest, + dict, + ], +) +def test_set_master_auth_rest(request_type): + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/clusters/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
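+ # start_time and end_time are plain string fields on cluster_service.Operation, so the string sentinels set here round-trip unchanged through the JSON fake.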
+ return_value = cluster_service.Operation( + name="name_value", + zone="zone_value", + operation_type=cluster_service.Operation.Type.CREATE_CLUSTER, + status=cluster_service.Operation.Status.PENDING, + detail="detail_value", + status_message="status_message_value", + self_link="self_link_value", + target_link="target_link_value", + location="location_value", + start_time="start_time_value", + end_time="end_time_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = cluster_service.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.set_master_auth(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, cluster_service.Operation) + assert response.name == "name_value" + assert response.zone == "zone_value" + assert response.operation_type == cluster_service.Operation.Type.CREATE_CLUSTER + assert response.status == cluster_service.Operation.Status.PENDING + assert response.detail == "detail_value" + assert response.status_message == "status_message_value" + assert response.self_link == "self_link_value" + assert response.target_link == "target_link_value" + assert response.location == "location_value" + assert response.start_time == "start_time_value" + assert response.end_time == "end_time_value" + + +def test_set_master_auth_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.set_master_auth in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.set_master_auth] = mock_rpc + + request = {} + client.set_master_auth(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.set_master_auth(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_set_master_auth_rest_required_fields( + request_type=cluster_service.SetMasterAuthRequest, +): + transport_class = transports.ClusterManagerRestTransport + + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).set_master_auth._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).set_master_auth._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = cluster_service.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = cluster_service.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.set_master_auth(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_set_master_auth_rest_unset_required_fields(): + transport = transports.ClusterManagerRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.set_master_auth._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(()) + & set( + ( + "action", + "update", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_set_master_auth_rest_interceptors(null_interceptor): + transport = transports.ClusterManagerRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.ClusterManagerRestInterceptor(), + ) + client = ClusterManagerClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ClusterManagerRestInterceptor, "post_set_master_auth" + ) as post, mock.patch.object( + transports.ClusterManagerRestInterceptor, "pre_set_master_auth" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = cluster_service.SetMasterAuthRequest.pb( + cluster_service.SetMasterAuthRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = cluster_service.Operation.to_json( + cluster_service.Operation() + ) + + request = cluster_service.SetMasterAuthRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = cluster_service.Operation() + + client.set_master_auth( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_set_master_auth_rest_bad_request( + transport: str = "rest", request_type=cluster_service.SetMasterAuthRequest +): + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/clusters/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.set_master_auth(request) + + +def test_set_master_auth_rest_error(): + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + cluster_service.DeleteClusterRequest, + dict, + ], +) +def test_delete_cluster_rest(request_type): + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/clusters/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = cluster_service.Operation( + name="name_value", + zone="zone_value", + operation_type=cluster_service.Operation.Type.CREATE_CLUSTER, + status=cluster_service.Operation.Status.PENDING, + detail="detail_value", + status_message="status_message_value", + self_link="self_link_value", + target_link="target_link_value", + location="location_value", + start_time="start_time_value", + end_time="end_time_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = cluster_service.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.delete_cluster(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, cluster_service.Operation) + assert response.name == "name_value" + assert response.zone == "zone_value" + assert response.operation_type == cluster_service.Operation.Type.CREATE_CLUSTER + assert response.status == cluster_service.Operation.Status.PENDING + assert response.detail == "detail_value" + assert response.status_message == "status_message_value" + assert response.self_link == "self_link_value" + assert response.target_link == "target_link_value" + assert response.location == "location_value" + assert response.start_time == "start_time_value" + assert response.end_time == "end_time_value" + + +def test_delete_cluster_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete_cluster in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client._transport._wrapped_methods[client._transport.delete_cluster] = mock_rpc + + request = {} + client.delete_cluster(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.delete_cluster(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_cluster_rest_interceptors(null_interceptor): + transport = transports.ClusterManagerRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.ClusterManagerRestInterceptor(), + ) + client = ClusterManagerClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ClusterManagerRestInterceptor, "post_delete_cluster" + ) as post, mock.patch.object( + transports.ClusterManagerRestInterceptor, "pre_delete_cluster" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = cluster_service.DeleteClusterRequest.pb( + cluster_service.DeleteClusterRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = cluster_service.Operation.to_json( + cluster_service.Operation() + ) + + request = cluster_service.DeleteClusterRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = cluster_service.Operation() + + client.delete_cluster( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_delete_cluster_rest_bad_request( + transport: str = "rest", request_type=cluster_service.DeleteClusterRequest +): + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/clusters/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_cluster(request) + + +def test_delete_cluster_rest_flattened(): + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
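+ # This flattened-argument test reuses the fake-Response pattern; the assertion that matters is the path_template.validate() check on the transcoded URI further down.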
+ return_value = cluster_service.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {"name": "projects/sample1/locations/sample2/clusters/sample3"} + + # get truthy value for each flattened field + mock_args = dict( + project_id="project_id_value", + zone="zone_value", + cluster_id="cluster_id_value", + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = cluster_service.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.delete_cluster(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/clusters/*}" % client.transport._host, + args[1], + ) + + +def test_delete_cluster_rest_flattened_error(transport: str = "rest"): + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_cluster( + cluster_service.DeleteClusterRequest(), + project_id="project_id_value", + zone="zone_value", + cluster_id="cluster_id_value", + name="name_value", + ) + + +def test_delete_cluster_rest_error(): + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + cluster_service.ListOperationsRequest, + dict, + ], +) +def test_list_operations_rest(request_type): + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = cluster_service.ListOperationsResponse( + missing_zones=["missing_zones_value"], + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = cluster_service.ListOperationsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_operations(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, cluster_service.ListOperationsResponse) + assert response.missing_zones == ["missing_zones_value"] + + +def test_list_operations_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_operations in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.list_operations] = mock_rpc + + request = {} + client.list_operations(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_operations(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_operations_rest_interceptors(null_interceptor): + transport = transports.ClusterManagerRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.ClusterManagerRestInterceptor(), + ) + client = ClusterManagerClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ClusterManagerRestInterceptor, "post_list_operations" + ) as post, mock.patch.object( + transports.ClusterManagerRestInterceptor, "pre_list_operations" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = cluster_service.ListOperationsRequest.pb( + cluster_service.ListOperationsRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = cluster_service.ListOperationsResponse.to_json( + cluster_service.ListOperationsResponse() + ) + + request = cluster_service.ListOperationsRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = cluster_service.ListOperationsResponse() + + client.list_operations( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_operations_rest_bad_request( + transport: str = "rest", request_type=cluster_service.ListOperationsRequest +): + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
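+ # The REST transport surfaces the mocked 400 status as core_exceptions.BadRequest.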
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_operations(request) + + +def test_list_operations_rest_flattened(): + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = cluster_service.ListOperationsResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + project_id="project_id_value", + zone="zone_value", + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = cluster_service.ListOperationsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.list_operations(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*}/operations" % client.transport._host, + args[1], + ) + + +def test_list_operations_rest_flattened_error(transport: str = "rest"): + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_operations( + cluster_service.ListOperationsRequest(), + project_id="project_id_value", + zone="zone_value", + parent="parent_value", + ) + + +def test_list_operations_rest_error(): + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + cluster_service.GetOperationRequest, + dict, + ], +) +def test_get_operation_rest(request_type): + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/operations/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
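+ # Every scalar field is set so the JSON round trip through the mocked response can be checked field by field.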
+ return_value = cluster_service.Operation( + name="name_value", + zone="zone_value", + operation_type=cluster_service.Operation.Type.CREATE_CLUSTER, + status=cluster_service.Operation.Status.PENDING, + detail="detail_value", + status_message="status_message_value", + self_link="self_link_value", + target_link="target_link_value", + location="location_value", + start_time="start_time_value", + end_time="end_time_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = cluster_service.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_operation(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, cluster_service.Operation) + assert response.name == "name_value" + assert response.zone == "zone_value" + assert response.operation_type == cluster_service.Operation.Type.CREATE_CLUSTER + assert response.status == cluster_service.Operation.Status.PENDING + assert response.detail == "detail_value" + assert response.status_message == "status_message_value" + assert response.self_link == "self_link_value" + assert response.target_link == "target_link_value" + assert response.location == "location_value" + assert response.start_time == "start_time_value" + assert response.end_time == "end_time_value" + + +def test_get_operation_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_operation in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get_operation] = mock_rpc + + request = {} + client.get_operation(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get_operation(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_operation_rest_interceptors(null_interceptor): + transport = transports.ClusterManagerRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.ClusterManagerRestInterceptor(), + ) + client = ClusterManagerClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ClusterManagerRestInterceptor, "post_get_operation" + ) as post, mock.patch.object( + transports.ClusterManagerRestInterceptor, "pre_get_operation" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = cluster_service.GetOperationRequest.pb( + cluster_service.GetOperationRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = cluster_service.Operation.to_json( + cluster_service.Operation() + ) + + request = cluster_service.GetOperationRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = cluster_service.Operation() + + client.get_operation( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_operation_rest_bad_request( + transport: str = "rest", request_type=cluster_service.GetOperationRequest +): + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/operations/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_operation(request) + + +def test_get_operation_rest_flattened(): + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = cluster_service.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/operations/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + project_id="project_id_value", + zone="zone_value", + operation_id="operation_id_value", + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = cluster_service.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_operation(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/operations/*}" % client.transport._host, + args[1], + ) + + +def test_get_operation_rest_flattened_error(transport: str = "rest"): + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_operation( + cluster_service.GetOperationRequest(), + project_id="project_id_value", + zone="zone_value", + operation_id="operation_id_value", + name="name_value", + ) + + +def test_get_operation_rest_error(): + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + cluster_service.CancelOperationRequest, + dict, + ], +) +def test_cancel_operation_rest(request_type): + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/operations/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.cancel_operation(request) + + # Establish that the response is the type that we expect. 
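+ # CancelOperation has no response payload (the mocked body is empty), so the client returns None.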
+ assert response is None + + +def test_cancel_operation_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.cancel_operation in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.cancel_operation + ] = mock_rpc + + request = {} + client.cancel_operation(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.cancel_operation(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_cancel_operation_rest_interceptors(null_interceptor): + transport = transports.ClusterManagerRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.ClusterManagerRestInterceptor(), + ) + client = ClusterManagerClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ClusterManagerRestInterceptor, "pre_cancel_operation" + ) as pre: + pre.assert_not_called() + pb_message = cluster_service.CancelOperationRequest.pb( + cluster_service.CancelOperationRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + + request = cluster_service.CancelOperationRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + + client.cancel_operation( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + + +def test_cancel_operation_rest_bad_request( + transport: str = "rest", request_type=cluster_service.CancelOperationRequest +): + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/operations/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.cancel_operation(request) + + +def test_cancel_operation_rest_flattened(): + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = None + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/operations/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + project_id="project_id_value", + zone="zone_value", + operation_id="operation_id_value", + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.cancel_operation(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/operations/*}:cancel" + % client.transport._host, + args[1], + ) + + +def test_cancel_operation_rest_flattened_error(transport: str = "rest"): + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.cancel_operation( + cluster_service.CancelOperationRequest(), + project_id="project_id_value", + zone="zone_value", + operation_id="operation_id_value", + name="name_value", + ) + + +def test_cancel_operation_rest_error(): + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + cluster_service.GetServerConfigRequest, + dict, + ], +) +def test_get_server_config_rest(request_type): + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = cluster_service.ServerConfig( + default_cluster_version="default_cluster_version_value", + valid_node_versions=["valid_node_versions_value"], + default_image_type="default_image_type_value", + valid_image_types=["valid_image_types_value"], + valid_master_versions=["valid_master_versions_value"], + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = cluster_service.ServerConfig.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_server_config(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, cluster_service.ServerConfig) + assert response.default_cluster_version == "default_cluster_version_value" + assert response.valid_node_versions == ["valid_node_versions_value"] + assert response.default_image_type == "default_image_type_value" + assert response.valid_image_types == ["valid_image_types_value"] + assert response.valid_master_versions == ["valid_master_versions_value"] + + +def test_get_server_config_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_server_config in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.get_server_config + ] = mock_rpc + + request = {} + client.get_server_config(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get_server_config(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_server_config_rest_interceptors(null_interceptor): + transport = transports.ClusterManagerRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.ClusterManagerRestInterceptor(), + ) + client = ClusterManagerClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ClusterManagerRestInterceptor, "post_get_server_config" + ) as post, mock.patch.object( + transports.ClusterManagerRestInterceptor, "pre_get_server_config" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = cluster_service.GetServerConfigRequest.pb( + cluster_service.GetServerConfigRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = cluster_service.ServerConfig.to_json( + cluster_service.ServerConfig() + ) + + request = cluster_service.GetServerConfigRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = cluster_service.ServerConfig() + + client.get_server_config( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_server_config_rest_bad_request( + transport: str = "rest", request_type=cluster_service.GetServerConfigRequest +): + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_server_config(request) + + +def test_get_server_config_rest_flattened(): + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = cluster_service.ServerConfig() + + # get arguments that satisfy an http rule for this method + sample_request = {"name": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + project_id="project_id_value", + zone="zone_value", + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = cluster_service.ServerConfig.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_server_config(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*}/serverConfig" % client.transport._host, + args[1], + ) + + +def test_get_server_config_rest_flattened_error(transport: str = "rest"): + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_server_config( + cluster_service.GetServerConfigRequest(), + project_id="project_id_value", + zone="zone_value", + name="name_value", + ) + + +def test_get_server_config_rest_error(): + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + cluster_service.GetJSONWebKeysRequest, + dict, + ], +) +def test_get_json_web_keys_rest(request_type): + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2/clusters/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = cluster_service.GetJSONWebKeysResponse() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = cluster_service.GetJSONWebKeysResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_json_web_keys(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, cluster_service.GetJSONWebKeysResponse) + + +def test_get_json_web_keys_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_json_web_keys in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.get_json_web_keys + ] = mock_rpc + + request = {} + client.get_json_web_keys(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.get_json_web_keys(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_json_web_keys_rest_interceptors(null_interceptor): + transport = transports.ClusterManagerRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.ClusterManagerRestInterceptor(), + ) + client = ClusterManagerClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ClusterManagerRestInterceptor, "post_get_json_web_keys" + ) as post, mock.patch.object( + transports.ClusterManagerRestInterceptor, "pre_get_json_web_keys" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = cluster_service.GetJSONWebKeysRequest.pb( + cluster_service.GetJSONWebKeysRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = cluster_service.GetJSONWebKeysResponse.to_json( + cluster_service.GetJSONWebKeysResponse() + ) + + request = cluster_service.GetJSONWebKeysRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = cluster_service.GetJSONWebKeysResponse() + + client.get_json_web_keys( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_json_web_keys_rest_bad_request( + transport: str = "rest", request_type=cluster_service.GetJSONWebKeysRequest +): + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2/clusters/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_json_web_keys(request) + + +def test_get_json_web_keys_rest_error(): + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + cluster_service.ListNodePoolsRequest, + dict, + ], +) +def test_list_node_pools_rest(request_type): + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2/clusters/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = cluster_service.ListNodePoolsResponse() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = cluster_service.ListNodePoolsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_node_pools(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, cluster_service.ListNodePoolsResponse) + + +def test_list_node_pools_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_node_pools in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.list_node_pools] = mock_rpc + + request = {} + client.list_node_pools(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.list_node_pools(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_node_pools_rest_interceptors(null_interceptor): + transport = transports.ClusterManagerRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.ClusterManagerRestInterceptor(), + ) + client = ClusterManagerClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ClusterManagerRestInterceptor, "post_list_node_pools" + ) as post, mock.patch.object( + transports.ClusterManagerRestInterceptor, "pre_list_node_pools" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = cluster_service.ListNodePoolsRequest.pb( + cluster_service.ListNodePoolsRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = cluster_service.ListNodePoolsResponse.to_json( + cluster_service.ListNodePoolsResponse() + ) + + request = cluster_service.ListNodePoolsRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = cluster_service.ListNodePoolsResponse() + + client.list_node_pools( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_node_pools_rest_bad_request( + transport: str = "rest", request_type=cluster_service.ListNodePoolsRequest +): + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2/clusters/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_node_pools(request) + + +def test_list_node_pools_rest_flattened(): + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = cluster_service.ListNodePoolsResponse() + + # get arguments that satisfy an http rule for this method + sample_request = { + "parent": "projects/sample1/locations/sample2/clusters/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + project_id="project_id_value", + zone="zone_value", + cluster_id="cluster_id_value", + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = cluster_service.ListNodePoolsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.list_node_pools(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*/clusters/*}/nodePools" + % client.transport._host, + args[1], + ) + + +def test_list_node_pools_rest_flattened_error(transport: str = "rest"): + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_node_pools( + cluster_service.ListNodePoolsRequest(), + project_id="project_id_value", + zone="zone_value", + cluster_id="cluster_id_value", + parent="parent_value", + ) + + +def test_list_node_pools_rest_error(): + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + cluster_service.GetNodePoolRequest, + dict, + ], +) +def test_get_node_pool_rest(request_type): + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/clusters/sample3/nodePools/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = cluster_service.NodePool( + name="name_value", + initial_node_count=1911, + locations=["locations_value"], + self_link="self_link_value", + version="version_value", + instance_group_urls=["instance_group_urls_value"], + status=cluster_service.NodePool.Status.PROVISIONING, + status_message="status_message_value", + pod_ipv4_cidr_size=1856, + etag="etag_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = cluster_service.NodePool.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_node_pool(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, cluster_service.NodePool) + assert response.name == "name_value" + assert response.initial_node_count == 1911 + assert response.locations == ["locations_value"] + assert response.self_link == "self_link_value" + assert response.version == "version_value" + assert response.instance_group_urls == ["instance_group_urls_value"] + assert response.status == cluster_service.NodePool.Status.PROVISIONING + assert response.status_message == "status_message_value" + assert response.pod_ipv4_cidr_size == 1856 + assert response.etag == "etag_value" + + +def test_get_node_pool_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_node_pool in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get_node_pool] = mock_rpc + + request = {} + client.get_node_pool(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.get_node_pool(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_node_pool_rest_interceptors(null_interceptor): + transport = transports.ClusterManagerRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.ClusterManagerRestInterceptor(), + ) + client = ClusterManagerClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ClusterManagerRestInterceptor, "post_get_node_pool" + ) as post, mock.patch.object( + transports.ClusterManagerRestInterceptor, "pre_get_node_pool" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = cluster_service.GetNodePoolRequest.pb( + cluster_service.GetNodePoolRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = cluster_service.NodePool.to_json( + cluster_service.NodePool() + ) + + request = cluster_service.GetNodePoolRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = cluster_service.NodePool() + + client.get_node_pool( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_node_pool_rest_bad_request( + transport: str = "rest", request_type=cluster_service.GetNodePoolRequest +): + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + 
transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/clusters/sample3/nodePools/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_node_pool(request) + + +def test_get_node_pool_rest_flattened(): + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = cluster_service.NodePool() + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/clusters/sample3/nodePools/sample4" + } + + # get truthy value for each flattened field + mock_args = dict( + project_id="project_id_value", + zone="zone_value", + cluster_id="cluster_id_value", + node_pool_id="node_pool_id_value", + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = cluster_service.NodePool.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_node_pool(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/clusters/*/nodePools/*}" + % client.transport._host, + args[1], + ) + + +def test_get_node_pool_rest_flattened_error(transport: str = "rest"): + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_node_pool( + cluster_service.GetNodePoolRequest(), + project_id="project_id_value", + zone="zone_value", + cluster_id="cluster_id_value", + node_pool_id="node_pool_id_value", + name="name_value", + ) + + +def test_get_node_pool_rest_error(): + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + cluster_service.CreateNodePoolRequest, + dict, + ], +) +def test_create_node_pool_rest(request_type): + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2/clusters/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = cluster_service.Operation( + name="name_value", + zone="zone_value", + operation_type=cluster_service.Operation.Type.CREATE_CLUSTER, + status=cluster_service.Operation.Status.PENDING, + detail="detail_value", + status_message="status_message_value", + self_link="self_link_value", + target_link="target_link_value", + location="location_value", + start_time="start_time_value", + end_time="end_time_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = cluster_service.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.create_node_pool(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, cluster_service.Operation) + assert response.name == "name_value" + assert response.zone == "zone_value" + assert response.operation_type == cluster_service.Operation.Type.CREATE_CLUSTER + assert response.status == cluster_service.Operation.Status.PENDING + assert response.detail == "detail_value" + assert response.status_message == "status_message_value" + assert response.self_link == "self_link_value" + assert response.target_link == "target_link_value" + assert response.location == "location_value" + assert response.start_time == "start_time_value" + assert response.end_time == "end_time_value" + + +def test_create_node_pool_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.create_node_pool in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.create_node_pool + ] = mock_rpc + + request = {} + client.create_node_pool(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.create_node_pool(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_create_node_pool_rest_required_fields( + request_type=cluster_service.CreateNodePoolRequest, +): + transport_class = transports.ClusterManagerRestTransport + + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_node_pool._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_node_pool._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = cluster_service.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = cluster_service.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.create_node_pool(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_create_node_pool_rest_unset_required_fields(): + transport = transports.ClusterManagerRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.create_node_pool._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("nodePool",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_node_pool_rest_interceptors(null_interceptor): + transport = transports.ClusterManagerRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.ClusterManagerRestInterceptor(), + ) + client = ClusterManagerClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ClusterManagerRestInterceptor, "post_create_node_pool" + ) as post, mock.patch.object( + transports.ClusterManagerRestInterceptor, "pre_create_node_pool" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = cluster_service.CreateNodePoolRequest.pb( + cluster_service.CreateNodePoolRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = cluster_service.Operation.to_json( + cluster_service.Operation() + ) + + request = cluster_service.CreateNodePoolRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = cluster_service.Operation() + + client.create_node_pool( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_create_node_pool_rest_bad_request( + transport: str = "rest", request_type=cluster_service.CreateNodePoolRequest +): + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2/clusters/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.create_node_pool(request) + + +def test_create_node_pool_rest_flattened(): + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = cluster_service.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = { + "parent": "projects/sample1/locations/sample2/clusters/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + project_id="project_id_value", + zone="zone_value", + cluster_id="cluster_id_value", + node_pool=cluster_service.NodePool(name="name_value"), + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = cluster_service.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.create_node_pool(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*/clusters/*}/nodePools" + % client.transport._host, + args[1], + ) + + +def test_create_node_pool_rest_flattened_error(transport: str = "rest"): + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_node_pool( + cluster_service.CreateNodePoolRequest(), + project_id="project_id_value", + zone="zone_value", + cluster_id="cluster_id_value", + node_pool=cluster_service.NodePool(name="name_value"), + parent="parent_value", + ) + + +def test_create_node_pool_rest_error(): + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + cluster_service.DeleteNodePoolRequest, + dict, + ], +) +def test_delete_node_pool_rest(request_type): + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/clusters/sample3/nodePools/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = cluster_service.Operation( + name="name_value", + zone="zone_value", + operation_type=cluster_service.Operation.Type.CREATE_CLUSTER, + status=cluster_service.Operation.Status.PENDING, + detail="detail_value", + status_message="status_message_value", + self_link="self_link_value", + target_link="target_link_value", + location="location_value", + start_time="start_time_value", + end_time="end_time_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = cluster_service.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.delete_node_pool(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, cluster_service.Operation) + assert response.name == "name_value" + assert response.zone == "zone_value" + assert response.operation_type == cluster_service.Operation.Type.CREATE_CLUSTER + assert response.status == cluster_service.Operation.Status.PENDING + assert response.detail == "detail_value" + assert response.status_message == "status_message_value" + assert response.self_link == "self_link_value" + assert response.target_link == "target_link_value" + assert response.location == "location_value" + assert response.start_time == "start_time_value" + assert response.end_time == "end_time_value" + + +def test_delete_node_pool_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete_node_pool in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.delete_node_pool + ] = mock_rpc + + request = {} + client.delete_node_pool(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.delete_node_pool(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_node_pool_rest_interceptors(null_interceptor): + transport = transports.ClusterManagerRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.ClusterManagerRestInterceptor(), + ) + client = ClusterManagerClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ClusterManagerRestInterceptor, "post_delete_node_pool" + ) as post, mock.patch.object( + transports.ClusterManagerRestInterceptor, "pre_delete_node_pool" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = cluster_service.DeleteNodePoolRequest.pb( + cluster_service.DeleteNodePoolRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = cluster_service.Operation.to_json( + cluster_service.Operation() + ) + + request = cluster_service.DeleteNodePoolRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = cluster_service.Operation() + + client.delete_node_pool( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_delete_node_pool_rest_bad_request( + transport: str = "rest", request_type=cluster_service.DeleteNodePoolRequest +): + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/clusters/sample3/nodePools/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_node_pool(request) + + +def test_delete_node_pool_rest_flattened(): + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = cluster_service.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/clusters/sample3/nodePools/sample4" + } + + # get truthy value for each flattened field + mock_args = dict( + project_id="project_id_value", + zone="zone_value", + cluster_id="cluster_id_value", + node_pool_id="node_pool_id_value", + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = cluster_service.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.delete_node_pool(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/clusters/*/nodePools/*}" + % client.transport._host, + args[1], + ) + + +def test_delete_node_pool_rest_flattened_error(transport: str = "rest"): + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_node_pool( + cluster_service.DeleteNodePoolRequest(), + project_id="project_id_value", + zone="zone_value", + cluster_id="cluster_id_value", + node_pool_id="node_pool_id_value", + name="name_value", + ) + + +def test_delete_node_pool_rest_error(): + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + cluster_service.CompleteNodePoolUpgradeRequest, + dict, + ], +) +def test_complete_node_pool_upgrade_rest(request_type): + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/clusters/sample3/nodePools/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.complete_node_pool_upgrade(request) + + # Establish that the response is the type that we expect. 
+ assert response is None + + +def test_complete_node_pool_upgrade_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.complete_node_pool_upgrade + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.complete_node_pool_upgrade + ] = mock_rpc + + request = {} + client.complete_node_pool_upgrade(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.complete_node_pool_upgrade(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_complete_node_pool_upgrade_rest_interceptors(null_interceptor): + transport = transports.ClusterManagerRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.ClusterManagerRestInterceptor(), + ) + client = ClusterManagerClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ClusterManagerRestInterceptor, "pre_complete_node_pool_upgrade" + ) as pre: + pre.assert_not_called() + pb_message = cluster_service.CompleteNodePoolUpgradeRequest.pb( + cluster_service.CompleteNodePoolUpgradeRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + + request = cluster_service.CompleteNodePoolUpgradeRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + + client.complete_node_pool_upgrade( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + + +def test_complete_node_pool_upgrade_rest_bad_request( + transport: str = "rest", request_type=cluster_service.CompleteNodePoolUpgradeRequest +): + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/clusters/sample3/nodePools/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.complete_node_pool_upgrade(request) + + +def test_complete_node_pool_upgrade_rest_error(): + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + cluster_service.RollbackNodePoolUpgradeRequest, + dict, + ], +) +def test_rollback_node_pool_upgrade_rest(request_type): + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/clusters/sample3/nodePools/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = cluster_service.Operation( + name="name_value", + zone="zone_value", + operation_type=cluster_service.Operation.Type.CREATE_CLUSTER, + status=cluster_service.Operation.Status.PENDING, + detail="detail_value", + status_message="status_message_value", + self_link="self_link_value", + target_link="target_link_value", + location="location_value", + start_time="start_time_value", + end_time="end_time_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = cluster_service.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.rollback_node_pool_upgrade(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, cluster_service.Operation) + assert response.name == "name_value" + assert response.zone == "zone_value" + assert response.operation_type == cluster_service.Operation.Type.CREATE_CLUSTER + assert response.status == cluster_service.Operation.Status.PENDING + assert response.detail == "detail_value" + assert response.status_message == "status_message_value" + assert response.self_link == "self_link_value" + assert response.target_link == "target_link_value" + assert response.location == "location_value" + assert response.start_time == "start_time_value" + assert response.end_time == "end_time_value" + + +def test_rollback_node_pool_upgrade_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.rollback_node_pool_upgrade + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.rollback_node_pool_upgrade + ] = mock_rpc + + request = {} + client.rollback_node_pool_upgrade(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.rollback_node_pool_upgrade(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_rollback_node_pool_upgrade_rest_interceptors(null_interceptor): + transport = transports.ClusterManagerRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.ClusterManagerRestInterceptor(), + ) + client = ClusterManagerClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ClusterManagerRestInterceptor, "post_rollback_node_pool_upgrade" + ) as post, mock.patch.object( + transports.ClusterManagerRestInterceptor, "pre_rollback_node_pool_upgrade" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = cluster_service.RollbackNodePoolUpgradeRequest.pb( + cluster_service.RollbackNodePoolUpgradeRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = cluster_service.Operation.to_json( + cluster_service.Operation() + ) + + request = cluster_service.RollbackNodePoolUpgradeRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = cluster_service.Operation() + + client.rollback_node_pool_upgrade( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + 
post.assert_called_once() + + +def test_rollback_node_pool_upgrade_rest_bad_request( + transport: str = "rest", request_type=cluster_service.RollbackNodePoolUpgradeRequest +): + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/clusters/sample3/nodePools/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.rollback_node_pool_upgrade(request) + + +def test_rollback_node_pool_upgrade_rest_flattened(): + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = cluster_service.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/clusters/sample3/nodePools/sample4" + } + + # get truthy value for each flattened field + mock_args = dict( + project_id="project_id_value", + zone="zone_value", + cluster_id="cluster_id_value", + node_pool_id="node_pool_id_value", + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = cluster_service.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.rollback_node_pool_upgrade(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/clusters/*/nodePools/*}:rollback" + % client.transport._host, + args[1], + ) + + +def test_rollback_node_pool_upgrade_rest_flattened_error(transport: str = "rest"): + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.rollback_node_pool_upgrade( + cluster_service.RollbackNodePoolUpgradeRequest(), + project_id="project_id_value", + zone="zone_value", + cluster_id="cluster_id_value", + node_pool_id="node_pool_id_value", + name="name_value", + ) + + +def test_rollback_node_pool_upgrade_rest_error(): + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + cluster_service.SetNodePoolManagementRequest, + dict, + ], +) +def test_set_node_pool_management_rest(request_type): + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/clusters/sample3/nodePools/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = cluster_service.Operation( + name="name_value", + zone="zone_value", + operation_type=cluster_service.Operation.Type.CREATE_CLUSTER, + status=cluster_service.Operation.Status.PENDING, + detail="detail_value", + status_message="status_message_value", + self_link="self_link_value", + target_link="target_link_value", + location="location_value", + start_time="start_time_value", + end_time="end_time_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = cluster_service.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.set_node_pool_management(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, cluster_service.Operation) + assert response.name == "name_value" + assert response.zone == "zone_value" + assert response.operation_type == cluster_service.Operation.Type.CREATE_CLUSTER + assert response.status == cluster_service.Operation.Status.PENDING + assert response.detail == "detail_value" + assert response.status_message == "status_message_value" + assert response.self_link == "self_link_value" + assert response.target_link == "target_link_value" + assert response.location == "location_value" + assert response.start_time == "start_time_value" + assert response.end_time == "end_time_value" + + +def test_set_node_pool_management_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.set_node_pool_management + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client._transport._wrapped_methods[ + client._transport.set_node_pool_management + ] = mock_rpc + + request = {} + client.set_node_pool_management(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.set_node_pool_management(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_set_node_pool_management_rest_required_fields( + request_type=cluster_service.SetNodePoolManagementRequest, +): + transport_class = transports.ClusterManagerRestTransport + + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).set_node_pool_management._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).set_node_pool_management._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = cluster_service.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = cluster_service.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.set_node_pool_management(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_set_node_pool_management_rest_unset_required_fields(): + transport = transports.ClusterManagerRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.set_node_pool_management._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("management",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_set_node_pool_management_rest_interceptors(null_interceptor): + transport = transports.ClusterManagerRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.ClusterManagerRestInterceptor(), + ) + client = ClusterManagerClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ClusterManagerRestInterceptor, "post_set_node_pool_management" + ) as post, mock.patch.object( + transports.ClusterManagerRestInterceptor, "pre_set_node_pool_management" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = cluster_service.SetNodePoolManagementRequest.pb( + cluster_service.SetNodePoolManagementRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = cluster_service.Operation.to_json( + cluster_service.Operation() + ) + + request = cluster_service.SetNodePoolManagementRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = cluster_service.Operation() + + client.set_node_pool_management( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_set_node_pool_management_rest_bad_request( + transport: str = "rest", request_type=cluster_service.SetNodePoolManagementRequest +): + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/clusters/sample3/nodePools/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.set_node_pool_management(request) + + +def test_set_node_pool_management_rest_error(): + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + cluster_service.SetLabelsRequest, + dict, + ], +) +def test_set_labels_rest(request_type): + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/clusters/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = cluster_service.Operation( + name="name_value", + zone="zone_value", + operation_type=cluster_service.Operation.Type.CREATE_CLUSTER, + status=cluster_service.Operation.Status.PENDING, + detail="detail_value", + status_message="status_message_value", + self_link="self_link_value", + target_link="target_link_value", + location="location_value", + start_time="start_time_value", + end_time="end_time_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = cluster_service.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.set_labels(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, cluster_service.Operation) + assert response.name == "name_value" + assert response.zone == "zone_value" + assert response.operation_type == cluster_service.Operation.Type.CREATE_CLUSTER + assert response.status == cluster_service.Operation.Status.PENDING + assert response.detail == "detail_value" + assert response.status_message == "status_message_value" + assert response.self_link == "self_link_value" + assert response.target_link == "target_link_value" + assert response.location == "location_value" + assert response.start_time == "start_time_value" + assert response.end_time == "end_time_value" + + +def test_set_labels_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.set_labels in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client._transport._wrapped_methods[client._transport.set_labels] = mock_rpc + + request = {} + client.set_labels(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.set_labels(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_set_labels_rest_required_fields(request_type=cluster_service.SetLabelsRequest): + transport_class = transports.ClusterManagerRestTransport + + request_init = {} + request_init["label_fingerprint"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).set_labels._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["labelFingerprint"] = "label_fingerprint_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).set_labels._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "labelFingerprint" in jsonified_request + assert jsonified_request["labelFingerprint"] == "label_fingerprint_value" + + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = cluster_service.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = cluster_service.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.set_labels(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_set_labels_rest_unset_required_fields(): + transport = transports.ClusterManagerRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.set_labels._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(()) + & set( + ( + "resourceLabels", + "labelFingerprint", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_set_labels_rest_interceptors(null_interceptor): + transport = transports.ClusterManagerRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.ClusterManagerRestInterceptor(), + ) + client = ClusterManagerClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ClusterManagerRestInterceptor, "post_set_labels" + ) as post, mock.patch.object( + transports.ClusterManagerRestInterceptor, "pre_set_labels" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = cluster_service.SetLabelsRequest.pb( + cluster_service.SetLabelsRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = cluster_service.Operation.to_json( + cluster_service.Operation() + ) + + request = cluster_service.SetLabelsRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = cluster_service.Operation() + + client.set_labels( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_set_labels_rest_bad_request( + transport: str = "rest", request_type=cluster_service.SetLabelsRequest +): + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/clusters/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.set_labels(request) + + +def test_set_labels_rest_error(): + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + cluster_service.SetLegacyAbacRequest, + dict, + ], +) +def test_set_legacy_abac_rest(request_type): + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/clusters/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = cluster_service.Operation( + name="name_value", + zone="zone_value", + operation_type=cluster_service.Operation.Type.CREATE_CLUSTER, + status=cluster_service.Operation.Status.PENDING, + detail="detail_value", + status_message="status_message_value", + self_link="self_link_value", + target_link="target_link_value", + location="location_value", + start_time="start_time_value", + end_time="end_time_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = cluster_service.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.set_legacy_abac(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, cluster_service.Operation) + assert response.name == "name_value" + assert response.zone == "zone_value" + assert response.operation_type == cluster_service.Operation.Type.CREATE_CLUSTER + assert response.status == cluster_service.Operation.Status.PENDING + assert response.detail == "detail_value" + assert response.status_message == "status_message_value" + assert response.self_link == "self_link_value" + assert response.target_link == "target_link_value" + assert response.location == "location_value" + assert response.start_time == "start_time_value" + assert response.end_time == "end_time_value" + + +def test_set_legacy_abac_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.set_legacy_abac in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client._transport._wrapped_methods[client._transport.set_legacy_abac] = mock_rpc + + request = {} + client.set_legacy_abac(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.set_legacy_abac(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_set_legacy_abac_rest_required_fields( + request_type=cluster_service.SetLegacyAbacRequest, +): + transport_class = transports.ClusterManagerRestTransport + + request_init = {} + request_init["enabled"] = False + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).set_legacy_abac._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["enabled"] = True + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).set_legacy_abac._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "enabled" in jsonified_request + assert jsonified_request["enabled"] == True + + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = cluster_service.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = cluster_service.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.set_legacy_abac(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_set_legacy_abac_rest_unset_required_fields(): + transport = transports.ClusterManagerRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.set_legacy_abac._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("enabled",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_set_legacy_abac_rest_interceptors(null_interceptor): + transport = transports.ClusterManagerRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.ClusterManagerRestInterceptor(), + ) + client = ClusterManagerClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ClusterManagerRestInterceptor, "post_set_legacy_abac" + ) as post, mock.patch.object( + transports.ClusterManagerRestInterceptor, "pre_set_legacy_abac" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = cluster_service.SetLegacyAbacRequest.pb( + cluster_service.SetLegacyAbacRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = cluster_service.Operation.to_json( + cluster_service.Operation() + ) + + request = cluster_service.SetLegacyAbacRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = cluster_service.Operation() + + client.set_legacy_abac( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_set_legacy_abac_rest_bad_request( + transport: str = "rest", request_type=cluster_service.SetLegacyAbacRequest +): + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/clusters/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.set_legacy_abac(request) + + +def test_set_legacy_abac_rest_flattened(): + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = cluster_service.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {"name": "projects/sample1/locations/sample2/clusters/sample3"} + + # get truthy value for each flattened field + mock_args = dict( + project_id="project_id_value", + zone="zone_value", + cluster_id="cluster_id_value", + enabled=True, + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = cluster_service.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.set_legacy_abac(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/clusters/*}:setLegacyAbac" + % client.transport._host, + args[1], + ) + + +def test_set_legacy_abac_rest_flattened_error(transport: str = "rest"): + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.set_legacy_abac( + cluster_service.SetLegacyAbacRequest(), + project_id="project_id_value", + zone="zone_value", + cluster_id="cluster_id_value", + enabled=True, + name="name_value", + ) + + +def test_set_legacy_abac_rest_error(): + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + cluster_service.StartIPRotationRequest, + dict, + ], +) +def test_start_ip_rotation_rest(request_type): + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/clusters/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = cluster_service.Operation( + name="name_value", + zone="zone_value", + operation_type=cluster_service.Operation.Type.CREATE_CLUSTER, + status=cluster_service.Operation.Status.PENDING, + detail="detail_value", + status_message="status_message_value", + self_link="self_link_value", + target_link="target_link_value", + location="location_value", + start_time="start_time_value", + end_time="end_time_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = cluster_service.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.start_ip_rotation(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, cluster_service.Operation) + assert response.name == "name_value" + assert response.zone == "zone_value" + assert response.operation_type == cluster_service.Operation.Type.CREATE_CLUSTER + assert response.status == cluster_service.Operation.Status.PENDING + assert response.detail == "detail_value" + assert response.status_message == "status_message_value" + assert response.self_link == "self_link_value" + assert response.target_link == "target_link_value" + assert response.location == "location_value" + assert response.start_time == "start_time_value" + assert response.end_time == "end_time_value" + + +def test_start_ip_rotation_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.start_ip_rotation in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.start_ip_rotation + ] = mock_rpc + + request = {} + client.start_ip_rotation(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.start_ip_rotation(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_start_ip_rotation_rest_interceptors(null_interceptor): + transport = transports.ClusterManagerRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.ClusterManagerRestInterceptor(), + ) + client = ClusterManagerClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ClusterManagerRestInterceptor, "post_start_ip_rotation" + ) as post, mock.patch.object( + transports.ClusterManagerRestInterceptor, "pre_start_ip_rotation" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = cluster_service.StartIPRotationRequest.pb( + cluster_service.StartIPRotationRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = cluster_service.Operation.to_json( + cluster_service.Operation() + ) + + request = cluster_service.StartIPRotationRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = cluster_service.Operation() + + client.start_ip_rotation( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_start_ip_rotation_rest_bad_request( + transport: str = "rest", request_type=cluster_service.StartIPRotationRequest +): + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/clusters/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.start_ip_rotation(request) + + +def test_start_ip_rotation_rest_flattened(): + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = cluster_service.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {"name": "projects/sample1/locations/sample2/clusters/sample3"} + + # get truthy value for each flattened field + mock_args = dict( + project_id="project_id_value", + zone="zone_value", + cluster_id="cluster_id_value", + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = cluster_service.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.start_ip_rotation(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/clusters/*}:startIpRotation" + % client.transport._host, + args[1], + ) + + +def test_start_ip_rotation_rest_flattened_error(transport: str = "rest"): + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.start_ip_rotation( + cluster_service.StartIPRotationRequest(), + project_id="project_id_value", + zone="zone_value", + cluster_id="cluster_id_value", + name="name_value", + ) + + +def test_start_ip_rotation_rest_error(): + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + cluster_service.CompleteIPRotationRequest, + dict, + ], +) +def test_complete_ip_rotation_rest(request_type): + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/clusters/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = cluster_service.Operation( + name="name_value", + zone="zone_value", + operation_type=cluster_service.Operation.Type.CREATE_CLUSTER, + status=cluster_service.Operation.Status.PENDING, + detail="detail_value", + status_message="status_message_value", + self_link="self_link_value", + target_link="target_link_value", + location="location_value", + start_time="start_time_value", + end_time="end_time_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = cluster_service.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.complete_ip_rotation(request) + + # Establish that the response is the type that we expect. 
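+    # Each scalar field set on the mocked Operation above should survive the
+    # JSON round trip and reappear on the parsed response.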
+ assert isinstance(response, cluster_service.Operation) + assert response.name == "name_value" + assert response.zone == "zone_value" + assert response.operation_type == cluster_service.Operation.Type.CREATE_CLUSTER + assert response.status == cluster_service.Operation.Status.PENDING + assert response.detail == "detail_value" + assert response.status_message == "status_message_value" + assert response.self_link == "self_link_value" + assert response.target_link == "target_link_value" + assert response.location == "location_value" + assert response.start_time == "start_time_value" + assert response.end_time == "end_time_value" + + +def test_complete_ip_rotation_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.complete_ip_rotation in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.complete_ip_rotation + ] = mock_rpc + + request = {} + client.complete_ip_rotation(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.complete_ip_rotation(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_complete_ip_rotation_rest_interceptors(null_interceptor): + transport = transports.ClusterManagerRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.ClusterManagerRestInterceptor(), + ) + client = ClusterManagerClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ClusterManagerRestInterceptor, "post_complete_ip_rotation" + ) as post, mock.patch.object( + transports.ClusterManagerRestInterceptor, "pre_complete_ip_rotation" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = cluster_service.CompleteIPRotationRequest.pb( + cluster_service.CompleteIPRotationRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = cluster_service.Operation.to_json( + cluster_service.Operation() + ) + + request = cluster_service.CompleteIPRotationRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = cluster_service.Operation() + + client.complete_ip_rotation( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_complete_ip_rotation_rest_bad_request( + 
transport: str = "rest", request_type=cluster_service.CompleteIPRotationRequest +): + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/clusters/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.complete_ip_rotation(request) + + +def test_complete_ip_rotation_rest_flattened(): + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = cluster_service.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {"name": "projects/sample1/locations/sample2/clusters/sample3"} + + # get truthy value for each flattened field + mock_args = dict( + project_id="project_id_value", + zone="zone_value", + cluster_id="cluster_id_value", + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = cluster_service.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.complete_ip_rotation(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/clusters/*}:completeIpRotation" + % client.transport._host, + args[1], + ) + + +def test_complete_ip_rotation_rest_flattened_error(transport: str = "rest"): + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.complete_ip_rotation( + cluster_service.CompleteIPRotationRequest(), + project_id="project_id_value", + zone="zone_value", + cluster_id="cluster_id_value", + name="name_value", + ) + + +def test_complete_ip_rotation_rest_error(): + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + cluster_service.SetNodePoolSizeRequest, + dict, + ], +) +def test_set_node_pool_size_rest(request_type): + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/clusters/sample3/nodePools/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = cluster_service.Operation( + name="name_value", + zone="zone_value", + operation_type=cluster_service.Operation.Type.CREATE_CLUSTER, + status=cluster_service.Operation.Status.PENDING, + detail="detail_value", + status_message="status_message_value", + self_link="self_link_value", + target_link="target_link_value", + location="location_value", + start_time="start_time_value", + end_time="end_time_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = cluster_service.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.set_node_pool_size(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, cluster_service.Operation) + assert response.name == "name_value" + assert response.zone == "zone_value" + assert response.operation_type == cluster_service.Operation.Type.CREATE_CLUSTER + assert response.status == cluster_service.Operation.Status.PENDING + assert response.detail == "detail_value" + assert response.status_message == "status_message_value" + assert response.self_link == "self_link_value" + assert response.target_link == "target_link_value" + assert response.location == "location_value" + assert response.start_time == "start_time_value" + assert response.end_time == "end_time_value" + + +def test_set_node_pool_size_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.set_node_pool_size in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.set_node_pool_size + ] = mock_rpc + + request = {} + client.set_node_pool_size(request) + + # Establish that the underlying gRPC stub method was called. 
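+        # A second invocation below must reuse the cached wrapper: wrap_method
+        # is not called again, while the mock's call count keeps growing.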
+ assert mock_rpc.call_count == 1 + + client.set_node_pool_size(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_set_node_pool_size_rest_required_fields( + request_type=cluster_service.SetNodePoolSizeRequest, +): + transport_class = transports.ClusterManagerRestTransport + + request_init = {} + request_init["node_count"] = 0 + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).set_node_pool_size._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["nodeCount"] = 1070 + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).set_node_pool_size._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "nodeCount" in jsonified_request + assert jsonified_request["nodeCount"] == 1070 + + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = cluster_service.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
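+            # (transcode() is stubbed below to return a fixed "v1/sample_method"
+            # URI; a real transcode would splice the resource name into the
+            # request path instead.)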
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = cluster_service.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.set_node_pool_size(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_set_node_pool_size_rest_unset_required_fields(): + transport = transports.ClusterManagerRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.set_node_pool_size._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("nodeCount",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_set_node_pool_size_rest_interceptors(null_interceptor): + transport = transports.ClusterManagerRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.ClusterManagerRestInterceptor(), + ) + client = ClusterManagerClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ClusterManagerRestInterceptor, "post_set_node_pool_size" + ) as post, mock.patch.object( + transports.ClusterManagerRestInterceptor, "pre_set_node_pool_size" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = cluster_service.SetNodePoolSizeRequest.pb( + cluster_service.SetNodePoolSizeRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = cluster_service.Operation.to_json( + cluster_service.Operation() + ) + + request = cluster_service.SetNodePoolSizeRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = cluster_service.Operation() + + client.set_node_pool_size( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_set_node_pool_size_rest_bad_request( + transport: str = "rest", request_type=cluster_service.SetNodePoolSizeRequest +): + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/clusters/sample3/nodePools/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
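+    # A bare 400 status on the mocked session is enough: the transport maps it
+    # to core_exceptions.BadRequest without needing an error payload.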
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.set_node_pool_size(request) + + +def test_set_node_pool_size_rest_error(): + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + cluster_service.SetNetworkPolicyRequest, + dict, + ], +) +def test_set_network_policy_rest(request_type): + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/clusters/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = cluster_service.Operation( + name="name_value", + zone="zone_value", + operation_type=cluster_service.Operation.Type.CREATE_CLUSTER, + status=cluster_service.Operation.Status.PENDING, + detail="detail_value", + status_message="status_message_value", + self_link="self_link_value", + target_link="target_link_value", + location="location_value", + start_time="start_time_value", + end_time="end_time_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = cluster_service.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.set_network_policy(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, cluster_service.Operation) + assert response.name == "name_value" + assert response.zone == "zone_value" + assert response.operation_type == cluster_service.Operation.Type.CREATE_CLUSTER + assert response.status == cluster_service.Operation.Status.PENDING + assert response.detail == "detail_value" + assert response.status_message == "status_message_value" + assert response.self_link == "self_link_value" + assert response.target_link == "target_link_value" + assert response.location == "location_value" + assert response.start_time == "start_time_value" + assert response.end_time == "end_time_value" + + +def test_set_network_policy_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.set_network_policy in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client._transport._wrapped_methods[ + client._transport.set_network_policy + ] = mock_rpc + + request = {} + client.set_network_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.set_network_policy(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_set_network_policy_rest_required_fields( + request_type=cluster_service.SetNetworkPolicyRequest, +): + transport_class = transports.ClusterManagerRestTransport + + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).set_network_policy._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).set_network_policy._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = cluster_service.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = cluster_service.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.set_network_policy(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_set_network_policy_rest_unset_required_fields(): + transport = transports.ClusterManagerRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.set_network_policy._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("networkPolicy",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_set_network_policy_rest_interceptors(null_interceptor): + transport = transports.ClusterManagerRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.ClusterManagerRestInterceptor(), + ) + client = ClusterManagerClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ClusterManagerRestInterceptor, "post_set_network_policy" + ) as post, mock.patch.object( + transports.ClusterManagerRestInterceptor, "pre_set_network_policy" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = cluster_service.SetNetworkPolicyRequest.pb( + cluster_service.SetNetworkPolicyRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = cluster_service.Operation.to_json( + cluster_service.Operation() + ) + + request = cluster_service.SetNetworkPolicyRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = cluster_service.Operation() + + client.set_network_policy( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_set_network_policy_rest_bad_request( + transport: str = "rest", request_type=cluster_service.SetNetworkPolicyRequest +): + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/clusters/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.set_network_policy(request) + + +def test_set_network_policy_rest_flattened(): + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = cluster_service.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {"name": "projects/sample1/locations/sample2/clusters/sample3"} + + # get truthy value for each flattened field + mock_args = dict( + project_id="project_id_value", + zone="zone_value", + cluster_id="cluster_id_value", + network_policy=cluster_service.NetworkPolicy( + provider=cluster_service.NetworkPolicy.Provider.CALICO + ), + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = cluster_service.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.set_network_policy(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/clusters/*}:setNetworkPolicy" + % client.transport._host, + args[1], + ) + + +def test_set_network_policy_rest_flattened_error(transport: str = "rest"): + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.set_network_policy( + cluster_service.SetNetworkPolicyRequest(), + project_id="project_id_value", + zone="zone_value", + cluster_id="cluster_id_value", + network_policy=cluster_service.NetworkPolicy( + provider=cluster_service.NetworkPolicy.Provider.CALICO + ), + name="name_value", + ) + + +def test_set_network_policy_rest_error(): + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + cluster_service.SetMaintenancePolicyRequest, + dict, + ], +) +def test_set_maintenance_policy_rest(request_type): + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/clusters/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = cluster_service.Operation( + name="name_value", + zone="zone_value", + operation_type=cluster_service.Operation.Type.CREATE_CLUSTER, + status=cluster_service.Operation.Status.PENDING, + detail="detail_value", + status_message="status_message_value", + self_link="self_link_value", + target_link="target_link_value", + location="location_value", + start_time="start_time_value", + end_time="end_time_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = cluster_service.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.set_maintenance_policy(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, cluster_service.Operation) + assert response.name == "name_value" + assert response.zone == "zone_value" + assert response.operation_type == cluster_service.Operation.Type.CREATE_CLUSTER + assert response.status == cluster_service.Operation.Status.PENDING + assert response.detail == "detail_value" + assert response.status_message == "status_message_value" + assert response.self_link == "self_link_value" + assert response.target_link == "target_link_value" + assert response.location == "location_value" + assert response.start_time == "start_time_value" + assert response.end_time == "end_time_value" + + +def test_set_maintenance_policy_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.set_maintenance_policy + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.set_maintenance_policy + ] = mock_rpc + + request = {} + client.set_maintenance_policy(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.set_maintenance_policy(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_set_maintenance_policy_rest_required_fields( + request_type=cluster_service.SetMaintenancePolicyRequest, +): + transport_class = transports.ClusterManagerRestTransport + + request_init = {} + request_init["project_id"] = "" + request_init["zone"] = "" + request_init["cluster_id"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).set_maintenance_policy._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["projectId"] = "project_id_value" + jsonified_request["zone"] = "zone_value" + jsonified_request["clusterId"] = "cluster_id_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).set_maintenance_policy._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "projectId" in jsonified_request + assert jsonified_request["projectId"] == "project_id_value" + assert "zone" in jsonified_request + assert jsonified_request["zone"] == "zone_value" + assert "clusterId" in jsonified_request + assert jsonified_request["clusterId"] == "cluster_id_value" + + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = cluster_service.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = cluster_service.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.set_maintenance_policy(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_set_maintenance_policy_rest_unset_required_fields(): + transport = transports.ClusterManagerRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.set_maintenance_policy._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(()) + & set( + ( + "projectId", + "zone", + "clusterId", + "maintenancePolicy", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_set_maintenance_policy_rest_interceptors(null_interceptor): + transport = transports.ClusterManagerRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.ClusterManagerRestInterceptor(), + ) + client = ClusterManagerClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ClusterManagerRestInterceptor, "post_set_maintenance_policy" + ) as post, mock.patch.object( + transports.ClusterManagerRestInterceptor, "pre_set_maintenance_policy" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = cluster_service.SetMaintenancePolicyRequest.pb( + cluster_service.SetMaintenancePolicyRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = cluster_service.Operation.to_json( + cluster_service.Operation() + ) + + request = cluster_service.SetMaintenancePolicyRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = cluster_service.Operation() + + client.set_maintenance_policy( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_set_maintenance_policy_rest_bad_request( + transport: str = "rest", request_type=cluster_service.SetMaintenancePolicyRequest +): + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/clusters/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.set_maintenance_policy(request) + + +def test_set_maintenance_policy_rest_flattened(): + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = cluster_service.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {"name": "projects/sample1/locations/sample2/clusters/sample3"} + + # get truthy value for each flattened field + mock_args = dict( + project_id="project_id_value", + zone="zone_value", + cluster_id="cluster_id_value", + maintenance_policy=cluster_service.MaintenancePolicy( + window=cluster_service.MaintenanceWindow( + daily_maintenance_window=cluster_service.DailyMaintenanceWindow( + start_time="start_time_value" + ) + ) + ), + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = cluster_service.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.set_maintenance_policy(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/clusters/*}:setMaintenancePolicy" + % client.transport._host, + args[1], + ) + + +def test_set_maintenance_policy_rest_flattened_error(transport: str = "rest"): + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.set_maintenance_policy( + cluster_service.SetMaintenancePolicyRequest(), + project_id="project_id_value", + zone="zone_value", + cluster_id="cluster_id_value", + maintenance_policy=cluster_service.MaintenancePolicy( + window=cluster_service.MaintenanceWindow( + daily_maintenance_window=cluster_service.DailyMaintenanceWindow( + start_time="start_time_value" + ) + ) + ), + name="name_value", + ) + + +def test_set_maintenance_policy_rest_error(): + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + cluster_service.ListUsableSubnetworksRequest, + dict, + ], +) +def test_list_usable_subnetworks_rest(request_type): + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = cluster_service.ListUsableSubnetworksResponse( + next_page_token="next_page_token_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = cluster_service.ListUsableSubnetworksResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_usable_subnetworks(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListUsableSubnetworksPager) + assert response.next_page_token == "next_page_token_value" + + +def test_list_usable_subnetworks_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.list_usable_subnetworks + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.list_usable_subnetworks + ] = mock_rpc + + request = {} + client.list_usable_subnetworks(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.list_usable_subnetworks(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_usable_subnetworks_rest_interceptors(null_interceptor): + transport = transports.ClusterManagerRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.ClusterManagerRestInterceptor(), + ) + client = ClusterManagerClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ClusterManagerRestInterceptor, "post_list_usable_subnetworks" + ) as post, mock.patch.object( + transports.ClusterManagerRestInterceptor, "pre_list_usable_subnetworks" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = cluster_service.ListUsableSubnetworksRequest.pb( + cluster_service.ListUsableSubnetworksRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = ( + cluster_service.ListUsableSubnetworksResponse.to_json( + cluster_service.ListUsableSubnetworksResponse() + ) + ) + + request = cluster_service.ListUsableSubnetworksRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = cluster_service.ListUsableSubnetworksResponse() + + client.list_usable_subnetworks( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_usable_subnetworks_rest_bad_request( + transport: str = "rest", request_type=cluster_service.ListUsableSubnetworksRequest +): + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_usable_subnetworks(request) + + +def test_list_usable_subnetworks_rest_pager(transport: str = "rest"): + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + cluster_service.ListUsableSubnetworksResponse( + subnetworks=[ + cluster_service.UsableSubnetwork(), + cluster_service.UsableSubnetwork(), + cluster_service.UsableSubnetwork(), + ], + next_page_token="abc", + ), + cluster_service.ListUsableSubnetworksResponse( + subnetworks=[], + next_page_token="def", + ), + cluster_service.ListUsableSubnetworksResponse( + subnetworks=[ + cluster_service.UsableSubnetwork(), + ], + next_page_token="ghi", + ), + cluster_service.ListUsableSubnetworksResponse( + subnetworks=[ + cluster_service.UsableSubnetwork(), + cluster_service.UsableSubnetwork(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + cluster_service.ListUsableSubnetworksResponse.to_json(x) for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "projects/sample1"} + + pager = client.list_usable_subnetworks(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, cluster_service.UsableSubnetwork) for i in results) + + pages = list(client.list_usable_subnetworks(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + cluster_service.CheckAutopilotCompatibilityRequest, + dict, + ], +) +def test_check_autopilot_compatibility_rest(request_type): + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/clusters/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = cluster_service.CheckAutopilotCompatibilityResponse( + summary="summary_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = cluster_service.CheckAutopilotCompatibilityResponse.pb( + return_value + ) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.check_autopilot_compatibility(request) + + # Establish that the response is the type that we expect. 
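+    # CheckAutopilotCompatibility returns a plain response message rather than
+    # an Operation, so only its summary field is asserted on.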
+ assert isinstance(response, cluster_service.CheckAutopilotCompatibilityResponse) + assert response.summary == "summary_value" + + +def test_check_autopilot_compatibility_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.check_autopilot_compatibility + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.check_autopilot_compatibility + ] = mock_rpc + + request = {} + client.check_autopilot_compatibility(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.check_autopilot_compatibility(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_check_autopilot_compatibility_rest_interceptors(null_interceptor): + transport = transports.ClusterManagerRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.ClusterManagerRestInterceptor(), + ) + client = ClusterManagerClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ClusterManagerRestInterceptor, "post_check_autopilot_compatibility" + ) as post, mock.patch.object( + transports.ClusterManagerRestInterceptor, "pre_check_autopilot_compatibility" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = cluster_service.CheckAutopilotCompatibilityRequest.pb( + cluster_service.CheckAutopilotCompatibilityRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = ( + cluster_service.CheckAutopilotCompatibilityResponse.to_json( + cluster_service.CheckAutopilotCompatibilityResponse() + ) + ) + + request = cluster_service.CheckAutopilotCompatibilityRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = cluster_service.CheckAutopilotCompatibilityResponse() + + client.check_autopilot_compatibility( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_check_autopilot_compatibility_rest_bad_request( + transport: str = "rest", + request_type=cluster_service.CheckAutopilotCompatibilityRequest, +): + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": 
"projects/sample1/locations/sample2/clusters/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.check_autopilot_compatibility(request) + + +def test_check_autopilot_compatibility_rest_error(): + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.ClusterManagerGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.ClusterManagerGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = ClusterManagerClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. + transport = transports.ClusterManagerGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = ClusterManagerClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = ClusterManagerClient( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.ClusterManagerGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = ClusterManagerClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.ClusterManagerGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = ClusterManagerClient(transport=transport) + assert client.transport is transport + + +def test_transport_get_channel(): + # A client may be instantiated with a custom transport instance. + transport = transports.ClusterManagerGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + transport = transports.ClusterManagerGrpcAsyncIOTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.ClusterManagerGrpcTransport, + transports.ClusterManagerGrpcAsyncIOTransport, + transports.ClusterManagerRestTransport, + ], +) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. 
+ with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "rest", + ], +) +def test_transport_kind(transport_name): + transport = ClusterManagerClient.get_transport_class(transport_name)( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert transport.kind == transport_name + + +def test_transport_grpc_default(): + # A client should use the gRPC transport by default. + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert isinstance( + client.transport, + transports.ClusterManagerGrpcTransport, + ) + + +def test_cluster_manager_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.ClusterManagerTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json", + ) + + +def test_cluster_manager_base_transport(): + # Instantiate the base transport. + with mock.patch( + "google.cloud.container_v1.services.cluster_manager.transports.ClusterManagerTransport.__init__" + ) as Transport: + Transport.return_value = None + transport = transports.ClusterManagerTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. + methods = ( + "list_clusters", + "get_cluster", + "create_cluster", + "update_cluster", + "update_node_pool", + "set_node_pool_autoscaling", + "set_logging_service", + "set_monitoring_service", + "set_addons_config", + "set_locations", + "update_master", + "set_master_auth", + "delete_cluster", + "list_operations", + "get_operation", + "cancel_operation", + "get_server_config", + "get_json_web_keys", + "list_node_pools", + "get_node_pool", + "create_node_pool", + "delete_node_pool", "complete_node_pool_upgrade", "rollback_node_pool_upgrade", "set_node_pool_management", @@ -16024,6 +25287,7 @@ def test_cluster_manager_transport_auth_adc(transport_class): [ transports.ClusterManagerGrpcTransport, transports.ClusterManagerGrpcAsyncIOTransport, + transports.ClusterManagerRestTransport, ], ) def test_cluster_manager_transport_auth_gdch_credentials(transport_class): @@ -16121,11 +25385,23 @@ def test_cluster_manager_grpc_transport_client_cert_source_for_mtls(transport_cl ) +def test_cluster_manager_http_transport_client_cert_source_for_mtls(): + cred = ga_credentials.AnonymousCredentials() + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ) as mock_configure_mtls_channel: + transports.ClusterManagerRestTransport( + credentials=cred, client_cert_source_for_mtls=client_cert_source_callback + ) + mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) + + @pytest.mark.parametrize( "transport_name", [ "grpc", "grpc_asyncio", + "rest", ], ) def test_cluster_manager_host_no_port(transport_name): @@ -16136,7 +25412,11 @@ def test_cluster_manager_host_no_port(transport_name): ), transport=transport_name, ) - assert client.transport._host == ("container.googleapis.com:443") + assert client.transport._host == ( + "container.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://container.googleapis.com" + ) @pytest.mark.parametrize( @@ -16144,6 
+25424,7 @@ def test_cluster_manager_host_no_port(transport_name): [ "grpc", "grpc_asyncio", + "rest", ], ) def test_cluster_manager_host_with_port(transport_name): @@ -16154,7 +25435,132 @@ def test_cluster_manager_host_with_port(transport_name): ), transport=transport_name, ) - assert client.transport._host == ("container.googleapis.com:8000") + assert client.transport._host == ( + "container.googleapis.com:8000" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://container.googleapis.com:8000" + ) + + +@pytest.mark.parametrize( + "transport_name", + [ + "rest", + ], +) +def test_cluster_manager_client_transport_session_collision(transport_name): + creds1 = ga_credentials.AnonymousCredentials() + creds2 = ga_credentials.AnonymousCredentials() + client1 = ClusterManagerClient( + credentials=creds1, + transport=transport_name, + ) + client2 = ClusterManagerClient( + credentials=creds2, + transport=transport_name, + ) + session1 = client1.transport.list_clusters._session + session2 = client2.transport.list_clusters._session + assert session1 != session2 + session1 = client1.transport.get_cluster._session + session2 = client2.transport.get_cluster._session + assert session1 != session2 + session1 = client1.transport.create_cluster._session + session2 = client2.transport.create_cluster._session + assert session1 != session2 + session1 = client1.transport.update_cluster._session + session2 = client2.transport.update_cluster._session + assert session1 != session2 + session1 = client1.transport.update_node_pool._session + session2 = client2.transport.update_node_pool._session + assert session1 != session2 + session1 = client1.transport.set_node_pool_autoscaling._session + session2 = client2.transport.set_node_pool_autoscaling._session + assert session1 != session2 + session1 = client1.transport.set_logging_service._session + session2 = client2.transport.set_logging_service._session + assert session1 != session2 + session1 = client1.transport.set_monitoring_service._session + session2 = client2.transport.set_monitoring_service._session + assert session1 != session2 + session1 = client1.transport.set_addons_config._session + session2 = client2.transport.set_addons_config._session + assert session1 != session2 + session1 = client1.transport.set_locations._session + session2 = client2.transport.set_locations._session + assert session1 != session2 + session1 = client1.transport.update_master._session + session2 = client2.transport.update_master._session + assert session1 != session2 + session1 = client1.transport.set_master_auth._session + session2 = client2.transport.set_master_auth._session + assert session1 != session2 + session1 = client1.transport.delete_cluster._session + session2 = client2.transport.delete_cluster._session + assert session1 != session2 + session1 = client1.transport.list_operations._session + session2 = client2.transport.list_operations._session + assert session1 != session2 + session1 = client1.transport.get_operation._session + session2 = client2.transport.get_operation._session + assert session1 != session2 + session1 = client1.transport.cancel_operation._session + session2 = client2.transport.cancel_operation._session + assert session1 != session2 + session1 = client1.transport.get_server_config._session + session2 = client2.transport.get_server_config._session + assert session1 != session2 + session1 = client1.transport.get_json_web_keys._session + session2 = client2.transport.get_json_web_keys._session + 
assert session1 != session2 + session1 = client1.transport.list_node_pools._session + session2 = client2.transport.list_node_pools._session + assert session1 != session2 + session1 = client1.transport.get_node_pool._session + session2 = client2.transport.get_node_pool._session + assert session1 != session2 + session1 = client1.transport.create_node_pool._session + session2 = client2.transport.create_node_pool._session + assert session1 != session2 + session1 = client1.transport.delete_node_pool._session + session2 = client2.transport.delete_node_pool._session + assert session1 != session2 + session1 = client1.transport.complete_node_pool_upgrade._session + session2 = client2.transport.complete_node_pool_upgrade._session + assert session1 != session2 + session1 = client1.transport.rollback_node_pool_upgrade._session + session2 = client2.transport.rollback_node_pool_upgrade._session + assert session1 != session2 + session1 = client1.transport.set_node_pool_management._session + session2 = client2.transport.set_node_pool_management._session + assert session1 != session2 + session1 = client1.transport.set_labels._session + session2 = client2.transport.set_labels._session + assert session1 != session2 + session1 = client1.transport.set_legacy_abac._session + session2 = client2.transport.set_legacy_abac._session + assert session1 != session2 + session1 = client1.transport.start_ip_rotation._session + session2 = client2.transport.start_ip_rotation._session + assert session1 != session2 + session1 = client1.transport.complete_ip_rotation._session + session2 = client2.transport.complete_ip_rotation._session + assert session1 != session2 + session1 = client1.transport.set_node_pool_size._session + session2 = client2.transport.set_node_pool_size._session + assert session1 != session2 + session1 = client1.transport.set_network_policy._session + session2 = client2.transport.set_network_policy._session + assert session1 != session2 + session1 = client1.transport.set_maintenance_policy._session + session2 = client2.transport.set_maintenance_policy._session + assert session1 != session2 + session1 = client1.transport.list_usable_subnetworks._session + session2 = client2.transport.list_usable_subnetworks._session + assert session1 != session2 + session1 = client1.transport.check_autopilot_compatibility._session + session2 = client2.transport.check_autopilot_compatibility._session + assert session1 != session2 def test_cluster_manager_grpc_transport_channel(): @@ -16448,6 +25854,7 @@ async def test_transport_close_async(): def test_transport_close(): transports = { + "rest": "_session", "grpc": "_grpc_channel", } @@ -16465,6 +25872,7 @@ def test_transport_close(): def test_client_ctx(): transports = [ + "rest", "grpc", ] for transport in transports: diff --git a/packages/google-cloud-container/tests/unit/gapic/container_v1beta1/test_cluster_manager.py b/packages/google-cloud-container/tests/unit/gapic/container_v1beta1/test_cluster_manager.py index 2b12077d2497..e5f733d6b57b 100644 --- a/packages/google-cloud-container/tests/unit/gapic/container_v1beta1/test_cluster_manager.py +++ b/packages/google-cloud-container/tests/unit/gapic/container_v1beta1/test_cluster_manager.py @@ -1534,6 +1534,8 @@ def test_get_cluster(request_type, transport: str = "grpc"): tpu_ipv4_cidr_block="tpu_ipv4_cidr_block_value", id="id_value", etag="etag_value", + satisfies_pzs=True, + satisfies_pzi=True, ) response = client.get_cluster(request) @@ -1577,6 +1579,8 @@ def test_get_cluster(request_type, transport: str = "grpc"): assert 
response.tpu_ipv4_cidr_block == "tpu_ipv4_cidr_block_value" assert response.id == "id_value" assert response.etag == "etag_value" + assert response.satisfies_pzs is True + assert response.satisfies_pzi is True def test_get_cluster_empty_call(): @@ -1713,6 +1717,8 @@ async def test_get_cluster_empty_call_async(): tpu_ipv4_cidr_block="tpu_ipv4_cidr_block_value", id="id_value", etag="etag_value", + satisfies_pzs=True, + satisfies_pzi=True, ) ) response = await client.get_cluster() @@ -1817,6 +1823,8 @@ async def test_get_cluster_async( tpu_ipv4_cidr_block="tpu_ipv4_cidr_block_value", id="id_value", etag="etag_value", + satisfies_pzs=True, + satisfies_pzi=True, ) ) response = await client.get_cluster(request) @@ -1861,6 +1869,8 @@ async def test_get_cluster_async( assert response.tpu_ipv4_cidr_block == "tpu_ipv4_cidr_block_value" assert response.id == "id_value" assert response.etag == "etag_value" + assert response.satisfies_pzs is True + assert response.satisfies_pzi is True @pytest.mark.asyncio @@ -15415,13 +15425,13 @@ def test_list_usable_subnetworks_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_usable_subnetworks(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 diff --git a/packages/google-cloud-contentwarehouse/tests/unit/gapic/contentwarehouse_v1/test_document_link_service.py b/packages/google-cloud-contentwarehouse/tests/unit/gapic/contentwarehouse_v1/test_document_link_service.py index 65cce189e366..3bfd84f79f9e 100644 --- a/packages/google-cloud-contentwarehouse/tests/unit/gapic/contentwarehouse_v1/test_document_link_service.py +++ b/packages/google-cloud-contentwarehouse/tests/unit/gapic/contentwarehouse_v1/test_document_link_service.py @@ -2016,13 +2016,13 @@ def test_list_linked_sources_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_linked_sources(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 diff --git a/packages/google-cloud-contentwarehouse/tests/unit/gapic/contentwarehouse_v1/test_document_schema_service.py b/packages/google-cloud-contentwarehouse/tests/unit/gapic/contentwarehouse_v1/test_document_schema_service.py index 3edf242dde29..27a08007f2da 100644 --- a/packages/google-cloud-contentwarehouse/tests/unit/gapic/contentwarehouse_v1/test_document_schema_service.py +++ b/packages/google-cloud-contentwarehouse/tests/unit/gapic/contentwarehouse_v1/test_document_schema_service.py @@ -3246,13 +3246,13 @@ def test_list_document_schemas_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_document_schemas(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 diff --git a/packages/google-cloud-contentwarehouse/tests/unit/gapic/contentwarehouse_v1/test_document_service.py 
b/packages/google-cloud-contentwarehouse/tests/unit/gapic/contentwarehouse_v1/test_document_service.py index f541dc6d87b0..fa61e27bf633 100644 --- a/packages/google-cloud-contentwarehouse/tests/unit/gapic/contentwarehouse_v1/test_document_service.py +++ b/packages/google-cloud-contentwarehouse/tests/unit/gapic/contentwarehouse_v1/test_document_service.py @@ -3139,13 +3139,13 @@ def test_search_documents_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.search_documents(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 diff --git a/packages/google-cloud-contentwarehouse/tests/unit/gapic/contentwarehouse_v1/test_rule_set_service.py b/packages/google-cloud-contentwarehouse/tests/unit/gapic/contentwarehouse_v1/test_rule_set_service.py index ab3a0c7981b5..675688368551 100644 --- a/packages/google-cloud-contentwarehouse/tests/unit/gapic/contentwarehouse_v1/test_rule_set_service.py +++ b/packages/google-cloud-contentwarehouse/tests/unit/gapic/contentwarehouse_v1/test_rule_set_service.py @@ -3034,13 +3034,13 @@ def test_list_rule_sets_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_rule_sets(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 diff --git a/packages/google-cloud-contentwarehouse/tests/unit/gapic/contentwarehouse_v1/test_synonym_set_service.py b/packages/google-cloud-contentwarehouse/tests/unit/gapic/contentwarehouse_v1/test_synonym_set_service.py index 26d1fe08ae31..a6086ddb231b 100644 --- a/packages/google-cloud-contentwarehouse/tests/unit/gapic/contentwarehouse_v1/test_synonym_set_service.py +++ b/packages/google-cloud-contentwarehouse/tests/unit/gapic/contentwarehouse_v1/test_synonym_set_service.py @@ -3154,13 +3154,13 @@ def test_list_synonym_sets_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_synonym_sets(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 diff --git a/packages/google-cloud-data-fusion/tests/unit/gapic/data_fusion_v1/test_data_fusion.py b/packages/google-cloud-data-fusion/tests/unit/gapic/data_fusion_v1/test_data_fusion.py index f878163dcb65..ebc3e69779a6 100644 --- a/packages/google-cloud-data-fusion/tests/unit/gapic/data_fusion_v1/test_data_fusion.py +++ b/packages/google-cloud-data-fusion/tests/unit/gapic/data_fusion_v1/test_data_fusion.py @@ -1522,13 +1522,13 @@ def test_list_available_versions_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_available_versions(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results 
= list(pager) assert len(results) == 6 @@ -2016,13 +2016,13 @@ def test_list_instances_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_instances(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 diff --git a/packages/google-cloud-datacatalog-lineage/tests/unit/gapic/datacatalog_lineage_v1/test_lineage.py b/packages/google-cloud-datacatalog-lineage/tests/unit/gapic/datacatalog_lineage_v1/test_lineage.py index ab04a7ad580b..8bbc818acc7d 100644 --- a/packages/google-cloud-datacatalog-lineage/tests/unit/gapic/datacatalog_lineage_v1/test_lineage.py +++ b/packages/google-cloud-datacatalog-lineage/tests/unit/gapic/datacatalog_lineage_v1/test_lineage.py @@ -3050,13 +3050,13 @@ def test_list_processes_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_processes(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -5115,13 +5115,13 @@ def test_list_runs_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_runs(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -6859,13 +6859,13 @@ def test_list_lineage_events_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_lineage_events(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -7719,13 +7719,13 @@ def test_search_links_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.search_links(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -8220,13 +8220,13 @@ def test_batch_search_link_processes_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.batch_search_link_processes(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 diff --git a/packages/google-cloud-datacatalog/tests/unit/gapic/datacatalog_v1/test_data_catalog.py b/packages/google-cloud-datacatalog/tests/unit/gapic/datacatalog_v1/test_data_catalog.py index 891cd20821fb..752ac0dd660a 100644 --- 
a/packages/google-cloud-datacatalog/tests/unit/gapic/datacatalog_v1/test_data_catalog.py +++ b/packages/google-cloud-datacatalog/tests/unit/gapic/datacatalog_v1/test_data_catalog.py @@ -1467,10 +1467,10 @@ def test_search_catalog_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () + expected_metadata = () pager = client.search_catalog(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -3641,13 +3641,13 @@ def test_list_entry_groups_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_entry_groups(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -6004,13 +6004,13 @@ def test_list_entries_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_entries(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -11971,13 +11971,13 @@ def test_list_tags_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_tags(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 diff --git a/packages/google-cloud-datacatalog/tests/unit/gapic/datacatalog_v1/test_policy_tag_manager.py b/packages/google-cloud-datacatalog/tests/unit/gapic/datacatalog_v1/test_policy_tag_manager.py index 93b9bef905a8..99c59118f4c8 100644 --- a/packages/google-cloud-datacatalog/tests/unit/gapic/datacatalog_v1/test_policy_tag_manager.py +++ b/packages/google-cloud-datacatalog/tests/unit/gapic/datacatalog_v1/test_policy_tag_manager.py @@ -2682,13 +2682,13 @@ def test_list_taxonomies_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_taxonomies(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -4844,13 +4844,13 @@ def test_list_policy_tags_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_policy_tags(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 diff --git a/packages/google-cloud-datacatalog/tests/unit/gapic/datacatalog_v1beta1/test_data_catalog.py b/packages/google-cloud-datacatalog/tests/unit/gapic/datacatalog_v1beta1/test_data_catalog.py index 1882fbb6fe0a..8c5faa7b8beb 100644 --- 
a/packages/google-cloud-datacatalog/tests/unit/gapic/datacatalog_v1beta1/test_data_catalog.py +++ b/packages/google-cloud-datacatalog/tests/unit/gapic/datacatalog_v1beta1/test_data_catalog.py @@ -1454,10 +1454,10 @@ def test_search_catalog_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () + expected_metadata = () pager = client.search_catalog(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -3628,13 +3628,13 @@ def test_list_entry_groups_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_entry_groups(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -5965,13 +5965,13 @@ def test_list_entries_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_entries(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -11313,13 +11313,13 @@ def test_list_tags_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_tags(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 diff --git a/packages/google-cloud-datacatalog/tests/unit/gapic/datacatalog_v1beta1/test_policy_tag_manager.py b/packages/google-cloud-datacatalog/tests/unit/gapic/datacatalog_v1beta1/test_policy_tag_manager.py index c2c9b13bbc2b..2de7999c8b80 100644 --- a/packages/google-cloud-datacatalog/tests/unit/gapic/datacatalog_v1beta1/test_policy_tag_manager.py +++ b/packages/google-cloud-datacatalog/tests/unit/gapic/datacatalog_v1beta1/test_policy_tag_manager.py @@ -2682,13 +2682,13 @@ def test_list_taxonomies_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_taxonomies(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -4844,13 +4844,13 @@ def test_list_policy_tags_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_policy_tags(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 diff --git a/packages/google-cloud-dataflow-client/tests/unit/gapic/dataflow_v1beta3/test_jobs_v1_beta3.py b/packages/google-cloud-dataflow-client/tests/unit/gapic/dataflow_v1beta3/test_jobs_v1_beta3.py index 30fef4fb8c6c..869b5435fbdc 100644 --- 
a/packages/google-cloud-dataflow-client/tests/unit/gapic/dataflow_v1beta3/test_jobs_v1_beta3.py +++ b/packages/google-cloud-dataflow-client/tests/unit/gapic/dataflow_v1beta3/test_jobs_v1_beta3.py @@ -2482,8 +2482,8 @@ def test_list_jobs_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata( ( ("project_id", ""), @@ -2493,7 +2493,7 @@ def test_list_jobs_pager(transport_name: str = "grpc"): ) pager = client.list_jobs(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -2988,13 +2988,13 @@ def test_aggregated_list_jobs_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("project_id", ""),)), ) pager = client.aggregated_list_jobs(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 diff --git a/packages/google-cloud-dataflow-client/tests/unit/gapic/dataflow_v1beta3/test_messages_v1_beta3.py b/packages/google-cloud-dataflow-client/tests/unit/gapic/dataflow_v1beta3/test_messages_v1_beta3.py index ffcc9b321a35..f2ea42c90ec4 100644 --- a/packages/google-cloud-dataflow-client/tests/unit/gapic/dataflow_v1beta3/test_messages_v1_beta3.py +++ b/packages/google-cloud-dataflow-client/tests/unit/gapic/dataflow_v1beta3/test_messages_v1_beta3.py @@ -1498,8 +1498,8 @@ def test_list_job_messages_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata( ( ("project_id", ""), @@ -1510,7 +1510,7 @@ def test_list_job_messages_pager(transport_name: str = "grpc"): ) pager = client.list_job_messages(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 diff --git a/packages/google-cloud-dataflow-client/tests/unit/gapic/dataflow_v1beta3/test_metrics_v1_beta3.py b/packages/google-cloud-dataflow-client/tests/unit/gapic/dataflow_v1beta3/test_metrics_v1_beta3.py index 6d526db8d812..941eb0e0c4a0 100644 --- a/packages/google-cloud-dataflow-client/tests/unit/gapic/dataflow_v1beta3/test_metrics_v1_beta3.py +++ b/packages/google-cloud-dataflow-client/tests/unit/gapic/dataflow_v1beta3/test_metrics_v1_beta3.py @@ -1762,8 +1762,8 @@ def test_get_job_execution_details_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata( ( ("project_id", ""), @@ -1774,7 +1774,7 @@ def test_get_job_execution_details_pager(transport_name: str = "grpc"): ) pager = client.get_job_execution_details(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -2287,8 +2287,8 @@ def test_get_stage_execution_details_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata( ( ("project_id", ""), @@ 
-2300,7 +2300,7 @@ def test_get_stage_execution_details_pager(transport_name: str = "grpc"): ) pager = client.get_stage_execution_details(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 diff --git a/packages/google-cloud-dataform/tests/unit/gapic/dataform_v1beta1/test_dataform.py b/packages/google-cloud-dataform/tests/unit/gapic/dataform_v1beta1/test_dataform.py index 1af649bebb01..e87d7ddf1488 100644 --- a/packages/google-cloud-dataform/tests/unit/gapic/dataform_v1beta1/test_dataform.py +++ b/packages/google-cloud-dataform/tests/unit/gapic/dataform_v1beta1/test_dataform.py @@ -1508,13 +1508,13 @@ def test_list_repositories_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_repositories(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -4237,13 +4237,13 @@ def test_query_repository_directory_contents_pager(transport_name: str = "grpc") RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("name", ""),)), ) pager = client.query_repository_directory_contents(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -4743,13 +4743,13 @@ def test_fetch_repository_history_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("name", ""),)), ) pager = client.fetch_repository_history(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -5938,13 +5938,13 @@ def test_list_workspaces_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_workspaces(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -9886,13 +9886,13 @@ def test_query_directory_contents_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("workspace", ""),)), ) pager = client.query_directory_contents(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -12445,13 +12445,13 @@ def test_list_release_configs_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_release_configs(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ 
-14681,13 +14681,13 @@ def test_list_compilation_results_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_compilation_results(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -16007,13 +16007,13 @@ def test_query_compilation_result_actions_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("name", ""),)), ) pager = client.query_compilation_result_actions(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -16604,13 +16604,13 @@ def test_list_workflow_configs_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_workflow_configs(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -18830,13 +18830,13 @@ def test_list_workflow_invocations_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_workflow_invocations(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -20814,13 +20814,13 @@ def test_query_workflow_invocation_actions_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("name", ""),)), ) pager = client.query_workflow_invocation_actions(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 diff --git a/packages/google-cloud-datalabeling/tests/unit/gapic/datalabeling_v1beta1/test_data_labeling_service.py b/packages/google-cloud-datalabeling/tests/unit/gapic/datalabeling_v1beta1/test_data_labeling_service.py index 474a0cf26c11..63602aabe1a4 100644 --- a/packages/google-cloud-datalabeling/tests/unit/gapic/datalabeling_v1beta1/test_data_labeling_service.py +++ b/packages/google-cloud-datalabeling/tests/unit/gapic/datalabeling_v1beta1/test_data_labeling_service.py @@ -2366,13 +2366,13 @@ def test_list_datasets_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_datasets(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -4472,13 +4472,13 @@ def test_list_data_items_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + 
expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_data_items(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -5505,13 +5505,13 @@ def test_list_annotated_datasets_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_annotated_datasets(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -7959,13 +7959,13 @@ def test_list_examples_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_examples(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -9392,13 +9392,13 @@ def test_list_annotation_spec_sets_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_annotation_spec_sets(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -11167,13 +11167,13 @@ def test_list_instructions_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_instructions(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -12533,13 +12533,13 @@ def test_search_evaluations_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.search_evaluations(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -13126,13 +13126,13 @@ def test_search_example_comparisons_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.search_example_comparisons(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -16156,13 +16156,13 @@ def test_list_evaluation_jobs_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_evaluation_jobs(request={}) - assert pager._metadata == metadata + assert pager._metadata == 
expected_metadata results = list(pager) assert len(results) == 6 diff --git a/packages/google-cloud-dataplex/CHANGELOG.md b/packages/google-cloud-dataplex/CHANGELOG.md index 377ed97615e7..a1bab522a07c 100644 --- a/packages/google-cloud-dataplex/CHANGELOG.md +++ b/packages/google-cloud-dataplex/CHANGELOG.md @@ -1,5 +1,32 @@ # Changelog +## [2.2.0](https://github.com/googleapis/google-cloud-python/compare/google-cloud-dataplex-v2.1.0...google-cloud-dataplex-v2.2.0) (2024-06-27) + + +### Features + +* [google-cloud-dataplex] expose data scan execution create time to customers ([#12846](https://github.com/googleapis/google-cloud-python/issues/12846)) ([2726a72](https://github.com/googleapis/google-cloud-python/commit/2726a721b0eecd05216fa018cce8d91407853187)) + +## [2.1.0](https://github.com/googleapis/google-cloud-python/compare/google-cloud-dataplex-v2.0.1...google-cloud-dataplex-v2.1.0) (2024-06-24) + + +### Features + +* exposing EntrySource.location field that contains location of a resource in the source system ([9264874](https://github.com/googleapis/google-cloud-python/commit/9264874e8ab6cff0a837a3afbba33848e6100fd8)) + + +### Documentation + +* Scrub descriptions for GenerateDataQualityRules ([9264874](https://github.com/googleapis/google-cloud-python/commit/9264874e8ab6cff0a837a3afbba33848e6100fd8)) + +## [2.0.1](https://github.com/googleapis/google-cloud-python/compare/google-cloud-dataplex-v2.0.0...google-cloud-dataplex-v2.0.1) (2024-06-19) + + +### Documentation + +* clarify DataQualityRule.sql_assertion descriptions ([74db0f8](https://github.com/googleapis/google-cloud-python/commit/74db0f812620a936fc055f49e0837aa30264fbda)) +* fix links to RuleType proto references ([74db0f8](https://github.com/googleapis/google-cloud-python/commit/74db0f812620a936fc055f49e0837aa30264fbda)) + ## [2.0.0](https://github.com/googleapis/google-cloud-python/compare/google-cloud-dataplex-v1.13.0...google-cloud-dataplex-v2.0.0) (2024-05-16) diff --git a/packages/google-cloud-dataplex/README.rst b/packages/google-cloud-dataplex/README.rst index 1f79646a30ed..4b0f591216f8 100644 --- a/packages/google-cloud-dataplex/README.rst +++ b/packages/google-cloud-dataplex/README.rst @@ -15,7 +15,7 @@ Python Client for Cloud Dataplex .. |versions| image:: https://img.shields.io/pypi/pyversions/google-cloud-dataplex.svg :target: https://pypi.org/project/google-cloud-dataplex/ .. _Cloud Dataplex: https://cloud.google.com/dataplex -.. _Client Library Documentation: https://cloud.google.com/python/docs/reference/dataplex/latest +.. _Client Library Documentation: https://cloud.google.com/python/docs/reference/dataplex/latest/summary_overview .. 
_Product Documentation: https://cloud.google.com/dataplex Quick Start diff --git a/packages/google-cloud-dataplex/docs/index.rst b/packages/google-cloud-dataplex/docs/index.rst index cab55ffbf585..367f229c54fb 100644 --- a/packages/google-cloud-dataplex/docs/index.rst +++ b/packages/google-cloud-dataplex/docs/index.rst @@ -21,3 +21,8 @@ For a list of all ``google-cloud-dataplex`` releases: :maxdepth: 2 CHANGELOG + +.. toctree:: + :hidden: + + summary_overview.md diff --git a/packages/google-cloud-dataplex/docs/summary_overview.md b/packages/google-cloud-dataplex/docs/summary_overview.md new file mode 100644 index 000000000000..2d84cb48a93b --- /dev/null +++ b/packages/google-cloud-dataplex/docs/summary_overview.md @@ -0,0 +1,22 @@ +[ +This is a templated file. Adding content to this file may result in it being +reverted. Instead, if you want to place additional content, create an +"overview_content.md" file in `docs/` directory. The Sphinx tool will +pick up on the content and merge the content. +]: # + +# Cloud Dataplex API + +Overview of the APIs available for Cloud Dataplex API. + +## All entries + +Classes, methods and properties & attributes for +Cloud Dataplex API. + +[classes](https://cloud.google.com/python/docs/reference/dataplex/latest/summary_class.html) + +[methods](https://cloud.google.com/python/docs/reference/dataplex/latest/summary_method.html) + +[properties and +attributes](https://cloud.google.com/python/docs/reference/dataplex/latest/summary_property.html) diff --git a/packages/google-cloud-dataplex/google/cloud/dataplex/gapic_version.py b/packages/google-cloud-dataplex/google/cloud/dataplex/gapic_version.py index f41fed841208..04f353585881 100644 --- a/packages/google-cloud-dataplex/google/cloud/dataplex/gapic_version.py +++ b/packages/google-cloud-dataplex/google/cloud/dataplex/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "2.0.0" # {x-release-please-version} +__version__ = "2.2.0" # {x-release-please-version} diff --git a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/gapic_version.py b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/gapic_version.py index f41fed841208..04f353585881 100644 --- a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/gapic_version.py +++ b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "2.0.0" # {x-release-please-version} +__version__ = "2.2.0" # {x-release-please-version} diff --git a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/data_scan_service/async_client.py b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/data_scan_service/async_client.py index 93c9d2b3f067..3c380f88905e 100644 --- a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/data_scan_service/async_client.py +++ b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/data_scan_service/async_client.py @@ -1308,8 +1308,11 @@ async def generate_data_quality_rules( timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> datascans.GenerateDataQualityRulesResponse: - r"""Generates recommended DataQualityRule from a data - profiling DataScan. + r"""Generates recommended data quality rules based on the + results of a data profiling scan. + + Use the recommendations to build rules for a data + quality scan. .. code-block:: python @@ -1339,15 +1342,16 @@ async def sample_generate_data_quality_rules(): Args: request (Optional[Union[google.cloud.dataplex_v1.types.GenerateDataQualityRulesRequest, dict]]): - The request object. Generate recommended DataQualityRules - request. + The request object. Request details for generating data + quality rule recommendations. name (:class:`str`): - Required. The name should be either + Required. The name must be one of the following: - - the name of a datascan with at least one successful - completed data profiling job, or - - the name of a successful completed data profiling - datascan job. + - The name of a data scan with at least one successful, + completed data profiling job + - The name of a successful, completed data profiling + job (a data scan job where the job type is data + profiling) This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this @@ -1360,8 +1364,8 @@ async def sample_generate_data_quality_rules(): Returns: google.cloud.dataplex_v1.types.GenerateDataQualityRulesResponse: - Generate recommended DataQualityRules - response. + Response details for data quality + rule recommendations. """ # Create or coerce a protobuf request object. diff --git a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/data_scan_service/client.py b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/data_scan_service/client.py index ba6d09e94b59..d251517843b4 100644 --- a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/data_scan_service/client.py +++ b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/data_scan_service/client.py @@ -1746,8 +1746,11 @@ def generate_data_quality_rules( timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> datascans.GenerateDataQualityRulesResponse: - r"""Generates recommended DataQualityRule from a data - profiling DataScan. + r"""Generates recommended data quality rules based on the + results of a data profiling scan. + + Use the recommendations to build rules for a data + quality scan. .. code-block:: python @@ -1777,15 +1780,16 @@ def sample_generate_data_quality_rules(): Args: request (Union[google.cloud.dataplex_v1.types.GenerateDataQualityRulesRequest, dict]): - The request object. Generate recommended DataQualityRules - request. + The request object. Request details for generating data + quality rule recommendations. name (str): - Required. 
The name should be either + Required. The name must be one of the following: - - the name of a datascan with at least one successful - completed data profiling job, or - - the name of a successful completed data profiling - datascan job. + - The name of a data scan with at least one successful, + completed data profiling job + - The name of a successful, completed data profiling + job (a data scan job where the job type is data + profiling) This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this @@ -1798,8 +1802,8 @@ def sample_generate_data_quality_rules(): Returns: google.cloud.dataplex_v1.types.GenerateDataQualityRulesResponse: - Generate recommended DataQualityRules - response. + Response details for data quality + rule recommendations. """ # Create or coerce a protobuf request object. diff --git a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/data_scan_service/transports/grpc.py b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/data_scan_service/transports/grpc.py index d6a031505882..e8aa7a2ee003 100644 --- a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/data_scan_service/transports/grpc.py +++ b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/data_scan_service/transports/grpc.py @@ -474,8 +474,11 @@ def generate_data_quality_rules( ]: r"""Return a callable for the generate data quality rules method over gRPC. - Generates recommended DataQualityRule from a data - profiling DataScan. + Generates recommended data quality rules based on the + results of a data profiling scan. + + Use the recommendations to build rules for a data + quality scan. Returns: Callable[[~.GenerateDataQualityRulesRequest], diff --git a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/data_scan_service/transports/grpc_asyncio.py b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/data_scan_service/transports/grpc_asyncio.py index fa437f3e76ee..64cb5a969382 100644 --- a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/data_scan_service/transports/grpc_asyncio.py +++ b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/data_scan_service/transports/grpc_asyncio.py @@ -491,8 +491,11 @@ def generate_data_quality_rules( ]: r"""Return a callable for the generate data quality rules method over gRPC. - Generates recommended DataQualityRule from a data - profiling DataScan. + Generates recommended data quality rules based on the + results of a data profiling scan. + + Use the recommendations to build rules for a data + quality scan. Returns: Callable[[~.GenerateDataQualityRulesRequest], diff --git a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/types/catalog.py b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/types/catalog.py index 8f680727ea7c..237fbc4a6b88 100644 --- a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/types/catalog.py +++ b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/types/catalog.py @@ -862,6 +862,13 @@ class EntrySource(proto.Message): update_time (google.protobuf.timestamp_pb2.Timestamp): The update time of the resource in the source system. + location (str): + Output only. Location of the resource in the + source system. Entry will be searchable by this + location. By default, this should match the + location of the EntryGroup containing this + entry. A different value allows capturing source + location for data external to GCP. 
""" class Ancestor(proto.Message): @@ -924,6 +931,10 @@ class Ancestor(proto.Message): number=11, message=timestamp_pb2.Timestamp, ) + location: str = proto.Field( + proto.STRING, + number=12, + ) class CreateEntryGroupRequest(proto.Message): @@ -1653,8 +1664,8 @@ class ListEntriesRequest(proto.Message): to be provided. Example filter expressions: "entry_source.display_name=AnExampleDisplayName" "entry_type=projects/example-project/locations/global/entryTypes/example-entry_type" - "entry_type=projects/example-project/locations/us/entryTypes/a* - OR entry_type=projects/another-project/locations/* " "NOT + `"entry_type=projects/example-project/locations/us/entryTypes/a*` + OR `entry_type=projects/another-project/locations/*"` "NOT entry_source.display_name=AnotherExampleDisplayName". """ @@ -1848,7 +1859,7 @@ class SearchEntriesResult(proto.Message): linked_resource (str): Linked resource name. dataplex_entry (google.cloud.dataplex_v1.types.Entry): - Entry format of the result. + snippets (google.cloud.dataplex_v1.types.SearchEntriesResult.Snippets): Snippets. """ diff --git a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/types/data_quality.py b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/types/data_quality.py index 20474172f78b..b071fe7e6e2d 100644 --- a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/types/data_quality.py +++ b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/types/data_quality.py @@ -384,9 +384,10 @@ class DataQualityRuleResult(proto.Message): rules. assertion_row_count (int): Output only. The number of rows returned by - the sql statement in the SqlAssertion rule. + the SQL statement in a SQL assertion rule. - This field is only valid for SqlAssertion rules. + This field is only valid for SQL assertion + rules. """ rule: "DataQualityRule" = proto.Field( @@ -534,7 +535,8 @@ class DataQualityRule(proto.Message): This field is a member of `oneof`_ ``rule_type``. sql_assertion (google.cloud.dataplex_v1.types.DataQualityRule.SqlAssertion): Aggregate rule which evaluates the number of - rows returned for the provided statement. + rows returned for the provided statement. If any + rows are returned, this rule fails. This field is a member of `oneof`_ ``rule_type``. column (str): @@ -770,17 +772,20 @@ class TableConditionExpectation(proto.Message): ) class SqlAssertion(proto.Message): - r"""Queries for rows returned by the provided SQL statement. If any rows - are are returned, this rule fails. + r"""A SQL statement that is evaluated to return rows that match an + invalid state. If any rows are are returned, this rule fails. - The SQL statement needs to use BigQuery standard SQL syntax, and - must not contain any semicolons. + The SQL statement must use BigQuery standard SQL syntax, and must + not contain any semicolons. - ${data()} can be used to reference the rows being evaluated, i.e. - the table after all additional filters (row filters, incremental - data filters, sampling) are applied. + You can use the data reference parameter ``${data()}`` to reference + the source table with all of its precondition filters applied. + Examples of precondition filters include row filters, incremental + data filters, and sampling. For more information, see `Data + reference + parameter `__. 
- Example: SELECT \* FROM ${data()} WHERE price < 0 + Example: ``SELECT * FROM ${data()} WHERE price < 0`` Attributes: sql_statement (str): diff --git a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/types/datascans.py b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/types/datascans.py index f510ebb2e4d9..e64a238b7129 100644 --- a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/types/datascans.py +++ b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/types/datascans.py @@ -445,16 +445,17 @@ def raw_page(self): class GenerateDataQualityRulesRequest(proto.Message): - r"""Generate recommended DataQualityRules request. + r"""Request details for generating data quality rule + recommendations. Attributes: name (str): - Required. The name should be either + Required. The name must be one of the following: - - the name of a datascan with at least one successful - completed data profiling job, or - - the name of a successful completed data profiling - datascan job. + - The name of a data scan with at least one successful, + completed data profiling job + - The name of a successful, completed data profiling job (a + data scan job where the job type is data profiling) """ name: str = proto.Field( @@ -464,12 +465,13 @@ class GenerateDataQualityRulesRequest(proto.Message): class GenerateDataQualityRulesResponse(proto.Message): - r"""Generate recommended DataQualityRules response. + r"""Response details for data quality rule recommendations. Attributes: rule (MutableSequence[google.cloud.dataplex_v1.types.DataQualityRule]): - Generated recommended {@link - DataQualityRule}s. + The data quality rules that Dataplex + generates based on the results of a data + profiling scan. """ rule: MutableSequence[data_quality.DataQualityRule] = proto.RepeatedField( @@ -603,6 +605,9 @@ class ExecutionStatus(proto.Message): The time when the latest DataScanJob started. latest_job_end_time (google.protobuf.timestamp_pb2.Timestamp): The time when the latest DataScanJob ended. + latest_job_create_time (google.protobuf.timestamp_pb2.Timestamp): + Optional. The time when the DataScanJob + execution was created. """ latest_job_start_time: timestamp_pb2.Timestamp = proto.Field( @@ -615,6 +620,11 @@ class ExecutionStatus(proto.Message): number=5, message=timestamp_pb2.Timestamp, ) + latest_job_create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=6, + message=timestamp_pb2.Timestamp, + ) name: str = proto.Field( proto.STRING, diff --git a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/types/logs.py b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/types/logs.py index efb223fced23..dd54639a3934 100644 --- a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/types/logs.py +++ b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/types/logs.py @@ -1138,9 +1138,9 @@ class DataQualityScanRuleResult(proto.Message): The number of rows with null values in the specified column. assertion_row_count (int): - The number of rows returned by the sql - statement in the SqlAssertion rule. This field - is only valid for SqlAssertion rules. + The number of rows returned by the SQL + statement in a SQL assertion rule. This field is + only valid for SQL assertion rules. """ class RuleType(proto.Enum): @@ -1150,32 +1150,32 @@ class RuleType(proto.Enum): RULE_TYPE_UNSPECIFIED (0): An unspecified rule type. 
NON_NULL_EXPECTATION (1): - Please see - https://cloud.google.com/dataplex/docs/reference/rest/v1/DataQualityRule#nonnullexpectation. + See + [DataQualityRule.NonNullExpectation][google.cloud.dataplex.v1.DataQualityRule.NonNullExpectation]. RANGE_EXPECTATION (2): - Please see - https://cloud.google.com/dataplex/docs/reference/rest/v1/DataQualityRule#rangeexpectation. + See + [DataQualityRule.RangeExpectation][google.cloud.dataplex.v1.DataQualityRule.RangeExpectation]. REGEX_EXPECTATION (3): - Please see - https://cloud.google.com/dataplex/docs/reference/rest/v1/DataQualityRule#regexexpectation. + See + [DataQualityRule.RegexExpectation][google.cloud.dataplex.v1.DataQualityRule.RegexExpectation]. ROW_CONDITION_EXPECTATION (4): - Please see - https://cloud.google.com/dataplex/docs/reference/rest/v1/DataQualityRule#rowconditionexpectation. + See + [DataQualityRule.RowConditionExpectation][google.cloud.dataplex.v1.DataQualityRule.RowConditionExpectation]. SET_EXPECTATION (5): - Please see - https://cloud.google.com/dataplex/docs/reference/rest/v1/DataQualityRule#setexpectation. + See + [DataQualityRule.SetExpectation][google.cloud.dataplex.v1.DataQualityRule.SetExpectation]. STATISTIC_RANGE_EXPECTATION (6): - Please see - https://cloud.google.com/dataplex/docs/reference/rest/v1/DataQualityRule#statisticrangeexpectation. + See + [DataQualityRule.StatisticRangeExpectation][google.cloud.dataplex.v1.DataQualityRule.StatisticRangeExpectation]. TABLE_CONDITION_EXPECTATION (7): - Please see - https://cloud.google.com/dataplex/docs/reference/rest/v1/DataQualityRule#tableconditionexpectation. + See + [DataQualityRule.TableConditionExpectation][google.cloud.dataplex.v1.DataQualityRule.TableConditionExpectation]. UNIQUENESS_EXPECTATION (8): - Please see - https://cloud.google.com/dataplex/docs/reference/rest/v1/DataQualityRule#uniquenessexpectation. + See + [DataQualityRule.UniquenessExpectation][google.cloud.dataplex.v1.DataQualityRule.UniquenessExpectation]. SQL_ASSERTION (9): - Please see - https://cloud.google.com/dataplex/docs/reference/rest/v1/DataQualityRule#sqlAssertion. + See + [DataQualityRule.SqlAssertion][google.cloud.dataplex.v1.DataQualityRule.SqlAssertion]. """ RULE_TYPE_UNSPECIFIED = 0 NON_NULL_EXPECTATION = 1 diff --git a/packages/google-cloud-dataplex/noxfile.py b/packages/google-cloud-dataplex/noxfile.py index 1e6cd48d0529..67b7265f7586 100644 --- a/packages/google-cloud-dataplex/noxfile.py +++ b/packages/google-cloud-dataplex/noxfile.py @@ -160,14 +160,28 @@ def install_unittest_dependencies(session, *constraints): session.install("-e", ".", *constraints) -def default(session): +@nox.session(python=UNIT_TEST_PYTHON_VERSIONS) +@nox.parametrize( + "protobuf_implementation", + ["python", "upb", "cpp"], +) +def unit(session, protobuf_implementation): # Install all test dependencies, then install this package in-place. 
+ if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12"): + session.skip("cpp implementation is not supported in python 3.11+") + constraints_path = str( CURRENT_DIRECTORY / "testing" / f"constraints-{session.python}.txt" ) install_unittest_dependencies(session, "-c", constraints_path) + # TODO(https://github.com/googleapis/synthtool/issues/1976): + # Remove the 'cpp' implementation once support for Protobuf 3.x is dropped. + # The 'cpp' implementation requires Protobuf<4. + if protobuf_implementation == "cpp": + session.install("protobuf<4") + # Run py.test against the unit tests. session.run( "py.test", @@ -181,15 +195,12 @@ def default(session): "--cov-fail-under=0", os.path.join("tests", "unit"), *session.posargs, + env={ + "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation, + }, ) -@nox.session(python=UNIT_TEST_PYTHON_VERSIONS) -def unit(session): - """Run the unit test suite.""" - default(session) - - def install_systemtest_dependencies(session, *constraints): # Use pre-release gRPC for system tests. # Exclude version 1.52.0rc1 which has a known issue. @@ -358,9 +369,16 @@ def docfx(session): @nox.session(python="3.12") -def prerelease_deps(session): +@nox.parametrize( + "protobuf_implementation", + ["python", "upb", "cpp"], +) +def prerelease_deps(session, protobuf_implementation): """Run all tests with prerelease versions of dependencies installed.""" + if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12"): + session.skip("cpp implementation is not supported in python 3.11+") + # Install all dependencies session.install("-e", ".[all, tests, tracing]") unit_deps_all = UNIT_TEST_STANDARD_DEPENDENCIES + UNIT_TEST_EXTERNAL_DEPENDENCIES @@ -397,9 +415,9 @@ def prerelease_deps(session): "protobuf", # dependency of grpc "six", + "grpc-google-iam-v1", "googleapis-common-protos", - # Exclude version 1.52.0rc1 which has a known issue. 
See https://github.com/grpc/grpc/issues/32163 - "grpcio!=1.52.0rc1", + "grpcio", "grpcio-status", "google-api-core", "google-auth", @@ -425,4 +443,10 @@ def prerelease_deps(session): session.run("python", "-c", "import grpc; print(grpc.__version__)") session.run("python", "-c", "import google.auth; print(google.auth.__version__)") - session.run("py.test", "tests/unit") + session.run( + "py.test", + "tests/unit", + env={ + "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation, + }, + ) diff --git a/packages/google-cloud-dataplex/samples/generated_samples/snippet_metadata_google.cloud.dataplex.v1.json b/packages/google-cloud-dataplex/samples/generated_samples/snippet_metadata_google.cloud.dataplex.v1.json index 2354df1d8081..19c956936ca2 100644 --- a/packages/google-cloud-dataplex/samples/generated_samples/snippet_metadata_google.cloud.dataplex.v1.json +++ b/packages/google-cloud-dataplex/samples/generated_samples/snippet_metadata_google.cloud.dataplex.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-dataplex", - "version": "2.0.0" + "version": "2.2.0" }, "snippets": [ { diff --git a/packages/google-cloud-dataplex/tests/unit/gapic/dataplex_v1/test_catalog_service.py b/packages/google-cloud-dataplex/tests/unit/gapic/dataplex_v1/test_catalog_service.py index 08d7b0e99879..3615a5c76532 100644 --- a/packages/google-cloud-dataplex/tests/unit/gapic/dataplex_v1/test_catalog_service.py +++ b/packages/google-cloud-dataplex/tests/unit/gapic/dataplex_v1/test_catalog_service.py @@ -2727,13 +2727,13 @@ def test_list_entry_types_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_entry_types(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -4922,13 +4922,13 @@ def test_list_aspect_types_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_aspect_types(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -7113,13 +7113,13 @@ def test_list_entry_groups_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_entry_groups(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -9240,13 +9240,13 @@ def test_list_entries_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_entries(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -10507,13 +10507,13 @@ def test_search_entries_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - 
metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("name", ""),)), ) pager = client.search_entries(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 diff --git a/packages/google-cloud-dataplex/tests/unit/gapic/dataplex_v1/test_content_service.py b/packages/google-cloud-dataplex/tests/unit/gapic/dataplex_v1/test_content_service.py index 0805a486c96b..b922666d49d4 100644 --- a/packages/google-cloud-dataplex/tests/unit/gapic/dataplex_v1/test_content_service.py +++ b/packages/google-cloud-dataplex/tests/unit/gapic/dataplex_v1/test_content_service.py @@ -4045,13 +4045,13 @@ def test_list_content_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_content(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 diff --git a/packages/google-cloud-dataplex/tests/unit/gapic/dataplex_v1/test_data_scan_service.py b/packages/google-cloud-dataplex/tests/unit/gapic/dataplex_v1/test_data_scan_service.py index 6c558a379755..67cc7c7c4ad4 100644 --- a/packages/google-cloud-dataplex/tests/unit/gapic/dataplex_v1/test_data_scan_service.py +++ b/packages/google-cloud-dataplex/tests/unit/gapic/dataplex_v1/test_data_scan_service.py @@ -3079,13 +3079,13 @@ def test_list_data_scans_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_data_scans(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -4436,13 +4436,13 @@ def test_list_data_scan_jobs_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_data_scan_jobs(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 diff --git a/packages/google-cloud-dataplex/tests/unit/gapic/dataplex_v1/test_data_taxonomy_service.py b/packages/google-cloud-dataplex/tests/unit/gapic/dataplex_v1/test_data_taxonomy_service.py index a77702136507..00b6521ec1ee 100644 --- a/packages/google-cloud-dataplex/tests/unit/gapic/dataplex_v1/test_data_taxonomy_service.py +++ b/packages/google-cloud-dataplex/tests/unit/gapic/dataplex_v1/test_data_taxonomy_service.py @@ -2801,13 +2801,13 @@ def test_list_data_taxonomies_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_data_taxonomies(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -5047,13 +5047,13 @@ def test_list_data_attribute_bindings_pager(transport_name: str = "grpc"): 
RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_data_attribute_bindings(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -7276,13 +7276,13 @@ def test_list_data_attributes_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_data_attributes(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 diff --git a/packages/google-cloud-dataplex/tests/unit/gapic/dataplex_v1/test_dataplex_service.py b/packages/google-cloud-dataplex/tests/unit/gapic/dataplex_v1/test_dataplex_service.py index ac97d393aa18..13c9395d683e 100644 --- a/packages/google-cloud-dataplex/tests/unit/gapic/dataplex_v1/test_dataplex_service.py +++ b/packages/google-cloud-dataplex/tests/unit/gapic/dataplex_v1/test_dataplex_service.py @@ -2675,13 +2675,13 @@ def test_list_lakes_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_lakes(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -3646,13 +3646,13 @@ def test_list_lake_actions_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_lake_actions(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -5350,13 +5350,13 @@ def test_list_zones_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_zones(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -6321,13 +6321,13 @@ def test_list_zone_actions_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_zone_actions(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -8027,13 +8027,13 @@ def test_list_assets_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_assets(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert 
len(results) == 6 @@ -8995,13 +8995,13 @@ def test_list_asset_actions_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_asset_actions(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -10704,13 +10704,13 @@ def test_list_tasks_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_tasks(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -11646,13 +11646,13 @@ def test_list_jobs_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_jobs(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -14542,13 +14542,13 @@ def test_list_environments_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_environments(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -15496,13 +15496,13 @@ def test_list_sessions_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_sessions(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 diff --git a/packages/google-cloud-dataplex/tests/unit/gapic/dataplex_v1/test_metadata_service.py b/packages/google-cloud-dataplex/tests/unit/gapic/dataplex_v1/test_metadata_service.py index a56f6142d742..f3bbaeaac707 100644 --- a/packages/google-cloud-dataplex/tests/unit/gapic/dataplex_v1/test_metadata_service.py +++ b/packages/google-cloud-dataplex/tests/unit/gapic/dataplex_v1/test_metadata_service.py @@ -3062,13 +3062,13 @@ def test_list_entities_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_entities(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -4754,13 +4754,13 @@ def test_list_partitions_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_partitions(request={}) - assert pager._metadata == 
metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 diff --git a/packages/google-cloud-dataproc-metastore/tests/unit/gapic/metastore_v1/test_dataproc_metastore.py b/packages/google-cloud-dataproc-metastore/tests/unit/gapic/metastore_v1/test_dataproc_metastore.py index 021fef08becc..0265ecf586be 100644 --- a/packages/google-cloud-dataproc-metastore/tests/unit/gapic/metastore_v1/test_dataproc_metastore.py +++ b/packages/google-cloud-dataproc-metastore/tests/unit/gapic/metastore_v1/test_dataproc_metastore.py @@ -1590,13 +1590,13 @@ def test_list_services_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_services(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -3783,13 +3783,13 @@ def test_list_metadata_imports_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_metadata_imports(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -6300,13 +6300,13 @@ def test_list_backups_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_backups(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 diff --git a/packages/google-cloud-dataproc-metastore/tests/unit/gapic/metastore_v1/test_dataproc_metastore_federation.py b/packages/google-cloud-dataproc-metastore/tests/unit/gapic/metastore_v1/test_dataproc_metastore_federation.py index df193997b627..9b34d4bfe16f 100644 --- a/packages/google-cloud-dataproc-metastore/tests/unit/gapic/metastore_v1/test_dataproc_metastore_federation.py +++ b/packages/google-cloud-dataproc-metastore/tests/unit/gapic/metastore_v1/test_dataproc_metastore_federation.py @@ -1641,13 +1641,13 @@ def test_list_federations_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_federations(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 diff --git a/packages/google-cloud-dataproc-metastore/tests/unit/gapic/metastore_v1alpha/test_dataproc_metastore.py b/packages/google-cloud-dataproc-metastore/tests/unit/gapic/metastore_v1alpha/test_dataproc_metastore.py index 97595e8c9b0b..0e9316715612 100644 --- a/packages/google-cloud-dataproc-metastore/tests/unit/gapic/metastore_v1alpha/test_dataproc_metastore.py +++ b/packages/google-cloud-dataproc-metastore/tests/unit/gapic/metastore_v1alpha/test_dataproc_metastore.py @@ -1590,13 +1590,13 @@ def test_list_services_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + 
expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_services(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -3783,13 +3783,13 @@ def test_list_metadata_imports_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_metadata_imports(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -6300,13 +6300,13 @@ def test_list_backups_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_backups(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 diff --git a/packages/google-cloud-dataproc-metastore/tests/unit/gapic/metastore_v1alpha/test_dataproc_metastore_federation.py b/packages/google-cloud-dataproc-metastore/tests/unit/gapic/metastore_v1alpha/test_dataproc_metastore_federation.py index e5672d86ccca..e4081686139d 100644 --- a/packages/google-cloud-dataproc-metastore/tests/unit/gapic/metastore_v1alpha/test_dataproc_metastore_federation.py +++ b/packages/google-cloud-dataproc-metastore/tests/unit/gapic/metastore_v1alpha/test_dataproc_metastore_federation.py @@ -1641,13 +1641,13 @@ def test_list_federations_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_federations(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 diff --git a/packages/google-cloud-dataproc-metastore/tests/unit/gapic/metastore_v1beta/test_dataproc_metastore.py b/packages/google-cloud-dataproc-metastore/tests/unit/gapic/metastore_v1beta/test_dataproc_metastore.py index 31e2b9448895..537fb82cf339 100644 --- a/packages/google-cloud-dataproc-metastore/tests/unit/gapic/metastore_v1beta/test_dataproc_metastore.py +++ b/packages/google-cloud-dataproc-metastore/tests/unit/gapic/metastore_v1beta/test_dataproc_metastore.py @@ -1590,13 +1590,13 @@ def test_list_services_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_services(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -3783,13 +3783,13 @@ def test_list_metadata_imports_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_metadata_imports(request={}) - assert pager._metadata == metadata + assert 
pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -6300,13 +6300,13 @@ def test_list_backups_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_backups(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 diff --git a/packages/google-cloud-dataproc-metastore/tests/unit/gapic/metastore_v1beta/test_dataproc_metastore_federation.py b/packages/google-cloud-dataproc-metastore/tests/unit/gapic/metastore_v1beta/test_dataproc_metastore_federation.py index c8a6222ef7b1..806e09b45a73 100644 --- a/packages/google-cloud-dataproc-metastore/tests/unit/gapic/metastore_v1beta/test_dataproc_metastore_federation.py +++ b/packages/google-cloud-dataproc-metastore/tests/unit/gapic/metastore_v1beta/test_dataproc_metastore_federation.py @@ -1641,13 +1641,13 @@ def test_list_federations_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_federations(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 diff --git a/packages/google-cloud-dataproc/CHANGELOG.md b/packages/google-cloud-dataproc/CHANGELOG.md index 2103594b2890..1e222f173eb8 100644 --- a/packages/google-cloud-dataproc/CHANGELOG.md +++ b/packages/google-cloud-dataproc/CHANGELOG.md @@ -4,6 +4,13 @@ [1]: https://pypi.org/project/google-cloud-dataproc/#history +## [5.10.0](https://github.com/googleapis/google-cloud-python/compare/google-cloud-dataproc-v5.9.3...google-cloud-dataproc-v5.10.0) (2024-06-27) + + +### Features + +* [google-cloud-dataproc] add the cohort and auto tuning configuration to the batch's RuntimeConfig ([#12823](https://github.com/googleapis/google-cloud-python/issues/12823)) ([bbd627b](https://github.com/googleapis/google-cloud-python/commit/bbd627b8354801ab3b897cb2681636bffafd2a9c)) + ## [5.9.3](https://github.com/googleapis/google-cloud-python/compare/google-cloud-dataproc-v5.9.2...google-cloud-dataproc-v5.9.3) (2024-03-05) diff --git a/packages/google-cloud-dataproc/google/cloud/dataproc/__init__.py b/packages/google-cloud-dataproc/google/cloud/dataproc/__init__.py index af0a958e8b84..8c0be718b5bc 100644 --- a/packages/google-cloud-dataproc/google/cloud/dataproc/__init__.py +++ b/packages/google-cloud-dataproc/google/cloud/dataproc/__init__.py @@ -191,6 +191,7 @@ TerminateSessionRequest, ) from google.cloud.dataproc_v1.types.shared import ( + AutotuningConfig, Component, EnvironmentConfig, ExecutionConfig, @@ -360,6 +361,7 @@ "ListSessionsResponse", "Session", "TerminateSessionRequest", + "AutotuningConfig", "EnvironmentConfig", "ExecutionConfig", "GkeClusterConfig", diff --git a/packages/google-cloud-dataproc/google/cloud/dataproc/gapic_version.py b/packages/google-cloud-dataproc/google/cloud/dataproc/gapic_version.py index 
558c8aab67c5..c82ec877e346 100644 --- a/packages/google-cloud-dataproc/google/cloud/dataproc/gapic_version.py +++ b/packages/google-cloud-dataproc/google/cloud/dataproc/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.0.0" # {x-release-please-version} +__version__ = "5.10.0" # {x-release-please-version} diff --git a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/__init__.py b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/__init__.py index 9938594e9f9d..49bcea5780a2 100644 --- a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/__init__.py +++ b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/__init__.py @@ -171,6 +171,7 @@ TerminateSessionRequest, ) from .types.shared import ( + AutotuningConfig, Component, EnvironmentConfig, ExecutionConfig, @@ -226,6 +227,7 @@ "AutoscalingConfig", "AutoscalingPolicy", "AutoscalingPolicyServiceClient", + "AutotuningConfig", "AuxiliaryNodeGroup", "AuxiliaryServicesConfig", "BasicAutoscalingAlgorithm", diff --git a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/gapic_version.py b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/gapic_version.py index 558c8aab67c5..c82ec877e346 100644 --- a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/gapic_version.py +++ b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.0.0" # {x-release-please-version} +__version__ = "5.10.0" # {x-release-please-version} diff --git a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/types/__init__.py b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/types/__init__.py index 1b5164521f61..535fc0e4fc92 100644 --- a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/types/__init__.py +++ b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/types/__init__.py @@ -140,6 +140,7 @@ TerminateSessionRequest, ) from .shared import ( + AutotuningConfig, Component, EnvironmentConfig, ExecutionConfig, @@ -293,6 +294,7 @@ "ListSessionsResponse", "Session", "TerminateSessionRequest", + "AutotuningConfig", "EnvironmentConfig", "ExecutionConfig", "GkeClusterConfig", diff --git a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/types/shared.py b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/types/shared.py index 4ce0e8f18210..ed37c1b8a565 100644 --- a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/types/shared.py +++ b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/types/shared.py @@ -39,6 +39,7 @@ "KubernetesSoftwareConfig", "GkeNodePoolTarget", "GkeNodePoolConfig", + "AutotuningConfig", "RepositoryConfig", "PyPiRepositoryConfig", }, @@ -138,6 +139,13 @@ class RuntimeConfig(proto.Message): repository_config (google.cloud.dataproc_v1.types.RepositoryConfig): Optional. Dependency repository configuration. + autotuning_config (google.cloud.dataproc_v1.types.AutotuningConfig): + Optional. Autotuning configuration of the + workload. + cohort (str): + Optional. Cohort identifier. Identifies + families of the workloads having the same shape, + e.g. daily ETL jobs. 
""" version: str = proto.Field( @@ -158,6 +166,15 @@ class RuntimeConfig(proto.Message): number=5, message="RepositoryConfig", ) + autotuning_config: "AutotuningConfig" = proto.Field( + proto.MESSAGE, + number=6, + message="AutotuningConfig", + ) + cohort: str = proto.Field( + proto.STRING, + number=7, + ) class EnvironmentConfig(proto.Message): @@ -870,6 +887,43 @@ class GkeNodePoolAutoscalingConfig(proto.Message): ) +class AutotuningConfig(proto.Message): + r"""Autotuning configuration of the workload. + + Attributes: + scenarios (MutableSequence[google.cloud.dataproc_v1.types.AutotuningConfig.Scenario]): + Optional. Scenarios for which tunings are + applied. + """ + + class Scenario(proto.Enum): + r"""Scenario represents a specific goal that autotuning will + attempt to achieve by modifying workloads. + + Values: + SCENARIO_UNSPECIFIED (0): + Default value. + SCALING (2): + Scaling recommendations such as + initialExecutors. + BROADCAST_HASH_JOIN (3): + Adding hints for potential relation + broadcasts. + MEMORY (4): + Memory management for workloads. + """ + SCENARIO_UNSPECIFIED = 0 + SCALING = 2 + BROADCAST_HASH_JOIN = 3 + MEMORY = 4 + + scenarios: MutableSequence[Scenario] = proto.RepeatedField( + proto.ENUM, + number=2, + enum=Scenario, + ) + + class RepositoryConfig(proto.Message): r"""Configuration for dependency repositories diff --git a/packages/google-cloud-dataproc/samples/generated_samples/snippet_metadata_google.cloud.dataproc.v1.json b/packages/google-cloud-dataproc/samples/generated_samples/snippet_metadata_google.cloud.dataproc.v1.json index c5f4e003db04..57e0c8b275cb 100644 --- a/packages/google-cloud-dataproc/samples/generated_samples/snippet_metadata_google.cloud.dataproc.v1.json +++ b/packages/google-cloud-dataproc/samples/generated_samples/snippet_metadata_google.cloud.dataproc.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-dataproc", - "version": "0.1.0" + "version": "5.10.0" }, "snippets": [ { diff --git a/packages/google-cloud-dataproc/tests/unit/gapic/dataproc_v1/test_autoscaling_policy_service.py b/packages/google-cloud-dataproc/tests/unit/gapic/dataproc_v1/test_autoscaling_policy_service.py index 1cc89940bfee..c314d9bd3b68 100644 --- a/packages/google-cloud-dataproc/tests/unit/gapic/dataproc_v1/test_autoscaling_policy_service.py +++ b/packages/google-cloud-dataproc/tests/unit/gapic/dataproc_v1/test_autoscaling_policy_service.py @@ -2833,13 +2833,13 @@ def test_list_autoscaling_policies_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_autoscaling_policies(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 diff --git a/packages/google-cloud-dataproc/tests/unit/gapic/dataproc_v1/test_batch_controller.py b/packages/google-cloud-dataproc/tests/unit/gapic/dataproc_v1/test_batch_controller.py index c502dc3b2e66..1691995fd89f 100644 --- a/packages/google-cloud-dataproc/tests/unit/gapic/dataproc_v1/test_batch_controller.py +++ b/packages/google-cloud-dataproc/tests/unit/gapic/dataproc_v1/test_batch_controller.py @@ -2348,13 +2348,13 @@ def test_list_batches_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( 
gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_batches(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -2932,6 +2932,8 @@ def test_create_batch_rest(request_type): "repository_config": { "pypi_repository_config": {"pypi_repository": "pypi_repository_value"} }, + "autotuning_config": {"scenarios": [2]}, + "cohort": "cohort_value", }, "environment_config": { "execution_config": { diff --git a/packages/google-cloud-dataproc/tests/unit/gapic/dataproc_v1/test_cluster_controller.py b/packages/google-cloud-dataproc/tests/unit/gapic/dataproc_v1/test_cluster_controller.py index f126a8040983..ae932995a963 100644 --- a/packages/google-cloud-dataproc/tests/unit/gapic/dataproc_v1/test_cluster_controller.py +++ b/packages/google-cloud-dataproc/tests/unit/gapic/dataproc_v1/test_cluster_controller.py @@ -3822,8 +3822,8 @@ def test_list_clusters_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata( ( ("project_id", ""), @@ -3833,7 +3833,7 @@ def test_list_clusters_pager(transport_name: str = "grpc"): ) pager = client.list_clusters(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 diff --git a/packages/google-cloud-dataproc/tests/unit/gapic/dataproc_v1/test_job_controller.py b/packages/google-cloud-dataproc/tests/unit/gapic/dataproc_v1/test_job_controller.py index 205d41fd129c..f3537fc0b001 100644 --- a/packages/google-cloud-dataproc/tests/unit/gapic/dataproc_v1/test_job_controller.py +++ b/packages/google-cloud-dataproc/tests/unit/gapic/dataproc_v1/test_job_controller.py @@ -2789,8 +2789,8 @@ def test_list_jobs_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata( ( ("project_id", ""), @@ -2800,7 +2800,7 @@ def test_list_jobs_pager(transport_name: str = "grpc"): ) pager = client.list_jobs(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 diff --git a/packages/google-cloud-dataproc/tests/unit/gapic/dataproc_v1/test_session_controller.py b/packages/google-cloud-dataproc/tests/unit/gapic/dataproc_v1/test_session_controller.py index dac068a74013..10bfb0977ec8 100644 --- a/packages/google-cloud-dataproc/tests/unit/gapic/dataproc_v1/test_session_controller.py +++ b/packages/google-cloud-dataproc/tests/unit/gapic/dataproc_v1/test_session_controller.py @@ -2367,13 +2367,13 @@ def test_list_sessions_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_sessions(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -3334,6 +3334,8 @@ def test_create_session_rest(request_type): "repository_config": { "pypi_repository_config": {"pypi_repository": "pypi_repository_value"} }, + "autotuning_config": {"scenarios": [2]}, + "cohort": "cohort_value", }, "environment_config": { "execution_config": 
{ diff --git a/packages/google-cloud-dataproc/tests/unit/gapic/dataproc_v1/test_session_template_controller.py b/packages/google-cloud-dataproc/tests/unit/gapic/dataproc_v1/test_session_template_controller.py index e44452c224f5..6e47d08c76e2 100644 --- a/packages/google-cloud-dataproc/tests/unit/gapic/dataproc_v1/test_session_template_controller.py +++ b/packages/google-cloud-dataproc/tests/unit/gapic/dataproc_v1/test_session_template_controller.py @@ -2871,13 +2871,13 @@ def test_list_session_templates_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_session_templates(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -3436,6 +3436,8 @@ def test_create_session_template_rest(request_type): "repository_config": { "pypi_repository_config": {"pypi_repository": "pypi_repository_value"} }, + "autotuning_config": {"scenarios": [2]}, + "cohort": "cohort_value", }, "environment_config": { "execution_config": { @@ -3872,6 +3874,8 @@ def test_update_session_template_rest(request_type): "repository_config": { "pypi_repository_config": {"pypi_repository": "pypi_repository_value"} }, + "autotuning_config": {"scenarios": [2]}, + "cohort": "cohort_value", }, "environment_config": { "execution_config": { diff --git a/packages/google-cloud-dataproc/tests/unit/gapic/dataproc_v1/test_workflow_template_service.py b/packages/google-cloud-dataproc/tests/unit/gapic/dataproc_v1/test_workflow_template_service.py index cf6803cec447..8764e9dcb585 100644 --- a/packages/google-cloud-dataproc/tests/unit/gapic/dataproc_v1/test_workflow_template_service.py +++ b/packages/google-cloud-dataproc/tests/unit/gapic/dataproc_v1/test_workflow_template_service.py @@ -3663,13 +3663,13 @@ def test_list_workflow_templates_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_workflow_templates(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 diff --git a/packages/google-cloud-datastream/tests/unit/gapic/datastream_v1/test_datastream.py b/packages/google-cloud-datastream/tests/unit/gapic/datastream_v1/test_datastream.py index 41c3dab39067..b03093a6caff 100644 --- a/packages/google-cloud-datastream/tests/unit/gapic/datastream_v1/test_datastream.py +++ b/packages/google-cloud-datastream/tests/unit/gapic/datastream_v1/test_datastream.py @@ -1536,13 +1536,13 @@ def test_list_connection_profiles_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_connection_profiles(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -4045,13 +4045,13 @@ def test_list_streams_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( 
gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_streams(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -6865,13 +6865,13 @@ def test_list_stream_objects_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_stream_objects(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -8203,13 +8203,13 @@ def test_fetch_static_ips_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("name", ""),)), ) pager = client.fetch_static_ips(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -9626,13 +9626,13 @@ def test_list_private_connections_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_private_connections(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -11376,13 +11376,13 @@ def test_list_routes_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_routes(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 diff --git a/packages/google-cloud-datastream/tests/unit/gapic/datastream_v1alpha1/test_datastream.py b/packages/google-cloud-datastream/tests/unit/gapic/datastream_v1alpha1/test_datastream.py index d96eea473f2c..3e977708fb7b 100644 --- a/packages/google-cloud-datastream/tests/unit/gapic/datastream_v1alpha1/test_datastream.py +++ b/packages/google-cloud-datastream/tests/unit/gapic/datastream_v1alpha1/test_datastream.py @@ -1532,13 +1532,13 @@ def test_list_connection_profiles_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_connection_profiles(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -4041,13 +4041,13 @@ def test_list_streams_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_streams(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -6416,13 +6416,13 @@ def test_fetch_static_ips_pager(transport_name: str = "grpc"): 
RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("name", ""),)), ) pager = client.fetch_static_ips(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -7839,13 +7839,13 @@ def test_list_private_connections_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_private_connections(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -9589,13 +9589,13 @@ def test_list_routes_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_routes(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 diff --git a/packages/google-cloud-deploy/google/cloud/deploy/gapic_version.py b/packages/google-cloud-deploy/google/cloud/deploy/gapic_version.py index f1337c609ff8..558c8aab67c5 100644 --- a/packages/google-cloud-deploy/google/cloud/deploy/gapic_version.py +++ b/packages/google-cloud-deploy/google/cloud/deploy/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "1.19.0" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-deploy/google/cloud/deploy_v1/gapic_version.py b/packages/google-cloud-deploy/google/cloud/deploy_v1/gapic_version.py index f1337c609ff8..558c8aab67c5 100644 --- a/packages/google-cloud-deploy/google/cloud/deploy_v1/gapic_version.py +++ b/packages/google-cloud-deploy/google/cloud/deploy_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "1.19.0" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-deploy/google/cloud/deploy_v1/types/cloud_deploy.py b/packages/google-cloud-deploy/google/cloud/deploy_v1/types/cloud_deploy.py index 4379e11d05c5..4c3b150c1e4e 100644 --- a/packages/google-cloud-deploy/google/cloud/deploy_v1/types/cloud_deploy.py +++ b/packages/google-cloud-deploy/google/cloud/deploy_v1/types/cloud_deploy.py @@ -2374,7 +2374,7 @@ class SkaffoldGCSSource(proto.Message): Attributes: source (str): Required. Cloud Storage source paths to copy recursively. - For example, providing `gs://my-bucket/dir/configs/*` will + For example, providing "gs://my-bucket/dir/configs/*" will result in Skaffold copying all files within the "dir/configs" directory in the bucket "my-bucket". 
path (str): diff --git a/packages/google-cloud-deploy/samples/generated_samples/snippet_metadata_google.cloud.deploy.v1.json b/packages/google-cloud-deploy/samples/generated_samples/snippet_metadata_google.cloud.deploy.v1.json index 7525e48381ce..b4f5eeee1a80 100644 --- a/packages/google-cloud-deploy/samples/generated_samples/snippet_metadata_google.cloud.deploy.v1.json +++ b/packages/google-cloud-deploy/samples/generated_samples/snippet_metadata_google.cloud.deploy.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-deploy", - "version": "1.19.0" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-deploy/tests/unit/gapic/deploy_v1/test_cloud_deploy.py b/packages/google-cloud-deploy/tests/unit/gapic/deploy_v1/test_cloud_deploy.py index df967d566d5a..3cc6b4140c53 100644 --- a/packages/google-cloud-deploy/tests/unit/gapic/deploy_v1/test_cloud_deploy.py +++ b/packages/google-cloud-deploy/tests/unit/gapic/deploy_v1/test_cloud_deploy.py @@ -1540,13 +1540,13 @@ def test_list_delivery_pipelines_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_delivery_pipelines(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -3746,13 +3746,13 @@ def test_list_targets_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_targets(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -6264,13 +6264,13 @@ def test_list_custom_target_types_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_custom_target_types(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -8470,13 +8470,13 @@ def test_list_releases_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_releases(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -11298,13 +11298,13 @@ def test_list_rollouts_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_rollouts(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -13464,13 +13464,13 @@ def test_list_job_runs_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( 
gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_job_runs(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -16774,13 +16774,13 @@ def test_list_automations_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_automations(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -17794,13 +17794,13 @@ def test_list_automation_runs_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_automation_runs(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 diff --git a/packages/google-cloud-developerconnect/.OwlBot.yaml b/packages/google-cloud-developerconnect/.OwlBot.yaml new file mode 100644 index 000000000000..853b9ba465c3 --- /dev/null +++ b/packages/google-cloud-developerconnect/.OwlBot.yaml @@ -0,0 +1,18 @@ +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +deep-copy-regex: + - source: /google/cloud/developerconnect/(v.*)/.*-py + dest: /owl-bot-staging/google-cloud-developerconnect/$1 +api-name: google-cloud-developerconnect diff --git a/packages/google-cloud-developerconnect/.coveragerc b/packages/google-cloud-developerconnect/.coveragerc new file mode 100644 index 000000000000..f20f8d6bfba0 --- /dev/null +++ b/packages/google-cloud-developerconnect/.coveragerc @@ -0,0 +1,13 @@ +[run] +branch = True + +[report] +show_missing = True +omit = + google/cloud/developerconnect/__init__.py + google/cloud/developerconnect/gapic_version.py +exclude_lines = + # Re-enable the standard pragma + pragma: NO COVER + # Ignore debug-only repr + def __repr__ diff --git a/packages/google-cloud-developerconnect/.flake8 b/packages/google-cloud-developerconnect/.flake8 new file mode 100644 index 000000000000..87f6e408c47d --- /dev/null +++ b/packages/google-cloud-developerconnect/.flake8 @@ -0,0 +1,33 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Generated by synthtool. DO NOT EDIT! +[flake8] +ignore = E203, E231, E266, E501, W503 +exclude = + # Exclude generated code. + **/proto/** + **/gapic/** + **/services/** + **/types/** + *_pb2.py + + # Standard linting exemptions. + **/.nox/** + __pycache__, + .git, + *.pyc, + conf.py diff --git a/packages/google-cloud-developerconnect/.gitignore b/packages/google-cloud-developerconnect/.gitignore new file mode 100644 index 000000000000..b4243ced74e4 --- /dev/null +++ b/packages/google-cloud-developerconnect/.gitignore @@ -0,0 +1,63 @@ +*.py[cod] +*.sw[op] + +# C extensions +*.so + +# Packages +*.egg +*.egg-info +dist +build +eggs +.eggs +parts +bin +var +sdist +develop-eggs +.installed.cfg +lib +lib64 +__pycache__ + +# Installer logs +pip-log.txt + +# Unit test / coverage reports +.coverage +.nox +.cache +.pytest_cache + + +# Mac +.DS_Store + +# JetBrains +.idea + +# VS Code +.vscode + +# emacs +*~ + +# Built documentation +docs/_build +bigquery/docs/generated +docs.metadata + +# Virtual environment +env/ + +# Test logs +coverage.xml +*sponge_log.xml + +# System test environment variables. +system_tests/local_test_setup + +# Make sure a generated file isn't accidentally committed. +pylintrc +pylintrc.test diff --git a/packages/google-cloud-developerconnect/.repo-metadata.json b/packages/google-cloud-developerconnect/.repo-metadata.json new file mode 100644 index 000000000000..21ff9f3367c0 --- /dev/null +++ b/packages/google-cloud-developerconnect/.repo-metadata.json @@ -0,0 +1,17 @@ +{ + "name": "google-cloud-developerconnect", + "name_pretty": "Developer Connect API", + "api_description": "Developer Connect streamlines integration with third-party source code management platforms by simplifying authentication, authorization, and networking configuration.", + "product_documentation": "https://cloud.google.com/developer-connect/docs/overview", + "client_documentation": "https://cloud.google.com/python/docs/reference/google-cloud-developerconnect/latest", + "issue_tracker": "https://issuetracker.google.com/issues/new?component=1446966&template=1822025", + "release_level": "preview", + "language": "python", + "library_type": "GAPIC_AUTO", + "repo": "googleapis/google-cloud-python", + "distribution_name": "google-cloud-developerconnect", + "api_id": "developerconnect.googleapis.com", + "default_version": "v1", + "codeowner_team": "", + "api_shortname": "developerconnect" +} diff --git a/packages/google-cloud-developerconnect/CHANGELOG.md b/packages/google-cloud-developerconnect/CHANGELOG.md new file mode 100644 index 000000000000..66a7d780f5ef --- /dev/null +++ b/packages/google-cloud-developerconnect/CHANGELOG.md @@ -0,0 +1,10 @@ +# Changelog + +## 0.1.0 (2024-06-05) + + +### Features + +* add initial files for google.cloud.developerconnect.v1 
([#12777](https://github.com/googleapis/google-cloud-python/issues/12777)) ([3deb6c7](https://github.com/googleapis/google-cloud-python/commit/3deb6c728455ca41180527b268d2f18445136520)) + +## Changelog diff --git a/packages/google-cloud-developerconnect/CODE_OF_CONDUCT.md b/packages/google-cloud-developerconnect/CODE_OF_CONDUCT.md new file mode 100644 index 000000000000..039f43681204 --- /dev/null +++ b/packages/google-cloud-developerconnect/CODE_OF_CONDUCT.md @@ -0,0 +1,95 @@ + +# Code of Conduct + +## Our Pledge + +In the interest of fostering an open and welcoming environment, we as +contributors and maintainers pledge to making participation in our project and +our community a harassment-free experience for everyone, regardless of age, body +size, disability, ethnicity, gender identity and expression, level of +experience, education, socio-economic status, nationality, personal appearance, +race, religion, or sexual identity and orientation. + +## Our Standards + +Examples of behavior that contributes to creating a positive environment +include: + +* Using welcoming and inclusive language +* Being respectful of differing viewpoints and experiences +* Gracefully accepting constructive criticism +* Focusing on what is best for the community +* Showing empathy towards other community members + +Examples of unacceptable behavior by participants include: + +* The use of sexualized language or imagery and unwelcome sexual attention or + advances +* Trolling, insulting/derogatory comments, and personal or political attacks +* Public or private harassment +* Publishing others' private information, such as a physical or electronic + address, without explicit permission +* Other conduct which could reasonably be considered inappropriate in a + professional setting + +## Our Responsibilities + +Project maintainers are responsible for clarifying the standards of acceptable +behavior and are expected to take appropriate and fair corrective action in +response to any instances of unacceptable behavior. + +Project maintainers have the right and responsibility to remove, edit, or reject +comments, commits, code, wiki edits, issues, and other contributions that are +not aligned to this Code of Conduct, or to ban temporarily or permanently any +contributor for other behaviors that they deem inappropriate, threatening, +offensive, or harmful. + +## Scope + +This Code of Conduct applies both within project spaces and in public spaces +when an individual is representing the project or its community. Examples of +representing a project or community include using an official project e-mail +address, posting via an official social media account, or acting as an appointed +representative at an online or offline event. Representation of a project may be +further defined and clarified by project maintainers. + +This Code of Conduct also applies outside the project spaces when the Project +Steward has a reasonable belief that an individual's behavior may have a +negative impact on the project or its community. + +## Conflict Resolution + +We do not believe that all conflict is bad; healthy debate and disagreement +often yield positive results. However, it is never okay to be disrespectful or +to engage in behavior that violates the project’s code of conduct. + +If you see someone violating the code of conduct, you are encouraged to address +the behavior directly with those involved. 
Many issues can be resolved quickly +and easily, and this gives people more control over the outcome of their +dispute. If you are unable to resolve the matter for any reason, or if the +behavior is threatening or harassing, report it. We are dedicated to providing +an environment where participants feel welcome and safe. + + +Reports should be directed to *googleapis-stewards@google.com*, the +Project Steward(s) for *Google Cloud Client Libraries*. It is the Project Steward’s duty to +receive and address reported violations of the code of conduct. They will then +work with a committee consisting of representatives from the Open Source +Programs Office and the Google Open Source Strategy team. If for any reason you +are uncomfortable reaching out to the Project Steward, please email +opensource@google.com. + +We will investigate every complaint, but you may not receive a direct response. +We will use our discretion in determining when and how to follow up on reported +incidents, which may range from not taking action to permanent expulsion from +the project and project-sponsored spaces. We will notify the accused of the +report and provide them an opportunity to discuss it before any action is taken. +The identity of the reporter will be omitted from the details of the report +supplied to the accused. In potentially harmful situations, such as ongoing +harassment or threats to anyone's safety, we may take action without notice. + +## Attribution + +This Code of Conduct is adapted from the Contributor Covenant, version 1.4, +available at +https://www.contributor-covenant.org/version/1/4/code-of-conduct.html \ No newline at end of file diff --git a/packages/google-cloud-developerconnect/CONTRIBUTING.rst b/packages/google-cloud-developerconnect/CONTRIBUTING.rst new file mode 100644 index 000000000000..cef2e3971452 --- /dev/null +++ b/packages/google-cloud-developerconnect/CONTRIBUTING.rst @@ -0,0 +1,271 @@ +.. Generated by synthtool. DO NOT EDIT! +############ +Contributing +############ + +#. **Please sign one of the contributor license agreements below.** +#. Fork the repo, develop and test your code changes, add docs. +#. Make sure that your commit messages clearly describe the changes. +#. Send a pull request. (Please Read: `Faster Pull Request Reviews`_) + +.. _Faster Pull Request Reviews: https://github.com/kubernetes/community/blob/master/contributors/guide/pull-requests.md#best-practices-for-faster-reviews + +.. contents:: Here are some guidelines for hacking on the Google Cloud Client libraries. + +*************** +Adding Features +*************** + +In order to add a feature: + +- The feature must be documented in both the API and narrative + documentation. + +- The feature must work fully on the following CPython versions: + 3.7, 3.8, 3.9, 3.10, 3.11 and 3.12 on both UNIX and Windows. + +- The feature must not add unnecessary dependencies (where + "unnecessary" is of course subjective, but new dependencies should + be discussed). + +**************************** +Using a Development Checkout +**************************** + +You'll have to create a development environment using a Git checkout: + +- While logged into your GitHub account, navigate to the + ``google-cloud-python`` `repo`_ on GitHub. + +- Fork and clone the ``google-cloud-python`` repository to your GitHub account by + clicking the "Fork" button. 
+ +- Clone your fork of ``google-cloud-python`` from your GitHub account to your local + computer, substituting your account username and specifying the destination + as ``hack-on-google-cloud-python``. E.g.:: + + $ cd ${HOME} + $ git clone git@github.com:USERNAME/google-cloud-python.git hack-on-google-cloud-python + $ cd hack-on-google-cloud-python + # Configure remotes such that you can pull changes from the googleapis/google-cloud-python + # repository into your local repository. + $ git remote add upstream git@github.com:googleapis/google-cloud-python.git + # fetch and merge changes from upstream into main + $ git fetch upstream + $ git merge upstream/main + +Now your local repo is set up such that you will push changes to your GitHub +repo, from which you can submit a pull request. + +To work on the codebase and run the tests, we recommend using ``nox``, +but you can also use a ``virtualenv`` of your own creation. + +.. _repo: https://github.com/googleapis/google-cloud-python + +Using ``nox`` +============= + +We use `nox `__ to instrument our tests. + +- To test your changes, run unit tests with ``nox``:: + $ nox -s unit + +- To run a single unit test:: + + $ nox -s unit-3.12 -- -k + + + .. note:: + + The unit tests and system tests are described in the + ``noxfile.py`` files in each directory. + +.. nox: https://pypi.org/project/nox/ + +***************************************** +I'm getting weird errors... Can you help? +***************************************** + +If the error mentions ``Python.h`` not being found, +install ``python-dev`` and try again. +On Debian/Ubuntu:: + + $ sudo apt-get install python-dev + +************ +Coding Style +************ +- We use the automatic code formatter ``black``. You can run it using + the nox session ``blacken``. This will eliminate many lint errors. Run via:: + + $ nox -s blacken + +- PEP8 compliance is required, with exceptions defined in the linter configuration. + If you have ``nox`` installed, you can test that you have not introduced + any non-compliant code via:: + + $ nox -s lint + +- In order to make ``nox -s lint`` run faster, you can set some environment + variables:: + + export GOOGLE_CLOUD_TESTING_REMOTE="upstream" + export GOOGLE_CLOUD_TESTING_BRANCH="main" + + By doing this, you are specifying the location of the most up-to-date + version of ``google-cloud-python``. The + remote name ``upstream`` should point to the official ``googleapis`` + checkout and the branch should be the default branch on that remote (``main``). + +- This repository contains configuration for the + `pre-commit `__ tool, which automates checking + our linters during a commit. If you have it installed on your ``$PATH``, + you can enable enforcing those checks via: + +.. code-block:: bash + + $ pre-commit install + pre-commit installed at .git/hooks/pre-commit + +Exceptions to PEP8: + +- Many unit tests use a helper method, ``_call_fut`` ("FUT" is short for + "Function-Under-Test"), which is PEP8-incompliant, but more readable. + Some also use a local variable, ``MUT`` (short for "Module-Under-Test"). + +******************** +Running System Tests +******************** + +- To run system tests, you can execute:: + + # Run all system tests + $ nox -s system + + # Run a single system test + $ nox -s system-3.12 -- -k + + + .. note:: + + System tests are only configured to run under Python 3.8, 3.9, 3.10, 3.11 and 3.12. 
+ For expediency, we do not run them in older versions of Python 3. + + This alone will not run the tests. You'll need to change some local + auth settings and change some configuration in your project to + run all the tests. + +- System tests will be run against an actual project. You should use local credentials from gcloud when possible. See `Best practices for application authentication `__. Some tests require a service account. For those tests see `Authenticating as a service account `__. + +************* +Test Coverage +************* + +- The codebase *must* have 100% test statement coverage after each commit. + You can test coverage via ``nox -s cover``. + +****************************************************** +Documentation Coverage and Building HTML Documentation +****************************************************** + +If you fix a bug, and the bug requires an API or behavior modification, all +documentation in this package which references that API or behavior must be +changed to reflect the bug fix, ideally in the same commit that fixes the bug +or adds the feature. + +Build the docs via: + + $ nox -s docs + +************************* +Samples and code snippets +************************* + +Code samples and snippets live in the `samples/` catalogue. Feel free to +provide more examples, but make sure to write tests for those examples. +Each folder containing example code requires its own `noxfile.py` script +which automates testing. If you decide to create a new folder, you can +base it on the `samples/snippets` folder (providing `noxfile.py` and +the requirements files). + +The tests will run against a real Google Cloud Project, so you should +configure them just like the System Tests. + +- To run sample tests, you can execute:: + + # Run all tests in a folder + $ cd samples/snippets + $ nox -s py-3.8 + + # Run a single sample test + $ cd samples/snippets + $ nox -s py-3.8 -- -k + +******************************************** +Note About ``README`` as it pertains to PyPI +******************************************** + +The `description on PyPI`_ for the project comes directly from the +``README``. Due to the reStructuredText (``rst``) parser used by +PyPI, relative links which will work on GitHub (e.g. ``CONTRIBUTING.rst`` +instead of +``https://github.com/googleapis/google-cloud-python/blob/main/CONTRIBUTING.rst``) +may cause problems creating links or rendering the description. + +.. _description on PyPI: https://pypi.org/project/google-cloud-developerconnect + + +************************* +Supported Python Versions +************************* + +We support: + +- `Python 3.7`_ +- `Python 3.8`_ +- `Python 3.9`_ +- `Python 3.10`_ +- `Python 3.11`_ +- `Python 3.12`_ + +.. _Python 3.7: https://docs.python.org/3.7/ +.. _Python 3.8: https://docs.python.org/3.8/ +.. _Python 3.9: https://docs.python.org/3.9/ +.. _Python 3.10: https://docs.python.org/3.10/ +.. _Python 3.11: https://docs.python.org/3.11/ +.. _Python 3.12: https://docs.python.org/3.12/ + + +Supported versions can be found in our ``noxfile.py`` `config`_. + +.. 
_config: https://github.com/googleapis/google-cloud-python/blob/main/packages/google-cloud-developerconnect/noxfile.py + + +********** +Versioning +********** + +This library follows `Semantic Versioning`_. + +.. _Semantic Versioning: http://semver.org/ + +Some packages are currently in major version zero (``0.y.z``), which means that +anything may change at any time and the public API should not be considered +stable. + +****************************** +Contributor License Agreements +****************************** + +Before we can accept your pull requests you'll need to sign a Contributor +License Agreement (CLA): + +- **If you are an individual writing original source code** and **you own the + intellectual property**, then you'll need to sign an + `individual CLA `__. +- **If you work for a company that wants to allow you to contribute your work**, + then you'll need to sign a + `corporate CLA `__. + +You can sign these electronically (just scroll to the bottom). After that, +we'll be able to accept your pull requests. diff --git a/packages/google-cloud-developerconnect/LICENSE b/packages/google-cloud-developerconnect/LICENSE new file mode 100644 index 000000000000..d64569567334 --- /dev/null +++ b/packages/google-cloud-developerconnect/LICENSE @@ -0,0 +1,202 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. 
For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/packages/google-cloud-developerconnect/MANIFEST.in b/packages/google-cloud-developerconnect/MANIFEST.in new file mode 100644 index 000000000000..e0a66705318e --- /dev/null +++ b/packages/google-cloud-developerconnect/MANIFEST.in @@ -0,0 +1,25 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Generated by synthtool. 
DO NOT EDIT! +include README.rst LICENSE +recursive-include google *.json *.proto py.typed +recursive-include tests * +global-exclude *.py[co] +global-exclude __pycache__ + +# Exclude scripts for samples readmegen +prune scripts/readme-gen diff --git a/packages/google-cloud-developerconnect/README.rst b/packages/google-cloud-developerconnect/README.rst new file mode 100644 index 000000000000..9d37f83622dd --- /dev/null +++ b/packages/google-cloud-developerconnect/README.rst @@ -0,0 +1,108 @@ +Python Client for Developer Connect API +======================================= + +|preview| |pypi| |versions| + +`Developer Connect API`_: Developer Connect streamlines integration with third-party source code management platforms by simplifying authentication, authorization, and networking configuration. + +- `Client Library Documentation`_ +- `Product Documentation`_ + +.. |preview| image:: https://img.shields.io/badge/support-preview-orange.svg + :target: https://github.com/googleapis/google-cloud-python/blob/main/README.rst#stability-levels +.. |pypi| image:: https://img.shields.io/pypi/v/google-cloud-developerconnect.svg + :target: https://pypi.org/project/google-cloud-developerconnect/ +.. |versions| image:: https://img.shields.io/pypi/pyversions/google-cloud-developerconnect.svg + :target: https://pypi.org/project/google-cloud-developerconnect/ +.. _Developer Connect API: https://cloud.google.com/developer-connect/docs/overview +.. _Client Library Documentation: https://cloud.google.com/python/docs/reference/google-cloud-developerconnect/latest/summary_overview +.. _Product Documentation: https://cloud.google.com/developer-connect/docs/overview + +Quick Start +----------- + +In order to use this library, you first need to go through the following steps: + +1. `Select or create a Cloud Platform project.`_ +2. `Enable billing for your project.`_ +3. `Enable the Developer Connect API.`_ +4. `Setup Authentication.`_ + +.. _Select or create a Cloud Platform project.: https://console.cloud.google.com/project +.. _Enable billing for your project.: https://cloud.google.com/billing/docs/how-to/modify-project#enable_billing_for_a_project +.. _Enable the Developer Connect API.: https://cloud.google.com/developer-connect/docs/overview +.. _Setup Authentication.: https://googleapis.dev/python/google-api-core/latest/auth.html + +Installation +~~~~~~~~~~~~ + +Install this library in a virtual environment using `venv`_. `venv`_ is a tool that +creates isolated Python environments. These isolated environments can have separate +versions of Python packages, which allows you to isolate one project's dependencies +from the dependencies of other projects. + +With `venv`_, it's possible to install this library without needing system +install permissions, and without clashing with the installed system +dependencies. + +.. 
_`venv`: https://docs.python.org/3/library/venv.html + + +Code samples and snippets +~~~~~~~~~~~~~~~~~~~~~~~~~ + +Code samples and snippets live in the `samples/`_ folder. + +.. _samples/: https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-developerconnect/samples + + +Supported Python Versions +^^^^^^^^^^^^^^^^^^^^^^^^^ +Our client libraries are compatible with all current `active`_ and `maintenance`_ versions of +Python. + +Python >= 3.7 + +.. _active: https://devguide.python.org/devcycle/#in-development-main-branch +.. _maintenance: https://devguide.python.org/devcycle/#maintenance-branches + +Unsupported Python Versions +^^^^^^^^^^^^^^^^^^^^^^^^^^^ +Python <= 3.6 + +If you are using an `end-of-life`_ +version of Python, we recommend that you update as soon as possible to an actively supported version. + +.. _end-of-life: https://devguide.python.org/devcycle/#end-of-life-branches + +Mac/Linux +^^^^^^^^^ + +.. code-block:: console + + python3 -m venv + source /bin/activate + pip install google-cloud-developerconnect + + +Windows +^^^^^^^ + +.. code-block:: console + + py -m venv + .\\Scripts\activate + pip install google-cloud-developerconnect + +Next Steps +~~~~~~~~~~ + +- Read the `Client Library Documentation`_ for Developer Connect API + to see other available methods on the client. +- Read the `Developer Connect API Product documentation`_ to learn + more about the product and see How-to Guides. +- View this `README`_ to see the full list of Cloud + APIs that we cover. + +.. _Developer Connect API Product documentation: https://cloud.google.com/developer-connect/docs/overview +.. 
_README: https://github.com/googleapis/google-cloud-python/blob/main/README.rst diff --git a/packages/google-cloud-developerconnect/docs/CHANGELOG.md b/packages/google-cloud-developerconnect/docs/CHANGELOG.md new file mode 120000 index 000000000000..04c99a55caae --- /dev/null +++ b/packages/google-cloud-developerconnect/docs/CHANGELOG.md @@ -0,0 +1 @@ +../CHANGELOG.md \ No newline at end of file diff --git a/packages/google-cloud-developerconnect/docs/README.rst b/packages/google-cloud-developerconnect/docs/README.rst new file mode 120000 index 000000000000..89a0106941ff --- /dev/null +++ b/packages/google-cloud-developerconnect/docs/README.rst @@ -0,0 +1 @@ +../README.rst \ No newline at end of file diff --git a/packages/google-cloud-developerconnect/docs/_static/custom.css b/packages/google-cloud-developerconnect/docs/_static/custom.css new file mode 100644 index 000000000000..b0a295464b23 --- /dev/null +++ b/packages/google-cloud-developerconnect/docs/_static/custom.css @@ -0,0 +1,20 @@ +div#python2-eol { + border-color: red; + border-width: medium; +} + +/* Ensure minimum width for 'Parameters' / 'Returns' column */ +dl.field-list > dt { + min-width: 100px +} + +/* Insert space between methods for readability */ +dl.method { + padding-top: 10px; + padding-bottom: 10px +} + +/* Insert empty space between classes */ +dl.class { + padding-bottom: 50px +} diff --git a/packages/google-cloud-developerconnect/docs/_templates/layout.html b/packages/google-cloud-developerconnect/docs/_templates/layout.html new file mode 100644 index 000000000000..6316a537f72b --- /dev/null +++ b/packages/google-cloud-developerconnect/docs/_templates/layout.html @@ -0,0 +1,50 @@ + +{% extends "!layout.html" %} +{%- block content %} +{%- if theme_fixed_sidebar|lower == 'true' %} +
+ {{ sidebar() }} + {%- block document %} +
+ {%- if render_sidebar %} +
+ {%- endif %} + + {%- block relbar_top %} + {%- if theme_show_relbar_top|tobool %} + + {%- endif %} + {% endblock %} + +
+
+ As of January 1, 2020 this library no longer supports Python 2 on the latest released version. + Library versions released prior to that date will continue to be available. For more information please + visit Python 2 support on Google Cloud. +
+ {% block body %} {% endblock %} +
+ + {%- block relbar_bottom %} + {%- if theme_show_relbar_bottom|tobool %} + + {%- endif %} + {% endblock %} + + {%- if render_sidebar %} +
+ {%- endif %} +
+ {%- endblock %} +
+
+{%- else %} +{{ super() }} +{%- endif %} +{%- endblock %} diff --git a/packages/google-cloud-developerconnect/docs/conf.py b/packages/google-cloud-developerconnect/docs/conf.py new file mode 100644 index 000000000000..8ce93a63d2d7 --- /dev/null +++ b/packages/google-cloud-developerconnect/docs/conf.py @@ -0,0 +1,384 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# google-cloud-developerconnect documentation build configuration file +# +# This file is execfile()d with the current directory set to its +# containing dir. +# +# Note that not all possible configuration values are present in this +# autogenerated file. +# +# All configuration values have a default; values that are commented out +# serve to show the default. + +import os +import shlex +import sys + +# If extensions (or modules to document with autodoc) are in another directory, +# add these directories to sys.path here. If the directory is relative to the +# documentation root, use os.path.abspath to make it absolute, like shown here. +sys.path.insert(0, os.path.abspath("..")) + +# For plugins that can not read conf.py. +# See also: https://github.com/docascode/sphinx-docfx-yaml/issues/85 +sys.path.insert(0, os.path.abspath(".")) + +__version__ = "" + +# -- General configuration ------------------------------------------------ + +# If your documentation needs a minimal Sphinx version, state it here. +needs_sphinx = "1.5.5" + +# Add any Sphinx extension module names here, as strings. They can be +# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom +# ones. +extensions = [ + "sphinx.ext.autodoc", + "sphinx.ext.autosummary", + "sphinx.ext.intersphinx", + "sphinx.ext.coverage", + "sphinx.ext.doctest", + "sphinx.ext.napoleon", + "sphinx.ext.todo", + "sphinx.ext.viewcode", + "recommonmark", +] + +# autodoc/autosummary flags +autoclass_content = "both" +autodoc_default_options = {"members": True} +autosummary_generate = True + + +# Add any paths that contain templates here, relative to this directory. +templates_path = ["_templates"] + +# The suffix(es) of source filenames. +# You can specify multiple suffix as a list of string: +# source_suffix = ['.rst', '.md'] +source_suffix = [".rst", ".md"] + +# The encoding of source files. +# source_encoding = 'utf-8-sig' + +# The root toctree document. +root_doc = "index" + +# General information about the project. +project = "google-cloud-developerconnect" +copyright = "2019, Google" +author = "Google APIs" + +# The version info for the project you're documenting, acts as replacement for +# |version| and |release|, also used in various other places throughout the +# built documents. +# +# The full version, including alpha/beta/rc tags. +release = __version__ +# The short X.Y version. +version = ".".join(release.split(".")[0:2]) + +# The language for content autogenerated by Sphinx. 
Refer to documentation +# for a list of supported languages. +# +# This is also used if you do content translation via gettext catalogs. +# Usually you set "language" from the command line for these cases. +language = None + +# There are two options for replacing |today|: either, you set today to some +# non-false value, then it is used: +# today = '' +# Else, today_fmt is used as the format for a strftime call. +# today_fmt = '%B %d, %Y' + +# List of patterns, relative to source directory, that match files and +# directories to ignore when looking for source files. +exclude_patterns = [ + "_build", + "**/.nox/**/*", + "samples/AUTHORING_GUIDE.md", + "samples/CONTRIBUTING.md", + "samples/snippets/README.rst", +] + +# The reST default role (used for this markup: `text`) to use for all +# documents. +# default_role = None + +# If true, '()' will be appended to :func: etc. cross-reference text. +# add_function_parentheses = True + +# If true, the current module name will be prepended to all description +# unit titles (such as .. function::). +# add_module_names = True + +# If true, sectionauthor and moduleauthor directives will be shown in the +# output. They are ignored by default. +# show_authors = False + +# The name of the Pygments (syntax highlighting) style to use. +pygments_style = "sphinx" + +# A list of ignored prefixes for module index sorting. +# modindex_common_prefix = [] + +# If true, keep warnings as "system message" paragraphs in the built documents. +# keep_warnings = False + +# If true, `todo` and `todoList` produce output, else they produce nothing. +todo_include_todos = True + + +# -- Options for HTML output ---------------------------------------------- + +# The theme to use for HTML and HTML Help pages. See the documentation for +# a list of builtin themes. +html_theme = "alabaster" + +# Theme options are theme-specific and customize the look and feel of a theme +# further. For a list of options available for each theme, see the +# documentation. +html_theme_options = { + "description": "Google Cloud Client Libraries for google-cloud-developerconnect", + "github_user": "googleapis", + "github_repo": "google-cloud-python", + "github_banner": True, + "font_family": "'Roboto', Georgia, sans", + "head_font_family": "'Roboto', Georgia, serif", + "code_font_family": "'Roboto Mono', 'Consolas', monospace", +} + +# Add any paths that contain custom themes here, relative to this directory. +# html_theme_path = [] + +# The name for this set of Sphinx documents. If None, it defaults to +# " v documentation". +# html_title = None + +# A shorter title for the navigation bar. Default is the same as html_title. +# html_short_title = None + +# The name of an image file (relative to this directory) to place at the top +# of the sidebar. +# html_logo = None + +# The name of an image file (within the static path) to use as favicon of the +# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 +# pixels large. +# html_favicon = None + +# Add any paths that contain custom static files (such as style sheets) here, +# relative to this directory. They are copied after the builtin static files, +# so a file named "default.css" will overwrite the builtin "default.css". +html_static_path = ["_static"] + +# Add any extra paths that contain custom files (such as robots.txt or +# .htaccess) here, relative to this directory. These files are copied +# directly to the root of the documentation. 
+# html_extra_path = [] + +# If not '', a 'Last updated on:' timestamp is inserted at every page bottom, +# using the given strftime format. +# html_last_updated_fmt = '%b %d, %Y' + +# If true, SmartyPants will be used to convert quotes and dashes to +# typographically correct entities. +# html_use_smartypants = True + +# Custom sidebar templates, maps document names to template names. +# html_sidebars = {} + +# Additional templates that should be rendered to pages, maps page names to +# template names. +# html_additional_pages = {} + +# If false, no module index is generated. +# html_domain_indices = True + +# If false, no index is generated. +# html_use_index = True + +# If true, the index is split into individual pages for each letter. +# html_split_index = False + +# If true, links to the reST sources are added to the pages. +# html_show_sourcelink = True + +# If true, "Created using Sphinx" is shown in the HTML footer. Default is True. +# html_show_sphinx = True + +# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. +# html_show_copyright = True + +# If true, an OpenSearch description file will be output, and all pages will +# contain a tag referring to it. The value of this option must be the +# base URL from which the finished HTML is served. +# html_use_opensearch = '' + +# This is the file name suffix for HTML files (e.g. ".xhtml"). +# html_file_suffix = None + +# Language to be used for generating the HTML full-text search index. +# Sphinx supports the following languages: +# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja' +# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr' +# html_search_language = 'en' + +# A dictionary with options for the search language support, empty by default. +# Now only 'ja' uses this config value +# html_search_options = {'type': 'default'} + +# The name of a javascript file (relative to the configuration directory) that +# implements a search results scorer. If empty, the default will be used. +# html_search_scorer = 'scorer.js' + +# Output file base name for HTML help builder. +htmlhelp_basename = "google-cloud-developerconnect-doc" + +# -- Options for warnings ------------------------------------------------------ + + +suppress_warnings = [ + # Temporarily suppress this to avoid "more than one target found for + # cross-reference" warning, which are intractable for us to avoid while in + # a mono-repo. + # See https://github.com/sphinx-doc/sphinx/blob + # /2a65ffeef5c107c19084fabdd706cdff3f52d93c/sphinx/domains/python.py#L843 + "ref.python" +] + +# -- Options for LaTeX output --------------------------------------------- + +latex_elements = { + # The paper size ('letterpaper' or 'a4paper'). + #'papersize': 'letterpaper', + # The font size ('10pt', '11pt' or '12pt'). + #'pointsize': '10pt', + # Additional stuff for the LaTeX preamble. + #'preamble': '', + # Latex figure (float) alignment + #'figure_align': 'htbp', +} + +# Grouping the document tree into LaTeX files. List of tuples +# (source start file, target name, title, +# author, documentclass [howto, manual, or own class]). +latex_documents = [ + ( + root_doc, + "google-cloud-developerconnect.tex", + "google-cloud-developerconnect Documentation", + author, + "manual", + ) +] + +# The name of an image file (relative to this directory) to place at the top of +# the title page. +# latex_logo = None + +# For "manual" documents, if this is true, then toplevel headings are parts, +# not chapters. 
+# latex_use_parts = False + +# If true, show page references after internal links. +# latex_show_pagerefs = False + +# If true, show URL addresses after external links. +# latex_show_urls = False + +# Documents to append as an appendix to all manuals. +# latex_appendices = [] + +# If false, no module index is generated. +# latex_domain_indices = True + + +# -- Options for manual page output --------------------------------------- + +# One entry per manual page. List of tuples +# (source start file, name, description, authors, manual section). +man_pages = [ + ( + root_doc, + "google-cloud-developerconnect", + "google-cloud-developerconnect Documentation", + [author], + 1, + ) +] + +# If true, show URL addresses after external links. +# man_show_urls = False + + +# -- Options for Texinfo output ------------------------------------------- + +# Grouping the document tree into Texinfo files. List of tuples +# (source start file, target name, title, author, +# dir menu entry, description, category) +texinfo_documents = [ + ( + root_doc, + "google-cloud-developerconnect", + "google-cloud-developerconnect Documentation", + author, + "google-cloud-developerconnect", + "google-cloud-developerconnect Library", + "APIs", + ) +] + +# Documents to append as an appendix to all manuals. +# texinfo_appendices = [] + +# If false, no module index is generated. +# texinfo_domain_indices = True + +# How to display URL addresses: 'footnote', 'no', or 'inline'. +# texinfo_show_urls = 'footnote' + +# If true, do not generate a @detailmenu in the "Top" node's menu. +# texinfo_no_detailmenu = False + + +# Example configuration for intersphinx: refer to the Python standard library. +intersphinx_mapping = { + "python": ("https://python.readthedocs.org/en/latest/", None), + "google-auth": ("https://googleapis.dev/python/google-auth/latest/", None), + "google.api_core": ( + "https://googleapis.dev/python/google-api-core/latest/", + None, + ), + "grpc": ("https://grpc.github.io/grpc/python/", None), + "proto-plus": ("https://proto-plus-python.readthedocs.io/en/latest/", None), + "protobuf": ("https://googleapis.dev/python/protobuf/latest/", None), +} + + +# Napoleon settings +napoleon_google_docstring = True +napoleon_numpy_docstring = True +napoleon_include_private_with_doc = False +napoleon_include_special_with_doc = True +napoleon_use_admonition_for_examples = False +napoleon_use_admonition_for_notes = False +napoleon_use_admonition_for_references = False +napoleon_use_ivar = False +napoleon_use_param = True +napoleon_use_rtype = True diff --git a/packages/google-cloud-developerconnect/docs/developerconnect_v1/developer_connect.rst b/packages/google-cloud-developerconnect/docs/developerconnect_v1/developer_connect.rst new file mode 100644 index 000000000000..4c8751c24ac6 --- /dev/null +++ b/packages/google-cloud-developerconnect/docs/developerconnect_v1/developer_connect.rst @@ -0,0 +1,10 @@ +DeveloperConnect +---------------------------------- + +.. automodule:: google.cloud.developerconnect_v1.services.developer_connect + :members: + :inherited-members: + +.. 
automodule:: google.cloud.developerconnect_v1.services.developer_connect.pagers + :members: + :inherited-members: diff --git a/packages/google-cloud-developerconnect/docs/developerconnect_v1/services_.rst b/packages/google-cloud-developerconnect/docs/developerconnect_v1/services_.rst new file mode 100644 index 000000000000..7519636a15c1 --- /dev/null +++ b/packages/google-cloud-developerconnect/docs/developerconnect_v1/services_.rst @@ -0,0 +1,6 @@ +Services for Google Cloud Developerconnect v1 API +================================================= +.. toctree:: + :maxdepth: 2 + + developer_connect diff --git a/packages/google-cloud-developerconnect/docs/developerconnect_v1/types_.rst b/packages/google-cloud-developerconnect/docs/developerconnect_v1/types_.rst new file mode 100644 index 000000000000..6a2111ea407c --- /dev/null +++ b/packages/google-cloud-developerconnect/docs/developerconnect_v1/types_.rst @@ -0,0 +1,6 @@ +Types for Google Cloud Developerconnect v1 API +============================================== + +.. automodule:: google.cloud.developerconnect_v1.types + :members: + :show-inheritance: diff --git a/packages/google-cloud-developerconnect/docs/index.rst b/packages/google-cloud-developerconnect/docs/index.rst new file mode 100644 index 000000000000..be51b2bd530f --- /dev/null +++ b/packages/google-cloud-developerconnect/docs/index.rst @@ -0,0 +1,28 @@ +.. include:: README.rst + +.. include:: multiprocessing.rst + + +API Reference +------------- +.. toctree:: + :maxdepth: 2 + + developerconnect_v1/services_ + developerconnect_v1/types_ + + +Changelog +--------- + +For a list of all ``google-cloud-developerconnect`` releases: + +.. toctree:: + :maxdepth: 2 + + CHANGELOG + +.. toctree:: + :hidden: + + summary_overview.md diff --git a/packages/google-cloud-developerconnect/docs/multiprocessing.rst b/packages/google-cloud-developerconnect/docs/multiprocessing.rst new file mode 100644 index 000000000000..536d17b2ea65 --- /dev/null +++ b/packages/google-cloud-developerconnect/docs/multiprocessing.rst @@ -0,0 +1,7 @@ +.. note:: + + Because this client uses :mod:`grpc` library, it is safe to + share instances across threads. In multiprocessing scenarios, the best + practice is to create client instances *after* the invocation of + :func:`os.fork` by :class:`multiprocessing.pool.Pool` or + :class:`multiprocessing.Process`. diff --git a/packages/google-cloud-developerconnect/docs/summary_overview.md b/packages/google-cloud-developerconnect/docs/summary_overview.md new file mode 100644 index 000000000000..ba17242359a3 --- /dev/null +++ b/packages/google-cloud-developerconnect/docs/summary_overview.md @@ -0,0 +1,22 @@ +[ +This is a templated file. Adding content to this file may result in it being +reverted. Instead, if you want to place additional content, create an +"overview_content.md" file in `docs/` directory. The Sphinx tool will +pick up on the content and merge the content. +]: # + +# Developer Connect API API + +Overview of the APIs available for Developer Connect API API. + +## All entries + +Classes, methods and properties & attributes for +Developer Connect API API. 
+ +[classes](https://cloud.google.com/python/docs/reference/google-cloud-developerconnect/latest/summary_class.html) + +[methods](https://cloud.google.com/python/docs/reference/google-cloud-developerconnect/latest/summary_method.html) + +[properties and +attributes](https://cloud.google.com/python/docs/reference/google-cloud-developerconnect/latest/summary_property.html) diff --git a/packages/google-cloud-developerconnect/google/cloud/developerconnect/__init__.py b/packages/google-cloud-developerconnect/google/cloud/developerconnect/__init__.py new file mode 100644 index 000000000000..d60e51897de6 --- /dev/null +++ b/packages/google-cloud-developerconnect/google/cloud/developerconnect/__init__.py @@ -0,0 +1,89 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from google.cloud.developerconnect import gapic_version as package_version + +__version__ = package_version.__version__ + + +from google.cloud.developerconnect_v1.services.developer_connect.async_client import ( + DeveloperConnectAsyncClient, +) +from google.cloud.developerconnect_v1.services.developer_connect.client import ( + DeveloperConnectClient, +) +from google.cloud.developerconnect_v1.types.developer_connect import ( + Connection, + CreateConnectionRequest, + CreateGitRepositoryLinkRequest, + DeleteConnectionRequest, + DeleteGitRepositoryLinkRequest, + FetchGitHubInstallationsRequest, + FetchGitHubInstallationsResponse, + FetchGitRefsRequest, + FetchGitRefsResponse, + FetchLinkableGitRepositoriesRequest, + FetchLinkableGitRepositoriesResponse, + FetchReadTokenRequest, + FetchReadTokenResponse, + FetchReadWriteTokenRequest, + FetchReadWriteTokenResponse, + GetConnectionRequest, + GetGitRepositoryLinkRequest, + GitHubConfig, + GitRepositoryLink, + InstallationState, + LinkableGitRepository, + ListConnectionsRequest, + ListConnectionsResponse, + ListGitRepositoryLinksRequest, + ListGitRepositoryLinksResponse, + OAuthCredential, + OperationMetadata, + UpdateConnectionRequest, +) + +__all__ = ( + "DeveloperConnectClient", + "DeveloperConnectAsyncClient", + "Connection", + "CreateConnectionRequest", + "CreateGitRepositoryLinkRequest", + "DeleteConnectionRequest", + "DeleteGitRepositoryLinkRequest", + "FetchGitHubInstallationsRequest", + "FetchGitHubInstallationsResponse", + "FetchGitRefsRequest", + "FetchGitRefsResponse", + "FetchLinkableGitRepositoriesRequest", + "FetchLinkableGitRepositoriesResponse", + "FetchReadTokenRequest", + "FetchReadTokenResponse", + "FetchReadWriteTokenRequest", + "FetchReadWriteTokenResponse", + "GetConnectionRequest", + "GetGitRepositoryLinkRequest", + "GitHubConfig", + "GitRepositoryLink", + "InstallationState", + "LinkableGitRepository", + "ListConnectionsRequest", + "ListConnectionsResponse", + "ListGitRepositoryLinksRequest", + 
"ListGitRepositoryLinksResponse", + "OAuthCredential", + "OperationMetadata", + "UpdateConnectionRequest", +) diff --git a/packages/google-maps-mapsplatformdatasets/google/maps/mapsplatformdatasets_v1alpha/gapic_version.py b/packages/google-cloud-developerconnect/google/cloud/developerconnect/gapic_version.py similarity index 92% rename from packages/google-maps-mapsplatformdatasets/google/maps/mapsplatformdatasets_v1alpha/gapic_version.py rename to packages/google-cloud-developerconnect/google/cloud/developerconnect/gapic_version.py index 558c8aab67c5..33d37a7b677b 100644 --- a/packages/google-maps-mapsplatformdatasets/google/maps/mapsplatformdatasets_v1alpha/gapic_version.py +++ b/packages/google-cloud-developerconnect/google/cloud/developerconnect/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.0.0" # {x-release-please-version} +__version__ = "0.1.0" # {x-release-please-version} diff --git a/packages/google-cloud-developerconnect/google/cloud/developerconnect/py.typed b/packages/google-cloud-developerconnect/google/cloud/developerconnect/py.typed new file mode 100644 index 000000000000..d2ad2661d0d8 --- /dev/null +++ b/packages/google-cloud-developerconnect/google/cloud/developerconnect/py.typed @@ -0,0 +1,2 @@ +# Marker file for PEP 561. +# The google-cloud-developerconnect package uses inline types. diff --git a/packages/google-cloud-developerconnect/google/cloud/developerconnect_v1/__init__.py b/packages/google-cloud-developerconnect/google/cloud/developerconnect_v1/__init__.py new file mode 100644 index 000000000000..04905c940ad8 --- /dev/null +++ b/packages/google-cloud-developerconnect/google/cloud/developerconnect_v1/__init__.py @@ -0,0 +1,87 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from google.cloud.developerconnect_v1 import gapic_version as package_version + +__version__ = package_version.__version__ + + +from .services.developer_connect import ( + DeveloperConnectAsyncClient, + DeveloperConnectClient, +) +from .types.developer_connect import ( + Connection, + CreateConnectionRequest, + CreateGitRepositoryLinkRequest, + DeleteConnectionRequest, + DeleteGitRepositoryLinkRequest, + FetchGitHubInstallationsRequest, + FetchGitHubInstallationsResponse, + FetchGitRefsRequest, + FetchGitRefsResponse, + FetchLinkableGitRepositoriesRequest, + FetchLinkableGitRepositoriesResponse, + FetchReadTokenRequest, + FetchReadTokenResponse, + FetchReadWriteTokenRequest, + FetchReadWriteTokenResponse, + GetConnectionRequest, + GetGitRepositoryLinkRequest, + GitHubConfig, + GitRepositoryLink, + InstallationState, + LinkableGitRepository, + ListConnectionsRequest, + ListConnectionsResponse, + ListGitRepositoryLinksRequest, + ListGitRepositoryLinksResponse, + OAuthCredential, + OperationMetadata, + UpdateConnectionRequest, +) + +__all__ = ( + "DeveloperConnectAsyncClient", + "Connection", + "CreateConnectionRequest", + "CreateGitRepositoryLinkRequest", + "DeleteConnectionRequest", + "DeleteGitRepositoryLinkRequest", + "DeveloperConnectClient", + "FetchGitHubInstallationsRequest", + "FetchGitHubInstallationsResponse", + "FetchGitRefsRequest", + "FetchGitRefsResponse", + "FetchLinkableGitRepositoriesRequest", + "FetchLinkableGitRepositoriesResponse", + "FetchReadTokenRequest", + "FetchReadTokenResponse", + "FetchReadWriteTokenRequest", + "FetchReadWriteTokenResponse", + "GetConnectionRequest", + "GetGitRepositoryLinkRequest", + "GitHubConfig", + "GitRepositoryLink", + "InstallationState", + "LinkableGitRepository", + "ListConnectionsRequest", + "ListConnectionsResponse", + "ListGitRepositoryLinksRequest", + "ListGitRepositoryLinksResponse", + "OAuthCredential", + "OperationMetadata", + "UpdateConnectionRequest", +) diff --git a/packages/google-cloud-developerconnect/google/cloud/developerconnect_v1/gapic_metadata.json b/packages/google-cloud-developerconnect/google/cloud/developerconnect_v1/gapic_metadata.json new file mode 100644 index 000000000000..a89cd3ee5c6a --- /dev/null +++ b/packages/google-cloud-developerconnect/google/cloud/developerconnect_v1/gapic_metadata.json @@ -0,0 +1,238 @@ + { + "comment": "This file maps proto services/RPCs to the corresponding library clients/methods", + "language": "python", + "libraryPackage": "google.cloud.developerconnect_v1", + "protoPackage": "google.cloud.developerconnect.v1", + "schema": "1.0", + "services": { + "DeveloperConnect": { + "clients": { + "grpc": { + "libraryClient": "DeveloperConnectClient", + "rpcs": { + "CreateConnection": { + "methods": [ + "create_connection" + ] + }, + "CreateGitRepositoryLink": { + "methods": [ + "create_git_repository_link" + ] + }, + "DeleteConnection": { + "methods": [ + "delete_connection" + ] + }, + "DeleteGitRepositoryLink": { + "methods": [ + "delete_git_repository_link" + ] + }, + "FetchGitHubInstallations": { + "methods": [ + "fetch_git_hub_installations" + ] + }, + "FetchGitRefs": { + "methods": [ + "fetch_git_refs" + ] + }, + "FetchLinkableGitRepositories": { + "methods": [ + "fetch_linkable_git_repositories" + ] + }, + "FetchReadToken": { + "methods": [ + "fetch_read_token" + ] + }, + "FetchReadWriteToken": { + "methods": [ + "fetch_read_write_token" + ] + }, + "GetConnection": { + "methods": [ + "get_connection" + ] + }, + "GetGitRepositoryLink": { + "methods": [ + "get_git_repository_link" + ] 
+ }, + "ListConnections": { + "methods": [ + "list_connections" + ] + }, + "ListGitRepositoryLinks": { + "methods": [ + "list_git_repository_links" + ] + }, + "UpdateConnection": { + "methods": [ + "update_connection" + ] + } + } + }, + "grpc-async": { + "libraryClient": "DeveloperConnectAsyncClient", + "rpcs": { + "CreateConnection": { + "methods": [ + "create_connection" + ] + }, + "CreateGitRepositoryLink": { + "methods": [ + "create_git_repository_link" + ] + }, + "DeleteConnection": { + "methods": [ + "delete_connection" + ] + }, + "DeleteGitRepositoryLink": { + "methods": [ + "delete_git_repository_link" + ] + }, + "FetchGitHubInstallations": { + "methods": [ + "fetch_git_hub_installations" + ] + }, + "FetchGitRefs": { + "methods": [ + "fetch_git_refs" + ] + }, + "FetchLinkableGitRepositories": { + "methods": [ + "fetch_linkable_git_repositories" + ] + }, + "FetchReadToken": { + "methods": [ + "fetch_read_token" + ] + }, + "FetchReadWriteToken": { + "methods": [ + "fetch_read_write_token" + ] + }, + "GetConnection": { + "methods": [ + "get_connection" + ] + }, + "GetGitRepositoryLink": { + "methods": [ + "get_git_repository_link" + ] + }, + "ListConnections": { + "methods": [ + "list_connections" + ] + }, + "ListGitRepositoryLinks": { + "methods": [ + "list_git_repository_links" + ] + }, + "UpdateConnection": { + "methods": [ + "update_connection" + ] + } + } + }, + "rest": { + "libraryClient": "DeveloperConnectClient", + "rpcs": { + "CreateConnection": { + "methods": [ + "create_connection" + ] + }, + "CreateGitRepositoryLink": { + "methods": [ + "create_git_repository_link" + ] + }, + "DeleteConnection": { + "methods": [ + "delete_connection" + ] + }, + "DeleteGitRepositoryLink": { + "methods": [ + "delete_git_repository_link" + ] + }, + "FetchGitHubInstallations": { + "methods": [ + "fetch_git_hub_installations" + ] + }, + "FetchGitRefs": { + "methods": [ + "fetch_git_refs" + ] + }, + "FetchLinkableGitRepositories": { + "methods": [ + "fetch_linkable_git_repositories" + ] + }, + "FetchReadToken": { + "methods": [ + "fetch_read_token" + ] + }, + "FetchReadWriteToken": { + "methods": [ + "fetch_read_write_token" + ] + }, + "GetConnection": { + "methods": [ + "get_connection" + ] + }, + "GetGitRepositoryLink": { + "methods": [ + "get_git_repository_link" + ] + }, + "ListConnections": { + "methods": [ + "list_connections" + ] + }, + "ListGitRepositoryLinks": { + "methods": [ + "list_git_repository_links" + ] + }, + "UpdateConnection": { + "methods": [ + "update_connection" + ] + } + } + } + } + } + } +} diff --git a/packages/google-cloud-developerconnect/google/cloud/developerconnect_v1/gapic_version.py b/packages/google-cloud-developerconnect/google/cloud/developerconnect_v1/gapic_version.py new file mode 100644 index 000000000000..33d37a7b677b --- /dev/null +++ b/packages/google-cloud-developerconnect/google/cloud/developerconnect_v1/gapic_version.py @@ -0,0 +1,16 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +__version__ = "0.1.0" # {x-release-please-version} diff --git a/packages/google-cloud-developerconnect/google/cloud/developerconnect_v1/py.typed b/packages/google-cloud-developerconnect/google/cloud/developerconnect_v1/py.typed new file mode 100644 index 000000000000..d2ad2661d0d8 --- /dev/null +++ b/packages/google-cloud-developerconnect/google/cloud/developerconnect_v1/py.typed @@ -0,0 +1,2 @@ +# Marker file for PEP 561. +# The google-cloud-developerconnect package uses inline types. diff --git a/packages/google-maps-mapsplatformdatasets/google/maps/mapsplatformdatasets_v1alpha/services/__init__.py b/packages/google-cloud-developerconnect/google/cloud/developerconnect_v1/services/__init__.py similarity index 100% rename from packages/google-maps-mapsplatformdatasets/google/maps/mapsplatformdatasets_v1alpha/services/__init__.py rename to packages/google-cloud-developerconnect/google/cloud/developerconnect_v1/services/__init__.py diff --git a/packages/google-maps-mapsplatformdatasets/google/maps/mapsplatformdatasets_v1alpha/services/maps_platform_datasets_v1_alpha/__init__.py b/packages/google-cloud-developerconnect/google/cloud/developerconnect_v1/services/developer_connect/__init__.py similarity index 74% rename from packages/google-maps-mapsplatformdatasets/google/maps/mapsplatformdatasets_v1alpha/services/maps_platform_datasets_v1_alpha/__init__.py rename to packages/google-cloud-developerconnect/google/cloud/developerconnect_v1/services/developer_connect/__init__.py index 4f87ad61cb9b..13b15791a613 100644 --- a/packages/google-maps-mapsplatformdatasets/google/maps/mapsplatformdatasets_v1alpha/services/maps_platform_datasets_v1_alpha/__init__.py +++ b/packages/google-cloud-developerconnect/google/cloud/developerconnect_v1/services/developer_connect/__init__.py @@ -13,10 +13,10 @@ # See the License for the specific language governing permissions and # limitations under the License. # -from .async_client import MapsPlatformDatasetsV1AlphaAsyncClient -from .client import MapsPlatformDatasetsV1AlphaClient +from .async_client import DeveloperConnectAsyncClient +from .client import DeveloperConnectClient __all__ = ( - "MapsPlatformDatasetsV1AlphaClient", - "MapsPlatformDatasetsV1AlphaAsyncClient", + "DeveloperConnectClient", + "DeveloperConnectAsyncClient", ) diff --git a/packages/google-cloud-developerconnect/google/cloud/developerconnect_v1/services/developer_connect/async_client.py b/packages/google-cloud-developerconnect/google/cloud/developerconnect_v1/services/developer_connect/async_client.py new file mode 100644 index 000000000000..eaaa1af8250d --- /dev/null +++ b/packages/google-cloud-developerconnect/google/cloud/developerconnect_v1/services/developer_connect/async_client.py @@ -0,0 +1,2367 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
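
The new `async_client.py` below mirrors the synchronous surface with coroutine methods, and the generated snippets in its docstrings define `async def sample_*()` functions. A brief sketch, assuming Application Default Credentials and a hypothetical connection name, of driving one of those coroutines with `asyncio`:

```python
# Sketch only: the connection name is a hypothetical placeholder and ADC is
# assumed for credentials.
import asyncio

from google.cloud import developerconnect_v1


async def main() -> None:
    client = developerconnect_v1.DeveloperConnectAsyncClient()
    request = developerconnect_v1.GetConnectionRequest(
        name="projects/my-project/locations/us-central1/connections/my-connection",
    )
    connection = await client.get_connection(request=request)
    print(connection.name)


if __name__ == "__main__":
    asyncio.run(main())
```
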
+# +from collections import OrderedDict +import functools +import re +from typing import ( + Callable, + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, +) + +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry_async as retries +from google.api_core.client_options import ClientOptions +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.developerconnect_v1 import gapic_version as package_version + +try: + OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.AsyncRetry, object, None] # type: ignore + +from google.api_core import operation # type: ignore +from google.api_core import operation_async # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore + +from google.cloud.developerconnect_v1.services.developer_connect import pagers +from google.cloud.developerconnect_v1.types import developer_connect + +from .client import DeveloperConnectClient +from .transports.base import DEFAULT_CLIENT_INFO, DeveloperConnectTransport +from .transports.grpc_asyncio import DeveloperConnectGrpcAsyncIOTransport + + +class DeveloperConnectAsyncClient: + """Service describing handlers for resources""" + + _client: DeveloperConnectClient + + # Copy defaults from the synchronous client for use here. + # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. 
+ DEFAULT_ENDPOINT = DeveloperConnectClient.DEFAULT_ENDPOINT + DEFAULT_MTLS_ENDPOINT = DeveloperConnectClient.DEFAULT_MTLS_ENDPOINT + _DEFAULT_ENDPOINT_TEMPLATE = DeveloperConnectClient._DEFAULT_ENDPOINT_TEMPLATE + _DEFAULT_UNIVERSE = DeveloperConnectClient._DEFAULT_UNIVERSE + + connection_path = staticmethod(DeveloperConnectClient.connection_path) + parse_connection_path = staticmethod(DeveloperConnectClient.parse_connection_path) + git_repository_link_path = staticmethod( + DeveloperConnectClient.git_repository_link_path + ) + parse_git_repository_link_path = staticmethod( + DeveloperConnectClient.parse_git_repository_link_path + ) + secret_version_path = staticmethod(DeveloperConnectClient.secret_version_path) + parse_secret_version_path = staticmethod( + DeveloperConnectClient.parse_secret_version_path + ) + common_billing_account_path = staticmethod( + DeveloperConnectClient.common_billing_account_path + ) + parse_common_billing_account_path = staticmethod( + DeveloperConnectClient.parse_common_billing_account_path + ) + common_folder_path = staticmethod(DeveloperConnectClient.common_folder_path) + parse_common_folder_path = staticmethod( + DeveloperConnectClient.parse_common_folder_path + ) + common_organization_path = staticmethod( + DeveloperConnectClient.common_organization_path + ) + parse_common_organization_path = staticmethod( + DeveloperConnectClient.parse_common_organization_path + ) + common_project_path = staticmethod(DeveloperConnectClient.common_project_path) + parse_common_project_path = staticmethod( + DeveloperConnectClient.parse_common_project_path + ) + common_location_path = staticmethod(DeveloperConnectClient.common_location_path) + parse_common_location_path = staticmethod( + DeveloperConnectClient.parse_common_location_path + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + DeveloperConnectAsyncClient: The constructed client. + """ + return DeveloperConnectClient.from_service_account_info.__func__(DeveloperConnectAsyncClient, info, *args, **kwargs) # type: ignore + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + DeveloperConnectAsyncClient: The constructed client. + """ + return DeveloperConnectClient.from_service_account_file.__func__(DeveloperConnectAsyncClient, filename, *args, **kwargs) # type: ignore + + from_service_account_json = from_service_account_file + + @classmethod + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[ClientOptions] = None + ): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. 
+ + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + return DeveloperConnectClient.get_mtls_endpoint_and_cert_source(client_options) # type: ignore + + @property + def transport(self) -> DeveloperConnectTransport: + """Returns the transport used by the client instance. + + Returns: + DeveloperConnectTransport: The transport used by the client instance. + """ + return self._client.transport + + @property + def api_endpoint(self): + """Return the API endpoint used by the client instance. + + Returns: + str: The API endpoint used by the client instance. + """ + return self._client._api_endpoint + + @property + def universe_domain(self) -> str: + """Return the universe domain used by the client instance. + + Returns: + str: The universe domain used + by the client instance. + """ + return self._client._universe_domain + + get_transport_class = functools.partial( + type(DeveloperConnectClient).get_transport_class, type(DeveloperConnectClient) + ) + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[ + Union[ + str, DeveloperConnectTransport, Callable[..., DeveloperConnectTransport] + ] + ] = "grpc_asyncio", + client_options: Optional[ClientOptions] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the developer connect async client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Optional[Union[str,DeveloperConnectTransport,Callable[..., DeveloperConnectTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport to use. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the DeveloperConnectTransport constructor. + If set to None, a transport is chosen automatically. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): + Custom options for the client. + + 1. The ``api_endpoint`` property can be used to override the + default endpoint provided by the client when ``transport`` is + not explicitly provided. 
Only if this property is not set and + ``transport`` was not explicitly provided, the endpoint is + determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment + variable, which have one of the following values: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto-switch to the + default mTLS endpoint if client certificate is present; this is + the default value). + + 2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide a client certificate for mTLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + 3. The ``universe_domain`` property can be used to override the + default "googleapis.com" universe. Note that ``api_endpoint`` + property still takes precedence; and ``universe_domain`` is + currently not supported for mTLS. + + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + """ + self._client = DeveloperConnectClient( + credentials=credentials, + transport=transport, + client_options=client_options, + client_info=client_info, + ) + + async def list_connections( + self, + request: Optional[Union[developer_connect.ListConnectionsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListConnectionsAsyncPager: + r"""Lists Connections in a given project and location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import developerconnect_v1 + + async def sample_list_connections(): + # Create a client + client = developerconnect_v1.DeveloperConnectAsyncClient() + + # Initialize request argument(s) + request = developerconnect_v1.ListConnectionsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_connections(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.developerconnect_v1.types.ListConnectionsRequest, dict]]): + The request object. Message for requesting list of + Connections + parent (:class:`str`): + Required. Parent value for + ListConnectionsRequest + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.developerconnect_v1.services.developer_connect.pagers.ListConnectionsAsyncPager: + Message for response to listing + Connections + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, developer_connect.ListConnectionsRequest): + request = developer_connect.ListConnectionsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.list_connections + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListConnectionsAsyncPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_connection( + self, + request: Optional[Union[developer_connect.GetConnectionRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> developer_connect.Connection: + r"""Gets details of a single Connection. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import developerconnect_v1 + + async def sample_get_connection(): + # Create a client + client = developerconnect_v1.DeveloperConnectAsyncClient() + + # Initialize request argument(s) + request = developerconnect_v1.GetConnectionRequest( + name="name_value", + ) + + # Make the request + response = await client.get_connection(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.developerconnect_v1.types.GetConnectionRequest, dict]]): + The request object. Message for getting a Connection + name (:class:`str`): + Required. 
Name of the resource + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.developerconnect_v1.types.Connection: + Message describing Connection object + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, developer_connect.GetConnectionRequest): + request = developer_connect.GetConnectionRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.get_connection + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def create_connection( + self, + request: Optional[ + Union[developer_connect.CreateConnectionRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + connection: Optional[developer_connect.Connection] = None, + connection_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Creates a new Connection in a given project and + location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import developerconnect_v1 + + async def sample_create_connection(): + # Create a client + client = developerconnect_v1.DeveloperConnectAsyncClient() + + # Initialize request argument(s) + connection = developerconnect_v1.Connection() + connection.github_config.github_app = "FIREBASE" + + request = developerconnect_v1.CreateConnectionRequest( + parent="parent_value", + connection_id="connection_id_value", + connection=connection, + ) + + # Make the request + operation = client.create_connection(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.developerconnect_v1.types.CreateConnectionRequest, dict]]): + The request object. Message for creating a Connection + parent (:class:`str`): + Required. Value for parent. + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + connection (:class:`google.cloud.developerconnect_v1.types.Connection`): + Required. The resource being created + This corresponds to the ``connection`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + connection_id (:class:`str`): + Required. Id of the requesting object If auto-generating + Id server-side, remove this field and connection_id from + the method_signature of Create RPC + + This corresponds to the ``connection_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.developerconnect_v1.types.Connection` + Message describing Connection object + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, connection, connection_id]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, developer_connect.CreateConnectionRequest): + request = developer_connect.CreateConnectionRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if connection is not None: + request.connection = connection + if connection_id is not None: + request.connection_id = connection_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = self._client._transport._wrapped_methods[ + self._client._transport.create_connection + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + developer_connect.Connection, + metadata_type=developer_connect.OperationMetadata, + ) + + # Done; return the response. + return response + + async def update_connection( + self, + request: Optional[ + Union[developer_connect.UpdateConnectionRequest, dict] + ] = None, + *, + connection: Optional[developer_connect.Connection] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Updates the parameters of a single Connection. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import developerconnect_v1 + + async def sample_update_connection(): + # Create a client + client = developerconnect_v1.DeveloperConnectAsyncClient() + + # Initialize request argument(s) + connection = developerconnect_v1.Connection() + connection.github_config.github_app = "FIREBASE" + + request = developerconnect_v1.UpdateConnectionRequest( + connection=connection, + ) + + # Make the request + operation = client.update_connection(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.developerconnect_v1.types.UpdateConnectionRequest, dict]]): + The request object. Message for updating a Connection + connection (:class:`google.cloud.developerconnect_v1.types.Connection`): + Required. The resource being updated + This corresponds to the ``connection`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): + Required. Field mask is used to specify the fields to be + overwritten in the Connection resource by the update. + The fields specified in the update_mask are relative to + the resource, not the full request. A field will be + overwritten if it is in the mask. If the user does not + provide a mask then all fields will be overwritten. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.developerconnect_v1.types.Connection` + Message describing Connection object + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([connection, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, developer_connect.UpdateConnectionRequest): + request = developer_connect.UpdateConnectionRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if connection is not None: + request.connection = connection + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.update_connection + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("connection.name", request.connection.name),) + ), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + developer_connect.Connection, + metadata_type=developer_connect.OperationMetadata, + ) + + # Done; return the response. + return response + + async def delete_connection( + self, + request: Optional[ + Union[developer_connect.DeleteConnectionRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Deletes a single Connection. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import developerconnect_v1 + + async def sample_delete_connection(): + # Create a client + client = developerconnect_v1.DeveloperConnectAsyncClient() + + # Initialize request argument(s) + request = developerconnect_v1.DeleteConnectionRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_connection(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.developerconnect_v1.types.DeleteConnectionRequest, dict]]): + The request object. Message for deleting a Connection + name (:class:`str`): + Required. Name of the resource + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. A typical example is to + use it as the request or the response type of an API + method. For instance: + + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); + + } + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, developer_connect.DeleteConnectionRequest): + request = developer_connect.DeleteConnectionRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.delete_connection + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + empty_pb2.Empty, + metadata_type=developer_connect.OperationMetadata, + ) + + # Done; return the response. 
+ return response + + async def create_git_repository_link( + self, + request: Optional[ + Union[developer_connect.CreateGitRepositoryLinkRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + git_repository_link: Optional[developer_connect.GitRepositoryLink] = None, + git_repository_link_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Creates a GitRepositoryLink. Upon linking a Git + Repository, Developer Connect will configure the Git + Repository to send webhook events to Developer Connect. + Connections that use Firebase GitHub Application will + have events forwarded to the Firebase service. All other + Connections will have events forwarded to Cloud Build. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import developerconnect_v1 + + async def sample_create_git_repository_link(): + # Create a client + client = developerconnect_v1.DeveloperConnectAsyncClient() + + # Initialize request argument(s) + git_repository_link = developerconnect_v1.GitRepositoryLink() + git_repository_link.clone_uri = "clone_uri_value" + + request = developerconnect_v1.CreateGitRepositoryLinkRequest( + parent="parent_value", + git_repository_link=git_repository_link, + git_repository_link_id="git_repository_link_id_value", + ) + + # Make the request + operation = client.create_git_repository_link(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.developerconnect_v1.types.CreateGitRepositoryLinkRequest, dict]]): + The request object. Message for creating a + GitRepositoryLink + parent (:class:`str`): + Required. Value for parent. + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + git_repository_link (:class:`google.cloud.developerconnect_v1.types.GitRepositoryLink`): + Required. The resource being created + This corresponds to the ``git_repository_link`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + git_repository_link_id (:class:`str`): + Required. The ID to use for the repository, which will + become the final component of the repository's resource + name. This ID should be unique in the connection. Allows + alphanumeric characters and any of -._~%!$&'()*+,;=@. + + This corresponds to the ``git_repository_link_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. 
+ + The result type for the operation will be + :class:`google.cloud.developerconnect_v1.types.GitRepositoryLink` + Message describing the GitRepositoryLink object + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any( + [parent, git_repository_link, git_repository_link_id] + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, developer_connect.CreateGitRepositoryLinkRequest): + request = developer_connect.CreateGitRepositoryLinkRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if git_repository_link is not None: + request.git_repository_link = git_repository_link + if git_repository_link_id is not None: + request.git_repository_link_id = git_repository_link_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.create_git_repository_link + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + developer_connect.GitRepositoryLink, + metadata_type=developer_connect.OperationMetadata, + ) + + # Done; return the response. + return response + + async def delete_git_repository_link( + self, + request: Optional[ + Union[developer_connect.DeleteGitRepositoryLinkRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Deletes a single GitRepositoryLink. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import developerconnect_v1 + + async def sample_delete_git_repository_link(): + # Create a client + client = developerconnect_v1.DeveloperConnectAsyncClient() + + # Initialize request argument(s) + request = developerconnect_v1.DeleteGitRepositoryLinkRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_git_repository_link(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.developerconnect_v1.types.DeleteGitRepositoryLinkRequest, dict]]): + The request object. Message for deleting a + GitRepositoryLink + name (:class:`str`): + Required. Name of the resource + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. A typical example is to + use it as the request or the response type of an API + method. For instance: + + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); + + } + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, developer_connect.DeleteGitRepositoryLinkRequest): + request = developer_connect.DeleteGitRepositoryLinkRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.delete_git_repository_link + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. 
+ response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + empty_pb2.Empty, + metadata_type=developer_connect.OperationMetadata, + ) + + # Done; return the response. + return response + + async def list_git_repository_links( + self, + request: Optional[ + Union[developer_connect.ListGitRepositoryLinksRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListGitRepositoryLinksAsyncPager: + r"""Lists GitRepositoryLinks in a given project, + location, and connection. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import developerconnect_v1 + + async def sample_list_git_repository_links(): + # Create a client + client = developerconnect_v1.DeveloperConnectAsyncClient() + + # Initialize request argument(s) + request = developerconnect_v1.ListGitRepositoryLinksRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_git_repository_links(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.developerconnect_v1.types.ListGitRepositoryLinksRequest, dict]]): + The request object. Message for requesting a list of + GitRepositoryLinks + parent (:class:`str`): + Required. Parent value for + ListGitRepositoryLinksRequest + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.developerconnect_v1.services.developer_connect.pagers.ListGitRepositoryLinksAsyncPager: + Message for response to listing + GitRepositoryLinks + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, developer_connect.ListGitRepositoryLinksRequest): + request = developer_connect.ListGitRepositoryLinksRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
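+        # Looking the method up in `_wrapped_methods` (rather than calling the
+        # stub directly) means the per-method defaults configured on the
+        # transport apply whenever `retry`/`timeout` are left at
+        # gapic_v1.method.DEFAULT.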
+ rpc = self._client._transport._wrapped_methods[ + self._client._transport.list_git_repository_links + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListGitRepositoryLinksAsyncPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_git_repository_link( + self, + request: Optional[ + Union[developer_connect.GetGitRepositoryLinkRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> developer_connect.GitRepositoryLink: + r"""Gets details of a single GitRepositoryLink. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import developerconnect_v1 + + async def sample_get_git_repository_link(): + # Create a client + client = developerconnect_v1.DeveloperConnectAsyncClient() + + # Initialize request argument(s) + request = developerconnect_v1.GetGitRepositoryLinkRequest( + name="name_value", + ) + + # Make the request + response = await client.get_git_repository_link(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.developerconnect_v1.types.GetGitRepositoryLinkRequest, dict]]): + The request object. Message for getting a + GitRepositoryLink + name (:class:`str`): + Required. Name of the resource + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.developerconnect_v1.types.GitRepositoryLink: + Message describing the + GitRepositoryLink object + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
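+        # A dict is coerced through the proto-plus request constructor; an
+        # already-constructed GetGitRepositoryLinkRequest passes through
+        # unchanged.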
+ if not isinstance(request, developer_connect.GetGitRepositoryLinkRequest): + request = developer_connect.GetGitRepositoryLinkRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.get_git_repository_link + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def fetch_read_write_token( + self, + request: Optional[ + Union[developer_connect.FetchReadWriteTokenRequest, dict] + ] = None, + *, + git_repository_link: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> developer_connect.FetchReadWriteTokenResponse: + r"""Fetches read/write token of a given + gitRepositoryLink. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import developerconnect_v1 + + async def sample_fetch_read_write_token(): + # Create a client + client = developerconnect_v1.DeveloperConnectAsyncClient() + + # Initialize request argument(s) + request = developerconnect_v1.FetchReadWriteTokenRequest( + git_repository_link="git_repository_link_value", + ) + + # Make the request + response = await client.fetch_read_write_token(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.developerconnect_v1.types.FetchReadWriteTokenRequest, dict]]): + The request object. Message for fetching SCM read/write + token. + git_repository_link (:class:`str`): + Required. The resource name of the gitRepositoryLink in + the format + ``projects/*/locations/*/connections/*/gitRepositoryLinks/*``. + + This corresponds to the ``git_repository_link`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.developerconnect_v1.types.FetchReadWriteTokenResponse: + Message for responding to get + read/write token. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([git_repository_link]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, developer_connect.FetchReadWriteTokenRequest): + request = developer_connect.FetchReadWriteTokenRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if git_repository_link is not None: + request.git_repository_link = git_repository_link + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.fetch_read_write_token + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("git_repository_link", request.git_repository_link),) + ), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def fetch_read_token( + self, + request: Optional[Union[developer_connect.FetchReadTokenRequest, dict]] = None, + *, + git_repository_link: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> developer_connect.FetchReadTokenResponse: + r"""Fetches read token of a given gitRepositoryLink. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import developerconnect_v1 + + async def sample_fetch_read_token(): + # Create a client + client = developerconnect_v1.DeveloperConnectAsyncClient() + + # Initialize request argument(s) + request = developerconnect_v1.FetchReadTokenRequest( + git_repository_link="git_repository_link_value", + ) + + # Make the request + response = await client.fetch_read_token(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.developerconnect_v1.types.FetchReadTokenRequest, dict]]): + The request object. Message for fetching SCM read token. + git_repository_link (:class:`str`): + Required. The resource name of the gitRepositoryLink in + the format + ``projects/*/locations/*/connections/*/gitRepositoryLinks/*``. + + This corresponds to the ``git_repository_link`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.cloud.developerconnect_v1.types.FetchReadTokenResponse: + Message for responding to get read + token. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([git_repository_link]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, developer_connect.FetchReadTokenRequest): + request = developer_connect.FetchReadTokenRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if git_repository_link is not None: + request.git_repository_link = git_repository_link + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.fetch_read_token + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("git_repository_link", request.git_repository_link),) + ), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def fetch_linkable_git_repositories( + self, + request: Optional[ + Union[developer_connect.FetchLinkableGitRepositoriesRequest, dict] + ] = None, + *, + connection: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.FetchLinkableGitRepositoriesAsyncPager: + r"""FetchLinkableGitRepositories returns a list of git + repositories from an SCM that are available to be added + to a Connection. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import developerconnect_v1 + + async def sample_fetch_linkable_git_repositories(): + # Create a client + client = developerconnect_v1.DeveloperConnectAsyncClient() + + # Initialize request argument(s) + request = developerconnect_v1.FetchLinkableGitRepositoriesRequest( + connection="connection_value", + ) + + # Make the request + page_result = client.fetch_linkable_git_repositories(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.developerconnect_v1.types.FetchLinkableGitRepositoriesRequest, dict]]): + The request object. Request message for + FetchLinkableGitRepositoriesRequest. + connection (:class:`str`): + Required. The name of the Connection. Format: + ``projects/*/locations/*/connections/*``. 
+ + This corresponds to the ``connection`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.developerconnect_v1.services.developer_connect.pagers.FetchLinkableGitRepositoriesAsyncPager: + Response message for + FetchLinkableGitRepositories. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([connection]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance( + request, developer_connect.FetchLinkableGitRepositoriesRequest + ): + request = developer_connect.FetchLinkableGitRepositoriesRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if connection is not None: + request.connection = connection + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.fetch_linkable_git_repositories + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("connection", request.connection),) + ), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.FetchLinkableGitRepositoriesAsyncPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def fetch_git_hub_installations( + self, + request: Optional[ + Union[developer_connect.FetchGitHubInstallationsRequest, dict] + ] = None, + *, + connection: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> developer_connect.FetchGitHubInstallationsResponse: + r"""FetchGitHubInstallations returns the list of GitHub + Installations that are available to be added to a + Connection. For github.com, only installations + accessible to the authorizer token are returned. For + GitHub Enterprise, all installations are returned. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import developerconnect_v1 + + async def sample_fetch_git_hub_installations(): + # Create a client + client = developerconnect_v1.DeveloperConnectAsyncClient() + + # Initialize request argument(s) + request = developerconnect_v1.FetchGitHubInstallationsRequest( + connection="connection_value", + ) + + # Make the request + response = await client.fetch_git_hub_installations(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.developerconnect_v1.types.FetchGitHubInstallationsRequest, dict]]): + The request object. Request for fetching github + installations. + connection (:class:`str`): + Required. The resource name of the connection in the + format ``projects/*/locations/*/connections/*``. + + This corresponds to the ``connection`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.developerconnect_v1.types.FetchGitHubInstallationsResponse: + Response of fetching github + installations. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([connection]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, developer_connect.FetchGitHubInstallationsRequest): + request = developer_connect.FetchGitHubInstallationsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if connection is not None: + request.connection = connection + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.fetch_git_hub_installations + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("connection", request.connection),) + ), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
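+        # Unlike the paged fetch_* methods, this RPC is unary, so the full
+        # FetchGitHubInstallationsResponse is handed back as-is.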
+ return response + + async def fetch_git_refs( + self, + request: Optional[Union[developer_connect.FetchGitRefsRequest, dict]] = None, + *, + git_repository_link: Optional[str] = None, + ref_type: Optional[developer_connect.FetchGitRefsRequest.RefType] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.FetchGitRefsAsyncPager: + r"""Fetch the list of branches or tags for a given + repository. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import developerconnect_v1 + + async def sample_fetch_git_refs(): + # Create a client + client = developerconnect_v1.DeveloperConnectAsyncClient() + + # Initialize request argument(s) + request = developerconnect_v1.FetchGitRefsRequest( + git_repository_link="git_repository_link_value", + ref_type="BRANCH", + ) + + # Make the request + page_result = client.fetch_git_refs(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.developerconnect_v1.types.FetchGitRefsRequest, dict]]): + The request object. Request for fetching git refs. + git_repository_link (:class:`str`): + Required. The resource name of GitRepositoryLink in the + format + ``projects/*/locations/*/connections/*/gitRepositoryLinks/*``. + + This corresponds to the ``git_repository_link`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + ref_type (:class:`google.cloud.developerconnect_v1.types.FetchGitRefsRequest.RefType`): + Required. Type of refs to fetch. + This corresponds to the ``ref_type`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.developerconnect_v1.services.developer_connect.pagers.FetchGitRefsAsyncPager: + Response for fetching git refs. + + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([git_repository_link, ref_type]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, developer_connect.FetchGitRefsRequest): + request = developer_connect.FetchGitRefsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if git_repository_link is not None: + request.git_repository_link = git_repository_link + if ref_type is not None: + request.ref_type = ref_type + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.fetch_git_refs + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("git_repository_link", request.git_repository_link),) + ), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.FetchGitRefsAsyncPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_operations( + self, + request: Optional[operations_pb2.ListOperationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Lists operations that match the specified filter in the request. + + Args: + request (:class:`~.operations_pb2.ListOperationsRequest`): + The request object. Request message for + `ListOperations` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.ListOperationsResponse: + Response message for ``ListOperations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.ListOperationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_operations, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_operation( + self, + request: Optional[operations_pb2.GetOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. + + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def delete_operation( + self, + request: Optional[operations_pb2.DeleteOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a long-running operation. + + This method indicates that the client is no longer interested + in the operation result. It does not cancel the operation. + If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.DeleteOperationRequest`): + The request object. Request message for + `DeleteOperation` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.DeleteOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.delete_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def cancel_operation( + self, + request: Optional[operations_pb2.CancelOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Starts asynchronous cancellation on a long-running operation. + + The server makes a best effort to cancel the operation, but success + is not guaranteed. If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. 
+ + Args: + request (:class:`~.operations_pb2.CancelOperationRequest`): + The request object. Request message for + `CancelOperation` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.CancelOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.cancel_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def get_location( + self, + request: Optional[locations_pb2.GetLocationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.Location: + r"""Gets information about a location. + + Args: + request (:class:`~.location_pb2.GetLocationRequest`): + The request object. Request message for + `GetLocation` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.location_pb2.Location: + Location object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = locations_pb2.GetLocationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_location, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_locations( + self, + request: Optional[locations_pb2.ListLocationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.ListLocationsResponse: + r"""Lists information about the supported locations for this service. + + Args: + request (:class:`~.location_pb2.ListLocationsRequest`): + The request object. Request message for + `ListLocations` method. 
+ retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.location_pb2.ListLocationsResponse: + Response message for ``ListLocations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = locations_pb2.ListLocationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_locations, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def __aenter__(self) -> "DeveloperConnectAsyncClient": + return self + + async def __aexit__(self, exc_type, exc, tb): + await self.transport.close() + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("DeveloperConnectAsyncClient",) diff --git a/packages/google-cloud-developerconnect/google/cloud/developerconnect_v1/services/developer_connect/client.py b/packages/google-cloud-developerconnect/google/cloud/developerconnect_v1/services/developer_connect/client.py new file mode 100644 index 000000000000..c955694a5be0 --- /dev/null +++ b/packages/google-cloud-developerconnect/google/cloud/developerconnect_v1/services/developer_connect/client.py @@ -0,0 +1,2801 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +import os +import re +from typing import ( + Callable, + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, + cast, +) +import warnings + +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.developerconnect_v1 import gapic_version as package_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + +from google.api_core import operation # type: ignore +from google.api_core import operation_async # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore + +from google.cloud.developerconnect_v1.services.developer_connect import pagers +from google.cloud.developerconnect_v1.types import developer_connect + +from .transports.base import DEFAULT_CLIENT_INFO, DeveloperConnectTransport +from .transports.grpc import DeveloperConnectGrpcTransport +from .transports.grpc_asyncio import DeveloperConnectGrpcAsyncIOTransport +from .transports.rest import DeveloperConnectRestTransport + + +class DeveloperConnectClientMeta(type): + """Metaclass for the DeveloperConnect client. + + This provides class-level methods for building and retrieving + support objects (e.g. transport) without polluting the client instance + objects. + """ + + _transport_registry = ( + OrderedDict() + ) # type: Dict[str, Type[DeveloperConnectTransport]] + _transport_registry["grpc"] = DeveloperConnectGrpcTransport + _transport_registry["grpc_asyncio"] = DeveloperConnectGrpcAsyncIOTransport + _transport_registry["rest"] = DeveloperConnectRestTransport + + def get_transport_class( + cls, + label: Optional[str] = None, + ) -> Type[DeveloperConnectTransport]: + """Returns an appropriate transport class. + + Args: + label: The name of the desired transport. If none is + provided, then the first transport in the registry is used. + + Returns: + The transport class to use. + """ + # If a specific transport is requested, return that one. + if label: + return cls._transport_registry[label] + + # No transport is requested; return the default (that is, the first one + # in the dictionary). + return next(iter(cls._transport_registry.values())) + + +class DeveloperConnectClient(metaclass=DeveloperConnectClientMeta): + """Service describing handlers for resources""" + + @staticmethod + def _get_default_mtls_endpoint(api_endpoint): + """Converts api endpoint to mTLS endpoint. + + Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to + "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. + Args: + api_endpoint (Optional[str]): the api endpoint to convert. + Returns: + str: converted mTLS api endpoint. 
+        """
+        if not api_endpoint:
+            return api_endpoint
+
+        mtls_endpoint_re = re.compile(
+            r"(?P<name>[^.]+)(?P<mtls>\.mtls)?(?P<sandbox>\.sandbox)?(?P<googledomain>\.googleapis\.com)?"
+        )
+
+        m = mtls_endpoint_re.match(api_endpoint)
+        name, mtls, sandbox, googledomain = m.groups()
+        if mtls or not googledomain:
+            return api_endpoint
+
+        if sandbox:
+            return api_endpoint.replace(
+                "sandbox.googleapis.com", "mtls.sandbox.googleapis.com"
+            )
+
+        return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com")
+
+    # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead.
+    DEFAULT_ENDPOINT = "developerconnect.googleapis.com"
+    DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__(  # type: ignore
+        DEFAULT_ENDPOINT
+    )
+
+    _DEFAULT_ENDPOINT_TEMPLATE = "developerconnect.{UNIVERSE_DOMAIN}"
+    _DEFAULT_UNIVERSE = "googleapis.com"
+
+    @classmethod
+    def from_service_account_info(cls, info: dict, *args, **kwargs):
+        """Creates an instance of this client using the provided credentials
+            info.
+
+        Args:
+            info (dict): The service account private key info.
+            args: Additional arguments to pass to the constructor.
+            kwargs: Additional arguments to pass to the constructor.
+
+        Returns:
+            DeveloperConnectClient: The constructed client.
+        """
+        credentials = service_account.Credentials.from_service_account_info(info)
+        kwargs["credentials"] = credentials
+        return cls(*args, **kwargs)
+
+    @classmethod
+    def from_service_account_file(cls, filename: str, *args, **kwargs):
+        """Creates an instance of this client using the provided credentials
+            file.
+
+        Args:
+            filename (str): The path to the service account private key json
+                file.
+            args: Additional arguments to pass to the constructor.
+            kwargs: Additional arguments to pass to the constructor.
+
+        Returns:
+            DeveloperConnectClient: The constructed client.
+        """
+        credentials = service_account.Credentials.from_service_account_file(filename)
+        kwargs["credentials"] = credentials
+        return cls(*args, **kwargs)
+
+    from_service_account_json = from_service_account_file
+
+    @property
+    def transport(self) -> DeveloperConnectTransport:
+        """Returns the transport used by the client instance.
+
+        Returns:
+            DeveloperConnectTransport: The transport used by the client
+                instance.
+        """
+        return self._transport
+
+    @staticmethod
+    def connection_path(
+        project: str,
+        location: str,
+        connection: str,
+    ) -> str:
+        """Returns a fully-qualified connection string."""
+        return (
+            "projects/{project}/locations/{location}/connections/{connection}".format(
+                project=project,
+                location=location,
+                connection=connection,
+            )
+        )
+
+    @staticmethod
+    def parse_connection_path(path: str) -> Dict[str, str]:
+        """Parses a connection path into its component segments."""
+        m = re.match(
+            r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/connections/(?P<connection>.+?)$",
+            path,
+        )
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def git_repository_link_path(
+        project: str,
+        location: str,
+        connection: str,
+        git_repository_link: str,
+    ) -> str:
+        """Returns a fully-qualified git_repository_link string."""
+        return "projects/{project}/locations/{location}/connections/{connection}/gitRepositoryLinks/{git_repository_link}".format(
+            project=project,
+            location=location,
+            connection=connection,
+            git_repository_link=git_repository_link,
+        )
+
+    @staticmethod
+    def parse_git_repository_link_path(path: str) -> Dict[str, str]:
+        """Parses a git_repository_link path into its component segments."""
+        m = re.match(
+            r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/connections/(?P<connection>.+?)/gitRepositoryLinks/(?P<git_repository_link>.+?)$",
+            path,
+        )
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def secret_version_path(
+        project: str,
+        secret: str,
+        secret_version: str,
+    ) -> str:
+        """Returns a fully-qualified secret_version string."""
+        return "projects/{project}/secrets/{secret}/versions/{secret_version}".format(
+            project=project,
+            secret=secret,
+            secret_version=secret_version,
+        )
+
+    @staticmethod
+    def parse_secret_version_path(path: str) -> Dict[str, str]:
+        """Parses a secret_version path into its component segments."""
+        m = re.match(
+            r"^projects/(?P<project>.+?)/secrets/(?P<secret>.+?)/versions/(?P<secret_version>.+?)$",
+            path,
+        )
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_billing_account_path(
+        billing_account: str,
+    ) -> str:
+        """Returns a fully-qualified billing_account string."""
+        return "billingAccounts/{billing_account}".format(
+            billing_account=billing_account,
+        )
+
+    @staticmethod
+    def parse_common_billing_account_path(path: str) -> Dict[str, str]:
+        """Parse a billing_account path into its component segments."""
+        m = re.match(r"^billingAccounts/(?P<billing_account>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_folder_path(
+        folder: str,
+    ) -> str:
+        """Returns a fully-qualified folder string."""
+        return "folders/{folder}".format(
+            folder=folder,
+        )
+
+    @staticmethod
+    def parse_common_folder_path(path: str) -> Dict[str, str]:
+        """Parse a folder path into its component segments."""
+        m = re.match(r"^folders/(?P<folder>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_organization_path(
+        organization: str,
+    ) -> str:
+        """Returns a fully-qualified organization string."""
+        return "organizations/{organization}".format(
+            organization=organization,
+        )
+
+    @staticmethod
+    def parse_common_organization_path(path: str) -> Dict[str, str]:
+        """Parse a organization path into its component segments."""
+        m = re.match(r"^organizations/(?P<organization>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_project_path(
+        project: str,
+    ) -> str:
+        """Returns a fully-qualified project string."""
+        return "projects/{project}".format(
+            project=project,
+        )
+
+    @staticmethod
+    def parse_common_project_path(path: str) -> Dict[str, str]:
+        """Parse a project path into its component segments."""
+        m = re.match(r"^projects/(?P<project>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_location_path(
+        project: str,
+        location: str,
+    ) -> str:
+        """Returns a fully-qualified location string."""
+        return "projects/{project}/locations/{location}".format(
+            project=project,
+            location=location,
+        )
+
+    @staticmethod
+    def parse_common_location_path(path: str) -> Dict[str, str]:
+        """Parse a location path into its component segments."""
+        m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @classmethod
+    def get_mtls_endpoint_and_cert_source(
+        cls, client_options: Optional[client_options_lib.ClientOptions] = None
+    ):
+        """Deprecated. Return the API endpoint and client cert source for mutual TLS.
+
+        The client cert source is determined in the following order:
+        (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the
+        client cert source is None.
+        (2) if `client_options.client_cert_source` is provided, use the provided one; if the
+        default client cert source exists, use the default one; otherwise the client cert
+        source is None.
+
+        The API endpoint is determined in the following order:
+        (1) if `client_options.api_endpoint` is provided, use the provided one.
+        (2) if the `GOOGLE_API_USE_MTLS_ENDPOINT` environment variable is "always", use the
+        default mTLS endpoint; if the environment variable is "never", use the default API
+        endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise
+        use the default API endpoint.
+
+        More details can be found at https://google.aip.dev/auth/4114.
+
+        Args:
+            client_options (google.api_core.client_options.ClientOptions): Custom options for the
+                client. Only the `api_endpoint` and `client_cert_source` properties may be used
+                in this method.
+
+        Returns:
+            Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the
+                client cert source to use.
+
+        Raises:
+            google.auth.exceptions.MutualTLSChannelError: If any errors happen.
+        """
+
+        warnings.warn(
+            "get_mtls_endpoint_and_cert_source is deprecated. Use the api_endpoint property instead.",
+            DeprecationWarning,
+        )
+        if client_options is None:
+            client_options = client_options_lib.ClientOptions()
+        use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false")
+        use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto")
+        if use_client_cert not in ("true", "false"):
+            raise ValueError(
+                "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`"
+            )
+        if use_mtls_endpoint not in ("auto", "never", "always"):
+            raise MutualTLSChannelError(
+                "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`"
+            )
+
+        # Figure out the client cert source to use.
+        client_cert_source = None
+        if use_client_cert == "true":
+            if client_options.client_cert_source:
+                client_cert_source = client_options.client_cert_source
+            elif mtls.has_default_client_cert_source():
+                client_cert_source = mtls.default_client_cert_source()
+
+        # Figure out which api endpoint to use.
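+        # Precedence: an explicit client_options.api_endpoint always wins;
+        # otherwise the mTLS endpoint is used when GOOGLE_API_USE_MTLS_ENDPOINT
+        # is "always", or "auto" with a client certificate available; in every
+        # other case the default endpoint is used.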
+ if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + elif use_mtls_endpoint == "always" or ( + use_mtls_endpoint == "auto" and client_cert_source + ): + api_endpoint = cls.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = cls.DEFAULT_ENDPOINT + + return api_endpoint, client_cert_source + + @staticmethod + def _read_environment_variables(): + """Returns the environment variables used by the client. + + Returns: + Tuple[bool, str, str]: returns the GOOGLE_API_USE_CLIENT_CERTIFICATE, + GOOGLE_API_USE_MTLS_ENDPOINT, and GOOGLE_CLOUD_UNIVERSE_DOMAIN environment variables. + + Raises: + ValueError: If GOOGLE_API_USE_CLIENT_CERTIFICATE is not + any of ["true", "false"]. + google.auth.exceptions.MutualTLSChannelError: If GOOGLE_API_USE_MTLS_ENDPOINT + is not any of ["auto", "never", "always"]. + """ + use_client_cert = os.getenv( + "GOOGLE_API_USE_CLIENT_CERTIFICATE", "false" + ).lower() + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto").lower() + universe_domain_env = os.getenv("GOOGLE_CLOUD_UNIVERSE_DOMAIN") + if use_client_cert not in ("true", "false"): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError( + "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + return use_client_cert == "true", use_mtls_endpoint, universe_domain_env + + @staticmethod + def _get_client_cert_source(provided_cert_source, use_cert_flag): + """Return the client cert source to be used by the client. + + Args: + provided_cert_source (bytes): The client certificate source provided. + use_cert_flag (bool): A flag indicating whether to use the client certificate. + + Returns: + bytes or None: The client cert source to be used by the client. + """ + client_cert_source = None + if use_cert_flag: + if provided_cert_source: + client_cert_source = provided_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + return client_cert_source + + @staticmethod + def _get_api_endpoint( + api_override, client_cert_source, universe_domain, use_mtls_endpoint + ): + """Return the API endpoint used by the client. + + Args: + api_override (str): The API endpoint override. If specified, this is always + the return value of this function and the other arguments are not used. + client_cert_source (bytes): The client certificate source used by the client. + universe_domain (str): The universe domain used by the client. + use_mtls_endpoint (str): How to use the mTLS endpoint, which depends also on the other parameters. + Possible values are "always", "auto", or "never". + + Returns: + str: The API endpoint to be used by the client. + """ + if api_override is not None: + api_endpoint = api_override + elif use_mtls_endpoint == "always" or ( + use_mtls_endpoint == "auto" and client_cert_source + ): + _default_universe = DeveloperConnectClient._DEFAULT_UNIVERSE + if universe_domain != _default_universe: + raise MutualTLSChannelError( + f"mTLS is not supported in any universe other than {_default_universe}." 
+ ) + api_endpoint = DeveloperConnectClient.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = DeveloperConnectClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=universe_domain + ) + return api_endpoint + + @staticmethod + def _get_universe_domain( + client_universe_domain: Optional[str], universe_domain_env: Optional[str] + ) -> str: + """Return the universe domain used by the client. + + Args: + client_universe_domain (Optional[str]): The universe domain configured via the client options. + universe_domain_env (Optional[str]): The universe domain configured via the "GOOGLE_CLOUD_UNIVERSE_DOMAIN" environment variable. + + Returns: + str: The universe domain to be used by the client. + + Raises: + ValueError: If the universe domain is an empty string. + """ + universe_domain = DeveloperConnectClient._DEFAULT_UNIVERSE + if client_universe_domain is not None: + universe_domain = client_universe_domain + elif universe_domain_env is not None: + universe_domain = universe_domain_env + if len(universe_domain.strip()) == 0: + raise ValueError("Universe Domain cannot be an empty string.") + return universe_domain + + @staticmethod + def _compare_universes( + client_universe: str, credentials: ga_credentials.Credentials + ) -> bool: + """Returns True iff the universe domains used by the client and credentials match. + + Args: + client_universe (str): The universe domain configured via the client options. + credentials (ga_credentials.Credentials): The credentials being used in the client. + + Returns: + bool: True iff client_universe matches the universe in credentials. + + Raises: + ValueError: when client_universe does not match the universe in credentials. + """ + + default_universe = DeveloperConnectClient._DEFAULT_UNIVERSE + credentials_universe = getattr(credentials, "universe_domain", default_universe) + + if client_universe != credentials_universe: + raise ValueError( + "The configured universe domain " + f"({client_universe}) does not match the universe domain " + f"found in the credentials ({credentials_universe}). " + "If you haven't configured the universe domain explicitly, " + f"`{default_universe}` is the default." + ) + return True + + def _validate_universe_domain(self): + """Validates client's and credentials' universe domains are consistent. + + Returns: + bool: True iff the configured universe domain is valid. + + Raises: + ValueError: If the configured universe domain is not valid. + """ + self._is_universe_domain_valid = ( + self._is_universe_domain_valid + or DeveloperConnectClient._compare_universes( + self.universe_domain, self.transport._credentials + ) + ) + return self._is_universe_domain_valid + + @property + def api_endpoint(self): + """Return the API endpoint used by the client instance. + + Returns: + str: The API endpoint used by the client instance. + """ + return self._api_endpoint + + @property + def universe_domain(self) -> str: + """Return the universe domain used by the client instance. + + Returns: + str: The universe domain used by the client instance. + """ + return self._universe_domain + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[ + Union[ + str, DeveloperConnectTransport, Callable[..., DeveloperConnectTransport] + ] + ] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the developer connect client. 
+
+        Args:
+            credentials (Optional[google.auth.credentials.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify the application to the service; if none
+                are specified, the client will attempt to ascertain the
+                credentials from the environment.
+            transport (Optional[Union[str,DeveloperConnectTransport,Callable[..., DeveloperConnectTransport]]]):
+                The transport to use, or a Callable that constructs and returns a new transport.
+                If a Callable is given, it will be called with the same set of initialization
+                arguments as used in the DeveloperConnectTransport constructor.
+                If set to None, a transport is chosen automatically.
+            client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]):
+                Custom options for the client.
+
+                1. The ``api_endpoint`` property can be used to override the
+                default endpoint provided by the client when ``transport`` is
+                not explicitly provided. If this property is not set and
+                ``transport`` was not explicitly provided, the endpoint is
+                determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment
+                variable, which has one of the following values:
+                "always" (always use the default mTLS endpoint), "never" (always
+                use the default regular endpoint) and "auto" (auto-switch to the
+                default mTLS endpoint if client certificate is present; this is
+                the default value).
+
+                2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable
+                is "true", then the ``client_cert_source`` property can be used
+                to provide a client certificate for mTLS transport. If
+                not provided, the default SSL client certificate will be used if
+                present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not
+                set, no client certificate will be used.
+
+                3. The ``universe_domain`` property can be used to override the
+                default "googleapis.com" universe. Note that the ``api_endpoint``
+                property still takes precedence; and ``universe_domain`` is
+                currently not supported for mTLS.
+
+            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+                The client info used to send a user-agent string along with
+                API requests. If ``None``, then default info will be used.
+                Generally, you only need to set this if you're developing
+                your own client library.
+
+        Raises:
+            google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
+                creation failed for any reason.
+        """
+        self._client_options = client_options
+        if isinstance(self._client_options, dict):
+            self._client_options = client_options_lib.from_dict(self._client_options)
+        if self._client_options is None:
+            self._client_options = client_options_lib.ClientOptions()
+        self._client_options = cast(
+            client_options_lib.ClientOptions, self._client_options
+        )
+
+        universe_domain_opt = getattr(self._client_options, "universe_domain", None)
+
+        (
+            self._use_client_cert,
+            self._use_mtls_endpoint,
+            self._universe_domain_env,
+        ) = DeveloperConnectClient._read_environment_variables()
+        self._client_cert_source = DeveloperConnectClient._get_client_cert_source(
+            self._client_options.client_cert_source, self._use_client_cert
+        )
+        self._universe_domain = DeveloperConnectClient._get_universe_domain(
+            universe_domain_opt, self._universe_domain_env
+        )
+        self._api_endpoint = None  # updated below, depending on `transport`
+
+        # Initialize the universe domain validation.
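+        # The endpoint and universe domain resolved above follow a fixed precedence:
+        # explicit ClientOptions values, then environment variables, then defaults.
+        # A minimal, illustrative configuration sketch (values are assumptions, not
+        # part of this change):
+        #
+        #     from google.api_core.client_options import ClientOptions
+        #     options = ClientOptions(api_endpoint="developerconnect.googleapis.com")
+        #     client = DeveloperConnectClient(client_options=options)
+        #
+        # Universe-domain consistency is checked lazily, the first time an RPC runs.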
+ self._is_universe_domain_valid = False + + api_key_value = getattr(self._client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError( + "client_options.api_key and credentials are mutually exclusive" + ) + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + transport_provided = isinstance(transport, DeveloperConnectTransport) + if transport_provided: + # transport is a DeveloperConnectTransport instance. + if credentials or self._client_options.credentials_file or api_key_value: + raise ValueError( + "When providing a transport instance, " + "provide its credentials directly." + ) + if self._client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." + ) + self._transport = cast(DeveloperConnectTransport, transport) + self._api_endpoint = self._transport.host + + self._api_endpoint = ( + self._api_endpoint + or DeveloperConnectClient._get_api_endpoint( + self._client_options.api_endpoint, + self._client_cert_source, + self._universe_domain, + self._use_mtls_endpoint, + ) + ) + + if not transport_provided: + import google.auth._default # type: ignore + + if api_key_value and hasattr( + google.auth._default, "get_api_key_credentials" + ): + credentials = google.auth._default.get_api_key_credentials( + api_key_value + ) + + transport_init: Union[ + Type[DeveloperConnectTransport], + Callable[..., DeveloperConnectTransport], + ] = ( + type(self).get_transport_class(transport) + if isinstance(transport, str) or transport is None + else cast(Callable[..., DeveloperConnectTransport], transport) + ) + # initialize with the provided callable or the passed in class + self._transport = transport_init( + credentials=credentials, + credentials_file=self._client_options.credentials_file, + host=self._api_endpoint, + scopes=self._client_options.scopes, + client_cert_source_for_mtls=self._client_cert_source, + quota_project_id=self._client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + api_audience=self._client_options.api_audience, + ) + + def list_connections( + self, + request: Optional[Union[developer_connect.ListConnectionsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListConnectionsPager: + r"""Lists Connections in a given project and location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import developerconnect_v1 + + def sample_list_connections(): + # Create a client + client = developerconnect_v1.DeveloperConnectClient() + + # Initialize request argument(s) + request = developerconnect_v1.ListConnectionsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_connections(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.developerconnect_v1.types.ListConnectionsRequest, dict]): + The request object. Message for requesting list of + Connections + parent (str): + Required. Parent value for + ListConnectionsRequest + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.developerconnect_v1.services.developer_connect.pagers.ListConnectionsPager: + Message for response to listing + Connections + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, developer_connect.ListConnectionsRequest): + request = developer_connect.ListConnectionsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_connections] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListConnectionsPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. 
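+        # The pager fetches further pages transparently as it is iterated.
+        # Illustrative usage sketch (resource names are assumptions):
+        #
+        #     parent = "projects/my-project/locations/us-central1"
+        #     for connection in client.list_connections(parent=parent):
+        #         print(connection.name)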
+ return response + + def get_connection( + self, + request: Optional[Union[developer_connect.GetConnectionRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> developer_connect.Connection: + r"""Gets details of a single Connection. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import developerconnect_v1 + + def sample_get_connection(): + # Create a client + client = developerconnect_v1.DeveloperConnectClient() + + # Initialize request argument(s) + request = developerconnect_v1.GetConnectionRequest( + name="name_value", + ) + + # Make the request + response = client.get_connection(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.developerconnect_v1.types.GetConnectionRequest, dict]): + The request object. Message for getting a Connection + name (str): + Required. Name of the resource + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.developerconnect_v1.types.Connection: + Message describing Connection object + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, developer_connect.GetConnectionRequest): + request = developer_connect.GetConnectionRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_connection] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
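+        # The response is a proto-plus ``Connection`` message whose fields are plain
+        # attributes. Illustrative sketch (the resource name is an assumption):
+        #
+        #     conn = client.get_connection(
+        #         name="projects/my-project/locations/us-central1/connections/my-conn"
+        #     )
+        #     print(conn.github_config.github_app)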
+ return response + + def create_connection( + self, + request: Optional[ + Union[developer_connect.CreateConnectionRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + connection: Optional[developer_connect.Connection] = None, + connection_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Creates a new Connection in a given project and + location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import developerconnect_v1 + + def sample_create_connection(): + # Create a client + client = developerconnect_v1.DeveloperConnectClient() + + # Initialize request argument(s) + connection = developerconnect_v1.Connection() + connection.github_config.github_app = "FIREBASE" + + request = developerconnect_v1.CreateConnectionRequest( + parent="parent_value", + connection_id="connection_id_value", + connection=connection, + ) + + # Make the request + operation = client.create_connection(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.developerconnect_v1.types.CreateConnectionRequest, dict]): + The request object. Message for creating a Connection + parent (str): + Required. Value for parent. + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + connection (google.cloud.developerconnect_v1.types.Connection): + Required. The resource being created + This corresponds to the ``connection`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + connection_id (str): + Required. Id of the requesting object If auto-generating + Id server-side, remove this field and connection_id from + the method_signature of Create RPC + + This corresponds to the ``connection_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.developerconnect_v1.types.Connection` + Message describing Connection object + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, connection, connection_id]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." 
+ ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, developer_connect.CreateConnectionRequest): + request = developer_connect.CreateConnectionRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if connection is not None: + request.connection = connection + if connection_id is not None: + request.connection_id = connection_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_connection] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + developer_connect.Connection, + metadata_type=developer_connect.OperationMetadata, + ) + + # Done; return the response. + return response + + def update_connection( + self, + request: Optional[ + Union[developer_connect.UpdateConnectionRequest, dict] + ] = None, + *, + connection: Optional[developer_connect.Connection] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Updates the parameters of a single Connection. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import developerconnect_v1 + + def sample_update_connection(): + # Create a client + client = developerconnect_v1.DeveloperConnectClient() + + # Initialize request argument(s) + connection = developerconnect_v1.Connection() + connection.github_config.github_app = "FIREBASE" + + request = developerconnect_v1.UpdateConnectionRequest( + connection=connection, + ) + + # Make the request + operation = client.update_connection(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.developerconnect_v1.types.UpdateConnectionRequest, dict]): + The request object. Message for updating a Connection + connection (google.cloud.developerconnect_v1.types.Connection): + Required. The resource being updated + This corresponds to the ``connection`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Required. Field mask is used to specify the fields to be + overwritten in the Connection resource by the update. 
+ The fields specified in the update_mask are relative to + the resource, not the full request. A field will be + overwritten if it is in the mask. If the user does not + provide a mask then all fields will be overwritten. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.developerconnect_v1.types.Connection` + Message describing Connection object + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([connection, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, developer_connect.UpdateConnectionRequest): + request = developer_connect.UpdateConnectionRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if connection is not None: + request.connection = connection + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update_connection] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("connection.name", request.connection.name),) + ), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + developer_connect.Connection, + metadata_type=developer_connect.OperationMetadata, + ) + + # Done; return the response. + return response + + def delete_connection( + self, + request: Optional[ + Union[developer_connect.DeleteConnectionRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Deletes a single Connection. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import developerconnect_v1 + + def sample_delete_connection(): + # Create a client + client = developerconnect_v1.DeveloperConnectClient() + + # Initialize request argument(s) + request = developerconnect_v1.DeleteConnectionRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_connection(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.developerconnect_v1.types.DeleteConnectionRequest, dict]): + The request object. Message for deleting a Connection + name (str): + Required. Name of the resource + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. A typical example is to + use it as the request or the response type of an API + method. For instance: + + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); + + } + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, developer_connect.DeleteConnectionRequest): + request = developer_connect.DeleteConnectionRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_connection] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + empty_pb2.Empty, + metadata_type=developer_connect.OperationMetadata, + ) + + # Done; return the response. 
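+        # The returned future tracks a long-running operation whose final result is
+        # ``Empty``. Illustrative sketch (the resource name is an assumption):
+        #
+        #     op = client.delete_connection(
+        #         name="projects/my-project/locations/us-central1/connections/my-conn"
+        #     )
+        #     op.result()  # blocks until deletion completes, or raises on failure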
+ return response + + def create_git_repository_link( + self, + request: Optional[ + Union[developer_connect.CreateGitRepositoryLinkRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + git_repository_link: Optional[developer_connect.GitRepositoryLink] = None, + git_repository_link_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Creates a GitRepositoryLink. Upon linking a Git + Repository, Developer Connect will configure the Git + Repository to send webhook events to Developer Connect. + Connections that use Firebase GitHub Application will + have events forwarded to the Firebase service. All other + Connections will have events forwarded to Cloud Build. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import developerconnect_v1 + + def sample_create_git_repository_link(): + # Create a client + client = developerconnect_v1.DeveloperConnectClient() + + # Initialize request argument(s) + git_repository_link = developerconnect_v1.GitRepositoryLink() + git_repository_link.clone_uri = "clone_uri_value" + + request = developerconnect_v1.CreateGitRepositoryLinkRequest( + parent="parent_value", + git_repository_link=git_repository_link, + git_repository_link_id="git_repository_link_id_value", + ) + + # Make the request + operation = client.create_git_repository_link(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.developerconnect_v1.types.CreateGitRepositoryLinkRequest, dict]): + The request object. Message for creating a + GitRepositoryLink + parent (str): + Required. Value for parent. + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + git_repository_link (google.cloud.developerconnect_v1.types.GitRepositoryLink): + Required. The resource being created + This corresponds to the ``git_repository_link`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + git_repository_link_id (str): + Required. The ID to use for the repository, which will + become the final component of the repository's resource + name. This ID should be unique in the connection. Allows + alphanumeric characters and any of -._~%!$&'()*+,;=@. + + This corresponds to the ``git_repository_link_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. 
+ + The result type for the operation will be + :class:`google.cloud.developerconnect_v1.types.GitRepositoryLink` + Message describing the GitRepositoryLink object + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any( + [parent, git_repository_link, git_repository_link_id] + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, developer_connect.CreateGitRepositoryLinkRequest): + request = developer_connect.CreateGitRepositoryLinkRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if git_repository_link is not None: + request.git_repository_link = git_repository_link + if git_repository_link_id is not None: + request.git_repository_link_id = git_repository_link_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.create_git_repository_link + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + developer_connect.GitRepositoryLink, + metadata_type=developer_connect.OperationMetadata, + ) + + # Done; return the response. + return response + + def delete_git_repository_link( + self, + request: Optional[ + Union[developer_connect.DeleteGitRepositoryLinkRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Deletes a single GitRepositoryLink. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import developerconnect_v1 + + def sample_delete_git_repository_link(): + # Create a client + client = developerconnect_v1.DeveloperConnectClient() + + # Initialize request argument(s) + request = developerconnect_v1.DeleteGitRepositoryLinkRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_git_repository_link(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.developerconnect_v1.types.DeleteGitRepositoryLinkRequest, dict]): + The request object. Message for deleting a + GitRepositoryLink + name (str): + Required. Name of the resource + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. A typical example is to + use it as the request or the response type of an API + method. For instance: + + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); + + } + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, developer_connect.DeleteGitRepositoryLinkRequest): + request = developer_connect.DeleteGitRepositoryLinkRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.delete_git_repository_link + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + empty_pb2.Empty, + metadata_type=developer_connect.OperationMetadata, + ) + + # Done; return the response. 
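+        # As with ``delete_connection``, the operation resolves to ``Empty``; progress
+        # details, if any, surface through ``op.metadata``. Illustrative sketch (the
+        # resource name is an assumption):
+        #
+        #     op = client.delete_git_repository_link(
+        #         name="projects/p/locations/l/connections/c/gitRepositoryLinks/r"
+        #     )
+        #     op.result()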
+ return response + + def list_git_repository_links( + self, + request: Optional[ + Union[developer_connect.ListGitRepositoryLinksRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListGitRepositoryLinksPager: + r"""Lists GitRepositoryLinks in a given project, + location, and connection. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import developerconnect_v1 + + def sample_list_git_repository_links(): + # Create a client + client = developerconnect_v1.DeveloperConnectClient() + + # Initialize request argument(s) + request = developerconnect_v1.ListGitRepositoryLinksRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_git_repository_links(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.developerconnect_v1.types.ListGitRepositoryLinksRequest, dict]): + The request object. Message for requesting a list of + GitRepositoryLinks + parent (str): + Required. Parent value for + ListGitRepositoryLinksRequest + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.developerconnect_v1.services.developer_connect.pagers.ListGitRepositoryLinksPager: + Message for response to listing + GitRepositoryLinks + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, developer_connect.ListGitRepositoryLinksRequest): + request = developer_connect.ListGitRepositoryLinksRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.list_git_repository_links + ] + + # Certain fields should be provided within the metadata header; + # add these here. 
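+        # The routing header is sent as an ``x-goog-request-params`` metadata entry of
+        # roughly the form ("x-goog-request-params", "parent=<URL-encoded parent>");
+        # this is only an illustration of what the tuple constructed below adds.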
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListGitRepositoryLinksPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_git_repository_link( + self, + request: Optional[ + Union[developer_connect.GetGitRepositoryLinkRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> developer_connect.GitRepositoryLink: + r"""Gets details of a single GitRepositoryLink. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import developerconnect_v1 + + def sample_get_git_repository_link(): + # Create a client + client = developerconnect_v1.DeveloperConnectClient() + + # Initialize request argument(s) + request = developerconnect_v1.GetGitRepositoryLinkRequest( + name="name_value", + ) + + # Make the request + response = client.get_git_repository_link(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.developerconnect_v1.types.GetGitRepositoryLinkRequest, dict]): + The request object. Message for getting a + GitRepositoryLink + name (str): + Required. Name of the resource + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.developerconnect_v1.types.GitRepositoryLink: + Message describing the + GitRepositoryLink object + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, developer_connect.GetGitRepositoryLinkRequest): + request = developer_connect.GetGitRepositoryLinkRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
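+        # Retry and timeout defaults attached here can be overridden per call.
+        # Illustrative sketch (values and the resource name are assumptions):
+        #
+        #     from google.api_core import retry as retries
+        #     link = client.get_git_repository_link(
+        #         name="projects/p/locations/l/connections/c/gitRepositoryLinks/r",
+        #         retry=retries.Retry(initial=1.0, maximum=10.0),
+        #         timeout=30.0,
+        #     )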
+ rpc = self._transport._wrapped_methods[self._transport.get_git_repository_link] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def fetch_read_write_token( + self, + request: Optional[ + Union[developer_connect.FetchReadWriteTokenRequest, dict] + ] = None, + *, + git_repository_link: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> developer_connect.FetchReadWriteTokenResponse: + r"""Fetches read/write token of a given + gitRepositoryLink. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import developerconnect_v1 + + def sample_fetch_read_write_token(): + # Create a client + client = developerconnect_v1.DeveloperConnectClient() + + # Initialize request argument(s) + request = developerconnect_v1.FetchReadWriteTokenRequest( + git_repository_link="git_repository_link_value", + ) + + # Make the request + response = client.fetch_read_write_token(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.developerconnect_v1.types.FetchReadWriteTokenRequest, dict]): + The request object. Message for fetching SCM read/write + token. + git_repository_link (str): + Required. The resource name of the gitRepositoryLink in + the format + ``projects/*/locations/*/connections/*/gitRepositoryLinks/*``. + + This corresponds to the ``git_repository_link`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.developerconnect_v1.types.FetchReadWriteTokenResponse: + Message for responding to get + read/write token. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([git_repository_link]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, developer_connect.FetchReadWriteTokenRequest): + request = developer_connect.FetchReadWriteTokenRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
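+        # The flattened ``git_repository_link`` argument and an explicit request are
+        # equivalent ways to call this method. Illustrative sketch (the resource name
+        # is an assumption):
+        #
+        #     path = "projects/p/locations/l/connections/c/gitRepositoryLinks/r"
+        #     client.fetch_read_write_token(git_repository_link=path)
+        #     client.fetch_read_write_token(request={"git_repository_link": path})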
+ if git_repository_link is not None: + request.git_repository_link = git_repository_link + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.fetch_read_write_token] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("git_repository_link", request.git_repository_link),) + ), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def fetch_read_token( + self, + request: Optional[Union[developer_connect.FetchReadTokenRequest, dict]] = None, + *, + git_repository_link: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> developer_connect.FetchReadTokenResponse: + r"""Fetches read token of a given gitRepositoryLink. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import developerconnect_v1 + + def sample_fetch_read_token(): + # Create a client + client = developerconnect_v1.DeveloperConnectClient() + + # Initialize request argument(s) + request = developerconnect_v1.FetchReadTokenRequest( + git_repository_link="git_repository_link_value", + ) + + # Make the request + response = client.fetch_read_token(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.developerconnect_v1.types.FetchReadTokenRequest, dict]): + The request object. Message for fetching SCM read token. + git_repository_link (str): + Required. The resource name of the gitRepositoryLink in + the format + ``projects/*/locations/*/connections/*/gitRepositoryLinks/*``. + + This corresponds to the ``git_repository_link`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.developerconnect_v1.types.FetchReadTokenResponse: + Message for responding to get read + token. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([git_repository_link]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
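+        # ``fetch_read_token`` returns a read-only SCM token, in contrast to the
+        # read/write variant above. Illustrative sketch (the resource name is an
+        # assumption; inspect the returned message for the token fields):
+        #
+        #     response = client.fetch_read_token(
+        #         git_repository_link="projects/p/locations/l/connections/c/gitRepositoryLinks/r"
+        #     )
+        #     print(response)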
+ if not isinstance(request, developer_connect.FetchReadTokenRequest): + request = developer_connect.FetchReadTokenRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if git_repository_link is not None: + request.git_repository_link = git_repository_link + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.fetch_read_token] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("git_repository_link", request.git_repository_link),) + ), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def fetch_linkable_git_repositories( + self, + request: Optional[ + Union[developer_connect.FetchLinkableGitRepositoriesRequest, dict] + ] = None, + *, + connection: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.FetchLinkableGitRepositoriesPager: + r"""FetchLinkableGitRepositories returns a list of git + repositories from an SCM that are available to be added + to a Connection. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import developerconnect_v1 + + def sample_fetch_linkable_git_repositories(): + # Create a client + client = developerconnect_v1.DeveloperConnectClient() + + # Initialize request argument(s) + request = developerconnect_v1.FetchLinkableGitRepositoriesRequest( + connection="connection_value", + ) + + # Make the request + page_result = client.fetch_linkable_git_repositories(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.developerconnect_v1.types.FetchLinkableGitRepositoriesRequest, dict]): + The request object. Request message for + FetchLinkableGitRepositoriesRequest. + connection (str): + Required. The name of the Connection. Format: + ``projects/*/locations/*/connections/*``. + + This corresponds to the ``connection`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.developerconnect_v1.services.developer_connect.pagers.FetchLinkableGitRepositoriesPager: + Response message for + FetchLinkableGitRepositories. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. 
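+        # Passing both ``request`` and the flattened ``connection`` argument is
+        # rejected with ``ValueError``; supply one or the other, for example
+        # (the resource name is an assumption):
+        #
+        #     client.fetch_linkable_git_repositories(
+        #         connection="projects/my-project/locations/us-central1/connections/my-conn"
+        #     )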
+ # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([connection]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance( + request, developer_connect.FetchLinkableGitRepositoriesRequest + ): + request = developer_connect.FetchLinkableGitRepositoriesRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if connection is not None: + request.connection = connection + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.fetch_linkable_git_repositories + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("connection", request.connection),) + ), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.FetchLinkableGitRepositoriesPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def fetch_git_hub_installations( + self, + request: Optional[ + Union[developer_connect.FetchGitHubInstallationsRequest, dict] + ] = None, + *, + connection: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> developer_connect.FetchGitHubInstallationsResponse: + r"""FetchGitHubInstallations returns the list of GitHub + Installations that are available to be added to a + Connection. For github.com, only installations + accessible to the authorizer token are returned. For + GitHub Enterprise, all installations are returned. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import developerconnect_v1 + + def sample_fetch_git_hub_installations(): + # Create a client + client = developerconnect_v1.DeveloperConnectClient() + + # Initialize request argument(s) + request = developerconnect_v1.FetchGitHubInstallationsRequest( + connection="connection_value", + ) + + # Make the request + response = client.fetch_git_hub_installations(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.developerconnect_v1.types.FetchGitHubInstallationsRequest, dict]): + The request object. Request for fetching github + installations. + connection (str): + Required. 
The resource name of the connection in the + format ``projects/*/locations/*/connections/*``. + + This corresponds to the ``connection`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.developerconnect_v1.types.FetchGitHubInstallationsResponse: + Response of fetching github + installations. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([connection]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, developer_connect.FetchGitHubInstallationsRequest): + request = developer_connect.FetchGitHubInstallationsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if connection is not None: + request.connection = connection + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.fetch_git_hub_installations + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("connection", request.connection),) + ), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def fetch_git_refs( + self, + request: Optional[Union[developer_connect.FetchGitRefsRequest, dict]] = None, + *, + git_repository_link: Optional[str] = None, + ref_type: Optional[developer_connect.FetchGitRefsRequest.RefType] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.FetchGitRefsPager: + r"""Fetch the list of branches or tags for a given + repository. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import developerconnect_v1 + + def sample_fetch_git_refs(): + # Create a client + client = developerconnect_v1.DeveloperConnectClient() + + # Initialize request argument(s) + request = developerconnect_v1.FetchGitRefsRequest( + git_repository_link="git_repository_link_value", + ref_type="BRANCH", + ) + + # Make the request + page_result = client.fetch_git_refs(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.developerconnect_v1.types.FetchGitRefsRequest, dict]): + The request object. Request for fetching git refs. + git_repository_link (str): + Required. The resource name of GitRepositoryLink in the + format + ``projects/*/locations/*/connections/*/gitRepositoryLinks/*``. + + This corresponds to the ``git_repository_link`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + ref_type (google.cloud.developerconnect_v1.types.FetchGitRefsRequest.RefType): + Required. Type of refs to fetch. + This corresponds to the ``ref_type`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.developerconnect_v1.services.developer_connect.pagers.FetchGitRefsPager: + Response for fetching git refs. + + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([git_repository_link, ref_type]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, developer_connect.FetchGitRefsRequest): + request = developer_connect.FetchGitRefsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if git_repository_link is not None: + request.git_repository_link = git_repository_link + if ref_type is not None: + request.ref_type = ref_type + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.fetch_git_refs] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("git_repository_link", request.git_repository_link),) + ), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. 
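+        # Note (descriptive comment, not in the generated source): the pager keeps a
+        # reference to the wrapped RPC, the request, and the metadata so it can
+        # transparently re-issue FetchGitRefs calls with each next_page_token while
+        # the caller simply iterates over the returned ref names.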
+ response = pagers.FetchGitRefsPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def __enter__(self) -> "DeveloperConnectClient": + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! + """ + self.transport.close() + + def list_operations( + self, + request: Optional[operations_pb2.ListOperationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Lists operations that match the specified filter in the request. + + Args: + request (:class:`~.operations_pb2.ListOperationsRequest`): + The request object. Request message for + `ListOperations` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.ListOperationsResponse: + Response message for ``ListOperations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.ListOperationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.list_operations, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_operation( + self, + request: Optional[operations_pb2.GetOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. + + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
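+        # Note (descriptive comment, not in the generated source): unlike the
+        # Developer Connect methods above, the operations mixin methods are not
+        # pre-wrapped on the transport, so the RPC is wrapped here with the default
+        # client info and no default timeout.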
+ rpc = gapic_v1.method.wrap_method( + self._transport.get_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def delete_operation( + self, + request: Optional[operations_pb2.DeleteOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a long-running operation. + + This method indicates that the client is no longer interested + in the operation result. It does not cancel the operation. + If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.DeleteOperationRequest`): + The request object. Request message for + `DeleteOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.DeleteOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.delete_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def cancel_operation( + self, + request: Optional[operations_pb2.CancelOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Starts asynchronous cancellation on a long-running operation. + + The server makes a best effort to cancel the operation, but success + is not guaranteed. If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.CancelOperationRequest`): + The request object. Request message for + `CancelOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. 
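+        # Note (descriptive comment, not in the generated source): a request that is
+        # already an operations_pb2 message is passed through unchanged; only dict
+        # inputs are expanded into the protobuf constructor.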
+ if isinstance(request, dict): + request = operations_pb2.CancelOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.cancel_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def get_location( + self, + request: Optional[locations_pb2.GetLocationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.Location: + r"""Gets information about a location. + + Args: + request (:class:`~.location_pb2.GetLocationRequest`): + The request object. Request message for + `GetLocation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.location_pb2.Location: + Location object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = locations_pb2.GetLocationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.get_location, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def list_locations( + self, + request: Optional[locations_pb2.ListLocationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.ListLocationsResponse: + r"""Lists information about the supported locations for this service. + + Args: + request (:class:`~.location_pb2.ListLocationsRequest`): + The request object. Request message for + `ListLocations` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.location_pb2.ListLocationsResponse: + Response message for ``ListLocations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. 
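+        # Note (descriptive comment, not in the generated source): locations_pb2
+        # messages are raw protobuf types, so a dict request is expanded into keyword
+        # arguments rather than coerced through a proto-plus wrapper.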
+ if isinstance(request, dict): + request = locations_pb2.ListLocationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.list_locations, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("DeveloperConnectClient",) diff --git a/packages/google-cloud-developerconnect/google/cloud/developerconnect_v1/services/developer_connect/pagers.py b/packages/google-cloud-developerconnect/google/cloud/developerconnect_v1/services/developer_connect/pagers.py new file mode 100644 index 000000000000..aaedf10f7a58 --- /dev/null +++ b/packages/google-cloud-developerconnect/google/cloud/developerconnect_v1/services/developer_connect/pagers.py @@ -0,0 +1,547 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import ( + Any, + AsyncIterator, + Awaitable, + Callable, + Iterator, + Optional, + Sequence, + Tuple, +) + +from google.cloud.developerconnect_v1.types import developer_connect + + +class ListConnectionsPager: + """A pager for iterating through ``list_connections`` requests. + + This class thinly wraps an initial + :class:`google.cloud.developerconnect_v1.types.ListConnectionsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``connections`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListConnections`` requests and continue to iterate + through the ``connections`` field on the + corresponding responses. + + All the usual :class:`google.cloud.developerconnect_v1.types.ListConnectionsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., developer_connect.ListConnectionsResponse], + request: developer_connect.ListConnectionsRequest, + response: developer_connect.ListConnectionsResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.developerconnect_v1.types.ListConnectionsRequest): + The initial request object. + response (google.cloud.developerconnect_v1.types.ListConnectionsResponse): + The initial response object. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = developer_connect.ListConnectionsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[developer_connect.ListConnectionsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[developer_connect.Connection]: + for page in self.pages: + yield from page.connections + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListConnectionsAsyncPager: + """A pager for iterating through ``list_connections`` requests. + + This class thinly wraps an initial + :class:`google.cloud.developerconnect_v1.types.ListConnectionsResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``connections`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListConnections`` requests and continue to iterate + through the ``connections`` field on the + corresponding responses. + + All the usual :class:`google.cloud.developerconnect_v1.types.ListConnectionsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., Awaitable[developer_connect.ListConnectionsResponse]], + request: developer_connect.ListConnectionsRequest, + response: developer_connect.ListConnectionsResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.developerconnect_v1.types.ListConnectionsRequest): + The initial request object. + response (google.cloud.developerconnect_v1.types.ListConnectionsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = developer_connect.ListConnectionsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[developer_connect.ListConnectionsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + + def __aiter__(self) -> AsyncIterator[developer_connect.Connection]: + async def async_generator(): + async for page in self.pages: + for response in page.connections: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListGitRepositoryLinksPager: + """A pager for iterating through ``list_git_repository_links`` requests. 
+ + This class thinly wraps an initial + :class:`google.cloud.developerconnect_v1.types.ListGitRepositoryLinksResponse` object, and + provides an ``__iter__`` method to iterate through its + ``git_repository_links`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListGitRepositoryLinks`` requests and continue to iterate + through the ``git_repository_links`` field on the + corresponding responses. + + All the usual :class:`google.cloud.developerconnect_v1.types.ListGitRepositoryLinksResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., developer_connect.ListGitRepositoryLinksResponse], + request: developer_connect.ListGitRepositoryLinksRequest, + response: developer_connect.ListGitRepositoryLinksResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.developerconnect_v1.types.ListGitRepositoryLinksRequest): + The initial request object. + response (google.cloud.developerconnect_v1.types.ListGitRepositoryLinksResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = developer_connect.ListGitRepositoryLinksRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[developer_connect.ListGitRepositoryLinksResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[developer_connect.GitRepositoryLink]: + for page in self.pages: + yield from page.git_repository_links + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListGitRepositoryLinksAsyncPager: + """A pager for iterating through ``list_git_repository_links`` requests. + + This class thinly wraps an initial + :class:`google.cloud.developerconnect_v1.types.ListGitRepositoryLinksResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``git_repository_links`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListGitRepositoryLinks`` requests and continue to iterate + through the ``git_repository_links`` field on the + corresponding responses. + + All the usual :class:`google.cloud.developerconnect_v1.types.ListGitRepositoryLinksResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[ + ..., Awaitable[developer_connect.ListGitRepositoryLinksResponse] + ], + request: developer_connect.ListGitRepositoryLinksRequest, + response: developer_connect.ListGitRepositoryLinksResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. 
+ request (google.cloud.developerconnect_v1.types.ListGitRepositoryLinksRequest): + The initial request object. + response (google.cloud.developerconnect_v1.types.ListGitRepositoryLinksResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = developer_connect.ListGitRepositoryLinksRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages( + self, + ) -> AsyncIterator[developer_connect.ListGitRepositoryLinksResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + + def __aiter__(self) -> AsyncIterator[developer_connect.GitRepositoryLink]: + async def async_generator(): + async for page in self.pages: + for response in page.git_repository_links: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class FetchLinkableGitRepositoriesPager: + """A pager for iterating through ``fetch_linkable_git_repositories`` requests. + + This class thinly wraps an initial + :class:`google.cloud.developerconnect_v1.types.FetchLinkableGitRepositoriesResponse` object, and + provides an ``__iter__`` method to iterate through its + ``linkable_git_repositories`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``FetchLinkableGitRepositories`` requests and continue to iterate + through the ``linkable_git_repositories`` field on the + corresponding responses. + + All the usual :class:`google.cloud.developerconnect_v1.types.FetchLinkableGitRepositoriesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., developer_connect.FetchLinkableGitRepositoriesResponse], + request: developer_connect.FetchLinkableGitRepositoriesRequest, + response: developer_connect.FetchLinkableGitRepositoriesResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.developerconnect_v1.types.FetchLinkableGitRepositoriesRequest): + The initial request object. + response (google.cloud.developerconnect_v1.types.FetchLinkableGitRepositoriesResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = developer_connect.FetchLinkableGitRepositoriesRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[developer_connect.FetchLinkableGitRepositoriesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[developer_connect.LinkableGitRepository]: + for page in self.pages: + yield from page.linkable_git_repositories + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class FetchLinkableGitRepositoriesAsyncPager: + """A pager for iterating through ``fetch_linkable_git_repositories`` requests. + + This class thinly wraps an initial + :class:`google.cloud.developerconnect_v1.types.FetchLinkableGitRepositoriesResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``linkable_git_repositories`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``FetchLinkableGitRepositories`` requests and continue to iterate + through the ``linkable_git_repositories`` field on the + corresponding responses. + + All the usual :class:`google.cloud.developerconnect_v1.types.FetchLinkableGitRepositoriesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[ + ..., Awaitable[developer_connect.FetchLinkableGitRepositoriesResponse] + ], + request: developer_connect.FetchLinkableGitRepositoriesRequest, + response: developer_connect.FetchLinkableGitRepositoriesResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.developerconnect_v1.types.FetchLinkableGitRepositoriesRequest): + The initial request object. + response (google.cloud.developerconnect_v1.types.FetchLinkableGitRepositoriesResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = developer_connect.FetchLinkableGitRepositoriesRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages( + self, + ) -> AsyncIterator[developer_connect.FetchLinkableGitRepositoriesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + + def __aiter__(self) -> AsyncIterator[developer_connect.LinkableGitRepository]: + async def async_generator(): + async for page in self.pages: + for response in page.linkable_git_repositories: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class FetchGitRefsPager: + """A pager for iterating through ``fetch_git_refs`` requests. 
+ + This class thinly wraps an initial + :class:`google.cloud.developerconnect_v1.types.FetchGitRefsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``ref_names`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``FetchGitRefs`` requests and continue to iterate + through the ``ref_names`` field on the + corresponding responses. + + All the usual :class:`google.cloud.developerconnect_v1.types.FetchGitRefsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., developer_connect.FetchGitRefsResponse], + request: developer_connect.FetchGitRefsRequest, + response: developer_connect.FetchGitRefsResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.developerconnect_v1.types.FetchGitRefsRequest): + The initial request object. + response (google.cloud.developerconnect_v1.types.FetchGitRefsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = developer_connect.FetchGitRefsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[developer_connect.FetchGitRefsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[str]: + for page in self.pages: + yield from page.ref_names + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class FetchGitRefsAsyncPager: + """A pager for iterating through ``fetch_git_refs`` requests. + + This class thinly wraps an initial + :class:`google.cloud.developerconnect_v1.types.FetchGitRefsResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``ref_names`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``FetchGitRefs`` requests and continue to iterate + through the ``ref_names`` field on the + corresponding responses. + + All the usual :class:`google.cloud.developerconnect_v1.types.FetchGitRefsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., Awaitable[developer_connect.FetchGitRefsResponse]], + request: developer_connect.FetchGitRefsRequest, + response: developer_connect.FetchGitRefsResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.developerconnect_v1.types.FetchGitRefsRequest): + The initial request object. + response (google.cloud.developerconnect_v1.types.FetchGitRefsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = developer_connect.FetchGitRefsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[developer_connect.FetchGitRefsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + + def __aiter__(self) -> AsyncIterator[str]: + async def async_generator(): + async for page in self.pages: + for response in page.ref_names: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) diff --git a/packages/google-cloud-developerconnect/google/cloud/developerconnect_v1/services/developer_connect/transports/__init__.py b/packages/google-cloud-developerconnect/google/cloud/developerconnect_v1/services/developer_connect/transports/__init__.py new file mode 100644 index 000000000000..e0049e3a9195 --- /dev/null +++ b/packages/google-cloud-developerconnect/google/cloud/developerconnect_v1/services/developer_connect/transports/__init__.py @@ -0,0 +1,36 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +from typing import Dict, Type + +from .base import DeveloperConnectTransport +from .grpc import DeveloperConnectGrpcTransport +from .grpc_asyncio import DeveloperConnectGrpcAsyncIOTransport +from .rest import DeveloperConnectRestInterceptor, DeveloperConnectRestTransport + +# Compile a registry of transports. +_transport_registry = OrderedDict() # type: Dict[str, Type[DeveloperConnectTransport]] +_transport_registry["grpc"] = DeveloperConnectGrpcTransport +_transport_registry["grpc_asyncio"] = DeveloperConnectGrpcAsyncIOTransport +_transport_registry["rest"] = DeveloperConnectRestTransport + +__all__ = ( + "DeveloperConnectTransport", + "DeveloperConnectGrpcTransport", + "DeveloperConnectGrpcAsyncIOTransport", + "DeveloperConnectRestTransport", + "DeveloperConnectRestInterceptor", +) diff --git a/packages/google-cloud-developerconnect/google/cloud/developerconnect_v1/services/developer_connect/transports/base.py b/packages/google-cloud-developerconnect/google/cloud/developerconnect_v1/services/developer_connect/transports/base.py new file mode 100644 index 000000000000..7ca321119441 --- /dev/null +++ b/packages/google-cloud-developerconnect/google/cloud/developerconnect_v1/services/developer_connect/transports/base.py @@ -0,0 +1,534 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import abc +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union + +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1, operations_v1 +from google.api_core import retry as retries +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.developerconnect_v1 import gapic_version as package_version +from google.cloud.developerconnect_v1.types import developer_connect + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +class DeveloperConnectTransport(abc.ABC): + """Abstract transport class for DeveloperConnect.""" + + AUTH_SCOPES = ("https://www.googleapis.com/auth/cloud-platform",) + + DEFAULT_HOST: str = "developerconnect.googleapis.com" + + def __init__( + self, + *, + host: str = DEFAULT_HOST, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + **kwargs, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'developerconnect.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A list of scopes. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + """ + + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} + + # Save the scopes. + self._scopes = scopes + + # If no credentials are provided, then determine the appropriate + # defaults. 
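+        # Note (descriptive comment, not in the generated source): explicit
+        # credentials and a credentials file are mutually exclusive; that combination
+        # is rejected before falling back to application default credentials.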
+ if credentials and credentials_file: + raise core_exceptions.DuplicateCredentialArgs( + "'credentials_file' and 'credentials' are mutually exclusive" + ) + + if credentials_file is not None: + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, **scopes_kwargs, quota_project_id=quota_project_id + ) + elif credentials is None: + credentials, _ = google.auth.default( + **scopes_kwargs, quota_project_id=quota_project_id + ) + # Don't apply audience if the credentials file passed from user. + if hasattr(credentials, "with_gdch_audience"): + credentials = credentials.with_gdch_audience( + api_audience if api_audience else host + ) + + # If the credentials are service account credentials, then always try to use self signed JWT. + if ( + always_use_jwt_access + and isinstance(credentials, service_account.Credentials) + and hasattr(service_account.Credentials, "with_always_use_jwt_access") + ): + credentials = credentials.with_always_use_jwt_access(True) + + # Save the credentials. + self._credentials = credentials + + # Save the hostname. Default to port 443 (HTTPS) if none is specified. + if ":" not in host: + host += ":443" + self._host = host + + @property + def host(self): + return self._host + + def _prep_wrapped_messages(self, client_info): + # Precompute the wrapped methods. + self._wrapped_methods = { + self.list_connections: gapic_v1.method.wrap_method( + self.list_connections, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.get_connection: gapic_v1.method.wrap_method( + self.get_connection, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.create_connection: gapic_v1.method.wrap_method( + self.create_connection, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.update_connection: gapic_v1.method.wrap_method( + self.update_connection, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.delete_connection: gapic_v1.method.wrap_method( + self.delete_connection, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.create_git_repository_link: gapic_v1.method.wrap_method( + self.create_git_repository_link, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.delete_git_repository_link: gapic_v1.method.wrap_method( + self.delete_git_repository_link, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + 
deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.list_git_repository_links: gapic_v1.method.wrap_method( + self.list_git_repository_links, + default_timeout=None, + client_info=client_info, + ), + self.get_git_repository_link: gapic_v1.method.wrap_method( + self.get_git_repository_link, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.fetch_read_write_token: gapic_v1.method.wrap_method( + self.fetch_read_write_token, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.fetch_read_token: gapic_v1.method.wrap_method( + self.fetch_read_token, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.fetch_linkable_git_repositories: gapic_v1.method.wrap_method( + self.fetch_linkable_git_repositories, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.fetch_git_hub_installations: gapic_v1.method.wrap_method( + self.fetch_git_hub_installations, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.fetch_git_refs: gapic_v1.method.wrap_method( + self.fetch_git_refs, + default_timeout=None, + client_info=client_info, + ), + } + + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! 
+ """ + raise NotImplementedError() + + @property + def operations_client(self): + """Return the client designed to process long-running operations.""" + raise NotImplementedError() + + @property + def list_connections( + self, + ) -> Callable[ + [developer_connect.ListConnectionsRequest], + Union[ + developer_connect.ListConnectionsResponse, + Awaitable[developer_connect.ListConnectionsResponse], + ], + ]: + raise NotImplementedError() + + @property + def get_connection( + self, + ) -> Callable[ + [developer_connect.GetConnectionRequest], + Union[developer_connect.Connection, Awaitable[developer_connect.Connection]], + ]: + raise NotImplementedError() + + @property + def create_connection( + self, + ) -> Callable[ + [developer_connect.CreateConnectionRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def update_connection( + self, + ) -> Callable[ + [developer_connect.UpdateConnectionRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def delete_connection( + self, + ) -> Callable[ + [developer_connect.DeleteConnectionRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def create_git_repository_link( + self, + ) -> Callable[ + [developer_connect.CreateGitRepositoryLinkRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def delete_git_repository_link( + self, + ) -> Callable[ + [developer_connect.DeleteGitRepositoryLinkRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def list_git_repository_links( + self, + ) -> Callable[ + [developer_connect.ListGitRepositoryLinksRequest], + Union[ + developer_connect.ListGitRepositoryLinksResponse, + Awaitable[developer_connect.ListGitRepositoryLinksResponse], + ], + ]: + raise NotImplementedError() + + @property + def get_git_repository_link( + self, + ) -> Callable[ + [developer_connect.GetGitRepositoryLinkRequest], + Union[ + developer_connect.GitRepositoryLink, + Awaitable[developer_connect.GitRepositoryLink], + ], + ]: + raise NotImplementedError() + + @property + def fetch_read_write_token( + self, + ) -> Callable[ + [developer_connect.FetchReadWriteTokenRequest], + Union[ + developer_connect.FetchReadWriteTokenResponse, + Awaitable[developer_connect.FetchReadWriteTokenResponse], + ], + ]: + raise NotImplementedError() + + @property + def fetch_read_token( + self, + ) -> Callable[ + [developer_connect.FetchReadTokenRequest], + Union[ + developer_connect.FetchReadTokenResponse, + Awaitable[developer_connect.FetchReadTokenResponse], + ], + ]: + raise NotImplementedError() + + @property + def fetch_linkable_git_repositories( + self, + ) -> Callable[ + [developer_connect.FetchLinkableGitRepositoriesRequest], + Union[ + developer_connect.FetchLinkableGitRepositoriesResponse, + Awaitable[developer_connect.FetchLinkableGitRepositoriesResponse], + ], + ]: + raise NotImplementedError() + + @property + def fetch_git_hub_installations( + self, + ) -> Callable[ + [developer_connect.FetchGitHubInstallationsRequest], + Union[ + developer_connect.FetchGitHubInstallationsResponse, + Awaitable[developer_connect.FetchGitHubInstallationsResponse], + ], + ]: + raise NotImplementedError() + + @property + def fetch_git_refs( + self, + ) -> Callable[ + 
[developer_connect.FetchGitRefsRequest], + Union[ + developer_connect.FetchGitRefsResponse, + Awaitable[developer_connect.FetchGitRefsResponse], + ], + ]: + raise NotImplementedError() + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], + Union[ + operations_pb2.ListOperationsResponse, + Awaitable[operations_pb2.ListOperationsResponse], + ], + ]: + raise NotImplementedError() + + @property + def get_operation( + self, + ) -> Callable[ + [operations_pb2.GetOperationRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None,]: + raise NotImplementedError() + + @property + def delete_operation( + self, + ) -> Callable[[operations_pb2.DeleteOperationRequest], None,]: + raise NotImplementedError() + + @property + def get_location( + self, + ) -> Callable[ + [locations_pb2.GetLocationRequest], + Union[locations_pb2.Location, Awaitable[locations_pb2.Location]], + ]: + raise NotImplementedError() + + @property + def list_locations( + self, + ) -> Callable[ + [locations_pb2.ListLocationsRequest], + Union[ + locations_pb2.ListLocationsResponse, + Awaitable[locations_pb2.ListLocationsResponse], + ], + ]: + raise NotImplementedError() + + @property + def kind(self) -> str: + raise NotImplementedError() + + +__all__ = ("DeveloperConnectTransport",) diff --git a/packages/google-cloud-developerconnect/google/cloud/developerconnect_v1/services/developer_connect/transports/grpc.py b/packages/google-cloud-developerconnect/google/cloud/developerconnect_v1/services/developer_connect/transports/grpc.py new file mode 100644 index 000000000000..d0574885c994 --- /dev/null +++ b/packages/google-cloud-developerconnect/google/cloud/developerconnect_v1/services/developer_connect/transports/grpc.py @@ -0,0 +1,784 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, grpc_helpers, operations_v1 +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +import grpc # type: ignore + +from google.cloud.developerconnect_v1.types import developer_connect + +from .base import DEFAULT_CLIENT_INFO, DeveloperConnectTransport + + +class DeveloperConnectGrpcTransport(DeveloperConnectTransport): + """gRPC backend transport for DeveloperConnect. + + Service describing handlers for resources + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. 
+ + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + + _stubs: Dict[str, Callable] + + def __init__( + self, + *, + host: str = "developerconnect.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'developerconnect.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if a ``channel`` instance is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if a ``channel`` instance is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if a ``channel`` instance is provided. + channel (Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]]): + A ``Channel`` instance through which to make calls, or a Callable + that constructs and returns one. If set to None, ``self.create_channel`` + is used to create the channel. If a Callable is given, it will be called + with the same arguments as used in ``self.create_channel``. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if a ``channel`` instance is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. 
+ + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + self._operations_client: Optional[operations_v1.OperationsClient] = None + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if isinstance(channel, grpc.Channel): + # Ignore credentials if a channel was passed. + credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + # initialize with the provided callable or the default channel + channel_init = channel or type(self).create_channel + self._grpc_channel = channel_init( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @classmethod + def create_channel( + cls, + host: str = "developerconnect.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> grpc.Channel: + """Create and return a gRPC channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. 
These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + grpc.Channel: A gRPC channel object. + + Raises: + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + + return grpc_helpers.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs, + ) + + @property + def grpc_channel(self) -> grpc.Channel: + """Return the channel designed to connect to this service.""" + return self._grpc_channel + + @property + def operations_client(self) -> operations_v1.OperationsClient: + """Create the client designed to process long-running operations. + + This property caches on the instance; repeated calls return the same + client. + """ + # Quick check: Only create a new client if we do not already have one. + if self._operations_client is None: + self._operations_client = operations_v1.OperationsClient(self.grpc_channel) + + # Return the client from cache. + return self._operations_client + + @property + def list_connections( + self, + ) -> Callable[ + [developer_connect.ListConnectionsRequest], + developer_connect.ListConnectionsResponse, + ]: + r"""Return a callable for the list connections method over gRPC. + + Lists Connections in a given project and location. + + Returns: + Callable[[~.ListConnectionsRequest], + ~.ListConnectionsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_connections" not in self._stubs: + self._stubs["list_connections"] = self.grpc_channel.unary_unary( + "/google.cloud.developerconnect.v1.DeveloperConnect/ListConnections", + request_serializer=developer_connect.ListConnectionsRequest.serialize, + response_deserializer=developer_connect.ListConnectionsResponse.deserialize, + ) + return self._stubs["list_connections"] + + @property + def get_connection( + self, + ) -> Callable[ + [developer_connect.GetConnectionRequest], developer_connect.Connection + ]: + r"""Return a callable for the get connection method over gRPC. + + Gets details of a single Connection. + + Returns: + Callable[[~.GetConnectionRequest], + ~.Connection]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_connection" not in self._stubs: + self._stubs["get_connection"] = self.grpc_channel.unary_unary( + "/google.cloud.developerconnect.v1.DeveloperConnect/GetConnection", + request_serializer=developer_connect.GetConnectionRequest.serialize, + response_deserializer=developer_connect.Connection.deserialize, + ) + return self._stubs["get_connection"] + + @property + def create_connection( + self, + ) -> Callable[ + [developer_connect.CreateConnectionRequest], operations_pb2.Operation + ]: + r"""Return a callable for the create connection method over gRPC. 
+ + Creates a new Connection in a given project and + location. + + Returns: + Callable[[~.CreateConnectionRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_connection" not in self._stubs: + self._stubs["create_connection"] = self.grpc_channel.unary_unary( + "/google.cloud.developerconnect.v1.DeveloperConnect/CreateConnection", + request_serializer=developer_connect.CreateConnectionRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["create_connection"] + + @property + def update_connection( + self, + ) -> Callable[ + [developer_connect.UpdateConnectionRequest], operations_pb2.Operation + ]: + r"""Return a callable for the update connection method over gRPC. + + Updates the parameters of a single Connection. + + Returns: + Callable[[~.UpdateConnectionRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_connection" not in self._stubs: + self._stubs["update_connection"] = self.grpc_channel.unary_unary( + "/google.cloud.developerconnect.v1.DeveloperConnect/UpdateConnection", + request_serializer=developer_connect.UpdateConnectionRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["update_connection"] + + @property + def delete_connection( + self, + ) -> Callable[ + [developer_connect.DeleteConnectionRequest], operations_pb2.Operation + ]: + r"""Return a callable for the delete connection method over gRPC. + + Deletes a single Connection. + + Returns: + Callable[[~.DeleteConnectionRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_connection" not in self._stubs: + self._stubs["delete_connection"] = self.grpc_channel.unary_unary( + "/google.cloud.developerconnect.v1.DeveloperConnect/DeleteConnection", + request_serializer=developer_connect.DeleteConnectionRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["delete_connection"] + + @property + def create_git_repository_link( + self, + ) -> Callable[ + [developer_connect.CreateGitRepositoryLinkRequest], operations_pb2.Operation + ]: + r"""Return a callable for the create git repository link method over gRPC. + + Creates a GitRepositoryLink. Upon linking a Git + Repository, Developer Connect will configure the Git + Repository to send webhook events to Developer Connect. + Connections that use Firebase GitHub Application will + have events forwarded to the Firebase service. All other + Connections will have events forwarded to Cloud Build. + + Returns: + Callable[[~.CreateGitRepositoryLinkRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_git_repository_link" not in self._stubs: + self._stubs["create_git_repository_link"] = self.grpc_channel.unary_unary( + "/google.cloud.developerconnect.v1.DeveloperConnect/CreateGitRepositoryLink", + request_serializer=developer_connect.CreateGitRepositoryLinkRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["create_git_repository_link"] + + @property + def delete_git_repository_link( + self, + ) -> Callable[ + [developer_connect.DeleteGitRepositoryLinkRequest], operations_pb2.Operation + ]: + r"""Return a callable for the delete git repository link method over gRPC. + + Deletes a single GitRepositoryLink. + + Returns: + Callable[[~.DeleteGitRepositoryLinkRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_git_repository_link" not in self._stubs: + self._stubs["delete_git_repository_link"] = self.grpc_channel.unary_unary( + "/google.cloud.developerconnect.v1.DeveloperConnect/DeleteGitRepositoryLink", + request_serializer=developer_connect.DeleteGitRepositoryLinkRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["delete_git_repository_link"] + + @property + def list_git_repository_links( + self, + ) -> Callable[ + [developer_connect.ListGitRepositoryLinksRequest], + developer_connect.ListGitRepositoryLinksResponse, + ]: + r"""Return a callable for the list git repository links method over gRPC. + + Lists GitRepositoryLinks in a given project, + location, and connection. + + Returns: + Callable[[~.ListGitRepositoryLinksRequest], + ~.ListGitRepositoryLinksResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_git_repository_links" not in self._stubs: + self._stubs["list_git_repository_links"] = self.grpc_channel.unary_unary( + "/google.cloud.developerconnect.v1.DeveloperConnect/ListGitRepositoryLinks", + request_serializer=developer_connect.ListGitRepositoryLinksRequest.serialize, + response_deserializer=developer_connect.ListGitRepositoryLinksResponse.deserialize, + ) + return self._stubs["list_git_repository_links"] + + @property + def get_git_repository_link( + self, + ) -> Callable[ + [developer_connect.GetGitRepositoryLinkRequest], + developer_connect.GitRepositoryLink, + ]: + r"""Return a callable for the get git repository link method over gRPC. + + Gets details of a single GitRepositoryLink. + + Returns: + Callable[[~.GetGitRepositoryLinkRequest], + ~.GitRepositoryLink]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
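As the comments above note, each stub is built on first access and cached in ``self._stubs``, so repeated property reads reuse the same bound RPC on the channel. Application code normally reaches these RPCs through the generated client rather than the transport; a minimal sketch, assuming the standard ``DeveloperConnectClient`` surface for this package and an illustrative resource name:

    from google.cloud import developerconnect_v1

    client = developerconnect_v1.DeveloperConnectClient(transport="grpc")
    link = client.get_git_repository_link(
        name=(
            "projects/my-project/locations/us-central1"
            "/connections/my-connection/gitRepositoryLinks/my-repo"
        )
    )
    # GitRepositoryLink carries the clone URI of the linked repository.
    print(link.clone_uri)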
+ if "get_git_repository_link" not in self._stubs: + self._stubs["get_git_repository_link"] = self.grpc_channel.unary_unary( + "/google.cloud.developerconnect.v1.DeveloperConnect/GetGitRepositoryLink", + request_serializer=developer_connect.GetGitRepositoryLinkRequest.serialize, + response_deserializer=developer_connect.GitRepositoryLink.deserialize, + ) + return self._stubs["get_git_repository_link"] + + @property + def fetch_read_write_token( + self, + ) -> Callable[ + [developer_connect.FetchReadWriteTokenRequest], + developer_connect.FetchReadWriteTokenResponse, + ]: + r"""Return a callable for the fetch read write token method over gRPC. + + Fetches read/write token of a given + gitRepositoryLink. + + Returns: + Callable[[~.FetchReadWriteTokenRequest], + ~.FetchReadWriteTokenResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "fetch_read_write_token" not in self._stubs: + self._stubs["fetch_read_write_token"] = self.grpc_channel.unary_unary( + "/google.cloud.developerconnect.v1.DeveloperConnect/FetchReadWriteToken", + request_serializer=developer_connect.FetchReadWriteTokenRequest.serialize, + response_deserializer=developer_connect.FetchReadWriteTokenResponse.deserialize, + ) + return self._stubs["fetch_read_write_token"] + + @property + def fetch_read_token( + self, + ) -> Callable[ + [developer_connect.FetchReadTokenRequest], + developer_connect.FetchReadTokenResponse, + ]: + r"""Return a callable for the fetch read token method over gRPC. + + Fetches read token of a given gitRepositoryLink. + + Returns: + Callable[[~.FetchReadTokenRequest], + ~.FetchReadTokenResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "fetch_read_token" not in self._stubs: + self._stubs["fetch_read_token"] = self.grpc_channel.unary_unary( + "/google.cloud.developerconnect.v1.DeveloperConnect/FetchReadToken", + request_serializer=developer_connect.FetchReadTokenRequest.serialize, + response_deserializer=developer_connect.FetchReadTokenResponse.deserialize, + ) + return self._stubs["fetch_read_token"] + + @property + def fetch_linkable_git_repositories( + self, + ) -> Callable[ + [developer_connect.FetchLinkableGitRepositoriesRequest], + developer_connect.FetchLinkableGitRepositoriesResponse, + ]: + r"""Return a callable for the fetch linkable git + repositories method over gRPC. + + FetchLinkableGitRepositories returns a list of git + repositories from an SCM that are available to be added + to a Connection. + + Returns: + Callable[[~.FetchLinkableGitRepositoriesRequest], + ~.FetchLinkableGitRepositoriesResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "fetch_linkable_git_repositories" not in self._stubs: + self._stubs[ + "fetch_linkable_git_repositories" + ] = self.grpc_channel.unary_unary( + "/google.cloud.developerconnect.v1.DeveloperConnect/FetchLinkableGitRepositories", + request_serializer=developer_connect.FetchLinkableGitRepositoriesRequest.serialize, + response_deserializer=developer_connect.FetchLinkableGitRepositoriesResponse.deserialize, + ) + return self._stubs["fetch_linkable_git_repositories"] + + @property + def fetch_git_hub_installations( + self, + ) -> Callable[ + [developer_connect.FetchGitHubInstallationsRequest], + developer_connect.FetchGitHubInstallationsResponse, + ]: + r"""Return a callable for the fetch git hub installations method over gRPC. + + FetchGitHubInstallations returns the list of GitHub + Installations that are available to be added to a + Connection. For github.com, only installations + accessible to the authorizer token are returned. For + GitHub Enterprise, all installations are returned. + + Returns: + Callable[[~.FetchGitHubInstallationsRequest], + ~.FetchGitHubInstallationsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "fetch_git_hub_installations" not in self._stubs: + self._stubs["fetch_git_hub_installations"] = self.grpc_channel.unary_unary( + "/google.cloud.developerconnect.v1.DeveloperConnect/FetchGitHubInstallations", + request_serializer=developer_connect.FetchGitHubInstallationsRequest.serialize, + response_deserializer=developer_connect.FetchGitHubInstallationsResponse.deserialize, + ) + return self._stubs["fetch_git_hub_installations"] + + @property + def fetch_git_refs( + self, + ) -> Callable[ + [developer_connect.FetchGitRefsRequest], developer_connect.FetchGitRefsResponse + ]: + r"""Return a callable for the fetch git refs method over gRPC. + + Fetch the list of branches or tags for a given + repository. + + Returns: + Callable[[~.FetchGitRefsRequest], + ~.FetchGitRefsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "fetch_git_refs" not in self._stubs: + self._stubs["fetch_git_refs"] = self.grpc_channel.unary_unary( + "/google.cloud.developerconnect.v1.DeveloperConnect/FetchGitRefs", + request_serializer=developer_connect.FetchGitRefsRequest.serialize, + response_deserializer=developer_connect.FetchGitRefsResponse.deserialize, + ) + return self._stubs["fetch_git_refs"] + + def close(self): + self.grpc_channel.close() + + @property + def delete_operation( + self, + ) -> Callable[[operations_pb2.DeleteOperationRequest], None]: + r"""Return a callable for the delete_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
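The ``delete_operation``, ``cancel_operation``, ``get_operation`` and ``list_operations`` properties in this block are mixin stubs for the ``google.longrunning.Operations`` service and reuse the same channel as the DeveloperConnect RPCs. An illustrative sketch of listing and cleaning up finished operations through the generated client (``client`` as in the sketch above; the mixin methods are assumed to take the raw ``operations_pb2`` request messages, as is usual for GAPIC clients):

    from google.longrunning import operations_pb2

    response = client.list_operations(
        request=operations_pb2.ListOperationsRequest(
            name="projects/my-project/locations/us-central1"
        )
    )
    for op in response.operations:
        if op.done:
            # Remove completed operations that no longer need inspection.
            client.delete_operation(
                request=operations_pb2.DeleteOperationRequest(name=op.name)
            )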
+ if "delete_operation" not in self._stubs: + self._stubs["delete_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/DeleteOperation", + request_serializer=operations_pb2.DeleteOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["delete_operation"] + + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None]: + r"""Return a callable for the cancel_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "cancel_operation" not in self._stubs: + self._stubs["cancel_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/CancelOperation", + request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["cancel_operation"] + + @property + def get_operation( + self, + ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: + r"""Return a callable for the get_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_operation" not in self._stubs: + self._stubs["get_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/GetOperation", + request_serializer=operations_pb2.GetOperationRequest.SerializeToString, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["get_operation"] + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse + ]: + r"""Return a callable for the list_operations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_operations" not in self._stubs: + self._stubs["list_operations"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/ListOperations", + request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, + response_deserializer=operations_pb2.ListOperationsResponse.FromString, + ) + return self._stubs["list_operations"] + + @property + def list_locations( + self, + ) -> Callable[ + [locations_pb2.ListLocationsRequest], locations_pb2.ListLocationsResponse + ]: + r"""Return a callable for the list locations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_locations" not in self._stubs: + self._stubs["list_locations"] = self.grpc_channel.unary_unary( + "/google.cloud.location.Locations/ListLocations", + request_serializer=locations_pb2.ListLocationsRequest.SerializeToString, + response_deserializer=locations_pb2.ListLocationsResponse.FromString, + ) + return self._stubs["list_locations"] + + @property + def get_location( + self, + ) -> Callable[[locations_pb2.GetLocationRequest], locations_pb2.Location]: + r"""Return a callable for the list locations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_location" not in self._stubs: + self._stubs["get_location"] = self.grpc_channel.unary_unary( + "/google.cloud.location.Locations/GetLocation", + request_serializer=locations_pb2.GetLocationRequest.SerializeToString, + response_deserializer=locations_pb2.Location.FromString, + ) + return self._stubs["get_location"] + + @property + def kind(self) -> str: + return "grpc" + + +__all__ = ("DeveloperConnectGrpcTransport",) diff --git a/packages/google-cloud-developerconnect/google/cloud/developerconnect_v1/services/developer_connect/transports/grpc_asyncio.py b/packages/google-cloud-developerconnect/google/cloud/developerconnect_v1/services/developer_connect/transports/grpc_asyncio.py new file mode 100644 index 000000000000..6b90b43afb1a --- /dev/null +++ b/packages/google-cloud-developerconnect/google/cloud/developerconnect_v1/services/developer_connect/transports/grpc_asyncio.py @@ -0,0 +1,973 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1, grpc_helpers_async, operations_v1 +from google.api_core import retry_async as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +import grpc # type: ignore +from grpc.experimental import aio # type: ignore + +from google.cloud.developerconnect_v1.types import developer_connect + +from .base import DEFAULT_CLIENT_INFO, DeveloperConnectTransport +from .grpc import DeveloperConnectGrpcTransport + + +class DeveloperConnectGrpcAsyncIOTransport(DeveloperConnectTransport): + """gRPC AsyncIO backend transport for DeveloperConnect. + + Service describing handlers for resources + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + + _grpc_channel: aio.Channel + _stubs: Dict[str, Callable] = {} + + @classmethod + def create_channel( + cls, + host: str = "developerconnect.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> aio.Channel: + """Create and return a gRPC AsyncIO channel object. + Args: + host (Optional[str]): The host for the channel to use. 
+ credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + aio.Channel: A gRPC AsyncIO channel object. + """ + + return grpc_helpers_async.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs, + ) + + def __init__( + self, + *, + host: str = "developerconnect.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[Union[aio.Channel, Callable[..., aio.Channel]]] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'developerconnect.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if a ``channel`` instance is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if a ``channel`` instance is provided. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + channel (Optional[Union[aio.Channel, Callable[..., aio.Channel]]]): + A ``Channel`` instance through which to make calls, or a Callable + that constructs and returns one. If set to None, ``self.create_channel`` + is used to create the channel. If a Callable is given, it will be called + with the same arguments as used in ``self.create_channel``. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. 
It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if a ``channel`` instance is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + self._operations_client: Optional[operations_v1.OperationsAsyncClient] = None + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if isinstance(channel, aio.Channel): + # Ignore credentials if a channel was passed. + credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + # initialize with the provided callable or the default channel + channel_init = channel or type(self).create_channel + self._grpc_channel = channel_init( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. 
This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @property + def grpc_channel(self) -> aio.Channel: + """Create the channel designed to connect to this service. + + This property caches on the instance; repeated calls return + the same channel. + """ + # Return the channel from cache. + return self._grpc_channel + + @property + def operations_client(self) -> operations_v1.OperationsAsyncClient: + """Create the client designed to process long-running operations. + + This property caches on the instance; repeated calls return the same + client. + """ + # Quick check: Only create a new client if we do not already have one. + if self._operations_client is None: + self._operations_client = operations_v1.OperationsAsyncClient( + self.grpc_channel + ) + + # Return the client from cache. + return self._operations_client + + @property + def list_connections( + self, + ) -> Callable[ + [developer_connect.ListConnectionsRequest], + Awaitable[developer_connect.ListConnectionsResponse], + ]: + r"""Return a callable for the list connections method over gRPC. + + Lists Connections in a given project and location. + + Returns: + Callable[[~.ListConnectionsRequest], + Awaitable[~.ListConnectionsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_connections" not in self._stubs: + self._stubs["list_connections"] = self.grpc_channel.unary_unary( + "/google.cloud.developerconnect.v1.DeveloperConnect/ListConnections", + request_serializer=developer_connect.ListConnectionsRequest.serialize, + response_deserializer=developer_connect.ListConnectionsResponse.deserialize, + ) + return self._stubs["list_connections"] + + @property + def get_connection( + self, + ) -> Callable[ + [developer_connect.GetConnectionRequest], + Awaitable[developer_connect.Connection], + ]: + r"""Return a callable for the get connection method over gRPC. + + Gets details of a single Connection. + + Returns: + Callable[[~.GetConnectionRequest], + Awaitable[~.Connection]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_connection" not in self._stubs: + self._stubs["get_connection"] = self.grpc_channel.unary_unary( + "/google.cloud.developerconnect.v1.DeveloperConnect/GetConnection", + request_serializer=developer_connect.GetConnectionRequest.serialize, + response_deserializer=developer_connect.Connection.deserialize, + ) + return self._stubs["get_connection"] + + @property + def create_connection( + self, + ) -> Callable[ + [developer_connect.CreateConnectionRequest], Awaitable[operations_pb2.Operation] + ]: + r"""Return a callable for the create connection method over gRPC. + + Creates a new Connection in a given project and + location. + + Returns: + Callable[[~.CreateConnectionRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
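``create_connection`` (like the other mutating RPCs here) resolves to a ``google.longrunning.Operation`` message, so asyncio callers usually go through the async client and await the returned operation handle instead of calling these stubs directly. A minimal sketch, assuming the generated ``DeveloperConnectAsyncClient``; the project, location and ids are placeholders:

    import asyncio

    from google.cloud import developerconnect_v1


    async def create_connection_sketch() -> None:
        client = developerconnect_v1.DeveloperConnectAsyncClient()

        # A real Connection also needs a provider config (for example a
        # github_config); it is omitted here to keep the sketch small.
        operation = await client.create_connection(
            parent="projects/my-project/locations/us-central1",
            connection=developerconnect_v1.Connection(),
            connection_id="my-connection",
        )

        # The awaited handle is an AsyncOperation; result() waits for the
        # LRO to finish and returns the created Connection resource.
        connection = await operation.result()
        print(connection.name)


    asyncio.run(create_connection_sketch())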
+ if "create_connection" not in self._stubs: + self._stubs["create_connection"] = self.grpc_channel.unary_unary( + "/google.cloud.developerconnect.v1.DeveloperConnect/CreateConnection", + request_serializer=developer_connect.CreateConnectionRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["create_connection"] + + @property + def update_connection( + self, + ) -> Callable[ + [developer_connect.UpdateConnectionRequest], Awaitable[operations_pb2.Operation] + ]: + r"""Return a callable for the update connection method over gRPC. + + Updates the parameters of a single Connection. + + Returns: + Callable[[~.UpdateConnectionRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_connection" not in self._stubs: + self._stubs["update_connection"] = self.grpc_channel.unary_unary( + "/google.cloud.developerconnect.v1.DeveloperConnect/UpdateConnection", + request_serializer=developer_connect.UpdateConnectionRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["update_connection"] + + @property + def delete_connection( + self, + ) -> Callable[ + [developer_connect.DeleteConnectionRequest], Awaitable[operations_pb2.Operation] + ]: + r"""Return a callable for the delete connection method over gRPC. + + Deletes a single Connection. + + Returns: + Callable[[~.DeleteConnectionRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_connection" not in self._stubs: + self._stubs["delete_connection"] = self.grpc_channel.unary_unary( + "/google.cloud.developerconnect.v1.DeveloperConnect/DeleteConnection", + request_serializer=developer_connect.DeleteConnectionRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["delete_connection"] + + @property + def create_git_repository_link( + self, + ) -> Callable[ + [developer_connect.CreateGitRepositoryLinkRequest], + Awaitable[operations_pb2.Operation], + ]: + r"""Return a callable for the create git repository link method over gRPC. + + Creates a GitRepositoryLink. Upon linking a Git + Repository, Developer Connect will configure the Git + Repository to send webhook events to Developer Connect. + Connections that use Firebase GitHub Application will + have events forwarded to the Firebase service. All other + Connections will have events forwarded to Cloud Build. + + Returns: + Callable[[~.CreateGitRepositoryLinkRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "create_git_repository_link" not in self._stubs: + self._stubs["create_git_repository_link"] = self.grpc_channel.unary_unary( + "/google.cloud.developerconnect.v1.DeveloperConnect/CreateGitRepositoryLink", + request_serializer=developer_connect.CreateGitRepositoryLinkRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["create_git_repository_link"] + + @property + def delete_git_repository_link( + self, + ) -> Callable[ + [developer_connect.DeleteGitRepositoryLinkRequest], + Awaitable[operations_pb2.Operation], + ]: + r"""Return a callable for the delete git repository link method over gRPC. + + Deletes a single GitRepositoryLink. + + Returns: + Callable[[~.DeleteGitRepositoryLinkRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_git_repository_link" not in self._stubs: + self._stubs["delete_git_repository_link"] = self.grpc_channel.unary_unary( + "/google.cloud.developerconnect.v1.DeveloperConnect/DeleteGitRepositoryLink", + request_serializer=developer_connect.DeleteGitRepositoryLinkRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["delete_git_repository_link"] + + @property + def list_git_repository_links( + self, + ) -> Callable[ + [developer_connect.ListGitRepositoryLinksRequest], + Awaitable[developer_connect.ListGitRepositoryLinksResponse], + ]: + r"""Return a callable for the list git repository links method over gRPC. + + Lists GitRepositoryLinks in a given project, + location, and connection. + + Returns: + Callable[[~.ListGitRepositoryLinksRequest], + Awaitable[~.ListGitRepositoryLinksResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_git_repository_links" not in self._stubs: + self._stubs["list_git_repository_links"] = self.grpc_channel.unary_unary( + "/google.cloud.developerconnect.v1.DeveloperConnect/ListGitRepositoryLinks", + request_serializer=developer_connect.ListGitRepositoryLinksRequest.serialize, + response_deserializer=developer_connect.ListGitRepositoryLinksResponse.deserialize, + ) + return self._stubs["list_git_repository_links"] + + @property + def get_git_repository_link( + self, + ) -> Callable[ + [developer_connect.GetGitRepositoryLinkRequest], + Awaitable[developer_connect.GitRepositoryLink], + ]: + r"""Return a callable for the get git repository link method over gRPC. + + Gets details of a single GitRepositoryLink. + + Returns: + Callable[[~.GetGitRepositoryLinkRequest], + Awaitable[~.GitRepositoryLink]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "get_git_repository_link" not in self._stubs: + self._stubs["get_git_repository_link"] = self.grpc_channel.unary_unary( + "/google.cloud.developerconnect.v1.DeveloperConnect/GetGitRepositoryLink", + request_serializer=developer_connect.GetGitRepositoryLinkRequest.serialize, + response_deserializer=developer_connect.GitRepositoryLink.deserialize, + ) + return self._stubs["get_git_repository_link"] + + @property + def fetch_read_write_token( + self, + ) -> Callable[ + [developer_connect.FetchReadWriteTokenRequest], + Awaitable[developer_connect.FetchReadWriteTokenResponse], + ]: + r"""Return a callable for the fetch read write token method over gRPC. + + Fetches read/write token of a given + gitRepositoryLink. + + Returns: + Callable[[~.FetchReadWriteTokenRequest], + Awaitable[~.FetchReadWriteTokenResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "fetch_read_write_token" not in self._stubs: + self._stubs["fetch_read_write_token"] = self.grpc_channel.unary_unary( + "/google.cloud.developerconnect.v1.DeveloperConnect/FetchReadWriteToken", + request_serializer=developer_connect.FetchReadWriteTokenRequest.serialize, + response_deserializer=developer_connect.FetchReadWriteTokenResponse.deserialize, + ) + return self._stubs["fetch_read_write_token"] + + @property + def fetch_read_token( + self, + ) -> Callable[ + [developer_connect.FetchReadTokenRequest], + Awaitable[developer_connect.FetchReadTokenResponse], + ]: + r"""Return a callable for the fetch read token method over gRPC. + + Fetches read token of a given gitRepositoryLink. + + Returns: + Callable[[~.FetchReadTokenRequest], + Awaitable[~.FetchReadTokenResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "fetch_read_token" not in self._stubs: + self._stubs["fetch_read_token"] = self.grpc_channel.unary_unary( + "/google.cloud.developerconnect.v1.DeveloperConnect/FetchReadToken", + request_serializer=developer_connect.FetchReadTokenRequest.serialize, + response_deserializer=developer_connect.FetchReadTokenResponse.deserialize, + ) + return self._stubs["fetch_read_token"] + + @property + def fetch_linkable_git_repositories( + self, + ) -> Callable[ + [developer_connect.FetchLinkableGitRepositoriesRequest], + Awaitable[developer_connect.FetchLinkableGitRepositoriesResponse], + ]: + r"""Return a callable for the fetch linkable git + repositories method over gRPC. + + FetchLinkableGitRepositories returns a list of git + repositories from an SCM that are available to be added + to a Connection. + + Returns: + Callable[[~.FetchLinkableGitRepositoriesRequest], + Awaitable[~.FetchLinkableGitRepositoriesResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "fetch_linkable_git_repositories" not in self._stubs: + self._stubs[ + "fetch_linkable_git_repositories" + ] = self.grpc_channel.unary_unary( + "/google.cloud.developerconnect.v1.DeveloperConnect/FetchLinkableGitRepositories", + request_serializer=developer_connect.FetchLinkableGitRepositoriesRequest.serialize, + response_deserializer=developer_connect.FetchLinkableGitRepositoriesResponse.deserialize, + ) + return self._stubs["fetch_linkable_git_repositories"] + + @property + def fetch_git_hub_installations( + self, + ) -> Callable[ + [developer_connect.FetchGitHubInstallationsRequest], + Awaitable[developer_connect.FetchGitHubInstallationsResponse], + ]: + r"""Return a callable for the fetch git hub installations method over gRPC. + + FetchGitHubInstallations returns the list of GitHub + Installations that are available to be added to a + Connection. For github.com, only installations + accessible to the authorizer token are returned. For + GitHub Enterprise, all installations are returned. + + Returns: + Callable[[~.FetchGitHubInstallationsRequest], + Awaitable[~.FetchGitHubInstallationsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "fetch_git_hub_installations" not in self._stubs: + self._stubs["fetch_git_hub_installations"] = self.grpc_channel.unary_unary( + "/google.cloud.developerconnect.v1.DeveloperConnect/FetchGitHubInstallations", + request_serializer=developer_connect.FetchGitHubInstallationsRequest.serialize, + response_deserializer=developer_connect.FetchGitHubInstallationsResponse.deserialize, + ) + return self._stubs["fetch_git_hub_installations"] + + @property + def fetch_git_refs( + self, + ) -> Callable[ + [developer_connect.FetchGitRefsRequest], + Awaitable[developer_connect.FetchGitRefsResponse], + ]: + r"""Return a callable for the fetch git refs method over gRPC. + + Fetch the list of branches or tags for a given + repository. + + Returns: + Callable[[~.FetchGitRefsRequest], + Awaitable[~.FetchGitRefsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "fetch_git_refs" not in self._stubs: + self._stubs["fetch_git_refs"] = self.grpc_channel.unary_unary( + "/google.cloud.developerconnect.v1.DeveloperConnect/FetchGitRefs", + request_serializer=developer_connect.FetchGitRefsRequest.serialize, + response_deserializer=developer_connect.FetchGitRefsResponse.deserialize, + ) + return self._stubs["fetch_git_refs"] + + def _prep_wrapped_messages(self, client_info): + """Precompute the wrapped methods, overriding the base class method to use async wrappers.""" + self._wrapped_methods = { + self.list_connections: gapic_v1.method_async.wrap_method( + self.list_connections, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.get_connection: gapic_v1.method_async.wrap_method( + self.get_connection, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.create_connection: gapic_v1.method_async.wrap_method( + self.create_connection, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.update_connection: gapic_v1.method_async.wrap_method( + self.update_connection, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.delete_connection: gapic_v1.method_async.wrap_method( + self.delete_connection, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.create_git_repository_link: gapic_v1.method_async.wrap_method( + self.create_git_repository_link, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.delete_git_repository_link: gapic_v1.method_async.wrap_method( + self.delete_git_repository_link, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.list_git_repository_links: gapic_v1.method_async.wrap_method( + self.list_git_repository_links, + default_timeout=None, + client_info=client_info, + ), + self.get_git_repository_link: gapic_v1.method_async.wrap_method( + self.get_git_repository_link, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.fetch_read_write_token: gapic_v1.method_async.wrap_method( + self.fetch_read_write_token, + default_retry=retries.AsyncRetry( + initial=1.0, + 
maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.fetch_read_token: gapic_v1.method_async.wrap_method( + self.fetch_read_token, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.fetch_linkable_git_repositories: gapic_v1.method_async.wrap_method( + self.fetch_linkable_git_repositories, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.fetch_git_hub_installations: gapic_v1.method_async.wrap_method( + self.fetch_git_hub_installations, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.fetch_git_refs: gapic_v1.method_async.wrap_method( + self.fetch_git_refs, + default_timeout=None, + client_info=client_info, + ), + } + + def close(self): + return self.grpc_channel.close() + + @property + def delete_operation( + self, + ) -> Callable[[operations_pb2.DeleteOperationRequest], None]: + r"""Return a callable for the delete_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_operation" not in self._stubs: + self._stubs["delete_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/DeleteOperation", + request_serializer=operations_pb2.DeleteOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["delete_operation"] + + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None]: + r"""Return a callable for the cancel_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "cancel_operation" not in self._stubs: + self._stubs["cancel_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/CancelOperation", + request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["cancel_operation"] + + @property + def get_operation( + self, + ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: + r"""Return a callable for the get_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
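The ``AsyncRetry`` settings wired up in ``_prep_wrapped_messages`` above retry only ``ServiceUnavailable`` errors, with backoff ceilings growing geometrically from 1.0 s by a factor of 1.3 (1.0, 1.3, 1.69, about 2.2, ... seconds, capped at 10.0 s; ``google.api_core`` applies jitter below each ceiling) until the 60 s deadline is spent. The same policy can also be supplied per call; a small sketch using the ``retry``/``timeout`` keyword arguments accepted by the generated methods:

    from google.api_core import exceptions as core_exceptions
    from google.api_core import retry_async as retries

    custom_retry = retries.AsyncRetry(
        initial=1.0,     # first backoff ceiling, in seconds
        maximum=10.0,    # cap on any single backoff
        multiplier=1.3,  # growth factor between attempts
        predicate=retries.if_exception_type(core_exceptions.ServiceUnavailable),
        deadline=60.0,   # stop retrying once this much time has elapsed
    )

    # Applied to a single call on the async client, for example:
    #   response = await client.list_connections(
    #       parent="projects/my-project/locations/us-central1",
    #       retry=custom_retry,
    #       timeout=60.0,
    #   )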
+ if "get_operation" not in self._stubs: + self._stubs["get_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/GetOperation", + request_serializer=operations_pb2.GetOperationRequest.SerializeToString, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["get_operation"] + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse + ]: + r"""Return a callable for the list_operations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_operations" not in self._stubs: + self._stubs["list_operations"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/ListOperations", + request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, + response_deserializer=operations_pb2.ListOperationsResponse.FromString, + ) + return self._stubs["list_operations"] + + @property + def list_locations( + self, + ) -> Callable[ + [locations_pb2.ListLocationsRequest], locations_pb2.ListLocationsResponse + ]: + r"""Return a callable for the list locations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_locations" not in self._stubs: + self._stubs["list_locations"] = self.grpc_channel.unary_unary( + "/google.cloud.location.Locations/ListLocations", + request_serializer=locations_pb2.ListLocationsRequest.SerializeToString, + response_deserializer=locations_pb2.ListLocationsResponse.FromString, + ) + return self._stubs["list_locations"] + + @property + def get_location( + self, + ) -> Callable[[locations_pb2.GetLocationRequest], locations_pb2.Location]: + r"""Return a callable for the list locations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_location" not in self._stubs: + self._stubs["get_location"] = self.grpc_channel.unary_unary( + "/google.cloud.location.Locations/GetLocation", + request_serializer=locations_pb2.GetLocationRequest.SerializeToString, + response_deserializer=locations_pb2.Location.FromString, + ) + return self._stubs["get_location"] + + +__all__ = ("DeveloperConnectGrpcAsyncIOTransport",) diff --git a/packages/google-cloud-developerconnect/google/cloud/developerconnect_v1/services/developer_connect/transports/rest.py b/packages/google-cloud-developerconnect/google/cloud/developerconnect_v1/services/developer_connect/transports/rest.py new file mode 100644 index 000000000000..6fbc74ddaea4 --- /dev/null +++ b/packages/google-cloud-developerconnect/google/cloud/developerconnect_v1/services/developer_connect/transports/rest.py @@ -0,0 +1,2648 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +import dataclasses +import json # type: ignore +import re +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import ( + gapic_v1, + operations_v1, + path_template, + rest_helpers, + rest_streaming, +) +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.transport.requests import AuthorizedSession # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.protobuf import json_format +import grpc # type: ignore +from requests import __version__ as requests_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + + +from google.longrunning import operations_pb2 # type: ignore + +from google.cloud.developerconnect_v1.types import developer_connect + +from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO +from .base import DeveloperConnectTransport + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) + + +class DeveloperConnectRestInterceptor: + """Interceptor for DeveloperConnect. + + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. + Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the DeveloperConnectRestTransport. + + .. 
code-block:: python + class MyCustomDeveloperConnectInterceptor(DeveloperConnectRestInterceptor): + def pre_create_connection(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_create_connection(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_create_git_repository_link(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_create_git_repository_link(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_delete_connection(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_delete_connection(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_delete_git_repository_link(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_delete_git_repository_link(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_fetch_git_hub_installations(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_fetch_git_hub_installations(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_fetch_git_refs(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_fetch_git_refs(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_fetch_linkable_git_repositories(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_fetch_linkable_git_repositories(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_fetch_read_token(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_fetch_read_token(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_fetch_read_write_token(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_fetch_read_write_token(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_connection(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_connection(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_git_repository_link(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_git_repository_link(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_connections(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_connections(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_git_repository_links(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_git_repository_links(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_update_connection(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_update_connection(self, response): + 
logging.log(f"Received response: {response}") + return response + + transport = DeveloperConnectRestTransport(interceptor=MyCustomDeveloperConnectInterceptor()) + client = DeveloperConnectClient(transport=transport) + + + """ + + def pre_create_connection( + self, + request: developer_connect.CreateConnectionRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[developer_connect.CreateConnectionRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for create_connection + + Override in a subclass to manipulate the request or metadata + before they are sent to the DeveloperConnect server. + """ + return request, metadata + + def post_create_connection( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for create_connection + + Override in a subclass to manipulate the response + after it is returned by the DeveloperConnect server but before + it is returned to user code. + """ + return response + + def pre_create_git_repository_link( + self, + request: developer_connect.CreateGitRepositoryLinkRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + developer_connect.CreateGitRepositoryLinkRequest, Sequence[Tuple[str, str]] + ]: + """Pre-rpc interceptor for create_git_repository_link + + Override in a subclass to manipulate the request or metadata + before they are sent to the DeveloperConnect server. + """ + return request, metadata + + def post_create_git_repository_link( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for create_git_repository_link + + Override in a subclass to manipulate the response + after it is returned by the DeveloperConnect server but before + it is returned to user code. + """ + return response + + def pre_delete_connection( + self, + request: developer_connect.DeleteConnectionRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[developer_connect.DeleteConnectionRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for delete_connection + + Override in a subclass to manipulate the request or metadata + before they are sent to the DeveloperConnect server. + """ + return request, metadata + + def post_delete_connection( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for delete_connection + + Override in a subclass to manipulate the response + after it is returned by the DeveloperConnect server but before + it is returned to user code. + """ + return response + + def pre_delete_git_repository_link( + self, + request: developer_connect.DeleteGitRepositoryLinkRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + developer_connect.DeleteGitRepositoryLinkRequest, Sequence[Tuple[str, str]] + ]: + """Pre-rpc interceptor for delete_git_repository_link + + Override in a subclass to manipulate the request or metadata + before they are sent to the DeveloperConnect server. + """ + return request, metadata + + def post_delete_git_repository_link( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for delete_git_repository_link + + Override in a subclass to manipulate the response + after it is returned by the DeveloperConnect server but before + it is returned to user code. 
+ """ + return response + + def pre_fetch_git_hub_installations( + self, + request: developer_connect.FetchGitHubInstallationsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + developer_connect.FetchGitHubInstallationsRequest, Sequence[Tuple[str, str]] + ]: + """Pre-rpc interceptor for fetch_git_hub_installations + + Override in a subclass to manipulate the request or metadata + before they are sent to the DeveloperConnect server. + """ + return request, metadata + + def post_fetch_git_hub_installations( + self, response: developer_connect.FetchGitHubInstallationsResponse + ) -> developer_connect.FetchGitHubInstallationsResponse: + """Post-rpc interceptor for fetch_git_hub_installations + + Override in a subclass to manipulate the response + after it is returned by the DeveloperConnect server but before + it is returned to user code. + """ + return response + + def pre_fetch_git_refs( + self, + request: developer_connect.FetchGitRefsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[developer_connect.FetchGitRefsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for fetch_git_refs + + Override in a subclass to manipulate the request or metadata + before they are sent to the DeveloperConnect server. + """ + return request, metadata + + def post_fetch_git_refs( + self, response: developer_connect.FetchGitRefsResponse + ) -> developer_connect.FetchGitRefsResponse: + """Post-rpc interceptor for fetch_git_refs + + Override in a subclass to manipulate the response + after it is returned by the DeveloperConnect server but before + it is returned to user code. + """ + return response + + def pre_fetch_linkable_git_repositories( + self, + request: developer_connect.FetchLinkableGitRepositoriesRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + developer_connect.FetchLinkableGitRepositoriesRequest, Sequence[Tuple[str, str]] + ]: + """Pre-rpc interceptor for fetch_linkable_git_repositories + + Override in a subclass to manipulate the request or metadata + before they are sent to the DeveloperConnect server. + """ + return request, metadata + + def post_fetch_linkable_git_repositories( + self, response: developer_connect.FetchLinkableGitRepositoriesResponse + ) -> developer_connect.FetchLinkableGitRepositoriesResponse: + """Post-rpc interceptor for fetch_linkable_git_repositories + + Override in a subclass to manipulate the response + after it is returned by the DeveloperConnect server but before + it is returned to user code. + """ + return response + + def pre_fetch_read_token( + self, + request: developer_connect.FetchReadTokenRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[developer_connect.FetchReadTokenRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for fetch_read_token + + Override in a subclass to manipulate the request or metadata + before they are sent to the DeveloperConnect server. + """ + return request, metadata + + def post_fetch_read_token( + self, response: developer_connect.FetchReadTokenResponse + ) -> developer_connect.FetchReadTokenResponse: + """Post-rpc interceptor for fetch_read_token + + Override in a subclass to manipulate the response + after it is returned by the DeveloperConnect server but before + it is returned to user code. 
+ """ + return response + + def pre_fetch_read_write_token( + self, + request: developer_connect.FetchReadWriteTokenRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[developer_connect.FetchReadWriteTokenRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for fetch_read_write_token + + Override in a subclass to manipulate the request or metadata + before they are sent to the DeveloperConnect server. + """ + return request, metadata + + def post_fetch_read_write_token( + self, response: developer_connect.FetchReadWriteTokenResponse + ) -> developer_connect.FetchReadWriteTokenResponse: + """Post-rpc interceptor for fetch_read_write_token + + Override in a subclass to manipulate the response + after it is returned by the DeveloperConnect server but before + it is returned to user code. + """ + return response + + def pre_get_connection( + self, + request: developer_connect.GetConnectionRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[developer_connect.GetConnectionRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_connection + + Override in a subclass to manipulate the request or metadata + before they are sent to the DeveloperConnect server. + """ + return request, metadata + + def post_get_connection( + self, response: developer_connect.Connection + ) -> developer_connect.Connection: + """Post-rpc interceptor for get_connection + + Override in a subclass to manipulate the response + after it is returned by the DeveloperConnect server but before + it is returned to user code. + """ + return response + + def pre_get_git_repository_link( + self, + request: developer_connect.GetGitRepositoryLinkRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + developer_connect.GetGitRepositoryLinkRequest, Sequence[Tuple[str, str]] + ]: + """Pre-rpc interceptor for get_git_repository_link + + Override in a subclass to manipulate the request or metadata + before they are sent to the DeveloperConnect server. + """ + return request, metadata + + def post_get_git_repository_link( + self, response: developer_connect.GitRepositoryLink + ) -> developer_connect.GitRepositoryLink: + """Post-rpc interceptor for get_git_repository_link + + Override in a subclass to manipulate the response + after it is returned by the DeveloperConnect server but before + it is returned to user code. + """ + return response + + def pre_list_connections( + self, + request: developer_connect.ListConnectionsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[developer_connect.ListConnectionsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_connections + + Override in a subclass to manipulate the request or metadata + before they are sent to the DeveloperConnect server. + """ + return request, metadata + + def post_list_connections( + self, response: developer_connect.ListConnectionsResponse + ) -> developer_connect.ListConnectionsResponse: + """Post-rpc interceptor for list_connections + + Override in a subclass to manipulate the response + after it is returned by the DeveloperConnect server but before + it is returned to user code. 
+ """ + return response + + def pre_list_git_repository_links( + self, + request: developer_connect.ListGitRepositoryLinksRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + developer_connect.ListGitRepositoryLinksRequest, Sequence[Tuple[str, str]] + ]: + """Pre-rpc interceptor for list_git_repository_links + + Override in a subclass to manipulate the request or metadata + before they are sent to the DeveloperConnect server. + """ + return request, metadata + + def post_list_git_repository_links( + self, response: developer_connect.ListGitRepositoryLinksResponse + ) -> developer_connect.ListGitRepositoryLinksResponse: + """Post-rpc interceptor for list_git_repository_links + + Override in a subclass to manipulate the response + after it is returned by the DeveloperConnect server but before + it is returned to user code. + """ + return response + + def pre_update_connection( + self, + request: developer_connect.UpdateConnectionRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[developer_connect.UpdateConnectionRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for update_connection + + Override in a subclass to manipulate the request or metadata + before they are sent to the DeveloperConnect server. + """ + return request, metadata + + def post_update_connection( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for update_connection + + Override in a subclass to manipulate the response + after it is returned by the DeveloperConnect server but before + it is returned to user code. + """ + return response + + def pre_get_location( + self, + request: locations_pb2.GetLocationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[locations_pb2.GetLocationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_location + + Override in a subclass to manipulate the request or metadata + before they are sent to the DeveloperConnect server. + """ + return request, metadata + + def post_get_location( + self, response: locations_pb2.Location + ) -> locations_pb2.Location: + """Post-rpc interceptor for get_location + + Override in a subclass to manipulate the response + after it is returned by the DeveloperConnect server but before + it is returned to user code. + """ + return response + + def pre_list_locations( + self, + request: locations_pb2.ListLocationsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[locations_pb2.ListLocationsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_locations + + Override in a subclass to manipulate the request or metadata + before they are sent to the DeveloperConnect server. + """ + return request, metadata + + def post_list_locations( + self, response: locations_pb2.ListLocationsResponse + ) -> locations_pb2.ListLocationsResponse: + """Post-rpc interceptor for list_locations + + Override in a subclass to manipulate the response + after it is returned by the DeveloperConnect server but before + it is returned to user code. + """ + return response + + def pre_cancel_operation( + self, + request: operations_pb2.CancelOperationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[operations_pb2.CancelOperationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for cancel_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the DeveloperConnect server. 
+ """ + return request, metadata + + def post_cancel_operation(self, response: None) -> None: + """Post-rpc interceptor for cancel_operation + + Override in a subclass to manipulate the response + after it is returned by the DeveloperConnect server but before + it is returned to user code. + """ + return response + + def pre_delete_operation( + self, + request: operations_pb2.DeleteOperationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[operations_pb2.DeleteOperationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for delete_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the DeveloperConnect server. + """ + return request, metadata + + def post_delete_operation(self, response: None) -> None: + """Post-rpc interceptor for delete_operation + + Override in a subclass to manipulate the response + after it is returned by the DeveloperConnect server but before + it is returned to user code. + """ + return response + + def pre_get_operation( + self, + request: operations_pb2.GetOperationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[operations_pb2.GetOperationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the DeveloperConnect server. + """ + return request, metadata + + def post_get_operation( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for get_operation + + Override in a subclass to manipulate the response + after it is returned by the DeveloperConnect server but before + it is returned to user code. + """ + return response + + def pre_list_operations( + self, + request: operations_pb2.ListOperationsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[operations_pb2.ListOperationsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_operations + + Override in a subclass to manipulate the request or metadata + before they are sent to the DeveloperConnect server. + """ + return request, metadata + + def post_list_operations( + self, response: operations_pb2.ListOperationsResponse + ) -> operations_pb2.ListOperationsResponse: + """Post-rpc interceptor for list_operations + + Override in a subclass to manipulate the response + after it is returned by the DeveloperConnect server but before + it is returned to user code. + """ + return response + + +@dataclasses.dataclass +class DeveloperConnectRestStub: + _session: AuthorizedSession + _host: str + _interceptor: DeveloperConnectRestInterceptor + + +class DeveloperConnectRestTransport(DeveloperConnectTransport): + """REST backend transport for DeveloperConnect. + + Service describing handlers for resources + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. 
+ + It sends JSON representations of protocol buffers over HTTP/1.1 + + """ + + def __init__( + self, + *, + host: str = "developerconnect.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", + interceptor: Optional[DeveloperConnectRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'developerconnect.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. It is ignored + if ``channel`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. + """ + # Run the base constructor + # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. + # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the + # credentials object + maybe_url_match = re.match("^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host) + if maybe_url_match is None: + raise ValueError( + f"Unexpected hostname structure: {host}" + ) # pragma: NO COVER + + url_match_items = maybe_url_match.groupdict() + + host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host + + super().__init__( + host=host, + credentials=credentials, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + self._session = AuthorizedSession( + self._credentials, default_host=self.DEFAULT_HOST + ) + self._operations_client: Optional[operations_v1.AbstractOperationsClient] = None + if client_cert_source_for_mtls: + self._session.configure_mtls_channel(client_cert_source_for_mtls) + self._interceptor = interceptor or DeveloperConnectRestInterceptor() + self._prep_wrapped_messages(client_info) + + @property + def operations_client(self) -> operations_v1.AbstractOperationsClient: + """Create the client designed to process long-running operations. + + This property caches on the instance; repeated calls return the same + client.
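        For illustration only (the operation name below is a made-up example and ``transport`` stands for an already constructed ``DeveloperConnectRestTransport``), the returned client can be used to poll a long-running operation:

        .. code-block:: python

            ops_client = transport.operations_client
            operation = ops_client.get_operation(
                name="projects/my-project/locations/us-central1/operations/operation-123"
            )
            if operation.done:
                print("The long-running operation has completed.")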
+ """ + # Only create a new client if we do not already have one. + if self._operations_client is None: + http_options: Dict[str, List[Dict[str, str]]] = { + "google.longrunning.Operations.CancelOperation": [ + { + "method": "post", + "uri": "/v1/{name=projects/*/locations/*/operations/*}:cancel", + "body": "*", + }, + ], + "google.longrunning.Operations.DeleteOperation": [ + { + "method": "delete", + "uri": "/v1/{name=projects/*/locations/*/operations/*}", + }, + ], + "google.longrunning.Operations.GetOperation": [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/operations/*}", + }, + ], + "google.longrunning.Operations.ListOperations": [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*}/operations", + }, + ], + } + + rest_transport = operations_v1.OperationsRestTransport( + host=self._host, + # use the credentials which are saved + credentials=self._credentials, + scopes=self._scopes, + http_options=http_options, + path_prefix="v1", + ) + + self._operations_client = operations_v1.AbstractOperationsClient( + transport=rest_transport + ) + + # Return the client from cache. + return self._operations_client + + class _CreateConnection(DeveloperConnectRestStub): + def __hash__(self): + return hash("CreateConnection") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "connectionId": "", + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: developer_connect.CreateConnectionRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the create connection method over HTTP. + + Args: + request (~.developer_connect.CreateConnectionRequest): + The request object. Message for creating a Connection + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{parent=projects/*/locations/*}/connections", + "body": "connection", + }, + ] + request, metadata = self._interceptor.pre_create_connection( + request, metadata + ) + pb_request = developer_connect.CreateConnectionRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_create_connection(resp) + return resp + + class _CreateGitRepositoryLink(DeveloperConnectRestStub): + def __hash__(self): + return hash("CreateGitRepositoryLink") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "gitRepositoryLinkId": "", + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: developer_connect.CreateGitRepositoryLinkRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the create git repository + link method over HTTP. + + Args: + request (~.developer_connect.CreateGitRepositoryLinkRequest): + The request object. Message for creating a + GitRepositoryLink + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{parent=projects/*/locations/*/connections/*}/gitRepositoryLinks", + "body": "git_repository_link", + }, + ] + request, metadata = self._interceptor.pre_create_git_repository_link( + request, metadata + ) + pb_request = developer_connect.CreateGitRepositoryLinkRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_create_git_repository_link(resp) + return resp + + class _DeleteConnection(DeveloperConnectRestStub): + def __hash__(self): + return hash("DeleteConnection") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: developer_connect.DeleteConnectionRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the delete connection method over HTTP. + + Args: + request (~.developer_connect.DeleteConnectionRequest): + The request object. Message for deleting a Connection + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v1/{name=projects/*/locations/*/connections/*}", + }, + ] + request, metadata = self._interceptor.pre_delete_connection( + request, metadata + ) + pb_request = developer_connect.DeleteConnectionRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_delete_connection(resp) + return resp + + class _DeleteGitRepositoryLink(DeveloperConnectRestStub): + def __hash__(self): + return hash("DeleteGitRepositoryLink") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: developer_connect.DeleteGitRepositoryLinkRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the delete git repository + link method over HTTP. + + Args: + request (~.developer_connect.DeleteGitRepositoryLinkRequest): + The request object. Message for deleting a + GitRepositoryLink + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v1/{name=projects/*/locations/*/connections/*/gitRepositoryLinks/*}", + }, + ] + request, metadata = self._interceptor.pre_delete_git_repository_link( + request, metadata + ) + pb_request = developer_connect.DeleteGitRepositoryLinkRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_delete_git_repository_link(resp) + return resp + + class _FetchGitHubInstallations(DeveloperConnectRestStub): + def __hash__(self): + return hash("FetchGitHubInstallations") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: developer_connect.FetchGitHubInstallationsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> developer_connect.FetchGitHubInstallationsResponse: + r"""Call the fetch git hub + installations method over HTTP. + + Args: + request (~.developer_connect.FetchGitHubInstallationsRequest): + The request object. Request for fetching github + installations. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.developer_connect.FetchGitHubInstallationsResponse: + Response of fetching github + installations. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{connection=projects/*/locations/*/connections/*}:fetchGitHubInstallations", + }, + ] + request, metadata = self._interceptor.pre_fetch_git_hub_installations( + request, metadata + ) + pb_request = developer_connect.FetchGitHubInstallationsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = developer_connect.FetchGitHubInstallationsResponse() + pb_resp = developer_connect.FetchGitHubInstallationsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_fetch_git_hub_installations(resp) + return resp + + class _FetchGitRefs(DeveloperConnectRestStub): + def __hash__(self): + return hash("FetchGitRefs") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "refType": {}, + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: developer_connect.FetchGitRefsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> developer_connect.FetchGitRefsResponse: + r"""Call the fetch git refs method over HTTP. + + Args: + request (~.developer_connect.FetchGitRefsRequest): + The request object. Request for fetching git refs. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.developer_connect.FetchGitRefsResponse: + Response for fetching git refs. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{git_repository_link=projects/*/locations/*/connections/*/gitRepositoryLinks/*}:fetchGitRefs", + }, + ] + request, metadata = self._interceptor.pre_fetch_git_refs(request, metadata) + pb_request = developer_connect.FetchGitRefsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = developer_connect.FetchGitRefsResponse() + pb_resp = developer_connect.FetchGitRefsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_fetch_git_refs(resp) + return resp + + class _FetchLinkableGitRepositories(DeveloperConnectRestStub): + def __hash__(self): + return hash("FetchLinkableGitRepositories") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: developer_connect.FetchLinkableGitRepositoriesRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> developer_connect.FetchLinkableGitRepositoriesResponse: + r"""Call the fetch linkable git + repositories method over HTTP. + + Args: + request (~.developer_connect.FetchLinkableGitRepositoriesRequest): + The request object. Request message for + FetchLinkableGitRepositoriesRequest. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.developer_connect.FetchLinkableGitRepositoriesResponse: + Response message for + FetchLinkableGitRepositories. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{connection=projects/*/locations/*/connections/*}:fetchLinkableGitRepositories", + }, + ] + request, metadata = self._interceptor.pre_fetch_linkable_git_repositories( + request, metadata + ) + pb_request = developer_connect.FetchLinkableGitRepositoriesRequest.pb( + request + ) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = developer_connect.FetchLinkableGitRepositoriesResponse() + pb_resp = developer_connect.FetchLinkableGitRepositoriesResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_fetch_linkable_git_repositories(resp) + return resp + + class _FetchReadToken(DeveloperConnectRestStub): + def __hash__(self): + return hash("FetchReadToken") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: developer_connect.FetchReadTokenRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> developer_connect.FetchReadTokenResponse: + r"""Call the fetch read token method over HTTP. + + Args: + request (~.developer_connect.FetchReadTokenRequest): + The request object. Message for fetching SCM read token. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.developer_connect.FetchReadTokenResponse: + Message for responding to get read + token. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{git_repository_link=projects/*/locations/*/connections/*/gitRepositoryLinks/*}:fetchReadToken", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_fetch_read_token( + request, metadata + ) + pb_request = developer_connect.FetchReadTokenRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = developer_connect.FetchReadTokenResponse() + pb_resp = developer_connect.FetchReadTokenResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_fetch_read_token(resp) + return resp + + class _FetchReadWriteToken(DeveloperConnectRestStub): + def __hash__(self): + return hash("FetchReadWriteToken") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: developer_connect.FetchReadWriteTokenRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> developer_connect.FetchReadWriteTokenResponse: + r"""Call the fetch read write token method over HTTP. + + Args: + request (~.developer_connect.FetchReadWriteTokenRequest): + The request object. Message for fetching SCM read/write + token. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.developer_connect.FetchReadWriteTokenResponse: + Message for responding to get + read/write token. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{git_repository_link=projects/*/locations/*/connections/*/gitRepositoryLinks/*}:fetchReadWriteToken", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_fetch_read_write_token( + request, metadata + ) + pb_request = developer_connect.FetchReadWriteTokenRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = developer_connect.FetchReadWriteTokenResponse() + pb_resp = developer_connect.FetchReadWriteTokenResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_fetch_read_write_token(resp) + return resp + + class _GetConnection(DeveloperConnectRestStub): + def __hash__(self): + return hash("GetConnection") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: developer_connect.GetConnectionRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> developer_connect.Connection: + r"""Call the get connection method over HTTP. + + Args: + request (~.developer_connect.GetConnectionRequest): + The request object. Message for getting a Connection + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + ~.developer_connect.Connection: + Message describing Connection object + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/connections/*}", + }, + ] + request, metadata = self._interceptor.pre_get_connection(request, metadata) + pb_request = developer_connect.GetConnectionRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = developer_connect.Connection() + pb_resp = developer_connect.Connection.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_connection(resp) + return resp + + class _GetGitRepositoryLink(DeveloperConnectRestStub): + def __hash__(self): + return hash("GetGitRepositoryLink") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: developer_connect.GetGitRepositoryLinkRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> developer_connect.GitRepositoryLink: + r"""Call the get git repository link method over HTTP. + + Args: + request (~.developer_connect.GetGitRepositoryLinkRequest): + The request object. Message for getting a + GitRepositoryLink + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + ~.developer_connect.GitRepositoryLink: + Message describing the + GitRepositoryLink object + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/connections/*/gitRepositoryLinks/*}", + }, + ] + request, metadata = self._interceptor.pre_get_git_repository_link( + request, metadata + ) + pb_request = developer_connect.GetGitRepositoryLinkRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = developer_connect.GitRepositoryLink() + pb_resp = developer_connect.GitRepositoryLink.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_git_repository_link(resp) + return resp + + class _ListConnections(DeveloperConnectRestStub): + def __hash__(self): + return hash("ListConnections") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: developer_connect.ListConnectionsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> developer_connect.ListConnectionsResponse: + r"""Call the list connections method over HTTP. + + Args: + request (~.developer_connect.ListConnectionsRequest): + The request object. Message for requesting list of + Connections + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + ~.developer_connect.ListConnectionsResponse: + Message for response to listing + Connections + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{parent=projects/*/locations/*}/connections", + }, + ] + request, metadata = self._interceptor.pre_list_connections( + request, metadata + ) + pb_request = developer_connect.ListConnectionsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = developer_connect.ListConnectionsResponse() + pb_resp = developer_connect.ListConnectionsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_connections(resp) + return resp + + class _ListGitRepositoryLinks(DeveloperConnectRestStub): + def __hash__(self): + return hash("ListGitRepositoryLinks") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: developer_connect.ListGitRepositoryLinksRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> developer_connect.ListGitRepositoryLinksResponse: + r"""Call the list git repository links method over HTTP. + + Args: + request (~.developer_connect.ListGitRepositoryLinksRequest): + The request object. Message for requesting a list of + GitRepositoryLinks + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + ~.developer_connect.ListGitRepositoryLinksResponse: + Message for response to listing + GitRepositoryLinks + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{parent=projects/*/locations/*/connections/*}/gitRepositoryLinks", + }, + ] + request, metadata = self._interceptor.pre_list_git_repository_links( + request, metadata + ) + pb_request = developer_connect.ListGitRepositoryLinksRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = developer_connect.ListGitRepositoryLinksResponse() + pb_resp = developer_connect.ListGitRepositoryLinksResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_git_repository_links(resp) + return resp + + class _UpdateConnection(DeveloperConnectRestStub): + def __hash__(self): + return hash("UpdateConnection") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "updateMask": {}, + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: developer_connect.UpdateConnectionRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the update connection method over HTTP. + + Args: + request (~.developer_connect.UpdateConnectionRequest): + The request object. Message for updating a Connection + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
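Each of these REST stubs follows the same pattern: the request proto is transcoded against its `http_options`, the bound path becomes the request URI, and whatever fields remain become query parameters. To make that step concrete, here is a small illustrative sketch (not part of the generated patch; the project, location, and connection IDs are placeholders) of what transcoding yields for a `GetConnectionRequest`, using the same HTTP rule declared in the `_GetConnection` stub above:

```python
from google.api_core import path_template
from google.cloud.developerconnect_v1.types import developer_connect

# Same HTTP rule as the _GetConnection stub above.
http_options = [
    {"method": "get", "uri": "/v1/{name=projects/*/locations/*/connections/*}"},
]

request = developer_connect.GetConnectionRequest(
    name="projects/my-project/locations/us-central1/connections/my-conn",
)

# The stubs call path_template.transcode() with the raw protobuf message.
transcoded = path_template.transcode(
    http_options, developer_connect.GetConnectionRequest.pb(request)
)
print(transcoded["method"])  # get
print(transcoded["uri"])     # /v1/projects/my-project/locations/us-central1/connections/my-conn
print(transcoded["query_params"])  # fields not bound into the path; empty here
```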
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "patch", + "uri": "/v1/{connection.name=projects/*/locations/*/connections/*}", + "body": "connection", + }, + ] + request, metadata = self._interceptor.pre_update_connection( + request, metadata + ) + pb_request = developer_connect.UpdateConnectionRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_update_connection(resp) + return resp + + @property + def create_connection( + self, + ) -> Callable[ + [developer_connect.CreateConnectionRequest], operations_pb2.Operation + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._CreateConnection(self._session, self._host, self._interceptor) # type: ignore + + @property + def create_git_repository_link( + self, + ) -> Callable[ + [developer_connect.CreateGitRepositoryLinkRequest], operations_pb2.Operation + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._CreateGitRepositoryLink(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete_connection( + self, + ) -> Callable[ + [developer_connect.DeleteConnectionRequest], operations_pb2.Operation + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._DeleteConnection(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete_git_repository_link( + self, + ) -> Callable[ + [developer_connect.DeleteGitRepositoryLinkRequest], operations_pb2.Operation + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._DeleteGitRepositoryLink(self._session, self._host, self._interceptor) # type: ignore + + @property + def fetch_git_hub_installations( + self, + ) -> Callable[ + [developer_connect.FetchGitHubInstallationsRequest], + developer_connect.FetchGitHubInstallationsResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._FetchGitHubInstallations(self._session, self._host, self._interceptor) # type: ignore + + @property + def fetch_git_refs( + self, + ) -> Callable[ + [developer_connect.FetchGitRefsRequest], developer_connect.FetchGitRefsResponse + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._FetchGitRefs(self._session, self._host, self._interceptor) # type: ignore + + @property + def fetch_linkable_git_repositories( + self, + ) -> Callable[ + [developer_connect.FetchLinkableGitRepositoriesRequest], + developer_connect.FetchLinkableGitRepositoriesResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._FetchLinkableGitRepositories(self._session, self._host, self._interceptor) # type: ignore + + @property + def fetch_read_token( + self, + ) -> Callable[ + [developer_connect.FetchReadTokenRequest], + developer_connect.FetchReadTokenResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._FetchReadToken(self._session, self._host, self._interceptor) # type: ignore + + @property + def fetch_read_write_token( + self, + ) -> Callable[ + [developer_connect.FetchReadWriteTokenRequest], + developer_connect.FetchReadWriteTokenResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._FetchReadWriteToken(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_connection( + self, + ) -> Callable[ + [developer_connect.GetConnectionRequest], developer_connect.Connection + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetConnection(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_git_repository_link( + self, + ) -> Callable[ + [developer_connect.GetGitRepositoryLinkRequest], + developer_connect.GitRepositoryLink, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetGitRepositoryLink(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_connections( + self, + ) -> Callable[ + [developer_connect.ListConnectionsRequest], + developer_connect.ListConnectionsResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListConnections(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_git_repository_links( + self, + ) -> Callable[ + [developer_connect.ListGitRepositoryLinksRequest], + developer_connect.ListGitRepositoryLinksResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._ListGitRepositoryLinks(self._session, self._host, self._interceptor) # type: ignore + + @property + def update_connection( + self, + ) -> Callable[ + [developer_connect.UpdateConnectionRequest], operations_pb2.Operation + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._UpdateConnection(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_location(self): + return self._GetLocation(self._session, self._host, self._interceptor) # type: ignore + + class _GetLocation(DeveloperConnectRestStub): + def __call__( + self, + request: locations_pb2.GetLocationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.Location: + r"""Call the get location method over HTTP. + + Args: + request (locations_pb2.GetLocationRequest): + The request object for GetLocation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + locations_pb2.Location: Response from GetLocation method. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*}", + }, + ] + + request, metadata = self._interceptor.pre_get_location(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = locations_pb2.Location() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_get_location(resp) + return resp + + @property + def list_locations(self): + return self._ListLocations(self._session, self._host, self._interceptor) # type: ignore + + class _ListLocations(DeveloperConnectRestStub): + def __call__( + self, + request: locations_pb2.ListLocationsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.ListLocationsResponse: + r"""Call the list locations method over HTTP. + + Args: + request (locations_pb2.ListLocationsRequest): + The request object for ListLocations method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + locations_pb2.ListLocationsResponse: Response from ListLocations method. 
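The `_GetLocation` and `_ListLocations` stubs back the standard locations mixins. A minimal usage sketch follows, assuming the generated client is exposed as `developerconnect_v1.DeveloperConnectClient` (the client class itself is not part of this diff) and using a placeholder project ID:

```python
from google.cloud import developerconnect_v1
from google.cloud.location import locations_pb2

client = developerconnect_v1.DeveloperConnectClient(transport="rest")

# ListLocations maps to GET /v1/{name=projects/*}/locations, as declared above.
response = client.list_locations(
    locations_pb2.ListLocationsRequest(name="projects/my-project")
)
for location in response.locations:
    print(location.name)
```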
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*}/locations", + }, + ] + + request, metadata = self._interceptor.pre_list_locations(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = locations_pb2.ListLocationsResponse() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_list_locations(resp) + return resp + + @property + def cancel_operation(self): + return self._CancelOperation(self._session, self._host, self._interceptor) # type: ignore + + class _CancelOperation(DeveloperConnectRestStub): + def __call__( + self, + request: operations_pb2.CancelOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Call the cancel operation method over HTTP. + + Args: + request (operations_pb2.CancelOperationRequest): + The request object for CancelOperation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{name=projects/*/locations/*/operations/*}:cancel", + "body": "*", + }, + ] + + request, metadata = self._interceptor.pre_cancel_operation( + request, metadata + ) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + body = json.dumps(transcoded_request["body"]) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + return self._interceptor.post_cancel_operation(None) + + @property + def delete_operation(self): + return self._DeleteOperation(self._session, self._host, self._interceptor) # type: ignore + + class _DeleteOperation(DeveloperConnectRestStub): + def __call__( + self, + request: operations_pb2.DeleteOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Call the delete operation method over HTTP. + + Args: + request (operations_pb2.DeleteOperationRequest): + The request object for DeleteOperation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v1/{name=projects/*/locations/*/operations/*}", + }, + ] + + request, metadata = self._interceptor.pre_delete_operation( + request, metadata + ) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + return self._interceptor.post_delete_operation(None) + + @property + def get_operation(self): + return self._GetOperation(self._session, self._host, self._interceptor) # type: ignore + + class _GetOperation(DeveloperConnectRestStub): + def __call__( + self, + request: operations_pb2.GetOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the get operation method over HTTP. + + Args: + request (operations_pb2.GetOperationRequest): + The request object for GetOperation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + operations_pb2.Operation: Response from GetOperation method. 
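These operation stubs serve both the `google.longrunning` mixins on the client and the polling performed by wrapped long-running calls. A hedged sketch of the direct mixin surface (the client class name and the operation name are assumptions, not shown in this diff):

```python
from google.cloud import developerconnect_v1
from google.longrunning import operations_pb2

client = developerconnect_v1.DeveloperConnectClient(transport="rest")
op_name = "projects/my-project/locations/us-central1/operations/operation-123"

# GetOperation maps to GET /v1/{name=projects/*/locations/*/operations/*}.
operation = client.get_operation(operations_pb2.GetOperationRequest(name=op_name))
print(operation.done)

# CancelOperation maps to POST .../operations/*:cancel and returns no body.
client.cancel_operation(operations_pb2.CancelOperationRequest(name=op_name))
```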
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/operations/*}", + }, + ] + + request, metadata = self._interceptor.pre_get_operation(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = operations_pb2.Operation() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_get_operation(resp) + return resp + + @property + def list_operations(self): + return self._ListOperations(self._session, self._host, self._interceptor) # type: ignore + + class _ListOperations(DeveloperConnectRestStub): + def __call__( + self, + request: operations_pb2.ListOperationsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Call the list operations method over HTTP. + + Args: + request (operations_pb2.ListOperationsRequest): + The request object for ListOperations method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + operations_pb2.ListOperationsResponse: Response from ListOperations method. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*}/operations", + }, + ] + + request, metadata = self._interceptor.pre_list_operations(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = operations_pb2.ListOperationsResponse() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_list_operations(resp) + return resp + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__ = ("DeveloperConnectRestTransport",) diff --git a/packages/google-cloud-developerconnect/google/cloud/developerconnect_v1/types/__init__.py b/packages/google-cloud-developerconnect/google/cloud/developerconnect_v1/types/__init__.py new file mode 100644 index 000000000000..b42698fc197c --- /dev/null +++ b/packages/google-cloud-developerconnect/google/cloud/developerconnect_v1/types/__init__.py @@ -0,0 +1,76 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from .developer_connect import ( + Connection, + CreateConnectionRequest, + CreateGitRepositoryLinkRequest, + DeleteConnectionRequest, + DeleteGitRepositoryLinkRequest, + FetchGitHubInstallationsRequest, + FetchGitHubInstallationsResponse, + FetchGitRefsRequest, + FetchGitRefsResponse, + FetchLinkableGitRepositoriesRequest, + FetchLinkableGitRepositoriesResponse, + FetchReadTokenRequest, + FetchReadTokenResponse, + FetchReadWriteTokenRequest, + FetchReadWriteTokenResponse, + GetConnectionRequest, + GetGitRepositoryLinkRequest, + GitHubConfig, + GitRepositoryLink, + InstallationState, + LinkableGitRepository, + ListConnectionsRequest, + ListConnectionsResponse, + ListGitRepositoryLinksRequest, + ListGitRepositoryLinksResponse, + OAuthCredential, + OperationMetadata, + UpdateConnectionRequest, +) + +__all__ = ( + "Connection", + "CreateConnectionRequest", + "CreateGitRepositoryLinkRequest", + "DeleteConnectionRequest", + "DeleteGitRepositoryLinkRequest", + "FetchGitHubInstallationsRequest", + "FetchGitHubInstallationsResponse", + "FetchGitRefsRequest", + "FetchGitRefsResponse", + "FetchLinkableGitRepositoriesRequest", + "FetchLinkableGitRepositoriesResponse", + "FetchReadTokenRequest", + "FetchReadTokenResponse", + "FetchReadWriteTokenRequest", + "FetchReadWriteTokenResponse", + "GetConnectionRequest", + "GetGitRepositoryLinkRequest", + "GitHubConfig", + "GitRepositoryLink", + "InstallationState", + "LinkableGitRepository", + "ListConnectionsRequest", + "ListConnectionsResponse", + "ListGitRepositoryLinksRequest", + "ListGitRepositoryLinksResponse", + "OAuthCredential", + "OperationMetadata", + "UpdateConnectionRequest", +) diff --git a/packages/google-cloud-developerconnect/google/cloud/developerconnect_v1/types/developer_connect.py b/packages/google-cloud-developerconnect/google/cloud/developerconnect_v1/types/developer_connect.py new file mode 100644 index 000000000000..b55088206406 --- /dev/null +++ b/packages/google-cloud-developerconnect/google/cloud/developerconnect_v1/types/developer_connect.py @@ -0,0 +1,1225 @@ +# -*- coding: utf-8 -*- +# 
Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +import proto # type: ignore + +__protobuf__ = proto.module( + package="google.cloud.developerconnect.v1", + manifest={ + "Connection", + "InstallationState", + "GitHubConfig", + "OAuthCredential", + "ListConnectionsRequest", + "ListConnectionsResponse", + "GetConnectionRequest", + "CreateConnectionRequest", + "UpdateConnectionRequest", + "DeleteConnectionRequest", + "OperationMetadata", + "GitRepositoryLink", + "CreateGitRepositoryLinkRequest", + "DeleteGitRepositoryLinkRequest", + "ListGitRepositoryLinksRequest", + "ListGitRepositoryLinksResponse", + "GetGitRepositoryLinkRequest", + "FetchReadWriteTokenRequest", + "FetchReadTokenRequest", + "FetchReadTokenResponse", + "FetchReadWriteTokenResponse", + "FetchLinkableGitRepositoriesRequest", + "FetchLinkableGitRepositoriesResponse", + "LinkableGitRepository", + "FetchGitHubInstallationsRequest", + "FetchGitHubInstallationsResponse", + "FetchGitRefsRequest", + "FetchGitRefsResponse", + }, +) + + +class Connection(proto.Message): + r"""Message describing Connection object + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + github_config (google.cloud.developerconnect_v1.types.GitHubConfig): + Configuration for connections to github.com. + + This field is a member of `oneof`_ ``connection_config``. + name (str): + Identifier. The resource name of the connection, in the + format + ``projects/{project}/locations/{location}/connections/{connection_id}``. + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. [Output only] Create timestamp + update_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. [Output only] Update timestamp + delete_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. [Output only] Delete timestamp + labels (MutableMapping[str, str]): + Optional. Labels as key value pairs + installation_state (google.cloud.developerconnect_v1.types.InstallationState): + Output only. Installation state of the + Connection. + disabled (bool): + Optional. If disabled is set to true, + functionality is disabled for this connection. + Repository based API methods and webhooks + processing for repositories in this connection + will be disabled. + reconciling (bool): + Output only. Set to true when the connection + is being set up or updated in the background. + annotations (MutableMapping[str, str]): + Optional. Allows clients to store small + amounts of arbitrary data. + etag (str): + Optional. 
This checksum is computed by the + server based on the value of other fields, and + may be sent on update and delete requests to + ensure the client has an up-to-date value before + proceeding. + uid (str): + Output only. A system-assigned unique + identifier for a the GitRepositoryLink. + """ + + github_config: "GitHubConfig" = proto.Field( + proto.MESSAGE, + number=5, + oneof="connection_config", + message="GitHubConfig", + ) + name: str = proto.Field( + proto.STRING, + number=1, + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=3, + message=timestamp_pb2.Timestamp, + ) + delete_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=11, + message=timestamp_pb2.Timestamp, + ) + labels: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=4, + ) + installation_state: "InstallationState" = proto.Field( + proto.MESSAGE, + number=6, + message="InstallationState", + ) + disabled: bool = proto.Field( + proto.BOOL, + number=7, + ) + reconciling: bool = proto.Field( + proto.BOOL, + number=8, + ) + annotations: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=9, + ) + etag: str = proto.Field( + proto.STRING, + number=10, + ) + uid: str = proto.Field( + proto.STRING, + number=12, + ) + + +class InstallationState(proto.Message): + r"""Describes stage and necessary actions to be taken by the + user to complete the installation. Used for GitHub and GitHub + Enterprise based connections. + + Attributes: + stage (google.cloud.developerconnect_v1.types.InstallationState.Stage): + Output only. Current step of the installation + process. + message (str): + Output only. Message of what the user should + do next to continue the installation. Empty + string if the installation is already complete. + action_uri (str): + Output only. Link to follow for next action. + Empty string if the installation is already + complete. + """ + + class Stage(proto.Enum): + r"""Stage of the installation process. + + Values: + STAGE_UNSPECIFIED (0): + No stage specified. + PENDING_CREATE_APP (1): + Only for GitHub Enterprise. An App creation + has been requested. The user needs to confirm + the creation in their GitHub enterprise host. + PENDING_USER_OAUTH (2): + User needs to authorize the GitHub (or + Enterprise) App via OAuth. + PENDING_INSTALL_APP (3): + User needs to follow the link to install the + GitHub (or Enterprise) App. + COMPLETE (10): + Installation process has been completed. + """ + STAGE_UNSPECIFIED = 0 + PENDING_CREATE_APP = 1 + PENDING_USER_OAUTH = 2 + PENDING_INSTALL_APP = 3 + COMPLETE = 10 + + stage: Stage = proto.Field( + proto.ENUM, + number=1, + enum=Stage, + ) + message: str = proto.Field( + proto.STRING, + number=2, + ) + action_uri: str = proto.Field( + proto.STRING, + number=3, + ) + + +class GitHubConfig(proto.Message): + r"""Configuration for connections to github.com. + + Attributes: + github_app (google.cloud.developerconnect_v1.types.GitHubConfig.GitHubApp): + Required. Immutable. The GitHub Application + that was installed to the GitHub user or + organization. + authorizer_credential (google.cloud.developerconnect_v1.types.OAuthCredential): + Optional. OAuth credential of the account + that authorized the GitHub App. It is + recommended to use a robot account instead of a + human user account. The OAuth token must be tied + to the GitHub App of this config. 
+ app_installation_id (int): + Optional. GitHub App installation id. + installation_uri (str): + Output only. The URI to navigate to in order + to manage the installation associated with this + GitHubConfig. + """ + + class GitHubApp(proto.Enum): + r"""Represents the various GitHub Applications that can be + installed to a GitHub user or organization and used with + Developer Connect. + + Values: + GIT_HUB_APP_UNSPECIFIED (0): + GitHub App not specified. + DEVELOPER_CONNECT (1): + The Developer Connect GitHub Application. + FIREBASE (2): + The Firebase GitHub Application. + """ + GIT_HUB_APP_UNSPECIFIED = 0 + DEVELOPER_CONNECT = 1 + FIREBASE = 2 + + github_app: GitHubApp = proto.Field( + proto.ENUM, + number=1, + enum=GitHubApp, + ) + authorizer_credential: "OAuthCredential" = proto.Field( + proto.MESSAGE, + number=2, + message="OAuthCredential", + ) + app_installation_id: int = proto.Field( + proto.INT64, + number=3, + ) + installation_uri: str = proto.Field( + proto.STRING, + number=4, + ) + + +class OAuthCredential(proto.Message): + r"""Represents an OAuth token of the account that authorized the + Connection, and associated metadata. + + Attributes: + oauth_token_secret_version (str): + Required. A SecretManager resource containing the OAuth + token that authorizes the connection. Format: + ``projects/*/secrets/*/versions/*``. + username (str): + Output only. The username associated with + this token. + """ + + oauth_token_secret_version: str = proto.Field( + proto.STRING, + number=1, + ) + username: str = proto.Field( + proto.STRING, + number=2, + ) + + +class ListConnectionsRequest(proto.Message): + r"""Message for requesting list of Connections + + Attributes: + parent (str): + Required. Parent value for + ListConnectionsRequest + page_size (int): + Optional. Requested page size. Server may + return fewer items than requested. If + unspecified, server will pick an appropriate + default. + page_token (str): + Optional. A token identifying a page of + results the server should return. + filter (str): + Optional. Filtering results + order_by (str): + Optional. Hint for how to order the results + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + filter: str = proto.Field( + proto.STRING, + number=4, + ) + order_by: str = proto.Field( + proto.STRING, + number=5, + ) + + +class ListConnectionsResponse(proto.Message): + r"""Message for response to listing Connections + + Attributes: + connections (MutableSequence[google.cloud.developerconnect_v1.types.Connection]): + The list of Connection + next_page_token (str): + A token identifying a page of results the + server should return. + unreachable (MutableSequence[str]): + Locations that could not be reached. + """ + + @property + def raw_page(self): + return self + + connections: MutableSequence["Connection"] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="Connection", + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + unreachable: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=3, + ) + + +class GetConnectionRequest(proto.Message): + r"""Message for getting a Connection + + Attributes: + name (str): + Required. Name of the resource + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class CreateConnectionRequest(proto.Message): + r"""Message for creating a Connection + + Attributes: + parent (str): + Required. 
Value for parent. + connection_id (str): + Required. Id of the requesting object If auto-generating Id + server-side, remove this field and connection_id from the + method_signature of Create RPC + connection (google.cloud.developerconnect_v1.types.Connection): + Required. The resource being created + request_id (str): + Optional. An optional request ID to identify + requests. Specify a unique request ID so that if + you must retry your request, the server will + know to ignore the request if it has already + been completed. The server will guarantee that + for at least 60 minutes since the first request. + + For example, consider a situation where you make + an initial request and the request times out. If + you make the request again with the same request + ID, the server can check if original operation + with the same request ID was received, and if + so, will ignore the second request. This + prevents clients from accidentally creating + duplicate commitments. + + The request ID must be a valid UUID with the + exception that zero UUID is not supported + (00000000-0000-0000-0000-000000000000). + validate_only (bool): + Optional. If set, validate the request, but + do not actually post it. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + connection_id: str = proto.Field( + proto.STRING, + number=2, + ) + connection: "Connection" = proto.Field( + proto.MESSAGE, + number=3, + message="Connection", + ) + request_id: str = proto.Field( + proto.STRING, + number=4, + ) + validate_only: bool = proto.Field( + proto.BOOL, + number=5, + ) + + +class UpdateConnectionRequest(proto.Message): + r"""Message for updating a Connection + + Attributes: + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Required. Field mask is used to specify the fields to be + overwritten in the Connection resource by the update. The + fields specified in the update_mask are relative to the + resource, not the full request. A field will be overwritten + if it is in the mask. If the user does not provide a mask + then all fields will be overwritten. + connection (google.cloud.developerconnect_v1.types.Connection): + Required. The resource being updated + request_id (str): + Optional. An optional request ID to identify + requests. Specify a unique request ID so that if + you must retry your request, the server will + know to ignore the request if it has already + been completed. The server will guarantee that + for at least 60 minutes since the first request. + + For example, consider a situation where you make + an initial request and the request times out. If + you make the request again with the same request + ID, the server can check if original operation + with the same request ID was received, and if + so, will ignore the second request. This + prevents clients from accidentally creating + duplicate commitments. + + The request ID must be a valid UUID with the + exception that zero UUID is not supported + (00000000-0000-0000-0000-000000000000). + allow_missing (bool): + Optional. If set to true, and the connection is not found a + new connection will be created. In this situation + ``update_mask`` is ignored. The creation will succeed only + if the input connection has all the necessary information + (e.g a github_config with both user_oauth_token and + installation_id properties). + validate_only (bool): + Optional. If set, validate the request, but + do not actually post it. 
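Given the request messages above, a hedged end-to-end sketch of creating a connection. The client class name, keyword signature, and all resource IDs are assumptions used for illustration; a real GitHub connection additionally needs the OAuth credential and installation ID described under `GitHubConfig`:

```python
from google.cloud import developerconnect_v1

client = developerconnect_v1.DeveloperConnectClient(transport="rest")

connection = developerconnect_v1.Connection(
    github_config=developerconnect_v1.GitHubConfig(
        github_app=developerconnect_v1.GitHubConfig.GitHubApp.DEVELOPER_CONNECT,
    ),
)

# create_connection returns a wrapped long-running operation; result() polls
# it (via the GetOperation stub shown earlier) until the Connection is ready.
operation = client.create_connection(
    parent="projects/my-project/locations/us-central1",
    connection=connection,
    connection_id="my-connection",
)
result = operation.result(timeout=600)
print(result.name, result.installation_state.stage)
```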
+ """ + + update_mask: field_mask_pb2.FieldMask = proto.Field( + proto.MESSAGE, + number=1, + message=field_mask_pb2.FieldMask, + ) + connection: "Connection" = proto.Field( + proto.MESSAGE, + number=2, + message="Connection", + ) + request_id: str = proto.Field( + proto.STRING, + number=3, + ) + allow_missing: bool = proto.Field( + proto.BOOL, + number=4, + ) + validate_only: bool = proto.Field( + proto.BOOL, + number=5, + ) + + +class DeleteConnectionRequest(proto.Message): + r"""Message for deleting a Connection + + Attributes: + name (str): + Required. Name of the resource + request_id (str): + Optional. An optional request ID to identify + requests. Specify a unique request ID so that if + you must retry your request, the server will + know to ignore the request if it has already + been completed. The server will guarantee that + for at least 60 minutes after the first request. + + For example, consider a situation where you make + an initial request and the request times out. If + you make the request again with the same request + ID, the server can check if original operation + with the same request ID was received, and if + so, will ignore the second request. This + prevents clients from accidentally creating + duplicate commitments. + + The request ID must be a valid UUID with the + exception that zero UUID is not supported + (00000000-0000-0000-0000-000000000000). + validate_only (bool): + Optional. If set, validate the request, but + do not actually post it. + etag (str): + Optional. The current etag of the Connection. + If an etag is provided and does not match the + current etag of the Connection, deletion will be + blocked and an ABORTED error will be returned. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + request_id: str = proto.Field( + proto.STRING, + number=2, + ) + validate_only: bool = proto.Field( + proto.BOOL, + number=3, + ) + etag: str = proto.Field( + proto.STRING, + number=4, + ) + + +class OperationMetadata(proto.Message): + r"""Represents the metadata of the long-running operation. + + Attributes: + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time the operation was + created. + end_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time the operation finished + running. + target (str): + Output only. Server-defined resource path for + the target of the operation. + verb (str): + Output only. Name of the verb executed by the + operation. + status_message (str): + Output only. Human-readable status of the + operation, if any. + requested_cancellation (bool): + Output only. Identifies whether the user has requested + cancellation of the operation. Operations that have been + cancelled successfully have [Operation.error][] value with a + [google.rpc.Status.code][google.rpc.Status.code] of 1, + corresponding to ``Code.CANCELLED``. + api_version (str): + Output only. API version used to start the + operation. 
+ """ + + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=1, + message=timestamp_pb2.Timestamp, + ) + end_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + target: str = proto.Field( + proto.STRING, + number=3, + ) + verb: str = proto.Field( + proto.STRING, + number=4, + ) + status_message: str = proto.Field( + proto.STRING, + number=5, + ) + requested_cancellation: bool = proto.Field( + proto.BOOL, + number=6, + ) + api_version: str = proto.Field( + proto.STRING, + number=7, + ) + + +class GitRepositoryLink(proto.Message): + r"""Message describing the GitRepositoryLink object + + Attributes: + name (str): + Identifier. Resource name of the repository, in the format + ``projects/*/locations/*/connections/*/gitRepositoryLinks/*``. + clone_uri (str): + Required. Git Clone URI. + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. [Output only] Create timestamp + update_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. [Output only] Update timestamp + delete_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. [Output only] Delete timestamp + labels (MutableMapping[str, str]): + Optional. Labels as key value pairs + etag (str): + Optional. This checksum is computed by the + server based on the value of other fields, and + may be sent on update and delete requests to + ensure the client has an up-to-date value before + proceeding. + reconciling (bool): + Output only. Set to true when the connection + is being set up or updated in the background. + annotations (MutableMapping[str, str]): + Optional. Allows clients to store small + amounts of arbitrary data. + uid (str): + Output only. A system-assigned unique + identifier for a the GitRepositoryLink. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + clone_uri: str = proto.Field( + proto.STRING, + number=2, + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=3, + message=timestamp_pb2.Timestamp, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=4, + message=timestamp_pb2.Timestamp, + ) + delete_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=5, + message=timestamp_pb2.Timestamp, + ) + labels: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=6, + ) + etag: str = proto.Field( + proto.STRING, + number=7, + ) + reconciling: bool = proto.Field( + proto.BOOL, + number=8, + ) + annotations: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=9, + ) + uid: str = proto.Field( + proto.STRING, + number=10, + ) + + +class CreateGitRepositoryLinkRequest(proto.Message): + r"""Message for creating a GitRepositoryLink + + Attributes: + parent (str): + Required. Value for parent. + git_repository_link (google.cloud.developerconnect_v1.types.GitRepositoryLink): + Required. The resource being created + git_repository_link_id (str): + Required. The ID to use for the repository, which will + become the final component of the repository's resource + name. This ID should be unique in the connection. Allows + alphanumeric characters and any of -._~%!$&'()*+,;=@. + request_id (str): + Optional. An optional request ID to identify + requests. Specify a unique request ID so that if + you must retry your request, the server will + know to ignore the request if it has already + been completed. 
The server will guarantee that + for at least 60 minutes since the first request. + + For example, consider a situation where you make + an initial request and the request times out. If + you make the request again with the same request + ID, the server can check if original operation + with the same request ID was received, and if + so, will ignore the second request. This + prevents clients from accidentally creating + duplicate commitments. + + The request ID must be a valid UUID with the + exception that zero UUID is not supported + (00000000-0000-0000-0000-000000000000). + validate_only (bool): + Optional. If set, validate the request, but + do not actually post it. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + git_repository_link: "GitRepositoryLink" = proto.Field( + proto.MESSAGE, + number=2, + message="GitRepositoryLink", + ) + git_repository_link_id: str = proto.Field( + proto.STRING, + number=3, + ) + request_id: str = proto.Field( + proto.STRING, + number=4, + ) + validate_only: bool = proto.Field( + proto.BOOL, + number=5, + ) + + +class DeleteGitRepositoryLinkRequest(proto.Message): + r"""Message for deleting a GitRepositoryLink + + Attributes: + name (str): + Required. Name of the resource + request_id (str): + Optional. An optional request ID to identify + requests. Specify a unique request ID so that if + you must retry your request, the server will + know to ignore the request if it has already + been completed. The server will guarantee that + for at least 60 minutes after the first request. + + For example, consider a situation where you make + an initial request and the request times out. If + you make the request again with the same request + ID, the server can check if original operation + with the same request ID was received, and if + so, will ignore the second request. This + prevents clients from accidentally creating + duplicate commitments. + + The request ID must be a valid UUID with the + exception that zero UUID is not supported + (00000000-0000-0000-0000-000000000000). + validate_only (bool): + Optional. If set, validate the request, but + do not actually post it. + etag (str): + Optional. This checksum is computed by the + server based on the value of other fields, and + may be sent on update and delete requests to + ensure the client has an up-to-date value before + proceeding. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + request_id: str = proto.Field( + proto.STRING, + number=2, + ) + validate_only: bool = proto.Field( + proto.BOOL, + number=3, + ) + etag: str = proto.Field( + proto.STRING, + number=4, + ) + + +class ListGitRepositoryLinksRequest(proto.Message): + r"""Message for requesting a list of GitRepositoryLinks + + Attributes: + parent (str): + Required. Parent value for + ListGitRepositoryLinksRequest + page_size (int): + Optional. Requested page size. Server may + return fewer items than requested. If + unspecified, server will pick an appropriate + default. + page_token (str): + Optional. A token identifying a page of + results the server should return. + filter (str): + Optional. Filtering results + order_by (str): + Optional. 
Hint for how to order the results + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + filter: str = proto.Field( + proto.STRING, + number=4, + ) + order_by: str = proto.Field( + proto.STRING, + number=5, + ) + + +class ListGitRepositoryLinksResponse(proto.Message): + r"""Message for response to listing GitRepositoryLinks + + Attributes: + git_repository_links (MutableSequence[google.cloud.developerconnect_v1.types.GitRepositoryLink]): + The list of GitRepositoryLinks + next_page_token (str): + A token identifying a page of results the + server should return. + unreachable (MutableSequence[str]): + Locations that could not be reached. + """ + + @property + def raw_page(self): + return self + + git_repository_links: MutableSequence["GitRepositoryLink"] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="GitRepositoryLink", + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + unreachable: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=3, + ) + + +class GetGitRepositoryLinkRequest(proto.Message): + r"""Message for getting a GitRepositoryLink + + Attributes: + name (str): + Required. Name of the resource + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class FetchReadWriteTokenRequest(proto.Message): + r"""Message for fetching SCM read/write token. + + Attributes: + git_repository_link (str): + Required. The resource name of the gitRepositoryLink in the + format + ``projects/*/locations/*/connections/*/gitRepositoryLinks/*``. + """ + + git_repository_link: str = proto.Field( + proto.STRING, + number=1, + ) + + +class FetchReadTokenRequest(proto.Message): + r"""Message for fetching SCM read token. + + Attributes: + git_repository_link (str): + Required. The resource name of the gitRepositoryLink in the + format + ``projects/*/locations/*/connections/*/gitRepositoryLinks/*``. + """ + + git_repository_link: str = proto.Field( + proto.STRING, + number=1, + ) + + +class FetchReadTokenResponse(proto.Message): + r"""Message for responding to get read token. + + Attributes: + token (str): + The token content. + expiration_time (google.protobuf.timestamp_pb2.Timestamp): + Expiration timestamp. Can be empty if unknown + or non-expiring. + git_username (str): + The git_username to specify when making a git clone with the + token. For example, for GitHub GitRepositoryLinks, this + would be "x-access-token". + """ + + token: str = proto.Field( + proto.STRING, + number=1, + ) + expiration_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + git_username: str = proto.Field( + proto.STRING, + number=3, + ) + + +class FetchReadWriteTokenResponse(proto.Message): + r"""Message for responding to get read/write token. + + Attributes: + token (str): + The token content. + expiration_time (google.protobuf.timestamp_pb2.Timestamp): + Expiration timestamp. Can be empty if unknown + or non-expiring. + git_username (str): + The git_username to specify when making a git clone with the + token. For example, for GitHub GitRepositoryLinks, this + would be "x-access-token". 
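To show how the token fields above are meant to be used, a hedged sketch follows. The client class name, resource names, and repository URL are placeholders; the `git_username`/`token` pairing is as documented in the response messages:

```python
from google.cloud import developerconnect_v1

client = developerconnect_v1.DeveloperConnectClient(transport="rest")

link = (
    "projects/my-project/locations/us-central1/connections/my-connection"
    "/gitRepositoryLinks/my-repo"
)
response = client.fetch_read_token(
    developerconnect_v1.FetchReadTokenRequest(git_repository_link=link)
)

# For GitHub links git_username is "x-access-token"; the pair can authenticate
# an HTTPS clone of the linked repository until expiration_time.
clone_url = (
    f"https://{response.git_username}:{response.token}@github.com/my-org/my-repo.git"
)
print(clone_url, response.expiration_time)
```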
+ """ + + token: str = proto.Field( + proto.STRING, + number=1, + ) + expiration_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + git_username: str = proto.Field( + proto.STRING, + number=3, + ) + + +class FetchLinkableGitRepositoriesRequest(proto.Message): + r"""Request message for FetchLinkableGitRepositoriesRequest. + + Attributes: + connection (str): + Required. The name of the Connection. Format: + ``projects/*/locations/*/connections/*``. + page_size (int): + Optional. Number of results to return in the + list. Defaults to 20. + page_token (str): + Optional. Page start. + """ + + connection: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + + +class FetchLinkableGitRepositoriesResponse(proto.Message): + r"""Response message for FetchLinkableGitRepositories. + + Attributes: + linkable_git_repositories (MutableSequence[google.cloud.developerconnect_v1.types.LinkableGitRepository]): + The git repositories that can be linked to + the connection. + next_page_token (str): + A token identifying a page of results the + server should return. + """ + + @property + def raw_page(self): + return self + + linkable_git_repositories: MutableSequence[ + "LinkableGitRepository" + ] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="LinkableGitRepository", + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + + +class LinkableGitRepository(proto.Message): + r"""LinkableGitRepository represents a git repository that can be + linked to a connection. + + Attributes: + clone_uri (str): + The clone uri of the repository. + """ + + clone_uri: str = proto.Field( + proto.STRING, + number=1, + ) + + +class FetchGitHubInstallationsRequest(proto.Message): + r"""Request for fetching github installations. + + Attributes: + connection (str): + Required. The resource name of the connection in the format + ``projects/*/locations/*/connections/*``. + """ + + connection: str = proto.Field( + proto.STRING, + number=1, + ) + + +class FetchGitHubInstallationsResponse(proto.Message): + r"""Response of fetching github installations. + + Attributes: + installations (MutableSequence[google.cloud.developerconnect_v1.types.FetchGitHubInstallationsResponse.Installation]): + List of installations available to the OAuth + user (for github.com) or all the installations + (for GitHub enterprise). + """ + + class Installation(proto.Message): + r"""Represents an installation of the GitHub App. + + Attributes: + id (int): + ID of the installation in GitHub. + name (str): + Name of the GitHub user or organization that + owns this installation. + type_ (str): + Either "user" or "organization". + """ + + id: int = proto.Field( + proto.INT64, + number=1, + ) + name: str = proto.Field( + proto.STRING, + number=2, + ) + type_: str = proto.Field( + proto.STRING, + number=3, + ) + + installations: MutableSequence[Installation] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=Installation, + ) + + +class FetchGitRefsRequest(proto.Message): + r"""Request for fetching git refs. + + Attributes: + git_repository_link (str): + Required. The resource name of GitRepositoryLink in the + format + ``projects/*/locations/*/connections/*/gitRepositoryLinks/*``. + ref_type (google.cloud.developerconnect_v1.types.FetchGitRefsRequest.RefType): + Required. Type of refs to fetch. + page_size (int): + Optional. 
Number of results to return in the + list. Default to 20. + page_token (str): + Optional. Page start. + """ + + class RefType(proto.Enum): + r"""Type of refs. + + Values: + REF_TYPE_UNSPECIFIED (0): + No type specified. + TAG (1): + To fetch tags. + BRANCH (2): + To fetch branches. + """ + REF_TYPE_UNSPECIFIED = 0 + TAG = 1 + BRANCH = 2 + + git_repository_link: str = proto.Field( + proto.STRING, + number=1, + ) + ref_type: RefType = proto.Field( + proto.ENUM, + number=2, + enum=RefType, + ) + page_size: int = proto.Field( + proto.INT32, + number=4, + ) + page_token: str = proto.Field( + proto.STRING, + number=5, + ) + + +class FetchGitRefsResponse(proto.Message): + r"""Response for fetching git refs. + + Attributes: + ref_names (MutableSequence[str]): + Name of the refs fetched. + next_page_token (str): + A token identifying a page of results the + server should return. + """ + + @property + def raw_page(self): + return self + + ref_names: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=1, + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-developerconnect/mypy.ini b/packages/google-cloud-developerconnect/mypy.ini new file mode 100644 index 000000000000..574c5aed394b --- /dev/null +++ b/packages/google-cloud-developerconnect/mypy.ini @@ -0,0 +1,3 @@ +[mypy] +python_version = 3.7 +namespace_packages = True diff --git a/packages/google-cloud-developerconnect/noxfile.py b/packages/google-cloud-developerconnect/noxfile.py new file mode 100644 index 000000000000..1e6cd48d0529 --- /dev/null +++ b/packages/google-cloud-developerconnect/noxfile.py @@ -0,0 +1,428 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Generated by synthtool. DO NOT EDIT! 
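The `request_id` fields documented on the mutation requests above all describe the same retry contract: the client picks a non-zero UUID, and reusing that UUID on a retry lets the server recognize and ignore a duplicate of an operation it has already accepted (for at least 60 minutes). A minimal sketch of that pattern, assuming the published `google-cloud-developerconnect` package is installed and `name` is a placeholder for an existing gitRepositoryLink resource:

```python
import uuid

from google.api_core import exceptions
from google.cloud import developerconnect_v1


def delete_git_repository_link_idempotently(name: str) -> None:
    """Delete a gitRepositoryLink, reusing one request_id across retries."""
    client = developerconnect_v1.DeveloperConnectClient()

    # Any valid, non-zero UUID; the server honors it for at least 60 minutes.
    request_id = str(uuid.uuid4())
    request = developerconnect_v1.DeleteGitRepositoryLinkRequest(
        # placeholder: projects/*/locations/*/connections/*/gitRepositoryLinks/*
        name=name,
        request_id=request_id,
    )

    try:
        operation = client.delete_git_repository_link(request=request)
    except exceptions.DeadlineExceeded:
        # Retry with the *same* request_id: if the first attempt reached the
        # server, this duplicate is ignored instead of starting a second
        # deletion.
        operation = client.delete_git_repository_link(request=request)

    operation.result()  # wait for the long-running operation to finish
```

The same idea applies to the other Create/Delete requests above that expose an identical `request_id` field.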
+ +from __future__ import absolute_import + +import os +import pathlib +import re +import shutil +from typing import Dict, List +import warnings + +import nox + +BLACK_VERSION = "black[jupyter]==23.7.0" +ISORT_VERSION = "isort==5.11.0" + +LINT_PATHS = ["docs", "google", "tests", "noxfile.py", "setup.py"] + + +DEFAULT_PYTHON_VERSION = "3.10" + +UNIT_TEST_PYTHON_VERSIONS: List[str] = ["3.7", "3.8", "3.9", "3.10", "3.11", "3.12"] +UNIT_TEST_STANDARD_DEPENDENCIES = [ + "mock", + "asyncmock", + "pytest", + "pytest-cov", + "pytest-asyncio", +] +UNIT_TEST_EXTERNAL_DEPENDENCIES: List[str] = [] +UNIT_TEST_LOCAL_DEPENDENCIES: List[str] = [] +UNIT_TEST_DEPENDENCIES: List[str] = [] +UNIT_TEST_EXTRAS: List[str] = [] +UNIT_TEST_EXTRAS_BY_PYTHON: Dict[str, List[str]] = {} + +SYSTEM_TEST_PYTHON_VERSIONS: List[str] = ["3.8", "3.9", "3.10", "3.11", "3.12"] +SYSTEM_TEST_STANDARD_DEPENDENCIES = [ + "mock", + "pytest", + "google-cloud-testutils", +] +SYSTEM_TEST_EXTERNAL_DEPENDENCIES: List[str] = [] +SYSTEM_TEST_LOCAL_DEPENDENCIES: List[str] = [] +SYSTEM_TEST_DEPENDENCIES: List[str] = [] +SYSTEM_TEST_EXTRAS: List[str] = [] +SYSTEM_TEST_EXTRAS_BY_PYTHON: Dict[str, List[str]] = {} + +CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute() + +# 'docfx' is excluded since it only needs to run in 'docs-presubmit' +nox.options.sessions = [ + "unit", + "system", + "cover", + "lint", + "lint_setup_py", + "blacken", + "docs", +] + +# Error if a python version is missing +nox.options.error_on_missing_interpreters = True + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def lint(session): + """Run linters. + + Returns a failure if the linters find linting errors or sufficiently + serious code quality issues. + """ + session.install("flake8", BLACK_VERSION) + session.run( + "black", + "--check", + *LINT_PATHS, + ) + + session.run("flake8", "google", "tests") + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def blacken(session): + """Run black. Format code to uniform standard.""" + session.install(BLACK_VERSION) + session.run( + "black", + *LINT_PATHS, + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def format(session): + """ + Run isort to sort imports. Then run black + to format code to uniform standard. + """ + session.install(BLACK_VERSION, ISORT_VERSION) + # Use the --fss option to sort imports using strict alphabetical order. + # See https://pycqa.github.io/isort/docs/configuration/options.html#force-sort-within-sections + session.run( + "isort", + "--fss", + *LINT_PATHS, + ) + session.run( + "black", + *LINT_PATHS, + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def lint_setup_py(session): + """Verify that setup.py is valid (including RST check).""" + session.install("docutils", "pygments") + session.run("python", "setup.py", "check", "--restructuredtext", "--strict") + + +def install_unittest_dependencies(session, *constraints): + standard_deps = UNIT_TEST_STANDARD_DEPENDENCIES + UNIT_TEST_DEPENDENCIES + session.install(*standard_deps, *constraints) + + if UNIT_TEST_EXTERNAL_DEPENDENCIES: + warnings.warn( + "'unit_test_external_dependencies' is deprecated. 
Instead, please " + "use 'unit_test_dependencies' or 'unit_test_local_dependencies'.", + DeprecationWarning, + ) + session.install(*UNIT_TEST_EXTERNAL_DEPENDENCIES, *constraints) + + if UNIT_TEST_LOCAL_DEPENDENCIES: + session.install(*UNIT_TEST_LOCAL_DEPENDENCIES, *constraints) + + if UNIT_TEST_EXTRAS_BY_PYTHON: + extras = UNIT_TEST_EXTRAS_BY_PYTHON.get(session.python, []) + elif UNIT_TEST_EXTRAS: + extras = UNIT_TEST_EXTRAS + else: + extras = [] + + if extras: + session.install("-e", f".[{','.join(extras)}]", *constraints) + else: + session.install("-e", ".", *constraints) + + +def default(session): + # Install all test dependencies, then install this package in-place. + + constraints_path = str( + CURRENT_DIRECTORY / "testing" / f"constraints-{session.python}.txt" + ) + install_unittest_dependencies(session, "-c", constraints_path) + + # Run py.test against the unit tests. + session.run( + "py.test", + "--quiet", + f"--junitxml=unit_{session.python}_sponge_log.xml", + "--cov=google", + "--cov=tests/unit", + "--cov-append", + "--cov-config=.coveragerc", + "--cov-report=", + "--cov-fail-under=0", + os.path.join("tests", "unit"), + *session.posargs, + ) + + +@nox.session(python=UNIT_TEST_PYTHON_VERSIONS) +def unit(session): + """Run the unit test suite.""" + default(session) + + +def install_systemtest_dependencies(session, *constraints): + # Use pre-release gRPC for system tests. + # Exclude version 1.52.0rc1 which has a known issue. + # See https://github.com/grpc/grpc/issues/32163 + session.install("--pre", "grpcio!=1.52.0rc1") + + session.install(*SYSTEM_TEST_STANDARD_DEPENDENCIES, *constraints) + + if SYSTEM_TEST_EXTERNAL_DEPENDENCIES: + session.install(*SYSTEM_TEST_EXTERNAL_DEPENDENCIES, *constraints) + + if SYSTEM_TEST_LOCAL_DEPENDENCIES: + session.install("-e", *SYSTEM_TEST_LOCAL_DEPENDENCIES, *constraints) + + if SYSTEM_TEST_DEPENDENCIES: + session.install("-e", *SYSTEM_TEST_DEPENDENCIES, *constraints) + + if SYSTEM_TEST_EXTRAS_BY_PYTHON: + extras = SYSTEM_TEST_EXTRAS_BY_PYTHON.get(session.python, []) + elif SYSTEM_TEST_EXTRAS: + extras = SYSTEM_TEST_EXTRAS + else: + extras = [] + + if extras: + session.install("-e", f".[{','.join(extras)}]", *constraints) + else: + session.install("-e", ".", *constraints) + + +@nox.session(python=SYSTEM_TEST_PYTHON_VERSIONS) +def system(session): + """Run the system test suite.""" + constraints_path = str( + CURRENT_DIRECTORY / "testing" / f"constraints-{session.python}.txt" + ) + system_test_path = os.path.join("tests", "system.py") + system_test_folder_path = os.path.join("tests", "system") + + # Check the value of `RUN_SYSTEM_TESTS` env var. It defaults to true. + if os.environ.get("RUN_SYSTEM_TESTS", "true") == "false": + session.skip("RUN_SYSTEM_TESTS is set to false, skipping") + # Install pyopenssl for mTLS testing. + if os.environ.get("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") == "true": + session.install("pyopenssl") + + system_test_exists = os.path.exists(system_test_path) + system_test_folder_exists = os.path.exists(system_test_folder_path) + # Sanity check: only run tests if found. + if not system_test_exists and not system_test_folder_exists: + session.skip("System tests were not found") + + install_systemtest_dependencies(session, "-c", constraints_path) + + # Run py.test against the system tests. 
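The `unit` and `system` sessions in this noxfile share one pattern: resolve the per-interpreter constraints file under `testing/`, install the test dependencies and the package itself in-place against it, then hand off to pytest. A condensed, standalone sketch of that pattern (not part of the generated noxfile; the session name and paths are illustrative and assume a `testing/constraints-<python>.txt` file exists):

```python
import os
import pathlib

import nox

CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute()


@nox.session(python=["3.10"])
def smoke(session):
    """Install pinned deps plus the local package, then run the unit tests."""
    constraints = str(
        CURRENT_DIRECTORY / "testing" / f"constraints-{session.python}.txt"
    )
    # Pin test dependencies and the package to the constraints file, mirroring
    # install_unittest_dependencies above.
    session.install("pytest", "pytest-asyncio", "-c", constraints)
    session.install("-e", ".", "-c", constraints)
    session.run("pytest", "--quiet", os.path.join("tests", "unit"), *session.posargs)
```

Invoking `nox -s unit` (or `system`) from the package directory drives the same flow across the full list of configured Python versions.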
+ if system_test_exists: + session.run( + "py.test", + "--quiet", + f"--junitxml=system_{session.python}_sponge_log.xml", + system_test_path, + *session.posargs, + ) + if system_test_folder_exists: + session.run( + "py.test", + "--quiet", + f"--junitxml=system_{session.python}_sponge_log.xml", + system_test_folder_path, + *session.posargs, + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def cover(session): + """Run the final coverage report. + + This outputs the coverage report aggregating coverage from the unit + test runs (not system test runs), and then erases coverage data. + """ + session.install("coverage", "pytest-cov") + session.run("coverage", "report", "--show-missing", "--fail-under=100") + + session.run("coverage", "erase") + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def docs(session): + """Build the docs for this library.""" + + session.install("-e", ".") + session.install( + # We need to pin to specific versions of the `sphinxcontrib-*` packages + # which still support sphinx 4.x. + # See https://github.com/googleapis/sphinx-docfx-yaml/issues/344 + # and https://github.com/googleapis/sphinx-docfx-yaml/issues/345. + "sphinxcontrib-applehelp==1.0.4", + "sphinxcontrib-devhelp==1.0.2", + "sphinxcontrib-htmlhelp==2.0.1", + "sphinxcontrib-qthelp==1.0.3", + "sphinxcontrib-serializinghtml==1.1.5", + "sphinx==4.5.0", + "alabaster", + "recommonmark", + ) + + shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) + session.run( + "sphinx-build", + "-W", # warnings as errors + "-T", # show full traceback on exception + "-N", # no colors + "-b", + "html", + "-d", + os.path.join("docs", "_build", "doctrees", ""), + os.path.join("docs", ""), + os.path.join("docs", "_build", "html", ""), + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def docfx(session): + """Build the docfx yaml files for this library.""" + + session.install("-e", ".") + session.install( + # We need to pin to specific versions of the `sphinxcontrib-*` packages + # which still support sphinx 4.x. + # See https://github.com/googleapis/sphinx-docfx-yaml/issues/344 + # and https://github.com/googleapis/sphinx-docfx-yaml/issues/345. 
+ "sphinxcontrib-applehelp==1.0.4", + "sphinxcontrib-devhelp==1.0.2", + "sphinxcontrib-htmlhelp==2.0.1", + "sphinxcontrib-qthelp==1.0.3", + "sphinxcontrib-serializinghtml==1.1.5", + "gcp-sphinx-docfx-yaml", + "alabaster", + "recommonmark", + ) + + shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) + session.run( + "sphinx-build", + "-T", # show full traceback on exception + "-N", # no colors + "-D", + ( + "extensions=sphinx.ext.autodoc," + "sphinx.ext.autosummary," + "docfx_yaml.extension," + "sphinx.ext.intersphinx," + "sphinx.ext.coverage," + "sphinx.ext.napoleon," + "sphinx.ext.todo," + "sphinx.ext.viewcode," + "recommonmark" + ), + "-b", + "html", + "-d", + os.path.join("docs", "_build", "doctrees", ""), + os.path.join("docs", ""), + os.path.join("docs", "_build", "html", ""), + ) + + +@nox.session(python="3.12") +def prerelease_deps(session): + """Run all tests with prerelease versions of dependencies installed.""" + + # Install all dependencies + session.install("-e", ".[all, tests, tracing]") + unit_deps_all = UNIT_TEST_STANDARD_DEPENDENCIES + UNIT_TEST_EXTERNAL_DEPENDENCIES + session.install(*unit_deps_all) + system_deps_all = ( + SYSTEM_TEST_STANDARD_DEPENDENCIES + + SYSTEM_TEST_EXTERNAL_DEPENDENCIES + + SYSTEM_TEST_EXTRAS + ) + session.install(*system_deps_all) + + # Because we test minimum dependency versions on the minimum Python + # version, the first version we test with in the unit tests sessions has a + # constraints file containing all dependencies and extras. + with open( + CURRENT_DIRECTORY + / "testing" + / f"constraints-{UNIT_TEST_PYTHON_VERSIONS[0]}.txt", + encoding="utf-8", + ) as constraints_file: + constraints_text = constraints_file.read() + + # Ignore leading whitespace and comment lines. + constraints_deps = [ + match.group(1) + for match in re.finditer( + r"^\s*(\S+)(?===\S+)", constraints_text, flags=re.MULTILINE + ) + ] + + session.install(*constraints_deps) + + prerel_deps = [ + "protobuf", + # dependency of grpc + "six", + "googleapis-common-protos", + # Exclude version 1.52.0rc1 which has a known issue. 
See https://github.com/grpc/grpc/issues/32163 + "grpcio!=1.52.0rc1", + "grpcio-status", + "google-api-core", + "google-auth", + "proto-plus", + "google-cloud-testutils", + # dependencies of google-cloud-testutils" + "click", + ] + + for dep in prerel_deps: + session.install("--pre", "--no-deps", "--upgrade", dep) + + # Remaining dependencies + other_deps = [ + "requests", + ] + session.install(*other_deps) + + # Print out prerelease package versions + session.run( + "python", "-c", "import google.protobuf; print(google.protobuf.__version__)" + ) + session.run("python", "-c", "import grpc; print(grpc.__version__)") + session.run("python", "-c", "import google.auth; print(google.auth.__version__)") + + session.run("py.test", "tests/unit") diff --git a/packages/google-cloud-developerconnect/samples/generated_samples/developerconnect_v1_generated_developer_connect_create_connection_async.py b/packages/google-cloud-developerconnect/samples/generated_samples/developerconnect_v1_generated_developer_connect_create_connection_async.py new file mode 100644 index 000000000000..cf078a2589c8 --- /dev/null +++ b/packages/google-cloud-developerconnect/samples/generated_samples/developerconnect_v1_generated_developer_connect_create_connection_async.py @@ -0,0 +1,61 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateConnection +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-developerconnect + + +# [START developerconnect_v1_generated_DeveloperConnect_CreateConnection_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import developerconnect_v1 + + +async def sample_create_connection(): + # Create a client + client = developerconnect_v1.DeveloperConnectAsyncClient() + + # Initialize request argument(s) + connection = developerconnect_v1.Connection() + connection.github_config.github_app = "FIREBASE" + + request = developerconnect_v1.CreateConnectionRequest( + parent="parent_value", + connection_id="connection_id_value", + connection=connection, + ) + + # Make the request + operation = client.create_connection(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END developerconnect_v1_generated_DeveloperConnect_CreateConnection_async] diff --git a/packages/google-cloud-developerconnect/samples/generated_samples/developerconnect_v1_generated_developer_connect_create_connection_sync.py b/packages/google-cloud-developerconnect/samples/generated_samples/developerconnect_v1_generated_developer_connect_create_connection_sync.py new file mode 100644 index 000000000000..64ddd0ba8f40 --- /dev/null +++ b/packages/google-cloud-developerconnect/samples/generated_samples/developerconnect_v1_generated_developer_connect_create_connection_sync.py @@ -0,0 +1,61 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateConnection +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-developerconnect + + +# [START developerconnect_v1_generated_DeveloperConnect_CreateConnection_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import developerconnect_v1 + + +def sample_create_connection(): + # Create a client + client = developerconnect_v1.DeveloperConnectClient() + + # Initialize request argument(s) + connection = developerconnect_v1.Connection() + connection.github_config.github_app = "FIREBASE" + + request = developerconnect_v1.CreateConnectionRequest( + parent="parent_value", + connection_id="connection_id_value", + connection=connection, + ) + + # Make the request + operation = client.create_connection(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END developerconnect_v1_generated_DeveloperConnect_CreateConnection_sync] diff --git a/packages/google-cloud-developerconnect/samples/generated_samples/developerconnect_v1_generated_developer_connect_create_git_repository_link_async.py b/packages/google-cloud-developerconnect/samples/generated_samples/developerconnect_v1_generated_developer_connect_create_git_repository_link_async.py new file mode 100644 index 000000000000..0e03ce7f6e9e --- /dev/null +++ b/packages/google-cloud-developerconnect/samples/generated_samples/developerconnect_v1_generated_developer_connect_create_git_repository_link_async.py @@ -0,0 +1,61 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateGitRepositoryLink +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-developerconnect + + +# [START developerconnect_v1_generated_DeveloperConnect_CreateGitRepositoryLink_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import developerconnect_v1 + + +async def sample_create_git_repository_link(): + # Create a client + client = developerconnect_v1.DeveloperConnectAsyncClient() + + # Initialize request argument(s) + git_repository_link = developerconnect_v1.GitRepositoryLink() + git_repository_link.clone_uri = "clone_uri_value" + + request = developerconnect_v1.CreateGitRepositoryLinkRequest( + parent="parent_value", + git_repository_link=git_repository_link, + git_repository_link_id="git_repository_link_id_value", + ) + + # Make the request + operation = client.create_git_repository_link(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END developerconnect_v1_generated_DeveloperConnect_CreateGitRepositoryLink_async] diff --git a/packages/google-cloud-developerconnect/samples/generated_samples/developerconnect_v1_generated_developer_connect_create_git_repository_link_sync.py b/packages/google-cloud-developerconnect/samples/generated_samples/developerconnect_v1_generated_developer_connect_create_git_repository_link_sync.py new file mode 100644 index 000000000000..c2d70835c037 --- /dev/null +++ b/packages/google-cloud-developerconnect/samples/generated_samples/developerconnect_v1_generated_developer_connect_create_git_repository_link_sync.py @@ -0,0 +1,61 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateGitRepositoryLink +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-developerconnect + + +# [START developerconnect_v1_generated_DeveloperConnect_CreateGitRepositoryLink_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import developerconnect_v1 + + +def sample_create_git_repository_link(): + # Create a client + client = developerconnect_v1.DeveloperConnectClient() + + # Initialize request argument(s) + git_repository_link = developerconnect_v1.GitRepositoryLink() + git_repository_link.clone_uri = "clone_uri_value" + + request = developerconnect_v1.CreateGitRepositoryLinkRequest( + parent="parent_value", + git_repository_link=git_repository_link, + git_repository_link_id="git_repository_link_id_value", + ) + + # Make the request + operation = client.create_git_repository_link(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END developerconnect_v1_generated_DeveloperConnect_CreateGitRepositoryLink_sync] diff --git a/packages/google-cloud-developerconnect/samples/generated_samples/developerconnect_v1_generated_developer_connect_delete_connection_async.py b/packages/google-cloud-developerconnect/samples/generated_samples/developerconnect_v1_generated_developer_connect_delete_connection_async.py new file mode 100644 index 000000000000..a33dc55f978d --- /dev/null +++ b/packages/google-cloud-developerconnect/samples/generated_samples/developerconnect_v1_generated_developer_connect_delete_connection_async.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteConnection +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-developerconnect + + +# [START developerconnect_v1_generated_DeveloperConnect_DeleteConnection_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import developerconnect_v1 + + +async def sample_delete_connection(): + # Create a client + client = developerconnect_v1.DeveloperConnectAsyncClient() + + # Initialize request argument(s) + request = developerconnect_v1.DeleteConnectionRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_connection(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END developerconnect_v1_generated_DeveloperConnect_DeleteConnection_async] diff --git a/packages/google-cloud-developerconnect/samples/generated_samples/developerconnect_v1_generated_developer_connect_delete_connection_sync.py b/packages/google-cloud-developerconnect/samples/generated_samples/developerconnect_v1_generated_developer_connect_delete_connection_sync.py new file mode 100644 index 000000000000..d9a4e187e7bb --- /dev/null +++ b/packages/google-cloud-developerconnect/samples/generated_samples/developerconnect_v1_generated_developer_connect_delete_connection_sync.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteConnection +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-developerconnect + + +# [START developerconnect_v1_generated_DeveloperConnect_DeleteConnection_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import developerconnect_v1 + + +def sample_delete_connection(): + # Create a client + client = developerconnect_v1.DeveloperConnectClient() + + # Initialize request argument(s) + request = developerconnect_v1.DeleteConnectionRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_connection(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END developerconnect_v1_generated_DeveloperConnect_DeleteConnection_sync] diff --git a/packages/google-cloud-developerconnect/samples/generated_samples/developerconnect_v1_generated_developer_connect_delete_git_repository_link_async.py b/packages/google-cloud-developerconnect/samples/generated_samples/developerconnect_v1_generated_developer_connect_delete_git_repository_link_async.py new file mode 100644 index 000000000000..ece745f26411 --- /dev/null +++ b/packages/google-cloud-developerconnect/samples/generated_samples/developerconnect_v1_generated_developer_connect_delete_git_repository_link_async.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteGitRepositoryLink +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-developerconnect + + +# [START developerconnect_v1_generated_DeveloperConnect_DeleteGitRepositoryLink_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import developerconnect_v1 + + +async def sample_delete_git_repository_link(): + # Create a client + client = developerconnect_v1.DeveloperConnectAsyncClient() + + # Initialize request argument(s) + request = developerconnect_v1.DeleteGitRepositoryLinkRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_git_repository_link(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END developerconnect_v1_generated_DeveloperConnect_DeleteGitRepositoryLink_async] diff --git a/packages/google-cloud-developerconnect/samples/generated_samples/developerconnect_v1_generated_developer_connect_delete_git_repository_link_sync.py b/packages/google-cloud-developerconnect/samples/generated_samples/developerconnect_v1_generated_developer_connect_delete_git_repository_link_sync.py new file mode 100644 index 000000000000..1713257a3591 --- /dev/null +++ b/packages/google-cloud-developerconnect/samples/generated_samples/developerconnect_v1_generated_developer_connect_delete_git_repository_link_sync.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteGitRepositoryLink +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-developerconnect + + +# [START developerconnect_v1_generated_DeveloperConnect_DeleteGitRepositoryLink_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import developerconnect_v1 + + +def sample_delete_git_repository_link(): + # Create a client + client = developerconnect_v1.DeveloperConnectClient() + + # Initialize request argument(s) + request = developerconnect_v1.DeleteGitRepositoryLinkRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_git_repository_link(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END developerconnect_v1_generated_DeveloperConnect_DeleteGitRepositoryLink_sync] diff --git a/packages/google-cloud-developerconnect/samples/generated_samples/developerconnect_v1_generated_developer_connect_fetch_git_hub_installations_async.py b/packages/google-cloud-developerconnect/samples/generated_samples/developerconnect_v1_generated_developer_connect_fetch_git_hub_installations_async.py new file mode 100644 index 000000000000..c4aadd29d06a --- /dev/null +++ b/packages/google-cloud-developerconnect/samples/generated_samples/developerconnect_v1_generated_developer_connect_fetch_git_hub_installations_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for FetchGitHubInstallations +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-developerconnect + + +# [START developerconnect_v1_generated_DeveloperConnect_FetchGitHubInstallations_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import developerconnect_v1 + + +async def sample_fetch_git_hub_installations(): + # Create a client + client = developerconnect_v1.DeveloperConnectAsyncClient() + + # Initialize request argument(s) + request = developerconnect_v1.FetchGitHubInstallationsRequest( + connection="connection_value", + ) + + # Make the request + response = await client.fetch_git_hub_installations(request=request) + + # Handle the response + print(response) + +# [END developerconnect_v1_generated_DeveloperConnect_FetchGitHubInstallations_async] diff --git a/packages/google-cloud-developerconnect/samples/generated_samples/developerconnect_v1_generated_developer_connect_fetch_git_hub_installations_sync.py b/packages/google-cloud-developerconnect/samples/generated_samples/developerconnect_v1_generated_developer_connect_fetch_git_hub_installations_sync.py new file mode 100644 index 000000000000..748b35fe2a83 --- /dev/null +++ b/packages/google-cloud-developerconnect/samples/generated_samples/developerconnect_v1_generated_developer_connect_fetch_git_hub_installations_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for FetchGitHubInstallations +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-developerconnect + + +# [START developerconnect_v1_generated_DeveloperConnect_FetchGitHubInstallations_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import developerconnect_v1 + + +def sample_fetch_git_hub_installations(): + # Create a client + client = developerconnect_v1.DeveloperConnectClient() + + # Initialize request argument(s) + request = developerconnect_v1.FetchGitHubInstallationsRequest( + connection="connection_value", + ) + + # Make the request + response = client.fetch_git_hub_installations(request=request) + + # Handle the response + print(response) + +# [END developerconnect_v1_generated_DeveloperConnect_FetchGitHubInstallations_sync] diff --git a/packages/google-cloud-developerconnect/samples/generated_samples/developerconnect_v1_generated_developer_connect_fetch_git_refs_async.py b/packages/google-cloud-developerconnect/samples/generated_samples/developerconnect_v1_generated_developer_connect_fetch_git_refs_async.py new file mode 100644 index 000000000000..38094f692864 --- /dev/null +++ b/packages/google-cloud-developerconnect/samples/generated_samples/developerconnect_v1_generated_developer_connect_fetch_git_refs_async.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for FetchGitRefs +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-developerconnect + + +# [START developerconnect_v1_generated_DeveloperConnect_FetchGitRefs_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import developerconnect_v1 + + +async def sample_fetch_git_refs(): + # Create a client + client = developerconnect_v1.DeveloperConnectAsyncClient() + + # Initialize request argument(s) + request = developerconnect_v1.FetchGitRefsRequest( + git_repository_link="git_repository_link_value", + ref_type="BRANCH", + ) + + # Make the request + page_result = client.fetch_git_refs(request=request) + + # Handle the response + async for response in page_result: + print(response) + +# [END developerconnect_v1_generated_DeveloperConnect_FetchGitRefs_async] diff --git a/packages/google-cloud-developerconnect/samples/generated_samples/developerconnect_v1_generated_developer_connect_fetch_git_refs_sync.py b/packages/google-cloud-developerconnect/samples/generated_samples/developerconnect_v1_generated_developer_connect_fetch_git_refs_sync.py new file mode 100644 index 000000000000..4314023e6b96 --- /dev/null +++ b/packages/google-cloud-developerconnect/samples/generated_samples/developerconnect_v1_generated_developer_connect_fetch_git_refs_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for FetchGitRefs +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-developerconnect + + +# [START developerconnect_v1_generated_DeveloperConnect_FetchGitRefs_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import developerconnect_v1 + + +def sample_fetch_git_refs(): + # Create a client + client = developerconnect_v1.DeveloperConnectClient() + + # Initialize request argument(s) + request = developerconnect_v1.FetchGitRefsRequest( + git_repository_link="git_repository_link_value", + ref_type="BRANCH", + ) + + # Make the request + page_result = client.fetch_git_refs(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END developerconnect_v1_generated_DeveloperConnect_FetchGitRefs_sync] diff --git a/packages/google-maps-mapsplatformdatasets/samples/generated_samples/mapsplatformdatasets_v1alpha_generated_maps_platform_datasets_v1_alpha_list_dataset_versions_async.py b/packages/google-cloud-developerconnect/samples/generated_samples/developerconnect_v1_generated_developer_connect_fetch_linkable_git_repositories_async.py similarity index 69% rename from packages/google-maps-mapsplatformdatasets/samples/generated_samples/mapsplatformdatasets_v1alpha_generated_maps_platform_datasets_v1_alpha_list_dataset_versions_async.py rename to packages/google-cloud-developerconnect/samples/generated_samples/developerconnect_v1_generated_developer_connect_fetch_linkable_git_repositories_async.py index 29aa081f181f..56ca0781b320 100644 --- a/packages/google-maps-mapsplatformdatasets/samples/generated_samples/mapsplatformdatasets_v1alpha_generated_maps_platform_datasets_v1_alpha_list_dataset_versions_async.py +++ b/packages/google-cloud-developerconnect/samples/generated_samples/developerconnect_v1_generated_developer_connect_fetch_linkable_git_repositories_async.py @@ -15,15 +15,15 @@ # # Generated code. DO NOT EDIT! # -# Snippet for ListDatasetVersions +# Snippet for FetchLinkableGitRepositories # NOTE: This snippet has been automatically generated for illustrative purposes only. # It may require modifications to work in your environment. # To install the latest published package dependency, execute the following: -# python3 -m pip install google-maps-mapsplatformdatasets +# python3 -m pip install google-cloud-developerconnect -# [START mapsplatformdatasets_v1alpha_generated_MapsPlatformDatasetsV1Alpha_ListDatasetVersions_async] +# [START developerconnect_v1_generated_DeveloperConnect_FetchLinkableGitRepositories_async] # This snippet has been automatically generated and should be regarded as a # code template only. 
# It will require modifications to work: @@ -31,23 +31,23 @@ # - It may require specifying regional endpoints when creating the service # client as shown in: # https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.maps import mapsplatformdatasets_v1alpha +from google.cloud import developerconnect_v1 -async def sample_list_dataset_versions(): +async def sample_fetch_linkable_git_repositories(): # Create a client - client = mapsplatformdatasets_v1alpha.MapsPlatformDatasetsV1AlphaAsyncClient() + client = developerconnect_v1.DeveloperConnectAsyncClient() # Initialize request argument(s) - request = mapsplatformdatasets_v1alpha.ListDatasetVersionsRequest( - name="name_value", + request = developerconnect_v1.FetchLinkableGitRepositoriesRequest( + connection="connection_value", ) # Make the request - page_result = client.list_dataset_versions(request=request) + page_result = client.fetch_linkable_git_repositories(request=request) # Handle the response async for response in page_result: print(response) -# [END mapsplatformdatasets_v1alpha_generated_MapsPlatformDatasetsV1Alpha_ListDatasetVersions_async] +# [END developerconnect_v1_generated_DeveloperConnect_FetchLinkableGitRepositories_async] diff --git a/packages/google-maps-mapsplatformdatasets/samples/generated_samples/mapsplatformdatasets_v1alpha_generated_maps_platform_datasets_v1_alpha_list_dataset_versions_sync.py b/packages/google-cloud-developerconnect/samples/generated_samples/developerconnect_v1_generated_developer_connect_fetch_linkable_git_repositories_sync.py similarity index 69% rename from packages/google-maps-mapsplatformdatasets/samples/generated_samples/mapsplatformdatasets_v1alpha_generated_maps_platform_datasets_v1_alpha_list_dataset_versions_sync.py rename to packages/google-cloud-developerconnect/samples/generated_samples/developerconnect_v1_generated_developer_connect_fetch_linkable_git_repositories_sync.py index ba1b31287004..4e6daf0db822 100644 --- a/packages/google-maps-mapsplatformdatasets/samples/generated_samples/mapsplatformdatasets_v1alpha_generated_maps_platform_datasets_v1_alpha_list_dataset_versions_sync.py +++ b/packages/google-cloud-developerconnect/samples/generated_samples/developerconnect_v1_generated_developer_connect_fetch_linkable_git_repositories_sync.py @@ -15,15 +15,15 @@ # # Generated code. DO NOT EDIT! # -# Snippet for ListDatasetVersions +# Snippet for FetchLinkableGitRepositories # NOTE: This snippet has been automatically generated for illustrative purposes only. # It may require modifications to work in your environment. # To install the latest published package dependency, execute the following: -# python3 -m pip install google-maps-mapsplatformdatasets +# python3 -m pip install google-cloud-developerconnect -# [START mapsplatformdatasets_v1alpha_generated_MapsPlatformDatasetsV1Alpha_ListDatasetVersions_sync] +# [START developerconnect_v1_generated_DeveloperConnect_FetchLinkableGitRepositories_sync] # This snippet has been automatically generated and should be regarded as a # code template only. 
# It will require modifications to work: @@ -31,23 +31,23 @@ # - It may require specifying regional endpoints when creating the service # client as shown in: # https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.maps import mapsplatformdatasets_v1alpha +from google.cloud import developerconnect_v1 -def sample_list_dataset_versions(): +def sample_fetch_linkable_git_repositories(): # Create a client - client = mapsplatformdatasets_v1alpha.MapsPlatformDatasetsV1AlphaClient() + client = developerconnect_v1.DeveloperConnectClient() # Initialize request argument(s) - request = mapsplatformdatasets_v1alpha.ListDatasetVersionsRequest( - name="name_value", + request = developerconnect_v1.FetchLinkableGitRepositoriesRequest( + connection="connection_value", ) # Make the request - page_result = client.list_dataset_versions(request=request) + page_result = client.fetch_linkable_git_repositories(request=request) # Handle the response for response in page_result: print(response) -# [END mapsplatformdatasets_v1alpha_generated_MapsPlatformDatasetsV1Alpha_ListDatasetVersions_sync] +# [END developerconnect_v1_generated_DeveloperConnect_FetchLinkableGitRepositories_sync] diff --git a/packages/google-cloud-developerconnect/samples/generated_samples/developerconnect_v1_generated_developer_connect_fetch_read_token_async.py b/packages/google-cloud-developerconnect/samples/generated_samples/developerconnect_v1_generated_developer_connect_fetch_read_token_async.py new file mode 100644 index 000000000000..87902152cdb1 --- /dev/null +++ b/packages/google-cloud-developerconnect/samples/generated_samples/developerconnect_v1_generated_developer_connect_fetch_read_token_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for FetchReadToken +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-developerconnect + + +# [START developerconnect_v1_generated_DeveloperConnect_FetchReadToken_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import developerconnect_v1 + + +async def sample_fetch_read_token(): + # Create a client + client = developerconnect_v1.DeveloperConnectAsyncClient() + + # Initialize request argument(s) + request = developerconnect_v1.FetchReadTokenRequest( + git_repository_link="git_repository_link_value", + ) + + # Make the request + response = await client.fetch_read_token(request=request) + + # Handle the response + print(response) + +# [END developerconnect_v1_generated_DeveloperConnect_FetchReadToken_async] diff --git a/packages/google-cloud-developerconnect/samples/generated_samples/developerconnect_v1_generated_developer_connect_fetch_read_token_sync.py b/packages/google-cloud-developerconnect/samples/generated_samples/developerconnect_v1_generated_developer_connect_fetch_read_token_sync.py new file mode 100644 index 000000000000..ea9d835165ae --- /dev/null +++ b/packages/google-cloud-developerconnect/samples/generated_samples/developerconnect_v1_generated_developer_connect_fetch_read_token_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for FetchReadToken +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-developerconnect + + +# [START developerconnect_v1_generated_DeveloperConnect_FetchReadToken_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
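The async samples in this diff define coroutines such as `sample_fetch_read_token()` but leave running them to the caller. A minimal sketch of driving that coroutine with the standard library's `asyncio`, assuming the package is installed and credentials are configured as the generated headers already caution:

```python
import asyncio

from google.cloud import developerconnect_v1


async def sample_fetch_read_token():
    # Mirrors the generated async sample above; "git_repository_link_value"
    # is the same placeholder used there and must be replaced with a real
    # GitRepositoryLink resource name.
    client = developerconnect_v1.DeveloperConnectAsyncClient()
    request = developerconnect_v1.FetchReadTokenRequest(
        git_repository_link="git_repository_link_value",
    )
    response = await client.fetch_read_token(request=request)
    print(response)


if __name__ == "__main__":
    # asyncio.run() creates an event loop, runs the coroutine to completion,
    # and closes the loop again.
    asyncio.run(sample_fetch_read_token())
```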
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import developerconnect_v1 + + +def sample_fetch_read_token(): + # Create a client + client = developerconnect_v1.DeveloperConnectClient() + + # Initialize request argument(s) + request = developerconnect_v1.FetchReadTokenRequest( + git_repository_link="git_repository_link_value", + ) + + # Make the request + response = client.fetch_read_token(request=request) + + # Handle the response + print(response) + +# [END developerconnect_v1_generated_DeveloperConnect_FetchReadToken_sync] diff --git a/packages/google-cloud-developerconnect/samples/generated_samples/developerconnect_v1_generated_developer_connect_fetch_read_write_token_async.py b/packages/google-cloud-developerconnect/samples/generated_samples/developerconnect_v1_generated_developer_connect_fetch_read_write_token_async.py new file mode 100644 index 000000000000..668ac5fe6f2f --- /dev/null +++ b/packages/google-cloud-developerconnect/samples/generated_samples/developerconnect_v1_generated_developer_connect_fetch_read_write_token_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for FetchReadWriteToken +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-developerconnect + + +# [START developerconnect_v1_generated_DeveloperConnect_FetchReadWriteToken_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import developerconnect_v1 + + +async def sample_fetch_read_write_token(): + # Create a client + client = developerconnect_v1.DeveloperConnectAsyncClient() + + # Initialize request argument(s) + request = developerconnect_v1.FetchReadWriteTokenRequest( + git_repository_link="git_repository_link_value", + ) + + # Make the request + response = await client.fetch_read_write_token(request=request) + + # Handle the response + print(response) + +# [END developerconnect_v1_generated_DeveloperConnect_FetchReadWriteToken_async] diff --git a/packages/google-cloud-developerconnect/samples/generated_samples/developerconnect_v1_generated_developer_connect_fetch_read_write_token_sync.py b/packages/google-cloud-developerconnect/samples/generated_samples/developerconnect_v1_generated_developer_connect_fetch_read_write_token_sync.py new file mode 100644 index 000000000000..0987908cb38d --- /dev/null +++ b/packages/google-cloud-developerconnect/samples/generated_samples/developerconnect_v1_generated_developer_connect_fetch_read_write_token_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for FetchReadWriteToken +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-developerconnect + + +# [START developerconnect_v1_generated_DeveloperConnect_FetchReadWriteToken_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import developerconnect_v1 + + +def sample_fetch_read_write_token(): + # Create a client + client = developerconnect_v1.DeveloperConnectClient() + + # Initialize request argument(s) + request = developerconnect_v1.FetchReadWriteTokenRequest( + git_repository_link="git_repository_link_value", + ) + + # Make the request + response = client.fetch_read_write_token(request=request) + + # Handle the response + print(response) + +# [END developerconnect_v1_generated_DeveloperConnect_FetchReadWriteToken_sync] diff --git a/packages/google-cloud-developerconnect/samples/generated_samples/developerconnect_v1_generated_developer_connect_get_connection_async.py b/packages/google-cloud-developerconnect/samples/generated_samples/developerconnect_v1_generated_developer_connect_get_connection_async.py new file mode 100644 index 000000000000..58b49aa7f805 --- /dev/null +++ b/packages/google-cloud-developerconnect/samples/generated_samples/developerconnect_v1_generated_developer_connect_get_connection_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetConnection +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-developerconnect + + +# [START developerconnect_v1_generated_DeveloperConnect_GetConnection_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import developerconnect_v1 + + +async def sample_get_connection(): + # Create a client + client = developerconnect_v1.DeveloperConnectAsyncClient() + + # Initialize request argument(s) + request = developerconnect_v1.GetConnectionRequest( + name="name_value", + ) + + # Make the request + response = await client.get_connection(request=request) + + # Handle the response + print(response) + +# [END developerconnect_v1_generated_DeveloperConnect_GetConnection_async] diff --git a/packages/google-cloud-developerconnect/samples/generated_samples/developerconnect_v1_generated_developer_connect_get_connection_sync.py b/packages/google-cloud-developerconnect/samples/generated_samples/developerconnect_v1_generated_developer_connect_get_connection_sync.py new file mode 100644 index 000000000000..8be6ca3ccf9f --- /dev/null +++ b/packages/google-cloud-developerconnect/samples/generated_samples/developerconnect_v1_generated_developer_connect_get_connection_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetConnection +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-developerconnect + + +# [START developerconnect_v1_generated_DeveloperConnect_GetConnection_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import developerconnect_v1 + + +def sample_get_connection(): + # Create a client + client = developerconnect_v1.DeveloperConnectClient() + + # Initialize request argument(s) + request = developerconnect_v1.GetConnectionRequest( + name="name_value", + ) + + # Make the request + response = client.get_connection(request=request) + + # Handle the response + print(response) + +# [END developerconnect_v1_generated_DeveloperConnect_GetConnection_sync] diff --git a/packages/google-cloud-developerconnect/samples/generated_samples/developerconnect_v1_generated_developer_connect_get_git_repository_link_async.py b/packages/google-cloud-developerconnect/samples/generated_samples/developerconnect_v1_generated_developer_connect_get_git_repository_link_async.py new file mode 100644 index 000000000000..07ecece2b952 --- /dev/null +++ b/packages/google-cloud-developerconnect/samples/generated_samples/developerconnect_v1_generated_developer_connect_get_git_repository_link_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetGitRepositoryLink +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-developerconnect + + +# [START developerconnect_v1_generated_DeveloperConnect_GetGitRepositoryLink_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import developerconnect_v1 + + +async def sample_get_git_repository_link(): + # Create a client + client = developerconnect_v1.DeveloperConnectAsyncClient() + + # Initialize request argument(s) + request = developerconnect_v1.GetGitRepositoryLinkRequest( + name="name_value", + ) + + # Make the request + response = await client.get_git_repository_link(request=request) + + # Handle the response + print(response) + +# [END developerconnect_v1_generated_DeveloperConnect_GetGitRepositoryLink_async] diff --git a/packages/google-cloud-developerconnect/samples/generated_samples/developerconnect_v1_generated_developer_connect_get_git_repository_link_sync.py b/packages/google-cloud-developerconnect/samples/generated_samples/developerconnect_v1_generated_developer_connect_get_git_repository_link_sync.py new file mode 100644 index 000000000000..3d5935f3063c --- /dev/null +++ b/packages/google-cloud-developerconnect/samples/generated_samples/developerconnect_v1_generated_developer_connect_get_git_repository_link_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetGitRepositoryLink +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-developerconnect + + +# [START developerconnect_v1_generated_DeveloperConnect_GetGitRepositoryLink_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import developerconnect_v1 + + +def sample_get_git_repository_link(): + # Create a client + client = developerconnect_v1.DeveloperConnectClient() + + # Initialize request argument(s) + request = developerconnect_v1.GetGitRepositoryLinkRequest( + name="name_value", + ) + + # Make the request + response = client.get_git_repository_link(request=request) + + # Handle the response + print(response) + +# [END developerconnect_v1_generated_DeveloperConnect_GetGitRepositoryLink_sync] diff --git a/packages/google-maps-mapsplatformdatasets/samples/generated_samples/mapsplatformdatasets_v1alpha_generated_maps_platform_datasets_v1_alpha_list_datasets_async.py b/packages/google-cloud-developerconnect/samples/generated_samples/developerconnect_v1_generated_developer_connect_list_connections_async.py similarity index 72% rename from packages/google-maps-mapsplatformdatasets/samples/generated_samples/mapsplatformdatasets_v1alpha_generated_maps_platform_datasets_v1_alpha_list_datasets_async.py rename to packages/google-cloud-developerconnect/samples/generated_samples/developerconnect_v1_generated_developer_connect_list_connections_async.py index 62c8b47a0732..7e629690a318 100644 --- a/packages/google-maps-mapsplatformdatasets/samples/generated_samples/mapsplatformdatasets_v1alpha_generated_maps_platform_datasets_v1_alpha_list_datasets_async.py +++ b/packages/google-cloud-developerconnect/samples/generated_samples/developerconnect_v1_generated_developer_connect_list_connections_async.py @@ -15,15 +15,15 @@ # # Generated code. DO NOT EDIT! # -# Snippet for ListDatasets +# Snippet for ListConnections # NOTE: This snippet has been automatically generated for illustrative purposes only. # It may require modifications to work in your environment. # To install the latest published package dependency, execute the following: -# python3 -m pip install google-maps-mapsplatformdatasets +# python3 -m pip install google-cloud-developerconnect -# [START mapsplatformdatasets_v1alpha_generated_MapsPlatformDatasetsV1Alpha_ListDatasets_async] +# [START developerconnect_v1_generated_DeveloperConnect_ListConnections_async] # This snippet has been automatically generated and should be regarded as a # code template only. 
# It will require modifications to work: @@ -31,23 +31,23 @@ # - It may require specifying regional endpoints when creating the service # client as shown in: # https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.maps import mapsplatformdatasets_v1alpha +from google.cloud import developerconnect_v1 -async def sample_list_datasets(): +async def sample_list_connections(): # Create a client - client = mapsplatformdatasets_v1alpha.MapsPlatformDatasetsV1AlphaAsyncClient() + client = developerconnect_v1.DeveloperConnectAsyncClient() # Initialize request argument(s) - request = mapsplatformdatasets_v1alpha.ListDatasetsRequest( + request = developerconnect_v1.ListConnectionsRequest( parent="parent_value", ) # Make the request - page_result = client.list_datasets(request=request) + page_result = client.list_connections(request=request) # Handle the response async for response in page_result: print(response) -# [END mapsplatformdatasets_v1alpha_generated_MapsPlatformDatasetsV1Alpha_ListDatasets_async] +# [END developerconnect_v1_generated_DeveloperConnect_ListConnections_async] diff --git a/packages/google-maps-mapsplatformdatasets/samples/generated_samples/mapsplatformdatasets_v1alpha_generated_maps_platform_datasets_v1_alpha_list_datasets_sync.py b/packages/google-cloud-developerconnect/samples/generated_samples/developerconnect_v1_generated_developer_connect_list_connections_sync.py similarity index 72% rename from packages/google-maps-mapsplatformdatasets/samples/generated_samples/mapsplatformdatasets_v1alpha_generated_maps_platform_datasets_v1_alpha_list_datasets_sync.py rename to packages/google-cloud-developerconnect/samples/generated_samples/developerconnect_v1_generated_developer_connect_list_connections_sync.py index 70c63242d8c4..fb80407dbf71 100644 --- a/packages/google-maps-mapsplatformdatasets/samples/generated_samples/mapsplatformdatasets_v1alpha_generated_maps_platform_datasets_v1_alpha_list_datasets_sync.py +++ b/packages/google-cloud-developerconnect/samples/generated_samples/developerconnect_v1_generated_developer_connect_list_connections_sync.py @@ -15,15 +15,15 @@ # # Generated code. DO NOT EDIT! # -# Snippet for ListDatasets +# Snippet for ListConnections # NOTE: This snippet has been automatically generated for illustrative purposes only. # It may require modifications to work in your environment. # To install the latest published package dependency, execute the following: -# python3 -m pip install google-maps-mapsplatformdatasets +# python3 -m pip install google-cloud-developerconnect -# [START mapsplatformdatasets_v1alpha_generated_MapsPlatformDatasetsV1Alpha_ListDatasets_sync] +# [START developerconnect_v1_generated_DeveloperConnect_ListConnections_sync] # This snippet has been automatically generated and should be regarded as a # code template only. 
# It will require modifications to work: @@ -31,23 +31,23 @@ # - It may require specifying regional endpoints when creating the service # client as shown in: # https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.maps import mapsplatformdatasets_v1alpha +from google.cloud import developerconnect_v1 -def sample_list_datasets(): +def sample_list_connections(): # Create a client - client = mapsplatformdatasets_v1alpha.MapsPlatformDatasetsV1AlphaClient() + client = developerconnect_v1.DeveloperConnectClient() # Initialize request argument(s) - request = mapsplatformdatasets_v1alpha.ListDatasetsRequest( + request = developerconnect_v1.ListConnectionsRequest( parent="parent_value", ) # Make the request - page_result = client.list_datasets(request=request) + page_result = client.list_connections(request=request) # Handle the response for response in page_result: print(response) -# [END mapsplatformdatasets_v1alpha_generated_MapsPlatformDatasetsV1Alpha_ListDatasets_sync] +# [END developerconnect_v1_generated_DeveloperConnect_ListConnections_sync] diff --git a/packages/google-cloud-developerconnect/samples/generated_samples/developerconnect_v1_generated_developer_connect_list_git_repository_links_async.py b/packages/google-cloud-developerconnect/samples/generated_samples/developerconnect_v1_generated_developer_connect_list_git_repository_links_async.py new file mode 100644 index 000000000000..0a00f5a703af --- /dev/null +++ b/packages/google-cloud-developerconnect/samples/generated_samples/developerconnect_v1_generated_developer_connect_list_git_repository_links_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListGitRepositoryLinks +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-developerconnect + + +# [START developerconnect_v1_generated_DeveloperConnect_ListGitRepositoryLinks_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import developerconnect_v1 + + +async def sample_list_git_repository_links(): + # Create a client + client = developerconnect_v1.DeveloperConnectAsyncClient() + + # Initialize request argument(s) + request = developerconnect_v1.ListGitRepositoryLinksRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_git_repository_links(request=request) + + # Handle the response + async for response in page_result: + print(response) + +# [END developerconnect_v1_generated_DeveloperConnect_ListGitRepositoryLinks_async] diff --git a/packages/google-cloud-developerconnect/samples/generated_samples/developerconnect_v1_generated_developer_connect_list_git_repository_links_sync.py b/packages/google-cloud-developerconnect/samples/generated_samples/developerconnect_v1_generated_developer_connect_list_git_repository_links_sync.py new file mode 100644 index 000000000000..1b2546e87ba0 --- /dev/null +++ b/packages/google-cloud-developerconnect/samples/generated_samples/developerconnect_v1_generated_developer_connect_list_git_repository_links_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListGitRepositoryLinks +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-developerconnect + + +# [START developerconnect_v1_generated_DeveloperConnect_ListGitRepositoryLinks_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import developerconnect_v1 + + +def sample_list_git_repository_links(): + # Create a client + client = developerconnect_v1.DeveloperConnectClient() + + # Initialize request argument(s) + request = developerconnect_v1.ListGitRepositoryLinksRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_git_repository_links(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END developerconnect_v1_generated_DeveloperConnect_ListGitRepositoryLinks_sync] diff --git a/packages/google-cloud-developerconnect/samples/generated_samples/developerconnect_v1_generated_developer_connect_update_connection_async.py b/packages/google-cloud-developerconnect/samples/generated_samples/developerconnect_v1_generated_developer_connect_update_connection_async.py new file mode 100644 index 000000000000..8e81098e07dd --- /dev/null +++ b/packages/google-cloud-developerconnect/samples/generated_samples/developerconnect_v1_generated_developer_connect_update_connection_async.py @@ -0,0 +1,59 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateConnection +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-developerconnect + + +# [START developerconnect_v1_generated_DeveloperConnect_UpdateConnection_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
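The list and fetch samples above iterate their pagers lazily; the same pager can also be drained eagerly. A small sketch of that, reusing only the request types shown in this diff (the `parent_value` placeholder is the one the generated samples use, not a documented resource format):

```python
from google.cloud import developerconnect_v1


def collect_git_repository_links(parent: str):
    # The pager returned by list_git_repository_links transparently fetches
    # additional pages as it is iterated, so list() drains every page.
    client = developerconnect_v1.DeveloperConnectClient()
    request = developerconnect_v1.ListGitRepositoryLinksRequest(parent=parent)
    return list(client.list_git_repository_links(request=request))


if __name__ == "__main__":
    for link in collect_git_repository_links("parent_value"):
        print(link)
```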
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import developerconnect_v1 + + +async def sample_update_connection(): + # Create a client + client = developerconnect_v1.DeveloperConnectAsyncClient() + + # Initialize request argument(s) + connection = developerconnect_v1.Connection() + connection.github_config.github_app = "FIREBASE" + + request = developerconnect_v1.UpdateConnectionRequest( + connection=connection, + ) + + # Make the request + operation = client.update_connection(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END developerconnect_v1_generated_DeveloperConnect_UpdateConnection_async] diff --git a/packages/google-cloud-developerconnect/samples/generated_samples/developerconnect_v1_generated_developer_connect_update_connection_sync.py b/packages/google-cloud-developerconnect/samples/generated_samples/developerconnect_v1_generated_developer_connect_update_connection_sync.py new file mode 100644 index 000000000000..789dbdf44659 --- /dev/null +++ b/packages/google-cloud-developerconnect/samples/generated_samples/developerconnect_v1_generated_developer_connect_update_connection_sync.py @@ -0,0 +1,59 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateConnection +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-developerconnect + + +# [START developerconnect_v1_generated_DeveloperConnect_UpdateConnection_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
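The UpdateConnection samples block on a long-running operation via `operation.result()`. A minimal synchronous sketch that bounds the wait by passing a timeout to `result()`, assuming the google-api-core polling future accepts one (the 300-second figure is an arbitrary illustration, not an API default):

```python
from google.cloud import developerconnect_v1


def update_connection_with_deadline(timeout_seconds: float = 300.0):
    # Same request shape as the generated UpdateConnection samples.
    client = developerconnect_v1.DeveloperConnectClient()

    connection = developerconnect_v1.Connection()
    connection.github_config.github_app = "FIREBASE"

    request = developerconnect_v1.UpdateConnectionRequest(connection=connection)

    # update_connection returns a long-running operation; result() polls it
    # and raises a timeout error if it does not finish within the bound.
    operation = client.update_connection(request=request)
    return operation.result(timeout=timeout_seconds)
```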
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import developerconnect_v1 + + +def sample_update_connection(): + # Create a client + client = developerconnect_v1.DeveloperConnectClient() + + # Initialize request argument(s) + connection = developerconnect_v1.Connection() + connection.github_config.github_app = "FIREBASE" + + request = developerconnect_v1.UpdateConnectionRequest( + connection=connection, + ) + + # Make the request + operation = client.update_connection(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END developerconnect_v1_generated_DeveloperConnect_UpdateConnection_sync] diff --git a/packages/google-cloud-developerconnect/samples/generated_samples/snippet_metadata_google.cloud.developerconnect.v1.json b/packages/google-cloud-developerconnect/samples/generated_samples/snippet_metadata_google.cloud.developerconnect.v1.json new file mode 100644 index 000000000000..6d8efa20b69c --- /dev/null +++ b/packages/google-cloud-developerconnect/samples/generated_samples/snippet_metadata_google.cloud.developerconnect.v1.json @@ -0,0 +1,2317 @@ +{ + "clientLibrary": { + "apis": [ + { + "id": "google.cloud.developerconnect.v1", + "version": "v1" + } + ], + "language": "PYTHON", + "name": "google-cloud-developerconnect", + "version": "0.1.0" + }, + "snippets": [ + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.developerconnect_v1.DeveloperConnectAsyncClient", + "shortName": "DeveloperConnectAsyncClient" + }, + "fullName": "google.cloud.developerconnect_v1.DeveloperConnectAsyncClient.create_connection", + "method": { + "fullName": "google.cloud.developerconnect.v1.DeveloperConnect.CreateConnection", + "service": { + "fullName": "google.cloud.developerconnect.v1.DeveloperConnect", + "shortName": "DeveloperConnect" + }, + "shortName": "CreateConnection" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.developerconnect_v1.types.CreateConnectionRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "connection", + "type": "google.cloud.developerconnect_v1.types.Connection" + }, + { + "name": "connection_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "create_connection" + }, + "description": "Sample for CreateConnection", + "file": "developerconnect_v1_generated_developer_connect_create_connection_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "developerconnect_v1_generated_DeveloperConnect_CreateConnection_async", + "segments": [ + { + "end": 60, + "start": 27, + "type": "FULL" + }, + { + "end": 60, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 50, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 57, + "start": 51, + "type": "REQUEST_EXECUTION" + }, + { + "end": 61, + "start": 58, + "type": "RESPONSE_HANDLING" + } + ], + "title": "developerconnect_v1_generated_developer_connect_create_connection_async.py" + }, + { + "canonical": 
true, + "clientMethod": { + "client": { + "fullName": "google.cloud.developerconnect_v1.DeveloperConnectClient", + "shortName": "DeveloperConnectClient" + }, + "fullName": "google.cloud.developerconnect_v1.DeveloperConnectClient.create_connection", + "method": { + "fullName": "google.cloud.developerconnect.v1.DeveloperConnect.CreateConnection", + "service": { + "fullName": "google.cloud.developerconnect.v1.DeveloperConnect", + "shortName": "DeveloperConnect" + }, + "shortName": "CreateConnection" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.developerconnect_v1.types.CreateConnectionRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "connection", + "type": "google.cloud.developerconnect_v1.types.Connection" + }, + { + "name": "connection_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "create_connection" + }, + "description": "Sample for CreateConnection", + "file": "developerconnect_v1_generated_developer_connect_create_connection_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "developerconnect_v1_generated_DeveloperConnect_CreateConnection_sync", + "segments": [ + { + "end": 60, + "start": 27, + "type": "FULL" + }, + { + "end": 60, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 50, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 57, + "start": 51, + "type": "REQUEST_EXECUTION" + }, + { + "end": 61, + "start": 58, + "type": "RESPONSE_HANDLING" + } + ], + "title": "developerconnect_v1_generated_developer_connect_create_connection_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.developerconnect_v1.DeveloperConnectAsyncClient", + "shortName": "DeveloperConnectAsyncClient" + }, + "fullName": "google.cloud.developerconnect_v1.DeveloperConnectAsyncClient.create_git_repository_link", + "method": { + "fullName": "google.cloud.developerconnect.v1.DeveloperConnect.CreateGitRepositoryLink", + "service": { + "fullName": "google.cloud.developerconnect.v1.DeveloperConnect", + "shortName": "DeveloperConnect" + }, + "shortName": "CreateGitRepositoryLink" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.developerconnect_v1.types.CreateGitRepositoryLinkRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "git_repository_link", + "type": "google.cloud.developerconnect_v1.types.GitRepositoryLink" + }, + { + "name": "git_repository_link_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "create_git_repository_link" + }, + "description": "Sample for CreateGitRepositoryLink", + "file": "developerconnect_v1_generated_developer_connect_create_git_repository_link_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "developerconnect_v1_generated_DeveloperConnect_CreateGitRepositoryLink_async", + "segments": [ + { + "end": 60, + "start": 27, + "type": "FULL" + }, + { + "end": 60, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + 
"start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 50, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 57, + "start": 51, + "type": "REQUEST_EXECUTION" + }, + { + "end": 61, + "start": 58, + "type": "RESPONSE_HANDLING" + } + ], + "title": "developerconnect_v1_generated_developer_connect_create_git_repository_link_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.developerconnect_v1.DeveloperConnectClient", + "shortName": "DeveloperConnectClient" + }, + "fullName": "google.cloud.developerconnect_v1.DeveloperConnectClient.create_git_repository_link", + "method": { + "fullName": "google.cloud.developerconnect.v1.DeveloperConnect.CreateGitRepositoryLink", + "service": { + "fullName": "google.cloud.developerconnect.v1.DeveloperConnect", + "shortName": "DeveloperConnect" + }, + "shortName": "CreateGitRepositoryLink" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.developerconnect_v1.types.CreateGitRepositoryLinkRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "git_repository_link", + "type": "google.cloud.developerconnect_v1.types.GitRepositoryLink" + }, + { + "name": "git_repository_link_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "create_git_repository_link" + }, + "description": "Sample for CreateGitRepositoryLink", + "file": "developerconnect_v1_generated_developer_connect_create_git_repository_link_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "developerconnect_v1_generated_DeveloperConnect_CreateGitRepositoryLink_sync", + "segments": [ + { + "end": 60, + "start": 27, + "type": "FULL" + }, + { + "end": 60, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 50, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 57, + "start": 51, + "type": "REQUEST_EXECUTION" + }, + { + "end": 61, + "start": 58, + "type": "RESPONSE_HANDLING" + } + ], + "title": "developerconnect_v1_generated_developer_connect_create_git_repository_link_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.developerconnect_v1.DeveloperConnectAsyncClient", + "shortName": "DeveloperConnectAsyncClient" + }, + "fullName": "google.cloud.developerconnect_v1.DeveloperConnectAsyncClient.delete_connection", + "method": { + "fullName": "google.cloud.developerconnect.v1.DeveloperConnect.DeleteConnection", + "service": { + "fullName": "google.cloud.developerconnect.v1.DeveloperConnect", + "shortName": "DeveloperConnect" + }, + "shortName": "DeleteConnection" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.developerconnect_v1.types.DeleteConnectionRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "delete_connection" + }, + "description": "Sample for DeleteConnection", + "file": "developerconnect_v1_generated_developer_connect_delete_connection_async.py", + "language": "PYTHON", + "origin": 
"API_DEFINITION", + "regionTag": "developerconnect_v1_generated_DeveloperConnect_DeleteConnection_async", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "developerconnect_v1_generated_developer_connect_delete_connection_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.developerconnect_v1.DeveloperConnectClient", + "shortName": "DeveloperConnectClient" + }, + "fullName": "google.cloud.developerconnect_v1.DeveloperConnectClient.delete_connection", + "method": { + "fullName": "google.cloud.developerconnect.v1.DeveloperConnect.DeleteConnection", + "service": { + "fullName": "google.cloud.developerconnect.v1.DeveloperConnect", + "shortName": "DeveloperConnect" + }, + "shortName": "DeleteConnection" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.developerconnect_v1.types.DeleteConnectionRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "delete_connection" + }, + "description": "Sample for DeleteConnection", + "file": "developerconnect_v1_generated_developer_connect_delete_connection_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "developerconnect_v1_generated_DeveloperConnect_DeleteConnection_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "developerconnect_v1_generated_developer_connect_delete_connection_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.developerconnect_v1.DeveloperConnectAsyncClient", + "shortName": "DeveloperConnectAsyncClient" + }, + "fullName": "google.cloud.developerconnect_v1.DeveloperConnectAsyncClient.delete_git_repository_link", + "method": { + "fullName": "google.cloud.developerconnect.v1.DeveloperConnect.DeleteGitRepositoryLink", + "service": { + "fullName": "google.cloud.developerconnect.v1.DeveloperConnect", + "shortName": "DeveloperConnect" + }, + "shortName": "DeleteGitRepositoryLink" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.developerconnect_v1.types.DeleteGitRepositoryLinkRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "delete_git_repository_link" + }, + "description": "Sample for DeleteGitRepositoryLink", + "file": 
"developerconnect_v1_generated_developer_connect_delete_git_repository_link_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "developerconnect_v1_generated_DeveloperConnect_DeleteGitRepositoryLink_async", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "developerconnect_v1_generated_developer_connect_delete_git_repository_link_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.developerconnect_v1.DeveloperConnectClient", + "shortName": "DeveloperConnectClient" + }, + "fullName": "google.cloud.developerconnect_v1.DeveloperConnectClient.delete_git_repository_link", + "method": { + "fullName": "google.cloud.developerconnect.v1.DeveloperConnect.DeleteGitRepositoryLink", + "service": { + "fullName": "google.cloud.developerconnect.v1.DeveloperConnect", + "shortName": "DeveloperConnect" + }, + "shortName": "DeleteGitRepositoryLink" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.developerconnect_v1.types.DeleteGitRepositoryLinkRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "delete_git_repository_link" + }, + "description": "Sample for DeleteGitRepositoryLink", + "file": "developerconnect_v1_generated_developer_connect_delete_git_repository_link_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "developerconnect_v1_generated_DeveloperConnect_DeleteGitRepositoryLink_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "developerconnect_v1_generated_developer_connect_delete_git_repository_link_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.developerconnect_v1.DeveloperConnectAsyncClient", + "shortName": "DeveloperConnectAsyncClient" + }, + "fullName": "google.cloud.developerconnect_v1.DeveloperConnectAsyncClient.fetch_git_hub_installations", + "method": { + "fullName": "google.cloud.developerconnect.v1.DeveloperConnect.FetchGitHubInstallations", + "service": { + "fullName": "google.cloud.developerconnect.v1.DeveloperConnect", + "shortName": "DeveloperConnect" + }, + "shortName": "FetchGitHubInstallations" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.developerconnect_v1.types.FetchGitHubInstallationsRequest" + }, + { + "name": "connection", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": 
"google.cloud.developerconnect_v1.types.FetchGitHubInstallationsResponse", + "shortName": "fetch_git_hub_installations" + }, + "description": "Sample for FetchGitHubInstallations", + "file": "developerconnect_v1_generated_developer_connect_fetch_git_hub_installations_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "developerconnect_v1_generated_DeveloperConnect_FetchGitHubInstallations_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "developerconnect_v1_generated_developer_connect_fetch_git_hub_installations_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.developerconnect_v1.DeveloperConnectClient", + "shortName": "DeveloperConnectClient" + }, + "fullName": "google.cloud.developerconnect_v1.DeveloperConnectClient.fetch_git_hub_installations", + "method": { + "fullName": "google.cloud.developerconnect.v1.DeveloperConnect.FetchGitHubInstallations", + "service": { + "fullName": "google.cloud.developerconnect.v1.DeveloperConnect", + "shortName": "DeveloperConnect" + }, + "shortName": "FetchGitHubInstallations" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.developerconnect_v1.types.FetchGitHubInstallationsRequest" + }, + { + "name": "connection", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.developerconnect_v1.types.FetchGitHubInstallationsResponse", + "shortName": "fetch_git_hub_installations" + }, + "description": "Sample for FetchGitHubInstallations", + "file": "developerconnect_v1_generated_developer_connect_fetch_git_hub_installations_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "developerconnect_v1_generated_DeveloperConnect_FetchGitHubInstallations_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "developerconnect_v1_generated_developer_connect_fetch_git_hub_installations_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.developerconnect_v1.DeveloperConnectAsyncClient", + "shortName": "DeveloperConnectAsyncClient" + }, + "fullName": "google.cloud.developerconnect_v1.DeveloperConnectAsyncClient.fetch_git_refs", + "method": { + "fullName": "google.cloud.developerconnect.v1.DeveloperConnect.FetchGitRefs", + "service": { + "fullName": "google.cloud.developerconnect.v1.DeveloperConnect", + "shortName": "DeveloperConnect" + }, + "shortName": "FetchGitRefs" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.developerconnect_v1.types.FetchGitRefsRequest" + }, + { + "name": "git_repository_link", + "type": "str" + }, + { + "name": 
"ref_type", + "type": "google.cloud.developerconnect_v1.types.FetchGitRefsRequest.RefType" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.developerconnect_v1.services.developer_connect.pagers.FetchGitRefsAsyncPager", + "shortName": "fetch_git_refs" + }, + "description": "Sample for FetchGitRefs", + "file": "developerconnect_v1_generated_developer_connect_fetch_git_refs_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "developerconnect_v1_generated_DeveloperConnect_FetchGitRefs_async", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "developerconnect_v1_generated_developer_connect_fetch_git_refs_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.developerconnect_v1.DeveloperConnectClient", + "shortName": "DeveloperConnectClient" + }, + "fullName": "google.cloud.developerconnect_v1.DeveloperConnectClient.fetch_git_refs", + "method": { + "fullName": "google.cloud.developerconnect.v1.DeveloperConnect.FetchGitRefs", + "service": { + "fullName": "google.cloud.developerconnect.v1.DeveloperConnect", + "shortName": "DeveloperConnect" + }, + "shortName": "FetchGitRefs" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.developerconnect_v1.types.FetchGitRefsRequest" + }, + { + "name": "git_repository_link", + "type": "str" + }, + { + "name": "ref_type", + "type": "google.cloud.developerconnect_v1.types.FetchGitRefsRequest.RefType" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.developerconnect_v1.services.developer_connect.pagers.FetchGitRefsPager", + "shortName": "fetch_git_refs" + }, + "description": "Sample for FetchGitRefs", + "file": "developerconnect_v1_generated_developer_connect_fetch_git_refs_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "developerconnect_v1_generated_DeveloperConnect_FetchGitRefs_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "developerconnect_v1_generated_developer_connect_fetch_git_refs_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.developerconnect_v1.DeveloperConnectAsyncClient", + "shortName": "DeveloperConnectAsyncClient" + }, + "fullName": "google.cloud.developerconnect_v1.DeveloperConnectAsyncClient.fetch_linkable_git_repositories", + "method": { + "fullName": "google.cloud.developerconnect.v1.DeveloperConnect.FetchLinkableGitRepositories", + "service": { + "fullName": 
"google.cloud.developerconnect.v1.DeveloperConnect", + "shortName": "DeveloperConnect" + }, + "shortName": "FetchLinkableGitRepositories" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.developerconnect_v1.types.FetchLinkableGitRepositoriesRequest" + }, + { + "name": "connection", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.developerconnect_v1.services.developer_connect.pagers.FetchLinkableGitRepositoriesAsyncPager", + "shortName": "fetch_linkable_git_repositories" + }, + "description": "Sample for FetchLinkableGitRepositories", + "file": "developerconnect_v1_generated_developer_connect_fetch_linkable_git_repositories_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "developerconnect_v1_generated_DeveloperConnect_FetchLinkableGitRepositories_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "developerconnect_v1_generated_developer_connect_fetch_linkable_git_repositories_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.developerconnect_v1.DeveloperConnectClient", + "shortName": "DeveloperConnectClient" + }, + "fullName": "google.cloud.developerconnect_v1.DeveloperConnectClient.fetch_linkable_git_repositories", + "method": { + "fullName": "google.cloud.developerconnect.v1.DeveloperConnect.FetchLinkableGitRepositories", + "service": { + "fullName": "google.cloud.developerconnect.v1.DeveloperConnect", + "shortName": "DeveloperConnect" + }, + "shortName": "FetchLinkableGitRepositories" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.developerconnect_v1.types.FetchLinkableGitRepositoriesRequest" + }, + { + "name": "connection", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.developerconnect_v1.services.developer_connect.pagers.FetchLinkableGitRepositoriesPager", + "shortName": "fetch_linkable_git_repositories" + }, + "description": "Sample for FetchLinkableGitRepositories", + "file": "developerconnect_v1_generated_developer_connect_fetch_linkable_git_repositories_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "developerconnect_v1_generated_DeveloperConnect_FetchLinkableGitRepositories_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "developerconnect_v1_generated_developer_connect_fetch_linkable_git_repositories_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": 
"google.cloud.developerconnect_v1.DeveloperConnectAsyncClient", + "shortName": "DeveloperConnectAsyncClient" + }, + "fullName": "google.cloud.developerconnect_v1.DeveloperConnectAsyncClient.fetch_read_token", + "method": { + "fullName": "google.cloud.developerconnect.v1.DeveloperConnect.FetchReadToken", + "service": { + "fullName": "google.cloud.developerconnect.v1.DeveloperConnect", + "shortName": "DeveloperConnect" + }, + "shortName": "FetchReadToken" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.developerconnect_v1.types.FetchReadTokenRequest" + }, + { + "name": "git_repository_link", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.developerconnect_v1.types.FetchReadTokenResponse", + "shortName": "fetch_read_token" + }, + "description": "Sample for FetchReadToken", + "file": "developerconnect_v1_generated_developer_connect_fetch_read_token_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "developerconnect_v1_generated_DeveloperConnect_FetchReadToken_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "developerconnect_v1_generated_developer_connect_fetch_read_token_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.developerconnect_v1.DeveloperConnectClient", + "shortName": "DeveloperConnectClient" + }, + "fullName": "google.cloud.developerconnect_v1.DeveloperConnectClient.fetch_read_token", + "method": { + "fullName": "google.cloud.developerconnect.v1.DeveloperConnect.FetchReadToken", + "service": { + "fullName": "google.cloud.developerconnect.v1.DeveloperConnect", + "shortName": "DeveloperConnect" + }, + "shortName": "FetchReadToken" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.developerconnect_v1.types.FetchReadTokenRequest" + }, + { + "name": "git_repository_link", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.developerconnect_v1.types.FetchReadTokenResponse", + "shortName": "fetch_read_token" + }, + "description": "Sample for FetchReadToken", + "file": "developerconnect_v1_generated_developer_connect_fetch_read_token_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "developerconnect_v1_generated_DeveloperConnect_FetchReadToken_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "developerconnect_v1_generated_developer_connect_fetch_read_token_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": 
true, + "client": { + "fullName": "google.cloud.developerconnect_v1.DeveloperConnectAsyncClient", + "shortName": "DeveloperConnectAsyncClient" + }, + "fullName": "google.cloud.developerconnect_v1.DeveloperConnectAsyncClient.fetch_read_write_token", + "method": { + "fullName": "google.cloud.developerconnect.v1.DeveloperConnect.FetchReadWriteToken", + "service": { + "fullName": "google.cloud.developerconnect.v1.DeveloperConnect", + "shortName": "DeveloperConnect" + }, + "shortName": "FetchReadWriteToken" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.developerconnect_v1.types.FetchReadWriteTokenRequest" + }, + { + "name": "git_repository_link", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.developerconnect_v1.types.FetchReadWriteTokenResponse", + "shortName": "fetch_read_write_token" + }, + "description": "Sample for FetchReadWriteToken", + "file": "developerconnect_v1_generated_developer_connect_fetch_read_write_token_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "developerconnect_v1_generated_DeveloperConnect_FetchReadWriteToken_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "developerconnect_v1_generated_developer_connect_fetch_read_write_token_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.developerconnect_v1.DeveloperConnectClient", + "shortName": "DeveloperConnectClient" + }, + "fullName": "google.cloud.developerconnect_v1.DeveloperConnectClient.fetch_read_write_token", + "method": { + "fullName": "google.cloud.developerconnect.v1.DeveloperConnect.FetchReadWriteToken", + "service": { + "fullName": "google.cloud.developerconnect.v1.DeveloperConnect", + "shortName": "DeveloperConnect" + }, + "shortName": "FetchReadWriteToken" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.developerconnect_v1.types.FetchReadWriteTokenRequest" + }, + { + "name": "git_repository_link", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.developerconnect_v1.types.FetchReadWriteTokenResponse", + "shortName": "fetch_read_write_token" + }, + "description": "Sample for FetchReadWriteToken", + "file": "developerconnect_v1_generated_developer_connect_fetch_read_write_token_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "developerconnect_v1_generated_DeveloperConnect_FetchReadWriteToken_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": 
"developerconnect_v1_generated_developer_connect_fetch_read_write_token_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.developerconnect_v1.DeveloperConnectAsyncClient", + "shortName": "DeveloperConnectAsyncClient" + }, + "fullName": "google.cloud.developerconnect_v1.DeveloperConnectAsyncClient.get_connection", + "method": { + "fullName": "google.cloud.developerconnect.v1.DeveloperConnect.GetConnection", + "service": { + "fullName": "google.cloud.developerconnect.v1.DeveloperConnect", + "shortName": "DeveloperConnect" + }, + "shortName": "GetConnection" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.developerconnect_v1.types.GetConnectionRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.developerconnect_v1.types.Connection", + "shortName": "get_connection" + }, + "description": "Sample for GetConnection", + "file": "developerconnect_v1_generated_developer_connect_get_connection_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "developerconnect_v1_generated_DeveloperConnect_GetConnection_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "developerconnect_v1_generated_developer_connect_get_connection_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.developerconnect_v1.DeveloperConnectClient", + "shortName": "DeveloperConnectClient" + }, + "fullName": "google.cloud.developerconnect_v1.DeveloperConnectClient.get_connection", + "method": { + "fullName": "google.cloud.developerconnect.v1.DeveloperConnect.GetConnection", + "service": { + "fullName": "google.cloud.developerconnect.v1.DeveloperConnect", + "shortName": "DeveloperConnect" + }, + "shortName": "GetConnection" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.developerconnect_v1.types.GetConnectionRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.developerconnect_v1.types.Connection", + "shortName": "get_connection" + }, + "description": "Sample for GetConnection", + "file": "developerconnect_v1_generated_developer_connect_get_connection_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "developerconnect_v1_generated_DeveloperConnect_GetConnection_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": 
"developerconnect_v1_generated_developer_connect_get_connection_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.developerconnect_v1.DeveloperConnectAsyncClient", + "shortName": "DeveloperConnectAsyncClient" + }, + "fullName": "google.cloud.developerconnect_v1.DeveloperConnectAsyncClient.get_git_repository_link", + "method": { + "fullName": "google.cloud.developerconnect.v1.DeveloperConnect.GetGitRepositoryLink", + "service": { + "fullName": "google.cloud.developerconnect.v1.DeveloperConnect", + "shortName": "DeveloperConnect" + }, + "shortName": "GetGitRepositoryLink" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.developerconnect_v1.types.GetGitRepositoryLinkRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.developerconnect_v1.types.GitRepositoryLink", + "shortName": "get_git_repository_link" + }, + "description": "Sample for GetGitRepositoryLink", + "file": "developerconnect_v1_generated_developer_connect_get_git_repository_link_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "developerconnect_v1_generated_DeveloperConnect_GetGitRepositoryLink_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "developerconnect_v1_generated_developer_connect_get_git_repository_link_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.developerconnect_v1.DeveloperConnectClient", + "shortName": "DeveloperConnectClient" + }, + "fullName": "google.cloud.developerconnect_v1.DeveloperConnectClient.get_git_repository_link", + "method": { + "fullName": "google.cloud.developerconnect.v1.DeveloperConnect.GetGitRepositoryLink", + "service": { + "fullName": "google.cloud.developerconnect.v1.DeveloperConnect", + "shortName": "DeveloperConnect" + }, + "shortName": "GetGitRepositoryLink" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.developerconnect_v1.types.GetGitRepositoryLinkRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.developerconnect_v1.types.GitRepositoryLink", + "shortName": "get_git_repository_link" + }, + "description": "Sample for GetGitRepositoryLink", + "file": "developerconnect_v1_generated_developer_connect_get_git_repository_link_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "developerconnect_v1_generated_DeveloperConnect_GetGitRepositoryLink_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": 
"REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "developerconnect_v1_generated_developer_connect_get_git_repository_link_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.developerconnect_v1.DeveloperConnectAsyncClient", + "shortName": "DeveloperConnectAsyncClient" + }, + "fullName": "google.cloud.developerconnect_v1.DeveloperConnectAsyncClient.list_connections", + "method": { + "fullName": "google.cloud.developerconnect.v1.DeveloperConnect.ListConnections", + "service": { + "fullName": "google.cloud.developerconnect.v1.DeveloperConnect", + "shortName": "DeveloperConnect" + }, + "shortName": "ListConnections" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.developerconnect_v1.types.ListConnectionsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.developerconnect_v1.services.developer_connect.pagers.ListConnectionsAsyncPager", + "shortName": "list_connections" + }, + "description": "Sample for ListConnections", + "file": "developerconnect_v1_generated_developer_connect_list_connections_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "developerconnect_v1_generated_DeveloperConnect_ListConnections_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "developerconnect_v1_generated_developer_connect_list_connections_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.developerconnect_v1.DeveloperConnectClient", + "shortName": "DeveloperConnectClient" + }, + "fullName": "google.cloud.developerconnect_v1.DeveloperConnectClient.list_connections", + "method": { + "fullName": "google.cloud.developerconnect.v1.DeveloperConnect.ListConnections", + "service": { + "fullName": "google.cloud.developerconnect.v1.DeveloperConnect", + "shortName": "DeveloperConnect" + }, + "shortName": "ListConnections" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.developerconnect_v1.types.ListConnectionsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.developerconnect_v1.services.developer_connect.pagers.ListConnectionsPager", + "shortName": "list_connections" + }, + "description": "Sample for ListConnections", + "file": "developerconnect_v1_generated_developer_connect_list_connections_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "developerconnect_v1_generated_DeveloperConnect_ListConnections_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + 
"type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "developerconnect_v1_generated_developer_connect_list_connections_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.developerconnect_v1.DeveloperConnectAsyncClient", + "shortName": "DeveloperConnectAsyncClient" + }, + "fullName": "google.cloud.developerconnect_v1.DeveloperConnectAsyncClient.list_git_repository_links", + "method": { + "fullName": "google.cloud.developerconnect.v1.DeveloperConnect.ListGitRepositoryLinks", + "service": { + "fullName": "google.cloud.developerconnect.v1.DeveloperConnect", + "shortName": "DeveloperConnect" + }, + "shortName": "ListGitRepositoryLinks" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.developerconnect_v1.types.ListGitRepositoryLinksRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.developerconnect_v1.services.developer_connect.pagers.ListGitRepositoryLinksAsyncPager", + "shortName": "list_git_repository_links" + }, + "description": "Sample for ListGitRepositoryLinks", + "file": "developerconnect_v1_generated_developer_connect_list_git_repository_links_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "developerconnect_v1_generated_DeveloperConnect_ListGitRepositoryLinks_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "developerconnect_v1_generated_developer_connect_list_git_repository_links_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.developerconnect_v1.DeveloperConnectClient", + "shortName": "DeveloperConnectClient" + }, + "fullName": "google.cloud.developerconnect_v1.DeveloperConnectClient.list_git_repository_links", + "method": { + "fullName": "google.cloud.developerconnect.v1.DeveloperConnect.ListGitRepositoryLinks", + "service": { + "fullName": "google.cloud.developerconnect.v1.DeveloperConnect", + "shortName": "DeveloperConnect" + }, + "shortName": "ListGitRepositoryLinks" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.developerconnect_v1.types.ListGitRepositoryLinksRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.developerconnect_v1.services.developer_connect.pagers.ListGitRepositoryLinksPager", + "shortName": "list_git_repository_links" + }, + "description": "Sample for ListGitRepositoryLinks", + "file": "developerconnect_v1_generated_developer_connect_list_git_repository_links_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "developerconnect_v1_generated_DeveloperConnect_ListGitRepositoryLinks_sync", + 
"segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "developerconnect_v1_generated_developer_connect_list_git_repository_links_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.developerconnect_v1.DeveloperConnectAsyncClient", + "shortName": "DeveloperConnectAsyncClient" + }, + "fullName": "google.cloud.developerconnect_v1.DeveloperConnectAsyncClient.update_connection", + "method": { + "fullName": "google.cloud.developerconnect.v1.DeveloperConnect.UpdateConnection", + "service": { + "fullName": "google.cloud.developerconnect.v1.DeveloperConnect", + "shortName": "DeveloperConnect" + }, + "shortName": "UpdateConnection" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.developerconnect_v1.types.UpdateConnectionRequest" + }, + { + "name": "connection", + "type": "google.cloud.developerconnect_v1.types.Connection" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "update_connection" + }, + "description": "Sample for UpdateConnection", + "file": "developerconnect_v1_generated_developer_connect_update_connection_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "developerconnect_v1_generated_DeveloperConnect_UpdateConnection_async", + "segments": [ + { + "end": 58, + "start": 27, + "type": "FULL" + }, + { + "end": 58, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 48, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 55, + "start": 49, + "type": "REQUEST_EXECUTION" + }, + { + "end": 59, + "start": 56, + "type": "RESPONSE_HANDLING" + } + ], + "title": "developerconnect_v1_generated_developer_connect_update_connection_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.developerconnect_v1.DeveloperConnectClient", + "shortName": "DeveloperConnectClient" + }, + "fullName": "google.cloud.developerconnect_v1.DeveloperConnectClient.update_connection", + "method": { + "fullName": "google.cloud.developerconnect.v1.DeveloperConnect.UpdateConnection", + "service": { + "fullName": "google.cloud.developerconnect.v1.DeveloperConnect", + "shortName": "DeveloperConnect" + }, + "shortName": "UpdateConnection" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.developerconnect_v1.types.UpdateConnectionRequest" + }, + { + "name": "connection", + "type": "google.cloud.developerconnect_v1.types.Connection" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "update_connection" + }, + 
"description": "Sample for UpdateConnection", + "file": "developerconnect_v1_generated_developer_connect_update_connection_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "developerconnect_v1_generated_DeveloperConnect_UpdateConnection_sync", + "segments": [ + { + "end": 58, + "start": 27, + "type": "FULL" + }, + { + "end": 58, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 48, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 55, + "start": 49, + "type": "REQUEST_EXECUTION" + }, + { + "end": 59, + "start": 56, + "type": "RESPONSE_HANDLING" + } + ], + "title": "developerconnect_v1_generated_developer_connect_update_connection_sync.py" + } + ] +} diff --git a/packages/google-cloud-developerconnect/scripts/decrypt-secrets.sh b/packages/google-cloud-developerconnect/scripts/decrypt-secrets.sh new file mode 100755 index 000000000000..0018b421ddf8 --- /dev/null +++ b/packages/google-cloud-developerconnect/scripts/decrypt-secrets.sh @@ -0,0 +1,46 @@ +#!/bin/bash + +# Copyright 2023 Google LLC All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )" +ROOT=$( dirname "$DIR" ) + +# Work from the project root. +cd $ROOT + +# Prevent it from overriding files. +# We recommend that sample authors use their own service account files and cloud project. +# In that case, they are supposed to prepare these files by themselves. +if [[ -f "testing/test-env.sh" ]] || \ + [[ -f "testing/service-account.json" ]] || \ + [[ -f "testing/client-secrets.json" ]]; then + echo "One or more target files exist, aborting." + exit 1 +fi + +# Use SECRET_MANAGER_PROJECT if set, fallback to cloud-devrel-kokoro-resources. +PROJECT_ID="${SECRET_MANAGER_PROJECT:-cloud-devrel-kokoro-resources}" + +gcloud secrets versions access latest --secret="python-docs-samples-test-env" \ + --project="${PROJECT_ID}" \ + > testing/test-env.sh +gcloud secrets versions access latest \ + --secret="python-docs-samples-service-account" \ + --project="${PROJECT_ID}" \ + > testing/service-account.json +gcloud secrets versions access latest \ + --secret="python-docs-samples-client-secrets" \ + --project="${PROJECT_ID}" \ + > testing/client-secrets.json diff --git a/packages/google-cloud-developerconnect/scripts/fixup_developerconnect_v1_keywords.py b/packages/google-cloud-developerconnect/scripts/fixup_developerconnect_v1_keywords.py new file mode 100644 index 000000000000..325fc7cc85be --- /dev/null +++ b/packages/google-cloud-developerconnect/scripts/fixup_developerconnect_v1_keywords.py @@ -0,0 +1,189 @@ +#! /usr/bin/env python3 +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import argparse +import os +import libcst as cst +import pathlib +import sys +from typing import (Any, Callable, Dict, List, Sequence, Tuple) + + +def partition( + predicate: Callable[[Any], bool], + iterator: Sequence[Any] +) -> Tuple[List[Any], List[Any]]: + """A stable, out-of-place partition.""" + results = ([], []) + + for i in iterator: + results[int(predicate(i))].append(i) + + # Returns trueList, falseList + return results[1], results[0] + + +class developerconnectCallTransformer(cst.CSTTransformer): + CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata') + METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { + 'create_connection': ('parent', 'connection_id', 'connection', 'request_id', 'validate_only', ), + 'create_git_repository_link': ('parent', 'git_repository_link', 'git_repository_link_id', 'request_id', 'validate_only', ), + 'delete_connection': ('name', 'request_id', 'validate_only', 'etag', ), + 'delete_git_repository_link': ('name', 'request_id', 'validate_only', 'etag', ), + 'fetch_git_hub_installations': ('connection', ), + 'fetch_git_refs': ('git_repository_link', 'ref_type', 'page_size', 'page_token', ), + 'fetch_linkable_git_repositories': ('connection', 'page_size', 'page_token', ), + 'fetch_read_token': ('git_repository_link', ), + 'fetch_read_write_token': ('git_repository_link', ), + 'get_connection': ('name', ), + 'get_git_repository_link': ('name', ), + 'list_connections': ('parent', 'page_size', 'page_token', 'filter', 'order_by', ), + 'list_git_repository_links': ('parent', 'page_size', 'page_token', 'filter', 'order_by', ), + 'update_connection': ('update_mask', 'connection', 'request_id', 'allow_missing', 'validate_only', ), + } + + def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode: + try: + key = original.func.attr.value + kword_params = self.METHOD_TO_PARAMS[key] + except (AttributeError, KeyError): + # Either not a method from the API or too convoluted to be sure. + return updated + + # If the existing code is valid, keyword args come after positional args. + # Therefore, all positional args must map to the first parameters. + args, kwargs = partition(lambda a: not bool(a.keyword), updated.args) + if any(k.keyword.value == "request" for k in kwargs): + # We've already fixed this file, don't fix it again. + return updated + + kwargs, ctrl_kwargs = partition( + lambda a: a.keyword.value not in self.CTRL_PARAMS, + kwargs + ) + + args, ctrl_args = args[:len(kword_params)], args[len(kword_params):] + ctrl_kwargs.extend(cst.Arg(value=a.value, keyword=cst.Name(value=ctrl)) + for a, ctrl in zip(ctrl_args, self.CTRL_PARAMS)) + + request_arg = cst.Arg( + value=cst.Dict([ + cst.DictElement( + cst.SimpleString("'{}'".format(name)), +cst.Element(value=arg.value) + ) + # Note: the args + kwargs looks silly, but keep in mind that + # the control parameters had to be stripped out, and that + # those could have been passed positionally or by keyword. 
+ for name, arg in zip(kword_params, args + kwargs)]), + keyword=cst.Name("request") + ) + + return updated.with_changes( + args=[request_arg] + ctrl_kwargs + ) + + +def fix_files( + in_dir: pathlib.Path, + out_dir: pathlib.Path, + *, + transformer=developerconnectCallTransformer(), +): + """Duplicate the input dir to the output dir, fixing file method calls. + + Preconditions: + * in_dir is a real directory + * out_dir is a real, empty directory + """ + pyfile_gen = ( + pathlib.Path(os.path.join(root, f)) + for root, _, files in os.walk(in_dir) + for f in files if os.path.splitext(f)[1] == ".py" + ) + + for fpath in pyfile_gen: + with open(fpath, 'r') as f: + src = f.read() + + # Parse the code and insert method call fixes. + tree = cst.parse_module(src) + updated = tree.visit(transformer) + + # Create the path and directory structure for the new file. + updated_path = out_dir.joinpath(fpath.relative_to(in_dir)) + updated_path.parent.mkdir(parents=True, exist_ok=True) + + # Generate the updated source file at the corresponding path. + with open(updated_path, 'w') as f: + f.write(updated.code) + + +if __name__ == '__main__': + parser = argparse.ArgumentParser( + description="""Fix up source that uses the developerconnect client library. + +The existing sources are NOT overwritten but are copied to output_dir with changes made. + +Note: This tool operates at a best-effort level at converting positional + parameters in client method calls to keyword based parameters. + Cases where it WILL FAIL include + A) * or ** expansion in a method call. + B) Calls via function or method alias (includes free function calls) + C) Indirect or dispatched calls (e.g. the method is looked up dynamically) + + These all constitute false negatives. The tool will also detect false + positives when an API method shares a name with another method. +""") + parser.add_argument( + '-d', + '--input-directory', + required=True, + dest='input_dir', + help='the input directory to walk for python files to fix up', + ) + parser.add_argument( + '-o', + '--output-directory', + required=True, + dest='output_dir', + help='the directory to output files fixed via un-flattening', + ) + args = parser.parse_args() + input_dir = pathlib.Path(args.input_dir) + output_dir = pathlib.Path(args.output_dir) + if not input_dir.is_dir(): + print( + f"input directory '{input_dir}' does not exist or is not a directory", + file=sys.stderr, + ) + sys.exit(-1) + + if not output_dir.is_dir(): + print( + f"output directory '{output_dir}' does not exist or is not a directory", + file=sys.stderr, + ) + sys.exit(-1) + + if os.listdir(output_dir): + print( + f"output directory '{output_dir}' is not empty", + file=sys.stderr, + ) + sys.exit(-1) + + fix_files(input_dir, output_dir) diff --git a/packages/google-cloud-developerconnect/setup.py b/packages/google-cloud-developerconnect/setup.py new file mode 100644 index 000000000000..1d96015e600b --- /dev/null +++ b/packages/google-cloud-developerconnect/setup.py @@ -0,0 +1,95 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
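For orientation, the fixup script above does not edit files in place: it copies `--input-directory` to `--output-directory`, rewriting flattened positional calls into the request-dict form while keeping `retry`, `timeout`, and `metadata` as keyword arguments. A hypothetical before/after for `get_connection` (the resource name and timeout value are placeholders):

```python
# Hypothetical call site, before and after running
#     python fixup_developerconnect_v1_keywords.py -d <input_dir> -o <output_dir>
from google.cloud import developerconnect_v1

client = developerconnect_v1.DeveloperConnectClient()

# Before (old flattened form):
#     connection = client.get_connection(
#         "projects/p/locations/l/connections/c", timeout=30.0)

# After (what the transformer emits): positional args folded into a request dict,
# control parameters (retry, timeout, metadata) left as keywords.
connection = client.get_connection(
    request={"name": "projects/p/locations/l/connections/c"},
    timeout=30.0,
)
print(connection.name)
```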
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import io +import os +import re + +import setuptools # type: ignore + +package_root = os.path.abspath(os.path.dirname(__file__)) + +name = "google-cloud-developerconnect" + + +description = "Google Cloud Developerconnect API client library" + +version = None + +with open( + os.path.join(package_root, "google/cloud/developerconnect/gapic_version.py") +) as fp: + version_candidates = re.findall(r"(?<=\")\d+.\d+.\d+(?=\")", fp.read()) + assert len(version_candidates) == 1 + version = version_candidates[0] + +if version[0] == "0": + release_status = "Development Status :: 4 - Beta" +else: + release_status = "Development Status :: 5 - Production/Stable" + +dependencies = [ + "google-api-core[grpc] >= 1.34.1, <3.0.0dev,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*,!=2.8.*,!=2.9.*,!=2.10.*", + # Exclude incompatible versions of `google-auth` + # See https://github.com/googleapis/google-cloud-python/issues/12364 + "google-auth >= 2.14.1, <3.0.0dev,!=2.24.0,!=2.25.0", + "proto-plus >= 1.22.3, <2.0.0dev", + "protobuf>=3.19.5,<5.0.0dev,!=3.20.0,!=3.20.1,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", +] +url = "https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-developerconnect" + +package_root = os.path.abspath(os.path.dirname(__file__)) + +readme_filename = os.path.join(package_root, "README.rst") +with io.open(readme_filename, encoding="utf-8") as readme_file: + readme = readme_file.read() + +packages = [ + package + for package in setuptools.find_namespace_packages() + if package.startswith("google") +] + +setuptools.setup( + name=name, + version=version, + description=description, + long_description=readme, + author="Google LLC", + author_email="googleapis-packages@google.com", + license="Apache 2.0", + url=url, + classifiers=[ + release_status, + "Intended Audience :: Developers", + "License :: OSI Approved :: Apache Software License", + "Programming Language :: Python", + "Programming Language :: Python :: 3", + "Programming Language :: Python :: 3.7", + "Programming Language :: Python :: 3.8", + "Programming Language :: Python :: 3.9", + "Programming Language :: Python :: 3.10", + "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", + "Operating System :: OS Independent", + "Topic :: Internet", + ], + platforms="Posix; MacOS X; Windows", + packages=packages, + python_requires=">=3.7", + install_requires=dependencies, + include_package_data=True, + zip_safe=False, +) diff --git a/packages/google-cloud-developerconnect/testing/.gitignore b/packages/google-cloud-developerconnect/testing/.gitignore new file mode 100644 index 000000000000..b05fbd630881 --- /dev/null +++ b/packages/google-cloud-developerconnect/testing/.gitignore @@ -0,0 +1,3 @@ +test-env.sh +service-account.json +client-secrets.json \ No newline at end of file diff --git a/packages/google-cloud-developerconnect/testing/constraints-3.10.txt 
b/packages/google-cloud-developerconnect/testing/constraints-3.10.txt new file mode 100644 index 000000000000..ed7f9aed2559 --- /dev/null +++ b/packages/google-cloud-developerconnect/testing/constraints-3.10.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf diff --git a/packages/google-cloud-developerconnect/testing/constraints-3.11.txt b/packages/google-cloud-developerconnect/testing/constraints-3.11.txt new file mode 100644 index 000000000000..ed7f9aed2559 --- /dev/null +++ b/packages/google-cloud-developerconnect/testing/constraints-3.11.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf diff --git a/packages/google-cloud-developerconnect/testing/constraints-3.12.txt b/packages/google-cloud-developerconnect/testing/constraints-3.12.txt new file mode 100644 index 000000000000..ed7f9aed2559 --- /dev/null +++ b/packages/google-cloud-developerconnect/testing/constraints-3.12.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf diff --git a/packages/google-cloud-developerconnect/testing/constraints-3.7.txt b/packages/google-cloud-developerconnect/testing/constraints-3.7.txt new file mode 100644 index 000000000000..b8a550c73855 --- /dev/null +++ b/packages/google-cloud-developerconnect/testing/constraints-3.7.txt @@ -0,0 +1,10 @@ +# This constraints file is used to check that lower bounds +# are correct in setup.py +# List all library dependencies and extras in this file. +# Pin the version to the lower bound. +# e.g., if setup.py has "google-cloud-foo >= 1.14.0, < 2.0.0dev", +# Then this file should have google-cloud-foo==1.14.0 +google-api-core==1.34.1 +google-auth==2.14.1 +proto-plus==1.22.3 +protobuf==3.19.5 diff --git a/packages/google-cloud-developerconnect/testing/constraints-3.8.txt b/packages/google-cloud-developerconnect/testing/constraints-3.8.txt new file mode 100644 index 000000000000..ed7f9aed2559 --- /dev/null +++ b/packages/google-cloud-developerconnect/testing/constraints-3.8.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf diff --git a/packages/google-cloud-developerconnect/testing/constraints-3.9.txt b/packages/google-cloud-developerconnect/testing/constraints-3.9.txt new file mode 100644 index 000000000000..ed7f9aed2559 --- /dev/null +++ b/packages/google-cloud-developerconnect/testing/constraints-3.9.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. 
+google-api-core +proto-plus +protobuf diff --git a/packages/google-maps-mapsplatformdatasets/tests/unit/gapic/mapsplatformdatasets_v1alpha/__init__.py b/packages/google-cloud-developerconnect/tests/__init__.py similarity index 100% rename from packages/google-maps-mapsplatformdatasets/tests/unit/gapic/mapsplatformdatasets_v1alpha/__init__.py rename to packages/google-cloud-developerconnect/tests/__init__.py diff --git a/packages/google-cloud-developerconnect/tests/unit/__init__.py b/packages/google-cloud-developerconnect/tests/unit/__init__.py new file mode 100644 index 000000000000..8f6cf068242c --- /dev/null +++ b/packages/google-cloud-developerconnect/tests/unit/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/packages/google-cloud-developerconnect/tests/unit/gapic/__init__.py b/packages/google-cloud-developerconnect/tests/unit/gapic/__init__.py new file mode 100644 index 000000000000..8f6cf068242c --- /dev/null +++ b/packages/google-cloud-developerconnect/tests/unit/gapic/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/packages/google-cloud-developerconnect/tests/unit/gapic/developerconnect_v1/__init__.py b/packages/google-cloud-developerconnect/tests/unit/gapic/developerconnect_v1/__init__.py new file mode 100644 index 000000000000..8f6cf068242c --- /dev/null +++ b/packages/google-cloud-developerconnect/tests/unit/gapic/developerconnect_v1/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# diff --git a/packages/google-cloud-developerconnect/tests/unit/gapic/developerconnect_v1/test_developer_connect.py b/packages/google-cloud-developerconnect/tests/unit/gapic/developerconnect_v1/test_developer_connect.py new file mode 100644 index 000000000000..6fb76c1ea329 --- /dev/null +++ b/packages/google-cloud-developerconnect/tests/unit/gapic/developerconnect_v1/test_developer_connect.py @@ -0,0 +1,14792 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import os + +# try/except added for compatibility with python < 3.8 +try: + from unittest import mock + from unittest.mock import AsyncMock # pragma: NO COVER +except ImportError: # pragma: NO COVER + import mock + +from collections.abc import Iterable +import json +import math + +from google.api_core import ( + future, + gapic_v1, + grpc_helpers, + grpc_helpers_async, + operation, + operations_v1, + path_template, +) +from google.api_core import api_core_version, client_options +from google.api_core import exceptions as core_exceptions +from google.api_core import operation_async # type: ignore +import google.auth +from google.auth import credentials as ga_credentials +from google.auth.exceptions import MutualTLSChannelError +from google.cloud.location import locations_pb2 +from google.longrunning import operations_pb2 # type: ignore +from google.oauth2 import service_account +from google.protobuf import empty_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import json_format +from google.protobuf import timestamp_pb2 # type: ignore +import grpc +from grpc.experimental import aio +from proto.marshal.rules import wrappers +from proto.marshal.rules.dates import DurationRule, TimestampRule +import pytest +from requests import PreparedRequest, Request, Response +from requests.sessions import Session + +from google.cloud.developerconnect_v1.services.developer_connect import ( + DeveloperConnectAsyncClient, + DeveloperConnectClient, + pagers, + transports, +) +from google.cloud.developerconnect_v1.types import developer_connect + + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + + +# If default endpoint is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. +def modify_default_endpoint(client): + return ( + "foo.googleapis.com" + if ("localhost" in client.DEFAULT_ENDPOINT) + else client.DEFAULT_ENDPOINT + ) + + +# If default endpoint template is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint template so the client can produce a different +# mtls endpoint for endpoint testing purposes. 
+def modify_default_endpoint_template(client): + return ( + "test.{UNIVERSE_DOMAIN}" + if ("localhost" in client._DEFAULT_ENDPOINT_TEMPLATE) + else client._DEFAULT_ENDPOINT_TEMPLATE + ) + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert DeveloperConnectClient._get_default_mtls_endpoint(None) is None + assert ( + DeveloperConnectClient._get_default_mtls_endpoint(api_endpoint) + == api_mtls_endpoint + ) + assert ( + DeveloperConnectClient._get_default_mtls_endpoint(api_mtls_endpoint) + == api_mtls_endpoint + ) + assert ( + DeveloperConnectClient._get_default_mtls_endpoint(sandbox_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + DeveloperConnectClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + DeveloperConnectClient._get_default_mtls_endpoint(non_googleapi) + == non_googleapi + ) + + +def test__read_environment_variables(): + assert DeveloperConnectClient._read_environment_variables() == (False, "auto", None) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + assert DeveloperConnectClient._read_environment_variables() == ( + True, + "auto", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + assert DeveloperConnectClient._read_environment_variables() == ( + False, + "auto", + None, + ) + + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError) as excinfo: + DeveloperConnectClient._read_environment_variables() + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + assert DeveloperConnectClient._read_environment_variables() == ( + False, + "never", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + assert DeveloperConnectClient._read_environment_variables() == ( + False, + "always", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}): + assert DeveloperConnectClient._read_environment_variables() == ( + False, + "auto", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + DeveloperConnectClient._read_environment_variables() + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + with mock.patch.dict(os.environ, {"GOOGLE_CLOUD_UNIVERSE_DOMAIN": "foo.com"}): + assert DeveloperConnectClient._read_environment_variables() == ( + False, + "auto", + "foo.com", + ) + + +def test__get_client_cert_source(): + mock_provided_cert_source = mock.Mock() + mock_default_cert_source = mock.Mock() + + assert DeveloperConnectClient._get_client_cert_source(None, False) is None + assert ( + DeveloperConnectClient._get_client_cert_source(mock_provided_cert_source, False) + is None + ) + assert ( + DeveloperConnectClient._get_client_cert_source(mock_provided_cert_source, True) + == mock_provided_cert_source + ) + + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", return_value=True + ): + with mock.patch( + 
"google.auth.transport.mtls.default_client_cert_source", + return_value=mock_default_cert_source, + ): + assert ( + DeveloperConnectClient._get_client_cert_source(None, True) + is mock_default_cert_source + ) + assert ( + DeveloperConnectClient._get_client_cert_source( + mock_provided_cert_source, "true" + ) + is mock_provided_cert_source + ) + + +@mock.patch.object( + DeveloperConnectClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(DeveloperConnectClient), +) +@mock.patch.object( + DeveloperConnectAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(DeveloperConnectAsyncClient), +) +def test__get_api_endpoint(): + api_override = "foo.com" + mock_client_cert_source = mock.Mock() + default_universe = DeveloperConnectClient._DEFAULT_UNIVERSE + default_endpoint = DeveloperConnectClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=default_universe + ) + mock_universe = "bar.com" + mock_endpoint = DeveloperConnectClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=mock_universe + ) + + assert ( + DeveloperConnectClient._get_api_endpoint( + api_override, mock_client_cert_source, default_universe, "always" + ) + == api_override + ) + assert ( + DeveloperConnectClient._get_api_endpoint( + None, mock_client_cert_source, default_universe, "auto" + ) + == DeveloperConnectClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + DeveloperConnectClient._get_api_endpoint(None, None, default_universe, "auto") + == default_endpoint + ) + assert ( + DeveloperConnectClient._get_api_endpoint(None, None, default_universe, "always") + == DeveloperConnectClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + DeveloperConnectClient._get_api_endpoint( + None, mock_client_cert_source, default_universe, "always" + ) + == DeveloperConnectClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + DeveloperConnectClient._get_api_endpoint(None, None, mock_universe, "never") + == mock_endpoint + ) + assert ( + DeveloperConnectClient._get_api_endpoint(None, None, default_universe, "never") + == default_endpoint + ) + + with pytest.raises(MutualTLSChannelError) as excinfo: + DeveloperConnectClient._get_api_endpoint( + None, mock_client_cert_source, mock_universe, "auto" + ) + assert ( + str(excinfo.value) + == "mTLS is not supported in any universe other than googleapis.com." + ) + + +def test__get_universe_domain(): + client_universe_domain = "foo.com" + universe_domain_env = "bar.com" + + assert ( + DeveloperConnectClient._get_universe_domain( + client_universe_domain, universe_domain_env + ) + == client_universe_domain + ) + assert ( + DeveloperConnectClient._get_universe_domain(None, universe_domain_env) + == universe_domain_env + ) + assert ( + DeveloperConnectClient._get_universe_domain(None, None) + == DeveloperConnectClient._DEFAULT_UNIVERSE + ) + + with pytest.raises(ValueError) as excinfo: + DeveloperConnectClient._get_universe_domain("", None) + assert str(excinfo.value) == "Universe Domain cannot be an empty string." + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (DeveloperConnectClient, transports.DeveloperConnectGrpcTransport, "grpc"), + (DeveloperConnectClient, transports.DeveloperConnectRestTransport, "rest"), + ], +) +def test__validate_universe_domain(client_class, transport_class, transport_name): + client = client_class( + transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) + ) + assert client._validate_universe_domain() == True + + # Test the case when universe is already validated. 
+ assert client._validate_universe_domain() == True + + if transport_name == "grpc": + # Test the case where credentials are provided by the + # `local_channel_credentials`. The default universes in both match. + channel = grpc.secure_channel( + "http://localhost/", grpc.local_channel_credentials() + ) + client = client_class(transport=transport_class(channel=channel)) + assert client._validate_universe_domain() == True + + # Test the case where credentials do not exist: e.g. a transport is provided + # with no credentials. Validation should still succeed because there is no + # mismatch with non-existent credentials. + channel = grpc.secure_channel( + "http://localhost/", grpc.local_channel_credentials() + ) + transport = transport_class(channel=channel) + transport._credentials = None + client = client_class(transport=transport) + assert client._validate_universe_domain() == True + + # TODO: This is needed to cater for older versions of google-auth + # Make this test unconditional once the minimum supported version of + # google-auth becomes 2.23.0 or higher. + google_auth_major, google_auth_minor = [ + int(part) for part in google.auth.__version__.split(".")[0:2] + ] + if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): + credentials = ga_credentials.AnonymousCredentials() + credentials._universe_domain = "foo.com" + # Test the case when there is a universe mismatch from the credentials. + client = client_class(transport=transport_class(credentials=credentials)) + with pytest.raises(ValueError) as excinfo: + client._validate_universe_domain() + assert ( + str(excinfo.value) + == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." + ) + + # Test the case when there is a universe mismatch from the client. + # + # TODO: Make this test unconditional once the minimum supported version of + # google-api-core becomes 2.15.0 or higher. + api_core_major, api_core_minor = [ + int(part) for part in api_core_version.__version__.split(".")[0:2] + ] + if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): + client = client_class( + client_options={"universe_domain": "bar.com"}, + transport=transport_class( + credentials=ga_credentials.AnonymousCredentials(), + ), + ) + with pytest.raises(ValueError) as excinfo: + client._validate_universe_domain() + assert ( + str(excinfo.value) + == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." 
+ ) + + # Test that ValueError is raised if universe_domain is provided via client options and credentials is None + with pytest.raises(ValueError): + client._compare_universes("foo.bar", None) + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (DeveloperConnectClient, "grpc"), + (DeveloperConnectAsyncClient, "grpc_asyncio"), + (DeveloperConnectClient, "rest"), + ], +) +def test_developer_connect_client_from_service_account_info( + client_class, transport_name +): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_info" + ) as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info, transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + "developerconnect.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://developerconnect.googleapis.com" + ) + + +@pytest.mark.parametrize( + "transport_class,transport_name", + [ + (transports.DeveloperConnectGrpcTransport, "grpc"), + (transports.DeveloperConnectGrpcAsyncIOTransport, "grpc_asyncio"), + (transports.DeveloperConnectRestTransport, "rest"), + ], +) +def test_developer_connect_client_service_account_always_use_jwt( + transport_class, transport_name +): + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) + + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=False) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (DeveloperConnectClient, "grpc"), + (DeveloperConnectAsyncClient, "grpc_asyncio"), + (DeveloperConnectClient, "rest"), + ], +) +def test_developer_connect_client_from_service_account_file( + client_class, transport_name +): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_file" + ) as factory: + factory.return_value = creds + client = client_class.from_service_account_file( + "dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + client = client_class.from_service_account_json( + "dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + "developerconnect.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://developerconnect.googleapis.com" + ) + + +def test_developer_connect_client_get_transport_class(): + transport = DeveloperConnectClient.get_transport_class() + available_transports = [ + transports.DeveloperConnectGrpcTransport, + transports.DeveloperConnectRestTransport, + ] + assert transport in available_transports + + transport = DeveloperConnectClient.get_transport_class("grpc") + assert transport == transports.DeveloperConnectGrpcTransport + + 
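+# NOTE: usage sketch (editorial, assumed -- not emitted by the GAPIC generator).
+# It only restates what the service-account tests above exercise. The key-file
+# path below is a hypothetical placeholder, and the import path is assumed from
+# the module paths referenced elsewhere in this file.
+#
+#     from google.cloud.developerconnect_v1 import DeveloperConnectClient
+#
+#     client = DeveloperConnectClient.from_service_account_file(
+#         "path/to/key.json",  # hypothetical service account key file
+#         transport="rest",    # or "grpc" / "grpc_asyncio", as tested above
+#     )
+#     assert client.transport._host == "https://developerconnect.googleapis.com"
+
+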
+@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (DeveloperConnectClient, transports.DeveloperConnectGrpcTransport, "grpc"), + ( + DeveloperConnectAsyncClient, + transports.DeveloperConnectGrpcAsyncIOTransport, + "grpc_asyncio", + ), + (DeveloperConnectClient, transports.DeveloperConnectRestTransport, "rest"), + ], +) +@mock.patch.object( + DeveloperConnectClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(DeveloperConnectClient), +) +@mock.patch.object( + DeveloperConnectAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(DeveloperConnectAsyncClient), +) +def test_developer_connect_client_client_options( + client_class, transport_class, transport_name +): + # Check that if channel is provided we won't create a new one. + with mock.patch.object(DeveloperConnectClient, "get_transport_class") as gtc: + transport = transport_class(credentials=ga_credentials.AnonymousCredentials()) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object(DeveloperConnectClient, "get_transport_class") as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. + options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name, client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + client = client_class(transport=transport_name) + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError) as excinfo: + client = client_class(transport=transport_name) + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + # Check the case api_endpoint is provided + options = client_options.ClientOptions( + api_audience="https://language.googleapis.com" + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience="https://language.googleapis.com", + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,use_client_cert_env", + [ + ( + DeveloperConnectClient, + transports.DeveloperConnectGrpcTransport, + "grpc", + "true", + ), + ( + DeveloperConnectAsyncClient, + transports.DeveloperConnectGrpcAsyncIOTransport, + "grpc_asyncio", + "true", + ), + ( + DeveloperConnectClient, + transports.DeveloperConnectGrpcTransport, + "grpc", + "false", + ), + ( + DeveloperConnectAsyncClient, + transports.DeveloperConnectGrpcAsyncIOTransport, + "grpc_asyncio", + "false", + ), + ( + DeveloperConnectClient, + transports.DeveloperConnectRestTransport, + "rest", + "true", + ), + ( + DeveloperConnectClient, + transports.DeveloperConnectRestTransport, + "rest", + "false", + ), + ], +) +@mock.patch.object( + DeveloperConnectClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(DeveloperConnectClient), +) +@mock.patch.object( + DeveloperConnectAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(DeveloperConnectAsyncClient), +) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_developer_connect_client_mtls_env_auto( + client_class, transport_class, transport_name, use_client_cert_env +): + # This tests the endpoint autoswitch behavior. 
Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. + + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + options = client_options.ClientOptions( + client_cert_source=client_cert_source_callback + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ) + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=client_cert_source_callback, + ): + if use_client_cert_env == "false": + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ) + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case client_cert_source and ADC client cert are not provided. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class", [DeveloperConnectClient, DeveloperConnectAsyncClient] +) +@mock.patch.object( + DeveloperConnectClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(DeveloperConnectClient), +) +@mock.patch.object( + DeveloperConnectAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(DeveloperConnectAsyncClient), +) +def test_developer_connect_client_get_mtls_endpoint_and_cert_source(client_class): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=mock_client_cert_source, + ): + ( + api_endpoint, + cert_source, + ) = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + client_class.get_mtls_endpoint_and_cert_source() + + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError) as excinfo: + client_class.get_mtls_endpoint_and_cert_source() + + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + + +@pytest.mark.parametrize( + "client_class", [DeveloperConnectClient, DeveloperConnectAsyncClient] +) +@mock.patch.object( + DeveloperConnectClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(DeveloperConnectClient), +) +@mock.patch.object( + DeveloperConnectAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(DeveloperConnectAsyncClient), +) +def test_developer_connect_client_client_api_endpoint(client_class): + mock_client_cert_source = client_cert_source_callback + api_override = "foo.com" + default_universe = DeveloperConnectClient._DEFAULT_UNIVERSE + default_endpoint = DeveloperConnectClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=default_universe + ) + mock_universe = "bar.com" + mock_endpoint = DeveloperConnectClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=mock_universe + ) + + # If ClientOptions.api_endpoint is set and GOOGLE_API_USE_CLIENT_CERTIFICATE="true", + # use ClientOptions.api_endpoint as the api endpoint regardless. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ): + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=api_override + ) + client = client_class( + client_options=options, + credentials=ga_credentials.AnonymousCredentials(), + ) + assert client.api_endpoint == api_override + + # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="never", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + client = client_class(credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == default_endpoint + + # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="always", + # use the DEFAULT_MTLS_ENDPOINT as the api endpoint. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + client = client_class(credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + + # If ClientOptions.api_endpoint is not set, GOOGLE_API_USE_MTLS_ENDPOINT="auto" (default), + # GOOGLE_API_USE_CLIENT_CERTIFICATE="false" (default), default cert source doesn't exist, + # and ClientOptions.universe_domain="bar.com", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with universe domain as the api endpoint. + options = client_options.ClientOptions() + universe_exists = hasattr(options, "universe_domain") + if universe_exists: + options = client_options.ClientOptions(universe_domain=mock_universe) + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + else: + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + assert client.api_endpoint == ( + mock_endpoint if universe_exists else default_endpoint + ) + assert client.universe_domain == ( + mock_universe if universe_exists else default_universe + ) + + # If ClientOptions does not have a universe domain attribute and GOOGLE_API_USE_MTLS_ENDPOINT="never", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. + options = client_options.ClientOptions() + if hasattr(options, "universe_domain"): + delattr(options, "universe_domain") + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + assert client.api_endpoint == default_endpoint + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (DeveloperConnectClient, transports.DeveloperConnectGrpcTransport, "grpc"), + ( + DeveloperConnectAsyncClient, + transports.DeveloperConnectGrpcAsyncIOTransport, + "grpc_asyncio", + ), + (DeveloperConnectClient, transports.DeveloperConnectRestTransport, "rest"), + ], +) +def test_developer_connect_client_client_options_scopes( + client_class, transport_class, transport_name +): + # Check the case scopes are provided. + options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + DeveloperConnectClient, + transports.DeveloperConnectGrpcTransport, + "grpc", + grpc_helpers, + ), + ( + DeveloperConnectAsyncClient, + transports.DeveloperConnectGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + ( + DeveloperConnectClient, + transports.DeveloperConnectRestTransport, + "rest", + None, + ), + ], +) +def test_developer_connect_client_client_options_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. 
+ options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +def test_developer_connect_client_client_options_from_dict(): + with mock.patch( + "google.cloud.developerconnect_v1.services.developer_connect.transports.DeveloperConnectGrpcTransport.__init__" + ) as grpc_transport: + grpc_transport.return_value = None + client = DeveloperConnectClient( + client_options={"api_endpoint": "squid.clam.whelk"} + ) + grpc_transport.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + DeveloperConnectClient, + transports.DeveloperConnectGrpcTransport, + "grpc", + grpc_helpers, + ), + ( + DeveloperConnectAsyncClient, + transports.DeveloperConnectGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + ], +) +def test_developer_connect_client_create_channel_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. + options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # test that the credentials from file are saved and used as the credentials. 
+ with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel" + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + file_creds = ga_credentials.AnonymousCredentials() + load_creds.return_value = (file_creds, None) + adc.return_value = (creds, None) + client = client_class(client_options=options, transport=transport_name) + create_channel.assert_called_with( + "developerconnect.googleapis.com:443", + credentials=file_creds, + credentials_file=None, + quota_project_id=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + scopes=None, + default_host="developerconnect.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "request_type", + [ + developer_connect.ListConnectionsRequest, + dict, + ], +) +def test_list_connections(request_type, transport: str = "grpc"): + client = DeveloperConnectClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_connections), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = developer_connect.ListConnectionsResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + response = client.list_connections(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = developer_connect.ListConnectionsRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListConnectionsPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +def test_list_connections_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DeveloperConnectClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_connections), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.list_connections() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == developer_connect.ListConnectionsRequest() + + +def test_list_connections_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = DeveloperConnectClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. 
+ request = developer_connect.ListConnectionsRequest( + parent="parent_value", + page_token="page_token_value", + filter="filter_value", + order_by="order_by_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_connections), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.list_connections(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == developer_connect.ListConnectionsRequest( + parent="parent_value", + page_token="page_token_value", + filter="filter_value", + order_by="order_by_value", + ) + + +def test_list_connections_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DeveloperConnectClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_connections in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.list_connections + ] = mock_rpc + request = {} + client.list_connections(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_connections(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_list_connections_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DeveloperConnectAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_connections), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + developer_connect.ListConnectionsResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + ) + response = await client.list_connections() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == developer_connect.ListConnectionsRequest() + + +@pytest.mark.asyncio +async def test_list_connections_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DeveloperConnectAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.list_connections + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[ + client._client._transport.list_connections + ] = mock_object + + request = {} + await client.list_connections(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + await client.list_connections(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + +@pytest.mark.asyncio +async def test_list_connections_async( + transport: str = "grpc_asyncio", + request_type=developer_connect.ListConnectionsRequest, +): + client = DeveloperConnectAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_connections), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + developer_connect.ListConnectionsResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + ) + response = await client.list_connections(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = developer_connect.ListConnectionsRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListConnectionsAsyncPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +@pytest.mark.asyncio +async def test_list_connections_async_from_dict(): + await test_list_connections_async(request_type=dict) + + +def test_list_connections_field_headers(): + client = DeveloperConnectClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = developer_connect.ListConnectionsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_connections), "__call__") as call: + call.return_value = developer_connect.ListConnectionsResponse() + client.list_connections(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_connections_field_headers_async(): + client = DeveloperConnectAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = developer_connect.ListConnectionsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_connections), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + developer_connect.ListConnectionsResponse() + ) + await client.list_connections(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_list_connections_flattened(): + client = DeveloperConnectClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_connections), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = developer_connect.ListConnectionsResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_connections( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +def test_list_connections_flattened_error(): + client = DeveloperConnectClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_connections( + developer_connect.ListConnectionsRequest(), + parent="parent_value", + ) + + +@pytest.mark.asyncio +async def test_list_connections_flattened_async(): + client = DeveloperConnectAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_connections), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = developer_connect.ListConnectionsResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + developer_connect.ListConnectionsResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_connections( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_list_connections_flattened_error_async(): + client = DeveloperConnectAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.list_connections( + developer_connect.ListConnectionsRequest(), + parent="parent_value", + ) + + +def test_list_connections_pager(transport_name: str = "grpc"): + client = DeveloperConnectClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_connections), "__call__") as call: + # Set the response to a series of pages. + call.side_effect = ( + developer_connect.ListConnectionsResponse( + connections=[ + developer_connect.Connection(), + developer_connect.Connection(), + developer_connect.Connection(), + ], + next_page_token="abc", + ), + developer_connect.ListConnectionsResponse( + connections=[], + next_page_token="def", + ), + developer_connect.ListConnectionsResponse( + connections=[ + developer_connect.Connection(), + ], + next_page_token="ghi", + ), + developer_connect.ListConnectionsResponse( + connections=[ + developer_connect.Connection(), + developer_connect.Connection(), + ], + ), + RuntimeError, + ) + + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + ) + pager = client.list_connections(request={}) + + assert pager._metadata == expected_metadata + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, developer_connect.Connection) for i in results) + + +def test_list_connections_pages(transport_name: str = "grpc"): + client = DeveloperConnectClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_connections), "__call__") as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + developer_connect.ListConnectionsResponse( + connections=[ + developer_connect.Connection(), + developer_connect.Connection(), + developer_connect.Connection(), + ], + next_page_token="abc", + ), + developer_connect.ListConnectionsResponse( + connections=[], + next_page_token="def", + ), + developer_connect.ListConnectionsResponse( + connections=[ + developer_connect.Connection(), + ], + next_page_token="ghi", + ), + developer_connect.ListConnectionsResponse( + connections=[ + developer_connect.Connection(), + developer_connect.Connection(), + ], + ), + RuntimeError, + ) + pages = list(client.list_connections(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_list_connections_async_pager(): + client = DeveloperConnectAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_connections), "__call__", new_callable=mock.AsyncMock + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + developer_connect.ListConnectionsResponse( + connections=[ + developer_connect.Connection(), + developer_connect.Connection(), + developer_connect.Connection(), + ], + next_page_token="abc", + ), + developer_connect.ListConnectionsResponse( + connections=[], + next_page_token="def", + ), + developer_connect.ListConnectionsResponse( + connections=[ + developer_connect.Connection(), + ], + next_page_token="ghi", + ), + developer_connect.ListConnectionsResponse( + connections=[ + developer_connect.Connection(), + developer_connect.Connection(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_connections( + request={}, + ) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, developer_connect.Connection) for i in responses) + + +@pytest.mark.asyncio +async def test_list_connections_async_pages(): + client = DeveloperConnectAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_connections), "__call__", new_callable=mock.AsyncMock + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + developer_connect.ListConnectionsResponse( + connections=[ + developer_connect.Connection(), + developer_connect.Connection(), + developer_connect.Connection(), + ], + next_page_token="abc", + ), + developer_connect.ListConnectionsResponse( + connections=[], + next_page_token="def", + ), + developer_connect.ListConnectionsResponse( + connections=[ + developer_connect.Connection(), + ], + next_page_token="ghi", + ), + developer_connect.ListConnectionsResponse( + connections=[ + developer_connect.Connection(), + developer_connect.Connection(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_connections(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + developer_connect.GetConnectionRequest, + dict, + ], +) +def test_get_connection(request_type, transport: str = "grpc"): + client = DeveloperConnectClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_connection), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = developer_connect.Connection( + name="name_value", + disabled=True, + reconciling=True, + etag="etag_value", + uid="uid_value", + ) + response = client.get_connection(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = developer_connect.GetConnectionRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, developer_connect.Connection) + assert response.name == "name_value" + assert response.disabled is True + assert response.reconciling is True + assert response.etag == "etag_value" + assert response.uid == "uid_value" + + +def test_get_connection_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DeveloperConnectClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_connection), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.get_connection() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == developer_connect.GetConnectionRequest() + + +def test_get_connection_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. 
+ client = DeveloperConnectClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = developer_connect.GetConnectionRequest( + name="name_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_connection), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.get_connection(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == developer_connect.GetConnectionRequest( + name="name_value", + ) + + +def test_get_connection_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DeveloperConnectClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_connection in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get_connection] = mock_rpc + request = {} + client.get_connection(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.get_connection(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_get_connection_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DeveloperConnectAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_connection), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + developer_connect.Connection( + name="name_value", + disabled=True, + reconciling=True, + etag="etag_value", + uid="uid_value", + ) + ) + response = await client.get_connection() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == developer_connect.GetConnectionRequest() + + +@pytest.mark.asyncio +async def test_get_connection_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DeveloperConnectAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.get_connection + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[ + client._client._transport.get_connection + ] = mock_object + + request = {} + await client.get_connection(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + await client.get_connection(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + +@pytest.mark.asyncio +async def test_get_connection_async( + transport: str = "grpc_asyncio", request_type=developer_connect.GetConnectionRequest +): + client = DeveloperConnectAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_connection), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + developer_connect.Connection( + name="name_value", + disabled=True, + reconciling=True, + etag="etag_value", + uid="uid_value", + ) + ) + response = await client.get_connection(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = developer_connect.GetConnectionRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, developer_connect.Connection) + assert response.name == "name_value" + assert response.disabled is True + assert response.reconciling is True + assert response.etag == "etag_value" + assert response.uid == "uid_value" + + +@pytest.mark.asyncio +async def test_get_connection_async_from_dict(): + await test_get_connection_async(request_type=dict) + + +def test_get_connection_field_headers(): + client = DeveloperConnectClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = developer_connect.GetConnectionRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_connection), "__call__") as call: + call.return_value = developer_connect.Connection() + client.get_connection(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_connection_field_headers_async(): + client = DeveloperConnectAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = developer_connect.GetConnectionRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_connection), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + developer_connect.Connection() + ) + await client.get_connection(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_get_connection_flattened(): + client = DeveloperConnectClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_connection), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = developer_connect.Connection() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_connection( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_get_connection_flattened_error(): + client = DeveloperConnectClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_connection( + developer_connect.GetConnectionRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_get_connection_flattened_async(): + client = DeveloperConnectAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_connection), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = developer_connect.Connection() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + developer_connect.Connection() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
+ response = await client.get_connection( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_get_connection_flattened_error_async(): + client = DeveloperConnectAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.get_connection( + developer_connect.GetConnectionRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + developer_connect.CreateConnectionRequest, + dict, + ], +) +def test_create_connection(request_type, transport: str = "grpc"): + client = DeveloperConnectClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_connection), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.create_connection(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = developer_connect.CreateConnectionRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_create_connection_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DeveloperConnectClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_connection), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.create_connection() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == developer_connect.CreateConnectionRequest() + + +def test_create_connection_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = DeveloperConnectClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = developer_connect.CreateConnectionRequest( + parent="parent_value", + connection_id="connection_id_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_connection), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.create_connection(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == developer_connect.CreateConnectionRequest( + parent="parent_value", + connection_id="connection_id_value", + ) + + +def test_create_connection_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DeveloperConnectClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.create_connection in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.create_connection + ] = mock_rpc + request = {} + client.create_connection(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.create_connection(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_create_connection_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DeveloperConnectAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_connection), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
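+        # The async client awaits the stub call, so the Operation proto is
+        # wrapped in FakeUnaryUnaryCall to make the mocked response awaitable.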
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.create_connection() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == developer_connect.CreateConnectionRequest() + + +@pytest.mark.asyncio +async def test_create_connection_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DeveloperConnectAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.create_connection + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[ + client._client._transport.create_connection + ] = mock_object + + request = {} + await client.create_connection(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.create_connection(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + +@pytest.mark.asyncio +async def test_create_connection_async( + transport: str = "grpc_asyncio", + request_type=developer_connect.CreateConnectionRequest, +): + client = DeveloperConnectAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_connection), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.create_connection(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = developer_connect.CreateConnectionRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_create_connection_async_from_dict(): + await test_create_connection_async(request_type=dict) + + +def test_create_connection_field_headers(): + client = DeveloperConnectClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = developer_connect.CreateConnectionRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.create_connection), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.create_connection(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_create_connection_field_headers_async(): + client = DeveloperConnectAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = developer_connect.CreateConnectionRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_connection), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.create_connection(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_create_connection_flattened(): + client = DeveloperConnectClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_connection), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.create_connection( + parent="parent_value", + connection=developer_connect.Connection( + github_config=developer_connect.GitHubConfig( + github_app=developer_connect.GitHubConfig.GitHubApp.DEVELOPER_CONNECT + ) + ), + connection_id="connection_id_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].connection + mock_val = developer_connect.Connection( + github_config=developer_connect.GitHubConfig( + github_app=developer_connect.GitHubConfig.GitHubApp.DEVELOPER_CONNECT + ) + ) + assert arg == mock_val + arg = args[0].connection_id + mock_val = "connection_id_value" + assert arg == mock_val + + +def test_create_connection_flattened_error(): + client = DeveloperConnectClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
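+    # Supplying both is ambiguous, so the client is expected to raise
+    # ValueError before attempting any RPC.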
+ with pytest.raises(ValueError): + client.create_connection( + developer_connect.CreateConnectionRequest(), + parent="parent_value", + connection=developer_connect.Connection( + github_config=developer_connect.GitHubConfig( + github_app=developer_connect.GitHubConfig.GitHubApp.DEVELOPER_CONNECT + ) + ), + connection_id="connection_id_value", + ) + + +@pytest.mark.asyncio +async def test_create_connection_flattened_async(): + client = DeveloperConnectAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_connection), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.create_connection( + parent="parent_value", + connection=developer_connect.Connection( + github_config=developer_connect.GitHubConfig( + github_app=developer_connect.GitHubConfig.GitHubApp.DEVELOPER_CONNECT + ) + ), + connection_id="connection_id_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].connection + mock_val = developer_connect.Connection( + github_config=developer_connect.GitHubConfig( + github_app=developer_connect.GitHubConfig.GitHubApp.DEVELOPER_CONNECT + ) + ) + assert arg == mock_val + arg = args[0].connection_id + mock_val = "connection_id_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_create_connection_flattened_error_async(): + client = DeveloperConnectAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.create_connection( + developer_connect.CreateConnectionRequest(), + parent="parent_value", + connection=developer_connect.Connection( + github_config=developer_connect.GitHubConfig( + github_app=developer_connect.GitHubConfig.GitHubApp.DEVELOPER_CONNECT + ) + ), + connection_id="connection_id_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + developer_connect.UpdateConnectionRequest, + dict, + ], +) +def test_update_connection(request_type, transport: str = "grpc"): + client = DeveloperConnectClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_connection), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.update_connection(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = developer_connect.UpdateConnectionRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_update_connection_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DeveloperConnectClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_connection), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.update_connection() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == developer_connect.UpdateConnectionRequest() + + +def test_update_connection_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = DeveloperConnectClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = developer_connect.UpdateConnectionRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_connection), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.update_connection(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == developer_connect.UpdateConnectionRequest() + + +def test_update_connection_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DeveloperConnectClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.update_connection in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.update_connection + ] = mock_rpc + request = {} + client.update_connection(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.update_connection(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_update_connection_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. 
request == None and no flattened fields passed, work. + client = DeveloperConnectAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_connection), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.update_connection() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == developer_connect.UpdateConnectionRequest() + + +@pytest.mark.asyncio +async def test_update_connection_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DeveloperConnectAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.update_connection + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[ + client._client._transport.update_connection + ] = mock_object + + request = {} + await client.update_connection(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.update_connection(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + +@pytest.mark.asyncio +async def test_update_connection_async( + transport: str = "grpc_asyncio", + request_type=developer_connect.UpdateConnectionRequest, +): + client = DeveloperConnectAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_connection), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.update_connection(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = developer_connect.UpdateConnectionRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. 
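+    # UpdateConnection is a long-running operation, so the client should
+    # return an operation future (a google.api_core future) rather than the
+    # finished Connection resource.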
+ assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_update_connection_async_from_dict(): + await test_update_connection_async(request_type=dict) + + +def test_update_connection_field_headers(): + client = DeveloperConnectClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = developer_connect.UpdateConnectionRequest() + + request.connection.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_connection), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.update_connection(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "connection.name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_update_connection_field_headers_async(): + client = DeveloperConnectAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = developer_connect.UpdateConnectionRequest() + + request.connection.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_connection), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.update_connection(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "connection.name=name_value", + ) in kw["metadata"] + + +def test_update_connection_flattened(): + client = DeveloperConnectClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_connection), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.update_connection( + connection=developer_connect.Connection( + github_config=developer_connect.GitHubConfig( + github_app=developer_connect.GitHubConfig.GitHubApp.DEVELOPER_CONNECT + ) + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].connection + mock_val = developer_connect.Connection( + github_config=developer_connect.GitHubConfig( + github_app=developer_connect.GitHubConfig.GitHubApp.DEVELOPER_CONNECT + ) + ) + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + assert arg == mock_val + + +def test_update_connection_flattened_error(): + client = DeveloperConnectClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_connection( + developer_connect.UpdateConnectionRequest(), + connection=developer_connect.Connection( + github_config=developer_connect.GitHubConfig( + github_app=developer_connect.GitHubConfig.GitHubApp.DEVELOPER_CONNECT + ) + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +@pytest.mark.asyncio +async def test_update_connection_flattened_async(): + client = DeveloperConnectAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_connection), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.update_connection( + connection=developer_connect.Connection( + github_config=developer_connect.GitHubConfig( + github_app=developer_connect.GitHubConfig.GitHubApp.DEVELOPER_CONNECT + ) + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].connection + mock_val = developer_connect.Connection( + github_config=developer_connect.GitHubConfig( + github_app=developer_connect.GitHubConfig.GitHubApp.DEVELOPER_CONNECT + ) + ) + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_update_connection_flattened_error_async(): + client = DeveloperConnectAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.update_connection( + developer_connect.UpdateConnectionRequest(), + connection=developer_connect.Connection( + github_config=developer_connect.GitHubConfig( + github_app=developer_connect.GitHubConfig.GitHubApp.DEVELOPER_CONNECT + ) + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +@pytest.mark.parametrize( + "request_type", + [ + developer_connect.DeleteConnectionRequest, + dict, + ], +) +def test_delete_connection(request_type, transport: str = "grpc"): + client = DeveloperConnectClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
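+    # Note that `request_type` is parametrized as either the proto message
+    # class or a plain dict; the client accepts both and coerces dicts into a
+    # DeleteConnectionRequest, so this body covers both input shapes.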
+ request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_connection), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.delete_connection(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = developer_connect.DeleteConnectionRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_delete_connection_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DeveloperConnectClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_connection), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.delete_connection() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == developer_connect.DeleteConnectionRequest() + + +def test_delete_connection_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = DeveloperConnectClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = developer_connect.DeleteConnectionRequest( + name="name_value", + etag="etag_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_connection), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.delete_connection(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == developer_connect.DeleteConnectionRequest( + name="name_value", + etag="etag_value", + ) + + +def test_delete_connection_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DeveloperConnectClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete_connection in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.delete_connection + ] = mock_rpc + request = {} + client.delete_connection(request) + + # Establish that the underlying gRPC stub method was called. 
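+        # call_count == 1 here proves the client dispatched through the entry
+        # we planted in _wrapped_methods rather than re-wrapping the RPC.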
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete_connection(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_delete_connection_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DeveloperConnectAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_connection), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.delete_connection() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == developer_connect.DeleteConnectionRequest() + + +@pytest.mark.asyncio +async def test_delete_connection_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DeveloperConnectAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.delete_connection + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[ + client._client._transport.delete_connection + ] = mock_object + + request = {} + await client.delete_connection(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.delete_connection(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + +@pytest.mark.asyncio +async def test_delete_connection_async( + transport: str = "grpc_asyncio", + request_type=developer_connect.DeleteConnectionRequest, +): + client = DeveloperConnectAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_connection), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.delete_connection(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = developer_connect.DeleteConnectionRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_delete_connection_async_from_dict(): + await test_delete_connection_async(request_type=dict) + + +def test_delete_connection_field_headers(): + client = DeveloperConnectClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = developer_connect.DeleteConnectionRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_connection), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.delete_connection(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_delete_connection_field_headers_async(): + client = DeveloperConnectAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = developer_connect.DeleteConnectionRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_connection), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.delete_connection(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_delete_connection_flattened(): + client = DeveloperConnectClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_connection), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.delete_connection( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_delete_connection_flattened_error(): + client = DeveloperConnectClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_connection( + developer_connect.DeleteConnectionRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_delete_connection_flattened_async(): + client = DeveloperConnectAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_connection), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.delete_connection( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_delete_connection_flattened_error_async(): + client = DeveloperConnectAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.delete_connection( + developer_connect.DeleteConnectionRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + developer_connect.CreateGitRepositoryLinkRequest, + dict, + ], +) +def test_create_git_repository_link(request_type, transport: str = "grpc"): + client = DeveloperConnectClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_git_repository_link), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.create_git_repository_link(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = developer_connect.CreateGitRepositoryLinkRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_create_git_repository_link_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DeveloperConnectClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
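+    # Patching __call__ on the transport's stub wrapper intercepts the request
+    # before anything is sent over the channel, so only request construction
+    # and default handling are exercised here.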
+ with mock.patch.object( + type(client.transport.create_git_repository_link), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.create_git_repository_link() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == developer_connect.CreateGitRepositoryLinkRequest() + + +def test_create_git_repository_link_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = DeveloperConnectClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = developer_connect.CreateGitRepositoryLinkRequest( + parent="parent_value", + git_repository_link_id="git_repository_link_id_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_git_repository_link), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.create_git_repository_link(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == developer_connect.CreateGitRepositoryLinkRequest( + parent="parent_value", + git_repository_link_id="git_repository_link_id_value", + ) + + +def test_create_git_repository_link_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DeveloperConnectClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.create_git_repository_link + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.create_git_repository_link + ] = mock_rpc + request = {} + client.create_git_repository_link(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.create_git_repository_link(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_create_git_repository_link_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DeveloperConnectAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.create_git_repository_link), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.create_git_repository_link() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == developer_connect.CreateGitRepositoryLinkRequest() + + +@pytest.mark.asyncio +async def test_create_git_repository_link_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DeveloperConnectAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.create_git_repository_link + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[ + client._client._transport.create_git_repository_link + ] = mock_object + + request = {} + await client.create_git_repository_link(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.create_git_repository_link(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + +@pytest.mark.asyncio +async def test_create_git_repository_link_async( + transport: str = "grpc_asyncio", + request_type=developer_connect.CreateGitRepositoryLinkRequest, +): + client = DeveloperConnectAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_git_repository_link), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.create_git_repository_link(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = developer_connect.CreateGitRepositoryLinkRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_create_git_repository_link_async_from_dict(): + await test_create_git_repository_link_async(request_type=dict) + + +def test_create_git_repository_link_field_headers(): + client = DeveloperConnectClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = developer_connect.CreateGitRepositoryLinkRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_git_repository_link), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.create_git_repository_link(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_create_git_repository_link_field_headers_async(): + client = DeveloperConnectAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = developer_connect.CreateGitRepositoryLinkRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_git_repository_link), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.create_git_repository_link(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_create_git_repository_link_flattened(): + client = DeveloperConnectClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_git_repository_link), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.create_git_repository_link( + parent="parent_value", + git_repository_link=developer_connect.GitRepositoryLink(name="name_value"), + git_repository_link_id="git_repository_link_id_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].git_repository_link + mock_val = developer_connect.GitRepositoryLink(name="name_value") + assert arg == mock_val + arg = args[0].git_repository_link_id + mock_val = "git_repository_link_id_value" + assert arg == mock_val + + +def test_create_git_repository_link_flattened_error(): + client = DeveloperConnectClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_git_repository_link( + developer_connect.CreateGitRepositoryLinkRequest(), + parent="parent_value", + git_repository_link=developer_connect.GitRepositoryLink(name="name_value"), + git_repository_link_id="git_repository_link_id_value", + ) + + +@pytest.mark.asyncio +async def test_create_git_repository_link_flattened_async(): + client = DeveloperConnectAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_git_repository_link), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.create_git_repository_link( + parent="parent_value", + git_repository_link=developer_connect.GitRepositoryLink(name="name_value"), + git_repository_link_id="git_repository_link_id_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].git_repository_link + mock_val = developer_connect.GitRepositoryLink(name="name_value") + assert arg == mock_val + arg = args[0].git_repository_link_id + mock_val = "git_repository_link_id_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_create_git_repository_link_flattened_error_async(): + client = DeveloperConnectAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.create_git_repository_link( + developer_connect.CreateGitRepositoryLinkRequest(), + parent="parent_value", + git_repository_link=developer_connect.GitRepositoryLink(name="name_value"), + git_repository_link_id="git_repository_link_id_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + developer_connect.DeleteGitRepositoryLinkRequest, + dict, + ], +) +def test_delete_git_repository_link(request_type, transport: str = "grpc"): + client = DeveloperConnectClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.delete_git_repository_link), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.delete_git_repository_link(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = developer_connect.DeleteGitRepositoryLinkRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_delete_git_repository_link_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DeveloperConnectClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_git_repository_link), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.delete_git_repository_link() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == developer_connect.DeleteGitRepositoryLinkRequest() + + +def test_delete_git_repository_link_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = DeveloperConnectClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = developer_connect.DeleteGitRepositoryLinkRequest( + name="name_value", + etag="etag_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_git_repository_link), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.delete_git_repository_link(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == developer_connect.DeleteGitRepositoryLinkRequest( + name="name_value", + etag="etag_value", + ) + + +def test_delete_git_repository_link_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DeveloperConnectClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.delete_git_repository_link + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client._transport._wrapped_methods[ + client._transport.delete_git_repository_link + ] = mock_rpc + request = {} + client.delete_git_repository_link(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete_git_repository_link(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_delete_git_repository_link_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DeveloperConnectAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_git_repository_link), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.delete_git_repository_link() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == developer_connect.DeleteGitRepositoryLinkRequest() + + +@pytest.mark.asyncio +async def test_delete_git_repository_link_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DeveloperConnectAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.delete_git_repository_link + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[ + client._client._transport.delete_git_repository_link + ] = mock_object + + request = {} + await client.delete_git_repository_link(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.delete_git_repository_link(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + +@pytest.mark.asyncio +async def test_delete_git_repository_link_async( + transport: str = "grpc_asyncio", + request_type=developer_connect.DeleteGitRepositoryLinkRequest, +): + client = DeveloperConnectAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.delete_git_repository_link), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.delete_git_repository_link(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = developer_connect.DeleteGitRepositoryLinkRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_delete_git_repository_link_async_from_dict(): + await test_delete_git_repository_link_async(request_type=dict) + + +def test_delete_git_repository_link_field_headers(): + client = DeveloperConnectClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = developer_connect.DeleteGitRepositoryLinkRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_git_repository_link), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.delete_git_repository_link(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_delete_git_repository_link_field_headers_async(): + client = DeveloperConnectAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = developer_connect.DeleteGitRepositoryLinkRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_git_repository_link), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.delete_git_repository_link(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_delete_git_repository_link_flattened(): + client = DeveloperConnectClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_git_repository_link), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
+ client.delete_git_repository_link( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_delete_git_repository_link_flattened_error(): + client = DeveloperConnectClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_git_repository_link( + developer_connect.DeleteGitRepositoryLinkRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_delete_git_repository_link_flattened_async(): + client = DeveloperConnectAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_git_repository_link), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.delete_git_repository_link( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_delete_git_repository_link_flattened_error_async(): + client = DeveloperConnectAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.delete_git_repository_link( + developer_connect.DeleteGitRepositoryLinkRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + developer_connect.ListGitRepositoryLinksRequest, + dict, + ], +) +def test_list_git_repository_links(request_type, transport: str = "grpc"): + client = DeveloperConnectClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_git_repository_links), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = developer_connect.ListGitRepositoryLinksResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + response = client.list_git_repository_links(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = developer_connect.ListGitRepositoryLinksRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. 
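+    # Note (added for clarity): the list call returns a pager wrapper rather than
+    # the raw response; attribute reads such as next_page_token below appear to be
+    # delegated by the pager to the underlying ListGitRepositoryLinksResponse.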
+ assert isinstance(response, pagers.ListGitRepositoryLinksPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +def test_list_git_repository_links_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DeveloperConnectClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_git_repository_links), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.list_git_repository_links() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == developer_connect.ListGitRepositoryLinksRequest() + + +def test_list_git_repository_links_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = DeveloperConnectClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = developer_connect.ListGitRepositoryLinksRequest( + parent="parent_value", + page_token="page_token_value", + filter="filter_value", + order_by="order_by_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_git_repository_links), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.list_git_repository_links(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == developer_connect.ListGitRepositoryLinksRequest( + parent="parent_value", + page_token="page_token_value", + filter="filter_value", + order_by="order_by_value", + ) + + +def test_list_git_repository_links_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DeveloperConnectClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.list_git_repository_links + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.list_git_repository_links + ] = mock_rpc + request = {} + client.list_git_repository_links(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.list_git_repository_links(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_list_git_repository_links_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DeveloperConnectAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_git_repository_links), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + developer_connect.ListGitRepositoryLinksResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + ) + response = await client.list_git_repository_links() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == developer_connect.ListGitRepositoryLinksRequest() + + +@pytest.mark.asyncio +async def test_list_git_repository_links_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DeveloperConnectAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.list_git_repository_links + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[ + client._client._transport.list_git_repository_links + ] = mock_object + + request = {} + await client.list_git_repository_links(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + await client.list_git_repository_links(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + +@pytest.mark.asyncio +async def test_list_git_repository_links_async( + transport: str = "grpc_asyncio", + request_type=developer_connect.ListGitRepositoryLinksRequest, +): + client = DeveloperConnectAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_git_repository_links), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + developer_connect.ListGitRepositoryLinksResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + ) + response = await client.list_git_repository_links(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = developer_connect.ListGitRepositoryLinksRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListGitRepositoryLinksAsyncPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +@pytest.mark.asyncio +async def test_list_git_repository_links_async_from_dict(): + await test_list_git_repository_links_async(request_type=dict) + + +def test_list_git_repository_links_field_headers(): + client = DeveloperConnectClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = developer_connect.ListGitRepositoryLinksRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_git_repository_links), "__call__" + ) as call: + call.return_value = developer_connect.ListGitRepositoryLinksResponse() + client.list_git_repository_links(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_git_repository_links_field_headers_async(): + client = DeveloperConnectAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = developer_connect.ListGitRepositoryLinksRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_git_repository_links), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + developer_connect.ListGitRepositoryLinksResponse() + ) + await client.list_git_repository_links(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_list_git_repository_links_flattened(): + client = DeveloperConnectClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_git_repository_links), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = developer_connect.ListGitRepositoryLinksResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
+ client.list_git_repository_links( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +def test_list_git_repository_links_flattened_error(): + client = DeveloperConnectClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_git_repository_links( + developer_connect.ListGitRepositoryLinksRequest(), + parent="parent_value", + ) + + +@pytest.mark.asyncio +async def test_list_git_repository_links_flattened_async(): + client = DeveloperConnectAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_git_repository_links), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = developer_connect.ListGitRepositoryLinksResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + developer_connect.ListGitRepositoryLinksResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_git_repository_links( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_list_git_repository_links_flattened_error_async(): + client = DeveloperConnectAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.list_git_repository_links( + developer_connect.ListGitRepositoryLinksRequest(), + parent="parent_value", + ) + + +def test_list_git_repository_links_pager(transport_name: str = "grpc"): + client = DeveloperConnectClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_git_repository_links), "__call__" + ) as call: + # Set the response to a series of pages. 
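+        # Note (added for clarity): the trailing RuntimeError in the side_effect
+        # sequence acts as a sentinel; if the pager requested a page beyond those
+        # listed, the mock would raise instead of silently returning more data.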
+ call.side_effect = ( + developer_connect.ListGitRepositoryLinksResponse( + git_repository_links=[ + developer_connect.GitRepositoryLink(), + developer_connect.GitRepositoryLink(), + developer_connect.GitRepositoryLink(), + ], + next_page_token="abc", + ), + developer_connect.ListGitRepositoryLinksResponse( + git_repository_links=[], + next_page_token="def", + ), + developer_connect.ListGitRepositoryLinksResponse( + git_repository_links=[ + developer_connect.GitRepositoryLink(), + ], + next_page_token="ghi", + ), + developer_connect.ListGitRepositoryLinksResponse( + git_repository_links=[ + developer_connect.GitRepositoryLink(), + developer_connect.GitRepositoryLink(), + ], + ), + RuntimeError, + ) + + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + ) + pager = client.list_git_repository_links(request={}) + + assert pager._metadata == expected_metadata + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, developer_connect.GitRepositoryLink) for i in results) + + +def test_list_git_repository_links_pages(transport_name: str = "grpc"): + client = DeveloperConnectClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_git_repository_links), "__call__" + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + developer_connect.ListGitRepositoryLinksResponse( + git_repository_links=[ + developer_connect.GitRepositoryLink(), + developer_connect.GitRepositoryLink(), + developer_connect.GitRepositoryLink(), + ], + next_page_token="abc", + ), + developer_connect.ListGitRepositoryLinksResponse( + git_repository_links=[], + next_page_token="def", + ), + developer_connect.ListGitRepositoryLinksResponse( + git_repository_links=[ + developer_connect.GitRepositoryLink(), + ], + next_page_token="ghi", + ), + developer_connect.ListGitRepositoryLinksResponse( + git_repository_links=[ + developer_connect.GitRepositoryLink(), + developer_connect.GitRepositoryLink(), + ], + ), + RuntimeError, + ) + pages = list(client.list_git_repository_links(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_list_git_repository_links_async_pager(): + client = DeveloperConnectAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_git_repository_links), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + developer_connect.ListGitRepositoryLinksResponse( + git_repository_links=[ + developer_connect.GitRepositoryLink(), + developer_connect.GitRepositoryLink(), + developer_connect.GitRepositoryLink(), + ], + next_page_token="abc", + ), + developer_connect.ListGitRepositoryLinksResponse( + git_repository_links=[], + next_page_token="def", + ), + developer_connect.ListGitRepositoryLinksResponse( + git_repository_links=[ + developer_connect.GitRepositoryLink(), + ], + next_page_token="ghi", + ), + developer_connect.ListGitRepositoryLinksResponse( + git_repository_links=[ + developer_connect.GitRepositoryLink(), + developer_connect.GitRepositoryLink(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_git_repository_links( + request={}, + ) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all( + isinstance(i, developer_connect.GitRepositoryLink) for i in responses + ) + + +@pytest.mark.asyncio +async def test_list_git_repository_links_async_pages(): + client = DeveloperConnectAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_git_repository_links), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + developer_connect.ListGitRepositoryLinksResponse( + git_repository_links=[ + developer_connect.GitRepositoryLink(), + developer_connect.GitRepositoryLink(), + developer_connect.GitRepositoryLink(), + ], + next_page_token="abc", + ), + developer_connect.ListGitRepositoryLinksResponse( + git_repository_links=[], + next_page_token="def", + ), + developer_connect.ListGitRepositoryLinksResponse( + git_repository_links=[ + developer_connect.GitRepositoryLink(), + ], + next_page_token="ghi", + ), + developer_connect.ListGitRepositoryLinksResponse( + git_repository_links=[ + developer_connect.GitRepositoryLink(), + developer_connect.GitRepositoryLink(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_git_repository_links(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + developer_connect.GetGitRepositoryLinkRequest, + dict, + ], +) +def test_get_git_repository_link(request_type, transport: str = "grpc"): + client = DeveloperConnectClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_git_repository_link), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = developer_connect.GitRepositoryLink( + name="name_value", + clone_uri="clone_uri_value", + etag="etag_value", + reconciling=True, + uid="uid_value", + ) + response = client.get_git_repository_link(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = developer_connect.GetGitRepositoryLinkRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, developer_connect.GitRepositoryLink) + assert response.name == "name_value" + assert response.clone_uri == "clone_uri_value" + assert response.etag == "etag_value" + assert response.reconciling is True + assert response.uid == "uid_value" + + +def test_get_git_repository_link_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DeveloperConnectClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_git_repository_link), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.get_git_repository_link() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == developer_connect.GetGitRepositoryLinkRequest() + + +def test_get_git_repository_link_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = DeveloperConnectClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = developer_connect.GetGitRepositoryLinkRequest( + name="name_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_git_repository_link), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.get_git_repository_link(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == developer_connect.GetGitRepositoryLinkRequest( + name="name_value", + ) + + +def test_get_git_repository_link_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DeveloperConnectClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.get_git_repository_link + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client._transport._wrapped_methods[ + client._transport.get_git_repository_link + ] = mock_rpc + request = {} + client.get_git_repository_link(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.get_git_repository_link(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_get_git_repository_link_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DeveloperConnectAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_git_repository_link), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + developer_connect.GitRepositoryLink( + name="name_value", + clone_uri="clone_uri_value", + etag="etag_value", + reconciling=True, + uid="uid_value", + ) + ) + response = await client.get_git_repository_link() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == developer_connect.GetGitRepositoryLinkRequest() + + +@pytest.mark.asyncio +async def test_get_git_repository_link_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DeveloperConnectAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.get_git_repository_link + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[ + client._client._transport.get_git_repository_link + ] = mock_object + + request = {} + await client.get_git_repository_link(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + await client.get_git_repository_link(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + +@pytest.mark.asyncio +async def test_get_git_repository_link_async( + transport: str = "grpc_asyncio", + request_type=developer_connect.GetGitRepositoryLinkRequest, +): + client = DeveloperConnectAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_git_repository_link), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
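+        # Note (added for clarity): FakeUnaryUnaryCall wraps the designated response
+        # so awaiting the mocked stub yields it, mimicking a real async unary-unary call.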
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + developer_connect.GitRepositoryLink( + name="name_value", + clone_uri="clone_uri_value", + etag="etag_value", + reconciling=True, + uid="uid_value", + ) + ) + response = await client.get_git_repository_link(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = developer_connect.GetGitRepositoryLinkRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, developer_connect.GitRepositoryLink) + assert response.name == "name_value" + assert response.clone_uri == "clone_uri_value" + assert response.etag == "etag_value" + assert response.reconciling is True + assert response.uid == "uid_value" + + +@pytest.mark.asyncio +async def test_get_git_repository_link_async_from_dict(): + await test_get_git_repository_link_async(request_type=dict) + + +def test_get_git_repository_link_field_headers(): + client = DeveloperConnectClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = developer_connect.GetGitRepositoryLinkRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_git_repository_link), "__call__" + ) as call: + call.return_value = developer_connect.GitRepositoryLink() + client.get_git_repository_link(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_git_repository_link_field_headers_async(): + client = DeveloperConnectAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = developer_connect.GetGitRepositoryLinkRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_git_repository_link), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + developer_connect.GitRepositoryLink() + ) + await client.get_git_repository_link(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_get_git_repository_link_flattened(): + client = DeveloperConnectClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_git_repository_link), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = developer_connect.GitRepositoryLink() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
+ client.get_git_repository_link( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_get_git_repository_link_flattened_error(): + client = DeveloperConnectClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_git_repository_link( + developer_connect.GetGitRepositoryLinkRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_get_git_repository_link_flattened_async(): + client = DeveloperConnectAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_git_repository_link), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = developer_connect.GitRepositoryLink() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + developer_connect.GitRepositoryLink() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_git_repository_link( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_get_git_repository_link_flattened_error_async(): + client = DeveloperConnectAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.get_git_repository_link( + developer_connect.GetGitRepositoryLinkRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + developer_connect.FetchReadWriteTokenRequest, + dict, + ], +) +def test_fetch_read_write_token(request_type, transport: str = "grpc"): + client = DeveloperConnectClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.fetch_read_write_token), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = developer_connect.FetchReadWriteTokenResponse( + token="token_value", + git_username="git_username_value", + ) + response = client.fetch_read_write_token(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = developer_connect.FetchReadWriteTokenRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, developer_connect.FetchReadWriteTokenResponse) + assert response.token == "token_value" + assert response.git_username == "git_username_value" + + +def test_fetch_read_write_token_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DeveloperConnectClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.fetch_read_write_token), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.fetch_read_write_token() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == developer_connect.FetchReadWriteTokenRequest() + + +def test_fetch_read_write_token_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = DeveloperConnectClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = developer_connect.FetchReadWriteTokenRequest( + git_repository_link="git_repository_link_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.fetch_read_write_token), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.fetch_read_write_token(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == developer_connect.FetchReadWriteTokenRequest( + git_repository_link="git_repository_link_value", + ) + + +def test_fetch_read_write_token_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DeveloperConnectClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.fetch_read_write_token + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.fetch_read_write_token + ] = mock_rpc + request = {} + client.fetch_read_write_token(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.fetch_read_write_token(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_fetch_read_write_token_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. 
+ client = DeveloperConnectAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.fetch_read_write_token), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + developer_connect.FetchReadWriteTokenResponse( + token="token_value", + git_username="git_username_value", + ) + ) + response = await client.fetch_read_write_token() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == developer_connect.FetchReadWriteTokenRequest() + + +@pytest.mark.asyncio +async def test_fetch_read_write_token_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DeveloperConnectAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.fetch_read_write_token + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[ + client._client._transport.fetch_read_write_token + ] = mock_object + + request = {} + await client.fetch_read_write_token(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + await client.fetch_read_write_token(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + +@pytest.mark.asyncio +async def test_fetch_read_write_token_async( + transport: str = "grpc_asyncio", + request_type=developer_connect.FetchReadWriteTokenRequest, +): + client = DeveloperConnectAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.fetch_read_write_token), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + developer_connect.FetchReadWriteTokenResponse( + token="token_value", + git_username="git_username_value", + ) + ) + response = await client.fetch_read_write_token(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = developer_connect.FetchReadWriteTokenRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, developer_connect.FetchReadWriteTokenResponse) + assert response.token == "token_value" + assert response.git_username == "git_username_value" + + +@pytest.mark.asyncio +async def test_fetch_read_write_token_async_from_dict(): + await test_fetch_read_write_token_async(request_type=dict) + + +def test_fetch_read_write_token_field_headers(): + client = DeveloperConnectClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = developer_connect.FetchReadWriteTokenRequest() + + request.git_repository_link = "git_repository_link_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.fetch_read_write_token), "__call__" + ) as call: + call.return_value = developer_connect.FetchReadWriteTokenResponse() + client.fetch_read_write_token(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "git_repository_link=git_repository_link_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_fetch_read_write_token_field_headers_async(): + client = DeveloperConnectAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = developer_connect.FetchReadWriteTokenRequest() + + request.git_repository_link = "git_repository_link_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.fetch_read_write_token), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + developer_connect.FetchReadWriteTokenResponse() + ) + await client.fetch_read_write_token(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "git_repository_link=git_repository_link_value", + ) in kw["metadata"] + + +def test_fetch_read_write_token_flattened(): + client = DeveloperConnectClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.fetch_read_write_token), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = developer_connect.FetchReadWriteTokenResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.fetch_read_write_token( + git_repository_link="git_repository_link_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].git_repository_link + mock_val = "git_repository_link_value" + assert arg == mock_val + + +def test_fetch_read_write_token_flattened_error(): + client = DeveloperConnectClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.fetch_read_write_token( + developer_connect.FetchReadWriteTokenRequest(), + git_repository_link="git_repository_link_value", + ) + + +@pytest.mark.asyncio +async def test_fetch_read_write_token_flattened_async(): + client = DeveloperConnectAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.fetch_read_write_token), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = developer_connect.FetchReadWriteTokenResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + developer_connect.FetchReadWriteTokenResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.fetch_read_write_token( + git_repository_link="git_repository_link_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].git_repository_link + mock_val = "git_repository_link_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_fetch_read_write_token_flattened_error_async(): + client = DeveloperConnectAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.fetch_read_write_token( + developer_connect.FetchReadWriteTokenRequest(), + git_repository_link="git_repository_link_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + developer_connect.FetchReadTokenRequest, + dict, + ], +) +def test_fetch_read_token(request_type, transport: str = "grpc"): + client = DeveloperConnectClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.fetch_read_token), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = developer_connect.FetchReadTokenResponse( + token="token_value", + git_username="git_username_value", + ) + response = client.fetch_read_token(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = developer_connect.FetchReadTokenRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, developer_connect.FetchReadTokenResponse) + assert response.token == "token_value" + assert response.git_username == "git_username_value" + + +def test_fetch_read_token_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DeveloperConnectClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.fetch_read_token), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.fetch_read_token() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == developer_connect.FetchReadTokenRequest() + + +def test_fetch_read_token_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = DeveloperConnectClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = developer_connect.FetchReadTokenRequest( + git_repository_link="git_repository_link_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.fetch_read_token), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.fetch_read_token(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == developer_connect.FetchReadTokenRequest( + git_repository_link="git_repository_link_value", + ) + + +def test_fetch_read_token_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DeveloperConnectClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.fetch_read_token in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.fetch_read_token + ] = mock_rpc + request = {} + client.fetch_read_token(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.fetch_read_token(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_fetch_read_token_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. 
+ client = DeveloperConnectAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.fetch_read_token), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + developer_connect.FetchReadTokenResponse( + token="token_value", + git_username="git_username_value", + ) + ) + response = await client.fetch_read_token() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == developer_connect.FetchReadTokenRequest() + + +@pytest.mark.asyncio +async def test_fetch_read_token_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DeveloperConnectAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.fetch_read_token + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[ + client._client._transport.fetch_read_token + ] = mock_object + + request = {} + await client.fetch_read_token(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + await client.fetch_read_token(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + +@pytest.mark.asyncio +async def test_fetch_read_token_async( + transport: str = "grpc_asyncio", + request_type=developer_connect.FetchReadTokenRequest, +): + client = DeveloperConnectAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.fetch_read_token), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + developer_connect.FetchReadTokenResponse( + token="token_value", + git_username="git_username_value", + ) + ) + response = await client.fetch_read_token(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = developer_connect.FetchReadTokenRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, developer_connect.FetchReadTokenResponse) + assert response.token == "token_value" + assert response.git_username == "git_username_value" + + +@pytest.mark.asyncio +async def test_fetch_read_token_async_from_dict(): + await test_fetch_read_token_async(request_type=dict) + + +def test_fetch_read_token_field_headers(): + client = DeveloperConnectClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = developer_connect.FetchReadTokenRequest() + + request.git_repository_link = "git_repository_link_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.fetch_read_token), "__call__") as call: + call.return_value = developer_connect.FetchReadTokenResponse() + client.fetch_read_token(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "git_repository_link=git_repository_link_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_fetch_read_token_field_headers_async(): + client = DeveloperConnectAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = developer_connect.FetchReadTokenRequest() + + request.git_repository_link = "git_repository_link_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.fetch_read_token), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + developer_connect.FetchReadTokenResponse() + ) + await client.fetch_read_token(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "git_repository_link=git_repository_link_value", + ) in kw["metadata"] + + +def test_fetch_read_token_flattened(): + client = DeveloperConnectClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.fetch_read_token), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = developer_connect.FetchReadTokenResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.fetch_read_token( + git_repository_link="git_repository_link_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].git_repository_link + mock_val = "git_repository_link_value" + assert arg == mock_val + + +def test_fetch_read_token_flattened_error(): + client = DeveloperConnectClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.fetch_read_token( + developer_connect.FetchReadTokenRequest(), + git_repository_link="git_repository_link_value", + ) + + +@pytest.mark.asyncio +async def test_fetch_read_token_flattened_async(): + client = DeveloperConnectAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.fetch_read_token), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = developer_connect.FetchReadTokenResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + developer_connect.FetchReadTokenResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.fetch_read_token( + git_repository_link="git_repository_link_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].git_repository_link + mock_val = "git_repository_link_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_fetch_read_token_flattened_error_async(): + client = DeveloperConnectAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.fetch_read_token( + developer_connect.FetchReadTokenRequest(), + git_repository_link="git_repository_link_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + developer_connect.FetchLinkableGitRepositoriesRequest, + dict, + ], +) +def test_fetch_linkable_git_repositories(request_type, transport: str = "grpc"): + client = DeveloperConnectClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.fetch_linkable_git_repositories), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = developer_connect.FetchLinkableGitRepositoriesResponse( + next_page_token="next_page_token_value", + ) + response = client.fetch_linkable_git_repositories(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = developer_connect.FetchLinkableGitRepositoriesRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.FetchLinkableGitRepositoriesPager) + assert response.next_page_token == "next_page_token_value" + + +def test_fetch_linkable_git_repositories_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DeveloperConnectClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
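The flattened-argument tests above fix the calling convention: each method accepts either a request object or the flattened keyword arguments, and supplying both at once raises ValueError. A short sketch under the same assumptions (hypothetical entry point and placeholder resource name):

from google.cloud import developerconnect_v1

client = developerconnect_v1.DeveloperConnectClient()
link = "projects/my-project/locations/us-central1/connections/my-conn/gitRepositoryLinks/my-repo"  # placeholder

# Either a full request object...
response = client.fetch_read_write_token(
    developerconnect_v1.FetchReadWriteTokenRequest(git_repository_link=link)
)

# ...or the flattened keyword argument.
response = client.fetch_read_write_token(git_repository_link=link)

# Mixing the two is rejected, exactly as the *_flattened_error tests assert.
try:
    client.fetch_read_write_token(
        developerconnect_v1.FetchReadWriteTokenRequest(),
        git_repository_link=link,
    )
except ValueError:
    pass  # expected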
+ with mock.patch.object( + type(client.transport.fetch_linkable_git_repositories), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.fetch_linkable_git_repositories() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == developer_connect.FetchLinkableGitRepositoriesRequest() + + +def test_fetch_linkable_git_repositories_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = DeveloperConnectClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = developer_connect.FetchLinkableGitRepositoriesRequest( + connection="connection_value", + page_token="page_token_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.fetch_linkable_git_repositories), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.fetch_linkable_git_repositories(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == developer_connect.FetchLinkableGitRepositoriesRequest( + connection="connection_value", + page_token="page_token_value", + ) + + +def test_fetch_linkable_git_repositories_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DeveloperConnectClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.fetch_linkable_git_repositories + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.fetch_linkable_git_repositories + ] = mock_rpc + request = {} + client.fetch_linkable_git_repositories(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.fetch_linkable_git_repositories(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_fetch_linkable_git_repositories_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DeveloperConnectAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.fetch_linkable_git_repositories), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + developer_connect.FetchLinkableGitRepositoriesResponse( + next_page_token="next_page_token_value", + ) + ) + response = await client.fetch_linkable_git_repositories() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == developer_connect.FetchLinkableGitRepositoriesRequest() + + +@pytest.mark.asyncio +async def test_fetch_linkable_git_repositories_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DeveloperConnectAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.fetch_linkable_git_repositories + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[ + client._client._transport.fetch_linkable_git_repositories + ] = mock_object + + request = {} + await client.fetch_linkable_git_repositories(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + await client.fetch_linkable_git_repositories(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + +@pytest.mark.asyncio +async def test_fetch_linkable_git_repositories_async( + transport: str = "grpc_asyncio", + request_type=developer_connect.FetchLinkableGitRepositoriesRequest, +): + client = DeveloperConnectAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.fetch_linkable_git_repositories), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + developer_connect.FetchLinkableGitRepositoriesResponse( + next_page_token="next_page_token_value", + ) + ) + response = await client.fetch_linkable_git_repositories(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = developer_connect.FetchLinkableGitRepositoriesRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.FetchLinkableGitRepositoriesAsyncPager) + assert response.next_page_token == "next_page_token_value" + + +@pytest.mark.asyncio +async def test_fetch_linkable_git_repositories_async_from_dict(): + await test_fetch_linkable_git_repositories_async(request_type=dict) + + +def test_fetch_linkable_git_repositories_field_headers(): + client = DeveloperConnectClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = developer_connect.FetchLinkableGitRepositoriesRequest() + + request.connection = "connection_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.fetch_linkable_git_repositories), "__call__" + ) as call: + call.return_value = developer_connect.FetchLinkableGitRepositoriesResponse() + client.fetch_linkable_git_repositories(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "connection=connection_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_fetch_linkable_git_repositories_field_headers_async(): + client = DeveloperConnectAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = developer_connect.FetchLinkableGitRepositoriesRequest() + + request.connection = "connection_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.fetch_linkable_git_repositories), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + developer_connect.FetchLinkableGitRepositoriesResponse() + ) + await client.fetch_linkable_git_repositories(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "connection=connection_value", + ) in kw["metadata"] + + +def test_fetch_linkable_git_repositories_flattened(): + client = DeveloperConnectClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.fetch_linkable_git_repositories), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = developer_connect.FetchLinkableGitRepositoriesResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.fetch_linkable_git_repositories( + connection="connection_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
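The *_async variants above exercise DeveloperConnectAsyncClient, whose surface mirrors the sync client but must be awaited; routing metadata such as the x-goog-request-params header is attached automatically from the resource name in the request. A minimal asyncio sketch, same assumptions as the earlier examples:

import asyncio

from google.cloud import developerconnect_v1


async def main() -> None:
    client = developerconnect_v1.DeveloperConnectAsyncClient()
    # Placeholder resource name; setting it is all the caller needs to do for the
    # client to populate the x-goog-request-params routing header checked above.
    link = "projects/my-project/locations/us-central1/connections/my-conn/gitRepositoryLinks/my-repo"
    response = await client.fetch_read_token(git_repository_link=link)
    print(response.token, response.git_username)


asyncio.run(main())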
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].connection + mock_val = "connection_value" + assert arg == mock_val + + +def test_fetch_linkable_git_repositories_flattened_error(): + client = DeveloperConnectClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.fetch_linkable_git_repositories( + developer_connect.FetchLinkableGitRepositoriesRequest(), + connection="connection_value", + ) + + +@pytest.mark.asyncio +async def test_fetch_linkable_git_repositories_flattened_async(): + client = DeveloperConnectAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.fetch_linkable_git_repositories), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = developer_connect.FetchLinkableGitRepositoriesResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + developer_connect.FetchLinkableGitRepositoriesResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.fetch_linkable_git_repositories( + connection="connection_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].connection + mock_val = "connection_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_fetch_linkable_git_repositories_flattened_error_async(): + client = DeveloperConnectAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.fetch_linkable_git_repositories( + developer_connect.FetchLinkableGitRepositoriesRequest(), + connection="connection_value", + ) + + +def test_fetch_linkable_git_repositories_pager(transport_name: str = "grpc"): + client = DeveloperConnectClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.fetch_linkable_git_repositories), "__call__" + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + developer_connect.FetchLinkableGitRepositoriesResponse( + linkable_git_repositories=[ + developer_connect.LinkableGitRepository(), + developer_connect.LinkableGitRepository(), + developer_connect.LinkableGitRepository(), + ], + next_page_token="abc", + ), + developer_connect.FetchLinkableGitRepositoriesResponse( + linkable_git_repositories=[], + next_page_token="def", + ), + developer_connect.FetchLinkableGitRepositoriesResponse( + linkable_git_repositories=[ + developer_connect.LinkableGitRepository(), + ], + next_page_token="ghi", + ), + developer_connect.FetchLinkableGitRepositoriesResponse( + linkable_git_repositories=[ + developer_connect.LinkableGitRepository(), + developer_connect.LinkableGitRepository(), + ], + ), + RuntimeError, + ) + + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("connection", ""),)), + ) + pager = client.fetch_linkable_git_repositories(request={}) + + assert pager._metadata == expected_metadata + + results = list(pager) + assert len(results) == 6 + assert all( + isinstance(i, developer_connect.LinkableGitRepository) for i in results + ) + + +def test_fetch_linkable_git_repositories_pages(transport_name: str = "grpc"): + client = DeveloperConnectClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.fetch_linkable_git_repositories), "__call__" + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + developer_connect.FetchLinkableGitRepositoriesResponse( + linkable_git_repositories=[ + developer_connect.LinkableGitRepository(), + developer_connect.LinkableGitRepository(), + developer_connect.LinkableGitRepository(), + ], + next_page_token="abc", + ), + developer_connect.FetchLinkableGitRepositoriesResponse( + linkable_git_repositories=[], + next_page_token="def", + ), + developer_connect.FetchLinkableGitRepositoriesResponse( + linkable_git_repositories=[ + developer_connect.LinkableGitRepository(), + ], + next_page_token="ghi", + ), + developer_connect.FetchLinkableGitRepositoriesResponse( + linkable_git_repositories=[ + developer_connect.LinkableGitRepository(), + developer_connect.LinkableGitRepository(), + ], + ), + RuntimeError, + ) + pages = list(client.fetch_linkable_git_repositories(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_fetch_linkable_git_repositories_async_pager(): + client = DeveloperConnectAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.fetch_linkable_git_repositories), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + developer_connect.FetchLinkableGitRepositoriesResponse( + linkable_git_repositories=[ + developer_connect.LinkableGitRepository(), + developer_connect.LinkableGitRepository(), + developer_connect.LinkableGitRepository(), + ], + next_page_token="abc", + ), + developer_connect.FetchLinkableGitRepositoriesResponse( + linkable_git_repositories=[], + next_page_token="def", + ), + developer_connect.FetchLinkableGitRepositoriesResponse( + linkable_git_repositories=[ + developer_connect.LinkableGitRepository(), + ], + next_page_token="ghi", + ), + developer_connect.FetchLinkableGitRepositoriesResponse( + linkable_git_repositories=[ + developer_connect.LinkableGitRepository(), + developer_connect.LinkableGitRepository(), + ], + ), + RuntimeError, + ) + async_pager = await client.fetch_linkable_git_repositories( + request={}, + ) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all( + isinstance(i, developer_connect.LinkableGitRepository) for i in responses + ) + + +@pytest.mark.asyncio +async def test_fetch_linkable_git_repositories_async_pages(): + client = DeveloperConnectAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.fetch_linkable_git_repositories), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + developer_connect.FetchLinkableGitRepositoriesResponse( + linkable_git_repositories=[ + developer_connect.LinkableGitRepository(), + developer_connect.LinkableGitRepository(), + developer_connect.LinkableGitRepository(), + ], + next_page_token="abc", + ), + developer_connect.FetchLinkableGitRepositoriesResponse( + linkable_git_repositories=[], + next_page_token="def", + ), + developer_connect.FetchLinkableGitRepositoriesResponse( + linkable_git_repositories=[ + developer_connect.LinkableGitRepository(), + ], + next_page_token="ghi", + ), + developer_connect.FetchLinkableGitRepositoriesResponse( + linkable_git_repositories=[ + developer_connect.LinkableGitRepository(), + developer_connect.LinkableGitRepository(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.fetch_linkable_git_repositories(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + developer_connect.FetchGitHubInstallationsRequest, + dict, + ], +) +def test_fetch_git_hub_installations(request_type, transport: str = "grpc"): + client = DeveloperConnectClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
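The pager tests above pin down the iteration contract for fetch_linkable_git_repositories: the sync call returns a FetchLinkableGitRepositoriesPager that can be consumed item by item (fetching further pages lazily) or page by page via `.pages`, while the async call is awaited first and then consumed with `async for`. A sketch of the sync side, same assumptions as before:

from google.cloud import developerconnect_v1

client = developerconnect_v1.DeveloperConnectClient()
connection = "projects/my-project/locations/us-central1/connections/my-conn"  # placeholder

# Item-by-item iteration; each element is a LinkableGitRepository message.
for repo in client.fetch_linkable_git_repositories(connection=connection):
    print(repo)

# Page-by-page iteration exposes the raw responses and their page tokens.
for page in client.fetch_linkable_git_repositories(connection=connection).pages:
    print(page.raw_page.next_page_token)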
+ with mock.patch.object( + type(client.transport.fetch_git_hub_installations), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = developer_connect.FetchGitHubInstallationsResponse() + response = client.fetch_git_hub_installations(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = developer_connect.FetchGitHubInstallationsRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, developer_connect.FetchGitHubInstallationsResponse) + + +def test_fetch_git_hub_installations_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DeveloperConnectClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.fetch_git_hub_installations), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.fetch_git_hub_installations() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == developer_connect.FetchGitHubInstallationsRequest() + + +def test_fetch_git_hub_installations_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = DeveloperConnectClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = developer_connect.FetchGitHubInstallationsRequest( + connection="connection_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.fetch_git_hub_installations), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.fetch_git_hub_installations(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == developer_connect.FetchGitHubInstallationsRequest( + connection="connection_value", + ) + + +def test_fetch_git_hub_installations_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DeveloperConnectClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.fetch_git_hub_installations + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client._transport._wrapped_methods[ + client._transport.fetch_git_hub_installations + ] = mock_rpc + request = {} + client.fetch_git_hub_installations(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.fetch_git_hub_installations(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_fetch_git_hub_installations_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DeveloperConnectAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.fetch_git_hub_installations), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + developer_connect.FetchGitHubInstallationsResponse() + ) + response = await client.fetch_git_hub_installations() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == developer_connect.FetchGitHubInstallationsRequest() + + +@pytest.mark.asyncio +async def test_fetch_git_hub_installations_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DeveloperConnectAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.fetch_git_hub_installations + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[ + client._client._transport.fetch_git_hub_installations + ] = mock_object + + request = {} + await client.fetch_git_hub_installations(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + await client.fetch_git_hub_installations(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + +@pytest.mark.asyncio +async def test_fetch_git_hub_installations_async( + transport: str = "grpc_asyncio", + request_type=developer_connect.FetchGitHubInstallationsRequest, +): + client = DeveloperConnectAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.fetch_git_hub_installations), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + developer_connect.FetchGitHubInstallationsResponse() + ) + response = await client.fetch_git_hub_installations(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = developer_connect.FetchGitHubInstallationsRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, developer_connect.FetchGitHubInstallationsResponse) + + +@pytest.mark.asyncio +async def test_fetch_git_hub_installations_async_from_dict(): + await test_fetch_git_hub_installations_async(request_type=dict) + + +def test_fetch_git_hub_installations_field_headers(): + client = DeveloperConnectClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = developer_connect.FetchGitHubInstallationsRequest() + + request.connection = "connection_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.fetch_git_hub_installations), "__call__" + ) as call: + call.return_value = developer_connect.FetchGitHubInstallationsResponse() + client.fetch_git_hub_installations(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "connection=connection_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_fetch_git_hub_installations_field_headers_async(): + client = DeveloperConnectAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = developer_connect.FetchGitHubInstallationsRequest() + + request.connection = "connection_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.fetch_git_hub_installations), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + developer_connect.FetchGitHubInstallationsResponse() + ) + await client.fetch_git_hub_installations(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "connection=connection_value", + ) in kw["metadata"] + + +def test_fetch_git_hub_installations_flattened(): + client = DeveloperConnectClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.fetch_git_hub_installations), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = developer_connect.FetchGitHubInstallationsResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.fetch_git_hub_installations( + connection="connection_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].connection + mock_val = "connection_value" + assert arg == mock_val + + +def test_fetch_git_hub_installations_flattened_error(): + client = DeveloperConnectClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.fetch_git_hub_installations( + developer_connect.FetchGitHubInstallationsRequest(), + connection="connection_value", + ) + + +@pytest.mark.asyncio +async def test_fetch_git_hub_installations_flattened_async(): + client = DeveloperConnectAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.fetch_git_hub_installations), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = developer_connect.FetchGitHubInstallationsResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + developer_connect.FetchGitHubInstallationsResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.fetch_git_hub_installations( + connection="connection_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].connection + mock_val = "connection_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_fetch_git_hub_installations_flattened_error_async(): + client = DeveloperConnectAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.fetch_git_hub_installations( + developer_connect.FetchGitHubInstallationsRequest(), + connection="connection_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + developer_connect.FetchGitRefsRequest, + dict, + ], +) +def test_fetch_git_refs(request_type, transport: str = "grpc"): + client = DeveloperConnectClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.fetch_git_refs), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = developer_connect.FetchGitRefsResponse( + ref_names=["ref_names_value"], + next_page_token="next_page_token_value", + ) + response = client.fetch_git_refs(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = developer_connect.FetchGitRefsRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.FetchGitRefsPager) + assert response.ref_names == ["ref_names_value"] + assert response.next_page_token == "next_page_token_value" + + +def test_fetch_git_refs_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. 
request == None and no flattened fields passed, work. + client = DeveloperConnectClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.fetch_git_refs), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.fetch_git_refs() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == developer_connect.FetchGitRefsRequest() + + +def test_fetch_git_refs_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = DeveloperConnectClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = developer_connect.FetchGitRefsRequest( + git_repository_link="git_repository_link_value", + page_token="page_token_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.fetch_git_refs), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.fetch_git_refs(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == developer_connect.FetchGitRefsRequest( + git_repository_link="git_repository_link_value", + page_token="page_token_value", + ) + + +def test_fetch_git_refs_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DeveloperConnectClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.fetch_git_refs in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.fetch_git_refs] = mock_rpc + request = {} + client.fetch_git_refs(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.fetch_git_refs(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_fetch_git_refs_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DeveloperConnectAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.fetch_git_refs), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + developer_connect.FetchGitRefsResponse( + ref_names=["ref_names_value"], + next_page_token="next_page_token_value", + ) + ) + response = await client.fetch_git_refs() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == developer_connect.FetchGitRefsRequest() + + +@pytest.mark.asyncio +async def test_fetch_git_refs_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DeveloperConnectAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.fetch_git_refs + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[ + client._client._transport.fetch_git_refs + ] = mock_object + + request = {} + await client.fetch_git_refs(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + await client.fetch_git_refs(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + +@pytest.mark.asyncio +async def test_fetch_git_refs_async( + transport: str = "grpc_asyncio", request_type=developer_connect.FetchGitRefsRequest +): + client = DeveloperConnectAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.fetch_git_refs), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + developer_connect.FetchGitRefsResponse( + ref_names=["ref_names_value"], + next_page_token="next_page_token_value", + ) + ) + response = await client.fetch_git_refs(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = developer_connect.FetchGitRefsRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.FetchGitRefsAsyncPager) + assert response.ref_names == ["ref_names_value"] + assert response.next_page_token == "next_page_token_value" + + +@pytest.mark.asyncio +async def test_fetch_git_refs_async_from_dict(): + await test_fetch_git_refs_async(request_type=dict) + + +def test_fetch_git_refs_field_headers(): + client = DeveloperConnectClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = developer_connect.FetchGitRefsRequest() + + request.git_repository_link = "git_repository_link_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.fetch_git_refs), "__call__") as call: + call.return_value = developer_connect.FetchGitRefsResponse() + client.fetch_git_refs(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "git_repository_link=git_repository_link_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_fetch_git_refs_field_headers_async(): + client = DeveloperConnectAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = developer_connect.FetchGitRefsRequest() + + request.git_repository_link = "git_repository_link_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.fetch_git_refs), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + developer_connect.FetchGitRefsResponse() + ) + await client.fetch_git_refs(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "git_repository_link=git_repository_link_value", + ) in kw["metadata"] + + +def test_fetch_git_refs_flattened(): + client = DeveloperConnectClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.fetch_git_refs), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = developer_connect.FetchGitRefsResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.fetch_git_refs( + git_repository_link="git_repository_link_value", + ref_type=developer_connect.FetchGitRefsRequest.RefType.TAG, + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].git_repository_link + mock_val = "git_repository_link_value" + assert arg == mock_val + arg = args[0].ref_type + mock_val = developer_connect.FetchGitRefsRequest.RefType.TAG + assert arg == mock_val + + +def test_fetch_git_refs_flattened_error(): + client = DeveloperConnectClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.fetch_git_refs( + developer_connect.FetchGitRefsRequest(), + git_repository_link="git_repository_link_value", + ref_type=developer_connect.FetchGitRefsRequest.RefType.TAG, + ) + + +@pytest.mark.asyncio +async def test_fetch_git_refs_flattened_async(): + client = DeveloperConnectAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.fetch_git_refs), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = developer_connect.FetchGitRefsResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + developer_connect.FetchGitRefsResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.fetch_git_refs( + git_repository_link="git_repository_link_value", + ref_type=developer_connect.FetchGitRefsRequest.RefType.TAG, + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].git_repository_link + mock_val = "git_repository_link_value" + assert arg == mock_val + arg = args[0].ref_type + mock_val = developer_connect.FetchGitRefsRequest.RefType.TAG + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_fetch_git_refs_flattened_error_async(): + client = DeveloperConnectAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.fetch_git_refs( + developer_connect.FetchGitRefsRequest(), + git_repository_link="git_repository_link_value", + ref_type=developer_connect.FetchGitRefsRequest.RefType.TAG, + ) + + +def test_fetch_git_refs_pager(transport_name: str = "grpc"): + client = DeveloperConnectClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.fetch_git_refs), "__call__") as call: + # Set the response to a series of pages. + call.side_effect = ( + developer_connect.FetchGitRefsResponse( + ref_names=[ + str(), + str(), + str(), + ], + next_page_token="abc", + ), + developer_connect.FetchGitRefsResponse( + ref_names=[], + next_page_token="def", + ), + developer_connect.FetchGitRefsResponse( + ref_names=[ + str(), + ], + next_page_token="ghi", + ), + developer_connect.FetchGitRefsResponse( + ref_names=[ + str(), + str(), + ], + ), + RuntimeError, + ) + + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("git_repository_link", ""),)), + ) + pager = client.fetch_git_refs(request={}) + + assert pager._metadata == expected_metadata + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, str) for i in results) + + +def test_fetch_git_refs_pages(transport_name: str = "grpc"): + client = DeveloperConnectClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.fetch_git_refs), "__call__") as call: + # Set the response to a series of pages. 
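fetch_git_refs adds an enum to the flattened signature and, per the pager tests, pages over plain ref-name strings rather than messages. A hedged sketch, assuming the enum is reachable through the re-exported request type:

from google.cloud import developerconnect_v1

client = developerconnect_v1.DeveloperConnectClient()
link = "projects/my-project/locations/us-central1/connections/my-conn/gitRepositoryLinks/my-repo"  # placeholder

# RefType.TAG is the enum value the flattened tests above exercise.
refs = client.fetch_git_refs(
    git_repository_link=link,
    ref_type=developerconnect_v1.FetchGitRefsRequest.RefType.TAG,
)

for ref_name in refs:  # the pager yields plain strings (ref names)
    print(ref_name)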
+ call.side_effect = ( + developer_connect.FetchGitRefsResponse( + ref_names=[ + str(), + str(), + str(), + ], + next_page_token="abc", + ), + developer_connect.FetchGitRefsResponse( + ref_names=[], + next_page_token="def", + ), + developer_connect.FetchGitRefsResponse( + ref_names=[ + str(), + ], + next_page_token="ghi", + ), + developer_connect.FetchGitRefsResponse( + ref_names=[ + str(), + str(), + ], + ), + RuntimeError, + ) + pages = list(client.fetch_git_refs(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_fetch_git_refs_async_pager(): + client = DeveloperConnectAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.fetch_git_refs), "__call__", new_callable=mock.AsyncMock + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + developer_connect.FetchGitRefsResponse( + ref_names=[ + str(), + str(), + str(), + ], + next_page_token="abc", + ), + developer_connect.FetchGitRefsResponse( + ref_names=[], + next_page_token="def", + ), + developer_connect.FetchGitRefsResponse( + ref_names=[ + str(), + ], + next_page_token="ghi", + ), + developer_connect.FetchGitRefsResponse( + ref_names=[ + str(), + str(), + ], + ), + RuntimeError, + ) + async_pager = await client.fetch_git_refs( + request={}, + ) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, str) for i in responses) + + +@pytest.mark.asyncio +async def test_fetch_git_refs_async_pages(): + client = DeveloperConnectAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.fetch_git_refs), "__call__", new_callable=mock.AsyncMock + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + developer_connect.FetchGitRefsResponse( + ref_names=[ + str(), + str(), + str(), + ], + next_page_token="abc", + ), + developer_connect.FetchGitRefsResponse( + ref_names=[], + next_page_token="def", + ), + developer_connect.FetchGitRefsResponse( + ref_names=[ + str(), + ], + next_page_token="ghi", + ), + developer_connect.FetchGitRefsResponse( + ref_names=[ + str(), + str(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.fetch_git_refs(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + developer_connect.ListConnectionsRequest, + dict, + ], +) +def test_list_connections_rest(request_type): + client = DeveloperConnectClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = developer_connect.ListConnectionsResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = developer_connect.ListConnectionsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_connections(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListConnectionsPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +def test_list_connections_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DeveloperConnectClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_connections in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.list_connections + ] = mock_rpc + + request = {} + client.list_connections(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_connections(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_list_connections_rest_required_fields( + request_type=developer_connect.ListConnectionsRequest, +): + transport_class = transports.DeveloperConnectRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_connections._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_connections._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
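+ # Only the optional query-string fields (filter, order_by, page_size,
+ # page_token) may still be reported as unset here; the required path
+ # field "parent" was populated above and must not reappear.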
+ assert not set(unset_fields) - set( + ( + "filter", + "order_by", + "page_size", + "page_token", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = DeveloperConnectClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = developer_connect.ListConnectionsResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = developer_connect.ListConnectionsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_connections(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_connections_rest_unset_required_fields(): + transport = transports.DeveloperConnectRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.list_connections._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "filter", + "orderBy", + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_connections_rest_interceptors(null_interceptor): + transport = transports.DeveloperConnectRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.DeveloperConnectRestInterceptor(), + ) + client = DeveloperConnectClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.DeveloperConnectRestInterceptor, "post_list_connections" + ) as post, mock.patch.object( + transports.DeveloperConnectRestInterceptor, "pre_list_connections" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = developer_connect.ListConnectionsRequest.pb( + developer_connect.ListConnectionsRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = developer_connect.ListConnectionsResponse.to_json( + developer_connect.ListConnectionsResponse() + ) + + request = developer_connect.ListConnectionsRequest() + metadata = [ + ("key", "val"), + 
("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = developer_connect.ListConnectionsResponse() + + client.list_connections( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_connections_rest_bad_request( + transport: str = "rest", request_type=developer_connect.ListConnectionsRequest +): + client = DeveloperConnectClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_connections(request) + + +def test_list_connections_rest_flattened(): + client = DeveloperConnectClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = developer_connect.ListConnectionsResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = developer_connect.ListConnectionsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.list_connections(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*}/connections" + % client.transport._host, + args[1], + ) + + +def test_list_connections_rest_flattened_error(transport: str = "rest"): + client = DeveloperConnectClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_connections( + developer_connect.ListConnectionsRequest(), + parent="parent_value", + ) + + +def test_list_connections_rest_pager(transport: str = "rest"): + client = DeveloperConnectClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + developer_connect.ListConnectionsResponse( + connections=[ + developer_connect.Connection(), + developer_connect.Connection(), + developer_connect.Connection(), + ], + next_page_token="abc", + ), + developer_connect.ListConnectionsResponse( + connections=[], + next_page_token="def", + ), + developer_connect.ListConnectionsResponse( + connections=[ + developer_connect.Connection(), + ], + next_page_token="ghi", + ), + developer_connect.ListConnectionsResponse( + connections=[ + developer_connect.Connection(), + developer_connect.Connection(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + developer_connect.ListConnectionsResponse.to_json(x) for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "projects/sample1/locations/sample2"} + + pager = client.list_connections(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, developer_connect.Connection) for i in results) + + pages = list(client.list_connections(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + developer_connect.GetConnectionRequest, + dict, + ], +) +def test_get_connection_rest(request_type): + client = DeveloperConnectClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/connections/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = developer_connect.Connection( + name="name_value", + disabled=True, + reconciling=True, + etag="etag_value", + uid="uid_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = developer_connect.Connection.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_connection(request) + + # Establish that the response is the type that we expect. 
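+ # The JSON payload faked above is parsed back into a proto-plus Connection,
+ # so every scalar set on the designated return value should round-trip
+ # unchanged.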
+ assert isinstance(response, developer_connect.Connection) + assert response.name == "name_value" + assert response.disabled is True + assert response.reconciling is True + assert response.etag == "etag_value" + assert response.uid == "uid_value" + + +def test_get_connection_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DeveloperConnectClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_connection in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get_connection] = mock_rpc + + request = {} + client.get_connection(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.get_connection(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_get_connection_rest_required_fields( + request_type=developer_connect.GetConnectionRequest, +): + transport_class = transports.DeveloperConnectRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_connection._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_connection._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = DeveloperConnectClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = developer_connect.Connection() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
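+ # In production transcode() would expand the method's http rule template
+ # (v1/{name=projects/*/locations/*/connections/*}, exercised by the
+ # flattened test below); the fixed "v1/sample_method" stub sidesteps that
+ # expansion so the assertions below can focus on the query parameters.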
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = developer_connect.Connection.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_connection(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_connection_rest_unset_required_fields(): + transport = transports.DeveloperConnectRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_connection._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_connection_rest_interceptors(null_interceptor): + transport = transports.DeveloperConnectRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.DeveloperConnectRestInterceptor(), + ) + client = DeveloperConnectClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.DeveloperConnectRestInterceptor, "post_get_connection" + ) as post, mock.patch.object( + transports.DeveloperConnectRestInterceptor, "pre_get_connection" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = developer_connect.GetConnectionRequest.pb( + developer_connect.GetConnectionRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = developer_connect.Connection.to_json( + developer_connect.Connection() + ) + + request = developer_connect.GetConnectionRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = developer_connect.Connection() + + client.get_connection( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_connection_rest_bad_request( + transport: str = "rest", request_type=developer_connect.GetConnectionRequest +): + client = DeveloperConnectClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/connections/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
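+ # A mocked 400 status is translated by the transport into
+ # core_exceptions.BadRequest, which the surrounding pytest.raises expects.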
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_connection(request) + + +def test_get_connection_rest_flattened(): + client = DeveloperConnectClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = developer_connect.Connection() + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/connections/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = developer_connect.Connection.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_connection(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/connections/*}" + % client.transport._host, + args[1], + ) + + +def test_get_connection_rest_flattened_error(transport: str = "rest"): + client = DeveloperConnectClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_connection( + developer_connect.GetConnectionRequest(), + name="name_value", + ) + + +def test_get_connection_rest_error(): + client = DeveloperConnectClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + developer_connect.CreateConnectionRequest, + dict, + ], +) +def test_create_connection_rest(request_type): + client = DeveloperConnectClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request_init["connection"] = { + "github_config": { + "github_app": 1, + "authorizer_credential": { + "oauth_token_secret_version": "oauth_token_secret_version_value", + "username": "username_value", + }, + "app_installation_id": 2014, + "installation_uri": "installation_uri_value", + }, + "name": "name_value", + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "delete_time": {}, + "labels": {}, + "installation_state": { + "stage": 1, + "message": "message_value", + "action_uri": "action_uri_value", + }, + "disabled": True, + "reconciling": True, + "annotations": {}, + "etag": "etag_value", + "uid": "uid_value", + } + # The version of a generated dependency at test runtime may differ from the version used during generation. 
+ # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = developer_connect.CreateConnectionRequest.meta.fields["connection"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["connection"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["connection"][field])): + del request_init["connection"][field][i][subfield] + else: + del request_init["connection"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.create_connection(request) + + # Establish that the response is the type that we expect. 
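+ # create_connection is a long-running operation, so the client returns a
+ # wrapper around the operations_pb2.Operation faked above, exposed via
+ # response.operation; its name is the only field verified here.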
+ assert response.operation.name == "operations/spam" + + +def test_create_connection_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DeveloperConnectClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.create_connection in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.create_connection + ] = mock_rpc + + request = {} + client.create_connection(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.create_connection(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_create_connection_rest_required_fields( + request_type=developer_connect.CreateConnectionRequest, +): + transport_class = transports.DeveloperConnectRestTransport + + request_init = {} + request_init["parent"] = "" + request_init["connection_id"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + assert "connectionId" not in jsonified_request + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_connection._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + assert "connectionId" in jsonified_request + assert jsonified_request["connectionId"] == request_init["connection_id"] + + jsonified_request["parent"] = "parent_value" + jsonified_request["connectionId"] = "connection_id_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_connection._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "connection_id", + "request_id", + "validate_only", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + assert "connectionId" in jsonified_request + assert jsonified_request["connectionId"] == "connection_id_value" + + client = DeveloperConnectClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. 
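+ # Patching requests' Session.request at the class level intercepts the
+ # call regardless of which session instance the transport constructed.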
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.create_connection(request) + + expected_params = [ + ( + "connectionId", + "", + ), + ("$alt", "json;enum-encoding=int"), + ] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_create_connection_rest_unset_required_fields(): + transport = transports.DeveloperConnectRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.create_connection._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "connectionId", + "requestId", + "validateOnly", + ) + ) + & set( + ( + "parent", + "connectionId", + "connection", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_connection_rest_interceptors(null_interceptor): + transport = transports.DeveloperConnectRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.DeveloperConnectRestInterceptor(), + ) + client = DeveloperConnectClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.DeveloperConnectRestInterceptor, "post_create_connection" + ) as post, mock.patch.object( + transports.DeveloperConnectRestInterceptor, "pre_create_connection" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = developer_connect.CreateConnectionRequest.pb( + developer_connect.CreateConnectionRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = developer_connect.CreateConnectionRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.create_connection( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_create_connection_rest_bad_request( + transport: str = "rest", request_type=developer_connect.CreateConnectionRequest +): + client = DeveloperConnectClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": 
"projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.create_connection(request) + + +def test_create_connection_rest_flattened(): + client = DeveloperConnectClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + connection=developer_connect.Connection( + github_config=developer_connect.GitHubConfig( + github_app=developer_connect.GitHubConfig.GitHubApp.DEVELOPER_CONNECT + ) + ), + connection_id="connection_id_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.create_connection(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*}/connections" + % client.transport._host, + args[1], + ) + + +def test_create_connection_rest_flattened_error(transport: str = "rest"): + client = DeveloperConnectClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.create_connection( + developer_connect.CreateConnectionRequest(), + parent="parent_value", + connection=developer_connect.Connection( + github_config=developer_connect.GitHubConfig( + github_app=developer_connect.GitHubConfig.GitHubApp.DEVELOPER_CONNECT + ) + ), + connection_id="connection_id_value", + ) + + +def test_create_connection_rest_error(): + client = DeveloperConnectClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + developer_connect.UpdateConnectionRequest, + dict, + ], +) +def test_update_connection_rest(request_type): + client = DeveloperConnectClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "connection": {"name": "projects/sample1/locations/sample2/connections/sample3"} + } + request_init["connection"] = { + "github_config": { + "github_app": 1, + "authorizer_credential": { + "oauth_token_secret_version": "oauth_token_secret_version_value", + "username": "username_value", + }, + "app_installation_id": 2014, + "installation_uri": "installation_uri_value", + }, + "name": "projects/sample1/locations/sample2/connections/sample3", + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "delete_time": {}, + "labels": {}, + "installation_state": { + "stage": 1, + "message": "message_value", + "action_uri": "action_uri_value", + }, + "disabled": True, + "reconciling": True, + "annotations": {}, + "etag": "etag_value", + "uid": "uid_value", + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = developer_connect.UpdateConnectionRequest.meta.fields["connection"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
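+ # proto-plus message wrappers expose nested fields via .meta.fields, while
+ # raw protobuf messages expose them via DESCRIPTOR.fields; both shapes are
+ # handled below.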
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["connection"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["connection"][field])): + del request_init["connection"][field][i][subfield] + else: + del request_init["connection"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.update_connection(request) + + # Establish that the response is the type that we expect. 
+ assert response.operation.name == "operations/spam" + + +def test_update_connection_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DeveloperConnectClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.update_connection in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.update_connection + ] = mock_rpc + + request = {} + client.update_connection(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.update_connection(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_update_connection_rest_required_fields( + request_type=developer_connect.UpdateConnectionRequest, +): + transport_class = transports.DeveloperConnectRestTransport + + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_connection._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_connection._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "allow_missing", + "request_id", + "update_mask", + "validate_only", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + + client = DeveloperConnectClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "patch", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.update_connection(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_update_connection_rest_unset_required_fields(): + transport = transports.DeveloperConnectRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.update_connection._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "allowMissing", + "requestId", + "updateMask", + "validateOnly", + ) + ) + & set( + ( + "updateMask", + "connection", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_connection_rest_interceptors(null_interceptor): + transport = transports.DeveloperConnectRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.DeveloperConnectRestInterceptor(), + ) + client = DeveloperConnectClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.DeveloperConnectRestInterceptor, "post_update_connection" + ) as post, mock.patch.object( + transports.DeveloperConnectRestInterceptor, "pre_update_connection" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = developer_connect.UpdateConnectionRequest.pb( + developer_connect.UpdateConnectionRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = developer_connect.UpdateConnectionRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.update_connection( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_update_connection_rest_bad_request( + transport: str = "rest", request_type=developer_connect.UpdateConnectionRequest +): + client = DeveloperConnectClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "connection": {"name": "projects/sample1/locations/sample2/connections/sample3"} + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.update_connection(request) + + +def test_update_connection_rest_flattened(): + client = DeveloperConnectClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = { + "connection": { + "name": "projects/sample1/locations/sample2/connections/sample3" + } + } + + # get truthy value for each flattened field + mock_args = dict( + connection=developer_connect.Connection( + github_config=developer_connect.GitHubConfig( + github_app=developer_connect.GitHubConfig.GitHubApp.DEVELOPER_CONNECT + ) + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.update_connection(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{connection.name=projects/*/locations/*/connections/*}" + % client.transport._host, + args[1], + ) + + +def test_update_connection_rest_flattened_error(transport: str = "rest"): + client = DeveloperConnectClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_connection( + developer_connect.UpdateConnectionRequest(), + connection=developer_connect.Connection( + github_config=developer_connect.GitHubConfig( + github_app=developer_connect.GitHubConfig.GitHubApp.DEVELOPER_CONNECT + ) + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +def test_update_connection_rest_error(): + client = DeveloperConnectClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + developer_connect.DeleteConnectionRequest, + dict, + ], +) +def test_delete_connection_rest(request_type): + client = DeveloperConnectClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/connections/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
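+ # delete_connection is also a long-running operation, so a bare
+ # operations_pb2.Operation carrying only a name is enough for the fake.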
+ return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.delete_connection(request) + + # Establish that the response is the type that we expect. + assert response.operation.name == "operations/spam" + + +def test_delete_connection_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DeveloperConnectClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete_connection in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.delete_connection + ] = mock_rpc + + request = {} + client.delete_connection(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete_connection(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_delete_connection_rest_required_fields( + request_type=developer_connect.DeleteConnectionRequest, +): + transport_class = transports.DeveloperConnectRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_connection._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_connection._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "etag", + "request_id", + "validate_only", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = DeveloperConnectClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "delete", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.delete_connection(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_delete_connection_rest_unset_required_fields(): + transport = transports.DeveloperConnectRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.delete_connection._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "etag", + "requestId", + "validateOnly", + ) + ) + & set(("name",)) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_connection_rest_interceptors(null_interceptor): + transport = transports.DeveloperConnectRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.DeveloperConnectRestInterceptor(), + ) + client = DeveloperConnectClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.DeveloperConnectRestInterceptor, "post_delete_connection" + ) as post, mock.patch.object( + transports.DeveloperConnectRestInterceptor, "pre_delete_connection" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = developer_connect.DeleteConnectionRequest.pb( + developer_connect.DeleteConnectionRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = developer_connect.DeleteConnectionRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.delete_connection( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_delete_connection_rest_bad_request( + transport: str = "rest", request_type=developer_connect.DeleteConnectionRequest +): + client = DeveloperConnectClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/connections/sample3"} + request = request_type(**request_init) + + # Mock the http request call 
within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_connection(request) + + +def test_delete_connection_rest_flattened(): + client = DeveloperConnectClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/connections/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.delete_connection(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/connections/*}" + % client.transport._host, + args[1], + ) + + +def test_delete_connection_rest_flattened_error(transport: str = "rest"): + client = DeveloperConnectClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_connection( + developer_connect.DeleteConnectionRequest(), + name="name_value", + ) + + +def test_delete_connection_rest_error(): + client = DeveloperConnectClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + developer_connect.CreateGitRepositoryLinkRequest, + dict, + ], +) +def test_create_git_repository_link_rest(request_type): + client = DeveloperConnectClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2/connections/sample3"} + request_init["git_repository_link"] = { + "name": "name_value", + "clone_uri": "clone_uri_value", + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "delete_time": {}, + "labels": {}, + "etag": "etag_value", + "reconciling": True, + "annotations": {}, + "uid": "uid_value", + } + # The version of a generated dependency at test runtime may differ from the version used during generation. 
+ # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = developer_connect.CreateGitRepositoryLinkRequest.meta.fields[ + "git_repository_link" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["git_repository_link"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["git_repository_link"][field])): + del request_init["git_repository_link"][field][i][subfield] + else: + del request_init["git_repository_link"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.create_git_repository_link(request) + + # Establish that the response is the type that we expect. 
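# The pruning loop above guards against version skew: the sample request was
# built from the proto used at generation time, so any nested key the runtime
# proto does not define is deleted before constructing the request. A small
# standalone sketch of the same idea; _KNOWN_FIELDS stands in for the names
# discovered via meta.fields / DESCRIPTOR.fields and is not real API data.

def _prune_unknown_subfields(payload, known_fields):
    """Keep only the keys of `payload` that the runtime message defines."""
    return {key: value for key, value in payload.items() if key in known_fields}

_sample = {
    "name": "name_value",
    "clone_uri": "clone_uri_value",
    "brand_new_field": "only known to a newer proto",  # gets dropped
}
_KNOWN_FIELDS = {"name", "clone_uri", "etag", "uid"}
assert _prune_unknown_subfields(_sample, _KNOWN_FIELDS) == {
    "name": "name_value",
    "clone_uri": "clone_uri_value",
}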
+ assert response.operation.name == "operations/spam" + + +def test_create_git_repository_link_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DeveloperConnectClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.create_git_repository_link + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.create_git_repository_link + ] = mock_rpc + + request = {} + client.create_git_repository_link(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.create_git_repository_link(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_create_git_repository_link_rest_required_fields( + request_type=developer_connect.CreateGitRepositoryLinkRequest, +): + transport_class = transports.DeveloperConnectRestTransport + + request_init = {} + request_init["parent"] = "" + request_init["git_repository_link_id"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + assert "gitRepositoryLinkId" not in jsonified_request + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_git_repository_link._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + assert "gitRepositoryLinkId" in jsonified_request + assert ( + jsonified_request["gitRepositoryLinkId"] + == request_init["git_repository_link_id"] + ) + + jsonified_request["parent"] = "parent_value" + jsonified_request["gitRepositoryLinkId"] = "git_repository_link_id_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_git_repository_link._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "git_repository_link_id", + "request_id", + "validate_only", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + assert "gitRepositoryLinkId" in jsonified_request + assert jsonified_request["gitRepositoryLinkId"] == "git_repository_link_id_value" + + client = DeveloperConnectClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. 
+ return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.create_git_repository_link(request) + + expected_params = [ + ( + "gitRepositoryLinkId", + "", + ), + ("$alt", "json;enum-encoding=int"), + ] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_create_git_repository_link_rest_unset_required_fields(): + transport = transports.DeveloperConnectRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.create_git_repository_link._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "gitRepositoryLinkId", + "requestId", + "validateOnly", + ) + ) + & set( + ( + "parent", + "gitRepositoryLink", + "gitRepositoryLinkId", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_git_repository_link_rest_interceptors(null_interceptor): + transport = transports.DeveloperConnectRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.DeveloperConnectRestInterceptor(), + ) + client = DeveloperConnectClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.DeveloperConnectRestInterceptor, "post_create_git_repository_link" + ) as post, mock.patch.object( + transports.DeveloperConnectRestInterceptor, "pre_create_git_repository_link" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = developer_connect.CreateGitRepositoryLinkRequest.pb( + developer_connect.CreateGitRepositoryLinkRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = developer_connect.CreateGitRepositoryLinkRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.create_git_repository_link( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_create_git_repository_link_rest_bad_request( + transport: str = 
"rest", + request_type=developer_connect.CreateGitRepositoryLinkRequest, +): + client = DeveloperConnectClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2/connections/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.create_git_repository_link(request) + + +def test_create_git_repository_link_rest_flattened(): + client = DeveloperConnectClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = { + "parent": "projects/sample1/locations/sample2/connections/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + git_repository_link=developer_connect.GitRepositoryLink(name="name_value"), + git_repository_link_id="git_repository_link_id_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.create_git_repository_link(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*/connections/*}/gitRepositoryLinks" + % client.transport._host, + args[1], + ) + + +def test_create_git_repository_link_rest_flattened_error(transport: str = "rest"): + client = DeveloperConnectClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.create_git_repository_link( + developer_connect.CreateGitRepositoryLinkRequest(), + parent="parent_value", + git_repository_link=developer_connect.GitRepositoryLink(name="name_value"), + git_repository_link_id="git_repository_link_id_value", + ) + + +def test_create_git_repository_link_rest_error(): + client = DeveloperConnectClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + developer_connect.DeleteGitRepositoryLinkRequest, + dict, + ], +) +def test_delete_git_repository_link_rest(request_type): + client = DeveloperConnectClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/connections/sample3/gitRepositoryLinks/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.delete_git_repository_link(request) + + # Establish that the response is the type that we expect. + assert response.operation.name == "operations/spam" + + +def test_delete_git_repository_link_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DeveloperConnectClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.delete_git_repository_link + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.delete_git_repository_link + ] = mock_rpc + + request = {} + client.delete_git_repository_link(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete_git_repository_link(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_delete_git_repository_link_rest_required_fields( + request_type=developer_connect.DeleteGitRepositoryLinkRequest, +): + transport_class = transports.DeveloperConnectRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_git_repository_link._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_git_repository_link._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "etag", + "request_id", + "validate_only", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = DeveloperConnectClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
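# Why transcode() gets patched in these required-fields tests: the real
# transcoder matches the request against the method's http_options and would
# reject placeholder values for required fields, so a canned result keeps the
# focus on query-param handling. A condensed sketch of the same patching
# pattern; the canned dict mirrors the shape used above and is not real data.

from unittest import mock

from google.api_core import path_template

_canned = {
    "uri": "v1/sample_method",              # no path params in the URI, so every
    "method": "delete",                     # request field lands in query_params
    "query_params": {"name": "name_value"},
}
with mock.patch.object(path_template, "transcode", return_value=_canned):
    result = path_template.transcode([], name="name_value")
    assert result["method"] == "delete"
    assert result["query_params"]["name"] == "name_value"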
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "delete", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.delete_git_repository_link(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_delete_git_repository_link_rest_unset_required_fields(): + transport = transports.DeveloperConnectRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.delete_git_repository_link._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "etag", + "requestId", + "validateOnly", + ) + ) + & set(("name",)) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_git_repository_link_rest_interceptors(null_interceptor): + transport = transports.DeveloperConnectRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.DeveloperConnectRestInterceptor(), + ) + client = DeveloperConnectClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.DeveloperConnectRestInterceptor, "post_delete_git_repository_link" + ) as post, mock.patch.object( + transports.DeveloperConnectRestInterceptor, "pre_delete_git_repository_link" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = developer_connect.DeleteGitRepositoryLinkRequest.pb( + developer_connect.DeleteGitRepositoryLinkRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = developer_connect.DeleteGitRepositoryLinkRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.delete_git_repository_link( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_delete_git_repository_link_rest_bad_request( + transport: str = "rest", + request_type=developer_connect.DeleteGitRepositoryLinkRequest, +): + client = DeveloperConnectClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/connections/sample3/gitRepositoryLinks/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
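# What the *_rest_bad_request tests depend on: the REST transport maps a
# non-2xx HTTP status onto the matching google.api_core exception, so a faked
# 400 surfaces as core_exceptions.BadRequest. A short illustration of that
# mapping using the public helper:

from google.api_core import exceptions as core_exceptions

assert isinstance(
    core_exceptions.from_http_status(400, "bad argument"),
    core_exceptions.BadRequest,
)
assert isinstance(
    core_exceptions.from_http_status(404, "missing"),
    core_exceptions.NotFound,
)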
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_git_repository_link(request) + + +def test_delete_git_repository_link_rest_flattened(): + client = DeveloperConnectClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/connections/sample3/gitRepositoryLinks/sample4" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.delete_git_repository_link(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/connections/*/gitRepositoryLinks/*}" + % client.transport._host, + args[1], + ) + + +def test_delete_git_repository_link_rest_flattened_error(transport: str = "rest"): + client = DeveloperConnectClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_git_repository_link( + developer_connect.DeleteGitRepositoryLinkRequest(), + name="name_value", + ) + + +def test_delete_git_repository_link_rest_error(): + client = DeveloperConnectClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + developer_connect.ListGitRepositoryLinksRequest, + dict, + ], +) +def test_list_git_repository_links_rest(request_type): + client = DeveloperConnectClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2/connections/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
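# The mocking pattern repeated throughout these REST tests: serialize the
# expected proto to JSON, stuff it into a requests.Response, and patch
# Session.request so the client never touches the network. A condensed,
# standalone sketch (the URL is a placeholder and is never contacted):

import json
from unittest import mock

from google.longrunning import operations_pb2
from google.protobuf import json_format
from requests import Response, Session

def _fake_response(message, status_code=200):
    resp = Response()
    resp.status_code = status_code
    resp._content = json_format.MessageToJson(message).encode("UTF-8")
    return resp

with mock.patch.object(Session, "request") as req:
    req.return_value = _fake_response(
        operations_pb2.Operation(name="operations/spam")
    )
    # anything routed through Session.request now receives the canned payload
    raw = Session().request("GET", "https://example.invalid")._content
    assert json.loads(raw)["name"] == "operations/spam"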
+ return_value = developer_connect.ListGitRepositoryLinksResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = developer_connect.ListGitRepositoryLinksResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_git_repository_links(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListGitRepositoryLinksPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +def test_list_git_repository_links_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DeveloperConnectClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.list_git_repository_links + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.list_git_repository_links + ] = mock_rpc + + request = {} + client.list_git_repository_links(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_git_repository_links(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_list_git_repository_links_rest_required_fields( + request_type=developer_connect.ListGitRepositoryLinksRequest, +): + transport_class = transports.DeveloperConnectRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_git_repository_links._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_git_repository_links._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
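# The *_unset_required_fields assertions are plain set algebra: the optional
# query parameters reported as unset may only overlap with the method's
# required fields. Two worked examples taken from assertions in this file:

# delete_connection: no optional query param is also required -> empty set.
assert ({"etag", "requestId", "validateOnly"} & {"name"}) == set()

# create_git_repository_link: gitRepositoryLinkId is both a defaulted query
# param and a required field, so it is the only element of the intersection.
assert (
    {"gitRepositoryLinkId", "requestId", "validateOnly"}
    & {"parent", "gitRepositoryLink", "gitRepositoryLinkId"}
) == {"gitRepositoryLinkId"}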
+ assert not set(unset_fields) - set( + ( + "filter", + "order_by", + "page_size", + "page_token", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = DeveloperConnectClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = developer_connect.ListGitRepositoryLinksResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = developer_connect.ListGitRepositoryLinksResponse.pb( + return_value + ) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_git_repository_links(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_git_repository_links_rest_unset_required_fields(): + transport = transports.DeveloperConnectRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.list_git_repository_links._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "filter", + "orderBy", + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_git_repository_links_rest_interceptors(null_interceptor): + transport = transports.DeveloperConnectRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.DeveloperConnectRestInterceptor(), + ) + client = DeveloperConnectClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.DeveloperConnectRestInterceptor, "post_list_git_repository_links" + ) as post, mock.patch.object( + transports.DeveloperConnectRestInterceptor, "pre_list_git_repository_links" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = developer_connect.ListGitRepositoryLinksRequest.pb( + developer_connect.ListGitRepositoryLinksRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = ( + developer_connect.ListGitRepositoryLinksResponse.to_json( + 
developer_connect.ListGitRepositoryLinksResponse() + ) + ) + + request = developer_connect.ListGitRepositoryLinksRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = developer_connect.ListGitRepositoryLinksResponse() + + client.list_git_repository_links( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_git_repository_links_rest_bad_request( + transport: str = "rest", + request_type=developer_connect.ListGitRepositoryLinksRequest, +): + client = DeveloperConnectClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2/connections/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_git_repository_links(request) + + +def test_list_git_repository_links_rest_flattened(): + client = DeveloperConnectClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = developer_connect.ListGitRepositoryLinksResponse() + + # get arguments that satisfy an http rule for this method + sample_request = { + "parent": "projects/sample1/locations/sample2/connections/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = developer_connect.ListGitRepositoryLinksResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.list_git_repository_links(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*/connections/*}/gitRepositoryLinks" + % client.transport._host, + args[1], + ) + + +def test_list_git_repository_links_rest_flattened_error(transport: str = "rest"): + client = DeveloperConnectClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_git_repository_links( + developer_connect.ListGitRepositoryLinksRequest(), + parent="parent_value", + ) + + +def test_list_git_repository_links_rest_pager(transport: str = "rest"): + client = DeveloperConnectClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + developer_connect.ListGitRepositoryLinksResponse( + git_repository_links=[ + developer_connect.GitRepositoryLink(), + developer_connect.GitRepositoryLink(), + developer_connect.GitRepositoryLink(), + ], + next_page_token="abc", + ), + developer_connect.ListGitRepositoryLinksResponse( + git_repository_links=[], + next_page_token="def", + ), + developer_connect.ListGitRepositoryLinksResponse( + git_repository_links=[ + developer_connect.GitRepositoryLink(), + ], + next_page_token="ghi", + ), + developer_connect.ListGitRepositoryLinksResponse( + git_repository_links=[ + developer_connect.GitRepositoryLink(), + developer_connect.GitRepositoryLink(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + developer_connect.ListGitRepositoryLinksResponse.to_json(x) + for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = { + "parent": "projects/sample1/locations/sample2/connections/sample3" + } + + pager = client.list_git_repository_links(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, developer_connect.GitRepositoryLink) for i in results) + + pages = list(client.list_git_repository_links(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + developer_connect.GetGitRepositoryLinkRequest, + dict, + ], +) +def test_get_git_repository_link_rest(request_type): + client = DeveloperConnectClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/connections/sample3/gitRepositoryLinks/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = developer_connect.GitRepositoryLink( + name="name_value", + clone_uri="clone_uri_value", + etag="etag_value", + reconciling=True, + uid="uid_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = developer_connect.GitRepositoryLink.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_git_repository_link(request) + + # Establish that the response is the type that we expect. 
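# These REST tests fake the wire format with json_format: the expected proto
# is rendered to JSON for the canned HTTP body, and the client parses it back
# into the response type. A tiny round trip with a well-known message shows
# both halves of that contract:

from google.longrunning import operations_pb2
from google.protobuf import json_format

_original = operations_pb2.Operation(name="operations/spam", done=True)
_wire_json = json_format.MessageToJson(_original)

_parsed = json_format.Parse(_wire_json, operations_pb2.Operation())
assert _parsed.name == "operations/spam"
assert _parsed.done is True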
+ assert isinstance(response, developer_connect.GitRepositoryLink) + assert response.name == "name_value" + assert response.clone_uri == "clone_uri_value" + assert response.etag == "etag_value" + assert response.reconciling is True + assert response.uid == "uid_value" + + +def test_get_git_repository_link_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DeveloperConnectClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.get_git_repository_link + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.get_git_repository_link + ] = mock_rpc + + request = {} + client.get_git_repository_link(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.get_git_repository_link(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_get_git_repository_link_rest_required_fields( + request_type=developer_connect.GetGitRepositoryLinkRequest, +): + transport_class = transports.DeveloperConnectRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_git_repository_link._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_git_repository_link._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = DeveloperConnectClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = developer_connect.GitRepositoryLink() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = developer_connect.GitRepositoryLink.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_git_repository_link(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_git_repository_link_rest_unset_required_fields(): + transport = transports.DeveloperConnectRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_git_repository_link._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_git_repository_link_rest_interceptors(null_interceptor): + transport = transports.DeveloperConnectRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.DeveloperConnectRestInterceptor(), + ) + client = DeveloperConnectClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.DeveloperConnectRestInterceptor, "post_get_git_repository_link" + ) as post, mock.patch.object( + transports.DeveloperConnectRestInterceptor, "pre_get_git_repository_link" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = developer_connect.GetGitRepositoryLinkRequest.pb( + developer_connect.GetGitRepositoryLinkRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = developer_connect.GitRepositoryLink.to_json( + developer_connect.GitRepositoryLink() + ) + + request = developer_connect.GetGitRepositoryLinkRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = developer_connect.GitRepositoryLink() + + client.get_git_repository_link( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_git_repository_link_rest_bad_request( + transport: str = "rest", request_type=developer_connect.GetGitRepositoryLinkRequest +): + client = DeveloperConnectClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/connections/sample3/gitRepositoryLinks/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
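# Shape of the hooks that the *_rest_interceptors tests drive: pre_* sees
# (request, metadata) before the HTTP call and may return modified values,
# post_* sees the response afterwards. The pipeline below is an illustrative
# stand-in, not the generated transport or interceptor classes.

class _SketchInterceptor:
    def pre_call(self, request, metadata):
        # e.g. append extra metadata, mirroring ("cephalopod", "squid") above
        return request, list(metadata) + [("cephalopod", "squid")]

    def post_call(self, response):
        return response

def _call_with_interceptor(request, metadata, interceptor, send):
    request, metadata = interceptor.pre_call(request, metadata)
    response = send(request, metadata)
    return interceptor.post_call(response)

_seen = {}

def _fake_send(request, metadata):
    _seen["metadata"] = metadata
    return {"name": "operations/spam"}

_result = _call_with_interceptor(
    {}, [("key", "val")], _SketchInterceptor(), _fake_send
)
assert ("cephalopod", "squid") in _seen["metadata"]
assert _result == {"name": "operations/spam"}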
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_git_repository_link(request) + + +def test_get_git_repository_link_rest_flattened(): + client = DeveloperConnectClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = developer_connect.GitRepositoryLink() + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/connections/sample3/gitRepositoryLinks/sample4" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = developer_connect.GitRepositoryLink.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_git_repository_link(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/connections/*/gitRepositoryLinks/*}" + % client.transport._host, + args[1], + ) + + +def test_get_git_repository_link_rest_flattened_error(transport: str = "rest"): + client = DeveloperConnectClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_git_repository_link( + developer_connect.GetGitRepositoryLinkRequest(), + name="name_value", + ) + + +def test_get_git_repository_link_rest_error(): + client = DeveloperConnectClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + developer_connect.FetchReadWriteTokenRequest, + dict, + ], +) +def test_fetch_read_write_token_rest(request_type): + client = DeveloperConnectClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "git_repository_link": "projects/sample1/locations/sample2/connections/sample3/gitRepositoryLinks/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = developer_connect.FetchReadWriteTokenResponse( + token="token_value", + git_username="git_username_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = developer_connect.FetchReadWriteTokenResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.fetch_read_write_token(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, developer_connect.FetchReadWriteTokenResponse) + assert response.token == "token_value" + assert response.git_username == "git_username_value" + + +def test_fetch_read_write_token_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DeveloperConnectClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.fetch_read_write_token + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.fetch_read_write_token + ] = mock_rpc + + request = {} + client.fetch_read_write_token(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.fetch_read_write_token(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_fetch_read_write_token_rest_required_fields( + request_type=developer_connect.FetchReadWriteTokenRequest, +): + transport_class = transports.DeveloperConnectRestTransport + + request_init = {} + request_init["git_repository_link"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).fetch_read_write_token._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["gitRepositoryLink"] = "git_repository_link_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).fetch_read_write_token._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "gitRepositoryLink" in jsonified_request + assert jsonified_request["gitRepositoryLink"] == "git_repository_link_value" + + client = DeveloperConnectClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. 
+ return_value = developer_connect.FetchReadWriteTokenResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = developer_connect.FetchReadWriteTokenResponse.pb( + return_value + ) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.fetch_read_write_token(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_fetch_read_write_token_rest_unset_required_fields(): + transport = transports.DeveloperConnectRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.fetch_read_write_token._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("gitRepositoryLink",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_fetch_read_write_token_rest_interceptors(null_interceptor): + transport = transports.DeveloperConnectRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.DeveloperConnectRestInterceptor(), + ) + client = DeveloperConnectClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.DeveloperConnectRestInterceptor, "post_fetch_read_write_token" + ) as post, mock.patch.object( + transports.DeveloperConnectRestInterceptor, "pre_fetch_read_write_token" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = developer_connect.FetchReadWriteTokenRequest.pb( + developer_connect.FetchReadWriteTokenRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = ( + developer_connect.FetchReadWriteTokenResponse.to_json( + developer_connect.FetchReadWriteTokenResponse() + ) + ) + + request = developer_connect.FetchReadWriteTokenRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = developer_connect.FetchReadWriteTokenResponse() + + client.fetch_read_write_token( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_fetch_read_write_token_rest_bad_request( + transport: str = "rest", request_type=developer_connect.FetchReadWriteTokenRequest +): + client = 
DeveloperConnectClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "git_repository_link": "projects/sample1/locations/sample2/connections/sample3/gitRepositoryLinks/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.fetch_read_write_token(request) + + +def test_fetch_read_write_token_rest_flattened(): + client = DeveloperConnectClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = developer_connect.FetchReadWriteTokenResponse() + + # get arguments that satisfy an http rule for this method + sample_request = { + "git_repository_link": "projects/sample1/locations/sample2/connections/sample3/gitRepositoryLinks/sample4" + } + + # get truthy value for each flattened field + mock_args = dict( + git_repository_link="git_repository_link_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = developer_connect.FetchReadWriteTokenResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.fetch_read_write_token(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{git_repository_link=projects/*/locations/*/connections/*/gitRepositoryLinks/*}:fetchReadWriteToken" + % client.transport._host, + args[1], + ) + + +def test_fetch_read_write_token_rest_flattened_error(transport: str = "rest"): + client = DeveloperConnectClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.fetch_read_write_token( + developer_connect.FetchReadWriteTokenRequest(), + git_repository_link="git_repository_link_value", + ) + + +def test_fetch_read_write_token_rest_error(): + client = DeveloperConnectClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + developer_connect.FetchReadTokenRequest, + dict, + ], +) +def test_fetch_read_token_rest(request_type): + client = DeveloperConnectClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "git_repository_link": "projects/sample1/locations/sample2/connections/sample3/gitRepositoryLinks/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = developer_connect.FetchReadTokenResponse( + token="token_value", + git_username="git_username_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = developer_connect.FetchReadTokenResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.fetch_read_token(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, developer_connect.FetchReadTokenResponse) + assert response.token == "token_value" + assert response.git_username == "git_username_value" + + +def test_fetch_read_token_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DeveloperConnectClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.fetch_read_token in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.fetch_read_token + ] = mock_rpc + + request = {} + client.fetch_read_token(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.fetch_read_token(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_fetch_read_token_rest_required_fields( + request_type=developer_connect.FetchReadTokenRequest, +): + transport_class = transports.DeveloperConnectRestTransport + + request_init = {} + request_init["git_repository_link"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).fetch_read_token._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["gitRepositoryLink"] = "git_repository_link_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).fetch_read_token._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "gitRepositoryLink" in jsonified_request + assert jsonified_request["gitRepositoryLink"] == "git_repository_link_value" + + client = DeveloperConnectClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = developer_connect.FetchReadTokenResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = developer_connect.FetchReadTokenResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.fetch_read_token(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_fetch_read_token_rest_unset_required_fields(): + transport = transports.DeveloperConnectRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.fetch_read_token._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("gitRepositoryLink",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_fetch_read_token_rest_interceptors(null_interceptor): + transport = transports.DeveloperConnectRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.DeveloperConnectRestInterceptor(), + ) + client = DeveloperConnectClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.DeveloperConnectRestInterceptor, "post_fetch_read_token" + ) as post, mock.patch.object( + transports.DeveloperConnectRestInterceptor, "pre_fetch_read_token" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = developer_connect.FetchReadTokenRequest.pb( + developer_connect.FetchReadTokenRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = developer_connect.FetchReadTokenResponse.to_json( + developer_connect.FetchReadTokenResponse() + ) + + request = developer_connect.FetchReadTokenRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = developer_connect.FetchReadTokenResponse() + + client.fetch_read_token( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_fetch_read_token_rest_bad_request( + transport: str = "rest", request_type=developer_connect.FetchReadTokenRequest +): + client = DeveloperConnectClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "git_repository_link": "projects/sample1/locations/sample2/connections/sample3/gitRepositoryLinks/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.fetch_read_token(request) + + +def test_fetch_read_token_rest_flattened(): + client = DeveloperConnectClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = developer_connect.FetchReadTokenResponse() + + # get arguments that satisfy an http rule for this method + sample_request = { + "git_repository_link": "projects/sample1/locations/sample2/connections/sample3/gitRepositoryLinks/sample4" + } + + # get truthy value for each flattened field + mock_args = dict( + git_repository_link="git_repository_link_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = developer_connect.FetchReadTokenResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.fetch_read_token(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{git_repository_link=projects/*/locations/*/connections/*/gitRepositoryLinks/*}:fetchReadToken" + % client.transport._host, + args[1], + ) + + +def test_fetch_read_token_rest_flattened_error(transport: str = "rest"): + client = DeveloperConnectClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.fetch_read_token( + developer_connect.FetchReadTokenRequest(), + git_repository_link="git_repository_link_value", + ) + + +def test_fetch_read_token_rest_error(): + client = DeveloperConnectClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + developer_connect.FetchLinkableGitRepositoriesRequest, + dict, + ], +) +def test_fetch_linkable_git_repositories_rest(request_type): + client = DeveloperConnectClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "connection": "projects/sample1/locations/sample2/connections/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = developer_connect.FetchLinkableGitRepositoriesResponse( + next_page_token="next_page_token_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = developer_connect.FetchLinkableGitRepositoriesResponse.pb( + return_value + ) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.fetch_linkable_git_repositories(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.FetchLinkableGitRepositoriesPager) + assert response.next_page_token == "next_page_token_value" + + +def test_fetch_linkable_git_repositories_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DeveloperConnectClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.fetch_linkable_git_repositories + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.fetch_linkable_git_repositories + ] = mock_rpc + + request = {} + client.fetch_linkable_git_repositories(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.fetch_linkable_git_repositories(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_fetch_linkable_git_repositories_rest_required_fields( + request_type=developer_connect.FetchLinkableGitRepositoriesRequest, +): + transport_class = transports.DeveloperConnectRestTransport + + request_init = {} + request_init["connection"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).fetch_linkable_git_repositories._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["connection"] = "connection_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).fetch_linkable_git_repositories._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set( + ( + "page_size", + "page_token", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "connection" in jsonified_request + assert jsonified_request["connection"] == "connection_value" + + client = DeveloperConnectClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = developer_connect.FetchLinkableGitRepositoriesResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = developer_connect.FetchLinkableGitRepositoriesResponse.pb( + return_value + ) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.fetch_linkable_git_repositories(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_fetch_linkable_git_repositories_rest_unset_required_fields(): + transport = transports.DeveloperConnectRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.fetch_linkable_git_repositories._get_unset_required_fields( + {} + ) + assert set(unset_fields) == ( + set( + ( + "pageSize", + "pageToken", + ) + ) + & set(("connection",)) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_fetch_linkable_git_repositories_rest_interceptors(null_interceptor): + transport = transports.DeveloperConnectRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.DeveloperConnectRestInterceptor(), + ) + client = DeveloperConnectClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.DeveloperConnectRestInterceptor, + "post_fetch_linkable_git_repositories", + ) as post, mock.patch.object( + transports.DeveloperConnectRestInterceptor, + "pre_fetch_linkable_git_repositories", + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = developer_connect.FetchLinkableGitRepositoriesRequest.pb( + developer_connect.FetchLinkableGitRepositoriesRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = ( + developer_connect.FetchLinkableGitRepositoriesResponse.to_json( 
+ developer_connect.FetchLinkableGitRepositoriesResponse() + ) + ) + + request = developer_connect.FetchLinkableGitRepositoriesRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = developer_connect.FetchLinkableGitRepositoriesResponse() + + client.fetch_linkable_git_repositories( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_fetch_linkable_git_repositories_rest_bad_request( + transport: str = "rest", + request_type=developer_connect.FetchLinkableGitRepositoriesRequest, +): + client = DeveloperConnectClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "connection": "projects/sample1/locations/sample2/connections/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.fetch_linkable_git_repositories(request) + + +def test_fetch_linkable_git_repositories_rest_flattened(): + client = DeveloperConnectClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = developer_connect.FetchLinkableGitRepositoriesResponse() + + # get arguments that satisfy an http rule for this method + sample_request = { + "connection": "projects/sample1/locations/sample2/connections/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + connection="connection_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = developer_connect.FetchLinkableGitRepositoriesResponse.pb( + return_value + ) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.fetch_linkable_git_repositories(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{connection=projects/*/locations/*/connections/*}:fetchLinkableGitRepositories" + % client.transport._host, + args[1], + ) + + +def test_fetch_linkable_git_repositories_rest_flattened_error(transport: str = "rest"): + client = DeveloperConnectClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.fetch_linkable_git_repositories( + developer_connect.FetchLinkableGitRepositoriesRequest(), + connection="connection_value", + ) + + +def test_fetch_linkable_git_repositories_rest_pager(transport: str = "rest"): + client = DeveloperConnectClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + developer_connect.FetchLinkableGitRepositoriesResponse( + linkable_git_repositories=[ + developer_connect.LinkableGitRepository(), + developer_connect.LinkableGitRepository(), + developer_connect.LinkableGitRepository(), + ], + next_page_token="abc", + ), + developer_connect.FetchLinkableGitRepositoriesResponse( + linkable_git_repositories=[], + next_page_token="def", + ), + developer_connect.FetchLinkableGitRepositoriesResponse( + linkable_git_repositories=[ + developer_connect.LinkableGitRepository(), + ], + next_page_token="ghi", + ), + developer_connect.FetchLinkableGitRepositoriesResponse( + linkable_git_repositories=[ + developer_connect.LinkableGitRepository(), + developer_connect.LinkableGitRepository(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + developer_connect.FetchLinkableGitRepositoriesResponse.to_json(x) + for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = { + "connection": "projects/sample1/locations/sample2/connections/sample3" + } + + pager = client.fetch_linkable_git_repositories(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all( + isinstance(i, developer_connect.LinkableGitRepository) for i in results + ) + + pages = list( + client.fetch_linkable_git_repositories(request=sample_request).pages + ) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + developer_connect.FetchGitHubInstallationsRequest, + dict, + ], +) +def test_fetch_git_hub_installations_rest(request_type): + client = DeveloperConnectClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "connection": "projects/sample1/locations/sample2/connections/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = developer_connect.FetchGitHubInstallationsResponse() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = developer_connect.FetchGitHubInstallationsResponse.pb( + return_value + ) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.fetch_git_hub_installations(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, developer_connect.FetchGitHubInstallationsResponse) + + +def test_fetch_git_hub_installations_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DeveloperConnectClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.fetch_git_hub_installations + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.fetch_git_hub_installations + ] = mock_rpc + + request = {} + client.fetch_git_hub_installations(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.fetch_git_hub_installations(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_fetch_git_hub_installations_rest_required_fields( + request_type=developer_connect.FetchGitHubInstallationsRequest, +): + transport_class = transports.DeveloperConnectRestTransport + + request_init = {} + request_init["connection"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).fetch_git_hub_installations._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["connection"] = "connection_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).fetch_git_hub_installations._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "connection" in jsonified_request + assert jsonified_request["connection"] == "connection_value" + + client = DeveloperConnectClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = developer_connect.FetchGitHubInstallationsResponse() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = developer_connect.FetchGitHubInstallationsResponse.pb( + return_value + ) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.fetch_git_hub_installations(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_fetch_git_hub_installations_rest_unset_required_fields(): + transport = transports.DeveloperConnectRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.fetch_git_hub_installations._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("connection",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_fetch_git_hub_installations_rest_interceptors(null_interceptor): + transport = transports.DeveloperConnectRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.DeveloperConnectRestInterceptor(), + ) + client = DeveloperConnectClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.DeveloperConnectRestInterceptor, "post_fetch_git_hub_installations" + ) as post, mock.patch.object( + transports.DeveloperConnectRestInterceptor, "pre_fetch_git_hub_installations" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = developer_connect.FetchGitHubInstallationsRequest.pb( + developer_connect.FetchGitHubInstallationsRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = ( + developer_connect.FetchGitHubInstallationsResponse.to_json( + developer_connect.FetchGitHubInstallationsResponse() + ) + ) + + request = developer_connect.FetchGitHubInstallationsRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = developer_connect.FetchGitHubInstallationsResponse() + + client.fetch_git_hub_installations( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_fetch_git_hub_installations_rest_bad_request( + transport: str = "rest", + request_type=developer_connect.FetchGitHubInstallationsRequest, +): + client = DeveloperConnectClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + 
+ # send a request that will satisfy transcoding + request_init = { + "connection": "projects/sample1/locations/sample2/connections/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.fetch_git_hub_installations(request) + + +def test_fetch_git_hub_installations_rest_flattened(): + client = DeveloperConnectClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = developer_connect.FetchGitHubInstallationsResponse() + + # get arguments that satisfy an http rule for this method + sample_request = { + "connection": "projects/sample1/locations/sample2/connections/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + connection="connection_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = developer_connect.FetchGitHubInstallationsResponse.pb( + return_value + ) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.fetch_git_hub_installations(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{connection=projects/*/locations/*/connections/*}:fetchGitHubInstallations" + % client.transport._host, + args[1], + ) + + +def test_fetch_git_hub_installations_rest_flattened_error(transport: str = "rest"): + client = DeveloperConnectClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.fetch_git_hub_installations( + developer_connect.FetchGitHubInstallationsRequest(), + connection="connection_value", + ) + + +def test_fetch_git_hub_installations_rest_error(): + client = DeveloperConnectClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + developer_connect.FetchGitRefsRequest, + dict, + ], +) +def test_fetch_git_refs_rest(request_type): + client = DeveloperConnectClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "git_repository_link": "projects/sample1/locations/sample2/connections/sample3/gitRepositoryLinks/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = developer_connect.FetchGitRefsResponse( + ref_names=["ref_names_value"], + next_page_token="next_page_token_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = developer_connect.FetchGitRefsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.fetch_git_refs(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.FetchGitRefsPager) + assert response.ref_names == ["ref_names_value"] + assert response.next_page_token == "next_page_token_value" + + +def test_fetch_git_refs_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DeveloperConnectClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.fetch_git_refs in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.fetch_git_refs] = mock_rpc + + request = {} + client.fetch_git_refs(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.fetch_git_refs(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_fetch_git_refs_rest_required_fields( + request_type=developer_connect.FetchGitRefsRequest, +): + transport_class = transports.DeveloperConnectRestTransport + + request_init = {} + request_init["git_repository_link"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).fetch_git_refs._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["gitRepositoryLink"] = "git_repository_link_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).fetch_git_refs._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set( + ( + "page_size", + "page_token", + "ref_type", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "gitRepositoryLink" in jsonified_request + assert jsonified_request["gitRepositoryLink"] == "git_repository_link_value" + + client = DeveloperConnectClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = developer_connect.FetchGitRefsResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = developer_connect.FetchGitRefsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.fetch_git_refs(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_fetch_git_refs_rest_unset_required_fields(): + transport = transports.DeveloperConnectRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.fetch_git_refs._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "pageSize", + "pageToken", + "refType", + ) + ) + & set( + ( + "gitRepositoryLink", + "refType", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_fetch_git_refs_rest_interceptors(null_interceptor): + transport = transports.DeveloperConnectRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.DeveloperConnectRestInterceptor(), + ) + client = DeveloperConnectClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.DeveloperConnectRestInterceptor, "post_fetch_git_refs" + ) as post, mock.patch.object( + transports.DeveloperConnectRestInterceptor, "pre_fetch_git_refs" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = developer_connect.FetchGitRefsRequest.pb( + developer_connect.FetchGitRefsRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = developer_connect.FetchGitRefsResponse.to_json( + developer_connect.FetchGitRefsResponse() + ) + + request = developer_connect.FetchGitRefsRequest() + metadata = [ + 
("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = developer_connect.FetchGitRefsResponse() + + client.fetch_git_refs( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_fetch_git_refs_rest_bad_request( + transport: str = "rest", request_type=developer_connect.FetchGitRefsRequest +): + client = DeveloperConnectClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "git_repository_link": "projects/sample1/locations/sample2/connections/sample3/gitRepositoryLinks/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.fetch_git_refs(request) + + +def test_fetch_git_refs_rest_flattened(): + client = DeveloperConnectClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = developer_connect.FetchGitRefsResponse() + + # get arguments that satisfy an http rule for this method + sample_request = { + "git_repository_link": "projects/sample1/locations/sample2/connections/sample3/gitRepositoryLinks/sample4" + } + + # get truthy value for each flattened field + mock_args = dict( + git_repository_link="git_repository_link_value", + ref_type=developer_connect.FetchGitRefsRequest.RefType.TAG, + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = developer_connect.FetchGitRefsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.fetch_git_refs(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{git_repository_link=projects/*/locations/*/connections/*/gitRepositoryLinks/*}:fetchGitRefs" + % client.transport._host, + args[1], + ) + + +def test_fetch_git_refs_rest_flattened_error(transport: str = "rest"): + client = DeveloperConnectClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.fetch_git_refs( + developer_connect.FetchGitRefsRequest(), + git_repository_link="git_repository_link_value", + ref_type=developer_connect.FetchGitRefsRequest.RefType.TAG, + ) + + +def test_fetch_git_refs_rest_pager(transport: str = "rest"): + client = DeveloperConnectClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + developer_connect.FetchGitRefsResponse( + ref_names=[ + str(), + str(), + str(), + ], + next_page_token="abc", + ), + developer_connect.FetchGitRefsResponse( + ref_names=[], + next_page_token="def", + ), + developer_connect.FetchGitRefsResponse( + ref_names=[ + str(), + ], + next_page_token="ghi", + ), + developer_connect.FetchGitRefsResponse( + ref_names=[ + str(), + str(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + developer_connect.FetchGitRefsResponse.to_json(x) for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = { + "git_repository_link": "projects/sample1/locations/sample2/connections/sample3/gitRepositoryLinks/sample4" + } + + pager = client.fetch_git_refs(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, str) for i in results) + + pages = list(client.fetch_git_refs(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.DeveloperConnectGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = DeveloperConnectClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.DeveloperConnectGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = DeveloperConnectClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. + transport = transports.DeveloperConnectGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = DeveloperConnectClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = DeveloperConnectClient( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.DeveloperConnectGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = DeveloperConnectClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. 
+ transport = transports.DeveloperConnectGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = DeveloperConnectClient(transport=transport) + assert client.transport is transport + + +def test_transport_get_channel(): + # A client may be instantiated with a custom transport instance. + transport = transports.DeveloperConnectGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + transport = transports.DeveloperConnectGrpcAsyncIOTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.DeveloperConnectGrpcTransport, + transports.DeveloperConnectGrpcAsyncIOTransport, + transports.DeveloperConnectRestTransport, + ], +) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "rest", + ], +) +def test_transport_kind(transport_name): + transport = DeveloperConnectClient.get_transport_class(transport_name)( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert transport.kind == transport_name + + +def test_transport_grpc_default(): + # A client should use the gRPC transport by default. + client = DeveloperConnectClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert isinstance( + client.transport, + transports.DeveloperConnectGrpcTransport, + ) + + +def test_developer_connect_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.DeveloperConnectTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json", + ) + + +def test_developer_connect_base_transport(): + # Instantiate the base transport. + with mock.patch( + "google.cloud.developerconnect_v1.services.developer_connect.transports.DeveloperConnectTransport.__init__" + ) as Transport: + Transport.return_value = None + transport = transports.DeveloperConnectTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. 
+ methods = ( + "list_connections", + "get_connection", + "create_connection", + "update_connection", + "delete_connection", + "create_git_repository_link", + "delete_git_repository_link", + "list_git_repository_links", + "get_git_repository_link", + "fetch_read_write_token", + "fetch_read_token", + "fetch_linkable_git_repositories", + "fetch_git_hub_installations", + "fetch_git_refs", + "get_location", + "list_locations", + "get_operation", + "cancel_operation", + "delete_operation", + "list_operations", + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + with pytest.raises(NotImplementedError): + transport.close() + + # Additionally, the LRO client (a property) should + # also raise NotImplementedError + with pytest.raises(NotImplementedError): + transport.operations_client + + # Catch all for all remaining methods and properties + remainder = [ + "kind", + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + + +def test_developer_connect_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch( + "google.cloud.developerconnect_v1.services.developer_connect.transports.DeveloperConnectTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.DeveloperConnectTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with( + "credentials.json", + scopes=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id="octopus", + ) + + +def test_developer_connect_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. + with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( + "google.cloud.developerconnect_v1.services.developer_connect.transports.DeveloperConnectTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.DeveloperConnectTransport() + adc.assert_called_once() + + +def test_developer_connect_auth_adc(): + # If no credentials are provided, we should use ADC credentials. + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + DeveloperConnectClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id=None, + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.DeveloperConnectGrpcTransport, + transports.DeveloperConnectGrpcAsyncIOTransport, + ], +) +def test_developer_connect_transport_auth_adc(transport_class): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
+ with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + adc.assert_called_once_with( + scopes=["1", "2"], + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id="octopus", + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.DeveloperConnectGrpcTransport, + transports.DeveloperConnectGrpcAsyncIOTransport, + transports.DeveloperConnectRestTransport, + ], +) +def test_developer_connect_transport_auth_gdch_credentials(transport_class): + host = "https://language.com" + api_audience_tests = [None, "https://language2.com"] + api_audience_expect = [host, "https://language2.com"] + for t, e in zip(api_audience_tests, api_audience_expect): + with mock.patch.object(google.auth, "default", autospec=True) as adc: + gdch_mock = mock.MagicMock() + type(gdch_mock).with_gdch_audience = mock.PropertyMock( + return_value=gdch_mock + ) + adc.return_value = (gdch_mock, None) + transport_class(host=host, api_audience=t) + gdch_mock.with_gdch_audience.assert_called_once_with(e) + + +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.DeveloperConnectGrpcTransport, grpc_helpers), + (transports.DeveloperConnectGrpcAsyncIOTransport, grpc_helpers_async), + ], +) +def test_developer_connect_transport_create_channel(transport_class, grpc_helpers): + # If credentials and host are not provided, the transport class should use + # ADC credentials. + with mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + + create_channel.assert_called_with( + "developerconnect.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + scopes=["1", "2"], + default_host="developerconnect.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.DeveloperConnectGrpcTransport, + transports.DeveloperConnectGrpcAsyncIOTransport, + ], +) +def test_developer_connect_grpc_transport_client_cert_source_for_mtls(transport_class): + cred = ga_credentials.AnonymousCredentials() + + # Check ssl_channel_credentials is used if provided. 
+    with mock.patch.object(transport_class, "create_channel") as mock_create_channel:
+        mock_ssl_channel_creds = mock.Mock()
+        transport_class(
+            host="squid.clam.whelk",
+            credentials=cred,
+            ssl_channel_credentials=mock_ssl_channel_creds,
+        )
+        mock_create_channel.assert_called_once_with(
+            "squid.clam.whelk:443",
+            credentials=cred,
+            credentials_file=None,
+            scopes=None,
+            ssl_credentials=mock_ssl_channel_creds,
+            quota_project_id=None,
+            options=[
+                ("grpc.max_send_message_length", -1),
+                ("grpc.max_receive_message_length", -1),
+            ],
+        )
+
+    # Check that client_cert_source_for_mtls is used if ssl_channel_credentials
+    # is not provided.
+    with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()):
+        with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred:
+            transport_class(
+                credentials=cred,
+                client_cert_source_for_mtls=client_cert_source_callback,
+            )
+            expected_cert, expected_key = client_cert_source_callback()
+            mock_ssl_cred.assert_called_once_with(
+                certificate_chain=expected_cert, private_key=expected_key
+            )
+
+
+def test_developer_connect_http_transport_client_cert_source_for_mtls():
+    cred = ga_credentials.AnonymousCredentials()
+    with mock.patch(
+        "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel"
+    ) as mock_configure_mtls_channel:
+        transports.DeveloperConnectRestTransport(
+            credentials=cred, client_cert_source_for_mtls=client_cert_source_callback
+        )
+        mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback)
+
+
+def test_developer_connect_rest_lro_client():
+    client = DeveloperConnectClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+    transport = client.transport
+
+    # Ensure that we have an api-core operations client.
+    assert isinstance(
+        transport.operations_client,
+        operations_v1.AbstractOperationsClient,
+    )
+
+    # Ensure that subsequent calls to the property return the exact same object.
+ assert transport.operations_client is transport.operations_client + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + "rest", + ], +) +def test_developer_connect_host_no_port(transport_name): + client = DeveloperConnectClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="developerconnect.googleapis.com" + ), + transport=transport_name, + ) + assert client.transport._host == ( + "developerconnect.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://developerconnect.googleapis.com" + ) + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + "rest", + ], +) +def test_developer_connect_host_with_port(transport_name): + client = DeveloperConnectClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="developerconnect.googleapis.com:8000" + ), + transport=transport_name, + ) + assert client.transport._host == ( + "developerconnect.googleapis.com:8000" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://developerconnect.googleapis.com:8000" + ) + + +@pytest.mark.parametrize( + "transport_name", + [ + "rest", + ], +) +def test_developer_connect_client_transport_session_collision(transport_name): + creds1 = ga_credentials.AnonymousCredentials() + creds2 = ga_credentials.AnonymousCredentials() + client1 = DeveloperConnectClient( + credentials=creds1, + transport=transport_name, + ) + client2 = DeveloperConnectClient( + credentials=creds2, + transport=transport_name, + ) + session1 = client1.transport.list_connections._session + session2 = client2.transport.list_connections._session + assert session1 != session2 + session1 = client1.transport.get_connection._session + session2 = client2.transport.get_connection._session + assert session1 != session2 + session1 = client1.transport.create_connection._session + session2 = client2.transport.create_connection._session + assert session1 != session2 + session1 = client1.transport.update_connection._session + session2 = client2.transport.update_connection._session + assert session1 != session2 + session1 = client1.transport.delete_connection._session + session2 = client2.transport.delete_connection._session + assert session1 != session2 + session1 = client1.transport.create_git_repository_link._session + session2 = client2.transport.create_git_repository_link._session + assert session1 != session2 + session1 = client1.transport.delete_git_repository_link._session + session2 = client2.transport.delete_git_repository_link._session + assert session1 != session2 + session1 = client1.transport.list_git_repository_links._session + session2 = client2.transport.list_git_repository_links._session + assert session1 != session2 + session1 = client1.transport.get_git_repository_link._session + session2 = client2.transport.get_git_repository_link._session + assert session1 != session2 + session1 = client1.transport.fetch_read_write_token._session + session2 = client2.transport.fetch_read_write_token._session + assert session1 != session2 + session1 = client1.transport.fetch_read_token._session + session2 = client2.transport.fetch_read_token._session + assert session1 != session2 + session1 = client1.transport.fetch_linkable_git_repositories._session + session2 = client2.transport.fetch_linkable_git_repositories._session + 
assert session1 != session2 + session1 = client1.transport.fetch_git_hub_installations._session + session2 = client2.transport.fetch_git_hub_installations._session + assert session1 != session2 + session1 = client1.transport.fetch_git_refs._session + session2 = client2.transport.fetch_git_refs._session + assert session1 != session2 + + +def test_developer_connect_grpc_transport_channel(): + channel = grpc.secure_channel("http://localhost/", grpc.local_channel_credentials()) + + # Check that channel is used if provided. + transport = transports.DeveloperConnectGrpcTransport( + host="squid.clam.whelk", + channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None + + +def test_developer_connect_grpc_asyncio_transport_channel(): + channel = aio.secure_channel("http://localhost/", grpc.local_channel_credentials()) + + # Check that channel is used if provided. + transport = transports.DeveloperConnectGrpcAsyncIOTransport( + host="squid.clam.whelk", + channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. +@pytest.mark.parametrize( + "transport_class", + [ + transports.DeveloperConnectGrpcTransport, + transports.DeveloperConnectGrpcAsyncIOTransport, + ], +) +def test_developer_connect_transport_channel_mtls_with_client_cert_source( + transport_class, +): + with mock.patch( + "grpc.ssl_channel_credentials", autospec=True + ) as grpc_ssl_channel_cred: + with mock.patch.object( + transport_class, "create_channel" + ) as grpc_create_channel: + mock_ssl_cred = mock.Mock() + grpc_ssl_channel_cred.return_value = mock_ssl_cred + + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + + cred = ga_credentials.AnonymousCredentials() + with pytest.warns(DeprecationWarning): + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (cred, None) + transport = transport_class( + host="squid.clam.whelk", + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=client_cert_source_callback, + ) + adc.assert_called_once() + + grpc_ssl_channel_cred.assert_called_once_with( + certificate_chain=b"cert bytes", private_key=b"key bytes" + ) + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + assert transport._ssl_channel_credentials == mock_ssl_cred + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. 
+@pytest.mark.parametrize(
+    "transport_class",
+    [
+        transports.DeveloperConnectGrpcTransport,
+        transports.DeveloperConnectGrpcAsyncIOTransport,
+    ],
+)
+def test_developer_connect_transport_channel_mtls_with_adc(transport_class):
+    mock_ssl_cred = mock.Mock()
+    with mock.patch.multiple(
+        "google.auth.transport.grpc.SslCredentials",
+        __init__=mock.Mock(return_value=None),
+        ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred),
+    ):
+        with mock.patch.object(
+            transport_class, "create_channel"
+        ) as grpc_create_channel:
+            mock_grpc_channel = mock.Mock()
+            grpc_create_channel.return_value = mock_grpc_channel
+            mock_cred = mock.Mock()
+
+            with pytest.warns(DeprecationWarning):
+                transport = transport_class(
+                    host="squid.clam.whelk",
+                    credentials=mock_cred,
+                    api_mtls_endpoint="mtls.squid.clam.whelk",
+                    client_cert_source=None,
+                )
+
+            grpc_create_channel.assert_called_once_with(
+                "mtls.squid.clam.whelk:443",
+                credentials=mock_cred,
+                credentials_file=None,
+                scopes=None,
+                ssl_credentials=mock_ssl_cred,
+                quota_project_id=None,
+                options=[
+                    ("grpc.max_send_message_length", -1),
+                    ("grpc.max_receive_message_length", -1),
+                ],
+            )
+            assert transport.grpc_channel == mock_grpc_channel
+
+
+def test_developer_connect_grpc_lro_client():
+    client = DeveloperConnectClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="grpc",
+    )
+    transport = client.transport
+
+    # Ensure that we have an api-core operations client.
+    assert isinstance(
+        transport.operations_client,
+        operations_v1.OperationsClient,
+    )
+
+    # Ensure that subsequent calls to the property return the exact same object.
+    assert transport.operations_client is transport.operations_client
+
+
+def test_developer_connect_grpc_lro_async_client():
+    client = DeveloperConnectAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="grpc_asyncio",
+    )
+    transport = client.transport
+
+    # Ensure that we have an api-core operations client.
+    assert isinstance(
+        transport.operations_client,
+        operations_v1.OperationsAsyncClient,
+    )
+
+    # Ensure that subsequent calls to the property return the exact same object.
+    assert transport.operations_client is transport.operations_client
+
+
+def test_connection_path():
+    project = "squid"
+    location = "clam"
+    connection = "whelk"
+    expected = (
+        "projects/{project}/locations/{location}/connections/{connection}".format(
+            project=project,
+            location=location,
+            connection=connection,
+        )
+    )
+    actual = DeveloperConnectClient.connection_path(project, location, connection)
+    assert expected == actual
+
+
+def test_parse_connection_path():
+    expected = {
+        "project": "octopus",
+        "location": "oyster",
+        "connection": "nudibranch",
+    }
+    path = DeveloperConnectClient.connection_path(**expected)
+
+    # Check that the path construction is reversible.
+ actual = DeveloperConnectClient.parse_connection_path(path) + assert expected == actual + + +def test_git_repository_link_path(): + project = "cuttlefish" + location = "mussel" + connection = "winkle" + git_repository_link = "nautilus" + expected = "projects/{project}/locations/{location}/connections/{connection}/gitRepositoryLinks/{git_repository_link}".format( + project=project, + location=location, + connection=connection, + git_repository_link=git_repository_link, + ) + actual = DeveloperConnectClient.git_repository_link_path( + project, location, connection, git_repository_link + ) + assert expected == actual + + +def test_parse_git_repository_link_path(): + expected = { + "project": "scallop", + "location": "abalone", + "connection": "squid", + "git_repository_link": "clam", + } + path = DeveloperConnectClient.git_repository_link_path(**expected) + + # Check that the path construction is reversible. + actual = DeveloperConnectClient.parse_git_repository_link_path(path) + assert expected == actual + + +def test_secret_version_path(): + project = "whelk" + secret = "octopus" + secret_version = "oyster" + expected = "projects/{project}/secrets/{secret}/versions/{secret_version}".format( + project=project, + secret=secret, + secret_version=secret_version, + ) + actual = DeveloperConnectClient.secret_version_path(project, secret, secret_version) + assert expected == actual + + +def test_parse_secret_version_path(): + expected = { + "project": "nudibranch", + "secret": "cuttlefish", + "secret_version": "mussel", + } + path = DeveloperConnectClient.secret_version_path(**expected) + + # Check that the path construction is reversible. + actual = DeveloperConnectClient.parse_secret_version_path(path) + assert expected == actual + + +def test_common_billing_account_path(): + billing_account = "winkle" + expected = "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + actual = DeveloperConnectClient.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "nautilus", + } + path = DeveloperConnectClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. + actual = DeveloperConnectClient.parse_common_billing_account_path(path) + assert expected == actual + + +def test_common_folder_path(): + folder = "scallop" + expected = "folders/{folder}".format( + folder=folder, + ) + actual = DeveloperConnectClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "abalone", + } + path = DeveloperConnectClient.common_folder_path(**expected) + + # Check that the path construction is reversible. + actual = DeveloperConnectClient.parse_common_folder_path(path) + assert expected == actual + + +def test_common_organization_path(): + organization = "squid" + expected = "organizations/{organization}".format( + organization=organization, + ) + actual = DeveloperConnectClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "clam", + } + path = DeveloperConnectClient.common_organization_path(**expected) + + # Check that the path construction is reversible. 
+ actual = DeveloperConnectClient.parse_common_organization_path(path) + assert expected == actual + + +def test_common_project_path(): + project = "whelk" + expected = "projects/{project}".format( + project=project, + ) + actual = DeveloperConnectClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "octopus", + } + path = DeveloperConnectClient.common_project_path(**expected) + + # Check that the path construction is reversible. + actual = DeveloperConnectClient.parse_common_project_path(path) + assert expected == actual + + +def test_common_location_path(): + project = "oyster" + location = "nudibranch" + expected = "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) + actual = DeveloperConnectClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "cuttlefish", + "location": "mussel", + } + path = DeveloperConnectClient.common_location_path(**expected) + + # Check that the path construction is reversible. + actual = DeveloperConnectClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object( + transports.DeveloperConnectTransport, "_prep_wrapped_messages" + ) as prep: + client = DeveloperConnectClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object( + transports.DeveloperConnectTransport, "_prep_wrapped_messages" + ) as prep: + transport_class = DeveloperConnectClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + +@pytest.mark.asyncio +async def test_transport_close_async(): + client = DeveloperConnectAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + with mock.patch.object( + type(getattr(client.transport, "grpc_channel")), "close" + ) as close: + async with client: + close.assert_not_called() + close.assert_called_once() + + +def test_get_location_rest_bad_request( + transport: str = "rest", request_type=locations_pb2.GetLocationRequest +): + client = DeveloperConnectClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_location(request) + + +@pytest.mark.parametrize( + "request_type", + [ + locations_pb2.GetLocationRequest, + dict, + ], +) +def test_get_location_rest(request_type): + client = DeveloperConnectClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {"name": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = locations_pb2.Location() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_location(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, locations_pb2.Location) + + +def test_list_locations_rest_bad_request( + transport: str = "rest", request_type=locations_pb2.ListLocationsRequest +): + client = DeveloperConnectClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict({"name": "projects/sample1"}, request) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_locations(request) + + +@pytest.mark.parametrize( + "request_type", + [ + locations_pb2.ListLocationsRequest, + dict, + ], +) +def test_list_locations_rest(request_type): + client = DeveloperConnectClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {"name": "projects/sample1"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = locations_pb2.ListLocationsResponse() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_locations(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, locations_pb2.ListLocationsResponse) + + +def test_cancel_operation_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.CancelOperationRequest +): + client = DeveloperConnectClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2/operations/sample3"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.cancel_operation(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.CancelOperationRequest, + dict, + ], +) +def test_cancel_operation_rest(request_type): + client = DeveloperConnectClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {"name": "projects/sample1/locations/sample2/operations/sample3"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "{}" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.cancel_operation(request) + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_operation_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.DeleteOperationRequest +): + client = DeveloperConnectClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2/operations/sample3"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_operation(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.DeleteOperationRequest, + dict, + ], +) +def test_delete_operation_rest(request_type): + client = DeveloperConnectClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {"name": "projects/sample1/locations/sample2/operations/sample3"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "{}" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.delete_operation(request) + + # Establish that the response is the type that we expect. 
+ assert response is None + + +def test_get_operation_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.GetOperationRequest +): + client = DeveloperConnectClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2/operations/sample3"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_operation(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.GetOperationRequest, + dict, + ], +) +def test_get_operation_rest(request_type): + client = DeveloperConnectClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {"name": "projects/sample1/locations/sample2/operations/sample3"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_operation(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + + +def test_list_operations_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.ListOperationsRequest +): + client = DeveloperConnectClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_operations(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.ListOperationsRequest, + dict, + ], +) +def test_list_operations_rest(request_type): + client = DeveloperConnectClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {"name": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = operations_pb2.ListOperationsResponse() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_operations(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) + + +def test_delete_operation(transport: str = "grpc"): + client = DeveloperConnectClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.DeleteOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.delete_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_delete_operation_async(transport: str = "grpc_asyncio"): + client = DeveloperConnectAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.DeleteOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_operation_field_headers(): + client = DeveloperConnectClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.DeleteOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + call.return_value = None + + client.delete_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_delete_operation_field_headers_async(): + client = DeveloperConnectAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.DeleteOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_delete_operation_from_dict(): + client = DeveloperConnectClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + + response = client.delete_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_delete_operation_from_dict_async(): + client = DeveloperConnectAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_cancel_operation(transport: str = "grpc"): + client = DeveloperConnectClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.CancelOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_cancel_operation_async(transport: str = "grpc_asyncio"): + client = DeveloperConnectAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.CancelOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +def test_cancel_operation_field_headers(): + client = DeveloperConnectClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.CancelOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + call.return_value = None + + client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_cancel_operation_field_headers_async(): + client = DeveloperConnectAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.CancelOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_cancel_operation_from_dict(): + client = DeveloperConnectClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + + response = client.cancel_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_cancel_operation_from_dict_async(): + client = DeveloperConnectAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.cancel_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_get_operation(transport: str = "grpc"): + client = DeveloperConnectClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.GetOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation() + response = client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + + +@pytest.mark.asyncio +async def test_get_operation_async(transport: str = "grpc_asyncio"): + client = DeveloperConnectAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.GetOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + response = await client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + + +def test_get_operation_field_headers(): + client = DeveloperConnectClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.GetOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + call.return_value = operations_pb2.Operation() + + client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_operation_field_headers_async(): + client = DeveloperConnectAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.GetOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + await client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_get_operation_from_dict(): + client = DeveloperConnectClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation() + + response = client.get_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_get_operation_from_dict_async(): + client = DeveloperConnectAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + response = await client.get_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_list_operations(transport: str = "grpc"): + client = DeveloperConnectClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.ListOperationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.ListOperationsResponse() + response = client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) + + +@pytest.mark.asyncio +async def test_list_operations_async(transport: str = "grpc_asyncio"): + client = DeveloperConnectAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.ListOperationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + response = await client.list_operations(request) + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) + + +def test_list_operations_field_headers(): + client = DeveloperConnectClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.ListOperationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + call.return_value = operations_pb2.ListOperationsResponse() + + client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_operations_field_headers_async(): + client = DeveloperConnectAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.ListOperationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + await client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_list_operations_from_dict(): + client = DeveloperConnectClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.ListOperationsResponse() + + response = client.list_operations( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_list_operations_from_dict_async(): + client = DeveloperConnectAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + response = await client.list_operations( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_list_locations(transport: str = "grpc"): + client = DeveloperConnectClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = locations_pb2.ListLocationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = locations_pb2.ListLocationsResponse() + response = client.list_locations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, locations_pb2.ListLocationsResponse) + + +@pytest.mark.asyncio +async def test_list_locations_async(transport: str = "grpc_asyncio"): + client = DeveloperConnectAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = locations_pb2.ListLocationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + locations_pb2.ListLocationsResponse() + ) + response = await client.list_locations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, locations_pb2.ListLocationsResponse) + + +def test_list_locations_field_headers(): + client = DeveloperConnectClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = locations_pb2.ListLocationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + call.return_value = locations_pb2.ListLocationsResponse() + + client.list_locations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_locations_field_headers_async(): + client = DeveloperConnectAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = locations_pb2.ListLocationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + locations_pb2.ListLocationsResponse() + ) + await client.list_locations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_list_locations_from_dict(): + client = DeveloperConnectClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = locations_pb2.ListLocationsResponse() + + response = client.list_locations( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_list_locations_from_dict_async(): + client = DeveloperConnectAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + locations_pb2.ListLocationsResponse() + ) + response = await client.list_locations( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_get_location(transport: str = "grpc"): + client = DeveloperConnectClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = locations_pb2.GetLocationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_location), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = locations_pb2.Location() + response = client.get_location(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, locations_pb2.Location) + + +@pytest.mark.asyncio +async def test_get_location_async(transport: str = "grpc_asyncio"): + client = DeveloperConnectAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = locations_pb2.GetLocationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_location), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + locations_pb2.Location() + ) + response = await client.get_location(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, locations_pb2.Location) + + +def test_get_location_field_headers(): + client = DeveloperConnectClient(credentials=ga_credentials.AnonymousCredentials()) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = locations_pb2.GetLocationRequest() + request.name = "locations/abc" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_location), "__call__") as call: + call.return_value = locations_pb2.Location() + + client.get_location(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations/abc", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_location_field_headers_async(): + client = DeveloperConnectAsyncClient( + credentials=ga_credentials.AnonymousCredentials() + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = locations_pb2.GetLocationRequest() + request.name = "locations/abc" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_location), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + locations_pb2.Location() + ) + await client.get_location(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations/abc", + ) in kw["metadata"] + + +def test_get_location_from_dict(): + client = DeveloperConnectClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = locations_pb2.Location() + + response = client.get_location( + request={ + "name": "locations/abc", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_get_location_from_dict_async(): + client = DeveloperConnectAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + locations_pb2.Location() + ) + response = await client.get_location( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_transport_close(): + transports = { + "rest": "_session", + "grpc": "_grpc_channel", + } + + for transport, close_name in transports.items(): + client = DeveloperConnectClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + with mock.patch.object( + type(getattr(client.transport, close_name)), "close" + ) as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +def test_client_ctx(): + transports = [ + "rest", + "grpc", + ] + for transport in transports: + client = DeveloperConnectClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + # Test client calls underlying transport. + with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() + + +@pytest.mark.parametrize( + "client_class,transport_class", + [ + (DeveloperConnectClient, transports.DeveloperConnectGrpcTransport), + (DeveloperConnectAsyncClient, transports.DeveloperConnectGrpcAsyncIOTransport), + ], +) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) diff --git a/packages/google-cloud-dialogflow-cx/CHANGELOG.md b/packages/google-cloud-dialogflow-cx/CHANGELOG.md index 4406751a8aaa..1425784acbab 100644 --- a/packages/google-cloud-dialogflow-cx/CHANGELOG.md +++ b/packages/google-cloud-dialogflow-cx/CHANGELOG.md @@ -1,5 +1,50 @@ # Changelog +## [1.34.0](https://github.com/googleapis/google-cloud-python/compare/google-cloud-dialogflow-cx-v1.33.0...google-cloud-dialogflow-cx-v1.34.0) (2024-06-25) + + +### Features + +* A new field `action_parameters` is added to message `.google.cloud.dialogflow.cx.v3beta1.PlaybookInput` ([119b895](https://github.com/googleapis/google-cloud-python/commit/119b895d5e7bf87fcc86de278fd3ff1b9407c3f6)) +* A new field `action_parameters` is added to message `.google.cloud.dialogflow.cx.v3beta1.PlaybookOutput` ([119b895](https://github.com/googleapis/google-cloud-python/commit/119b895d5e7bf87fcc86de278fd3ff1b9407c3f6)) +* A new field `endpointing_timeout_duration` is added to message `.google.cloud.dialogflow.cx.v3beta1.AdvancedSettings` ([119b895](https://github.com/googleapis/google-cloud-python/commit/119b895d5e7bf87fcc86de278fd3ff1b9407c3f6)) +* A new field `input_action_parameters` is added to message 
`.google.cloud.dialogflow.cx.v3beta1.FlowInvocation` ([119b895](https://github.com/googleapis/google-cloud-python/commit/119b895d5e7bf87fcc86de278fd3ff1b9407c3f6)) +* A new field `input_action_parameters` is added to message `.google.cloud.dialogflow.cx.v3beta1.ToolUse` ([119b895](https://github.com/googleapis/google-cloud-python/commit/119b895d5e7bf87fcc86de278fd3ff1b9407c3f6)) +* A new field `instruction` is added to message `.google.cloud.dialogflow.cx.v3beta1.Playbook` ([119b895](https://github.com/googleapis/google-cloud-python/commit/119b895d5e7bf87fcc86de278fd3ff1b9407c3f6)) +* A new field `interdigit_timeout_duration` is added to message `.google.cloud.dialogflow.cx.v3beta1.AdvancedSettings` ([119b895](https://github.com/googleapis/google-cloud-python/commit/119b895d5e7bf87fcc86de278fd3ff1b9407c3f6)) +* A new field `output_action_parameters` is added to message `.google.cloud.dialogflow.cx.v3beta1.FlowInvocation` ([119b895](https://github.com/googleapis/google-cloud-python/commit/119b895d5e7bf87fcc86de278fd3ff1b9407c3f6)) +* A new field `output_action_parameters` is added to message `.google.cloud.dialogflow.cx.v3beta1.ToolUse` ([119b895](https://github.com/googleapis/google-cloud-python/commit/119b895d5e7bf87fcc86de278fd3ff1b9407c3f6)) +* A new field `store_tts_audio` is added to message `.google.cloud.dialogflow.cx.v3beta1.SecuritySettings` ([119b895](https://github.com/googleapis/google-cloud-python/commit/119b895d5e7bf87fcc86de278fd3ff1b9407c3f6)) +* A new field `tool_call` is added to message `.google.cloud.dialogflow.cx.v3beta1.ResponseMessage` ([119b895](https://github.com/googleapis/google-cloud-python/commit/119b895d5e7bf87fcc86de278fd3ff1b9407c3f6)) +* A new message `Instruction` is added ([119b895](https://github.com/googleapis/google-cloud-python/commit/119b895d5e7bf87fcc86de278fd3ff1b9407c3f6)) +* A new message `ToolCall` is added ([119b895](https://github.com/googleapis/google-cloud-python/commit/119b895d5e7bf87fcc86de278fd3ff1b9407c3f6)) +* A new value `PLAYBOOK` is added to enum `MatchType` ([119b895](https://github.com/googleapis/google-cloud-python/commit/119b895d5e7bf87fcc86de278fd3ff1b9407c3f6)) +* added Conversation History API ([40d47d2](https://github.com/googleapis/google-cloud-python/commit/40d47d2ff0018e0ea4ddedfd7427e698aba3e533)) +* added Language Info to webhook Request ([40d47d2](https://github.com/googleapis/google-cloud-python/commit/40d47d2ff0018e0ea4ddedfd7427e698aba3e533)) + + +### Bug Fixes + +* An existing field `actions` is removed from message `.google.cloud.dialogflow.cx.v3beta1.Tool` ([119b895](https://github.com/googleapis/google-cloud-python/commit/119b895d5e7bf87fcc86de278fd3ff1b9407c3f6)) +* An existing field `input_parameters` is removed from message 
`.google.cloud.dialogflow.cx.v3beta1.FlowInvocation` ([119b895](https://github.com/googleapis/google-cloud-python/commit/119b895d5e7bf87fcc86de278fd3ff1b9407c3f6))
+* An existing field `input_parameters` is removed from message `.google.cloud.dialogflow.cx.v3beta1.ToolUse` ([119b895](https://github.com/googleapis/google-cloud-python/commit/119b895d5e7bf87fcc86de278fd3ff1b9407c3f6))
+* An existing field `output_parameters` is removed from message `.google.cloud.dialogflow.cx.v3beta1.FlowInvocation` ([119b895](https://github.com/googleapis/google-cloud-python/commit/119b895d5e7bf87fcc86de278fd3ff1b9407c3f6))
+* An existing field `output_parameters` is removed from message `.google.cloud.dialogflow.cx.v3beta1.ToolUse` ([119b895](https://github.com/googleapis/google-cloud-python/commit/119b895d5e7bf87fcc86de278fd3ff1b9407c3f6))
+* An existing field `parameters` is removed from message `.google.cloud.dialogflow.cx.v3beta1.PlaybookInput` ([119b895](https://github.com/googleapis/google-cloud-python/commit/119b895d5e7bf87fcc86de278fd3ff1b9407c3f6))
+* An existing field `parameters` is removed from message `.google.cloud.dialogflow.cx.v3beta1.PlaybookOutput` ([119b895](https://github.com/googleapis/google-cloud-python/commit/119b895d5e7bf87fcc86de278fd3ff1b9407c3f6))
+* An existing field `schemas` is removed from message `.google.cloud.dialogflow.cx.v3beta1.Tool` ([119b895](https://github.com/googleapis/google-cloud-python/commit/119b895d5e7bf87fcc86de278fd3ff1b9407c3f6))
+* An existing field `start_playbook` is moved into a oneof in message `.google.cloud.dialogflow.cx.v3beta1.Agent` ([119b895](https://github.com/googleapis/google-cloud-python/commit/119b895d5e7bf87fcc86de278fd3ff1b9407c3f6))
+* An existing field `steps` is removed from message `.google.cloud.dialogflow.cx.v3beta1.Playbook` ([119b895](https://github.com/googleapis/google-cloud-python/commit/119b895d5e7bf87fcc86de278fd3ff1b9407c3f6))
+* An existing field `start_flow` is moved into a oneof in message `.google.cloud.dialogflow.cx.v3beta1.Agent` ([119b895](https://github.com/googleapis/google-cloud-python/commit/119b895d5e7bf87fcc86de278fd3ff1b9407c3f6))
+* An existing message `ActionParameter` is removed ([119b895](https://github.com/googleapis/google-cloud-python/commit/119b895d5e7bf87fcc86de278fd3ff1b9407c3f6))
+
+
+### Documentation
+
+* A comment for field `start_flow` in message `.google.cloud.dialogflow.cx.v3beta1.Agent` is changed ([119b895](https://github.com/googleapis/google-cloud-python/commit/119b895d5e7bf87fcc86de278fd3ff1b9407c3f6))
+* A comment for field `start_playbook` in message `.google.cloud.dialogflow.cx.v3beta1.Agent` is changed ([119b895](https://github.com/googleapis/google-cloud-python/commit/119b895d5e7bf87fcc86de278fd3ff1b9407c3f6))
+* Improve documentation for `SecuritySettings.retention_window_days`
([40d47d2](https://github.com/googleapis/google-cloud-python/commit/40d47d2ff0018e0ea4ddedfd7427e698aba3e533)) + ## [1.33.0](https://github.com/googleapis/google-cloud-python/compare/google-cloud-dialogflow-cx-v1.32.1...google-cloud-dialogflow-cx-v1.33.0) (2024-03-27) diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx/gapic_version.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx/gapic_version.py index 558c8aab67c5..bcfde67a3bef 100644 --- a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx/gapic_version.py +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.0.0" # {x-release-please-version} +__version__ = "1.34.0" # {x-release-please-version} diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/gapic_version.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/gapic_version.py index 558c8aab67c5..bcfde67a3bef 100644 --- a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/gapic_version.py +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.0.0" # {x-release-please-version} +__version__ = "1.34.0" # {x-release-please-version} diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/__init__.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/__init__.py index 9ca8a02a55a1..2dc59841b14c 100644 --- a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/__init__.py +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/__init__.py @@ -144,7 +144,6 @@ ) from .types.example import ( Action, - ActionParameter, AgentUtterance, CreateExampleRequest, DeleteExampleRequest, @@ -352,7 +351,7 @@ Tool, UpdateToolRequest, ) -from .types.tool_call import ToolCallResult +from .types.tool_call import ToolCall, ToolCallResult from .types.transition_route_group import ( CreateTransitionRouteGroupRequest, DeleteTransitionRouteGroupRequest, @@ -414,7 +413,6 @@ "VersionsAsyncClient", "WebhooksAsyncClient", "Action", - "ActionParameter", "AdvancedSettings", "Agent", "AgentUtterance", @@ -690,6 +688,7 @@ "TextInput", "TextToSpeechSettings", "Tool", + "ToolCall", "ToolCallResult", "ToolUse", "ToolsClient", diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/gapic_version.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/gapic_version.py index 558c8aab67c5..bcfde67a3bef 100644 --- a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/gapic_version.py +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "0.0.0" # {x-release-please-version} +__version__ = "1.34.0" # {x-release-please-version} diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/services/agents/async_client.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/services/agents/async_client.py index 84a26f592313..bb8c2f9ee711 100644 --- a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/services/agents/async_client.py +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/services/agents/async_client.py @@ -563,6 +563,7 @@ async def sample_create_agent(): # Initialize request argument(s) agent = dialogflowcx_v3beta1.Agent() + agent.start_flow = "start_flow_value" agent.display_name = "display_name_value" agent.default_language_code = "default_language_code_value" agent.time_zone = "time_zone_value" @@ -700,6 +701,7 @@ async def sample_update_agent(): # Initialize request argument(s) agent = dialogflowcx_v3beta1.Agent() + agent.start_flow = "start_flow_value" agent.display_name = "display_name_value" agent.default_language_code = "default_language_code_value" agent.time_zone = "time_zone_value" diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/services/agents/client.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/services/agents/client.py index 2a1925e72d7d..7d49fad60a1b 100644 --- a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/services/agents/client.py +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/services/agents/client.py @@ -1118,6 +1118,7 @@ def sample_create_agent(): # Initialize request argument(s) agent = dialogflowcx_v3beta1.Agent() + agent.start_flow = "start_flow_value" agent.display_name = "display_name_value" agent.default_language_code = "default_language_code_value" agent.time_zone = "time_zone_value" @@ -1252,6 +1253,7 @@ def sample_update_agent(): # Initialize request argument(s) agent = dialogflowcx_v3beta1.Agent() + agent.start_flow = "start_flow_value" agent.display_name = "display_name_value" agent.default_language_code = "default_language_code_value" agent.time_zone = "time_zone_value" diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/services/flows/async_client.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/services/flows/async_client.py index 891926dbe9bc..653b93229767 100644 --- a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/services/flows/async_client.py +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/services/flows/async_client.py @@ -87,6 +87,8 @@ class FlowsAsyncClient: parse_intent_path = staticmethod(FlowsClient.parse_intent_path) page_path = staticmethod(FlowsClient.page_path) parse_page_path = staticmethod(FlowsClient.parse_page_path) + tool_path = staticmethod(FlowsClient.tool_path) + parse_tool_path = staticmethod(FlowsClient.parse_tool_path) transition_route_group_path = staticmethod(FlowsClient.transition_route_group_path) parse_transition_route_group_path = staticmethod( FlowsClient.parse_transition_route_group_path diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/services/flows/client.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/services/flows/client.py index 9259b3a22ab6..61298efb4c04 100644 --- a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/services/flows/client.py 
+++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/services/flows/client.py
@@ -293,6 +293,30 @@ def parse_page_path(path: str) -> Dict[str, str]:
         )
         return m.groupdict() if m else {}
 
+    @staticmethod
+    def tool_path(
+        project: str,
+        location: str,
+        agent: str,
+        tool: str,
+    ) -> str:
+        """Returns a fully-qualified tool string."""
+        return "projects/{project}/locations/{location}/agents/{agent}/tools/{tool}".format(
+            project=project,
+            location=location,
+            agent=agent,
+            tool=tool,
+        )
+
+    @staticmethod
+    def parse_tool_path(path: str) -> Dict[str, str]:
+        """Parses a tool path into its component segments."""
+        m = re.match(
+            r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/agents/(?P<agent>.+?)/tools/(?P<tool>.+?)$",
+            path,
+        )
+        return m.groupdict() if m else {}
+
     @staticmethod
     def transition_route_group_path(
         project: str,
diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/services/pages/async_client.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/services/pages/async_client.py
index 05d3d31ce1fd..a586118402d4 100644
--- a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/services/pages/async_client.py
+++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/services/pages/async_client.py
@@ -79,6 +79,8 @@ class PagesAsyncClient:
     parse_intent_path = staticmethod(PagesClient.parse_intent_path)
     page_path = staticmethod(PagesClient.page_path)
     parse_page_path = staticmethod(PagesClient.parse_page_path)
+    tool_path = staticmethod(PagesClient.tool_path)
+    parse_tool_path = staticmethod(PagesClient.parse_tool_path)
     transition_route_group_path = staticmethod(PagesClient.transition_route_group_path)
     parse_transition_route_group_path = staticmethod(
         PagesClient.parse_transition_route_group_path
diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/services/pages/client.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/services/pages/client.py
index 5fa6b7454be2..81147a8ff58e 100644
--- a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/services/pages/client.py
+++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/services/pages/client.py
@@ -287,6 +287,30 @@ def parse_page_path(path: str) -> Dict[str, str]:
         )
         return m.groupdict() if m else {}
 
+    @staticmethod
+    def tool_path(
+        project: str,
+        location: str,
+        agent: str,
+        tool: str,
+    ) -> str:
+        """Returns a fully-qualified tool string."""
+        return "projects/{project}/locations/{location}/agents/{agent}/tools/{tool}".format(
+            project=project,
+            location=location,
+            agent=agent,
+            tool=tool,
+        )
+
+    @staticmethod
+    def parse_tool_path(path: str) -> Dict[str, str]:
+        """Parses a tool path into its component segments."""
+        m = re.match(
+            r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/agents/(?P<agent>.+?)/tools/(?P<tool>.+?)$",
+            path,
+        )
+        return m.groupdict() if m else {}
+
     @staticmethod
     def transition_route_group_path(
         project: str,
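The same `tool_path` and `parse_tool_path` helpers are added to the Flows, Pages, and TransitionRouteGroups clients in the hunks above and below, mirroring the existing `*_path` / `parse_*_path` pattern on these clients (for example `page_path` / `parse_page_path`). A minimal usage sketch follows; the resource IDs are made-up example values, not part of this change:

```python
from google.cloud import dialogflowcx_v3beta1

# Build a fully-qualified tool resource name from its parts
# (all IDs here are hypothetical example values).
name = dialogflowcx_v3beta1.FlowsClient.tool_path(
    project="my-project",
    location="us-central1",
    agent="my-agent-id",
    tool="my-tool-id",
)
# name == "projects/my-project/locations/us-central1/agents/my-agent-id/tools/my-tool-id"

# parse_tool_path is the inverse: it splits a resource name into its segments.
assert dialogflowcx_v3beta1.FlowsClient.parse_tool_path(name) == {
    "project": "my-project",
    "location": "us-central1",
    "agent": "my-agent-id",
    "tool": "my-tool-id",
}
```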
diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/services/transition_route_groups/async_client.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/services/transition_route_groups/async_client.py
index 69a5b64e10a3..68df593ad54c 100644
--- a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/services/transition_route_groups/async_client.py
+++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/services/transition_route_groups/async_client.py
@@ -79,6 +79,8 @@ class TransitionRouteGroupsAsyncClient:
     parse_intent_path = staticmethod(TransitionRouteGroupsClient.parse_intent_path)
     page_path = staticmethod(TransitionRouteGroupsClient.page_path)
     parse_page_path = staticmethod(TransitionRouteGroupsClient.parse_page_path)
+    tool_path = staticmethod(TransitionRouteGroupsClient.tool_path)
+    parse_tool_path = staticmethod(TransitionRouteGroupsClient.parse_tool_path)
     transition_route_group_path = staticmethod(
         TransitionRouteGroupsClient.transition_route_group_path
     )
diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/services/transition_route_groups/client.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/services/transition_route_groups/client.py
index 5ca7709bab18..622da6492ba2 100644
--- a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/services/transition_route_groups/client.py
+++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/services/transition_route_groups/client.py
@@ -267,6 +267,30 @@ def parse_page_path(path: str) -> Dict[str, str]:
         )
         return m.groupdict() if m else {}
 
+    @staticmethod
+    def tool_path(
+        project: str,
+        location: str,
+        agent: str,
+        tool: str,
+    ) -> str:
+        """Returns a fully-qualified tool string."""
+        return "projects/{project}/locations/{location}/agents/{agent}/tools/{tool}".format(
+            project=project,
+            location=location,
+            agent=agent,
+            tool=tool,
+        )
+
+    @staticmethod
+    def parse_tool_path(path: str) -> Dict[str, str]:
+        """Parses a tool path into its component segments."""
+        m = re.match(
+            r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/agents/(?P<agent>.+?)/tools/(?P<tool>.+?)$",
+            path,
+        )
+        return m.groupdict() if m else {}
+
     @staticmethod
     def transition_route_group_path(
         project: str,
diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/types/__init__.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/types/__init__.py
index 4c5be81889b9..05fe9482653a 100644
--- a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/types/__init__.py
+++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/types/__init__.py
@@ -106,7 +106,6 @@
 )
 from .example import (
     Action,
-    ActionParameter,
     AgentUtterance,
     CreateExampleRequest,
     DeleteExampleRequest,
@@ -314,7 +313,7 @@
     Tool,
     UpdateToolRequest,
 )
-from .tool_call import ToolCallResult
+from .tool_call import ToolCall, ToolCallResult
 from .transition_route_group import (
     CreateTransitionRouteGroupRequest,
     DeleteTransitionRouteGroupRequest,
@@ -430,7 +429,6 @@
     "RunContinuousTestResponse",
     "UpdateEnvironmentRequest",
     "Action",
-    "ActionParameter",
     "AgentUtterance",
     "CreateExampleRequest",
     "DeleteExampleRequest",
@@ -617,6 +615,7 @@
     "ListToolsResponse",
     "Tool",
     "UpdateToolRequest",
+    "ToolCall",
     "ToolCallResult",
     "CreateTransitionRouteGroupRequest",
     "DeleteTransitionRouteGroupRequest",
diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/types/advanced_settings.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/types/advanced_settings.py
index 1f0e83d6d005..f7efb34d9bba 100644
--- a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/types/advanced_settings.py
+++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/types/advanced_settings.py
@@ -135,6 +135,12 @@ class DtmfSettings(proto.Message):
             finish_digit (str):
                 The digit that terminates a DTMF digit sequence.
+ interdigit_timeout_duration (google.protobuf.duration_pb2.Duration): + Interdigit timeout setting for matching dtmf + input to regex. + endpointing_timeout_duration (google.protobuf.duration_pb2.Duration): + Endpoint timeout setting for matching dtmf + input to regex. """ enabled: bool = proto.Field( @@ -149,6 +155,16 @@ class DtmfSettings(proto.Message): proto.STRING, number=3, ) + interdigit_timeout_duration: duration_pb2.Duration = proto.Field( + proto.MESSAGE, + number=6, + message=duration_pb2.Duration, + ) + endpointing_timeout_duration: duration_pb2.Duration = proto.Field( + proto.MESSAGE, + number=7, + message=duration_pb2.Duration, + ) class LoggingSettings(proto.Message): r"""Define behaviors on logging. diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/types/agent.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/types/agent.py index 463f37afa5d0..336ac02033b1 100644 --- a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/types/agent.py +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/types/agent.py @@ -82,6 +82,10 @@ class Agent(proto.Message): [TransitionRouteGroups][google.cloud.dialogflow.cx.v3beta1.TransitionRouteGroup] and so on to manage the conversation flows. + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields @@ -123,22 +127,23 @@ class Agent(proto.Message): speech_to_text_settings (google.cloud.dialogflowcx_v3beta1.types.SpeechToTextSettings): Speech recognition related settings. start_flow (str): - Immutable. Name of the start flow in this agent. A start - flow will be automatically created when the agent is - created, and can only be deleted by deleting the agent. - Format: + Name of the start flow in this agent. A start flow will be + automatically created when the agent is created, and can + only be deleted by deleting the agent. Format: ``projects//locations//agents//flows/``. + Currently only the default start flow with id + "00000000-0000-0000-0000-000000000000" is allowed. + + This field is a member of `oneof`_ ``session_entry_resource``. start_playbook (str): - Optional. Name of the start playbook in this agent. A start - playbook will be automatically created when the agent is - created, and can only be deleted by deleting the agent. - Format: + Name of the start playbook in this agent. A start playbook + will be automatically created when the agent is created, and + can only be deleted by deleting the agent. Format: ``projects//locations//agents//playbooks/``. Currently only the default playbook with id "00000000-0000-0000-0000-000000000000" is allowed. - Only one of ``start_flow`` or ``start_playbook`` should be - set, but not both. + This field is a member of `oneof`_ ``session_entry_resource``. 
security_settings (str): Name of the [SecuritySettings][google.cloud.dialogflow.cx.v3beta1.SecuritySettings] @@ -334,10 +339,12 @@ class PersonalizationSettings(proto.Message): start_flow: str = proto.Field( proto.STRING, number=16, + oneof="session_entry_resource", ) start_playbook: str = proto.Field( proto.STRING, number=39, + oneof="session_entry_resource", ) security_settings: str = proto.Field( proto.STRING, diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/types/example.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/types/example.py index 498c86c77018..2f68694d2dd6 100644 --- a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/types/example.py +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/types/example.py @@ -39,7 +39,6 @@ "UserUtterance", "AgentUtterance", "ToolUse", - "ActionParameter", "PlaybookInvocation", "FlowInvocation", }, @@ -327,19 +326,19 @@ class PlaybookInput(proto.Message): preceding_conversation_summary (str): Optional. Summary string of the preceding conversation for the child playbook invocation. - parameters (MutableSequence[google.cloud.dialogflowcx_v3beta1.types.ActionParameter]): + action_parameters (google.protobuf.struct_pb2.Struct): Optional. A list of input parameters for the - invocation. + action. """ preceding_conversation_summary: str = proto.Field( proto.STRING, number=1, ) - parameters: MutableSequence["ActionParameter"] = proto.RepeatedField( + action_parameters: struct_pb2.Struct = proto.Field( proto.MESSAGE, - number=2, - message="ActionParameter", + number=3, + message=struct_pb2.Struct, ) @@ -350,19 +349,19 @@ class PlaybookOutput(proto.Message): execution_summary (str): Optional. Summary string of the execution result of the child playbook. - parameters (MutableSequence[google.cloud.dialogflowcx_v3beta1.types.ActionParameter]): - Optional. A list of output parameters for the - invocation. + action_parameters (google.protobuf.struct_pb2.Struct): + Optional. A Struct object of output + parameters for the action. """ execution_summary: str = proto.Field( proto.STRING, number=1, ) - parameters: MutableSequence["ActionParameter"] = proto.RepeatedField( + action_parameters: struct_pb2.Struct = proto.Field( proto.MESSAGE, - number=3, - message="ActionParameter", + number=4, + message=struct_pb2.Struct, ) @@ -478,11 +477,12 @@ class ToolUse(proto.Message): action (str): Optional. Name of the action to be called during the tool use. - input_parameters (MutableSequence[google.cloud.dialogflowcx_v3beta1.types.ActionParameter]): - A list of input parameters for the action. - output_parameters (MutableSequence[google.cloud.dialogflowcx_v3beta1.types.ActionParameter]): - A list of output parameters generated by the + input_action_parameters (google.protobuf.struct_pb2.Struct): + Optional. A list of input parameters for the action. + output_action_parameters (google.protobuf.struct_pb2.Struct): + Optional. A list of output parameters + generated by the action. 
""" tool: str = proto.Field( @@ -493,36 +493,15 @@ class ToolUse(proto.Message): proto.STRING, number=2, ) - input_parameters: MutableSequence["ActionParameter"] = proto.RepeatedField( + input_action_parameters: struct_pb2.Struct = proto.Field( proto.MESSAGE, - number=3, - message="ActionParameter", - ) - output_parameters: MutableSequence["ActionParameter"] = proto.RepeatedField( - proto.MESSAGE, - number=4, - message="ActionParameter", - ) - - -class ActionParameter(proto.Message): - r"""Parameter associated with action. - - Attributes: - name (str): - Required. Name of the parameter. - value (google.protobuf.struct_pb2.Value): - Required. Value of the parameter. - """ - - name: str = proto.Field( - proto.STRING, - number=1, + number=5, + message=struct_pb2.Struct, ) - value: struct_pb2.Value = proto.Field( + output_action_parameters: struct_pb2.Struct = proto.Field( proto.MESSAGE, - number=2, - message=struct_pb2.Value, + number=6, + message=struct_pb2.Struct, ) @@ -573,12 +552,12 @@ class FlowInvocation(proto.Message): flow (str): Required. The unique identifier of the flow. Format: ``projects//locations//agents/``. - input_parameters (MutableSequence[google.cloud.dialogflowcx_v3beta1.types.ActionParameter]): - A list of input parameters for the flow - invocation. - output_parameters (MutableSequence[google.cloud.dialogflowcx_v3beta1.types.ActionParameter]): - A list of output parameters generated by the - flow invocation. + input_action_parameters (google.protobuf.struct_pb2.Struct): + Optional. A list of input parameters for the + flow. + output_action_parameters (google.protobuf.struct_pb2.Struct): + Optional. A list of output parameters + generated by the flow invocation. flow_state (google.cloud.dialogflowcx_v3beta1.types.OutputState): Required. Flow invocation's output state. """ @@ -587,15 +566,15 @@ class FlowInvocation(proto.Message): proto.STRING, number=1, ) - input_parameters: MutableSequence["ActionParameter"] = proto.RepeatedField( + input_action_parameters: struct_pb2.Struct = proto.Field( proto.MESSAGE, - number=2, - message="ActionParameter", + number=5, + message=struct_pb2.Struct, ) - output_parameters: MutableSequence["ActionParameter"] = proto.RepeatedField( + output_action_parameters: struct_pb2.Struct = proto.Field( proto.MESSAGE, - number=3, - message="ActionParameter", + number=6, + message=struct_pb2.Struct, ) flow_state: "OutputState" = proto.Field( proto.ENUM, diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/types/playbook.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/types/playbook.py index 2aae3def4fbd..a1f2f978a919 100644 --- a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/types/playbook.py +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/types/playbook.py @@ -214,9 +214,8 @@ class Playbook(proto.Message): output_parameter_definitions (MutableSequence[google.cloud.dialogflowcx_v3beta1.types.ParameterDefinition]): Optional. Defined structured output parameters for this playbook. - steps (MutableSequence[google.cloud.dialogflowcx_v3beta1.types.Playbook.Step]): - Ordered list of step by step execution - instructions to accomplish target goal. + instruction (google.cloud.dialogflowcx_v3beta1.types.Playbook.Instruction): + Instruction to accomplish target goal. token_count (int): Output only. Estimated number of tokes current playbook takes when sent to the LLM. 
@@ -271,6 +270,21 @@ class Step(proto.Message): message="Playbook.Step", ) + class Instruction(proto.Message): + r"""Message of the Instruction of the playbook. + + Attributes: + steps (MutableSequence[google.cloud.dialogflowcx_v3beta1.types.Playbook.Step]): + Ordered list of step by step execution + instructions to accomplish target goal. + """ + + steps: MutableSequence["Playbook.Step"] = proto.RepeatedField( + proto.MESSAGE, + number=2, + message="Playbook.Step", + ) + name: str = proto.Field( proto.STRING, number=1, @@ -297,10 +311,10 @@ class Step(proto.Message): number=6, message=parameter_definition.ParameterDefinition, ) - steps: MutableSequence[Step] = proto.RepeatedField( + instruction: Instruction = proto.Field( proto.MESSAGE, - number=4, - message=Step, + number=17, + message=Instruction, ) token_count: int = proto.Field( proto.INT64, diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/types/response_message.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/types/response_message.py index eb7329d0a0ef..33481361a60a 100644 --- a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/types/response_message.py +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/types/response_message.py @@ -20,6 +20,8 @@ from google.protobuf import struct_pb2 # type: ignore import proto # type: ignore +from google.cloud.dialogflowcx_v3beta1.types import tool_call as gcdc_tool_call + __protobuf__ = proto.module( package="google.cloud.dialogflow.cx.v3beta1", manifest={ @@ -120,6 +122,11 @@ class ResponseMessage(proto.Message): Represents info card for knowledge answers, to be better rendered in Dialogflow Messenger. + This field is a member of `oneof`_ ``message``. + tool_call (google.cloud.dialogflowcx_v3beta1.types.ToolCall): + Returns the definition of a tool call that + should be executed by the client. + This field is a member of `oneof`_ ``message``. channel (str): The channel which the response is associated with. Clients @@ -445,6 +452,12 @@ class KnowledgeInfoCard(proto.Message): oneof="message", message=KnowledgeInfoCard, ) + tool_call: gcdc_tool_call.ToolCall = proto.Field( + proto.MESSAGE, + number=22, + oneof="message", + message=gcdc_tool_call.ToolCall, + ) channel: str = proto.Field( proto.STRING, number=19, diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/types/security_settings.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/types/security_settings.py index 95f550e47f3c..4fb5dc0d37b0 100644 --- a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/types/security_settings.py +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/types/security_settings.py @@ -381,6 +381,9 @@ class AudioExportSettings(proto.Message): audio_format (google.cloud.dialogflowcx_v3beta1.types.SecuritySettings.AudioExportSettings.AudioFormat): File format for exported audio file. Currently only in telephony recordings. + store_tts_audio (bool): + Whether to store TTS audio. By default, TTS + audio from the virtual agent is not exported. 
""" class AudioFormat(proto.Enum): @@ -419,6 +422,10 @@ class AudioFormat(proto.Enum): number=4, enum="SecuritySettings.AudioExportSettings.AudioFormat", ) + store_tts_audio: bool = proto.Field( + proto.BOOL, + number=6, + ) class InsightsExportSettings(proto.Message): r"""Settings for exporting conversations to diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/types/session.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/types/session.py index 7e16c45499b8..57422e5717ac 100644 --- a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/types/session.py +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/types/session.py @@ -1782,6 +1782,9 @@ class MatchType(proto.Enum): Indicates an empty query. EVENT (6): The query directly triggered an event. + PLAYBOOK (9): + The query was handled by a + [``Playbook``][google.cloud.dialogflow.cx.v3beta1.Playbook]. """ MATCH_TYPE_UNSPECIFIED = 0 INTENT = 1 @@ -1790,6 +1793,7 @@ class MatchType(proto.Enum): NO_MATCH = 4 NO_INPUT = 5 EVENT = 6 + PLAYBOOK = 9 intent: gcdc_intent.Intent = proto.Field( proto.MESSAGE, diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/types/tool.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/types/tool.py index eb5bde3d9dbb..2adbf8c97e31 100644 --- a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/types/tool.py +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/types/tool.py @@ -335,12 +335,6 @@ class Tool(proto.Message): description (str): Required. High level description of the Tool and its usage. - actions (MutableSequence[str]): - The list of derived action names for the - tool. - schemas (MutableSequence[str]): - The list of derived type schemas for the - tool. open_api_spec (google.cloud.dialogflowcx_v3beta1.types.Tool.OpenApiTool): OpenAPI specification of the Tool. @@ -706,14 +700,6 @@ class CACert(proto.Message): proto.STRING, number=3, ) - actions: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=6, - ) - schemas: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=7, - ) open_api_spec: OpenApiTool = proto.Field( proto.MESSAGE, number=4, diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/types/tool_call.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/types/tool_call.py index 372fed702eef..f9c456e2c540 100644 --- a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/types/tool_call.py +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/types/tool_call.py @@ -23,11 +23,42 @@ __protobuf__ = proto.module( package="google.cloud.dialogflow.cx.v3beta1", manifest={ + "ToolCall", "ToolCallResult", }, ) +class ToolCall(proto.Message): + r"""Represents a call of a specific tool's action with the + specified inputs. + + Attributes: + tool (str): + The [tool][Tool] associated with this call. Format: + ``projects//locations//agents//tools/``. + action (str): + The name of the tool's action associated with + this call. + input_parameters (google.protobuf.struct_pb2.Struct): + The action's input parameters. 
+ """ + + tool: str = proto.Field( + proto.STRING, + number=1, + ) + action: str = proto.Field( + proto.STRING, + number=2, + ) + input_parameters: struct_pb2.Struct = proto.Field( + proto.MESSAGE, + number=3, + message=struct_pb2.Struct, + ) + + class ToolCallResult(proto.Message): r"""The result of calling a tool's action that has been executed by the client. diff --git a/packages/google-cloud-dialogflow-cx/samples/generated_samples/dialogflow_v3beta1_generated_agents_create_agent_async.py b/packages/google-cloud-dialogflow-cx/samples/generated_samples/dialogflow_v3beta1_generated_agents_create_agent_async.py index 8a0946034eb9..841fea59b884 100644 --- a/packages/google-cloud-dialogflow-cx/samples/generated_samples/dialogflow_v3beta1_generated_agents_create_agent_async.py +++ b/packages/google-cloud-dialogflow-cx/samples/generated_samples/dialogflow_v3beta1_generated_agents_create_agent_async.py @@ -40,6 +40,7 @@ async def sample_create_agent(): # Initialize request argument(s) agent = dialogflowcx_v3beta1.Agent() + agent.start_flow = "start_flow_value" agent.display_name = "display_name_value" agent.default_language_code = "default_language_code_value" agent.time_zone = "time_zone_value" diff --git a/packages/google-cloud-dialogflow-cx/samples/generated_samples/dialogflow_v3beta1_generated_agents_create_agent_sync.py b/packages/google-cloud-dialogflow-cx/samples/generated_samples/dialogflow_v3beta1_generated_agents_create_agent_sync.py index 5f834be121dc..f5e0fd46de0d 100644 --- a/packages/google-cloud-dialogflow-cx/samples/generated_samples/dialogflow_v3beta1_generated_agents_create_agent_sync.py +++ b/packages/google-cloud-dialogflow-cx/samples/generated_samples/dialogflow_v3beta1_generated_agents_create_agent_sync.py @@ -40,6 +40,7 @@ def sample_create_agent(): # Initialize request argument(s) agent = dialogflowcx_v3beta1.Agent() + agent.start_flow = "start_flow_value" agent.display_name = "display_name_value" agent.default_language_code = "default_language_code_value" agent.time_zone = "time_zone_value" diff --git a/packages/google-cloud-dialogflow-cx/samples/generated_samples/dialogflow_v3beta1_generated_agents_update_agent_async.py b/packages/google-cloud-dialogflow-cx/samples/generated_samples/dialogflow_v3beta1_generated_agents_update_agent_async.py index 57496e5e5860..adc7b2c2886f 100644 --- a/packages/google-cloud-dialogflow-cx/samples/generated_samples/dialogflow_v3beta1_generated_agents_update_agent_async.py +++ b/packages/google-cloud-dialogflow-cx/samples/generated_samples/dialogflow_v3beta1_generated_agents_update_agent_async.py @@ -40,6 +40,7 @@ async def sample_update_agent(): # Initialize request argument(s) agent = dialogflowcx_v3beta1.Agent() + agent.start_flow = "start_flow_value" agent.display_name = "display_name_value" agent.default_language_code = "default_language_code_value" agent.time_zone = "time_zone_value" diff --git a/packages/google-cloud-dialogflow-cx/samples/generated_samples/dialogflow_v3beta1_generated_agents_update_agent_sync.py b/packages/google-cloud-dialogflow-cx/samples/generated_samples/dialogflow_v3beta1_generated_agents_update_agent_sync.py index 0332830d634f..fe3c3a89d9ed 100644 --- a/packages/google-cloud-dialogflow-cx/samples/generated_samples/dialogflow_v3beta1_generated_agents_update_agent_sync.py +++ b/packages/google-cloud-dialogflow-cx/samples/generated_samples/dialogflow_v3beta1_generated_agents_update_agent_sync.py @@ -40,6 +40,7 @@ def sample_update_agent(): # Initialize request argument(s) agent = dialogflowcx_v3beta1.Agent() + 
agent.start_flow = "start_flow_value" agent.display_name = "display_name_value" agent.default_language_code = "default_language_code_value" agent.time_zone = "time_zone_value" diff --git a/packages/google-cloud-dialogflow-cx/samples/generated_samples/snippet_metadata_google.cloud.dialogflow.cx.v3.json b/packages/google-cloud-dialogflow-cx/samples/generated_samples/snippet_metadata_google.cloud.dialogflow.cx.v3.json index 1ea6a76db3ff..44805662d03f 100644 --- a/packages/google-cloud-dialogflow-cx/samples/generated_samples/snippet_metadata_google.cloud.dialogflow.cx.v3.json +++ b/packages/google-cloud-dialogflow-cx/samples/generated_samples/snippet_metadata_google.cloud.dialogflow.cx.v3.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-dialogflow-cx", - "version": "0.1.0" + "version": "1.34.0" }, "snippets": [ { diff --git a/packages/google-cloud-dialogflow-cx/samples/generated_samples/snippet_metadata_google.cloud.dialogflow.cx.v3beta1.json b/packages/google-cloud-dialogflow-cx/samples/generated_samples/snippet_metadata_google.cloud.dialogflow.cx.v3beta1.json index 1283c5667dac..72b1a40cc456 100644 --- a/packages/google-cloud-dialogflow-cx/samples/generated_samples/snippet_metadata_google.cloud.dialogflow.cx.v3beta1.json +++ b/packages/google-cloud-dialogflow-cx/samples/generated_samples/snippet_metadata_google.cloud.dialogflow.cx.v3beta1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-dialogflow-cx", - "version": "0.1.0" + "version": "1.34.0" }, "snippets": [ { @@ -64,12 +64,12 @@ "regionTag": "dialogflow_v3beta1_generated_Agents_CreateAgent_async", "segments": [ { - "end": 57, + "end": 58, "start": 27, "type": "FULL" }, { - "end": 57, + "end": 58, "start": 27, "type": "SHORT" }, @@ -79,18 +79,18 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 51, + "end": 52, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 54, - "start": 52, + "end": 55, + "start": 53, "type": "REQUEST_EXECUTION" }, { - "end": 58, - "start": 55, + "end": 59, + "start": 56, "type": "RESPONSE_HANDLING" } ], @@ -148,12 +148,12 @@ "regionTag": "dialogflow_v3beta1_generated_Agents_CreateAgent_sync", "segments": [ { - "end": 57, + "end": 58, "start": 27, "type": "FULL" }, { - "end": 57, + "end": 58, "start": 27, "type": "SHORT" }, @@ -163,18 +163,18 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 51, + "end": 52, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 54, - "start": 52, + "end": 55, + "start": 53, "type": "REQUEST_EXECUTION" }, { - "end": 58, - "start": 55, + "end": 59, + "start": 56, "type": "RESPONSE_HANDLING" } ], @@ -1346,12 +1346,12 @@ "regionTag": "dialogflow_v3beta1_generated_Agents_UpdateAgent_async", "segments": [ { - "end": 56, + "end": 57, "start": 27, "type": "FULL" }, { - "end": 56, + "end": 57, "start": 27, "type": "SHORT" }, @@ -1361,18 +1361,18 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 50, + "end": 51, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 53, - "start": 51, + "end": 54, + "start": 52, "type": "REQUEST_EXECUTION" }, { - "end": 57, - "start": 54, + "end": 58, + "start": 55, "type": "RESPONSE_HANDLING" } ], @@ -1430,12 +1430,12 @@ "regionTag": "dialogflow_v3beta1_generated_Agents_UpdateAgent_sync", "segments": [ { - "end": 56, + "end": 57, "start": 27, "type": "FULL" }, { - "end": 56, + "end": 57, "start": 27, "type": "SHORT" }, @@ -1445,18 +1445,18 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 50, + "end": 51, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 53, - "start": 51, + "end": 54, + 
"start": 52, "type": "REQUEST_EXECUTION" }, { - "end": 57, - "start": 54, + "end": 58, + "start": 55, "type": "RESPONSE_HANDLING" } ], diff --git a/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3/test_agents.py b/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3/test_agents.py index 473a7ae5e0a7..36803a59f4a8 100644 --- a/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3/test_agents.py +++ b/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3/test_agents.py @@ -1481,13 +1481,13 @@ def test_list_agents_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_agents(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 diff --git a/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3/test_changelogs.py b/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3/test_changelogs.py index 737125a7a8d1..9199f8807b2f 100644 --- a/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3/test_changelogs.py +++ b/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3/test_changelogs.py @@ -1491,13 +1491,13 @@ def test_list_changelogs_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_changelogs(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 diff --git a/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3/test_deployments.py b/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3/test_deployments.py index 066782030dff..c143e59af280 100644 --- a/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3/test_deployments.py +++ b/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3/test_deployments.py @@ -1495,13 +1495,13 @@ def test_list_deployments_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_deployments(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 diff --git a/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3/test_entity_types.py b/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3/test_entity_types.py index 6f510a8dd7dd..cc26efa1dc0b 100644 --- a/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3/test_entity_types.py +++ b/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3/test_entity_types.py @@ -3173,13 +3173,13 @@ def test_list_entity_types_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_entity_types(request={}) - assert pager._metadata == 
metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 diff --git a/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3/test_environments.py b/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3/test_environments.py index 2ba6777d9c78..e41bbb207a5e 100644 --- a/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3/test_environments.py +++ b/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3/test_environments.py @@ -1543,13 +1543,13 @@ def test_list_environments_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_environments(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -3690,13 +3690,13 @@ def test_lookup_environment_history_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("name", ""),)), ) pager = client.lookup_environment_history(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -4588,13 +4588,13 @@ def test_list_continuous_test_results_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_continuous_test_results(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 diff --git a/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3/test_experiments.py b/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3/test_experiments.py index e0ec1a46fad5..1265edb255cf 100644 --- a/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3/test_experiments.py +++ b/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3/test_experiments.py @@ -1498,13 +1498,13 @@ def test_list_experiments_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_experiments(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 diff --git a/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3/test_flows.py b/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3/test_flows.py index 819e2bdfaac6..b6229eb38cdf 100644 --- a/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3/test_flows.py +++ b/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3/test_flows.py @@ -2225,13 +2225,13 @@ def test_list_flows_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = 
client.list_flows(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 diff --git a/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3/test_generators.py b/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3/test_generators.py index 311ff8ae8aac..1827dadbca04 100644 --- a/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3/test_generators.py +++ b/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3/test_generators.py @@ -1492,13 +1492,13 @@ def test_list_generators_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_generators(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 diff --git a/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3/test_intents.py b/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3/test_intents.py index 95b53eca6368..132c7cd08485 100644 --- a/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3/test_intents.py +++ b/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3/test_intents.py @@ -1476,13 +1476,13 @@ def test_list_intents_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_intents(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 diff --git a/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3/test_pages.py b/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3/test_pages.py index 5af46c32995d..8df8ee56c4cf 100644 --- a/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3/test_pages.py +++ b/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3/test_pages.py @@ -1467,13 +1467,13 @@ def test_list_pages_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_pages(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 diff --git a/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3/test_security_settings_service.py b/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3/test_security_settings_service.py index 53e7608e3151..304d3f1d7eec 100644 --- a/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3/test_security_settings_service.py +++ b/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3/test_security_settings_service.py @@ -3001,13 +3001,13 @@ def test_list_security_settings_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = 
client.list_security_settings(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 diff --git a/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3/test_session_entity_types.py b/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3/test_session_entity_types.py index dcf7e92eab7a..6abe90244c57 100644 --- a/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3/test_session_entity_types.py +++ b/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3/test_session_entity_types.py @@ -1602,13 +1602,13 @@ def test_list_session_entity_types_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_session_entity_types(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 diff --git a/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3/test_test_cases.py b/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3/test_test_cases.py index d99c23c3d277..eb1d15c4e90d 100644 --- a/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3/test_test_cases.py +++ b/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3/test_test_cases.py @@ -1513,13 +1513,13 @@ def test_list_test_cases_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_test_cases(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -5158,13 +5158,13 @@ def test_list_test_case_results_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_test_case_results(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 diff --git a/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3/test_transition_route_groups.py b/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3/test_transition_route_groups.py index 3e6658c1d223..2f9ca382a151 100644 --- a/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3/test_transition_route_groups.py +++ b/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3/test_transition_route_groups.py @@ -1639,13 +1639,13 @@ def test_list_transition_route_groups_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_transition_route_groups(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 diff --git a/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3/test_versions.py 
b/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3/test_versions.py index 2b58c8d84e3e..4c42d59f9720 100644 --- a/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3/test_versions.py +++ b/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3/test_versions.py @@ -1488,13 +1488,13 @@ def test_list_versions_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_versions(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 diff --git a/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3/test_webhooks.py b/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3/test_webhooks.py index 7974498a9d33..ee7a0dbe0e55 100644 --- a/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3/test_webhooks.py +++ b/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3/test_webhooks.py @@ -1476,13 +1476,13 @@ def test_list_webhooks_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_webhooks(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 diff --git a/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3beta1/test_agents.py b/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3beta1/test_agents.py index 3af977a1ac28..d85d450daf2d 100644 --- a/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3beta1/test_agents.py +++ b/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3beta1/test_agents.py @@ -1482,13 +1482,13 @@ def test_list_agents_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_agents(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -1663,13 +1663,12 @@ def test_get_agent(request_type, transport: str = "grpc"): time_zone="time_zone_value", description="description_value", avatar_uri="avatar_uri_value", - start_flow="start_flow_value", - start_playbook="start_playbook_value", security_settings="security_settings_value", enable_stackdriver_logging=True, enable_spell_correction=True, enable_multi_language_training=True, locked=True, + start_flow="start_flow_value", ) response = client.get_agent(request) @@ -1688,8 +1687,6 @@ def test_get_agent(request_type, transport: str = "grpc"): assert response.time_zone == "time_zone_value" assert response.description == "description_value" assert response.avatar_uri == "avatar_uri_value" - assert response.start_flow == "start_flow_value" - assert response.start_playbook == "start_playbook_value" assert response.security_settings == "security_settings_value" assert response.enable_stackdriver_logging is True assert response.enable_spell_correction is True @@ -1800,8 +1797,6 @@ async def 
test_get_agent_empty_call_async(): time_zone="time_zone_value", description="description_value", avatar_uri="avatar_uri_value", - start_flow="start_flow_value", - start_playbook="start_playbook_value", security_settings="security_settings_value", enable_stackdriver_logging=True, enable_spell_correction=True, @@ -1884,8 +1879,6 @@ async def test_get_agent_async( time_zone="time_zone_value", description="description_value", avatar_uri="avatar_uri_value", - start_flow="start_flow_value", - start_playbook="start_playbook_value", security_settings="security_settings_value", enable_stackdriver_logging=True, enable_spell_correction=True, @@ -1910,8 +1903,6 @@ async def test_get_agent_async( assert response.time_zone == "time_zone_value" assert response.description == "description_value" assert response.avatar_uri == "avatar_uri_value" - assert response.start_flow == "start_flow_value" - assert response.start_playbook == "start_playbook_value" assert response.security_settings == "security_settings_value" assert response.enable_stackdriver_logging is True assert response.enable_spell_correction is True @@ -2091,13 +2082,12 @@ def test_create_agent(request_type, transport: str = "grpc"): time_zone="time_zone_value", description="description_value", avatar_uri="avatar_uri_value", - start_flow="start_flow_value", - start_playbook="start_playbook_value", security_settings="security_settings_value", enable_stackdriver_logging=True, enable_spell_correction=True, enable_multi_language_training=True, locked=True, + start_flow="start_flow_value", ) response = client.create_agent(request) @@ -2116,8 +2106,6 @@ def test_create_agent(request_type, transport: str = "grpc"): assert response.time_zone == "time_zone_value" assert response.description == "description_value" assert response.avatar_uri == "avatar_uri_value" - assert response.start_flow == "start_flow_value" - assert response.start_playbook == "start_playbook_value" assert response.security_settings == "security_settings_value" assert response.enable_stackdriver_logging is True assert response.enable_spell_correction is True @@ -2228,8 +2216,6 @@ async def test_create_agent_empty_call_async(): time_zone="time_zone_value", description="description_value", avatar_uri="avatar_uri_value", - start_flow="start_flow_value", - start_playbook="start_playbook_value", security_settings="security_settings_value", enable_stackdriver_logging=True, enable_spell_correction=True, @@ -2314,8 +2300,6 @@ async def test_create_agent_async( time_zone="time_zone_value", description="description_value", avatar_uri="avatar_uri_value", - start_flow="start_flow_value", - start_playbook="start_playbook_value", security_settings="security_settings_value", enable_stackdriver_logging=True, enable_spell_correction=True, @@ -2340,8 +2324,6 @@ async def test_create_agent_async( assert response.time_zone == "time_zone_value" assert response.description == "description_value" assert response.avatar_uri == "avatar_uri_value" - assert response.start_flow == "start_flow_value" - assert response.start_playbook == "start_playbook_value" assert response.security_settings == "security_settings_value" assert response.enable_stackdriver_logging is True assert response.enable_spell_correction is True @@ -2531,13 +2513,12 @@ def test_update_agent(request_type, transport: str = "grpc"): time_zone="time_zone_value", description="description_value", avatar_uri="avatar_uri_value", - start_flow="start_flow_value", - start_playbook="start_playbook_value", security_settings="security_settings_value", 
enable_stackdriver_logging=True, enable_spell_correction=True, enable_multi_language_training=True, locked=True, + start_flow="start_flow_value", ) response = client.update_agent(request) @@ -2556,8 +2537,6 @@ def test_update_agent(request_type, transport: str = "grpc"): assert response.time_zone == "time_zone_value" assert response.description == "description_value" assert response.avatar_uri == "avatar_uri_value" - assert response.start_flow == "start_flow_value" - assert response.start_playbook == "start_playbook_value" assert response.security_settings == "security_settings_value" assert response.enable_stackdriver_logging is True assert response.enable_spell_correction is True @@ -2664,8 +2643,6 @@ async def test_update_agent_empty_call_async(): time_zone="time_zone_value", description="description_value", avatar_uri="avatar_uri_value", - start_flow="start_flow_value", - start_playbook="start_playbook_value", security_settings="security_settings_value", enable_stackdriver_logging=True, enable_spell_correction=True, @@ -2750,8 +2727,6 @@ async def test_update_agent_async( time_zone="time_zone_value", description="description_value", avatar_uri="avatar_uri_value", - start_flow="start_flow_value", - start_playbook="start_playbook_value", security_settings="security_settings_value", enable_stackdriver_logging=True, enable_spell_correction=True, @@ -2776,8 +2751,6 @@ async def test_update_agent_async( assert response.time_zone == "time_zone_value" assert response.description == "description_value" assert response.avatar_uri == "avatar_uri_value" - assert response.start_flow == "start_flow_value" - assert response.start_playbook == "start_playbook_value" assert response.security_settings == "security_settings_value" assert response.enable_stackdriver_logging is True assert response.enable_spell_correction is True @@ -5770,13 +5743,12 @@ def test_get_agent_rest(request_type): time_zone="time_zone_value", description="description_value", avatar_uri="avatar_uri_value", - start_flow="start_flow_value", - start_playbook="start_playbook_value", security_settings="security_settings_value", enable_stackdriver_logging=True, enable_spell_correction=True, enable_multi_language_training=True, locked=True, + start_flow="start_flow_value", ) # Wrap the value into a proper Response obj @@ -5799,8 +5771,6 @@ def test_get_agent_rest(request_type): assert response.time_zone == "time_zone_value" assert response.description == "description_value" assert response.avatar_uri == "avatar_uri_value" - assert response.start_flow == "start_flow_value" - assert response.start_playbook == "start_playbook_value" assert response.security_settings == "security_settings_value" assert response.enable_stackdriver_logging is True assert response.enable_spell_correction is True @@ -6109,6 +6079,8 @@ def test_create_agent_rest(request_type): "enabled": True, "max_digits": 1065, "finish_digit": "finish_digit_value", + "interdigit_timeout_duration": {}, + "endpointing_timeout_duration": {}, }, "logging_settings": { "enable_stackdriver_logging": True, @@ -6209,13 +6181,12 @@ def get_message_fields(field): time_zone="time_zone_value", description="description_value", avatar_uri="avatar_uri_value", - start_flow="start_flow_value", - start_playbook="start_playbook_value", security_settings="security_settings_value", enable_stackdriver_logging=True, enable_spell_correction=True, enable_multi_language_training=True, locked=True, + start_flow="start_flow_value", ) # Wrap the value into a proper Response obj @@ -6238,8 +6209,6 @@ def 
get_message_fields(field): assert response.time_zone == "time_zone_value" assert response.description == "description_value" assert response.avatar_uri == "avatar_uri_value" - assert response.start_flow == "start_flow_value" - assert response.start_playbook == "start_playbook_value" assert response.security_settings == "security_settings_value" assert response.enable_stackdriver_logging is True assert response.enable_spell_correction is True @@ -6561,6 +6530,8 @@ def test_update_agent_rest(request_type): "enabled": True, "max_digits": 1065, "finish_digit": "finish_digit_value", + "interdigit_timeout_duration": {}, + "endpointing_timeout_duration": {}, }, "logging_settings": { "enable_stackdriver_logging": True, @@ -6661,13 +6632,12 @@ def get_message_fields(field): time_zone="time_zone_value", description="description_value", avatar_uri="avatar_uri_value", - start_flow="start_flow_value", - start_playbook="start_playbook_value", security_settings="security_settings_value", enable_stackdriver_logging=True, enable_spell_correction=True, enable_multi_language_training=True, locked=True, + start_flow="start_flow_value", ) # Wrap the value into a proper Response obj @@ -6690,8 +6660,6 @@ def get_message_fields(field): assert response.time_zone == "time_zone_value" assert response.description == "description_value" assert response.avatar_uri == "avatar_uri_value" - assert response.start_flow == "start_flow_value" - assert response.start_playbook == "start_playbook_value" assert response.security_settings == "security_settings_value" assert response.enable_stackdriver_logging is True assert response.enable_spell_correction is True diff --git a/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3beta1/test_changelogs.py b/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3beta1/test_changelogs.py index 7746ae168f7e..0187008a5366 100644 --- a/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3beta1/test_changelogs.py +++ b/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3beta1/test_changelogs.py @@ -1491,13 +1491,13 @@ def test_list_changelogs_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_changelogs(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 diff --git a/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3beta1/test_conversation_history.py b/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3beta1/test_conversation_history.py index 83fb55369181..56e770cdfe93 100644 --- a/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3beta1/test_conversation_history.py +++ b/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3beta1/test_conversation_history.py @@ -1632,13 +1632,13 @@ def test_list_conversations_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_conversations(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 diff --git 
a/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3beta1/test_deployments.py b/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3beta1/test_deployments.py index 614d970fe7ec..3cd5750906c0 100644 --- a/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3beta1/test_deployments.py +++ b/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3beta1/test_deployments.py @@ -1495,13 +1495,13 @@ def test_list_deployments_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_deployments(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 diff --git a/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3beta1/test_entity_types.py b/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3beta1/test_entity_types.py index cd0960ed7709..f310ee028130 100644 --- a/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3beta1/test_entity_types.py +++ b/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3beta1/test_entity_types.py @@ -3173,13 +3173,13 @@ def test_list_entity_types_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_entity_types(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 diff --git a/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3beta1/test_environments.py b/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3beta1/test_environments.py index 185553fd8f2c..0f950c851cba 100644 --- a/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3beta1/test_environments.py +++ b/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3beta1/test_environments.py @@ -1543,13 +1543,13 @@ def test_list_environments_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_environments(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -3690,13 +3690,13 @@ def test_lookup_environment_history_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("name", ""),)), ) pager = client.lookup_environment_history(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -4588,13 +4588,13 @@ def test_list_continuous_test_results_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = 
client.list_continuous_test_results(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 diff --git a/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3beta1/test_examples.py b/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3beta1/test_examples.py index 00ce6e05be3b..989a9275e21c 100644 --- a/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3beta1/test_examples.py +++ b/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3beta1/test_examples.py @@ -2236,13 +2236,13 @@ def test_list_examples_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_examples(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -3200,23 +3200,11 @@ def test_create_example_rest(request_type): "name": "name_value", "playbook_input": { "preceding_conversation_summary": "preceding_conversation_summary_value", - "parameters": [ - { - "name": "name_value", - "value": { - "null_value": 0, - "number_value": 0.1285, - "string_value": "string_value_value", - "bool_value": True, - "struct_value": {"fields": {}}, - "list_value": {"values": {}}, - }, - } - ], + "action_parameters": {"fields": {}}, }, "playbook_output": { "execution_summary": "execution_summary_value", - "parameters": {}, + "action_parameters": {}, }, "actions": [ { @@ -3225,8 +3213,8 @@ def test_create_example_rest(request_type): "tool_use": { "tool": "tool_value", "action": "action_value", - "input_parameters": {}, - "output_parameters": {}, + "input_action_parameters": {}, + "output_action_parameters": {}, }, "playbook_invocation": { "playbook": "playbook_value", @@ -3236,8 +3224,8 @@ def test_create_example_rest(request_type): }, "flow_invocation": { "flow": "flow_value", - "input_parameters": {}, - "output_parameters": {}, + "input_action_parameters": {}, + "output_action_parameters": {}, "flow_state": 1, }, } @@ -4622,23 +4610,11 @@ def test_update_example_rest(request_type): "name": "projects/sample1/locations/sample2/agents/sample3/playbooks/sample4/examples/sample5", "playbook_input": { "preceding_conversation_summary": "preceding_conversation_summary_value", - "parameters": [ - { - "name": "name_value", - "value": { - "null_value": 0, - "number_value": 0.1285, - "string_value": "string_value_value", - "bool_value": True, - "struct_value": {"fields": {}}, - "list_value": {"values": {}}, - }, - } - ], + "action_parameters": {"fields": {}}, }, "playbook_output": { "execution_summary": "execution_summary_value", - "parameters": {}, + "action_parameters": {}, }, "actions": [ { @@ -4647,8 +4623,8 @@ def test_update_example_rest(request_type): "tool_use": { "tool": "tool_value", "action": "action_value", - "input_parameters": {}, - "output_parameters": {}, + "input_action_parameters": {}, + "output_action_parameters": {}, }, "playbook_invocation": { "playbook": "playbook_value", @@ -4658,8 +4634,8 @@ def test_update_example_rest(request_type): }, "flow_invocation": { "flow": "flow_value", - "input_parameters": {}, - "output_parameters": {}, + "input_action_parameters": {}, + "output_action_parameters": {}, "flow_state": 1, }, } diff --git 
a/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3beta1/test_experiments.py b/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3beta1/test_experiments.py index 994f7bf3a8c8..edb954a2b88b 100644 --- a/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3beta1/test_experiments.py +++ b/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3beta1/test_experiments.py @@ -1498,13 +1498,13 @@ def test_list_experiments_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_experiments(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 diff --git a/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3beta1/test_flows.py b/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3beta1/test_flows.py index 6aad01474f40..d99ff930d96d 100644 --- a/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3beta1/test_flows.py +++ b/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3beta1/test_flows.py @@ -74,6 +74,7 @@ import_strategy, page, response_message, + tool_call, validation_message, ) from google.cloud.dialogflowcx_v3beta1.types import flow @@ -2229,13 +2230,13 @@ def test_list_flows_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_flows(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -4838,6 +4839,11 @@ def test_create_flow_rest(request_type): "phone_number": "phone_number_value" }, "knowledge_info_card": {}, + "tool_call": { + "tool": "tool_value", + "action": "action_value", + "input_parameters": {}, + }, "channel": "channel_value", } ], @@ -4881,6 +4887,8 @@ def test_create_flow_rest(request_type): "enabled": True, "max_digits": 1065, "finish_digit": "finish_digit_value", + "interdigit_timeout_duration": {}, + "endpointing_timeout_duration": {}, }, "logging_settings": { "enable_stackdriver_logging": True, @@ -6322,6 +6330,11 @@ def test_update_flow_rest(request_type): "phone_number": "phone_number_value" }, "knowledge_info_card": {}, + "tool_call": { + "tool": "tool_value", + "action": "action_value", + "input_parameters": {}, + }, "channel": "channel_value", } ], @@ -6365,6 +6378,8 @@ def test_update_flow_rest(request_type): "enabled": True, "max_digits": 1065, "finish_digit": "finish_digit_value", + "interdigit_timeout_duration": {}, + "endpointing_timeout_duration": {}, }, "logging_settings": { "enable_stackdriver_logging": True, @@ -8876,12 +8891,43 @@ def test_parse_page_path(): assert expected == actual -def test_transition_route_group_path(): +def test_tool_path(): project = "scallop" location = "abalone" agent = "squid" - flow = "clam" - transition_route_group = "whelk" + tool = "clam" + expected = ( + "projects/{project}/locations/{location}/agents/{agent}/tools/{tool}".format( + project=project, + location=location, + agent=agent, + tool=tool, + ) + ) + actual = FlowsClient.tool_path(project, location, agent, tool) + assert expected == actual + + +def 
test_parse_tool_path(): + expected = { + "project": "whelk", + "location": "octopus", + "agent": "oyster", + "tool": "nudibranch", + } + path = FlowsClient.tool_path(**expected) + + # Check that the path construction is reversible. + actual = FlowsClient.parse_tool_path(path) + assert expected == actual + + +def test_transition_route_group_path(): + project = "cuttlefish" + location = "mussel" + agent = "winkle" + flow = "nautilus" + transition_route_group = "scallop" expected = "projects/{project}/locations/{location}/agents/{agent}/flows/{flow}/transitionRouteGroups/{transition_route_group}".format( project=project, location=location, @@ -8897,11 +8943,11 @@ def test_transition_route_group_path(): def test_parse_transition_route_group_path(): expected = { - "project": "octopus", - "location": "oyster", - "agent": "nudibranch", - "flow": "cuttlefish", - "transition_route_group": "mussel", + "project": "abalone", + "location": "squid", + "agent": "clam", + "flow": "whelk", + "transition_route_group": "octopus", } path = FlowsClient.transition_route_group_path(**expected) @@ -8911,10 +8957,10 @@ def test_parse_transition_route_group_path(): def test_webhook_path(): - project = "winkle" - location = "nautilus" - agent = "scallop" - webhook = "abalone" + project = "oyster" + location = "nudibranch" + agent = "cuttlefish" + webhook = "mussel" expected = "projects/{project}/locations/{location}/agents/{agent}/webhooks/{webhook}".format( project=project, location=location, @@ -8927,10 +8973,10 @@ def test_webhook_path(): def test_parse_webhook_path(): expected = { - "project": "squid", - "location": "clam", - "agent": "whelk", - "webhook": "octopus", + "project": "winkle", + "location": "nautilus", + "agent": "scallop", + "webhook": "abalone", } path = FlowsClient.webhook_path(**expected) @@ -8940,7 +8986,7 @@ def test_parse_webhook_path(): def test_common_billing_account_path(): - billing_account = "oyster" + billing_account = "squid" expected = "billingAccounts/{billing_account}".format( billing_account=billing_account, ) @@ -8950,7 +8996,7 @@ def test_common_billing_account_path(): def test_parse_common_billing_account_path(): expected = { - "billing_account": "nudibranch", + "billing_account": "clam", } path = FlowsClient.common_billing_account_path(**expected) @@ -8960,7 +9006,7 @@ def test_parse_common_billing_account_path(): def test_common_folder_path(): - folder = "cuttlefish" + folder = "whelk" expected = "folders/{folder}".format( folder=folder, ) @@ -8970,7 +9016,7 @@ def test_common_folder_path(): def test_parse_common_folder_path(): expected = { - "folder": "mussel", + "folder": "octopus", } path = FlowsClient.common_folder_path(**expected) @@ -8980,7 +9026,7 @@ def test_parse_common_folder_path(): def test_common_organization_path(): - organization = "winkle" + organization = "oyster" expected = "organizations/{organization}".format( organization=organization, ) @@ -8990,7 +9036,7 @@ def test_common_organization_path(): def test_parse_common_organization_path(): expected = { - "organization": "nautilus", + "organization": "nudibranch", } path = FlowsClient.common_organization_path(**expected) @@ -9000,7 +9046,7 @@ def test_parse_common_organization_path(): def test_common_project_path(): - project = "scallop" + project = "cuttlefish" expected = "projects/{project}".format( project=project, ) @@ -9010,7 +9056,7 @@ def test_common_project_path(): def test_parse_common_project_path(): expected = { - "project": "abalone", + "project": "mussel", } path = 
FlowsClient.common_project_path(**expected) @@ -9020,8 +9066,8 @@ def test_parse_common_project_path(): def test_common_location_path(): - project = "squid" - location = "clam" + project = "winkle" + location = "nautilus" expected = "projects/{project}/locations/{location}".format( project=project, location=location, @@ -9032,8 +9078,8 @@ def test_common_location_path(): def test_parse_common_location_path(): expected = { - "project": "whelk", - "location": "octopus", + "project": "scallop", + "location": "abalone", } path = FlowsClient.common_location_path(**expected) diff --git a/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3beta1/test_generators.py b/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3beta1/test_generators.py index c4f8fa9ca4db..347d787f286c 100644 --- a/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3beta1/test_generators.py +++ b/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3beta1/test_generators.py @@ -1493,13 +1493,13 @@ def test_list_generators_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_generators(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 diff --git a/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3beta1/test_intents.py b/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3beta1/test_intents.py index b8286fc941ec..6cd1ba74091b 100644 --- a/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3beta1/test_intents.py +++ b/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3beta1/test_intents.py @@ -1476,13 +1476,13 @@ def test_list_intents_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_intents(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 diff --git a/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3beta1/test_pages.py b/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3beta1/test_pages.py index 2c43bd994c34..b411c16e6e3a 100644 --- a/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3beta1/test_pages.py +++ b/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3beta1/test_pages.py @@ -61,7 +61,7 @@ ) from google.cloud.dialogflowcx_v3beta1.types import page from google.cloud.dialogflowcx_v3beta1.types import page as gcdc_page -from google.cloud.dialogflowcx_v3beta1.types import response_message +from google.cloud.dialogflowcx_v3beta1.types import response_message, tool_call def client_cert_source_callback(): @@ -1468,13 +1468,13 @@ def test_list_pages_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_pages(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert 
len(results) == 6 @@ -3874,6 +3874,11 @@ def test_create_page_rest(request_type): }, "telephony_transfer_call": {"phone_number": "phone_number_value"}, "knowledge_info_card": {}, + "tool_call": { + "tool": "tool_value", + "action": "action_value", + "input_parameters": {}, + }, "channel": "channel_value", } ], @@ -3915,6 +3920,8 @@ def test_create_page_rest(request_type): "enabled": True, "max_digits": 1065, "finish_digit": "finish_digit_value", + "interdigit_timeout_duration": {}, + "endpointing_timeout_duration": {}, }, "logging_settings": { "enable_stackdriver_logging": True, @@ -4399,6 +4406,11 @@ def test_update_page_rest(request_type): }, "telephony_transfer_call": {"phone_number": "phone_number_value"}, "knowledge_info_card": {}, + "tool_call": { + "tool": "tool_value", + "action": "action_value", + "input_parameters": {}, + }, "channel": "channel_value", } ], @@ -4440,6 +4452,8 @@ def test_update_page_rest(request_type): "enabled": True, "max_digits": 1065, "finish_digit": "finish_digit_value", + "interdigit_timeout_duration": {}, + "endpointing_timeout_duration": {}, }, "logging_settings": { "enable_stackdriver_logging": True, @@ -5833,12 +5847,43 @@ def test_parse_page_path(): assert expected == actual -def test_transition_route_group_path(): +def test_tool_path(): project = "scallop" location = "abalone" agent = "squid" - flow = "clam" - transition_route_group = "whelk" + tool = "clam" + expected = ( + "projects/{project}/locations/{location}/agents/{agent}/tools/{tool}".format( + project=project, + location=location, + agent=agent, + tool=tool, + ) + ) + actual = PagesClient.tool_path(project, location, agent, tool) + assert expected == actual + + +def test_parse_tool_path(): + expected = { + "project": "whelk", + "location": "octopus", + "agent": "oyster", + "tool": "nudibranch", + } + path = PagesClient.tool_path(**expected) + + # Check that the path construction is reversible. 
+ actual = PagesClient.parse_tool_path(path) + assert expected == actual + + +def test_transition_route_group_path(): + project = "cuttlefish" + location = "mussel" + agent = "winkle" + flow = "nautilus" + transition_route_group = "scallop" expected = "projects/{project}/locations/{location}/agents/{agent}/flows/{flow}/transitionRouteGroups/{transition_route_group}".format( project=project, location=location, @@ -5854,11 +5899,11 @@ def test_transition_route_group_path(): def test_parse_transition_route_group_path(): expected = { - "project": "octopus", - "location": "oyster", - "agent": "nudibranch", - "flow": "cuttlefish", - "transition_route_group": "mussel", + "project": "abalone", + "location": "squid", + "agent": "clam", + "flow": "whelk", + "transition_route_group": "octopus", } path = PagesClient.transition_route_group_path(**expected) @@ -5868,10 +5913,10 @@ def test_parse_transition_route_group_path(): def test_webhook_path(): - project = "winkle" - location = "nautilus" - agent = "scallop" - webhook = "abalone" + project = "oyster" + location = "nudibranch" + agent = "cuttlefish" + webhook = "mussel" expected = "projects/{project}/locations/{location}/agents/{agent}/webhooks/{webhook}".format( project=project, location=location, @@ -5884,10 +5929,10 @@ def test_webhook_path(): def test_parse_webhook_path(): expected = { - "project": "squid", - "location": "clam", - "agent": "whelk", - "webhook": "octopus", + "project": "winkle", + "location": "nautilus", + "agent": "scallop", + "webhook": "abalone", } path = PagesClient.webhook_path(**expected) @@ -5897,7 +5942,7 @@ def test_parse_webhook_path(): def test_common_billing_account_path(): - billing_account = "oyster" + billing_account = "squid" expected = "billingAccounts/{billing_account}".format( billing_account=billing_account, ) @@ -5907,7 +5952,7 @@ def test_common_billing_account_path(): def test_parse_common_billing_account_path(): expected = { - "billing_account": "nudibranch", + "billing_account": "clam", } path = PagesClient.common_billing_account_path(**expected) @@ -5917,7 +5962,7 @@ def test_parse_common_billing_account_path(): def test_common_folder_path(): - folder = "cuttlefish" + folder = "whelk" expected = "folders/{folder}".format( folder=folder, ) @@ -5927,7 +5972,7 @@ def test_common_folder_path(): def test_parse_common_folder_path(): expected = { - "folder": "mussel", + "folder": "octopus", } path = PagesClient.common_folder_path(**expected) @@ -5937,7 +5982,7 @@ def test_parse_common_folder_path(): def test_common_organization_path(): - organization = "winkle" + organization = "oyster" expected = "organizations/{organization}".format( organization=organization, ) @@ -5947,7 +5992,7 @@ def test_common_organization_path(): def test_parse_common_organization_path(): expected = { - "organization": "nautilus", + "organization": "nudibranch", } path = PagesClient.common_organization_path(**expected) @@ -5957,7 +6002,7 @@ def test_parse_common_organization_path(): def test_common_project_path(): - project = "scallop" + project = "cuttlefish" expected = "projects/{project}".format( project=project, ) @@ -5967,7 +6012,7 @@ def test_common_project_path(): def test_parse_common_project_path(): expected = { - "project": "abalone", + "project": "mussel", } path = PagesClient.common_project_path(**expected) @@ -5977,8 +6022,8 @@ def test_parse_common_project_path(): def test_common_location_path(): - project = "squid" - location = "clam" + project = "winkle" + location = "nautilus" expected = 
"projects/{project}/locations/{location}".format( project=project, location=location, @@ -5989,8 +6034,8 @@ def test_common_location_path(): def test_parse_common_location_path(): expected = { - "project": "whelk", - "location": "octopus", + "project": "scallop", + "location": "abalone", } path = PagesClient.common_location_path(**expected) diff --git a/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3beta1/test_playbooks.py b/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3beta1/test_playbooks.py index ef191c11f094..734528b26801 100644 --- a/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3beta1/test_playbooks.py +++ b/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3beta1/test_playbooks.py @@ -2255,13 +2255,13 @@ def test_list_playbooks_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_playbooks(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -4444,13 +4444,13 @@ def test_list_playbook_versions_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_playbook_versions(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -5002,7 +5002,7 @@ def test_create_playbook_rest(request_type): {"name": "name_value", "type_": 1, "description": "description_value"} ], "output_parameter_definitions": {}, - "steps": [{"text": "text_value", "steps": {}}], + "instruction": {"steps": [{"text": "text_value", "steps": {}}]}, "token_count": 1193, "create_time": {"seconds": 751, "nanos": 543}, "update_time": {}, @@ -6389,7 +6389,7 @@ def test_update_playbook_rest(request_type): {"name": "name_value", "type_": 1, "description": "description_value"} ], "output_parameter_definitions": {}, - "steps": [{"text": "text_value", "steps": {}}], + "instruction": {"steps": [{"text": "text_value", "steps": {}}]}, "token_count": 1193, "create_time": {"seconds": 751, "nanos": 543}, "update_time": {}, @@ -6805,7 +6805,7 @@ def test_create_playbook_version_rest(request_type): {"name": "name_value", "type_": 1, "description": "description_value"} ], "output_parameter_definitions": {}, - "steps": [{"text": "text_value", "steps": {}}], + "instruction": {"steps": [{"text": "text_value", "steps": {}}]}, "token_count": 1193, "create_time": {"seconds": 751, "nanos": 543}, "update_time": {}, @@ -6825,23 +6825,11 @@ def test_create_playbook_version_rest(request_type): "name": "name_value", "playbook_input": { "preceding_conversation_summary": "preceding_conversation_summary_value", - "parameters": [ - { - "name": "name_value", - "value": { - "null_value": 0, - "number_value": 0.1285, - "string_value": "string_value_value", - "bool_value": True, - "struct_value": {"fields": {}}, - "list_value": {"values": {}}, - }, - } - ], + "action_parameters": {"fields": {}}, }, "playbook_output": { "execution_summary": "execution_summary_value", - "parameters": {}, + "action_parameters": {}, }, "actions": [ { @@ -6850,8 +6838,8 @@ def 
test_create_playbook_version_rest(request_type): "tool_use": { "tool": "tool_value", "action": "action_value", - "input_parameters": {}, - "output_parameters": {}, + "input_action_parameters": {}, + "output_action_parameters": {}, }, "playbook_invocation": { "playbook": "playbook_value", @@ -6861,8 +6849,8 @@ def test_create_playbook_version_rest(request_type): }, "flow_invocation": { "flow": "flow_value", - "input_parameters": {}, - "output_parameters": {}, + "input_action_parameters": {}, + "output_action_parameters": {}, "flow_state": 1, }, } diff --git a/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3beta1/test_security_settings_service.py b/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3beta1/test_security_settings_service.py index ef945419e17e..8b90238bd9a0 100644 --- a/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3beta1/test_security_settings_service.py +++ b/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3beta1/test_security_settings_service.py @@ -3001,13 +3001,13 @@ def test_list_security_settings_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_security_settings(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -3567,6 +3567,7 @@ def test_create_security_settings_rest(request_type): "audio_export_pattern": "audio_export_pattern_value", "enable_audio_redaction": True, "audio_format": 1, + "store_tts_audio": True, }, "insights_export_settings": {"enable_insights_export": True}, } @@ -4344,6 +4345,7 @@ def test_update_security_settings_rest(request_type): "audio_export_pattern": "audio_export_pattern_value", "enable_audio_redaction": True, "audio_format": 1, + "store_tts_audio": True, }, "insights_export_settings": {"enable_insights_export": True}, } diff --git a/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3beta1/test_session_entity_types.py b/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3beta1/test_session_entity_types.py index 94bb17a4e7a2..c0a5380deb18 100644 --- a/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3beta1/test_session_entity_types.py +++ b/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3beta1/test_session_entity_types.py @@ -1602,13 +1602,13 @@ def test_list_session_entity_types_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_session_entity_types(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 diff --git a/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3beta1/test_test_cases.py b/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3beta1/test_test_cases.py index 26abf15a99b2..d0eeec7dbdd0 100644 --- a/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3beta1/test_test_cases.py +++ b/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3beta1/test_test_cases.py @@ -1514,13 +1514,13 @@ def test_list_test_cases_pager(transport_name: str 
= "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_test_cases(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -5159,13 +5159,13 @@ def test_list_test_case_results_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_test_case_results(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -6841,6 +6841,11 @@ def test_create_test_case_rest(request_type): "phone_number": "phone_number_value" }, "knowledge_info_card": {}, + "tool_call": { + "tool": "tool_value", + "action": "action_value", + "input_parameters": {}, + }, "channel": "channel_value", } ], @@ -6884,6 +6889,8 @@ def test_create_test_case_rest(request_type): "enabled": True, "max_digits": 1065, "finish_digit": "finish_digit_value", + "interdigit_timeout_duration": {}, + "endpointing_timeout_duration": {}, }, "logging_settings": { "enable_stackdriver_logging": True, @@ -7486,6 +7493,11 @@ def test_update_test_case_rest(request_type): "phone_number": "phone_number_value" }, "knowledge_info_card": {}, + "tool_call": { + "tool": "tool_value", + "action": "action_value", + "input_parameters": {}, + }, "channel": "channel_value", } ], @@ -7529,6 +7541,8 @@ def test_update_test_case_rest(request_type): "enabled": True, "max_digits": 1065, "finish_digit": "finish_digit_value", + "interdigit_timeout_duration": {}, + "endpointing_timeout_duration": {}, }, "logging_settings": { "enable_stackdriver_logging": True, diff --git a/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3beta1/test_tools.py b/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3beta1/test_tools.py index f719599d9058..b65585ada457 100644 --- a/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3beta1/test_tools.py +++ b/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3beta1/test_tools.py @@ -1086,8 +1086,6 @@ def test_create_tool(request_type, transport: str = "grpc"): name="name_value", display_name="display_name_value", description="description_value", - actions=["actions_value"], - schemas=["schemas_value"], tool_type=gcdc_tool.Tool.ToolType.CUSTOMIZED_TOOL, ) response = client.create_tool(request) @@ -1103,8 +1101,6 @@ def test_create_tool(request_type, transport: str = "grpc"): assert response.name == "name_value" assert response.display_name == "display_name_value" assert response.description == "description_value" - assert response.actions == ["actions_value"] - assert response.schemas == ["schemas_value"] assert response.tool_type == gcdc_tool.Tool.ToolType.CUSTOMIZED_TOOL @@ -1207,8 +1203,6 @@ async def test_create_tool_empty_call_async(): name="name_value", display_name="display_name_value", description="description_value", - actions=["actions_value"], - schemas=["schemas_value"], tool_type=gcdc_tool.Tool.ToolType.CUSTOMIZED_TOOL, ) ) @@ -1285,8 +1279,6 @@ async def test_create_tool_async( name="name_value", display_name="display_name_value", description="description_value", - actions=["actions_value"], - 
schemas=["schemas_value"], tool_type=gcdc_tool.Tool.ToolType.CUSTOMIZED_TOOL, ) ) @@ -1303,8 +1295,6 @@ async def test_create_tool_async( assert response.name == "name_value" assert response.display_name == "display_name_value" assert response.description == "description_value" - assert response.actions == ["actions_value"] - assert response.schemas == ["schemas_value"] assert response.tool_type == gcdc_tool.Tool.ToolType.CUSTOMIZED_TOOL @@ -1868,13 +1858,13 @@ def test_list_tools_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_tools(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -2334,8 +2324,6 @@ def test_get_tool(request_type, transport: str = "grpc"): name="name_value", display_name="display_name_value", description="description_value", - actions=["actions_value"], - schemas=["schemas_value"], tool_type=tool.Tool.ToolType.CUSTOMIZED_TOOL, ) response = client.get_tool(request) @@ -2351,8 +2339,6 @@ def test_get_tool(request_type, transport: str = "grpc"): assert response.name == "name_value" assert response.display_name == "display_name_value" assert response.description == "description_value" - assert response.actions == ["actions_value"] - assert response.schemas == ["schemas_value"] assert response.tool_type == tool.Tool.ToolType.CUSTOMIZED_TOOL @@ -2455,8 +2441,6 @@ async def test_get_tool_empty_call_async(): name="name_value", display_name="display_name_value", description="description_value", - actions=["actions_value"], - schemas=["schemas_value"], tool_type=tool.Tool.ToolType.CUSTOMIZED_TOOL, ) ) @@ -2531,8 +2515,6 @@ async def test_get_tool_async( name="name_value", display_name="display_name_value", description="description_value", - actions=["actions_value"], - schemas=["schemas_value"], tool_type=tool.Tool.ToolType.CUSTOMIZED_TOOL, ) ) @@ -2549,8 +2531,6 @@ async def test_get_tool_async( assert response.name == "name_value" assert response.display_name == "display_name_value" assert response.description == "description_value" - assert response.actions == ["actions_value"] - assert response.schemas == ["schemas_value"] assert response.tool_type == tool.Tool.ToolType.CUSTOMIZED_TOOL @@ -2722,8 +2702,6 @@ def test_update_tool(request_type, transport: str = "grpc"): name="name_value", display_name="display_name_value", description="description_value", - actions=["actions_value"], - schemas=["schemas_value"], tool_type=gcdc_tool.Tool.ToolType.CUSTOMIZED_TOOL, ) response = client.update_tool(request) @@ -2739,8 +2717,6 @@ def test_update_tool(request_type, transport: str = "grpc"): assert response.name == "name_value" assert response.display_name == "display_name_value" assert response.description == "description_value" - assert response.actions == ["actions_value"] - assert response.schemas == ["schemas_value"] assert response.tool_type == gcdc_tool.Tool.ToolType.CUSTOMIZED_TOOL @@ -2839,8 +2815,6 @@ async def test_update_tool_empty_call_async(): name="name_value", display_name="display_name_value", description="description_value", - actions=["actions_value"], - schemas=["schemas_value"], tool_type=gcdc_tool.Tool.ToolType.CUSTOMIZED_TOOL, ) ) @@ -2917,8 +2891,6 @@ async def test_update_tool_async( name="name_value", display_name="display_name_value", 
description="description_value", - actions=["actions_value"], - schemas=["schemas_value"], tool_type=gcdc_tool.Tool.ToolType.CUSTOMIZED_TOOL, ) ) @@ -2935,8 +2907,6 @@ async def test_update_tool_async( assert response.name == "name_value" assert response.display_name == "display_name_value" assert response.description == "description_value" - assert response.actions == ["actions_value"] - assert response.schemas == ["schemas_value"] assert response.tool_type == gcdc_tool.Tool.ToolType.CUSTOMIZED_TOOL @@ -3466,8 +3436,6 @@ def test_create_tool_rest(request_type): "name": "name_value", "display_name": "display_name_value", "description": "description_value", - "actions": ["actions_value1", "actions_value2"], - "schemas": ["schemas_value1", "schemas_value2"], "open_api_spec": { "text_schema": "text_schema_value", "authentication": { @@ -3576,8 +3544,6 @@ def get_message_fields(field): name="name_value", display_name="display_name_value", description="description_value", - actions=["actions_value"], - schemas=["schemas_value"], tool_type=gcdc_tool.Tool.ToolType.CUSTOMIZED_TOOL, ) @@ -3597,8 +3563,6 @@ def get_message_fields(field): assert response.name == "name_value" assert response.display_name == "display_name_value" assert response.description == "description_value" - assert response.actions == ["actions_value"] - assert response.schemas == ["schemas_value"] assert response.tool_type == gcdc_tool.Tool.ToolType.CUSTOMIZED_TOOL @@ -4512,8 +4476,6 @@ def test_get_tool_rest(request_type): name="name_value", display_name="display_name_value", description="description_value", - actions=["actions_value"], - schemas=["schemas_value"], tool_type=tool.Tool.ToolType.CUSTOMIZED_TOOL, ) @@ -4533,8 +4495,6 @@ def test_get_tool_rest(request_type): assert response.name == "name_value" assert response.display_name == "display_name_value" assert response.description == "description_value" - assert response.actions == ["actions_value"] - assert response.schemas == ["schemas_value"] assert response.tool_type == tool.Tool.ToolType.CUSTOMIZED_TOOL @@ -4820,8 +4780,6 @@ def test_update_tool_rest(request_type): "name": "projects/sample1/locations/sample2/agents/sample3/tools/sample4", "display_name": "display_name_value", "description": "description_value", - "actions": ["actions_value1", "actions_value2"], - "schemas": ["schemas_value1", "schemas_value2"], "open_api_spec": { "text_schema": "text_schema_value", "authentication": { @@ -4930,8 +4888,6 @@ def get_message_fields(field): name="name_value", display_name="display_name_value", description="description_value", - actions=["actions_value"], - schemas=["schemas_value"], tool_type=gcdc_tool.Tool.ToolType.CUSTOMIZED_TOOL, ) @@ -4951,8 +4907,6 @@ def get_message_fields(field): assert response.name == "name_value" assert response.display_name == "display_name_value" assert response.description == "description_value" - assert response.actions == ["actions_value"] - assert response.schemas == ["schemas_value"] assert response.tool_type == gcdc_tool.Tool.ToolType.CUSTOMIZED_TOOL diff --git a/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3beta1/test_transition_route_groups.py b/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3beta1/test_transition_route_groups.py index ac0eb242e7aa..7a3e17920cb3 100644 --- a/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3beta1/test_transition_route_groups.py +++ b/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3beta1/test_transition_route_groups.py @@ 
-59,6 +59,7 @@ gcs, page, response_message, + tool_call, ) from google.cloud.dialogflowcx_v3beta1.types import ( transition_route_group as gcdc_transition_route_group, @@ -1640,13 +1641,13 @@ def test_list_transition_route_groups_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_transition_route_groups(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -4207,6 +4208,11 @@ def test_create_transition_route_group_rest(request_type): "phone_number": "phone_number_value" }, "knowledge_info_card": {}, + "tool_call": { + "tool": "tool_value", + "action": "action_value", + "input_parameters": {}, + }, "channel": "channel_value", } ], @@ -4250,6 +4256,8 @@ def test_create_transition_route_group_rest(request_type): "enabled": True, "max_digits": 1065, "finish_digit": "finish_digit_value", + "interdigit_timeout_duration": {}, + "endpointing_timeout_duration": {}, }, "logging_settings": { "enable_stackdriver_logging": True, @@ -4722,6 +4730,11 @@ def test_update_transition_route_group_rest(request_type): "phone_number": "phone_number_value" }, "knowledge_info_card": {}, + "tool_call": { + "tool": "tool_value", + "action": "action_value", + "input_parameters": {}, + }, "channel": "channel_value", } ], @@ -4765,6 +4778,8 @@ def test_update_transition_route_group_rest(request_type): "enabled": True, "max_digits": 1065, "finish_digit": "finish_digit_value", + "interdigit_timeout_duration": {}, + "endpointing_timeout_duration": {}, }, "logging_settings": { "enable_stackdriver_logging": True, @@ -6140,12 +6155,43 @@ def test_parse_page_path(): assert expected == actual -def test_transition_route_group_path(): +def test_tool_path(): project = "whelk" location = "octopus" agent = "oyster" - flow = "nudibranch" - transition_route_group = "cuttlefish" + tool = "nudibranch" + expected = ( + "projects/{project}/locations/{location}/agents/{agent}/tools/{tool}".format( + project=project, + location=location, + agent=agent, + tool=tool, + ) + ) + actual = TransitionRouteGroupsClient.tool_path(project, location, agent, tool) + assert expected == actual + + +def test_parse_tool_path(): + expected = { + "project": "cuttlefish", + "location": "mussel", + "agent": "winkle", + "tool": "nautilus", + } + path = TransitionRouteGroupsClient.tool_path(**expected) + + # Check that the path construction is reversible. 
+ actual = TransitionRouteGroupsClient.parse_tool_path(path) + assert expected == actual + + +def test_transition_route_group_path(): + project = "scallop" + location = "abalone" + agent = "squid" + flow = "clam" + transition_route_group = "whelk" expected = "projects/{project}/locations/{location}/agents/{agent}/flows/{flow}/transitionRouteGroups/{transition_route_group}".format( project=project, location=location, @@ -6161,11 +6207,11 @@ def test_transition_route_group_path(): def test_parse_transition_route_group_path(): expected = { - "project": "mussel", - "location": "winkle", - "agent": "nautilus", - "flow": "scallop", - "transition_route_group": "abalone", + "project": "octopus", + "location": "oyster", + "agent": "nudibranch", + "flow": "cuttlefish", + "transition_route_group": "mussel", } path = TransitionRouteGroupsClient.transition_route_group_path(**expected) @@ -6175,10 +6221,10 @@ def test_parse_transition_route_group_path(): def test_webhook_path(): - project = "squid" - location = "clam" - agent = "whelk" - webhook = "octopus" + project = "winkle" + location = "nautilus" + agent = "scallop" + webhook = "abalone" expected = "projects/{project}/locations/{location}/agents/{agent}/webhooks/{webhook}".format( project=project, location=location, @@ -6191,10 +6237,10 @@ def test_webhook_path(): def test_parse_webhook_path(): expected = { - "project": "oyster", - "location": "nudibranch", - "agent": "cuttlefish", - "webhook": "mussel", + "project": "squid", + "location": "clam", + "agent": "whelk", + "webhook": "octopus", } path = TransitionRouteGroupsClient.webhook_path(**expected) @@ -6204,7 +6250,7 @@ def test_parse_webhook_path(): def test_common_billing_account_path(): - billing_account = "winkle" + billing_account = "oyster" expected = "billingAccounts/{billing_account}".format( billing_account=billing_account, ) @@ -6214,7 +6260,7 @@ def test_common_billing_account_path(): def test_parse_common_billing_account_path(): expected = { - "billing_account": "nautilus", + "billing_account": "nudibranch", } path = TransitionRouteGroupsClient.common_billing_account_path(**expected) @@ -6224,7 +6270,7 @@ def test_parse_common_billing_account_path(): def test_common_folder_path(): - folder = "scallop" + folder = "cuttlefish" expected = "folders/{folder}".format( folder=folder, ) @@ -6234,7 +6280,7 @@ def test_common_folder_path(): def test_parse_common_folder_path(): expected = { - "folder": "abalone", + "folder": "mussel", } path = TransitionRouteGroupsClient.common_folder_path(**expected) @@ -6244,7 +6290,7 @@ def test_parse_common_folder_path(): def test_common_organization_path(): - organization = "squid" + organization = "winkle" expected = "organizations/{organization}".format( organization=organization, ) @@ -6254,7 +6300,7 @@ def test_common_organization_path(): def test_parse_common_organization_path(): expected = { - "organization": "clam", + "organization": "nautilus", } path = TransitionRouteGroupsClient.common_organization_path(**expected) @@ -6264,7 +6310,7 @@ def test_parse_common_organization_path(): def test_common_project_path(): - project = "whelk" + project = "scallop" expected = "projects/{project}".format( project=project, ) @@ -6274,7 +6320,7 @@ def test_common_project_path(): def test_parse_common_project_path(): expected = { - "project": "octopus", + "project": "abalone", } path = TransitionRouteGroupsClient.common_project_path(**expected) @@ -6284,8 +6330,8 @@ def test_parse_common_project_path(): def test_common_location_path(): - project = "oyster" - 
location = "nudibranch" + project = "squid" + location = "clam" expected = "projects/{project}/locations/{location}".format( project=project, location=location, @@ -6296,8 +6342,8 @@ def test_common_location_path(): def test_parse_common_location_path(): expected = { - "project": "cuttlefish", - "location": "mussel", + "project": "whelk", + "location": "octopus", } path = TransitionRouteGroupsClient.common_location_path(**expected) diff --git a/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3beta1/test_versions.py b/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3beta1/test_versions.py index 65bbab12a1a0..45cfa7606fb5 100644 --- a/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3beta1/test_versions.py +++ b/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3beta1/test_versions.py @@ -1488,13 +1488,13 @@ def test_list_versions_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_versions(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 diff --git a/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3beta1/test_webhooks.py b/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3beta1/test_webhooks.py index 49870bdaf080..ead955184063 100644 --- a/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3beta1/test_webhooks.py +++ b/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3beta1/test_webhooks.py @@ -1476,13 +1476,13 @@ def test_list_webhooks_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_webhooks(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 diff --git a/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2/test_agents.py b/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2/test_agents.py index 00e6960518ea..fcc6cd1ca084 100644 --- a/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2/test_agents.py +++ b/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2/test_agents.py @@ -2668,13 +2668,13 @@ def test_search_agents_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.search_agents(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 diff --git a/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2/test_answer_records.py b/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2/test_answer_records.py index 35a392e579fb..a6f369682f8f 100644 --- a/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2/test_answer_records.py +++ b/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2/test_answer_records.py @@ -1556,13 +1556,13 @@ def test_list_answer_records_pager(transport_name: str = 
"grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_answer_records(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 diff --git a/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2/test_contexts.py b/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2/test_contexts.py index 133578accb48..d60c0f494213 100644 --- a/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2/test_contexts.py +++ b/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2/test_contexts.py @@ -1476,13 +1476,13 @@ def test_list_contexts_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_contexts(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 diff --git a/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2/test_conversation_datasets.py b/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2/test_conversation_datasets.py index 0e9a66b9de5a..fa3a462d2113 100644 --- a/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2/test_conversation_datasets.py +++ b/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2/test_conversation_datasets.py @@ -2457,13 +2457,13 @@ def test_list_conversation_datasets_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_conversation_datasets(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 diff --git a/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2/test_conversation_models.py b/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2/test_conversation_models.py index 881072b5840d..e49a8275cdb8 100644 --- a/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2/test_conversation_models.py +++ b/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2/test_conversation_models.py @@ -2430,13 +2430,13 @@ def test_list_conversation_models_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_conversation_models(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -4441,13 +4441,13 @@ def test_list_conversation_model_evaluations_pager(transport_name: str = "grpc") RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_conversation_model_evaluations(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = 
list(pager) assert len(results) == 6 diff --git a/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2/test_conversation_profiles.py b/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2/test_conversation_profiles.py index 44370cc15f38..a70f092d15cd 100644 --- a/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2/test_conversation_profiles.py +++ b/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2/test_conversation_profiles.py @@ -1640,13 +1640,13 @@ def test_list_conversation_profiles_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_conversation_profiles(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 diff --git a/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2/test_conversations.py b/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2/test_conversations.py index fd60a060f9f7..eb249bf56c62 100644 --- a/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2/test_conversations.py +++ b/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2/test_conversations.py @@ -1988,13 +1988,13 @@ def test_list_conversations_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_conversations(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -3371,13 +3371,13 @@ def test_list_messages_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_messages(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 diff --git a/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2/test_documents.py b/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2/test_documents.py index 342433423455..7d4abe6224ed 100644 --- a/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2/test_documents.py +++ b/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2/test_documents.py @@ -1502,13 +1502,13 @@ def test_list_documents_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_documents(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 diff --git a/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2/test_entity_types.py b/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2/test_entity_types.py index 5d1cf5b07d3b..92c1d4f3e065 100644 --- a/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2/test_entity_types.py +++ 
b/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2/test_entity_types.py @@ -1539,13 +1539,13 @@ def test_list_entity_types_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_entity_types(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 diff --git a/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2/test_environments.py b/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2/test_environments.py index a7df23f9b3aa..cf81a8bfb53f 100644 --- a/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2/test_environments.py +++ b/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2/test_environments.py @@ -1530,13 +1530,13 @@ def test_list_environments_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_environments(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -3273,13 +3273,13 @@ def test_get_environment_history_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.get_environment_history(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 diff --git a/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2/test_intents.py b/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2/test_intents.py index 563ad6137e6e..fc2cb5a6b3c0 100644 --- a/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2/test_intents.py +++ b/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2/test_intents.py @@ -1488,13 +1488,13 @@ def test_list_intents_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_intents(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 diff --git a/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2/test_knowledge_bases.py b/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2/test_knowledge_bases.py index 46c9370267a3..04c333b2d9bb 100644 --- a/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2/test_knowledge_bases.py +++ b/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2/test_knowledge_bases.py @@ -1558,13 +1558,13 @@ def test_list_knowledge_bases_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_knowledge_bases(request={}) - assert pager._metadata == metadata + 
assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 diff --git a/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2/test_participants.py b/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2/test_participants.py index 059138a596b9..8f26422836d1 100644 --- a/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2/test_participants.py +++ b/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2/test_participants.py @@ -2335,13 +2335,13 @@ def test_list_participants_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_participants(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 diff --git a/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2/test_session_entity_types.py b/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2/test_session_entity_types.py index c0c6fd533bf8..610cef8c5ee3 100644 --- a/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2/test_session_entity_types.py +++ b/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2/test_session_entity_types.py @@ -1602,13 +1602,13 @@ def test_list_session_entity_types_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_session_entity_types(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 diff --git a/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2/test_versions.py b/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2/test_versions.py index 680cafc72607..867029c3fe63 100644 --- a/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2/test_versions.py +++ b/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2/test_versions.py @@ -1476,13 +1476,13 @@ def test_list_versions_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_versions(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 diff --git a/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2beta1/test_agents.py b/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2beta1/test_agents.py index a7097319a309..8c418417beb2 100644 --- a/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2beta1/test_agents.py +++ b/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2beta1/test_agents.py @@ -2668,13 +2668,13 @@ def test_search_agents_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.search_agents(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata 
results = list(pager) assert len(results) == 6 diff --git a/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2beta1/test_answer_records.py b/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2beta1/test_answer_records.py index 08c5047afbe9..0170e90e68ca 100644 --- a/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2beta1/test_answer_records.py +++ b/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2beta1/test_answer_records.py @@ -1859,13 +1859,13 @@ def test_list_answer_records_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_answer_records(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 diff --git a/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2beta1/test_contexts.py b/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2beta1/test_contexts.py index e1aabd3a76e7..7e7836d92dae 100644 --- a/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2beta1/test_contexts.py +++ b/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2beta1/test_contexts.py @@ -1476,13 +1476,13 @@ def test_list_contexts_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_contexts(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 diff --git a/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2beta1/test_conversation_profiles.py b/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2beta1/test_conversation_profiles.py index 68a1397ca854..2ca9e5a9e71f 100644 --- a/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2beta1/test_conversation_profiles.py +++ b/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2beta1/test_conversation_profiles.py @@ -1640,13 +1640,13 @@ def test_list_conversation_profiles_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_conversation_profiles(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 diff --git a/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2beta1/test_conversations.py b/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2beta1/test_conversations.py index 3591c167bb87..6ebfd4e8d8f1 100644 --- a/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2beta1/test_conversations.py +++ b/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2beta1/test_conversations.py @@ -1992,13 +1992,13 @@ def test_list_conversations_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_conversations(request={}) - assert 
pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -3760,13 +3760,13 @@ def test_list_messages_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_messages(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 diff --git a/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2beta1/test_documents.py b/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2beta1/test_documents.py index 15f085069fa2..f1de050479fb 100644 --- a/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2beta1/test_documents.py +++ b/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2beta1/test_documents.py @@ -1502,13 +1502,13 @@ def test_list_documents_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_documents(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 diff --git a/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2beta1/test_entity_types.py b/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2beta1/test_entity_types.py index 6a7671ef87d9..fa6d3edbbe47 100644 --- a/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2beta1/test_entity_types.py +++ b/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2beta1/test_entity_types.py @@ -1539,13 +1539,13 @@ def test_list_entity_types_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_entity_types(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 diff --git a/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2beta1/test_environments.py b/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2beta1/test_environments.py index bf6bb95f2348..3da203bd38c3 100644 --- a/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2beta1/test_environments.py +++ b/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2beta1/test_environments.py @@ -1530,13 +1530,13 @@ def test_list_environments_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_environments(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -3273,13 +3273,13 @@ def test_get_environment_history_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = 
client.get_environment_history(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 diff --git a/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2beta1/test_intents.py b/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2beta1/test_intents.py index 3a048cf3af10..7df1da7be7f9 100644 --- a/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2beta1/test_intents.py +++ b/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2beta1/test_intents.py @@ -1488,13 +1488,13 @@ def test_list_intents_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_intents(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 diff --git a/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2beta1/test_knowledge_bases.py b/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2beta1/test_knowledge_bases.py index 17e28a2b120f..3f2bf4688b90 100644 --- a/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2beta1/test_knowledge_bases.py +++ b/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2beta1/test_knowledge_bases.py @@ -1558,13 +1558,13 @@ def test_list_knowledge_bases_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_knowledge_bases(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 diff --git a/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2beta1/test_participants.py b/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2beta1/test_participants.py index 4e140e64af45..ec2eea94fba1 100644 --- a/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2beta1/test_participants.py +++ b/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2beta1/test_participants.py @@ -2332,13 +2332,13 @@ def test_list_participants_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_participants(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -4970,13 +4970,13 @@ def test_list_suggestions_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_suggestions(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 diff --git a/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2beta1/test_session_entity_types.py b/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2beta1/test_session_entity_types.py index 770b5f1f9fde..d4a5bc57f8a7 
100644 --- a/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2beta1/test_session_entity_types.py +++ b/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2beta1/test_session_entity_types.py @@ -1602,13 +1602,13 @@ def test_list_session_entity_types_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_session_entity_types(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 diff --git a/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2beta1/test_versions.py b/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2beta1/test_versions.py index 486ab7975e4d..d1f380174637 100644 --- a/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2beta1/test_versions.py +++ b/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2beta1/test_versions.py @@ -1476,13 +1476,13 @@ def test_list_versions_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_versions(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 diff --git a/packages/google-cloud-discoveryengine/CHANGELOG.md b/packages/google-cloud-discoveryengine/CHANGELOG.md index fcedab2d43cf..2b9e9bfb4bb8 100644 --- a/packages/google-cloud-discoveryengine/CHANGELOG.md +++ b/packages/google-cloud-discoveryengine/CHANGELOG.md @@ -1,5 +1,40 @@ # Changelog +## [0.11.13](https://github.com/googleapis/google-cloud-python/compare/google-cloud-discoveryengine-v0.11.12...google-cloud-discoveryengine-v0.11.13) (2024-05-29) + + +### Features + +* add control service APIs ([5980ee6](https://github.com/googleapis/google-cloud-python/commit/5980ee6c11023b02e231f09aab134a07b64c745a)) +* add custom model list API ([5980ee6](https://github.com/googleapis/google-cloud-python/commit/5980ee6c11023b02e231f09aab134a07b64c745a)) +* add provision project API ([5980ee6](https://github.com/googleapis/google-cloud-python/commit/5980ee6c11023b02e231f09aab134a07b64c745a)) +* return relevance score for chunk based search (alpha only) ([5980ee6](https://github.com/googleapis/google-cloud-python/commit/5980ee6c11023b02e231f09aab134a07b64c745a)) +* support cancelling import operations ([5980ee6](https://github.com/googleapis/google-cloud-python/commit/5980ee6c11023b02e231f09aab134a07b64c745a)) +* support multiple parent patterns for controls ([5980ee6](https://github.com/googleapis/google-cloud-python/commit/5980ee6c11023b02e231f09aab134a07b64c745a)) +* support writing user events for blended engines ([5980ee6](https://github.com/googleapis/google-cloud-python/commit/5980ee6c11023b02e231f09aab134a07b64c745a)) + + 
+### Documentation + +* keep the API doc up-to-date with recent changes ([5980ee6](https://github.com/googleapis/google-cloud-python/commit/5980ee6c11023b02e231f09aab134a07b64c745a)) + +## [0.11.12](https://github.com/googleapis/google-cloud-python/compare/google-cloud-discoveryengine-v0.11.11...google-cloud-discoveryengine-v0.11.12) (2024-05-27) + + +### Features + +* add control service APIs ([ef8e38a](https://github.com/googleapis/google-cloud-python/commit/ef8e38a8552601c3605bf8768d72b437737c16d5)) +* promote answer APIs to v1 GA ([ef8e38a](https://github.com/googleapis/google-cloud-python/commit/ef8e38a8552601c3605bf8768d72b437737c16d5)) +* promote grounding check APIs to v1 GA ([ef8e38a](https://github.com/googleapis/google-cloud-python/commit/ef8e38a8552601c3605bf8768d72b437737c16d5)) +* promote ranking APIs to v1 GA ([ef8e38a](https://github.com/googleapis/google-cloud-python/commit/ef8e38a8552601c3605bf8768d72b437737c16d5)) +* support cancelling import operations ([ef8e38a](https://github.com/googleapis/google-cloud-python/commit/ef8e38a8552601c3605bf8768d72b437737c16d5)) +* support writing user events for blended engines ([ef8e38a](https://github.com/googleapis/google-cloud-python/commit/ef8e38a8552601c3605bf8768d72b437737c16d5)) + + +### Documentation + +* keep the API doc up-to-date with recent changes ([ef8e38a](https://github.com/googleapis/google-cloud-python/commit/ef8e38a8552601c3605bf8768d72b437737c16d5)) + ## [0.11.11](https://github.com/googleapis/google-cloud-python/compare/google-cloud-discoveryengine-v0.11.10...google-cloud-discoveryengine-v0.11.11) (2024-04-15) diff --git a/packages/google-cloud-discoveryengine/docs/discoveryengine_v1/control_service.rst b/packages/google-cloud-discoveryengine/docs/discoveryengine_v1/control_service.rst new file mode 100644 index 000000000000..063a3a695d9e --- /dev/null +++ b/packages/google-cloud-discoveryengine/docs/discoveryengine_v1/control_service.rst @@ -0,0 +1,10 @@ +ControlService +-------------------------------- + +.. automodule:: google.cloud.discoveryengine_v1.services.control_service + :members: + :inherited-members: + +.. automodule:: google.cloud.discoveryengine_v1.services.control_service.pagers + :members: + :inherited-members: diff --git a/packages/google-cloud-discoveryengine/docs/discoveryengine_v1/grounded_generation_service.rst b/packages/google-cloud-discoveryengine/docs/discoveryengine_v1/grounded_generation_service.rst new file mode 100644 index 000000000000..f23e30da1dad --- /dev/null +++ b/packages/google-cloud-discoveryengine/docs/discoveryengine_v1/grounded_generation_service.rst @@ -0,0 +1,6 @@ +GroundedGenerationService +------------------------------------------- + +.. 
automodule:: google.cloud.discoveryengine_v1.services.grounded_generation_service + :members: + :inherited-members: diff --git a/packages/google-cloud-discoveryengine/docs/discoveryengine_v1/project_service.rst b/packages/google-cloud-discoveryengine/docs/discoveryengine_v1/project_service.rst new file mode 100644 index 000000000000..e28e38b17c7a --- /dev/null +++ b/packages/google-cloud-discoveryengine/docs/discoveryengine_v1/project_service.rst @@ -0,0 +1,6 @@ +ProjectService +-------------------------------- + +.. automodule:: google.cloud.discoveryengine_v1.services.project_service + :members: + :inherited-members: diff --git a/packages/google-cloud-discoveryengine/docs/discoveryengine_v1/rank_service.rst b/packages/google-cloud-discoveryengine/docs/discoveryengine_v1/rank_service.rst new file mode 100644 index 000000000000..5dcf003fa5d1 --- /dev/null +++ b/packages/google-cloud-discoveryengine/docs/discoveryengine_v1/rank_service.rst @@ -0,0 +1,6 @@ +RankService +----------------------------- + +.. automodule:: google.cloud.discoveryengine_v1.services.rank_service + :members: + :inherited-members: diff --git a/packages/google-cloud-discoveryengine/docs/discoveryengine_v1/services_.rst b/packages/google-cloud-discoveryengine/docs/discoveryengine_v1/services_.rst index 35f5180cc288..6e0bbcda2e72 100644 --- a/packages/google-cloud-discoveryengine/docs/discoveryengine_v1/services_.rst +++ b/packages/google-cloud-discoveryengine/docs/discoveryengine_v1/services_.rst @@ -4,10 +4,14 @@ Services for Google Cloud Discoveryengine v1 API :maxdepth: 2 completion_service + control_service conversational_search_service data_store_service document_service engine_service + grounded_generation_service + project_service + rank_service recommendation_service schema_service search_service diff --git a/packages/google-cloud-discoveryengine/docs/discoveryengine_v1alpha/control_service.rst b/packages/google-cloud-discoveryengine/docs/discoveryengine_v1alpha/control_service.rst new file mode 100644 index 000000000000..226b1e7ddd7e --- /dev/null +++ b/packages/google-cloud-discoveryengine/docs/discoveryengine_v1alpha/control_service.rst @@ -0,0 +1,10 @@ +ControlService +-------------------------------- + +.. automodule:: google.cloud.discoveryengine_v1alpha.services.control_service + :members: + :inherited-members: + +.. automodule:: google.cloud.discoveryengine_v1alpha.services.control_service.pagers + :members: + :inherited-members: diff --git a/packages/google-cloud-discoveryengine/docs/discoveryengine_v1alpha/services_.rst b/packages/google-cloud-discoveryengine/docs/discoveryengine_v1alpha/services_.rst index 60d803aa16b3..eb7b1c46f036 100644 --- a/packages/google-cloud-discoveryengine/docs/discoveryengine_v1alpha/services_.rst +++ b/packages/google-cloud-discoveryengine/docs/discoveryengine_v1alpha/services_.rst @@ -6,6 +6,7 @@ Services for Google Cloud Discoveryengine v1alpha API acl_config_service chunk_service completion_service + control_service conversational_search_service data_store_service document_service diff --git a/packages/google-cloud-discoveryengine/docs/discoveryengine_v1beta/control_service.rst b/packages/google-cloud-discoveryengine/docs/discoveryengine_v1beta/control_service.rst new file mode 100644 index 000000000000..2f167ea49481 --- /dev/null +++ b/packages/google-cloud-discoveryengine/docs/discoveryengine_v1beta/control_service.rst @@ -0,0 +1,10 @@ +ControlService +-------------------------------- + +.. 
automodule:: google.cloud.discoveryengine_v1beta.services.control_service + :members: + :inherited-members: + +.. automodule:: google.cloud.discoveryengine_v1beta.services.control_service.pagers + :members: + :inherited-members: diff --git a/packages/google-cloud-discoveryengine/docs/discoveryengine_v1beta/project_service.rst b/packages/google-cloud-discoveryengine/docs/discoveryengine_v1beta/project_service.rst new file mode 100644 index 000000000000..24b4e3fdeb29 --- /dev/null +++ b/packages/google-cloud-discoveryengine/docs/discoveryengine_v1beta/project_service.rst @@ -0,0 +1,6 @@ +ProjectService +-------------------------------- + +.. automodule:: google.cloud.discoveryengine_v1beta.services.project_service + :members: + :inherited-members: diff --git a/packages/google-cloud-discoveryengine/docs/discoveryengine_v1beta/services_.rst b/packages/google-cloud-discoveryengine/docs/discoveryengine_v1beta/services_.rst index fc408aab147f..216a1c4e58c6 100644 --- a/packages/google-cloud-discoveryengine/docs/discoveryengine_v1beta/services_.rst +++ b/packages/google-cloud-discoveryengine/docs/discoveryengine_v1beta/services_.rst @@ -4,11 +4,13 @@ Services for Google Cloud Discoveryengine v1beta API :maxdepth: 2 completion_service + control_service conversational_search_service data_store_service document_service engine_service grounded_generation_service + project_service rank_service recommendation_service schema_service diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine/__init__.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine/__init__.py index c26cfa7ca75f..0687094e6a64 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine/__init__.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine/__init__.py @@ -24,6 +24,12 @@ from google.cloud.discoveryengine_v1beta.services.completion_service.client import ( CompletionServiceClient, ) +from google.cloud.discoveryengine_v1beta.services.control_service.async_client import ( + ControlServiceAsyncClient, +) +from google.cloud.discoveryengine_v1beta.services.control_service.client import ( + ControlServiceClient, +) from google.cloud.discoveryengine_v1beta.services.conversational_search_service.async_client import ( ConversationalSearchServiceAsyncClient, ) @@ -54,6 +60,12 @@ from google.cloud.discoveryengine_v1beta.services.grounded_generation_service.client import ( GroundedGenerationServiceClient, ) +from google.cloud.discoveryengine_v1beta.services.project_service.async_client import ( + ProjectServiceAsyncClient, +) +from google.cloud.discoveryengine_v1beta.services.project_service.client import ( + ProjectServiceClient, +) from google.cloud.discoveryengine_v1beta.services.rank_service.async_client import ( RankServiceAsyncClient, ) @@ -111,6 +123,7 @@ Interval, SearchAddOn, SearchTier, + SearchUseCase, SolutionType, UserInfo, ) @@ -119,6 +132,15 @@ CompleteQueryRequest, CompleteQueryResponse, ) +from google.cloud.discoveryengine_v1beta.types.control import Condition, Control +from google.cloud.discoveryengine_v1beta.types.control_service import ( + CreateControlRequest, + DeleteControlRequest, + GetControlRequest, + ListControlsRequest, + ListControlsResponse, + UpdateControlRequest, +) from google.cloud.discoveryengine_v1beta.types.conversation import ( Conversation, ConversationContext, @@ -145,6 +167,9 @@ UpdateConversationRequest, UpdateSessionRequest, ) +from google.cloud.discoveryengine_v1beta.types.custom_tuning_model import ( + 
CustomTuningModel, +) from google.cloud.discoveryengine_v1beta.types.data_store import DataStore from google.cloud.discoveryengine_v1beta.types.data_store_service import ( CreateDataStoreMetadata, @@ -210,6 +235,11 @@ ImportUserEventsResponse, SpannerSource, ) +from google.cloud.discoveryengine_v1beta.types.project import Project +from google.cloud.discoveryengine_v1beta.types.project_service import ( + ProvisionProjectMetadata, + ProvisionProjectRequest, +) from google.cloud.discoveryengine_v1beta.types.purge_config import ( PurgeDocumentsMetadata, PurgeDocumentsRequest, @@ -244,6 +274,8 @@ SearchResponse, ) from google.cloud.discoveryengine_v1beta.types.search_tuning_service import ( + ListCustomModelsRequest, + ListCustomModelsResponse, TrainCustomModelMetadata, TrainCustomModelRequest, TrainCustomModelResponse, @@ -308,6 +340,8 @@ __all__ = ( "CompletionServiceClient", "CompletionServiceAsyncClient", + "ControlServiceClient", + "ControlServiceAsyncClient", "ConversationalSearchServiceClient", "ConversationalSearchServiceAsyncClient", "DataStoreServiceClient", @@ -318,6 +352,8 @@ "EngineServiceAsyncClient", "GroundedGenerationServiceClient", "GroundedGenerationServiceAsyncClient", + "ProjectServiceClient", + "ProjectServiceAsyncClient", "RankServiceClient", "RankServiceAsyncClient", "RecommendationServiceClient", @@ -343,10 +379,19 @@ "IndustryVertical", "SearchAddOn", "SearchTier", + "SearchUseCase", "SolutionType", "SuggestionDenyListEntry", "CompleteQueryRequest", "CompleteQueryResponse", + "Condition", + "Control", + "CreateControlRequest", + "DeleteControlRequest", + "GetControlRequest", + "ListControlsRequest", + "ListControlsResponse", + "UpdateControlRequest", "Conversation", "ConversationContext", "ConversationMessage", @@ -369,6 +414,7 @@ "ListSessionsResponse", "UpdateConversationRequest", "UpdateSessionRequest", + "CustomTuningModel", "DataStore", "CreateDataStoreMetadata", "CreateDataStoreRequest", @@ -423,6 +469,9 @@ "ImportUserEventsRequest", "ImportUserEventsResponse", "SpannerSource", + "Project", + "ProvisionProjectMetadata", + "ProvisionProjectRequest", "PurgeDocumentsMetadata", "PurgeDocumentsRequest", "PurgeDocumentsResponse", @@ -446,6 +495,8 @@ "UpdateSchemaRequest", "SearchRequest", "SearchResponse", + "ListCustomModelsRequest", + "ListCustomModelsResponse", "TrainCustomModelMetadata", "TrainCustomModelRequest", "TrainCustomModelResponse", diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/__init__.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/__init__.py index 1d8e773dcbae..14ab21922940 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/__init__.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/__init__.py @@ -22,6 +22,7 @@ CompletionServiceAsyncClient, CompletionServiceClient, ) +from .services.control_service import ControlServiceAsyncClient, ControlServiceClient from .services.conversational_search_service import ( ConversationalSearchServiceAsyncClient, ConversationalSearchServiceClient, @@ -32,6 +33,12 @@ ) from .services.document_service import DocumentServiceAsyncClient, DocumentServiceClient from .services.engine_service import EngineServiceAsyncClient, EngineServiceClient +from .services.grounded_generation_service import ( + GroundedGenerationServiceAsyncClient, + GroundedGenerationServiceClient, +) +from .services.project_service import ProjectServiceAsyncClient, ProjectServiceClient +from .services.rank_service import 
RankServiceAsyncClient, RankServiceClient from .services.recommendation_service import ( RecommendationServiceAsyncClient, RecommendationServiceClient, @@ -46,17 +53,28 @@ UserEventServiceAsyncClient, UserEventServiceClient, ) +from .types.answer import Answer from .types.common import ( CustomAttribute, IndustryVertical, Interval, SearchAddOn, SearchTier, + SearchUseCase, SolutionType, UserInfo, ) from .types.completion import SuggestionDenyListEntry from .types.completion_service import CompleteQueryRequest, CompleteQueryResponse +from .types.control import Condition, Control +from .types.control_service import ( + CreateControlRequest, + DeleteControlRequest, + GetControlRequest, + ListControlsRequest, + ListControlsResponse, + UpdateControlRequest, +) from .types.conversation import ( Conversation, ConversationContext, @@ -65,14 +83,23 @@ TextInput, ) from .types.conversational_search_service import ( + AnswerQueryRequest, + AnswerQueryResponse, ConverseConversationRequest, ConverseConversationResponse, CreateConversationRequest, + CreateSessionRequest, DeleteConversationRequest, + DeleteSessionRequest, + GetAnswerRequest, GetConversationRequest, + GetSessionRequest, ListConversationsRequest, ListConversationsResponse, + ListSessionsRequest, + ListSessionsResponse, UpdateConversationRequest, + UpdateSessionRequest, ) from .types.data_store import DataStore from .types.data_store_service import ( @@ -86,6 +113,7 @@ UpdateDataStoreRequest, ) from .types.document import Document +from .types.document_processing_config import DocumentProcessingConfig from .types.document_service import ( CreateDocumentRequest, DeleteDocumentRequest, @@ -105,6 +133,12 @@ ListEnginesResponse, UpdateEngineRequest, ) +from .types.grounded_generation_service import ( + CheckGroundingRequest, + CheckGroundingResponse, + CheckGroundingSpec, +) +from .types.grounding import FactChunk, GroundingFact from .types.import_config import ( BigQuerySource, BigtableOptions, @@ -125,6 +159,8 @@ ImportUserEventsResponse, SpannerSource, ) +from .types.project import Project +from .types.project_service import ProvisionProjectMetadata, ProvisionProjectRequest from .types.purge_config import ( PurgeDocumentsMetadata, PurgeDocumentsRequest, @@ -133,6 +169,7 @@ PurgeSuggestionDenyListEntriesRequest, PurgeSuggestionDenyListEntriesResponse, ) +from .types.rank_service import RankingRecord, RankRequest, RankResponse from .types.recommendation_service import RecommendRequest, RecommendResponse from .types.schema import Schema from .types.schema_service import ( @@ -147,6 +184,7 @@ UpdateSchemaRequest, ) from .types.search_service import SearchRequest, SearchResponse +from .types.session import Query, Session from .types.site_search_engine import SiteSearchEngine, SiteVerificationInfo, TargetSite from .types.site_search_engine_service import ( BatchCreateTargetSiteMetadata, @@ -191,15 +229,22 @@ __all__ = ( "CompletionServiceAsyncClient", + "ControlServiceAsyncClient", "ConversationalSearchServiceAsyncClient", "DataStoreServiceAsyncClient", "DocumentServiceAsyncClient", "EngineServiceAsyncClient", + "GroundedGenerationServiceAsyncClient", + "ProjectServiceAsyncClient", + "RankServiceAsyncClient", "RecommendationServiceAsyncClient", "SchemaServiceAsyncClient", "SearchServiceAsyncClient", "SiteSearchEngineServiceAsyncClient", "UserEventServiceAsyncClient", + "Answer", + "AnswerQueryRequest", + "AnswerQueryResponse", "BatchCreateTargetSiteMetadata", "BatchCreateTargetSitesRequest", "BatchCreateTargetSitesResponse", @@ -209,18 +254,25 @@ 
"BigQuerySource", "BigtableOptions", "BigtableSource", + "CheckGroundingRequest", + "CheckGroundingResponse", + "CheckGroundingSpec", "CloudSqlSource", "CollectUserEventRequest", "CompleteQueryRequest", "CompleteQueryResponse", "CompletionInfo", "CompletionServiceClient", + "Condition", + "Control", + "ControlServiceClient", "Conversation", "ConversationContext", "ConversationMessage", "ConversationalSearchServiceClient", "ConverseConversationRequest", "ConverseConversationResponse", + "CreateControlRequest", "CreateConversationRequest", "CreateDataStoreMetadata", "CreateDataStoreRequest", @@ -229,11 +281,13 @@ "CreateEngineRequest", "CreateSchemaMetadata", "CreateSchemaRequest", + "CreateSessionRequest", "CreateTargetSiteMetadata", "CreateTargetSiteRequest", "CustomAttribute", "DataStore", "DataStoreServiceClient", + "DeleteControlRequest", "DeleteConversationRequest", "DeleteDataStoreMetadata", "DeleteDataStoreRequest", @@ -242,6 +296,7 @@ "DeleteEngineRequest", "DeleteSchemaMetadata", "DeleteSchemaRequest", + "DeleteSessionRequest", "DeleteTargetSiteMetadata", "DeleteTargetSiteRequest", "DisableAdvancedSiteSearchMetadata", @@ -249,24 +304,31 @@ "DisableAdvancedSiteSearchResponse", "Document", "DocumentInfo", + "DocumentProcessingConfig", "DocumentServiceClient", "EnableAdvancedSiteSearchMetadata", "EnableAdvancedSiteSearchRequest", "EnableAdvancedSiteSearchResponse", "Engine", "EngineServiceClient", + "FactChunk", "FetchDomainVerificationStatusRequest", "FetchDomainVerificationStatusResponse", "FhirStoreSource", "FirestoreSource", "GcsSource", + "GetAnswerRequest", + "GetControlRequest", "GetConversationRequest", "GetDataStoreRequest", "GetDocumentRequest", "GetEngineRequest", "GetSchemaRequest", + "GetSessionRequest", "GetSiteSearchEngineRequest", "GetTargetSiteRequest", + "GroundedGenerationServiceClient", + "GroundingFact", "ImportDocumentsMetadata", "ImportDocumentsRequest", "ImportDocumentsResponse", @@ -279,6 +341,8 @@ "ImportUserEventsResponse", "IndustryVertical", "Interval", + "ListControlsRequest", + "ListControlsResponse", "ListConversationsRequest", "ListConversationsResponse", "ListDataStoresRequest", @@ -289,17 +353,28 @@ "ListEnginesResponse", "ListSchemasRequest", "ListSchemasResponse", + "ListSessionsRequest", + "ListSessionsResponse", "ListTargetSitesRequest", "ListTargetSitesResponse", "MediaInfo", "PageInfo", "PanelInfo", + "Project", + "ProjectServiceClient", + "ProvisionProjectMetadata", + "ProvisionProjectRequest", "PurgeDocumentsMetadata", "PurgeDocumentsRequest", "PurgeDocumentsResponse", "PurgeSuggestionDenyListEntriesMetadata", "PurgeSuggestionDenyListEntriesRequest", "PurgeSuggestionDenyListEntriesResponse", + "Query", + "RankRequest", + "RankResponse", + "RankServiceClient", + "RankingRecord", "RecommendRequest", "RecommendResponse", "RecommendationServiceClient", @@ -315,6 +390,8 @@ "SearchResponse", "SearchServiceClient", "SearchTier", + "SearchUseCase", + "Session", "SiteSearchEngine", "SiteSearchEngineServiceClient", "SiteVerificationInfo", @@ -324,12 +401,14 @@ "TargetSite", "TextInput", "TransactionInfo", + "UpdateControlRequest", "UpdateConversationRequest", "UpdateDataStoreRequest", "UpdateDocumentRequest", "UpdateEngineRequest", "UpdateSchemaMetadata", "UpdateSchemaRequest", + "UpdateSessionRequest", "UpdateTargetSiteMetadata", "UpdateTargetSiteRequest", "UserEvent", diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/gapic_metadata.json 
b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/gapic_metadata.json index 5b1a802b399c..b7c73f71fce0 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/gapic_metadata.json +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/gapic_metadata.json @@ -69,11 +69,110 @@ } } }, + "ControlService": { + "clients": { + "grpc": { + "libraryClient": "ControlServiceClient", + "rpcs": { + "CreateControl": { + "methods": [ + "create_control" + ] + }, + "DeleteControl": { + "methods": [ + "delete_control" + ] + }, + "GetControl": { + "methods": [ + "get_control" + ] + }, + "ListControls": { + "methods": [ + "list_controls" + ] + }, + "UpdateControl": { + "methods": [ + "update_control" + ] + } + } + }, + "grpc-async": { + "libraryClient": "ControlServiceAsyncClient", + "rpcs": { + "CreateControl": { + "methods": [ + "create_control" + ] + }, + "DeleteControl": { + "methods": [ + "delete_control" + ] + }, + "GetControl": { + "methods": [ + "get_control" + ] + }, + "ListControls": { + "methods": [ + "list_controls" + ] + }, + "UpdateControl": { + "methods": [ + "update_control" + ] + } + } + }, + "rest": { + "libraryClient": "ControlServiceClient", + "rpcs": { + "CreateControl": { + "methods": [ + "create_control" + ] + }, + "DeleteControl": { + "methods": [ + "delete_control" + ] + }, + "GetControl": { + "methods": [ + "get_control" + ] + }, + "ListControls": { + "methods": [ + "list_controls" + ] + }, + "UpdateControl": { + "methods": [ + "update_control" + ] + } + } + } + } + }, "ConversationalSearchService": { "clients": { "grpc": { "libraryClient": "ConversationalSearchServiceClient", "rpcs": { + "AnswerQuery": { + "methods": [ + "answer_query" + ] + }, "ConverseConversation": { "methods": [ "converse_conversation" @@ -84,31 +183,66 @@ "create_conversation" ] }, + "CreateSession": { + "methods": [ + "create_session" + ] + }, "DeleteConversation": { "methods": [ "delete_conversation" ] }, + "DeleteSession": { + "methods": [ + "delete_session" + ] + }, + "GetAnswer": { + "methods": [ + "get_answer" + ] + }, "GetConversation": { "methods": [ "get_conversation" ] }, + "GetSession": { + "methods": [ + "get_session" + ] + }, "ListConversations": { "methods": [ "list_conversations" ] }, + "ListSessions": { + "methods": [ + "list_sessions" + ] + }, "UpdateConversation": { "methods": [ "update_conversation" ] + }, + "UpdateSession": { + "methods": [ + "update_session" + ] } } }, "grpc-async": { "libraryClient": "ConversationalSearchServiceAsyncClient", "rpcs": { + "AnswerQuery": { + "methods": [ + "answer_query" + ] + }, "ConverseConversation": { "methods": [ "converse_conversation" @@ -119,31 +253,66 @@ "create_conversation" ] }, + "CreateSession": { + "methods": [ + "create_session" + ] + }, "DeleteConversation": { "methods": [ "delete_conversation" ] }, + "DeleteSession": { + "methods": [ + "delete_session" + ] + }, + "GetAnswer": { + "methods": [ + "get_answer" + ] + }, "GetConversation": { "methods": [ "get_conversation" ] }, + "GetSession": { + "methods": [ + "get_session" + ] + }, "ListConversations": { "methods": [ "list_conversations" ] }, + "ListSessions": { + "methods": [ + "list_sessions" + ] + }, "UpdateConversation": { "methods": [ "update_conversation" ] + }, + "UpdateSession": { + "methods": [ + "update_session" + ] } } }, "rest": { "libraryClient": "ConversationalSearchServiceClient", "rpcs": { + "AnswerQuery": { + "methods": [ + "answer_query" + ] + }, "ConverseConversation": { "methods": [ "converse_conversation" 
@@ -154,25 +323,55 @@ "create_conversation" ] }, + "CreateSession": { + "methods": [ + "create_session" + ] + }, "DeleteConversation": { "methods": [ "delete_conversation" ] }, + "DeleteSession": { + "methods": [ + "delete_session" + ] + }, + "GetAnswer": { + "methods": [ + "get_answer" + ] + }, "GetConversation": { "methods": [ "get_conversation" ] }, + "GetSession": { + "methods": [ + "get_session" + ] + }, "ListConversations": { "methods": [ "list_conversations" ] }, + "ListSessions": { + "methods": [ + "list_sessions" + ] + }, "UpdateConversation": { "methods": [ "update_conversation" ] + }, + "UpdateSession": { + "methods": [ + "update_session" + ] } } } @@ -490,6 +689,108 @@ } } }, + "GroundedGenerationService": { + "clients": { + "grpc": { + "libraryClient": "GroundedGenerationServiceClient", + "rpcs": { + "CheckGrounding": { + "methods": [ + "check_grounding" + ] + } + } + }, + "grpc-async": { + "libraryClient": "GroundedGenerationServiceAsyncClient", + "rpcs": { + "CheckGrounding": { + "methods": [ + "check_grounding" + ] + } + } + }, + "rest": { + "libraryClient": "GroundedGenerationServiceClient", + "rpcs": { + "CheckGrounding": { + "methods": [ + "check_grounding" + ] + } + } + } + } + }, + "ProjectService": { + "clients": { + "grpc": { + "libraryClient": "ProjectServiceClient", + "rpcs": { + "ProvisionProject": { + "methods": [ + "provision_project" + ] + } + } + }, + "grpc-async": { + "libraryClient": "ProjectServiceAsyncClient", + "rpcs": { + "ProvisionProject": { + "methods": [ + "provision_project" + ] + } + } + }, + "rest": { + "libraryClient": "ProjectServiceClient", + "rpcs": { + "ProvisionProject": { + "methods": [ + "provision_project" + ] + } + } + } + } + }, + "RankService": { + "clients": { + "grpc": { + "libraryClient": "RankServiceClient", + "rpcs": { + "Rank": { + "methods": [ + "rank" + ] + } + } + }, + "grpc-async": { + "libraryClient": "RankServiceAsyncClient", + "rpcs": { + "Rank": { + "methods": [ + "rank" + ] + } + } + }, + "rest": { + "libraryClient": "RankServiceClient", + "rpcs": { + "Rank": { + "methods": [ + "rank" + ] + } + } + } + } + }, "RecommendationService": { "clients": { "grpc": { diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/completion_service/async_client.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/completion_service/async_client.py index 0def26130da1..cc3bbb89b86c 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/completion_service/async_client.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/completion_service/async_client.py @@ -688,6 +688,63 @@ async def get_operation( # Done; return the response. return response + async def cancel_operation( + self, + request: Optional[operations_pb2.CancelOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Starts asynchronous cancellation on a long-running operation. + + The server makes a best effort to cancel the operation, but success + is not guaranteed. If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.CancelOperationRequest`): + The request object. Request message for + `CancelOperation` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. 
+ timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.CancelOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.cancel_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + async def __aenter__(self) -> "CompletionServiceAsyncClient": return self diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/completion_service/client.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/completion_service/client.py index 34d5942d375f..8af68de32b7a 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/completion_service/client.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/completion_service/client.py @@ -1120,6 +1120,63 @@ def get_operation( # Done; return the response. return response + def cancel_operation( + self, + request: Optional[operations_pb2.CancelOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Starts asynchronous cancellation on a long-running operation. + + The server makes a best effort to cancel the operation, but success + is not guaranteed. If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.CancelOperationRequest`): + The request object. Request message for + `CancelOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.CancelOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.cancel_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. 
+ rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( gapic_version=package_version.__version__ diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/completion_service/transports/base.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/completion_service/transports/base.py index b624242ea6b6..94391e5072b2 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/completion_service/transports/base.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/completion_service/transports/base.py @@ -214,6 +214,12 @@ def get_operation( ]: raise NotImplementedError() + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None,]: + raise NotImplementedError() + @property def kind(self) -> str: raise NotImplementedError() diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/completion_service/transports/grpc.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/completion_service/transports/grpc.py index 356098ee8296..e1c70062a94f 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/completion_service/transports/grpc.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/completion_service/transports/grpc.py @@ -353,6 +353,23 @@ def purge_suggestion_deny_list_entries( def close(self): self.grpc_channel.close() + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None]: + r"""Return a callable for the cancel_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "cancel_operation" not in self._stubs: + self._stubs["cancel_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/CancelOperation", + request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["cancel_operation"] + @property def get_operation( self, diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/completion_service/transports/grpc_asyncio.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/completion_service/transports/grpc_asyncio.py index 1ac49a6d26e8..fd2f9fc9159d 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/completion_service/transports/grpc_asyncio.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/completion_service/transports/grpc_asyncio.py @@ -381,6 +381,23 @@ def _prep_wrapped_messages(self, client_info): def close(self): return self.grpc_channel.close() + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None]: + r"""Return a callable for the cancel_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "cancel_operation" not in self._stubs: + self._stubs["cancel_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/CancelOperation", + request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["cancel_operation"] + @property def get_operation( self, diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/completion_service/transports/rest.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/completion_service/transports/rest.py index a03b292e97a6..f8a54d607608 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/completion_service/transports/rest.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/completion_service/transports/rest.py @@ -179,6 +179,27 @@ def post_purge_suggestion_deny_list_entries( """ return response + def pre_cancel_operation( + self, + request: operations_pb2.CancelOperationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[operations_pb2.CancelOperationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for cancel_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the CompletionService server. + """ + return request, metadata + + def post_cancel_operation(self, response: None) -> None: + """Post-rpc interceptor for cancel_operation + + Override in a subclass to manipulate the response + after it is returned by the CompletionService server but before + it is returned to user code. + """ + return response + def pre_get_operation( self, request: operations_pb2.GetOperationRequest, @@ -333,6 +354,23 @@ def operations_client(self) -> operations_v1.AbstractOperationsClient: # Only create a new client if we do not already have one. if self._operations_client is None: http_options: Dict[str, List[Dict[str, str]]] = { + "google.longrunning.Operations.CancelOperation": [ + { + "method": "post", + "uri": "/v1/{name=projects/*/operations/*}:cancel", + "body": "*", + }, + { + "method": "post", + "uri": "/v1/{name=projects/*/locations/*/collections/*/dataStores/*/branches/*/operations/*}:cancel", + "body": "*", + }, + { + "method": "post", + "uri": "/v1/{name=projects/*/locations/*/dataStores/*/branches/*/operations/*}:cancel", + "body": "*", + }, + ], "google.longrunning.Operations.GetOperation": [ { "method": "get", @@ -816,6 +854,81 @@ def purge_suggestion_deny_list_entries( # In C++ this would require a dynamic_cast return self._PurgeSuggestionDenyListEntries(self._session, self._host, self._interceptor) # type: ignore + @property + def cancel_operation(self): + return self._CancelOperation(self._session, self._host, self._interceptor) # type: ignore + + class _CancelOperation(CompletionServiceRestStub): + def __call__( + self, + request: operations_pb2.CancelOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Call the cancel operation method over HTTP. + + Args: + request (operations_pb2.CancelOperationRequest): + The request object for CancelOperation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{name=projects/*/operations/*}:cancel", + "body": "*", + }, + { + "method": "post", + "uri": "/v1/{name=projects/*/locations/*/collections/*/dataStores/*/branches/*/operations/*}:cancel", + "body": "*", + }, + { + "method": "post", + "uri": "/v1/{name=projects/*/locations/*/dataStores/*/branches/*/operations/*}:cancel", + "body": "*", + }, + ] + + request, metadata = self._interceptor.pre_cancel_operation( + request, metadata + ) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + body = json.dumps(transcoded_request["body"]) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + return self._interceptor.post_cancel_operation(None) + @property def get_operation(self): return self._GetOperation(self._session, self._host, self._interceptor) # type: ignore diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/control_service/__init__.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/control_service/__init__.py new file mode 100644 index 000000000000..382ea5d38bcc --- /dev/null +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/control_service/__init__.py @@ -0,0 +1,22 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from .async_client import ControlServiceAsyncClient +from .client import ControlServiceClient + +__all__ = ( + "ControlServiceClient", + "ControlServiceAsyncClient", +) diff --git a/packages/google-maps-mapsplatformdatasets/google/maps/mapsplatformdatasets_v1alpha/services/maps_platform_datasets_v1_alpha/async_client.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/control_service/async_client.py similarity index 60% rename from packages/google-maps-mapsplatformdatasets/google/maps/mapsplatformdatasets_v1alpha/services/maps_platform_datasets_v1_alpha/async_client.py rename to packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/control_service/async_client.py index c08b1acbf92c..3bbec19d296d 100644 --- a/packages/google-maps-mapsplatformdatasets/google/maps/mapsplatformdatasets_v1alpha/services/maps_platform_datasets_v1_alpha/async_client.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/control_service/async_client.py @@ -36,76 +36,71 @@ from google.auth import credentials as ga_credentials # type: ignore from google.oauth2 import service_account # type: ignore -from google.maps.mapsplatformdatasets_v1alpha import gapic_version as package_version +from google.cloud.discoveryengine_v1 import gapic_version as package_version try: OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault, None] except AttributeError: # pragma: NO COVER OptionalRetry = Union[retries.AsyncRetry, object, None] # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore from google.protobuf import field_mask_pb2 # type: ignore -from google.protobuf import timestamp_pb2 # type: ignore -from google.maps.mapsplatformdatasets_v1alpha.services.maps_platform_datasets_v1_alpha import ( - pagers, -) -from google.maps.mapsplatformdatasets_v1alpha.types import dataset as gmm_dataset -from google.maps.mapsplatformdatasets_v1alpha.types import maps_platform_datasets -from google.maps.mapsplatformdatasets_v1alpha.types import data_source -from google.maps.mapsplatformdatasets_v1alpha.types import dataset +from google.cloud.discoveryengine_v1.services.control_service import pagers +from google.cloud.discoveryengine_v1.types import common +from google.cloud.discoveryengine_v1.types import control +from google.cloud.discoveryengine_v1.types import control as gcd_control +from google.cloud.discoveryengine_v1.types import control_service -from .client import MapsPlatformDatasetsV1AlphaClient -from .transports.base import DEFAULT_CLIENT_INFO, MapsPlatformDatasetsV1AlphaTransport -from .transports.grpc_asyncio import MapsPlatformDatasetsV1AlphaGrpcAsyncIOTransport +from .client import ControlServiceClient +from .transports.base import DEFAULT_CLIENT_INFO, ControlServiceTransport +from .transports.grpc_asyncio import ControlServiceGrpcAsyncIOTransport -class MapsPlatformDatasetsV1AlphaAsyncClient: - """Service definition for the Maps Platform Datasets API.""" +class ControlServiceAsyncClient: + """Service for performing CRUD operations on Controls. + Controls allow for custom logic to be implemented in the serving + path. Controls need to be attached to a Serving Config to be + considered during a request. + """ - _client: MapsPlatformDatasetsV1AlphaClient + _client: ControlServiceClient # Copy defaults from the synchronous client for use here. # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. 
- DEFAULT_ENDPOINT = MapsPlatformDatasetsV1AlphaClient.DEFAULT_ENDPOINT - DEFAULT_MTLS_ENDPOINT = MapsPlatformDatasetsV1AlphaClient.DEFAULT_MTLS_ENDPOINT - _DEFAULT_ENDPOINT_TEMPLATE = ( - MapsPlatformDatasetsV1AlphaClient._DEFAULT_ENDPOINT_TEMPLATE - ) - _DEFAULT_UNIVERSE = MapsPlatformDatasetsV1AlphaClient._DEFAULT_UNIVERSE - - dataset_path = staticmethod(MapsPlatformDatasetsV1AlphaClient.dataset_path) - parse_dataset_path = staticmethod( - MapsPlatformDatasetsV1AlphaClient.parse_dataset_path - ) + DEFAULT_ENDPOINT = ControlServiceClient.DEFAULT_ENDPOINT + DEFAULT_MTLS_ENDPOINT = ControlServiceClient.DEFAULT_MTLS_ENDPOINT + _DEFAULT_ENDPOINT_TEMPLATE = ControlServiceClient._DEFAULT_ENDPOINT_TEMPLATE + _DEFAULT_UNIVERSE = ControlServiceClient._DEFAULT_UNIVERSE + + control_path = staticmethod(ControlServiceClient.control_path) + parse_control_path = staticmethod(ControlServiceClient.parse_control_path) + data_store_path = staticmethod(ControlServiceClient.data_store_path) + parse_data_store_path = staticmethod(ControlServiceClient.parse_data_store_path) common_billing_account_path = staticmethod( - MapsPlatformDatasetsV1AlphaClient.common_billing_account_path + ControlServiceClient.common_billing_account_path ) parse_common_billing_account_path = staticmethod( - MapsPlatformDatasetsV1AlphaClient.parse_common_billing_account_path - ) - common_folder_path = staticmethod( - MapsPlatformDatasetsV1AlphaClient.common_folder_path + ControlServiceClient.parse_common_billing_account_path ) + common_folder_path = staticmethod(ControlServiceClient.common_folder_path) parse_common_folder_path = staticmethod( - MapsPlatformDatasetsV1AlphaClient.parse_common_folder_path + ControlServiceClient.parse_common_folder_path ) common_organization_path = staticmethod( - MapsPlatformDatasetsV1AlphaClient.common_organization_path + ControlServiceClient.common_organization_path ) parse_common_organization_path = staticmethod( - MapsPlatformDatasetsV1AlphaClient.parse_common_organization_path - ) - common_project_path = staticmethod( - MapsPlatformDatasetsV1AlphaClient.common_project_path + ControlServiceClient.parse_common_organization_path ) + common_project_path = staticmethod(ControlServiceClient.common_project_path) parse_common_project_path = staticmethod( - MapsPlatformDatasetsV1AlphaClient.parse_common_project_path - ) - common_location_path = staticmethod( - MapsPlatformDatasetsV1AlphaClient.common_location_path + ControlServiceClient.parse_common_project_path ) + common_location_path = staticmethod(ControlServiceClient.common_location_path) parse_common_location_path = staticmethod( - MapsPlatformDatasetsV1AlphaClient.parse_common_location_path + ControlServiceClient.parse_common_location_path ) @classmethod @@ -119,9 +114,9 @@ def from_service_account_info(cls, info: dict, *args, **kwargs): kwargs: Additional arguments to pass to the constructor. Returns: - MapsPlatformDatasetsV1AlphaAsyncClient: The constructed client. + ControlServiceAsyncClient: The constructed client. """ - return MapsPlatformDatasetsV1AlphaClient.from_service_account_info.__func__(MapsPlatformDatasetsV1AlphaAsyncClient, info, *args, **kwargs) # type: ignore + return ControlServiceClient.from_service_account_info.__func__(ControlServiceAsyncClient, info, *args, **kwargs) # type: ignore @classmethod def from_service_account_file(cls, filename: str, *args, **kwargs): @@ -135,9 +130,9 @@ def from_service_account_file(cls, filename: str, *args, **kwargs): kwargs: Additional arguments to pass to the constructor. 
Returns: - MapsPlatformDatasetsV1AlphaAsyncClient: The constructed client. + ControlServiceAsyncClient: The constructed client. """ - return MapsPlatformDatasetsV1AlphaClient.from_service_account_file.__func__(MapsPlatformDatasetsV1AlphaAsyncClient, filename, *args, **kwargs) # type: ignore + return ControlServiceClient.from_service_account_file.__func__(ControlServiceAsyncClient, filename, *args, **kwargs) # type: ignore from_service_account_json = from_service_account_file @@ -175,14 +170,14 @@ def get_mtls_endpoint_and_cert_source( Raises: google.auth.exceptions.MutualTLSChannelError: If any errors happen. """ - return MapsPlatformDatasetsV1AlphaClient.get_mtls_endpoint_and_cert_source(client_options) # type: ignore + return ControlServiceClient.get_mtls_endpoint_and_cert_source(client_options) # type: ignore @property - def transport(self) -> MapsPlatformDatasetsV1AlphaTransport: + def transport(self) -> ControlServiceTransport: """Returns the transport used by the client instance. Returns: - MapsPlatformDatasetsV1AlphaTransport: The transport used by the client instance. + ControlServiceTransport: The transport used by the client instance. """ return self._client.transport @@ -206,8 +201,7 @@ def universe_domain(self) -> str: return self._client._universe_domain get_transport_class = functools.partial( - type(MapsPlatformDatasetsV1AlphaClient).get_transport_class, - type(MapsPlatformDatasetsV1AlphaClient), + type(ControlServiceClient).get_transport_class, type(ControlServiceClient) ) def __init__( @@ -215,16 +209,12 @@ def __init__( *, credentials: Optional[ga_credentials.Credentials] = None, transport: Optional[ - Union[ - str, - MapsPlatformDatasetsV1AlphaTransport, - Callable[..., MapsPlatformDatasetsV1AlphaTransport], - ] + Union[str, ControlServiceTransport, Callable[..., ControlServiceTransport]] ] = "grpc_asyncio", client_options: Optional[ClientOptions] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: - """Instantiates the maps platform datasets v1 alpha async client. + """Instantiates the control service async client. Args: credentials (Optional[google.auth.credentials.Credentials]): The @@ -232,10 +222,10 @@ def __init__( credentials identify the application to the service; if none are specified, the client will attempt to ascertain the credentials from the environment. - transport (Optional[Union[str,MapsPlatformDatasetsV1AlphaTransport,Callable[..., MapsPlatformDatasetsV1AlphaTransport]]]): + transport (Optional[Union[str,ControlServiceTransport,Callable[..., ControlServiceTransport]]]): The transport to use, or a Callable that constructs and returns a new transport to use. If a Callable is given, it will be called with the same set of initialization - arguments as used in the MapsPlatformDatasetsV1AlphaTransport constructor. + arguments as used in the ControlServiceTransport constructor. If set to None, a transport is chosen automatically. client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the client. @@ -273,26 +263,30 @@ def __init__( google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport creation failed for any reason. 
""" - self._client = MapsPlatformDatasetsV1AlphaClient( + self._client = ControlServiceClient( credentials=credentials, transport=transport, client_options=client_options, client_info=client_info, ) - async def create_dataset( + async def create_control( self, - request: Optional[ - Union[maps_platform_datasets.CreateDatasetRequest, dict] - ] = None, + request: Optional[Union[control_service.CreateControlRequest, dict]] = None, *, parent: Optional[str] = None, - dataset: Optional[gmm_dataset.Dataset] = None, + control: Optional[gcd_control.Control] = None, + control_id: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> gmm_dataset.Dataset: - r"""Create a new dataset for the specified project. + ) -> gcd_control.Control: + r"""Creates a Control. + + By default 1000 controls are allowed for a data store. A request + can be submitted to adjust this limit. If the + [Control][google.cloud.discoveryengine.v1.Control] to create + already exists, an ALREADY_EXISTS error is returned. .. code-block:: python @@ -303,38 +297,59 @@ async def create_dataset( # - It may require specifying regional endpoints when creating the service # client as shown in: # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.maps import mapsplatformdatasets_v1alpha + from google.cloud import discoveryengine_v1 - async def sample_create_dataset(): + async def sample_create_control(): # Create a client - client = mapsplatformdatasets_v1alpha.MapsPlatformDatasetsV1AlphaAsyncClient() + client = discoveryengine_v1.ControlServiceAsyncClient() # Initialize request argument(s) - request = mapsplatformdatasets_v1alpha.CreateDatasetRequest( + control = discoveryengine_v1.Control() + control.boost_action.boost = 0.551 + control.boost_action.filter = "filter_value" + control.boost_action.data_store = "data_store_value" + control.display_name = "display_name_value" + control.solution_type = "SOLUTION_TYPE_GENERATIVE_CHAT" + + request = discoveryengine_v1.CreateControlRequest( parent="parent_value", + control=control, + control_id="control_id_value", ) # Make the request - response = await client.create_dataset(request=request) + response = await client.create_control(request=request) # Handle the response print(response) Args: - request (Optional[Union[google.maps.mapsplatformdatasets_v1alpha.types.CreateDatasetRequest, dict]]): - The request object. Request to create a maps dataset. + request (Optional[Union[google.cloud.discoveryengine_v1.types.CreateControlRequest, dict]]): + The request object. Request for CreateControl method. parent (:class:`str`): - Required. Parent project that will own the dataset. - Format: projects/{$project_number} + Required. Full resource name of parent data store. + Format: + ``projects/{project_number}/locations/{location_id}/collections/{collection_id}/dataStores/{data_store_id}`` + or + ``projects/{project_number}/locations/{location_id}/collections/{collection_id}/engines/{engine_id}``. This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - dataset (:class:`google.maps.mapsplatformdatasets_v1alpha.types.Dataset`): - Required. The dataset version to - create. + control (:class:`google.cloud.discoveryengine_v1.types.Control`): + Required. The Control to create. 
+ This corresponds to the ``control`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + control_id (:class:`str`): + Required. The ID to use for the Control, which will + become the final component of the Control's resource + name. - This corresponds to the ``dataset`` field + This value must be within 1-63 characters. Valid + characters are /[a-z][0-9]-_/. + + This corresponds to the ``control_id`` field on the ``request`` instance; if ``request`` is provided, this should not be set. retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, @@ -344,15 +359,17 @@ async def sample_create_dataset(): sent along with the request as metadata. Returns: - google.maps.mapsplatformdatasets_v1alpha.types.Dataset: - A representation of a maps platform - dataset. + google.cloud.discoveryengine_v1.types.Control: + Defines a conditioned behavior to employ during serving. + Must be attached to a [ServingConfig][] to be + considered at serving time. Permitted actions + dependent on SolutionType. """ # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent, dataset]) + has_flattened_params = any([parent, control, control_id]) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -361,20 +378,22 @@ async def sample_create_dataset(): # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. - if not isinstance(request, maps_platform_datasets.CreateDatasetRequest): - request = maps_platform_datasets.CreateDatasetRequest(request) + if not isinstance(request, control_service.CreateControlRequest): + request = control_service.CreateControlRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. if parent is not None: request.parent = parent - if dataset is not None: - request.dataset = dataset + if control is not None: + request.control = control + if control_id is not None: + request.control_id = control_id # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. rpc = self._client._transport._wrapped_methods[ - self._client._transport.create_dataset + self._client._transport.create_control ] # Certain fields should be provided within the metadata header; @@ -397,20 +416,19 @@ async def sample_create_dataset(): # Done; return the response. return response - async def update_dataset_metadata( + async def delete_control( self, - request: Optional[ - Union[maps_platform_datasets.UpdateDatasetMetadataRequest, dict] - ] = None, + request: Optional[Union[control_service.DeleteControlRequest, dict]] = None, *, - dataset: Optional[gmm_dataset.Dataset] = None, - update_mask: Optional[field_mask_pb2.FieldMask] = None, + name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> gmm_dataset.Dataset: - r"""Update the metadata for the dataset. To update the - data use: UploadDataset. + ) -> None: + r"""Deletes a Control. + + If the [Control][google.cloud.discoveryengine.v1.Control] to + delete does not exist, a NOT_FOUND error is returned. .. 
code-block:: python @@ -421,39 +439,29 @@ async def update_dataset_metadata( # - It may require specifying regional endpoints when creating the service # client as shown in: # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.maps import mapsplatformdatasets_v1alpha + from google.cloud import discoveryengine_v1 - async def sample_update_dataset_metadata(): + async def sample_delete_control(): # Create a client - client = mapsplatformdatasets_v1alpha.MapsPlatformDatasetsV1AlphaAsyncClient() + client = discoveryengine_v1.ControlServiceAsyncClient() # Initialize request argument(s) - request = mapsplatformdatasets_v1alpha.UpdateDatasetMetadataRequest( + request = discoveryengine_v1.DeleteControlRequest( + name="name_value", ) # Make the request - response = await client.update_dataset_metadata(request=request) - - # Handle the response - print(response) + await client.delete_control(request=request) Args: - request (Optional[Union[google.maps.mapsplatformdatasets_v1alpha.types.UpdateDatasetMetadataRequest, dict]]): - The request object. Request to update the metadata fields - of the dataset. - dataset (:class:`google.maps.mapsplatformdatasets_v1alpha.types.Dataset`): - Required. The dataset to update. The dataset's name is - used to identify the dataset to be updated. The name has - the format: projects/{project}/datasets/{dataset_id} - - This corresponds to the ``dataset`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): - The list of fields to be updated. Support the value "*" - for full replacement. + request (Optional[Union[google.cloud.discoveryengine_v1.types.DeleteControlRequest, dict]]): + The request object. Request for DeleteControl method. + name (:class:`str`): + Required. The resource name of the Control to delete. + Format: + ``projects/{project_number}/locations/{location_id}/collections/{collection_id}/dataStores/{data_store_id}/controls/{control_id}`` - This corresponds to the ``update_mask`` field + This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, @@ -461,17 +469,11 @@ async def sample_update_dataset_metadata(): timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. - - Returns: - google.maps.mapsplatformdatasets_v1alpha.types.Dataset: - A representation of a maps platform - dataset. - """ # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([dataset, update_mask]) + has_flattened_params = any([name]) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -480,54 +482,53 @@ async def sample_update_dataset_metadata(): # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. 
- if not isinstance(request, maps_platform_datasets.UpdateDatasetMetadataRequest): - request = maps_platform_datasets.UpdateDatasetMetadataRequest(request) + if not isinstance(request, control_service.DeleteControlRequest): + request = control_service.DeleteControlRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. - if dataset is not None: - request.dataset = dataset - if update_mask is not None: - request.update_mask = update_mask + if name is not None: + request.name = name # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. rpc = self._client._transport._wrapped_methods[ - self._client._transport.update_dataset_metadata + self._client._transport.delete_control ] # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("dataset.name", request.dataset.name),) - ), + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) # Validate the universe domain. self._client._validate_universe_domain() # Send the request. - response = await rpc( + await rpc( request, retry=retry, timeout=timeout, metadata=metadata, ) - # Done; return the response. - return response - - async def get_dataset( + async def update_control( self, - request: Optional[Union[maps_platform_datasets.GetDatasetRequest, dict]] = None, + request: Optional[Union[control_service.UpdateControlRequest, dict]] = None, *, - name: Optional[str] = None, + control: Optional[gcd_control.Control] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> dataset.Dataset: - r"""Get the published or latest version of the dataset. + ) -> gcd_control.Control: + r"""Updates a Control. + + [Control][google.cloud.discoveryengine.v1.Control] action type + cannot be changed. If the + [Control][google.cloud.discoveryengine.v1.Control] to update + does not exist, a NOT_FOUND error is returned. .. code-block:: python @@ -538,36 +539,49 @@ async def get_dataset( # - It may require specifying regional endpoints when creating the service # client as shown in: # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.maps import mapsplatformdatasets_v1alpha + from google.cloud import discoveryengine_v1 - async def sample_get_dataset(): + async def sample_update_control(): # Create a client - client = mapsplatformdatasets_v1alpha.MapsPlatformDatasetsV1AlphaAsyncClient() + client = discoveryengine_v1.ControlServiceAsyncClient() # Initialize request argument(s) - request = mapsplatformdatasets_v1alpha.GetDatasetRequest( - name="name_value", + control = discoveryengine_v1.Control() + control.boost_action.boost = 0.551 + control.boost_action.filter = "filter_value" + control.boost_action.data_store = "data_store_value" + control.display_name = "display_name_value" + control.solution_type = "SOLUTION_TYPE_GENERATIVE_CHAT" + + request = discoveryengine_v1.UpdateControlRequest( + control=control, ) # Make the request - response = await client.get_dataset(request=request) + response = await client.update_control(request=request) # Handle the response print(response) Args: - request (Optional[Union[google.maps.mapsplatformdatasets_v1alpha.types.GetDatasetRequest, dict]]): - The request object. 
Request to get the specified dataset. - name (:class:`str`): - Required. Resource name. Can also fetch a specified - version projects/{project}/datasets/{dataset_id} - projects/{project}/datasets/{dataset_id}@{version-id} + request (Optional[Union[google.cloud.discoveryengine_v1.types.UpdateControlRequest, dict]]): + The request object. Request for UpdateControl method. + control (:class:`google.cloud.discoveryengine_v1.types.Control`): + Required. The Control to update. + This corresponds to the ``control`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): + Optional. Indicates which fields in the provided + [Control][google.cloud.discoveryengine.v1.Control] to + update. The following are NOT supported: - In order to retrieve a previous version of the dataset, - also provide the version ID. Example: - projects/123/datasets/assisted-driving-preferences@c7cfa2a8 + - [Control.name][google.cloud.discoveryengine.v1.Control.name] + - [Control.solution_type][google.cloud.discoveryengine.v1.Control.solution_type] - This corresponds to the ``name`` field + If not set or empty, all supported fields are updated. + + This corresponds to the ``update_mask`` field on the ``request`` instance; if ``request`` is provided, this should not be set. retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, @@ -577,15 +591,17 @@ async def sample_get_dataset(): sent along with the request as metadata. Returns: - google.maps.mapsplatformdatasets_v1alpha.types.Dataset: - A representation of a maps platform - dataset. + google.cloud.discoveryengine_v1.types.Control: + Defines a conditioned behavior to employ during serving. + Must be attached to a [ServingConfig][] to be + considered at serving time. Permitted actions + dependent on SolutionType. """ # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + has_flattened_params = any([control, update_mask]) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -594,24 +610,28 @@ async def sample_get_dataset(): # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. - if not isinstance(request, maps_platform_datasets.GetDatasetRequest): - request = maps_platform_datasets.GetDatasetRequest(request) + if not isinstance(request, control_service.UpdateControlRequest): + request = control_service.UpdateControlRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. - if name is not None: - request.name = name + if control is not None: + request.control = control + if update_mask is not None: + request.update_mask = update_mask # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. rpc = self._client._transport._wrapped_methods[ - self._client._transport.get_dataset + self._client._transport.update_control ] # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + gapic_v1.routing_header.to_grpc_metadata( + (("control.name", request.control.name),) + ), ) # Validate the universe domain. 
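The new ControlService methods above follow the same calling convention as the rest of discoveryengine_v1: either a full request object or flattened arguments (parent, control, control_id, update_mask, name), but not both, with routing headers derived from parent, name, or control.name. A minimal end-to-end sketch of the async surface follows; the project and data store IDs are placeholders and the boost_action values are illustrative only, mirroring the generated samples rather than working configuration.

```python
# Minimal sketch of the ControlServiceAsyncClient lifecycle; resource IDs
# and field values below are placeholders, not working configuration.
import asyncio

from google.cloud import discoveryengine_v1
from google.protobuf import field_mask_pb2


async def control_lifecycle() -> None:
    client = discoveryengine_v1.ControlServiceAsyncClient()
    parent = (
        "projects/PROJECT_NUMBER/locations/global/"
        "collections/default_collection/dataStores/DATA_STORE_ID"
    )

    control = discoveryengine_v1.Control()
    control.display_name = "display_name_value"
    control.solution_type = "SOLUTION_TYPE_GENERATIVE_CHAT"
    control.boost_action.boost = 0.551
    control.boost_action.filter = "filter_value"
    control.boost_action.data_store = "data_store_value"

    # Flattened arguments are equivalent to passing a CreateControlRequest;
    # mixing a request object with flattened arguments raises ValueError.
    created = await client.create_control(
        parent=parent, control=control, control_id="control_id_value"
    )

    # Assumes the service populates the resource name on the returned Control.
    # Only fields named in the update mask change; Control.name and
    # Control.solution_type cannot be updated.
    created.display_name = "new_display_name_value"
    await client.update_control(
        control=created,
        update_mask=field_mask_pb2.FieldMask(paths=["display_name"]),
    )

    # list_controls returns an async pager; iterating resolves further pages.
    pager = await client.list_controls(parent=parent)
    async for ctrl in pager:
        print(ctrl.name)

    await client.delete_control(name=created.name)


asyncio.run(control_lifecycle())
```

The synchronous ControlServiceClient exposes the same methods without `await`, and the same flattened arguments apply there.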
@@ -628,18 +648,16 @@ async def sample_get_dataset(): # Done; return the response. return response - async def list_dataset_versions( + async def get_control( self, - request: Optional[ - Union[maps_platform_datasets.ListDatasetVersionsRequest, dict] - ] = None, + request: Optional[Union[control_service.GetControlRequest, dict]] = None, *, name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.ListDatasetVersionsAsyncPager: - r"""List all the versions of a dataset. + ) -> control.Control: + r"""Gets a Control. .. code-block:: python @@ -650,31 +668,30 @@ async def list_dataset_versions( # - It may require specifying regional endpoints when creating the service # client as shown in: # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.maps import mapsplatformdatasets_v1alpha + from google.cloud import discoveryengine_v1 - async def sample_list_dataset_versions(): + async def sample_get_control(): # Create a client - client = mapsplatformdatasets_v1alpha.MapsPlatformDatasetsV1AlphaAsyncClient() + client = discoveryengine_v1.ControlServiceAsyncClient() # Initialize request argument(s) - request = mapsplatformdatasets_v1alpha.ListDatasetVersionsRequest( + request = discoveryengine_v1.GetControlRequest( name="name_value", ) # Make the request - page_result = client.list_dataset_versions(request=request) + response = await client.get_control(request=request) # Handle the response - async for response in page_result: - print(response) + print(response) Args: - request (Optional[Union[google.maps.mapsplatformdatasets_v1alpha.types.ListDatasetVersionsRequest, dict]]): - The request object. Request to list of all versions of - the dataset. + request (Optional[Union[google.cloud.discoveryengine_v1.types.GetControlRequest, dict]]): + The request object. Request for GetControl method. name (:class:`str`): - Required. The name of the dataset to - list all the versions for. + Required. The resource name of the Control to get. + Format: + ``projects/{project_number}/locations/{location_id}/collections/{collection_id}/dataStores/{data_store_id}/controls/{control_id}`` This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this @@ -686,12 +703,11 @@ async def sample_list_dataset_versions(): sent along with the request as metadata. Returns: - google.maps.mapsplatformdatasets_v1alpha.services.maps_platform_datasets_v1_alpha.pagers.ListDatasetVersionsAsyncPager: - Response with list of all versions of - the dataset. - Iterating over this object will yield - results and resolve additional pages - automatically. + google.cloud.discoveryengine_v1.types.Control: + Defines a conditioned behavior to employ during serving. + Must be attached to a [ServingConfig][] to be + considered at serving time. Permitted actions + dependent on SolutionType. """ # Create or coerce a protobuf request object. @@ -706,8 +722,8 @@ async def sample_list_dataset_versions(): # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. 
- if not isinstance(request, maps_platform_datasets.ListDatasetVersionsRequest): - request = maps_platform_datasets.ListDatasetVersionsRequest(request) + if not isinstance(request, control_service.GetControlRequest): + request = control_service.GetControlRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -717,7 +733,7 @@ async def sample_list_dataset_versions(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. rpc = self._client._transport._wrapped_methods[ - self._client._transport.list_dataset_versions + self._client._transport.get_control ] # Certain fields should be provided within the metadata header; @@ -737,30 +753,20 @@ async def sample_list_dataset_versions(): metadata=metadata, ) - # This method is paged; wrap the response in a pager, which provides - # an `__aiter__` convenience method. - response = pagers.ListDatasetVersionsAsyncPager( - method=rpc, - request=request, - response=response, - metadata=metadata, - ) - # Done; return the response. return response - async def list_datasets( + async def list_controls( self, - request: Optional[ - Union[maps_platform_datasets.ListDatasetsRequest, dict] - ] = None, + request: Optional[Union[control_service.ListControlsRequest, dict]] = None, *, parent: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.ListDatasetsAsyncPager: - r"""List all the datasets for the specified project. + ) -> pagers.ListControlsAsyncPager: + r"""Lists all Controls by their parent + [DataStore][google.cloud.discoveryengine.v1.DataStore]. .. code-block:: python @@ -771,31 +777,32 @@ async def list_datasets( # - It may require specifying regional endpoints when creating the service # client as shown in: # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.maps import mapsplatformdatasets_v1alpha + from google.cloud import discoveryengine_v1 - async def sample_list_datasets(): + async def sample_list_controls(): # Create a client - client = mapsplatformdatasets_v1alpha.MapsPlatformDatasetsV1AlphaAsyncClient() + client = discoveryengine_v1.ControlServiceAsyncClient() # Initialize request argument(s) - request = mapsplatformdatasets_v1alpha.ListDatasetsRequest( + request = discoveryengine_v1.ListControlsRequest( parent="parent_value", ) # Make the request - page_result = client.list_datasets(request=request) + page_result = client.list_controls(request=request) # Handle the response async for response in page_result: print(response) Args: - request (Optional[Union[google.maps.mapsplatformdatasets_v1alpha.types.ListDatasetsRequest, dict]]): - The request object. Request to list datasets for the - project. + request (Optional[Union[google.cloud.discoveryengine_v1.types.ListControlsRequest, dict]]): + The request object. Request for ListControls method. parent (:class:`str`): - Required. The name of the project to - list all the datasets for. + Required. The data store resource name. Format: + ``projects/{project_number}/locations/{location_id}/collections/{collection_id}/dataStores/{data_store_id}`` + or + ``projects/{project_number}/locations/{location_id}/collections/{collection_id}/engines/{engine_id}``. 
This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this @@ -807,9 +814,9 @@ async def sample_list_datasets(): sent along with the request as metadata. Returns: - google.maps.mapsplatformdatasets_v1alpha.services.maps_platform_datasets_v1_alpha.pagers.ListDatasetsAsyncPager: - Response to list datasets for the - project. + google.cloud.discoveryengine_v1.services.control_service.pagers.ListControlsAsyncPager: + Response for ListControls method. + Iterating over this object will yield results and resolve additional pages automatically. @@ -827,8 +834,8 @@ async def sample_list_datasets(): # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. - if not isinstance(request, maps_platform_datasets.ListDatasetsRequest): - request = maps_platform_datasets.ListDatasetsRequest(request) + if not isinstance(request, control_service.ListControlsRequest): + request = control_service.ListControlsRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -838,7 +845,7 @@ async def sample_list_datasets(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. rpc = self._client._transport._wrapped_methods[ - self._client._transport.list_datasets + self._client._transport.list_controls ] # Certain fields should be provided within the metadata header; @@ -860,7 +867,7 @@ async def sample_list_datasets(): # This method is paged; wrap the response in a pager, which provides # an `__aiter__` convenience method. - response = pagers.ListDatasetsAsyncPager( + response = pagers.ListControlsAsyncPager( method=rpc, request=request, response=response, @@ -870,86 +877,99 @@ async def sample_list_datasets(): # Done; return the response. return response - async def delete_dataset( + async def list_operations( self, - request: Optional[ - Union[maps_platform_datasets.DeleteDatasetRequest, dict] - ] = None, + request: Optional[operations_pb2.ListOperationsRequest] = None, *, - name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> None: - r"""Delete the specified dataset and optionally all its - corresponding versions. + ) -> operations_pb2.ListOperationsResponse: + r"""Lists operations that match the specified filter in the request. - .. code-block:: python + Args: + request (:class:`~.operations_pb2.ListOperationsRequest`): + The request object. Request message for + `ListOperations` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.ListOperationsResponse: + Response message for ``ListOperations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.ListOperationsRequest(**request) - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.maps import mapsplatformdatasets_v1alpha + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_operations, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) - async def sample_delete_dataset(): - # Create a client - client = mapsplatformdatasets_v1alpha.MapsPlatformDatasetsV1AlphaAsyncClient() + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) - # Initialize request argument(s) - request = mapsplatformdatasets_v1alpha.DeleteDatasetRequest( - name="name_value", - ) + # Validate the universe domain. + self._client._validate_universe_domain() - # Make the request - await client.delete_dataset(request=request) + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - Args: - request (Optional[Union[google.maps.mapsplatformdatasets_v1alpha.types.DeleteDatasetRequest, dict]]): - The request object. Request to delete a dataset. + # Done; return the response. + return response - The dataset to be deleted. - name (:class:`str`): - Required. Format: - projects/${project}/datasets/{dataset_id} + async def get_operation( + self, + request: Optional[operations_pb2.GetOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. """ # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError( - "If the `request` argument is set, then none of " - "the individual field arguments should be set." - ) - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, maps_platform_datasets.DeleteDatasetRequest): - request = maps_platform_datasets.DeleteDatasetRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. 
+ if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._client._transport._wrapped_methods[ - self._client._transport.delete_dataset - ] + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) # Certain fields should be provided within the metadata header; # add these here. @@ -961,91 +981,55 @@ async def sample_delete_dataset(): self._client._validate_universe_domain() # Send the request. - await rpc( + response = await rpc( request, retry=retry, timeout=timeout, metadata=metadata, ) - async def delete_dataset_version( + # Done; return the response. + return response + + async def cancel_operation( self, - request: Optional[ - Union[maps_platform_datasets.DeleteDatasetVersionRequest, dict] - ] = None, + request: Optional[operations_pb2.CancelOperationRequest] = None, *, - name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> None: - r"""Delete a specific version of the dataset. + r"""Starts asynchronous cancellation on a long-running operation. - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.maps import mapsplatformdatasets_v1alpha - - async def sample_delete_dataset_version(): - # Create a client - client = mapsplatformdatasets_v1alpha.MapsPlatformDatasetsV1AlphaAsyncClient() - - # Initialize request argument(s) - request = mapsplatformdatasets_v1alpha.DeleteDatasetVersionRequest( - name="name_value", - ) - - # Make the request - await client.delete_dataset_version(request=request) + The server makes a best effort to cancel the operation, but success + is not guaranteed. If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. Args: - request (Optional[Union[google.maps.mapsplatformdatasets_v1alpha.types.DeleteDatasetVersionRequest, dict]]): - The request object. Request to delete a version of a - dataset. - name (:class:`str`): - Required. Format: - projects/${project}/datasets/{dataset_id}@{version-id} - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. + request (:class:`~.operations_pb2.CancelOperationRequest`): + The request object. Request message for + `CancelOperation` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. + Returns: + None """ # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
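Editorial note, not part of the diff: the operations mixins above work with raw `operations_pb2` messages, which are not proto-plus types, so a dict request is expanded with keyword arguments rather than wrapped. A small sketch of both calling styles, assuming Application Default Credentials and a placeholder operation name.

```python
import asyncio

from google.cloud import discoveryengine_v1
from google.longrunning import operations_pb2


async def main() -> None:
    client = discoveryengine_v1.ControlServiceAsyncClient()
    op_name = "projects/123/locations/global/operations/operation-456"

    # Pass the raw protobuf request directly...
    op = await client.get_operation(operations_pb2.GetOperationRequest(name=op_name))

    # ...or a dict, which the handler expands via keyword arguments.
    op = await client.get_operation(request={"name": op_name})
    print(op.name, op.done)


asyncio.run(main())
```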
- has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError( - "If the `request` argument is set, then none of " - "the individual field arguments should be set." - ) - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, maps_platform_datasets.DeleteDatasetVersionRequest): - request = maps_platform_datasets.DeleteDatasetVersionRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.CancelOperationRequest(**request) # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._client._transport._wrapped_methods[ - self._client._transport.delete_dataset_version - ] + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.cancel_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) # Certain fields should be provided within the metadata header; # add these here. @@ -1064,7 +1048,7 @@ async def sample_delete_dataset_version(): metadata=metadata, ) - async def __aenter__(self) -> "MapsPlatformDatasetsV1AlphaAsyncClient": + async def __aenter__(self) -> "ControlServiceAsyncClient": return self async def __aexit__(self, exc_type, exc, tb): @@ -1076,4 +1060,4 @@ async def __aexit__(self, exc_type, exc, tb): ) -__all__ = ("MapsPlatformDatasetsV1AlphaAsyncClient",) +__all__ = ("ControlServiceAsyncClient",) diff --git a/packages/google-maps-mapsplatformdatasets/google/maps/mapsplatformdatasets_v1alpha/services/maps_platform_datasets_v1_alpha/client.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/control_service/client.py similarity index 70% rename from packages/google-maps-mapsplatformdatasets/google/maps/mapsplatformdatasets_v1alpha/services/maps_platform_datasets_v1_alpha/client.py rename to packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/control_service/client.py index 01c014283d61..0c56bd556382 100644 --- a/packages/google-maps-mapsplatformdatasets/google/maps/mapsplatformdatasets_v1alpha/services/maps_platform_datasets_v1_alpha/client.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/control_service/client.py @@ -41,32 +41,31 @@ from google.auth.transport.grpc import SslCredentials # type: ignore from google.oauth2 import service_account # type: ignore -from google.maps.mapsplatformdatasets_v1alpha import gapic_version as package_version +from google.cloud.discoveryengine_v1 import gapic_version as package_version try: OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] except AttributeError: # pragma: NO COVER OptionalRetry = Union[retries.Retry, object, None] # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore from google.protobuf import field_mask_pb2 # type: ignore -from google.protobuf import timestamp_pb2 # type: ignore -from google.maps.mapsplatformdatasets_v1alpha.services.maps_platform_datasets_v1_alpha import ( - pagers, -) -from google.maps.mapsplatformdatasets_v1alpha.types import dataset as gmm_dataset -from google.maps.mapsplatformdatasets_v1alpha.types import 
maps_platform_datasets -from google.maps.mapsplatformdatasets_v1alpha.types import data_source -from google.maps.mapsplatformdatasets_v1alpha.types import dataset +from google.cloud.discoveryengine_v1.services.control_service import pagers +from google.cloud.discoveryengine_v1.types import common +from google.cloud.discoveryengine_v1.types import control +from google.cloud.discoveryengine_v1.types import control as gcd_control +from google.cloud.discoveryengine_v1.types import control_service -from .transports.base import DEFAULT_CLIENT_INFO, MapsPlatformDatasetsV1AlphaTransport -from .transports.grpc import MapsPlatformDatasetsV1AlphaGrpcTransport -from .transports.grpc_asyncio import MapsPlatformDatasetsV1AlphaGrpcAsyncIOTransport -from .transports.rest import MapsPlatformDatasetsV1AlphaRestTransport +from .transports.base import DEFAULT_CLIENT_INFO, ControlServiceTransport +from .transports.grpc import ControlServiceGrpcTransport +from .transports.grpc_asyncio import ControlServiceGrpcAsyncIOTransport +from .transports.rest import ControlServiceRestTransport -class MapsPlatformDatasetsV1AlphaClientMeta(type): - """Metaclass for the MapsPlatformDatasetsV1Alpha client. +class ControlServiceClientMeta(type): + """Metaclass for the ControlService client. This provides class-level methods for building and retrieving support objects (e.g. transport) without polluting the client instance @@ -75,17 +74,15 @@ class MapsPlatformDatasetsV1AlphaClientMeta(type): _transport_registry = ( OrderedDict() - ) # type: Dict[str, Type[MapsPlatformDatasetsV1AlphaTransport]] - _transport_registry["grpc"] = MapsPlatformDatasetsV1AlphaGrpcTransport - _transport_registry[ - "grpc_asyncio" - ] = MapsPlatformDatasetsV1AlphaGrpcAsyncIOTransport - _transport_registry["rest"] = MapsPlatformDatasetsV1AlphaRestTransport + ) # type: Dict[str, Type[ControlServiceTransport]] + _transport_registry["grpc"] = ControlServiceGrpcTransport + _transport_registry["grpc_asyncio"] = ControlServiceGrpcAsyncIOTransport + _transport_registry["rest"] = ControlServiceRestTransport def get_transport_class( cls, label: Optional[str] = None, - ) -> Type[MapsPlatformDatasetsV1AlphaTransport]: + ) -> Type[ControlServiceTransport]: """Returns an appropriate transport class. Args: @@ -104,10 +101,12 @@ def get_transport_class( return next(iter(cls._transport_registry.values())) -class MapsPlatformDatasetsV1AlphaClient( - metaclass=MapsPlatformDatasetsV1AlphaClientMeta -): - """Service definition for the Maps Platform Datasets API.""" +class ControlServiceClient(metaclass=ControlServiceClientMeta): + """Service for performing CRUD operations on Controls. + Controls allow for custom logic to be implemented in the serving + path. Controls need to be attached to a Serving Config to be + considered during a request. + """ @staticmethod def _get_default_mtls_endpoint(api_endpoint): @@ -140,12 +139,12 @@ def _get_default_mtls_endpoint(api_endpoint): return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. 
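Editorial note, not part of the diff: the metaclass above registers the "grpc", "grpc_asyncio", and "rest" transports and resolves them by label. A brief sketch of picking a transport explicitly; credentials are assumed to come from the environment.

```python
from google.cloud import discoveryengine_v1

# Resolve a transport class by its registry label ("grpc", "grpc_asyncio", "rest").
rest_transport = discoveryengine_v1.ControlServiceClient.get_transport_class("rest")
print(rest_transport.__name__)  # -> ControlServiceRestTransport

# Or simply pass the label when constructing the client; omitting it picks the
# default (the first registered transport, gRPC).
client = discoveryengine_v1.ControlServiceClient(transport="rest")
```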
- DEFAULT_ENDPOINT = "mapsplatformdatasets.googleapis.com" + DEFAULT_ENDPOINT = "discoveryengine.googleapis.com" DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore DEFAULT_ENDPOINT ) - _DEFAULT_ENDPOINT_TEMPLATE = "mapsplatformdatasets.{UNIVERSE_DOMAIN}" + _DEFAULT_ENDPOINT_TEMPLATE = "discoveryengine.{UNIVERSE_DOMAIN}" _DEFAULT_UNIVERSE = "googleapis.com" @classmethod @@ -159,7 +158,7 @@ def from_service_account_info(cls, info: dict, *args, **kwargs): kwargs: Additional arguments to pass to the constructor. Returns: - MapsPlatformDatasetsV1AlphaClient: The constructed client. + ControlServiceClient: The constructed client. """ credentials = service_account.Credentials.from_service_account_info(info) kwargs["credentials"] = credentials @@ -177,7 +176,7 @@ def from_service_account_file(cls, filename: str, *args, **kwargs): kwargs: Additional arguments to pass to the constructor. Returns: - MapsPlatformDatasetsV1AlphaClient: The constructed client. + ControlServiceClient: The constructed client. """ credentials = service_account.Credentials.from_service_account_file(filename) kwargs["credentials"] = credentials @@ -186,30 +185,59 @@ def from_service_account_file(cls, filename: str, *args, **kwargs): from_service_account_json = from_service_account_file @property - def transport(self) -> MapsPlatformDatasetsV1AlphaTransport: + def transport(self) -> ControlServiceTransport: """Returns the transport used by the client instance. Returns: - MapsPlatformDatasetsV1AlphaTransport: The transport used by the client + ControlServiceTransport: The transport used by the client instance. """ return self._transport @staticmethod - def dataset_path( + def control_path( project: str, - dataset: str, + location: str, + data_store: str, + control: str, + ) -> str: + """Returns a fully-qualified control string.""" + return "projects/{project}/locations/{location}/dataStores/{data_store}/controls/{control}".format( + project=project, + location=location, + data_store=data_store, + control=control, + ) + + @staticmethod + def parse_control_path(path: str) -> Dict[str, str]: + """Parses a control path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/dataStores/(?P.+?)/controls/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + + @staticmethod + def data_store_path( + project: str, + location: str, + data_store: str, ) -> str: - """Returns a fully-qualified dataset string.""" - return "projects/{project}/datasets/{dataset}".format( + """Returns a fully-qualified data_store string.""" + return "projects/{project}/locations/{location}/dataStores/{data_store}".format( project=project, - dataset=dataset, + location=location, + data_store=data_store, ) @staticmethod - def parse_dataset_path(path: str) -> Dict[str, str]: - """Parses a dataset path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/datasets/(?P.+?)$", path) + def parse_data_store_path(path: str) -> Dict[str, str]: + """Parses a data_store path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/dataStores/(?P.+?)$", + path, + ) return m.groupdict() if m else {} @staticmethod @@ -431,17 +459,15 @@ def _get_api_endpoint( elif use_mtls_endpoint == "always" or ( use_mtls_endpoint == "auto" and client_cert_source ): - _default_universe = MapsPlatformDatasetsV1AlphaClient._DEFAULT_UNIVERSE + _default_universe = ControlServiceClient._DEFAULT_UNIVERSE if universe_domain != _default_universe: raise MutualTLSChannelError( f"mTLS is not 
supported in any universe other than {_default_universe}." ) - api_endpoint = MapsPlatformDatasetsV1AlphaClient.DEFAULT_MTLS_ENDPOINT + api_endpoint = ControlServiceClient.DEFAULT_MTLS_ENDPOINT else: - api_endpoint = ( - MapsPlatformDatasetsV1AlphaClient._DEFAULT_ENDPOINT_TEMPLATE.format( - UNIVERSE_DOMAIN=universe_domain - ) + api_endpoint = ControlServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=universe_domain ) return api_endpoint @@ -461,7 +487,7 @@ def _get_universe_domain( Raises: ValueError: If the universe domain is an empty string. """ - universe_domain = MapsPlatformDatasetsV1AlphaClient._DEFAULT_UNIVERSE + universe_domain = ControlServiceClient._DEFAULT_UNIVERSE if client_universe_domain is not None: universe_domain = client_universe_domain elif universe_domain_env is not None: @@ -487,7 +513,7 @@ def _compare_universes( ValueError: when client_universe does not match the universe in credentials. """ - default_universe = MapsPlatformDatasetsV1AlphaClient._DEFAULT_UNIVERSE + default_universe = ControlServiceClient._DEFAULT_UNIVERSE credentials_universe = getattr(credentials, "universe_domain", default_universe) if client_universe != credentials_universe: @@ -511,7 +537,7 @@ def _validate_universe_domain(self): """ self._is_universe_domain_valid = ( self._is_universe_domain_valid - or MapsPlatformDatasetsV1AlphaClient._compare_universes( + or ControlServiceClient._compare_universes( self.universe_domain, self.transport._credentials ) ) @@ -540,16 +566,12 @@ def __init__( *, credentials: Optional[ga_credentials.Credentials] = None, transport: Optional[ - Union[ - str, - MapsPlatformDatasetsV1AlphaTransport, - Callable[..., MapsPlatformDatasetsV1AlphaTransport], - ] + Union[str, ControlServiceTransport, Callable[..., ControlServiceTransport]] ] = None, client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: - """Instantiates the maps platform datasets v1 alpha client. + """Instantiates the control service client. Args: credentials (Optional[google.auth.credentials.Credentials]): The @@ -557,10 +579,10 @@ def __init__( credentials identify the application to the service; if none are specified, the client will attempt to ascertain the credentials from the environment. - transport (Optional[Union[str,MapsPlatformDatasetsV1AlphaTransport,Callable[..., MapsPlatformDatasetsV1AlphaTransport]]]): + transport (Optional[Union[str,ControlServiceTransport,Callable[..., ControlServiceTransport]]]): The transport to use, or a Callable that constructs and returns a new transport. If a Callable is given, it will be called with the same set of initialization - arguments as used in the MapsPlatformDatasetsV1AlphaTransport constructor. + arguments as used in the ControlServiceTransport constructor. If set to None, a transport is chosen automatically. client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the client. 
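Editorial note, not part of the diff: the endpoint resolution above honors `client_options`, which is how the regional-endpoint advice in the generated samples is applied in practice. A minimal sketch; the endpoint shown is only an illustration of the `api_endpoint` override and credentials are assumed from the environment.

```python
from google.api_core.client_options import ClientOptions
from google.cloud import discoveryengine_v1

# Point the client at a non-default endpoint; use whichever regional endpoint
# your resources actually live behind (the value below is illustrative).
client = discoveryengine_v1.ControlServiceClient(
    client_options=ClientOptions(api_endpoint="eu-discoveryengine.googleapis.com")
)
print(client.transport.host)
```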
@@ -613,13 +635,11 @@ def __init__( self._use_client_cert, self._use_mtls_endpoint, self._universe_domain_env, - ) = MapsPlatformDatasetsV1AlphaClient._read_environment_variables() - self._client_cert_source = ( - MapsPlatformDatasetsV1AlphaClient._get_client_cert_source( - self._client_options.client_cert_source, self._use_client_cert - ) + ) = ControlServiceClient._read_environment_variables() + self._client_cert_source = ControlServiceClient._get_client_cert_source( + self._client_options.client_cert_source, self._use_client_cert ) - self._universe_domain = MapsPlatformDatasetsV1AlphaClient._get_universe_domain( + self._universe_domain = ControlServiceClient._get_universe_domain( universe_domain_opt, self._universe_domain_env ) self._api_endpoint = None # updated below, depending on `transport` @@ -636,9 +656,9 @@ def __init__( # Save or instantiate the transport. # Ordinarily, we provide the transport, but allowing a custom transport # instance provides an extensibility point for unusual situations. - transport_provided = isinstance(transport, MapsPlatformDatasetsV1AlphaTransport) + transport_provided = isinstance(transport, ControlServiceTransport) if transport_provided: - # transport is a MapsPlatformDatasetsV1AlphaTransport instance. + # transport is a ControlServiceTransport instance. if credentials or self._client_options.credentials_file or api_key_value: raise ValueError( "When providing a transport instance, " @@ -649,12 +669,12 @@ def __init__( "When providing a transport instance, provide its scopes " "directly." ) - self._transport = cast(MapsPlatformDatasetsV1AlphaTransport, transport) + self._transport = cast(ControlServiceTransport, transport) self._api_endpoint = self._transport.host self._api_endpoint = ( self._api_endpoint - or MapsPlatformDatasetsV1AlphaClient._get_api_endpoint( + or ControlServiceClient._get_api_endpoint( self._client_options.api_endpoint, self._client_cert_source, self._universe_domain, @@ -673,14 +693,11 @@ def __init__( ) transport_init: Union[ - Type[MapsPlatformDatasetsV1AlphaTransport], - Callable[..., MapsPlatformDatasetsV1AlphaTransport], + Type[ControlServiceTransport], Callable[..., ControlServiceTransport] ] = ( type(self).get_transport_class(transport) if isinstance(transport, str) or transport is None - else cast( - Callable[..., MapsPlatformDatasetsV1AlphaTransport], transport - ) + else cast(Callable[..., ControlServiceTransport], transport) ) # initialize with the provided callable or the passed in class self._transport = transport_init( @@ -695,19 +712,23 @@ def __init__( api_audience=self._client_options.api_audience, ) - def create_dataset( + def create_control( self, - request: Optional[ - Union[maps_platform_datasets.CreateDatasetRequest, dict] - ] = None, + request: Optional[Union[control_service.CreateControlRequest, dict]] = None, *, parent: Optional[str] = None, - dataset: Optional[gmm_dataset.Dataset] = None, + control: Optional[gcd_control.Control] = None, + control_id: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> gmm_dataset.Dataset: - r"""Create a new dataset for the specified project. + ) -> gcd_control.Control: + r"""Creates a Control. + + By default 1000 controls are allowed for a data store. A request + can be submitted to adjust this limit. If the + [Control][google.cloud.discoveryengine.v1.Control] to create + already exists, an ALREADY_EXISTS error is returned. .. 
code-block:: python @@ -718,38 +739,59 @@ def create_dataset( # - It may require specifying regional endpoints when creating the service # client as shown in: # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.maps import mapsplatformdatasets_v1alpha + from google.cloud import discoveryengine_v1 - def sample_create_dataset(): + def sample_create_control(): # Create a client - client = mapsplatformdatasets_v1alpha.MapsPlatformDatasetsV1AlphaClient() + client = discoveryengine_v1.ControlServiceClient() # Initialize request argument(s) - request = mapsplatformdatasets_v1alpha.CreateDatasetRequest( + control = discoveryengine_v1.Control() + control.boost_action.boost = 0.551 + control.boost_action.filter = "filter_value" + control.boost_action.data_store = "data_store_value" + control.display_name = "display_name_value" + control.solution_type = "SOLUTION_TYPE_GENERATIVE_CHAT" + + request = discoveryengine_v1.CreateControlRequest( parent="parent_value", + control=control, + control_id="control_id_value", ) # Make the request - response = client.create_dataset(request=request) + response = client.create_control(request=request) # Handle the response print(response) Args: - request (Union[google.maps.mapsplatformdatasets_v1alpha.types.CreateDatasetRequest, dict]): - The request object. Request to create a maps dataset. + request (Union[google.cloud.discoveryengine_v1.types.CreateControlRequest, dict]): + The request object. Request for CreateControl method. parent (str): - Required. Parent project that will own the dataset. - Format: projects/{$project_number} + Required. Full resource name of parent data store. + Format: + ``projects/{project_number}/locations/{location_id}/collections/{collection_id}/dataStores/{data_store_id}`` + or + ``projects/{project_number}/locations/{location_id}/collections/{collection_id}/engines/{engine_id}``. This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - dataset (google.maps.mapsplatformdatasets_v1alpha.types.Dataset): - Required. The dataset version to - create. + control (google.cloud.discoveryengine_v1.types.Control): + Required. The Control to create. + This corresponds to the ``control`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + control_id (str): + Required. The ID to use for the Control, which will + become the final component of the Control's resource + name. + + This value must be within 1-63 characters. Valid + characters are /[a-z][0-9]-_/. - This corresponds to the ``dataset`` field + This corresponds to the ``control_id`` field on the ``request`` instance; if ``request`` is provided, this should not be set. retry (google.api_core.retry.Retry): Designation of what errors, if any, @@ -759,15 +801,17 @@ def sample_create_dataset(): sent along with the request as metadata. Returns: - google.maps.mapsplatformdatasets_v1alpha.types.Dataset: - A representation of a maps platform - dataset. + google.cloud.discoveryengine_v1.types.Control: + Defines a conditioned behavior to employ during serving. + Must be attached to a [ServingConfig][] to be + considered at serving time. Permitted actions + dependent on SolutionType. """ # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
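Editorial note, not part of the diff: the "quick check" above makes `request=` mutually exclusive with the flattened `parent`/`control`/`control_id` arguments. A sketch of the failure mode and of the flattened calling style; all resource names and field values are placeholders mirroring the generated sample, and credentials are assumed.

```python
from google.cloud import discoveryengine_v1

client = discoveryengine_v1.ControlServiceClient()

parent = (
    "projects/123/locations/global/collections/default_collection"
    "/dataStores/my-data-store"
)

# Mixing `request=` with a flattened field fails the quick check client-side.
try:
    client.create_control(
        request=discoveryengine_v1.CreateControlRequest(parent=parent),
        control_id="promote-docs",
    )
except ValueError as exc:
    print(exc)  # "If the `request` argument is set, then none of ..."

# Supported styles: a full request object, or flattened arguments as below.
control = discoveryengine_v1.Control()
control.display_name = "Promote docs"
control.solution_type = "SOLUTION_TYPE_SEARCH"
control.boost_action.boost = 0.5
control.boost_action.filter = "filter_value"          # placeholder
control.boost_action.data_store = "data_store_value"  # placeholder

created = client.create_control(
    parent=parent,
    control=control,
    control_id="promote-docs",  # 1-63 chars, [a-z][0-9]-_ per the docstring above
)
print(created.name)
```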
- has_flattened_params = any([parent, dataset]) + has_flattened_params = any([parent, control, control_id]) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -776,18 +820,20 @@ def sample_create_dataset(): # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. - if not isinstance(request, maps_platform_datasets.CreateDatasetRequest): - request = maps_platform_datasets.CreateDatasetRequest(request) + if not isinstance(request, control_service.CreateControlRequest): + request = control_service.CreateControlRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. if parent is not None: request.parent = parent - if dataset is not None: - request.dataset = dataset + if control is not None: + request.control = control + if control_id is not None: + request.control_id = control_id # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.create_dataset] + rpc = self._transport._wrapped_methods[self._transport.create_control] # Certain fields should be provided within the metadata header; # add these here. @@ -809,20 +855,19 @@ def sample_create_dataset(): # Done; return the response. return response - def update_dataset_metadata( + def delete_control( self, - request: Optional[ - Union[maps_platform_datasets.UpdateDatasetMetadataRequest, dict] - ] = None, + request: Optional[Union[control_service.DeleteControlRequest, dict]] = None, *, - dataset: Optional[gmm_dataset.Dataset] = None, - update_mask: Optional[field_mask_pb2.FieldMask] = None, + name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> gmm_dataset.Dataset: - r"""Update the metadata for the dataset. To update the - data use: UploadDataset. + ) -> None: + r"""Deletes a Control. + + If the [Control][google.cloud.discoveryengine.v1.Control] to + delete does not exist, a NOT_FOUND error is returned. .. code-block:: python @@ -833,39 +878,29 @@ def update_dataset_metadata( # - It may require specifying regional endpoints when creating the service # client as shown in: # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.maps import mapsplatformdatasets_v1alpha + from google.cloud import discoveryengine_v1 - def sample_update_dataset_metadata(): + def sample_delete_control(): # Create a client - client = mapsplatformdatasets_v1alpha.MapsPlatformDatasetsV1AlphaClient() + client = discoveryengine_v1.ControlServiceClient() # Initialize request argument(s) - request = mapsplatformdatasets_v1alpha.UpdateDatasetMetadataRequest( + request = discoveryengine_v1.DeleteControlRequest( + name="name_value", ) # Make the request - response = client.update_dataset_metadata(request=request) - - # Handle the response - print(response) + client.delete_control(request=request) Args: - request (Union[google.maps.mapsplatformdatasets_v1alpha.types.UpdateDatasetMetadataRequest, dict]): - The request object. Request to update the metadata fields - of the dataset. - dataset (google.maps.mapsplatformdatasets_v1alpha.types.Dataset): - Required. The dataset to update. The dataset's name is - used to identify the dataset to be updated. 
The name has - the format: projects/{project}/datasets/{dataset_id} - - This corresponds to the ``dataset`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - update_mask (google.protobuf.field_mask_pb2.FieldMask): - The list of fields to be updated. Support the value "*" - for full replacement. + request (Union[google.cloud.discoveryengine_v1.types.DeleteControlRequest, dict]): + The request object. Request for DeleteControl method. + name (str): + Required. The resource name of the Control to delete. + Format: + ``projects/{project_number}/locations/{location_id}/collections/{collection_id}/dataStores/{data_store_id}/controls/{control_id}`` - This corresponds to the ``update_mask`` field + This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. retry (google.api_core.retry.Retry): Designation of what errors, if any, @@ -873,17 +908,11 @@ def sample_update_dataset_metadata(): timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. - - Returns: - google.maps.mapsplatformdatasets_v1alpha.types.Dataset: - A representation of a maps platform - dataset. - """ # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([dataset, update_mask]) + has_flattened_params = any([name]) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -892,51 +921,50 @@ def sample_update_dataset_metadata(): # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. - if not isinstance(request, maps_platform_datasets.UpdateDatasetMetadataRequest): - request = maps_platform_datasets.UpdateDatasetMetadataRequest(request) + if not isinstance(request, control_service.DeleteControlRequest): + request = control_service.DeleteControlRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. - if dataset is not None: - request.dataset = dataset - if update_mask is not None: - request.update_mask = update_mask + if name is not None: + request.name = name # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.update_dataset_metadata] + rpc = self._transport._wrapped_methods[self._transport.delete_control] # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("dataset.name", request.dataset.name),) - ), + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) # Validate the universe domain. self._validate_universe_domain() # Send the request. - response = rpc( + rpc( request, retry=retry, timeout=timeout, metadata=metadata, ) - # Done; return the response. 
- return response - - def get_dataset( + def update_control( self, - request: Optional[Union[maps_platform_datasets.GetDatasetRequest, dict]] = None, + request: Optional[Union[control_service.UpdateControlRequest, dict]] = None, *, - name: Optional[str] = None, + control: Optional[gcd_control.Control] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> dataset.Dataset: - r"""Get the published or latest version of the dataset. + ) -> gcd_control.Control: + r"""Updates a Control. + + [Control][google.cloud.discoveryengine.v1.Control] action type + cannot be changed. If the + [Control][google.cloud.discoveryengine.v1.Control] to update + does not exist, a NOT_FOUND error is returned. .. code-block:: python @@ -947,36 +975,49 @@ def get_dataset( # - It may require specifying regional endpoints when creating the service # client as shown in: # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.maps import mapsplatformdatasets_v1alpha + from google.cloud import discoveryengine_v1 - def sample_get_dataset(): + def sample_update_control(): # Create a client - client = mapsplatformdatasets_v1alpha.MapsPlatformDatasetsV1AlphaClient() + client = discoveryengine_v1.ControlServiceClient() # Initialize request argument(s) - request = mapsplatformdatasets_v1alpha.GetDatasetRequest( - name="name_value", + control = discoveryengine_v1.Control() + control.boost_action.boost = 0.551 + control.boost_action.filter = "filter_value" + control.boost_action.data_store = "data_store_value" + control.display_name = "display_name_value" + control.solution_type = "SOLUTION_TYPE_GENERATIVE_CHAT" + + request = discoveryengine_v1.UpdateControlRequest( + control=control, ) # Make the request - response = client.get_dataset(request=request) + response = client.update_control(request=request) # Handle the response print(response) Args: - request (Union[google.maps.mapsplatformdatasets_v1alpha.types.GetDatasetRequest, dict]): - The request object. Request to get the specified dataset. - name (str): - Required. Resource name. Can also fetch a specified - version projects/{project}/datasets/{dataset_id} - projects/{project}/datasets/{dataset_id}@{version-id} + request (Union[google.cloud.discoveryengine_v1.types.UpdateControlRequest, dict]): + The request object. Request for UpdateControl method. + control (google.cloud.discoveryengine_v1.types.Control): + Required. The Control to update. + This corresponds to the ``control`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Optional. Indicates which fields in the provided + [Control][google.cloud.discoveryengine.v1.Control] to + update. The following are NOT supported: - In order to retrieve a previous version of the dataset, - also provide the version ID. Example: - projects/123/datasets/assisted-driving-preferences@c7cfa2a8 + - [Control.name][google.cloud.discoveryengine.v1.Control.name] + - [Control.solution_type][google.cloud.discoveryengine.v1.Control.solution_type] - This corresponds to the ``name`` field + If not set or empty, all supported fields are updated. + + This corresponds to the ``update_mask`` field on the ``request`` instance; if ``request`` is provided, this should not be set. 
retry (google.api_core.retry.Retry): Designation of what errors, if any, @@ -986,15 +1027,17 @@ def sample_get_dataset(): sent along with the request as metadata. Returns: - google.maps.mapsplatformdatasets_v1alpha.types.Dataset: - A representation of a maps platform - dataset. + google.cloud.discoveryengine_v1.types.Control: + Defines a conditioned behavior to employ during serving. + Must be attached to a [ServingConfig][] to be + considered at serving time. Permitted actions + dependent on SolutionType. """ # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + has_flattened_params = any([control, update_mask]) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1003,21 +1046,25 @@ def sample_get_dataset(): # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. - if not isinstance(request, maps_platform_datasets.GetDatasetRequest): - request = maps_platform_datasets.GetDatasetRequest(request) + if not isinstance(request, control_service.UpdateControlRequest): + request = control_service.UpdateControlRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. - if name is not None: - request.name = name + if control is not None: + request.control = control + if update_mask is not None: + request.update_mask = update_mask # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.get_dataset] + rpc = self._transport._wrapped_methods[self._transport.update_control] # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + gapic_v1.routing_header.to_grpc_metadata( + (("control.name", request.control.name),) + ), ) # Validate the universe domain. @@ -1034,18 +1081,16 @@ def sample_get_dataset(): # Done; return the response. return response - def list_dataset_versions( + def get_control( self, - request: Optional[ - Union[maps_platform_datasets.ListDatasetVersionsRequest, dict] - ] = None, + request: Optional[Union[control_service.GetControlRequest, dict]] = None, *, name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.ListDatasetVersionsPager: - r"""List all the versions of a dataset. + ) -> control.Control: + r"""Gets a Control. .. 
code-block:: python @@ -1056,31 +1101,30 @@ def list_dataset_versions( # - It may require specifying regional endpoints when creating the service # client as shown in: # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.maps import mapsplatformdatasets_v1alpha + from google.cloud import discoveryengine_v1 - def sample_list_dataset_versions(): + def sample_get_control(): # Create a client - client = mapsplatformdatasets_v1alpha.MapsPlatformDatasetsV1AlphaClient() + client = discoveryengine_v1.ControlServiceClient() # Initialize request argument(s) - request = mapsplatformdatasets_v1alpha.ListDatasetVersionsRequest( + request = discoveryengine_v1.GetControlRequest( name="name_value", ) # Make the request - page_result = client.list_dataset_versions(request=request) + response = client.get_control(request=request) # Handle the response - for response in page_result: - print(response) + print(response) Args: - request (Union[google.maps.mapsplatformdatasets_v1alpha.types.ListDatasetVersionsRequest, dict]): - The request object. Request to list of all versions of - the dataset. + request (Union[google.cloud.discoveryengine_v1.types.GetControlRequest, dict]): + The request object. Request for GetControl method. name (str): - Required. The name of the dataset to - list all the versions for. + Required. The resource name of the Control to get. + Format: + ``projects/{project_number}/locations/{location_id}/collections/{collection_id}/dataStores/{data_store_id}/controls/{control_id}`` This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this @@ -1092,12 +1136,11 @@ def sample_list_dataset_versions(): sent along with the request as metadata. Returns: - google.maps.mapsplatformdatasets_v1alpha.services.maps_platform_datasets_v1_alpha.pagers.ListDatasetVersionsPager: - Response with list of all versions of - the dataset. - Iterating over this object will yield - results and resolve additional pages - automatically. + google.cloud.discoveryengine_v1.types.Control: + Defines a conditioned behavior to employ during serving. + Must be attached to a [ServingConfig][] to be + considered at serving time. Permitted actions + dependent on SolutionType. """ # Create or coerce a protobuf request object. @@ -1112,8 +1155,8 @@ def sample_list_dataset_versions(): # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. - if not isinstance(request, maps_platform_datasets.ListDatasetVersionsRequest): - request = maps_platform_datasets.ListDatasetVersionsRequest(request) + if not isinstance(request, control_service.GetControlRequest): + request = control_service.GetControlRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. if name is not None: @@ -1121,7 +1164,7 @@ def sample_list_dataset_versions(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.list_dataset_versions] + rpc = self._transport._wrapped_methods[self._transport.get_control] # Certain fields should be provided within the metadata header; # add these here. @@ -1140,30 +1183,20 @@ def sample_list_dataset_versions(): metadata=metadata, ) - # This method is paged; wrap the response in a pager, which provides - # an `__iter__` convenience method. 
- response = pagers.ListDatasetVersionsPager( - method=rpc, - request=request, - response=response, - metadata=metadata, - ) - # Done; return the response. return response - def list_datasets( + def list_controls( self, - request: Optional[ - Union[maps_platform_datasets.ListDatasetsRequest, dict] - ] = None, + request: Optional[Union[control_service.ListControlsRequest, dict]] = None, *, parent: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.ListDatasetsPager: - r"""List all the datasets for the specified project. + ) -> pagers.ListControlsPager: + r"""Lists all Controls by their parent + [DataStore][google.cloud.discoveryengine.v1.DataStore]. .. code-block:: python @@ -1174,31 +1207,32 @@ def list_datasets( # - It may require specifying regional endpoints when creating the service # client as shown in: # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.maps import mapsplatformdatasets_v1alpha + from google.cloud import discoveryengine_v1 - def sample_list_datasets(): + def sample_list_controls(): # Create a client - client = mapsplatformdatasets_v1alpha.MapsPlatformDatasetsV1AlphaClient() + client = discoveryengine_v1.ControlServiceClient() # Initialize request argument(s) - request = mapsplatformdatasets_v1alpha.ListDatasetsRequest( + request = discoveryengine_v1.ListControlsRequest( parent="parent_value", ) # Make the request - page_result = client.list_datasets(request=request) + page_result = client.list_controls(request=request) # Handle the response for response in page_result: print(response) Args: - request (Union[google.maps.mapsplatformdatasets_v1alpha.types.ListDatasetsRequest, dict]): - The request object. Request to list datasets for the - project. + request (Union[google.cloud.discoveryengine_v1.types.ListControlsRequest, dict]): + The request object. Request for ListControls method. parent (str): - Required. The name of the project to - list all the datasets for. + Required. The data store resource name. Format: + ``projects/{project_number}/locations/{location_id}/collections/{collection_id}/dataStores/{data_store_id}`` + or + ``projects/{project_number}/locations/{location_id}/collections/{collection_id}/engines/{engine_id}``. This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this @@ -1210,9 +1244,9 @@ def sample_list_datasets(): sent along with the request as metadata. Returns: - google.maps.mapsplatformdatasets_v1alpha.services.maps_platform_datasets_v1_alpha.pagers.ListDatasetsPager: - Response to list datasets for the - project. + google.cloud.discoveryengine_v1.services.control_service.pagers.ListControlsPager: + Response for ListControls method. + Iterating over this object will yield results and resolve additional pages automatically. @@ -1230,8 +1264,8 @@ def sample_list_datasets(): # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. - if not isinstance(request, maps_platform_datasets.ListDatasetsRequest): - request = maps_platform_datasets.ListDatasetsRequest(request) + if not isinstance(request, control_service.ListControlsRequest): + request = control_service.ListControlsRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. 
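Editorial note, not part of the diff: unlike `get_control`, `list_controls` stays paged and returns the `ListControlsPager` wrapper. A sketch of the standard pager surface (item iteration and page-at-a-time access); the parent resource name and credentials are assumptions.

```python
from google.cloud import discoveryengine_v1

client = discoveryengine_v1.ControlServiceClient()
parent = (
    "projects/123/locations/global/collections/default_collection"
    "/dataStores/my-data-store"
)

# Item-by-item iteration transparently fetches additional pages.
for control in client.list_controls(parent=parent):
    print(control.name, control.display_name)

# Page-at-a-time access is also available on the pager.
for page in client.list_controls(parent=parent).pages:
    print(len(page.controls))
```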
if parent is not None: @@ -1239,7 +1273,7 @@ def sample_list_datasets(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.list_datasets] + rpc = self._transport._wrapped_methods[self._transport.list_controls] # Certain fields should be provided within the metadata header; # add these here. @@ -1260,7 +1294,7 @@ def sample_list_datasets(): # This method is paged; wrap the response in a pager, which provides # an `__iter__` convenience method. - response = pagers.ListDatasetsPager( + response = pagers.ListControlsPager( method=rpc, request=request, response=response, @@ -1270,83 +1304,112 @@ def sample_list_datasets(): # Done; return the response. return response - def delete_dataset( + def __enter__(self) -> "ControlServiceClient": + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! + """ + self.transport.close() + + def list_operations( self, - request: Optional[ - Union[maps_platform_datasets.DeleteDatasetRequest, dict] - ] = None, + request: Optional[operations_pb2.ListOperationsRequest] = None, *, - name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> None: - r"""Delete the specified dataset and optionally all its - corresponding versions. + ) -> operations_pb2.ListOperationsResponse: + r"""Lists operations that match the specified filter in the request. - .. code-block:: python + Args: + request (:class:`~.operations_pb2.ListOperationsRequest`): + The request object. Request message for + `ListOperations` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.ListOperationsResponse: + Response message for ``ListOperations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.ListOperationsRequest(**request) - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.maps import mapsplatformdatasets_v1alpha + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.list_operations, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) - def sample_delete_dataset(): - # Create a client - client = mapsplatformdatasets_v1alpha.MapsPlatformDatasetsV1AlphaClient() + # Certain fields should be provided within the metadata header; + # add these here. 
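Editorial note, not part of the diff: `__enter__`/`__exit__` above let the client be used as a context manager, with the caveat from the docstring that exiting closes the transport. A short sketch under the assumption that the transport is not shared with other clients; the resource name is a placeholder.

```python
from google.cloud import discoveryengine_v1

with discoveryengine_v1.ControlServiceClient() as client:
    control = client.get_control(
        name=(
            "projects/123/locations/global/collections/default_collection"
            "/dataStores/my-data-store/controls/my-control"
        )
    )
    print(control.display_name)
# The transport is closed here; do not reuse the client afterwards.
```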
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) - # Initialize request argument(s) - request = mapsplatformdatasets_v1alpha.DeleteDatasetRequest( - name="name_value", - ) + # Validate the universe domain. + self._validate_universe_domain() - # Make the request - client.delete_dataset(request=request) + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - Args: - request (Union[google.maps.mapsplatformdatasets_v1alpha.types.DeleteDatasetRequest, dict]): - The request object. Request to delete a dataset. + # Done; return the response. + return response - The dataset to be deleted. - name (str): - Required. Format: - projects/${project}/datasets/{dataset_id} + def get_operation( + self, + request: Optional[operations_pb2.GetOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. """ # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError( - "If the `request` argument is set, then none of " - "the individual field arguments should be set." - ) - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, maps_platform_datasets.DeleteDatasetRequest): - request = maps_platform_datasets.DeleteDatasetRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.delete_dataset] + rpc = gapic_v1.method.wrap_method( + self._transport.get_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) # Certain fields should be provided within the metadata header; # add these here. @@ -1358,88 +1421,55 @@ def sample_delete_dataset(): self._validate_universe_domain() # Send the request. - rpc( + response = rpc( request, retry=retry, timeout=timeout, metadata=metadata, ) - def delete_dataset_version( + # Done; return the response. 
+ return response + + def cancel_operation( self, - request: Optional[ - Union[maps_platform_datasets.DeleteDatasetVersionRequest, dict] - ] = None, + request: Optional[operations_pb2.CancelOperationRequest] = None, *, - name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> None: - r"""Delete a specific version of the dataset. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.maps import mapsplatformdatasets_v1alpha - - def sample_delete_dataset_version(): - # Create a client - client = mapsplatformdatasets_v1alpha.MapsPlatformDatasetsV1AlphaClient() - - # Initialize request argument(s) - request = mapsplatformdatasets_v1alpha.DeleteDatasetVersionRequest( - name="name_value", - ) + r"""Starts asynchronous cancellation on a long-running operation. - # Make the request - client.delete_dataset_version(request=request) + The server makes a best effort to cancel the operation, but success + is not guaranteed. If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. Args: - request (Union[google.maps.mapsplatformdatasets_v1alpha.types.DeleteDatasetVersionRequest, dict]): - The request object. Request to delete a version of a - dataset. - name (str): - Required. Format: - projects/${project}/datasets/{dataset_id}@{version-id} - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. + request (:class:`~.operations_pb2.CancelOperationRequest`): + The request object. Request message for + `CancelOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. + Returns: + None """ # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError( - "If the `request` argument is set, then none of " - "the individual field arguments should be set." - ) - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, maps_platform_datasets.DeleteDatasetVersionRequest): - request = maps_platform_datasets.DeleteDatasetVersionRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. 
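The ``list_operations``, ``get_operation``, and ``cancel_operation`` mixins being added here take raw ``operations_pb2`` messages rather than proto-plus types, which is why dict inputs are expanded via keyword arguments. A hedged usage sketch (resource and operation names are placeholders; ADC is assumed):

```python
from google.longrunning import operations_pb2

from google.cloud import discoveryengine_v1

client = discoveryengine_v1.ControlServiceClient()

# A plain dict is coerced into operations_pb2.ListOperationsRequest(**request).
response = client.list_operations(
    request={"name": "projects/my-project/locations/global"}
)
for operation in response.operations:
    print(operation.name, operation.done)

# get_operation follows the same pattern with a fully qualified operation name.
operation = client.get_operation(
    request=operations_pb2.GetOperationRequest(
        name="projects/my-project/locations/global/operations/sample-operation"
    )
)
```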
+ if isinstance(request, dict): + request = operations_pb2.CancelOperationRequest(**request) # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.delete_dataset_version] + rpc = gapic_v1.method.wrap_method( + self._transport.cancel_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) # Certain fields should be provided within the metadata header; # add these here. @@ -1458,23 +1488,10 @@ def sample_delete_dataset_version(): metadata=metadata, ) - def __enter__(self) -> "MapsPlatformDatasetsV1AlphaClient": - return self - - def __exit__(self, type, value, traceback): - """Releases underlying transport's resources. - - .. warning:: - ONLY use as a context manager if the transport is NOT shared - with other clients! Exiting the with block will CLOSE the transport - and may cause errors in other clients! - """ - self.transport.close() - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( gapic_version=package_version.__version__ ) -__all__ = ("MapsPlatformDatasetsV1AlphaClient",) +__all__ = ("ControlServiceClient",) diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/control_service/pagers.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/control_service/pagers.py new file mode 100644 index 000000000000..53552024e69f --- /dev/null +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/control_service/pagers.py @@ -0,0 +1,155 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import ( + Any, + AsyncIterator, + Awaitable, + Callable, + Iterator, + Optional, + Sequence, + Tuple, +) + +from google.cloud.discoveryengine_v1.types import control, control_service + + +class ListControlsPager: + """A pager for iterating through ``list_controls`` requests. + + This class thinly wraps an initial + :class:`google.cloud.discoveryengine_v1.types.ListControlsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``controls`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListControls`` requests and continue to iterate + through the ``controls`` field on the + corresponding responses. + + All the usual :class:`google.cloud.discoveryengine_v1.types.ListControlsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., control_service.ListControlsResponse], + request: control_service.ListControlsRequest, + response: control_service.ListControlsResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. 
+ request (google.cloud.discoveryengine_v1.types.ListControlsRequest): + The initial request object. + response (google.cloud.discoveryengine_v1.types.ListControlsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = control_service.ListControlsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[control_service.ListControlsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[control.Control]: + for page in self.pages: + yield from page.controls + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListControlsAsyncPager: + """A pager for iterating through ``list_controls`` requests. + + This class thinly wraps an initial + :class:`google.cloud.discoveryengine_v1.types.ListControlsResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``controls`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListControls`` requests and continue to iterate + through the ``controls`` field on the + corresponding responses. + + All the usual :class:`google.cloud.discoveryengine_v1.types.ListControlsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., Awaitable[control_service.ListControlsResponse]], + request: control_service.ListControlsRequest, + response: control_service.ListControlsResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.discoveryengine_v1.types.ListControlsRequest): + The initial request object. + response (google.cloud.discoveryengine_v1.types.ListControlsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
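The synchronous pager above, together with the asynchronous variant whose definition continues below, lazily re-issues ``ListControls`` requests as iteration crosses page boundaries. A minimal async sketch, assuming the generated ``ControlServiceAsyncClient`` counterpart, a placeholder parent path, and ADC credentials:

```python
import asyncio

from google.cloud import discoveryengine_v1


async def list_all_controls() -> None:
    # Placeholder parent path.
    parent = "projects/my-project/locations/global/dataStores/my-data-store"
    client = discoveryengine_v1.ControlServiceAsyncClient()
    pager = await client.list_controls(
        request=discoveryengine_v1.ListControlsRequest(parent=parent)
    )
    # ``__aiter__`` walks the ``controls`` field of each page in turn.
    async for control in pager:
        print(control.name)


asyncio.run(list_all_controls())
```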
+ """ + self._method = method + self._request = control_service.ListControlsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[control_service.ListControlsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + + def __aiter__(self) -> AsyncIterator[control.Control]: + async def async_generator(): + async for page in self.pages: + for response in page.controls: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/control_service/transports/__init__.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/control_service/transports/__init__.py new file mode 100644 index 000000000000..19aaa21a8e17 --- /dev/null +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/control_service/transports/__init__.py @@ -0,0 +1,36 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +from typing import Dict, Type + +from .base import ControlServiceTransport +from .grpc import ControlServiceGrpcTransport +from .grpc_asyncio import ControlServiceGrpcAsyncIOTransport +from .rest import ControlServiceRestInterceptor, ControlServiceRestTransport + +# Compile a registry of transports. 
+_transport_registry = OrderedDict() # type: Dict[str, Type[ControlServiceTransport]] +_transport_registry["grpc"] = ControlServiceGrpcTransport +_transport_registry["grpc_asyncio"] = ControlServiceGrpcAsyncIOTransport +_transport_registry["rest"] = ControlServiceRestTransport + +__all__ = ( + "ControlServiceTransport", + "ControlServiceGrpcTransport", + "ControlServiceGrpcAsyncIOTransport", + "ControlServiceRestTransport", + "ControlServiceRestInterceptor", +) diff --git a/packages/google-maps-mapsplatformdatasets/google/maps/mapsplatformdatasets_v1alpha/services/maps_platform_datasets_v1_alpha/transports/base.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/control_service/transports/base.py similarity index 62% rename from packages/google-maps-mapsplatformdatasets/google/maps/mapsplatformdatasets_v1alpha/services/maps_platform_datasets_v1_alpha/transports/base.py rename to packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/control_service/transports/base.py index 884813185bcd..6cbe21ed3e05 100644 --- a/packages/google-maps-mapsplatformdatasets/google/maps/mapsplatformdatasets_v1alpha/services/maps_platform_datasets_v1_alpha/transports/base.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/control_service/transports/base.py @@ -22,25 +22,27 @@ from google.api_core import retry as retries import google.auth # type: ignore from google.auth import credentials as ga_credentials # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore from google.oauth2 import service_account # type: ignore from google.protobuf import empty_pb2 # type: ignore -from google.maps.mapsplatformdatasets_v1alpha import gapic_version as package_version -from google.maps.mapsplatformdatasets_v1alpha.types import dataset as gmm_dataset -from google.maps.mapsplatformdatasets_v1alpha.types import maps_platform_datasets -from google.maps.mapsplatformdatasets_v1alpha.types import dataset +from google.cloud.discoveryengine_v1 import gapic_version as package_version +from google.cloud.discoveryengine_v1.types import control +from google.cloud.discoveryengine_v1.types import control as gcd_control +from google.cloud.discoveryengine_v1.types import control_service DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( gapic_version=package_version.__version__ ) -class MapsPlatformDatasetsV1AlphaTransport(abc.ABC): - """Abstract transport class for MapsPlatformDatasetsV1Alpha.""" +class ControlServiceTransport(abc.ABC): + """Abstract transport class for ControlService.""" AUTH_SCOPES = ("https://www.googleapis.com/auth/cloud-platform",) - DEFAULT_HOST: str = "mapsplatformdatasets.googleapis.com" + DEFAULT_HOST: str = "discoveryengine.googleapis.com" def __init__( self, @@ -59,7 +61,7 @@ def __init__( Args: host (Optional[str]): - The hostname to connect to (default: 'mapsplatformdatasets.googleapis.com'). + The hostname to connect to (default: 'discoveryengine.googleapis.com'). credentials (Optional[google.auth.credentials.Credentials]): The authorization credentials to attach to requests. These credentials identify the application to the service; if none @@ -129,65 +131,28 @@ def host(self): def _prep_wrapped_messages(self, client_info): # Precompute the wrapped methods. 
self._wrapped_methods = { - self.create_dataset: gapic_v1.method.wrap_method( - self.create_dataset, - default_timeout=60.0, - client_info=client_info, - ), - self.update_dataset_metadata: gapic_v1.method.wrap_method( - self.update_dataset_metadata, - default_timeout=60.0, - client_info=client_info, - ), - self.get_dataset: gapic_v1.method.wrap_method( - self.get_dataset, - default_retry=retries.Retry( - initial=1.0, - maximum=10.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, + self.create_control: gapic_v1.method.wrap_method( + self.create_control, + default_timeout=None, client_info=client_info, ), - self.list_dataset_versions: gapic_v1.method.wrap_method( - self.list_dataset_versions, - default_retry=retries.Retry( - initial=1.0, - maximum=10.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, + self.delete_control: gapic_v1.method.wrap_method( + self.delete_control, + default_timeout=None, client_info=client_info, ), - self.list_datasets: gapic_v1.method.wrap_method( - self.list_datasets, - default_retry=retries.Retry( - initial=1.0, - maximum=10.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, + self.update_control: gapic_v1.method.wrap_method( + self.update_control, + default_timeout=None, client_info=client_info, ), - self.delete_dataset: gapic_v1.method.wrap_method( - self.delete_dataset, - default_timeout=60.0, + self.get_control: gapic_v1.method.wrap_method( + self.get_control, + default_timeout=None, client_info=client_info, ), - self.delete_dataset_version: gapic_v1.method.wrap_method( - self.delete_dataset_version, + self.list_controls: gapic_v1.method.wrap_method( + self.list_controls, default_timeout=None, client_info=client_info, ), @@ -203,77 +168,83 @@ def close(self): raise NotImplementedError() @property - def create_dataset( + def create_control( self, ) -> Callable[ - [maps_platform_datasets.CreateDatasetRequest], - Union[gmm_dataset.Dataset, Awaitable[gmm_dataset.Dataset]], + [control_service.CreateControlRequest], + Union[gcd_control.Control, Awaitable[gcd_control.Control]], ]: raise NotImplementedError() @property - def update_dataset_metadata( + def delete_control( self, ) -> Callable[ - [maps_platform_datasets.UpdateDatasetMetadataRequest], - Union[gmm_dataset.Dataset, Awaitable[gmm_dataset.Dataset]], + [control_service.DeleteControlRequest], + Union[empty_pb2.Empty, Awaitable[empty_pb2.Empty]], ]: raise NotImplementedError() @property - def get_dataset( + def update_control( self, ) -> Callable[ - [maps_platform_datasets.GetDatasetRequest], - Union[dataset.Dataset, Awaitable[dataset.Dataset]], + [control_service.UpdateControlRequest], + Union[gcd_control.Control, Awaitable[gcd_control.Control]], ]: raise NotImplementedError() @property - def list_dataset_versions( + def get_control( self, ) -> Callable[ - [maps_platform_datasets.ListDatasetVersionsRequest], - Union[ - maps_platform_datasets.ListDatasetVersionsResponse, - Awaitable[maps_platform_datasets.ListDatasetVersionsResponse], - ], + [control_service.GetControlRequest], + Union[control.Control, Awaitable[control.Control]], ]: raise NotImplementedError() @property - def list_datasets( + def list_controls( self, ) -> Callable[ - [maps_platform_datasets.ListDatasetsRequest], + [control_service.ListControlsRequest], 
Union[ - maps_platform_datasets.ListDatasetsResponse, - Awaitable[maps_platform_datasets.ListDatasetsResponse], + control_service.ListControlsResponse, + Awaitable[control_service.ListControlsResponse], ], ]: raise NotImplementedError() @property - def delete_dataset( + def list_operations( self, ) -> Callable[ - [maps_platform_datasets.DeleteDatasetRequest], - Union[empty_pb2.Empty, Awaitable[empty_pb2.Empty]], + [operations_pb2.ListOperationsRequest], + Union[ + operations_pb2.ListOperationsResponse, + Awaitable[operations_pb2.ListOperationsResponse], + ], ]: raise NotImplementedError() @property - def delete_dataset_version( + def get_operation( self, ) -> Callable[ - [maps_platform_datasets.DeleteDatasetVersionRequest], - Union[empty_pb2.Empty, Awaitable[empty_pb2.Empty]], + [operations_pb2.GetOperationRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], ]: raise NotImplementedError() + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None,]: + raise NotImplementedError() + @property def kind(self) -> str: raise NotImplementedError() -__all__ = ("MapsPlatformDatasetsV1AlphaTransport",) +__all__ = ("ControlServiceTransport",) diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/control_service/transports/grpc.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/control_service/transports/grpc.py new file mode 100644 index 000000000000..2af691a54fbc --- /dev/null +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/control_service/transports/grpc.py @@ -0,0 +1,451 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, grpc_helpers +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore +import grpc # type: ignore + +from google.cloud.discoveryengine_v1.types import control +from google.cloud.discoveryengine_v1.types import control as gcd_control +from google.cloud.discoveryengine_v1.types import control_service + +from .base import DEFAULT_CLIENT_INFO, ControlServiceTransport + + +class ControlServiceGrpcTransport(ControlServiceTransport): + """gRPC backend transport for ControlService. + + Service for performing CRUD operations on Controls. + Controls allow for custom logic to be implemented in the serving + path. Controls need to be attached to a Serving Config to be + considered during a request. 
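Note that the wrapped ``ControlService`` methods registered above use ``default_timeout=None`` and carry no default retry, unlike the removed dataset methods; callers who want retries supply a policy per call. A sketch under stated assumptions (retry values and the control resource name are illustrative only, mirroring the policy the removed methods used):

```python
from google.api_core import exceptions as core_exceptions
from google.api_core import retry as retries

from google.cloud import discoveryengine_v1

client = discoveryengine_v1.ControlServiceClient()

control = client.get_control(
    # Placeholder control resource name.
    request=discoveryengine_v1.GetControlRequest(
        name="projects/my-project/locations/global/dataStores/my-data-store/controls/my-control"
    ),
    # Retry transient unavailability with exponential backoff.
    retry=retries.Retry(
        initial=1.0,
        maximum=10.0,
        multiplier=1.3,
        predicate=retries.if_exception_type(core_exceptions.ServiceUnavailable),
        deadline=60.0,
    ),
    timeout=60.0,
)
print(control.display_name)
```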
+ + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + + _stubs: Dict[str, Callable] + + def __init__( + self, + *, + host: str = "discoveryengine.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'discoveryengine.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if a ``channel`` instance is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if a ``channel`` instance is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if a ``channel`` instance is provided. + channel (Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]]): + A ``Channel`` instance through which to make calls, or a Callable + that constructs and returns one. If set to None, ``self.create_channel`` + is used to create the channel. If a Callable is given, it will be called + with the same arguments as used in ``self.create_channel``. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if a ``channel`` instance is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. 
+ Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if isinstance(channel, grpc.Channel): + # Ignore credentials if a channel was passed. + credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + # initialize with the provided callable or the default channel + channel_init = channel or type(self).create_channel + self._grpc_channel = channel_init( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @classmethod + def create_channel( + cls, + host: str = "discoveryengine.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> grpc.Channel: + """Create and return a gRPC channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. 
+ This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + grpc.Channel: A gRPC channel object. + + Raises: + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + + return grpc_helpers.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs, + ) + + @property + def grpc_channel(self) -> grpc.Channel: + """Return the channel designed to connect to this service.""" + return self._grpc_channel + + @property + def create_control( + self, + ) -> Callable[[control_service.CreateControlRequest], gcd_control.Control]: + r"""Return a callable for the create control method over gRPC. + + Creates a Control. + + By default 1000 controls are allowed for a data store. A request + can be submitted to adjust this limit. If the + [Control][google.cloud.discoveryengine.v1.Control] to create + already exists, an ALREADY_EXISTS error is returned. + + Returns: + Callable[[~.CreateControlRequest], + ~.Control]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_control" not in self._stubs: + self._stubs["create_control"] = self.grpc_channel.unary_unary( + "/google.cloud.discoveryengine.v1.ControlService/CreateControl", + request_serializer=control_service.CreateControlRequest.serialize, + response_deserializer=gcd_control.Control.deserialize, + ) + return self._stubs["create_control"] + + @property + def delete_control( + self, + ) -> Callable[[control_service.DeleteControlRequest], empty_pb2.Empty]: + r"""Return a callable for the delete control method over gRPC. + + Deletes a Control. + + If the [Control][google.cloud.discoveryengine.v1.Control] to + delete does not exist, a NOT_FOUND error is returned. + + Returns: + Callable[[~.DeleteControlRequest], + ~.Empty]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_control" not in self._stubs: + self._stubs["delete_control"] = self.grpc_channel.unary_unary( + "/google.cloud.discoveryengine.v1.ControlService/DeleteControl", + request_serializer=control_service.DeleteControlRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs["delete_control"] + + @property + def update_control( + self, + ) -> Callable[[control_service.UpdateControlRequest], gcd_control.Control]: + r"""Return a callable for the update control method over gRPC. + + Updates a Control. + + [Control][google.cloud.discoveryengine.v1.Control] action type + cannot be changed. If the + [Control][google.cloud.discoveryengine.v1.Control] to update + does not exist, a NOT_FOUND error is returned. 
+ + Returns: + Callable[[~.UpdateControlRequest], + ~.Control]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_control" not in self._stubs: + self._stubs["update_control"] = self.grpc_channel.unary_unary( + "/google.cloud.discoveryengine.v1.ControlService/UpdateControl", + request_serializer=control_service.UpdateControlRequest.serialize, + response_deserializer=gcd_control.Control.deserialize, + ) + return self._stubs["update_control"] + + @property + def get_control( + self, + ) -> Callable[[control_service.GetControlRequest], control.Control]: + r"""Return a callable for the get control method over gRPC. + + Gets a Control. + + Returns: + Callable[[~.GetControlRequest], + ~.Control]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_control" not in self._stubs: + self._stubs["get_control"] = self.grpc_channel.unary_unary( + "/google.cloud.discoveryengine.v1.ControlService/GetControl", + request_serializer=control_service.GetControlRequest.serialize, + response_deserializer=control.Control.deserialize, + ) + return self._stubs["get_control"] + + @property + def list_controls( + self, + ) -> Callable[ + [control_service.ListControlsRequest], control_service.ListControlsResponse + ]: + r"""Return a callable for the list controls method over gRPC. + + Lists all Controls by their parent + [DataStore][google.cloud.discoveryengine.v1.DataStore]. + + Returns: + Callable[[~.ListControlsRequest], + ~.ListControlsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_controls" not in self._stubs: + self._stubs["list_controls"] = self.grpc_channel.unary_unary( + "/google.cloud.discoveryengine.v1.ControlService/ListControls", + request_serializer=control_service.ListControlsRequest.serialize, + response_deserializer=control_service.ListControlsResponse.deserialize, + ) + return self._stubs["list_controls"] + + def close(self): + self.grpc_channel.close() + + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None]: + r"""Return a callable for the cancel_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "cancel_operation" not in self._stubs: + self._stubs["cancel_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/CancelOperation", + request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["cancel_operation"] + + @property + def get_operation( + self, + ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: + r"""Return a callable for the get_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_operation" not in self._stubs: + self._stubs["get_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/GetOperation", + request_serializer=operations_pb2.GetOperationRequest.SerializeToString, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["get_operation"] + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse + ]: + r"""Return a callable for the list_operations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_operations" not in self._stubs: + self._stubs["list_operations"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/ListOperations", + request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, + response_deserializer=operations_pb2.ListOperationsResponse.FromString, + ) + return self._stubs["list_operations"] + + @property + def kind(self) -> str: + return "grpc" + + +__all__ = ("ControlServiceGrpcTransport",) diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/control_service/transports/grpc_asyncio.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/control_service/transports/grpc_asyncio.py new file mode 100644 index 000000000000..a900436d189e --- /dev/null +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/control_service/transports/grpc_asyncio.py @@ -0,0 +1,486 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1, grpc_helpers_async +from google.api_core import retry_async as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore +import grpc # type: ignore +from grpc.experimental import aio # type: ignore + +from google.cloud.discoveryengine_v1.types import control +from google.cloud.discoveryengine_v1.types import control as gcd_control +from google.cloud.discoveryengine_v1.types import control_service + +from .base import DEFAULT_CLIENT_INFO, ControlServiceTransport +from .grpc import ControlServiceGrpcTransport + + +class ControlServiceGrpcAsyncIOTransport(ControlServiceTransport): + """gRPC AsyncIO backend transport for ControlService. + + Service for performing CRUD operations on Controls. + Controls allow for custom logic to be implemented in the serving + path. Controls need to be attached to a Serving Config to be + considered during a request. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + + _grpc_channel: aio.Channel + _stubs: Dict[str, Callable] = {} + + @classmethod + def create_channel( + cls, + host: str = "discoveryengine.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> aio.Channel: + """Create and return a gRPC AsyncIO channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + aio.Channel: A gRPC AsyncIO channel object. 
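The ``create_channel`` helper documented above applies the service's default scopes and host; one way to use it is to pre-build an AsyncIO channel and hand it to the transport. This is only a sketch: the channel options are illustrative and ADC availability is assumed.

```python
from google.cloud import discoveryengine_v1
from google.cloud.discoveryengine_v1.services.control_service.transports import (
    ControlServiceGrpcAsyncIOTransport,
)


async def make_client() -> discoveryengine_v1.ControlServiceAsyncClient:
    # Build a channel with custom options; credentials come from ADC.
    channel = ControlServiceGrpcAsyncIOTransport.create_channel(
        "discoveryengine.googleapis.com",
        options=[("grpc.max_receive_message_length", -1)],
    )
    # When a channel instance is provided, the transport uses it as-is and
    # ignores credential arguments.
    transport = ControlServiceGrpcAsyncIOTransport(channel=channel)
    return discoveryengine_v1.ControlServiceAsyncClient(transport=transport)
```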
+ """ + + return grpc_helpers_async.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs, + ) + + def __init__( + self, + *, + host: str = "discoveryengine.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[Union[aio.Channel, Callable[..., aio.Channel]]] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'discoveryengine.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if a ``channel`` instance is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if a ``channel`` instance is provided. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + channel (Optional[Union[aio.Channel, Callable[..., aio.Channel]]]): + A ``Channel`` instance through which to make calls, or a Callable + that constructs and returns one. If set to None, ``self.create_channel`` + is used to create the channel. If a Callable is given, it will be called + with the same arguments as used in ``self.create_channel``. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if a ``channel`` instance is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. 
+ Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if isinstance(channel, aio.Channel): + # Ignore credentials if a channel was passed. + credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + # initialize with the provided callable or the default channel + channel_init = channel or type(self).create_channel + self._grpc_channel = channel_init( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @property + def grpc_channel(self) -> aio.Channel: + """Create the channel designed to connect to this service. + + This property caches on the instance; repeated calls return + the same channel. + """ + # Return the channel from cache. + return self._grpc_channel + + @property + def create_control( + self, + ) -> Callable[ + [control_service.CreateControlRequest], Awaitable[gcd_control.Control] + ]: + r"""Return a callable for the create control method over gRPC. + + Creates a Control. + + By default 1000 controls are allowed for a data store. A request + can be submitted to adjust this limit. If the + [Control][google.cloud.discoveryengine.v1.Control] to create + already exists, an ALREADY_EXISTS error is returned. 
+ + Returns: + Callable[[~.CreateControlRequest], + Awaitable[~.Control]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_control" not in self._stubs: + self._stubs["create_control"] = self.grpc_channel.unary_unary( + "/google.cloud.discoveryengine.v1.ControlService/CreateControl", + request_serializer=control_service.CreateControlRequest.serialize, + response_deserializer=gcd_control.Control.deserialize, + ) + return self._stubs["create_control"] + + @property + def delete_control( + self, + ) -> Callable[[control_service.DeleteControlRequest], Awaitable[empty_pb2.Empty]]: + r"""Return a callable for the delete control method over gRPC. + + Deletes a Control. + + If the [Control][google.cloud.discoveryengine.v1.Control] to + delete does not exist, a NOT_FOUND error is returned. + + Returns: + Callable[[~.DeleteControlRequest], + Awaitable[~.Empty]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_control" not in self._stubs: + self._stubs["delete_control"] = self.grpc_channel.unary_unary( + "/google.cloud.discoveryengine.v1.ControlService/DeleteControl", + request_serializer=control_service.DeleteControlRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs["delete_control"] + + @property + def update_control( + self, + ) -> Callable[ + [control_service.UpdateControlRequest], Awaitable[gcd_control.Control] + ]: + r"""Return a callable for the update control method over gRPC. + + Updates a Control. + + [Control][google.cloud.discoveryengine.v1.Control] action type + cannot be changed. If the + [Control][google.cloud.discoveryengine.v1.Control] to update + does not exist, a NOT_FOUND error is returned. + + Returns: + Callable[[~.UpdateControlRequest], + Awaitable[~.Control]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_control" not in self._stubs: + self._stubs["update_control"] = self.grpc_channel.unary_unary( + "/google.cloud.discoveryengine.v1.ControlService/UpdateControl", + request_serializer=control_service.UpdateControlRequest.serialize, + response_deserializer=gcd_control.Control.deserialize, + ) + return self._stubs["update_control"] + + @property + def get_control( + self, + ) -> Callable[[control_service.GetControlRequest], Awaitable[control.Control]]: + r"""Return a callable for the get control method over gRPC. + + Gets a Control. + + Returns: + Callable[[~.GetControlRequest], + Awaitable[~.Control]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "get_control" not in self._stubs: + self._stubs["get_control"] = self.grpc_channel.unary_unary( + "/google.cloud.discoveryengine.v1.ControlService/GetControl", + request_serializer=control_service.GetControlRequest.serialize, + response_deserializer=control.Control.deserialize, + ) + return self._stubs["get_control"] + + @property + def list_controls( + self, + ) -> Callable[ + [control_service.ListControlsRequest], + Awaitable[control_service.ListControlsResponse], + ]: + r"""Return a callable for the list controls method over gRPC. + + Lists all Controls by their parent + [DataStore][google.cloud.discoveryengine.v1.DataStore]. + + Returns: + Callable[[~.ListControlsRequest], + Awaitable[~.ListControlsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_controls" not in self._stubs: + self._stubs["list_controls"] = self.grpc_channel.unary_unary( + "/google.cloud.discoveryengine.v1.ControlService/ListControls", + request_serializer=control_service.ListControlsRequest.serialize, + response_deserializer=control_service.ListControlsResponse.deserialize, + ) + return self._stubs["list_controls"] + + def _prep_wrapped_messages(self, client_info): + """Precompute the wrapped methods, overriding the base class method to use async wrappers.""" + self._wrapped_methods = { + self.create_control: gapic_v1.method_async.wrap_method( + self.create_control, + default_timeout=None, + client_info=client_info, + ), + self.delete_control: gapic_v1.method_async.wrap_method( + self.delete_control, + default_timeout=None, + client_info=client_info, + ), + self.update_control: gapic_v1.method_async.wrap_method( + self.update_control, + default_timeout=None, + client_info=client_info, + ), + self.get_control: gapic_v1.method_async.wrap_method( + self.get_control, + default_timeout=None, + client_info=client_info, + ), + self.list_controls: gapic_v1.method_async.wrap_method( + self.list_controls, + default_timeout=None, + client_info=client_info, + ), + } + + def close(self): + return self.grpc_channel.close() + + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None]: + r"""Return a callable for the cancel_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "cancel_operation" not in self._stubs: + self._stubs["cancel_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/CancelOperation", + request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["cancel_operation"] + + @property + def get_operation( + self, + ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: + r"""Return a callable for the get_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "get_operation" not in self._stubs: + self._stubs["get_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/GetOperation", + request_serializer=operations_pb2.GetOperationRequest.SerializeToString, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["get_operation"] + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse + ]: + r"""Return a callable for the list_operations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_operations" not in self._stubs: + self._stubs["list_operations"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/ListOperations", + request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, + response_deserializer=operations_pb2.ListOperationsResponse.FromString, + ) + return self._stubs["list_operations"] + + +__all__ = ("ControlServiceGrpcAsyncIOTransport",) diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/control_service/transports/rest.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/control_service/transports/rest.py new file mode 100644 index 000000000000..dc8bd898b465 --- /dev/null +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/control_service/transports/rest.py @@ -0,0 +1,1233 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +import dataclasses +import json # type: ignore +import re +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, path_template, rest_helpers, rest_streaming +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.transport.requests import AuthorizedSession # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.protobuf import json_format +import grpc # type: ignore +from requests import __version__ as requests_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + + +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore + +from google.cloud.discoveryengine_v1.types import control +from google.cloud.discoveryengine_v1.types import control as gcd_control +from google.cloud.discoveryengine_v1.types import control_service + +from .base import ControlServiceTransport +from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) + + +class ControlServiceRestInterceptor: + """Interceptor for ControlService. + + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. + Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the ControlServiceRestTransport. + + .. 
code-block:: python + class MyCustomControlServiceInterceptor(ControlServiceRestInterceptor): + def pre_create_control(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_create_control(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_delete_control(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def pre_get_control(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_control(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_controls(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_controls(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_update_control(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_update_control(self, response): + logging.log(f"Received response: {response}") + return response + + transport = ControlServiceRestTransport(interceptor=MyCustomControlServiceInterceptor()) + client = ControlServiceClient(transport=transport) + + + """ + + def pre_create_control( + self, + request: control_service.CreateControlRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[control_service.CreateControlRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for create_control + + Override in a subclass to manipulate the request or metadata + before they are sent to the ControlService server. + """ + return request, metadata + + def post_create_control(self, response: gcd_control.Control) -> gcd_control.Control: + """Post-rpc interceptor for create_control + + Override in a subclass to manipulate the response + after it is returned by the ControlService server but before + it is returned to user code. + """ + return response + + def pre_delete_control( + self, + request: control_service.DeleteControlRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[control_service.DeleteControlRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for delete_control + + Override in a subclass to manipulate the request or metadata + before they are sent to the ControlService server. + """ + return request, metadata + + def pre_get_control( + self, + request: control_service.GetControlRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[control_service.GetControlRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_control + + Override in a subclass to manipulate the request or metadata + before they are sent to the ControlService server. + """ + return request, metadata + + def post_get_control(self, response: control.Control) -> control.Control: + """Post-rpc interceptor for get_control + + Override in a subclass to manipulate the response + after it is returned by the ControlService server but before + it is returned to user code. + """ + return response + + def pre_list_controls( + self, + request: control_service.ListControlsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[control_service.ListControlsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_controls + + Override in a subclass to manipulate the request or metadata + before they are sent to the ControlService server. 
+ """ + return request, metadata + + def post_list_controls( + self, response: control_service.ListControlsResponse + ) -> control_service.ListControlsResponse: + """Post-rpc interceptor for list_controls + + Override in a subclass to manipulate the response + after it is returned by the ControlService server but before + it is returned to user code. + """ + return response + + def pre_update_control( + self, + request: control_service.UpdateControlRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[control_service.UpdateControlRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for update_control + + Override in a subclass to manipulate the request or metadata + before they are sent to the ControlService server. + """ + return request, metadata + + def post_update_control(self, response: gcd_control.Control) -> gcd_control.Control: + """Post-rpc interceptor for update_control + + Override in a subclass to manipulate the response + after it is returned by the ControlService server but before + it is returned to user code. + """ + return response + + def pre_cancel_operation( + self, + request: operations_pb2.CancelOperationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[operations_pb2.CancelOperationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for cancel_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the ControlService server. + """ + return request, metadata + + def post_cancel_operation(self, response: None) -> None: + """Post-rpc interceptor for cancel_operation + + Override in a subclass to manipulate the response + after it is returned by the ControlService server but before + it is returned to user code. + """ + return response + + def pre_get_operation( + self, + request: operations_pb2.GetOperationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[operations_pb2.GetOperationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the ControlService server. + """ + return request, metadata + + def post_get_operation( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for get_operation + + Override in a subclass to manipulate the response + after it is returned by the ControlService server but before + it is returned to user code. + """ + return response + + def pre_list_operations( + self, + request: operations_pb2.ListOperationsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[operations_pb2.ListOperationsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_operations + + Override in a subclass to manipulate the request or metadata + before they are sent to the ControlService server. + """ + return request, metadata + + def post_list_operations( + self, response: operations_pb2.ListOperationsResponse + ) -> operations_pb2.ListOperationsResponse: + """Post-rpc interceptor for list_operations + + Override in a subclass to manipulate the response + after it is returned by the ControlService server but before + it is returned to user code. + """ + return response + + +@dataclasses.dataclass +class ControlServiceRestStub: + _session: AuthorizedSession + _host: str + _interceptor: ControlServiceRestInterceptor + + +class ControlServiceRestTransport(ControlServiceTransport): + """REST backend transport for ControlService. + + Service for performing CRUD operations on Controls. 
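As a concrete companion to the interceptor docstring above, this is a minimal sketch of plugging a custom interceptor into the REST transport and handing that transport to the client. The logging behaviour is illustrative only, and Application Default Credentials are assumed when the transport is constructed.

```python
import logging

from google.cloud import discoveryengine_v1
from google.cloud.discoveryengine_v1.services.control_service.transports.rest import (
    ControlServiceRestInterceptor,
    ControlServiceRestTransport,
)


class LoggingControlServiceInterceptor(ControlServiceRestInterceptor):
    """Logs CreateControl traffic; all other RPCs fall through to the defaults."""

    def pre_create_control(self, request, metadata):
        logging.info("CreateControl request: %s", request)
        return request, metadata

    def post_create_control(self, response):
        logging.info("CreateControl response: %s", response)
        return response


transport = ControlServiceRestTransport(
    interceptor=LoggingControlServiceInterceptor(),
)
client = discoveryengine_v1.ControlServiceClient(transport=transport)
```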
+ Controls allow for custom logic to be implemented in the serving + path. Controls need to be attached to a Serving Config to be + considered during a request. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends JSON representations of protocol buffers over HTTP/1.1 + + """ + + def __init__( + self, + *, + host: str = "discoveryengine.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", + interceptor: Optional[ControlServiceRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'discoveryengine.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. It is ignored + if ``channel`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. + """ + # Run the base constructor + # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. 
+        # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the
+        # credentials object
+        maybe_url_match = re.match("^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host)
+        if maybe_url_match is None:
+            raise ValueError(
+                f"Unexpected hostname structure: {host}"
+            )  # pragma: NO COVER
+
+        url_match_items = maybe_url_match.groupdict()
+
+        host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host
+
+        super().__init__(
+            host=host,
+            credentials=credentials,
+            client_info=client_info,
+            always_use_jwt_access=always_use_jwt_access,
+            api_audience=api_audience,
+        )
+        self._session = AuthorizedSession(
+            self._credentials, default_host=self.DEFAULT_HOST
+        )
+        if client_cert_source_for_mtls:
+            self._session.configure_mtls_channel(client_cert_source_for_mtls)
+        self._interceptor = interceptor or ControlServiceRestInterceptor()
+        self._prep_wrapped_messages(client_info)
+
+    class _CreateControl(ControlServiceRestStub):
+        def __hash__(self):
+            return hash("CreateControl")
+
+        __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {
+            "controlId": "",
+        }
+
+        @classmethod
+        def _get_unset_required_fields(cls, message_dict):
+            return {
+                k: v
+                for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items()
+                if k not in message_dict
+            }
+
+        def __call__(
+            self,
+            request: control_service.CreateControlRequest,
+            *,
+            retry: OptionalRetry = gapic_v1.method.DEFAULT,
+            timeout: Optional[float] = None,
+            metadata: Sequence[Tuple[str, str]] = (),
+        ) -> gcd_control.Control:
+            r"""Call the create control method over HTTP.
+
+            Args:
+                request (~.control_service.CreateControlRequest):
+                    The request object. Request for CreateControl method.
+                retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                    should be retried.
+                timeout (float): The timeout for this request.
+                metadata (Sequence[Tuple[str, str]]): Strings which should be
+                    sent along with the request as metadata.
+
+            Returns:
+                ~.gcd_control.Control:
+                    Defines a conditioned behavior to employ during serving.
+                    Must be attached to a [ServingConfig][] to be considered
+                    at serving time. Permitted actions dependent on
+                    ``SolutionType``.
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{parent=projects/*/locations/*/dataStores/*}/controls", + "body": "control", + }, + { + "method": "post", + "uri": "/v1/{parent=projects/*/locations/*/collections/*/dataStores/*}/controls", + "body": "control", + }, + { + "method": "post", + "uri": "/v1/{parent=projects/*/locations/*/collections/*/engines/*}/controls", + "body": "control", + }, + ] + request, metadata = self._interceptor.pre_create_control(request, metadata) + pb_request = control_service.CreateControlRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = gcd_control.Control() + pb_resp = gcd_control.Control.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_create_control(resp) + return resp + + class _DeleteControl(ControlServiceRestStub): + def __hash__(self): + return hash("DeleteControl") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: control_service.DeleteControlRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ): + r"""Call the delete control method over HTTP. + + Args: + request (~.control_service.DeleteControlRequest): + The request object. Request for DeleteControl method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
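A minimal sketch of the CreateControl call that the `_CreateControl` stub above serves, with `control_id` supplied as the required query parameter. The parent, IDs, and Control fields are placeholders; a production Control will typically also carry an action (boost, filter, and so on) appropriate to its solution type.

```python
from google.cloud import discoveryengine_v1

client = discoveryengine_v1.ControlServiceClient(transport="rest")

control = discoveryengine_v1.Control(
    display_name="example-control",  # hypothetical values
    solution_type=discoveryengine_v1.SolutionType.SOLUTION_TYPE_SEARCH,
)

created = client.create_control(
    request=discoveryengine_v1.CreateControlRequest(
        parent="projects/my-project/locations/global/dataStores/my-data-store",
        control=control,
        control_id="example-control",
    )
)
print(created.name)
```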
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v1/{name=projects/*/locations/*/dataStores/*/controls/*}", + }, + { + "method": "delete", + "uri": "/v1/{name=projects/*/locations/*/collections/*/dataStores/*/controls/*}", + }, + { + "method": "delete", + "uri": "/v1/{name=projects/*/locations/*/collections/*/engines/*/controls/*}", + }, + ] + request, metadata = self._interceptor.pre_delete_control(request, metadata) + pb_request = control_service.DeleteControlRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + class _GetControl(ControlServiceRestStub): + def __hash__(self): + return hash("GetControl") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: control_service.GetControlRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> control.Control: + r"""Call the get control method over HTTP. + + Args: + request (~.control_service.GetControlRequest): + The request object. Request for GetControl method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.control.Control: + Defines a conditioned behavior to employ during serving. + Must be attached to a [ServingConfig][] to be considered + at serving time. Permitted actions dependent on + ``SolutionType``. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/dataStores/*/controls/*}", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/dataStores/*/controls/*}", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/engines/*/controls/*}", + }, + ] + request, metadata = self._interceptor.pre_get_control(request, metadata) + pb_request = control_service.GetControlRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = control.Control() + pb_resp = control.Control.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_control(resp) + return resp + + class _ListControls(ControlServiceRestStub): + def __hash__(self): + return hash("ListControls") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: control_service.ListControlsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> control_service.ListControlsResponse: + r"""Call the list controls method over HTTP. + + Args: + request (~.control_service.ListControlsRequest): + The request object. Request for ListControls method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.control_service.ListControlsResponse: + Response for ListControls method. 
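The Get, List, and Delete stubs defined here back the corresponding client methods. A short sketch, with placeholder resource names and Application Default Credentials, of listing controls and deleting one over REST:

```python
from google.cloud import discoveryengine_v1

client = discoveryengine_v1.ControlServiceClient(transport="rest")
parent = "projects/my-project/locations/global/dataStores/my-data-store"

# list_controls returns a pager that transparently follows page tokens.
for control in client.list_controls(
    request=discoveryengine_v1.ListControlsRequest(parent=parent)
):
    print(control.name)

# Delete by full resource name; DeleteControl returns no payload.
client.delete_control(
    request=discoveryengine_v1.DeleteControlRequest(
        name=f"{parent}/controls/example-control"
    )
)
```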
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{parent=projects/*/locations/*/dataStores/*}/controls", + }, + { + "method": "get", + "uri": "/v1/{parent=projects/*/locations/*/collections/*/dataStores/*}/controls", + }, + { + "method": "get", + "uri": "/v1/{parent=projects/*/locations/*/collections/*/engines/*}/controls", + }, + ] + request, metadata = self._interceptor.pre_list_controls(request, metadata) + pb_request = control_service.ListControlsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = control_service.ListControlsResponse() + pb_resp = control_service.ListControlsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_controls(resp) + return resp + + class _UpdateControl(ControlServiceRestStub): + def __hash__(self): + return hash("UpdateControl") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: control_service.UpdateControlRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> gcd_control.Control: + r"""Call the update control method over HTTP. + + Args: + request (~.control_service.UpdateControlRequest): + The request object. Request for UpdateControl method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.gcd_control.Control: + Defines a conditioned behavior to employ during serving. + Must be attached to a [ServingConfig][] to be considered + at serving time. Permitted actions dependent on + ``SolutionType``. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "patch", + "uri": "/v1/{control.name=projects/*/locations/*/dataStores/*/controls/*}", + "body": "control", + }, + { + "method": "patch", + "uri": "/v1/{control.name=projects/*/locations/*/collections/*/dataStores/*/controls/*}", + "body": "control", + }, + { + "method": "patch", + "uri": "/v1/{control.name=projects/*/locations/*/collections/*/engines/*/controls/*}", + "body": "control", + }, + ] + request, metadata = self._interceptor.pre_update_control(request, metadata) + pb_request = control_service.UpdateControlRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = gcd_control.Control() + pb_resp = gcd_control.Control.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_update_control(resp) + return resp + + @property + def create_control( + self, + ) -> Callable[[control_service.CreateControlRequest], gcd_control.Control]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._CreateControl(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete_control( + self, + ) -> Callable[[control_service.DeleteControlRequest], empty_pb2.Empty]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._DeleteControl(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_control( + self, + ) -> Callable[[control_service.GetControlRequest], control.Control]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetControl(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_controls( + self, + ) -> Callable[ + [control_service.ListControlsRequest], control_service.ListControlsResponse + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
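A sketch of UpdateControl, which the transport maps onto the PATCH bindings defined above. The control name and the new display name are placeholders; the field mask restricts the patch to the fields actually being changed.

```python
from google.cloud import discoveryengine_v1
from google.protobuf import field_mask_pb2

client = discoveryengine_v1.ControlServiceClient(transport="rest")

control = discoveryengine_v1.Control(
    # Hypothetical existing control to patch.
    name=(
        "projects/my-project/locations/global/"
        "dataStores/my-data-store/controls/example-control"
    ),
    display_name="example-control-renamed",
)

updated = client.update_control(
    request=discoveryengine_v1.UpdateControlRequest(
        control=control,
        update_mask=field_mask_pb2.FieldMask(paths=["display_name"]),
    )
)
print(updated.display_name)
```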
+ # In C++ this would require a dynamic_cast + return self._ListControls(self._session, self._host, self._interceptor) # type: ignore + + @property + def update_control( + self, + ) -> Callable[[control_service.UpdateControlRequest], gcd_control.Control]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._UpdateControl(self._session, self._host, self._interceptor) # type: ignore + + @property + def cancel_operation(self): + return self._CancelOperation(self._session, self._host, self._interceptor) # type: ignore + + class _CancelOperation(ControlServiceRestStub): + def __call__( + self, + request: operations_pb2.CancelOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Call the cancel operation method over HTTP. + + Args: + request (operations_pb2.CancelOperationRequest): + The request object for CancelOperation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{name=projects/*/operations/*}:cancel", + "body": "*", + }, + { + "method": "post", + "uri": "/v1/{name=projects/*/locations/*/collections/*/dataStores/*/branches/*/operations/*}:cancel", + "body": "*", + }, + { + "method": "post", + "uri": "/v1/{name=projects/*/locations/*/dataStores/*/branches/*/operations/*}:cancel", + "body": "*", + }, + ] + + request, metadata = self._interceptor.pre_cancel_operation( + request, metadata + ) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + body = json.dumps(transcoded_request["body"]) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + return self._interceptor.post_cancel_operation(None) + + @property + def get_operation(self): + return self._GetOperation(self._session, self._host, self._interceptor) # type: ignore + + class _GetOperation(ControlServiceRestStub): + def __call__( + self, + request: operations_pb2.GetOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the get operation method over HTTP. + + Args: + request (operations_pb2.GetOperationRequest): + The request object for GetOperation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + operations_pb2.Operation: Response from GetOperation method. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/dataConnector/operations/*}", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/dataStores/*/branches/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/dataStores/*/models/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/dataStores/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/dataStores/*/schemas/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/dataStores/*/siteSearchEngine/operations/*}", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/dataStores/*/siteSearchEngine/targetSites/operations/*}", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/engines/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/dataStores/*/branches/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/dataStores/*/models/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/dataStores/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/operations/*}", + }, + ] + + request, metadata = self._interceptor.pre_get_operation(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = operations_pb2.Operation() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_get_operation(resp) + return resp + + @property + def list_operations(self): + return self._ListOperations(self._session, self._host, self._interceptor) # type: ignore + + class _ListOperations(ControlServiceRestStub): + def __call__( + self, + request: operations_pb2.ListOperationsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Call the list operations method over HTTP. + + Args: + request (operations_pb2.ListOperationsRequest): + The request object for ListOperations method. 
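These long-running-operations stubs surface on the client as the standard `list_operations`, `get_operation`, and `cancel_operation` mixins. A small sketch of calling them, with hypothetical branch and operation names:

```python
from google.longrunning import operations_pb2

from google.cloud import discoveryengine_v1

client = discoveryengine_v1.ControlServiceClient(transport="rest")

# List operations under a data store branch (hypothetical resource name).
response = client.list_operations(
    request=operations_pb2.ListOperationsRequest(
        name="projects/my-project/locations/global/dataStores/my-data-store/branches/0"
    )
)
for operation in response.operations:
    print(operation.name, operation.done)

# Poll a single operation by name.
operation = client.get_operation(
    request=operations_pb2.GetOperationRequest(
        name="projects/my-project/locations/global/operations/example-operation"
    )
)
print(operation.done)
```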
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + operations_pb2.ListOperationsResponse: Response from ListOperations method. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*}/operations", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/dataConnector}/operations", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/dataStores/*/branches/*}/operations", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/dataStores/*/models/*}/operations", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/dataStores/*/schemas/*}/operations", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/dataStores/*/siteSearchEngine/targetSites}/operations", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/dataStores/*/siteSearchEngine}/operations", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/dataStores/*}/operations", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/engines/*}/operations", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*}/operations", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/dataStores/*/branches/*}/operations", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/dataStores/*/models/*}/operations", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/dataStores/*}/operations", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*}/operations", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*}/operations", + }, + ] + + request, metadata = self._interceptor.pre_list_operations(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = operations_pb2.ListOperationsResponse() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_list_operations(resp) + return resp + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__ = ("ControlServiceRestTransport",) diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/conversational_search_service/async_client.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/conversational_search_service/async_client.py index e3ad39fa0bc0..92e3dfb62ec0 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/conversational_search_service/async_client.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/conversational_search_service/async_client.py @@ -56,7 +56,10 @@ search_service, ) from google.cloud.discoveryengine_v1.types import conversation as gcd_conversation +from google.cloud.discoveryengine_v1.types import answer from google.cloud.discoveryengine_v1.types import conversation +from google.cloud.discoveryengine_v1.types import session +from google.cloud.discoveryengine_v1.types import session as gcd_session from .client import ConversationalSearchServiceClient from .transports.base import DEFAULT_CLIENT_INFO, ConversationalSearchServiceTransport @@ -77,6 +80,12 @@ class ConversationalSearchServiceAsyncClient: ) _DEFAULT_UNIVERSE = ConversationalSearchServiceClient._DEFAULT_UNIVERSE + answer_path = staticmethod(ConversationalSearchServiceClient.answer_path) + parse_answer_path = staticmethod( + ConversationalSearchServiceClient.parse_answer_path + ) + chunk_path = staticmethod(ConversationalSearchServiceClient.chunk_path) + parse_chunk_path = staticmethod(ConversationalSearchServiceClient.parse_chunk_path) conversation_path = staticmethod( ConversationalSearchServiceClient.conversation_path ) @@ -97,6 +106,10 @@ class ConversationalSearchServiceAsyncClient: parse_serving_config_path = staticmethod( ConversationalSearchServiceClient.parse_serving_config_path ) + session_path = staticmethod(ConversationalSearchServiceClient.session_path) + parse_session_path = staticmethod( + ConversationalSearchServiceClient.parse_session_path + ) common_billing_account_path = staticmethod( ConversationalSearchServiceClient.common_billing_account_path ) @@ -1014,43 +1027,186 @@ async def sample_list_conversations(): # Done; return the response. return response - async def list_operations( + async def answer_query( self, - request: Optional[operations_pb2.ListOperationsRequest] = None, + request: Optional[ + Union[conversational_search_service.AnswerQueryRequest, dict] + ] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> operations_pb2.ListOperationsResponse: - r"""Lists operations that match the specified filter in the request. + ) -> conversational_search_service.AnswerQueryResponse: + r"""Answer query method. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import discoveryengine_v1 + + async def sample_answer_query(): + # Create a client + client = discoveryengine_v1.ConversationalSearchServiceAsyncClient() + + # Initialize request argument(s) + query = discoveryengine_v1.Query() + query.text = "text_value" + + request = discoveryengine_v1.AnswerQueryRequest( + serving_config="serving_config_value", + query=query, + ) + + # Make the request + response = await client.answer_query(request=request) + + # Handle the response + print(response) Args: - request (:class:`~.operations_pb2.ListOperationsRequest`): + request (Optional[Union[google.cloud.discoveryengine_v1.types.AnswerQueryRequest, dict]]): The request object. Request message for - `ListOperations` method. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, - if any, should be retried. + [ConversationalSearchService.AnswerQuery][google.cloud.discoveryengine.v1.ConversationalSearchService.AnswerQuery] + method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. + Returns: - ~.operations_pb2.ListOperationsResponse: - Response message for ``ListOperations`` method. + google.cloud.discoveryengine_v1.types.AnswerQueryResponse: + Response message for + [ConversationalSearchService.AnswerQuery][google.cloud.discoveryengine.v1.ConversationalSearchService.AnswerQuery] + method. + """ # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.ListOperationsRequest(**request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, conversational_search_service.AnswerQueryRequest): + request = conversational_search_service.AnswerQueryRequest(request) # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.list_operations, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, + rpc = self._client._transport._wrapped_methods[ + self._client._transport.answer_query + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("serving_config", request.serving_config),) + ), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, ) + # Done; return the response. + return response + + async def get_answer( + self, + request: Optional[ + Union[conversational_search_service.GetAnswerRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> answer.Answer: + r"""Gets a Answer. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import discoveryengine_v1 + + async def sample_get_answer(): + # Create a client + client = discoveryengine_v1.ConversationalSearchServiceAsyncClient() + + # Initialize request argument(s) + request = discoveryengine_v1.GetAnswerRequest( + name="name_value", + ) + + # Make the request + response = await client.get_answer(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.discoveryengine_v1.types.GetAnswerRequest, dict]]): + The request object. Request for GetAnswer method. + name (:class:`str`): + Required. The resource name of the Answer to get. + Format: + ``projects/{project_number}/locations/{location_id}/collections/{collection}/engines/{engine_id}/sessions/{session_id}/answers/{answer_id}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.discoveryengine_v1.types.Answer: + Defines an answer. + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, conversational_search_service.GetAnswerRequest): + request = conversational_search_service.GetAnswerRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.get_answer + ] + # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( @@ -1071,47 +1227,107 @@ async def list_operations( # Done; return the response. return response - async def get_operation( + async def create_session( self, - request: Optional[operations_pb2.GetOperationRequest] = None, + request: Optional[ + Union[conversational_search_service.CreateSessionRequest, dict] + ] = None, *, + parent: Optional[str] = None, + session: Optional[gcd_session.Session] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> operations_pb2.Operation: - r"""Gets the latest state of a long-running operation. + ) -> gcd_session.Session: + r"""Creates a Session. 
+ + If the [Session][google.cloud.discoveryengine.v1.Session] to + create already exists, an ALREADY_EXISTS error is returned. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import discoveryengine_v1 + + async def sample_create_session(): + # Create a client + client = discoveryengine_v1.ConversationalSearchServiceAsyncClient() + + # Initialize request argument(s) + request = discoveryengine_v1.CreateSessionRequest( + parent="parent_value", + ) + + # Make the request + response = await client.create_session(request=request) + + # Handle the response + print(response) Args: - request (:class:`~.operations_pb2.GetOperationRequest`): - The request object. Request message for - `GetOperation` method. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, - if any, should be retried. + request (Optional[Union[google.cloud.discoveryengine_v1.types.CreateSessionRequest, dict]]): + The request object. Request for CreateSession method. + parent (:class:`str`): + Required. Full resource name of parent data store. + Format: + ``projects/{project_number}/locations/{location_id}/collections/{collection}/dataStores/{data_store_id}`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + session (:class:`google.cloud.discoveryengine_v1.types.Session`): + Required. The session to create. + This corresponds to the ``session`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. + Returns: - ~.operations_pb2.Operation: - An ``Operation`` object. + google.cloud.discoveryengine_v1.types.Session: + External session proto definition. """ # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.GetOperationRequest(**request) + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, session]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, conversational_search_service.CreateSessionRequest): + request = conversational_search_service.CreateSessionRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if parent is not None: + request.parent = parent + if session is not None: + request.session = session # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.get_operation, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[ + self._client._transport.create_session + ] # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), ) # Validate the universe domain. @@ -1128,6 +1344,627 @@ async def get_operation( # Done; return the response. return response + async def delete_session( + self, + request: Optional[ + Union[conversational_search_service.DeleteSessionRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a Session. + + If the [Session][google.cloud.discoveryengine.v1.Session] to + delete does not exist, a NOT_FOUND error is returned. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import discoveryengine_v1 + + async def sample_delete_session(): + # Create a client + client = discoveryengine_v1.ConversationalSearchServiceAsyncClient() + + # Initialize request argument(s) + request = discoveryengine_v1.DeleteSessionRequest( + name="name_value", + ) + + # Make the request + await client.delete_session(request=request) + + Args: + request (Optional[Union[google.cloud.discoveryengine_v1.types.DeleteSessionRequest, dict]]): + The request object. Request for DeleteSession method. + name (:class:`str`): + Required. The resource name of the Session to delete. + Format: + ``projects/{project_number}/locations/{location_id}/collections/{collection}/dataStores/{data_store_id}/sessions/{session_id}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
+ if not isinstance(request, conversational_search_service.DeleteSessionRequest): + request = conversational_search_service.DeleteSessionRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.delete_session + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def update_session( + self, + request: Optional[ + Union[conversational_search_service.UpdateSessionRequest, dict] + ] = None, + *, + session: Optional[gcd_session.Session] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> gcd_session.Session: + r"""Updates a Session. + + [Session][google.cloud.discoveryengine.v1.Session] action type + cannot be changed. If the + [Session][google.cloud.discoveryengine.v1.Session] to update + does not exist, a NOT_FOUND error is returned. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import discoveryengine_v1 + + async def sample_update_session(): + # Create a client + client = discoveryengine_v1.ConversationalSearchServiceAsyncClient() + + # Initialize request argument(s) + request = discoveryengine_v1.UpdateSessionRequest( + ) + + # Make the request + response = await client.update_session(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.discoveryengine_v1.types.UpdateSessionRequest, dict]]): + The request object. Request for UpdateSession method. + session (:class:`google.cloud.discoveryengine_v1.types.Session`): + Required. The Session to update. + This corresponds to the ``session`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): + Indicates which fields in the provided + [Session][google.cloud.discoveryengine.v1.Session] to + update. The following are NOT supported: + + - [Session.name][google.cloud.discoveryengine.v1.Session.name] + + If not set or empty, all supported fields are updated. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
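A sketch of UpdateSession using the flattened `session`/`update_mask` arguments described above. The session name is a placeholder, the choice of `user_pseudo_id` as the patched field is purely illustrative, and (per the docstring) `Session.name` itself cannot be updated.

```python
import asyncio

from google.cloud import discoveryengine_v1
from google.protobuf import field_mask_pb2


async def update_session_example() -> None:
    client = discoveryengine_v1.ConversationalSearchServiceAsyncClient()

    session = discoveryengine_v1.Session(
        # Hypothetical existing session.
        name=(
            "projects/my-project/locations/global/"
            "dataStores/my-data-store/sessions/example-session"
        ),
        user_pseudo_id="example-visitor",  # illustrative field to update
    )

    updated = await client.update_session(
        session=session,
        update_mask=field_mask_pb2.FieldMask(paths=["user_pseudo_id"]),
    )
    print(updated.name)


if __name__ == "__main__":
    asyncio.run(update_session_example())
```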
+ + Returns: + google.cloud.discoveryengine_v1.types.Session: + External session proto definition. + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([session, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, conversational_search_service.UpdateSessionRequest): + request = conversational_search_service.UpdateSessionRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if session is not None: + request.session = session + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.update_session + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("session.name", request.session.name),) + ), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_session( + self, + request: Optional[ + Union[conversational_search_service.GetSessionRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> session.Session: + r"""Gets a Session. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import discoveryengine_v1 + + async def sample_get_session(): + # Create a client + client = discoveryengine_v1.ConversationalSearchServiceAsyncClient() + + # Initialize request argument(s) + request = discoveryengine_v1.GetSessionRequest( + name="name_value", + ) + + # Make the request + response = await client.get_session(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.discoveryengine_v1.types.GetSessionRequest, dict]]): + The request object. Request for GetSession method. + name (:class:`str`): + Required. The resource name of the Session to get. + Format: + ``projects/{project_number}/locations/{location_id}/collections/{collection}/dataStores/{data_store_id}/sessions/{session_id}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. 
+ timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.discoveryengine_v1.types.Session: + External session proto definition. + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, conversational_search_service.GetSessionRequest): + request = conversational_search_service.GetSessionRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.get_session + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_sessions( + self, + request: Optional[ + Union[conversational_search_service.ListSessionsRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListSessionsAsyncPager: + r"""Lists all Sessions by their parent + [DataStore][google.cloud.discoveryengine.v1.DataStore]. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import discoveryengine_v1 + + async def sample_list_sessions(): + # Create a client + client = discoveryengine_v1.ConversationalSearchServiceAsyncClient() + + # Initialize request argument(s) + request = discoveryengine_v1.ListSessionsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_sessions(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.discoveryengine_v1.types.ListSessionsRequest, dict]]): + The request object. Request for ListSessions method. + parent (:class:`str`): + Required. The data store resource name. 
Format: + ``projects/{project_number}/locations/{location_id}/collections/{collection}/dataStores/{data_store_id}`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.discoveryengine_v1.services.conversational_search_service.pagers.ListSessionsAsyncPager: + Response for ListSessions method. + + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, conversational_search_service.ListSessionsRequest): + request = conversational_search_service.ListSessionsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.list_sessions + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListSessionsAsyncPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_operations( + self, + request: Optional[operations_pb2.ListOperationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Lists operations that match the specified filter in the request. + + Args: + request (:class:`~.operations_pb2.ListOperationsRequest`): + The request object. Request message for + `ListOperations` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.ListOperationsResponse: + Response message for ``ListOperations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. 
+ if isinstance(request, dict): + request = operations_pb2.ListOperationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_operations, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_operation( + self, + request: Optional[operations_pb2.GetOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. + + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def cancel_operation( + self, + request: Optional[operations_pb2.CancelOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Starts asynchronous cancellation on a long-running operation. + + The server makes a best effort to cancel the operation, but success + is not guaranteed. If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.CancelOperationRequest`): + The request object. Request message for + `CancelOperation` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. 
+        # The request isn't a proto-plus wrapped type,
+        # so it must be constructed via keyword expansion.
+        if isinstance(request, dict):
+            request = operations_pb2.CancelOperationRequest(**request)
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = gapic_v1.method_async.wrap_method(
+            self._client._transport.cancel_operation,
+            default_timeout=None,
+            client_info=DEFAULT_CLIENT_INFO,
+        )
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
+        )
+
+        # Validate the universe domain.
+        self._client._validate_universe_domain()
+
+        # Send the request.
+        await rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
     async def __aenter__(self) -> "ConversationalSearchServiceAsyncClient":
         return self
diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/conversational_search_service/client.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/conversational_search_service/client.py
index 926ec9dec766..828f5daf0658 100644
--- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/conversational_search_service/client.py
+++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/conversational_search_service/client.py
@@ -61,7 +61,10 @@
     search_service,
 )
 from google.cloud.discoveryengine_v1.types import conversation as gcd_conversation
+from google.cloud.discoveryengine_v1.types import answer
 from google.cloud.discoveryengine_v1.types import conversation
+from google.cloud.discoveryengine_v1.types import session
+from google.cloud.discoveryengine_v1.types import session as gcd_session
 
 from .transports.base import DEFAULT_CLIENT_INFO, ConversationalSearchServiceTransport
 from .transports.grpc import ConversationalSearchServiceGrpcTransport
@@ -199,6 +202,60 @@ def transport(self) -> ConversationalSearchServiceTransport:
         """
         return self._transport
 
+    @staticmethod
+    def answer_path(
+        project: str,
+        location: str,
+        data_store: str,
+        session: str,
+        answer: str,
+    ) -> str:
+        """Returns a fully-qualified answer string."""
+        return "projects/{project}/locations/{location}/dataStores/{data_store}/sessions/{session}/answers/{answer}".format(
+            project=project,
+            location=location,
+            data_store=data_store,
+            session=session,
+            answer=answer,
+        )
+
+    @staticmethod
+    def parse_answer_path(path: str) -> Dict[str, str]:
+        """Parses a answer path into its component segments."""
+        m = re.match(
+            r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/dataStores/(?P<data_store>.+?)/sessions/(?P<session>.+?)/answers/(?P<answer>.+?)$",
+            path,
+        )
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def chunk_path(
+        project: str,
+        location: str,
+        data_store: str,
+        branch: str,
+        document: str,
+        chunk: str,
+    ) -> str:
+        """Returns a fully-qualified chunk string."""
+        return "projects/{project}/locations/{location}/dataStores/{data_store}/branches/{branch}/documents/{document}/chunks/{chunk}".format(
+            project=project,
+            location=location,
+            data_store=data_store,
+            branch=branch,
+            document=document,
+            chunk=chunk,
+        )
+
+    @staticmethod
+    def parse_chunk_path(path: str) -> Dict[str, str]:
+        """Parses a chunk path into its component segments."""
+        m = re.match(
+            r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/dataStores/(?P<data_store>.+?)/branches/(?P<branch>.+?)/documents/(?P<document>.+?)/chunks/(?P<chunk>.+?)$",
+            path,
+        )
+        return m.groupdict() if m else {}
+
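The resource-path helpers added above follow the usual GAPIC pattern: each `*_path` builder has a matching `parse_*_path` that round-trips the formatted name. A minimal sketch of how they might be used from application code (the component values below are hypothetical placeholders, not taken from the diff):

```python
from google.cloud import discoveryengine_v1

client_cls = discoveryengine_v1.ConversationalSearchServiceClient

# Build a fully-qualified answer resource name (placeholder components).
answer_name = client_cls.answer_path(
    project="example-project",
    location="global",
    data_store="example-data-store",
    session="example-session",
    answer="example-answer",
)
# -> "projects/example-project/locations/global/dataStores/example-data-store/
#     sessions/example-session/answers/example-answer"

# Round-trip the name back into its component segments.
segments = client_cls.parse_answer_path(answer_name)
assert segments["answer"] == "example-answer"
```

The same pattern applies to `chunk_path`/`parse_chunk_path` here and to `session_path`/`parse_session_path` added in the hunk that follows; these helpers only format and parse strings, so no credentials or network calls are involved.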
     @staticmethod
     def conversation_path(
         project: str,
@@ -295,6 +352,30 @@ def parse_serving_config_path(path: str) -> Dict[str, str]:
         )
         return m.groupdict() if m else {}
 
+    @staticmethod
+    def session_path(
+        project: str,
+        location: str,
+        data_store: str,
+        session: str,
+    ) -> str:
+        """Returns a fully-qualified session string."""
+        return "projects/{project}/locations/{location}/dataStores/{data_store}/sessions/{session}".format(
+            project=project,
+            location=location,
+            data_store=data_store,
+            session=session,
+        )
+
+    @staticmethod
+    def parse_session_path(path: str) -> Dict[str, str]:
+        """Parses a session path into its component segments."""
+        m = re.match(
+            r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/dataStores/(?P<data_store>.+?)/sessions/(?P<session>.+?)$",
+            path,
+        )
+        return m.groupdict() if m else {}
+
     @staticmethod
     def common_billing_account_path(
         billing_account: str,
@@ -1474,60 +1555,82 @@ def sample_list_conversations():
         # Done; return the response.
         return response
 
-    def __enter__(self) -> "ConversationalSearchServiceClient":
-        return self
-
-    def __exit__(self, type, value, traceback):
-        """Releases underlying transport's resources.
-
-        .. warning::
-            ONLY use as a context manager if the transport is NOT shared
-            with other clients! Exiting the with block will CLOSE the transport
-            and may cause errors in other clients!
-        """
-        self.transport.close()
-
-    def list_operations(
+    def answer_query(
         self,
-        request: Optional[operations_pb2.ListOperationsRequest] = None,
+        request: Optional[
+            Union[conversational_search_service.AnswerQueryRequest, dict]
+        ] = None,
         *,
         retry: OptionalRetry = gapic_v1.method.DEFAULT,
         timeout: Union[float, object] = gapic_v1.method.DEFAULT,
         metadata: Sequence[Tuple[str, str]] = (),
-    ) -> operations_pb2.ListOperationsResponse:
-        r"""Lists operations that match the specified filter in the request.
+    ) -> conversational_search_service.AnswerQueryResponse:
+        r"""Answer query method.
+
+        .. code-block:: python
+
+            # This snippet has been automatically generated and should be regarded as a
+            # code template only.
+            # It will require modifications to work:
+            # - It may require correct/in-range values for request initialization.
+            # - It may require specifying regional endpoints when creating the service
+            #   client as shown in:
+            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
+            from google.cloud import discoveryengine_v1
+
+            def sample_answer_query():
+                # Create a client
+                client = discoveryengine_v1.ConversationalSearchServiceClient()
+
+                # Initialize request argument(s)
+                query = discoveryengine_v1.Query()
+                query.text = "text_value"
+
+                request = discoveryengine_v1.AnswerQueryRequest(
+                    serving_config="serving_config_value",
+                    query=query,
+                )
+
+                # Make the request
+                response = client.answer_query(request=request)
+
+                # Handle the response
+                print(response)
 
         Args:
-            request (:class:`~.operations_pb2.ListOperationsRequest`):
+            request (Union[google.cloud.discoveryengine_v1.types.AnswerQueryRequest, dict]):
                 The request object. Request message for
-                `ListOperations` method.
-            retry (google.api_core.retry.Retry): Designation of what errors,
-                    if any, should be retried.
+                [ConversationalSearchService.AnswerQuery][google.cloud.discoveryengine.v1.ConversationalSearchService.AnswerQuery]
+                method.
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
             timeout (float): The timeout for this request.
metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. + Returns: - ~.operations_pb2.ListOperationsResponse: - Response message for ``ListOperations`` method. + google.cloud.discoveryengine_v1.types.AnswerQueryResponse: + Response message for + [ConversationalSearchService.AnswerQuery][google.cloud.discoveryengine.v1.ConversationalSearchService.AnswerQuery] + method. + """ # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.ListOperationsRequest(**request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, conversational_search_service.AnswerQueryRequest): + request = conversational_search_service.AnswerQueryRequest(request) # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method.wrap_method( - self._transport.list_operations, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._transport._wrapped_methods[self._transport.answer_query] # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + gapic_v1.routing_header.to_grpc_metadata( + (("serving_config", request.serving_config),) + ), ) # Validate the universe domain. @@ -1544,42 +1647,88 @@ def list_operations( # Done; return the response. return response - def get_operation( + def get_answer( self, - request: Optional[operations_pb2.GetOperationRequest] = None, + request: Optional[ + Union[conversational_search_service.GetAnswerRequest, dict] + ] = None, *, + name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> operations_pb2.Operation: - r"""Gets the latest state of a long-running operation. + ) -> answer.Answer: + r"""Gets a Answer. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import discoveryengine_v1 + + def sample_get_answer(): + # Create a client + client = discoveryengine_v1.ConversationalSearchServiceClient() + + # Initialize request argument(s) + request = discoveryengine_v1.GetAnswerRequest( + name="name_value", + ) + + # Make the request + response = client.get_answer(request=request) + + # Handle the response + print(response) Args: - request (:class:`~.operations_pb2.GetOperationRequest`): - The request object. Request message for - `GetOperation` method. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. + request (Union[google.cloud.discoveryengine_v1.types.GetAnswerRequest, dict]): + The request object. Request for GetAnswer method. + name (str): + Required. The resource name of the Answer to get. 
+ Format: + ``projects/{project_number}/locations/{location_id}/collections/{collection}/engines/{engine_id}/sessions/{session_id}/answers/{answer_id}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. + Returns: - ~.operations_pb2.Operation: - An ``Operation`` object. + google.cloud.discoveryengine_v1.types.Answer: + Defines an answer. """ # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.GetOperationRequest(**request) + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, conversational_search_service.GetAnswerRequest): + request = conversational_search_service.GetAnswerRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method.wrap_method( - self._transport.get_operation, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._transport._wrapped_methods[self._transport.get_answer] # Certain fields should be provided within the metadata header; # add these here. @@ -1601,6 +1750,742 @@ def get_operation( # Done; return the response. return response + def create_session( + self, + request: Optional[ + Union[conversational_search_service.CreateSessionRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + session: Optional[gcd_session.Session] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> gcd_session.Session: + r"""Creates a Session. + + If the [Session][google.cloud.discoveryengine.v1.Session] to + create already exists, an ALREADY_EXISTS error is returned. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import discoveryengine_v1 + + def sample_create_session(): + # Create a client + client = discoveryengine_v1.ConversationalSearchServiceClient() + + # Initialize request argument(s) + request = discoveryengine_v1.CreateSessionRequest( + parent="parent_value", + ) + + # Make the request + response = client.create_session(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.discoveryengine_v1.types.CreateSessionRequest, dict]): + The request object. Request for CreateSession method. + parent (str): + Required. Full resource name of parent data store. + Format: + ``projects/{project_number}/locations/{location_id}/collections/{collection}/dataStores/{data_store_id}`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + session (google.cloud.discoveryengine_v1.types.Session): + Required. The session to create. + This corresponds to the ``session`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.discoveryengine_v1.types.Session: + External session proto definition. + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, session]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, conversational_search_service.CreateSessionRequest): + request = conversational_search_service.CreateSessionRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if session is not None: + request.session = session + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_session] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def delete_session( + self, + request: Optional[ + Union[conversational_search_service.DeleteSessionRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a Session. 
+ + If the [Session][google.cloud.discoveryengine.v1.Session] to + delete does not exist, a NOT_FOUND error is returned. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import discoveryengine_v1 + + def sample_delete_session(): + # Create a client + client = discoveryengine_v1.ConversationalSearchServiceClient() + + # Initialize request argument(s) + request = discoveryengine_v1.DeleteSessionRequest( + name="name_value", + ) + + # Make the request + client.delete_session(request=request) + + Args: + request (Union[google.cloud.discoveryengine_v1.types.DeleteSessionRequest, dict]): + The request object. Request for DeleteSession method. + name (str): + Required. The resource name of the Session to delete. + Format: + ``projects/{project_number}/locations/{location_id}/collections/{collection}/dataStores/{data_store_id}/sessions/{session_id}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, conversational_search_service.DeleteSessionRequest): + request = conversational_search_service.DeleteSessionRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_session] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def update_session( + self, + request: Optional[ + Union[conversational_search_service.UpdateSessionRequest, dict] + ] = None, + *, + session: Optional[gcd_session.Session] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> gcd_session.Session: + r"""Updates a Session. 
+ + [Session][google.cloud.discoveryengine.v1.Session] action type + cannot be changed. If the + [Session][google.cloud.discoveryengine.v1.Session] to update + does not exist, a NOT_FOUND error is returned. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import discoveryengine_v1 + + def sample_update_session(): + # Create a client + client = discoveryengine_v1.ConversationalSearchServiceClient() + + # Initialize request argument(s) + request = discoveryengine_v1.UpdateSessionRequest( + ) + + # Make the request + response = client.update_session(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.discoveryengine_v1.types.UpdateSessionRequest, dict]): + The request object. Request for UpdateSession method. + session (google.cloud.discoveryengine_v1.types.Session): + Required. The Session to update. + This corresponds to the ``session`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Indicates which fields in the provided + [Session][google.cloud.discoveryengine.v1.Session] to + update. The following are NOT supported: + + - [Session.name][google.cloud.discoveryengine.v1.Session.name] + + If not set or empty, all supported fields are updated. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.discoveryengine_v1.types.Session: + External session proto definition. + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([session, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, conversational_search_service.UpdateSessionRequest): + request = conversational_search_service.UpdateSessionRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if session is not None: + request.session = session + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update_session] + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("session.name", request.session.name),) + ), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_session( + self, + request: Optional[ + Union[conversational_search_service.GetSessionRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> session.Session: + r"""Gets a Session. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import discoveryengine_v1 + + def sample_get_session(): + # Create a client + client = discoveryengine_v1.ConversationalSearchServiceClient() + + # Initialize request argument(s) + request = discoveryengine_v1.GetSessionRequest( + name="name_value", + ) + + # Make the request + response = client.get_session(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.discoveryengine_v1.types.GetSessionRequest, dict]): + The request object. Request for GetSession method. + name (str): + Required. The resource name of the Session to get. + Format: + ``projects/{project_number}/locations/{location_id}/collections/{collection}/dataStores/{data_store_id}/sessions/{session_id}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.discoveryengine_v1.types.Session: + External session proto definition. + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, conversational_search_service.GetSessionRequest): + request = conversational_search_service.GetSessionRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_session] + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def list_sessions( + self, + request: Optional[ + Union[conversational_search_service.ListSessionsRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListSessionsPager: + r"""Lists all Sessions by their parent + [DataStore][google.cloud.discoveryengine.v1.DataStore]. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import discoveryengine_v1 + + def sample_list_sessions(): + # Create a client + client = discoveryengine_v1.ConversationalSearchServiceClient() + + # Initialize request argument(s) + request = discoveryengine_v1.ListSessionsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_sessions(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.discoveryengine_v1.types.ListSessionsRequest, dict]): + The request object. Request for ListSessions method. + parent (str): + Required. The data store resource name. Format: + ``projects/{project_number}/locations/{location_id}/collections/{collection}/dataStores/{data_store_id}`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.discoveryengine_v1.services.conversational_search_service.pagers.ListSessionsPager: + Response for ListSessions method. + + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, conversational_search_service.ListSessionsRequest): + request = conversational_search_service.ListSessionsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_sessions] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListSessionsPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def __enter__(self) -> "ConversationalSearchServiceClient": + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! + """ + self.transport.close() + + def list_operations( + self, + request: Optional[operations_pb2.ListOperationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Lists operations that match the specified filter in the request. + + Args: + request (:class:`~.operations_pb2.ListOperationsRequest`): + The request object. Request message for + `ListOperations` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.ListOperationsResponse: + Response message for ``ListOperations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.ListOperationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.list_operations, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_operation( + self, + request: Optional[operations_pb2.GetOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. + + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. 
+ retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.get_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def cancel_operation( + self, + request: Optional[operations_pb2.CancelOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Starts asynchronous cancellation on a long-running operation. + + The server makes a best effort to cancel the operation, but success + is not guaranteed. If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.CancelOperationRequest`): + The request object. Request message for + `CancelOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.CancelOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.cancel_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. 
+ rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( gapic_version=package_version.__version__ diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/conversational_search_service/pagers.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/conversational_search_service/pagers.py index 8d3784d4c2b7..dfa417ed0a18 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/conversational_search_service/pagers.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/conversational_search_service/pagers.py @@ -27,6 +27,7 @@ from google.cloud.discoveryengine_v1.types import ( conversation, conversational_search_service, + session, ) @@ -162,3 +163,135 @@ async def async_generator(): def __repr__(self) -> str: return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListSessionsPager: + """A pager for iterating through ``list_sessions`` requests. + + This class thinly wraps an initial + :class:`google.cloud.discoveryengine_v1.types.ListSessionsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``sessions`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListSessions`` requests and continue to iterate + through the ``sessions`` field on the + corresponding responses. + + All the usual :class:`google.cloud.discoveryengine_v1.types.ListSessionsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., conversational_search_service.ListSessionsResponse], + request: conversational_search_service.ListSessionsRequest, + response: conversational_search_service.ListSessionsResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.discoveryengine_v1.types.ListSessionsRequest): + The initial request object. + response (google.cloud.discoveryengine_v1.types.ListSessionsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = conversational_search_service.ListSessionsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[conversational_search_service.ListSessionsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[session.Session]: + for page in self.pages: + yield from page.sessions + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListSessionsAsyncPager: + """A pager for iterating through ``list_sessions`` requests. + + This class thinly wraps an initial + :class:`google.cloud.discoveryengine_v1.types.ListSessionsResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``sessions`` field. 
+ + If there are more pages, the ``__aiter__`` method will make additional + ``ListSessions`` requests and continue to iterate + through the ``sessions`` field on the + corresponding responses. + + All the usual :class:`google.cloud.discoveryengine_v1.types.ListSessionsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[ + ..., Awaitable[conversational_search_service.ListSessionsResponse] + ], + request: conversational_search_service.ListSessionsRequest, + response: conversational_search_service.ListSessionsResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.discoveryengine_v1.types.ListSessionsRequest): + The initial request object. + response (google.cloud.discoveryengine_v1.types.ListSessionsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = conversational_search_service.ListSessionsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages( + self, + ) -> AsyncIterator[conversational_search_service.ListSessionsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + + def __aiter__(self) -> AsyncIterator[session.Session]: + async def async_generator(): + async for page in self.pages: + for response in page.sessions: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/conversational_search_service/transports/base.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/conversational_search_service/transports/base.py index e09a5c0b2095..a217e2ea2eba 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/conversational_search_service/transports/base.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/conversational_search_service/transports/base.py @@ -29,8 +29,11 @@ from google.cloud.discoveryengine_v1 import gapic_version as package_version from google.cloud.discoveryengine_v1.types import conversation as gcd_conversation +from google.cloud.discoveryengine_v1.types import answer from google.cloud.discoveryengine_v1.types import conversation from google.cloud.discoveryengine_v1.types import conversational_search_service +from google.cloud.discoveryengine_v1.types import session +from google.cloud.discoveryengine_v1.types import session as gcd_session DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( gapic_version=package_version.__version__ @@ -161,6 +164,41 @@ def _prep_wrapped_messages(self, client_info): default_timeout=None, client_info=client_info, ), + self.answer_query: gapic_v1.method.wrap_method( + self.answer_query, + default_timeout=None, + client_info=client_info, + ), + self.get_answer: 
gapic_v1.method.wrap_method( + self.get_answer, + default_timeout=None, + client_info=client_info, + ), + self.create_session: gapic_v1.method.wrap_method( + self.create_session, + default_timeout=None, + client_info=client_info, + ), + self.delete_session: gapic_v1.method.wrap_method( + self.delete_session, + default_timeout=None, + client_info=client_info, + ), + self.update_session: gapic_v1.method.wrap_method( + self.update_session, + default_timeout=None, + client_info=client_info, + ), + self.get_session: gapic_v1.method.wrap_method( + self.get_session, + default_timeout=None, + client_info=client_info, + ), + self.list_sessions: gapic_v1.method.wrap_method( + self.list_sessions, + default_timeout=None, + client_info=client_info, + ), } def close(self): @@ -232,6 +270,75 @@ def list_conversations( ]: raise NotImplementedError() + @property + def answer_query( + self, + ) -> Callable[ + [conversational_search_service.AnswerQueryRequest], + Union[ + conversational_search_service.AnswerQueryResponse, + Awaitable[conversational_search_service.AnswerQueryResponse], + ], + ]: + raise NotImplementedError() + + @property + def get_answer( + self, + ) -> Callable[ + [conversational_search_service.GetAnswerRequest], + Union[answer.Answer, Awaitable[answer.Answer]], + ]: + raise NotImplementedError() + + @property + def create_session( + self, + ) -> Callable[ + [conversational_search_service.CreateSessionRequest], + Union[gcd_session.Session, Awaitable[gcd_session.Session]], + ]: + raise NotImplementedError() + + @property + def delete_session( + self, + ) -> Callable[ + [conversational_search_service.DeleteSessionRequest], + Union[empty_pb2.Empty, Awaitable[empty_pb2.Empty]], + ]: + raise NotImplementedError() + + @property + def update_session( + self, + ) -> Callable[ + [conversational_search_service.UpdateSessionRequest], + Union[gcd_session.Session, Awaitable[gcd_session.Session]], + ]: + raise NotImplementedError() + + @property + def get_session( + self, + ) -> Callable[ + [conversational_search_service.GetSessionRequest], + Union[session.Session, Awaitable[session.Session]], + ]: + raise NotImplementedError() + + @property + def list_sessions( + self, + ) -> Callable[ + [conversational_search_service.ListSessionsRequest], + Union[ + conversational_search_service.ListSessionsResponse, + Awaitable[conversational_search_service.ListSessionsResponse], + ], + ]: + raise NotImplementedError() + @property def list_operations( self, @@ -253,6 +360,12 @@ def get_operation( ]: raise NotImplementedError() + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None,]: + raise NotImplementedError() + @property def kind(self) -> str: raise NotImplementedError() diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/conversational_search_service/transports/grpc.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/conversational_search_service/transports/grpc.py index 55ffe4212c24..4279e74d37d0 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/conversational_search_service/transports/grpc.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/conversational_search_service/transports/grpc.py @@ -26,8 +26,11 @@ import grpc # type: ignore from google.cloud.discoveryengine_v1.types import conversation as gcd_conversation +from google.cloud.discoveryengine_v1.types import answer from google.cloud.discoveryengine_v1.types 
import conversation from google.cloud.discoveryengine_v1.types import conversational_search_service +from google.cloud.discoveryengine_v1.types import session +from google.cloud.discoveryengine_v1.types import session as gcd_session from .base import DEFAULT_CLIENT_INFO, ConversationalSearchServiceTransport @@ -425,9 +428,232 @@ def list_conversations( ) return self._stubs["list_conversations"] + @property + def answer_query( + self, + ) -> Callable[ + [conversational_search_service.AnswerQueryRequest], + conversational_search_service.AnswerQueryResponse, + ]: + r"""Return a callable for the answer query method over gRPC. + + Answer query method. + + Returns: + Callable[[~.AnswerQueryRequest], + ~.AnswerQueryResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "answer_query" not in self._stubs: + self._stubs["answer_query"] = self.grpc_channel.unary_unary( + "/google.cloud.discoveryengine.v1.ConversationalSearchService/AnswerQuery", + request_serializer=conversational_search_service.AnswerQueryRequest.serialize, + response_deserializer=conversational_search_service.AnswerQueryResponse.deserialize, + ) + return self._stubs["answer_query"] + + @property + def get_answer( + self, + ) -> Callable[[conversational_search_service.GetAnswerRequest], answer.Answer]: + r"""Return a callable for the get answer method over gRPC. + + Gets a Answer. + + Returns: + Callable[[~.GetAnswerRequest], + ~.Answer]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_answer" not in self._stubs: + self._stubs["get_answer"] = self.grpc_channel.unary_unary( + "/google.cloud.discoveryengine.v1.ConversationalSearchService/GetAnswer", + request_serializer=conversational_search_service.GetAnswerRequest.serialize, + response_deserializer=answer.Answer.deserialize, + ) + return self._stubs["get_answer"] + + @property + def create_session( + self, + ) -> Callable[ + [conversational_search_service.CreateSessionRequest], gcd_session.Session + ]: + r"""Return a callable for the create session method over gRPC. + + Creates a Session. + + If the [Session][google.cloud.discoveryengine.v1.Session] to + create already exists, an ALREADY_EXISTS error is returned. + + Returns: + Callable[[~.CreateSessionRequest], + ~.Session]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_session" not in self._stubs: + self._stubs["create_session"] = self.grpc_channel.unary_unary( + "/google.cloud.discoveryengine.v1.ConversationalSearchService/CreateSession", + request_serializer=conversational_search_service.CreateSessionRequest.serialize, + response_deserializer=gcd_session.Session.deserialize, + ) + return self._stubs["create_session"] + + @property + def delete_session( + self, + ) -> Callable[ + [conversational_search_service.DeleteSessionRequest], empty_pb2.Empty + ]: + r"""Return a callable for the delete session method over gRPC. 
+ + Deletes a Session. + + If the [Session][google.cloud.discoveryengine.v1.Session] to + delete does not exist, a NOT_FOUND error is returned. + + Returns: + Callable[[~.DeleteSessionRequest], + ~.Empty]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_session" not in self._stubs: + self._stubs["delete_session"] = self.grpc_channel.unary_unary( + "/google.cloud.discoveryengine.v1.ConversationalSearchService/DeleteSession", + request_serializer=conversational_search_service.DeleteSessionRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs["delete_session"] + + @property + def update_session( + self, + ) -> Callable[ + [conversational_search_service.UpdateSessionRequest], gcd_session.Session + ]: + r"""Return a callable for the update session method over gRPC. + + Updates a Session. + + [Session][google.cloud.discoveryengine.v1.Session] action type + cannot be changed. If the + [Session][google.cloud.discoveryengine.v1.Session] to update + does not exist, a NOT_FOUND error is returned. + + Returns: + Callable[[~.UpdateSessionRequest], + ~.Session]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_session" not in self._stubs: + self._stubs["update_session"] = self.grpc_channel.unary_unary( + "/google.cloud.discoveryengine.v1.ConversationalSearchService/UpdateSession", + request_serializer=conversational_search_service.UpdateSessionRequest.serialize, + response_deserializer=gcd_session.Session.deserialize, + ) + return self._stubs["update_session"] + + @property + def get_session( + self, + ) -> Callable[[conversational_search_service.GetSessionRequest], session.Session]: + r"""Return a callable for the get session method over gRPC. + + Gets a Session. + + Returns: + Callable[[~.GetSessionRequest], + ~.Session]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_session" not in self._stubs: + self._stubs["get_session"] = self.grpc_channel.unary_unary( + "/google.cloud.discoveryengine.v1.ConversationalSearchService/GetSession", + request_serializer=conversational_search_service.GetSessionRequest.serialize, + response_deserializer=session.Session.deserialize, + ) + return self._stubs["get_session"] + + @property + def list_sessions( + self, + ) -> Callable[ + [conversational_search_service.ListSessionsRequest], + conversational_search_service.ListSessionsResponse, + ]: + r"""Return a callable for the list sessions method over gRPC. + + Lists all Sessions by their parent + [DataStore][google.cloud.discoveryengine.v1.DataStore]. + + Returns: + Callable[[~.ListSessionsRequest], + ~.ListSessionsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_sessions" not in self._stubs: + self._stubs["list_sessions"] = self.grpc_channel.unary_unary( + "/google.cloud.discoveryengine.v1.ConversationalSearchService/ListSessions", + request_serializer=conversational_search_service.ListSessionsRequest.serialize, + response_deserializer=conversational_search_service.ListSessionsResponse.deserialize, + ) + return self._stubs["list_sessions"] + def close(self): self.grpc_channel.close() + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None]: + r"""Return a callable for the cancel_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "cancel_operation" not in self._stubs: + self._stubs["cancel_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/CancelOperation", + request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["cancel_operation"] + @property def get_operation( self, diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/conversational_search_service/transports/grpc_asyncio.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/conversational_search_service/transports/grpc_asyncio.py index 547e77a1284a..095be3633250 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/conversational_search_service/transports/grpc_asyncio.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/conversational_search_service/transports/grpc_asyncio.py @@ -28,8 +28,11 @@ from grpc.experimental import aio # type: ignore from google.cloud.discoveryengine_v1.types import conversation as gcd_conversation +from google.cloud.discoveryengine_v1.types import answer from google.cloud.discoveryengine_v1.types import conversation from google.cloud.discoveryengine_v1.types import conversational_search_service +from google.cloud.discoveryengine_v1.types import session +from google.cloud.discoveryengine_v1.types import session as gcd_session from .base import DEFAULT_CLIENT_INFO, ConversationalSearchServiceTransport from .grpc import ConversationalSearchServiceGrpcTransport @@ -432,6 +435,218 @@ def list_conversations( ) return self._stubs["list_conversations"] + @property + def answer_query( + self, + ) -> Callable[ + [conversational_search_service.AnswerQueryRequest], + Awaitable[conversational_search_service.AnswerQueryResponse], + ]: + r"""Return a callable for the answer query method over gRPC. + + Answer query method. + + Returns: + Callable[[~.AnswerQueryRequest], + Awaitable[~.AnswerQueryResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
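Each transport property above builds its gRPC stub on first access and caches it in `self._stubs`. A stripped-down sketch of that pattern, assuming an already-created `grpc.Channel`; the proto serializer hooks are omitted to keep the sketch minimal.

```python
from typing import Callable, Dict

import grpc


class LazyStubTransport:
    """Illustrative container mirroring the stub-caching pattern above."""

    def __init__(self, channel: grpc.Channel) -> None:
        self.grpc_channel = channel
        self._stubs: Dict[str, Callable] = {}

    @property
    def list_sessions(self) -> Callable:
        # Build the unary-unary stub once, on first access, then reuse it.
        if "list_sessions" not in self._stubs:
            self._stubs["list_sessions"] = self.grpc_channel.unary_unary(
                "/google.cloud.discoveryengine.v1.ConversationalSearchService/ListSessions",
                # The generated transport additionally passes the proto
                # request_serializer/response_deserializer hooks here.
            )
        return self._stubs["list_sessions"]
```

Deferring stub creation keeps transport construction cheap and means only the RPCs actually used ever touch the channel.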
+ if "answer_query" not in self._stubs: + self._stubs["answer_query"] = self.grpc_channel.unary_unary( + "/google.cloud.discoveryengine.v1.ConversationalSearchService/AnswerQuery", + request_serializer=conversational_search_service.AnswerQueryRequest.serialize, + response_deserializer=conversational_search_service.AnswerQueryResponse.deserialize, + ) + return self._stubs["answer_query"] + + @property + def get_answer( + self, + ) -> Callable[ + [conversational_search_service.GetAnswerRequest], Awaitable[answer.Answer] + ]: + r"""Return a callable for the get answer method over gRPC. + + Gets a Answer. + + Returns: + Callable[[~.GetAnswerRequest], + Awaitable[~.Answer]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_answer" not in self._stubs: + self._stubs["get_answer"] = self.grpc_channel.unary_unary( + "/google.cloud.discoveryengine.v1.ConversationalSearchService/GetAnswer", + request_serializer=conversational_search_service.GetAnswerRequest.serialize, + response_deserializer=answer.Answer.deserialize, + ) + return self._stubs["get_answer"] + + @property + def create_session( + self, + ) -> Callable[ + [conversational_search_service.CreateSessionRequest], + Awaitable[gcd_session.Session], + ]: + r"""Return a callable for the create session method over gRPC. + + Creates a Session. + + If the [Session][google.cloud.discoveryengine.v1.Session] to + create already exists, an ALREADY_EXISTS error is returned. + + Returns: + Callable[[~.CreateSessionRequest], + Awaitable[~.Session]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_session" not in self._stubs: + self._stubs["create_session"] = self.grpc_channel.unary_unary( + "/google.cloud.discoveryengine.v1.ConversationalSearchService/CreateSession", + request_serializer=conversational_search_service.CreateSessionRequest.serialize, + response_deserializer=gcd_session.Session.deserialize, + ) + return self._stubs["create_session"] + + @property + def delete_session( + self, + ) -> Callable[ + [conversational_search_service.DeleteSessionRequest], Awaitable[empty_pb2.Empty] + ]: + r"""Return a callable for the delete session method over gRPC. + + Deletes a Session. + + If the [Session][google.cloud.discoveryengine.v1.Session] to + delete does not exist, a NOT_FOUND error is returned. + + Returns: + Callable[[~.DeleteSessionRequest], + Awaitable[~.Empty]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
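These awaitable callables back the asyncio client surface. A minimal sketch, assuming `ConversationalSearchServiceAsyncClient` mirrors the sync client and that `CreateSessionRequest`, `DeleteSessionRequest`, and `Session` (with a `user_pseudo_id` field) are exported at the package level; all resource names are illustrative.

```python
import asyncio

from google.cloud import discoveryengine_v1

PARENT = "projects/my-project/locations/global/dataStores/my-data-store"


async def main() -> None:
    client = discoveryengine_v1.ConversationalSearchServiceAsyncClient()

    # Each RPC returns an awaitable, matching the Awaitable[...] annotations
    # on the transport properties above.
    session = await client.create_session(
        request=discoveryengine_v1.CreateSessionRequest(
            parent=PARENT,
            session=discoveryengine_v1.Session(user_pseudo_id="visitor-123"),
        )
    )
    print("created", session.name)

    await client.delete_session(
        request=discoveryengine_v1.DeleteSessionRequest(name=session.name)
    )


asyncio.run(main())
```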
+ if "delete_session" not in self._stubs: + self._stubs["delete_session"] = self.grpc_channel.unary_unary( + "/google.cloud.discoveryengine.v1.ConversationalSearchService/DeleteSession", + request_serializer=conversational_search_service.DeleteSessionRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs["delete_session"] + + @property + def update_session( + self, + ) -> Callable[ + [conversational_search_service.UpdateSessionRequest], + Awaitable[gcd_session.Session], + ]: + r"""Return a callable for the update session method over gRPC. + + Updates a Session. + + [Session][google.cloud.discoveryengine.v1.Session] action type + cannot be changed. If the + [Session][google.cloud.discoveryengine.v1.Session] to update + does not exist, a NOT_FOUND error is returned. + + Returns: + Callable[[~.UpdateSessionRequest], + Awaitable[~.Session]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_session" not in self._stubs: + self._stubs["update_session"] = self.grpc_channel.unary_unary( + "/google.cloud.discoveryengine.v1.ConversationalSearchService/UpdateSession", + request_serializer=conversational_search_service.UpdateSessionRequest.serialize, + response_deserializer=gcd_session.Session.deserialize, + ) + return self._stubs["update_session"] + + @property + def get_session( + self, + ) -> Callable[ + [conversational_search_service.GetSessionRequest], Awaitable[session.Session] + ]: + r"""Return a callable for the get session method over gRPC. + + Gets a Session. + + Returns: + Callable[[~.GetSessionRequest], + Awaitable[~.Session]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_session" not in self._stubs: + self._stubs["get_session"] = self.grpc_channel.unary_unary( + "/google.cloud.discoveryengine.v1.ConversationalSearchService/GetSession", + request_serializer=conversational_search_service.GetSessionRequest.serialize, + response_deserializer=session.Session.deserialize, + ) + return self._stubs["get_session"] + + @property + def list_sessions( + self, + ) -> Callable[ + [conversational_search_service.ListSessionsRequest], + Awaitable[conversational_search_service.ListSessionsResponse], + ]: + r"""Return a callable for the list sessions method over gRPC. + + Lists all Sessions by their parent + [DataStore][google.cloud.discoveryengine.v1.DataStore]. + + Returns: + Callable[[~.ListSessionsRequest], + Awaitable[~.ListSessionsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "list_sessions" not in self._stubs: + self._stubs["list_sessions"] = self.grpc_channel.unary_unary( + "/google.cloud.discoveryengine.v1.ConversationalSearchService/ListSessions", + request_serializer=conversational_search_service.ListSessionsRequest.serialize, + response_deserializer=conversational_search_service.ListSessionsResponse.deserialize, + ) + return self._stubs["list_sessions"] + def _prep_wrapped_messages(self, client_info): """Precompute the wrapped methods, overriding the base class method to use async wrappers.""" self._wrapped_methods = { @@ -465,11 +680,63 @@ def _prep_wrapped_messages(self, client_info): default_timeout=None, client_info=client_info, ), + self.answer_query: gapic_v1.method_async.wrap_method( + self.answer_query, + default_timeout=None, + client_info=client_info, + ), + self.get_answer: gapic_v1.method_async.wrap_method( + self.get_answer, + default_timeout=None, + client_info=client_info, + ), + self.create_session: gapic_v1.method_async.wrap_method( + self.create_session, + default_timeout=None, + client_info=client_info, + ), + self.delete_session: gapic_v1.method_async.wrap_method( + self.delete_session, + default_timeout=None, + client_info=client_info, + ), + self.update_session: gapic_v1.method_async.wrap_method( + self.update_session, + default_timeout=None, + client_info=client_info, + ), + self.get_session: gapic_v1.method_async.wrap_method( + self.get_session, + default_timeout=None, + client_info=client_info, + ), + self.list_sessions: gapic_v1.method_async.wrap_method( + self.list_sessions, + default_timeout=None, + client_info=client_info, + ), } def close(self): return self.grpc_channel.close() + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None]: + r"""Return a callable for the cancel_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
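`_prep_wrapped_messages` wraps every transport callable with `gapic_v1.method_async.wrap_method` (or `gapic_v1.method.wrap_method` on the sync side), which attaches the default retry/timeout policy and the `x-goog-api-client` metadata header while still allowing per-call overrides. A hedged sketch of that wrapper in isolation; `send_rpc` is a stand-in for a transport stub.

```python
from google.api_core import gapic_v1


def send_rpc(request, *, timeout=None, metadata=()):
    # Stand-in for a transport stub: echo back what the wrapper passed in.
    return {"request": request, "timeout": timeout, "metadata": list(metadata)}


wrapped = gapic_v1.method.wrap_method(
    send_rpc,
    default_timeout=30.0,
    client_info=gapic_v1.client_info.ClientInfo(gapic_version="0.0.0"),
)

# The default timeout and the x-goog-api-client metadata entry are applied
# automatically; both can still be overridden per call, exactly as the
# wrapped transport methods above allow.
print(wrapped({"ping": 1}))
print(wrapped({"ping": 2}, timeout=2.0))
```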
+ if "cancel_operation" not in self._stubs: + self._stubs["cancel_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/CancelOperation", + request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["cancel_operation"] + @property def get_operation( self, diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/conversational_search_service/transports/rest.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/conversational_search_service/transports/rest.py index 12b8d4a2417b..84b6bc30c1a6 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/conversational_search_service/transports/rest.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/conversational_search_service/transports/rest.py @@ -41,8 +41,11 @@ from google.protobuf import empty_pb2 # type: ignore from google.cloud.discoveryengine_v1.types import conversation as gcd_conversation +from google.cloud.discoveryengine_v1.types import answer from google.cloud.discoveryengine_v1.types import conversation from google.cloud.discoveryengine_v1.types import conversational_search_service +from google.cloud.discoveryengine_v1.types import session +from google.cloud.discoveryengine_v1.types import session as gcd_session from .base import ConversationalSearchServiceTransport from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO @@ -69,6 +72,14 @@ class ConversationalSearchServiceRestInterceptor: .. code-block:: python class MyCustomConversationalSearchServiceInterceptor(ConversationalSearchServiceRestInterceptor): + def pre_answer_query(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_answer_query(self, response): + logging.log(f"Received response: {response}") + return response + def pre_converse_conversation(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata @@ -85,10 +96,30 @@ def post_create_conversation(self, response): logging.log(f"Received response: {response}") return response + def pre_create_session(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_create_session(self, response): + logging.log(f"Received response: {response}") + return response + def pre_delete_conversation(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata + def pre_delete_session(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def pre_get_answer(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_answer(self, response): + logging.log(f"Received response: {response}") + return response + def pre_get_conversation(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata @@ -97,6 +128,14 @@ def post_get_conversation(self, response): logging.log(f"Received response: {response}") return response + def pre_get_session(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_session(self, response): + logging.log(f"Received response: {response}") + return response + def pre_list_conversations(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata @@ -105,6 +144,14 @@ 
def post_list_conversations(self, response): logging.log(f"Received response: {response}") return response + def pre_list_sessions(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_sessions(self, response): + logging.log(f"Received response: {response}") + return response + def pre_update_conversation(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata @@ -113,12 +160,45 @@ def post_update_conversation(self, response): logging.log(f"Received response: {response}") return response + def pre_update_session(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_update_session(self, response): + logging.log(f"Received response: {response}") + return response + transport = ConversationalSearchServiceRestTransport(interceptor=MyCustomConversationalSearchServiceInterceptor()) client = ConversationalSearchServiceClient(transport=transport) """ + def pre_answer_query( + self, + request: conversational_search_service.AnswerQueryRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + conversational_search_service.AnswerQueryRequest, Sequence[Tuple[str, str]] + ]: + """Pre-rpc interceptor for answer_query + + Override in a subclass to manipulate the request or metadata + before they are sent to the ConversationalSearchService server. + """ + return request, metadata + + def post_answer_query( + self, response: conversational_search_service.AnswerQueryResponse + ) -> conversational_search_service.AnswerQueryResponse: + """Post-rpc interceptor for answer_query + + Override in a subclass to manipulate the response + after it is returned by the ConversationalSearchService server but before + it is returned to user code. + """ + return response + def pre_converse_conversation( self, request: conversational_search_service.ConverseConversationRequest, @@ -171,6 +251,29 @@ def post_create_conversation( """ return response + def pre_create_session( + self, + request: conversational_search_service.CreateSessionRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + conversational_search_service.CreateSessionRequest, Sequence[Tuple[str, str]] + ]: + """Pre-rpc interceptor for create_session + + Override in a subclass to manipulate the request or metadata + before they are sent to the ConversationalSearchService server. + """ + return request, metadata + + def post_create_session(self, response: gcd_session.Session) -> gcd_session.Session: + """Post-rpc interceptor for create_session + + Override in a subclass to manipulate the response + after it is returned by the ConversationalSearchService server but before + it is returned to user code. + """ + return response + def pre_delete_conversation( self, request: conversational_search_service.DeleteConversationRequest, @@ -186,6 +289,43 @@ def pre_delete_conversation( """ return request, metadata + def pre_delete_session( + self, + request: conversational_search_service.DeleteSessionRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + conversational_search_service.DeleteSessionRequest, Sequence[Tuple[str, str]] + ]: + """Pre-rpc interceptor for delete_session + + Override in a subclass to manipulate the request or metadata + before they are sent to the ConversationalSearchService server. 
+ """ + return request, metadata + + def pre_get_answer( + self, + request: conversational_search_service.GetAnswerRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + conversational_search_service.GetAnswerRequest, Sequence[Tuple[str, str]] + ]: + """Pre-rpc interceptor for get_answer + + Override in a subclass to manipulate the request or metadata + before they are sent to the ConversationalSearchService server. + """ + return request, metadata + + def post_get_answer(self, response: answer.Answer) -> answer.Answer: + """Post-rpc interceptor for get_answer + + Override in a subclass to manipulate the response + after it is returned by the ConversationalSearchService server but before + it is returned to user code. + """ + return response + def pre_get_conversation( self, request: conversational_search_service.GetConversationRequest, @@ -211,6 +351,29 @@ def post_get_conversation( """ return response + def pre_get_session( + self, + request: conversational_search_service.GetSessionRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + conversational_search_service.GetSessionRequest, Sequence[Tuple[str, str]] + ]: + """Pre-rpc interceptor for get_session + + Override in a subclass to manipulate the request or metadata + before they are sent to the ConversationalSearchService server. + """ + return request, metadata + + def post_get_session(self, response: session.Session) -> session.Session: + """Post-rpc interceptor for get_session + + Override in a subclass to manipulate the response + after it is returned by the ConversationalSearchService server but before + it is returned to user code. + """ + return response + def pre_list_conversations( self, request: conversational_search_service.ListConversationsRequest, @@ -237,6 +400,31 @@ def post_list_conversations( """ return response + def pre_list_sessions( + self, + request: conversational_search_service.ListSessionsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + conversational_search_service.ListSessionsRequest, Sequence[Tuple[str, str]] + ]: + """Pre-rpc interceptor for list_sessions + + Override in a subclass to manipulate the request or metadata + before they are sent to the ConversationalSearchService server. + """ + return request, metadata + + def post_list_sessions( + self, response: conversational_search_service.ListSessionsResponse + ) -> conversational_search_service.ListSessionsResponse: + """Post-rpc interceptor for list_sessions + + Override in a subclass to manipulate the response + after it is returned by the ConversationalSearchService server but before + it is returned to user code. + """ + return response + def pre_update_conversation( self, request: conversational_search_service.UpdateConversationRequest, @@ -263,6 +451,50 @@ def post_update_conversation( """ return response + def pre_update_session( + self, + request: conversational_search_service.UpdateSessionRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + conversational_search_service.UpdateSessionRequest, Sequence[Tuple[str, str]] + ]: + """Pre-rpc interceptor for update_session + + Override in a subclass to manipulate the request or metadata + before they are sent to the ConversationalSearchService server. 
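The new pre/post hooks make the session RPCs customizable in the same way as the existing conversation RPCs. A sketch of wiring a subclass into the REST transport, following the usage shown in the interceptor class docstring; the extra header is illustrative.

```python
import logging

from google.cloud import discoveryengine_v1
from google.cloud.discoveryengine_v1.services.conversational_search_service.transports.rest import (
    ConversationalSearchServiceRestInterceptor,
    ConversationalSearchServiceRestTransport,
)


class SessionAuditInterceptor(ConversationalSearchServiceRestInterceptor):
    def pre_list_sessions(self, request, metadata):
        # Attach an extra (illustrative) header to every ListSessions call.
        return request, list(metadata) + [("x-audit-tag", "session-report")]

    def post_list_sessions(self, response):
        logging.info("ListSessions returned %d sessions", len(response.sessions))
        return response


transport = ConversationalSearchServiceRestTransport(
    interceptor=SessionAuditInterceptor()
)
client = discoveryengine_v1.ConversationalSearchServiceClient(transport=transport)
```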
+ """ + return request, metadata + + def post_update_session(self, response: gcd_session.Session) -> gcd_session.Session: + """Post-rpc interceptor for update_session + + Override in a subclass to manipulate the response + after it is returned by the ConversationalSearchService server but before + it is returned to user code. + """ + return response + + def pre_cancel_operation( + self, + request: operations_pb2.CancelOperationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[operations_pb2.CancelOperationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for cancel_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the ConversationalSearchService server. + """ + return request, metadata + + def post_cancel_operation(self, response: None) -> None: + """Post-rpc interceptor for cancel_operation + + Override in a subclass to manipulate the response + after it is returned by the ConversationalSearchService server but before + it is returned to user code. + """ + return response + def pre_get_operation( self, request: operations_pb2.GetOperationRequest, @@ -406,6 +638,113 @@ def __init__( self._interceptor = interceptor or ConversationalSearchServiceRestInterceptor() self._prep_wrapped_messages(client_info) + class _AnswerQuery(ConversationalSearchServiceRestStub): + def __hash__(self): + return hash("AnswerQuery") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: conversational_search_service.AnswerQueryRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> conversational_search_service.AnswerQueryResponse: + r"""Call the answer query method over HTTP. + + Args: + request (~.conversational_search_service.AnswerQueryRequest): + The request object. Request message for + [ConversationalSearchService.AnswerQuery][google.cloud.discoveryengine.v1.ConversationalSearchService.AnswerQuery] + method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.conversational_search_service.AnswerQueryResponse: + Response message for + [ConversationalSearchService.AnswerQuery][google.cloud.discoveryengine.v1.ConversationalSearchService.AnswerQuery] + method. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{serving_config=projects/*/locations/*/dataStores/*/servingConfigs/*}:answer", + "body": "*", + }, + { + "method": "post", + "uri": "/v1/{serving_config=projects/*/locations/*/collections/*/dataStores/*/servingConfigs/*}:answer", + "body": "*", + }, + { + "method": "post", + "uri": "/v1/{serving_config=projects/*/locations/*/collections/*/engines/*/servingConfigs/*}:answer", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_answer_query(request, metadata) + pb_request = conversational_search_service.AnswerQueryRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = conversational_search_service.AnswerQueryResponse() + pb_resp = conversational_search_service.AnswerQueryResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_answer_query(resp) + return resp + class _ConverseConversation(ConversationalSearchServiceRestStub): def __hash__(self): return hash("ConverseConversation") @@ -628,9 +967,563 @@ def __call__( resp = self._interceptor.post_create_conversation(resp) return resp + class _CreateSession(ConversationalSearchServiceRestStub): + def __hash__(self): + return hash("CreateSession") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: conversational_search_service.CreateSessionRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> gcd_session.Session: + r"""Call the create session method over HTTP. + + Args: + request (~.conversational_search_service.CreateSessionRequest): + The request object. Request for CreateSession method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.gcd_session.Session: + External session proto definition. 
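The `_AnswerQuery` stub above transcodes the request onto one of the three `:answer` POST bindings, JSON-encodes the body, and parses the JSON response back into `AnswerQueryResponse`. A hedged sketch of driving it end to end through the client; `transport="rest"` selects this transport, and the `serving_config` path, `Query.text` field, and `answer_text` access are assumptions based on the request/response types rather than verbatim from this diff.

```python
from google.cloud import discoveryengine_v1

client = discoveryengine_v1.ConversationalSearchServiceClient(transport="rest")

response = client.answer_query(
    request=discoveryengine_v1.AnswerQueryRequest(
        serving_config=(
            "projects/my-project/locations/global/dataStores/my-data-store"
            "/servingConfigs/default_serving_config"
        ),
        query=discoveryengine_v1.Query(text="How do I rotate my API keys?"),
    )
)
# The generated Answer (assumed answer_text field) carries the response text.
print(response.answer.answer_text)
```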
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{parent=projects/*/locations/*/dataStores/*}/sessions", + "body": "session", + }, + { + "method": "post", + "uri": "/v1/{parent=projects/*/locations/*/collections/*/dataStores/*}/sessions", + "body": "session", + }, + { + "method": "post", + "uri": "/v1/{parent=projects/*/locations/*/collections/*/engines/*}/sessions", + "body": "session", + }, + ] + request, metadata = self._interceptor.pre_create_session(request, metadata) + pb_request = conversational_search_service.CreateSessionRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = gcd_session.Session() + pb_resp = gcd_session.Session.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_create_session(resp) + return resp + class _DeleteConversation(ConversationalSearchServiceRestStub): def __hash__(self): - return hash("DeleteConversation") + return hash("DeleteConversation") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: conversational_search_service.DeleteConversationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ): + r"""Call the delete conversation method over HTTP. + + Args: + request (~.conversational_search_service.DeleteConversationRequest): + The request object. Request for DeleteConversation + method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v1/{name=projects/*/locations/*/dataStores/*/conversations/*}", + }, + { + "method": "delete", + "uri": "/v1/{name=projects/*/locations/*/collections/*/dataStores/*/conversations/*}", + }, + { + "method": "delete", + "uri": "/v1/{name=projects/*/locations/*/collections/*/engines/*/conversations/*}", + }, + ] + request, metadata = self._interceptor.pre_delete_conversation( + request, metadata + ) + pb_request = conversational_search_service.DeleteConversationRequest.pb( + request + ) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + class _DeleteSession(ConversationalSearchServiceRestStub): + def __hash__(self): + return hash("DeleteSession") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: conversational_search_service.DeleteSessionRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ): + r"""Call the delete session method over HTTP. + + Args: + request (~.conversational_search_service.DeleteSessionRequest): + The request object. Request for DeleteSession method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v1/{name=projects/*/locations/*/dataStores/*/sessions/*}", + }, + { + "method": "delete", + "uri": "/v1/{name=projects/*/locations/*/collections/*/dataStores/*/sessions/*}", + }, + { + "method": "delete", + "uri": "/v1/{name=projects/*/locations/*/collections/*/engines/*/sessions/*}", + }, + ] + request, metadata = self._interceptor.pre_delete_session(request, metadata) + pb_request = conversational_search_service.DeleteSessionRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + class _GetAnswer(ConversationalSearchServiceRestStub): + def __hash__(self): + return hash("GetAnswer") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: conversational_search_service.GetAnswerRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> answer.Answer: + r"""Call the get answer method over HTTP. + + Args: + request (~.conversational_search_service.GetAnswerRequest): + The request object. Request for GetAnswer method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.answer.Answer: + Defines an answer. 
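Every REST stub above converts HTTP 4xx/5xx responses into `google.api_core.exceptions.GoogleAPICallError` subclasses via `core_exceptions.from_http_response`. A sketch of handling the NOT_FOUND case documented on DeleteSession; the session name is illustrative.

```python
from google.api_core import exceptions as gapi_exceptions
from google.cloud import discoveryengine_v1

client = discoveryengine_v1.ConversationalSearchServiceClient()

try:
    client.delete_session(
        request=discoveryengine_v1.DeleteSessionRequest(
            name=(
                "projects/my-project/locations/global/dataStores/my-data-store"
                "/sessions/does-not-exist"
            )
        )
    )
except gapi_exceptions.NotFound:
    # Matches the NOT_FOUND behavior documented on DeleteSession.
    print("Session was already gone; nothing to delete.")
```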
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/dataStores/*/sessions/*/answers/*}", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/dataStores/*/sessions/*/answers/*}", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/engines/*/sessions/*/answers/*}", + }, + ] + request, metadata = self._interceptor.pre_get_answer(request, metadata) + pb_request = conversational_search_service.GetAnswerRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = answer.Answer() + pb_resp = answer.Answer.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_answer(resp) + return resp + + class _GetConversation(ConversationalSearchServiceRestStub): + def __hash__(self): + return hash("GetConversation") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: conversational_search_service.GetConversationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> conversation.Conversation: + r"""Call the get conversation method over HTTP. + + Args: + request (~.conversational_search_service.GetConversationRequest): + The request object. Request for GetConversation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.conversation.Conversation: + External conversation proto + definition. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/dataStores/*/conversations/*}", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/dataStores/*/conversations/*}", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/engines/*/conversations/*}", + }, + ] + request, metadata = self._interceptor.pre_get_conversation( + request, metadata + ) + pb_request = conversational_search_service.GetConversationRequest.pb( + request + ) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = conversation.Conversation() + pb_resp = conversation.Conversation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_conversation(resp) + return resp + + class _GetSession(ConversationalSearchServiceRestStub): + def __hash__(self): + return hash("GetSession") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: conversational_search_service.GetSessionRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> session.Session: + r"""Call the get session method over HTTP. + + Args: + request (~.conversational_search_service.GetSessionRequest): + The request object. Request for GetSession method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.session.Session: + External session proto definition. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/dataStores/*/sessions/*}", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/dataStores/*/sessions/*}", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/engines/*/sessions/*}", + }, + ] + request, metadata = self._interceptor.pre_get_session(request, metadata) + pb_request = conversational_search_service.GetSessionRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = session.Session() + pb_resp = session.Session.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_session(resp) + return resp + + class _ListConversations(ConversationalSearchServiceRestStub): + def __hash__(self): + return hash("ListConversations") __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} @@ -644,43 +1537,48 @@ def _get_unset_required_fields(cls, message_dict): def __call__( self, - request: conversational_search_service.DeleteConversationRequest, + request: conversational_search_service.ListConversationsRequest, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), - ): - r"""Call the delete conversation method over HTTP. + ) -> conversational_search_service.ListConversationsResponse: + r"""Call the list conversations method over HTTP. Args: - request (~.conversational_search_service.DeleteConversationRequest): - The request object. Request for DeleteConversation - method. + request (~.conversational_search_service.ListConversationsRequest): + The request object. Request for ListConversations method. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. + + Returns: + ~.conversational_search_service.ListConversationsResponse: + Response for ListConversations + method. 
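The three URI bindings on `_GetSession` (and the other session stubs) correspond to the three places a `Session` can live: directly under a data store, under a collection's data store, or under a collection's engine. A sketch of the matching resource names, with purely illustrative identifiers; where available, the generated client's path helpers (for example a `session_path` classmethod) are preferable to hand-built strings.

```python
# Illustrative identifiers only; substitute real project/location/resource IDs.
PROJECT, LOCATION = "my-project", "global"

data_store_session = (
    f"projects/{PROJECT}/locations/{LOCATION}"
    f"/dataStores/my-data-store/sessions/my-session"
)
collection_session = (
    f"projects/{PROJECT}/locations/{LOCATION}/collections/default_collection"
    f"/dataStores/my-data-store/sessions/my-session"
)
engine_session = (
    f"projects/{PROJECT}/locations/{LOCATION}/collections/default_collection"
    f"/engines/my-engine/sessions/my-session"
)
```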
+ """ http_options: List[Dict[str, str]] = [ { - "method": "delete", - "uri": "/v1/{name=projects/*/locations/*/dataStores/*/conversations/*}", + "method": "get", + "uri": "/v1/{parent=projects/*/locations/*/dataStores/*}/conversations", }, { - "method": "delete", - "uri": "/v1/{name=projects/*/locations/*/collections/*/dataStores/*/conversations/*}", + "method": "get", + "uri": "/v1/{parent=projects/*/locations/*/collections/*/dataStores/*}/conversations", }, { - "method": "delete", - "uri": "/v1/{name=projects/*/locations/*/collections/*/engines/*/conversations/*}", + "method": "get", + "uri": "/v1/{parent=projects/*/locations/*/collections/*/engines/*}/conversations", }, ] - request, metadata = self._interceptor.pre_delete_conversation( + request, metadata = self._interceptor.pre_list_conversations( request, metadata ) - pb_request = conversational_search_service.DeleteConversationRequest.pb( + pb_request = conversational_search_service.ListConversationsRequest.pb( request ) transcoded_request = path_template.transcode(http_options, pb_request) @@ -714,9 +1612,17 @@ def __call__( if response.status_code >= 400: raise core_exceptions.from_http_response(response) - class _GetConversation(ConversationalSearchServiceRestStub): + # Return the response + resp = conversational_search_service.ListConversationsResponse() + pb_resp = conversational_search_service.ListConversationsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_conversations(resp) + return resp + + class _ListSessions(ConversationalSearchServiceRestStub): def __hash__(self): - return hash("GetConversation") + return hash("ListSessions") __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} @@ -730,17 +1636,17 @@ def _get_unset_required_fields(cls, message_dict): def __call__( self, - request: conversational_search_service.GetConversationRequest, + request: conversational_search_service.ListSessionsRequest, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), - ) -> conversation.Conversation: - r"""Call the get conversation method over HTTP. + ) -> conversational_search_service.ListSessionsResponse: + r"""Call the list sessions method over HTTP. Args: - request (~.conversational_search_service.GetConversationRequest): - The request object. Request for GetConversation method. + request (~.conversational_search_service.ListSessionsRequest): + The request object. Request for ListSessions method. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -748,32 +1654,26 @@ def __call__( sent along with the request as metadata. Returns: - ~.conversation.Conversation: - External conversation proto - definition. - + ~.conversational_search_service.ListSessionsResponse: + Response for ListSessions method. 
""" http_options: List[Dict[str, str]] = [ { "method": "get", - "uri": "/v1/{name=projects/*/locations/*/dataStores/*/conversations/*}", + "uri": "/v1/{parent=projects/*/locations/*/dataStores/*}/sessions", }, { "method": "get", - "uri": "/v1/{name=projects/*/locations/*/collections/*/dataStores/*/conversations/*}", + "uri": "/v1/{parent=projects/*/locations/*/collections/*/dataStores/*}/sessions", }, { "method": "get", - "uri": "/v1/{name=projects/*/locations/*/collections/*/engines/*/conversations/*}", + "uri": "/v1/{parent=projects/*/locations/*/collections/*/engines/*}/sessions", }, ] - request, metadata = self._interceptor.pre_get_conversation( - request, metadata - ) - pb_request = conversational_search_service.GetConversationRequest.pb( - request - ) + request, metadata = self._interceptor.pre_list_sessions(request, metadata) + pb_request = conversational_search_service.ListSessionsRequest.pb(request) transcoded_request = path_template.transcode(http_options, pb_request) uri = transcoded_request["uri"] @@ -806,16 +1706,16 @@ def __call__( raise core_exceptions.from_http_response(response) # Return the response - resp = conversation.Conversation() - pb_resp = conversation.Conversation.pb(resp) + resp = conversational_search_service.ListSessionsResponse() + pb_resp = conversational_search_service.ListSessionsResponse.pb(resp) json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_get_conversation(resp) + resp = self._interceptor.post_list_sessions(resp) return resp - class _ListConversations(ConversationalSearchServiceRestStub): + class _UpdateConversation(ConversationalSearchServiceRestStub): def __hash__(self): - return hash("ListConversations") + return hash("UpdateConversation") __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} @@ -829,17 +1729,18 @@ def _get_unset_required_fields(cls, message_dict): def __call__( self, - request: conversational_search_service.ListConversationsRequest, + request: conversational_search_service.UpdateConversationRequest, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), - ) -> conversational_search_service.ListConversationsResponse: - r"""Call the list conversations method over HTTP. + ) -> gcd_conversation.Conversation: + r"""Call the update conversation method over HTTP. Args: - request (~.conversational_search_service.ListConversationsRequest): - The request object. Request for ListConversations method. + request (~.conversational_search_service.UpdateConversationRequest): + The request object. Request for UpdateConversation + method. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -847,34 +1748,42 @@ def __call__( sent along with the request as metadata. Returns: - ~.conversational_search_service.ListConversationsResponse: - Response for ListConversations - method. + ~.gcd_conversation.Conversation: + External conversation proto + definition. 
""" http_options: List[Dict[str, str]] = [ { - "method": "get", - "uri": "/v1/{parent=projects/*/locations/*/dataStores/*}/conversations", + "method": "patch", + "uri": "/v1/{conversation.name=projects/*/locations/*/dataStores/*/conversations/*}", + "body": "conversation", }, { - "method": "get", - "uri": "/v1/{parent=projects/*/locations/*/collections/*/dataStores/*}/conversations", + "method": "patch", + "uri": "/v1/{conversation.name=projects/*/locations/*/collections/*/dataStores/*/conversations/*}", + "body": "conversation", }, { - "method": "get", - "uri": "/v1/{parent=projects/*/locations/*/collections/*/engines/*}/conversations", + "method": "patch", + "uri": "/v1/{conversation.name=projects/*/locations/*/collections/*/engines/*/conversations/*}", + "body": "conversation", }, ] - request, metadata = self._interceptor.pre_list_conversations( + request, metadata = self._interceptor.pre_update_conversation( request, metadata ) - pb_request = conversational_search_service.ListConversationsRequest.pb( + pb_request = conversational_search_service.UpdateConversationRequest.pb( request ) transcoded_request = path_template.transcode(http_options, pb_request) + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) uri = transcoded_request["uri"] method = transcoded_request["method"] @@ -897,6 +1806,7 @@ def __call__( timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -905,16 +1815,16 @@ def __call__( raise core_exceptions.from_http_response(response) # Return the response - resp = conversational_search_service.ListConversationsResponse() - pb_resp = conversational_search_service.ListConversationsResponse.pb(resp) + resp = gcd_conversation.Conversation() + pb_resp = gcd_conversation.Conversation.pb(resp) json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_list_conversations(resp) + resp = self._interceptor.post_update_conversation(resp) return resp - class _UpdateConversation(ConversationalSearchServiceRestStub): + class _UpdateSession(ConversationalSearchServiceRestStub): def __hash__(self): - return hash("UpdateConversation") + return hash("UpdateSession") __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} @@ -928,18 +1838,17 @@ def _get_unset_required_fields(cls, message_dict): def __call__( self, - request: conversational_search_service.UpdateConversationRequest, + request: conversational_search_service.UpdateSessionRequest, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), - ) -> gcd_conversation.Conversation: - r"""Call the update conversation method over HTTP. + ) -> gcd_session.Session: + r"""Call the update session method over HTTP. Args: - request (~.conversational_search_service.UpdateConversationRequest): - The request object. Request for UpdateConversation - method. + request (~.conversational_search_service.UpdateSessionRequest): + The request object. Request for UpdateSession method. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -947,35 +1856,29 @@ def __call__( sent along with the request as metadata. Returns: - ~.gcd_conversation.Conversation: - External conversation proto - definition. 
- + ~.gcd_session.Session: + External session proto definition. """ http_options: List[Dict[str, str]] = [ { "method": "patch", - "uri": "/v1/{conversation.name=projects/*/locations/*/dataStores/*/conversations/*}", - "body": "conversation", + "uri": "/v1/{session.name=projects/*/locations/*/dataStores/*/sessions/*}", + "body": "session", }, { "method": "patch", - "uri": "/v1/{conversation.name=projects/*/locations/*/collections/*/dataStores/*/conversations/*}", - "body": "conversation", + "uri": "/v1/{session.name=projects/*/locations/*/collections/*/dataStores/*/sessions/*}", + "body": "session", }, { "method": "patch", - "uri": "/v1/{conversation.name=projects/*/locations/*/collections/*/engines/*/conversations/*}", - "body": "conversation", + "uri": "/v1/{session.name=projects/*/locations/*/collections/*/engines/*/sessions/*}", + "body": "session", }, ] - request, metadata = self._interceptor.pre_update_conversation( - request, metadata - ) - pb_request = conversational_search_service.UpdateConversationRequest.pb( - request - ) + request, metadata = self._interceptor.pre_update_session(request, metadata) + pb_request = conversational_search_service.UpdateSessionRequest.pb(request) transcoded_request = path_template.transcode(http_options, pb_request) # Jsonify the request body @@ -1014,13 +1917,24 @@ def __call__( raise core_exceptions.from_http_response(response) # Return the response - resp = gcd_conversation.Conversation() - pb_resp = gcd_conversation.Conversation.pb(resp) + resp = gcd_session.Session() + pb_resp = gcd_session.Session.pb(resp) json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_update_conversation(resp) + resp = self._interceptor.post_update_session(resp) return resp + @property + def answer_query( + self, + ) -> Callable[ + [conversational_search_service.AnswerQueryRequest], + conversational_search_service.AnswerQueryResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._AnswerQuery(self._session, self._host, self._interceptor) # type: ignore + @property def converse_conversation( self, @@ -1043,6 +1957,16 @@ def create_conversation( # In C++ this would require a dynamic_cast return self._CreateConversation(self._session, self._host, self._interceptor) # type: ignore + @property + def create_session( + self, + ) -> Callable[ + [conversational_search_service.CreateSessionRequest], gcd_session.Session + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._CreateSession(self._session, self._host, self._interceptor) # type: ignore + @property def delete_conversation( self, @@ -1053,6 +1977,24 @@ def delete_conversation( # In C++ this would require a dynamic_cast return self._DeleteConversation(self._session, self._host, self._interceptor) # type: ignore + @property + def delete_session( + self, + ) -> Callable[ + [conversational_search_service.DeleteSessionRequest], empty_pb2.Empty + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._DeleteSession(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_answer( + self, + ) -> Callable[[conversational_search_service.GetAnswerRequest], answer.Answer]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetAnswer(self._session, self._host, self._interceptor) # type: ignore + @property def get_conversation( self, @@ -1064,6 +2006,14 @@ def get_conversation( # In C++ this would require a dynamic_cast return self._GetConversation(self._session, self._host, self._interceptor) # type: ignore + @property + def get_session( + self, + ) -> Callable[[conversational_search_service.GetSessionRequest], session.Session]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetSession(self._session, self._host, self._interceptor) # type: ignore + @property def list_conversations( self, @@ -1075,6 +2025,17 @@ def list_conversations( # In C++ this would require a dynamic_cast return self._ListConversations(self._session, self._host, self._interceptor) # type: ignore + @property + def list_sessions( + self, + ) -> Callable[ + [conversational_search_service.ListSessionsRequest], + conversational_search_service.ListSessionsResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListSessions(self._session, self._host, self._interceptor) # type: ignore + @property def update_conversation( self, @@ -1086,6 +2047,91 @@ def update_conversation( # In C++ this would require a dynamic_cast return self._UpdateConversation(self._session, self._host, self._interceptor) # type: ignore + @property + def update_session( + self, + ) -> Callable[ + [conversational_search_service.UpdateSessionRequest], gcd_session.Session + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._UpdateSession(self._session, self._host, self._interceptor) # type: ignore + + @property + def cancel_operation(self): + return self._CancelOperation(self._session, self._host, self._interceptor) # type: ignore + + class _CancelOperation(ConversationalSearchServiceRestStub): + def __call__( + self, + request: operations_pb2.CancelOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Call the cancel operation method over HTTP. + + Args: + request (operations_pb2.CancelOperationRequest): + The request object for CancelOperation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{name=projects/*/operations/*}:cancel", + "body": "*", + }, + { + "method": "post", + "uri": "/v1/{name=projects/*/locations/*/collections/*/dataStores/*/branches/*/operations/*}:cancel", + "body": "*", + }, + { + "method": "post", + "uri": "/v1/{name=projects/*/locations/*/dataStores/*/branches/*/operations/*}:cancel", + "body": "*", + }, + ] + + request, metadata = self._interceptor.pre_cancel_operation( + request, metadata + ) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + body = json.dumps(transcoded_request["body"]) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + return self._interceptor.post_cancel_operation(None) + @property def get_operation(self): return self._GetOperation(self._session, self._host, self._interceptor) # type: ignore diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/data_store_service/async_client.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/data_store_service/async_client.py index 2d51b6409b81..428c874802d2 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/data_store_service/async_client.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/data_store_service/async_client.py @@ -52,10 +52,14 @@ from google.protobuf import timestamp_pb2 # type: ignore from google.cloud.discoveryengine_v1.services.data_store_service import pagers +from google.cloud.discoveryengine_v1.types import ( + data_store_service, + document_processing_config, + schema, +) from google.cloud.discoveryengine_v1.types import common from google.cloud.discoveryengine_v1.types import data_store from google.cloud.discoveryengine_v1.types import data_store as gcd_data_store -from google.cloud.discoveryengine_v1.types import data_store_service, schema from .client import DataStoreServiceClient from .transports.base import DEFAULT_CLIENT_INFO, DataStoreServiceTransport @@ -81,6 +85,12 @@ class DataStoreServiceAsyncClient: parse_collection_path = staticmethod(DataStoreServiceClient.parse_collection_path) data_store_path = staticmethod(DataStoreServiceClient.data_store_path) parse_data_store_path = staticmethod(DataStoreServiceClient.parse_data_store_path) + document_processing_config_path = staticmethod( + DataStoreServiceClient.document_processing_config_path + ) + parse_document_processing_config_path = staticmethod( + DataStoreServiceClient.parse_document_processing_config_path + ) schema_path = staticmethod(DataStoreServiceClient.schema_path) parse_schema_path = staticmethod(DataStoreServiceClient.parse_schema_path) common_billing_account_path = staticmethod( @@ -1085,6 +1095,63 @@ async def get_operation( # Done; return the response. 
return response + async def cancel_operation( + self, + request: Optional[operations_pb2.CancelOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Starts asynchronous cancellation on a long-running operation. + + The server makes a best effort to cancel the operation, but success + is not guaranteed. If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.CancelOperationRequest`): + The request object. Request message for + `CancelOperation` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.CancelOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.cancel_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + async def __aenter__(self) -> "DataStoreServiceAsyncClient": return self diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/data_store_service/client.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/data_store_service/client.py index 5b9d14db160a..33bf0211e3b4 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/data_store_service/client.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/data_store_service/client.py @@ -57,10 +57,14 @@ from google.protobuf import timestamp_pb2 # type: ignore from google.cloud.discoveryengine_v1.services.data_store_service import pagers +from google.cloud.discoveryengine_v1.types import ( + data_store_service, + document_processing_config, + schema, +) from google.cloud.discoveryengine_v1.types import common from google.cloud.discoveryengine_v1.types import data_store from google.cloud.discoveryengine_v1.types import data_store as gcd_data_store -from google.cloud.discoveryengine_v1.types import data_store_service, schema from .transports.base import DEFAULT_CLIENT_INFO, DataStoreServiceTransport from .transports.grpc import DataStoreServiceGrpcTransport @@ -243,6 +247,28 @@ def parse_data_store_path(path: str) -> Dict[str, str]: ) return m.groupdict() if m else {} + @staticmethod + def document_processing_config_path( + project: str, + location: str, + data_store: str, + ) -> str: + """Returns a fully-qualified document_processing_config string.""" + return "projects/{project}/locations/{location}/dataStores/{data_store}/documentProcessingConfig".format( + project=project, + location=location, 
+ data_store=data_store, + ) + + @staticmethod + def parse_document_processing_config_path(path: str) -> Dict[str, str]: + """Parses a document_processing_config path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/dataStores/(?P.+?)/documentProcessingConfig$", + path, + ) + return m.groupdict() if m else {} + @staticmethod def schema_path( project: str, @@ -1548,6 +1574,63 @@ def get_operation( # Done; return the response. return response + def cancel_operation( + self, + request: Optional[operations_pb2.CancelOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Starts asynchronous cancellation on a long-running operation. + + The server makes a best effort to cancel the operation, but success + is not guaranteed. If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.CancelOperationRequest`): + The request object. Request message for + `CancelOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.CancelOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.cancel_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. 
+ rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( gapic_version=package_version.__version__ diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/data_store_service/transports/base.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/data_store_service/transports/base.py index 504091068af6..651ecb0eb9e4 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/data_store_service/transports/base.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/data_store_service/transports/base.py @@ -240,6 +240,12 @@ def get_operation( ]: raise NotImplementedError() + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None,]: + raise NotImplementedError() + @property def kind(self) -> str: raise NotImplementedError() diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/data_store_service/transports/grpc.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/data_store_service/transports/grpc.py index 8eb1895599ed..563b84f4e523 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/data_store_service/transports/grpc.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/data_store_service/transports/grpc.py @@ -406,6 +406,23 @@ def update_data_store( def close(self): self.grpc_channel.close() + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None]: + r"""Return a callable for the cancel_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "cancel_operation" not in self._stubs: + self._stubs["cancel_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/CancelOperation", + request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["cancel_operation"] + @property def get_operation( self, diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/data_store_service/transports/grpc_asyncio.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/data_store_service/transports/grpc_asyncio.py index a32fd3f4ba05..e16588e72af3 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/data_store_service/transports/grpc_asyncio.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/data_store_service/transports/grpc_asyncio.py @@ -444,6 +444,23 @@ def _prep_wrapped_messages(self, client_info): def close(self): return self.grpc_channel.close() + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None]: + r"""Return a callable for the cancel_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "cancel_operation" not in self._stubs: + self._stubs["cancel_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/CancelOperation", + request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["cancel_operation"] + @property def get_operation( self, diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/data_store_service/transports/rest.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/data_store_service/transports/rest.py index acc0e7aef168..65051ae919e2 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/data_store_service/transports/rest.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/data_store_service/transports/rest.py @@ -235,6 +235,27 @@ def post_update_data_store( """ return response + def pre_cancel_operation( + self, + request: operations_pb2.CancelOperationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[operations_pb2.CancelOperationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for cancel_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the DataStoreService server. + """ + return request, metadata + + def post_cancel_operation(self, response: None) -> None: + """Post-rpc interceptor for cancel_operation + + Override in a subclass to manipulate the response + after it is returned by the DataStoreService server but before + it is returned to user code. + """ + return response + def pre_get_operation( self, request: operations_pb2.GetOperationRequest, @@ -391,6 +412,23 @@ def operations_client(self) -> operations_v1.AbstractOperationsClient: # Only create a new client if we do not already have one. if self._operations_client is None: http_options: Dict[str, List[Dict[str, str]]] = { + "google.longrunning.Operations.CancelOperation": [ + { + "method": "post", + "uri": "/v1/{name=projects/*/operations/*}:cancel", + "body": "*", + }, + { + "method": "post", + "uri": "/v1/{name=projects/*/locations/*/collections/*/dataStores/*/branches/*/operations/*}:cancel", + "body": "*", + }, + { + "method": "post", + "uri": "/v1/{name=projects/*/locations/*/dataStores/*/branches/*/operations/*}:cancel", + "body": "*", + }, + ], "google.longrunning.Operations.GetOperation": [ { "method": "get", @@ -1072,6 +1110,81 @@ def update_data_store( # In C++ this would require a dynamic_cast return self._UpdateDataStore(self._session, self._host, self._interceptor) # type: ignore + @property + def cancel_operation(self): + return self._CancelOperation(self._session, self._host, self._interceptor) # type: ignore + + class _CancelOperation(DataStoreServiceRestStub): + def __call__( + self, + request: operations_pb2.CancelOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Call the cancel operation method over HTTP. + + Args: + request (operations_pb2.CancelOperationRequest): + The request object for CancelOperation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{name=projects/*/operations/*}:cancel", + "body": "*", + }, + { + "method": "post", + "uri": "/v1/{name=projects/*/locations/*/collections/*/dataStores/*/branches/*/operations/*}:cancel", + "body": "*", + }, + { + "method": "post", + "uri": "/v1/{name=projects/*/locations/*/dataStores/*/branches/*/operations/*}:cancel", + "body": "*", + }, + ] + + request, metadata = self._interceptor.pre_cancel_operation( + request, metadata + ) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + body = json.dumps(transcoded_request["body"]) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + return self._interceptor.post_cancel_operation(None) + @property def get_operation(self): return self._GetOperation(self._session, self._host, self._interceptor) # type: ignore diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/document_service/async_client.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/document_service/async_client.py index cee7b0684c16..68f1c0a37fb2 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/document_service/async_client.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/document_service/async_client.py @@ -592,7 +592,7 @@ async def sample_create_document(): document_id (:class:`str`): Required. The ID to use for the [Document][google.cloud.discoveryengine.v1.Document], - which will become the final component of the + which becomes the final component of the [Document.name][google.cloud.discoveryengine.v1.Document.name]. If the caller does not have permission to create the @@ -739,7 +739,7 @@ async def sample_update_document(): update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): Indicates which fields in the provided imported 'document' to update. - If not set, will by default update all + If not set, by default updates all fields. This corresponds to the ``update_mask`` field @@ -925,8 +925,7 @@ async def import_documents( ) -> operation_async.AsyncOperation: r"""Bulk import of multiple [Document][google.cloud.discoveryengine.v1.Document]s. Request - processing may be synchronous. Non-existing items will be - created. + processing may be synchronous. Non-existing items are created. Note: It is possible for a subset of the [Document][google.cloud.discoveryengine.v1.Document]s to be @@ -1258,6 +1257,63 @@ async def get_operation( # Done; return the response. 
return response + async def cancel_operation( + self, + request: Optional[operations_pb2.CancelOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Starts asynchronous cancellation on a long-running operation. + + The server makes a best effort to cancel the operation, but success + is not guaranteed. If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.CancelOperationRequest`): + The request object. Request message for + `CancelOperation` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.CancelOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.cancel_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + async def __aenter__(self) -> "DocumentServiceAsyncClient": return self diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/document_service/client.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/document_service/client.py index 47541250b02f..680952d90887 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/document_service/client.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/document_service/client.py @@ -1054,7 +1054,7 @@ def sample_create_document(): document_id (str): Required. The ID to use for the [Document][google.cloud.discoveryengine.v1.Document], - which will become the final component of the + which becomes the final component of the [Document.name][google.cloud.discoveryengine.v1.Document.name]. If the caller does not have permission to create the @@ -1198,7 +1198,7 @@ def sample_update_document(): update_mask (google.protobuf.field_mask_pb2.FieldMask): Indicates which fields in the provided imported 'document' to update. - If not set, will by default update all + If not set, by default updates all fields. This corresponds to the ``update_mask`` field @@ -1378,8 +1378,7 @@ def import_documents( ) -> operation.Operation: r"""Bulk import of multiple [Document][google.cloud.discoveryengine.v1.Document]s. Request - processing may be synchronous. Non-existing items will be - created. + processing may be synchronous. Non-existing items are created. Note: It is possible for a subset of the [Document][google.cloud.discoveryengine.v1.Document]s to be @@ -1720,6 +1719,63 @@ def get_operation( # Done; return the response. 
return response + def cancel_operation( + self, + request: Optional[operations_pb2.CancelOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Starts asynchronous cancellation on a long-running operation. + + The server makes a best effort to cancel the operation, but success + is not guaranteed. If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.CancelOperationRequest`): + The request object. Request message for + `CancelOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.CancelOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.cancel_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( gapic_version=package_version.__version__ diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/document_service/transports/base.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/document_service/transports/base.py index 3518ecf9fd15..fa93049e8125 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/document_service/transports/base.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/document_service/transports/base.py @@ -282,6 +282,12 @@ def get_operation( ]: raise NotImplementedError() + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None,]: + raise NotImplementedError() + @property def kind(self) -> str: raise NotImplementedError() diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/document_service/transports/grpc.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/document_service/transports/grpc.py index fe6098e4cc82..bc29d46b2ac2 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/document_service/transports/grpc.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/document_service/transports/grpc.py @@ -400,8 +400,7 @@ def import_documents( Bulk import of multiple [Document][google.cloud.discoveryengine.v1.Document]s. Request - processing may be synchronous. Non-existing items will be - created. + processing may be synchronous. Non-existing items are created. 
Note: It is possible for a subset of the [Document][google.cloud.discoveryengine.v1.Document]s to be @@ -472,6 +471,23 @@ def purge_documents( def close(self): self.grpc_channel.close() + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None]: + r"""Return a callable for the cancel_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "cancel_operation" not in self._stubs: + self._stubs["cancel_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/CancelOperation", + request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["cancel_operation"] + @property def get_operation( self, diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/document_service/transports/grpc_asyncio.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/document_service/transports/grpc_asyncio.py index a89bf69f4be9..6975daf4c95a 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/document_service/transports/grpc_asyncio.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/document_service/transports/grpc_asyncio.py @@ -413,8 +413,7 @@ def import_documents( Bulk import of multiple [Document][google.cloud.discoveryengine.v1.Document]s. Request - processing may be synchronous. Non-existing items will be - created. + processing may be synchronous. Non-existing items are created. Note: It is possible for a subset of the [Document][google.cloud.discoveryengine.v1.Document]s to be @@ -536,6 +535,23 @@ def _prep_wrapped_messages(self, client_info): def close(self): return self.grpc_channel.close() + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None]: + r"""Return a callable for the cancel_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "cancel_operation" not in self._stubs: + self._stubs["cancel_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/CancelOperation", + request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["cancel_operation"] + @property def get_operation( self, diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/document_service/transports/rest.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/document_service/transports/rest.py index 534bc0ece61a..1418d3d2036b 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/document_service/transports/rest.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/document_service/transports/rest.py @@ -285,6 +285,27 @@ def post_update_document( """ return response + def pre_cancel_operation( + self, + request: operations_pb2.CancelOperationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[operations_pb2.CancelOperationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for cancel_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the DocumentService server. + """ + return request, metadata + + def post_cancel_operation(self, response: None) -> None: + """Post-rpc interceptor for cancel_operation + + Override in a subclass to manipulate the response + after it is returned by the DocumentService server but before + it is returned to user code. + """ + return response + def pre_get_operation( self, request: operations_pb2.GetOperationRequest, @@ -441,6 +462,23 @@ def operations_client(self) -> operations_v1.AbstractOperationsClient: # Only create a new client if we do not already have one. if self._operations_client is None: http_options: Dict[str, List[Dict[str, str]]] = { + "google.longrunning.Operations.CancelOperation": [ + { + "method": "post", + "uri": "/v1/{name=projects/*/operations/*}:cancel", + "body": "*", + }, + { + "method": "post", + "uri": "/v1/{name=projects/*/locations/*/collections/*/dataStores/*/branches/*/operations/*}:cancel", + "body": "*", + }, + { + "method": "post", + "uri": "/v1/{name=projects/*/locations/*/dataStores/*/branches/*/operations/*}:cancel", + "body": "*", + }, + ], "google.longrunning.Operations.GetOperation": [ { "method": "get", @@ -1314,6 +1352,81 @@ def update_document( # In C++ this would require a dynamic_cast return self._UpdateDocument(self._session, self._host, self._interceptor) # type: ignore + @property + def cancel_operation(self): + return self._CancelOperation(self._session, self._host, self._interceptor) # type: ignore + + class _CancelOperation(DocumentServiceRestStub): + def __call__( + self, + request: operations_pb2.CancelOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Call the cancel operation method over HTTP. + + Args: + request (operations_pb2.CancelOperationRequest): + The request object for CancelOperation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{name=projects/*/operations/*}:cancel", + "body": "*", + }, + { + "method": "post", + "uri": "/v1/{name=projects/*/locations/*/collections/*/dataStores/*/branches/*/operations/*}:cancel", + "body": "*", + }, + { + "method": "post", + "uri": "/v1/{name=projects/*/locations/*/dataStores/*/branches/*/operations/*}:cancel", + "body": "*", + }, + ] + + request, metadata = self._interceptor.pre_cancel_operation( + request, metadata + ) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + body = json.dumps(transcoded_request["body"]) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + return self._interceptor.post_cancel_operation(None) + @property def get_operation(self): return self._GetOperation(self._session, self._host, self._interceptor) # type: ignore diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/engine_service/async_client.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/engine_service/async_client.py index 2a1e82c72ed0..502498e7c39d 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/engine_service/async_client.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/engine_service/async_client.py @@ -1048,6 +1048,63 @@ async def get_operation( # Done; return the response. return response + async def cancel_operation( + self, + request: Optional[operations_pb2.CancelOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Starts asynchronous cancellation on a long-running operation. + + The server makes a best effort to cancel the operation, but success + is not guaranteed. If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.CancelOperationRequest`): + The request object. Request message for + `CancelOperation` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.CancelOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = gapic_v1.method_async.wrap_method( + self._client._transport.cancel_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + async def __aenter__(self) -> "EngineServiceAsyncClient": return self diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/engine_service/client.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/engine_service/client.py index 5fe49c259d3a..81c7fb5fd3d9 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/engine_service/client.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/engine_service/client.py @@ -1488,6 +1488,63 @@ def get_operation( # Done; return the response. return response + def cancel_operation( + self, + request: Optional[operations_pb2.CancelOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Starts asynchronous cancellation on a long-running operation. + + The server makes a best effort to cancel the operation, but success + is not guaranteed. If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.CancelOperationRequest`): + The request object. Request message for + `CancelOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.CancelOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.cancel_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. 
+ rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( gapic_version=package_version.__version__ diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/engine_service/transports/base.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/engine_service/transports/base.py index 604606f829f9..0011ffad3f8e 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/engine_service/transports/base.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/engine_service/transports/base.py @@ -240,6 +240,12 @@ def get_operation( ]: raise NotImplementedError() + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None,]: + raise NotImplementedError() + @property def kind(self) -> str: raise NotImplementedError() diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/engine_service/transports/grpc.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/engine_service/transports/grpc.py index 7ec984f9cdad..c9e3ac953a5e 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/engine_service/transports/grpc.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/engine_service/transports/grpc.py @@ -387,6 +387,23 @@ def list_engines( def close(self): self.grpc_channel.close() + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None]: + r"""Return a callable for the cancel_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "cancel_operation" not in self._stubs: + self._stubs["cancel_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/CancelOperation", + request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["cancel_operation"] + @property def get_operation( self, diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/engine_service/transports/grpc_asyncio.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/engine_service/transports/grpc_asyncio.py index d4e46ed3a7df..d794e95b489f 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/engine_service/transports/grpc_asyncio.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/engine_service/transports/grpc_asyncio.py @@ -430,6 +430,23 @@ def _prep_wrapped_messages(self, client_info): def close(self): return self.grpc_channel.close() + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None]: + r"""Return a callable for the cancel_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "cancel_operation" not in self._stubs: + self._stubs["cancel_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/CancelOperation", + request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["cancel_operation"] + @property def get_operation( self, diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/engine_service/transports/rest.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/engine_service/transports/rest.py index 3c6c75db618e..9f8887074424 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/engine_service/transports/rest.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/engine_service/transports/rest.py @@ -231,6 +231,27 @@ def post_update_engine(self, response: gcd_engine.Engine) -> gcd_engine.Engine: """ return response + def pre_cancel_operation( + self, + request: operations_pb2.CancelOperationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[operations_pb2.CancelOperationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for cancel_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the EngineService server. + """ + return request, metadata + + def post_cancel_operation(self, response: None) -> None: + """Post-rpc interceptor for cancel_operation + + Override in a subclass to manipulate the response + after it is returned by the EngineService server but before + it is returned to user code. + """ + return response + def pre_get_operation( self, request: operations_pb2.GetOperationRequest, @@ -386,6 +407,23 @@ def operations_client(self) -> operations_v1.AbstractOperationsClient: # Only create a new client if we do not already have one. if self._operations_client is None: http_options: Dict[str, List[Dict[str, str]]] = { + "google.longrunning.Operations.CancelOperation": [ + { + "method": "post", + "uri": "/v1/{name=projects/*/operations/*}:cancel", + "body": "*", + }, + { + "method": "post", + "uri": "/v1/{name=projects/*/locations/*/collections/*/dataStores/*/branches/*/operations/*}:cancel", + "body": "*", + }, + { + "method": "post", + "uri": "/v1/{name=projects/*/locations/*/dataStores/*/branches/*/operations/*}:cancel", + "body": "*", + }, + ], "google.longrunning.Operations.GetOperation": [ { "method": "get", @@ -1030,6 +1068,81 @@ def update_engine( # In C++ this would require a dynamic_cast return self._UpdateEngine(self._session, self._host, self._interceptor) # type: ignore + @property + def cancel_operation(self): + return self._CancelOperation(self._session, self._host, self._interceptor) # type: ignore + + class _CancelOperation(EngineServiceRestStub): + def __call__( + self, + request: operations_pb2.CancelOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Call the cancel operation method over HTTP. + + Args: + request (operations_pb2.CancelOperationRequest): + The request object for CancelOperation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{name=projects/*/operations/*}:cancel", + "body": "*", + }, + { + "method": "post", + "uri": "/v1/{name=projects/*/locations/*/collections/*/dataStores/*/branches/*/operations/*}:cancel", + "body": "*", + }, + { + "method": "post", + "uri": "/v1/{name=projects/*/locations/*/dataStores/*/branches/*/operations/*}:cancel", + "body": "*", + }, + ] + + request, metadata = self._interceptor.pre_cancel_operation( + request, metadata + ) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + body = json.dumps(transcoded_request["body"]) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + return self._interceptor.post_cancel_operation(None) + @property def get_operation(self): return self._GetOperation(self._session, self._host, self._interceptor) # type: ignore diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/grounded_generation_service/__init__.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/grounded_generation_service/__init__.py new file mode 100644 index 000000000000..8a4c978a5f5e --- /dev/null +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/grounded_generation_service/__init__.py @@ -0,0 +1,22 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from .async_client import GroundedGenerationServiceAsyncClient +from .client import GroundedGenerationServiceClient + +__all__ = ( + "GroundedGenerationServiceClient", + "GroundedGenerationServiceAsyncClient", +) diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/grounded_generation_service/async_client.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/grounded_generation_service/async_client.py new file mode 100644 index 000000000000..10ef7a8089dc --- /dev/null +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/grounded_generation_service/async_client.py @@ -0,0 +1,552 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +import functools +import re +from typing import ( + Callable, + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, +) + +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry_async as retries +from google.api_core.client_options import ClientOptions +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.discoveryengine_v1 import gapic_version as package_version + +try: + OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.AsyncRetry, object, None] # type: ignore + +from google.cloud.location import locations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore + +from google.cloud.discoveryengine_v1.types import grounded_generation_service, grounding + +from .client import GroundedGenerationServiceClient +from .transports.base import DEFAULT_CLIENT_INFO, GroundedGenerationServiceTransport +from .transports.grpc_asyncio import GroundedGenerationServiceGrpcAsyncIOTransport + + +class GroundedGenerationServiceAsyncClient: + """Service for grounded generation.""" + + _client: GroundedGenerationServiceClient + + # Copy defaults from the synchronous client for use here. + # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. 
+ DEFAULT_ENDPOINT = GroundedGenerationServiceClient.DEFAULT_ENDPOINT + DEFAULT_MTLS_ENDPOINT = GroundedGenerationServiceClient.DEFAULT_MTLS_ENDPOINT + _DEFAULT_ENDPOINT_TEMPLATE = ( + GroundedGenerationServiceClient._DEFAULT_ENDPOINT_TEMPLATE + ) + _DEFAULT_UNIVERSE = GroundedGenerationServiceClient._DEFAULT_UNIVERSE + + grounding_config_path = staticmethod( + GroundedGenerationServiceClient.grounding_config_path + ) + parse_grounding_config_path = staticmethod( + GroundedGenerationServiceClient.parse_grounding_config_path + ) + common_billing_account_path = staticmethod( + GroundedGenerationServiceClient.common_billing_account_path + ) + parse_common_billing_account_path = staticmethod( + GroundedGenerationServiceClient.parse_common_billing_account_path + ) + common_folder_path = staticmethod( + GroundedGenerationServiceClient.common_folder_path + ) + parse_common_folder_path = staticmethod( + GroundedGenerationServiceClient.parse_common_folder_path + ) + common_organization_path = staticmethod( + GroundedGenerationServiceClient.common_organization_path + ) + parse_common_organization_path = staticmethod( + GroundedGenerationServiceClient.parse_common_organization_path + ) + common_project_path = staticmethod( + GroundedGenerationServiceClient.common_project_path + ) + parse_common_project_path = staticmethod( + GroundedGenerationServiceClient.parse_common_project_path + ) + common_location_path = staticmethod( + GroundedGenerationServiceClient.common_location_path + ) + parse_common_location_path = staticmethod( + GroundedGenerationServiceClient.parse_common_location_path + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + GroundedGenerationServiceAsyncClient: The constructed client. + """ + return GroundedGenerationServiceClient.from_service_account_info.__func__(GroundedGenerationServiceAsyncClient, info, *args, **kwargs) # type: ignore + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + GroundedGenerationServiceAsyncClient: The constructed client. + """ + return GroundedGenerationServiceClient.from_service_account_file.__func__(GroundedGenerationServiceAsyncClient, filename, *args, **kwargs) # type: ignore + + from_service_account_json = from_service_account_file + + @classmethod + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[ClientOptions] = None + ): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. 
+ (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + return GroundedGenerationServiceClient.get_mtls_endpoint_and_cert_source(client_options) # type: ignore + + @property + def transport(self) -> GroundedGenerationServiceTransport: + """Returns the transport used by the client instance. + + Returns: + GroundedGenerationServiceTransport: The transport used by the client instance. + """ + return self._client.transport + + @property + def api_endpoint(self): + """Return the API endpoint used by the client instance. + + Returns: + str: The API endpoint used by the client instance. + """ + return self._client._api_endpoint + + @property + def universe_domain(self) -> str: + """Return the universe domain used by the client instance. + + Returns: + str: The universe domain used + by the client instance. + """ + return self._client._universe_domain + + get_transport_class = functools.partial( + type(GroundedGenerationServiceClient).get_transport_class, + type(GroundedGenerationServiceClient), + ) + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[ + Union[ + str, + GroundedGenerationServiceTransport, + Callable[..., GroundedGenerationServiceTransport], + ] + ] = "grpc_asyncio", + client_options: Optional[ClientOptions] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the grounded generation service async client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Optional[Union[str,GroundedGenerationServiceTransport,Callable[..., GroundedGenerationServiceTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport to use. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the GroundedGenerationServiceTransport constructor. + If set to None, a transport is chosen automatically. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): + Custom options for the client. + + 1. The ``api_endpoint`` property can be used to override the + default endpoint provided by the client when ``transport`` is + not explicitly provided. 
Only if this property is not set and + ``transport`` was not explicitly provided, the endpoint is + determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment + variable, which have one of the following values: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto-switch to the + default mTLS endpoint if client certificate is present; this is + the default value). + + 2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide a client certificate for mTLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + 3. The ``universe_domain`` property can be used to override the + default "googleapis.com" universe. Note that ``api_endpoint`` + property still takes precedence; and ``universe_domain`` is + currently not supported for mTLS. + + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + """ + self._client = GroundedGenerationServiceClient( + credentials=credentials, + transport=transport, + client_options=client_options, + client_info=client_info, + ) + + async def check_grounding( + self, + request: Optional[ + Union[grounded_generation_service.CheckGroundingRequest, dict] + ] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> grounded_generation_service.CheckGroundingResponse: + r"""Performs a grounding check. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import discoveryengine_v1 + + async def sample_check_grounding(): + # Create a client + client = discoveryengine_v1.GroundedGenerationServiceAsyncClient() + + # Initialize request argument(s) + request = discoveryengine_v1.CheckGroundingRequest( + grounding_config="grounding_config_value", + ) + + # Make the request + response = await client.check_grounding(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.discoveryengine_v1.types.CheckGroundingRequest, dict]]): + The request object. Request message for + [GroundedGenerationService.CheckGrounding][google.cloud.discoveryengine.v1.GroundedGenerationService.CheckGrounding] + method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.cloud.discoveryengine_v1.types.CheckGroundingResponse: + Response message for the + [GroundedGenerationService.CheckGrounding][google.cloud.discoveryengine.v1.GroundedGenerationService.CheckGrounding] + method. + + """ + # Create or coerce a protobuf request object. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, grounded_generation_service.CheckGroundingRequest): + request = grounded_generation_service.CheckGroundingRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.check_grounding + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("grounding_config", request.grounding_config),) + ), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_operations( + self, + request: Optional[operations_pb2.ListOperationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Lists operations that match the specified filter in the request. + + Args: + request (:class:`~.operations_pb2.ListOperationsRequest`): + The request object. Request message for + `ListOperations` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.ListOperationsResponse: + Response message for ``ListOperations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.ListOperationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_operations, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_operation( + self, + request: Optional[operations_pb2.GetOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. + + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. 
Request message for + `GetOperation` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def cancel_operation( + self, + request: Optional[operations_pb2.CancelOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Starts asynchronous cancellation on a long-running operation. + + The server makes a best effort to cancel the operation, but success + is not guaranteed. If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.CancelOperationRequest`): + The request object. Request message for + `CancelOperation` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.CancelOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.cancel_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. 
+ await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def __aenter__(self) -> "GroundedGenerationServiceAsyncClient": + return self + + async def __aexit__(self, exc_type, exc, tb): + await self.transport.close() + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("GroundedGenerationServiceAsyncClient",) diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/grounded_generation_service/client.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/grounded_generation_service/client.py new file mode 100644 index 000000000000..75fc5793a300 --- /dev/null +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/grounded_generation_service/client.py @@ -0,0 +1,969 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +import os +import re +from typing import ( + Callable, + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, + cast, +) +import warnings + +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.discoveryengine_v1 import gapic_version as package_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + +from google.cloud.location import locations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore + +from google.cloud.discoveryengine_v1.types import grounded_generation_service, grounding + +from .transports.base import DEFAULT_CLIENT_INFO, GroundedGenerationServiceTransport +from .transports.grpc import GroundedGenerationServiceGrpcTransport +from .transports.grpc_asyncio import GroundedGenerationServiceGrpcAsyncIOTransport +from .transports.rest import GroundedGenerationServiceRestTransport + + +class GroundedGenerationServiceClientMeta(type): + """Metaclass for the GroundedGenerationService client. + + This provides class-level methods for building and retrieving + support objects (e.g. transport) without polluting the client instance + objects. 
+ """ + + _transport_registry = ( + OrderedDict() + ) # type: Dict[str, Type[GroundedGenerationServiceTransport]] + _transport_registry["grpc"] = GroundedGenerationServiceGrpcTransport + _transport_registry["grpc_asyncio"] = GroundedGenerationServiceGrpcAsyncIOTransport + _transport_registry["rest"] = GroundedGenerationServiceRestTransport + + def get_transport_class( + cls, + label: Optional[str] = None, + ) -> Type[GroundedGenerationServiceTransport]: + """Returns an appropriate transport class. + + Args: + label: The name of the desired transport. If none is + provided, then the first transport in the registry is used. + + Returns: + The transport class to use. + """ + # If a specific transport is requested, return that one. + if label: + return cls._transport_registry[label] + + # No transport is requested; return the default (that is, the first one + # in the dictionary). + return next(iter(cls._transport_registry.values())) + + +class GroundedGenerationServiceClient(metaclass=GroundedGenerationServiceClientMeta): + """Service for grounded generation.""" + + @staticmethod + def _get_default_mtls_endpoint(api_endpoint): + """Converts api endpoint to mTLS endpoint. + + Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to + "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. + Args: + api_endpoint (Optional[str]): the api endpoint to convert. + Returns: + str: converted mTLS api endpoint. + """ + if not api_endpoint: + return api_endpoint + + mtls_endpoint_re = re.compile( + r"(?P[^.]+)(?P\.mtls)?(?P\.sandbox)?(?P\.googleapis\.com)?" + ) + + m = mtls_endpoint_re.match(api_endpoint) + name, mtls, sandbox, googledomain = m.groups() + if mtls or not googledomain: + return api_endpoint + + if sandbox: + return api_endpoint.replace( + "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" + ) + + return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") + + # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. + DEFAULT_ENDPOINT = "discoveryengine.googleapis.com" + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore + DEFAULT_ENDPOINT + ) + + _DEFAULT_ENDPOINT_TEMPLATE = "discoveryengine.{UNIVERSE_DOMAIN}" + _DEFAULT_UNIVERSE = "googleapis.com" + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + GroundedGenerationServiceClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + GroundedGenerationServiceClient: The constructed client. 
+ """ + credentials = service_account.Credentials.from_service_account_file(filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> GroundedGenerationServiceTransport: + """Returns the transport used by the client instance. + + Returns: + GroundedGenerationServiceTransport: The transport used by the client + instance. + """ + return self._transport + + @staticmethod + def grounding_config_path( + project: str, + location: str, + grounding_config: str, + ) -> str: + """Returns a fully-qualified grounding_config string.""" + return "projects/{project}/locations/{location}/groundingConfigs/{grounding_config}".format( + project=project, + location=location, + grounding_config=grounding_config, + ) + + @staticmethod + def parse_grounding_config_path(path: str) -> Dict[str, str]: + """Parses a grounding_config path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/groundingConfigs/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + + @staticmethod + def common_billing_account_path( + billing_account: str, + ) -> str: + """Returns a fully-qualified billing_account string.""" + return "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + + @staticmethod + def parse_common_billing_account_path(path: str) -> Dict[str, str]: + """Parse a billing_account path into its component segments.""" + m = re.match(r"^billingAccounts/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_folder_path( + folder: str, + ) -> str: + """Returns a fully-qualified folder string.""" + return "folders/{folder}".format( + folder=folder, + ) + + @staticmethod + def parse_common_folder_path(path: str) -> Dict[str, str]: + """Parse a folder path into its component segments.""" + m = re.match(r"^folders/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_organization_path( + organization: str, + ) -> str: + """Returns a fully-qualified organization string.""" + return "organizations/{organization}".format( + organization=organization, + ) + + @staticmethod + def parse_common_organization_path(path: str) -> Dict[str, str]: + """Parse a organization path into its component segments.""" + m = re.match(r"^organizations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_project_path( + project: str, + ) -> str: + """Returns a fully-qualified project string.""" + return "projects/{project}".format( + project=project, + ) + + @staticmethod + def parse_common_project_path(path: str) -> Dict[str, str]: + """Parse a project path into its component segments.""" + m = re.match(r"^projects/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_location_path( + project: str, + location: str, + ) -> str: + """Returns a fully-qualified location string.""" + return "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) + + @staticmethod + def parse_common_location_path(path: str) -> Dict[str, str]: + """Parse a location path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @classmethod + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[client_options_lib.ClientOptions] = None + ): + """Deprecated. Return the API endpoint and client cert source for mutual TLS. 
+ + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + + warnings.warn( + "get_mtls_endpoint_and_cert_source is deprecated. Use the api_endpoint property instead.", + DeprecationWarning, + ) + if client_options is None: + client_options = client_options_lib.ClientOptions() + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_client_cert not in ("true", "false"): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError( + "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Figure out the client cert source to use. + client_cert_source = None + if use_client_cert == "true": + if client_options.client_cert_source: + client_cert_source = client_options.client_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + + # Figure out which api endpoint to use. + if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + elif use_mtls_endpoint == "always" or ( + use_mtls_endpoint == "auto" and client_cert_source + ): + api_endpoint = cls.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = cls.DEFAULT_ENDPOINT + + return api_endpoint, client_cert_source + + @staticmethod + def _read_environment_variables(): + """Returns the environment variables used by the client. + + Returns: + Tuple[bool, str, str]: returns the GOOGLE_API_USE_CLIENT_CERTIFICATE, + GOOGLE_API_USE_MTLS_ENDPOINT, and GOOGLE_CLOUD_UNIVERSE_DOMAIN environment variables. + + Raises: + ValueError: If GOOGLE_API_USE_CLIENT_CERTIFICATE is not + any of ["true", "false"]. + google.auth.exceptions.MutualTLSChannelError: If GOOGLE_API_USE_MTLS_ENDPOINT + is not any of ["auto", "never", "always"]. 
+ """ + use_client_cert = os.getenv( + "GOOGLE_API_USE_CLIENT_CERTIFICATE", "false" + ).lower() + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto").lower() + universe_domain_env = os.getenv("GOOGLE_CLOUD_UNIVERSE_DOMAIN") + if use_client_cert not in ("true", "false"): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError( + "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + return use_client_cert == "true", use_mtls_endpoint, universe_domain_env + + @staticmethod + def _get_client_cert_source(provided_cert_source, use_cert_flag): + """Return the client cert source to be used by the client. + + Args: + provided_cert_source (bytes): The client certificate source provided. + use_cert_flag (bool): A flag indicating whether to use the client certificate. + + Returns: + bytes or None: The client cert source to be used by the client. + """ + client_cert_source = None + if use_cert_flag: + if provided_cert_source: + client_cert_source = provided_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + return client_cert_source + + @staticmethod + def _get_api_endpoint( + api_override, client_cert_source, universe_domain, use_mtls_endpoint + ): + """Return the API endpoint used by the client. + + Args: + api_override (str): The API endpoint override. If specified, this is always + the return value of this function and the other arguments are not used. + client_cert_source (bytes): The client certificate source used by the client. + universe_domain (str): The universe domain used by the client. + use_mtls_endpoint (str): How to use the mTLS endpoint, which depends also on the other parameters. + Possible values are "always", "auto", or "never". + + Returns: + str: The API endpoint to be used by the client. + """ + if api_override is not None: + api_endpoint = api_override + elif use_mtls_endpoint == "always" or ( + use_mtls_endpoint == "auto" and client_cert_source + ): + _default_universe = GroundedGenerationServiceClient._DEFAULT_UNIVERSE + if universe_domain != _default_universe: + raise MutualTLSChannelError( + f"mTLS is not supported in any universe other than {_default_universe}." + ) + api_endpoint = GroundedGenerationServiceClient.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = ( + GroundedGenerationServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=universe_domain + ) + ) + return api_endpoint + + @staticmethod + def _get_universe_domain( + client_universe_domain: Optional[str], universe_domain_env: Optional[str] + ) -> str: + """Return the universe domain used by the client. + + Args: + client_universe_domain (Optional[str]): The universe domain configured via the client options. + universe_domain_env (Optional[str]): The universe domain configured via the "GOOGLE_CLOUD_UNIVERSE_DOMAIN" environment variable. + + Returns: + str: The universe domain to be used by the client. + + Raises: + ValueError: If the universe domain is an empty string. 
+ """ + universe_domain = GroundedGenerationServiceClient._DEFAULT_UNIVERSE + if client_universe_domain is not None: + universe_domain = client_universe_domain + elif universe_domain_env is not None: + universe_domain = universe_domain_env + if len(universe_domain.strip()) == 0: + raise ValueError("Universe Domain cannot be an empty string.") + return universe_domain + + @staticmethod + def _compare_universes( + client_universe: str, credentials: ga_credentials.Credentials + ) -> bool: + """Returns True iff the universe domains used by the client and credentials match. + + Args: + client_universe (str): The universe domain configured via the client options. + credentials (ga_credentials.Credentials): The credentials being used in the client. + + Returns: + bool: True iff client_universe matches the universe in credentials. + + Raises: + ValueError: when client_universe does not match the universe in credentials. + """ + + default_universe = GroundedGenerationServiceClient._DEFAULT_UNIVERSE + credentials_universe = getattr(credentials, "universe_domain", default_universe) + + if client_universe != credentials_universe: + raise ValueError( + "The configured universe domain " + f"({client_universe}) does not match the universe domain " + f"found in the credentials ({credentials_universe}). " + "If you haven't configured the universe domain explicitly, " + f"`{default_universe}` is the default." + ) + return True + + def _validate_universe_domain(self): + """Validates client's and credentials' universe domains are consistent. + + Returns: + bool: True iff the configured universe domain is valid. + + Raises: + ValueError: If the configured universe domain is not valid. + """ + self._is_universe_domain_valid = ( + self._is_universe_domain_valid + or GroundedGenerationServiceClient._compare_universes( + self.universe_domain, self.transport._credentials + ) + ) + return self._is_universe_domain_valid + + @property + def api_endpoint(self): + """Return the API endpoint used by the client instance. + + Returns: + str: The API endpoint used by the client instance. + """ + return self._api_endpoint + + @property + def universe_domain(self) -> str: + """Return the universe domain used by the client instance. + + Returns: + str: The universe domain used by the client instance. + """ + return self._universe_domain + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[ + Union[ + str, + GroundedGenerationServiceTransport, + Callable[..., GroundedGenerationServiceTransport], + ] + ] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the grounded generation service client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Optional[Union[str,GroundedGenerationServiceTransport,Callable[..., GroundedGenerationServiceTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the GroundedGenerationServiceTransport constructor. + If set to None, a transport is chosen automatically. 
+ client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): + Custom options for the client. + + 1. The ``api_endpoint`` property can be used to override the + default endpoint provided by the client when ``transport`` is + not explicitly provided. Only if this property is not set and + ``transport`` was not explicitly provided, the endpoint is + determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment + variable, which have one of the following values: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto-switch to the + default mTLS endpoint if client certificate is present; this is + the default value). + + 2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide a client certificate for mTLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + 3. The ``universe_domain`` property can be used to override the + default "googleapis.com" universe. Note that the ``api_endpoint`` + property still takes precedence; and ``universe_domain`` is + currently not supported for mTLS. + + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + """ + self._client_options = client_options + if isinstance(self._client_options, dict): + self._client_options = client_options_lib.from_dict(self._client_options) + if self._client_options is None: + self._client_options = client_options_lib.ClientOptions() + self._client_options = cast( + client_options_lib.ClientOptions, self._client_options + ) + + universe_domain_opt = getattr(self._client_options, "universe_domain", None) + + ( + self._use_client_cert, + self._use_mtls_endpoint, + self._universe_domain_env, + ) = GroundedGenerationServiceClient._read_environment_variables() + self._client_cert_source = ( + GroundedGenerationServiceClient._get_client_cert_source( + self._client_options.client_cert_source, self._use_client_cert + ) + ) + self._universe_domain = GroundedGenerationServiceClient._get_universe_domain( + universe_domain_opt, self._universe_domain_env + ) + self._api_endpoint = None # updated below, depending on `transport` + + # Initialize the universe domain validation. + self._is_universe_domain_valid = False + + api_key_value = getattr(self._client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError( + "client_options.api_key and credentials are mutually exclusive" + ) + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + transport_provided = isinstance(transport, GroundedGenerationServiceTransport) + if transport_provided: + # transport is a GroundedGenerationServiceTransport instance. + if credentials or self._client_options.credentials_file or api_key_value: + raise ValueError( + "When providing a transport instance, " + "provide its credentials directly." 
+ ) + if self._client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." + ) + self._transport = cast(GroundedGenerationServiceTransport, transport) + self._api_endpoint = self._transport.host + + self._api_endpoint = ( + self._api_endpoint + or GroundedGenerationServiceClient._get_api_endpoint( + self._client_options.api_endpoint, + self._client_cert_source, + self._universe_domain, + self._use_mtls_endpoint, + ) + ) + + if not transport_provided: + import google.auth._default # type: ignore + + if api_key_value and hasattr( + google.auth._default, "get_api_key_credentials" + ): + credentials = google.auth._default.get_api_key_credentials( + api_key_value + ) + + transport_init: Union[ + Type[GroundedGenerationServiceTransport], + Callable[..., GroundedGenerationServiceTransport], + ] = ( + type(self).get_transport_class(transport) + if isinstance(transport, str) or transport is None + else cast(Callable[..., GroundedGenerationServiceTransport], transport) + ) + # initialize with the provided callable or the passed in class + self._transport = transport_init( + credentials=credentials, + credentials_file=self._client_options.credentials_file, + host=self._api_endpoint, + scopes=self._client_options.scopes, + client_cert_source_for_mtls=self._client_cert_source, + quota_project_id=self._client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + api_audience=self._client_options.api_audience, + ) + + def check_grounding( + self, + request: Optional[ + Union[grounded_generation_service.CheckGroundingRequest, dict] + ] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> grounded_generation_service.CheckGroundingResponse: + r"""Performs a grounding check. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import discoveryengine_v1 + + def sample_check_grounding(): + # Create a client + client = discoveryengine_v1.GroundedGenerationServiceClient() + + # Initialize request argument(s) + request = discoveryengine_v1.CheckGroundingRequest( + grounding_config="grounding_config_value", + ) + + # Make the request + response = client.check_grounding(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.discoveryengine_v1.types.CheckGroundingRequest, dict]): + The request object. Request message for + [GroundedGenerationService.CheckGrounding][google.cloud.discoveryengine.v1.GroundedGenerationService.CheckGrounding] + method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.discoveryengine_v1.types.CheckGroundingResponse: + Response message for the + [GroundedGenerationService.CheckGrounding][google.cloud.discoveryengine.v1.GroundedGenerationService.CheckGrounding] + method. 
+ + """ + # Create or coerce a protobuf request object. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, grounded_generation_service.CheckGroundingRequest): + request = grounded_generation_service.CheckGroundingRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.check_grounding] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("grounding_config", request.grounding_config),) + ), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def __enter__(self) -> "GroundedGenerationServiceClient": + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! + """ + self.transport.close() + + def list_operations( + self, + request: Optional[operations_pb2.ListOperationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Lists operations that match the specified filter in the request. + + Args: + request (:class:`~.operations_pb2.ListOperationsRequest`): + The request object. Request message for + `ListOperations` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.ListOperationsResponse: + Response message for ``ListOperations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.ListOperationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.list_operations, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_operation( + self, + request: Optional[operations_pb2.GetOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. 
+ + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.get_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def cancel_operation( + self, + request: Optional[operations_pb2.CancelOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Starts asynchronous cancellation on a long-running operation. + + The server makes a best effort to cancel the operation, but success + is not guaranteed. If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.CancelOperationRequest`): + The request object. Request message for + `CancelOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.CancelOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.cancel_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. 
+ rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("GroundedGenerationServiceClient",) diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/grounded_generation_service/transports/__init__.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/grounded_generation_service/transports/__init__.py new file mode 100644 index 000000000000..b2598d761505 --- /dev/null +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/grounded_generation_service/transports/__init__.py @@ -0,0 +1,41 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +from typing import Dict, Type + +from .base import GroundedGenerationServiceTransport +from .grpc import GroundedGenerationServiceGrpcTransport +from .grpc_asyncio import GroundedGenerationServiceGrpcAsyncIOTransport +from .rest import ( + GroundedGenerationServiceRestInterceptor, + GroundedGenerationServiceRestTransport, +) + +# Compile a registry of transports. +_transport_registry = ( + OrderedDict() +) # type: Dict[str, Type[GroundedGenerationServiceTransport]] +_transport_registry["grpc"] = GroundedGenerationServiceGrpcTransport +_transport_registry["grpc_asyncio"] = GroundedGenerationServiceGrpcAsyncIOTransport +_transport_registry["rest"] = GroundedGenerationServiceRestTransport + +__all__ = ( + "GroundedGenerationServiceTransport", + "GroundedGenerationServiceGrpcTransport", + "GroundedGenerationServiceGrpcAsyncIOTransport", + "GroundedGenerationServiceRestTransport", + "GroundedGenerationServiceRestInterceptor", +) diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/grounded_generation_service/transports/base.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/grounded_generation_service/transports/base.py new file mode 100644 index 000000000000..f141918315ee --- /dev/null +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/grounded_generation_service/transports/base.py @@ -0,0 +1,191 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
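Alongside `check_grounding`, the new GroundedGenerationServiceClient also picks up the standard long-running-operations mix-ins (`list_operations`, `get_operation`, `cancel_operation`). A minimal sketch of how they compose, not part of this diff, using hypothetical project and operation names:

```python
# Sketch only: drive the operations mix-ins added to
# GroundedGenerationServiceClient above. Resource names are hypothetical.
from google.cloud import discoveryengine_v1
from google.longrunning import operations_pb2

client = discoveryengine_v1.GroundedGenerationServiceClient()

parent = "projects/my-project/locations/global"  # hypothetical parent resource
listed = client.list_operations(
    request=operations_pb2.ListOperationsRequest(name=parent)
)
for op in listed.operations:
    latest = client.get_operation(
        request=operations_pb2.GetOperationRequest(name=op.name)
    )
    if not latest.done:
        # Best-effort cancellation; the server may report UNIMPLEMENTED or
        # still complete the operation, as the docstrings above note.
        client.cancel_operation(
            request=operations_pb2.CancelOperationRequest(name=op.name)
        )
```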
+# +import abc +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union + +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.discoveryengine_v1 import gapic_version as package_version +from google.cloud.discoveryengine_v1.types import grounded_generation_service + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +class GroundedGenerationServiceTransport(abc.ABC): + """Abstract transport class for GroundedGenerationService.""" + + AUTH_SCOPES = ("https://www.googleapis.com/auth/cloud-platform",) + + DEFAULT_HOST: str = "discoveryengine.googleapis.com" + + def __init__( + self, + *, + host: str = DEFAULT_HOST, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + **kwargs, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'discoveryengine.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A list of scopes. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + """ + + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} + + # Save the scopes. + self._scopes = scopes + + # If no credentials are provided, then determine the appropriate + # defaults. + if credentials and credentials_file: + raise core_exceptions.DuplicateCredentialArgs( + "'credentials_file' and 'credentials' are mutually exclusive" + ) + + if credentials_file is not None: + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, **scopes_kwargs, quota_project_id=quota_project_id + ) + elif credentials is None: + credentials, _ = google.auth.default( + **scopes_kwargs, quota_project_id=quota_project_id + ) + # Don't apply audience if the credentials file passed from user. 
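For orientation, the branches above resolve credentials in a fixed order: an explicit `credentials` object wins, then `credentials_file` (loaded via `google.auth.load_credentials_from_file`), and finally application default credentials; supplying both raises `DuplicateCredentialArgs`. A rough sketch of the three call patterns against the concrete gRPC transport (the key-file path is a placeholder):

```python
from google.cloud.discoveryengine_v1.services.grounded_generation_service.transports import (
    GroundedGenerationServiceGrpcTransport,
)
from google.oauth2 import service_account

# 1. Explicit credentials object (takes precedence).
creds = service_account.Credentials.from_service_account_file(
    "service-account.json"  # placeholder path
)
transport = GroundedGenerationServiceGrpcTransport(credentials=creds)

# 2. A credentials file, loaded for you by the base transport.
# transport = GroundedGenerationServiceGrpcTransport(credentials_file="service-account.json")

# 3. Neither: fall back to application default credentials (google.auth.default()).
# transport = GroundedGenerationServiceGrpcTransport()
```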
+ if hasattr(credentials, "with_gdch_audience"): + credentials = credentials.with_gdch_audience( + api_audience if api_audience else host + ) + + # If the credentials are service account credentials, then always try to use self signed JWT. + if ( + always_use_jwt_access + and isinstance(credentials, service_account.Credentials) + and hasattr(service_account.Credentials, "with_always_use_jwt_access") + ): + credentials = credentials.with_always_use_jwt_access(True) + + # Save the credentials. + self._credentials = credentials + + # Save the hostname. Default to port 443 (HTTPS) if none is specified. + if ":" not in host: + host += ":443" + self._host = host + + @property + def host(self): + return self._host + + def _prep_wrapped_messages(self, client_info): + # Precompute the wrapped methods. + self._wrapped_methods = { + self.check_grounding: gapic_v1.method.wrap_method( + self.check_grounding, + default_timeout=None, + client_info=client_info, + ), + } + + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! + """ + raise NotImplementedError() + + @property + def check_grounding( + self, + ) -> Callable[ + [grounded_generation_service.CheckGroundingRequest], + Union[ + grounded_generation_service.CheckGroundingResponse, + Awaitable[grounded_generation_service.CheckGroundingResponse], + ], + ]: + raise NotImplementedError() + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], + Union[ + operations_pb2.ListOperationsResponse, + Awaitable[operations_pb2.ListOperationsResponse], + ], + ]: + raise NotImplementedError() + + @property + def get_operation( + self, + ) -> Callable[ + [operations_pb2.GetOperationRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None,]: + raise NotImplementedError() + + @property + def kind(self) -> str: + raise NotImplementedError() + + +__all__ = ("GroundedGenerationServiceTransport",) diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/grounded_generation_service/transports/grpc.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/grounded_generation_service/transports/grpc.py new file mode 100644 index 000000000000..32eb2440baf5 --- /dev/null +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/grounded_generation_service/transports/grpc.py @@ -0,0 +1,328 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from typing import Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, grpc_helpers +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +import grpc # type: ignore + +from google.cloud.discoveryengine_v1.types import grounded_generation_service + +from .base import DEFAULT_CLIENT_INFO, GroundedGenerationServiceTransport + + +class GroundedGenerationServiceGrpcTransport(GroundedGenerationServiceTransport): + """gRPC backend transport for GroundedGenerationService. + + Service for grounded generation. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + + _stubs: Dict[str, Callable] + + def __init__( + self, + *, + host: str = "discoveryengine.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'discoveryengine.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if a ``channel`` instance is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if a ``channel`` instance is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if a ``channel`` instance is provided. + channel (Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]]): + A ``Channel`` instance through which to make calls, or a Callable + that constructs and returns one. If set to None, ``self.create_channel`` + is used to create the channel. If a Callable is given, it will be called + with the same arguments as used in ``self.create_channel``. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. 
It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if a ``channel`` instance is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if isinstance(channel, grpc.Channel): + # Ignore credentials if a channel was passed. + credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + # initialize with the provided callable or the default channel + channel_init = channel or type(self).create_channel + self._grpc_channel = channel_init( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. 
This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @classmethod + def create_channel( + cls, + host: str = "discoveryengine.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> grpc.Channel: + """Create and return a gRPC channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + grpc.Channel: A gRPC channel object. + + Raises: + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + + return grpc_helpers.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs, + ) + + @property + def grpc_channel(self) -> grpc.Channel: + """Return the channel designed to connect to this service.""" + return self._grpc_channel + + @property + def check_grounding( + self, + ) -> Callable[ + [grounded_generation_service.CheckGroundingRequest], + grounded_generation_service.CheckGroundingResponse, + ]: + r"""Return a callable for the check grounding method over gRPC. + + Performs a grounding check. + + Returns: + Callable[[~.CheckGroundingRequest], + ~.CheckGroundingResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "check_grounding" not in self._stubs: + self._stubs["check_grounding"] = self.grpc_channel.unary_unary( + "/google.cloud.discoveryengine.v1.GroundedGenerationService/CheckGrounding", + request_serializer=grounded_generation_service.CheckGroundingRequest.serialize, + response_deserializer=grounded_generation_service.CheckGroundingResponse.deserialize, + ) + return self._stubs["check_grounding"] + + def close(self): + self.grpc_channel.close() + + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None]: + r"""Return a callable for the cancel_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
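Each of the RPC properties in this transport follows the same idiom: build the `unary_unary` stub on first access, cache it in `self._stubs`, and hand back the cached callable on every later access. A stripped-down sketch of that caching pattern (the class and method names here are illustrative, not part of the transport):

```python
class _LazyStubCache:
    """Illustrates the lazy, per-RPC stub caching used by the gRPC transports."""

    def __init__(self, channel):
        self._channel = channel
        self._stubs = {}

    def _get_stub(self, key, path, request_serializer, response_deserializer):
        # Create the stub only on first use, then reuse the cached callable.
        if key not in self._stubs:
            self._stubs[key] = self._channel.unary_unary(
                path,
                request_serializer=request_serializer,
                response_deserializer=response_deserializer,
            )
        return self._stubs[key]
```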
+ if "cancel_operation" not in self._stubs: + self._stubs["cancel_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/CancelOperation", + request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["cancel_operation"] + + @property + def get_operation( + self, + ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: + r"""Return a callable for the get_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_operation" not in self._stubs: + self._stubs["get_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/GetOperation", + request_serializer=operations_pb2.GetOperationRequest.SerializeToString, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["get_operation"] + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse + ]: + r"""Return a callable for the list_operations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_operations" not in self._stubs: + self._stubs["list_operations"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/ListOperations", + request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, + response_deserializer=operations_pb2.ListOperationsResponse.FromString, + ) + return self._stubs["list_operations"] + + @property + def kind(self) -> str: + return "grpc" + + +__all__ = ("GroundedGenerationServiceGrpcTransport",) diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/grounded_generation_service/transports/grpc_asyncio.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/grounded_generation_service/transports/grpc_asyncio.py new file mode 100644 index 000000000000..0f0ea6cbea85 --- /dev/null +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/grounded_generation_service/transports/grpc_asyncio.py @@ -0,0 +1,338 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1, grpc_helpers_async +from google.api_core import retry_async as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +import grpc # type: ignore +from grpc.experimental import aio # type: ignore + +from google.cloud.discoveryengine_v1.types import grounded_generation_service + +from .base import DEFAULT_CLIENT_INFO, GroundedGenerationServiceTransport +from .grpc import GroundedGenerationServiceGrpcTransport + + +class GroundedGenerationServiceGrpcAsyncIOTransport(GroundedGenerationServiceTransport): + """gRPC AsyncIO backend transport for GroundedGenerationService. + + Service for grounded generation. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + + _grpc_channel: aio.Channel + _stubs: Dict[str, Callable] = {} + + @classmethod + def create_channel( + cls, + host: str = "discoveryengine.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> aio.Channel: + """Create and return a gRPC AsyncIO channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + aio.Channel: A gRPC AsyncIO channel object. 
+ """ + + return grpc_helpers_async.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs, + ) + + def __init__( + self, + *, + host: str = "discoveryengine.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[Union[aio.Channel, Callable[..., aio.Channel]]] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'discoveryengine.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if a ``channel`` instance is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if a ``channel`` instance is provided. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + channel (Optional[Union[aio.Channel, Callable[..., aio.Channel]]]): + A ``Channel`` instance through which to make calls, or a Callable + that constructs and returns one. If set to None, ``self.create_channel`` + is used to create the channel. If a Callable is given, it will be called + with the same arguments as used in ``self.create_channel``. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if a ``channel`` instance is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. 
+ Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if isinstance(channel, aio.Channel): + # Ignore credentials if a channel was passed. + credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + # initialize with the provided callable or the default channel + channel_init = channel or type(self).create_channel + self._grpc_channel = channel_init( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @property + def grpc_channel(self) -> aio.Channel: + """Create the channel designed to connect to this service. + + This property caches on the instance; repeated calls return + the same channel. + """ + # Return the channel from cache. + return self._grpc_channel + + @property + def check_grounding( + self, + ) -> Callable[ + [grounded_generation_service.CheckGroundingRequest], + Awaitable[grounded_generation_service.CheckGroundingResponse], + ]: + r"""Return a callable for the check grounding method over gRPC. + + Performs a grounding check. + + Returns: + Callable[[~.CheckGroundingRequest], + Awaitable[~.CheckGroundingResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "check_grounding" not in self._stubs: + self._stubs["check_grounding"] = self.grpc_channel.unary_unary( + "/google.cloud.discoveryengine.v1.GroundedGenerationService/CheckGrounding", + request_serializer=grounded_generation_service.CheckGroundingRequest.serialize, + response_deserializer=grounded_generation_service.CheckGroundingResponse.deserialize, + ) + return self._stubs["check_grounding"] + + def _prep_wrapped_messages(self, client_info): + """Precompute the wrapped methods, overriding the base class method to use async wrappers.""" + self._wrapped_methods = { + self.check_grounding: gapic_v1.method_async.wrap_method( + self.check_grounding, + default_timeout=None, + client_info=client_info, + ), + } + + def close(self): + return self.grpc_channel.close() + + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None]: + r"""Return a callable for the cancel_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "cancel_operation" not in self._stubs: + self._stubs["cancel_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/CancelOperation", + request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["cancel_operation"] + + @property + def get_operation( + self, + ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: + r"""Return a callable for the get_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_operation" not in self._stubs: + self._stubs["get_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/GetOperation", + request_serializer=operations_pb2.GetOperationRequest.SerializeToString, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["get_operation"] + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse + ]: + r"""Return a callable for the list_operations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
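One practical difference from the synchronous transport is that every stub on this class returns an awaitable, so callers have to drive it from an event loop. A hedged sketch of calling the `list_operations` stub defined just below (the resource name is a placeholder, and an already constructed transport is assumed):

```python
import asyncio

from google.longrunning import operations_pb2


async def list_all_operations(transport):
    # The asyncio transport's stubs return awaitables rather than plain responses.
    request = operations_pb2.ListOperationsRequest(
        name="projects/my-project/locations/global"  # placeholder resource name
    )
    response = await transport.list_operations(request)
    return list(response.operations)


# asyncio.run(list_all_operations(transport))  # given a GroundedGenerationServiceGrpcAsyncIOTransport
```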
+ if "list_operations" not in self._stubs: + self._stubs["list_operations"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/ListOperations", + request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, + response_deserializer=operations_pb2.ListOperationsResponse.FromString, + ) + return self._stubs["list_operations"] + + +__all__ = ("GroundedGenerationServiceGrpcAsyncIOTransport",) diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/grounded_generation_service/transports/rest.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/grounded_generation_service/transports/rest.py new file mode 100644 index 000000000000..ee8a9194089c --- /dev/null +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/grounded_generation_service/transports/rest.py @@ -0,0 +1,707 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +import dataclasses +import json # type: ignore +import re +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, path_template, rest_helpers, rest_streaming +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.transport.requests import AuthorizedSession # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.protobuf import json_format +import grpc # type: ignore +from requests import __version__ as requests_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + + +from google.longrunning import operations_pb2 # type: ignore + +from google.cloud.discoveryengine_v1.types import grounded_generation_service + +from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO +from .base import GroundedGenerationServiceTransport + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) + + +class GroundedGenerationServiceRestInterceptor: + """Interceptor for GroundedGenerationService. + + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. + Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the GroundedGenerationServiceRestTransport. + + .. 
code-block:: python + class MyCustomGroundedGenerationServiceInterceptor(GroundedGenerationServiceRestInterceptor): + def pre_check_grounding(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_check_grounding(self, response): + logging.log(f"Received response: {response}") + return response + + transport = GroundedGenerationServiceRestTransport(interceptor=MyCustomGroundedGenerationServiceInterceptor()) + client = GroundedGenerationServiceClient(transport=transport) + + + """ + + def pre_check_grounding( + self, + request: grounded_generation_service.CheckGroundingRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + grounded_generation_service.CheckGroundingRequest, Sequence[Tuple[str, str]] + ]: + """Pre-rpc interceptor for check_grounding + + Override in a subclass to manipulate the request or metadata + before they are sent to the GroundedGenerationService server. + """ + return request, metadata + + def post_check_grounding( + self, response: grounded_generation_service.CheckGroundingResponse + ) -> grounded_generation_service.CheckGroundingResponse: + """Post-rpc interceptor for check_grounding + + Override in a subclass to manipulate the response + after it is returned by the GroundedGenerationService server but before + it is returned to user code. + """ + return response + + def pre_cancel_operation( + self, + request: operations_pb2.CancelOperationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[operations_pb2.CancelOperationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for cancel_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the GroundedGenerationService server. + """ + return request, metadata + + def post_cancel_operation(self, response: None) -> None: + """Post-rpc interceptor for cancel_operation + + Override in a subclass to manipulate the response + after it is returned by the GroundedGenerationService server but before + it is returned to user code. + """ + return response + + def pre_get_operation( + self, + request: operations_pb2.GetOperationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[operations_pb2.GetOperationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the GroundedGenerationService server. + """ + return request, metadata + + def post_get_operation( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for get_operation + + Override in a subclass to manipulate the response + after it is returned by the GroundedGenerationService server but before + it is returned to user code. + """ + return response + + def pre_list_operations( + self, + request: operations_pb2.ListOperationsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[operations_pb2.ListOperationsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_operations + + Override in a subclass to manipulate the request or metadata + before they are sent to the GroundedGenerationService server. 
+ """ + return request, metadata + + def post_list_operations( + self, response: operations_pb2.ListOperationsResponse + ) -> operations_pb2.ListOperationsResponse: + """Post-rpc interceptor for list_operations + + Override in a subclass to manipulate the response + after it is returned by the GroundedGenerationService server but before + it is returned to user code. + """ + return response + + +@dataclasses.dataclass +class GroundedGenerationServiceRestStub: + _session: AuthorizedSession + _host: str + _interceptor: GroundedGenerationServiceRestInterceptor + + +class GroundedGenerationServiceRestTransport(GroundedGenerationServiceTransport): + """REST backend transport for GroundedGenerationService. + + Service for grounded generation. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends JSON representations of protocol buffers over HTTP/1.1 + + """ + + def __init__( + self, + *, + host: str = "discoveryengine.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", + interceptor: Optional[GroundedGenerationServiceRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'discoveryengine.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. It is ignored + if ``channel`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. + """ + # Run the base constructor + # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. 
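Before the constructor body below normalizes the endpoint, it helps to spell out what that normalization does: a bare hostname gets prefixed with `url_scheme`, while a host that already carries a scheme is passed through untouched. A standalone illustration of the same regex logic (the helper name and hosts are placeholders):

```python
import re


def normalize_host(host: str, url_scheme: str = "https") -> str:
    # Mirrors the scheme/host split performed in the REST transport constructor.
    match = re.match(r"^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host)
    if match is None:
        raise ValueError(f"Unexpected hostname structure: {host}")
    return host if match.groupdict()["scheme"] else f"{url_scheme}://{host}"


print(normalize_host("discoveryengine.googleapis.com"))  # -> https://discoveryengine.googleapis.com
print(normalize_host("http://localhost:8080"))              # left unchanged
```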
+        # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the
+        # credentials object
+        maybe_url_match = re.match("^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host)
+        if maybe_url_match is None:
+            raise ValueError(
+                f"Unexpected hostname structure: {host}"
+            )  # pragma: NO COVER
+
+        url_match_items = maybe_url_match.groupdict()
+
+        host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host
+
+        super().__init__(
+            host=host,
+            credentials=credentials,
+            client_info=client_info,
+            always_use_jwt_access=always_use_jwt_access,
+            api_audience=api_audience,
+        )
+        self._session = AuthorizedSession(
+            self._credentials, default_host=self.DEFAULT_HOST
+        )
+        if client_cert_source_for_mtls:
+            self._session.configure_mtls_channel(client_cert_source_for_mtls)
+        self._interceptor = interceptor or GroundedGenerationServiceRestInterceptor()
+        self._prep_wrapped_messages(client_info)
+
+    class _CheckGrounding(GroundedGenerationServiceRestStub):
+        def __hash__(self):
+            return hash("CheckGrounding")
+
+        __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {}
+
+        @classmethod
+        def _get_unset_required_fields(cls, message_dict):
+            return {
+                k: v
+                for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items()
+                if k not in message_dict
+            }
+
+        def __call__(
+            self,
+            request: grounded_generation_service.CheckGroundingRequest,
+            *,
+            retry: OptionalRetry = gapic_v1.method.DEFAULT,
+            timeout: Optional[float] = None,
+            metadata: Sequence[Tuple[str, str]] = (),
+        ) -> grounded_generation_service.CheckGroundingResponse:
+            r"""Call the check grounding method over HTTP.
+
+            Args:
+                request (~.grounded_generation_service.CheckGroundingRequest):
+                    The request object. Request message for
+                    [GroundedGenerationService.CheckGrounding][google.cloud.discoveryengine.v1.GroundedGenerationService.CheckGrounding]
+                    method.
+                retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                    should be retried.
+                timeout (float): The timeout for this request.
+                metadata (Sequence[Tuple[str, str]]): Strings which should be
+                    sent along with the request as metadata.
+
+            Returns:
+                ~.grounded_generation_service.CheckGroundingResponse:
+                    Response message for the
+                    [GroundedGenerationService.CheckGrounding][google.cloud.discoveryengine.v1.GroundedGenerationService.CheckGrounding]
+                    method.
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{grounding_config=projects/*/locations/*/groundingConfigs/*}:check", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_check_grounding(request, metadata) + pb_request = grounded_generation_service.CheckGroundingRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = grounded_generation_service.CheckGroundingResponse() + pb_resp = grounded_generation_service.CheckGroundingResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_check_grounding(resp) + return resp + + @property + def check_grounding( + self, + ) -> Callable[ + [grounded_generation_service.CheckGroundingRequest], + grounded_generation_service.CheckGroundingResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._CheckGrounding(self._session, self._host, self._interceptor) # type: ignore + + @property + def cancel_operation(self): + return self._CancelOperation(self._session, self._host, self._interceptor) # type: ignore + + class _CancelOperation(GroundedGenerationServiceRestStub): + def __call__( + self, + request: operations_pb2.CancelOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Call the cancel operation method over HTTP. + + Args: + request (operations_pb2.CancelOperationRequest): + The request object for CancelOperation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{name=projects/*/operations/*}:cancel", + "body": "*", + }, + { + "method": "post", + "uri": "/v1/{name=projects/*/locations/*/collections/*/dataStores/*/branches/*/operations/*}:cancel", + "body": "*", + }, + { + "method": "post", + "uri": "/v1/{name=projects/*/locations/*/dataStores/*/branches/*/operations/*}:cancel", + "body": "*", + }, + ] + + request, metadata = self._interceptor.pre_cancel_operation( + request, metadata + ) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + body = json.dumps(transcoded_request["body"]) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + return self._interceptor.post_cancel_operation(None) + + @property + def get_operation(self): + return self._GetOperation(self._session, self._host, self._interceptor) # type: ignore + + class _GetOperation(GroundedGenerationServiceRestStub): + def __call__( + self, + request: operations_pb2.GetOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the get operation method over HTTP. + + Args: + request (operations_pb2.GetOperationRequest): + The request object for GetOperation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + operations_pb2.Operation: Response from GetOperation method. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/dataConnector/operations/*}", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/dataStores/*/branches/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/dataStores/*/models/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/dataStores/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/dataStores/*/schemas/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/dataStores/*/siteSearchEngine/operations/*}", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/dataStores/*/siteSearchEngine/targetSites/operations/*}", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/engines/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/dataStores/*/branches/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/dataStores/*/models/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/dataStores/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/operations/*}", + }, + ] + + request, metadata = self._interceptor.pre_get_operation(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = operations_pb2.Operation() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_get_operation(resp) + return resp + + @property + def list_operations(self): + return self._ListOperations(self._session, self._host, self._interceptor) # type: ignore + + class _ListOperations(GroundedGenerationServiceRestStub): + def __call__( + self, + request: operations_pb2.ListOperationsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Call the list operations method over HTTP. + + Args: + request (operations_pb2.ListOperationsRequest): + The request object for ListOperations method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + operations_pb2.ListOperationsResponse: Response from ListOperations method. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*}/operations", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/dataConnector}/operations", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/dataStores/*/branches/*}/operations", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/dataStores/*/models/*}/operations", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/dataStores/*/schemas/*}/operations", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/dataStores/*/siteSearchEngine/targetSites}/operations", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/dataStores/*/siteSearchEngine}/operations", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/dataStores/*}/operations", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/engines/*}/operations", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*}/operations", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/dataStores/*/branches/*}/operations", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/dataStores/*/models/*}/operations", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/dataStores/*}/operations", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*}/operations", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*}/operations", + }, + ] + + request, metadata = self._interceptor.pre_list_operations(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
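For context on the `http_options` handling used here and in the other operation stubs: `path_template.transcode` picks the first URI pattern whose path variables match the request and returns the HTTP verb, the expanded URI, and the leftover fields as query parameters. A hedged sketch of that step in isolation (the resource name is a placeholder):

```python
from google.api_core import path_template
from google.longrunning import operations_pb2
from google.protobuf import json_format

http_options = [
    {"method": "get", "uri": "/v1/{name=projects/*/locations/*}/operations"},
    {"method": "get", "uri": "/v1/{name=projects/*}/operations"},
]
request = operations_pb2.ListOperationsRequest(
    name="projects/my-project/locations/global"  # placeholder resource
)
transcoded = path_template.transcode(http_options, **json_format.MessageToDict(request))
# Expected shape: {'method': 'get',
#                  'uri': '/v1/projects/my-project/locations/global/operations',
#                  'query_params': {}}
```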
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = operations_pb2.ListOperationsResponse() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_list_operations(resp) + return resp + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__ = ("GroundedGenerationServiceRestTransport",) diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/project_service/__init__.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/project_service/__init__.py new file mode 100644 index 000000000000..245aea0dfd81 --- /dev/null +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/project_service/__init__.py @@ -0,0 +1,22 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from .async_client import ProjectServiceAsyncClient +from .client import ProjectServiceClient + +__all__ = ( + "ProjectServiceClient", + "ProjectServiceAsyncClient", +) diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/project_service/async_client.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/project_service/async_client.py new file mode 100644 index 000000000000..30a15a92dc91 --- /dev/null +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/project_service/async_client.py @@ -0,0 +1,581 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +import functools +import re +from typing import ( + Callable, + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, +) + +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry_async as retries +from google.api_core.client_options import ClientOptions +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.discoveryengine_v1 import gapic_version as package_version + +try: + OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.AsyncRetry, object, None] # type: ignore + +from google.api_core import operation # type: ignore +from google.api_core import operation_async # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore + +from google.cloud.discoveryengine_v1.types import project, project_service + +from .client import ProjectServiceClient +from .transports.base import DEFAULT_CLIENT_INFO, ProjectServiceTransport +from .transports.grpc_asyncio import ProjectServiceGrpcAsyncIOTransport + + +class ProjectServiceAsyncClient: + """Service for operations on the + [Project][google.cloud.discoveryengine.v1.Project]. + """ + + _client: ProjectServiceClient + + # Copy defaults from the synchronous client for use here. + # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. + DEFAULT_ENDPOINT = ProjectServiceClient.DEFAULT_ENDPOINT + DEFAULT_MTLS_ENDPOINT = ProjectServiceClient.DEFAULT_MTLS_ENDPOINT + _DEFAULT_ENDPOINT_TEMPLATE = ProjectServiceClient._DEFAULT_ENDPOINT_TEMPLATE + _DEFAULT_UNIVERSE = ProjectServiceClient._DEFAULT_UNIVERSE + + project_path = staticmethod(ProjectServiceClient.project_path) + parse_project_path = staticmethod(ProjectServiceClient.parse_project_path) + common_billing_account_path = staticmethod( + ProjectServiceClient.common_billing_account_path + ) + parse_common_billing_account_path = staticmethod( + ProjectServiceClient.parse_common_billing_account_path + ) + common_folder_path = staticmethod(ProjectServiceClient.common_folder_path) + parse_common_folder_path = staticmethod( + ProjectServiceClient.parse_common_folder_path + ) + common_organization_path = staticmethod( + ProjectServiceClient.common_organization_path + ) + parse_common_organization_path = staticmethod( + ProjectServiceClient.parse_common_organization_path + ) + common_project_path = staticmethod(ProjectServiceClient.common_project_path) + parse_common_project_path = staticmethod( + ProjectServiceClient.parse_common_project_path + ) + common_location_path = staticmethod(ProjectServiceClient.common_location_path) + parse_common_location_path = staticmethod( + ProjectServiceClient.parse_common_location_path + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + ProjectServiceAsyncClient: The constructed client. 
+ """ + return ProjectServiceClient.from_service_account_info.__func__(ProjectServiceAsyncClient, info, *args, **kwargs) # type: ignore + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + ProjectServiceAsyncClient: The constructed client. + """ + return ProjectServiceClient.from_service_account_file.__func__(ProjectServiceAsyncClient, filename, *args, **kwargs) # type: ignore + + from_service_account_json = from_service_account_file + + @classmethod + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[ClientOptions] = None + ): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + return ProjectServiceClient.get_mtls_endpoint_and_cert_source(client_options) # type: ignore + + @property + def transport(self) -> ProjectServiceTransport: + """Returns the transport used by the client instance. + + Returns: + ProjectServiceTransport: The transport used by the client instance. + """ + return self._client.transport + + @property + def api_endpoint(self): + """Return the API endpoint used by the client instance. + + Returns: + str: The API endpoint used by the client instance. + """ + return self._client._api_endpoint + + @property + def universe_domain(self) -> str: + """Return the universe domain used by the client instance. + + Returns: + str: The universe domain used + by the client instance. 
+ """ + return self._client._universe_domain + + get_transport_class = functools.partial( + type(ProjectServiceClient).get_transport_class, type(ProjectServiceClient) + ) + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[ + Union[str, ProjectServiceTransport, Callable[..., ProjectServiceTransport]] + ] = "grpc_asyncio", + client_options: Optional[ClientOptions] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the project service async client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Optional[Union[str,ProjectServiceTransport,Callable[..., ProjectServiceTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport to use. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the ProjectServiceTransport constructor. + If set to None, a transport is chosen automatically. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): + Custom options for the client. + + 1. The ``api_endpoint`` property can be used to override the + default endpoint provided by the client when ``transport`` is + not explicitly provided. Only if this property is not set and + ``transport`` was not explicitly provided, the endpoint is + determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment + variable, which have one of the following values: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto-switch to the + default mTLS endpoint if client certificate is present; this is + the default value). + + 2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide a client certificate for mTLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + 3. The ``universe_domain`` property can be used to override the + default "googleapis.com" universe. Note that ``api_endpoint`` + property still takes precedence; and ``universe_domain`` is + currently not supported for mTLS. + + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + """ + self._client = ProjectServiceClient( + credentials=credentials, + transport=transport, + client_options=client_options, + client_info=client_info, + ) + + async def provision_project( + self, + request: Optional[Union[project_service.ProvisionProjectRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Provisions the project resource. 
During the process, related + systems will get prepared and initialized. + + Caller must read the `Terms for data + use `__, and + optionally specify in request to provide consent to that service + terms. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import discoveryengine_v1 + + async def sample_provision_project(): + # Create a client + client = discoveryengine_v1.ProjectServiceAsyncClient() + + # Initialize request argument(s) + request = discoveryengine_v1.ProvisionProjectRequest( + name="name_value", + accept_data_use_terms=True, + data_use_terms_version="data_use_terms_version_value", + ) + + # Make the request + operation = client.provision_project(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.discoveryengine_v1.types.ProvisionProjectRequest, dict]]): + The request object. Request for + [ProjectService.ProvisionProject][google.cloud.discoveryengine.v1.ProjectService.ProvisionProject] + method. + name (:class:`str`): + Required. Full resource name of a + [Project][google.cloud.discoveryengine.v1.Project], such + as ``projects/{project_id_or_number}``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.discoveryengine_v1.types.Project` + Metadata and configurations for a Google Cloud project + in the service. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, project_service.ProvisionProjectRequest): + request = project_service.ProvisionProjectRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.provision_project + ] + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + project.Project, + metadata_type=project_service.ProvisionProjectMetadata, + ) + + # Done; return the response. + return response + + async def list_operations( + self, + request: Optional[operations_pb2.ListOperationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Lists operations that match the specified filter in the request. + + Args: + request (:class:`~.operations_pb2.ListOperationsRequest`): + The request object. Request message for + `ListOperations` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.ListOperationsResponse: + Response message for ``ListOperations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.ListOperationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_operations, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_operation( + self, + request: Optional[operations_pb2.GetOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. + + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. 
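+        # For example, a dict such as {"name": "<operation name>"} expands to
+        # operations_pb2.GetOperationRequest(name="<operation name>") below.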
+ if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def cancel_operation( + self, + request: Optional[operations_pb2.CancelOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Starts asynchronous cancellation on a long-running operation. + + The server makes a best effort to cancel the operation, but success + is not guaranteed. If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.CancelOperationRequest`): + The request object. Request message for + `CancelOperation` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.CancelOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.cancel_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def __aenter__(self) -> "ProjectServiceAsyncClient": + return self + + async def __aexit__(self, exc_type, exc, tb): + await self.transport.close() + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("ProjectServiceAsyncClient",) diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/project_service/client.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/project_service/client.py new file mode 100644 index 000000000000..c964e97a6679 --- /dev/null +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/project_service/client.py @@ -0,0 +1,998 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +import os +import re +from typing import ( + Callable, + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, + cast, +) +import warnings + +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.discoveryengine_v1 import gapic_version as package_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + +from google.api_core import operation # type: ignore +from google.api_core import operation_async # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore + +from google.cloud.discoveryengine_v1.types import project, project_service + +from .transports.base import DEFAULT_CLIENT_INFO, ProjectServiceTransport +from .transports.grpc import ProjectServiceGrpcTransport +from .transports.grpc_asyncio import ProjectServiceGrpcAsyncIOTransport +from .transports.rest import ProjectServiceRestTransport + + +class ProjectServiceClientMeta(type): + """Metaclass for the ProjectService client. + + This provides class-level methods for building and retrieving + support objects (e.g. transport) without polluting the client instance + objects. + """ + + _transport_registry = ( + OrderedDict() + ) # type: Dict[str, Type[ProjectServiceTransport]] + _transport_registry["grpc"] = ProjectServiceGrpcTransport + _transport_registry["grpc_asyncio"] = ProjectServiceGrpcAsyncIOTransport + _transport_registry["rest"] = ProjectServiceRestTransport + + def get_transport_class( + cls, + label: Optional[str] = None, + ) -> Type[ProjectServiceTransport]: + """Returns an appropriate transport class. + + Args: + label: The name of the desired transport. If none is + provided, then the first transport in the registry is used. + + Returns: + The transport class to use. + """ + # If a specific transport is requested, return that one. + if label: + return cls._transport_registry[label] + + # No transport is requested; return the default (that is, the first one + # in the dictionary). + return next(iter(cls._transport_registry.values())) + + +class ProjectServiceClient(metaclass=ProjectServiceClientMeta): + """Service for operations on the + [Project][google.cloud.discoveryengine.v1.Project]. + """ + + @staticmethod + def _get_default_mtls_endpoint(api_endpoint): + """Converts api endpoint to mTLS endpoint. 
+
+        Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to
+        "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively.
+        Args:
+            api_endpoint (Optional[str]): the api endpoint to convert.
+        Returns:
+            str: converted mTLS api endpoint.
+        """
+        if not api_endpoint:
+            return api_endpoint
+
+        mtls_endpoint_re = re.compile(
+            r"(?P<name>[^.]+)(?P<mtls>\.mtls)?(?P<sandbox>\.sandbox)?(?P<googledomain>\.googleapis\.com)?"
+        )
+
+        m = mtls_endpoint_re.match(api_endpoint)
+        name, mtls, sandbox, googledomain = m.groups()
+        if mtls or not googledomain:
+            return api_endpoint
+
+        if sandbox:
+            return api_endpoint.replace(
+                "sandbox.googleapis.com", "mtls.sandbox.googleapis.com"
+            )
+
+        return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com")
+
+    # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead.
+    DEFAULT_ENDPOINT = "discoveryengine.googleapis.com"
+    DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__(  # type: ignore
+        DEFAULT_ENDPOINT
+    )
+
+    _DEFAULT_ENDPOINT_TEMPLATE = "discoveryengine.{UNIVERSE_DOMAIN}"
+    _DEFAULT_UNIVERSE = "googleapis.com"
+
+    @classmethod
+    def from_service_account_info(cls, info: dict, *args, **kwargs):
+        """Creates an instance of this client using the provided credentials
+            info.
+
+        Args:
+            info (dict): The service account private key info.
+            args: Additional arguments to pass to the constructor.
+            kwargs: Additional arguments to pass to the constructor.
+
+        Returns:
+            ProjectServiceClient: The constructed client.
+        """
+        credentials = service_account.Credentials.from_service_account_info(info)
+        kwargs["credentials"] = credentials
+        return cls(*args, **kwargs)
+
+    @classmethod
+    def from_service_account_file(cls, filename: str, *args, **kwargs):
+        """Creates an instance of this client using the provided credentials
+            file.
+
+        Args:
+            filename (str): The path to the service account private key json
+                file.
+            args: Additional arguments to pass to the constructor.
+            kwargs: Additional arguments to pass to the constructor.
+
+        Returns:
+            ProjectServiceClient: The constructed client.
+        """
+        credentials = service_account.Credentials.from_service_account_file(filename)
+        kwargs["credentials"] = credentials
+        return cls(*args, **kwargs)
+
+    from_service_account_json = from_service_account_file
+
+    @property
+    def transport(self) -> ProjectServiceTransport:
+        """Returns the transport used by the client instance.
+
+        Returns:
+            ProjectServiceTransport: The transport used by the client
+                instance.
+        """
+        return self._transport
+
+    @staticmethod
+    def project_path(
+        project: str,
+    ) -> str:
+        """Returns a fully-qualified project string."""
+        return "projects/{project}".format(
+            project=project,
+        )
+
+    @staticmethod
+    def parse_project_path(path: str) -> Dict[str, str]:
+        """Parses a project path into its component segments."""
+        m = re.match(r"^projects/(?P<project>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_billing_account_path(
+        billing_account: str,
+    ) -> str:
+        """Returns a fully-qualified billing_account string."""
+        return "billingAccounts/{billing_account}".format(
+            billing_account=billing_account,
+        )
+
+    @staticmethod
+    def parse_common_billing_account_path(path: str) -> Dict[str, str]:
+        """Parse a billing_account path into its component segments."""
+        m = re.match(r"^billingAccounts/(?P<billing_account>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_folder_path(
+        folder: str,
+    ) -> str:
+        """Returns a fully-qualified folder string."""
+        return "folders/{folder}".format(
+            folder=folder,
+        )
+
+    @staticmethod
+    def parse_common_folder_path(path: str) -> Dict[str, str]:
+        """Parse a folder path into its component segments."""
+        m = re.match(r"^folders/(?P<folder>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_organization_path(
+        organization: str,
+    ) -> str:
+        """Returns a fully-qualified organization string."""
+        return "organizations/{organization}".format(
+            organization=organization,
+        )
+
+    @staticmethod
+    def parse_common_organization_path(path: str) -> Dict[str, str]:
+        """Parse an organization path into its component segments."""
+        m = re.match(r"^organizations/(?P<organization>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_project_path(
+        project: str,
+    ) -> str:
+        """Returns a fully-qualified project string."""
+        return "projects/{project}".format(
+            project=project,
+        )
+
+    @staticmethod
+    def parse_common_project_path(path: str) -> Dict[str, str]:
+        """Parse a project path into its component segments."""
+        m = re.match(r"^projects/(?P<project>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_location_path(
+        project: str,
+        location: str,
+    ) -> str:
+        """Returns a fully-qualified location string."""
+        return "projects/{project}/locations/{location}".format(
+            project=project,
+            location=location,
+        )
+
+    @staticmethod
+    def parse_common_location_path(path: str) -> Dict[str, str]:
+        """Parse a location path into its component segments."""
+        m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @classmethod
+    def get_mtls_endpoint_and_cert_source(
+        cls, client_options: Optional[client_options_lib.ClientOptions] = None
+    ):
+        """Deprecated. Return the API endpoint and client cert source for mutual TLS.
+
+        The client cert source is determined in the following order:
+        (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the
+        client cert source is None.
+        (2) if `client_options.client_cert_source` is provided, use the provided one; if the
+        default client cert source exists, use the default one; otherwise the client cert
+        source is None.
+
+        The API endpoint is determined in the following order:
+        (1) if `client_options.api_endpoint` is provided, use the provided one.
+ (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + + warnings.warn( + "get_mtls_endpoint_and_cert_source is deprecated. Use the api_endpoint property instead.", + DeprecationWarning, + ) + if client_options is None: + client_options = client_options_lib.ClientOptions() + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_client_cert not in ("true", "false"): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError( + "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Figure out the client cert source to use. + client_cert_source = None + if use_client_cert == "true": + if client_options.client_cert_source: + client_cert_source = client_options.client_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + + # Figure out which api endpoint to use. + if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + elif use_mtls_endpoint == "always" or ( + use_mtls_endpoint == "auto" and client_cert_source + ): + api_endpoint = cls.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = cls.DEFAULT_ENDPOINT + + return api_endpoint, client_cert_source + + @staticmethod + def _read_environment_variables(): + """Returns the environment variables used by the client. + + Returns: + Tuple[bool, str, str]: returns the GOOGLE_API_USE_CLIENT_CERTIFICATE, + GOOGLE_API_USE_MTLS_ENDPOINT, and GOOGLE_CLOUD_UNIVERSE_DOMAIN environment variables. + + Raises: + ValueError: If GOOGLE_API_USE_CLIENT_CERTIFICATE is not + any of ["true", "false"]. + google.auth.exceptions.MutualTLSChannelError: If GOOGLE_API_USE_MTLS_ENDPOINT + is not any of ["auto", "never", "always"]. + """ + use_client_cert = os.getenv( + "GOOGLE_API_USE_CLIENT_CERTIFICATE", "false" + ).lower() + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto").lower() + universe_domain_env = os.getenv("GOOGLE_CLOUD_UNIVERSE_DOMAIN") + if use_client_cert not in ("true", "false"): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError( + "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + return use_client_cert == "true", use_mtls_endpoint, universe_domain_env + + @staticmethod + def _get_client_cert_source(provided_cert_source, use_cert_flag): + """Return the client cert source to be used by the client. 
+ + Args: + provided_cert_source (bytes): The client certificate source provided. + use_cert_flag (bool): A flag indicating whether to use the client certificate. + + Returns: + bytes or None: The client cert source to be used by the client. + """ + client_cert_source = None + if use_cert_flag: + if provided_cert_source: + client_cert_source = provided_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + return client_cert_source + + @staticmethod + def _get_api_endpoint( + api_override, client_cert_source, universe_domain, use_mtls_endpoint + ): + """Return the API endpoint used by the client. + + Args: + api_override (str): The API endpoint override. If specified, this is always + the return value of this function and the other arguments are not used. + client_cert_source (bytes): The client certificate source used by the client. + universe_domain (str): The universe domain used by the client. + use_mtls_endpoint (str): How to use the mTLS endpoint, which depends also on the other parameters. + Possible values are "always", "auto", or "never". + + Returns: + str: The API endpoint to be used by the client. + """ + if api_override is not None: + api_endpoint = api_override + elif use_mtls_endpoint == "always" or ( + use_mtls_endpoint == "auto" and client_cert_source + ): + _default_universe = ProjectServiceClient._DEFAULT_UNIVERSE + if universe_domain != _default_universe: + raise MutualTLSChannelError( + f"mTLS is not supported in any universe other than {_default_universe}." + ) + api_endpoint = ProjectServiceClient.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = ProjectServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=universe_domain + ) + return api_endpoint + + @staticmethod + def _get_universe_domain( + client_universe_domain: Optional[str], universe_domain_env: Optional[str] + ) -> str: + """Return the universe domain used by the client. + + Args: + client_universe_domain (Optional[str]): The universe domain configured via the client options. + universe_domain_env (Optional[str]): The universe domain configured via the "GOOGLE_CLOUD_UNIVERSE_DOMAIN" environment variable. + + Returns: + str: The universe domain to be used by the client. + + Raises: + ValueError: If the universe domain is an empty string. + """ + universe_domain = ProjectServiceClient._DEFAULT_UNIVERSE + if client_universe_domain is not None: + universe_domain = client_universe_domain + elif universe_domain_env is not None: + universe_domain = universe_domain_env + if len(universe_domain.strip()) == 0: + raise ValueError("Universe Domain cannot be an empty string.") + return universe_domain + + @staticmethod + def _compare_universes( + client_universe: str, credentials: ga_credentials.Credentials + ) -> bool: + """Returns True iff the universe domains used by the client and credentials match. + + Args: + client_universe (str): The universe domain configured via the client options. + credentials (ga_credentials.Credentials): The credentials being used in the client. + + Returns: + bool: True iff client_universe matches the universe in credentials. + + Raises: + ValueError: when client_universe does not match the universe in credentials. 
+ """ + + default_universe = ProjectServiceClient._DEFAULT_UNIVERSE + credentials_universe = getattr(credentials, "universe_domain", default_universe) + + if client_universe != credentials_universe: + raise ValueError( + "The configured universe domain " + f"({client_universe}) does not match the universe domain " + f"found in the credentials ({credentials_universe}). " + "If you haven't configured the universe domain explicitly, " + f"`{default_universe}` is the default." + ) + return True + + def _validate_universe_domain(self): + """Validates client's and credentials' universe domains are consistent. + + Returns: + bool: True iff the configured universe domain is valid. + + Raises: + ValueError: If the configured universe domain is not valid. + """ + self._is_universe_domain_valid = ( + self._is_universe_domain_valid + or ProjectServiceClient._compare_universes( + self.universe_domain, self.transport._credentials + ) + ) + return self._is_universe_domain_valid + + @property + def api_endpoint(self): + """Return the API endpoint used by the client instance. + + Returns: + str: The API endpoint used by the client instance. + """ + return self._api_endpoint + + @property + def universe_domain(self) -> str: + """Return the universe domain used by the client instance. + + Returns: + str: The universe domain used by the client instance. + """ + return self._universe_domain + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[ + Union[str, ProjectServiceTransport, Callable[..., ProjectServiceTransport]] + ] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the project service client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Optional[Union[str,ProjectServiceTransport,Callable[..., ProjectServiceTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the ProjectServiceTransport constructor. + If set to None, a transport is chosen automatically. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): + Custom options for the client. + + 1. The ``api_endpoint`` property can be used to override the + default endpoint provided by the client when ``transport`` is + not explicitly provided. Only if this property is not set and + ``transport`` was not explicitly provided, the endpoint is + determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment + variable, which have one of the following values: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto-switch to the + default mTLS endpoint if client certificate is present; this is + the default value). + + 2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide a client certificate for mTLS transport. If + not provided, the default SSL client certificate will be used if + present. 
If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + 3. The ``universe_domain`` property can be used to override the + default "googleapis.com" universe. Note that the ``api_endpoint`` + property still takes precedence; and ``universe_domain`` is + currently not supported for mTLS. + + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + """ + self._client_options = client_options + if isinstance(self._client_options, dict): + self._client_options = client_options_lib.from_dict(self._client_options) + if self._client_options is None: + self._client_options = client_options_lib.ClientOptions() + self._client_options = cast( + client_options_lib.ClientOptions, self._client_options + ) + + universe_domain_opt = getattr(self._client_options, "universe_domain", None) + + ( + self._use_client_cert, + self._use_mtls_endpoint, + self._universe_domain_env, + ) = ProjectServiceClient._read_environment_variables() + self._client_cert_source = ProjectServiceClient._get_client_cert_source( + self._client_options.client_cert_source, self._use_client_cert + ) + self._universe_domain = ProjectServiceClient._get_universe_domain( + universe_domain_opt, self._universe_domain_env + ) + self._api_endpoint = None # updated below, depending on `transport` + + # Initialize the universe domain validation. + self._is_universe_domain_valid = False + + api_key_value = getattr(self._client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError( + "client_options.api_key and credentials are mutually exclusive" + ) + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + transport_provided = isinstance(transport, ProjectServiceTransport) + if transport_provided: + # transport is a ProjectServiceTransport instance. + if credentials or self._client_options.credentials_file or api_key_value: + raise ValueError( + "When providing a transport instance, " + "provide its credentials directly." + ) + if self._client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." 
+ ) + self._transport = cast(ProjectServiceTransport, transport) + self._api_endpoint = self._transport.host + + self._api_endpoint = ( + self._api_endpoint + or ProjectServiceClient._get_api_endpoint( + self._client_options.api_endpoint, + self._client_cert_source, + self._universe_domain, + self._use_mtls_endpoint, + ) + ) + + if not transport_provided: + import google.auth._default # type: ignore + + if api_key_value and hasattr( + google.auth._default, "get_api_key_credentials" + ): + credentials = google.auth._default.get_api_key_credentials( + api_key_value + ) + + transport_init: Union[ + Type[ProjectServiceTransport], Callable[..., ProjectServiceTransport] + ] = ( + type(self).get_transport_class(transport) + if isinstance(transport, str) or transport is None + else cast(Callable[..., ProjectServiceTransport], transport) + ) + # initialize with the provided callable or the passed in class + self._transport = transport_init( + credentials=credentials, + credentials_file=self._client_options.credentials_file, + host=self._api_endpoint, + scopes=self._client_options.scopes, + client_cert_source_for_mtls=self._client_cert_source, + quota_project_id=self._client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + api_audience=self._client_options.api_audience, + ) + + def provision_project( + self, + request: Optional[Union[project_service.ProvisionProjectRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Provisions the project resource. During the process, related + systems will get prepared and initialized. + + Caller must read the `Terms for data + use `__, and + optionally specify in request to provide consent to that service + terms. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import discoveryengine_v1 + + def sample_provision_project(): + # Create a client + client = discoveryengine_v1.ProjectServiceClient() + + # Initialize request argument(s) + request = discoveryengine_v1.ProvisionProjectRequest( + name="name_value", + accept_data_use_terms=True, + data_use_terms_version="data_use_terms_version_value", + ) + + # Make the request + operation = client.provision_project(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.discoveryengine_v1.types.ProvisionProjectRequest, dict]): + The request object. Request for + [ProjectService.ProvisionProject][google.cloud.discoveryengine.v1.ProjectService.ProvisionProject] + method. + name (str): + Required. Full resource name of a + [Project][google.cloud.discoveryengine.v1.Project], such + as ``projects/{project_id_or_number}``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.discoveryengine_v1.types.Project` + Metadata and configurations for a Google Cloud project + in the service. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, project_service.ProvisionProjectRequest): + request = project_service.ProvisionProjectRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.provision_project] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + project.Project, + metadata_type=project_service.ProvisionProjectMetadata, + ) + + # Done; return the response. + return response + + def __enter__(self) -> "ProjectServiceClient": + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! + """ + self.transport.close() + + def list_operations( + self, + request: Optional[operations_pb2.ListOperationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Lists operations that match the specified filter in the request. + + Args: + request (:class:`~.operations_pb2.ListOperationsRequest`): + The request object. Request message for + `ListOperations` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.ListOperationsResponse: + Response message for ``ListOperations`` method. + """ + # Create or coerce a protobuf request object. 
+ # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.ListOperationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.list_operations, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_operation( + self, + request: Optional[operations_pb2.GetOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. + + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.get_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def cancel_operation( + self, + request: Optional[operations_pb2.CancelOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Starts asynchronous cancellation on a long-running operation. + + The server makes a best effort to cancel the operation, but success + is not guaranteed. If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.CancelOperationRequest`): + The request object. Request message for + `CancelOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.CancelOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.cancel_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("ProjectServiceClient",) diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/project_service/transports/__init__.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/project_service/transports/__init__.py new file mode 100644 index 000000000000..bfc15c764467 --- /dev/null +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/project_service/transports/__init__.py @@ -0,0 +1,36 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +from typing import Dict, Type + +from .base import ProjectServiceTransport +from .grpc import ProjectServiceGrpcTransport +from .grpc_asyncio import ProjectServiceGrpcAsyncIOTransport +from .rest import ProjectServiceRestInterceptor, ProjectServiceRestTransport + +# Compile a registry of transports. 
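+# (The keys are the transport labels accepted by the client, e.g. passing
+# transport="rest" to ProjectServiceClient selects ProjectServiceRestTransport.)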
+_transport_registry = OrderedDict() # type: Dict[str, Type[ProjectServiceTransport]] +_transport_registry["grpc"] = ProjectServiceGrpcTransport +_transport_registry["grpc_asyncio"] = ProjectServiceGrpcAsyncIOTransport +_transport_registry["rest"] = ProjectServiceRestTransport + +__all__ = ( + "ProjectServiceTransport", + "ProjectServiceGrpcTransport", + "ProjectServiceGrpcAsyncIOTransport", + "ProjectServiceRestTransport", + "ProjectServiceRestInterceptor", +) diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/project_service/transports/base.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/project_service/transports/base.py new file mode 100644 index 000000000000..8a4c4fc62044 --- /dev/null +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/project_service/transports/base.py @@ -0,0 +1,193 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import abc +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union + +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1, operations_v1 +from google.api_core import retry as retries +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.discoveryengine_v1 import gapic_version as package_version +from google.cloud.discoveryengine_v1.types import project_service + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +class ProjectServiceTransport(abc.ABC): + """Abstract transport class for ProjectService.""" + + AUTH_SCOPES = ("https://www.googleapis.com/auth/cloud-platform",) + + DEFAULT_HOST: str = "discoveryengine.googleapis.com" + + def __init__( + self, + *, + host: str = DEFAULT_HOST, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + **kwargs, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'discoveryengine.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. 
+ credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A list of scopes. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + """ + + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} + + # Save the scopes. + self._scopes = scopes + + # If no credentials are provided, then determine the appropriate + # defaults. + if credentials and credentials_file: + raise core_exceptions.DuplicateCredentialArgs( + "'credentials_file' and 'credentials' are mutually exclusive" + ) + + if credentials_file is not None: + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, **scopes_kwargs, quota_project_id=quota_project_id + ) + elif credentials is None: + credentials, _ = google.auth.default( + **scopes_kwargs, quota_project_id=quota_project_id + ) + # Don't apply audience if the credentials file passed from user. + if hasattr(credentials, "with_gdch_audience"): + credentials = credentials.with_gdch_audience( + api_audience if api_audience else host + ) + + # If the credentials are service account credentials, then always try to use self signed JWT. + if ( + always_use_jwt_access + and isinstance(credentials, service_account.Credentials) + and hasattr(service_account.Credentials, "with_always_use_jwt_access") + ): + credentials = credentials.with_always_use_jwt_access(True) + + # Save the credentials. + self._credentials = credentials + + # Save the hostname. Default to port 443 (HTTPS) if none is specified. + if ":" not in host: + host += ":443" + self._host = host + + @property + def host(self): + return self._host + + def _prep_wrapped_messages(self, client_info): + # Precompute the wrapped methods. + self._wrapped_methods = { + self.provision_project: gapic_v1.method.wrap_method( + self.provision_project, + default_timeout=None, + client_info=client_info, + ), + } + + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! 
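Two details of the base transport constructor above are easy to miss: `credentials` and `credentials_file` are mutually exclusive, and a host given without a port is normalised to `:443`. The sketch below mirrors both checks in isolation, using a plain `ValueError` where the generated code raises `core_exceptions.DuplicateCredentialArgs`.

```python
from typing import Optional


def check_credential_args(
    credentials: Optional[object] = None,
    credentials_file: Optional[str] = None,
) -> None:
    # Mirrors the guard in the base transport: callers may pass a credentials
    # object or a credentials file path, but not both.
    if credentials and credentials_file:
        raise ValueError("'credentials_file' and 'credentials' are mutually exclusive")


def normalize_host(host: str) -> str:
    # Mirrors the hostname handling: default to port 443 (HTTPS) when the
    # caller did not specify a port explicitly.
    return host if ":" in host else host + ":443"


if __name__ == "__main__":
    print(normalize_host("discoveryengine.googleapis.com"))  # ...googleapis.com:443
    print(normalize_host("localhost:8080"))                  # unchanged
    check_credential_args(credentials_file="service-account.json")  # ok
```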
+ """ + raise NotImplementedError() + + @property + def operations_client(self): + """Return the client designed to process long-running operations.""" + raise NotImplementedError() + + @property + def provision_project( + self, + ) -> Callable[ + [project_service.ProvisionProjectRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], + Union[ + operations_pb2.ListOperationsResponse, + Awaitable[operations_pb2.ListOperationsResponse], + ], + ]: + raise NotImplementedError() + + @property + def get_operation( + self, + ) -> Callable[ + [operations_pb2.GetOperationRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None,]: + raise NotImplementedError() + + @property + def kind(self) -> str: + raise NotImplementedError() + + +__all__ = ("ProjectServiceTransport",) diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/project_service/transports/grpc.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/project_service/transports/grpc.py new file mode 100644 index 000000000000..b66b734c8bef --- /dev/null +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/project_service/transports/grpc.py @@ -0,0 +1,347 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, grpc_helpers, operations_v1 +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +import grpc # type: ignore + +from google.cloud.discoveryengine_v1.types import project_service + +from .base import DEFAULT_CLIENT_INFO, ProjectServiceTransport + + +class ProjectServiceGrpcTransport(ProjectServiceTransport): + """gRPC backend transport for ProjectService. + + Service for operations on the + [Project][google.cloud.discoveryengine.v1.Project]. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. 
+ """ + + _stubs: Dict[str, Callable] + + def __init__( + self, + *, + host: str = "discoveryengine.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'discoveryengine.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if a ``channel`` instance is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if a ``channel`` instance is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if a ``channel`` instance is provided. + channel (Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]]): + A ``Channel`` instance through which to make calls, or a Callable + that constructs and returns one. If set to None, ``self.create_channel`` + is used to create the channel. If a Callable is given, it will be called + with the same arguments as used in ``self.create_channel``. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if a ``channel`` instance is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. 
+ google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + self._operations_client: Optional[operations_v1.OperationsClient] = None + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if isinstance(channel, grpc.Channel): + # Ignore credentials if a channel was passed. + credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + # initialize with the provided callable or the default channel + channel_init = channel or type(self).create_channel + self._grpc_channel = channel_init( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @classmethod + def create_channel( + cls, + host: str = "discoveryengine.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> grpc.Channel: + """Create and return a gRPC channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. 
+ quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + grpc.Channel: A gRPC channel object. + + Raises: + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + + return grpc_helpers.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs, + ) + + @property + def grpc_channel(self) -> grpc.Channel: + """Return the channel designed to connect to this service.""" + return self._grpc_channel + + @property + def operations_client(self) -> operations_v1.OperationsClient: + """Create the client designed to process long-running operations. + + This property caches on the instance; repeated calls return the same + client. + """ + # Quick check: Only create a new client if we do not already have one. + if self._operations_client is None: + self._operations_client = operations_v1.OperationsClient(self.grpc_channel) + + # Return the client from cache. + return self._operations_client + + @property + def provision_project( + self, + ) -> Callable[[project_service.ProvisionProjectRequest], operations_pb2.Operation]: + r"""Return a callable for the provision project method over gRPC. + + Provisions the project resource. During the process, related + systems will get prepared and initialized. + + Caller must read the `Terms for data + use `__, and + optionally specify in request to provide consent to that service + terms. + + Returns: + Callable[[~.ProvisionProjectRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "provision_project" not in self._stubs: + self._stubs["provision_project"] = self.grpc_channel.unary_unary( + "/google.cloud.discoveryengine.v1.ProjectService/ProvisionProject", + request_serializer=project_service.ProvisionProjectRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["provision_project"] + + def close(self): + self.grpc_channel.close() + + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None]: + r"""Return a callable for the cancel_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "cancel_operation" not in self._stubs: + self._stubs["cancel_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/CancelOperation", + request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["cancel_operation"] + + @property + def get_operation( + self, + ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: + r"""Return a callable for the get_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
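The `provision_project` property above shows the stub-caching idiom used for every RPC on this transport: the gRPC stub is created on first access, stored in `self._stubs`, and reused on later calls, and the operations mixins that follow repeat the same pattern. Below is a dependency-free sketch of that idiom, with a fake channel standing in for `grpc.Channel`.

```python
from typing import Callable, Dict


class _FakeChannel:
    """Placeholder for a gRPC channel; unary_unary normally builds a stub."""

    def unary_unary(self, method_path: str) -> Callable[[dict], str]:
        def _call(request: dict) -> str:
            return f"called {method_path} with {request}"

        return _call


class _LazyStubTransport:
    def __init__(self) -> None:
        self._channel = _FakeChannel()
        self._stubs: Dict[str, Callable] = {}

    @property
    def provision_project(self) -> Callable[[dict], str]:
        # Create the stub only once, then serve it from the cache.
        if "provision_project" not in self._stubs:
            self._stubs["provision_project"] = self._channel.unary_unary(
                "/google.cloud.discoveryengine.v1.ProjectService/ProvisionProject"
            )
        return self._stubs["provision_project"]


if __name__ == "__main__":
    transport = _LazyStubTransport()
    print(transport.provision_project({"name": "projects/example"}))
    assert transport.provision_project is transport.provision_project  # cached
```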
+ if "get_operation" not in self._stubs: + self._stubs["get_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/GetOperation", + request_serializer=operations_pb2.GetOperationRequest.SerializeToString, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["get_operation"] + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse + ]: + r"""Return a callable for the list_operations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_operations" not in self._stubs: + self._stubs["list_operations"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/ListOperations", + request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, + response_deserializer=operations_pb2.ListOperationsResponse.FromString, + ) + return self._stubs["list_operations"] + + @property + def kind(self) -> str: + return "grpc" + + +__all__ = ("ProjectServiceGrpcTransport",) diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/project_service/transports/grpc_asyncio.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/project_service/transports/grpc_asyncio.py new file mode 100644 index 000000000000..c5add0db0154 --- /dev/null +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/project_service/transports/grpc_asyncio.py @@ -0,0 +1,361 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1, grpc_helpers_async, operations_v1 +from google.api_core import retry_async as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +import grpc # type: ignore +from grpc.experimental import aio # type: ignore + +from google.cloud.discoveryengine_v1.types import project_service + +from .base import DEFAULT_CLIENT_INFO, ProjectServiceTransport +from .grpc import ProjectServiceGrpcTransport + + +class ProjectServiceGrpcAsyncIOTransport(ProjectServiceTransport): + """gRPC AsyncIO backend transport for ProjectService. + + Service for operations on the + [Project][google.cloud.discoveryengine.v1.Project]. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. 
+ + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + + _grpc_channel: aio.Channel + _stubs: Dict[str, Callable] = {} + + @classmethod + def create_channel( + cls, + host: str = "discoveryengine.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> aio.Channel: + """Create and return a gRPC AsyncIO channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + aio.Channel: A gRPC AsyncIO channel object. + """ + + return grpc_helpers_async.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs, + ) + + def __init__( + self, + *, + host: str = "discoveryengine.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[Union[aio.Channel, Callable[..., aio.Channel]]] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'discoveryengine.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if a ``channel`` instance is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if a ``channel`` instance is provided. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + channel (Optional[Union[aio.Channel, Callable[..., aio.Channel]]]): + A ``Channel`` instance through which to make calls, or a Callable + that constructs and returns one. 
If set to None, ``self.create_channel`` + is used to create the channel. If a Callable is given, it will be called + with the same arguments as used in ``self.create_channel``. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if a ``channel`` instance is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + self._operations_client: Optional[operations_v1.OperationsAsyncClient] = None + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if isinstance(channel, aio.Channel): + # Ignore credentials if a channel was passed. + credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. 
+ if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + # initialize with the provided callable or the default channel + channel_init = channel or type(self).create_channel + self._grpc_channel = channel_init( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @property + def grpc_channel(self) -> aio.Channel: + """Create the channel designed to connect to this service. + + This property caches on the instance; repeated calls return + the same channel. + """ + # Return the channel from cache. + return self._grpc_channel + + @property + def operations_client(self) -> operations_v1.OperationsAsyncClient: + """Create the client designed to process long-running operations. + + This property caches on the instance; repeated calls return the same + client. + """ + # Quick check: Only create a new client if we do not already have one. + if self._operations_client is None: + self._operations_client = operations_v1.OperationsAsyncClient( + self.grpc_channel + ) + + # Return the client from cache. + return self._operations_client + + @property + def provision_project( + self, + ) -> Callable[ + [project_service.ProvisionProjectRequest], Awaitable[operations_pb2.Operation] + ]: + r"""Return a callable for the provision project method over gRPC. + + Provisions the project resource. During the process, related + systems will get prepared and initialized. + + Caller must read the `Terms for data + use `__, and + optionally specify in request to provide consent to that service + terms. + + Returns: + Callable[[~.ProvisionProjectRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "provision_project" not in self._stubs: + self._stubs["provision_project"] = self.grpc_channel.unary_unary( + "/google.cloud.discoveryengine.v1.ProjectService/ProvisionProject", + request_serializer=project_service.ProvisionProjectRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["provision_project"] + + def _prep_wrapped_messages(self, client_info): + """Precompute the wrapped methods, overriding the base class method to use async wrappers.""" + self._wrapped_methods = { + self.provision_project: gapic_v1.method_async.wrap_method( + self.provision_project, + default_timeout=None, + client_info=client_info, + ), + } + + def close(self): + return self.grpc_channel.close() + + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None]: + r"""Return a callable for the cancel_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "cancel_operation" not in self._stubs: + self._stubs["cancel_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/CancelOperation", + request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["cancel_operation"] + + @property + def get_operation( + self, + ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: + r"""Return a callable for the get_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_operation" not in self._stubs: + self._stubs["get_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/GetOperation", + request_serializer=operations_pb2.GetOperationRequest.SerializeToString, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["get_operation"] + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse + ]: + r"""Return a callable for the list_operations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "list_operations" not in self._stubs: + self._stubs["list_operations"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/ListOperations", + request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, + response_deserializer=operations_pb2.ListOperationsResponse.FromString, + ) + return self._stubs["list_operations"] + + +__all__ = ("ProjectServiceGrpcAsyncIOTransport",) diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/project_service/transports/rest.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/project_service/transports/rest.py new file mode 100644 index 000000000000..b8a0f24db978 --- /dev/null +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/project_service/transports/rest.py @@ -0,0 +1,879 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +import dataclasses +import json # type: ignore +import re +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import ( + gapic_v1, + operations_v1, + path_template, + rest_helpers, + rest_streaming, +) +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.transport.requests import AuthorizedSession # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.protobuf import json_format +import grpc # type: ignore +from requests import __version__ as requests_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + + +from google.longrunning import operations_pb2 # type: ignore + +from google.cloud.discoveryengine_v1.types import project_service + +from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO +from .base import ProjectServiceTransport + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) + + +class ProjectServiceRestInterceptor: + """Interceptor for ProjectService. + + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. + Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the ProjectServiceRestTransport. + + .. 
code-block:: python + class MyCustomProjectServiceInterceptor(ProjectServiceRestInterceptor): + def pre_provision_project(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_provision_project(self, response): + logging.log(f"Received response: {response}") + return response + + transport = ProjectServiceRestTransport(interceptor=MyCustomProjectServiceInterceptor()) + client = ProjectServiceClient(transport=transport) + + + """ + + def pre_provision_project( + self, + request: project_service.ProvisionProjectRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[project_service.ProvisionProjectRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for provision_project + + Override in a subclass to manipulate the request or metadata + before they are sent to the ProjectService server. + """ + return request, metadata + + def post_provision_project( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for provision_project + + Override in a subclass to manipulate the response + after it is returned by the ProjectService server but before + it is returned to user code. + """ + return response + + def pre_cancel_operation( + self, + request: operations_pb2.CancelOperationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[operations_pb2.CancelOperationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for cancel_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the ProjectService server. + """ + return request, metadata + + def post_cancel_operation(self, response: None) -> None: + """Post-rpc interceptor for cancel_operation + + Override in a subclass to manipulate the response + after it is returned by the ProjectService server but before + it is returned to user code. + """ + return response + + def pre_get_operation( + self, + request: operations_pb2.GetOperationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[operations_pb2.GetOperationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the ProjectService server. + """ + return request, metadata + + def post_get_operation( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for get_operation + + Override in a subclass to manipulate the response + after it is returned by the ProjectService server but before + it is returned to user code. + """ + return response + + def pre_list_operations( + self, + request: operations_pb2.ListOperationsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[operations_pb2.ListOperationsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_operations + + Override in a subclass to manipulate the request or metadata + before they are sent to the ProjectService server. + """ + return request, metadata + + def post_list_operations( + self, response: operations_pb2.ListOperationsResponse + ) -> operations_pb2.ListOperationsResponse: + """Post-rpc interceptor for list_operations + + Override in a subclass to manipulate the response + after it is returned by the ProjectService server but before + it is returned to user code. 
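The interceptor above is a set of paired hooks: each `pre_*` method may rewrite the request and metadata before the HTTP call, and each `post_*` method may rewrite the response before it reaches user code. The following self-contained sketch shows how a transport threads one call through such hooks; the request and response here are plain dicts rather than the real protobuf messages.

```python
from typing import Dict, Sequence, Tuple

Metadata = Sequence[Tuple[str, str]]


class LoggingInterceptor:
    """Toy interceptor with the same pre/post shape as the generated one."""

    def pre_call(self, request: Dict, metadata: Metadata) -> Tuple[Dict, Metadata]:
        print(f"about to send: {request}")
        return request, tuple(metadata) + (("x-debug", "1"),)

    def post_call(self, response: Dict) -> Dict:
        print(f"received: {response}")
        return response


def call_with_interceptor(request: Dict, interceptor: LoggingInterceptor) -> Dict:
    # The transport lets the interceptor adjust the request and metadata,
    # performs the (fake) HTTP call, then lets it adjust the response.
    request, metadata = interceptor.pre_call(request, ())
    response = {"echo": request, "metadata_len": len(metadata)}  # stand-in for HTTP
    return interceptor.post_call(response)


if __name__ == "__main__":
    call_with_interceptor({"name": "projects/example"}, LoggingInterceptor())
```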
+ """ + return response + + +@dataclasses.dataclass +class ProjectServiceRestStub: + _session: AuthorizedSession + _host: str + _interceptor: ProjectServiceRestInterceptor + + +class ProjectServiceRestTransport(ProjectServiceTransport): + """REST backend transport for ProjectService. + + Service for operations on the + [Project][google.cloud.discoveryengine.v1.Project]. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends JSON representations of protocol buffers over HTTP/1.1 + + """ + + def __init__( + self, + *, + host: str = "discoveryengine.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", + interceptor: Optional[ProjectServiceRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'discoveryengine.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. It is ignored + if ``channel`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. + """ + # Run the base constructor + # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. 
+        # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the
+        # credentials object
+        maybe_url_match = re.match("^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host)
+        if maybe_url_match is None:
+            raise ValueError(
+                f"Unexpected hostname structure: {host}"
+            )  # pragma: NO COVER
+
+        url_match_items = maybe_url_match.groupdict()
+
+        host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host
+
+        super().__init__(
+            host=host,
+            credentials=credentials,
+            client_info=client_info,
+            always_use_jwt_access=always_use_jwt_access,
+            api_audience=api_audience,
+        )
+        self._session = AuthorizedSession(
+            self._credentials, default_host=self.DEFAULT_HOST
+        )
+        self._operations_client: Optional[operations_v1.AbstractOperationsClient] = None
+        if client_cert_source_for_mtls:
+            self._session.configure_mtls_channel(client_cert_source_for_mtls)
+        self._interceptor = interceptor or ProjectServiceRestInterceptor()
+        self._prep_wrapped_messages(client_info)
+
+    @property
+    def operations_client(self) -> operations_v1.AbstractOperationsClient:
+        """Create the client designed to process long-running operations.
+
+        This property caches on the instance; repeated calls return the same
+        client.
+        """
+        # Only create a new client if we do not already have one.
+        if self._operations_client is None:
+            http_options: Dict[str, List[Dict[str, str]]] = {
+                "google.longrunning.Operations.CancelOperation": [
+                    {
+                        "method": "post",
+                        "uri": "/v1/{name=projects/*/operations/*}:cancel",
+                        "body": "*",
+                    },
+                    {
+                        "method": "post",
+                        "uri": "/v1/{name=projects/*/locations/*/collections/*/dataStores/*/branches/*/operations/*}:cancel",
+                        "body": "*",
+                    },
+                    {
+                        "method": "post",
+                        "uri": "/v1/{name=projects/*/locations/*/dataStores/*/branches/*/operations/*}:cancel",
+                        "body": "*",
+                    },
+                ],
+                "google.longrunning.Operations.GetOperation": [
+                    {
+                        "method": "get",
+                        "uri": "/v1/{name=projects/*/operations/*}",
+                    },
+                    {
+                        "method": "get",
+                        "uri": "/v1/{name=projects/*/locations/*/collections/*/dataConnector/operations/*}",
+                    },
+                    {
+                        "method": "get",
+                        "uri": "/v1/{name=projects/*/locations/*/collections/*/dataStores/*/branches/*/operations/*}",
+                    },
+                    {
+                        "method": "get",
+                        "uri": "/v1/{name=projects/*/locations/*/collections/*/dataStores/*/models/*/operations/*}",
+                    },
+                    {
+                        "method": "get",
+                        "uri": "/v1/{name=projects/*/locations/*/collections/*/dataStores/*/operations/*}",
+                    },
+                    {
+                        "method": "get",
+                        "uri": "/v1/{name=projects/*/locations/*/collections/*/dataStores/*/schemas/*/operations/*}",
+                    },
+                    {
+                        "method": "get",
+                        "uri": "/v1/{name=projects/*/locations/*/collections/*/dataStores/*/siteSearchEngine/operations/*}",
+                    },
+                    {
+                        "method": "get",
+                        "uri": "/v1/{name=projects/*/locations/*/collections/*/dataStores/*/siteSearchEngine/targetSites/operations/*}",
+                    },
+                    {
+                        "method": "get",
+                        "uri": "/v1/{name=projects/*/locations/*/collections/*/engines/*/operations/*}",
+                    },
+                    {
+                        "method": "get",
+                        "uri": "/v1/{name=projects/*/locations/*/collections/*/operations/*}",
+                    },
+                    {
+                        "method": "get",
+                        "uri": "/v1/{name=projects/*/locations/*/dataStores/*/branches/*/operations/*}",
+                    },
+                    {
+                        "method": "get",
+                        "uri": "/v1/{name=projects/*/locations/*/dataStores/*/models/*/operations/*}",
+                    },
+                    {
+                        "method": "get",
+                        "uri": "/v1/{name=projects/*/locations/*/dataStores/*/operations/*}",
+                    },
+                    {
+                        "method": "get",
+                        "uri": "/v1/{name=projects/*/locations/*/operations/*}",
+                    },
+                    {
+                        "method": "get",
+                        "uri": "/v1/{name=projects/*/operations/*}",
+                    },
+                ],
"google.longrunning.Operations.ListOperations": [ + { + "method": "get", + "uri": "/v1/{name=projects/*}/operations", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/dataConnector}/operations", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/dataStores/*/branches/*}/operations", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/dataStores/*/models/*}/operations", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/dataStores/*/schemas/*}/operations", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/dataStores/*/siteSearchEngine/targetSites}/operations", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/dataStores/*/siteSearchEngine}/operations", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/dataStores/*}/operations", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/engines/*}/operations", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*}/operations", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/dataStores/*/branches/*}/operations", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/dataStores/*/models/*}/operations", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/dataStores/*}/operations", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*}/operations", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*}/operations", + }, + ], + } + + rest_transport = operations_v1.OperationsRestTransport( + host=self._host, + # use the credentials which are saved + credentials=self._credentials, + scopes=self._scopes, + http_options=http_options, + path_prefix="v1", + ) + + self._operations_client = operations_v1.AbstractOperationsClient( + transport=rest_transport + ) + + # Return the client from cache. + return self._operations_client + + class _ProvisionProject(ProjectServiceRestStub): + def __hash__(self): + return hash("ProvisionProject") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: project_service.ProvisionProjectRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the provision project method over HTTP. + + Args: + request (~.project_service.ProvisionProjectRequest): + The request object. Request for + [ProjectService.ProvisionProject][google.cloud.discoveryengine.v1.ProjectService.ProvisionProject] + method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{name=projects/*}:provision", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_provision_project( + request, metadata + ) + pb_request = project_service.ProvisionProjectRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_provision_project(resp) + return resp + + @property + def provision_project( + self, + ) -> Callable[[project_service.ProvisionProjectRequest], operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ProvisionProject(self._session, self._host, self._interceptor) # type: ignore + + @property + def cancel_operation(self): + return self._CancelOperation(self._session, self._host, self._interceptor) # type: ignore + + class _CancelOperation(ProjectServiceRestStub): + def __call__( + self, + request: operations_pb2.CancelOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Call the cancel operation method over HTTP. + + Args: + request (operations_pb2.CancelOperationRequest): + The request object for CancelOperation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{name=projects/*/operations/*}:cancel", + "body": "*", + }, + { + "method": "post", + "uri": "/v1/{name=projects/*/locations/*/collections/*/dataStores/*/branches/*/operations/*}:cancel", + "body": "*", + }, + { + "method": "post", + "uri": "/v1/{name=projects/*/locations/*/dataStores/*/branches/*/operations/*}:cancel", + "body": "*", + }, + ] + + request, metadata = self._interceptor.pre_cancel_operation( + request, metadata + ) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + body = json.dumps(transcoded_request["body"]) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + return self._interceptor.post_cancel_operation(None) + + @property + def get_operation(self): + return self._GetOperation(self._session, self._host, self._interceptor) # type: ignore + + class _GetOperation(ProjectServiceRestStub): + def __call__( + self, + request: operations_pb2.GetOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the get operation method over HTTP. + + Args: + request (operations_pb2.GetOperationRequest): + The request object for GetOperation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + operations_pb2.Operation: Response from GetOperation method. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/dataConnector/operations/*}", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/dataStores/*/branches/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/dataStores/*/models/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/dataStores/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/dataStores/*/schemas/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/dataStores/*/siteSearchEngine/operations/*}", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/dataStores/*/siteSearchEngine/targetSites/operations/*}", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/engines/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/dataStores/*/branches/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/dataStores/*/models/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/dataStores/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/operations/*}", + }, + ] + + request, metadata = self._interceptor.pre_get_operation(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = operations_pb2.Operation() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_get_operation(resp) + return resp + + @property + def list_operations(self): + return self._ListOperations(self._session, self._host, self._interceptor) # type: ignore + + class _ListOperations(ProjectServiceRestStub): + def __call__( + self, + request: operations_pb2.ListOperationsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Call the list operations method over HTTP. + + Args: + request (operations_pb2.ListOperationsRequest): + The request object for ListOperations method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + operations_pb2.ListOperationsResponse: Response from ListOperations method. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*}/operations", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/dataConnector}/operations", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/dataStores/*/branches/*}/operations", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/dataStores/*/models/*}/operations", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/dataStores/*/schemas/*}/operations", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/dataStores/*/siteSearchEngine/targetSites}/operations", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/dataStores/*/siteSearchEngine}/operations", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/dataStores/*}/operations", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/engines/*}/operations", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*}/operations", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/dataStores/*/branches/*}/operations", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/dataStores/*/models/*}/operations", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/dataStores/*}/operations", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*}/operations", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*}/operations", + }, + ] + + request, metadata = self._interceptor.pre_list_operations(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = operations_pb2.ListOperationsResponse() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_list_operations(resp) + return resp + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__ = ("ProjectServiceRestTransport",) diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/rank_service/__init__.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/rank_service/__init__.py new file mode 100644 index 000000000000..f34036ed49c9 --- /dev/null +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/rank_service/__init__.py @@ -0,0 +1,22 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from .async_client import RankServiceAsyncClient +from .client import RankServiceClient + +__all__ = ( + "RankServiceClient", + "RankServiceAsyncClient", +) diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/rank_service/async_client.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/rank_service/async_client.py new file mode 100644 index 000000000000..f67bf0b27bd2 --- /dev/null +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/rank_service/async_client.py @@ -0,0 +1,530 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
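The `_GetOperation` and `_ListOperations` REST stubs above resolve their request URIs by transcoding the request against the declared HTTP bindings. The following is a standalone sketch of that step (not part of this change), using `google.api_core.path_template` with two of the bindings listed above and a made-up operation name:

```python
from google.api_core import path_template

# A subset of the GetOperation bindings declared above.
http_options = [
    {"method": "get", "uri": "/v1/{name=projects/*/operations/*}"},
    {"method": "get", "uri": "/v1/{name=projects/*/locations/*/operations/*}"},
]

# The first binding whose pattern matches the request fields supplies the
# HTTP verb and the expanded URI; any remaining fields become query params.
transcoded = path_template.transcode(
    http_options, name="projects/my-project/locations/global/operations/op-123"
)
print(transcoded["method"])  # get
print(transcoded["uri"])  # /v1/projects/my-project/locations/global/operations/op-123
```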
+# +from collections import OrderedDict +import functools +import re +from typing import ( + Callable, + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, +) + +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry_async as retries +from google.api_core.client_options import ClientOptions +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.discoveryengine_v1 import gapic_version as package_version + +try: + OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.AsyncRetry, object, None] # type: ignore + +from google.cloud.location import locations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore + +from google.cloud.discoveryengine_v1.types import rank_service + +from .client import RankServiceClient +from .transports.base import DEFAULT_CLIENT_INFO, RankServiceTransport +from .transports.grpc_asyncio import RankServiceGrpcAsyncIOTransport + + +class RankServiceAsyncClient: + """Service for ranking text records.""" + + _client: RankServiceClient + + # Copy defaults from the synchronous client for use here. + # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. + DEFAULT_ENDPOINT = RankServiceClient.DEFAULT_ENDPOINT + DEFAULT_MTLS_ENDPOINT = RankServiceClient.DEFAULT_MTLS_ENDPOINT + _DEFAULT_ENDPOINT_TEMPLATE = RankServiceClient._DEFAULT_ENDPOINT_TEMPLATE + _DEFAULT_UNIVERSE = RankServiceClient._DEFAULT_UNIVERSE + + ranking_config_path = staticmethod(RankServiceClient.ranking_config_path) + parse_ranking_config_path = staticmethod( + RankServiceClient.parse_ranking_config_path + ) + common_billing_account_path = staticmethod( + RankServiceClient.common_billing_account_path + ) + parse_common_billing_account_path = staticmethod( + RankServiceClient.parse_common_billing_account_path + ) + common_folder_path = staticmethod(RankServiceClient.common_folder_path) + parse_common_folder_path = staticmethod(RankServiceClient.parse_common_folder_path) + common_organization_path = staticmethod(RankServiceClient.common_organization_path) + parse_common_organization_path = staticmethod( + RankServiceClient.parse_common_organization_path + ) + common_project_path = staticmethod(RankServiceClient.common_project_path) + parse_common_project_path = staticmethod( + RankServiceClient.parse_common_project_path + ) + common_location_path = staticmethod(RankServiceClient.common_location_path) + parse_common_location_path = staticmethod( + RankServiceClient.parse_common_location_path + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + RankServiceAsyncClient: The constructed client. + """ + return RankServiceClient.from_service_account_info.__func__(RankServiceAsyncClient, info, *args, **kwargs) # type: ignore + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. 
+ + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + RankServiceAsyncClient: The constructed client. + """ + return RankServiceClient.from_service_account_file.__func__(RankServiceAsyncClient, filename, *args, **kwargs) # type: ignore + + from_service_account_json = from_service_account_file + + @classmethod + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[ClientOptions] = None + ): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + return RankServiceClient.get_mtls_endpoint_and_cert_source(client_options) # type: ignore + + @property + def transport(self) -> RankServiceTransport: + """Returns the transport used by the client instance. + + Returns: + RankServiceTransport: The transport used by the client instance. + """ + return self._client.transport + + @property + def api_endpoint(self): + """Return the API endpoint used by the client instance. + + Returns: + str: The API endpoint used by the client instance. + """ + return self._client._api_endpoint + + @property + def universe_domain(self) -> str: + """Return the universe domain used by the client instance. + + Returns: + str: The universe domain used + by the client instance. + """ + return self._client._universe_domain + + get_transport_class = functools.partial( + type(RankServiceClient).get_transport_class, type(RankServiceClient) + ) + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[ + Union[str, RankServiceTransport, Callable[..., RankServiceTransport]] + ] = "grpc_asyncio", + client_options: Optional[ClientOptions] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the rank service async client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. 
+ transport (Optional[Union[str,RankServiceTransport,Callable[..., RankServiceTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport to use. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the RankServiceTransport constructor. + If set to None, a transport is chosen automatically. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): + Custom options for the client. + + 1. The ``api_endpoint`` property can be used to override the + default endpoint provided by the client when ``transport`` is + not explicitly provided. Only if this property is not set and + ``transport`` was not explicitly provided, the endpoint is + determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment + variable, which have one of the following values: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto-switch to the + default mTLS endpoint if client certificate is present; this is + the default value). + + 2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide a client certificate for mTLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + 3. The ``universe_domain`` property can be used to override the + default "googleapis.com" universe. Note that ``api_endpoint`` + property still takes precedence; and ``universe_domain`` is + currently not supported for mTLS. + + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + """ + self._client = RankServiceClient( + credentials=credentials, + transport=transport, + client_options=client_options, + client_info=client_info, + ) + + async def rank( + self, + request: Optional[Union[rank_service.RankRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> rank_service.RankResponse: + r"""Ranks a list of text records based on the given input + query. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import discoveryengine_v1 + + async def sample_rank(): + # Create a client + client = discoveryengine_v1.RankServiceAsyncClient() + + # Initialize request argument(s) + request = discoveryengine_v1.RankRequest( + ranking_config="ranking_config_value", + ) + + # Make the request + response = await client.rank(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.discoveryengine_v1.types.RankRequest, dict]]): + The request object. Request message for + [RankService.Rank][google.cloud.discoveryengine.v1.RankService.Rank] + method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.discoveryengine_v1.types.RankResponse: + Response message for + [RankService.Rank][google.cloud.discoveryengine.v1.RankService.Rank] + method. + + """ + # Create or coerce a protobuf request object. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, rank_service.RankRequest): + request = rank_service.RankRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.rank] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("ranking_config", request.ranking_config),) + ), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_operations( + self, + request: Optional[operations_pb2.ListOperationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Lists operations that match the specified filter in the request. + + Args: + request (:class:`~.operations_pb2.ListOperationsRequest`): + The request object. Request message for + `ListOperations` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.ListOperationsResponse: + Response message for ``ListOperations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.ListOperationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_operations, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_operation( + self, + request: Optional[operations_pb2.GetOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. + + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def cancel_operation( + self, + request: Optional[operations_pb2.CancelOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Starts asynchronous cancellation on a long-running operation. + + The server makes a best effort to cancel the operation, but success + is not guaranteed. If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.CancelOperationRequest`): + The request object. Request message for + `CancelOperation` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. 
+ if isinstance(request, dict): + request = operations_pb2.CancelOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.cancel_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def __aenter__(self) -> "RankServiceAsyncClient": + return self + + async def __aexit__(self, exc_type, exc, tb): + await self.transport.close() + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("RankServiceAsyncClient",) diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/rank_service/client.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/rank_service/client.py new file mode 100644 index 000000000000..cb4c617a1e9d --- /dev/null +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/rank_service/client.py @@ -0,0 +1,954 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
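The asynchronous client is now complete; for orientation, here is a runnable variant of the generated `rank` snippet above. This is a minimal sketch, assuming application default credentials are configured, with `my-project`, `global`, and `default_ranking_config` as illustrative placeholders:

```python
import asyncio

from google.cloud import discoveryengine_v1


async def sample_rank() -> None:
    client = discoveryengine_v1.RankServiceAsyncClient()

    # Build the fully-qualified resource name with the helper re-exported
    # from the synchronous client.
    ranking_config = client.ranking_config_path(
        project="my-project",
        location="global",
        ranking_config="default_ranking_config",
    )

    request = discoveryengine_v1.RankRequest(ranking_config=ranking_config)
    response = await client.rank(request=request)
    print(response)


asyncio.run(sample_rank())
```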
+#
+from collections import OrderedDict
+import os
+import re
+from typing import (
+    Callable,
+    Dict,
+    Mapping,
+    MutableMapping,
+    MutableSequence,
+    Optional,
+    Sequence,
+    Tuple,
+    Type,
+    Union,
+    cast,
+)
+import warnings
+
+from google.api_core import client_options as client_options_lib
+from google.api_core import exceptions as core_exceptions
+from google.api_core import gapic_v1
+from google.api_core import retry as retries
+from google.auth import credentials as ga_credentials  # type: ignore
+from google.auth.exceptions import MutualTLSChannelError  # type: ignore
+from google.auth.transport import mtls  # type: ignore
+from google.auth.transport.grpc import SslCredentials  # type: ignore
+from google.oauth2 import service_account  # type: ignore
+
+from google.cloud.discoveryengine_v1 import gapic_version as package_version
+
+try:
+    OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None]
+except AttributeError:  # pragma: NO COVER
+    OptionalRetry = Union[retries.Retry, object, None]  # type: ignore
+
+from google.cloud.location import locations_pb2  # type: ignore
+from google.longrunning import operations_pb2  # type: ignore
+
+from google.cloud.discoveryengine_v1.types import rank_service
+
+from .transports.base import DEFAULT_CLIENT_INFO, RankServiceTransport
+from .transports.grpc import RankServiceGrpcTransport
+from .transports.grpc_asyncio import RankServiceGrpcAsyncIOTransport
+from .transports.rest import RankServiceRestTransport
+
+
+class RankServiceClientMeta(type):
+    """Metaclass for the RankService client.
+
+    This provides class-level methods for building and retrieving
+    support objects (e.g. transport) without polluting the client instance
+    objects.
+    """
+
+    _transport_registry = OrderedDict()  # type: Dict[str, Type[RankServiceTransport]]
+    _transport_registry["grpc"] = RankServiceGrpcTransport
+    _transport_registry["grpc_asyncio"] = RankServiceGrpcAsyncIOTransport
+    _transport_registry["rest"] = RankServiceRestTransport
+
+    def get_transport_class(
+        cls,
+        label: Optional[str] = None,
+    ) -> Type[RankServiceTransport]:
+        """Returns an appropriate transport class.
+
+        Args:
+            label: The name of the desired transport. If none is
+                provided, then the first transport in the registry is used.
+
+        Returns:
+            The transport class to use.
+        """
+        # If a specific transport is requested, return that one.
+        if label:
+            return cls._transport_registry[label]
+
+        # No transport is requested; return the default (that is, the first one
+        # in the dictionary).
+        return next(iter(cls._transport_registry.values()))
+
+
+class RankServiceClient(metaclass=RankServiceClientMeta):
+    """Service for ranking text records."""
+
+    @staticmethod
+    def _get_default_mtls_endpoint(api_endpoint):
+        """Converts api endpoint to mTLS endpoint.
+
+        Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to
+        "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively.
+        Args:
+            api_endpoint (Optional[str]): the api endpoint to convert.
+        Returns:
+            str: converted mTLS api endpoint.
+        """
+        if not api_endpoint:
+            return api_endpoint
+
+        mtls_endpoint_re = re.compile(
+            r"(?P<name>[^.]+)(?P<mtls>\.mtls)?(?P<sandbox>\.sandbox)?(?P<googledomain>\.googleapis\.com)?"
+ ) + + m = mtls_endpoint_re.match(api_endpoint) + name, mtls, sandbox, googledomain = m.groups() + if mtls or not googledomain: + return api_endpoint + + if sandbox: + return api_endpoint.replace( + "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" + ) + + return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") + + # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. + DEFAULT_ENDPOINT = "discoveryengine.googleapis.com" + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore + DEFAULT_ENDPOINT + ) + + _DEFAULT_ENDPOINT_TEMPLATE = "discoveryengine.{UNIVERSE_DOMAIN}" + _DEFAULT_UNIVERSE = "googleapis.com" + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + RankServiceClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + RankServiceClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_file(filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> RankServiceTransport: + """Returns the transport used by the client instance. + + Returns: + RankServiceTransport: The transport used by the client + instance. 
+        """
+        return self._transport
+
+    @staticmethod
+    def ranking_config_path(
+        project: str,
+        location: str,
+        ranking_config: str,
+    ) -> str:
+        """Returns a fully-qualified ranking_config string."""
+        return "projects/{project}/locations/{location}/rankingConfigs/{ranking_config}".format(
+            project=project,
+            location=location,
+            ranking_config=ranking_config,
+        )
+
+    @staticmethod
+    def parse_ranking_config_path(path: str) -> Dict[str, str]:
+        """Parses a ranking_config path into its component segments."""
+        m = re.match(
+            r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/rankingConfigs/(?P<ranking_config>.+?)$",
+            path,
+        )
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_billing_account_path(
+        billing_account: str,
+    ) -> str:
+        """Returns a fully-qualified billing_account string."""
+        return "billingAccounts/{billing_account}".format(
+            billing_account=billing_account,
+        )
+
+    @staticmethod
+    def parse_common_billing_account_path(path: str) -> Dict[str, str]:
+        """Parse a billing_account path into its component segments."""
+        m = re.match(r"^billingAccounts/(?P<billing_account>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_folder_path(
+        folder: str,
+    ) -> str:
+        """Returns a fully-qualified folder string."""
+        return "folders/{folder}".format(
+            folder=folder,
+        )
+
+    @staticmethod
+    def parse_common_folder_path(path: str) -> Dict[str, str]:
+        """Parse a folder path into its component segments."""
+        m = re.match(r"^folders/(?P<folder>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_organization_path(
+        organization: str,
+    ) -> str:
+        """Returns a fully-qualified organization string."""
+        return "organizations/{organization}".format(
+            organization=organization,
+        )
+
+    @staticmethod
+    def parse_common_organization_path(path: str) -> Dict[str, str]:
+        """Parse an organization path into its component segments."""
+        m = re.match(r"^organizations/(?P<organization>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_project_path(
+        project: str,
+    ) -> str:
+        """Returns a fully-qualified project string."""
+        return "projects/{project}".format(
+            project=project,
+        )
+
+    @staticmethod
+    def parse_common_project_path(path: str) -> Dict[str, str]:
+        """Parse a project path into its component segments."""
+        m = re.match(r"^projects/(?P<project>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_location_path(
+        project: str,
+        location: str,
+    ) -> str:
+        """Returns a fully-qualified location string."""
+        return "projects/{project}/locations/{location}".format(
+            project=project,
+            location=location,
+        )
+
+    @staticmethod
+    def parse_common_location_path(path: str) -> Dict[str, str]:
+        """Parse a location path into its component segments."""
+        m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @classmethod
+    def get_mtls_endpoint_and_cert_source(
+        cls, client_options: Optional[client_options_lib.ClientOptions] = None
+    ):
+        """Deprecated. Return the API endpoint and client cert source for mutual TLS.
+
+        The client cert source is determined in the following order:
+        (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the
+        client cert source is None.
+        (2) if `client_options.client_cert_source` is provided, use the provided one; if the
+        default client cert source exists, use the default one; otherwise the client cert
+        source is None.
+ + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + + warnings.warn( + "get_mtls_endpoint_and_cert_source is deprecated. Use the api_endpoint property instead.", + DeprecationWarning, + ) + if client_options is None: + client_options = client_options_lib.ClientOptions() + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_client_cert not in ("true", "false"): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError( + "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Figure out the client cert source to use. + client_cert_source = None + if use_client_cert == "true": + if client_options.client_cert_source: + client_cert_source = client_options.client_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + + # Figure out which api endpoint to use. + if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + elif use_mtls_endpoint == "always" or ( + use_mtls_endpoint == "auto" and client_cert_source + ): + api_endpoint = cls.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = cls.DEFAULT_ENDPOINT + + return api_endpoint, client_cert_source + + @staticmethod + def _read_environment_variables(): + """Returns the environment variables used by the client. + + Returns: + Tuple[bool, str, str]: returns the GOOGLE_API_USE_CLIENT_CERTIFICATE, + GOOGLE_API_USE_MTLS_ENDPOINT, and GOOGLE_CLOUD_UNIVERSE_DOMAIN environment variables. + + Raises: + ValueError: If GOOGLE_API_USE_CLIENT_CERTIFICATE is not + any of ["true", "false"]. + google.auth.exceptions.MutualTLSChannelError: If GOOGLE_API_USE_MTLS_ENDPOINT + is not any of ["auto", "never", "always"]. 
+ """ + use_client_cert = os.getenv( + "GOOGLE_API_USE_CLIENT_CERTIFICATE", "false" + ).lower() + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto").lower() + universe_domain_env = os.getenv("GOOGLE_CLOUD_UNIVERSE_DOMAIN") + if use_client_cert not in ("true", "false"): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError( + "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + return use_client_cert == "true", use_mtls_endpoint, universe_domain_env + + @staticmethod + def _get_client_cert_source(provided_cert_source, use_cert_flag): + """Return the client cert source to be used by the client. + + Args: + provided_cert_source (bytes): The client certificate source provided. + use_cert_flag (bool): A flag indicating whether to use the client certificate. + + Returns: + bytes or None: The client cert source to be used by the client. + """ + client_cert_source = None + if use_cert_flag: + if provided_cert_source: + client_cert_source = provided_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + return client_cert_source + + @staticmethod + def _get_api_endpoint( + api_override, client_cert_source, universe_domain, use_mtls_endpoint + ): + """Return the API endpoint used by the client. + + Args: + api_override (str): The API endpoint override. If specified, this is always + the return value of this function and the other arguments are not used. + client_cert_source (bytes): The client certificate source used by the client. + universe_domain (str): The universe domain used by the client. + use_mtls_endpoint (str): How to use the mTLS endpoint, which depends also on the other parameters. + Possible values are "always", "auto", or "never". + + Returns: + str: The API endpoint to be used by the client. + """ + if api_override is not None: + api_endpoint = api_override + elif use_mtls_endpoint == "always" or ( + use_mtls_endpoint == "auto" and client_cert_source + ): + _default_universe = RankServiceClient._DEFAULT_UNIVERSE + if universe_domain != _default_universe: + raise MutualTLSChannelError( + f"mTLS is not supported in any universe other than {_default_universe}." + ) + api_endpoint = RankServiceClient.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = RankServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=universe_domain + ) + return api_endpoint + + @staticmethod + def _get_universe_domain( + client_universe_domain: Optional[str], universe_domain_env: Optional[str] + ) -> str: + """Return the universe domain used by the client. + + Args: + client_universe_domain (Optional[str]): The universe domain configured via the client options. + universe_domain_env (Optional[str]): The universe domain configured via the "GOOGLE_CLOUD_UNIVERSE_DOMAIN" environment variable. + + Returns: + str: The universe domain to be used by the client. + + Raises: + ValueError: If the universe domain is an empty string. 
+ """ + universe_domain = RankServiceClient._DEFAULT_UNIVERSE + if client_universe_domain is not None: + universe_domain = client_universe_domain + elif universe_domain_env is not None: + universe_domain = universe_domain_env + if len(universe_domain.strip()) == 0: + raise ValueError("Universe Domain cannot be an empty string.") + return universe_domain + + @staticmethod + def _compare_universes( + client_universe: str, credentials: ga_credentials.Credentials + ) -> bool: + """Returns True iff the universe domains used by the client and credentials match. + + Args: + client_universe (str): The universe domain configured via the client options. + credentials (ga_credentials.Credentials): The credentials being used in the client. + + Returns: + bool: True iff client_universe matches the universe in credentials. + + Raises: + ValueError: when client_universe does not match the universe in credentials. + """ + + default_universe = RankServiceClient._DEFAULT_UNIVERSE + credentials_universe = getattr(credentials, "universe_domain", default_universe) + + if client_universe != credentials_universe: + raise ValueError( + "The configured universe domain " + f"({client_universe}) does not match the universe domain " + f"found in the credentials ({credentials_universe}). " + "If you haven't configured the universe domain explicitly, " + f"`{default_universe}` is the default." + ) + return True + + def _validate_universe_domain(self): + """Validates client's and credentials' universe domains are consistent. + + Returns: + bool: True iff the configured universe domain is valid. + + Raises: + ValueError: If the configured universe domain is not valid. + """ + self._is_universe_domain_valid = ( + self._is_universe_domain_valid + or RankServiceClient._compare_universes( + self.universe_domain, self.transport._credentials + ) + ) + return self._is_universe_domain_valid + + @property + def api_endpoint(self): + """Return the API endpoint used by the client instance. + + Returns: + str: The API endpoint used by the client instance. + """ + return self._api_endpoint + + @property + def universe_domain(self) -> str: + """Return the universe domain used by the client instance. + + Returns: + str: The universe domain used by the client instance. + """ + return self._universe_domain + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[ + Union[str, RankServiceTransport, Callable[..., RankServiceTransport]] + ] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the rank service client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Optional[Union[str,RankServiceTransport,Callable[..., RankServiceTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the RankServiceTransport constructor. + If set to None, a transport is chosen automatically. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): + Custom options for the client. + + 1. 
The ``api_endpoint`` property can be used to override the + default endpoint provided by the client when ``transport`` is + not explicitly provided. Only if this property is not set and + ``transport`` was not explicitly provided, the endpoint is + determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment + variable, which have one of the following values: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto-switch to the + default mTLS endpoint if client certificate is present; this is + the default value). + + 2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide a client certificate for mTLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + 3. The ``universe_domain`` property can be used to override the + default "googleapis.com" universe. Note that the ``api_endpoint`` + property still takes precedence; and ``universe_domain`` is + currently not supported for mTLS. + + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + """ + self._client_options = client_options + if isinstance(self._client_options, dict): + self._client_options = client_options_lib.from_dict(self._client_options) + if self._client_options is None: + self._client_options = client_options_lib.ClientOptions() + self._client_options = cast( + client_options_lib.ClientOptions, self._client_options + ) + + universe_domain_opt = getattr(self._client_options, "universe_domain", None) + + ( + self._use_client_cert, + self._use_mtls_endpoint, + self._universe_domain_env, + ) = RankServiceClient._read_environment_variables() + self._client_cert_source = RankServiceClient._get_client_cert_source( + self._client_options.client_cert_source, self._use_client_cert + ) + self._universe_domain = RankServiceClient._get_universe_domain( + universe_domain_opt, self._universe_domain_env + ) + self._api_endpoint = None # updated below, depending on `transport` + + # Initialize the universe domain validation. + self._is_universe_domain_valid = False + + api_key_value = getattr(self._client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError( + "client_options.api_key and credentials are mutually exclusive" + ) + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + transport_provided = isinstance(transport, RankServiceTransport) + if transport_provided: + # transport is a RankServiceTransport instance. + if credentials or self._client_options.credentials_file or api_key_value: + raise ValueError( + "When providing a transport instance, " + "provide its credentials directly." + ) + if self._client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." 
+ ) + self._transport = cast(RankServiceTransport, transport) + self._api_endpoint = self._transport.host + + self._api_endpoint = self._api_endpoint or RankServiceClient._get_api_endpoint( + self._client_options.api_endpoint, + self._client_cert_source, + self._universe_domain, + self._use_mtls_endpoint, + ) + + if not transport_provided: + import google.auth._default # type: ignore + + if api_key_value and hasattr( + google.auth._default, "get_api_key_credentials" + ): + credentials = google.auth._default.get_api_key_credentials( + api_key_value + ) + + transport_init: Union[ + Type[RankServiceTransport], Callable[..., RankServiceTransport] + ] = ( + type(self).get_transport_class(transport) + if isinstance(transport, str) or transport is None + else cast(Callable[..., RankServiceTransport], transport) + ) + # initialize with the provided callable or the passed in class + self._transport = transport_init( + credentials=credentials, + credentials_file=self._client_options.credentials_file, + host=self._api_endpoint, + scopes=self._client_options.scopes, + client_cert_source_for_mtls=self._client_cert_source, + quota_project_id=self._client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + api_audience=self._client_options.api_audience, + ) + + def rank( + self, + request: Optional[Union[rank_service.RankRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> rank_service.RankResponse: + r"""Ranks a list of text records based on the given input + query. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import discoveryengine_v1 + + def sample_rank(): + # Create a client + client = discoveryengine_v1.RankServiceClient() + + # Initialize request argument(s) + request = discoveryengine_v1.RankRequest( + ranking_config="ranking_config_value", + ) + + # Make the request + response = client.rank(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.discoveryengine_v1.types.RankRequest, dict]): + The request object. Request message for + [RankService.Rank][google.cloud.discoveryengine.v1.RankService.Rank] + method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.discoveryengine_v1.types.RankResponse: + Response message for + [RankService.Rank][google.cloud.discoveryengine.v1.RankService.Rank] + method. + + """ + # Create or coerce a protobuf request object. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, rank_service.RankRequest): + request = rank_service.RankRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = self._transport._wrapped_methods[self._transport.rank] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("ranking_config", request.ranking_config),) + ), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def __enter__(self) -> "RankServiceClient": + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! + """ + self.transport.close() + + def list_operations( + self, + request: Optional[operations_pb2.ListOperationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Lists operations that match the specified filter in the request. + + Args: + request (:class:`~.operations_pb2.ListOperationsRequest`): + The request object. Request message for + `ListOperations` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.ListOperationsResponse: + Response message for ``ListOperations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.ListOperationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.list_operations, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_operation( + self, + request: Optional[operations_pb2.GetOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. + + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. 
+ # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.get_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def cancel_operation( + self, + request: Optional[operations_pb2.CancelOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Starts asynchronous cancellation on a long-running operation. + + The server makes a best effort to cancel the operation, but success + is not guaranteed. If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.CancelOperationRequest`): + The request object. Request message for + `CancelOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.CancelOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.cancel_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("RankServiceClient",) diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/rank_service/transports/__init__.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/rank_service/transports/__init__.py new file mode 100644 index 000000000000..0084baa3bfcc --- /dev/null +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/rank_service/transports/__init__.py @@ -0,0 +1,36 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
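With the synchronous client above complete, here is a minimal end-to-end sketch. It assumes application default credentials are available, and the project, location, and ranking-config ID are illustrative placeholders rather than values taken from this change:

```python
from google.cloud import discoveryengine_v1

# The context manager closes the transport on exit, so only use it when the
# transport is not shared with other clients.
with discoveryengine_v1.RankServiceClient() as client:
    ranking_config = client.ranking_config_path(
        project="my-project",
        location="global",
        ranking_config="default_ranking_config",
    )
    response = client.rank(
        request=discoveryengine_v1.RankRequest(ranking_config=ranking_config)
    )
    print(response)

    # The operations helpers accept either a request message or a plain dict,
    # which is expanded into the corresponding operations_pb2 request type.
    operations = client.list_operations({"name": "projects/my-project"})
    for operation in operations.operations:
        print(operation.name, operation.done)
```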
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +from typing import Dict, Type + +from .base import RankServiceTransport +from .grpc import RankServiceGrpcTransport +from .grpc_asyncio import RankServiceGrpcAsyncIOTransport +from .rest import RankServiceRestInterceptor, RankServiceRestTransport + +# Compile a registry of transports. +_transport_registry = OrderedDict() # type: Dict[str, Type[RankServiceTransport]] +_transport_registry["grpc"] = RankServiceGrpcTransport +_transport_registry["grpc_asyncio"] = RankServiceGrpcAsyncIOTransport +_transport_registry["rest"] = RankServiceRestTransport + +__all__ = ( + "RankServiceTransport", + "RankServiceGrpcTransport", + "RankServiceGrpcAsyncIOTransport", + "RankServiceRestTransport", + "RankServiceRestInterceptor", +) diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/rank_service/transports/base.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/rank_service/transports/base.py new file mode 100644 index 000000000000..9aecf7851e53 --- /dev/null +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/rank_service/transports/base.py @@ -0,0 +1,188 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
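The registry in `transports/__init__.py` above maps the labels `grpc`, `grpc_asyncio`, and `rest` to transport classes, and passing one of those labels to the client selects the implementation. A minimal sketch, assuming application default credentials:

```python
from google.cloud import discoveryengine_v1

# Select the REST transport instead of the default gRPC one.
client = discoveryengine_v1.RankServiceClient(transport="rest")
print(type(client.transport).__name__)  # RankServiceRestTransport
print(client.transport.kind)  # rest
```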
+# +import abc +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union + +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.discoveryengine_v1 import gapic_version as package_version +from google.cloud.discoveryengine_v1.types import rank_service + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +class RankServiceTransport(abc.ABC): + """Abstract transport class for RankService.""" + + AUTH_SCOPES = ("https://www.googleapis.com/auth/cloud-platform",) + + DEFAULT_HOST: str = "discoveryengine.googleapis.com" + + def __init__( + self, + *, + host: str = DEFAULT_HOST, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + **kwargs, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'discoveryengine.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A list of scopes. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + """ + + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} + + # Save the scopes. + self._scopes = scopes + + # If no credentials are provided, then determine the appropriate + # defaults. + if credentials and credentials_file: + raise core_exceptions.DuplicateCredentialArgs( + "'credentials_file' and 'credentials' are mutually exclusive" + ) + + if credentials_file is not None: + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, **scopes_kwargs, quota_project_id=quota_project_id + ) + elif credentials is None: + credentials, _ = google.auth.default( + **scopes_kwargs, quota_project_id=quota_project_id + ) + # Don't apply audience if the credentials file passed from user. 
+ if hasattr(credentials, "with_gdch_audience"): + credentials = credentials.with_gdch_audience( + api_audience if api_audience else host + ) + + # If the credentials are service account credentials, then always try to use self signed JWT. + if ( + always_use_jwt_access + and isinstance(credentials, service_account.Credentials) + and hasattr(service_account.Credentials, "with_always_use_jwt_access") + ): + credentials = credentials.with_always_use_jwt_access(True) + + # Save the credentials. + self._credentials = credentials + + # Save the hostname. Default to port 443 (HTTPS) if none is specified. + if ":" not in host: + host += ":443" + self._host = host + + @property + def host(self): + return self._host + + def _prep_wrapped_messages(self, client_info): + # Precompute the wrapped methods. + self._wrapped_methods = { + self.rank: gapic_v1.method.wrap_method( + self.rank, + default_timeout=None, + client_info=client_info, + ), + } + + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! + """ + raise NotImplementedError() + + @property + def rank( + self, + ) -> Callable[ + [rank_service.RankRequest], + Union[rank_service.RankResponse, Awaitable[rank_service.RankResponse]], + ]: + raise NotImplementedError() + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], + Union[ + operations_pb2.ListOperationsResponse, + Awaitable[operations_pb2.ListOperationsResponse], + ], + ]: + raise NotImplementedError() + + @property + def get_operation( + self, + ) -> Callable[ + [operations_pb2.GetOperationRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None,]: + raise NotImplementedError() + + @property + def kind(self) -> str: + raise NotImplementedError() + + +__all__ = ("RankServiceTransport",) diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/rank_service/transports/grpc.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/rank_service/transports/grpc.py new file mode 100644 index 000000000000..5c79d7f12a3d --- /dev/null +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/rank_service/transports/grpc.py @@ -0,0 +1,324 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
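`RankServiceTransport` above only declares the surface: the `rank` callable, the long-running-operations helpers and `kind` all raise `NotImplementedError`, while the constructor resolves credentials and appends the default `:443` port. A minimal in-memory subclass is therefore enough for unit tests. This is a hedged sketch, not part of this PR; the `AnonymousCredentials` shortcut, the `RankingRecord` field names and the constant score are assumptions rather than anything defined in this diff:

```python
from google.auth.credentials import AnonymousCredentials

from google.cloud.discoveryengine_v1.services.rank_service.transports.base import (
    RankServiceTransport,
)
from google.cloud.discoveryengine_v1.types import rank_service


class InMemoryRankServiceTransport(RankServiceTransport):
    """Test-only transport that never touches the network."""

    @property
    def kind(self) -> str:
        return "in-memory"

    @property
    def rank(self):
        def _rank(request: rank_service.RankRequest) -> rank_service.RankResponse:
            # Echo every record back with a constant placeholder score.
            return rank_service.RankResponse(
                records=[
                    rank_service.RankingRecord(id=record.id, score=0.5)
                    for record in request.records
                ]
            )

        return _rank

    def close(self) -> None:
        pass  # nothing to release


# AnonymousCredentials skips the ADC lookup in the base constructor.
transport = InMemoryRankServiceTransport(credentials=AnonymousCredentials())
print(transport.host)  # "discoveryengine.googleapis.com:443" after normalization
```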
+# +from typing import Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, grpc_helpers +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +import grpc # type: ignore + +from google.cloud.discoveryengine_v1.types import rank_service + +from .base import DEFAULT_CLIENT_INFO, RankServiceTransport + + +class RankServiceGrpcTransport(RankServiceTransport): + """gRPC backend transport for RankService. + + Service for ranking text records. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + + _stubs: Dict[str, Callable] + + def __init__( + self, + *, + host: str = "discoveryengine.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'discoveryengine.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if a ``channel`` instance is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if a ``channel`` instance is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if a ``channel`` instance is provided. + channel (Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]]): + A ``Channel`` instance through which to make calls, or a Callable + that constructs and returns one. If set to None, ``self.create_channel`` + is used to create the channel. If a Callable is given, it will be called + with the same arguments as used in ``self.create_channel``. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. 
+ ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if a ``channel`` instance is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if isinstance(channel, grpc.Channel): + # Ignore credentials if a channel was passed. + credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + # initialize with the provided callable or the default channel + channel_init = channel or type(self).create_channel + self._grpc_channel = channel_init( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. 
This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @classmethod + def create_channel( + cls, + host: str = "discoveryengine.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> grpc.Channel: + """Create and return a gRPC channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + grpc.Channel: A gRPC channel object. + + Raises: + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + + return grpc_helpers.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs, + ) + + @property + def grpc_channel(self) -> grpc.Channel: + """Return the channel designed to connect to this service.""" + return self._grpc_channel + + @property + def rank(self) -> Callable[[rank_service.RankRequest], rank_service.RankResponse]: + r"""Return a callable for the rank method over gRPC. + + Ranks a list of text records based on the given input + query. + + Returns: + Callable[[~.RankRequest], + ~.RankResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "rank" not in self._stubs: + self._stubs["rank"] = self.grpc_channel.unary_unary( + "/google.cloud.discoveryengine.v1.RankService/Rank", + request_serializer=rank_service.RankRequest.serialize, + response_deserializer=rank_service.RankResponse.deserialize, + ) + return self._stubs["rank"] + + def close(self): + self.grpc_channel.close() + + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None]: + r"""Return a callable for the cancel_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
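+ # The stub is created lazily on first property access and cached in
+ # self._stubs, so later reads reuse the same bound gRPC callable.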
+ if "cancel_operation" not in self._stubs: + self._stubs["cancel_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/CancelOperation", + request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["cancel_operation"] + + @property + def get_operation( + self, + ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: + r"""Return a callable for the get_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_operation" not in self._stubs: + self._stubs["get_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/GetOperation", + request_serializer=operations_pb2.GetOperationRequest.SerializeToString, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["get_operation"] + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse + ]: + r"""Return a callable for the list_operations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_operations" not in self._stubs: + self._stubs["list_operations"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/ListOperations", + request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, + response_deserializer=operations_pb2.ListOperationsResponse.FromString, + ) + return self._stubs["list_operations"] + + @property + def kind(self) -> str: + return "grpc" + + +__all__ = ("RankServiceGrpcTransport",) diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/rank_service/transports/grpc_asyncio.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/rank_service/transports/grpc_asyncio.py new file mode 100644 index 000000000000..26dd1a52ccee --- /dev/null +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/rank_service/transports/grpc_asyncio.py @@ -0,0 +1,336 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1, grpc_helpers_async +from google.api_core import retry_async as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +import grpc # type: ignore +from grpc.experimental import aio # type: ignore + +from google.cloud.discoveryengine_v1.types import rank_service + +from .base import DEFAULT_CLIENT_INFO, RankServiceTransport +from .grpc import RankServiceGrpcTransport + + +class RankServiceGrpcAsyncIOTransport(RankServiceTransport): + """gRPC AsyncIO backend transport for RankService. + + Service for ranking text records. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + + _grpc_channel: aio.Channel + _stubs: Dict[str, Callable] = {} + + @classmethod + def create_channel( + cls, + host: str = "discoveryengine.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> aio.Channel: + """Create and return a gRPC AsyncIO channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + aio.Channel: A gRPC AsyncIO channel object. 
+ """ + + return grpc_helpers_async.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs, + ) + + def __init__( + self, + *, + host: str = "discoveryengine.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[Union[aio.Channel, Callable[..., aio.Channel]]] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'discoveryengine.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if a ``channel`` instance is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if a ``channel`` instance is provided. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + channel (Optional[Union[aio.Channel, Callable[..., aio.Channel]]]): + A ``Channel`` instance through which to make calls, or a Callable + that constructs and returns one. If set to None, ``self.create_channel`` + is used to create the channel. If a Callable is given, it will be called + with the same arguments as used in ``self.create_channel``. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if a ``channel`` instance is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. 
+ Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if isinstance(channel, aio.Channel): + # Ignore credentials if a channel was passed. + credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + # initialize with the provided callable or the default channel + channel_init = channel or type(self).create_channel + self._grpc_channel = channel_init( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @property + def grpc_channel(self) -> aio.Channel: + """Create the channel designed to connect to this service. + + This property caches on the instance; repeated calls return + the same channel. + """ + # Return the channel from cache. + return self._grpc_channel + + @property + def rank( + self, + ) -> Callable[[rank_service.RankRequest], Awaitable[rank_service.RankResponse]]: + r"""Return a callable for the rank method over gRPC. + + Ranks a list of text records based on the given input + query. + + Returns: + Callable[[~.RankRequest], + Awaitable[~.RankResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
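+ # On the asyncio channel the multicallable returns awaitable calls, which is
+ # what the Awaitable[RankResponse] annotation on this property promises.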
+ if "rank" not in self._stubs: + self._stubs["rank"] = self.grpc_channel.unary_unary( + "/google.cloud.discoveryengine.v1.RankService/Rank", + request_serializer=rank_service.RankRequest.serialize, + response_deserializer=rank_service.RankResponse.deserialize, + ) + return self._stubs["rank"] + + def _prep_wrapped_messages(self, client_info): + """Precompute the wrapped methods, overriding the base class method to use async wrappers.""" + self._wrapped_methods = { + self.rank: gapic_v1.method_async.wrap_method( + self.rank, + default_timeout=None, + client_info=client_info, + ), + } + + def close(self): + return self.grpc_channel.close() + + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None]: + r"""Return a callable for the cancel_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "cancel_operation" not in self._stubs: + self._stubs["cancel_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/CancelOperation", + request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["cancel_operation"] + + @property + def get_operation( + self, + ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: + r"""Return a callable for the get_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_operation" not in self._stubs: + self._stubs["get_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/GetOperation", + request_serializer=operations_pb2.GetOperationRequest.SerializeToString, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["get_operation"] + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse + ]: + r"""Return a callable for the list_operations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_operations" not in self._stubs: + self._stubs["list_operations"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/ListOperations", + request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, + response_deserializer=operations_pb2.ListOperationsResponse.FromString, + ) + return self._stubs["list_operations"] + + +__all__ = ("RankServiceGrpcAsyncIOTransport",) diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/rank_service/transports/rest.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/rank_service/transports/rest.py new file mode 100644 index 000000000000..8796a94833df --- /dev/null +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/rank_service/transports/rest.py @@ -0,0 +1,698 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +import dataclasses +import json # type: ignore +import re +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, path_template, rest_helpers, rest_streaming +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.transport.requests import AuthorizedSession # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.protobuf import json_format +import grpc # type: ignore +from requests import __version__ as requests_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + + +from google.longrunning import operations_pb2 # type: ignore + +from google.cloud.discoveryengine_v1.types import rank_service + +from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO +from .base import RankServiceTransport + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) + + +class RankServiceRestInterceptor: + """Interceptor for RankService. + + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. + Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the RankServiceRestTransport. + + .. code-block:: python + class MyCustomRankServiceInterceptor(RankServiceRestInterceptor): + def pre_rank(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_rank(self, response): + logging.log(f"Received response: {response}") + return response + + transport = RankServiceRestTransport(interceptor=MyCustomRankServiceInterceptor()) + client = RankServiceClient(transport=transport) + + + """ + + def pre_rank( + self, request: rank_service.RankRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[rank_service.RankRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for rank + + Override in a subclass to manipulate the request or metadata + before they are sent to the RankService server. + """ + return request, metadata + + def post_rank( + self, response: rank_service.RankResponse + ) -> rank_service.RankResponse: + """Post-rpc interceptor for rank + + Override in a subclass to manipulate the response + after it is returned by the RankService server but before + it is returned to user code. 
+ """ + return response + + def pre_cancel_operation( + self, + request: operations_pb2.CancelOperationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[operations_pb2.CancelOperationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for cancel_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the RankService server. + """ + return request, metadata + + def post_cancel_operation(self, response: None) -> None: + """Post-rpc interceptor for cancel_operation + + Override in a subclass to manipulate the response + after it is returned by the RankService server but before + it is returned to user code. + """ + return response + + def pre_get_operation( + self, + request: operations_pb2.GetOperationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[operations_pb2.GetOperationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the RankService server. + """ + return request, metadata + + def post_get_operation( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for get_operation + + Override in a subclass to manipulate the response + after it is returned by the RankService server but before + it is returned to user code. + """ + return response + + def pre_list_operations( + self, + request: operations_pb2.ListOperationsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[operations_pb2.ListOperationsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_operations + + Override in a subclass to manipulate the request or metadata + before they are sent to the RankService server. + """ + return request, metadata + + def post_list_operations( + self, response: operations_pb2.ListOperationsResponse + ) -> operations_pb2.ListOperationsResponse: + """Post-rpc interceptor for list_operations + + Override in a subclass to manipulate the response + after it is returned by the RankService server but before + it is returned to user code. + """ + return response + + +@dataclasses.dataclass +class RankServiceRestStub: + _session: AuthorizedSession + _host: str + _interceptor: RankServiceRestInterceptor + + +class RankServiceRestTransport(RankServiceTransport): + """REST backend transport for RankService. + + Service for ranking text records. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends JSON representations of protocol buffers over HTTP/1.1 + + """ + + def __init__( + self, + *, + host: str = "discoveryengine.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", + interceptor: Optional[RankServiceRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'discoveryengine.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. 
These
+ credentials identify the application to the service; if none
+ are specified, the client will attempt to ascertain the
+ credentials from the environment.
+
+ credentials_file (Optional[str]): A file with credentials that can
+ be loaded with :func:`google.auth.load_credentials_from_file`.
+ This argument is ignored if ``channel`` is provided.
+ scopes (Optional(Sequence[str])): A list of scopes. This argument is
+ ignored if ``channel`` is provided.
+ client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client
+ certificate to configure mutual TLS HTTP channel. It is ignored
+ if ``channel`` is provided.
+ quota_project_id (Optional[str]): An optional project to use for billing
+ and quota.
+ client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+ The client info used to send a user-agent string along with
+ API requests. If ``None``, then default info will be used.
+ Generally, you only need to set this if you are developing
+ your own client library.
+ always_use_jwt_access (Optional[bool]): Whether self signed JWT should
+ be used for service account credentials.
+ url_scheme: the protocol scheme for the API endpoint. Normally
+ "https", but for testing or local servers,
+ "http" can be specified.
+ """
+ # Run the base constructor
+ # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc.
+ # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the
+ # credentials object
+ maybe_url_match = re.match("^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host)
+ if maybe_url_match is None:
+ raise ValueError(
+ f"Unexpected hostname structure: {host}"
+ ) # pragma: NO COVER
+
+ url_match_items = maybe_url_match.groupdict()
+
+ host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host
+
+ super().__init__(
+ host=host,
+ credentials=credentials,
+ client_info=client_info,
+ always_use_jwt_access=always_use_jwt_access,
+ api_audience=api_audience,
+ )
+ self._session = AuthorizedSession(
+ self._credentials, default_host=self.DEFAULT_HOST
+ )
+ if client_cert_source_for_mtls:
+ self._session.configure_mtls_channel(client_cert_source_for_mtls)
+ self._interceptor = interceptor or RankServiceRestInterceptor()
+ self._prep_wrapped_messages(client_info)
+
+ class _Rank(RankServiceRestStub):
+ def __hash__(self):
+ return hash("Rank")
+
+ __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {}
+
+ @classmethod
+ def _get_unset_required_fields(cls, message_dict):
+ return {
+ k: v
+ for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items()
+ if k not in message_dict
+ }
+
+ def __call__(
+ self,
+ request: rank_service.RankRequest,
+ *,
+ retry: OptionalRetry = gapic_v1.method.DEFAULT,
+ timeout: Optional[float] = None,
+ metadata: Sequence[Tuple[str, str]] = (),
+ ) -> rank_service.RankResponse:
+ r"""Call the rank method over HTTP.
+
+ Args:
+ request (~.rank_service.RankRequest):
+ The request object. Request message for
+ [RankService.Rank][google.cloud.discoveryengine.v1.RankService.Rank]
+ method.
+ retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ should be retried.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, str]]): Strings which should be
+ sent along with the request as metadata.
+
+ Returns:
+ ~.rank_service.RankResponse:
+ Response message for
+ [RankService.Rank][google.cloud.discoveryengine.v1.RankService.Rank]
+ method.
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{ranking_config=projects/*/locations/*/rankingConfigs/*}:rank", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_rank(request, metadata) + pb_request = rank_service.RankRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = rank_service.RankResponse() + pb_resp = rank_service.RankResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_rank(resp) + return resp + + @property + def rank(self) -> Callable[[rank_service.RankRequest], rank_service.RankResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._Rank(self._session, self._host, self._interceptor) # type: ignore + + @property + def cancel_operation(self): + return self._CancelOperation(self._session, self._host, self._interceptor) # type: ignore + + class _CancelOperation(RankServiceRestStub): + def __call__( + self, + request: operations_pb2.CancelOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Call the cancel operation method over HTTP. + + Args: + request (operations_pb2.CancelOperationRequest): + The request object for CancelOperation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{name=projects/*/operations/*}:cancel", + "body": "*", + }, + { + "method": "post", + "uri": "/v1/{name=projects/*/locations/*/collections/*/dataStores/*/branches/*/operations/*}:cancel", + "body": "*", + }, + { + "method": "post", + "uri": "/v1/{name=projects/*/locations/*/dataStores/*/branches/*/operations/*}:cancel", + "body": "*", + }, + ] + + request, metadata = self._interceptor.pre_cancel_operation( + request, metadata + ) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + body = json.dumps(transcoded_request["body"]) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + return self._interceptor.post_cancel_operation(None) + + @property + def get_operation(self): + return self._GetOperation(self._session, self._host, self._interceptor) # type: ignore + + class _GetOperation(RankServiceRestStub): + def __call__( + self, + request: operations_pb2.GetOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the get operation method over HTTP. + + Args: + request (operations_pb2.GetOperationRequest): + The request object for GetOperation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + operations_pb2.Operation: Response from GetOperation method. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/dataConnector/operations/*}", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/dataStores/*/branches/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/dataStores/*/models/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/dataStores/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/dataStores/*/schemas/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/dataStores/*/siteSearchEngine/operations/*}", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/dataStores/*/siteSearchEngine/targetSites/operations/*}", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/engines/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/dataStores/*/branches/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/dataStores/*/models/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/dataStores/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/operations/*}", + }, + ] + + request, metadata = self._interceptor.pre_get_operation(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = operations_pb2.Operation() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_get_operation(resp) + return resp + + @property + def list_operations(self): + return self._ListOperations(self._session, self._host, self._interceptor) # type: ignore + + class _ListOperations(RankServiceRestStub): + def __call__( + self, + request: operations_pb2.ListOperationsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Call the list operations method over HTTP. + + Args: + request (operations_pb2.ListOperationsRequest): + The request object for ListOperations method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + operations_pb2.ListOperationsResponse: Response from ListOperations method. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*}/operations", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/dataConnector}/operations", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/dataStores/*/branches/*}/operations", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/dataStores/*/models/*}/operations", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/dataStores/*/schemas/*}/operations", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/dataStores/*/siteSearchEngine/targetSites}/operations", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/dataStores/*/siteSearchEngine}/operations", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/dataStores/*}/operations", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/engines/*}/operations", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*}/operations", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/dataStores/*/branches/*}/operations", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/dataStores/*/models/*}/operations", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/dataStores/*}/operations", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*}/operations", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*}/operations", + }, + ] + + request, metadata = self._interceptor.pre_list_operations(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
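+ # from_http_response builds the GoogleAPICallError subclass that matches the
+ # status code (for example NotFound for a 404), and it is raised below.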
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = operations_pb2.ListOperationsResponse() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_list_operations(resp) + return resp + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__ = ("RankServiceRestTransport",) diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/recommendation_service/async_client.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/recommendation_service/async_client.py index fd0a6a63da96..b1c04a5401c8 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/recommendation_service/async_client.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/recommendation_service/async_client.py @@ -65,8 +65,14 @@ class RecommendationServiceAsyncClient: _DEFAULT_ENDPOINT_TEMPLATE = RecommendationServiceClient._DEFAULT_ENDPOINT_TEMPLATE _DEFAULT_UNIVERSE = RecommendationServiceClient._DEFAULT_UNIVERSE + data_store_path = staticmethod(RecommendationServiceClient.data_store_path) + parse_data_store_path = staticmethod( + RecommendationServiceClient.parse_data_store_path + ) document_path = staticmethod(RecommendationServiceClient.document_path) parse_document_path = staticmethod(RecommendationServiceClient.parse_document_path) + engine_path = staticmethod(RecommendationServiceClient.engine_path) + parse_engine_path = staticmethod(RecommendationServiceClient.parse_engine_path) serving_config_path = staticmethod(RecommendationServiceClient.serving_config_path) parse_serving_config_path = staticmethod( RecommendationServiceClient.parse_serving_config_path @@ -475,6 +481,63 @@ async def get_operation( # Done; return the response. return response + async def cancel_operation( + self, + request: Optional[operations_pb2.CancelOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Starts asynchronous cancellation on a long-running operation. + + The server makes a best effort to cancel the operation, but success + is not guaranteed. If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.CancelOperationRequest`): + The request object. Request message for + `CancelOperation` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.CancelOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.cancel_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. 
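+ # to_grpc_metadata renders this as the x-goog-request-params header carrying
+ # the operation name, which the backend uses to route the request.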
+ metadata = tuple(metadata) + (
+ gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
+ )
+
+ # Validate the universe domain.
+ self._client._validate_universe_domain()
+
+ # Send the request.
+ await rpc(
+ request,
+ retry=retry,
+ timeout=timeout,
+ metadata=metadata,
+ )
+
 async def __aenter__(self) -> "RecommendationServiceAsyncClient":
 return self
diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/recommendation_service/client.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/recommendation_service/client.py
index 7426f1c7541e..eda0e663db13 100644
--- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/recommendation_service/client.py
+++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/recommendation_service/client.py
@@ -185,6 +185,28 @@ def transport(self) -> RecommendationServiceTransport:
 """
 return self._transport
+ @staticmethod
+ def data_store_path(
+ project: str,
+ location: str,
+ data_store: str,
+ ) -> str:
+ """Returns a fully-qualified data_store string."""
+ return "projects/{project}/locations/{location}/dataStores/{data_store}".format(
+ project=project,
+ location=location,
+ data_store=data_store,
+ )
+
+ @staticmethod
+ def parse_data_store_path(path: str) -> Dict[str, str]:
+ """Parses a data_store path into its component segments."""
+ m = re.match(
+ r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/dataStores/(?P<data_store>.+?)$",
+ path,
+ )
+ return m.groupdict() if m else {}
+
 @staticmethod
 def document_path(
 project: str,
@@ -211,6 +233,30 @@ def parse_document_path(path: str) -> Dict[str, str]:
 )
 return m.groupdict() if m else {}
+ @staticmethod
+ def engine_path(
+ project: str,
+ location: str,
+ collection: str,
+ engine: str,
+ ) -> str:
+ """Returns a fully-qualified engine string."""
+ return "projects/{project}/locations/{location}/collections/{collection}/engines/{engine}".format(
+ project=project,
+ location=location,
+ collection=collection,
+ engine=engine,
+ )
+
+ @staticmethod
+ def parse_engine_path(path: str) -> Dict[str, str]:
+ """Parses a engine path into its component segments."""
+ m = re.match(
+ r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/collections/(?P<collection>.+?)/engines/(?P<engine>.+?)$",
+ path,
+ )
+ return m.groupdict() if m else {}
+
 @staticmethod
 def serving_config_path(
 project: str,
@@ -930,6 +976,63 @@ def get_operation(
 # Done; return the response.
 return response
+ def cancel_operation(
+ self,
+ request: Optional[operations_pb2.CancelOperationRequest] = None,
+ *,
+ retry: OptionalRetry = gapic_v1.method.DEFAULT,
+ timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+ metadata: Sequence[Tuple[str, str]] = (),
+ ) -> None:
+ r"""Starts asynchronous cancellation on a long-running operation.
+
+ The server makes a best effort to cancel the operation, but success
+ is not guaranteed. If the server doesn't support this method, it returns
+ `google.rpc.Code.UNIMPLEMENTED`.
+
+ Args:
+ request (:class:`~.operations_pb2.CancelOperationRequest`):
+ The request object. Request message for
+ `CancelOperation` method.
+ retry (google.api_core.retry.Retry): Designation of what errors,
+ if any, should be retried.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, str]]): Strings which should be
+ sent along with the request as metadata.
+ Returns:
+ None
+ """
+ # Create or coerce a protobuf request object.
+ # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.CancelOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.cancel_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( gapic_version=package_version.__version__ diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/recommendation_service/transports/base.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/recommendation_service/transports/base.py index c138b8e24ed0..acb5b09b5e65 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/recommendation_service/transports/base.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/recommendation_service/transports/base.py @@ -177,6 +177,12 @@ def get_operation( ]: raise NotImplementedError() + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None,]: + raise NotImplementedError() + @property def kind(self) -> str: raise NotImplementedError() diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/recommendation_service/transports/grpc.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/recommendation_service/transports/grpc.py index eecf25c482b1..574a45eea8e2 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/recommendation_service/transports/grpc.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/recommendation_service/transports/grpc.py @@ -268,6 +268,23 @@ def recommend( def close(self): self.grpc_channel.close() + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None]: + r"""Return a callable for the cancel_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "cancel_operation" not in self._stubs: + self._stubs["cancel_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/CancelOperation", + request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["cancel_operation"] + @property def get_operation( self, diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/recommendation_service/transports/grpc_asyncio.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/recommendation_service/transports/grpc_asyncio.py index 70c3c928945f..676bcf5c78d2 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/recommendation_service/transports/grpc_asyncio.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/recommendation_service/transports/grpc_asyncio.py @@ -282,6 +282,23 @@ def _prep_wrapped_messages(self, client_info): def close(self): return self.grpc_channel.close() + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None]: + r"""Return a callable for the cancel_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "cancel_operation" not in self._stubs: + self._stubs["cancel_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/CancelOperation", + request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["cancel_operation"] + @property def get_operation( self, diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/recommendation_service/transports/rest.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/recommendation_service/transports/rest.py index ada02cbdf27a..8b3979d914df 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/recommendation_service/transports/rest.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/recommendation_service/transports/rest.py @@ -103,6 +103,27 @@ def post_recommend( """ return response + def pre_cancel_operation( + self, + request: operations_pb2.CancelOperationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[operations_pb2.CancelOperationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for cancel_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the RecommendationService server. + """ + return request, metadata + + def post_cancel_operation(self, response: None) -> None: + """Post-rpc interceptor for cancel_operation + + Override in a subclass to manipulate the response + after it is returned by the RecommendationService server but before + it is returned to user code. 
+ """ + return response + def pre_get_operation( self, request: operations_pb2.GetOperationRequest, @@ -361,6 +382,81 @@ def recommend( # In C++ this would require a dynamic_cast return self._Recommend(self._session, self._host, self._interceptor) # type: ignore + @property + def cancel_operation(self): + return self._CancelOperation(self._session, self._host, self._interceptor) # type: ignore + + class _CancelOperation(RecommendationServiceRestStub): + def __call__( + self, + request: operations_pb2.CancelOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Call the cancel operation method over HTTP. + + Args: + request (operations_pb2.CancelOperationRequest): + The request object for CancelOperation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{name=projects/*/operations/*}:cancel", + "body": "*", + }, + { + "method": "post", + "uri": "/v1/{name=projects/*/locations/*/collections/*/dataStores/*/branches/*/operations/*}:cancel", + "body": "*", + }, + { + "method": "post", + "uri": "/v1/{name=projects/*/locations/*/dataStores/*/branches/*/operations/*}:cancel", + "body": "*", + }, + ] + + request, metadata = self._interceptor.pre_cancel_operation( + request, metadata + ) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + body = json.dumps(transcoded_request["body"]) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + return self._interceptor.post_cancel_operation(None) + @property def get_operation(self): return self._GetOperation(self._session, self._host, self._interceptor) # type: ignore diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/schema_service/async_client.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/schema_service/async_client.py index 8e13173a9a8e..670bb13344c4 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/schema_service/async_client.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/schema_service/async_client.py @@ -569,7 +569,7 @@ async def sample_create_schema(): schema_id (:class:`str`): Required. The ID to use for the [Schema][google.cloud.discoveryengine.v1.Schema], which - will become the final component of the + becomes the final component of the [Schema.name][google.cloud.discoveryengine.v1.Schema.name]. 
This field should conform to @@ -999,6 +999,63 @@ async def get_operation( # Done; return the response. return response + async def cancel_operation( + self, + request: Optional[operations_pb2.CancelOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Starts asynchronous cancellation on a long-running operation. + + The server makes a best effort to cancel the operation, but success + is not guaranteed. If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.CancelOperationRequest`): + The request object. Request message for + `CancelOperation` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.CancelOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.cancel_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + async def __aenter__(self) -> "SchemaServiceAsyncClient": return self diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/schema_service/client.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/schema_service/client.py index 7551eb91c7d3..ca30909a27df 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/schema_service/client.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/schema_service/client.py @@ -1003,7 +1003,7 @@ def sample_create_schema(): schema_id (str): Required. The ID to use for the [Schema][google.cloud.discoveryengine.v1.Schema], which - will become the final component of the + becomes the final component of the [Schema.name][google.cloud.discoveryengine.v1.Schema.name]. This field should conform to @@ -1438,6 +1438,63 @@ def get_operation( # Done; return the response. return response + def cancel_operation( + self, + request: Optional[operations_pb2.CancelOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Starts asynchronous cancellation on a long-running operation. + + The server makes a best effort to cancel the operation, but success + is not guaranteed. If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.CancelOperationRequest`): + The request object. 
Request message for + `CancelOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.CancelOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.cancel_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( gapic_version=package_version.__version__ diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/schema_service/transports/base.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/schema_service/transports/base.py index d405c52348eb..6cc99fe4be42 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/schema_service/transports/base.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/schema_service/transports/base.py @@ -238,6 +238,12 @@ def get_operation( ]: raise NotImplementedError() + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None,]: + raise NotImplementedError() + @property def kind(self) -> str: raise NotImplementedError() diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/schema_service/transports/grpc.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/schema_service/transports/grpc.py index 0861a92bf4fa..72c360aaf4f3 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/schema_service/transports/grpc.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/schema_service/transports/grpc.py @@ -385,6 +385,23 @@ def delete_schema( def close(self): self.grpc_channel.close() + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None]: + r"""Return a callable for the cancel_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "cancel_operation" not in self._stubs: + self._stubs["cancel_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/CancelOperation", + request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["cancel_operation"] + @property def get_operation( self, diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/schema_service/transports/grpc_asyncio.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/schema_service/transports/grpc_asyncio.py index 645db9cb1081..2ab95c66ffc3 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/schema_service/transports/grpc_asyncio.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/schema_service/transports/grpc_asyncio.py @@ -430,6 +430,23 @@ def _prep_wrapped_messages(self, client_info): def close(self): return self.grpc_channel.close() + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None]: + r"""Return a callable for the cancel_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "cancel_operation" not in self._stubs: + self._stubs["cancel_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/CancelOperation", + request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["cancel_operation"] + @property def get_operation( self, diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/schema_service/transports/rest.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/schema_service/transports/rest.py index 768ae9c3503b..214d34eaee49 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/schema_service/transports/rest.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/schema_service/transports/rest.py @@ -231,6 +231,27 @@ def post_update_schema( """ return response + def pre_cancel_operation( + self, + request: operations_pb2.CancelOperationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[operations_pb2.CancelOperationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for cancel_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the SchemaService server. + """ + return request, metadata + + def post_cancel_operation(self, response: None) -> None: + """Post-rpc interceptor for cancel_operation + + Override in a subclass to manipulate the response + after it is returned by the SchemaService server but before + it is returned to user code. + """ + return response + def pre_get_operation( self, request: operations_pb2.GetOperationRequest, @@ -386,6 +407,23 @@ def operations_client(self) -> operations_v1.AbstractOperationsClient: # Only create a new client if we do not already have one. 
if self._operations_client is None: http_options: Dict[str, List[Dict[str, str]]] = { + "google.longrunning.Operations.CancelOperation": [ + { + "method": "post", + "uri": "/v1/{name=projects/*/operations/*}:cancel", + "body": "*", + }, + { + "method": "post", + "uri": "/v1/{name=projects/*/locations/*/collections/*/dataStores/*/branches/*/operations/*}:cancel", + "body": "*", + }, + { + "method": "post", + "uri": "/v1/{name=projects/*/locations/*/dataStores/*/branches/*/operations/*}:cancel", + "body": "*", + }, + ], "google.longrunning.Operations.GetOperation": [ { "method": "get", @@ -1049,6 +1087,81 @@ def update_schema( # In C++ this would require a dynamic_cast return self._UpdateSchema(self._session, self._host, self._interceptor) # type: ignore + @property + def cancel_operation(self): + return self._CancelOperation(self._session, self._host, self._interceptor) # type: ignore + + class _CancelOperation(SchemaServiceRestStub): + def __call__( + self, + request: operations_pb2.CancelOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Call the cancel operation method over HTTP. + + Args: + request (operations_pb2.CancelOperationRequest): + The request object for CancelOperation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{name=projects/*/operations/*}:cancel", + "body": "*", + }, + { + "method": "post", + "uri": "/v1/{name=projects/*/locations/*/collections/*/dataStores/*/branches/*/operations/*}:cancel", + "body": "*", + }, + { + "method": "post", + "uri": "/v1/{name=projects/*/locations/*/dataStores/*/branches/*/operations/*}:cancel", + "body": "*", + }, + ] + + request, metadata = self._interceptor.pre_cancel_operation( + request, metadata + ) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + body = json.dumps(transcoded_request["body"]) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + return self._interceptor.post_cancel_operation(None) + @property def get_operation(self): return self._GetOperation(self._session, self._host, self._interceptor) # type: ignore diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/search_service/async_client.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/search_service/async_client.py index f5e75e2b8f13..192a49cadc04 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/search_service/async_client.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/search_service/async_client.py @@ -481,6 +481,63 @@ async def get_operation( # Done; return the response. return response + async def cancel_operation( + self, + request: Optional[operations_pb2.CancelOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Starts asynchronous cancellation on a long-running operation. + + The server makes a best effort to cancel the operation, but success + is not guaranteed. If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.CancelOperationRequest`): + The request object. Request message for + `CancelOperation` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.CancelOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.cancel_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + async def __aenter__(self) -> "SearchServiceAsyncClient": return self diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/search_service/client.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/search_service/client.py index e58306968435..4463fa7e4c89 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/search_service/client.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/search_service/client.py @@ -978,6 +978,63 @@ def get_operation( # Done; return the response. 
return response + def cancel_operation( + self, + request: Optional[operations_pb2.CancelOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Starts asynchronous cancellation on a long-running operation. + + The server makes a best effort to cancel the operation, but success + is not guaranteed. If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.CancelOperationRequest`): + The request object. Request message for + `CancelOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.CancelOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.cancel_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( gapic_version=package_version.__version__ diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/search_service/transports/base.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/search_service/transports/base.py index 585169a8feb8..25cfd9a322af 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/search_service/transports/base.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/search_service/transports/base.py @@ -174,6 +174,12 @@ def get_operation( ]: raise NotImplementedError() + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None,]: + raise NotImplementedError() + @property def kind(self) -> str: raise NotImplementedError() diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/search_service/transports/grpc.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/search_service/transports/grpc.py index 00342baf6df6..15e5843993d1 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/search_service/transports/grpc.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/search_service/transports/grpc.py @@ -264,6 +264,23 @@ def search( def close(self): self.grpc_channel.close() + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None]: + r"""Return a callable for the cancel_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "cancel_operation" not in self._stubs: + self._stubs["cancel_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/CancelOperation", + request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["cancel_operation"] + @property def get_operation( self, diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/search_service/transports/grpc_asyncio.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/search_service/transports/grpc_asyncio.py index e9e9753bde94..614db8beb059 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/search_service/transports/grpc_asyncio.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/search_service/transports/grpc_asyncio.py @@ -280,6 +280,23 @@ def _prep_wrapped_messages(self, client_info): def close(self): return self.grpc_channel.close() + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None]: + r"""Return a callable for the cancel_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "cancel_operation" not in self._stubs: + self._stubs["cancel_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/CancelOperation", + request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["cancel_operation"] + @property def get_operation( self, diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/search_service/transports/rest.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/search_service/transports/rest.py index c443417791f1..c8165a22227b 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/search_service/transports/rest.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/search_service/transports/rest.py @@ -101,6 +101,27 @@ def post_search( """ return response + def pre_cancel_operation( + self, + request: operations_pb2.CancelOperationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[operations_pb2.CancelOperationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for cancel_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the SearchService server. + """ + return request, metadata + + def post_cancel_operation(self, response: None) -> None: + """Post-rpc interceptor for cancel_operation + + Override in a subclass to manipulate the response + after it is returned by the SearchService server but before + it is returned to user code. 
+ """ + return response + def pre_get_operation( self, request: operations_pb2.GetOperationRequest, @@ -359,6 +380,81 @@ def search( # In C++ this would require a dynamic_cast return self._Search(self._session, self._host, self._interceptor) # type: ignore + @property + def cancel_operation(self): + return self._CancelOperation(self._session, self._host, self._interceptor) # type: ignore + + class _CancelOperation(SearchServiceRestStub): + def __call__( + self, + request: operations_pb2.CancelOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Call the cancel operation method over HTTP. + + Args: + request (operations_pb2.CancelOperationRequest): + The request object for CancelOperation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{name=projects/*/operations/*}:cancel", + "body": "*", + }, + { + "method": "post", + "uri": "/v1/{name=projects/*/locations/*/collections/*/dataStores/*/branches/*/operations/*}:cancel", + "body": "*", + }, + { + "method": "post", + "uri": "/v1/{name=projects/*/locations/*/dataStores/*/branches/*/operations/*}:cancel", + "body": "*", + }, + ] + + request, metadata = self._interceptor.pre_cancel_operation( + request, metadata + ) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + body = json.dumps(transcoded_request["body"]) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + return self._interceptor.post_cancel_operation(None) + @property def get_operation(self): return self._GetOperation(self._session, self._host, self._interceptor) # type: ignore diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/site_search_engine_service/async_client.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/site_search_engine_service/async_client.py index fa9aee845f02..139425fd08e4 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/site_search_engine_service/async_client.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/site_search_engine_service/async_client.py @@ -1841,6 +1841,63 @@ async def get_operation( # Done; return the response. 
return response + async def cancel_operation( + self, + request: Optional[operations_pb2.CancelOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Starts asynchronous cancellation on a long-running operation. + + The server makes a best effort to cancel the operation, but success + is not guaranteed. If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.CancelOperationRequest`): + The request object. Request message for + `CancelOperation` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.CancelOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.cancel_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + async def __aenter__(self) -> "SiteSearchEngineServiceAsyncClient": return self diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/site_search_engine_service/client.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/site_search_engine_service/client.py index 573f93f8452b..332055c96bea 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/site_search_engine_service/client.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/site_search_engine_service/client.py @@ -2268,6 +2268,63 @@ def get_operation( # Done; return the response. return response + def cancel_operation( + self, + request: Optional[operations_pb2.CancelOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Starts asynchronous cancellation on a long-running operation. + + The server makes a best effort to cancel the operation, but success + is not guaranteed. If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.CancelOperationRequest`): + The request object. Request message for + `CancelOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. 
+ # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.CancelOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.cancel_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( gapic_version=package_version.__version__ diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/site_search_engine_service/transports/base.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/site_search_engine_service/transports/base.py index 26d2afd3eaf2..62b578cf0439 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/site_search_engine_service/transports/base.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/site_search_engine_service/transports/base.py @@ -345,6 +345,12 @@ def get_operation( ]: raise NotImplementedError() + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None,]: + raise NotImplementedError() + @property def kind(self) -> str: raise NotImplementedError() diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/site_search_engine_service/transports/grpc.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/site_search_engine_service/transports/grpc.py index 271c72b9ce04..876f7dd34997 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/site_search_engine_service/transports/grpc.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/site_search_engine_service/transports/grpc.py @@ -614,6 +614,23 @@ def fetch_domain_verification_status( def close(self): self.grpc_channel.close() + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None]: + r"""Return a callable for the cancel_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "cancel_operation" not in self._stubs: + self._stubs["cancel_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/CancelOperation", + request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["cancel_operation"] + @property def get_operation( self, diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/site_search_engine_service/transports/grpc_asyncio.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/site_search_engine_service/transports/grpc_asyncio.py index 74998e52ec6a..8d7df0caf150 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/site_search_engine_service/transports/grpc_asyncio.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/site_search_engine_service/transports/grpc_asyncio.py @@ -690,6 +690,23 @@ def _prep_wrapped_messages(self, client_info): def close(self): return self.grpc_channel.close() + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None]: + r"""Return a callable for the cancel_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "cancel_operation" not in self._stubs: + self._stubs["cancel_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/CancelOperation", + request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["cancel_operation"] + @property def get_operation( self, diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/site_search_engine_service/transports/rest.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/site_search_engine_service/transports/rest.py index 479330711130..fc45da7663ef 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/site_search_engine_service/transports/rest.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/site_search_engine_service/transports/rest.py @@ -482,6 +482,27 @@ def post_update_target_site( """ return response + def pre_cancel_operation( + self, + request: operations_pb2.CancelOperationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[operations_pb2.CancelOperationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for cancel_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the SiteSearchEngineService server. + """ + return request, metadata + + def post_cancel_operation(self, response: None) -> None: + """Post-rpc interceptor for cancel_operation + + Override in a subclass to manipulate the response + after it is returned by the SiteSearchEngineService server but before + it is returned to user code. + """ + return response + def pre_get_operation( self, request: operations_pb2.GetOperationRequest, @@ -636,6 +657,23 @@ def operations_client(self) -> operations_v1.AbstractOperationsClient: # Only create a new client if we do not already have one. 
if self._operations_client is None: http_options: Dict[str, List[Dict[str, str]]] = { + "google.longrunning.Operations.CancelOperation": [ + { + "method": "post", + "uri": "/v1/{name=projects/*/operations/*}:cancel", + "body": "*", + }, + { + "method": "post", + "uri": "/v1/{name=projects/*/locations/*/collections/*/dataStores/*/branches/*/operations/*}:cancel", + "body": "*", + }, + { + "method": "post", + "uri": "/v1/{name=projects/*/locations/*/dataStores/*/branches/*/operations/*}:cancel", + "body": "*", + }, + ], "google.longrunning.Operations.GetOperation": [ { "method": "get", @@ -2105,6 +2143,81 @@ def update_target_site( # In C++ this would require a dynamic_cast return self._UpdateTargetSite(self._session, self._host, self._interceptor) # type: ignore + @property + def cancel_operation(self): + return self._CancelOperation(self._session, self._host, self._interceptor) # type: ignore + + class _CancelOperation(SiteSearchEngineServiceRestStub): + def __call__( + self, + request: operations_pb2.CancelOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Call the cancel operation method over HTTP. + + Args: + request (operations_pb2.CancelOperationRequest): + The request object for CancelOperation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{name=projects/*/operations/*}:cancel", + "body": "*", + }, + { + "method": "post", + "uri": "/v1/{name=projects/*/locations/*/collections/*/dataStores/*/branches/*/operations/*}:cancel", + "body": "*", + }, + { + "method": "post", + "uri": "/v1/{name=projects/*/locations/*/dataStores/*/branches/*/operations/*}:cancel", + "body": "*", + }, + ] + + request, metadata = self._interceptor.pre_cancel_operation( + request, metadata + ) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + body = json.dumps(transcoded_request["body"]) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + return self._interceptor.post_cancel_operation(None) + @property def get_operation(self): return self._GetOperation(self._session, self._host, self._interceptor) # type: ignore diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/user_event_service/async_client.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/user_event_service/async_client.py index d590f52cfbd2..bf1b9d55fbd4 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/user_event_service/async_client.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/user_event_service/async_client.py @@ -81,6 +81,8 @@ class UserEventServiceAsyncClient: parse_data_store_path = staticmethod(UserEventServiceClient.parse_data_store_path) document_path = staticmethod(UserEventServiceClient.document_path) parse_document_path = staticmethod(UserEventServiceClient.parse_document_path) + engine_path = staticmethod(UserEventServiceClient.engine_path) + parse_engine_path = staticmethod(UserEventServiceClient.parse_engine_path) common_billing_account_path = staticmethod( UserEventServiceClient.common_billing_account_path ) @@ -326,7 +328,7 @@ async def sample_write_user_event(): UserEvent captures all metadata information Discovery Engine API needs to know about how end users interact - with customers' website. + with your website. """ # Create or coerce a protobuf request object. @@ -506,7 +508,7 @@ async def import_user_events( timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> operation_async.AsyncOperation: - r"""Bulk import of User events. Request processing might + r"""Bulk import of user events. Request processing might be synchronous. Events that already exist are skipped. Use this method for backfilling historical user events. @@ -725,6 +727,63 @@ async def get_operation( # Done; return the response. return response + async def cancel_operation( + self, + request: Optional[operations_pb2.CancelOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Starts asynchronous cancellation on a long-running operation. + + The server makes a best effort to cancel the operation, but success + is not guaranteed. If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.CancelOperationRequest`): + The request object. Request message for + `CancelOperation` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.CancelOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = gapic_v1.method_async.wrap_method( + self._client._transport.cancel_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + async def __aenter__(self) -> "UserEventServiceAsyncClient": return self diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/user_event_service/client.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/user_event_service/client.py index 4e101219ea73..8fb0ebded5bb 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/user_event_service/client.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/user_event_service/client.py @@ -245,6 +245,30 @@ def parse_document_path(path: str) -> Dict[str, str]: ) return m.groupdict() if m else {} + @staticmethod + def engine_path( + project: str, + location: str, + collection: str, + engine: str, + ) -> str: + """Returns a fully-qualified engine string.""" + return "projects/{project}/locations/{location}/collections/{collection}/engines/{engine}".format( + project=project, + location=location, + collection=collection, + engine=engine, + ) + + @staticmethod + def parse_engine_path(path: str) -> Dict[str, str]: + """Parses a engine path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/collections/(?P.+?)/engines/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + @staticmethod def common_billing_account_path( billing_account: str, @@ -771,7 +795,7 @@ def sample_write_user_event(): UserEvent captures all metadata information Discovery Engine API needs to know about how end users interact - with customers' website. + with your website. """ # Create or coerce a protobuf request object. @@ -947,7 +971,7 @@ def import_user_events( timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> operation.Operation: - r"""Bulk import of User events. Request processing might + r"""Bulk import of user events. Request processing might be synchronous. Events that already exist are skipped. Use this method for backfilling historical user events. @@ -1177,6 +1201,63 @@ def get_operation( # Done; return the response. return response + def cancel_operation( + self, + request: Optional[operations_pb2.CancelOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Starts asynchronous cancellation on a long-running operation. + + The server makes a best effort to cancel the operation, but success + is not guaranteed. If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.CancelOperationRequest`): + The request object. Request message for + `CancelOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.CancelOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.cancel_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( gapic_version=package_version.__version__ diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/user_event_service/transports/base.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/user_event_service/transports/base.py index 18484a06ed6f..5eb600bbe5ed 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/user_event_service/transports/base.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/user_event_service/transports/base.py @@ -221,6 +221,12 @@ def get_operation( ]: raise NotImplementedError() + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None,]: + raise NotImplementedError() + @property def kind(self) -> str: raise NotImplementedError() diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/user_event_service/transports/grpc.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/user_event_service/transports/grpc.py index fa888f1951fc..ea02befd7420 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/user_event_service/transports/grpc.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/user_event_service/transports/grpc.py @@ -320,7 +320,7 @@ def import_user_events( ) -> Callable[[import_config.ImportUserEventsRequest], operations_pb2.Operation]: r"""Return a callable for the import user events method over gRPC. - Bulk import of User events. Request processing might + Bulk import of user events. Request processing might be synchronous. Events that already exist are skipped. Use this method for backfilling historical user events. @@ -350,6 +350,23 @@ def import_user_events( def close(self): self.grpc_channel.close() + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None]: + r"""Return a callable for the cancel_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "cancel_operation" not in self._stubs: + self._stubs["cancel_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/CancelOperation", + request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["cancel_operation"] + @property def get_operation( self, diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/user_event_service/transports/grpc_asyncio.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/user_event_service/transports/grpc_asyncio.py index ce94c61fe981..c56974736fee 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/user_event_service/transports/grpc_asyncio.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/user_event_service/transports/grpc_asyncio.py @@ -332,7 +332,7 @@ def import_user_events( ]: r"""Return a callable for the import user events method over gRPC. - Bulk import of User events. Request processing might + Bulk import of user events. Request processing might be synchronous. Events that already exist are skipped. Use this method for backfilling historical user events. @@ -391,6 +391,23 @@ def _prep_wrapped_messages(self, client_info): def close(self): return self.grpc_channel.close() + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None]: + r"""Return a callable for the cancel_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "cancel_operation" not in self._stubs: + self._stubs["cancel_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/CancelOperation", + request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["cancel_operation"] + @property def get_operation( self, diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/user_event_service/transports/rest.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/user_event_service/transports/rest.py index 6c544a8d63df..5c6faf2e8a56 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/user_event_service/transports/rest.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/user_event_service/transports/rest.py @@ -176,6 +176,27 @@ def post_write_user_event( """ return response + def pre_cancel_operation( + self, + request: operations_pb2.CancelOperationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[operations_pb2.CancelOperationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for cancel_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the UserEventService server. + """ + return request, metadata + + def post_cancel_operation(self, response: None) -> None: + """Post-rpc interceptor for cancel_operation + + Override in a subclass to manipulate the response + after it is returned by the UserEventService server but before + it is returned to user code. 
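The pre/post hooks being added here follow the standard GAPIC REST interceptor pattern, so a caller can observe or tweak the new CancelOperation call without touching the generated client. A minimal sketch, assuming the interceptor class defined elsewhere in this rest.py module is named `UserEventServiceRestInterceptor` per GAPIC conventions (its definition is outside this excerpt); the subclass and logger names are illustrative, not part of this PR.

```python
# Sketch only: subclass the REST interceptor to log cancellations.
# `UserEventServiceRestInterceptor` is assumed from the surrounding rest.py
# module; it is not shown in this excerpt.
import logging
from typing import Sequence, Tuple

from google.cloud.discoveryengine_v1.services.user_event_service.transports.rest import (
    UserEventServiceRestInterceptor,
)
from google.longrunning import operations_pb2

logger = logging.getLogger(__name__)


class LoggingUserEventInterceptor(UserEventServiceRestInterceptor):
    def pre_cancel_operation(
        self,
        request: operations_pb2.CancelOperationRequest,
        metadata: Sequence[Tuple[str, str]],
    ) -> Tuple[operations_pb2.CancelOperationRequest, Sequence[Tuple[str, str]]]:
        # Log which long-running operation is being cancelled, then hand the
        # request and metadata back unchanged.
        logger.info("Cancelling long-running operation %s", request.name)
        return request, metadata
```

The subclass would then be passed to the REST transport (and the transport to the client) in the usual way; that wiring is unchanged by this PR.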
+ """ + return response + def pre_get_operation( self, request: operations_pb2.GetOperationRequest, @@ -331,6 +352,23 @@ def operations_client(self) -> operations_v1.AbstractOperationsClient: # Only create a new client if we do not already have one. if self._operations_client is None: http_options: Dict[str, List[Dict[str, str]]] = { + "google.longrunning.Operations.CancelOperation": [ + { + "method": "post", + "uri": "/v1/{name=projects/*/operations/*}:cancel", + "body": "*", + }, + { + "method": "post", + "uri": "/v1/{name=projects/*/locations/*/collections/*/dataStores/*/branches/*/operations/*}:cancel", + "body": "*", + }, + { + "method": "post", + "uri": "/v1/{name=projects/*/locations/*/dataStores/*/branches/*/operations/*}:cancel", + "body": "*", + }, + ], "google.longrunning.Operations.GetOperation": [ { "method": "get", @@ -756,7 +794,7 @@ def __call__( UserEvent captures all metadata information Discovery Engine API needs to know about how end users interact - with customers' website. + with your website. """ @@ -771,6 +809,11 @@ def __call__( "uri": "/v1/{parent=projects/*/locations/*/collections/*/dataStores/*}/userEvents:write", "body": "user_event", }, + { + "method": "post", + "uri": "/v1/{parent=projects/*/locations/*}/userEvents:write", + "body": "user_event", + }, ] request, metadata = self._interceptor.pre_write_user_event( request, metadata @@ -845,6 +888,81 @@ def write_user_event( # In C++ this would require a dynamic_cast return self._WriteUserEvent(self._session, self._host, self._interceptor) # type: ignore + @property + def cancel_operation(self): + return self._CancelOperation(self._session, self._host, self._interceptor) # type: ignore + + class _CancelOperation(UserEventServiceRestStub): + def __call__( + self, + request: operations_pb2.CancelOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Call the cancel operation method over HTTP. + + Args: + request (operations_pb2.CancelOperationRequest): + The request object for CancelOperation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{name=projects/*/operations/*}:cancel", + "body": "*", + }, + { + "method": "post", + "uri": "/v1/{name=projects/*/locations/*/collections/*/dataStores/*/branches/*/operations/*}:cancel", + "body": "*", + }, + { + "method": "post", + "uri": "/v1/{name=projects/*/locations/*/dataStores/*/branches/*/operations/*}:cancel", + "body": "*", + }, + ] + + request, metadata = self._interceptor.pre_cancel_operation( + request, metadata + ) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + body = json.dumps(transcoded_request["body"]) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + return self._interceptor.post_cancel_operation(None) + @property def get_operation(self): return self._GetOperation(self._session, self._host, self._interceptor) # type: ignore diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/types/__init__.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/types/__init__.py index a4eae0066145..1598ca42ab4d 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/types/__init__.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/types/__init__.py @@ -13,17 +13,28 @@ # See the License for the specific language governing permissions and # limitations under the License. 
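Taken together, the client and transport changes above surface the long-running-operations `CancelOperation` mixin and the new `engine_path` helper on the synchronous `UserEventServiceClient`. A hedged usage sketch; the project, data store, and operation IDs below are placeholders, and cancellation remains best-effort as the docstrings note.

```python
# Illustrative only; resource and operation names below are made up.
from google.cloud import discoveryengine_v1
from google.longrunning import operations_pb2

client = discoveryengine_v1.UserEventServiceClient()

# New helper: build a fully-qualified engine resource name.
engine = discoveryengine_v1.UserEventServiceClient.engine_path(
    project="my-project",
    location="global",
    collection="default_collection",
    engine="my-engine",
)

# New mixin: request best-effort cancellation of a long-running operation,
# e.g. a bulk user-event import started earlier.
client.cancel_operation(
    operations_pb2.CancelOperationRequest(
        name=(
            "projects/my-project/locations/global/collections/default_collection"
            "/dataStores/my-data-store/branches/0/operations/import-user-events-123"
        )
    )
)
```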
# +from .answer import Answer from .common import ( CustomAttribute, IndustryVertical, Interval, SearchAddOn, SearchTier, + SearchUseCase, SolutionType, UserInfo, ) from .completion import SuggestionDenyListEntry from .completion_service import CompleteQueryRequest, CompleteQueryResponse +from .control import Condition, Control +from .control_service import ( + CreateControlRequest, + DeleteControlRequest, + GetControlRequest, + ListControlsRequest, + ListControlsResponse, + UpdateControlRequest, +) from .conversation import ( Conversation, ConversationContext, @@ -32,14 +43,23 @@ TextInput, ) from .conversational_search_service import ( + AnswerQueryRequest, + AnswerQueryResponse, ConverseConversationRequest, ConverseConversationResponse, CreateConversationRequest, + CreateSessionRequest, DeleteConversationRequest, + DeleteSessionRequest, + GetAnswerRequest, GetConversationRequest, + GetSessionRequest, ListConversationsRequest, ListConversationsResponse, + ListSessionsRequest, + ListSessionsResponse, UpdateConversationRequest, + UpdateSessionRequest, ) from .data_store import DataStore from .data_store_service import ( @@ -53,6 +73,7 @@ UpdateDataStoreRequest, ) from .document import Document +from .document_processing_config import DocumentProcessingConfig from .document_service import ( CreateDocumentRequest, DeleteDocumentRequest, @@ -72,6 +93,12 @@ ListEnginesResponse, UpdateEngineRequest, ) +from .grounded_generation_service import ( + CheckGroundingRequest, + CheckGroundingResponse, + CheckGroundingSpec, +) +from .grounding import FactChunk, GroundingFact from .import_config import ( BigQuerySource, BigtableOptions, @@ -92,6 +119,8 @@ ImportUserEventsResponse, SpannerSource, ) +from .project import Project +from .project_service import ProvisionProjectMetadata, ProvisionProjectRequest from .purge_config import ( PurgeDocumentsMetadata, PurgeDocumentsRequest, @@ -100,6 +129,7 @@ PurgeSuggestionDenyListEntriesRequest, PurgeSuggestionDenyListEntriesResponse, ) +from .rank_service import RankingRecord, RankRequest, RankResponse from .recommendation_service import RecommendRequest, RecommendResponse from .schema import Schema from .schema_service import ( @@ -114,6 +144,7 @@ UpdateSchemaRequest, ) from .search_service import SearchRequest, SearchResponse +from .session import Query, Session from .site_search_engine import SiteSearchEngine, SiteVerificationInfo, TargetSite from .site_search_engine_service import ( BatchCreateTargetSiteMetadata, @@ -157,29 +188,48 @@ from .user_event_service import CollectUserEventRequest, WriteUserEventRequest __all__ = ( + "Answer", "CustomAttribute", "Interval", "UserInfo", "IndustryVertical", "SearchAddOn", "SearchTier", + "SearchUseCase", "SolutionType", "SuggestionDenyListEntry", "CompleteQueryRequest", "CompleteQueryResponse", + "Condition", + "Control", + "CreateControlRequest", + "DeleteControlRequest", + "GetControlRequest", + "ListControlsRequest", + "ListControlsResponse", + "UpdateControlRequest", "Conversation", "ConversationContext", "ConversationMessage", "Reply", "TextInput", + "AnswerQueryRequest", + "AnswerQueryResponse", "ConverseConversationRequest", "ConverseConversationResponse", "CreateConversationRequest", + "CreateSessionRequest", "DeleteConversationRequest", + "DeleteSessionRequest", + "GetAnswerRequest", "GetConversationRequest", + "GetSessionRequest", "ListConversationsRequest", "ListConversationsResponse", + "ListSessionsRequest", + "ListSessionsResponse", "UpdateConversationRequest", + "UpdateSessionRequest", "DataStore", 
"CreateDataStoreMetadata", "CreateDataStoreRequest", @@ -190,6 +240,7 @@ "ListDataStoresResponse", "UpdateDataStoreRequest", "Document", + "DocumentProcessingConfig", "CreateDocumentRequest", "DeleteDocumentRequest", "GetDocumentRequest", @@ -205,6 +256,11 @@ "ListEnginesRequest", "ListEnginesResponse", "UpdateEngineRequest", + "CheckGroundingRequest", + "CheckGroundingResponse", + "CheckGroundingSpec", + "FactChunk", + "GroundingFact", "BigQuerySource", "BigtableOptions", "BigtableSource", @@ -223,12 +279,18 @@ "ImportUserEventsRequest", "ImportUserEventsResponse", "SpannerSource", + "Project", + "ProvisionProjectMetadata", + "ProvisionProjectRequest", "PurgeDocumentsMetadata", "PurgeDocumentsRequest", "PurgeDocumentsResponse", "PurgeSuggestionDenyListEntriesMetadata", "PurgeSuggestionDenyListEntriesRequest", "PurgeSuggestionDenyListEntriesResponse", + "RankingRecord", + "RankRequest", + "RankResponse", "RecommendRequest", "RecommendResponse", "Schema", @@ -243,6 +305,8 @@ "UpdateSchemaRequest", "SearchRequest", "SearchResponse", + "Query", + "Session", "SiteSearchEngine", "SiteVerificationInfo", "TargetSite", diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/types/answer.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/types/answer.py new file mode 100644 index 000000000000..e01ee1c0385f --- /dev/null +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/types/answer.py @@ -0,0 +1,638 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +from google.protobuf import struct_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +import proto # type: ignore + +__protobuf__ = proto.module( + package="google.cloud.discoveryengine.v1", + manifest={ + "Answer", + }, +) + + +class Answer(proto.Message): + r"""Defines an answer. + + Attributes: + name (str): + Immutable. Fully qualified name + ``projects/{project}/locations/global/collections/{collection}/engines/{engine}/sessions/*/answers/*`` + state (google.cloud.discoveryengine_v1.types.Answer.State): + The state of the answer generation. + answer_text (str): + The textual answer. + citations (MutableSequence[google.cloud.discoveryengine_v1.types.Answer.Citation]): + Citations. + references (MutableSequence[google.cloud.discoveryengine_v1.types.Answer.Reference]): + References. + related_questions (MutableSequence[str]): + Suggested related questions. + steps (MutableSequence[google.cloud.discoveryengine_v1.types.Answer.Step]): + Answer generation steps. + query_understanding_info (google.cloud.discoveryengine_v1.types.Answer.QueryUnderstandingInfo): + Query understanding information. + answer_skipped_reasons (MutableSequence[google.cloud.discoveryengine_v1.types.Answer.AnswerSkippedReason]): + Additional answer-skipped reasons. 
This + provides the reason for ignored cases. If + nothing is skipped, this field is not set. + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. Answer creation timestamp. + complete_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. Answer completed timestamp. + """ + + class State(proto.Enum): + r"""Enumeration of the state of the answer generation. + + Values: + STATE_UNSPECIFIED (0): + Unknown. + IN_PROGRESS (1): + Answer generation is currently in progress. + FAILED (2): + Answer generation currently failed. + SUCCEEDED (3): + Answer generation has succeeded. + """ + STATE_UNSPECIFIED = 0 + IN_PROGRESS = 1 + FAILED = 2 + SUCCEEDED = 3 + + class AnswerSkippedReason(proto.Enum): + r"""An enum for answer skipped reasons. + + Values: + ANSWER_SKIPPED_REASON_UNSPECIFIED (0): + Default value. The answer skipped reason is + not specified. + ADVERSARIAL_QUERY_IGNORED (1): + The adversarial query ignored case. + NON_ANSWER_SEEKING_QUERY_IGNORED (2): + The non-answer seeking query ignored case. + OUT_OF_DOMAIN_QUERY_IGNORED (3): + The out-of-domain query ignored case. + + Google skips the answer if there are no + high-relevance search results. + POTENTIAL_POLICY_VIOLATION (4): + The potential policy violation case. + + Google skips the answer if there is a potential + policy violation detected. This includes content + that may be violent or toxic. + """ + ANSWER_SKIPPED_REASON_UNSPECIFIED = 0 + ADVERSARIAL_QUERY_IGNORED = 1 + NON_ANSWER_SEEKING_QUERY_IGNORED = 2 + OUT_OF_DOMAIN_QUERY_IGNORED = 3 + POTENTIAL_POLICY_VIOLATION = 4 + + class Citation(proto.Message): + r"""Citation info for a segment. + + Attributes: + start_index (int): + Index indicates the start of the segment, + measured in bytes (UTF-8 unicode). + end_index (int): + End of the attributed segment, exclusive. + sources (MutableSequence[google.cloud.discoveryengine_v1.types.Answer.CitationSource]): + Citation sources for the attributed segment. + """ + + start_index: int = proto.Field( + proto.INT64, + number=1, + ) + end_index: int = proto.Field( + proto.INT64, + number=2, + ) + sources: MutableSequence["Answer.CitationSource"] = proto.RepeatedField( + proto.MESSAGE, + number=3, + message="Answer.CitationSource", + ) + + class CitationSource(proto.Message): + r"""Citation source. + + Attributes: + reference_id (str): + ID of the citation source. + """ + + reference_id: str = proto.Field( + proto.STRING, + number=1, + ) + + class Reference(proto.Message): + r"""Reference. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + unstructured_document_info (google.cloud.discoveryengine_v1.types.Answer.Reference.UnstructuredDocumentInfo): + Unstructured document information. + + This field is a member of `oneof`_ ``content``. + chunk_info (google.cloud.discoveryengine_v1.types.Answer.Reference.ChunkInfo): + Chunk information. + + This field is a member of `oneof`_ ``content``. + """ + + class UnstructuredDocumentInfo(proto.Message): + r"""Unstructured document information. + + Attributes: + document (str): + Document resource name. + uri (str): + URI for the document. + title (str): + Title. 
+ chunk_contents (MutableSequence[google.cloud.discoveryengine_v1.types.Answer.Reference.UnstructuredDocumentInfo.ChunkContent]): + List of cited chunk contents derived from + document content. + struct_data (google.protobuf.struct_pb2.Struct): + The structured JSON metadata for the + document. It is populated from the struct data + from the Chunk in search result. + """ + + class ChunkContent(proto.Message): + r"""Chunk content. + + Attributes: + content (str): + Chunk textual content. + page_identifier (str): + Page identifier. + """ + + content: str = proto.Field( + proto.STRING, + number=1, + ) + page_identifier: str = proto.Field( + proto.STRING, + number=2, + ) + + document: str = proto.Field( + proto.STRING, + number=1, + ) + uri: str = proto.Field( + proto.STRING, + number=2, + ) + title: str = proto.Field( + proto.STRING, + number=3, + ) + chunk_contents: MutableSequence[ + "Answer.Reference.UnstructuredDocumentInfo.ChunkContent" + ] = proto.RepeatedField( + proto.MESSAGE, + number=4, + message="Answer.Reference.UnstructuredDocumentInfo.ChunkContent", + ) + struct_data: struct_pb2.Struct = proto.Field( + proto.MESSAGE, + number=5, + message=struct_pb2.Struct, + ) + + class ChunkInfo(proto.Message): + r"""Chunk information. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + chunk (str): + Chunk resource name. + content (str): + Chunk textual content. + relevance_score (float): + Relevance score. + + This field is a member of `oneof`_ ``_relevance_score``. + document_metadata (google.cloud.discoveryengine_v1.types.Answer.Reference.ChunkInfo.DocumentMetadata): + Document metadata. + """ + + class DocumentMetadata(proto.Message): + r"""Document metadata. + + Attributes: + document (str): + Document resource name. + uri (str): + URI for the document. + title (str): + Title. + page_identifier (str): + Page identifier. + struct_data (google.protobuf.struct_pb2.Struct): + The structured JSON metadata for the + document. It is populated from the struct data + from the Chunk in search result. + """ + + document: str = proto.Field( + proto.STRING, + number=1, + ) + uri: str = proto.Field( + proto.STRING, + number=2, + ) + title: str = proto.Field( + proto.STRING, + number=3, + ) + page_identifier: str = proto.Field( + proto.STRING, + number=4, + ) + struct_data: struct_pb2.Struct = proto.Field( + proto.MESSAGE, + number=5, + message=struct_pb2.Struct, + ) + + chunk: str = proto.Field( + proto.STRING, + number=1, + ) + content: str = proto.Field( + proto.STRING, + number=2, + ) + relevance_score: float = proto.Field( + proto.FLOAT, + number=3, + optional=True, + ) + document_metadata: "Answer.Reference.ChunkInfo.DocumentMetadata" = ( + proto.Field( + proto.MESSAGE, + number=4, + message="Answer.Reference.ChunkInfo.DocumentMetadata", + ) + ) + + unstructured_document_info: "Answer.Reference.UnstructuredDocumentInfo" = ( + proto.Field( + proto.MESSAGE, + number=1, + oneof="content", + message="Answer.Reference.UnstructuredDocumentInfo", + ) + ) + chunk_info: "Answer.Reference.ChunkInfo" = proto.Field( + proto.MESSAGE, + number=2, + oneof="content", + message="Answer.Reference.ChunkInfo", + ) + + class Step(proto.Message): + r"""Step information. + + Attributes: + state (google.cloud.discoveryengine_v1.types.Answer.Step.State): + The state of the step. + description (str): + The description of the step. + thought (str): + The thought of the step. 
+ actions (MutableSequence[google.cloud.discoveryengine_v1.types.Answer.Step.Action]): + Actions. + """ + + class State(proto.Enum): + r"""Enumeration of the state of the step. + + Values: + STATE_UNSPECIFIED (0): + Unknown. + IN_PROGRESS (1): + Step is currently in progress. + FAILED (2): + Step currently failed. + SUCCEEDED (3): + Step has succeeded. + """ + STATE_UNSPECIFIED = 0 + IN_PROGRESS = 1 + FAILED = 2 + SUCCEEDED = 3 + + class Action(proto.Message): + r"""Action. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + search_action (google.cloud.discoveryengine_v1.types.Answer.Step.Action.SearchAction): + Search action. + + This field is a member of `oneof`_ ``action``. + observation (google.cloud.discoveryengine_v1.types.Answer.Step.Action.Observation): + Observation. + """ + + class SearchAction(proto.Message): + r"""Search action. + + Attributes: + query (str): + The query to search. + """ + + query: str = proto.Field( + proto.STRING, + number=1, + ) + + class Observation(proto.Message): + r"""Observation. + + Attributes: + search_results (MutableSequence[google.cloud.discoveryengine_v1.types.Answer.Step.Action.Observation.SearchResult]): + Search results observed by the search action, + it can be snippets info or chunk info, depending + on the citation type set by the user. + """ + + class SearchResult(proto.Message): + r""" + + Attributes: + document (str): + Document resource name. + uri (str): + URI for the document. + title (str): + Title. + snippet_info (MutableSequence[google.cloud.discoveryengine_v1.types.Answer.Step.Action.Observation.SearchResult.SnippetInfo]): + If citation_type is DOCUMENT_LEVEL_CITATION, populate + document level snippets. + chunk_info (MutableSequence[google.cloud.discoveryengine_v1.types.Answer.Step.Action.Observation.SearchResult.ChunkInfo]): + If citation_type is CHUNK_LEVEL_CITATION and chunk mode is + on, populate chunk info. + """ + + class SnippetInfo(proto.Message): + r"""Snippet information. + + Attributes: + snippet (str): + Snippet content. + snippet_status (str): + Status of the snippet defined by the search + team. + """ + + snippet: str = proto.Field( + proto.STRING, + number=1, + ) + snippet_status: str = proto.Field( + proto.STRING, + number=2, + ) + + class ChunkInfo(proto.Message): + r"""Chunk information. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + chunk (str): + Chunk resource name. + content (str): + Chunk textual content. + relevance_score (float): + Relevance score. + + This field is a member of `oneof`_ ``_relevance_score``. 
+ """ + + chunk: str = proto.Field( + proto.STRING, + number=1, + ) + content: str = proto.Field( + proto.STRING, + number=2, + ) + relevance_score: float = proto.Field( + proto.FLOAT, + number=3, + optional=True, + ) + + document: str = proto.Field( + proto.STRING, + number=1, + ) + uri: str = proto.Field( + proto.STRING, + number=2, + ) + title: str = proto.Field( + proto.STRING, + number=3, + ) + snippet_info: MutableSequence[ + "Answer.Step.Action.Observation.SearchResult.SnippetInfo" + ] = proto.RepeatedField( + proto.MESSAGE, + number=4, + message="Answer.Step.Action.Observation.SearchResult.SnippetInfo", + ) + chunk_info: MutableSequence[ + "Answer.Step.Action.Observation.SearchResult.ChunkInfo" + ] = proto.RepeatedField( + proto.MESSAGE, + number=5, + message="Answer.Step.Action.Observation.SearchResult.ChunkInfo", + ) + + search_results: MutableSequence[ + "Answer.Step.Action.Observation.SearchResult" + ] = proto.RepeatedField( + proto.MESSAGE, + number=2, + message="Answer.Step.Action.Observation.SearchResult", + ) + + search_action: "Answer.Step.Action.SearchAction" = proto.Field( + proto.MESSAGE, + number=2, + oneof="action", + message="Answer.Step.Action.SearchAction", + ) + observation: "Answer.Step.Action.Observation" = proto.Field( + proto.MESSAGE, + number=3, + message="Answer.Step.Action.Observation", + ) + + state: "Answer.Step.State" = proto.Field( + proto.ENUM, + number=1, + enum="Answer.Step.State", + ) + description: str = proto.Field( + proto.STRING, + number=2, + ) + thought: str = proto.Field( + proto.STRING, + number=3, + ) + actions: MutableSequence["Answer.Step.Action"] = proto.RepeatedField( + proto.MESSAGE, + number=4, + message="Answer.Step.Action", + ) + + class QueryUnderstandingInfo(proto.Message): + r"""Query understanding information. + + Attributes: + query_classification_info (MutableSequence[google.cloud.discoveryengine_v1.types.Answer.QueryUnderstandingInfo.QueryClassificationInfo]): + Query classification information. + """ + + class QueryClassificationInfo(proto.Message): + r"""Query classification information. + + Attributes: + type_ (google.cloud.discoveryengine_v1.types.Answer.QueryUnderstandingInfo.QueryClassificationInfo.Type): + Query classification type. + positive (bool): + Classification output. + """ + + class Type(proto.Enum): + r"""Query classification types. + + Values: + TYPE_UNSPECIFIED (0): + Unspecified query classification type. + ADVERSARIAL_QUERY (1): + Adversarial query classification type. + NON_ANSWER_SEEKING_QUERY (2): + Non-answer-seeking query classification type. 
+ """ + TYPE_UNSPECIFIED = 0 + ADVERSARIAL_QUERY = 1 + NON_ANSWER_SEEKING_QUERY = 2 + + type_: "Answer.QueryUnderstandingInfo.QueryClassificationInfo.Type" = ( + proto.Field( + proto.ENUM, + number=1, + enum="Answer.QueryUnderstandingInfo.QueryClassificationInfo.Type", + ) + ) + positive: bool = proto.Field( + proto.BOOL, + number=2, + ) + + query_classification_info: MutableSequence[ + "Answer.QueryUnderstandingInfo.QueryClassificationInfo" + ] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="Answer.QueryUnderstandingInfo.QueryClassificationInfo", + ) + + name: str = proto.Field( + proto.STRING, + number=1, + ) + state: State = proto.Field( + proto.ENUM, + number=2, + enum=State, + ) + answer_text: str = proto.Field( + proto.STRING, + number=3, + ) + citations: MutableSequence[Citation] = proto.RepeatedField( + proto.MESSAGE, + number=4, + message=Citation, + ) + references: MutableSequence[Reference] = proto.RepeatedField( + proto.MESSAGE, + number=5, + message=Reference, + ) + related_questions: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=6, + ) + steps: MutableSequence[Step] = proto.RepeatedField( + proto.MESSAGE, + number=7, + message=Step, + ) + query_understanding_info: QueryUnderstandingInfo = proto.Field( + proto.MESSAGE, + number=10, + message=QueryUnderstandingInfo, + ) + answer_skipped_reasons: MutableSequence[AnswerSkippedReason] = proto.RepeatedField( + proto.ENUM, + number=11, + enum=AnswerSkippedReason, + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=8, + message=timestamp_pb2.Timestamp, + ) + complete_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=9, + message=timestamp_pb2.Timestamp, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/types/common.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/types/common.py index 0dc2c5d3c229..1c61c2369002 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/types/common.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/types/common.py @@ -24,6 +24,7 @@ manifest={ "IndustryVertical", "SolutionType", + "SearchUseCase", "SearchTier", "SearchAddOn", "Interval", @@ -80,6 +81,26 @@ class SolutionType(proto.Enum): SOLUTION_TYPE_GENERATIVE_CHAT = 4 +class SearchUseCase(proto.Enum): + r"""Defines a further subdivision of ``SolutionType``. Specifically + applies to + [SOLUTION_TYPE_SEARCH][google.cloud.discoveryengine.v1.SolutionType.SOLUTION_TYPE_SEARCH]. + + Values: + SEARCH_USE_CASE_UNSPECIFIED (0): + Value used when unset. Will not occur in CSS. + SEARCH_USE_CASE_SEARCH (1): + Search use case. Expects the traffic has a non-empty + [query][google.cloud.discoveryengine.v1.SearchRequest.query]. + SEARCH_USE_CASE_BROWSE (2): + Browse use case. Expects the traffic has an empty + [query][google.cloud.discoveryengine.v1.SearchRequest.query]. + """ + SEARCH_USE_CASE_UNSPECIFIED = 0 + SEARCH_USE_CASE_SEARCH = 1 + SEARCH_USE_CASE_BROWSE = 2 + + class SearchTier(proto.Enum): r"""Tiers of search features. Different tiers might have different pricing. 
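The new `Answer` message above is essentially output-only, so the natural consumer-side view is reading its state, text, citations, and skip reasons after an answer call. A minimal sketch, assuming an `answer` object obtained from the Answer API added elsewhere in this PR; nothing below is part of the generated code.

```python
# Sketch: inspect an Answer returned by the service. The `answer` argument is
# assumed to come from an AnswerQuery/GetAnswer call not shown here.
from google.cloud import discoveryengine_v1


def summarize_answer(answer: discoveryengine_v1.Answer) -> None:
    if answer.state != discoveryengine_v1.Answer.State.SUCCEEDED:
        print(f"Answer not ready or failed: {answer.state.name}")
        return

    print(answer.answer_text)

    # Citations point back into the answer text by byte range and refer to
    # references by ID.
    for citation in answer.citations:
        source_ids = [source.reference_id for source in citation.sources]
        print(f"  bytes [{citation.start_index}, {citation.end_index}) -> {source_ids}")

    # If the service declined to answer (e.g. adversarial query), say why.
    for reason in answer.answer_skipped_reasons:
        print(f"  skipped: {reason.name}")
```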
To learn more, check the pricing diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/types/control.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/types/control.py new file mode 100644 index 000000000000..db871cdc66b1 --- /dev/null +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/types/control.py @@ -0,0 +1,331 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +from google.protobuf import timestamp_pb2 # type: ignore +import proto # type: ignore + +from google.cloud.discoveryengine_v1.types import common + +__protobuf__ = proto.module( + package="google.cloud.discoveryengine.v1", + manifest={ + "Condition", + "Control", + }, +) + + +class Condition(proto.Message): + r"""Defines circumstances to be checked before allowing a + behavior + + Attributes: + query_terms (MutableSequence[google.cloud.discoveryengine_v1.types.Condition.QueryTerm]): + Search only + A list of terms to match the query on. + + Maximum of 10 query terms. + active_time_range (MutableSequence[google.cloud.discoveryengine_v1.types.Condition.TimeRange]): + Range of time(s) specifying when condition is + active. + Maximum of 10 time ranges. + """ + + class QueryTerm(proto.Message): + r"""Matcher for search request query + + Attributes: + value (str): + The specific query value to match against + + Must be lowercase, must be UTF-8. Can have at most 3 space + separated terms if full_match is true. Cannot be an empty + string. Maximum length of 5000 characters. + full_match (bool): + Whether the search query needs to exactly + match the query term. + """ + + value: str = proto.Field( + proto.STRING, + number=1, + ) + full_match: bool = proto.Field( + proto.BOOL, + number=2, + ) + + class TimeRange(proto.Message): + r"""Used for time-dependent conditions. + + Attributes: + start_time (google.protobuf.timestamp_pb2.Timestamp): + Start of time range. + + Range is inclusive. + end_time (google.protobuf.timestamp_pb2.Timestamp): + End of time range. + + Range is inclusive. + Must be in the future. + """ + + start_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=1, + message=timestamp_pb2.Timestamp, + ) + end_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + + query_terms: MutableSequence[QueryTerm] = proto.RepeatedField( + proto.MESSAGE, + number=2, + message=QueryTerm, + ) + active_time_range: MutableSequence[TimeRange] = proto.RepeatedField( + proto.MESSAGE, + number=3, + message=TimeRange, + ) + + +class Control(proto.Message): + r"""Defines a conditioned behavior to employ during serving. Must be + attached to a [ServingConfig][] to be considered at serving time. + Permitted actions dependent on ``SolutionType``. 
+ + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + boost_action (google.cloud.discoveryengine_v1.types.Control.BoostAction): + Defines a boost-type control + + This field is a member of `oneof`_ ``action``. + filter_action (google.cloud.discoveryengine_v1.types.Control.FilterAction): + Defines a filter-type control + Currently not supported by Recommendation + + This field is a member of `oneof`_ ``action``. + redirect_action (google.cloud.discoveryengine_v1.types.Control.RedirectAction): + Defines a redirect-type control. + + This field is a member of `oneof`_ ``action``. + synonyms_action (google.cloud.discoveryengine_v1.types.Control.SynonymsAction): + Treats a group of terms as synonyms of one + another. + + This field is a member of `oneof`_ ``action``. + name (str): + Immutable. Fully qualified name + ``projects/*/locations/global/dataStore/*/controls/*`` + display_name (str): + Required. Human readable name. The identifier + used in UI views. + Must be UTF-8 encoded string. Length limit is + 128 characters. Otherwise an INVALID ARGUMENT + error is thrown. + associated_serving_config_ids (MutableSequence[str]): + Output only. List of all [ServingConfig][] ids this control + is attached to. May take up to 10 minutes to update after + changes. + solution_type (google.cloud.discoveryengine_v1.types.SolutionType): + Required. Immutable. What solution the + control belongs to. + Must be compatible with vertical of resource. + Otherwise an INVALID ARGUMENT error is thrown. + use_cases (MutableSequence[google.cloud.discoveryengine_v1.types.SearchUseCase]): + Specifies the use case for the control. Affects what + condition fields can be set. Only applies to + [SOLUTION_TYPE_SEARCH][google.cloud.discoveryengine.v1.SolutionType.SOLUTION_TYPE_SEARCH]. + Currently only allow one use case per control. Must be set + when solution_type is + [SolutionType.SOLUTION_TYPE_SEARCH][google.cloud.discoveryengine.v1.SolutionType.SOLUTION_TYPE_SEARCH]. + conditions (MutableSequence[google.cloud.discoveryengine_v1.types.Condition]): + Determines when the associated action will + trigger. + Omit to always apply the action. + Currently only a single condition may be + specified. Otherwise an INVALID ARGUMENT error + is thrown. + """ + + class BoostAction(proto.Message): + r"""Adjusts order of products in returned list. + + Attributes: + boost (float): + Required. Strength of the boost, which should be in [-1, 1]. + Negative boost means demotion. Default is 0.0 (No-op). + filter (str): + Required. Specifies which products to apply + the boost to. + If no filter is provided all products will be + boosted (No-op). Syntax documentation: + + https://cloud.google.com/retail/docs/filter-and-order + Maximum length is 5000 characters. + Otherwise an INVALID ARGUMENT error is thrown. + data_store (str): + Required. Specifies which data store's documents can be + boosted by this control. Full data store name e.g. 
+ projects/123/locations/global/collections/default_collection/dataStores/default_data_store + """ + + boost: float = proto.Field( + proto.FLOAT, + number=1, + ) + filter: str = proto.Field( + proto.STRING, + number=2, + ) + data_store: str = proto.Field( + proto.STRING, + number=3, + ) + + class FilterAction(proto.Message): + r"""Specified which products may be included in results. + Uses same filter as boost. + + Attributes: + filter (str): + Required. A filter to apply on the matching + condition results. + Required + Syntax documentation: + + https://cloud.google.com/retail/docs/filter-and-order + Maximum length is 5000 characters. Otherwise an + INVALID ARGUMENT error is thrown. + data_store (str): + Required. Specifies which data store's documents can be + filtered by this control. Full data store name e.g. + projects/123/locations/global/collections/default_collection/dataStores/default_data_store + """ + + filter: str = proto.Field( + proto.STRING, + number=1, + ) + data_store: str = proto.Field( + proto.STRING, + number=2, + ) + + class RedirectAction(proto.Message): + r"""Redirects a shopper to the provided URI. + + Attributes: + redirect_uri (str): + Required. The URI to which the shopper will + be redirected. + Required. + URI must have length equal or less than 2000 + characters. Otherwise an INVALID ARGUMENT error + is thrown. + """ + + redirect_uri: str = proto.Field( + proto.STRING, + number=1, + ) + + class SynonymsAction(proto.Message): + r"""Creates a set of terms that will act as synonyms of one + another. + Example: "happy" will also be considered as "glad", "glad" will + also be considered as "happy". + + Attributes: + synonyms (MutableSequence[str]): + Defines a set of synonyms. + Can specify up to 100 synonyms. + Must specify at least 2 synonyms. Otherwise an + INVALID ARGUMENT error is thrown. 
+ """ + + synonyms: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=1, + ) + + boost_action: BoostAction = proto.Field( + proto.MESSAGE, + number=6, + oneof="action", + message=BoostAction, + ) + filter_action: FilterAction = proto.Field( + proto.MESSAGE, + number=7, + oneof="action", + message=FilterAction, + ) + redirect_action: RedirectAction = proto.Field( + proto.MESSAGE, + number=9, + oneof="action", + message=RedirectAction, + ) + synonyms_action: SynonymsAction = proto.Field( + proto.MESSAGE, + number=10, + oneof="action", + message=SynonymsAction, + ) + name: str = proto.Field( + proto.STRING, + number=1, + ) + display_name: str = proto.Field( + proto.STRING, + number=2, + ) + associated_serving_config_ids: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=3, + ) + solution_type: common.SolutionType = proto.Field( + proto.ENUM, + number=4, + enum=common.SolutionType, + ) + use_cases: MutableSequence[common.SearchUseCase] = proto.RepeatedField( + proto.ENUM, + number=8, + enum=common.SearchUseCase, + ) + conditions: MutableSequence["Condition"] = proto.RepeatedField( + proto.MESSAGE, + number=5, + message="Condition", + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/types/control_service.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/types/control_service.py new file mode 100644 index 000000000000..5b85d46e6efc --- /dev/null +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/types/control_service.py @@ -0,0 +1,202 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +from google.protobuf import field_mask_pb2 # type: ignore +import proto # type: ignore + +from google.cloud.discoveryengine_v1.types import control as gcd_control + +__protobuf__ = proto.module( + package="google.cloud.discoveryengine.v1", + manifest={ + "CreateControlRequest", + "UpdateControlRequest", + "DeleteControlRequest", + "GetControlRequest", + "ListControlsRequest", + "ListControlsResponse", + }, +) + + +class CreateControlRequest(proto.Message): + r"""Request for CreateControl method. + + Attributes: + parent (str): + Required. Full resource name of parent data store. Format: + ``projects/{project_number}/locations/{location_id}/collections/{collection_id}/dataStores/{data_store_id}`` + or + ``projects/{project_number}/locations/{location_id}/collections/{collection_id}/engines/{engine_id}``. + control (google.cloud.discoveryengine_v1.types.Control): + Required. The Control to create. + control_id (str): + Required. The ID to use for the Control, which will become + the final component of the Control's resource name. + + This value must be within 1-63 characters. Valid characters + are /[a-z][0-9]-_/. 
+ """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + control: gcd_control.Control = proto.Field( + proto.MESSAGE, + number=2, + message=gcd_control.Control, + ) + control_id: str = proto.Field( + proto.STRING, + number=3, + ) + + +class UpdateControlRequest(proto.Message): + r"""Request for UpdateControl method. + + Attributes: + control (google.cloud.discoveryengine_v1.types.Control): + Required. The Control to update. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Optional. Indicates which fields in the provided + [Control][google.cloud.discoveryengine.v1.Control] to + update. The following are NOT supported: + + - [Control.name][google.cloud.discoveryengine.v1.Control.name] + - [Control.solution_type][google.cloud.discoveryengine.v1.Control.solution_type] + + If not set or empty, all supported fields are updated. + """ + + control: gcd_control.Control = proto.Field( + proto.MESSAGE, + number=1, + message=gcd_control.Control, + ) + update_mask: field_mask_pb2.FieldMask = proto.Field( + proto.MESSAGE, + number=2, + message=field_mask_pb2.FieldMask, + ) + + +class DeleteControlRequest(proto.Message): + r"""Request for DeleteControl method. + + Attributes: + name (str): + Required. The resource name of the Control to delete. + Format: + ``projects/{project_number}/locations/{location_id}/collections/{collection_id}/dataStores/{data_store_id}/controls/{control_id}`` + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class GetControlRequest(proto.Message): + r"""Request for GetControl method. + + Attributes: + name (str): + Required. The resource name of the Control to get. Format: + ``projects/{project_number}/locations/{location_id}/collections/{collection_id}/dataStores/{data_store_id}/controls/{control_id}`` + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class ListControlsRequest(proto.Message): + r"""Request for ListControls method. + + Attributes: + parent (str): + Required. The data store resource name. Format: + ``projects/{project_number}/locations/{location_id}/collections/{collection_id}/dataStores/{data_store_id}`` + or + ``projects/{project_number}/locations/{location_id}/collections/{collection_id}/engines/{engine_id}``. + page_size (int): + Optional. Maximum number of results to + return. If unspecified, defaults to 50. Max + allowed value is 1000. + page_token (str): + Optional. A page token, received from a previous + ``ListControls`` call. Provide this to retrieve the + subsequent page. + filter (str): + Optional. A filter to apply on the list results. Supported + features: + + - List all the products under the parent branch if + [filter][google.cloud.discoveryengine.v1.ListControlsRequest.filter] + is unset. Currently this field is unsupported. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + filter: str = proto.Field( + proto.STRING, + number=4, + ) + + +class ListControlsResponse(proto.Message): + r"""Response for ListControls method. + + Attributes: + controls (MutableSequence[google.cloud.discoveryengine_v1.types.Control]): + All the Controls for a given data store. + next_page_token (str): + Pagination token, if not returned indicates + the last page. 
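The `Control` message and the ControlService request types above are plain proto-plus messages, so they can be assembled with keyword arguments. A hedged sketch of building a boost control and the corresponding create/list requests; the parent resource name, filter expression, and IDs are placeholders, and the ControlService client that accepts these requests sits outside this excerpt.

```python
# Illustrative only; resource names, the filter, and IDs are placeholders.
from google.cloud import discoveryengine_v1

parent = (
    "projects/123/locations/global/collections/default_collection"
    "/dataStores/default_data_store"
)

control = discoveryengine_v1.Control(
    display_name="Boost King Kong pages",
    solution_type=discoveryengine_v1.SolutionType.SOLUTION_TYPE_SEARCH,
    use_cases=[discoveryengine_v1.SearchUseCase.SEARCH_USE_CASE_SEARCH],
    conditions=[
        discoveryengine_v1.Condition(
            query_terms=[
                discoveryengine_v1.Condition.QueryTerm(
                    value="king kong", full_match=True
                )
            ]
        )
    ],
    boost_action=discoveryengine_v1.Control.BoostAction(
        boost=0.5,
        filter='category: ANY("movies")',
        data_store=parent,
    ),
)

create_request = discoveryengine_v1.CreateControlRequest(
    parent=parent,
    control=control,
    control_id="boost-king-kong",
)

list_request = discoveryengine_v1.ListControlsRequest(parent=parent, page_size=50)
```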
+ """ + + @property + def raw_page(self): + return self + + controls: MutableSequence[gcd_control.Control] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=gcd_control.Control, + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/types/conversation.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/types/conversation.py index 75a58da2a3b4..1f92a8b8ea5c 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/types/conversation.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/types/conversation.py @@ -40,9 +40,9 @@ class Conversation(proto.Message): Attributes: name (str): Immutable. Fully qualified name - ``project/*/locations/global/collections/{collection}/dataStore/*/conversations/*`` + ``projects/{project}/locations/global/collections/{collection}/dataStore/*/conversations/*`` or - ``project/*/locations/global/collections/{collection}/engines/*/conversations/*``. + ``projects/{project}/locations/global/collections/{collection}/engines/*/conversations/*``. state (google.cloud.discoveryengine_v1.types.Conversation.State): The state of the Conversation. user_pseudo_id (str): diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/types/conversational_search_service.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/types/conversational_search_service.py index 54f131c0a7a3..f7eb3d244f2b 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/types/conversational_search_service.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/types/conversational_search_service.py @@ -21,7 +21,9 @@ import proto # type: ignore from google.cloud.discoveryengine_v1.types import conversation as gcd_conversation +from google.cloud.discoveryengine_v1.types import answer as gcd_answer from google.cloud.discoveryengine_v1.types import search_service +from google.cloud.discoveryengine_v1.types import session as gcd_session __protobuf__ = proto.module( package="google.cloud.discoveryengine.v1", @@ -34,6 +36,15 @@ "GetConversationRequest", "ListConversationsRequest", "ListConversationsResponse", + "AnswerQueryRequest", + "AnswerQueryResponse", + "GetAnswerRequest", + "CreateSessionRequest", + "UpdateSessionRequest", + "DeleteSessionRequest", + "GetSessionRequest", + "ListSessionsRequest", + "ListSessionsResponse", }, ) @@ -356,4 +367,823 @@ def raw_page(self): ) +class AnswerQueryRequest(proto.Message): + r"""Request message for + [ConversationalSearchService.AnswerQuery][google.cloud.discoveryengine.v1.ConversationalSearchService.AnswerQuery] + method. + + Attributes: + serving_config (str): + Required. The resource name of the Search serving config, + such as + ``projects/*/locations/global/collections/default_collection/engines/*/servingConfigs/default_serving_config``, + or + ``projects/*/locations/global/collections/default_collection/dataStores/*/servingConfigs/default_serving_config``. + This field is used to identify the serving configuration + name, set of models used to make the search. + query (google.cloud.discoveryengine_v1.types.Query): + Required. Current user query. + session (str): + The session resource name. Not required. + + When session field is not set, the API is in sessionless + mode. 
+ + We support auto session mode: users can use the wildcard + symbol ``-`` as session ID. A new ID will be automatically + generated and assigned. + safety_spec (google.cloud.discoveryengine_v1.types.AnswerQueryRequest.SafetySpec): + Model specification. + related_questions_spec (google.cloud.discoveryengine_v1.types.AnswerQueryRequest.RelatedQuestionsSpec): + Related questions specification. + answer_generation_spec (google.cloud.discoveryengine_v1.types.AnswerQueryRequest.AnswerGenerationSpec): + Answer generation specification. + search_spec (google.cloud.discoveryengine_v1.types.AnswerQueryRequest.SearchSpec): + Search specification. + query_understanding_spec (google.cloud.discoveryengine_v1.types.AnswerQueryRequest.QueryUnderstandingSpec): + Query understanding specification. + asynchronous_mode (bool): + Asynchronous mode control. + + If enabled, the response will be returned with + answer/session resource name without final answer. The API + users need to do the polling to get the latest status of + answer/session by calling + [ConversationalSearchService.GetAnswer][google.cloud.discoveryengine.v1.ConversationalSearchService.GetAnswer] + or + [ConversationalSearchService.GetSession][google.cloud.discoveryengine.v1.ConversationalSearchService.GetSession] + method. + user_pseudo_id (str): + A unique identifier for tracking visitors. For example, this + could be implemented with an HTTP cookie, which should be + able to uniquely identify a visitor on a single device. This + unique identifier should not change if the visitor logs in + or out of the website. + + This field should NOT have a fixed value such as + ``unknown_visitor``. + + The field must be a UTF-8 encoded string with a length limit + of 128 characters. Otherwise, an ``INVALID_ARGUMENT`` error + is returned. + """ + + class SafetySpec(proto.Message): + r"""Safety specification. + + Attributes: + enable (bool): + Enable the safety filtering on the answer + response. It is false by default. + """ + + enable: bool = proto.Field( + proto.BOOL, + number=1, + ) + + class RelatedQuestionsSpec(proto.Message): + r"""Related questions specification. + + Attributes: + enable (bool): + Enable related questions feature if true. + """ + + enable: bool = proto.Field( + proto.BOOL, + number=1, + ) + + class AnswerGenerationSpec(proto.Message): + r"""Answer generation specification. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + model_spec (google.cloud.discoveryengine_v1.types.AnswerQueryRequest.AnswerGenerationSpec.ModelSpec): + Answer generation model specification. + prompt_spec (google.cloud.discoveryengine_v1.types.AnswerQueryRequest.AnswerGenerationSpec.PromptSpec): + Answer generation prompt specification. + include_citations (bool): + Specifies whether to include citation metadata in the + answer. The default value is ``false``. + answer_language_code (str): + Language code for Answer. Use language tags defined by + `BCP47 `__. + Note: This is an experimental feature. + ignore_adversarial_query (bool): + Specifies whether to filter out adversarial queries. The + default value is ``false``. + + Google employs search-query classification to detect + adversarial queries. No answer is returned if the search + query is classified as an adversarial query. 
For example, a + user might ask a question regarding negative comments about + the company or submit a query designed to generate unsafe, + policy-violating output. If this field is set to ``true``, + we skip generating answers for adversarial queries and + return fallback messages instead. + ignore_non_answer_seeking_query (bool): + Specifies whether to filter out queries that are not + answer-seeking. The default value is ``false``. + + Google employs search-query classification to detect + answer-seeking queries. No answer is returned if the search + query is classified as a non-answer seeking query. If this + field is set to ``true``, we skip generating answers for + non-answer seeking queries and return fallback messages + instead. + ignore_low_relevant_content (bool): + Specifies whether to filter out queries that have low + relevance. + + If this field is set to ``false``, all search results are + used regardless of relevance to generate answers. If set to + ``true`` or unset, the behavior will be determined + automatically by the service. + + This field is a member of `oneof`_ ``_ignore_low_relevant_content``. + """ + + class ModelSpec(proto.Message): + r"""Answer Generation Model specification. + + Attributes: + model_version (str): + Model version. If not set, it will use the + default stable model. Allowed values are: + stable, preview. + """ + + model_version: str = proto.Field( + proto.STRING, + number=1, + ) + + class PromptSpec(proto.Message): + r"""Answer generation prompt specification. + + Attributes: + preamble (str): + Customized preamble. + """ + + preamble: str = proto.Field( + proto.STRING, + number=1, + ) + + model_spec: "AnswerQueryRequest.AnswerGenerationSpec.ModelSpec" = proto.Field( + proto.MESSAGE, + number=1, + message="AnswerQueryRequest.AnswerGenerationSpec.ModelSpec", + ) + prompt_spec: "AnswerQueryRequest.AnswerGenerationSpec.PromptSpec" = proto.Field( + proto.MESSAGE, + number=2, + message="AnswerQueryRequest.AnswerGenerationSpec.PromptSpec", + ) + include_citations: bool = proto.Field( + proto.BOOL, + number=3, + ) + answer_language_code: str = proto.Field( + proto.STRING, + number=4, + ) + ignore_adversarial_query: bool = proto.Field( + proto.BOOL, + number=5, + ) + ignore_non_answer_seeking_query: bool = proto.Field( + proto.BOOL, + number=6, + ) + ignore_low_relevant_content: bool = proto.Field( + proto.BOOL, + number=7, + optional=True, + ) + + class SearchSpec(proto.Message): + r"""Search specification. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + search_params (google.cloud.discoveryengine_v1.types.AnswerQueryRequest.SearchSpec.SearchParams): + Search parameters. + + This field is a member of `oneof`_ ``input``. + search_result_list (google.cloud.discoveryengine_v1.types.AnswerQueryRequest.SearchSpec.SearchResultList): + Search result list. + + This field is a member of `oneof`_ ``input``. + """ + + class SearchParams(proto.Message): + r"""Search parameters. + + Attributes: + max_return_results (int): + Number of search results to return. + The default value is 10. 
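The `AnswerGenerationSpec` above concentrates most of the answer-shaping knobs: model selection, prompt preamble, citations, and the `ignore_*` query filters. A short sketch of how such a spec might be assembled; the preamble text is illustrative, and `"preview"` is simply one of the allowed `model_version` values named in the docstring.

```python
# Sketch only: an answer-generation spec with an explicit model version,
# a custom preamble, and citation metadata enabled.
from google.cloud import discoveryengine_v1

AnswerGenerationSpec = discoveryengine_v1.AnswerQueryRequest.AnswerGenerationSpec

generation_spec = AnswerGenerationSpec(
    model_spec=AnswerGenerationSpec.ModelSpec(model_version="preview"),
    prompt_spec=AnswerGenerationSpec.PromptSpec(
        preamble="Answer concisely and cite your sources."
    ),
    include_citations=True,
    ignore_adversarial_query=True,
    ignore_non_answer_seeking_query=True,
)
```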
+ filter (str): + The filter syntax consists of an expression language for + constructing a predicate from one or more fields of the + documents being filtered. Filter expression is + case-sensitive. This will be used to filter search results + which may affect the Answer response. + + If this field is unrecognizable, an ``INVALID_ARGUMENT`` is + returned. + + Filtering in Vertex AI Search is done by mapping the LHS + filter key to a key property defined in the Vertex AI Search + backend -- this mapping is defined by the customer in their + schema. For example a media customers might have a field + 'name' in their schema. In this case the filter would look + like this: filter --> name:'ANY("king kong")' + + For more information about filtering including syntax and + filter operators, see + `Filter `__ + boost_spec (google.cloud.discoveryengine_v1.types.SearchRequest.BoostSpec): + Boost specification to boost certain documents in search + results which may affect the answer query response. For more + information on boosting, see + `Boosting `__ + order_by (str): + The order in which documents are returned. Documents can be + ordered by a field in an + [Document][google.cloud.discoveryengine.v1.Document] object. + Leave it unset if ordered by relevance. ``order_by`` + expression is case-sensitive. For more information on + ordering, see + `Ordering `__ + + If this field is unrecognizable, an ``INVALID_ARGUMENT`` is + returned. + data_store_specs (MutableSequence[google.cloud.discoveryengine_v1.types.SearchRequest.DataStoreSpec]): + Specs defining dataStores to filter on in a + search call and configurations for those + dataStores. This is only considered for engines + with multiple dataStores use case. For single + dataStore within an engine, they should use the + specs at the top level. + """ + + max_return_results: int = proto.Field( + proto.INT32, + number=1, + ) + filter: str = proto.Field( + proto.STRING, + number=2, + ) + boost_spec: search_service.SearchRequest.BoostSpec = proto.Field( + proto.MESSAGE, + number=3, + message=search_service.SearchRequest.BoostSpec, + ) + order_by: str = proto.Field( + proto.STRING, + number=4, + ) + data_store_specs: MutableSequence[ + search_service.SearchRequest.DataStoreSpec + ] = proto.RepeatedField( + proto.MESSAGE, + number=7, + message=search_service.SearchRequest.DataStoreSpec, + ) + + class SearchResultList(proto.Message): + r"""Search result list. + + Attributes: + search_results (MutableSequence[google.cloud.discoveryengine_v1.types.AnswerQueryRequest.SearchSpec.SearchResultList.SearchResult]): + Search results. + """ + + class SearchResult(proto.Message): + r"""Search result. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + unstructured_document_info (google.cloud.discoveryengine_v1.types.AnswerQueryRequest.SearchSpec.SearchResultList.SearchResult.UnstructuredDocumentInfo): + Unstructured document information. + + This field is a member of `oneof`_ ``content``. + chunk_info (google.cloud.discoveryengine_v1.types.AnswerQueryRequest.SearchSpec.SearchResultList.SearchResult.ChunkInfo): + Chunk information. + + This field is a member of `oneof`_ ``content``. 
+ """ + + class UnstructuredDocumentInfo(proto.Message): + r"""Unstructured document information. + + Attributes: + document (str): + Document resource name. + uri (str): + URI for the document. + title (str): + Title. + document_contexts (MutableSequence[google.cloud.discoveryengine_v1.types.AnswerQueryRequest.SearchSpec.SearchResultList.SearchResult.UnstructuredDocumentInfo.DocumentContext]): + List of document contexts. + extractive_segments (MutableSequence[google.cloud.discoveryengine_v1.types.AnswerQueryRequest.SearchSpec.SearchResultList.SearchResult.UnstructuredDocumentInfo.ExtractiveSegment]): + List of extractive segments. + extractive_answers (MutableSequence[google.cloud.discoveryengine_v1.types.AnswerQueryRequest.SearchSpec.SearchResultList.SearchResult.UnstructuredDocumentInfo.ExtractiveAnswer]): + List of extractive answers. + """ + + class DocumentContext(proto.Message): + r"""Document context. + + Attributes: + page_identifier (str): + Page identifier. + content (str): + Document content. + """ + + page_identifier: str = proto.Field( + proto.STRING, + number=1, + ) + content: str = proto.Field( + proto.STRING, + number=2, + ) + + class ExtractiveSegment(proto.Message): + r"""Extractive segment. + `Guide `__ + + Attributes: + page_identifier (str): + Page identifier. + content (str): + Extractive segment content. + """ + + page_identifier: str = proto.Field( + proto.STRING, + number=1, + ) + content: str = proto.Field( + proto.STRING, + number=2, + ) + + class ExtractiveAnswer(proto.Message): + r"""Extractive answer. + `Guide `__ + + Attributes: + page_identifier (str): + Page identifier. + content (str): + Extractive answer content. + """ + + page_identifier: str = proto.Field( + proto.STRING, + number=1, + ) + content: str = proto.Field( + proto.STRING, + number=2, + ) + + document: str = proto.Field( + proto.STRING, + number=1, + ) + uri: str = proto.Field( + proto.STRING, + number=2, + ) + title: str = proto.Field( + proto.STRING, + number=3, + ) + document_contexts: MutableSequence[ + "AnswerQueryRequest.SearchSpec.SearchResultList.SearchResult.UnstructuredDocumentInfo.DocumentContext" + ] = proto.RepeatedField( + proto.MESSAGE, + number=4, + message="AnswerQueryRequest.SearchSpec.SearchResultList.SearchResult.UnstructuredDocumentInfo.DocumentContext", + ) + extractive_segments: MutableSequence[ + "AnswerQueryRequest.SearchSpec.SearchResultList.SearchResult.UnstructuredDocumentInfo.ExtractiveSegment" + ] = proto.RepeatedField( + proto.MESSAGE, + number=5, + message="AnswerQueryRequest.SearchSpec.SearchResultList.SearchResult.UnstructuredDocumentInfo.ExtractiveSegment", + ) + extractive_answers: MutableSequence[ + "AnswerQueryRequest.SearchSpec.SearchResultList.SearchResult.UnstructuredDocumentInfo.ExtractiveAnswer" + ] = proto.RepeatedField( + proto.MESSAGE, + number=6, + message="AnswerQueryRequest.SearchSpec.SearchResultList.SearchResult.UnstructuredDocumentInfo.ExtractiveAnswer", + ) + + class ChunkInfo(proto.Message): + r"""Chunk information. + + Attributes: + chunk (str): + Chunk resource name. + content (str): + Chunk textual content. 
+ """ + + chunk: str = proto.Field( + proto.STRING, + number=1, + ) + content: str = proto.Field( + proto.STRING, + number=2, + ) + + unstructured_document_info: "AnswerQueryRequest.SearchSpec.SearchResultList.SearchResult.UnstructuredDocumentInfo" = proto.Field( + proto.MESSAGE, + number=1, + oneof="content", + message="AnswerQueryRequest.SearchSpec.SearchResultList.SearchResult.UnstructuredDocumentInfo", + ) + chunk_info: "AnswerQueryRequest.SearchSpec.SearchResultList.SearchResult.ChunkInfo" = proto.Field( + proto.MESSAGE, + number=2, + oneof="content", + message="AnswerQueryRequest.SearchSpec.SearchResultList.SearchResult.ChunkInfo", + ) + + search_results: MutableSequence[ + "AnswerQueryRequest.SearchSpec.SearchResultList.SearchResult" + ] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="AnswerQueryRequest.SearchSpec.SearchResultList.SearchResult", + ) + + search_params: "AnswerQueryRequest.SearchSpec.SearchParams" = proto.Field( + proto.MESSAGE, + number=1, + oneof="input", + message="AnswerQueryRequest.SearchSpec.SearchParams", + ) + search_result_list: "AnswerQueryRequest.SearchSpec.SearchResultList" = ( + proto.Field( + proto.MESSAGE, + number=2, + oneof="input", + message="AnswerQueryRequest.SearchSpec.SearchResultList", + ) + ) + + class QueryUnderstandingSpec(proto.Message): + r"""Query understanding specification. + + Attributes: + query_classification_spec (google.cloud.discoveryengine_v1.types.AnswerQueryRequest.QueryUnderstandingSpec.QueryClassificationSpec): + Query classification specification. + query_rephraser_spec (google.cloud.discoveryengine_v1.types.AnswerQueryRequest.QueryUnderstandingSpec.QueryRephraserSpec): + Query rephraser specification. + """ + + class QueryClassificationSpec(proto.Message): + r"""Query classification specification. + + Attributes: + types (MutableSequence[google.cloud.discoveryengine_v1.types.AnswerQueryRequest.QueryUnderstandingSpec.QueryClassificationSpec.Type]): + Enabled query classification types. + """ + + class Type(proto.Enum): + r"""Query classification types. + + Values: + TYPE_UNSPECIFIED (0): + Unspecified query classification type. + ADVERSARIAL_QUERY (1): + Adversarial query classification type. + NON_ANSWER_SEEKING_QUERY (2): + Non-answer-seeking query classification type. + """ + TYPE_UNSPECIFIED = 0 + ADVERSARIAL_QUERY = 1 + NON_ANSWER_SEEKING_QUERY = 2 + + types: MutableSequence[ + "AnswerQueryRequest.QueryUnderstandingSpec.QueryClassificationSpec.Type" + ] = proto.RepeatedField( + proto.ENUM, + number=1, + enum="AnswerQueryRequest.QueryUnderstandingSpec.QueryClassificationSpec.Type", + ) + + class QueryRephraserSpec(proto.Message): + r"""Query rephraser specification. + + Attributes: + disable (bool): + Disable query rephraser. 
+ """ + + disable: bool = proto.Field( + proto.BOOL, + number=1, + ) + + query_classification_spec: "AnswerQueryRequest.QueryUnderstandingSpec.QueryClassificationSpec" = proto.Field( + proto.MESSAGE, + number=1, + message="AnswerQueryRequest.QueryUnderstandingSpec.QueryClassificationSpec", + ) + query_rephraser_spec: "AnswerQueryRequest.QueryUnderstandingSpec.QueryRephraserSpec" = proto.Field( + proto.MESSAGE, + number=2, + message="AnswerQueryRequest.QueryUnderstandingSpec.QueryRephraserSpec", + ) + + serving_config: str = proto.Field( + proto.STRING, + number=1, + ) + query: gcd_session.Query = proto.Field( + proto.MESSAGE, + number=2, + message=gcd_session.Query, + ) + session: str = proto.Field( + proto.STRING, + number=3, + ) + safety_spec: SafetySpec = proto.Field( + proto.MESSAGE, + number=4, + message=SafetySpec, + ) + related_questions_spec: RelatedQuestionsSpec = proto.Field( + proto.MESSAGE, + number=5, + message=RelatedQuestionsSpec, + ) + answer_generation_spec: AnswerGenerationSpec = proto.Field( + proto.MESSAGE, + number=7, + message=AnswerGenerationSpec, + ) + search_spec: SearchSpec = proto.Field( + proto.MESSAGE, + number=8, + message=SearchSpec, + ) + query_understanding_spec: QueryUnderstandingSpec = proto.Field( + proto.MESSAGE, + number=9, + message=QueryUnderstandingSpec, + ) + asynchronous_mode: bool = proto.Field( + proto.BOOL, + number=10, + ) + user_pseudo_id: str = proto.Field( + proto.STRING, + number=12, + ) + + +class AnswerQueryResponse(proto.Message): + r"""Response message for + [ConversationalSearchService.AnswerQuery][google.cloud.discoveryengine.v1.ConversationalSearchService.AnswerQuery] + method. + + Attributes: + answer (google.cloud.discoveryengine_v1.types.Answer): + Answer resource object. If + [AnswerQueryRequest.StepSpec.max_step_count][] is greater + than 1, use + [Answer.name][google.cloud.discoveryengine.v1.Answer.name] + to fetch answer information using + [ConversationalSearchService.GetAnswer][google.cloud.discoveryengine.v1.ConversationalSearchService.GetAnswer] + API. + session (google.cloud.discoveryengine_v1.types.Session): + Session resource object. It will be only available when + session field is set and valid in the + [AnswerQueryRequest][google.cloud.discoveryengine.v1.AnswerQueryRequest] + request. + answer_query_token (str): + A global unique ID used for logging. + """ + + answer: gcd_answer.Answer = proto.Field( + proto.MESSAGE, + number=1, + message=gcd_answer.Answer, + ) + session: gcd_session.Session = proto.Field( + proto.MESSAGE, + number=2, + message=gcd_session.Session, + ) + answer_query_token: str = proto.Field( + proto.STRING, + number=3, + ) + + +class GetAnswerRequest(proto.Message): + r"""Request for GetAnswer method. + + Attributes: + name (str): + Required. The resource name of the Answer to get. Format: + ``projects/{project_number}/locations/{location_id}/collections/{collection}/engines/{engine_id}/sessions/{session_id}/answers/{answer_id}`` + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class CreateSessionRequest(proto.Message): + r"""Request for CreateSession method. + + Attributes: + parent (str): + Required. Full resource name of parent data store. Format: + ``projects/{project_number}/locations/{location_id}/collections/{collection}/dataStores/{data_store_id}`` + session (google.cloud.discoveryengine_v1.types.Session): + Required. The session to create. 
+ """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + session: gcd_session.Session = proto.Field( + proto.MESSAGE, + number=2, + message=gcd_session.Session, + ) + + +class UpdateSessionRequest(proto.Message): + r"""Request for UpdateSession method. + + Attributes: + session (google.cloud.discoveryengine_v1.types.Session): + Required. The Session to update. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Indicates which fields in the provided + [Session][google.cloud.discoveryengine.v1.Session] to + update. The following are NOT supported: + + - [Session.name][google.cloud.discoveryengine.v1.Session.name] + + If not set or empty, all supported fields are updated. + """ + + session: gcd_session.Session = proto.Field( + proto.MESSAGE, + number=1, + message=gcd_session.Session, + ) + update_mask: field_mask_pb2.FieldMask = proto.Field( + proto.MESSAGE, + number=2, + message=field_mask_pb2.FieldMask, + ) + + +class DeleteSessionRequest(proto.Message): + r"""Request for DeleteSession method. + + Attributes: + name (str): + Required. The resource name of the Session to delete. + Format: + ``projects/{project_number}/locations/{location_id}/collections/{collection}/dataStores/{data_store_id}/sessions/{session_id}`` + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class GetSessionRequest(proto.Message): + r"""Request for GetSession method. + + Attributes: + name (str): + Required. The resource name of the Session to get. Format: + ``projects/{project_number}/locations/{location_id}/collections/{collection}/dataStores/{data_store_id}/sessions/{session_id}`` + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class ListSessionsRequest(proto.Message): + r"""Request for ListSessions method. + + Attributes: + parent (str): + Required. The data store resource name. Format: + ``projects/{project_number}/locations/{location_id}/collections/{collection}/dataStores/{data_store_id}`` + page_size (int): + Maximum number of results to return. If + unspecified, defaults to 50. Max allowed value + is 1000. + page_token (str): + A page token, received from a previous ``ListSessions`` + call. Provide this to retrieve the subsequent page. + filter (str): + A filter to apply on the list results. The supported + features are: user_pseudo_id, state. + + Example: "user_pseudo_id = some_id". + order_by (str): + A comma-separated list of fields to order by, sorted in + ascending order. Use "desc" after a field name for + descending. Supported fields: + + - ``update_time`` + - ``create_time`` + - ``session_name`` + + Example: "update_time desc" "create_time". + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + filter: str = proto.Field( + proto.STRING, + number=4, + ) + order_by: str = proto.Field( + proto.STRING, + number=5, + ) + + +class ListSessionsResponse(proto.Message): + r"""Response for ListSessions method. + + Attributes: + sessions (MutableSequence[google.cloud.discoveryengine_v1.types.Session]): + All the Sessions for a given data store. + next_page_token (str): + Pagination token, if not returned indicates + the last page. 
+ """ + + @property + def raw_page(self): + return self + + sessions: MutableSequence[gcd_session.Session] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=gcd_session.Session, + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + + __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/types/data_store.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/types/data_store.py index e251290bb8b0..f64de29d5aa6 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/types/data_store.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/types/data_store.py @@ -20,7 +20,11 @@ from google.protobuf import timestamp_pb2 # type: ignore import proto # type: ignore -from google.cloud.discoveryengine_v1.types import common, schema +from google.cloud.discoveryengine_v1.types import ( + document_processing_config as gcd_document_processing_config, +) +from google.cloud.discoveryengine_v1.types import common +from google.cloud.discoveryengine_v1.types import schema __protobuf__ = proto.module( package="google.cloud.discoveryengine.v1", @@ -72,6 +76,9 @@ class DataStore(proto.Message): Output only. Timestamp the [DataStore][google.cloud.discoveryengine.v1.DataStore] was created at. + document_processing_config (google.cloud.discoveryengine_v1.types.DocumentProcessingConfig): + Configuration for Document understanding and + enrichment. starting_schema (google.cloud.discoveryengine_v1.types.Schema): The start schema to use for this [DataStore][google.cloud.discoveryengine.v1.DataStore] when @@ -144,6 +151,11 @@ class ContentConfig(proto.Enum): number=4, message=timestamp_pb2.Timestamp, ) + document_processing_config: gcd_document_processing_config.DocumentProcessingConfig = proto.Field( + proto.MESSAGE, + number=27, + message=gcd_document_processing_config.DocumentProcessingConfig, + ) starting_schema: schema.Schema = proto.Field( proto.MESSAGE, number=28, diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/types/data_store_service.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/types/data_store_service.py index 992db260ce1b..89f4d5d68ccc 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/types/data_store_service.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/types/data_store_service.py @@ -175,8 +175,8 @@ class ListDataStoresRequest(proto.Message): must match the call that provided the page token. Otherwise, an INVALID_ARGUMENT error is returned. filter (str): - Filter by solution type. For example: filter = - 'solution_type:SOLUTION_TYPE_SEARCH' + Filter by solution type . For example: + ``filter = 'solution_type:SOLUTION_TYPE_SEARCH'`` """ parent: str = proto.Field( diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/types/document_processing_config.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/types/document_processing_config.py new file mode 100644 index 000000000000..b989a1010b3d --- /dev/null +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/types/document_processing_config.py @@ -0,0 +1,140 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + +__protobuf__ = proto.module( + package="google.cloud.discoveryengine.v1", + manifest={ + "DocumentProcessingConfig", + }, +) + + +class DocumentProcessingConfig(proto.Message): + r"""A singleton resource of + [DataStore][google.cloud.discoveryengine.v1.DataStore]. It's empty + when [DataStore][google.cloud.discoveryengine.v1.DataStore] is + created, which defaults to digital parser. The first call to + [DataStoreService.UpdateDocumentProcessingConfig][] method will + initialize the config. + + Attributes: + name (str): + The full resource name of the Document Processing Config. + Format: + ``projects/*/locations/*/collections/*/dataStores/*/documentProcessingConfig``. + default_parsing_config (google.cloud.discoveryengine_v1.types.DocumentProcessingConfig.ParsingConfig): + Configurations for default Document parser. + If not specified, we will configure it as + default DigitalParsingConfig, and the default + parsing config will be applied to all file types + for Document parsing. + parsing_config_overrides (MutableMapping[str, google.cloud.discoveryengine_v1.types.DocumentProcessingConfig.ParsingConfig]): + Map from file type to override the default parsing + configuration based on the file type. Supported keys: + + - ``pdf``: Override parsing config for PDF files, either + digital parsing, ocr parsing or layout parsing is + supported. + - ``html``: Override parsing config for HTML files, only + digital parsing and or layout parsing are supported. + - ``docx``: Override parsing config for DOCX files, only + digital parsing and or layout parsing are supported. + """ + + class ParsingConfig(proto.Message): + r"""Related configurations applied to a specific type of document + parser. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + digital_parsing_config (google.cloud.discoveryengine_v1.types.DocumentProcessingConfig.ParsingConfig.DigitalParsingConfig): + Configurations applied to digital parser. + + This field is a member of `oneof`_ ``type_dedicated_config``. + ocr_parsing_config (google.cloud.discoveryengine_v1.types.DocumentProcessingConfig.ParsingConfig.OcrParsingConfig): + Configurations applied to OCR parser. + Currently it only applies to PDFs. + + This field is a member of `oneof`_ ``type_dedicated_config``. + """ + + class DigitalParsingConfig(proto.Message): + r"""The digital parsing configurations for documents.""" + + class OcrParsingConfig(proto.Message): + r"""The OCR parsing configurations for documents. + + Attributes: + enhanced_document_elements (MutableSequence[str]): + [DEPRECATED] This field is deprecated. 
To use the additional + enhanced document elements processing, please switch to + ``layout_parsing_config``. + use_native_text (bool): + If true, will use native text instead of OCR + text on pages containing native text. + """ + + enhanced_document_elements: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=1, + ) + use_native_text: bool = proto.Field( + proto.BOOL, + number=2, + ) + + digital_parsing_config: "DocumentProcessingConfig.ParsingConfig.DigitalParsingConfig" = proto.Field( + proto.MESSAGE, + number=1, + oneof="type_dedicated_config", + message="DocumentProcessingConfig.ParsingConfig.DigitalParsingConfig", + ) + ocr_parsing_config: "DocumentProcessingConfig.ParsingConfig.OcrParsingConfig" = proto.Field( + proto.MESSAGE, + number=2, + oneof="type_dedicated_config", + message="DocumentProcessingConfig.ParsingConfig.OcrParsingConfig", + ) + + name: str = proto.Field( + proto.STRING, + number=1, + ) + default_parsing_config: ParsingConfig = proto.Field( + proto.MESSAGE, + number=4, + message=ParsingConfig, + ) + parsing_config_overrides: MutableMapping[str, ParsingConfig] = proto.MapField( + proto.STRING, + proto.MESSAGE, + number=5, + message=ParsingConfig, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/types/document_service.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/types/document_service.py index e795a2c3ec06..7e8d2b5f8263 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/types/document_service.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/types/document_service.py @@ -83,7 +83,7 @@ class ListDocumentsRequest(proto.Message): Maximum number of [Document][google.cloud.discoveryengine.v1.Document]s to return. If unspecified, defaults to 100. The maximum allowed - value is 1000. Values above 1000 will be coerced to 1000. + value is 1000. Values above 1000 are set to 1000. If this field is negative, an ``INVALID_ARGUMENT`` error is returned. @@ -160,7 +160,7 @@ class CreateDocumentRequest(proto.Message): document_id (str): Required. The ID to use for the [Document][google.cloud.discoveryengine.v1.Document], which - will become the final component of the + becomes the final component of the [Document.name][google.cloud.discoveryengine.v1.Document.name]. If the caller does not have permission to create the @@ -214,15 +214,15 @@ class UpdateDocumentRequest(proto.Message): [allow_missing][google.cloud.discoveryengine.v1.UpdateDocumentRequest.allow_missing] is not set, a ``NOT_FOUND`` error is returned. allow_missing (bool): - If set to true, and the + If set to ``true`` and the [Document][google.cloud.discoveryengine.v1.Document] is not found, a new - [Document][google.cloud.discoveryengine.v1.Document] will be + [Document][google.cloud.discoveryengine.v1.Document] is be created. update_mask (google.protobuf.field_mask_pb2.FieldMask): Indicates which fields in the provided - imported 'document' to update. If not set, will - by default update all fields. + imported 'document' to update. If not set, by + default updates all fields. 
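The `DocumentProcessingConfig` message introduced above can be composed roughly as follows: digital parsing by default, with an OCR override for PDFs. The resource name is a placeholder, and the example assumes the new type is exported at the package level like the module's other types.

```python
from google.cloud import discoveryengine_v1

ParsingConfig = discoveryengine_v1.DocumentProcessingConfig.ParsingConfig

config = discoveryengine_v1.DocumentProcessingConfig(
    # Placeholder resource name.
    name=(
        "projects/my-project/locations/global/collections/default_collection/"
        "dataStores/my-data-store/documentProcessingConfig"
    ),
    # Default parser applied to all file types unless overridden below.
    default_parsing_config=ParsingConfig(
        digital_parsing_config=ParsingConfig.DigitalParsingConfig()
    ),
    # Per-file-type override: use OCR for PDFs, preferring native text when present.
    parsing_config_overrides={
        "pdf": ParsingConfig(
            ocr_parsing_config=ParsingConfig.OcrParsingConfig(use_native_text=True)
        )
    },
)
```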
""" document: gcd_document.Document = proto.Field( diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/types/engine.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/types/engine.py index f4082aafa21d..fd4521f5603c 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/types/engine.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/types/engine.py @@ -116,7 +116,7 @@ class Engine(proto.Message): restriction of the Engine industry vertical is based on [DataStore][google.cloud.discoveryengine.v1.DataStore]: If unspecified, default to ``GENERIC``. Vertical on Engine has - to match vertical of the DataStore liniked to the engine. + to match vertical of the DataStore linked to the engine. common_config (google.cloud.discoveryengine_v1.types.Engine.CommonConfig): Common config spec that specifies the metadata of the engine. @@ -252,10 +252,9 @@ class CommonConfig(proto.Message): Attributes: company_name (str): - Immutable. The name of the company, business - or entity that is associated with the engine. - Setting this may help improve LLM related - features. + The name of the company, business or entity + that is associated with the engine. Setting this + may help improve LLM related features. """ company_name: str = proto.Field( diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/types/grounded_generation_service.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/types/grounded_generation_service.py new file mode 100644 index 000000000000..36de20d74df6 --- /dev/null +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/types/grounded_generation_service.py @@ -0,0 +1,231 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + +from google.cloud.discoveryengine_v1.types import grounding + +__protobuf__ = proto.module( + package="google.cloud.discoveryengine.v1", + manifest={ + "CheckGroundingSpec", + "CheckGroundingRequest", + "CheckGroundingResponse", + }, +) + + +class CheckGroundingSpec(proto.Message): + r"""Specification for the grounding check. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + citation_threshold (float): + The threshold (in [0,1]) used for determining whether a fact + must be cited for a claim in the answer candidate. Choosing + a higher threshold will lead to fewer but very strong + citations, while choosing a lower threshold may lead to more + but somewhat weaker citations. If unset, the threshold will + default to 0.6. + + This field is a member of `oneof`_ ``_citation_threshold``. 
+ """ + + citation_threshold: float = proto.Field( + proto.DOUBLE, + number=1, + optional=True, + ) + + +class CheckGroundingRequest(proto.Message): + r"""Request message for + [GroundedGenerationService.CheckGrounding][google.cloud.discoveryengine.v1.GroundedGenerationService.CheckGrounding] + method. + + Attributes: + grounding_config (str): + Required. The resource name of the grounding config, such as + ``projects/*/locations/global/groundingConfigs/default_grounding_config``. + answer_candidate (str): + Answer candidate to check. Can have a maximum + length of 1024 characters. + facts (MutableSequence[google.cloud.discoveryengine_v1.types.GroundingFact]): + List of facts for the grounding check. + We support up to 200 facts. + grounding_spec (google.cloud.discoveryengine_v1.types.CheckGroundingSpec): + Configuration of the grounding check. + user_labels (MutableMapping[str, str]): + The user labels applied to a resource must meet the + following requirements: + + - Each resource can have multiple labels, up to a maximum + of 64. + - Each label must be a key-value pair. + - Keys have a minimum length of 1 character and a maximum + length of 63 characters and cannot be empty. Values can + be empty and have a maximum length of 63 characters. + - Keys and values can contain only lowercase letters, + numeric characters, underscores, and dashes. All + characters must use UTF-8 encoding, and international + characters are allowed. + - The key portion of a label must be unique. However, you + can use the same key with multiple resources. + - Keys must start with a lowercase letter or international + character. + + See `Google Cloud + Document `__ + for more details. + """ + + grounding_config: str = proto.Field( + proto.STRING, + number=1, + ) + answer_candidate: str = proto.Field( + proto.STRING, + number=2, + ) + facts: MutableSequence[grounding.GroundingFact] = proto.RepeatedField( + proto.MESSAGE, + number=3, + message=grounding.GroundingFact, + ) + grounding_spec: "CheckGroundingSpec" = proto.Field( + proto.MESSAGE, + number=4, + message="CheckGroundingSpec", + ) + user_labels: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=5, + ) + + +class CheckGroundingResponse(proto.Message): + r"""Response message for the + [GroundedGenerationService.CheckGrounding][google.cloud.discoveryengine.v1.GroundedGenerationService.CheckGrounding] + method. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + support_score (float): + The support score for the input answer + candidate. Higher the score, higher is the + fraction of claims that are supported by the + provided facts. This is always set when a + response is returned. + + This field is a member of `oneof`_ ``_support_score``. + cited_chunks (MutableSequence[google.cloud.discoveryengine_v1.types.FactChunk]): + List of facts cited across all claims in the + answer candidate. These are derived from the + facts supplied in the request. + claims (MutableSequence[google.cloud.discoveryengine_v1.types.CheckGroundingResponse.Claim]): + Claim texts and citation info across all + claims in the answer candidate. + """ + + class Claim(proto.Message): + r"""Text and citation info for a claim in the answer candidate. + + .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + start_pos (int): + Position indicating the start of the claim in + the answer candidate, measured in bytes. + + This field is a member of `oneof`_ ``_start_pos``. + end_pos (int): + Position indicating the end of the claim in + the answer candidate, exclusive. + + This field is a member of `oneof`_ ``_end_pos``. + claim_text (str): + Text for the claim in the answer candidate. + Always provided regardless of whether citations + or anti-citations are found. + citation_indices (MutableSequence[int]): + A list of indices (into 'cited_chunks') specifying the + citations associated with the claim. For instance [1,3,4] + means that cited_chunks[1], cited_chunks[3], cited_chunks[4] + are the facts cited supporting for the claim. A citation to + a fact indicates that the claim is supported by the fact. + grounding_check_required (bool): + Indicates that this claim required grounding check. When the + system decided this claim doesn't require + attribution/grounding check, this field will be set to + false. In that case, no grounding check was done for the + claim and therefore + [citation_indices][google.cloud.discoveryengine.v1.CheckGroundingResponse.Claim.citation_indices], + and + [anti_citation_indices][google.cloud.discoveryengine.v1.CheckGroundingResponse.Claim.anti_citation_indices] + should not be returned. + + This field is a member of `oneof`_ ``_grounding_check_required``. + """ + + start_pos: int = proto.Field( + proto.INT32, + number=1, + optional=True, + ) + end_pos: int = proto.Field( + proto.INT32, + number=2, + optional=True, + ) + claim_text: str = proto.Field( + proto.STRING, + number=3, + ) + citation_indices: MutableSequence[int] = proto.RepeatedField( + proto.INT32, + number=4, + ) + grounding_check_required: bool = proto.Field( + proto.BOOL, + number=6, + optional=True, + ) + + support_score: float = proto.Field( + proto.FLOAT, + number=1, + optional=True, + ) + cited_chunks: MutableSequence[grounding.FactChunk] = proto.RepeatedField( + proto.MESSAGE, + number=3, + message=grounding.FactChunk, + ) + claims: MutableSequence[Claim] = proto.RepeatedField( + proto.MESSAGE, + number=4, + message=Claim, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/types/grounding.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/types/grounding.py new file mode 100644 index 000000000000..983c795a6bed --- /dev/null +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/types/grounding.py @@ -0,0 +1,96 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + +__protobuf__ = proto.module( + package="google.cloud.discoveryengine.v1", + manifest={ + "GroundingFact", + "FactChunk", + }, +) + + +class GroundingFact(proto.Message): + r"""Grounding Fact. + + Attributes: + fact_text (str): + Text content of the fact. Can be at most 10K + characters long. + attributes (MutableMapping[str, str]): + Attributes associated with the fact. Common attributes + include ``source`` (indicating where the fact was sourced + from), ``author`` (indicating the author of the fact), and + so on. + """ + + fact_text: str = proto.Field( + proto.STRING, + number=1, + ) + attributes: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=2, + ) + + +class FactChunk(proto.Message): + r"""Fact Chunk. + + Attributes: + chunk_text (str): + Text content of the fact chunk. Can be at + most 10K characters long. + source (str): + Source from which this fact chunk was + retrieved. If it was retrieved from the + GroundingFacts provided in the request then this + field will contain the index of the specific + fact from which this chunk was retrieved. + index (int): + The index of this chunk. Currently, only used + for the streaming mode. + source_metadata (MutableMapping[str, str]): + More fine-grained information for the source + reference. + """ + + chunk_text: str = proto.Field( + proto.STRING, + number=1, + ) + source: str = proto.Field( + proto.STRING, + number=2, + ) + index: int = proto.Field( + proto.INT32, + number=4, + ) + source_metadata: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=3, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/types/import_config.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/types/import_config.py index 3fd5da2fb960..c381cff79eee 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/types/import_config.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/types/import_config.py @@ -55,10 +55,10 @@ class GcsSource(proto.Message): Attributes: input_uris (MutableSequence[str]): - Required. Cloud Storage URIs to input files. URI can be up - to 2000 characters long. URIs can match the full object path - (for example, ``gs://bucket/directory/object.json``) or a - pattern matching one or more files, such as + Required. Cloud Storage URIs to input files. Each URI can be + up to 2000 characters long. URIs can match the full object + path (for example, ``gs://bucket/directory/object.json``) or + a pattern matching one or more files, such as ``gs://bucket/directory/*.json``. A request can contain at most 100 files (or 100,000 files if @@ -88,7 +88,7 @@ class GcsSource(proto.Message): Document. This can only be used by the GENERIC Data Store vertical. - Supported values for user even imports: + Supported values for user event imports: - ``user_event`` (default): One JSON [UserEvent][google.cloud.discoveryengine.v1.UserEvent] @@ -551,9 +551,9 @@ class FirestoreSource(proto.Message): Required. The Firestore database to copy the data from with a length limit of 256 characters. collection_id (str): - Required. The Firestore collection to copy - the data from with a length limit of 1,500 - characters. + Required. 
The Firestore collection (or + entity) to copy the data from with a length + limit of 1,500 characters. gcs_staging_dir (str): Intermediate Cloud Storage directory used for the import with a length limit of 2,000 diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/types/project.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/types/project.py new file mode 100644 index 000000000000..a05143a3b3f8 --- /dev/null +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/types/project.py @@ -0,0 +1,152 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +from google.protobuf import timestamp_pb2 # type: ignore +import proto # type: ignore + +__protobuf__ = proto.module( + package="google.cloud.discoveryengine.v1", + manifest={ + "Project", + }, +) + + +class Project(proto.Message): + r"""Metadata and configurations for a Google Cloud project in the + service. + + Attributes: + name (str): + Output only. Full resource name of the project, for example + ``projects/{project_number}``. Note that when making + requests, project number and project id are both acceptable, + but the server will always respond in project number. + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The timestamp when this project + is created. + provision_completion_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The timestamp when this project + is successfully provisioned. Empty value means + this project is still provisioning and is not + ready for use. + service_terms_map (MutableMapping[str, google.cloud.discoveryengine_v1.types.Project.ServiceTerms]): + Output only. A map of terms of services. The key is the + ``id`` of + [ServiceTerms][google.cloud.discoveryengine.v1.Project.ServiceTerms]. + """ + + class ServiceTerms(proto.Message): + r"""Metadata about the terms of service. + + Attributes: + id (str): + The unique identifier of this terms of service. Available + terms: + + - ``GA_DATA_USE_TERMS``: `Terms for data + use `__. + When using this as ``id``, the acceptable + [version][google.cloud.discoveryengine.v1.Project.ServiceTerms.version] + to provide is ``2022-11-23``. + version (str): + The version string of the terms of service. For acceptable + values, see the comments for + [id][google.cloud.discoveryengine.v1.Project.ServiceTerms.id] + above. + state (google.cloud.discoveryengine_v1.types.Project.ServiceTerms.State): + Whether the project has accepted/rejected the + service terms or it is still pending. + accept_time (google.protobuf.timestamp_pb2.Timestamp): + The last time when the project agreed to the + terms of service. + decline_time (google.protobuf.timestamp_pb2.Timestamp): + The last time when the project declined or + revoked the agreement to terms of service. 
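Tying together the CheckGrounding types from `grounded_generation_service.py` and the `GroundingFact` message above, a hedged usage sketch follows. The grounding config path, answer candidate, and fact are invented, and a generated `GroundedGenerationServiceClient` with a `check_grounding` method is assumed.

```python
from google.cloud import discoveryengine_v1

# Assumptions: placeholder grounding config path; generated client exposes
# check_grounding for the CheckGrounding RPC.
client = discoveryengine_v1.GroundedGenerationServiceClient()

request = discoveryengine_v1.CheckGroundingRequest(
    grounding_config=(
        "projects/my-project/locations/global/groundingConfigs/"
        "default_grounding_config"
    ),
    answer_candidate="Titanium has an atomic number of 22.",
    facts=[
        discoveryengine_v1.GroundingFact(
            fact_text="Titanium is a chemical element with atomic number 22.",
            attributes={"source": "periodic-table"},
        )
    ],
    grounding_spec=discoveryengine_v1.CheckGroundingSpec(citation_threshold=0.6),
)

response = client.check_grounding(request=request)
print(response.support_score)
for claim in response.claims:
    # citation_indices point back into response.cited_chunks.
    print(claim.claim_text, list(claim.citation_indices))
```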
+ """ + + class State(proto.Enum): + r"""The agreement states this terms of service. + + Values: + STATE_UNSPECIFIED (0): + The default value of the enum. This value is + not actually used. + TERMS_ACCEPTED (1): + The project has given consent to the terms of + service. + TERMS_PENDING (2): + The project is pending to review and accept + the terms of service. + TERMS_DECLINED (3): + The project has declined or revoked the + agreement to terms of service. + """ + STATE_UNSPECIFIED = 0 + TERMS_ACCEPTED = 1 + TERMS_PENDING = 2 + TERMS_DECLINED = 3 + + id: str = proto.Field( + proto.STRING, + number=1, + ) + version: str = proto.Field( + proto.STRING, + number=2, + ) + state: "Project.ServiceTerms.State" = proto.Field( + proto.ENUM, + number=4, + enum="Project.ServiceTerms.State", + ) + accept_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=5, + message=timestamp_pb2.Timestamp, + ) + decline_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=6, + message=timestamp_pb2.Timestamp, + ) + + name: str = proto.Field( + proto.STRING, + number=1, + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + provision_completion_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=3, + message=timestamp_pb2.Timestamp, + ) + service_terms_map: MutableMapping[str, ServiceTerms] = proto.MapField( + proto.STRING, + proto.MESSAGE, + number=4, + message=ServiceTerms, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/types/project_service.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/types/project_service.py new file mode 100644 index 000000000000..6a85463e60ff --- /dev/null +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/types/project_service.py @@ -0,0 +1,72 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + +__protobuf__ = proto.module( + package="google.cloud.discoveryengine.v1", + manifest={ + "ProvisionProjectRequest", + "ProvisionProjectMetadata", + }, +) + + +class ProvisionProjectRequest(proto.Message): + r"""Request for + [ProjectService.ProvisionProject][google.cloud.discoveryengine.v1.ProjectService.ProvisionProject] + method. + + Attributes: + name (str): + Required. Full resource name of a + [Project][google.cloud.discoveryengine.v1.Project], such as + ``projects/{project_id_or_number}``. + accept_data_use_terms (bool): + Required. Set to ``true`` to specify that caller has read + and would like to give consent to the `Terms for data + use `__. + data_use_terms_version (str): + Required. The version of the `Terms for data + use `__ that + caller has read and would like to give consent to. 
+ + Acceptable version is ``2022-11-23``, and this may change + over time. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + accept_data_use_terms: bool = proto.Field( + proto.BOOL, + number=2, + ) + data_use_terms_version: str = proto.Field( + proto.STRING, + number=3, + ) + + +class ProvisionProjectMetadata(proto.Message): + r"""Metadata associated with a project provision operation.""" + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/types/rank_service.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/types/rank_service.py new file mode 100644 index 000000000000..7075a3ea9fc3 --- /dev/null +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/types/rank_service.py @@ -0,0 +1,179 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + +__protobuf__ = proto.module( + package="google.cloud.discoveryengine.v1", + manifest={ + "RankingRecord", + "RankRequest", + "RankResponse", + }, +) + + +class RankingRecord(proto.Message): + r"""Record message for + [RankService.Rank][google.cloud.discoveryengine.v1.RankService.Rank] + method. + + Attributes: + id (str): + The unique ID to represent the record. + title (str): + The title of the record. Empty by default. At least one of + [title][google.cloud.discoveryengine.v1.RankingRecord.title] + or + [content][google.cloud.discoveryengine.v1.RankingRecord.content] + should be set otherwise an INVALID_ARGUMENT error is thrown. + content (str): + The content of the record. Empty by default. At least one of + [title][google.cloud.discoveryengine.v1.RankingRecord.title] + or + [content][google.cloud.discoveryengine.v1.RankingRecord.content] + should be set otherwise an INVALID_ARGUMENT error is thrown. + score (float): + The score of this record based on the given + query and selected model. + """ + + id: str = proto.Field( + proto.STRING, + number=1, + ) + title: str = proto.Field( + proto.STRING, + number=2, + ) + content: str = proto.Field( + proto.STRING, + number=3, + ) + score: float = proto.Field( + proto.FLOAT, + number=4, + ) + + +class RankRequest(proto.Message): + r"""Request message for + [RankService.Rank][google.cloud.discoveryengine.v1.RankService.Rank] + method. + + Attributes: + ranking_config (str): + Required. The resource name of the rank service config, such + as + ``projects/{project_num}/locations/{location_id}/rankingConfigs/default_ranking_config``. + model (str): + The identifier of the model to use. It is one of: + + - ``semantic-ranker-512@latest``: Semantic ranking model + with maxiumn input token size 512. + + It is set to ``semantic-ranker-512@latest`` by default if + unspecified. + top_n (int): + The number of results to return. 
If this is + unset or no bigger than zero, returns all + results. + query (str): + The query to use. + records (MutableSequence[google.cloud.discoveryengine_v1.types.RankingRecord]): + Required. A list of records to rank. At most + 200 records to rank. + ignore_record_details_in_response (bool): + If true, the response will contain only + record ID and score. By default, it is false, + the response will contain record details. + user_labels (MutableMapping[str, str]): + The user labels applied to a resource must meet the + following requirements: + + - Each resource can have multiple labels, up to a maximum + of 64. + - Each label must be a key-value pair. + - Keys have a minimum length of 1 character and a maximum + length of 63 characters and cannot be empty. Values can + be empty and have a maximum length of 63 characters. + - Keys and values can contain only lowercase letters, + numeric characters, underscores, and dashes. All + characters must use UTF-8 encoding, and international + characters are allowed. + - The key portion of a label must be unique. However, you + can use the same key with multiple resources. + - Keys must start with a lowercase letter or international + character. + + See `Google Cloud + Document `__ + for more details. + """ + + ranking_config: str = proto.Field( + proto.STRING, + number=1, + ) + model: str = proto.Field( + proto.STRING, + number=2, + ) + top_n: int = proto.Field( + proto.INT32, + number=3, + ) + query: str = proto.Field( + proto.STRING, + number=4, + ) + records: MutableSequence["RankingRecord"] = proto.RepeatedField( + proto.MESSAGE, + number=5, + message="RankingRecord", + ) + ignore_record_details_in_response: bool = proto.Field( + proto.BOOL, + number=6, + ) + user_labels: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=7, + ) + + +class RankResponse(proto.Message): + r"""Response message for + [RankService.Rank][google.cloud.discoveryengine.v1.RankService.Rank] + method. + + Attributes: + records (MutableSequence[google.cloud.discoveryengine_v1.types.RankingRecord]): + A list of records sorted by descending score. + """ + + records: MutableSequence["RankingRecord"] = proto.RepeatedField( + proto.MESSAGE, + number=5, + message="RankingRecord", + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/types/recommendation_service.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/types/recommendation_service.py index 690381d27704..f225dff737b9 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/types/recommendation_service.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/types/recommendation_service.py @@ -43,9 +43,8 @@ class RecommendRequest(proto.Message): ``projects/*/locations/global/collections/*/dataStores/*/servingConfigs/*`` One default serving config is created along with your - recommendation engine creation. The engine ID will be used - as the ID of the default serving config. For example, for - Engine + recommendation engine creation. The engine ID is used as the + ID of the default serving config. For example, for Engine ``projects/*/locations/global/collections/*/engines/my-engine``, you can use ``projects/*/locations/global/collections/*/engines/my-engine/servingConfigs/my-engine`` @@ -74,9 +73,9 @@ class RecommendRequest(proto.Message): page_size (int): Maximum number of results to return. 
Set this property to the number of recommendation results - needed. If zero, the service will choose a + needed. If zero, the service chooses a reasonable default. The maximum allowed value is - 100. Values above 100 will be coerced to 100. + 100. Values above 100 are set to 100. filter (str): Filter for restricting recommendation results with a length limit of 5,000 characters. Currently, only filter @@ -97,41 +96,39 @@ class RecommendRequest(proto.Message): - (available: true) AND (launguage: ANY("en", "es")) OR (categories: ANY("Movie")) - If your filter blocks all results, the API will return - generic (unfiltered) popular Documents. If you only want - results strictly matching the filters, set - ``strictFiltering`` to True in + If your filter blocks all results, the API returns generic + (unfiltered) popular Documents. If you only want results + strictly matching the filters, set ``strictFiltering`` to + ``true`` in [RecommendRequest.params][google.cloud.discoveryengine.v1.RecommendRequest.params] to receive empty results instead. - Note that the API will never return + Note that the API never returns [Document][google.cloud.discoveryengine.v1.Document]s with - ``storageStatus`` of ``EXPIRED`` or ``DELETED`` regardless + ``storageStatus`` as ``EXPIRED`` or ``DELETED`` regardless of filter choices. validate_only (bool): - Use validate only mode for this - recommendation query. If set to true, a fake - model will be used that returns arbitrary - Document IDs. Note that the validate only mode - should only be used for testing the API, or if - the model is not ready. + Use validate only mode for this recommendation query. If set + to ``true``, a fake model is used that returns arbitrary + Document IDs. Note that the validate only mode should only + be used for testing the API, or if the model is not ready. params (MutableMapping[str, google.protobuf.struct_pb2.Value]): Additional domain specific parameters for the recommendations. Allowed values: - - ``returnDocument``: Boolean. If set to true, the - associated Document object will be returned in + - ``returnDocument``: Boolean. If set to ``true``, the + associated Document object is returned in [RecommendResponse.RecommendationResult.document][google.cloud.discoveryengine.v1.RecommendResponse.RecommendationResult.document]. - ``returnScore``: Boolean. If set to true, the - recommendation 'score' corresponding to each returned - Document will be set in + recommendation score corresponding to each returned + Document is set in [RecommendResponse.RecommendationResult.metadata][google.cloud.discoveryengine.v1.RecommendResponse.RecommendationResult.metadata]. - The given 'score' indicates the probability of a Document + The given score indicates the probability of a Document conversion given the user's context and history. - ``strictFiltering``: Boolean. True by default. If set to - false, the service will return generic (unfiltered) + ``false``, the service returns generic (unfiltered) popular Documents instead of empty if your filter blocks all recommendation results. - ``diversityLevel``: String. Default empty. If set to be @@ -240,7 +237,7 @@ class RecommendationResult(proto.Message): Set if ``returnDocument`` is set to true in [RecommendRequest.params][google.cloud.discoveryengine.v1.RecommendRequest.params]. metadata (MutableMapping[str, google.protobuf.struct_pb2.Value]): - Additional Document metadata / annotations. + Additional Document metadata or annotations. 
Possible values: diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/types/schema_service.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/types/schema_service.py index eb76c6a631e7..aa4e7e547eee 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/types/schema_service.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/types/schema_service.py @@ -72,11 +72,11 @@ class ListSchemasRequest(proto.Message): The service may return fewer than this value. If unspecified, at most 100 - [Schema][google.cloud.discoveryengine.v1.Schema]s will be + [Schema][google.cloud.discoveryengine.v1.Schema]s are returned. - The maximum value is 1000; values above 1000 will be coerced - to 1000. + The maximum value is 1000; values above 1000 are set to + 1000. page_token (str): A page token, received from a previous [SchemaService.ListSchemas][google.cloud.discoveryengine.v1.SchemaService.ListSchemas] @@ -146,8 +146,8 @@ class CreateSchemaRequest(proto.Message): [Schema][google.cloud.discoveryengine.v1.Schema] to create. schema_id (str): Required. The ID to use for the - [Schema][google.cloud.discoveryengine.v1.Schema], which will - become the final component of the + [Schema][google.cloud.discoveryengine.v1.Schema], which + becomes the final component of the [Schema.name][google.cloud.discoveryengine.v1.Schema.name]. This field should conform to @@ -183,8 +183,8 @@ class UpdateSchemaRequest(proto.Message): If set to true, and the [Schema][google.cloud.discoveryengine.v1.Schema] is not found, a new - [Schema][google.cloud.discoveryengine.v1.Schema] will be - created. In this situation, ``update_mask`` is ignored. + [Schema][google.cloud.discoveryengine.v1.Schema] is created. + In this situation, ``update_mask`` is ignored. """ schema: gcd_schema.Schema = proto.Field( diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/types/search_service.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/types/search_service.py index e72f559c6149..30c8eeec3d81 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/types/search_service.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/types/search_service.py @@ -92,8 +92,12 @@ class SearchRequest(proto.Message): If this field is negative, an ``INVALID_ARGUMENT`` is returned. data_store_specs (MutableSequence[google.cloud.discoveryengine_v1.types.SearchRequest.DataStoreSpec]): - A list of data store specs to apply on a - search call. + Specs defining dataStores to filter on in a + search call and configurations for those + dataStores. This is only considered for engines + with multiple dataStores use case. For single + dataStore within an engine, they should use the + specs at the top level. filter (str): The filter syntax consists of an expression language for constructing a predicate from one or more fields of the @@ -132,8 +136,9 @@ class SearchRequest(proto.Message): ordered by a field in an [Document][google.cloud.discoveryengine.v1.Document] object. Leave it unset if ordered by relevance. ``order_by`` - expression is case-sensitive. For more information on - ordering, see + expression is case-sensitive. 
+ + For more information on ordering for retail search, see `Ordering `__ If this field is unrecognizable, an ``INVALID_ARGUMENT`` is @@ -152,7 +157,7 @@ class SearchRequest(proto.Message): boost_spec (google.cloud.discoveryengine_v1.types.SearchRequest.BoostSpec): Boost specification to boost certain documents. For more information on boosting, see - `Boosting `__ + `Boosting `__ params (MutableMapping[str, google.protobuf.struct_pb2.Value]): Additional search parameters. @@ -160,7 +165,8 @@ class SearchRequest(proto.Message): - ``user_country_code``: string. Default empty. If set to non-empty, results are restricted or boosted based on the - location provided. Example: user_country_code: "au" + location provided. For example, + ``user_country_code: "au"`` For available codes see `Country Codes `__ @@ -168,7 +174,7 @@ class SearchRequest(proto.Message): - ``search_type``: double. Default empty. Enables non-webpage searching depending on the value. The only valid non-default value is 1, which enables image - searching. Example: search_type: 1 + searching. For example, ``search_type: 1`` query_expansion_spec (google.cloud.discoveryengine_v1.types.SearchRequest.QueryExpansionSpec): The query expansion specification that specifies the conditions under which query @@ -245,7 +251,10 @@ class ImageQuery(proto.Message): ) class DataStoreSpec(proto.Message): - r"""A struct to define data stores to filter on in a search call. + r"""A struct to define data stores to filter on in a search call and + configurations for those data stores. A maximum of 1 DataStoreSpec + per data_store is allowed. Otherwise, an ``INVALID_ARGUMENT`` error + is returned. Attributes: data_store (str): @@ -267,9 +276,9 @@ class FacetSpec(proto.Message): facet_key (google.cloud.discoveryengine_v1.types.SearchRequest.FacetSpec.FacetKey): Required. The facet key specification. limit (int): - Maximum of facet values that should be returned for this - facet. If unspecified, defaults to 20. The maximum allowed - value is 300. Values above 300 are coerced to 300. + Maximum facet values that are returned for this facet. If + unspecified, defaults to 20. The maximum allowed value is + 300. Values above 300 are coerced to 300. If this field is negative, an ``INVALID_ARGUMENT`` is returned. @@ -371,7 +380,7 @@ class FacetKey(proto.Message): 2021". Only supported on textual fields. Maximum is 10. contains (MutableSequence[str]): - Only get facet values that contains the given + Only get facet values that contain the given strings. For example, suppose "category" has three values "Action > 2022", "Action > 2021" and "Sci-Fi > 2022". If set "contains" to @@ -572,8 +581,8 @@ class SpellCorrectionSpec(proto.Message): Attributes: mode (google.cloud.discoveryengine_v1.types.SearchRequest.SpellCorrectionSpec.Mode): - The mode under which spell correction should take effect to - replace the original search query. Default to + The mode under which spell correction replaces the original + search query. Defaults to [Mode.AUTO][google.cloud.discoveryengine.v1.SearchRequest.SpellCorrectionSpec.Mode.AUTO]. """ @@ -587,10 +596,10 @@ class Mode(proto.Enum): behavior defaults to [Mode.AUTO][google.cloud.discoveryengine.v1.SearchRequest.SpellCorrectionSpec.Mode.AUTO]. SUGGESTION_ONLY (1): - Search API will try to find a spell suggestion if there is - any and put in the + Search API tries to find a spelling suggestion. 
If a + suggestion is found, it is put in the [SearchResponse.corrected_query][google.cloud.discoveryengine.v1.SearchResponse.corrected_query]. - The spell suggestion will not be used as the search query. + The spelling suggestion won't be used as the search query. AUTO (2): Automatic spell correction built by the Search API. Search will be based on the @@ -1064,9 +1073,8 @@ class SearchResult(proto.Message): of the searched [Document][google.cloud.discoveryengine.v1.Document]. document (google.cloud.discoveryengine_v1.types.Document): - The document data snippet in the search - response. Only fields that are marked as - retrievable are populated. + The document data snippet in the search response. Only + fields that are marked as ``retrievable`` are populated. """ id: str = proto.Field( @@ -1084,8 +1092,8 @@ class Facet(proto.Message): Attributes: key (str): - The key for this facet. E.g., "colors" or "price". It - matches + The key for this facet. For example, ``"colors"`` or + ``"price"``. It matches [SearchRequest.FacetSpec.FacetKey.key][google.cloud.discoveryengine.v1.SearchRequest.FacetSpec.FacetKey.key]. values (MutableSequence[google.cloud.discoveryengine_v1.types.SearchResponse.Facet.FacetValue]): The facet values for this field. @@ -1152,7 +1160,7 @@ class FacetValue(proto.Message): ) class Summary(proto.Message): - r"""Summary of the top N search result specified by the summary + r"""Summary of the top N search results specified by the summary spec. Attributes: diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/types/session.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/types/session.py new file mode 100644 index 000000000000..ef22fda255a8 --- /dev/null +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/types/session.py @@ -0,0 +1,142 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +from google.protobuf import timestamp_pb2 # type: ignore +import proto # type: ignore + +__protobuf__ = proto.module( + package="google.cloud.discoveryengine.v1", + manifest={ + "Session", + "Query", + }, +) + + +class Session(proto.Message): + r"""External session proto definition. + + Attributes: + name (str): + Immutable. Fully qualified name + ``projects/{project}/locations/global/collections/{collection}/engines/{engine}/sessions/*`` + state (google.cloud.discoveryengine_v1.types.Session.State): + The state of the session. + user_pseudo_id (str): + A unique identifier for tracking users. + turns (MutableSequence[google.cloud.discoveryengine_v1.types.Session.Turn]): + Turns. + start_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time the session started. + end_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time the session finished. 
+ """ + + class State(proto.Enum): + r"""Enumeration of the state of the session. + + Values: + STATE_UNSPECIFIED (0): + State is unspecified. + IN_PROGRESS (1): + The session is currently open. + """ + STATE_UNSPECIFIED = 0 + IN_PROGRESS = 1 + + class Turn(proto.Message): + r"""Represents a turn, including a query from the user and a + answer from service. + + Attributes: + query (google.cloud.discoveryengine_v1.types.Query): + The user query. + answer (str): + The resource name of the answer to the user + query. + Only set if the answer generation (/answer API + call) happened in this turn. + """ + + query: "Query" = proto.Field( + proto.MESSAGE, + number=1, + message="Query", + ) + answer: str = proto.Field( + proto.STRING, + number=2, + ) + + name: str = proto.Field( + proto.STRING, + number=1, + ) + state: State = proto.Field( + proto.ENUM, + number=2, + enum=State, + ) + user_pseudo_id: str = proto.Field( + proto.STRING, + number=3, + ) + turns: MutableSequence[Turn] = proto.RepeatedField( + proto.MESSAGE, + number=4, + message=Turn, + ) + start_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=5, + message=timestamp_pb2.Timestamp, + ) + end_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=6, + message=timestamp_pb2.Timestamp, + ) + + +class Query(proto.Message): + r"""Defines a user inputed query. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + text (str): + Plain text. + + This field is a member of `oneof`_ ``content``. + query_id (str): + Unique Id for the query. + """ + + text: str = proto.Field( + proto.STRING, + number=2, + oneof="content", + ) + query_id: str = proto.Field( + proto.STRING, + number=1, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/types/site_search_engine.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/types/site_search_engine.py index f1b8738b1020..319690cd53ca 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/types/site_search_engine.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/types/site_search_engine.py @@ -75,6 +75,8 @@ class TargetSite(proto.Message): generated_uri_pattern (str): Output only. This is system-generated based on the provided_uri_pattern. + root_domain_uri (str): + Output only. Root domain of the provided_uri_pattern. site_verification_info (google.cloud.discoveryengine_v1.types.SiteVerificationInfo): Output only. Site ownership and validity verification status. 
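The new `session.py` module above defines the `Session` and `Query` proto-plus messages for `discoveryengine_v1`. The sketch below, which is not part of this PR, only shows the shape of the new types by constructing them directly; the project, engine, and user identifiers are placeholders, and in normal use these objects are returned by the service rather than built by hand.

```python
# Minimal sketch (not from this PR): constructing the new Session and Query
# messages added in discoveryengine_v1/types/session.py. Names are placeholders.
from google.cloud.discoveryengine_v1.types.session import Query, Session

query = Query(text="how do I return an item?")
# Turn.answer is only populated when an answer generation call happened in the turn.
turn = Session.Turn(query=query)
session = Session(
    name=(
        "projects/my-project/locations/global/collections/default_collection"
        "/engines/my-engine/sessions/my-session"
    ),
    user_pseudo_id="anonymous-visitor-123",
    turns=[turn],
)
# state defaults to 0 until the service sets it.
print(Session.State(session.state).name)  # STATE_UNSPECIFIED
```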
@@ -188,6 +190,10 @@ class QuotaFailure(proto.Message): proto.STRING, number=4, ) + root_domain_uri: str = proto.Field( + proto.STRING, + number=10, + ) site_verification_info: "SiteVerificationInfo" = proto.Field( proto.MESSAGE, number=7, diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/types/user_event.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/types/user_event.py index 2c75b91a35b2..450bc325a9de 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/types/user_event.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/types/user_event.py @@ -40,7 +40,7 @@ class UserEvent(proto.Message): r"""UserEvent captures all metadata information Discovery Engine - API needs to know about how end users interact with customers' + API needs to know about how end users interact with your website. Attributes: @@ -89,6 +89,26 @@ class UserEvent(proto.Message): to use Google Analytics `Client ID `__ for this field. + engine (str): + The [Engine][google.cloud.discoveryengine.v1.Engine] + resource name, in the form of + ``projects/{project}/locations/{location}/collections/{collection_id}/engines/{engine_id}``. + + Optional. Only required for + [Engine][google.cloud.discoveryengine.v1.Engine] produced + user events. For example, user events from blended search. + data_store (str): + The [DataStore][google.cloud.discoveryengine.v1.DataStore] + resource full name, of the form + ``projects/{project}/locations/{location}/collections/{collection_id}/dataStores/{data_store_id}``. + + Optional. Only required for user events whose data store + can't by determined by + [UserEvent.engine][google.cloud.discoveryengine.v1.UserEvent.engine] + or + [UserEvent.documents][google.cloud.discoveryengine.v1.UserEvent.documents]. + If data store is set in the parent of write/import/collect + user event requests, this field can be omitted. event_time (google.protobuf.timestamp_pb2.Timestamp): Only required for [UserEventService.ImportUserEvents][google.cloud.discoveryengine.v1.UserEventService.ImportUserEvents] @@ -217,8 +237,7 @@ class UserEvent(proto.Message): A list of identifiers for the independent experiment groups this user event belongs to. This is used to distinguish between user events - associated with different experiment setups on - the customer end. + associated with different experiment setups. promotion_ids (MutableSequence[str]): The promotion IDs if this is an event associated with promotions. Currently, this @@ -264,6 +283,14 @@ class UserEvent(proto.Message): proto.STRING, number=2, ) + engine: str = proto.Field( + proto.STRING, + number=19, + ) + data_store: str = proto.Field( + proto.STRING, + number=20, + ) event_time: timestamp_pb2.Timestamp = proto.Field( proto.MESSAGE, number=3, @@ -631,8 +658,8 @@ class DocumentInfo(proto.Message): Quantity of the Document associated with the user event. Defaults to 1. - For example, this field will be 2 if two quantities of the - same Document are involved in a ``add-to-cart`` event. + For example, this field is 2 if two quantities of the same + Document are involved in a ``add-to-cart`` event. 
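The `UserEvent.engine` and `UserEvent.data_store` fields added above, together with the location-level `parent` and the `write_async` flag introduced in the `user_event_service.py` hunk that follows, allow engine-level (blended search) events to be written. A hedged sketch, not from this PR and with placeholder resource names, is below.

```python
# Minimal sketch (not from this PR): writing an engine-level (blended search)
# user event via the new engine field. Resource names are placeholders.
from google.cloud import discoveryengine_v1

client = discoveryengine_v1.UserEventServiceClient()
user_event = discoveryengine_v1.UserEvent(
    event_type="search",
    user_pseudo_id="anonymous-visitor-123",
    engine=(
        "projects/my-project/locations/global/collections/default_collection"
        "/engines/my-engine"
    ),
)
written = client.write_user_event(
    request=discoveryengine_v1.WriteUserEventRequest(
        parent="projects/my-project/locations/global",  # location-level parent
        user_event=user_event,
        write_async=True,  # respond after validation without waiting for the write
    )
)
```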
Required for events of the following event types: diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/types/user_event_service.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/types/user_event_service.py index c0e66062f707..e8ec084cb864 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/types/user_event_service.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/types/user_event_service.py @@ -37,12 +37,25 @@ class WriteUserEventRequest(proto.Message): Attributes: parent (str): - Required. The parent DataStore resource name, such as + Required. The parent resource name. If the write user event + action is applied in + [DataStore][google.cloud.discoveryengine.v1.DataStore] + level, the format is: ``projects/{project}/locations/{location}/collections/{collection}/dataStores/{data_store}``. + If the write user event action is applied in [Location][] + level, for example, the event with + [Document][google.cloud.discoveryengine.v1.Document] across + multiple + [DataStore][google.cloud.discoveryengine.v1.DataStore], the + format is: ``projects/{project}/locations/{location}``. user_event (google.cloud.discoveryengine_v1.types.UserEvent): Required. User event to write. This field is a member of `oneof`_ ``_user_event``. + write_async (bool): + If set to true, the user event is written + asynchronously after validation, and the API + responds without waiting for the write. """ parent: str = proto.Field( @@ -55,6 +68,10 @@ class WriteUserEventRequest(proto.Message): optional=True, message=gcd_user_event.UserEvent, ) + write_async: bool = proto.Field( + proto.BOOL, + number=3, + ) class CollectUserEventRequest(proto.Message): diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/__init__.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/__init__.py index 532f236ae202..7661bdba4177 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/__init__.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/__init__.py @@ -27,6 +27,7 @@ CompletionServiceAsyncClient, CompletionServiceClient, ) +from .services.control_service import ControlServiceAsyncClient, ControlServiceClient from .services.conversational_search_service import ( ConversationalSearchServiceAsyncClient, ConversationalSearchServiceClient, @@ -86,11 +87,21 @@ Principal, SearchAddOn, SearchTier, + SearchUseCase, SolutionType, UserInfo, ) from .types.completion import SuggestionDenyListEntry from .types.completion_service import CompleteQueryRequest, CompleteQueryResponse +from .types.control import Condition, Control +from .types.control_service import ( + CreateControlRequest, + DeleteControlRequest, + GetControlRequest, + ListControlsRequest, + ListControlsResponse, + UpdateControlRequest, +) from .types.conversation import ( Conversation, ConversationContext, @@ -117,6 +128,7 @@ UpdateConversationRequest, UpdateSessionRequest, ) +from .types.custom_tuning_model import CustomTuningModel from .types.data_store import DataStore from .types.data_store_service import ( CreateDataStoreMetadata, @@ -223,6 +235,8 @@ ) from .types.search_service import SearchRequest, SearchResponse from .types.search_tuning_service import ( + ListCustomModelsRequest, + ListCustomModelsResponse, TrainCustomModelMetadata, TrainCustomModelRequest, TrainCustomModelResponse, @@ -281,6 +295,7 @@ "AclConfigServiceAsyncClient", 
"ChunkServiceAsyncClient", "CompletionServiceAsyncClient", + "ControlServiceAsyncClient", "ConversationalSearchServiceAsyncClient", "DataStoreServiceAsyncClient", "DocumentServiceAsyncClient", @@ -321,12 +336,16 @@ "CompleteQueryResponse", "CompletionInfo", "CompletionServiceClient", + "Condition", + "Control", + "ControlServiceClient", "Conversation", "ConversationContext", "ConversationMessage", "ConversationalSearchServiceClient", "ConverseConversationRequest", "ConverseConversationResponse", + "CreateControlRequest", "CreateConversationRequest", "CreateDataStoreMetadata", "CreateDataStoreRequest", @@ -340,8 +359,10 @@ "CreateTargetSiteRequest", "CustomAttribute", "CustomFineTuningSpec", + "CustomTuningModel", "DataStore", "DataStoreServiceClient", + "DeleteControlRequest", "DeleteConversationRequest", "DeleteDataStoreMetadata", "DeleteDataStoreRequest", @@ -381,6 +402,7 @@ "GetAclConfigRequest", "GetAnswerRequest", "GetChunkRequest", + "GetControlRequest", "GetConversationRequest", "GetDataStoreRequest", "GetDocumentProcessingConfigRequest", @@ -411,8 +433,12 @@ "Interval", "ListChunksRequest", "ListChunksResponse", + "ListControlsRequest", + "ListControlsResponse", "ListConversationsRequest", "ListConversationsResponse", + "ListCustomModelsRequest", + "ListCustomModelsResponse", "ListDataStoresRequest", "ListDataStoresResponse", "ListDocumentsRequest", @@ -470,6 +496,7 @@ "SearchServiceClient", "SearchTier", "SearchTuningServiceClient", + "SearchUseCase", "ServingConfig", "ServingConfigServiceClient", "Session", @@ -489,6 +516,7 @@ "TuneEngineRequest", "TuneEngineResponse", "UpdateAclConfigRequest", + "UpdateControlRequest", "UpdateConversationRequest", "UpdateDataStoreRequest", "UpdateDocumentProcessingConfigRequest", diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/gapic_metadata.json b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/gapic_metadata.json index e2658930445b..afe98968cc12 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/gapic_metadata.json +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/gapic_metadata.json @@ -167,6 +167,100 @@ } } }, + "ControlService": { + "clients": { + "grpc": { + "libraryClient": "ControlServiceClient", + "rpcs": { + "CreateControl": { + "methods": [ + "create_control" + ] + }, + "DeleteControl": { + "methods": [ + "delete_control" + ] + }, + "GetControl": { + "methods": [ + "get_control" + ] + }, + "ListControls": { + "methods": [ + "list_controls" + ] + }, + "UpdateControl": { + "methods": [ + "update_control" + ] + } + } + }, + "grpc-async": { + "libraryClient": "ControlServiceAsyncClient", + "rpcs": { + "CreateControl": { + "methods": [ + "create_control" + ] + }, + "DeleteControl": { + "methods": [ + "delete_control" + ] + }, + "GetControl": { + "methods": [ + "get_control" + ] + }, + "ListControls": { + "methods": [ + "list_controls" + ] + }, + "UpdateControl": { + "methods": [ + "update_control" + ] + } + } + }, + "rest": { + "libraryClient": "ControlServiceClient", + "rpcs": { + "CreateControl": { + "methods": [ + "create_control" + ] + }, + "DeleteControl": { + "methods": [ + "delete_control" + ] + }, + "GetControl": { + "methods": [ + "get_control" + ] + }, + "ListControls": { + "methods": [ + "list_controls" + ] + }, + "UpdateControl": { + "methods": [ + "update_control" + ] + } + } + } + } + }, "ConversationalSearchService": { "clients": { "grpc": { @@ -1116,6 +1210,11 @@ "grpc": { "libraryClient": 
"SearchTuningServiceClient", "rpcs": { + "ListCustomModels": { + "methods": [ + "list_custom_models" + ] + }, "TrainCustomModel": { "methods": [ "train_custom_model" @@ -1126,6 +1225,11 @@ "grpc-async": { "libraryClient": "SearchTuningServiceAsyncClient", "rpcs": { + "ListCustomModels": { + "methods": [ + "list_custom_models" + ] + }, "TrainCustomModel": { "methods": [ "train_custom_model" @@ -1136,6 +1240,11 @@ "rest": { "libraryClient": "SearchTuningServiceClient", "rpcs": { + "ListCustomModels": { + "methods": [ + "list_custom_models" + ] + }, "TrainCustomModel": { "methods": [ "train_custom_model" diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/acl_config_service/async_client.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/acl_config_service/async_client.py index eecdb9710b0a..35eff4a441d9 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/acl_config_service/async_client.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/acl_config_service/async_client.py @@ -578,6 +578,63 @@ async def get_operation( # Done; return the response. return response + async def cancel_operation( + self, + request: Optional[operations_pb2.CancelOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Starts asynchronous cancellation on a long-running operation. + + The server makes a best effort to cancel the operation, but success + is not guaranteed. If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.CancelOperationRequest`): + The request object. Request message for + `CancelOperation` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.CancelOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.cancel_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. 
+ await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + async def __aenter__(self) -> "AclConfigServiceAsyncClient": return self diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/acl_config_service/client.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/acl_config_service/client.py index ebf30ae7d1b3..fb4d94a738b7 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/acl_config_service/client.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/acl_config_service/client.py @@ -1004,6 +1004,63 @@ def get_operation( # Done; return the response. return response + def cancel_operation( + self, + request: Optional[operations_pb2.CancelOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Starts asynchronous cancellation on a long-running operation. + + The server makes a best effort to cancel the operation, but success + is not guaranteed. If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.CancelOperationRequest`): + The request object. Request message for + `CancelOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.CancelOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.cancel_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. 
+ rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( gapic_version=package_version.__version__ diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/acl_config_service/transports/base.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/acl_config_service/transports/base.py index 2bcbe31f777b..c03a5c45af54 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/acl_config_service/transports/base.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/acl_config_service/transports/base.py @@ -188,6 +188,12 @@ def get_operation( ]: raise NotImplementedError() + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None,]: + raise NotImplementedError() + @property def kind(self) -> str: raise NotImplementedError() diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/acl_config_service/transports/grpc.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/acl_config_service/transports/grpc.py index 8558d850487c..4b193f21ec57 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/acl_config_service/transports/grpc.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/acl_config_service/transports/grpc.py @@ -294,6 +294,23 @@ def get_acl_config( def close(self): self.grpc_channel.close() + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None]: + r"""Return a callable for the cancel_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "cancel_operation" not in self._stubs: + self._stubs["cancel_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/CancelOperation", + request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["cancel_operation"] + @property def get_operation( self, diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/acl_config_service/transports/grpc_asyncio.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/acl_config_service/transports/grpc_asyncio.py index 5c3ba2968977..72096531d98e 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/acl_config_service/transports/grpc_asyncio.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/acl_config_service/transports/grpc_asyncio.py @@ -317,6 +317,23 @@ def _prep_wrapped_messages(self, client_info): def close(self): return self.grpc_channel.close() + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None]: + r"""Return a callable for the cancel_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "cancel_operation" not in self._stubs: + self._stubs["cancel_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/CancelOperation", + request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["cancel_operation"] + @property def get_operation( self, diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/acl_config_service/transports/rest.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/acl_config_service/transports/rest.py index 38a3b6d8ef63..ef39ffbb3978 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/acl_config_service/transports/rest.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/acl_config_service/transports/rest.py @@ -134,6 +134,27 @@ def post_update_acl_config( """ return response + def pre_cancel_operation( + self, + request: operations_pb2.CancelOperationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[operations_pb2.CancelOperationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for cancel_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the AclConfigService server. + """ + return request, metadata + + def post_cancel_operation(self, response: None) -> None: + """Post-rpc interceptor for cancel_operation + + Override in a subclass to manipulate the response + after it is returned by the AclConfigService server but before + it is returned to user code. + """ + return response + def pre_get_operation( self, request: operations_pb2.GetOperationRequest, @@ -474,6 +495,76 @@ def update_acl_config( # In C++ this would require a dynamic_cast return self._UpdateAclConfig(self._session, self._host, self._interceptor) # type: ignore + @property + def cancel_operation(self): + return self._CancelOperation(self._session, self._host, self._interceptor) # type: ignore + + class _CancelOperation(AclConfigServiceRestStub): + def __call__( + self, + request: operations_pb2.CancelOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Call the cancel operation method over HTTP. + + Args: + request (operations_pb2.CancelOperationRequest): + The request object for CancelOperation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1alpha/{name=projects/*/locations/*/collections/*/dataStores/*/branches/*/operations/*}:cancel", + "body": "*", + }, + { + "method": "post", + "uri": "/v1alpha/{name=projects/*/locations/*/dataStores/*/branches/*/operations/*}:cancel", + "body": "*", + }, + ] + + request, metadata = self._interceptor.pre_cancel_operation( + request, metadata + ) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + body = json.dumps(transcoded_request["body"]) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + return self._interceptor.post_cancel_operation(None) + @property def get_operation(self): return self._GetOperation(self._session, self._host, self._interceptor) # type: ignore @@ -559,6 +650,10 @@ def __call__( "method": "get", "uri": "/v1alpha/{name=projects/*/locations/*/operations/*}", }, + { + "method": "get", + "uri": "/v1alpha/{name=projects/*/locations/*/sampleQuerySets/*/operations/*}", + }, { "method": "get", "uri": "/v1alpha/{name=projects/*/operations/*}", diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/chunk_service/async_client.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/chunk_service/async_client.py index 3e909bb50506..dca34409f48b 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/chunk_service/async_client.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/chunk_service/async_client.py @@ -624,6 +624,63 @@ async def get_operation( # Done; return the response. return response + async def cancel_operation( + self, + request: Optional[operations_pb2.CancelOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Starts asynchronous cancellation on a long-running operation. + + The server makes a best effort to cancel the operation, but success + is not guaranteed. If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.CancelOperationRequest`): + The request object. Request message for + `CancelOperation` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. 
+ if isinstance(request, dict): + request = operations_pb2.CancelOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.cancel_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + async def __aenter__(self) -> "ChunkServiceAsyncClient": return self diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/chunk_service/client.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/chunk_service/client.py index aec9ddb4abca..df49836e415a 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/chunk_service/client.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/chunk_service/client.py @@ -1080,6 +1080,63 @@ def get_operation( # Done; return the response. return response + def cancel_operation( + self, + request: Optional[operations_pb2.CancelOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Starts asynchronous cancellation on a long-running operation. + + The server makes a best effort to cancel the operation, but success + is not guaranteed. If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.CancelOperationRequest`): + The request object. Request message for + `CancelOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.CancelOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.cancel_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. 
+ rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( gapic_version=package_version.__version__ diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/chunk_service/transports/base.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/chunk_service/transports/base.py index de86616b3115..c9beeab1f119 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/chunk_service/transports/base.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/chunk_service/transports/base.py @@ -190,6 +190,12 @@ def get_operation( ]: raise NotImplementedError() + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None,]: + raise NotImplementedError() + @property def kind(self) -> str: raise NotImplementedError() diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/chunk_service/transports/grpc.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/chunk_service/transports/grpc.py index 1f7140fbc508..d7dfaf1f5d7e 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/chunk_service/transports/grpc.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/chunk_service/transports/grpc.py @@ -292,6 +292,23 @@ def list_chunks( def close(self): self.grpc_channel.close() + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None]: + r"""Return a callable for the cancel_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "cancel_operation" not in self._stubs: + self._stubs["cancel_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/CancelOperation", + request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["cancel_operation"] + @property def get_operation( self, diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/chunk_service/transports/grpc_asyncio.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/chunk_service/transports/grpc_asyncio.py index 5d521fb275eb..43f3c3f41e92 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/chunk_service/transports/grpc_asyncio.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/chunk_service/transports/grpc_asyncio.py @@ -315,6 +315,23 @@ def _prep_wrapped_messages(self, client_info): def close(self): return self.grpc_channel.close() + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None]: + r"""Return a callable for the cancel_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "cancel_operation" not in self._stubs: + self._stubs["cancel_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/CancelOperation", + request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["cancel_operation"] + @property def get_operation( self, diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/chunk_service/transports/rest.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/chunk_service/transports/rest.py index 41341880aa7d..ce59a92031e1 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/chunk_service/transports/rest.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/chunk_service/transports/rest.py @@ -132,6 +132,27 @@ def post_list_chunks( """ return response + def pre_cancel_operation( + self, + request: operations_pb2.CancelOperationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[operations_pb2.CancelOperationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for cancel_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the ChunkService server. + """ + return request, metadata + + def post_cancel_operation(self, response: None) -> None: + """Post-rpc interceptor for cancel_operation + + Override in a subclass to manipulate the response + after it is returned by the ChunkService server but before + it is returned to user code. + """ + return response + def pre_get_operation( self, request: operations_pb2.GetOperationRequest, @@ -479,6 +500,76 @@ def list_chunks( # In C++ this would require a dynamic_cast return self._ListChunks(self._session, self._host, self._interceptor) # type: ignore + @property + def cancel_operation(self): + return self._CancelOperation(self._session, self._host, self._interceptor) # type: ignore + + class _CancelOperation(ChunkServiceRestStub): + def __call__( + self, + request: operations_pb2.CancelOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Call the cancel operation method over HTTP. + + Args: + request (operations_pb2.CancelOperationRequest): + The request object for CancelOperation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1alpha/{name=projects/*/locations/*/collections/*/dataStores/*/branches/*/operations/*}:cancel", + "body": "*", + }, + { + "method": "post", + "uri": "/v1alpha/{name=projects/*/locations/*/dataStores/*/branches/*/operations/*}:cancel", + "body": "*", + }, + ] + + request, metadata = self._interceptor.pre_cancel_operation( + request, metadata + ) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + body = json.dumps(transcoded_request["body"]) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + return self._interceptor.post_cancel_operation(None) + @property def get_operation(self): return self._GetOperation(self._session, self._host, self._interceptor) # type: ignore @@ -564,6 +655,10 @@ def __call__( "method": "get", "uri": "/v1alpha/{name=projects/*/locations/*/operations/*}", }, + { + "method": "get", + "uri": "/v1alpha/{name=projects/*/locations/*/sampleQuerySets/*/operations/*}", + }, { "method": "get", "uri": "/v1alpha/{name=projects/*/operations/*}", diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/completion_service/async_client.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/completion_service/async_client.py index a3d03fc5b117..abbeae545fa3 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/completion_service/async_client.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/completion_service/async_client.py @@ -688,6 +688,63 @@ async def get_operation( # Done; return the response. return response + async def cancel_operation( + self, + request: Optional[operations_pb2.CancelOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Starts asynchronous cancellation on a long-running operation. + + The server makes a best effort to cancel the operation, but success + is not guaranteed. If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.CancelOperationRequest`): + The request object. Request message for + `CancelOperation` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. 
+ if isinstance(request, dict): + request = operations_pb2.CancelOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.cancel_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + async def __aenter__(self) -> "CompletionServiceAsyncClient": return self diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/completion_service/client.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/completion_service/client.py index 3c782bc3c4d9..46183f69fc17 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/completion_service/client.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/completion_service/client.py @@ -1120,6 +1120,63 @@ def get_operation( # Done; return the response. return response + def cancel_operation( + self, + request: Optional[operations_pb2.CancelOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Starts asynchronous cancellation on a long-running operation. + + The server makes a best effort to cancel the operation, but success + is not guaranteed. If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.CancelOperationRequest`): + The request object. Request message for + `CancelOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.CancelOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.cancel_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. 
+ rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( gapic_version=package_version.__version__ diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/completion_service/transports/base.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/completion_service/transports/base.py index 208353a07965..634c74c73439 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/completion_service/transports/base.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/completion_service/transports/base.py @@ -214,6 +214,12 @@ def get_operation( ]: raise NotImplementedError() + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None,]: + raise NotImplementedError() + @property def kind(self) -> str: raise NotImplementedError() diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/completion_service/transports/grpc.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/completion_service/transports/grpc.py index 420e0e3b5e6c..b47bb1db15e3 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/completion_service/transports/grpc.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/completion_service/transports/grpc.py @@ -353,6 +353,23 @@ def purge_suggestion_deny_list_entries( def close(self): self.grpc_channel.close() + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None]: + r"""Return a callable for the cancel_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "cancel_operation" not in self._stubs: + self._stubs["cancel_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/CancelOperation", + request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["cancel_operation"] + @property def get_operation( self, diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/completion_service/transports/grpc_asyncio.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/completion_service/transports/grpc_asyncio.py index 0e96b86d4044..0355fd11f4d2 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/completion_service/transports/grpc_asyncio.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/completion_service/transports/grpc_asyncio.py @@ -381,6 +381,23 @@ def _prep_wrapped_messages(self, client_info): def close(self): return self.grpc_channel.close() + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None]: + r"""Return a callable for the cancel_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "cancel_operation" not in self._stubs: + self._stubs["cancel_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/CancelOperation", + request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["cancel_operation"] + @property def get_operation( self, diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/completion_service/transports/rest.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/completion_service/transports/rest.py index 22ef00f28f3c..925ce48599b5 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/completion_service/transports/rest.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/completion_service/transports/rest.py @@ -179,6 +179,27 @@ def post_purge_suggestion_deny_list_entries( """ return response + def pre_cancel_operation( + self, + request: operations_pb2.CancelOperationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[operations_pb2.CancelOperationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for cancel_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the CompletionService server. + """ + return request, metadata + + def post_cancel_operation(self, response: None) -> None: + """Post-rpc interceptor for cancel_operation + + Override in a subclass to manipulate the response + after it is returned by the CompletionService server but before + it is returned to user code. + """ + return response + def pre_get_operation( self, request: operations_pb2.GetOperationRequest, @@ -333,6 +354,18 @@ def operations_client(self) -> operations_v1.AbstractOperationsClient: # Only create a new client if we do not already have one. if self._operations_client is None: http_options: Dict[str, List[Dict[str, str]]] = { + "google.longrunning.Operations.CancelOperation": [ + { + "method": "post", + "uri": "/v1alpha/{name=projects/*/locations/*/collections/*/dataStores/*/branches/*/operations/*}:cancel", + "body": "*", + }, + { + "method": "post", + "uri": "/v1alpha/{name=projects/*/locations/*/dataStores/*/branches/*/operations/*}:cancel", + "body": "*", + }, + ], "google.longrunning.Operations.GetOperation": [ { "method": "get", @@ -390,6 +423,10 @@ def operations_client(self) -> operations_v1.AbstractOperationsClient: "method": "get", "uri": "/v1alpha/{name=projects/*/locations/*/operations/*}", }, + { + "method": "get", + "uri": "/v1alpha/{name=projects/*/locations/*/sampleQuerySets/*/operations/*}", + }, { "method": "get", "uri": "/v1alpha/{name=projects/*/operations/*}", @@ -812,6 +849,76 @@ def purge_suggestion_deny_list_entries( # In C++ this would require a dynamic_cast return self._PurgeSuggestionDenyListEntries(self._session, self._host, self._interceptor) # type: ignore + @property + def cancel_operation(self): + return self._CancelOperation(self._session, self._host, self._interceptor) # type: ignore + + class _CancelOperation(CompletionServiceRestStub): + def __call__( + self, + request: operations_pb2.CancelOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Call the cancel operation method over HTTP. + + Args: + request (operations_pb2.CancelOperationRequest): + The request object for CancelOperation method. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1alpha/{name=projects/*/locations/*/collections/*/dataStores/*/branches/*/operations/*}:cancel", + "body": "*", + }, + { + "method": "post", + "uri": "/v1alpha/{name=projects/*/locations/*/dataStores/*/branches/*/operations/*}:cancel", + "body": "*", + }, + ] + + request, metadata = self._interceptor.pre_cancel_operation( + request, metadata + ) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + body = json.dumps(transcoded_request["body"]) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + return self._interceptor.post_cancel_operation(None) + @property def get_operation(self): return self._GetOperation(self._session, self._host, self._interceptor) # type: ignore @@ -897,6 +1004,10 @@ def __call__( "method": "get", "uri": "/v1alpha/{name=projects/*/locations/*/operations/*}", }, + { + "method": "get", + "uri": "/v1alpha/{name=projects/*/locations/*/sampleQuerySets/*/operations/*}", + }, { "method": "get", "uri": "/v1alpha/{name=projects/*/operations/*}", diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/control_service/__init__.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/control_service/__init__.py new file mode 100644 index 000000000000..382ea5d38bcc --- /dev/null +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/control_service/__init__.py @@ -0,0 +1,22 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
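Editorial aside, not part of the change above: the transport additions earlier in this hunk wire `CancelOperation` through gRPC and REST for the completion service. A minimal, hedged usage sketch follows, assuming the client-level `cancel_operation` mixin that accompanies these transport changes is exposed (as it is for the control service client shown later in this PR); the resource name is a hypothetical placeholder.

```python
# Hedged sketch only: cancelling a long-running Discovery Engine operation
# via the cancel_operation surface wired up above. The operation name is a
# hypothetical placeholder.
from google.cloud import discoveryengine_v1alpha
from google.longrunning import operations_pb2

client = discoveryengine_v1alpha.CompletionServiceClient()

request = operations_pb2.CancelOperationRequest(
    name=(
        "projects/my-project/locations/global/collections/default_collection"
        "/dataStores/my-data-store/branches/0/operations/import-123"
    ),
)

# Best-effort: the server may still finish the operation after this returns.
client.cancel_operation(request=request)
```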
+# +from .async_client import ControlServiceAsyncClient +from .client import ControlServiceClient + +__all__ = ( + "ControlServiceClient", + "ControlServiceAsyncClient", +) diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/control_service/async_client.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/control_service/async_client.py new file mode 100644 index 000000000000..dcd365f82159 --- /dev/null +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/control_service/async_client.py @@ -0,0 +1,1066 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +import functools +import re +from typing import ( + Callable, + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, +) + +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry_async as retries +from google.api_core.client_options import ClientOptions +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.discoveryengine_v1alpha import gapic_version as package_version + +try: + OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.AsyncRetry, object, None] # type: ignore + +from google.cloud.location import locations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore + +from google.cloud.discoveryengine_v1alpha.services.control_service import pagers +from google.cloud.discoveryengine_v1alpha.types import common +from google.cloud.discoveryengine_v1alpha.types import control +from google.cloud.discoveryengine_v1alpha.types import control as gcd_control +from google.cloud.discoveryengine_v1alpha.types import control_service + +from .client import ControlServiceClient +from .transports.base import DEFAULT_CLIENT_INFO, ControlServiceTransport +from .transports.grpc_asyncio import ControlServiceGrpcAsyncIOTransport + + +class ControlServiceAsyncClient: + """Service for performing CRUD operations on Controls. + Controls allow for custom logic to be implemented in the serving + path. Controls need to be attached to a Serving Config to be + considered during a request. + """ + + _client: ControlServiceClient + + # Copy defaults from the synchronous client for use here. + # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. 
+ DEFAULT_ENDPOINT = ControlServiceClient.DEFAULT_ENDPOINT + DEFAULT_MTLS_ENDPOINT = ControlServiceClient.DEFAULT_MTLS_ENDPOINT + _DEFAULT_ENDPOINT_TEMPLATE = ControlServiceClient._DEFAULT_ENDPOINT_TEMPLATE + _DEFAULT_UNIVERSE = ControlServiceClient._DEFAULT_UNIVERSE + + control_path = staticmethod(ControlServiceClient.control_path) + parse_control_path = staticmethod(ControlServiceClient.parse_control_path) + data_store_path = staticmethod(ControlServiceClient.data_store_path) + parse_data_store_path = staticmethod(ControlServiceClient.parse_data_store_path) + common_billing_account_path = staticmethod( + ControlServiceClient.common_billing_account_path + ) + parse_common_billing_account_path = staticmethod( + ControlServiceClient.parse_common_billing_account_path + ) + common_folder_path = staticmethod(ControlServiceClient.common_folder_path) + parse_common_folder_path = staticmethod( + ControlServiceClient.parse_common_folder_path + ) + common_organization_path = staticmethod( + ControlServiceClient.common_organization_path + ) + parse_common_organization_path = staticmethod( + ControlServiceClient.parse_common_organization_path + ) + common_project_path = staticmethod(ControlServiceClient.common_project_path) + parse_common_project_path = staticmethod( + ControlServiceClient.parse_common_project_path + ) + common_location_path = staticmethod(ControlServiceClient.common_location_path) + parse_common_location_path = staticmethod( + ControlServiceClient.parse_common_location_path + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + ControlServiceAsyncClient: The constructed client. + """ + return ControlServiceClient.from_service_account_info.__func__(ControlServiceAsyncClient, info, *args, **kwargs) # type: ignore + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + ControlServiceAsyncClient: The constructed client. + """ + return ControlServiceClient.from_service_account_file.__func__(ControlServiceAsyncClient, filename, *args, **kwargs) # type: ignore + + from_service_account_json = from_service_account_file + + @classmethod + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[ClientOptions] = None + ): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. 
+ (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + return ControlServiceClient.get_mtls_endpoint_and_cert_source(client_options) # type: ignore + + @property + def transport(self) -> ControlServiceTransport: + """Returns the transport used by the client instance. + + Returns: + ControlServiceTransport: The transport used by the client instance. + """ + return self._client.transport + + @property + def api_endpoint(self): + """Return the API endpoint used by the client instance. + + Returns: + str: The API endpoint used by the client instance. + """ + return self._client._api_endpoint + + @property + def universe_domain(self) -> str: + """Return the universe domain used by the client instance. + + Returns: + str: The universe domain used + by the client instance. + """ + return self._client._universe_domain + + get_transport_class = functools.partial( + type(ControlServiceClient).get_transport_class, type(ControlServiceClient) + ) + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[ + Union[str, ControlServiceTransport, Callable[..., ControlServiceTransport]] + ] = "grpc_asyncio", + client_options: Optional[ClientOptions] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the control service async client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Optional[Union[str,ControlServiceTransport,Callable[..., ControlServiceTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport to use. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the ControlServiceTransport constructor. + If set to None, a transport is chosen automatically. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): + Custom options for the client. + + 1. The ``api_endpoint`` property can be used to override the + default endpoint provided by the client when ``transport`` is + not explicitly provided. Only if this property is not set and + ``transport`` was not explicitly provided, the endpoint is + determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment + variable, which have one of the following values: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto-switch to the + default mTLS endpoint if client certificate is present; this is + the default value). + + 2. 
If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide a client certificate for mTLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + 3. The ``universe_domain`` property can be used to override the + default "googleapis.com" universe. Note that ``api_endpoint`` + property still takes precedence; and ``universe_domain`` is + currently not supported for mTLS. + + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + """ + self._client = ControlServiceClient( + credentials=credentials, + transport=transport, + client_options=client_options, + client_info=client_info, + ) + + async def create_control( + self, + request: Optional[Union[control_service.CreateControlRequest, dict]] = None, + *, + parent: Optional[str] = None, + control: Optional[gcd_control.Control] = None, + control_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> gcd_control.Control: + r"""Creates a Control. + + By default 1000 controls are allowed for a data store. A request + can be submitted to adjust this limit. If the + [Control][google.cloud.discoveryengine.v1alpha.Control] to + create already exists, an ALREADY_EXISTS error is returned. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import discoveryengine_v1alpha + + async def sample_create_control(): + # Create a client + client = discoveryengine_v1alpha.ControlServiceAsyncClient() + + # Initialize request argument(s) + control = discoveryengine_v1alpha.Control() + control.boost_action.boost = 0.551 + control.boost_action.filter = "filter_value" + control.boost_action.data_store = "data_store_value" + control.display_name = "display_name_value" + control.solution_type = "SOLUTION_TYPE_GENERATIVE_CHAT" + + request = discoveryengine_v1alpha.CreateControlRequest( + parent="parent_value", + control=control, + control_id="control_id_value", + ) + + # Make the request + response = await client.create_control(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.discoveryengine_v1alpha.types.CreateControlRequest, dict]]): + The request object. Request for CreateControl method. + parent (:class:`str`): + Required. Full resource name of parent data store. + Format: + ``projects/{project_number}/locations/{location_id}/collections/{collection_id}/dataStores/{data_store_id}`` + or + ``projects/{project_number}/locations/{location_id}/collections/{collection_id}/engines/{engine_id}``. 
+ + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + control (:class:`google.cloud.discoveryengine_v1alpha.types.Control`): + Required. The Control to create. + This corresponds to the ``control`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + control_id (:class:`str`): + Required. The ID to use for the Control, which will + become the final component of the Control's resource + name. + + This value must be within 1-63 characters. Valid + characters are /[a-z][0-9]-_/. + + This corresponds to the ``control_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.discoveryengine_v1alpha.types.Control: + Defines a conditioned behavior to employ during serving. + Must be attached to a + [ServingConfig][google.cloud.discoveryengine.v1alpha.ServingConfig] + to be considered at serving time. Permitted actions + dependent on SolutionType. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, control, control_id]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, control_service.CreateControlRequest): + request = control_service.CreateControlRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if control is not None: + request.control = control + if control_id is not None: + request.control_id = control_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.create_control + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def delete_control( + self, + request: Optional[Union[control_service.DeleteControlRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a Control. + + If the [Control][google.cloud.discoveryengine.v1alpha.Control] + to delete does not exist, a NOT_FOUND error is returned. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import discoveryengine_v1alpha + + async def sample_delete_control(): + # Create a client + client = discoveryengine_v1alpha.ControlServiceAsyncClient() + + # Initialize request argument(s) + request = discoveryengine_v1alpha.DeleteControlRequest( + name="name_value", + ) + + # Make the request + await client.delete_control(request=request) + + Args: + request (Optional[Union[google.cloud.discoveryengine_v1alpha.types.DeleteControlRequest, dict]]): + The request object. Request for DeleteControl method. + name (:class:`str`): + Required. The resource name of the Control to delete. + Format: + ``projects/{project_number}/locations/{location_id}/collections/{collection_id}/dataStores/{data_store_id}/controls/{control_id}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, control_service.DeleteControlRequest): + request = control_service.DeleteControlRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.delete_control + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def update_control( + self, + request: Optional[Union[control_service.UpdateControlRequest, dict]] = None, + *, + control: Optional[gcd_control.Control] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> gcd_control.Control: + r"""Updates a Control. + + [Control][google.cloud.discoveryengine.v1alpha.Control] action + type cannot be changed. If the + [Control][google.cloud.discoveryengine.v1alpha.Control] to + update does not exist, a NOT_FOUND error is returned. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import discoveryengine_v1alpha + + async def sample_update_control(): + # Create a client + client = discoveryengine_v1alpha.ControlServiceAsyncClient() + + # Initialize request argument(s) + control = discoveryengine_v1alpha.Control() + control.boost_action.boost = 0.551 + control.boost_action.filter = "filter_value" + control.boost_action.data_store = "data_store_value" + control.display_name = "display_name_value" + control.solution_type = "SOLUTION_TYPE_GENERATIVE_CHAT" + + request = discoveryengine_v1alpha.UpdateControlRequest( + control=control, + ) + + # Make the request + response = await client.update_control(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.discoveryengine_v1alpha.types.UpdateControlRequest, dict]]): + The request object. Request for UpdateControl method. + control (:class:`google.cloud.discoveryengine_v1alpha.types.Control`): + Required. The Control to update. + This corresponds to the ``control`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): + Optional. Indicates which fields in the provided + [Control][google.cloud.discoveryengine.v1alpha.Control] + to update. The following are NOT supported: + + - [Control.name][google.cloud.discoveryengine.v1alpha.Control.name] + - [Control.solution_type][google.cloud.discoveryengine.v1alpha.Control.solution_type] + + If not set or empty, all supported fields are updated. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.discoveryengine_v1alpha.types.Control: + Defines a conditioned behavior to employ during serving. + Must be attached to a + [ServingConfig][google.cloud.discoveryengine.v1alpha.ServingConfig] + to be considered at serving time. Permitted actions + dependent on SolutionType. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([control, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, control_service.UpdateControlRequest): + request = control_service.UpdateControlRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
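Editorial aside on the `update_control` method being added here: the generated sample leaves `update_mask` unset. A hedged sketch of a partial update that only touches `display_name` is shown below; the control name is a hypothetical placeholder, and a complete `Control` would normally also carry its action configuration.

```python
# Hedged sketch: partial update of a Control via an explicit FieldMask.
# Only display_name is sent; the resource name is a hypothetical placeholder.
from google.protobuf import field_mask_pb2

from google.cloud import discoveryengine_v1alpha


async def rename_control(client: discoveryengine_v1alpha.ControlServiceAsyncClient):
    control = discoveryengine_v1alpha.Control(
        name=(
            "projects/my-project/locations/global/dataStores/my-data-store"
            "/controls/my-control"
        ),
        display_name="new display name",
    )
    # The flattened arguments mirror the method signature added in this file.
    return await client.update_control(
        control=control,
        update_mask=field_mask_pb2.FieldMask(paths=["display_name"]),
    )
```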
+ if control is not None: + request.control = control + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.update_control + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("control.name", request.control.name),) + ), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_control( + self, + request: Optional[Union[control_service.GetControlRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> control.Control: + r"""Gets a Control. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import discoveryengine_v1alpha + + async def sample_get_control(): + # Create a client + client = discoveryengine_v1alpha.ControlServiceAsyncClient() + + # Initialize request argument(s) + request = discoveryengine_v1alpha.GetControlRequest( + name="name_value", + ) + + # Make the request + response = await client.get_control(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.discoveryengine_v1alpha.types.GetControlRequest, dict]]): + The request object. Request for GetControl method. + name (:class:`str`): + Required. The resource name of the Control to get. + Format: + ``projects/{project_number}/locations/{location_id}/collections/{collection_id}/dataStores/{data_store_id}/controls/{control_id}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.discoveryengine_v1alpha.types.Control: + Defines a conditioned behavior to employ during serving. + Must be attached to a + [ServingConfig][google.cloud.discoveryengine.v1alpha.ServingConfig] + to be considered at serving time. Permitted actions + dependent on SolutionType. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." 
+ ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, control_service.GetControlRequest): + request = control_service.GetControlRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.get_control + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_controls( + self, + request: Optional[Union[control_service.ListControlsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListControlsAsyncPager: + r"""Lists all Controls by their parent + [DataStore][google.cloud.discoveryengine.v1alpha.DataStore]. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import discoveryengine_v1alpha + + async def sample_list_controls(): + # Create a client + client = discoveryengine_v1alpha.ControlServiceAsyncClient() + + # Initialize request argument(s) + request = discoveryengine_v1alpha.ListControlsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_controls(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.discoveryengine_v1alpha.types.ListControlsRequest, dict]]): + The request object. Request for ListControls method. + parent (:class:`str`): + Required. The data store resource name. Format: + ``projects/{project_number}/locations/{location_id}/collections/{collection_id}/dataStores/{data_store_id}`` + or + ``projects/{project_number}/locations/{location_id}/collections/{collection_id}/engines/{engine_id}``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.discoveryengine_v1alpha.services.control_service.pagers.ListControlsAsyncPager: + Response for ListControls method. + + Iterating over this object will yield + results and resolve additional pages + automatically. 
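Editorial aside on the pager just described: a hedged sketch of the two common consumption patterns, assuming the standard GAPIC async pager surface (`__aiter__` over items and an async `pages` generator over raw responses). The parent value is a hypothetical placeholder.

```python
# Hedged sketch: consuming the ListControlsAsyncPager returned above.
from google.cloud import discoveryengine_v1alpha


async def print_all_controls():
    client = discoveryengine_v1alpha.ControlServiceAsyncClient()
    pager = await client.list_controls(
        parent="projects/my-project/locations/global/dataStores/my-data-store"
    )
    # Item-by-item: additional pages are fetched transparently as needed.
    async for control in pager:
        print(control.name)


async def print_page_sizes():
    client = discoveryengine_v1alpha.ControlServiceAsyncClient()
    pager = await client.list_controls(
        parent="projects/my-project/locations/global/dataStores/my-data-store"
    )
    # Page-by-page, when the raw ListControlsResponse messages are needed.
    async for page in pager.pages:
        print(len(page.controls))
```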
+ + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, control_service.ListControlsRequest): + request = control_service.ListControlsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.list_controls + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListControlsAsyncPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_operations( + self, + request: Optional[operations_pb2.ListOperationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Lists operations that match the specified filter in the request. + + Args: + request (:class:`~.operations_pb2.ListOperationsRequest`): + The request object. Request message for + `ListOperations` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.ListOperationsResponse: + Response message for ``ListOperations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.ListOperationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_operations, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def get_operation( + self, + request: Optional[operations_pb2.GetOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. + + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def cancel_operation( + self, + request: Optional[operations_pb2.CancelOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Starts asynchronous cancellation on a long-running operation. + + The server makes a best effort to cancel the operation, but success + is not guaranteed. If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.CancelOperationRequest`): + The request object. Request message for + `CancelOperation` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.CancelOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.cancel_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. 
+ await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def __aenter__(self) -> "ControlServiceAsyncClient": + return self + + async def __aexit__(self, exc_type, exc, tb): + await self.transport.close() + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("ControlServiceAsyncClient",) diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/control_service/client.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/control_service/client.py new file mode 100644 index 000000000000..d7db30b50760 --- /dev/null +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/control_service/client.py @@ -0,0 +1,1500 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +import os +import re +from typing import ( + Callable, + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, + cast, +) +import warnings + +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.discoveryengine_v1alpha import gapic_version as package_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + +from google.cloud.location import locations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore + +from google.cloud.discoveryengine_v1alpha.services.control_service import pagers +from google.cloud.discoveryengine_v1alpha.types import common +from google.cloud.discoveryengine_v1alpha.types import control +from google.cloud.discoveryengine_v1alpha.types import control as gcd_control +from google.cloud.discoveryengine_v1alpha.types import control_service + +from .transports.base import DEFAULT_CLIENT_INFO, ControlServiceTransport +from .transports.grpc import ControlServiceGrpcTransport +from .transports.grpc_asyncio import ControlServiceGrpcAsyncIOTransport +from .transports.rest import ControlServiceRestTransport + + +class ControlServiceClientMeta(type): + """Metaclass for the ControlService client. + + This provides class-level methods for building and retrieving + support objects (e.g. 
transport) without polluting the client instance + objects. + """ + + _transport_registry = ( + OrderedDict() + ) # type: Dict[str, Type[ControlServiceTransport]] + _transport_registry["grpc"] = ControlServiceGrpcTransport + _transport_registry["grpc_asyncio"] = ControlServiceGrpcAsyncIOTransport + _transport_registry["rest"] = ControlServiceRestTransport + + def get_transport_class( + cls, + label: Optional[str] = None, + ) -> Type[ControlServiceTransport]: + """Returns an appropriate transport class. + + Args: + label: The name of the desired transport. If none is + provided, then the first transport in the registry is used. + + Returns: + The transport class to use. + """ + # If a specific transport is requested, return that one. + if label: + return cls._transport_registry[label] + + # No transport is requested; return the default (that is, the first one + # in the dictionary). + return next(iter(cls._transport_registry.values())) + + +class ControlServiceClient(metaclass=ControlServiceClientMeta): + """Service for performing CRUD operations on Controls. + Controls allow for custom logic to be implemented in the serving + path. Controls need to be attached to a Serving Config to be + considered during a request. + """ + + @staticmethod + def _get_default_mtls_endpoint(api_endpoint): + """Converts api endpoint to mTLS endpoint. + + Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to + "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. + Args: + api_endpoint (Optional[str]): the api endpoint to convert. + Returns: + str: converted mTLS api endpoint. + """ + if not api_endpoint: + return api_endpoint + + mtls_endpoint_re = re.compile( + r"(?P<name>[^.]+)(?P<mtls>\.mtls)?(?P<sandbox>\.sandbox)?(?P<googledomain>\.googleapis\.com)?" + ) + + m = mtls_endpoint_re.match(api_endpoint) + name, mtls, sandbox, googledomain = m.groups() + if mtls or not googledomain: + return api_endpoint + + if sandbox: + return api_endpoint.replace( + "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" + ) + + return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") + + # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. + DEFAULT_ENDPOINT = "discoveryengine.googleapis.com" + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore + DEFAULT_ENDPOINT + ) + + _DEFAULT_ENDPOINT_TEMPLATE = "discoveryengine.{UNIVERSE_DOMAIN}" + _DEFAULT_UNIVERSE = "googleapis.com" + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + ControlServiceClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + ControlServiceClient: The constructed client.
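Editorial aside: to make the endpoint rewriting in `_get_default_mtls_endpoint` above concrete, the expected values below follow directly from its regex and `replace` calls. This is an illustration, not part of the library or of this change.

```python
# Illustration of the endpoint rewriting performed by
# ControlServiceClient._get_default_mtls_endpoint above.
from google.cloud.discoveryengine_v1alpha.services.control_service import (
    ControlServiceClient,
)

assert (
    ControlServiceClient._get_default_mtls_endpoint("discoveryengine.googleapis.com")
    == "discoveryengine.mtls.googleapis.com"
)
assert (
    ControlServiceClient._get_default_mtls_endpoint(
        "discoveryengine.sandbox.googleapis.com"
    )
    == "discoveryengine.mtls.sandbox.googleapis.com"
)
# Endpoints that are already mTLS, or are not *.googleapis.com, pass through.
assert ControlServiceClient._get_default_mtls_endpoint("example.com") == "example.com"
```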
+ """ + credentials = service_account.Credentials.from_service_account_file(filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> ControlServiceTransport: + """Returns the transport used by the client instance. + + Returns: + ControlServiceTransport: The transport used by the client + instance. + """ + return self._transport + + @staticmethod + def control_path( + project: str, + location: str, + data_store: str, + control: str, + ) -> str: + """Returns a fully-qualified control string.""" + return "projects/{project}/locations/{location}/dataStores/{data_store}/controls/{control}".format( + project=project, + location=location, + data_store=data_store, + control=control, + ) + + @staticmethod + def parse_control_path(path: str) -> Dict[str, str]: + """Parses a control path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/dataStores/(?P.+?)/controls/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + + @staticmethod + def data_store_path( + project: str, + location: str, + data_store: str, + ) -> str: + """Returns a fully-qualified data_store string.""" + return "projects/{project}/locations/{location}/dataStores/{data_store}".format( + project=project, + location=location, + data_store=data_store, + ) + + @staticmethod + def parse_data_store_path(path: str) -> Dict[str, str]: + """Parses a data_store path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/dataStores/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + + @staticmethod + def common_billing_account_path( + billing_account: str, + ) -> str: + """Returns a fully-qualified billing_account string.""" + return "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + + @staticmethod + def parse_common_billing_account_path(path: str) -> Dict[str, str]: + """Parse a billing_account path into its component segments.""" + m = re.match(r"^billingAccounts/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_folder_path( + folder: str, + ) -> str: + """Returns a fully-qualified folder string.""" + return "folders/{folder}".format( + folder=folder, + ) + + @staticmethod + def parse_common_folder_path(path: str) -> Dict[str, str]: + """Parse a folder path into its component segments.""" + m = re.match(r"^folders/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_organization_path( + organization: str, + ) -> str: + """Returns a fully-qualified organization string.""" + return "organizations/{organization}".format( + organization=organization, + ) + + @staticmethod + def parse_common_organization_path(path: str) -> Dict[str, str]: + """Parse a organization path into its component segments.""" + m = re.match(r"^organizations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_project_path( + project: str, + ) -> str: + """Returns a fully-qualified project string.""" + return "projects/{project}".format( + project=project, + ) + + @staticmethod + def parse_common_project_path(path: str) -> Dict[str, str]: + """Parse a project path into its component segments.""" + m = re.match(r"^projects/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_location_path( + project: str, + location: str, + ) -> str: + """Returns a fully-qualified location string.""" + return 
"projects/{project}/locations/{location}".format( + project=project, + location=location, + ) + + @staticmethod + def parse_common_location_path(path: str) -> Dict[str, str]: + """Parse a location path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @classmethod + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[client_options_lib.ClientOptions] = None + ): + """Deprecated. Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + + warnings.warn( + "get_mtls_endpoint_and_cert_source is deprecated. Use the api_endpoint property instead.", + DeprecationWarning, + ) + if client_options is None: + client_options = client_options_lib.ClientOptions() + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_client_cert not in ("true", "false"): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError( + "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Figure out the client cert source to use. + client_cert_source = None + if use_client_cert == "true": + if client_options.client_cert_source: + client_cert_source = client_options.client_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + + # Figure out which api endpoint to use. + if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + elif use_mtls_endpoint == "always" or ( + use_mtls_endpoint == "auto" and client_cert_source + ): + api_endpoint = cls.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = cls.DEFAULT_ENDPOINT + + return api_endpoint, client_cert_source + + @staticmethod + def _read_environment_variables(): + """Returns the environment variables used by the client. 
+ + Returns: + Tuple[bool, str, str]: returns the GOOGLE_API_USE_CLIENT_CERTIFICATE, + GOOGLE_API_USE_MTLS_ENDPOINT, and GOOGLE_CLOUD_UNIVERSE_DOMAIN environment variables. + + Raises: + ValueError: If GOOGLE_API_USE_CLIENT_CERTIFICATE is not + any of ["true", "false"]. + google.auth.exceptions.MutualTLSChannelError: If GOOGLE_API_USE_MTLS_ENDPOINT + is not any of ["auto", "never", "always"]. + """ + use_client_cert = os.getenv( + "GOOGLE_API_USE_CLIENT_CERTIFICATE", "false" + ).lower() + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto").lower() + universe_domain_env = os.getenv("GOOGLE_CLOUD_UNIVERSE_DOMAIN") + if use_client_cert not in ("true", "false"): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError( + "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + return use_client_cert == "true", use_mtls_endpoint, universe_domain_env + + @staticmethod + def _get_client_cert_source(provided_cert_source, use_cert_flag): + """Return the client cert source to be used by the client. + + Args: + provided_cert_source (bytes): The client certificate source provided. + use_cert_flag (bool): A flag indicating whether to use the client certificate. + + Returns: + bytes or None: The client cert source to be used by the client. + """ + client_cert_source = None + if use_cert_flag: + if provided_cert_source: + client_cert_source = provided_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + return client_cert_source + + @staticmethod + def _get_api_endpoint( + api_override, client_cert_source, universe_domain, use_mtls_endpoint + ): + """Return the API endpoint used by the client. + + Args: + api_override (str): The API endpoint override. If specified, this is always + the return value of this function and the other arguments are not used. + client_cert_source (bytes): The client certificate source used by the client. + universe_domain (str): The universe domain used by the client. + use_mtls_endpoint (str): How to use the mTLS endpoint, which depends also on the other parameters. + Possible values are "always", "auto", or "never". + + Returns: + str: The API endpoint to be used by the client. + """ + if api_override is not None: + api_endpoint = api_override + elif use_mtls_endpoint == "always" or ( + use_mtls_endpoint == "auto" and client_cert_source + ): + _default_universe = ControlServiceClient._DEFAULT_UNIVERSE + if universe_domain != _default_universe: + raise MutualTLSChannelError( + f"mTLS is not supported in any universe other than {_default_universe}." + ) + api_endpoint = ControlServiceClient.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = ControlServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=universe_domain + ) + return api_endpoint + + @staticmethod + def _get_universe_domain( + client_universe_domain: Optional[str], universe_domain_env: Optional[str] + ) -> str: + """Return the universe domain used by the client. + + Args: + client_universe_domain (Optional[str]): The universe domain configured via the client options. + universe_domain_env (Optional[str]): The universe domain configured via the "GOOGLE_CLOUD_UNIVERSE_DOMAIN" environment variable. + + Returns: + str: The universe domain to be used by the client. + + Raises: + ValueError: If the universe domain is an empty string. 
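Editorial aside: a small illustration of how the endpoint helpers above combine to pick an endpoint. The expected values follow from `_get_api_endpoint` as shown, and the bytes value is a hypothetical stand-in for a real client cert source callable; only its truthiness matters to this helper.

```python
# Illustration of endpoint selection via ControlServiceClient._get_api_endpoint;
# arguments are (api_override, client_cert_source, universe_domain, use_mtls_endpoint).
from google.cloud.discoveryengine_v1alpha.services.control_service import (
    ControlServiceClient,
)

# No override, no client cert, "auto" mTLS mode: the plain endpoint is built
# from the default endpoint template and the default universe domain.
assert (
    ControlServiceClient._get_api_endpoint(None, None, "googleapis.com", "auto")
    == "discoveryengine.googleapis.com"
)

# Same settings with a (stand-in) client certificate source: mTLS endpoint.
assert (
    ControlServiceClient._get_api_endpoint(
        None, b"stand-in-cert-source", "googleapis.com", "auto"
    )
    == "discoveryengine.mtls.googleapis.com"
)
```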
+ """ + universe_domain = ControlServiceClient._DEFAULT_UNIVERSE + if client_universe_domain is not None: + universe_domain = client_universe_domain + elif universe_domain_env is not None: + universe_domain = universe_domain_env + if len(universe_domain.strip()) == 0: + raise ValueError("Universe Domain cannot be an empty string.") + return universe_domain + + @staticmethod + def _compare_universes( + client_universe: str, credentials: ga_credentials.Credentials + ) -> bool: + """Returns True iff the universe domains used by the client and credentials match. + + Args: + client_universe (str): The universe domain configured via the client options. + credentials (ga_credentials.Credentials): The credentials being used in the client. + + Returns: + bool: True iff client_universe matches the universe in credentials. + + Raises: + ValueError: when client_universe does not match the universe in credentials. + """ + + default_universe = ControlServiceClient._DEFAULT_UNIVERSE + credentials_universe = getattr(credentials, "universe_domain", default_universe) + + if client_universe != credentials_universe: + raise ValueError( + "The configured universe domain " + f"({client_universe}) does not match the universe domain " + f"found in the credentials ({credentials_universe}). " + "If you haven't configured the universe domain explicitly, " + f"`{default_universe}` is the default." + ) + return True + + def _validate_universe_domain(self): + """Validates client's and credentials' universe domains are consistent. + + Returns: + bool: True iff the configured universe domain is valid. + + Raises: + ValueError: If the configured universe domain is not valid. + """ + self._is_universe_domain_valid = ( + self._is_universe_domain_valid + or ControlServiceClient._compare_universes( + self.universe_domain, self.transport._credentials + ) + ) + return self._is_universe_domain_valid + + @property + def api_endpoint(self): + """Return the API endpoint used by the client instance. + + Returns: + str: The API endpoint used by the client instance. + """ + return self._api_endpoint + + @property + def universe_domain(self) -> str: + """Return the universe domain used by the client instance. + + Returns: + str: The universe domain used by the client instance. + """ + return self._universe_domain + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[ + Union[str, ControlServiceTransport, Callable[..., ControlServiceTransport]] + ] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the control service client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Optional[Union[str,ControlServiceTransport,Callable[..., ControlServiceTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the ControlServiceTransport constructor. + If set to None, a transport is chosen automatically. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): + Custom options for the client. + + 1. 
The ``api_endpoint`` property can be used to override the + default endpoint provided by the client when ``transport`` is + not explicitly provided. Only if this property is not set and + ``transport`` was not explicitly provided, the endpoint is + determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment + variable, which have one of the following values: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto-switch to the + default mTLS endpoint if client certificate is present; this is + the default value). + + 2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide a client certificate for mTLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + 3. The ``universe_domain`` property can be used to override the + default "googleapis.com" universe. Note that the ``api_endpoint`` + property still takes precedence; and ``universe_domain`` is + currently not supported for mTLS. + + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + """ + self._client_options = client_options + if isinstance(self._client_options, dict): + self._client_options = client_options_lib.from_dict(self._client_options) + if self._client_options is None: + self._client_options = client_options_lib.ClientOptions() + self._client_options = cast( + client_options_lib.ClientOptions, self._client_options + ) + + universe_domain_opt = getattr(self._client_options, "universe_domain", None) + + ( + self._use_client_cert, + self._use_mtls_endpoint, + self._universe_domain_env, + ) = ControlServiceClient._read_environment_variables() + self._client_cert_source = ControlServiceClient._get_client_cert_source( + self._client_options.client_cert_source, self._use_client_cert + ) + self._universe_domain = ControlServiceClient._get_universe_domain( + universe_domain_opt, self._universe_domain_env + ) + self._api_endpoint = None # updated below, depending on `transport` + + # Initialize the universe domain validation. + self._is_universe_domain_valid = False + + api_key_value = getattr(self._client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError( + "client_options.api_key and credentials are mutually exclusive" + ) + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + transport_provided = isinstance(transport, ControlServiceTransport) + if transport_provided: + # transport is a ControlServiceTransport instance. + if credentials or self._client_options.credentials_file or api_key_value: + raise ValueError( + "When providing a transport instance, " + "provide its credentials directly." + ) + if self._client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." 
+ ) + self._transport = cast(ControlServiceTransport, transport) + self._api_endpoint = self._transport.host + + self._api_endpoint = ( + self._api_endpoint + or ControlServiceClient._get_api_endpoint( + self._client_options.api_endpoint, + self._client_cert_source, + self._universe_domain, + self._use_mtls_endpoint, + ) + ) + + if not transport_provided: + import google.auth._default # type: ignore + + if api_key_value and hasattr( + google.auth._default, "get_api_key_credentials" + ): + credentials = google.auth._default.get_api_key_credentials( + api_key_value + ) + + transport_init: Union[ + Type[ControlServiceTransport], Callable[..., ControlServiceTransport] + ] = ( + type(self).get_transport_class(transport) + if isinstance(transport, str) or transport is None + else cast(Callable[..., ControlServiceTransport], transport) + ) + # initialize with the provided callable or the passed in class + self._transport = transport_init( + credentials=credentials, + credentials_file=self._client_options.credentials_file, + host=self._api_endpoint, + scopes=self._client_options.scopes, + client_cert_source_for_mtls=self._client_cert_source, + quota_project_id=self._client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + api_audience=self._client_options.api_audience, + ) + + def create_control( + self, + request: Optional[Union[control_service.CreateControlRequest, dict]] = None, + *, + parent: Optional[str] = None, + control: Optional[gcd_control.Control] = None, + control_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> gcd_control.Control: + r"""Creates a Control. + + By default 1000 controls are allowed for a data store. A request + can be submitted to adjust this limit. If the + [Control][google.cloud.discoveryengine.v1alpha.Control] to + create already exists, an ALREADY_EXISTS error is returned. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import discoveryengine_v1alpha + + def sample_create_control(): + # Create a client + client = discoveryengine_v1alpha.ControlServiceClient() + + # Initialize request argument(s) + control = discoveryengine_v1alpha.Control() + control.boost_action.boost = 0.551 + control.boost_action.filter = "filter_value" + control.boost_action.data_store = "data_store_value" + control.display_name = "display_name_value" + control.solution_type = "SOLUTION_TYPE_GENERATIVE_CHAT" + + request = discoveryengine_v1alpha.CreateControlRequest( + parent="parent_value", + control=control, + control_id="control_id_value", + ) + + # Make the request + response = client.create_control(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.discoveryengine_v1alpha.types.CreateControlRequest, dict]): + The request object. Request for CreateControl method. + parent (str): + Required. Full resource name of parent data store. 
+ Format: + ``projects/{project_number}/locations/{location_id}/collections/{collection_id}/dataStores/{data_store_id}`` + or + ``projects/{project_number}/locations/{location_id}/collections/{collection_id}/engines/{engine_id}``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + control (google.cloud.discoveryengine_v1alpha.types.Control): + Required. The Control to create. + This corresponds to the ``control`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + control_id (str): + Required. The ID to use for the Control, which will + become the final component of the Control's resource + name. + + This value must be within 1-63 characters. Valid + characters are /[a-z][0-9]-_/. + + This corresponds to the ``control_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.discoveryengine_v1alpha.types.Control: + Defines a conditioned behavior to employ during serving. + Must be attached to a + [ServingConfig][google.cloud.discoveryengine.v1alpha.ServingConfig] + to be considered at serving time. Permitted actions + dependent on SolutionType. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, control, control_id]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, control_service.CreateControlRequest): + request = control_service.CreateControlRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if control is not None: + request.control = control + if control_id is not None: + request.control_id = control_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_control] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def delete_control( + self, + request: Optional[Union[control_service.DeleteControlRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a Control. + + If the [Control][google.cloud.discoveryengine.v1alpha.Control] + to delete does not exist, a NOT_FOUND error is returned. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import discoveryengine_v1alpha + + def sample_delete_control(): + # Create a client + client = discoveryengine_v1alpha.ControlServiceClient() + + # Initialize request argument(s) + request = discoveryengine_v1alpha.DeleteControlRequest( + name="name_value", + ) + + # Make the request + client.delete_control(request=request) + + Args: + request (Union[google.cloud.discoveryengine_v1alpha.types.DeleteControlRequest, dict]): + The request object. Request for DeleteControl method. + name (str): + Required. The resource name of the Control to delete. + Format: + ``projects/{project_number}/locations/{location_id}/collections/{collection_id}/dataStores/{data_store_id}/controls/{control_id}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, control_service.DeleteControlRequest): + request = control_service.DeleteControlRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_control] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def update_control( + self, + request: Optional[Union[control_service.UpdateControlRequest, dict]] = None, + *, + control: Optional[gcd_control.Control] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> gcd_control.Control: + r"""Updates a Control. + + [Control][google.cloud.discoveryengine.v1alpha.Control] action + type cannot be changed. If the + [Control][google.cloud.discoveryengine.v1alpha.Control] to + update does not exist, a NOT_FOUND error is returned. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import discoveryengine_v1alpha + + def sample_update_control(): + # Create a client + client = discoveryengine_v1alpha.ControlServiceClient() + + # Initialize request argument(s) + control = discoveryengine_v1alpha.Control() + control.boost_action.boost = 0.551 + control.boost_action.filter = "filter_value" + control.boost_action.data_store = "data_store_value" + control.display_name = "display_name_value" + control.solution_type = "SOLUTION_TYPE_GENERATIVE_CHAT" + + request = discoveryengine_v1alpha.UpdateControlRequest( + control=control, + ) + + # Make the request + response = client.update_control(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.discoveryengine_v1alpha.types.UpdateControlRequest, dict]): + The request object. Request for UpdateControl method. + control (google.cloud.discoveryengine_v1alpha.types.Control): + Required. The Control to update. + This corresponds to the ``control`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Optional. Indicates which fields in the provided + [Control][google.cloud.discoveryengine.v1alpha.Control] + to update. The following are NOT supported: + + - [Control.name][google.cloud.discoveryengine.v1alpha.Control.name] + - [Control.solution_type][google.cloud.discoveryengine.v1alpha.Control.solution_type] + + If not set or empty, all supported fields are updated. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.discoveryengine_v1alpha.types.Control: + Defines a conditioned behavior to employ during serving. + Must be attached to a + [ServingConfig][google.cloud.discoveryengine.v1alpha.ServingConfig] + to be considered at serving time. Permitted actions + dependent on SolutionType. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([control, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, control_service.UpdateControlRequest): + request = control_service.UpdateControlRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
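A partial-update sketch, illustrative only and not part of the generated file: the flattened ``control``/``update_mask`` arguments of this method can be combined so that only the masked path is written. The resource name below is hypothetical.

from google.protobuf import field_mask_pb2

from google.cloud import discoveryengine_v1alpha

client = discoveryengine_v1alpha.ControlServiceClient()

control = discoveryengine_v1alpha.Control()
# Hypothetical resource name of an existing Control.
control.name = (
    "projects/123/locations/global/collections/default_collection"
    "/dataStores/my_data_store/controls/my_control"
)
control.boost_action.boost = 0.9

# Only boost_action.boost is written; fields outside the mask are untouched.
updated = client.update_control(
    control=control,
    update_mask=field_mask_pb2.FieldMask(paths=["boost_action.boost"]),
)
print(updated.display_name)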
+ if control is not None: + request.control = control + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update_control] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("control.name", request.control.name),) + ), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_control( + self, + request: Optional[Union[control_service.GetControlRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> control.Control: + r"""Gets a Control. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import discoveryengine_v1alpha + + def sample_get_control(): + # Create a client + client = discoveryengine_v1alpha.ControlServiceClient() + + # Initialize request argument(s) + request = discoveryengine_v1alpha.GetControlRequest( + name="name_value", + ) + + # Make the request + response = client.get_control(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.discoveryengine_v1alpha.types.GetControlRequest, dict]): + The request object. Request for GetControl method. + name (str): + Required. The resource name of the Control to get. + Format: + ``projects/{project_number}/locations/{location_id}/collections/{collection_id}/dataStores/{data_store_id}/controls/{control_id}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.discoveryengine_v1alpha.types.Control: + Defines a conditioned behavior to employ during serving. + Must be attached to a + [ServingConfig][google.cloud.discoveryengine.v1alpha.ServingConfig] + to be considered at serving time. Permitted actions + dependent on SolutionType. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
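A small sketch, not part of the generated file, of fetching a Control by the resource-name format documented above; all IDs are hypothetical.

from google.cloud import discoveryengine_v1alpha

client = discoveryengine_v1alpha.ControlServiceClient()

# Hypothetical IDs plugged into the documented name format.
name = (
    "projects/123/locations/global/collections/default_collection"
    "/dataStores/my_data_store/controls/my_control"
)
control = client.get_control(name=name)
print(control.display_name)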
+ if not isinstance(request, control_service.GetControlRequest): + request = control_service.GetControlRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_control] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def list_controls( + self, + request: Optional[Union[control_service.ListControlsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListControlsPager: + r"""Lists all Controls by their parent + [DataStore][google.cloud.discoveryengine.v1alpha.DataStore]. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import discoveryengine_v1alpha + + def sample_list_controls(): + # Create a client + client = discoveryengine_v1alpha.ControlServiceClient() + + # Initialize request argument(s) + request = discoveryengine_v1alpha.ListControlsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_controls(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.discoveryengine_v1alpha.types.ListControlsRequest, dict]): + The request object. Request for ListControls method. + parent (str): + Required. The data store resource name. Format: + ``projects/{project_number}/locations/{location_id}/collections/{collection_id}/dataStores/{data_store_id}`` + or + ``projects/{project_number}/locations/{location_id}/collections/{collection_id}/engines/{engine_id}``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.discoveryengine_v1alpha.services.control_service.pagers.ListControlsPager: + Response for ListControls method. + + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, control_service.ListControlsRequest): + request = control_service.ListControlsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_controls] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListControlsPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def __enter__(self) -> "ControlServiceClient": + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! + """ + self.transport.close() + + def list_operations( + self, + request: Optional[operations_pb2.ListOperationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Lists operations that match the specified filter in the request. + + Args: + request (:class:`~.operations_pb2.ListOperationsRequest`): + The request object. Request message for + `ListOperations` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.ListOperationsResponse: + Response message for ``ListOperations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.ListOperationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.list_operations, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. 
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_operation( + self, + request: Optional[operations_pb2.GetOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. + + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.get_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def cancel_operation( + self, + request: Optional[operations_pb2.CancelOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Starts asynchronous cancellation on a long-running operation. + + The server makes a best effort to cancel the operation, but success + is not guaranteed. If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.CancelOperationRequest`): + The request object. Request message for + `CancelOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.CancelOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.cancel_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. 
+ self._validate_universe_domain() + + # Send the request. + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("ControlServiceClient",) diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/control_service/pagers.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/control_service/pagers.py new file mode 100644 index 000000000000..5beb145e50d3 --- /dev/null +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/control_service/pagers.py @@ -0,0 +1,155 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import ( + Any, + AsyncIterator, + Awaitable, + Callable, + Iterator, + Optional, + Sequence, + Tuple, +) + +from google.cloud.discoveryengine_v1alpha.types import control, control_service + + +class ListControlsPager: + """A pager for iterating through ``list_controls`` requests. + + This class thinly wraps an initial + :class:`google.cloud.discoveryengine_v1alpha.types.ListControlsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``controls`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListControls`` requests and continue to iterate + through the ``controls`` field on the + corresponding responses. + + All the usual :class:`google.cloud.discoveryengine_v1alpha.types.ListControlsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., control_service.ListControlsResponse], + request: control_service.ListControlsRequest, + response: control_service.ListControlsResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.discoveryengine_v1alpha.types.ListControlsRequest): + The initial request object. + response (google.cloud.discoveryengine_v1alpha.types.ListControlsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
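A consumption sketch for the pager described above (not part of the generated file; the parent value is hypothetical): iterate items for convenience, or pages when the raw responses are needed.

from google.cloud import discoveryengine_v1alpha

client = discoveryengine_v1alpha.ControlServiceClient()
request = discoveryengine_v1alpha.ListControlsRequest(
    parent="projects/123/locations/global/collections/default_collection/dataStores/my_data_store",
)

# Item-wise: additional ListControls requests are issued transparently.
for control in client.list_controls(request=request):
    print(control.name)

# Page-wise: each element is a full ListControlsResponse.
for page in client.list_controls(request=request).pages:
    print(len(page.controls), page.next_page_token)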
+ """ + self._method = method + self._request = control_service.ListControlsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[control_service.ListControlsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[control.Control]: + for page in self.pages: + yield from page.controls + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListControlsAsyncPager: + """A pager for iterating through ``list_controls`` requests. + + This class thinly wraps an initial + :class:`google.cloud.discoveryengine_v1alpha.types.ListControlsResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``controls`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListControls`` requests and continue to iterate + through the ``controls`` field on the + corresponding responses. + + All the usual :class:`google.cloud.discoveryengine_v1alpha.types.ListControlsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., Awaitable[control_service.ListControlsResponse]], + request: control_service.ListControlsRequest, + response: control_service.ListControlsResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.discoveryengine_v1alpha.types.ListControlsRequest): + The initial request object. + response (google.cloud.discoveryengine_v1alpha.types.ListControlsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
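The asynchronous counterpart, sketched under the assumption that the generated ``ControlServiceAsyncClient`` (not shown in this hunk) is used; the parent value is hypothetical.

import asyncio

from google.cloud import discoveryengine_v1alpha

async def list_all_controls(parent: str) -> None:
    # ControlServiceAsyncClient is assumed here; it is generated alongside the
    # synchronous client but does not appear in this hunk.
    client = discoveryengine_v1alpha.ControlServiceAsyncClient()
    pager = await client.list_controls(
        request=discoveryengine_v1alpha.ListControlsRequest(parent=parent)
    )
    async for control in pager:  # drives ListControlsAsyncPager.__aiter__
        print(control.display_name)

asyncio.run(
    list_all_controls(
        "projects/123/locations/global/collections/default_collection/dataStores/my_data_store"
    )
)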
+ """ + self._method = method + self._request = control_service.ListControlsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[control_service.ListControlsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + + def __aiter__(self) -> AsyncIterator[control.Control]: + async def async_generator(): + async for page in self.pages: + for response in page.controls: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/control_service/transports/__init__.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/control_service/transports/__init__.py new file mode 100644 index 000000000000..19aaa21a8e17 --- /dev/null +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/control_service/transports/__init__.py @@ -0,0 +1,36 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +from typing import Dict, Type + +from .base import ControlServiceTransport +from .grpc import ControlServiceGrpcTransport +from .grpc_asyncio import ControlServiceGrpcAsyncIOTransport +from .rest import ControlServiceRestInterceptor, ControlServiceRestTransport + +# Compile a registry of transports. +_transport_registry = OrderedDict() # type: Dict[str, Type[ControlServiceTransport]] +_transport_registry["grpc"] = ControlServiceGrpcTransport +_transport_registry["grpc_asyncio"] = ControlServiceGrpcAsyncIOTransport +_transport_registry["rest"] = ControlServiceRestTransport + +__all__ = ( + "ControlServiceTransport", + "ControlServiceGrpcTransport", + "ControlServiceGrpcAsyncIOTransport", + "ControlServiceRestTransport", + "ControlServiceRestInterceptor", +) diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/control_service/transports/base.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/control_service/transports/base.py new file mode 100644 index 000000000000..f99d43cea622 --- /dev/null +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/control_service/transports/base.py @@ -0,0 +1,250 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import abc +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union + +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.oauth2 import service_account # type: ignore +from google.protobuf import empty_pb2 # type: ignore + +from google.cloud.discoveryengine_v1alpha import gapic_version as package_version +from google.cloud.discoveryengine_v1alpha.types import control +from google.cloud.discoveryengine_v1alpha.types import control as gcd_control +from google.cloud.discoveryengine_v1alpha.types import control_service + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +class ControlServiceTransport(abc.ABC): + """Abstract transport class for ControlService.""" + + AUTH_SCOPES = ("https://www.googleapis.com/auth/cloud-platform",) + + DEFAULT_HOST: str = "discoveryengine.googleapis.com" + + def __init__( + self, + *, + host: str = DEFAULT_HOST, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + **kwargs, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'discoveryengine.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A list of scopes. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + """ + + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} + + # Save the scopes. + self._scopes = scopes + + # If no credentials are provided, then determine the appropriate + # defaults. 
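At the client level, the same credential rules can be exercised by naming a transport and pointing ``client_options`` at a service-account key. A hedged sketch with a hypothetical key path; the registry in ``transports/__init__.py`` exposes "grpc", "grpc_asyncio" and "rest", and the synchronous client is typically constructed with "grpc" (the default) or "rest".

from google.api_core.client_options import ClientOptions
from google.cloud import discoveryengine_v1alpha

# Hypothetical service-account key path. credentials_file and credentials are
# mutually exclusive; with neither, the client falls back to
# Application Default Credentials.
client = discoveryengine_v1alpha.ControlServiceClient(
    transport="rest",
    client_options=ClientOptions(credentials_file="service-account.json"),
)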
+ if credentials and credentials_file: + raise core_exceptions.DuplicateCredentialArgs( + "'credentials_file' and 'credentials' are mutually exclusive" + ) + + if credentials_file is not None: + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, **scopes_kwargs, quota_project_id=quota_project_id + ) + elif credentials is None: + credentials, _ = google.auth.default( + **scopes_kwargs, quota_project_id=quota_project_id + ) + # Don't apply audience if the credentials file passed from user. + if hasattr(credentials, "with_gdch_audience"): + credentials = credentials.with_gdch_audience( + api_audience if api_audience else host + ) + + # If the credentials are service account credentials, then always try to use self signed JWT. + if ( + always_use_jwt_access + and isinstance(credentials, service_account.Credentials) + and hasattr(service_account.Credentials, "with_always_use_jwt_access") + ): + credentials = credentials.with_always_use_jwt_access(True) + + # Save the credentials. + self._credentials = credentials + + # Save the hostname. Default to port 443 (HTTPS) if none is specified. + if ":" not in host: + host += ":443" + self._host = host + + @property + def host(self): + return self._host + + def _prep_wrapped_messages(self, client_info): + # Precompute the wrapped methods. + self._wrapped_methods = { + self.create_control: gapic_v1.method.wrap_method( + self.create_control, + default_timeout=None, + client_info=client_info, + ), + self.delete_control: gapic_v1.method.wrap_method( + self.delete_control, + default_timeout=None, + client_info=client_info, + ), + self.update_control: gapic_v1.method.wrap_method( + self.update_control, + default_timeout=None, + client_info=client_info, + ), + self.get_control: gapic_v1.method.wrap_method( + self.get_control, + default_timeout=None, + client_info=client_info, + ), + self.list_controls: gapic_v1.method.wrap_method( + self.list_controls, + default_timeout=None, + client_info=client_info, + ), + } + + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! 
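A lifecycle sketch (not part of the generated file; Application Default Credentials assumed): scoping the client in a ``with`` block closes the underlying transport on exit, which is exactly the situation the warning above cautions about when that transport is shared.

from google.cloud import discoveryengine_v1alpha

with discoveryengine_v1alpha.ControlServiceClient() as client:
    request = discoveryengine_v1alpha.GetControlRequest(name="name_value")
    control = client.get_control(request=request)
    print(control.display_name)
# The transport (gRPC channel or HTTP session) is closed here; do not rely on
# this pattern if the transport is shared with another client.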
+ """ + raise NotImplementedError() + + @property + def create_control( + self, + ) -> Callable[ + [control_service.CreateControlRequest], + Union[gcd_control.Control, Awaitable[gcd_control.Control]], + ]: + raise NotImplementedError() + + @property + def delete_control( + self, + ) -> Callable[ + [control_service.DeleteControlRequest], + Union[empty_pb2.Empty, Awaitable[empty_pb2.Empty]], + ]: + raise NotImplementedError() + + @property + def update_control( + self, + ) -> Callable[ + [control_service.UpdateControlRequest], + Union[gcd_control.Control, Awaitable[gcd_control.Control]], + ]: + raise NotImplementedError() + + @property + def get_control( + self, + ) -> Callable[ + [control_service.GetControlRequest], + Union[control.Control, Awaitable[control.Control]], + ]: + raise NotImplementedError() + + @property + def list_controls( + self, + ) -> Callable[ + [control_service.ListControlsRequest], + Union[ + control_service.ListControlsResponse, + Awaitable[control_service.ListControlsResponse], + ], + ]: + raise NotImplementedError() + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], + Union[ + operations_pb2.ListOperationsResponse, + Awaitable[operations_pb2.ListOperationsResponse], + ], + ]: + raise NotImplementedError() + + @property + def get_operation( + self, + ) -> Callable[ + [operations_pb2.GetOperationRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None,]: + raise NotImplementedError() + + @property + def kind(self) -> str: + raise NotImplementedError() + + +__all__ = ("ControlServiceTransport",) diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/control_service/transports/grpc.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/control_service/transports/grpc.py new file mode 100644 index 000000000000..a62128ad4248 --- /dev/null +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/control_service/transports/grpc.py @@ -0,0 +1,451 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from typing import Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, grpc_helpers +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore +import grpc # type: ignore + +from google.cloud.discoveryengine_v1alpha.types import control +from google.cloud.discoveryengine_v1alpha.types import control as gcd_control +from google.cloud.discoveryengine_v1alpha.types import control_service + +from .base import DEFAULT_CLIENT_INFO, ControlServiceTransport + + +class ControlServiceGrpcTransport(ControlServiceTransport): + """gRPC backend transport for ControlService. + + Service for performing CRUD operations on Controls. + Controls allow for custom logic to be implemented in the serving + path. Controls need to be attached to a Serving Config to be + considered during a request. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + + _stubs: Dict[str, Callable] + + def __init__( + self, + *, + host: str = "discoveryengine.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'discoveryengine.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if a ``channel`` instance is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if a ``channel`` instance is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if a ``channel`` instance is provided. + channel (Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]]): + A ``Channel`` instance through which to make calls, or a Callable + that constructs and returns one. If set to None, ``self.create_channel`` + is used to create the channel. If a Callable is given, it will be called + with the same arguments as used in ``self.create_channel``. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. 
+ If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if a ``channel`` instance is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if isinstance(channel, grpc.Channel): + # Ignore credentials if a channel was passed. + credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + # initialize with the provided callable or the default channel + channel_init = channel or type(self).create_channel + self._grpc_channel = channel_init( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. 
+ credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @classmethod + def create_channel( + cls, + host: str = "discoveryengine.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> grpc.Channel: + """Create and return a gRPC channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + grpc.Channel: A gRPC channel object. + + Raises: + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + + return grpc_helpers.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs, + ) + + @property + def grpc_channel(self) -> grpc.Channel: + """Return the channel designed to connect to this service.""" + return self._grpc_channel + + @property + def create_control( + self, + ) -> Callable[[control_service.CreateControlRequest], gcd_control.Control]: + r"""Return a callable for the create control method over gRPC. + + Creates a Control. + + By default 1000 controls are allowed for a data store. A request + can be submitted to adjust this limit. If the + [Control][google.cloud.discoveryengine.v1alpha.Control] to + create already exists, an ALREADY_EXISTS error is returned. + + Returns: + Callable[[~.CreateControlRequest], + ~.Control]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_control" not in self._stubs: + self._stubs["create_control"] = self.grpc_channel.unary_unary( + "/google.cloud.discoveryengine.v1alpha.ControlService/CreateControl", + request_serializer=control_service.CreateControlRequest.serialize, + response_deserializer=gcd_control.Control.deserialize, + ) + return self._stubs["create_control"] + + @property + def delete_control( + self, + ) -> Callable[[control_service.DeleteControlRequest], empty_pb2.Empty]: + r"""Return a callable for the delete control method over gRPC. + + Deletes a Control. 
+ + If the [Control][google.cloud.discoveryengine.v1alpha.Control] + to delete does not exist, a NOT_FOUND error is returned. + + Returns: + Callable[[~.DeleteControlRequest], + ~.Empty]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_control" not in self._stubs: + self._stubs["delete_control"] = self.grpc_channel.unary_unary( + "/google.cloud.discoveryengine.v1alpha.ControlService/DeleteControl", + request_serializer=control_service.DeleteControlRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs["delete_control"] + + @property + def update_control( + self, + ) -> Callable[[control_service.UpdateControlRequest], gcd_control.Control]: + r"""Return a callable for the update control method over gRPC. + + Updates a Control. + + [Control][google.cloud.discoveryengine.v1alpha.Control] action + type cannot be changed. If the + [Control][google.cloud.discoveryengine.v1alpha.Control] to + update does not exist, a NOT_FOUND error is returned. + + Returns: + Callable[[~.UpdateControlRequest], + ~.Control]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_control" not in self._stubs: + self._stubs["update_control"] = self.grpc_channel.unary_unary( + "/google.cloud.discoveryengine.v1alpha.ControlService/UpdateControl", + request_serializer=control_service.UpdateControlRequest.serialize, + response_deserializer=gcd_control.Control.deserialize, + ) + return self._stubs["update_control"] + + @property + def get_control( + self, + ) -> Callable[[control_service.GetControlRequest], control.Control]: + r"""Return a callable for the get control method over gRPC. + + Gets a Control. + + Returns: + Callable[[~.GetControlRequest], + ~.Control]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_control" not in self._stubs: + self._stubs["get_control"] = self.grpc_channel.unary_unary( + "/google.cloud.discoveryengine.v1alpha.ControlService/GetControl", + request_serializer=control_service.GetControlRequest.serialize, + response_deserializer=control.Control.deserialize, + ) + return self._stubs["get_control"] + + @property + def list_controls( + self, + ) -> Callable[ + [control_service.ListControlsRequest], control_service.ListControlsResponse + ]: + r"""Return a callable for the list controls method over gRPC. + + Lists all Controls by their parent + [DataStore][google.cloud.discoveryengine.v1alpha.DataStore]. + + Returns: + Callable[[~.ListControlsRequest], + ~.ListControlsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "list_controls" not in self._stubs: + self._stubs["list_controls"] = self.grpc_channel.unary_unary( + "/google.cloud.discoveryengine.v1alpha.ControlService/ListControls", + request_serializer=control_service.ListControlsRequest.serialize, + response_deserializer=control_service.ListControlsResponse.deserialize, + ) + return self._stubs["list_controls"] + + def close(self): + self.grpc_channel.close() + + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None]: + r"""Return a callable for the cancel_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "cancel_operation" not in self._stubs: + self._stubs["cancel_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/CancelOperation", + request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["cancel_operation"] + + @property + def get_operation( + self, + ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: + r"""Return a callable for the get_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_operation" not in self._stubs: + self._stubs["get_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/GetOperation", + request_serializer=operations_pb2.GetOperationRequest.SerializeToString, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["get_operation"] + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse + ]: + r"""Return a callable for the list_operations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_operations" not in self._stubs: + self._stubs["list_operations"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/ListOperations", + request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, + response_deserializer=operations_pb2.ListOperationsResponse.FromString, + ) + return self._stubs["list_operations"] + + @property + def kind(self) -> str: + return "grpc" + + +__all__ = ("ControlServiceGrpcTransport",) diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/control_service/transports/grpc_asyncio.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/control_service/transports/grpc_asyncio.py new file mode 100644 index 000000000000..c1883857c7d7 --- /dev/null +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/control_service/transports/grpc_asyncio.py @@ -0,0 +1,486 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
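Before the asyncio variant below, here is a minimal usage sketch for the synchronous gRPC transport just added. It is illustrative only and not part of the generated sources: it assumes the standard GAPIC re-exports (`ControlServiceClient` available from `google.cloud.discoveryengine_v1alpha`), Application Default Credentials, and placeholder resource names.

```python
# Illustrative sketch, not part of the generated files: selecting the
# ControlServiceGrpcTransport through the public client and issuing one RPC.
from google.cloud import discoveryengine_v1alpha

# transport="grpc" resolves to ControlServiceGrpcTransport; a pre-built
# transport instance could be passed instead.
client = discoveryengine_v1alpha.ControlServiceClient(transport="grpc")

control = client.get_control(
    name=(
        "projects/my-project/locations/global/dataStores/my-data-store"
        "/controls/my-control"  # placeholder resource name
    )
)
print(control.display_name)
```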
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1, grpc_helpers_async +from google.api_core import retry_async as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore +import grpc # type: ignore +from grpc.experimental import aio # type: ignore + +from google.cloud.discoveryengine_v1alpha.types import control +from google.cloud.discoveryengine_v1alpha.types import control as gcd_control +from google.cloud.discoveryengine_v1alpha.types import control_service + +from .base import DEFAULT_CLIENT_INFO, ControlServiceTransport +from .grpc import ControlServiceGrpcTransport + + +class ControlServiceGrpcAsyncIOTransport(ControlServiceTransport): + """gRPC AsyncIO backend transport for ControlService. + + Service for performing CRUD operations on Controls. + Controls allow for custom logic to be implemented in the serving + path. Controls need to be attached to a Serving Config to be + considered during a request. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + + _grpc_channel: aio.Channel + _stubs: Dict[str, Callable] = {} + + @classmethod + def create_channel( + cls, + host: str = "discoveryengine.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> aio.Channel: + """Create and return a gRPC AsyncIO channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + aio.Channel: A gRPC AsyncIO channel object. 
+ """ + + return grpc_helpers_async.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs, + ) + + def __init__( + self, + *, + host: str = "discoveryengine.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[Union[aio.Channel, Callable[..., aio.Channel]]] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'discoveryengine.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if a ``channel`` instance is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if a ``channel`` instance is provided. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + channel (Optional[Union[aio.Channel, Callable[..., aio.Channel]]]): + A ``Channel`` instance through which to make calls, or a Callable + that constructs and returns one. If set to None, ``self.create_channel`` + is used to create the channel. If a Callable is given, it will be called + with the same arguments as used in ``self.create_channel``. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if a ``channel`` instance is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. 
+ Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if isinstance(channel, aio.Channel): + # Ignore credentials if a channel was passed. + credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + # initialize with the provided callable or the default channel + channel_init = channel or type(self).create_channel + self._grpc_channel = channel_init( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @property + def grpc_channel(self) -> aio.Channel: + """Create the channel designed to connect to this service. + + This property caches on the instance; repeated calls return + the same channel. + """ + # Return the channel from cache. + return self._grpc_channel + + @property + def create_control( + self, + ) -> Callable[ + [control_service.CreateControlRequest], Awaitable[gcd_control.Control] + ]: + r"""Return a callable for the create control method over gRPC. + + Creates a Control. + + By default 1000 controls are allowed for a data store. A request + can be submitted to adjust this limit. If the + [Control][google.cloud.discoveryengine.v1alpha.Control] to + create already exists, an ALREADY_EXISTS error is returned. 
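As the constructor logic above shows, an explicitly supplied channel instance takes precedence and the transport's own `credentials` arguments are then ignored. The sketch below illustrates that path; it assumes the transports package re-exports `ControlServiceGrpcAsyncIOTransport` (standard for generated clients), that Application Default Credentials are available when the channel is created, and uses a placeholder quota project.

```python
# Sketch: build the AsyncIO channel up front, then hand it to the transport.
# Credential and mTLS setup must happen here, because a supplied channel
# instance causes the transport to skip its own credential handling.
from google.cloud.discoveryengine_v1alpha.services.control_service.transports import (
    ControlServiceGrpcAsyncIOTransport,
)

channel = ControlServiceGrpcAsyncIOTransport.create_channel(
    "discoveryengine.googleapis.com",
    quota_project_id="my-billing-project",  # placeholder
)
transport = ControlServiceGrpcAsyncIOTransport(channel=channel)
# The transport can then be handed to the async client via its transport argument.
```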
+ + Returns: + Callable[[~.CreateControlRequest], + Awaitable[~.Control]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_control" not in self._stubs: + self._stubs["create_control"] = self.grpc_channel.unary_unary( + "/google.cloud.discoveryengine.v1alpha.ControlService/CreateControl", + request_serializer=control_service.CreateControlRequest.serialize, + response_deserializer=gcd_control.Control.deserialize, + ) + return self._stubs["create_control"] + + @property + def delete_control( + self, + ) -> Callable[[control_service.DeleteControlRequest], Awaitable[empty_pb2.Empty]]: + r"""Return a callable for the delete control method over gRPC. + + Deletes a Control. + + If the [Control][google.cloud.discoveryengine.v1alpha.Control] + to delete does not exist, a NOT_FOUND error is returned. + + Returns: + Callable[[~.DeleteControlRequest], + Awaitable[~.Empty]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_control" not in self._stubs: + self._stubs["delete_control"] = self.grpc_channel.unary_unary( + "/google.cloud.discoveryengine.v1alpha.ControlService/DeleteControl", + request_serializer=control_service.DeleteControlRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs["delete_control"] + + @property + def update_control( + self, + ) -> Callable[ + [control_service.UpdateControlRequest], Awaitable[gcd_control.Control] + ]: + r"""Return a callable for the update control method over gRPC. + + Updates a Control. + + [Control][google.cloud.discoveryengine.v1alpha.Control] action + type cannot be changed. If the + [Control][google.cloud.discoveryengine.v1alpha.Control] to + update does not exist, a NOT_FOUND error is returned. + + Returns: + Callable[[~.UpdateControlRequest], + Awaitable[~.Control]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_control" not in self._stubs: + self._stubs["update_control"] = self.grpc_channel.unary_unary( + "/google.cloud.discoveryengine.v1alpha.ControlService/UpdateControl", + request_serializer=control_service.UpdateControlRequest.serialize, + response_deserializer=gcd_control.Control.deserialize, + ) + return self._stubs["update_control"] + + @property + def get_control( + self, + ) -> Callable[[control_service.GetControlRequest], Awaitable[control.Control]]: + r"""Return a callable for the get control method over gRPC. + + Gets a Control. + + Returns: + Callable[[~.GetControlRequest], + Awaitable[~.Control]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "get_control" not in self._stubs: + self._stubs["get_control"] = self.grpc_channel.unary_unary( + "/google.cloud.discoveryengine.v1alpha.ControlService/GetControl", + request_serializer=control_service.GetControlRequest.serialize, + response_deserializer=control.Control.deserialize, + ) + return self._stubs["get_control"] + + @property + def list_controls( + self, + ) -> Callable[ + [control_service.ListControlsRequest], + Awaitable[control_service.ListControlsResponse], + ]: + r"""Return a callable for the list controls method over gRPC. + + Lists all Controls by their parent + [DataStore][google.cloud.discoveryengine.v1alpha.DataStore]. + + Returns: + Callable[[~.ListControlsRequest], + Awaitable[~.ListControlsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_controls" not in self._stubs: + self._stubs["list_controls"] = self.grpc_channel.unary_unary( + "/google.cloud.discoveryengine.v1alpha.ControlService/ListControls", + request_serializer=control_service.ListControlsRequest.serialize, + response_deserializer=control_service.ListControlsResponse.deserialize, + ) + return self._stubs["list_controls"] + + def _prep_wrapped_messages(self, client_info): + """Precompute the wrapped methods, overriding the base class method to use async wrappers.""" + self._wrapped_methods = { + self.create_control: gapic_v1.method_async.wrap_method( + self.create_control, + default_timeout=None, + client_info=client_info, + ), + self.delete_control: gapic_v1.method_async.wrap_method( + self.delete_control, + default_timeout=None, + client_info=client_info, + ), + self.update_control: gapic_v1.method_async.wrap_method( + self.update_control, + default_timeout=None, + client_info=client_info, + ), + self.get_control: gapic_v1.method_async.wrap_method( + self.get_control, + default_timeout=None, + client_info=client_info, + ), + self.list_controls: gapic_v1.method_async.wrap_method( + self.list_controls, + default_timeout=None, + client_info=client_info, + ), + } + + def close(self): + return self.grpc_channel.close() + + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None]: + r"""Return a callable for the cancel_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "cancel_operation" not in self._stubs: + self._stubs["cancel_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/CancelOperation", + request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["cancel_operation"] + + @property + def get_operation( + self, + ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: + r"""Return a callable for the get_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "get_operation" not in self._stubs: + self._stubs["get_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/GetOperation", + request_serializer=operations_pb2.GetOperationRequest.SerializeToString, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["get_operation"] + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse + ]: + r"""Return a callable for the list_operations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_operations" not in self._stubs: + self._stubs["list_operations"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/ListOperations", + request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, + response_deserializer=operations_pb2.ListOperationsResponse.FromString, + ) + return self._stubs["list_operations"] + + +__all__ = ("ControlServiceGrpcAsyncIOTransport",) diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/control_service/transports/rest.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/control_service/transports/rest.py new file mode 100644 index 000000000000..af39e8465b32 --- /dev/null +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/control_service/transports/rest.py @@ -0,0 +1,1231 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
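A short sketch of how the asyncio transport above is typically consumed, assuming the generated `ControlServiceAsyncClient` is re-exported from `google.cloud.discoveryengine_v1alpha`, Application Default Credentials are configured, and placeholder resource names.

```python
# Sketch: the same Control RPCs through the awaitable stubs, via the async client.
import asyncio

from google.cloud import discoveryengine_v1alpha


async def main() -> None:
    # transport="grpc_asyncio" resolves to ControlServiceGrpcAsyncIOTransport.
    client = discoveryengine_v1alpha.ControlServiceAsyncClient(
        transport="grpc_asyncio"
    )
    control = await client.get_control(
        name=(
            "projects/my-project/locations/global/dataStores/my-data-store"
            "/controls/my-control"  # placeholder resource name
        )
    )
    print(control.display_name)


asyncio.run(main())
```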
+# + +import dataclasses +import json # type: ignore +import re +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, path_template, rest_helpers, rest_streaming +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.transport.requests import AuthorizedSession # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.protobuf import json_format +import grpc # type: ignore +from requests import __version__ as requests_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + + +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore + +from google.cloud.discoveryengine_v1alpha.types import control +from google.cloud.discoveryengine_v1alpha.types import control as gcd_control +from google.cloud.discoveryengine_v1alpha.types import control_service + +from .base import ControlServiceTransport +from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) + + +class ControlServiceRestInterceptor: + """Interceptor for ControlService. + + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. + Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the ControlServiceRestTransport. + + .. 
code-block:: python + class MyCustomControlServiceInterceptor(ControlServiceRestInterceptor): + def pre_create_control(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_create_control(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_delete_control(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def pre_get_control(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_control(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_controls(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_controls(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_update_control(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_update_control(self, response): + logging.log(f"Received response: {response}") + return response + + transport = ControlServiceRestTransport(interceptor=MyCustomControlServiceInterceptor()) + client = ControlServiceClient(transport=transport) + + + """ + + def pre_create_control( + self, + request: control_service.CreateControlRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[control_service.CreateControlRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for create_control + + Override in a subclass to manipulate the request or metadata + before they are sent to the ControlService server. + """ + return request, metadata + + def post_create_control(self, response: gcd_control.Control) -> gcd_control.Control: + """Post-rpc interceptor for create_control + + Override in a subclass to manipulate the response + after it is returned by the ControlService server but before + it is returned to user code. + """ + return response + + def pre_delete_control( + self, + request: control_service.DeleteControlRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[control_service.DeleteControlRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for delete_control + + Override in a subclass to manipulate the request or metadata + before they are sent to the ControlService server. + """ + return request, metadata + + def pre_get_control( + self, + request: control_service.GetControlRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[control_service.GetControlRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_control + + Override in a subclass to manipulate the request or metadata + before they are sent to the ControlService server. + """ + return request, metadata + + def post_get_control(self, response: control.Control) -> control.Control: + """Post-rpc interceptor for get_control + + Override in a subclass to manipulate the response + after it is returned by the ControlService server but before + it is returned to user code. + """ + return response + + def pre_list_controls( + self, + request: control_service.ListControlsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[control_service.ListControlsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_controls + + Override in a subclass to manipulate the request or metadata + before they are sent to the ControlService server. 
+ """ + return request, metadata + + def post_list_controls( + self, response: control_service.ListControlsResponse + ) -> control_service.ListControlsResponse: + """Post-rpc interceptor for list_controls + + Override in a subclass to manipulate the response + after it is returned by the ControlService server but before + it is returned to user code. + """ + return response + + def pre_update_control( + self, + request: control_service.UpdateControlRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[control_service.UpdateControlRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for update_control + + Override in a subclass to manipulate the request or metadata + before they are sent to the ControlService server. + """ + return request, metadata + + def post_update_control(self, response: gcd_control.Control) -> gcd_control.Control: + """Post-rpc interceptor for update_control + + Override in a subclass to manipulate the response + after it is returned by the ControlService server but before + it is returned to user code. + """ + return response + + def pre_cancel_operation( + self, + request: operations_pb2.CancelOperationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[operations_pb2.CancelOperationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for cancel_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the ControlService server. + """ + return request, metadata + + def post_cancel_operation(self, response: None) -> None: + """Post-rpc interceptor for cancel_operation + + Override in a subclass to manipulate the response + after it is returned by the ControlService server but before + it is returned to user code. + """ + return response + + def pre_get_operation( + self, + request: operations_pb2.GetOperationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[operations_pb2.GetOperationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the ControlService server. + """ + return request, metadata + + def post_get_operation( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for get_operation + + Override in a subclass to manipulate the response + after it is returned by the ControlService server but before + it is returned to user code. + """ + return response + + def pre_list_operations( + self, + request: operations_pb2.ListOperationsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[operations_pb2.ListOperationsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_operations + + Override in a subclass to manipulate the request or metadata + before they are sent to the ControlService server. + """ + return request, metadata + + def post_list_operations( + self, response: operations_pb2.ListOperationsResponse + ) -> operations_pb2.ListOperationsResponse: + """Post-rpc interceptor for list_operations + + Override in a subclass to manipulate the response + after it is returned by the ControlService server but before + it is returned to user code. + """ + return response + + +@dataclasses.dataclass +class ControlServiceRestStub: + _session: AuthorizedSession + _host: str + _interceptor: ControlServiceRestInterceptor + + +class ControlServiceRestTransport(ControlServiceTransport): + """REST backend transport for ControlService. + + Service for performing CRUD operations on Controls. 
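All of the interceptor hooks above are effectively no-ops by default, so a custom subclass only needs to override the methods it cares about. Below is a hedged sketch of wiring one in; it assumes Application Default Credentials, and the header name and logging are purely illustrative.

```python
# Sketch: override two of the no-op hooks and install the interceptor on a
# REST transport. The header name and logger usage are illustrative only.
import logging

from google.cloud import discoveryengine_v1alpha
from google.cloud.discoveryengine_v1alpha.services.control_service.transports.rest import (
    ControlServiceRestInterceptor,
    ControlServiceRestTransport,
)


class AuditingInterceptor(ControlServiceRestInterceptor):
    def pre_get_control(self, request, metadata):
        # Extra metadata entries are sent as HTTP headers on the request.
        return request, tuple(metadata) + (("x-audit-tag", "control-review"),)

    def post_get_control(self, response):
        logging.info("GetControl returned %s", response.name)
        return response


transport = ControlServiceRestTransport(interceptor=AuditingInterceptor())
client = discoveryengine_v1alpha.ControlServiceClient(transport=transport)
```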
+ Controls allow for custom logic to be implemented in the serving + path. Controls need to be attached to a Serving Config to be + considered during a request. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends JSON representations of protocol buffers over HTTP/1.1 + + """ + + def __init__( + self, + *, + host: str = "discoveryengine.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", + interceptor: Optional[ControlServiceRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'discoveryengine.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. It is ignored + if ``channel`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. + """ + # Run the base constructor + # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. 
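The `host` and `url_scheme` arguments documented above let the REST transport target something other than the public endpoint, for example a local test server over plain HTTP. A sketch under those assumptions follows; the address is a placeholder and `AnonymousCredentials` is used only to avoid an Application Default Credentials lookup during local testing.

```python
# Sketch: point the REST transport at a local test server over plain HTTP.
from google.auth.credentials import AnonymousCredentials
from google.cloud import discoveryengine_v1alpha
from google.cloud.discoveryengine_v1alpha.services.control_service.transports.rest import (
    ControlServiceRestTransport,
)

transport = ControlServiceRestTransport(
    host="localhost:8080",            # placeholder test endpoint
    url_scheme="http",                # "https" is the default
    credentials=AnonymousCredentials(),
)
client = discoveryengine_v1alpha.ControlServiceClient(transport=transport)
```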
+        # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the
+        # credentials object
+        maybe_url_match = re.match("^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host)
+        if maybe_url_match is None:
+            raise ValueError(
+                f"Unexpected hostname structure: {host}"
+            )  # pragma: NO COVER
+
+        url_match_items = maybe_url_match.groupdict()
+
+        host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host
+
+        super().__init__(
+            host=host,
+            credentials=credentials,
+            client_info=client_info,
+            always_use_jwt_access=always_use_jwt_access,
+            api_audience=api_audience,
+        )
+        self._session = AuthorizedSession(
+            self._credentials, default_host=self.DEFAULT_HOST
+        )
+        if client_cert_source_for_mtls:
+            self._session.configure_mtls_channel(client_cert_source_for_mtls)
+        self._interceptor = interceptor or ControlServiceRestInterceptor()
+        self._prep_wrapped_messages(client_info)
+
+    class _CreateControl(ControlServiceRestStub):
+        def __hash__(self):
+            return hash("CreateControl")
+
+        __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {
+            "controlId": "",
+        }
+
+        @classmethod
+        def _get_unset_required_fields(cls, message_dict):
+            return {
+                k: v
+                for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items()
+                if k not in message_dict
+            }
+
+        def __call__(
+            self,
+            request: control_service.CreateControlRequest,
+            *,
+            retry: OptionalRetry = gapic_v1.method.DEFAULT,
+            timeout: Optional[float] = None,
+            metadata: Sequence[Tuple[str, str]] = (),
+        ) -> gcd_control.Control:
+            r"""Call the create control method over HTTP.
+
+            Args:
+                request (~.control_service.CreateControlRequest):
+                    The request object. Request for CreateControl method.
+                retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                    should be retried.
+                timeout (float): The timeout for this request.
+                metadata (Sequence[Tuple[str, str]]): Strings which should be
+                    sent along with the request as metadata.
+
+            Returns:
+                ~.gcd_control.Control:
+                    Defines a conditioned behavior to employ during serving.
+                    Must be attached to a
+                    [ServingConfig][google.cloud.discoveryengine.v1alpha.ServingConfig]
+                    to be considered at serving time. Permitted actions
+                    dependent on ``SolutionType``.
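For reference, a hedged sketch of the client call this stub serves: the `Control` message becomes the request body and the required `control_id` is transcoded into the `controlId` query parameter, per the HTTP bindings listed just below. It assumes the standard flattened arguments and top-level type exports; resource names and field values are placeholders, and a real Control also needs an action valid for its solution type.

```python
# Sketch only: CreateControl over REST. All values are placeholders.
from google.cloud import discoveryengine_v1alpha

client = discoveryengine_v1alpha.ControlServiceClient(transport="rest")

control = discoveryengine_v1alpha.Control(
    display_name="Demo control",
    solution_type=discoveryengine_v1alpha.SolutionType.SOLUTION_TYPE_SEARCH,
)
created = client.create_control(
    parent="projects/my-project/locations/global/dataStores/my-data-store",
    control=control,
    control_id="demo-control",  # transcoded to the controlId query parameter
)
print(created.name)
```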
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1alpha/{parent=projects/*/locations/*/dataStores/*}/controls", + "body": "control", + }, + { + "method": "post", + "uri": "/v1alpha/{parent=projects/*/locations/*/collections/*/dataStores/*}/controls", + "body": "control", + }, + { + "method": "post", + "uri": "/v1alpha/{parent=projects/*/locations/*/collections/*/engines/*}/controls", + "body": "control", + }, + ] + request, metadata = self._interceptor.pre_create_control(request, metadata) + pb_request = control_service.CreateControlRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = gcd_control.Control() + pb_resp = gcd_control.Control.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_create_control(resp) + return resp + + class _DeleteControl(ControlServiceRestStub): + def __hash__(self): + return hash("DeleteControl") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: control_service.DeleteControlRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ): + r"""Call the delete control method over HTTP. + + Args: + request (~.control_service.DeleteControlRequest): + The request object. Request for DeleteControl method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v1alpha/{name=projects/*/locations/*/dataStores/*/controls/*}", + }, + { + "method": "delete", + "uri": "/v1alpha/{name=projects/*/locations/*/collections/*/dataStores/*/controls/*}", + }, + { + "method": "delete", + "uri": "/v1alpha/{name=projects/*/locations/*/collections/*/engines/*/controls/*}", + }, + ] + request, metadata = self._interceptor.pre_delete_control(request, metadata) + pb_request = control_service.DeleteControlRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + class _GetControl(ControlServiceRestStub): + def __hash__(self): + return hash("GetControl") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: control_service.GetControlRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> control.Control: + r"""Call the get control method over HTTP. + + Args: + request (~.control_service.GetControlRequest): + The request object. Request for GetControl method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.control.Control: + Defines a conditioned behavior to employ during serving. + Must be attached to a + [ServingConfig][google.cloud.discoveryengine.v1alpha.ServingConfig] + to be considered at serving time. Permitted actions + dependent on ``SolutionType``. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1alpha/{name=projects/*/locations/*/dataStores/*/controls/*}", + }, + { + "method": "get", + "uri": "/v1alpha/{name=projects/*/locations/*/collections/*/dataStores/*/controls/*}", + }, + { + "method": "get", + "uri": "/v1alpha/{name=projects/*/locations/*/collections/*/engines/*/controls/*}", + }, + ] + request, metadata = self._interceptor.pre_get_control(request, metadata) + pb_request = control_service.GetControlRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = control.Control() + pb_resp = control.Control.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_control(resp) + return resp + + class _ListControls(ControlServiceRestStub): + def __hash__(self): + return hash("ListControls") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: control_service.ListControlsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> control_service.ListControlsResponse: + r"""Call the list controls method over HTTP. + + Args: + request (~.control_service.ListControlsRequest): + The request object. Request for ListControls method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.control_service.ListControlsResponse: + Response for ListControls method. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1alpha/{parent=projects/*/locations/*/dataStores/*}/controls", + }, + { + "method": "get", + "uri": "/v1alpha/{parent=projects/*/locations/*/collections/*/dataStores/*}/controls", + }, + { + "method": "get", + "uri": "/v1alpha/{parent=projects/*/locations/*/collections/*/engines/*}/controls", + }, + ] + request, metadata = self._interceptor.pre_list_controls(request, metadata) + pb_request = control_service.ListControlsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = control_service.ListControlsResponse() + pb_resp = control_service.ListControlsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_controls(resp) + return resp + + class _UpdateControl(ControlServiceRestStub): + def __hash__(self): + return hash("UpdateControl") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: control_service.UpdateControlRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> gcd_control.Control: + r"""Call the update control method over HTTP. + + Args: + request (~.control_service.UpdateControlRequest): + The request object. Request for UpdateControl method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.gcd_control.Control: + Defines a conditioned behavior to employ during serving. + Must be attached to a + [ServingConfig][google.cloud.discoveryengine.v1alpha.ServingConfig] + to be considered at serving time. Permitted actions + dependent on ``SolutionType``. 
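A matching sketch for this stub: the PATCH bindings just below carry the modified `Control` as the body, with an `update_mask` narrowing which fields change. The flattened `control`/`update_mask` arguments assumed here follow the usual GAPIC surface, and the resource name is a placeholder.

```python
# Sketch only: UpdateControl over REST with a field mask.
from google.cloud import discoveryengine_v1alpha
from google.protobuf import field_mask_pb2

client = discoveryengine_v1alpha.ControlServiceClient(transport="rest")

control = discoveryengine_v1alpha.Control(
    name=(
        "projects/my-project/locations/global/dataStores/my-data-store"
        "/controls/demo-control"  # placeholder resource name
    ),
    display_name="Demo control (renamed)",
)
updated = client.update_control(
    control=control,
    update_mask=field_mask_pb2.FieldMask(paths=["display_name"]),
)
print(updated.display_name)
```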
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "patch", + "uri": "/v1alpha/{control.name=projects/*/locations/*/dataStores/*/controls/*}", + "body": "control", + }, + { + "method": "patch", + "uri": "/v1alpha/{control.name=projects/*/locations/*/collections/*/dataStores/*/controls/*}", + "body": "control", + }, + { + "method": "patch", + "uri": "/v1alpha/{control.name=projects/*/locations/*/collections/*/engines/*/controls/*}", + "body": "control", + }, + ] + request, metadata = self._interceptor.pre_update_control(request, metadata) + pb_request = control_service.UpdateControlRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = gcd_control.Control() + pb_resp = gcd_control.Control.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_update_control(resp) + return resp + + @property + def create_control( + self, + ) -> Callable[[control_service.CreateControlRequest], gcd_control.Control]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._CreateControl(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete_control( + self, + ) -> Callable[[control_service.DeleteControlRequest], empty_pb2.Empty]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._DeleteControl(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_control( + self, + ) -> Callable[[control_service.GetControlRequest], control.Control]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetControl(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_controls( + self, + ) -> Callable[ + [control_service.ListControlsRequest], control_service.ListControlsResponse + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._ListControls(self._session, self._host, self._interceptor) # type: ignore + + @property + def update_control( + self, + ) -> Callable[[control_service.UpdateControlRequest], gcd_control.Control]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._UpdateControl(self._session, self._host, self._interceptor) # type: ignore + + @property + def cancel_operation(self): + return self._CancelOperation(self._session, self._host, self._interceptor) # type: ignore + + class _CancelOperation(ControlServiceRestStub): + def __call__( + self, + request: operations_pb2.CancelOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Call the cancel operation method over HTTP. + + Args: + request (operations_pb2.CancelOperationRequest): + The request object for CancelOperation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1alpha/{name=projects/*/locations/*/collections/*/dataStores/*/branches/*/operations/*}:cancel", + "body": "*", + }, + { + "method": "post", + "uri": "/v1alpha/{name=projects/*/locations/*/dataStores/*/branches/*/operations/*}:cancel", + "body": "*", + }, + ] + + request, metadata = self._interceptor.pre_cancel_operation( + request, metadata + ) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + body = json.dumps(transcoded_request["body"]) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + return self._interceptor.post_cancel_operation(None) + + @property + def get_operation(self): + return self._GetOperation(self._session, self._host, self._interceptor) # type: ignore + + class _GetOperation(ControlServiceRestStub): + def __call__( + self, + request: operations_pb2.GetOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the get operation method over HTTP. + + Args: + request (operations_pb2.GetOperationRequest): + The request object for GetOperation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + operations_pb2.Operation: Response from GetOperation method. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1alpha/{name=projects/*/locations/*/collections/*/dataConnector/operations/*}", + }, + { + "method": "get", + "uri": "/v1alpha/{name=projects/*/locations/*/collections/*/dataStores/*/branches/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1alpha/{name=projects/*/locations/*/collections/*/dataStores/*/models/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1alpha/{name=projects/*/locations/*/collections/*/dataStores/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1alpha/{name=projects/*/locations/*/collections/*/dataStores/*/schemas/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1alpha/{name=projects/*/locations/*/collections/*/dataStores/*/siteSearchEngine/operations/*}", + }, + { + "method": "get", + "uri": "/v1alpha/{name=projects/*/locations/*/collections/*/dataStores/*/siteSearchEngine/targetSites/operations/*}", + }, + { + "method": "get", + "uri": "/v1alpha/{name=projects/*/locations/*/collections/*/engines/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1alpha/{name=projects/*/locations/*/collections/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1alpha/{name=projects/*/locations/*/dataStores/*/branches/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1alpha/{name=projects/*/locations/*/dataStores/*/models/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1alpha/{name=projects/*/locations/*/dataStores/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1alpha/{name=projects/*/locations/*/evaluations/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1alpha/{name=projects/*/locations/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1alpha/{name=projects/*/locations/*/sampleQuerySets/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1alpha/{name=projects/*/operations/*}", + }, + ] + + request, metadata = self._interceptor.pre_get_operation(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = operations_pb2.Operation() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_get_operation(resp) + return resp + + @property + def list_operations(self): + return self._ListOperations(self._session, self._host, self._interceptor) # type: ignore + + class _ListOperations(ControlServiceRestStub): + def __call__( + self, + request: operations_pb2.ListOperationsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Call the list operations method over HTTP. 
+ + Args: + request (operations_pb2.ListOperationsRequest): + The request object for ListOperations method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + operations_pb2.ListOperationsResponse: Response from ListOperations method. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1alpha/{name=projects/*/locations/*/collections/*/dataConnector}/operations", + }, + { + "method": "get", + "uri": "/v1alpha/{name=projects/*/locations/*/collections/*/dataStores/*/branches/*}/operations", + }, + { + "method": "get", + "uri": "/v1alpha/{name=projects/*/locations/*/collections/*/dataStores/*/models/*}/operations", + }, + { + "method": "get", + "uri": "/v1alpha/{name=projects/*/locations/*/collections/*/dataStores/*/schemas/*}/operations", + }, + { + "method": "get", + "uri": "/v1alpha/{name=projects/*/locations/*/collections/*/dataStores/*/siteSearchEngine/targetSites}/operations", + }, + { + "method": "get", + "uri": "/v1alpha/{name=projects/*/locations/*/collections/*/dataStores/*/siteSearchEngine}/operations", + }, + { + "method": "get", + "uri": "/v1alpha/{name=projects/*/locations/*/collections/*/dataStores/*}/operations", + }, + { + "method": "get", + "uri": "/v1alpha/{name=projects/*/locations/*/collections/*/engines/*}/operations", + }, + { + "method": "get", + "uri": "/v1alpha/{name=projects/*/locations/*/collections/*}/operations", + }, + { + "method": "get", + "uri": "/v1alpha/{name=projects/*/locations/*/dataStores/*/branches/*}/operations", + }, + { + "method": "get", + "uri": "/v1alpha/{name=projects/*/locations/*/dataStores/*/models/*}/operations", + }, + { + "method": "get", + "uri": "/v1alpha/{name=projects/*/locations/*/dataStores/*}/operations", + }, + { + "method": "get", + "uri": "/v1alpha/{name=projects/*/locations/*}/operations", + }, + { + "method": "get", + "uri": "/v1alpha/{name=projects/*}/operations", + }, + ] + + request, metadata = self._interceptor.pre_list_operations(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
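The `_ListOperations` stub above, like the other REST stubs in this transport, matches the request against its list of `http_options`, expands the chosen URI template, and turns the leftover fields into query parameters. The following minimal sketch shows that transcode-and-flatten flow using `google.api_core` directly; the resource name and `page_size` value are placeholders, not part of the generated change.

```python
# Standalone illustration of the transcode/flatten flow used by the REST stubs
# above. Not part of the generated change; the resource name is a placeholder.
from google.api_core import path_template, rest_helpers

http_options = [
    {"method": "get", "uri": "/v1alpha/{name=projects/*/locations/*}/operations"},
    {"method": "get", "uri": "/v1alpha/{name=projects/*}/operations"},
]

# transcode() picks the first rule whose URI template matches the request
# fields, expands the template, and leaves unmatched fields for the query string.
transcoded = path_template.transcode(
    http_options,
    name="projects/my-project/locations/global",
    page_size=50,
)
print(transcoded["method"])  # get
print(transcoded["uri"])     # /v1alpha/projects/my-project/locations/global/operations

# Leftover request fields (here page_size) become query parameters.
print(rest_helpers.flatten_query_params(transcoded["query_params"]))
```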
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = operations_pb2.ListOperationsResponse() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_list_operations(resp) + return resp + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__ = ("ControlServiceRestTransport",) diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/conversational_search_service/async_client.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/conversational_search_service/async_client.py index b898ce71ee85..10e8551dcd34 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/conversational_search_service/async_client.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/conversational_search_service/async_client.py @@ -1908,6 +1908,63 @@ async def get_operation( # Done; return the response. return response + async def cancel_operation( + self, + request: Optional[operations_pb2.CancelOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Starts asynchronous cancellation on a long-running operation. + + The server makes a best effort to cancel the operation, but success + is not guaranteed. If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.CancelOperationRequest`): + The request object. Request message for + `CancelOperation` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.CancelOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.cancel_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. 
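To show how the `cancel_operation` coroutine added to `ConversationalSearchServiceAsyncClient` above is meant to be called, here is a minimal usage sketch. The operation name is a placeholder and Application Default Credentials are assumed.

```python
# Minimal usage sketch for the async cancel_operation mixin added above.
# The operation name is a placeholder; ADC credentials are assumed.
import asyncio

from google.cloud import discoveryengine_v1alpha
from google.longrunning import operations_pb2


async def cancel(operation_name: str) -> None:
    client = discoveryengine_v1alpha.ConversationalSearchServiceAsyncClient()
    # CancelOperation has no response payload; a successful call returns None.
    await client.cancel_operation(
        request=operations_pb2.CancelOperationRequest(name=operation_name)
    )


asyncio.run(
    cancel(
        "projects/my-project/locations/global/dataStores/my-store"
        "/branches/0/operations/import-123"
    )
)
```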
+ await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + async def __aenter__(self) -> "ConversationalSearchServiceAsyncClient": return self diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/conversational_search_service/client.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/conversational_search_service/client.py index 6e64395402f5..74bc6f95072f 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/conversational_search_service/client.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/conversational_search_service/client.py @@ -2429,6 +2429,63 @@ def get_operation( # Done; return the response. return response + def cancel_operation( + self, + request: Optional[operations_pb2.CancelOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Starts asynchronous cancellation on a long-running operation. + + The server makes a best effort to cancel the operation, but success + is not guaranteed. If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.CancelOperationRequest`): + The request object. Request message for + `CancelOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.CancelOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.cancel_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. 
+ rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( gapic_version=package_version.__version__ diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/conversational_search_service/transports/base.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/conversational_search_service/transports/base.py index af5dc87f9b81..b64c7db77ea9 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/conversational_search_service/transports/base.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/conversational_search_service/transports/base.py @@ -360,6 +360,12 @@ def get_operation( ]: raise NotImplementedError() + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None,]: + raise NotImplementedError() + @property def kind(self) -> str: raise NotImplementedError() diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/conversational_search_service/transports/grpc.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/conversational_search_service/transports/grpc.py index bad941ec3148..a4c192e3ab2e 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/conversational_search_service/transports/grpc.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/conversational_search_service/transports/grpc.py @@ -637,6 +637,23 @@ def list_sessions( def close(self): self.grpc_channel.close() + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None]: + r"""Return a callable for the cancel_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "cancel_operation" not in self._stubs: + self._stubs["cancel_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/CancelOperation", + request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["cancel_operation"] + @property def get_operation( self, diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/conversational_search_service/transports/grpc_asyncio.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/conversational_search_service/transports/grpc_asyncio.py index 9bfbcca27809..3751950234a1 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/conversational_search_service/transports/grpc_asyncio.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/conversational_search_service/transports/grpc_asyncio.py @@ -720,6 +720,23 @@ def _prep_wrapped_messages(self, client_info): def close(self): return self.grpc_channel.close() + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None]: + r"""Return a callable for the cancel_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "cancel_operation" not in self._stubs: + self._stubs["cancel_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/CancelOperation", + request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["cancel_operation"] + @property def get_operation( self, diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/conversational_search_service/transports/rest.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/conversational_search_service/transports/rest.py index 8beb3903b8e6..81aed288849c 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/conversational_search_service/transports/rest.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/conversational_search_service/transports/rest.py @@ -474,6 +474,27 @@ def post_update_session(self, response: gcd_session.Session) -> gcd_session.Sess """ return response + def pre_cancel_operation( + self, + request: operations_pb2.CancelOperationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[operations_pb2.CancelOperationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for cancel_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the ConversationalSearchService server. + """ + return request, metadata + + def post_cancel_operation(self, response: None) -> None: + """Post-rpc interceptor for cancel_operation + + Override in a subclass to manipulate the response + after it is returned by the ConversationalSearchService server but before + it is returned to user code. + """ + return response + def pre_get_operation( self, request: operations_pb2.GetOperationRequest, @@ -2036,6 +2057,76 @@ def update_session( # In C++ this would require a dynamic_cast return self._UpdateSession(self._session, self._host, self._interceptor) # type: ignore + @property + def cancel_operation(self): + return self._CancelOperation(self._session, self._host, self._interceptor) # type: ignore + + class _CancelOperation(ConversationalSearchServiceRestStub): + def __call__( + self, + request: operations_pb2.CancelOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Call the cancel operation method over HTTP. + + Args: + request (operations_pb2.CancelOperationRequest): + The request object for CancelOperation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1alpha/{name=projects/*/locations/*/collections/*/dataStores/*/branches/*/operations/*}:cancel", + "body": "*", + }, + { + "method": "post", + "uri": "/v1alpha/{name=projects/*/locations/*/dataStores/*/branches/*/operations/*}:cancel", + "body": "*", + }, + ] + + request, metadata = self._interceptor.pre_cancel_operation( + request, metadata + ) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + body = json.dumps(transcoded_request["body"]) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + return self._interceptor.post_cancel_operation(None) + @property def get_operation(self): return self._GetOperation(self._session, self._host, self._interceptor) # type: ignore @@ -2121,6 +2212,10 @@ def __call__( "method": "get", "uri": "/v1alpha/{name=projects/*/locations/*/operations/*}", }, + { + "method": "get", + "uri": "/v1alpha/{name=projects/*/locations/*/sampleQuerySets/*/operations/*}", + }, { "method": "get", "uri": "/v1alpha/{name=projects/*/operations/*}", diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/data_store_service/async_client.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/data_store_service/async_client.py index 7e23ff4ae695..efd9d373ddaf 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/data_store_service/async_client.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/data_store_service/async_client.py @@ -1374,6 +1374,63 @@ async def get_operation( # Done; return the response. return response + async def cancel_operation( + self, + request: Optional[operations_pb2.CancelOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Starts asynchronous cancellation on a long-running operation. + + The server makes a best effort to cancel the operation, but success + is not guaranteed. If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.CancelOperationRequest`): + The request object. Request message for + `CancelOperation` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. 
+ if isinstance(request, dict): + request = operations_pb2.CancelOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.cancel_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + async def __aenter__(self) -> "DataStoreServiceAsyncClient": return self diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/data_store_service/client.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/data_store_service/client.py index d37275da8ae2..10b6c73a3ba6 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/data_store_service/client.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/data_store_service/client.py @@ -1851,6 +1851,63 @@ def get_operation( # Done; return the response. return response + def cancel_operation( + self, + request: Optional[operations_pb2.CancelOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Starts asynchronous cancellation on a long-running operation. + + The server makes a best effort to cancel the operation, but success + is not guaranteed. If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.CancelOperationRequest`): + The request object. Request message for + `CancelOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.CancelOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.cancel_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. 
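Since `DataStoreServiceClient` already exposes `get_operation` and now gains the `cancel_operation` shown above, a typical pattern is to poll a long-running operation and issue a best-effort cancel if it is still running. A minimal sketch; the operation name is a placeholder and error handling is omitted.

```python
# Sketch: poll a long-running operation and cancel it if it is still running.
# The operation name is a placeholder; ADC credentials are assumed.
from google.cloud import discoveryengine_v1alpha
from google.longrunning import operations_pb2

client = discoveryengine_v1alpha.DataStoreServiceClient()
op_name = (
    "projects/my-project/locations/global/dataStores/my-store"
    "/branches/0/operations/import-123"
)

operation = client.get_operation(operations_pb2.GetOperationRequest(name=op_name))
if not operation.done:
    # Best-effort: the server may still complete the operation after this call.
    client.cancel_operation(operations_pb2.CancelOperationRequest(name=op_name))
```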
+ rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( gapic_version=package_version.__version__ diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/data_store_service/transports/base.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/data_store_service/transports/base.py index 4216e8fb0c4c..8efe8de56641 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/data_store_service/transports/base.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/data_store_service/transports/base.py @@ -278,6 +278,12 @@ def get_operation( ]: raise NotImplementedError() + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None,]: + raise NotImplementedError() + @property def kind(self) -> str: raise NotImplementedError() diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/data_store_service/transports/grpc.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/data_store_service/transports/grpc.py index 82266c3d7ddf..0ff351657530 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/data_store_service/transports/grpc.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/data_store_service/transports/grpc.py @@ -484,6 +484,23 @@ def update_document_processing_config( def close(self): self.grpc_channel.close() + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None]: + r"""Return a callable for the cancel_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "cancel_operation" not in self._stubs: + self._stubs["cancel_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/CancelOperation", + request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["cancel_operation"] + @property def get_operation( self, diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/data_store_service/transports/grpc_asyncio.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/data_store_service/transports/grpc_asyncio.py index 643ff3296a4a..380f99a92744 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/data_store_service/transports/grpc_asyncio.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/data_store_service/transports/grpc_asyncio.py @@ -532,6 +532,23 @@ def _prep_wrapped_messages(self, client_info): def close(self): return self.grpc_channel.close() + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None]: + r"""Return a callable for the cancel_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "cancel_operation" not in self._stubs: + self._stubs["cancel_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/CancelOperation", + request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["cancel_operation"] + @property def get_operation( self, diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/data_store_service/transports/rest.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/data_store_service/transports/rest.py index 5543795f24f1..20ed8c8e2c19 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/data_store_service/transports/rest.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/data_store_service/transports/rest.py @@ -306,6 +306,27 @@ def post_update_document_processing_config( """ return response + def pre_cancel_operation( + self, + request: operations_pb2.CancelOperationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[operations_pb2.CancelOperationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for cancel_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the DataStoreService server. + """ + return request, metadata + + def post_cancel_operation(self, response: None) -> None: + """Post-rpc interceptor for cancel_operation + + Override in a subclass to manipulate the response + after it is returned by the DataStoreService server but before + it is returned to user code. + """ + return response + def pre_get_operation( self, request: operations_pb2.GetOperationRequest, @@ -462,6 +483,18 @@ def operations_client(self) -> operations_v1.AbstractOperationsClient: # Only create a new client if we do not already have one. if self._operations_client is None: http_options: Dict[str, List[Dict[str, str]]] = { + "google.longrunning.Operations.CancelOperation": [ + { + "method": "post", + "uri": "/v1alpha/{name=projects/*/locations/*/collections/*/dataStores/*/branches/*/operations/*}:cancel", + "body": "*", + }, + { + "method": "post", + "uri": "/v1alpha/{name=projects/*/locations/*/dataStores/*/branches/*/operations/*}:cancel", + "body": "*", + }, + ], "google.longrunning.Operations.GetOperation": [ { "method": "get", @@ -519,6 +552,10 @@ def operations_client(self) -> operations_v1.AbstractOperationsClient: "method": "get", "uri": "/v1alpha/{name=projects/*/locations/*/operations/*}", }, + { + "method": "get", + "uri": "/v1alpha/{name=projects/*/locations/*/sampleQuerySets/*/operations/*}", + }, { "method": "get", "uri": "/v1alpha/{name=projects/*/operations/*}", @@ -1377,6 +1414,76 @@ def update_document_processing_config( # In C++ this would require a dynamic_cast return self._UpdateDocumentProcessingConfig(self._session, self._host, self._interceptor) # type: ignore + @property + def cancel_operation(self): + return self._CancelOperation(self._session, self._host, self._interceptor) # type: ignore + + class _CancelOperation(DataStoreServiceRestStub): + def __call__( + self, + request: operations_pb2.CancelOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Call the cancel operation method over HTTP. + + Args: + request (operations_pb2.CancelOperationRequest): + The request object for CancelOperation method. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1alpha/{name=projects/*/locations/*/collections/*/dataStores/*/branches/*/operations/*}:cancel", + "body": "*", + }, + { + "method": "post", + "uri": "/v1alpha/{name=projects/*/locations/*/dataStores/*/branches/*/operations/*}:cancel", + "body": "*", + }, + ] + + request, metadata = self._interceptor.pre_cancel_operation( + request, metadata + ) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + body = json.dumps(transcoded_request["body"]) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + return self._interceptor.post_cancel_operation(None) + @property def get_operation(self): return self._GetOperation(self._session, self._host, self._interceptor) # type: ignore @@ -1462,6 +1569,10 @@ def __call__( "method": "get", "uri": "/v1alpha/{name=projects/*/locations/*/operations/*}", }, + { + "method": "get", + "uri": "/v1alpha/{name=projects/*/locations/*/sampleQuerySets/*/operations/*}", + }, { "method": "get", "uri": "/v1alpha/{name=projects/*/operations/*}", diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/document_service/async_client.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/document_service/async_client.py index 87da4df8b373..8602592e1b3b 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/document_service/async_client.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/document_service/async_client.py @@ -594,7 +594,7 @@ async def sample_create_document(): document_id (:class:`str`): Required. The ID to use for the [Document][google.cloud.discoveryengine.v1alpha.Document], - which will become the final component of the + which becomes the final component of the [Document.name][google.cloud.discoveryengine.v1alpha.Document.name]. If the caller does not have permission to create the @@ -742,7 +742,7 @@ async def sample_update_document(): update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): Indicates which fields in the provided imported 'document' to update. - If not set, will by default update all + If not set, by default updates all fields. This corresponds to the ``update_mask`` field @@ -929,8 +929,8 @@ async def import_documents( ) -> operation_async.AsyncOperation: r"""Bulk import of multiple [Document][google.cloud.discoveryengine.v1alpha.Document]s. - Request processing may be synchronous. Non-existing items will - be created. 
+ Request processing may be synchronous. Non-existing items are + created. Note: It is possible for a subset of the [Document][google.cloud.discoveryengine.v1alpha.Document]s to be @@ -1184,7 +1184,7 @@ async def sample_get_processed_document(): # Initialize request argument(s) request = discoveryengine_v1alpha.GetProcessedDocumentRequest( name="name_value", - processed_document_type="CHUNKED_DOCUMENT", + processed_document_type="PNG_CONVERTED_DOCUMENT", ) # Make the request @@ -1389,6 +1389,63 @@ async def get_operation( # Done; return the response. return response + async def cancel_operation( + self, + request: Optional[operations_pb2.CancelOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Starts asynchronous cancellation on a long-running operation. + + The server makes a best effort to cancel the operation, but success + is not guaranteed. If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.CancelOperationRequest`): + The request object. Request message for + `CancelOperation` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.CancelOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.cancel_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + async def __aenter__(self) -> "DocumentServiceAsyncClient": return self diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/document_service/client.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/document_service/client.py index 34ecf9b4f756..14e9ce710ecc 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/document_service/client.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/document_service/client.py @@ -1056,7 +1056,7 @@ def sample_create_document(): document_id (str): Required. The ID to use for the [Document][google.cloud.discoveryengine.v1alpha.Document], - which will become the final component of the + which becomes the final component of the [Document.name][google.cloud.discoveryengine.v1alpha.Document.name]. If the caller does not have permission to create the @@ -1201,7 +1201,7 @@ def sample_update_document(): update_mask (google.protobuf.field_mask_pb2.FieldMask): Indicates which fields in the provided imported 'document' to update. 
- If not set, will by default update all + If not set, by default updates all fields. This corresponds to the ``update_mask`` field @@ -1382,8 +1382,8 @@ def import_documents( ) -> operation.Operation: r"""Bulk import of multiple [Document][google.cloud.discoveryengine.v1alpha.Document]s. - Request processing may be synchronous. Non-existing items will - be created. + Request processing may be synchronous. Non-existing items are + created. Note: It is possible for a subset of the [Document][google.cloud.discoveryengine.v1alpha.Document]s to be @@ -1633,7 +1633,7 @@ def sample_get_processed_document(): # Initialize request argument(s) request = discoveryengine_v1alpha.GetProcessedDocumentRequest( name="name_value", - processed_document_type="CHUNKED_DOCUMENT", + processed_document_type="PNG_CONVERTED_DOCUMENT", ) # Make the request @@ -1848,6 +1848,63 @@ def get_operation( # Done; return the response. return response + def cancel_operation( + self, + request: Optional[operations_pb2.CancelOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Starts asynchronous cancellation on a long-running operation. + + The server makes a best effort to cancel the operation, but success + is not guaranteed. If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.CancelOperationRequest`): + The request object. Request message for + `CancelOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.CancelOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.cancel_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. 
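The updated sample above switches the default `processed_document_type` to `PNG_CONVERTED_DOCUMENT`; the same request can be built outside the sample as sketched below. The document name is a placeholder and ADC credentials are assumed.

```python
# Sketch mirroring the updated sample default above; the document name is a
# placeholder and ADC credentials are assumed.
from google.cloud import discoveryengine_v1alpha

client = discoveryengine_v1alpha.DocumentServiceClient()
request = discoveryengine_v1alpha.GetProcessedDocumentRequest(
    name=(
        "projects/my-project/locations/global/dataStores/my-store"
        "/branches/0/documents/doc-123"
    ),
    processed_document_type="PNG_CONVERTED_DOCUMENT",
)
response = client.get_processed_document(request=request)
print(response)
```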
+ rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( gapic_version=package_version.__version__ diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/document_service/transports/base.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/document_service/transports/base.py index bb2471bfc1a0..74c8eb8868bf 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/document_service/transports/base.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/document_service/transports/base.py @@ -296,6 +296,12 @@ def get_operation( ]: raise NotImplementedError() + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None,]: + raise NotImplementedError() + @property def kind(self) -> str: raise NotImplementedError() diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/document_service/transports/grpc.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/document_service/transports/grpc.py index 879138ad9bd6..944e1a2d593f 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/document_service/transports/grpc.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/document_service/transports/grpc.py @@ -404,8 +404,8 @@ def import_documents( Bulk import of multiple [Document][google.cloud.discoveryengine.v1alpha.Document]s. - Request processing may be synchronous. Non-existing items will - be created. + Request processing may be synchronous. Non-existing items are + created. Note: It is possible for a subset of the [Document][google.cloud.discoveryengine.v1alpha.Document]s to be @@ -505,6 +505,23 @@ def get_processed_document( def close(self): self.grpc_channel.close() + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None]: + r"""Return a callable for the cancel_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "cancel_operation" not in self._stubs: + self._stubs["cancel_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/CancelOperation", + request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["cancel_operation"] + @property def get_operation( self, diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/document_service/transports/grpc_asyncio.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/document_service/transports/grpc_asyncio.py index 47497238c84e..a1636fafd478 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/document_service/transports/grpc_asyncio.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/document_service/transports/grpc_asyncio.py @@ -417,8 +417,8 @@ def import_documents( Bulk import of multiple [Document][google.cloud.discoveryengine.v1alpha.Document]s. - Request processing may be synchronous. Non-existing items will - be created. 
+ Request processing may be synchronous. Non-existing items are + created. Note: It is possible for a subset of the [Document][google.cloud.discoveryengine.v1alpha.Document]s to be @@ -575,6 +575,23 @@ def _prep_wrapped_messages(self, client_info): def close(self): return self.grpc_channel.close() + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None]: + r"""Return a callable for the cancel_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "cancel_operation" not in self._stubs: + self._stubs["cancel_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/CancelOperation", + request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["cancel_operation"] + @property def get_operation( self, diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/document_service/transports/rest.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/document_service/transports/rest.py index ac96353a83ad..c6f0962ef19a 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/document_service/transports/rest.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/document_service/transports/rest.py @@ -316,6 +316,27 @@ def post_update_document( """ return response + def pre_cancel_operation( + self, + request: operations_pb2.CancelOperationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[operations_pb2.CancelOperationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for cancel_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the DocumentService server. + """ + return request, metadata + + def post_cancel_operation(self, response: None) -> None: + """Post-rpc interceptor for cancel_operation + + Override in a subclass to manipulate the response + after it is returned by the DocumentService server but before + it is returned to user code. + """ + return response + def pre_get_operation( self, request: operations_pb2.GetOperationRequest, @@ -472,6 +493,18 @@ def operations_client(self) -> operations_v1.AbstractOperationsClient: # Only create a new client if we do not already have one. 
if self._operations_client is None: http_options: Dict[str, List[Dict[str, str]]] = { + "google.longrunning.Operations.CancelOperation": [ + { + "method": "post", + "uri": "/v1alpha/{name=projects/*/locations/*/collections/*/dataStores/*/branches/*/operations/*}:cancel", + "body": "*", + }, + { + "method": "post", + "uri": "/v1alpha/{name=projects/*/locations/*/dataStores/*/branches/*/operations/*}:cancel", + "body": "*", + }, + ], "google.longrunning.Operations.GetOperation": [ { "method": "get", @@ -529,6 +562,10 @@ def operations_client(self) -> operations_v1.AbstractOperationsClient: "method": "get", "uri": "/v1alpha/{name=projects/*/locations/*/operations/*}", }, + { + "method": "get", + "uri": "/v1alpha/{name=projects/*/locations/*/sampleQuerySets/*/operations/*}", + }, { "method": "get", "uri": "/v1alpha/{name=projects/*/operations/*}", @@ -1449,6 +1486,76 @@ def update_document( # In C++ this would require a dynamic_cast return self._UpdateDocument(self._session, self._host, self._interceptor) # type: ignore + @property + def cancel_operation(self): + return self._CancelOperation(self._session, self._host, self._interceptor) # type: ignore + + class _CancelOperation(DocumentServiceRestStub): + def __call__( + self, + request: operations_pb2.CancelOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Call the cancel operation method over HTTP. + + Args: + request (operations_pb2.CancelOperationRequest): + The request object for CancelOperation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1alpha/{name=projects/*/locations/*/collections/*/dataStores/*/branches/*/operations/*}:cancel", + "body": "*", + }, + { + "method": "post", + "uri": "/v1alpha/{name=projects/*/locations/*/dataStores/*/branches/*/operations/*}:cancel", + "body": "*", + }, + ] + + request, metadata = self._interceptor.pre_cancel_operation( + request, metadata + ) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + body = json.dumps(transcoded_request["body"]) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
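The status check that follows (`response.status_code >= 400`) is what converts HTTP failures into typed `google.api_core` exceptions, so callers of the REST transport handle errors the same way they would over gRPC. A hedged sketch of catching those exceptions around the new cancel call; the operation name is a placeholder.

```python
# Sketch: non-2xx responses from the REST stubs surface as typed
# google.api_core exceptions. The operation name is a placeholder.
from google.api_core import exceptions as core_exceptions
from google.cloud import discoveryengine_v1alpha
from google.longrunning import operations_pb2

client = discoveryengine_v1alpha.DocumentServiceClient(transport="rest")
op_name = (
    "projects/my-project/locations/global/dataStores/my-store"
    "/branches/0/operations/import-123"
)

try:
    client.cancel_operation(operations_pb2.CancelOperationRequest(name=op_name))
except core_exceptions.NotFound:
    # HTTP 404: the operation name did not resolve.
    pass
except core_exceptions.GoogleAPICallError as exc:
    # Any other non-2xx response maps to a GoogleAPICallError subclass.
    print(exc.code, exc.message)
```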
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + return self._interceptor.post_cancel_operation(None) + @property def get_operation(self): return self._GetOperation(self._session, self._host, self._interceptor) # type: ignore @@ -1534,6 +1641,10 @@ def __call__( "method": "get", "uri": "/v1alpha/{name=projects/*/locations/*/operations/*}", }, + { + "method": "get", + "uri": "/v1alpha/{name=projects/*/locations/*/sampleQuerySets/*/operations/*}", + }, { "method": "get", "uri": "/v1alpha/{name=projects/*/operations/*}", diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/engine_service/async_client.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/engine_service/async_client.py index 7c32b30b802d..f329154f4332 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/engine_service/async_client.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/engine_service/async_client.py @@ -1396,6 +1396,63 @@ async def get_operation( # Done; return the response. return response + async def cancel_operation( + self, + request: Optional[operations_pb2.CancelOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Starts asynchronous cancellation on a long-running operation. + + The server makes a best effort to cancel the operation, but success + is not guaranteed. If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.CancelOperationRequest`): + The request object. Request message for + `CancelOperation` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.CancelOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.cancel_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. 
+ await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + async def __aenter__(self) -> "EngineServiceAsyncClient": return self diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/engine_service/client.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/engine_service/client.py index 86b9979f451f..7857b5e1cab3 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/engine_service/client.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/engine_service/client.py @@ -1827,6 +1827,63 @@ def get_operation( # Done; return the response. return response + def cancel_operation( + self, + request: Optional[operations_pb2.CancelOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Starts asynchronous cancellation on a long-running operation. + + The server makes a best effort to cancel the operation, but success + is not guaranteed. If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.CancelOperationRequest`): + The request object. Request message for + `CancelOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.CancelOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.cancel_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. 
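+ # Hypothetical usage sketch (the resource name is illustrative only, not taken from this change):
+ #   client.cancel_operation(
+ #       request={"name": "projects/my-project/locations/global/operations/my-op"}
+ #   )
+ # The call returns None on success; failures are raised as exceptions.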
+ rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( gapic_version=package_version.__version__ diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/engine_service/transports/base.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/engine_service/transports/base.py index 86f5fc03f5f1..629598b5812b 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/engine_service/transports/base.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/engine_service/transports/base.py @@ -282,6 +282,12 @@ def get_operation( ]: raise NotImplementedError() + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None,]: + raise NotImplementedError() + @property def kind(self) -> str: raise NotImplementedError() diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/engine_service/transports/grpc.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/engine_service/transports/grpc.py index e9c0b2eca6a7..9151ad5c1a31 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/engine_service/transports/grpc.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/engine_service/transports/grpc.py @@ -475,6 +475,23 @@ def tune_engine( def close(self): self.grpc_channel.close() + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None]: + r"""Return a callable for the cancel_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "cancel_operation" not in self._stubs: + self._stubs["cancel_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/CancelOperation", + request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["cancel_operation"] + @property def get_operation( self, diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/engine_service/transports/grpc_asyncio.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/engine_service/transports/grpc_asyncio.py index a1b76a33dd24..d1753a8de2dc 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/engine_service/transports/grpc_asyncio.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/engine_service/transports/grpc_asyncio.py @@ -535,6 +535,23 @@ def _prep_wrapped_messages(self, client_info): def close(self): return self.grpc_channel.close() + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None]: + r"""Return a callable for the cancel_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
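+ # No response deserializer is registered because the RPC returns
+ # google.protobuf.Empty; the stub is cached so repeated property access
+ # reuses the same callable.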
+ if "cancel_operation" not in self._stubs: + self._stubs["cancel_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/CancelOperation", + request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["cancel_operation"] + @property def get_operation( self, diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/engine_service/transports/rest.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/engine_service/transports/rest.py index fa0ea17da226..b5f01b4df43f 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/engine_service/transports/rest.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/engine_service/transports/rest.py @@ -320,6 +320,27 @@ def post_update_engine(self, response: gcd_engine.Engine) -> gcd_engine.Engine: """ return response + def pre_cancel_operation( + self, + request: operations_pb2.CancelOperationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[operations_pb2.CancelOperationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for cancel_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the EngineService server. + """ + return request, metadata + + def post_cancel_operation(self, response: None) -> None: + """Post-rpc interceptor for cancel_operation + + Override in a subclass to manipulate the response + after it is returned by the EngineService server but before + it is returned to user code. + """ + return response + def pre_get_operation( self, request: operations_pb2.GetOperationRequest, @@ -475,6 +496,18 @@ def operations_client(self) -> operations_v1.AbstractOperationsClient: # Only create a new client if we do not already have one. if self._operations_client is None: http_options: Dict[str, List[Dict[str, str]]] = { + "google.longrunning.Operations.CancelOperation": [ + { + "method": "post", + "uri": "/v1alpha/{name=projects/*/locations/*/collections/*/dataStores/*/branches/*/operations/*}:cancel", + "body": "*", + }, + { + "method": "post", + "uri": "/v1alpha/{name=projects/*/locations/*/dataStores/*/branches/*/operations/*}:cancel", + "body": "*", + }, + ], "google.longrunning.Operations.GetOperation": [ { "method": "get", @@ -532,6 +565,10 @@ def operations_client(self) -> operations_v1.AbstractOperationsClient: "method": "get", "uri": "/v1alpha/{name=projects/*/locations/*/operations/*}", }, + { + "method": "get", + "uri": "/v1alpha/{name=projects/*/locations/*/sampleQuerySets/*/operations/*}", + }, { "method": "get", "uri": "/v1alpha/{name=projects/*/operations/*}", @@ -1427,6 +1464,76 @@ def update_engine( # In C++ this would require a dynamic_cast return self._UpdateEngine(self._session, self._host, self._interceptor) # type: ignore + @property + def cancel_operation(self): + return self._CancelOperation(self._session, self._host, self._interceptor) # type: ignore + + class _CancelOperation(EngineServiceRestStub): + def __call__( + self, + request: operations_pb2.CancelOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Call the cancel operation method over HTTP. + + Args: + request (operations_pb2.CancelOperationRequest): + The request object for CancelOperation method. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1alpha/{name=projects/*/locations/*/collections/*/dataStores/*/branches/*/operations/*}:cancel", + "body": "*", + }, + { + "method": "post", + "uri": "/v1alpha/{name=projects/*/locations/*/dataStores/*/branches/*/operations/*}:cancel", + "body": "*", + }, + ] + + request, metadata = self._interceptor.pre_cancel_operation( + request, metadata + ) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + body = json.dumps(transcoded_request["body"]) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + return self._interceptor.post_cancel_operation(None) + @property def get_operation(self): return self._GetOperation(self._session, self._host, self._interceptor) # type: ignore @@ -1512,6 +1619,10 @@ def __call__( "method": "get", "uri": "/v1alpha/{name=projects/*/locations/*/operations/*}", }, + { + "method": "get", + "uri": "/v1alpha/{name=projects/*/locations/*/sampleQuerySets/*/operations/*}", + }, { "method": "get", "uri": "/v1alpha/{name=projects/*/operations/*}", diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/estimate_billing_service/async_client.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/estimate_billing_service/async_client.py index fbc3a2e7f11f..54ce32285cc1 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/estimate_billing_service/async_client.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/estimate_billing_service/async_client.py @@ -486,6 +486,63 @@ async def get_operation( # Done; return the response. return response + async def cancel_operation( + self, + request: Optional[operations_pb2.CancelOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Starts asynchronous cancellation on a long-running operation. + + The server makes a best effort to cancel the operation, but success + is not guaranteed. If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.CancelOperationRequest`): + The request object. Request message for + `CancelOperation` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.CancelOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.cancel_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + async def __aenter__(self) -> "EstimateBillingServiceAsyncClient": return self diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/estimate_billing_service/client.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/estimate_billing_service/client.py index 682f1755e71b..d29cf2c1c5ea 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/estimate_billing_service/client.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/estimate_billing_service/client.py @@ -912,6 +912,63 @@ def get_operation( # Done; return the response. return response + def cancel_operation( + self, + request: Optional[operations_pb2.CancelOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Starts asynchronous cancellation on a long-running operation. + + The server makes a best effort to cancel the operation, but success + is not guaranteed. If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.CancelOperationRequest`): + The request object. Request message for + `CancelOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.CancelOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.cancel_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. 
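+ # default_timeout=None on the wrapper means no client-side deadline or retry
+ # is applied unless the caller passes explicit `timeout`/`retry` arguments.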
+ rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( gapic_version=package_version.__version__ diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/estimate_billing_service/transports/base.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/estimate_billing_service/transports/base.py index bf430ebbb6fc..0b2bc0e52215 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/estimate_billing_service/transports/base.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/estimate_billing_service/transports/base.py @@ -179,6 +179,12 @@ def get_operation( ]: raise NotImplementedError() + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None,]: + raise NotImplementedError() + @property def kind(self) -> str: raise NotImplementedError() diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/estimate_billing_service/transports/grpc.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/estimate_billing_service/transports/grpc.py index 501c2f85b709..63ff56e9d4de 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/estimate_billing_service/transports/grpc.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/estimate_billing_service/transports/grpc.py @@ -281,6 +281,23 @@ def estimate_data_size( def close(self): self.grpc_channel.close() + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None]: + r"""Return a callable for the cancel_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "cancel_operation" not in self._stubs: + self._stubs["cancel_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/CancelOperation", + request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["cancel_operation"] + @property def get_operation( self, diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/estimate_billing_service/transports/grpc_asyncio.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/estimate_billing_service/transports/grpc_asyncio.py index b8fe187fff31..0c9375aa706b 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/estimate_billing_service/transports/grpc_asyncio.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/estimate_billing_service/transports/grpc_asyncio.py @@ -298,6 +298,23 @@ def _prep_wrapped_messages(self, client_info): def close(self): return self.grpc_channel.close() + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None]: + r"""Return a callable for the cancel_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "cancel_operation" not in self._stubs: + self._stubs["cancel_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/CancelOperation", + request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["cancel_operation"] + @property def get_operation( self, diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/estimate_billing_service/transports/rest.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/estimate_billing_service/transports/rest.py index 30d076310d4d..c43f3f022640 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/estimate_billing_service/transports/rest.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/estimate_billing_service/transports/rest.py @@ -111,6 +111,27 @@ def post_estimate_data_size( """ return response + def pre_cancel_operation( + self, + request: operations_pb2.CancelOperationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[operations_pb2.CancelOperationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for cancel_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the EstimateBillingService server. + """ + return request, metadata + + def post_cancel_operation(self, response: None) -> None: + """Post-rpc interceptor for cancel_operation + + Override in a subclass to manipulate the response + after it is returned by the EstimateBillingService server but before + it is returned to user code. + """ + return response + def pre_get_operation( self, request: operations_pb2.GetOperationRequest, @@ -265,6 +286,18 @@ def operations_client(self) -> operations_v1.AbstractOperationsClient: # Only create a new client if we do not already have one. if self._operations_client is None: http_options: Dict[str, List[Dict[str, str]]] = { + "google.longrunning.Operations.CancelOperation": [ + { + "method": "post", + "uri": "/v1alpha/{name=projects/*/locations/*/collections/*/dataStores/*/branches/*/operations/*}:cancel", + "body": "*", + }, + { + "method": "post", + "uri": "/v1alpha/{name=projects/*/locations/*/dataStores/*/branches/*/operations/*}:cancel", + "body": "*", + }, + ], "google.longrunning.Operations.GetOperation": [ { "method": "get", @@ -322,6 +355,10 @@ def operations_client(self) -> operations_v1.AbstractOperationsClient: "method": "get", "uri": "/v1alpha/{name=projects/*/locations/*/operations/*}", }, + { + "method": "get", + "uri": "/v1alpha/{name=projects/*/locations/*/sampleQuerySets/*/operations/*}", + }, { "method": "get", "uri": "/v1alpha/{name=projects/*/operations/*}", @@ -510,6 +547,76 @@ def estimate_data_size( # In C++ this would require a dynamic_cast return self._EstimateDataSize(self._session, self._host, self._interceptor) # type: ignore + @property + def cancel_operation(self): + return self._CancelOperation(self._session, self._host, self._interceptor) # type: ignore + + class _CancelOperation(EstimateBillingServiceRestStub): + def __call__( + self, + request: operations_pb2.CancelOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Call the cancel operation method over HTTP. + + Args: + request (operations_pb2.CancelOperationRequest): + The request object for CancelOperation method. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1alpha/{name=projects/*/locations/*/collections/*/dataStores/*/branches/*/operations/*}:cancel", + "body": "*", + }, + { + "method": "post", + "uri": "/v1alpha/{name=projects/*/locations/*/dataStores/*/branches/*/operations/*}:cancel", + "body": "*", + }, + ] + + request, metadata = self._interceptor.pre_cancel_operation( + request, metadata + ) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + body = json.dumps(transcoded_request["body"]) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + return self._interceptor.post_cancel_operation(None) + @property def get_operation(self): return self._GetOperation(self._session, self._host, self._interceptor) # type: ignore @@ -595,6 +702,10 @@ def __call__( "method": "get", "uri": "/v1alpha/{name=projects/*/locations/*/operations/*}", }, + { + "method": "get", + "uri": "/v1alpha/{name=projects/*/locations/*/sampleQuerySets/*/operations/*}", + }, { "method": "get", "uri": "/v1alpha/{name=projects/*/operations/*}", diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/grounded_generation_service/async_client.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/grounded_generation_service/async_client.py index 9591973d0cd5..018ca05b4080 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/grounded_generation_service/async_client.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/grounded_generation_service/async_client.py @@ -483,6 +483,63 @@ async def get_operation( # Done; return the response. return response + async def cancel_operation( + self, + request: Optional[operations_pb2.CancelOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Starts asynchronous cancellation on a long-running operation. + + The server makes a best effort to cancel the operation, but success + is not guaranteed. If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.CancelOperationRequest`): + The request object. Request message for + `CancelOperation` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.CancelOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.cancel_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + async def __aenter__(self) -> "GroundedGenerationServiceAsyncClient": return self diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/grounded_generation_service/client.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/grounded_generation_service/client.py index 89f0264e4e94..8466abfe190e 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/grounded_generation_service/client.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/grounded_generation_service/client.py @@ -906,6 +906,63 @@ def get_operation( # Done; return the response. return response + def cancel_operation( + self, + request: Optional[operations_pb2.CancelOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Starts asynchronous cancellation on a long-running operation. + + The server makes a best effort to cancel the operation, but success + is not guaranteed. If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.CancelOperationRequest`): + The request object. Request message for + `CancelOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.CancelOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.cancel_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. 
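+ # The metadata assembled above includes the x-goog-request-params routing
+ # header derived from request.name, which the backend uses to route the call.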
+ rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( gapic_version=package_version.__version__ diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/grounded_generation_service/transports/base.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/grounded_generation_service/transports/base.py index 6fc22370a973..277347596d48 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/grounded_generation_service/transports/base.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/grounded_generation_service/transports/base.py @@ -177,6 +177,12 @@ def get_operation( ]: raise NotImplementedError() + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None,]: + raise NotImplementedError() + @property def kind(self) -> str: raise NotImplementedError() diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/grounded_generation_service/transports/grpc.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/grounded_generation_service/transports/grpc.py index b778bbe7208f..cc48a4a83ecf 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/grounded_generation_service/transports/grpc.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/grounded_generation_service/transports/grpc.py @@ -267,6 +267,23 @@ def check_grounding( def close(self): self.grpc_channel.close() + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None]: + r"""Return a callable for the cancel_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "cancel_operation" not in self._stubs: + self._stubs["cancel_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/CancelOperation", + request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["cancel_operation"] + @property def get_operation( self, diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/grounded_generation_service/transports/grpc_asyncio.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/grounded_generation_service/transports/grpc_asyncio.py index fdce1465e957..45239cf3806c 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/grounded_generation_service/transports/grpc_asyncio.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/grounded_generation_service/transports/grpc_asyncio.py @@ -281,6 +281,23 @@ def _prep_wrapped_messages(self, client_info): def close(self): return self.grpc_channel.close() + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None]: + r"""Return a callable for the cancel_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
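+ # On the asyncio transport the returned callable is awaitable; awaiting it
+ # completes once the server has accepted the cancellation request, with no
+ # meaningful payload.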
+ if "cancel_operation" not in self._stubs: + self._stubs["cancel_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/CancelOperation", + request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["cancel_operation"] + @property def get_operation( self, diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/grounded_generation_service/transports/rest.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/grounded_generation_service/transports/rest.py index 04fbcfd4c203..83c023a25579 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/grounded_generation_service/transports/rest.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/grounded_generation_service/transports/rest.py @@ -105,6 +105,27 @@ def post_check_grounding( """ return response + def pre_cancel_operation( + self, + request: operations_pb2.CancelOperationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[operations_pb2.CancelOperationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for cancel_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the GroundedGenerationService server. + """ + return request, metadata + + def post_cancel_operation(self, response: None) -> None: + """Post-rpc interceptor for cancel_operation + + Override in a subclass to manipulate the response + after it is returned by the GroundedGenerationService server but before + it is returned to user code. + """ + return response + def pre_get_operation( self, request: operations_pb2.GetOperationRequest, @@ -356,6 +377,76 @@ def check_grounding( # In C++ this would require a dynamic_cast return self._CheckGrounding(self._session, self._host, self._interceptor) # type: ignore + @property + def cancel_operation(self): + return self._CancelOperation(self._session, self._host, self._interceptor) # type: ignore + + class _CancelOperation(GroundedGenerationServiceRestStub): + def __call__( + self, + request: operations_pb2.CancelOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Call the cancel operation method over HTTP. + + Args: + request (operations_pb2.CancelOperationRequest): + The request object for CancelOperation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1alpha/{name=projects/*/locations/*/collections/*/dataStores/*/branches/*/operations/*}:cancel", + "body": "*", + }, + { + "method": "post", + "uri": "/v1alpha/{name=projects/*/locations/*/dataStores/*/branches/*/operations/*}:cancel", + "body": "*", + }, + ] + + request, metadata = self._interceptor.pre_cancel_operation( + request, metadata + ) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + body = json.dumps(transcoded_request["body"]) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + return self._interceptor.post_cancel_operation(None) + @property def get_operation(self): return self._GetOperation(self._session, self._host, self._interceptor) # type: ignore @@ -441,6 +532,10 @@ def __call__( "method": "get", "uri": "/v1alpha/{name=projects/*/locations/*/operations/*}", }, + { + "method": "get", + "uri": "/v1alpha/{name=projects/*/locations/*/sampleQuerySets/*/operations/*}", + }, { "method": "get", "uri": "/v1alpha/{name=projects/*/operations/*}", diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/project_service/async_client.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/project_service/async_client.py index 0afe569f2cba..77f55f80a802 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/project_service/async_client.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/project_service/async_client.py @@ -783,6 +783,63 @@ async def get_operation( # Done; return the response. return response + async def cancel_operation( + self, + request: Optional[operations_pb2.CancelOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Starts asynchronous cancellation on a long-running operation. + + The server makes a best effort to cancel the operation, but success + is not guaranteed. If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.CancelOperationRequest`): + The request object. Request message for + `CancelOperation` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. 
+ if isinstance(request, dict): + request = operations_pb2.CancelOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.cancel_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + async def __aenter__(self) -> "ProjectServiceAsyncClient": return self diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/project_service/client.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/project_service/client.py index b6b811e91a8e..d2f430fdf986 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/project_service/client.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/project_service/client.py @@ -1200,6 +1200,63 @@ def get_operation( # Done; return the response. return response + def cancel_operation( + self, + request: Optional[operations_pb2.CancelOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Starts asynchronous cancellation on a long-running operation. + + The server makes a best effort to cancel the operation, but success + is not guaranteed. If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.CancelOperationRequest`): + The request object. Request message for + `CancelOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.CancelOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.cancel_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. 
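+ # Per google.longrunning semantics, a successfully cancelled operation
+ # completes with an error whose code is google.rpc.Code.CANCELLED, which a
+ # later get_operation call will report.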
+ rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( gapic_version=package_version.__version__ diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/project_service/transports/base.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/project_service/transports/base.py index 7549460a8afd..67b49de07647 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/project_service/transports/base.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/project_service/transports/base.py @@ -209,6 +209,12 @@ def get_operation( ]: raise NotImplementedError() + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None,]: + raise NotImplementedError() + @property def kind(self) -> str: raise NotImplementedError() diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/project_service/transports/grpc.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/project_service/transports/grpc.py index 9b0e33d3a360..c5fd91134a48 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/project_service/transports/grpc.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/project_service/transports/grpc.py @@ -349,6 +349,23 @@ def report_consent_change( def close(self): self.grpc_channel.close() + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None]: + r"""Return a callable for the cancel_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "cancel_operation" not in self._stubs: + self._stubs["cancel_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/CancelOperation", + request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["cancel_operation"] + @property def get_operation( self, diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/project_service/transports/grpc_asyncio.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/project_service/transports/grpc_asyncio.py index 228b7324a3bd..8b22a0ebb612 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/project_service/transports/grpc_asyncio.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/project_service/transports/grpc_asyncio.py @@ -379,6 +379,23 @@ def _prep_wrapped_messages(self, client_info): def close(self): return self.grpc_channel.close() + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None]: + r"""Return a callable for the cancel_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "cancel_operation" not in self._stubs: + self._stubs["cancel_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/CancelOperation", + request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["cancel_operation"] + @property def get_operation( self, diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/project_service/transports/rest.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/project_service/transports/rest.py index 4fb549898e20..b415132045b4 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/project_service/transports/rest.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/project_service/transports/rest.py @@ -171,6 +171,27 @@ def post_report_consent_change( """ return response + def pre_cancel_operation( + self, + request: operations_pb2.CancelOperationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[operations_pb2.CancelOperationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for cancel_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the ProjectService server. + """ + return request, metadata + + def post_cancel_operation(self, response: None) -> None: + """Post-rpc interceptor for cancel_operation + + Override in a subclass to manipulate the response + after it is returned by the ProjectService server but before + it is returned to user code. + """ + return response + def pre_get_operation( self, request: operations_pb2.GetOperationRequest, @@ -326,6 +347,18 @@ def operations_client(self) -> operations_v1.AbstractOperationsClient: # Only create a new client if we do not already have one. if self._operations_client is None: http_options: Dict[str, List[Dict[str, str]]] = { + "google.longrunning.Operations.CancelOperation": [ + { + "method": "post", + "uri": "/v1alpha/{name=projects/*/locations/*/collections/*/dataStores/*/branches/*/operations/*}:cancel", + "body": "*", + }, + { + "method": "post", + "uri": "/v1alpha/{name=projects/*/locations/*/dataStores/*/branches/*/operations/*}:cancel", + "body": "*", + }, + ], "google.longrunning.Operations.GetOperation": [ { "method": "get", @@ -383,6 +416,10 @@ def operations_client(self) -> operations_v1.AbstractOperationsClient: "method": "get", "uri": "/v1alpha/{name=projects/*/locations/*/operations/*}", }, + { + "method": "get", + "uri": "/v1alpha/{name=projects/*/locations/*/sampleQuerySets/*/operations/*}", + }, { "method": "get", "uri": "/v1alpha/{name=projects/*/operations/*}", @@ -771,6 +808,76 @@ def report_consent_change( # In C++ this would require a dynamic_cast return self._ReportConsentChange(self._session, self._host, self._interceptor) # type: ignore + @property + def cancel_operation(self): + return self._CancelOperation(self._session, self._host, self._interceptor) # type: ignore + + class _CancelOperation(ProjectServiceRestStub): + def __call__( + self, + request: operations_pb2.CancelOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Call the cancel operation method over HTTP. + + Args: + request (operations_pb2.CancelOperationRequest): + The request object for CancelOperation method. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1alpha/{name=projects/*/locations/*/collections/*/dataStores/*/branches/*/operations/*}:cancel", + "body": "*", + }, + { + "method": "post", + "uri": "/v1alpha/{name=projects/*/locations/*/dataStores/*/branches/*/operations/*}:cancel", + "body": "*", + }, + ] + + request, metadata = self._interceptor.pre_cancel_operation( + request, metadata + ) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + body = json.dumps(transcoded_request["body"]) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + return self._interceptor.post_cancel_operation(None) + @property def get_operation(self): return self._GetOperation(self._session, self._host, self._interceptor) # type: ignore @@ -856,6 +963,10 @@ def __call__( "method": "get", "uri": "/v1alpha/{name=projects/*/locations/*/operations/*}", }, + { + "method": "get", + "uri": "/v1alpha/{name=projects/*/locations/*/sampleQuerySets/*/operations/*}", + }, { "method": "get", "uri": "/v1alpha/{name=projects/*/operations/*}", diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/rank_service/async_client.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/rank_service/async_client.py index ac61f3977de0..c097d15f7515 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/rank_service/async_client.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/rank_service/async_client.py @@ -458,6 +458,63 @@ async def get_operation( # Done; return the response. return response + async def cancel_operation( + self, + request: Optional[operations_pb2.CancelOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Starts asynchronous cancellation on a long-running operation. + + The server makes a best effort to cancel the operation, but success + is not guaranteed. If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.CancelOperationRequest`): + The request object. Request message for + `CancelOperation` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.CancelOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.cancel_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + async def __aenter__(self) -> "RankServiceAsyncClient": return self diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/rank_service/client.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/rank_service/client.py index 8e5383a3272d..07a46c0e11e3 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/rank_service/client.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/rank_service/client.py @@ -888,6 +888,63 @@ def get_operation( # Done; return the response. return response + def cancel_operation( + self, + request: Optional[operations_pb2.CancelOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Starts asynchronous cancellation on a long-running operation. + + The server makes a best effort to cancel the operation, but success + is not guaranteed. If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.CancelOperationRequest`): + The request object. Request message for + `CancelOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.CancelOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.cancel_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. 
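+ # Failures typically surface as google.api_core.exceptions.GoogleAPICallError
+ # subclasses, for example NotFound when the named operation does not exist.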
+ rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( gapic_version=package_version.__version__ diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/rank_service/transports/base.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/rank_service/transports/base.py index 66a0514579f2..68983d378283 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/rank_service/transports/base.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/rank_service/transports/base.py @@ -174,6 +174,12 @@ def get_operation( ]: raise NotImplementedError() + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None,]: + raise NotImplementedError() + @property def kind(self) -> str: raise NotImplementedError() diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/rank_service/transports/grpc.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/rank_service/transports/grpc.py index 96df6f1f1492..17c1ca135a9e 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/rank_service/transports/grpc.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/rank_service/transports/grpc.py @@ -263,6 +263,23 @@ def rank(self) -> Callable[[rank_service.RankRequest], rank_service.RankResponse def close(self): self.grpc_channel.close() + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None]: + r"""Return a callable for the cancel_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "cancel_operation" not in self._stubs: + self._stubs["cancel_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/CancelOperation", + request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["cancel_operation"] + @property def get_operation( self, diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/rank_service/transports/grpc_asyncio.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/rank_service/transports/grpc_asyncio.py index ca6c73444fbf..3b836ea3b87b 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/rank_service/transports/grpc_asyncio.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/rank_service/transports/grpc_asyncio.py @@ -279,6 +279,23 @@ def _prep_wrapped_messages(self, client_info): def close(self): return self.grpc_channel.close() + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None]: + r"""Return a callable for the cancel_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
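# Sketch (an assumption, not generated code): the transport property above lazily
# caches a unary-unary stub under the "cancel_operation" key. Callers normally use
# the client method, which layers retry, timeout, and the routing header on top,
# but the raw transport callable is also reachable:
from google.cloud import discoveryengine_v1alpha
from google.longrunning import operations_pb2

client = discoveryengine_v1alpha.RankServiceClient()  # default gRPC transport
cancel = client.transport.cancel_operation  # the cached gRPC callable
cancel(operations_pb2.CancelOperationRequest(name="OPERATION_NAME_PLACEHOLDER"))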
+ if "cancel_operation" not in self._stubs: + self._stubs["cancel_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/CancelOperation", + request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["cancel_operation"] + @property def get_operation( self, diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/rank_service/transports/rest.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/rank_service/transports/rest.py index fb339d0730e0..254839651c73 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/rank_service/transports/rest.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/rank_service/transports/rest.py @@ -101,6 +101,27 @@ def post_rank( """ return response + def pre_cancel_operation( + self, + request: operations_pb2.CancelOperationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[operations_pb2.CancelOperationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for cancel_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the RankService server. + """ + return request, metadata + + def post_cancel_operation(self, response: None) -> None: + """Post-rpc interceptor for cancel_operation + + Override in a subclass to manipulate the response + after it is returned by the RankService server but before + it is returned to user code. + """ + return response + def pre_get_operation( self, request: operations_pb2.GetOperationRequest, @@ -347,6 +368,76 @@ def rank(self) -> Callable[[rank_service.RankRequest], rank_service.RankResponse # In C++ this would require a dynamic_cast return self._Rank(self._session, self._host, self._interceptor) # type: ignore + @property + def cancel_operation(self): + return self._CancelOperation(self._session, self._host, self._interceptor) # type: ignore + + class _CancelOperation(RankServiceRestStub): + def __call__( + self, + request: operations_pb2.CancelOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Call the cancel operation method over HTTP. + + Args: + request (operations_pb2.CancelOperationRequest): + The request object for CancelOperation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1alpha/{name=projects/*/locations/*/collections/*/dataStores/*/branches/*/operations/*}:cancel", + "body": "*", + }, + { + "method": "post", + "uri": "/v1alpha/{name=projects/*/locations/*/dataStores/*/branches/*/operations/*}:cancel", + "body": "*", + }, + ] + + request, metadata = self._interceptor.pre_cancel_operation( + request, metadata + ) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + body = json.dumps(transcoded_request["body"]) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + return self._interceptor.post_cancel_operation(None) + @property def get_operation(self): return self._GetOperation(self._session, self._host, self._interceptor) # type: ignore @@ -432,6 +523,10 @@ def __call__( "method": "get", "uri": "/v1alpha/{name=projects/*/locations/*/operations/*}", }, + { + "method": "get", + "uri": "/v1alpha/{name=projects/*/locations/*/sampleQuerySets/*/operations/*}", + }, { "method": "get", "uri": "/v1alpha/{name=projects/*/operations/*}", diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/recommendation_service/async_client.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/recommendation_service/async_client.py index 56fb784d89c9..eff1a6b29402 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/recommendation_service/async_client.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/recommendation_service/async_client.py @@ -65,8 +65,14 @@ class RecommendationServiceAsyncClient: _DEFAULT_ENDPOINT_TEMPLATE = RecommendationServiceClient._DEFAULT_ENDPOINT_TEMPLATE _DEFAULT_UNIVERSE = RecommendationServiceClient._DEFAULT_UNIVERSE + data_store_path = staticmethod(RecommendationServiceClient.data_store_path) + parse_data_store_path = staticmethod( + RecommendationServiceClient.parse_data_store_path + ) document_path = staticmethod(RecommendationServiceClient.document_path) parse_document_path = staticmethod(RecommendationServiceClient.parse_document_path) + engine_path = staticmethod(RecommendationServiceClient.engine_path) + parse_engine_path = staticmethod(RecommendationServiceClient.parse_engine_path) serving_config_path = staticmethod(RecommendationServiceClient.serving_config_path) parse_serving_config_path = staticmethod( RecommendationServiceClient.parse_serving_config_path @@ -475,6 +481,63 @@ async def get_operation( # Done; return the response. 
return response + async def cancel_operation( + self, + request: Optional[operations_pb2.CancelOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Starts asynchronous cancellation on a long-running operation. + + The server makes a best effort to cancel the operation, but success + is not guaranteed. If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.CancelOperationRequest`): + The request object. Request message for + `CancelOperation` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.CancelOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.cancel_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + async def __aenter__(self) -> "RecommendationServiceAsyncClient": return self diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/recommendation_service/client.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/recommendation_service/client.py index afbfa92350f2..b72e350eab5a 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/recommendation_service/client.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/recommendation_service/client.py @@ -185,6 +185,28 @@ def transport(self) -> RecommendationServiceTransport: """ return self._transport + @staticmethod + def data_store_path( + project: str, + location: str, + data_store: str, + ) -> str: + """Returns a fully-qualified data_store string.""" + return "projects/{project}/locations/{location}/dataStores/{data_store}".format( + project=project, + location=location, + data_store=data_store, + ) + + @staticmethod + def parse_data_store_path(path: str) -> Dict[str, str]: + """Parses a data_store path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/dataStores/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + @staticmethod def document_path( project: str, @@ -211,6 +233,30 @@ def parse_document_path(path: str) -> Dict[str, str]: ) return m.groupdict() if m else {} + @staticmethod + def engine_path( + project: str, + location: str, + collection: str, + engine: str, + ) -> str: + """Returns a fully-qualified engine string.""" + return 
"projects/{project}/locations/{location}/collections/{collection}/engines/{engine}".format( + project=project, + location=location, + collection=collection, + engine=engine, + ) + + @staticmethod + def parse_engine_path(path: str) -> Dict[str, str]: + """Parses a engine path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/collections/(?P.+?)/engines/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + @staticmethod def serving_config_path( project: str, @@ -930,6 +976,63 @@ def get_operation( # Done; return the response. return response + def cancel_operation( + self, + request: Optional[operations_pb2.CancelOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Starts asynchronous cancellation on a long-running operation. + + The server makes a best effort to cancel the operation, but success + is not guaranteed. If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.CancelOperationRequest`): + The request object. Request message for + `CancelOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.CancelOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.cancel_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. 
+ rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( gapic_version=package_version.__version__ diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/recommendation_service/transports/base.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/recommendation_service/transports/base.py index e3931858cd2d..5e40189c69b6 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/recommendation_service/transports/base.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/recommendation_service/transports/base.py @@ -177,6 +177,12 @@ def get_operation( ]: raise NotImplementedError() + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None,]: + raise NotImplementedError() + @property def kind(self) -> str: raise NotImplementedError() diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/recommendation_service/transports/grpc.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/recommendation_service/transports/grpc.py index 1e255abed63b..3101198652d3 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/recommendation_service/transports/grpc.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/recommendation_service/transports/grpc.py @@ -268,6 +268,23 @@ def recommend( def close(self): self.grpc_channel.close() + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None]: + r"""Return a callable for the cancel_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "cancel_operation" not in self._stubs: + self._stubs["cancel_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/CancelOperation", + request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["cancel_operation"] + @property def get_operation( self, diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/recommendation_service/transports/grpc_asyncio.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/recommendation_service/transports/grpc_asyncio.py index b38f938397e3..06777f71f811 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/recommendation_service/transports/grpc_asyncio.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/recommendation_service/transports/grpc_asyncio.py @@ -282,6 +282,23 @@ def _prep_wrapped_messages(self, client_info): def close(self): return self.grpc_channel.close() + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None]: + r"""Return a callable for the cancel_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "cancel_operation" not in self._stubs: + self._stubs["cancel_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/CancelOperation", + request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["cancel_operation"] + @property def get_operation( self, diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/recommendation_service/transports/rest.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/recommendation_service/transports/rest.py index b3eae0c09863..15b829f895f4 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/recommendation_service/transports/rest.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/recommendation_service/transports/rest.py @@ -103,6 +103,27 @@ def post_recommend( """ return response + def pre_cancel_operation( + self, + request: operations_pb2.CancelOperationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[operations_pb2.CancelOperationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for cancel_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the RecommendationService server. + """ + return request, metadata + + def post_cancel_operation(self, response: None) -> None: + """Post-rpc interceptor for cancel_operation + + Override in a subclass to manipulate the response + after it is returned by the RecommendationService server but before + it is returned to user code. + """ + return response + def pre_get_operation( self, request: operations_pb2.GetOperationRequest, @@ -361,6 +382,76 @@ def recommend( # In C++ this would require a dynamic_cast return self._Recommend(self._session, self._host, self._interceptor) # type: ignore + @property + def cancel_operation(self): + return self._CancelOperation(self._session, self._host, self._interceptor) # type: ignore + + class _CancelOperation(RecommendationServiceRestStub): + def __call__( + self, + request: operations_pb2.CancelOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Call the cancel operation method over HTTP. + + Args: + request (operations_pb2.CancelOperationRequest): + The request object for CancelOperation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1alpha/{name=projects/*/locations/*/collections/*/dataStores/*/branches/*/operations/*}:cancel", + "body": "*", + }, + { + "method": "post", + "uri": "/v1alpha/{name=projects/*/locations/*/dataStores/*/branches/*/operations/*}:cancel", + "body": "*", + }, + ] + + request, metadata = self._interceptor.pre_cancel_operation( + request, metadata + ) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + body = json.dumps(transcoded_request["body"]) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + return self._interceptor.post_cancel_operation(None) + @property def get_operation(self): return self._GetOperation(self._session, self._host, self._interceptor) # type: ignore @@ -446,6 +537,10 @@ def __call__( "method": "get", "uri": "/v1alpha/{name=projects/*/locations/*/operations/*}", }, + { + "method": "get", + "uri": "/v1alpha/{name=projects/*/locations/*/sampleQuerySets/*/operations/*}", + }, { "method": "get", "uri": "/v1alpha/{name=projects/*/operations/*}", diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/schema_service/async_client.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/schema_service/async_client.py index 068f35d212ea..805183f88b1c 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/schema_service/async_client.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/schema_service/async_client.py @@ -569,7 +569,7 @@ async def sample_create_schema(): schema_id (:class:`str`): Required. The ID to use for the [Schema][google.cloud.discoveryengine.v1alpha.Schema], - which will become the final component of the + which becomes the final component of the [Schema.name][google.cloud.discoveryengine.v1alpha.Schema.name]. This field should conform to @@ -999,6 +999,63 @@ async def get_operation( # Done; return the response. return response + async def cancel_operation( + self, + request: Optional[operations_pb2.CancelOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Starts asynchronous cancellation on a long-running operation. + + The server makes a best effort to cancel the operation, but success + is not guaranteed. If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.CancelOperationRequest`): + The request object. Request message for + `CancelOperation` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. 
+ timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.CancelOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.cancel_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + async def __aenter__(self) -> "SchemaServiceAsyncClient": return self diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/schema_service/client.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/schema_service/client.py index 8b5ce14f2a90..010e78468539 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/schema_service/client.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/schema_service/client.py @@ -1003,7 +1003,7 @@ def sample_create_schema(): schema_id (str): Required. The ID to use for the [Schema][google.cloud.discoveryengine.v1alpha.Schema], - which will become the final component of the + which becomes the final component of the [Schema.name][google.cloud.discoveryengine.v1alpha.Schema.name]. This field should conform to @@ -1438,6 +1438,63 @@ def get_operation( # Done; return the response. return response + def cancel_operation( + self, + request: Optional[operations_pb2.CancelOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Starts asynchronous cancellation on a long-running operation. + + The server makes a best effort to cancel the operation, but success + is not guaranteed. If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.CancelOperationRequest`): + The request object. Request message for + `CancelOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.CancelOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
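# The wrapper above installs no default retry or timeout, so callers can supply
# their own per call. A sketch with a conservative transient-error retry; the
# operation name is a placeholder.
from google.api_core import retry as retries
from google.cloud import discoveryengine_v1alpha
from google.longrunning import operations_pb2

schema_client = discoveryengine_v1alpha.SchemaServiceClient()
schema_client.cancel_operation(
    request=operations_pb2.CancelOperationRequest(name="OPERATION_NAME_PLACEHOLDER"),
    retry=retries.Retry(
        initial=0.25, maximum=5.0, multiplier=2.0, predicate=retries.if_transient_error
    ),
    timeout=30.0,
)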
+ rpc = gapic_v1.method.wrap_method( + self._transport.cancel_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( gapic_version=package_version.__version__ diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/schema_service/transports/base.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/schema_service/transports/base.py index 52907bca0020..617aa5ddee42 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/schema_service/transports/base.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/schema_service/transports/base.py @@ -238,6 +238,12 @@ def get_operation( ]: raise NotImplementedError() + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None,]: + raise NotImplementedError() + @property def kind(self) -> str: raise NotImplementedError() diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/schema_service/transports/grpc.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/schema_service/transports/grpc.py index 9bc22ef0a16a..5faf213de336 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/schema_service/transports/grpc.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/schema_service/transports/grpc.py @@ -385,6 +385,23 @@ def delete_schema( def close(self): self.grpc_channel.close() + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None]: + r"""Return a callable for the cancel_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "cancel_operation" not in self._stubs: + self._stubs["cancel_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/CancelOperation", + request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["cancel_operation"] + @property def get_operation( self, diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/schema_service/transports/grpc_asyncio.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/schema_service/transports/grpc_asyncio.py index e4b645117519..b9614c8869ee 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/schema_service/transports/grpc_asyncio.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/schema_service/transports/grpc_asyncio.py @@ -430,6 +430,23 @@ def _prep_wrapped_messages(self, client_info): def close(self): return self.grpc_channel.close() + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None]: + r"""Return a callable for the cancel_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "cancel_operation" not in self._stubs: + self._stubs["cancel_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/CancelOperation", + request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["cancel_operation"] + @property def get_operation( self, diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/schema_service/transports/rest.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/schema_service/transports/rest.py index f64580bf219e..7cb12e185fbb 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/schema_service/transports/rest.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/schema_service/transports/rest.py @@ -231,6 +231,27 @@ def post_update_schema( """ return response + def pre_cancel_operation( + self, + request: operations_pb2.CancelOperationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[operations_pb2.CancelOperationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for cancel_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the SchemaService server. + """ + return request, metadata + + def post_cancel_operation(self, response: None) -> None: + """Post-rpc interceptor for cancel_operation + + Override in a subclass to manipulate the response + after it is returned by the SchemaService server but before + it is returned to user code. + """ + return response + def pre_get_operation( self, request: operations_pb2.GetOperationRequest, @@ -386,6 +407,18 @@ def operations_client(self) -> operations_v1.AbstractOperationsClient: # Only create a new client if we do not already have one. 
if self._operations_client is None: http_options: Dict[str, List[Dict[str, str]]] = { + "google.longrunning.Operations.CancelOperation": [ + { + "method": "post", + "uri": "/v1alpha/{name=projects/*/locations/*/collections/*/dataStores/*/branches/*/operations/*}:cancel", + "body": "*", + }, + { + "method": "post", + "uri": "/v1alpha/{name=projects/*/locations/*/dataStores/*/branches/*/operations/*}:cancel", + "body": "*", + }, + ], "google.longrunning.Operations.GetOperation": [ { "method": "get", @@ -443,6 +476,10 @@ def operations_client(self) -> operations_v1.AbstractOperationsClient: "method": "get", "uri": "/v1alpha/{name=projects/*/locations/*/operations/*}", }, + { + "method": "get", + "uri": "/v1alpha/{name=projects/*/locations/*/sampleQuerySets/*/operations/*}", + }, { "method": "get", "uri": "/v1alpha/{name=projects/*/operations/*}", @@ -1045,6 +1082,76 @@ def update_schema( # In C++ this would require a dynamic_cast return self._UpdateSchema(self._session, self._host, self._interceptor) # type: ignore + @property + def cancel_operation(self): + return self._CancelOperation(self._session, self._host, self._interceptor) # type: ignore + + class _CancelOperation(SchemaServiceRestStub): + def __call__( + self, + request: operations_pb2.CancelOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Call the cancel operation method over HTTP. + + Args: + request (operations_pb2.CancelOperationRequest): + The request object for CancelOperation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1alpha/{name=projects/*/locations/*/collections/*/dataStores/*/branches/*/operations/*}:cancel", + "body": "*", + }, + { + "method": "post", + "uri": "/v1alpha/{name=projects/*/locations/*/dataStores/*/branches/*/operations/*}:cancel", + "body": "*", + }, + ] + + request, metadata = self._interceptor.pre_cancel_operation( + request, metadata + ) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + body = json.dumps(transcoded_request["body"]) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
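# Errors raised here surface as google.api_core exception subclasses, so callers can
# catch them like any other GAPIC failure. Sketch only: the operation name is a
# placeholder, and transport="rest" simply selects the REST transport defined in this file.
from google.api_core import exceptions as core_exceptions
from google.cloud import discoveryengine_v1alpha
from google.longrunning import operations_pb2

rest_schema_client = discoveryengine_v1alpha.SchemaServiceClient(transport="rest")
try:
    rest_schema_client.cancel_operation(
        request=operations_pb2.CancelOperationRequest(name="OPERATION_NAME_PLACEHOLDER")
    )
except core_exceptions.NotFound:
    pass  # the operation no longer exists; nothing to cancel
except core_exceptions.GoogleAPICallError as exc:
    print(f"CancelOperation failed: {exc}")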
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + return self._interceptor.post_cancel_operation(None) + @property def get_operation(self): return self._GetOperation(self._session, self._host, self._interceptor) # type: ignore @@ -1130,6 +1237,10 @@ def __call__( "method": "get", "uri": "/v1alpha/{name=projects/*/locations/*/operations/*}", }, + { + "method": "get", + "uri": "/v1alpha/{name=projects/*/locations/*/sampleQuerySets/*/operations/*}", + }, { "method": "get", "uri": "/v1alpha/{name=projects/*/operations/*}", diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/search_service/async_client.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/search_service/async_client.py index 1f7f09d15134..8eb30f6032b2 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/search_service/async_client.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/search_service/async_client.py @@ -483,6 +483,63 @@ async def get_operation( # Done; return the response. return response + async def cancel_operation( + self, + request: Optional[operations_pb2.CancelOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Starts asynchronous cancellation on a long-running operation. + + The server makes a best effort to cancel the operation, but success + is not guaranteed. If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.CancelOperationRequest`): + The request object. Request message for + `CancelOperation` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.CancelOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.cancel_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. 
+ await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + async def __aenter__(self) -> "SearchServiceAsyncClient": return self diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/search_service/client.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/search_service/client.py index 581b1f51c797..b499fca8318f 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/search_service/client.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/search_service/client.py @@ -1006,6 +1006,63 @@ def get_operation( # Done; return the response. return response + def cancel_operation( + self, + request: Optional[operations_pb2.CancelOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Starts asynchronous cancellation on a long-running operation. + + The server makes a best effort to cancel the operation, but success + is not guaranteed. If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.CancelOperationRequest`): + The request object. Request message for + `CancelOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.CancelOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.cancel_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. 
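# Async counterpart sketch for the method above. The routing-header line in the
# generated code adds an "x-goog-request-params" metadata entry carrying the
# operation name, so callers do not need to supply extra metadata. Placeholders throughout.
import asyncio

from google.cloud import discoveryengine_v1alpha
from google.longrunning import operations_pb2

async def cancel(name: str) -> None:
    client = discoveryengine_v1alpha.SearchServiceAsyncClient()
    await client.cancel_operation(
        request=operations_pb2.CancelOperationRequest(name=name)
    )

asyncio.run(cancel("projects/PROJECT_ID/locations/global/operations/OPERATION_ID"))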
+ rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( gapic_version=package_version.__version__ diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/search_service/transports/base.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/search_service/transports/base.py index d6e44a9b26ee..057f82fff237 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/search_service/transports/base.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/search_service/transports/base.py @@ -174,6 +174,12 @@ def get_operation( ]: raise NotImplementedError() + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None,]: + raise NotImplementedError() + @property def kind(self) -> str: raise NotImplementedError() diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/search_service/transports/grpc.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/search_service/transports/grpc.py index b9be0a4c447b..b13fafe7dbab 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/search_service/transports/grpc.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/search_service/transports/grpc.py @@ -264,6 +264,23 @@ def search( def close(self): self.grpc_channel.close() + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None]: + r"""Return a callable for the cancel_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "cancel_operation" not in self._stubs: + self._stubs["cancel_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/CancelOperation", + request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["cancel_operation"] + @property def get_operation( self, diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/search_service/transports/grpc_asyncio.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/search_service/transports/grpc_asyncio.py index 27e8886f6499..38473e859505 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/search_service/transports/grpc_asyncio.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/search_service/transports/grpc_asyncio.py @@ -280,6 +280,23 @@ def _prep_wrapped_messages(self, client_info): def close(self): return self.grpc_channel.close() + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None]: + r"""Return a callable for the cancel_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "cancel_operation" not in self._stubs: + self._stubs["cancel_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/CancelOperation", + request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["cancel_operation"] + @property def get_operation( self, diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/search_service/transports/rest.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/search_service/transports/rest.py index 9c5be5135af1..1826592e4eba 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/search_service/transports/rest.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/search_service/transports/rest.py @@ -101,6 +101,27 @@ def post_search( """ return response + def pre_cancel_operation( + self, + request: operations_pb2.CancelOperationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[operations_pb2.CancelOperationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for cancel_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the SearchService server. + """ + return request, metadata + + def post_cancel_operation(self, response: None) -> None: + """Post-rpc interceptor for cancel_operation + + Override in a subclass to manipulate the response + after it is returned by the SearchService server but before + it is returned to user code. + """ + return response + def pre_get_operation( self, request: operations_pb2.GetOperationRequest, @@ -359,6 +380,76 @@ def search( # In C++ this would require a dynamic_cast return self._Search(self._session, self._host, self._interceptor) # type: ignore + @property + def cancel_operation(self): + return self._CancelOperation(self._session, self._host, self._interceptor) # type: ignore + + class _CancelOperation(SearchServiceRestStub): + def __call__( + self, + request: operations_pb2.CancelOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Call the cancel operation method over HTTP. + + Args: + request (operations_pb2.CancelOperationRequest): + The request object for CancelOperation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1alpha/{name=projects/*/locations/*/collections/*/dataStores/*/branches/*/operations/*}:cancel", + "body": "*", + }, + { + "method": "post", + "uri": "/v1alpha/{name=projects/*/locations/*/dataStores/*/branches/*/operations/*}:cancel", + "body": "*", + }, + ] + + request, metadata = self._interceptor.pre_cancel_operation( + request, metadata + ) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + body = json.dumps(transcoded_request["body"]) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + return self._interceptor.post_cancel_operation(None) + @property def get_operation(self): return self._GetOperation(self._session, self._host, self._interceptor) # type: ignore @@ -444,6 +535,10 @@ def __call__( "method": "get", "uri": "/v1alpha/{name=projects/*/locations/*/operations/*}", }, + { + "method": "get", + "uri": "/v1alpha/{name=projects/*/locations/*/sampleQuerySets/*/operations/*}", + }, { "method": "get", "uri": "/v1alpha/{name=projects/*/operations/*}", diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/search_tuning_service/async_client.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/search_tuning_service/async_client.py index 69b41c9b795e..be32f606d365 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/search_tuning_service/async_client.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/search_tuning_service/async_client.py @@ -48,7 +48,10 @@ from google.cloud.location import locations_pb2 # type: ignore from google.longrunning import operations_pb2 # type: ignore -from google.cloud.discoveryengine_v1alpha.types import search_tuning_service +from google.cloud.discoveryengine_v1alpha.types import ( + custom_tuning_model, + search_tuning_service, +) from .client import SearchTuningServiceClient from .transports.base import DEFAULT_CLIENT_INFO, SearchTuningServiceTransport @@ -67,6 +70,12 @@ class SearchTuningServiceAsyncClient: _DEFAULT_ENDPOINT_TEMPLATE = SearchTuningServiceClient._DEFAULT_ENDPOINT_TEMPLATE _DEFAULT_UNIVERSE = SearchTuningServiceClient._DEFAULT_UNIVERSE + custom_tuning_model_path = staticmethod( + SearchTuningServiceClient.custom_tuning_model_path + ) + parse_custom_tuning_model_path = staticmethod( + SearchTuningServiceClient.parse_custom_tuning_model_path + ) data_store_path = staticmethod(SearchTuningServiceClient.data_store_path) parse_data_store_path = staticmethod( SearchTuningServiceClient.parse_data_store_path @@ -373,6 +382,96 @@ async def sample_train_custom_model(): # Done; return the response. 
return response + async def list_custom_models( + self, + request: Optional[ + Union[search_tuning_service.ListCustomModelsRequest, dict] + ] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> search_tuning_service.ListCustomModelsResponse: + r"""Gets a list of all the custom models. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import discoveryengine_v1alpha + + async def sample_list_custom_models(): + # Create a client + client = discoveryengine_v1alpha.SearchTuningServiceAsyncClient() + + # Initialize request argument(s) + request = discoveryengine_v1alpha.ListCustomModelsRequest( + data_store="data_store_value", + ) + + # Make the request + response = await client.list_custom_models(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.discoveryengine_v1alpha.types.ListCustomModelsRequest, dict]]): + The request object. Request message for + [SearchTuningService.ListCustomModels][google.cloud.discoveryengine.v1alpha.SearchTuningService.ListCustomModels] + method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.discoveryengine_v1alpha.types.ListCustomModelsResponse: + Response message for + [SearchTuningService.ListCustomModels][google.cloud.discoveryengine.v1alpha.SearchTuningService.ListCustomModels] + method. + + """ + # Create or coerce a protobuf request object. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, search_tuning_service.ListCustomModelsRequest): + request = search_tuning_service.ListCustomModelsRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.list_custom_models + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("data_store", request.data_store),) + ), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + async def list_operations( self, request: Optional[operations_pb2.ListOperationsRequest] = None, @@ -487,6 +586,63 @@ async def get_operation( # Done; return the response. 
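# Sketch of consuming the new ListCustomModels call added above. The response is
# returned whole (no pager). Both the top-level ListCustomModelsRequest export and
# the `models` response field are assumptions inferred from the CustomTuningModel
# import; they are not shown in this diff.
from google.cloud import discoveryengine_v1alpha

tuning_client = discoveryengine_v1alpha.SearchTuningServiceClient()
response = tuning_client.list_custom_models(
    request=discoveryengine_v1alpha.ListCustomModelsRequest(
        data_store="projects/PROJECT_ID/locations/global/collections/default_collection"
        "/dataStores/DATA_STORE_ID"  # placeholder resource name
    )
)
for model in response.models:  # assumed field name; see the note above
    print(model.name)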
return response + async def cancel_operation( + self, + request: Optional[operations_pb2.CancelOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Starts asynchronous cancellation on a long-running operation. + + The server makes a best effort to cancel the operation, but success + is not guaranteed. If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.CancelOperationRequest`): + The request object. Request message for + `CancelOperation` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.CancelOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.cancel_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + async def __aenter__(self) -> "SearchTuningServiceAsyncClient": return self diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/search_tuning_service/client.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/search_tuning_service/client.py index 7969518e8e69..091d0a1bf534 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/search_tuning_service/client.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/search_tuning_service/client.py @@ -53,7 +53,10 @@ from google.cloud.location import locations_pb2 # type: ignore from google.longrunning import operations_pb2 # type: ignore -from google.cloud.discoveryengine_v1alpha.types import search_tuning_service +from google.cloud.discoveryengine_v1alpha.types import ( + custom_tuning_model, + search_tuning_service, +) from .transports.base import DEFAULT_CLIENT_INFO, SearchTuningServiceTransport from .transports.grpc import SearchTuningServiceGrpcTransport @@ -187,6 +190,30 @@ def transport(self) -> SearchTuningServiceTransport: """ return self._transport + @staticmethod + def custom_tuning_model_path( + project: str, + location: str, + data_store: str, + custom_tuning_model: str, + ) -> str: + """Returns a fully-qualified custom_tuning_model string.""" + return "projects/{project}/locations/{location}/dataStores/{data_store}/customTuningModels/{custom_tuning_model}".format( + project=project, + location=location, + data_store=data_store, + custom_tuning_model=custom_tuning_model, + ) + + @staticmethod + def parse_custom_tuning_model_path(path: str) -> Dict[str, str]: + 
"""Parses a custom_tuning_model path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/dataStores/(?P.+?)/customTuningModels/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + @staticmethod def data_store_path( project: str, @@ -789,6 +816,94 @@ def sample_train_custom_model(): # Done; return the response. return response + def list_custom_models( + self, + request: Optional[ + Union[search_tuning_service.ListCustomModelsRequest, dict] + ] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> search_tuning_service.ListCustomModelsResponse: + r"""Gets a list of all the custom models. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import discoveryengine_v1alpha + + def sample_list_custom_models(): + # Create a client + client = discoveryengine_v1alpha.SearchTuningServiceClient() + + # Initialize request argument(s) + request = discoveryengine_v1alpha.ListCustomModelsRequest( + data_store="data_store_value", + ) + + # Make the request + response = client.list_custom_models(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.discoveryengine_v1alpha.types.ListCustomModelsRequest, dict]): + The request object. Request message for + [SearchTuningService.ListCustomModels][google.cloud.discoveryengine.v1alpha.SearchTuningService.ListCustomModels] + method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.discoveryengine_v1alpha.types.ListCustomModelsResponse: + Response message for + [SearchTuningService.ListCustomModels][google.cloud.discoveryengine.v1alpha.SearchTuningService.ListCustomModels] + method. + + """ + # Create or coerce a protobuf request object. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, search_tuning_service.ListCustomModelsRequest): + request = search_tuning_service.ListCustomModelsRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_custom_models] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("data_store", request.data_store),) + ), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + def __enter__(self) -> "SearchTuningServiceClient": return self @@ -916,6 +1031,63 @@ def get_operation( # Done; return the response. 
return response + def cancel_operation( + self, + request: Optional[operations_pb2.CancelOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Starts asynchronous cancellation on a long-running operation. + + The server makes a best effort to cancel the operation, but success + is not guaranteed. If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.CancelOperationRequest`): + The request object. Request message for + `CancelOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.CancelOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.cancel_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( gapic_version=package_version.__version__ diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/search_tuning_service/transports/base.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/search_tuning_service/transports/base.py index eda06118810a..269b7b4fb375 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/search_tuning_service/transports/base.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/search_tuning_service/transports/base.py @@ -133,6 +133,11 @@ def _prep_wrapped_messages(self, client_info): default_timeout=None, client_info=client_info, ), + self.list_custom_models: gapic_v1.method.wrap_method( + self.list_custom_models, + default_timeout=None, + client_info=client_info, + ), } def close(self): @@ -158,6 +163,18 @@ def train_custom_model( ]: raise NotImplementedError() + @property + def list_custom_models( + self, + ) -> Callable[ + [search_tuning_service.ListCustomModelsRequest], + Union[ + search_tuning_service.ListCustomModelsResponse, + Awaitable[search_tuning_service.ListCustomModelsResponse], + ], + ]: + raise NotImplementedError() + @property def list_operations( self, @@ -179,6 +196,12 @@ def get_operation( ]: raise NotImplementedError() + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None,]: + raise NotImplementedError() + @property def kind(self) -> str: raise NotImplementedError() diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/search_tuning_service/transports/grpc.py 
b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/search_tuning_service/transports/grpc.py index 847234e0004e..adf9a2477f71 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/search_tuning_service/transports/grpc.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/search_tuning_service/transports/grpc.py @@ -278,9 +278,55 @@ def train_custom_model( ) return self._stubs["train_custom_model"] + @property + def list_custom_models( + self, + ) -> Callable[ + [search_tuning_service.ListCustomModelsRequest], + search_tuning_service.ListCustomModelsResponse, + ]: + r"""Return a callable for the list custom models method over gRPC. + + Gets a list of all the custom models. + + Returns: + Callable[[~.ListCustomModelsRequest], + ~.ListCustomModelsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_custom_models" not in self._stubs: + self._stubs["list_custom_models"] = self.grpc_channel.unary_unary( + "/google.cloud.discoveryengine.v1alpha.SearchTuningService/ListCustomModels", + request_serializer=search_tuning_service.ListCustomModelsRequest.serialize, + response_deserializer=search_tuning_service.ListCustomModelsResponse.deserialize, + ) + return self._stubs["list_custom_models"] + def close(self): self.grpc_channel.close() + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None]: + r"""Return a callable for the cancel_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "cancel_operation" not in self._stubs: + self._stubs["cancel_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/CancelOperation", + request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["cancel_operation"] + @property def get_operation( self, diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/search_tuning_service/transports/grpc_asyncio.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/search_tuning_service/transports/grpc_asyncio.py index 90ab05cdc911..085c566c2077 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/search_tuning_service/transports/grpc_asyncio.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/search_tuning_service/transports/grpc_asyncio.py @@ -285,6 +285,35 @@ def train_custom_model( ) return self._stubs["train_custom_model"] + @property + def list_custom_models( + self, + ) -> Callable[ + [search_tuning_service.ListCustomModelsRequest], + Awaitable[search_tuning_service.ListCustomModelsResponse], + ]: + r"""Return a callable for the list custom models method over gRPC. + + Gets a list of all the custom models. + + Returns: + Callable[[~.ListCustomModelsRequest], + Awaitable[~.ListCustomModelsResponse]]: + A function that, when called, will call the underlying RPC + on the server. 
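A hedged sketch of calling the new `cancel_operation` surface on the sync client; the operation name is a placeholder following the branch-operation pattern accepted by the cancel bindings added in this change, and cancellation remains best-effort.

```python
from google.cloud import discoveryengine_v1alpha
from google.longrunning import operations_pb2

# Assumes Application Default Credentials are configured.
client = discoveryengine_v1alpha.SearchTuningServiceClient()

# Placeholder name of a branch-scoped long-running operation, which is the
# resource pattern the new cancel bindings accept.
operation_name = (
    "projects/my-project/locations/global/collections/default_collection/"
    "dataStores/my-data-store/branches/0/operations/my-operation-id"
)

# Cancellation is best-effort: the server may still complete the operation.
client.cancel_operation(
    request=operations_pb2.CancelOperationRequest(name=operation_name)
)
```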
+ """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_custom_models" not in self._stubs: + self._stubs["list_custom_models"] = self.grpc_channel.unary_unary( + "/google.cloud.discoveryengine.v1alpha.SearchTuningService/ListCustomModels", + request_serializer=search_tuning_service.ListCustomModelsRequest.serialize, + response_deserializer=search_tuning_service.ListCustomModelsResponse.deserialize, + ) + return self._stubs["list_custom_models"] + def _prep_wrapped_messages(self, client_info): """Precompute the wrapped methods, overriding the base class method to use async wrappers.""" self._wrapped_methods = { @@ -293,11 +322,33 @@ def _prep_wrapped_messages(self, client_info): default_timeout=None, client_info=client_info, ), + self.list_custom_models: gapic_v1.method_async.wrap_method( + self.list_custom_models, + default_timeout=None, + client_info=client_info, + ), } def close(self): return self.grpc_channel.close() + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None]: + r"""Return a callable for the cancel_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "cancel_operation" not in self._stubs: + self._stubs["cancel_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/CancelOperation", + request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["cancel_operation"] + @property def get_operation( self, diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/search_tuning_service/transports/rest.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/search_tuning_service/transports/rest.py index 71e5ae9679d5..49ede827a7b3 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/search_tuning_service/transports/rest.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/search_tuning_service/transports/rest.py @@ -72,6 +72,14 @@ class SearchTuningServiceRestInterceptor: .. code-block:: python class MyCustomSearchTuningServiceInterceptor(SearchTuningServiceRestInterceptor): + def pre_list_custom_models(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_custom_models(self, response): + logging.log(f"Received response: {response}") + return response + def pre_train_custom_model(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata @@ -86,6 +94,31 @@ def post_train_custom_model(self, response): """ + def pre_list_custom_models( + self, + request: search_tuning_service.ListCustomModelsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + search_tuning_service.ListCustomModelsRequest, Sequence[Tuple[str, str]] + ]: + """Pre-rpc interceptor for list_custom_models + + Override in a subclass to manipulate the request or metadata + before they are sent to the SearchTuningService server. 
+ """ + return request, metadata + + def post_list_custom_models( + self, response: search_tuning_service.ListCustomModelsResponse + ) -> search_tuning_service.ListCustomModelsResponse: + """Post-rpc interceptor for list_custom_models + + Override in a subclass to manipulate the response + after it is returned by the SearchTuningService server but before + it is returned to user code. + """ + return response + def pre_train_custom_model( self, request: search_tuning_service.TrainCustomModelRequest, @@ -111,6 +144,27 @@ def post_train_custom_model( """ return response + def pre_cancel_operation( + self, + request: operations_pb2.CancelOperationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[operations_pb2.CancelOperationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for cancel_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the SearchTuningService server. + """ + return request, metadata + + def post_cancel_operation(self, response: None) -> None: + """Post-rpc interceptor for cancel_operation + + Override in a subclass to manipulate the response + after it is returned by the SearchTuningService server but before + it is returned to user code. + """ + return response + def pre_get_operation( self, request: operations_pb2.GetOperationRequest, @@ -265,6 +319,18 @@ def operations_client(self) -> operations_v1.AbstractOperationsClient: # Only create a new client if we do not already have one. if self._operations_client is None: http_options: Dict[str, List[Dict[str, str]]] = { + "google.longrunning.Operations.CancelOperation": [ + { + "method": "post", + "uri": "/v1alpha/{name=projects/*/locations/*/collections/*/dataStores/*/branches/*/operations/*}:cancel", + "body": "*", + }, + { + "method": "post", + "uri": "/v1alpha/{name=projects/*/locations/*/dataStores/*/branches/*/operations/*}:cancel", + "body": "*", + }, + ], "google.longrunning.Operations.GetOperation": [ { "method": "get", @@ -322,6 +388,10 @@ def operations_client(self) -> operations_v1.AbstractOperationsClient: "method": "get", "uri": "/v1alpha/{name=projects/*/locations/*/operations/*}", }, + { + "method": "get", + "uri": "/v1alpha/{name=projects/*/locations/*/sampleQuerySets/*/operations/*}", + }, { "method": "get", "uri": "/v1alpha/{name=projects/*/operations/*}", @@ -403,6 +473,98 @@ def operations_client(self) -> operations_v1.AbstractOperationsClient: # Return the client from cache. return self._operations_client + class _ListCustomModels(SearchTuningServiceRestStub): + def __hash__(self): + return hash("ListCustomModels") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: search_tuning_service.ListCustomModelsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> search_tuning_service.ListCustomModelsResponse: + r"""Call the list custom models method over HTTP. + + Args: + request (~.search_tuning_service.ListCustomModelsRequest): + The request object. Request message for + [SearchTuningService.ListCustomModels][google.cloud.discoveryengine.v1alpha.SearchTuningService.ListCustomModels] + method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. 
+ timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.search_tuning_service.ListCustomModelsResponse: + Response message for + [SearchTuningService.ListCustomModels][google.cloud.discoveryengine.v1alpha.SearchTuningService.ListCustomModels] + method. + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1alpha/{data_store=projects/*/locations/*/collections/*/dataStores/*}/customModels", + }, + ] + request, metadata = self._interceptor.pre_list_custom_models( + request, metadata + ) + pb_request = search_tuning_service.ListCustomModelsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = search_tuning_service.ListCustomModelsResponse() + pb_resp = search_tuning_service.ListCustomModelsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_custom_models(resp) + return resp + class _TrainCustomModel(SearchTuningServiceRestStub): def __hash__(self): return hash("TrainCustomModel") @@ -500,6 +662,17 @@ def __call__( resp = self._interceptor.post_train_custom_model(resp) return resp + @property + def list_custom_models( + self, + ) -> Callable[ + [search_tuning_service.ListCustomModelsRequest], + search_tuning_service.ListCustomModelsResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListCustomModels(self._session, self._host, self._interceptor) # type: ignore + @property def train_custom_model( self, @@ -510,6 +683,76 @@ def train_custom_model( # In C++ this would require a dynamic_cast return self._TrainCustomModel(self._session, self._host, self._interceptor) # type: ignore + @property + def cancel_operation(self): + return self._CancelOperation(self._session, self._host, self._interceptor) # type: ignore + + class _CancelOperation(SearchTuningServiceRestStub): + def __call__( + self, + request: operations_pb2.CancelOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Call the cancel operation method over HTTP. + + Args: + request (operations_pb2.CancelOperationRequest): + The request object for CancelOperation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1alpha/{name=projects/*/locations/*/collections/*/dataStores/*/branches/*/operations/*}:cancel", + "body": "*", + }, + { + "method": "post", + "uri": "/v1alpha/{name=projects/*/locations/*/dataStores/*/branches/*/operations/*}:cancel", + "body": "*", + }, + ] + + request, metadata = self._interceptor.pre_cancel_operation( + request, metadata + ) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + body = json.dumps(transcoded_request["body"]) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + return self._interceptor.post_cancel_operation(None) + @property def get_operation(self): return self._GetOperation(self._session, self._host, self._interceptor) # type: ignore @@ -595,6 +838,10 @@ def __call__( "method": "get", "uri": "/v1alpha/{name=projects/*/locations/*/operations/*}", }, + { + "method": "get", + "uri": "/v1alpha/{name=projects/*/locations/*/sampleQuerySets/*/operations/*}", + }, { "method": "get", "uri": "/v1alpha/{name=projects/*/operations/*}", diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/serving_config_service/async_client.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/serving_config_service/async_client.py index 299f161ffe59..7ebc3a4faa06 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/serving_config_service/async_client.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/serving_config_service/async_client.py @@ -763,6 +763,63 @@ async def get_operation( # Done; return the response. return response + async def cancel_operation( + self, + request: Optional[operations_pb2.CancelOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Starts asynchronous cancellation on a long-running operation. + + The server makes a best effort to cancel the operation, but success + is not guaranteed. If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.CancelOperationRequest`): + The request object. Request message for + `CancelOperation` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. 
+ # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.CancelOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.cancel_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + async def __aenter__(self) -> "ServingConfigServiceAsyncClient": return self diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/serving_config_service/client.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/serving_config_service/client.py index b208401312d5..61be3361f2cc 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/serving_config_service/client.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/serving_config_service/client.py @@ -1187,6 +1187,63 @@ def get_operation( # Done; return the response. return response + def cancel_operation( + self, + request: Optional[operations_pb2.CancelOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Starts asynchronous cancellation on a long-running operation. + + The server makes a best effort to cancel the operation, but success + is not guaranteed. If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.CancelOperationRequest`): + The request object. Request message for + `CancelOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.CancelOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.cancel_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. 
+ rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( gapic_version=package_version.__version__ diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/serving_config_service/transports/base.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/serving_config_service/transports/base.py index bd412c1569af..817b3241ab9e 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/serving_config_service/transports/base.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/serving_config_service/transports/base.py @@ -212,6 +212,12 @@ def get_operation( ]: raise NotImplementedError() + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None,]: + raise NotImplementedError() + @property def kind(self) -> str: raise NotImplementedError() diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/serving_config_service/transports/grpc.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/serving_config_service/transports/grpc.py index fee6dc3ca1a8..03b2bec27f91 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/serving_config_service/transports/grpc.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/serving_config_service/transports/grpc.py @@ -334,6 +334,23 @@ def list_serving_configs( def close(self): self.grpc_channel.close() + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None]: + r"""Return a callable for the cancel_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "cancel_operation" not in self._stubs: + self._stubs["cancel_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/CancelOperation", + request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["cancel_operation"] + @property def get_operation( self, diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/serving_config_service/transports/grpc_asyncio.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/serving_config_service/transports/grpc_asyncio.py index 08a0261a7cdb..c16027950b5d 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/serving_config_service/transports/grpc_asyncio.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/serving_config_service/transports/grpc_asyncio.py @@ -359,6 +359,23 @@ def _prep_wrapped_messages(self, client_info): def close(self): return self.grpc_channel.close() + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None]: + r"""Return a callable for the cancel_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "cancel_operation" not in self._stubs: + self._stubs["cancel_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/CancelOperation", + request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["cancel_operation"] + @property def get_operation( self, diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/serving_config_service/transports/rest.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/serving_config_service/transports/rest.py index c1818764bf5b..32dabfc1c1e1 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/serving_config_service/transports/rest.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/serving_config_service/transports/rest.py @@ -175,6 +175,27 @@ def post_update_serving_config( """ return response + def pre_cancel_operation( + self, + request: operations_pb2.CancelOperationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[operations_pb2.CancelOperationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for cancel_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the ServingConfigService server. + """ + return request, metadata + + def post_cancel_operation(self, response: None) -> None: + """Post-rpc interceptor for cancel_operation + + Override in a subclass to manipulate the response + after it is returned by the ServingConfigService server but before + it is returned to user code. + """ + return response + def pre_get_operation( self, request: operations_pb2.GetOperationRequest, @@ -661,6 +682,76 @@ def update_serving_config( # In C++ this would require a dynamic_cast return self._UpdateServingConfig(self._session, self._host, self._interceptor) # type: ignore + @property + def cancel_operation(self): + return self._CancelOperation(self._session, self._host, self._interceptor) # type: ignore + + class _CancelOperation(ServingConfigServiceRestStub): + def __call__( + self, + request: operations_pb2.CancelOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Call the cancel operation method over HTTP. + + Args: + request (operations_pb2.CancelOperationRequest): + The request object for CancelOperation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1alpha/{name=projects/*/locations/*/collections/*/dataStores/*/branches/*/operations/*}:cancel", + "body": "*", + }, + { + "method": "post", + "uri": "/v1alpha/{name=projects/*/locations/*/dataStores/*/branches/*/operations/*}:cancel", + "body": "*", + }, + ] + + request, metadata = self._interceptor.pre_cancel_operation( + request, metadata + ) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + body = json.dumps(transcoded_request["body"]) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + return self._interceptor.post_cancel_operation(None) + @property def get_operation(self): return self._GetOperation(self._session, self._host, self._interceptor) # type: ignore @@ -746,6 +837,10 @@ def __call__( "method": "get", "uri": "/v1alpha/{name=projects/*/locations/*/operations/*}", }, + { + "method": "get", + "uri": "/v1alpha/{name=projects/*/locations/*/sampleQuerySets/*/operations/*}", + }, { "method": "get", "uri": "/v1alpha/{name=projects/*/operations/*}", diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/site_search_engine_service/async_client.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/site_search_engine_service/async_client.py index b51977c19bf5..c44622a79579 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/site_search_engine_service/async_client.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/site_search_engine_service/async_client.py @@ -1845,6 +1845,63 @@ async def get_operation( # Done; return the response. return response + async def cancel_operation( + self, + request: Optional[operations_pb2.CancelOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Starts asynchronous cancellation on a long-running operation. + + The server makes a best effort to cancel the operation, but success + is not guaranteed. If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.CancelOperationRequest`): + The request object. Request message for + `CancelOperation` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. 
+ if isinstance(request, dict): + request = operations_pb2.CancelOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.cancel_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + async def __aenter__(self) -> "SiteSearchEngineServiceAsyncClient": return self diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/site_search_engine_service/client.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/site_search_engine_service/client.py index 2e9148200c75..a2fa85ed7bdf 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/site_search_engine_service/client.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/site_search_engine_service/client.py @@ -2272,6 +2272,63 @@ def get_operation( # Done; return the response. return response + def cancel_operation( + self, + request: Optional[operations_pb2.CancelOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Starts asynchronous cancellation on a long-running operation. + + The server makes a best effort to cancel the operation, but success + is not guaranteed. If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.CancelOperationRequest`): + The request object. Request message for + `CancelOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.CancelOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.cancel_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. 
+ rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( gapic_version=package_version.__version__ diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/site_search_engine_service/transports/base.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/site_search_engine_service/transports/base.py index 0bd703fed3df..7668b85a3147 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/site_search_engine_service/transports/base.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/site_search_engine_service/transports/base.py @@ -345,6 +345,12 @@ def get_operation( ]: raise NotImplementedError() + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None,]: + raise NotImplementedError() + @property def kind(self) -> str: raise NotImplementedError() diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/site_search_engine_service/transports/grpc.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/site_search_engine_service/transports/grpc.py index c4ab0d1be8b0..3cdc6a9ae9db 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/site_search_engine_service/transports/grpc.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/site_search_engine_service/transports/grpc.py @@ -616,6 +616,23 @@ def fetch_domain_verification_status( def close(self): self.grpc_channel.close() + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None]: + r"""Return a callable for the cancel_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "cancel_operation" not in self._stubs: + self._stubs["cancel_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/CancelOperation", + request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["cancel_operation"] + @property def get_operation( self, diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/site_search_engine_service/transports/grpc_asyncio.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/site_search_engine_service/transports/grpc_asyncio.py index 19adf84da8e9..324f2b8f5683 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/site_search_engine_service/transports/grpc_asyncio.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/site_search_engine_service/transports/grpc_asyncio.py @@ -692,6 +692,23 @@ def _prep_wrapped_messages(self, client_info): def close(self): return self.grpc_channel.close() + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None]: + r"""Return a callable for the cancel_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "cancel_operation" not in self._stubs: + self._stubs["cancel_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/CancelOperation", + request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["cancel_operation"] + @property def get_operation( self, diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/site_search_engine_service/transports/rest.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/site_search_engine_service/transports/rest.py index 1a3f8294ff31..1b1de82c41ee 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/site_search_engine_service/transports/rest.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/site_search_engine_service/transports/rest.py @@ -482,6 +482,27 @@ def post_update_target_site( """ return response + def pre_cancel_operation( + self, + request: operations_pb2.CancelOperationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[operations_pb2.CancelOperationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for cancel_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the SiteSearchEngineService server. + """ + return request, metadata + + def post_cancel_operation(self, response: None) -> None: + """Post-rpc interceptor for cancel_operation + + Override in a subclass to manipulate the response + after it is returned by the SiteSearchEngineService server but before + it is returned to user code. + """ + return response + def pre_get_operation( self, request: operations_pb2.GetOperationRequest, @@ -636,6 +657,18 @@ def operations_client(self) -> operations_v1.AbstractOperationsClient: # Only create a new client if we do not already have one. if self._operations_client is None: http_options: Dict[str, List[Dict[str, str]]] = { + "google.longrunning.Operations.CancelOperation": [ + { + "method": "post", + "uri": "/v1alpha/{name=projects/*/locations/*/collections/*/dataStores/*/branches/*/operations/*}:cancel", + "body": "*", + }, + { + "method": "post", + "uri": "/v1alpha/{name=projects/*/locations/*/dataStores/*/branches/*/operations/*}:cancel", + "body": "*", + }, + ], "google.longrunning.Operations.GetOperation": [ { "method": "get", @@ -693,6 +726,10 @@ def operations_client(self) -> operations_v1.AbstractOperationsClient: "method": "get", "uri": "/v1alpha/{name=projects/*/locations/*/operations/*}", }, + { + "method": "get", + "uri": "/v1alpha/{name=projects/*/locations/*/sampleQuerySets/*/operations/*}", + }, { "method": "get", "uri": "/v1alpha/{name=projects/*/operations/*}", @@ -2101,6 +2138,76 @@ def update_target_site( # In C++ this would require a dynamic_cast return self._UpdateTargetSite(self._session, self._host, self._interceptor) # type: ignore + @property + def cancel_operation(self): + return self._CancelOperation(self._session, self._host, self._interceptor) # type: ignore + + class _CancelOperation(SiteSearchEngineServiceRestStub): + def __call__( + self, + request: operations_pb2.CancelOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Call the cancel operation method over HTTP. + + Args: + request (operations_pb2.CancelOperationRequest): + The request object for CancelOperation method. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1alpha/{name=projects/*/locations/*/collections/*/dataStores/*/branches/*/operations/*}:cancel", + "body": "*", + }, + { + "method": "post", + "uri": "/v1alpha/{name=projects/*/locations/*/dataStores/*/branches/*/operations/*}:cancel", + "body": "*", + }, + ] + + request, metadata = self._interceptor.pre_cancel_operation( + request, metadata + ) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + body = json.dumps(transcoded_request["body"]) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + return self._interceptor.post_cancel_operation(None) + @property def get_operation(self): return self._GetOperation(self._session, self._host, self._interceptor) # type: ignore @@ -2186,6 +2293,10 @@ def __call__( "method": "get", "uri": "/v1alpha/{name=projects/*/locations/*/operations/*}", }, + { + "method": "get", + "uri": "/v1alpha/{name=projects/*/locations/*/sampleQuerySets/*/operations/*}", + }, { "method": "get", "uri": "/v1alpha/{name=projects/*/operations/*}", diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/user_event_service/async_client.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/user_event_service/async_client.py index 4cc708af696c..7caf68634967 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/user_event_service/async_client.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/user_event_service/async_client.py @@ -82,6 +82,8 @@ class UserEventServiceAsyncClient: parse_data_store_path = staticmethod(UserEventServiceClient.parse_data_store_path) document_path = staticmethod(UserEventServiceClient.document_path) parse_document_path = staticmethod(UserEventServiceClient.parse_document_path) + engine_path = staticmethod(UserEventServiceClient.engine_path) + parse_engine_path = staticmethod(UserEventServiceClient.parse_engine_path) common_billing_account_path = staticmethod( UserEventServiceClient.common_billing_account_path ) @@ -327,7 +329,7 @@ async def sample_write_user_event(): UserEvent captures all metadata information Discovery Engine API needs to know about how end users interact - with customers' website. + with your website. """ # Create or coerce a protobuf request object. 
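The `engine_path`/`parse_engine_path` aliases exposed above (implemented on `UserEventServiceClient` later in this diff) behave like the other resource-path helpers; an illustrative round trip with placeholder IDs:

```python
from google.cloud import discoveryengine_v1alpha

client_cls = discoveryengine_v1alpha.UserEventServiceClient

name = client_cls.engine_path(
    project="my-project",
    location="global",
    collection="default_collection",
    engine="my-engine",
)
# -> projects/my-project/locations/global/collections/default_collection/engines/my-engine

segments = client_cls.parse_engine_path(name)
# -> {'project': 'my-project', 'location': 'global',
#     'collection': 'default_collection', 'engine': 'my-engine'}
print(name, segments)
```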
@@ -611,7 +613,7 @@ async def import_user_events( timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> operation_async.AsyncOperation: - r"""Bulk import of User events. Request processing might + r"""Bulk import of user events. Request processing might be synchronous. Events that already exist are skipped. Use this method for backfilling historical user events. @@ -830,6 +832,63 @@ async def get_operation( # Done; return the response. return response + async def cancel_operation( + self, + request: Optional[operations_pb2.CancelOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Starts asynchronous cancellation on a long-running operation. + + The server makes a best effort to cancel the operation, but success + is not guaranteed. If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.CancelOperationRequest`): + The request object. Request message for + `CancelOperation` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.CancelOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.cancel_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. 
+ await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + async def __aenter__(self) -> "UserEventServiceAsyncClient": return self diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/user_event_service/client.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/user_event_service/client.py index 7005248fd455..c996847e40b9 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/user_event_service/client.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/user_event_service/client.py @@ -246,6 +246,30 @@ def parse_document_path(path: str) -> Dict[str, str]: ) return m.groupdict() if m else {} + @staticmethod + def engine_path( + project: str, + location: str, + collection: str, + engine: str, + ) -> str: + """Returns a fully-qualified engine string.""" + return "projects/{project}/locations/{location}/collections/{collection}/engines/{engine}".format( + project=project, + location=location, + collection=collection, + engine=engine, + ) + + @staticmethod + def parse_engine_path(path: str) -> Dict[str, str]: + """Parses a engine path into its component segments.""" + m = re.match( + r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/collections/(?P<collection>.+?)/engines/(?P<engine>.+?)$", + path, + ) + return m.groupdict() if m else {} + @staticmethod def common_billing_account_path( billing_account: str, @@ -772,7 +796,7 @@ def sample_write_user_event(): UserEvent captures all metadata information Discovery Engine API needs to know about how end users interact - with customers' website. + with your website. """ # Create or coerce a protobuf request object. @@ -1050,7 +1074,7 @@ def import_user_events( timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> operation.Operation: - r"""Bulk import of User events. Request processing might + r"""Bulk import of user events. Request processing might be synchronous. Events that already exist are skipped. Use this method for backfilling historical user events. @@ -1280,6 +1304,63 @@ def get_operation( # Done; return the response. return response + def cancel_operation( + self, + request: Optional[operations_pb2.CancelOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Starts asynchronous cancellation on a long-running operation. + + The server makes a best effort to cancel the operation, but success + is not guaranteed. If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.CancelOperationRequest`): + The request object. Request message for + `CancelOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.CancelOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling.
+ rpc = gapic_v1.method.wrap_method( + self._transport.cancel_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( gapic_version=package_version.__version__ diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/user_event_service/transports/base.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/user_event_service/transports/base.py index 1a3dd4e810ca..d4168f0532d0 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/user_event_service/transports/base.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/user_event_service/transports/base.py @@ -236,6 +236,12 @@ def get_operation( ]: raise NotImplementedError() + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None,]: + raise NotImplementedError() + @property def kind(self) -> str: raise NotImplementedError() diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/user_event_service/transports/grpc.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/user_event_service/transports/grpc.py index aae046963def..c83ae50e3cbf 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/user_event_service/transports/grpc.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/user_event_service/transports/grpc.py @@ -351,7 +351,7 @@ def import_user_events( ) -> Callable[[import_config.ImportUserEventsRequest], operations_pb2.Operation]: r"""Return a callable for the import user events method over gRPC. - Bulk import of User events. Request processing might + Bulk import of user events. Request processing might be synchronous. Events that already exist are skipped. Use this method for backfilling historical user events. @@ -381,6 +381,23 @@ def import_user_events( def close(self): self.grpc_channel.close() + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None]: + r"""Return a callable for the cancel_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
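A minimal sketch of calling the `cancel_operation` helper added to the synchronous client above; the client construction assumes ambient default credentials, and the operation name is a placeholder:

```python
from google.cloud.discoveryengine_v1alpha.services.user_event_service import (
    UserEventServiceClient,
)
from google.longrunning import operations_pb2

client = UserEventServiceClient()  # assumes default credentials are available

# Placeholder name; a real one would come from a long-running call such as
# import_user_events (operation.operation.name on the returned future).
operation_name = (
    "projects/my-project/locations/global/collections/default_collection"
    "/dataStores/my-data-store/branches/0/operations/import-user-events-123"
)

# Best-effort cancellation; returns None, and the server replies UNIMPLEMENTED
# if it does not support CancelOperation.
client.cancel_operation(
    request=operations_pb2.CancelOperationRequest(name=operation_name)
)
```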
+ if "cancel_operation" not in self._stubs: + self._stubs["cancel_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/CancelOperation", + request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["cancel_operation"] + @property def get_operation( self, diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/user_event_service/transports/grpc_asyncio.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/user_event_service/transports/grpc_asyncio.py index 2231af5cf8a1..82eb96258454 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/user_event_service/transports/grpc_asyncio.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/user_event_service/transports/grpc_asyncio.py @@ -365,7 +365,7 @@ def import_user_events( ]: r"""Return a callable for the import user events method over gRPC. - Bulk import of User events. Request processing might + Bulk import of user events. Request processing might be synchronous. Events that already exist are skipped. Use this method for backfilling historical user events. @@ -429,6 +429,23 @@ def _prep_wrapped_messages(self, client_info): def close(self): return self.grpc_channel.close() + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None]: + r"""Return a callable for the cancel_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "cancel_operation" not in self._stubs: + self._stubs["cancel_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/CancelOperation", + request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["cancel_operation"] + @property def get_operation( self, diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/user_event_service/transports/rest.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/user_event_service/transports/rest.py index 2be9f70f7f63..117026a6902c 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/user_event_service/transports/rest.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/user_event_service/transports/rest.py @@ -208,6 +208,27 @@ def post_write_user_event( """ return response + def pre_cancel_operation( + self, + request: operations_pb2.CancelOperationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[operations_pb2.CancelOperationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for cancel_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the UserEventService server. + """ + return request, metadata + + def post_cancel_operation(self, response: None) -> None: + """Post-rpc interceptor for cancel_operation + + Override in a subclass to manipulate the response + after it is returned by the UserEventService server but before + it is returned to user code. 
+ """ + return response + def pre_get_operation( self, request: operations_pb2.GetOperationRequest, @@ -363,6 +384,18 @@ def operations_client(self) -> operations_v1.AbstractOperationsClient: # Only create a new client if we do not already have one. if self._operations_client is None: http_options: Dict[str, List[Dict[str, str]]] = { + "google.longrunning.Operations.CancelOperation": [ + { + "method": "post", + "uri": "/v1alpha/{name=projects/*/locations/*/collections/*/dataStores/*/branches/*/operations/*}:cancel", + "body": "*", + }, + { + "method": "post", + "uri": "/v1alpha/{name=projects/*/locations/*/dataStores/*/branches/*/operations/*}:cancel", + "body": "*", + }, + ], "google.longrunning.Operations.GetOperation": [ { "method": "get", @@ -420,6 +453,10 @@ def operations_client(self) -> operations_v1.AbstractOperationsClient: "method": "get", "uri": "/v1alpha/{name=projects/*/locations/*/operations/*}", }, + { + "method": "get", + "uri": "/v1alpha/{name=projects/*/locations/*/sampleQuerySets/*/operations/*}", + }, { "method": "get", "uri": "/v1alpha/{name=projects/*/operations/*}", @@ -885,7 +922,7 @@ def __call__( UserEvent captures all metadata information Discovery Engine API needs to know about how end users interact - with customers' website. + with your website. """ @@ -900,6 +937,11 @@ def __call__( "uri": "/v1alpha/{parent=projects/*/locations/*/collections/*/dataStores/*}/userEvents:write", "body": "user_event", }, + { + "method": "post", + "uri": "/v1alpha/{parent=projects/*/locations/*}/userEvents:write", + "body": "user_event", + }, ] request, metadata = self._interceptor.pre_write_user_event( request, metadata @@ -982,6 +1024,76 @@ def write_user_event( # In C++ this would require a dynamic_cast return self._WriteUserEvent(self._session, self._host, self._interceptor) # type: ignore + @property + def cancel_operation(self): + return self._CancelOperation(self._session, self._host, self._interceptor) # type: ignore + + class _CancelOperation(UserEventServiceRestStub): + def __call__( + self, + request: operations_pb2.CancelOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Call the cancel operation method over HTTP. + + Args: + request (operations_pb2.CancelOperationRequest): + The request object for CancelOperation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
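A sketch of hooking the new `pre_cancel_operation` / `post_cancel_operation` interceptor points on the REST transport. The base class name `UserEventServiceRestInterceptor` and the transport wiring are assumptions based on the usual GAPIC layout (the class name is not shown in this hunk); only the two hook methods come from this change:

```python
from typing import Sequence, Tuple

from google.longrunning import operations_pb2

# Assumed import path/class name; not visible in this hunk.
from google.cloud.discoveryengine_v1alpha.services.user_event_service.transports.rest import (
    UserEventServiceRestInterceptor,
)


class LoggingInterceptor(UserEventServiceRestInterceptor):
    def pre_cancel_operation(
        self,
        request: operations_pb2.CancelOperationRequest,
        metadata: Sequence[Tuple[str, str]],
    ) -> Tuple[operations_pb2.CancelOperationRequest, Sequence[Tuple[str, str]]]:
        # Inspect or adjust the request/metadata before it is sent.
        print(f"cancelling operation: {request.name}")
        return request, metadata

    def post_cancel_operation(self, response: None) -> None:
        # CancelOperation has no payload, so there is nothing to transform.
        return response

# Wiring this subclass into a REST transport/client is omitted here, since
# that plumbing is outside the hunks shown above.
```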
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1alpha/{name=projects/*/locations/*/collections/*/dataStores/*/branches/*/operations/*}:cancel", + "body": "*", + }, + { + "method": "post", + "uri": "/v1alpha/{name=projects/*/locations/*/dataStores/*/branches/*/operations/*}:cancel", + "body": "*", + }, + ] + + request, metadata = self._interceptor.pre_cancel_operation( + request, metadata + ) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + body = json.dumps(transcoded_request["body"]) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + return self._interceptor.post_cancel_operation(None) + @property def get_operation(self): return self._GetOperation(self._session, self._host, self._interceptor) # type: ignore @@ -1067,6 +1179,10 @@ def __call__( "method": "get", "uri": "/v1alpha/{name=projects/*/locations/*/operations/*}", }, + { + "method": "get", + "uri": "/v1alpha/{name=projects/*/locations/*/sampleQuerySets/*/operations/*}", + }, { "method": "get", "uri": "/v1alpha/{name=projects/*/operations/*}", diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/types/__init__.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/types/__init__.py index 2a7475ecb409..97c2518c9a70 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/types/__init__.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/types/__init__.py @@ -30,11 +30,21 @@ Principal, SearchAddOn, SearchTier, + SearchUseCase, SolutionType, UserInfo, ) from .completion import SuggestionDenyListEntry from .completion_service import CompleteQueryRequest, CompleteQueryResponse +from .control import Condition, Control +from .control_service import ( + CreateControlRequest, + DeleteControlRequest, + GetControlRequest, + ListControlsRequest, + ListControlsResponse, + UpdateControlRequest, +) from .conversation import ( Conversation, ConversationContext, @@ -61,6 +71,7 @@ UpdateConversationRequest, UpdateSessionRequest, ) +from .custom_tuning_model import CustomTuningModel from .data_store import DataStore from .data_store_service import ( CreateDataStoreMetadata, @@ -167,6 +178,8 @@ ) from .search_service import SearchRequest, SearchResponse from .search_tuning_service import ( + ListCustomModelsRequest, + ListCustomModelsResponse, TrainCustomModelMetadata, TrainCustomModelRequest, TrainCustomModelResponse, @@ -242,10 +255,19 @@ "IndustryVertical", "SearchAddOn", "SearchTier", + "SearchUseCase", "SolutionType", "SuggestionDenyListEntry", "CompleteQueryRequest", "CompleteQueryResponse", + "Condition", + "Control", + "CreateControlRequest", + "DeleteControlRequest", + "GetControlRequest", + "ListControlsRequest", + "ListControlsResponse", + "UpdateControlRequest", 
"Conversation", "ConversationContext", "ConversationMessage", @@ -268,6 +290,7 @@ "ListSessionsResponse", "UpdateConversationRequest", "UpdateSessionRequest", + "CustomTuningModel", "DataStore", "CreateDataStoreMetadata", "CreateDataStoreRequest", @@ -362,6 +385,8 @@ "UpdateSchemaRequest", "SearchRequest", "SearchResponse", + "ListCustomModelsRequest", + "ListCustomModelsResponse", "TrainCustomModelMetadata", "TrainCustomModelRequest", "TrainCustomModelResponse", diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/types/answer.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/types/answer.py index ce69b8fb477d..c91f8d65eae8 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/types/answer.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/types/answer.py @@ -17,6 +17,7 @@ from typing import MutableMapping, MutableSequence +from google.protobuf import struct_pb2 # type: ignore from google.protobuf import timestamp_pb2 # type: ignore import proto # type: ignore @@ -180,6 +181,10 @@ class UnstructuredDocumentInfo(proto.Message): chunk_contents (MutableSequence[google.cloud.discoveryengine_v1alpha.types.Answer.Reference.UnstructuredDocumentInfo.ChunkContent]): List of cited chunk contents derived from document content. + struct_data (google.protobuf.struct_pb2.Struct): + The structured JSON metadata for the + document. It is populated from the struct data + from the Chunk in search result. """ class ChunkContent(proto.Message): @@ -220,6 +225,11 @@ class ChunkContent(proto.Message): number=4, message="Answer.Reference.UnstructuredDocumentInfo.ChunkContent", ) + struct_data: struct_pb2.Struct = proto.Field( + proto.MESSAGE, + number=5, + message=struct_pb2.Struct, + ) class ChunkInfo(proto.Message): r"""Chunk information. @@ -251,6 +261,10 @@ class DocumentMetadata(proto.Message): Title. page_identifier (str): Page identifier. + struct_data (google.protobuf.struct_pb2.Struct): + The structured JSON metadata for the + document. It is populated from the struct data + from the Chunk in search result. """ document: str = proto.Field( @@ -269,6 +283,11 @@ class DocumentMetadata(proto.Message): proto.STRING, number=4, ) + struct_data: struct_pb2.Struct = proto.Field( + proto.MESSAGE, + number=5, + message=struct_pb2.Struct, + ) chunk: str = proto.Field( proto.STRING, diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/types/chunk.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/types/chunk.py index c439f6ed7dc4..3f7b6d7f569e 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/types/chunk.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/types/chunk.py @@ -32,6 +32,9 @@ class Chunk(proto.Message): r"""Chunk captures all raw metadata information of items to be recommended or searched in the chunk mode. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + Attributes: name (str): The full resource name of the chunk. Format: @@ -44,6 +47,13 @@ class Chunk(proto.Message): content (str): Content is a string from a document (parsed content). + relevance_score (float): + The relevance score based on similarity. Higher score + indicates higher chunk relevance. The score is in range + [-1.0, 1.0]. 
Only populated on + [SearchService.SearchResponse][]. + + This field is a member of `oneof`_ ``_relevance_score``. document_metadata (google.cloud.discoveryengine_v1alpha.types.Chunk.DocumentMetadata): Metadata of the document from the current chunk. @@ -150,6 +160,11 @@ class ChunkMetadata(proto.Message): proto.STRING, number=3, ) + relevance_score: float = proto.Field( + proto.DOUBLE, + number=8, + optional=True, + ) document_metadata: DocumentMetadata = proto.Field( proto.MESSAGE, number=5, diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/types/common.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/types/common.py index 09db19e279da..3dbc829821cb 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/types/common.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/types/common.py @@ -24,6 +24,7 @@ manifest={ "IndustryVertical", "SolutionType", + "SearchUseCase", "SearchTier", "SearchAddOn", "Interval", @@ -86,6 +87,26 @@ class SolutionType(proto.Enum): SOLUTION_TYPE_GENERATIVE_CHAT = 4 +class SearchUseCase(proto.Enum): + r"""Defines a further subdivision of ``SolutionType``. Specifically + applies to + [SOLUTION_TYPE_SEARCH][google.cloud.discoveryengine.v1alpha.SolutionType.SOLUTION_TYPE_SEARCH]. + + Values: + SEARCH_USE_CASE_UNSPECIFIED (0): + Value used when unset. Will not occur in CSS. + SEARCH_USE_CASE_SEARCH (1): + Search use case. Expects the traffic has a non-empty + [query][google.cloud.discoveryengine.v1alpha.SearchRequest.query]. + SEARCH_USE_CASE_BROWSE (2): + Browse use case. Expects the traffic has an empty + [query][google.cloud.discoveryengine.v1alpha.SearchRequest.query]. + """ + SEARCH_USE_CASE_UNSPECIFIED = 0 + SEARCH_USE_CASE_SEARCH = 1 + SEARCH_USE_CASE_BROWSE = 2 + + class SearchTier(proto.Enum): r"""Tiers of search features. Different tiers might have different pricing. To learn more, check the pricing diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/types/control.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/types/control.py new file mode 100644 index 000000000000..6f9d1b7ecb2c --- /dev/null +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/types/control.py @@ -0,0 +1,334 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
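A small sketch of the new `SearchUseCase` enum and the optional `Chunk.relevance_score` field; the chunk below is built locally purely for illustration, since the score is normally populated by the service on search responses:

```python
from google.cloud import discoveryengine_v1alpha as de

# The new use-case subdivision of SOLUTION_TYPE_SEARCH.
use_case = de.SearchUseCase.SEARCH_USE_CASE_BROWSE  # traffic with an empty query
print(use_case.name, int(use_case))  # SEARCH_USE_CASE_BROWSE 2

# relevance_score is declared optional and sits in [-1.0, 1.0].
chunk = de.Chunk(content="example parsed content", relevance_score=0.42)
print(chunk.relevance_score)
```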
+# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +from google.protobuf import timestamp_pb2 # type: ignore +import proto # type: ignore + +from google.cloud.discoveryengine_v1alpha.types import common + +__protobuf__ = proto.module( + package="google.cloud.discoveryengine.v1alpha", + manifest={ + "Condition", + "Control", + }, +) + + +class Condition(proto.Message): + r"""Defines circumstances to be checked before allowing a + behavior + + Attributes: + query_terms (MutableSequence[google.cloud.discoveryengine_v1alpha.types.Condition.QueryTerm]): + Search only + A list of terms to match the query on. + + Maximum of 10 query terms. + active_time_range (MutableSequence[google.cloud.discoveryengine_v1alpha.types.Condition.TimeRange]): + Range of time(s) specifying when condition is + active. + Maximum of 10 time ranges. + """ + + class QueryTerm(proto.Message): + r"""Matcher for search request query + + Attributes: + value (str): + The specific query value to match against + + Must be lowercase, must be UTF-8. Can have at most 3 space + separated terms if full_match is true. Cannot be an empty + string. Maximum length of 5000 characters. + full_match (bool): + Whether the search query needs to exactly + match the query term. + """ + + value: str = proto.Field( + proto.STRING, + number=1, + ) + full_match: bool = proto.Field( + proto.BOOL, + number=2, + ) + + class TimeRange(proto.Message): + r"""Used for time-dependent conditions. + + Attributes: + start_time (google.protobuf.timestamp_pb2.Timestamp): + Start of time range. + + Range is inclusive. + end_time (google.protobuf.timestamp_pb2.Timestamp): + End of time range. + + Range is inclusive. + Must be in the future. + """ + + start_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=1, + message=timestamp_pb2.Timestamp, + ) + end_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + + query_terms: MutableSequence[QueryTerm] = proto.RepeatedField( + proto.MESSAGE, + number=2, + message=QueryTerm, + ) + active_time_range: MutableSequence[TimeRange] = proto.RepeatedField( + proto.MESSAGE, + number=3, + message=TimeRange, + ) + + +class Control(proto.Message): + r"""Defines a conditioned behavior to employ during serving. Must be + attached to a + [ServingConfig][google.cloud.discoveryengine.v1alpha.ServingConfig] + to be considered at serving time. Permitted actions dependent on + ``SolutionType``. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + boost_action (google.cloud.discoveryengine_v1alpha.types.Control.BoostAction): + Defines a boost-type control + + This field is a member of `oneof`_ ``action``. + filter_action (google.cloud.discoveryengine_v1alpha.types.Control.FilterAction): + Defines a filter-type control + Currently not supported by Recommendation + + This field is a member of `oneof`_ ``action``. + redirect_action (google.cloud.discoveryengine_v1alpha.types.Control.RedirectAction): + Defines a redirect-type control. + + This field is a member of `oneof`_ ``action``. 
+ synonyms_action (google.cloud.discoveryengine_v1alpha.types.Control.SynonymsAction): + Treats a group of terms as synonyms of one + another. + + This field is a member of `oneof`_ ``action``. + name (str): + Immutable. Fully qualified name + ``projects/*/locations/global/dataStore/*/controls/*`` + display_name (str): + Required. Human readable name. The identifier + used in UI views. + Must be UTF-8 encoded string. Length limit is + 128 characters. Otherwise an INVALID ARGUMENT + error is thrown. + associated_serving_config_ids (MutableSequence[str]): + Output only. List of all + [ServingConfig][google.cloud.discoveryengine.v1alpha.ServingConfig] + ids this control is attached to. May take up to 10 minutes + to update after changes. + solution_type (google.cloud.discoveryengine_v1alpha.types.SolutionType): + Required. Immutable. What solution the + control belongs to. + Must be compatible with vertical of resource. + Otherwise an INVALID ARGUMENT error is thrown. + use_cases (MutableSequence[google.cloud.discoveryengine_v1alpha.types.SearchUseCase]): + Specifies the use case for the control. Affects what + condition fields can be set. Only applies to + [SOLUTION_TYPE_SEARCH][google.cloud.discoveryengine.v1alpha.SolutionType.SOLUTION_TYPE_SEARCH]. + Currently only allow one use case per control. Must be set + when solution_type is + [SolutionType.SOLUTION_TYPE_SEARCH][google.cloud.discoveryengine.v1alpha.SolutionType.SOLUTION_TYPE_SEARCH]. + conditions (MutableSequence[google.cloud.discoveryengine_v1alpha.types.Condition]): + Determines when the associated action will + trigger. + Omit to always apply the action. + Currently only a single condition may be + specified. Otherwise an INVALID ARGUMENT error + is thrown. + """ + + class BoostAction(proto.Message): + r"""Adjusts order of products in returned list. + + Attributes: + boost (float): + Required. Strength of the boost, which should be in [-1, 1]. + Negative boost means demotion. Default is 0.0 (No-op). + filter (str): + Required. Specifies which products to apply + the boost to. + If no filter is provided all products will be + boosted (No-op). Syntax documentation: + + https://cloud.google.com/retail/docs/filter-and-order + Maximum length is 5000 characters. + Otherwise an INVALID ARGUMENT error is thrown. + data_store (str): + Required. Specifies which data store's documents can be + boosted by this control. Full data store name e.g. + projects/123/locations/global/collections/default_collection/dataStores/default_data_store + """ + + boost: float = proto.Field( + proto.FLOAT, + number=1, + ) + filter: str = proto.Field( + proto.STRING, + number=2, + ) + data_store: str = proto.Field( + proto.STRING, + number=3, + ) + + class FilterAction(proto.Message): + r"""Specified which products may be included in results. + Uses same filter as boost. + + Attributes: + filter (str): + Required. A filter to apply on the matching + condition results. + Required + Syntax documentation: + + https://cloud.google.com/retail/docs/filter-and-order + Maximum length is 5000 characters. Otherwise an + INVALID ARGUMENT error is thrown. + data_store (str): + Required. Specifies which data store's documents can be + filtered by this control. Full data store name e.g. 
+ projects/123/locations/global/collections/default_collection/dataStores/default_data_store + """ + + filter: str = proto.Field( + proto.STRING, + number=1, + ) + data_store: str = proto.Field( + proto.STRING, + number=2, + ) + + class RedirectAction(proto.Message): + r"""Redirects a shopper to the provided URI. + + Attributes: + redirect_uri (str): + Required. The URI to which the shopper will + be redirected. + Required. + URI must have length equal or less than 2000 + characters. Otherwise an INVALID ARGUMENT error + is thrown. + """ + + redirect_uri: str = proto.Field( + proto.STRING, + number=1, + ) + + class SynonymsAction(proto.Message): + r"""Creates a set of terms that will act as synonyms of one + another. + Example: "happy" will also be considered as "glad", "glad" will + also be considered as "happy". + + Attributes: + synonyms (MutableSequence[str]): + Defines a set of synonyms. + Can specify up to 100 synonyms. + Must specify at least 2 synonyms. Otherwise an + INVALID ARGUMENT error is thrown. + """ + + synonyms: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=1, + ) + + boost_action: BoostAction = proto.Field( + proto.MESSAGE, + number=6, + oneof="action", + message=BoostAction, + ) + filter_action: FilterAction = proto.Field( + proto.MESSAGE, + number=7, + oneof="action", + message=FilterAction, + ) + redirect_action: RedirectAction = proto.Field( + proto.MESSAGE, + number=9, + oneof="action", + message=RedirectAction, + ) + synonyms_action: SynonymsAction = proto.Field( + proto.MESSAGE, + number=10, + oneof="action", + message=SynonymsAction, + ) + name: str = proto.Field( + proto.STRING, + number=1, + ) + display_name: str = proto.Field( + proto.STRING, + number=2, + ) + associated_serving_config_ids: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=3, + ) + solution_type: common.SolutionType = proto.Field( + proto.ENUM, + number=4, + enum=common.SolutionType, + ) + use_cases: MutableSequence[common.SearchUseCase] = proto.RepeatedField( + proto.ENUM, + number=8, + enum=common.SearchUseCase, + ) + conditions: MutableSequence["Condition"] = proto.RepeatedField( + proto.MESSAGE, + number=5, + message="Condition", + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/types/control_service.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/types/control_service.py new file mode 100644 index 000000000000..9abaa1f96a78 --- /dev/null +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/types/control_service.py @@ -0,0 +1,202 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
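A minimal sketch constructing the new `Control` and `Condition` messages defined in `types/control.py` above. The resource names and filter string are placeholders; the message and field names come from this change:

```python
from google.cloud import discoveryengine_v1alpha as de

control = de.Control(
    display_name="Boost winter jackets",
    solution_type=de.SolutionType.SOLUTION_TYPE_SEARCH,
    use_cases=[de.SearchUseCase.SEARCH_USE_CASE_SEARCH],
    conditions=[
        de.Condition(
            query_terms=[
                de.Condition.QueryTerm(value="winter jacket", full_match=True)
            ]
        )
    ],
    boost_action=de.Control.BoostAction(
        boost=0.5,  # in [-1, 1]; negative values demote
        filter='(categories: ANY("Apparel"))',  # placeholder filter expression
        data_store=(
            "projects/123/locations/global/collections/default_collection"
            "/dataStores/default_data_store"
        ),
    ),
)
print(control)
```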
+# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +from google.protobuf import field_mask_pb2 # type: ignore +import proto # type: ignore + +from google.cloud.discoveryengine_v1alpha.types import control as gcd_control + +__protobuf__ = proto.module( + package="google.cloud.discoveryengine.v1alpha", + manifest={ + "CreateControlRequest", + "UpdateControlRequest", + "DeleteControlRequest", + "GetControlRequest", + "ListControlsRequest", + "ListControlsResponse", + }, +) + + +class CreateControlRequest(proto.Message): + r"""Request for CreateControl method. + + Attributes: + parent (str): + Required. Full resource name of parent data store. Format: + ``projects/{project_number}/locations/{location_id}/collections/{collection_id}/dataStores/{data_store_id}`` + or + ``projects/{project_number}/locations/{location_id}/collections/{collection_id}/engines/{engine_id}``. + control (google.cloud.discoveryengine_v1alpha.types.Control): + Required. The Control to create. + control_id (str): + Required. The ID to use for the Control, which will become + the final component of the Control's resource name. + + This value must be within 1-63 characters. Valid characters + are /[a-z][0-9]-_/. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + control: gcd_control.Control = proto.Field( + proto.MESSAGE, + number=2, + message=gcd_control.Control, + ) + control_id: str = proto.Field( + proto.STRING, + number=3, + ) + + +class UpdateControlRequest(proto.Message): + r"""Request for UpdateControl method. + + Attributes: + control (google.cloud.discoveryengine_v1alpha.types.Control): + Required. The Control to update. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Optional. Indicates which fields in the provided + [Control][google.cloud.discoveryengine.v1alpha.Control] to + update. The following are NOT supported: + + - [Control.name][google.cloud.discoveryengine.v1alpha.Control.name] + - [Control.solution_type][google.cloud.discoveryengine.v1alpha.Control.solution_type] + + If not set or empty, all supported fields are updated. + """ + + control: gcd_control.Control = proto.Field( + proto.MESSAGE, + number=1, + message=gcd_control.Control, + ) + update_mask: field_mask_pb2.FieldMask = proto.Field( + proto.MESSAGE, + number=2, + message=field_mask_pb2.FieldMask, + ) + + +class DeleteControlRequest(proto.Message): + r"""Request for DeleteControl method. + + Attributes: + name (str): + Required. The resource name of the Control to delete. + Format: + ``projects/{project_number}/locations/{location_id}/collections/{collection_id}/dataStores/{data_store_id}/controls/{control_id}`` + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class GetControlRequest(proto.Message): + r"""Request for GetControl method. + + Attributes: + name (str): + Required. The resource name of the Control to get. Format: + ``projects/{project_number}/locations/{location_id}/collections/{collection_id}/dataStores/{data_store_id}/controls/{control_id}`` + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class ListControlsRequest(proto.Message): + r"""Request for ListControls method. + + Attributes: + parent (str): + Required. The data store resource name. Format: + ``projects/{project_number}/locations/{location_id}/collections/{collection_id}/dataStores/{data_store_id}`` + or + ``projects/{project_number}/locations/{location_id}/collections/{collection_id}/engines/{engine_id}``. + page_size (int): + Optional. 
Maximum number of results to + return. If unspecified, defaults to 50. Max + allowed value is 1000. + page_token (str): + Optional. A page token, received from a previous + ``ListControls`` call. Provide this to retrieve the + subsequent page. + filter (str): + Optional. A filter to apply on the list results. Supported + features: + + - List all the products under the parent branch if + [filter][google.cloud.discoveryengine.v1alpha.ListControlsRequest.filter] + is unset. Currently this field is unsupported. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + filter: str = proto.Field( + proto.STRING, + number=4, + ) + + +class ListControlsResponse(proto.Message): + r"""Response for ListControls method. + + Attributes: + controls (MutableSequence[google.cloud.discoveryengine_v1alpha.types.Control]): + All the Controls for a given data store. + next_page_token (str): + Pagination token, if not returned indicates + the last page. + """ + + @property + def raw_page(self): + return self + + controls: MutableSequence[gcd_control.Control] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=gcd_control.Control, + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/types/conversation.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/types/conversation.py index a19e104e1915..1fa12a557e7b 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/types/conversation.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/types/conversation.py @@ -40,9 +40,9 @@ class Conversation(proto.Message): Attributes: name (str): Immutable. Fully qualified name - ``project/*/locations/global/collections/{collection}/dataStore/*/conversations/*`` + ``projects/{project}/locations/global/collections/{collection}/dataStore/*/conversations/*`` or - ``project/*/locations/global/collections/{collection}/engines/*/conversations/*``. + ``projects/{project}/locations/global/collections/{collection}/engines/*/conversations/*``. state (google.cloud.discoveryengine_v1alpha.types.Conversation.State): The state of the Conversation. user_pseudo_id (str): diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/types/conversational_search_service.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/types/conversational_search_service.py index a21924344206..39a949871fd2 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/types/conversational_search_service.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/types/conversational_search_service.py @@ -393,12 +393,12 @@ class AnswerQueryRequest(proto.Message): session (str): The session resource name. Not required. - When session field is not set, the API is in - sessionless mode. + When session field is not set, the API is in sessionless + mode. - We support auto session mode: users can use the - wildcard symbol “-” as session id. A new id - will be automatically generated and assigned. + We support auto session mode: users can use the wildcard + symbol ``-`` as session ID. A new ID will be automatically + generated and assigned. 
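A sketch of building the new control_service request messages added above. The parent and control ID values are placeholders, and only the request types are constructed here; the service client that sends them is outside these hunks:

```python
from google.cloud import discoveryengine_v1alpha as de

parent = (
    "projects/123/locations/global/collections/default_collection"
    "/dataStores/default_data_store"
)

create_req = de.CreateControlRequest(
    parent=parent,
    control_id="boost-winter-jackets",  # 1-63 chars, /[a-z][0-9]-_/
    control=de.Control(
        display_name="Boost winter jackets",
        solution_type=de.SolutionType.SOLUTION_TYPE_SEARCH,
    ),
)

list_req = de.ListControlsRequest(parent=parent, page_size=50)
print(create_req.control_id, list_req.page_size)
```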
safety_spec (google.cloud.discoveryengine_v1alpha.types.AnswerQueryRequest.SafetySpec): Model specification. related_questions_spec (google.cloud.discoveryengine_v1alpha.types.AnswerQueryRequest.RelatedQuestionsSpec): @@ -465,6 +465,8 @@ class RelatedQuestionsSpec(proto.Message): class AnswerGenerationSpec(proto.Message): r"""Answer generation specification. + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + Attributes: model_spec (google.cloud.discoveryengine_v1alpha.types.AnswerQueryRequest.AnswerGenerationSpec.ModelSpec): Answer generation model specification. @@ -499,6 +501,16 @@ class AnswerGenerationSpec(proto.Message): field is set to ``true``, we skip generating answers for non-answer seeking queries and return fallback messages instead. + ignore_low_relevant_content (bool): + Specifies whether to filter out queries that have low + relevance. + + If this field is set to ``false``, all search results are + used regardless of relevance to generate answers. If set to + ``true`` or unset, the behavior will be determined + automatically by the service. + + This field is a member of `oneof`_ ``_ignore_low_relevant_content``. """ class ModelSpec(proto.Message): @@ -555,6 +567,11 @@ class PromptSpec(proto.Message): proto.BOOL, number=6, ) + ignore_low_relevant_content: bool = proto.Field( + proto.BOOL, + number=7, + optional=True, + ) class SearchSpec(proto.Message): r"""Search specification. @@ -632,6 +649,13 @@ class SearchParams(proto.Message): documents `__ custom_fine_tuning_spec (google.cloud.discoveryengine_v1alpha.types.CustomFineTuningSpec): Custom fine tuning configs. + data_store_specs (MutableSequence[google.cloud.discoveryengine_v1alpha.types.SearchRequest.DataStoreSpec]): + Specs defining dataStores to filter on in a + search call and configurations for those + dataStores. This is only considered for engines + with multiple dataStores use case. For single + dataStore within an engine, they should use the + specs at the top level. """ max_return_results: int = proto.Field( @@ -661,6 +685,13 @@ class SearchParams(proto.Message): number=6, message=common.CustomFineTuningSpec, ) + data_store_specs: MutableSequence[ + search_service.SearchRequest.DataStoreSpec + ] = proto.RepeatedField( + proto.MESSAGE, + number=7, + message=search_service.SearchRequest.DataStoreSpec, + ) class SearchResultList(proto.Message): r"""Search result list. @@ -906,7 +937,7 @@ class QueryRephraserSpec(proto.Message): Disable query rephraser. max_rephrase_steps (int): Max rephrase steps. - The max number is 10 steps. + The max number is 5 steps. If not set or set to < 1, it will be set to 1 by default. """ @@ -998,6 +1029,8 @@ class AnswerQueryResponse(proto.Message): session field is set and valid in the [AnswerQueryRequest][google.cloud.discoveryengine.v1alpha.AnswerQueryRequest] request. + answer_query_token (str): + A global unique ID used for logging. 
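A sketch of the two spec additions in this hunk: the optional `ignore_low_relevant_content` flag on `AnswerQueryRequest.AnswerGenerationSpec` and `data_store_specs` on `AnswerQueryRequest.SearchSpec.SearchParams`. Only the nested spec messages are constructed; the data store name is a placeholder and the `DataStoreSpec.data_store` field name is assumed, since `DataStoreSpec` itself is not shown in these hunks:

```python
from google.cloud import discoveryengine_v1alpha as de

answer_spec = de.AnswerQueryRequest.AnswerGenerationSpec(
    ignore_low_relevant_content=True,  # skip low-relevance results when answering
)

search_params = de.AnswerQueryRequest.SearchSpec.SearchParams(
    max_return_results=5,
    data_store_specs=[
        de.SearchRequest.DataStoreSpec(
            # Assumed field name; only the spec's use in SearchParams appears above.
            data_store=(
                "projects/123/locations/global/collections/default_collection"
                "/dataStores/default_data_store"
            )
        )
    ],
)
print(answer_spec, search_params)
```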
""" answer: gcd_answer.Answer = proto.Field( @@ -1010,6 +1043,10 @@ class AnswerQueryResponse(proto.Message): number=2, message=gcd_session.Session, ) + answer_query_token: str = proto.Field( + proto.STRING, + number=3, + ) class GetAnswerRequest(proto.Message): diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/types/custom_tuning_model.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/types/custom_tuning_model.py new file mode 100644 index 000000000000..85992c5f567e --- /dev/null +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/types/custom_tuning_model.py @@ -0,0 +1,109 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +from google.protobuf import timestamp_pb2 # type: ignore +import proto # type: ignore + +__protobuf__ = proto.module( + package="google.cloud.discoveryengine.v1alpha", + manifest={ + "CustomTuningModel", + }, +) + + +class CustomTuningModel(proto.Message): + r"""Metadata that describes a custom tuned model. + + Attributes: + name (str): + Required. The fully qualified resource name of the model. + + Format: + ``projects/{project_number}/locations/{location}/collections/{collection}/dataStores/{data_store}/customTuningModels/{custom_tuning_model}`` + model must be an alpha-numerical string with limit of 40 + characters. + display_name (str): + The display name of the model. + model_version (int): + The version of the model. + model_state (google.cloud.discoveryengine_v1alpha.types.CustomTuningModel.ModelState): + The state that the model is in (e.g.``TRAINING`` or + ``TRAINING_FAILED``). + create_time (google.protobuf.timestamp_pb2.Timestamp): + Timestamp the Model was created at. + training_start_time (google.protobuf.timestamp_pb2.Timestamp): + Timestamp the model training was initiated. + """ + + class ModelState(proto.Enum): + r"""The state of the model. + + Values: + MODEL_STATE_UNSPECIFIED (0): + Default value. + TRAINING_PAUSED (1): + The model is in a paused training state. + TRAINING (2): + The model is currently training. + TRAINING_COMPLETE (3): + The model has successfully completed + training. + READY_FOR_SERVING (4): + The model is ready for serving. + TRAINING_FAILED (5): + The model training failed. 
+ """ + MODEL_STATE_UNSPECIFIED = 0 + TRAINING_PAUSED = 1 + TRAINING = 2 + TRAINING_COMPLETE = 3 + READY_FOR_SERVING = 4 + TRAINING_FAILED = 5 + + name: str = proto.Field( + proto.STRING, + number=1, + ) + display_name: str = proto.Field( + proto.STRING, + number=2, + ) + model_version: int = proto.Field( + proto.INT64, + number=3, + ) + model_state: ModelState = proto.Field( + proto.ENUM, + number=4, + enum=ModelState, + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=5, + message=timestamp_pb2.Timestamp, + ) + training_start_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=6, + message=timestamp_pb2.Timestamp, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/types/data_store_service.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/types/data_store_service.py index b0b08ee3ee18..d98a83c8021e 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/types/data_store_service.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/types/data_store_service.py @@ -180,8 +180,8 @@ class ListDataStoresRequest(proto.Message): must match the call that provided the page token. Otherwise, an INVALID_ARGUMENT error is returned. filter (str): - Filter by solution type. For example: filter = - 'solution_type:SOLUTION_TYPE_SEARCH' + Filter by solution type. For example: + ``filter = 'solution_type:SOLUTION_TYPE_SEARCH'`` """ parent: str = proto.Field( diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/types/document_service.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/types/document_service.py index b4cae08db273..bbbf2ca1e493 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/types/document_service.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/types/document_service.py @@ -84,8 +84,7 @@ class ListDocumentsRequest(proto.Message): Maximum number of [Document][google.cloud.discoveryengine.v1alpha.Document]s to return. If unspecified, defaults to 100. The maximum - allowed value is 1000. Values above 1000 will be coerced to - 1000. + allowed value is 1000. Values above 1000 are set to 1000. If this field is negative, an ``INVALID_ARGUMENT`` error is returned. @@ -163,7 +162,7 @@ class CreateDocumentRequest(proto.Message): document_id (str): Required. The ID to use for the [Document][google.cloud.discoveryengine.v1alpha.Document], - which will become the final component of the + which becomes the final component of the [Document.name][google.cloud.discoveryengine.v1alpha.Document.name]. If the caller does not have permission to create the @@ -218,15 +217,15 @@ class UpdateDocumentRequest(proto.Message): [allow_missing][google.cloud.discoveryengine.v1alpha.UpdateDocumentRequest.allow_missing] is not set, a ``NOT_FOUND`` error is returned. allow_missing (bool): - If set to true, and the + If set to ``true`` and the [Document][google.cloud.discoveryengine.v1alpha.Document] is not found, a new - [Document][google.cloud.discoveryengine.v1alpha.Document] - will be created. + [Document][google.cloud.discoveryengine.v1alpha.Document] is + be created. update_mask (google.protobuf.field_mask_pb2.FieldMask): Indicates which fields in the provided - imported 'document' to update. If not set, will - by default update all fields. 
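A sketch of reading the new `CustomTuningModel` message. The instance is built locally for illustration; in practice models would come back from the search tuning service's new `ListCustomModels` call:

```python
from google.cloud import discoveryengine_v1alpha as de

model = de.CustomTuningModel(
    name=(
        "projects/123/locations/global/collections/default_collection"
        "/dataStores/default_data_store/customTuningModels/my-model"
    ),
    display_name="my-model",
    model_version=1,
    model_state=de.CustomTuningModel.ModelState.READY_FOR_SERVING,
)

if model.model_state in (
    de.CustomTuningModel.ModelState.READY_FOR_SERVING,
    de.CustomTuningModel.ModelState.TRAINING_COMPLETE,
):
    print(f"{model.display_name} (v{model.model_version}) can serve traffic")
```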
+ imported 'document' to update. If not set, by + default updates all fields. """ document: gcd_document.Document = proto.Field( @@ -309,12 +308,16 @@ class ProcessedDocumentType(proto.Enum): PARSED_DOCUMENT (1): Available for all data store parsing configs. CHUNKED_DOCUMENT (2): - Only available if ChunkingConfig is enabeld + Only available if ChunkingConfig is enabled on the data store. + PNG_CONVERTED_DOCUMENT (3): + Returns the converted PNG Image bytes if + available. """ PROCESSED_DOCUMENT_TYPE_UNSPECIFIED = 0 PARSED_DOCUMENT = 1 CHUNKED_DOCUMENT = 2 + PNG_CONVERTED_DOCUMENT = 3 class ProcessedDocumentFormat(proto.Enum): r"""The format of the returned processed document. If @@ -324,8 +327,8 @@ class ProcessedDocumentFormat(proto.Enum): PROCESSED_DOCUMENT_FORMAT_UNSPECIFIED (0): Default value. JSON (1): - output format will be a JSON string - representation of processed document. + Output format is a JSON string representation + of processed document. """ PROCESSED_DOCUMENT_FORMAT_UNSPECIFIED = 0 JSON = 1 diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/types/engine.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/types/engine.py index 1d7bab010360..6d97f3e9ddce 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/types/engine.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/types/engine.py @@ -138,8 +138,7 @@ class Engine(proto.Message): restriction of the Engine industry vertical is based on [DataStore][google.cloud.discoveryengine.v1alpha.DataStore]: If unspecified, default to ``GENERIC``. Vertical on Engine - has to match vertical of the DataStore liniked to the - engine. + has to match vertical of the DataStore linked to the engine. common_config (google.cloud.discoveryengine_v1alpha.types.Engine.CommonConfig): Common config spec that specifies the metadata of the engine. @@ -387,10 +386,9 @@ class CommonConfig(proto.Message): Attributes: company_name (str): - Immutable. The name of the company, business - or entity that is associated with the engine. - Setting this may help improve LLM related - features. + The name of the company, business or entity + that is associated with the engine. Setting this + may help improve LLM related features. """ company_name: str = proto.Field( diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/types/grounded_generation_service.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/types/grounded_generation_service.py index 667ae4abb0ed..40fa41fd061f 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/types/grounded_generation_service.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/types/grounded_generation_service.py @@ -65,12 +65,35 @@ class CheckGroundingRequest(proto.Message): Required. The resource name of the grounding config, such as ``projects/*/locations/global/groundingConfigs/default_grounding_config``. answer_candidate (str): - Answer candidate to check. + Answer candidate to check. Can have a maximum + length of 1024 characters. facts (MutableSequence[google.cloud.discoveryengine_v1alpha.types.GroundingFact]): List of facts for the grounding check. We support up to 200 facts. grounding_spec (google.cloud.discoveryengine_v1alpha.types.CheckGroundingSpec): Configuration of the grounding check. 
+ user_labels (MutableMapping[str, str]): + The user labels applied to a resource must meet the + following requirements: + + - Each resource can have multiple labels, up to a maximum + of 64. + - Each label must be a key-value pair. + - Keys have a minimum length of 1 character and a maximum + length of 63 characters and cannot be empty. Values can + be empty and have a maximum length of 63 characters. + - Keys and values can contain only lowercase letters, + numeric characters, underscores, and dashes. All + characters must use UTF-8 encoding, and international + characters are allowed. + - The key portion of a label must be unique. However, you + can use the same key with multiple resources. + - Keys must start with a lowercase letter or international + character. + + See `Google Cloud + Document `__ + for more details. """ grounding_config: str = proto.Field( @@ -91,6 +114,11 @@ class CheckGroundingRequest(proto.Message): number=4, message="CheckGroundingSpec", ) + user_labels: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=5, + ) class CheckGroundingResponse(proto.Message): @@ -145,6 +173,18 @@ class Claim(proto.Message): means that cited_chunks[1], cited_chunks[3], cited_chunks[4] are the facts cited supporting for the claim. A citation to a fact indicates that the claim is supported by the fact. + grounding_check_required (bool): + Indicates that this claim required grounding check. When the + system decided this claim doesn't require + attribution/grounding check, this field will be set to + false. In that case, no grounding check was done for the + claim and therefore + [citation_indices][google.cloud.discoveryengine.v1alpha.CheckGroundingResponse.Claim.citation_indices], + and + [anti_citation_indices][google.cloud.discoveryengine.v1alpha.CheckGroundingResponse.Claim.anti_citation_indices] + should not be returned. + + This field is a member of `oneof`_ ``_grounding_check_required``. """ start_pos: int = proto.Field( @@ -165,6 +205,11 @@ class Claim(proto.Message): proto.INT32, number=4, ) + grounding_check_required: bool = proto.Field( + proto.BOOL, + number=6, + optional=True, + ) support_score: float = proto.Field( proto.FLOAT, diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/types/grounding.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/types/grounding.py index cf1775b6f6ee..09de56036168 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/types/grounding.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/types/grounding.py @@ -66,6 +66,9 @@ class FactChunk(proto.Message): GroundingFacts provided in the request then this field will contain the index of the specific fact from which this chunk was retrieved. + index (int): + The index of this chunk. Currently, only used + for the streaming mode. source_metadata (MutableMapping[str, str]): More fine-grained information for the source reference. 
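A sketch of the main grounded-generation addition here, the `user_labels` map on `CheckGroundingRequest` (the other addition, `grounding_check_required` on response claims, is read-only and not constructed below). The grounding config name and answer candidate are placeholders, and the facts list is left empty to keep the sketch local:

```python
from google.cloud import discoveryengine_v1alpha as de

request = de.CheckGroundingRequest(
    grounding_config=(
        "projects/123/locations/global/groundingConfigs/default_grounding_config"
    ),
    answer_candidate="Titanium is a chemical element with symbol Ti.",
    user_labels={"team": "search-quality", "env": "dev"},  # up to 64 labels
)
print(request)
```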
@@ -79,6 +82,10 @@ class FactChunk(proto.Message): proto.STRING, number=2, ) + index: int = proto.Field( + proto.INT32, + number=4, + ) source_metadata: MutableMapping[str, str] = proto.MapField( proto.STRING, proto.STRING, diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/types/import_config.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/types/import_config.py index b45cb43efa98..68961fb998de 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/types/import_config.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/types/import_config.py @@ -55,10 +55,10 @@ class GcsSource(proto.Message): Attributes: input_uris (MutableSequence[str]): - Required. Cloud Storage URIs to input files. URI can be up - to 2000 characters long. URIs can match the full object path - (for example, ``gs://bucket/directory/object.json``) or a - pattern matching one or more files, such as + Required. Cloud Storage URIs to input files. Each URI can be + up to 2000 characters long. URIs can match the full object + path (for example, ``gs://bucket/directory/object.json``) or + a pattern matching one or more files, such as ``gs://bucket/directory/*.json``. A request can contain at most 100 files (or 100,000 files if @@ -88,7 +88,7 @@ class GcsSource(proto.Message): as a Document. This can only be used by the GENERIC Data Store vertical. - Supported values for user even imports: + Supported values for user event imports: - ``user_event`` (default): One JSON [UserEvent][google.cloud.discoveryengine.v1alpha.UserEvent] @@ -551,9 +551,9 @@ class FirestoreSource(proto.Message): Required. The Firestore database to copy the data from with a length limit of 256 characters. collection_id (str): - Required. The Firestore collection to copy - the data from with a length limit of 1,500 - characters. + Required. The Firestore collection (or + entity) to copy the data from with a length + limit of 1,500 characters. gcs_staging_dir (str): Intermediate Cloud Storage directory used for the import with a length limit of 2,000 diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/types/rank_service.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/types/rank_service.py index f2cc0ac2ac0f..48a6d07cc725 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/types/rank_service.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/types/rank_service.py @@ -103,6 +103,28 @@ class RankRequest(proto.Message): If true, the response will contain only record ID and score. By default, it is false, the response will contain record details. + user_labels (MutableMapping[str, str]): + The user labels applied to a resource must meet the + following requirements: + + - Each resource can have multiple labels, up to a maximum + of 64. + - Each label must be a key-value pair. + - Keys have a minimum length of 1 character and a maximum + length of 63 characters and cannot be empty. Values can + be empty and have a maximum length of 63 characters. + - Keys and values can contain only lowercase letters, + numeric characters, underscores, and dashes. All + characters must use UTF-8 encoding, and international + characters are allowed. + - The key portion of a label must be unique. However, you + can use the same key with multiple resources. 
+ - Keys must start with a lowercase letter or international + character. + + See `Google Cloud + Document `__ + for more details. """ ranking_config: str = proto.Field( @@ -130,6 +152,11 @@ class RankRequest(proto.Message): proto.BOOL, number=6, ) + user_labels: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=7, + ) class RankResponse(proto.Message): diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/types/recommendation_service.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/types/recommendation_service.py index 44b3b9053fe9..32a5b448c4a9 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/types/recommendation_service.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/types/recommendation_service.py @@ -44,9 +44,8 @@ class RecommendRequest(proto.Message): ``projects/*/locations/global/collections/*/dataStores/*/servingConfigs/*`` One default serving config is created along with your - recommendation engine creation. The engine ID will be used - as the ID of the default serving config. For example, for - Engine + recommendation engine creation. The engine ID is used as the + ID of the default serving config. For example, for Engine ``projects/*/locations/global/collections/*/engines/my-engine``, you can use ``projects/*/locations/global/collections/*/engines/my-engine/servingConfigs/my-engine`` @@ -75,9 +74,9 @@ class RecommendRequest(proto.Message): page_size (int): Maximum number of results to return. Set this property to the number of recommendation results - needed. If zero, the service will choose a + needed. If zero, the service chooses a reasonable default. The maximum allowed value is - 100. Values above 100 will be coerced to 100. + 100. Values above 100 are set to 100. filter (str): Filter for restricting recommendation results with a length limit of 5,000 characters. Currently, only filter @@ -98,41 +97,39 @@ class RecommendRequest(proto.Message): - (available: true) AND (launguage: ANY("en", "es")) OR (categories: ANY("Movie")) - If your filter blocks all results, the API will return - generic (unfiltered) popular Documents. If you only want - results strictly matching the filters, set - ``strictFiltering`` to True in + If your filter blocks all results, the API returns generic + (unfiltered) popular Documents. If you only want results + strictly matching the filters, set ``strictFiltering`` to + ``true`` in [RecommendRequest.params][google.cloud.discoveryengine.v1alpha.RecommendRequest.params] to receive empty results instead. - Note that the API will never return + Note that the API never returns [Document][google.cloud.discoveryengine.v1alpha.Document]s - with ``storageStatus`` of ``EXPIRED`` or ``DELETED`` + with ``storageStatus`` as ``EXPIRED`` or ``DELETED`` regardless of filter choices. validate_only (bool): - Use validate only mode for this - recommendation query. If set to true, a fake - model will be used that returns arbitrary - Document IDs. Note that the validate only mode - should only be used for testing the API, or if - the model is not ready. + Use validate only mode for this recommendation query. If set + to ``true``, a fake model is used that returns arbitrary + Document IDs. Note that the validate only mode should only + be used for testing the API, or if the model is not ready. 
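A sketch of the matching `user_labels` map added to `RankRequest`. The ranking config name is a placeholder, and the records to rank are omitted because that part of the message is not shown in this hunk:

```python
from google.cloud import discoveryengine_v1alpha as de

rank_request = de.RankRequest(
    ranking_config=(
        "projects/123/locations/global/rankingConfigs/default_ranking_config"
    ),
    user_labels={"team": "search-quality"},  # same 64-label constraints as above
)
print(rank_request.user_labels)
```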
params (MutableMapping[str, google.protobuf.struct_pb2.Value]): Additional domain specific parameters for the recommendations. Allowed values: - - ``returnDocument``: Boolean. If set to true, the - associated Document object will be returned in + - ``returnDocument``: Boolean. If set to ``true``, the + associated Document object is returned in [RecommendResponse.RecommendationResult.document][google.cloud.discoveryengine.v1alpha.RecommendResponse.RecommendationResult.document]. - ``returnScore``: Boolean. If set to true, the - recommendation 'score' corresponding to each returned - Document will be set in + recommendation score corresponding to each returned + Document is set in [RecommendResponse.RecommendationResult.metadata][google.cloud.discoveryengine.v1alpha.RecommendResponse.RecommendationResult.metadata]. - The given 'score' indicates the probability of a Document + The given score indicates the probability of a Document conversion given the user's context and history. - ``strictFiltering``: Boolean. True by default. If set to - false, the service will return generic (unfiltered) + ``false``, the service returns generic (unfiltered) popular Documents instead of empty if your filter blocks all recommendation results. - ``diversityLevel``: String. Default empty. If set to be @@ -241,7 +238,7 @@ class RecommendationResult(proto.Message): Set if ``returnDocument`` is set to true in [RecommendRequest.params][google.cloud.discoveryengine.v1alpha.RecommendRequest.params]. metadata (MutableMapping[str, google.protobuf.struct_pb2.Value]): - Additional Document metadata / annotations. + Additional Document metadata or annotations. Possible values: diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/types/schema.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/types/schema.py index 2d8b06c94a3e..aabad9f12b6e 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/types/schema.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/types/schema.py @@ -247,6 +247,20 @@ class FieldConfig(proto.Message): METATAGS, we will only index ````; if this enum is not set, we will merge them and index ````. + schema_org_paths (MutableSequence[str]): + Field paths for indexing custom attribute from schema.org + data. More details of schema.org and its defined types can + be found at `schema.org `__. + + It is only used on advanced site search schema. + + Currently only support full path from root. The full path to + a field is constructed by concatenating field names, + starting from ``_root``, with a period ``.`` as the + delimiter. Examples: + + - Publish date of the root: \_root.datePublished + - Publish date of the reviews: \_root.review.datePublished """ class FieldType(proto.Enum): @@ -266,9 +280,31 @@ class FieldType(proto.Enum): BOOLEAN (5): Field value type is Boolean. GEOLOCATION (6): - Field value type is Geolocation. + Field value type is Geolocation. Geolocation is expressed as + an object with the following keys: + + - ``id``: a string representing the location id + - ``longitude``: a number representing the longitude + coordinate of the location + - ``latitude``: a number repesenting the latitude + coordinate of the location + - ``address``: a string representing the full address of + the location + + ``latitude`` and ``longitude`` must always be provided + together. At least one of a) ``address`` or b) + ``latitude``-``longitude`` pair must be provided. 
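Purely as an illustration of the ``GEOLOCATION`` value shape described above, the standalone sketch below builds such an object and checks the two stated constraints: latitude and longitude must come together, and at least an address or a latitude/longitude pair must be present. It is not part of the generated library.

```python
from typing import Any, Dict, Optional


def make_geolocation(
    location_id: str,
    address: Optional[str] = None,
    latitude: Optional[float] = None,
    longitude: Optional[float] = None,
) -> Dict[str, Any]:
    """Build a geolocation value using the documented key set."""
    # latitude and longitude must always be provided together.
    if (latitude is None) != (longitude is None):
        raise ValueError("latitude and longitude must be provided together")
    # At least one of address or the latitude/longitude pair is required.
    if address is None and latitude is None:
        raise ValueError("provide an address or a latitude/longitude pair")

    value: Dict[str, Any] = {"id": location_id}
    if address is not None:
        value["address"] = address
    if latitude is not None:
        value["latitude"] = latitude
        value["longitude"] = longitude
    return value


# Example value as it could appear in a document's structured data.
print(
    make_geolocation(
        "hq",
        address="1600 Amphitheatre Pkwy",
        latitude=37.422,
        longitude=-122.084,
    )
)
```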
DATETIME (7): - Field value type is Datetime. + Field value type is Datetime. Datetime can be expressed as + either: + + - a number representing milliseconds-since-the-epoch + - a string representing milliseconds-since-the-epoch. e.g. + ``"1420070400001"`` + - a string representing the `ISO + 8601 `__ date or + date and time. e.g. ``"2015-01-01"`` or + ``"2015-01-01T12:10:30Z"`` """ FIELD_TYPE_UNSPECIFIED = 0 OBJECT = 1 @@ -387,10 +423,13 @@ class AdvancedSiteSearchDataSource(proto.Enum): Retrieve value from meta tag. PAGEMAP (2): Retrieve value from page map. + SCHEMA_ORG (4): + Retrieve value from schema.org data. """ ADVANCED_SITE_SEARCH_DATA_SOURCE_UNSPECIFIED = 0 METATAGS = 1 PAGEMAP = 2 + SCHEMA_ORG = 4 field_path: str = proto.Field( proto.STRING, @@ -442,6 +481,10 @@ class AdvancedSiteSearchDataSource(proto.Enum): number=10, enum=AdvancedSiteSearchDataSource, ) + schema_org_paths: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=11, + ) __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/types/schema_service.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/types/schema_service.py index 3e0019fc62b5..38efd5138033 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/types/schema_service.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/types/schema_service.py @@ -72,11 +72,11 @@ class ListSchemasRequest(proto.Message): return. The service may return fewer than this value. If unspecified, at most 100 - [Schema][google.cloud.discoveryengine.v1alpha.Schema]s will - be returned. + [Schema][google.cloud.discoveryengine.v1alpha.Schema]s are + returned. - The maximum value is 1000; values above 1000 will be coerced - to 1000. + The maximum value is 1000; values above 1000 are set to + 1000. page_token (str): A page token, received from a previous [SchemaService.ListSchemas][google.cloud.discoveryengine.v1alpha.SchemaService.ListSchemas] @@ -148,7 +148,7 @@ class CreateSchemaRequest(proto.Message): schema_id (str): Required. The ID to use for the [Schema][google.cloud.discoveryengine.v1alpha.Schema], which - will become the final component of the + becomes the final component of the [Schema.name][google.cloud.discoveryengine.v1alpha.Schema.name]. This field should conform to @@ -185,8 +185,8 @@ class UpdateSchemaRequest(proto.Message): If set to true, and the [Schema][google.cloud.discoveryengine.v1alpha.Schema] is not found, a new - [Schema][google.cloud.discoveryengine.v1alpha.Schema] will - be created. In this situation, ``update_mask`` is ignored. + [Schema][google.cloud.discoveryengine.v1alpha.Schema] is + created. In this situation, ``update_mask`` is ignored. """ schema: gcd_schema.Schema = proto.Field( diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/types/search_service.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/types/search_service.py index 7664384e0b73..808fa372ae30 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/types/search_service.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/types/search_service.py @@ -93,8 +93,12 @@ class SearchRequest(proto.Message): If this field is negative, an ``INVALID_ARGUMENT`` is returned. 
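Referring back to the ``DATETIME`` field type above, the following standalone helper (not part of the generated library) shows how the three accepted encodings, epoch milliseconds as a number, epoch milliseconds as a string, and an ISO 8601 date or date-time string, can all be interpreted as the same kind of timestamp.

```python
from datetime import datetime, timezone
from typing import Union


def parse_datetime_value(value: Union[int, str]) -> datetime:
    """Interpret a DATETIME field value in any of the three documented forms."""
    if isinstance(value, int):
        # Milliseconds since the Unix epoch, as a number.
        return datetime.fromtimestamp(value / 1000.0, tz=timezone.utc)
    if value.isdigit():
        # Milliseconds since the Unix epoch, as a string, e.g. "1420070400001".
        return datetime.fromtimestamp(int(value) / 1000.0, tz=timezone.utc)
    # ISO 8601 date or date-time, e.g. "2015-01-01" or "2015-01-01T12:10:30Z".
    return datetime.fromisoformat(value.replace("Z", "+00:00"))


for sample in (1420070400001, "1420070400001", "2015-01-01", "2015-01-01T12:10:30Z"):
    print(sample, "->", parse_datetime_value(sample))
```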
data_store_specs (MutableSequence[google.cloud.discoveryengine_v1alpha.types.SearchRequest.DataStoreSpec]): - A list of data store specs to apply on a - search call. + Specs defining dataStores to filter on in a + search call and configurations for those + dataStores. This is only considered for engines + with multiple dataStores use case. For single + dataStore within an engine, they should use the + specs at the top level. filter (str): The filter syntax consists of an expression language for constructing a predicate from one or more fields of the @@ -133,8 +137,9 @@ class SearchRequest(proto.Message): ordered by a field in an [Document][google.cloud.discoveryengine.v1alpha.Document] object. Leave it unset if ordered by relevance. ``order_by`` - expression is case-sensitive. For more information on - ordering, see + expression is case-sensitive. + + For more information on ordering for retail search, see `Ordering `__ If this field is unrecognizable, an ``INVALID_ARGUMENT`` is @@ -153,7 +158,7 @@ class SearchRequest(proto.Message): boost_spec (google.cloud.discoveryengine_v1alpha.types.SearchRequest.BoostSpec): Boost specification to boost certain documents. For more information on boosting, see - `Boosting `__ + `Boosting `__ params (MutableMapping[str, google.protobuf.struct_pb2.Value]): Additional search parameters. @@ -161,7 +166,8 @@ class SearchRequest(proto.Message): - ``user_country_code``: string. Default empty. If set to non-empty, results are restricted or boosted based on the - location provided. Example: user_country_code: "au" + location provided. For example, + ``user_country_code: "au"`` For available codes see `Country Codes `__ @@ -169,7 +175,7 @@ class SearchRequest(proto.Message): - ``search_type``: double. Default empty. Enables non-webpage searching depending on the value. The only valid non-default value is 1, which enables image - searching. Example: search_type: 1 + searching. For example, ``search_type: 1`` query_expansion_spec (google.cloud.discoveryengine_v1alpha.types.SearchRequest.QueryExpansionSpec): The query expansion specification that specifies the conditions under which query @@ -281,7 +287,10 @@ class ImageQuery(proto.Message): ) class DataStoreSpec(proto.Message): - r"""A struct to define data stores to filter on in a search call. + r"""A struct to define data stores to filter on in a search call and + configurations for those data stores. A maximum of 1 DataStoreSpec + per data_store is allowed. Otherwise, an ``INVALID_ARGUMENT`` error + is returned. Attributes: data_store (str): @@ -303,9 +312,9 @@ class FacetSpec(proto.Message): facet_key (google.cloud.discoveryengine_v1alpha.types.SearchRequest.FacetSpec.FacetKey): Required. The facet key specification. limit (int): - Maximum of facet values that should be returned for this - facet. If unspecified, defaults to 20. The maximum allowed - value is 300. Values above 300 are coerced to 300. + Maximum facet values that are returned for this facet. If + unspecified, defaults to 20. The maximum allowed value is + 300. Values above 300 are coerced to 300. If this field is negative, an ``INVALID_ARGUMENT`` is returned. @@ -407,7 +416,7 @@ class FacetKey(proto.Message): 2021". Only supported on textual fields. Maximum is 10. contains (MutableSequence[str]): - Only get facet values that contains the given + Only get facet values that contain the given strings. For example, suppose "category" has three values "Action > 2022", "Action > 2021" and "Sci-Fi > 2022". 
If set "contains" to @@ -585,7 +594,7 @@ class AttributeType(proto.Enum): datetime field specified. The value must be formatted as an XSD ``dayTimeDuration`` value (a restricted subset of an ISO 8601 duration value). The pattern for this is: - ``[nD][T[nH][nM][nS]]``. E.g. ``5D``, ``3DT12H30M``, + ``[nD][T[nH][nM][nS]]``. For example, ``5D``, ``3DT12H30M``, ``T24H``. """ ATTRIBUTE_TYPE_UNSPECIFIED = 0 @@ -732,8 +741,8 @@ class SpellCorrectionSpec(proto.Message): Attributes: mode (google.cloud.discoveryengine_v1alpha.types.SearchRequest.SpellCorrectionSpec.Mode): - The mode under which spell correction should take effect to - replace the original search query. Default to + The mode under which spell correction replaces the original + search query. Defaults to [Mode.AUTO][google.cloud.discoveryengine.v1alpha.SearchRequest.SpellCorrectionSpec.Mode.AUTO]. """ @@ -747,10 +756,10 @@ class Mode(proto.Enum): behavior defaults to [Mode.AUTO][google.cloud.discoveryengine.v1alpha.SearchRequest.SpellCorrectionSpec.Mode.AUTO]. SUGGESTION_ONLY (1): - Search API will try to find a spell suggestion if there is - any and put in the + Search API tries to find a spelling suggestion. If a + suggestion is found, it is put in the [SearchResponse.corrected_query][google.cloud.discoveryengine.v1alpha.SearchResponse.corrected_query]. - The spell suggestion will not be used as the search query. + The spelling suggestion won't be used as the search query. AUTO (2): Automatic spell correction built by the Search API. Search will be based on the @@ -782,12 +791,7 @@ class ContentSearchSpec(proto.Message): be no extractive answer in the search response. search_result_mode (google.cloud.discoveryengine_v1alpha.types.SearchRequest.ContentSearchSpec.SearchResultMode): Specifies the search result mode. If unspecified, the search - result mode is based on - [DataStore.DocumentProcessingConfig.chunking_config][]: - - - If [DataStore.DocumentProcessingConfig.chunking_config][] - is specified, it defaults to ``CHUNKS``. - - Otherwise, it defaults to ``DOCUMENTS``. + result mode defaults to ``DOCUMENTS``. chunk_spec (google.cloud.discoveryengine_v1alpha.types.SearchRequest.ContentSearchSpec.ChunkSpec): Specifies the chunk spec to be returned from the search response. Only available if the @@ -798,12 +802,7 @@ class ContentSearchSpec(proto.Message): class SearchResultMode(proto.Enum): r"""Specifies the search result mode. If unspecified, the search result - mode is based on - [DataStore.DocumentProcessingConfig.chunking_config][]: - - - If [DataStore.DocumentProcessingConfig.chunking_config][] is - specified, it defaults to ``CHUNKS``. - - Otherwise, it defaults to ``DOCUMENTS``. + mode defaults to ``DOCUMENTS``. Values: SEARCH_RESULT_MODE_UNSPECIFIED (0): @@ -1360,9 +1359,8 @@ class SearchResult(proto.Message): of the searched [Document][google.cloud.discoveryengine.v1alpha.Document]. document (google.cloud.discoveryengine_v1alpha.types.Document): - The document data snippet in the search - response. Only fields that are marked as - retrievable are populated. + The document data snippet in the search response. Only + fields that are marked as ``retrievable`` are populated. chunk (google.cloud.discoveryengine_v1alpha.types.Chunk): The chunk data in the search response if the [SearchRequest.ContentSearchSpec.search_result_mode][google.cloud.discoveryengine.v1alpha.SearchRequest.ContentSearchSpec.search_result_mode] @@ -1398,8 +1396,8 @@ class Facet(proto.Message): Attributes: key (str): - The key for this facet. 
E.g., "colors" or "price". It - matches + The key for this facet. For example, ``"colors"`` or + ``"price"``. It matches [SearchRequest.FacetSpec.FacetKey.key][google.cloud.discoveryengine.v1alpha.SearchRequest.FacetSpec.FacetKey.key]. values (MutableSequence[google.cloud.discoveryengine_v1alpha.types.SearchResponse.Facet.FacetValue]): The facet values for this field. @@ -1482,10 +1480,11 @@ class RefinementAttribute(proto.Message): Attributes: attribute_key (str): - Attribute key used to refine the results e.g. 'movie_type'. + Attribute key used to refine the results. For example, + ``"movie_type"``. attribute_value (str): - Attribute value used to refine the results - e.g. 'drama'. + Attribute value used to refine the results. For example, + ``"drama"``. """ attribute_key: str = proto.Field( @@ -1510,7 +1509,7 @@ class RefinementAttribute(proto.Message): ) class Summary(proto.Message): - r"""Summary of the top N search result specified by the summary + r"""Summary of the top N search results specified by the summary spec. Attributes: diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/types/search_tuning_service.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/types/search_tuning_service.py index e4e4183209cc..2796f078aa54 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/types/search_tuning_service.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/types/search_tuning_service.py @@ -21,11 +21,16 @@ from google.rpc import status_pb2 # type: ignore import proto # type: ignore -from google.cloud.discoveryengine_v1alpha.types import import_config +from google.cloud.discoveryengine_v1alpha.types import ( + custom_tuning_model, + import_config, +) __protobuf__ = proto.module( package="google.cloud.discoveryengine.v1alpha", manifest={ + "ListCustomModelsRequest", + "ListCustomModelsResponse", "TrainCustomModelRequest", "TrainCustomModelResponse", "TrainCustomModelMetadata", @@ -33,6 +38,45 @@ ) +class ListCustomModelsRequest(proto.Message): + r"""Request message for + [SearchTuningService.ListCustomModels][google.cloud.discoveryengine.v1alpha.SearchTuningService.ListCustomModels] + method. + + Attributes: + data_store (str): + Required. The resource name of the parent Data Store, such + as + ``projects/*/locations/global/collections/default_collection/dataStores/default_data_store``. + This field is used to identify the data store where to fetch + the models from. + """ + + data_store: str = proto.Field( + proto.STRING, + number=1, + ) + + +class ListCustomModelsResponse(proto.Message): + r"""Response message for + [SearchTuningService.ListCustomModels][google.cloud.discoveryengine.v1alpha.SearchTuningService.ListCustomModels] + method. + + Attributes: + models (MutableSequence[google.cloud.discoveryengine_v1alpha.types.CustomTuningModel]): + List of custom tuning models. + """ + + models: MutableSequence[ + custom_tuning_model.CustomTuningModel + ] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=custom_tuning_model.CustomTuningModel, + ) + + class TrainCustomModelRequest(proto.Message): r"""Request message for [SearchTuningService.TrainCustomModel][google.cloud.discoveryengine.v1alpha.SearchTuningService.TrainCustomModel] @@ -59,6 +103,8 @@ class TrainCustomModelRequest(proto.Message): error_config (google.cloud.discoveryengine_v1alpha.types.ImportErrorConfig): The desired location of errors incurred during the data ingestion and training. 
+ model_id (str): + If not provided, a UUID will be generated. """ class GcsTrainingInput(proto.Message): @@ -137,6 +183,10 @@ class GcsTrainingInput(proto.Message): number=4, message=import_config.ImportErrorConfig, ) + model_id: str = proto.Field( + proto.STRING, + number=5, + ) class TrainCustomModelResponse(proto.Message): @@ -166,6 +216,9 @@ class TrainCustomModelResponse(proto.Message): - **ready**: The model is ready for serving. metrics (MutableMapping[str, float]): The metrics of the trained model. + model_name (str): + Fully qualified name of the + CustomTuningModel. """ error_samples: MutableSequence[status_pb2.Status] = proto.RepeatedField( @@ -187,6 +240,10 @@ class TrainCustomModelResponse(proto.Message): proto.DOUBLE, number=4, ) + model_name: str = proto.Field( + proto.STRING, + number=5, + ) class TrainCustomModelMetadata(proto.Message): diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/types/session.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/types/session.py index 9bac5bcd8a3f..b4969b963c95 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/types/session.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/types/session.py @@ -70,6 +70,8 @@ class Turn(proto.Message): answer (str): The resource name of the answer to the user query. + Only set if the answer generation (/answer API + call) happened in this turn. """ query: "Query" = proto.Field( diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/types/site_search_engine.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/types/site_search_engine.py index 28a0ed747f87..59745db21bf1 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/types/site_search_engine.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/types/site_search_engine.py @@ -75,6 +75,8 @@ class TargetSite(proto.Message): generated_uri_pattern (str): Output only. This is system-generated based on the provided_uri_pattern. + root_domain_uri (str): + Output only. Root domain of the provided_uri_pattern. site_verification_info (google.cloud.discoveryengine_v1alpha.types.SiteVerificationInfo): Output only. Site ownership and validity verification status. @@ -188,6 +190,10 @@ class QuotaFailure(proto.Message): proto.STRING, number=4, ) + root_domain_uri: str = proto.Field( + proto.STRING, + number=10, + ) site_verification_info: "SiteVerificationInfo" = proto.Field( proto.MESSAGE, number=7, diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/types/user_event.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/types/user_event.py index 5a84a19fb353..4deac76799fd 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/types/user_event.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/types/user_event.py @@ -40,7 +40,7 @@ class UserEvent(proto.Message): r"""UserEvent captures all metadata information Discovery Engine - API needs to know about how end users interact with customers' + API needs to know about how end users interact with your website. Attributes: @@ -89,6 +89,28 @@ class UserEvent(proto.Message): to use Google Analytics `Client ID `__ for this field. 
+ engine (str): + The [Engine][google.cloud.discoveryengine.v1alpha.Engine] + resource name, in the form of + ``projects/{project}/locations/{location}/collections/{collection_id}/engines/{engine_id}``. + + Optional. Only required for + [Engine][google.cloud.discoveryengine.v1alpha.Engine] + produced user events. For example, user events from blended + search. + data_store (str): + The + [DataStore][google.cloud.discoveryengine.v1alpha.DataStore] + resource full name, of the form + ``projects/{project}/locations/{location}/collections/{collection_id}/dataStores/{data_store_id}``. + + Optional. Only required for user events whose data store + can't by determined by + [UserEvent.engine][google.cloud.discoveryengine.v1alpha.UserEvent.engine] + or + [UserEvent.documents][google.cloud.discoveryengine.v1alpha.UserEvent.documents]. + If data store is set in the parent of write/import/collect + user event requests, this field can be omitted. event_time (google.protobuf.timestamp_pb2.Timestamp): Only required for [UserEventService.ImportUserEvents][google.cloud.discoveryengine.v1alpha.UserEventService.ImportUserEvents] @@ -217,8 +239,7 @@ class UserEvent(proto.Message): A list of identifiers for the independent experiment groups this user event belongs to. This is used to distinguish between user events - associated with different experiment setups on - the customer end. + associated with different experiment setups. promotion_ids (MutableSequence[str]): The promotion IDs if this is an event associated with promotions. Currently, this @@ -264,6 +285,14 @@ class UserEvent(proto.Message): proto.STRING, number=2, ) + engine: str = proto.Field( + proto.STRING, + number=19, + ) + data_store: str = proto.Field( + proto.STRING, + number=20, + ) event_time: timestamp_pb2.Timestamp = proto.Field( proto.MESSAGE, number=3, @@ -634,8 +663,8 @@ class DocumentInfo(proto.Message): Quantity of the Document associated with the user event. Defaults to 1. - For example, this field will be 2 if two quantities of the - same Document are involved in a ``add-to-cart`` event. + For example, this field is 2 if two quantities of the same + Document are involved in a ``add-to-cart`` event. Required for events of the following event types: diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/types/user_event_service.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/types/user_event_service.py index 82b2ed553028..d28cef4cee43 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/types/user_event_service.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/types/user_event_service.py @@ -37,12 +37,25 @@ class WriteUserEventRequest(proto.Message): Attributes: parent (str): - Required. The parent DataStore resource name, such as + Required. The parent resource name. If the write user event + action is applied in + [DataStore][google.cloud.discoveryengine.v1alpha.DataStore] + level, the format is: ``projects/{project}/locations/{location}/collections/{collection}/dataStores/{data_store}``. + If the write user event action is applied in [Location][] + level, for example, the event with + [Document][google.cloud.discoveryengine.v1alpha.Document] + across multiple + [DataStore][google.cloud.discoveryengine.v1alpha.DataStore], + the format is: ``projects/{project}/locations/{location}``. user_event (google.cloud.discoveryengine_v1alpha.types.UserEvent): Required. User event to write. 
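As a rough illustration of how the new ``UserEvent.engine`` field above and the ``write_async`` flag on ``WriteUserEventRequest`` (described below) might be used together, consider the sketch that follows. All resource names and identifiers are placeholders, and the event is kept minimal.

```python
from google.cloud import discoveryengine_v1alpha as discoveryengine


def write_blended_search_event() -> None:
    """Sketch: write a user event attributed to an Engine (blended search)."""
    client = discoveryengine.UserEventServiceClient()

    user_event = discoveryengine.UserEvent(
        event_type="search",
        user_pseudo_id="anonymous-visitor-123",  # placeholder identifier
        # New in this change: attribute the event to an Engine rather than
        # (or in addition to) a single DataStore.
        engine=(
            "projects/PROJECT/locations/global/collections/default_collection"
            "/engines/my-engine"  # placeholder
        ),
    )

    request = discoveryengine.WriteUserEventRequest(
        # Location-level parent, per the updated parent docstring.
        parent="projects/PROJECT/locations/global",
        user_event=user_event,
        # New field: write asynchronously after validation, responding
        # without waiting for the write to complete.
        write_async=True,
    )

    written = client.write_user_event(request=request)
    print(written)


if __name__ == "__main__":
    write_blended_search_event()
```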
This field is a member of `oneof`_ ``_user_event``. + write_async (bool): + If set to true, the user event is written + asynchronously after validation, and the API + responds without waiting for the write. """ parent: str = proto.Field( @@ -55,6 +68,10 @@ class WriteUserEventRequest(proto.Message): optional=True, message=gcd_user_event.UserEvent, ) + write_async: bool = proto.Field( + proto.BOOL, + number=3, + ) class CollectUserEventRequest(proto.Message): diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/__init__.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/__init__.py index 880db4a719b5..450901aa5f10 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/__init__.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/__init__.py @@ -22,6 +22,7 @@ CompletionServiceAsyncClient, CompletionServiceClient, ) +from .services.control_service import ControlServiceAsyncClient, ControlServiceClient from .services.conversational_search_service import ( ConversationalSearchServiceAsyncClient, ConversationalSearchServiceClient, @@ -36,6 +37,7 @@ GroundedGenerationServiceAsyncClient, GroundedGenerationServiceClient, ) +from .services.project_service import ProjectServiceAsyncClient, ProjectServiceClient from .services.rank_service import RankServiceAsyncClient, RankServiceClient from .services.recommendation_service import ( RecommendationServiceAsyncClient, @@ -68,11 +70,21 @@ Interval, SearchAddOn, SearchTier, + SearchUseCase, SolutionType, UserInfo, ) from .types.completion import SuggestionDenyListEntry from .types.completion_service import CompleteQueryRequest, CompleteQueryResponse +from .types.control import Condition, Control +from .types.control_service import ( + CreateControlRequest, + DeleteControlRequest, + GetControlRequest, + ListControlsRequest, + ListControlsResponse, + UpdateControlRequest, +) from .types.conversation import ( Conversation, ConversationContext, @@ -99,6 +111,7 @@ UpdateConversationRequest, UpdateSessionRequest, ) +from .types.custom_tuning_model import CustomTuningModel from .types.data_store import DataStore from .types.data_store_service import ( CreateDataStoreMetadata, @@ -162,6 +175,8 @@ ImportUserEventsResponse, SpannerSource, ) +from .types.project import Project +from .types.project_service import ProvisionProjectMetadata, ProvisionProjectRequest from .types.purge_config import ( PurgeDocumentsMetadata, PurgeDocumentsRequest, @@ -186,6 +201,8 @@ ) from .types.search_service import SearchRequest, SearchResponse from .types.search_tuning_service import ( + ListCustomModelsRequest, + ListCustomModelsResponse, TrainCustomModelMetadata, TrainCustomModelRequest, TrainCustomModelResponse, @@ -242,11 +259,13 @@ __all__ = ( "CompletionServiceAsyncClient", + "ControlServiceAsyncClient", "ConversationalSearchServiceAsyncClient", "DataStoreServiceAsyncClient", "DocumentServiceAsyncClient", "EngineServiceAsyncClient", "GroundedGenerationServiceAsyncClient", + "ProjectServiceAsyncClient", "RankServiceAsyncClient", "RecommendationServiceAsyncClient", "SchemaServiceAsyncClient", @@ -276,12 +295,16 @@ "CompleteQueryResponse", "CompletionInfo", "CompletionServiceClient", + "Condition", + "Control", + "ControlServiceClient", "Conversation", "ConversationContext", "ConversationMessage", "ConversationalSearchServiceClient", "ConverseConversationRequest", "ConverseConversationResponse", + "CreateControlRequest", "CreateConversationRequest", 
"CreateDataStoreMetadata", "CreateDataStoreRequest", @@ -294,8 +317,10 @@ "CreateTargetSiteMetadata", "CreateTargetSiteRequest", "CustomAttribute", + "CustomTuningModel", "DataStore", "DataStoreServiceClient", + "DeleteControlRequest", "DeleteConversationRequest", "DeleteDataStoreMetadata", "DeleteDataStoreRequest", @@ -328,6 +353,7 @@ "FirestoreSource", "GcsSource", "GetAnswerRequest", + "GetControlRequest", "GetConversationRequest", "GetDataStoreRequest", "GetDocumentRequest", @@ -351,8 +377,12 @@ "ImportUserEventsResponse", "IndustryVertical", "Interval", + "ListControlsRequest", + "ListControlsResponse", "ListConversationsRequest", "ListConversationsResponse", + "ListCustomModelsRequest", + "ListCustomModelsResponse", "ListDataStoresRequest", "ListDataStoresResponse", "ListDocumentsRequest", @@ -371,6 +401,10 @@ "PageInfo", "PanelInfo", "PauseEngineRequest", + "Project", + "ProjectServiceClient", + "ProvisionProjectMetadata", + "ProvisionProjectRequest", "PurgeDocumentsMetadata", "PurgeDocumentsRequest", "PurgeDocumentsResponse", @@ -399,6 +433,7 @@ "SearchServiceClient", "SearchTier", "SearchTuningServiceClient", + "SearchUseCase", "ServingConfig", "ServingConfigServiceClient", "Session", @@ -417,6 +452,7 @@ "TuneEngineMetadata", "TuneEngineRequest", "TuneEngineResponse", + "UpdateControlRequest", "UpdateConversationRequest", "UpdateDataStoreRequest", "UpdateDocumentRequest", diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/gapic_metadata.json b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/gapic_metadata.json index 739ee7f9b30f..d039c25b28ac 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/gapic_metadata.json +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/gapic_metadata.json @@ -69,6 +69,100 @@ } } }, + "ControlService": { + "clients": { + "grpc": { + "libraryClient": "ControlServiceClient", + "rpcs": { + "CreateControl": { + "methods": [ + "create_control" + ] + }, + "DeleteControl": { + "methods": [ + "delete_control" + ] + }, + "GetControl": { + "methods": [ + "get_control" + ] + }, + "ListControls": { + "methods": [ + "list_controls" + ] + }, + "UpdateControl": { + "methods": [ + "update_control" + ] + } + } + }, + "grpc-async": { + "libraryClient": "ControlServiceAsyncClient", + "rpcs": { + "CreateControl": { + "methods": [ + "create_control" + ] + }, + "DeleteControl": { + "methods": [ + "delete_control" + ] + }, + "GetControl": { + "methods": [ + "get_control" + ] + }, + "ListControls": { + "methods": [ + "list_controls" + ] + }, + "UpdateControl": { + "methods": [ + "update_control" + ] + } + } + }, + "rest": { + "libraryClient": "ControlServiceClient", + "rpcs": { + "CreateControl": { + "methods": [ + "create_control" + ] + }, + "DeleteControl": { + "methods": [ + "delete_control" + ] + }, + "GetControl": { + "methods": [ + "get_control" + ] + }, + "ListControls": { + "methods": [ + "list_controls" + ] + }, + "UpdateControl": { + "methods": [ + "update_control" + ] + } + } + } + } + }, "ConversationalSearchService": { "clients": { "grpc": { @@ -674,6 +768,40 @@ } } }, + "ProjectService": { + "clients": { + "grpc": { + "libraryClient": "ProjectServiceClient", + "rpcs": { + "ProvisionProject": { + "methods": [ + "provision_project" + ] + } + } + }, + "grpc-async": { + "libraryClient": "ProjectServiceAsyncClient", + "rpcs": { + "ProvisionProject": { + "methods": [ + "provision_project" + ] + } + } + }, + "rest": { + "libraryClient": 
"ProjectServiceClient", + "rpcs": { + "ProvisionProject": { + "methods": [ + "provision_project" + ] + } + } + } + } + }, "RankService": { "clients": { "grpc": { @@ -875,6 +1003,11 @@ "grpc": { "libraryClient": "SearchTuningServiceClient", "rpcs": { + "ListCustomModels": { + "methods": [ + "list_custom_models" + ] + }, "TrainCustomModel": { "methods": [ "train_custom_model" @@ -885,6 +1018,11 @@ "grpc-async": { "libraryClient": "SearchTuningServiceAsyncClient", "rpcs": { + "ListCustomModels": { + "methods": [ + "list_custom_models" + ] + }, "TrainCustomModel": { "methods": [ "train_custom_model" @@ -895,6 +1033,11 @@ "rest": { "libraryClient": "SearchTuningServiceClient", "rpcs": { + "ListCustomModels": { + "methods": [ + "list_custom_models" + ] + }, "TrainCustomModel": { "methods": [ "train_custom_model" diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/completion_service/async_client.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/completion_service/async_client.py index 7d9103f0702a..e8150f0dcfe6 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/completion_service/async_client.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/completion_service/async_client.py @@ -688,6 +688,63 @@ async def get_operation( # Done; return the response. return response + async def cancel_operation( + self, + request: Optional[operations_pb2.CancelOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Starts asynchronous cancellation on a long-running operation. + + The server makes a best effort to cancel the operation, but success + is not guaranteed. If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.CancelOperationRequest`): + The request object. Request message for + `CancelOperation` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.CancelOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.cancel_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. 
+ await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + async def __aenter__(self) -> "CompletionServiceAsyncClient": return self diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/completion_service/client.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/completion_service/client.py index 9411f347566a..1c02bb20c456 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/completion_service/client.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/completion_service/client.py @@ -1120,6 +1120,63 @@ def get_operation( # Done; return the response. return response + def cancel_operation( + self, + request: Optional[operations_pb2.CancelOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Starts asynchronous cancellation on a long-running operation. + + The server makes a best effort to cancel the operation, but success + is not guaranteed. If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.CancelOperationRequest`): + The request object. Request message for + `CancelOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.CancelOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.cancel_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. 
+ rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( gapic_version=package_version.__version__ diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/completion_service/transports/base.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/completion_service/transports/base.py index 07dd919c8f9a..20de404139c0 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/completion_service/transports/base.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/completion_service/transports/base.py @@ -214,6 +214,12 @@ def get_operation( ]: raise NotImplementedError() + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None,]: + raise NotImplementedError() + @property def kind(self) -> str: raise NotImplementedError() diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/completion_service/transports/grpc.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/completion_service/transports/grpc.py index 01447f0be2c0..22924dabf918 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/completion_service/transports/grpc.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/completion_service/transports/grpc.py @@ -353,6 +353,23 @@ def purge_suggestion_deny_list_entries( def close(self): self.grpc_channel.close() + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None]: + r"""Return a callable for the cancel_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "cancel_operation" not in self._stubs: + self._stubs["cancel_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/CancelOperation", + request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["cancel_operation"] + @property def get_operation( self, diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/completion_service/transports/grpc_asyncio.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/completion_service/transports/grpc_asyncio.py index fbba80e806ae..bb5a35a057c1 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/completion_service/transports/grpc_asyncio.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/completion_service/transports/grpc_asyncio.py @@ -381,6 +381,23 @@ def _prep_wrapped_messages(self, client_info): def close(self): return self.grpc_channel.close() + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None]: + r"""Return a callable for the cancel_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "cancel_operation" not in self._stubs: + self._stubs["cancel_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/CancelOperation", + request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["cancel_operation"] + @property def get_operation( self, diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/completion_service/transports/rest.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/completion_service/transports/rest.py index 5402ab6c0835..18cca7b93783 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/completion_service/transports/rest.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/completion_service/transports/rest.py @@ -179,6 +179,27 @@ def post_purge_suggestion_deny_list_entries( """ return response + def pre_cancel_operation( + self, + request: operations_pb2.CancelOperationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[operations_pb2.CancelOperationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for cancel_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the CompletionService server. + """ + return request, metadata + + def post_cancel_operation(self, response: None) -> None: + """Post-rpc interceptor for cancel_operation + + Override in a subclass to manipulate the response + after it is returned by the CompletionService server but before + it is returned to user code. + """ + return response + def pre_get_operation( self, request: operations_pb2.GetOperationRequest, @@ -333,6 +354,18 @@ def operations_client(self) -> operations_v1.AbstractOperationsClient: # Only create a new client if we do not already have one. if self._operations_client is None: http_options: Dict[str, List[Dict[str, str]]] = { + "google.longrunning.Operations.CancelOperation": [ + { + "method": "post", + "uri": "/v1beta/{name=projects/*/locations/*/collections/*/dataStores/*/branches/*/operations/*}:cancel", + "body": "*", + }, + { + "method": "post", + "uri": "/v1beta/{name=projects/*/locations/*/dataStores/*/branches/*/operations/*}:cancel", + "body": "*", + }, + ], "google.longrunning.Operations.GetOperation": [ { "method": "get", @@ -808,6 +841,76 @@ def purge_suggestion_deny_list_entries( # In C++ this would require a dynamic_cast return self._PurgeSuggestionDenyListEntries(self._session, self._host, self._interceptor) # type: ignore + @property + def cancel_operation(self): + return self._CancelOperation(self._session, self._host, self._interceptor) # type: ignore + + class _CancelOperation(CompletionServiceRestStub): + def __call__( + self, + request: operations_pb2.CancelOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Call the cancel operation method over HTTP. + + Args: + request (operations_pb2.CancelOperationRequest): + The request object for CancelOperation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1beta/{name=projects/*/locations/*/collections/*/dataStores/*/branches/*/operations/*}:cancel", + "body": "*", + }, + { + "method": "post", + "uri": "/v1beta/{name=projects/*/locations/*/dataStores/*/branches/*/operations/*}:cancel", + "body": "*", + }, + ] + + request, metadata = self._interceptor.pre_cancel_operation( + request, metadata + ) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + body = json.dumps(transcoded_request["body"]) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + return self._interceptor.post_cancel_operation(None) + @property def get_operation(self): return self._GetOperation(self._session, self._host, self._interceptor) # type: ignore diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/control_service/__init__.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/control_service/__init__.py new file mode 100644 index 000000000000..382ea5d38bcc --- /dev/null +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/control_service/__init__.py @@ -0,0 +1,22 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from .async_client import ControlServiceAsyncClient +from .client import ControlServiceClient + +__all__ = ( + "ControlServiceClient", + "ControlServiceAsyncClient", +) diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/control_service/async_client.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/control_service/async_client.py new file mode 100644 index 000000000000..922f4a1aa0f4 --- /dev/null +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/control_service/async_client.py @@ -0,0 +1,1066 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +import functools +import re +from typing import ( + Callable, + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, +) + +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry_async as retries +from google.api_core.client_options import ClientOptions +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.discoveryengine_v1beta import gapic_version as package_version + +try: + OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.AsyncRetry, object, None] # type: ignore + +from google.cloud.location import locations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore + +from google.cloud.discoveryengine_v1beta.services.control_service import pagers +from google.cloud.discoveryengine_v1beta.types import common +from google.cloud.discoveryengine_v1beta.types import control +from google.cloud.discoveryengine_v1beta.types import control as gcd_control +from google.cloud.discoveryengine_v1beta.types import control_service + +from .client import ControlServiceClient +from .transports.base import DEFAULT_CLIENT_INFO, ControlServiceTransport +from .transports.grpc_asyncio import ControlServiceGrpcAsyncIOTransport + + +class ControlServiceAsyncClient: + """Service for performing CRUD operations on Controls. + Controls allow for custom logic to be implemented in the serving + path. Controls need to be attached to a Serving Config to be + considered during a request. + """ + + _client: ControlServiceClient + + # Copy defaults from the synchronous client for use here. + # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. 
+ DEFAULT_ENDPOINT = ControlServiceClient.DEFAULT_ENDPOINT + DEFAULT_MTLS_ENDPOINT = ControlServiceClient.DEFAULT_MTLS_ENDPOINT + _DEFAULT_ENDPOINT_TEMPLATE = ControlServiceClient._DEFAULT_ENDPOINT_TEMPLATE + _DEFAULT_UNIVERSE = ControlServiceClient._DEFAULT_UNIVERSE + + control_path = staticmethod(ControlServiceClient.control_path) + parse_control_path = staticmethod(ControlServiceClient.parse_control_path) + data_store_path = staticmethod(ControlServiceClient.data_store_path) + parse_data_store_path = staticmethod(ControlServiceClient.parse_data_store_path) + common_billing_account_path = staticmethod( + ControlServiceClient.common_billing_account_path + ) + parse_common_billing_account_path = staticmethod( + ControlServiceClient.parse_common_billing_account_path + ) + common_folder_path = staticmethod(ControlServiceClient.common_folder_path) + parse_common_folder_path = staticmethod( + ControlServiceClient.parse_common_folder_path + ) + common_organization_path = staticmethod( + ControlServiceClient.common_organization_path + ) + parse_common_organization_path = staticmethod( + ControlServiceClient.parse_common_organization_path + ) + common_project_path = staticmethod(ControlServiceClient.common_project_path) + parse_common_project_path = staticmethod( + ControlServiceClient.parse_common_project_path + ) + common_location_path = staticmethod(ControlServiceClient.common_location_path) + parse_common_location_path = staticmethod( + ControlServiceClient.parse_common_location_path + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + ControlServiceAsyncClient: The constructed client. + """ + return ControlServiceClient.from_service_account_info.__func__(ControlServiceAsyncClient, info, *args, **kwargs) # type: ignore + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + ControlServiceAsyncClient: The constructed client. + """ + return ControlServiceClient.from_service_account_file.__func__(ControlServiceAsyncClient, filename, *args, **kwargs) # type: ignore + + from_service_account_json = from_service_account_file + + @classmethod + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[ClientOptions] = None + ): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. 
+ (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + return ControlServiceClient.get_mtls_endpoint_and_cert_source(client_options) # type: ignore + + @property + def transport(self) -> ControlServiceTransport: + """Returns the transport used by the client instance. + + Returns: + ControlServiceTransport: The transport used by the client instance. + """ + return self._client.transport + + @property + def api_endpoint(self): + """Return the API endpoint used by the client instance. + + Returns: + str: The API endpoint used by the client instance. + """ + return self._client._api_endpoint + + @property + def universe_domain(self) -> str: + """Return the universe domain used by the client instance. + + Returns: + str: The universe domain used + by the client instance. + """ + return self._client._universe_domain + + get_transport_class = functools.partial( + type(ControlServiceClient).get_transport_class, type(ControlServiceClient) + ) + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[ + Union[str, ControlServiceTransport, Callable[..., ControlServiceTransport]] + ] = "grpc_asyncio", + client_options: Optional[ClientOptions] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the control service async client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Optional[Union[str,ControlServiceTransport,Callable[..., ControlServiceTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport to use. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the ControlServiceTransport constructor. + If set to None, a transport is chosen automatically. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): + Custom options for the client. + + 1. The ``api_endpoint`` property can be used to override the + default endpoint provided by the client when ``transport`` is + not explicitly provided. Only if this property is not set and + ``transport`` was not explicitly provided, the endpoint is + determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment + variable, which have one of the following values: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto-switch to the + default mTLS endpoint if client certificate is present; this is + the default value). + + 2. 
If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide a client certificate for mTLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + 3. The ``universe_domain`` property can be used to override the + default "googleapis.com" universe. Note that ``api_endpoint`` + property still takes precedence; and ``universe_domain`` is + currently not supported for mTLS. + + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + """ + self._client = ControlServiceClient( + credentials=credentials, + transport=transport, + client_options=client_options, + client_info=client_info, + ) + + async def create_control( + self, + request: Optional[Union[control_service.CreateControlRequest, dict]] = None, + *, + parent: Optional[str] = None, + control: Optional[gcd_control.Control] = None, + control_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> gcd_control.Control: + r"""Creates a Control. + + By default 1000 controls are allowed for a data store. A request + can be submitted to adjust this limit. If the + [Control][google.cloud.discoveryengine.v1beta.Control] to create + already exists, an ALREADY_EXISTS error is returned. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import discoveryengine_v1beta + + async def sample_create_control(): + # Create a client + client = discoveryengine_v1beta.ControlServiceAsyncClient() + + # Initialize request argument(s) + control = discoveryengine_v1beta.Control() + control.boost_action.boost = 0.551 + control.boost_action.filter = "filter_value" + control.boost_action.data_store = "data_store_value" + control.display_name = "display_name_value" + control.solution_type = "SOLUTION_TYPE_GENERATIVE_CHAT" + + request = discoveryengine_v1beta.CreateControlRequest( + parent="parent_value", + control=control, + control_id="control_id_value", + ) + + # Make the request + response = await client.create_control(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.discoveryengine_v1beta.types.CreateControlRequest, dict]]): + The request object. Request for CreateControl method. + parent (:class:`str`): + Required. Full resource name of parent data store. + Format: + ``projects/{project_number}/locations/{location_id}/collections/{collection_id}/dataStores/{data_store_id}`` + or + ``projects/{project_number}/locations/{location_id}/collections/{collection_id}/engines/{engine_id}``. 
+ + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + control (:class:`google.cloud.discoveryengine_v1beta.types.Control`): + Required. The Control to create. + This corresponds to the ``control`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + control_id (:class:`str`): + Required. The ID to use for the Control, which will + become the final component of the Control's resource + name. + + This value must be within 1-63 characters. Valid + characters are /[a-z][0-9]-_/. + + This corresponds to the ``control_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.discoveryengine_v1beta.types.Control: + Defines a conditioned behavior to employ during serving. + Must be attached to a + [ServingConfig][google.cloud.discoveryengine.v1beta.ServingConfig] + to be considered at serving time. Permitted actions + dependent on SolutionType. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, control, control_id]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, control_service.CreateControlRequest): + request = control_service.CreateControlRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if control is not None: + request.control = control + if control_id is not None: + request.control_id = control_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.create_control + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def delete_control( + self, + request: Optional[Union[control_service.DeleteControlRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a Control. + + If the [Control][google.cloud.discoveryengine.v1beta.Control] to + delete does not exist, a NOT_FOUND error is returned. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import discoveryengine_v1beta + + async def sample_delete_control(): + # Create a client + client = discoveryengine_v1beta.ControlServiceAsyncClient() + + # Initialize request argument(s) + request = discoveryengine_v1beta.DeleteControlRequest( + name="name_value", + ) + + # Make the request + await client.delete_control(request=request) + + Args: + request (Optional[Union[google.cloud.discoveryengine_v1beta.types.DeleteControlRequest, dict]]): + The request object. Request for DeleteControl method. + name (:class:`str`): + Required. The resource name of the Control to delete. + Format: + ``projects/{project_number}/locations/{location_id}/collections/{collection_id}/dataStores/{data_store_id}/controls/{control_id}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, control_service.DeleteControlRequest): + request = control_service.DeleteControlRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.delete_control + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def update_control( + self, + request: Optional[Union[control_service.UpdateControlRequest, dict]] = None, + *, + control: Optional[gcd_control.Control] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> gcd_control.Control: + r"""Updates a Control. + + [Control][google.cloud.discoveryengine.v1beta.Control] action + type cannot be changed. If the + [Control][google.cloud.discoveryengine.v1beta.Control] to update + does not exist, a NOT_FOUND error is returned. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import discoveryengine_v1beta + + async def sample_update_control(): + # Create a client + client = discoveryengine_v1beta.ControlServiceAsyncClient() + + # Initialize request argument(s) + control = discoveryengine_v1beta.Control() + control.boost_action.boost = 0.551 + control.boost_action.filter = "filter_value" + control.boost_action.data_store = "data_store_value" + control.display_name = "display_name_value" + control.solution_type = "SOLUTION_TYPE_GENERATIVE_CHAT" + + request = discoveryengine_v1beta.UpdateControlRequest( + control=control, + ) + + # Make the request + response = await client.update_control(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.discoveryengine_v1beta.types.UpdateControlRequest, dict]]): + The request object. Request for UpdateControl method. + control (:class:`google.cloud.discoveryengine_v1beta.types.Control`): + Required. The Control to update. + This corresponds to the ``control`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): + Optional. Indicates which fields in the provided + [Control][google.cloud.discoveryengine.v1beta.Control] + to update. The following are NOT supported: + + - [Control.name][google.cloud.discoveryengine.v1beta.Control.name] + - [Control.solution_type][google.cloud.discoveryengine.v1beta.Control.solution_type] + + If not set or empty, all supported fields are updated. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.discoveryengine_v1beta.types.Control: + Defines a conditioned behavior to employ during serving. + Must be attached to a + [ServingConfig][google.cloud.discoveryengine.v1beta.ServingConfig] + to be considered at serving time. Permitted actions + dependent on SolutionType. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([control, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, control_service.UpdateControlRequest): + request = control_service.UpdateControlRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if control is not None: + request.control = control + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.update_control + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("control.name", request.control.name),) + ), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_control( + self, + request: Optional[Union[control_service.GetControlRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> control.Control: + r"""Gets a Control. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import discoveryengine_v1beta + + async def sample_get_control(): + # Create a client + client = discoveryengine_v1beta.ControlServiceAsyncClient() + + # Initialize request argument(s) + request = discoveryengine_v1beta.GetControlRequest( + name="name_value", + ) + + # Make the request + response = await client.get_control(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.discoveryengine_v1beta.types.GetControlRequest, dict]]): + The request object. Request for GetControl method. + name (:class:`str`): + Required. The resource name of the Control to get. + Format: + ``projects/{project_number}/locations/{location_id}/collections/{collection_id}/dataStores/{data_store_id}/controls/{control_id}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.discoveryengine_v1beta.types.Control: + Defines a conditioned behavior to employ during serving. + Must be attached to a + [ServingConfig][google.cloud.discoveryengine.v1beta.ServingConfig] + to be considered at serving time. Permitted actions + dependent on SolutionType. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." 
+ ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, control_service.GetControlRequest): + request = control_service.GetControlRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.get_control + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_controls( + self, + request: Optional[Union[control_service.ListControlsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListControlsAsyncPager: + r"""Lists all Controls by their parent + [DataStore][google.cloud.discoveryengine.v1beta.DataStore]. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import discoveryengine_v1beta + + async def sample_list_controls(): + # Create a client + client = discoveryengine_v1beta.ControlServiceAsyncClient() + + # Initialize request argument(s) + request = discoveryengine_v1beta.ListControlsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_controls(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.discoveryengine_v1beta.types.ListControlsRequest, dict]]): + The request object. Request for ListControls method. + parent (:class:`str`): + Required. The data store resource name. Format: + ``projects/{project_number}/locations/{location_id}/collections/{collection_id}/dataStores/{data_store_id}`` + or + ``projects/{project_number}/locations/{location_id}/collections/{collection_id}/engines/{engine_id}``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.discoveryengine_v1beta.services.control_service.pagers.ListControlsAsyncPager: + Response for ListControls method. + + Iterating over this object will yield + results and resolve additional pages + automatically. 
+ + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, control_service.ListControlsRequest): + request = control_service.ListControlsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.list_controls + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListControlsAsyncPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_operations( + self, + request: Optional[operations_pb2.ListOperationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Lists operations that match the specified filter in the request. + + Args: + request (:class:`~.operations_pb2.ListOperationsRequest`): + The request object. Request message for + `ListOperations` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.ListOperationsResponse: + Response message for ``ListOperations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.ListOperationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_operations, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def get_operation( + self, + request: Optional[operations_pb2.GetOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. + + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def cancel_operation( + self, + request: Optional[operations_pb2.CancelOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Starts asynchronous cancellation on a long-running operation. + + The server makes a best effort to cancel the operation, but success + is not guaranteed. If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.CancelOperationRequest`): + The request object. Request message for + `CancelOperation` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.CancelOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.cancel_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. 
+ await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def __aenter__(self) -> "ControlServiceAsyncClient": + return self + + async def __aexit__(self, exc_type, exc, tb): + await self.transport.close() + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("ControlServiceAsyncClient",) diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/control_service/client.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/control_service/client.py new file mode 100644 index 000000000000..d327c68ea847 --- /dev/null +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/control_service/client.py @@ -0,0 +1,1500 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +import os +import re +from typing import ( + Callable, + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, + cast, +) +import warnings + +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.discoveryengine_v1beta import gapic_version as package_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + +from google.cloud.location import locations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore + +from google.cloud.discoveryengine_v1beta.services.control_service import pagers +from google.cloud.discoveryengine_v1beta.types import common +from google.cloud.discoveryengine_v1beta.types import control +from google.cloud.discoveryengine_v1beta.types import control as gcd_control +from google.cloud.discoveryengine_v1beta.types import control_service + +from .transports.base import DEFAULT_CLIENT_INFO, ControlServiceTransport +from .transports.grpc import ControlServiceGrpcTransport +from .transports.grpc_asyncio import ControlServiceGrpcAsyncIOTransport +from .transports.rest import ControlServiceRestTransport + + +class ControlServiceClientMeta(type): + """Metaclass for the ControlService client. + + This provides class-level methods for building and retrieving + support objects (e.g. 
+ transport) without polluting the client instance + objects. + """ + + _transport_registry = ( + OrderedDict() + ) # type: Dict[str, Type[ControlServiceTransport]] + _transport_registry["grpc"] = ControlServiceGrpcTransport + _transport_registry["grpc_asyncio"] = ControlServiceGrpcAsyncIOTransport + _transport_registry["rest"] = ControlServiceRestTransport + + def get_transport_class( + cls, + label: Optional[str] = None, + ) -> Type[ControlServiceTransport]: + """Returns an appropriate transport class. + + Args: + label: The name of the desired transport. If none is + provided, then the first transport in the registry is used. + + Returns: + The transport class to use. + """ + # If a specific transport is requested, return that one. + if label: + return cls._transport_registry[label] + + # No transport is requested; return the default (that is, the first one + # in the dictionary). + return next(iter(cls._transport_registry.values())) + + +class ControlServiceClient(metaclass=ControlServiceClientMeta): + """Service for performing CRUD operations on Controls. + Controls allow for custom logic to be implemented in the serving + path. Controls need to be attached to a Serving Config to be + considered during a request. + """ + + @staticmethod + def _get_default_mtls_endpoint(api_endpoint): + """Converts api endpoint to mTLS endpoint. + + Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to + "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. + Args: + api_endpoint (Optional[str]): the api endpoint to convert. + Returns: + str: converted mTLS api endpoint. + """ + if not api_endpoint: + return api_endpoint + + mtls_endpoint_re = re.compile( + r"(?P<name>[^.]+)(?P<mtls>\.mtls)?(?P<sandbox>\.sandbox)?(?P<googledomain>\.googleapis\.com)?" + ) + + m = mtls_endpoint_re.match(api_endpoint) + name, mtls, sandbox, googledomain = m.groups() + if mtls or not googledomain: + return api_endpoint + + if sandbox: + return api_endpoint.replace( + "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" + ) + + return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") + + # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. + DEFAULT_ENDPOINT = "discoveryengine.googleapis.com" + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore + DEFAULT_ENDPOINT + ) + + _DEFAULT_ENDPOINT_TEMPLATE = "discoveryengine.{UNIVERSE_DOMAIN}" + _DEFAULT_UNIVERSE = "googleapis.com" + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + ControlServiceClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + ControlServiceClient: The constructed client.
+ """ + credentials = service_account.Credentials.from_service_account_file(filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> ControlServiceTransport: + """Returns the transport used by the client instance. + + Returns: + ControlServiceTransport: The transport used by the client + instance. + """ + return self._transport + + @staticmethod + def control_path( + project: str, + location: str, + data_store: str, + control: str, + ) -> str: + """Returns a fully-qualified control string.""" + return "projects/{project}/locations/{location}/dataStores/{data_store}/controls/{control}".format( + project=project, + location=location, + data_store=data_store, + control=control, + ) + + @staticmethod + def parse_control_path(path: str) -> Dict[str, str]: + """Parses a control path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/dataStores/(?P.+?)/controls/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + + @staticmethod + def data_store_path( + project: str, + location: str, + data_store: str, + ) -> str: + """Returns a fully-qualified data_store string.""" + return "projects/{project}/locations/{location}/dataStores/{data_store}".format( + project=project, + location=location, + data_store=data_store, + ) + + @staticmethod + def parse_data_store_path(path: str) -> Dict[str, str]: + """Parses a data_store path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/dataStores/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + + @staticmethod + def common_billing_account_path( + billing_account: str, + ) -> str: + """Returns a fully-qualified billing_account string.""" + return "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + + @staticmethod + def parse_common_billing_account_path(path: str) -> Dict[str, str]: + """Parse a billing_account path into its component segments.""" + m = re.match(r"^billingAccounts/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_folder_path( + folder: str, + ) -> str: + """Returns a fully-qualified folder string.""" + return "folders/{folder}".format( + folder=folder, + ) + + @staticmethod + def parse_common_folder_path(path: str) -> Dict[str, str]: + """Parse a folder path into its component segments.""" + m = re.match(r"^folders/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_organization_path( + organization: str, + ) -> str: + """Returns a fully-qualified organization string.""" + return "organizations/{organization}".format( + organization=organization, + ) + + @staticmethod + def parse_common_organization_path(path: str) -> Dict[str, str]: + """Parse a organization path into its component segments.""" + m = re.match(r"^organizations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_project_path( + project: str, + ) -> str: + """Returns a fully-qualified project string.""" + return "projects/{project}".format( + project=project, + ) + + @staticmethod + def parse_common_project_path(path: str) -> Dict[str, str]: + """Parse a project path into its component segments.""" + m = re.match(r"^projects/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_location_path( + project: str, + location: str, + ) -> str: + """Returns a fully-qualified location string.""" + return 
"projects/{project}/locations/{location}".format( + project=project, + location=location, + ) + + @staticmethod + def parse_common_location_path(path: str) -> Dict[str, str]: + """Parse a location path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @classmethod + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[client_options_lib.ClientOptions] = None + ): + """Deprecated. Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + + warnings.warn( + "get_mtls_endpoint_and_cert_source is deprecated. Use the api_endpoint property instead.", + DeprecationWarning, + ) + if client_options is None: + client_options = client_options_lib.ClientOptions() + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_client_cert not in ("true", "false"): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError( + "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Figure out the client cert source to use. + client_cert_source = None + if use_client_cert == "true": + if client_options.client_cert_source: + client_cert_source = client_options.client_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + + # Figure out which api endpoint to use. + if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + elif use_mtls_endpoint == "always" or ( + use_mtls_endpoint == "auto" and client_cert_source + ): + api_endpoint = cls.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = cls.DEFAULT_ENDPOINT + + return api_endpoint, client_cert_source + + @staticmethod + def _read_environment_variables(): + """Returns the environment variables used by the client. 
+ + Returns: + Tuple[bool, str, str]: returns the GOOGLE_API_USE_CLIENT_CERTIFICATE, + GOOGLE_API_USE_MTLS_ENDPOINT, and GOOGLE_CLOUD_UNIVERSE_DOMAIN environment variables. + + Raises: + ValueError: If GOOGLE_API_USE_CLIENT_CERTIFICATE is not + any of ["true", "false"]. + google.auth.exceptions.MutualTLSChannelError: If GOOGLE_API_USE_MTLS_ENDPOINT + is not any of ["auto", "never", "always"]. + """ + use_client_cert = os.getenv( + "GOOGLE_API_USE_CLIENT_CERTIFICATE", "false" + ).lower() + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto").lower() + universe_domain_env = os.getenv("GOOGLE_CLOUD_UNIVERSE_DOMAIN") + if use_client_cert not in ("true", "false"): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError( + "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + return use_client_cert == "true", use_mtls_endpoint, universe_domain_env + + @staticmethod + def _get_client_cert_source(provided_cert_source, use_cert_flag): + """Return the client cert source to be used by the client. + + Args: + provided_cert_source (bytes): The client certificate source provided. + use_cert_flag (bool): A flag indicating whether to use the client certificate. + + Returns: + bytes or None: The client cert source to be used by the client. + """ + client_cert_source = None + if use_cert_flag: + if provided_cert_source: + client_cert_source = provided_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + return client_cert_source + + @staticmethod + def _get_api_endpoint( + api_override, client_cert_source, universe_domain, use_mtls_endpoint + ): + """Return the API endpoint used by the client. + + Args: + api_override (str): The API endpoint override. If specified, this is always + the return value of this function and the other arguments are not used. + client_cert_source (bytes): The client certificate source used by the client. + universe_domain (str): The universe domain used by the client. + use_mtls_endpoint (str): How to use the mTLS endpoint, which depends also on the other parameters. + Possible values are "always", "auto", or "never". + + Returns: + str: The API endpoint to be used by the client. + """ + if api_override is not None: + api_endpoint = api_override + elif use_mtls_endpoint == "always" or ( + use_mtls_endpoint == "auto" and client_cert_source + ): + _default_universe = ControlServiceClient._DEFAULT_UNIVERSE + if universe_domain != _default_universe: + raise MutualTLSChannelError( + f"mTLS is not supported in any universe other than {_default_universe}." + ) + api_endpoint = ControlServiceClient.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = ControlServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=universe_domain + ) + return api_endpoint + + @staticmethod + def _get_universe_domain( + client_universe_domain: Optional[str], universe_domain_env: Optional[str] + ) -> str: + """Return the universe domain used by the client. + + Args: + client_universe_domain (Optional[str]): The universe domain configured via the client options. + universe_domain_env (Optional[str]): The universe domain configured via the "GOOGLE_CLOUD_UNIVERSE_DOMAIN" environment variable. + + Returns: + str: The universe domain to be used by the client. + + Raises: + ValueError: If the universe domain is an empty string. 
+ """ + universe_domain = ControlServiceClient._DEFAULT_UNIVERSE + if client_universe_domain is not None: + universe_domain = client_universe_domain + elif universe_domain_env is not None: + universe_domain = universe_domain_env + if len(universe_domain.strip()) == 0: + raise ValueError("Universe Domain cannot be an empty string.") + return universe_domain + + @staticmethod + def _compare_universes( + client_universe: str, credentials: ga_credentials.Credentials + ) -> bool: + """Returns True iff the universe domains used by the client and credentials match. + + Args: + client_universe (str): The universe domain configured via the client options. + credentials (ga_credentials.Credentials): The credentials being used in the client. + + Returns: + bool: True iff client_universe matches the universe in credentials. + + Raises: + ValueError: when client_universe does not match the universe in credentials. + """ + + default_universe = ControlServiceClient._DEFAULT_UNIVERSE + credentials_universe = getattr(credentials, "universe_domain", default_universe) + + if client_universe != credentials_universe: + raise ValueError( + "The configured universe domain " + f"({client_universe}) does not match the universe domain " + f"found in the credentials ({credentials_universe}). " + "If you haven't configured the universe domain explicitly, " + f"`{default_universe}` is the default." + ) + return True + + def _validate_universe_domain(self): + """Validates client's and credentials' universe domains are consistent. + + Returns: + bool: True iff the configured universe domain is valid. + + Raises: + ValueError: If the configured universe domain is not valid. + """ + self._is_universe_domain_valid = ( + self._is_universe_domain_valid + or ControlServiceClient._compare_universes( + self.universe_domain, self.transport._credentials + ) + ) + return self._is_universe_domain_valid + + @property + def api_endpoint(self): + """Return the API endpoint used by the client instance. + + Returns: + str: The API endpoint used by the client instance. + """ + return self._api_endpoint + + @property + def universe_domain(self) -> str: + """Return the universe domain used by the client instance. + + Returns: + str: The universe domain used by the client instance. + """ + return self._universe_domain + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[ + Union[str, ControlServiceTransport, Callable[..., ControlServiceTransport]] + ] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the control service client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Optional[Union[str,ControlServiceTransport,Callable[..., ControlServiceTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the ControlServiceTransport constructor. + If set to None, a transport is chosen automatically. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): + Custom options for the client. + + 1. 
The ``api_endpoint`` property can be used to override the + default endpoint provided by the client when ``transport`` is + not explicitly provided. Only if this property is not set and + ``transport`` was not explicitly provided, the endpoint is + determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment + variable, which have one of the following values: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto-switch to the + default mTLS endpoint if client certificate is present; this is + the default value). + + 2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide a client certificate for mTLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + 3. The ``universe_domain`` property can be used to override the + default "googleapis.com" universe. Note that the ``api_endpoint`` + property still takes precedence; and ``universe_domain`` is + currently not supported for mTLS. + + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + """ + self._client_options = client_options + if isinstance(self._client_options, dict): + self._client_options = client_options_lib.from_dict(self._client_options) + if self._client_options is None: + self._client_options = client_options_lib.ClientOptions() + self._client_options = cast( + client_options_lib.ClientOptions, self._client_options + ) + + universe_domain_opt = getattr(self._client_options, "universe_domain", None) + + ( + self._use_client_cert, + self._use_mtls_endpoint, + self._universe_domain_env, + ) = ControlServiceClient._read_environment_variables() + self._client_cert_source = ControlServiceClient._get_client_cert_source( + self._client_options.client_cert_source, self._use_client_cert + ) + self._universe_domain = ControlServiceClient._get_universe_domain( + universe_domain_opt, self._universe_domain_env + ) + self._api_endpoint = None # updated below, depending on `transport` + + # Initialize the universe domain validation. + self._is_universe_domain_valid = False + + api_key_value = getattr(self._client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError( + "client_options.api_key and credentials are mutually exclusive" + ) + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + transport_provided = isinstance(transport, ControlServiceTransport) + if transport_provided: + # transport is a ControlServiceTransport instance. + if credentials or self._client_options.credentials_file or api_key_value: + raise ValueError( + "When providing a transport instance, " + "provide its credentials directly." + ) + if self._client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." 
+ ) + self._transport = cast(ControlServiceTransport, transport) + self._api_endpoint = self._transport.host + + self._api_endpoint = ( + self._api_endpoint + or ControlServiceClient._get_api_endpoint( + self._client_options.api_endpoint, + self._client_cert_source, + self._universe_domain, + self._use_mtls_endpoint, + ) + ) + + if not transport_provided: + import google.auth._default # type: ignore + + if api_key_value and hasattr( + google.auth._default, "get_api_key_credentials" + ): + credentials = google.auth._default.get_api_key_credentials( + api_key_value + ) + + transport_init: Union[ + Type[ControlServiceTransport], Callable[..., ControlServiceTransport] + ] = ( + type(self).get_transport_class(transport) + if isinstance(transport, str) or transport is None + else cast(Callable[..., ControlServiceTransport], transport) + ) + # initialize with the provided callable or the passed in class + self._transport = transport_init( + credentials=credentials, + credentials_file=self._client_options.credentials_file, + host=self._api_endpoint, + scopes=self._client_options.scopes, + client_cert_source_for_mtls=self._client_cert_source, + quota_project_id=self._client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + api_audience=self._client_options.api_audience, + ) + + def create_control( + self, + request: Optional[Union[control_service.CreateControlRequest, dict]] = None, + *, + parent: Optional[str] = None, + control: Optional[gcd_control.Control] = None, + control_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> gcd_control.Control: + r"""Creates a Control. + + By default 1000 controls are allowed for a data store. A request + can be submitted to adjust this limit. If the + [Control][google.cloud.discoveryengine.v1beta.Control] to create + already exists, an ALREADY_EXISTS error is returned. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import discoveryengine_v1beta + + def sample_create_control(): + # Create a client + client = discoveryengine_v1beta.ControlServiceClient() + + # Initialize request argument(s) + control = discoveryengine_v1beta.Control() + control.boost_action.boost = 0.551 + control.boost_action.filter = "filter_value" + control.boost_action.data_store = "data_store_value" + control.display_name = "display_name_value" + control.solution_type = "SOLUTION_TYPE_GENERATIVE_CHAT" + + request = discoveryengine_v1beta.CreateControlRequest( + parent="parent_value", + control=control, + control_id="control_id_value", + ) + + # Make the request + response = client.create_control(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.discoveryengine_v1beta.types.CreateControlRequest, dict]): + The request object. Request for CreateControl method. + parent (str): + Required. Full resource name of parent data store. 
+ Format: + ``projects/{project_number}/locations/{location_id}/collections/{collection_id}/dataStores/{data_store_id}`` + or + ``projects/{project_number}/locations/{location_id}/collections/{collection_id}/engines/{engine_id}``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + control (google.cloud.discoveryengine_v1beta.types.Control): + Required. The Control to create. + This corresponds to the ``control`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + control_id (str): + Required. The ID to use for the Control, which will + become the final component of the Control's resource + name. + + This value must be within 1-63 characters. Valid + characters are /[a-z][0-9]-_/. + + This corresponds to the ``control_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.discoveryengine_v1beta.types.Control: + Defines a conditioned behavior to employ during serving. + Must be attached to a + [ServingConfig][google.cloud.discoveryengine.v1beta.ServingConfig] + to be considered at serving time. Permitted actions + dependent on SolutionType. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, control, control_id]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, control_service.CreateControlRequest): + request = control_service.CreateControlRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if control is not None: + request.control = control + if control_id is not None: + request.control_id = control_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_control] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def delete_control( + self, + request: Optional[Union[control_service.DeleteControlRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a Control. + + If the [Control][google.cloud.discoveryengine.v1beta.Control] to + delete does not exist, a NOT_FOUND error is returned. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import discoveryengine_v1beta + + def sample_delete_control(): + # Create a client + client = discoveryengine_v1beta.ControlServiceClient() + + # Initialize request argument(s) + request = discoveryengine_v1beta.DeleteControlRequest( + name="name_value", + ) + + # Make the request + client.delete_control(request=request) + + Args: + request (Union[google.cloud.discoveryengine_v1beta.types.DeleteControlRequest, dict]): + The request object. Request for DeleteControl method. + name (str): + Required. The resource name of the Control to delete. + Format: + ``projects/{project_number}/locations/{location_id}/collections/{collection_id}/dataStores/{data_store_id}/controls/{control_id}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, control_service.DeleteControlRequest): + request = control_service.DeleteControlRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_control] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def update_control( + self, + request: Optional[Union[control_service.UpdateControlRequest, dict]] = None, + *, + control: Optional[gcd_control.Control] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> gcd_control.Control: + r"""Updates a Control. + + [Control][google.cloud.discoveryengine.v1beta.Control] action + type cannot be changed. If the + [Control][google.cloud.discoveryengine.v1beta.Control] to update + does not exist, a NOT_FOUND error is returned. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import discoveryengine_v1beta + + def sample_update_control(): + # Create a client + client = discoveryengine_v1beta.ControlServiceClient() + + # Initialize request argument(s) + control = discoveryengine_v1beta.Control() + control.boost_action.boost = 0.551 + control.boost_action.filter = "filter_value" + control.boost_action.data_store = "data_store_value" + control.display_name = "display_name_value" + control.solution_type = "SOLUTION_TYPE_GENERATIVE_CHAT" + + request = discoveryengine_v1beta.UpdateControlRequest( + control=control, + ) + + # Make the request + response = client.update_control(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.discoveryengine_v1beta.types.UpdateControlRequest, dict]): + The request object. Request for UpdateControl method. + control (google.cloud.discoveryengine_v1beta.types.Control): + Required. The Control to update. + This corresponds to the ``control`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Optional. Indicates which fields in the provided + [Control][google.cloud.discoveryengine.v1beta.Control] + to update. The following are NOT supported: + + - [Control.name][google.cloud.discoveryengine.v1beta.Control.name] + - [Control.solution_type][google.cloud.discoveryengine.v1beta.Control.solution_type] + + If not set or empty, all supported fields are updated. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.discoveryengine_v1beta.types.Control: + Defines a conditioned behavior to employ during serving. + Must be attached to a + [ServingConfig][google.cloud.discoveryengine.v1beta.ServingConfig] + to be considered at serving time. Permitted actions + dependent on SolutionType. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([control, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, control_service.UpdateControlRequest): + request = control_service.UpdateControlRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
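# A minimal sketch of update_control using the flattened ``control`` and ``update_mask``
# arguments documented above; the resource name and mask path are placeholders, and the
# Control is assumed to already exist.
from google.cloud import discoveryengine_v1beta
from google.protobuf import field_mask_pb2


def sketch_update_display_name(client: discoveryengine_v1beta.ControlServiceClient):
    control = discoveryengine_v1beta.Control()
    control.name = (
        "projects/123/locations/global/collections/default_collection"
        "/dataStores/my-data-store/controls/my-control"
    )
    control.display_name = "New display name"
    # Only fields named in the mask are written; per the docstring, ``name`` and
    # ``solution_type`` cannot be updated.
    mask = field_mask_pb2.FieldMask(paths=["display_name"])
    return client.update_control(control=control, update_mask=mask)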
+ if control is not None: + request.control = control + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update_control] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("control.name", request.control.name),) + ), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_control( + self, + request: Optional[Union[control_service.GetControlRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> control.Control: + r"""Gets a Control. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import discoveryengine_v1beta + + def sample_get_control(): + # Create a client + client = discoveryengine_v1beta.ControlServiceClient() + + # Initialize request argument(s) + request = discoveryengine_v1beta.GetControlRequest( + name="name_value", + ) + + # Make the request + response = client.get_control(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.discoveryengine_v1beta.types.GetControlRequest, dict]): + The request object. Request for GetControl method. + name (str): + Required. The resource name of the Control to get. + Format: + ``projects/{project_number}/locations/{location_id}/collections/{collection_id}/dataStores/{data_store_id}/controls/{control_id}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.discoveryengine_v1beta.types.Control: + Defines a conditioned behavior to employ during serving. + Must be attached to a + [ServingConfig][google.cloud.discoveryengine.v1beta.ServingConfig] + to be considered at serving time. Permitted actions + dependent on SolutionType. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
+ if not isinstance(request, control_service.GetControlRequest): + request = control_service.GetControlRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_control] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def list_controls( + self, + request: Optional[Union[control_service.ListControlsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListControlsPager: + r"""Lists all Controls by their parent + [DataStore][google.cloud.discoveryengine.v1beta.DataStore]. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import discoveryengine_v1beta + + def sample_list_controls(): + # Create a client + client = discoveryengine_v1beta.ControlServiceClient() + + # Initialize request argument(s) + request = discoveryengine_v1beta.ListControlsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_controls(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.discoveryengine_v1beta.types.ListControlsRequest, dict]): + The request object. Request for ListControls method. + parent (str): + Required. The data store resource name. Format: + ``projects/{project_number}/locations/{location_id}/collections/{collection_id}/dataStores/{data_store_id}`` + or + ``projects/{project_number}/locations/{location_id}/collections/{collection_id}/engines/{engine_id}``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.discoveryengine_v1beta.services.control_service.pagers.ListControlsPager: + Response for ListControls method. + + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
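# A minimal sketch of the flattened-argument calling convention shown above: each RPC
# accepts either a request object or the individual fields, and passing both raises
# ValueError. The resource name is a placeholder.
from google.cloud import discoveryengine_v1beta


def sketch_get_control(client: discoveryengine_v1beta.ControlServiceClient):
    name = (
        "projects/123/locations/global/collections/default_collection"
        "/dataStores/my-data-store/controls/my-control"
    )
    # Flattened form: the client builds the GetControlRequest internally.
    control = client.get_control(name=name)
    # Equivalent request-object form.
    request = discoveryengine_v1beta.GetControlRequest(name=name)
    control = client.get_control(request=request)
    # Mixing the two, e.g. client.get_control(request=request, name=name), raises ValueError.
    return control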
+ has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, control_service.ListControlsRequest): + request = control_service.ListControlsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_controls] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListControlsPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def __enter__(self) -> "ControlServiceClient": + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! + """ + self.transport.close() + + def list_operations( + self, + request: Optional[operations_pb2.ListOperationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Lists operations that match the specified filter in the request. + + Args: + request (:class:`~.operations_pb2.ListOperationsRequest`): + The request object. Request message for + `ListOperations` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.ListOperationsResponse: + Response message for ``ListOperations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.ListOperationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.list_operations, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. 
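# A minimal sketch of consuming the ListControlsPager returned by list_controls: iterating
# the pager yields Control messages and transparently issues follow-up requests using
# next_page_token, while the ``pages`` property exposes whole responses. The parent is a
# placeholder.
from google.cloud import discoveryengine_v1beta


def sketch_list_all_controls(client: discoveryengine_v1beta.ControlServiceClient):
    parent = (
        "projects/123/locations/global/collections/default_collection"
        "/dataStores/my-data-store"
    )
    # Item-by-item: additional ListControls calls happen lazily as pages are exhausted.
    names = [control.name for control in client.list_controls(parent=parent)]

    # Page-by-page, via the pager's ``pages`` property.
    for page in client.list_controls(parent=parent).pages:
        print(len(page.controls), "controls in this page")
    return names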
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_operation( + self, + request: Optional[operations_pb2.GetOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. + + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.get_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def cancel_operation( + self, + request: Optional[operations_pb2.CancelOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Starts asynchronous cancellation on a long-running operation. + + The server makes a best effort to cancel the operation, but success + is not guaranteed. If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.CancelOperationRequest`): + The request object. Request message for + `CancelOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.CancelOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.cancel_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. 
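# A minimal sketch of the long-running-operations mixins above: each accepts either an
# operations_pb2 request or a plain dict, which is expanded into the proto via keyword
# arguments. The operation and location names are placeholders.
from google.cloud import discoveryengine_v1beta


def sketch_inspect_operations(client: discoveryengine_v1beta.ControlServiceClient):
    # List long-running operations under a location.
    listing = client.list_operations({"name": "projects/123/locations/global"})
    for op in listing.operations:
        print(op.name, op.done)

    # Fetch a single operation and cancel it if it is still running.
    op = client.get_operation(
        {"name": "projects/123/locations/global/operations/operation-id"}
    )
    if not op.done:
        client.cancel_operation({"name": op.name})
    return op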
+ self._validate_universe_domain() + + # Send the request. + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("ControlServiceClient",) diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/control_service/pagers.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/control_service/pagers.py new file mode 100644 index 000000000000..cd5079fa8a48 --- /dev/null +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/control_service/pagers.py @@ -0,0 +1,155 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import ( + Any, + AsyncIterator, + Awaitable, + Callable, + Iterator, + Optional, + Sequence, + Tuple, +) + +from google.cloud.discoveryengine_v1beta.types import control, control_service + + +class ListControlsPager: + """A pager for iterating through ``list_controls`` requests. + + This class thinly wraps an initial + :class:`google.cloud.discoveryengine_v1beta.types.ListControlsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``controls`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListControls`` requests and continue to iterate + through the ``controls`` field on the + corresponding responses. + + All the usual :class:`google.cloud.discoveryengine_v1beta.types.ListControlsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., control_service.ListControlsResponse], + request: control_service.ListControlsRequest, + response: control_service.ListControlsResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.discoveryengine_v1beta.types.ListControlsRequest): + The initial request object. + response (google.cloud.discoveryengine_v1beta.types.ListControlsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = control_service.ListControlsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[control_service.ListControlsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[control.Control]: + for page in self.pages: + yield from page.controls + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListControlsAsyncPager: + """A pager for iterating through ``list_controls`` requests. + + This class thinly wraps an initial + :class:`google.cloud.discoveryengine_v1beta.types.ListControlsResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``controls`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListControls`` requests and continue to iterate + through the ``controls`` field on the + corresponding responses. + + All the usual :class:`google.cloud.discoveryengine_v1beta.types.ListControlsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., Awaitable[control_service.ListControlsResponse]], + request: control_service.ListControlsRequest, + response: control_service.ListControlsResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.discoveryengine_v1beta.types.ListControlsRequest): + The initial request object. + response (google.cloud.discoveryengine_v1beta.types.ListControlsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = control_service.ListControlsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[control_service.ListControlsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + + def __aiter__(self) -> AsyncIterator[control.Control]: + async def async_generator(): + async for page in self.pages: + for response in page.controls: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/control_service/transports/__init__.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/control_service/transports/__init__.py new file mode 100644 index 000000000000..19aaa21a8e17 --- /dev/null +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/control_service/transports/__init__.py @@ -0,0 +1,36 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +from typing import Dict, Type + +from .base import ControlServiceTransport +from .grpc import ControlServiceGrpcTransport +from .grpc_asyncio import ControlServiceGrpcAsyncIOTransport +from .rest import ControlServiceRestInterceptor, ControlServiceRestTransport + +# Compile a registry of transports. +_transport_registry = OrderedDict() # type: Dict[str, Type[ControlServiceTransport]] +_transport_registry["grpc"] = ControlServiceGrpcTransport +_transport_registry["grpc_asyncio"] = ControlServiceGrpcAsyncIOTransport +_transport_registry["rest"] = ControlServiceRestTransport + +__all__ = ( + "ControlServiceTransport", + "ControlServiceGrpcTransport", + "ControlServiceGrpcAsyncIOTransport", + "ControlServiceRestTransport", + "ControlServiceRestInterceptor", +) diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/control_service/transports/base.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/control_service/transports/base.py new file mode 100644 index 000000000000..6925e163c964 --- /dev/null +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/control_service/transports/base.py @@ -0,0 +1,250 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import abc +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union + +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.oauth2 import service_account # type: ignore +from google.protobuf import empty_pb2 # type: ignore + +from google.cloud.discoveryengine_v1beta import gapic_version as package_version +from google.cloud.discoveryengine_v1beta.types import control +from google.cloud.discoveryengine_v1beta.types import control as gcd_control +from google.cloud.discoveryengine_v1beta.types import control_service + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +class ControlServiceTransport(abc.ABC): + """Abstract transport class for ControlService.""" + + AUTH_SCOPES = ("https://www.googleapis.com/auth/cloud-platform",) + + DEFAULT_HOST: str = "discoveryengine.googleapis.com" + + def __init__( + self, + *, + host: str = DEFAULT_HOST, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + **kwargs, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'discoveryengine.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A list of scopes. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + """ + + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} + + # Save the scopes. + self._scopes = scopes + + # If no credentials are provided, then determine the appropriate + # defaults. 
+ if credentials and credentials_file: + raise core_exceptions.DuplicateCredentialArgs( + "'credentials_file' and 'credentials' are mutually exclusive" + ) + + if credentials_file is not None: + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, **scopes_kwargs, quota_project_id=quota_project_id + ) + elif credentials is None: + credentials, _ = google.auth.default( + **scopes_kwargs, quota_project_id=quota_project_id + ) + # Don't apply audience if the credentials file passed from user. + if hasattr(credentials, "with_gdch_audience"): + credentials = credentials.with_gdch_audience( + api_audience if api_audience else host + ) + + # If the credentials are service account credentials, then always try to use self signed JWT. + if ( + always_use_jwt_access + and isinstance(credentials, service_account.Credentials) + and hasattr(service_account.Credentials, "with_always_use_jwt_access") + ): + credentials = credentials.with_always_use_jwt_access(True) + + # Save the credentials. + self._credentials = credentials + + # Save the hostname. Default to port 443 (HTTPS) if none is specified. + if ":" not in host: + host += ":443" + self._host = host + + @property + def host(self): + return self._host + + def _prep_wrapped_messages(self, client_info): + # Precompute the wrapped methods. + self._wrapped_methods = { + self.create_control: gapic_v1.method.wrap_method( + self.create_control, + default_timeout=None, + client_info=client_info, + ), + self.delete_control: gapic_v1.method.wrap_method( + self.delete_control, + default_timeout=None, + client_info=client_info, + ), + self.update_control: gapic_v1.method.wrap_method( + self.update_control, + default_timeout=None, + client_info=client_info, + ), + self.get_control: gapic_v1.method.wrap_method( + self.get_control, + default_timeout=None, + client_info=client_info, + ), + self.list_controls: gapic_v1.method.wrap_method( + self.list_controls, + default_timeout=None, + client_info=client_info, + ), + } + + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! 
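# A minimal sketch of scoping a client with a ``with`` block: ControlServiceClient's
# __enter__/__exit__ (defined earlier) call this transport's close() on exit, which,
# per the warning above, is only safe when the transport is not shared with other
# clients. The parent resource name is a placeholder.
from google.cloud import discoveryengine_v1beta


def sketch_scoped_client():
    with discoveryengine_v1beta.ControlServiceClient() as client:
        parent = (
            "projects/123/locations/global/collections/default_collection"
            "/dataStores/my-data-store"
        )
        for control in client.list_controls(parent=parent):
            print(control.display_name)
    # The underlying channel/session is closed here.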
+ """ + raise NotImplementedError() + + @property + def create_control( + self, + ) -> Callable[ + [control_service.CreateControlRequest], + Union[gcd_control.Control, Awaitable[gcd_control.Control]], + ]: + raise NotImplementedError() + + @property + def delete_control( + self, + ) -> Callable[ + [control_service.DeleteControlRequest], + Union[empty_pb2.Empty, Awaitable[empty_pb2.Empty]], + ]: + raise NotImplementedError() + + @property + def update_control( + self, + ) -> Callable[ + [control_service.UpdateControlRequest], + Union[gcd_control.Control, Awaitable[gcd_control.Control]], + ]: + raise NotImplementedError() + + @property + def get_control( + self, + ) -> Callable[ + [control_service.GetControlRequest], + Union[control.Control, Awaitable[control.Control]], + ]: + raise NotImplementedError() + + @property + def list_controls( + self, + ) -> Callable[ + [control_service.ListControlsRequest], + Union[ + control_service.ListControlsResponse, + Awaitable[control_service.ListControlsResponse], + ], + ]: + raise NotImplementedError() + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], + Union[ + operations_pb2.ListOperationsResponse, + Awaitable[operations_pb2.ListOperationsResponse], + ], + ]: + raise NotImplementedError() + + @property + def get_operation( + self, + ) -> Callable[ + [operations_pb2.GetOperationRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None,]: + raise NotImplementedError() + + @property + def kind(self) -> str: + raise NotImplementedError() + + +__all__ = ("ControlServiceTransport",) diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/control_service/transports/grpc.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/control_service/transports/grpc.py new file mode 100644 index 000000000000..40c0ca634713 --- /dev/null +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/control_service/transports/grpc.py @@ -0,0 +1,451 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from typing import Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, grpc_helpers +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore +import grpc # type: ignore + +from google.cloud.discoveryengine_v1beta.types import control +from google.cloud.discoveryengine_v1beta.types import control as gcd_control +from google.cloud.discoveryengine_v1beta.types import control_service + +from .base import DEFAULT_CLIENT_INFO, ControlServiceTransport + + +class ControlServiceGrpcTransport(ControlServiceTransport): + """gRPC backend transport for ControlService. + + Service for performing CRUD operations on Controls. + Controls allow for custom logic to be implemented in the serving + path. Controls need to be attached to a Serving Config to be + considered during a request. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + + _stubs: Dict[str, Callable] + + def __init__( + self, + *, + host: str = "discoveryengine.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'discoveryengine.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if a ``channel`` instance is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if a ``channel`` instance is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if a ``channel`` instance is provided. + channel (Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]]): + A ``Channel`` instance through which to make calls, or a Callable + that constructs and returns one. If set to None, ``self.create_channel`` + is used to create the channel. If a Callable is given, it will be called + with the same arguments as used in ``self.create_channel``. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. 
+ If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if a ``channel`` instance is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if isinstance(channel, grpc.Channel): + # Ignore credentials if a channel was passed. + credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + # initialize with the provided callable or the default channel + channel_init = channel or type(self).create_channel + self._grpc_channel = channel_init( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. 
+ credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @classmethod + def create_channel( + cls, + host: str = "discoveryengine.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> grpc.Channel: + """Create and return a gRPC channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + grpc.Channel: A gRPC channel object. + + Raises: + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + + return grpc_helpers.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs, + ) + + @property + def grpc_channel(self) -> grpc.Channel: + """Return the channel designed to connect to this service.""" + return self._grpc_channel + + @property + def create_control( + self, + ) -> Callable[[control_service.CreateControlRequest], gcd_control.Control]: + r"""Return a callable for the create control method over gRPC. + + Creates a Control. + + By default 1000 controls are allowed for a data store. A request + can be submitted to adjust this limit. If the + [Control][google.cloud.discoveryengine.v1beta.Control] to create + already exists, an ALREADY_EXISTS error is returned. + + Returns: + Callable[[~.CreateControlRequest], + ~.Control]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_control" not in self._stubs: + self._stubs["create_control"] = self.grpc_channel.unary_unary( + "/google.cloud.discoveryengine.v1beta.ControlService/CreateControl", + request_serializer=control_service.CreateControlRequest.serialize, + response_deserializer=gcd_control.Control.deserialize, + ) + return self._stubs["create_control"] + + @property + def delete_control( + self, + ) -> Callable[[control_service.DeleteControlRequest], empty_pb2.Empty]: + r"""Return a callable for the delete control method over gRPC. + + Deletes a Control. 
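# A minimal sketch of supplying a pre-built gRPC channel via create_channel (shown above);
# when a channel instance is passed, the transport uses it as-is and ignores the
# credentials arguments. Application Default Credentials are assumed to be configured.
import google.auth
from google.cloud import discoveryengine_v1beta
from google.cloud.discoveryengine_v1beta.services.control_service.transports import (
    ControlServiceGrpcTransport,
)


def sketch_custom_channel():
    credentials, _ = google.auth.default(
        scopes=["https://www.googleapis.com/auth/cloud-platform"]
    )
    channel = ControlServiceGrpcTransport.create_channel(
        "discoveryengine.googleapis.com",
        credentials=credentials,
    )
    transport = ControlServiceGrpcTransport(channel=channel)
    return discoveryengine_v1beta.ControlServiceClient(transport=transport)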
+ + If the [Control][google.cloud.discoveryengine.v1beta.Control] to + delete does not exist, a NOT_FOUND error is returned. + + Returns: + Callable[[~.DeleteControlRequest], + ~.Empty]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_control" not in self._stubs: + self._stubs["delete_control"] = self.grpc_channel.unary_unary( + "/google.cloud.discoveryengine.v1beta.ControlService/DeleteControl", + request_serializer=control_service.DeleteControlRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs["delete_control"] + + @property + def update_control( + self, + ) -> Callable[[control_service.UpdateControlRequest], gcd_control.Control]: + r"""Return a callable for the update control method over gRPC. + + Updates a Control. + + [Control][google.cloud.discoveryengine.v1beta.Control] action + type cannot be changed. If the + [Control][google.cloud.discoveryengine.v1beta.Control] to update + does not exist, a NOT_FOUND error is returned. + + Returns: + Callable[[~.UpdateControlRequest], + ~.Control]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_control" not in self._stubs: + self._stubs["update_control"] = self.grpc_channel.unary_unary( + "/google.cloud.discoveryengine.v1beta.ControlService/UpdateControl", + request_serializer=control_service.UpdateControlRequest.serialize, + response_deserializer=gcd_control.Control.deserialize, + ) + return self._stubs["update_control"] + + @property + def get_control( + self, + ) -> Callable[[control_service.GetControlRequest], control.Control]: + r"""Return a callable for the get control method over gRPC. + + Gets a Control. + + Returns: + Callable[[~.GetControlRequest], + ~.Control]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_control" not in self._stubs: + self._stubs["get_control"] = self.grpc_channel.unary_unary( + "/google.cloud.discoveryengine.v1beta.ControlService/GetControl", + request_serializer=control_service.GetControlRequest.serialize, + response_deserializer=control.Control.deserialize, + ) + return self._stubs["get_control"] + + @property + def list_controls( + self, + ) -> Callable[ + [control_service.ListControlsRequest], control_service.ListControlsResponse + ]: + r"""Return a callable for the list controls method over gRPC. + + Lists all Controls by their parent + [DataStore][google.cloud.discoveryengine.v1beta.DataStore]. + + Returns: + Callable[[~.ListControlsRequest], + ~.ListControlsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "list_controls" not in self._stubs: + self._stubs["list_controls"] = self.grpc_channel.unary_unary( + "/google.cloud.discoveryengine.v1beta.ControlService/ListControls", + request_serializer=control_service.ListControlsRequest.serialize, + response_deserializer=control_service.ListControlsResponse.deserialize, + ) + return self._stubs["list_controls"] + + def close(self): + self.grpc_channel.close() + + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None]: + r"""Return a callable for the cancel_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "cancel_operation" not in self._stubs: + self._stubs["cancel_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/CancelOperation", + request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["cancel_operation"] + + @property + def get_operation( + self, + ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: + r"""Return a callable for the get_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_operation" not in self._stubs: + self._stubs["get_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/GetOperation", + request_serializer=operations_pb2.GetOperationRequest.SerializeToString, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["get_operation"] + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse + ]: + r"""Return a callable for the list_operations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "list_operations" not in self._stubs: + self._stubs["list_operations"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/ListOperations", + request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, + response_deserializer=operations_pb2.ListOperationsResponse.FromString, + ) + return self._stubs["list_operations"] + + @property + def kind(self) -> str: + return "grpc" + + +__all__ = ("ControlServiceGrpcTransport",) diff --git a/packages/google-maps-mapsplatformdatasets/google/maps/mapsplatformdatasets_v1alpha/services/maps_platform_datasets_v1_alpha/transports/grpc_asyncio.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/control_service/transports/grpc_asyncio.py similarity index 61% rename from packages/google-maps-mapsplatformdatasets/google/maps/mapsplatformdatasets_v1alpha/services/maps_platform_datasets_v1_alpha/transports/grpc_asyncio.py rename to packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/control_service/transports/grpc_asyncio.py index 048fce3cdd9f..a129bf617666 100644 --- a/packages/google-maps-mapsplatformdatasets/google/maps/mapsplatformdatasets_v1alpha/services/maps_platform_datasets_v1_alpha/transports/grpc_asyncio.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/control_service/transports/grpc_asyncio.py @@ -21,24 +21,27 @@ from google.api_core import retry_async as retries from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore from google.protobuf import empty_pb2 # type: ignore import grpc # type: ignore from grpc.experimental import aio # type: ignore -from google.maps.mapsplatformdatasets_v1alpha.types import dataset as gmm_dataset -from google.maps.mapsplatformdatasets_v1alpha.types import maps_platform_datasets -from google.maps.mapsplatformdatasets_v1alpha.types import dataset +from google.cloud.discoveryengine_v1beta.types import control +from google.cloud.discoveryengine_v1beta.types import control as gcd_control +from google.cloud.discoveryengine_v1beta.types import control_service -from .base import DEFAULT_CLIENT_INFO, MapsPlatformDatasetsV1AlphaTransport -from .grpc import MapsPlatformDatasetsV1AlphaGrpcTransport +from .base import DEFAULT_CLIENT_INFO, ControlServiceTransport +from .grpc import ControlServiceGrpcTransport -class MapsPlatformDatasetsV1AlphaGrpcAsyncIOTransport( - MapsPlatformDatasetsV1AlphaTransport -): - """gRPC AsyncIO backend transport for MapsPlatformDatasetsV1Alpha. +class ControlServiceGrpcAsyncIOTransport(ControlServiceTransport): + """gRPC AsyncIO backend transport for ControlService. - Service definition for the Maps Platform Datasets API. + Service for performing CRUD operations on Controls. + Controls allow for custom logic to be implemented in the serving + path. Controls need to be attached to a Serving Config to be + considered during a request. 
This class defines the same methods as the primary client, so the primary client can load the underlying transport implementation @@ -54,7 +57,7 @@ class MapsPlatformDatasetsV1AlphaGrpcAsyncIOTransport( @classmethod def create_channel( cls, - host: str = "mapsplatformdatasets.googleapis.com", + host: str = "discoveryengine.googleapis.com", credentials: Optional[ga_credentials.Credentials] = None, credentials_file: Optional[str] = None, scopes: Optional[Sequence[str]] = None, @@ -96,7 +99,7 @@ def create_channel( def __init__( self, *, - host: str = "mapsplatformdatasets.googleapis.com", + host: str = "discoveryengine.googleapis.com", credentials: Optional[ga_credentials.Credentials] = None, credentials_file: Optional[str] = None, scopes: Optional[Sequence[str]] = None, @@ -114,7 +117,7 @@ def __init__( Args: host (Optional[str]): - The hostname to connect to (default: 'mapsplatformdatasets.googleapis.com'). + The hostname to connect to (default: 'discoveryengine.googleapis.com'). credentials (Optional[google.auth.credentials.Credentials]): The authorization credentials to attach to requests. These credentials identify the application to the service; if none @@ -243,48 +246,23 @@ def grpc_channel(self) -> aio.Channel: return self._grpc_channel @property - def create_dataset( + def create_control( self, ) -> Callable[ - [maps_platform_datasets.CreateDatasetRequest], Awaitable[gmm_dataset.Dataset] + [control_service.CreateControlRequest], Awaitable[gcd_control.Control] ]: - r"""Return a callable for the create dataset method over gRPC. + r"""Return a callable for the create control method over gRPC. - Create a new dataset for the specified project. + Creates a Control. - Returns: - Callable[[~.CreateDatasetRequest], - Awaitable[~.Dataset]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "create_dataset" not in self._stubs: - self._stubs["create_dataset"] = self.grpc_channel.unary_unary( - "/google.maps.mapsplatformdatasets.v1alpha.MapsPlatformDatasetsV1Alpha/CreateDataset", - request_serializer=maps_platform_datasets.CreateDatasetRequest.serialize, - response_deserializer=gmm_dataset.Dataset.deserialize, - ) - return self._stubs["create_dataset"] - - @property - def update_dataset_metadata( - self, - ) -> Callable[ - [maps_platform_datasets.UpdateDatasetMetadataRequest], - Awaitable[gmm_dataset.Dataset], - ]: - r"""Return a callable for the update dataset metadata method over gRPC. - - Update the metadata for the dataset. To update the - data use: UploadDataset. + By default 1000 controls are allowed for a data store. A request + can be submitted to adjust this limit. If the + [Control][google.cloud.discoveryengine.v1beta.Control] to create + already exists, an ALREADY_EXISTS error is returned. Returns: - Callable[[~.UpdateDatasetMetadataRequest], - Awaitable[~.Dataset]]: + Callable[[~.CreateControlRequest], + Awaitable[~.Control]]: A function that, when called, will call the underlying RPC on the server. """ @@ -292,56 +270,28 @@ def update_dataset_metadata( # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
- if "update_dataset_metadata" not in self._stubs: - self._stubs["update_dataset_metadata"] = self.grpc_channel.unary_unary( - "/google.maps.mapsplatformdatasets.v1alpha.MapsPlatformDatasetsV1Alpha/UpdateDatasetMetadata", - request_serializer=maps_platform_datasets.UpdateDatasetMetadataRequest.serialize, - response_deserializer=gmm_dataset.Dataset.deserialize, + if "create_control" not in self._stubs: + self._stubs["create_control"] = self.grpc_channel.unary_unary( + "/google.cloud.discoveryengine.v1beta.ControlService/CreateControl", + request_serializer=control_service.CreateControlRequest.serialize, + response_deserializer=gcd_control.Control.deserialize, ) - return self._stubs["update_dataset_metadata"] + return self._stubs["create_control"] @property - def get_dataset( + def delete_control( self, - ) -> Callable[ - [maps_platform_datasets.GetDatasetRequest], Awaitable[dataset.Dataset] - ]: - r"""Return a callable for the get dataset method over gRPC. - - Get the published or latest version of the dataset. - - Returns: - Callable[[~.GetDatasetRequest], - Awaitable[~.Dataset]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "get_dataset" not in self._stubs: - self._stubs["get_dataset"] = self.grpc_channel.unary_unary( - "/google.maps.mapsplatformdatasets.v1alpha.MapsPlatformDatasetsV1Alpha/GetDataset", - request_serializer=maps_platform_datasets.GetDatasetRequest.serialize, - response_deserializer=dataset.Dataset.deserialize, - ) - return self._stubs["get_dataset"] + ) -> Callable[[control_service.DeleteControlRequest], Awaitable[empty_pb2.Empty]]: + r"""Return a callable for the delete control method over gRPC. - @property - def list_dataset_versions( - self, - ) -> Callable[ - [maps_platform_datasets.ListDatasetVersionsRequest], - Awaitable[maps_platform_datasets.ListDatasetVersionsResponse], - ]: - r"""Return a callable for the list dataset versions method over gRPC. + Deletes a Control. - List all the versions of a dataset. + If the [Control][google.cloud.discoveryengine.v1beta.Control] to + delete does not exist, a NOT_FOUND error is returned. Returns: - Callable[[~.ListDatasetVersionsRequest], - Awaitable[~.ListDatasetVersionsResponse]]: + Callable[[~.DeleteControlRequest], + Awaitable[~.Empty]]: A function that, when called, will call the underlying RPC on the server. """ @@ -349,28 +299,32 @@ def list_dataset_versions( # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
- if "list_dataset_versions" not in self._stubs: - self._stubs["list_dataset_versions"] = self.grpc_channel.unary_unary( - "/google.maps.mapsplatformdatasets.v1alpha.MapsPlatformDatasetsV1Alpha/ListDatasetVersions", - request_serializer=maps_platform_datasets.ListDatasetVersionsRequest.serialize, - response_deserializer=maps_platform_datasets.ListDatasetVersionsResponse.deserialize, + if "delete_control" not in self._stubs: + self._stubs["delete_control"] = self.grpc_channel.unary_unary( + "/google.cloud.discoveryengine.v1beta.ControlService/DeleteControl", + request_serializer=control_service.DeleteControlRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, ) - return self._stubs["list_dataset_versions"] + return self._stubs["delete_control"] @property - def list_datasets( + def update_control( self, ) -> Callable[ - [maps_platform_datasets.ListDatasetsRequest], - Awaitable[maps_platform_datasets.ListDatasetsResponse], + [control_service.UpdateControlRequest], Awaitable[gcd_control.Control] ]: - r"""Return a callable for the list datasets method over gRPC. + r"""Return a callable for the update control method over gRPC. - List all the datasets for the specified project. + Updates a Control. + + [Control][google.cloud.discoveryengine.v1beta.Control] action + type cannot be changed. If the + [Control][google.cloud.discoveryengine.v1beta.Control] to update + does not exist, a NOT_FOUND error is returned. Returns: - Callable[[~.ListDatasetsRequest], - Awaitable[~.ListDatasetsResponse]]: + Callable[[~.UpdateControlRequest], + Awaitable[~.Control]]: A function that, when called, will call the underlying RPC on the server. """ @@ -378,28 +332,25 @@ def list_datasets( # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if "list_datasets" not in self._stubs: - self._stubs["list_datasets"] = self.grpc_channel.unary_unary( - "/google.maps.mapsplatformdatasets.v1alpha.MapsPlatformDatasetsV1Alpha/ListDatasets", - request_serializer=maps_platform_datasets.ListDatasetsRequest.serialize, - response_deserializer=maps_platform_datasets.ListDatasetsResponse.deserialize, + if "update_control" not in self._stubs: + self._stubs["update_control"] = self.grpc_channel.unary_unary( + "/google.cloud.discoveryengine.v1beta.ControlService/UpdateControl", + request_serializer=control_service.UpdateControlRequest.serialize, + response_deserializer=gcd_control.Control.deserialize, ) - return self._stubs["list_datasets"] + return self._stubs["update_control"] @property - def delete_dataset( + def get_control( self, - ) -> Callable[ - [maps_platform_datasets.DeleteDatasetRequest], Awaitable[empty_pb2.Empty] - ]: - r"""Return a callable for the delete dataset method over gRPC. + ) -> Callable[[control_service.GetControlRequest], Awaitable[control.Control]]: + r"""Return a callable for the get control method over gRPC. - Delete the specified dataset and optionally all its - corresponding versions. + Gets a Control. Returns: - Callable[[~.DeleteDatasetRequest], - Awaitable[~.Empty]]: + Callable[[~.GetControlRequest], + Awaitable[~.Control]]: A function that, when called, will call the underlying RPC on the server. """ @@ -407,27 +358,29 @@ def delete_dataset( # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
- if "delete_dataset" not in self._stubs: - self._stubs["delete_dataset"] = self.grpc_channel.unary_unary( - "/google.maps.mapsplatformdatasets.v1alpha.MapsPlatformDatasetsV1Alpha/DeleteDataset", - request_serializer=maps_platform_datasets.DeleteDatasetRequest.serialize, - response_deserializer=empty_pb2.Empty.FromString, + if "get_control" not in self._stubs: + self._stubs["get_control"] = self.grpc_channel.unary_unary( + "/google.cloud.discoveryengine.v1beta.ControlService/GetControl", + request_serializer=control_service.GetControlRequest.serialize, + response_deserializer=control.Control.deserialize, ) - return self._stubs["delete_dataset"] + return self._stubs["get_control"] @property - def delete_dataset_version( + def list_controls( self, ) -> Callable[ - [maps_platform_datasets.DeleteDatasetVersionRequest], Awaitable[empty_pb2.Empty] + [control_service.ListControlsRequest], + Awaitable[control_service.ListControlsResponse], ]: - r"""Return a callable for the delete dataset version method over gRPC. + r"""Return a callable for the list controls method over gRPC. - Delete a specific version of the dataset. + Lists all Controls by their parent + [DataStore][google.cloud.discoveryengine.v1beta.DataStore]. Returns: - Callable[[~.DeleteDatasetVersionRequest], - Awaitable[~.Empty]]: + Callable[[~.ListControlsRequest], + Awaitable[~.ListControlsResponse]]: A function that, when called, will call the underlying RPC on the server. """ @@ -435,76 +388,39 @@ def delete_dataset_version( # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if "delete_dataset_version" not in self._stubs: - self._stubs["delete_dataset_version"] = self.grpc_channel.unary_unary( - "/google.maps.mapsplatformdatasets.v1alpha.MapsPlatformDatasetsV1Alpha/DeleteDatasetVersion", - request_serializer=maps_platform_datasets.DeleteDatasetVersionRequest.serialize, - response_deserializer=empty_pb2.Empty.FromString, + if "list_controls" not in self._stubs: + self._stubs["list_controls"] = self.grpc_channel.unary_unary( + "/google.cloud.discoveryengine.v1beta.ControlService/ListControls", + request_serializer=control_service.ListControlsRequest.serialize, + response_deserializer=control_service.ListControlsResponse.deserialize, ) - return self._stubs["delete_dataset_version"] + return self._stubs["list_controls"] def _prep_wrapped_messages(self, client_info): """Precompute the wrapped methods, overriding the base class method to use async wrappers.""" self._wrapped_methods = { - self.create_dataset: gapic_v1.method_async.wrap_method( - self.create_dataset, - default_timeout=60.0, - client_info=client_info, - ), - self.update_dataset_metadata: gapic_v1.method_async.wrap_method( - self.update_dataset_metadata, - default_timeout=60.0, - client_info=client_info, - ), - self.get_dataset: gapic_v1.method_async.wrap_method( - self.get_dataset, - default_retry=retries.AsyncRetry( - initial=1.0, - maximum=10.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, + self.create_control: gapic_v1.method_async.wrap_method( + self.create_control, + default_timeout=None, client_info=client_info, ), - self.list_dataset_versions: gapic_v1.method_async.wrap_method( - self.list_dataset_versions, - default_retry=retries.AsyncRetry( - initial=1.0, - maximum=10.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - 
default_timeout=60.0, + self.delete_control: gapic_v1.method_async.wrap_method( + self.delete_control, + default_timeout=None, client_info=client_info, ), - self.list_datasets: gapic_v1.method_async.wrap_method( - self.list_datasets, - default_retry=retries.AsyncRetry( - initial=1.0, - maximum=10.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, + self.update_control: gapic_v1.method_async.wrap_method( + self.update_control, + default_timeout=None, client_info=client_info, ), - self.delete_dataset: gapic_v1.method_async.wrap_method( - self.delete_dataset, - default_timeout=60.0, + self.get_control: gapic_v1.method_async.wrap_method( + self.get_control, + default_timeout=None, client_info=client_info, ), - self.delete_dataset_version: gapic_v1.method_async.wrap_method( - self.delete_dataset_version, + self.list_controls: gapic_v1.method_async.wrap_method( + self.list_controls, default_timeout=None, client_info=client_info, ), @@ -513,5 +429,58 @@ def _prep_wrapped_messages(self, client_info): def close(self): return self.grpc_channel.close() + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None]: + r"""Return a callable for the cancel_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "cancel_operation" not in self._stubs: + self._stubs["cancel_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/CancelOperation", + request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["cancel_operation"] + + @property + def get_operation( + self, + ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: + r"""Return a callable for the get_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_operation" not in self._stubs: + self._stubs["get_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/GetOperation", + request_serializer=operations_pb2.GetOperationRequest.SerializeToString, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["get_operation"] + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse + ]: + r"""Return a callable for the list_operations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "list_operations" not in self._stubs: + self._stubs["list_operations"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/ListOperations", + request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, + response_deserializer=operations_pb2.ListOperationsResponse.FromString, + ) + return self._stubs["list_operations"] + -__all__ = ("MapsPlatformDatasetsV1AlphaGrpcAsyncIOTransport",) +__all__ = ("ControlServiceGrpcAsyncIOTransport",) diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/control_service/transports/rest.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/control_service/transports/rest.py new file mode 100644 index 000000000000..19d3418393d6 --- /dev/null +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/control_service/transports/rest.py @@ -0,0 +1,1223 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +import dataclasses +import json # type: ignore +import re +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, path_template, rest_helpers, rest_streaming +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.transport.requests import AuthorizedSession # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.protobuf import json_format +import grpc # type: ignore +from requests import __version__ as requests_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + + +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore + +from google.cloud.discoveryengine_v1beta.types import control +from google.cloud.discoveryengine_v1beta.types import control as gcd_control +from google.cloud.discoveryengine_v1beta.types import control_service + +from .base import ControlServiceTransport +from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) + + +class ControlServiceRestInterceptor: + """Interceptor for ControlService. + + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. 
+ Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the ControlServiceRestTransport. + + .. code-block:: python + class MyCustomControlServiceInterceptor(ControlServiceRestInterceptor): + def pre_create_control(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_create_control(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_delete_control(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def pre_get_control(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_control(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_controls(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_controls(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_update_control(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_update_control(self, response): + logging.log(f"Received response: {response}") + return response + + transport = ControlServiceRestTransport(interceptor=MyCustomControlServiceInterceptor()) + client = ControlServiceClient(transport=transport) + + + """ + + def pre_create_control( + self, + request: control_service.CreateControlRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[control_service.CreateControlRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for create_control + + Override in a subclass to manipulate the request or metadata + before they are sent to the ControlService server. + """ + return request, metadata + + def post_create_control(self, response: gcd_control.Control) -> gcd_control.Control: + """Post-rpc interceptor for create_control + + Override in a subclass to manipulate the response + after it is returned by the ControlService server but before + it is returned to user code. + """ + return response + + def pre_delete_control( + self, + request: control_service.DeleteControlRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[control_service.DeleteControlRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for delete_control + + Override in a subclass to manipulate the request or metadata + before they are sent to the ControlService server. + """ + return request, metadata + + def pre_get_control( + self, + request: control_service.GetControlRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[control_service.GetControlRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_control + + Override in a subclass to manipulate the request or metadata + before they are sent to the ControlService server. + """ + return request, metadata + + def post_get_control(self, response: control.Control) -> control.Control: + """Post-rpc interceptor for get_control + + Override in a subclass to manipulate the response + after it is returned by the ControlService server but before + it is returned to user code. 
+ """ + return response + + def pre_list_controls( + self, + request: control_service.ListControlsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[control_service.ListControlsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_controls + + Override in a subclass to manipulate the request or metadata + before they are sent to the ControlService server. + """ + return request, metadata + + def post_list_controls( + self, response: control_service.ListControlsResponse + ) -> control_service.ListControlsResponse: + """Post-rpc interceptor for list_controls + + Override in a subclass to manipulate the response + after it is returned by the ControlService server but before + it is returned to user code. + """ + return response + + def pre_update_control( + self, + request: control_service.UpdateControlRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[control_service.UpdateControlRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for update_control + + Override in a subclass to manipulate the request or metadata + before they are sent to the ControlService server. + """ + return request, metadata + + def post_update_control(self, response: gcd_control.Control) -> gcd_control.Control: + """Post-rpc interceptor for update_control + + Override in a subclass to manipulate the response + after it is returned by the ControlService server but before + it is returned to user code. + """ + return response + + def pre_cancel_operation( + self, + request: operations_pb2.CancelOperationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[operations_pb2.CancelOperationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for cancel_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the ControlService server. + """ + return request, metadata + + def post_cancel_operation(self, response: None) -> None: + """Post-rpc interceptor for cancel_operation + + Override in a subclass to manipulate the response + after it is returned by the ControlService server but before + it is returned to user code. + """ + return response + + def pre_get_operation( + self, + request: operations_pb2.GetOperationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[operations_pb2.GetOperationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the ControlService server. + """ + return request, metadata + + def post_get_operation( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for get_operation + + Override in a subclass to manipulate the response + after it is returned by the ControlService server but before + it is returned to user code. + """ + return response + + def pre_list_operations( + self, + request: operations_pb2.ListOperationsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[operations_pb2.ListOperationsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_operations + + Override in a subclass to manipulate the request or metadata + before they are sent to the ControlService server. 
+ """ + return request, metadata + + def post_list_operations( + self, response: operations_pb2.ListOperationsResponse + ) -> operations_pb2.ListOperationsResponse: + """Post-rpc interceptor for list_operations + + Override in a subclass to manipulate the response + after it is returned by the ControlService server but before + it is returned to user code. + """ + return response + + +@dataclasses.dataclass +class ControlServiceRestStub: + _session: AuthorizedSession + _host: str + _interceptor: ControlServiceRestInterceptor + + +class ControlServiceRestTransport(ControlServiceTransport): + """REST backend transport for ControlService. + + Service for performing CRUD operations on Controls. + Controls allow for custom logic to be implemented in the serving + path. Controls need to be attached to a Serving Config to be + considered during a request. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends JSON representations of protocol buffers over HTTP/1.1 + + """ + + def __init__( + self, + *, + host: str = "discoveryengine.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", + interceptor: Optional[ControlServiceRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'discoveryengine.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. It is ignored + if ``channel`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. + """ + # Run the base constructor + # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. 
+ # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the + # credentials object + maybe_url_match = re.match("^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host) + if maybe_url_match is None: + raise ValueError( + f"Unexpected hostname structure: {host}" + ) # pragma: NO COVER + + url_match_items = maybe_url_match.groupdict() + + host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host + + super().__init__( + host=host, + credentials=credentials, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + self._session = AuthorizedSession( + self._credentials, default_host=self.DEFAULT_HOST + ) + if client_cert_source_for_mtls: + self._session.configure_mtls_channel(client_cert_source_for_mtls) + self._interceptor = interceptor or ControlServiceRestInterceptor() + self._prep_wrapped_messages(client_info) + + class _CreateControl(ControlServiceRestStub): + def __hash__(self): + return hash("CreateControl") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "controlId": "", + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: control_service.CreateControlRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> gcd_control.Control: + r"""Call the create control method over HTTP. + + Args: + request (~.control_service.CreateControlRequest): + The request object. Request for CreateControl method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.gcd_control.Control: + Defines a conditioned behavior to employ during serving. + Must be attached to a + [ServingConfig][google.cloud.discoveryengine.v1beta.ServingConfig] + to be considered at serving time. Permitted actions + dependent on ``SolutionType``.
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1beta/{parent=projects/*/locations/*/dataStores/*}/controls", + "body": "control", + }, + { + "method": "post", + "uri": "/v1beta/{parent=projects/*/locations/*/collections/*/dataStores/*}/controls", + "body": "control", + }, + { + "method": "post", + "uri": "/v1beta/{parent=projects/*/locations/*/collections/*/engines/*}/controls", + "body": "control", + }, + ] + request, metadata = self._interceptor.pre_create_control(request, metadata) + pb_request = control_service.CreateControlRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = gcd_control.Control() + pb_resp = gcd_control.Control.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_create_control(resp) + return resp + + class _DeleteControl(ControlServiceRestStub): + def __hash__(self): + return hash("DeleteControl") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: control_service.DeleteControlRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ): + r"""Call the delete control method over HTTP. + + Args: + request (~.control_service.DeleteControlRequest): + The request object. Request for DeleteControl method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v1beta/{name=projects/*/locations/*/dataStores/*/controls/*}", + }, + { + "method": "delete", + "uri": "/v1beta/{name=projects/*/locations/*/collections/*/dataStores/*/controls/*}", + }, + { + "method": "delete", + "uri": "/v1beta/{name=projects/*/locations/*/collections/*/engines/*/controls/*}", + }, + ] + request, metadata = self._interceptor.pre_delete_control(request, metadata) + pb_request = control_service.DeleteControlRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + class _GetControl(ControlServiceRestStub): + def __hash__(self): + return hash("GetControl") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: control_service.GetControlRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> control.Control: + r"""Call the get control method over HTTP. + + Args: + request (~.control_service.GetControlRequest): + The request object. Request for GetControl method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.control.Control: + Defines a conditioned behavior to employ during serving. + Must be attached to a + [ServingConfig][google.cloud.discoveryengine.v1beta.ServingConfig] + to be considered at serving time. Permitted actions + dependent on ``SolutionType``. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/dataStores/*/controls/*}", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/collections/*/dataStores/*/controls/*}", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/collections/*/engines/*/controls/*}", + }, + ] + request, metadata = self._interceptor.pre_get_control(request, metadata) + pb_request = control_service.GetControlRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = control.Control() + pb_resp = control.Control.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_control(resp) + return resp + + class _ListControls(ControlServiceRestStub): + def __hash__(self): + return hash("ListControls") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: control_service.ListControlsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> control_service.ListControlsResponse: + r"""Call the list controls method over HTTP. + + Args: + request (~.control_service.ListControlsRequest): + The request object. Request for ListControls method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.control_service.ListControlsResponse: + Response for ListControls method. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1beta/{parent=projects/*/locations/*/dataStores/*}/controls", + }, + { + "method": "get", + "uri": "/v1beta/{parent=projects/*/locations/*/collections/*/dataStores/*}/controls", + }, + { + "method": "get", + "uri": "/v1beta/{parent=projects/*/locations/*/collections/*/engines/*}/controls", + }, + ] + request, metadata = self._interceptor.pre_list_controls(request, metadata) + pb_request = control_service.ListControlsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = control_service.ListControlsResponse() + pb_resp = control_service.ListControlsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_controls(resp) + return resp + + class _UpdateControl(ControlServiceRestStub): + def __hash__(self): + return hash("UpdateControl") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: control_service.UpdateControlRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> gcd_control.Control: + r"""Call the update control method over HTTP. + + Args: + request (~.control_service.UpdateControlRequest): + The request object. Request for UpdateControl method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.gcd_control.Control: + Defines a conditioned behavior to employ during serving. + Must be attached to a + [ServingConfig][google.cloud.discoveryengine.v1beta.ServingConfig] + to be considered at serving time. Permitted actions + dependent on ``SolutionType``. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "patch", + "uri": "/v1beta/{control.name=projects/*/locations/*/dataStores/*/controls/*}", + "body": "control", + }, + { + "method": "patch", + "uri": "/v1beta/{control.name=projects/*/locations/*/collections/*/dataStores/*/controls/*}", + "body": "control", + }, + { + "method": "patch", + "uri": "/v1beta/{control.name=projects/*/locations/*/collections/*/engines/*/controls/*}", + "body": "control", + }, + ] + request, metadata = self._interceptor.pre_update_control(request, metadata) + pb_request = control_service.UpdateControlRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = gcd_control.Control() + pb_resp = gcd_control.Control.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_update_control(resp) + return resp + + @property + def create_control( + self, + ) -> Callable[[control_service.CreateControlRequest], gcd_control.Control]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._CreateControl(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete_control( + self, + ) -> Callable[[control_service.DeleteControlRequest], empty_pb2.Empty]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._DeleteControl(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_control( + self, + ) -> Callable[[control_service.GetControlRequest], control.Control]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetControl(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_controls( + self, + ) -> Callable[ + [control_service.ListControlsRequest], control_service.ListControlsResponse + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._ListControls(self._session, self._host, self._interceptor) # type: ignore + + @property + def update_control( + self, + ) -> Callable[[control_service.UpdateControlRequest], gcd_control.Control]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._UpdateControl(self._session, self._host, self._interceptor) # type: ignore + + @property + def cancel_operation(self): + return self._CancelOperation(self._session, self._host, self._interceptor) # type: ignore + + class _CancelOperation(ControlServiceRestStub): + def __call__( + self, + request: operations_pb2.CancelOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Call the cancel operation method over HTTP. + + Args: + request (operations_pb2.CancelOperationRequest): + The request object for CancelOperation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1beta/{name=projects/*/locations/*/collections/*/dataStores/*/branches/*/operations/*}:cancel", + "body": "*", + }, + { + "method": "post", + "uri": "/v1beta/{name=projects/*/locations/*/dataStores/*/branches/*/operations/*}:cancel", + "body": "*", + }, + ] + + request, metadata = self._interceptor.pre_cancel_operation( + request, metadata + ) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + body = json.dumps(transcoded_request["body"]) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + return self._interceptor.post_cancel_operation(None) + + @property + def get_operation(self): + return self._GetOperation(self._session, self._host, self._interceptor) # type: ignore + + class _GetOperation(ControlServiceRestStub): + def __call__( + self, + request: operations_pb2.GetOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the get operation method over HTTP. + + Args: + request (operations_pb2.GetOperationRequest): + The request object for GetOperation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + operations_pb2.Operation: Response from GetOperation method. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/collections/*/dataConnector/operations/*}", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/collections/*/dataStores/*/branches/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/collections/*/dataStores/*/models/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/collections/*/dataStores/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/collections/*/dataStores/*/schemas/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/collections/*/dataStores/*/siteSearchEngine/operations/*}", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/collections/*/dataStores/*/siteSearchEngine/targetSites/operations/*}", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/collections/*/engines/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/collections/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/dataStores/*/branches/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/dataStores/*/models/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/dataStores/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/operations/*}", + }, + ] + + request, metadata = self._interceptor.pre_get_operation(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = operations_pb2.Operation() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_get_operation(resp) + return resp + + @property + def list_operations(self): + return self._ListOperations(self._session, self._host, self._interceptor) # type: ignore + + class _ListOperations(ControlServiceRestStub): + def __call__( + self, + request: operations_pb2.ListOperationsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Call the list operations method over HTTP. + + Args: + request (operations_pb2.ListOperationsRequest): + The request object for ListOperations method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + operations_pb2.ListOperationsResponse: Response from ListOperations method. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/collections/*/dataConnector}/operations", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/collections/*/dataStores/*/branches/*}/operations", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/collections/*/dataStores/*/models/*}/operations", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/collections/*/dataStores/*/schemas/*}/operations", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/collections/*/dataStores/*/siteSearchEngine/targetSites}/operations", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/collections/*/dataStores/*/siteSearchEngine}/operations", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/collections/*/dataStores/*}/operations", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/collections/*/engines/*}/operations", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/collections/*}/operations", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/dataStores/*/branches/*}/operations", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/dataStores/*/models/*}/operations", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/dataStores/*}/operations", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*}/operations", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*}/operations", + }, + ] + + request, metadata = self._interceptor.pre_list_operations(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = operations_pb2.ListOperationsResponse() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_list_operations(resp) + return resp + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__ = ("ControlServiceRestTransport",) diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/conversational_search_service/async_client.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/conversational_search_service/async_client.py index 8f4508627d9f..44f10114c701 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/conversational_search_service/async_client.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/conversational_search_service/async_client.py @@ -1908,6 +1908,63 @@ async def get_operation( # Done; return the response. return response + async def cancel_operation( + self, + request: Optional[operations_pb2.CancelOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Starts asynchronous cancellation on a long-running operation. + + The server makes a best effort to cancel the operation, but success + is not guaranteed. If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.CancelOperationRequest`): + The request object. Request message for + `CancelOperation` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.CancelOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.cancel_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. 
+ await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + async def __aenter__(self) -> "ConversationalSearchServiceAsyncClient": return self diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/conversational_search_service/client.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/conversational_search_service/client.py index 3ee3dbbb9deb..e222a3080a73 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/conversational_search_service/client.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/conversational_search_service/client.py @@ -2429,6 +2429,63 @@ def get_operation( # Done; return the response. return response + def cancel_operation( + self, + request: Optional[operations_pb2.CancelOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Starts asynchronous cancellation on a long-running operation. + + The server makes a best effort to cancel the operation, but success + is not guaranteed. If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.CancelOperationRequest`): + The request object. Request message for + `CancelOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.CancelOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.cancel_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. 
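From a user's point of view, the new mixin is called like any other client method. A minimal sketch, assuming the client is imported from the service package shown in the file paths above and using a hypothetical operation name; with the async client the same call is awaited:

    from google.cloud.discoveryengine_v1beta.services.conversational_search_service import (
        ConversationalSearchServiceClient,
    )
    from google.longrunning import operations_pb2

    client = ConversationalSearchServiceClient()

    # Hypothetical operation name; CancelOperation has no return value.
    client.cancel_operation(
        request=operations_pb2.CancelOperationRequest(
            name=(
                "projects/my-project/locations/global/collections/default_collection"
                "/dataStores/my-data-store/branches/0/operations/import-123"
            )
        )
    )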
+ rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( gapic_version=package_version.__version__ diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/conversational_search_service/transports/base.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/conversational_search_service/transports/base.py index 2b07aafb2682..f8d0ff207136 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/conversational_search_service/transports/base.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/conversational_search_service/transports/base.py @@ -360,6 +360,12 @@ def get_operation( ]: raise NotImplementedError() + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None,]: + raise NotImplementedError() + @property def kind(self) -> str: raise NotImplementedError() diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/conversational_search_service/transports/grpc.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/conversational_search_service/transports/grpc.py index 3ae1aa16d856..67c86c4ef4ac 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/conversational_search_service/transports/grpc.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/conversational_search_service/transports/grpc.py @@ -637,6 +637,23 @@ def list_sessions( def close(self): self.grpc_channel.close() + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None]: + r"""Return a callable for the cancel_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "cancel_operation" not in self._stubs: + self._stubs["cancel_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/CancelOperation", + request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["cancel_operation"] + @property def get_operation( self, diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/conversational_search_service/transports/grpc_asyncio.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/conversational_search_service/transports/grpc_asyncio.py index 50fec772c0f9..7eb6767912e9 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/conversational_search_service/transports/grpc_asyncio.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/conversational_search_service/transports/grpc_asyncio.py @@ -720,6 +720,23 @@ def _prep_wrapped_messages(self, client_info): def close(self): return self.grpc_channel.close() + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None]: + r"""Return a callable for the cancel_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "cancel_operation" not in self._stubs: + self._stubs["cancel_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/CancelOperation", + request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["cancel_operation"] + @property def get_operation( self, diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/conversational_search_service/transports/rest.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/conversational_search_service/transports/rest.py index 22ac99a2bd12..02be7eb7984d 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/conversational_search_service/transports/rest.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/conversational_search_service/transports/rest.py @@ -474,6 +474,27 @@ def post_update_session(self, response: gcd_session.Session) -> gcd_session.Sess """ return response + def pre_cancel_operation( + self, + request: operations_pb2.CancelOperationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[operations_pb2.CancelOperationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for cancel_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the ConversationalSearchService server. + """ + return request, metadata + + def post_cancel_operation(self, response: None) -> None: + """Post-rpc interceptor for cancel_operation + + Override in a subclass to manipulate the response + after it is returned by the ConversationalSearchService server but before + it is returned to user code. + """ + return response + def pre_get_operation( self, request: operations_pb2.GetOperationRequest, @@ -2036,6 +2057,76 @@ def update_session( # In C++ this would require a dynamic_cast return self._UpdateSession(self._session, self._host, self._interceptor) # type: ignore + @property + def cancel_operation(self): + return self._CancelOperation(self._session, self._host, self._interceptor) # type: ignore + + class _CancelOperation(ConversationalSearchServiceRestStub): + def __call__( + self, + request: operations_pb2.CancelOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Call the cancel operation method over HTTP. + + Args: + request (operations_pb2.CancelOperationRequest): + The request object for CancelOperation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1beta/{name=projects/*/locations/*/collections/*/dataStores/*/branches/*/operations/*}:cancel", + "body": "*", + }, + { + "method": "post", + "uri": "/v1beta/{name=projects/*/locations/*/dataStores/*/branches/*/operations/*}:cancel", + "body": "*", + }, + ] + + request, metadata = self._interceptor.pre_cancel_operation( + request, metadata + ) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + body = json.dumps(transcoded_request["body"]) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + return self._interceptor.post_cancel_operation(None) + @property def get_operation(self): return self._GetOperation(self._session, self._host, self._interceptor) # type: ignore diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/data_store_service/async_client.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/data_store_service/async_client.py index 8423ea5f0ed0..41b52f50259b 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/data_store_service/async_client.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/data_store_service/async_client.py @@ -1097,6 +1097,63 @@ async def get_operation( # Done; return the response. return response + async def cancel_operation( + self, + request: Optional[operations_pb2.CancelOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Starts asynchronous cancellation on a long-running operation. + + The server makes a best effort to cancel the operation, but success + is not guaranteed. If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.CancelOperationRequest`): + The request object. Request message for + `CancelOperation` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.CancelOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = gapic_v1.method_async.wrap_method( + self._client._transport.cancel_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + async def __aenter__(self) -> "DataStoreServiceAsyncClient": return self diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/data_store_service/client.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/data_store_service/client.py index 5186c3c941ba..b8fd73efeb50 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/data_store_service/client.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/data_store_service/client.py @@ -1576,6 +1576,63 @@ def get_operation( # Done; return the response. return response + def cancel_operation( + self, + request: Optional[operations_pb2.CancelOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Starts asynchronous cancellation on a long-running operation. + + The server makes a best effort to cancel the operation, but success + is not guaranteed. If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.CancelOperationRequest`): + The request object. Request message for + `CancelOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.CancelOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.cancel_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. 
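On failure the call surfaces a google.api_core.exceptions.GoogleAPICallError subclass mapped from the HTTP or gRPC status, so callers usually wrap it. A sketch with a hypothetical operation name; which concrete subclass is raised (for example NotFound for an unknown operation) depends on the server:

    from google.api_core import exceptions as gapi_exceptions
    from google.cloud.discoveryengine_v1beta.services.data_store_service import (
        DataStoreServiceClient,
    )

    client = DataStoreServiceClient()

    try:
        client.cancel_operation(
            # A dict is coerced into operations_pb2.CancelOperationRequest.
            request={
                "name": (
                    "projects/my-project/locations/global/collections/default_collection"
                    "/dataStores/my-data-store/branches/0/operations/import-123"
                )
            },
            timeout=30.0,  # request timeout in seconds
        )
    except gapi_exceptions.NotFound:
        print("operation not found; nothing to cancel")
    except gapi_exceptions.GoogleAPICallError as exc:
        print(f"cancel failed: {exc}")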
+ rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( gapic_version=package_version.__version__ diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/data_store_service/transports/base.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/data_store_service/transports/base.py index 9dd82b0ddf1a..936b9d82875c 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/data_store_service/transports/base.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/data_store_service/transports/base.py @@ -240,6 +240,12 @@ def get_operation( ]: raise NotImplementedError() + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None,]: + raise NotImplementedError() + @property def kind(self) -> str: raise NotImplementedError() diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/data_store_service/transports/grpc.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/data_store_service/transports/grpc.py index d9b57dfbd414..8199bb5b7bda 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/data_store_service/transports/grpc.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/data_store_service/transports/grpc.py @@ -408,6 +408,23 @@ def update_data_store( def close(self): self.grpc_channel.close() + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None]: + r"""Return a callable for the cancel_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "cancel_operation" not in self._stubs: + self._stubs["cancel_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/CancelOperation", + request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["cancel_operation"] + @property def get_operation( self, diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/data_store_service/transports/grpc_asyncio.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/data_store_service/transports/grpc_asyncio.py index 0df37bff13db..795205e7495e 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/data_store_service/transports/grpc_asyncio.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/data_store_service/transports/grpc_asyncio.py @@ -446,6 +446,23 @@ def _prep_wrapped_messages(self, client_info): def close(self): return self.grpc_channel.close() + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None]: + r"""Return a callable for the cancel_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
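The transport property defined above can also be reached from a constructed client for low-level use; calling it directly bypasses the retry, timeout, and routing-header handling that the client method adds. A sketch with a hypothetical operation name:

    from google.cloud.discoveryengine_v1beta.services.data_store_service import (
        DataStoreServiceClient,
    )
    from google.longrunning import operations_pb2

    client = DataStoreServiceClient(transport="grpc")

    # The cached unary-unary callable created by the property above.
    cancel = client.transport.cancel_operation
    cancel(
        operations_pb2.CancelOperationRequest(
            name=(
                "projects/my-project/locations/global/dataStores/my-data-store"
                "/branches/0/operations/import-123"
            )
        )
    )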
+ if "cancel_operation" not in self._stubs: + self._stubs["cancel_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/CancelOperation", + request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["cancel_operation"] + @property def get_operation( self, diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/data_store_service/transports/rest.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/data_store_service/transports/rest.py index ad561a965433..2336b25a04d0 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/data_store_service/transports/rest.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/data_store_service/transports/rest.py @@ -235,6 +235,27 @@ def post_update_data_store( """ return response + def pre_cancel_operation( + self, + request: operations_pb2.CancelOperationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[operations_pb2.CancelOperationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for cancel_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the DataStoreService server. + """ + return request, metadata + + def post_cancel_operation(self, response: None) -> None: + """Post-rpc interceptor for cancel_operation + + Override in a subclass to manipulate the response + after it is returned by the DataStoreService server but before + it is returned to user code. + """ + return response + def pre_get_operation( self, request: operations_pb2.GetOperationRequest, @@ -391,6 +412,18 @@ def operations_client(self) -> operations_v1.AbstractOperationsClient: # Only create a new client if we do not already have one. if self._operations_client is None: http_options: Dict[str, List[Dict[str, str]]] = { + "google.longrunning.Operations.CancelOperation": [ + { + "method": "post", + "uri": "/v1beta/{name=projects/*/locations/*/collections/*/dataStores/*/branches/*/operations/*}:cancel", + "body": "*", + }, + { + "method": "post", + "uri": "/v1beta/{name=projects/*/locations/*/dataStores/*/branches/*/operations/*}:cancel", + "body": "*", + }, + ], "google.longrunning.Operations.GetOperation": [ { "method": "get", @@ -1064,6 +1097,76 @@ def update_data_store( # In C++ this would require a dynamic_cast return self._UpdateDataStore(self._session, self._host, self._interceptor) # type: ignore + @property + def cancel_operation(self): + return self._CancelOperation(self._session, self._host, self._interceptor) # type: ignore + + class _CancelOperation(DataStoreServiceRestStub): + def __call__( + self, + request: operations_pb2.CancelOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Call the cancel operation method over HTTP. + + Args: + request (operations_pb2.CancelOperationRequest): + The request object for CancelOperation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1beta/{name=projects/*/locations/*/collections/*/dataStores/*/branches/*/operations/*}:cancel", + "body": "*", + }, + { + "method": "post", + "uri": "/v1beta/{name=projects/*/locations/*/dataStores/*/branches/*/operations/*}:cancel", + "body": "*", + }, + ] + + request, metadata = self._interceptor.pre_cancel_operation( + request, metadata + ) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + body = json.dumps(transcoded_request["body"]) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + return self._interceptor.post_cancel_operation(None) + @property def get_operation(self): return self._GetOperation(self._session, self._host, self._interceptor) # type: ignore diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/document_service/async_client.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/document_service/async_client.py index d4da7f5fcfef..f7e14c9d5444 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/document_service/async_client.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/document_service/async_client.py @@ -593,7 +593,7 @@ async def sample_create_document(): document_id (:class:`str`): Required. The ID to use for the [Document][google.cloud.discoveryengine.v1beta.Document], - which will become the final component of the + which becomes the final component of the [Document.name][google.cloud.discoveryengine.v1beta.Document.name]. If the caller does not have permission to create the @@ -741,7 +741,7 @@ async def sample_update_document(): update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): Indicates which fields in the provided imported 'document' to update. - If not set, will by default update all + If not set, by default updates all fields. This corresponds to the ``update_mask`` field @@ -928,8 +928,8 @@ async def import_documents( ) -> operation_async.AsyncOperation: r"""Bulk import of multiple [Document][google.cloud.discoveryengine.v1beta.Document]s. - Request processing may be synchronous. Non-existing items will - be created. + Request processing may be synchronous. Non-existing items are + created. Note: It is possible for a subset of the [Document][google.cloud.discoveryengine.v1beta.Document]s to be @@ -1261,6 +1261,63 @@ async def get_operation( # Done; return the response. 
return response + async def cancel_operation( + self, + request: Optional[operations_pb2.CancelOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Starts asynchronous cancellation on a long-running operation. + + The server makes a best effort to cancel the operation, but success + is not guaranteed. If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.CancelOperationRequest`): + The request object. Request message for + `CancelOperation` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.CancelOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.cancel_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + async def __aenter__(self) -> "DocumentServiceAsyncClient": return self diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/document_service/client.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/document_service/client.py index 4b0fec87e0ab..fab5052b3834 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/document_service/client.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/document_service/client.py @@ -1055,7 +1055,7 @@ def sample_create_document(): document_id (str): Required. The ID to use for the [Document][google.cloud.discoveryengine.v1beta.Document], - which will become the final component of the + which becomes the final component of the [Document.name][google.cloud.discoveryengine.v1beta.Document.name]. If the caller does not have permission to create the @@ -1200,7 +1200,7 @@ def sample_update_document(): update_mask (google.protobuf.field_mask_pb2.FieldMask): Indicates which fields in the provided imported 'document' to update. - If not set, will by default update all + If not set, by default updates all fields. This corresponds to the ``update_mask`` field @@ -1381,8 +1381,8 @@ def import_documents( ) -> operation.Operation: r"""Bulk import of multiple [Document][google.cloud.discoveryengine.v1beta.Document]s. - Request processing may be synchronous. Non-existing items will - be created. + Request processing may be synchronous. Non-existing items are + created. 
Note: It is possible for a subset of the [Document][google.cloud.discoveryengine.v1beta.Document]s to be @@ -1723,6 +1723,63 @@ def get_operation( # Done; return the response. return response + def cancel_operation( + self, + request: Optional[operations_pb2.CancelOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Starts asynchronous cancellation on a long-running operation. + + The server makes a best effort to cancel the operation, but success + is not guaranteed. If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.CancelOperationRequest`): + The request object. Request message for + `CancelOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.CancelOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.cancel_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. 
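Together with the CancelOperation rules registered on the REST operations client below, this is what lets a long-running import be cancelled through the returned handle. A sketch, assuming google.api_core.operation.Operation.cancel() is wired to this RPC (as it is for GAPIC long-running operations) and using hypothetical resource names; the GCS source field names are taken from the v1beta import API and are not part of this diff:

    from google.cloud.discoveryengine_v1beta.services.document_service import (
        DocumentServiceClient,
    )

    client = DocumentServiceClient()

    lro = client.import_documents(
        request={
            "parent": (
                "projects/my-project/locations/global/collections/default_collection"
                "/dataStores/my-data-store/branches/0"
            ),
            # Assumed GCS source fields, for illustration only.
            "gcs_source": {"input_uris": ["gs://my-bucket/documents.jsonl"]},
        }
    )

    # Best-effort: issues google.longrunning.Operations/CancelOperation under the hood.
    lro.cancel()

    # Whether the cancel actually took effect can be checked later.
    print("done:", lro.done(), "cancelled:", lro.cancelled())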
+ rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( gapic_version=package_version.__version__ diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/document_service/transports/base.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/document_service/transports/base.py index 40352ee68bae..0ef1513b4f42 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/document_service/transports/base.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/document_service/transports/base.py @@ -282,6 +282,12 @@ def get_operation( ]: raise NotImplementedError() + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None,]: + raise NotImplementedError() + @property def kind(self) -> str: raise NotImplementedError() diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/document_service/transports/grpc.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/document_service/transports/grpc.py index d8589a0a04e9..692940419fa0 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/document_service/transports/grpc.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/document_service/transports/grpc.py @@ -403,8 +403,8 @@ def import_documents( Bulk import of multiple [Document][google.cloud.discoveryengine.v1beta.Document]s. - Request processing may be synchronous. Non-existing items will - be created. + Request processing may be synchronous. Non-existing items are + created. Note: It is possible for a subset of the [Document][google.cloud.discoveryengine.v1beta.Document]s to be @@ -475,6 +475,23 @@ def purge_documents( def close(self): self.grpc_channel.close() + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None]: + r"""Return a callable for the cancel_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "cancel_operation" not in self._stubs: + self._stubs["cancel_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/CancelOperation", + request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["cancel_operation"] + @property def get_operation( self, diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/document_service/transports/grpc_asyncio.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/document_service/transports/grpc_asyncio.py index 70652ba376db..711acb4e1a84 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/document_service/transports/grpc_asyncio.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/document_service/transports/grpc_asyncio.py @@ -416,8 +416,8 @@ def import_documents( Bulk import of multiple [Document][google.cloud.discoveryengine.v1beta.Document]s. - Request processing may be synchronous. Non-existing items will - be created. + Request processing may be synchronous. 
Non-existing items are + created. Note: It is possible for a subset of the [Document][google.cloud.discoveryengine.v1beta.Document]s to be @@ -539,6 +539,23 @@ def _prep_wrapped_messages(self, client_info): def close(self): return self.grpc_channel.close() + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None]: + r"""Return a callable for the cancel_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "cancel_operation" not in self._stubs: + self._stubs["cancel_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/CancelOperation", + request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["cancel_operation"] + @property def get_operation( self, diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/document_service/transports/rest.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/document_service/transports/rest.py index b4a44db777f1..82f8b1f97d94 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/document_service/transports/rest.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/document_service/transports/rest.py @@ -285,6 +285,27 @@ def post_update_document( """ return response + def pre_cancel_operation( + self, + request: operations_pb2.CancelOperationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[operations_pb2.CancelOperationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for cancel_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the DocumentService server. + """ + return request, metadata + + def post_cancel_operation(self, response: None) -> None: + """Post-rpc interceptor for cancel_operation + + Override in a subclass to manipulate the response + after it is returned by the DocumentService server but before + it is returned to user code. + """ + return response + def pre_get_operation( self, request: operations_pb2.GetOperationRequest, @@ -441,6 +462,18 @@ def operations_client(self) -> operations_v1.AbstractOperationsClient: # Only create a new client if we do not already have one. 
if self._operations_client is None: http_options: Dict[str, List[Dict[str, str]]] = { + "google.longrunning.Operations.CancelOperation": [ + { + "method": "post", + "uri": "/v1beta/{name=projects/*/locations/*/collections/*/dataStores/*/branches/*/operations/*}:cancel", + "body": "*", + }, + { + "method": "post", + "uri": "/v1beta/{name=projects/*/locations/*/dataStores/*/branches/*/operations/*}:cancel", + "body": "*", + }, + ], "google.longrunning.Operations.GetOperation": [ { "method": "get", @@ -1306,6 +1339,76 @@ def update_document( # In C++ this would require a dynamic_cast return self._UpdateDocument(self._session, self._host, self._interceptor) # type: ignore + @property + def cancel_operation(self): + return self._CancelOperation(self._session, self._host, self._interceptor) # type: ignore + + class _CancelOperation(DocumentServiceRestStub): + def __call__( + self, + request: operations_pb2.CancelOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Call the cancel operation method over HTTP. + + Args: + request (operations_pb2.CancelOperationRequest): + The request object for CancelOperation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1beta/{name=projects/*/locations/*/collections/*/dataStores/*/branches/*/operations/*}:cancel", + "body": "*", + }, + { + "method": "post", + "uri": "/v1beta/{name=projects/*/locations/*/dataStores/*/branches/*/operations/*}:cancel", + "body": "*", + }, + ] + + request, metadata = self._interceptor.pre_cancel_operation( + request, metadata + ) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + body = json.dumps(transcoded_request["body"]) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
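The pre/post hooks above make the new RPC customisable per REST transport. A sketch of a logging interceptor; the DocumentServiceRestInterceptor and DocumentServiceRestTransport names and the interceptor keyword follow the usual GAPIC REST layout and are assumed here, since only the hook methods appear in this diff:

    from google.cloud.discoveryengine_v1beta.services.document_service import (
        DocumentServiceClient,
    )
    from google.cloud.discoveryengine_v1beta.services.document_service.transports.rest import (
        DocumentServiceRestInterceptor,
        DocumentServiceRestTransport,
    )


    class LoggingInterceptor(DocumentServiceRestInterceptor):
        def pre_cancel_operation(self, request, metadata):
            # Runs before the HTTP request is sent; metadata entries become headers.
            print("cancelling", request.name)
            return request, tuple(metadata) + (("x-example-caller", "nightly-import"),)

        def post_cancel_operation(self, response):
            # CancelOperation has an empty response, so there is nothing to rewrite.
            return response


    client = DocumentServiceClient(
        transport=DocumentServiceRestTransport(interceptor=LoggingInterceptor())
    )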
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + return self._interceptor.post_cancel_operation(None) + @property def get_operation(self): return self._GetOperation(self._session, self._host, self._interceptor) # type: ignore diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/engine_service/async_client.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/engine_service/async_client.py index 00d90d811cf1..51e40351c299 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/engine_service/async_client.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/engine_service/async_client.py @@ -1396,6 +1396,63 @@ async def get_operation( # Done; return the response. return response + async def cancel_operation( + self, + request: Optional[operations_pb2.CancelOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Starts asynchronous cancellation on a long-running operation. + + The server makes a best effort to cancel the operation, but success + is not guaranteed. If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.CancelOperationRequest`): + The request object. Request message for + `CancelOperation` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.CancelOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.cancel_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + async def __aenter__(self) -> "EngineServiceAsyncClient": return self diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/engine_service/client.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/engine_service/client.py index 93032e37a856..e16d5ef45300 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/engine_service/client.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/engine_service/client.py @@ -1827,6 +1827,63 @@ def get_operation( # Done; return the response. 
return response + def cancel_operation( + self, + request: Optional[operations_pb2.CancelOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Starts asynchronous cancellation on a long-running operation. + + The server makes a best effort to cancel the operation, but success + is not guaranteed. If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.CancelOperationRequest`): + The request object. Request message for + `CancelOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.CancelOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.cancel_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( gapic_version=package_version.__version__ diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/engine_service/transports/base.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/engine_service/transports/base.py index aa37bb6b9f8d..cd8d68938170 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/engine_service/transports/base.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/engine_service/transports/base.py @@ -282,6 +282,12 @@ def get_operation( ]: raise NotImplementedError() + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None,]: + raise NotImplementedError() + @property def kind(self) -> str: raise NotImplementedError() diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/engine_service/transports/grpc.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/engine_service/transports/grpc.py index 29e939acb63e..c2884a2a005f 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/engine_service/transports/grpc.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/engine_service/transports/grpc.py @@ -475,6 +475,23 @@ def tune_engine( def close(self): self.grpc_channel.close() + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None]: + r"""Return a callable for the cancel_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which 
will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "cancel_operation" not in self._stubs: + self._stubs["cancel_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/CancelOperation", + request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["cancel_operation"] + @property def get_operation( self, diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/engine_service/transports/grpc_asyncio.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/engine_service/transports/grpc_asyncio.py index 1a6ff22393e2..668dd741c49f 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/engine_service/transports/grpc_asyncio.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/engine_service/transports/grpc_asyncio.py @@ -535,6 +535,23 @@ def _prep_wrapped_messages(self, client_info): def close(self): return self.grpc_channel.close() + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None]: + r"""Return a callable for the cancel_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "cancel_operation" not in self._stubs: + self._stubs["cancel_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/CancelOperation", + request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["cancel_operation"] + @property def get_operation( self, diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/engine_service/transports/rest.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/engine_service/transports/rest.py index 214127a9f05c..c2b45b15493f 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/engine_service/transports/rest.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/engine_service/transports/rest.py @@ -320,6 +320,27 @@ def post_update_engine(self, response: gcd_engine.Engine) -> gcd_engine.Engine: """ return response + def pre_cancel_operation( + self, + request: operations_pb2.CancelOperationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[operations_pb2.CancelOperationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for cancel_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the EngineService server. + """ + return request, metadata + + def post_cancel_operation(self, response: None) -> None: + """Post-rpc interceptor for cancel_operation + + Override in a subclass to manipulate the response + after it is returned by the EngineService server but before + it is returned to user code. + """ + return response + def pre_get_operation( self, request: operations_pb2.GetOperationRequest, @@ -475,6 +496,18 @@ def operations_client(self) -> operations_v1.AbstractOperationsClient: # Only create a new client if we do not already have one. 
if self._operations_client is None: http_options: Dict[str, List[Dict[str, str]]] = { + "google.longrunning.Operations.CancelOperation": [ + { + "method": "post", + "uri": "/v1beta/{name=projects/*/locations/*/collections/*/dataStores/*/branches/*/operations/*}:cancel", + "body": "*", + }, + { + "method": "post", + "uri": "/v1beta/{name=projects/*/locations/*/dataStores/*/branches/*/operations/*}:cancel", + "body": "*", + }, + ], "google.longrunning.Operations.GetOperation": [ { "method": "get", @@ -1423,6 +1456,76 @@ def update_engine( # In C++ this would require a dynamic_cast return self._UpdateEngine(self._session, self._host, self._interceptor) # type: ignore + @property + def cancel_operation(self): + return self._CancelOperation(self._session, self._host, self._interceptor) # type: ignore + + class _CancelOperation(EngineServiceRestStub): + def __call__( + self, + request: operations_pb2.CancelOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Call the cancel operation method over HTTP. + + Args: + request (operations_pb2.CancelOperationRequest): + The request object for CancelOperation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1beta/{name=projects/*/locations/*/collections/*/dataStores/*/branches/*/operations/*}:cancel", + "body": "*", + }, + { + "method": "post", + "uri": "/v1beta/{name=projects/*/locations/*/dataStores/*/branches/*/operations/*}:cancel", + "body": "*", + }, + ] + + request, metadata = self._interceptor.pre_cancel_operation( + request, metadata + ) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + body = json.dumps(transcoded_request["body"]) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + return self._interceptor.post_cancel_operation(None) + @property def get_operation(self): return self._GetOperation(self._session, self._host, self._interceptor) # type: ignore diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/grounded_generation_service/async_client.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/grounded_generation_service/async_client.py index 8fbd1a8a2339..eae9a5cff1b9 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/grounded_generation_service/async_client.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/grounded_generation_service/async_client.py @@ -483,6 +483,63 @@ async def get_operation( # Done; return the response. return response + async def cancel_operation( + self, + request: Optional[operations_pb2.CancelOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Starts asynchronous cancellation on a long-running operation. + + The server makes a best effort to cancel the operation, but success + is not guaranteed. If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.CancelOperationRequest`): + The request object. Request message for + `CancelOperation` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.CancelOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.cancel_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + async def __aenter__(self) -> "GroundedGenerationServiceAsyncClient": return self diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/grounded_generation_service/client.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/grounded_generation_service/client.py index 5337aee1250d..9c449bd30209 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/grounded_generation_service/client.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/grounded_generation_service/client.py @@ -906,6 +906,63 @@ def get_operation( # Done; return the response. 
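Because cancellation is best-effort (see the docstrings above), a caller that needs confirmation can poll get_operation and inspect the terminal status. A sketch with a hypothetical operation name; a successfully cancelled operation ends with the standard CANCELLED error code:

    import time

    from google.cloud.discoveryengine_v1beta.services.engine_service import (
        EngineServiceClient,
    )
    from google.rpc import code_pb2

    client = EngineServiceClient()
    name = (
        "projects/my-project/locations/global/collections/default_collection"
        "/dataStores/my-data-store/branches/0/operations/import-123"
    )

    client.cancel_operation(request={"name": name})

    # Poll until the operation reaches a terminal state.
    op = client.get_operation(request={"name": name})
    while not op.done:
        time.sleep(2)
        op = client.get_operation(request={"name": name})

    if op.error.code == code_pb2.CANCELLED:
        print("operation was cancelled")
    else:
        print("operation completed before the cancel took effect")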
return response + def cancel_operation( + self, + request: Optional[operations_pb2.CancelOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Starts asynchronous cancellation on a long-running operation. + + The server makes a best effort to cancel the operation, but success + is not guaranteed. If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.CancelOperationRequest`): + The request object. Request message for + `CancelOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.CancelOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.cancel_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( gapic_version=package_version.__version__ diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/grounded_generation_service/transports/base.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/grounded_generation_service/transports/base.py index 98d9f091b6ad..f057e804b638 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/grounded_generation_service/transports/base.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/grounded_generation_service/transports/base.py @@ -177,6 +177,12 @@ def get_operation( ]: raise NotImplementedError() + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None,]: + raise NotImplementedError() + @property def kind(self) -> str: raise NotImplementedError() diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/grounded_generation_service/transports/grpc.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/grounded_generation_service/transports/grpc.py index d0d85e728773..27d7ff1b66de 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/grounded_generation_service/transports/grpc.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/grounded_generation_service/transports/grpc.py @@ -267,6 +267,23 @@ def check_grounding( def close(self): self.grpc_channel.close() + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None]: + r"""Return 
a callable for the cancel_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "cancel_operation" not in self._stubs: + self._stubs["cancel_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/CancelOperation", + request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["cancel_operation"] + @property def get_operation( self, diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/grounded_generation_service/transports/grpc_asyncio.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/grounded_generation_service/transports/grpc_asyncio.py index 76ee68eda59c..e4b6b877c0df 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/grounded_generation_service/transports/grpc_asyncio.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/grounded_generation_service/transports/grpc_asyncio.py @@ -281,6 +281,23 @@ def _prep_wrapped_messages(self, client_info): def close(self): return self.grpc_channel.close() + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None]: + r"""Return a callable for the cancel_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "cancel_operation" not in self._stubs: + self._stubs["cancel_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/CancelOperation", + request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["cancel_operation"] + @property def get_operation( self, diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/grounded_generation_service/transports/rest.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/grounded_generation_service/transports/rest.py index e46e5a34e079..0d9dc3f49f17 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/grounded_generation_service/transports/rest.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/grounded_generation_service/transports/rest.py @@ -105,6 +105,27 @@ def post_check_grounding( """ return response + def pre_cancel_operation( + self, + request: operations_pb2.CancelOperationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[operations_pb2.CancelOperationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for cancel_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the GroundedGenerationService server. + """ + return request, metadata + + def post_cancel_operation(self, response: None) -> None: + """Post-rpc interceptor for cancel_operation + + Override in a subclass to manipulate the response + after it is returned by the GroundedGenerationService server but before + it is returned to user code. 
+ """ + return response + def pre_get_operation( self, request: operations_pb2.GetOperationRequest, @@ -356,6 +377,76 @@ def check_grounding( # In C++ this would require a dynamic_cast return self._CheckGrounding(self._session, self._host, self._interceptor) # type: ignore + @property + def cancel_operation(self): + return self._CancelOperation(self._session, self._host, self._interceptor) # type: ignore + + class _CancelOperation(GroundedGenerationServiceRestStub): + def __call__( + self, + request: operations_pb2.CancelOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Call the cancel operation method over HTTP. + + Args: + request (operations_pb2.CancelOperationRequest): + The request object for CancelOperation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1beta/{name=projects/*/locations/*/collections/*/dataStores/*/branches/*/operations/*}:cancel", + "body": "*", + }, + { + "method": "post", + "uri": "/v1beta/{name=projects/*/locations/*/dataStores/*/branches/*/operations/*}:cancel", + "body": "*", + }, + ] + + request, metadata = self._interceptor.pre_cancel_operation( + request, metadata + ) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + body = json.dumps(transcoded_request["body"]) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + return self._interceptor.post_cancel_operation(None) + @property def get_operation(self): return self._GetOperation(self._session, self._host, self._interceptor) # type: ignore diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/project_service/__init__.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/project_service/__init__.py new file mode 100644 index 000000000000..245aea0dfd81 --- /dev/null +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/project_service/__init__.py @@ -0,0 +1,22 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from .async_client import ProjectServiceAsyncClient +from .client import ProjectServiceClient + +__all__ = ( + "ProjectServiceClient", + "ProjectServiceAsyncClient", +) diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/project_service/async_client.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/project_service/async_client.py new file mode 100644 index 000000000000..df503d3ae8ce --- /dev/null +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/project_service/async_client.py @@ -0,0 +1,581 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +import functools +import re +from typing import ( + Callable, + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, +) + +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry_async as retries +from google.api_core.client_options import ClientOptions +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.discoveryengine_v1beta import gapic_version as package_version + +try: + OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.AsyncRetry, object, None] # type: ignore + +from google.api_core import operation # type: ignore +from google.api_core import operation_async # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore + +from google.cloud.discoveryengine_v1beta.types import project, project_service + +from .client import ProjectServiceClient +from .transports.base import DEFAULT_CLIENT_INFO, ProjectServiceTransport +from .transports.grpc_asyncio import ProjectServiceGrpcAsyncIOTransport + + +class ProjectServiceAsyncClient: + """Service for operations on the + [Project][google.cloud.discoveryengine.v1beta.Project]. + """ + + _client: ProjectServiceClient + + # Copy defaults from the synchronous client for use here. + # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. 
+ DEFAULT_ENDPOINT = ProjectServiceClient.DEFAULT_ENDPOINT + DEFAULT_MTLS_ENDPOINT = ProjectServiceClient.DEFAULT_MTLS_ENDPOINT + _DEFAULT_ENDPOINT_TEMPLATE = ProjectServiceClient._DEFAULT_ENDPOINT_TEMPLATE + _DEFAULT_UNIVERSE = ProjectServiceClient._DEFAULT_UNIVERSE + + project_path = staticmethod(ProjectServiceClient.project_path) + parse_project_path = staticmethod(ProjectServiceClient.parse_project_path) + common_billing_account_path = staticmethod( + ProjectServiceClient.common_billing_account_path + ) + parse_common_billing_account_path = staticmethod( + ProjectServiceClient.parse_common_billing_account_path + ) + common_folder_path = staticmethod(ProjectServiceClient.common_folder_path) + parse_common_folder_path = staticmethod( + ProjectServiceClient.parse_common_folder_path + ) + common_organization_path = staticmethod( + ProjectServiceClient.common_organization_path + ) + parse_common_organization_path = staticmethod( + ProjectServiceClient.parse_common_organization_path + ) + common_project_path = staticmethod(ProjectServiceClient.common_project_path) + parse_common_project_path = staticmethod( + ProjectServiceClient.parse_common_project_path + ) + common_location_path = staticmethod(ProjectServiceClient.common_location_path) + parse_common_location_path = staticmethod( + ProjectServiceClient.parse_common_location_path + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + ProjectServiceAsyncClient: The constructed client. + """ + return ProjectServiceClient.from_service_account_info.__func__(ProjectServiceAsyncClient, info, *args, **kwargs) # type: ignore + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + ProjectServiceAsyncClient: The constructed client. + """ + return ProjectServiceClient.from_service_account_file.__func__(ProjectServiceAsyncClient, filename, *args, **kwargs) # type: ignore + + from_service_account_json = from_service_account_file + + @classmethod + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[ClientOptions] = None + ): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. 
+ + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + return ProjectServiceClient.get_mtls_endpoint_and_cert_source(client_options) # type: ignore + + @property + def transport(self) -> ProjectServiceTransport: + """Returns the transport used by the client instance. + + Returns: + ProjectServiceTransport: The transport used by the client instance. + """ + return self._client.transport + + @property + def api_endpoint(self): + """Return the API endpoint used by the client instance. + + Returns: + str: The API endpoint used by the client instance. + """ + return self._client._api_endpoint + + @property + def universe_domain(self) -> str: + """Return the universe domain used by the client instance. + + Returns: + str: The universe domain used + by the client instance. + """ + return self._client._universe_domain + + get_transport_class = functools.partial( + type(ProjectServiceClient).get_transport_class, type(ProjectServiceClient) + ) + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[ + Union[str, ProjectServiceTransport, Callable[..., ProjectServiceTransport]] + ] = "grpc_asyncio", + client_options: Optional[ClientOptions] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the project service async client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Optional[Union[str,ProjectServiceTransport,Callable[..., ProjectServiceTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport to use. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the ProjectServiceTransport constructor. + If set to None, a transport is chosen automatically. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): + Custom options for the client. + + 1. The ``api_endpoint`` property can be used to override the + default endpoint provided by the client when ``transport`` is + not explicitly provided. Only if this property is not set and + ``transport`` was not explicitly provided, the endpoint is + determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment + variable, which have one of the following values: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto-switch to the + default mTLS endpoint if client certificate is present; this is + the default value). + + 2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide a client certificate for mTLS transport. If + not provided, the default SSL client certificate will be used if + present. 
If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + 3. The ``universe_domain`` property can be used to override the + default "googleapis.com" universe. Note that ``api_endpoint`` + property still takes precedence; and ``universe_domain`` is + currently not supported for mTLS. + + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + """ + self._client = ProjectServiceClient( + credentials=credentials, + transport=transport, + client_options=client_options, + client_info=client_info, + ) + + async def provision_project( + self, + request: Optional[Union[project_service.ProvisionProjectRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Provisions the project resource. During the process, related + systems will get prepared and initialized. + + Caller must read the `Terms for data + use `__, and + optionally specify in request to provide consent to that service + terms. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import discoveryengine_v1beta + + async def sample_provision_project(): + # Create a client + client = discoveryengine_v1beta.ProjectServiceAsyncClient() + + # Initialize request argument(s) + request = discoveryengine_v1beta.ProvisionProjectRequest( + name="name_value", + accept_data_use_terms=True, + data_use_terms_version="data_use_terms_version_value", + ) + + # Make the request + operation = client.provision_project(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.discoveryengine_v1beta.types.ProvisionProjectRequest, dict]]): + The request object. Request for + [ProjectService.ProvisionProject][google.cloud.discoveryengine.v1beta.ProjectService.ProvisionProject] + method. + name (:class:`str`): + Required. Full resource name of a + [Project][google.cloud.discoveryengine.v1beta.Project], + such as ``projects/{project_id_or_number}``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. 
+ + The result type for the operation will be + :class:`google.cloud.discoveryengine_v1beta.types.Project` + Metadata and configurations for a Google Cloud project + in the service. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, project_service.ProvisionProjectRequest): + request = project_service.ProvisionProjectRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.provision_project + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + project.Project, + metadata_type=project_service.ProvisionProjectMetadata, + ) + + # Done; return the response. + return response + + async def list_operations( + self, + request: Optional[operations_pb2.ListOperationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Lists operations that match the specified filter in the request. + + Args: + request (:class:`~.operations_pb2.ListOperationsRequest`): + The request object. Request message for + `ListOperations` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.ListOperationsResponse: + Response message for ``ListOperations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.ListOperationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_operations, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. 
+ response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_operation( + self, + request: Optional[operations_pb2.GetOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. + + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def cancel_operation( + self, + request: Optional[operations_pb2.CancelOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Starts asynchronous cancellation on a long-running operation. + + The server makes a best effort to cancel the operation, but success + is not guaranteed. If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.CancelOperationRequest`): + The request object. Request message for + `CancelOperation` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.CancelOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.cancel_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def __aenter__(self) -> "ProjectServiceAsyncClient": + return self + + async def __aexit__(self, exc_type, exc, tb): + await self.transport.close() + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("ProjectServiceAsyncClient",) diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/project_service/client.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/project_service/client.py new file mode 100644 index 000000000000..79d0321f3010 --- /dev/null +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/project_service/client.py @@ -0,0 +1,998 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +import os +import re +from typing import ( + Callable, + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, + cast, +) +import warnings + +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.discoveryengine_v1beta import gapic_version as package_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + +from google.api_core import operation # type: ignore +from google.api_core import operation_async # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore + +from google.cloud.discoveryengine_v1beta.types import project, project_service + +from .transports.base import DEFAULT_CLIENT_INFO, ProjectServiceTransport +from .transports.grpc import ProjectServiceGrpcTransport +from .transports.grpc_asyncio import ProjectServiceGrpcAsyncIOTransport +from .transports.rest import ProjectServiceRestTransport + + +class ProjectServiceClientMeta(type): + """Metaclass for the ProjectService client. + + This provides class-level methods for building and retrieving + support objects (e.g. 
transport) without polluting the client instance + objects. + """ + + _transport_registry = ( + OrderedDict() + ) # type: Dict[str, Type[ProjectServiceTransport]] + _transport_registry["grpc"] = ProjectServiceGrpcTransport + _transport_registry["grpc_asyncio"] = ProjectServiceGrpcAsyncIOTransport + _transport_registry["rest"] = ProjectServiceRestTransport + + def get_transport_class( + cls, + label: Optional[str] = None, + ) -> Type[ProjectServiceTransport]: + """Returns an appropriate transport class. + + Args: + label: The name of the desired transport. If none is + provided, then the first transport in the registry is used. + + Returns: + The transport class to use. + """ + # If a specific transport is requested, return that one. + if label: + return cls._transport_registry[label] + + # No transport is requested; return the default (that is, the first one + # in the dictionary). + return next(iter(cls._transport_registry.values())) + + +class ProjectServiceClient(metaclass=ProjectServiceClientMeta): + """Service for operations on the + [Project][google.cloud.discoveryengine.v1beta.Project]. + """ + + @staticmethod + def _get_default_mtls_endpoint(api_endpoint): + """Converts api endpoint to mTLS endpoint. + + Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to + "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. + Args: + api_endpoint (Optional[str]): the api endpoint to convert. + Returns: + str: converted mTLS api endpoint. + """ + if not api_endpoint: + return api_endpoint + + mtls_endpoint_re = re.compile( + r"(?P<name>[^.]+)(?P<mtls>\.mtls)?(?P<sandbox>\.sandbox)?(?P<googledomain>\.googleapis\.com)?" + ) + + m = mtls_endpoint_re.match(api_endpoint) + name, mtls, sandbox, googledomain = m.groups() + if mtls or not googledomain: + return api_endpoint + + if sandbox: + return api_endpoint.replace( + "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" + ) + + return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") + + # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. + DEFAULT_ENDPOINT = "discoveryengine.googleapis.com" + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore + DEFAULT_ENDPOINT + ) + + _DEFAULT_ENDPOINT_TEMPLATE = "discoveryengine.{UNIVERSE_DOMAIN}" + _DEFAULT_UNIVERSE = "googleapis.com" + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + ProjectServiceClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + ProjectServiceClient: The constructed client.
+ """ + credentials = service_account.Credentials.from_service_account_file(filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> ProjectServiceTransport: + """Returns the transport used by the client instance. + + Returns: + ProjectServiceTransport: The transport used by the client + instance. + """ + return self._transport + + @staticmethod + def project_path( + project: str, + ) -> str: + """Returns a fully-qualified project string.""" + return "projects/{project}".format( + project=project, + ) + + @staticmethod + def parse_project_path(path: str) -> Dict[str, str]: + """Parses a project path into its component segments.""" + m = re.match(r"^projects/(?P<project>.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_billing_account_path( + billing_account: str, + ) -> str: + """Returns a fully-qualified billing_account string.""" + return "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + + @staticmethod + def parse_common_billing_account_path(path: str) -> Dict[str, str]: + """Parse a billing_account path into its component segments.""" + m = re.match(r"^billingAccounts/(?P<billing_account>.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_folder_path( + folder: str, + ) -> str: + """Returns a fully-qualified folder string.""" + return "folders/{folder}".format( + folder=folder, + ) + + @staticmethod + def parse_common_folder_path(path: str) -> Dict[str, str]: + """Parse a folder path into its component segments.""" + m = re.match(r"^folders/(?P<folder>.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_organization_path( + organization: str, + ) -> str: + """Returns a fully-qualified organization string.""" + return "organizations/{organization}".format( + organization=organization, + ) + + @staticmethod + def parse_common_organization_path(path: str) -> Dict[str, str]: + """Parse a organization path into its component segments.""" + m = re.match(r"^organizations/(?P<organization>.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_project_path( + project: str, + ) -> str: + """Returns a fully-qualified project string.""" + return "projects/{project}".format( + project=project, + ) + + @staticmethod + def parse_common_project_path(path: str) -> Dict[str, str]: + """Parse a project path into its component segments.""" + m = re.match(r"^projects/(?P<project>.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_location_path( + project: str, + location: str, + ) -> str: + """Returns a fully-qualified location string.""" + return "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) + + @staticmethod + def parse_common_location_path(path: str) -> Dict[str, str]: + """Parse a location path into its component segments.""" + m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)$", path) + return m.groupdict() if m else {} + + @classmethod + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[client_options_lib.ClientOptions] = None + ): + """Deprecated. Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None.
+ (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + + warnings.warn( + "get_mtls_endpoint_and_cert_source is deprecated. Use the api_endpoint property instead.", + DeprecationWarning, + ) + if client_options is None: + client_options = client_options_lib.ClientOptions() + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_client_cert not in ("true", "false"): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError( + "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Figure out the client cert source to use. + client_cert_source = None + if use_client_cert == "true": + if client_options.client_cert_source: + client_cert_source = client_options.client_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + + # Figure out which api endpoint to use. + if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + elif use_mtls_endpoint == "always" or ( + use_mtls_endpoint == "auto" and client_cert_source + ): + api_endpoint = cls.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = cls.DEFAULT_ENDPOINT + + return api_endpoint, client_cert_source + + @staticmethod + def _read_environment_variables(): + """Returns the environment variables used by the client. + + Returns: + Tuple[bool, str, str]: returns the GOOGLE_API_USE_CLIENT_CERTIFICATE, + GOOGLE_API_USE_MTLS_ENDPOINT, and GOOGLE_CLOUD_UNIVERSE_DOMAIN environment variables. + + Raises: + ValueError: If GOOGLE_API_USE_CLIENT_CERTIFICATE is not + any of ["true", "false"]. + google.auth.exceptions.MutualTLSChannelError: If GOOGLE_API_USE_MTLS_ENDPOINT + is not any of ["auto", "never", "always"]. 
+ """ + use_client_cert = os.getenv( + "GOOGLE_API_USE_CLIENT_CERTIFICATE", "false" + ).lower() + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto").lower() + universe_domain_env = os.getenv("GOOGLE_CLOUD_UNIVERSE_DOMAIN") + if use_client_cert not in ("true", "false"): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError( + "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + return use_client_cert == "true", use_mtls_endpoint, universe_domain_env + + @staticmethod + def _get_client_cert_source(provided_cert_source, use_cert_flag): + """Return the client cert source to be used by the client. + + Args: + provided_cert_source (bytes): The client certificate source provided. + use_cert_flag (bool): A flag indicating whether to use the client certificate. + + Returns: + bytes or None: The client cert source to be used by the client. + """ + client_cert_source = None + if use_cert_flag: + if provided_cert_source: + client_cert_source = provided_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + return client_cert_source + + @staticmethod + def _get_api_endpoint( + api_override, client_cert_source, universe_domain, use_mtls_endpoint + ): + """Return the API endpoint used by the client. + + Args: + api_override (str): The API endpoint override. If specified, this is always + the return value of this function and the other arguments are not used. + client_cert_source (bytes): The client certificate source used by the client. + universe_domain (str): The universe domain used by the client. + use_mtls_endpoint (str): How to use the mTLS endpoint, which depends also on the other parameters. + Possible values are "always", "auto", or "never". + + Returns: + str: The API endpoint to be used by the client. + """ + if api_override is not None: + api_endpoint = api_override + elif use_mtls_endpoint == "always" or ( + use_mtls_endpoint == "auto" and client_cert_source + ): + _default_universe = ProjectServiceClient._DEFAULT_UNIVERSE + if universe_domain != _default_universe: + raise MutualTLSChannelError( + f"mTLS is not supported in any universe other than {_default_universe}." + ) + api_endpoint = ProjectServiceClient.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = ProjectServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=universe_domain + ) + return api_endpoint + + @staticmethod + def _get_universe_domain( + client_universe_domain: Optional[str], universe_domain_env: Optional[str] + ) -> str: + """Return the universe domain used by the client. + + Args: + client_universe_domain (Optional[str]): The universe domain configured via the client options. + universe_domain_env (Optional[str]): The universe domain configured via the "GOOGLE_CLOUD_UNIVERSE_DOMAIN" environment variable. + + Returns: + str: The universe domain to be used by the client. + + Raises: + ValueError: If the universe domain is an empty string. 
+ """ + universe_domain = ProjectServiceClient._DEFAULT_UNIVERSE + if client_universe_domain is not None: + universe_domain = client_universe_domain + elif universe_domain_env is not None: + universe_domain = universe_domain_env + if len(universe_domain.strip()) == 0: + raise ValueError("Universe Domain cannot be an empty string.") + return universe_domain + + @staticmethod + def _compare_universes( + client_universe: str, credentials: ga_credentials.Credentials + ) -> bool: + """Returns True iff the universe domains used by the client and credentials match. + + Args: + client_universe (str): The universe domain configured via the client options. + credentials (ga_credentials.Credentials): The credentials being used in the client. + + Returns: + bool: True iff client_universe matches the universe in credentials. + + Raises: + ValueError: when client_universe does not match the universe in credentials. + """ + + default_universe = ProjectServiceClient._DEFAULT_UNIVERSE + credentials_universe = getattr(credentials, "universe_domain", default_universe) + + if client_universe != credentials_universe: + raise ValueError( + "The configured universe domain " + f"({client_universe}) does not match the universe domain " + f"found in the credentials ({credentials_universe}). " + "If you haven't configured the universe domain explicitly, " + f"`{default_universe}` is the default." + ) + return True + + def _validate_universe_domain(self): + """Validates client's and credentials' universe domains are consistent. + + Returns: + bool: True iff the configured universe domain is valid. + + Raises: + ValueError: If the configured universe domain is not valid. + """ + self._is_universe_domain_valid = ( + self._is_universe_domain_valid + or ProjectServiceClient._compare_universes( + self.universe_domain, self.transport._credentials + ) + ) + return self._is_universe_domain_valid + + @property + def api_endpoint(self): + """Return the API endpoint used by the client instance. + + Returns: + str: The API endpoint used by the client instance. + """ + return self._api_endpoint + + @property + def universe_domain(self) -> str: + """Return the universe domain used by the client instance. + + Returns: + str: The universe domain used by the client instance. + """ + return self._universe_domain + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[ + Union[str, ProjectServiceTransport, Callable[..., ProjectServiceTransport]] + ] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the project service client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Optional[Union[str,ProjectServiceTransport,Callable[..., ProjectServiceTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the ProjectServiceTransport constructor. + If set to None, a transport is chosen automatically. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): + Custom options for the client. + + 1. 
The ``api_endpoint`` property can be used to override the + default endpoint provided by the client when ``transport`` is + not explicitly provided. Only if this property is not set and + ``transport`` was not explicitly provided, the endpoint is + determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment + variable, which have one of the following values: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto-switch to the + default mTLS endpoint if client certificate is present; this is + the default value). + + 2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide a client certificate for mTLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + 3. The ``universe_domain`` property can be used to override the + default "googleapis.com" universe. Note that the ``api_endpoint`` + property still takes precedence; and ``universe_domain`` is + currently not supported for mTLS. + + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + """ + self._client_options = client_options + if isinstance(self._client_options, dict): + self._client_options = client_options_lib.from_dict(self._client_options) + if self._client_options is None: + self._client_options = client_options_lib.ClientOptions() + self._client_options = cast( + client_options_lib.ClientOptions, self._client_options + ) + + universe_domain_opt = getattr(self._client_options, "universe_domain", None) + + ( + self._use_client_cert, + self._use_mtls_endpoint, + self._universe_domain_env, + ) = ProjectServiceClient._read_environment_variables() + self._client_cert_source = ProjectServiceClient._get_client_cert_source( + self._client_options.client_cert_source, self._use_client_cert + ) + self._universe_domain = ProjectServiceClient._get_universe_domain( + universe_domain_opt, self._universe_domain_env + ) + self._api_endpoint = None # updated below, depending on `transport` + + # Initialize the universe domain validation. + self._is_universe_domain_valid = False + + api_key_value = getattr(self._client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError( + "client_options.api_key and credentials are mutually exclusive" + ) + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + transport_provided = isinstance(transport, ProjectServiceTransport) + if transport_provided: + # transport is a ProjectServiceTransport instance. + if credentials or self._client_options.credentials_file or api_key_value: + raise ValueError( + "When providing a transport instance, " + "provide its credentials directly." + ) + if self._client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." 
+ ) + self._transport = cast(ProjectServiceTransport, transport) + self._api_endpoint = self._transport.host + + self._api_endpoint = ( + self._api_endpoint + or ProjectServiceClient._get_api_endpoint( + self._client_options.api_endpoint, + self._client_cert_source, + self._universe_domain, + self._use_mtls_endpoint, + ) + ) + + if not transport_provided: + import google.auth._default # type: ignore + + if api_key_value and hasattr( + google.auth._default, "get_api_key_credentials" + ): + credentials = google.auth._default.get_api_key_credentials( + api_key_value + ) + + transport_init: Union[ + Type[ProjectServiceTransport], Callable[..., ProjectServiceTransport] + ] = ( + type(self).get_transport_class(transport) + if isinstance(transport, str) or transport is None + else cast(Callable[..., ProjectServiceTransport], transport) + ) + # initialize with the provided callable or the passed in class + self._transport = transport_init( + credentials=credentials, + credentials_file=self._client_options.credentials_file, + host=self._api_endpoint, + scopes=self._client_options.scopes, + client_cert_source_for_mtls=self._client_cert_source, + quota_project_id=self._client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + api_audience=self._client_options.api_audience, + ) + + def provision_project( + self, + request: Optional[Union[project_service.ProvisionProjectRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Provisions the project resource. During the process, related + systems will get prepared and initialized. + + Caller must read the `Terms for data + use `__, and + optionally specify in request to provide consent to that service + terms. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import discoveryengine_v1beta + + def sample_provision_project(): + # Create a client + client = discoveryengine_v1beta.ProjectServiceClient() + + # Initialize request argument(s) + request = discoveryengine_v1beta.ProvisionProjectRequest( + name="name_value", + accept_data_use_terms=True, + data_use_terms_version="data_use_terms_version_value", + ) + + # Make the request + operation = client.provision_project(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.discoveryengine_v1beta.types.ProvisionProjectRequest, dict]): + The request object. Request for + [ProjectService.ProvisionProject][google.cloud.discoveryengine.v1beta.ProjectService.ProvisionProject] + method. + name (str): + Required. Full resource name of a + [Project][google.cloud.discoveryengine.v1beta.Project], + such as ``projects/{project_id_or_number}``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.discoveryengine_v1beta.types.Project` + Metadata and configurations for a Google Cloud project + in the service. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, project_service.ProvisionProjectRequest): + request = project_service.ProvisionProjectRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.provision_project] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + project.Project, + metadata_type=project_service.ProvisionProjectMetadata, + ) + + # Done; return the response. + return response + + def __enter__(self) -> "ProjectServiceClient": + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! + """ + self.transport.close() + + def list_operations( + self, + request: Optional[operations_pb2.ListOperationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Lists operations that match the specified filter in the request. + + Args: + request (:class:`~.operations_pb2.ListOperationsRequest`): + The request object. Request message for + `ListOperations` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.ListOperationsResponse: + Response message for ``ListOperations`` method. + """ + # Create or coerce a protobuf request object. 
+ # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.ListOperationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.list_operations, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_operation( + self, + request: Optional[operations_pb2.GetOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. + + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.get_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def cancel_operation( + self, + request: Optional[operations_pb2.CancelOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Starts asynchronous cancellation on a long-running operation. + + The server makes a best effort to cancel the operation, but success + is not guaranteed. If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.CancelOperationRequest`): + The request object. Request message for + `CancelOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.CancelOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.cancel_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("ProjectServiceClient",) diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/project_service/transports/__init__.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/project_service/transports/__init__.py new file mode 100644 index 000000000000..bfc15c764467 --- /dev/null +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/project_service/transports/__init__.py @@ -0,0 +1,36 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +from typing import Dict, Type + +from .base import ProjectServiceTransport +from .grpc import ProjectServiceGrpcTransport +from .grpc_asyncio import ProjectServiceGrpcAsyncIOTransport +from .rest import ProjectServiceRestInterceptor, ProjectServiceRestTransport + +# Compile a registry of transports. 
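The registry assembled just below maps the short transport names ``"grpc"``, ``"grpc_asyncio"``, and ``"rest"`` to their classes. As a rough usage sketch (assuming Application Default Credentials are configured in the environment; not a prescribed pattern from this change), a caller would normally select one of these transports by name when constructing the client, or look the class up explicitly:

```python
from google.cloud import discoveryengine_v1beta

# Resolve the transport through the registry by its short name.
rest_client = discoveryengine_v1beta.ProjectServiceClient(transport="rest")

# Or fetch the transport class explicitly and pass in an instance.
grpc_cls = discoveryengine_v1beta.ProjectServiceClient.get_transport_class("grpc")
client = discoveryengine_v1beta.ProjectServiceClient(transport=grpc_cls())
```

Passing a string (or nothing) routes the lookup through this registry, while passing a ready transport instance or callable bypasses it, as the client constructor earlier in this diff shows.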
+_transport_registry = OrderedDict() # type: Dict[str, Type[ProjectServiceTransport]] +_transport_registry["grpc"] = ProjectServiceGrpcTransport +_transport_registry["grpc_asyncio"] = ProjectServiceGrpcAsyncIOTransport +_transport_registry["rest"] = ProjectServiceRestTransport + +__all__ = ( + "ProjectServiceTransport", + "ProjectServiceGrpcTransport", + "ProjectServiceGrpcAsyncIOTransport", + "ProjectServiceRestTransport", + "ProjectServiceRestInterceptor", +) diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/project_service/transports/base.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/project_service/transports/base.py new file mode 100644 index 000000000000..1bb8f3535f4a --- /dev/null +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/project_service/transports/base.py @@ -0,0 +1,193 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import abc +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union + +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1, operations_v1 +from google.api_core import retry as retries +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.discoveryengine_v1beta import gapic_version as package_version +from google.cloud.discoveryengine_v1beta.types import project_service + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +class ProjectServiceTransport(abc.ABC): + """Abstract transport class for ProjectService.""" + + AUTH_SCOPES = ("https://www.googleapis.com/auth/cloud-platform",) + + DEFAULT_HOST: str = "discoveryengine.googleapis.com" + + def __init__( + self, + *, + host: str = DEFAULT_HOST, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + **kwargs, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'discoveryengine.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. 
+ credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A list of scopes. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + """ + + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} + + # Save the scopes. + self._scopes = scopes + + # If no credentials are provided, then determine the appropriate + # defaults. + if credentials and credentials_file: + raise core_exceptions.DuplicateCredentialArgs( + "'credentials_file' and 'credentials' are mutually exclusive" + ) + + if credentials_file is not None: + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, **scopes_kwargs, quota_project_id=quota_project_id + ) + elif credentials is None: + credentials, _ = google.auth.default( + **scopes_kwargs, quota_project_id=quota_project_id + ) + # Don't apply audience if the credentials file passed from user. + if hasattr(credentials, "with_gdch_audience"): + credentials = credentials.with_gdch_audience( + api_audience if api_audience else host + ) + + # If the credentials are service account credentials, then always try to use self signed JWT. + if ( + always_use_jwt_access + and isinstance(credentials, service_account.Credentials) + and hasattr(service_account.Credentials, "with_always_use_jwt_access") + ): + credentials = credentials.with_always_use_jwt_access(True) + + # Save the credentials. + self._credentials = credentials + + # Save the hostname. Default to port 443 (HTTPS) if none is specified. + if ":" not in host: + host += ":443" + self._host = host + + @property + def host(self): + return self._host + + def _prep_wrapped_messages(self, client_info): + # Precompute the wrapped methods. + self._wrapped_methods = { + self.provision_project: gapic_v1.method.wrap_method( + self.provision_project, + default_timeout=None, + client_info=client_info, + ), + } + + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! 
+ """ + raise NotImplementedError() + + @property + def operations_client(self): + """Return the client designed to process long-running operations.""" + raise NotImplementedError() + + @property + def provision_project( + self, + ) -> Callable[ + [project_service.ProvisionProjectRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], + Union[ + operations_pb2.ListOperationsResponse, + Awaitable[operations_pb2.ListOperationsResponse], + ], + ]: + raise NotImplementedError() + + @property + def get_operation( + self, + ) -> Callable[ + [operations_pb2.GetOperationRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None,]: + raise NotImplementedError() + + @property + def kind(self) -> str: + raise NotImplementedError() + + +__all__ = ("ProjectServiceTransport",) diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/project_service/transports/grpc.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/project_service/transports/grpc.py new file mode 100644 index 000000000000..c93b39518427 --- /dev/null +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/project_service/transports/grpc.py @@ -0,0 +1,347 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, grpc_helpers, operations_v1 +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +import grpc # type: ignore + +from google.cloud.discoveryengine_v1beta.types import project_service + +from .base import DEFAULT_CLIENT_INFO, ProjectServiceTransport + + +class ProjectServiceGrpcTransport(ProjectServiceTransport): + """gRPC backend transport for ProjectService. + + Service for operations on the + [Project][google.cloud.discoveryengine.v1beta.Project]. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. 
+ """ + + _stubs: Dict[str, Callable] + + def __init__( + self, + *, + host: str = "discoveryengine.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'discoveryengine.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if a ``channel`` instance is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if a ``channel`` instance is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if a ``channel`` instance is provided. + channel (Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]]): + A ``Channel`` instance through which to make calls, or a Callable + that constructs and returns one. If set to None, ``self.create_channel`` + is used to create the channel. If a Callable is given, it will be called + with the same arguments as used in ``self.create_channel``. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if a ``channel`` instance is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. 
+ google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + self._operations_client: Optional[operations_v1.OperationsClient] = None + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if isinstance(channel, grpc.Channel): + # Ignore credentials if a channel was passed. + credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + # initialize with the provided callable or the default channel + channel_init = channel or type(self).create_channel + self._grpc_channel = channel_init( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @classmethod + def create_channel( + cls, + host: str = "discoveryengine.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> grpc.Channel: + """Create and return a gRPC channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. 
+ quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + grpc.Channel: A gRPC channel object. + + Raises: + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + + return grpc_helpers.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs, + ) + + @property + def grpc_channel(self) -> grpc.Channel: + """Return the channel designed to connect to this service.""" + return self._grpc_channel + + @property + def operations_client(self) -> operations_v1.OperationsClient: + """Create the client designed to process long-running operations. + + This property caches on the instance; repeated calls return the same + client. + """ + # Quick check: Only create a new client if we do not already have one. + if self._operations_client is None: + self._operations_client = operations_v1.OperationsClient(self.grpc_channel) + + # Return the client from cache. + return self._operations_client + + @property + def provision_project( + self, + ) -> Callable[[project_service.ProvisionProjectRequest], operations_pb2.Operation]: + r"""Return a callable for the provision project method over gRPC. + + Provisions the project resource. During the process, related + systems will get prepared and initialized. + + Caller must read the `Terms for data + use `__, and + optionally specify in request to provide consent to that service + terms. + + Returns: + Callable[[~.ProvisionProjectRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "provision_project" not in self._stubs: + self._stubs["provision_project"] = self.grpc_channel.unary_unary( + "/google.cloud.discoveryengine.v1beta.ProjectService/ProvisionProject", + request_serializer=project_service.ProvisionProjectRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["provision_project"] + + def close(self): + self.grpc_channel.close() + + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None]: + r"""Return a callable for the cancel_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "cancel_operation" not in self._stubs: + self._stubs["cancel_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/CancelOperation", + request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["cancel_operation"] + + @property + def get_operation( + self, + ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: + r"""Return a callable for the get_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "get_operation" not in self._stubs: + self._stubs["get_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/GetOperation", + request_serializer=operations_pb2.GetOperationRequest.SerializeToString, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["get_operation"] + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse + ]: + r"""Return a callable for the list_operations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_operations" not in self._stubs: + self._stubs["list_operations"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/ListOperations", + request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, + response_deserializer=operations_pb2.ListOperationsResponse.FromString, + ) + return self._stubs["list_operations"] + + @property + def kind(self) -> str: + return "grpc" + + +__all__ = ("ProjectServiceGrpcTransport",) diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/project_service/transports/grpc_asyncio.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/project_service/transports/grpc_asyncio.py new file mode 100644 index 000000000000..2e077d6d4bce --- /dev/null +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/project_service/transports/grpc_asyncio.py @@ -0,0 +1,361 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1, grpc_helpers_async, operations_v1 +from google.api_core import retry_async as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +import grpc # type: ignore +from grpc.experimental import aio # type: ignore + +from google.cloud.discoveryengine_v1beta.types import project_service + +from .base import DEFAULT_CLIENT_INFO, ProjectServiceTransport +from .grpc import ProjectServiceGrpcTransport + + +class ProjectServiceGrpcAsyncIOTransport(ProjectServiceTransport): + """gRPC AsyncIO backend transport for ProjectService. + + Service for operations on the + [Project][google.cloud.discoveryengine.v1beta.Project]. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. 
+ + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + + _grpc_channel: aio.Channel + _stubs: Dict[str, Callable] = {} + + @classmethod + def create_channel( + cls, + host: str = "discoveryengine.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> aio.Channel: + """Create and return a gRPC AsyncIO channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + aio.Channel: A gRPC AsyncIO channel object. + """ + + return grpc_helpers_async.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs, + ) + + def __init__( + self, + *, + host: str = "discoveryengine.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[Union[aio.Channel, Callable[..., aio.Channel]]] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'discoveryengine.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if a ``channel`` instance is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if a ``channel`` instance is provided. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + channel (Optional[Union[aio.Channel, Callable[..., aio.Channel]]]): + A ``Channel`` instance through which to make calls, or a Callable + that constructs and returns one. 
If set to None, ``self.create_channel`` + is used to create the channel. If a Callable is given, it will be called + with the same arguments as used in ``self.create_channel``. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if a ``channel`` instance is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + self._operations_client: Optional[operations_v1.OperationsAsyncClient] = None + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if isinstance(channel, aio.Channel): + # Ignore credentials if a channel was passed. + credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. 
+ if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + # initialize with the provided callable or the default channel + channel_init = channel or type(self).create_channel + self._grpc_channel = channel_init( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @property + def grpc_channel(self) -> aio.Channel: + """Create the channel designed to connect to this service. + + This property caches on the instance; repeated calls return + the same channel. + """ + # Return the channel from cache. + return self._grpc_channel + + @property + def operations_client(self) -> operations_v1.OperationsAsyncClient: + """Create the client designed to process long-running operations. + + This property caches on the instance; repeated calls return the same + client. + """ + # Quick check: Only create a new client if we do not already have one. + if self._operations_client is None: + self._operations_client = operations_v1.OperationsAsyncClient( + self.grpc_channel + ) + + # Return the client from cache. + return self._operations_client + + @property + def provision_project( + self, + ) -> Callable[ + [project_service.ProvisionProjectRequest], Awaitable[operations_pb2.Operation] + ]: + r"""Return a callable for the provision project method over gRPC. + + Provisions the project resource. During the process, related + systems will get prepared and initialized. + + Caller must read the `Terms for data + use `__, and + optionally specify in request to provide consent to that service + terms. + + Returns: + Callable[[~.ProvisionProjectRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "provision_project" not in self._stubs: + self._stubs["provision_project"] = self.grpc_channel.unary_unary( + "/google.cloud.discoveryengine.v1beta.ProjectService/ProvisionProject", + request_serializer=project_service.ProvisionProjectRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["provision_project"] + + def _prep_wrapped_messages(self, client_info): + """Precompute the wrapped methods, overriding the base class method to use async wrappers.""" + self._wrapped_methods = { + self.provision_project: gapic_v1.method_async.wrap_method( + self.provision_project, + default_timeout=None, + client_info=client_info, + ), + } + + def close(self): + return self.grpc_channel.close() + + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None]: + r"""Return a callable for the cancel_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "cancel_operation" not in self._stubs: + self._stubs["cancel_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/CancelOperation", + request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["cancel_operation"] + + @property + def get_operation( + self, + ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: + r"""Return a callable for the get_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_operation" not in self._stubs: + self._stubs["get_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/GetOperation", + request_serializer=operations_pb2.GetOperationRequest.SerializeToString, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["get_operation"] + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse + ]: + r"""Return a callable for the list_operations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "list_operations" not in self._stubs: + self._stubs["list_operations"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/ListOperations", + request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, + response_deserializer=operations_pb2.ListOperationsResponse.FromString, + ) + return self._stubs["list_operations"] + + +__all__ = ("ProjectServiceGrpcAsyncIOTransport",) diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/project_service/transports/rest.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/project_service/transports/rest.py new file mode 100644 index 000000000000..4b66b216811c --- /dev/null +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/project_service/transports/rest.py @@ -0,0 +1,853 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +import dataclasses +import json # type: ignore +import re +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import ( + gapic_v1, + operations_v1, + path_template, + rest_helpers, + rest_streaming, +) +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.transport.requests import AuthorizedSession # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.protobuf import json_format +import grpc # type: ignore +from requests import __version__ as requests_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + + +from google.longrunning import operations_pb2 # type: ignore + +from google.cloud.discoveryengine_v1beta.types import project_service + +from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO +from .base import ProjectServiceTransport + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) + + +class ProjectServiceRestInterceptor: + """Interceptor for ProjectService. + + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. + Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the ProjectServiceRestTransport. + + .. 
code-block:: python + class MyCustomProjectServiceInterceptor(ProjectServiceRestInterceptor): + def pre_provision_project(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_provision_project(self, response): + logging.log(f"Received response: {response}") + return response + + transport = ProjectServiceRestTransport(interceptor=MyCustomProjectServiceInterceptor()) + client = ProjectServiceClient(transport=transport) + + + """ + + def pre_provision_project( + self, + request: project_service.ProvisionProjectRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[project_service.ProvisionProjectRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for provision_project + + Override in a subclass to manipulate the request or metadata + before they are sent to the ProjectService server. + """ + return request, metadata + + def post_provision_project( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for provision_project + + Override in a subclass to manipulate the response + after it is returned by the ProjectService server but before + it is returned to user code. + """ + return response + + def pre_cancel_operation( + self, + request: operations_pb2.CancelOperationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[operations_pb2.CancelOperationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for cancel_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the ProjectService server. + """ + return request, metadata + + def post_cancel_operation(self, response: None) -> None: + """Post-rpc interceptor for cancel_operation + + Override in a subclass to manipulate the response + after it is returned by the ProjectService server but before + it is returned to user code. + """ + return response + + def pre_get_operation( + self, + request: operations_pb2.GetOperationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[operations_pb2.GetOperationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the ProjectService server. + """ + return request, metadata + + def post_get_operation( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for get_operation + + Override in a subclass to manipulate the response + after it is returned by the ProjectService server but before + it is returned to user code. + """ + return response + + def pre_list_operations( + self, + request: operations_pb2.ListOperationsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[operations_pb2.ListOperationsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_operations + + Override in a subclass to manipulate the request or metadata + before they are sent to the ProjectService server. + """ + return request, metadata + + def post_list_operations( + self, response: operations_pb2.ListOperationsResponse + ) -> operations_pb2.ListOperationsResponse: + """Post-rpc interceptor for list_operations + + Override in a subclass to manipulate the response + after it is returned by the ProjectService server but before + it is returned to user code. 
+ """ + return response + + +@dataclasses.dataclass +class ProjectServiceRestStub: + _session: AuthorizedSession + _host: str + _interceptor: ProjectServiceRestInterceptor + + +class ProjectServiceRestTransport(ProjectServiceTransport): + """REST backend transport for ProjectService. + + Service for operations on the + [Project][google.cloud.discoveryengine.v1beta.Project]. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends JSON representations of protocol buffers over HTTP/1.1 + + """ + + def __init__( + self, + *, + host: str = "discoveryengine.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", + interceptor: Optional[ProjectServiceRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'discoveryengine.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. It is ignored + if ``channel`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. + """ + # Run the base constructor + # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. 
+ # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the + # credentials object + maybe_url_match = re.match("^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host) + if maybe_url_match is None: + raise ValueError( + f"Unexpected hostname structure: {host}" + ) # pragma: NO COVER + + url_match_items = maybe_url_match.groupdict() + + host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host + + super().__init__( + host=host, + credentials=credentials, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + self._session = AuthorizedSession( + self._credentials, default_host=self.DEFAULT_HOST + ) + self._operations_client: Optional[operations_v1.AbstractOperationsClient] = None + if client_cert_source_for_mtls: + self._session.configure_mtls_channel(client_cert_source_for_mtls) + self._interceptor = interceptor or ProjectServiceRestInterceptor() + self._prep_wrapped_messages(client_info) + + @property + def operations_client(self) -> operations_v1.AbstractOperationsClient: + """Create the client designed to process long-running operations. + + This property caches on the instance; repeated calls return the same + client. + """ + # Only create a new client if we do not already have one. + if self._operations_client is None: + http_options: Dict[str, List[Dict[str, str]]] = { + "google.longrunning.Operations.CancelOperation": [ + { + "method": "post", + "uri": "/v1beta/{name=projects/*/locations/*/collections/*/dataStores/*/branches/*/operations/*}:cancel", + "body": "*", + }, + { + "method": "post", + "uri": "/v1beta/{name=projects/*/locations/*/dataStores/*/branches/*/operations/*}:cancel", + "body": "*", + }, + ], + "google.longrunning.Operations.GetOperation": [ + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/collections/*/dataConnector/operations/*}", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/collections/*/dataStores/*/branches/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/collections/*/dataStores/*/models/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/collections/*/dataStores/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/collections/*/dataStores/*/schemas/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/collections/*/dataStores/*/siteSearchEngine/operations/*}", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/collections/*/dataStores/*/siteSearchEngine/targetSites/operations/*}", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/collections/*/engines/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/collections/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/dataStores/*/branches/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/dataStores/*/models/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/dataStores/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/operations/*}", + }, + ], + "google.longrunning.Operations.ListOperations": [ + { + "method": "get", + "uri": 
"/v1beta/{name=projects/*/locations/*/collections/*/dataConnector}/operations", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/collections/*/dataStores/*/branches/*}/operations", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/collections/*/dataStores/*/models/*}/operations", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/collections/*/dataStores/*/schemas/*}/operations", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/collections/*/dataStores/*/siteSearchEngine/targetSites}/operations", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/collections/*/dataStores/*/siteSearchEngine}/operations", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/collections/*/dataStores/*}/operations", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/collections/*/engines/*}/operations", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/collections/*}/operations", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/dataStores/*/branches/*}/operations", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/dataStores/*/models/*}/operations", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/dataStores/*}/operations", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*}/operations", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*}/operations", + }, + ], + } + + rest_transport = operations_v1.OperationsRestTransport( + host=self._host, + # use the credentials which are saved + credentials=self._credentials, + scopes=self._scopes, + http_options=http_options, + path_prefix="v1beta", + ) + + self._operations_client = operations_v1.AbstractOperationsClient( + transport=rest_transport + ) + + # Return the client from cache. + return self._operations_client + + class _ProvisionProject(ProjectServiceRestStub): + def __hash__(self): + return hash("ProvisionProject") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: project_service.ProvisionProjectRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the provision project method over HTTP. + + Args: + request (~.project_service.ProvisionProjectRequest): + The request object. Request for + [ProjectService.ProvisionProject][google.cloud.discoveryengine.v1beta.ProjectService.ProvisionProject] + method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1beta/{name=projects/*}:provision", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_provision_project( + request, metadata + ) + pb_request = project_service.ProvisionProjectRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_provision_project(resp) + return resp + + @property + def provision_project( + self, + ) -> Callable[[project_service.ProvisionProjectRequest], operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ProvisionProject(self._session, self._host, self._interceptor) # type: ignore + + @property + def cancel_operation(self): + return self._CancelOperation(self._session, self._host, self._interceptor) # type: ignore + + class _CancelOperation(ProjectServiceRestStub): + def __call__( + self, + request: operations_pb2.CancelOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Call the cancel operation method over HTTP. + + Args: + request (operations_pb2.CancelOperationRequest): + The request object for CancelOperation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1beta/{name=projects/*/locations/*/collections/*/dataStores/*/branches/*/operations/*}:cancel", + "body": "*", + }, + { + "method": "post", + "uri": "/v1beta/{name=projects/*/locations/*/dataStores/*/branches/*/operations/*}:cancel", + "body": "*", + }, + ] + + request, metadata = self._interceptor.pre_cancel_operation( + request, metadata + ) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + body = json.dumps(transcoded_request["body"]) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + return self._interceptor.post_cancel_operation(None) + + @property + def get_operation(self): + return self._GetOperation(self._session, self._host, self._interceptor) # type: ignore + + class _GetOperation(ProjectServiceRestStub): + def __call__( + self, + request: operations_pb2.GetOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the get operation method over HTTP. + + Args: + request (operations_pb2.GetOperationRequest): + The request object for GetOperation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + operations_pb2.Operation: Response from GetOperation method. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/collections/*/dataConnector/operations/*}", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/collections/*/dataStores/*/branches/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/collections/*/dataStores/*/models/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/collections/*/dataStores/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/collections/*/dataStores/*/schemas/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/collections/*/dataStores/*/siteSearchEngine/operations/*}", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/collections/*/dataStores/*/siteSearchEngine/targetSites/operations/*}", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/collections/*/engines/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/collections/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/dataStores/*/branches/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/dataStores/*/models/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/dataStores/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/operations/*}", + }, + ] + + request, metadata = self._interceptor.pre_get_operation(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = operations_pb2.Operation() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_get_operation(resp) + return resp + + @property + def list_operations(self): + return self._ListOperations(self._session, self._host, self._interceptor) # type: ignore + + class _ListOperations(ProjectServiceRestStub): + def __call__( + self, + request: operations_pb2.ListOperationsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Call the list operations method over HTTP. + + Args: + request (operations_pb2.ListOperationsRequest): + The request object for ListOperations method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + operations_pb2.ListOperationsResponse: Response from ListOperations method. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/collections/*/dataConnector}/operations", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/collections/*/dataStores/*/branches/*}/operations", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/collections/*/dataStores/*/models/*}/operations", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/collections/*/dataStores/*/schemas/*}/operations", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/collections/*/dataStores/*/siteSearchEngine/targetSites}/operations", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/collections/*/dataStores/*/siteSearchEngine}/operations", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/collections/*/dataStores/*}/operations", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/collections/*/engines/*}/operations", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/collections/*}/operations", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/dataStores/*/branches/*}/operations", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/dataStores/*/models/*}/operations", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/dataStores/*}/operations", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*}/operations", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*}/operations", + }, + ] + + request, metadata = self._interceptor.pre_list_operations(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = operations_pb2.ListOperationsResponse() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_list_operations(resp) + return resp + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__ = ("ProjectServiceRestTransport",) diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/rank_service/async_client.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/rank_service/async_client.py index 9f03eb8711cd..ffc1bc7f3e00 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/rank_service/async_client.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/rank_service/async_client.py @@ -458,6 +458,63 @@ async def get_operation( # Done; return the response. return response + async def cancel_operation( + self, + request: Optional[operations_pb2.CancelOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Starts asynchronous cancellation on a long-running operation. + + The server makes a best effort to cancel the operation, but success + is not guaranteed. If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.CancelOperationRequest`): + The request object. Request message for + `CancelOperation` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.CancelOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.cancel_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + async def __aenter__(self) -> "RankServiceAsyncClient": return self diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/rank_service/client.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/rank_service/client.py index 842ef0e2a010..ac877d0c2281 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/rank_service/client.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/rank_service/client.py @@ -888,6 +888,63 @@ def get_operation( # Done; return the response. 
return response + def cancel_operation( + self, + request: Optional[operations_pb2.CancelOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Starts asynchronous cancellation on a long-running operation. + + The server makes a best effort to cancel the operation, but success + is not guaranteed. If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.CancelOperationRequest`): + The request object. Request message for + `CancelOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.CancelOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.cancel_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( gapic_version=package_version.__version__ diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/rank_service/transports/base.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/rank_service/transports/base.py index b16166fde489..625244642df4 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/rank_service/transports/base.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/rank_service/transports/base.py @@ -174,6 +174,12 @@ def get_operation( ]: raise NotImplementedError() + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None,]: + raise NotImplementedError() + @property def kind(self) -> str: raise NotImplementedError() diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/rank_service/transports/grpc.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/rank_service/transports/grpc.py index a147414bea44..12c7a880a919 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/rank_service/transports/grpc.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/rank_service/transports/grpc.py @@ -263,6 +263,23 @@ def rank(self) -> Callable[[rank_service.RankRequest], rank_service.RankResponse def close(self): self.grpc_channel.close() + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None]: + r"""Return a callable for the cancel_operation method over gRPC.""" + # 
Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "cancel_operation" not in self._stubs: + self._stubs["cancel_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/CancelOperation", + request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["cancel_operation"] + @property def get_operation( self, diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/rank_service/transports/grpc_asyncio.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/rank_service/transports/grpc_asyncio.py index 1444b6b69e09..aaf79ff2ecce 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/rank_service/transports/grpc_asyncio.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/rank_service/transports/grpc_asyncio.py @@ -279,6 +279,23 @@ def _prep_wrapped_messages(self, client_info): def close(self): return self.grpc_channel.close() + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None]: + r"""Return a callable for the cancel_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "cancel_operation" not in self._stubs: + self._stubs["cancel_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/CancelOperation", + request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["cancel_operation"] + @property def get_operation( self, diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/rank_service/transports/rest.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/rank_service/transports/rest.py index 5e5e71bfe43b..34d6325b6e9d 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/rank_service/transports/rest.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/rank_service/transports/rest.py @@ -101,6 +101,27 @@ def post_rank( """ return response + def pre_cancel_operation( + self, + request: operations_pb2.CancelOperationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[operations_pb2.CancelOperationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for cancel_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the RankService server. + """ + return request, metadata + + def post_cancel_operation(self, response: None) -> None: + """Post-rpc interceptor for cancel_operation + + Override in a subclass to manipulate the response + after it is returned by the RankService server but before + it is returned to user code. 
+ """ + return response + def pre_get_operation( self, request: operations_pb2.GetOperationRequest, @@ -347,6 +368,76 @@ def rank(self) -> Callable[[rank_service.RankRequest], rank_service.RankResponse # In C++ this would require a dynamic_cast return self._Rank(self._session, self._host, self._interceptor) # type: ignore + @property + def cancel_operation(self): + return self._CancelOperation(self._session, self._host, self._interceptor) # type: ignore + + class _CancelOperation(RankServiceRestStub): + def __call__( + self, + request: operations_pb2.CancelOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Call the cancel operation method over HTTP. + + Args: + request (operations_pb2.CancelOperationRequest): + The request object for CancelOperation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1beta/{name=projects/*/locations/*/collections/*/dataStores/*/branches/*/operations/*}:cancel", + "body": "*", + }, + { + "method": "post", + "uri": "/v1beta/{name=projects/*/locations/*/dataStores/*/branches/*/operations/*}:cancel", + "body": "*", + }, + ] + + request, metadata = self._interceptor.pre_cancel_operation( + request, metadata + ) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + body = json.dumps(transcoded_request["body"]) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + return self._interceptor.post_cancel_operation(None) + @property def get_operation(self): return self._GetOperation(self._session, self._host, self._interceptor) # type: ignore diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/recommendation_service/async_client.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/recommendation_service/async_client.py index 1d2de1a29f4b..387914f6741e 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/recommendation_service/async_client.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/recommendation_service/async_client.py @@ -65,8 +65,14 @@ class RecommendationServiceAsyncClient: _DEFAULT_ENDPOINT_TEMPLATE = RecommendationServiceClient._DEFAULT_ENDPOINT_TEMPLATE _DEFAULT_UNIVERSE = RecommendationServiceClient._DEFAULT_UNIVERSE + data_store_path = staticmethod(RecommendationServiceClient.data_store_path) + parse_data_store_path = staticmethod( + RecommendationServiceClient.parse_data_store_path + ) document_path = staticmethod(RecommendationServiceClient.document_path) parse_document_path = staticmethod(RecommendationServiceClient.parse_document_path) + engine_path = staticmethod(RecommendationServiceClient.engine_path) + parse_engine_path = staticmethod(RecommendationServiceClient.parse_engine_path) serving_config_path = staticmethod(RecommendationServiceClient.serving_config_path) parse_serving_config_path = staticmethod( RecommendationServiceClient.parse_serving_config_path @@ -475,6 +481,63 @@ async def get_operation( # Done; return the response. return response + async def cancel_operation( + self, + request: Optional[operations_pb2.CancelOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Starts asynchronous cancellation on a long-running operation. + + The server makes a best effort to cancel the operation, but success + is not guaranteed. If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.CancelOperationRequest`): + The request object. Request message for + `CancelOperation` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.CancelOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.cancel_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. 
+            await rpc(
+                request,
+                retry=retry,
+                timeout=timeout,
+                metadata=metadata,
+            )
+
    async def __aenter__(self) -> "RecommendationServiceAsyncClient":
        return self
diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/recommendation_service/client.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/recommendation_service/client.py
index 9f8d4b4f2383..b7fe7b254248 100644
--- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/recommendation_service/client.py
+++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/recommendation_service/client.py
@@ -185,6 +185,28 @@ def transport(self) -> RecommendationServiceTransport:
        """
        return self._transport

+    @staticmethod
+    def data_store_path(
+        project: str,
+        location: str,
+        data_store: str,
+    ) -> str:
+        """Returns a fully-qualified data_store string."""
+        return "projects/{project}/locations/{location}/dataStores/{data_store}".format(
+            project=project,
+            location=location,
+            data_store=data_store,
+        )
+
+    @staticmethod
+    def parse_data_store_path(path: str) -> Dict[str, str]:
+        """Parses a data_store path into its component segments."""
+        m = re.match(
+            r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/dataStores/(?P<data_store>.+?)$",
+            path,
+        )
+        return m.groupdict() if m else {}
+
    @staticmethod
    def document_path(
        project: str,
@@ -211,6 +233,30 @@ def parse_document_path(path: str) -> Dict[str, str]:
        )
        return m.groupdict() if m else {}

+    @staticmethod
+    def engine_path(
+        project: str,
+        location: str,
+        collection: str,
+        engine: str,
+    ) -> str:
+        """Returns a fully-qualified engine string."""
+        return "projects/{project}/locations/{location}/collections/{collection}/engines/{engine}".format(
+            project=project,
+            location=location,
+            collection=collection,
+            engine=engine,
+        )
+
+    @staticmethod
+    def parse_engine_path(path: str) -> Dict[str, str]:
+        """Parses an engine path into its component segments."""
+        m = re.match(
+            r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/collections/(?P<collection>.+?)/engines/(?P<engine>.+?)$",
+            path,
+        )
+        return m.groupdict() if m else {}
+
    @staticmethod
    def serving_config_path(
        project: str,
@@ -930,6 +976,63 @@ def get_operation(
        # Done; return the response.
        return response

+    def cancel_operation(
+        self,
+        request: Optional[operations_pb2.CancelOperationRequest] = None,
+        *,
+        retry: OptionalRetry = gapic_v1.method.DEFAULT,
+        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+        metadata: Sequence[Tuple[str, str]] = (),
+    ) -> None:
+        r"""Starts asynchronous cancellation on a long-running operation.
+
+        The server makes a best effort to cancel the operation, but success
+        is not guaranteed. If the server doesn't support this method, it returns
+        `google.rpc.Code.UNIMPLEMENTED`.
+
+        Args:
+            request (:class:`~.operations_pb2.CancelOperationRequest`):
+                The request object. Request message for
+                `CancelOperation` method.
+            retry (google.api_core.retry.Retry): Designation of what errors,
+                if any, should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+        Returns:
+            None
+        """
+        # Create or coerce a protobuf request object.
+        # The request isn't a proto-plus wrapped type,
+        # so it must be constructed via keyword expansion.
+ if isinstance(request, dict): + request = operations_pb2.CancelOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.cancel_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( gapic_version=package_version.__version__ diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/recommendation_service/transports/base.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/recommendation_service/transports/base.py index f443c5085faa..0c0df8a3b7fd 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/recommendation_service/transports/base.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/recommendation_service/transports/base.py @@ -177,6 +177,12 @@ def get_operation( ]: raise NotImplementedError() + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None,]: + raise NotImplementedError() + @property def kind(self) -> str: raise NotImplementedError() diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/recommendation_service/transports/grpc.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/recommendation_service/transports/grpc.py index 65c4ee1b4f06..2b1811a52307 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/recommendation_service/transports/grpc.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/recommendation_service/transports/grpc.py @@ -268,6 +268,23 @@ def recommend( def close(self): self.grpc_channel.close() + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None]: + r"""Return a callable for the cancel_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "cancel_operation" not in self._stubs: + self._stubs["cancel_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/CancelOperation", + request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["cancel_operation"] + @property def get_operation( self, diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/recommendation_service/transports/grpc_asyncio.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/recommendation_service/transports/grpc_asyncio.py index bc6bfaf6284f..539077ed9140 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/recommendation_service/transports/grpc_asyncio.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/recommendation_service/transports/grpc_asyncio.py @@ -282,6 +282,23 @@ def _prep_wrapped_messages(self, client_info): def close(self): return self.grpc_channel.close() + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None]: + r"""Return a callable for the cancel_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "cancel_operation" not in self._stubs: + self._stubs["cancel_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/CancelOperation", + request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["cancel_operation"] + @property def get_operation( self, diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/recommendation_service/transports/rest.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/recommendation_service/transports/rest.py index 93bcf6b212a2..ddd153633b6b 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/recommendation_service/transports/rest.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/recommendation_service/transports/rest.py @@ -103,6 +103,27 @@ def post_recommend( """ return response + def pre_cancel_operation( + self, + request: operations_pb2.CancelOperationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[operations_pb2.CancelOperationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for cancel_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the RecommendationService server. + """ + return request, metadata + + def post_cancel_operation(self, response: None) -> None: + """Post-rpc interceptor for cancel_operation + + Override in a subclass to manipulate the response + after it is returned by the RecommendationService server but before + it is returned to user code. 
+ """ + return response + def pre_get_operation( self, request: operations_pb2.GetOperationRequest, @@ -361,6 +382,76 @@ def recommend( # In C++ this would require a dynamic_cast return self._Recommend(self._session, self._host, self._interceptor) # type: ignore + @property + def cancel_operation(self): + return self._CancelOperation(self._session, self._host, self._interceptor) # type: ignore + + class _CancelOperation(RecommendationServiceRestStub): + def __call__( + self, + request: operations_pb2.CancelOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Call the cancel operation method over HTTP. + + Args: + request (operations_pb2.CancelOperationRequest): + The request object for CancelOperation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1beta/{name=projects/*/locations/*/collections/*/dataStores/*/branches/*/operations/*}:cancel", + "body": "*", + }, + { + "method": "post", + "uri": "/v1beta/{name=projects/*/locations/*/dataStores/*/branches/*/operations/*}:cancel", + "body": "*", + }, + ] + + request, metadata = self._interceptor.pre_cancel_operation( + request, metadata + ) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + body = json.dumps(transcoded_request["body"]) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + return self._interceptor.post_cancel_operation(None) + @property def get_operation(self): return self._GetOperation(self._session, self._host, self._interceptor) # type: ignore diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/schema_service/async_client.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/schema_service/async_client.py index 5e95e96c0250..7ded1377048e 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/schema_service/async_client.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/schema_service/async_client.py @@ -569,7 +569,7 @@ async def sample_create_schema(): schema_id (:class:`str`): Required. The ID to use for the [Schema][google.cloud.discoveryengine.v1beta.Schema], - which will become the final component of the + which becomes the final component of the [Schema.name][google.cloud.discoveryengine.v1beta.Schema.name]. This field should conform to @@ -999,6 +999,63 @@ async def get_operation( # Done; return the response. 
return response + async def cancel_operation( + self, + request: Optional[operations_pb2.CancelOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Starts asynchronous cancellation on a long-running operation. + + The server makes a best effort to cancel the operation, but success + is not guaranteed. If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.CancelOperationRequest`): + The request object. Request message for + `CancelOperation` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.CancelOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.cancel_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + async def __aenter__(self) -> "SchemaServiceAsyncClient": return self diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/schema_service/client.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/schema_service/client.py index 6a3406c490a7..d483951dff6e 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/schema_service/client.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/schema_service/client.py @@ -1003,7 +1003,7 @@ def sample_create_schema(): schema_id (str): Required. The ID to use for the [Schema][google.cloud.discoveryengine.v1beta.Schema], - which will become the final component of the + which becomes the final component of the [Schema.name][google.cloud.discoveryengine.v1beta.Schema.name]. This field should conform to @@ -1438,6 +1438,63 @@ def get_operation( # Done; return the response. return response + def cancel_operation( + self, + request: Optional[operations_pb2.CancelOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Starts asynchronous cancellation on a long-running operation. + + The server makes a best effort to cancel the operation, but success + is not guaranteed. If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.CancelOperationRequest`): + The request object. Request message for + `CancelOperation` method. 
+ retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.CancelOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.cancel_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( gapic_version=package_version.__version__ diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/schema_service/transports/base.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/schema_service/transports/base.py index bdea449e7101..833d2d0a7b22 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/schema_service/transports/base.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/schema_service/transports/base.py @@ -238,6 +238,12 @@ def get_operation( ]: raise NotImplementedError() + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None,]: + raise NotImplementedError() + @property def kind(self) -> str: raise NotImplementedError() diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/schema_service/transports/grpc.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/schema_service/transports/grpc.py index 44e5afe7651e..2a5a48ea5e28 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/schema_service/transports/grpc.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/schema_service/transports/grpc.py @@ -385,6 +385,23 @@ def delete_schema( def close(self): self.grpc_channel.close() + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None]: + r"""Return a callable for the cancel_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "cancel_operation" not in self._stubs: + self._stubs["cancel_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/CancelOperation", + request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["cancel_operation"] + @property def get_operation( self, diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/schema_service/transports/grpc_asyncio.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/schema_service/transports/grpc_asyncio.py index 63af9911eb37..29b5547c58ea 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/schema_service/transports/grpc_asyncio.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/schema_service/transports/grpc_asyncio.py @@ -430,6 +430,23 @@ def _prep_wrapped_messages(self, client_info): def close(self): return self.grpc_channel.close() + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None]: + r"""Return a callable for the cancel_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "cancel_operation" not in self._stubs: + self._stubs["cancel_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/CancelOperation", + request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["cancel_operation"] + @property def get_operation( self, diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/schema_service/transports/rest.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/schema_service/transports/rest.py index 19a305bc7bcb..a3a9ba6206e6 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/schema_service/transports/rest.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/schema_service/transports/rest.py @@ -231,6 +231,27 @@ def post_update_schema( """ return response + def pre_cancel_operation( + self, + request: operations_pb2.CancelOperationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[operations_pb2.CancelOperationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for cancel_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the SchemaService server. + """ + return request, metadata + + def post_cancel_operation(self, response: None) -> None: + """Post-rpc interceptor for cancel_operation + + Override in a subclass to manipulate the response + after it is returned by the SchemaService server but before + it is returned to user code. + """ + return response + def pre_get_operation( self, request: operations_pb2.GetOperationRequest, @@ -386,6 +407,18 @@ def operations_client(self) -> operations_v1.AbstractOperationsClient: # Only create a new client if we do not already have one. 
if self._operations_client is None: http_options: Dict[str, List[Dict[str, str]]] = { + "google.longrunning.Operations.CancelOperation": [ + { + "method": "post", + "uri": "/v1beta/{name=projects/*/locations/*/collections/*/dataStores/*/branches/*/operations/*}:cancel", + "body": "*", + }, + { + "method": "post", + "uri": "/v1beta/{name=projects/*/locations/*/dataStores/*/branches/*/operations/*}:cancel", + "body": "*", + }, + ], "google.longrunning.Operations.GetOperation": [ { "method": "get", @@ -1041,6 +1074,76 @@ def update_schema( # In C++ this would require a dynamic_cast return self._UpdateSchema(self._session, self._host, self._interceptor) # type: ignore + @property + def cancel_operation(self): + return self._CancelOperation(self._session, self._host, self._interceptor) # type: ignore + + class _CancelOperation(SchemaServiceRestStub): + def __call__( + self, + request: operations_pb2.CancelOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Call the cancel operation method over HTTP. + + Args: + request (operations_pb2.CancelOperationRequest): + The request object for CancelOperation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1beta/{name=projects/*/locations/*/collections/*/dataStores/*/branches/*/operations/*}:cancel", + "body": "*", + }, + { + "method": "post", + "uri": "/v1beta/{name=projects/*/locations/*/dataStores/*/branches/*/operations/*}:cancel", + "body": "*", + }, + ] + + request, metadata = self._interceptor.pre_cancel_operation( + request, metadata + ) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + body = json.dumps(transcoded_request["body"]) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + return self._interceptor.post_cancel_operation(None) + @property def get_operation(self): return self._GetOperation(self._session, self._host, self._interceptor) # type: ignore diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/search_service/async_client.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/search_service/async_client.py index b9d399204ab0..40734c90ff47 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/search_service/async_client.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/search_service/async_client.py @@ -481,6 +481,63 @@ async def get_operation( # Done; return the response. return response + async def cancel_operation( + self, + request: Optional[operations_pb2.CancelOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Starts asynchronous cancellation on a long-running operation. + + The server makes a best effort to cancel the operation, but success + is not guaranteed. If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.CancelOperationRequest`): + The request object. Request message for + `CancelOperation` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.CancelOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.cancel_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + async def __aenter__(self) -> "SearchServiceAsyncClient": return self diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/search_service/client.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/search_service/client.py index 231cdfbbc100..c42f322fd40f 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/search_service/client.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/search_service/client.py @@ -978,6 +978,63 @@ def get_operation( # Done; return the response. 
return response + def cancel_operation( + self, + request: Optional[operations_pb2.CancelOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Starts asynchronous cancellation on a long-running operation. + + The server makes a best effort to cancel the operation, but success + is not guaranteed. If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.CancelOperationRequest`): + The request object. Request message for + `CancelOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.CancelOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.cancel_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( gapic_version=package_version.__version__ diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/search_service/transports/base.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/search_service/transports/base.py index 15cc30cd0208..4814c9fd3703 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/search_service/transports/base.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/search_service/transports/base.py @@ -174,6 +174,12 @@ def get_operation( ]: raise NotImplementedError() + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None,]: + raise NotImplementedError() + @property def kind(self) -> str: raise NotImplementedError() diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/search_service/transports/grpc.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/search_service/transports/grpc.py index 7fdba90de5ee..a2211d5a5477 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/search_service/transports/grpc.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/search_service/transports/grpc.py @@ -264,6 +264,23 @@ def search( def close(self): self.grpc_channel.close() + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None]: + r"""Return a callable for the cancel_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will 
actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "cancel_operation" not in self._stubs: + self._stubs["cancel_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/CancelOperation", + request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["cancel_operation"] + @property def get_operation( self, diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/search_service/transports/grpc_asyncio.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/search_service/transports/grpc_asyncio.py index fe7a4fb90e34..b1c0e2d2d1a2 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/search_service/transports/grpc_asyncio.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/search_service/transports/grpc_asyncio.py @@ -280,6 +280,23 @@ def _prep_wrapped_messages(self, client_info): def close(self): return self.grpc_channel.close() + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None]: + r"""Return a callable for the cancel_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "cancel_operation" not in self._stubs: + self._stubs["cancel_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/CancelOperation", + request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["cancel_operation"] + @property def get_operation( self, diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/search_service/transports/rest.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/search_service/transports/rest.py index 57f58cbae182..13ea840eec3f 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/search_service/transports/rest.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/search_service/transports/rest.py @@ -101,6 +101,27 @@ def post_search( """ return response + def pre_cancel_operation( + self, + request: operations_pb2.CancelOperationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[operations_pb2.CancelOperationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for cancel_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the SearchService server. + """ + return request, metadata + + def post_cancel_operation(self, response: None) -> None: + """Post-rpc interceptor for cancel_operation + + Override in a subclass to manipulate the response + after it is returned by the SearchService server but before + it is returned to user code. 
+ """ + return response + def pre_get_operation( self, request: operations_pb2.GetOperationRequest, @@ -359,6 +380,76 @@ def search( # In C++ this would require a dynamic_cast return self._Search(self._session, self._host, self._interceptor) # type: ignore + @property + def cancel_operation(self): + return self._CancelOperation(self._session, self._host, self._interceptor) # type: ignore + + class _CancelOperation(SearchServiceRestStub): + def __call__( + self, + request: operations_pb2.CancelOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Call the cancel operation method over HTTP. + + Args: + request (operations_pb2.CancelOperationRequest): + The request object for CancelOperation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1beta/{name=projects/*/locations/*/collections/*/dataStores/*/branches/*/operations/*}:cancel", + "body": "*", + }, + { + "method": "post", + "uri": "/v1beta/{name=projects/*/locations/*/dataStores/*/branches/*/operations/*}:cancel", + "body": "*", + }, + ] + + request, metadata = self._interceptor.pre_cancel_operation( + request, metadata + ) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + body = json.dumps(transcoded_request["body"]) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + return self._interceptor.post_cancel_operation(None) + @property def get_operation(self): return self._GetOperation(self._session, self._host, self._interceptor) # type: ignore diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/search_tuning_service/async_client.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/search_tuning_service/async_client.py index fe1a9b6e1396..ea27f1e24c7c 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/search_tuning_service/async_client.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/search_tuning_service/async_client.py @@ -48,7 +48,10 @@ from google.cloud.location import locations_pb2 # type: ignore from google.longrunning import operations_pb2 # type: ignore -from google.cloud.discoveryengine_v1beta.types import search_tuning_service +from google.cloud.discoveryengine_v1beta.types import ( + custom_tuning_model, + search_tuning_service, +) from .client import SearchTuningServiceClient from .transports.base import DEFAULT_CLIENT_INFO, SearchTuningServiceTransport @@ -67,6 +70,12 @@ class SearchTuningServiceAsyncClient: _DEFAULT_ENDPOINT_TEMPLATE = SearchTuningServiceClient._DEFAULT_ENDPOINT_TEMPLATE _DEFAULT_UNIVERSE = SearchTuningServiceClient._DEFAULT_UNIVERSE + custom_tuning_model_path = staticmethod( + SearchTuningServiceClient.custom_tuning_model_path + ) + parse_custom_tuning_model_path = staticmethod( + SearchTuningServiceClient.parse_custom_tuning_model_path + ) data_store_path = staticmethod(SearchTuningServiceClient.data_store_path) parse_data_store_path = staticmethod( SearchTuningServiceClient.parse_data_store_path @@ -373,6 +382,96 @@ async def sample_train_custom_model(): # Done; return the response. return response + async def list_custom_models( + self, + request: Optional[ + Union[search_tuning_service.ListCustomModelsRequest, dict] + ] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> search_tuning_service.ListCustomModelsResponse: + r"""Gets a list of all the custom models. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import discoveryengine_v1beta + + async def sample_list_custom_models(): + # Create a client + client = discoveryengine_v1beta.SearchTuningServiceAsyncClient() + + # Initialize request argument(s) + request = discoveryengine_v1beta.ListCustomModelsRequest( + data_store="data_store_value", + ) + + # Make the request + response = await client.list_custom_models(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.discoveryengine_v1beta.types.ListCustomModelsRequest, dict]]): + The request object. Request message for + [SearchTuningService.ListCustomModels][google.cloud.discoveryengine.v1beta.SearchTuningService.ListCustomModels] + method. 
+ retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.discoveryengine_v1beta.types.ListCustomModelsResponse: + Response message for + [SearchTuningService.ListCustomModels][google.cloud.discoveryengine.v1beta.SearchTuningService.ListCustomModels] + method. + + """ + # Create or coerce a protobuf request object. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, search_tuning_service.ListCustomModelsRequest): + request = search_tuning_service.ListCustomModelsRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.list_custom_models + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("data_store", request.data_store),) + ), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + async def list_operations( self, request: Optional[operations_pb2.ListOperationsRequest] = None, @@ -487,6 +586,63 @@ async def get_operation( # Done; return the response. return response + async def cancel_operation( + self, + request: Optional[operations_pb2.CancelOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Starts asynchronous cancellation on a long-running operation. + + The server makes a best effort to cancel the operation, but success + is not guaranteed. If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.CancelOperationRequest`): + The request object. Request message for + `CancelOperation` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.CancelOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.cancel_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. 
+ await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + async def __aenter__(self) -> "SearchTuningServiceAsyncClient": return self diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/search_tuning_service/client.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/search_tuning_service/client.py index 9fb49df58db5..116e20bf5440 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/search_tuning_service/client.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/search_tuning_service/client.py @@ -53,7 +53,10 @@ from google.cloud.location import locations_pb2 # type: ignore from google.longrunning import operations_pb2 # type: ignore -from google.cloud.discoveryengine_v1beta.types import search_tuning_service +from google.cloud.discoveryengine_v1beta.types import ( + custom_tuning_model, + search_tuning_service, +) from .transports.base import DEFAULT_CLIENT_INFO, SearchTuningServiceTransport from .transports.grpc import SearchTuningServiceGrpcTransport @@ -187,6 +190,30 @@ def transport(self) -> SearchTuningServiceTransport: """ return self._transport + @staticmethod + def custom_tuning_model_path( + project: str, + location: str, + data_store: str, + custom_tuning_model: str, + ) -> str: + """Returns a fully-qualified custom_tuning_model string.""" + return "projects/{project}/locations/{location}/dataStores/{data_store}/customTuningModels/{custom_tuning_model}".format( + project=project, + location=location, + data_store=data_store, + custom_tuning_model=custom_tuning_model, + ) + + @staticmethod + def parse_custom_tuning_model_path(path: str) -> Dict[str, str]: + """Parses a custom_tuning_model path into its component segments.""" + m = re.match( + r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/dataStores/(?P<data_store>.+?)/customTuningModels/(?P<custom_tuning_model>.+?)$", + path, + ) + return m.groupdict() if m else {} + @staticmethod def data_store_path( project: str, @@ -789,6 +816,94 @@ def sample_train_custom_model(): # Done; return the response. return response + def list_custom_models( + self, + request: Optional[ + Union[search_tuning_service.ListCustomModelsRequest, dict] + ] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> search_tuning_service.ListCustomModelsResponse: + r"""Gets a list of all the custom models. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization.
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import discoveryengine_v1beta + + def sample_list_custom_models(): + # Create a client + client = discoveryengine_v1beta.SearchTuningServiceClient() + + # Initialize request argument(s) + request = discoveryengine_v1beta.ListCustomModelsRequest( + data_store="data_store_value", + ) + + # Make the request + response = client.list_custom_models(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.discoveryengine_v1beta.types.ListCustomModelsRequest, dict]): + The request object. Request message for + [SearchTuningService.ListCustomModels][google.cloud.discoveryengine.v1beta.SearchTuningService.ListCustomModels] + method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.discoveryengine_v1beta.types.ListCustomModelsResponse: + Response message for + [SearchTuningService.ListCustomModels][google.cloud.discoveryengine.v1beta.SearchTuningService.ListCustomModels] + method. + + """ + # Create or coerce a protobuf request object. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, search_tuning_service.ListCustomModelsRequest): + request = search_tuning_service.ListCustomModelsRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_custom_models] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("data_store", request.data_store),) + ), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + def __enter__(self) -> "SearchTuningServiceClient": return self @@ -916,6 +1031,63 @@ def get_operation( # Done; return the response. return response + def cancel_operation( + self, + request: Optional[operations_pb2.CancelOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Starts asynchronous cancellation on a long-running operation. + + The server makes a best effort to cancel the operation, but success + is not guaranteed. If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.CancelOperationRequest`): + The request object. Request message for + `CancelOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. 
+ # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.CancelOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.cancel_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( gapic_version=package_version.__version__ diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/search_tuning_service/transports/base.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/search_tuning_service/transports/base.py index eef8cb6fa0e1..a2e4487a11c5 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/search_tuning_service/transports/base.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/search_tuning_service/transports/base.py @@ -133,6 +133,11 @@ def _prep_wrapped_messages(self, client_info): default_timeout=None, client_info=client_info, ), + self.list_custom_models: gapic_v1.method.wrap_method( + self.list_custom_models, + default_timeout=None, + client_info=client_info, + ), } def close(self): @@ -158,6 +163,18 @@ def train_custom_model( ]: raise NotImplementedError() + @property + def list_custom_models( + self, + ) -> Callable[ + [search_tuning_service.ListCustomModelsRequest], + Union[ + search_tuning_service.ListCustomModelsResponse, + Awaitable[search_tuning_service.ListCustomModelsResponse], + ], + ]: + raise NotImplementedError() + @property def list_operations( self, @@ -179,6 +196,12 @@ def get_operation( ]: raise NotImplementedError() + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None,]: + raise NotImplementedError() + @property def kind(self) -> str: raise NotImplementedError() diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/search_tuning_service/transports/grpc.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/search_tuning_service/transports/grpc.py index c391bbe1ed27..c58b220a9aef 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/search_tuning_service/transports/grpc.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/search_tuning_service/transports/grpc.py @@ -278,9 +278,55 @@ def train_custom_model( ) return self._stubs["train_custom_model"] + @property + def list_custom_models( + self, + ) -> Callable[ + [search_tuning_service.ListCustomModelsRequest], + search_tuning_service.ListCustomModelsResponse, + ]: + r"""Return a callable for the list custom models method over gRPC. + + Gets a list of all the custom models. + + Returns: + Callable[[~.ListCustomModelsRequest], + ~.ListCustomModelsResponse]: + A function that, when called, will call the underlying RPC + on the server. 
+ """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_custom_models" not in self._stubs: + self._stubs["list_custom_models"] = self.grpc_channel.unary_unary( + "/google.cloud.discoveryengine.v1beta.SearchTuningService/ListCustomModels", + request_serializer=search_tuning_service.ListCustomModelsRequest.serialize, + response_deserializer=search_tuning_service.ListCustomModelsResponse.deserialize, + ) + return self._stubs["list_custom_models"] + def close(self): self.grpc_channel.close() + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None]: + r"""Return a callable for the cancel_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "cancel_operation" not in self._stubs: + self._stubs["cancel_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/CancelOperation", + request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["cancel_operation"] + @property def get_operation( self, diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/search_tuning_service/transports/grpc_asyncio.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/search_tuning_service/transports/grpc_asyncio.py index 0d012e9a38e0..5bcb1324a7d3 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/search_tuning_service/transports/grpc_asyncio.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/search_tuning_service/transports/grpc_asyncio.py @@ -285,6 +285,35 @@ def train_custom_model( ) return self._stubs["train_custom_model"] + @property + def list_custom_models( + self, + ) -> Callable[ + [search_tuning_service.ListCustomModelsRequest], + Awaitable[search_tuning_service.ListCustomModelsResponse], + ]: + r"""Return a callable for the list custom models method over gRPC. + + Gets a list of all the custom models. + + Returns: + Callable[[~.ListCustomModelsRequest], + Awaitable[~.ListCustomModelsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "list_custom_models" not in self._stubs: + self._stubs["list_custom_models"] = self.grpc_channel.unary_unary( + "/google.cloud.discoveryengine.v1beta.SearchTuningService/ListCustomModels", + request_serializer=search_tuning_service.ListCustomModelsRequest.serialize, + response_deserializer=search_tuning_service.ListCustomModelsResponse.deserialize, + ) + return self._stubs["list_custom_models"] + def _prep_wrapped_messages(self, client_info): """Precompute the wrapped methods, overriding the base class method to use async wrappers.""" self._wrapped_methods = { @@ -293,11 +322,33 @@ def _prep_wrapped_messages(self, client_info): default_timeout=None, client_info=client_info, ), + self.list_custom_models: gapic_v1.method_async.wrap_method( + self.list_custom_models, + default_timeout=None, + client_info=client_info, + ), } def close(self): return self.grpc_channel.close() + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None]: + r"""Return a callable for the cancel_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "cancel_operation" not in self._stubs: + self._stubs["cancel_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/CancelOperation", + request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["cancel_operation"] + @property def get_operation( self, diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/search_tuning_service/transports/rest.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/search_tuning_service/transports/rest.py index 942d158bd8db..8ebb6417b614 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/search_tuning_service/transports/rest.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/search_tuning_service/transports/rest.py @@ -72,6 +72,14 @@ class SearchTuningServiceRestInterceptor: .. code-block:: python class MyCustomSearchTuningServiceInterceptor(SearchTuningServiceRestInterceptor): + def pre_list_custom_models(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_custom_models(self, response): + logging.log(f"Received response: {response}") + return response + def pre_train_custom_model(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata @@ -86,6 +94,31 @@ def post_train_custom_model(self, response): """ + def pre_list_custom_models( + self, + request: search_tuning_service.ListCustomModelsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + search_tuning_service.ListCustomModelsRequest, Sequence[Tuple[str, str]] + ]: + """Pre-rpc interceptor for list_custom_models + + Override in a subclass to manipulate the request or metadata + before they are sent to the SearchTuningService server. 
+ """ + return request, metadata + + def post_list_custom_models( + self, response: search_tuning_service.ListCustomModelsResponse + ) -> search_tuning_service.ListCustomModelsResponse: + """Post-rpc interceptor for list_custom_models + + Override in a subclass to manipulate the response + after it is returned by the SearchTuningService server but before + it is returned to user code. + """ + return response + def pre_train_custom_model( self, request: search_tuning_service.TrainCustomModelRequest, @@ -111,6 +144,27 @@ def post_train_custom_model( """ return response + def pre_cancel_operation( + self, + request: operations_pb2.CancelOperationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[operations_pb2.CancelOperationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for cancel_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the SearchTuningService server. + """ + return request, metadata + + def post_cancel_operation(self, response: None) -> None: + """Post-rpc interceptor for cancel_operation + + Override in a subclass to manipulate the response + after it is returned by the SearchTuningService server but before + it is returned to user code. + """ + return response + def pre_get_operation( self, request: operations_pb2.GetOperationRequest, @@ -265,6 +319,18 @@ def operations_client(self) -> operations_v1.AbstractOperationsClient: # Only create a new client if we do not already have one. if self._operations_client is None: http_options: Dict[str, List[Dict[str, str]]] = { + "google.longrunning.Operations.CancelOperation": [ + { + "method": "post", + "uri": "/v1beta/{name=projects/*/locations/*/collections/*/dataStores/*/branches/*/operations/*}:cancel", + "body": "*", + }, + { + "method": "post", + "uri": "/v1beta/{name=projects/*/locations/*/dataStores/*/branches/*/operations/*}:cancel", + "body": "*", + }, + ], "google.longrunning.Operations.GetOperation": [ { "method": "get", @@ -399,6 +465,98 @@ def operations_client(self) -> operations_v1.AbstractOperationsClient: # Return the client from cache. return self._operations_client + class _ListCustomModels(SearchTuningServiceRestStub): + def __hash__(self): + return hash("ListCustomModels") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: search_tuning_service.ListCustomModelsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> search_tuning_service.ListCustomModelsResponse: + r"""Call the list custom models method over HTTP. + + Args: + request (~.search_tuning_service.ListCustomModelsRequest): + The request object. Request message for + [SearchTuningService.ListCustomModels][google.cloud.discoveryengine.v1beta.SearchTuningService.ListCustomModels] + method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.search_tuning_service.ListCustomModelsResponse: + Response message for + [SearchTuningService.ListCustomModels][google.cloud.discoveryengine.v1beta.SearchTuningService.ListCustomModels] + method. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1beta/{data_store=projects/*/locations/*/collections/*/dataStores/*}/customModels", + }, + ] + request, metadata = self._interceptor.pre_list_custom_models( + request, metadata + ) + pb_request = search_tuning_service.ListCustomModelsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = search_tuning_service.ListCustomModelsResponse() + pb_resp = search_tuning_service.ListCustomModelsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_custom_models(resp) + return resp + class _TrainCustomModel(SearchTuningServiceRestStub): def __hash__(self): return hash("TrainCustomModel") @@ -496,6 +654,17 @@ def __call__( resp = self._interceptor.post_train_custom_model(resp) return resp + @property + def list_custom_models( + self, + ) -> Callable[ + [search_tuning_service.ListCustomModelsRequest], + search_tuning_service.ListCustomModelsResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListCustomModels(self._session, self._host, self._interceptor) # type: ignore + @property def train_custom_model( self, @@ -506,6 +675,76 @@ def train_custom_model( # In C++ this would require a dynamic_cast return self._TrainCustomModel(self._session, self._host, self._interceptor) # type: ignore + @property + def cancel_operation(self): + return self._CancelOperation(self._session, self._host, self._interceptor) # type: ignore + + class _CancelOperation(SearchTuningServiceRestStub): + def __call__( + self, + request: operations_pb2.CancelOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Call the cancel operation method over HTTP. + + Args: + request (operations_pb2.CancelOperationRequest): + The request object for CancelOperation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1beta/{name=projects/*/locations/*/collections/*/dataStores/*/branches/*/operations/*}:cancel", + "body": "*", + }, + { + "method": "post", + "uri": "/v1beta/{name=projects/*/locations/*/dataStores/*/branches/*/operations/*}:cancel", + "body": "*", + }, + ] + + request, metadata = self._interceptor.pre_cancel_operation( + request, metadata + ) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + body = json.dumps(transcoded_request["body"]) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + return self._interceptor.post_cancel_operation(None) + @property def get_operation(self): return self._GetOperation(self._session, self._host, self._interceptor) # type: ignore diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/serving_config_service/async_client.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/serving_config_service/async_client.py index b6d738f730ed..7429162f6de6 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/serving_config_service/async_client.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/serving_config_service/async_client.py @@ -763,6 +763,63 @@ async def get_operation( # Done; return the response. return response + async def cancel_operation( + self, + request: Optional[operations_pb2.CancelOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Starts asynchronous cancellation on a long-running operation. + + The server makes a best effort to cancel the operation, but success + is not guaranteed. If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.CancelOperationRequest`): + The request object. Request message for + `CancelOperation` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.CancelOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = gapic_v1.method_async.wrap_method( + self._client._transport.cancel_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + async def __aenter__(self) -> "ServingConfigServiceAsyncClient": return self diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/serving_config_service/client.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/serving_config_service/client.py index d9c2cb79cecb..b7ebf097c488 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/serving_config_service/client.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/serving_config_service/client.py @@ -1187,6 +1187,63 @@ def get_operation( # Done; return the response. return response + def cancel_operation( + self, + request: Optional[operations_pb2.CancelOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Starts asynchronous cancellation on a long-running operation. + + The server makes a best effort to cancel the operation, but success + is not guaranteed. If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.CancelOperationRequest`): + The request object. Request message for + `CancelOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.CancelOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.cancel_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. 
+ rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( gapic_version=package_version.__version__ diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/serving_config_service/transports/base.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/serving_config_service/transports/base.py index 8922b5343ac8..0cf0bddb96c2 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/serving_config_service/transports/base.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/serving_config_service/transports/base.py @@ -212,6 +212,12 @@ def get_operation( ]: raise NotImplementedError() + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None,]: + raise NotImplementedError() + @property def kind(self) -> str: raise NotImplementedError() diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/serving_config_service/transports/grpc.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/serving_config_service/transports/grpc.py index 93e89e7f74c8..ba42061bb004 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/serving_config_service/transports/grpc.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/serving_config_service/transports/grpc.py @@ -334,6 +334,23 @@ def list_serving_configs( def close(self): self.grpc_channel.close() + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None]: + r"""Return a callable for the cancel_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "cancel_operation" not in self._stubs: + self._stubs["cancel_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/CancelOperation", + request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["cancel_operation"] + @property def get_operation( self, diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/serving_config_service/transports/grpc_asyncio.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/serving_config_service/transports/grpc_asyncio.py index 6781eb8dff42..0c60b6d7db9c 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/serving_config_service/transports/grpc_asyncio.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/serving_config_service/transports/grpc_asyncio.py @@ -359,6 +359,23 @@ def _prep_wrapped_messages(self, client_info): def close(self): return self.grpc_channel.close() + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None]: + r"""Return a callable for the cancel_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "cancel_operation" not in self._stubs: + self._stubs["cancel_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/CancelOperation", + request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["cancel_operation"] + @property def get_operation( self, diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/serving_config_service/transports/rest.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/serving_config_service/transports/rest.py index c428a2f7f176..4e4095433f92 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/serving_config_service/transports/rest.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/serving_config_service/transports/rest.py @@ -175,6 +175,27 @@ def post_update_serving_config( """ return response + def pre_cancel_operation( + self, + request: operations_pb2.CancelOperationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[operations_pb2.CancelOperationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for cancel_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the ServingConfigService server. + """ + return request, metadata + + def post_cancel_operation(self, response: None) -> None: + """Post-rpc interceptor for cancel_operation + + Override in a subclass to manipulate the response + after it is returned by the ServingConfigService server but before + it is returned to user code. + """ + return response + def pre_get_operation( self, request: operations_pb2.GetOperationRequest, @@ -661,6 +682,76 @@ def update_serving_config( # In C++ this would require a dynamic_cast return self._UpdateServingConfig(self._session, self._host, self._interceptor) # type: ignore + @property + def cancel_operation(self): + return self._CancelOperation(self._session, self._host, self._interceptor) # type: ignore + + class _CancelOperation(ServingConfigServiceRestStub): + def __call__( + self, + request: operations_pb2.CancelOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Call the cancel operation method over HTTP. + + Args: + request (operations_pb2.CancelOperationRequest): + The request object for CancelOperation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1beta/{name=projects/*/locations/*/collections/*/dataStores/*/branches/*/operations/*}:cancel", + "body": "*", + }, + { + "method": "post", + "uri": "/v1beta/{name=projects/*/locations/*/dataStores/*/branches/*/operations/*}:cancel", + "body": "*", + }, + ] + + request, metadata = self._interceptor.pre_cancel_operation( + request, metadata + ) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + body = json.dumps(transcoded_request["body"]) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + return self._interceptor.post_cancel_operation(None) + @property def get_operation(self): return self._GetOperation(self._session, self._host, self._interceptor) # type: ignore diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/site_search_engine_service/async_client.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/site_search_engine_service/async_client.py index 9c6b0d66ecc7..2eb6bc3adb7b 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/site_search_engine_service/async_client.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/site_search_engine_service/async_client.py @@ -1845,6 +1845,63 @@ async def get_operation( # Done; return the response. return response + async def cancel_operation( + self, + request: Optional[operations_pb2.CancelOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Starts asynchronous cancellation on a long-running operation. + + The server makes a best effort to cancel the operation, but success + is not guaranteed. If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.CancelOperationRequest`): + The request object. Request message for + `CancelOperation` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.CancelOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = gapic_v1.method_async.wrap_method( + self._client._transport.cancel_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + async def __aenter__(self) -> "SiteSearchEngineServiceAsyncClient": return self diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/site_search_engine_service/client.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/site_search_engine_service/client.py index a98c776449a0..a305e255fc3c 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/site_search_engine_service/client.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/site_search_engine_service/client.py @@ -2272,6 +2272,63 @@ def get_operation( # Done; return the response. return response + def cancel_operation( + self, + request: Optional[operations_pb2.CancelOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Starts asynchronous cancellation on a long-running operation. + + The server makes a best effort to cancel the operation, but success + is not guaranteed. If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.CancelOperationRequest`): + The request object. Request message for + `CancelOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.CancelOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.cancel_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. 
+ rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( gapic_version=package_version.__version__ diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/site_search_engine_service/transports/base.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/site_search_engine_service/transports/base.py index e98bc92e9beb..ace7e817b1bc 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/site_search_engine_service/transports/base.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/site_search_engine_service/transports/base.py @@ -345,6 +345,12 @@ def get_operation( ]: raise NotImplementedError() + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None,]: + raise NotImplementedError() + @property def kind(self) -> str: raise NotImplementedError() diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/site_search_engine_service/transports/grpc.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/site_search_engine_service/transports/grpc.py index acba2cd7af07..a6a281ceb3aa 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/site_search_engine_service/transports/grpc.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/site_search_engine_service/transports/grpc.py @@ -616,6 +616,23 @@ def fetch_domain_verification_status( def close(self): self.grpc_channel.close() + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None]: + r"""Return a callable for the cancel_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "cancel_operation" not in self._stubs: + self._stubs["cancel_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/CancelOperation", + request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["cancel_operation"] + @property def get_operation( self, diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/site_search_engine_service/transports/grpc_asyncio.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/site_search_engine_service/transports/grpc_asyncio.py index be072c5e3ce7..dd0e09c472a6 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/site_search_engine_service/transports/grpc_asyncio.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/site_search_engine_service/transports/grpc_asyncio.py @@ -692,6 +692,23 @@ def _prep_wrapped_messages(self, client_info): def close(self): return self.grpc_channel.close() + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None]: + r"""Return a callable for the cancel_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "cancel_operation" not in self._stubs: + self._stubs["cancel_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/CancelOperation", + request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["cancel_operation"] + @property def get_operation( self, diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/site_search_engine_service/transports/rest.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/site_search_engine_service/transports/rest.py index 0c4435ab846f..8b8eb9e76fea 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/site_search_engine_service/transports/rest.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/site_search_engine_service/transports/rest.py @@ -482,6 +482,27 @@ def post_update_target_site( """ return response + def pre_cancel_operation( + self, + request: operations_pb2.CancelOperationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[operations_pb2.CancelOperationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for cancel_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the SiteSearchEngineService server. + """ + return request, metadata + + def post_cancel_operation(self, response: None) -> None: + """Post-rpc interceptor for cancel_operation + + Override in a subclass to manipulate the response + after it is returned by the SiteSearchEngineService server but before + it is returned to user code. + """ + return response + def pre_get_operation( self, request: operations_pb2.GetOperationRequest, @@ -636,6 +657,18 @@ def operations_client(self) -> operations_v1.AbstractOperationsClient: # Only create a new client if we do not already have one. if self._operations_client is None: http_options: Dict[str, List[Dict[str, str]]] = { + "google.longrunning.Operations.CancelOperation": [ + { + "method": "post", + "uri": "/v1beta/{name=projects/*/locations/*/collections/*/dataStores/*/branches/*/operations/*}:cancel", + "body": "*", + }, + { + "method": "post", + "uri": "/v1beta/{name=projects/*/locations/*/dataStores/*/branches/*/operations/*}:cancel", + "body": "*", + }, + ], "google.longrunning.Operations.GetOperation": [ { "method": "get", @@ -2097,6 +2130,76 @@ def update_target_site( # In C++ this would require a dynamic_cast return self._UpdateTargetSite(self._session, self._host, self._interceptor) # type: ignore + @property + def cancel_operation(self): + return self._CancelOperation(self._session, self._host, self._interceptor) # type: ignore + + class _CancelOperation(SiteSearchEngineServiceRestStub): + def __call__( + self, + request: operations_pb2.CancelOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Call the cancel operation method over HTTP. + + Args: + request (operations_pb2.CancelOperationRequest): + The request object for CancelOperation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1beta/{name=projects/*/locations/*/collections/*/dataStores/*/branches/*/operations/*}:cancel", + "body": "*", + }, + { + "method": "post", + "uri": "/v1beta/{name=projects/*/locations/*/dataStores/*/branches/*/operations/*}:cancel", + "body": "*", + }, + ] + + request, metadata = self._interceptor.pre_cancel_operation( + request, metadata + ) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + body = json.dumps(transcoded_request["body"]) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + return self._interceptor.post_cancel_operation(None) + @property def get_operation(self): return self._GetOperation(self._session, self._host, self._interceptor) # type: ignore diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/user_event_service/async_client.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/user_event_service/async_client.py index b72434459019..d06b00ac0cde 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/user_event_service/async_client.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/user_event_service/async_client.py @@ -81,6 +81,8 @@ class UserEventServiceAsyncClient: parse_data_store_path = staticmethod(UserEventServiceClient.parse_data_store_path) document_path = staticmethod(UserEventServiceClient.document_path) parse_document_path = staticmethod(UserEventServiceClient.parse_document_path) + engine_path = staticmethod(UserEventServiceClient.engine_path) + parse_engine_path = staticmethod(UserEventServiceClient.parse_engine_path) common_billing_account_path = staticmethod( UserEventServiceClient.common_billing_account_path ) @@ -326,7 +328,7 @@ async def sample_write_user_event(): UserEvent captures all metadata information Discovery Engine API needs to know about how end users interact - with customers' website. + with your website. """ # Create or coerce a protobuf request object. @@ -506,7 +508,7 @@ async def import_user_events( timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> operation_async.AsyncOperation: - r"""Bulk import of User events. Request processing might + r"""Bulk import of user events. Request processing might be synchronous. Events that already exist are skipped. Use this method for backfilling historical user events. @@ -725,6 +727,63 @@ async def get_operation( # Done; return the response. 
return response + async def cancel_operation( + self, + request: Optional[operations_pb2.CancelOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Starts asynchronous cancellation on a long-running operation. + + The server makes a best effort to cancel the operation, but success + is not guaranteed. If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.CancelOperationRequest`): + The request object. Request message for + `CancelOperation` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.CancelOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.cancel_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + async def __aenter__(self) -> "UserEventServiceAsyncClient": return self diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/user_event_service/client.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/user_event_service/client.py index 3f51faba876e..d6f13757b0f0 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/user_event_service/client.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/user_event_service/client.py @@ -245,6 +245,30 @@ def parse_document_path(path: str) -> Dict[str, str]: ) return m.groupdict() if m else {} + @staticmethod + def engine_path( + project: str, + location: str, + collection: str, + engine: str, + ) -> str: + """Returns a fully-qualified engine string.""" + return "projects/{project}/locations/{location}/collections/{collection}/engines/{engine}".format( + project=project, + location=location, + collection=collection, + engine=engine, + ) + + @staticmethod + def parse_engine_path(path: str) -> Dict[str, str]: + """Parses a engine path into its component segments.""" + m = re.match( + r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/collections/(?P<collection>.+?)/engines/(?P<engine>.+?)$", + path, + ) + return m.groupdict() if m else {} + @staticmethod def common_billing_account_path( billing_account: str, @@ -771,7 +795,7 @@ def sample_write_user_event(): UserEvent captures all metadata information Discovery Engine API needs to know about how end users interact - with customers' website. + with your website. """ # Create or coerce a protobuf request object.
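
As orientation for the hunks above and below: the clients gain a `cancel_operation` pass-through to `google.longrunning.Operations/CancelOperation`, and `UserEventServiceClient` gains `engine_path`/`parse_engine_path` helpers. A minimal usage sketch, assuming the clients are re-exported at the package level as usual; the project, data store, and operation identifiers are placeholders, not values from this diff:

```python
from google.cloud import discoveryengine_v1beta as discoveryengine
from google.longrunning import operations_pb2

# Build and split an engine resource name with the new helpers.
engine = discoveryengine.UserEventServiceClient.engine_path(
    project="my-project",
    location="global",
    collection="default_collection",
    engine="my-engine",
)
assert discoveryengine.UserEventServiceClient.parse_engine_path(engine) == {
    "project": "my-project",
    "location": "global",
    "collection": "default_collection",
    "engine": "my-engine",
}

# Cancel a long-running operation by name; the request may also be passed
# as a plain dict, which the method coerces to CancelOperationRequest.
client = discoveryengine.UserEventServiceClient()  # uses default credentials
client.cancel_operation(
    operations_pb2.CancelOperationRequest(
        name=(
            "projects/my-project/locations/global/collections/default_collection"
            "/dataStores/my-data-store/branches/0/operations/my-operation"
        )
    )
)
```

The same surface is added to the async clients and to each transport (gRPC, gRPC-asyncio, REST), so the call shape is identical there apart from `await`.
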
@@ -947,7 +971,7 @@ def import_user_events( timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> operation.Operation: - r"""Bulk import of User events. Request processing might + r"""Bulk import of user events. Request processing might be synchronous. Events that already exist are skipped. Use this method for backfilling historical user events. @@ -1177,6 +1201,63 @@ def get_operation( # Done; return the response. return response + def cancel_operation( + self, + request: Optional[operations_pb2.CancelOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Starts asynchronous cancellation on a long-running operation. + + The server makes a best effort to cancel the operation, but success + is not guaranteed. If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.CancelOperationRequest`): + The request object. Request message for + `CancelOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.CancelOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.cancel_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. 
+ rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( gapic_version=package_version.__version__ diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/user_event_service/transports/base.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/user_event_service/transports/base.py index 2d3f5caf96d7..4920bc1f92cf 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/user_event_service/transports/base.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/user_event_service/transports/base.py @@ -221,6 +221,12 @@ def get_operation( ]: raise NotImplementedError() + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None,]: + raise NotImplementedError() + @property def kind(self) -> str: raise NotImplementedError() diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/user_event_service/transports/grpc.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/user_event_service/transports/grpc.py index 3f8a64c3626f..1da343c486d5 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/user_event_service/transports/grpc.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/user_event_service/transports/grpc.py @@ -320,7 +320,7 @@ def import_user_events( ) -> Callable[[import_config.ImportUserEventsRequest], operations_pb2.Operation]: r"""Return a callable for the import user events method over gRPC. - Bulk import of User events. Request processing might + Bulk import of user events. Request processing might be synchronous. Events that already exist are skipped. Use this method for backfilling historical user events. @@ -350,6 +350,23 @@ def import_user_events( def close(self): self.grpc_channel.close() + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None]: + r"""Return a callable for the cancel_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "cancel_operation" not in self._stubs: + self._stubs["cancel_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/CancelOperation", + request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["cancel_operation"] + @property def get_operation( self, diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/user_event_service/transports/grpc_asyncio.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/user_event_service/transports/grpc_asyncio.py index 2cf4572dbef2..3816aa852e0e 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/user_event_service/transports/grpc_asyncio.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/user_event_service/transports/grpc_asyncio.py @@ -332,7 +332,7 @@ def import_user_events( ]: r"""Return a callable for the import user events method over gRPC. - Bulk import of User events. 
Request processing might + Bulk import of user events. Request processing might be synchronous. Events that already exist are skipped. Use this method for backfilling historical user events. @@ -391,6 +391,23 @@ def _prep_wrapped_messages(self, client_info): def close(self): return self.grpc_channel.close() + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None]: + r"""Return a callable for the cancel_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "cancel_operation" not in self._stubs: + self._stubs["cancel_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/CancelOperation", + request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["cancel_operation"] + @property def get_operation( self, diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/user_event_service/transports/rest.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/user_event_service/transports/rest.py index 6ff4e92befea..1802f3f2cc2b 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/user_event_service/transports/rest.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/user_event_service/transports/rest.py @@ -176,6 +176,27 @@ def post_write_user_event( """ return response + def pre_cancel_operation( + self, + request: operations_pb2.CancelOperationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[operations_pb2.CancelOperationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for cancel_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the UserEventService server. + """ + return request, metadata + + def post_cancel_operation(self, response: None) -> None: + """Post-rpc interceptor for cancel_operation + + Override in a subclass to manipulate the response + after it is returned by the UserEventService server but before + it is returned to user code. + """ + return response + def pre_get_operation( self, request: operations_pb2.GetOperationRequest, @@ -331,6 +352,18 @@ def operations_client(self) -> operations_v1.AbstractOperationsClient: # Only create a new client if we do not already have one. if self._operations_client is None: http_options: Dict[str, List[Dict[str, str]]] = { + "google.longrunning.Operations.CancelOperation": [ + { + "method": "post", + "uri": "/v1beta/{name=projects/*/locations/*/collections/*/dataStores/*/branches/*/operations/*}:cancel", + "body": "*", + }, + { + "method": "post", + "uri": "/v1beta/{name=projects/*/locations/*/dataStores/*/branches/*/operations/*}:cancel", + "body": "*", + }, + ], "google.longrunning.Operations.GetOperation": [ { "method": "get", @@ -748,7 +781,7 @@ def __call__( UserEvent captures all metadata information Discovery Engine API needs to know about how end users interact - with customers' website. + with your website. 
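
The REST transport also gains `pre_cancel_operation`/`post_cancel_operation` interceptor hooks (added above). A sketch of overriding them, assuming the generated interceptor class follows the usual `UserEventServiceRestInterceptor` naming; the logging behaviour is purely illustrative:

```python
from google.cloud.discoveryengine_v1beta.services.user_event_service.transports.rest import (
    UserEventServiceRestInterceptor,
)


class LoggingInterceptor(UserEventServiceRestInterceptor):
    """Logs every CancelOperation request before it is sent."""

    def pre_cancel_operation(self, request, metadata):
        print(f"cancelling operation: {request.name}")
        return request, metadata  # must return the (request, metadata) pair

    def post_cancel_operation(self, response):
        return response  # CancelOperation returns None, so nothing to adjust
```

An instance of such a subclass would typically be supplied through the REST transport's `interceptor` argument when constructing the client, the same way the existing hooks are wired in.
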
""" @@ -763,6 +796,11 @@ def __call__( "uri": "/v1beta/{parent=projects/*/locations/*/collections/*/dataStores/*}/userEvents:write", "body": "user_event", }, + { + "method": "post", + "uri": "/v1beta/{parent=projects/*/locations/*}/userEvents:write", + "body": "user_event", + }, ] request, metadata = self._interceptor.pre_write_user_event( request, metadata @@ -837,6 +875,76 @@ def write_user_event( # In C++ this would require a dynamic_cast return self._WriteUserEvent(self._session, self._host, self._interceptor) # type: ignore + @property + def cancel_operation(self): + return self._CancelOperation(self._session, self._host, self._interceptor) # type: ignore + + class _CancelOperation(UserEventServiceRestStub): + def __call__( + self, + request: operations_pb2.CancelOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Call the cancel operation method over HTTP. + + Args: + request (operations_pb2.CancelOperationRequest): + The request object for CancelOperation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1beta/{name=projects/*/locations/*/collections/*/dataStores/*/branches/*/operations/*}:cancel", + "body": "*", + }, + { + "method": "post", + "uri": "/v1beta/{name=projects/*/locations/*/dataStores/*/branches/*/operations/*}:cancel", + "body": "*", + }, + ] + + request, metadata = self._interceptor.pre_cancel_operation( + request, metadata + ) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + body = json.dumps(transcoded_request["body"]) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + return self._interceptor.post_cancel_operation(None) + @property def get_operation(self): return self._GetOperation(self._session, self._host, self._interceptor) # type: ignore diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/__init__.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/__init__.py index 322aa6aed29b..c79184d5e0b9 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/__init__.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/__init__.py @@ -22,11 +22,21 @@ Interval, SearchAddOn, SearchTier, + SearchUseCase, SolutionType, UserInfo, ) from .completion import SuggestionDenyListEntry from .completion_service import CompleteQueryRequest, CompleteQueryResponse +from .control import Condition, Control +from .control_service import ( + CreateControlRequest, + DeleteControlRequest, + GetControlRequest, + ListControlsRequest, + ListControlsResponse, + UpdateControlRequest, +) from .conversation import ( Conversation, ConversationContext, @@ -53,6 +63,7 @@ UpdateConversationRequest, UpdateSessionRequest, ) +from .custom_tuning_model import CustomTuningModel from .data_store import DataStore from .data_store_service import ( CreateDataStoreMetadata, @@ -116,6 +127,8 @@ ImportUserEventsResponse, SpannerSource, ) +from .project import Project +from .project_service import ProvisionProjectMetadata, ProvisionProjectRequest from .purge_config import ( PurgeDocumentsMetadata, PurgeDocumentsRequest, @@ -140,6 +153,8 @@ ) from .search_service import SearchRequest, SearchResponse from .search_tuning_service import ( + ListCustomModelsRequest, + ListCustomModelsResponse, TrainCustomModelMetadata, TrainCustomModelRequest, TrainCustomModelResponse, @@ -204,10 +219,19 @@ "IndustryVertical", "SearchAddOn", "SearchTier", + "SearchUseCase", "SolutionType", "SuggestionDenyListEntry", "CompleteQueryRequest", "CompleteQueryResponse", + "Condition", + "Control", + "CreateControlRequest", + "DeleteControlRequest", + "GetControlRequest", + "ListControlsRequest", + "ListControlsResponse", + "UpdateControlRequest", "Conversation", "ConversationContext", "ConversationMessage", @@ -230,6 +254,7 @@ "ListSessionsResponse", "UpdateConversationRequest", "UpdateSessionRequest", + "CustomTuningModel", "DataStore", "CreateDataStoreMetadata", "CreateDataStoreRequest", @@ -284,6 +309,9 @@ "ImportUserEventsRequest", "ImportUserEventsResponse", "SpannerSource", + "Project", + "ProvisionProjectMetadata", + "ProvisionProjectRequest", "PurgeDocumentsMetadata", "PurgeDocumentsRequest", "PurgeDocumentsResponse", @@ -307,6 +335,8 @@ "UpdateSchemaRequest", "SearchRequest", "SearchResponse", + "ListCustomModelsRequest", + "ListCustomModelsResponse", "TrainCustomModelMetadata", "TrainCustomModelRequest", "TrainCustomModelResponse", diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/answer.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/answer.py index eeee75196a27..d2f5a75e5739 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/answer.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/answer.py @@ -17,6 +17,7 @@ from typing import MutableMapping, MutableSequence +from google.protobuf import struct_pb2 # type: ignore from 
google.protobuf import timestamp_pb2 # type: ignore import proto # type: ignore @@ -34,7 +35,7 @@ class Answer(proto.Message): Attributes: name (str): Immutable. Fully qualified name - ``project/*/locations/global/collections/{collection}/engines/{engine}/sessions/*/answers/*`` + ``projects/{project}/locations/global/collections/{collection}/engines/{engine}/sessions/*/answers/*`` state (google.cloud.discoveryengine_v1beta.types.Answer.State): The state of the answer generation. answer_text (str): @@ -180,6 +181,10 @@ class UnstructuredDocumentInfo(proto.Message): chunk_contents (MutableSequence[google.cloud.discoveryengine_v1beta.types.Answer.Reference.UnstructuredDocumentInfo.ChunkContent]): List of cited chunk contents derived from document content. + struct_data (google.protobuf.struct_pb2.Struct): + The structured JSON metadata for the + document. It is populated from the struct data + from the Chunk in search result. """ class ChunkContent(proto.Message): @@ -220,6 +225,11 @@ class ChunkContent(proto.Message): number=4, message="Answer.Reference.UnstructuredDocumentInfo.ChunkContent", ) + struct_data: struct_pb2.Struct = proto.Field( + proto.MESSAGE, + number=5, + message=struct_pb2.Struct, + ) class ChunkInfo(proto.Message): r"""Chunk information. @@ -251,6 +261,10 @@ class DocumentMetadata(proto.Message): Title. page_identifier (str): Page identifier. + struct_data (google.protobuf.struct_pb2.Struct): + The structured JSON metadata for the + document. It is populated from the struct data + from the Chunk in search result. """ document: str = proto.Field( @@ -269,6 +283,11 @@ class DocumentMetadata(proto.Message): proto.STRING, number=4, ) + struct_data: struct_pb2.Struct = proto.Field( + proto.MESSAGE, + number=5, + message=struct_pb2.Struct, + ) chunk: str = proto.Field( proto.STRING, diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/common.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/common.py index 750a2d3c14dc..d386bcfecba0 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/common.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/common.py @@ -24,6 +24,7 @@ manifest={ "IndustryVertical", "SolutionType", + "SearchUseCase", "SearchTier", "SearchAddOn", "Interval", @@ -82,6 +83,26 @@ class SolutionType(proto.Enum): SOLUTION_TYPE_GENERATIVE_CHAT = 4 +class SearchUseCase(proto.Enum): + r"""Defines a further subdivision of ``SolutionType``. Specifically + applies to + [SOLUTION_TYPE_SEARCH][google.cloud.discoveryengine.v1beta.SolutionType.SOLUTION_TYPE_SEARCH]. + + Values: + SEARCH_USE_CASE_UNSPECIFIED (0): + Value used when unset. Will not occur in CSS. + SEARCH_USE_CASE_SEARCH (1): + Search use case. Expects the traffic has a non-empty + [query][google.cloud.discoveryengine.v1beta.SearchRequest.query]. + SEARCH_USE_CASE_BROWSE (2): + Browse use case. Expects the traffic has an empty + [query][google.cloud.discoveryengine.v1beta.SearchRequest.query]. + """ + SEARCH_USE_CASE_UNSPECIFIED = 0 + SEARCH_USE_CASE_SEARCH = 1 + SEARCH_USE_CASE_BROWSE = 2 + + class SearchTier(proto.Enum): r"""Tiers of search features. Different tiers might have different pricing. 
To learn more, check the pricing diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/control.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/control.py new file mode 100644 index 000000000000..9fa4d299a928 --- /dev/null +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/control.py @@ -0,0 +1,334 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +from google.protobuf import timestamp_pb2 # type: ignore +import proto # type: ignore + +from google.cloud.discoveryengine_v1beta.types import common + +__protobuf__ = proto.module( + package="google.cloud.discoveryengine.v1beta", + manifest={ + "Condition", + "Control", + }, +) + + +class Condition(proto.Message): + r"""Defines circumstances to be checked before allowing a + behavior + + Attributes: + query_terms (MutableSequence[google.cloud.discoveryengine_v1beta.types.Condition.QueryTerm]): + Search only + A list of terms to match the query on. + + Maximum of 10 query terms. + active_time_range (MutableSequence[google.cloud.discoveryengine_v1beta.types.Condition.TimeRange]): + Range of time(s) specifying when condition is + active. + Maximum of 10 time ranges. + """ + + class QueryTerm(proto.Message): + r"""Matcher for search request query + + Attributes: + value (str): + The specific query value to match against + + Must be lowercase, must be UTF-8. Can have at most 3 space + separated terms if full_match is true. Cannot be an empty + string. Maximum length of 5000 characters. + full_match (bool): + Whether the search query needs to exactly + match the query term. + """ + + value: str = proto.Field( + proto.STRING, + number=1, + ) + full_match: bool = proto.Field( + proto.BOOL, + number=2, + ) + + class TimeRange(proto.Message): + r"""Used for time-dependent conditions. + + Attributes: + start_time (google.protobuf.timestamp_pb2.Timestamp): + Start of time range. + + Range is inclusive. + end_time (google.protobuf.timestamp_pb2.Timestamp): + End of time range. + + Range is inclusive. + Must be in the future. + """ + + start_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=1, + message=timestamp_pb2.Timestamp, + ) + end_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + + query_terms: MutableSequence[QueryTerm] = proto.RepeatedField( + proto.MESSAGE, + number=2, + message=QueryTerm, + ) + active_time_range: MutableSequence[TimeRange] = proto.RepeatedField( + proto.MESSAGE, + number=3, + message=TimeRange, + ) + + +class Control(proto.Message): + r"""Defines a conditioned behavior to employ during serving. Must be + attached to a + [ServingConfig][google.cloud.discoveryengine.v1beta.ServingConfig] + to be considered at serving time. 
Permitted actions dependent on + ``SolutionType``. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + boost_action (google.cloud.discoveryengine_v1beta.types.Control.BoostAction): + Defines a boost-type control + + This field is a member of `oneof`_ ``action``. + filter_action (google.cloud.discoveryengine_v1beta.types.Control.FilterAction): + Defines a filter-type control + Currently not supported by Recommendation + + This field is a member of `oneof`_ ``action``. + redirect_action (google.cloud.discoveryengine_v1beta.types.Control.RedirectAction): + Defines a redirect-type control. + + This field is a member of `oneof`_ ``action``. + synonyms_action (google.cloud.discoveryengine_v1beta.types.Control.SynonymsAction): + Treats a group of terms as synonyms of one + another. + + This field is a member of `oneof`_ ``action``. + name (str): + Immutable. Fully qualified name + ``projects/*/locations/global/dataStore/*/controls/*`` + display_name (str): + Required. Human readable name. The identifier + used in UI views. + Must be UTF-8 encoded string. Length limit is + 128 characters. Otherwise an INVALID ARGUMENT + error is thrown. + associated_serving_config_ids (MutableSequence[str]): + Output only. List of all + [ServingConfig][google.cloud.discoveryengine.v1beta.ServingConfig] + ids this control is attached to. May take up to 10 minutes + to update after changes. + solution_type (google.cloud.discoveryengine_v1beta.types.SolutionType): + Required. Immutable. What solution the + control belongs to. + Must be compatible with vertical of resource. + Otherwise an INVALID ARGUMENT error is thrown. + use_cases (MutableSequence[google.cloud.discoveryengine_v1beta.types.SearchUseCase]): + Specifies the use case for the control. Affects what + condition fields can be set. Only applies to + [SOLUTION_TYPE_SEARCH][google.cloud.discoveryengine.v1beta.SolutionType.SOLUTION_TYPE_SEARCH]. + Currently only allow one use case per control. Must be set + when solution_type is + [SolutionType.SOLUTION_TYPE_SEARCH][google.cloud.discoveryengine.v1beta.SolutionType.SOLUTION_TYPE_SEARCH]. + conditions (MutableSequence[google.cloud.discoveryengine_v1beta.types.Condition]): + Determines when the associated action will + trigger. + Omit to always apply the action. + Currently only a single condition may be + specified. Otherwise an INVALID ARGUMENT error + is thrown. + """ + + class BoostAction(proto.Message): + r"""Adjusts order of products in returned list. + + Attributes: + boost (float): + Required. Strength of the boost, which should be in [-1, 1]. + Negative boost means demotion. Default is 0.0 (No-op). + filter (str): + Required. Specifies which products to apply + the boost to. + If no filter is provided all products will be + boosted (No-op). Syntax documentation: + + https://cloud.google.com/retail/docs/filter-and-order + Maximum length is 5000 characters. + Otherwise an INVALID ARGUMENT error is thrown. + data_store (str): + Required. Specifies which data store's documents can be + boosted by this control. Full data store name e.g. 
+ projects/123/locations/global/collections/default_collection/dataStores/default_data_store + """ + + boost: float = proto.Field( + proto.FLOAT, + number=1, + ) + filter: str = proto.Field( + proto.STRING, + number=2, + ) + data_store: str = proto.Field( + proto.STRING, + number=3, + ) + + class FilterAction(proto.Message): + r"""Specified which products may be included in results. + Uses same filter as boost. + + Attributes: + filter (str): + Required. A filter to apply on the matching + condition results. + Required + Syntax documentation: + + https://cloud.google.com/retail/docs/filter-and-order + Maximum length is 5000 characters. Otherwise an + INVALID ARGUMENT error is thrown. + data_store (str): + Required. Specifies which data store's documents can be + filtered by this control. Full data store name e.g. + projects/123/locations/global/collections/default_collection/dataStores/default_data_store + """ + + filter: str = proto.Field( + proto.STRING, + number=1, + ) + data_store: str = proto.Field( + proto.STRING, + number=2, + ) + + class RedirectAction(proto.Message): + r"""Redirects a shopper to the provided URI. + + Attributes: + redirect_uri (str): + Required. The URI to which the shopper will + be redirected. + Required. + URI must have length equal or less than 2000 + characters. Otherwise an INVALID ARGUMENT error + is thrown. + """ + + redirect_uri: str = proto.Field( + proto.STRING, + number=1, + ) + + class SynonymsAction(proto.Message): + r"""Creates a set of terms that will act as synonyms of one + another. + Example: "happy" will also be considered as "glad", "glad" will + also be considered as "happy". + + Attributes: + synonyms (MutableSequence[str]): + Defines a set of synonyms. + Can specify up to 100 synonyms. + Must specify at least 2 synonyms. Otherwise an + INVALID ARGUMENT error is thrown. 
+ """ + + synonyms: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=1, + ) + + boost_action: BoostAction = proto.Field( + proto.MESSAGE, + number=6, + oneof="action", + message=BoostAction, + ) + filter_action: FilterAction = proto.Field( + proto.MESSAGE, + number=7, + oneof="action", + message=FilterAction, + ) + redirect_action: RedirectAction = proto.Field( + proto.MESSAGE, + number=9, + oneof="action", + message=RedirectAction, + ) + synonyms_action: SynonymsAction = proto.Field( + proto.MESSAGE, + number=10, + oneof="action", + message=SynonymsAction, + ) + name: str = proto.Field( + proto.STRING, + number=1, + ) + display_name: str = proto.Field( + proto.STRING, + number=2, + ) + associated_serving_config_ids: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=3, + ) + solution_type: common.SolutionType = proto.Field( + proto.ENUM, + number=4, + enum=common.SolutionType, + ) + use_cases: MutableSequence[common.SearchUseCase] = proto.RepeatedField( + proto.ENUM, + number=8, + enum=common.SearchUseCase, + ) + conditions: MutableSequence["Condition"] = proto.RepeatedField( + proto.MESSAGE, + number=5, + message="Condition", + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/control_service.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/control_service.py new file mode 100644 index 000000000000..a3b899dff539 --- /dev/null +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/control_service.py @@ -0,0 +1,202 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +from google.protobuf import field_mask_pb2 # type: ignore +import proto # type: ignore + +from google.cloud.discoveryengine_v1beta.types import control as gcd_control + +__protobuf__ = proto.module( + package="google.cloud.discoveryengine.v1beta", + manifest={ + "CreateControlRequest", + "UpdateControlRequest", + "DeleteControlRequest", + "GetControlRequest", + "ListControlsRequest", + "ListControlsResponse", + }, +) + + +class CreateControlRequest(proto.Message): + r"""Request for CreateControl method. + + Attributes: + parent (str): + Required. Full resource name of parent data store. Format: + ``projects/{project_number}/locations/{location_id}/collections/{collection_id}/dataStores/{data_store_id}`` + or + ``projects/{project_number}/locations/{location_id}/collections/{collection_id}/engines/{engine_id}``. + control (google.cloud.discoveryengine_v1beta.types.Control): + Required. The Control to create. + control_id (str): + Required. The ID to use for the Control, which will become + the final component of the Control's resource name. + + This value must be within 1-63 characters. Valid characters + are /[a-z][0-9]-_/. 
+ """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + control: gcd_control.Control = proto.Field( + proto.MESSAGE, + number=2, + message=gcd_control.Control, + ) + control_id: str = proto.Field( + proto.STRING, + number=3, + ) + + +class UpdateControlRequest(proto.Message): + r"""Request for UpdateControl method. + + Attributes: + control (google.cloud.discoveryengine_v1beta.types.Control): + Required. The Control to update. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Optional. Indicates which fields in the provided + [Control][google.cloud.discoveryengine.v1beta.Control] to + update. The following are NOT supported: + + - [Control.name][google.cloud.discoveryengine.v1beta.Control.name] + - [Control.solution_type][google.cloud.discoveryengine.v1beta.Control.solution_type] + + If not set or empty, all supported fields are updated. + """ + + control: gcd_control.Control = proto.Field( + proto.MESSAGE, + number=1, + message=gcd_control.Control, + ) + update_mask: field_mask_pb2.FieldMask = proto.Field( + proto.MESSAGE, + number=2, + message=field_mask_pb2.FieldMask, + ) + + +class DeleteControlRequest(proto.Message): + r"""Request for DeleteControl method. + + Attributes: + name (str): + Required. The resource name of the Control to delete. + Format: + ``projects/{project_number}/locations/{location_id}/collections/{collection_id}/dataStores/{data_store_id}/controls/{control_id}`` + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class GetControlRequest(proto.Message): + r"""Request for GetControl method. + + Attributes: + name (str): + Required. The resource name of the Control to get. Format: + ``projects/{project_number}/locations/{location_id}/collections/{collection_id}/dataStores/{data_store_id}/controls/{control_id}`` + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class ListControlsRequest(proto.Message): + r"""Request for ListControls method. + + Attributes: + parent (str): + Required. The data store resource name. Format: + ``projects/{project_number}/locations/{location_id}/collections/{collection_id}/dataStores/{data_store_id}`` + or + ``projects/{project_number}/locations/{location_id}/collections/{collection_id}/engines/{engine_id}``. + page_size (int): + Optional. Maximum number of results to + return. If unspecified, defaults to 50. Max + allowed value is 1000. + page_token (str): + Optional. A page token, received from a previous + ``ListControls`` call. Provide this to retrieve the + subsequent page. + filter (str): + Optional. A filter to apply on the list results. Supported + features: + + - List all the products under the parent branch if + [filter][google.cloud.discoveryengine.v1beta.ListControlsRequest.filter] + is unset. Currently this field is unsupported. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + filter: str = proto.Field( + proto.STRING, + number=4, + ) + + +class ListControlsResponse(proto.Message): + r"""Response for ListControls method. + + Attributes: + controls (MutableSequence[google.cloud.discoveryengine_v1beta.types.Control]): + All the Controls for a given data store. + next_page_token (str): + Pagination token, if not returned indicates + the last page. 
+ """ + + @property + def raw_page(self): + return self + + controls: MutableSequence[gcd_control.Control] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=gcd_control.Control, + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/conversation.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/conversation.py index 28287fa4a98f..d1a8927746f2 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/conversation.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/conversation.py @@ -40,9 +40,9 @@ class Conversation(proto.Message): Attributes: name (str): Immutable. Fully qualified name - ``project/*/locations/global/collections/{collection}/dataStore/*/conversations/*`` + ``projects/{project}/locations/global/collections/{collection}/dataStore/*/conversations/*`` or - ``project/*/locations/global/collections/{collection}/engines/*/conversations/*``. + ``projects/{project}/locations/global/collections/{collection}/engines/*/conversations/*``. state (google.cloud.discoveryengine_v1beta.types.Conversation.State): The state of the Conversation. user_pseudo_id (str): diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/conversational_search_service.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/conversational_search_service.py index 2c96fff4d53b..43f8e3e5c3ea 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/conversational_search_service.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/conversational_search_service.py @@ -392,12 +392,12 @@ class AnswerQueryRequest(proto.Message): session (str): The session resource name. Not required. - When session field is not set, the API is in - sessionless mode. + When session field is not set, the API is in sessionless + mode. - We support auto session mode: users can use the - wildcard symbol “-” as session id. A new id - will be automatically generated and assigned. + We support auto session mode: users can use the wildcard + symbol ``-`` as session ID. A new ID will be automatically + generated and assigned. safety_spec (google.cloud.discoveryengine_v1beta.types.AnswerQueryRequest.SafetySpec): Model specification. related_questions_spec (google.cloud.discoveryengine_v1beta.types.AnswerQueryRequest.RelatedQuestionsSpec): @@ -464,6 +464,8 @@ class RelatedQuestionsSpec(proto.Message): class AnswerGenerationSpec(proto.Message): r"""Answer generation specification. + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + Attributes: model_spec (google.cloud.discoveryengine_v1beta.types.AnswerQueryRequest.AnswerGenerationSpec.ModelSpec): Answer generation model specification. @@ -498,6 +500,16 @@ class AnswerGenerationSpec(proto.Message): field is set to ``true``, we skip generating answers for non-answer seeking queries and return fallback messages instead. + ignore_low_relevant_content (bool): + Specifies whether to filter out queries that have low + relevance. + + If this field is set to ``false``, all search results are + used regardless of relevance to generate answers. 
If set to + ``true`` or unset, the behavior will be determined + automatically by the service. + + This field is a member of `oneof`_ ``_ignore_low_relevant_content``. """ class ModelSpec(proto.Message): @@ -554,6 +566,11 @@ class PromptSpec(proto.Message): proto.BOOL, number=6, ) + ignore_low_relevant_content: bool = proto.Field( + proto.BOOL, + number=7, + optional=True, + ) class SearchSpec(proto.Message): r"""Search specification. @@ -619,6 +636,13 @@ class SearchParams(proto.Message): If this field is unrecognizable, an ``INVALID_ARGUMENT`` is returned. + data_store_specs (MutableSequence[google.cloud.discoveryengine_v1beta.types.SearchRequest.DataStoreSpec]): + Specs defining dataStores to filter on in a + search call and configurations for those + dataStores. This is only considered for engines + with multiple dataStores use case. For single + dataStore within an engine, they should use the + specs at the top level. """ max_return_results: int = proto.Field( @@ -638,6 +662,13 @@ class SearchParams(proto.Message): proto.STRING, number=4, ) + data_store_specs: MutableSequence[ + search_service.SearchRequest.DataStoreSpec + ] = proto.RepeatedField( + proto.MESSAGE, + number=7, + message=search_service.SearchRequest.DataStoreSpec, + ) class SearchResultList(proto.Message): r"""Search result list. @@ -883,7 +914,7 @@ class QueryRephraserSpec(proto.Message): Disable query rephraser. max_rephrase_steps (int): Max rephrase steps. - The max number is 10 steps. + The max number is 5 steps. If not set or set to < 1, it will be set to 1 by default. """ @@ -975,6 +1006,8 @@ class AnswerQueryResponse(proto.Message): session field is set and valid in the [AnswerQueryRequest][google.cloud.discoveryengine.v1beta.AnswerQueryRequest] request. + answer_query_token (str): + A global unique ID used for logging. """ answer: gcd_answer.Answer = proto.Field( @@ -987,6 +1020,10 @@ class AnswerQueryResponse(proto.Message): number=2, message=gcd_session.Session, ) + answer_query_token: str = proto.Field( + proto.STRING, + number=3, + ) class GetAnswerRequest(proto.Message): diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/custom_tuning_model.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/custom_tuning_model.py new file mode 100644 index 000000000000..017f7af6efd8 --- /dev/null +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/custom_tuning_model.py @@ -0,0 +1,109 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +from google.protobuf import timestamp_pb2 # type: ignore +import proto # type: ignore + +__protobuf__ = proto.module( + package="google.cloud.discoveryengine.v1beta", + manifest={ + "CustomTuningModel", + }, +) + + +class CustomTuningModel(proto.Message): + r"""Metadata that describes a custom tuned model. + + Attributes: + name (str): + Required. The fully qualified resource name of the model. + + Format: + ``projects/{project_number}/locations/{location}/collections/{collection}/dataStores/{data_store}/customTuningModels/{custom_tuning_model}`` + model must be an alpha-numerical string with limit of 40 + characters. + display_name (str): + The display name of the model. + model_version (int): + The version of the model. + model_state (google.cloud.discoveryengine_v1beta.types.CustomTuningModel.ModelState): + The state that the model is in (e.g.``TRAINING`` or + ``TRAINING_FAILED``). + create_time (google.protobuf.timestamp_pb2.Timestamp): + Timestamp the Model was created at. + training_start_time (google.protobuf.timestamp_pb2.Timestamp): + Timestamp the model training was initiated. + """ + + class ModelState(proto.Enum): + r"""The state of the model. + + Values: + MODEL_STATE_UNSPECIFIED (0): + Default value. + TRAINING_PAUSED (1): + The model is in a paused training state. + TRAINING (2): + The model is currently training. + TRAINING_COMPLETE (3): + The model has successfully completed + training. + READY_FOR_SERVING (4): + The model is ready for serving. + TRAINING_FAILED (5): + The model training failed. + """ + MODEL_STATE_UNSPECIFIED = 0 + TRAINING_PAUSED = 1 + TRAINING = 2 + TRAINING_COMPLETE = 3 + READY_FOR_SERVING = 4 + TRAINING_FAILED = 5 + + name: str = proto.Field( + proto.STRING, + number=1, + ) + display_name: str = proto.Field( + proto.STRING, + number=2, + ) + model_version: int = proto.Field( + proto.INT64, + number=3, + ) + model_state: ModelState = proto.Field( + proto.ENUM, + number=4, + enum=ModelState, + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=5, + message=timestamp_pb2.Timestamp, + ) + training_start_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=6, + message=timestamp_pb2.Timestamp, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/data_store_service.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/data_store_service.py index d0065269ed1d..a42371276125 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/data_store_service.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/data_store_service.py @@ -175,8 +175,8 @@ class ListDataStoresRequest(proto.Message): must match the call that provided the page token. Otherwise, an INVALID_ARGUMENT error is returned. filter (str): - Filter by solution type. For example: filter = - 'solution_type:SOLUTION_TYPE_SEARCH' + Filter by solution type . 
For example: + ``filter = 'solution_type:SOLUTION_TYPE_SEARCH'`` """ parent: str = proto.Field( diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/document_service.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/document_service.py index 608caf7bd441..52f0c0f67f71 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/document_service.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/document_service.py @@ -83,7 +83,7 @@ class ListDocumentsRequest(proto.Message): Maximum number of [Document][google.cloud.discoveryengine.v1beta.Document]s to return. If unspecified, defaults to 100. The maximum allowed - value is 1000. Values above 1000 will be coerced to 1000. + value is 1000. Values above 1000 are set to 1000. If this field is negative, an ``INVALID_ARGUMENT`` error is returned. @@ -161,7 +161,7 @@ class CreateDocumentRequest(proto.Message): document_id (str): Required. The ID to use for the [Document][google.cloud.discoveryengine.v1beta.Document], - which will become the final component of the + which becomes the final component of the [Document.name][google.cloud.discoveryengine.v1beta.Document.name]. If the caller does not have permission to create the @@ -216,15 +216,15 @@ class UpdateDocumentRequest(proto.Message): [allow_missing][google.cloud.discoveryengine.v1beta.UpdateDocumentRequest.allow_missing] is not set, a ``NOT_FOUND`` error is returned. allow_missing (bool): - If set to true, and the + If set to ``true`` and the [Document][google.cloud.discoveryengine.v1beta.Document] is not found, a new - [Document][google.cloud.discoveryengine.v1beta.Document] - will be created. + [Document][google.cloud.discoveryengine.v1beta.Document] is + be created. update_mask (google.protobuf.field_mask_pb2.FieldMask): Indicates which fields in the provided - imported 'document' to update. If not set, will - by default update all fields. + imported 'document' to update. If not set, by + default updates all fields. """ document: gcd_document.Document = proto.Field( diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/engine.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/engine.py index 85b3133d0b8e..59b81842de63 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/engine.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/engine.py @@ -116,8 +116,7 @@ class Engine(proto.Message): restriction of the Engine industry vertical is based on [DataStore][google.cloud.discoveryengine.v1beta.DataStore]: If unspecified, default to ``GENERIC``. Vertical on Engine - has to match vertical of the DataStore liniked to the - engine. + has to match vertical of the DataStore linked to the engine. common_config (google.cloud.discoveryengine_v1beta.types.Engine.CommonConfig): Common config spec that specifies the metadata of the engine. @@ -253,10 +252,9 @@ class CommonConfig(proto.Message): Attributes: company_name (str): - Immutable. The name of the company, business - or entity that is associated with the engine. - Setting this may help improve LLM related - features. + The name of the company, business or entity + that is associated with the engine. Setting this + may help improve LLM related features. 
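
To make the `allow_missing` wording in the `UpdateDocumentRequest` hunk above concrete, a hedged upsert-style sketch; the document name, payload, and mask are placeholders, and `Document.json_data` is assumed from the existing v1beta surface rather than from this diff:

```python
from google.cloud import discoveryengine_v1beta as de
from google.protobuf import field_mask_pb2

request = de.UpdateDocumentRequest(
    document=de.Document(
        name=(
            "projects/123/locations/global/collections/default_collection"
            "/dataStores/default_data_store/branches/0/documents/doc-1"
        ),
        json_data='{"title": "Updated title"}',
    ),
    # With allow_missing=True the document is created if it does not exist.
    allow_missing=True,
    # Restrict the update to specific fields; omit to update all fields.
    update_mask=field_mask_pb2.FieldMask(paths=["json_data"]),
)
```
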
""" company_name: str = proto.Field( diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/grounded_generation_service.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/grounded_generation_service.py index d88ba576cd47..3734ca3d5c68 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/grounded_generation_service.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/grounded_generation_service.py @@ -65,12 +65,35 @@ class CheckGroundingRequest(proto.Message): Required. The resource name of the grounding config, such as ``projects/*/locations/global/groundingConfigs/default_grounding_config``. answer_candidate (str): - Answer candidate to check. + Answer candidate to check. Can have a maximum + length of 1024 characters. facts (MutableSequence[google.cloud.discoveryengine_v1beta.types.GroundingFact]): List of facts for the grounding check. We support up to 200 facts. grounding_spec (google.cloud.discoveryengine_v1beta.types.CheckGroundingSpec): Configuration of the grounding check. + user_labels (MutableMapping[str, str]): + The user labels applied to a resource must meet the + following requirements: + + - Each resource can have multiple labels, up to a maximum + of 64. + - Each label must be a key-value pair. + - Keys have a minimum length of 1 character and a maximum + length of 63 characters and cannot be empty. Values can + be empty and have a maximum length of 63 characters. + - Keys and values can contain only lowercase letters, + numeric characters, underscores, and dashes. All + characters must use UTF-8 encoding, and international + characters are allowed. + - The key portion of a label must be unique. However, you + can use the same key with multiple resources. + - Keys must start with a lowercase letter or international + character. + + See `Google Cloud + Document `__ + for more details. """ grounding_config: str = proto.Field( @@ -91,6 +114,11 @@ class CheckGroundingRequest(proto.Message): number=4, message="CheckGroundingSpec", ) + user_labels: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=5, + ) class CheckGroundingResponse(proto.Message): @@ -145,6 +173,18 @@ class Claim(proto.Message): means that cited_chunks[1], cited_chunks[3], cited_chunks[4] are the facts cited supporting for the claim. A citation to a fact indicates that the claim is supported by the fact. + grounding_check_required (bool): + Indicates that this claim required grounding check. When the + system decided this claim doesn't require + attribution/grounding check, this field will be set to + false. In that case, no grounding check was done for the + claim and therefore + [citation_indices][google.cloud.discoveryengine.v1beta.CheckGroundingResponse.Claim.citation_indices], + and + [anti_citation_indices][google.cloud.discoveryengine.v1beta.CheckGroundingResponse.Claim.anti_citation_indices] + should not be returned. + + This field is a member of `oneof`_ ``_grounding_check_required``. 
""" start_pos: int = proto.Field( @@ -165,6 +205,11 @@ class Claim(proto.Message): proto.INT32, number=4, ) + grounding_check_required: bool = proto.Field( + proto.BOOL, + number=6, + optional=True, + ) support_score: float = proto.Field( proto.FLOAT, diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/grounding.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/grounding.py index 0750fbfc2414..571f86b91515 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/grounding.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/grounding.py @@ -66,6 +66,9 @@ class FactChunk(proto.Message): GroundingFacts provided in the request then this field will contain the index of the specific fact from which this chunk was retrieved. + index (int): + The index of this chunk. Currently, only used + for the streaming mode. source_metadata (MutableMapping[str, str]): More fine-grained information for the source reference. @@ -79,6 +82,10 @@ class FactChunk(proto.Message): proto.STRING, number=2, ) + index: int = proto.Field( + proto.INT32, + number=4, + ) source_metadata: MutableMapping[str, str] = proto.MapField( proto.STRING, proto.STRING, diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/import_config.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/import_config.py index 66c6771b4d2a..85771a9ca7cc 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/import_config.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/import_config.py @@ -55,10 +55,10 @@ class GcsSource(proto.Message): Attributes: input_uris (MutableSequence[str]): - Required. Cloud Storage URIs to input files. URI can be up - to 2000 characters long. URIs can match the full object path - (for example, ``gs://bucket/directory/object.json``) or a - pattern matching one or more files, such as + Required. Cloud Storage URIs to input files. Each URI can be + up to 2000 characters long. URIs can match the full object + path (for example, ``gs://bucket/directory/object.json``) or + a pattern matching one or more files, such as ``gs://bucket/directory/*.json``. A request can contain at most 100 files (or 100,000 files if @@ -88,7 +88,7 @@ class GcsSource(proto.Message): as a Document. This can only be used by the GENERIC Data Store vertical. - Supported values for user even imports: + Supported values for user event imports: - ``user_event`` (default): One JSON [UserEvent][google.cloud.discoveryengine.v1beta.UserEvent] @@ -551,9 +551,9 @@ class FirestoreSource(proto.Message): Required. The Firestore database to copy the data from with a length limit of 256 characters. collection_id (str): - Required. The Firestore collection to copy - the data from with a length limit of 1,500 - characters. + Required. The Firestore collection (or + entity) to copy the data from with a length + limit of 1,500 characters. 
gcs_staging_dir (str): Intermediate Cloud Storage directory used for the import with a length limit of 2,000 diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/project.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/project.py new file mode 100644 index 000000000000..21e39ec3fc5c --- /dev/null +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/project.py @@ -0,0 +1,152 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +from google.protobuf import timestamp_pb2 # type: ignore +import proto # type: ignore + +__protobuf__ = proto.module( + package="google.cloud.discoveryengine.v1beta", + manifest={ + "Project", + }, +) + + +class Project(proto.Message): + r"""Metadata and configurations for a Google Cloud project in the + service. + + Attributes: + name (str): + Output only. Full resource name of the project, for example + ``projects/{project_number}``. Note that when making + requests, project number and project id are both acceptable, + but the server will always respond in project number. + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The timestamp when this project + is created. + provision_completion_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The timestamp when this project + is successfully provisioned. Empty value means + this project is still provisioning and is not + ready for use. + service_terms_map (MutableMapping[str, google.cloud.discoveryengine_v1beta.types.Project.ServiceTerms]): + Output only. A map of terms of services. The key is the + ``id`` of + [ServiceTerms][google.cloud.discoveryengine.v1beta.Project.ServiceTerms]. + """ + + class ServiceTerms(proto.Message): + r"""Metadata about the terms of service. + + Attributes: + id (str): + The unique identifier of this terms of service. Available + terms: + + - ``GA_DATA_USE_TERMS``: `Terms for data + use `__. + When using this as ``id``, the acceptable + [version][google.cloud.discoveryengine.v1beta.Project.ServiceTerms.version] + to provide is ``2022-11-23``. + version (str): + The version string of the terms of service. For acceptable + values, see the comments for + [id][google.cloud.discoveryengine.v1beta.Project.ServiceTerms.id] + above. + state (google.cloud.discoveryengine_v1beta.types.Project.ServiceTerms.State): + Whether the project has accepted/rejected the + service terms or it is still pending. + accept_time (google.protobuf.timestamp_pb2.Timestamp): + The last time when the project agreed to the + terms of service. + decline_time (google.protobuf.timestamp_pb2.Timestamp): + The last time when the project declined or + revoked the agreement to terms of service. + """ + + class State(proto.Enum): + r"""The agreement states this terms of service. 
+ + Values: + STATE_UNSPECIFIED (0): + The default value of the enum. This value is + not actually used. + TERMS_ACCEPTED (1): + The project has given consent to the terms of + service. + TERMS_PENDING (2): + The project is pending to review and accept + the terms of service. + TERMS_DECLINED (3): + The project has declined or revoked the + agreement to terms of service. + """ + STATE_UNSPECIFIED = 0 + TERMS_ACCEPTED = 1 + TERMS_PENDING = 2 + TERMS_DECLINED = 3 + + id: str = proto.Field( + proto.STRING, + number=1, + ) + version: str = proto.Field( + proto.STRING, + number=2, + ) + state: "Project.ServiceTerms.State" = proto.Field( + proto.ENUM, + number=4, + enum="Project.ServiceTerms.State", + ) + accept_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=5, + message=timestamp_pb2.Timestamp, + ) + decline_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=6, + message=timestamp_pb2.Timestamp, + ) + + name: str = proto.Field( + proto.STRING, + number=1, + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + provision_completion_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=3, + message=timestamp_pb2.Timestamp, + ) + service_terms_map: MutableMapping[str, ServiceTerms] = proto.MapField( + proto.STRING, + proto.MESSAGE, + number=4, + message=ServiceTerms, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/project_service.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/project_service.py new file mode 100644 index 000000000000..9471ed71c224 --- /dev/null +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/project_service.py @@ -0,0 +1,72 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + +__protobuf__ = proto.module( + package="google.cloud.discoveryengine.v1beta", + manifest={ + "ProvisionProjectRequest", + "ProvisionProjectMetadata", + }, +) + + +class ProvisionProjectRequest(proto.Message): + r"""Request for + [ProjectService.ProvisionProject][google.cloud.discoveryengine.v1beta.ProjectService.ProvisionProject] + method. + + Attributes: + name (str): + Required. Full resource name of a + [Project][google.cloud.discoveryengine.v1beta.Project], such + as ``projects/{project_id_or_number}``. + accept_data_use_terms (bool): + Required. Set to ``true`` to specify that caller has read + and would like to give consent to the `Terms for data + use `__. + data_use_terms_version (str): + Required. The version of the `Terms for data + use `__ that + caller has read and would like to give consent to. + + Acceptable version is ``2022-11-23``, and this may change + over time. 
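A minimal sketch of how these request fields might be used, assuming this change also generates a ``ProjectServiceClient`` whose ``provision_project`` method returns a long-running operation yielding the ``Project`` message above.

```python
from google.cloud import discoveryengine_v1beta


def provision_project(project: str) -> discoveryengine_v1beta.Project:
    """Accept the data-use terms and provision the project for the service."""
    client = discoveryengine_v1beta.ProjectServiceClient()

    request = discoveryengine_v1beta.ProvisionProjectRequest(
        name=f"projects/{project}",
        accept_data_use_terms=True,           # caller has read the terms
        data_use_terms_version="2022-11-23",  # currently the only accepted version
    )

    # Assumed long-running operation; block until provisioning finishes.
    operation = client.provision_project(request=request)
    return operation.result()
```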
+ """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + accept_data_use_terms: bool = proto.Field( + proto.BOOL, + number=2, + ) + data_use_terms_version: str = proto.Field( + proto.STRING, + number=3, + ) + + +class ProvisionProjectMetadata(proto.Message): + r"""Metadata associated with a project provision operation.""" + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/rank_service.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/rank_service.py index c7396cf30124..b1da68f767e7 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/rank_service.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/rank_service.py @@ -103,6 +103,28 @@ class RankRequest(proto.Message): If true, the response will contain only record ID and score. By default, it is false, the response will contain record details. + user_labels (MutableMapping[str, str]): + The user labels applied to a resource must meet the + following requirements: + + - Each resource can have multiple labels, up to a maximum + of 64. + - Each label must be a key-value pair. + - Keys have a minimum length of 1 character and a maximum + length of 63 characters and cannot be empty. Values can + be empty and have a maximum length of 63 characters. + - Keys and values can contain only lowercase letters, + numeric characters, underscores, and dashes. All + characters must use UTF-8 encoding, and international + characters are allowed. + - The key portion of a label must be unique. However, you + can use the same key with multiple resources. + - Keys must start with a lowercase letter or international + character. + + See `Google Cloud + Document `__ + for more details. """ ranking_config: str = proto.Field( @@ -130,6 +152,11 @@ class RankRequest(proto.Message): proto.BOOL, number=6, ) + user_labels: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=7, + ) class RankResponse(proto.Message): diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/recommendation_service.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/recommendation_service.py index d5e4f58f6aba..3c4b9068ea3d 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/recommendation_service.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/recommendation_service.py @@ -44,9 +44,8 @@ class RecommendRequest(proto.Message): ``projects/*/locations/global/collections/*/dataStores/*/servingConfigs/*`` One default serving config is created along with your - recommendation engine creation. The engine ID will be used - as the ID of the default serving config. For example, for - Engine + recommendation engine creation. The engine ID is used as the + ID of the default serving config. For example, for Engine ``projects/*/locations/global/collections/*/engines/my-engine``, you can use ``projects/*/locations/global/collections/*/engines/my-engine/servingConfigs/my-engine`` @@ -75,9 +74,9 @@ class RecommendRequest(proto.Message): page_size (int): Maximum number of results to return. Set this property to the number of recommendation results - needed. If zero, the service will choose a + needed. If zero, the service chooses a reasonable default. The maximum allowed value is - 100. 
Values above 100 will be coerced to 100. + 100. Values above 100 are set to 100. filter (str): Filter for restricting recommendation results with a length limit of 5,000 characters. Currently, only filter @@ -98,41 +97,39 @@ class RecommendRequest(proto.Message): - (available: true) AND (launguage: ANY("en", "es")) OR (categories: ANY("Movie")) - If your filter blocks all results, the API will return - generic (unfiltered) popular Documents. If you only want - results strictly matching the filters, set - ``strictFiltering`` to True in + If your filter blocks all results, the API returns generic + (unfiltered) popular Documents. If you only want results + strictly matching the filters, set ``strictFiltering`` to + ``true`` in [RecommendRequest.params][google.cloud.discoveryengine.v1beta.RecommendRequest.params] to receive empty results instead. - Note that the API will never return + Note that the API never returns [Document][google.cloud.discoveryengine.v1beta.Document]s - with ``storageStatus`` of ``EXPIRED`` or ``DELETED`` + with ``storageStatus`` as ``EXPIRED`` or ``DELETED`` regardless of filter choices. validate_only (bool): - Use validate only mode for this - recommendation query. If set to true, a fake - model will be used that returns arbitrary - Document IDs. Note that the validate only mode - should only be used for testing the API, or if - the model is not ready. + Use validate only mode for this recommendation query. If set + to ``true``, a fake model is used that returns arbitrary + Document IDs. Note that the validate only mode should only + be used for testing the API, or if the model is not ready. params (MutableMapping[str, google.protobuf.struct_pb2.Value]): Additional domain specific parameters for the recommendations. Allowed values: - - ``returnDocument``: Boolean. If set to true, the - associated Document object will be returned in + - ``returnDocument``: Boolean. If set to ``true``, the + associated Document object is returned in [RecommendResponse.RecommendationResult.document][google.cloud.discoveryengine.v1beta.RecommendResponse.RecommendationResult.document]. - ``returnScore``: Boolean. If set to true, the - recommendation 'score' corresponding to each returned - Document will be set in + recommendation score corresponding to each returned + Document is set in [RecommendResponse.RecommendationResult.metadata][google.cloud.discoveryengine.v1beta.RecommendResponse.RecommendationResult.metadata]. - The given 'score' indicates the probability of a Document + The given score indicates the probability of a Document conversion given the user's context and history. - ``strictFiltering``: Boolean. True by default. If set to - false, the service will return generic (unfiltered) + ``false``, the service returns generic (unfiltered) popular Documents instead of empty if your filter blocks all recommendation results. - ``diversityLevel``: String. Default empty. If set to be @@ -241,7 +238,7 @@ class RecommendationResult(proto.Message): Set if ``returnDocument`` is set to true in [RecommendRequest.params][google.cloud.discoveryengine.v1beta.RecommendRequest.params]. metadata (MutableMapping[str, google.protobuf.struct_pb2.Value]): - Additional Document metadata / annotations. + Additional Document metadata or annotations. 
Possible values: diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/schema_service.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/schema_service.py index e35aa31033de..4d070629d9cb 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/schema_service.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/schema_service.py @@ -72,11 +72,11 @@ class ListSchemasRequest(proto.Message): return. The service may return fewer than this value. If unspecified, at most 100 - [Schema][google.cloud.discoveryengine.v1beta.Schema]s will - be returned. + [Schema][google.cloud.discoveryengine.v1beta.Schema]s are + returned. - The maximum value is 1000; values above 1000 will be coerced - to 1000. + The maximum value is 1000; values above 1000 are set to + 1000. page_token (str): A page token, received from a previous [SchemaService.ListSchemas][google.cloud.discoveryengine.v1beta.SchemaService.ListSchemas] @@ -148,7 +148,7 @@ class CreateSchemaRequest(proto.Message): schema_id (str): Required. The ID to use for the [Schema][google.cloud.discoveryengine.v1beta.Schema], which - will become the final component of the + becomes the final component of the [Schema.name][google.cloud.discoveryengine.v1beta.Schema.name]. This field should conform to @@ -185,7 +185,7 @@ class UpdateSchemaRequest(proto.Message): If set to true, and the [Schema][google.cloud.discoveryengine.v1beta.Schema] is not found, a new - [Schema][google.cloud.discoveryengine.v1beta.Schema] will be + [Schema][google.cloud.discoveryengine.v1beta.Schema] is created. In this situation, ``update_mask`` is ignored. """ diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/search_service.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/search_service.py index 86aa2cc5ab3e..b6e2a2b40700 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/search_service.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/search_service.py @@ -92,8 +92,12 @@ class SearchRequest(proto.Message): If this field is negative, an ``INVALID_ARGUMENT`` is returned. data_store_specs (MutableSequence[google.cloud.discoveryengine_v1beta.types.SearchRequest.DataStoreSpec]): - A list of data store specs to apply on a - search call. + Specs defining dataStores to filter on in a + search call and configurations for those + dataStores. This is only considered for engines + with multiple dataStores use case. For single + dataStore within an engine, they should use the + specs at the top level. filter (str): The filter syntax consists of an expression language for constructing a predicate from one or more fields of the @@ -132,8 +136,9 @@ class SearchRequest(proto.Message): ordered by a field in an [Document][google.cloud.discoveryengine.v1beta.Document] object. Leave it unset if ordered by relevance. ``order_by`` - expression is case-sensitive. For more information on - ordering, see + expression is case-sensitive. + + For more information on ordering for retail search, see `Ordering `__ If this field is unrecognizable, an ``INVALID_ARGUMENT`` is @@ -152,7 +157,7 @@ class SearchRequest(proto.Message): boost_spec (google.cloud.discoveryengine_v1beta.types.SearchRequest.BoostSpec): Boost specification to boost certain documents. 
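A minimal sketch of a recommendation call that sets the ``returnDocument`` and ``returnScore`` params discussed above. The ``UserEvent`` and ``DocumentInfo`` shapes are taken from the existing v1beta types; the event type, document name, and serving config are placeholders.

```python
from google.protobuf import struct_pb2

from google.cloud import discoveryengine_v1beta


def recommend(serving_config: str, document_name: str, user_pseudo_id: str):
    """Request recommendations, asking for document payloads and scores."""
    client = discoveryengine_v1beta.RecommendationServiceClient()

    request = discoveryengine_v1beta.RecommendRequest(
        serving_config=serving_config,
        user_event=discoveryengine_v1beta.UserEvent(
            event_type="view-item",
            user_pseudo_id=user_pseudo_id,
            documents=[discoveryengine_v1beta.DocumentInfo(name=document_name)],
        ),
        page_size=10,
        params={
            "returnDocument": struct_pb2.Value(bool_value=True),
            "returnScore": struct_pb2.Value(bool_value=True),
        },
    )

    response = client.recommend(request=request)
    for result in response.results:
        # metadata carries the recommendation score when returnScore is set.
        print(result.id, result.metadata.get("score"))
    return response
```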
For more information on boosting, see - `Boosting `__ + `Boosting `__ params (MutableMapping[str, google.protobuf.struct_pb2.Value]): Additional search parameters. @@ -160,7 +165,8 @@ class SearchRequest(proto.Message): - ``user_country_code``: string. Default empty. If set to non-empty, results are restricted or boosted based on the - location provided. Example: user_country_code: "au" + location provided. For example, + ``user_country_code: "au"`` For available codes see `Country Codes `__ @@ -168,7 +174,7 @@ class SearchRequest(proto.Message): - ``search_type``: double. Default empty. Enables non-webpage searching depending on the value. The only valid non-default value is 1, which enables image - searching. Example: search_type: 1 + searching. For example, ``search_type: 1`` query_expansion_spec (google.cloud.discoveryengine_v1beta.types.SearchRequest.QueryExpansionSpec): The query expansion specification that specifies the conditions under which query @@ -278,7 +284,10 @@ class ImageQuery(proto.Message): ) class DataStoreSpec(proto.Message): - r"""A struct to define data stores to filter on in a search call. + r"""A struct to define data stores to filter on in a search call and + configurations for those data stores. A maximum of 1 DataStoreSpec + per data_store is allowed. Otherwise, an ``INVALID_ARGUMENT`` error + is returned. Attributes: data_store (str): @@ -300,9 +309,9 @@ class FacetSpec(proto.Message): facet_key (google.cloud.discoveryengine_v1beta.types.SearchRequest.FacetSpec.FacetKey): Required. The facet key specification. limit (int): - Maximum of facet values that should be returned for this - facet. If unspecified, defaults to 20. The maximum allowed - value is 300. Values above 300 are coerced to 300. + Maximum facet values that are returned for this facet. If + unspecified, defaults to 20. The maximum allowed value is + 300. Values above 300 are coerced to 300. If this field is negative, an ``INVALID_ARGUMENT`` is returned. @@ -404,7 +413,7 @@ class FacetKey(proto.Message): 2021". Only supported on textual fields. Maximum is 10. contains (MutableSequence[str]): - Only get facet values that contains the given + Only get facet values that contain the given strings. For example, suppose "category" has three values "Action > 2022", "Action > 2021" and "Sci-Fi > 2022". If set "contains" to @@ -582,7 +591,7 @@ class AttributeType(proto.Enum): datetime field specified. The value must be formatted as an XSD ``dayTimeDuration`` value (a restricted subset of an ISO 8601 duration value). The pattern for this is: - ``[nD][T[nH][nM][nS]]``. E.g. ``5D``, ``3DT12H30M``, + ``[nD][T[nH][nM][nS]]``. For example, ``5D``, ``3DT12H30M``, ``T24H``. """ ATTRIBUTE_TYPE_UNSPECIFIED = 0 @@ -729,8 +738,8 @@ class SpellCorrectionSpec(proto.Message): Attributes: mode (google.cloud.discoveryengine_v1beta.types.SearchRequest.SpellCorrectionSpec.Mode): - The mode under which spell correction should take effect to - replace the original search query. Default to + The mode under which spell correction replaces the original + search query. Defaults to [Mode.AUTO][google.cloud.discoveryengine.v1beta.SearchRequest.SpellCorrectionSpec.Mode.AUTO]. """ @@ -744,10 +753,10 @@ class Mode(proto.Enum): behavior defaults to [Mode.AUTO][google.cloud.discoveryengine.v1beta.SearchRequest.SpellCorrectionSpec.Mode.AUTO]. SUGGESTION_ONLY (1): - Search API will try to find a spell suggestion if there is - any and put in the + Search API tries to find a spelling suggestion. 
If a + suggestion is found, it is put in the [SearchResponse.corrected_query][google.cloud.discoveryengine.v1beta.SearchResponse.corrected_query]. - The spell suggestion will not be used as the search query. + The spelling suggestion won't be used as the search query. AUTO (2): Automatic spell correction built by the Search API. Search will be based on the @@ -823,7 +832,12 @@ class SummarySpec(proto.Message): ``summaryResultCount``, the summary is generated from all of the results. - At most 10 results can be used to generate a summary. + At most 10 results for documents mode, or 50 for chunks + mode, can be used to generate a summary. The chunks mode is + used when + [SearchRequest.ContentSearchSpec.search_result_mode][] is + set to + [CHUNKS][SearchRequest.ContentSearchSpec.SearchResultMode.CHUNKS]. include_citations (bool): Specifies whether to include citations in the summary. The default value is ``false``. @@ -1254,9 +1268,7 @@ class SearchResponse(proto.Message): Controls applied as part of the Control service. geo_search_debug_info (MutableSequence[google.cloud.discoveryengine_v1beta.types.SearchResponse.GeoSearchDebugInfo]): - Debug information specifically related to - forward geocoding issues arising from - Geolocation Search. + query_expansion_info (google.cloud.discoveryengine_v1beta.types.SearchResponse.QueryExpansionInfo): Query expansion information for the returned results. @@ -1271,9 +1283,8 @@ class SearchResult(proto.Message): of the searched [Document][google.cloud.discoveryengine.v1beta.Document]. document (google.cloud.discoveryengine_v1beta.types.Document): - The document data snippet in the search - response. Only fields that are marked as - retrievable are populated. + The document data snippet in the search response. Only + fields that are marked as ``retrievable`` are populated. model_scores (MutableMapping[str, google.cloud.discoveryengine_v1beta.types.DoubleList]): Google provided available scores. """ @@ -1299,8 +1310,8 @@ class Facet(proto.Message): Attributes: key (str): - The key for this facet. E.g., "colors" or "price". It - matches + The key for this facet. For example, ``"colors"`` or + ``"price"``. It matches [SearchRequest.FacetSpec.FacetKey.key][google.cloud.discoveryengine.v1beta.SearchRequest.FacetSpec.FacetKey.key]. values (MutableSequence[google.cloud.discoveryengine_v1beta.types.SearchResponse.Facet.FacetValue]): The facet values for this field. @@ -1383,10 +1394,11 @@ class RefinementAttribute(proto.Message): Attributes: attribute_key (str): - Attribute key used to refine the results e.g. 'movie_type'. + Attribute key used to refine the results. For example, + ``"movie_type"``. attribute_value (str): - Attribute value used to refine the results - e.g. 'drama'. + Attribute value used to refine the results. For example, + ``"drama"``. """ attribute_key: str = proto.Field( @@ -1411,7 +1423,7 @@ class RefinementAttribute(proto.Message): ) class Summary(proto.Message): - r"""Summary of the top N search result specified by the summary + r"""Summary of the top N search results specified by the summary spec. 
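A minimal sketch of a search that requests a generated summary via ``ContentSearchSpec.SummarySpec``, assuming the existing ``SearchServiceClient.search`` pager, which proxies fields such as ``summary`` from the first response page; the serving config and query are placeholders.

```python
from google.cloud import discoveryengine_v1beta


def search_with_summary(serving_config: str, query: str):
    """Run a search and ask for a summary of the top results."""
    client = discoveryengine_v1beta.SearchServiceClient()

    content_search_spec = discoveryengine_v1beta.SearchRequest.ContentSearchSpec(
        summary_spec=discoveryengine_v1beta.SearchRequest.ContentSearchSpec.SummarySpec(
            summary_result_count=5,  # at most 10 results in documents mode
            include_citations=True,
        ),
    )

    request = discoveryengine_v1beta.SearchRequest(
        serving_config=serving_config,
        query=query,
        page_size=10,
        content_search_spec=content_search_spec,
    )

    response = client.search(request=request)
    print(response.summary.summary_text)  # summary of the first page of results
    for result in response:
        print(result.document.name)
    return response
```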
Attributes: diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/search_tuning_service.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/search_tuning_service.py index ca89e976a977..1e4b9b45630c 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/search_tuning_service.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/search_tuning_service.py @@ -21,11 +21,13 @@ from google.rpc import status_pb2 # type: ignore import proto # type: ignore -from google.cloud.discoveryengine_v1beta.types import import_config +from google.cloud.discoveryengine_v1beta.types import custom_tuning_model, import_config __protobuf__ = proto.module( package="google.cloud.discoveryengine.v1beta", manifest={ + "ListCustomModelsRequest", + "ListCustomModelsResponse", "TrainCustomModelRequest", "TrainCustomModelResponse", "TrainCustomModelMetadata", @@ -33,6 +35,45 @@ ) +class ListCustomModelsRequest(proto.Message): + r"""Request message for + [SearchTuningService.ListCustomModels][google.cloud.discoveryengine.v1beta.SearchTuningService.ListCustomModels] + method. + + Attributes: + data_store (str): + Required. The resource name of the parent Data Store, such + as + ``projects/*/locations/global/collections/default_collection/dataStores/default_data_store``. + This field is used to identify the data store where to fetch + the models from. + """ + + data_store: str = proto.Field( + proto.STRING, + number=1, + ) + + +class ListCustomModelsResponse(proto.Message): + r"""Response message for + [SearchTuningService.ListCustomModels][google.cloud.discoveryengine.v1beta.SearchTuningService.ListCustomModels] + method. + + Attributes: + models (MutableSequence[google.cloud.discoveryengine_v1beta.types.CustomTuningModel]): + List of custom tuning models. + """ + + models: MutableSequence[ + custom_tuning_model.CustomTuningModel + ] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=custom_tuning_model.CustomTuningModel, + ) + + class TrainCustomModelRequest(proto.Message): r"""Request message for [SearchTuningService.TrainCustomModel][google.cloud.discoveryengine.v1beta.SearchTuningService.TrainCustomModel] @@ -59,6 +100,8 @@ class TrainCustomModelRequest(proto.Message): error_config (google.cloud.discoveryengine_v1beta.types.ImportErrorConfig): The desired location of errors incurred during the data ingestion and training. + model_id (str): + If not provided, a UUID will be generated. """ class GcsTrainingInput(proto.Message): @@ -137,6 +180,10 @@ class GcsTrainingInput(proto.Message): number=4, message=import_config.ImportErrorConfig, ) + model_id: str = proto.Field( + proto.STRING, + number=5, + ) class TrainCustomModelResponse(proto.Message): @@ -166,6 +213,9 @@ class TrainCustomModelResponse(proto.Message): - **ready**: The model is ready for serving. metrics (MutableMapping[str, float]): The metrics of the trained model. + model_name (str): + Fully qualified name of the + CustomTuningModel. 
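A minimal sketch of listing the models described by ``CustomTuningModel``, assuming the ``SearchTuningServiceClient`` generated from these messages exposes ``list_custom_models``; the data store name is a placeholder.

```python
from google.cloud import discoveryengine_v1beta


def list_custom_models(data_store: str):
    """List the custom tuning models that belong to a data store."""
    client = discoveryengine_v1beta.SearchTuningServiceClient()

    request = discoveryengine_v1beta.ListCustomModelsRequest(
        data_store=data_store,  # .../collections/default_collection/dataStores/{id}
    )

    response = client.list_custom_models(request=request)
    for model in response.models:
        print(model.name, model.model_state, model.model_version)
    return response
```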
""" error_samples: MutableSequence[status_pb2.Status] = proto.RepeatedField( @@ -187,6 +237,10 @@ class TrainCustomModelResponse(proto.Message): proto.DOUBLE, number=4, ) + model_name: str = proto.Field( + proto.STRING, + number=5, + ) class TrainCustomModelMetadata(proto.Message): diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/session.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/session.py index 527789c3d771..4269306d3d74 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/session.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/session.py @@ -35,7 +35,7 @@ class Session(proto.Message): Attributes: name (str): Immutable. Fully qualified name - ``project/*/locations/global/collections/{collection}/engines/{engine}/sessions/*`` + ``projects/{project}/locations/global/collections/{collection}/engines/{engine}/sessions/*`` state (google.cloud.discoveryengine_v1beta.types.Session.State): The state of the session. user_pseudo_id (str): @@ -70,6 +70,8 @@ class Turn(proto.Message): answer (str): The resource name of the answer to the user query. + Only set if the answer generation (/answer API + call) happened in this turn. """ query: "Query" = proto.Field( diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/site_search_engine.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/site_search_engine.py index d821f3f8d944..dbf6bac68bf7 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/site_search_engine.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/site_search_engine.py @@ -75,6 +75,8 @@ class TargetSite(proto.Message): generated_uri_pattern (str): Output only. This is system-generated based on the provided_uri_pattern. + root_domain_uri (str): + Output only. Root domain of the provided_uri_pattern. site_verification_info (google.cloud.discoveryengine_v1beta.types.SiteVerificationInfo): Output only. Site ownership and validity verification status. @@ -188,6 +190,10 @@ class QuotaFailure(proto.Message): proto.STRING, number=4, ) + root_domain_uri: str = proto.Field( + proto.STRING, + number=10, + ) site_verification_info: "SiteVerificationInfo" = proto.Field( proto.MESSAGE, number=7, diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/user_event.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/user_event.py index dc6c1ee72dce..23500fae2a57 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/user_event.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/user_event.py @@ -40,7 +40,7 @@ class UserEvent(proto.Message): r"""UserEvent captures all metadata information Discovery Engine - API needs to know about how end users interact with customers' + API needs to know about how end users interact with your website. Attributes: @@ -89,6 +89,28 @@ class UserEvent(proto.Message): to use Google Analytics `Client ID `__ for this field. + engine (str): + The [Engine][google.cloud.discoveryengine.v1beta.Engine] + resource name, in the form of + ``projects/{project}/locations/{location}/collections/{collection_id}/engines/{engine_id}``. + + Optional. 
Only required for + [Engine][google.cloud.discoveryengine.v1beta.Engine] + produced user events. For example, user events from blended + search. + data_store (str): + The + [DataStore][google.cloud.discoveryengine.v1beta.DataStore] + resource full name, of the form + ``projects/{project}/locations/{location}/collections/{collection_id}/dataStores/{data_store_id}``. + + Optional. Only required for user events whose data store + can't by determined by + [UserEvent.engine][google.cloud.discoveryengine.v1beta.UserEvent.engine] + or + [UserEvent.documents][google.cloud.discoveryengine.v1beta.UserEvent.documents]. + If data store is set in the parent of write/import/collect + user event requests, this field can be omitted. event_time (google.protobuf.timestamp_pb2.Timestamp): Only required for [UserEventService.ImportUserEvents][google.cloud.discoveryengine.v1beta.UserEventService.ImportUserEvents] @@ -217,8 +239,7 @@ class UserEvent(proto.Message): A list of identifiers for the independent experiment groups this user event belongs to. This is used to distinguish between user events - associated with different experiment setups on - the customer end. + associated with different experiment setups. promotion_ids (MutableSequence[str]): The promotion IDs if this is an event associated with promotions. Currently, this @@ -264,6 +285,14 @@ class UserEvent(proto.Message): proto.STRING, number=2, ) + engine: str = proto.Field( + proto.STRING, + number=19, + ) + data_store: str = proto.Field( + proto.STRING, + number=20, + ) event_time: timestamp_pb2.Timestamp = proto.Field( proto.MESSAGE, number=3, @@ -631,8 +660,8 @@ class DocumentInfo(proto.Message): Quantity of the Document associated with the user event. Defaults to 1. - For example, this field will be 2 if two quantities of the - same Document are involved in a ``add-to-cart`` event. + For example, this field is 2 if two quantities of the same + Document are involved in a ``add-to-cart`` event. Required for events of the following event types: diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/user_event_service.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/user_event_service.py index 6119f8135ece..6da74fada52a 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/user_event_service.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/user_event_service.py @@ -37,12 +37,25 @@ class WriteUserEventRequest(proto.Message): Attributes: parent (str): - Required. The parent DataStore resource name, such as + Required. The parent resource name. If the write user event + action is applied in + [DataStore][google.cloud.discoveryengine.v1beta.DataStore] + level, the format is: ``projects/{project}/locations/{location}/collections/{collection}/dataStores/{data_store}``. + If the write user event action is applied in [Location][] + level, for example, the event with + [Document][google.cloud.discoveryengine.v1beta.Document] + across multiple + [DataStore][google.cloud.discoveryengine.v1beta.DataStore], + the format is: ``projects/{project}/locations/{location}``. user_event (google.cloud.discoveryengine_v1beta.types.UserEvent): Required. User event to write. This field is a member of `oneof`_ ``_user_event``. + write_async (bool): + If set to true, the user event is written + asynchronously after validation, and the API + responds without waiting for the write. 
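A minimal sketch of writing a user event with the new ``write_async`` flag, using the existing ``UserEventServiceClient``; the event type and parent resource are placeholders.

```python
from google.cloud import discoveryengine_v1beta


def write_user_event_async(parent: str, user_pseudo_id: str):
    """Write a user event without waiting for the write to complete."""
    client = discoveryengine_v1beta.UserEventServiceClient()

    request = discoveryengine_v1beta.WriteUserEventRequest(
        parent=parent,  # data-store-level or location-level resource name
        user_event=discoveryengine_v1beta.UserEvent(
            event_type="search",
            user_pseudo_id=user_pseudo_id,
        ),
        write_async=True,  # validated, then written asynchronously
    )
    return client.write_user_event(request=request)
```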
""" parent: str = proto.Field( @@ -55,6 +68,10 @@ class WriteUserEventRequest(proto.Message): optional=True, message=gcd_user_event.UserEvent, ) + write_async: bool = proto.Field( + proto.BOOL, + number=3, + ) class CollectUserEventRequest(proto.Message): diff --git a/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_control_service_create_control_async.py b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_control_service_create_control_async.py new file mode 100644 index 000000000000..3a468e6d8a18 --- /dev/null +++ b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_control_service_create_control_async.py @@ -0,0 +1,61 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateControl +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-discoveryengine + + +# [START discoveryengine_v1_generated_ControlService_CreateControl_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import discoveryengine_v1 + + +async def sample_create_control(): + # Create a client + client = discoveryengine_v1.ControlServiceAsyncClient() + + # Initialize request argument(s) + control = discoveryengine_v1.Control() + control.boost_action.boost = 0.551 + control.boost_action.filter = "filter_value" + control.boost_action.data_store = "data_store_value" + control.display_name = "display_name_value" + control.solution_type = "SOLUTION_TYPE_GENERATIVE_CHAT" + + request = discoveryengine_v1.CreateControlRequest( + parent="parent_value", + control=control, + control_id="control_id_value", + ) + + # Make the request + response = await client.create_control(request=request) + + # Handle the response + print(response) + +# [END discoveryengine_v1_generated_ControlService_CreateControl_async] diff --git a/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_control_service_create_control_sync.py b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_control_service_create_control_sync.py new file mode 100644 index 000000000000..4126b4aaa9d6 --- /dev/null +++ b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_control_service_create_control_sync.py @@ -0,0 +1,61 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateControl +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-discoveryengine + + +# [START discoveryengine_v1_generated_ControlService_CreateControl_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import discoveryengine_v1 + + +def sample_create_control(): + # Create a client + client = discoveryengine_v1.ControlServiceClient() + + # Initialize request argument(s) + control = discoveryengine_v1.Control() + control.boost_action.boost = 0.551 + control.boost_action.filter = "filter_value" + control.boost_action.data_store = "data_store_value" + control.display_name = "display_name_value" + control.solution_type = "SOLUTION_TYPE_GENERATIVE_CHAT" + + request = discoveryengine_v1.CreateControlRequest( + parent="parent_value", + control=control, + control_id="control_id_value", + ) + + # Make the request + response = client.create_control(request=request) + + # Handle the response + print(response) + +# [END discoveryengine_v1_generated_ControlService_CreateControl_sync] diff --git a/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_control_service_delete_control_async.py b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_control_service_delete_control_async.py new file mode 100644 index 000000000000..b3a39aeb0603 --- /dev/null +++ b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_control_service_delete_control_async.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteControl +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-discoveryengine + + +# [START discoveryengine_v1_generated_ControlService_DeleteControl_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import discoveryengine_v1 + + +async def sample_delete_control(): + # Create a client + client = discoveryengine_v1.ControlServiceAsyncClient() + + # Initialize request argument(s) + request = discoveryengine_v1.DeleteControlRequest( + name="name_value", + ) + + # Make the request + await client.delete_control(request=request) + + +# [END discoveryengine_v1_generated_ControlService_DeleteControl_async] diff --git a/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_control_service_delete_control_sync.py b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_control_service_delete_control_sync.py new file mode 100644 index 000000000000..3717aea1d490 --- /dev/null +++ b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_control_service_delete_control_sync.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteControl +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-discoveryengine + + +# [START discoveryengine_v1_generated_ControlService_DeleteControl_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import discoveryengine_v1 + + +def sample_delete_control(): + # Create a client + client = discoveryengine_v1.ControlServiceClient() + + # Initialize request argument(s) + request = discoveryengine_v1.DeleteControlRequest( + name="name_value", + ) + + # Make the request + client.delete_control(request=request) + + +# [END discoveryengine_v1_generated_ControlService_DeleteControl_sync] diff --git a/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_control_service_get_control_async.py b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_control_service_get_control_async.py new file mode 100644 index 000000000000..ccb72fbf7480 --- /dev/null +++ b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_control_service_get_control_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetControl +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-discoveryengine + + +# [START discoveryengine_v1_generated_ControlService_GetControl_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import discoveryengine_v1 + + +async def sample_get_control(): + # Create a client + client = discoveryengine_v1.ControlServiceAsyncClient() + + # Initialize request argument(s) + request = discoveryengine_v1.GetControlRequest( + name="name_value", + ) + + # Make the request + response = await client.get_control(request=request) + + # Handle the response + print(response) + +# [END discoveryengine_v1_generated_ControlService_GetControl_async] diff --git a/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_control_service_get_control_sync.py b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_control_service_get_control_sync.py new file mode 100644 index 000000000000..da57cd7ca37f --- /dev/null +++ b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_control_service_get_control_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetControl +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-discoveryengine + + +# [START discoveryengine_v1_generated_ControlService_GetControl_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import discoveryengine_v1 + + +def sample_get_control(): + # Create a client + client = discoveryengine_v1.ControlServiceClient() + + # Initialize request argument(s) + request = discoveryengine_v1.GetControlRequest( + name="name_value", + ) + + # Make the request + response = client.get_control(request=request) + + # Handle the response + print(response) + +# [END discoveryengine_v1_generated_ControlService_GetControl_sync] diff --git a/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_control_service_list_controls_async.py b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_control_service_list_controls_async.py new file mode 100644 index 000000000000..fb0bec97d69b --- /dev/null +++ b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_control_service_list_controls_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListControls +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-discoveryengine + + +# [START discoveryengine_v1_generated_ControlService_ListControls_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import discoveryengine_v1 + + +async def sample_list_controls(): + # Create a client + client = discoveryengine_v1.ControlServiceAsyncClient() + + # Initialize request argument(s) + request = discoveryengine_v1.ListControlsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_controls(request=request) + + # Handle the response + async for response in page_result: + print(response) + +# [END discoveryengine_v1_generated_ControlService_ListControls_async] diff --git a/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_control_service_list_controls_sync.py b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_control_service_list_controls_sync.py new file mode 100644 index 000000000000..8b5446dabcf4 --- /dev/null +++ b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_control_service_list_controls_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListControls +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-discoveryengine + + +# [START discoveryengine_v1_generated_ControlService_ListControls_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import discoveryengine_v1 + + +def sample_list_controls(): + # Create a client + client = discoveryengine_v1.ControlServiceClient() + + # Initialize request argument(s) + request = discoveryengine_v1.ListControlsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_controls(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END discoveryengine_v1_generated_ControlService_ListControls_sync] diff --git a/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_control_service_update_control_async.py b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_control_service_update_control_async.py new file mode 100644 index 000000000000..55365ca8f4ad --- /dev/null +++ b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_control_service_update_control_async.py @@ -0,0 +1,59 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateControl +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-discoveryengine + + +# [START discoveryengine_v1_generated_ControlService_UpdateControl_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
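+# - It may require replacing the placeholder Control fields set below
+#   (boost_action.boost, boost_action.filter, boost_action.data_store,
+#   display_name, solution_type) with values that reference an existing
+#   data store in your project.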
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import discoveryengine_v1 + + +async def sample_update_control(): + # Create a client + client = discoveryengine_v1.ControlServiceAsyncClient() + + # Initialize request argument(s) + control = discoveryengine_v1.Control() + control.boost_action.boost = 0.551 + control.boost_action.filter = "filter_value" + control.boost_action.data_store = "data_store_value" + control.display_name = "display_name_value" + control.solution_type = "SOLUTION_TYPE_GENERATIVE_CHAT" + + request = discoveryengine_v1.UpdateControlRequest( + control=control, + ) + + # Make the request + response = await client.update_control(request=request) + + # Handle the response + print(response) + +# [END discoveryengine_v1_generated_ControlService_UpdateControl_async] diff --git a/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_control_service_update_control_sync.py b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_control_service_update_control_sync.py new file mode 100644 index 000000000000..497d7d2c7e63 --- /dev/null +++ b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_control_service_update_control_sync.py @@ -0,0 +1,59 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateControl +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-discoveryengine + + +# [START discoveryengine_v1_generated_ControlService_UpdateControl_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import discoveryengine_v1 + + +def sample_update_control(): + # Create a client + client = discoveryengine_v1.ControlServiceClient() + + # Initialize request argument(s) + control = discoveryengine_v1.Control() + control.boost_action.boost = 0.551 + control.boost_action.filter = "filter_value" + control.boost_action.data_store = "data_store_value" + control.display_name = "display_name_value" + control.solution_type = "SOLUTION_TYPE_GENERATIVE_CHAT" + + request = discoveryengine_v1.UpdateControlRequest( + control=control, + ) + + # Make the request + response = client.update_control(request=request) + + # Handle the response + print(response) + +# [END discoveryengine_v1_generated_ControlService_UpdateControl_sync] diff --git a/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_conversational_search_service_answer_query_async.py b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_conversational_search_service_answer_query_async.py new file mode 100644 index 000000000000..00682c3dbe09 --- /dev/null +++ b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_conversational_search_service_answer_query_async.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for AnswerQuery +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-discoveryengine + + +# [START discoveryengine_v1_generated_ConversationalSearchService_AnswerQuery_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
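+# - It may require a real serving config resource name and a meaningful
+#   query text; the `"serving_config_value"` and `"text_value"` strings
+#   used below are placeholders only.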
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import discoveryengine_v1 + + +async def sample_answer_query(): + # Create a client + client = discoveryengine_v1.ConversationalSearchServiceAsyncClient() + + # Initialize request argument(s) + query = discoveryengine_v1.Query() + query.text = "text_value" + + request = discoveryengine_v1.AnswerQueryRequest( + serving_config="serving_config_value", + query=query, + ) + + # Make the request + response = await client.answer_query(request=request) + + # Handle the response + print(response) + +# [END discoveryengine_v1_generated_ConversationalSearchService_AnswerQuery_async] diff --git a/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_conversational_search_service_answer_query_sync.py b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_conversational_search_service_answer_query_sync.py new file mode 100644 index 000000000000..7a02cd5041ec --- /dev/null +++ b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_conversational_search_service_answer_query_sync.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for AnswerQuery +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-discoveryengine + + +# [START discoveryengine_v1_generated_ConversationalSearchService_AnswerQuery_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import discoveryengine_v1 + + +def sample_answer_query(): + # Create a client + client = discoveryengine_v1.ConversationalSearchServiceClient() + + # Initialize request argument(s) + query = discoveryengine_v1.Query() + query.text = "text_value" + + request = discoveryengine_v1.AnswerQueryRequest( + serving_config="serving_config_value", + query=query, + ) + + # Make the request + response = client.answer_query(request=request) + + # Handle the response + print(response) + +# [END discoveryengine_v1_generated_ConversationalSearchService_AnswerQuery_sync] diff --git a/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_conversational_search_service_create_session_async.py b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_conversational_search_service_create_session_async.py new file mode 100644 index 000000000000..36abd2f09105 --- /dev/null +++ b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_conversational_search_service_create_session_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateSession +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-discoveryengine + + +# [START discoveryengine_v1_generated_ConversationalSearchService_CreateSession_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
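+# - It may require an asyncio event loop when run as a standalone script;
+#   a minimal driver sketch (an illustrative addition, not generated code):
+#
+#       import asyncio
+#
+#       if __name__ == "__main__":
+#           asyncio.run(sample_create_session())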
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import discoveryengine_v1 + + +async def sample_create_session(): + # Create a client + client = discoveryengine_v1.ConversationalSearchServiceAsyncClient() + + # Initialize request argument(s) + request = discoveryengine_v1.CreateSessionRequest( + parent="parent_value", + ) + + # Make the request + response = await client.create_session(request=request) + + # Handle the response + print(response) + +# [END discoveryengine_v1_generated_ConversationalSearchService_CreateSession_async] diff --git a/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_conversational_search_service_create_session_sync.py b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_conversational_search_service_create_session_sync.py new file mode 100644 index 000000000000..1fe46c6ef933 --- /dev/null +++ b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_conversational_search_service_create_session_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateSession +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-discoveryengine + + +# [START discoveryengine_v1_generated_ConversationalSearchService_CreateSession_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import discoveryengine_v1 + + +def sample_create_session(): + # Create a client + client = discoveryengine_v1.ConversationalSearchServiceClient() + + # Initialize request argument(s) + request = discoveryengine_v1.CreateSessionRequest( + parent="parent_value", + ) + + # Make the request + response = client.create_session(request=request) + + # Handle the response + print(response) + +# [END discoveryengine_v1_generated_ConversationalSearchService_CreateSession_sync] diff --git a/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_conversational_search_service_delete_session_async.py b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_conversational_search_service_delete_session_async.py new file mode 100644 index 000000000000..44c7176b3e90 --- /dev/null +++ b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_conversational_search_service_delete_session_async.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteSession +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-discoveryengine + + +# [START discoveryengine_v1_generated_ConversationalSearchService_DeleteSession_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
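+# - It may be worth noting that DeleteSession returns no payload, so the
+#   snippet below simply awaits the call and does not print a response.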
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import discoveryengine_v1 + + +async def sample_delete_session(): + # Create a client + client = discoveryengine_v1.ConversationalSearchServiceAsyncClient() + + # Initialize request argument(s) + request = discoveryengine_v1.DeleteSessionRequest( + name="name_value", + ) + + # Make the request + await client.delete_session(request=request) + + +# [END discoveryengine_v1_generated_ConversationalSearchService_DeleteSession_async] diff --git a/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_conversational_search_service_delete_session_sync.py b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_conversational_search_service_delete_session_sync.py new file mode 100644 index 000000000000..f8714a72fc8a --- /dev/null +++ b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_conversational_search_service_delete_session_sync.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteSession +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-discoveryengine + + +# [START discoveryengine_v1_generated_ConversationalSearchService_DeleteSession_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import discoveryengine_v1 + + +def sample_delete_session(): + # Create a client + client = discoveryengine_v1.ConversationalSearchServiceClient() + + # Initialize request argument(s) + request = discoveryengine_v1.DeleteSessionRequest( + name="name_value", + ) + + # Make the request + client.delete_session(request=request) + + +# [END discoveryengine_v1_generated_ConversationalSearchService_DeleteSession_sync] diff --git a/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_conversational_search_service_get_answer_async.py b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_conversational_search_service_get_answer_async.py new file mode 100644 index 000000000000..2c74db704ed0 --- /dev/null +++ b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_conversational_search_service_get_answer_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetAnswer +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-discoveryengine + + +# [START discoveryengine_v1_generated_ConversationalSearchService_GetAnswer_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import discoveryengine_v1 + + +async def sample_get_answer(): + # Create a client + client = discoveryengine_v1.ConversationalSearchServiceAsyncClient() + + # Initialize request argument(s) + request = discoveryengine_v1.GetAnswerRequest( + name="name_value", + ) + + # Make the request + response = await client.get_answer(request=request) + + # Handle the response + print(response) + +# [END discoveryengine_v1_generated_ConversationalSearchService_GetAnswer_async] diff --git a/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_conversational_search_service_get_answer_sync.py b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_conversational_search_service_get_answer_sync.py new file mode 100644 index 000000000000..ce85016f3087 --- /dev/null +++ b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_conversational_search_service_get_answer_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetAnswer +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-discoveryengine + + +# [START discoveryengine_v1_generated_ConversationalSearchService_GetAnswer_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import discoveryengine_v1 + + +def sample_get_answer(): + # Create a client + client = discoveryengine_v1.ConversationalSearchServiceClient() + + # Initialize request argument(s) + request = discoveryengine_v1.GetAnswerRequest( + name="name_value", + ) + + # Make the request + response = client.get_answer(request=request) + + # Handle the response + print(response) + +# [END discoveryengine_v1_generated_ConversationalSearchService_GetAnswer_sync] diff --git a/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_conversational_search_service_get_session_async.py b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_conversational_search_service_get_session_async.py new file mode 100644 index 000000000000..61dc6cde8eda --- /dev/null +++ b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_conversational_search_service_get_session_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetSession +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-discoveryengine + + +# [START discoveryengine_v1_generated_ConversationalSearchService_GetSession_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import discoveryengine_v1 + + +async def sample_get_session(): + # Create a client + client = discoveryengine_v1.ConversationalSearchServiceAsyncClient() + + # Initialize request argument(s) + request = discoveryengine_v1.GetSessionRequest( + name="name_value", + ) + + # Make the request + response = await client.get_session(request=request) + + # Handle the response + print(response) + +# [END discoveryengine_v1_generated_ConversationalSearchService_GetSession_async] diff --git a/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_conversational_search_service_get_session_sync.py b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_conversational_search_service_get_session_sync.py new file mode 100644 index 000000000000..b5a2be673e0c --- /dev/null +++ b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_conversational_search_service_get_session_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetSession +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-discoveryengine + + +# [START discoveryengine_v1_generated_ConversationalSearchService_GetSession_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import discoveryengine_v1 + + +def sample_get_session(): + # Create a client + client = discoveryengine_v1.ConversationalSearchServiceClient() + + # Initialize request argument(s) + request = discoveryengine_v1.GetSessionRequest( + name="name_value", + ) + + # Make the request + response = client.get_session(request=request) + + # Handle the response + print(response) + +# [END discoveryengine_v1_generated_ConversationalSearchService_GetSession_sync] diff --git a/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_conversational_search_service_list_sessions_async.py b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_conversational_search_service_list_sessions_async.py new file mode 100644 index 000000000000..788677a77871 --- /dev/null +++ b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_conversational_search_service_list_sessions_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListSessions +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-discoveryengine + + +# [START discoveryengine_v1_generated_ConversationalSearchService_ListSessions_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import discoveryengine_v1 + + +async def sample_list_sessions(): + # Create a client + client = discoveryengine_v1.ConversationalSearchServiceAsyncClient() + + # Initialize request argument(s) + request = discoveryengine_v1.ListSessionsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_sessions(request=request) + + # Handle the response + async for response in page_result: + print(response) + +# [END discoveryengine_v1_generated_ConversationalSearchService_ListSessions_async] diff --git a/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_conversational_search_service_list_sessions_sync.py b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_conversational_search_service_list_sessions_sync.py new file mode 100644 index 000000000000..b9d19415fb56 --- /dev/null +++ b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_conversational_search_service_list_sessions_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListSessions +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-discoveryengine + + +# [START discoveryengine_v1_generated_ConversationalSearchService_ListSessions_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import discoveryengine_v1 + + +def sample_list_sessions(): + # Create a client + client = discoveryengine_v1.ConversationalSearchServiceClient() + + # Initialize request argument(s) + request = discoveryengine_v1.ListSessionsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_sessions(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END discoveryengine_v1_generated_ConversationalSearchService_ListSessions_sync] diff --git a/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_conversational_search_service_update_session_async.py b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_conversational_search_service_update_session_async.py new file mode 100644 index 000000000000..5338bd45a2f9 --- /dev/null +++ b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_conversational_search_service_update_session_async.py @@ -0,0 +1,51 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateSession +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-discoveryengine + + +# [START discoveryengine_v1_generated_ConversationalSearchService_UpdateSession_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import discoveryengine_v1 + + +async def sample_update_session(): + # Create a client + client = discoveryengine_v1.ConversationalSearchServiceAsyncClient() + + # Initialize request argument(s) + request = discoveryengine_v1.UpdateSessionRequest( + ) + + # Make the request + response = await client.update_session(request=request) + + # Handle the response + print(response) + +# [END discoveryengine_v1_generated_ConversationalSearchService_UpdateSession_async] diff --git a/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_conversational_search_service_update_session_sync.py b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_conversational_search_service_update_session_sync.py new file mode 100644 index 000000000000..f72737732c30 --- /dev/null +++ b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_conversational_search_service_update_session_sync.py @@ -0,0 +1,51 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateSession +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-discoveryengine + + +# [START discoveryengine_v1_generated_ConversationalSearchService_UpdateSession_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import discoveryengine_v1 + + +def sample_update_session(): + # Create a client + client = discoveryengine_v1.ConversationalSearchServiceClient() + + # Initialize request argument(s) + request = discoveryengine_v1.UpdateSessionRequest( + ) + + # Make the request + response = client.update_session(request=request) + + # Handle the response + print(response) + +# [END discoveryengine_v1_generated_ConversationalSearchService_UpdateSession_sync] diff --git a/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_grounded_generation_service_check_grounding_async.py b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_grounded_generation_service_check_grounding_async.py new file mode 100644 index 000000000000..d4a7f943f85c --- /dev/null +++ b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_grounded_generation_service_check_grounding_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CheckGrounding +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-discoveryengine + + +# [START discoveryengine_v1_generated_GroundedGenerationService_CheckGrounding_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import discoveryengine_v1 + + +async def sample_check_grounding(): + # Create a client + client = discoveryengine_v1.GroundedGenerationServiceAsyncClient() + + # Initialize request argument(s) + request = discoveryengine_v1.CheckGroundingRequest( + grounding_config="grounding_config_value", + ) + + # Make the request + response = await client.check_grounding(request=request) + + # Handle the response + print(response) + +# [END discoveryengine_v1_generated_GroundedGenerationService_CheckGrounding_async] diff --git a/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_grounded_generation_service_check_grounding_sync.py b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_grounded_generation_service_check_grounding_sync.py new file mode 100644 index 000000000000..619f7d2434e1 --- /dev/null +++ b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_grounded_generation_service_check_grounding_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CheckGrounding +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-discoveryengine + + +# [START discoveryengine_v1_generated_GroundedGenerationService_CheckGrounding_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import discoveryengine_v1 + + +def sample_check_grounding(): + # Create a client + client = discoveryengine_v1.GroundedGenerationServiceClient() + + # Initialize request argument(s) + request = discoveryengine_v1.CheckGroundingRequest( + grounding_config="grounding_config_value", + ) + + # Make the request + response = client.check_grounding(request=request) + + # Handle the response + print(response) + +# [END discoveryengine_v1_generated_GroundedGenerationService_CheckGrounding_sync] diff --git a/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_project_service_provision_project_async.py b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_project_service_provision_project_async.py new file mode 100644 index 000000000000..a8197eccd46d --- /dev/null +++ b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_project_service_provision_project_async.py @@ -0,0 +1,58 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ProvisionProject +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-discoveryengine + + +# [START discoveryengine_v1_generated_ProjectService_ProvisionProject_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
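+# - It may require waiting for the returned long-running operation, which
+#   the snippet below does via `(await operation).result()`.
+# - It may require a real `data_use_terms_version`; the string below is a
+#   placeholder, and `accept_data_use_terms=True` indicates acceptance of
+#   those terms.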
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import discoveryengine_v1 + + +async def sample_provision_project(): + # Create a client + client = discoveryengine_v1.ProjectServiceAsyncClient() + + # Initialize request argument(s) + request = discoveryengine_v1.ProvisionProjectRequest( + name="name_value", + accept_data_use_terms=True, + data_use_terms_version="data_use_terms_version_value", + ) + + # Make the request + operation = client.provision_project(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END discoveryengine_v1_generated_ProjectService_ProvisionProject_async] diff --git a/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_project_service_provision_project_sync.py b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_project_service_provision_project_sync.py new file mode 100644 index 000000000000..08dc96754c56 --- /dev/null +++ b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_project_service_provision_project_sync.py @@ -0,0 +1,58 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ProvisionProject +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-discoveryengine + + +# [START discoveryengine_v1_generated_ProjectService_ProvisionProject_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import discoveryengine_v1 + + +def sample_provision_project(): + # Create a client + client = discoveryengine_v1.ProjectServiceClient() + + # Initialize request argument(s) + request = discoveryengine_v1.ProvisionProjectRequest( + name="name_value", + accept_data_use_terms=True, + data_use_terms_version="data_use_terms_version_value", + ) + + # Make the request + operation = client.provision_project(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END discoveryengine_v1_generated_ProjectService_ProvisionProject_sync] diff --git a/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_rank_service_rank_async.py b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_rank_service_rank_async.py new file mode 100644 index 000000000000..388d5a794502 --- /dev/null +++ b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_rank_service_rank_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Rank +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-discoveryengine + + +# [START discoveryengine_v1_generated_RankService_Rank_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import discoveryengine_v1 + + +async def sample_rank(): + # Create a client + client = discoveryengine_v1.RankServiceAsyncClient() + + # Initialize request argument(s) + request = discoveryengine_v1.RankRequest( + ranking_config="ranking_config_value", + ) + + # Make the request + response = await client.rank(request=request) + + # Handle the response + print(response) + +# [END discoveryengine_v1_generated_RankService_Rank_async] diff --git a/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_rank_service_rank_sync.py b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_rank_service_rank_sync.py new file mode 100644 index 000000000000..33b9c54c8540 --- /dev/null +++ b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_rank_service_rank_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Rank +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-discoveryengine + + +# [START discoveryengine_v1_generated_RankService_Rank_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import discoveryengine_v1 + + +def sample_rank(): + # Create a client + client = discoveryengine_v1.RankServiceClient() + + # Initialize request argument(s) + request = discoveryengine_v1.RankRequest( + ranking_config="ranking_config_value", + ) + + # Make the request + response = client.rank(request=request) + + # Handle the response + print(response) + +# [END discoveryengine_v1_generated_RankService_Rank_sync] diff --git a/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1alpha_generated_control_service_create_control_async.py b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1alpha_generated_control_service_create_control_async.py new file mode 100644 index 000000000000..4bade69927d3 --- /dev/null +++ b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1alpha_generated_control_service_create_control_async.py @@ -0,0 +1,61 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateControl +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-discoveryengine + + +# [START discoveryengine_v1alpha_generated_ControlService_CreateControl_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import discoveryengine_v1alpha + + +async def sample_create_control(): + # Create a client + client = discoveryengine_v1alpha.ControlServiceAsyncClient() + + # Initialize request argument(s) + control = discoveryengine_v1alpha.Control() + control.boost_action.boost = 0.551 + control.boost_action.filter = "filter_value" + control.boost_action.data_store = "data_store_value" + control.display_name = "display_name_value" + control.solution_type = "SOLUTION_TYPE_GENERATIVE_CHAT" + + request = discoveryengine_v1alpha.CreateControlRequest( + parent="parent_value", + control=control, + control_id="control_id_value", + ) + + # Make the request + response = await client.create_control(request=request) + + # Handle the response + print(response) + +# [END discoveryengine_v1alpha_generated_ControlService_CreateControl_async] diff --git a/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1alpha_generated_control_service_create_control_sync.py b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1alpha_generated_control_service_create_control_sync.py new file mode 100644 index 000000000000..3acc8ed968cc --- /dev/null +++ b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1alpha_generated_control_service_create_control_sync.py @@ -0,0 +1,61 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateControl +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-discoveryengine + + +# [START discoveryengine_v1alpha_generated_ControlService_CreateControl_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import discoveryengine_v1alpha + + +def sample_create_control(): + # Create a client + client = discoveryengine_v1alpha.ControlServiceClient() + + # Initialize request argument(s) + control = discoveryengine_v1alpha.Control() + control.boost_action.boost = 0.551 + control.boost_action.filter = "filter_value" + control.boost_action.data_store = "data_store_value" + control.display_name = "display_name_value" + control.solution_type = "SOLUTION_TYPE_GENERATIVE_CHAT" + + request = discoveryengine_v1alpha.CreateControlRequest( + parent="parent_value", + control=control, + control_id="control_id_value", + ) + + # Make the request + response = client.create_control(request=request) + + # Handle the response + print(response) + +# [END discoveryengine_v1alpha_generated_ControlService_CreateControl_sync] diff --git a/packages/google-maps-mapsplatformdatasets/samples/generated_samples/mapsplatformdatasets_v1alpha_generated_maps_platform_datasets_v1_alpha_delete_dataset_async.py b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1alpha_generated_control_service_delete_control_async.py similarity index 70% rename from packages/google-maps-mapsplatformdatasets/samples/generated_samples/mapsplatformdatasets_v1alpha_generated_maps_platform_datasets_v1_alpha_delete_dataset_async.py rename to packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1alpha_generated_control_service_delete_control_async.py index c38516698208..9d8e4c8182f2 100644 --- a/packages/google-maps-mapsplatformdatasets/samples/generated_samples/mapsplatformdatasets_v1alpha_generated_maps_platform_datasets_v1_alpha_delete_dataset_async.py +++ b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1alpha_generated_control_service_delete_control_async.py @@ -15,15 +15,15 @@ # # Generated code. DO NOT EDIT! # -# Snippet for DeleteDataset +# Snippet for DeleteControl # NOTE: This snippet has been automatically generated for illustrative purposes only. # It may require modifications to work in your environment. # To install the latest published package dependency, execute the following: -# python3 -m pip install google-maps-mapsplatformdatasets +# python3 -m pip install google-cloud-discoveryengine -# [START mapsplatformdatasets_v1alpha_generated_MapsPlatformDatasetsV1Alpha_DeleteDataset_async] +# [START discoveryengine_v1alpha_generated_ControlService_DeleteControl_async] # This snippet has been automatically generated and should be regarded as a # code template only. 
# It will require modifications to work: @@ -31,20 +31,20 @@ # - It may require specifying regional endpoints when creating the service # client as shown in: # https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.maps import mapsplatformdatasets_v1alpha +from google.cloud import discoveryengine_v1alpha -async def sample_delete_dataset(): +async def sample_delete_control(): # Create a client - client = mapsplatformdatasets_v1alpha.MapsPlatformDatasetsV1AlphaAsyncClient() + client = discoveryengine_v1alpha.ControlServiceAsyncClient() # Initialize request argument(s) - request = mapsplatformdatasets_v1alpha.DeleteDatasetRequest( + request = discoveryengine_v1alpha.DeleteControlRequest( name="name_value", ) # Make the request - await client.delete_dataset(request=request) + await client.delete_control(request=request) -# [END mapsplatformdatasets_v1alpha_generated_MapsPlatformDatasetsV1Alpha_DeleteDataset_async] +# [END discoveryengine_v1alpha_generated_ControlService_DeleteControl_async] diff --git a/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1alpha_generated_control_service_delete_control_sync.py b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1alpha_generated_control_service_delete_control_sync.py new file mode 100644 index 000000000000..f935e8952edf --- /dev/null +++ b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1alpha_generated_control_service_delete_control_sync.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteControl +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-discoveryengine + + +# [START discoveryengine_v1alpha_generated_ControlService_DeleteControl_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import discoveryengine_v1alpha + + +def sample_delete_control(): + # Create a client + client = discoveryengine_v1alpha.ControlServiceClient() + + # Initialize request argument(s) + request = discoveryengine_v1alpha.DeleteControlRequest( + name="name_value", + ) + + # Make the request + client.delete_control(request=request) + + +# [END discoveryengine_v1alpha_generated_ControlService_DeleteControl_sync] diff --git a/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1alpha_generated_control_service_get_control_async.py b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1alpha_generated_control_service_get_control_async.py new file mode 100644 index 000000000000..049c3808fd59 --- /dev/null +++ b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1alpha_generated_control_service_get_control_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetControl +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-discoveryengine + + +# [START discoveryengine_v1alpha_generated_ControlService_GetControl_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import discoveryengine_v1alpha + + +async def sample_get_control(): + # Create a client + client = discoveryengine_v1alpha.ControlServiceAsyncClient() + + # Initialize request argument(s) + request = discoveryengine_v1alpha.GetControlRequest( + name="name_value", + ) + + # Make the request + response = await client.get_control(request=request) + + # Handle the response + print(response) + +# [END discoveryengine_v1alpha_generated_ControlService_GetControl_async] diff --git a/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1alpha_generated_control_service_get_control_sync.py b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1alpha_generated_control_service_get_control_sync.py new file mode 100644 index 000000000000..280eee2623d9 --- /dev/null +++ b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1alpha_generated_control_service_get_control_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetControl +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-discoveryengine + + +# [START discoveryengine_v1alpha_generated_ControlService_GetControl_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import discoveryengine_v1alpha + + +def sample_get_control(): + # Create a client + client = discoveryengine_v1alpha.ControlServiceClient() + + # Initialize request argument(s) + request = discoveryengine_v1alpha.GetControlRequest( + name="name_value", + ) + + # Make the request + response = client.get_control(request=request) + + # Handle the response + print(response) + +# [END discoveryengine_v1alpha_generated_ControlService_GetControl_sync] diff --git a/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1alpha_generated_control_service_list_controls_async.py b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1alpha_generated_control_service_list_controls_async.py new file mode 100644 index 000000000000..f06b4afd1fe1 --- /dev/null +++ b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1alpha_generated_control_service_list_controls_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListControls +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-discoveryengine + + +# [START discoveryengine_v1alpha_generated_ControlService_ListControls_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import discoveryengine_v1alpha + + +async def sample_list_controls(): + # Create a client + client = discoveryengine_v1alpha.ControlServiceAsyncClient() + + # Initialize request argument(s) + request = discoveryengine_v1alpha.ListControlsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_controls(request=request) + + # Handle the response + async for response in page_result: + print(response) + +# [END discoveryengine_v1alpha_generated_ControlService_ListControls_async] diff --git a/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1alpha_generated_control_service_list_controls_sync.py b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1alpha_generated_control_service_list_controls_sync.py new file mode 100644 index 000000000000..8b9d3c338f91 --- /dev/null +++ b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1alpha_generated_control_service_list_controls_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListControls +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-discoveryengine + + +# [START discoveryengine_v1alpha_generated_ControlService_ListControls_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import discoveryengine_v1alpha + + +def sample_list_controls(): + # Create a client + client = discoveryengine_v1alpha.ControlServiceClient() + + # Initialize request argument(s) + request = discoveryengine_v1alpha.ListControlsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_controls(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END discoveryengine_v1alpha_generated_ControlService_ListControls_sync] diff --git a/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1alpha_generated_control_service_update_control_async.py b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1alpha_generated_control_service_update_control_async.py new file mode 100644 index 000000000000..9c752d64809c --- /dev/null +++ b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1alpha_generated_control_service_update_control_async.py @@ -0,0 +1,59 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateControl +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-discoveryengine + + +# [START discoveryengine_v1alpha_generated_ControlService_UpdateControl_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import discoveryengine_v1alpha + + +async def sample_update_control(): + # Create a client + client = discoveryengine_v1alpha.ControlServiceAsyncClient() + + # Initialize request argument(s) + control = discoveryengine_v1alpha.Control() + control.boost_action.boost = 0.551 + control.boost_action.filter = "filter_value" + control.boost_action.data_store = "data_store_value" + control.display_name = "display_name_value" + control.solution_type = "SOLUTION_TYPE_GENERATIVE_CHAT" + + request = discoveryengine_v1alpha.UpdateControlRequest( + control=control, + ) + + # Make the request + response = await client.update_control(request=request) + + # Handle the response + print(response) + +# [END discoveryengine_v1alpha_generated_ControlService_UpdateControl_async] diff --git a/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1alpha_generated_control_service_update_control_sync.py b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1alpha_generated_control_service_update_control_sync.py new file mode 100644 index 000000000000..4aa146a30349 --- /dev/null +++ b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1alpha_generated_control_service_update_control_sync.py @@ -0,0 +1,59 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateControl +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-discoveryengine + + +# [START discoveryengine_v1alpha_generated_ControlService_UpdateControl_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import discoveryengine_v1alpha + + +def sample_update_control(): + # Create a client + client = discoveryengine_v1alpha.ControlServiceClient() + + # Initialize request argument(s) + control = discoveryengine_v1alpha.Control() + control.boost_action.boost = 0.551 + control.boost_action.filter = "filter_value" + control.boost_action.data_store = "data_store_value" + control.display_name = "display_name_value" + control.solution_type = "SOLUTION_TYPE_GENERATIVE_CHAT" + + request = discoveryengine_v1alpha.UpdateControlRequest( + control=control, + ) + + # Make the request + response = client.update_control(request=request) + + # Handle the response + print(response) + +# [END discoveryengine_v1alpha_generated_ControlService_UpdateControl_sync] diff --git a/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1alpha_generated_document_service_get_processed_document_async.py b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1alpha_generated_document_service_get_processed_document_async.py index 0258cb3c2030..e49932d6908b 100644 --- a/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1alpha_generated_document_service_get_processed_document_async.py +++ b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1alpha_generated_document_service_get_processed_document_async.py @@ -41,7 +41,7 @@ async def sample_get_processed_document(): # Initialize request argument(s) request = discoveryengine_v1alpha.GetProcessedDocumentRequest( name="name_value", - processed_document_type="CHUNKED_DOCUMENT", + processed_document_type="PNG_CONVERTED_DOCUMENT", ) # Make the request diff --git a/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1alpha_generated_document_service_get_processed_document_sync.py b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1alpha_generated_document_service_get_processed_document_sync.py index 7581ccb4e33d..ff00ed1116c0 100644 --- a/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1alpha_generated_document_service_get_processed_document_sync.py +++ b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1alpha_generated_document_service_get_processed_document_sync.py @@ -41,7 +41,7 @@ def sample_get_processed_document(): # Initialize request argument(s) request = discoveryengine_v1alpha.GetProcessedDocumentRequest( name="name_value", - processed_document_type="CHUNKED_DOCUMENT", + processed_document_type="PNG_CONVERTED_DOCUMENT", ) # Make the request diff --git a/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1alpha_generated_search_tuning_service_list_custom_models_async.py b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1alpha_generated_search_tuning_service_list_custom_models_async.py new file mode 100644 index 000000000000..7bdd00f06cb3 --- /dev/null +++ b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1alpha_generated_search_tuning_service_list_custom_models_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the 
"License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListCustomModels +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-discoveryengine + + +# [START discoveryengine_v1alpha_generated_SearchTuningService_ListCustomModels_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import discoveryengine_v1alpha + + +async def sample_list_custom_models(): + # Create a client + client = discoveryengine_v1alpha.SearchTuningServiceAsyncClient() + + # Initialize request argument(s) + request = discoveryengine_v1alpha.ListCustomModelsRequest( + data_store="data_store_value", + ) + + # Make the request + response = await client.list_custom_models(request=request) + + # Handle the response + print(response) + +# [END discoveryengine_v1alpha_generated_SearchTuningService_ListCustomModels_async] diff --git a/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1alpha_generated_search_tuning_service_list_custom_models_sync.py b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1alpha_generated_search_tuning_service_list_custom_models_sync.py new file mode 100644 index 000000000000..e3cf62fadbf7 --- /dev/null +++ b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1alpha_generated_search_tuning_service_list_custom_models_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListCustomModels +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-discoveryengine + + +# [START discoveryengine_v1alpha_generated_SearchTuningService_ListCustomModels_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import discoveryengine_v1alpha + + +def sample_list_custom_models(): + # Create a client + client = discoveryengine_v1alpha.SearchTuningServiceClient() + + # Initialize request argument(s) + request = discoveryengine_v1alpha.ListCustomModelsRequest( + data_store="data_store_value", + ) + + # Make the request + response = client.list_custom_models(request=request) + + # Handle the response + print(response) + +# [END discoveryengine_v1alpha_generated_SearchTuningService_ListCustomModels_sync] diff --git a/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1beta_generated_control_service_create_control_async.py b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1beta_generated_control_service_create_control_async.py new file mode 100644 index 000000000000..632753d107fc --- /dev/null +++ b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1beta_generated_control_service_create_control_async.py @@ -0,0 +1,61 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateControl +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-discoveryengine + + +# [START discoveryengine_v1beta_generated_ControlService_CreateControl_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import discoveryengine_v1beta + + +async def sample_create_control(): + # Create a client + client = discoveryengine_v1beta.ControlServiceAsyncClient() + + # Initialize request argument(s) + control = discoveryengine_v1beta.Control() + control.boost_action.boost = 0.551 + control.boost_action.filter = "filter_value" + control.boost_action.data_store = "data_store_value" + control.display_name = "display_name_value" + control.solution_type = "SOLUTION_TYPE_GENERATIVE_CHAT" + + request = discoveryengine_v1beta.CreateControlRequest( + parent="parent_value", + control=control, + control_id="control_id_value", + ) + + # Make the request + response = await client.create_control(request=request) + + # Handle the response + print(response) + +# [END discoveryengine_v1beta_generated_ControlService_CreateControl_async] diff --git a/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1beta_generated_control_service_create_control_sync.py b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1beta_generated_control_service_create_control_sync.py new file mode 100644 index 000000000000..570deef72085 --- /dev/null +++ b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1beta_generated_control_service_create_control_sync.py @@ -0,0 +1,61 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateControl +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-discoveryengine + + +# [START discoveryengine_v1beta_generated_ControlService_CreateControl_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import discoveryengine_v1beta + + +def sample_create_control(): + # Create a client + client = discoveryengine_v1beta.ControlServiceClient() + + # Initialize request argument(s) + control = discoveryengine_v1beta.Control() + control.boost_action.boost = 0.551 + control.boost_action.filter = "filter_value" + control.boost_action.data_store = "data_store_value" + control.display_name = "display_name_value" + control.solution_type = "SOLUTION_TYPE_GENERATIVE_CHAT" + + request = discoveryengine_v1beta.CreateControlRequest( + parent="parent_value", + control=control, + control_id="control_id_value", + ) + + # Make the request + response = client.create_control(request=request) + + # Handle the response + print(response) + +# [END discoveryengine_v1beta_generated_ControlService_CreateControl_sync] diff --git a/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1beta_generated_control_service_delete_control_async.py b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1beta_generated_control_service_delete_control_async.py new file mode 100644 index 000000000000..c8c1d687ac40 --- /dev/null +++ b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1beta_generated_control_service_delete_control_async.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteControl +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-discoveryengine + + +# [START discoveryengine_v1beta_generated_ControlService_DeleteControl_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import discoveryengine_v1beta + + +async def sample_delete_control(): + # Create a client + client = discoveryengine_v1beta.ControlServiceAsyncClient() + + # Initialize request argument(s) + request = discoveryengine_v1beta.DeleteControlRequest( + name="name_value", + ) + + # Make the request + await client.delete_control(request=request) + + +# [END discoveryengine_v1beta_generated_ControlService_DeleteControl_async] diff --git a/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1beta_generated_control_service_delete_control_sync.py b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1beta_generated_control_service_delete_control_sync.py new file mode 100644 index 000000000000..36cbf93b9d8f --- /dev/null +++ b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1beta_generated_control_service_delete_control_sync.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteControl +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-discoveryengine + + +# [START discoveryengine_v1beta_generated_ControlService_DeleteControl_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import discoveryengine_v1beta + + +def sample_delete_control(): + # Create a client + client = discoveryengine_v1beta.ControlServiceClient() + + # Initialize request argument(s) + request = discoveryengine_v1beta.DeleteControlRequest( + name="name_value", + ) + + # Make the request + client.delete_control(request=request) + + +# [END discoveryengine_v1beta_generated_ControlService_DeleteControl_sync] diff --git a/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1beta_generated_control_service_get_control_async.py b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1beta_generated_control_service_get_control_async.py new file mode 100644 index 000000000000..3eaac47f1ac6 --- /dev/null +++ b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1beta_generated_control_service_get_control_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetControl +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-discoveryengine + + +# [START discoveryengine_v1beta_generated_ControlService_GetControl_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import discoveryengine_v1beta + + +async def sample_get_control(): + # Create a client + client = discoveryengine_v1beta.ControlServiceAsyncClient() + + # Initialize request argument(s) + request = discoveryengine_v1beta.GetControlRequest( + name="name_value", + ) + + # Make the request + response = await client.get_control(request=request) + + # Handle the response + print(response) + +# [END discoveryengine_v1beta_generated_ControlService_GetControl_async] diff --git a/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1beta_generated_control_service_get_control_sync.py b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1beta_generated_control_service_get_control_sync.py new file mode 100644 index 000000000000..5d7bbc383fdc --- /dev/null +++ b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1beta_generated_control_service_get_control_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetControl +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-discoveryengine + + +# [START discoveryengine_v1beta_generated_ControlService_GetControl_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import discoveryengine_v1beta + + +def sample_get_control(): + # Create a client + client = discoveryengine_v1beta.ControlServiceClient() + + # Initialize request argument(s) + request = discoveryengine_v1beta.GetControlRequest( + name="name_value", + ) + + # Make the request + response = client.get_control(request=request) + + # Handle the response + print(response) + +# [END discoveryengine_v1beta_generated_ControlService_GetControl_sync] diff --git a/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1beta_generated_control_service_list_controls_async.py b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1beta_generated_control_service_list_controls_async.py new file mode 100644 index 000000000000..7ba4c471913c --- /dev/null +++ b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1beta_generated_control_service_list_controls_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListControls +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-discoveryengine + + +# [START discoveryengine_v1beta_generated_ControlService_ListControls_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import discoveryengine_v1beta + + +async def sample_list_controls(): + # Create a client + client = discoveryengine_v1beta.ControlServiceAsyncClient() + + # Initialize request argument(s) + request = discoveryengine_v1beta.ListControlsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_controls(request=request) + + # Handle the response + async for response in page_result: + print(response) + +# [END discoveryengine_v1beta_generated_ControlService_ListControls_async] diff --git a/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1beta_generated_control_service_list_controls_sync.py b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1beta_generated_control_service_list_controls_sync.py new file mode 100644 index 000000000000..585a260fe5af --- /dev/null +++ b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1beta_generated_control_service_list_controls_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListControls +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-discoveryengine + + +# [START discoveryengine_v1beta_generated_ControlService_ListControls_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import discoveryengine_v1beta + + +def sample_list_controls(): + # Create a client + client = discoveryengine_v1beta.ControlServiceClient() + + # Initialize request argument(s) + request = discoveryengine_v1beta.ListControlsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_controls(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END discoveryengine_v1beta_generated_ControlService_ListControls_sync] diff --git a/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1beta_generated_control_service_update_control_async.py b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1beta_generated_control_service_update_control_async.py new file mode 100644 index 000000000000..1bfdb8344d34 --- /dev/null +++ b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1beta_generated_control_service_update_control_async.py @@ -0,0 +1,59 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateControl +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-discoveryengine + + +# [START discoveryengine_v1beta_generated_ControlService_UpdateControl_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import discoveryengine_v1beta + + +async def sample_update_control(): + # Create a client + client = discoveryengine_v1beta.ControlServiceAsyncClient() + + # Initialize request argument(s) + control = discoveryengine_v1beta.Control() + control.boost_action.boost = 0.551 + control.boost_action.filter = "filter_value" + control.boost_action.data_store = "data_store_value" + control.display_name = "display_name_value" + control.solution_type = "SOLUTION_TYPE_GENERATIVE_CHAT" + + request = discoveryengine_v1beta.UpdateControlRequest( + control=control, + ) + + # Make the request + response = await client.update_control(request=request) + + # Handle the response + print(response) + +# [END discoveryengine_v1beta_generated_ControlService_UpdateControl_async] diff --git a/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1beta_generated_control_service_update_control_sync.py b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1beta_generated_control_service_update_control_sync.py new file mode 100644 index 000000000000..fa20bc22a9f9 --- /dev/null +++ b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1beta_generated_control_service_update_control_sync.py @@ -0,0 +1,59 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateControl +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-discoveryengine + + +# [START discoveryengine_v1beta_generated_ControlService_UpdateControl_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import discoveryengine_v1beta + + +def sample_update_control(): + # Create a client + client = discoveryengine_v1beta.ControlServiceClient() + + # Initialize request argument(s) + control = discoveryengine_v1beta.Control() + control.boost_action.boost = 0.551 + control.boost_action.filter = "filter_value" + control.boost_action.data_store = "data_store_value" + control.display_name = "display_name_value" + control.solution_type = "SOLUTION_TYPE_GENERATIVE_CHAT" + + request = discoveryengine_v1beta.UpdateControlRequest( + control=control, + ) + + # Make the request + response = client.update_control(request=request) + + # Handle the response + print(response) + +# [END discoveryengine_v1beta_generated_ControlService_UpdateControl_sync] diff --git a/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1beta_generated_project_service_provision_project_async.py b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1beta_generated_project_service_provision_project_async.py new file mode 100644 index 000000000000..f834e4affb17 --- /dev/null +++ b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1beta_generated_project_service_provision_project_async.py @@ -0,0 +1,58 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ProvisionProject +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-discoveryengine + + +# [START discoveryengine_v1beta_generated_ProjectService_ProvisionProject_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import discoveryengine_v1beta + + +async def sample_provision_project(): + # Create a client + client = discoveryengine_v1beta.ProjectServiceAsyncClient() + + # Initialize request argument(s) + request = discoveryengine_v1beta.ProvisionProjectRequest( + name="name_value", + accept_data_use_terms=True, + data_use_terms_version="data_use_terms_version_value", + ) + + # Make the request + operation = client.provision_project(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END discoveryengine_v1beta_generated_ProjectService_ProvisionProject_async] diff --git a/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1beta_generated_project_service_provision_project_sync.py b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1beta_generated_project_service_provision_project_sync.py new file mode 100644 index 000000000000..f0dfebcec2ac --- /dev/null +++ b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1beta_generated_project_service_provision_project_sync.py @@ -0,0 +1,58 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ProvisionProject +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-discoveryengine + + +# [START discoveryengine_v1beta_generated_ProjectService_ProvisionProject_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import discoveryengine_v1beta + + +def sample_provision_project(): + # Create a client + client = discoveryengine_v1beta.ProjectServiceClient() + + # Initialize request argument(s) + request = discoveryengine_v1beta.ProvisionProjectRequest( + name="name_value", + accept_data_use_terms=True, + data_use_terms_version="data_use_terms_version_value", + ) + + # Make the request + operation = client.provision_project(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END discoveryengine_v1beta_generated_ProjectService_ProvisionProject_sync] diff --git a/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1beta_generated_search_tuning_service_list_custom_models_async.py b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1beta_generated_search_tuning_service_list_custom_models_async.py new file mode 100644 index 000000000000..8ec5dbfde8d8 --- /dev/null +++ b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1beta_generated_search_tuning_service_list_custom_models_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListCustomModels +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-discoveryengine + + +# [START discoveryengine_v1beta_generated_SearchTuningService_ListCustomModels_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import discoveryengine_v1beta + + +async def sample_list_custom_models(): + # Create a client + client = discoveryengine_v1beta.SearchTuningServiceAsyncClient() + + # Initialize request argument(s) + request = discoveryengine_v1beta.ListCustomModelsRequest( + data_store="data_store_value", + ) + + # Make the request + response = await client.list_custom_models(request=request) + + # Handle the response + print(response) + +# [END discoveryengine_v1beta_generated_SearchTuningService_ListCustomModels_async] diff --git a/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1beta_generated_search_tuning_service_list_custom_models_sync.py b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1beta_generated_search_tuning_service_list_custom_models_sync.py new file mode 100644 index 000000000000..d8f6cd0398ea --- /dev/null +++ b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1beta_generated_search_tuning_service_list_custom_models_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListCustomModels +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-discoveryengine + + +# [START discoveryengine_v1beta_generated_SearchTuningService_ListCustomModels_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import discoveryengine_v1beta + + +def sample_list_custom_models(): + # Create a client + client = discoveryengine_v1beta.SearchTuningServiceClient() + + # Initialize request argument(s) + request = discoveryengine_v1beta.ListCustomModelsRequest( + data_store="data_store_value", + ) + + # Make the request + response = client.list_custom_models(request=request) + + # Handle the response + print(response) + +# [END discoveryengine_v1beta_generated_SearchTuningService_ListCustomModels_sync] diff --git a/packages/google-cloud-discoveryengine/samples/generated_samples/snippet_metadata_google.cloud.discoveryengine.v1.json b/packages/google-cloud-discoveryengine/samples/generated_samples/snippet_metadata_google.cloud.discoveryengine.v1.json index 21e73dc22511..813d89469a75 100644 --- a/packages/google-cloud-discoveryengine/samples/generated_samples/snippet_metadata_google.cloud.discoveryengine.v1.json +++ b/packages/google-cloud-discoveryengine/samples/generated_samples/snippet_metadata_google.cloud.discoveryengine.v1.json @@ -475,30 +475,34 @@ "clientMethod": { "async": true, "client": { - "fullName": "google.cloud.discoveryengine_v1.ConversationalSearchServiceAsyncClient", - "shortName": "ConversationalSearchServiceAsyncClient" + "fullName": "google.cloud.discoveryengine_v1.ControlServiceAsyncClient", + "shortName": "ControlServiceAsyncClient" }, - "fullName": "google.cloud.discoveryengine_v1.ConversationalSearchServiceAsyncClient.converse_conversation", + "fullName": "google.cloud.discoveryengine_v1.ControlServiceAsyncClient.create_control", "method": { - "fullName": "google.cloud.discoveryengine.v1.ConversationalSearchService.ConverseConversation", + "fullName": "google.cloud.discoveryengine.v1.ControlService.CreateControl", "service": { - "fullName": "google.cloud.discoveryengine.v1.ConversationalSearchService", - "shortName": "ConversationalSearchService" + "fullName": "google.cloud.discoveryengine.v1.ControlService", + "shortName": "ControlService" }, - "shortName": "ConverseConversation" + "shortName": "CreateControl" }, "parameters": [ { "name": "request", - "type": "google.cloud.discoveryengine_v1.types.ConverseConversationRequest" + "type": "google.cloud.discoveryengine_v1.types.CreateControlRequest" }, { - "name": "name", + "name": "parent", "type": "str" }, { - "name": "query", - "type": "google.cloud.discoveryengine_v1.types.TextInput" + "name": "control", + "type": "google.cloud.discoveryengine_v1.types.Control" + }, + { + "name": "control_id", + "type": "str" }, { "name": "retry", @@ -513,22 +517,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.discoveryengine_v1.types.ConverseConversationResponse", - "shortName": "converse_conversation" + "resultType": "google.cloud.discoveryengine_v1.types.Control", + "shortName": "create_control" }, - "description": "Sample for ConverseConversation", - "file": "discoveryengine_v1_generated_conversational_search_service_converse_conversation_async.py", + "description": "Sample for CreateControl", + "file": "discoveryengine_v1_generated_control_service_create_control_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "discoveryengine_v1_generated_ConversationalSearchService_ConverseConversation_async", + "regionTag": 
"discoveryengine_v1_generated_ControlService_CreateControl_async", "segments": [ { - "end": 51, + "end": 60, "start": 27, "type": "FULL" }, { - "end": 51, + "end": 60, "start": 27, "type": "SHORT" }, @@ -538,51 +542,55 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 45, + "end": 54, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 48, - "start": 46, + "end": 57, + "start": 55, "type": "REQUEST_EXECUTION" }, { - "end": 52, - "start": 49, + "end": 61, + "start": 58, "type": "RESPONSE_HANDLING" } ], - "title": "discoveryengine_v1_generated_conversational_search_service_converse_conversation_async.py" + "title": "discoveryengine_v1_generated_control_service_create_control_async.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.discoveryengine_v1.ConversationalSearchServiceClient", - "shortName": "ConversationalSearchServiceClient" + "fullName": "google.cloud.discoveryengine_v1.ControlServiceClient", + "shortName": "ControlServiceClient" }, - "fullName": "google.cloud.discoveryengine_v1.ConversationalSearchServiceClient.converse_conversation", + "fullName": "google.cloud.discoveryengine_v1.ControlServiceClient.create_control", "method": { - "fullName": "google.cloud.discoveryengine.v1.ConversationalSearchService.ConverseConversation", + "fullName": "google.cloud.discoveryengine.v1.ControlService.CreateControl", "service": { - "fullName": "google.cloud.discoveryengine.v1.ConversationalSearchService", - "shortName": "ConversationalSearchService" + "fullName": "google.cloud.discoveryengine.v1.ControlService", + "shortName": "ControlService" }, - "shortName": "ConverseConversation" + "shortName": "CreateControl" }, "parameters": [ { "name": "request", - "type": "google.cloud.discoveryengine_v1.types.ConverseConversationRequest" + "type": "google.cloud.discoveryengine_v1.types.CreateControlRequest" }, { - "name": "name", + "name": "parent", "type": "str" }, { - "name": "query", - "type": "google.cloud.discoveryengine_v1.types.TextInput" + "name": "control", + "type": "google.cloud.discoveryengine_v1.types.Control" + }, + { + "name": "control_id", + "type": "str" }, { "name": "retry", @@ -597,22 +605,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.discoveryengine_v1.types.ConverseConversationResponse", - "shortName": "converse_conversation" + "resultType": "google.cloud.discoveryengine_v1.types.Control", + "shortName": "create_control" }, - "description": "Sample for ConverseConversation", - "file": "discoveryengine_v1_generated_conversational_search_service_converse_conversation_sync.py", + "description": "Sample for CreateControl", + "file": "discoveryengine_v1_generated_control_service_create_control_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "discoveryengine_v1_generated_ConversationalSearchService_ConverseConversation_sync", + "regionTag": "discoveryengine_v1_generated_ControlService_CreateControl_sync", "segments": [ { - "end": 51, + "end": 60, "start": 27, "type": "FULL" }, { - "end": 51, + "end": 60, "start": 27, "type": "SHORT" }, @@ -622,53 +630,49 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 45, + "end": 54, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 48, - "start": 46, + "end": 57, + "start": 55, "type": "REQUEST_EXECUTION" }, { - "end": 52, - "start": 49, + "end": 61, + "start": 58, "type": "RESPONSE_HANDLING" } ], - "title": "discoveryengine_v1_generated_conversational_search_service_converse_conversation_sync.py" + "title": 
"discoveryengine_v1_generated_control_service_create_control_sync.py" }, { "canonical": true, "clientMethod": { "async": true, "client": { - "fullName": "google.cloud.discoveryengine_v1.ConversationalSearchServiceAsyncClient", - "shortName": "ConversationalSearchServiceAsyncClient" + "fullName": "google.cloud.discoveryengine_v1.ControlServiceAsyncClient", + "shortName": "ControlServiceAsyncClient" }, - "fullName": "google.cloud.discoveryengine_v1.ConversationalSearchServiceAsyncClient.create_conversation", + "fullName": "google.cloud.discoveryengine_v1.ControlServiceAsyncClient.delete_control", "method": { - "fullName": "google.cloud.discoveryengine.v1.ConversationalSearchService.CreateConversation", + "fullName": "google.cloud.discoveryengine.v1.ControlService.DeleteControl", "service": { - "fullName": "google.cloud.discoveryengine.v1.ConversationalSearchService", - "shortName": "ConversationalSearchService" + "fullName": "google.cloud.discoveryengine.v1.ControlService", + "shortName": "ControlService" }, - "shortName": "CreateConversation" + "shortName": "DeleteControl" }, "parameters": [ { "name": "request", - "type": "google.cloud.discoveryengine_v1.types.CreateConversationRequest" + "type": "google.cloud.discoveryengine_v1.types.DeleteControlRequest" }, { - "name": "parent", + "name": "name", "type": "str" }, - { - "name": "conversation", - "type": "google.cloud.discoveryengine_v1.types.Conversation" - }, { "name": "retry", "type": "google.api_core.retry.Retry" @@ -682,22 +686,21 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.discoveryengine_v1.types.Conversation", - "shortName": "create_conversation" + "shortName": "delete_control" }, - "description": "Sample for CreateConversation", - "file": "discoveryengine_v1_generated_conversational_search_service_create_conversation_async.py", + "description": "Sample for DeleteControl", + "file": "discoveryengine_v1_generated_control_service_delete_control_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "discoveryengine_v1_generated_ConversationalSearchService_CreateConversation_async", + "regionTag": "discoveryengine_v1_generated_ControlService_DeleteControl_async", "segments": [ { - "end": 51, + "end": 49, "start": 27, "type": "FULL" }, { - "end": 51, + "end": 49, "start": 27, "type": "SHORT" }, @@ -712,47 +715,41 @@ "type": "REQUEST_INITIALIZATION" }, { - "end": 48, "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 52, - "start": 49, + "end": 50, "type": "RESPONSE_HANDLING" } ], - "title": "discoveryengine_v1_generated_conversational_search_service_create_conversation_async.py" + "title": "discoveryengine_v1_generated_control_service_delete_control_async.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.discoveryengine_v1.ConversationalSearchServiceClient", - "shortName": "ConversationalSearchServiceClient" + "fullName": "google.cloud.discoveryengine_v1.ControlServiceClient", + "shortName": "ControlServiceClient" }, - "fullName": "google.cloud.discoveryengine_v1.ConversationalSearchServiceClient.create_conversation", + "fullName": "google.cloud.discoveryengine_v1.ControlServiceClient.delete_control", "method": { - "fullName": "google.cloud.discoveryengine.v1.ConversationalSearchService.CreateConversation", + "fullName": "google.cloud.discoveryengine.v1.ControlService.DeleteControl", "service": { - "fullName": "google.cloud.discoveryengine.v1.ConversationalSearchService", - "shortName": "ConversationalSearchService" + "fullName": 
"google.cloud.discoveryengine.v1.ControlService", + "shortName": "ControlService" }, - "shortName": "CreateConversation" + "shortName": "DeleteControl" }, "parameters": [ { "name": "request", - "type": "google.cloud.discoveryengine_v1.types.CreateConversationRequest" + "type": "google.cloud.discoveryengine_v1.types.DeleteControlRequest" }, { - "name": "parent", + "name": "name", "type": "str" }, - { - "name": "conversation", - "type": "google.cloud.discoveryengine_v1.types.Conversation" - }, { "name": "retry", "type": "google.api_core.retry.Retry" @@ -766,22 +763,21 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.discoveryengine_v1.types.Conversation", - "shortName": "create_conversation" + "shortName": "delete_control" }, - "description": "Sample for CreateConversation", - "file": "discoveryengine_v1_generated_conversational_search_service_create_conversation_sync.py", + "description": "Sample for DeleteControl", + "file": "discoveryengine_v1_generated_control_service_delete_control_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "discoveryengine_v1_generated_ConversationalSearchService_CreateConversation_sync", + "regionTag": "discoveryengine_v1_generated_ControlService_DeleteControl_sync", "segments": [ { - "end": 51, + "end": 49, "start": 27, "type": "FULL" }, { - "end": 51, + "end": 49, "start": 27, "type": "SHORT" }, @@ -796,39 +792,37 @@ "type": "REQUEST_INITIALIZATION" }, { - "end": 48, "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 52, - "start": 49, + "end": 50, "type": "RESPONSE_HANDLING" } ], - "title": "discoveryengine_v1_generated_conversational_search_service_create_conversation_sync.py" + "title": "discoveryengine_v1_generated_control_service_delete_control_sync.py" }, { "canonical": true, "clientMethod": { "async": true, "client": { - "fullName": "google.cloud.discoveryengine_v1.ConversationalSearchServiceAsyncClient", - "shortName": "ConversationalSearchServiceAsyncClient" + "fullName": "google.cloud.discoveryengine_v1.ControlServiceAsyncClient", + "shortName": "ControlServiceAsyncClient" }, - "fullName": "google.cloud.discoveryengine_v1.ConversationalSearchServiceAsyncClient.delete_conversation", + "fullName": "google.cloud.discoveryengine_v1.ControlServiceAsyncClient.get_control", "method": { - "fullName": "google.cloud.discoveryengine.v1.ConversationalSearchService.DeleteConversation", + "fullName": "google.cloud.discoveryengine.v1.ControlService.GetControl", "service": { - "fullName": "google.cloud.discoveryengine.v1.ConversationalSearchService", - "shortName": "ConversationalSearchService" + "fullName": "google.cloud.discoveryengine.v1.ControlService", + "shortName": "ControlService" }, - "shortName": "DeleteConversation" + "shortName": "GetControl" }, "parameters": [ { "name": "request", - "type": "google.cloud.discoveryengine_v1.types.DeleteConversationRequest" + "type": "google.cloud.discoveryengine_v1.types.GetControlRequest" }, { "name": "name", @@ -847,21 +841,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "shortName": "delete_conversation" + "resultType": "google.cloud.discoveryengine_v1.types.Control", + "shortName": "get_control" }, - "description": "Sample for DeleteConversation", - "file": "discoveryengine_v1_generated_conversational_search_service_delete_conversation_async.py", + "description": "Sample for GetControl", + "file": "discoveryengine_v1_generated_control_service_get_control_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": 
"discoveryengine_v1_generated_ConversationalSearchService_DeleteConversation_async", + "regionTag": "discoveryengine_v1_generated_ControlService_GetControl_async", "segments": [ { - "end": 49, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 49, + "end": 51, "start": 27, "type": "SHORT" }, @@ -876,36 +871,38 @@ "type": "REQUEST_INITIALIZATION" }, { + "end": 48, "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 50, + "end": 52, + "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "discoveryengine_v1_generated_conversational_search_service_delete_conversation_async.py" + "title": "discoveryengine_v1_generated_control_service_get_control_async.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.discoveryengine_v1.ConversationalSearchServiceClient", - "shortName": "ConversationalSearchServiceClient" + "fullName": "google.cloud.discoveryengine_v1.ControlServiceClient", + "shortName": "ControlServiceClient" }, - "fullName": "google.cloud.discoveryengine_v1.ConversationalSearchServiceClient.delete_conversation", + "fullName": "google.cloud.discoveryengine_v1.ControlServiceClient.get_control", "method": { - "fullName": "google.cloud.discoveryengine.v1.ConversationalSearchService.DeleteConversation", + "fullName": "google.cloud.discoveryengine.v1.ControlService.GetControl", "service": { - "fullName": "google.cloud.discoveryengine.v1.ConversationalSearchService", - "shortName": "ConversationalSearchService" + "fullName": "google.cloud.discoveryengine.v1.ControlService", + "shortName": "ControlService" }, - "shortName": "DeleteConversation" + "shortName": "GetControl" }, "parameters": [ { "name": "request", - "type": "google.cloud.discoveryengine_v1.types.DeleteConversationRequest" + "type": "google.cloud.discoveryengine_v1.types.GetControlRequest" }, { "name": "name", @@ -924,21 +921,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "shortName": "delete_conversation" + "resultType": "google.cloud.discoveryengine_v1.types.Control", + "shortName": "get_control" }, - "description": "Sample for DeleteConversation", - "file": "discoveryengine_v1_generated_conversational_search_service_delete_conversation_sync.py", + "description": "Sample for GetControl", + "file": "discoveryengine_v1_generated_control_service_get_control_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "discoveryengine_v1_generated_ConversationalSearchService_DeleteConversation_sync", + "regionTag": "discoveryengine_v1_generated_ControlService_GetControl_sync", "segments": [ { - "end": 49, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 49, + "end": 51, "start": 27, "type": "SHORT" }, @@ -953,40 +951,42 @@ "type": "REQUEST_INITIALIZATION" }, { + "end": 48, "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 50, + "end": 52, + "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "discoveryengine_v1_generated_conversational_search_service_delete_conversation_sync.py" + "title": "discoveryengine_v1_generated_control_service_get_control_sync.py" }, { "canonical": true, "clientMethod": { "async": true, "client": { - "fullName": "google.cloud.discoveryengine_v1.ConversationalSearchServiceAsyncClient", - "shortName": "ConversationalSearchServiceAsyncClient" + "fullName": "google.cloud.discoveryengine_v1.ControlServiceAsyncClient", + "shortName": "ControlServiceAsyncClient" }, - "fullName": "google.cloud.discoveryengine_v1.ConversationalSearchServiceAsyncClient.get_conversation", + "fullName": 
"google.cloud.discoveryengine_v1.ControlServiceAsyncClient.list_controls", "method": { - "fullName": "google.cloud.discoveryengine.v1.ConversationalSearchService.GetConversation", + "fullName": "google.cloud.discoveryengine.v1.ControlService.ListControls", "service": { - "fullName": "google.cloud.discoveryengine.v1.ConversationalSearchService", - "shortName": "ConversationalSearchService" + "fullName": "google.cloud.discoveryengine.v1.ControlService", + "shortName": "ControlService" }, - "shortName": "GetConversation" + "shortName": "ListControls" }, "parameters": [ { "name": "request", - "type": "google.cloud.discoveryengine_v1.types.GetConversationRequest" + "type": "google.cloud.discoveryengine_v1.types.ListControlsRequest" }, { - "name": "name", + "name": "parent", "type": "str" }, { @@ -1002,22 +1002,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.discoveryengine_v1.types.Conversation", - "shortName": "get_conversation" + "resultType": "google.cloud.discoveryengine_v1.services.control_service.pagers.ListControlsAsyncPager", + "shortName": "list_controls" }, - "description": "Sample for GetConversation", - "file": "discoveryengine_v1_generated_conversational_search_service_get_conversation_async.py", + "description": "Sample for ListControls", + "file": "discoveryengine_v1_generated_control_service_list_controls_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "discoveryengine_v1_generated_ConversationalSearchService_GetConversation_async", + "regionTag": "discoveryengine_v1_generated_ControlService_ListControls_async", "segments": [ { - "end": 51, + "end": 52, "start": 27, "type": "FULL" }, { - "end": 51, + "end": 52, "start": 27, "type": "SHORT" }, @@ -1037,36 +1037,36 @@ "type": "REQUEST_EXECUTION" }, { - "end": 52, + "end": 53, "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "discoveryengine_v1_generated_conversational_search_service_get_conversation_async.py" + "title": "discoveryengine_v1_generated_control_service_list_controls_async.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.discoveryengine_v1.ConversationalSearchServiceClient", - "shortName": "ConversationalSearchServiceClient" + "fullName": "google.cloud.discoveryengine_v1.ControlServiceClient", + "shortName": "ControlServiceClient" }, - "fullName": "google.cloud.discoveryengine_v1.ConversationalSearchServiceClient.get_conversation", + "fullName": "google.cloud.discoveryengine_v1.ControlServiceClient.list_controls", "method": { - "fullName": "google.cloud.discoveryengine.v1.ConversationalSearchService.GetConversation", + "fullName": "google.cloud.discoveryengine.v1.ControlService.ListControls", "service": { - "fullName": "google.cloud.discoveryengine.v1.ConversationalSearchService", - "shortName": "ConversationalSearchService" + "fullName": "google.cloud.discoveryengine.v1.ControlService", + "shortName": "ControlService" }, - "shortName": "GetConversation" + "shortName": "ListControls" }, "parameters": [ { "name": "request", - "type": "google.cloud.discoveryengine_v1.types.GetConversationRequest" + "type": "google.cloud.discoveryengine_v1.types.ListControlsRequest" }, { - "name": "name", + "name": "parent", "type": "str" }, { @@ -1082,22 +1082,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.discoveryengine_v1.types.Conversation", - "shortName": "get_conversation" + "resultType": "google.cloud.discoveryengine_v1.services.control_service.pagers.ListControlsPager", + "shortName": "list_controls" 
}, - "description": "Sample for GetConversation", - "file": "discoveryengine_v1_generated_conversational_search_service_get_conversation_sync.py", + "description": "Sample for ListControls", + "file": "discoveryengine_v1_generated_control_service_list_controls_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "discoveryengine_v1_generated_ConversationalSearchService_GetConversation_sync", + "regionTag": "discoveryengine_v1_generated_ControlService_ListControls_sync", "segments": [ { - "end": 51, + "end": 52, "start": 27, "type": "FULL" }, { - "end": 51, + "end": 52, "start": 27, "type": "SHORT" }, @@ -1117,38 +1117,42 @@ "type": "REQUEST_EXECUTION" }, { - "end": 52, + "end": 53, "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "discoveryengine_v1_generated_conversational_search_service_get_conversation_sync.py" + "title": "discoveryengine_v1_generated_control_service_list_controls_sync.py" }, { "canonical": true, "clientMethod": { "async": true, "client": { - "fullName": "google.cloud.discoveryengine_v1.ConversationalSearchServiceAsyncClient", - "shortName": "ConversationalSearchServiceAsyncClient" + "fullName": "google.cloud.discoveryengine_v1.ControlServiceAsyncClient", + "shortName": "ControlServiceAsyncClient" }, - "fullName": "google.cloud.discoveryengine_v1.ConversationalSearchServiceAsyncClient.list_conversations", + "fullName": "google.cloud.discoveryengine_v1.ControlServiceAsyncClient.update_control", "method": { - "fullName": "google.cloud.discoveryengine.v1.ConversationalSearchService.ListConversations", + "fullName": "google.cloud.discoveryengine.v1.ControlService.UpdateControl", "service": { - "fullName": "google.cloud.discoveryengine.v1.ConversationalSearchService", - "shortName": "ConversationalSearchService" + "fullName": "google.cloud.discoveryengine.v1.ControlService", + "shortName": "ControlService" }, - "shortName": "ListConversations" + "shortName": "UpdateControl" }, "parameters": [ { "name": "request", - "type": "google.cloud.discoveryengine_v1.types.ListConversationsRequest" + "type": "google.cloud.discoveryengine_v1.types.UpdateControlRequest" }, { - "name": "parent", - "type": "str" + "name": "control", + "type": "google.cloud.discoveryengine_v1.types.Control" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" }, { "name": "retry", @@ -1163,22 +1167,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.discoveryengine_v1.services.conversational_search_service.pagers.ListConversationsAsyncPager", - "shortName": "list_conversations" + "resultType": "google.cloud.discoveryengine_v1.types.Control", + "shortName": "update_control" }, - "description": "Sample for ListConversations", - "file": "discoveryengine_v1_generated_conversational_search_service_list_conversations_async.py", + "description": "Sample for UpdateControl", + "file": "discoveryengine_v1_generated_control_service_update_control_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "discoveryengine_v1_generated_ConversationalSearchService_ListConversations_async", + "regionTag": "discoveryengine_v1_generated_ControlService_UpdateControl_async", "segments": [ { - "end": 52, + "end": 58, "start": 27, "type": "FULL" }, { - "end": 52, + "end": 58, "start": 27, "type": "SHORT" }, @@ -1188,51 +1192,55 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 45, + "end": 52, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 48, - "start": 46, + "end": 55, + "start": 53, "type": "REQUEST_EXECUTION" 
}, { - "end": 53, - "start": 49, + "end": 59, + "start": 56, "type": "RESPONSE_HANDLING" } ], - "title": "discoveryengine_v1_generated_conversational_search_service_list_conversations_async.py" + "title": "discoveryengine_v1_generated_control_service_update_control_async.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.discoveryengine_v1.ConversationalSearchServiceClient", - "shortName": "ConversationalSearchServiceClient" + "fullName": "google.cloud.discoveryengine_v1.ControlServiceClient", + "shortName": "ControlServiceClient" }, - "fullName": "google.cloud.discoveryengine_v1.ConversationalSearchServiceClient.list_conversations", + "fullName": "google.cloud.discoveryengine_v1.ControlServiceClient.update_control", "method": { - "fullName": "google.cloud.discoveryengine.v1.ConversationalSearchService.ListConversations", + "fullName": "google.cloud.discoveryengine.v1.ControlService.UpdateControl", "service": { - "fullName": "google.cloud.discoveryengine.v1.ConversationalSearchService", - "shortName": "ConversationalSearchService" + "fullName": "google.cloud.discoveryengine.v1.ControlService", + "shortName": "ControlService" }, - "shortName": "ListConversations" + "shortName": "UpdateControl" }, "parameters": [ { "name": "request", - "type": "google.cloud.discoveryengine_v1.types.ListConversationsRequest" + "type": "google.cloud.discoveryengine_v1.types.UpdateControlRequest" }, { - "name": "parent", - "type": "str" + "name": "control", + "type": "google.cloud.discoveryengine_v1.types.Control" }, { - "name": "retry", - "type": "google.api_core.retry.Retry" + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" }, { "name": "timeout", @@ -1243,22 +1251,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.discoveryengine_v1.services.conversational_search_service.pagers.ListConversationsPager", - "shortName": "list_conversations" + "resultType": "google.cloud.discoveryengine_v1.types.Control", + "shortName": "update_control" }, - "description": "Sample for ListConversations", - "file": "discoveryengine_v1_generated_conversational_search_service_list_conversations_sync.py", + "description": "Sample for UpdateControl", + "file": "discoveryengine_v1_generated_control_service_update_control_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "discoveryengine_v1_generated_ConversationalSearchService_ListConversations_sync", + "regionTag": "discoveryengine_v1_generated_ControlService_UpdateControl_sync", "segments": [ { - "end": 52, + "end": 58, "start": 27, "type": "FULL" }, { - "end": 52, + "end": 58, "start": 27, "type": "SHORT" }, @@ -1268,22 +1276,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 45, + "end": 52, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 48, - "start": 46, + "end": 55, + "start": 53, "type": "REQUEST_EXECUTION" }, { - "end": 53, - "start": 49, + "end": 59, + "start": 56, "type": "RESPONSE_HANDLING" } ], - "title": "discoveryengine_v1_generated_conversational_search_service_list_conversations_sync.py" + "title": "discoveryengine_v1_generated_control_service_update_control_sync.py" }, { "canonical": true, @@ -1293,27 +1301,19 @@ "fullName": "google.cloud.discoveryengine_v1.ConversationalSearchServiceAsyncClient", "shortName": "ConversationalSearchServiceAsyncClient" }, - "fullName": "google.cloud.discoveryengine_v1.ConversationalSearchServiceAsyncClient.update_conversation", + "fullName": 
"google.cloud.discoveryengine_v1.ConversationalSearchServiceAsyncClient.answer_query", "method": { - "fullName": "google.cloud.discoveryengine.v1.ConversationalSearchService.UpdateConversation", + "fullName": "google.cloud.discoveryengine.v1.ConversationalSearchService.AnswerQuery", "service": { "fullName": "google.cloud.discoveryengine.v1.ConversationalSearchService", "shortName": "ConversationalSearchService" }, - "shortName": "UpdateConversation" + "shortName": "AnswerQuery" }, "parameters": [ { "name": "request", - "type": "google.cloud.discoveryengine_v1.types.UpdateConversationRequest" - }, - { - "name": "conversation", - "type": "google.cloud.discoveryengine_v1.types.Conversation" - }, - { - "name": "update_mask", - "type": "google.protobuf.field_mask_pb2.FieldMask" + "type": "google.cloud.discoveryengine_v1.types.AnswerQueryRequest" }, { "name": "retry", @@ -1328,22 +1328,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.discoveryengine_v1.types.Conversation", - "shortName": "update_conversation" + "resultType": "google.cloud.discoveryengine_v1.types.AnswerQueryResponse", + "shortName": "answer_query" }, - "description": "Sample for UpdateConversation", - "file": "discoveryengine_v1_generated_conversational_search_service_update_conversation_async.py", + "description": "Sample for AnswerQuery", + "file": "discoveryengine_v1_generated_conversational_search_service_answer_query_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "discoveryengine_v1_generated_ConversationalSearchService_UpdateConversation_async", + "regionTag": "discoveryengine_v1_generated_ConversationalSearchService_AnswerQuery_async", "segments": [ { - "end": 50, + "end": 55, "start": 27, "type": "FULL" }, { - "end": 50, + "end": 55, "start": 27, "type": "SHORT" }, @@ -1353,22 +1353,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 44, + "end": 49, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 47, - "start": 45, + "end": 52, + "start": 50, "type": "REQUEST_EXECUTION" }, { - "end": 51, - "start": 48, + "end": 56, + "start": 53, "type": "RESPONSE_HANDLING" } ], - "title": "discoveryengine_v1_generated_conversational_search_service_update_conversation_async.py" + "title": "discoveryengine_v1_generated_conversational_search_service_answer_query_async.py" }, { "canonical": true, @@ -1377,27 +1377,19 @@ "fullName": "google.cloud.discoveryengine_v1.ConversationalSearchServiceClient", "shortName": "ConversationalSearchServiceClient" }, - "fullName": "google.cloud.discoveryengine_v1.ConversationalSearchServiceClient.update_conversation", + "fullName": "google.cloud.discoveryengine_v1.ConversationalSearchServiceClient.answer_query", "method": { - "fullName": "google.cloud.discoveryengine.v1.ConversationalSearchService.UpdateConversation", + "fullName": "google.cloud.discoveryengine.v1.ConversationalSearchService.AnswerQuery", "service": { "fullName": "google.cloud.discoveryengine.v1.ConversationalSearchService", "shortName": "ConversationalSearchService" }, - "shortName": "UpdateConversation" + "shortName": "AnswerQuery" }, "parameters": [ { "name": "request", - "type": "google.cloud.discoveryengine_v1.types.UpdateConversationRequest" - }, - { - "name": "conversation", - "type": "google.cloud.discoveryengine_v1.types.Conversation" - }, - { - "name": "update_mask", - "type": "google.protobuf.field_mask_pb2.FieldMask" + "type": "google.cloud.discoveryengine_v1.types.AnswerQueryRequest" }, { "name": "retry", @@ -1412,22 +1404,22 @@ "type": "Sequence[Tuple[str, 
str]" } ], - "resultType": "google.cloud.discoveryengine_v1.types.Conversation", - "shortName": "update_conversation" + "resultType": "google.cloud.discoveryengine_v1.types.AnswerQueryResponse", + "shortName": "answer_query" }, - "description": "Sample for UpdateConversation", - "file": "discoveryengine_v1_generated_conversational_search_service_update_conversation_sync.py", + "description": "Sample for AnswerQuery", + "file": "discoveryengine_v1_generated_conversational_search_service_answer_query_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "discoveryengine_v1_generated_ConversationalSearchService_UpdateConversation_sync", + "regionTag": "discoveryengine_v1_generated_ConversationalSearchService_AnswerQuery_sync", "segments": [ { - "end": 50, + "end": 55, "start": 27, "type": "FULL" }, { - "end": 50, + "end": 55, "start": 27, "type": "SHORT" }, @@ -1437,56 +1429,52 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 44, + "end": 49, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 47, - "start": 45, + "end": 52, + "start": 50, "type": "REQUEST_EXECUTION" }, { - "end": 51, - "start": 48, + "end": 56, + "start": 53, "type": "RESPONSE_HANDLING" } ], - "title": "discoveryengine_v1_generated_conversational_search_service_update_conversation_sync.py" + "title": "discoveryengine_v1_generated_conversational_search_service_answer_query_sync.py" }, { "canonical": true, "clientMethod": { "async": true, "client": { - "fullName": "google.cloud.discoveryengine_v1.DataStoreServiceAsyncClient", - "shortName": "DataStoreServiceAsyncClient" + "fullName": "google.cloud.discoveryengine_v1.ConversationalSearchServiceAsyncClient", + "shortName": "ConversationalSearchServiceAsyncClient" }, - "fullName": "google.cloud.discoveryengine_v1.DataStoreServiceAsyncClient.create_data_store", + "fullName": "google.cloud.discoveryengine_v1.ConversationalSearchServiceAsyncClient.converse_conversation", "method": { - "fullName": "google.cloud.discoveryengine.v1.DataStoreService.CreateDataStore", + "fullName": "google.cloud.discoveryengine.v1.ConversationalSearchService.ConverseConversation", "service": { - "fullName": "google.cloud.discoveryengine.v1.DataStoreService", - "shortName": "DataStoreService" + "fullName": "google.cloud.discoveryengine.v1.ConversationalSearchService", + "shortName": "ConversationalSearchService" }, - "shortName": "CreateDataStore" + "shortName": "ConverseConversation" }, "parameters": [ { "name": "request", - "type": "google.cloud.discoveryengine_v1.types.CreateDataStoreRequest" + "type": "google.cloud.discoveryengine_v1.types.ConverseConversationRequest" }, { - "name": "parent", + "name": "name", "type": "str" }, { - "name": "data_store", - "type": "google.cloud.discoveryengine_v1.types.DataStore" - }, - { - "name": "data_store_id", - "type": "str" + "name": "query", + "type": "google.cloud.discoveryengine_v1.types.TextInput" }, { "name": "retry", @@ -1501,22 +1489,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "create_data_store" + "resultType": "google.cloud.discoveryengine_v1.types.ConverseConversationResponse", + "shortName": "converse_conversation" }, - "description": "Sample for CreateDataStore", - "file": "discoveryengine_v1_generated_data_store_service_create_data_store_async.py", + "description": "Sample for ConverseConversation", + "file": "discoveryengine_v1_generated_conversational_search_service_converse_conversation_async.py", "language": "PYTHON", "origin": 
"API_DEFINITION", - "regionTag": "discoveryengine_v1_generated_DataStoreService_CreateDataStore_async", + "regionTag": "discoveryengine_v1_generated_ConversationalSearchService_ConverseConversation_async", "segments": [ { - "end": 60, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 60, + "end": 51, "start": 27, "type": "SHORT" }, @@ -1526,55 +1514,51 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 50, + "end": 45, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 57, - "start": 51, + "end": 48, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 61, - "start": 58, + "end": 52, + "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "discoveryengine_v1_generated_data_store_service_create_data_store_async.py" + "title": "discoveryengine_v1_generated_conversational_search_service_converse_conversation_async.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.discoveryengine_v1.DataStoreServiceClient", - "shortName": "DataStoreServiceClient" + "fullName": "google.cloud.discoveryengine_v1.ConversationalSearchServiceClient", + "shortName": "ConversationalSearchServiceClient" }, - "fullName": "google.cloud.discoveryengine_v1.DataStoreServiceClient.create_data_store", + "fullName": "google.cloud.discoveryengine_v1.ConversationalSearchServiceClient.converse_conversation", "method": { - "fullName": "google.cloud.discoveryengine.v1.DataStoreService.CreateDataStore", + "fullName": "google.cloud.discoveryengine.v1.ConversationalSearchService.ConverseConversation", "service": { - "fullName": "google.cloud.discoveryengine.v1.DataStoreService", - "shortName": "DataStoreService" + "fullName": "google.cloud.discoveryengine.v1.ConversationalSearchService", + "shortName": "ConversationalSearchService" }, - "shortName": "CreateDataStore" + "shortName": "ConverseConversation" }, "parameters": [ { "name": "request", - "type": "google.cloud.discoveryengine_v1.types.CreateDataStoreRequest" + "type": "google.cloud.discoveryengine_v1.types.ConverseConversationRequest" }, { - "name": "parent", + "name": "name", "type": "str" }, { - "name": "data_store", - "type": "google.cloud.discoveryengine_v1.types.DataStore" - }, - { - "name": "data_store_id", - "type": "str" + "name": "query", + "type": "google.cloud.discoveryengine_v1.types.TextInput" }, { "name": "retry", @@ -1589,22 +1573,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.api_core.operation.Operation", - "shortName": "create_data_store" + "resultType": "google.cloud.discoveryengine_v1.types.ConverseConversationResponse", + "shortName": "converse_conversation" }, - "description": "Sample for CreateDataStore", - "file": "discoveryengine_v1_generated_data_store_service_create_data_store_sync.py", + "description": "Sample for ConverseConversation", + "file": "discoveryengine_v1_generated_conversational_search_service_converse_conversation_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "discoveryengine_v1_generated_DataStoreService_CreateDataStore_sync", + "regionTag": "discoveryengine_v1_generated_ConversationalSearchService_ConverseConversation_sync", "segments": [ { - "end": 60, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 60, + "end": 51, "start": 27, "type": "SHORT" }, @@ -1614,49 +1598,53 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 50, + "end": 45, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 57, - "start": 51, + "end": 48, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 61, - "start": 58, + "end": 52, + "start": 
49, "type": "RESPONSE_HANDLING" } ], - "title": "discoveryengine_v1_generated_data_store_service_create_data_store_sync.py" + "title": "discoveryengine_v1_generated_conversational_search_service_converse_conversation_sync.py" }, { "canonical": true, "clientMethod": { "async": true, "client": { - "fullName": "google.cloud.discoveryengine_v1.DataStoreServiceAsyncClient", - "shortName": "DataStoreServiceAsyncClient" + "fullName": "google.cloud.discoveryengine_v1.ConversationalSearchServiceAsyncClient", + "shortName": "ConversationalSearchServiceAsyncClient" }, - "fullName": "google.cloud.discoveryengine_v1.DataStoreServiceAsyncClient.delete_data_store", + "fullName": "google.cloud.discoveryengine_v1.ConversationalSearchServiceAsyncClient.create_conversation", "method": { - "fullName": "google.cloud.discoveryengine.v1.DataStoreService.DeleteDataStore", + "fullName": "google.cloud.discoveryengine.v1.ConversationalSearchService.CreateConversation", "service": { - "fullName": "google.cloud.discoveryengine.v1.DataStoreService", - "shortName": "DataStoreService" + "fullName": "google.cloud.discoveryengine.v1.ConversationalSearchService", + "shortName": "ConversationalSearchService" }, - "shortName": "DeleteDataStore" + "shortName": "CreateConversation" }, "parameters": [ { "name": "request", - "type": "google.cloud.discoveryengine_v1.types.DeleteDataStoreRequest" + "type": "google.cloud.discoveryengine_v1.types.CreateConversationRequest" }, { - "name": "name", + "name": "parent", "type": "str" }, + { + "name": "conversation", + "type": "google.cloud.discoveryengine_v1.types.Conversation" + }, { "name": "retry", "type": "google.api_core.retry.Retry" @@ -1670,22 +1658,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "delete_data_store" + "resultType": "google.cloud.discoveryengine_v1.types.Conversation", + "shortName": "create_conversation" }, - "description": "Sample for DeleteDataStore", - "file": "discoveryengine_v1_generated_data_store_service_delete_data_store_async.py", + "description": "Sample for CreateConversation", + "file": "discoveryengine_v1_generated_conversational_search_service_create_conversation_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "discoveryengine_v1_generated_DataStoreService_DeleteDataStore_async", + "regionTag": "discoveryengine_v1_generated_ConversationalSearchService_CreateConversation_async", "segments": [ { - "end": 55, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 55, + "end": 51, "start": 27, "type": "SHORT" }, @@ -1700,43 +1688,47 @@ "type": "REQUEST_INITIALIZATION" }, { - "end": 52, + "end": 48, "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 56, - "start": 53, + "end": 52, + "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "discoveryengine_v1_generated_data_store_service_delete_data_store_async.py" + "title": "discoveryengine_v1_generated_conversational_search_service_create_conversation_async.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.discoveryengine_v1.DataStoreServiceClient", - "shortName": "DataStoreServiceClient" + "fullName": "google.cloud.discoveryengine_v1.ConversationalSearchServiceClient", + "shortName": "ConversationalSearchServiceClient" }, - "fullName": "google.cloud.discoveryengine_v1.DataStoreServiceClient.delete_data_store", + "fullName": "google.cloud.discoveryengine_v1.ConversationalSearchServiceClient.create_conversation", "method": { - "fullName": 
"google.cloud.discoveryengine.v1.DataStoreService.DeleteDataStore", + "fullName": "google.cloud.discoveryengine.v1.ConversationalSearchService.CreateConversation", "service": { - "fullName": "google.cloud.discoveryengine.v1.DataStoreService", - "shortName": "DataStoreService" + "fullName": "google.cloud.discoveryengine.v1.ConversationalSearchService", + "shortName": "ConversationalSearchService" }, - "shortName": "DeleteDataStore" + "shortName": "CreateConversation" }, "parameters": [ { "name": "request", - "type": "google.cloud.discoveryengine_v1.types.DeleteDataStoreRequest" + "type": "google.cloud.discoveryengine_v1.types.CreateConversationRequest" }, { - "name": "name", + "name": "parent", "type": "str" }, + { + "name": "conversation", + "type": "google.cloud.discoveryengine_v1.types.Conversation" + }, { "name": "retry", "type": "google.api_core.retry.Retry" @@ -1750,22 +1742,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.api_core.operation.Operation", - "shortName": "delete_data_store" + "resultType": "google.cloud.discoveryengine_v1.types.Conversation", + "shortName": "create_conversation" }, - "description": "Sample for DeleteDataStore", - "file": "discoveryengine_v1_generated_data_store_service_delete_data_store_sync.py", + "description": "Sample for CreateConversation", + "file": "discoveryengine_v1_generated_conversational_search_service_create_conversation_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "discoveryengine_v1_generated_DataStoreService_DeleteDataStore_sync", + "regionTag": "discoveryengine_v1_generated_ConversationalSearchService_CreateConversation_sync", "segments": [ { - "end": 55, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 55, + "end": 51, "start": 27, "type": "SHORT" }, @@ -1780,44 +1772,48 @@ "type": "REQUEST_INITIALIZATION" }, { - "end": 52, + "end": 48, "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 56, - "start": 53, + "end": 52, + "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "discoveryengine_v1_generated_data_store_service_delete_data_store_sync.py" + "title": "discoveryengine_v1_generated_conversational_search_service_create_conversation_sync.py" }, { "canonical": true, "clientMethod": { "async": true, "client": { - "fullName": "google.cloud.discoveryengine_v1.DataStoreServiceAsyncClient", - "shortName": "DataStoreServiceAsyncClient" + "fullName": "google.cloud.discoveryengine_v1.ConversationalSearchServiceAsyncClient", + "shortName": "ConversationalSearchServiceAsyncClient" }, - "fullName": "google.cloud.discoveryengine_v1.DataStoreServiceAsyncClient.get_data_store", + "fullName": "google.cloud.discoveryengine_v1.ConversationalSearchServiceAsyncClient.create_session", "method": { - "fullName": "google.cloud.discoveryengine.v1.DataStoreService.GetDataStore", + "fullName": "google.cloud.discoveryengine.v1.ConversationalSearchService.CreateSession", "service": { - "fullName": "google.cloud.discoveryengine.v1.DataStoreService", - "shortName": "DataStoreService" + "fullName": "google.cloud.discoveryengine.v1.ConversationalSearchService", + "shortName": "ConversationalSearchService" }, - "shortName": "GetDataStore" + "shortName": "CreateSession" }, "parameters": [ { "name": "request", - "type": "google.cloud.discoveryengine_v1.types.GetDataStoreRequest" + "type": "google.cloud.discoveryengine_v1.types.CreateSessionRequest" }, { - "name": "name", + "name": "parent", "type": "str" }, + { + "name": "session", + "type": "google.cloud.discoveryengine_v1.types.Session" + }, { "name": 
"retry", "type": "google.api_core.retry.Retry" @@ -1831,14 +1827,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.discoveryengine_v1.types.DataStore", - "shortName": "get_data_store" + "resultType": "google.cloud.discoveryengine_v1.types.Session", + "shortName": "create_session" }, - "description": "Sample for GetDataStore", - "file": "discoveryengine_v1_generated_data_store_service_get_data_store_async.py", + "description": "Sample for CreateSession", + "file": "discoveryengine_v1_generated_conversational_search_service_create_session_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "discoveryengine_v1_generated_DataStoreService_GetDataStore_async", + "regionTag": "discoveryengine_v1_generated_ConversationalSearchService_CreateSession_async", "segments": [ { "end": 51, @@ -1871,33 +1867,37 @@ "type": "RESPONSE_HANDLING" } ], - "title": "discoveryengine_v1_generated_data_store_service_get_data_store_async.py" + "title": "discoveryengine_v1_generated_conversational_search_service_create_session_async.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.discoveryengine_v1.DataStoreServiceClient", - "shortName": "DataStoreServiceClient" + "fullName": "google.cloud.discoveryengine_v1.ConversationalSearchServiceClient", + "shortName": "ConversationalSearchServiceClient" }, - "fullName": "google.cloud.discoveryengine_v1.DataStoreServiceClient.get_data_store", + "fullName": "google.cloud.discoveryengine_v1.ConversationalSearchServiceClient.create_session", "method": { - "fullName": "google.cloud.discoveryengine.v1.DataStoreService.GetDataStore", + "fullName": "google.cloud.discoveryengine.v1.ConversationalSearchService.CreateSession", "service": { - "fullName": "google.cloud.discoveryengine.v1.DataStoreService", - "shortName": "DataStoreService" + "fullName": "google.cloud.discoveryengine.v1.ConversationalSearchService", + "shortName": "ConversationalSearchService" }, - "shortName": "GetDataStore" + "shortName": "CreateSession" }, "parameters": [ { "name": "request", - "type": "google.cloud.discoveryengine_v1.types.GetDataStoreRequest" + "type": "google.cloud.discoveryengine_v1.types.CreateSessionRequest" }, { - "name": "name", + "name": "parent", "type": "str" }, + { + "name": "session", + "type": "google.cloud.discoveryengine_v1.types.Session" + }, { "name": "retry", "type": "google.api_core.retry.Retry" @@ -1911,14 +1911,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.discoveryengine_v1.types.DataStore", - "shortName": "get_data_store" + "resultType": "google.cloud.discoveryengine_v1.types.Session", + "shortName": "create_session" }, - "description": "Sample for GetDataStore", - "file": "discoveryengine_v1_generated_data_store_service_get_data_store_sync.py", + "description": "Sample for CreateSession", + "file": "discoveryengine_v1_generated_conversational_search_service_create_session_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "discoveryengine_v1_generated_DataStoreService_GetDataStore_sync", + "regionTag": "discoveryengine_v1_generated_ConversationalSearchService_CreateSession_sync", "segments": [ { "end": 51, @@ -1951,32 +1951,32 @@ "type": "RESPONSE_HANDLING" } ], - "title": "discoveryengine_v1_generated_data_store_service_get_data_store_sync.py" + "title": "discoveryengine_v1_generated_conversational_search_service_create_session_sync.py" }, { "canonical": true, "clientMethod": { "async": true, "client": { - "fullName": 
"google.cloud.discoveryengine_v1.DataStoreServiceAsyncClient", - "shortName": "DataStoreServiceAsyncClient" + "fullName": "google.cloud.discoveryengine_v1.ConversationalSearchServiceAsyncClient", + "shortName": "ConversationalSearchServiceAsyncClient" }, - "fullName": "google.cloud.discoveryengine_v1.DataStoreServiceAsyncClient.list_data_stores", + "fullName": "google.cloud.discoveryengine_v1.ConversationalSearchServiceAsyncClient.delete_conversation", "method": { - "fullName": "google.cloud.discoveryengine.v1.DataStoreService.ListDataStores", + "fullName": "google.cloud.discoveryengine.v1.ConversationalSearchService.DeleteConversation", "service": { - "fullName": "google.cloud.discoveryengine.v1.DataStoreService", - "shortName": "DataStoreService" + "fullName": "google.cloud.discoveryengine.v1.ConversationalSearchService", + "shortName": "ConversationalSearchService" }, - "shortName": "ListDataStores" + "shortName": "DeleteConversation" }, "parameters": [ { "name": "request", - "type": "google.cloud.discoveryengine_v1.types.ListDataStoresRequest" + "type": "google.cloud.discoveryengine_v1.types.DeleteConversationRequest" }, { - "name": "parent", + "name": "name", "type": "str" }, { @@ -1992,22 +1992,21 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.discoveryengine_v1.services.data_store_service.pagers.ListDataStoresAsyncPager", - "shortName": "list_data_stores" + "shortName": "delete_conversation" }, - "description": "Sample for ListDataStores", - "file": "discoveryengine_v1_generated_data_store_service_list_data_stores_async.py", + "description": "Sample for DeleteConversation", + "file": "discoveryengine_v1_generated_conversational_search_service_delete_conversation_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "discoveryengine_v1_generated_DataStoreService_ListDataStores_async", + "regionTag": "discoveryengine_v1_generated_ConversationalSearchService_DeleteConversation_async", "segments": [ { - "end": 52, + "end": 49, "start": 27, "type": "FULL" }, { - "end": 52, + "end": 49, "start": 27, "type": "SHORT" }, @@ -2022,41 +2021,39 @@ "type": "REQUEST_INITIALIZATION" }, { - "end": 48, "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 53, - "start": 49, + "end": 50, "type": "RESPONSE_HANDLING" } ], - "title": "discoveryengine_v1_generated_data_store_service_list_data_stores_async.py" + "title": "discoveryengine_v1_generated_conversational_search_service_delete_conversation_async.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.discoveryengine_v1.DataStoreServiceClient", - "shortName": "DataStoreServiceClient" + "fullName": "google.cloud.discoveryengine_v1.ConversationalSearchServiceClient", + "shortName": "ConversationalSearchServiceClient" }, - "fullName": "google.cloud.discoveryengine_v1.DataStoreServiceClient.list_data_stores", + "fullName": "google.cloud.discoveryengine_v1.ConversationalSearchServiceClient.delete_conversation", "method": { - "fullName": "google.cloud.discoveryengine.v1.DataStoreService.ListDataStores", + "fullName": "google.cloud.discoveryengine.v1.ConversationalSearchService.DeleteConversation", "service": { - "fullName": "google.cloud.discoveryengine.v1.DataStoreService", - "shortName": "DataStoreService" + "fullName": "google.cloud.discoveryengine.v1.ConversationalSearchService", + "shortName": "ConversationalSearchService" }, - "shortName": "ListDataStores" + "shortName": "DeleteConversation" }, "parameters": [ { "name": "request", - "type": 
"google.cloud.discoveryengine_v1.types.ListDataStoresRequest" + "type": "google.cloud.discoveryengine_v1.types.DeleteConversationRequest" }, { - "name": "parent", + "name": "name", "type": "str" }, { @@ -2072,22 +2069,21 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.discoveryengine_v1.services.data_store_service.pagers.ListDataStoresPager", - "shortName": "list_data_stores" + "shortName": "delete_conversation" }, - "description": "Sample for ListDataStores", - "file": "discoveryengine_v1_generated_data_store_service_list_data_stores_sync.py", + "description": "Sample for DeleteConversation", + "file": "discoveryengine_v1_generated_conversational_search_service_delete_conversation_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "discoveryengine_v1_generated_DataStoreService_ListDataStores_sync", + "regionTag": "discoveryengine_v1_generated_ConversationalSearchService_DeleteConversation_sync", "segments": [ { - "end": 52, + "end": 49, "start": 27, "type": "FULL" }, { - "end": 52, + "end": 49, "start": 27, "type": "SHORT" }, @@ -2102,47 +2098,41 @@ "type": "REQUEST_INITIALIZATION" }, { - "end": 48, "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 53, - "start": 49, + "end": 50, "type": "RESPONSE_HANDLING" } ], - "title": "discoveryengine_v1_generated_data_store_service_list_data_stores_sync.py" + "title": "discoveryengine_v1_generated_conversational_search_service_delete_conversation_sync.py" }, { "canonical": true, "clientMethod": { "async": true, "client": { - "fullName": "google.cloud.discoveryengine_v1.DataStoreServiceAsyncClient", - "shortName": "DataStoreServiceAsyncClient" + "fullName": "google.cloud.discoveryengine_v1.ConversationalSearchServiceAsyncClient", + "shortName": "ConversationalSearchServiceAsyncClient" }, - "fullName": "google.cloud.discoveryengine_v1.DataStoreServiceAsyncClient.update_data_store", + "fullName": "google.cloud.discoveryengine_v1.ConversationalSearchServiceAsyncClient.delete_session", "method": { - "fullName": "google.cloud.discoveryengine.v1.DataStoreService.UpdateDataStore", + "fullName": "google.cloud.discoveryengine.v1.ConversationalSearchService.DeleteSession", "service": { - "fullName": "google.cloud.discoveryengine.v1.DataStoreService", - "shortName": "DataStoreService" + "fullName": "google.cloud.discoveryengine.v1.ConversationalSearchService", + "shortName": "ConversationalSearchService" }, - "shortName": "UpdateDataStore" + "shortName": "DeleteSession" }, "parameters": [ { "name": "request", - "type": "google.cloud.discoveryengine_v1.types.UpdateDataStoreRequest" - }, - { - "name": "data_store", - "type": "google.cloud.discoveryengine_v1.types.DataStore" + "type": "google.cloud.discoveryengine_v1.types.DeleteSessionRequest" }, { - "name": "update_mask", - "type": "google.protobuf.field_mask_pb2.FieldMask" + "name": "name", + "type": "str" }, { "name": "retry", @@ -2157,22 +2147,21 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.discoveryengine_v1.types.DataStore", - "shortName": "update_data_store" + "shortName": "delete_session" }, - "description": "Sample for UpdateDataStore", - "file": "discoveryengine_v1_generated_data_store_service_update_data_store_async.py", + "description": "Sample for DeleteSession", + "file": "discoveryengine_v1_generated_conversational_search_service_delete_session_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "discoveryengine_v1_generated_DataStoreService_UpdateDataStore_async", + "regionTag": 
"discoveryengine_v1_generated_ConversationalSearchService_DeleteSession_async", "segments": [ { - "end": 54, + "end": 49, "start": 27, "type": "FULL" }, { - "end": 54, + "end": 49, "start": 27, "type": "SHORT" }, @@ -2182,51 +2171,45 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 48, + "end": 45, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 51, - "start": 49, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 55, - "start": 52, + "end": 50, "type": "RESPONSE_HANDLING" } ], - "title": "discoveryengine_v1_generated_data_store_service_update_data_store_async.py" + "title": "discoveryengine_v1_generated_conversational_search_service_delete_session_async.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.discoveryengine_v1.DataStoreServiceClient", - "shortName": "DataStoreServiceClient" + "fullName": "google.cloud.discoveryengine_v1.ConversationalSearchServiceClient", + "shortName": "ConversationalSearchServiceClient" }, - "fullName": "google.cloud.discoveryengine_v1.DataStoreServiceClient.update_data_store", + "fullName": "google.cloud.discoveryengine_v1.ConversationalSearchServiceClient.delete_session", "method": { - "fullName": "google.cloud.discoveryengine.v1.DataStoreService.UpdateDataStore", + "fullName": "google.cloud.discoveryengine.v1.ConversationalSearchService.DeleteSession", "service": { - "fullName": "google.cloud.discoveryengine.v1.DataStoreService", - "shortName": "DataStoreService" + "fullName": "google.cloud.discoveryengine.v1.ConversationalSearchService", + "shortName": "ConversationalSearchService" }, - "shortName": "UpdateDataStore" + "shortName": "DeleteSession" }, "parameters": [ { "name": "request", - "type": "google.cloud.discoveryengine_v1.types.UpdateDataStoreRequest" - }, - { - "name": "data_store", - "type": "google.cloud.discoveryengine_v1.types.DataStore" + "type": "google.cloud.discoveryengine_v1.types.DeleteSessionRequest" }, { - "name": "update_mask", - "type": "google.protobuf.field_mask_pb2.FieldMask" + "name": "name", + "type": "str" }, { "name": "retry", @@ -2241,22 +2224,21 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.discoveryengine_v1.types.DataStore", - "shortName": "update_data_store" + "shortName": "delete_session" }, - "description": "Sample for UpdateDataStore", - "file": "discoveryengine_v1_generated_data_store_service_update_data_store_sync.py", + "description": "Sample for DeleteSession", + "file": "discoveryengine_v1_generated_conversational_search_service_delete_session_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "discoveryengine_v1_generated_DataStoreService_UpdateDataStore_sync", + "regionTag": "discoveryengine_v1_generated_ConversationalSearchService_DeleteSession_sync", "segments": [ { - "end": 54, + "end": 49, "start": 27, "type": "FULL" }, { - "end": 54, + "end": 49, "start": 27, "type": "SHORT" }, @@ -2266,55 +2248,45 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 48, + "end": 45, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 51, - "start": 49, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 55, - "start": 52, + "end": 50, "type": "RESPONSE_HANDLING" } ], - "title": "discoveryengine_v1_generated_data_store_service_update_data_store_sync.py" + "title": "discoveryengine_v1_generated_conversational_search_service_delete_session_sync.py" }, { "canonical": true, "clientMethod": { "async": true, "client": { - "fullName": "google.cloud.discoveryengine_v1.DocumentServiceAsyncClient", - "shortName": 
"DocumentServiceAsyncClient" + "fullName": "google.cloud.discoveryengine_v1.ConversationalSearchServiceAsyncClient", + "shortName": "ConversationalSearchServiceAsyncClient" }, - "fullName": "google.cloud.discoveryengine_v1.DocumentServiceAsyncClient.create_document", + "fullName": "google.cloud.discoveryengine_v1.ConversationalSearchServiceAsyncClient.get_answer", "method": { - "fullName": "google.cloud.discoveryengine.v1.DocumentService.CreateDocument", + "fullName": "google.cloud.discoveryengine.v1.ConversationalSearchService.GetAnswer", "service": { - "fullName": "google.cloud.discoveryengine.v1.DocumentService", - "shortName": "DocumentService" + "fullName": "google.cloud.discoveryengine.v1.ConversationalSearchService", + "shortName": "ConversationalSearchService" }, - "shortName": "CreateDocument" + "shortName": "GetAnswer" }, "parameters": [ { "name": "request", - "type": "google.cloud.discoveryengine_v1.types.CreateDocumentRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "document", - "type": "google.cloud.discoveryengine_v1.types.Document" + "type": "google.cloud.discoveryengine_v1.types.GetAnswerRequest" }, { - "name": "document_id", + "name": "name", "type": "str" }, { @@ -2330,22 +2302,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.discoveryengine_v1.types.Document", - "shortName": "create_document" + "resultType": "google.cloud.discoveryengine_v1.types.Answer", + "shortName": "get_answer" }, - "description": "Sample for CreateDocument", - "file": "discoveryengine_v1_generated_document_service_create_document_async.py", + "description": "Sample for GetAnswer", + "file": "discoveryengine_v1_generated_conversational_search_service_get_answer_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "discoveryengine_v1_generated_DocumentService_CreateDocument_async", + "regionTag": "discoveryengine_v1_generated_ConversationalSearchService_GetAnswer_async", "segments": [ { - "end": 52, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 52, + "end": 51, "start": 27, "type": "SHORT" }, @@ -2355,54 +2327,46 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 46, + "end": 45, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 49, - "start": 47, - "type": "REQUEST_EXECUTION" + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" }, { - "end": 53, - "start": 50, + "end": 52, + "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "discoveryengine_v1_generated_document_service_create_document_async.py" + "title": "discoveryengine_v1_generated_conversational_search_service_get_answer_async.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.discoveryengine_v1.DocumentServiceClient", - "shortName": "DocumentServiceClient" + "fullName": "google.cloud.discoveryengine_v1.ConversationalSearchServiceClient", + "shortName": "ConversationalSearchServiceClient" }, - "fullName": "google.cloud.discoveryengine_v1.DocumentServiceClient.create_document", + "fullName": "google.cloud.discoveryengine_v1.ConversationalSearchServiceClient.get_answer", "method": { - "fullName": "google.cloud.discoveryengine.v1.DocumentService.CreateDocument", + "fullName": "google.cloud.discoveryengine.v1.ConversationalSearchService.GetAnswer", "service": { - "fullName": "google.cloud.discoveryengine.v1.DocumentService", - "shortName": "DocumentService" + "fullName": "google.cloud.discoveryengine.v1.ConversationalSearchService", + "shortName": "ConversationalSearchService" }, - "shortName": "CreateDocument" 
+ "shortName": "GetAnswer" }, "parameters": [ { "name": "request", - "type": "google.cloud.discoveryengine_v1.types.CreateDocumentRequest" - }, - { - "name": "parent", - "type": "str" + "type": "google.cloud.discoveryengine_v1.types.GetAnswerRequest" }, { - "name": "document", - "type": "google.cloud.discoveryengine_v1.types.Document" - }, - { - "name": "document_id", + "name": "name", "type": "str" }, { @@ -2418,22 +2382,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.discoveryengine_v1.types.Document", - "shortName": "create_document" + "resultType": "google.cloud.discoveryengine_v1.types.Answer", + "shortName": "get_answer" }, - "description": "Sample for CreateDocument", - "file": "discoveryengine_v1_generated_document_service_create_document_sync.py", + "description": "Sample for GetAnswer", + "file": "discoveryengine_v1_generated_conversational_search_service_get_answer_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "discoveryengine_v1_generated_DocumentService_CreateDocument_sync", + "regionTag": "discoveryengine_v1_generated_ConversationalSearchService_GetAnswer_sync", "segments": [ { - "end": 52, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 52, + "end": 51, "start": 27, "type": "SHORT" }, @@ -2443,44 +2407,44 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 46, + "end": 45, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 49, - "start": 47, + "end": 48, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 53, - "start": 50, + "end": 52, + "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "discoveryengine_v1_generated_document_service_create_document_sync.py" + "title": "discoveryengine_v1_generated_conversational_search_service_get_answer_sync.py" }, { "canonical": true, "clientMethod": { "async": true, "client": { - "fullName": "google.cloud.discoveryengine_v1.DocumentServiceAsyncClient", - "shortName": "DocumentServiceAsyncClient" + "fullName": "google.cloud.discoveryengine_v1.ConversationalSearchServiceAsyncClient", + "shortName": "ConversationalSearchServiceAsyncClient" }, - "fullName": "google.cloud.discoveryengine_v1.DocumentServiceAsyncClient.delete_document", + "fullName": "google.cloud.discoveryengine_v1.ConversationalSearchServiceAsyncClient.get_conversation", "method": { - "fullName": "google.cloud.discoveryengine.v1.DocumentService.DeleteDocument", + "fullName": "google.cloud.discoveryengine.v1.ConversationalSearchService.GetConversation", "service": { - "fullName": "google.cloud.discoveryengine.v1.DocumentService", - "shortName": "DocumentService" + "fullName": "google.cloud.discoveryengine.v1.ConversationalSearchService", + "shortName": "ConversationalSearchService" }, - "shortName": "DeleteDocument" + "shortName": "GetConversation" }, "parameters": [ { "name": "request", - "type": "google.cloud.discoveryengine_v1.types.DeleteDocumentRequest" + "type": "google.cloud.discoveryengine_v1.types.GetConversationRequest" }, { "name": "name", @@ -2499,21 +2463,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "shortName": "delete_document" + "resultType": "google.cloud.discoveryengine_v1.types.Conversation", + "shortName": "get_conversation" }, - "description": "Sample for DeleteDocument", - "file": "discoveryengine_v1_generated_document_service_delete_document_async.py", + "description": "Sample for GetConversation", + "file": "discoveryengine_v1_generated_conversational_search_service_get_conversation_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": 
"discoveryengine_v1_generated_DocumentService_DeleteDocument_async", + "regionTag": "discoveryengine_v1_generated_ConversationalSearchService_GetConversation_async", "segments": [ { - "end": 49, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 49, + "end": 51, "start": 27, "type": "SHORT" }, @@ -2528,36 +2493,38 @@ "type": "REQUEST_INITIALIZATION" }, { + "end": 48, "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 50, + "end": 52, + "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "discoveryengine_v1_generated_document_service_delete_document_async.py" + "title": "discoveryengine_v1_generated_conversational_search_service_get_conversation_async.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.discoveryengine_v1.DocumentServiceClient", - "shortName": "DocumentServiceClient" + "fullName": "google.cloud.discoveryengine_v1.ConversationalSearchServiceClient", + "shortName": "ConversationalSearchServiceClient" }, - "fullName": "google.cloud.discoveryengine_v1.DocumentServiceClient.delete_document", + "fullName": "google.cloud.discoveryengine_v1.ConversationalSearchServiceClient.get_conversation", "method": { - "fullName": "google.cloud.discoveryengine.v1.DocumentService.DeleteDocument", + "fullName": "google.cloud.discoveryengine.v1.ConversationalSearchService.GetConversation", "service": { - "fullName": "google.cloud.discoveryengine.v1.DocumentService", - "shortName": "DocumentService" + "fullName": "google.cloud.discoveryengine.v1.ConversationalSearchService", + "shortName": "ConversationalSearchService" }, - "shortName": "DeleteDocument" + "shortName": "GetConversation" }, "parameters": [ { "name": "request", - "type": "google.cloud.discoveryengine_v1.types.DeleteDocumentRequest" + "type": "google.cloud.discoveryengine_v1.types.GetConversationRequest" }, { "name": "name", @@ -2576,21 +2543,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "shortName": "delete_document" + "resultType": "google.cloud.discoveryengine_v1.types.Conversation", + "shortName": "get_conversation" }, - "description": "Sample for DeleteDocument", - "file": "discoveryengine_v1_generated_document_service_delete_document_sync.py", + "description": "Sample for GetConversation", + "file": "discoveryengine_v1_generated_conversational_search_service_get_conversation_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "discoveryengine_v1_generated_DocumentService_DeleteDocument_sync", + "regionTag": "discoveryengine_v1_generated_ConversationalSearchService_GetConversation_sync", "segments": [ { - "end": 49, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 49, + "end": 51, "start": 27, "type": "SHORT" }, @@ -2605,37 +2573,39 @@ "type": "REQUEST_INITIALIZATION" }, { + "end": 48, "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 50, + "end": 52, + "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "discoveryengine_v1_generated_document_service_delete_document_sync.py" + "title": "discoveryengine_v1_generated_conversational_search_service_get_conversation_sync.py" }, { "canonical": true, "clientMethod": { "async": true, "client": { - "fullName": "google.cloud.discoveryengine_v1.DocumentServiceAsyncClient", - "shortName": "DocumentServiceAsyncClient" + "fullName": "google.cloud.discoveryengine_v1.ConversationalSearchServiceAsyncClient", + "shortName": "ConversationalSearchServiceAsyncClient" }, - "fullName": "google.cloud.discoveryengine_v1.DocumentServiceAsyncClient.get_document", + "fullName": 
"google.cloud.discoveryengine_v1.ConversationalSearchServiceAsyncClient.get_session", "method": { - "fullName": "google.cloud.discoveryengine.v1.DocumentService.GetDocument", + "fullName": "google.cloud.discoveryengine.v1.ConversationalSearchService.GetSession", "service": { - "fullName": "google.cloud.discoveryengine.v1.DocumentService", - "shortName": "DocumentService" + "fullName": "google.cloud.discoveryengine.v1.ConversationalSearchService", + "shortName": "ConversationalSearchService" }, - "shortName": "GetDocument" + "shortName": "GetSession" }, "parameters": [ { "name": "request", - "type": "google.cloud.discoveryengine_v1.types.GetDocumentRequest" + "type": "google.cloud.discoveryengine_v1.types.GetSessionRequest" }, { "name": "name", @@ -2654,14 +2624,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.discoveryengine_v1.types.Document", - "shortName": "get_document" + "resultType": "google.cloud.discoveryengine_v1.types.Session", + "shortName": "get_session" }, - "description": "Sample for GetDocument", - "file": "discoveryengine_v1_generated_document_service_get_document_async.py", + "description": "Sample for GetSession", + "file": "discoveryengine_v1_generated_conversational_search_service_get_session_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "discoveryengine_v1_generated_DocumentService_GetDocument_async", + "regionTag": "discoveryengine_v1_generated_ConversationalSearchService_GetSession_async", "segments": [ { "end": 51, @@ -2694,28 +2664,28 @@ "type": "RESPONSE_HANDLING" } ], - "title": "discoveryengine_v1_generated_document_service_get_document_async.py" + "title": "discoveryengine_v1_generated_conversational_search_service_get_session_async.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.discoveryengine_v1.DocumentServiceClient", - "shortName": "DocumentServiceClient" + "fullName": "google.cloud.discoveryengine_v1.ConversationalSearchServiceClient", + "shortName": "ConversationalSearchServiceClient" }, - "fullName": "google.cloud.discoveryengine_v1.DocumentServiceClient.get_document", + "fullName": "google.cloud.discoveryengine_v1.ConversationalSearchServiceClient.get_session", "method": { - "fullName": "google.cloud.discoveryengine.v1.DocumentService.GetDocument", + "fullName": "google.cloud.discoveryengine.v1.ConversationalSearchService.GetSession", "service": { - "fullName": "google.cloud.discoveryengine.v1.DocumentService", - "shortName": "DocumentService" + "fullName": "google.cloud.discoveryengine.v1.ConversationalSearchService", + "shortName": "ConversationalSearchService" }, - "shortName": "GetDocument" + "shortName": "GetSession" }, "parameters": [ { "name": "request", - "type": "google.cloud.discoveryengine_v1.types.GetDocumentRequest" + "type": "google.cloud.discoveryengine_v1.types.GetSessionRequest" }, { "name": "name", @@ -2734,14 +2704,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.discoveryengine_v1.types.Document", - "shortName": "get_document" + "resultType": "google.cloud.discoveryengine_v1.types.Session", + "shortName": "get_session" }, - "description": "Sample for GetDocument", - "file": "discoveryengine_v1_generated_document_service_get_document_sync.py", + "description": "Sample for GetSession", + "file": "discoveryengine_v1_generated_conversational_search_service_get_session_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "discoveryengine_v1_generated_DocumentService_GetDocument_sync", + 
"regionTag": "discoveryengine_v1_generated_ConversationalSearchService_GetSession_sync", "segments": [ { "end": 51, @@ -2774,29 +2744,33 @@ "type": "RESPONSE_HANDLING" } ], - "title": "discoveryengine_v1_generated_document_service_get_document_sync.py" + "title": "discoveryengine_v1_generated_conversational_search_service_get_session_sync.py" }, { "canonical": true, "clientMethod": { "async": true, "client": { - "fullName": "google.cloud.discoveryengine_v1.DocumentServiceAsyncClient", - "shortName": "DocumentServiceAsyncClient" + "fullName": "google.cloud.discoveryengine_v1.ConversationalSearchServiceAsyncClient", + "shortName": "ConversationalSearchServiceAsyncClient" }, - "fullName": "google.cloud.discoveryengine_v1.DocumentServiceAsyncClient.import_documents", + "fullName": "google.cloud.discoveryengine_v1.ConversationalSearchServiceAsyncClient.list_conversations", "method": { - "fullName": "google.cloud.discoveryengine.v1.DocumentService.ImportDocuments", + "fullName": "google.cloud.discoveryengine.v1.ConversationalSearchService.ListConversations", "service": { - "fullName": "google.cloud.discoveryengine.v1.DocumentService", - "shortName": "DocumentService" + "fullName": "google.cloud.discoveryengine.v1.ConversationalSearchService", + "shortName": "ConversationalSearchService" }, - "shortName": "ImportDocuments" + "shortName": "ListConversations" }, "parameters": [ { "name": "request", - "type": "google.cloud.discoveryengine_v1.types.ImportDocumentsRequest" + "type": "google.cloud.discoveryengine_v1.types.ListConversationsRequest" + }, + { + "name": "parent", + "type": "str" }, { "name": "retry", @@ -2811,22 +2785,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "import_documents" + "resultType": "google.cloud.discoveryengine_v1.services.conversational_search_service.pagers.ListConversationsAsyncPager", + "shortName": "list_conversations" }, - "description": "Sample for ImportDocuments", - "file": "discoveryengine_v1_generated_document_service_import_documents_async.py", + "description": "Sample for ListConversations", + "file": "discoveryengine_v1_generated_conversational_search_service_list_conversations_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "discoveryengine_v1_generated_DocumentService_ImportDocuments_async", + "regionTag": "discoveryengine_v1_generated_ConversationalSearchService_ListConversations_async", "segments": [ { - "end": 55, + "end": 52, "start": 27, "type": "FULL" }, { - "end": 55, + "end": 52, "start": 27, "type": "SHORT" }, @@ -2841,38 +2815,42 @@ "type": "REQUEST_INITIALIZATION" }, { - "end": 52, + "end": 48, "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 56, - "start": 53, + "end": 53, + "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "discoveryengine_v1_generated_document_service_import_documents_async.py" + "title": "discoveryengine_v1_generated_conversational_search_service_list_conversations_async.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.discoveryengine_v1.DocumentServiceClient", - "shortName": "DocumentServiceClient" + "fullName": "google.cloud.discoveryengine_v1.ConversationalSearchServiceClient", + "shortName": "ConversationalSearchServiceClient" }, - "fullName": "google.cloud.discoveryengine_v1.DocumentServiceClient.import_documents", + "fullName": "google.cloud.discoveryengine_v1.ConversationalSearchServiceClient.list_conversations", "method": { - "fullName": 
"google.cloud.discoveryengine.v1.DocumentService.ImportDocuments", + "fullName": "google.cloud.discoveryengine.v1.ConversationalSearchService.ListConversations", "service": { - "fullName": "google.cloud.discoveryengine.v1.DocumentService", - "shortName": "DocumentService" + "fullName": "google.cloud.discoveryengine.v1.ConversationalSearchService", + "shortName": "ConversationalSearchService" }, - "shortName": "ImportDocuments" + "shortName": "ListConversations" }, "parameters": [ { "name": "request", - "type": "google.cloud.discoveryengine_v1.types.ImportDocumentsRequest" + "type": "google.cloud.discoveryengine_v1.types.ListConversationsRequest" + }, + { + "name": "parent", + "type": "str" }, { "name": "retry", @@ -2887,22 +2865,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.api_core.operation.Operation", - "shortName": "import_documents" + "resultType": "google.cloud.discoveryengine_v1.services.conversational_search_service.pagers.ListConversationsPager", + "shortName": "list_conversations" }, - "description": "Sample for ImportDocuments", - "file": "discoveryengine_v1_generated_document_service_import_documents_sync.py", + "description": "Sample for ListConversations", + "file": "discoveryengine_v1_generated_conversational_search_service_list_conversations_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "discoveryengine_v1_generated_DocumentService_ImportDocuments_sync", + "regionTag": "discoveryengine_v1_generated_ConversationalSearchService_ListConversations_sync", "segments": [ { - "end": 55, + "end": 52, "start": 27, "type": "FULL" }, { - "end": 55, + "end": 52, "start": 27, "type": "SHORT" }, @@ -2917,39 +2895,39 @@ "type": "REQUEST_INITIALIZATION" }, { - "end": 52, + "end": 48, "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 56, - "start": 53, + "end": 53, + "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "discoveryengine_v1_generated_document_service_import_documents_sync.py" + "title": "discoveryengine_v1_generated_conversational_search_service_list_conversations_sync.py" }, { "canonical": true, "clientMethod": { "async": true, "client": { - "fullName": "google.cloud.discoveryengine_v1.DocumentServiceAsyncClient", - "shortName": "DocumentServiceAsyncClient" + "fullName": "google.cloud.discoveryengine_v1.ConversationalSearchServiceAsyncClient", + "shortName": "ConversationalSearchServiceAsyncClient" }, - "fullName": "google.cloud.discoveryengine_v1.DocumentServiceAsyncClient.list_documents", + "fullName": "google.cloud.discoveryengine_v1.ConversationalSearchServiceAsyncClient.list_sessions", "method": { - "fullName": "google.cloud.discoveryengine.v1.DocumentService.ListDocuments", + "fullName": "google.cloud.discoveryengine.v1.ConversationalSearchService.ListSessions", "service": { - "fullName": "google.cloud.discoveryengine.v1.DocumentService", - "shortName": "DocumentService" + "fullName": "google.cloud.discoveryengine.v1.ConversationalSearchService", + "shortName": "ConversationalSearchService" }, - "shortName": "ListDocuments" + "shortName": "ListSessions" }, "parameters": [ { "name": "request", - "type": "google.cloud.discoveryengine_v1.types.ListDocumentsRequest" + "type": "google.cloud.discoveryengine_v1.types.ListSessionsRequest" }, { "name": "parent", @@ -2968,14 +2946,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.discoveryengine_v1.services.document_service.pagers.ListDocumentsAsyncPager", - "shortName": "list_documents" + "resultType": 
"google.cloud.discoveryengine_v1.services.conversational_search_service.pagers.ListSessionsAsyncPager", + "shortName": "list_sessions" }, - "description": "Sample for ListDocuments", - "file": "discoveryengine_v1_generated_document_service_list_documents_async.py", + "description": "Sample for ListSessions", + "file": "discoveryengine_v1_generated_conversational_search_service_list_sessions_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "discoveryengine_v1_generated_DocumentService_ListDocuments_async", + "regionTag": "discoveryengine_v1_generated_ConversationalSearchService_ListSessions_async", "segments": [ { "end": 52, @@ -3008,28 +2986,28 @@ "type": "RESPONSE_HANDLING" } ], - "title": "discoveryengine_v1_generated_document_service_list_documents_async.py" + "title": "discoveryengine_v1_generated_conversational_search_service_list_sessions_async.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.discoveryengine_v1.DocumentServiceClient", - "shortName": "DocumentServiceClient" + "fullName": "google.cloud.discoveryengine_v1.ConversationalSearchServiceClient", + "shortName": "ConversationalSearchServiceClient" }, - "fullName": "google.cloud.discoveryengine_v1.DocumentServiceClient.list_documents", + "fullName": "google.cloud.discoveryengine_v1.ConversationalSearchServiceClient.list_sessions", "method": { - "fullName": "google.cloud.discoveryengine.v1.DocumentService.ListDocuments", + "fullName": "google.cloud.discoveryengine.v1.ConversationalSearchService.ListSessions", "service": { - "fullName": "google.cloud.discoveryengine.v1.DocumentService", - "shortName": "DocumentService" + "fullName": "google.cloud.discoveryengine.v1.ConversationalSearchService", + "shortName": "ConversationalSearchService" }, - "shortName": "ListDocuments" + "shortName": "ListSessions" }, "parameters": [ { "name": "request", - "type": "google.cloud.discoveryengine_v1.types.ListDocumentsRequest" + "type": "google.cloud.discoveryengine_v1.types.ListSessionsRequest" }, { "name": "parent", @@ -3048,14 +3026,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.discoveryengine_v1.services.document_service.pagers.ListDocumentsPager", - "shortName": "list_documents" + "resultType": "google.cloud.discoveryengine_v1.services.conversational_search_service.pagers.ListSessionsPager", + "shortName": "list_sessions" }, - "description": "Sample for ListDocuments", - "file": "discoveryengine_v1_generated_document_service_list_documents_sync.py", + "description": "Sample for ListSessions", + "file": "discoveryengine_v1_generated_conversational_search_service_list_sessions_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "discoveryengine_v1_generated_DocumentService_ListDocuments_sync", + "regionTag": "discoveryengine_v1_generated_ConversationalSearchService_ListSessions_sync", "segments": [ { "end": 52, @@ -3088,29 +3066,37 @@ "type": "RESPONSE_HANDLING" } ], - "title": "discoveryengine_v1_generated_document_service_list_documents_sync.py" + "title": "discoveryengine_v1_generated_conversational_search_service_list_sessions_sync.py" }, { "canonical": true, "clientMethod": { "async": true, "client": { - "fullName": "google.cloud.discoveryengine_v1.DocumentServiceAsyncClient", - "shortName": "DocumentServiceAsyncClient" + "fullName": "google.cloud.discoveryengine_v1.ConversationalSearchServiceAsyncClient", + "shortName": "ConversationalSearchServiceAsyncClient" }, - "fullName": 
"google.cloud.discoveryengine_v1.DocumentServiceAsyncClient.purge_documents", + "fullName": "google.cloud.discoveryengine_v1.ConversationalSearchServiceAsyncClient.update_conversation", "method": { - "fullName": "google.cloud.discoveryengine.v1.DocumentService.PurgeDocuments", + "fullName": "google.cloud.discoveryengine.v1.ConversationalSearchService.UpdateConversation", "service": { - "fullName": "google.cloud.discoveryengine.v1.DocumentService", - "shortName": "DocumentService" + "fullName": "google.cloud.discoveryengine.v1.ConversationalSearchService", + "shortName": "ConversationalSearchService" }, - "shortName": "PurgeDocuments" + "shortName": "UpdateConversation" }, "parameters": [ { "name": "request", - "type": "google.cloud.discoveryengine_v1.types.PurgeDocumentsRequest" + "type": "google.cloud.discoveryengine_v1.types.UpdateConversationRequest" + }, + { + "name": "conversation", + "type": "google.cloud.discoveryengine_v1.types.Conversation" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" }, { "name": "retry", @@ -3125,22 +3111,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "purge_documents" + "resultType": "google.cloud.discoveryengine_v1.types.Conversation", + "shortName": "update_conversation" }, - "description": "Sample for PurgeDocuments", - "file": "discoveryengine_v1_generated_document_service_purge_documents_async.py", + "description": "Sample for UpdateConversation", + "file": "discoveryengine_v1_generated_conversational_search_service_update_conversation_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "discoveryengine_v1_generated_DocumentService_PurgeDocuments_async", + "regionTag": "discoveryengine_v1_generated_ConversationalSearchService_UpdateConversation_async", "segments": [ { - "end": 56, + "end": 50, "start": 27, "type": "FULL" }, { - "end": 56, + "end": 50, "start": 27, "type": "SHORT" }, @@ -3150,43 +3136,51 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 46, + "end": 44, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 53, - "start": 47, + "end": 47, + "start": 45, "type": "REQUEST_EXECUTION" }, { - "end": 57, - "start": 54, + "end": 51, + "start": 48, "type": "RESPONSE_HANDLING" } ], - "title": "discoveryengine_v1_generated_document_service_purge_documents_async.py" + "title": "discoveryengine_v1_generated_conversational_search_service_update_conversation_async.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.discoveryengine_v1.DocumentServiceClient", - "shortName": "DocumentServiceClient" + "fullName": "google.cloud.discoveryengine_v1.ConversationalSearchServiceClient", + "shortName": "ConversationalSearchServiceClient" }, - "fullName": "google.cloud.discoveryengine_v1.DocumentServiceClient.purge_documents", + "fullName": "google.cloud.discoveryengine_v1.ConversationalSearchServiceClient.update_conversation", "method": { - "fullName": "google.cloud.discoveryengine.v1.DocumentService.PurgeDocuments", + "fullName": "google.cloud.discoveryengine.v1.ConversationalSearchService.UpdateConversation", "service": { - "fullName": "google.cloud.discoveryengine.v1.DocumentService", - "shortName": "DocumentService" + "fullName": "google.cloud.discoveryengine.v1.ConversationalSearchService", + "shortName": "ConversationalSearchService" }, - "shortName": "PurgeDocuments" + "shortName": "UpdateConversation" }, "parameters": [ { "name": "request", - "type": 
"google.cloud.discoveryengine_v1.types.PurgeDocumentsRequest" + "type": "google.cloud.discoveryengine_v1.types.UpdateConversationRequest" + }, + { + "name": "conversation", + "type": "google.cloud.discoveryengine_v1.types.Conversation" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" }, { "name": "retry", @@ -3201,22 +3195,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.api_core.operation.Operation", - "shortName": "purge_documents" + "resultType": "google.cloud.discoveryengine_v1.types.Conversation", + "shortName": "update_conversation" }, - "description": "Sample for PurgeDocuments", - "file": "discoveryengine_v1_generated_document_service_purge_documents_sync.py", + "description": "Sample for UpdateConversation", + "file": "discoveryengine_v1_generated_conversational_search_service_update_conversation_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "discoveryengine_v1_generated_DocumentService_PurgeDocuments_sync", + "regionTag": "discoveryengine_v1_generated_ConversationalSearchService_UpdateConversation_sync", "segments": [ { - "end": 56, + "end": 50, "start": 27, "type": "FULL" }, { - "end": 56, + "end": 50, "start": 27, "type": "SHORT" }, @@ -3226,48 +3220,48 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 46, + "end": 44, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 53, - "start": 47, + "end": 47, + "start": 45, "type": "REQUEST_EXECUTION" }, { - "end": 57, - "start": 54, + "end": 51, + "start": 48, "type": "RESPONSE_HANDLING" } ], - "title": "discoveryengine_v1_generated_document_service_purge_documents_sync.py" + "title": "discoveryengine_v1_generated_conversational_search_service_update_conversation_sync.py" }, { "canonical": true, "clientMethod": { "async": true, "client": { - "fullName": "google.cloud.discoveryengine_v1.DocumentServiceAsyncClient", - "shortName": "DocumentServiceAsyncClient" + "fullName": "google.cloud.discoveryengine_v1.ConversationalSearchServiceAsyncClient", + "shortName": "ConversationalSearchServiceAsyncClient" }, - "fullName": "google.cloud.discoveryengine_v1.DocumentServiceAsyncClient.update_document", + "fullName": "google.cloud.discoveryengine_v1.ConversationalSearchServiceAsyncClient.update_session", "method": { - "fullName": "google.cloud.discoveryengine.v1.DocumentService.UpdateDocument", + "fullName": "google.cloud.discoveryengine.v1.ConversationalSearchService.UpdateSession", "service": { - "fullName": "google.cloud.discoveryengine.v1.DocumentService", - "shortName": "DocumentService" + "fullName": "google.cloud.discoveryengine.v1.ConversationalSearchService", + "shortName": "ConversationalSearchService" }, - "shortName": "UpdateDocument" + "shortName": "UpdateSession" }, "parameters": [ { "name": "request", - "type": "google.cloud.discoveryengine_v1.types.UpdateDocumentRequest" + "type": "google.cloud.discoveryengine_v1.types.UpdateSessionRequest" }, { - "name": "document", - "type": "google.cloud.discoveryengine_v1.types.Document" + "name": "session", + "type": "google.cloud.discoveryengine_v1.types.Session" }, { "name": "update_mask", @@ -3286,14 +3280,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.discoveryengine_v1.types.Document", - "shortName": "update_document" + "resultType": "google.cloud.discoveryengine_v1.types.Session", + "shortName": "update_session" }, - "description": "Sample for UpdateDocument", - "file": "discoveryengine_v1_generated_document_service_update_document_async.py", + "description": 
"Sample for UpdateSession", + "file": "discoveryengine_v1_generated_conversational_search_service_update_session_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "discoveryengine_v1_generated_DocumentService_UpdateDocument_async", + "regionTag": "discoveryengine_v1_generated_ConversationalSearchService_UpdateSession_async", "segments": [ { "end": 50, @@ -3326,32 +3320,32 @@ "type": "RESPONSE_HANDLING" } ], - "title": "discoveryengine_v1_generated_document_service_update_document_async.py" + "title": "discoveryengine_v1_generated_conversational_search_service_update_session_async.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.discoveryengine_v1.DocumentServiceClient", - "shortName": "DocumentServiceClient" + "fullName": "google.cloud.discoveryengine_v1.ConversationalSearchServiceClient", + "shortName": "ConversationalSearchServiceClient" }, - "fullName": "google.cloud.discoveryengine_v1.DocumentServiceClient.update_document", + "fullName": "google.cloud.discoveryengine_v1.ConversationalSearchServiceClient.update_session", "method": { - "fullName": "google.cloud.discoveryengine.v1.DocumentService.UpdateDocument", + "fullName": "google.cloud.discoveryengine.v1.ConversationalSearchService.UpdateSession", "service": { - "fullName": "google.cloud.discoveryengine.v1.DocumentService", - "shortName": "DocumentService" + "fullName": "google.cloud.discoveryengine.v1.ConversationalSearchService", + "shortName": "ConversationalSearchService" }, - "shortName": "UpdateDocument" + "shortName": "UpdateSession" }, "parameters": [ { "name": "request", - "type": "google.cloud.discoveryengine_v1.types.UpdateDocumentRequest" + "type": "google.cloud.discoveryengine_v1.types.UpdateSessionRequest" }, { - "name": "document", - "type": "google.cloud.discoveryengine_v1.types.Document" + "name": "session", + "type": "google.cloud.discoveryengine_v1.types.Session" }, { "name": "update_mask", @@ -3370,14 +3364,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.discoveryengine_v1.types.Document", - "shortName": "update_document" + "resultType": "google.cloud.discoveryengine_v1.types.Session", + "shortName": "update_session" }, - "description": "Sample for UpdateDocument", - "file": "discoveryengine_v1_generated_document_service_update_document_sync.py", + "description": "Sample for UpdateSession", + "file": "discoveryengine_v1_generated_conversational_search_service_update_session_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "discoveryengine_v1_generated_DocumentService_UpdateDocument_sync", + "regionTag": "discoveryengine_v1_generated_ConversationalSearchService_UpdateSession_sync", "segments": [ { "end": 50, @@ -3410,40 +3404,40 @@ "type": "RESPONSE_HANDLING" } ], - "title": "discoveryengine_v1_generated_document_service_update_document_sync.py" + "title": "discoveryengine_v1_generated_conversational_search_service_update_session_sync.py" }, { "canonical": true, "clientMethod": { "async": true, "client": { - "fullName": "google.cloud.discoveryengine_v1.EngineServiceAsyncClient", - "shortName": "EngineServiceAsyncClient" + "fullName": "google.cloud.discoveryengine_v1.DataStoreServiceAsyncClient", + "shortName": "DataStoreServiceAsyncClient" }, - "fullName": "google.cloud.discoveryengine_v1.EngineServiceAsyncClient.create_engine", + "fullName": "google.cloud.discoveryengine_v1.DataStoreServiceAsyncClient.create_data_store", "method": { - "fullName": 
"google.cloud.discoveryengine.v1.EngineService.CreateEngine", + "fullName": "google.cloud.discoveryengine.v1.DataStoreService.CreateDataStore", "service": { - "fullName": "google.cloud.discoveryengine.v1.EngineService", - "shortName": "EngineService" + "fullName": "google.cloud.discoveryengine.v1.DataStoreService", + "shortName": "DataStoreService" }, - "shortName": "CreateEngine" + "shortName": "CreateDataStore" }, "parameters": [ { "name": "request", - "type": "google.cloud.discoveryengine_v1.types.CreateEngineRequest" + "type": "google.cloud.discoveryengine_v1.types.CreateDataStoreRequest" }, { "name": "parent", "type": "str" }, { - "name": "engine", - "type": "google.cloud.discoveryengine_v1.types.Engine" + "name": "data_store", + "type": "google.cloud.discoveryengine_v1.types.DataStore" }, { - "name": "engine_id", + "name": "data_store_id", "type": "str" }, { @@ -3460,21 +3454,21 @@ } ], "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "create_engine" + "shortName": "create_data_store" }, - "description": "Sample for CreateEngine", - "file": "discoveryengine_v1_generated_engine_service_create_engine_async.py", + "description": "Sample for CreateDataStore", + "file": "discoveryengine_v1_generated_data_store_service_create_data_store_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "discoveryengine_v1_generated_EngineService_CreateEngine_async", + "regionTag": "discoveryengine_v1_generated_DataStoreService_CreateDataStore_async", "segments": [ { - "end": 61, + "end": 60, "start": 27, "type": "FULL" }, { - "end": 61, + "end": 60, "start": 27, "type": "SHORT" }, @@ -3484,54 +3478,54 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 51, + "end": 50, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 58, - "start": 52, + "end": 57, + "start": 51, "type": "REQUEST_EXECUTION" }, { - "end": 62, - "start": 59, + "end": 61, + "start": 58, "type": "RESPONSE_HANDLING" } ], - "title": "discoveryengine_v1_generated_engine_service_create_engine_async.py" + "title": "discoveryengine_v1_generated_data_store_service_create_data_store_async.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.discoveryengine_v1.EngineServiceClient", - "shortName": "EngineServiceClient" + "fullName": "google.cloud.discoveryengine_v1.DataStoreServiceClient", + "shortName": "DataStoreServiceClient" }, - "fullName": "google.cloud.discoveryengine_v1.EngineServiceClient.create_engine", + "fullName": "google.cloud.discoveryengine_v1.DataStoreServiceClient.create_data_store", "method": { - "fullName": "google.cloud.discoveryengine.v1.EngineService.CreateEngine", + "fullName": "google.cloud.discoveryengine.v1.DataStoreService.CreateDataStore", "service": { - "fullName": "google.cloud.discoveryengine.v1.EngineService", - "shortName": "EngineService" + "fullName": "google.cloud.discoveryengine.v1.DataStoreService", + "shortName": "DataStoreService" }, - "shortName": "CreateEngine" + "shortName": "CreateDataStore" }, "parameters": [ { "name": "request", - "type": "google.cloud.discoveryengine_v1.types.CreateEngineRequest" + "type": "google.cloud.discoveryengine_v1.types.CreateDataStoreRequest" }, { "name": "parent", "type": "str" }, { - "name": "engine", - "type": "google.cloud.discoveryengine_v1.types.Engine" + "name": "data_store", + "type": "google.cloud.discoveryengine_v1.types.DataStore" }, { - "name": "engine_id", + "name": "data_store_id", "type": "str" }, { @@ -3548,21 +3542,2462 @@ } ], "resultType": 
"google.api_core.operation.Operation", - "shortName": "create_engine" + "shortName": "create_data_store" + }, + "description": "Sample for CreateDataStore", + "file": "discoveryengine_v1_generated_data_store_service_create_data_store_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "discoveryengine_v1_generated_DataStoreService_CreateDataStore_sync", + "segments": [ + { + "end": 60, + "start": 27, + "type": "FULL" + }, + { + "end": 60, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 50, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 57, + "start": 51, + "type": "REQUEST_EXECUTION" + }, + { + "end": 61, + "start": 58, + "type": "RESPONSE_HANDLING" + } + ], + "title": "discoveryengine_v1_generated_data_store_service_create_data_store_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.discoveryengine_v1.DataStoreServiceAsyncClient", + "shortName": "DataStoreServiceAsyncClient" + }, + "fullName": "google.cloud.discoveryengine_v1.DataStoreServiceAsyncClient.delete_data_store", + "method": { + "fullName": "google.cloud.discoveryengine.v1.DataStoreService.DeleteDataStore", + "service": { + "fullName": "google.cloud.discoveryengine.v1.DataStoreService", + "shortName": "DataStoreService" + }, + "shortName": "DeleteDataStore" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.discoveryengine_v1.types.DeleteDataStoreRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "delete_data_store" + }, + "description": "Sample for DeleteDataStore", + "file": "discoveryengine_v1_generated_data_store_service_delete_data_store_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "discoveryengine_v1_generated_DataStoreService_DeleteDataStore_async", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "discoveryengine_v1_generated_data_store_service_delete_data_store_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.discoveryengine_v1.DataStoreServiceClient", + "shortName": "DataStoreServiceClient" + }, + "fullName": "google.cloud.discoveryengine_v1.DataStoreServiceClient.delete_data_store", + "method": { + "fullName": "google.cloud.discoveryengine.v1.DataStoreService.DeleteDataStore", + "service": { + "fullName": "google.cloud.discoveryengine.v1.DataStoreService", + "shortName": "DataStoreService" + }, + "shortName": "DeleteDataStore" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.discoveryengine_v1.types.DeleteDataStoreRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": 
"google.api_core.operation.Operation", + "shortName": "delete_data_store" + }, + "description": "Sample for DeleteDataStore", + "file": "discoveryengine_v1_generated_data_store_service_delete_data_store_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "discoveryengine_v1_generated_DataStoreService_DeleteDataStore_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "discoveryengine_v1_generated_data_store_service_delete_data_store_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.discoveryengine_v1.DataStoreServiceAsyncClient", + "shortName": "DataStoreServiceAsyncClient" + }, + "fullName": "google.cloud.discoveryengine_v1.DataStoreServiceAsyncClient.get_data_store", + "method": { + "fullName": "google.cloud.discoveryengine.v1.DataStoreService.GetDataStore", + "service": { + "fullName": "google.cloud.discoveryengine.v1.DataStoreService", + "shortName": "DataStoreService" + }, + "shortName": "GetDataStore" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.discoveryengine_v1.types.GetDataStoreRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.discoveryengine_v1.types.DataStore", + "shortName": "get_data_store" + }, + "description": "Sample for GetDataStore", + "file": "discoveryengine_v1_generated_data_store_service_get_data_store_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "discoveryengine_v1_generated_DataStoreService_GetDataStore_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "discoveryengine_v1_generated_data_store_service_get_data_store_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.discoveryengine_v1.DataStoreServiceClient", + "shortName": "DataStoreServiceClient" + }, + "fullName": "google.cloud.discoveryengine_v1.DataStoreServiceClient.get_data_store", + "method": { + "fullName": "google.cloud.discoveryengine.v1.DataStoreService.GetDataStore", + "service": { + "fullName": "google.cloud.discoveryengine.v1.DataStoreService", + "shortName": "DataStoreService" + }, + "shortName": "GetDataStore" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.discoveryengine_v1.types.GetDataStoreRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.discoveryengine_v1.types.DataStore", + "shortName": 
"get_data_store" + }, + "description": "Sample for GetDataStore", + "file": "discoveryengine_v1_generated_data_store_service_get_data_store_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "discoveryengine_v1_generated_DataStoreService_GetDataStore_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "discoveryengine_v1_generated_data_store_service_get_data_store_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.discoveryengine_v1.DataStoreServiceAsyncClient", + "shortName": "DataStoreServiceAsyncClient" + }, + "fullName": "google.cloud.discoveryengine_v1.DataStoreServiceAsyncClient.list_data_stores", + "method": { + "fullName": "google.cloud.discoveryengine.v1.DataStoreService.ListDataStores", + "service": { + "fullName": "google.cloud.discoveryengine.v1.DataStoreService", + "shortName": "DataStoreService" + }, + "shortName": "ListDataStores" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.discoveryengine_v1.types.ListDataStoresRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.discoveryengine_v1.services.data_store_service.pagers.ListDataStoresAsyncPager", + "shortName": "list_data_stores" + }, + "description": "Sample for ListDataStores", + "file": "discoveryengine_v1_generated_data_store_service_list_data_stores_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "discoveryengine_v1_generated_DataStoreService_ListDataStores_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "discoveryengine_v1_generated_data_store_service_list_data_stores_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.discoveryengine_v1.DataStoreServiceClient", + "shortName": "DataStoreServiceClient" + }, + "fullName": "google.cloud.discoveryengine_v1.DataStoreServiceClient.list_data_stores", + "method": { + "fullName": "google.cloud.discoveryengine.v1.DataStoreService.ListDataStores", + "service": { + "fullName": "google.cloud.discoveryengine.v1.DataStoreService", + "shortName": "DataStoreService" + }, + "shortName": "ListDataStores" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.discoveryengine_v1.types.ListDataStoresRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": 
"google.cloud.discoveryengine_v1.services.data_store_service.pagers.ListDataStoresPager", + "shortName": "list_data_stores" + }, + "description": "Sample for ListDataStores", + "file": "discoveryengine_v1_generated_data_store_service_list_data_stores_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "discoveryengine_v1_generated_DataStoreService_ListDataStores_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "discoveryengine_v1_generated_data_store_service_list_data_stores_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.discoveryengine_v1.DataStoreServiceAsyncClient", + "shortName": "DataStoreServiceAsyncClient" + }, + "fullName": "google.cloud.discoveryengine_v1.DataStoreServiceAsyncClient.update_data_store", + "method": { + "fullName": "google.cloud.discoveryengine.v1.DataStoreService.UpdateDataStore", + "service": { + "fullName": "google.cloud.discoveryengine.v1.DataStoreService", + "shortName": "DataStoreService" + }, + "shortName": "UpdateDataStore" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.discoveryengine_v1.types.UpdateDataStoreRequest" + }, + { + "name": "data_store", + "type": "google.cloud.discoveryengine_v1.types.DataStore" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.discoveryengine_v1.types.DataStore", + "shortName": "update_data_store" + }, + "description": "Sample for UpdateDataStore", + "file": "discoveryengine_v1_generated_data_store_service_update_data_store_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "discoveryengine_v1_generated_DataStoreService_UpdateDataStore_async", + "segments": [ + { + "end": 54, + "start": 27, + "type": "FULL" + }, + { + "end": 54, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 48, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 51, + "start": 49, + "type": "REQUEST_EXECUTION" + }, + { + "end": 55, + "start": 52, + "type": "RESPONSE_HANDLING" + } + ], + "title": "discoveryengine_v1_generated_data_store_service_update_data_store_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.discoveryengine_v1.DataStoreServiceClient", + "shortName": "DataStoreServiceClient" + }, + "fullName": "google.cloud.discoveryengine_v1.DataStoreServiceClient.update_data_store", + "method": { + "fullName": "google.cloud.discoveryengine.v1.DataStoreService.UpdateDataStore", + "service": { + "fullName": "google.cloud.discoveryengine.v1.DataStoreService", + "shortName": "DataStoreService" + }, + "shortName": "UpdateDataStore" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.discoveryengine_v1.types.UpdateDataStoreRequest" + }, + { + "name": "data_store", + "type": "google.cloud.discoveryengine_v1.types.DataStore" + }, + 
{ + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.discoveryengine_v1.types.DataStore", + "shortName": "update_data_store" + }, + "description": "Sample for UpdateDataStore", + "file": "discoveryengine_v1_generated_data_store_service_update_data_store_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "discoveryengine_v1_generated_DataStoreService_UpdateDataStore_sync", + "segments": [ + { + "end": 54, + "start": 27, + "type": "FULL" + }, + { + "end": 54, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 48, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 51, + "start": 49, + "type": "REQUEST_EXECUTION" + }, + { + "end": 55, + "start": 52, + "type": "RESPONSE_HANDLING" + } + ], + "title": "discoveryengine_v1_generated_data_store_service_update_data_store_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.discoveryengine_v1.DocumentServiceAsyncClient", + "shortName": "DocumentServiceAsyncClient" + }, + "fullName": "google.cloud.discoveryengine_v1.DocumentServiceAsyncClient.create_document", + "method": { + "fullName": "google.cloud.discoveryengine.v1.DocumentService.CreateDocument", + "service": { + "fullName": "google.cloud.discoveryengine.v1.DocumentService", + "shortName": "DocumentService" + }, + "shortName": "CreateDocument" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.discoveryengine_v1.types.CreateDocumentRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "document", + "type": "google.cloud.discoveryengine_v1.types.Document" + }, + { + "name": "document_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.discoveryengine_v1.types.Document", + "shortName": "create_document" + }, + "description": "Sample for CreateDocument", + "file": "discoveryengine_v1_generated_document_service_create_document_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "discoveryengine_v1_generated_DocumentService_CreateDocument_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "discoveryengine_v1_generated_document_service_create_document_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.discoveryengine_v1.DocumentServiceClient", + "shortName": "DocumentServiceClient" + }, + "fullName": "google.cloud.discoveryengine_v1.DocumentServiceClient.create_document", + "method": { + "fullName": "google.cloud.discoveryengine.v1.DocumentService.CreateDocument", + "service": { + "fullName": "google.cloud.discoveryengine.v1.DocumentService", + "shortName": "DocumentService" + }, + "shortName": 
"CreateDocument" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.discoveryengine_v1.types.CreateDocumentRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "document", + "type": "google.cloud.discoveryengine_v1.types.Document" + }, + { + "name": "document_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.discoveryengine_v1.types.Document", + "shortName": "create_document" + }, + "description": "Sample for CreateDocument", + "file": "discoveryengine_v1_generated_document_service_create_document_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "discoveryengine_v1_generated_DocumentService_CreateDocument_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "discoveryengine_v1_generated_document_service_create_document_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.discoveryengine_v1.DocumentServiceAsyncClient", + "shortName": "DocumentServiceAsyncClient" + }, + "fullName": "google.cloud.discoveryengine_v1.DocumentServiceAsyncClient.delete_document", + "method": { + "fullName": "google.cloud.discoveryengine.v1.DocumentService.DeleteDocument", + "service": { + "fullName": "google.cloud.discoveryengine.v1.DocumentService", + "shortName": "DocumentService" + }, + "shortName": "DeleteDocument" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.discoveryengine_v1.types.DeleteDocumentRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "shortName": "delete_document" + }, + "description": "Sample for DeleteDocument", + "file": "discoveryengine_v1_generated_document_service_delete_document_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "discoveryengine_v1_generated_DocumentService_DeleteDocument_async", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "discoveryengine_v1_generated_document_service_delete_document_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.discoveryengine_v1.DocumentServiceClient", + "shortName": "DocumentServiceClient" + }, + "fullName": "google.cloud.discoveryengine_v1.DocumentServiceClient.delete_document", + "method": { + "fullName": "google.cloud.discoveryengine.v1.DocumentService.DeleteDocument", + "service": { + "fullName": "google.cloud.discoveryengine.v1.DocumentService", + "shortName": "DocumentService" + }, + "shortName": "DeleteDocument" + }, 
+ "parameters": [ + { + "name": "request", + "type": "google.cloud.discoveryengine_v1.types.DeleteDocumentRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "shortName": "delete_document" + }, + "description": "Sample for DeleteDocument", + "file": "discoveryengine_v1_generated_document_service_delete_document_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "discoveryengine_v1_generated_DocumentService_DeleteDocument_sync", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "discoveryengine_v1_generated_document_service_delete_document_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.discoveryengine_v1.DocumentServiceAsyncClient", + "shortName": "DocumentServiceAsyncClient" + }, + "fullName": "google.cloud.discoveryengine_v1.DocumentServiceAsyncClient.get_document", + "method": { + "fullName": "google.cloud.discoveryengine.v1.DocumentService.GetDocument", + "service": { + "fullName": "google.cloud.discoveryengine.v1.DocumentService", + "shortName": "DocumentService" + }, + "shortName": "GetDocument" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.discoveryengine_v1.types.GetDocumentRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.discoveryengine_v1.types.Document", + "shortName": "get_document" + }, + "description": "Sample for GetDocument", + "file": "discoveryengine_v1_generated_document_service_get_document_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "discoveryengine_v1_generated_DocumentService_GetDocument_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "discoveryengine_v1_generated_document_service_get_document_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.discoveryengine_v1.DocumentServiceClient", + "shortName": "DocumentServiceClient" + }, + "fullName": "google.cloud.discoveryengine_v1.DocumentServiceClient.get_document", + "method": { + "fullName": "google.cloud.discoveryengine.v1.DocumentService.GetDocument", + "service": { + "fullName": "google.cloud.discoveryengine.v1.DocumentService", + "shortName": "DocumentService" + }, + "shortName": "GetDocument" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.discoveryengine_v1.types.GetDocumentRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": 
"google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.discoveryengine_v1.types.Document", + "shortName": "get_document" + }, + "description": "Sample for GetDocument", + "file": "discoveryengine_v1_generated_document_service_get_document_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "discoveryengine_v1_generated_DocumentService_GetDocument_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "discoveryengine_v1_generated_document_service_get_document_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.discoveryengine_v1.DocumentServiceAsyncClient", + "shortName": "DocumentServiceAsyncClient" + }, + "fullName": "google.cloud.discoveryengine_v1.DocumentServiceAsyncClient.import_documents", + "method": { + "fullName": "google.cloud.discoveryengine.v1.DocumentService.ImportDocuments", + "service": { + "fullName": "google.cloud.discoveryengine.v1.DocumentService", + "shortName": "DocumentService" + }, + "shortName": "ImportDocuments" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.discoveryengine_v1.types.ImportDocumentsRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "import_documents" + }, + "description": "Sample for ImportDocuments", + "file": "discoveryengine_v1_generated_document_service_import_documents_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "discoveryengine_v1_generated_DocumentService_ImportDocuments_async", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "discoveryengine_v1_generated_document_service_import_documents_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.discoveryengine_v1.DocumentServiceClient", + "shortName": "DocumentServiceClient" + }, + "fullName": "google.cloud.discoveryengine_v1.DocumentServiceClient.import_documents", + "method": { + "fullName": "google.cloud.discoveryengine.v1.DocumentService.ImportDocuments", + "service": { + "fullName": "google.cloud.discoveryengine.v1.DocumentService", + "shortName": "DocumentService" + }, + "shortName": "ImportDocuments" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.discoveryengine_v1.types.ImportDocumentsRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + 
"resultType": "google.api_core.operation.Operation", + "shortName": "import_documents" + }, + "description": "Sample for ImportDocuments", + "file": "discoveryengine_v1_generated_document_service_import_documents_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "discoveryengine_v1_generated_DocumentService_ImportDocuments_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "discoveryengine_v1_generated_document_service_import_documents_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.discoveryengine_v1.DocumentServiceAsyncClient", + "shortName": "DocumentServiceAsyncClient" + }, + "fullName": "google.cloud.discoveryengine_v1.DocumentServiceAsyncClient.list_documents", + "method": { + "fullName": "google.cloud.discoveryengine.v1.DocumentService.ListDocuments", + "service": { + "fullName": "google.cloud.discoveryengine.v1.DocumentService", + "shortName": "DocumentService" + }, + "shortName": "ListDocuments" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.discoveryengine_v1.types.ListDocumentsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.discoveryengine_v1.services.document_service.pagers.ListDocumentsAsyncPager", + "shortName": "list_documents" + }, + "description": "Sample for ListDocuments", + "file": "discoveryengine_v1_generated_document_service_list_documents_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "discoveryengine_v1_generated_DocumentService_ListDocuments_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "discoveryengine_v1_generated_document_service_list_documents_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.discoveryengine_v1.DocumentServiceClient", + "shortName": "DocumentServiceClient" + }, + "fullName": "google.cloud.discoveryengine_v1.DocumentServiceClient.list_documents", + "method": { + "fullName": "google.cloud.discoveryengine.v1.DocumentService.ListDocuments", + "service": { + "fullName": "google.cloud.discoveryengine.v1.DocumentService", + "shortName": "DocumentService" + }, + "shortName": "ListDocuments" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.discoveryengine_v1.types.ListDocumentsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": 
"google.cloud.discoveryengine_v1.services.document_service.pagers.ListDocumentsPager", + "shortName": "list_documents" + }, + "description": "Sample for ListDocuments", + "file": "discoveryengine_v1_generated_document_service_list_documents_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "discoveryengine_v1_generated_DocumentService_ListDocuments_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "discoveryengine_v1_generated_document_service_list_documents_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.discoveryengine_v1.DocumentServiceAsyncClient", + "shortName": "DocumentServiceAsyncClient" + }, + "fullName": "google.cloud.discoveryengine_v1.DocumentServiceAsyncClient.purge_documents", + "method": { + "fullName": "google.cloud.discoveryengine.v1.DocumentService.PurgeDocuments", + "service": { + "fullName": "google.cloud.discoveryengine.v1.DocumentService", + "shortName": "DocumentService" + }, + "shortName": "PurgeDocuments" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.discoveryengine_v1.types.PurgeDocumentsRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "purge_documents" + }, + "description": "Sample for PurgeDocuments", + "file": "discoveryengine_v1_generated_document_service_purge_documents_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "discoveryengine_v1_generated_DocumentService_PurgeDocuments_async", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" + } + ], + "title": "discoveryengine_v1_generated_document_service_purge_documents_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.discoveryengine_v1.DocumentServiceClient", + "shortName": "DocumentServiceClient" + }, + "fullName": "google.cloud.discoveryengine_v1.DocumentServiceClient.purge_documents", + "method": { + "fullName": "google.cloud.discoveryengine.v1.DocumentService.PurgeDocuments", + "service": { + "fullName": "google.cloud.discoveryengine.v1.DocumentService", + "shortName": "DocumentService" + }, + "shortName": "PurgeDocuments" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.discoveryengine_v1.types.PurgeDocumentsRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "purge_documents" + }, + "description": "Sample for PurgeDocuments", 
+ "file": "discoveryengine_v1_generated_document_service_purge_documents_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "discoveryengine_v1_generated_DocumentService_PurgeDocuments_sync", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" + } + ], + "title": "discoveryengine_v1_generated_document_service_purge_documents_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.discoveryengine_v1.DocumentServiceAsyncClient", + "shortName": "DocumentServiceAsyncClient" + }, + "fullName": "google.cloud.discoveryengine_v1.DocumentServiceAsyncClient.update_document", + "method": { + "fullName": "google.cloud.discoveryengine.v1.DocumentService.UpdateDocument", + "service": { + "fullName": "google.cloud.discoveryengine.v1.DocumentService", + "shortName": "DocumentService" + }, + "shortName": "UpdateDocument" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.discoveryengine_v1.types.UpdateDocumentRequest" + }, + { + "name": "document", + "type": "google.cloud.discoveryengine_v1.types.Document" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.discoveryengine_v1.types.Document", + "shortName": "update_document" + }, + "description": "Sample for UpdateDocument", + "file": "discoveryengine_v1_generated_document_service_update_document_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "discoveryengine_v1_generated_DocumentService_UpdateDocument_async", + "segments": [ + { + "end": 50, + "start": 27, + "type": "FULL" + }, + { + "end": 50, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 51, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "discoveryengine_v1_generated_document_service_update_document_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.discoveryengine_v1.DocumentServiceClient", + "shortName": "DocumentServiceClient" + }, + "fullName": "google.cloud.discoveryengine_v1.DocumentServiceClient.update_document", + "method": { + "fullName": "google.cloud.discoveryengine.v1.DocumentService.UpdateDocument", + "service": { + "fullName": "google.cloud.discoveryengine.v1.DocumentService", + "shortName": "DocumentService" + }, + "shortName": "UpdateDocument" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.discoveryengine_v1.types.UpdateDocumentRequest" + }, + { + "name": "document", + "type": "google.cloud.discoveryengine_v1.types.Document" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": 
"metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.discoveryengine_v1.types.Document", + "shortName": "update_document" + }, + "description": "Sample for UpdateDocument", + "file": "discoveryengine_v1_generated_document_service_update_document_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "discoveryengine_v1_generated_DocumentService_UpdateDocument_sync", + "segments": [ + { + "end": 50, + "start": 27, + "type": "FULL" + }, + { + "end": 50, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 51, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "discoveryengine_v1_generated_document_service_update_document_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.discoveryengine_v1.EngineServiceAsyncClient", + "shortName": "EngineServiceAsyncClient" + }, + "fullName": "google.cloud.discoveryengine_v1.EngineServiceAsyncClient.create_engine", + "method": { + "fullName": "google.cloud.discoveryengine.v1.EngineService.CreateEngine", + "service": { + "fullName": "google.cloud.discoveryengine.v1.EngineService", + "shortName": "EngineService" + }, + "shortName": "CreateEngine" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.discoveryengine_v1.types.CreateEngineRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "engine", + "type": "google.cloud.discoveryengine_v1.types.Engine" + }, + { + "name": "engine_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "create_engine" + }, + "description": "Sample for CreateEngine", + "file": "discoveryengine_v1_generated_engine_service_create_engine_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "discoveryengine_v1_generated_EngineService_CreateEngine_async", + "segments": [ + { + "end": 61, + "start": 27, + "type": "FULL" + }, + { + "end": 61, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 51, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 58, + "start": 52, + "type": "REQUEST_EXECUTION" + }, + { + "end": 62, + "start": 59, + "type": "RESPONSE_HANDLING" + } + ], + "title": "discoveryengine_v1_generated_engine_service_create_engine_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.discoveryengine_v1.EngineServiceClient", + "shortName": "EngineServiceClient" + }, + "fullName": "google.cloud.discoveryengine_v1.EngineServiceClient.create_engine", + "method": { + "fullName": "google.cloud.discoveryengine.v1.EngineService.CreateEngine", + "service": { + "fullName": "google.cloud.discoveryengine.v1.EngineService", + "shortName": "EngineService" + }, + "shortName": "CreateEngine" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.discoveryengine_v1.types.CreateEngineRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "engine", + "type": "google.cloud.discoveryengine_v1.types.Engine" + }, + { + "name": "engine_id", + 
"type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "create_engine" + }, + "description": "Sample for CreateEngine", + "file": "discoveryengine_v1_generated_engine_service_create_engine_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "discoveryengine_v1_generated_EngineService_CreateEngine_sync", + "segments": [ + { + "end": 61, + "start": 27, + "type": "FULL" + }, + { + "end": 61, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 51, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 58, + "start": 52, + "type": "REQUEST_EXECUTION" + }, + { + "end": 62, + "start": 59, + "type": "RESPONSE_HANDLING" + } + ], + "title": "discoveryengine_v1_generated_engine_service_create_engine_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.discoveryengine_v1.EngineServiceAsyncClient", + "shortName": "EngineServiceAsyncClient" + }, + "fullName": "google.cloud.discoveryengine_v1.EngineServiceAsyncClient.delete_engine", + "method": { + "fullName": "google.cloud.discoveryengine.v1.EngineService.DeleteEngine", + "service": { + "fullName": "google.cloud.discoveryengine.v1.EngineService", + "shortName": "EngineService" + }, + "shortName": "DeleteEngine" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.discoveryengine_v1.types.DeleteEngineRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "delete_engine" + }, + "description": "Sample for DeleteEngine", + "file": "discoveryengine_v1_generated_engine_service_delete_engine_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "discoveryengine_v1_generated_EngineService_DeleteEngine_async", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "discoveryengine_v1_generated_engine_service_delete_engine_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.discoveryengine_v1.EngineServiceClient", + "shortName": "EngineServiceClient" + }, + "fullName": "google.cloud.discoveryengine_v1.EngineServiceClient.delete_engine", + "method": { + "fullName": "google.cloud.discoveryengine.v1.EngineService.DeleteEngine", + "service": { + "fullName": "google.cloud.discoveryengine.v1.EngineService", + "shortName": "EngineService" + }, + "shortName": "DeleteEngine" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.discoveryengine_v1.types.DeleteEngineRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": 
"metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "delete_engine" + }, + "description": "Sample for DeleteEngine", + "file": "discoveryengine_v1_generated_engine_service_delete_engine_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "discoveryengine_v1_generated_EngineService_DeleteEngine_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "discoveryengine_v1_generated_engine_service_delete_engine_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.discoveryengine_v1.EngineServiceAsyncClient", + "shortName": "EngineServiceAsyncClient" + }, + "fullName": "google.cloud.discoveryengine_v1.EngineServiceAsyncClient.get_engine", + "method": { + "fullName": "google.cloud.discoveryengine.v1.EngineService.GetEngine", + "service": { + "fullName": "google.cloud.discoveryengine.v1.EngineService", + "shortName": "EngineService" + }, + "shortName": "GetEngine" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.discoveryengine_v1.types.GetEngineRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.discoveryengine_v1.types.Engine", + "shortName": "get_engine" + }, + "description": "Sample for GetEngine", + "file": "discoveryengine_v1_generated_engine_service_get_engine_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "discoveryengine_v1_generated_EngineService_GetEngine_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "discoveryengine_v1_generated_engine_service_get_engine_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.discoveryengine_v1.EngineServiceClient", + "shortName": "EngineServiceClient" + }, + "fullName": "google.cloud.discoveryengine_v1.EngineServiceClient.get_engine", + "method": { + "fullName": "google.cloud.discoveryengine.v1.EngineService.GetEngine", + "service": { + "fullName": "google.cloud.discoveryengine.v1.EngineService", + "shortName": "EngineService" + }, + "shortName": "GetEngine" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.discoveryengine_v1.types.GetEngineRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.discoveryengine_v1.types.Engine", + "shortName": "get_engine" + }, + "description": "Sample for 
GetEngine", + "file": "discoveryengine_v1_generated_engine_service_get_engine_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "discoveryengine_v1_generated_EngineService_GetEngine_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "discoveryengine_v1_generated_engine_service_get_engine_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.discoveryengine_v1.EngineServiceAsyncClient", + "shortName": "EngineServiceAsyncClient" + }, + "fullName": "google.cloud.discoveryengine_v1.EngineServiceAsyncClient.list_engines", + "method": { + "fullName": "google.cloud.discoveryengine.v1.EngineService.ListEngines", + "service": { + "fullName": "google.cloud.discoveryengine.v1.EngineService", + "shortName": "EngineService" + }, + "shortName": "ListEngines" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.discoveryengine_v1.types.ListEnginesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.discoveryengine_v1.services.engine_service.pagers.ListEnginesAsyncPager", + "shortName": "list_engines" + }, + "description": "Sample for ListEngines", + "file": "discoveryengine_v1_generated_engine_service_list_engines_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "discoveryengine_v1_generated_EngineService_ListEngines_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "discoveryengine_v1_generated_engine_service_list_engines_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.discoveryengine_v1.EngineServiceClient", + "shortName": "EngineServiceClient" + }, + "fullName": "google.cloud.discoveryengine_v1.EngineServiceClient.list_engines", + "method": { + "fullName": "google.cloud.discoveryengine.v1.EngineService.ListEngines", + "service": { + "fullName": "google.cloud.discoveryengine.v1.EngineService", + "shortName": "EngineService" + }, + "shortName": "ListEngines" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.discoveryengine_v1.types.ListEnginesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.discoveryengine_v1.services.engine_service.pagers.ListEnginesPager", + "shortName": "list_engines" }, - "description": "Sample for CreateEngine", - "file": "discoveryengine_v1_generated_engine_service_create_engine_sync.py", 
+ "description": "Sample for ListEngines", + "file": "discoveryengine_v1_generated_engine_service_list_engines_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "discoveryengine_v1_generated_EngineService_CreateEngine_sync", + "regionTag": "discoveryengine_v1_generated_EngineService_ListEngines_sync", "segments": [ { - "end": 61, + "end": 52, "start": 27, "type": "FULL" }, { - "end": 61, + "end": 52, "start": 27, "type": "SHORT" }, @@ -3572,22 +6007,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 51, + "end": 45, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 58, - "start": 52, + "end": 48, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 62, - "start": 59, + "end": 53, + "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "discoveryengine_v1_generated_engine_service_create_engine_sync.py" + "title": "discoveryengine_v1_generated_engine_service_list_engines_sync.py" }, { "canonical": true, @@ -3597,23 +6032,27 @@ "fullName": "google.cloud.discoveryengine_v1.EngineServiceAsyncClient", "shortName": "EngineServiceAsyncClient" }, - "fullName": "google.cloud.discoveryengine_v1.EngineServiceAsyncClient.delete_engine", + "fullName": "google.cloud.discoveryengine_v1.EngineServiceAsyncClient.update_engine", "method": { - "fullName": "google.cloud.discoveryengine.v1.EngineService.DeleteEngine", + "fullName": "google.cloud.discoveryengine.v1.EngineService.UpdateEngine", "service": { "fullName": "google.cloud.discoveryengine.v1.EngineService", "shortName": "EngineService" }, - "shortName": "DeleteEngine" + "shortName": "UpdateEngine" }, "parameters": [ { "name": "request", - "type": "google.cloud.discoveryengine_v1.types.DeleteEngineRequest" + "type": "google.cloud.discoveryengine_v1.types.UpdateEngineRequest" }, { - "name": "name", - "type": "str" + "name": "engine", + "type": "google.cloud.discoveryengine_v1.types.Engine" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" }, { "name": "retry", @@ -3628,14 +6067,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "delete_engine" + "resultType": "google.cloud.discoveryengine_v1.types.Engine", + "shortName": "update_engine" }, - "description": "Sample for DeleteEngine", - "file": "discoveryengine_v1_generated_engine_service_delete_engine_async.py", + "description": "Sample for UpdateEngine", + "file": "discoveryengine_v1_generated_engine_service_update_engine_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "discoveryengine_v1_generated_EngineService_DeleteEngine_async", + "regionTag": "discoveryengine_v1_generated_EngineService_UpdateEngine_async", "segments": [ { "end": 55, @@ -3653,13 +6092,13 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 45, + "end": 49, "start": 41, "type": "REQUEST_INITIALIZATION" }, { "end": 52, - "start": 46, + "start": 50, "type": "REQUEST_EXECUTION" }, { @@ -3668,7 +6107,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "discoveryengine_v1_generated_engine_service_delete_engine_async.py" + "title": "discoveryengine_v1_generated_engine_service_update_engine_async.py" }, { "canonical": true, @@ -3677,23 +6116,27 @@ "fullName": "google.cloud.discoveryengine_v1.EngineServiceClient", "shortName": "EngineServiceClient" }, - "fullName": "google.cloud.discoveryengine_v1.EngineServiceClient.delete_engine", + "fullName": "google.cloud.discoveryengine_v1.EngineServiceClient.update_engine", "method": { - "fullName": 
"google.cloud.discoveryengine.v1.EngineService.DeleteEngine", + "fullName": "google.cloud.discoveryengine.v1.EngineService.UpdateEngine", "service": { "fullName": "google.cloud.discoveryengine.v1.EngineService", "shortName": "EngineService" }, - "shortName": "DeleteEngine" + "shortName": "UpdateEngine" }, "parameters": [ { "name": "request", - "type": "google.cloud.discoveryengine_v1.types.DeleteEngineRequest" + "type": "google.cloud.discoveryengine_v1.types.UpdateEngineRequest" }, { - "name": "name", - "type": "str" + "name": "engine", + "type": "google.cloud.discoveryengine_v1.types.Engine" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" }, { "name": "retry", @@ -3708,14 +6151,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.api_core.operation.Operation", - "shortName": "delete_engine" + "resultType": "google.cloud.discoveryengine_v1.types.Engine", + "shortName": "update_engine" }, - "description": "Sample for DeleteEngine", - "file": "discoveryengine_v1_generated_engine_service_delete_engine_sync.py", + "description": "Sample for UpdateEngine", + "file": "discoveryengine_v1_generated_engine_service_update_engine_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "discoveryengine_v1_generated_EngineService_DeleteEngine_sync", + "regionTag": "discoveryengine_v1_generated_EngineService_UpdateEngine_sync", "segments": [ { "end": 55, @@ -3733,13 +6176,13 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 45, + "end": 49, "start": 41, "type": "REQUEST_INITIALIZATION" }, { "end": 52, - "start": 46, + "start": 50, "type": "REQUEST_EXECUTION" }, { @@ -3748,33 +6191,29 @@ "type": "RESPONSE_HANDLING" } ], - "title": "discoveryengine_v1_generated_engine_service_delete_engine_sync.py" + "title": "discoveryengine_v1_generated_engine_service_update_engine_sync.py" }, { "canonical": true, "clientMethod": { "async": true, "client": { - "fullName": "google.cloud.discoveryengine_v1.EngineServiceAsyncClient", - "shortName": "EngineServiceAsyncClient" + "fullName": "google.cloud.discoveryengine_v1.GroundedGenerationServiceAsyncClient", + "shortName": "GroundedGenerationServiceAsyncClient" }, - "fullName": "google.cloud.discoveryengine_v1.EngineServiceAsyncClient.get_engine", + "fullName": "google.cloud.discoveryengine_v1.GroundedGenerationServiceAsyncClient.check_grounding", "method": { - "fullName": "google.cloud.discoveryengine.v1.EngineService.GetEngine", + "fullName": "google.cloud.discoveryengine.v1.GroundedGenerationService.CheckGrounding", "service": { - "fullName": "google.cloud.discoveryengine.v1.EngineService", - "shortName": "EngineService" + "fullName": "google.cloud.discoveryengine.v1.GroundedGenerationService", + "shortName": "GroundedGenerationService" }, - "shortName": "GetEngine" + "shortName": "CheckGrounding" }, "parameters": [ { "name": "request", - "type": "google.cloud.discoveryengine_v1.types.GetEngineRequest" - }, - { - "name": "name", - "type": "str" + "type": "google.cloud.discoveryengine_v1.types.CheckGroundingRequest" }, { "name": "retry", @@ -3789,14 +6228,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.discoveryengine_v1.types.Engine", - "shortName": "get_engine" + "resultType": "google.cloud.discoveryengine_v1.types.CheckGroundingResponse", + "shortName": "check_grounding" }, - "description": "Sample for GetEngine", - "file": "discoveryengine_v1_generated_engine_service_get_engine_async.py", + "description": "Sample for CheckGrounding", + "file": 
"discoveryengine_v1_generated_grounded_generation_service_check_grounding_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "discoveryengine_v1_generated_EngineService_GetEngine_async", + "regionTag": "discoveryengine_v1_generated_GroundedGenerationService_CheckGrounding_async", "segments": [ { "end": 51, @@ -3829,32 +6268,28 @@ "type": "RESPONSE_HANDLING" } ], - "title": "discoveryengine_v1_generated_engine_service_get_engine_async.py" + "title": "discoveryengine_v1_generated_grounded_generation_service_check_grounding_async.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.discoveryengine_v1.EngineServiceClient", - "shortName": "EngineServiceClient" + "fullName": "google.cloud.discoveryengine_v1.GroundedGenerationServiceClient", + "shortName": "GroundedGenerationServiceClient" }, - "fullName": "google.cloud.discoveryengine_v1.EngineServiceClient.get_engine", + "fullName": "google.cloud.discoveryengine_v1.GroundedGenerationServiceClient.check_grounding", "method": { - "fullName": "google.cloud.discoveryengine.v1.EngineService.GetEngine", + "fullName": "google.cloud.discoveryengine.v1.GroundedGenerationService.CheckGrounding", "service": { - "fullName": "google.cloud.discoveryengine.v1.EngineService", - "shortName": "EngineService" + "fullName": "google.cloud.discoveryengine.v1.GroundedGenerationService", + "shortName": "GroundedGenerationService" }, - "shortName": "GetEngine" + "shortName": "CheckGrounding" }, "parameters": [ { "name": "request", - "type": "google.cloud.discoveryengine_v1.types.GetEngineRequest" - }, - { - "name": "name", - "type": "str" + "type": "google.cloud.discoveryengine_v1.types.CheckGroundingRequest" }, { "name": "retry", @@ -3869,14 +6304,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.discoveryengine_v1.types.Engine", - "shortName": "get_engine" + "resultType": "google.cloud.discoveryengine_v1.types.CheckGroundingResponse", + "shortName": "check_grounding" }, - "description": "Sample for GetEngine", - "file": "discoveryengine_v1_generated_engine_service_get_engine_sync.py", + "description": "Sample for CheckGrounding", + "file": "discoveryengine_v1_generated_grounded_generation_service_check_grounding_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "discoveryengine_v1_generated_EngineService_GetEngine_sync", + "regionTag": "discoveryengine_v1_generated_GroundedGenerationService_CheckGrounding_sync", "segments": [ { "end": 51, @@ -3909,32 +6344,32 @@ "type": "RESPONSE_HANDLING" } ], - "title": "discoveryengine_v1_generated_engine_service_get_engine_sync.py" + "title": "discoveryengine_v1_generated_grounded_generation_service_check_grounding_sync.py" }, { "canonical": true, "clientMethod": { "async": true, "client": { - "fullName": "google.cloud.discoveryengine_v1.EngineServiceAsyncClient", - "shortName": "EngineServiceAsyncClient" + "fullName": "google.cloud.discoveryengine_v1.ProjectServiceAsyncClient", + "shortName": "ProjectServiceAsyncClient" }, - "fullName": "google.cloud.discoveryengine_v1.EngineServiceAsyncClient.list_engines", + "fullName": "google.cloud.discoveryengine_v1.ProjectServiceAsyncClient.provision_project", "method": { - "fullName": "google.cloud.discoveryengine.v1.EngineService.ListEngines", + "fullName": "google.cloud.discoveryengine.v1.ProjectService.ProvisionProject", "service": { - "fullName": "google.cloud.discoveryengine.v1.EngineService", - "shortName": "EngineService" + "fullName": 
"google.cloud.discoveryengine.v1.ProjectService", + "shortName": "ProjectService" }, - "shortName": "ListEngines" + "shortName": "ProvisionProject" }, "parameters": [ { "name": "request", - "type": "google.cloud.discoveryengine_v1.types.ListEnginesRequest" + "type": "google.cloud.discoveryengine_v1.types.ProvisionProjectRequest" }, { - "name": "parent", + "name": "name", "type": "str" }, { @@ -3950,22 +6385,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.discoveryengine_v1.services.engine_service.pagers.ListEnginesAsyncPager", - "shortName": "list_engines" + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "provision_project" }, - "description": "Sample for ListEngines", - "file": "discoveryengine_v1_generated_engine_service_list_engines_async.py", + "description": "Sample for ProvisionProject", + "file": "discoveryengine_v1_generated_project_service_provision_project_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "discoveryengine_v1_generated_EngineService_ListEngines_async", + "regionTag": "discoveryengine_v1_generated_ProjectService_ProvisionProject_async", "segments": [ { - "end": 52, + "end": 57, "start": 27, "type": "FULL" }, { - "end": 52, + "end": 57, "start": 27, "type": "SHORT" }, @@ -3975,46 +6410,46 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 45, + "end": 47, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 48, - "start": 46, + "end": 54, + "start": 48, "type": "REQUEST_EXECUTION" }, { - "end": 53, - "start": 49, + "end": 58, + "start": 55, "type": "RESPONSE_HANDLING" } ], - "title": "discoveryengine_v1_generated_engine_service_list_engines_async.py" + "title": "discoveryengine_v1_generated_project_service_provision_project_async.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.discoveryengine_v1.EngineServiceClient", - "shortName": "EngineServiceClient" + "fullName": "google.cloud.discoveryengine_v1.ProjectServiceClient", + "shortName": "ProjectServiceClient" }, - "fullName": "google.cloud.discoveryengine_v1.EngineServiceClient.list_engines", + "fullName": "google.cloud.discoveryengine_v1.ProjectServiceClient.provision_project", "method": { - "fullName": "google.cloud.discoveryengine.v1.EngineService.ListEngines", + "fullName": "google.cloud.discoveryengine.v1.ProjectService.ProvisionProject", "service": { - "fullName": "google.cloud.discoveryengine.v1.EngineService", - "shortName": "EngineService" + "fullName": "google.cloud.discoveryengine.v1.ProjectService", + "shortName": "ProjectService" }, - "shortName": "ListEngines" + "shortName": "ProvisionProject" }, "parameters": [ { "name": "request", - "type": "google.cloud.discoveryengine_v1.types.ListEnginesRequest" + "type": "google.cloud.discoveryengine_v1.types.ProvisionProjectRequest" }, { - "name": "parent", + "name": "name", "type": "str" }, { @@ -4030,22 +6465,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.discoveryengine_v1.services.engine_service.pagers.ListEnginesPager", - "shortName": "list_engines" + "resultType": "google.api_core.operation.Operation", + "shortName": "provision_project" }, - "description": "Sample for ListEngines", - "file": "discoveryengine_v1_generated_engine_service_list_engines_sync.py", + "description": "Sample for ProvisionProject", + "file": "discoveryengine_v1_generated_project_service_provision_project_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": 
"discoveryengine_v1_generated_EngineService_ListEngines_sync", + "regionTag": "discoveryengine_v1_generated_ProjectService_ProvisionProject_sync", "segments": [ { - "end": 52, + "end": 57, "start": 27, "type": "FULL" }, { - "end": 52, + "end": 57, "start": 27, "type": "SHORT" }, @@ -4055,52 +6490,44 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 45, + "end": 47, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 48, - "start": 46, + "end": 54, + "start": 48, "type": "REQUEST_EXECUTION" }, { - "end": 53, - "start": 49, + "end": 58, + "start": 55, "type": "RESPONSE_HANDLING" } ], - "title": "discoveryengine_v1_generated_engine_service_list_engines_sync.py" + "title": "discoveryengine_v1_generated_project_service_provision_project_sync.py" }, { "canonical": true, "clientMethod": { "async": true, "client": { - "fullName": "google.cloud.discoveryengine_v1.EngineServiceAsyncClient", - "shortName": "EngineServiceAsyncClient" + "fullName": "google.cloud.discoveryengine_v1.RankServiceAsyncClient", + "shortName": "RankServiceAsyncClient" }, - "fullName": "google.cloud.discoveryengine_v1.EngineServiceAsyncClient.update_engine", + "fullName": "google.cloud.discoveryengine_v1.RankServiceAsyncClient.rank", "method": { - "fullName": "google.cloud.discoveryengine.v1.EngineService.UpdateEngine", + "fullName": "google.cloud.discoveryengine.v1.RankService.Rank", "service": { - "fullName": "google.cloud.discoveryengine.v1.EngineService", - "shortName": "EngineService" + "fullName": "google.cloud.discoveryengine.v1.RankService", + "shortName": "RankService" }, - "shortName": "UpdateEngine" + "shortName": "Rank" }, "parameters": [ { "name": "request", - "type": "google.cloud.discoveryengine_v1.types.UpdateEngineRequest" - }, - { - "name": "engine", - "type": "google.cloud.discoveryengine_v1.types.Engine" - }, - { - "name": "update_mask", - "type": "google.protobuf.field_mask_pb2.FieldMask" + "type": "google.cloud.discoveryengine_v1.types.RankRequest" }, { "name": "retry", @@ -4115,22 +6542,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.discoveryengine_v1.types.Engine", - "shortName": "update_engine" + "resultType": "google.cloud.discoveryengine_v1.types.RankResponse", + "shortName": "rank" }, - "description": "Sample for UpdateEngine", - "file": "discoveryengine_v1_generated_engine_service_update_engine_async.py", + "description": "Sample for Rank", + "file": "discoveryengine_v1_generated_rank_service_rank_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "discoveryengine_v1_generated_EngineService_UpdateEngine_async", + "regionTag": "discoveryengine_v1_generated_RankService_Rank_async", "segments": [ { - "end": 55, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 55, + "end": 51, "start": 27, "type": "SHORT" }, @@ -4140,51 +6567,43 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 49, + "end": 45, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 52, - "start": 50, + "end": 48, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 56, - "start": 53, + "end": 52, + "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "discoveryengine_v1_generated_engine_service_update_engine_async.py" + "title": "discoveryengine_v1_generated_rank_service_rank_async.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.discoveryengine_v1.EngineServiceClient", - "shortName": "EngineServiceClient" + "fullName": "google.cloud.discoveryengine_v1.RankServiceClient", + "shortName": "RankServiceClient" }, - 
"fullName": "google.cloud.discoveryengine_v1.EngineServiceClient.update_engine", + "fullName": "google.cloud.discoveryengine_v1.RankServiceClient.rank", "method": { - "fullName": "google.cloud.discoveryengine.v1.EngineService.UpdateEngine", + "fullName": "google.cloud.discoveryengine.v1.RankService.Rank", "service": { - "fullName": "google.cloud.discoveryengine.v1.EngineService", - "shortName": "EngineService" + "fullName": "google.cloud.discoveryengine.v1.RankService", + "shortName": "RankService" }, - "shortName": "UpdateEngine" + "shortName": "Rank" }, "parameters": [ { "name": "request", - "type": "google.cloud.discoveryengine_v1.types.UpdateEngineRequest" - }, - { - "name": "engine", - "type": "google.cloud.discoveryengine_v1.types.Engine" - }, - { - "name": "update_mask", - "type": "google.protobuf.field_mask_pb2.FieldMask" + "type": "google.cloud.discoveryengine_v1.types.RankRequest" }, { "name": "retry", @@ -4199,22 +6618,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.discoveryengine_v1.types.Engine", - "shortName": "update_engine" + "resultType": "google.cloud.discoveryengine_v1.types.RankResponse", + "shortName": "rank" }, - "description": "Sample for UpdateEngine", - "file": "discoveryengine_v1_generated_engine_service_update_engine_sync.py", + "description": "Sample for Rank", + "file": "discoveryengine_v1_generated_rank_service_rank_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "discoveryengine_v1_generated_EngineService_UpdateEngine_sync", + "regionTag": "discoveryengine_v1_generated_RankService_Rank_sync", "segments": [ { - "end": 55, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 55, + "end": 51, "start": 27, "type": "SHORT" }, @@ -4224,22 +6643,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 49, + "end": 45, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 52, - "start": 50, + "end": 48, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 56, - "start": 53, + "end": 52, + "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "discoveryengine_v1_generated_engine_service_update_engine_sync.py" + "title": "discoveryengine_v1_generated_rank_service_rank_sync.py" }, { "canonical": true, diff --git a/packages/google-cloud-discoveryengine/samples/generated_samples/snippet_metadata_google.cloud.discoveryengine.v1alpha.json b/packages/google-cloud-discoveryengine/samples/generated_samples/snippet_metadata_google.cloud.discoveryengine.v1alpha.json index d424f375fec4..649ce18d3f94 100644 --- a/packages/google-cloud-discoveryengine/samples/generated_samples/snippet_metadata_google.cloud.discoveryengine.v1alpha.json +++ b/packages/google-cloud-discoveryengine/samples/generated_samples/snippet_metadata_google.cloud.discoveryengine.v1alpha.json @@ -1106,6 +1106,829 @@ ], "title": "discoveryengine_v1alpha_generated_completion_service_purge_suggestion_deny_list_entries_sync.py" }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.discoveryengine_v1alpha.ControlServiceAsyncClient", + "shortName": "ControlServiceAsyncClient" + }, + "fullName": "google.cloud.discoveryengine_v1alpha.ControlServiceAsyncClient.create_control", + "method": { + "fullName": "google.cloud.discoveryengine.v1alpha.ControlService.CreateControl", + "service": { + "fullName": "google.cloud.discoveryengine.v1alpha.ControlService", + "shortName": "ControlService" + }, + "shortName": "CreateControl" + }, + "parameters": [ + { + "name": "request", + "type": 
"google.cloud.discoveryengine_v1alpha.types.CreateControlRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "control", + "type": "google.cloud.discoveryengine_v1alpha.types.Control" + }, + { + "name": "control_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.discoveryengine_v1alpha.types.Control", + "shortName": "create_control" + }, + "description": "Sample for CreateControl", + "file": "discoveryengine_v1alpha_generated_control_service_create_control_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "discoveryengine_v1alpha_generated_ControlService_CreateControl_async", + "segments": [ + { + "end": 60, + "start": 27, + "type": "FULL" + }, + { + "end": 60, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 54, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 57, + "start": 55, + "type": "REQUEST_EXECUTION" + }, + { + "end": 61, + "start": 58, + "type": "RESPONSE_HANDLING" + } + ], + "title": "discoveryengine_v1alpha_generated_control_service_create_control_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.discoveryengine_v1alpha.ControlServiceClient", + "shortName": "ControlServiceClient" + }, + "fullName": "google.cloud.discoveryengine_v1alpha.ControlServiceClient.create_control", + "method": { + "fullName": "google.cloud.discoveryengine.v1alpha.ControlService.CreateControl", + "service": { + "fullName": "google.cloud.discoveryengine.v1alpha.ControlService", + "shortName": "ControlService" + }, + "shortName": "CreateControl" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.discoveryengine_v1alpha.types.CreateControlRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "control", + "type": "google.cloud.discoveryengine_v1alpha.types.Control" + }, + { + "name": "control_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.discoveryengine_v1alpha.types.Control", + "shortName": "create_control" + }, + "description": "Sample for CreateControl", + "file": "discoveryengine_v1alpha_generated_control_service_create_control_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "discoveryengine_v1alpha_generated_ControlService_CreateControl_sync", + "segments": [ + { + "end": 60, + "start": 27, + "type": "FULL" + }, + { + "end": 60, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 54, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 57, + "start": 55, + "type": "REQUEST_EXECUTION" + }, + { + "end": 61, + "start": 58, + "type": "RESPONSE_HANDLING" + } + ], + "title": "discoveryengine_v1alpha_generated_control_service_create_control_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.discoveryengine_v1alpha.ControlServiceAsyncClient", + "shortName": "ControlServiceAsyncClient" + }, + "fullName": "google.cloud.discoveryengine_v1alpha.ControlServiceAsyncClient.delete_control", + "method": { + "fullName": 
"google.cloud.discoveryengine.v1alpha.ControlService.DeleteControl", + "service": { + "fullName": "google.cloud.discoveryengine.v1alpha.ControlService", + "shortName": "ControlService" + }, + "shortName": "DeleteControl" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.discoveryengine_v1alpha.types.DeleteControlRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "shortName": "delete_control" + }, + "description": "Sample for DeleteControl", + "file": "discoveryengine_v1alpha_generated_control_service_delete_control_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "discoveryengine_v1alpha_generated_ControlService_DeleteControl_async", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "discoveryengine_v1alpha_generated_control_service_delete_control_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.discoveryengine_v1alpha.ControlServiceClient", + "shortName": "ControlServiceClient" + }, + "fullName": "google.cloud.discoveryengine_v1alpha.ControlServiceClient.delete_control", + "method": { + "fullName": "google.cloud.discoveryengine.v1alpha.ControlService.DeleteControl", + "service": { + "fullName": "google.cloud.discoveryengine.v1alpha.ControlService", + "shortName": "ControlService" + }, + "shortName": "DeleteControl" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.discoveryengine_v1alpha.types.DeleteControlRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "shortName": "delete_control" + }, + "description": "Sample for DeleteControl", + "file": "discoveryengine_v1alpha_generated_control_service_delete_control_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "discoveryengine_v1alpha_generated_ControlService_DeleteControl_sync", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "discoveryengine_v1alpha_generated_control_service_delete_control_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.discoveryengine_v1alpha.ControlServiceAsyncClient", + "shortName": "ControlServiceAsyncClient" + }, + "fullName": "google.cloud.discoveryengine_v1alpha.ControlServiceAsyncClient.get_control", + "method": { + "fullName": "google.cloud.discoveryengine.v1alpha.ControlService.GetControl", + "service": { + "fullName": "google.cloud.discoveryengine.v1alpha.ControlService", + "shortName": "ControlService" + }, + "shortName": "GetControl" + 
}, + "parameters": [ + { + "name": "request", + "type": "google.cloud.discoveryengine_v1alpha.types.GetControlRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.discoveryengine_v1alpha.types.Control", + "shortName": "get_control" + }, + "description": "Sample for GetControl", + "file": "discoveryengine_v1alpha_generated_control_service_get_control_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "discoveryengine_v1alpha_generated_ControlService_GetControl_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "discoveryengine_v1alpha_generated_control_service_get_control_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.discoveryengine_v1alpha.ControlServiceClient", + "shortName": "ControlServiceClient" + }, + "fullName": "google.cloud.discoveryengine_v1alpha.ControlServiceClient.get_control", + "method": { + "fullName": "google.cloud.discoveryengine.v1alpha.ControlService.GetControl", + "service": { + "fullName": "google.cloud.discoveryengine.v1alpha.ControlService", + "shortName": "ControlService" + }, + "shortName": "GetControl" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.discoveryengine_v1alpha.types.GetControlRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.discoveryengine_v1alpha.types.Control", + "shortName": "get_control" + }, + "description": "Sample for GetControl", + "file": "discoveryengine_v1alpha_generated_control_service_get_control_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "discoveryengine_v1alpha_generated_ControlService_GetControl_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "discoveryengine_v1alpha_generated_control_service_get_control_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.discoveryengine_v1alpha.ControlServiceAsyncClient", + "shortName": "ControlServiceAsyncClient" + }, + "fullName": "google.cloud.discoveryengine_v1alpha.ControlServiceAsyncClient.list_controls", + "method": { + "fullName": "google.cloud.discoveryengine.v1alpha.ControlService.ListControls", + "service": { + "fullName": "google.cloud.discoveryengine.v1alpha.ControlService", + "shortName": "ControlService" + }, + "shortName": "ListControls" + }, + "parameters": [ + { + "name": "request", + "type": 
"google.cloud.discoveryengine_v1alpha.types.ListControlsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.discoveryengine_v1alpha.services.control_service.pagers.ListControlsAsyncPager", + "shortName": "list_controls" + }, + "description": "Sample for ListControls", + "file": "discoveryengine_v1alpha_generated_control_service_list_controls_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "discoveryengine_v1alpha_generated_ControlService_ListControls_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "discoveryengine_v1alpha_generated_control_service_list_controls_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.discoveryengine_v1alpha.ControlServiceClient", + "shortName": "ControlServiceClient" + }, + "fullName": "google.cloud.discoveryengine_v1alpha.ControlServiceClient.list_controls", + "method": { + "fullName": "google.cloud.discoveryengine.v1alpha.ControlService.ListControls", + "service": { + "fullName": "google.cloud.discoveryengine.v1alpha.ControlService", + "shortName": "ControlService" + }, + "shortName": "ListControls" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.discoveryengine_v1alpha.types.ListControlsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.discoveryengine_v1alpha.services.control_service.pagers.ListControlsPager", + "shortName": "list_controls" + }, + "description": "Sample for ListControls", + "file": "discoveryengine_v1alpha_generated_control_service_list_controls_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "discoveryengine_v1alpha_generated_ControlService_ListControls_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "discoveryengine_v1alpha_generated_control_service_list_controls_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.discoveryengine_v1alpha.ControlServiceAsyncClient", + "shortName": "ControlServiceAsyncClient" + }, + "fullName": "google.cloud.discoveryengine_v1alpha.ControlServiceAsyncClient.update_control", + "method": { + "fullName": "google.cloud.discoveryengine.v1alpha.ControlService.UpdateControl", + "service": { + "fullName": "google.cloud.discoveryengine.v1alpha.ControlService", + "shortName": "ControlService" + }, + "shortName": "UpdateControl" + }, + 
"parameters": [ + { + "name": "request", + "type": "google.cloud.discoveryengine_v1alpha.types.UpdateControlRequest" + }, + { + "name": "control", + "type": "google.cloud.discoveryengine_v1alpha.types.Control" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.discoveryengine_v1alpha.types.Control", + "shortName": "update_control" + }, + "description": "Sample for UpdateControl", + "file": "discoveryengine_v1alpha_generated_control_service_update_control_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "discoveryengine_v1alpha_generated_ControlService_UpdateControl_async", + "segments": [ + { + "end": 58, + "start": 27, + "type": "FULL" + }, + { + "end": 58, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 52, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 55, + "start": 53, + "type": "REQUEST_EXECUTION" + }, + { + "end": 59, + "start": 56, + "type": "RESPONSE_HANDLING" + } + ], + "title": "discoveryengine_v1alpha_generated_control_service_update_control_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.discoveryengine_v1alpha.ControlServiceClient", + "shortName": "ControlServiceClient" + }, + "fullName": "google.cloud.discoveryengine_v1alpha.ControlServiceClient.update_control", + "method": { + "fullName": "google.cloud.discoveryengine.v1alpha.ControlService.UpdateControl", + "service": { + "fullName": "google.cloud.discoveryengine.v1alpha.ControlService", + "shortName": "ControlService" + }, + "shortName": "UpdateControl" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.discoveryengine_v1alpha.types.UpdateControlRequest" + }, + { + "name": "control", + "type": "google.cloud.discoveryengine_v1alpha.types.Control" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.discoveryengine_v1alpha.types.Control", + "shortName": "update_control" + }, + "description": "Sample for UpdateControl", + "file": "discoveryengine_v1alpha_generated_control_service_update_control_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "discoveryengine_v1alpha_generated_ControlService_UpdateControl_sync", + "segments": [ + { + "end": 58, + "start": 27, + "type": "FULL" + }, + { + "end": 58, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 52, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 55, + "start": 53, + "type": "REQUEST_EXECUTION" + }, + { + "end": 59, + "start": 56, + "type": "RESPONSE_HANDLING" + } + ], + "title": "discoveryengine_v1alpha_generated_control_service_update_control_sync.py" + }, { "canonical": true, "clientMethod": { @@ -9065,6 +9888,159 @@ ], "title": "discoveryengine_v1alpha_generated_search_service_search_sync.py" }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": 
"google.cloud.discoveryengine_v1alpha.SearchTuningServiceAsyncClient", + "shortName": "SearchTuningServiceAsyncClient" + }, + "fullName": "google.cloud.discoveryengine_v1alpha.SearchTuningServiceAsyncClient.list_custom_models", + "method": { + "fullName": "google.cloud.discoveryengine.v1alpha.SearchTuningService.ListCustomModels", + "service": { + "fullName": "google.cloud.discoveryengine.v1alpha.SearchTuningService", + "shortName": "SearchTuningService" + }, + "shortName": "ListCustomModels" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.discoveryengine_v1alpha.types.ListCustomModelsRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.discoveryengine_v1alpha.types.ListCustomModelsResponse", + "shortName": "list_custom_models" + }, + "description": "Sample for ListCustomModels", + "file": "discoveryengine_v1alpha_generated_search_tuning_service_list_custom_models_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "discoveryengine_v1alpha_generated_SearchTuningService_ListCustomModels_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "discoveryengine_v1alpha_generated_search_tuning_service_list_custom_models_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.discoveryengine_v1alpha.SearchTuningServiceClient", + "shortName": "SearchTuningServiceClient" + }, + "fullName": "google.cloud.discoveryengine_v1alpha.SearchTuningServiceClient.list_custom_models", + "method": { + "fullName": "google.cloud.discoveryengine.v1alpha.SearchTuningService.ListCustomModels", + "service": { + "fullName": "google.cloud.discoveryengine.v1alpha.SearchTuningService", + "shortName": "SearchTuningService" + }, + "shortName": "ListCustomModels" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.discoveryengine_v1alpha.types.ListCustomModelsRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.discoveryengine_v1alpha.types.ListCustomModelsResponse", + "shortName": "list_custom_models" + }, + "description": "Sample for ListCustomModels", + "file": "discoveryengine_v1alpha_generated_search_tuning_service_list_custom_models_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "discoveryengine_v1alpha_generated_SearchTuningService_ListCustomModels_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "discoveryengine_v1alpha_generated_search_tuning_service_list_custom_models_sync.py" + }, { 
"canonical": true, "clientMethod": { diff --git a/packages/google-cloud-discoveryengine/samples/generated_samples/snippet_metadata_google.cloud.discoveryengine.v1beta.json b/packages/google-cloud-discoveryengine/samples/generated_samples/snippet_metadata_google.cloud.discoveryengine.v1beta.json index 3e0ce89ad871..7f750dde8f9c 100644 --- a/packages/google-cloud-discoveryengine/samples/generated_samples/snippet_metadata_google.cloud.discoveryengine.v1beta.json +++ b/packages/google-cloud-discoveryengine/samples/generated_samples/snippet_metadata_google.cloud.discoveryengine.v1beta.json @@ -470,6 +470,829 @@ ], "title": "discoveryengine_v1beta_generated_completion_service_purge_suggestion_deny_list_entries_sync.py" }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.discoveryengine_v1beta.ControlServiceAsyncClient", + "shortName": "ControlServiceAsyncClient" + }, + "fullName": "google.cloud.discoveryengine_v1beta.ControlServiceAsyncClient.create_control", + "method": { + "fullName": "google.cloud.discoveryengine.v1beta.ControlService.CreateControl", + "service": { + "fullName": "google.cloud.discoveryengine.v1beta.ControlService", + "shortName": "ControlService" + }, + "shortName": "CreateControl" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.discoveryengine_v1beta.types.CreateControlRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "control", + "type": "google.cloud.discoveryengine_v1beta.types.Control" + }, + { + "name": "control_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.discoveryengine_v1beta.types.Control", + "shortName": "create_control" + }, + "description": "Sample for CreateControl", + "file": "discoveryengine_v1beta_generated_control_service_create_control_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "discoveryengine_v1beta_generated_ControlService_CreateControl_async", + "segments": [ + { + "end": 60, + "start": 27, + "type": "FULL" + }, + { + "end": 60, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 54, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 57, + "start": 55, + "type": "REQUEST_EXECUTION" + }, + { + "end": 61, + "start": 58, + "type": "RESPONSE_HANDLING" + } + ], + "title": "discoveryengine_v1beta_generated_control_service_create_control_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.discoveryengine_v1beta.ControlServiceClient", + "shortName": "ControlServiceClient" + }, + "fullName": "google.cloud.discoveryengine_v1beta.ControlServiceClient.create_control", + "method": { + "fullName": "google.cloud.discoveryengine.v1beta.ControlService.CreateControl", + "service": { + "fullName": "google.cloud.discoveryengine.v1beta.ControlService", + "shortName": "ControlService" + }, + "shortName": "CreateControl" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.discoveryengine_v1beta.types.CreateControlRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "control", + "type": "google.cloud.discoveryengine_v1beta.types.Control" + }, + { + "name": "control_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": 
"timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.discoveryengine_v1beta.types.Control", + "shortName": "create_control" + }, + "description": "Sample for CreateControl", + "file": "discoveryengine_v1beta_generated_control_service_create_control_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "discoveryengine_v1beta_generated_ControlService_CreateControl_sync", + "segments": [ + { + "end": 60, + "start": 27, + "type": "FULL" + }, + { + "end": 60, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 54, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 57, + "start": 55, + "type": "REQUEST_EXECUTION" + }, + { + "end": 61, + "start": 58, + "type": "RESPONSE_HANDLING" + } + ], + "title": "discoveryengine_v1beta_generated_control_service_create_control_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.discoveryengine_v1beta.ControlServiceAsyncClient", + "shortName": "ControlServiceAsyncClient" + }, + "fullName": "google.cloud.discoveryengine_v1beta.ControlServiceAsyncClient.delete_control", + "method": { + "fullName": "google.cloud.discoveryengine.v1beta.ControlService.DeleteControl", + "service": { + "fullName": "google.cloud.discoveryengine.v1beta.ControlService", + "shortName": "ControlService" + }, + "shortName": "DeleteControl" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.discoveryengine_v1beta.types.DeleteControlRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "shortName": "delete_control" + }, + "description": "Sample for DeleteControl", + "file": "discoveryengine_v1beta_generated_control_service_delete_control_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "discoveryengine_v1beta_generated_ControlService_DeleteControl_async", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "discoveryengine_v1beta_generated_control_service_delete_control_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.discoveryengine_v1beta.ControlServiceClient", + "shortName": "ControlServiceClient" + }, + "fullName": "google.cloud.discoveryengine_v1beta.ControlServiceClient.delete_control", + "method": { + "fullName": "google.cloud.discoveryengine.v1beta.ControlService.DeleteControl", + "service": { + "fullName": "google.cloud.discoveryengine.v1beta.ControlService", + "shortName": "ControlService" + }, + "shortName": "DeleteControl" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.discoveryengine_v1beta.types.DeleteControlRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "shortName": 
"delete_control" + }, + "description": "Sample for DeleteControl", + "file": "discoveryengine_v1beta_generated_control_service_delete_control_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "discoveryengine_v1beta_generated_ControlService_DeleteControl_sync", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "discoveryengine_v1beta_generated_control_service_delete_control_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.discoveryengine_v1beta.ControlServiceAsyncClient", + "shortName": "ControlServiceAsyncClient" + }, + "fullName": "google.cloud.discoveryengine_v1beta.ControlServiceAsyncClient.get_control", + "method": { + "fullName": "google.cloud.discoveryengine.v1beta.ControlService.GetControl", + "service": { + "fullName": "google.cloud.discoveryengine.v1beta.ControlService", + "shortName": "ControlService" + }, + "shortName": "GetControl" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.discoveryengine_v1beta.types.GetControlRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.discoveryengine_v1beta.types.Control", + "shortName": "get_control" + }, + "description": "Sample for GetControl", + "file": "discoveryengine_v1beta_generated_control_service_get_control_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "discoveryengine_v1beta_generated_ControlService_GetControl_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "discoveryengine_v1beta_generated_control_service_get_control_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.discoveryengine_v1beta.ControlServiceClient", + "shortName": "ControlServiceClient" + }, + "fullName": "google.cloud.discoveryengine_v1beta.ControlServiceClient.get_control", + "method": { + "fullName": "google.cloud.discoveryengine.v1beta.ControlService.GetControl", + "service": { + "fullName": "google.cloud.discoveryengine.v1beta.ControlService", + "shortName": "ControlService" + }, + "shortName": "GetControl" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.discoveryengine_v1beta.types.GetControlRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.discoveryengine_v1beta.types.Control", + "shortName": "get_control" + }, + "description": "Sample for GetControl", + "file": 
"discoveryengine_v1beta_generated_control_service_get_control_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "discoveryengine_v1beta_generated_ControlService_GetControl_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "discoveryengine_v1beta_generated_control_service_get_control_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.discoveryengine_v1beta.ControlServiceAsyncClient", + "shortName": "ControlServiceAsyncClient" + }, + "fullName": "google.cloud.discoveryengine_v1beta.ControlServiceAsyncClient.list_controls", + "method": { + "fullName": "google.cloud.discoveryengine.v1beta.ControlService.ListControls", + "service": { + "fullName": "google.cloud.discoveryengine.v1beta.ControlService", + "shortName": "ControlService" + }, + "shortName": "ListControls" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.discoveryengine_v1beta.types.ListControlsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.discoveryengine_v1beta.services.control_service.pagers.ListControlsAsyncPager", + "shortName": "list_controls" + }, + "description": "Sample for ListControls", + "file": "discoveryengine_v1beta_generated_control_service_list_controls_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "discoveryengine_v1beta_generated_ControlService_ListControls_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "discoveryengine_v1beta_generated_control_service_list_controls_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.discoveryengine_v1beta.ControlServiceClient", + "shortName": "ControlServiceClient" + }, + "fullName": "google.cloud.discoveryengine_v1beta.ControlServiceClient.list_controls", + "method": { + "fullName": "google.cloud.discoveryengine.v1beta.ControlService.ListControls", + "service": { + "fullName": "google.cloud.discoveryengine.v1beta.ControlService", + "shortName": "ControlService" + }, + "shortName": "ListControls" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.discoveryengine_v1beta.types.ListControlsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.discoveryengine_v1beta.services.control_service.pagers.ListControlsPager", + "shortName": "list_controls" + }, + "description": "Sample for 
ListControls", + "file": "discoveryengine_v1beta_generated_control_service_list_controls_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "discoveryengine_v1beta_generated_ControlService_ListControls_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "discoveryengine_v1beta_generated_control_service_list_controls_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.discoveryengine_v1beta.ControlServiceAsyncClient", + "shortName": "ControlServiceAsyncClient" + }, + "fullName": "google.cloud.discoveryengine_v1beta.ControlServiceAsyncClient.update_control", + "method": { + "fullName": "google.cloud.discoveryengine.v1beta.ControlService.UpdateControl", + "service": { + "fullName": "google.cloud.discoveryengine.v1beta.ControlService", + "shortName": "ControlService" + }, + "shortName": "UpdateControl" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.discoveryengine_v1beta.types.UpdateControlRequest" + }, + { + "name": "control", + "type": "google.cloud.discoveryengine_v1beta.types.Control" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.discoveryengine_v1beta.types.Control", + "shortName": "update_control" + }, + "description": "Sample for UpdateControl", + "file": "discoveryengine_v1beta_generated_control_service_update_control_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "discoveryengine_v1beta_generated_ControlService_UpdateControl_async", + "segments": [ + { + "end": 58, + "start": 27, + "type": "FULL" + }, + { + "end": 58, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 52, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 55, + "start": 53, + "type": "REQUEST_EXECUTION" + }, + { + "end": 59, + "start": 56, + "type": "RESPONSE_HANDLING" + } + ], + "title": "discoveryengine_v1beta_generated_control_service_update_control_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.discoveryengine_v1beta.ControlServiceClient", + "shortName": "ControlServiceClient" + }, + "fullName": "google.cloud.discoveryengine_v1beta.ControlServiceClient.update_control", + "method": { + "fullName": "google.cloud.discoveryengine.v1beta.ControlService.UpdateControl", + "service": { + "fullName": "google.cloud.discoveryengine.v1beta.ControlService", + "shortName": "ControlService" + }, + "shortName": "UpdateControl" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.discoveryengine_v1beta.types.UpdateControlRequest" + }, + { + "name": "control", + "type": "google.cloud.discoveryengine_v1beta.types.Control" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": 
"timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.discoveryengine_v1beta.types.Control", + "shortName": "update_control" + }, + "description": "Sample for UpdateControl", + "file": "discoveryengine_v1beta_generated_control_service_update_control_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "discoveryengine_v1beta_generated_ControlService_UpdateControl_sync", + "segments": [ + { + "end": 58, + "start": 27, + "type": "FULL" + }, + { + "end": 58, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 52, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 55, + "start": 53, + "type": "REQUEST_EXECUTION" + }, + { + "end": 59, + "start": 56, + "type": "RESPONSE_HANDLING" + } + ], + "title": "discoveryengine_v1beta_generated_control_service_update_control_sync.py" + }, { "canonical": true, "clientMethod": { @@ -5762,41 +6585,194 @@ "type": "REQUEST_EXECUTION" }, { - "end": 56, - "start": 53, + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "discoveryengine_v1beta_generated_engine_service_update_engine_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.discoveryengine_v1beta.EngineServiceClient", + "shortName": "EngineServiceClient" + }, + "fullName": "google.cloud.discoveryengine_v1beta.EngineServiceClient.update_engine", + "method": { + "fullName": "google.cloud.discoveryengine.v1beta.EngineService.UpdateEngine", + "service": { + "fullName": "google.cloud.discoveryengine.v1beta.EngineService", + "shortName": "EngineService" + }, + "shortName": "UpdateEngine" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.discoveryengine_v1beta.types.UpdateEngineRequest" + }, + { + "name": "engine", + "type": "google.cloud.discoveryengine_v1beta.types.Engine" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.discoveryengine_v1beta.types.Engine", + "shortName": "update_engine" + }, + "description": "Sample for UpdateEngine", + "file": "discoveryengine_v1beta_generated_engine_service_update_engine_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "discoveryengine_v1beta_generated_EngineService_UpdateEngine_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 49, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 50, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "discoveryengine_v1beta_generated_engine_service_update_engine_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.discoveryengine_v1beta.GroundedGenerationServiceAsyncClient", + "shortName": "GroundedGenerationServiceAsyncClient" + }, + "fullName": "google.cloud.discoveryengine_v1beta.GroundedGenerationServiceAsyncClient.check_grounding", + "method": { + "fullName": "google.cloud.discoveryengine.v1beta.GroundedGenerationService.CheckGrounding", 
+ "service": { + "fullName": "google.cloud.discoveryengine.v1beta.GroundedGenerationService", + "shortName": "GroundedGenerationService" + }, + "shortName": "CheckGrounding" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.discoveryengine_v1beta.types.CheckGroundingRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.discoveryengine_v1beta.types.CheckGroundingResponse", + "shortName": "check_grounding" + }, + "description": "Sample for CheckGrounding", + "file": "discoveryengine_v1beta_generated_grounded_generation_service_check_grounding_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "discoveryengine_v1beta_generated_GroundedGenerationService_CheckGrounding_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "discoveryengine_v1beta_generated_engine_service_update_engine_async.py" + "title": "discoveryengine_v1beta_generated_grounded_generation_service_check_grounding_async.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.discoveryengine_v1beta.EngineServiceClient", - "shortName": "EngineServiceClient" + "fullName": "google.cloud.discoveryengine_v1beta.GroundedGenerationServiceClient", + "shortName": "GroundedGenerationServiceClient" }, - "fullName": "google.cloud.discoveryengine_v1beta.EngineServiceClient.update_engine", + "fullName": "google.cloud.discoveryengine_v1beta.GroundedGenerationServiceClient.check_grounding", "method": { - "fullName": "google.cloud.discoveryengine.v1beta.EngineService.UpdateEngine", + "fullName": "google.cloud.discoveryengine.v1beta.GroundedGenerationService.CheckGrounding", "service": { - "fullName": "google.cloud.discoveryengine.v1beta.EngineService", - "shortName": "EngineService" + "fullName": "google.cloud.discoveryengine.v1beta.GroundedGenerationService", + "shortName": "GroundedGenerationService" }, - "shortName": "UpdateEngine" + "shortName": "CheckGrounding" }, "parameters": [ { "name": "request", - "type": "google.cloud.discoveryengine_v1beta.types.UpdateEngineRequest" - }, - { - "name": "engine", - "type": "google.cloud.discoveryengine_v1beta.types.Engine" - }, - { - "name": "update_mask", - "type": "google.protobuf.field_mask_pb2.FieldMask" + "type": "google.cloud.discoveryengine_v1beta.types.CheckGroundingRequest" }, { "name": "retry", @@ -5811,22 +6787,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.discoveryengine_v1beta.types.Engine", - "shortName": "update_engine" + "resultType": "google.cloud.discoveryengine_v1beta.types.CheckGroundingResponse", + "shortName": "check_grounding" }, - "description": "Sample for UpdateEngine", - "file": "discoveryengine_v1beta_generated_engine_service_update_engine_sync.py", + "description": "Sample for CheckGrounding", + "file": "discoveryengine_v1beta_generated_grounded_generation_service_check_grounding_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "discoveryengine_v1beta_generated_EngineService_UpdateEngine_sync", + 
"regionTag": "discoveryengine_v1beta_generated_GroundedGenerationService_CheckGrounding_sync", "segments": [ { - "end": 55, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 55, + "end": 51, "start": 27, "type": "SHORT" }, @@ -5836,44 +6812,48 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 49, + "end": 45, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 52, - "start": 50, + "end": 48, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 56, - "start": 53, + "end": 52, + "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "discoveryengine_v1beta_generated_engine_service_update_engine_sync.py" + "title": "discoveryengine_v1beta_generated_grounded_generation_service_check_grounding_sync.py" }, { "canonical": true, "clientMethod": { "async": true, "client": { - "fullName": "google.cloud.discoveryengine_v1beta.GroundedGenerationServiceAsyncClient", - "shortName": "GroundedGenerationServiceAsyncClient" + "fullName": "google.cloud.discoveryengine_v1beta.ProjectServiceAsyncClient", + "shortName": "ProjectServiceAsyncClient" }, - "fullName": "google.cloud.discoveryengine_v1beta.GroundedGenerationServiceAsyncClient.check_grounding", + "fullName": "google.cloud.discoveryengine_v1beta.ProjectServiceAsyncClient.provision_project", "method": { - "fullName": "google.cloud.discoveryengine.v1beta.GroundedGenerationService.CheckGrounding", + "fullName": "google.cloud.discoveryengine.v1beta.ProjectService.ProvisionProject", "service": { - "fullName": "google.cloud.discoveryengine.v1beta.GroundedGenerationService", - "shortName": "GroundedGenerationService" + "fullName": "google.cloud.discoveryengine.v1beta.ProjectService", + "shortName": "ProjectService" }, - "shortName": "CheckGrounding" + "shortName": "ProvisionProject" }, "parameters": [ { "name": "request", - "type": "google.cloud.discoveryengine_v1beta.types.CheckGroundingRequest" + "type": "google.cloud.discoveryengine_v1beta.types.ProvisionProjectRequest" + }, + { + "name": "name", + "type": "str" }, { "name": "retry", @@ -5888,22 +6868,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.discoveryengine_v1beta.types.CheckGroundingResponse", - "shortName": "check_grounding" + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "provision_project" }, - "description": "Sample for CheckGrounding", - "file": "discoveryengine_v1beta_generated_grounded_generation_service_check_grounding_async.py", + "description": "Sample for ProvisionProject", + "file": "discoveryengine_v1beta_generated_project_service_provision_project_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "discoveryengine_v1beta_generated_GroundedGenerationService_CheckGrounding_async", + "regionTag": "discoveryengine_v1beta_generated_ProjectService_ProvisionProject_async", "segments": [ { - "end": 51, + "end": 57, "start": 27, "type": "FULL" }, { - "end": 51, + "end": 57, "start": 27, "type": "SHORT" }, @@ -5913,43 +6893,47 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 45, + "end": 47, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 48, - "start": 46, + "end": 54, + "start": 48, "type": "REQUEST_EXECUTION" }, { - "end": 52, - "start": 49, + "end": 58, + "start": 55, "type": "RESPONSE_HANDLING" } ], - "title": "discoveryengine_v1beta_generated_grounded_generation_service_check_grounding_async.py" + "title": "discoveryengine_v1beta_generated_project_service_provision_project_async.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": 
"google.cloud.discoveryengine_v1beta.GroundedGenerationServiceClient", - "shortName": "GroundedGenerationServiceClient" + "fullName": "google.cloud.discoveryengine_v1beta.ProjectServiceClient", + "shortName": "ProjectServiceClient" }, - "fullName": "google.cloud.discoveryengine_v1beta.GroundedGenerationServiceClient.check_grounding", + "fullName": "google.cloud.discoveryengine_v1beta.ProjectServiceClient.provision_project", "method": { - "fullName": "google.cloud.discoveryengine.v1beta.GroundedGenerationService.CheckGrounding", + "fullName": "google.cloud.discoveryengine.v1beta.ProjectService.ProvisionProject", "service": { - "fullName": "google.cloud.discoveryengine.v1beta.GroundedGenerationService", - "shortName": "GroundedGenerationService" + "fullName": "google.cloud.discoveryengine.v1beta.ProjectService", + "shortName": "ProjectService" }, - "shortName": "CheckGrounding" + "shortName": "ProvisionProject" }, "parameters": [ { "name": "request", - "type": "google.cloud.discoveryengine_v1beta.types.CheckGroundingRequest" + "type": "google.cloud.discoveryengine_v1beta.types.ProvisionProjectRequest" + }, + { + "name": "name", + "type": "str" }, { "name": "retry", @@ -5964,22 +6948,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.discoveryengine_v1beta.types.CheckGroundingResponse", - "shortName": "check_grounding" + "resultType": "google.api_core.operation.Operation", + "shortName": "provision_project" }, - "description": "Sample for CheckGrounding", - "file": "discoveryengine_v1beta_generated_grounded_generation_service_check_grounding_sync.py", + "description": "Sample for ProvisionProject", + "file": "discoveryengine_v1beta_generated_project_service_provision_project_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "discoveryengine_v1beta_generated_GroundedGenerationService_CheckGrounding_sync", + "regionTag": "discoveryengine_v1beta_generated_ProjectService_ProvisionProject_sync", "segments": [ { - "end": 51, + "end": 57, "start": 27, "type": "FULL" }, { - "end": 51, + "end": 57, "start": 27, "type": "SHORT" }, @@ -5989,22 +6973,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 45, + "end": 47, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 48, - "start": 46, + "end": 54, + "start": 48, "type": "REQUEST_EXECUTION" }, { - "end": 52, - "start": 49, + "end": 58, + "start": 55, "type": "RESPONSE_HANDLING" } ], - "title": "discoveryengine_v1beta_generated_grounded_generation_service_check_grounding_sync.py" + "title": "discoveryengine_v1beta_generated_project_service_provision_project_sync.py" }, { "canonical": true, @@ -7278,6 +8262,159 @@ ], "title": "discoveryengine_v1beta_generated_search_service_search_sync.py" }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.discoveryengine_v1beta.SearchTuningServiceAsyncClient", + "shortName": "SearchTuningServiceAsyncClient" + }, + "fullName": "google.cloud.discoveryengine_v1beta.SearchTuningServiceAsyncClient.list_custom_models", + "method": { + "fullName": "google.cloud.discoveryengine.v1beta.SearchTuningService.ListCustomModels", + "service": { + "fullName": "google.cloud.discoveryengine.v1beta.SearchTuningService", + "shortName": "SearchTuningService" + }, + "shortName": "ListCustomModels" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.discoveryengine_v1beta.types.ListCustomModelsRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": 
"float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.discoveryengine_v1beta.types.ListCustomModelsResponse", + "shortName": "list_custom_models" + }, + "description": "Sample for ListCustomModels", + "file": "discoveryengine_v1beta_generated_search_tuning_service_list_custom_models_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "discoveryengine_v1beta_generated_SearchTuningService_ListCustomModels_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "discoveryengine_v1beta_generated_search_tuning_service_list_custom_models_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.discoveryengine_v1beta.SearchTuningServiceClient", + "shortName": "SearchTuningServiceClient" + }, + "fullName": "google.cloud.discoveryengine_v1beta.SearchTuningServiceClient.list_custom_models", + "method": { + "fullName": "google.cloud.discoveryengine.v1beta.SearchTuningService.ListCustomModels", + "service": { + "fullName": "google.cloud.discoveryengine.v1beta.SearchTuningService", + "shortName": "SearchTuningService" + }, + "shortName": "ListCustomModels" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.discoveryengine_v1beta.types.ListCustomModelsRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.discoveryengine_v1beta.types.ListCustomModelsResponse", + "shortName": "list_custom_models" + }, + "description": "Sample for ListCustomModels", + "file": "discoveryengine_v1beta_generated_search_tuning_service_list_custom_models_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "discoveryengine_v1beta_generated_SearchTuningService_ListCustomModels_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "discoveryengine_v1beta_generated_search_tuning_service_list_custom_models_sync.py" + }, { "canonical": true, "clientMethod": { diff --git a/packages/google-cloud-discoveryengine/scripts/fixup_discoveryengine_v1_keywords.py b/packages/google-cloud-discoveryengine/scripts/fixup_discoveryengine_v1_keywords.py index 3009b9eb70ed..e485f8818fbd 100644 --- a/packages/google-cloud-discoveryengine/scripts/fixup_discoveryengine_v1_keywords.py +++ b/packages/google-cloud-discoveryengine/scripts/fixup_discoveryengine_v1_keywords.py @@ -39,54 +39,69 @@ def partition( class discoveryengineCallTransformer(cst.CSTTransformer): CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata') METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { + 'answer_query': ('serving_config', 'query', 'session', 'safety_spec', 'related_questions_spec', 
'answer_generation_spec', 'search_spec', 'query_understanding_spec', 'asynchronous_mode', 'user_pseudo_id', ), 'batch_create_target_sites': ('parent', 'requests', ), 'batch_verify_target_sites': ('parent', ), + 'check_grounding': ('grounding_config', 'answer_candidate', 'facts', 'grounding_spec', 'user_labels', ), 'collect_user_event': ('parent', 'user_event', 'uri', 'ets', ), 'complete_query': ('data_store', 'query', 'query_model', 'user_pseudo_id', 'include_tail_suggestions', ), 'converse_conversation': ('name', 'query', 'serving_config', 'conversation', 'safe_search', 'user_labels', 'summary_spec', 'filter', 'boost_spec', ), + 'create_control': ('parent', 'control', 'control_id', ), 'create_conversation': ('parent', 'conversation', ), 'create_data_store': ('parent', 'data_store', 'data_store_id', 'create_advanced_site_search', ), 'create_document': ('parent', 'document', 'document_id', ), 'create_engine': ('parent', 'engine', 'engine_id', ), 'create_schema': ('parent', 'schema', 'schema_id', ), + 'create_session': ('parent', 'session', ), 'create_target_site': ('parent', 'target_site', ), + 'delete_control': ('name', ), 'delete_conversation': ('name', ), 'delete_data_store': ('name', ), 'delete_document': ('name', ), 'delete_engine': ('name', ), 'delete_schema': ('name', ), + 'delete_session': ('name', ), 'delete_target_site': ('name', ), 'disable_advanced_site_search': ('site_search_engine', ), 'enable_advanced_site_search': ('site_search_engine', ), 'fetch_domain_verification_status': ('site_search_engine', 'page_size', 'page_token', ), + 'get_answer': ('name', ), + 'get_control': ('name', ), 'get_conversation': ('name', ), 'get_data_store': ('name', ), 'get_document': ('name', ), 'get_engine': ('name', ), 'get_schema': ('name', ), + 'get_session': ('name', ), 'get_site_search_engine': ('name', ), 'get_target_site': ('name', ), 'import_documents': ('parent', 'inline_source', 'gcs_source', 'bigquery_source', 'fhir_store_source', 'spanner_source', 'cloud_sql_source', 'firestore_source', 'bigtable_source', 'error_config', 'reconciliation_mode', 'update_mask', 'auto_generate_ids', 'id_field', ), 'import_suggestion_deny_list_entries': ('parent', 'inline_source', 'gcs_source', ), 'import_user_events': ('parent', 'inline_source', 'gcs_source', 'bigquery_source', 'error_config', ), + 'list_controls': ('parent', 'page_size', 'page_token', 'filter', ), 'list_conversations': ('parent', 'page_size', 'page_token', 'filter', 'order_by', ), 'list_data_stores': ('parent', 'page_size', 'page_token', 'filter', ), 'list_documents': ('parent', 'page_size', 'page_token', ), 'list_engines': ('parent', 'page_size', 'page_token', 'filter', ), 'list_schemas': ('parent', 'page_size', 'page_token', ), + 'list_sessions': ('parent', 'page_size', 'page_token', 'filter', 'order_by', ), 'list_target_sites': ('parent', 'page_size', 'page_token', ), + 'provision_project': ('name', 'accept_data_use_terms', 'data_use_terms_version', ), 'purge_documents': ('parent', 'filter', 'force', ), 'purge_suggestion_deny_list_entries': ('parent', ), + 'rank': ('ranking_config', 'records', 'model', 'top_n', 'query', 'ignore_record_details_in_response', 'user_labels', ), 'recommend': ('serving_config', 'user_event', 'page_size', 'filter', 'validate_only', 'params', 'user_labels', ), 'recrawl_uris': ('site_search_engine', 'uris', ), 'search': ('serving_config', 'branch', 'query', 'image_query', 'page_size', 'page_token', 'offset', 'data_store_specs', 'filter', 'canonical_filter', 'order_by', 'user_info', 'facet_specs', 'boost_spec', 
'params', 'query_expansion_spec', 'spell_correction_spec', 'user_pseudo_id', 'content_search_spec', 'safe_search', 'user_labels', ), + 'update_control': ('control', 'update_mask', ), 'update_conversation': ('conversation', 'update_mask', ), 'update_data_store': ('data_store', 'update_mask', ), 'update_document': ('document', 'allow_missing', 'update_mask', ), 'update_engine': ('engine', 'update_mask', ), 'update_schema': ('schema', 'allow_missing', ), + 'update_session': ('session', 'update_mask', ), 'update_target_site': ('target_site', ), - 'write_user_event': ('parent', 'user_event', ), + 'write_user_event': ('parent', 'user_event', 'write_async', ), } def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode: diff --git a/packages/google-cloud-discoveryengine/scripts/fixup_discoveryengine_v1alpha_keywords.py b/packages/google-cloud-discoveryengine/scripts/fixup_discoveryengine_v1alpha_keywords.py index d85711506fa5..67511a5f2831 100644 --- a/packages/google-cloud-discoveryengine/scripts/fixup_discoveryengine_v1alpha_keywords.py +++ b/packages/google-cloud-discoveryengine/scripts/fixup_discoveryengine_v1alpha_keywords.py @@ -42,10 +42,11 @@ class discoveryengineCallTransformer(cst.CSTTransformer): 'answer_query': ('serving_config', 'query', 'session', 'safety_spec', 'related_questions_spec', 'answer_generation_spec', 'search_spec', 'query_understanding_spec', 'asynchronous_mode', 'user_pseudo_id', ), 'batch_create_target_sites': ('parent', 'requests', ), 'batch_verify_target_sites': ('parent', ), - 'check_grounding': ('grounding_config', 'answer_candidate', 'facts', 'grounding_spec', ), + 'check_grounding': ('grounding_config', 'answer_candidate', 'facts', 'grounding_spec', 'user_labels', ), 'collect_user_event': ('parent', 'user_event', 'uri', 'ets', ), 'complete_query': ('data_store', 'query', 'query_model', 'user_pseudo_id', 'include_tail_suggestions', ), 'converse_conversation': ('name', 'query', 'serving_config', 'conversation', 'safe_search', 'user_labels', 'summary_spec', 'filter', 'boost_spec', ), + 'create_control': ('parent', 'control', 'control_id', ), 'create_conversation': ('parent', 'conversation', ), 'create_data_store': ('parent', 'data_store', 'data_store_id', 'create_advanced_site_search', ), 'create_document': ('parent', 'document', 'document_id', ), @@ -53,6 +54,7 @@ class discoveryengineCallTransformer(cst.CSTTransformer): 'create_schema': ('parent', 'schema', 'schema_id', ), 'create_session': ('parent', 'session', ), 'create_target_site': ('parent', 'target_site', ), + 'delete_control': ('name', ), 'delete_conversation': ('name', ), 'delete_data_store': ('name', ), 'delete_document': ('name', ), @@ -67,6 +69,7 @@ class discoveryengineCallTransformer(cst.CSTTransformer): 'get_acl_config': ('name', ), 'get_answer': ('name', ), 'get_chunk': ('name', ), + 'get_control': ('name', ), 'get_conversation': ('name', ), 'get_data_store': ('name', ), 'get_document': ('name', ), @@ -83,7 +86,9 @@ class discoveryengineCallTransformer(cst.CSTTransformer): 'import_suggestion_deny_list_entries': ('parent', 'inline_source', 'gcs_source', ), 'import_user_events': ('parent', 'inline_source', 'gcs_source', 'bigquery_source', 'error_config', ), 'list_chunks': ('parent', 'page_size', 'page_token', ), + 'list_controls': ('parent', 'page_size', 'page_token', 'filter', ), 'list_conversations': ('parent', 'page_size', 'page_token', 'filter', 'order_by', ), + 'list_custom_models': ('data_store', ), 'list_data_stores': ('parent', 'page_size', 'page_token', 'filter', ), 
'list_documents': ('parent', 'page_size', 'page_token', ), 'list_engines': ('parent', 'page_size', 'page_token', 'filter', ), @@ -96,15 +101,16 @@ class discoveryengineCallTransformer(cst.CSTTransformer): 'purge_documents': ('parent', 'filter', 'gcs_source', 'error_config', 'force', ), 'purge_suggestion_deny_list_entries': ('parent', ), 'purge_user_events': ('parent', 'filter', 'force', ), - 'rank': ('ranking_config', 'records', 'model', 'top_n', 'query', 'ignore_record_details_in_response', ), + 'rank': ('ranking_config', 'records', 'model', 'top_n', 'query', 'ignore_record_details_in_response', 'user_labels', ), 'recommend': ('serving_config', 'user_event', 'page_size', 'filter', 'validate_only', 'params', 'user_labels', ), 'recrawl_uris': ('site_search_engine', 'uris', ), 'report_consent_change': ('consent_change_action', 'project', 'service_term_id', 'service_term_version', ), 'resume_engine': ('name', ), 'search': ('serving_config', 'branch', 'query', 'image_query', 'page_size', 'page_token', 'offset', 'data_store_specs', 'filter', 'canonical_filter', 'order_by', 'user_info', 'facet_specs', 'boost_spec', 'params', 'query_expansion_spec', 'spell_correction_spec', 'user_pseudo_id', 'content_search_spec', 'embedding_spec', 'ranking_expression', 'safe_search', 'user_labels', 'custom_fine_tuning_spec', ), - 'train_custom_model': ('data_store', 'gcs_training_input', 'model_type', 'error_config', ), + 'train_custom_model': ('data_store', 'gcs_training_input', 'model_type', 'error_config', 'model_id', ), 'tune_engine': ('name', ), 'update_acl_config': ('acl_config', ), + 'update_control': ('control', 'update_mask', ), 'update_conversation': ('conversation', 'update_mask', ), 'update_data_store': ('data_store', 'update_mask', ), 'update_document': ('document', 'allow_missing', 'update_mask', ), @@ -114,7 +120,7 @@ class discoveryengineCallTransformer(cst.CSTTransformer): 'update_serving_config': ('serving_config', 'update_mask', ), 'update_session': ('session', 'update_mask', ), 'update_target_site': ('target_site', ), - 'write_user_event': ('parent', 'user_event', ), + 'write_user_event': ('parent', 'user_event', 'write_async', ), } def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode: diff --git a/packages/google-cloud-discoveryengine/scripts/fixup_discoveryengine_v1beta_keywords.py b/packages/google-cloud-discoveryengine/scripts/fixup_discoveryengine_v1beta_keywords.py index 0e95dcadf879..85127db6149f 100644 --- a/packages/google-cloud-discoveryengine/scripts/fixup_discoveryengine_v1beta_keywords.py +++ b/packages/google-cloud-discoveryengine/scripts/fixup_discoveryengine_v1beta_keywords.py @@ -42,10 +42,11 @@ class discoveryengineCallTransformer(cst.CSTTransformer): 'answer_query': ('serving_config', 'query', 'session', 'safety_spec', 'related_questions_spec', 'answer_generation_spec', 'search_spec', 'query_understanding_spec', 'asynchronous_mode', 'user_pseudo_id', ), 'batch_create_target_sites': ('parent', 'requests', ), 'batch_verify_target_sites': ('parent', ), - 'check_grounding': ('grounding_config', 'answer_candidate', 'facts', 'grounding_spec', ), + 'check_grounding': ('grounding_config', 'answer_candidate', 'facts', 'grounding_spec', 'user_labels', ), 'collect_user_event': ('parent', 'user_event', 'uri', 'ets', ), 'complete_query': ('data_store', 'query', 'query_model', 'user_pseudo_id', 'include_tail_suggestions', ), 'converse_conversation': ('name', 'query', 'serving_config', 'conversation', 'safe_search', 'user_labels', 'summary_spec', 'filter', 
'boost_spec', ), + 'create_control': ('parent', 'control', 'control_id', ), 'create_conversation': ('parent', 'conversation', ), 'create_data_store': ('parent', 'data_store', 'data_store_id', 'create_advanced_site_search', ), 'create_document': ('parent', 'document', 'document_id', ), @@ -53,6 +54,7 @@ class discoveryengineCallTransformer(cst.CSTTransformer): 'create_schema': ('parent', 'schema', 'schema_id', ), 'create_session': ('parent', 'session', ), 'create_target_site': ('parent', 'target_site', ), + 'delete_control': ('name', ), 'delete_conversation': ('name', ), 'delete_data_store': ('name', ), 'delete_document': ('name', ), @@ -64,6 +66,7 @@ class discoveryengineCallTransformer(cst.CSTTransformer): 'enable_advanced_site_search': ('site_search_engine', ), 'fetch_domain_verification_status': ('site_search_engine', 'page_size', 'page_token', ), 'get_answer': ('name', ), + 'get_control': ('name', ), 'get_conversation': ('name', ), 'get_data_store': ('name', ), 'get_document': ('name', ), @@ -76,7 +79,9 @@ class discoveryengineCallTransformer(cst.CSTTransformer): 'import_documents': ('parent', 'inline_source', 'gcs_source', 'bigquery_source', 'fhir_store_source', 'spanner_source', 'cloud_sql_source', 'firestore_source', 'bigtable_source', 'error_config', 'reconciliation_mode', 'update_mask', 'auto_generate_ids', 'id_field', ), 'import_suggestion_deny_list_entries': ('parent', 'inline_source', 'gcs_source', ), 'import_user_events': ('parent', 'inline_source', 'gcs_source', 'bigquery_source', 'error_config', ), + 'list_controls': ('parent', 'page_size', 'page_token', 'filter', ), 'list_conversations': ('parent', 'page_size', 'page_token', 'filter', 'order_by', ), + 'list_custom_models': ('data_store', ), 'list_data_stores': ('parent', 'page_size', 'page_token', 'filter', ), 'list_documents': ('parent', 'page_size', 'page_token', ), 'list_engines': ('parent', 'page_size', 'page_token', 'filter', ), @@ -85,15 +90,17 @@ class discoveryengineCallTransformer(cst.CSTTransformer): 'list_sessions': ('parent', 'page_size', 'page_token', 'filter', 'order_by', ), 'list_target_sites': ('parent', 'page_size', 'page_token', ), 'pause_engine': ('name', ), + 'provision_project': ('name', 'accept_data_use_terms', 'data_use_terms_version', ), 'purge_documents': ('parent', 'filter', 'force', ), 'purge_suggestion_deny_list_entries': ('parent', ), - 'rank': ('ranking_config', 'records', 'model', 'top_n', 'query', 'ignore_record_details_in_response', ), + 'rank': ('ranking_config', 'records', 'model', 'top_n', 'query', 'ignore_record_details_in_response', 'user_labels', ), 'recommend': ('serving_config', 'user_event', 'page_size', 'filter', 'validate_only', 'params', 'user_labels', ), 'recrawl_uris': ('site_search_engine', 'uris', ), 'resume_engine': ('name', ), 'search': ('serving_config', 'branch', 'query', 'image_query', 'page_size', 'page_token', 'offset', 'data_store_specs', 'filter', 'canonical_filter', 'order_by', 'user_info', 'facet_specs', 'boost_spec', 'params', 'query_expansion_spec', 'spell_correction_spec', 'user_pseudo_id', 'content_search_spec', 'embedding_spec', 'ranking_expression', 'safe_search', 'user_labels', ), - 'train_custom_model': ('data_store', 'gcs_training_input', 'model_type', 'error_config', ), + 'train_custom_model': ('data_store', 'gcs_training_input', 'model_type', 'error_config', 'model_id', ), 'tune_engine': ('name', ), + 'update_control': ('control', 'update_mask', ), 'update_conversation': ('conversation', 'update_mask', ), 'update_data_store': ('data_store', 
'update_mask', ), 'update_document': ('document', 'allow_missing', 'update_mask', ), @@ -102,7 +109,7 @@ class discoveryengineCallTransformer(cst.CSTTransformer): 'update_serving_config': ('serving_config', 'update_mask', ), 'update_session': ('session', 'update_mask', ), 'update_target_site': ('target_site', ), - 'write_user_event': ('parent', 'user_event', ), + 'write_user_event': ('parent', 'user_event', 'write_async', ), } def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode: diff --git a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1/test_completion_service.py b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1/test_completion_service.py index f3593b11e42e..c5d8d919906b 100644 --- a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1/test_completion_service.py +++ b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1/test_completion_service.py @@ -3025,6 +3025,7 @@ def test_completion_service_base_transport(): "import_suggestion_deny_list_entries", "purge_suggestion_deny_list_entries", "get_operation", + "cancel_operation", "list_operations", ) for method in methods: @@ -3643,6 +3644,64 @@ async def test_transport_close_async(): close.assert_called_once() +def test_cancel_operation_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.CancelOperationRequest +): + client = CompletionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/operations/sample2"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.cancel_operation(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.CancelOperationRequest, + dict, + ], +) +def test_cancel_operation_rest(request_type): + client = CompletionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {"name": "projects/sample1/operations/sample2"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "{}" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.cancel_operation(request) + + # Establish that the response is the type that we expect. 
+ assert response is None + + def test_get_operation_rest_bad_request( transport: str = "rest", request_type=operations_pb2.GetOperationRequest ): @@ -3757,6 +3816,145 @@ def test_list_operations_rest(request_type): assert isinstance(response, operations_pb2.ListOperationsResponse) +def test_cancel_operation(transport: str = "grpc"): + client = CompletionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.CancelOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_cancel_operation_async(transport: str = "grpc_asyncio"): + client = CompletionServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.CancelOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +def test_cancel_operation_field_headers(): + client = CompletionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.CancelOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + call.return_value = None + + client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_cancel_operation_field_headers_async(): + client = CompletionServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.CancelOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_cancel_operation_from_dict(): + client = CompletionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + + response = client.cancel_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_cancel_operation_from_dict_async(): + client = CompletionServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.cancel_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + def test_get_operation(transport: str = "grpc"): client = CompletionServiceClient( credentials=ga_credentials.AnonymousCredentials(), diff --git a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1/test_control_service.py b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1/test_control_service.py new file mode 100644 index 000000000000..d8fd704dc100 --- /dev/null +++ b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1/test_control_service.py @@ -0,0 +1,6518 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import os + +# try/except added for compatibility with python < 3.8 +try: + from unittest import mock + from unittest.mock import AsyncMock # pragma: NO COVER +except ImportError: # pragma: NO COVER + import mock + +from collections.abc import Iterable +import json +import math + +from google.api_core import gapic_v1, grpc_helpers, grpc_helpers_async, path_template +from google.api_core import api_core_version, client_options +from google.api_core import exceptions as core_exceptions +import google.auth +from google.auth import credentials as ga_credentials +from google.auth.exceptions import MutualTLSChannelError +from google.cloud.location import locations_pb2 +from google.longrunning import operations_pb2 # type: ignore +from google.oauth2 import service_account +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import json_format +from google.protobuf import timestamp_pb2 # type: ignore +import grpc +from grpc.experimental import aio +from proto.marshal.rules import wrappers +from proto.marshal.rules.dates import DurationRule, TimestampRule +import pytest +from requests import PreparedRequest, Request, Response +from requests.sessions import Session + +from google.cloud.discoveryengine_v1.services.control_service import ( + ControlServiceAsyncClient, + ControlServiceClient, + pagers, + transports, +) +from google.cloud.discoveryengine_v1.types import common +from google.cloud.discoveryengine_v1.types import control +from google.cloud.discoveryengine_v1.types import control as gcd_control +from google.cloud.discoveryengine_v1.types import control_service + + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + + +# If default endpoint is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. +def modify_default_endpoint(client): + return ( + "foo.googleapis.com" + if ("localhost" in client.DEFAULT_ENDPOINT) + else client.DEFAULT_ENDPOINT + ) + + +# If default endpoint template is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint template so the client can produce a different +# mtls endpoint for endpoint testing purposes. 
+def modify_default_endpoint_template(client): + return ( + "test.{UNIVERSE_DOMAIN}" + if ("localhost" in client._DEFAULT_ENDPOINT_TEMPLATE) + else client._DEFAULT_ENDPOINT_TEMPLATE + ) + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert ControlServiceClient._get_default_mtls_endpoint(None) is None + assert ( + ControlServiceClient._get_default_mtls_endpoint(api_endpoint) + == api_mtls_endpoint + ) + assert ( + ControlServiceClient._get_default_mtls_endpoint(api_mtls_endpoint) + == api_mtls_endpoint + ) + assert ( + ControlServiceClient._get_default_mtls_endpoint(sandbox_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + ControlServiceClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + ControlServiceClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi + ) + + +def test__read_environment_variables(): + assert ControlServiceClient._read_environment_variables() == (False, "auto", None) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + assert ControlServiceClient._read_environment_variables() == ( + True, + "auto", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + assert ControlServiceClient._read_environment_variables() == ( + False, + "auto", + None, + ) + + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError) as excinfo: + ControlServiceClient._read_environment_variables() + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + assert ControlServiceClient._read_environment_variables() == ( + False, + "never", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + assert ControlServiceClient._read_environment_variables() == ( + False, + "always", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}): + assert ControlServiceClient._read_environment_variables() == ( + False, + "auto", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + ControlServiceClient._read_environment_variables() + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + with mock.patch.dict(os.environ, {"GOOGLE_CLOUD_UNIVERSE_DOMAIN": "foo.com"}): + assert ControlServiceClient._read_environment_variables() == ( + False, + "auto", + "foo.com", + ) + + +def test__get_client_cert_source(): + mock_provided_cert_source = mock.Mock() + mock_default_cert_source = mock.Mock() + + assert ControlServiceClient._get_client_cert_source(None, False) is None + assert ( + ControlServiceClient._get_client_cert_source(mock_provided_cert_source, False) + is None + ) + assert ( + ControlServiceClient._get_client_cert_source(mock_provided_cert_source, True) + == mock_provided_cert_source + ) + + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", return_value=True + ): + with mock.patch( + 
"google.auth.transport.mtls.default_client_cert_source", + return_value=mock_default_cert_source, + ): + assert ( + ControlServiceClient._get_client_cert_source(None, True) + is mock_default_cert_source + ) + assert ( + ControlServiceClient._get_client_cert_source( + mock_provided_cert_source, "true" + ) + is mock_provided_cert_source + ) + + +@mock.patch.object( + ControlServiceClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(ControlServiceClient), +) +@mock.patch.object( + ControlServiceAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(ControlServiceAsyncClient), +) +def test__get_api_endpoint(): + api_override = "foo.com" + mock_client_cert_source = mock.Mock() + default_universe = ControlServiceClient._DEFAULT_UNIVERSE + default_endpoint = ControlServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=default_universe + ) + mock_universe = "bar.com" + mock_endpoint = ControlServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=mock_universe + ) + + assert ( + ControlServiceClient._get_api_endpoint( + api_override, mock_client_cert_source, default_universe, "always" + ) + == api_override + ) + assert ( + ControlServiceClient._get_api_endpoint( + None, mock_client_cert_source, default_universe, "auto" + ) + == ControlServiceClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + ControlServiceClient._get_api_endpoint(None, None, default_universe, "auto") + == default_endpoint + ) + assert ( + ControlServiceClient._get_api_endpoint(None, None, default_universe, "always") + == ControlServiceClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + ControlServiceClient._get_api_endpoint( + None, mock_client_cert_source, default_universe, "always" + ) + == ControlServiceClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + ControlServiceClient._get_api_endpoint(None, None, mock_universe, "never") + == mock_endpoint + ) + assert ( + ControlServiceClient._get_api_endpoint(None, None, default_universe, "never") + == default_endpoint + ) + + with pytest.raises(MutualTLSChannelError) as excinfo: + ControlServiceClient._get_api_endpoint( + None, mock_client_cert_source, mock_universe, "auto" + ) + assert ( + str(excinfo.value) + == "mTLS is not supported in any universe other than googleapis.com." + ) + + +def test__get_universe_domain(): + client_universe_domain = "foo.com" + universe_domain_env = "bar.com" + + assert ( + ControlServiceClient._get_universe_domain( + client_universe_domain, universe_domain_env + ) + == client_universe_domain + ) + assert ( + ControlServiceClient._get_universe_domain(None, universe_domain_env) + == universe_domain_env + ) + assert ( + ControlServiceClient._get_universe_domain(None, None) + == ControlServiceClient._DEFAULT_UNIVERSE + ) + + with pytest.raises(ValueError) as excinfo: + ControlServiceClient._get_universe_domain("", None) + assert str(excinfo.value) == "Universe Domain cannot be an empty string." + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (ControlServiceClient, transports.ControlServiceGrpcTransport, "grpc"), + (ControlServiceClient, transports.ControlServiceRestTransport, "rest"), + ], +) +def test__validate_universe_domain(client_class, transport_class, transport_name): + client = client_class( + transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) + ) + assert client._validate_universe_domain() == True + + # Test the case when universe is already validated. 
+ assert client._validate_universe_domain() == True + + if transport_name == "grpc": + # Test the case where credentials are provided by the + # `local_channel_credentials`. The default universes in both match. + channel = grpc.secure_channel( + "http://localhost/", grpc.local_channel_credentials() + ) + client = client_class(transport=transport_class(channel=channel)) + assert client._validate_universe_domain() == True + + # Test the case where credentials do not exist: e.g. a transport is provided + # with no credentials. Validation should still succeed because there is no + # mismatch with non-existent credentials. + channel = grpc.secure_channel( + "http://localhost/", grpc.local_channel_credentials() + ) + transport = transport_class(channel=channel) + transport._credentials = None + client = client_class(transport=transport) + assert client._validate_universe_domain() == True + + # TODO: This is needed to cater for older versions of google-auth + # Make this test unconditional once the minimum supported version of + # google-auth becomes 2.23.0 or higher. + google_auth_major, google_auth_minor = [ + int(part) for part in google.auth.__version__.split(".")[0:2] + ] + if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): + credentials = ga_credentials.AnonymousCredentials() + credentials._universe_domain = "foo.com" + # Test the case when there is a universe mismatch from the credentials. + client = client_class(transport=transport_class(credentials=credentials)) + with pytest.raises(ValueError) as excinfo: + client._validate_universe_domain() + assert ( + str(excinfo.value) + == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." + ) + + # Test the case when there is a universe mismatch from the client. + # + # TODO: Make this test unconditional once the minimum supported version of + # google-api-core becomes 2.15.0 or higher. + api_core_major, api_core_minor = [ + int(part) for part in api_core_version.__version__.split(".")[0:2] + ] + if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): + client = client_class( + client_options={"universe_domain": "bar.com"}, + transport=transport_class( + credentials=ga_credentials.AnonymousCredentials(), + ), + ) + with pytest.raises(ValueError) as excinfo: + client._validate_universe_domain() + assert ( + str(excinfo.value) + == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." 
+ ) + + # Test that ValueError is raised if universe_domain is provided via client options and credentials is None + with pytest.raises(ValueError): + client._compare_universes("foo.bar", None) + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (ControlServiceClient, "grpc"), + (ControlServiceAsyncClient, "grpc_asyncio"), + (ControlServiceClient, "rest"), + ], +) +def test_control_service_client_from_service_account_info(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_info" + ) as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info, transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + "discoveryengine.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://discoveryengine.googleapis.com" + ) + + +@pytest.mark.parametrize( + "transport_class,transport_name", + [ + (transports.ControlServiceGrpcTransport, "grpc"), + (transports.ControlServiceGrpcAsyncIOTransport, "grpc_asyncio"), + (transports.ControlServiceRestTransport, "rest"), + ], +) +def test_control_service_client_service_account_always_use_jwt( + transport_class, transport_name +): + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) + + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=False) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (ControlServiceClient, "grpc"), + (ControlServiceAsyncClient, "grpc_asyncio"), + (ControlServiceClient, "rest"), + ], +) +def test_control_service_client_from_service_account_file(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_file" + ) as factory: + factory.return_value = creds + client = client_class.from_service_account_file( + "dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + client = client_class.from_service_account_json( + "dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + "discoveryengine.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://discoveryengine.googleapis.com" + ) + + +def test_control_service_client_get_transport_class(): + transport = ControlServiceClient.get_transport_class() + available_transports = [ + transports.ControlServiceGrpcTransport, + transports.ControlServiceRestTransport, + ] + assert transport in available_transports + + transport = ControlServiceClient.get_transport_class("grpc") + assert transport == transports.ControlServiceGrpcTransport + + +@pytest.mark.parametrize( + 
"client_class,transport_class,transport_name", + [ + (ControlServiceClient, transports.ControlServiceGrpcTransport, "grpc"), + ( + ControlServiceAsyncClient, + transports.ControlServiceGrpcAsyncIOTransport, + "grpc_asyncio", + ), + (ControlServiceClient, transports.ControlServiceRestTransport, "rest"), + ], +) +@mock.patch.object( + ControlServiceClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(ControlServiceClient), +) +@mock.patch.object( + ControlServiceAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(ControlServiceAsyncClient), +) +def test_control_service_client_client_options( + client_class, transport_class, transport_name +): + # Check that if channel is provided we won't create a new one. + with mock.patch.object(ControlServiceClient, "get_transport_class") as gtc: + transport = transport_class(credentials=ga_credentials.AnonymousCredentials()) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object(ControlServiceClient, "get_transport_class") as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. + options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name, client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + client = client_class(transport=transport_name) + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError) as excinfo: + client = client_class(transport=transport_name) + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + # Check the case api_endpoint is provided + options = client_options.ClientOptions( + api_audience="https://language.googleapis.com" + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience="https://language.googleapis.com", + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,use_client_cert_env", + [ + (ControlServiceClient, transports.ControlServiceGrpcTransport, "grpc", "true"), + ( + ControlServiceAsyncClient, + transports.ControlServiceGrpcAsyncIOTransport, + "grpc_asyncio", + "true", + ), + (ControlServiceClient, transports.ControlServiceGrpcTransport, "grpc", "false"), + ( + ControlServiceAsyncClient, + transports.ControlServiceGrpcAsyncIOTransport, + "grpc_asyncio", + "false", + ), + (ControlServiceClient, transports.ControlServiceRestTransport, "rest", "true"), + (ControlServiceClient, transports.ControlServiceRestTransport, "rest", "false"), + ], +) +@mock.patch.object( + ControlServiceClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(ControlServiceClient), +) +@mock.patch.object( + ControlServiceAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(ControlServiceAsyncClient), +) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_control_service_client_mtls_env_auto( + client_class, transport_class, transport_name, use_client_cert_env +): + # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. 
+ + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + options = client_options.ClientOptions( + client_cert_source=client_cert_source_callback + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ) + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=client_cert_source_callback, + ): + if use_client_cert_env == "false": + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ) + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case client_cert_source and ADC client cert are not provided. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class", [ControlServiceClient, ControlServiceAsyncClient] +) +@mock.patch.object( + ControlServiceClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(ControlServiceClient), +) +@mock.patch.object( + ControlServiceAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(ControlServiceAsyncClient), +) +def test_control_service_client_get_mtls_endpoint_and_cert_source(client_class): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=mock_client_cert_source, + ): + ( + api_endpoint, + cert_source, + ) = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + client_class.get_mtls_endpoint_and_cert_source() + + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError) as excinfo: + client_class.get_mtls_endpoint_and_cert_source() + + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + + +@pytest.mark.parametrize( + "client_class", [ControlServiceClient, ControlServiceAsyncClient] +) +@mock.patch.object( + ControlServiceClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(ControlServiceClient), +) +@mock.patch.object( + ControlServiceAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(ControlServiceAsyncClient), +) +def test_control_service_client_client_api_endpoint(client_class): + mock_client_cert_source = client_cert_source_callback + api_override = "foo.com" + default_universe = ControlServiceClient._DEFAULT_UNIVERSE + default_endpoint = ControlServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=default_universe + ) + mock_universe = "bar.com" + mock_endpoint = ControlServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=mock_universe + ) + + # If ClientOptions.api_endpoint is set and GOOGLE_API_USE_CLIENT_CERTIFICATE="true", + # use ClientOptions.api_endpoint as the api endpoint regardless. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ): + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=api_override + ) + client = client_class( + client_options=options, + credentials=ga_credentials.AnonymousCredentials(), + ) + assert client.api_endpoint == api_override + + # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="never", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + client = client_class(credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == default_endpoint + + # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="always", + # use the DEFAULT_MTLS_ENDPOINT as the api endpoint. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + client = client_class(credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + + # If ClientOptions.api_endpoint is not set, GOOGLE_API_USE_MTLS_ENDPOINT="auto" (default), + # GOOGLE_API_USE_CLIENT_CERTIFICATE="false" (default), default cert source doesn't exist, + # and ClientOptions.universe_domain="bar.com", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with universe domain as the api endpoint. + options = client_options.ClientOptions() + universe_exists = hasattr(options, "universe_domain") + if universe_exists: + options = client_options.ClientOptions(universe_domain=mock_universe) + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + else: + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + assert client.api_endpoint == ( + mock_endpoint if universe_exists else default_endpoint + ) + assert client.universe_domain == ( + mock_universe if universe_exists else default_universe + ) + + # If ClientOptions does not have a universe domain attribute and GOOGLE_API_USE_MTLS_ENDPOINT="never", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. + options = client_options.ClientOptions() + if hasattr(options, "universe_domain"): + delattr(options, "universe_domain") + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + assert client.api_endpoint == default_endpoint + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (ControlServiceClient, transports.ControlServiceGrpcTransport, "grpc"), + ( + ControlServiceAsyncClient, + transports.ControlServiceGrpcAsyncIOTransport, + "grpc_asyncio", + ), + (ControlServiceClient, transports.ControlServiceRestTransport, "rest"), + ], +) +def test_control_service_client_client_options_scopes( + client_class, transport_class, transport_name +): + # Check the case scopes are provided. + options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + ControlServiceClient, + transports.ControlServiceGrpcTransport, + "grpc", + grpc_helpers, + ), + ( + ControlServiceAsyncClient, + transports.ControlServiceGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + (ControlServiceClient, transports.ControlServiceRestTransport, "rest", None), + ], +) +def test_control_service_client_client_options_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. 
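+    # The credentials file path should be forwarded to the transport unchanged; the
+    # client does not load it itself, so the credentials argument remains None.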
+ options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +def test_control_service_client_client_options_from_dict(): + with mock.patch( + "google.cloud.discoveryengine_v1.services.control_service.transports.ControlServiceGrpcTransport.__init__" + ) as grpc_transport: + grpc_transport.return_value = None + client = ControlServiceClient( + client_options={"api_endpoint": "squid.clam.whelk"} + ) + grpc_transport.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + ControlServiceClient, + transports.ControlServiceGrpcTransport, + "grpc", + grpc_helpers, + ), + ( + ControlServiceAsyncClient, + transports.ControlServiceGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + ], +) +def test_control_service_client_create_channel_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. + options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # test that the credentials from file are saved and used as the credentials. 
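+    # load_credentials_from_file and create_channel are mocked below so the test can
+    # confirm that the credentials loaded from the file are the ones given to the channel.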
+ with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel" + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + file_creds = ga_credentials.AnonymousCredentials() + load_creds.return_value = (file_creds, None) + adc.return_value = (creds, None) + client = client_class(client_options=options, transport=transport_name) + create_channel.assert_called_with( + "discoveryengine.googleapis.com:443", + credentials=file_creds, + credentials_file=None, + quota_project_id=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + scopes=None, + default_host="discoveryengine.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "request_type", + [ + control_service.CreateControlRequest, + dict, + ], +) +def test_create_control(request_type, transport: str = "grpc"): + client = ControlServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_control), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = gcd_control.Control( + name="name_value", + display_name="display_name_value", + associated_serving_config_ids=["associated_serving_config_ids_value"], + solution_type=common.SolutionType.SOLUTION_TYPE_RECOMMENDATION, + use_cases=[common.SearchUseCase.SEARCH_USE_CASE_SEARCH], + ) + response = client.create_control(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = control_service.CreateControlRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, gcd_control.Control) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.associated_serving_config_ids == [ + "associated_serving_config_ids_value" + ] + assert response.solution_type == common.SolutionType.SOLUTION_TYPE_RECOMMENDATION + assert response.use_cases == [common.SearchUseCase.SEARCH_USE_CASE_SEARCH] + + +def test_create_control_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ControlServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_control), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.create_control() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == control_service.CreateControlRequest() + + +def test_create_control_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = ControlServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = control_service.CreateControlRequest( + parent="parent_value", + control_id="control_id_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_control), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.create_control(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == control_service.CreateControlRequest( + parent="parent_value", + control_id="control_id_value", + ) + + +def test_create_control_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ControlServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.create_control in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.create_control] = mock_rpc + request = {} + client.create_control(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.create_control(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_create_control_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ControlServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_control), "__call__") as call: + # Designate an appropriate return value for the call. 
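+        # Wrapping the response in FakeUnaryUnaryCall makes the mocked stub awaitable,
+        # mirroring a real async gRPC invocation.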
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + gcd_control.Control( + name="name_value", + display_name="display_name_value", + associated_serving_config_ids=["associated_serving_config_ids_value"], + solution_type=common.SolutionType.SOLUTION_TYPE_RECOMMENDATION, + use_cases=[common.SearchUseCase.SEARCH_USE_CASE_SEARCH], + ) + ) + response = await client.create_control() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == control_service.CreateControlRequest() + + +@pytest.mark.asyncio +async def test_create_control_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = ControlServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.create_control + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[ + client._client._transport.create_control + ] = mock_object + + request = {} + await client.create_control(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + await client.create_control(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + +@pytest.mark.asyncio +async def test_create_control_async( + transport: str = "grpc_asyncio", request_type=control_service.CreateControlRequest +): + client = ControlServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_control), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + gcd_control.Control( + name="name_value", + display_name="display_name_value", + associated_serving_config_ids=["associated_serving_config_ids_value"], + solution_type=common.SolutionType.SOLUTION_TYPE_RECOMMENDATION, + use_cases=[common.SearchUseCase.SEARCH_USE_CASE_SEARCH], + ) + ) + response = await client.create_control(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = control_service.CreateControlRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, gcd_control.Control) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.associated_serving_config_ids == [ + "associated_serving_config_ids_value" + ] + assert response.solution_type == common.SolutionType.SOLUTION_TYPE_RECOMMENDATION + assert response.use_cases == [common.SearchUseCase.SEARCH_USE_CASE_SEARCH] + + +@pytest.mark.asyncio +async def test_create_control_async_from_dict(): + await test_create_control_async(request_type=dict) + + +def test_create_control_field_headers(): + client = ControlServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = control_service.CreateControlRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_control), "__call__") as call: + call.return_value = gcd_control.Control() + client.create_control(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_create_control_field_headers_async(): + client = ControlServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = control_service.CreateControlRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_control), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gcd_control.Control()) + await client.create_control(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_create_control_flattened(): + client = ControlServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_control), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = gcd_control.Control() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.create_control( + parent="parent_value", + control=gcd_control.Control( + boost_action=gcd_control.Control.BoostAction(boost=0.551) + ), + control_id="control_id_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].control + mock_val = gcd_control.Control( + boost_action=gcd_control.Control.BoostAction(boost=0.551) + ) + assert arg == mock_val + arg = args[0].control_id + mock_val = "control_id_value" + assert arg == mock_val + + +def test_create_control_flattened_error(): + client = ControlServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_control( + control_service.CreateControlRequest(), + parent="parent_value", + control=gcd_control.Control( + boost_action=gcd_control.Control.BoostAction(boost=0.551) + ), + control_id="control_id_value", + ) + + +@pytest.mark.asyncio +async def test_create_control_flattened_async(): + client = ControlServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_control), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = gcd_control.Control() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gcd_control.Control()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.create_control( + parent="parent_value", + control=gcd_control.Control( + boost_action=gcd_control.Control.BoostAction(boost=0.551) + ), + control_id="control_id_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].control + mock_val = gcd_control.Control( + boost_action=gcd_control.Control.BoostAction(boost=0.551) + ) + assert arg == mock_val + arg = args[0].control_id + mock_val = "control_id_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_create_control_flattened_error_async(): + client = ControlServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.create_control( + control_service.CreateControlRequest(), + parent="parent_value", + control=gcd_control.Control( + boost_action=gcd_control.Control.BoostAction(boost=0.551) + ), + control_id="control_id_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + control_service.DeleteControlRequest, + dict, + ], +) +def test_delete_control(request_type, transport: str = "grpc"): + client = ControlServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_control), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.delete_control(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = control_service.DeleteControlRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_control_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ControlServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_control), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.delete_control() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == control_service.DeleteControlRequest() + + +def test_delete_control_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = ControlServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = control_service.DeleteControlRequest( + name="name_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_control), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.delete_control(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == control_service.DeleteControlRequest( + name="name_value", + ) + + +def test_delete_control_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ControlServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete_control in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.delete_control] = mock_rpc + request = {} + client.delete_control(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.delete_control(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_delete_control_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. 
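+    # Calling delete_control() with no arguments should still result in a default
+    # DeleteControlRequest being sent.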
+ client = ControlServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_control), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_control() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == control_service.DeleteControlRequest() + + +@pytest.mark.asyncio +async def test_delete_control_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = ControlServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.delete_control + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[ + client._client._transport.delete_control + ] = mock_object + + request = {} + await client.delete_control(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + await client.delete_control(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + +@pytest.mark.asyncio +async def test_delete_control_async( + transport: str = "grpc_asyncio", request_type=control_service.DeleteControlRequest +): + client = ControlServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_control), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_control(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = control_service.DeleteControlRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_delete_control_async_from_dict(): + await test_delete_control_async(request_type=dict) + + +def test_delete_control_field_headers(): + client = ControlServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = control_service.DeleteControlRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.delete_control), "__call__") as call: + call.return_value = None + client.delete_control(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_delete_control_field_headers_async(): + client = ControlServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = control_service.DeleteControlRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_control), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_control(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_delete_control_flattened(): + client = ControlServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_control), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.delete_control( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_delete_control_flattened_error(): + client = ControlServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_control( + control_service.DeleteControlRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_delete_control_flattened_async(): + client = ControlServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_control), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.delete_control( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_delete_control_flattened_error_async(): + client = ControlServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.delete_control( + control_service.DeleteControlRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + control_service.UpdateControlRequest, + dict, + ], +) +def test_update_control(request_type, transport: str = "grpc"): + client = ControlServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_control), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = gcd_control.Control( + name="name_value", + display_name="display_name_value", + associated_serving_config_ids=["associated_serving_config_ids_value"], + solution_type=common.SolutionType.SOLUTION_TYPE_RECOMMENDATION, + use_cases=[common.SearchUseCase.SEARCH_USE_CASE_SEARCH], + ) + response = client.update_control(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = control_service.UpdateControlRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, gcd_control.Control) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.associated_serving_config_ids == [ + "associated_serving_config_ids_value" + ] + assert response.solution_type == common.SolutionType.SOLUTION_TYPE_RECOMMENDATION + assert response.use_cases == [common.SearchUseCase.SEARCH_USE_CASE_SEARCH] + + +def test_update_control_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ControlServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_control), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.update_control() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == control_service.UpdateControlRequest() + + +def test_update_control_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = ControlServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. 
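+    # UpdateControlRequest has no plain (non-UUID) string fields to pre-populate, so an
+    # empty request is constructed here.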
+ request = control_service.UpdateControlRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_control), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.update_control(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == control_service.UpdateControlRequest() + + +def test_update_control_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ControlServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.update_control in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.update_control] = mock_rpc + request = {} + client.update_control(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.update_control(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_update_control_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ControlServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_control), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + gcd_control.Control( + name="name_value", + display_name="display_name_value", + associated_serving_config_ids=["associated_serving_config_ids_value"], + solution_type=common.SolutionType.SOLUTION_TYPE_RECOMMENDATION, + use_cases=[common.SearchUseCase.SEARCH_USE_CASE_SEARCH], + ) + ) + response = await client.update_control() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == control_service.UpdateControlRequest() + + +@pytest.mark.asyncio +async def test_update_control_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = ControlServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.update_control + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[ + client._client._transport.update_control + ] = mock_object + + request = {} + await client.update_control(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + await client.update_control(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + +@pytest.mark.asyncio +async def test_update_control_async( + transport: str = "grpc_asyncio", request_type=control_service.UpdateControlRequest +): + client = ControlServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_control), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + gcd_control.Control( + name="name_value", + display_name="display_name_value", + associated_serving_config_ids=["associated_serving_config_ids_value"], + solution_type=common.SolutionType.SOLUTION_TYPE_RECOMMENDATION, + use_cases=[common.SearchUseCase.SEARCH_USE_CASE_SEARCH], + ) + ) + response = await client.update_control(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = control_service.UpdateControlRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, gcd_control.Control) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.associated_serving_config_ids == [ + "associated_serving_config_ids_value" + ] + assert response.solution_type == common.SolutionType.SOLUTION_TYPE_RECOMMENDATION + assert response.use_cases == [common.SearchUseCase.SEARCH_USE_CASE_SEARCH] + + +@pytest.mark.asyncio +async def test_update_control_async_from_dict(): + await test_update_control_async(request_type=dict) + + +def test_update_control_field_headers(): + client = ControlServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = control_service.UpdateControlRequest() + + request.control.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_control), "__call__") as call: + call.return_value = gcd_control.Control() + client.update_control(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "control.name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_update_control_field_headers_async(): + client = ControlServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = control_service.UpdateControlRequest() + + request.control.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_control), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gcd_control.Control()) + await client.update_control(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "control.name=name_value", + ) in kw["metadata"] + + +def test_update_control_flattened(): + client = ControlServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_control), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = gcd_control.Control() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.update_control( + control=gcd_control.Control( + boost_action=gcd_control.Control.BoostAction(boost=0.551) + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].control + mock_val = gcd_control.Control( + boost_action=gcd_control.Control.BoostAction(boost=0.551) + ) + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + assert arg == mock_val + + +def test_update_control_flattened_error(): + client = ControlServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_control( + control_service.UpdateControlRequest(), + control=gcd_control.Control( + boost_action=gcd_control.Control.BoostAction(boost=0.551) + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +@pytest.mark.asyncio +async def test_update_control_flattened_async(): + client = ControlServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_control), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = gcd_control.Control() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gcd_control.Control()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.update_control( + control=gcd_control.Control( + boost_action=gcd_control.Control.BoostAction(boost=0.551) + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].control + mock_val = gcd_control.Control( + boost_action=gcd_control.Control.BoostAction(boost=0.551) + ) + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_update_control_flattened_error_async(): + client = ControlServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.update_control( + control_service.UpdateControlRequest(), + control=gcd_control.Control( + boost_action=gcd_control.Control.BoostAction(boost=0.551) + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +@pytest.mark.parametrize( + "request_type", + [ + control_service.GetControlRequest, + dict, + ], +) +def test_get_control(request_type, transport: str = "grpc"): + client = ControlServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_control), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = control.Control( + name="name_value", + display_name="display_name_value", + associated_serving_config_ids=["associated_serving_config_ids_value"], + solution_type=common.SolutionType.SOLUTION_TYPE_RECOMMENDATION, + use_cases=[common.SearchUseCase.SEARCH_USE_CASE_SEARCH], + ) + response = client.get_control(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = control_service.GetControlRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, control.Control) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.associated_serving_config_ids == [ + "associated_serving_config_ids_value" + ] + assert response.solution_type == common.SolutionType.SOLUTION_TYPE_RECOMMENDATION + assert response.use_cases == [common.SearchUseCase.SEARCH_USE_CASE_SEARCH] + + +def test_get_control_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ControlServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_control), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.get_control() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == control_service.GetControlRequest() + + +def test_get_control_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = ControlServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = control_service.GetControlRequest( + name="name_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_control), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.get_control(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == control_service.GetControlRequest( + name="name_value", + ) + + +def test_get_control_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ControlServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_control in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client._transport._wrapped_methods[client._transport.get_control] = mock_rpc + request = {} + client.get_control(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.get_control(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_get_control_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ControlServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_control), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + control.Control( + name="name_value", + display_name="display_name_value", + associated_serving_config_ids=["associated_serving_config_ids_value"], + solution_type=common.SolutionType.SOLUTION_TYPE_RECOMMENDATION, + use_cases=[common.SearchUseCase.SEARCH_USE_CASE_SEARCH], + ) + ) + response = await client.get_control() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == control_service.GetControlRequest() + + +@pytest.mark.asyncio +async def test_get_control_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = ControlServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.get_control + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[ + client._client._transport.get_control + ] = mock_object + + request = {} + await client.get_control(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + await client.get_control(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + +@pytest.mark.asyncio +async def test_get_control_async( + transport: str = "grpc_asyncio", request_type=control_service.GetControlRequest +): + client = ControlServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_control), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + control.Control( + name="name_value", + display_name="display_name_value", + associated_serving_config_ids=["associated_serving_config_ids_value"], + solution_type=common.SolutionType.SOLUTION_TYPE_RECOMMENDATION, + use_cases=[common.SearchUseCase.SEARCH_USE_CASE_SEARCH], + ) + ) + response = await client.get_control(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = control_service.GetControlRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, control.Control) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.associated_serving_config_ids == [ + "associated_serving_config_ids_value" + ] + assert response.solution_type == common.SolutionType.SOLUTION_TYPE_RECOMMENDATION + assert response.use_cases == [common.SearchUseCase.SEARCH_USE_CASE_SEARCH] + + +@pytest.mark.asyncio +async def test_get_control_async_from_dict(): + await test_get_control_async(request_type=dict) + + +def test_get_control_field_headers(): + client = ControlServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = control_service.GetControlRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_control), "__call__") as call: + call.return_value = control.Control() + client.get_control(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_control_field_headers_async(): + client = ControlServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = control_service.GetControlRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_control), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(control.Control()) + await client.get_control(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_get_control_flattened(): + client = ControlServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_control), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = control.Control() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
+ client.get_control( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_get_control_flattened_error(): + client = ControlServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_control( + control_service.GetControlRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_get_control_flattened_async(): + client = ControlServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_control), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = control.Control() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(control.Control()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_control( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_get_control_flattened_error_async(): + client = ControlServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.get_control( + control_service.GetControlRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + control_service.ListControlsRequest, + dict, + ], +) +def test_list_controls(request_type, transport: str = "grpc"): + client = ControlServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_controls), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = control_service.ListControlsResponse( + next_page_token="next_page_token_value", + ) + response = client.list_controls(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = control_service.ListControlsRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListControlsPager) + assert response.next_page_token == "next_page_token_value" + + +def test_list_controls_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ControlServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.list_controls), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.list_controls() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == control_service.ListControlsRequest() + + +def test_list_controls_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = ControlServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = control_service.ListControlsRequest( + parent="parent_value", + page_token="page_token_value", + filter="filter_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_controls), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.list_controls(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == control_service.ListControlsRequest( + parent="parent_value", + page_token="page_token_value", + filter="filter_value", + ) + + +def test_list_controls_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ControlServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_controls in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.list_controls] = mock_rpc + request = {} + client.list_controls(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_controls(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_list_controls_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ControlServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_controls), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + control_service.ListControlsResponse( + next_page_token="next_page_token_value", + ) + ) + response = await client.list_controls() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == control_service.ListControlsRequest() + + +@pytest.mark.asyncio +async def test_list_controls_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = ControlServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.list_controls + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[ + client._client._transport.list_controls + ] = mock_object + + request = {} + await client.list_controls(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + await client.list_controls(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + +@pytest.mark.asyncio +async def test_list_controls_async( + transport: str = "grpc_asyncio", request_type=control_service.ListControlsRequest +): + client = ControlServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_controls), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + control_service.ListControlsResponse( + next_page_token="next_page_token_value", + ) + ) + response = await client.list_controls(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = control_service.ListControlsRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListControlsAsyncPager) + assert response.next_page_token == "next_page_token_value" + + +@pytest.mark.asyncio +async def test_list_controls_async_from_dict(): + await test_list_controls_async(request_type=dict) + + +def test_list_controls_field_headers(): + client = ControlServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = control_service.ListControlsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.list_controls), "__call__") as call: + call.return_value = control_service.ListControlsResponse() + client.list_controls(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_controls_field_headers_async(): + client = ControlServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = control_service.ListControlsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_controls), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + control_service.ListControlsResponse() + ) + await client.list_controls(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_list_controls_flattened(): + client = ControlServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_controls), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = control_service.ListControlsResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_controls( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +def test_list_controls_flattened_error(): + client = ControlServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_controls( + control_service.ListControlsRequest(), + parent="parent_value", + ) + + +@pytest.mark.asyncio +async def test_list_controls_flattened_async(): + client = ControlServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_controls), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = control_service.ListControlsResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + control_service.ListControlsResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_controls( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_list_controls_flattened_error_async(): + client = ControlServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.list_controls( + control_service.ListControlsRequest(), + parent="parent_value", + ) + + +def test_list_controls_pager(transport_name: str = "grpc"): + client = ControlServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_controls), "__call__") as call: + # Set the response to a series of pages. + call.side_effect = ( + control_service.ListControlsResponse( + controls=[ + control.Control(), + control.Control(), + control.Control(), + ], + next_page_token="abc", + ), + control_service.ListControlsResponse( + controls=[], + next_page_token="def", + ), + control_service.ListControlsResponse( + controls=[ + control.Control(), + ], + next_page_token="ghi", + ), + control_service.ListControlsResponse( + controls=[ + control.Control(), + control.Control(), + ], + ), + RuntimeError, + ) + + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + ) + pager = client.list_controls(request={}) + + assert pager._metadata == expected_metadata + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, control.Control) for i in results) + + +def test_list_controls_pages(transport_name: str = "grpc"): + client = ControlServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_controls), "__call__") as call: + # Set the response to a series of pages. + call.side_effect = ( + control_service.ListControlsResponse( + controls=[ + control.Control(), + control.Control(), + control.Control(), + ], + next_page_token="abc", + ), + control_service.ListControlsResponse( + controls=[], + next_page_token="def", + ), + control_service.ListControlsResponse( + controls=[ + control.Control(), + ], + next_page_token="ghi", + ), + control_service.ListControlsResponse( + controls=[ + control.Control(), + control.Control(), + ], + ), + RuntimeError, + ) + pages = list(client.list_controls(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_list_controls_async_pager(): + client = ControlServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_controls), "__call__", new_callable=mock.AsyncMock + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + control_service.ListControlsResponse( + controls=[ + control.Control(), + control.Control(), + control.Control(), + ], + next_page_token="abc", + ), + control_service.ListControlsResponse( + controls=[], + next_page_token="def", + ), + control_service.ListControlsResponse( + controls=[ + control.Control(), + ], + next_page_token="ghi", + ), + control_service.ListControlsResponse( + controls=[ + control.Control(), + control.Control(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_controls( + request={}, + ) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, control.Control) for i in responses) + + +@pytest.mark.asyncio +async def test_list_controls_async_pages(): + client = ControlServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_controls), "__call__", new_callable=mock.AsyncMock + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + control_service.ListControlsResponse( + controls=[ + control.Control(), + control.Control(), + control.Control(), + ], + next_page_token="abc", + ), + control_service.ListControlsResponse( + controls=[], + next_page_token="def", + ), + control_service.ListControlsResponse( + controls=[ + control.Control(), + ], + next_page_token="ghi", + ), + control_service.ListControlsResponse( + controls=[ + control.Control(), + control.Control(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_controls(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + control_service.CreateControlRequest, + dict, + ], +) +def test_create_control_rest(request_type): + client = ControlServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2/dataStores/sample3"} + request_init["control"] = { + "boost_action": { + "boost": 0.551, + "filter": "filter_value", + "data_store": "data_store_value", + }, + "filter_action": {"filter": "filter_value", "data_store": "data_store_value"}, + "redirect_action": {"redirect_uri": "redirect_uri_value"}, + "synonyms_action": {"synonyms": ["synonyms_value1", "synonyms_value2"]}, + "name": "name_value", + "display_name": "display_name_value", + "associated_serving_config_ids": [ + "associated_serving_config_ids_value1", + "associated_serving_config_ids_value2", + ], + "solution_type": 1, + "use_cases": [1], + "conditions": [ + { + "query_terms": [{"value": "value_value", "full_match": True}], + "active_time_range": [ + {"start_time": {"seconds": 751, "nanos": 543}, "end_time": {}} + ], + } + ], + } + # The version of a generated dependency at test runtime may differ from the version used during generation. 
+ # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = control_service.CreateControlRequest.meta.fields["control"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["control"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["control"][field])): + del request_init["control"][field][i][subfield] + else: + del request_init["control"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = gcd_control.Control( + name="name_value", + display_name="display_name_value", + associated_serving_config_ids=["associated_serving_config_ids_value"], + solution_type=common.SolutionType.SOLUTION_TYPE_RECOMMENDATION, + use_cases=[common.SearchUseCase.SEARCH_USE_CASE_SEARCH], + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = gcd_control.Control.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.create_control(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, gcd_control.Control) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.associated_serving_config_ids == [ + "associated_serving_config_ids_value" + ] + assert response.solution_type == common.SolutionType.SOLUTION_TYPE_RECOMMENDATION + assert response.use_cases == [common.SearchUseCase.SEARCH_USE_CASE_SEARCH] + + +def test_create_control_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ControlServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.create_control in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.create_control] = mock_rpc + + request = {} + client.create_control(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.create_control(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_create_control_rest_required_fields( + request_type=control_service.CreateControlRequest, +): + transport_class = transports.ControlServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request_init["control_id"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + assert "controlId" not in jsonified_request + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_control._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + assert "controlId" in jsonified_request + assert jsonified_request["controlId"] == request_init["control_id"] + + jsonified_request["parent"] = "parent_value" + jsonified_request["controlId"] = "control_id_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_control._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("control_id",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + assert "controlId" in jsonified_request + assert jsonified_request["controlId"] == "control_id_value" + + client = ControlServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = gcd_control.Control() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = gcd_control.Control.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.create_control(request) + + expected_params = [ + ( + "controlId", + "", + ), + ("$alt", "json;enum-encoding=int"), + ] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_create_control_rest_unset_required_fields(): + transport = transports.ControlServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.create_control._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(("controlId",)) + & set( + ( + "parent", + "control", + "controlId", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_control_rest_interceptors(null_interceptor): + transport = transports.ControlServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.ControlServiceRestInterceptor(), + ) + client = ControlServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ControlServiceRestInterceptor, "post_create_control" + ) as post, mock.patch.object( + transports.ControlServiceRestInterceptor, "pre_create_control" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = control_service.CreateControlRequest.pb( + control_service.CreateControlRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = gcd_control.Control.to_json(gcd_control.Control()) + + request = control_service.CreateControlRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = gcd_control.Control() + + client.create_control( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_create_control_rest_bad_request( + transport: str = "rest", request_type=control_service.CreateControlRequest +): + client = ControlServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2/dataStores/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.create_control(request) + + +def test_create_control_rest_flattened(): + client = ControlServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = gcd_control.Control() + + # get arguments that satisfy an http rule for this method + sample_request = { + "parent": "projects/sample1/locations/sample2/dataStores/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + control=gcd_control.Control( + boost_action=gcd_control.Control.BoostAction(boost=0.551) + ), + control_id="control_id_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = gcd_control.Control.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.create_control(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*/dataStores/*}/controls" + % client.transport._host, + args[1], + ) + + +def test_create_control_rest_flattened_error(transport: str = "rest"): + client = ControlServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_control( + control_service.CreateControlRequest(), + parent="parent_value", + control=gcd_control.Control( + boost_action=gcd_control.Control.BoostAction(boost=0.551) + ), + control_id="control_id_value", + ) + + +def test_create_control_rest_error(): + client = ControlServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + control_service.DeleteControlRequest, + dict, + ], +) +def test_delete_control_rest(request_type): + client = ControlServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/dataStores/sample3/controls/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = None + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.delete_control(request) + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_control_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ControlServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete_control in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.delete_control] = mock_rpc + + request = {} + client.delete_control(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.delete_control(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_delete_control_rest_required_fields( + request_type=control_service.DeleteControlRequest, +): + transport_class = transports.ControlServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_control._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_control._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = ControlServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = None + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "delete", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.delete_control(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_delete_control_rest_unset_required_fields(): + transport = transports.ControlServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.delete_control._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_control_rest_interceptors(null_interceptor): + transport = transports.ControlServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.ControlServiceRestInterceptor(), + ) + client = ControlServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ControlServiceRestInterceptor, "pre_delete_control" + ) as pre: + pre.assert_not_called() + pb_message = control_service.DeleteControlRequest.pb( + control_service.DeleteControlRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + + request = control_service.DeleteControlRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + + client.delete_control( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + + +def test_delete_control_rest_bad_request( + transport: str = "rest", request_type=control_service.DeleteControlRequest +): + client = ControlServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/dataStores/sample3/controls/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_control(request) + + +def test_delete_control_rest_flattened(): + client = ControlServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = None + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/dataStores/sample3/controls/sample4" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.delete_control(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/dataStores/*/controls/*}" + % client.transport._host, + args[1], + ) + + +def test_delete_control_rest_flattened_error(transport: str = "rest"): + client = ControlServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_control( + control_service.DeleteControlRequest(), + name="name_value", + ) + + +def test_delete_control_rest_error(): + client = ControlServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + control_service.UpdateControlRequest, + dict, + ], +) +def test_update_control_rest(request_type): + client = ControlServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "control": { + "name": "projects/sample1/locations/sample2/dataStores/sample3/controls/sample4" + } + } + request_init["control"] = { + "boost_action": { + "boost": 0.551, + "filter": "filter_value", + "data_store": "data_store_value", + }, + "filter_action": {"filter": "filter_value", "data_store": "data_store_value"}, + "redirect_action": {"redirect_uri": "redirect_uri_value"}, + "synonyms_action": {"synonyms": ["synonyms_value1", "synonyms_value2"]}, + "name": "projects/sample1/locations/sample2/dataStores/sample3/controls/sample4", + "display_name": "display_name_value", + "associated_serving_config_ids": [ + "associated_serving_config_ids_value1", + "associated_serving_config_ids_value2", + ], + "solution_type": 1, + "use_cases": [1], + "conditions": [ + { + "query_terms": [{"value": "value_value", "full_match": True}], + "active_time_range": [ + {"start_time": {"seconds": 751, "nanos": 543}, "end_time": {}} + ], + } + ], + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = control_service.UpdateControlRequest.meta.fields["control"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["control"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["control"][field])): + del request_init["control"][field][i][subfield] + else: + del request_init["control"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = gcd_control.Control( + name="name_value", + display_name="display_name_value", + associated_serving_config_ids=["associated_serving_config_ids_value"], + solution_type=common.SolutionType.SOLUTION_TYPE_RECOMMENDATION, + use_cases=[common.SearchUseCase.SEARCH_USE_CASE_SEARCH], + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = gcd_control.Control.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.update_control(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, gcd_control.Control) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.associated_serving_config_ids == [ + "associated_serving_config_ids_value" + ] + assert response.solution_type == common.SolutionType.SOLUTION_TYPE_RECOMMENDATION + assert response.use_cases == [common.SearchUseCase.SEARCH_USE_CASE_SEARCH] + + +def test_update_control_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ControlServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.update_control in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.update_control] = mock_rpc + + request = {} + client.update_control(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.update_control(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_update_control_rest_required_fields( + request_type=control_service.UpdateControlRequest, +): + transport_class = transports.ControlServiceRestTransport + + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_control._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_control._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("update_mask",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + + client = ControlServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = gcd_control.Control() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "patch", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = gcd_control.Control.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.update_control(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_update_control_rest_unset_required_fields(): + transport = transports.ControlServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.update_control._get_unset_required_fields({}) + assert set(unset_fields) == (set(("updateMask",)) & set(("control",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_control_rest_interceptors(null_interceptor): + transport = transports.ControlServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.ControlServiceRestInterceptor(), + ) + client = ControlServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ControlServiceRestInterceptor, "post_update_control" + ) as post, mock.patch.object( + transports.ControlServiceRestInterceptor, "pre_update_control" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = control_service.UpdateControlRequest.pb( + control_service.UpdateControlRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = gcd_control.Control.to_json(gcd_control.Control()) + + request = control_service.UpdateControlRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = gcd_control.Control() + + client.update_control( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_update_control_rest_bad_request( + transport: str = "rest", request_type=control_service.UpdateControlRequest +): + client = ControlServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "control": { + "name": "projects/sample1/locations/sample2/dataStores/sample3/controls/sample4" + } + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.update_control(request) + + +def test_update_control_rest_flattened(): + client = ControlServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = gcd_control.Control() + + # get arguments that satisfy an http rule for this method + sample_request = { + "control": { + "name": "projects/sample1/locations/sample2/dataStores/sample3/controls/sample4" + } + } + + # get truthy value for each flattened field + mock_args = dict( + control=gcd_control.Control( + boost_action=gcd_control.Control.BoostAction(boost=0.551) + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = gcd_control.Control.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.update_control(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{control.name=projects/*/locations/*/dataStores/*/controls/*}" + % client.transport._host, + args[1], + ) + + +def test_update_control_rest_flattened_error(transport: str = "rest"): + client = ControlServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_control( + control_service.UpdateControlRequest(), + control=gcd_control.Control( + boost_action=gcd_control.Control.BoostAction(boost=0.551) + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +def test_update_control_rest_error(): + client = ControlServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + control_service.GetControlRequest, + dict, + ], +) +def test_get_control_rest(request_type): + client = ControlServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/dataStores/sample3/controls/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = control.Control( + name="name_value", + display_name="display_name_value", + associated_serving_config_ids=["associated_serving_config_ids_value"], + solution_type=common.SolutionType.SOLUTION_TYPE_RECOMMENDATION, + use_cases=[common.SearchUseCase.SEARCH_USE_CASE_SEARCH], + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = control.Control.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_control(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, control.Control) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.associated_serving_config_ids == [ + "associated_serving_config_ids_value" + ] + assert response.solution_type == common.SolutionType.SOLUTION_TYPE_RECOMMENDATION + assert response.use_cases == [common.SearchUseCase.SEARCH_USE_CASE_SEARCH] + + +def test_get_control_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ControlServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_control in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get_control] = mock_rpc + + request = {} + client.get_control(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get_control(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_get_control_rest_required_fields( + request_type=control_service.GetControlRequest, +): + transport_class = transports.ControlServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_control._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_control._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = ControlServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = control.Control() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = control.Control.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_control(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_control_rest_unset_required_fields(): + transport = transports.ControlServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_control._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_control_rest_interceptors(null_interceptor): + transport = transports.ControlServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.ControlServiceRestInterceptor(), + ) + client = ControlServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ControlServiceRestInterceptor, "post_get_control" + ) as post, mock.patch.object( + transports.ControlServiceRestInterceptor, "pre_get_control" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = control_service.GetControlRequest.pb( + control_service.GetControlRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = control.Control.to_json(control.Control()) + + request = control_service.GetControlRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = control.Control() + + client.get_control( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_control_rest_bad_request( + transport: str = "rest", request_type=control_service.GetControlRequest +): + client = ControlServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/dataStores/sample3/controls/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_control(request) + + +def test_get_control_rest_flattened(): + client = ControlServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = control.Control() + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/dataStores/sample3/controls/sample4" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = control.Control.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_control(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/dataStores/*/controls/*}" + % client.transport._host, + args[1], + ) + + +def test_get_control_rest_flattened_error(transport: str = "rest"): + client = ControlServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_control( + control_service.GetControlRequest(), + name="name_value", + ) + + +def test_get_control_rest_error(): + client = ControlServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + control_service.ListControlsRequest, + dict, + ], +) +def test_list_controls_rest(request_type): + client = ControlServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2/dataStores/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = control_service.ListControlsResponse( + next_page_token="next_page_token_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = control_service.ListControlsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_controls(request) + + # Establish that the response is the type that we expect. 
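The REST tests above all rely on the same trick: patch `requests.Session.request` so nothing leaves the process, then hand back a `Response` whose `_content` is the expected proto serialized with `json_format.MessageToJson`. A condensed sketch of that pattern outside the generated suite might look like the following (a hedged example, not part of the diff; it assumes `google-cloud-discoveryengine` and `requests` are installed and that `Control` is re-exported at the `discoveryengine_v1` level as usual for generated packages; the resource name is purely illustrative):

```python
from unittest import mock

from google.auth import credentials as ga_credentials
from google.protobuf import json_format
from requests import Response
from requests.sessions import Session

from google.cloud import discoveryengine_v1


def test_get_control_offline():
    client = discoveryengine_v1.ControlServiceClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="rest"
    )
    # Illustrative resource name; any name matching the Control pattern works.
    expected = discoveryengine_v1.Control(
        name="projects/p/locations/global/dataStores/d/controls/c"
    )

    # Patch the session so no HTTP request is sent, and fake a 200 response
    # whose body is the expected Control rendered as protobuf JSON.
    with mock.patch.object(Session, "request") as req:
        response_value = Response()
        response_value.status_code = 200
        response_value._content = json_format.MessageToJson(
            discoveryengine_v1.Control.pb(expected)
        ).encode("utf-8")
        req.return_value = response_value

        got = client.get_control(name=expected.name)

    assert got.name == expected.name
```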
+ assert isinstance(response, pagers.ListControlsPager) + assert response.next_page_token == "next_page_token_value" + + +def test_list_controls_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ControlServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_controls in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.list_controls] = mock_rpc + + request = {} + client.list_controls(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_controls(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_list_controls_rest_required_fields( + request_type=control_service.ListControlsRequest, +): + transport_class = transports.ControlServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_controls._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_controls._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "filter", + "page_size", + "page_token", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = ControlServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = control_service.ListControlsResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = control_service.ListControlsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_controls(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_controls_rest_unset_required_fields(): + transport = transports.ControlServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.list_controls._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "filter", + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_controls_rest_interceptors(null_interceptor): + transport = transports.ControlServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.ControlServiceRestInterceptor(), + ) + client = ControlServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ControlServiceRestInterceptor, "post_list_controls" + ) as post, mock.patch.object( + transports.ControlServiceRestInterceptor, "pre_list_controls" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = control_service.ListControlsRequest.pb( + control_service.ListControlsRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = control_service.ListControlsResponse.to_json( + control_service.ListControlsResponse() + ) + + request = control_service.ListControlsRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = control_service.ListControlsResponse() + + client.list_controls( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_controls_rest_bad_request( + transport: str = "rest", request_type=control_service.ListControlsRequest +): + client = ControlServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2/dataStores/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
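The interceptor tests patch `pre_list_controls`/`post_list_controls` on `ControlServiceRestInterceptor` to prove both hooks fire around a call. In application code the same hooks are used by subclassing the interceptor and handing it to the REST transport, roughly as sketched below (hedged; the logging is illustrative, and the import paths follow the module names that appear in these tests):

```python
import logging

from google.auth import credentials as ga_credentials

from google.cloud.discoveryengine_v1.services.control_service import (
    ControlServiceClient,
    transports,
)


class LoggingInterceptor(transports.ControlServiceRestInterceptor):
    """Log every ListControls round trip (illustrative only)."""

    def pre_list_controls(self, request, metadata):
        # Runs before the request is sent; must return (request, metadata).
        logging.info("ListControls request for parent=%s", request.parent)
        return request, metadata

    def post_list_controls(self, response):
        # Runs after the response is parsed; must return the response.
        logging.info("ListControls returned %d controls", len(response.controls))
        return response


transport = transports.ControlServiceRestTransport(
    credentials=ga_credentials.AnonymousCredentials(),
    interceptor=LoggingInterceptor(),
)
client = ControlServiceClient(transport=transport)
```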
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_controls(request) + + +def test_list_controls_rest_flattened(): + client = ControlServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = control_service.ListControlsResponse() + + # get arguments that satisfy an http rule for this method + sample_request = { + "parent": "projects/sample1/locations/sample2/dataStores/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = control_service.ListControlsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.list_controls(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*/dataStores/*}/controls" + % client.transport._host, + args[1], + ) + + +def test_list_controls_rest_flattened_error(transport: str = "rest"): + client = ControlServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_controls( + control_service.ListControlsRequest(), + parent="parent_value", + ) + + +def test_list_controls_rest_pager(transport: str = "rest"): + client = ControlServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + control_service.ListControlsResponse( + controls=[ + control.Control(), + control.Control(), + control.Control(), + ], + next_page_token="abc", + ), + control_service.ListControlsResponse( + controls=[], + next_page_token="def", + ), + control_service.ListControlsResponse( + controls=[ + control.Control(), + ], + next_page_token="ghi", + ), + control_service.ListControlsResponse( + controls=[ + control.Control(), + control.Control(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + control_service.ListControlsResponse.to_json(x) for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = { + "parent": "projects/sample1/locations/sample2/dataStores/sample3" + } + + pager = client.list_controls(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, control.Control) for i in results) + + pages = list(client.list_controls(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.ControlServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = ControlServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.ControlServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = ControlServiceClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. + transport = transports.ControlServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = ControlServiceClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = ControlServiceClient( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.ControlServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = ControlServiceClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. 
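`test_list_controls_rest_pager` above feeds the client a canned sequence of pages and checks that the returned `ListControlsPager` walks all of them. Consumed normally, that pager looks roughly like this hedged sketch (illustrative parent name; assumes Application Default Credentials are configured, and the calls would hit the live API):

```python
from google.cloud import discoveryengine_v1

client = discoveryengine_v1.ControlServiceClient()  # picks up ADC

# Illustrative parent resource name.
parent = "projects/my-project/locations/global/dataStores/my-store"

# Item-by-item iteration; the pager fetches further pages lazily.
for control in client.list_controls(parent=parent):
    print(control.name)

# Or walk whole pages to look at next_page_token yourself.
for page in client.list_controls(parent=parent).pages:
    print(page.raw_page.next_page_token)
```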
+ transport = transports.ControlServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = ControlServiceClient(transport=transport) + assert client.transport is transport + + +def test_transport_get_channel(): + # A client may be instantiated with a custom transport instance. + transport = transports.ControlServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + transport = transports.ControlServiceGrpcAsyncIOTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.ControlServiceGrpcTransport, + transports.ControlServiceGrpcAsyncIOTransport, + transports.ControlServiceRestTransport, + ], +) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "rest", + ], +) +def test_transport_kind(transport_name): + transport = ControlServiceClient.get_transport_class(transport_name)( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert transport.kind == transport_name + + +def test_transport_grpc_default(): + # A client should use the gRPC transport by default. + client = ControlServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert isinstance( + client.transport, + transports.ControlServiceGrpcTransport, + ) + + +def test_control_service_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.ControlServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json", + ) + + +def test_control_service_base_transport(): + # Instantiate the base transport. + with mock.patch( + "google.cloud.discoveryengine_v1.services.control_service.transports.ControlServiceTransport.__init__" + ) as Transport: + Transport.return_value = None + transport = transports.ControlServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. 
+ methods = ( + "create_control", + "delete_control", + "update_control", + "get_control", + "list_controls", + "get_operation", + "cancel_operation", + "list_operations", + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + with pytest.raises(NotImplementedError): + transport.close() + + # Catch all for all remaining methods and properties + remainder = [ + "kind", + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + + +def test_control_service_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch( + "google.cloud.discoveryengine_v1.services.control_service.transports.ControlServiceTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.ControlServiceTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with( + "credentials.json", + scopes=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id="octopus", + ) + + +def test_control_service_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. + with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( + "google.cloud.discoveryengine_v1.services.control_service.transports.ControlServiceTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.ControlServiceTransport() + adc.assert_called_once() + + +def test_control_service_auth_adc(): + # If no credentials are provided, we should use ADC credentials. + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + ControlServiceClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id=None, + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.ControlServiceGrpcTransport, + transports.ControlServiceGrpcAsyncIOTransport, + ], +) +def test_control_service_transport_auth_adc(transport_class): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
+ with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + adc.assert_called_once_with( + scopes=["1", "2"], + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id="octopus", + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.ControlServiceGrpcTransport, + transports.ControlServiceGrpcAsyncIOTransport, + transports.ControlServiceRestTransport, + ], +) +def test_control_service_transport_auth_gdch_credentials(transport_class): + host = "https://language.com" + api_audience_tests = [None, "https://language2.com"] + api_audience_expect = [host, "https://language2.com"] + for t, e in zip(api_audience_tests, api_audience_expect): + with mock.patch.object(google.auth, "default", autospec=True) as adc: + gdch_mock = mock.MagicMock() + type(gdch_mock).with_gdch_audience = mock.PropertyMock( + return_value=gdch_mock + ) + adc.return_value = (gdch_mock, None) + transport_class(host=host, api_audience=t) + gdch_mock.with_gdch_audience.assert_called_once_with(e) + + +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.ControlServiceGrpcTransport, grpc_helpers), + (transports.ControlServiceGrpcAsyncIOTransport, grpc_helpers_async), + ], +) +def test_control_service_transport_create_channel(transport_class, grpc_helpers): + # If credentials and host are not provided, the transport class should use + # ADC credentials. + with mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + + create_channel.assert_called_with( + "discoveryengine.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + scopes=["1", "2"], + default_host="discoveryengine.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.ControlServiceGrpcTransport, + transports.ControlServiceGrpcAsyncIOTransport, + ], +) +def test_control_service_grpc_transport_client_cert_source_for_mtls(transport_class): + cred = ga_credentials.AnonymousCredentials() + + # Check ssl_channel_credentials is used if provided. 
+ with mock.patch.object(transport_class, "create_channel") as mock_create_channel: + mock_ssl_channel_creds = mock.Mock() + transport_class( + host="squid.clam.whelk", + credentials=cred, + ssl_channel_credentials=mock_ssl_channel_creds, + ) + mock_create_channel.assert_called_once_with( + "squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_channel_creds, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls + # is used. + with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): + with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: + transport_class( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback, + ) + expected_cert, expected_key = client_cert_source_callback() + mock_ssl_cred.assert_called_once_with( + certificate_chain=expected_cert, private_key=expected_key + ) + + +def test_control_service_http_transport_client_cert_source_for_mtls(): + cred = ga_credentials.AnonymousCredentials() + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ) as mock_configure_mtls_channel: + transports.ControlServiceRestTransport( + credentials=cred, client_cert_source_for_mtls=client_cert_source_callback + ) + mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + "rest", + ], +) +def test_control_service_host_no_port(transport_name): + client = ControlServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="discoveryengine.googleapis.com" + ), + transport=transport_name, + ) + assert client.transport._host == ( + "discoveryengine.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://discoveryengine.googleapis.com" + ) + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + "rest", + ], +) +def test_control_service_host_with_port(transport_name): + client = ControlServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="discoveryengine.googleapis.com:8000" + ), + transport=transport_name, + ) + assert client.transport._host == ( + "discoveryengine.googleapis.com:8000" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://discoveryengine.googleapis.com:8000" + ) + + +@pytest.mark.parametrize( + "transport_name", + [ + "rest", + ], +) +def test_control_service_client_transport_session_collision(transport_name): + creds1 = ga_credentials.AnonymousCredentials() + creds2 = ga_credentials.AnonymousCredentials() + client1 = ControlServiceClient( + credentials=creds1, + transport=transport_name, + ) + client2 = ControlServiceClient( + credentials=creds2, + transport=transport_name, + ) + session1 = client1.transport.create_control._session + session2 = client2.transport.create_control._session + assert session1 != session2 + session1 = client1.transport.delete_control._session + session2 = client2.transport.delete_control._session + assert session1 != session2 + session1 = client1.transport.update_control._session + session2 = 
client2.transport.update_control._session + assert session1 != session2 + session1 = client1.transport.get_control._session + session2 = client2.transport.get_control._session + assert session1 != session2 + session1 = client1.transport.list_controls._session + session2 = client2.transport.list_controls._session + assert session1 != session2 + + +def test_control_service_grpc_transport_channel(): + channel = grpc.secure_channel("http://localhost/", grpc.local_channel_credentials()) + + # Check that channel is used if provided. + transport = transports.ControlServiceGrpcTransport( + host="squid.clam.whelk", + channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None + + +def test_control_service_grpc_asyncio_transport_channel(): + channel = aio.secure_channel("http://localhost/", grpc.local_channel_credentials()) + + # Check that channel is used if provided. + transport = transports.ControlServiceGrpcAsyncIOTransport( + host="squid.clam.whelk", + channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. +@pytest.mark.parametrize( + "transport_class", + [ + transports.ControlServiceGrpcTransport, + transports.ControlServiceGrpcAsyncIOTransport, + ], +) +def test_control_service_transport_channel_mtls_with_client_cert_source( + transport_class, +): + with mock.patch( + "grpc.ssl_channel_credentials", autospec=True + ) as grpc_ssl_channel_cred: + with mock.patch.object( + transport_class, "create_channel" + ) as grpc_create_channel: + mock_ssl_cred = mock.Mock() + grpc_ssl_channel_cred.return_value = mock_ssl_cred + + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + + cred = ga_credentials.AnonymousCredentials() + with pytest.warns(DeprecationWarning): + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (cred, None) + transport = transport_class( + host="squid.clam.whelk", + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=client_cert_source_callback, + ) + adc.assert_called_once() + + grpc_ssl_channel_cred.assert_called_once_with( + certificate_chain=b"cert bytes", private_key=b"key bytes" + ) + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + assert transport._ssl_channel_credentials == mock_ssl_cred + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. 
+@pytest.mark.parametrize( + "transport_class", + [ + transports.ControlServiceGrpcTransport, + transports.ControlServiceGrpcAsyncIOTransport, + ], +) +def test_control_service_transport_channel_mtls_with_adc(transport_class): + mock_ssl_cred = mock.Mock() + with mock.patch.multiple( + "google.auth.transport.grpc.SslCredentials", + __init__=mock.Mock(return_value=None), + ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), + ): + with mock.patch.object( + transport_class, "create_channel" + ) as grpc_create_channel: + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + mock_cred = mock.Mock() + + with pytest.warns(DeprecationWarning): + transport = transport_class( + host="squid.clam.whelk", + credentials=mock_cred, + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=None, + ) + + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=mock_cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + + +def test_control_path(): + project = "squid" + location = "clam" + data_store = "whelk" + control = "octopus" + expected = "projects/{project}/locations/{location}/dataStores/{data_store}/controls/{control}".format( + project=project, + location=location, + data_store=data_store, + control=control, + ) + actual = ControlServiceClient.control_path(project, location, data_store, control) + assert expected == actual + + +def test_parse_control_path(): + expected = { + "project": "oyster", + "location": "nudibranch", + "data_store": "cuttlefish", + "control": "mussel", + } + path = ControlServiceClient.control_path(**expected) + + # Check that the path construction is reversible. + actual = ControlServiceClient.parse_control_path(path) + assert expected == actual + + +def test_data_store_path(): + project = "winkle" + location = "nautilus" + data_store = "scallop" + expected = "projects/{project}/locations/{location}/dataStores/{data_store}".format( + project=project, + location=location, + data_store=data_store, + ) + actual = ControlServiceClient.data_store_path(project, location, data_store) + assert expected == actual + + +def test_parse_data_store_path(): + expected = { + "project": "abalone", + "location": "squid", + "data_store": "clam", + } + path = ControlServiceClient.data_store_path(**expected) + + # Check that the path construction is reversible. + actual = ControlServiceClient.parse_data_store_path(path) + assert expected == actual + + +def test_common_billing_account_path(): + billing_account = "whelk" + expected = "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + actual = ControlServiceClient.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "octopus", + } + path = ControlServiceClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. 
+ actual = ControlServiceClient.parse_common_billing_account_path(path) + assert expected == actual + + +def test_common_folder_path(): + folder = "oyster" + expected = "folders/{folder}".format( + folder=folder, + ) + actual = ControlServiceClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "nudibranch", + } + path = ControlServiceClient.common_folder_path(**expected) + + # Check that the path construction is reversible. + actual = ControlServiceClient.parse_common_folder_path(path) + assert expected == actual + + +def test_common_organization_path(): + organization = "cuttlefish" + expected = "organizations/{organization}".format( + organization=organization, + ) + actual = ControlServiceClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "mussel", + } + path = ControlServiceClient.common_organization_path(**expected) + + # Check that the path construction is reversible. + actual = ControlServiceClient.parse_common_organization_path(path) + assert expected == actual + + +def test_common_project_path(): + project = "winkle" + expected = "projects/{project}".format( + project=project, + ) + actual = ControlServiceClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "nautilus", + } + path = ControlServiceClient.common_project_path(**expected) + + # Check that the path construction is reversible. + actual = ControlServiceClient.parse_common_project_path(path) + assert expected == actual + + +def test_common_location_path(): + project = "scallop" + location = "abalone" + expected = "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) + actual = ControlServiceClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "squid", + "location": "clam", + } + path = ControlServiceClient.common_location_path(**expected) + + # Check that the path construction is reversible. 
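The path tests above check that helpers such as `control_path` and `parse_control_path` are inverses of each other. They are also the convenient way to assemble resource names by hand, e.g. (a hedged aside; all values below are illustrative):

```python
from google.cloud import discoveryengine_v1

# Build a fully qualified Control resource name from its components.
name = discoveryengine_v1.ControlServiceClient.control_path(
    project="my-project",
    location="global",
    data_store="my-store",
    control="my-control",
)
# name == "projects/my-project/locations/global/dataStores/my-store/controls/my-control"

# And recover the components from a name.
parts = discoveryengine_v1.ControlServiceClient.parse_control_path(name)
assert parts == {
    "project": "my-project",
    "location": "global",
    "data_store": "my-store",
    "control": "my-control",
}
```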
+ actual = ControlServiceClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object( + transports.ControlServiceTransport, "_prep_wrapped_messages" + ) as prep: + client = ControlServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object( + transports.ControlServiceTransport, "_prep_wrapped_messages" + ) as prep: + transport_class = ControlServiceClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + +@pytest.mark.asyncio +async def test_transport_close_async(): + client = ControlServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + with mock.patch.object( + type(getattr(client.transport, "grpc_channel")), "close" + ) as close: + async with client: + close.assert_not_called() + close.assert_called_once() + + +def test_cancel_operation_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.CancelOperationRequest +): + client = ControlServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/operations/sample2"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.cancel_operation(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.CancelOperationRequest, + dict, + ], +) +def test_cancel_operation_rest(request_type): + client = ControlServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {"name": "projects/sample1/operations/sample2"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "{}" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.cancel_operation(request) + + # Establish that the response is the type that we expect. + assert response is None + + +def test_get_operation_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.GetOperationRequest +): + client = ControlServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/operations/sample2"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_operation(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.GetOperationRequest, + dict, + ], +) +def test_get_operation_rest(request_type): + client = ControlServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {"name": "projects/sample1/operations/sample2"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_operation(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + + +def test_list_operations_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.ListOperationsRequest +): + client = ControlServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict({"name": "projects/sample1"}, request) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_operations(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.ListOperationsRequest, + dict, + ], +) +def test_list_operations_rest(request_type): + client = ControlServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {"name": "projects/sample1"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.ListOperationsResponse() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_operations(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) + + +def test_cancel_operation(transport: str = "grpc"): + client = ControlServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
+ request = operations_pb2.CancelOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_cancel_operation_async(transport: str = "grpc_asyncio"): + client = ControlServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.CancelOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +def test_cancel_operation_field_headers(): + client = ControlServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.CancelOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + call.return_value = None + + client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_cancel_operation_field_headers_async(): + client = ControlServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.CancelOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_cancel_operation_from_dict(): + client = ControlServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + + response = client.cancel_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_cancel_operation_from_dict_async(): + client = ControlServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.cancel_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_get_operation(transport: str = "grpc"): + client = ControlServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.GetOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation() + response = client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + + +@pytest.mark.asyncio +async def test_get_operation_async(transport: str = "grpc_asyncio"): + client = ControlServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.GetOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + response = await client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + + +def test_get_operation_field_headers(): + client = ControlServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.GetOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + call.return_value = operations_pb2.Operation() + + client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_operation_field_headers_async(): + client = ControlServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.GetOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + await client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_get_operation_from_dict(): + client = ControlServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation() + + response = client.get_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_get_operation_from_dict_async(): + client = ControlServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + response = await client.get_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_list_operations(transport: str = "grpc"): + client = ControlServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.ListOperationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.ListOperationsResponse() + response = client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, operations_pb2.ListOperationsResponse) + + +@pytest.mark.asyncio +async def test_list_operations_async(transport: str = "grpc_asyncio"): + client = ControlServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.ListOperationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + response = await client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) + + +def test_list_operations_field_headers(): + client = ControlServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.ListOperationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + call.return_value = operations_pb2.ListOperationsResponse() + + client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_operations_field_headers_async(): + client = ControlServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.ListOperationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + await client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_list_operations_from_dict(): + client = ControlServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = operations_pb2.ListOperationsResponse() + + response = client.list_operations( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_list_operations_from_dict_async(): + client = ControlServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + response = await client.list_operations( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_transport_close(): + transports = { + "rest": "_session", + "grpc": "_grpc_channel", + } + + for transport, close_name in transports.items(): + client = ControlServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + with mock.patch.object( + type(getattr(client.transport, close_name)), "close" + ) as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +def test_client_ctx(): + transports = [ + "rest", + "grpc", + ] + for transport in transports: + client = ControlServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + # Test client calls underlying transport. + with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() + + +@pytest.mark.parametrize( + "client_class,transport_class", + [ + (ControlServiceClient, transports.ControlServiceGrpcTransport), + (ControlServiceAsyncClient, transports.ControlServiceGrpcAsyncIOTransport), + ], +) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) diff --git a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1/test_conversational_search_service.py b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1/test_conversational_search_service.py index d78b757444ca..d6e1aae82425 100644 --- a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1/test_conversational_search_service.py +++ b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1/test_conversational_search_service.py @@ -57,7 +57,10 @@ search_service, ) from google.cloud.discoveryengine_v1.types import conversation as gcd_conversation +from google.cloud.discoveryengine_v1.types import answer from google.cloud.discoveryengine_v1.types import conversation +from google.cloud.discoveryengine_v1.types import session +from google.cloud.discoveryengine_v1.types import session as gcd_session def 
client_cert_source_callback(): @@ -3631,13 +3634,13 @@ def test_list_conversations_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_conversations(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -3790,6 +3793,2756 @@ async def test_list_conversations_async_pages(): assert page_.raw_page.next_page_token == token +@pytest.mark.parametrize( + "request_type", + [ + conversational_search_service.AnswerQueryRequest, + dict, + ], +) +def test_answer_query(request_type, transport: str = "grpc"): + client = ConversationalSearchServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.answer_query), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = conversational_search_service.AnswerQueryResponse( + answer_query_token="answer_query_token_value", + ) + response = client.answer_query(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = conversational_search_service.AnswerQueryRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, conversational_search_service.AnswerQueryResponse) + assert response.answer_query_token == "answer_query_token_value" + + +def test_answer_query_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ConversationalSearchServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.answer_query), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.answer_query() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == conversational_search_service.AnswerQueryRequest() + + +def test_answer_query_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = ConversationalSearchServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = conversational_search_service.AnswerQueryRequest( + serving_config="serving_config_value", + session="session_value", + user_pseudo_id="user_pseudo_id_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.answer_query), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.answer_query(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == conversational_search_service.AnswerQueryRequest( + serving_config="serving_config_value", + session="session_value", + user_pseudo_id="user_pseudo_id_value", + ) + + +def test_answer_query_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ConversationalSearchServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.answer_query in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.answer_query] = mock_rpc + request = {} + client.answer_query(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.answer_query(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_answer_query_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ConversationalSearchServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.answer_query), "__call__") as call: + # Designate an appropriate return value for the call. 
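+ # The async surface awaits the transport call, so the mocked response is
+ # wrapped in grpc_helpers_async.FakeUnaryUnaryCall to make it awaitable.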
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + conversational_search_service.AnswerQueryResponse( + answer_query_token="answer_query_token_value", + ) + ) + response = await client.answer_query() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == conversational_search_service.AnswerQueryRequest() + + +@pytest.mark.asyncio +async def test_answer_query_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = ConversationalSearchServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.answer_query + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[ + client._client._transport.answer_query + ] = mock_object + + request = {} + await client.answer_query(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + await client.answer_query(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + +@pytest.mark.asyncio +async def test_answer_query_async( + transport: str = "grpc_asyncio", + request_type=conversational_search_service.AnswerQueryRequest, +): + client = ConversationalSearchServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.answer_query), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + conversational_search_service.AnswerQueryResponse( + answer_query_token="answer_query_token_value", + ) + ) + response = await client.answer_query(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = conversational_search_service.AnswerQueryRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, conversational_search_service.AnswerQueryResponse) + assert response.answer_query_token == "answer_query_token_value" + + +@pytest.mark.asyncio +async def test_answer_query_async_from_dict(): + await test_answer_query_async(request_type=dict) + + +def test_answer_query_field_headers(): + client = ConversationalSearchServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
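+ # AnswerQuery routes on the serving_config field rather than name, so that
+ # is the field populated for the header check below.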
+ request = conversational_search_service.AnswerQueryRequest() + + request.serving_config = "serving_config_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.answer_query), "__call__") as call: + call.return_value = conversational_search_service.AnswerQueryResponse() + client.answer_query(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "serving_config=serving_config_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_answer_query_field_headers_async(): + client = ConversationalSearchServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = conversational_search_service.AnswerQueryRequest() + + request.serving_config = "serving_config_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.answer_query), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + conversational_search_service.AnswerQueryResponse() + ) + await client.answer_query(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "serving_config=serving_config_value", + ) in kw["metadata"] + + +@pytest.mark.parametrize( + "request_type", + [ + conversational_search_service.GetAnswerRequest, + dict, + ], +) +def test_get_answer(request_type, transport: str = "grpc"): + client = ConversationalSearchServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_answer), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = answer.Answer( + name="name_value", + state=answer.Answer.State.IN_PROGRESS, + answer_text="answer_text_value", + related_questions=["related_questions_value"], + answer_skipped_reasons=[ + answer.Answer.AnswerSkippedReason.ADVERSARIAL_QUERY_IGNORED + ], + ) + response = client.get_answer(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = conversational_search_service.GetAnswerRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, answer.Answer) + assert response.name == "name_value" + assert response.state == answer.Answer.State.IN_PROGRESS + assert response.answer_text == "answer_text_value" + assert response.related_questions == ["related_questions_value"] + assert response.answer_skipped_reasons == [ + answer.Answer.AnswerSkippedReason.ADVERSARIAL_QUERY_IGNORED + ] + + +def test_get_answer_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ConversationalSearchServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_answer), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.get_answer() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == conversational_search_service.GetAnswerRequest() + + +def test_get_answer_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = ConversationalSearchServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = conversational_search_service.GetAnswerRequest( + name="name_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_answer), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.get_answer(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == conversational_search_service.GetAnswerRequest( + name="name_value", + ) + + +def test_get_answer_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ConversationalSearchServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_answer in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get_answer] = mock_rpc + request = {} + client.get_answer(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.get_answer(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_get_answer_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. 
+ client = ConversationalSearchServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_answer), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + answer.Answer( + name="name_value", + state=answer.Answer.State.IN_PROGRESS, + answer_text="answer_text_value", + related_questions=["related_questions_value"], + answer_skipped_reasons=[ + answer.Answer.AnswerSkippedReason.ADVERSARIAL_QUERY_IGNORED + ], + ) + ) + response = await client.get_answer() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == conversational_search_service.GetAnswerRequest() + + +@pytest.mark.asyncio +async def test_get_answer_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = ConversationalSearchServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.get_answer + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[ + client._client._transport.get_answer + ] = mock_object + + request = {} + await client.get_answer(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + await client.get_answer(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + +@pytest.mark.asyncio +async def test_get_answer_async( + transport: str = "grpc_asyncio", + request_type=conversational_search_service.GetAnswerRequest, +): + client = ConversationalSearchServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_answer), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + answer.Answer( + name="name_value", + state=answer.Answer.State.IN_PROGRESS, + answer_text="answer_text_value", + related_questions=["related_questions_value"], + answer_skipped_reasons=[ + answer.Answer.AnswerSkippedReason.ADVERSARIAL_QUERY_IGNORED + ], + ) + ) + response = await client.get_answer(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = conversational_search_service.GetAnswerRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, answer.Answer) + assert response.name == "name_value" + assert response.state == answer.Answer.State.IN_PROGRESS + assert response.answer_text == "answer_text_value" + assert response.related_questions == ["related_questions_value"] + assert response.answer_skipped_reasons == [ + answer.Answer.AnswerSkippedReason.ADVERSARIAL_QUERY_IGNORED + ] + + +@pytest.mark.asyncio +async def test_get_answer_async_from_dict(): + await test_get_answer_async(request_type=dict) + + +def test_get_answer_field_headers(): + client = ConversationalSearchServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = conversational_search_service.GetAnswerRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_answer), "__call__") as call: + call.return_value = answer.Answer() + client.get_answer(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_answer_field_headers_async(): + client = ConversationalSearchServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = conversational_search_service.GetAnswerRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_answer), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(answer.Answer()) + await client.get_answer(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_get_answer_flattened(): + client = ConversationalSearchServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_answer), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = answer.Answer() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_answer( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_get_answer_flattened_error(): + client = ConversationalSearchServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get_answer( + conversational_search_service.GetAnswerRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_get_answer_flattened_async(): + client = ConversationalSearchServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_answer), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = answer.Answer() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(answer.Answer()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_answer( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_get_answer_flattened_error_async(): + client = ConversationalSearchServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.get_answer( + conversational_search_service.GetAnswerRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + conversational_search_service.CreateSessionRequest, + dict, + ], +) +def test_create_session(request_type, transport: str = "grpc"): + client = ConversationalSearchServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_session), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = gcd_session.Session( + name="name_value", + state=gcd_session.Session.State.IN_PROGRESS, + user_pseudo_id="user_pseudo_id_value", + ) + response = client.create_session(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = conversational_search_service.CreateSessionRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, gcd_session.Session) + assert response.name == "name_value" + assert response.state == gcd_session.Session.State.IN_PROGRESS + assert response.user_pseudo_id == "user_pseudo_id_value" + + +def test_create_session_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ConversationalSearchServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_session), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.create_session() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == conversational_search_service.CreateSessionRequest() + + +def test_create_session_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = ConversationalSearchServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = conversational_search_service.CreateSessionRequest( + parent="parent_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_session), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.create_session(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == conversational_search_service.CreateSessionRequest( + parent="parent_value", + ) + + +def test_create_session_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ConversationalSearchServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.create_session in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.create_session] = mock_rpc + request = {} + client.create_session(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.create_session(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_create_session_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ConversationalSearchServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_session), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + gcd_session.Session( + name="name_value", + state=gcd_session.Session.State.IN_PROGRESS, + user_pseudo_id="user_pseudo_id_value", + ) + ) + response = await client.create_session() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == conversational_search_service.CreateSessionRequest() + + +@pytest.mark.asyncio +async def test_create_session_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = ConversationalSearchServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.create_session + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[ + client._client._transport.create_session + ] = mock_object + + request = {} + await client.create_session(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + await client.create_session(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + +@pytest.mark.asyncio +async def test_create_session_async( + transport: str = "grpc_asyncio", + request_type=conversational_search_service.CreateSessionRequest, +): + client = ConversationalSearchServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_session), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + gcd_session.Session( + name="name_value", + state=gcd_session.Session.State.IN_PROGRESS, + user_pseudo_id="user_pseudo_id_value", + ) + ) + response = await client.create_session(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = conversational_search_service.CreateSessionRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, gcd_session.Session) + assert response.name == "name_value" + assert response.state == gcd_session.Session.State.IN_PROGRESS + assert response.user_pseudo_id == "user_pseudo_id_value" + + +@pytest.mark.asyncio +async def test_create_session_async_from_dict(): + await test_create_session_async(request_type=dict) + + +def test_create_session_field_headers(): + client = ConversationalSearchServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = conversational_search_service.CreateSessionRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_session), "__call__") as call: + call.return_value = gcd_session.Session() + client.create_session(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_create_session_field_headers_async(): + client = ConversationalSearchServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = conversational_search_service.CreateSessionRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_session), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gcd_session.Session()) + await client.create_session(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_create_session_flattened(): + client = ConversationalSearchServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_session), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = gcd_session.Session() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.create_session( + parent="parent_value", + session=gcd_session.Session(name="name_value"), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].session + mock_val = gcd_session.Session(name="name_value") + assert arg == mock_val + + +def test_create_session_flattened_error(): + client = ConversationalSearchServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_session( + conversational_search_service.CreateSessionRequest(), + parent="parent_value", + session=gcd_session.Session(name="name_value"), + ) + + +@pytest.mark.asyncio +async def test_create_session_flattened_async(): + client = ConversationalSearchServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_session), "__call__") as call: + # Designate an appropriate return value for the call. 
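+ # The plain Session() assigned first is immediately superseded by the
+ # awaitable FakeUnaryUnaryCall wrapper; only the latter reaches the async client.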
+ call.return_value = gcd_session.Session() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gcd_session.Session()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.create_session( + parent="parent_value", + session=gcd_session.Session(name="name_value"), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].session + mock_val = gcd_session.Session(name="name_value") + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_create_session_flattened_error_async(): + client = ConversationalSearchServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.create_session( + conversational_search_service.CreateSessionRequest(), + parent="parent_value", + session=gcd_session.Session(name="name_value"), + ) + + +@pytest.mark.parametrize( + "request_type", + [ + conversational_search_service.DeleteSessionRequest, + dict, + ], +) +def test_delete_session(request_type, transport: str = "grpc"): + client = ConversationalSearchServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_session), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.delete_session(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = conversational_search_service.DeleteSessionRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_session_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ConversationalSearchServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_session), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.delete_session() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == conversational_search_service.DeleteSessionRequest() + + +def test_delete_session_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = ConversationalSearchServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. 
+ request = conversational_search_service.DeleteSessionRequest( + name="name_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_session), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.delete_session(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == conversational_search_service.DeleteSessionRequest( + name="name_value", + ) + + +def test_delete_session_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ConversationalSearchServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete_session in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.delete_session] = mock_rpc + request = {} + client.delete_session(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.delete_session(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_delete_session_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ConversationalSearchServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_session), "__call__") as call: + # Designate an appropriate return value for the call. 
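+ # The mocked awaitable resolves to None, matching the empty response that
+ # delete_session is expected to return.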
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_session() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == conversational_search_service.DeleteSessionRequest() + + +@pytest.mark.asyncio +async def test_delete_session_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = ConversationalSearchServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.delete_session + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[ + client._client._transport.delete_session + ] = mock_object + + request = {} + await client.delete_session(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + await client.delete_session(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + +@pytest.mark.asyncio +async def test_delete_session_async( + transport: str = "grpc_asyncio", + request_type=conversational_search_service.DeleteSessionRequest, +): + client = ConversationalSearchServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_session), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_session(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = conversational_search_service.DeleteSessionRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_delete_session_async_from_dict(): + await test_delete_session_async(request_type=dict) + + +def test_delete_session_field_headers(): + client = ConversationalSearchServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = conversational_search_service.DeleteSessionRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_session), "__call__") as call: + call.return_value = None + client.delete_session(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_delete_session_field_headers_async(): + client = ConversationalSearchServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = conversational_search_service.DeleteSessionRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_session), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_session(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_delete_session_flattened(): + client = ConversationalSearchServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_session), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.delete_session( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_delete_session_flattened_error(): + client = ConversationalSearchServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_session( + conversational_search_service.DeleteSessionRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_delete_session_flattened_async(): + client = ConversationalSearchServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_session), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.delete_session( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_delete_session_flattened_error_async(): + client = ConversationalSearchServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.delete_session( + conversational_search_service.DeleteSessionRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + conversational_search_service.UpdateSessionRequest, + dict, + ], +) +def test_update_session(request_type, transport: str = "grpc"): + client = ConversationalSearchServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_session), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = gcd_session.Session( + name="name_value", + state=gcd_session.Session.State.IN_PROGRESS, + user_pseudo_id="user_pseudo_id_value", + ) + response = client.update_session(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = conversational_search_service.UpdateSessionRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, gcd_session.Session) + assert response.name == "name_value" + assert response.state == gcd_session.Session.State.IN_PROGRESS + assert response.user_pseudo_id == "user_pseudo_id_value" + + +def test_update_session_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ConversationalSearchServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_session), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.update_session() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == conversational_search_service.UpdateSessionRequest() + + +def test_update_session_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = ConversationalSearchServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = conversational_search_service.UpdateSessionRequest() + + # Mock the actual call within the gRPC stub, and fake the request. 
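+    # (For UpdateSessionRequest there is nothing to pre-populate: apart from the
+    # `session` message and `update_mask` it carries no scalar string fields,
+    # which is why the request above is left empty.)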
+ with mock.patch.object(type(client.transport.update_session), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.update_session(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == conversational_search_service.UpdateSessionRequest() + + +def test_update_session_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ConversationalSearchServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.update_session in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.update_session] = mock_rpc + request = {} + client.update_session(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.update_session(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_update_session_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ConversationalSearchServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_session), "__call__") as call: + # Designate an appropriate return value for the call. 
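+        # The async stub returns a call object that has to be awaited, so the
+        # canned response is wrapped in grpc_helpers_async.FakeUnaryUnaryCall to
+        # keep the mocked call awaitable.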
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + gcd_session.Session( + name="name_value", + state=gcd_session.Session.State.IN_PROGRESS, + user_pseudo_id="user_pseudo_id_value", + ) + ) + response = await client.update_session() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == conversational_search_service.UpdateSessionRequest() + + +@pytest.mark.asyncio +async def test_update_session_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = ConversationalSearchServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.update_session + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[ + client._client._transport.update_session + ] = mock_object + + request = {} + await client.update_session(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + await client.update_session(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + +@pytest.mark.asyncio +async def test_update_session_async( + transport: str = "grpc_asyncio", + request_type=conversational_search_service.UpdateSessionRequest, +): + client = ConversationalSearchServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_session), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + gcd_session.Session( + name="name_value", + state=gcd_session.Session.State.IN_PROGRESS, + user_pseudo_id="user_pseudo_id_value", + ) + ) + response = await client.update_session(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = conversational_search_service.UpdateSessionRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, gcd_session.Session) + assert response.name == "name_value" + assert response.state == gcd_session.Session.State.IN_PROGRESS + assert response.user_pseudo_id == "user_pseudo_id_value" + + +@pytest.mark.asyncio +async def test_update_session_async_from_dict(): + await test_update_session_async(request_type=dict) + + +def test_update_session_field_headers(): + client = ConversationalSearchServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
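+    # For illustration, the metadata asserted below is what the routing-header
+    # helper produces for the URI-bound field, roughly:
+    #     gapic_v1.routing_header.to_grpc_metadata((("session.name", "name_value"),))
+    #     == ("x-goog-request-params", "session.name=name_value")
+    # (a sketch of the mechanism, not a claim about this client's internals).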
+ request = conversational_search_service.UpdateSessionRequest() + + request.session.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_session), "__call__") as call: + call.return_value = gcd_session.Session() + client.update_session(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "session.name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_update_session_field_headers_async(): + client = ConversationalSearchServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = conversational_search_service.UpdateSessionRequest() + + request.session.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_session), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gcd_session.Session()) + await client.update_session(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "session.name=name_value", + ) in kw["metadata"] + + +def test_update_session_flattened(): + client = ConversationalSearchServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_session), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = gcd_session.Session() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.update_session( + session=gcd_session.Session(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].session + mock_val = gcd_session.Session(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + assert arg == mock_val + + +def test_update_session_flattened_error(): + client = ConversationalSearchServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_session( + conversational_search_service.UpdateSessionRequest(), + session=gcd_session.Session(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +@pytest.mark.asyncio +async def test_update_session_flattened_async(): + client = ConversationalSearchServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.update_session), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = gcd_session.Session() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gcd_session.Session()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.update_session( + session=gcd_session.Session(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].session + mock_val = gcd_session.Session(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_update_session_flattened_error_async(): + client = ConversationalSearchServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.update_session( + conversational_search_service.UpdateSessionRequest(), + session=gcd_session.Session(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +@pytest.mark.parametrize( + "request_type", + [ + conversational_search_service.GetSessionRequest, + dict, + ], +) +def test_get_session(request_type, transport: str = "grpc"): + client = ConversationalSearchServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_session), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = session.Session( + name="name_value", + state=session.Session.State.IN_PROGRESS, + user_pseudo_id="user_pseudo_id_value", + ) + response = client.get_session(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = conversational_search_service.GetSessionRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, session.Session) + assert response.name == "name_value" + assert response.state == session.Session.State.IN_PROGRESS + assert response.user_pseudo_id == "user_pseudo_id_value" + + +def test_get_session_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ConversationalSearchServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_session), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.get_session() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == conversational_search_service.GetSessionRequest() + + +def test_get_session_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = ConversationalSearchServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = conversational_search_service.GetSessionRequest( + name="name_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_session), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.get_session(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == conversational_search_service.GetSessionRequest( + name="name_value", + ) + + +def test_get_session_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ConversationalSearchServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_session in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get_session] = mock_rpc + request = {} + client.get_session(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.get_session(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_get_session_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ConversationalSearchServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_session), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + session.Session( + name="name_value", + state=session.Session.State.IN_PROGRESS, + user_pseudo_id="user_pseudo_id_value", + ) + ) + response = await client.get_session() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == conversational_search_service.GetSessionRequest() + + +@pytest.mark.asyncio +async def test_get_session_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = ConversationalSearchServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.get_session + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[ + client._client._transport.get_session + ] = mock_object + + request = {} + await client.get_session(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + await client.get_session(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + +@pytest.mark.asyncio +async def test_get_session_async( + transport: str = "grpc_asyncio", + request_type=conversational_search_service.GetSessionRequest, +): + client = ConversationalSearchServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_session), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + session.Session( + name="name_value", + state=session.Session.State.IN_PROGRESS, + user_pseudo_id="user_pseudo_id_value", + ) + ) + response = await client.get_session(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = conversational_search_service.GetSessionRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, session.Session) + assert response.name == "name_value" + assert response.state == session.Session.State.IN_PROGRESS + assert response.user_pseudo_id == "user_pseudo_id_value" + + +@pytest.mark.asyncio +async def test_get_session_async_from_dict(): + await test_get_session_async(request_type=dict) + + +def test_get_session_field_headers(): + client = ConversationalSearchServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = conversational_search_service.GetSessionRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_session), "__call__") as call: + call.return_value = session.Session() + client.get_session(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_session_field_headers_async(): + client = ConversationalSearchServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = conversational_search_service.GetSessionRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_session), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(session.Session()) + await client.get_session(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_get_session_flattened(): + client = ConversationalSearchServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_session), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = session.Session() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_session( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_get_session_flattened_error(): + client = ConversationalSearchServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_session( + conversational_search_service.GetSessionRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_get_session_flattened_async(): + client = ConversationalSearchServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_session), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = session.Session() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(session.Session()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
+ response = await client.get_session( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_get_session_flattened_error_async(): + client = ConversationalSearchServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.get_session( + conversational_search_service.GetSessionRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + conversational_search_service.ListSessionsRequest, + dict, + ], +) +def test_list_sessions(request_type, transport: str = "grpc"): + client = ConversationalSearchServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_sessions), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = conversational_search_service.ListSessionsResponse( + next_page_token="next_page_token_value", + ) + response = client.list_sessions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = conversational_search_service.ListSessionsRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListSessionsPager) + assert response.next_page_token == "next_page_token_value" + + +def test_list_sessions_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ConversationalSearchServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_sessions), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.list_sessions() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == conversational_search_service.ListSessionsRequest() + + +def test_list_sessions_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = ConversationalSearchServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = conversational_search_service.ListSessionsRequest( + parent="parent_value", + page_token="page_token_value", + filter="filter_value", + order_by="order_by_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
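+    # Per AIP-4235, request fields annotated as UUID4-format request IDs are
+    # filled in with a generated UUID when left unset; the explicitly set fields
+    # above must still reach the stub unchanged, which is what the equality
+    # check below verifies.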
+ with mock.patch.object(type(client.transport.list_sessions), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.list_sessions(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == conversational_search_service.ListSessionsRequest( + parent="parent_value", + page_token="page_token_value", + filter="filter_value", + order_by="order_by_value", + ) + + +def test_list_sessions_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ConversationalSearchServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_sessions in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.list_sessions] = mock_rpc + request = {} + client.list_sessions(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_sessions(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_list_sessions_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ConversationalSearchServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_sessions), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + conversational_search_service.ListSessionsResponse( + next_page_token="next_page_token_value", + ) + ) + response = await client.list_sessions() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == conversational_search_service.ListSessionsRequest() + + +@pytest.mark.asyncio +async def test_list_sessions_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = ConversationalSearchServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.list_sessions + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[ + client._client._transport.list_sessions + ] = mock_object + + request = {} + await client.list_sessions(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + await client.list_sessions(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + +@pytest.mark.asyncio +async def test_list_sessions_async( + transport: str = "grpc_asyncio", + request_type=conversational_search_service.ListSessionsRequest, +): + client = ConversationalSearchServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_sessions), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + conversational_search_service.ListSessionsResponse( + next_page_token="next_page_token_value", + ) + ) + response = await client.list_sessions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = conversational_search_service.ListSessionsRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListSessionsAsyncPager) + assert response.next_page_token == "next_page_token_value" + + +@pytest.mark.asyncio +async def test_list_sessions_async_from_dict(): + await test_list_sessions_async(request_type=dict) + + +def test_list_sessions_field_headers(): + client = ConversationalSearchServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = conversational_search_service.ListSessionsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.list_sessions), "__call__") as call: + call.return_value = conversational_search_service.ListSessionsResponse() + client.list_sessions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_sessions_field_headers_async(): + client = ConversationalSearchServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = conversational_search_service.ListSessionsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_sessions), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + conversational_search_service.ListSessionsResponse() + ) + await client.list_sessions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_list_sessions_flattened(): + client = ConversationalSearchServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_sessions), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = conversational_search_service.ListSessionsResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_sessions( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +def test_list_sessions_flattened_error(): + client = ConversationalSearchServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_sessions( + conversational_search_service.ListSessionsRequest(), + parent="parent_value", + ) + + +@pytest.mark.asyncio +async def test_list_sessions_flattened_async(): + client = ConversationalSearchServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_sessions), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = conversational_search_service.ListSessionsResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + conversational_search_service.ListSessionsResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
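+        # The flattened `parent=` keyword is folded by the client into a
+        # ListSessionsRequest; the assertions below inspect args[0] (the request
+        # object the stub received) to confirm the field was copied over.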
+ response = await client.list_sessions( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_list_sessions_flattened_error_async(): + client = ConversationalSearchServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.list_sessions( + conversational_search_service.ListSessionsRequest(), + parent="parent_value", + ) + + +def test_list_sessions_pager(transport_name: str = "grpc"): + client = ConversationalSearchServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_sessions), "__call__") as call: + # Set the response to a series of pages. + call.side_effect = ( + conversational_search_service.ListSessionsResponse( + sessions=[ + session.Session(), + session.Session(), + session.Session(), + ], + next_page_token="abc", + ), + conversational_search_service.ListSessionsResponse( + sessions=[], + next_page_token="def", + ), + conversational_search_service.ListSessionsResponse( + sessions=[ + session.Session(), + ], + next_page_token="ghi", + ), + conversational_search_service.ListSessionsResponse( + sessions=[ + session.Session(), + session.Session(), + ], + ), + RuntimeError, + ) + + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + ) + pager = client.list_sessions(request={}) + + assert pager._metadata == expected_metadata + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, session.Session) for i in results) + + +def test_list_sessions_pages(transport_name: str = "grpc"): + client = ConversationalSearchServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_sessions), "__call__") as call: + # Set the response to a series of pages. + call.side_effect = ( + conversational_search_service.ListSessionsResponse( + sessions=[ + session.Session(), + session.Session(), + session.Session(), + ], + next_page_token="abc", + ), + conversational_search_service.ListSessionsResponse( + sessions=[], + next_page_token="def", + ), + conversational_search_service.ListSessionsResponse( + sessions=[ + session.Session(), + ], + next_page_token="ghi", + ), + conversational_search_service.ListSessionsResponse( + sessions=[ + session.Session(), + session.Session(), + ], + ), + RuntimeError, + ) + pages = list(client.list_sessions(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_list_sessions_async_pager(): + client = ConversationalSearchServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_sessions), "__call__", new_callable=mock.AsyncMock + ) as call: + # Set the response to a series of pages. 
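+        # Each element of side_effect is consumed by one page fetch; the pager
+        # keeps requesting pages while next_page_token is non-empty, so the
+        # trailing RuntimeError only fires if it over-fetches past the last page.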
+ call.side_effect = ( + conversational_search_service.ListSessionsResponse( + sessions=[ + session.Session(), + session.Session(), + session.Session(), + ], + next_page_token="abc", + ), + conversational_search_service.ListSessionsResponse( + sessions=[], + next_page_token="def", + ), + conversational_search_service.ListSessionsResponse( + sessions=[ + session.Session(), + ], + next_page_token="ghi", + ), + conversational_search_service.ListSessionsResponse( + sessions=[ + session.Session(), + session.Session(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_sessions( + request={}, + ) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, session.Session) for i in responses) + + +@pytest.mark.asyncio +async def test_list_sessions_async_pages(): + client = ConversationalSearchServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_sessions), "__call__", new_callable=mock.AsyncMock + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + conversational_search_service.ListSessionsResponse( + sessions=[ + session.Session(), + session.Session(), + session.Session(), + ], + next_page_token="abc", + ), + conversational_search_service.ListSessionsResponse( + sessions=[], + next_page_token="def", + ), + conversational_search_service.ListSessionsResponse( + sessions=[ + session.Session(), + ], + next_page_token="ghi", + ), + conversational_search_service.ListSessionsResponse( + sessions=[ + session.Session(), + session.Session(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_sessions(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + @pytest.mark.parametrize( "request_type", [ @@ -3797,7 +6550,2522 @@ async def test_list_conversations_async_pages(): dict, ], ) -def test_converse_conversation_rest(request_type): +def test_converse_conversation_rest(request_type): + client = ConversationalSearchServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/dataStores/sample3/conversations/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
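+        # Only the HTTP round trip itself is faked: the canned proto below is
+        # serialized with json_format.MessageToJson and handed back in a
+        # requests Response, so the client's transcoding and JSON-parsing paths
+        # still run for real.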
+ return_value = conversational_search_service.ConverseConversationResponse() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = conversational_search_service.ConverseConversationResponse.pb( + return_value + ) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.converse_conversation(request) + + # Establish that the response is the type that we expect. + assert isinstance( + response, conversational_search_service.ConverseConversationResponse + ) + + +def test_converse_conversation_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ConversationalSearchServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.converse_conversation + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.converse_conversation + ] = mock_rpc + + request = {} + client.converse_conversation(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.converse_conversation(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_converse_conversation_rest_required_fields( + request_type=conversational_search_service.ConverseConversationRequest, +): + transport_class = transports.ConversationalSearchServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).converse_conversation._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).converse_conversation._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = ConversationalSearchServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = conversational_search_service.ConverseConversationResponse() + # Mock the http request call within the method and fake a response. 
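+    # With both the HTTP session and path_template.transcode mocked out, the
+    # main behaviour exercised by the call below is how the wrapped method
+    # assembles query parameters, hence the final check on the default
+    # "$alt=json;enum-encoding=int" parameter.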
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = ( + conversational_search_service.ConverseConversationResponse.pb( + return_value + ) + ) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.converse_conversation(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_converse_conversation_rest_unset_required_fields(): + transport = transports.ConversationalSearchServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.converse_conversation._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(()) + & set( + ( + "name", + "query", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_converse_conversation_rest_interceptors(null_interceptor): + transport = transports.ConversationalSearchServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.ConversationalSearchServiceRestInterceptor(), + ) + client = ConversationalSearchServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ConversationalSearchServiceRestInterceptor, + "post_converse_conversation", + ) as post, mock.patch.object( + transports.ConversationalSearchServiceRestInterceptor, + "pre_converse_conversation", + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = conversational_search_service.ConverseConversationRequest.pb( + conversational_search_service.ConverseConversationRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = ( + conversational_search_service.ConverseConversationResponse.to_json( + conversational_search_service.ConverseConversationResponse() + ) + ) + + request = conversational_search_service.ConverseConversationRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = conversational_search_service.ConverseConversationResponse() + + client.converse_conversation( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_converse_conversation_rest_bad_request( + transport: str = "rest", + 
request_type=conversational_search_service.ConverseConversationRequest, +): + client = ConversationalSearchServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/dataStores/sample3/conversations/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.converse_conversation(request) + + +def test_converse_conversation_rest_flattened(): + client = ConversationalSearchServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = conversational_search_service.ConverseConversationResponse() + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/dataStores/sample3/conversations/sample4" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + query=conversation.TextInput(input="input_value"), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = conversational_search_service.ConverseConversationResponse.pb( + return_value + ) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.converse_conversation(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/dataStores/*/conversations/*}:converse" + % client.transport._host, + args[1], + ) + + +def test_converse_conversation_rest_flattened_error(transport: str = "rest"): + client = ConversationalSearchServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
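+    # The mutual-exclusivity check is enforced client-side, before any transport
+    # call is attempted, which is why no HTTP mocking is needed in this test.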
+ with pytest.raises(ValueError): + client.converse_conversation( + conversational_search_service.ConverseConversationRequest(), + name="name_value", + query=conversation.TextInput(input="input_value"), + ) + + +def test_converse_conversation_rest_error(): + client = ConversationalSearchServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + conversational_search_service.CreateConversationRequest, + dict, + ], +) +def test_create_conversation_rest(request_type): + client = ConversationalSearchServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2/dataStores/sample3"} + request_init["conversation"] = { + "name": "name_value", + "state": 1, + "user_pseudo_id": "user_pseudo_id_value", + "messages": [ + { + "user_input": { + "input": "input_value", + "context": { + "context_documents": [ + "context_documents_value1", + "context_documents_value2", + ], + "active_document": "active_document_value", + }, + }, + "reply": { + "summary": { + "summary_text": "summary_text_value", + "summary_skipped_reasons": [1], + "safety_attributes": { + "categories": ["categories_value1", "categories_value2"], + "scores": [0.656, 0.657], + }, + "summary_with_metadata": { + "summary": "summary_value", + "citation_metadata": { + "citations": [ + { + "start_index": 1189, + "end_index": 942, + "sources": [{"reference_index": 1574}], + } + ] + }, + "references": [ + { + "title": "title_value", + "document": "document_value", + "uri": "uri_value", + "chunk_contents": [ + { + "content": "content_value", + "page_identifier": "page_identifier_value", + } + ], + } + ], + }, + } + }, + "create_time": {"seconds": 751, "nanos": 543}, + } + ], + "start_time": {}, + "end_time": {}, + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = conversational_search_service.CreateConversationRequest.meta.fields[ + "conversation" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
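+        # proto-plus message types expose their schema via `.meta.fields`, while
+        # raw protobuf messages expose it via `.DESCRIPTOR.fields`; handling both
+        # lets the sample request be compared against whatever message definition
+        # the installed dependency actually provides.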
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["conversation"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["conversation"][field])): + del request_init["conversation"][field][i][subfield] + else: + del request_init["conversation"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = gcd_conversation.Conversation( + name="name_value", + state=gcd_conversation.Conversation.State.IN_PROGRESS, + user_pseudo_id="user_pseudo_id_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = gcd_conversation.Conversation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.create_conversation(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, gcd_conversation.Conversation) + assert response.name == "name_value" + assert response.state == gcd_conversation.Conversation.State.IN_PROGRESS + assert response.user_pseudo_id == "user_pseudo_id_value" + + +def test_create_conversation_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ConversationalSearchServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.create_conversation in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.create_conversation + ] = mock_rpc + + request = {} + client.create_conversation(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.create_conversation(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_create_conversation_rest_required_fields( + request_type=conversational_search_service.CreateConversationRequest, +): + transport_class = transports.ConversationalSearchServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_conversation._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_conversation._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = ConversationalSearchServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = gcd_conversation.Conversation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = gcd_conversation.Conversation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.create_conversation(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_create_conversation_rest_unset_required_fields(): + transport = transports.ConversationalSearchServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.create_conversation._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(()) + & set( + ( + "parent", + "conversation", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_conversation_rest_interceptors(null_interceptor): + transport = transports.ConversationalSearchServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.ConversationalSearchServiceRestInterceptor(), + ) + client = ConversationalSearchServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ConversationalSearchServiceRestInterceptor, + "post_create_conversation", + ) as post, mock.patch.object( + transports.ConversationalSearchServiceRestInterceptor, "pre_create_conversation" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = conversational_search_service.CreateConversationRequest.pb( + conversational_search_service.CreateConversationRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = gcd_conversation.Conversation.to_json( + gcd_conversation.Conversation() + ) + + request = conversational_search_service.CreateConversationRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = gcd_conversation.Conversation() + + client.create_conversation( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_create_conversation_rest_bad_request( + transport: str = "rest", + request_type=conversational_search_service.CreateConversationRequest, +): + client = ConversationalSearchServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2/dataStores/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.create_conversation(request) + + +def test_create_conversation_rest_flattened(): + client = ConversationalSearchServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = gcd_conversation.Conversation() + + # get arguments that satisfy an http rule for this method + sample_request = { + "parent": "projects/sample1/locations/sample2/dataStores/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + conversation=gcd_conversation.Conversation(name="name_value"), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = gcd_conversation.Conversation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.create_conversation(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*/dataStores/*}/conversations" + % client.transport._host, + args[1], + ) + + +def test_create_conversation_rest_flattened_error(transport: str = "rest"): + client = ConversationalSearchServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_conversation( + conversational_search_service.CreateConversationRequest(), + parent="parent_value", + conversation=gcd_conversation.Conversation(name="name_value"), + ) + + +def test_create_conversation_rest_error(): + client = ConversationalSearchServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + conversational_search_service.DeleteConversationRequest, + dict, + ], +) +def test_delete_conversation_rest(request_type): + client = ConversationalSearchServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/dataStores/sample3/conversations/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = None + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.delete_conversation(request) + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_conversation_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ConversationalSearchServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.delete_conversation in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.delete_conversation + ] = mock_rpc + + request = {} + client.delete_conversation(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.delete_conversation(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_delete_conversation_rest_required_fields( + request_type=conversational_search_service.DeleteConversationRequest, +): + transport_class = transports.ConversationalSearchServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_conversation._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_conversation._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = ConversationalSearchServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = None + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "delete", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.delete_conversation(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_delete_conversation_rest_unset_required_fields(): + transport = transports.ConversationalSearchServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.delete_conversation._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_conversation_rest_interceptors(null_interceptor): + transport = transports.ConversationalSearchServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.ConversationalSearchServiceRestInterceptor(), + ) + client = ConversationalSearchServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ConversationalSearchServiceRestInterceptor, "pre_delete_conversation" + ) as pre: + pre.assert_not_called() + pb_message = conversational_search_service.DeleteConversationRequest.pb( + conversational_search_service.DeleteConversationRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + + request = conversational_search_service.DeleteConversationRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + + client.delete_conversation( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + + +def test_delete_conversation_rest_bad_request( + transport: str = "rest", + request_type=conversational_search_service.DeleteConversationRequest, +): + client = ConversationalSearchServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/dataStores/sample3/conversations/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_conversation(request) + + +def test_delete_conversation_rest_flattened(): + client = ConversationalSearchServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = None + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/dataStores/sample3/conversations/sample4" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.delete_conversation(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/dataStores/*/conversations/*}" + % client.transport._host, + args[1], + ) + + +def test_delete_conversation_rest_flattened_error(transport: str = "rest"): + client = ConversationalSearchServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_conversation( + conversational_search_service.DeleteConversationRequest(), + name="name_value", + ) + + +def test_delete_conversation_rest_error(): + client = ConversationalSearchServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + conversational_search_service.UpdateConversationRequest, + dict, + ], +) +def test_update_conversation_rest(request_type): + client = ConversationalSearchServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "conversation": { + "name": "projects/sample1/locations/sample2/dataStores/sample3/conversations/sample4" + } + } + request_init["conversation"] = { + "name": "projects/sample1/locations/sample2/dataStores/sample3/conversations/sample4", + "state": 1, + "user_pseudo_id": "user_pseudo_id_value", + "messages": [ + { + "user_input": { + "input": "input_value", + "context": { + "context_documents": [ + "context_documents_value1", + "context_documents_value2", + ], + "active_document": "active_document_value", + }, + }, + "reply": { + "summary": { + "summary_text": "summary_text_value", + "summary_skipped_reasons": [1], + "safety_attributes": { + "categories": ["categories_value1", "categories_value2"], + "scores": [0.656, 0.657], + }, + "summary_with_metadata": { + "summary": "summary_value", + "citation_metadata": { + "citations": [ + { + "start_index": 1189, + "end_index": 942, + "sources": [{"reference_index": 1574}], + } + ] + }, + "references": [ + { + "title": "title_value", + "document": "document_value", + "uri": "uri_value", + "chunk_contents": [ + { + "content": "content_value", + "page_identifier": "page_identifier_value", + } + ], + } + ], + }, + } + }, + "create_time": {"seconds": 751, "nanos": 543}, + } + ], + "start_time": {}, + "end_time": {}, + } + # The version of a generated dependency at test runtime may differ from the version used during generation. 
+ # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = conversational_search_service.UpdateConversationRequest.meta.fields[ + "conversation" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["conversation"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["conversation"][field])): + del request_init["conversation"][field][i][subfield] + else: + del request_init["conversation"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = gcd_conversation.Conversation( + name="name_value", + state=gcd_conversation.Conversation.State.IN_PROGRESS, + user_pseudo_id="user_pseudo_id_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = gcd_conversation.Conversation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.update_conversation(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, gcd_conversation.Conversation) + assert response.name == "name_value" + assert response.state == gcd_conversation.Conversation.State.IN_PROGRESS + assert response.user_pseudo_id == "user_pseudo_id_value" + + +def test_update_conversation_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ConversationalSearchServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.update_conversation in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.update_conversation + ] = mock_rpc + + request = {} + client.update_conversation(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.update_conversation(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_update_conversation_rest_required_fields( + request_type=conversational_search_service.UpdateConversationRequest, +): + transport_class = transports.ConversationalSearchServiceRestTransport + + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_conversation._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_conversation._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("update_mask",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + + client = ConversationalSearchServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = gcd_conversation.Conversation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "patch", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = gcd_conversation.Conversation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.update_conversation(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_update_conversation_rest_unset_required_fields(): + transport = transports.ConversationalSearchServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.update_conversation._get_unset_required_fields({}) + assert set(unset_fields) == (set(("updateMask",)) & set(("conversation",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_conversation_rest_interceptors(null_interceptor): + transport = transports.ConversationalSearchServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.ConversationalSearchServiceRestInterceptor(), + ) + client = ConversationalSearchServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ConversationalSearchServiceRestInterceptor, + "post_update_conversation", + ) as post, mock.patch.object( + transports.ConversationalSearchServiceRestInterceptor, "pre_update_conversation" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = conversational_search_service.UpdateConversationRequest.pb( + conversational_search_service.UpdateConversationRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = gcd_conversation.Conversation.to_json( + gcd_conversation.Conversation() + ) + + request = conversational_search_service.UpdateConversationRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = gcd_conversation.Conversation() + + client.update_conversation( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_update_conversation_rest_bad_request( + transport: str = "rest", + request_type=conversational_search_service.UpdateConversationRequest, +): + client = ConversationalSearchServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "conversation": { + "name": "projects/sample1/locations/sample2/dataStores/sample3/conversations/sample4" + } + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.update_conversation(request) + + +def test_update_conversation_rest_flattened(): + client = ConversationalSearchServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = gcd_conversation.Conversation() + + # get arguments that satisfy an http rule for this method + sample_request = { + "conversation": { + "name": "projects/sample1/locations/sample2/dataStores/sample3/conversations/sample4" + } + } + + # get truthy value for each flattened field + mock_args = dict( + conversation=gcd_conversation.Conversation(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = gcd_conversation.Conversation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.update_conversation(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{conversation.name=projects/*/locations/*/dataStores/*/conversations/*}" + % client.transport._host, + args[1], + ) + + +def test_update_conversation_rest_flattened_error(transport: str = "rest"): + client = ConversationalSearchServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_conversation( + conversational_search_service.UpdateConversationRequest(), + conversation=gcd_conversation.Conversation(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +def test_update_conversation_rest_error(): + client = ConversationalSearchServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + conversational_search_service.GetConversationRequest, + dict, + ], +) +def test_get_conversation_rest(request_type): + client = ConversationalSearchServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/dataStores/sample3/conversations/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = conversation.Conversation( + name="name_value", + state=conversation.Conversation.State.IN_PROGRESS, + user_pseudo_id="user_pseudo_id_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = conversation.Conversation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_conversation(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, conversation.Conversation) + assert response.name == "name_value" + assert response.state == conversation.Conversation.State.IN_PROGRESS + assert response.user_pseudo_id == "user_pseudo_id_value" + + +def test_get_conversation_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ConversationalSearchServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_conversation in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.get_conversation + ] = mock_rpc + + request = {} + client.get_conversation(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.get_conversation(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_get_conversation_rest_required_fields( + request_type=conversational_search_service.GetConversationRequest, +): + transport_class = transports.ConversationalSearchServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_conversation._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_conversation._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = ConversationalSearchServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. 
+ return_value = conversation.Conversation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = conversation.Conversation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_conversation(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_conversation_rest_unset_required_fields(): + transport = transports.ConversationalSearchServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_conversation._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_conversation_rest_interceptors(null_interceptor): + transport = transports.ConversationalSearchServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.ConversationalSearchServiceRestInterceptor(), + ) + client = ConversationalSearchServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ConversationalSearchServiceRestInterceptor, "post_get_conversation" + ) as post, mock.patch.object( + transports.ConversationalSearchServiceRestInterceptor, "pre_get_conversation" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = conversational_search_service.GetConversationRequest.pb( + conversational_search_service.GetConversationRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = conversation.Conversation.to_json( + conversation.Conversation() + ) + + request = conversational_search_service.GetConversationRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = conversation.Conversation() + + client.get_conversation( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_conversation_rest_bad_request( + transport: str = "rest", + request_type=conversational_search_service.GetConversationRequest, +): + client = ConversationalSearchServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # 
send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/dataStores/sample3/conversations/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_conversation(request) + + +def test_get_conversation_rest_flattened(): + client = ConversationalSearchServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = conversation.Conversation() + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/dataStores/sample3/conversations/sample4" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = conversation.Conversation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_conversation(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/dataStores/*/conversations/*}" + % client.transport._host, + args[1], + ) + + +def test_get_conversation_rest_flattened_error(transport: str = "rest"): + client = ConversationalSearchServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_conversation( + conversational_search_service.GetConversationRequest(), + name="name_value", + ) + + +def test_get_conversation_rest_error(): + client = ConversationalSearchServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + conversational_search_service.ListConversationsRequest, + dict, + ], +) +def test_list_conversations_rest(request_type): + client = ConversationalSearchServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2/dataStores/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = conversational_search_service.ListConversationsResponse( + next_page_token="next_page_token_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = conversational_search_service.ListConversationsResponse.pb( + return_value + ) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_conversations(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListConversationsPager) + assert response.next_page_token == "next_page_token_value" + + +def test_list_conversations_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ConversationalSearchServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.list_conversations in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.list_conversations + ] = mock_rpc + + request = {} + client.list_conversations(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_conversations(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_list_conversations_rest_required_fields( + request_type=conversational_search_service.ListConversationsRequest, +): + transport_class = transports.ConversationalSearchServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_conversations._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_conversations._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set( + ( + "filter", + "order_by", + "page_size", + "page_token", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = ConversationalSearchServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = conversational_search_service.ListConversationsResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = conversational_search_service.ListConversationsResponse.pb( + return_value + ) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_conversations(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_conversations_rest_unset_required_fields(): + transport = transports.ConversationalSearchServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.list_conversations._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "filter", + "orderBy", + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_conversations_rest_interceptors(null_interceptor): + transport = transports.ConversationalSearchServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.ConversationalSearchServiceRestInterceptor(), + ) + client = ConversationalSearchServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ConversationalSearchServiceRestInterceptor, "post_list_conversations" + ) as post, mock.patch.object( + transports.ConversationalSearchServiceRestInterceptor, "pre_list_conversations" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = conversational_search_service.ListConversationsRequest.pb( + conversational_search_service.ListConversationsRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = ( + 
conversational_search_service.ListConversationsResponse.to_json( + conversational_search_service.ListConversationsResponse() + ) + ) + + request = conversational_search_service.ListConversationsRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = conversational_search_service.ListConversationsResponse() + + client.list_conversations( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_conversations_rest_bad_request( + transport: str = "rest", + request_type=conversational_search_service.ListConversationsRequest, +): + client = ConversationalSearchServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2/dataStores/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_conversations(request) + + +def test_list_conversations_rest_flattened(): + client = ConversationalSearchServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = conversational_search_service.ListConversationsResponse() + + # get arguments that satisfy an http rule for this method + sample_request = { + "parent": "projects/sample1/locations/sample2/dataStores/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = conversational_search_service.ListConversationsResponse.pb( + return_value + ) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.list_conversations(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*/dataStores/*}/conversations" + % client.transport._host, + args[1], + ) + + +def test_list_conversations_rest_flattened_error(transport: str = "rest"): + client = ConversationalSearchServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.list_conversations( + conversational_search_service.ListConversationsRequest(), + parent="parent_value", + ) + + +def test_list_conversations_rest_pager(transport: str = "rest"): + client = ConversationalSearchServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + conversational_search_service.ListConversationsResponse( + conversations=[ + conversation.Conversation(), + conversation.Conversation(), + conversation.Conversation(), + ], + next_page_token="abc", + ), + conversational_search_service.ListConversationsResponse( + conversations=[], + next_page_token="def", + ), + conversational_search_service.ListConversationsResponse( + conversations=[ + conversation.Conversation(), + ], + next_page_token="ghi", + ), + conversational_search_service.ListConversationsResponse( + conversations=[ + conversation.Conversation(), + conversation.Conversation(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + conversational_search_service.ListConversationsResponse.to_json(x) + for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = { + "parent": "projects/sample1/locations/sample2/dataStores/sample3" + } + + pager = client.list_conversations(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, conversation.Conversation) for i in results) + + pages = list(client.list_conversations(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + conversational_search_service.AnswerQueryRequest, + dict, + ], +) +def test_answer_query_rest(request_type): + client = ConversationalSearchServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "serving_config": "projects/sample1/locations/sample2/dataStores/sample3/servingConfigs/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = conversational_search_service.AnswerQueryResponse( + answer_query_token="answer_query_token_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = conversational_search_service.AnswerQueryResponse.pb( + return_value + ) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.answer_query(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, conversational_search_service.AnswerQueryResponse) + assert response.answer_query_token == "answer_query_token_value" + + +def test_answer_query_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ConversationalSearchServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.answer_query in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.answer_query] = mock_rpc + + request = {} + client.answer_query(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.answer_query(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_answer_query_rest_required_fields( + request_type=conversational_search_service.AnswerQueryRequest, +): + transport_class = transports.ConversationalSearchServiceRestTransport + + request_init = {} + request_init["serving_config"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).answer_query._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["servingConfig"] = "serving_config_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).answer_query._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "servingConfig" in jsonified_request + assert jsonified_request["servingConfig"] == "serving_config_value" + + client = ConversationalSearchServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = conversational_search_service.AnswerQueryResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = conversational_search_service.AnswerQueryResponse.pb( + return_value + ) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.answer_query(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_answer_query_rest_unset_required_fields(): + transport = transports.ConversationalSearchServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.answer_query._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(()) + & set( + ( + "servingConfig", + "query", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_answer_query_rest_interceptors(null_interceptor): + transport = transports.ConversationalSearchServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.ConversationalSearchServiceRestInterceptor(), + ) + client = ConversationalSearchServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ConversationalSearchServiceRestInterceptor, "post_answer_query" + ) as post, mock.patch.object( + transports.ConversationalSearchServiceRestInterceptor, "pre_answer_query" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = conversational_search_service.AnswerQueryRequest.pb( + conversational_search_service.AnswerQueryRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = ( + conversational_search_service.AnswerQueryResponse.to_json( + conversational_search_service.AnswerQueryResponse() + ) + ) + + request = conversational_search_service.AnswerQueryRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = conversational_search_service.AnswerQueryResponse() + + client.answer_query( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_answer_query_rest_bad_request( + transport: str = "rest", + request_type=conversational_search_service.AnswerQueryRequest, +): + client = ConversationalSearchServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "serving_config": "projects/sample1/locations/sample2/dataStores/sample3/servingConfigs/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
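The interceptor test above only patches the `pre_answer_query` and `post_answer_query` hooks. For context, here is an illustrative sketch (not taken from this change) of how a caller could wire a custom interceptor into the REST transport, reusing the aliases this test module already imports (`transports`, `ga_credentials`, `ConversationalSearchServiceClient`). The hook signatures are inferred from the patched pre/post return values in the test and are assumptions, not documented guarantees. The BadRequest mocking for `test_answer_query_rest_bad_request` then continues below.

```python
# Illustrative sketch only -- not part of this change. Aliases are the ones this
# test module already imports; hook signatures are assumed from the test's
# patched pre/post return values.
class LoggingInterceptor(transports.ConversationalSearchServiceRestInterceptor):
    def pre_answer_query(self, request, metadata):
        # Runs before the HTTP call; may return a modified (request, metadata).
        return request, metadata

    def post_answer_query(self, response):
        # Runs on the deserialized AnswerQueryResponse before it reaches the caller.
        return response


transport = transports.ConversationalSearchServiceRestTransport(
    credentials=ga_credentials.AnonymousCredentials(),
    interceptor=LoggingInterceptor(),
)
client = ConversationalSearchServiceClient(transport=transport)
```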
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.answer_query(request) + + +def test_answer_query_rest_error(): + client = ConversationalSearchServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + conversational_search_service.GetAnswerRequest, + dict, + ], +) +def test_get_answer_rest(request_type): client = ConversationalSearchServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -3805,35 +9073,46 @@ def test_converse_conversation_rest(request_type): # send a request that will satisfy transcoding request_init = { - "name": "projects/sample1/locations/sample2/dataStores/sample3/conversations/sample4" + "name": "projects/sample1/locations/sample2/dataStores/sample3/sessions/sample4/answers/sample5" } request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = conversational_search_service.ConverseConversationResponse() + return_value = answer.Answer( + name="name_value", + state=answer.Answer.State.IN_PROGRESS, + answer_text="answer_text_value", + related_questions=["related_questions_value"], + answer_skipped_reasons=[ + answer.Answer.AnswerSkippedReason.ADVERSARIAL_QUERY_IGNORED + ], + ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = conversational_search_service.ConverseConversationResponse.pb( - return_value - ) + return_value = answer.Answer.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.converse_conversation(request) + response = client.get_answer(request) # Establish that the response is the type that we expect. - assert isinstance( - response, conversational_search_service.ConverseConversationResponse - ) + assert isinstance(response, answer.Answer) + assert response.name == "name_value" + assert response.state == answer.Answer.State.IN_PROGRESS + assert response.answer_text == "answer_text_value" + assert response.related_questions == ["related_questions_value"] + assert response.answer_skipped_reasons == [ + answer.Answer.AnswerSkippedReason.ADVERSARIAL_QUERY_IGNORED + ] -def test_converse_conversation_rest_use_cached_wrapped_rpc(): +def test_get_answer_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -3847,35 +9126,30 @@ def test_converse_conversation_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert ( - client._transport.converse_conversation - in client._transport._wrapped_methods - ) + assert client._transport.get_answer in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client._transport._wrapped_methods[ - client._transport.converse_conversation - ] = mock_rpc + client._transport._wrapped_methods[client._transport.get_answer] = mock_rpc request = {} - client.converse_conversation(request) + client.get_answer(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.converse_conversation(request) + client.get_answer(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_converse_conversation_rest_required_fields( - request_type=conversational_search_service.ConverseConversationRequest, +def test_get_answer_rest_required_fields( + request_type=conversational_search_service.GetAnswerRequest, ): transport_class = transports.ConversationalSearchServiceRestTransport @@ -3891,7 +9165,7 @@ def test_converse_conversation_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).converse_conversation._get_unset_required_fields(jsonified_request) + ).get_answer._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present @@ -3900,7 +9174,7 @@ def test_converse_conversation_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).converse_conversation._get_unset_required_fields(jsonified_request) + ).get_answer._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone @@ -3914,7 +9188,7 @@ def test_converse_conversation_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = conversational_search_service.ConverseConversationResponse() + return_value = answer.Answer() # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -3926,52 +9200,39 @@ def test_converse_conversation_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "post", + "method": "get", "query_params": pb_request, } - transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = ( - conversational_search_service.ConverseConversationResponse.pb( - return_value - ) - ) + return_value = answer.Answer.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.converse_conversation(request) + response = client.get_answer(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_converse_conversation_rest_unset_required_fields(): +def test_get_answer_rest_unset_required_fields(): transport = transports.ConversationalSearchServiceRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.converse_conversation._get_unset_required_fields({}) - assert set(unset_fields) == ( - set(()) - & set( - ( - "name", - "query", - ) - ) - ) + unset_fields = transport.get_answer._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_converse_conversation_rest_interceptors(null_interceptor): +def test_get_answer_rest_interceptors(null_interceptor): transport = transports.ConversationalSearchServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -3984,16 +9245,14 @@ def test_converse_conversation_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.ConversationalSearchServiceRestInterceptor, - "post_converse_conversation", + transports.ConversationalSearchServiceRestInterceptor, "post_get_answer" ) as post, mock.patch.object( - transports.ConversationalSearchServiceRestInterceptor, - "pre_converse_conversation", + transports.ConversationalSearchServiceRestInterceptor, "pre_get_answer" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = conversational_search_service.ConverseConversationRequest.pb( - conversational_search_service.ConverseConversationRequest() + pb_message = conversational_search_service.GetAnswerRequest.pb( + conversational_search_service.GetAnswerRequest() ) transcode.return_value = { "method": "post", @@ -4005,21 +9264,17 @@ def test_converse_conversation_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = ( - conversational_search_service.ConverseConversationResponse.to_json( - conversational_search_service.ConverseConversationResponse() - ) - ) + req.return_value._content = answer.Answer.to_json(answer.Answer()) - request = conversational_search_service.ConverseConversationRequest() + request = conversational_search_service.GetAnswerRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = 
conversational_search_service.ConverseConversationResponse() + post.return_value = answer.Answer() - client.converse_conversation( + client.get_answer( request, metadata=[ ("key", "val"), @@ -4031,9 +9286,8 @@ def test_converse_conversation_rest_interceptors(null_interceptor): post.assert_called_once() -def test_converse_conversation_rest_bad_request( - transport: str = "rest", - request_type=conversational_search_service.ConverseConversationRequest, +def test_get_answer_rest_bad_request( + transport: str = "rest", request_type=conversational_search_service.GetAnswerRequest ): client = ConversationalSearchServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -4042,7 +9296,7 @@ def test_converse_conversation_rest_bad_request( # send a request that will satisfy transcoding request_init = { - "name": "projects/sample1/locations/sample2/dataStores/sample3/conversations/sample4" + "name": "projects/sample1/locations/sample2/dataStores/sample3/sessions/sample4/answers/sample5" } request = request_type(**request_init) @@ -4055,10 +9309,10 @@ def test_converse_conversation_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.converse_conversation(request) + client.get_answer(request) -def test_converse_conversation_rest_flattened(): +def test_get_answer_rest_flattened(): client = ConversationalSearchServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -4067,17 +9321,16 @@ def test_converse_conversation_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = conversational_search_service.ConverseConversationResponse() + return_value = answer.Answer() # get arguments that satisfy an http rule for this method sample_request = { - "name": "projects/sample1/locations/sample2/dataStores/sample3/conversations/sample4" + "name": "projects/sample1/locations/sample2/dataStores/sample3/sessions/sample4/answers/sample5" } # get truthy value for each flattened field mock_args = dict( name="name_value", - query=conversation.TextInput(input="input_value"), ) mock_args.update(sample_request) @@ -4085,27 +9338,25 @@ def test_converse_conversation_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = conversational_search_service.ConverseConversationResponse.pb( - return_value - ) + return_value = answer.Answer.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.converse_conversation(**mock_args) + client.get_answer(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{name=projects/*/locations/*/dataStores/*/conversations/*}:converse" + "%s/v1/{name=projects/*/locations/*/dataStores/*/sessions/*/answers/*}" % client.transport._host, args[1], ) -def test_converse_conversation_rest_flattened_error(transport: str = "rest"): +def test_get_answer_rest_flattened_error(transport: str = "rest"): client = ConversationalSearchServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -4114,14 +9365,13 @@ def test_converse_conversation_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.converse_conversation( - conversational_search_service.ConverseConversationRequest(), + client.get_answer( + conversational_search_service.GetAnswerRequest(), name="name_value", - query=conversation.TextInput(input="input_value"), ) -def test_converse_conversation_rest_error(): +def test_get_answer_rest_error(): client = ConversationalSearchServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -4130,11 +9380,11 @@ def test_converse_conversation_rest_error(): @pytest.mark.parametrize( "request_type", [ - conversational_search_service.CreateConversationRequest, + conversational_search_service.CreateSessionRequest, dict, ], ) -def test_create_conversation_rest(request_type): +def test_create_session_rest(request_type): client = ConversationalSearchServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -4142,61 +9392,17 @@ def test_create_conversation_rest(request_type): # send a request that will satisfy transcoding request_init = {"parent": "projects/sample1/locations/sample2/dataStores/sample3"} - request_init["conversation"] = { + request_init["session"] = { "name": "name_value", "state": 1, "user_pseudo_id": "user_pseudo_id_value", - "messages": [ + "turns": [ { - "user_input": { - "input": "input_value", - "context": { - "context_documents": [ - "context_documents_value1", - "context_documents_value2", - ], - "active_document": "active_document_value", - }, - }, - "reply": { - "summary": { - "summary_text": "summary_text_value", - "summary_skipped_reasons": [1], - "safety_attributes": { - "categories": ["categories_value1", "categories_value2"], - "scores": [0.656, 0.657], - }, - "summary_with_metadata": { - "summary": "summary_value", - "citation_metadata": { - "citations": [ - { - "start_index": 1189, - "end_index": 942, - "sources": [{"reference_index": 1574}], - } - ] - }, - "references": [ - { - "title": "title_value", - "document": "document_value", - "uri": "uri_value", - "chunk_contents": [ - { - "content": "content_value", - "page_identifier": "page_identifier_value", - } - ], - } - ], - }, - } - }, - "create_time": {"seconds": 751, "nanos": 543}, + "query": {"text": "text_value", "query_id": "query_id_value"}, + "answer": "answer_value", } ], - "start_time": {}, + "start_time": {"seconds": 751, "nanos": 543}, "end_time": {}, } # The version of a generated dependency at test runtime may differ from the version used during generation. 
@@ -4204,8 +9410,8 @@ def test_create_conversation_rest(request_type): # See https://github.com/googleapis/gapic-generator-python/issues/1748 # Determine if the message type is proto-plus or protobuf - test_field = conversational_search_service.CreateConversationRequest.meta.fields[ - "conversation" + test_field = conversational_search_service.CreateSessionRequest.meta.fields[ + "session" ] def get_message_fields(field): @@ -4234,7 +9440,7 @@ def get_message_fields(field): # For each item in the sample request, create a list of sub fields which are not present at runtime # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["conversation"].items(): # pragma: NO COVER + for field, value in request_init["session"].items(): # pragma: NO COVER result = None is_repeated = False # For repeated fields @@ -4264,18 +9470,18 @@ def get_message_fields(field): subfield = subfield_to_delete.get("subfield") if subfield: if field_repeated: - for i in range(0, len(request_init["conversation"][field])): - del request_init["conversation"][field][i][subfield] + for i in range(0, len(request_init["session"][field])): + del request_init["session"][field][i][subfield] else: - del request_init["conversation"][field][subfield] + del request_init["session"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = gcd_conversation.Conversation( + return_value = gcd_session.Session( name="name_value", - state=gcd_conversation.Conversation.State.IN_PROGRESS, + state=gcd_session.Session.State.IN_PROGRESS, user_pseudo_id="user_pseudo_id_value", ) @@ -4283,21 +9489,21 @@ def get_message_fields(field): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = gcd_conversation.Conversation.pb(return_value) + return_value = gcd_session.Session.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.create_conversation(request) + response = client.create_session(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, gcd_conversation.Conversation) + assert isinstance(response, gcd_session.Session) assert response.name == "name_value" - assert response.state == gcd_conversation.Conversation.State.IN_PROGRESS + assert response.state == gcd_session.Session.State.IN_PROGRESS assert response.user_pseudo_id == "user_pseudo_id_value" -def test_create_conversation_rest_use_cached_wrapped_rpc(): +def test_create_session_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -4311,34 +9517,30 @@ def test_create_conversation_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert ( - client._transport.create_conversation in client._transport._wrapped_methods - ) + assert client._transport.create_session in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.create_conversation - ] = mock_rpc + client._transport._wrapped_methods[client._transport.create_session] = mock_rpc request = {} - client.create_conversation(request) + client.create_session(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.create_conversation(request) + client.create_session(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_create_conversation_rest_required_fields( - request_type=conversational_search_service.CreateConversationRequest, +def test_create_session_rest_required_fields( + request_type=conversational_search_service.CreateSessionRequest, ): transport_class = transports.ConversationalSearchServiceRestTransport @@ -4354,7 +9556,7 @@ def test_create_conversation_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).create_conversation._get_unset_required_fields(jsonified_request) + ).create_session._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present @@ -4363,7 +9565,7 @@ def test_create_conversation_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).create_conversation._get_unset_required_fields(jsonified_request) + ).create_session._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone @@ -4377,7 +9579,7 @@ def test_create_conversation_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = gcd_conversation.Conversation() + return_value = gcd_session.Session() # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -4399,38 +9601,38 @@ def test_create_conversation_rest_required_fields( response_value.status_code = 200 # Convert return value to protobuf type - return_value = gcd_conversation.Conversation.pb(return_value) + return_value = gcd_session.Session.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.create_conversation(request) + response = client.create_session(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_create_conversation_rest_unset_required_fields(): +def test_create_session_rest_unset_required_fields(): transport = transports.ConversationalSearchServiceRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.create_conversation._get_unset_required_fields({}) + unset_fields = transport.create_session._get_unset_required_fields({}) assert set(unset_fields) == ( set(()) & set( ( "parent", - "conversation", + "session", ) ) ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_create_conversation_rest_interceptors(null_interceptor): +def test_create_session_rest_interceptors(null_interceptor): transport = transports.ConversationalSearchServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -4443,15 +9645,14 @@ def test_create_conversation_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.ConversationalSearchServiceRestInterceptor, - "post_create_conversation", + transports.ConversationalSearchServiceRestInterceptor, "post_create_session" ) as post, mock.patch.object( - transports.ConversationalSearchServiceRestInterceptor, "pre_create_conversation" + transports.ConversationalSearchServiceRestInterceptor, "pre_create_session" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = conversational_search_service.CreateConversationRequest.pb( - conversational_search_service.CreateConversationRequest() + pb_message = conversational_search_service.CreateSessionRequest.pb( + conversational_search_service.CreateSessionRequest() ) transcode.return_value = { "method": "post", @@ -4463,19 +9664,17 @@ def test_create_conversation_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = gcd_conversation.Conversation.to_json( - gcd_conversation.Conversation() - ) + req.return_value._content = gcd_session.Session.to_json(gcd_session.Session()) - request = conversational_search_service.CreateConversationRequest() + request = conversational_search_service.CreateSessionRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = gcd_conversation.Conversation() + post.return_value = gcd_session.Session() - client.create_conversation( + client.create_session( request, metadata=[ ("key", "val"), @@ -4487,9 +9686,9 @@ def test_create_conversation_rest_interceptors(null_interceptor): post.assert_called_once() -def test_create_conversation_rest_bad_request( +def test_create_session_rest_bad_request( transport: str = "rest", - 
request_type=conversational_search_service.CreateConversationRequest, + request_type=conversational_search_service.CreateSessionRequest, ): client = ConversationalSearchServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -4509,10 +9708,10 @@ def test_create_conversation_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.create_conversation(request) + client.create_session(request) -def test_create_conversation_rest_flattened(): +def test_create_session_rest_flattened(): client = ConversationalSearchServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -4521,7 +9720,7 @@ def test_create_conversation_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = gcd_conversation.Conversation() + return_value = gcd_session.Session() # get arguments that satisfy an http rule for this method sample_request = { @@ -4531,7 +9730,7 @@ def test_create_conversation_rest_flattened(): # get truthy value for each flattened field mock_args = dict( parent="parent_value", - conversation=gcd_conversation.Conversation(name="name_value"), + session=gcd_session.Session(name="name_value"), ) mock_args.update(sample_request) @@ -4539,25 +9738,25 @@ def test_create_conversation_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = gcd_conversation.Conversation.pb(return_value) + return_value = gcd_session.Session.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.create_conversation(**mock_args) + client.create_session(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{parent=projects/*/locations/*/dataStores/*}/conversations" + "%s/v1/{parent=projects/*/locations/*/dataStores/*}/sessions" % client.transport._host, args[1], ) -def test_create_conversation_rest_flattened_error(transport: str = "rest"): +def test_create_session_rest_flattened_error(transport: str = "rest"): client = ConversationalSearchServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -4566,14 +9765,14 @@ def test_create_conversation_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.create_conversation( - conversational_search_service.CreateConversationRequest(), + client.create_session( + conversational_search_service.CreateSessionRequest(), parent="parent_value", - conversation=gcd_conversation.Conversation(name="name_value"), + session=gcd_session.Session(name="name_value"), ) -def test_create_conversation_rest_error(): +def test_create_session_rest_error(): client = ConversationalSearchServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -4582,11 +9781,11 @@ def test_create_conversation_rest_error(): @pytest.mark.parametrize( "request_type", [ - conversational_search_service.DeleteConversationRequest, + conversational_search_service.DeleteSessionRequest, dict, ], ) -def test_delete_conversation_rest(request_type): +def test_delete_session_rest(request_type): client = ConversationalSearchServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -4594,7 +9793,7 @@ def test_delete_conversation_rest(request_type): # send a request that will satisfy transcoding request_init = { - "name": "projects/sample1/locations/sample2/dataStores/sample3/conversations/sample4" + "name": "projects/sample1/locations/sample2/dataStores/sample3/sessions/sample4" } request = request_type(**request_init) @@ -4610,13 +9809,13 @@ def test_delete_conversation_rest(request_type): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.delete_conversation(request) + response = client.delete_session(request) # Establish that the response is the type that we expect. assert response is None -def test_delete_conversation_rest_use_cached_wrapped_rpc(): +def test_delete_session_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -4630,34 +9829,30 @@ def test_delete_conversation_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert ( - client._transport.delete_conversation in client._transport._wrapped_methods - ) + assert client._transport.delete_session in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.delete_conversation - ] = mock_rpc + client._transport._wrapped_methods[client._transport.delete_session] = mock_rpc request = {} - client.delete_conversation(request) + client.delete_session(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.delete_conversation(request) + client.delete_session(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_delete_conversation_rest_required_fields( - request_type=conversational_search_service.DeleteConversationRequest, +def test_delete_session_rest_required_fields( + request_type=conversational_search_service.DeleteSessionRequest, ): transport_class = transports.ConversationalSearchServiceRestTransport @@ -4673,7 +9868,7 @@ def test_delete_conversation_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).delete_conversation._get_unset_required_fields(jsonified_request) + ).delete_session._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present @@ -4682,7 +9877,7 @@ def test_delete_conversation_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).delete_conversation._get_unset_required_fields(jsonified_request) + ).delete_session._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone @@ -4720,24 +9915,24 @@ def test_delete_conversation_rest_required_fields( response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.delete_conversation(request) + response = client.delete_session(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_delete_conversation_rest_unset_required_fields(): +def test_delete_session_rest_unset_required_fields(): transport = transports.ConversationalSearchServiceRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.delete_conversation._get_unset_required_fields({}) + unset_fields = transport.delete_session._get_unset_required_fields({}) assert set(unset_fields) == (set(()) & set(("name",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_delete_conversation_rest_interceptors(null_interceptor): +def test_delete_session_rest_interceptors(null_interceptor): transport = transports.ConversationalSearchServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -4750,11 +9945,11 @@ def test_delete_conversation_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.ConversationalSearchServiceRestInterceptor, "pre_delete_conversation" + transports.ConversationalSearchServiceRestInterceptor, "pre_delete_session" ) as pre: pre.assert_not_called() - pb_message = conversational_search_service.DeleteConversationRequest.pb( - conversational_search_service.DeleteConversationRequest() + pb_message = conversational_search_service.DeleteSessionRequest.pb( + conversational_search_service.DeleteSessionRequest() ) transcode.return_value = { "method": "post", @@ -4767,14 +9962,14 @@ def test_delete_conversation_rest_interceptors(null_interceptor): req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - request = conversational_search_service.DeleteConversationRequest() + request = conversational_search_service.DeleteSessionRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = 
request, metadata - client.delete_conversation( + client.delete_session( request, metadata=[ ("key", "val"), @@ -4785,9 +9980,9 @@ def test_delete_conversation_rest_interceptors(null_interceptor): pre.assert_called_once() -def test_delete_conversation_rest_bad_request( +def test_delete_session_rest_bad_request( transport: str = "rest", - request_type=conversational_search_service.DeleteConversationRequest, + request_type=conversational_search_service.DeleteSessionRequest, ): client = ConversationalSearchServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -4796,7 +9991,7 @@ def test_delete_conversation_rest_bad_request( # send a request that will satisfy transcoding request_init = { - "name": "projects/sample1/locations/sample2/dataStores/sample3/conversations/sample4" + "name": "projects/sample1/locations/sample2/dataStores/sample3/sessions/sample4" } request = request_type(**request_init) @@ -4809,10 +10004,10 @@ def test_delete_conversation_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.delete_conversation(request) + client.delete_session(request) -def test_delete_conversation_rest_flattened(): +def test_delete_session_rest_flattened(): client = ConversationalSearchServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -4825,7 +10020,7 @@ def test_delete_conversation_rest_flattened(): # get arguments that satisfy an http rule for this method sample_request = { - "name": "projects/sample1/locations/sample2/dataStores/sample3/conversations/sample4" + "name": "projects/sample1/locations/sample2/dataStores/sample3/sessions/sample4" } # get truthy value for each flattened field @@ -4841,20 +10036,20 @@ def test_delete_conversation_rest_flattened(): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.delete_conversation(**mock_args) + client.delete_session(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{name=projects/*/locations/*/dataStores/*/conversations/*}" + "%s/v1/{name=projects/*/locations/*/dataStores/*/sessions/*}" % client.transport._host, args[1], ) -def test_delete_conversation_rest_flattened_error(transport: str = "rest"): +def test_delete_session_rest_flattened_error(transport: str = "rest"): client = ConversationalSearchServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -4863,13 +10058,13 @@ def test_delete_conversation_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.delete_conversation( - conversational_search_service.DeleteConversationRequest(), + client.delete_session( + conversational_search_service.DeleteSessionRequest(), name="name_value", ) -def test_delete_conversation_rest_error(): +def test_delete_session_rest_error(): client = ConversationalSearchServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -4878,11 +10073,11 @@ def test_delete_conversation_rest_error(): @pytest.mark.parametrize( "request_type", [ - conversational_search_service.UpdateConversationRequest, + conversational_search_service.UpdateSessionRequest, dict, ], ) -def test_update_conversation_rest(request_type): +def test_update_session_rest(request_type): client = ConversationalSearchServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -4890,65 +10085,21 @@ def test_update_conversation_rest(request_type): # send a request that will satisfy transcoding request_init = { - "conversation": { - "name": "projects/sample1/locations/sample2/dataStores/sample3/conversations/sample4" + "session": { + "name": "projects/sample1/locations/sample2/dataStores/sample3/sessions/sample4" } } - request_init["conversation"] = { - "name": "projects/sample1/locations/sample2/dataStores/sample3/conversations/sample4", + request_init["session"] = { + "name": "projects/sample1/locations/sample2/dataStores/sample3/sessions/sample4", "state": 1, "user_pseudo_id": "user_pseudo_id_value", - "messages": [ + "turns": [ { - "user_input": { - "input": "input_value", - "context": { - "context_documents": [ - "context_documents_value1", - "context_documents_value2", - ], - "active_document": "active_document_value", - }, - }, - "reply": { - "summary": { - "summary_text": "summary_text_value", - "summary_skipped_reasons": [1], - "safety_attributes": { - "categories": ["categories_value1", "categories_value2"], - "scores": [0.656, 0.657], - }, - "summary_with_metadata": { - "summary": "summary_value", - "citation_metadata": { - "citations": [ - { - "start_index": 1189, - "end_index": 942, - "sources": [{"reference_index": 1574}], - } - ] - }, - "references": [ - { - "title": "title_value", - "document": "document_value", - "uri": "uri_value", - "chunk_contents": [ - { - "content": "content_value", - "page_identifier": "page_identifier_value", - } - ], - } - ], - }, - } - }, - "create_time": {"seconds": 751, "nanos": 543}, + "query": {"text": "text_value", "query_id": "query_id_value"}, + "answer": "answer_value", } ], - "start_time": {}, + "start_time": {"seconds": 751, "nanos": 543}, "end_time": {}, } # The version of a generated dependency at test runtime may differ from the version used during generation. 
@@ -4956,8 +10107,8 @@ def test_update_conversation_rest(request_type): # See https://github.com/googleapis/gapic-generator-python/issues/1748 # Determine if the message type is proto-plus or protobuf - test_field = conversational_search_service.UpdateConversationRequest.meta.fields[ - "conversation" + test_field = conversational_search_service.UpdateSessionRequest.meta.fields[ + "session" ] def get_message_fields(field): @@ -4986,7 +10137,7 @@ def get_message_fields(field): # For each item in the sample request, create a list of sub fields which are not present at runtime # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["conversation"].items(): # pragma: NO COVER + for field, value in request_init["session"].items(): # pragma: NO COVER result = None is_repeated = False # For repeated fields @@ -5016,18 +10167,18 @@ def get_message_fields(field): subfield = subfield_to_delete.get("subfield") if subfield: if field_repeated: - for i in range(0, len(request_init["conversation"][field])): - del request_init["conversation"][field][i][subfield] + for i in range(0, len(request_init["session"][field])): + del request_init["session"][field][i][subfield] else: - del request_init["conversation"][field][subfield] + del request_init["session"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = gcd_conversation.Conversation( + return_value = gcd_session.Session( name="name_value", - state=gcd_conversation.Conversation.State.IN_PROGRESS, + state=gcd_session.Session.State.IN_PROGRESS, user_pseudo_id="user_pseudo_id_value", ) @@ -5035,21 +10186,21 @@ def get_message_fields(field): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = gcd_conversation.Conversation.pb(return_value) + return_value = gcd_session.Session.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.update_conversation(request) + response = client.update_session(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, gcd_conversation.Conversation) + assert isinstance(response, gcd_session.Session) assert response.name == "name_value" - assert response.state == gcd_conversation.Conversation.State.IN_PROGRESS + assert response.state == gcd_session.Session.State.IN_PROGRESS assert response.user_pseudo_id == "user_pseudo_id_value" -def test_update_conversation_rest_use_cached_wrapped_rpc(): +def test_update_session_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -5063,34 +10214,30 @@ def test_update_conversation_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert ( - client._transport.update_conversation in client._transport._wrapped_methods - ) + assert client._transport.update_session in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.update_conversation - ] = mock_rpc + client._transport._wrapped_methods[client._transport.update_session] = mock_rpc request = {} - client.update_conversation(request) + client.update_session(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.update_conversation(request) + client.update_session(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_update_conversation_rest_required_fields( - request_type=conversational_search_service.UpdateConversationRequest, +def test_update_session_rest_required_fields( + request_type=conversational_search_service.UpdateSessionRequest, ): transport_class = transports.ConversationalSearchServiceRestTransport @@ -5105,14 +10252,14 @@ def test_update_conversation_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).update_conversation._get_unset_required_fields(jsonified_request) + ).update_session._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).update_conversation._get_unset_required_fields(jsonified_request) + ).update_session._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. assert not set(unset_fields) - set(("update_mask",)) jsonified_request.update(unset_fields) @@ -5126,7 +10273,7 @@ def test_update_conversation_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = gcd_conversation.Conversation() + return_value = gcd_session.Session() # Mock the http request call within the method and fake a response. 
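Each of these REST tests fakes the HTTP response the same way: construct a `requests.Response`, set the status code, and set `_content` to the JSON encoding of the protobuf message. As a hypothetical refactoring aid (not part of this change), that boilerplate could be captured in a small helper like the sketch below; the request mocking for the required-fields test then continues as before.

```python
# Hypothetical helper, not part of this change: wraps a proto-plus message in a
# requests.Response exactly the way these tests do inline.
from google.protobuf import json_format
from requests import Response


def fake_rest_response(proto_plus_msg, status_code=200):
    response_value = Response()
    response_value.status_code = status_code
    # Proto-plus classes expose `.pb()` to obtain the underlying protobuf message.
    pb = type(proto_plus_msg).pb(proto_plus_msg)
    response_value._content = json_format.MessageToJson(pb).encode("UTF-8")
    return response_value


# e.g. req.return_value = fake_rest_response(gcd_session.Session(name="name_value"))
```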
with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -5148,30 +10295,30 @@ def test_update_conversation_rest_required_fields( response_value.status_code = 200 # Convert return value to protobuf type - return_value = gcd_conversation.Conversation.pb(return_value) + return_value = gcd_session.Session.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.update_conversation(request) + response = client.update_session(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_update_conversation_rest_unset_required_fields(): +def test_update_session_rest_unset_required_fields(): transport = transports.ConversationalSearchServiceRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.update_conversation._get_unset_required_fields({}) - assert set(unset_fields) == (set(("updateMask",)) & set(("conversation",))) + unset_fields = transport.update_session._get_unset_required_fields({}) + assert set(unset_fields) == (set(("updateMask",)) & set(("session",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_update_conversation_rest_interceptors(null_interceptor): +def test_update_session_rest_interceptors(null_interceptor): transport = transports.ConversationalSearchServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -5184,15 +10331,14 @@ def test_update_conversation_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.ConversationalSearchServiceRestInterceptor, - "post_update_conversation", + transports.ConversationalSearchServiceRestInterceptor, "post_update_session" ) as post, mock.patch.object( - transports.ConversationalSearchServiceRestInterceptor, "pre_update_conversation" + transports.ConversationalSearchServiceRestInterceptor, "pre_update_session" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = conversational_search_service.UpdateConversationRequest.pb( - conversational_search_service.UpdateConversationRequest() + pb_message = conversational_search_service.UpdateSessionRequest.pb( + conversational_search_service.UpdateSessionRequest() ) transcode.return_value = { "method": "post", @@ -5204,19 +10350,17 @@ def test_update_conversation_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = gcd_conversation.Conversation.to_json( - gcd_conversation.Conversation() - ) + req.return_value._content = gcd_session.Session.to_json(gcd_session.Session()) - request = conversational_search_service.UpdateConversationRequest() + request = conversational_search_service.UpdateSessionRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = gcd_conversation.Conversation() + post.return_value = gcd_session.Session() - client.update_conversation( + client.update_session( request, metadata=[ ("key", "val"), @@ -5228,9 +10372,9 @@ def test_update_conversation_rest_interceptors(null_interceptor): post.assert_called_once() -def test_update_conversation_rest_bad_request( +def test_update_session_rest_bad_request( transport: 
str = "rest", - request_type=conversational_search_service.UpdateConversationRequest, + request_type=conversational_search_service.UpdateSessionRequest, ): client = ConversationalSearchServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -5239,8 +10383,8 @@ def test_update_conversation_rest_bad_request( # send a request that will satisfy transcoding request_init = { - "conversation": { - "name": "projects/sample1/locations/sample2/dataStores/sample3/conversations/sample4" + "session": { + "name": "projects/sample1/locations/sample2/dataStores/sample3/sessions/sample4" } } request = request_type(**request_init) @@ -5254,10 +10398,10 @@ def test_update_conversation_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.update_conversation(request) + client.update_session(request) -def test_update_conversation_rest_flattened(): +def test_update_session_rest_flattened(): client = ConversationalSearchServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -5266,18 +10410,18 @@ def test_update_conversation_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = gcd_conversation.Conversation() + return_value = gcd_session.Session() # get arguments that satisfy an http rule for this method sample_request = { - "conversation": { - "name": "projects/sample1/locations/sample2/dataStores/sample3/conversations/sample4" + "session": { + "name": "projects/sample1/locations/sample2/dataStores/sample3/sessions/sample4" } } # get truthy value for each flattened field mock_args = dict( - conversation=gcd_conversation.Conversation(name="name_value"), + session=gcd_session.Session(name="name_value"), update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) mock_args.update(sample_request) @@ -5286,25 +10430,25 @@ def test_update_conversation_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = gcd_conversation.Conversation.pb(return_value) + return_value = gcd_session.Session.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.update_conversation(**mock_args) + client.update_session(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{conversation.name=projects/*/locations/*/dataStores/*/conversations/*}" + "%s/v1/{session.name=projects/*/locations/*/dataStores/*/sessions/*}" % client.transport._host, args[1], ) -def test_update_conversation_rest_flattened_error(transport: str = "rest"): +def test_update_session_rest_flattened_error(transport: str = "rest"): client = ConversationalSearchServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -5313,14 +10457,14 @@ def test_update_conversation_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.update_conversation( - conversational_search_service.UpdateConversationRequest(), - conversation=gcd_conversation.Conversation(name="name_value"), + client.update_session( + conversational_search_service.UpdateSessionRequest(), + session=gcd_session.Session(name="name_value"), update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) -def test_update_conversation_rest_error(): +def test_update_session_rest_error(): client = ConversationalSearchServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -5329,11 +10473,11 @@ def test_update_conversation_rest_error(): @pytest.mark.parametrize( "request_type", [ - conversational_search_service.GetConversationRequest, + conversational_search_service.GetSessionRequest, dict, ], ) -def test_get_conversation_rest(request_type): +def test_get_session_rest(request_type): client = ConversationalSearchServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -5341,16 +10485,16 @@ def test_get_conversation_rest(request_type): # send a request that will satisfy transcoding request_init = { - "name": "projects/sample1/locations/sample2/dataStores/sample3/conversations/sample4" + "name": "projects/sample1/locations/sample2/dataStores/sample3/sessions/sample4" } request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = conversation.Conversation( + return_value = session.Session( name="name_value", - state=conversation.Conversation.State.IN_PROGRESS, + state=session.Session.State.IN_PROGRESS, user_pseudo_id="user_pseudo_id_value", ) @@ -5358,21 +10502,21 @@ def test_get_conversation_rest(request_type): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = conversation.Conversation.pb(return_value) + return_value = session.Session.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.get_conversation(request) + response = client.get_session(request) # Establish that the response is the type that we expect. - assert isinstance(response, conversation.Conversation) + assert isinstance(response, session.Session) assert response.name == "name_value" - assert response.state == conversation.Conversation.State.IN_PROGRESS + assert response.state == session.Session.State.IN_PROGRESS assert response.user_pseudo_id == "user_pseudo_id_value" -def test_get_conversation_rest_use_cached_wrapped_rpc(): +def test_get_session_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -5386,32 +10530,30 @@ def test_get_conversation_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.get_conversation in client._transport._wrapped_methods + assert client._transport.get_session in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client._transport._wrapped_methods[ - client._transport.get_conversation - ] = mock_rpc + client._transport._wrapped_methods[client._transport.get_session] = mock_rpc request = {} - client.get_conversation(request) + client.get_session(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.get_conversation(request) + client.get_session(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_get_conversation_rest_required_fields( - request_type=conversational_search_service.GetConversationRequest, +def test_get_session_rest_required_fields( + request_type=conversational_search_service.GetSessionRequest, ): transport_class = transports.ConversationalSearchServiceRestTransport @@ -5427,7 +10569,7 @@ def test_get_conversation_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_conversation._get_unset_required_fields(jsonified_request) + ).get_session._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present @@ -5436,7 +10578,7 @@ def test_get_conversation_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_conversation._get_unset_required_fields(jsonified_request) + ).get_session._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone @@ -5450,7 +10592,7 @@ def test_get_conversation_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = conversation.Conversation() + return_value = session.Session() # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -5471,30 +10613,30 @@ def test_get_conversation_rest_required_fields( response_value.status_code = 200 # Convert return value to protobuf type - return_value = conversation.Conversation.pb(return_value) + return_value = session.Session.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.get_conversation(request) + response = client.get_session(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_get_conversation_rest_unset_required_fields(): +def test_get_session_rest_unset_required_fields(): transport = transports.ConversationalSearchServiceRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.get_conversation._get_unset_required_fields({}) + unset_fields = transport.get_session._get_unset_required_fields({}) assert set(unset_fields) == (set(()) & set(("name",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_conversation_rest_interceptors(null_interceptor): +def test_get_session_rest_interceptors(null_interceptor): transport = transports.ConversationalSearchServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -5507,14 +10649,14 @@ def test_get_conversation_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.ConversationalSearchServiceRestInterceptor, "post_get_conversation" + transports.ConversationalSearchServiceRestInterceptor, "post_get_session" ) as post, mock.patch.object( - transports.ConversationalSearchServiceRestInterceptor, "pre_get_conversation" + transports.ConversationalSearchServiceRestInterceptor, "pre_get_session" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = conversational_search_service.GetConversationRequest.pb( - conversational_search_service.GetConversationRequest() + pb_message = conversational_search_service.GetSessionRequest.pb( + conversational_search_service.GetSessionRequest() ) transcode.return_value = { "method": "post", @@ -5526,19 +10668,17 @@ def test_get_conversation_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = conversation.Conversation.to_json( - conversation.Conversation() - ) + req.return_value._content = session.Session.to_json(session.Session()) - request = conversational_search_service.GetConversationRequest() + request = conversational_search_service.GetSessionRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = conversation.Conversation() + post.return_value = session.Session() - client.get_conversation( + client.get_session( request, metadata=[ ("key", "val"), @@ -5550,9 +10690,9 @@ def test_get_conversation_rest_interceptors(null_interceptor): post.assert_called_once() -def test_get_conversation_rest_bad_request( +def test_get_session_rest_bad_request( transport: str = "rest", - request_type=conversational_search_service.GetConversationRequest, + request_type=conversational_search_service.GetSessionRequest, ): client = ConversationalSearchServiceClient( 
credentials=ga_credentials.AnonymousCredentials(), @@ -5561,7 +10701,7 @@ def test_get_conversation_rest_bad_request( # send a request that will satisfy transcoding request_init = { - "name": "projects/sample1/locations/sample2/dataStores/sample3/conversations/sample4" + "name": "projects/sample1/locations/sample2/dataStores/sample3/sessions/sample4" } request = request_type(**request_init) @@ -5574,10 +10714,10 @@ def test_get_conversation_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.get_conversation(request) + client.get_session(request) -def test_get_conversation_rest_flattened(): +def test_get_session_rest_flattened(): client = ConversationalSearchServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -5586,11 +10726,11 @@ def test_get_conversation_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = conversation.Conversation() + return_value = session.Session() # get arguments that satisfy an http rule for this method sample_request = { - "name": "projects/sample1/locations/sample2/dataStores/sample3/conversations/sample4" + "name": "projects/sample1/locations/sample2/dataStores/sample3/sessions/sample4" } # get truthy value for each flattened field @@ -5603,25 +10743,25 @@ def test_get_conversation_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = conversation.Conversation.pb(return_value) + return_value = session.Session.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.get_conversation(**mock_args) + client.get_session(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{name=projects/*/locations/*/dataStores/*/conversations/*}" + "%s/v1/{name=projects/*/locations/*/dataStores/*/sessions/*}" % client.transport._host, args[1], ) -def test_get_conversation_rest_flattened_error(transport: str = "rest"): +def test_get_session_rest_flattened_error(transport: str = "rest"): client = ConversationalSearchServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -5630,13 +10770,13 @@ def test_get_conversation_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.get_conversation( - conversational_search_service.GetConversationRequest(), + client.get_session( + conversational_search_service.GetSessionRequest(), name="name_value", ) -def test_get_conversation_rest_error(): +def test_get_session_rest_error(): client = ConversationalSearchServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -5645,11 +10785,11 @@ def test_get_conversation_rest_error(): @pytest.mark.parametrize( "request_type", [ - conversational_search_service.ListConversationsRequest, + conversational_search_service.ListSessionsRequest, dict, ], ) -def test_list_conversations_rest(request_type): +def test_list_sessions_rest(request_type): client = ConversationalSearchServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -5662,7 +10802,7 @@ def test_list_conversations_rest(request_type): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = conversational_search_service.ListConversationsResponse( + return_value = conversational_search_service.ListSessionsResponse( next_page_token="next_page_token_value", ) @@ -5670,21 +10810,21 @@ def test_list_conversations_rest(request_type): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = conversational_search_service.ListConversationsResponse.pb( + return_value = conversational_search_service.ListSessionsResponse.pb( return_value ) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.list_conversations(request) + response = client.list_sessions(request) # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListConversationsPager) + assert isinstance(response, pagers.ListSessionsPager) assert response.next_page_token == "next_page_token_value" -def test_list_conversations_rest_use_cached_wrapped_rpc(): +def test_list_sessions_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -5698,34 +10838,30 @@ def test_list_conversations_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert ( - client._transport.list_conversations in client._transport._wrapped_methods - ) + assert client._transport.list_sessions in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.list_conversations - ] = mock_rpc + client._transport._wrapped_methods[client._transport.list_sessions] = mock_rpc request = {} - client.list_conversations(request) + client.list_sessions(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.list_conversations(request) + client.list_sessions(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_list_conversations_rest_required_fields( - request_type=conversational_search_service.ListConversationsRequest, +def test_list_sessions_rest_required_fields( + request_type=conversational_search_service.ListSessionsRequest, ): transport_class = transports.ConversationalSearchServiceRestTransport @@ -5741,7 +10877,7 @@ def test_list_conversations_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_conversations._get_unset_required_fields(jsonified_request) + ).list_sessions._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present @@ -5750,7 +10886,7 @@ def test_list_conversations_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_conversations._get_unset_required_fields(jsonified_request) + ).list_sessions._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. assert not set(unset_fields) - set( ( @@ -5773,7 +10909,7 @@ def test_list_conversations_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = conversational_search_service.ListConversationsResponse() + return_value = conversational_search_service.ListSessionsResponse() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -5794,7 +10930,7 @@ def test_list_conversations_rest_required_fields( response_value.status_code = 200 # Convert return value to protobuf type - return_value = conversational_search_service.ListConversationsResponse.pb( + return_value = conversational_search_service.ListSessionsResponse.pb( return_value ) json_return_value = json_format.MessageToJson(return_value) @@ -5802,19 +10938,19 @@ def test_list_conversations_rest_required_fields( response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.list_conversations(request) + response = client.list_sessions(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_list_conversations_rest_unset_required_fields(): +def test_list_sessions_rest_unset_required_fields(): transport = transports.ConversationalSearchServiceRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.list_conversations._get_unset_required_fields({}) + unset_fields = transport.list_sessions._get_unset_required_fields({}) assert set(unset_fields) == ( set( ( @@ -5829,7 +10965,7 @@ def test_list_conversations_rest_unset_required_fields(): @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_conversations_rest_interceptors(null_interceptor): +def test_list_sessions_rest_interceptors(null_interceptor): transport = transports.ConversationalSearchServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -5842,14 +10978,14 @@ def test_list_conversations_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as 
transcode, mock.patch.object( - transports.ConversationalSearchServiceRestInterceptor, "post_list_conversations" + transports.ConversationalSearchServiceRestInterceptor, "post_list_sessions" ) as post, mock.patch.object( - transports.ConversationalSearchServiceRestInterceptor, "pre_list_conversations" + transports.ConversationalSearchServiceRestInterceptor, "pre_list_sessions" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = conversational_search_service.ListConversationsRequest.pb( - conversational_search_service.ListConversationsRequest() + pb_message = conversational_search_service.ListSessionsRequest.pb( + conversational_search_service.ListSessionsRequest() ) transcode.return_value = { "method": "post", @@ -5862,20 +10998,20 @@ def test_list_conversations_rest_interceptors(null_interceptor): req.return_value.status_code = 200 req.return_value.request = PreparedRequest() req.return_value._content = ( - conversational_search_service.ListConversationsResponse.to_json( - conversational_search_service.ListConversationsResponse() + conversational_search_service.ListSessionsResponse.to_json( + conversational_search_service.ListSessionsResponse() ) ) - request = conversational_search_service.ListConversationsRequest() + request = conversational_search_service.ListSessionsRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = conversational_search_service.ListConversationsResponse() + post.return_value = conversational_search_service.ListSessionsResponse() - client.list_conversations( + client.list_sessions( request, metadata=[ ("key", "val"), @@ -5887,9 +11023,9 @@ def test_list_conversations_rest_interceptors(null_interceptor): post.assert_called_once() -def test_list_conversations_rest_bad_request( +def test_list_sessions_rest_bad_request( transport: str = "rest", - request_type=conversational_search_service.ListConversationsRequest, + request_type=conversational_search_service.ListSessionsRequest, ): client = ConversationalSearchServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -5909,10 +11045,10 @@ def test_list_conversations_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.list_conversations(request) + client.list_sessions(request) -def test_list_conversations_rest_flattened(): +def test_list_sessions_rest_flattened(): client = ConversationalSearchServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -5921,7 +11057,7 @@ def test_list_conversations_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = conversational_search_service.ListConversationsResponse() + return_value = conversational_search_service.ListSessionsResponse() # get arguments that satisfy an http rule for this method sample_request = { @@ -5938,27 +11074,27 @@ def test_list_conversations_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = conversational_search_service.ListConversationsResponse.pb( + return_value = conversational_search_service.ListSessionsResponse.pb( return_value ) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.list_conversations(**mock_args) + client.list_sessions(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{parent=projects/*/locations/*/dataStores/*}/conversations" + "%s/v1/{parent=projects/*/locations/*/dataStores/*}/sessions" % client.transport._host, args[1], ) -def test_list_conversations_rest_flattened_error(transport: str = "rest"): +def test_list_sessions_rest_flattened_error(transport: str = "rest"): client = ConversationalSearchServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -5967,13 +11103,13 @@ def test_list_conversations_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.list_conversations( - conversational_search_service.ListConversationsRequest(), + client.list_sessions( + conversational_search_service.ListSessionsRequest(), parent="parent_value", ) -def test_list_conversations_rest_pager(transport: str = "rest"): +def test_list_sessions_rest_pager(transport: str = "rest"): client = ConversationalSearchServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -5983,30 +11119,30 @@ def test_list_conversations_rest_pager(transport: str = "rest"): with mock.patch.object(Session, "request") as req: # TODO(kbandes): remove this mock unless there's a good reason for it. 
# with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - conversational_search_service.ListConversationsResponse( - conversations=[ - conversation.Conversation(), - conversation.Conversation(), - conversation.Conversation(), + # Set the response as a series of pages + response = ( + conversational_search_service.ListSessionsResponse( + sessions=[ + session.Session(), + session.Session(), + session.Session(), ], next_page_token="abc", ), - conversational_search_service.ListConversationsResponse( - conversations=[], + conversational_search_service.ListSessionsResponse( + sessions=[], next_page_token="def", ), - conversational_search_service.ListConversationsResponse( - conversations=[ - conversation.Conversation(), + conversational_search_service.ListSessionsResponse( + sessions=[ + session.Session(), ], next_page_token="ghi", ), - conversational_search_service.ListConversationsResponse( - conversations=[ - conversation.Conversation(), - conversation.Conversation(), + conversational_search_service.ListSessionsResponse( + sessions=[ + session.Session(), + session.Session(), ], ), ) @@ -6015,7 +11151,7 @@ def test_list_conversations_rest_pager(transport: str = "rest"): # Wrap the values into proper Response objs response = tuple( - conversational_search_service.ListConversationsResponse.to_json(x) + conversational_search_service.ListSessionsResponse.to_json(x) for x in response ) return_values = tuple(Response() for i in response) @@ -6028,13 +11164,13 @@ def test_list_conversations_rest_pager(transport: str = "rest"): "parent": "projects/sample1/locations/sample2/dataStores/sample3" } - pager = client.list_conversations(request=sample_request) + pager = client.list_sessions(request=sample_request) results = list(pager) assert len(results) == 6 - assert all(isinstance(i, conversation.Conversation) for i in results) + assert all(isinstance(i, session.Session) for i in results) - pages = list(client.list_conversations(request=sample_request).pages) + pages = list(client.list_sessions(request=sample_request).pages) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token @@ -6184,7 +11320,15 @@ def test_conversational_search_service_base_transport(): "update_conversation", "get_conversation", "list_conversations", + "answer_query", + "get_answer", + "create_session", + "delete_session", + "update_session", + "get_session", + "list_sessions", "get_operation", + "cancel_operation", "list_operations", ) for method in methods: @@ -6471,6 +11615,27 @@ def test_conversational_search_service_client_transport_session_collision( session1 = client1.transport.list_conversations._session session2 = client2.transport.list_conversations._session assert session1 != session2 + session1 = client1.transport.answer_query._session + session2 = client2.transport.answer_query._session + assert session1 != session2 + session1 = client1.transport.get_answer._session + session2 = client2.transport.get_answer._session + assert session1 != session2 + session1 = client1.transport.create_session._session + session2 = client2.transport.create_session._session + assert session1 != session2 + session1 = client1.transport.delete_session._session + session2 = client2.transport.delete_session._session + assert session1 != session2 + session1 = client1.transport.update_session._session + session2 = client2.transport.update_session._session + assert session1 != session2 + session1 = 
client1.transport.get_session._session + session2 = client2.transport.get_session._session + assert session1 != session2 + session1 = client1.transport.list_sessions._session + session2 = client2.transport.list_sessions._session + assert session1 != session2 def test_conversational_search_service_grpc_transport_channel(): @@ -6599,11 +11764,82 @@ def test_conversational_search_service_transport_channel_mtls_with_adc(transport assert transport.grpc_channel == mock_grpc_channel -def test_conversation_path(): +def test_answer_path(): project = "squid" location = "clam" data_store = "whelk" - conversation = "octopus" + session = "octopus" + answer = "oyster" + expected = "projects/{project}/locations/{location}/dataStores/{data_store}/sessions/{session}/answers/{answer}".format( + project=project, + location=location, + data_store=data_store, + session=session, + answer=answer, + ) + actual = ConversationalSearchServiceClient.answer_path( + project, location, data_store, session, answer + ) + assert expected == actual + + +def test_parse_answer_path(): + expected = { + "project": "nudibranch", + "location": "cuttlefish", + "data_store": "mussel", + "session": "winkle", + "answer": "nautilus", + } + path = ConversationalSearchServiceClient.answer_path(**expected) + + # Check that the path construction is reversible. + actual = ConversationalSearchServiceClient.parse_answer_path(path) + assert expected == actual + + +def test_chunk_path(): + project = "scallop" + location = "abalone" + data_store = "squid" + branch = "clam" + document = "whelk" + chunk = "octopus" + expected = "projects/{project}/locations/{location}/dataStores/{data_store}/branches/{branch}/documents/{document}/chunks/{chunk}".format( + project=project, + location=location, + data_store=data_store, + branch=branch, + document=document, + chunk=chunk, + ) + actual = ConversationalSearchServiceClient.chunk_path( + project, location, data_store, branch, document, chunk + ) + assert expected == actual + + +def test_parse_chunk_path(): + expected = { + "project": "oyster", + "location": "nudibranch", + "data_store": "cuttlefish", + "branch": "mussel", + "document": "winkle", + "chunk": "nautilus", + } + path = ConversationalSearchServiceClient.chunk_path(**expected) + + # Check that the path construction is reversible. 
+ actual = ConversationalSearchServiceClient.parse_chunk_path(path) + assert expected == actual + + +def test_conversation_path(): + project = "scallop" + location = "abalone" + data_store = "squid" + conversation = "clam" expected = "projects/{project}/locations/{location}/dataStores/{data_store}/conversations/{conversation}".format( project=project, location=location, @@ -6618,10 +11854,10 @@ def test_conversation_path(): def test_parse_conversation_path(): expected = { - "project": "oyster", - "location": "nudibranch", - "data_store": "cuttlefish", - "conversation": "mussel", + "project": "whelk", + "location": "octopus", + "data_store": "oyster", + "conversation": "nudibranch", } path = ConversationalSearchServiceClient.conversation_path(**expected) @@ -6631,9 +11867,9 @@ def test_parse_conversation_path(): def test_data_store_path(): - project = "winkle" - location = "nautilus" - data_store = "scallop" + project = "cuttlefish" + location = "mussel" + data_store = "winkle" expected = "projects/{project}/locations/{location}/dataStores/{data_store}".format( project=project, location=location, @@ -6647,9 +11883,9 @@ def test_data_store_path(): def test_parse_data_store_path(): expected = { - "project": "abalone", - "location": "squid", - "data_store": "clam", + "project": "nautilus", + "location": "scallop", + "data_store": "abalone", } path = ConversationalSearchServiceClient.data_store_path(**expected) @@ -6659,11 +11895,11 @@ def test_parse_data_store_path(): def test_document_path(): - project = "whelk" - location = "octopus" - data_store = "oyster" - branch = "nudibranch" - document = "cuttlefish" + project = "squid" + location = "clam" + data_store = "whelk" + branch = "octopus" + document = "oyster" expected = "projects/{project}/locations/{location}/dataStores/{data_store}/branches/{branch}/documents/{document}".format( project=project, location=location, @@ -6679,11 +11915,11 @@ def test_document_path(): def test_parse_document_path(): expected = { - "project": "mussel", - "location": "winkle", - "data_store": "nautilus", - "branch": "scallop", - "document": "abalone", + "project": "nudibranch", + "location": "cuttlefish", + "data_store": "mussel", + "branch": "winkle", + "document": "nautilus", } path = ConversationalSearchServiceClient.document_path(**expected) @@ -6693,10 +11929,10 @@ def test_parse_document_path(): def test_serving_config_path(): - project = "squid" - location = "clam" - data_store = "whelk" - serving_config = "octopus" + project = "scallop" + location = "abalone" + data_store = "squid" + serving_config = "clam" expected = "projects/{project}/locations/{location}/dataStores/{data_store}/servingConfigs/{serving_config}".format( project=project, location=location, @@ -6711,10 +11947,10 @@ def test_serving_config_path(): def test_parse_serving_config_path(): expected = { - "project": "oyster", - "location": "nudibranch", - "data_store": "cuttlefish", - "serving_config": "mussel", + "project": "whelk", + "location": "octopus", + "data_store": "oyster", + "serving_config": "nudibranch", } path = ConversationalSearchServiceClient.serving_config_path(**expected) @@ -6723,8 +11959,39 @@ def test_parse_serving_config_path(): assert expected == actual +def test_session_path(): + project = "cuttlefish" + location = "mussel" + data_store = "winkle" + session = "nautilus" + expected = "projects/{project}/locations/{location}/dataStores/{data_store}/sessions/{session}".format( + project=project, + location=location, + data_store=data_store, + session=session, + ) + actual 
= ConversationalSearchServiceClient.session_path( + project, location, data_store, session + ) + assert expected == actual + + +def test_parse_session_path(): + expected = { + "project": "scallop", + "location": "abalone", + "data_store": "squid", + "session": "clam", + } + path = ConversationalSearchServiceClient.session_path(**expected) + + # Check that the path construction is reversible. + actual = ConversationalSearchServiceClient.parse_session_path(path) + assert expected == actual + + def test_common_billing_account_path(): - billing_account = "winkle" + billing_account = "whelk" expected = "billingAccounts/{billing_account}".format( billing_account=billing_account, ) @@ -6736,7 +12003,7 @@ def test_common_billing_account_path(): def test_parse_common_billing_account_path(): expected = { - "billing_account": "nautilus", + "billing_account": "octopus", } path = ConversationalSearchServiceClient.common_billing_account_path(**expected) @@ -6746,7 +12013,7 @@ def test_parse_common_billing_account_path(): def test_common_folder_path(): - folder = "scallop" + folder = "oyster" expected = "folders/{folder}".format( folder=folder, ) @@ -6756,7 +12023,7 @@ def test_common_folder_path(): def test_parse_common_folder_path(): expected = { - "folder": "abalone", + "folder": "nudibranch", } path = ConversationalSearchServiceClient.common_folder_path(**expected) @@ -6766,7 +12033,7 @@ def test_parse_common_folder_path(): def test_common_organization_path(): - organization = "squid" + organization = "cuttlefish" expected = "organizations/{organization}".format( organization=organization, ) @@ -6776,7 +12043,7 @@ def test_common_organization_path(): def test_parse_common_organization_path(): expected = { - "organization": "clam", + "organization": "mussel", } path = ConversationalSearchServiceClient.common_organization_path(**expected) @@ -6786,7 +12053,7 @@ def test_parse_common_organization_path(): def test_common_project_path(): - project = "whelk" + project = "winkle" expected = "projects/{project}".format( project=project, ) @@ -6796,7 +12063,7 @@ def test_common_project_path(): def test_parse_common_project_path(): expected = { - "project": "octopus", + "project": "nautilus", } path = ConversationalSearchServiceClient.common_project_path(**expected) @@ -6806,8 +12073,8 @@ def test_parse_common_project_path(): def test_common_location_path(): - project = "oyster" - location = "nudibranch" + project = "scallop" + location = "abalone" expected = "projects/{project}/locations/{location}".format( project=project, location=location, @@ -6818,8 +12085,8 @@ def test_common_location_path(): def test_parse_common_location_path(): expected = { - "project": "cuttlefish", - "location": "mussel", + "project": "squid", + "location": "clam", } path = ConversationalSearchServiceClient.common_location_path(**expected) @@ -6865,6 +12132,64 @@ async def test_transport_close_async(): close.assert_called_once() +def test_cancel_operation_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.CancelOperationRequest +): + client = ConversationalSearchServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/operations/sample2"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.cancel_operation(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.CancelOperationRequest, + dict, + ], +) +def test_cancel_operation_rest(request_type): + client = ConversationalSearchServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {"name": "projects/sample1/operations/sample2"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "{}" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.cancel_operation(request) + + # Establish that the response is the type that we expect. + assert response is None + + def test_get_operation_rest_bad_request( transport: str = "rest", request_type=operations_pb2.GetOperationRequest ): @@ -6979,6 +12304,145 @@ def test_list_operations_rest(request_type): assert isinstance(response, operations_pb2.ListOperationsResponse) +def test_cancel_operation(transport: str = "grpc"): + client = ConversationalSearchServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.CancelOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_cancel_operation_async(transport: str = "grpc_asyncio"): + client = ConversationalSearchServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.CancelOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert response is None + + +def test_cancel_operation_field_headers(): + client = ConversationalSearchServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.CancelOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + call.return_value = None + + client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_cancel_operation_field_headers_async(): + client = ConversationalSearchServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.CancelOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_cancel_operation_from_dict(): + client = ConversationalSearchServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + + response = client.cancel_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_cancel_operation_from_dict_async(): + client = ConversationalSearchServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.cancel_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + def test_get_operation(transport: str = "grpc"): client = ConversationalSearchServiceClient( credentials=ga_credentials.AnonymousCredentials(), diff --git a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1/test_data_store_service.py b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1/test_data_store_service.py index d81f51b3f445..3b9d1ac3fb37 100644 --- a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1/test_data_store_service.py +++ b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1/test_data_store_service.py @@ -63,10 +63,14 @@ pagers, transports, ) +from google.cloud.discoveryengine_v1.types import ( + data_store_service, + document_processing_config, + schema, +) from google.cloud.discoveryengine_v1.types import common from google.cloud.discoveryengine_v1.types import data_store from google.cloud.discoveryengine_v1.types import data_store as gcd_data_store -from google.cloud.discoveryengine_v1.types import data_store_service, schema def client_cert_source_callback(): @@ -2383,13 +2387,13 @@ def test_list_data_stores_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_data_stores(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -3370,6 +3374,20 @@ def test_create_data_store_rest(request_type): "default_schema_id": "default_schema_id_value", "content_config": 1, "create_time": {"seconds": 751, "nanos": 543}, + "document_processing_config": { + "name": "name_value", + "default_parsing_config": { + "digital_parsing_config": {}, + "ocr_parsing_config": { + "enhanced_document_elements": [ + "enhanced_document_elements_value1", + "enhanced_document_elements_value2", + ], + "use_native_text": True, + }, + }, + "parsing_config_overrides": {}, + }, "starting_schema": { "struct_schema": {"fields": {}}, "json_schema": "json_schema_value", @@ -4784,6 +4802,20 @@ def test_update_data_store_rest(request_type): "default_schema_id": "default_schema_id_value", "content_config": 1, "create_time": {"seconds": 751, "nanos": 543}, + "document_processing_config": { + "name": "name_value", + "default_parsing_config": { + "digital_parsing_config": {}, + "ocr_parsing_config": { + "enhanced_document_elements": [ + "enhanced_document_elements_value1", + "enhanced_document_elements_value2", + ], + "use_native_text": True, + }, + }, + "parsing_config_overrides": {}, + }, "starting_schema": { "struct_schema": {"fields": {}}, "json_schema": "json_schema_value", @@ -5308,6 +5340,7 @@ def test_data_store_service_base_transport(): "delete_data_store", "update_data_store", "get_operation", + "cancel_operation", "list_operations", ) for method in methods: @@ -5820,11 +5853,39 @@ def test_parse_data_store_path(): assert expected == actual -def test_schema_path(): +def test_document_processing_config_path(): project = "squid" location = "clam" data_store = "whelk" - schema = "octopus" + expected = "projects/{project}/locations/{location}/dataStores/{data_store}/documentProcessingConfig".format( + project=project, + location=location, + 
data_store=data_store, + ) + actual = DataStoreServiceClient.document_processing_config_path( + project, location, data_store + ) + assert expected == actual + + +def test_parse_document_processing_config_path(): + expected = { + "project": "octopus", + "location": "oyster", + "data_store": "nudibranch", + } + path = DataStoreServiceClient.document_processing_config_path(**expected) + + # Check that the path construction is reversible. + actual = DataStoreServiceClient.parse_document_processing_config_path(path) + assert expected == actual + + +def test_schema_path(): + project = "cuttlefish" + location = "mussel" + data_store = "winkle" + schema = "nautilus" expected = "projects/{project}/locations/{location}/dataStores/{data_store}/schemas/{schema}".format( project=project, location=location, @@ -5837,10 +5898,10 @@ def test_schema_path(): def test_parse_schema_path(): expected = { - "project": "oyster", - "location": "nudibranch", - "data_store": "cuttlefish", - "schema": "mussel", + "project": "scallop", + "location": "abalone", + "data_store": "squid", + "schema": "clam", } path = DataStoreServiceClient.schema_path(**expected) @@ -5850,7 +5911,7 @@ def test_parse_schema_path(): def test_common_billing_account_path(): - billing_account = "winkle" + billing_account = "whelk" expected = "billingAccounts/{billing_account}".format( billing_account=billing_account, ) @@ -5860,7 +5921,7 @@ def test_common_billing_account_path(): def test_parse_common_billing_account_path(): expected = { - "billing_account": "nautilus", + "billing_account": "octopus", } path = DataStoreServiceClient.common_billing_account_path(**expected) @@ -5870,7 +5931,7 @@ def test_parse_common_billing_account_path(): def test_common_folder_path(): - folder = "scallop" + folder = "oyster" expected = "folders/{folder}".format( folder=folder, ) @@ -5880,7 +5941,7 @@ def test_common_folder_path(): def test_parse_common_folder_path(): expected = { - "folder": "abalone", + "folder": "nudibranch", } path = DataStoreServiceClient.common_folder_path(**expected) @@ -5890,7 +5951,7 @@ def test_parse_common_folder_path(): def test_common_organization_path(): - organization = "squid" + organization = "cuttlefish" expected = "organizations/{organization}".format( organization=organization, ) @@ -5900,7 +5961,7 @@ def test_common_organization_path(): def test_parse_common_organization_path(): expected = { - "organization": "clam", + "organization": "mussel", } path = DataStoreServiceClient.common_organization_path(**expected) @@ -5910,7 +5971,7 @@ def test_parse_common_organization_path(): def test_common_project_path(): - project = "whelk" + project = "winkle" expected = "projects/{project}".format( project=project, ) @@ -5920,7 +5981,7 @@ def test_common_project_path(): def test_parse_common_project_path(): expected = { - "project": "octopus", + "project": "nautilus", } path = DataStoreServiceClient.common_project_path(**expected) @@ -5930,8 +5991,8 @@ def test_parse_common_project_path(): def test_common_location_path(): - project = "oyster" - location = "nudibranch" + project = "scallop" + location = "abalone" expected = "projects/{project}/locations/{location}".format( project=project, location=location, @@ -5942,8 +6003,8 @@ def test_common_location_path(): def test_parse_common_location_path(): expected = { - "project": "cuttlefish", - "location": "mussel", + "project": "squid", + "location": "clam", } path = DataStoreServiceClient.common_location_path(**expected) @@ -5989,6 +6050,64 @@ async def test_transport_close_async(): 
close.assert_called_once() +def test_cancel_operation_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.CancelOperationRequest +): + client = DataStoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/operations/sample2"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.cancel_operation(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.CancelOperationRequest, + dict, + ], +) +def test_cancel_operation_rest(request_type): + client = DataStoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {"name": "projects/sample1/operations/sample2"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "{}" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.cancel_operation(request) + + # Establish that the response is the type that we expect. + assert response is None + + def test_get_operation_rest_bad_request( transport: str = "rest", request_type=operations_pb2.GetOperationRequest ): @@ -6103,6 +6222,145 @@ def test_list_operations_rest(request_type): assert isinstance(response, operations_pb2.ListOperationsResponse) +def test_cancel_operation(transport: str = "grpc"): + client = DataStoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.CancelOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_cancel_operation_async(transport: str = "grpc_asyncio"): + client = DataStoreServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.CancelOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +def test_cancel_operation_field_headers(): + client = DataStoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.CancelOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + call.return_value = None + + client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_cancel_operation_field_headers_async(): + client = DataStoreServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.CancelOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_cancel_operation_from_dict(): + client = DataStoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + + response = client.cancel_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_cancel_operation_from_dict_async(): + client = DataStoreServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.cancel_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + def test_get_operation(transport: str = "grpc"): client = DataStoreServiceClient( credentials=ga_credentials.AnonymousCredentials(), diff --git a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1/test_document_service.py b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1/test_document_service.py index 5d62dcceba8e..193e63157848 100644 --- a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1/test_document_service.py +++ b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1/test_document_service.py @@ -1950,13 +1950,13 @@ def test_list_documents_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_documents(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -6453,6 +6453,7 @@ def test_document_service_base_transport(): "import_documents", "purge_documents", "get_operation", + "cancel_operation", "list_operations", ) for method in methods: @@ -7151,6 +7152,64 @@ async def test_transport_close_async(): close.assert_called_once() +def test_cancel_operation_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.CancelOperationRequest +): + client = DocumentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/operations/sample2"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.cancel_operation(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.CancelOperationRequest, + dict, + ], +) +def test_cancel_operation_rest(request_type): + client = DocumentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {"name": "projects/sample1/operations/sample2"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "{}" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.cancel_operation(request) + + # Establish that the response is the type that we expect. 
+ assert response is None + + def test_get_operation_rest_bad_request( transport: str = "rest", request_type=operations_pb2.GetOperationRequest ): @@ -7265,6 +7324,145 @@ def test_list_operations_rest(request_type): assert isinstance(response, operations_pb2.ListOperationsResponse) +def test_cancel_operation(transport: str = "grpc"): + client = DocumentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.CancelOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_cancel_operation_async(transport: str = "grpc_asyncio"): + client = DocumentServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.CancelOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +def test_cancel_operation_field_headers(): + client = DocumentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.CancelOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + call.return_value = None + + client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_cancel_operation_field_headers_async(): + client = DocumentServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.CancelOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_cancel_operation_from_dict(): + client = DocumentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + + response = client.cancel_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_cancel_operation_from_dict_async(): + client = DocumentServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.cancel_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + def test_get_operation(transport: str = "grpc"): client = DocumentServiceClient( credentials=ga_credentials.AnonymousCredentials(), diff --git a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1/test_engine_service.py b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1/test_engine_service.py index fff03b6f46f2..0f1d7af0c15c 100644 --- a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1/test_engine_service.py +++ b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1/test_engine_service.py @@ -3145,13 +3145,13 @@ def test_list_engines_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_engines(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -5293,6 +5293,7 @@ def test_engine_service_base_transport(): "get_engine", "list_engines", "get_operation", + "cancel_operation", "list_operations", ) for method in methods: @@ -5946,6 +5947,64 @@ async def test_transport_close_async(): close.assert_called_once() +def test_cancel_operation_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.CancelOperationRequest +): + client = EngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/operations/sample2"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.cancel_operation(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.CancelOperationRequest, + dict, + ], +) +def test_cancel_operation_rest(request_type): + client = EngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {"name": "projects/sample1/operations/sample2"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "{}" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.cancel_operation(request) + + # Establish that the response is the type that we expect. + assert response is None + + def test_get_operation_rest_bad_request( transport: str = "rest", request_type=operations_pb2.GetOperationRequest ): @@ -6060,6 +6119,145 @@ def test_list_operations_rest(request_type): assert isinstance(response, operations_pb2.ListOperationsResponse) +def test_cancel_operation(transport: str = "grpc"): + client = EngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.CancelOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_cancel_operation_async(transport: str = "grpc_asyncio"): + client = EngineServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.CancelOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert response is None + + +def test_cancel_operation_field_headers(): + client = EngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.CancelOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + call.return_value = None + + client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_cancel_operation_field_headers_async(): + client = EngineServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.CancelOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_cancel_operation_from_dict(): + client = EngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + + response = client.cancel_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_cancel_operation_from_dict_async(): + client = EngineServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.cancel_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + def test_get_operation(transport: str = "grpc"): client = EngineServiceClient( credentials=ga_credentials.AnonymousCredentials(), diff --git a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1/test_grounded_generation_service.py b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1/test_grounded_generation_service.py new file mode 100644 index 000000000000..dff32622545d --- /dev/null +++ b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1/test_grounded_generation_service.py @@ -0,0 +1,3130 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import os + +# try/except added for compatibility with python < 3.8 +try: + from unittest import mock + from unittest.mock import AsyncMock # pragma: NO COVER +except ImportError: # pragma: NO COVER + import mock + +from collections.abc import Iterable +import json +import math + +from google.api_core import gapic_v1, grpc_helpers, grpc_helpers_async, path_template +from google.api_core import api_core_version, client_options +from google.api_core import exceptions as core_exceptions +import google.auth +from google.auth import credentials as ga_credentials +from google.auth.exceptions import MutualTLSChannelError +from google.cloud.location import locations_pb2 +from google.longrunning import operations_pb2 # type: ignore +from google.oauth2 import service_account +from google.protobuf import json_format +import grpc +from grpc.experimental import aio +from proto.marshal.rules import wrappers +from proto.marshal.rules.dates import DurationRule, TimestampRule +import pytest +from requests import PreparedRequest, Request, Response +from requests.sessions import Session + +from google.cloud.discoveryengine_v1.services.grounded_generation_service import ( + GroundedGenerationServiceAsyncClient, + GroundedGenerationServiceClient, + transports, +) +from google.cloud.discoveryengine_v1.types import grounded_generation_service, grounding + + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + + +# If default endpoint is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. +def modify_default_endpoint(client): + return ( + "foo.googleapis.com" + if ("localhost" in client.DEFAULT_ENDPOINT) + else client.DEFAULT_ENDPOINT + ) + + +# If default endpoint template is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint template so the client can produce a different +# mtls endpoint for endpoint testing purposes. 
+def modify_default_endpoint_template(client): + return ( + "test.{UNIVERSE_DOMAIN}" + if ("localhost" in client._DEFAULT_ENDPOINT_TEMPLATE) + else client._DEFAULT_ENDPOINT_TEMPLATE + ) + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert GroundedGenerationServiceClient._get_default_mtls_endpoint(None) is None + assert ( + GroundedGenerationServiceClient._get_default_mtls_endpoint(api_endpoint) + == api_mtls_endpoint + ) + assert ( + GroundedGenerationServiceClient._get_default_mtls_endpoint(api_mtls_endpoint) + == api_mtls_endpoint + ) + assert ( + GroundedGenerationServiceClient._get_default_mtls_endpoint(sandbox_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + GroundedGenerationServiceClient._get_default_mtls_endpoint( + sandbox_mtls_endpoint + ) + == sandbox_mtls_endpoint + ) + assert ( + GroundedGenerationServiceClient._get_default_mtls_endpoint(non_googleapi) + == non_googleapi + ) + + +def test__read_environment_variables(): + assert GroundedGenerationServiceClient._read_environment_variables() == ( + False, + "auto", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + assert GroundedGenerationServiceClient._read_environment_variables() == ( + True, + "auto", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + assert GroundedGenerationServiceClient._read_environment_variables() == ( + False, + "auto", + None, + ) + + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError) as excinfo: + GroundedGenerationServiceClient._read_environment_variables() + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + assert GroundedGenerationServiceClient._read_environment_variables() == ( + False, + "never", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + assert GroundedGenerationServiceClient._read_environment_variables() == ( + False, + "always", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}): + assert GroundedGenerationServiceClient._read_environment_variables() == ( + False, + "auto", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + GroundedGenerationServiceClient._read_environment_variables() + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + with mock.patch.dict(os.environ, {"GOOGLE_CLOUD_UNIVERSE_DOMAIN": "foo.com"}): + assert GroundedGenerationServiceClient._read_environment_variables() == ( + False, + "auto", + "foo.com", + ) + + +def test__get_client_cert_source(): + mock_provided_cert_source = mock.Mock() + mock_default_cert_source = mock.Mock() + + assert GroundedGenerationServiceClient._get_client_cert_source(None, False) is None + assert ( + GroundedGenerationServiceClient._get_client_cert_source( + mock_provided_cert_source, False + ) + is None + ) + assert ( + GroundedGenerationServiceClient._get_client_cert_source( + 
mock_provided_cert_source, True + ) + == mock_provided_cert_source + ) + + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", return_value=True + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=mock_default_cert_source, + ): + assert ( + GroundedGenerationServiceClient._get_client_cert_source(None, True) + is mock_default_cert_source + ) + assert ( + GroundedGenerationServiceClient._get_client_cert_source( + mock_provided_cert_source, "true" + ) + is mock_provided_cert_source + ) + + +@mock.patch.object( + GroundedGenerationServiceClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(GroundedGenerationServiceClient), +) +@mock.patch.object( + GroundedGenerationServiceAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(GroundedGenerationServiceAsyncClient), +) +def test__get_api_endpoint(): + api_override = "foo.com" + mock_client_cert_source = mock.Mock() + default_universe = GroundedGenerationServiceClient._DEFAULT_UNIVERSE + default_endpoint = ( + GroundedGenerationServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=default_universe + ) + ) + mock_universe = "bar.com" + mock_endpoint = GroundedGenerationServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=mock_universe + ) + + assert ( + GroundedGenerationServiceClient._get_api_endpoint( + api_override, mock_client_cert_source, default_universe, "always" + ) + == api_override + ) + assert ( + GroundedGenerationServiceClient._get_api_endpoint( + None, mock_client_cert_source, default_universe, "auto" + ) + == GroundedGenerationServiceClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + GroundedGenerationServiceClient._get_api_endpoint( + None, None, default_universe, "auto" + ) + == default_endpoint + ) + assert ( + GroundedGenerationServiceClient._get_api_endpoint( + None, None, default_universe, "always" + ) + == GroundedGenerationServiceClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + GroundedGenerationServiceClient._get_api_endpoint( + None, mock_client_cert_source, default_universe, "always" + ) + == GroundedGenerationServiceClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + GroundedGenerationServiceClient._get_api_endpoint( + None, None, mock_universe, "never" + ) + == mock_endpoint + ) + assert ( + GroundedGenerationServiceClient._get_api_endpoint( + None, None, default_universe, "never" + ) + == default_endpoint + ) + + with pytest.raises(MutualTLSChannelError) as excinfo: + GroundedGenerationServiceClient._get_api_endpoint( + None, mock_client_cert_source, mock_universe, "auto" + ) + assert ( + str(excinfo.value) + == "mTLS is not supported in any universe other than googleapis.com." + ) + + +def test__get_universe_domain(): + client_universe_domain = "foo.com" + universe_domain_env = "bar.com" + + assert ( + GroundedGenerationServiceClient._get_universe_domain( + client_universe_domain, universe_domain_env + ) + == client_universe_domain + ) + assert ( + GroundedGenerationServiceClient._get_universe_domain(None, universe_domain_env) + == universe_domain_env + ) + assert ( + GroundedGenerationServiceClient._get_universe_domain(None, None) + == GroundedGenerationServiceClient._DEFAULT_UNIVERSE + ) + + with pytest.raises(ValueError) as excinfo: + GroundedGenerationServiceClient._get_universe_domain("", None) + assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
+ + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + ( + GroundedGenerationServiceClient, + transports.GroundedGenerationServiceGrpcTransport, + "grpc", + ), + ( + GroundedGenerationServiceClient, + transports.GroundedGenerationServiceRestTransport, + "rest", + ), + ], +) +def test__validate_universe_domain(client_class, transport_class, transport_name): + client = client_class( + transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) + ) + assert client._validate_universe_domain() == True + + # Test the case when universe is already validated. + assert client._validate_universe_domain() == True + + if transport_name == "grpc": + # Test the case where credentials are provided by the + # `local_channel_credentials`. The default universes in both match. + channel = grpc.secure_channel( + "http://localhost/", grpc.local_channel_credentials() + ) + client = client_class(transport=transport_class(channel=channel)) + assert client._validate_universe_domain() == True + + # Test the case where credentials do not exist: e.g. a transport is provided + # with no credentials. Validation should still succeed because there is no + # mismatch with non-existent credentials. + channel = grpc.secure_channel( + "http://localhost/", grpc.local_channel_credentials() + ) + transport = transport_class(channel=channel) + transport._credentials = None + client = client_class(transport=transport) + assert client._validate_universe_domain() == True + + # TODO: This is needed to cater for older versions of google-auth + # Make this test unconditional once the minimum supported version of + # google-auth becomes 2.23.0 or higher. + google_auth_major, google_auth_minor = [ + int(part) for part in google.auth.__version__.split(".")[0:2] + ] + if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): + credentials = ga_credentials.AnonymousCredentials() + credentials._universe_domain = "foo.com" + # Test the case when there is a universe mismatch from the credentials. + client = client_class(transport=transport_class(credentials=credentials)) + with pytest.raises(ValueError) as excinfo: + client._validate_universe_domain() + assert ( + str(excinfo.value) + == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." + ) + + # Test the case when there is a universe mismatch from the client. + # + # TODO: Make this test unconditional once the minimum supported version of + # google-api-core becomes 2.15.0 or higher. + api_core_major, api_core_minor = [ + int(part) for part in api_core_version.__version__.split(".")[0:2] + ] + if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): + client = client_class( + client_options={"universe_domain": "bar.com"}, + transport=transport_class( + credentials=ga_credentials.AnonymousCredentials(), + ), + ) + with pytest.raises(ValueError) as excinfo: + client._validate_universe_domain() + assert ( + str(excinfo.value) + == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." 
+ ) + + # Test that ValueError is raised if universe_domain is provided via client options and credentials is None + with pytest.raises(ValueError): + client._compare_universes("foo.bar", None) + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (GroundedGenerationServiceClient, "grpc"), + (GroundedGenerationServiceAsyncClient, "grpc_asyncio"), + (GroundedGenerationServiceClient, "rest"), + ], +) +def test_grounded_generation_service_client_from_service_account_info( + client_class, transport_name +): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_info" + ) as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info, transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + "discoveryengine.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://discoveryengine.googleapis.com" + ) + + +@pytest.mark.parametrize( + "transport_class,transport_name", + [ + (transports.GroundedGenerationServiceGrpcTransport, "grpc"), + (transports.GroundedGenerationServiceGrpcAsyncIOTransport, "grpc_asyncio"), + (transports.GroundedGenerationServiceRestTransport, "rest"), + ], +) +def test_grounded_generation_service_client_service_account_always_use_jwt( + transport_class, transport_name +): + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) + + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=False) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (GroundedGenerationServiceClient, "grpc"), + (GroundedGenerationServiceAsyncClient, "grpc_asyncio"), + (GroundedGenerationServiceClient, "rest"), + ], +) +def test_grounded_generation_service_client_from_service_account_file( + client_class, transport_name +): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_file" + ) as factory: + factory.return_value = creds + client = client_class.from_service_account_file( + "dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + client = client_class.from_service_account_json( + "dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + "discoveryengine.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://discoveryengine.googleapis.com" + ) + + +def test_grounded_generation_service_client_get_transport_class(): + transport = GroundedGenerationServiceClient.get_transport_class() + available_transports = [ + transports.GroundedGenerationServiceGrpcTransport, + transports.GroundedGenerationServiceRestTransport, + ] + assert transport in 
available_transports + + transport = GroundedGenerationServiceClient.get_transport_class("grpc") + assert transport == transports.GroundedGenerationServiceGrpcTransport + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + ( + GroundedGenerationServiceClient, + transports.GroundedGenerationServiceGrpcTransport, + "grpc", + ), + ( + GroundedGenerationServiceAsyncClient, + transports.GroundedGenerationServiceGrpcAsyncIOTransport, + "grpc_asyncio", + ), + ( + GroundedGenerationServiceClient, + transports.GroundedGenerationServiceRestTransport, + "rest", + ), + ], +) +@mock.patch.object( + GroundedGenerationServiceClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(GroundedGenerationServiceClient), +) +@mock.patch.object( + GroundedGenerationServiceAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(GroundedGenerationServiceAsyncClient), +) +def test_grounded_generation_service_client_client_options( + client_class, transport_class, transport_name +): + # Check that if channel is provided we won't create a new one. + with mock.patch.object( + GroundedGenerationServiceClient, "get_transport_class" + ) as gtc: + transport = transport_class(credentials=ga_credentials.AnonymousCredentials()) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object( + GroundedGenerationServiceClient, "get_transport_class" + ) as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. + options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name, client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + client = client_class(transport=transport_name) + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError) as excinfo: + client = client_class(transport=transport_name) + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + # Check the case api_endpoint is provided + options = client_options.ClientOptions( + api_audience="https://language.googleapis.com" + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience="https://language.googleapis.com", + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,use_client_cert_env", + [ + ( + GroundedGenerationServiceClient, + transports.GroundedGenerationServiceGrpcTransport, + "grpc", + "true", + ), + ( + GroundedGenerationServiceAsyncClient, + transports.GroundedGenerationServiceGrpcAsyncIOTransport, + "grpc_asyncio", + "true", + ), + ( + GroundedGenerationServiceClient, + transports.GroundedGenerationServiceGrpcTransport, + "grpc", + "false", + ), + ( + GroundedGenerationServiceAsyncClient, + transports.GroundedGenerationServiceGrpcAsyncIOTransport, + "grpc_asyncio", + "false", + ), + ( + GroundedGenerationServiceClient, + transports.GroundedGenerationServiceRestTransport, + "rest", + "true", + ), + ( + 
GroundedGenerationServiceClient, + transports.GroundedGenerationServiceRestTransport, + "rest", + "false", + ), + ], +) +@mock.patch.object( + GroundedGenerationServiceClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(GroundedGenerationServiceClient), +) +@mock.patch.object( + GroundedGenerationServiceAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(GroundedGenerationServiceAsyncClient), +) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_grounded_generation_service_client_mtls_env_auto( + client_class, transport_class, transport_name, use_client_cert_env +): + # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. + + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + options = client_options.ClientOptions( + client_cert_source=client_cert_source_callback + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ) + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=client_cert_source_callback, + ): + if use_client_cert_env == "false": + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ) + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case client_cert_source and ADC client cert are not provided. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class", + [GroundedGenerationServiceClient, GroundedGenerationServiceAsyncClient], +) +@mock.patch.object( + GroundedGenerationServiceClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(GroundedGenerationServiceClient), +) +@mock.patch.object( + GroundedGenerationServiceAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(GroundedGenerationServiceAsyncClient), +) +def test_grounded_generation_service_client_get_mtls_endpoint_and_cert_source( + client_class, +): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=mock_client_cert_source, + ): + ( + api_endpoint, + cert_source, + ) = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + client_class.get_mtls_endpoint_and_cert_source() + + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError) as excinfo: + client_class.get_mtls_endpoint_and_cert_source() + + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + + +@pytest.mark.parametrize( + "client_class", + [GroundedGenerationServiceClient, GroundedGenerationServiceAsyncClient], +) +@mock.patch.object( + GroundedGenerationServiceClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(GroundedGenerationServiceClient), +) +@mock.patch.object( + GroundedGenerationServiceAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(GroundedGenerationServiceAsyncClient), +) +def test_grounded_generation_service_client_client_api_endpoint(client_class): + mock_client_cert_source = client_cert_source_callback + api_override = "foo.com" + default_universe = GroundedGenerationServiceClient._DEFAULT_UNIVERSE + default_endpoint = ( + GroundedGenerationServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=default_universe + ) + ) + mock_universe = "bar.com" + mock_endpoint = GroundedGenerationServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=mock_universe + ) + + # If ClientOptions.api_endpoint is set and GOOGLE_API_USE_CLIENT_CERTIFICATE="true", + # use ClientOptions.api_endpoint as the api endpoint regardless. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ): + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=api_override + ) + client = client_class( + client_options=options, + credentials=ga_credentials.AnonymousCredentials(), + ) + assert client.api_endpoint == api_override + + # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="never", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + client = client_class(credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == default_endpoint + + # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="always", + # use the DEFAULT_MTLS_ENDPOINT as the api endpoint. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + client = client_class(credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + + # If ClientOptions.api_endpoint is not set, GOOGLE_API_USE_MTLS_ENDPOINT="auto" (default), + # GOOGLE_API_USE_CLIENT_CERTIFICATE="false" (default), default cert source doesn't exist, + # and ClientOptions.universe_domain="bar.com", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with universe domain as the api endpoint. + options = client_options.ClientOptions() + universe_exists = hasattr(options, "universe_domain") + if universe_exists: + options = client_options.ClientOptions(universe_domain=mock_universe) + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + else: + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + assert client.api_endpoint == ( + mock_endpoint if universe_exists else default_endpoint + ) + assert client.universe_domain == ( + mock_universe if universe_exists else default_universe + ) + + # If ClientOptions does not have a universe domain attribute and GOOGLE_API_USE_MTLS_ENDPOINT="never", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. + options = client_options.ClientOptions() + if hasattr(options, "universe_domain"): + delattr(options, "universe_domain") + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + assert client.api_endpoint == default_endpoint + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + ( + GroundedGenerationServiceClient, + transports.GroundedGenerationServiceGrpcTransport, + "grpc", + ), + ( + GroundedGenerationServiceAsyncClient, + transports.GroundedGenerationServiceGrpcAsyncIOTransport, + "grpc_asyncio", + ), + ( + GroundedGenerationServiceClient, + transports.GroundedGenerationServiceRestTransport, + "rest", + ), + ], +) +def test_grounded_generation_service_client_client_options_scopes( + client_class, transport_class, transport_name +): + # Check the case scopes are provided. 
+ options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + GroundedGenerationServiceClient, + transports.GroundedGenerationServiceGrpcTransport, + "grpc", + grpc_helpers, + ), + ( + GroundedGenerationServiceAsyncClient, + transports.GroundedGenerationServiceGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + ( + GroundedGenerationServiceClient, + transports.GroundedGenerationServiceRestTransport, + "rest", + None, + ), + ], +) +def test_grounded_generation_service_client_client_options_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. + options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +def test_grounded_generation_service_client_client_options_from_dict(): + with mock.patch( + "google.cloud.discoveryengine_v1.services.grounded_generation_service.transports.GroundedGenerationServiceGrpcTransport.__init__" + ) as grpc_transport: + grpc_transport.return_value = None + client = GroundedGenerationServiceClient( + client_options={"api_endpoint": "squid.clam.whelk"} + ) + grpc_transport.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + GroundedGenerationServiceClient, + transports.GroundedGenerationServiceGrpcTransport, + "grpc", + grpc_helpers, + ), + ( + GroundedGenerationServiceAsyncClient, + transports.GroundedGenerationServiceGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + ], +) +def test_grounded_generation_service_client_create_channel_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. 
+ options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # test that the credentials from file are saved and used as the credentials. + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel" + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + file_creds = ga_credentials.AnonymousCredentials() + load_creds.return_value = (file_creds, None) + adc.return_value = (creds, None) + client = client_class(client_options=options, transport=transport_name) + create_channel.assert_called_with( + "discoveryengine.googleapis.com:443", + credentials=file_creds, + credentials_file=None, + quota_project_id=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + scopes=None, + default_host="discoveryengine.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "request_type", + [ + grounded_generation_service.CheckGroundingRequest, + dict, + ], +) +def test_check_grounding(request_type, transport: str = "grpc"): + client = GroundedGenerationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.check_grounding), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grounded_generation_service.CheckGroundingResponse( + support_score=0.1432, + ) + response = client.check_grounding(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = grounded_generation_service.CheckGroundingRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, grounded_generation_service.CheckGroundingResponse) + assert math.isclose(response.support_score, 0.1432, rel_tol=1e-6) + + +def test_check_grounding_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = GroundedGenerationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.check_grounding), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.check_grounding() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == grounded_generation_service.CheckGroundingRequest() + + +def test_check_grounding_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = GroundedGenerationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = grounded_generation_service.CheckGroundingRequest( + grounding_config="grounding_config_value", + answer_candidate="answer_candidate_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.check_grounding), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.check_grounding(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == grounded_generation_service.CheckGroundingRequest( + grounding_config="grounding_config_value", + answer_candidate="answer_candidate_value", + ) + + +def test_check_grounding_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = GroundedGenerationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.check_grounding in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.check_grounding] = mock_rpc + request = {} + client.check_grounding(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.check_grounding(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_check_grounding_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = GroundedGenerationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.check_grounding), "__call__") as call: + # Designate an appropriate return value for the call. 
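+        # FakeUnaryUnaryCall wraps the canned response in an awaitable fake gRPC
+        # call, so the async client can await it just like a real stub invocation.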
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + grounded_generation_service.CheckGroundingResponse( + support_score=0.1432, + ) + ) + response = await client.check_grounding() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == grounded_generation_service.CheckGroundingRequest() + + +@pytest.mark.asyncio +async def test_check_grounding_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = GroundedGenerationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.check_grounding + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[ + client._client._transport.check_grounding + ] = mock_object + + request = {} + await client.check_grounding(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + await client.check_grounding(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + +@pytest.mark.asyncio +async def test_check_grounding_async( + transport: str = "grpc_asyncio", + request_type=grounded_generation_service.CheckGroundingRequest, +): + client = GroundedGenerationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.check_grounding), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + grounded_generation_service.CheckGroundingResponse( + support_score=0.1432, + ) + ) + response = await client.check_grounding(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = grounded_generation_service.CheckGroundingRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, grounded_generation_service.CheckGroundingResponse) + assert math.isclose(response.support_score, 0.1432, rel_tol=1e-6) + + +@pytest.mark.asyncio +async def test_check_grounding_async_from_dict(): + await test_check_grounding_async(request_type=dict) + + +def test_check_grounding_field_headers(): + client = GroundedGenerationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
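+    # For CheckGrounding the routing header is derived from the grounding_config
+    # resource name populated below.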
+ request = grounded_generation_service.CheckGroundingRequest() + + request.grounding_config = "grounding_config_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.check_grounding), "__call__") as call: + call.return_value = grounded_generation_service.CheckGroundingResponse() + client.check_grounding(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "grounding_config=grounding_config_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_check_grounding_field_headers_async(): + client = GroundedGenerationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = grounded_generation_service.CheckGroundingRequest() + + request.grounding_config = "grounding_config_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.check_grounding), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + grounded_generation_service.CheckGroundingResponse() + ) + await client.check_grounding(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "grounding_config=grounding_config_value", + ) in kw["metadata"] + + +@pytest.mark.parametrize( + "request_type", + [ + grounded_generation_service.CheckGroundingRequest, + dict, + ], +) +def test_check_grounding_rest(request_type): + client = GroundedGenerationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "grounding_config": "projects/sample1/locations/sample2/groundingConfigs/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = grounded_generation_service.CheckGroundingResponse( + support_score=0.1432, + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = grounded_generation_service.CheckGroundingResponse.pb( + return_value + ) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.check_grounding(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, grounded_generation_service.CheckGroundingResponse) + assert math.isclose(response.support_score, 0.1432, rel_tol=1e-6) + + +def test_check_grounding_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = GroundedGenerationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.check_grounding in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.check_grounding] = mock_rpc + + request = {} + client.check_grounding(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.check_grounding(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_check_grounding_rest_required_fields( + request_type=grounded_generation_service.CheckGroundingRequest, +): + transport_class = transports.GroundedGenerationServiceRestTransport + + request_init = {} + request_init["grounding_config"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).check_grounding._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["groundingConfig"] = "grounding_config_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).check_grounding._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "groundingConfig" in jsonified_request + assert jsonified_request["groundingConfig"] == "grounding_config_value" + + client = GroundedGenerationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = grounded_generation_service.CheckGroundingResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
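+            # transcode() would normally map the request onto the configured HTTP
+            # rule (method, URI, body and query params); here it is stubbed out.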
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = grounded_generation_service.CheckGroundingResponse.pb( + return_value + ) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.check_grounding(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_check_grounding_rest_unset_required_fields(): + transport = transports.GroundedGenerationServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.check_grounding._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("groundingConfig",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_check_grounding_rest_interceptors(null_interceptor): + transport = transports.GroundedGenerationServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.GroundedGenerationServiceRestInterceptor(), + ) + client = GroundedGenerationServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.GroundedGenerationServiceRestInterceptor, "post_check_grounding" + ) as post, mock.patch.object( + transports.GroundedGenerationServiceRestInterceptor, "pre_check_grounding" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = grounded_generation_service.CheckGroundingRequest.pb( + grounded_generation_service.CheckGroundingRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = ( + grounded_generation_service.CheckGroundingResponse.to_json( + grounded_generation_service.CheckGroundingResponse() + ) + ) + + request = grounded_generation_service.CheckGroundingRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = grounded_generation_service.CheckGroundingResponse() + + client.check_grounding( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_check_grounding_rest_bad_request( + transport: str = "rest", + request_type=grounded_generation_service.CheckGroundingRequest, +): + client = GroundedGenerationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "grounding_config": "projects/sample1/locations/sample2/groundingConfigs/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
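+    # A 400 status on the underlying Session.request should surface as
+    # core_exceptions.BadRequest from the client method.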
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.check_grounding(request) + + +def test_check_grounding_rest_error(): + client = GroundedGenerationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.GroundedGenerationServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = GroundedGenerationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.GroundedGenerationServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = GroundedGenerationServiceClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. + transport = transports.GroundedGenerationServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = GroundedGenerationServiceClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = GroundedGenerationServiceClient( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.GroundedGenerationServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = GroundedGenerationServiceClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.GroundedGenerationServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = GroundedGenerationServiceClient(transport=transport) + assert client.transport is transport + + +def test_transport_get_channel(): + # A client may be instantiated with a custom transport instance. + transport = transports.GroundedGenerationServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + transport = transports.GroundedGenerationServiceGrpcAsyncIOTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.GroundedGenerationServiceGrpcTransport, + transports.GroundedGenerationServiceGrpcAsyncIOTransport, + transports.GroundedGenerationServiceRestTransport, + ], +) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. 
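+    # google.auth.default() is patched so no real ADC lookup happens in the test
+    # environment.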
+ with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "rest", + ], +) +def test_transport_kind(transport_name): + transport = GroundedGenerationServiceClient.get_transport_class(transport_name)( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert transport.kind == transport_name + + +def test_transport_grpc_default(): + # A client should use the gRPC transport by default. + client = GroundedGenerationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert isinstance( + client.transport, + transports.GroundedGenerationServiceGrpcTransport, + ) + + +def test_grounded_generation_service_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.GroundedGenerationServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json", + ) + + +def test_grounded_generation_service_base_transport(): + # Instantiate the base transport. + with mock.patch( + "google.cloud.discoveryengine_v1.services.grounded_generation_service.transports.GroundedGenerationServiceTransport.__init__" + ) as Transport: + Transport.return_value = None + transport = transports.GroundedGenerationServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. + methods = ( + "check_grounding", + "get_operation", + "cancel_operation", + "list_operations", + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + with pytest.raises(NotImplementedError): + transport.close() + + # Catch all for all remaining methods and properties + remainder = [ + "kind", + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + + +def test_grounded_generation_service_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch( + "google.cloud.discoveryengine_v1.services.grounded_generation_service.transports.GroundedGenerationServiceTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.GroundedGenerationServiceTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with( + "credentials.json", + scopes=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id="octopus", + ) + + +def test_grounded_generation_service_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. 
+ with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( + "google.cloud.discoveryengine_v1.services.grounded_generation_service.transports.GroundedGenerationServiceTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.GroundedGenerationServiceTransport() + adc.assert_called_once() + + +def test_grounded_generation_service_auth_adc(): + # If no credentials are provided, we should use ADC credentials. + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + GroundedGenerationServiceClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id=None, + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.GroundedGenerationServiceGrpcTransport, + transports.GroundedGenerationServiceGrpcAsyncIOTransport, + ], +) +def test_grounded_generation_service_transport_auth_adc(transport_class): + # If credentials and host are not provided, the transport class should use + # ADC credentials. + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + adc.assert_called_once_with( + scopes=["1", "2"], + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id="octopus", + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.GroundedGenerationServiceGrpcTransport, + transports.GroundedGenerationServiceGrpcAsyncIOTransport, + transports.GroundedGenerationServiceRestTransport, + ], +) +def test_grounded_generation_service_transport_auth_gdch_credentials(transport_class): + host = "https://language.com" + api_audience_tests = [None, "https://language2.com"] + api_audience_expect = [host, "https://language2.com"] + for t, e in zip(api_audience_tests, api_audience_expect): + with mock.patch.object(google.auth, "default", autospec=True) as adc: + gdch_mock = mock.MagicMock() + type(gdch_mock).with_gdch_audience = mock.PropertyMock( + return_value=gdch_mock + ) + adc.return_value = (gdch_mock, None) + transport_class(host=host, api_audience=t) + gdch_mock.with_gdch_audience.assert_called_once_with(e) + + +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.GroundedGenerationServiceGrpcTransport, grpc_helpers), + (transports.GroundedGenerationServiceGrpcAsyncIOTransport, grpc_helpers_async), + ], +) +def test_grounded_generation_service_transport_create_channel( + transport_class, grpc_helpers +): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
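+    # The channel should be created against the default service host with the
+    # default cloud-platform scope plus any user-supplied scopes.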
+ with mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + + create_channel.assert_called_with( + "discoveryengine.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + scopes=["1", "2"], + default_host="discoveryengine.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.GroundedGenerationServiceGrpcTransport, + transports.GroundedGenerationServiceGrpcAsyncIOTransport, + ], +) +def test_grounded_generation_service_grpc_transport_client_cert_source_for_mtls( + transport_class, +): + cred = ga_credentials.AnonymousCredentials() + + # Check ssl_channel_credentials is used if provided. + with mock.patch.object(transport_class, "create_channel") as mock_create_channel: + mock_ssl_channel_creds = mock.Mock() + transport_class( + host="squid.clam.whelk", + credentials=cred, + ssl_channel_credentials=mock_ssl_channel_creds, + ) + mock_create_channel.assert_called_once_with( + "squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_channel_creds, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls + # is used. 
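+    # The callback's certificate/key pair should be forwarded to
+    # grpc.ssl_channel_credentials when building the mTLS channel.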
+ with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): + with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: + transport_class( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback, + ) + expected_cert, expected_key = client_cert_source_callback() + mock_ssl_cred.assert_called_once_with( + certificate_chain=expected_cert, private_key=expected_key + ) + + +def test_grounded_generation_service_http_transport_client_cert_source_for_mtls(): + cred = ga_credentials.AnonymousCredentials() + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ) as mock_configure_mtls_channel: + transports.GroundedGenerationServiceRestTransport( + credentials=cred, client_cert_source_for_mtls=client_cert_source_callback + ) + mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + "rest", + ], +) +def test_grounded_generation_service_host_no_port(transport_name): + client = GroundedGenerationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="discoveryengine.googleapis.com" + ), + transport=transport_name, + ) + assert client.transport._host == ( + "discoveryengine.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://discoveryengine.googleapis.com" + ) + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + "rest", + ], +) +def test_grounded_generation_service_host_with_port(transport_name): + client = GroundedGenerationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="discoveryengine.googleapis.com:8000" + ), + transport=transport_name, + ) + assert client.transport._host == ( + "discoveryengine.googleapis.com:8000" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://discoveryengine.googleapis.com:8000" + ) + + +@pytest.mark.parametrize( + "transport_name", + [ + "rest", + ], +) +def test_grounded_generation_service_client_transport_session_collision(transport_name): + creds1 = ga_credentials.AnonymousCredentials() + creds2 = ga_credentials.AnonymousCredentials() + client1 = GroundedGenerationServiceClient( + credentials=creds1, + transport=transport_name, + ) + client2 = GroundedGenerationServiceClient( + credentials=creds2, + transport=transport_name, + ) + session1 = client1.transport.check_grounding._session + session2 = client2.transport.check_grounding._session + assert session1 != session2 + + +def test_grounded_generation_service_grpc_transport_channel(): + channel = grpc.secure_channel("http://localhost/", grpc.local_channel_credentials()) + + # Check that channel is used if provided. + transport = transports.GroundedGenerationServiceGrpcTransport( + host="squid.clam.whelk", + channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None + + +def test_grounded_generation_service_grpc_asyncio_transport_channel(): + channel = aio.secure_channel("http://localhost/", grpc.local_channel_credentials()) + + # Check that channel is used if provided. 
+ transport = transports.GroundedGenerationServiceGrpcAsyncIOTransport( + host="squid.clam.whelk", + channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. +@pytest.mark.parametrize( + "transport_class", + [ + transports.GroundedGenerationServiceGrpcTransport, + transports.GroundedGenerationServiceGrpcAsyncIOTransport, + ], +) +def test_grounded_generation_service_transport_channel_mtls_with_client_cert_source( + transport_class, +): + with mock.patch( + "grpc.ssl_channel_credentials", autospec=True + ) as grpc_ssl_channel_cred: + with mock.patch.object( + transport_class, "create_channel" + ) as grpc_create_channel: + mock_ssl_cred = mock.Mock() + grpc_ssl_channel_cred.return_value = mock_ssl_cred + + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + + cred = ga_credentials.AnonymousCredentials() + with pytest.warns(DeprecationWarning): + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (cred, None) + transport = transport_class( + host="squid.clam.whelk", + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=client_cert_source_callback, + ) + adc.assert_called_once() + + grpc_ssl_channel_cred.assert_called_once_with( + certificate_chain=b"cert bytes", private_key=b"key bytes" + ) + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + assert transport._ssl_channel_credentials == mock_ssl_cred + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. 
+@pytest.mark.parametrize( + "transport_class", + [ + transports.GroundedGenerationServiceGrpcTransport, + transports.GroundedGenerationServiceGrpcAsyncIOTransport, + ], +) +def test_grounded_generation_service_transport_channel_mtls_with_adc(transport_class): + mock_ssl_cred = mock.Mock() + with mock.patch.multiple( + "google.auth.transport.grpc.SslCredentials", + __init__=mock.Mock(return_value=None), + ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), + ): + with mock.patch.object( + transport_class, "create_channel" + ) as grpc_create_channel: + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + mock_cred = mock.Mock() + + with pytest.warns(DeprecationWarning): + transport = transport_class( + host="squid.clam.whelk", + credentials=mock_cred, + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=None, + ) + + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=mock_cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + + +def test_grounding_config_path(): + project = "squid" + location = "clam" + grounding_config = "whelk" + expected = "projects/{project}/locations/{location}/groundingConfigs/{grounding_config}".format( + project=project, + location=location, + grounding_config=grounding_config, + ) + actual = GroundedGenerationServiceClient.grounding_config_path( + project, location, grounding_config + ) + assert expected == actual + + +def test_parse_grounding_config_path(): + expected = { + "project": "octopus", + "location": "oyster", + "grounding_config": "nudibranch", + } + path = GroundedGenerationServiceClient.grounding_config_path(**expected) + + # Check that the path construction is reversible. + actual = GroundedGenerationServiceClient.parse_grounding_config_path(path) + assert expected == actual + + +def test_common_billing_account_path(): + billing_account = "cuttlefish" + expected = "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + actual = GroundedGenerationServiceClient.common_billing_account_path( + billing_account + ) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "mussel", + } + path = GroundedGenerationServiceClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. + actual = GroundedGenerationServiceClient.parse_common_billing_account_path(path) + assert expected == actual + + +def test_common_folder_path(): + folder = "winkle" + expected = "folders/{folder}".format( + folder=folder, + ) + actual = GroundedGenerationServiceClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "nautilus", + } + path = GroundedGenerationServiceClient.common_folder_path(**expected) + + # Check that the path construction is reversible. 
+ actual = GroundedGenerationServiceClient.parse_common_folder_path(path) + assert expected == actual + + +def test_common_organization_path(): + organization = "scallop" + expected = "organizations/{organization}".format( + organization=organization, + ) + actual = GroundedGenerationServiceClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "abalone", + } + path = GroundedGenerationServiceClient.common_organization_path(**expected) + + # Check that the path construction is reversible. + actual = GroundedGenerationServiceClient.parse_common_organization_path(path) + assert expected == actual + + +def test_common_project_path(): + project = "squid" + expected = "projects/{project}".format( + project=project, + ) + actual = GroundedGenerationServiceClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "clam", + } + path = GroundedGenerationServiceClient.common_project_path(**expected) + + # Check that the path construction is reversible. + actual = GroundedGenerationServiceClient.parse_common_project_path(path) + assert expected == actual + + +def test_common_location_path(): + project = "whelk" + location = "octopus" + expected = "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) + actual = GroundedGenerationServiceClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "oyster", + "location": "nudibranch", + } + path = GroundedGenerationServiceClient.common_location_path(**expected) + + # Check that the path construction is reversible. + actual = GroundedGenerationServiceClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object( + transports.GroundedGenerationServiceTransport, "_prep_wrapped_messages" + ) as prep: + client = GroundedGenerationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object( + transports.GroundedGenerationServiceTransport, "_prep_wrapped_messages" + ) as prep: + transport_class = GroundedGenerationServiceClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + +@pytest.mark.asyncio +async def test_transport_close_async(): + client = GroundedGenerationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + with mock.patch.object( + type(getattr(client.transport, "grpc_channel")), "close" + ) as close: + async with client: + close.assert_not_called() + close.assert_called_once() + + +def test_cancel_operation_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.CancelOperationRequest +): + client = GroundedGenerationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/operations/sample2"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.cancel_operation(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.CancelOperationRequest, + dict, + ], +) +def test_cancel_operation_rest(request_type): + client = GroundedGenerationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {"name": "projects/sample1/operations/sample2"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "{}" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.cancel_operation(request) + + # Establish that the response is the type that we expect. + assert response is None + + +def test_get_operation_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.GetOperationRequest +): + client = GroundedGenerationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/operations/sample2"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_operation(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.GetOperationRequest, + dict, + ], +) +def test_get_operation_rest(request_type): + client = GroundedGenerationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {"name": "projects/sample1/operations/sample2"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_operation(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, operations_pb2.Operation) + + +def test_list_operations_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.ListOperationsRequest +): + client = GroundedGenerationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict({"name": "projects/sample1"}, request) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_operations(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.ListOperationsRequest, + dict, + ], +) +def test_list_operations_rest(request_type): + client = GroundedGenerationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {"name": "projects/sample1"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.ListOperationsResponse() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_operations(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) + + +def test_cancel_operation(transport: str = "grpc"): + client = GroundedGenerationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.CancelOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_cancel_operation_async(transport: str = "grpc_asyncio"): + client = GroundedGenerationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.CancelOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +def test_cancel_operation_field_headers(): + client = GroundedGenerationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.CancelOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + call.return_value = None + + client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_cancel_operation_field_headers_async(): + client = GroundedGenerationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.CancelOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_cancel_operation_from_dict(): + client = GroundedGenerationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + + response = client.cancel_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_cancel_operation_from_dict_async(): + client = GroundedGenerationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.cancel_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_get_operation(transport: str = "grpc"): + client = GroundedGenerationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.GetOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation() + response = client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + + +@pytest.mark.asyncio +async def test_get_operation_async(transport: str = "grpc_asyncio"): + client = GroundedGenerationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.GetOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + response = await client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + + +def test_get_operation_field_headers(): + client = GroundedGenerationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.GetOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + call.return_value = operations_pb2.Operation() + + client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_operation_field_headers_async(): + client = GroundedGenerationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.GetOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + await client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_get_operation_from_dict(): + client = GroundedGenerationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation() + + response = client.get_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_get_operation_from_dict_async(): + client = GroundedGenerationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + response = await client.get_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_list_operations(transport: str = "grpc"): + client = GroundedGenerationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.ListOperationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.ListOperationsResponse() + response = client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) + + +@pytest.mark.asyncio +async def test_list_operations_async(transport: str = "grpc_asyncio"): + client = GroundedGenerationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.ListOperationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + response = await client.list_operations(request) + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) + + +def test_list_operations_field_headers(): + client = GroundedGenerationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.ListOperationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + call.return_value = operations_pb2.ListOperationsResponse() + + client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_operations_field_headers_async(): + client = GroundedGenerationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.ListOperationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + await client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_list_operations_from_dict(): + client = GroundedGenerationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.ListOperationsResponse() + + response = client.list_operations( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_list_operations_from_dict_async(): + client = GroundedGenerationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + response = await client.list_operations( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_transport_close(): + transports = { + "rest": "_session", + "grpc": "_grpc_channel", + } + + for transport, close_name in transports.items(): + client = GroundedGenerationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + with mock.patch.object( + type(getattr(client.transport, close_name)), "close" + ) as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +def test_client_ctx(): + transports = [ + "rest", + "grpc", + ] + for transport in transports: + client = GroundedGenerationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + # Test client calls underlying transport. + with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() + + +@pytest.mark.parametrize( + "client_class,transport_class", + [ + ( + GroundedGenerationServiceClient, + transports.GroundedGenerationServiceGrpcTransport, + ), + ( + GroundedGenerationServiceAsyncClient, + transports.GroundedGenerationServiceGrpcAsyncIOTransport, + ), + ], +) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) diff --git a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1/test_project_service.py b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1/test_project_service.py new file mode 100644 index 000000000000..ac47ee276645 --- /dev/null +++ b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1/test_project_service.py @@ -0,0 +1,3252 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
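The `test_transport_close` and `test_client_ctx` tests above verify that using the client as a context manager closes the underlying transport exactly once. A reduced sketch of that contract, assuming a hypothetical `ToyClient`/`ToyTransport` pair rather than the generated classes:

```python
from unittest import mock


class ToyTransport:
    def close(self):
        pass  # a real transport would release its channel or session here


class ToyClient:
    def __init__(self, transport):
        self.transport = transport

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc, tb):
        self.transport.close()


def test_ctx_closes_transport():
    client = ToyClient(ToyTransport())
    with mock.patch.object(type(client.transport), "close") as close:
        close.assert_not_called()
        with client:
            pass
        close.assert_called_once()


test_ctx_closes_transport()
```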
+# +import os + +# try/except added for compatibility with python < 3.8 +try: + from unittest import mock + from unittest.mock import AsyncMock # pragma: NO COVER +except ImportError: # pragma: NO COVER + import mock + +from collections.abc import Iterable +import json +import math + +from google.api_core import ( + future, + gapic_v1, + grpc_helpers, + grpc_helpers_async, + operation, + operations_v1, + path_template, +) +from google.api_core import api_core_version, client_options +from google.api_core import exceptions as core_exceptions +from google.api_core import operation_async # type: ignore +import google.auth +from google.auth import credentials as ga_credentials +from google.auth.exceptions import MutualTLSChannelError +from google.cloud.location import locations_pb2 +from google.longrunning import operations_pb2 # type: ignore +from google.oauth2 import service_account +from google.protobuf import json_format +import grpc +from grpc.experimental import aio +from proto.marshal.rules import wrappers +from proto.marshal.rules.dates import DurationRule, TimestampRule +import pytest +from requests import PreparedRequest, Request, Response +from requests.sessions import Session + +from google.cloud.discoveryengine_v1.services.project_service import ( + ProjectServiceAsyncClient, + ProjectServiceClient, + transports, +) +from google.cloud.discoveryengine_v1.types import project, project_service + + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + + +# If default endpoint is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. +def modify_default_endpoint(client): + return ( + "foo.googleapis.com" + if ("localhost" in client.DEFAULT_ENDPOINT) + else client.DEFAULT_ENDPOINT + ) + + +# If default endpoint template is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint template so the client can produce a different +# mtls endpoint for endpoint testing purposes. 
+def modify_default_endpoint_template(client): + return ( + "test.{UNIVERSE_DOMAIN}" + if ("localhost" in client._DEFAULT_ENDPOINT_TEMPLATE) + else client._DEFAULT_ENDPOINT_TEMPLATE + ) + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert ProjectServiceClient._get_default_mtls_endpoint(None) is None + assert ( + ProjectServiceClient._get_default_mtls_endpoint(api_endpoint) + == api_mtls_endpoint + ) + assert ( + ProjectServiceClient._get_default_mtls_endpoint(api_mtls_endpoint) + == api_mtls_endpoint + ) + assert ( + ProjectServiceClient._get_default_mtls_endpoint(sandbox_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + ProjectServiceClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + ProjectServiceClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi + ) + + +def test__read_environment_variables(): + assert ProjectServiceClient._read_environment_variables() == (False, "auto", None) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + assert ProjectServiceClient._read_environment_variables() == ( + True, + "auto", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + assert ProjectServiceClient._read_environment_variables() == ( + False, + "auto", + None, + ) + + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError) as excinfo: + ProjectServiceClient._read_environment_variables() + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + assert ProjectServiceClient._read_environment_variables() == ( + False, + "never", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + assert ProjectServiceClient._read_environment_variables() == ( + False, + "always", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}): + assert ProjectServiceClient._read_environment_variables() == ( + False, + "auto", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + ProjectServiceClient._read_environment_variables() + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + with mock.patch.dict(os.environ, {"GOOGLE_CLOUD_UNIVERSE_DOMAIN": "foo.com"}): + assert ProjectServiceClient._read_environment_variables() == ( + False, + "auto", + "foo.com", + ) + + +def test__get_client_cert_source(): + mock_provided_cert_source = mock.Mock() + mock_default_cert_source = mock.Mock() + + assert ProjectServiceClient._get_client_cert_source(None, False) is None + assert ( + ProjectServiceClient._get_client_cert_source(mock_provided_cert_source, False) + is None + ) + assert ( + ProjectServiceClient._get_client_cert_source(mock_provided_cert_source, True) + == mock_provided_cert_source + ) + + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", return_value=True + ): + with mock.patch( + 
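`test__read_environment_variables` above drives configuration through `mock.patch.dict(os.environ, ...)`, which restores the environment when the `with` block exits. A small sketch of the same technique against a hypothetical parser; `read_use_client_cert` is not the library's function, the real logic lives in `ProjectServiceClient._read_environment_variables`.

```python
import os
from unittest import mock

import pytest


def read_use_client_cert():
    # Hypothetical analogue of the true/false parsing exercised above.
    raw = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false")
    if raw not in ("true", "false"):
        raise ValueError(
            "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be "
            "either `true` or `false`"
        )
    return raw == "true"


def test_read_use_client_cert():
    # mock.patch.dict restores os.environ on exit, which is why the generated
    # tests can probe many values back to back without leaking state.
    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}):
        assert read_use_client_cert() is True
    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "bogus"}):
        with pytest.raises(ValueError):
            read_use_client_cert()


test_read_use_client_cert()
```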
"google.auth.transport.mtls.default_client_cert_source", + return_value=mock_default_cert_source, + ): + assert ( + ProjectServiceClient._get_client_cert_source(None, True) + is mock_default_cert_source + ) + assert ( + ProjectServiceClient._get_client_cert_source( + mock_provided_cert_source, "true" + ) + is mock_provided_cert_source + ) + + +@mock.patch.object( + ProjectServiceClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(ProjectServiceClient), +) +@mock.patch.object( + ProjectServiceAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(ProjectServiceAsyncClient), +) +def test__get_api_endpoint(): + api_override = "foo.com" + mock_client_cert_source = mock.Mock() + default_universe = ProjectServiceClient._DEFAULT_UNIVERSE + default_endpoint = ProjectServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=default_universe + ) + mock_universe = "bar.com" + mock_endpoint = ProjectServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=mock_universe + ) + + assert ( + ProjectServiceClient._get_api_endpoint( + api_override, mock_client_cert_source, default_universe, "always" + ) + == api_override + ) + assert ( + ProjectServiceClient._get_api_endpoint( + None, mock_client_cert_source, default_universe, "auto" + ) + == ProjectServiceClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + ProjectServiceClient._get_api_endpoint(None, None, default_universe, "auto") + == default_endpoint + ) + assert ( + ProjectServiceClient._get_api_endpoint(None, None, default_universe, "always") + == ProjectServiceClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + ProjectServiceClient._get_api_endpoint( + None, mock_client_cert_source, default_universe, "always" + ) + == ProjectServiceClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + ProjectServiceClient._get_api_endpoint(None, None, mock_universe, "never") + == mock_endpoint + ) + assert ( + ProjectServiceClient._get_api_endpoint(None, None, default_universe, "never") + == default_endpoint + ) + + with pytest.raises(MutualTLSChannelError) as excinfo: + ProjectServiceClient._get_api_endpoint( + None, mock_client_cert_source, mock_universe, "auto" + ) + assert ( + str(excinfo.value) + == "mTLS is not supported in any universe other than googleapis.com." + ) + + +def test__get_universe_domain(): + client_universe_domain = "foo.com" + universe_domain_env = "bar.com" + + assert ( + ProjectServiceClient._get_universe_domain( + client_universe_domain, universe_domain_env + ) + == client_universe_domain + ) + assert ( + ProjectServiceClient._get_universe_domain(None, universe_domain_env) + == universe_domain_env + ) + assert ( + ProjectServiceClient._get_universe_domain(None, None) + == ProjectServiceClient._DEFAULT_UNIVERSE + ) + + with pytest.raises(ValueError) as excinfo: + ProjectServiceClient._get_universe_domain("", None) + assert str(excinfo.value) == "Universe Domain cannot be an empty string." + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (ProjectServiceClient, transports.ProjectServiceGrpcTransport, "grpc"), + (ProjectServiceClient, transports.ProjectServiceRestTransport, "rest"), + ], +) +def test__validate_universe_domain(client_class, transport_class, transport_name): + client = client_class( + transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) + ) + assert client._validate_universe_domain() == True + + # Test the case when universe is already validated. 
+ assert client._validate_universe_domain() == True + + if transport_name == "grpc": + # Test the case where credentials are provided by the + # `local_channel_credentials`. The default universes in both match. + channel = grpc.secure_channel( + "http://localhost/", grpc.local_channel_credentials() + ) + client = client_class(transport=transport_class(channel=channel)) + assert client._validate_universe_domain() == True + + # Test the case where credentials do not exist: e.g. a transport is provided + # with no credentials. Validation should still succeed because there is no + # mismatch with non-existent credentials. + channel = grpc.secure_channel( + "http://localhost/", grpc.local_channel_credentials() + ) + transport = transport_class(channel=channel) + transport._credentials = None + client = client_class(transport=transport) + assert client._validate_universe_domain() == True + + # TODO: This is needed to cater for older versions of google-auth + # Make this test unconditional once the minimum supported version of + # google-auth becomes 2.23.0 or higher. + google_auth_major, google_auth_minor = [ + int(part) for part in google.auth.__version__.split(".")[0:2] + ] + if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): + credentials = ga_credentials.AnonymousCredentials() + credentials._universe_domain = "foo.com" + # Test the case when there is a universe mismatch from the credentials. + client = client_class(transport=transport_class(credentials=credentials)) + with pytest.raises(ValueError) as excinfo: + client._validate_universe_domain() + assert ( + str(excinfo.value) + == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." + ) + + # Test the case when there is a universe mismatch from the client. + # + # TODO: Make this test unconditional once the minimum supported version of + # google-api-core becomes 2.15.0 or higher. + api_core_major, api_core_minor = [ + int(part) for part in api_core_version.__version__.split(".")[0:2] + ] + if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): + client = client_class( + client_options={"universe_domain": "bar.com"}, + transport=transport_class( + credentials=ga_credentials.AnonymousCredentials(), + ), + ) + with pytest.raises(ValueError) as excinfo: + client._validate_universe_domain() + assert ( + str(excinfo.value) + == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." 
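The universe-domain test above gates its newer assertions on the installed `google-auth` (>= 2.23) and `google-api-core` (>= 2.15) versions by comparing only the first two components of `__version__`. A tiny sketch of that comparison, with `runtime_at_least` as a hypothetical helper:

```python
def runtime_at_least(version_str, major, minor):
    # Hypothetical helper mirroring the gate above: only the first two dotted
    # components are compared, exactly as done with google.auth.__version__
    # and api_core_version.__version__.
    found_major, found_minor = [int(part) for part in version_str.split(".")[0:2]]
    return (found_major, found_minor) >= (major, minor)


assert runtime_at_least("2.23.0", 2, 23)
assert runtime_at_least("3.0.0", 2, 23)
assert not runtime_at_least("2.14.9", 2, 15)
```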
+ ) + + # Test that ValueError is raised if universe_domain is provided via client options and credentials is None + with pytest.raises(ValueError): + client._compare_universes("foo.bar", None) + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (ProjectServiceClient, "grpc"), + (ProjectServiceAsyncClient, "grpc_asyncio"), + (ProjectServiceClient, "rest"), + ], +) +def test_project_service_client_from_service_account_info(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_info" + ) as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info, transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + "discoveryengine.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://discoveryengine.googleapis.com" + ) + + +@pytest.mark.parametrize( + "transport_class,transport_name", + [ + (transports.ProjectServiceGrpcTransport, "grpc"), + (transports.ProjectServiceGrpcAsyncIOTransport, "grpc_asyncio"), + (transports.ProjectServiceRestTransport, "rest"), + ], +) +def test_project_service_client_service_account_always_use_jwt( + transport_class, transport_name +): + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) + + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=False) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (ProjectServiceClient, "grpc"), + (ProjectServiceAsyncClient, "grpc_asyncio"), + (ProjectServiceClient, "rest"), + ], +) +def test_project_service_client_from_service_account_file(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_file" + ) as factory: + factory.return_value = creds + client = client_class.from_service_account_file( + "dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + client = client_class.from_service_account_json( + "dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + "discoveryengine.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://discoveryengine.googleapis.com" + ) + + +def test_project_service_client_get_transport_class(): + transport = ProjectServiceClient.get_transport_class() + available_transports = [ + transports.ProjectServiceGrpcTransport, + transports.ProjectServiceRestTransport, + ] + assert transport in available_transports + + transport = ProjectServiceClient.get_transport_class("grpc") + assert transport == transports.ProjectServiceGrpcTransport + + +@pytest.mark.parametrize( + 
"client_class,transport_class,transport_name", + [ + (ProjectServiceClient, transports.ProjectServiceGrpcTransport, "grpc"), + ( + ProjectServiceAsyncClient, + transports.ProjectServiceGrpcAsyncIOTransport, + "grpc_asyncio", + ), + (ProjectServiceClient, transports.ProjectServiceRestTransport, "rest"), + ], +) +@mock.patch.object( + ProjectServiceClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(ProjectServiceClient), +) +@mock.patch.object( + ProjectServiceAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(ProjectServiceAsyncClient), +) +def test_project_service_client_client_options( + client_class, transport_class, transport_name +): + # Check that if channel is provided we won't create a new one. + with mock.patch.object(ProjectServiceClient, "get_transport_class") as gtc: + transport = transport_class(credentials=ga_credentials.AnonymousCredentials()) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object(ProjectServiceClient, "get_transport_class") as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. + options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name, client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + client = client_class(transport=transport_name) + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError) as excinfo: + client = client_class(transport=transport_name) + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + # Check the case api_endpoint is provided + options = client_options.ClientOptions( + api_audience="https://language.googleapis.com" + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience="https://language.googleapis.com", + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,use_client_cert_env", + [ + (ProjectServiceClient, transports.ProjectServiceGrpcTransport, "grpc", "true"), + ( + ProjectServiceAsyncClient, + transports.ProjectServiceGrpcAsyncIOTransport, + "grpc_asyncio", + "true", + ), + (ProjectServiceClient, transports.ProjectServiceGrpcTransport, "grpc", "false"), + ( + ProjectServiceAsyncClient, + transports.ProjectServiceGrpcAsyncIOTransport, + "grpc_asyncio", + "false", + ), + (ProjectServiceClient, transports.ProjectServiceRestTransport, "rest", "true"), + (ProjectServiceClient, transports.ProjectServiceRestTransport, "rest", "false"), + ], +) +@mock.patch.object( + ProjectServiceClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(ProjectServiceClient), +) +@mock.patch.object( + ProjectServiceAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(ProjectServiceAsyncClient), +) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_project_service_client_mtls_env_auto( + client_class, transport_class, transport_name, use_client_cert_env +): + # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. 
+ + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + options = client_options.ClientOptions( + client_cert_source=client_cert_source_callback + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ) + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=client_cert_source_callback, + ): + if use_client_cert_env == "false": + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ) + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case client_cert_source and ADC client cert are not provided. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class", [ProjectServiceClient, ProjectServiceAsyncClient] +) +@mock.patch.object( + ProjectServiceClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(ProjectServiceClient), +) +@mock.patch.object( + ProjectServiceAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(ProjectServiceAsyncClient), +) +def test_project_service_client_get_mtls_endpoint_and_cert_source(client_class): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=mock_client_cert_source, + ): + ( + api_endpoint, + cert_source, + ) = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + client_class.get_mtls_endpoint_and_cert_source() + + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError) as excinfo: + client_class.get_mtls_endpoint_and_cert_source() + + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + + +@pytest.mark.parametrize( + "client_class", [ProjectServiceClient, ProjectServiceAsyncClient] +) +@mock.patch.object( + ProjectServiceClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(ProjectServiceClient), +) +@mock.patch.object( + ProjectServiceAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(ProjectServiceAsyncClient), +) +def test_project_service_client_client_api_endpoint(client_class): + mock_client_cert_source = client_cert_source_callback + api_override = "foo.com" + default_universe = ProjectServiceClient._DEFAULT_UNIVERSE + default_endpoint = ProjectServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=default_universe + ) + mock_universe = "bar.com" + mock_endpoint = ProjectServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=mock_universe + ) + + # If ClientOptions.api_endpoint is set and GOOGLE_API_USE_CLIENT_CERTIFICATE="true", + # use ClientOptions.api_endpoint as the api endpoint regardless. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ): + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=api_override + ) + client = client_class( + client_options=options, + credentials=ga_credentials.AnonymousCredentials(), + ) + assert client.api_endpoint == api_override + + # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="never", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + client = client_class(credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == default_endpoint + + # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="always", + # use the DEFAULT_MTLS_ENDPOINT as the api endpoint. 
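The `get_mtls_endpoint_and_cert_source` cases above effectively probe a small decision table: an explicit `api_endpoint` wins, the client certificate is only honoured when `GOOGLE_API_USE_CLIENT_CERTIFICATE` is `true`, and the mTLS endpoint is chosen for `always` or for `auto` with a certificate available. A hypothetical condensation of that table (not the library's implementation), replayed against a few of the asserted cases:

```python
def resolve_endpoint_and_cert(
    api_endpoint=None,
    use_client_cert="false",
    use_mtls_endpoint="auto",
    client_cert_source=None,
    default_endpoint="discoveryengine.googleapis.com",
    mtls_endpoint="discoveryengine.mtls.googleapis.com",
):
    # Hypothetical condensation of the decision table probed above.
    if use_client_cert not in ("true", "false"):
        raise ValueError(
            "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be "
            "either `true` or `false`"
        )
    if use_mtls_endpoint not in ("never", "auto", "always"):
        raise ValueError(
            "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be "
            "`never`, `auto` or `always`"
        )
    cert = client_cert_source if use_client_cert == "true" else None
    if api_endpoint is not None:
        return api_endpoint, cert
    if use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and cert):
        return mtls_endpoint, cert
    return default_endpoint, cert


# A few of the asserted cases, replayed against the sketch:
assert resolve_endpoint_and_cert(
    api_endpoint="foo", use_client_cert="true", client_cert_source=object
) == ("foo", object)
assert resolve_endpoint_and_cert(use_mtls_endpoint="never") == (
    "discoveryengine.googleapis.com",
    None,
)
assert (
    resolve_endpoint_and_cert(use_client_cert="true", client_cert_source=object)[0]
    == "discoveryengine.mtls.googleapis.com"
)
```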
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + client = client_class(credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + + # If ClientOptions.api_endpoint is not set, GOOGLE_API_USE_MTLS_ENDPOINT="auto" (default), + # GOOGLE_API_USE_CLIENT_CERTIFICATE="false" (default), default cert source doesn't exist, + # and ClientOptions.universe_domain="bar.com", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with universe domain as the api endpoint. + options = client_options.ClientOptions() + universe_exists = hasattr(options, "universe_domain") + if universe_exists: + options = client_options.ClientOptions(universe_domain=mock_universe) + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + else: + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + assert client.api_endpoint == ( + mock_endpoint if universe_exists else default_endpoint + ) + assert client.universe_domain == ( + mock_universe if universe_exists else default_universe + ) + + # If ClientOptions does not have a universe domain attribute and GOOGLE_API_USE_MTLS_ENDPOINT="never", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. + options = client_options.ClientOptions() + if hasattr(options, "universe_domain"): + delattr(options, "universe_domain") + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + assert client.api_endpoint == default_endpoint + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (ProjectServiceClient, transports.ProjectServiceGrpcTransport, "grpc"), + ( + ProjectServiceAsyncClient, + transports.ProjectServiceGrpcAsyncIOTransport, + "grpc_asyncio", + ), + (ProjectServiceClient, transports.ProjectServiceRestTransport, "rest"), + ], +) +def test_project_service_client_client_options_scopes( + client_class, transport_class, transport_name +): + # Check the case scopes are provided. + options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + ProjectServiceClient, + transports.ProjectServiceGrpcTransport, + "grpc", + grpc_helpers, + ), + ( + ProjectServiceAsyncClient, + transports.ProjectServiceGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + (ProjectServiceClient, transports.ProjectServiceRestTransport, "rest", None), + ], +) +def test_project_service_client_client_options_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. 
+ options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +def test_project_service_client_client_options_from_dict(): + with mock.patch( + "google.cloud.discoveryengine_v1.services.project_service.transports.ProjectServiceGrpcTransport.__init__" + ) as grpc_transport: + grpc_transport.return_value = None + client = ProjectServiceClient( + client_options={"api_endpoint": "squid.clam.whelk"} + ) + grpc_transport.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + ProjectServiceClient, + transports.ProjectServiceGrpcTransport, + "grpc", + grpc_helpers, + ), + ( + ProjectServiceAsyncClient, + transports.ProjectServiceGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + ], +) +def test_project_service_client_create_channel_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. + options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # test that the credentials from file are saved and used as the credentials. 
+ with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel" + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + file_creds = ga_credentials.AnonymousCredentials() + load_creds.return_value = (file_creds, None) + adc.return_value = (creds, None) + client = client_class(client_options=options, transport=transport_name) + create_channel.assert_called_with( + "discoveryengine.googleapis.com:443", + credentials=file_creds, + credentials_file=None, + quota_project_id=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + scopes=None, + default_host="discoveryengine.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "request_type", + [ + project_service.ProvisionProjectRequest, + dict, + ], +) +def test_provision_project(request_type, transport: str = "grpc"): + client = ProjectServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.provision_project), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.provision_project(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = project_service.ProvisionProjectRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_provision_project_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ProjectServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.provision_project), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.provision_project() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == project_service.ProvisionProjectRequest() + + +def test_provision_project_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = ProjectServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = project_service.ProvisionProjectRequest( + name="name_value", + data_use_terms_version="data_use_terms_version_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.provision_project), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.provision_project(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == project_service.ProvisionProjectRequest( + name="name_value", + data_use_terms_version="data_use_terms_version_value", + ) + + +def test_provision_project_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ProjectServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.provision_project in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.provision_project + ] = mock_rpc + request = {} + client.provision_project(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.provision_project(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_provision_project_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ProjectServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.provision_project), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
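`test_provision_project_use_cached_wrapped_rpc` above checks that the RPC is wrapped once at client construction (via `_prep_wrapped_messages`) and that later calls reuse the cached wrapper. The sketch below shows the same intent with a plain counter instead of patching `gapic_v1.method.wrap_method`; `ToyClient` and this `wrap_method` are hypothetical:

```python
WRAP_CALLS = {"count": 0}


def wrap_method(func):
    # Stand-in counter for gapic_v1.method.wrap_method.
    WRAP_CALLS["count"] += 1

    def wrapped(request):
        return func(request)

    return wrapped


class ToyClient:
    # Hypothetical client that pre-wraps its RPCs once at construction,
    # mirroring what _prep_wrapped_messages does for the generated clients.
    def __init__(self):
        self._wrapped_methods = {
            "provision_project": wrap_method(self._provision_project),
        }

    def _provision_project(self, request):
        return {"name": "operations/spam"}

    def provision_project(self, request):
        # Look up the cached wrapper instead of re-wrapping on every call.
        return self._wrapped_methods["provision_project"](request)


client = ToyClient()
assert WRAP_CALLS["count"] == 1  # wrapped once, at construction time
client.provision_project({})
client.provision_project({})
assert WRAP_CALLS["count"] == 1  # later calls reuse the cached wrapper
```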
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.provision_project() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == project_service.ProvisionProjectRequest() + + +@pytest.mark.asyncio +async def test_provision_project_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = ProjectServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.provision_project + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[ + client._client._transport.provision_project + ] = mock_object + + request = {} + await client.provision_project(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.provision_project(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + +@pytest.mark.asyncio +async def test_provision_project_async( + transport: str = "grpc_asyncio", + request_type=project_service.ProvisionProjectRequest, +): + client = ProjectServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.provision_project), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.provision_project(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = project_service.ProvisionProjectRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_provision_project_async_from_dict(): + await test_provision_project_async(request_type=dict) + + +def test_provision_project_field_headers(): + client = ProjectServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = project_service.ProvisionProjectRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
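The asynchronous tests above cannot hand a bare message back from the mocked stub because the client code awaits the call, so they wrap responses in `grpc_helpers_async.FakeUnaryUnaryCall`. A self-contained sketch of why that wrapper is needed, using a hypothetical `FakeAwaitableCall` and toy transport:

```python
import asyncio
from unittest import mock


class FakeAwaitableCall:
    # Hypothetical stand-in for grpc_helpers_async.FakeUnaryUnaryCall: it
    # wraps a plain response so the awaiting client code can unwrap it.
    def __init__(self, response):
        self._response = response

    def __await__(self):
        async def _inner():
            return self._response

        return _inner().__await__()


class ToyAsyncTransport:
    def get_operation(self, request):
        raise NotImplementedError  # always patched out below


async def fetch(transport):
    # The async client awaits the stub call, so a bare message would fail here.
    return await transport.get_operation({})


def test_async_stub_is_faked():
    transport = ToyAsyncTransport()
    with mock.patch.object(type(transport), "get_operation") as call:
        call.return_value = FakeAwaitableCall({"name": "operations/spam"})
        result = asyncio.run(fetch(transport))
    assert result == {"name": "operations/spam"}


test_async_stub_is_faked()
```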
+ with mock.patch.object( + type(client.transport.provision_project), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.provision_project(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_provision_project_field_headers_async(): + client = ProjectServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = project_service.ProvisionProjectRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.provision_project), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.provision_project(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_provision_project_flattened(): + client = ProjectServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.provision_project), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.provision_project( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_provision_project_flattened_error(): + client = ProjectServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.provision_project( + project_service.ProvisionProjectRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_provision_project_flattened_async(): + client = ProjectServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.provision_project), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
+ response = await client.provision_project( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_provision_project_flattened_error_async(): + client = ProjectServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.provision_project( + project_service.ProvisionProjectRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + project_service.ProvisionProjectRequest, + dict, + ], +) +def test_provision_project_rest(request_type): + client = ProjectServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.provision_project(request) + + # Establish that the response is the type that we expect. + assert response.operation.name == "operations/spam" + + +def test_provision_project_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ProjectServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.provision_project in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.provision_project + ] = mock_rpc + + request = {} + client.provision_project(request) + + # Establish that the underlying gRPC stub method was called. 
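The `*_flattened_error` tests above assert that passing a request object together with flattened keyword fields raises `ValueError`. A hypothetical shim illustrating that contract (the error text here is illustrative, not quoted from the library):

```python
import pytest


class ToyRequest:
    def __init__(self, name=""):
        self.name = name


def provision_project(request=None, *, name=None):
    # Hypothetical flattened-call shim; the generated clients enforce the
    # same rule inside their provision_project methods.
    if request is not None and name is not None:
        raise ValueError(
            "If the `request` argument is set, individual field arguments "
            "must not be set."
        )
    request = request or ToyRequest()
    if name is not None:
        request.name = name
    return request


def test_flattened_and_request_are_exclusive():
    assert provision_project(name="name_value").name == "name_value"
    with pytest.raises(ValueError):
        provision_project(ToyRequest(), name="name_value")


test_flattened_and_request_are_exclusive()
```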
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.provision_project(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_provision_project_rest_required_fields( + request_type=project_service.ProvisionProjectRequest, +): + transport_class = transports.ProjectServiceRestTransport + + request_init = {} + request_init["name"] = "" + request_init["accept_data_use_terms"] = False + request_init["data_use_terms_version"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).provision_project._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + jsonified_request["acceptDataUseTerms"] = True + jsonified_request["dataUseTermsVersion"] = "data_use_terms_version_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).provision_project._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + assert "acceptDataUseTerms" in jsonified_request + assert jsonified_request["acceptDataUseTerms"] == True + assert "dataUseTermsVersion" in jsonified_request + assert jsonified_request["dataUseTermsVersion"] == "data_use_terms_version_value" + + client = ProjectServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.provision_project(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_provision_project_rest_unset_required_fields(): + transport = transports.ProjectServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.provision_project._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(()) + & set( + ( + "name", + "acceptDataUseTerms", + "dataUseTermsVersion", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_provision_project_rest_interceptors(null_interceptor): + transport = transports.ProjectServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.ProjectServiceRestInterceptor(), + ) + client = ProjectServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.ProjectServiceRestInterceptor, "post_provision_project" + ) as post, mock.patch.object( + transports.ProjectServiceRestInterceptor, "pre_provision_project" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = project_service.ProvisionProjectRequest.pb( + project_service.ProvisionProjectRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = project_service.ProvisionProjectRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.provision_project( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_provision_project_rest_bad_request( + transport: str = "rest", request_type=project_service.ProvisionProjectRequest +): + client = ProjectServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.provision_project(request) + + +def test_provision_project_rest_flattened(): + client = ProjectServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = {"name": "projects/sample1"} + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.provision_project(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*}:provision" % client.transport._host, args[1] + ) + + +def test_provision_project_rest_flattened_error(transport: str = "rest"): + client = ProjectServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.provision_project( + project_service.ProvisionProjectRequest(), + name="name_value", + ) + + +def test_provision_project_rest_error(): + client = ProjectServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.ProjectServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = ProjectServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.ProjectServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = ProjectServiceClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. + transport = transports.ProjectServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = ProjectServiceClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. 
+ options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = ProjectServiceClient( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.ProjectServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = ProjectServiceClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.ProjectServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = ProjectServiceClient(transport=transport) + assert client.transport is transport + + +def test_transport_get_channel(): + # A client may be instantiated with a custom transport instance. + transport = transports.ProjectServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + transport = transports.ProjectServiceGrpcAsyncIOTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.ProjectServiceGrpcTransport, + transports.ProjectServiceGrpcAsyncIOTransport, + transports.ProjectServiceRestTransport, + ], +) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "rest", + ], +) +def test_transport_kind(transport_name): + transport = ProjectServiceClient.get_transport_class(transport_name)( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert transport.kind == transport_name + + +def test_transport_grpc_default(): + # A client should use the gRPC transport by default. + client = ProjectServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert isinstance( + client.transport, + transports.ProjectServiceGrpcTransport, + ) + + +def test_project_service_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.ProjectServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json", + ) + + +def test_project_service_base_transport(): + # Instantiate the base transport. + with mock.patch( + "google.cloud.discoveryengine_v1.services.project_service.transports.ProjectServiceTransport.__init__" + ) as Transport: + Transport.return_value = None + transport = transports.ProjectServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. 
+ methods = ( + "provision_project", + "get_operation", + "cancel_operation", + "list_operations", + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + with pytest.raises(NotImplementedError): + transport.close() + + # Additionally, the LRO client (a property) should + # also raise NotImplementedError + with pytest.raises(NotImplementedError): + transport.operations_client + + # Catch all for all remaining methods and properties + remainder = [ + "kind", + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + + +def test_project_service_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch( + "google.cloud.discoveryengine_v1.services.project_service.transports.ProjectServiceTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.ProjectServiceTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with( + "credentials.json", + scopes=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id="octopus", + ) + + +def test_project_service_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. + with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( + "google.cloud.discoveryengine_v1.services.project_service.transports.ProjectServiceTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.ProjectServiceTransport() + adc.assert_called_once() + + +def test_project_service_auth_adc(): + # If no credentials are provided, we should use ADC credentials. + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + ProjectServiceClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id=None, + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.ProjectServiceGrpcTransport, + transports.ProjectServiceGrpcAsyncIOTransport, + ], +) +def test_project_service_transport_auth_adc(transport_class): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
+ with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + adc.assert_called_once_with( + scopes=["1", "2"], + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id="octopus", + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.ProjectServiceGrpcTransport, + transports.ProjectServiceGrpcAsyncIOTransport, + transports.ProjectServiceRestTransport, + ], +) +def test_project_service_transport_auth_gdch_credentials(transport_class): + host = "https://language.com" + api_audience_tests = [None, "https://language2.com"] + api_audience_expect = [host, "https://language2.com"] + for t, e in zip(api_audience_tests, api_audience_expect): + with mock.patch.object(google.auth, "default", autospec=True) as adc: + gdch_mock = mock.MagicMock() + type(gdch_mock).with_gdch_audience = mock.PropertyMock( + return_value=gdch_mock + ) + adc.return_value = (gdch_mock, None) + transport_class(host=host, api_audience=t) + gdch_mock.with_gdch_audience.assert_called_once_with(e) + + +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.ProjectServiceGrpcTransport, grpc_helpers), + (transports.ProjectServiceGrpcAsyncIOTransport, grpc_helpers_async), + ], +) +def test_project_service_transport_create_channel(transport_class, grpc_helpers): + # If credentials and host are not provided, the transport class should use + # ADC credentials. + with mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + + create_channel.assert_called_with( + "discoveryengine.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + scopes=["1", "2"], + default_host="discoveryengine.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.ProjectServiceGrpcTransport, + transports.ProjectServiceGrpcAsyncIOTransport, + ], +) +def test_project_service_grpc_transport_client_cert_source_for_mtls(transport_class): + cred = ga_credentials.AnonymousCredentials() + + # Check ssl_channel_credentials is used if provided. 
+ with mock.patch.object(transport_class, "create_channel") as mock_create_channel: + mock_ssl_channel_creds = mock.Mock() + transport_class( + host="squid.clam.whelk", + credentials=cred, + ssl_channel_credentials=mock_ssl_channel_creds, + ) + mock_create_channel.assert_called_once_with( + "squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_channel_creds, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls + # is used. + with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): + with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: + transport_class( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback, + ) + expected_cert, expected_key = client_cert_source_callback() + mock_ssl_cred.assert_called_once_with( + certificate_chain=expected_cert, private_key=expected_key + ) + + +def test_project_service_http_transport_client_cert_source_for_mtls(): + cred = ga_credentials.AnonymousCredentials() + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ) as mock_configure_mtls_channel: + transports.ProjectServiceRestTransport( + credentials=cred, client_cert_source_for_mtls=client_cert_source_callback + ) + mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) + + +def test_project_service_rest_lro_client(): + client = ProjectServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + transport = client.transport + + # Ensure that we have a api-core operations client. + assert isinstance( + transport.operations_client, + operations_v1.AbstractOperationsClient, + ) + + # Ensure that subsequent calls to the property send the exact same object. 
+ assert transport.operations_client is transport.operations_client + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + "rest", + ], +) +def test_project_service_host_no_port(transport_name): + client = ProjectServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="discoveryengine.googleapis.com" + ), + transport=transport_name, + ) + assert client.transport._host == ( + "discoveryengine.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://discoveryengine.googleapis.com" + ) + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + "rest", + ], +) +def test_project_service_host_with_port(transport_name): + client = ProjectServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="discoveryengine.googleapis.com:8000" + ), + transport=transport_name, + ) + assert client.transport._host == ( + "discoveryengine.googleapis.com:8000" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://discoveryengine.googleapis.com:8000" + ) + + +@pytest.mark.parametrize( + "transport_name", + [ + "rest", + ], +) +def test_project_service_client_transport_session_collision(transport_name): + creds1 = ga_credentials.AnonymousCredentials() + creds2 = ga_credentials.AnonymousCredentials() + client1 = ProjectServiceClient( + credentials=creds1, + transport=transport_name, + ) + client2 = ProjectServiceClient( + credentials=creds2, + transport=transport_name, + ) + session1 = client1.transport.provision_project._session + session2 = client2.transport.provision_project._session + assert session1 != session2 + + +def test_project_service_grpc_transport_channel(): + channel = grpc.secure_channel("http://localhost/", grpc.local_channel_credentials()) + + # Check that channel is used if provided. + transport = transports.ProjectServiceGrpcTransport( + host="squid.clam.whelk", + channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None + + +def test_project_service_grpc_asyncio_transport_channel(): + channel = aio.secure_channel("http://localhost/", grpc.local_channel_credentials()) + + # Check that channel is used if provided. + transport = transports.ProjectServiceGrpcAsyncIOTransport( + host="squid.clam.whelk", + channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. 
+@pytest.mark.parametrize( + "transport_class", + [ + transports.ProjectServiceGrpcTransport, + transports.ProjectServiceGrpcAsyncIOTransport, + ], +) +def test_project_service_transport_channel_mtls_with_client_cert_source( + transport_class, +): + with mock.patch( + "grpc.ssl_channel_credentials", autospec=True + ) as grpc_ssl_channel_cred: + with mock.patch.object( + transport_class, "create_channel" + ) as grpc_create_channel: + mock_ssl_cred = mock.Mock() + grpc_ssl_channel_cred.return_value = mock_ssl_cred + + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + + cred = ga_credentials.AnonymousCredentials() + with pytest.warns(DeprecationWarning): + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (cred, None) + transport = transport_class( + host="squid.clam.whelk", + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=client_cert_source_callback, + ) + adc.assert_called_once() + + grpc_ssl_channel_cred.assert_called_once_with( + certificate_chain=b"cert bytes", private_key=b"key bytes" + ) + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + assert transport._ssl_channel_credentials == mock_ssl_cred + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. +@pytest.mark.parametrize( + "transport_class", + [ + transports.ProjectServiceGrpcTransport, + transports.ProjectServiceGrpcAsyncIOTransport, + ], +) +def test_project_service_transport_channel_mtls_with_adc(transport_class): + mock_ssl_cred = mock.Mock() + with mock.patch.multiple( + "google.auth.transport.grpc.SslCredentials", + __init__=mock.Mock(return_value=None), + ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), + ): + with mock.patch.object( + transport_class, "create_channel" + ) as grpc_create_channel: + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + mock_cred = mock.Mock() + + with pytest.warns(DeprecationWarning): + transport = transport_class( + host="squid.clam.whelk", + credentials=mock_cred, + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=None, + ) + + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=mock_cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + + +def test_project_service_grpc_lro_client(): + client = ProjectServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + transport = client.transport + + # Ensure that we have a api-core operations client. + assert isinstance( + transport.operations_client, + operations_v1.OperationsClient, + ) + + # Ensure that subsequent calls to the property send the exact same object. 
+ assert transport.operations_client is transport.operations_client + + +def test_project_service_grpc_lro_async_client(): + client = ProjectServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + transport = client.transport + + # Ensure that we have a api-core operations client. + assert isinstance( + transport.operations_client, + operations_v1.OperationsAsyncClient, + ) + + # Ensure that subsequent calls to the property send the exact same object. + assert transport.operations_client is transport.operations_client + + +def test_project_path(): + project = "squid" + expected = "projects/{project}".format( + project=project, + ) + actual = ProjectServiceClient.project_path(project) + assert expected == actual + + +def test_parse_project_path(): + expected = { + "project": "clam", + } + path = ProjectServiceClient.project_path(**expected) + + # Check that the path construction is reversible. + actual = ProjectServiceClient.parse_project_path(path) + assert expected == actual + + +def test_common_billing_account_path(): + billing_account = "whelk" + expected = "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + actual = ProjectServiceClient.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "octopus", + } + path = ProjectServiceClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. + actual = ProjectServiceClient.parse_common_billing_account_path(path) + assert expected == actual + + +def test_common_folder_path(): + folder = "oyster" + expected = "folders/{folder}".format( + folder=folder, + ) + actual = ProjectServiceClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "nudibranch", + } + path = ProjectServiceClient.common_folder_path(**expected) + + # Check that the path construction is reversible. + actual = ProjectServiceClient.parse_common_folder_path(path) + assert expected == actual + + +def test_common_organization_path(): + organization = "cuttlefish" + expected = "organizations/{organization}".format( + organization=organization, + ) + actual = ProjectServiceClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "mussel", + } + path = ProjectServiceClient.common_organization_path(**expected) + + # Check that the path construction is reversible. + actual = ProjectServiceClient.parse_common_organization_path(path) + assert expected == actual + + +def test_common_project_path(): + project = "winkle" + expected = "projects/{project}".format( + project=project, + ) + actual = ProjectServiceClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "nautilus", + } + path = ProjectServiceClient.common_project_path(**expected) + + # Check that the path construction is reversible. 
+ actual = ProjectServiceClient.parse_common_project_path(path) + assert expected == actual + + +def test_common_location_path(): + project = "scallop" + location = "abalone" + expected = "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) + actual = ProjectServiceClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "squid", + "location": "clam", + } + path = ProjectServiceClient.common_location_path(**expected) + + # Check that the path construction is reversible. + actual = ProjectServiceClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object( + transports.ProjectServiceTransport, "_prep_wrapped_messages" + ) as prep: + client = ProjectServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object( + transports.ProjectServiceTransport, "_prep_wrapped_messages" + ) as prep: + transport_class = ProjectServiceClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + +@pytest.mark.asyncio +async def test_transport_close_async(): + client = ProjectServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + with mock.patch.object( + type(getattr(client.transport, "grpc_channel")), "close" + ) as close: + async with client: + close.assert_not_called() + close.assert_called_once() + + +def test_cancel_operation_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.CancelOperationRequest +): + client = ProjectServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/operations/sample2"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.cancel_operation(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.CancelOperationRequest, + dict, + ], +) +def test_cancel_operation_rest(request_type): + client = ProjectServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {"name": "projects/sample1/operations/sample2"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "{}" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.cancel_operation(request) + + # Establish that the response is the type that we expect. 
+ assert response is None + + +def test_get_operation_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.GetOperationRequest +): + client = ProjectServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/operations/sample2"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_operation(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.GetOperationRequest, + dict, + ], +) +def test_get_operation_rest(request_type): + client = ProjectServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {"name": "projects/sample1/operations/sample2"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_operation(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + + +def test_list_operations_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.ListOperationsRequest +): + client = ProjectServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict({"name": "projects/sample1"}, request) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_operations(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.ListOperationsRequest, + dict, + ], +) +def test_list_operations_rest(request_type): + client = ProjectServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {"name": "projects/sample1"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = operations_pb2.ListOperationsResponse() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_operations(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) + + +def test_cancel_operation(transport: str = "grpc"): + client = ProjectServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.CancelOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_cancel_operation_async(transport: str = "grpc_asyncio"): + client = ProjectServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.CancelOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +def test_cancel_operation_field_headers(): + client = ProjectServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.CancelOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + call.return_value = None + + client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_cancel_operation_field_headers_async(): + client = ProjectServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.CancelOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_cancel_operation_from_dict(): + client = ProjectServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + + response = client.cancel_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_cancel_operation_from_dict_async(): + client = ProjectServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.cancel_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_get_operation(transport: str = "grpc"): + client = ProjectServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.GetOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation() + response = client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + + +@pytest.mark.asyncio +async def test_get_operation_async(transport: str = "grpc_asyncio"): + client = ProjectServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.GetOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + response = await client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + + +def test_get_operation_field_headers(): + client = ProjectServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.GetOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + call.return_value = operations_pb2.Operation() + + client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_operation_field_headers_async(): + client = ProjectServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.GetOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + await client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_get_operation_from_dict(): + client = ProjectServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation() + + response = client.get_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_get_operation_from_dict_async(): + client = ProjectServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + response = await client.get_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_list_operations(transport: str = "grpc"): + client = ProjectServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.ListOperationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.ListOperationsResponse() + response = client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) + + +@pytest.mark.asyncio +async def test_list_operations_async(transport: str = "grpc_asyncio"): + client = ProjectServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.ListOperationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + response = await client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) + + +def test_list_operations_field_headers(): + client = ProjectServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.ListOperationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + call.return_value = operations_pb2.ListOperationsResponse() + + client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_operations_field_headers_async(): + client = ProjectServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = operations_pb2.ListOperationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + await client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_list_operations_from_dict(): + client = ProjectServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.ListOperationsResponse() + + response = client.list_operations( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_list_operations_from_dict_async(): + client = ProjectServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + response = await client.list_operations( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_transport_close(): + transports = { + "rest": "_session", + "grpc": "_grpc_channel", + } + + for transport, close_name in transports.items(): + client = ProjectServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + with mock.patch.object( + type(getattr(client.transport, close_name)), "close" + ) as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +def test_client_ctx(): + transports = [ + "rest", + "grpc", + ] + for transport in transports: + client = ProjectServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + # Test client calls underlying transport. 
+ with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() + + +@pytest.mark.parametrize( + "client_class,transport_class", + [ + (ProjectServiceClient, transports.ProjectServiceGrpcTransport), + (ProjectServiceAsyncClient, transports.ProjectServiceGrpcAsyncIOTransport), + ], +) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) diff --git a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1/test_rank_service.py b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1/test_rank_service.py new file mode 100644 index 000000000000..b91152a97f60 --- /dev/null +++ b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1/test_rank_service.py @@ -0,0 +1,2968 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import os + +# try/except added for compatibility with python < 3.8 +try: + from unittest import mock + from unittest.mock import AsyncMock # pragma: NO COVER +except ImportError: # pragma: NO COVER + import mock + +from collections.abc import Iterable +import json +import math + +from google.api_core import gapic_v1, grpc_helpers, grpc_helpers_async, path_template +from google.api_core import api_core_version, client_options +from google.api_core import exceptions as core_exceptions +import google.auth +from google.auth import credentials as ga_credentials +from google.auth.exceptions import MutualTLSChannelError +from google.cloud.location import locations_pb2 +from google.longrunning import operations_pb2 # type: ignore +from google.oauth2 import service_account +from google.protobuf import json_format +import grpc +from grpc.experimental import aio +from proto.marshal.rules import wrappers +from proto.marshal.rules.dates import DurationRule, TimestampRule +import pytest +from requests import PreparedRequest, Request, Response +from requests.sessions import Session + +from google.cloud.discoveryengine_v1.services.rank_service import ( + RankServiceAsyncClient, + RankServiceClient, + transports, +) +from google.cloud.discoveryengine_v1.types import rank_service + + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + + +# If default endpoint is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. +def modify_default_endpoint(client): + return ( + "foo.googleapis.com" + if ("localhost" in client.DEFAULT_ENDPOINT) + else client.DEFAULT_ENDPOINT + ) + + +# If default endpoint template is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint template so the client can produce a different +# mtls endpoint for endpoint testing purposes. 
+def modify_default_endpoint_template(client): + return ( + "test.{UNIVERSE_DOMAIN}" + if ("localhost" in client._DEFAULT_ENDPOINT_TEMPLATE) + else client._DEFAULT_ENDPOINT_TEMPLATE + ) + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert RankServiceClient._get_default_mtls_endpoint(None) is None + assert ( + RankServiceClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint + ) + assert ( + RankServiceClient._get_default_mtls_endpoint(api_mtls_endpoint) + == api_mtls_endpoint + ) + assert ( + RankServiceClient._get_default_mtls_endpoint(sandbox_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + RankServiceClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) + == sandbox_mtls_endpoint + ) + assert RankServiceClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi + + +def test__read_environment_variables(): + assert RankServiceClient._read_environment_variables() == (False, "auto", None) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + assert RankServiceClient._read_environment_variables() == (True, "auto", None) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + assert RankServiceClient._read_environment_variables() == (False, "auto", None) + + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError) as excinfo: + RankServiceClient._read_environment_variables() + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + assert RankServiceClient._read_environment_variables() == (False, "never", None) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + assert RankServiceClient._read_environment_variables() == ( + False, + "always", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}): + assert RankServiceClient._read_environment_variables() == (False, "auto", None) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + RankServiceClient._read_environment_variables() + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + with mock.patch.dict(os.environ, {"GOOGLE_CLOUD_UNIVERSE_DOMAIN": "foo.com"}): + assert RankServiceClient._read_environment_variables() == ( + False, + "auto", + "foo.com", + ) + + +def test__get_client_cert_source(): + mock_provided_cert_source = mock.Mock() + mock_default_cert_source = mock.Mock() + + assert RankServiceClient._get_client_cert_source(None, False) is None + assert ( + RankServiceClient._get_client_cert_source(mock_provided_cert_source, False) + is None + ) + assert ( + RankServiceClient._get_client_cert_source(mock_provided_cert_source, True) + == mock_provided_cert_source + ) + + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", return_value=True + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=mock_default_cert_source, + ): + assert ( + 
RankServiceClient._get_client_cert_source(None, True) + is mock_default_cert_source + ) + assert ( + RankServiceClient._get_client_cert_source( + mock_provided_cert_source, "true" + ) + is mock_provided_cert_source + ) + + +@mock.patch.object( + RankServiceClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(RankServiceClient), +) +@mock.patch.object( + RankServiceAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(RankServiceAsyncClient), +) +def test__get_api_endpoint(): + api_override = "foo.com" + mock_client_cert_source = mock.Mock() + default_universe = RankServiceClient._DEFAULT_UNIVERSE + default_endpoint = RankServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=default_universe + ) + mock_universe = "bar.com" + mock_endpoint = RankServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=mock_universe + ) + + assert ( + RankServiceClient._get_api_endpoint( + api_override, mock_client_cert_source, default_universe, "always" + ) + == api_override + ) + assert ( + RankServiceClient._get_api_endpoint( + None, mock_client_cert_source, default_universe, "auto" + ) + == RankServiceClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + RankServiceClient._get_api_endpoint(None, None, default_universe, "auto") + == default_endpoint + ) + assert ( + RankServiceClient._get_api_endpoint(None, None, default_universe, "always") + == RankServiceClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + RankServiceClient._get_api_endpoint( + None, mock_client_cert_source, default_universe, "always" + ) + == RankServiceClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + RankServiceClient._get_api_endpoint(None, None, mock_universe, "never") + == mock_endpoint + ) + assert ( + RankServiceClient._get_api_endpoint(None, None, default_universe, "never") + == default_endpoint + ) + + with pytest.raises(MutualTLSChannelError) as excinfo: + RankServiceClient._get_api_endpoint( + None, mock_client_cert_source, mock_universe, "auto" + ) + assert ( + str(excinfo.value) + == "mTLS is not supported in any universe other than googleapis.com." + ) + + +def test__get_universe_domain(): + client_universe_domain = "foo.com" + universe_domain_env = "bar.com" + + assert ( + RankServiceClient._get_universe_domain( + client_universe_domain, universe_domain_env + ) + == client_universe_domain + ) + assert ( + RankServiceClient._get_universe_domain(None, universe_domain_env) + == universe_domain_env + ) + assert ( + RankServiceClient._get_universe_domain(None, None) + == RankServiceClient._DEFAULT_UNIVERSE + ) + + with pytest.raises(ValueError) as excinfo: + RankServiceClient._get_universe_domain("", None) + assert str(excinfo.value) == "Universe Domain cannot be an empty string." + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (RankServiceClient, transports.RankServiceGrpcTransport, "grpc"), + (RankServiceClient, transports.RankServiceRestTransport, "rest"), + ], +) +def test__validate_universe_domain(client_class, transport_class, transport_name): + client = client_class( + transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) + ) + assert client._validate_universe_domain() == True + + # Test the case when universe is already validated. + assert client._validate_universe_domain() == True + + if transport_name == "grpc": + # Test the case where credentials are provided by the + # `local_channel_credentials`. The default universes in both match. 
+ channel = grpc.secure_channel( + "http://localhost/", grpc.local_channel_credentials() + ) + client = client_class(transport=transport_class(channel=channel)) + assert client._validate_universe_domain() == True + + # Test the case where credentials do not exist: e.g. a transport is provided + # with no credentials. Validation should still succeed because there is no + # mismatch with non-existent credentials. + channel = grpc.secure_channel( + "http://localhost/", grpc.local_channel_credentials() + ) + transport = transport_class(channel=channel) + transport._credentials = None + client = client_class(transport=transport) + assert client._validate_universe_domain() == True + + # TODO: This is needed to cater for older versions of google-auth + # Make this test unconditional once the minimum supported version of + # google-auth becomes 2.23.0 or higher. + google_auth_major, google_auth_minor = [ + int(part) for part in google.auth.__version__.split(".")[0:2] + ] + if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): + credentials = ga_credentials.AnonymousCredentials() + credentials._universe_domain = "foo.com" + # Test the case when there is a universe mismatch from the credentials. + client = client_class(transport=transport_class(credentials=credentials)) + with pytest.raises(ValueError) as excinfo: + client._validate_universe_domain() + assert ( + str(excinfo.value) + == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." + ) + + # Test the case when there is a universe mismatch from the client. + # + # TODO: Make this test unconditional once the minimum supported version of + # google-api-core becomes 2.15.0 or higher. + api_core_major, api_core_minor = [ + int(part) for part in api_core_version.__version__.split(".")[0:2] + ] + if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): + client = client_class( + client_options={"universe_domain": "bar.com"}, + transport=transport_class( + credentials=ga_credentials.AnonymousCredentials(), + ), + ) + with pytest.raises(ValueError) as excinfo: + client._validate_universe_domain() + assert ( + str(excinfo.value) + == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." 
+ ) + + # Test that ValueError is raised if universe_domain is provided via client options and credentials is None + with pytest.raises(ValueError): + client._compare_universes("foo.bar", None) + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (RankServiceClient, "grpc"), + (RankServiceAsyncClient, "grpc_asyncio"), + (RankServiceClient, "rest"), + ], +) +def test_rank_service_client_from_service_account_info(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_info" + ) as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info, transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + "discoveryengine.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://discoveryengine.googleapis.com" + ) + + +@pytest.mark.parametrize( + "transport_class,transport_name", + [ + (transports.RankServiceGrpcTransport, "grpc"), + (transports.RankServiceGrpcAsyncIOTransport, "grpc_asyncio"), + (transports.RankServiceRestTransport, "rest"), + ], +) +def test_rank_service_client_service_account_always_use_jwt( + transport_class, transport_name +): + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) + + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=False) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (RankServiceClient, "grpc"), + (RankServiceAsyncClient, "grpc_asyncio"), + (RankServiceClient, "rest"), + ], +) +def test_rank_service_client_from_service_account_file(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_file" + ) as factory: + factory.return_value = creds + client = client_class.from_service_account_file( + "dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + client = client_class.from_service_account_json( + "dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + "discoveryengine.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://discoveryengine.googleapis.com" + ) + + +def test_rank_service_client_get_transport_class(): + transport = RankServiceClient.get_transport_class() + available_transports = [ + transports.RankServiceGrpcTransport, + transports.RankServiceRestTransport, + ] + assert transport in available_transports + + transport = RankServiceClient.get_transport_class("grpc") + assert transport == transports.RankServiceGrpcTransport + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (RankServiceClient, 
transports.RankServiceGrpcTransport, "grpc"), + ( + RankServiceAsyncClient, + transports.RankServiceGrpcAsyncIOTransport, + "grpc_asyncio", + ), + (RankServiceClient, transports.RankServiceRestTransport, "rest"), + ], +) +@mock.patch.object( + RankServiceClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(RankServiceClient), +) +@mock.patch.object( + RankServiceAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(RankServiceAsyncClient), +) +def test_rank_service_client_client_options( + client_class, transport_class, transport_name +): + # Check that if channel is provided we won't create a new one. + with mock.patch.object(RankServiceClient, "get_transport_class") as gtc: + transport = transport_class(credentials=ga_credentials.AnonymousCredentials()) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object(RankServiceClient, "get_transport_class") as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. + options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name, client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + client = client_class(transport=transport_name) + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError) as excinfo: + client = client_class(transport=transport_name) + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + # Check the case api_endpoint is provided + options = client_options.ClientOptions( + api_audience="https://language.googleapis.com" + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience="https://language.googleapis.com", + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,use_client_cert_env", + [ + (RankServiceClient, transports.RankServiceGrpcTransport, "grpc", "true"), + ( + RankServiceAsyncClient, + transports.RankServiceGrpcAsyncIOTransport, + "grpc_asyncio", + "true", + ), + (RankServiceClient, transports.RankServiceGrpcTransport, "grpc", "false"), + ( + RankServiceAsyncClient, + transports.RankServiceGrpcAsyncIOTransport, + "grpc_asyncio", + "false", + ), + (RankServiceClient, transports.RankServiceRestTransport, "rest", "true"), + (RankServiceClient, transports.RankServiceRestTransport, "rest", "false"), + ], +) +@mock.patch.object( + RankServiceClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(RankServiceClient), +) +@mock.patch.object( + RankServiceAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(RankServiceAsyncClient), +) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_rank_service_client_mtls_env_auto( + client_class, transport_class, transport_name, use_client_cert_env +): + # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. + + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
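+    #
+    # Rough sketch of the outcome asserted below (a summary of this test's
+    # expectations, not the client's internal implementation):
+    #
+    #   if use_client_cert_env == "true" and a cert source is available:
+    #       host = client.DEFAULT_MTLS_ENDPOINT, cert source = the configured callback
+    #   else:
+    #       host = client._DEFAULT_ENDPOINT_TEMPLATE.format(
+    #           UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), cert source = None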
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + options = client_options.ClientOptions( + client_cert_source=client_cert_source_callback + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ) + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=client_cert_source_callback, + ): + if use_client_cert_env == "false": + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ) + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case client_cert_source and ADC client cert are not provided. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize("client_class", [RankServiceClient, RankServiceAsyncClient]) +@mock.patch.object( + RankServiceClient, "DEFAULT_ENDPOINT", modify_default_endpoint(RankServiceClient) +) +@mock.patch.object( + RankServiceAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(RankServiceAsyncClient), +) +def test_rank_service_client_get_mtls_endpoint_and_cert_source(client_class): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". 
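+    # With client certificates enabled, an explicitly provided api_endpoint and
+    # cert source are expected to be returned unchanged.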
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=mock_client_cert_source, + ): + ( + api_endpoint, + cert_source, + ) = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + client_class.get_mtls_endpoint_and_cert_source() + + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. 
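+    # Any value other than "true" or "false" is expected to raise ValueError
+    # with the message asserted below.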
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError) as excinfo: + client_class.get_mtls_endpoint_and_cert_source() + + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + + +@pytest.mark.parametrize("client_class", [RankServiceClient, RankServiceAsyncClient]) +@mock.patch.object( + RankServiceClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(RankServiceClient), +) +@mock.patch.object( + RankServiceAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(RankServiceAsyncClient), +) +def test_rank_service_client_client_api_endpoint(client_class): + mock_client_cert_source = client_cert_source_callback + api_override = "foo.com" + default_universe = RankServiceClient._DEFAULT_UNIVERSE + default_endpoint = RankServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=default_universe + ) + mock_universe = "bar.com" + mock_endpoint = RankServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=mock_universe + ) + + # If ClientOptions.api_endpoint is set and GOOGLE_API_USE_CLIENT_CERTIFICATE="true", + # use ClientOptions.api_endpoint as the api endpoint regardless. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ): + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=api_override + ) + client = client_class( + client_options=options, + credentials=ga_credentials.AnonymousCredentials(), + ) + assert client.api_endpoint == api_override + + # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="never", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + client = client_class(credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == default_endpoint + + # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="always", + # use the DEFAULT_MTLS_ENDPOINT as the api endpoint. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + client = client_class(credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + + # If ClientOptions.api_endpoint is not set, GOOGLE_API_USE_MTLS_ENDPOINT="auto" (default), + # GOOGLE_API_USE_CLIENT_CERTIFICATE="false" (default), default cert source doesn't exist, + # and ClientOptions.universe_domain="bar.com", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with universe domain as the api endpoint. 
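+    # The hasattr() check below keeps this test passing on versions of
+    # google-api-core whose ClientOptions do not yet expose universe_domain.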
+ options = client_options.ClientOptions() + universe_exists = hasattr(options, "universe_domain") + if universe_exists: + options = client_options.ClientOptions(universe_domain=mock_universe) + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + else: + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + assert client.api_endpoint == ( + mock_endpoint if universe_exists else default_endpoint + ) + assert client.universe_domain == ( + mock_universe if universe_exists else default_universe + ) + + # If ClientOptions does not have a universe domain attribute and GOOGLE_API_USE_MTLS_ENDPOINT="never", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. + options = client_options.ClientOptions() + if hasattr(options, "universe_domain"): + delattr(options, "universe_domain") + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + assert client.api_endpoint == default_endpoint + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (RankServiceClient, transports.RankServiceGrpcTransport, "grpc"), + ( + RankServiceAsyncClient, + transports.RankServiceGrpcAsyncIOTransport, + "grpc_asyncio", + ), + (RankServiceClient, transports.RankServiceRestTransport, "rest"), + ], +) +def test_rank_service_client_client_options_scopes( + client_class, transport_class, transport_name +): + # Check the case scopes are provided. + options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + (RankServiceClient, transports.RankServiceGrpcTransport, "grpc", grpc_helpers), + ( + RankServiceAsyncClient, + transports.RankServiceGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + (RankServiceClient, transports.RankServiceRestTransport, "rest", None), + ], +) +def test_rank_service_client_client_options_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. 
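+    # The credentials_file value should be forwarded to the transport as-is;
+    # no credentials object is constructed at this point.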
+ options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +def test_rank_service_client_client_options_from_dict(): + with mock.patch( + "google.cloud.discoveryengine_v1.services.rank_service.transports.RankServiceGrpcTransport.__init__" + ) as grpc_transport: + grpc_transport.return_value = None + client = RankServiceClient(client_options={"api_endpoint": "squid.clam.whelk"}) + grpc_transport.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + (RankServiceClient, transports.RankServiceGrpcTransport, "grpc", grpc_helpers), + ( + RankServiceAsyncClient, + transports.RankServiceGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + ], +) +def test_rank_service_client_create_channel_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. + options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # test that the credentials from file are saved and used as the credentials. 
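+    # load_credentials_from_file, ADC and create_channel are all mocked so the
+    # create_channel call can be inspected for the file-based credentials.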
+ with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel" + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + file_creds = ga_credentials.AnonymousCredentials() + load_creds.return_value = (file_creds, None) + adc.return_value = (creds, None) + client = client_class(client_options=options, transport=transport_name) + create_channel.assert_called_with( + "discoveryengine.googleapis.com:443", + credentials=file_creds, + credentials_file=None, + quota_project_id=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + scopes=None, + default_host="discoveryengine.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "request_type", + [ + rank_service.RankRequest, + dict, + ], +) +def test_rank(request_type, transport: str = "grpc"): + client = RankServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.rank), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = rank_service.RankResponse() + response = client.rank(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = rank_service.RankRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, rank_service.RankResponse) + + +def test_rank_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = RankServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.rank), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.rank() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == rank_service.RankRequest() + + +def test_rank_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = RankServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = rank_service.RankRequest( + ranking_config="ranking_config_value", + model="model_value", + query="query_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.rank), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.rank(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == rank_service.RankRequest( + ranking_config="ranking_config_value", + model="model_value", + query="query_value", + ) + + +def test_rank_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RankServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.rank in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.rank] = mock_rpc + request = {} + client.rank(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.rank(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_rank_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = RankServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.rank), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + rank_service.RankResponse() + ) + response = await client.rank() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == rank_service.RankRequest() + + +@pytest.mark.asyncio +async def test_rank_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = RankServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.rank in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[ + client._client._transport.rank + ] = mock_object + + request = {} + await client.rank(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_object.call_count == 1 + + await client.rank(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + +@pytest.mark.asyncio +async def test_rank_async( + transport: str = "grpc_asyncio", request_type=rank_service.RankRequest +): + client = RankServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.rank), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + rank_service.RankResponse() + ) + response = await client.rank(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = rank_service.RankRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, rank_service.RankResponse) + + +@pytest.mark.asyncio +async def test_rank_async_from_dict(): + await test_rank_async(request_type=dict) + + +def test_rank_field_headers(): + client = RankServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = rank_service.RankRequest() + + request.ranking_config = "ranking_config_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.rank), "__call__") as call: + call.return_value = rank_service.RankResponse() + client.rank(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "ranking_config=ranking_config_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_rank_field_headers_async(): + client = RankServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = rank_service.RankRequest() + + request.ranking_config = "ranking_config_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.rank), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + rank_service.RankResponse() + ) + await client.rank(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "ranking_config=ranking_config_value", + ) in kw["metadata"] + + +@pytest.mark.parametrize( + "request_type", + [ + rank_service.RankRequest, + dict, + ], +) +def test_rank_rest(request_type): + client = RankServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "ranking_config": "projects/sample1/locations/sample2/rankingConfigs/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = rank_service.RankResponse() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = rank_service.RankResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.rank(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, rank_service.RankResponse) + + +def test_rank_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RankServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.rank in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.rank] = mock_rpc + + request = {} + client.rank(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.rank(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_rank_rest_required_fields(request_type=rank_service.RankRequest): + transport_class = transports.RankServiceRestTransport + + request_init = {} + request_init["ranking_config"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).rank._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["rankingConfig"] = "ranking_config_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).rank._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "rankingConfig" in jsonified_request + assert jsonified_request["rankingConfig"] == "ranking_config_value" + + client = RankServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = rank_service.RankResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = rank_service.RankResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.rank(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_rank_rest_unset_required_fields(): + transport = transports.RankServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.rank._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(()) + & set( + ( + "rankingConfig", + "records", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_rank_rest_interceptors(null_interceptor): + transport = transports.RankServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.RankServiceRestInterceptor(), + ) + client = RankServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.RankServiceRestInterceptor, "post_rank" + ) as post, mock.patch.object( + transports.RankServiceRestInterceptor, "pre_rank" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = rank_service.RankRequest.pb(rank_service.RankRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = rank_service.RankResponse.to_json( + rank_service.RankResponse() + ) + + request = rank_service.RankRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = rank_service.RankResponse() + + client.rank( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_rank_rest_bad_request( + transport: str = "rest", request_type=rank_service.RankRequest +): + client = RankServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "ranking_config": "projects/sample1/locations/sample2/rankingConfigs/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
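+    # A 400 response from the mocked session is expected to surface as
+    # core_exceptions.BadRequest.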
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.rank(request) + + +def test_rank_rest_error(): + client = RankServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.RankServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = RankServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.RankServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = RankServiceClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. + transport = transports.RankServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = RankServiceClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = RankServiceClient( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.RankServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = RankServiceClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.RankServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = RankServiceClient(transport=transport) + assert client.transport is transport + + +def test_transport_get_channel(): + # A client may be instantiated with a custom transport instance. + transport = transports.RankServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + transport = transports.RankServiceGrpcAsyncIOTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.RankServiceGrpcTransport, + transports.RankServiceGrpcAsyncIOTransport, + transports.RankServiceRestTransport, + ], +) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. 
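+    # google.auth.default() is patched so that constructing the transport with
+    # no credentials falls back to ADC exactly once.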
+ with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "rest", + ], +) +def test_transport_kind(transport_name): + transport = RankServiceClient.get_transport_class(transport_name)( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert transport.kind == transport_name + + +def test_transport_grpc_default(): + # A client should use the gRPC transport by default. + client = RankServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert isinstance( + client.transport, + transports.RankServiceGrpcTransport, + ) + + +def test_rank_service_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.RankServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json", + ) + + +def test_rank_service_base_transport(): + # Instantiate the base transport. + with mock.patch( + "google.cloud.discoveryengine_v1.services.rank_service.transports.RankServiceTransport.__init__" + ) as Transport: + Transport.return_value = None + transport = transports.RankServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. + methods = ( + "rank", + "get_operation", + "cancel_operation", + "list_operations", + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + with pytest.raises(NotImplementedError): + transport.close() + + # Catch all for all remaining methods and properties + remainder = [ + "kind", + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + + +def test_rank_service_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch( + "google.cloud.discoveryengine_v1.services.rank_service.transports.RankServiceTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.RankServiceTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with( + "credentials.json", + scopes=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id="octopus", + ) + + +def test_rank_service_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. + with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( + "google.cloud.discoveryengine_v1.services.rank_service.transports.RankServiceTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.RankServiceTransport() + adc.assert_called_once() + + +def test_rank_service_auth_adc(): + # If no credentials are provided, we should use ADC credentials. 
+ with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + RankServiceClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id=None, + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.RankServiceGrpcTransport, + transports.RankServiceGrpcAsyncIOTransport, + ], +) +def test_rank_service_transport_auth_adc(transport_class): + # If credentials and host are not provided, the transport class should use + # ADC credentials. + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + adc.assert_called_once_with( + scopes=["1", "2"], + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id="octopus", + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.RankServiceGrpcTransport, + transports.RankServiceGrpcAsyncIOTransport, + transports.RankServiceRestTransport, + ], +) +def test_rank_service_transport_auth_gdch_credentials(transport_class): + host = "https://language.com" + api_audience_tests = [None, "https://language2.com"] + api_audience_expect = [host, "https://language2.com"] + for t, e in zip(api_audience_tests, api_audience_expect): + with mock.patch.object(google.auth, "default", autospec=True) as adc: + gdch_mock = mock.MagicMock() + type(gdch_mock).with_gdch_audience = mock.PropertyMock( + return_value=gdch_mock + ) + adc.return_value = (gdch_mock, None) + transport_class(host=host, api_audience=t) + gdch_mock.with_gdch_audience.assert_called_once_with(e) + + +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.RankServiceGrpcTransport, grpc_helpers), + (transports.RankServiceGrpcAsyncIOTransport, grpc_helpers_async), + ], +) +def test_rank_service_transport_create_channel(transport_class, grpc_helpers): + # If credentials and host are not provided, the transport class should use + # ADC credentials. + with mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + + create_channel.assert_called_with( + "discoveryengine.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + scopes=["1", "2"], + default_host="discoveryengine.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "transport_class", + [transports.RankServiceGrpcTransport, transports.RankServiceGrpcAsyncIOTransport], +) +def test_rank_service_grpc_transport_client_cert_source_for_mtls(transport_class): + cred = ga_credentials.AnonymousCredentials() + + # Check ssl_channel_credentials is used if provided. 
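+    # When ssl_channel_credentials is supplied it should be passed through to
+    # create_channel untouched; client_cert_source_for_mtls is only consulted
+    # when it is absent (second case below).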
+ with mock.patch.object(transport_class, "create_channel") as mock_create_channel: + mock_ssl_channel_creds = mock.Mock() + transport_class( + host="squid.clam.whelk", + credentials=cred, + ssl_channel_credentials=mock_ssl_channel_creds, + ) + mock_create_channel.assert_called_once_with( + "squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_channel_creds, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls + # is used. + with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): + with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: + transport_class( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback, + ) + expected_cert, expected_key = client_cert_source_callback() + mock_ssl_cred.assert_called_once_with( + certificate_chain=expected_cert, private_key=expected_key + ) + + +def test_rank_service_http_transport_client_cert_source_for_mtls(): + cred = ga_credentials.AnonymousCredentials() + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ) as mock_configure_mtls_channel: + transports.RankServiceRestTransport( + credentials=cred, client_cert_source_for_mtls=client_cert_source_callback + ) + mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + "rest", + ], +) +def test_rank_service_host_no_port(transport_name): + client = RankServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="discoveryengine.googleapis.com" + ), + transport=transport_name, + ) + assert client.transport._host == ( + "discoveryengine.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://discoveryengine.googleapis.com" + ) + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + "rest", + ], +) +def test_rank_service_host_with_port(transport_name): + client = RankServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="discoveryengine.googleapis.com:8000" + ), + transport=transport_name, + ) + assert client.transport._host == ( + "discoveryengine.googleapis.com:8000" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://discoveryengine.googleapis.com:8000" + ) + + +@pytest.mark.parametrize( + "transport_name", + [ + "rest", + ], +) +def test_rank_service_client_transport_session_collision(transport_name): + creds1 = ga_credentials.AnonymousCredentials() + creds2 = ga_credentials.AnonymousCredentials() + client1 = RankServiceClient( + credentials=creds1, + transport=transport_name, + ) + client2 = RankServiceClient( + credentials=creds2, + transport=transport_name, + ) + session1 = client1.transport.rank._session + session2 = client2.transport.rank._session + assert session1 != session2 + + +def test_rank_service_grpc_transport_channel(): + channel = grpc.secure_channel("http://localhost/", grpc.local_channel_credentials()) + + # Check that channel is used if provided. 
+    transport = transports.RankServiceGrpcTransport(
+        host="squid.clam.whelk",
+        channel=channel,
+    )
+    assert transport.grpc_channel == channel
+    assert transport._host == "squid.clam.whelk:443"
+    assert transport._ssl_channel_credentials is None
+
+
+def test_rank_service_grpc_asyncio_transport_channel():
+    channel = aio.secure_channel("http://localhost/", grpc.local_channel_credentials())
+
+    # Check that channel is used if provided.
+    transport = transports.RankServiceGrpcAsyncIOTransport(
+        host="squid.clam.whelk",
+        channel=channel,
+    )
+    assert transport.grpc_channel == channel
+    assert transport._host == "squid.clam.whelk:443"
+    assert transport._ssl_channel_credentials is None
+
+
+# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are
+# removed from grpc/grpc_asyncio transport constructor.
+@pytest.mark.parametrize(
+    "transport_class",
+    [transports.RankServiceGrpcTransport, transports.RankServiceGrpcAsyncIOTransport],
+)
+def test_rank_service_transport_channel_mtls_with_client_cert_source(transport_class):
+    with mock.patch(
+        "grpc.ssl_channel_credentials", autospec=True
+    ) as grpc_ssl_channel_cred:
+        with mock.patch.object(
+            transport_class, "create_channel"
+        ) as grpc_create_channel:
+            mock_ssl_cred = mock.Mock()
+            grpc_ssl_channel_cred.return_value = mock_ssl_cred
+
+            mock_grpc_channel = mock.Mock()
+            grpc_create_channel.return_value = mock_grpc_channel
+
+            cred = ga_credentials.AnonymousCredentials()
+            with pytest.warns(DeprecationWarning):
+                with mock.patch.object(google.auth, "default") as adc:
+                    adc.return_value = (cred, None)
+                    transport = transport_class(
+                        host="squid.clam.whelk",
+                        api_mtls_endpoint="mtls.squid.clam.whelk",
+                        client_cert_source=client_cert_source_callback,
+                    )
+                    adc.assert_called_once()
+
+            grpc_ssl_channel_cred.assert_called_once_with(
+                certificate_chain=b"cert bytes", private_key=b"key bytes"
+            )
+            grpc_create_channel.assert_called_once_with(
+                "mtls.squid.clam.whelk:443",
+                credentials=cred,
+                credentials_file=None,
+                scopes=None,
+                ssl_credentials=mock_ssl_cred,
+                quota_project_id=None,
+                options=[
+                    ("grpc.max_send_message_length", -1),
+                    ("grpc.max_receive_message_length", -1),
+                ],
+            )
+            assert transport.grpc_channel == mock_grpc_channel
+            assert transport._ssl_channel_credentials == mock_ssl_cred
+
+
+# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are
+# removed from grpc/grpc_asyncio transport constructor.
+@pytest.mark.parametrize( + "transport_class", + [transports.RankServiceGrpcTransport, transports.RankServiceGrpcAsyncIOTransport], +) +def test_rank_service_transport_channel_mtls_with_adc(transport_class): + mock_ssl_cred = mock.Mock() + with mock.patch.multiple( + "google.auth.transport.grpc.SslCredentials", + __init__=mock.Mock(return_value=None), + ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), + ): + with mock.patch.object( + transport_class, "create_channel" + ) as grpc_create_channel: + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + mock_cred = mock.Mock() + + with pytest.warns(DeprecationWarning): + transport = transport_class( + host="squid.clam.whelk", + credentials=mock_cred, + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=None, + ) + + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=mock_cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + + +def test_ranking_config_path(): + project = "squid" + location = "clam" + ranking_config = "whelk" + expected = "projects/{project}/locations/{location}/rankingConfigs/{ranking_config}".format( + project=project, + location=location, + ranking_config=ranking_config, + ) + actual = RankServiceClient.ranking_config_path(project, location, ranking_config) + assert expected == actual + + +def test_parse_ranking_config_path(): + expected = { + "project": "octopus", + "location": "oyster", + "ranking_config": "nudibranch", + } + path = RankServiceClient.ranking_config_path(**expected) + + # Check that the path construction is reversible. + actual = RankServiceClient.parse_ranking_config_path(path) + assert expected == actual + + +def test_common_billing_account_path(): + billing_account = "cuttlefish" + expected = "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + actual = RankServiceClient.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "mussel", + } + path = RankServiceClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. + actual = RankServiceClient.parse_common_billing_account_path(path) + assert expected == actual + + +def test_common_folder_path(): + folder = "winkle" + expected = "folders/{folder}".format( + folder=folder, + ) + actual = RankServiceClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "nautilus", + } + path = RankServiceClient.common_folder_path(**expected) + + # Check that the path construction is reversible. + actual = RankServiceClient.parse_common_folder_path(path) + assert expected == actual + + +def test_common_organization_path(): + organization = "scallop" + expected = "organizations/{organization}".format( + organization=organization, + ) + actual = RankServiceClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "abalone", + } + path = RankServiceClient.common_organization_path(**expected) + + # Check that the path construction is reversible. 
+ actual = RankServiceClient.parse_common_organization_path(path) + assert expected == actual + + +def test_common_project_path(): + project = "squid" + expected = "projects/{project}".format( + project=project, + ) + actual = RankServiceClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "clam", + } + path = RankServiceClient.common_project_path(**expected) + + # Check that the path construction is reversible. + actual = RankServiceClient.parse_common_project_path(path) + assert expected == actual + + +def test_common_location_path(): + project = "whelk" + location = "octopus" + expected = "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) + actual = RankServiceClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "oyster", + "location": "nudibranch", + } + path = RankServiceClient.common_location_path(**expected) + + # Check that the path construction is reversible. + actual = RankServiceClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object( + transports.RankServiceTransport, "_prep_wrapped_messages" + ) as prep: + client = RankServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object( + transports.RankServiceTransport, "_prep_wrapped_messages" + ) as prep: + transport_class = RankServiceClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + +@pytest.mark.asyncio +async def test_transport_close_async(): + client = RankServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + with mock.patch.object( + type(getattr(client.transport, "grpc_channel")), "close" + ) as close: + async with client: + close.assert_not_called() + close.assert_called_once() + + +def test_cancel_operation_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.CancelOperationRequest +): + client = RankServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/operations/sample2"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.cancel_operation(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.CancelOperationRequest, + dict, + ], +) +def test_cancel_operation_rest(request_type): + client = RankServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {"name": "projects/sample1/operations/sample2"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "{}" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.cancel_operation(request) + + # Establish that the response is the type that we expect. + assert response is None + + +def test_get_operation_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.GetOperationRequest +): + client = RankServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/operations/sample2"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_operation(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.GetOperationRequest, + dict, + ], +) +def test_get_operation_rest(request_type): + client = RankServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {"name": "projects/sample1/operations/sample2"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_operation(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + + +def test_list_operations_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.ListOperationsRequest +): + client = RankServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict({"name": "projects/sample1"}, request) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_operations(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.ListOperationsRequest, + dict, + ], +) +def test_list_operations_rest(request_type): + client = RankServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {"name": "projects/sample1"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.ListOperationsResponse() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_operations(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) + + +def test_cancel_operation(transport: str = "grpc"): + client = RankServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.CancelOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_cancel_operation_async(transport: str = "grpc_asyncio"): + client = RankServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.CancelOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +def test_cancel_operation_field_headers(): + client = RankServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.CancelOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + call.return_value = None + + client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_cancel_operation_field_headers_async(): + client = RankServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.CancelOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_cancel_operation_from_dict(): + client = RankServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + + response = client.cancel_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_cancel_operation_from_dict_async(): + client = RankServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.cancel_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_get_operation(transport: str = "grpc"): + client = RankServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.GetOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation() + response = client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + + +@pytest.mark.asyncio +async def test_get_operation_async(transport: str = "grpc_asyncio"): + client = RankServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.GetOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + response = await client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + + +def test_get_operation_field_headers(): + client = RankServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.GetOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + call.return_value = operations_pb2.Operation() + + client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_operation_field_headers_async(): + client = RankServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.GetOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + await client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_get_operation_from_dict(): + client = RankServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation() + + response = client.get_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_get_operation_from_dict_async(): + client = RankServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + response = await client.get_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_list_operations(transport: str = "grpc"): + client = RankServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.ListOperationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.ListOperationsResponse() + response = client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) + + +@pytest.mark.asyncio +async def test_list_operations_async(transport: str = "grpc_asyncio"): + client = RankServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.ListOperationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + response = await client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) + + +def test_list_operations_field_headers(): + client = RankServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.ListOperationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + call.return_value = operations_pb2.ListOperationsResponse() + + client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_operations_field_headers_async(): + client = RankServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = operations_pb2.ListOperationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + await client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_list_operations_from_dict(): + client = RankServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.ListOperationsResponse() + + response = client.list_operations( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_list_operations_from_dict_async(): + client = RankServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + response = await client.list_operations( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_transport_close(): + transports = { + "rest": "_session", + "grpc": "_grpc_channel", + } + + for transport, close_name in transports.items(): + client = RankServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + with mock.patch.object( + type(getattr(client.transport, close_name)), "close" + ) as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +def test_client_ctx(): + transports = [ + "rest", + "grpc", + ] + for transport in transports: + client = RankServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + # Test client calls underlying transport. 
+ with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() + + +@pytest.mark.parametrize( + "client_class,transport_class", + [ + (RankServiceClient, transports.RankServiceGrpcTransport), + (RankServiceAsyncClient, transports.RankServiceGrpcAsyncIOTransport), + ], +) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) diff --git a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1/test_recommendation_service.py b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1/test_recommendation_service.py index f77695c25536..87371c63622a 100644 --- a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1/test_recommendation_service.py +++ b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1/test_recommendation_service.py @@ -1896,6 +1896,7 @@ def test_recommendation_service_base_transport(): methods = ( "recommend", "get_operation", + "cancel_operation", "list_operations", ) for method in methods: @@ -2288,12 +2289,38 @@ def test_recommendation_service_transport_channel_mtls_with_adc(transport_class) assert transport.grpc_channel == mock_grpc_channel -def test_document_path(): +def test_data_store_path(): project = "squid" location = "clam" data_store = "whelk" - branch = "octopus" - document = "oyster" + expected = "projects/{project}/locations/{location}/dataStores/{data_store}".format( + project=project, + location=location, + data_store=data_store, + ) + actual = RecommendationServiceClient.data_store_path(project, location, data_store) + assert expected == actual + + +def test_parse_data_store_path(): + expected = { + "project": "octopus", + "location": "oyster", + "data_store": "nudibranch", + } + path = RecommendationServiceClient.data_store_path(**expected) + + # Check that the path construction is reversible. 
+ actual = RecommendationServiceClient.parse_data_store_path(path) + assert expected == actual + + +def test_document_path(): + project = "cuttlefish" + location = "mussel" + data_store = "winkle" + branch = "nautilus" + document = "scallop" expected = "projects/{project}/locations/{location}/dataStores/{data_store}/branches/{branch}/documents/{document}".format( project=project, location=location, @@ -2309,11 +2336,11 @@ def test_document_path(): def test_parse_document_path(): expected = { - "project": "nudibranch", - "location": "cuttlefish", - "data_store": "mussel", - "branch": "winkle", - "document": "nautilus", + "project": "abalone", + "location": "squid", + "data_store": "clam", + "branch": "whelk", + "document": "octopus", } path = RecommendationServiceClient.document_path(**expected) @@ -2322,11 +2349,42 @@ def test_parse_document_path(): assert expected == actual +def test_engine_path(): + project = "oyster" + location = "nudibranch" + collection = "cuttlefish" + engine = "mussel" + expected = "projects/{project}/locations/{location}/collections/{collection}/engines/{engine}".format( + project=project, + location=location, + collection=collection, + engine=engine, + ) + actual = RecommendationServiceClient.engine_path( + project, location, collection, engine + ) + assert expected == actual + + +def test_parse_engine_path(): + expected = { + "project": "winkle", + "location": "nautilus", + "collection": "scallop", + "engine": "abalone", + } + path = RecommendationServiceClient.engine_path(**expected) + + # Check that the path construction is reversible. + actual = RecommendationServiceClient.parse_engine_path(path) + assert expected == actual + + def test_serving_config_path(): - project = "scallop" - location = "abalone" - data_store = "squid" - serving_config = "clam" + project = "squid" + location = "clam" + data_store = "whelk" + serving_config = "octopus" expected = "projects/{project}/locations/{location}/dataStores/{data_store}/servingConfigs/{serving_config}".format( project=project, location=location, @@ -2341,10 +2399,10 @@ def test_serving_config_path(): def test_parse_serving_config_path(): expected = { - "project": "whelk", - "location": "octopus", - "data_store": "oyster", - "serving_config": "nudibranch", + "project": "oyster", + "location": "nudibranch", + "data_store": "cuttlefish", + "serving_config": "mussel", } path = RecommendationServiceClient.serving_config_path(**expected) @@ -2354,7 +2412,7 @@ def test_parse_serving_config_path(): def test_common_billing_account_path(): - billing_account = "cuttlefish" + billing_account = "winkle" expected = "billingAccounts/{billing_account}".format( billing_account=billing_account, ) @@ -2364,7 +2422,7 @@ def test_common_billing_account_path(): def test_parse_common_billing_account_path(): expected = { - "billing_account": "mussel", + "billing_account": "nautilus", } path = RecommendationServiceClient.common_billing_account_path(**expected) @@ -2374,7 +2432,7 @@ def test_parse_common_billing_account_path(): def test_common_folder_path(): - folder = "winkle" + folder = "scallop" expected = "folders/{folder}".format( folder=folder, ) @@ -2384,7 +2442,7 @@ def test_common_folder_path(): def test_parse_common_folder_path(): expected = { - "folder": "nautilus", + "folder": "abalone", } path = RecommendationServiceClient.common_folder_path(**expected) @@ -2394,7 +2452,7 @@ def test_parse_common_folder_path(): def test_common_organization_path(): - organization = "scallop" + organization = "squid" expected = 
"organizations/{organization}".format( organization=organization, ) @@ -2404,7 +2462,7 @@ def test_common_organization_path(): def test_parse_common_organization_path(): expected = { - "organization": "abalone", + "organization": "clam", } path = RecommendationServiceClient.common_organization_path(**expected) @@ -2414,7 +2472,7 @@ def test_parse_common_organization_path(): def test_common_project_path(): - project = "squid" + project = "whelk" expected = "projects/{project}".format( project=project, ) @@ -2424,7 +2482,7 @@ def test_common_project_path(): def test_parse_common_project_path(): expected = { - "project": "clam", + "project": "octopus", } path = RecommendationServiceClient.common_project_path(**expected) @@ -2434,8 +2492,8 @@ def test_parse_common_project_path(): def test_common_location_path(): - project = "whelk" - location = "octopus" + project = "oyster" + location = "nudibranch" expected = "projects/{project}/locations/{location}".format( project=project, location=location, @@ -2446,8 +2504,8 @@ def test_common_location_path(): def test_parse_common_location_path(): expected = { - "project": "oyster", - "location": "nudibranch", + "project": "cuttlefish", + "location": "mussel", } path = RecommendationServiceClient.common_location_path(**expected) @@ -2493,6 +2551,64 @@ async def test_transport_close_async(): close.assert_called_once() +def test_cancel_operation_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.CancelOperationRequest +): + client = RecommendationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/operations/sample2"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.cancel_operation(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.CancelOperationRequest, + dict, + ], +) +def test_cancel_operation_rest(request_type): + client = RecommendationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {"name": "projects/sample1/operations/sample2"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "{}" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.cancel_operation(request) + + # Establish that the response is the type that we expect. 
+ assert response is None + + def test_get_operation_rest_bad_request( transport: str = "rest", request_type=operations_pb2.GetOperationRequest ): @@ -2607,6 +2723,145 @@ def test_list_operations_rest(request_type): assert isinstance(response, operations_pb2.ListOperationsResponse) +def test_cancel_operation(transport: str = "grpc"): + client = RecommendationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.CancelOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_cancel_operation_async(transport: str = "grpc_asyncio"): + client = RecommendationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.CancelOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +def test_cancel_operation_field_headers(): + client = RecommendationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.CancelOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + call.return_value = None + + client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_cancel_operation_field_headers_async(): + client = RecommendationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.CancelOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_cancel_operation_from_dict(): + client = RecommendationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + + response = client.cancel_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_cancel_operation_from_dict_async(): + client = RecommendationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.cancel_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + def test_get_operation(transport: str = "grpc"): client = RecommendationServiceClient( credentials=ga_credentials.AnonymousCredentials(), diff --git a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1/test_schema_service.py b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1/test_schema_service.py index 117b4739c1c2..27660344e91e 100644 --- a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1/test_schema_service.py +++ b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1/test_schema_service.py @@ -1899,13 +1899,13 @@ def test_list_schemas_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_schemas(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -5018,6 +5018,7 @@ def test_schema_service_base_transport(): "update_schema", "delete_schema", "get_operation", + "cancel_operation", "list_operations", ) for method in methods: @@ -5669,6 +5670,64 @@ async def test_transport_close_async(): close.assert_called_once() +def test_cancel_operation_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.CancelOperationRequest +): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/operations/sample2"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.cancel_operation(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.CancelOperationRequest, + dict, + ], +) +def test_cancel_operation_rest(request_type): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {"name": "projects/sample1/operations/sample2"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "{}" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.cancel_operation(request) + + # Establish that the response is the type that we expect. + assert response is None + + def test_get_operation_rest_bad_request( transport: str = "rest", request_type=operations_pb2.GetOperationRequest ): @@ -5783,6 +5842,145 @@ def test_list_operations_rest(request_type): assert isinstance(response, operations_pb2.ListOperationsResponse) +def test_cancel_operation(transport: str = "grpc"): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.CancelOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_cancel_operation_async(transport: str = "grpc_asyncio"): + client = SchemaServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.CancelOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert response is None + + +def test_cancel_operation_field_headers(): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.CancelOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + call.return_value = None + + client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_cancel_operation_field_headers_async(): + client = SchemaServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.CancelOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_cancel_operation_from_dict(): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + + response = client.cancel_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_cancel_operation_from_dict_async(): + client = SchemaServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.cancel_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + def test_get_operation(transport: str = "grpc"): client = SchemaServiceClient( credentials=ga_credentials.AnonymousCredentials(), diff --git a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1/test_search_service.py b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1/test_search_service.py index 22f4e1c6f01a..28cba95acdf6 100644 --- a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1/test_search_service.py +++ b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1/test_search_service.py @@ -1471,13 +1471,13 @@ def test_search_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("serving_config", ""),)), ) pager = client.search(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -2081,6 +2081,7 @@ def test_search_service_base_transport(): methods = ( "search", "get_operation", + "cancel_operation", "list_operations", ) for method in methods: @@ -2729,6 +2730,64 @@ async def test_transport_close_async(): close.assert_called_once() +def test_cancel_operation_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.CancelOperationRequest +): + client = SearchServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/operations/sample2"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.cancel_operation(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.CancelOperationRequest, + dict, + ], +) +def test_cancel_operation_rest(request_type): + client = SearchServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {"name": "projects/sample1/operations/sample2"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "{}" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.cancel_operation(request) + + # Establish that the response is the type that we expect. 
+ assert response is None + + def test_get_operation_rest_bad_request( transport: str = "rest", request_type=operations_pb2.GetOperationRequest ): @@ -2843,6 +2902,145 @@ def test_list_operations_rest(request_type): assert isinstance(response, operations_pb2.ListOperationsResponse) +def test_cancel_operation(transport: str = "grpc"): + client = SearchServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.CancelOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_cancel_operation_async(transport: str = "grpc_asyncio"): + client = SearchServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.CancelOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +def test_cancel_operation_field_headers(): + client = SearchServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.CancelOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + call.return_value = None + + client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_cancel_operation_field_headers_async(): + client = SearchServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.CancelOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_cancel_operation_from_dict(): + client = SearchServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + + response = client.cancel_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_cancel_operation_from_dict_async(): + client = SearchServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.cancel_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + def test_get_operation(transport: str = "grpc"): client = SearchServiceClient( credentials=ga_credentials.AnonymousCredentials(), diff --git a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1/test_site_search_engine_service.py b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1/test_site_search_engine_service.py index a57f4df1ce9c..f22929eef9f3 100644 --- a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1/test_site_search_engine_service.py +++ b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1/test_site_search_engine_service.py @@ -2332,6 +2332,7 @@ def test_get_target_site(request_type, transport: str = "grpc"): type_=site_search_engine.TargetSite.Type.INCLUDE, exact_match=True, generated_uri_pattern="generated_uri_pattern_value", + root_domain_uri="root_domain_uri_value", indexing_status=site_search_engine.TargetSite.IndexingStatus.PENDING, ) response = client.get_target_site(request) @@ -2349,6 +2350,7 @@ def test_get_target_site(request_type, transport: str = "grpc"): assert response.type_ == site_search_engine.TargetSite.Type.INCLUDE assert response.exact_match is True assert response.generated_uri_pattern == "generated_uri_pattern_value" + assert response.root_domain_uri == "root_domain_uri_value" assert ( response.indexing_status == site_search_engine.TargetSite.IndexingStatus.PENDING ) @@ -2455,6 +2457,7 @@ async def test_get_target_site_empty_call_async(): type_=site_search_engine.TargetSite.Type.INCLUDE, exact_match=True, generated_uri_pattern="generated_uri_pattern_value", + root_domain_uri="root_domain_uri_value", indexing_status=site_search_engine.TargetSite.IndexingStatus.PENDING, ) ) @@ -2534,6 +2537,7 @@ async def test_get_target_site_async( type_=site_search_engine.TargetSite.Type.INCLUDE, exact_match=True, generated_uri_pattern="generated_uri_pattern_value", + root_domain_uri="root_domain_uri_value", 
indexing_status=site_search_engine.TargetSite.IndexingStatus.PENDING, ) ) @@ -2552,6 +2556,7 @@ async def test_get_target_site_async( assert response.type_ == site_search_engine.TargetSite.Type.INCLUDE assert response.exact_match is True assert response.generated_uri_pattern == "generated_uri_pattern_value" + assert response.root_domain_uri == "root_domain_uri_value" assert ( response.indexing_status == site_search_engine.TargetSite.IndexingStatus.PENDING ) @@ -3921,13 +3926,13 @@ def test_list_target_sites_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_target_sites(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -5652,13 +5657,13 @@ def test_fetch_domain_verification_status_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("site_search_engine", ""),)), ) pager = client.fetch_domain_verification_status(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -6149,6 +6154,7 @@ def test_create_target_site_rest(request_type): "type_": 1, "exact_match": True, "generated_uri_pattern": "generated_uri_pattern_value", + "root_domain_uri": "root_domain_uri_value", "site_verification_info": { "site_verification_state": 1, "verify_time": {"seconds": 751, "nanos": 543}, @@ -6823,6 +6829,7 @@ def test_get_target_site_rest(request_type): type_=site_search_engine.TargetSite.Type.INCLUDE, exact_match=True, generated_uri_pattern="generated_uri_pattern_value", + root_domain_uri="root_domain_uri_value", indexing_status=site_search_engine.TargetSite.IndexingStatus.PENDING, ) @@ -6844,6 +6851,7 @@ def test_get_target_site_rest(request_type): assert response.type_ == site_search_engine.TargetSite.Type.INCLUDE assert response.exact_match is True assert response.generated_uri_pattern == "generated_uri_pattern_value" + assert response.root_domain_uri == "root_domain_uri_value" assert ( response.indexing_status == site_search_engine.TargetSite.IndexingStatus.PENDING ) @@ -7142,6 +7150,7 @@ def test_update_target_site_rest(request_type): "type_": 1, "exact_match": True, "generated_uri_pattern": "generated_uri_pattern_value", + "root_domain_uri": "root_domain_uri_value", "site_verification_info": { "site_verification_state": 1, "verify_time": {"seconds": 751, "nanos": 543}, @@ -9754,6 +9763,7 @@ def test_site_search_engine_service_base_transport(): "batch_verify_target_sites", "fetch_domain_verification_status", "get_operation", + "cancel_operation", "list_operations", ) for method in methods: @@ -10436,6 +10446,64 @@ async def test_transport_close_async(): close.assert_called_once() +def test_cancel_operation_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.CancelOperationRequest +): + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/operations/sample2"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.cancel_operation(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.CancelOperationRequest, + dict, + ], +) +def test_cancel_operation_rest(request_type): + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {"name": "projects/sample1/operations/sample2"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "{}" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.cancel_operation(request) + + # Establish that the response is the type that we expect. + assert response is None + + def test_get_operation_rest_bad_request( transport: str = "rest", request_type=operations_pb2.GetOperationRequest ): @@ -10550,6 +10618,145 @@ def test_list_operations_rest(request_type): assert isinstance(response, operations_pb2.ListOperationsResponse) +def test_cancel_operation(transport: str = "grpc"): + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.CancelOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_cancel_operation_async(transport: str = "grpc_asyncio"): + client = SiteSearchEngineServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.CancelOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert response is None + + +def test_cancel_operation_field_headers(): + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.CancelOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + call.return_value = None + + client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_cancel_operation_field_headers_async(): + client = SiteSearchEngineServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.CancelOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_cancel_operation_from_dict(): + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + + response = client.cancel_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_cancel_operation_from_dict_async(): + client = SiteSearchEngineServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.cancel_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + def test_get_operation(transport: str = "grpc"): client = SiteSearchEngineServiceClient( credentials=ga_credentials.AnonymousCredentials(), diff --git a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1/test_user_event_service.py b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1/test_user_event_service.py index 1caee8a8c252..948d60c21506 100644 --- a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1/test_user_event_service.py +++ b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1/test_user_event_service.py @@ -1189,6 +1189,8 @@ def test_write_user_event(request_type, transport: str = "grpc"): call.return_value = user_event.UserEvent( event_type="event_type_value", user_pseudo_id="user_pseudo_id_value", + engine="engine_value", + data_store="data_store_value", direct_user_request=True, session_id="session_id_value", attribution_token="attribution_token_value", @@ -1208,6 +1210,8 @@ def test_write_user_event(request_type, transport: str = "grpc"): assert isinstance(response, user_event.UserEvent) assert response.event_type == "event_type_value" assert response.user_pseudo_id == "user_pseudo_id_value" + assert response.engine == "engine_value" + assert response.data_store == "data_store_value" assert response.direct_user_request is True assert response.session_id == "session_id_value" assert response.attribution_token == "attribution_token_value" @@ -1316,6 +1320,8 @@ async def test_write_user_event_empty_call_async(): user_event.UserEvent( event_type="event_type_value", user_pseudo_id="user_pseudo_id_value", + engine="engine_value", + data_store="data_store_value", direct_user_request=True, session_id="session_id_value", attribution_token="attribution_token_value", @@ -1397,6 +1403,8 @@ async def test_write_user_event_async( user_event.UserEvent( event_type="event_type_value", user_pseudo_id="user_pseudo_id_value", + engine="engine_value", + data_store="data_store_value", direct_user_request=True, session_id="session_id_value", attribution_token="attribution_token_value", @@ -1417,6 +1425,8 @@ async def test_write_user_event_async( assert isinstance(response, user_event.UserEvent) assert response.event_type == "event_type_value" assert response.user_pseudo_id == "user_pseudo_id_value" + assert response.engine == "engine_value" + assert response.data_store == "data_store_value" assert response.direct_user_request is True assert response.session_id == "session_id_value" assert response.attribution_token == "attribution_token_value" @@ -2129,6 +2139,8 @@ def test_write_user_event_rest(request_type): request_init["user_event"] = { "event_type": "event_type_value", "user_pseudo_id": "user_pseudo_id_value", + "engine": "engine_value", + "data_store": "data_store_value", "event_time": {"seconds": 751, "nanos": 543}, "user_info": {"user_id": "user_id_value", "user_agent": "user_agent_value"}, "direct_user_request": True, @@ -2256,6 +2268,8 @@ def get_message_fields(field): return_value = user_event.UserEvent( event_type="event_type_value", user_pseudo_id="user_pseudo_id_value", + engine="engine_value", + data_store="data_store_value", direct_user_request=True, session_id="session_id_value", attribution_token="attribution_token_value", @@ -2279,6 +2293,8 @@ def get_message_fields(field): assert isinstance(response, 
user_event.UserEvent) assert response.event_type == "event_type_value" assert response.user_pseudo_id == "user_pseudo_id_value" + assert response.engine == "engine_value" + assert response.data_store == "data_store_value" assert response.direct_user_request is True assert response.session_id == "session_id_value" assert response.attribution_token == "attribution_token_value" @@ -2352,6 +2368,8 @@ def test_write_user_event_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() ).write_user_event._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("write_async",)) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone @@ -2407,7 +2425,7 @@ def test_write_user_event_rest_unset_required_fields(): unset_fields = transport.write_user_event._get_unset_required_fields({}) assert set(unset_fields) == ( - set(()) + set(("writeAsync",)) & set( ( "parent", @@ -3175,6 +3193,7 @@ def test_user_event_service_base_transport(): "collect_user_event", "import_user_events", "get_operation", + "cancel_operation", "list_operations", ) for method in methods: @@ -3687,8 +3706,37 @@ def test_parse_document_path(): assert expected == actual +def test_engine_path(): + project = "oyster" + location = "nudibranch" + collection = "cuttlefish" + engine = "mussel" + expected = "projects/{project}/locations/{location}/collections/{collection}/engines/{engine}".format( + project=project, + location=location, + collection=collection, + engine=engine, + ) + actual = UserEventServiceClient.engine_path(project, location, collection, engine) + assert expected == actual + + +def test_parse_engine_path(): + expected = { + "project": "winkle", + "location": "nautilus", + "collection": "scallop", + "engine": "abalone", + } + path = UserEventServiceClient.engine_path(**expected) + + # Check that the path construction is reversible. 
+ actual = UserEventServiceClient.parse_engine_path(path) + assert expected == actual + + def test_common_billing_account_path(): - billing_account = "oyster" + billing_account = "squid" expected = "billingAccounts/{billing_account}".format( billing_account=billing_account, ) @@ -3698,7 +3746,7 @@ def test_common_billing_account_path(): def test_parse_common_billing_account_path(): expected = { - "billing_account": "nudibranch", + "billing_account": "clam", } path = UserEventServiceClient.common_billing_account_path(**expected) @@ -3708,7 +3756,7 @@ def test_parse_common_billing_account_path(): def test_common_folder_path(): - folder = "cuttlefish" + folder = "whelk" expected = "folders/{folder}".format( folder=folder, ) @@ -3718,7 +3766,7 @@ def test_common_folder_path(): def test_parse_common_folder_path(): expected = { - "folder": "mussel", + "folder": "octopus", } path = UserEventServiceClient.common_folder_path(**expected) @@ -3728,7 +3776,7 @@ def test_parse_common_folder_path(): def test_common_organization_path(): - organization = "winkle" + organization = "oyster" expected = "organizations/{organization}".format( organization=organization, ) @@ -3738,7 +3786,7 @@ def test_common_organization_path(): def test_parse_common_organization_path(): expected = { - "organization": "nautilus", + "organization": "nudibranch", } path = UserEventServiceClient.common_organization_path(**expected) @@ -3748,7 +3796,7 @@ def test_parse_common_organization_path(): def test_common_project_path(): - project = "scallop" + project = "cuttlefish" expected = "projects/{project}".format( project=project, ) @@ -3758,7 +3806,7 @@ def test_common_project_path(): def test_parse_common_project_path(): expected = { - "project": "abalone", + "project": "mussel", } path = UserEventServiceClient.common_project_path(**expected) @@ -3768,8 +3816,8 @@ def test_parse_common_project_path(): def test_common_location_path(): - project = "squid" - location = "clam" + project = "winkle" + location = "nautilus" expected = "projects/{project}/locations/{location}".format( project=project, location=location, @@ -3780,8 +3828,8 @@ def test_common_location_path(): def test_parse_common_location_path(): expected = { - "project": "whelk", - "location": "octopus", + "project": "scallop", + "location": "abalone", } path = UserEventServiceClient.common_location_path(**expected) @@ -3827,6 +3875,64 @@ async def test_transport_close_async(): close.assert_called_once() +def test_cancel_operation_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.CancelOperationRequest +): + client = UserEventServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/operations/sample2"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.cancel_operation(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.CancelOperationRequest, + dict, + ], +) +def test_cancel_operation_rest(request_type): + client = UserEventServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {"name": "projects/sample1/operations/sample2"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "{}" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.cancel_operation(request) + + # Establish that the response is the type that we expect. + assert response is None + + def test_get_operation_rest_bad_request( transport: str = "rest", request_type=operations_pb2.GetOperationRequest ): @@ -3941,6 +4047,145 @@ def test_list_operations_rest(request_type): assert isinstance(response, operations_pb2.ListOperationsResponse) +def test_cancel_operation(transport: str = "grpc"): + client = UserEventServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.CancelOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_cancel_operation_async(transport: str = "grpc_asyncio"): + client = UserEventServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.CancelOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert response is None + + +def test_cancel_operation_field_headers(): + client = UserEventServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.CancelOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + call.return_value = None + + client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_cancel_operation_field_headers_async(): + client = UserEventServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.CancelOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_cancel_operation_from_dict(): + client = UserEventServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + + response = client.cancel_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_cancel_operation_from_dict_async(): + client = UserEventServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.cancel_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + def test_get_operation(transport: str = "grpc"): client = UserEventServiceClient( credentials=ga_credentials.AnonymousCredentials(), diff --git a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1alpha/test_acl_config_service.py b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1alpha/test_acl_config_service.py index 01e6271bf485..056184ddd5ed 100644 --- a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1alpha/test_acl_config_service.py +++ b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1alpha/test_acl_config_service.py @@ -2583,6 +2583,7 @@ def test_acl_config_service_base_transport(): "update_acl_config", "get_acl_config", "get_operation", + "cancel_operation", "list_operations", ) for method in methods: @@ -3139,6 +3140,69 @@ async def test_transport_close_async(): close.assert_called_once() +def test_cancel_operation_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.CancelOperationRequest +): + client = AclConfigServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict( + { + "name": "projects/sample1/locations/sample2/collections/sample3/dataStores/sample4/branches/sample5/operations/sample6" + }, + request, + ) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.cancel_operation(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.CancelOperationRequest, + dict, + ], +) +def test_cancel_operation_rest(request_type): + client = AclConfigServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = { + "name": "projects/sample1/locations/sample2/collections/sample3/dataStores/sample4/branches/sample5/operations/sample6" + } + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "{}" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.cancel_operation(request) + + # Establish that the response is the type that we expect. 
+ assert response is None + + def test_get_operation_rest_bad_request( transport: str = "rest", request_type=operations_pb2.GetOperationRequest ): @@ -3265,6 +3329,145 @@ def test_list_operations_rest(request_type): assert isinstance(response, operations_pb2.ListOperationsResponse) +def test_cancel_operation(transport: str = "grpc"): + client = AclConfigServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.CancelOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_cancel_operation_async(transport: str = "grpc_asyncio"): + client = AclConfigServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.CancelOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +def test_cancel_operation_field_headers(): + client = AclConfigServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.CancelOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + call.return_value = None + + client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_cancel_operation_field_headers_async(): + client = AclConfigServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.CancelOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_cancel_operation_from_dict(): + client = AclConfigServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + + response = client.cancel_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_cancel_operation_from_dict_async(): + client = AclConfigServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.cancel_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + def test_get_operation(transport: str = "grpc"): client = AclConfigServiceClient( credentials=ga_credentials.AnonymousCredentials(), diff --git a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1alpha/test_chunk_service.py b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1alpha/test_chunk_service.py index 70f373ceeee1..bcfa1e59d4ee 100644 --- a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1alpha/test_chunk_service.py +++ b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1alpha/test_chunk_service.py @@ -1120,6 +1120,7 @@ def test_get_chunk(request_type, transport: str = "grpc"): name="name_value", id="id_value", content="content_value", + relevance_score=0.1584, ) response = client.get_chunk(request) @@ -1134,6 +1135,7 @@ def test_get_chunk(request_type, transport: str = "grpc"): assert response.name == "name_value" assert response.id == "id_value" assert response.content == "content_value" + assert math.isclose(response.relevance_score, 0.1584, rel_tol=1e-6) def test_get_chunk_empty_call(): @@ -1235,6 +1237,7 @@ async def test_get_chunk_empty_call_async(): name="name_value", id="id_value", content="content_value", + relevance_score=0.1584, ) ) response = await client.get_chunk() @@ -1308,6 +1311,7 @@ async def test_get_chunk_async( name="name_value", id="id_value", content="content_value", + relevance_score=0.1584, ) ) response = await client.get_chunk(request) @@ -1323,6 +1327,7 @@ async def test_get_chunk_async( assert response.name == "name_value" assert response.id == "id_value" assert response.content == "content_value" + assert math.isclose(response.relevance_score, 0.1584, rel_tol=1e-6) @pytest.mark.asyncio @@ -1877,13 +1882,13 @@ def test_list_chunks_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( 
gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_chunks(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -2056,6 +2061,7 @@ def test_get_chunk_rest(request_type): name="name_value", id="id_value", content="content_value", + relevance_score=0.1584, ) # Wrap the value into a proper Response obj @@ -2074,6 +2080,7 @@ def test_get_chunk_rest(request_type): assert response.name == "name_value" assert response.id == "id_value" assert response.content == "content_value" + assert math.isclose(response.relevance_score, 0.1584, rel_tol=1e-6) def test_get_chunk_rest_use_cached_wrapped_rpc(): @@ -2858,6 +2865,7 @@ def test_chunk_service_base_transport(): "get_chunk", "list_chunks", "get_operation", + "cancel_operation", "list_operations", ) for method in methods: @@ -3451,6 +3459,69 @@ async def test_transport_close_async(): close.assert_called_once() +def test_cancel_operation_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.CancelOperationRequest +): + client = ChunkServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict( + { + "name": "projects/sample1/locations/sample2/collections/sample3/dataStores/sample4/branches/sample5/operations/sample6" + }, + request, + ) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.cancel_operation(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.CancelOperationRequest, + dict, + ], +) +def test_cancel_operation_rest(request_type): + client = ChunkServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = { + "name": "projects/sample1/locations/sample2/collections/sample3/dataStores/sample4/branches/sample5/operations/sample6" + } + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "{}" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.cancel_operation(request) + + # Establish that the response is the type that we expect. + assert response is None + + def test_get_operation_rest_bad_request( transport: str = "rest", request_type=operations_pb2.GetOperationRequest ): @@ -3577,6 +3648,145 @@ def test_list_operations_rest(request_type): assert isinstance(response, operations_pb2.ListOperationsResponse) +def test_cancel_operation(transport: str = "grpc"): + client = ChunkServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
+ request = operations_pb2.CancelOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_cancel_operation_async(transport: str = "grpc_asyncio"): + client = ChunkServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.CancelOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +def test_cancel_operation_field_headers(): + client = ChunkServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.CancelOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + call.return_value = None + + client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_cancel_operation_field_headers_async(): + client = ChunkServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.CancelOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_cancel_operation_from_dict(): + client = ChunkServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + + response = client.cancel_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_cancel_operation_from_dict_async(): + client = ChunkServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.cancel_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + def test_get_operation(transport: str = "grpc"): client = ChunkServiceClient( credentials=ga_credentials.AnonymousCredentials(), diff --git a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1alpha/test_completion_service.py b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1alpha/test_completion_service.py index 07fc56809cbd..afd11b47a545 100644 --- a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1alpha/test_completion_service.py +++ b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1alpha/test_completion_service.py @@ -3025,6 +3025,7 @@ def test_completion_service_base_transport(): "import_suggestion_deny_list_entries", "purge_suggestion_deny_list_entries", "get_operation", + "cancel_operation", "list_operations", ) for method in methods: @@ -3643,6 +3644,69 @@ async def test_transport_close_async(): close.assert_called_once() +def test_cancel_operation_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.CancelOperationRequest +): + client = CompletionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict( + { + "name": "projects/sample1/locations/sample2/collections/sample3/dataStores/sample4/branches/sample5/operations/sample6" + }, + request, + ) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.cancel_operation(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.CancelOperationRequest, + dict, + ], +) +def test_cancel_operation_rest(request_type): + client = CompletionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = { + "name": "projects/sample1/locations/sample2/collections/sample3/dataStores/sample4/branches/sample5/operations/sample6" + } + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "{}" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.cancel_operation(request) + + # Establish that the response is the type that we expect. + assert response is None + + def test_get_operation_rest_bad_request( transport: str = "rest", request_type=operations_pb2.GetOperationRequest ): @@ -3769,6 +3833,145 @@ def test_list_operations_rest(request_type): assert isinstance(response, operations_pb2.ListOperationsResponse) +def test_cancel_operation(transport: str = "grpc"): + client = CompletionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.CancelOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_cancel_operation_async(transport: str = "grpc_asyncio"): + client = CompletionServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.CancelOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +def test_cancel_operation_field_headers(): + client = CompletionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.CancelOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + call.return_value = None + + client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_cancel_operation_field_headers_async(): + client = CompletionServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.CancelOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_cancel_operation_from_dict(): + client = CompletionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + + response = client.cancel_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_cancel_operation_from_dict_async(): + client = CompletionServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.cancel_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + def test_get_operation(transport: str = "grpc"): client = CompletionServiceClient( credentials=ga_credentials.AnonymousCredentials(), diff --git a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1alpha/test_control_service.py b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1alpha/test_control_service.py new file mode 100644 index 000000000000..9e3e550d1435 --- /dev/null +++ b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1alpha/test_control_service.py @@ -0,0 +1,6535 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import os + +# try/except added for compatibility with python < 3.8 +try: + from unittest import mock + from unittest.mock import AsyncMock # pragma: NO COVER +except ImportError: # pragma: NO COVER + import mock + +from collections.abc import Iterable +import json +import math + +from google.api_core import gapic_v1, grpc_helpers, grpc_helpers_async, path_template +from google.api_core import api_core_version, client_options +from google.api_core import exceptions as core_exceptions +import google.auth +from google.auth import credentials as ga_credentials +from google.auth.exceptions import MutualTLSChannelError +from google.cloud.location import locations_pb2 +from google.longrunning import operations_pb2 # type: ignore +from google.oauth2 import service_account +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import json_format +from google.protobuf import timestamp_pb2 # type: ignore +import grpc +from grpc.experimental import aio +from proto.marshal.rules import wrappers +from proto.marshal.rules.dates import DurationRule, TimestampRule +import pytest +from requests import PreparedRequest, Request, Response +from requests.sessions import Session + +from google.cloud.discoveryengine_v1alpha.services.control_service import ( + ControlServiceAsyncClient, + ControlServiceClient, + pagers, + transports, +) +from google.cloud.discoveryengine_v1alpha.types import common +from google.cloud.discoveryengine_v1alpha.types import control +from google.cloud.discoveryengine_v1alpha.types import control as gcd_control +from google.cloud.discoveryengine_v1alpha.types import control_service + + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + + +# If default endpoint is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. +def modify_default_endpoint(client): + return ( + "foo.googleapis.com" + if ("localhost" in client.DEFAULT_ENDPOINT) + else client.DEFAULT_ENDPOINT + ) + + +# If default endpoint template is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint template so the client can produce a different +# mtls endpoint for endpoint testing purposes. 
+def modify_default_endpoint_template(client): + return ( + "test.{UNIVERSE_DOMAIN}" + if ("localhost" in client._DEFAULT_ENDPOINT_TEMPLATE) + else client._DEFAULT_ENDPOINT_TEMPLATE + ) + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert ControlServiceClient._get_default_mtls_endpoint(None) is None + assert ( + ControlServiceClient._get_default_mtls_endpoint(api_endpoint) + == api_mtls_endpoint + ) + assert ( + ControlServiceClient._get_default_mtls_endpoint(api_mtls_endpoint) + == api_mtls_endpoint + ) + assert ( + ControlServiceClient._get_default_mtls_endpoint(sandbox_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + ControlServiceClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + ControlServiceClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi + ) + + +def test__read_environment_variables(): + assert ControlServiceClient._read_environment_variables() == (False, "auto", None) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + assert ControlServiceClient._read_environment_variables() == ( + True, + "auto", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + assert ControlServiceClient._read_environment_variables() == ( + False, + "auto", + None, + ) + + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError) as excinfo: + ControlServiceClient._read_environment_variables() + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + assert ControlServiceClient._read_environment_variables() == ( + False, + "never", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + assert ControlServiceClient._read_environment_variables() == ( + False, + "always", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}): + assert ControlServiceClient._read_environment_variables() == ( + False, + "auto", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + ControlServiceClient._read_environment_variables() + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + with mock.patch.dict(os.environ, {"GOOGLE_CLOUD_UNIVERSE_DOMAIN": "foo.com"}): + assert ControlServiceClient._read_environment_variables() == ( + False, + "auto", + "foo.com", + ) + + +def test__get_client_cert_source(): + mock_provided_cert_source = mock.Mock() + mock_default_cert_source = mock.Mock() + + assert ControlServiceClient._get_client_cert_source(None, False) is None + assert ( + ControlServiceClient._get_client_cert_source(mock_provided_cert_source, False) + is None + ) + assert ( + ControlServiceClient._get_client_cert_source(mock_provided_cert_source, True) + == mock_provided_cert_source + ) + + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", return_value=True + ): + with mock.patch( + 
"google.auth.transport.mtls.default_client_cert_source", + return_value=mock_default_cert_source, + ): + assert ( + ControlServiceClient._get_client_cert_source(None, True) + is mock_default_cert_source + ) + assert ( + ControlServiceClient._get_client_cert_source( + mock_provided_cert_source, "true" + ) + is mock_provided_cert_source + ) + + +@mock.patch.object( + ControlServiceClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(ControlServiceClient), +) +@mock.patch.object( + ControlServiceAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(ControlServiceAsyncClient), +) +def test__get_api_endpoint(): + api_override = "foo.com" + mock_client_cert_source = mock.Mock() + default_universe = ControlServiceClient._DEFAULT_UNIVERSE + default_endpoint = ControlServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=default_universe + ) + mock_universe = "bar.com" + mock_endpoint = ControlServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=mock_universe + ) + + assert ( + ControlServiceClient._get_api_endpoint( + api_override, mock_client_cert_source, default_universe, "always" + ) + == api_override + ) + assert ( + ControlServiceClient._get_api_endpoint( + None, mock_client_cert_source, default_universe, "auto" + ) + == ControlServiceClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + ControlServiceClient._get_api_endpoint(None, None, default_universe, "auto") + == default_endpoint + ) + assert ( + ControlServiceClient._get_api_endpoint(None, None, default_universe, "always") + == ControlServiceClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + ControlServiceClient._get_api_endpoint( + None, mock_client_cert_source, default_universe, "always" + ) + == ControlServiceClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + ControlServiceClient._get_api_endpoint(None, None, mock_universe, "never") + == mock_endpoint + ) + assert ( + ControlServiceClient._get_api_endpoint(None, None, default_universe, "never") + == default_endpoint + ) + + with pytest.raises(MutualTLSChannelError) as excinfo: + ControlServiceClient._get_api_endpoint( + None, mock_client_cert_source, mock_universe, "auto" + ) + assert ( + str(excinfo.value) + == "mTLS is not supported in any universe other than googleapis.com." + ) + + +def test__get_universe_domain(): + client_universe_domain = "foo.com" + universe_domain_env = "bar.com" + + assert ( + ControlServiceClient._get_universe_domain( + client_universe_domain, universe_domain_env + ) + == client_universe_domain + ) + assert ( + ControlServiceClient._get_universe_domain(None, universe_domain_env) + == universe_domain_env + ) + assert ( + ControlServiceClient._get_universe_domain(None, None) + == ControlServiceClient._DEFAULT_UNIVERSE + ) + + with pytest.raises(ValueError) as excinfo: + ControlServiceClient._get_universe_domain("", None) + assert str(excinfo.value) == "Universe Domain cannot be an empty string." + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (ControlServiceClient, transports.ControlServiceGrpcTransport, "grpc"), + (ControlServiceClient, transports.ControlServiceRestTransport, "rest"), + ], +) +def test__validate_universe_domain(client_class, transport_class, transport_name): + client = client_class( + transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) + ) + assert client._validate_universe_domain() == True + + # Test the case when universe is already validated. 
+ assert client._validate_universe_domain() == True + + if transport_name == "grpc": + # Test the case where credentials are provided by the + # `local_channel_credentials`. The default universes in both match. + channel = grpc.secure_channel( + "http://localhost/", grpc.local_channel_credentials() + ) + client = client_class(transport=transport_class(channel=channel)) + assert client._validate_universe_domain() == True + + # Test the case where credentials do not exist: e.g. a transport is provided + # with no credentials. Validation should still succeed because there is no + # mismatch with non-existent credentials. + channel = grpc.secure_channel( + "http://localhost/", grpc.local_channel_credentials() + ) + transport = transport_class(channel=channel) + transport._credentials = None + client = client_class(transport=transport) + assert client._validate_universe_domain() == True + + # TODO: This is needed to cater for older versions of google-auth + # Make this test unconditional once the minimum supported version of + # google-auth becomes 2.23.0 or higher. + google_auth_major, google_auth_minor = [ + int(part) for part in google.auth.__version__.split(".")[0:2] + ] + if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): + credentials = ga_credentials.AnonymousCredentials() + credentials._universe_domain = "foo.com" + # Test the case when there is a universe mismatch from the credentials. + client = client_class(transport=transport_class(credentials=credentials)) + with pytest.raises(ValueError) as excinfo: + client._validate_universe_domain() + assert ( + str(excinfo.value) + == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." + ) + + # Test the case when there is a universe mismatch from the client. + # + # TODO: Make this test unconditional once the minimum supported version of + # google-api-core becomes 2.15.0 or higher. + api_core_major, api_core_minor = [ + int(part) for part in api_core_version.__version__.split(".")[0:2] + ] + if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): + client = client_class( + client_options={"universe_domain": "bar.com"}, + transport=transport_class( + credentials=ga_credentials.AnonymousCredentials(), + ), + ) + with pytest.raises(ValueError) as excinfo: + client._validate_universe_domain() + assert ( + str(excinfo.value) + == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." 
+ ) + + # Test that ValueError is raised if universe_domain is provided via client options and credentials is None + with pytest.raises(ValueError): + client._compare_universes("foo.bar", None) + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (ControlServiceClient, "grpc"), + (ControlServiceAsyncClient, "grpc_asyncio"), + (ControlServiceClient, "rest"), + ], +) +def test_control_service_client_from_service_account_info(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_info" + ) as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info, transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + "discoveryengine.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://discoveryengine.googleapis.com" + ) + + +@pytest.mark.parametrize( + "transport_class,transport_name", + [ + (transports.ControlServiceGrpcTransport, "grpc"), + (transports.ControlServiceGrpcAsyncIOTransport, "grpc_asyncio"), + (transports.ControlServiceRestTransport, "rest"), + ], +) +def test_control_service_client_service_account_always_use_jwt( + transport_class, transport_name +): + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) + + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=False) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (ControlServiceClient, "grpc"), + (ControlServiceAsyncClient, "grpc_asyncio"), + (ControlServiceClient, "rest"), + ], +) +def test_control_service_client_from_service_account_file(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_file" + ) as factory: + factory.return_value = creds + client = client_class.from_service_account_file( + "dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + client = client_class.from_service_account_json( + "dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + "discoveryengine.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://discoveryengine.googleapis.com" + ) + + +def test_control_service_client_get_transport_class(): + transport = ControlServiceClient.get_transport_class() + available_transports = [ + transports.ControlServiceGrpcTransport, + transports.ControlServiceRestTransport, + ] + assert transport in available_transports + + transport = ControlServiceClient.get_transport_class("grpc") + assert transport == transports.ControlServiceGrpcTransport + + +@pytest.mark.parametrize( + 
"client_class,transport_class,transport_name", + [ + (ControlServiceClient, transports.ControlServiceGrpcTransport, "grpc"), + ( + ControlServiceAsyncClient, + transports.ControlServiceGrpcAsyncIOTransport, + "grpc_asyncio", + ), + (ControlServiceClient, transports.ControlServiceRestTransport, "rest"), + ], +) +@mock.patch.object( + ControlServiceClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(ControlServiceClient), +) +@mock.patch.object( + ControlServiceAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(ControlServiceAsyncClient), +) +def test_control_service_client_client_options( + client_class, transport_class, transport_name +): + # Check that if channel is provided we won't create a new one. + with mock.patch.object(ControlServiceClient, "get_transport_class") as gtc: + transport = transport_class(credentials=ga_credentials.AnonymousCredentials()) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object(ControlServiceClient, "get_transport_class") as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. + options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name, client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + client = client_class(transport=transport_name) + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError) as excinfo: + client = client_class(transport=transport_name) + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + # Check the case api_endpoint is provided + options = client_options.ClientOptions( + api_audience="https://language.googleapis.com" + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience="https://language.googleapis.com", + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,use_client_cert_env", + [ + (ControlServiceClient, transports.ControlServiceGrpcTransport, "grpc", "true"), + ( + ControlServiceAsyncClient, + transports.ControlServiceGrpcAsyncIOTransport, + "grpc_asyncio", + "true", + ), + (ControlServiceClient, transports.ControlServiceGrpcTransport, "grpc", "false"), + ( + ControlServiceAsyncClient, + transports.ControlServiceGrpcAsyncIOTransport, + "grpc_asyncio", + "false", + ), + (ControlServiceClient, transports.ControlServiceRestTransport, "rest", "true"), + (ControlServiceClient, transports.ControlServiceRestTransport, "rest", "false"), + ], +) +@mock.patch.object( + ControlServiceClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(ControlServiceClient), +) +@mock.patch.object( + ControlServiceAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(ControlServiceAsyncClient), +) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_control_service_client_mtls_env_auto( + client_class, transport_class, transport_name, use_client_cert_env +): + # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. 
+ + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + options = client_options.ClientOptions( + client_cert_source=client_cert_source_callback + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ) + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=client_cert_source_callback, + ): + if use_client_cert_env == "false": + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ) + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case client_cert_source and ADC client cert are not provided. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class", [ControlServiceClient, ControlServiceAsyncClient] +) +@mock.patch.object( + ControlServiceClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(ControlServiceClient), +) +@mock.patch.object( + ControlServiceAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(ControlServiceAsyncClient), +) +def test_control_service_client_get_mtls_endpoint_and_cert_source(client_class): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=mock_client_cert_source, + ): + ( + api_endpoint, + cert_source, + ) = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + client_class.get_mtls_endpoint_and_cert_source() + + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError) as excinfo: + client_class.get_mtls_endpoint_and_cert_source() + + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + + +@pytest.mark.parametrize( + "client_class", [ControlServiceClient, ControlServiceAsyncClient] +) +@mock.patch.object( + ControlServiceClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(ControlServiceClient), +) +@mock.patch.object( + ControlServiceAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(ControlServiceAsyncClient), +) +def test_control_service_client_client_api_endpoint(client_class): + mock_client_cert_source = client_cert_source_callback + api_override = "foo.com" + default_universe = ControlServiceClient._DEFAULT_UNIVERSE + default_endpoint = ControlServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=default_universe + ) + mock_universe = "bar.com" + mock_endpoint = ControlServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=mock_universe + ) + + # If ClientOptions.api_endpoint is set and GOOGLE_API_USE_CLIENT_CERTIFICATE="true", + # use ClientOptions.api_endpoint as the api endpoint regardless. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ): + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=api_override + ) + client = client_class( + client_options=options, + credentials=ga_credentials.AnonymousCredentials(), + ) + assert client.api_endpoint == api_override + + # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="never", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + client = client_class(credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == default_endpoint + + # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="always", + # use the DEFAULT_MTLS_ENDPOINT as the api endpoint. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + client = client_class(credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + + # If ClientOptions.api_endpoint is not set, GOOGLE_API_USE_MTLS_ENDPOINT="auto" (default), + # GOOGLE_API_USE_CLIENT_CERTIFICATE="false" (default), default cert source doesn't exist, + # and ClientOptions.universe_domain="bar.com", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with universe domain as the api endpoint. + options = client_options.ClientOptions() + universe_exists = hasattr(options, "universe_domain") + if universe_exists: + options = client_options.ClientOptions(universe_domain=mock_universe) + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + else: + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + assert client.api_endpoint == ( + mock_endpoint if universe_exists else default_endpoint + ) + assert client.universe_domain == ( + mock_universe if universe_exists else default_universe + ) + + # If ClientOptions does not have a universe domain attribute and GOOGLE_API_USE_MTLS_ENDPOINT="never", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. + options = client_options.ClientOptions() + if hasattr(options, "universe_domain"): + delattr(options, "universe_domain") + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + assert client.api_endpoint == default_endpoint + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (ControlServiceClient, transports.ControlServiceGrpcTransport, "grpc"), + ( + ControlServiceAsyncClient, + transports.ControlServiceGrpcAsyncIOTransport, + "grpc_asyncio", + ), + (ControlServiceClient, transports.ControlServiceRestTransport, "rest"), + ], +) +def test_control_service_client_client_options_scopes( + client_class, transport_class, transport_name +): + # Check the case scopes are provided. + options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + ControlServiceClient, + transports.ControlServiceGrpcTransport, + "grpc", + grpc_helpers, + ), + ( + ControlServiceAsyncClient, + transports.ControlServiceGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + (ControlServiceClient, transports.ControlServiceRestTransport, "rest", None), + ], +) +def test_control_service_client_client_options_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. 
+ options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +def test_control_service_client_client_options_from_dict(): + with mock.patch( + "google.cloud.discoveryengine_v1alpha.services.control_service.transports.ControlServiceGrpcTransport.__init__" + ) as grpc_transport: + grpc_transport.return_value = None + client = ControlServiceClient( + client_options={"api_endpoint": "squid.clam.whelk"} + ) + grpc_transport.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + ControlServiceClient, + transports.ControlServiceGrpcTransport, + "grpc", + grpc_helpers, + ), + ( + ControlServiceAsyncClient, + transports.ControlServiceGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + ], +) +def test_control_service_client_create_channel_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. + options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # test that the credentials from file are saved and used as the credentials. 
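+    # In other words (as the assertions below spell out), the client is
+    # expected to load the file via google.auth.load_credentials_from_file and
+    # hand the resulting credentials object to create_channel, so the channel
+    # is created with credentials=file_creds and credentials_file=None rather
+    # than with the raw "credentials.json" path.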
+ with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel" + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + file_creds = ga_credentials.AnonymousCredentials() + load_creds.return_value = (file_creds, None) + adc.return_value = (creds, None) + client = client_class(client_options=options, transport=transport_name) + create_channel.assert_called_with( + "discoveryengine.googleapis.com:443", + credentials=file_creds, + credentials_file=None, + quota_project_id=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + scopes=None, + default_host="discoveryengine.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "request_type", + [ + control_service.CreateControlRequest, + dict, + ], +) +def test_create_control(request_type, transport: str = "grpc"): + client = ControlServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_control), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = gcd_control.Control( + name="name_value", + display_name="display_name_value", + associated_serving_config_ids=["associated_serving_config_ids_value"], + solution_type=common.SolutionType.SOLUTION_TYPE_RECOMMENDATION, + use_cases=[common.SearchUseCase.SEARCH_USE_CASE_SEARCH], + ) + response = client.create_control(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = control_service.CreateControlRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, gcd_control.Control) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.associated_serving_config_ids == [ + "associated_serving_config_ids_value" + ] + assert response.solution_type == common.SolutionType.SOLUTION_TYPE_RECOMMENDATION + assert response.use_cases == [common.SearchUseCase.SEARCH_USE_CASE_SEARCH] + + +def test_create_control_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ControlServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_control), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.create_control() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == control_service.CreateControlRequest() + + +def test_create_control_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = ControlServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = control_service.CreateControlRequest( + parent="parent_value", + control_id="control_id_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_control), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.create_control(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == control_service.CreateControlRequest( + parent="parent_value", + control_id="control_id_value", + ) + + +def test_create_control_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ControlServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.create_control in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.create_control] = mock_rpc + request = {} + client.create_control(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.create_control(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_create_control_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ControlServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_control), "__call__") as call: + # Designate an appropriate return value for the call. 
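+        # For the async client the mocked stub method must return an
+        # awaitable, so the response is wrapped in
+        # grpc_helpers_async.FakeUnaryUnaryCall; awaiting the call then yields
+        # the wrapped Control message.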
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + gcd_control.Control( + name="name_value", + display_name="display_name_value", + associated_serving_config_ids=["associated_serving_config_ids_value"], + solution_type=common.SolutionType.SOLUTION_TYPE_RECOMMENDATION, + use_cases=[common.SearchUseCase.SEARCH_USE_CASE_SEARCH], + ) + ) + response = await client.create_control() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == control_service.CreateControlRequest() + + +@pytest.mark.asyncio +async def test_create_control_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = ControlServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.create_control + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[ + client._client._transport.create_control + ] = mock_object + + request = {} + await client.create_control(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + await client.create_control(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + +@pytest.mark.asyncio +async def test_create_control_async( + transport: str = "grpc_asyncio", request_type=control_service.CreateControlRequest +): + client = ControlServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_control), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + gcd_control.Control( + name="name_value", + display_name="display_name_value", + associated_serving_config_ids=["associated_serving_config_ids_value"], + solution_type=common.SolutionType.SOLUTION_TYPE_RECOMMENDATION, + use_cases=[common.SearchUseCase.SEARCH_USE_CASE_SEARCH], + ) + ) + response = await client.create_control(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = control_service.CreateControlRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, gcd_control.Control) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.associated_serving_config_ids == [ + "associated_serving_config_ids_value" + ] + assert response.solution_type == common.SolutionType.SOLUTION_TYPE_RECOMMENDATION + assert response.use_cases == [common.SearchUseCase.SEARCH_USE_CASE_SEARCH] + + +@pytest.mark.asyncio +async def test_create_control_async_from_dict(): + await test_create_control_async(request_type=dict) + + +def test_create_control_field_headers(): + client = ControlServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = control_service.CreateControlRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_control), "__call__") as call: + call.return_value = gcd_control.Control() + client.create_control(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_create_control_field_headers_async(): + client = ControlServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = control_service.CreateControlRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_control), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gcd_control.Control()) + await client.create_control(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_create_control_flattened(): + client = ControlServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_control), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = gcd_control.Control() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.create_control( + parent="parent_value", + control=gcd_control.Control( + boost_action=gcd_control.Control.BoostAction(boost=0.551) + ), + control_id="control_id_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
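+    # That is, the flattened keyword arguments above are folded into a single
+    # control_service.CreateControlRequest, so the checks below read the
+    # parent, control and control_id fields back off args[0]. A hypothetical
+    # equivalent request-object call would look like:
+    #
+    #   client.create_control(
+    #       request=control_service.CreateControlRequest(
+    #           parent="parent_value",
+    #           control=gcd_control.Control(
+    #               boost_action=gcd_control.Control.BoostAction(boost=0.551)
+    #           ),
+    #           control_id="control_id_value",
+    #       )
+    #   )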
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].control + mock_val = gcd_control.Control( + boost_action=gcd_control.Control.BoostAction(boost=0.551) + ) + assert arg == mock_val + arg = args[0].control_id + mock_val = "control_id_value" + assert arg == mock_val + + +def test_create_control_flattened_error(): + client = ControlServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_control( + control_service.CreateControlRequest(), + parent="parent_value", + control=gcd_control.Control( + boost_action=gcd_control.Control.BoostAction(boost=0.551) + ), + control_id="control_id_value", + ) + + +@pytest.mark.asyncio +async def test_create_control_flattened_async(): + client = ControlServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_control), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = gcd_control.Control() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gcd_control.Control()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.create_control( + parent="parent_value", + control=gcd_control.Control( + boost_action=gcd_control.Control.BoostAction(boost=0.551) + ), + control_id="control_id_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].control + mock_val = gcd_control.Control( + boost_action=gcd_control.Control.BoostAction(boost=0.551) + ) + assert arg == mock_val + arg = args[0].control_id + mock_val = "control_id_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_create_control_flattened_error_async(): + client = ControlServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.create_control( + control_service.CreateControlRequest(), + parent="parent_value", + control=gcd_control.Control( + boost_action=gcd_control.Control.BoostAction(boost=0.551) + ), + control_id="control_id_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + control_service.DeleteControlRequest, + dict, + ], +) +def test_delete_control(request_type, transport: str = "grpc"): + client = ControlServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_control), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.delete_control(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = control_service.DeleteControlRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_control_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ControlServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_control), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.delete_control() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == control_service.DeleteControlRequest() + + +def test_delete_control_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = ControlServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = control_service.DeleteControlRequest( + name="name_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_control), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.delete_control(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == control_service.DeleteControlRequest( + name="name_value", + ) + + +def test_delete_control_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ControlServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete_control in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.delete_control] = mock_rpc + request = {} + client.delete_control(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.delete_control(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_delete_control_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. 
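+    # Concretely, the call shape being covered here is simply
+    # `await client.delete_control()` with no arguments at all; the transport
+    # should still receive a default control_service.DeleteControlRequest(),
+    # which is what the assertion on args[0] below verifies.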
+ client = ControlServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_control), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_control() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == control_service.DeleteControlRequest() + + +@pytest.mark.asyncio +async def test_delete_control_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = ControlServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.delete_control + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[ + client._client._transport.delete_control + ] = mock_object + + request = {} + await client.delete_control(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + await client.delete_control(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + +@pytest.mark.asyncio +async def test_delete_control_async( + transport: str = "grpc_asyncio", request_type=control_service.DeleteControlRequest +): + client = ControlServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_control), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_control(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = control_service.DeleteControlRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_delete_control_async_from_dict(): + await test_delete_control_async(request_type=dict) + + +def test_delete_control_field_headers(): + client = ControlServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = control_service.DeleteControlRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
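+    # The point of this test is the routing header: because request.name is
+    # set, the metadata sent with the call is expected to include the pair
+    # ("x-goog-request-params", "name=name_value"), as asserted below.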
+ with mock.patch.object(type(client.transport.delete_control), "__call__") as call: + call.return_value = None + client.delete_control(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_delete_control_field_headers_async(): + client = ControlServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = control_service.DeleteControlRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_control), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_control(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_delete_control_flattened(): + client = ControlServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_control), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.delete_control( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_delete_control_flattened_error(): + client = ControlServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_control( + control_service.DeleteControlRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_delete_control_flattened_async(): + client = ControlServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_control), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.delete_control( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
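# --- Illustrative sketch: how the ("x-goog-request-params", ...) metadata
# entry checked in the field-header tests above is built. The field/value
# pair below is illustrative; to_grpc_metadata is the google.api_core helper
# the generated client uses for routing headers.
from google.api_core.gapic_v1 import routing_header

params = routing_header.to_grpc_metadata((("name", "name_value"),))
assert params == ("x-goog-request-params", "name=name_value")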
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_delete_control_flattened_error_async(): + client = ControlServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.delete_control( + control_service.DeleteControlRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + control_service.UpdateControlRequest, + dict, + ], +) +def test_update_control(request_type, transport: str = "grpc"): + client = ControlServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_control), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = gcd_control.Control( + name="name_value", + display_name="display_name_value", + associated_serving_config_ids=["associated_serving_config_ids_value"], + solution_type=common.SolutionType.SOLUTION_TYPE_RECOMMENDATION, + use_cases=[common.SearchUseCase.SEARCH_USE_CASE_SEARCH], + ) + response = client.update_control(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = control_service.UpdateControlRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, gcd_control.Control) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.associated_serving_config_ids == [ + "associated_serving_config_ids_value" + ] + assert response.solution_type == common.SolutionType.SOLUTION_TYPE_RECOMMENDATION + assert response.use_cases == [common.SearchUseCase.SEARCH_USE_CASE_SEARCH] + + +def test_update_control_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ControlServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_control), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.update_control() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == control_service.UpdateControlRequest() + + +def test_update_control_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = ControlServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. 
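# --- Illustrative sketch of the AIP-4235 behaviour these tests refer to:
# request-ID style string fields marked as auto-populated UUID4 are filled
# in only when the caller left them empty. The `request_id` field name and
# helper below are assumptions for illustration; the ControlService requests
# in this module do not declare such a field.
import uuid


def _autopopulate_uuid4_field(request, field="request_id"):
    if not getattr(request, field, ""):
        setattr(request, field, str(uuid.uuid4()))
    return request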
+ request = control_service.UpdateControlRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_control), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.update_control(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == control_service.UpdateControlRequest() + + +def test_update_control_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ControlServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.update_control in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.update_control] = mock_rpc + request = {} + client.update_control(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.update_control(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_update_control_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ControlServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_control), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + gcd_control.Control( + name="name_value", + display_name="display_name_value", + associated_serving_config_ids=["associated_serving_config_ids_value"], + solution_type=common.SolutionType.SOLUTION_TYPE_RECOMMENDATION, + use_cases=[common.SearchUseCase.SEARCH_USE_CASE_SEARCH], + ) + ) + response = await client.update_control() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == control_service.UpdateControlRequest() + + +@pytest.mark.asyncio +async def test_update_control_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = ControlServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.update_control + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[ + client._client._transport.update_control + ] = mock_object + + request = {} + await client.update_control(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + await client.update_control(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + +@pytest.mark.asyncio +async def test_update_control_async( + transport: str = "grpc_asyncio", request_type=control_service.UpdateControlRequest +): + client = ControlServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_control), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + gcd_control.Control( + name="name_value", + display_name="display_name_value", + associated_serving_config_ids=["associated_serving_config_ids_value"], + solution_type=common.SolutionType.SOLUTION_TYPE_RECOMMENDATION, + use_cases=[common.SearchUseCase.SEARCH_USE_CASE_SEARCH], + ) + ) + response = await client.update_control(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = control_service.UpdateControlRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, gcd_control.Control) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.associated_serving_config_ids == [ + "associated_serving_config_ids_value" + ] + assert response.solution_type == common.SolutionType.SOLUTION_TYPE_RECOMMENDATION + assert response.use_cases == [common.SearchUseCase.SEARCH_USE_CASE_SEARCH] + + +@pytest.mark.asyncio +async def test_update_control_async_from_dict(): + await test_update_control_async(request_type=dict) + + +def test_update_control_field_headers(): + client = ControlServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = control_service.UpdateControlRequest() + + request.control.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_control), "__call__") as call: + call.return_value = gcd_control.Control() + client.update_control(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "control.name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_update_control_field_headers_async(): + client = ControlServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = control_service.UpdateControlRequest() + + request.control.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_control), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gcd_control.Control()) + await client.update_control(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "control.name=name_value", + ) in kw["metadata"] + + +def test_update_control_flattened(): + client = ControlServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_control), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = gcd_control.Control() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.update_control( + control=gcd_control.Control( + boost_action=gcd_control.Control.BoostAction(boost=0.551) + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].control + mock_val = gcd_control.Control( + boost_action=gcd_control.Control.BoostAction(boost=0.551) + ) + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + assert arg == mock_val + + +def test_update_control_flattened_error(): + client = ControlServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_control( + control_service.UpdateControlRequest(), + control=gcd_control.Control( + boost_action=gcd_control.Control.BoostAction(boost=0.551) + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +@pytest.mark.asyncio +async def test_update_control_flattened_async(): + client = ControlServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_control), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = gcd_control.Control() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gcd_control.Control()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.update_control( + control=gcd_control.Control( + boost_action=gcd_control.Control.BoostAction(boost=0.551) + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].control + mock_val = gcd_control.Control( + boost_action=gcd_control.Control.BoostAction(boost=0.551) + ) + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_update_control_flattened_error_async(): + client = ControlServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.update_control( + control_service.UpdateControlRequest(), + control=gcd_control.Control( + boost_action=gcd_control.Control.BoostAction(boost=0.551) + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +@pytest.mark.parametrize( + "request_type", + [ + control_service.GetControlRequest, + dict, + ], +) +def test_get_control(request_type, transport: str = "grpc"): + client = ControlServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_control), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = control.Control( + name="name_value", + display_name="display_name_value", + associated_serving_config_ids=["associated_serving_config_ids_value"], + solution_type=common.SolutionType.SOLUTION_TYPE_RECOMMENDATION, + use_cases=[common.SearchUseCase.SEARCH_USE_CASE_SEARCH], + ) + response = client.get_control(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = control_service.GetControlRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, control.Control) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.associated_serving_config_ids == [ + "associated_serving_config_ids_value" + ] + assert response.solution_type == common.SolutionType.SOLUTION_TYPE_RECOMMENDATION + assert response.use_cases == [common.SearchUseCase.SEARCH_USE_CASE_SEARCH] + + +def test_get_control_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ControlServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_control), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.get_control() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == control_service.GetControlRequest() + + +def test_get_control_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = ControlServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = control_service.GetControlRequest( + name="name_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_control), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.get_control(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == control_service.GetControlRequest( + name="name_value", + ) + + +def test_get_control_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ControlServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_control in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client._transport._wrapped_methods[client._transport.get_control] = mock_rpc + request = {} + client.get_control(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.get_control(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_get_control_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ControlServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_control), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + control.Control( + name="name_value", + display_name="display_name_value", + associated_serving_config_ids=["associated_serving_config_ids_value"], + solution_type=common.SolutionType.SOLUTION_TYPE_RECOMMENDATION, + use_cases=[common.SearchUseCase.SEARCH_USE_CASE_SEARCH], + ) + ) + response = await client.get_control() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == control_service.GetControlRequest() + + +@pytest.mark.asyncio +async def test_get_control_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = ControlServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.get_control + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[ + client._client._transport.get_control + ] = mock_object + + request = {} + await client.get_control(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + await client.get_control(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + +@pytest.mark.asyncio +async def test_get_control_async( + transport: str = "grpc_asyncio", request_type=control_service.GetControlRequest +): + client = ControlServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_control), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + control.Control( + name="name_value", + display_name="display_name_value", + associated_serving_config_ids=["associated_serving_config_ids_value"], + solution_type=common.SolutionType.SOLUTION_TYPE_RECOMMENDATION, + use_cases=[common.SearchUseCase.SEARCH_USE_CASE_SEARCH], + ) + ) + response = await client.get_control(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = control_service.GetControlRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, control.Control) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.associated_serving_config_ids == [ + "associated_serving_config_ids_value" + ] + assert response.solution_type == common.SolutionType.SOLUTION_TYPE_RECOMMENDATION + assert response.use_cases == [common.SearchUseCase.SEARCH_USE_CASE_SEARCH] + + +@pytest.mark.asyncio +async def test_get_control_async_from_dict(): + await test_get_control_async(request_type=dict) + + +def test_get_control_field_headers(): + client = ControlServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = control_service.GetControlRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_control), "__call__") as call: + call.return_value = control.Control() + client.get_control(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_control_field_headers_async(): + client = ControlServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = control_service.GetControlRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_control), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(control.Control()) + await client.get_control(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_get_control_flattened(): + client = ControlServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_control), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = control.Control() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
+ client.get_control( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_get_control_flattened_error(): + client = ControlServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_control( + control_service.GetControlRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_get_control_flattened_async(): + client = ControlServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_control), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = control.Control() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(control.Control()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_control( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_get_control_flattened_error_async(): + client = ControlServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.get_control( + control_service.GetControlRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + control_service.ListControlsRequest, + dict, + ], +) +def test_list_controls(request_type, transport: str = "grpc"): + client = ControlServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_controls), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = control_service.ListControlsResponse( + next_page_token="next_page_token_value", + ) + response = client.list_controls(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = control_service.ListControlsRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListControlsPager) + assert response.next_page_token == "next_page_token_value" + + +def test_list_controls_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ControlServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
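# --- Usage sketch for the flattened-argument convention exercised by the
# *_flattened and *_flattened_error tests above. The client argument is
# assumed to be an already-constructed ControlServiceClient, and the
# module's existing control_service import is reused.
def _get_control_call_shapes(client):
    # Either pass a fully-formed request object...
    client.get_control(request=control_service.GetControlRequest(name="name_value"))
    # ...or flattened keyword arguments for the documented fields.
    client.get_control(name="name_value")
    # Passing both a request object and flattened fields raises ValueError,
    # which is what the flattened_error tests assert.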
+ with mock.patch.object(type(client.transport.list_controls), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.list_controls() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == control_service.ListControlsRequest() + + +def test_list_controls_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = ControlServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = control_service.ListControlsRequest( + parent="parent_value", + page_token="page_token_value", + filter="filter_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_controls), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.list_controls(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == control_service.ListControlsRequest( + parent="parent_value", + page_token="page_token_value", + filter="filter_value", + ) + + +def test_list_controls_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ControlServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_controls in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.list_controls] = mock_rpc + request = {} + client.list_controls(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_controls(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_list_controls_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ControlServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_controls), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + control_service.ListControlsResponse( + next_page_token="next_page_token_value", + ) + ) + response = await client.list_controls() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == control_service.ListControlsRequest() + + +@pytest.mark.asyncio +async def test_list_controls_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = ControlServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.list_controls + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[ + client._client._transport.list_controls + ] = mock_object + + request = {} + await client.list_controls(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + await client.list_controls(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + +@pytest.mark.asyncio +async def test_list_controls_async( + transport: str = "grpc_asyncio", request_type=control_service.ListControlsRequest +): + client = ControlServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_controls), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + control_service.ListControlsResponse( + next_page_token="next_page_token_value", + ) + ) + response = await client.list_controls(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = control_service.ListControlsRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListControlsAsyncPager) + assert response.next_page_token == "next_page_token_value" + + +@pytest.mark.asyncio +async def test_list_controls_async_from_dict(): + await test_list_controls_async(request_type=dict) + + +def test_list_controls_field_headers(): + client = ControlServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = control_service.ListControlsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.list_controls), "__call__") as call: + call.return_value = control_service.ListControlsResponse() + client.list_controls(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_controls_field_headers_async(): + client = ControlServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = control_service.ListControlsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_controls), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + control_service.ListControlsResponse() + ) + await client.list_controls(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_list_controls_flattened(): + client = ControlServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_controls), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = control_service.ListControlsResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_controls( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +def test_list_controls_flattened_error(): + client = ControlServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_controls( + control_service.ListControlsRequest(), + parent="parent_value", + ) + + +@pytest.mark.asyncio +async def test_list_controls_flattened_async(): + client = ControlServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_controls), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = control_service.ListControlsResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + control_service.ListControlsResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_controls( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_list_controls_flattened_error_async(): + client = ControlServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.list_controls( + control_service.ListControlsRequest(), + parent="parent_value", + ) + + +def test_list_controls_pager(transport_name: str = "grpc"): + client = ControlServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_controls), "__call__") as call: + # Set the response to a series of pages. + call.side_effect = ( + control_service.ListControlsResponse( + controls=[ + control.Control(), + control.Control(), + control.Control(), + ], + next_page_token="abc", + ), + control_service.ListControlsResponse( + controls=[], + next_page_token="def", + ), + control_service.ListControlsResponse( + controls=[ + control.Control(), + ], + next_page_token="ghi", + ), + control_service.ListControlsResponse( + controls=[ + control.Control(), + control.Control(), + ], + ), + RuntimeError, + ) + + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + ) + pager = client.list_controls(request={}) + + assert pager._metadata == expected_metadata + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, control.Control) for i in results) + + +def test_list_controls_pages(transport_name: str = "grpc"): + client = ControlServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_controls), "__call__") as call: + # Set the response to a series of pages. + call.side_effect = ( + control_service.ListControlsResponse( + controls=[ + control.Control(), + control.Control(), + control.Control(), + ], + next_page_token="abc", + ), + control_service.ListControlsResponse( + controls=[], + next_page_token="def", + ), + control_service.ListControlsResponse( + controls=[ + control.Control(), + ], + next_page_token="ghi", + ), + control_service.ListControlsResponse( + controls=[ + control.Control(), + control.Control(), + ], + ), + RuntimeError, + ) + pages = list(client.list_controls(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_list_controls_async_pager(): + client = ControlServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_controls), "__call__", new_callable=mock.AsyncMock + ) as call: + # Set the response to a series of pages. 
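# --- Illustrative sketch (simplified, not the real pagers.ListControlsPager):
# the pagination loop the pager/pages tests above rely on. list_controls_rpc
# is an assumed callable returning a ListControlsResponse.
def _iterate_all_controls(list_controls_rpc, request):
    while True:
        response = list_controls_rpc(request)
        yield from response.controls
        if not response.next_page_token:
            break
        request.page_token = response.next_page_token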
+ call.side_effect = ( + control_service.ListControlsResponse( + controls=[ + control.Control(), + control.Control(), + control.Control(), + ], + next_page_token="abc", + ), + control_service.ListControlsResponse( + controls=[], + next_page_token="def", + ), + control_service.ListControlsResponse( + controls=[ + control.Control(), + ], + next_page_token="ghi", + ), + control_service.ListControlsResponse( + controls=[ + control.Control(), + control.Control(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_controls( + request={}, + ) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, control.Control) for i in responses) + + +@pytest.mark.asyncio +async def test_list_controls_async_pages(): + client = ControlServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_controls), "__call__", new_callable=mock.AsyncMock + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + control_service.ListControlsResponse( + controls=[ + control.Control(), + control.Control(), + control.Control(), + ], + next_page_token="abc", + ), + control_service.ListControlsResponse( + controls=[], + next_page_token="def", + ), + control_service.ListControlsResponse( + controls=[ + control.Control(), + ], + next_page_token="ghi", + ), + control_service.ListControlsResponse( + controls=[ + control.Control(), + control.Control(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_controls(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + control_service.CreateControlRequest, + dict, + ], +) +def test_create_control_rest(request_type): + client = ControlServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2/dataStores/sample3"} + request_init["control"] = { + "boost_action": { + "boost": 0.551, + "filter": "filter_value", + "data_store": "data_store_value", + }, + "filter_action": {"filter": "filter_value", "data_store": "data_store_value"}, + "redirect_action": {"redirect_uri": "redirect_uri_value"}, + "synonyms_action": {"synonyms": ["synonyms_value1", "synonyms_value2"]}, + "name": "name_value", + "display_name": "display_name_value", + "associated_serving_config_ids": [ + "associated_serving_config_ids_value1", + "associated_serving_config_ids_value2", + ], + "solution_type": 1, + "use_cases": [1], + "conditions": [ + { + "query_terms": [{"value": "value_value", "full_match": True}], + "active_time_range": [ + {"start_time": {"seconds": 751, "nanos": 543}, "end_time": {}} + ], + } + ], + } + # The version of a generated dependency at test runtime may differ from the version used during generation. 
+ # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = control_service.CreateControlRequest.meta.fields["control"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["control"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["control"][field])): + del request_init["control"][field][i][subfield] + else: + del request_init["control"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = gcd_control.Control( + name="name_value", + display_name="display_name_value", + associated_serving_config_ids=["associated_serving_config_ids_value"], + solution_type=common.SolutionType.SOLUTION_TYPE_RECOMMENDATION, + use_cases=[common.SearchUseCase.SEARCH_USE_CASE_SEARCH], + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = gcd_control.Control.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.create_control(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, gcd_control.Control) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.associated_serving_config_ids == [ + "associated_serving_config_ids_value" + ] + assert response.solution_type == common.SolutionType.SOLUTION_TYPE_RECOMMENDATION + assert response.use_cases == [common.SearchUseCase.SEARCH_USE_CASE_SEARCH] + + +def test_create_control_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ControlServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.create_control in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.create_control] = mock_rpc + + request = {} + client.create_control(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.create_control(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_create_control_rest_required_fields( + request_type=control_service.CreateControlRequest, +): + transport_class = transports.ControlServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request_init["control_id"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + assert "controlId" not in jsonified_request + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_control._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + assert "controlId" in jsonified_request + assert jsonified_request["controlId"] == request_init["control_id"] + + jsonified_request["parent"] = "parent_value" + jsonified_request["controlId"] = "control_id_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_control._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("control_id",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + assert "controlId" in jsonified_request + assert jsonified_request["controlId"] == "control_id_value" + + client = ControlServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = gcd_control.Control() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = gcd_control.Control.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.create_control(request) + + expected_params = [ + ( + "controlId", + "", + ), + ("$alt", "json;enum-encoding=int"), + ] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_create_control_rest_unset_required_fields(): + transport = transports.ControlServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.create_control._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(("controlId",)) + & set( + ( + "parent", + "control", + "controlId", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_control_rest_interceptors(null_interceptor): + transport = transports.ControlServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.ControlServiceRestInterceptor(), + ) + client = ControlServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ControlServiceRestInterceptor, "post_create_control" + ) as post, mock.patch.object( + transports.ControlServiceRestInterceptor, "pre_create_control" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = control_service.CreateControlRequest.pb( + control_service.CreateControlRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = gcd_control.Control.to_json(gcd_control.Control()) + + request = control_service.CreateControlRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = gcd_control.Control() + + client.create_control( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_create_control_rest_bad_request( + transport: str = "rest", request_type=control_service.CreateControlRequest +): + client = ControlServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2/dataStores/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
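+    # An HTTP 400 response is surfaced by google.api_core as
+    # core_exceptions.BadRequest, which is the exception asserted below.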
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.create_control(request) + + +def test_create_control_rest_flattened(): + client = ControlServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = gcd_control.Control() + + # get arguments that satisfy an http rule for this method + sample_request = { + "parent": "projects/sample1/locations/sample2/dataStores/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + control=gcd_control.Control( + boost_action=gcd_control.Control.BoostAction(boost=0.551) + ), + control_id="control_id_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = gcd_control.Control.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.create_control(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha/{parent=projects/*/locations/*/dataStores/*}/controls" + % client.transport._host, + args[1], + ) + + +def test_create_control_rest_flattened_error(transport: str = "rest"): + client = ControlServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_control( + control_service.CreateControlRequest(), + parent="parent_value", + control=gcd_control.Control( + boost_action=gcd_control.Control.BoostAction(boost=0.551) + ), + control_id="control_id_value", + ) + + +def test_create_control_rest_error(): + client = ControlServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + control_service.DeleteControlRequest, + dict, + ], +) +def test_delete_control_rest(request_type): + client = ControlServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/dataStores/sample3/controls/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
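+        # DeleteControl has no response payload, so the faked HTTP body is an
+        # empty string and the client call is expected to return None.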
+ return_value = None + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.delete_control(request) + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_control_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ControlServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete_control in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.delete_control] = mock_rpc + + request = {} + client.delete_control(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.delete_control(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_delete_control_rest_required_fields( + request_type=control_service.DeleteControlRequest, +): + transport_class = transports.ControlServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_control._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_control._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = ControlServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = None + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "delete", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.delete_control(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_delete_control_rest_unset_required_fields(): + transport = transports.ControlServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.delete_control._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_control_rest_interceptors(null_interceptor): + transport = transports.ControlServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.ControlServiceRestInterceptor(), + ) + client = ControlServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ControlServiceRestInterceptor, "pre_delete_control" + ) as pre: + pre.assert_not_called() + pb_message = control_service.DeleteControlRequest.pb( + control_service.DeleteControlRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + + request = control_service.DeleteControlRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + + client.delete_control( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + + +def test_delete_control_rest_bad_request( + transport: str = "rest", request_type=control_service.DeleteControlRequest +): + client = ControlServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/dataStores/sample3/controls/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_control(request) + + +def test_delete_control_rest_flattened(): + client = ControlServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = None + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/dataStores/sample3/controls/sample4" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.delete_control(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha/{name=projects/*/locations/*/dataStores/*/controls/*}" + % client.transport._host, + args[1], + ) + + +def test_delete_control_rest_flattened_error(transport: str = "rest"): + client = ControlServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_control( + control_service.DeleteControlRequest(), + name="name_value", + ) + + +def test_delete_control_rest_error(): + client = ControlServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + control_service.UpdateControlRequest, + dict, + ], +) +def test_update_control_rest(request_type): + client = ControlServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "control": { + "name": "projects/sample1/locations/sample2/dataStores/sample3/controls/sample4" + } + } + request_init["control"] = { + "boost_action": { + "boost": 0.551, + "filter": "filter_value", + "data_store": "data_store_value", + }, + "filter_action": {"filter": "filter_value", "data_store": "data_store_value"}, + "redirect_action": {"redirect_uri": "redirect_uri_value"}, + "synonyms_action": {"synonyms": ["synonyms_value1", "synonyms_value2"]}, + "name": "projects/sample1/locations/sample2/dataStores/sample3/controls/sample4", + "display_name": "display_name_value", + "associated_serving_config_ids": [ + "associated_serving_config_ids_value1", + "associated_serving_config_ids_value2", + ], + "solution_type": 1, + "use_cases": [1], + "conditions": [ + { + "query_terms": [{"value": "value_value", "full_match": True}], + "active_time_range": [ + {"start_time": {"seconds": 751, "nanos": 543}, "end_time": {}} + ], + } + ], + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = control_service.UpdateControlRequest.meta.fields["control"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
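+        # Proto-plus message classes describe their schema via `meta.fields`,
+        # while vanilla protobuf classes expose a `DESCRIPTOR`; the check below
+        # uses that difference to pick the right introspection path.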
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["control"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["control"][field])): + del request_init["control"][field][i][subfield] + else: + del request_init["control"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = gcd_control.Control( + name="name_value", + display_name="display_name_value", + associated_serving_config_ids=["associated_serving_config_ids_value"], + solution_type=common.SolutionType.SOLUTION_TYPE_RECOMMENDATION, + use_cases=[common.SearchUseCase.SEARCH_USE_CASE_SEARCH], + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = gcd_control.Control.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.update_control(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, gcd_control.Control) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.associated_serving_config_ids == [ + "associated_serving_config_ids_value" + ] + assert response.solution_type == common.SolutionType.SOLUTION_TYPE_RECOMMENDATION + assert response.use_cases == [common.SearchUseCase.SEARCH_USE_CASE_SEARCH] + + +def test_update_control_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ControlServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.update_control in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.update_control] = mock_rpc + + request = {} + client.update_control(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.update_control(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_update_control_rest_required_fields( + request_type=control_service.UpdateControlRequest, +): + transport_class = transports.ControlServiceRestTransport + + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_control._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_control._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("update_mask",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + + client = ControlServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = gcd_control.Control() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "patch", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = gcd_control.Control.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.update_control(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_update_control_rest_unset_required_fields(): + transport = transports.ControlServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.update_control._get_unset_required_fields({}) + assert set(unset_fields) == (set(("updateMask",)) & set(("control",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_control_rest_interceptors(null_interceptor): + transport = transports.ControlServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.ControlServiceRestInterceptor(), + ) + client = ControlServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ControlServiceRestInterceptor, "post_update_control" + ) as post, mock.patch.object( + transports.ControlServiceRestInterceptor, "pre_update_control" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = control_service.UpdateControlRequest.pb( + control_service.UpdateControlRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = gcd_control.Control.to_json(gcd_control.Control()) + + request = control_service.UpdateControlRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = gcd_control.Control() + + client.update_control( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_update_control_rest_bad_request( + transport: str = "rest", request_type=control_service.UpdateControlRequest +): + client = ControlServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "control": { + "name": "projects/sample1/locations/sample2/dataStores/sample3/controls/sample4" + } + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.update_control(request) + + +def test_update_control_rest_flattened(): + client = ControlServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = gcd_control.Control() + + # get arguments that satisfy an http rule for this method + sample_request = { + "control": { + "name": "projects/sample1/locations/sample2/dataStores/sample3/controls/sample4" + } + } + + # get truthy value for each flattened field + mock_args = dict( + control=gcd_control.Control( + boost_action=gcd_control.Control.BoostAction(boost=0.551) + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = gcd_control.Control.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.update_control(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha/{control.name=projects/*/locations/*/dataStores/*/controls/*}" + % client.transport._host, + args[1], + ) + + +def test_update_control_rest_flattened_error(transport: str = "rest"): + client = ControlServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_control( + control_service.UpdateControlRequest(), + control=gcd_control.Control( + boost_action=gcd_control.Control.BoostAction(boost=0.551) + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +def test_update_control_rest_error(): + client = ControlServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + control_service.GetControlRequest, + dict, + ], +) +def test_get_control_rest(request_type): + client = ControlServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/dataStores/sample3/controls/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = control.Control( + name="name_value", + display_name="display_name_value", + associated_serving_config_ids=["associated_serving_config_ids_value"], + solution_type=common.SolutionType.SOLUTION_TYPE_RECOMMENDATION, + use_cases=[common.SearchUseCase.SEARCH_USE_CASE_SEARCH], + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = control.Control.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_control(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, control.Control) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.associated_serving_config_ids == [ + "associated_serving_config_ids_value" + ] + assert response.solution_type == common.SolutionType.SOLUTION_TYPE_RECOMMENDATION + assert response.use_cases == [common.SearchUseCase.SEARCH_USE_CASE_SEARCH] + + +def test_get_control_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ControlServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_control in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get_control] = mock_rpc + + request = {} + client.get_control(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get_control(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_get_control_rest_required_fields( + request_type=control_service.GetControlRequest, +): + transport_class = transports.ControlServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_control._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_control._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = ControlServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = control.Control() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = control.Control.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_control(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_control_rest_unset_required_fields(): + transport = transports.ControlServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_control._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_control_rest_interceptors(null_interceptor): + transport = transports.ControlServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.ControlServiceRestInterceptor(), + ) + client = ControlServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ControlServiceRestInterceptor, "post_get_control" + ) as post, mock.patch.object( + transports.ControlServiceRestInterceptor, "pre_get_control" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = control_service.GetControlRequest.pb( + control_service.GetControlRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = control.Control.to_json(control.Control()) + + request = control_service.GetControlRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = control.Control() + + client.get_control( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_control_rest_bad_request( + transport: str = "rest", request_type=control_service.GetControlRequest +): + client = ControlServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/dataStores/sample3/controls/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_control(request) + + +def test_get_control_rest_flattened(): + client = ControlServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = control.Control() + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/dataStores/sample3/controls/sample4" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = control.Control.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_control(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha/{name=projects/*/locations/*/dataStores/*/controls/*}" + % client.transport._host, + args[1], + ) + + +def test_get_control_rest_flattened_error(transport: str = "rest"): + client = ControlServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_control( + control_service.GetControlRequest(), + name="name_value", + ) + + +def test_get_control_rest_error(): + client = ControlServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + control_service.ListControlsRequest, + dict, + ], +) +def test_list_controls_rest(request_type): + client = ControlServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2/dataStores/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = control_service.ListControlsResponse( + next_page_token="next_page_token_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = control_service.ListControlsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_controls(request) + + # Establish that the response is the type that we expect. 
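+    # list_controls wraps the raw ListControlsResponse in a pager; iterating it
+    # transparently issues follow-up requests using next_page_token (exercised
+    # in the pager test further below).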
+ assert isinstance(response, pagers.ListControlsPager) + assert response.next_page_token == "next_page_token_value" + + +def test_list_controls_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ControlServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_controls in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.list_controls] = mock_rpc + + request = {} + client.list_controls(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_controls(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_list_controls_rest_required_fields( + request_type=control_service.ListControlsRequest, +): + transport_class = transports.ControlServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_controls._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_controls._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "filter", + "page_size", + "page_token", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = ControlServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = control_service.ListControlsResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = control_service.ListControlsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_controls(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_controls_rest_unset_required_fields(): + transport = transports.ControlServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.list_controls._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "filter", + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_controls_rest_interceptors(null_interceptor): + transport = transports.ControlServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.ControlServiceRestInterceptor(), + ) + client = ControlServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ControlServiceRestInterceptor, "post_list_controls" + ) as post, mock.patch.object( + transports.ControlServiceRestInterceptor, "pre_list_controls" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = control_service.ListControlsRequest.pb( + control_service.ListControlsRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = control_service.ListControlsResponse.to_json( + control_service.ListControlsResponse() + ) + + request = control_service.ListControlsRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = control_service.ListControlsResponse() + + client.list_controls( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_controls_rest_bad_request( + transport: str = "rest", request_type=control_service.ListControlsRequest +): + client = ControlServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2/dataStores/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_controls(request) + + +def test_list_controls_rest_flattened(): + client = ControlServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = control_service.ListControlsResponse() + + # get arguments that satisfy an http rule for this method + sample_request = { + "parent": "projects/sample1/locations/sample2/dataStores/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = control_service.ListControlsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.list_controls(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha/{parent=projects/*/locations/*/dataStores/*}/controls" + % client.transport._host, + args[1], + ) + + +def test_list_controls_rest_flattened_error(transport: str = "rest"): + client = ControlServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_controls( + control_service.ListControlsRequest(), + parent="parent_value", + ) + + +def test_list_controls_rest_pager(transport: str = "rest"): + client = ControlServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
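+        # Each value assigned to req.side_effect below is returned for exactly
+        # one HTTP call, so every round trip made by the pager consumes the
+        # next page in the sequence.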
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + control_service.ListControlsResponse( + controls=[ + control.Control(), + control.Control(), + control.Control(), + ], + next_page_token="abc", + ), + control_service.ListControlsResponse( + controls=[], + next_page_token="def", + ), + control_service.ListControlsResponse( + controls=[ + control.Control(), + ], + next_page_token="ghi", + ), + control_service.ListControlsResponse( + controls=[ + control.Control(), + control.Control(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + control_service.ListControlsResponse.to_json(x) for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = { + "parent": "projects/sample1/locations/sample2/dataStores/sample3" + } + + pager = client.list_controls(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, control.Control) for i in results) + + pages = list(client.list_controls(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.ControlServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = ControlServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.ControlServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = ControlServiceClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. + transport = transports.ControlServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = ControlServiceClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = ControlServiceClient( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.ControlServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = ControlServiceClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. 
+ transport = transports.ControlServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = ControlServiceClient(transport=transport) + assert client.transport is transport + + +def test_transport_get_channel(): + # A client may be instantiated with a custom transport instance. + transport = transports.ControlServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + transport = transports.ControlServiceGrpcAsyncIOTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.ControlServiceGrpcTransport, + transports.ControlServiceGrpcAsyncIOTransport, + transports.ControlServiceRestTransport, + ], +) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "rest", + ], +) +def test_transport_kind(transport_name): + transport = ControlServiceClient.get_transport_class(transport_name)( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert transport.kind == transport_name + + +def test_transport_grpc_default(): + # A client should use the gRPC transport by default. + client = ControlServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert isinstance( + client.transport, + transports.ControlServiceGrpcTransport, + ) + + +def test_control_service_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.ControlServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json", + ) + + +def test_control_service_base_transport(): + # Instantiate the base transport. + with mock.patch( + "google.cloud.discoveryengine_v1alpha.services.control_service.transports.ControlServiceTransport.__init__" + ) as Transport: + Transport.return_value = None + transport = transports.ControlServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. 
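+    # The base class only defines the interface; the concrete gRPC and REST
+    # transports override these stubs with real implementations.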
+ methods = ( + "create_control", + "delete_control", + "update_control", + "get_control", + "list_controls", + "get_operation", + "cancel_operation", + "list_operations", + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + with pytest.raises(NotImplementedError): + transport.close() + + # Catch all for all remaining methods and properties + remainder = [ + "kind", + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + + +def test_control_service_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch( + "google.cloud.discoveryengine_v1alpha.services.control_service.transports.ControlServiceTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.ControlServiceTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with( + "credentials.json", + scopes=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id="octopus", + ) + + +def test_control_service_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. + with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( + "google.cloud.discoveryengine_v1alpha.services.control_service.transports.ControlServiceTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.ControlServiceTransport() + adc.assert_called_once() + + +def test_control_service_auth_adc(): + # If no credentials are provided, we should use ADC credentials. + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + ControlServiceClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id=None, + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.ControlServiceGrpcTransport, + transports.ControlServiceGrpcAsyncIOTransport, + ], +) +def test_control_service_transport_auth_adc(transport_class): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
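+    # google.auth.default() is patched so the test can verify the scopes and
+    # quota project the transport forwards when falling back to ADC.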
+ with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + adc.assert_called_once_with( + scopes=["1", "2"], + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id="octopus", + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.ControlServiceGrpcTransport, + transports.ControlServiceGrpcAsyncIOTransport, + transports.ControlServiceRestTransport, + ], +) +def test_control_service_transport_auth_gdch_credentials(transport_class): + host = "https://language.com" + api_audience_tests = [None, "https://language2.com"] + api_audience_expect = [host, "https://language2.com"] + for t, e in zip(api_audience_tests, api_audience_expect): + with mock.patch.object(google.auth, "default", autospec=True) as adc: + gdch_mock = mock.MagicMock() + type(gdch_mock).with_gdch_audience = mock.PropertyMock( + return_value=gdch_mock + ) + adc.return_value = (gdch_mock, None) + transport_class(host=host, api_audience=t) + gdch_mock.with_gdch_audience.assert_called_once_with(e) + + +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.ControlServiceGrpcTransport, grpc_helpers), + (transports.ControlServiceGrpcAsyncIOTransport, grpc_helpers_async), + ], +) +def test_control_service_transport_create_channel(transport_class, grpc_helpers): + # If credentials and host are not provided, the transport class should use + # ADC credentials. + with mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + + create_channel.assert_called_with( + "discoveryengine.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + scopes=["1", "2"], + default_host="discoveryengine.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.ControlServiceGrpcTransport, + transports.ControlServiceGrpcAsyncIOTransport, + ], +) +def test_control_service_grpc_transport_client_cert_source_for_mtls(transport_class): + cred = ga_credentials.AnonymousCredentials() + + # Check ssl_channel_credentials is used if provided. 
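The check that follows patches the transport's `create_channel` factory and asserts that an explicitly supplied `ssl_channel_credentials` object is passed straight through; the same forwarding check reduced to a self-contained sketch, with a hypothetical `ChannelHolder` class standing in for the transport:

```python
# Patch the channel factory, construct the object under test with an
# explicit credentials object, and assert the factory received that
# exact object (mirroring mock_create_channel.assert_called_once_with).
from unittest import mock


class ChannelHolder:
    @staticmethod
    def create_channel(host, ssl_credentials=None):
        return object()

    def __init__(self, host, ssl_credentials=None):
        self.channel = self.create_channel(host, ssl_credentials=ssl_credentials)


with mock.patch.object(ChannelHolder, "create_channel") as factory:
    creds = object()
    ChannelHolder("squid.clam.whelk:443", ssl_credentials=creds)
    factory.assert_called_once_with("squid.clam.whelk:443", ssl_credentials=creds)
```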
+ with mock.patch.object(transport_class, "create_channel") as mock_create_channel: + mock_ssl_channel_creds = mock.Mock() + transport_class( + host="squid.clam.whelk", + credentials=cred, + ssl_channel_credentials=mock_ssl_channel_creds, + ) + mock_create_channel.assert_called_once_with( + "squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_channel_creds, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls + # is used. + with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): + with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: + transport_class( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback, + ) + expected_cert, expected_key = client_cert_source_callback() + mock_ssl_cred.assert_called_once_with( + certificate_chain=expected_cert, private_key=expected_key + ) + + +def test_control_service_http_transport_client_cert_source_for_mtls(): + cred = ga_credentials.AnonymousCredentials() + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ) as mock_configure_mtls_channel: + transports.ControlServiceRestTransport( + credentials=cred, client_cert_source_for_mtls=client_cert_source_callback + ) + mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + "rest", + ], +) +def test_control_service_host_no_port(transport_name): + client = ControlServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="discoveryengine.googleapis.com" + ), + transport=transport_name, + ) + assert client.transport._host == ( + "discoveryengine.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://discoveryengine.googleapis.com" + ) + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + "rest", + ], +) +def test_control_service_host_with_port(transport_name): + client = ControlServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="discoveryengine.googleapis.com:8000" + ), + transport=transport_name, + ) + assert client.transport._host == ( + "discoveryengine.googleapis.com:8000" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://discoveryengine.googleapis.com:8000" + ) + + +@pytest.mark.parametrize( + "transport_name", + [ + "rest", + ], +) +def test_control_service_client_transport_session_collision(transport_name): + creds1 = ga_credentials.AnonymousCredentials() + creds2 = ga_credentials.AnonymousCredentials() + client1 = ControlServiceClient( + credentials=creds1, + transport=transport_name, + ) + client2 = ControlServiceClient( + credentials=creds2, + transport=transport_name, + ) + session1 = client1.transport.create_control._session + session2 = client2.transport.create_control._session + assert session1 != session2 + session1 = client1.transport.delete_control._session + session2 = client2.transport.delete_control._session + assert session1 != session2 + session1 = client1.transport.update_control._session + session2 = 
client2.transport.update_control._session + assert session1 != session2 + session1 = client1.transport.get_control._session + session2 = client2.transport.get_control._session + assert session1 != session2 + session1 = client1.transport.list_controls._session + session2 = client2.transport.list_controls._session + assert session1 != session2 + + +def test_control_service_grpc_transport_channel(): + channel = grpc.secure_channel("http://localhost/", grpc.local_channel_credentials()) + + # Check that channel is used if provided. + transport = transports.ControlServiceGrpcTransport( + host="squid.clam.whelk", + channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None + + +def test_control_service_grpc_asyncio_transport_channel(): + channel = aio.secure_channel("http://localhost/", grpc.local_channel_credentials()) + + # Check that channel is used if provided. + transport = transports.ControlServiceGrpcAsyncIOTransport( + host="squid.clam.whelk", + channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. +@pytest.mark.parametrize( + "transport_class", + [ + transports.ControlServiceGrpcTransport, + transports.ControlServiceGrpcAsyncIOTransport, + ], +) +def test_control_service_transport_channel_mtls_with_client_cert_source( + transport_class, +): + with mock.patch( + "grpc.ssl_channel_credentials", autospec=True + ) as grpc_ssl_channel_cred: + with mock.patch.object( + transport_class, "create_channel" + ) as grpc_create_channel: + mock_ssl_cred = mock.Mock() + grpc_ssl_channel_cred.return_value = mock_ssl_cred + + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + + cred = ga_credentials.AnonymousCredentials() + with pytest.warns(DeprecationWarning): + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (cred, None) + transport = transport_class( + host="squid.clam.whelk", + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=client_cert_source_callback, + ) + adc.assert_called_once() + + grpc_ssl_channel_cred.assert_called_once_with( + certificate_chain=b"cert bytes", private_key=b"key bytes" + ) + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + assert transport._ssl_channel_credentials == mock_ssl_cred + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. 
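That comment introduces the parametrized mTLS tests below, which wrap transport construction in `pytest.warns(DeprecationWarning)`; a minimal, self-contained illustration of that pattern (the `make_transport` function is hypothetical):

```python
# pytest.warns fails the block unless a matching warning is emitted,
# which is how the deprecated api_mtls_endpoint / client_cert_source
# arguments are exercised in the tests that follow.
import warnings

import pytest


def make_transport(api_mtls_endpoint=None):
    # Hypothetical constructor that still accepts the deprecated argument.
    if api_mtls_endpoint is not None:
        warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning)
    return object()


with pytest.warns(DeprecationWarning):
    make_transport(api_mtls_endpoint="mtls.squid.clam.whelk")
```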
+@pytest.mark.parametrize( + "transport_class", + [ + transports.ControlServiceGrpcTransport, + transports.ControlServiceGrpcAsyncIOTransport, + ], +) +def test_control_service_transport_channel_mtls_with_adc(transport_class): + mock_ssl_cred = mock.Mock() + with mock.patch.multiple( + "google.auth.transport.grpc.SslCredentials", + __init__=mock.Mock(return_value=None), + ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), + ): + with mock.patch.object( + transport_class, "create_channel" + ) as grpc_create_channel: + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + mock_cred = mock.Mock() + + with pytest.warns(DeprecationWarning): + transport = transport_class( + host="squid.clam.whelk", + credentials=mock_cred, + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=None, + ) + + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=mock_cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + + +def test_control_path(): + project = "squid" + location = "clam" + data_store = "whelk" + control = "octopus" + expected = "projects/{project}/locations/{location}/dataStores/{data_store}/controls/{control}".format( + project=project, + location=location, + data_store=data_store, + control=control, + ) + actual = ControlServiceClient.control_path(project, location, data_store, control) + assert expected == actual + + +def test_parse_control_path(): + expected = { + "project": "oyster", + "location": "nudibranch", + "data_store": "cuttlefish", + "control": "mussel", + } + path = ControlServiceClient.control_path(**expected) + + # Check that the path construction is reversible. + actual = ControlServiceClient.parse_control_path(path) + assert expected == actual + + +def test_data_store_path(): + project = "winkle" + location = "nautilus" + data_store = "scallop" + expected = "projects/{project}/locations/{location}/dataStores/{data_store}".format( + project=project, + location=location, + data_store=data_store, + ) + actual = ControlServiceClient.data_store_path(project, location, data_store) + assert expected == actual + + +def test_parse_data_store_path(): + expected = { + "project": "abalone", + "location": "squid", + "data_store": "clam", + } + path = ControlServiceClient.data_store_path(**expected) + + # Check that the path construction is reversible. + actual = ControlServiceClient.parse_data_store_path(path) + assert expected == actual + + +def test_common_billing_account_path(): + billing_account = "whelk" + expected = "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + actual = ControlServiceClient.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "octopus", + } + path = ControlServiceClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. 
+ actual = ControlServiceClient.parse_common_billing_account_path(path) + assert expected == actual + + +def test_common_folder_path(): + folder = "oyster" + expected = "folders/{folder}".format( + folder=folder, + ) + actual = ControlServiceClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "nudibranch", + } + path = ControlServiceClient.common_folder_path(**expected) + + # Check that the path construction is reversible. + actual = ControlServiceClient.parse_common_folder_path(path) + assert expected == actual + + +def test_common_organization_path(): + organization = "cuttlefish" + expected = "organizations/{organization}".format( + organization=organization, + ) + actual = ControlServiceClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "mussel", + } + path = ControlServiceClient.common_organization_path(**expected) + + # Check that the path construction is reversible. + actual = ControlServiceClient.parse_common_organization_path(path) + assert expected == actual + + +def test_common_project_path(): + project = "winkle" + expected = "projects/{project}".format( + project=project, + ) + actual = ControlServiceClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "nautilus", + } + path = ControlServiceClient.common_project_path(**expected) + + # Check that the path construction is reversible. + actual = ControlServiceClient.parse_common_project_path(path) + assert expected == actual + + +def test_common_location_path(): + project = "scallop" + location = "abalone" + expected = "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) + actual = ControlServiceClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "squid", + "location": "clam", + } + path = ControlServiceClient.common_location_path(**expected) + + # Check that the path construction is reversible. 
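The reversibility assertion completes just below; the round-trip idea behind these path helpers can be sketched with plain string formatting and a regex (the pattern here is illustrative, not the generated one):

```python
# Build a resource path from its components, then parse it back and
# check the round trip is lossless, mirroring the *_path / parse_*_path
# test pairs above.
import re

TEMPLATE = "projects/{project}/locations/{location}"
PATTERN = re.compile(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)$")


def common_location_path(project: str, location: str) -> str:
    return TEMPLATE.format(project=project, location=location)


def parse_common_location_path(path: str) -> dict:
    match = PATTERN.match(path)
    return dict(match.groupdict()) if match else {}


expected = {"project": "squid", "location": "clam"}
assert parse_common_location_path(common_location_path(**expected)) == expected
```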
+ actual = ControlServiceClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object( + transports.ControlServiceTransport, "_prep_wrapped_messages" + ) as prep: + client = ControlServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object( + transports.ControlServiceTransport, "_prep_wrapped_messages" + ) as prep: + transport_class = ControlServiceClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + +@pytest.mark.asyncio +async def test_transport_close_async(): + client = ControlServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + with mock.patch.object( + type(getattr(client.transport, "grpc_channel")), "close" + ) as close: + async with client: + close.assert_not_called() + close.assert_called_once() + + +def test_cancel_operation_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.CancelOperationRequest +): + client = ControlServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict( + { + "name": "projects/sample1/locations/sample2/collections/sample3/dataStores/sample4/branches/sample5/operations/sample6" + }, + request, + ) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.cancel_operation(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.CancelOperationRequest, + dict, + ], +) +def test_cancel_operation_rest(request_type): + client = ControlServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = { + "name": "projects/sample1/locations/sample2/collections/sample3/dataStores/sample4/branches/sample5/operations/sample6" + } + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "{}" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.cancel_operation(request) + + # Establish that the response is the type that we expect. 
+ assert response is None + + +def test_get_operation_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.GetOperationRequest +): + client = ControlServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict( + { + "name": "projects/sample1/locations/sample2/collections/sample3/dataConnector/operations/sample4" + }, + request, + ) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_operation(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.GetOperationRequest, + dict, + ], +) +def test_get_operation_rest(request_type): + client = ControlServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = { + "name": "projects/sample1/locations/sample2/collections/sample3/dataConnector/operations/sample4" + } + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_operation(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + + +def test_list_operations_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.ListOperationsRequest +): + client = ControlServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict( + { + "name": "projects/sample1/locations/sample2/collections/sample3/dataConnector" + }, + request, + ) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_operations(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.ListOperationsRequest, + dict, + ], +) +def test_list_operations_rest(request_type): + client = ControlServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = { + "name": "projects/sample1/locations/sample2/collections/sample3/dataConnector" + } + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = operations_pb2.ListOperationsResponse() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_operations(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) + + +def test_cancel_operation(transport: str = "grpc"): + client = ControlServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.CancelOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_cancel_operation_async(transport: str = "grpc_asyncio"): + client = ControlServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.CancelOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +def test_cancel_operation_field_headers(): + client = ControlServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.CancelOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + call.return_value = None + + client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
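The metadata assertion finishes immediately below; roughly, the `x-goog-request-params` value it checks is a set of url-encoded key/value pairs built from request fields. A rough approximation follows (the real encoding lives in `gapic_v1.routing_header`; this helper is only illustrative):

```python
# Approximate how a routing header value such as "name=locations" is
# derived from request fields before being attached as gRPC metadata.
from urllib.parse import quote


def to_routing_header(params):
    return "&".join(f"{key}={quote(str(value))}" for key, value in params)


metadata = ("x-goog-request-params", to_routing_header((("name", "locations"),)))
assert metadata == ("x-goog-request-params", "name=locations")
```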
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_cancel_operation_field_headers_async(): + client = ControlServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.CancelOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_cancel_operation_from_dict(): + client = ControlServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + + response = client.cancel_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_cancel_operation_from_dict_async(): + client = ControlServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.cancel_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_get_operation(transport: str = "grpc"): + client = ControlServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.GetOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation() + response = client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + + +@pytest.mark.asyncio +async def test_get_operation_async(transport: str = "grpc_asyncio"): + client = ControlServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.GetOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + response = await client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + + +def test_get_operation_field_headers(): + client = ControlServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.GetOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + call.return_value = operations_pb2.Operation() + + client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_operation_field_headers_async(): + client = ControlServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.GetOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + await client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_get_operation_from_dict(): + client = ControlServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation() + + response = client.get_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_get_operation_from_dict_async(): + client = ControlServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + response = await client.get_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_list_operations(transport: str = "grpc"): + client = ControlServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.ListOperationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.ListOperationsResponse() + response = client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) + + +@pytest.mark.asyncio +async def test_list_operations_async(transport: str = "grpc_asyncio"): + client = ControlServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.ListOperationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + response = await client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) + + +def test_list_operations_field_headers(): + client = ControlServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.ListOperationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + call.return_value = operations_pb2.ListOperationsResponse() + + client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_operations_field_headers_async(): + client = ControlServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = operations_pb2.ListOperationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + await client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_list_operations_from_dict(): + client = ControlServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.ListOperationsResponse() + + response = client.list_operations( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_list_operations_from_dict_async(): + client = ControlServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + response = await client.list_operations( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_transport_close(): + transports = { + "rest": "_session", + "grpc": "_grpc_channel", + } + + for transport, close_name in transports.items(): + client = ControlServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + with mock.patch.object( + type(getattr(client.transport, close_name)), "close" + ) as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +def test_client_ctx(): + transports = [ + "rest", + "grpc", + ] + for transport in transports: + client = ControlServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + # Test client calls underlying transport. 
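The patch on `close()` continues below; the behaviour under test is the client acting as a context manager that closes its transport on exit, which can be sketched with a tiny illustrative class:

```python
# Entering the client is a no-op; leaving the with-block closes the
# underlying transport exactly once, as the close-assertions below verify.
from unittest import mock


class TinyClient:
    def __init__(self, transport):
        self.transport = transport

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc, tb):
        self.transport.close()


transport = mock.Mock()
with TinyClient(transport):
    transport.close.assert_not_called()
transport.close.assert_called_once()
```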
+ with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() + + +@pytest.mark.parametrize( + "client_class,transport_class", + [ + (ControlServiceClient, transports.ControlServiceGrpcTransport), + (ControlServiceAsyncClient, transports.ControlServiceGrpcAsyncIOTransport), + ], +) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) diff --git a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1alpha/test_conversational_search_service.py b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1alpha/test_conversational_search_service.py index d76f39fbb60d..16690533c587 100644 --- a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1alpha/test_conversational_search_service.py +++ b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1alpha/test_conversational_search_service.py @@ -3642,13 +3642,13 @@ def test_list_conversations_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_conversations(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -3821,7 +3821,9 @@ def test_answer_query(request_type, transport: str = "grpc"): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.answer_query), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = conversational_search_service.AnswerQueryResponse() + call.return_value = conversational_search_service.AnswerQueryResponse( + answer_query_token="answer_query_token_value", + ) response = client.answer_query(request) # Establish that the underlying gRPC stub method was called. @@ -3832,6 +3834,7 @@ def test_answer_query(request_type, transport: str = "grpc"): # Establish that the response is the type that we expect. assert isinstance(response, conversational_search_service.AnswerQueryResponse) + assert response.answer_query_token == "answer_query_token_value" def test_answer_query_empty_call(): @@ -3933,7 +3936,9 @@ async def test_answer_query_empty_call_async(): with mock.patch.object(type(client.transport.answer_query), "__call__") as call: # Designate an appropriate return value for the call. 
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - conversational_search_service.AnswerQueryResponse() + conversational_search_service.AnswerQueryResponse( + answer_query_token="answer_query_token_value", + ) ) response = await client.answer_query() call.assert_called() @@ -4005,7 +4010,9 @@ async def test_answer_query_async( with mock.patch.object(type(client.transport.answer_query), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - conversational_search_service.AnswerQueryResponse() + conversational_search_service.AnswerQueryResponse( + answer_query_token="answer_query_token_value", + ) ) response = await client.answer_query(request) @@ -4017,6 +4024,7 @@ async def test_answer_query_async( # Establish that the response is the type that we expect. assert isinstance(response, conversational_search_service.AnswerQueryResponse) + assert response.answer_query_token == "answer_query_token_value" @pytest.mark.asyncio @@ -6390,13 +6398,13 @@ def test_list_sessions_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_sessions(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -8835,7 +8843,9 @@ def test_answer_query_rest(request_type): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = conversational_search_service.AnswerQueryResponse() + return_value = conversational_search_service.AnswerQueryResponse( + answer_query_token="answer_query_token_value", + ) # Wrap the value into a proper Response obj response_value = Response() @@ -8852,6 +8862,7 @@ def test_answer_query_rest(request_type): # Establish that the response is the type that we expect. assert isinstance(response, conversational_search_service.AnswerQueryResponse) + assert response.answer_query_token == "answer_query_token_value" def test_answer_query_rest_use_cached_wrapped_rpc(): @@ -11346,6 +11357,7 @@ def test_conversational_search_service_base_transport(): "get_session", "list_sessions", "get_operation", + "cancel_operation", "list_operations", ) for method in methods: @@ -12149,6 +12161,69 @@ async def test_transport_close_async(): close.assert_called_once() +def test_cancel_operation_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.CancelOperationRequest +): + client = ConversationalSearchServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict( + { + "name": "projects/sample1/locations/sample2/collections/sample3/dataStores/sample4/branches/sample5/operations/sample6" + }, + request, + ) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.cancel_operation(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.CancelOperationRequest, + dict, + ], +) +def test_cancel_operation_rest(request_type): + client = ConversationalSearchServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = { + "name": "projects/sample1/locations/sample2/collections/sample3/dataStores/sample4/branches/sample5/operations/sample6" + } + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "{}" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.cancel_operation(request) + + # Establish that the response is the type that we expect. + assert response is None + + def test_get_operation_rest_bad_request( transport: str = "rest", request_type=operations_pb2.GetOperationRequest ): @@ -12275,6 +12350,145 @@ def test_list_operations_rest(request_type): assert isinstance(response, operations_pb2.ListOperationsResponse) +def test_cancel_operation(transport: str = "grpc"): + client = ConversationalSearchServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.CancelOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_cancel_operation_async(transport: str = "grpc_asyncio"): + client = ConversationalSearchServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.CancelOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert response is None + + +def test_cancel_operation_field_headers(): + client = ConversationalSearchServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.CancelOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + call.return_value = None + + client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_cancel_operation_field_headers_async(): + client = ConversationalSearchServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.CancelOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_cancel_operation_from_dict(): + client = ConversationalSearchServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + + response = client.cancel_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_cancel_operation_from_dict_async(): + client = ConversationalSearchServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.cancel_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + def test_get_operation(transport: str = "grpc"): client = ConversationalSearchServiceClient( credentials=ga_credentials.AnonymousCredentials(), diff --git a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1alpha/test_data_store_service.py b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1alpha/test_data_store_service.py index f7658431edda..52367e8861bf 100644 --- a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1alpha/test_data_store_service.py +++ b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1alpha/test_data_store_service.py @@ -2393,13 +2393,13 @@ def test_list_data_stores_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_data_stores(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -4231,6 +4231,10 @@ def test_create_data_store_rest(request_type): "recs_filterable_option": 1, "key_property_type": "key_property_type_value", "advanced_site_search_data_sources": [1], + "schema_org_paths": [ + "schema_org_paths_value1", + "schema_org_paths_value2", + ], } ], }, @@ -5691,6 +5695,10 @@ def test_update_data_store_rest(request_type): "recs_filterable_option": 1, "key_property_type": "key_property_type_value", "advanced_site_search_data_sources": [1], + "schema_org_paths": [ + "schema_org_paths_value1", + "schema_org_paths_value2", + ], } ], }, @@ -6978,6 +6986,7 @@ def test_data_store_service_base_transport(): "get_document_processing_config", "update_document_processing_config", "get_operation", + "cancel_operation", "list_operations", ) for method in methods: @@ -7693,6 +7702,69 @@ async def test_transport_close_async(): close.assert_called_once() +def test_cancel_operation_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.CancelOperationRequest +): + client = DataStoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict( + { + "name": "projects/sample1/locations/sample2/collections/sample3/dataStores/sample4/branches/sample5/operations/sample6" + }, + request, + ) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.cancel_operation(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.CancelOperationRequest, + dict, + ], +) +def test_cancel_operation_rest(request_type): + client = DataStoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = { + "name": "projects/sample1/locations/sample2/collections/sample3/dataStores/sample4/branches/sample5/operations/sample6" + } + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. 
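The mocked session response is assembled just below; the same fake-response pattern in isolation, using `requests.Response` as these tests do (setting `status_code` and the private `_content` attribute before handing it back from the mocked session):

```python
# Build a real requests.Response by hand so the REST client parses it
# as if it had come over the wire.
from requests import Response

response_value = Response()
response_value.status_code = 200
response_value._content = "{}".encode("UTF-8")

assert response_value.status_code == 200
assert response_value.content == b"{}"
```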
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "{}" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.cancel_operation(request) + + # Establish that the response is the type that we expect. + assert response is None + + def test_get_operation_rest_bad_request( transport: str = "rest", request_type=operations_pb2.GetOperationRequest ): @@ -7819,6 +7891,145 @@ def test_list_operations_rest(request_type): assert isinstance(response, operations_pb2.ListOperationsResponse) +def test_cancel_operation(transport: str = "grpc"): + client = DataStoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.CancelOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_cancel_operation_async(transport: str = "grpc_asyncio"): + client = DataStoreServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.CancelOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +def test_cancel_operation_field_headers(): + client = DataStoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.CancelOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + call.return_value = None + + client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_cancel_operation_field_headers_async(): + client = DataStoreServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.CancelOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_cancel_operation_from_dict(): + client = DataStoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + + response = client.cancel_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_cancel_operation_from_dict_async(): + client = DataStoreServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. 
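+        # FakeUnaryUnaryCall wraps the value so the mocked stub can be awaited by the async client.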
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.cancel_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + def test_get_operation(transport: str = "grpc"): client = DataStoreServiceClient( credentials=ga_credentials.AnonymousCredentials(), diff --git a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1alpha/test_document_service.py b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1alpha/test_document_service.py index a4419b61af32..cb62c19e559a 100644 --- a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1alpha/test_document_service.py +++ b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1alpha/test_document_service.py @@ -1951,13 +1951,13 @@ def test_list_documents_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_documents(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -7202,6 +7202,7 @@ def test_document_service_base_transport(): "purge_documents", "get_processed_document", "get_operation", + "cancel_operation", "list_operations", ) for method in methods: @@ -7903,6 +7904,69 @@ async def test_transport_close_async(): close.assert_called_once() +def test_cancel_operation_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.CancelOperationRequest +): + client = DocumentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict( + { + "name": "projects/sample1/locations/sample2/collections/sample3/dataStores/sample4/branches/sample5/operations/sample6" + }, + request, + ) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.cancel_operation(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.CancelOperationRequest, + dict, + ], +) +def test_cancel_operation_rest(request_type): + client = DocumentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = { + "name": "projects/sample1/locations/sample2/collections/sample3/dataStores/sample4/branches/sample5/operations/sample6" + } + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "{}" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.cancel_operation(request) + + # Establish that the response is the type that we expect. 
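+    # CancelOperation returns google.protobuf.Empty, which the client surfaces as None.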
+ assert response is None + + def test_get_operation_rest_bad_request( transport: str = "rest", request_type=operations_pb2.GetOperationRequest ): @@ -8029,6 +8093,145 @@ def test_list_operations_rest(request_type): assert isinstance(response, operations_pb2.ListOperationsResponse) +def test_cancel_operation(transport: str = "grpc"): + client = DocumentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.CancelOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_cancel_operation_async(transport: str = "grpc_asyncio"): + client = DocumentServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.CancelOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +def test_cancel_operation_field_headers(): + client = DocumentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.CancelOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + call.return_value = None + + client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_cancel_operation_field_headers_async(): + client = DocumentServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.CancelOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_cancel_operation_from_dict(): + client = DocumentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + + response = client.cancel_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_cancel_operation_from_dict_async(): + client = DocumentServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.cancel_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + def test_get_operation(transport: str = "grpc"): client = DocumentServiceClient( credentials=ga_credentials.AnonymousCredentials(), diff --git a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1alpha/test_engine_service.py b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1alpha/test_engine_service.py index 5c8dc8fcc857..e3e316dd93d0 100644 --- a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1alpha/test_engine_service.py +++ b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1alpha/test_engine_service.py @@ -3073,13 +3073,13 @@ def test_list_engines_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_engines(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -7309,6 +7309,7 @@ def test_engine_service_base_transport(): "resume_engine", "tune_engine", "get_operation", + "cancel_operation", "list_operations", ) for method in methods: @@ -7971,6 +7972,69 @@ async def test_transport_close_async(): close.assert_called_once() +def test_cancel_operation_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.CancelOperationRequest +): + client = EngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict( + { + "name": "projects/sample1/locations/sample2/collections/sample3/dataStores/sample4/branches/sample5/operations/sample6" + }, + request, + ) + + # Mock the http request call within the method and fake a BadRequest error. 
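+    # A 400 status from the mocked session is translated into core_exceptions.BadRequest.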
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.cancel_operation(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.CancelOperationRequest, + dict, + ], +) +def test_cancel_operation_rest(request_type): + client = EngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = { + "name": "projects/sample1/locations/sample2/collections/sample3/dataStores/sample4/branches/sample5/operations/sample6" + } + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "{}" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.cancel_operation(request) + + # Establish that the response is the type that we expect. + assert response is None + + def test_get_operation_rest_bad_request( transport: str = "rest", request_type=operations_pb2.GetOperationRequest ): @@ -8097,6 +8161,145 @@ def test_list_operations_rest(request_type): assert isinstance(response, operations_pb2.ListOperationsResponse) +def test_cancel_operation(transport: str = "grpc"): + client = EngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.CancelOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_cancel_operation_async(transport: str = "grpc_asyncio"): + client = EngineServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.CancelOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert response is None + + +def test_cancel_operation_field_headers(): + client = EngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.CancelOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + call.return_value = None + + client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_cancel_operation_field_headers_async(): + client = EngineServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.CancelOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_cancel_operation_from_dict(): + client = EngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + + response = client.cancel_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_cancel_operation_from_dict_async(): + client = EngineServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.cancel_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + def test_get_operation(transport: str = "grpc"): client = EngineServiceClient( credentials=ga_credentials.AnonymousCredentials(), diff --git a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1alpha/test_estimate_billing_service.py b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1alpha/test_estimate_billing_service.py index 1cb7d63c5346..f7ca66f47798 100644 --- a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1alpha/test_estimate_billing_service.py +++ b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1alpha/test_estimate_billing_service.py @@ -1899,6 +1899,7 @@ def test_estimate_billing_service_base_transport(): methods = ( "estimate_data_size", "get_operation", + "cancel_operation", "list_operations", ) for method in methods: @@ -2512,6 +2513,69 @@ async def test_transport_close_async(): close.assert_called_once() +def test_cancel_operation_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.CancelOperationRequest +): + client = EstimateBillingServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict( + { + "name": "projects/sample1/locations/sample2/collections/sample3/dataStores/sample4/branches/sample5/operations/sample6" + }, + request, + ) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.cancel_operation(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.CancelOperationRequest, + dict, + ], +) +def test_cancel_operation_rest(request_type): + client = EstimateBillingServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = { + "name": "projects/sample1/locations/sample2/collections/sample3/dataStores/sample4/branches/sample5/operations/sample6" + } + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "{}" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.cancel_operation(request) + + # Establish that the response is the type that we expect. 
+ assert response is None + + def test_get_operation_rest_bad_request( transport: str = "rest", request_type=operations_pb2.GetOperationRequest ): @@ -2638,6 +2702,145 @@ def test_list_operations_rest(request_type): assert isinstance(response, operations_pb2.ListOperationsResponse) +def test_cancel_operation(transport: str = "grpc"): + client = EstimateBillingServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.CancelOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_cancel_operation_async(transport: str = "grpc_asyncio"): + client = EstimateBillingServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.CancelOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +def test_cancel_operation_field_headers(): + client = EstimateBillingServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.CancelOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + call.return_value = None + + client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_cancel_operation_field_headers_async(): + client = EstimateBillingServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.CancelOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_cancel_operation_from_dict(): + client = EstimateBillingServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + + response = client.cancel_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_cancel_operation_from_dict_async(): + client = EstimateBillingServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.cancel_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + def test_get_operation(transport: str = "grpc"): client = EstimateBillingServiceClient( credentials=ga_credentials.AnonymousCredentials(), diff --git a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1alpha/test_grounded_generation_service.py b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1alpha/test_grounded_generation_service.py index 76d1100282cc..53bb9dd42f43 100644 --- a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1alpha/test_grounded_generation_service.py +++ b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1alpha/test_grounded_generation_service.py @@ -1893,6 +1893,7 @@ def test_grounded_generation_service_base_transport(): methods = ( "check_grounding", "get_operation", + "cancel_operation", "list_operations", ) for method in methods: @@ -2457,6 +2458,69 @@ async def test_transport_close_async(): close.assert_called_once() +def test_cancel_operation_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.CancelOperationRequest +): + client = GroundedGenerationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict( + { + "name": "projects/sample1/locations/sample2/collections/sample3/dataStores/sample4/branches/sample5/operations/sample6" + }, + request, + ) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.cancel_operation(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.CancelOperationRequest, + dict, + ], +) +def test_cancel_operation_rest(request_type): + client = GroundedGenerationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = { + "name": "projects/sample1/locations/sample2/collections/sample3/dataStores/sample4/branches/sample5/operations/sample6" + } + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "{}" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.cancel_operation(request) + + # Establish that the response is the type that we expect. + assert response is None + + def test_get_operation_rest_bad_request( transport: str = "rest", request_type=operations_pb2.GetOperationRequest ): @@ -2583,6 +2647,145 @@ def test_list_operations_rest(request_type): assert isinstance(response, operations_pb2.ListOperationsResponse) +def test_cancel_operation(transport: str = "grpc"): + client = GroundedGenerationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.CancelOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_cancel_operation_async(transport: str = "grpc_asyncio"): + client = GroundedGenerationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.CancelOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert response is None + + +def test_cancel_operation_field_headers(): + client = GroundedGenerationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.CancelOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + call.return_value = None + + client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_cancel_operation_field_headers_async(): + client = GroundedGenerationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.CancelOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_cancel_operation_from_dict(): + client = GroundedGenerationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + + response = client.cancel_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_cancel_operation_from_dict_async(): + client = GroundedGenerationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.cancel_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + def test_get_operation(transport: str = "grpc"): client = GroundedGenerationServiceClient( credentials=ga_credentials.AnonymousCredentials(), diff --git a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1alpha/test_project_service.py b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1alpha/test_project_service.py index 7cf26554e666..bb8d431a92ad 100644 --- a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1alpha/test_project_service.py +++ b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1alpha/test_project_service.py @@ -3399,6 +3399,7 @@ def test_project_service_base_transport(): "provision_project", "report_consent_change", "get_operation", + "cancel_operation", "list_operations", ) for method in methods: @@ -4011,6 +4012,69 @@ async def test_transport_close_async(): close.assert_called_once() +def test_cancel_operation_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.CancelOperationRequest +): + client = ProjectServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict( + { + "name": "projects/sample1/locations/sample2/collections/sample3/dataStores/sample4/branches/sample5/operations/sample6" + }, + request, + ) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.cancel_operation(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.CancelOperationRequest, + dict, + ], +) +def test_cancel_operation_rest(request_type): + client = ProjectServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = { + "name": "projects/sample1/locations/sample2/collections/sample3/dataStores/sample4/branches/sample5/operations/sample6" + } + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "{}" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.cancel_operation(request) + + # Establish that the response is the type that we expect. 
+ assert response is None + + def test_get_operation_rest_bad_request( transport: str = "rest", request_type=operations_pb2.GetOperationRequest ): @@ -4137,6 +4201,145 @@ def test_list_operations_rest(request_type): assert isinstance(response, operations_pb2.ListOperationsResponse) +def test_cancel_operation(transport: str = "grpc"): + client = ProjectServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.CancelOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_cancel_operation_async(transport: str = "grpc_asyncio"): + client = ProjectServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.CancelOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +def test_cancel_operation_field_headers(): + client = ProjectServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.CancelOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + call.return_value = None + + client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_cancel_operation_field_headers_async(): + client = ProjectServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.CancelOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_cancel_operation_from_dict(): + client = ProjectServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + + response = client.cancel_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_cancel_operation_from_dict_async(): + client = ProjectServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.cancel_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + def test_get_operation(transport: str = "grpc"): client = ProjectServiceClient( credentials=ga_credentials.AnonymousCredentials(), diff --git a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1alpha/test_rank_service.py b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1alpha/test_rank_service.py index dc9fc770dd41..6fa7d8ef3bad 100644 --- a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1alpha/test_rank_service.py +++ b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1alpha/test_rank_service.py @@ -1753,6 +1753,7 @@ def test_rank_service_base_transport(): methods = ( "rank", "get_operation", + "cancel_operation", "list_operations", ) for method in methods: @@ -2298,6 +2299,69 @@ async def test_transport_close_async(): close.assert_called_once() +def test_cancel_operation_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.CancelOperationRequest +): + client = RankServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict( + { + "name": "projects/sample1/locations/sample2/collections/sample3/dataStores/sample4/branches/sample5/operations/sample6" + }, + request, + ) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.cancel_operation(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.CancelOperationRequest, + dict, + ], +) +def test_cancel_operation_rest(request_type): + client = RankServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = { + "name": "projects/sample1/locations/sample2/collections/sample3/dataStores/sample4/branches/sample5/operations/sample6" + } + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "{}" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.cancel_operation(request) + + # Establish that the response is the type that we expect. + assert response is None + + def test_get_operation_rest_bad_request( transport: str = "rest", request_type=operations_pb2.GetOperationRequest ): @@ -2424,6 +2488,145 @@ def test_list_operations_rest(request_type): assert isinstance(response, operations_pb2.ListOperationsResponse) +def test_cancel_operation(transport: str = "grpc"): + client = RankServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.CancelOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_cancel_operation_async(transport: str = "grpc_asyncio"): + client = RankServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.CancelOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert response is None + + +def test_cancel_operation_field_headers(): + client = RankServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.CancelOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + call.return_value = None + + client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_cancel_operation_field_headers_async(): + client = RankServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.CancelOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_cancel_operation_from_dict(): + client = RankServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + + response = client.cancel_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_cancel_operation_from_dict_async(): + client = RankServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.cancel_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + def test_get_operation(transport: str = "grpc"): client = RankServiceClient( credentials=ga_credentials.AnonymousCredentials(), diff --git a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1alpha/test_recommendation_service.py b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1alpha/test_recommendation_service.py index fb3fdcd74a83..caf9dbb36df6 100644 --- a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1alpha/test_recommendation_service.py +++ b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1alpha/test_recommendation_service.py @@ -1896,6 +1896,7 @@ def test_recommendation_service_base_transport(): methods = ( "recommend", "get_operation", + "cancel_operation", "list_operations", ) for method in methods: @@ -2288,12 +2289,38 @@ def test_recommendation_service_transport_channel_mtls_with_adc(transport_class) assert transport.grpc_channel == mock_grpc_channel -def test_document_path(): +def test_data_store_path(): project = "squid" location = "clam" data_store = "whelk" - branch = "octopus" - document = "oyster" + expected = "projects/{project}/locations/{location}/dataStores/{data_store}".format( + project=project, + location=location, + data_store=data_store, + ) + actual = RecommendationServiceClient.data_store_path(project, location, data_store) + assert expected == actual + + +def test_parse_data_store_path(): + expected = { + "project": "octopus", + "location": "oyster", + "data_store": "nudibranch", + } + path = RecommendationServiceClient.data_store_path(**expected) + + # Check that the path construction is reversible. + actual = RecommendationServiceClient.parse_data_store_path(path) + assert expected == actual + + +def test_document_path(): + project = "cuttlefish" + location = "mussel" + data_store = "winkle" + branch = "nautilus" + document = "scallop" expected = "projects/{project}/locations/{location}/dataStores/{data_store}/branches/{branch}/documents/{document}".format( project=project, location=location, @@ -2309,11 +2336,11 @@ def test_document_path(): def test_parse_document_path(): expected = { - "project": "nudibranch", - "location": "cuttlefish", - "data_store": "mussel", - "branch": "winkle", - "document": "nautilus", + "project": "abalone", + "location": "squid", + "data_store": "clam", + "branch": "whelk", + "document": "octopus", } path = RecommendationServiceClient.document_path(**expected) @@ -2322,11 +2349,42 @@ def test_parse_document_path(): assert expected == actual +def test_engine_path(): + project = "oyster" + location = "nudibranch" + collection = "cuttlefish" + engine = "mussel" + expected = "projects/{project}/locations/{location}/collections/{collection}/engines/{engine}".format( + project=project, + location=location, + collection=collection, + engine=engine, + ) + actual = RecommendationServiceClient.engine_path( + project, location, collection, engine + ) + assert expected == actual + + +def test_parse_engine_path(): + expected = { + "project": "winkle", + "location": "nautilus", + "collection": "scallop", + "engine": "abalone", + } + path = RecommendationServiceClient.engine_path(**expected) + + # Check that the path construction is reversible. 
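+    # parse_engine_path should recover the same component dict that engine_path was built from.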
+ actual = RecommendationServiceClient.parse_engine_path(path) + assert expected == actual + + def test_serving_config_path(): - project = "scallop" - location = "abalone" - data_store = "squid" - serving_config = "clam" + project = "squid" + location = "clam" + data_store = "whelk" + serving_config = "octopus" expected = "projects/{project}/locations/{location}/dataStores/{data_store}/servingConfigs/{serving_config}".format( project=project, location=location, @@ -2341,10 +2399,10 @@ def test_serving_config_path(): def test_parse_serving_config_path(): expected = { - "project": "whelk", - "location": "octopus", - "data_store": "oyster", - "serving_config": "nudibranch", + "project": "oyster", + "location": "nudibranch", + "data_store": "cuttlefish", + "serving_config": "mussel", } path = RecommendationServiceClient.serving_config_path(**expected) @@ -2354,7 +2412,7 @@ def test_parse_serving_config_path(): def test_common_billing_account_path(): - billing_account = "cuttlefish" + billing_account = "winkle" expected = "billingAccounts/{billing_account}".format( billing_account=billing_account, ) @@ -2364,7 +2422,7 @@ def test_common_billing_account_path(): def test_parse_common_billing_account_path(): expected = { - "billing_account": "mussel", + "billing_account": "nautilus", } path = RecommendationServiceClient.common_billing_account_path(**expected) @@ -2374,7 +2432,7 @@ def test_parse_common_billing_account_path(): def test_common_folder_path(): - folder = "winkle" + folder = "scallop" expected = "folders/{folder}".format( folder=folder, ) @@ -2384,7 +2442,7 @@ def test_common_folder_path(): def test_parse_common_folder_path(): expected = { - "folder": "nautilus", + "folder": "abalone", } path = RecommendationServiceClient.common_folder_path(**expected) @@ -2394,7 +2452,7 @@ def test_parse_common_folder_path(): def test_common_organization_path(): - organization = "scallop" + organization = "squid" expected = "organizations/{organization}".format( organization=organization, ) @@ -2404,7 +2462,7 @@ def test_common_organization_path(): def test_parse_common_organization_path(): expected = { - "organization": "abalone", + "organization": "clam", } path = RecommendationServiceClient.common_organization_path(**expected) @@ -2414,7 +2472,7 @@ def test_parse_common_organization_path(): def test_common_project_path(): - project = "squid" + project = "whelk" expected = "projects/{project}".format( project=project, ) @@ -2424,7 +2482,7 @@ def test_common_project_path(): def test_parse_common_project_path(): expected = { - "project": "clam", + "project": "octopus", } path = RecommendationServiceClient.common_project_path(**expected) @@ -2434,8 +2492,8 @@ def test_parse_common_project_path(): def test_common_location_path(): - project = "whelk" - location = "octopus" + project = "oyster" + location = "nudibranch" expected = "projects/{project}/locations/{location}".format( project=project, location=location, @@ -2446,8 +2504,8 @@ def test_common_location_path(): def test_parse_common_location_path(): expected = { - "project": "oyster", - "location": "nudibranch", + "project": "cuttlefish", + "location": "mussel", } path = RecommendationServiceClient.common_location_path(**expected) @@ -2493,6 +2551,69 @@ async def test_transport_close_async(): close.assert_called_once() +def test_cancel_operation_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.CancelOperationRequest +): + client = RecommendationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + 
transport=transport, + ) + + request = request_type() + request = json_format.ParseDict( + { + "name": "projects/sample1/locations/sample2/collections/sample3/dataStores/sample4/branches/sample5/operations/sample6" + }, + request, + ) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.cancel_operation(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.CancelOperationRequest, + dict, + ], +) +def test_cancel_operation_rest(request_type): + client = RecommendationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = { + "name": "projects/sample1/locations/sample2/collections/sample3/dataStores/sample4/branches/sample5/operations/sample6" + } + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "{}" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.cancel_operation(request) + + # Establish that the response is the type that we expect. + assert response is None + + def test_get_operation_rest_bad_request( transport: str = "rest", request_type=operations_pb2.GetOperationRequest ): @@ -2619,6 +2740,145 @@ def test_list_operations_rest(request_type): assert isinstance(response, operations_pb2.ListOperationsResponse) +def test_cancel_operation(transport: str = "grpc"): + client = RecommendationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.CancelOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_cancel_operation_async(transport: str = "grpc_asyncio"): + client = RecommendationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.CancelOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +def test_cancel_operation_field_headers(): + client = RecommendationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.CancelOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + call.return_value = None + + client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_cancel_operation_field_headers_async(): + client = RecommendationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.CancelOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_cancel_operation_from_dict(): + client = RecommendationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + + response = client.cancel_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_cancel_operation_from_dict_async(): + client = RecommendationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.cancel_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + def test_get_operation(transport: str = "grpc"): client = RecommendationServiceClient( credentials=ga_credentials.AnonymousCredentials(), diff --git a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1alpha/test_schema_service.py b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1alpha/test_schema_service.py index 96a5352327e5..be8374f55a62 100644 --- a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1alpha/test_schema_service.py +++ b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1alpha/test_schema_service.py @@ -1899,13 +1899,13 @@ def test_list_schemas_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_schemas(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -3859,6 +3859,10 @@ def test_create_schema_rest(request_type): "recs_filterable_option": 1, "key_property_type": "key_property_type_value", "advanced_site_search_data_sources": [1], + "schema_org_paths": [ + "schema_org_paths_value1", + "schema_org_paths_value2", + ], } ], } @@ -4295,6 +4299,10 @@ def test_update_schema_rest(request_type): "recs_filterable_option": 1, "key_property_type": "key_property_type_value", "advanced_site_search_data_sources": [1], + "schema_org_paths": [ + "schema_org_paths_value1", + "schema_org_paths_value2", + ], } ], } @@ -5046,6 +5054,7 @@ def test_schema_service_base_transport(): "update_schema", "delete_schema", "get_operation", + "cancel_operation", "list_operations", ) for method in methods: @@ -5697,6 +5706,69 @@ async def test_transport_close_async(): close.assert_called_once() +def test_cancel_operation_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.CancelOperationRequest +): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict( + { + "name": "projects/sample1/locations/sample2/collections/sample3/dataStores/sample4/branches/sample5/operations/sample6" + }, + request, + ) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.cancel_operation(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.CancelOperationRequest, + dict, + ], +) +def test_cancel_operation_rest(request_type): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = { + "name": "projects/sample1/locations/sample2/collections/sample3/dataStores/sample4/branches/sample5/operations/sample6" + } + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "{}" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.cancel_operation(request) + + # Establish that the response is the type that we expect. + assert response is None + + def test_get_operation_rest_bad_request( transport: str = "rest", request_type=operations_pb2.GetOperationRequest ): @@ -5823,6 +5895,145 @@ def test_list_operations_rest(request_type): assert isinstance(response, operations_pb2.ListOperationsResponse) +def test_cancel_operation(transport: str = "grpc"): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.CancelOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_cancel_operation_async(transport: str = "grpc_asyncio"): + client = SchemaServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.CancelOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +def test_cancel_operation_field_headers(): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.CancelOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + call.return_value = None + + client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_cancel_operation_field_headers_async(): + client = SchemaServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.CancelOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_cancel_operation_from_dict(): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + + response = client.cancel_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_cancel_operation_from_dict_async(): + client = SchemaServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.cancel_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + def test_get_operation(transport: str = "grpc"): client = SchemaServiceClient( credentials=ga_credentials.AnonymousCredentials(), diff --git a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1alpha/test_search_service.py b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1alpha/test_search_service.py index 305aa79487f6..90b9d5c59c40 100644 --- a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1alpha/test_search_service.py +++ b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1alpha/test_search_service.py @@ -1478,13 +1478,13 @@ def test_search_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("serving_config", ""),)), ) pager = client.search(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -2090,6 +2090,7 @@ def test_search_service_base_transport(): methods = ( "search", "get_operation", + "cancel_operation", "list_operations", ) for method in methods: @@ -2775,6 +2776,69 @@ async def test_transport_close_async(): close.assert_called_once() +def test_cancel_operation_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.CancelOperationRequest +): + client = SearchServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict( + { + "name": "projects/sample1/locations/sample2/collections/sample3/dataStores/sample4/branches/sample5/operations/sample6" + }, + request, + ) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.cancel_operation(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.CancelOperationRequest, + dict, + ], +) +def test_cancel_operation_rest(request_type): + client = SearchServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = { + "name": "projects/sample1/locations/sample2/collections/sample3/dataStores/sample4/branches/sample5/operations/sample6" + } + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "{}" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.cancel_operation(request) + + # Establish that the response is the type that we expect. 
+ assert response is None + + def test_get_operation_rest_bad_request( transport: str = "rest", request_type=operations_pb2.GetOperationRequest ): @@ -2901,6 +2965,145 @@ def test_list_operations_rest(request_type): assert isinstance(response, operations_pb2.ListOperationsResponse) +def test_cancel_operation(transport: str = "grpc"): + client = SearchServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.CancelOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_cancel_operation_async(transport: str = "grpc_asyncio"): + client = SearchServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.CancelOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +def test_cancel_operation_field_headers(): + client = SearchServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.CancelOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + call.return_value = None + + client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_cancel_operation_field_headers_async(): + client = SearchServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.CancelOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_cancel_operation_from_dict(): + client = SearchServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + + response = client.cancel_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_cancel_operation_from_dict_async(): + client = SearchServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.cancel_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + def test_get_operation(transport: str = "grpc"): client = SearchServiceClient( credentials=ga_credentials.AnonymousCredentials(), diff --git a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1alpha/test_search_tuning_service.py b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1alpha/test_search_tuning_service.py index faf8c2573b3f..722c3cd1e947 100644 --- a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1alpha/test_search_tuning_service.py +++ b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1alpha/test_search_tuning_service.py @@ -59,6 +59,7 @@ transports, ) from google.cloud.discoveryengine_v1alpha.types import ( + custom_tuning_model, import_config, search_tuning_service, ) @@ -1267,6 +1268,7 @@ def test_train_custom_model_non_empty_request_with_auto_populated_field(): request = search_tuning_service.TrainCustomModelRequest( data_store="data_store_value", model_type="model_type_value", + model_id="model_id_value", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1282,6 +1284,7 @@ def test_train_custom_model_non_empty_request_with_auto_populated_field(): assert args[0] == search_tuning_service.TrainCustomModelRequest( data_store="data_store_value", model_type="model_type_value", + model_id="model_id_value", ) @@ -1505,6 +1508,304 @@ async def test_train_custom_model_field_headers_async(): ) in kw["metadata"] +@pytest.mark.parametrize( + "request_type", + [ + search_tuning_service.ListCustomModelsRequest, + dict, + ], +) +def test_list_custom_models(request_type, transport: str = "grpc"): + client = SearchTuningServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
+ request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_custom_models), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = search_tuning_service.ListCustomModelsResponse() + response = client.list_custom_models(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = search_tuning_service.ListCustomModelsRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, search_tuning_service.ListCustomModelsResponse) + + +def test_list_custom_models_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = SearchTuningServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_custom_models), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.list_custom_models() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == search_tuning_service.ListCustomModelsRequest() + + +def test_list_custom_models_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = SearchTuningServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = search_tuning_service.ListCustomModelsRequest( + data_store="data_store_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_custom_models), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.list_custom_models(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == search_tuning_service.ListCustomModelsRequest( + data_store="data_store_value", + ) + + +def test_list_custom_models_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = SearchTuningServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.list_custom_models in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client._transport._wrapped_methods[ + client._transport.list_custom_models + ] = mock_rpc + request = {} + client.list_custom_models(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_custom_models(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_list_custom_models_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = SearchTuningServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_custom_models), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + search_tuning_service.ListCustomModelsResponse() + ) + response = await client.list_custom_models() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == search_tuning_service.ListCustomModelsRequest() + + +@pytest.mark.asyncio +async def test_list_custom_models_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = SearchTuningServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.list_custom_models + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[ + client._client._transport.list_custom_models + ] = mock_object + + request = {} + await client.list_custom_models(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + await client.list_custom_models(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + +@pytest.mark.asyncio +async def test_list_custom_models_async( + transport: str = "grpc_asyncio", + request_type=search_tuning_service.ListCustomModelsRequest, +): + client = SearchTuningServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_custom_models), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + search_tuning_service.ListCustomModelsResponse() + ) + response = await client.list_custom_models(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = search_tuning_service.ListCustomModelsRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, search_tuning_service.ListCustomModelsResponse) + + +@pytest.mark.asyncio +async def test_list_custom_models_async_from_dict(): + await test_list_custom_models_async(request_type=dict) + + +def test_list_custom_models_field_headers(): + client = SearchTuningServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = search_tuning_service.ListCustomModelsRequest() + + request.data_store = "data_store_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_custom_models), "__call__" + ) as call: + call.return_value = search_tuning_service.ListCustomModelsResponse() + client.list_custom_models(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "data_store=data_store_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_custom_models_field_headers_async(): + client = SearchTuningServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = search_tuning_service.ListCustomModelsRequest() + + request.data_store = "data_store_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_custom_models), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + search_tuning_service.ListCustomModelsResponse() + ) + await client.list_custom_models(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "data_store=data_store_value", + ) in kw["metadata"] + + @pytest.mark.parametrize( "request_type", [ @@ -1512,7 +1813,260 @@ async def test_train_custom_model_field_headers_async(): dict, ], ) -def test_train_custom_model_rest(request_type): +def test_train_custom_model_rest(request_type): + client = SearchTuningServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "data_store": "projects/sample1/locations/sample2/collections/sample3/dataStores/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.train_custom_model(request) + + # Establish that the response is the type that we expect. + assert response.operation.name == "operations/spam" + + +def test_train_custom_model_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = SearchTuningServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.train_custom_model in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.train_custom_model + ] = mock_rpc + + request = {} + client.train_custom_model(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.train_custom_model(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_train_custom_model_rest_required_fields( + request_type=search_tuning_service.TrainCustomModelRequest, +): + transport_class = transports.SearchTuningServiceRestTransport + + request_init = {} + request_init["data_store"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).train_custom_model._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["dataStore"] = "data_store_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).train_custom_model._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "dataStore" in jsonified_request + assert jsonified_request["dataStore"] == "data_store_value" + + client = SearchTuningServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.train_custom_model(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_train_custom_model_rest_unset_required_fields(): + transport = transports.SearchTuningServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.train_custom_model._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("dataStore",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_train_custom_model_rest_interceptors(null_interceptor): + transport = transports.SearchTuningServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.SearchTuningServiceRestInterceptor(), + ) + client = SearchTuningServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.SearchTuningServiceRestInterceptor, "post_train_custom_model" + ) as post, mock.patch.object( + transports.SearchTuningServiceRestInterceptor, "pre_train_custom_model" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = search_tuning_service.TrainCustomModelRequest.pb( + search_tuning_service.TrainCustomModelRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = search_tuning_service.TrainCustomModelRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.train_custom_model( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_train_custom_model_rest_bad_request( + transport: str = "rest", request_type=search_tuning_service.TrainCustomModelRequest +): + client = SearchTuningServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "data_store": "projects/sample1/locations/sample2/collections/sample3/dataStores/sample4" + } + request = 
request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.train_custom_model(request) + + +def test_train_custom_model_rest_error(): + client = SearchTuningServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + search_tuning_service.ListCustomModelsRequest, + dict, + ], +) +def test_list_custom_models_rest(request_type): client = SearchTuningServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -1527,22 +2081,24 @@ def test_train_custom_model_rest(request_type): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = search_tuning_service.ListCustomModelsResponse() # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 + # Convert return value to protobuf type + return_value = search_tuning_service.ListCustomModelsResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.train_custom_model(request) + response = client.list_custom_models(request) # Establish that the response is the type that we expect. - assert response.operation.name == "operations/spam" + assert isinstance(response, search_tuning_service.ListCustomModelsResponse) -def test_train_custom_model_rest_use_cached_wrapped_rpc(): +def test_list_custom_models_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -1557,7 +2113,7 @@ def test_train_custom_model_rest_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.train_custom_model in client._transport._wrapped_methods + client._transport.list_custom_models in client._transport._wrapped_methods ) # Replace cached wrapped function with mock @@ -1566,28 +2122,24 @@ def test_train_custom_model_rest_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.train_custom_model + client._transport.list_custom_models ] = mock_rpc request = {} - client.train_custom_model(request) + client.list_custom_models(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.train_custom_model(request) + client.list_custom_models(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_train_custom_model_rest_required_fields( - request_type=search_tuning_service.TrainCustomModelRequest, +def test_list_custom_models_rest_required_fields( + request_type=search_tuning_service.ListCustomModelsRequest, ): transport_class = transports.SearchTuningServiceRestTransport @@ -1603,7 +2155,7 @@ def test_train_custom_model_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).train_custom_model._get_unset_required_fields(jsonified_request) + ).list_custom_models._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present @@ -1612,7 +2164,7 @@ def test_train_custom_model_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).train_custom_model._get_unset_required_fields(jsonified_request) + ).list_custom_models._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone @@ -1626,7 +2178,7 @@ def test_train_custom_model_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = search_tuning_service.ListCustomModelsResponse() # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -1638,37 +2190,41 @@ def test_train_custom_model_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "post", + "method": "get", "query_params": pb_request, } - transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = search_tuning_service.ListCustomModelsResponse.pb( + return_value + ) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.train_custom_model(request) + response = client.list_custom_models(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_train_custom_model_rest_unset_required_fields(): +def test_list_custom_models_rest_unset_required_fields(): transport = transports.SearchTuningServiceRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.train_custom_model._get_unset_required_fields({}) + unset_fields = transport.list_custom_models._get_unset_required_fields({}) assert set(unset_fields) == (set(()) & set(("dataStore",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_train_custom_model_rest_interceptors(null_interceptor): +def test_list_custom_models_rest_interceptors(null_interceptor): transport = transports.SearchTuningServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -1681,16 +2237,14 @@ def test_train_custom_model_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - operation.Operation, "_set_result_from_operation" - ), mock.patch.object( - transports.SearchTuningServiceRestInterceptor, "post_train_custom_model" + transports.SearchTuningServiceRestInterceptor, "post_list_custom_models" ) as post, mock.patch.object( - transports.SearchTuningServiceRestInterceptor, "pre_train_custom_model" + transports.SearchTuningServiceRestInterceptor, "pre_list_custom_models" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = search_tuning_service.TrainCustomModelRequest.pb( - search_tuning_service.TrainCustomModelRequest() + pb_message = search_tuning_service.ListCustomModelsRequest.pb( + search_tuning_service.ListCustomModelsRequest() ) transcode.return_value = { "method": "post", @@ -1702,19 +2256,21 @@ def test_train_custom_model_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = json_format.MessageToJson( - operations_pb2.Operation() + req.return_value._content = ( + search_tuning_service.ListCustomModelsResponse.to_json( + search_tuning_service.ListCustomModelsResponse() + ) ) - request = search_tuning_service.TrainCustomModelRequest() + request = search_tuning_service.ListCustomModelsRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() + post.return_value = search_tuning_service.ListCustomModelsResponse() - client.train_custom_model( + client.list_custom_models( request, metadata=[ ("key", "val"), @@ -1726,8 
+2282,8 @@ def test_train_custom_model_rest_interceptors(null_interceptor): post.assert_called_once() -def test_train_custom_model_rest_bad_request( - transport: str = "rest", request_type=search_tuning_service.TrainCustomModelRequest +def test_list_custom_models_rest_bad_request( + transport: str = "rest", request_type=search_tuning_service.ListCustomModelsRequest ): client = SearchTuningServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -1749,10 +2305,10 @@ def test_train_custom_model_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.train_custom_model(request) + client.list_custom_models(request) -def test_train_custom_model_rest_error(): +def test_list_custom_models_rest_error(): client = SearchTuningServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -1898,7 +2454,9 @@ def test_search_tuning_service_base_transport(): # raise NotImplementedError. methods = ( "train_custom_model", + "list_custom_models", "get_operation", + "cancel_operation", "list_operations", ) for method in methods: @@ -2185,6 +2743,9 @@ def test_search_tuning_service_client_transport_session_collision(transport_name session1 = client1.transport.train_custom_model._session session2 = client2.transport.train_custom_model._session assert session1 != session2 + session1 = client1.transport.list_custom_models._session + session2 = client2.transport.list_custom_models._session + assert session1 != session2 def test_search_tuning_service_grpc_transport_channel(): @@ -2347,10 +2908,41 @@ def test_search_tuning_service_grpc_lro_async_client(): assert transport.operations_client is transport.operations_client -def test_data_store_path(): +def test_custom_tuning_model_path(): project = "squid" location = "clam" data_store = "whelk" + custom_tuning_model = "octopus" + expected = "projects/{project}/locations/{location}/dataStores/{data_store}/customTuningModels/{custom_tuning_model}".format( + project=project, + location=location, + data_store=data_store, + custom_tuning_model=custom_tuning_model, + ) + actual = SearchTuningServiceClient.custom_tuning_model_path( + project, location, data_store, custom_tuning_model + ) + assert expected == actual + + +def test_parse_custom_tuning_model_path(): + expected = { + "project": "oyster", + "location": "nudibranch", + "data_store": "cuttlefish", + "custom_tuning_model": "mussel", + } + path = SearchTuningServiceClient.custom_tuning_model_path(**expected) + + # Check that the path construction is reversible. 
+ actual = SearchTuningServiceClient.parse_custom_tuning_model_path(path) + assert expected == actual + + +def test_data_store_path(): + project = "winkle" + location = "nautilus" + data_store = "scallop" expected = "projects/{project}/locations/{location}/dataStores/{data_store}".format( project=project, location=location, @@ -2362,9 +2954,9 @@ def test_data_store_path(): def test_parse_data_store_path(): expected = { - "project": "octopus", - "location": "oyster", - "data_store": "nudibranch", + "project": "abalone", + "location": "squid", + "data_store": "clam", } path = SearchTuningServiceClient.data_store_path(**expected) @@ -2374,7 +2966,7 @@ def test_parse_data_store_path(): def test_common_billing_account_path(): - billing_account = "cuttlefish" + billing_account = "whelk" expected = "billingAccounts/{billing_account}".format( billing_account=billing_account, ) @@ -2384,7 +2976,7 @@ def test_common_billing_account_path(): def test_parse_common_billing_account_path(): expected = { - "billing_account": "mussel", + "billing_account": "octopus", } path = SearchTuningServiceClient.common_billing_account_path(**expected) @@ -2394,7 +2986,7 @@ def test_parse_common_billing_account_path(): def test_common_folder_path(): - folder = "winkle" + folder = "oyster" expected = "folders/{folder}".format( folder=folder, ) @@ -2404,7 +2996,7 @@ def test_common_folder_path(): def test_parse_common_folder_path(): expected = { - "folder": "nautilus", + "folder": "nudibranch", } path = SearchTuningServiceClient.common_folder_path(**expected) @@ -2414,7 +3006,7 @@ def test_parse_common_folder_path(): def test_common_organization_path(): - organization = "scallop" + organization = "cuttlefish" expected = "organizations/{organization}".format( organization=organization, ) @@ -2424,7 +3016,7 @@ def test_common_organization_path(): def test_parse_common_organization_path(): expected = { - "organization": "abalone", + "organization": "mussel", } path = SearchTuningServiceClient.common_organization_path(**expected) @@ -2434,7 +3026,7 @@ def test_parse_common_organization_path(): def test_common_project_path(): - project = "squid" + project = "winkle" expected = "projects/{project}".format( project=project, ) @@ -2444,7 +3036,7 @@ def test_common_project_path(): def test_parse_common_project_path(): expected = { - "project": "clam", + "project": "nautilus", } path = SearchTuningServiceClient.common_project_path(**expected) @@ -2454,8 +3046,8 @@ def test_parse_common_project_path(): def test_common_location_path(): - project = "whelk" - location = "octopus" + project = "scallop" + location = "abalone" expected = "projects/{project}/locations/{location}".format( project=project, location=location, @@ -2466,8 +3058,8 @@ def test_common_location_path(): def test_parse_common_location_path(): expected = { - "project": "oyster", - "location": "nudibranch", + "project": "squid", + "location": "clam", } path = SearchTuningServiceClient.common_location_path(**expected) @@ -2513,6 +3105,69 @@ async def test_transport_close_async(): close.assert_called_once() +def test_cancel_operation_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.CancelOperationRequest +): + client = SearchTuningServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict( + { + "name": "projects/sample1/locations/sample2/collections/sample3/dataStores/sample4/branches/sample5/operations/sample6" + }, + request, + ) + + # Mock the 
http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.cancel_operation(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.CancelOperationRequest, + dict, + ], +) +def test_cancel_operation_rest(request_type): + client = SearchTuningServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = { + "name": "projects/sample1/locations/sample2/collections/sample3/dataStores/sample4/branches/sample5/operations/sample6" + } + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "{}" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.cancel_operation(request) + + # Establish that the response is the type that we expect. + assert response is None + + def test_get_operation_rest_bad_request( transport: str = "rest", request_type=operations_pb2.GetOperationRequest ): @@ -2639,6 +3294,145 @@ def test_list_operations_rest(request_type): assert isinstance(response, operations_pb2.ListOperationsResponse) +def test_cancel_operation(transport: str = "grpc"): + client = SearchTuningServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.CancelOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_cancel_operation_async(transport: str = "grpc_asyncio"): + client = SearchTuningServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.CancelOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +def test_cancel_operation_field_headers(): + client = SearchTuningServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.CancelOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + call.return_value = None + + client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_cancel_operation_field_headers_async(): + client = SearchTuningServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.CancelOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_cancel_operation_from_dict(): + client = SearchTuningServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + + response = client.cancel_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_cancel_operation_from_dict_async(): + client = SearchTuningServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.cancel_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + def test_get_operation(transport: str = "grpc"): client = SearchTuningServiceClient( credentials=ga_credentials.AnonymousCredentials(), diff --git a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1alpha/test_serving_config_service.py b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1alpha/test_serving_config_service.py index c2998d9c58c2..230ac1567aa3 100644 --- a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1alpha/test_serving_config_service.py +++ b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1alpha/test_serving_config_service.py @@ -2571,13 +2571,13 @@ def test_list_serving_configs_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_serving_configs(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -4109,6 +4109,7 @@ def test_serving_config_service_base_transport(): "get_serving_config", "list_serving_configs", "get_operation", + "cancel_operation", "list_operations", ) for method in methods: @@ -4678,6 +4679,69 @@ async def test_transport_close_async(): close.assert_called_once() +def test_cancel_operation_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.CancelOperationRequest +): + client = ServingConfigServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict( + { + "name": "projects/sample1/locations/sample2/collections/sample3/dataStores/sample4/branches/sample5/operations/sample6" + }, + request, + ) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.cancel_operation(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.CancelOperationRequest, + dict, + ], +) +def test_cancel_operation_rest(request_type): + client = ServingConfigServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = { + "name": "projects/sample1/locations/sample2/collections/sample3/dataStores/sample4/branches/sample5/operations/sample6" + } + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "{}" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.cancel_operation(request) + + # Establish that the response is the type that we expect. 
+ assert response is None + + def test_get_operation_rest_bad_request( transport: str = "rest", request_type=operations_pb2.GetOperationRequest ): @@ -4804,6 +4868,145 @@ def test_list_operations_rest(request_type): assert isinstance(response, operations_pb2.ListOperationsResponse) +def test_cancel_operation(transport: str = "grpc"): + client = ServingConfigServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.CancelOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_cancel_operation_async(transport: str = "grpc_asyncio"): + client = ServingConfigServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.CancelOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +def test_cancel_operation_field_headers(): + client = ServingConfigServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.CancelOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + call.return_value = None + + client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_cancel_operation_field_headers_async(): + client = ServingConfigServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.CancelOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_cancel_operation_from_dict(): + client = ServingConfigServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + + response = client.cancel_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_cancel_operation_from_dict_async(): + client = ServingConfigServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.cancel_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + def test_get_operation(transport: str = "grpc"): client = ServingConfigServiceClient( credentials=ga_credentials.AnonymousCredentials(), diff --git a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1alpha/test_site_search_engine_service.py b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1alpha/test_site_search_engine_service.py index e90a6f21d93d..b3ec4cbf4783 100644 --- a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1alpha/test_site_search_engine_service.py +++ b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1alpha/test_site_search_engine_service.py @@ -2332,6 +2332,7 @@ def test_get_target_site(request_type, transport: str = "grpc"): type_=site_search_engine.TargetSite.Type.INCLUDE, exact_match=True, generated_uri_pattern="generated_uri_pattern_value", + root_domain_uri="root_domain_uri_value", indexing_status=site_search_engine.TargetSite.IndexingStatus.PENDING, ) response = client.get_target_site(request) @@ -2349,6 +2350,7 @@ def test_get_target_site(request_type, transport: str = "grpc"): assert response.type_ == site_search_engine.TargetSite.Type.INCLUDE assert response.exact_match is True assert response.generated_uri_pattern == "generated_uri_pattern_value" + assert response.root_domain_uri == "root_domain_uri_value" assert ( response.indexing_status == site_search_engine.TargetSite.IndexingStatus.PENDING ) @@ -2455,6 +2457,7 @@ async def test_get_target_site_empty_call_async(): type_=site_search_engine.TargetSite.Type.INCLUDE, exact_match=True, generated_uri_pattern="generated_uri_pattern_value", + root_domain_uri="root_domain_uri_value", indexing_status=site_search_engine.TargetSite.IndexingStatus.PENDING, ) ) @@ -2534,6 +2537,7 @@ async def test_get_target_site_async( type_=site_search_engine.TargetSite.Type.INCLUDE, exact_match=True, generated_uri_pattern="generated_uri_pattern_value", + root_domain_uri="root_domain_uri_value", 
indexing_status=site_search_engine.TargetSite.IndexingStatus.PENDING, ) ) @@ -2552,6 +2556,7 @@ async def test_get_target_site_async( assert response.type_ == site_search_engine.TargetSite.Type.INCLUDE assert response.exact_match is True assert response.generated_uri_pattern == "generated_uri_pattern_value" + assert response.root_domain_uri == "root_domain_uri_value" assert ( response.indexing_status == site_search_engine.TargetSite.IndexingStatus.PENDING ) @@ -3921,13 +3926,13 @@ def test_list_target_sites_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_target_sites(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -5652,13 +5657,13 @@ def test_fetch_domain_verification_status_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("site_search_engine", ""),)), ) pager = client.fetch_domain_verification_status(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -6149,6 +6154,7 @@ def test_create_target_site_rest(request_type): "type_": 1, "exact_match": True, "generated_uri_pattern": "generated_uri_pattern_value", + "root_domain_uri": "root_domain_uri_value", "site_verification_info": { "site_verification_state": 1, "verify_time": {"seconds": 751, "nanos": 543}, @@ -6823,6 +6829,7 @@ def test_get_target_site_rest(request_type): type_=site_search_engine.TargetSite.Type.INCLUDE, exact_match=True, generated_uri_pattern="generated_uri_pattern_value", + root_domain_uri="root_domain_uri_value", indexing_status=site_search_engine.TargetSite.IndexingStatus.PENDING, ) @@ -6844,6 +6851,7 @@ def test_get_target_site_rest(request_type): assert response.type_ == site_search_engine.TargetSite.Type.INCLUDE assert response.exact_match is True assert response.generated_uri_pattern == "generated_uri_pattern_value" + assert response.root_domain_uri == "root_domain_uri_value" assert ( response.indexing_status == site_search_engine.TargetSite.IndexingStatus.PENDING ) @@ -7142,6 +7150,7 @@ def test_update_target_site_rest(request_type): "type_": 1, "exact_match": True, "generated_uri_pattern": "generated_uri_pattern_value", + "root_domain_uri": "root_domain_uri_value", "site_verification_info": { "site_verification_state": 1, "verify_time": {"seconds": 751, "nanos": 543}, @@ -9754,6 +9763,7 @@ def test_site_search_engine_service_base_transport(): "batch_verify_target_sites", "fetch_domain_verification_status", "get_operation", + "cancel_operation", "list_operations", ) for method in methods: @@ -10436,6 +10446,69 @@ async def test_transport_close_async(): close.assert_called_once() +def test_cancel_operation_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.CancelOperationRequest +): + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict( + { + "name": "projects/sample1/locations/sample2/collections/sample3/dataStores/sample4/branches/sample5/operations/sample6" + }, + request, + ) + + # Mock the http 
request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.cancel_operation(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.CancelOperationRequest, + dict, + ], +) +def test_cancel_operation_rest(request_type): + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = { + "name": "projects/sample1/locations/sample2/collections/sample3/dataStores/sample4/branches/sample5/operations/sample6" + } + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "{}" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.cancel_operation(request) + + # Establish that the response is the type that we expect. + assert response is None + + def test_get_operation_rest_bad_request( transport: str = "rest", request_type=operations_pb2.GetOperationRequest ): @@ -10562,6 +10635,145 @@ def test_list_operations_rest(request_type): assert isinstance(response, operations_pb2.ListOperationsResponse) +def test_cancel_operation(transport: str = "grpc"): + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.CancelOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_cancel_operation_async(transport: str = "grpc_asyncio"): + client = SiteSearchEngineServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.CancelOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +def test_cancel_operation_field_headers(): + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.CancelOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + call.return_value = None + + client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_cancel_operation_field_headers_async(): + client = SiteSearchEngineServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.CancelOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_cancel_operation_from_dict(): + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + + response = client.cancel_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_cancel_operation_from_dict_async(): + client = SiteSearchEngineServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.cancel_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + def test_get_operation(transport: str = "grpc"): client = SiteSearchEngineServiceClient( credentials=ga_credentials.AnonymousCredentials(), diff --git a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1alpha/test_user_event_service.py b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1alpha/test_user_event_service.py index 7727da22c9e3..0005eb01414c 100644 --- a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1alpha/test_user_event_service.py +++ b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1alpha/test_user_event_service.py @@ -1190,6 +1190,8 @@ def test_write_user_event(request_type, transport: str = "grpc"): call.return_value = user_event.UserEvent( event_type="event_type_value", user_pseudo_id="user_pseudo_id_value", + engine="engine_value", + data_store="data_store_value", direct_user_request=True, session_id="session_id_value", attribution_token="attribution_token_value", @@ -1209,6 +1211,8 @@ def test_write_user_event(request_type, transport: str = "grpc"): assert isinstance(response, user_event.UserEvent) assert response.event_type == "event_type_value" assert response.user_pseudo_id == "user_pseudo_id_value" + assert response.engine == "engine_value" + assert response.data_store == "data_store_value" assert response.direct_user_request is True assert response.session_id == "session_id_value" assert response.attribution_token == "attribution_token_value" @@ -1317,6 +1321,8 @@ async def test_write_user_event_empty_call_async(): user_event.UserEvent( event_type="event_type_value", user_pseudo_id="user_pseudo_id_value", + engine="engine_value", + data_store="data_store_value", direct_user_request=True, session_id="session_id_value", attribution_token="attribution_token_value", @@ -1398,6 +1404,8 @@ async def test_write_user_event_async( user_event.UserEvent( event_type="event_type_value", user_pseudo_id="user_pseudo_id_value", + engine="engine_value", + data_store="data_store_value", direct_user_request=True, session_id="session_id_value", attribution_token="attribution_token_value", @@ -1418,6 +1426,8 @@ async def test_write_user_event_async( assert isinstance(response, user_event.UserEvent) assert response.event_type == "event_type_value" assert response.user_pseudo_id == "user_pseudo_id_value" + assert response.engine == "engine_value" + assert response.data_store == "data_store_value" assert response.direct_user_request is True assert response.session_id == "session_id_value" assert response.attribution_token == "attribution_token_value" @@ -2435,6 +2445,8 @@ def test_write_user_event_rest(request_type): request_init["user_event"] = { "event_type": "event_type_value", "user_pseudo_id": "user_pseudo_id_value", + "engine": "engine_value", + "data_store": "data_store_value", "event_time": {"seconds": 751, "nanos": 543}, "user_info": {"user_id": "user_id_value", "user_agent": "user_agent_value"}, "direct_user_request": True, @@ -2562,6 +2574,8 @@ def get_message_fields(field): return_value = user_event.UserEvent( event_type="event_type_value", user_pseudo_id="user_pseudo_id_value", + engine="engine_value", + data_store="data_store_value", direct_user_request=True, session_id="session_id_value", attribution_token="attribution_token_value", @@ -2585,6 +2599,8 @@ def get_message_fields(field): assert 
isinstance(response, user_event.UserEvent) assert response.event_type == "event_type_value" assert response.user_pseudo_id == "user_pseudo_id_value" + assert response.engine == "engine_value" + assert response.data_store == "data_store_value" assert response.direct_user_request is True assert response.session_id == "session_id_value" assert response.attribution_token == "attribution_token_value" @@ -2658,6 +2674,8 @@ def test_write_user_event_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() ).write_user_event._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("write_async",)) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone @@ -2713,7 +2731,7 @@ def test_write_user_event_rest_unset_required_fields(): unset_fields = transport.write_user_event._get_unset_required_fields({}) assert set(unset_fields) == ( - set(()) + set(("writeAsync",)) & set( ( "parent", @@ -3741,6 +3759,7 @@ def test_user_event_service_base_transport(): "purge_user_events", "import_user_events", "get_operation", + "cancel_operation", "list_operations", ) for method in methods: @@ -4256,8 +4275,37 @@ def test_parse_document_path(): assert expected == actual +def test_engine_path(): + project = "oyster" + location = "nudibranch" + collection = "cuttlefish" + engine = "mussel" + expected = "projects/{project}/locations/{location}/collections/{collection}/engines/{engine}".format( + project=project, + location=location, + collection=collection, + engine=engine, + ) + actual = UserEventServiceClient.engine_path(project, location, collection, engine) + assert expected == actual + + +def test_parse_engine_path(): + expected = { + "project": "winkle", + "location": "nautilus", + "collection": "scallop", + "engine": "abalone", + } + path = UserEventServiceClient.engine_path(**expected) + + # Check that the path construction is reversible. 
+ actual = UserEventServiceClient.parse_engine_path(path) + assert expected == actual + + def test_common_billing_account_path(): - billing_account = "oyster" + billing_account = "squid" expected = "billingAccounts/{billing_account}".format( billing_account=billing_account, ) @@ -4267,7 +4315,7 @@ def test_common_billing_account_path(): def test_parse_common_billing_account_path(): expected = { - "billing_account": "nudibranch", + "billing_account": "clam", } path = UserEventServiceClient.common_billing_account_path(**expected) @@ -4277,7 +4325,7 @@ def test_parse_common_billing_account_path(): def test_common_folder_path(): - folder = "cuttlefish" + folder = "whelk" expected = "folders/{folder}".format( folder=folder, ) @@ -4287,7 +4335,7 @@ def test_common_folder_path(): def test_parse_common_folder_path(): expected = { - "folder": "mussel", + "folder": "octopus", } path = UserEventServiceClient.common_folder_path(**expected) @@ -4297,7 +4345,7 @@ def test_parse_common_folder_path(): def test_common_organization_path(): - organization = "winkle" + organization = "oyster" expected = "organizations/{organization}".format( organization=organization, ) @@ -4307,7 +4355,7 @@ def test_common_organization_path(): def test_parse_common_organization_path(): expected = { - "organization": "nautilus", + "organization": "nudibranch", } path = UserEventServiceClient.common_organization_path(**expected) @@ -4317,7 +4365,7 @@ def test_parse_common_organization_path(): def test_common_project_path(): - project = "scallop" + project = "cuttlefish" expected = "projects/{project}".format( project=project, ) @@ -4327,7 +4375,7 @@ def test_common_project_path(): def test_parse_common_project_path(): expected = { - "project": "abalone", + "project": "mussel", } path = UserEventServiceClient.common_project_path(**expected) @@ -4337,8 +4385,8 @@ def test_parse_common_project_path(): def test_common_location_path(): - project = "squid" - location = "clam" + project = "winkle" + location = "nautilus" expected = "projects/{project}/locations/{location}".format( project=project, location=location, @@ -4349,8 +4397,8 @@ def test_common_location_path(): def test_parse_common_location_path(): expected = { - "project": "whelk", - "location": "octopus", + "project": "scallop", + "location": "abalone", } path = UserEventServiceClient.common_location_path(**expected) @@ -4396,6 +4444,69 @@ async def test_transport_close_async(): close.assert_called_once() +def test_cancel_operation_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.CancelOperationRequest +): + client = UserEventServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict( + { + "name": "projects/sample1/locations/sample2/collections/sample3/dataStores/sample4/branches/sample5/operations/sample6" + }, + request, + ) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.cancel_operation(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.CancelOperationRequest, + dict, + ], +) +def test_cancel_operation_rest(request_type): + client = UserEventServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = { + "name": "projects/sample1/locations/sample2/collections/sample3/dataStores/sample4/branches/sample5/operations/sample6" + } + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "{}" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.cancel_operation(request) + + # Establish that the response is the type that we expect. + assert response is None + + def test_get_operation_rest_bad_request( transport: str = "rest", request_type=operations_pb2.GetOperationRequest ): @@ -4522,6 +4633,145 @@ def test_list_operations_rest(request_type): assert isinstance(response, operations_pb2.ListOperationsResponse) +def test_cancel_operation(transport: str = "grpc"): + client = UserEventServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.CancelOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_cancel_operation_async(transport: str = "grpc_asyncio"): + client = UserEventServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.CancelOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert response is None + + +def test_cancel_operation_field_headers(): + client = UserEventServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.CancelOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + call.return_value = None + + client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_cancel_operation_field_headers_async(): + client = UserEventServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.CancelOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_cancel_operation_from_dict(): + client = UserEventServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + + response = client.cancel_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_cancel_operation_from_dict_async(): + client = UserEventServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.cancel_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + def test_get_operation(transport: str = "grpc"): client = UserEventServiceClient( credentials=ga_credentials.AnonymousCredentials(), diff --git a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1beta/test_completion_service.py b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1beta/test_completion_service.py index 72ca2945613b..b365413ca463 100644 --- a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1beta/test_completion_service.py +++ b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1beta/test_completion_service.py @@ -3025,6 +3025,7 @@ def test_completion_service_base_transport(): "import_suggestion_deny_list_entries", "purge_suggestion_deny_list_entries", "get_operation", + "cancel_operation", "list_operations", ) for method in methods: @@ -3643,6 +3644,69 @@ async def test_transport_close_async(): close.assert_called_once() +def test_cancel_operation_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.CancelOperationRequest +): + client = CompletionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict( + { + "name": "projects/sample1/locations/sample2/collections/sample3/dataStores/sample4/branches/sample5/operations/sample6" + }, + request, + ) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.cancel_operation(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.CancelOperationRequest, + dict, + ], +) +def test_cancel_operation_rest(request_type): + client = CompletionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = { + "name": "projects/sample1/locations/sample2/collections/sample3/dataStores/sample4/branches/sample5/operations/sample6" + } + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "{}" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.cancel_operation(request) + + # Establish that the response is the type that we expect. 
+ assert response is None + + def test_get_operation_rest_bad_request( transport: str = "rest", request_type=operations_pb2.GetOperationRequest ): @@ -3769,6 +3833,145 @@ def test_list_operations_rest(request_type): assert isinstance(response, operations_pb2.ListOperationsResponse) +def test_cancel_operation(transport: str = "grpc"): + client = CompletionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.CancelOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_cancel_operation_async(transport: str = "grpc_asyncio"): + client = CompletionServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.CancelOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +def test_cancel_operation_field_headers(): + client = CompletionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.CancelOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + call.return_value = None + + client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_cancel_operation_field_headers_async(): + client = CompletionServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.CancelOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_cancel_operation_from_dict(): + client = CompletionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + + response = client.cancel_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_cancel_operation_from_dict_async(): + client = CompletionServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.cancel_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + def test_get_operation(transport: str = "grpc"): client = CompletionServiceClient( credentials=ga_credentials.AnonymousCredentials(), diff --git a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1beta/test_control_service.py b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1beta/test_control_service.py new file mode 100644 index 000000000000..9daf64298d50 --- /dev/null +++ b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1beta/test_control_service.py @@ -0,0 +1,6535 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import os + +# try/except added for compatibility with python < 3.8 +try: + from unittest import mock + from unittest.mock import AsyncMock # pragma: NO COVER +except ImportError: # pragma: NO COVER + import mock + +from collections.abc import Iterable +import json +import math + +from google.api_core import gapic_v1, grpc_helpers, grpc_helpers_async, path_template +from google.api_core import api_core_version, client_options +from google.api_core import exceptions as core_exceptions +import google.auth +from google.auth import credentials as ga_credentials +from google.auth.exceptions import MutualTLSChannelError +from google.cloud.location import locations_pb2 +from google.longrunning import operations_pb2 # type: ignore +from google.oauth2 import service_account +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import json_format +from google.protobuf import timestamp_pb2 # type: ignore +import grpc +from grpc.experimental import aio +from proto.marshal.rules import wrappers +from proto.marshal.rules.dates import DurationRule, TimestampRule +import pytest +from requests import PreparedRequest, Request, Response +from requests.sessions import Session + +from google.cloud.discoveryengine_v1beta.services.control_service import ( + ControlServiceAsyncClient, + ControlServiceClient, + pagers, + transports, +) +from google.cloud.discoveryengine_v1beta.types import common +from google.cloud.discoveryengine_v1beta.types import control +from google.cloud.discoveryengine_v1beta.types import control as gcd_control +from google.cloud.discoveryengine_v1beta.types import control_service + + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + + +# If default endpoint is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. +def modify_default_endpoint(client): + return ( + "foo.googleapis.com" + if ("localhost" in client.DEFAULT_ENDPOINT) + else client.DEFAULT_ENDPOINT + ) + + +# If default endpoint template is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint template so the client can produce a different +# mtls endpoint for endpoint testing purposes. 
+def modify_default_endpoint_template(client): + return ( + "test.{UNIVERSE_DOMAIN}" + if ("localhost" in client._DEFAULT_ENDPOINT_TEMPLATE) + else client._DEFAULT_ENDPOINT_TEMPLATE + ) + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert ControlServiceClient._get_default_mtls_endpoint(None) is None + assert ( + ControlServiceClient._get_default_mtls_endpoint(api_endpoint) + == api_mtls_endpoint + ) + assert ( + ControlServiceClient._get_default_mtls_endpoint(api_mtls_endpoint) + == api_mtls_endpoint + ) + assert ( + ControlServiceClient._get_default_mtls_endpoint(sandbox_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + ControlServiceClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + ControlServiceClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi + ) + + +def test__read_environment_variables(): + assert ControlServiceClient._read_environment_variables() == (False, "auto", None) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + assert ControlServiceClient._read_environment_variables() == ( + True, + "auto", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + assert ControlServiceClient._read_environment_variables() == ( + False, + "auto", + None, + ) + + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError) as excinfo: + ControlServiceClient._read_environment_variables() + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + assert ControlServiceClient._read_environment_variables() == ( + False, + "never", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + assert ControlServiceClient._read_environment_variables() == ( + False, + "always", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}): + assert ControlServiceClient._read_environment_variables() == ( + False, + "auto", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + ControlServiceClient._read_environment_variables() + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + with mock.patch.dict(os.environ, {"GOOGLE_CLOUD_UNIVERSE_DOMAIN": "foo.com"}): + assert ControlServiceClient._read_environment_variables() == ( + False, + "auto", + "foo.com", + ) + + +def test__get_client_cert_source(): + mock_provided_cert_source = mock.Mock() + mock_default_cert_source = mock.Mock() + + assert ControlServiceClient._get_client_cert_source(None, False) is None + assert ( + ControlServiceClient._get_client_cert_source(mock_provided_cert_source, False) + is None + ) + assert ( + ControlServiceClient._get_client_cert_source(mock_provided_cert_source, True) + == mock_provided_cert_source + ) + + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", return_value=True + ): + with mock.patch( + 
"google.auth.transport.mtls.default_client_cert_source", + return_value=mock_default_cert_source, + ): + assert ( + ControlServiceClient._get_client_cert_source(None, True) + is mock_default_cert_source + ) + assert ( + ControlServiceClient._get_client_cert_source( + mock_provided_cert_source, "true" + ) + is mock_provided_cert_source + ) + + +@mock.patch.object( + ControlServiceClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(ControlServiceClient), +) +@mock.patch.object( + ControlServiceAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(ControlServiceAsyncClient), +) +def test__get_api_endpoint(): + api_override = "foo.com" + mock_client_cert_source = mock.Mock() + default_universe = ControlServiceClient._DEFAULT_UNIVERSE + default_endpoint = ControlServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=default_universe + ) + mock_universe = "bar.com" + mock_endpoint = ControlServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=mock_universe + ) + + assert ( + ControlServiceClient._get_api_endpoint( + api_override, mock_client_cert_source, default_universe, "always" + ) + == api_override + ) + assert ( + ControlServiceClient._get_api_endpoint( + None, mock_client_cert_source, default_universe, "auto" + ) + == ControlServiceClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + ControlServiceClient._get_api_endpoint(None, None, default_universe, "auto") + == default_endpoint + ) + assert ( + ControlServiceClient._get_api_endpoint(None, None, default_universe, "always") + == ControlServiceClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + ControlServiceClient._get_api_endpoint( + None, mock_client_cert_source, default_universe, "always" + ) + == ControlServiceClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + ControlServiceClient._get_api_endpoint(None, None, mock_universe, "never") + == mock_endpoint + ) + assert ( + ControlServiceClient._get_api_endpoint(None, None, default_universe, "never") + == default_endpoint + ) + + with pytest.raises(MutualTLSChannelError) as excinfo: + ControlServiceClient._get_api_endpoint( + None, mock_client_cert_source, mock_universe, "auto" + ) + assert ( + str(excinfo.value) + == "mTLS is not supported in any universe other than googleapis.com." + ) + + +def test__get_universe_domain(): + client_universe_domain = "foo.com" + universe_domain_env = "bar.com" + + assert ( + ControlServiceClient._get_universe_domain( + client_universe_domain, universe_domain_env + ) + == client_universe_domain + ) + assert ( + ControlServiceClient._get_universe_domain(None, universe_domain_env) + == universe_domain_env + ) + assert ( + ControlServiceClient._get_universe_domain(None, None) + == ControlServiceClient._DEFAULT_UNIVERSE + ) + + with pytest.raises(ValueError) as excinfo: + ControlServiceClient._get_universe_domain("", None) + assert str(excinfo.value) == "Universe Domain cannot be an empty string." + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (ControlServiceClient, transports.ControlServiceGrpcTransport, "grpc"), + (ControlServiceClient, transports.ControlServiceRestTransport, "rest"), + ], +) +def test__validate_universe_domain(client_class, transport_class, transport_name): + client = client_class( + transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) + ) + assert client._validate_universe_domain() == True + + # Test the case when universe is already validated. 
+ assert client._validate_universe_domain() == True + + if transport_name == "grpc": + # Test the case where credentials are provided by the + # `local_channel_credentials`. The default universes in both match. + channel = grpc.secure_channel( + "http://localhost/", grpc.local_channel_credentials() + ) + client = client_class(transport=transport_class(channel=channel)) + assert client._validate_universe_domain() == True + + # Test the case where credentials do not exist: e.g. a transport is provided + # with no credentials. Validation should still succeed because there is no + # mismatch with non-existent credentials. + channel = grpc.secure_channel( + "http://localhost/", grpc.local_channel_credentials() + ) + transport = transport_class(channel=channel) + transport._credentials = None + client = client_class(transport=transport) + assert client._validate_universe_domain() == True + + # TODO: This is needed to cater for older versions of google-auth + # Make this test unconditional once the minimum supported version of + # google-auth becomes 2.23.0 or higher. + google_auth_major, google_auth_minor = [ + int(part) for part in google.auth.__version__.split(".")[0:2] + ] + if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): + credentials = ga_credentials.AnonymousCredentials() + credentials._universe_domain = "foo.com" + # Test the case when there is a universe mismatch from the credentials. + client = client_class(transport=transport_class(credentials=credentials)) + with pytest.raises(ValueError) as excinfo: + client._validate_universe_domain() + assert ( + str(excinfo.value) + == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." + ) + + # Test the case when there is a universe mismatch from the client. + # + # TODO: Make this test unconditional once the minimum supported version of + # google-api-core becomes 2.15.0 or higher. + api_core_major, api_core_minor = [ + int(part) for part in api_core_version.__version__.split(".")[0:2] + ] + if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): + client = client_class( + client_options={"universe_domain": "bar.com"}, + transport=transport_class( + credentials=ga_credentials.AnonymousCredentials(), + ), + ) + with pytest.raises(ValueError) as excinfo: + client._validate_universe_domain() + assert ( + str(excinfo.value) + == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." 
+ ) + + # Test that ValueError is raised if universe_domain is provided via client options and credentials is None + with pytest.raises(ValueError): + client._compare_universes("foo.bar", None) + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (ControlServiceClient, "grpc"), + (ControlServiceAsyncClient, "grpc_asyncio"), + (ControlServiceClient, "rest"), + ], +) +def test_control_service_client_from_service_account_info(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_info" + ) as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info, transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + "discoveryengine.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://discoveryengine.googleapis.com" + ) + + +@pytest.mark.parametrize( + "transport_class,transport_name", + [ + (transports.ControlServiceGrpcTransport, "grpc"), + (transports.ControlServiceGrpcAsyncIOTransport, "grpc_asyncio"), + (transports.ControlServiceRestTransport, "rest"), + ], +) +def test_control_service_client_service_account_always_use_jwt( + transport_class, transport_name +): + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) + + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=False) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (ControlServiceClient, "grpc"), + (ControlServiceAsyncClient, "grpc_asyncio"), + (ControlServiceClient, "rest"), + ], +) +def test_control_service_client_from_service_account_file(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_file" + ) as factory: + factory.return_value = creds + client = client_class.from_service_account_file( + "dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + client = client_class.from_service_account_json( + "dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + "discoveryengine.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://discoveryengine.googleapis.com" + ) + + +def test_control_service_client_get_transport_class(): + transport = ControlServiceClient.get_transport_class() + available_transports = [ + transports.ControlServiceGrpcTransport, + transports.ControlServiceRestTransport, + ] + assert transport in available_transports + + transport = ControlServiceClient.get_transport_class("grpc") + assert transport == transports.ControlServiceGrpcTransport + + +@pytest.mark.parametrize( + 
"client_class,transport_class,transport_name", + [ + (ControlServiceClient, transports.ControlServiceGrpcTransport, "grpc"), + ( + ControlServiceAsyncClient, + transports.ControlServiceGrpcAsyncIOTransport, + "grpc_asyncio", + ), + (ControlServiceClient, transports.ControlServiceRestTransport, "rest"), + ], +) +@mock.patch.object( + ControlServiceClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(ControlServiceClient), +) +@mock.patch.object( + ControlServiceAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(ControlServiceAsyncClient), +) +def test_control_service_client_client_options( + client_class, transport_class, transport_name +): + # Check that if channel is provided we won't create a new one. + with mock.patch.object(ControlServiceClient, "get_transport_class") as gtc: + transport = transport_class(credentials=ga_credentials.AnonymousCredentials()) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object(ControlServiceClient, "get_transport_class") as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. + options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name, client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + client = client_class(transport=transport_name) + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError) as excinfo: + client = client_class(transport=transport_name) + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + # Check the case api_endpoint is provided + options = client_options.ClientOptions( + api_audience="https://language.googleapis.com" + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience="https://language.googleapis.com", + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,use_client_cert_env", + [ + (ControlServiceClient, transports.ControlServiceGrpcTransport, "grpc", "true"), + ( + ControlServiceAsyncClient, + transports.ControlServiceGrpcAsyncIOTransport, + "grpc_asyncio", + "true", + ), + (ControlServiceClient, transports.ControlServiceGrpcTransport, "grpc", "false"), + ( + ControlServiceAsyncClient, + transports.ControlServiceGrpcAsyncIOTransport, + "grpc_asyncio", + "false", + ), + (ControlServiceClient, transports.ControlServiceRestTransport, "rest", "true"), + (ControlServiceClient, transports.ControlServiceRestTransport, "rest", "false"), + ], +) +@mock.patch.object( + ControlServiceClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(ControlServiceClient), +) +@mock.patch.object( + ControlServiceAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(ControlServiceAsyncClient), +) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_control_service_client_mtls_env_auto( + client_class, transport_class, transport_name, use_client_cert_env +): + # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. 
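Before the individual cases, a compact restatement of the autoswitch rule being verified may help. This is an illustrative sketch in plain Python, not the client's internal logic.

```python
# Illustrative only: the rule test_control_service_client_mtls_env_auto checks.
# The client certificate is honoured, and the mTLS endpoint selected, only when
# GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" AND a cert source is available.
def pick_host_and_cert(use_client_cert_env, cert_source, default_host, mtls_host):
    if use_client_cert_env == "true" and cert_source is not None:
        return mtls_host, cert_source
    return default_host, None


assert pick_host_and_cert(
    "false",
    object(),
    "discoveryengine.googleapis.com",
    "discoveryengine.mtls.googleapis.com",
) == ("discoveryengine.googleapis.com", None)
assert pick_host_and_cert(
    "true",
    None,
    "discoveryengine.googleapis.com",
    "discoveryengine.mtls.googleapis.com",
) == ("discoveryengine.googleapis.com", None)
```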
+ + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + options = client_options.ClientOptions( + client_cert_source=client_cert_source_callback + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ) + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=client_cert_source_callback, + ): + if use_client_cert_env == "false": + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ) + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case client_cert_source and ADC client cert are not provided. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class", [ControlServiceClient, ControlServiceAsyncClient] +) +@mock.patch.object( + ControlServiceClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(ControlServiceClient), +) +@mock.patch.object( + ControlServiceAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(ControlServiceAsyncClient), +) +def test_control_service_client_get_mtls_endpoint_and_cert_source(client_class): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. 
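The classmethod exercised in this test is also callable directly from application code. A minimal, hedged usage sketch follows, assuming the package built from this PR is installed; no network access is needed.

```python
import os
from unittest import mock

from google.cloud.discoveryengine_v1beta.services.control_service import (
    ControlServiceClient,
)

# With GOOGLE_API_USE_MTLS_ENDPOINT=never the plain endpoint is returned and no
# cert source is resolved, matching the assertions in the surrounding test.
with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}):
    endpoint, cert_source = ControlServiceClient.get_mtls_endpoint_and_cert_source()

assert endpoint == ControlServiceClient.DEFAULT_ENDPOINT
assert cert_source is None
```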
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=mock_client_cert_source, + ): + ( + api_endpoint, + cert_source, + ) = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + client_class.get_mtls_endpoint_and_cert_source() + + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError) as excinfo: + client_class.get_mtls_endpoint_and_cert_source() + + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + + +@pytest.mark.parametrize( + "client_class", [ControlServiceClient, ControlServiceAsyncClient] +) +@mock.patch.object( + ControlServiceClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(ControlServiceClient), +) +@mock.patch.object( + ControlServiceAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(ControlServiceAsyncClient), +) +def test_control_service_client_client_api_endpoint(client_class): + mock_client_cert_source = client_cert_source_callback + api_override = "foo.com" + default_universe = ControlServiceClient._DEFAULT_UNIVERSE + default_endpoint = ControlServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=default_universe + ) + mock_universe = "bar.com" + mock_endpoint = ControlServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=mock_universe + ) + + # If ClientOptions.api_endpoint is set and GOOGLE_API_USE_CLIENT_CERTIFICATE="true", + # use ClientOptions.api_endpoint as the api endpoint regardless. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ): + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=api_override + ) + client = client_class( + client_options=options, + credentials=ga_credentials.AnonymousCredentials(), + ) + assert client.api_endpoint == api_override + + # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="never", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + client = client_class(credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == default_endpoint + + # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="always", + # use the DEFAULT_MTLS_ENDPOINT as the api endpoint. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + client = client_class(credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + + # If ClientOptions.api_endpoint is not set, GOOGLE_API_USE_MTLS_ENDPOINT="auto" (default), + # GOOGLE_API_USE_CLIENT_CERTIFICATE="false" (default), default cert source doesn't exist, + # and ClientOptions.universe_domain="bar.com", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with universe domain as the api endpoint. + options = client_options.ClientOptions() + universe_exists = hasattr(options, "universe_domain") + if universe_exists: + options = client_options.ClientOptions(universe_domain=mock_universe) + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + else: + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + assert client.api_endpoint == ( + mock_endpoint if universe_exists else default_endpoint + ) + assert client.universe_domain == ( + mock_universe if universe_exists else default_universe + ) + + # If ClientOptions does not have a universe domain attribute and GOOGLE_API_USE_MTLS_ENDPOINT="never", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. + options = client_options.ClientOptions() + if hasattr(options, "universe_domain"): + delattr(options, "universe_domain") + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + assert client.api_endpoint == default_endpoint + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (ControlServiceClient, transports.ControlServiceGrpcTransport, "grpc"), + ( + ControlServiceAsyncClient, + transports.ControlServiceGrpcAsyncIOTransport, + "grpc_asyncio", + ), + (ControlServiceClient, transports.ControlServiceRestTransport, "rest"), + ], +) +def test_control_service_client_client_options_scopes( + client_class, transport_class, transport_name +): + # Check the case scopes are provided. + options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + ControlServiceClient, + transports.ControlServiceGrpcTransport, + "grpc", + grpc_helpers, + ), + ( + ControlServiceAsyncClient, + transports.ControlServiceGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + (ControlServiceClient, transports.ControlServiceRestTransport, "rest", None), + ], +) +def test_control_service_client_client_options_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. 
+ options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +def test_control_service_client_client_options_from_dict(): + with mock.patch( + "google.cloud.discoveryengine_v1beta.services.control_service.transports.ControlServiceGrpcTransport.__init__" + ) as grpc_transport: + grpc_transport.return_value = None + client = ControlServiceClient( + client_options={"api_endpoint": "squid.clam.whelk"} + ) + grpc_transport.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + ControlServiceClient, + transports.ControlServiceGrpcTransport, + "grpc", + grpc_helpers, + ), + ( + ControlServiceAsyncClient, + transports.ControlServiceGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + ], +) +def test_control_service_client_create_channel_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. + options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # test that the credentials from file are saved and used as the credentials. 
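A usage-level note on the option shapes covered above: `client_options` accepts scopes and a credentials file as well as an endpoint, and the `"credentials.json"` path in these tests is only a placeholder. A small hedged sketch that builds the option objects without touching the network or a real key file:

```python
from google.api_core.client_options import ClientOptions

# Both shapes are accepted by the client constructor; the file path is a
# placeholder, exactly as it is in the tests above.
opts_with_scopes = ClientOptions(
    scopes=["https://www.googleapis.com/auth/cloud-platform"]
)
opts_with_key_file = ClientOptions(credentials_file="credentials.json")

assert opts_with_key_file.credentials_file == "credentials.json"
assert opts_with_scopes.scopes == ["https://www.googleapis.com/auth/cloud-platform"]
```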
+ with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel" + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + file_creds = ga_credentials.AnonymousCredentials() + load_creds.return_value = (file_creds, None) + adc.return_value = (creds, None) + client = client_class(client_options=options, transport=transport_name) + create_channel.assert_called_with( + "discoveryengine.googleapis.com:443", + credentials=file_creds, + credentials_file=None, + quota_project_id=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + scopes=None, + default_host="discoveryengine.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "request_type", + [ + control_service.CreateControlRequest, + dict, + ], +) +def test_create_control(request_type, transport: str = "grpc"): + client = ControlServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_control), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = gcd_control.Control( + name="name_value", + display_name="display_name_value", + associated_serving_config_ids=["associated_serving_config_ids_value"], + solution_type=common.SolutionType.SOLUTION_TYPE_RECOMMENDATION, + use_cases=[common.SearchUseCase.SEARCH_USE_CASE_SEARCH], + ) + response = client.create_control(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = control_service.CreateControlRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, gcd_control.Control) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.associated_serving_config_ids == [ + "associated_serving_config_ids_value" + ] + assert response.solution_type == common.SolutionType.SOLUTION_TYPE_RECOMMENDATION + assert response.use_cases == [common.SearchUseCase.SEARCH_USE_CASE_SEARCH] + + +def test_create_control_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ControlServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_control), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.create_control() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == control_service.CreateControlRequest() + + +def test_create_control_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = ControlServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = control_service.CreateControlRequest( + parent="parent_value", + control_id="control_id_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_control), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.create_control(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == control_service.CreateControlRequest( + parent="parent_value", + control_id="control_id_value", + ) + + +def test_create_control_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ControlServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.create_control in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.create_control] = mock_rpc + request = {} + client.create_control(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.create_control(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_create_control_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ControlServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_control), "__call__") as call: + # Designate an appropriate return value for the call. 
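The caching contract asserted in test_create_control_use_cached_wrapped_rpc above boils down to "wrap each RPC once at construction, reuse the wrapper thereafter". A minimal standalone sketch of that pattern follows; the class and attribute names are illustrative, not the transport's real internals.

```python
class SketchTransport:
    """Toy stand-in for a GAPIC transport's _wrapped_methods cache."""

    def __init__(self, rpcs):
        self.wrap_count = 0
        # analogous to _prep_wrapped_messages(): wrap everything once, up front
        self._wrapped = {name: self._wrap(fn) for name, fn in rpcs.items()}

    def _wrap(self, fn):
        self.wrap_count += 1

        def wrapped(request):
            # retry, timeout and metadata decoration would be applied here
            return fn(request)

        return wrapped

    def call(self, name, request):
        return self._wrapped[name](request)  # reuse the cached wrapper


transport = SketchTransport({"create_control": lambda request: request})
transport.call("create_control", {})
transport.call("create_control", {})
assert transport.wrap_count == 1  # wrapped once, called twice, as in the test
```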
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + gcd_control.Control( + name="name_value", + display_name="display_name_value", + associated_serving_config_ids=["associated_serving_config_ids_value"], + solution_type=common.SolutionType.SOLUTION_TYPE_RECOMMENDATION, + use_cases=[common.SearchUseCase.SEARCH_USE_CASE_SEARCH], + ) + ) + response = await client.create_control() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == control_service.CreateControlRequest() + + +@pytest.mark.asyncio +async def test_create_control_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = ControlServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.create_control + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[ + client._client._transport.create_control + ] = mock_object + + request = {} + await client.create_control(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + await client.create_control(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + +@pytest.mark.asyncio +async def test_create_control_async( + transport: str = "grpc_asyncio", request_type=control_service.CreateControlRequest +): + client = ControlServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_control), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + gcd_control.Control( + name="name_value", + display_name="display_name_value", + associated_serving_config_ids=["associated_serving_config_ids_value"], + solution_type=common.SolutionType.SOLUTION_TYPE_RECOMMENDATION, + use_cases=[common.SearchUseCase.SEARCH_USE_CASE_SEARCH], + ) + ) + response = await client.create_control(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = control_service.CreateControlRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, gcd_control.Control) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.associated_serving_config_ids == [ + "associated_serving_config_ids_value" + ] + assert response.solution_type == common.SolutionType.SOLUTION_TYPE_RECOMMENDATION + assert response.use_cases == [common.SearchUseCase.SEARCH_USE_CASE_SEARCH] + + +@pytest.mark.asyncio +async def test_create_control_async_from_dict(): + await test_create_control_async(request_type=dict) + + +def test_create_control_field_headers(): + client = ControlServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = control_service.CreateControlRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_control), "__call__") as call: + call.return_value = gcd_control.Control() + client.create_control(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_create_control_field_headers_async(): + client = ControlServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = control_service.CreateControlRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_control), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gcd_control.Control()) + await client.create_control(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_create_control_flattened(): + client = ControlServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_control), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = gcd_control.Control() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.create_control( + parent="parent_value", + control=gcd_control.Control( + boost_action=gcd_control.Control.BoostAction(boost=0.551) + ), + control_id="control_id_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
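The field-header tests above assert that URI-bound request fields are echoed back as `x-goog-request-params` metadata. A tiny illustrative sketch of that convention, not the google.api_core helper itself:

```python
from urllib.parse import quote


def routing_metadata(**fields):
    """Build the x-goog-request-params entry the field-header tests look for."""
    value = "&".join(
        f"{key}={quote(str(val), safe='')}" for key, val in fields.items()
    )
    return [("x-goog-request-params", value)]


assert routing_metadata(parent="parent_value") == [
    ("x-goog-request-params", "parent=parent_value")
]
assert routing_metadata(name="name_value") == [
    ("x-goog-request-params", "name=name_value")
]
```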
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].control + mock_val = gcd_control.Control( + boost_action=gcd_control.Control.BoostAction(boost=0.551) + ) + assert arg == mock_val + arg = args[0].control_id + mock_val = "control_id_value" + assert arg == mock_val + + +def test_create_control_flattened_error(): + client = ControlServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_control( + control_service.CreateControlRequest(), + parent="parent_value", + control=gcd_control.Control( + boost_action=gcd_control.Control.BoostAction(boost=0.551) + ), + control_id="control_id_value", + ) + + +@pytest.mark.asyncio +async def test_create_control_flattened_async(): + client = ControlServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_control), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = gcd_control.Control() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gcd_control.Control()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.create_control( + parent="parent_value", + control=gcd_control.Control( + boost_action=gcd_control.Control.BoostAction(boost=0.551) + ), + control_id="control_id_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].control + mock_val = gcd_control.Control( + boost_action=gcd_control.Control.BoostAction(boost=0.551) + ) + assert arg == mock_val + arg = args[0].control_id + mock_val = "control_id_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_create_control_flattened_error_async(): + client = ControlServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.create_control( + control_service.CreateControlRequest(), + parent="parent_value", + control=gcd_control.Control( + boost_action=gcd_control.Control.BoostAction(boost=0.551) + ), + control_id="control_id_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + control_service.DeleteControlRequest, + dict, + ], +) +def test_delete_control(request_type, transport: str = "grpc"): + client = ControlServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_control), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.delete_control(request) + + # Establish that the underlying gRPC stub method was called. 
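The *_flattened_error tests above encode a client-wide rule: a request object and flattened keyword fields are mutually exclusive, and the check fires locally before any RPC is attempted. A hedged sketch, assuming the package from this PR is installed; no network access is needed and the import paths follow the generated layout in this diff.

```python
import pytest
from google.auth.credentials import AnonymousCredentials
from google.cloud.discoveryengine_v1beta.services.control_service import (
    ControlServiceClient,
)
from google.cloud.discoveryengine_v1beta.types import control_service

client = ControlServiceClient(
    credentials=AnonymousCredentials(),
    transport="grpc",
)

# Either pass a fully formed request object, or flattened fields; never both.
with pytest.raises(ValueError):
    client.create_control(
        control_service.CreateControlRequest(),
        parent="parent_value",
    )
```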
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = control_service.DeleteControlRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_control_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ControlServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_control), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.delete_control() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == control_service.DeleteControlRequest() + + +def test_delete_control_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = ControlServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = control_service.DeleteControlRequest( + name="name_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_control), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.delete_control(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == control_service.DeleteControlRequest( + name="name_value", + ) + + +def test_delete_control_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ControlServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete_control in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.delete_control] = mock_rpc + request = {} + client.delete_control(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.delete_control(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_delete_control_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. 
+ client = ControlServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_control), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_control() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == control_service.DeleteControlRequest() + + +@pytest.mark.asyncio +async def test_delete_control_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = ControlServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.delete_control + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[ + client._client._transport.delete_control + ] = mock_object + + request = {} + await client.delete_control(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + await client.delete_control(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + +@pytest.mark.asyncio +async def test_delete_control_async( + transport: str = "grpc_asyncio", request_type=control_service.DeleteControlRequest +): + client = ControlServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_control), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_control(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = control_service.DeleteControlRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_delete_control_async_from_dict(): + await test_delete_control_async(request_type=dict) + + +def test_delete_control_field_headers(): + client = ControlServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = control_service.DeleteControlRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.delete_control), "__call__") as call: + call.return_value = None + client.delete_control(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_delete_control_field_headers_async(): + client = ControlServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = control_service.DeleteControlRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_control), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_control(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_delete_control_flattened(): + client = ControlServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_control), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.delete_control( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_delete_control_flattened_error(): + client = ControlServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_control( + control_service.DeleteControlRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_delete_control_flattened_async(): + client = ControlServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_control), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.delete_control( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_delete_control_flattened_error_async(): + client = ControlServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.delete_control( + control_service.DeleteControlRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + control_service.UpdateControlRequest, + dict, + ], +) +def test_update_control(request_type, transport: str = "grpc"): + client = ControlServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_control), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = gcd_control.Control( + name="name_value", + display_name="display_name_value", + associated_serving_config_ids=["associated_serving_config_ids_value"], + solution_type=common.SolutionType.SOLUTION_TYPE_RECOMMENDATION, + use_cases=[common.SearchUseCase.SEARCH_USE_CASE_SEARCH], + ) + response = client.update_control(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = control_service.UpdateControlRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, gcd_control.Control) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.associated_serving_config_ids == [ + "associated_serving_config_ids_value" + ] + assert response.solution_type == common.SolutionType.SOLUTION_TYPE_RECOMMENDATION + assert response.use_cases == [common.SearchUseCase.SEARCH_USE_CASE_SEARCH] + + +def test_update_control_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ControlServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_control), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.update_control() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == control_service.UpdateControlRequest() + + +def test_update_control_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = ControlServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. 
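A short aside on what "populated automatically" means in these tests: under AIP-4235, request fields designated as UUID4 request ids are filled in client-side when the caller leaves them empty. A minimal illustrative sketch of that rule follows; the field name is hypothetical, and UpdateControlRequest itself may not carry such a field.

```python
import uuid


def autopopulate_request_id(request: dict, field: str = "request_id") -> dict:
    """Fill a designated UUID4 field only if the caller left it empty."""
    if not request.get(field):
        request = {**request, field: str(uuid.uuid4())}
    return request


populated = autopopulate_request_id({"name": "name_value"})
assert populated["name"] == "name_value"
assert len(populated["request_id"]) == 36  # canonical UUID4 string length
```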
+ request = control_service.UpdateControlRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_control), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.update_control(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == control_service.UpdateControlRequest() + + +def test_update_control_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ControlServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.update_control in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.update_control] = mock_rpc + request = {} + client.update_control(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.update_control(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_update_control_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ControlServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_control), "__call__") as call: + # Designate an appropriate return value for the call. 
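+        # FakeUnaryUnaryCall wraps the canned response in an awaitable, so the
+        # patched stub behaves like grpc.aio's unary-unary call object.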
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + gcd_control.Control( + name="name_value", + display_name="display_name_value", + associated_serving_config_ids=["associated_serving_config_ids_value"], + solution_type=common.SolutionType.SOLUTION_TYPE_RECOMMENDATION, + use_cases=[common.SearchUseCase.SEARCH_USE_CASE_SEARCH], + ) + ) + response = await client.update_control() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == control_service.UpdateControlRequest() + + +@pytest.mark.asyncio +async def test_update_control_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = ControlServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.update_control + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[ + client._client._transport.update_control + ] = mock_object + + request = {} + await client.update_control(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + await client.update_control(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + +@pytest.mark.asyncio +async def test_update_control_async( + transport: str = "grpc_asyncio", request_type=control_service.UpdateControlRequest +): + client = ControlServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_control), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + gcd_control.Control( + name="name_value", + display_name="display_name_value", + associated_serving_config_ids=["associated_serving_config_ids_value"], + solution_type=common.SolutionType.SOLUTION_TYPE_RECOMMENDATION, + use_cases=[common.SearchUseCase.SEARCH_USE_CASE_SEARCH], + ) + ) + response = await client.update_control(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = control_service.UpdateControlRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, gcd_control.Control) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.associated_serving_config_ids == [ + "associated_serving_config_ids_value" + ] + assert response.solution_type == common.SolutionType.SOLUTION_TYPE_RECOMMENDATION + assert response.use_cases == [common.SearchUseCase.SEARCH_USE_CASE_SEARCH] + + +@pytest.mark.asyncio +async def test_update_control_async_from_dict(): + await test_update_control_async(request_type=dict) + + +def test_update_control_field_headers(): + client = ControlServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = control_service.UpdateControlRequest() + + request.control.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_control), "__call__") as call: + call.return_value = gcd_control.Control() + client.update_control(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "control.name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_update_control_field_headers_async(): + client = ControlServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = control_service.UpdateControlRequest() + + request.control.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_control), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gcd_control.Control()) + await client.update_control(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "control.name=name_value", + ) in kw["metadata"] + + +def test_update_control_flattened(): + client = ControlServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_control), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = gcd_control.Control() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.update_control( + control=gcd_control.Control( + boost_action=gcd_control.Control.BoostAction(boost=0.551) + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].control + mock_val = gcd_control.Control( + boost_action=gcd_control.Control.BoostAction(boost=0.551) + ) + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + assert arg == mock_val + + +def test_update_control_flattened_error(): + client = ControlServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_control( + control_service.UpdateControlRequest(), + control=gcd_control.Control( + boost_action=gcd_control.Control.BoostAction(boost=0.551) + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +@pytest.mark.asyncio +async def test_update_control_flattened_async(): + client = ControlServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_control), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = gcd_control.Control() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gcd_control.Control()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.update_control( + control=gcd_control.Control( + boost_action=gcd_control.Control.BoostAction(boost=0.551) + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].control + mock_val = gcd_control.Control( + boost_action=gcd_control.Control.BoostAction(boost=0.551) + ) + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_update_control_flattened_error_async(): + client = ControlServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.update_control( + control_service.UpdateControlRequest(), + control=gcd_control.Control( + boost_action=gcd_control.Control.BoostAction(boost=0.551) + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +@pytest.mark.parametrize( + "request_type", + [ + control_service.GetControlRequest, + dict, + ], +) +def test_get_control(request_type, transport: str = "grpc"): + client = ControlServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_control), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = control.Control( + name="name_value", + display_name="display_name_value", + associated_serving_config_ids=["associated_serving_config_ids_value"], + solution_type=common.SolutionType.SOLUTION_TYPE_RECOMMENDATION, + use_cases=[common.SearchUseCase.SEARCH_USE_CASE_SEARCH], + ) + response = client.get_control(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = control_service.GetControlRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, control.Control) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.associated_serving_config_ids == [ + "associated_serving_config_ids_value" + ] + assert response.solution_type == common.SolutionType.SOLUTION_TYPE_RECOMMENDATION + assert response.use_cases == [common.SearchUseCase.SEARCH_USE_CASE_SEARCH] + + +def test_get_control_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ControlServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_control), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.get_control() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == control_service.GetControlRequest() + + +def test_get_control_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = ControlServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = control_service.GetControlRequest( + name="name_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_control), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.get_control(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == control_service.GetControlRequest( + name="name_value", + ) + + +def test_get_control_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ControlServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_control in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client._transport._wrapped_methods[client._transport.get_control] = mock_rpc + request = {} + client.get_control(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.get_control(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_get_control_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ControlServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_control), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + control.Control( + name="name_value", + display_name="display_name_value", + associated_serving_config_ids=["associated_serving_config_ids_value"], + solution_type=common.SolutionType.SOLUTION_TYPE_RECOMMENDATION, + use_cases=[common.SearchUseCase.SEARCH_USE_CASE_SEARCH], + ) + ) + response = await client.get_control() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == control_service.GetControlRequest() + + +@pytest.mark.asyncio +async def test_get_control_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = ControlServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.get_control + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[ + client._client._transport.get_control + ] = mock_object + + request = {} + await client.get_control(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + await client.get_control(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + +@pytest.mark.asyncio +async def test_get_control_async( + transport: str = "grpc_asyncio", request_type=control_service.GetControlRequest +): + client = ControlServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_control), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + control.Control( + name="name_value", + display_name="display_name_value", + associated_serving_config_ids=["associated_serving_config_ids_value"], + solution_type=common.SolutionType.SOLUTION_TYPE_RECOMMENDATION, + use_cases=[common.SearchUseCase.SEARCH_USE_CASE_SEARCH], + ) + ) + response = await client.get_control(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = control_service.GetControlRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, control.Control) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.associated_serving_config_ids == [ + "associated_serving_config_ids_value" + ] + assert response.solution_type == common.SolutionType.SOLUTION_TYPE_RECOMMENDATION + assert response.use_cases == [common.SearchUseCase.SEARCH_USE_CASE_SEARCH] + + +@pytest.mark.asyncio +async def test_get_control_async_from_dict(): + await test_get_control_async(request_type=dict) + + +def test_get_control_field_headers(): + client = ControlServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = control_service.GetControlRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_control), "__call__") as call: + call.return_value = control.Control() + client.get_control(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_control_field_headers_async(): + client = ControlServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = control_service.GetControlRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_control), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(control.Control()) + await client.get_control(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_get_control_flattened(): + client = ControlServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_control), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = control.Control() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
+ client.get_control( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_get_control_flattened_error(): + client = ControlServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_control( + control_service.GetControlRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_get_control_flattened_async(): + client = ControlServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_control), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = control.Control() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(control.Control()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_control( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_get_control_flattened_error_async(): + client = ControlServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.get_control( + control_service.GetControlRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + control_service.ListControlsRequest, + dict, + ], +) +def test_list_controls(request_type, transport: str = "grpc"): + client = ControlServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_controls), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = control_service.ListControlsResponse( + next_page_token="next_page_token_value", + ) + response = client.list_controls(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = control_service.ListControlsRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListControlsPager) + assert response.next_page_token == "next_page_token_value" + + +def test_list_controls_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ControlServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
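+    # Patching __call__ on the multicallable's type intercepts the outgoing
+    # gRPC invocation while leaving request coercion, routing headers and the
+    # method wrapping on the client side untouched.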
+ with mock.patch.object(type(client.transport.list_controls), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.list_controls() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == control_service.ListControlsRequest() + + +def test_list_controls_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = ControlServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = control_service.ListControlsRequest( + parent="parent_value", + page_token="page_token_value", + filter="filter_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_controls), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.list_controls(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == control_service.ListControlsRequest( + parent="parent_value", + page_token="page_token_value", + filter="filter_value", + ) + + +def test_list_controls_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ControlServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_controls in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.list_controls] = mock_rpc + request = {} + client.list_controls(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_controls(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_list_controls_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ControlServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_controls), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + control_service.ListControlsResponse( + next_page_token="next_page_token_value", + ) + ) + response = await client.list_controls() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == control_service.ListControlsRequest() + + +@pytest.mark.asyncio +async def test_list_controls_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = ControlServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.list_controls + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[ + client._client._transport.list_controls + ] = mock_object + + request = {} + await client.list_controls(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + await client.list_controls(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + +@pytest.mark.asyncio +async def test_list_controls_async( + transport: str = "grpc_asyncio", request_type=control_service.ListControlsRequest +): + client = ControlServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_controls), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + control_service.ListControlsResponse( + next_page_token="next_page_token_value", + ) + ) + response = await client.list_controls(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = control_service.ListControlsRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListControlsAsyncPager) + assert response.next_page_token == "next_page_token_value" + + +@pytest.mark.asyncio +async def test_list_controls_async_from_dict(): + await test_list_controls_async(request_type=dict) + + +def test_list_controls_field_headers(): + client = ControlServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = control_service.ListControlsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.list_controls), "__call__") as call: + call.return_value = control_service.ListControlsResponse() + client.list_controls(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_controls_field_headers_async(): + client = ControlServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = control_service.ListControlsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_controls), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + control_service.ListControlsResponse() + ) + await client.list_controls(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_list_controls_flattened(): + client = ControlServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_controls), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = control_service.ListControlsResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_controls( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +def test_list_controls_flattened_error(): + client = ControlServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_controls( + control_service.ListControlsRequest(), + parent="parent_value", + ) + + +@pytest.mark.asyncio +async def test_list_controls_flattened_async(): + client = ControlServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_controls), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = control_service.ListControlsResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + control_service.ListControlsResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_controls( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_list_controls_flattened_error_async(): + client = ControlServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.list_controls( + control_service.ListControlsRequest(), + parent="parent_value", + ) + + +def test_list_controls_pager(transport_name: str = "grpc"): + client = ControlServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_controls), "__call__") as call: + # Set the response to a series of pages. + call.side_effect = ( + control_service.ListControlsResponse( + controls=[ + control.Control(), + control.Control(), + control.Control(), + ], + next_page_token="abc", + ), + control_service.ListControlsResponse( + controls=[], + next_page_token="def", + ), + control_service.ListControlsResponse( + controls=[ + control.Control(), + ], + next_page_token="ghi", + ), + control_service.ListControlsResponse( + controls=[ + control.Control(), + control.Control(), + ], + ), + RuntimeError, + ) + + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + ) + pager = client.list_controls(request={}) + + assert pager._metadata == expected_metadata + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, control.Control) for i in results) + + +def test_list_controls_pages(transport_name: str = "grpc"): + client = ControlServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_controls), "__call__") as call: + # Set the response to a series of pages. + call.side_effect = ( + control_service.ListControlsResponse( + controls=[ + control.Control(), + control.Control(), + control.Control(), + ], + next_page_token="abc", + ), + control_service.ListControlsResponse( + controls=[], + next_page_token="def", + ), + control_service.ListControlsResponse( + controls=[ + control.Control(), + ], + next_page_token="ghi", + ), + control_service.ListControlsResponse( + controls=[ + control.Control(), + control.Control(), + ], + ), + RuntimeError, + ) + pages = list(client.list_controls(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_list_controls_async_pager(): + client = ControlServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_controls), "__call__", new_callable=mock.AsyncMock + ) as call: + # Set the response to a series of pages. 
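+        # Each element of side_effect is served as one page; the trailing
+        # RuntimeError would only surface if the pager asked for a page beyond
+        # the final (token-less) response.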
+ call.side_effect = ( + control_service.ListControlsResponse( + controls=[ + control.Control(), + control.Control(), + control.Control(), + ], + next_page_token="abc", + ), + control_service.ListControlsResponse( + controls=[], + next_page_token="def", + ), + control_service.ListControlsResponse( + controls=[ + control.Control(), + ], + next_page_token="ghi", + ), + control_service.ListControlsResponse( + controls=[ + control.Control(), + control.Control(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_controls( + request={}, + ) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, control.Control) for i in responses) + + +@pytest.mark.asyncio +async def test_list_controls_async_pages(): + client = ControlServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_controls), "__call__", new_callable=mock.AsyncMock + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + control_service.ListControlsResponse( + controls=[ + control.Control(), + control.Control(), + control.Control(), + ], + next_page_token="abc", + ), + control_service.ListControlsResponse( + controls=[], + next_page_token="def", + ), + control_service.ListControlsResponse( + controls=[ + control.Control(), + ], + next_page_token="ghi", + ), + control_service.ListControlsResponse( + controls=[ + control.Control(), + control.Control(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_controls(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + control_service.CreateControlRequest, + dict, + ], +) +def test_create_control_rest(request_type): + client = ControlServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2/dataStores/sample3"} + request_init["control"] = { + "boost_action": { + "boost": 0.551, + "filter": "filter_value", + "data_store": "data_store_value", + }, + "filter_action": {"filter": "filter_value", "data_store": "data_store_value"}, + "redirect_action": {"redirect_uri": "redirect_uri_value"}, + "synonyms_action": {"synonyms": ["synonyms_value1", "synonyms_value2"]}, + "name": "name_value", + "display_name": "display_name_value", + "associated_serving_config_ids": [ + "associated_serving_config_ids_value1", + "associated_serving_config_ids_value2", + ], + "solution_type": 1, + "use_cases": [1], + "conditions": [ + { + "query_terms": [{"value": "value_value", "full_match": True}], + "active_time_range": [ + {"start_time": {"seconds": 751, "nanos": 543}, "end_time": {}} + ], + } + ], + } + # The version of a generated dependency at test runtime may differ from the version used during generation. 
+ # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = control_service.CreateControlRequest.meta.fields["control"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["control"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["control"][field])): + del request_init["control"][field][i][subfield] + else: + del request_init["control"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
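+        # The REST transport round-trips through JSON: the value designated
+        # here is serialized with json_format below and parsed back into a
+        # Control by the client when the mocked session returns it.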
+ return_value = gcd_control.Control( + name="name_value", + display_name="display_name_value", + associated_serving_config_ids=["associated_serving_config_ids_value"], + solution_type=common.SolutionType.SOLUTION_TYPE_RECOMMENDATION, + use_cases=[common.SearchUseCase.SEARCH_USE_CASE_SEARCH], + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = gcd_control.Control.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.create_control(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, gcd_control.Control) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.associated_serving_config_ids == [ + "associated_serving_config_ids_value" + ] + assert response.solution_type == common.SolutionType.SOLUTION_TYPE_RECOMMENDATION + assert response.use_cases == [common.SearchUseCase.SEARCH_USE_CASE_SEARCH] + + +def test_create_control_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ControlServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.create_control in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.create_control] = mock_rpc + + request = {} + client.create_control(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.create_control(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_create_control_rest_required_fields( + request_type=control_service.CreateControlRequest, +): + transport_class = transports.ControlServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request_init["control_id"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + assert "controlId" not in jsonified_request + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_control._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + assert "controlId" in jsonified_request + assert jsonified_request["controlId"] == request_init["control_id"] + + jsonified_request["parent"] = "parent_value" + jsonified_request["controlId"] = "control_id_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_control._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("control_id",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + assert "controlId" in jsonified_request + assert jsonified_request["controlId"] == "control_id_value" + + client = ControlServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = gcd_control.Control() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
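+            # The stubbed transcode result stands in for matching the real
+            # http_options (roughly a POST to .../controls?controlId=...), so
+            # only the query-parameter handling is exercised here.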
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = gcd_control.Control.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.create_control(request) + + expected_params = [ + ( + "controlId", + "", + ), + ("$alt", "json;enum-encoding=int"), + ] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_create_control_rest_unset_required_fields(): + transport = transports.ControlServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.create_control._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(("controlId",)) + & set( + ( + "parent", + "control", + "controlId", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_control_rest_interceptors(null_interceptor): + transport = transports.ControlServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.ControlServiceRestInterceptor(), + ) + client = ControlServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ControlServiceRestInterceptor, "post_create_control" + ) as post, mock.patch.object( + transports.ControlServiceRestInterceptor, "pre_create_control" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = control_service.CreateControlRequest.pb( + control_service.CreateControlRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = gcd_control.Control.to_json(gcd_control.Control()) + + request = control_service.CreateControlRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = gcd_control.Control() + + client.create_control( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_create_control_rest_bad_request( + transport: str = "rest", request_type=control_service.CreateControlRequest +): + client = ControlServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2/dataStores/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
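+    # A 400 status on the mocked session is translated by the transport into
+    # core_exceptions.BadRequest, which is what the test expects to be raised.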
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.create_control(request) + + +def test_create_control_rest_flattened(): + client = ControlServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = gcd_control.Control() + + # get arguments that satisfy an http rule for this method + sample_request = { + "parent": "projects/sample1/locations/sample2/dataStores/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + control=gcd_control.Control( + boost_action=gcd_control.Control.BoostAction(boost=0.551) + ), + control_id="control_id_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = gcd_control.Control.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.create_control(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1beta/{parent=projects/*/locations/*/dataStores/*}/controls" + % client.transport._host, + args[1], + ) + + +def test_create_control_rest_flattened_error(transport: str = "rest"): + client = ControlServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_control( + control_service.CreateControlRequest(), + parent="parent_value", + control=gcd_control.Control( + boost_action=gcd_control.Control.BoostAction(boost=0.551) + ), + control_id="control_id_value", + ) + + +def test_create_control_rest_error(): + client = ControlServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + control_service.DeleteControlRequest, + dict, + ], +) +def test_delete_control_rest(request_type): + client = ControlServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/dataStores/sample3/controls/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
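+        # DeleteControl returns google.protobuf.Empty, which surfaces as None
+        # in Python; hence the empty JSON body used below.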
+ return_value = None + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.delete_control(request) + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_control_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ControlServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete_control in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.delete_control] = mock_rpc + + request = {} + client.delete_control(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.delete_control(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_delete_control_rest_required_fields( + request_type=control_service.DeleteControlRequest, +): + transport_class = transports.ControlServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_control._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_control._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = ControlServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = None + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
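+            # Unlike create, delete transcodes to an HTTP DELETE with no body,
+            # so only the default "$alt" query parameter is expected below.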
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "delete", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.delete_control(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_delete_control_rest_unset_required_fields(): + transport = transports.ControlServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.delete_control._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_control_rest_interceptors(null_interceptor): + transport = transports.ControlServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.ControlServiceRestInterceptor(), + ) + client = ControlServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ControlServiceRestInterceptor, "pre_delete_control" + ) as pre: + pre.assert_not_called() + pb_message = control_service.DeleteControlRequest.pb( + control_service.DeleteControlRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + + request = control_service.DeleteControlRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + + client.delete_control( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + + +def test_delete_control_rest_bad_request( + transport: str = "rest", request_type=control_service.DeleteControlRequest +): + client = ControlServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/dataStores/sample3/controls/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_control(request) + + +def test_delete_control_rest_flattened(): + client = ControlServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = None + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/dataStores/sample3/controls/sample4" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.delete_control(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1beta/{name=projects/*/locations/*/dataStores/*/controls/*}" + % client.transport._host, + args[1], + ) + + +def test_delete_control_rest_flattened_error(transport: str = "rest"): + client = ControlServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_control( + control_service.DeleteControlRequest(), + name="name_value", + ) + + +def test_delete_control_rest_error(): + client = ControlServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + control_service.UpdateControlRequest, + dict, + ], +) +def test_update_control_rest(request_type): + client = ControlServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "control": { + "name": "projects/sample1/locations/sample2/dataStores/sample3/controls/sample4" + } + } + request_init["control"] = { + "boost_action": { + "boost": 0.551, + "filter": "filter_value", + "data_store": "data_store_value", + }, + "filter_action": {"filter": "filter_value", "data_store": "data_store_value"}, + "redirect_action": {"redirect_uri": "redirect_uri_value"}, + "synonyms_action": {"synonyms": ["synonyms_value1", "synonyms_value2"]}, + "name": "projects/sample1/locations/sample2/dataStores/sample3/controls/sample4", + "display_name": "display_name_value", + "associated_serving_config_ids": [ + "associated_serving_config_ids_value1", + "associated_serving_config_ids_value2", + ], + "solution_type": 1, + "use_cases": [1], + "conditions": [ + { + "query_terms": [{"value": "value_value", "full_match": True}], + "active_time_range": [ + {"start_time": {"seconds": 751, "nanos": 543}, "end_time": {}} + ], + } + ], + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = control_service.UpdateControlRequest.meta.fields["control"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["control"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["control"][field])): + del request_init["control"][field][i][subfield] + else: + del request_init["control"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = gcd_control.Control( + name="name_value", + display_name="display_name_value", + associated_serving_config_ids=["associated_serving_config_ids_value"], + solution_type=common.SolutionType.SOLUTION_TYPE_RECOMMENDATION, + use_cases=[common.SearchUseCase.SEARCH_USE_CASE_SEARCH], + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = gcd_control.Control.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.update_control(request) + + # Establish that the response is the type that we expect. 
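The block above strips sub-fields from the hand-written sample request that the protobuf definitions installed at test time do not know about, so the test keeps passing when it runs against an older version of the dependency (see the gapic-generator-python issue referenced above). The same pruning step in isolation, using a plain allow-list instead of proto descriptors (all names are invented for illustration):

```python
def prune_unknown_subfields(payload, known):
    """Drop nested keys that are not listed in `known` (field -> allowed subfields)."""
    cleaned = {}
    for field, value in payload.items():
        allowed = known.get(field)
        if isinstance(value, dict) and allowed is not None:
            cleaned[field] = {k: v for k, v in value.items() if k in allowed}
        elif isinstance(value, list) and allowed is not None:
            cleaned[field] = [
                {k: v for k, v in item.items() if k in allowed}
                if isinstance(item, dict)
                else item
                for item in value
            ]
        else:
            cleaned[field] = value
    return cleaned


def test_prune_unknown_subfields():
    payload = {
        "boost_action": {"boost": 0.5, "brand_new_field": "x"},
        "conditions": [{"query_terms": [], "brand_new_field": "y"}],
        "display_name": "demo",
    }
    known = {"boost_action": {"boost"}, "conditions": {"query_terms"}}
    assert prune_unknown_subfields(payload, known) == {
        "boost_action": {"boost": 0.5},
        "conditions": [{"query_terms": []}],
        "display_name": "demo",
    }
```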
+ assert isinstance(response, gcd_control.Control) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.associated_serving_config_ids == [ + "associated_serving_config_ids_value" + ] + assert response.solution_type == common.SolutionType.SOLUTION_TYPE_RECOMMENDATION + assert response.use_cases == [common.SearchUseCase.SEARCH_USE_CASE_SEARCH] + + +def test_update_control_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ControlServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.update_control in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.update_control] = mock_rpc + + request = {} + client.update_control(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.update_control(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_update_control_rest_required_fields( + request_type=control_service.UpdateControlRequest, +): + transport_class = transports.ControlServiceRestTransport + + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_control._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_control._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("update_mask",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + + client = ControlServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = gcd_control.Control() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "patch", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = gcd_control.Control.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.update_control(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_update_control_rest_unset_required_fields(): + transport = transports.ControlServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.update_control._get_unset_required_fields({}) + assert set(unset_fields) == (set(("updateMask",)) & set(("control",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_control_rest_interceptors(null_interceptor): + transport = transports.ControlServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.ControlServiceRestInterceptor(), + ) + client = ControlServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ControlServiceRestInterceptor, "post_update_control" + ) as post, mock.patch.object( + transports.ControlServiceRestInterceptor, "pre_update_control" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = control_service.UpdateControlRequest.pb( + control_service.UpdateControlRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = gcd_control.Control.to_json(gcd_control.Control()) + + request = control_service.UpdateControlRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = gcd_control.Control() + + client.update_control( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_update_control_rest_bad_request( + transport: str = "rest", request_type=control_service.UpdateControlRequest +): + client = ControlServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "control": { + "name": "projects/sample1/locations/sample2/dataStores/sample3/controls/sample4" + } + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.update_control(request) + + +def test_update_control_rest_flattened(): + client = ControlServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = gcd_control.Control() + + # get arguments that satisfy an http rule for this method + sample_request = { + "control": { + "name": "projects/sample1/locations/sample2/dataStores/sample3/controls/sample4" + } + } + + # get truthy value for each flattened field + mock_args = dict( + control=gcd_control.Control( + boost_action=gcd_control.Control.BoostAction(boost=0.551) + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = gcd_control.Control.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.update_control(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1beta/{control.name=projects/*/locations/*/dataStores/*/controls/*}" + % client.transport._host, + args[1], + ) + + +def test_update_control_rest_flattened_error(transport: str = "rest"): + client = ControlServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_control( + control_service.UpdateControlRequest(), + control=gcd_control.Control( + boost_action=gcd_control.Control.BoostAction(boost=0.551) + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +def test_update_control_rest_error(): + client = ControlServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + control_service.GetControlRequest, + dict, + ], +) +def test_get_control_rest(request_type): + client = ControlServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/dataStores/sample3/controls/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = control.Control( + name="name_value", + display_name="display_name_value", + associated_serving_config_ids=["associated_serving_config_ids_value"], + solution_type=common.SolutionType.SOLUTION_TYPE_RECOMMENDATION, + use_cases=[common.SearchUseCase.SEARCH_USE_CASE_SEARCH], + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = control.Control.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_control(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, control.Control) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.associated_serving_config_ids == [ + "associated_serving_config_ids_value" + ] + assert response.solution_type == common.SolutionType.SOLUTION_TYPE_RECOMMENDATION + assert response.use_cases == [common.SearchUseCase.SEARCH_USE_CASE_SEARCH] + + +def test_get_control_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ControlServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_control in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get_control] = mock_rpc + + request = {} + client.get_control(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get_control(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_get_control_rest_required_fields( + request_type=control_service.GetControlRequest, +): + transport_class = transports.ControlServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_control._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_control._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = ControlServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = control.Control() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = control.Control.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_control(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_control_rest_unset_required_fields(): + transport = transports.ControlServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_control._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_control_rest_interceptors(null_interceptor): + transport = transports.ControlServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.ControlServiceRestInterceptor(), + ) + client = ControlServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ControlServiceRestInterceptor, "post_get_control" + ) as post, mock.patch.object( + transports.ControlServiceRestInterceptor, "pre_get_control" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = control_service.GetControlRequest.pb( + control_service.GetControlRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = control.Control.to_json(control.Control()) + + request = control_service.GetControlRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = control.Control() + + client.get_control( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_control_rest_bad_request( + transport: str = "rest", request_type=control_service.GetControlRequest +): + client = ControlServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/dataStores/sample3/controls/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
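The `*_rest_interceptors` tests above check that the transport invokes a `pre_*` hook before the HTTP call and a `post_*` hook on the decoded response, each exactly once. A toy version of that hook pipeline, with invented `ToyInterceptor` and `ToyRestClient` classes standing in for the generated interceptor and transport:

```python
class ToyInterceptor:
    def pre_get(self, request, metadata):
        # Runs before the HTTP call; may rewrite the request or metadata.
        return request, metadata

    def post_get(self, response):
        # Runs after the HTTP call; may rewrite the response.
        return response


class ToyRestClient:
    def __init__(self, send, interceptor=None):
        self._send = send
        self._interceptor = interceptor or ToyInterceptor()

    def get(self, request, metadata=()):
        request, metadata = self._interceptor.pre_get(request, list(metadata))
        response = self._send(request, metadata)
        return self._interceptor.post_get(response)


def test_pre_and_post_hooks_run_once():
    calls = []

    class RecordingInterceptor(ToyInterceptor):
        def pre_get(self, request, metadata):
            calls.append("pre")
            return super().pre_get(request, metadata)

        def post_get(self, response):
            calls.append("post")
            return super().post_get(response)

    client = ToyRestClient(
        send=lambda req, md: {"ok": True},
        interceptor=RecordingInterceptor(),
    )
    assert client.get({"name": "n"}, metadata=[("key", "val")]) == {"ok": True}
    assert calls == ["pre", "post"]
```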
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_control(request) + + +def test_get_control_rest_flattened(): + client = ControlServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = control.Control() + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/dataStores/sample3/controls/sample4" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = control.Control.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_control(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1beta/{name=projects/*/locations/*/dataStores/*/controls/*}" + % client.transport._host, + args[1], + ) + + +def test_get_control_rest_flattened_error(transport: str = "rest"): + client = ControlServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_control( + control_service.GetControlRequest(), + name="name_value", + ) + + +def test_get_control_rest_error(): + client = ControlServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + control_service.ListControlsRequest, + dict, + ], +) +def test_list_controls_rest(request_type): + client = ControlServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2/dataStores/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = control_service.ListControlsResponse( + next_page_token="next_page_token_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = control_service.ListControlsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_controls(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListControlsPager) + assert response.next_page_token == "next_page_token_value" + + +def test_list_controls_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ControlServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_controls in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.list_controls] = mock_rpc + + request = {} + client.list_controls(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_controls(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_list_controls_rest_required_fields( + request_type=control_service.ListControlsRequest, +): + transport_class = transports.ControlServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_controls._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_controls._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "filter", + "page_size", + "page_token", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = ControlServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = control_service.ListControlsResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = control_service.ListControlsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_controls(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_controls_rest_unset_required_fields(): + transport = transports.ControlServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.list_controls._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "filter", + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_controls_rest_interceptors(null_interceptor): + transport = transports.ControlServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.ControlServiceRestInterceptor(), + ) + client = ControlServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ControlServiceRestInterceptor, "post_list_controls" + ) as post, mock.patch.object( + transports.ControlServiceRestInterceptor, "pre_list_controls" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = control_service.ListControlsRequest.pb( + control_service.ListControlsRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = control_service.ListControlsResponse.to_json( + control_service.ListControlsResponse() + ) + + request = control_service.ListControlsRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = control_service.ListControlsResponse() + + client.list_controls( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_controls_rest_bad_request( + transport: str = "rest", request_type=control_service.ListControlsRequest +): + client = ControlServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2/dataStores/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_controls(request) + + +def test_list_controls_rest_flattened(): + client = ControlServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = control_service.ListControlsResponse() + + # get arguments that satisfy an http rule for this method + sample_request = { + "parent": "projects/sample1/locations/sample2/dataStores/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = control_service.ListControlsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.list_controls(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1beta/{parent=projects/*/locations/*/dataStores/*}/controls" + % client.transport._host, + args[1], + ) + + +def test_list_controls_rest_flattened_error(transport: str = "rest"): + client = ControlServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_controls( + control_service.ListControlsRequest(), + parent="parent_value", + ) + + +def test_list_controls_rest_pager(transport: str = "rest"): + client = ControlServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + control_service.ListControlsResponse( + controls=[ + control.Control(), + control.Control(), + control.Control(), + ], + next_page_token="abc", + ), + control_service.ListControlsResponse( + controls=[], + next_page_token="def", + ), + control_service.ListControlsResponse( + controls=[ + control.Control(), + ], + next_page_token="ghi", + ), + control_service.ListControlsResponse( + controls=[ + control.Control(), + control.Control(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + control_service.ListControlsResponse.to_json(x) for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = { + "parent": "projects/sample1/locations/sample2/dataStores/sample3" + } + + pager = client.list_controls(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, control.Control) for i in results) + + pages = list(client.list_controls(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.ControlServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = ControlServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.ControlServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = ControlServiceClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. + transport = transports.ControlServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = ControlServiceClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = ControlServiceClient( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.ControlServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = ControlServiceClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. 
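The pager test above feeds the client a fixed sequence of `ListControlsResponse` pages and checks that iterating the pager yields every item and follows every `next_page_token`, including the empty token that ends the listing. The same token-driven loop in isolation (the `fetch_page` callable and its sample data are invented):

```python
def list_all(fetch_page):
    """Yield items across pages until the next page token comes back empty."""
    token = ""
    while True:
        items, token = fetch_page(token)
        yield from items
        if not token:
            break


def test_list_all_walks_every_page():
    pages = {
        "": (["a", "b", "c"], "abc"),
        "abc": ([], "def"),
        "def": (["d"], "ghi"),
        "ghi": (["e", "f"], ""),
    }
    results = list(list_all(lambda token: pages[token]))
    assert results == ["a", "b", "c", "d", "e", "f"]
```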
+ transport = transports.ControlServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = ControlServiceClient(transport=transport) + assert client.transport is transport + + +def test_transport_get_channel(): + # A client may be instantiated with a custom transport instance. + transport = transports.ControlServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + transport = transports.ControlServiceGrpcAsyncIOTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.ControlServiceGrpcTransport, + transports.ControlServiceGrpcAsyncIOTransport, + transports.ControlServiceRestTransport, + ], +) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "rest", + ], +) +def test_transport_kind(transport_name): + transport = ControlServiceClient.get_transport_class(transport_name)( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert transport.kind == transport_name + + +def test_transport_grpc_default(): + # A client should use the gRPC transport by default. + client = ControlServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert isinstance( + client.transport, + transports.ControlServiceGrpcTransport, + ) + + +def test_control_service_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.ControlServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json", + ) + + +def test_control_service_base_transport(): + # Instantiate the base transport. + with mock.patch( + "google.cloud.discoveryengine_v1beta.services.control_service.transports.ControlServiceTransport.__init__" + ) as Transport: + Transport.return_value = None + transport = transports.ControlServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. 
+ methods = ( + "create_control", + "delete_control", + "update_control", + "get_control", + "list_controls", + "get_operation", + "cancel_operation", + "list_operations", + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + with pytest.raises(NotImplementedError): + transport.close() + + # Catch all for all remaining methods and properties + remainder = [ + "kind", + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + + +def test_control_service_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch( + "google.cloud.discoveryengine_v1beta.services.control_service.transports.ControlServiceTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.ControlServiceTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with( + "credentials.json", + scopes=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id="octopus", + ) + + +def test_control_service_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. + with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( + "google.cloud.discoveryengine_v1beta.services.control_service.transports.ControlServiceTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.ControlServiceTransport() + adc.assert_called_once() + + +def test_control_service_auth_adc(): + # If no credentials are provided, we should use ADC credentials. + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + ControlServiceClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id=None, + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.ControlServiceGrpcTransport, + transports.ControlServiceGrpcAsyncIOTransport, + ], +) +def test_control_service_transport_auth_adc(transport_class): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
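The ADC tests above patch `google.auth.default` and assert that it is consulted exactly once whenever no explicit credentials are supplied. A minimal stand-alone version of that fallback check, with a toy `default_credentials` loader standing in for `google.auth.default`:

```python
from unittest import mock


def default_credentials():
    # Stand-in for an environment-based lookup such as Application Default
    # Credentials; a real call would touch the environment, so tests patch it.
    raise RuntimeError("not reachable in tests")


class ToyTransport:
    def __init__(self, credentials=None):
        # Fall back to the ambient default when no credentials are given.
        self.credentials = (
            credentials if credentials is not None else default_credentials()
        )


def test_transport_falls_back_to_default_credentials():
    sentinel = object()
    with mock.patch(f"{__name__}.default_credentials", return_value=sentinel) as adc:
        transport = ToyTransport()
    adc.assert_called_once()
    assert transport.credentials is sentinel
```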
+ with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + adc.assert_called_once_with( + scopes=["1", "2"], + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id="octopus", + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.ControlServiceGrpcTransport, + transports.ControlServiceGrpcAsyncIOTransport, + transports.ControlServiceRestTransport, + ], +) +def test_control_service_transport_auth_gdch_credentials(transport_class): + host = "https://language.com" + api_audience_tests = [None, "https://language2.com"] + api_audience_expect = [host, "https://language2.com"] + for t, e in zip(api_audience_tests, api_audience_expect): + with mock.patch.object(google.auth, "default", autospec=True) as adc: + gdch_mock = mock.MagicMock() + type(gdch_mock).with_gdch_audience = mock.PropertyMock( + return_value=gdch_mock + ) + adc.return_value = (gdch_mock, None) + transport_class(host=host, api_audience=t) + gdch_mock.with_gdch_audience.assert_called_once_with(e) + + +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.ControlServiceGrpcTransport, grpc_helpers), + (transports.ControlServiceGrpcAsyncIOTransport, grpc_helpers_async), + ], +) +def test_control_service_transport_create_channel(transport_class, grpc_helpers): + # If credentials and host are not provided, the transport class should use + # ADC credentials. + with mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + + create_channel.assert_called_with( + "discoveryengine.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + scopes=["1", "2"], + default_host="discoveryengine.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.ControlServiceGrpcTransport, + transports.ControlServiceGrpcAsyncIOTransport, + ], +) +def test_control_service_grpc_transport_client_cert_source_for_mtls(transport_class): + cred = ga_credentials.AnonymousCredentials() + + # Check ssl_channel_credentials is used if provided. 
+ with mock.patch.object(transport_class, "create_channel") as mock_create_channel: + mock_ssl_channel_creds = mock.Mock() + transport_class( + host="squid.clam.whelk", + credentials=cred, + ssl_channel_credentials=mock_ssl_channel_creds, + ) + mock_create_channel.assert_called_once_with( + "squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_channel_creds, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls + # is used. + with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): + with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: + transport_class( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback, + ) + expected_cert, expected_key = client_cert_source_callback() + mock_ssl_cred.assert_called_once_with( + certificate_chain=expected_cert, private_key=expected_key + ) + + +def test_control_service_http_transport_client_cert_source_for_mtls(): + cred = ga_credentials.AnonymousCredentials() + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ) as mock_configure_mtls_channel: + transports.ControlServiceRestTransport( + credentials=cred, client_cert_source_for_mtls=client_cert_source_callback + ) + mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + "rest", + ], +) +def test_control_service_host_no_port(transport_name): + client = ControlServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="discoveryengine.googleapis.com" + ), + transport=transport_name, + ) + assert client.transport._host == ( + "discoveryengine.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://discoveryengine.googleapis.com" + ) + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + "rest", + ], +) +def test_control_service_host_with_port(transport_name): + client = ControlServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="discoveryengine.googleapis.com:8000" + ), + transport=transport_name, + ) + assert client.transport._host == ( + "discoveryengine.googleapis.com:8000" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://discoveryengine.googleapis.com:8000" + ) + + +@pytest.mark.parametrize( + "transport_name", + [ + "rest", + ], +) +def test_control_service_client_transport_session_collision(transport_name): + creds1 = ga_credentials.AnonymousCredentials() + creds2 = ga_credentials.AnonymousCredentials() + client1 = ControlServiceClient( + credentials=creds1, + transport=transport_name, + ) + client2 = ControlServiceClient( + credentials=creds2, + transport=transport_name, + ) + session1 = client1.transport.create_control._session + session2 = client2.transport.create_control._session + assert session1 != session2 + session1 = client1.transport.delete_control._session + session2 = client2.transport.delete_control._session + assert session1 != session2 + session1 = client1.transport.update_control._session + session2 = 
client2.transport.update_control._session + assert session1 != session2 + session1 = client1.transport.get_control._session + session2 = client2.transport.get_control._session + assert session1 != session2 + session1 = client1.transport.list_controls._session + session2 = client2.transport.list_controls._session + assert session1 != session2 + + +def test_control_service_grpc_transport_channel(): + channel = grpc.secure_channel("http://localhost/", grpc.local_channel_credentials()) + + # Check that channel is used if provided. + transport = transports.ControlServiceGrpcTransport( + host="squid.clam.whelk", + channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None + + +def test_control_service_grpc_asyncio_transport_channel(): + channel = aio.secure_channel("http://localhost/", grpc.local_channel_credentials()) + + # Check that channel is used if provided. + transport = transports.ControlServiceGrpcAsyncIOTransport( + host="squid.clam.whelk", + channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. +@pytest.mark.parametrize( + "transport_class", + [ + transports.ControlServiceGrpcTransport, + transports.ControlServiceGrpcAsyncIOTransport, + ], +) +def test_control_service_transport_channel_mtls_with_client_cert_source( + transport_class, +): + with mock.patch( + "grpc.ssl_channel_credentials", autospec=True + ) as grpc_ssl_channel_cred: + with mock.patch.object( + transport_class, "create_channel" + ) as grpc_create_channel: + mock_ssl_cred = mock.Mock() + grpc_ssl_channel_cred.return_value = mock_ssl_cred + + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + + cred = ga_credentials.AnonymousCredentials() + with pytest.warns(DeprecationWarning): + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (cred, None) + transport = transport_class( + host="squid.clam.whelk", + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=client_cert_source_callback, + ) + adc.assert_called_once() + + grpc_ssl_channel_cred.assert_called_once_with( + certificate_chain=b"cert bytes", private_key=b"key bytes" + ) + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + assert transport._ssl_channel_credentials == mock_ssl_cred + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. 
+@pytest.mark.parametrize( + "transport_class", + [ + transports.ControlServiceGrpcTransport, + transports.ControlServiceGrpcAsyncIOTransport, + ], +) +def test_control_service_transport_channel_mtls_with_adc(transport_class): + mock_ssl_cred = mock.Mock() + with mock.patch.multiple( + "google.auth.transport.grpc.SslCredentials", + __init__=mock.Mock(return_value=None), + ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), + ): + with mock.patch.object( + transport_class, "create_channel" + ) as grpc_create_channel: + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + mock_cred = mock.Mock() + + with pytest.warns(DeprecationWarning): + transport = transport_class( + host="squid.clam.whelk", + credentials=mock_cred, + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=None, + ) + + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=mock_cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + + +def test_control_path(): + project = "squid" + location = "clam" + data_store = "whelk" + control = "octopus" + expected = "projects/{project}/locations/{location}/dataStores/{data_store}/controls/{control}".format( + project=project, + location=location, + data_store=data_store, + control=control, + ) + actual = ControlServiceClient.control_path(project, location, data_store, control) + assert expected == actual + + +def test_parse_control_path(): + expected = { + "project": "oyster", + "location": "nudibranch", + "data_store": "cuttlefish", + "control": "mussel", + } + path = ControlServiceClient.control_path(**expected) + + # Check that the path construction is reversible. + actual = ControlServiceClient.parse_control_path(path) + assert expected == actual + + +def test_data_store_path(): + project = "winkle" + location = "nautilus" + data_store = "scallop" + expected = "projects/{project}/locations/{location}/dataStores/{data_store}".format( + project=project, + location=location, + data_store=data_store, + ) + actual = ControlServiceClient.data_store_path(project, location, data_store) + assert expected == actual + + +def test_parse_data_store_path(): + expected = { + "project": "abalone", + "location": "squid", + "data_store": "clam", + } + path = ControlServiceClient.data_store_path(**expected) + + # Check that the path construction is reversible. + actual = ControlServiceClient.parse_data_store_path(path) + assert expected == actual + + +def test_common_billing_account_path(): + billing_account = "whelk" + expected = "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + actual = ControlServiceClient.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "octopus", + } + path = ControlServiceClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. 
+ actual = ControlServiceClient.parse_common_billing_account_path(path) + assert expected == actual + + +def test_common_folder_path(): + folder = "oyster" + expected = "folders/{folder}".format( + folder=folder, + ) + actual = ControlServiceClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "nudibranch", + } + path = ControlServiceClient.common_folder_path(**expected) + + # Check that the path construction is reversible. + actual = ControlServiceClient.parse_common_folder_path(path) + assert expected == actual + + +def test_common_organization_path(): + organization = "cuttlefish" + expected = "organizations/{organization}".format( + organization=organization, + ) + actual = ControlServiceClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "mussel", + } + path = ControlServiceClient.common_organization_path(**expected) + + # Check that the path construction is reversible. + actual = ControlServiceClient.parse_common_organization_path(path) + assert expected == actual + + +def test_common_project_path(): + project = "winkle" + expected = "projects/{project}".format( + project=project, + ) + actual = ControlServiceClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "nautilus", + } + path = ControlServiceClient.common_project_path(**expected) + + # Check that the path construction is reversible. + actual = ControlServiceClient.parse_common_project_path(path) + assert expected == actual + + +def test_common_location_path(): + project = "scallop" + location = "abalone" + expected = "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) + actual = ControlServiceClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "squid", + "location": "clam", + } + path = ControlServiceClient.common_location_path(**expected) + + # Check that the path construction is reversible. 
+ actual = ControlServiceClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object( + transports.ControlServiceTransport, "_prep_wrapped_messages" + ) as prep: + client = ControlServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object( + transports.ControlServiceTransport, "_prep_wrapped_messages" + ) as prep: + transport_class = ControlServiceClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + +@pytest.mark.asyncio +async def test_transport_close_async(): + client = ControlServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + with mock.patch.object( + type(getattr(client.transport, "grpc_channel")), "close" + ) as close: + async with client: + close.assert_not_called() + close.assert_called_once() + + +def test_cancel_operation_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.CancelOperationRequest +): + client = ControlServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict( + { + "name": "projects/sample1/locations/sample2/collections/sample3/dataStores/sample4/branches/sample5/operations/sample6" + }, + request, + ) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.cancel_operation(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.CancelOperationRequest, + dict, + ], +) +def test_cancel_operation_rest(request_type): + client = ControlServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = { + "name": "projects/sample1/locations/sample2/collections/sample3/dataStores/sample4/branches/sample5/operations/sample6" + } + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "{}" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.cancel_operation(request) + + # Establish that the response is the type that we expect. 
+ assert response is None + + +def test_get_operation_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.GetOperationRequest +): + client = ControlServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict( + { + "name": "projects/sample1/locations/sample2/collections/sample3/dataConnector/operations/sample4" + }, + request, + ) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_operation(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.GetOperationRequest, + dict, + ], +) +def test_get_operation_rest(request_type): + client = ControlServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = { + "name": "projects/sample1/locations/sample2/collections/sample3/dataConnector/operations/sample4" + } + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_operation(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + + +def test_list_operations_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.ListOperationsRequest +): + client = ControlServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict( + { + "name": "projects/sample1/locations/sample2/collections/sample3/dataConnector" + }, + request, + ) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_operations(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.ListOperationsRequest, + dict, + ], +) +def test_list_operations_rest(request_type): + client = ControlServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = { + "name": "projects/sample1/locations/sample2/collections/sample3/dataConnector" + } + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = operations_pb2.ListOperationsResponse() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_operations(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) + + +def test_cancel_operation(transport: str = "grpc"): + client = ControlServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.CancelOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_cancel_operation_async(transport: str = "grpc_asyncio"): + client = ControlServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.CancelOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +def test_cancel_operation_field_headers(): + client = ControlServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.CancelOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + call.return_value = None + + client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_cancel_operation_field_headers_async(): + client = ControlServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.CancelOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_cancel_operation_from_dict(): + client = ControlServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + + response = client.cancel_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_cancel_operation_from_dict_async(): + client = ControlServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.cancel_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_get_operation(transport: str = "grpc"): + client = ControlServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.GetOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation() + response = client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + + +@pytest.mark.asyncio +async def test_get_operation_async(transport: str = "grpc_asyncio"): + client = ControlServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.GetOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + response = await client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + + +def test_get_operation_field_headers(): + client = ControlServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.GetOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + call.return_value = operations_pb2.Operation() + + client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_operation_field_headers_async(): + client = ControlServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.GetOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + await client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_get_operation_from_dict(): + client = ControlServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation() + + response = client.get_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_get_operation_from_dict_async(): + client = ControlServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + response = await client.get_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_list_operations(transport: str = "grpc"): + client = ControlServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.ListOperationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.ListOperationsResponse() + response = client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) + + +@pytest.mark.asyncio +async def test_list_operations_async(transport: str = "grpc_asyncio"): + client = ControlServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.ListOperationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + response = await client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) + + +def test_list_operations_field_headers(): + client = ControlServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.ListOperationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + call.return_value = operations_pb2.ListOperationsResponse() + + client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_operations_field_headers_async(): + client = ControlServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = operations_pb2.ListOperationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + await client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_list_operations_from_dict(): + client = ControlServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.ListOperationsResponse() + + response = client.list_operations( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_list_operations_from_dict_async(): + client = ControlServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + response = await client.list_operations( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_transport_close(): + transports = { + "rest": "_session", + "grpc": "_grpc_channel", + } + + for transport, close_name in transports.items(): + client = ControlServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + with mock.patch.object( + type(getattr(client.transport, close_name)), "close" + ) as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +def test_client_ctx(): + transports = [ + "rest", + "grpc", + ] + for transport in transports: + client = ControlServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + # Test client calls underlying transport. 
+ with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() + + +@pytest.mark.parametrize( + "client_class,transport_class", + [ + (ControlServiceClient, transports.ControlServiceGrpcTransport), + (ControlServiceAsyncClient, transports.ControlServiceGrpcAsyncIOTransport), + ], +) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) diff --git a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1beta/test_conversational_search_service.py b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1beta/test_conversational_search_service.py index bef4dd466269..ea443e90ee86 100644 --- a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1beta/test_conversational_search_service.py +++ b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1beta/test_conversational_search_service.py @@ -3642,13 +3642,13 @@ def test_list_conversations_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_conversations(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -3821,7 +3821,9 @@ def test_answer_query(request_type, transport: str = "grpc"): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.answer_query), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = conversational_search_service.AnswerQueryResponse() + call.return_value = conversational_search_service.AnswerQueryResponse( + answer_query_token="answer_query_token_value", + ) response = client.answer_query(request) # Establish that the underlying gRPC stub method was called. @@ -3832,6 +3834,7 @@ def test_answer_query(request_type, transport: str = "grpc"): # Establish that the response is the type that we expect. assert isinstance(response, conversational_search_service.AnswerQueryResponse) + assert response.answer_query_token == "answer_query_token_value" def test_answer_query_empty_call(): @@ -3933,7 +3936,9 @@ async def test_answer_query_empty_call_async(): with mock.patch.object(type(client.transport.answer_query), "__call__") as call: # Designate an appropriate return value for the call. 
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - conversational_search_service.AnswerQueryResponse() + conversational_search_service.AnswerQueryResponse( + answer_query_token="answer_query_token_value", + ) ) response = await client.answer_query() call.assert_called() @@ -4005,7 +4010,9 @@ async def test_answer_query_async( with mock.patch.object(type(client.transport.answer_query), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - conversational_search_service.AnswerQueryResponse() + conversational_search_service.AnswerQueryResponse( + answer_query_token="answer_query_token_value", + ) ) response = await client.answer_query(request) @@ -4017,6 +4024,7 @@ async def test_answer_query_async( # Establish that the response is the type that we expect. assert isinstance(response, conversational_search_service.AnswerQueryResponse) + assert response.answer_query_token == "answer_query_token_value" @pytest.mark.asyncio @@ -6390,13 +6398,13 @@ def test_list_sessions_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_sessions(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -8835,7 +8843,9 @@ def test_answer_query_rest(request_type): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = conversational_search_service.AnswerQueryResponse() + return_value = conversational_search_service.AnswerQueryResponse( + answer_query_token="answer_query_token_value", + ) # Wrap the value into a proper Response obj response_value = Response() @@ -8852,6 +8862,7 @@ def test_answer_query_rest(request_type): # Establish that the response is the type that we expect. assert isinstance(response, conversational_search_service.AnswerQueryResponse) + assert response.answer_query_token == "answer_query_token_value" def test_answer_query_rest_use_cached_wrapped_rpc(): @@ -11346,6 +11357,7 @@ def test_conversational_search_service_base_transport(): "get_session", "list_sessions", "get_operation", + "cancel_operation", "list_operations", ) for method in methods: @@ -12149,6 +12161,69 @@ async def test_transport_close_async(): close.assert_called_once() +def test_cancel_operation_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.CancelOperationRequest +): + client = ConversationalSearchServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict( + { + "name": "projects/sample1/locations/sample2/collections/sample3/dataStores/sample4/branches/sample5/operations/sample6" + }, + request, + ) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.cancel_operation(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.CancelOperationRequest, + dict, + ], +) +def test_cancel_operation_rest(request_type): + client = ConversationalSearchServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = { + "name": "projects/sample1/locations/sample2/collections/sample3/dataStores/sample4/branches/sample5/operations/sample6" + } + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "{}" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.cancel_operation(request) + + # Establish that the response is the type that we expect. + assert response is None + + def test_get_operation_rest_bad_request( transport: str = "rest", request_type=operations_pb2.GetOperationRequest ): @@ -12275,6 +12350,145 @@ def test_list_operations_rest(request_type): assert isinstance(response, operations_pb2.ListOperationsResponse) +def test_cancel_operation(transport: str = "grpc"): + client = ConversationalSearchServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.CancelOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_cancel_operation_async(transport: str = "grpc_asyncio"): + client = ConversationalSearchServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.CancelOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert response is None + + +def test_cancel_operation_field_headers(): + client = ConversationalSearchServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.CancelOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + call.return_value = None + + client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_cancel_operation_field_headers_async(): + client = ConversationalSearchServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.CancelOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_cancel_operation_from_dict(): + client = ConversationalSearchServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + + response = client.cancel_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_cancel_operation_from_dict_async(): + client = ConversationalSearchServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.cancel_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + def test_get_operation(transport: str = "grpc"): client = ConversationalSearchServiceClient( credentials=ga_credentials.AnonymousCredentials(), diff --git a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1beta/test_data_store_service.py b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1beta/test_data_store_service.py index 449d05ddfd73..e9cb8d6b0860 100644 --- a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1beta/test_data_store_service.py +++ b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1beta/test_data_store_service.py @@ -2387,13 +2387,13 @@ def test_list_data_stores_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_data_stores(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -5344,6 +5344,7 @@ def test_data_store_service_base_transport(): "delete_data_store", "update_data_store", "get_operation", + "cancel_operation", "list_operations", ) for method in methods: @@ -6053,6 +6054,69 @@ async def test_transport_close_async(): close.assert_called_once() +def test_cancel_operation_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.CancelOperationRequest +): + client = DataStoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict( + { + "name": "projects/sample1/locations/sample2/collections/sample3/dataStores/sample4/branches/sample5/operations/sample6" + }, + request, + ) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.cancel_operation(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.CancelOperationRequest, + dict, + ], +) +def test_cancel_operation_rest(request_type): + client = DataStoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = { + "name": "projects/sample1/locations/sample2/collections/sample3/dataStores/sample4/branches/sample5/operations/sample6" + } + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "{}" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.cancel_operation(request) + + # Establish that the response is the type that we expect. 
+ assert response is None + + def test_get_operation_rest_bad_request( transport: str = "rest", request_type=operations_pb2.GetOperationRequest ): @@ -6179,6 +6243,145 @@ def test_list_operations_rest(request_type): assert isinstance(response, operations_pb2.ListOperationsResponse) +def test_cancel_operation(transport: str = "grpc"): + client = DataStoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.CancelOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_cancel_operation_async(transport: str = "grpc_asyncio"): + client = DataStoreServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.CancelOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +def test_cancel_operation_field_headers(): + client = DataStoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.CancelOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + call.return_value = None + + client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_cancel_operation_field_headers_async(): + client = DataStoreServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.CancelOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_cancel_operation_from_dict(): + client = DataStoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + + response = client.cancel_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_cancel_operation_from_dict_async(): + client = DataStoreServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.cancel_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + def test_get_operation(transport: str = "grpc"): client = DataStoreServiceClient( credentials=ga_credentials.AnonymousCredentials(), diff --git a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1beta/test_document_service.py b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1beta/test_document_service.py index 515e4272da93..4bb1e19343c8 100644 --- a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1beta/test_document_service.py +++ b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1beta/test_document_service.py @@ -1950,13 +1950,13 @@ def test_list_documents_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_documents(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -6453,6 +6453,7 @@ def test_document_service_base_transport(): "import_documents", "purge_documents", "get_operation", + "cancel_operation", "list_operations", ) for method in methods: @@ -7151,6 +7152,69 @@ async def test_transport_close_async(): close.assert_called_once() +def test_cancel_operation_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.CancelOperationRequest +): + client = DocumentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict( + { + "name": "projects/sample1/locations/sample2/collections/sample3/dataStores/sample4/branches/sample5/operations/sample6" + }, + request, + ) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.cancel_operation(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.CancelOperationRequest, + dict, + ], +) +def test_cancel_operation_rest(request_type): + client = DocumentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = { + "name": "projects/sample1/locations/sample2/collections/sample3/dataStores/sample4/branches/sample5/operations/sample6" + } + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "{}" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.cancel_operation(request) + + # Establish that the response is the type that we expect. + assert response is None + + def test_get_operation_rest_bad_request( transport: str = "rest", request_type=operations_pb2.GetOperationRequest ): @@ -7277,6 +7341,145 @@ def test_list_operations_rest(request_type): assert isinstance(response, operations_pb2.ListOperationsResponse) +def test_cancel_operation(transport: str = "grpc"): + client = DocumentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.CancelOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_cancel_operation_async(transport: str = "grpc_asyncio"): + client = DocumentServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.CancelOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert response is None + + +def test_cancel_operation_field_headers(): + client = DocumentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.CancelOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + call.return_value = None + + client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_cancel_operation_field_headers_async(): + client = DocumentServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.CancelOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_cancel_operation_from_dict(): + client = DocumentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + + response = client.cancel_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_cancel_operation_from_dict_async(): + client = DocumentServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.cancel_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + def test_get_operation(transport: str = "grpc"): client = DocumentServiceClient( credentials=ga_credentials.AnonymousCredentials(), diff --git a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1beta/test_engine_service.py b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1beta/test_engine_service.py index 9d710e7ad275..4509871a2120 100644 --- a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1beta/test_engine_service.py +++ b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1beta/test_engine_service.py @@ -3145,13 +3145,13 @@ def test_list_engines_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_engines(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -7373,6 +7373,7 @@ def test_engine_service_base_transport(): "resume_engine", "tune_engine", "get_operation", + "cancel_operation", "list_operations", ) for method in methods: @@ -8035,6 +8036,69 @@ async def test_transport_close_async(): close.assert_called_once() +def test_cancel_operation_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.CancelOperationRequest +): + client = EngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict( + { + "name": "projects/sample1/locations/sample2/collections/sample3/dataStores/sample4/branches/sample5/operations/sample6" + }, + request, + ) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.cancel_operation(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.CancelOperationRequest, + dict, + ], +) +def test_cancel_operation_rest(request_type): + client = EngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = { + "name": "projects/sample1/locations/sample2/collections/sample3/dataStores/sample4/branches/sample5/operations/sample6" + } + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "{}" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.cancel_operation(request) + + # Establish that the response is the type that we expect. 
+ assert response is None + + def test_get_operation_rest_bad_request( transport: str = "rest", request_type=operations_pb2.GetOperationRequest ): @@ -8161,6 +8225,145 @@ def test_list_operations_rest(request_type): assert isinstance(response, operations_pb2.ListOperationsResponse) +def test_cancel_operation(transport: str = "grpc"): + client = EngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.CancelOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_cancel_operation_async(transport: str = "grpc_asyncio"): + client = EngineServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.CancelOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +def test_cancel_operation_field_headers(): + client = EngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.CancelOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + call.return_value = None + + client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_cancel_operation_field_headers_async(): + client = EngineServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.CancelOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_cancel_operation_from_dict(): + client = EngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + + response = client.cancel_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_cancel_operation_from_dict_async(): + client = EngineServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.cancel_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + def test_get_operation(transport: str = "grpc"): client = EngineServiceClient( credentials=ga_credentials.AnonymousCredentials(), diff --git a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1beta/test_grounded_generation_service.py b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1beta/test_grounded_generation_service.py index 25ab4e4a6c69..721d1bd0731c 100644 --- a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1beta/test_grounded_generation_service.py +++ b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1beta/test_grounded_generation_service.py @@ -1893,6 +1893,7 @@ def test_grounded_generation_service_base_transport(): methods = ( "check_grounding", "get_operation", + "cancel_operation", "list_operations", ) for method in methods: @@ -2457,6 +2458,69 @@ async def test_transport_close_async(): close.assert_called_once() +def test_cancel_operation_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.CancelOperationRequest +): + client = GroundedGenerationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict( + { + "name": "projects/sample1/locations/sample2/collections/sample3/dataStores/sample4/branches/sample5/operations/sample6" + }, + request, + ) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.cancel_operation(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.CancelOperationRequest, + dict, + ], +) +def test_cancel_operation_rest(request_type): + client = GroundedGenerationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = { + "name": "projects/sample1/locations/sample2/collections/sample3/dataStores/sample4/branches/sample5/operations/sample6" + } + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "{}" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.cancel_operation(request) + + # Establish that the response is the type that we expect. + assert response is None + + def test_get_operation_rest_bad_request( transport: str = "rest", request_type=operations_pb2.GetOperationRequest ): @@ -2583,6 +2647,145 @@ def test_list_operations_rest(request_type): assert isinstance(response, operations_pb2.ListOperationsResponse) +def test_cancel_operation(transport: str = "grpc"): + client = GroundedGenerationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.CancelOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_cancel_operation_async(transport: str = "grpc_asyncio"): + client = GroundedGenerationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.CancelOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert response is None + + +def test_cancel_operation_field_headers(): + client = GroundedGenerationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.CancelOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + call.return_value = None + + client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_cancel_operation_field_headers_async(): + client = GroundedGenerationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.CancelOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_cancel_operation_from_dict(): + client = GroundedGenerationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + + response = client.cancel_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_cancel_operation_from_dict_async(): + client = GroundedGenerationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.cancel_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + def test_get_operation(transport: str = "grpc"): client = GroundedGenerationServiceClient( credentials=ga_credentials.AnonymousCredentials(), diff --git a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1beta/test_project_service.py b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1beta/test_project_service.py new file mode 100644 index 000000000000..0d554e8e3bdd --- /dev/null +++ b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1beta/test_project_service.py @@ -0,0 +1,3269 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import os + +# try/except added for compatibility with python < 3.8 +try: + from unittest import mock + from unittest.mock import AsyncMock # pragma: NO COVER +except ImportError: # pragma: NO COVER + import mock + +from collections.abc import Iterable +import json +import math + +from google.api_core import ( + future, + gapic_v1, + grpc_helpers, + grpc_helpers_async, + operation, + operations_v1, + path_template, +) +from google.api_core import api_core_version, client_options +from google.api_core import exceptions as core_exceptions +from google.api_core import operation_async # type: ignore +import google.auth +from google.auth import credentials as ga_credentials +from google.auth.exceptions import MutualTLSChannelError +from google.cloud.location import locations_pb2 +from google.longrunning import operations_pb2 # type: ignore +from google.oauth2 import service_account +from google.protobuf import json_format +import grpc +from grpc.experimental import aio +from proto.marshal.rules import wrappers +from proto.marshal.rules.dates import DurationRule, TimestampRule +import pytest +from requests import PreparedRequest, Request, Response +from requests.sessions import Session + +from google.cloud.discoveryengine_v1beta.services.project_service import ( + ProjectServiceAsyncClient, + ProjectServiceClient, + transports, +) +from google.cloud.discoveryengine_v1beta.types import project, project_service + + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + + +# If default endpoint is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. +def modify_default_endpoint(client): + return ( + "foo.googleapis.com" + if ("localhost" in client.DEFAULT_ENDPOINT) + else client.DEFAULT_ENDPOINT + ) + + +# If default endpoint template is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint template so the client can produce a different +# mtls endpoint for endpoint testing purposes. 
+def modify_default_endpoint_template(client): + return ( + "test.{UNIVERSE_DOMAIN}" + if ("localhost" in client._DEFAULT_ENDPOINT_TEMPLATE) + else client._DEFAULT_ENDPOINT_TEMPLATE + ) + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert ProjectServiceClient._get_default_mtls_endpoint(None) is None + assert ( + ProjectServiceClient._get_default_mtls_endpoint(api_endpoint) + == api_mtls_endpoint + ) + assert ( + ProjectServiceClient._get_default_mtls_endpoint(api_mtls_endpoint) + == api_mtls_endpoint + ) + assert ( + ProjectServiceClient._get_default_mtls_endpoint(sandbox_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + ProjectServiceClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + ProjectServiceClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi + ) + + +def test__read_environment_variables(): + assert ProjectServiceClient._read_environment_variables() == (False, "auto", None) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + assert ProjectServiceClient._read_environment_variables() == ( + True, + "auto", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + assert ProjectServiceClient._read_environment_variables() == ( + False, + "auto", + None, + ) + + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError) as excinfo: + ProjectServiceClient._read_environment_variables() + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + assert ProjectServiceClient._read_environment_variables() == ( + False, + "never", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + assert ProjectServiceClient._read_environment_variables() == ( + False, + "always", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}): + assert ProjectServiceClient._read_environment_variables() == ( + False, + "auto", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + ProjectServiceClient._read_environment_variables() + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + with mock.patch.dict(os.environ, {"GOOGLE_CLOUD_UNIVERSE_DOMAIN": "foo.com"}): + assert ProjectServiceClient._read_environment_variables() == ( + False, + "auto", + "foo.com", + ) + + +def test__get_client_cert_source(): + mock_provided_cert_source = mock.Mock() + mock_default_cert_source = mock.Mock() + + assert ProjectServiceClient._get_client_cert_source(None, False) is None + assert ( + ProjectServiceClient._get_client_cert_source(mock_provided_cert_source, False) + is None + ) + assert ( + ProjectServiceClient._get_client_cert_source(mock_provided_cert_source, True) + == mock_provided_cert_source + ) + + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", return_value=True + ): + with mock.patch( + 
"google.auth.transport.mtls.default_client_cert_source", + return_value=mock_default_cert_source, + ): + assert ( + ProjectServiceClient._get_client_cert_source(None, True) + is mock_default_cert_source + ) + assert ( + ProjectServiceClient._get_client_cert_source( + mock_provided_cert_source, "true" + ) + is mock_provided_cert_source + ) + + +@mock.patch.object( + ProjectServiceClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(ProjectServiceClient), +) +@mock.patch.object( + ProjectServiceAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(ProjectServiceAsyncClient), +) +def test__get_api_endpoint(): + api_override = "foo.com" + mock_client_cert_source = mock.Mock() + default_universe = ProjectServiceClient._DEFAULT_UNIVERSE + default_endpoint = ProjectServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=default_universe + ) + mock_universe = "bar.com" + mock_endpoint = ProjectServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=mock_universe + ) + + assert ( + ProjectServiceClient._get_api_endpoint( + api_override, mock_client_cert_source, default_universe, "always" + ) + == api_override + ) + assert ( + ProjectServiceClient._get_api_endpoint( + None, mock_client_cert_source, default_universe, "auto" + ) + == ProjectServiceClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + ProjectServiceClient._get_api_endpoint(None, None, default_universe, "auto") + == default_endpoint + ) + assert ( + ProjectServiceClient._get_api_endpoint(None, None, default_universe, "always") + == ProjectServiceClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + ProjectServiceClient._get_api_endpoint( + None, mock_client_cert_source, default_universe, "always" + ) + == ProjectServiceClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + ProjectServiceClient._get_api_endpoint(None, None, mock_universe, "never") + == mock_endpoint + ) + assert ( + ProjectServiceClient._get_api_endpoint(None, None, default_universe, "never") + == default_endpoint + ) + + with pytest.raises(MutualTLSChannelError) as excinfo: + ProjectServiceClient._get_api_endpoint( + None, mock_client_cert_source, mock_universe, "auto" + ) + assert ( + str(excinfo.value) + == "mTLS is not supported in any universe other than googleapis.com." + ) + + +def test__get_universe_domain(): + client_universe_domain = "foo.com" + universe_domain_env = "bar.com" + + assert ( + ProjectServiceClient._get_universe_domain( + client_universe_domain, universe_domain_env + ) + == client_universe_domain + ) + assert ( + ProjectServiceClient._get_universe_domain(None, universe_domain_env) + == universe_domain_env + ) + assert ( + ProjectServiceClient._get_universe_domain(None, None) + == ProjectServiceClient._DEFAULT_UNIVERSE + ) + + with pytest.raises(ValueError) as excinfo: + ProjectServiceClient._get_universe_domain("", None) + assert str(excinfo.value) == "Universe Domain cannot be an empty string." + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (ProjectServiceClient, transports.ProjectServiceGrpcTransport, "grpc"), + (ProjectServiceClient, transports.ProjectServiceRestTransport, "rest"), + ], +) +def test__validate_universe_domain(client_class, transport_class, transport_name): + client = client_class( + transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) + ) + assert client._validate_universe_domain() == True + + # Test the case when universe is already validated. 
+ assert client._validate_universe_domain() == True + + if transport_name == "grpc": + # Test the case where credentials are provided by the + # `local_channel_credentials`. The default universes in both match. + channel = grpc.secure_channel( + "http://localhost/", grpc.local_channel_credentials() + ) + client = client_class(transport=transport_class(channel=channel)) + assert client._validate_universe_domain() == True + + # Test the case where credentials do not exist: e.g. a transport is provided + # with no credentials. Validation should still succeed because there is no + # mismatch with non-existent credentials. + channel = grpc.secure_channel( + "http://localhost/", grpc.local_channel_credentials() + ) + transport = transport_class(channel=channel) + transport._credentials = None + client = client_class(transport=transport) + assert client._validate_universe_domain() == True + + # TODO: This is needed to cater for older versions of google-auth + # Make this test unconditional once the minimum supported version of + # google-auth becomes 2.23.0 or higher. + google_auth_major, google_auth_minor = [ + int(part) for part in google.auth.__version__.split(".")[0:2] + ] + if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): + credentials = ga_credentials.AnonymousCredentials() + credentials._universe_domain = "foo.com" + # Test the case when there is a universe mismatch from the credentials. + client = client_class(transport=transport_class(credentials=credentials)) + with pytest.raises(ValueError) as excinfo: + client._validate_universe_domain() + assert ( + str(excinfo.value) + == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." + ) + + # Test the case when there is a universe mismatch from the client. + # + # TODO: Make this test unconditional once the minimum supported version of + # google-api-core becomes 2.15.0 or higher. + api_core_major, api_core_minor = [ + int(part) for part in api_core_version.__version__.split(".")[0:2] + ] + if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): + client = client_class( + client_options={"universe_domain": "bar.com"}, + transport=transport_class( + credentials=ga_credentials.AnonymousCredentials(), + ), + ) + with pytest.raises(ValueError) as excinfo: + client._validate_universe_domain() + assert ( + str(excinfo.value) + == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." 
+ ) + + # Test that ValueError is raised if universe_domain is provided via client options and credentials is None + with pytest.raises(ValueError): + client._compare_universes("foo.bar", None) + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (ProjectServiceClient, "grpc"), + (ProjectServiceAsyncClient, "grpc_asyncio"), + (ProjectServiceClient, "rest"), + ], +) +def test_project_service_client_from_service_account_info(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_info" + ) as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info, transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + "discoveryengine.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://discoveryengine.googleapis.com" + ) + + +@pytest.mark.parametrize( + "transport_class,transport_name", + [ + (transports.ProjectServiceGrpcTransport, "grpc"), + (transports.ProjectServiceGrpcAsyncIOTransport, "grpc_asyncio"), + (transports.ProjectServiceRestTransport, "rest"), + ], +) +def test_project_service_client_service_account_always_use_jwt( + transport_class, transport_name +): + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) + + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=False) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (ProjectServiceClient, "grpc"), + (ProjectServiceAsyncClient, "grpc_asyncio"), + (ProjectServiceClient, "rest"), + ], +) +def test_project_service_client_from_service_account_file(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_file" + ) as factory: + factory.return_value = creds + client = client_class.from_service_account_file( + "dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + client = client_class.from_service_account_json( + "dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + "discoveryengine.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://discoveryengine.googleapis.com" + ) + + +def test_project_service_client_get_transport_class(): + transport = ProjectServiceClient.get_transport_class() + available_transports = [ + transports.ProjectServiceGrpcTransport, + transports.ProjectServiceRestTransport, + ] + assert transport in available_transports + + transport = ProjectServiceClient.get_transport_class("grpc") + assert transport == transports.ProjectServiceGrpcTransport + + +@pytest.mark.parametrize( + 
"client_class,transport_class,transport_name", + [ + (ProjectServiceClient, transports.ProjectServiceGrpcTransport, "grpc"), + ( + ProjectServiceAsyncClient, + transports.ProjectServiceGrpcAsyncIOTransport, + "grpc_asyncio", + ), + (ProjectServiceClient, transports.ProjectServiceRestTransport, "rest"), + ], +) +@mock.patch.object( + ProjectServiceClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(ProjectServiceClient), +) +@mock.patch.object( + ProjectServiceAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(ProjectServiceAsyncClient), +) +def test_project_service_client_client_options( + client_class, transport_class, transport_name +): + # Check that if channel is provided we won't create a new one. + with mock.patch.object(ProjectServiceClient, "get_transport_class") as gtc: + transport = transport_class(credentials=ga_credentials.AnonymousCredentials()) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object(ProjectServiceClient, "get_transport_class") as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. + options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name, client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + client = client_class(transport=transport_name) + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError) as excinfo: + client = client_class(transport=transport_name) + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + # Check the case api_endpoint is provided + options = client_options.ClientOptions( + api_audience="https://language.googleapis.com" + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience="https://language.googleapis.com", + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,use_client_cert_env", + [ + (ProjectServiceClient, transports.ProjectServiceGrpcTransport, "grpc", "true"), + ( + ProjectServiceAsyncClient, + transports.ProjectServiceGrpcAsyncIOTransport, + "grpc_asyncio", + "true", + ), + (ProjectServiceClient, transports.ProjectServiceGrpcTransport, "grpc", "false"), + ( + ProjectServiceAsyncClient, + transports.ProjectServiceGrpcAsyncIOTransport, + "grpc_asyncio", + "false", + ), + (ProjectServiceClient, transports.ProjectServiceRestTransport, "rest", "true"), + (ProjectServiceClient, transports.ProjectServiceRestTransport, "rest", "false"), + ], +) +@mock.patch.object( + ProjectServiceClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(ProjectServiceClient), +) +@mock.patch.object( + ProjectServiceAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(ProjectServiceAsyncClient), +) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_project_service_client_mtls_env_auto( + client_class, transport_class, transport_name, use_client_cert_env +): + # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. 
+ + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + options = client_options.ClientOptions( + client_cert_source=client_cert_source_callback + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ) + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=client_cert_source_callback, + ): + if use_client_cert_env == "false": + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ) + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case client_cert_source and ADC client cert are not provided. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class", [ProjectServiceClient, ProjectServiceAsyncClient] +) +@mock.patch.object( + ProjectServiceClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(ProjectServiceClient), +) +@mock.patch.object( + ProjectServiceAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(ProjectServiceAsyncClient), +) +def test_project_service_client_get_mtls_endpoint_and_cert_source(client_class): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=mock_client_cert_source, + ): + ( + api_endpoint, + cert_source, + ) = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + client_class.get_mtls_endpoint_and_cert_source() + + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError) as excinfo: + client_class.get_mtls_endpoint_and_cert_source() + + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + + +@pytest.mark.parametrize( + "client_class", [ProjectServiceClient, ProjectServiceAsyncClient] +) +@mock.patch.object( + ProjectServiceClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(ProjectServiceClient), +) +@mock.patch.object( + ProjectServiceAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(ProjectServiceAsyncClient), +) +def test_project_service_client_client_api_endpoint(client_class): + mock_client_cert_source = client_cert_source_callback + api_override = "foo.com" + default_universe = ProjectServiceClient._DEFAULT_UNIVERSE + default_endpoint = ProjectServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=default_universe + ) + mock_universe = "bar.com" + mock_endpoint = ProjectServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=mock_universe + ) + + # If ClientOptions.api_endpoint is set and GOOGLE_API_USE_CLIENT_CERTIFICATE="true", + # use ClientOptions.api_endpoint as the api endpoint regardless. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ): + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=api_override + ) + client = client_class( + client_options=options, + credentials=ga_credentials.AnonymousCredentials(), + ) + assert client.api_endpoint == api_override + + # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="never", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + client = client_class(credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == default_endpoint + + # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="always", + # use the DEFAULT_MTLS_ENDPOINT as the api endpoint. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + client = client_class(credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + + # If ClientOptions.api_endpoint is not set, GOOGLE_API_USE_MTLS_ENDPOINT="auto" (default), + # GOOGLE_API_USE_CLIENT_CERTIFICATE="false" (default), default cert source doesn't exist, + # and ClientOptions.universe_domain="bar.com", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with universe domain as the api endpoint. + options = client_options.ClientOptions() + universe_exists = hasattr(options, "universe_domain") + if universe_exists: + options = client_options.ClientOptions(universe_domain=mock_universe) + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + else: + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + assert client.api_endpoint == ( + mock_endpoint if universe_exists else default_endpoint + ) + assert client.universe_domain == ( + mock_universe if universe_exists else default_universe + ) + + # If ClientOptions does not have a universe domain attribute and GOOGLE_API_USE_MTLS_ENDPOINT="never", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. + options = client_options.ClientOptions() + if hasattr(options, "universe_domain"): + delattr(options, "universe_domain") + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + assert client.api_endpoint == default_endpoint + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (ProjectServiceClient, transports.ProjectServiceGrpcTransport, "grpc"), + ( + ProjectServiceAsyncClient, + transports.ProjectServiceGrpcAsyncIOTransport, + "grpc_asyncio", + ), + (ProjectServiceClient, transports.ProjectServiceRestTransport, "rest"), + ], +) +def test_project_service_client_client_options_scopes( + client_class, transport_class, transport_name +): + # Check the case scopes are provided. + options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + ProjectServiceClient, + transports.ProjectServiceGrpcTransport, + "grpc", + grpc_helpers, + ), + ( + ProjectServiceAsyncClient, + transports.ProjectServiceGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + (ProjectServiceClient, transports.ProjectServiceRestTransport, "rest", None), + ], +) +def test_project_service_client_client_options_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. 
+ options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +def test_project_service_client_client_options_from_dict(): + with mock.patch( + "google.cloud.discoveryengine_v1beta.services.project_service.transports.ProjectServiceGrpcTransport.__init__" + ) as grpc_transport: + grpc_transport.return_value = None + client = ProjectServiceClient( + client_options={"api_endpoint": "squid.clam.whelk"} + ) + grpc_transport.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + ProjectServiceClient, + transports.ProjectServiceGrpcTransport, + "grpc", + grpc_helpers, + ), + ( + ProjectServiceAsyncClient, + transports.ProjectServiceGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + ], +) +def test_project_service_client_create_channel_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. + options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # test that the credentials from file are saved and used as the credentials. 
+ with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel" + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + file_creds = ga_credentials.AnonymousCredentials() + load_creds.return_value = (file_creds, None) + adc.return_value = (creds, None) + client = client_class(client_options=options, transport=transport_name) + create_channel.assert_called_with( + "discoveryengine.googleapis.com:443", + credentials=file_creds, + credentials_file=None, + quota_project_id=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + scopes=None, + default_host="discoveryengine.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "request_type", + [ + project_service.ProvisionProjectRequest, + dict, + ], +) +def test_provision_project(request_type, transport: str = "grpc"): + client = ProjectServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.provision_project), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.provision_project(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = project_service.ProvisionProjectRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_provision_project_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ProjectServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.provision_project), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.provision_project() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == project_service.ProvisionProjectRequest() + + +def test_provision_project_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = ProjectServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = project_service.ProvisionProjectRequest( + name="name_value", + data_use_terms_version="data_use_terms_version_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.provision_project), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.provision_project(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == project_service.ProvisionProjectRequest( + name="name_value", + data_use_terms_version="data_use_terms_version_value", + ) + + +def test_provision_project_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ProjectServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.provision_project in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.provision_project + ] = mock_rpc + request = {} + client.provision_project(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.provision_project(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_provision_project_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ProjectServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.provision_project), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.provision_project() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == project_service.ProvisionProjectRequest() + + +@pytest.mark.asyncio +async def test_provision_project_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = ProjectServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.provision_project + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[ + client._client._transport.provision_project + ] = mock_object + + request = {} + await client.provision_project(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.provision_project(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + +@pytest.mark.asyncio +async def test_provision_project_async( + transport: str = "grpc_asyncio", + request_type=project_service.ProvisionProjectRequest, +): + client = ProjectServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.provision_project), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.provision_project(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = project_service.ProvisionProjectRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_provision_project_async_from_dict(): + await test_provision_project_async(request_type=dict) + + +def test_provision_project_field_headers(): + client = ProjectServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = project_service.ProvisionProjectRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.provision_project), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.provision_project(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_provision_project_field_headers_async(): + client = ProjectServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = project_service.ProvisionProjectRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.provision_project), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.provision_project(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_provision_project_flattened(): + client = ProjectServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.provision_project), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.provision_project( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_provision_project_flattened_error(): + client = ProjectServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.provision_project( + project_service.ProvisionProjectRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_provision_project_flattened_async(): + client = ProjectServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.provision_project), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
+ response = await client.provision_project( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_provision_project_flattened_error_async(): + client = ProjectServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.provision_project( + project_service.ProvisionProjectRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + project_service.ProvisionProjectRequest, + dict, + ], +) +def test_provision_project_rest(request_type): + client = ProjectServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.provision_project(request) + + # Establish that the response is the type that we expect. + assert response.operation.name == "operations/spam" + + +def test_provision_project_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ProjectServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.provision_project in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.provision_project + ] = mock_rpc + + request = {} + client.provision_project(request) + + # Establish that the underlying gRPC stub method was called. 
+        assert mock_rpc.call_count == 1
+
+        # Operation methods build a cached wrapper on first rpc call
+        # subsequent calls should use the cached wrapper
+        wrapper_fn.reset_mock()
+
+        client.provision_project(request)
+
+        # Establish that a new wrapper was not created for this call
+        assert wrapper_fn.call_count == 0
+        assert mock_rpc.call_count == 2
+
+
+def test_provision_project_rest_required_fields(
+    request_type=project_service.ProvisionProjectRequest,
+):
+    transport_class = transports.ProjectServiceRestTransport
+
+    request_init = {}
+    request_init["name"] = ""
+    request_init["accept_data_use_terms"] = False
+    request_init["data_use_terms_version"] = ""
+    request = request_type(**request_init)
+    pb_request = request_type.pb(request)
+    jsonified_request = json.loads(
+        json_format.MessageToJson(pb_request, use_integers_for_enums=False)
+    )
+
+    # verify fields with default values are dropped
+
+    unset_fields = transport_class(
+        credentials=ga_credentials.AnonymousCredentials()
+    ).provision_project._get_unset_required_fields(jsonified_request)
+    jsonified_request.update(unset_fields)
+
+    # verify required fields with default values are now present
+
+    jsonified_request["name"] = "name_value"
+    jsonified_request["acceptDataUseTerms"] = True
+    jsonified_request["dataUseTermsVersion"] = "data_use_terms_version_value"
+
+    unset_fields = transport_class(
+        credentials=ga_credentials.AnonymousCredentials()
+    ).provision_project._get_unset_required_fields(jsonified_request)
+    jsonified_request.update(unset_fields)
+
+    # verify required fields with non-default values are left alone
+    assert "name" in jsonified_request
+    assert jsonified_request["name"] == "name_value"
+    assert "acceptDataUseTerms" in jsonified_request
+    assert jsonified_request["acceptDataUseTerms"] is True
+    assert "dataUseTermsVersion" in jsonified_request
+    assert jsonified_request["dataUseTermsVersion"] == "data_use_terms_version_value"
+
+    client = ProjectServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+    request = request_type(**request_init)
+
+    # Designate an appropriate value for the returned response.
+    return_value = operations_pb2.Operation(name="operations/spam")
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(Session, "request") as req:
+        # We need to mock transcode() because providing default values
+        # for required fields will fail the real version if the http_options
+        # expect actual values for those fields.
+        with mock.patch.object(path_template, "transcode") as transcode:
+            # A uri without fields and an empty body will force all the
+            # request fields to show up in the query_params.
+            pb_request = request_type.pb(request)
+            transcode_result = {
+                "uri": "v1/sample_method",
+                "method": "post",
+                "query_params": pb_request,
+            }
+            transcode_result["body"] = pb_request
+            transcode.return_value = transcode_result
+
+            response_value = Response()
+            response_value.status_code = 200
+            json_return_value = json_format.MessageToJson(return_value)
+
+            response_value._content = json_return_value.encode("UTF-8")
+            req.return_value = response_value
+
+            response = client.provision_project(request)
+
+            expected_params = [("$alt", "json;enum-encoding=int")]
+            actual_params = req.call_args.kwargs["params"]
+            assert expected_params == actual_params
+
+
+def test_provision_project_rest_unset_required_fields():
+    # Instantiate the credentials (not the class) so the transport is built
+    # the same way as in the other tests.
+    transport = transports.ProjectServiceRestTransport(
+        credentials=ga_credentials.AnonymousCredentials()
+    )
+
+    unset_fields = transport.provision_project._get_unset_required_fields({})
+    assert set(unset_fields) == (
+        set(())
+        & set(
+            (
+                "name",
+                "acceptDataUseTerms",
+                "dataUseTermsVersion",
+            )
+        )
+    )
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_provision_project_rest_interceptors(null_interceptor):
+    transport = transports.ProjectServiceRestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None
+        if null_interceptor
+        else transports.ProjectServiceRestInterceptor(),
+    )
+    client = ProjectServiceClient(transport=transport)
+    with mock.patch.object(
+        type(client.transport._session), "request"
+    ) as req, mock.patch.object(
+        path_template, "transcode"
+    ) as transcode, mock.patch.object(
+        operation.Operation, "_set_result_from_operation"
+    ), mock.patch.object(
+        transports.ProjectServiceRestInterceptor, "post_provision_project"
+    ) as post, mock.patch.object(
+        transports.ProjectServiceRestInterceptor, "pre_provision_project"
+    ) as pre:
+        pre.assert_not_called()
+        post.assert_not_called()
+        pb_message = project_service.ProvisionProjectRequest.pb(
+            project_service.ProvisionProjectRequest()
+        )
+        transcode.return_value = {
+            "method": "post",
+            "uri": "my_uri",
+            "body": pb_message,
+            "query_params": pb_message,
+        }
+
+        req.return_value = Response()
+        req.return_value.status_code = 200
+        req.return_value.request = PreparedRequest()
+        req.return_value._content = json_format.MessageToJson(
+            operations_pb2.Operation()
+        )
+
+        request = project_service.ProvisionProjectRequest()
+        metadata = [
+            ("key", "val"),
+            ("cephalopod", "squid"),
+        ]
+        pre.return_value = request, metadata
+        post.return_value = operations_pb2.Operation()
+
+        client.provision_project(
+            request,
+            metadata=[
+                ("key", "val"),
+                ("cephalopod", "squid"),
+            ],
+        )
+
+        pre.assert_called_once()
+        post.assert_called_once()
+
+
+def test_provision_project_rest_bad_request(
+    transport: str = "rest", request_type=project_service.ProvisionProjectRequest
+):
+    client = ProjectServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {"name": "projects/sample1"}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a BadRequest error.
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.provision_project(request) + + +def test_provision_project_rest_flattened(): + client = ProjectServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = {"name": "projects/sample1"} + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.provision_project(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1beta/{name=projects/*}:provision" % client.transport._host, args[1] + ) + + +def test_provision_project_rest_flattened_error(transport: str = "rest"): + client = ProjectServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.provision_project( + project_service.ProvisionProjectRequest(), + name="name_value", + ) + + +def test_provision_project_rest_error(): + client = ProjectServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.ProjectServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = ProjectServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.ProjectServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = ProjectServiceClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. + transport = transports.ProjectServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = ProjectServiceClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. 
+ options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = ProjectServiceClient( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.ProjectServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = ProjectServiceClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.ProjectServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = ProjectServiceClient(transport=transport) + assert client.transport is transport + + +def test_transport_get_channel(): + # A client may be instantiated with a custom transport instance. + transport = transports.ProjectServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + transport = transports.ProjectServiceGrpcAsyncIOTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.ProjectServiceGrpcTransport, + transports.ProjectServiceGrpcAsyncIOTransport, + transports.ProjectServiceRestTransport, + ], +) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "rest", + ], +) +def test_transport_kind(transport_name): + transport = ProjectServiceClient.get_transport_class(transport_name)( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert transport.kind == transport_name + + +def test_transport_grpc_default(): + # A client should use the gRPC transport by default. + client = ProjectServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert isinstance( + client.transport, + transports.ProjectServiceGrpcTransport, + ) + + +def test_project_service_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.ProjectServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json", + ) + + +def test_project_service_base_transport(): + # Instantiate the base transport. + with mock.patch( + "google.cloud.discoveryengine_v1beta.services.project_service.transports.ProjectServiceTransport.__init__" + ) as Transport: + Transport.return_value = None + transport = transports.ProjectServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. 
+ methods = ( + "provision_project", + "get_operation", + "cancel_operation", + "list_operations", + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + with pytest.raises(NotImplementedError): + transport.close() + + # Additionally, the LRO client (a property) should + # also raise NotImplementedError + with pytest.raises(NotImplementedError): + transport.operations_client + + # Catch all for all remaining methods and properties + remainder = [ + "kind", + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + + +def test_project_service_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch( + "google.cloud.discoveryengine_v1beta.services.project_service.transports.ProjectServiceTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.ProjectServiceTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with( + "credentials.json", + scopes=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id="octopus", + ) + + +def test_project_service_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. + with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( + "google.cloud.discoveryengine_v1beta.services.project_service.transports.ProjectServiceTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.ProjectServiceTransport() + adc.assert_called_once() + + +def test_project_service_auth_adc(): + # If no credentials are provided, we should use ADC credentials. + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + ProjectServiceClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id=None, + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.ProjectServiceGrpcTransport, + transports.ProjectServiceGrpcAsyncIOTransport, + ], +) +def test_project_service_transport_auth_adc(transport_class): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
+ with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + adc.assert_called_once_with( + scopes=["1", "2"], + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id="octopus", + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.ProjectServiceGrpcTransport, + transports.ProjectServiceGrpcAsyncIOTransport, + transports.ProjectServiceRestTransport, + ], +) +def test_project_service_transport_auth_gdch_credentials(transport_class): + host = "https://language.com" + api_audience_tests = [None, "https://language2.com"] + api_audience_expect = [host, "https://language2.com"] + for t, e in zip(api_audience_tests, api_audience_expect): + with mock.patch.object(google.auth, "default", autospec=True) as adc: + gdch_mock = mock.MagicMock() + type(gdch_mock).with_gdch_audience = mock.PropertyMock( + return_value=gdch_mock + ) + adc.return_value = (gdch_mock, None) + transport_class(host=host, api_audience=t) + gdch_mock.with_gdch_audience.assert_called_once_with(e) + + +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.ProjectServiceGrpcTransport, grpc_helpers), + (transports.ProjectServiceGrpcAsyncIOTransport, grpc_helpers_async), + ], +) +def test_project_service_transport_create_channel(transport_class, grpc_helpers): + # If credentials and host are not provided, the transport class should use + # ADC credentials. + with mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + + create_channel.assert_called_with( + "discoveryengine.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + scopes=["1", "2"], + default_host="discoveryengine.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.ProjectServiceGrpcTransport, + transports.ProjectServiceGrpcAsyncIOTransport, + ], +) +def test_project_service_grpc_transport_client_cert_source_for_mtls(transport_class): + cred = ga_credentials.AnonymousCredentials() + + # Check ssl_channel_credentials is used if provided. 
+ with mock.patch.object(transport_class, "create_channel") as mock_create_channel: + mock_ssl_channel_creds = mock.Mock() + transport_class( + host="squid.clam.whelk", + credentials=cred, + ssl_channel_credentials=mock_ssl_channel_creds, + ) + mock_create_channel.assert_called_once_with( + "squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_channel_creds, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls + # is used. + with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): + with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: + transport_class( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback, + ) + expected_cert, expected_key = client_cert_source_callback() + mock_ssl_cred.assert_called_once_with( + certificate_chain=expected_cert, private_key=expected_key + ) + + +def test_project_service_http_transport_client_cert_source_for_mtls(): + cred = ga_credentials.AnonymousCredentials() + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ) as mock_configure_mtls_channel: + transports.ProjectServiceRestTransport( + credentials=cred, client_cert_source_for_mtls=client_cert_source_callback + ) + mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) + + +def test_project_service_rest_lro_client(): + client = ProjectServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + transport = client.transport + + # Ensure that we have a api-core operations client. + assert isinstance( + transport.operations_client, + operations_v1.AbstractOperationsClient, + ) + + # Ensure that subsequent calls to the property send the exact same object. 
+    assert transport.operations_client is transport.operations_client
+
+
+@pytest.mark.parametrize(
+    "transport_name",
+    [
+        "grpc",
+        "grpc_asyncio",
+        "rest",
+    ],
+)
+def test_project_service_host_no_port(transport_name):
+    client = ProjectServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        client_options=client_options.ClientOptions(
+            api_endpoint="discoveryengine.googleapis.com"
+        ),
+        transport=transport_name,
+    )
+    assert client.transport._host == (
+        "discoveryengine.googleapis.com:443"
+        if transport_name in ["grpc", "grpc_asyncio"]
+        else "https://discoveryengine.googleapis.com"
+    )
+
+
+@pytest.mark.parametrize(
+    "transport_name",
+    [
+        "grpc",
+        "grpc_asyncio",
+        "rest",
+    ],
+)
+def test_project_service_host_with_port(transport_name):
+    client = ProjectServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        client_options=client_options.ClientOptions(
+            api_endpoint="discoveryengine.googleapis.com:8000"
+        ),
+        transport=transport_name,
+    )
+    assert client.transport._host == (
+        "discoveryengine.googleapis.com:8000"
+        if transport_name in ["grpc", "grpc_asyncio"]
+        else "https://discoveryengine.googleapis.com:8000"
+    )
+
+
+@pytest.mark.parametrize(
+    "transport_name",
+    [
+        "rest",
+    ],
+)
+def test_project_service_client_transport_session_collision(transport_name):
+    creds1 = ga_credentials.AnonymousCredentials()
+    creds2 = ga_credentials.AnonymousCredentials()
+    client1 = ProjectServiceClient(
+        credentials=creds1,
+        transport=transport_name,
+    )
+    client2 = ProjectServiceClient(
+        credentials=creds2,
+        transport=transport_name,
+    )
+    session1 = client1.transport.provision_project._session
+    session2 = client2.transport.provision_project._session
+    assert session1 != session2
+
+
+def test_project_service_grpc_transport_channel():
+    channel = grpc.secure_channel("http://localhost/", grpc.local_channel_credentials())
+
+    # Check that channel is used if provided.
+    transport = transports.ProjectServiceGrpcTransport(
+        host="squid.clam.whelk",
+        channel=channel,
+    )
+    assert transport.grpc_channel == channel
+    assert transport._host == "squid.clam.whelk:443"
+    assert transport._ssl_channel_credentials is None
+
+
+def test_project_service_grpc_asyncio_transport_channel():
+    channel = aio.secure_channel("http://localhost/", grpc.local_channel_credentials())
+
+    # Check that channel is used if provided.
+    transport = transports.ProjectServiceGrpcAsyncIOTransport(
+        host="squid.clam.whelk",
+        channel=channel,
+    )
+    assert transport.grpc_channel == channel
+    assert transport._host == "squid.clam.whelk:443"
+    assert transport._ssl_channel_credentials is None
+
+
+# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are
+# removed from grpc/grpc_asyncio transport constructor.
+@pytest.mark.parametrize( + "transport_class", + [ + transports.ProjectServiceGrpcTransport, + transports.ProjectServiceGrpcAsyncIOTransport, + ], +) +def test_project_service_transport_channel_mtls_with_client_cert_source( + transport_class, +): + with mock.patch( + "grpc.ssl_channel_credentials", autospec=True + ) as grpc_ssl_channel_cred: + with mock.patch.object( + transport_class, "create_channel" + ) as grpc_create_channel: + mock_ssl_cred = mock.Mock() + grpc_ssl_channel_cred.return_value = mock_ssl_cred + + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + + cred = ga_credentials.AnonymousCredentials() + with pytest.warns(DeprecationWarning): + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (cred, None) + transport = transport_class( + host="squid.clam.whelk", + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=client_cert_source_callback, + ) + adc.assert_called_once() + + grpc_ssl_channel_cred.assert_called_once_with( + certificate_chain=b"cert bytes", private_key=b"key bytes" + ) + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + assert transport._ssl_channel_credentials == mock_ssl_cred + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. +@pytest.mark.parametrize( + "transport_class", + [ + transports.ProjectServiceGrpcTransport, + transports.ProjectServiceGrpcAsyncIOTransport, + ], +) +def test_project_service_transport_channel_mtls_with_adc(transport_class): + mock_ssl_cred = mock.Mock() + with mock.patch.multiple( + "google.auth.transport.grpc.SslCredentials", + __init__=mock.Mock(return_value=None), + ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), + ): + with mock.patch.object( + transport_class, "create_channel" + ) as grpc_create_channel: + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + mock_cred = mock.Mock() + + with pytest.warns(DeprecationWarning): + transport = transport_class( + host="squid.clam.whelk", + credentials=mock_cred, + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=None, + ) + + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=mock_cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + + +def test_project_service_grpc_lro_client(): + client = ProjectServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + transport = client.transport + + # Ensure that we have a api-core operations client. + assert isinstance( + transport.operations_client, + operations_v1.OperationsClient, + ) + + # Ensure that subsequent calls to the property send the exact same object. 
+ assert transport.operations_client is transport.operations_client + + +def test_project_service_grpc_lro_async_client(): + client = ProjectServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + transport = client.transport + + # Ensure that we have a api-core operations client. + assert isinstance( + transport.operations_client, + operations_v1.OperationsAsyncClient, + ) + + # Ensure that subsequent calls to the property send the exact same object. + assert transport.operations_client is transport.operations_client + + +def test_project_path(): + project = "squid" + expected = "projects/{project}".format( + project=project, + ) + actual = ProjectServiceClient.project_path(project) + assert expected == actual + + +def test_parse_project_path(): + expected = { + "project": "clam", + } + path = ProjectServiceClient.project_path(**expected) + + # Check that the path construction is reversible. + actual = ProjectServiceClient.parse_project_path(path) + assert expected == actual + + +def test_common_billing_account_path(): + billing_account = "whelk" + expected = "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + actual = ProjectServiceClient.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "octopus", + } + path = ProjectServiceClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. + actual = ProjectServiceClient.parse_common_billing_account_path(path) + assert expected == actual + + +def test_common_folder_path(): + folder = "oyster" + expected = "folders/{folder}".format( + folder=folder, + ) + actual = ProjectServiceClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "nudibranch", + } + path = ProjectServiceClient.common_folder_path(**expected) + + # Check that the path construction is reversible. + actual = ProjectServiceClient.parse_common_folder_path(path) + assert expected == actual + + +def test_common_organization_path(): + organization = "cuttlefish" + expected = "organizations/{organization}".format( + organization=organization, + ) + actual = ProjectServiceClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "mussel", + } + path = ProjectServiceClient.common_organization_path(**expected) + + # Check that the path construction is reversible. + actual = ProjectServiceClient.parse_common_organization_path(path) + assert expected == actual + + +def test_common_project_path(): + project = "winkle" + expected = "projects/{project}".format( + project=project, + ) + actual = ProjectServiceClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "nautilus", + } + path = ProjectServiceClient.common_project_path(**expected) + + # Check that the path construction is reversible. 
+ actual = ProjectServiceClient.parse_common_project_path(path) + assert expected == actual + + +def test_common_location_path(): + project = "scallop" + location = "abalone" + expected = "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) + actual = ProjectServiceClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "squid", + "location": "clam", + } + path = ProjectServiceClient.common_location_path(**expected) + + # Check that the path construction is reversible. + actual = ProjectServiceClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object( + transports.ProjectServiceTransport, "_prep_wrapped_messages" + ) as prep: + client = ProjectServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object( + transports.ProjectServiceTransport, "_prep_wrapped_messages" + ) as prep: + transport_class = ProjectServiceClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + +@pytest.mark.asyncio +async def test_transport_close_async(): + client = ProjectServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + with mock.patch.object( + type(getattr(client.transport, "grpc_channel")), "close" + ) as close: + async with client: + close.assert_not_called() + close.assert_called_once() + + +def test_cancel_operation_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.CancelOperationRequest +): + client = ProjectServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict( + { + "name": "projects/sample1/locations/sample2/collections/sample3/dataStores/sample4/branches/sample5/operations/sample6" + }, + request, + ) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.cancel_operation(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.CancelOperationRequest, + dict, + ], +) +def test_cancel_operation_rest(request_type): + client = ProjectServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = { + "name": "projects/sample1/locations/sample2/collections/sample3/dataStores/sample4/branches/sample5/operations/sample6" + } + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = None + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "{}" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.cancel_operation(request) + + # Establish that the response is the type that we expect. + assert response is None + + +def test_get_operation_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.GetOperationRequest +): + client = ProjectServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict( + { + "name": "projects/sample1/locations/sample2/collections/sample3/dataConnector/operations/sample4" + }, + request, + ) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_operation(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.GetOperationRequest, + dict, + ], +) +def test_get_operation_rest(request_type): + client = ProjectServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = { + "name": "projects/sample1/locations/sample2/collections/sample3/dataConnector/operations/sample4" + } + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_operation(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + + +def test_list_operations_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.ListOperationsRequest +): + client = ProjectServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict( + { + "name": "projects/sample1/locations/sample2/collections/sample3/dataConnector" + }, + request, + ) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_operations(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.ListOperationsRequest, + dict, + ], +) +def test_list_operations_rest(request_type): + client = ProjectServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = { + "name": "projects/sample1/locations/sample2/collections/sample3/dataConnector" + } + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.ListOperationsResponse() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_operations(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) + + +def test_cancel_operation(transport: str = "grpc"): + client = ProjectServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.CancelOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_cancel_operation_async(transport: str = "grpc_asyncio"): + client = ProjectServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.CancelOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert response is None + + +def test_cancel_operation_field_headers(): + client = ProjectServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.CancelOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + call.return_value = None + + client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_cancel_operation_field_headers_async(): + client = ProjectServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.CancelOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_cancel_operation_from_dict(): + client = ProjectServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + + response = client.cancel_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_cancel_operation_from_dict_async(): + client = ProjectServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.cancel_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_get_operation(transport: str = "grpc"): + client = ProjectServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.GetOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = operations_pb2.Operation() + response = client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + + +@pytest.mark.asyncio +async def test_get_operation_async(transport: str = "grpc_asyncio"): + client = ProjectServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.GetOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + response = await client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + + +def test_get_operation_field_headers(): + client = ProjectServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.GetOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + call.return_value = operations_pb2.Operation() + + client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_operation_field_headers_async(): + client = ProjectServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.GetOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + await client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_get_operation_from_dict(): + client = ProjectServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation() + + response = client.get_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_get_operation_from_dict_async(): + client = ProjectServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + response = await client.get_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_list_operations(transport: str = "grpc"): + client = ProjectServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.ListOperationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.ListOperationsResponse() + response = client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) + + +@pytest.mark.asyncio +async def test_list_operations_async(transport: str = "grpc_asyncio"): + client = ProjectServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.ListOperationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + response = await client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) + + +def test_list_operations_field_headers(): + client = ProjectServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.ListOperationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + call.return_value = operations_pb2.ListOperationsResponse() + + client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_operations_field_headers_async(): + client = ProjectServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.ListOperationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + await client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_list_operations_from_dict(): + client = ProjectServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.ListOperationsResponse() + + response = client.list_operations( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_list_operations_from_dict_async(): + client = ProjectServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + response = await client.list_operations( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_transport_close(): + transports = { + "rest": "_session", + "grpc": "_grpc_channel", + } + + for transport, close_name in transports.items(): + client = ProjectServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + with mock.patch.object( + type(getattr(client.transport, close_name)), "close" + ) as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +def test_client_ctx(): + transports = [ + "rest", + "grpc", + ] + for transport in transports: + client = ProjectServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + # Test client calls underlying transport. 
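+        # Using the client as a context manager should close the underlying transport on exit.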
+ with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() + + +@pytest.mark.parametrize( + "client_class,transport_class", + [ + (ProjectServiceClient, transports.ProjectServiceGrpcTransport), + (ProjectServiceAsyncClient, transports.ProjectServiceGrpcAsyncIOTransport), + ], +) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) diff --git a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1beta/test_rank_service.py b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1beta/test_rank_service.py index 5661ad9833d9..4acb0074d6d1 100644 --- a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1beta/test_rank_service.py +++ b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1beta/test_rank_service.py @@ -1753,6 +1753,7 @@ def test_rank_service_base_transport(): methods = ( "rank", "get_operation", + "cancel_operation", "list_operations", ) for method in methods: @@ -2298,6 +2299,69 @@ async def test_transport_close_async(): close.assert_called_once() +def test_cancel_operation_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.CancelOperationRequest +): + client = RankServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict( + { + "name": "projects/sample1/locations/sample2/collections/sample3/dataStores/sample4/branches/sample5/operations/sample6" + }, + request, + ) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.cancel_operation(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.CancelOperationRequest, + dict, + ], +) +def test_cancel_operation_rest(request_type): + client = RankServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = { + "name": "projects/sample1/locations/sample2/collections/sample3/dataStores/sample4/branches/sample5/operations/sample6" + } + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = None + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "{}" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.cancel_operation(request) + + # Establish that the response is the type that we expect. + assert response is None + + def test_get_operation_rest_bad_request( transport: str = "rest", request_type=operations_pb2.GetOperationRequest ): @@ -2424,6 +2488,145 @@ def test_list_operations_rest(request_type): assert isinstance(response, operations_pb2.ListOperationsResponse) +def test_cancel_operation(transport: str = "grpc"): + client = RankServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.CancelOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_cancel_operation_async(transport: str = "grpc_asyncio"): + client = RankServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.CancelOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +def test_cancel_operation_field_headers(): + client = RankServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.CancelOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + call.return_value = None + + client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
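+    # The x-goog-request-params metadata entry carries the routing value taken from request.name ("name=locations" here).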
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_cancel_operation_field_headers_async(): + client = RankServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.CancelOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_cancel_operation_from_dict(): + client = RankServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + + response = client.cancel_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_cancel_operation_from_dict_async(): + client = RankServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.cancel_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + def test_get_operation(transport: str = "grpc"): client = RankServiceClient( credentials=ga_credentials.AnonymousCredentials(), diff --git a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1beta/test_recommendation_service.py b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1beta/test_recommendation_service.py index 07d448469878..6c9b8fdff9eb 100644 --- a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1beta/test_recommendation_service.py +++ b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1beta/test_recommendation_service.py @@ -1896,6 +1896,7 @@ def test_recommendation_service_base_transport(): methods = ( "recommend", "get_operation", + "cancel_operation", "list_operations", ) for method in methods: @@ -2288,12 +2289,38 @@ def test_recommendation_service_transport_channel_mtls_with_adc(transport_class) assert transport.grpc_channel == mock_grpc_channel -def test_document_path(): +def test_data_store_path(): project = "squid" location = "clam" data_store = "whelk" - branch = "octopus" - document = "oyster" + expected = "projects/{project}/locations/{location}/dataStores/{data_store}".format( + project=project, + location=location, + data_store=data_store, + ) + actual = RecommendationServiceClient.data_store_path(project, location, data_store) + assert expected == actual + + +def test_parse_data_store_path(): + expected = { + "project": "octopus", + "location": "oyster", + "data_store": "nudibranch", + } + path = RecommendationServiceClient.data_store_path(**expected) + + # Check that the path construction is reversible. + actual = RecommendationServiceClient.parse_data_store_path(path) + assert expected == actual + + +def test_document_path(): + project = "cuttlefish" + location = "mussel" + data_store = "winkle" + branch = "nautilus" + document = "scallop" expected = "projects/{project}/locations/{location}/dataStores/{data_store}/branches/{branch}/documents/{document}".format( project=project, location=location, @@ -2309,11 +2336,11 @@ def test_document_path(): def test_parse_document_path(): expected = { - "project": "nudibranch", - "location": "cuttlefish", - "data_store": "mussel", - "branch": "winkle", - "document": "nautilus", + "project": "abalone", + "location": "squid", + "data_store": "clam", + "branch": "whelk", + "document": "octopus", } path = RecommendationServiceClient.document_path(**expected) @@ -2322,11 +2349,42 @@ def test_parse_document_path(): assert expected == actual +def test_engine_path(): + project = "oyster" + location = "nudibranch" + collection = "cuttlefish" + engine = "mussel" + expected = "projects/{project}/locations/{location}/collections/{collection}/engines/{engine}".format( + project=project, + location=location, + collection=collection, + engine=engine, + ) + actual = RecommendationServiceClient.engine_path( + project, location, collection, engine + ) + assert expected == actual + + +def test_parse_engine_path(): + expected = { + "project": "winkle", + "location": "nautilus", + "collection": "scallop", + "engine": "abalone", + } + path = RecommendationServiceClient.engine_path(**expected) + + # Check that the path construction is reversible. 
+ actual = RecommendationServiceClient.parse_engine_path(path) + assert expected == actual + + def test_serving_config_path(): - project = "scallop" - location = "abalone" - data_store = "squid" - serving_config = "clam" + project = "squid" + location = "clam" + data_store = "whelk" + serving_config = "octopus" expected = "projects/{project}/locations/{location}/dataStores/{data_store}/servingConfigs/{serving_config}".format( project=project, location=location, @@ -2341,10 +2399,10 @@ def test_serving_config_path(): def test_parse_serving_config_path(): expected = { - "project": "whelk", - "location": "octopus", - "data_store": "oyster", - "serving_config": "nudibranch", + "project": "oyster", + "location": "nudibranch", + "data_store": "cuttlefish", + "serving_config": "mussel", } path = RecommendationServiceClient.serving_config_path(**expected) @@ -2354,7 +2412,7 @@ def test_parse_serving_config_path(): def test_common_billing_account_path(): - billing_account = "cuttlefish" + billing_account = "winkle" expected = "billingAccounts/{billing_account}".format( billing_account=billing_account, ) @@ -2364,7 +2422,7 @@ def test_common_billing_account_path(): def test_parse_common_billing_account_path(): expected = { - "billing_account": "mussel", + "billing_account": "nautilus", } path = RecommendationServiceClient.common_billing_account_path(**expected) @@ -2374,7 +2432,7 @@ def test_parse_common_billing_account_path(): def test_common_folder_path(): - folder = "winkle" + folder = "scallop" expected = "folders/{folder}".format( folder=folder, ) @@ -2384,7 +2442,7 @@ def test_common_folder_path(): def test_parse_common_folder_path(): expected = { - "folder": "nautilus", + "folder": "abalone", } path = RecommendationServiceClient.common_folder_path(**expected) @@ -2394,7 +2452,7 @@ def test_parse_common_folder_path(): def test_common_organization_path(): - organization = "scallop" + organization = "squid" expected = "organizations/{organization}".format( organization=organization, ) @@ -2404,7 +2462,7 @@ def test_common_organization_path(): def test_parse_common_organization_path(): expected = { - "organization": "abalone", + "organization": "clam", } path = RecommendationServiceClient.common_organization_path(**expected) @@ -2414,7 +2472,7 @@ def test_parse_common_organization_path(): def test_common_project_path(): - project = "squid" + project = "whelk" expected = "projects/{project}".format( project=project, ) @@ -2424,7 +2482,7 @@ def test_common_project_path(): def test_parse_common_project_path(): expected = { - "project": "clam", + "project": "octopus", } path = RecommendationServiceClient.common_project_path(**expected) @@ -2434,8 +2492,8 @@ def test_parse_common_project_path(): def test_common_location_path(): - project = "whelk" - location = "octopus" + project = "oyster" + location = "nudibranch" expected = "projects/{project}/locations/{location}".format( project=project, location=location, @@ -2446,8 +2504,8 @@ def test_common_location_path(): def test_parse_common_location_path(): expected = { - "project": "oyster", - "location": "nudibranch", + "project": "cuttlefish", + "location": "mussel", } path = RecommendationServiceClient.common_location_path(**expected) @@ -2493,6 +2551,69 @@ async def test_transport_close_async(): close.assert_called_once() +def test_cancel_operation_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.CancelOperationRequest +): + client = RecommendationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + 
transport=transport, + ) + + request = request_type() + request = json_format.ParseDict( + { + "name": "projects/sample1/locations/sample2/collections/sample3/dataStores/sample4/branches/sample5/operations/sample6" + }, + request, + ) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.cancel_operation(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.CancelOperationRequest, + dict, + ], +) +def test_cancel_operation_rest(request_type): + client = RecommendationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = { + "name": "projects/sample1/locations/sample2/collections/sample3/dataStores/sample4/branches/sample5/operations/sample6" + } + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "{}" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.cancel_operation(request) + + # Establish that the response is the type that we expect. + assert response is None + + def test_get_operation_rest_bad_request( transport: str = "rest", request_type=operations_pb2.GetOperationRequest ): @@ -2619,6 +2740,145 @@ def test_list_operations_rest(request_type): assert isinstance(response, operations_pb2.ListOperationsResponse) +def test_cancel_operation(transport: str = "grpc"): + client = RecommendationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.CancelOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_cancel_operation_async(transport: str = "grpc_asyncio"): + client = RecommendationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.CancelOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +def test_cancel_operation_field_headers(): + client = RecommendationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.CancelOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + call.return_value = None + + client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_cancel_operation_field_headers_async(): + client = RecommendationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.CancelOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_cancel_operation_from_dict(): + client = RecommendationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + + response = client.cancel_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_cancel_operation_from_dict_async(): + client = RecommendationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.cancel_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + def test_get_operation(transport: str = "grpc"): client = RecommendationServiceClient( credentials=ga_credentials.AnonymousCredentials(), diff --git a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1beta/test_schema_service.py b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1beta/test_schema_service.py index aa284f8138ba..28090dbdfcf0 100644 --- a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1beta/test_schema_service.py +++ b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1beta/test_schema_service.py @@ -1899,13 +1899,13 @@ def test_list_schemas_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_schemas(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -5018,6 +5018,7 @@ def test_schema_service_base_transport(): "update_schema", "delete_schema", "get_operation", + "cancel_operation", "list_operations", ) for method in methods: @@ -5669,6 +5670,69 @@ async def test_transport_close_async(): close.assert_called_once() +def test_cancel_operation_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.CancelOperationRequest +): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict( + { + "name": "projects/sample1/locations/sample2/collections/sample3/dataStores/sample4/branches/sample5/operations/sample6" + }, + request, + ) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.cancel_operation(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.CancelOperationRequest, + dict, + ], +) +def test_cancel_operation_rest(request_type): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = { + "name": "projects/sample1/locations/sample2/collections/sample3/dataStores/sample4/branches/sample5/operations/sample6" + } + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "{}" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.cancel_operation(request) + + # Establish that the response is the type that we expect. 
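+        # CancelOperation returns google.protobuf.Empty on the wire (the "{}" body above), which the client surfaces as None.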
+ assert response is None + + def test_get_operation_rest_bad_request( transport: str = "rest", request_type=operations_pb2.GetOperationRequest ): @@ -5795,6 +5859,145 @@ def test_list_operations_rest(request_type): assert isinstance(response, operations_pb2.ListOperationsResponse) +def test_cancel_operation(transport: str = "grpc"): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.CancelOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_cancel_operation_async(transport: str = "grpc_asyncio"): + client = SchemaServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.CancelOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +def test_cancel_operation_field_headers(): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.CancelOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + call.return_value = None + + client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_cancel_operation_field_headers_async(): + client = SchemaServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.CancelOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_cancel_operation_from_dict(): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + + response = client.cancel_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_cancel_operation_from_dict_async(): + client = SchemaServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.cancel_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + def test_get_operation(transport: str = "grpc"): client = SchemaServiceClient( credentials=ga_credentials.AnonymousCredentials(), diff --git a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1beta/test_search_service.py b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1beta/test_search_service.py index 1ed1e935f6ad..72909d4f915b 100644 --- a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1beta/test_search_service.py +++ b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1beta/test_search_service.py @@ -1478,13 +1478,13 @@ def test_search_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("serving_config", ""),)), ) pager = client.search(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -2090,6 +2090,7 @@ def test_search_service_base_transport(): methods = ( "search", "get_operation", + "cancel_operation", "list_operations", ) for method in methods: @@ -2738,6 +2739,69 @@ async def test_transport_close_async(): close.assert_called_once() +def test_cancel_operation_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.CancelOperationRequest +): + client = SearchServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict( + { + "name": "projects/sample1/locations/sample2/collections/sample3/dataStores/sample4/branches/sample5/operations/sample6" + }, + request, + ) + + # Mock the http request call within the method and fake a BadRequest error. 
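+    # A mocked HTTP 400 response should surface to the caller as core_exceptions.BadRequest.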
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.cancel_operation(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.CancelOperationRequest, + dict, + ], +) +def test_cancel_operation_rest(request_type): + client = SearchServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = { + "name": "projects/sample1/locations/sample2/collections/sample3/dataStores/sample4/branches/sample5/operations/sample6" + } + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "{}" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.cancel_operation(request) + + # Establish that the response is the type that we expect. + assert response is None + + def test_get_operation_rest_bad_request( transport: str = "rest", request_type=operations_pb2.GetOperationRequest ): @@ -2864,6 +2928,145 @@ def test_list_operations_rest(request_type): assert isinstance(response, operations_pb2.ListOperationsResponse) +def test_cancel_operation(transport: str = "grpc"): + client = SearchServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.CancelOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_cancel_operation_async(transport: str = "grpc_asyncio"): + client = SearchServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.CancelOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert response is None + + +def test_cancel_operation_field_headers(): + client = SearchServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.CancelOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + call.return_value = None + + client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_cancel_operation_field_headers_async(): + client = SearchServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.CancelOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_cancel_operation_from_dict(): + client = SearchServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + + response = client.cancel_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_cancel_operation_from_dict_async(): + client = SearchServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.cancel_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + def test_get_operation(transport: str = "grpc"): client = SearchServiceClient( credentials=ga_credentials.AnonymousCredentials(), diff --git a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1beta/test_search_tuning_service.py b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1beta/test_search_tuning_service.py index 081b70db4294..18a0b9ff4a83 100644 --- a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1beta/test_search_tuning_service.py +++ b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1beta/test_search_tuning_service.py @@ -59,6 +59,7 @@ transports, ) from google.cloud.discoveryengine_v1beta.types import ( + custom_tuning_model, import_config, search_tuning_service, ) @@ -1267,6 +1268,7 @@ def test_train_custom_model_non_empty_request_with_auto_populated_field(): request = search_tuning_service.TrainCustomModelRequest( data_store="data_store_value", model_type="model_type_value", + model_id="model_id_value", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1282,6 +1284,7 @@ def test_train_custom_model_non_empty_request_with_auto_populated_field(): assert args[0] == search_tuning_service.TrainCustomModelRequest( data_store="data_store_value", model_type="model_type_value", + model_id="model_id_value", ) @@ -1505,6 +1508,304 @@ async def test_train_custom_model_field_headers_async(): ) in kw["metadata"] +@pytest.mark.parametrize( + "request_type", + [ + search_tuning_service.ListCustomModelsRequest, + dict, + ], +) +def test_list_custom_models(request_type, transport: str = "grpc"): + client = SearchTuningServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_custom_models), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = search_tuning_service.ListCustomModelsResponse() + response = client.list_custom_models(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = search_tuning_service.ListCustomModelsRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, search_tuning_service.ListCustomModelsResponse) + + +def test_list_custom_models_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = SearchTuningServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_custom_models), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.list_custom_models() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == search_tuning_service.ListCustomModelsRequest() + + +def test_list_custom_models_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = SearchTuningServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = search_tuning_service.ListCustomModelsRequest( + data_store="data_store_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_custom_models), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.list_custom_models(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == search_tuning_service.ListCustomModelsRequest( + data_store="data_store_value", + ) + + +def test_list_custom_models_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = SearchTuningServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.list_custom_models in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.list_custom_models + ] = mock_rpc + request = {} + client.list_custom_models(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_custom_models(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_list_custom_models_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = SearchTuningServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_custom_models), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
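+        # FakeUnaryUnaryCall wraps the response so the mocked stub behaves like an awaitable gRPC call in the asyncio client.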
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + search_tuning_service.ListCustomModelsResponse() + ) + response = await client.list_custom_models() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == search_tuning_service.ListCustomModelsRequest() + + +@pytest.mark.asyncio +async def test_list_custom_models_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = SearchTuningServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.list_custom_models + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[ + client._client._transport.list_custom_models + ] = mock_object + + request = {} + await client.list_custom_models(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + await client.list_custom_models(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + +@pytest.mark.asyncio +async def test_list_custom_models_async( + transport: str = "grpc_asyncio", + request_type=search_tuning_service.ListCustomModelsRequest, +): + client = SearchTuningServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_custom_models), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + search_tuning_service.ListCustomModelsResponse() + ) + response = await client.list_custom_models(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = search_tuning_service.ListCustomModelsRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, search_tuning_service.ListCustomModelsResponse) + + +@pytest.mark.asyncio +async def test_list_custom_models_async_from_dict(): + await test_list_custom_models_async(request_type=dict) + + +def test_list_custom_models_field_headers(): + client = SearchTuningServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = search_tuning_service.ListCustomModelsRequest() + + request.data_store = "data_store_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
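+    # For ListCustomModels the routing header is keyed on data_store rather than name, as asserted below.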
+ with mock.patch.object( + type(client.transport.list_custom_models), "__call__" + ) as call: + call.return_value = search_tuning_service.ListCustomModelsResponse() + client.list_custom_models(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "data_store=data_store_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_custom_models_field_headers_async(): + client = SearchTuningServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = search_tuning_service.ListCustomModelsRequest() + + request.data_store = "data_store_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_custom_models), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + search_tuning_service.ListCustomModelsResponse() + ) + await client.list_custom_models(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "data_store=data_store_value", + ) in kw["metadata"] + + @pytest.mark.parametrize( "request_type", [ @@ -1512,7 +1813,260 @@ async def test_train_custom_model_field_headers_async(): dict, ], ) -def test_train_custom_model_rest(request_type): +def test_train_custom_model_rest(request_type): + client = SearchTuningServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "data_store": "projects/sample1/locations/sample2/collections/sample3/dataStores/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.train_custom_model(request) + + # Establish that the response is the type that we expect. 
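+        # train_custom_model is a long-running operation, so the returned future exposes the underlying Operation (named "operations/spam" in this fake).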
+ assert response.operation.name == "operations/spam" + + +def test_train_custom_model_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = SearchTuningServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.train_custom_model in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.train_custom_model + ] = mock_rpc + + request = {} + client.train_custom_model(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.train_custom_model(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_train_custom_model_rest_required_fields( + request_type=search_tuning_service.TrainCustomModelRequest, +): + transport_class = transports.SearchTuningServiceRestTransport + + request_init = {} + request_init["data_store"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).train_custom_model._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["dataStore"] = "data_store_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).train_custom_model._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "dataStore" in jsonified_request + assert jsonified_request["dataStore"] == "data_store_value" + + client = SearchTuningServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.train_custom_model(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_train_custom_model_rest_unset_required_fields(): + transport = transports.SearchTuningServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.train_custom_model._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("dataStore",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_train_custom_model_rest_interceptors(null_interceptor): + transport = transports.SearchTuningServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.SearchTuningServiceRestInterceptor(), + ) + client = SearchTuningServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.SearchTuningServiceRestInterceptor, "post_train_custom_model" + ) as post, mock.patch.object( + transports.SearchTuningServiceRestInterceptor, "pre_train_custom_model" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = search_tuning_service.TrainCustomModelRequest.pb( + search_tuning_service.TrainCustomModelRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = search_tuning_service.TrainCustomModelRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.train_custom_model( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_train_custom_model_rest_bad_request( + transport: str = "rest", request_type=search_tuning_service.TrainCustomModelRequest +): + client = SearchTuningServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "data_store": "projects/sample1/locations/sample2/collections/sample3/dataStores/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
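+    # The mocked session returns HTTP 400, which the client is expected to surface as core_exceptions.BadRequest.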
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.train_custom_model(request) + + +def test_train_custom_model_rest_error(): + client = SearchTuningServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + search_tuning_service.ListCustomModelsRequest, + dict, + ], +) +def test_list_custom_models_rest(request_type): client = SearchTuningServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -1527,22 +2081,24 @@ def test_train_custom_model_rest(request_type): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = search_tuning_service.ListCustomModelsResponse() # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 + # Convert return value to protobuf type + return_value = search_tuning_service.ListCustomModelsResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.train_custom_model(request) + response = client.list_custom_models(request) # Establish that the response is the type that we expect. - assert response.operation.name == "operations/spam" + assert isinstance(response, search_tuning_service.ListCustomModelsResponse) -def test_train_custom_model_rest_use_cached_wrapped_rpc(): +def test_list_custom_models_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -1557,7 +2113,7 @@ def test_train_custom_model_rest_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.train_custom_model in client._transport._wrapped_methods + client._transport.list_custom_models in client._transport._wrapped_methods ) # Replace cached wrapped function with mock @@ -1566,28 +2122,24 @@ def test_train_custom_model_rest_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.train_custom_model + client._transport.list_custom_models ] = mock_rpc request = {} - client.train_custom_model(request) + client.list_custom_models(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.train_custom_model(request) + client.list_custom_models(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_train_custom_model_rest_required_fields( - request_type=search_tuning_service.TrainCustomModelRequest, +def test_list_custom_models_rest_required_fields( + request_type=search_tuning_service.ListCustomModelsRequest, ): transport_class = transports.SearchTuningServiceRestTransport @@ -1603,7 +2155,7 @@ def test_train_custom_model_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).train_custom_model._get_unset_required_fields(jsonified_request) + ).list_custom_models._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present @@ -1612,7 +2164,7 @@ def test_train_custom_model_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).train_custom_model._get_unset_required_fields(jsonified_request) + ).list_custom_models._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone @@ -1626,7 +2178,7 @@ def test_train_custom_model_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = search_tuning_service.ListCustomModelsResponse() # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -1638,37 +2190,41 @@ def test_train_custom_model_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "post", + "method": "get", "query_params": pb_request, } - transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = search_tuning_service.ListCustomModelsResponse.pb( + return_value + ) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.train_custom_model(request) + response = client.list_custom_models(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_train_custom_model_rest_unset_required_fields(): +def test_list_custom_models_rest_unset_required_fields(): transport = transports.SearchTuningServiceRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.train_custom_model._get_unset_required_fields({}) + unset_fields = transport.list_custom_models._get_unset_required_fields({}) assert set(unset_fields) == (set(()) & set(("dataStore",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_train_custom_model_rest_interceptors(null_interceptor): +def test_list_custom_models_rest_interceptors(null_interceptor): transport = transports.SearchTuningServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -1681,16 +2237,14 @@ def test_train_custom_model_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - operation.Operation, "_set_result_from_operation" - ), mock.patch.object( - transports.SearchTuningServiceRestInterceptor, "post_train_custom_model" + transports.SearchTuningServiceRestInterceptor, "post_list_custom_models" ) as post, mock.patch.object( - transports.SearchTuningServiceRestInterceptor, "pre_train_custom_model" + transports.SearchTuningServiceRestInterceptor, "pre_list_custom_models" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = search_tuning_service.TrainCustomModelRequest.pb( - search_tuning_service.TrainCustomModelRequest() + pb_message = search_tuning_service.ListCustomModelsRequest.pb( + search_tuning_service.ListCustomModelsRequest() ) transcode.return_value = { "method": "post", @@ -1702,19 +2256,21 @@ def test_train_custom_model_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = json_format.MessageToJson( - operations_pb2.Operation() + req.return_value._content = ( + search_tuning_service.ListCustomModelsResponse.to_json( + search_tuning_service.ListCustomModelsResponse() + ) ) - request = search_tuning_service.TrainCustomModelRequest() + request = search_tuning_service.ListCustomModelsRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() + post.return_value = search_tuning_service.ListCustomModelsResponse() - client.train_custom_model( + client.list_custom_models( request, metadata=[ ("key", "val"), @@ -1726,8 
+2282,8 @@ def test_train_custom_model_rest_interceptors(null_interceptor): post.assert_called_once() -def test_train_custom_model_rest_bad_request( - transport: str = "rest", request_type=search_tuning_service.TrainCustomModelRequest +def test_list_custom_models_rest_bad_request( + transport: str = "rest", request_type=search_tuning_service.ListCustomModelsRequest ): client = SearchTuningServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -1749,10 +2305,10 @@ def test_train_custom_model_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.train_custom_model(request) + client.list_custom_models(request) -def test_train_custom_model_rest_error(): +def test_list_custom_models_rest_error(): client = SearchTuningServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -1898,7 +2454,9 @@ def test_search_tuning_service_base_transport(): # raise NotImplementedError. methods = ( "train_custom_model", + "list_custom_models", "get_operation", + "cancel_operation", "list_operations", ) for method in methods: @@ -2185,6 +2743,9 @@ def test_search_tuning_service_client_transport_session_collision(transport_name session1 = client1.transport.train_custom_model._session session2 = client2.transport.train_custom_model._session assert session1 != session2 + session1 = client1.transport.list_custom_models._session + session2 = client2.transport.list_custom_models._session + assert session1 != session2 def test_search_tuning_service_grpc_transport_channel(): @@ -2347,10 +2908,41 @@ def test_search_tuning_service_grpc_lro_async_client(): assert transport.operations_client is transport.operations_client -def test_data_store_path(): +def test_custom_tuning_model_path(): project = "squid" location = "clam" data_store = "whelk" + custom_tuning_model = "octopus" + expected = "projects/{project}/locations/{location}/dataStores/{data_store}/customTuningModels/{custom_tuning_model}".format( + project=project, + location=location, + data_store=data_store, + custom_tuning_model=custom_tuning_model, + ) + actual = SearchTuningServiceClient.custom_tuning_model_path( + project, location, data_store, custom_tuning_model + ) + assert expected == actual + + +def test_parse_custom_tuning_model_path(): + expected = { + "project": "oyster", + "location": "nudibranch", + "data_store": "cuttlefish", + "custom_tuning_model": "mussel", + } + path = SearchTuningServiceClient.custom_tuning_model_path(**expected) + + # Check that the path construction is reversible. 
+ actual = SearchTuningServiceClient.parse_custom_tuning_model_path(path) + assert expected == actual + + +def test_data_store_path(): + project = "winkle" + location = "nautilus" + data_store = "scallop" expected = "projects/{project}/locations/{location}/dataStores/{data_store}".format( project=project, location=location, @@ -2362,9 +2954,9 @@ def test_data_store_path(): def test_parse_data_store_path(): expected = { - "project": "octopus", - "location": "oyster", - "data_store": "nudibranch", + "project": "abalone", + "location": "squid", + "data_store": "clam", } path = SearchTuningServiceClient.data_store_path(**expected) @@ -2374,7 +2966,7 @@ def test_parse_data_store_path(): def test_common_billing_account_path(): - billing_account = "cuttlefish" + billing_account = "whelk" expected = "billingAccounts/{billing_account}".format( billing_account=billing_account, ) @@ -2384,7 +2976,7 @@ def test_common_billing_account_path(): def test_parse_common_billing_account_path(): expected = { - "billing_account": "mussel", + "billing_account": "octopus", } path = SearchTuningServiceClient.common_billing_account_path(**expected) @@ -2394,7 +2986,7 @@ def test_parse_common_billing_account_path(): def test_common_folder_path(): - folder = "winkle" + folder = "oyster" expected = "folders/{folder}".format( folder=folder, ) @@ -2404,7 +2996,7 @@ def test_common_folder_path(): def test_parse_common_folder_path(): expected = { - "folder": "nautilus", + "folder": "nudibranch", } path = SearchTuningServiceClient.common_folder_path(**expected) @@ -2414,7 +3006,7 @@ def test_parse_common_folder_path(): def test_common_organization_path(): - organization = "scallop" + organization = "cuttlefish" expected = "organizations/{organization}".format( organization=organization, ) @@ -2424,7 +3016,7 @@ def test_common_organization_path(): def test_parse_common_organization_path(): expected = { - "organization": "abalone", + "organization": "mussel", } path = SearchTuningServiceClient.common_organization_path(**expected) @@ -2434,7 +3026,7 @@ def test_parse_common_organization_path(): def test_common_project_path(): - project = "squid" + project = "winkle" expected = "projects/{project}".format( project=project, ) @@ -2444,7 +3036,7 @@ def test_common_project_path(): def test_parse_common_project_path(): expected = { - "project": "clam", + "project": "nautilus", } path = SearchTuningServiceClient.common_project_path(**expected) @@ -2454,8 +3046,8 @@ def test_parse_common_project_path(): def test_common_location_path(): - project = "whelk" - location = "octopus" + project = "scallop" + location = "abalone" expected = "projects/{project}/locations/{location}".format( project=project, location=location, @@ -2466,8 +3058,8 @@ def test_common_location_path(): def test_parse_common_location_path(): expected = { - "project": "oyster", - "location": "nudibranch", + "project": "squid", + "location": "clam", } path = SearchTuningServiceClient.common_location_path(**expected) @@ -2513,6 +3105,69 @@ async def test_transport_close_async(): close.assert_called_once() +def test_cancel_operation_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.CancelOperationRequest +): + client = SearchTuningServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict( + { + "name": "projects/sample1/locations/sample2/collections/sample3/dataStores/sample4/branches/sample5/operations/sample6" + }, + request, + ) + + # Mock the 
http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.cancel_operation(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.CancelOperationRequest, + dict, + ], +) +def test_cancel_operation_rest(request_type): + client = SearchTuningServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = { + "name": "projects/sample1/locations/sample2/collections/sample3/dataStores/sample4/branches/sample5/operations/sample6" + } + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "{}" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.cancel_operation(request) + + # Establish that the response is the type that we expect. + assert response is None + + def test_get_operation_rest_bad_request( transport: str = "rest", request_type=operations_pb2.GetOperationRequest ): @@ -2639,6 +3294,145 @@ def test_list_operations_rest(request_type): assert isinstance(response, operations_pb2.ListOperationsResponse) +def test_cancel_operation(transport: str = "grpc"): + client = SearchTuningServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.CancelOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_cancel_operation_async(transport: str = "grpc_asyncio"): + client = SearchTuningServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.CancelOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. 
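+        # The request object should be forwarded to the stub unchanged as the first positional argument.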
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +def test_cancel_operation_field_headers(): + client = SearchTuningServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.CancelOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + call.return_value = None + + client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_cancel_operation_field_headers_async(): + client = SearchTuningServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.CancelOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_cancel_operation_from_dict(): + client = SearchTuningServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + + response = client.cancel_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_cancel_operation_from_dict_async(): + client = SearchTuningServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. 
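+        # CancelOperation has an empty (google.protobuf.Empty) response, so the faked async unary call simply yields None.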
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.cancel_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + def test_get_operation(transport: str = "grpc"): client = SearchTuningServiceClient( credentials=ga_credentials.AnonymousCredentials(), diff --git a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1beta/test_serving_config_service.py b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1beta/test_serving_config_service.py index 7aee8c05d97b..81e545de6116 100644 --- a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1beta/test_serving_config_service.py +++ b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1beta/test_serving_config_service.py @@ -2571,13 +2571,13 @@ def test_list_serving_configs_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_serving_configs(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -4101,6 +4101,7 @@ def test_serving_config_service_base_transport(): "get_serving_config", "list_serving_configs", "get_operation", + "cancel_operation", "list_operations", ) for method in methods: @@ -4670,6 +4671,69 @@ async def test_transport_close_async(): close.assert_called_once() +def test_cancel_operation_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.CancelOperationRequest +): + client = ServingConfigServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict( + { + "name": "projects/sample1/locations/sample2/collections/sample3/dataStores/sample4/branches/sample5/operations/sample6" + }, + request, + ) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.cancel_operation(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.CancelOperationRequest, + dict, + ], +) +def test_cancel_operation_rest(request_type): + client = ServingConfigServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = { + "name": "projects/sample1/locations/sample2/collections/sample3/dataStores/sample4/branches/sample5/operations/sample6" + } + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "{}" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.cancel_operation(request) + + # Establish that the response is the type that we expect. 
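+    # The mocked REST body is an empty JSON object, which cancel_operation surfaces to the caller as None.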
+ assert response is None + + def test_get_operation_rest_bad_request( transport: str = "rest", request_type=operations_pb2.GetOperationRequest ): @@ -4796,6 +4860,145 @@ def test_list_operations_rest(request_type): assert isinstance(response, operations_pb2.ListOperationsResponse) +def test_cancel_operation(transport: str = "grpc"): + client = ServingConfigServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.CancelOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_cancel_operation_async(transport: str = "grpc_asyncio"): + client = ServingConfigServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.CancelOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +def test_cancel_operation_field_headers(): + client = ServingConfigServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.CancelOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + call.return_value = None + + client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_cancel_operation_field_headers_async(): + client = ServingConfigServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.CancelOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. 
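+        # Patching the transport's cancel_operation call makes the outgoing metadata observable, so the "name=locations" routing header can be asserted below.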
+ with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_cancel_operation_from_dict(): + client = ServingConfigServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + + response = client.cancel_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_cancel_operation_from_dict_async(): + client = ServingConfigServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.cancel_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + def test_get_operation(transport: str = "grpc"): client = ServingConfigServiceClient( credentials=ga_credentials.AnonymousCredentials(), diff --git a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1beta/test_site_search_engine_service.py b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1beta/test_site_search_engine_service.py index 6256dba8bcf7..59b6762acf50 100644 --- a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1beta/test_site_search_engine_service.py +++ b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1beta/test_site_search_engine_service.py @@ -2332,6 +2332,7 @@ def test_get_target_site(request_type, transport: str = "grpc"): type_=site_search_engine.TargetSite.Type.INCLUDE, exact_match=True, generated_uri_pattern="generated_uri_pattern_value", + root_domain_uri="root_domain_uri_value", indexing_status=site_search_engine.TargetSite.IndexingStatus.PENDING, ) response = client.get_target_site(request) @@ -2349,6 +2350,7 @@ def test_get_target_site(request_type, transport: str = "grpc"): assert response.type_ == site_search_engine.TargetSite.Type.INCLUDE assert response.exact_match is True assert response.generated_uri_pattern == "generated_uri_pattern_value" + assert response.root_domain_uri == "root_domain_uri_value" assert ( response.indexing_status == site_search_engine.TargetSite.IndexingStatus.PENDING ) @@ -2455,6 +2457,7 @@ async def test_get_target_site_empty_call_async(): type_=site_search_engine.TargetSite.Type.INCLUDE, exact_match=True, generated_uri_pattern="generated_uri_pattern_value", + root_domain_uri="root_domain_uri_value", indexing_status=site_search_engine.TargetSite.IndexingStatus.PENDING, ) ) @@ -2534,6 +2537,7 @@ async def test_get_target_site_async( type_=site_search_engine.TargetSite.Type.INCLUDE, exact_match=True, generated_uri_pattern="generated_uri_pattern_value", + root_domain_uri="root_domain_uri_value", 
indexing_status=site_search_engine.TargetSite.IndexingStatus.PENDING, ) ) @@ -2552,6 +2556,7 @@ async def test_get_target_site_async( assert response.type_ == site_search_engine.TargetSite.Type.INCLUDE assert response.exact_match is True assert response.generated_uri_pattern == "generated_uri_pattern_value" + assert response.root_domain_uri == "root_domain_uri_value" assert ( response.indexing_status == site_search_engine.TargetSite.IndexingStatus.PENDING ) @@ -3921,13 +3926,13 @@ def test_list_target_sites_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_target_sites(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -5652,13 +5657,13 @@ def test_fetch_domain_verification_status_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("site_search_engine", ""),)), ) pager = client.fetch_domain_verification_status(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -6149,6 +6154,7 @@ def test_create_target_site_rest(request_type): "type_": 1, "exact_match": True, "generated_uri_pattern": "generated_uri_pattern_value", + "root_domain_uri": "root_domain_uri_value", "site_verification_info": { "site_verification_state": 1, "verify_time": {"seconds": 751, "nanos": 543}, @@ -6823,6 +6829,7 @@ def test_get_target_site_rest(request_type): type_=site_search_engine.TargetSite.Type.INCLUDE, exact_match=True, generated_uri_pattern="generated_uri_pattern_value", + root_domain_uri="root_domain_uri_value", indexing_status=site_search_engine.TargetSite.IndexingStatus.PENDING, ) @@ -6844,6 +6851,7 @@ def test_get_target_site_rest(request_type): assert response.type_ == site_search_engine.TargetSite.Type.INCLUDE assert response.exact_match is True assert response.generated_uri_pattern == "generated_uri_pattern_value" + assert response.root_domain_uri == "root_domain_uri_value" assert ( response.indexing_status == site_search_engine.TargetSite.IndexingStatus.PENDING ) @@ -7142,6 +7150,7 @@ def test_update_target_site_rest(request_type): "type_": 1, "exact_match": True, "generated_uri_pattern": "generated_uri_pattern_value", + "root_domain_uri": "root_domain_uri_value", "site_verification_info": { "site_verification_state": 1, "verify_time": {"seconds": 751, "nanos": 543}, @@ -9754,6 +9763,7 @@ def test_site_search_engine_service_base_transport(): "batch_verify_target_sites", "fetch_domain_verification_status", "get_operation", + "cancel_operation", "list_operations", ) for method in methods: @@ -10436,6 +10446,69 @@ async def test_transport_close_async(): close.assert_called_once() +def test_cancel_operation_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.CancelOperationRequest +): + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict( + { + "name": "projects/sample1/locations/sample2/collections/sample3/dataStores/sample4/branches/sample5/operations/sample6" + }, + request, + ) + + # Mock the http 
request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.cancel_operation(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.CancelOperationRequest, + dict, + ], +) +def test_cancel_operation_rest(request_type): + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = { + "name": "projects/sample1/locations/sample2/collections/sample3/dataStores/sample4/branches/sample5/operations/sample6" + } + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "{}" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.cancel_operation(request) + + # Establish that the response is the type that we expect. + assert response is None + + def test_get_operation_rest_bad_request( transport: str = "rest", request_type=operations_pb2.GetOperationRequest ): @@ -10562,6 +10635,145 @@ def test_list_operations_rest(request_type): assert isinstance(response, operations_pb2.ListOperationsResponse) +def test_cancel_operation(transport: str = "grpc"): + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.CancelOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_cancel_operation_async(transport: str = "grpc_asyncio"): + client = SiteSearchEngineServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.CancelOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +def test_cancel_operation_field_headers(): + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.CancelOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + call.return_value = None + + client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_cancel_operation_field_headers_async(): + client = SiteSearchEngineServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.CancelOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_cancel_operation_from_dict(): + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + + response = client.cancel_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_cancel_operation_from_dict_async(): + client = SiteSearchEngineServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. 
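+        # The request may also be given as a plain dict; the client converts it to a CancelOperationRequest before invoking the stub.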
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.cancel_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + def test_get_operation(transport: str = "grpc"): client = SiteSearchEngineServiceClient( credentials=ga_credentials.AnonymousCredentials(), diff --git a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1beta/test_user_event_service.py b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1beta/test_user_event_service.py index b45c2bcdf15f..ea63abd70f5e 100644 --- a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1beta/test_user_event_service.py +++ b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1beta/test_user_event_service.py @@ -1189,6 +1189,8 @@ def test_write_user_event(request_type, transport: str = "grpc"): call.return_value = user_event.UserEvent( event_type="event_type_value", user_pseudo_id="user_pseudo_id_value", + engine="engine_value", + data_store="data_store_value", direct_user_request=True, session_id="session_id_value", attribution_token="attribution_token_value", @@ -1208,6 +1210,8 @@ def test_write_user_event(request_type, transport: str = "grpc"): assert isinstance(response, user_event.UserEvent) assert response.event_type == "event_type_value" assert response.user_pseudo_id == "user_pseudo_id_value" + assert response.engine == "engine_value" + assert response.data_store == "data_store_value" assert response.direct_user_request is True assert response.session_id == "session_id_value" assert response.attribution_token == "attribution_token_value" @@ -1316,6 +1320,8 @@ async def test_write_user_event_empty_call_async(): user_event.UserEvent( event_type="event_type_value", user_pseudo_id="user_pseudo_id_value", + engine="engine_value", + data_store="data_store_value", direct_user_request=True, session_id="session_id_value", attribution_token="attribution_token_value", @@ -1397,6 +1403,8 @@ async def test_write_user_event_async( user_event.UserEvent( event_type="event_type_value", user_pseudo_id="user_pseudo_id_value", + engine="engine_value", + data_store="data_store_value", direct_user_request=True, session_id="session_id_value", attribution_token="attribution_token_value", @@ -1417,6 +1425,8 @@ async def test_write_user_event_async( assert isinstance(response, user_event.UserEvent) assert response.event_type == "event_type_value" assert response.user_pseudo_id == "user_pseudo_id_value" + assert response.engine == "engine_value" + assert response.data_store == "data_store_value" assert response.direct_user_request is True assert response.session_id == "session_id_value" assert response.attribution_token == "attribution_token_value" @@ -2129,6 +2139,8 @@ def test_write_user_event_rest(request_type): request_init["user_event"] = { "event_type": "event_type_value", "user_pseudo_id": "user_pseudo_id_value", + "engine": "engine_value", + "data_store": "data_store_value", "event_time": {"seconds": 751, "nanos": 543}, "user_info": {"user_id": "user_id_value", "user_agent": "user_agent_value"}, "direct_user_request": True, @@ -2256,6 +2268,8 @@ def get_message_fields(field): return_value = user_event.UserEvent( event_type="event_type_value", user_pseudo_id="user_pseudo_id_value", + engine="engine_value", + data_store="data_store_value", direct_user_request=True, session_id="session_id_value", attribution_token="attribution_token_value", @@ -2279,6 +2293,8 @@ def get_message_fields(field): assert 
isinstance(response, user_event.UserEvent) assert response.event_type == "event_type_value" assert response.user_pseudo_id == "user_pseudo_id_value" + assert response.engine == "engine_value" + assert response.data_store == "data_store_value" assert response.direct_user_request is True assert response.session_id == "session_id_value" assert response.attribution_token == "attribution_token_value" @@ -2352,6 +2368,8 @@ def test_write_user_event_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() ).write_user_event._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("write_async",)) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone @@ -2407,7 +2425,7 @@ def test_write_user_event_rest_unset_required_fields(): unset_fields = transport.write_user_event._get_unset_required_fields({}) assert set(unset_fields) == ( - set(()) + set(("writeAsync",)) & set( ( "parent", @@ -3175,6 +3193,7 @@ def test_user_event_service_base_transport(): "collect_user_event", "import_user_events", "get_operation", + "cancel_operation", "list_operations", ) for method in methods: @@ -3687,8 +3706,37 @@ def test_parse_document_path(): assert expected == actual +def test_engine_path(): + project = "oyster" + location = "nudibranch" + collection = "cuttlefish" + engine = "mussel" + expected = "projects/{project}/locations/{location}/collections/{collection}/engines/{engine}".format( + project=project, + location=location, + collection=collection, + engine=engine, + ) + actual = UserEventServiceClient.engine_path(project, location, collection, engine) + assert expected == actual + + +def test_parse_engine_path(): + expected = { + "project": "winkle", + "location": "nautilus", + "collection": "scallop", + "engine": "abalone", + } + path = UserEventServiceClient.engine_path(**expected) + + # Check that the path construction is reversible. 
+ actual = UserEventServiceClient.parse_engine_path(path) + assert expected == actual + + def test_common_billing_account_path(): - billing_account = "oyster" + billing_account = "squid" expected = "billingAccounts/{billing_account}".format( billing_account=billing_account, ) @@ -3698,7 +3746,7 @@ def test_common_billing_account_path(): def test_parse_common_billing_account_path(): expected = { - "billing_account": "nudibranch", + "billing_account": "clam", } path = UserEventServiceClient.common_billing_account_path(**expected) @@ -3708,7 +3756,7 @@ def test_parse_common_billing_account_path(): def test_common_folder_path(): - folder = "cuttlefish" + folder = "whelk" expected = "folders/{folder}".format( folder=folder, ) @@ -3718,7 +3766,7 @@ def test_common_folder_path(): def test_parse_common_folder_path(): expected = { - "folder": "mussel", + "folder": "octopus", } path = UserEventServiceClient.common_folder_path(**expected) @@ -3728,7 +3776,7 @@ def test_parse_common_folder_path(): def test_common_organization_path(): - organization = "winkle" + organization = "oyster" expected = "organizations/{organization}".format( organization=organization, ) @@ -3738,7 +3786,7 @@ def test_common_organization_path(): def test_parse_common_organization_path(): expected = { - "organization": "nautilus", + "organization": "nudibranch", } path = UserEventServiceClient.common_organization_path(**expected) @@ -3748,7 +3796,7 @@ def test_parse_common_organization_path(): def test_common_project_path(): - project = "scallop" + project = "cuttlefish" expected = "projects/{project}".format( project=project, ) @@ -3758,7 +3806,7 @@ def test_common_project_path(): def test_parse_common_project_path(): expected = { - "project": "abalone", + "project": "mussel", } path = UserEventServiceClient.common_project_path(**expected) @@ -3768,8 +3816,8 @@ def test_parse_common_project_path(): def test_common_location_path(): - project = "squid" - location = "clam" + project = "winkle" + location = "nautilus" expected = "projects/{project}/locations/{location}".format( project=project, location=location, @@ -3780,8 +3828,8 @@ def test_common_location_path(): def test_parse_common_location_path(): expected = { - "project": "whelk", - "location": "octopus", + "project": "scallop", + "location": "abalone", } path = UserEventServiceClient.common_location_path(**expected) @@ -3827,6 +3875,69 @@ async def test_transport_close_async(): close.assert_called_once() +def test_cancel_operation_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.CancelOperationRequest +): + client = UserEventServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict( + { + "name": "projects/sample1/locations/sample2/collections/sample3/dataStores/sample4/branches/sample5/operations/sample6" + }, + request, + ) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.cancel_operation(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.CancelOperationRequest, + dict, + ], +) +def test_cancel_operation_rest(request_type): + client = UserEventServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = { + "name": "projects/sample1/locations/sample2/collections/sample3/dataStores/sample4/branches/sample5/operations/sample6" + } + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "{}" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.cancel_operation(request) + + # Establish that the response is the type that we expect. + assert response is None + + def test_get_operation_rest_bad_request( transport: str = "rest", request_type=operations_pb2.GetOperationRequest ): @@ -3953,6 +4064,145 @@ def test_list_operations_rest(request_type): assert isinstance(response, operations_pb2.ListOperationsResponse) +def test_cancel_operation(transport: str = "grpc"): + client = UserEventServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.CancelOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_cancel_operation_async(transport: str = "grpc_asyncio"): + client = UserEventServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.CancelOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert response is None + + +def test_cancel_operation_field_headers(): + client = UserEventServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.CancelOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + call.return_value = None + + client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_cancel_operation_field_headers_async(): + client = UserEventServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.CancelOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_cancel_operation_from_dict(): + client = UserEventServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + + response = client.cancel_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_cancel_operation_from_dict_async(): + client = UserEventServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.cancel_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + def test_get_operation(transport: str = "grpc"): client = UserEventServiceClient( credentials=ga_credentials.AnonymousCredentials(), diff --git a/packages/google-cloud-dlp/CHANGELOG.md b/packages/google-cloud-dlp/CHANGELOG.md index 574ef5ae0129..9368f304351a 100644 --- a/packages/google-cloud-dlp/CHANGELOG.md +++ b/packages/google-cloud-dlp/CHANGELOG.md @@ -4,6 +4,18 @@ [1]: https://pypi.org/project/google-cloud-dlp/#history +## [3.18.0](https://github.com/googleapis/google-cloud-python/compare/google-cloud-dlp-v3.17.0...google-cloud-dlp-v3.18.0) (2024-05-27) + + +### Features + +* add secrets discovery support ([dec2866](https://github.com/googleapis/google-cloud-python/commit/dec2866236f474691c432c4950dc7bf9ba33dac2)) + + +### Documentation + +* Updated method documentation ([dec2866](https://github.com/googleapis/google-cloud-python/commit/dec2866236f474691c432c4950dc7bf9ba33dac2)) + ## [3.17.0](https://github.com/googleapis/google-cloud-python/compare/google-cloud-dlp-v3.16.0...google-cloud-dlp-v3.17.0) (2024-05-07) diff --git a/packages/google-cloud-dlp/google/cloud/dlp/__init__.py b/packages/google-cloud-dlp/google/cloud/dlp/__init__.py index 9ca2d8907cd5..d53527adb977 100644 --- a/packages/google-cloud-dlp/google/cloud/dlp/__init__.py +++ b/packages/google-cloud-dlp/google/cloud/dlp/__init__.py @@ -211,6 +211,7 @@ SearchConnectionsRequest, SearchConnectionsResponse, SecretManagerCredential, + SecretsDiscoveryTarget, StatisticalTable, StorageMetadataLabel, StoredInfoType, @@ -273,6 +274,7 @@ StorageConfig, StoredType, TableOptions, + TableReference, ) __all__ = ( @@ -453,6 +455,7 @@ "SearchConnectionsRequest", "SearchConnectionsResponse", "SecretManagerCredential", + "SecretsDiscoveryTarget", "StatisticalTable", "StorageMetadataLabel", "StoredInfoType", @@ -526,6 +529,7 @@ "StorageConfig", "StoredType", "TableOptions", + "TableReference", "FileType", "Likelihood", ) diff --git a/packages/google-cloud-dlp/google/cloud/dlp_v2/__init__.py b/packages/google-cloud-dlp/google/cloud/dlp_v2/__init__.py index 57f5392aa8a3..5676b65ac60a 100644 --- a/packages/google-cloud-dlp/google/cloud/dlp_v2/__init__.py +++ b/packages/google-cloud-dlp/google/cloud/dlp_v2/__init__.py @@ -210,6 +210,7 @@ SearchConnectionsRequest, SearchConnectionsResponse, SecretManagerCredential, + SecretsDiscoveryTarget, StatisticalTable, StorageMetadataLabel, StoredInfoType, @@ -272,6 +273,7 @@ StorageConfig, StoredType, TableOptions, + TableReference, ) __all__ = ( @@ -488,6 +490,7 @@ "SearchConnectionsRequest", "SearchConnectionsResponse", "SecretManagerCredential", + "SecretsDiscoveryTarget", "SensitivityScore", "StatisticalTable", "StorageConfig", @@ -502,6 +505,7 @@ "TableDataProfile", "TableLocation", "TableOptions", + "TableReference", "TimePartConfig", "TransformationConfig", "TransformationContainerType", diff --git a/packages/google-cloud-dlp/google/cloud/dlp_v2/services/dlp_service/async_client.py b/packages/google-cloud-dlp/google/cloud/dlp_v2/services/dlp_service/async_client.py index d818ff0be50e..676b9c48983f 100644 --- 
a/packages/google-cloud-dlp/google/cloud/dlp_v2/services/dlp_service/async_client.py +++ b/packages/google-cloud-dlp/google/cloud/dlp_v2/services/dlp_service/async_client.py @@ -2126,7 +2126,7 @@ async def sample_create_job_trigger(): Returns: google.cloud.dlp_v2.types.JobTrigger: - Contains a configuration to make api + Contains a configuration to make API calls on a repeating basis. See https://cloud.google.com/sensitive-data-protection/docs/concepts-job-triggers to learn more. @@ -2253,7 +2253,7 @@ async def sample_update_job_trigger(): Returns: google.cloud.dlp_v2.types.JobTrigger: - Contains a configuration to make api + Contains a configuration to make API calls on a repeating basis. See https://cloud.google.com/sensitive-data-protection/docs/concepts-job-triggers to learn more. @@ -2478,7 +2478,7 @@ async def sample_get_job_trigger(): Returns: google.cloud.dlp_v2.types.JobTrigger: - Contains a configuration to make api + Contains a configuration to make API calls on a repeating basis. See https://cloud.google.com/sensitive-data-protection/docs/concepts-job-triggers to learn more. diff --git a/packages/google-cloud-dlp/google/cloud/dlp_v2/services/dlp_service/client.py b/packages/google-cloud-dlp/google/cloud/dlp_v2/services/dlp_service/client.py index ca8d22c42573..7c9b72d8ebeb 100644 --- a/packages/google-cloud-dlp/google/cloud/dlp_v2/services/dlp_service/client.py +++ b/packages/google-cloud-dlp/google/cloud/dlp_v2/services/dlp_service/client.py @@ -2703,7 +2703,7 @@ def sample_create_job_trigger(): Returns: google.cloud.dlp_v2.types.JobTrigger: - Contains a configuration to make api + Contains a configuration to make API calls on a repeating basis. See https://cloud.google.com/sensitive-data-protection/docs/concepts-job-triggers to learn more. @@ -2827,7 +2827,7 @@ def sample_update_job_trigger(): Returns: google.cloud.dlp_v2.types.JobTrigger: - Contains a configuration to make api + Contains a configuration to make API calls on a repeating basis. See https://cloud.google.com/sensitive-data-protection/docs/concepts-job-triggers to learn more. @@ -3048,7 +3048,7 @@ def sample_get_job_trigger(): Returns: google.cloud.dlp_v2.types.JobTrigger: - Contains a configuration to make api + Contains a configuration to make API calls on a repeating basis. See https://cloud.google.com/sensitive-data-protection/docs/concepts-job-triggers to learn more. diff --git a/packages/google-cloud-dlp/google/cloud/dlp_v2/services/dlp_service/transports/rest.py b/packages/google-cloud-dlp/google/cloud/dlp_v2/services/dlp_service/transports/rest.py index cc191b468332..78872f5867c3 100644 --- a/packages/google-cloud-dlp/google/cloud/dlp_v2/services/dlp_service/transports/rest.py +++ b/packages/google-cloud-dlp/google/cloud/dlp_v2/services/dlp_service/transports/rest.py @@ -2340,7 +2340,7 @@ def __call__( Returns: ~.dlp.JobTrigger: - Contains a configuration to make api + Contains a configuration to make API calls on a repeating basis. See https://cloud.google.com/sensitive-data-protection/docs/concepts-job-triggers to learn more. 
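The hunks above and below correct the `JobTrigger` docstring ("Contains a configuration to make API calls on a repeating basis"). For context, here is a minimal sketch of such a trigger built with the `dlp_v2` client, assuming the standard `DlpServiceClient.create_job_trigger` surface; the project ID, bucket URL, infoType, and scan cadence are hypothetical placeholders, not values taken from this change.

```python
# Minimal sketch (not part of this diff) of the JobTrigger concept the
# docstring describes: an inspect job configuration that runs on a schedule.
# Project ID and bucket URL below are hypothetical placeholders.
from google.cloud import dlp_v2
from google.protobuf import duration_pb2

client = dlp_v2.DlpServiceClient()

job_trigger = dlp_v2.JobTrigger(
    display_name="weekly-gcs-scan",
    inspect_job=dlp_v2.InspectJobConfig(
        storage_config=dlp_v2.StorageConfig(
            cloud_storage_options=dlp_v2.CloudStorageOptions(
                file_set=dlp_v2.CloudStorageOptions.FileSet(
                    url="gs://example-bucket/**"
                )
            )
        ),
        inspect_config=dlp_v2.InspectConfig(
            info_types=[dlp_v2.InfoType(name="EMAIL_ADDRESS")]
        ),
    ),
    # Re-run the inspect job every 7 days (604800 seconds).
    triggers=[
        dlp_v2.JobTrigger.Trigger(
            schedule=dlp_v2.Schedule(
                recurrence_period_duration=duration_pb2.Duration(seconds=604800)
            )
        )
    ],
    status=dlp_v2.JobTrigger.Status.HEALTHY,
)

created = client.create_job_trigger(
    parent="projects/example-project/locations/global",
    job_trigger=job_trigger,
)
print(created.name)
```

The trigger pairs an `InspectJobConfig` (what to scan and for which infoTypes) with a `Schedule`, which is the repeating-API-call configuration the corrected docstring refers to.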
@@ -3976,7 +3976,7 @@ def __call__( Returns: ~.dlp.JobTrigger: - Contains a configuration to make api + Contains a configuration to make API calls on a repeating basis. See https://cloud.google.com/sensitive-data-protection/docs/concepts-job-triggers to learn more. @@ -6406,7 +6406,7 @@ def __call__( Returns: ~.dlp.JobTrigger: - Contains a configuration to make api + Contains a configuration to make API calls on a repeating basis. See https://cloud.google.com/sensitive-data-protection/docs/concepts-job-triggers to learn more. diff --git a/packages/google-cloud-dlp/google/cloud/dlp_v2/types/__init__.py b/packages/google-cloud-dlp/google/cloud/dlp_v2/types/__init__.py index d5abe039136d..c56eeec4c400 100644 --- a/packages/google-cloud-dlp/google/cloud/dlp_v2/types/__init__.py +++ b/packages/google-cloud-dlp/google/cloud/dlp_v2/types/__init__.py @@ -204,6 +204,7 @@ SearchConnectionsRequest, SearchConnectionsResponse, SecretManagerCredential, + SecretsDiscoveryTarget, StatisticalTable, StorageMetadataLabel, StoredInfoType, @@ -266,6 +267,7 @@ StorageConfig, StoredType, TableOptions, + TableReference, ) __all__ = ( @@ -444,6 +446,7 @@ "SearchConnectionsRequest", "SearchConnectionsResponse", "SecretManagerCredential", + "SecretsDiscoveryTarget", "StatisticalTable", "StorageMetadataLabel", "StoredInfoType", @@ -517,6 +520,7 @@ "StorageConfig", "StoredType", "TableOptions", + "TableReference", "FileType", "Likelihood", ) diff --git a/packages/google-cloud-dlp/google/cloud/dlp_v2/types/dlp.py b/packages/google-cloud-dlp/google/cloud/dlp_v2/types/dlp.py index 7431286b176b..739986ccb331 100644 --- a/packages/google-cloud-dlp/google/cloud/dlp_v2/types/dlp.py +++ b/packages/google-cloud-dlp/google/cloud/dlp_v2/types/dlp.py @@ -193,6 +193,7 @@ "DatabaseResourceReference", "DiscoveryCloudSqlConditions", "DiscoveryCloudSqlGenerationCadence", + "SecretsDiscoveryTarget", "DiscoveryStartingLocation", "DlpJob", "GetDlpJobRequest", @@ -646,9 +647,9 @@ class ResourceVisibility(proto.Enum): Visible to any user. RESOURCE_VISIBILITY_INCONCLUSIVE (15): May contain public items. - For example, if a GCS bucket has uniform bucket - level access disabled, some objects inside it - may be public. + For example, if a Cloud Storage bucket has + uniform bucket level access disabled, some + objects inside it may be public. RESOURCE_VISIBILITY_RESTRICTED (20): Visible only to specific users. """ @@ -744,10 +745,12 @@ class ConnectionState(proto.Enum): again until it is set to AVAILABLE. If the resolution requires external action, then - a request to set the status to AVAILABLE will - mark this connection for use. Otherwise, any - changes to the connection properties will - automatically mark it as AVAILABLE. + the client must send a request to set the status + to AVAILABLE when the connection is ready for + use. If the resolution doesn't require external + action, then any changes to the connection + properties will automatically mark it as + AVAILABLE. """ CONNECTION_STATE_UNSPECIFIED = 0 MISSING_CREDENTIALS = 1 @@ -2511,9 +2514,9 @@ class Result(proto.Message): Statistics of how many instances of each info type were found during inspect job. num_rows_processed (int): - Number of rows scanned post sampling and time - filtering (Applicable for row based stores such - as BigQuery). + Number of rows scanned after sampling and + time filtering (applicable for row based stores + such as BigQuery). 
hybrid_stats (google.cloud.dlp_v2.types.HybridInspectStatistics): Statistics related to the processing of hybrid inspect. @@ -2832,6 +2835,8 @@ class LocationCategory(proto.Enum): The infoType is typically used in Argentina. AUSTRALIA (3): The infoType is typically used in Australia. + AZERBAIJAN (48): + The infoType is typically used in Azerbaijan. BELGIUM (4): The infoType is typically used in Belgium. BRAZIL (5): @@ -2932,6 +2937,7 @@ class LocationCategory(proto.Enum): GLOBAL = 1 ARGENTINA = 2 AUSTRALIA = 3 + AZERBAIJAN = 48 BELGIUM = 4 BRAZIL = 5 CANADA = 6 @@ -6413,7 +6419,7 @@ class Error(proto.Message): class JobTrigger(proto.Message): - r"""Contains a configuration to make api calls on a repeating + r"""Contains a configuration to make API calls on a repeating basis. See https://cloud.google.com/sensitive-data-protection/docs/concepts-job-triggers to learn more. @@ -7735,14 +7741,10 @@ class EventType(proto.Enum): NEW_PROFILE (1): New profile (not a re-profile). CHANGED_PROFILE (2): - Changed one of the following profile metrics: - - - Data risk score - - Sensitivity score - - Resource visibility - - Encryption type - - Predicted infoTypes - - Other infoTypes + One of the following profile metrics changed: + Data risk score, Sensitivity score, Resource + visibility, Encryption type, Predicted + infoTypes, Other infoTypes SCORE_INCREASED (3): Table data risk score or sensitivity score increased. @@ -8192,6 +8194,14 @@ class DiscoveryTarget(proto.Message): Cloud SQL target for Discovery. The first target to match a table will be the one applied. + This field is a member of `oneof`_ ``target``. + secrets_target (google.cloud.dlp_v2.types.SecretsDiscoveryTarget): + Discovery target that looks for credentials + and secrets stored in cloud resource metadata + and reports them as vulnerabilities to Security + Command Center. Only one target of this type is + allowed. + This field is a member of `oneof`_ ``target``. """ @@ -8207,6 +8217,12 @@ class DiscoveryTarget(proto.Message): oneof="target", message="CloudSqlDiscoveryTarget", ) + secrets_target: "SecretsDiscoveryTarget" = proto.Field( + proto.MESSAGE, + number=3, + oneof="target", + message="SecretsDiscoveryTarget", + ) class BigQueryDiscoveryTarget(proto.Message): @@ -8297,6 +8313,13 @@ class DiscoveryBigQueryFilter(proto.Message): configuration. If none is specified, a default one will be added automatically. + This field is a member of `oneof`_ ``filter``. + table_reference (google.cloud.dlp_v2.types.TableReference): + The table to scan. Discovery configurations + including this can only include one + DiscoveryTarget (the DiscoveryTarget with this + TableReference). + This field is a member of `oneof`_ ``filter``. """ @@ -8319,6 +8342,12 @@ class AllOtherBigQueryTables(proto.Message): oneof="filter", message=AllOtherBigQueryTables, ) + table_reference: storage.TableReference = proto.Field( + proto.MESSAGE, + number=3, + oneof="filter", + message=storage.TableReference, + ) class BigQueryTableCollection(proto.Message): @@ -8669,8 +8698,8 @@ class DatabaseResourceRegex(proto.Message): Attributes: project_id_regex (str): For organizations, if unset, will match all - projects. Has no effect for Data Profile - configurations created within a project. + projects. Has no effect for configurations + created within a project. instance_regex (str): Regex to test the instance name against. If empty, all instances match. 
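Earlier in this hunk set, `DiscoveryTarget` gains a `secrets_target` oneof backed by the new `SecretsDiscoveryTarget` message. As a rough sketch of how the new target slots into a discovery configuration, assuming the existing `DiscoveryConfig` and `create_discovery_config` surface from prior releases and a placeholder parent resource (the library would need to be at the 3.18.0 release this changelog introduces):

```python
# Illustrative sketch only: wires the new `secrets_target` oneof into a
# DiscoveryConfig. The parent resource name is a hypothetical placeholder.
from google.cloud import dlp_v2

client = dlp_v2.DlpServiceClient()

discovery_config = dlp_v2.DiscoveryConfig(
    display_name="secrets-discovery",
    # Per the field documentation added above: only one target of this type
    # is allowed, and it carries no filtering or frequency controls.
    targets=[
        dlp_v2.DiscoveryTarget(secrets_target=dlp_v2.SecretsDiscoveryTarget())
    ],
    status=dlp_v2.DiscoveryConfig.Status.RUNNING,
)

created = client.create_discovery_config(
    parent="projects/example-project/locations/global",
    discovery_config=discovery_config,
)
print(created.name)
```

As documented in the field comment above, discovered credentials and secrets are reported as vulnerabilities to Security Command Center rather than as data profiles.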
@@ -8714,10 +8743,16 @@ class DatabaseResourceReference(proto.Message): Attributes: project_id (str): Required. If within a project-level config, - then this must match the config's project id. + then this must match the config's project ID. instance (str): Required. The instance where this resource is - located. For example: Cloud SQL's instance id. + located. For example: Cloud SQL instance ID. + database (str): + Required. Name of a database within the + instance. + database_resource (str): + Required. Name of a database resource, for + example, a table within the database. """ project_id: str = proto.Field( @@ -8728,6 +8763,14 @@ class DatabaseResourceReference(proto.Message): proto.STRING, number=2, ) + database: str = proto.Field( + proto.STRING, + number=3, + ) + database_resource: str = proto.Field( + proto.STRING, + number=4, + ) class DiscoveryCloudSqlConditions(proto.Message): @@ -8754,9 +8797,9 @@ class DatabaseEngine(proto.Enum): ALL_SUPPORTED_DATABASE_ENGINES (1): Include all supported database engines. MYSQL (2): - MySql database. + MySQL database. POSTGRES (3): - PostGres database. + PostgreSQL database. """ DATABASE_ENGINE_UNSPECIFIED = 0 ALL_SUPPORTED_DATABASE_ENGINES = 1 @@ -8805,11 +8848,11 @@ class DiscoveryCloudSqlGenerationCadence(proto.Message): SQL tables can't trigger reprofiling. If you set this field, profiles are refreshed at this frequency regardless of whether the underlying - tables have changes. Defaults to never. + tables have changed. Defaults to never. """ class SchemaModifiedCadence(proto.Message): - r"""How frequency to modify the profile when the table's schema + r"""How frequently to modify the profile when the table's schema is modified. Attributes: @@ -8828,7 +8871,7 @@ class CloudSqlSchemaModification(proto.Enum): SQL_SCHEMA_MODIFICATION_UNSPECIFIED (0): Unused. NEW_COLUMNS (1): - New columns has appeared. + New columns have appeared. REMOVED_COLUMNS (2): Columns have been removed from the table. """ @@ -8861,6 +8904,24 @@ class CloudSqlSchemaModification(proto.Enum): ) +class SecretsDiscoveryTarget(proto.Message): + r"""Discovery target for credentials and secrets in cloud resource + metadata. + + This target does not include any filtering or frequency controls. + Cloud DLP will scan cloud resource metadata for secrets daily. + + No inspect template should be included in the discovery config for a + security benchmarks scan. Instead, the built-in list of secrets and + credentials infoTypes will be used (see + https://cloud.google.com/sensitive-data-protection/docs/infotypes-reference#credentials_and_secrets). + + Credentials and secrets discovered will be reported as + vulnerabilities to Security Command Center. + + """ + + class DiscoveryStartingLocation(proto.Message): r"""The location to begin a discovery scan. Denotes an organization ID or folder ID within an organization. @@ -11399,7 +11460,7 @@ class ListConnectionsRequest(proto.Message): other request fields must match the original request. filter (str): - Optional. \* Supported fields/values - ``state`` - + Optional. Supported field/value: ``state`` - MISSING|AVAILABLE|ERROR """ @@ -11437,7 +11498,7 @@ class SearchConnectionsRequest(proto.Message): other request fields must match the original request. filter (str): - Optional. \* Supported fields/values - ``state`` - + Optional. 
Supported field/value: - ``state`` - MISSING|AVAILABLE|ERROR """ @@ -11634,9 +11695,9 @@ class SecretManagerCredential(proto.Message): class CloudSqlIamCredential(proto.Message): - r"""Use IAM auth to connect. This requires the Cloud SQL IAM - feature to be enabled on the instance, which is not the default - for Cloud SQL. See + r"""Use IAM authentication to connect. This requires the Cloud + SQL IAM feature to be enabled on the instance, which is not the + default for Cloud SQL. See https://cloud.google.com/sql/docs/postgres/authentication and https://cloud.google.com/sql/docs/mysql/authentication. @@ -11692,11 +11753,11 @@ class DatabaseEngine(proto.Enum): Values: DATABASE_ENGINE_UNKNOWN (0): An engine that is not currently supported by - SDP. + Sensitive Data Protection. DATABASE_ENGINE_MYSQL (1): Cloud SQL for MySQL instance. DATABASE_ENGINE_POSTGRES (2): - Cloud SQL for Postgres instance. + Cloud SQL for PostgreSQL instance. """ DATABASE_ENGINE_UNKNOWN = 0 DATABASE_ENGINE_MYSQL = 1 diff --git a/packages/google-cloud-dlp/google/cloud/dlp_v2/types/storage.py b/packages/google-cloud-dlp/google/cloud/dlp_v2/types/storage.py index 386bc1be369f..70efa0fe1c46 100644 --- a/packages/google-cloud-dlp/google/cloud/dlp_v2/types/storage.py +++ b/packages/google-cloud-dlp/google/cloud/dlp_v2/types/storage.py @@ -45,6 +45,7 @@ "Key", "RecordKey", "BigQueryTable", + "TableReference", "BigQueryField", "EntityId", "TableOptions", @@ -1500,6 +1501,27 @@ class BigQueryTable(proto.Message): ) +class TableReference(proto.Message): + r"""Message defining the location of a BigQuery table with the + projectId inferred from the parent project. + + Attributes: + dataset_id (str): + Dataset ID of the table. + table_id (str): + Name of the table. + """ + + dataset_id: str = proto.Field( + proto.STRING, + number=1, + ) + table_id: str = proto.Field( + proto.STRING, + number=2, + ) + + class BigQueryField(proto.Message): r"""Message defining a field of a BigQuery table. 
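The `storage.py` hunk above introduces `TableReference`, a dataset/table pair whose project ID is inferred from the parent resource, and the earlier `dlp.py` changes add a matching `table_reference` oneof to `DiscoveryBigQueryFilter`. A short sketch of how the two fit together, assuming the existing `BigQueryDiscoveryTarget.filter` field and hypothetical dataset and table IDs:

```python
# Sketch of how the new TableReference message pairs with the new
# `table_reference` filter on DiscoveryBigQueryFilter (both added in this diff).
# Dataset and table IDs are hypothetical placeholders; the project ID is
# inferred from the parent resource, per the TableReference docstring.
from google.cloud import dlp_v2

single_table_target = dlp_v2.DiscoveryTarget(
    big_query_target=dlp_v2.BigQueryDiscoveryTarget(
        filter=dlp_v2.DiscoveryBigQueryFilter(
            table_reference=dlp_v2.TableReference(
                dataset_id="example_dataset",
                table_id="example_table",
            )
        )
    )
)
```

As noted in the `DiscoveryBigQueryFilter` documentation added above, a discovery configuration that uses this filter may include only that single `DiscoveryTarget`.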
diff --git a/packages/google-cloud-dlp/tests/unit/gapic/dlp_v2/test_dlp_service.py b/packages/google-cloud-dlp/tests/unit/gapic/dlp_v2/test_dlp_service.py index 5d85411c93ee..a501445033aa 100644 --- a/packages/google-cloud-dlp/tests/unit/gapic/dlp_v2/test_dlp_service.py +++ b/packages/google-cloud-dlp/tests/unit/gapic/dlp_v2/test_dlp_service.py @@ -4238,13 +4238,13 @@ def test_list_inspect_templates_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_inspect_templates(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -6450,13 +6450,13 @@ def test_list_deidentify_templates_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_deidentify_templates(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -9023,13 +9023,13 @@ def test_list_job_triggers_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_job_triggers(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -11550,13 +11550,13 @@ def test_list_discovery_configs_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_discovery_configs(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -12953,13 +12953,13 @@ def test_list_dlp_jobs_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_dlp_jobs(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -15746,13 +15746,13 @@ def test_list_stored_info_types_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_stored_info_types(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -16718,13 +16718,13 @@ def test_list_project_data_profiles_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_project_data_profiles(request={}) - 
assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -17314,13 +17314,13 @@ def test_list_table_data_profiles_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_table_data_profiles(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -17910,13 +17910,13 @@ def test_list_column_data_profiles_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_column_data_profiles(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -21619,13 +21619,13 @@ def test_list_connections_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_connections(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -22206,13 +22206,13 @@ def test_search_connections_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.search_connections(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 diff --git a/packages/google-cloud-dms/tests/unit/gapic/clouddms_v1/test_data_migration_service.py b/packages/google-cloud-dms/tests/unit/gapic/clouddms_v1/test_data_migration_service.py index deafe745fe63..164fbd81daa6 100644 --- a/packages/google-cloud-dms/tests/unit/gapic/clouddms_v1/test_data_migration_service.py +++ b/packages/google-cloud-dms/tests/unit/gapic/clouddms_v1/test_data_migration_service.py @@ -1601,13 +1601,13 @@ def test_list_migration_jobs_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_migration_jobs(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -6296,13 +6296,13 @@ def test_list_connection_profiles_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_connection_profiles(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -9340,13 +9340,13 @@ def test_list_private_connections_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + 
( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_private_connections(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -10744,13 +10744,13 @@ def test_list_conversion_workspaces_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_conversion_workspaces(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -13395,13 +13395,13 @@ def test_list_mapping_rules_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_mapping_rules(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -16164,13 +16164,13 @@ def test_describe_database_entities_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("conversion_workspace", ""),)), ) pager = client.describe_database_entities(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -17342,13 +17342,13 @@ def test_fetch_static_ips_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("name", ""),)), ) pager = client.fetch_static_ips(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 diff --git a/packages/google-cloud-documentai/CHANGELOG.md b/packages/google-cloud-documentai/CHANGELOG.md index 36965bafb771..f8877b1249fe 100644 --- a/packages/google-cloud-documentai/CHANGELOG.md +++ b/packages/google-cloud-documentai/CHANGELOG.md @@ -1,5 +1,26 @@ # Changelog +## [2.29.1](https://github.com/googleapis/google-cloud-python/compare/google-cloud-documentai-v2.29.0...google-cloud-documentai-v2.29.1) (2024-06-24) + + +### Documentation + +* Update the comment to add a note about `documentai.processors.create` permission ([9318d94](https://github.com/googleapis/google-cloud-python/commit/9318d94ce12bd7a2e242f1ac9cbe491307c84ea6)) + +## [2.29.0](https://github.com/googleapis/google-cloud-python/compare/google-cloud-documentai-v2.28.0...google-cloud-documentai-v2.29.0) (2024-06-05) + + +### Features + +* [google-cloud-documentai] Make Layout Parser generally available in V1 ([#12762](https://github.com/googleapis/google-cloud-python/issues/12762)) 
([9fd16ad](https://github.com/googleapis/google-cloud-python/commit/9fd16ad8837e31c3b06475b195c70c45f6979f6c)) + +## [2.28.0](https://github.com/googleapis/google-cloud-python/compare/google-cloud-documentai-v2.27.1...google-cloud-documentai-v2.28.0) (2024-05-29) + + +### Features + +* [google-cloud-documentai] Make Layout Parser generally available in V1 ([#12745](https://github.com/googleapis/google-cloud-python/issues/12745)) ([c5b9322](https://github.com/googleapis/google-cloud-python/commit/c5b93220710708fc457934281685b635c335394a)) + ## [2.27.1](https://github.com/googleapis/google-cloud-python/compare/google-cloud-documentai-v2.27.0...google-cloud-documentai-v2.27.1) (2024-05-16) diff --git a/packages/google-cloud-documentai/google/cloud/documentai/gapic_version.py b/packages/google-cloud-documentai/google/cloud/documentai/gapic_version.py index f8d26acc4504..e2d86ff384be 100644 --- a/packages/google-cloud-documentai/google/cloud/documentai/gapic_version.py +++ b/packages/google-cloud-documentai/google/cloud/documentai/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "2.27.1" # {x-release-please-version} +__version__ = "2.29.1" # {x-release-please-version} diff --git a/packages/google-cloud-documentai/google/cloud/documentai_v1/gapic_version.py b/packages/google-cloud-documentai/google/cloud/documentai_v1/gapic_version.py index f8d26acc4504..e2d86ff384be 100644 --- a/packages/google-cloud-documentai/google/cloud/documentai_v1/gapic_version.py +++ b/packages/google-cloud-documentai/google/cloud/documentai_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "2.27.1" # {x-release-please-version} +__version__ = "2.29.1" # {x-release-please-version} diff --git a/packages/google-cloud-documentai/google/cloud/documentai_v1/services/document_processor_service/async_client.py b/packages/google-cloud-documentai/google/cloud/documentai_v1/services/document_processor_service/async_client.py index 4649cc487a8a..26139b82f544 100644 --- a/packages/google-cloud-documentai/google/cloud/documentai_v1/services/document_processor_service/async_client.py +++ b/packages/google-cloud-documentai/google/cloud/documentai_v1/services/document_processor_service/async_client.py @@ -1934,7 +1934,11 @@ async def create_processor( r"""Creates a processor from the [ProcessorType][google.cloud.documentai.v1.ProcessorType] provided. The processor will be at ``ENABLED`` state by default - after its creation. + after its creation. Note that this method requires the + ``documentai.processors.create`` permission on the project, + which is highly privileged. A user or service account with this + permission can create new processors that can interact with any + gcs bucket in your project. .. 
code-block:: python diff --git a/packages/google-cloud-documentai/google/cloud/documentai_v1/services/document_processor_service/client.py b/packages/google-cloud-documentai/google/cloud/documentai_v1/services/document_processor_service/client.py index 514418937be7..78b18085505d 100644 --- a/packages/google-cloud-documentai/google/cloud/documentai_v1/services/document_processor_service/client.py +++ b/packages/google-cloud-documentai/google/cloud/documentai_v1/services/document_processor_service/client.py @@ -2385,7 +2385,11 @@ def create_processor( r"""Creates a processor from the [ProcessorType][google.cloud.documentai.v1.ProcessorType] provided. The processor will be at ``ENABLED`` state by default - after its creation. + after its creation. Note that this method requires the + ``documentai.processors.create`` permission on the project, + which is highly privileged. A user or service account with this + permission can create new processors that can interact with any + gcs bucket in your project. .. code-block:: python diff --git a/packages/google-cloud-documentai/google/cloud/documentai_v1/services/document_processor_service/transports/grpc.py b/packages/google-cloud-documentai/google/cloud/documentai_v1/services/document_processor_service/transports/grpc.py index 3ea373271a8d..8ded9062fd5f 100644 --- a/packages/google-cloud-documentai/google/cloud/documentai_v1/services/document_processor_service/transports/grpc.py +++ b/packages/google-cloud-documentai/google/cloud/documentai_v1/services/document_processor_service/transports/grpc.py @@ -649,7 +649,11 @@ def create_processor( Creates a processor from the [ProcessorType][google.cloud.documentai.v1.ProcessorType] provided. The processor will be at ``ENABLED`` state by default - after its creation. + after its creation. Note that this method requires the + ``documentai.processors.create`` permission on the project, + which is highly privileged. A user or service account with this + permission can create new processors that can interact with any + gcs bucket in your project. Returns: Callable[[~.CreateProcessorRequest], diff --git a/packages/google-cloud-documentai/google/cloud/documentai_v1/services/document_processor_service/transports/grpc_asyncio.py b/packages/google-cloud-documentai/google/cloud/documentai_v1/services/document_processor_service/transports/grpc_asyncio.py index 45ebd342b1d8..69be72fac4f8 100644 --- a/packages/google-cloud-documentai/google/cloud/documentai_v1/services/document_processor_service/transports/grpc_asyncio.py +++ b/packages/google-cloud-documentai/google/cloud/documentai_v1/services/document_processor_service/transports/grpc_asyncio.py @@ -657,7 +657,11 @@ def create_processor( Creates a processor from the [ProcessorType][google.cloud.documentai.v1.ProcessorType] provided. The processor will be at ``ENABLED`` state by default - after its creation. + after its creation. Note that this method requires the + ``documentai.processors.create`` permission on the project, + which is highly privileged. A user or service account with this + permission can create new processors that can interact with any + gcs bucket in your project. 
Returns: Callable[[~.CreateProcessorRequest], diff --git a/packages/google-cloud-documentai/google/cloud/documentai_v1/types/document.py b/packages/google-cloud-documentai/google/cloud/documentai_v1/types/document.py index e9e6ba3c6204..8a449a678b46 100644 --- a/packages/google-cloud-documentai/google/cloud/documentai_v1/types/document.py +++ b/packages/google-cloud-documentai/google/cloud/documentai_v1/types/document.py @@ -103,6 +103,10 @@ class Document(proto.Message): revisions (MutableSequence[google.cloud.documentai_v1.types.Document.Revision]): Placeholder. Revision history of this document. + document_layout (google.cloud.documentai_v1.types.Document.DocumentLayout): + Parsed layout of the document. + chunked_document (google.cloud.documentai_v1.types.Document.ChunkedDocument): + Document chunked based on chunking config. """ class ShardInfo(proto.Message): @@ -1810,6 +1814,374 @@ class TextChange(proto.Message): message="Document.Provenance", ) + class DocumentLayout(proto.Message): + r"""Represents the parsed layout of a document as a collection of + blocks that the document is divided into. + + Attributes: + blocks (MutableSequence[google.cloud.documentai_v1.types.Document.DocumentLayout.DocumentLayoutBlock]): + List of blocks in the document. + """ + + class DocumentLayoutBlock(proto.Message): + r"""Represents a block. A block could be one of the various types + (text, table, list) supported. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + text_block (google.cloud.documentai_v1.types.Document.DocumentLayout.DocumentLayoutBlock.LayoutTextBlock): + Block consisting of text content. + + This field is a member of `oneof`_ ``block``. + table_block (google.cloud.documentai_v1.types.Document.DocumentLayout.DocumentLayoutBlock.LayoutTableBlock): + Block consisting of table content/structure. + + This field is a member of `oneof`_ ``block``. + list_block (google.cloud.documentai_v1.types.Document.DocumentLayout.DocumentLayoutBlock.LayoutListBlock): + Block consisting of list content/structure. + + This field is a member of `oneof`_ ``block``. + block_id (str): + ID of the block. + page_span (google.cloud.documentai_v1.types.Document.DocumentLayout.DocumentLayoutBlock.LayoutPageSpan): + Page span of the block. + """ + + class LayoutPageSpan(proto.Message): + r"""Represents where the block starts and ends in the document. + + Attributes: + page_start (int): + Page where block starts in the document. + page_end (int): + Page where block ends in the document. + """ + + page_start: int = proto.Field( + proto.INT32, + number=1, + ) + page_end: int = proto.Field( + proto.INT32, + number=2, + ) + + class LayoutTextBlock(proto.Message): + r"""Represents a text type block. + + Attributes: + text (str): + Text content stored in the block. + type_ (str): + Type of the text in the block. Available options are: + ``paragraph``, ``subtitle``, ``heading-1``, ``heading-2``, + ``heading-3``, ``heading-4``, ``heading-5``, ``header``, + ``footer``. + blocks (MutableSequence[google.cloud.documentai_v1.types.Document.DocumentLayout.DocumentLayoutBlock]): + A text block could further have child blocks. 
+ Repeated blocks support further hierarchies and + nested blocks. + """ + + text: str = proto.Field( + proto.STRING, + number=1, + ) + type_: str = proto.Field( + proto.STRING, + number=2, + ) + blocks: MutableSequence[ + "Document.DocumentLayout.DocumentLayoutBlock" + ] = proto.RepeatedField( + proto.MESSAGE, + number=3, + message="Document.DocumentLayout.DocumentLayoutBlock", + ) + + class LayoutTableBlock(proto.Message): + r"""Represents a table type block. + + Attributes: + header_rows (MutableSequence[google.cloud.documentai_v1.types.Document.DocumentLayout.DocumentLayoutBlock.LayoutTableRow]): + Header rows at the top of the table. + body_rows (MutableSequence[google.cloud.documentai_v1.types.Document.DocumentLayout.DocumentLayoutBlock.LayoutTableRow]): + Body rows containing main table content. + caption (str): + Table caption/title. + """ + + header_rows: MutableSequence[ + "Document.DocumentLayout.DocumentLayoutBlock.LayoutTableRow" + ] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="Document.DocumentLayout.DocumentLayoutBlock.LayoutTableRow", + ) + body_rows: MutableSequence[ + "Document.DocumentLayout.DocumentLayoutBlock.LayoutTableRow" + ] = proto.RepeatedField( + proto.MESSAGE, + number=2, + message="Document.DocumentLayout.DocumentLayoutBlock.LayoutTableRow", + ) + caption: str = proto.Field( + proto.STRING, + number=3, + ) + + class LayoutTableRow(proto.Message): + r"""Represents a row in a table. + + Attributes: + cells (MutableSequence[google.cloud.documentai_v1.types.Document.DocumentLayout.DocumentLayoutBlock.LayoutTableCell]): + A table row is a list of table cells. + """ + + cells: MutableSequence[ + "Document.DocumentLayout.DocumentLayoutBlock.LayoutTableCell" + ] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="Document.DocumentLayout.DocumentLayoutBlock.LayoutTableCell", + ) + + class LayoutTableCell(proto.Message): + r"""Represents a cell in a table row. + + Attributes: + blocks (MutableSequence[google.cloud.documentai_v1.types.Document.DocumentLayout.DocumentLayoutBlock]): + A table cell is a list of blocks. + Repeated blocks support further hierarchies and + nested blocks. + row_span (int): + How many rows this cell spans. + col_span (int): + How many columns this cell spans. + """ + + blocks: MutableSequence[ + "Document.DocumentLayout.DocumentLayoutBlock" + ] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="Document.DocumentLayout.DocumentLayoutBlock", + ) + row_span: int = proto.Field( + proto.INT32, + number=2, + ) + col_span: int = proto.Field( + proto.INT32, + number=3, + ) + + class LayoutListBlock(proto.Message): + r"""Represents a list type block. + + Attributes: + list_entries (MutableSequence[google.cloud.documentai_v1.types.Document.DocumentLayout.DocumentLayoutBlock.LayoutListEntry]): + List entries that constitute a list block. + type_ (str): + Type of the list_entries (if exist). Available options are + ``ordered`` and ``unordered``. + """ + + list_entries: MutableSequence[ + "Document.DocumentLayout.DocumentLayoutBlock.LayoutListEntry" + ] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="Document.DocumentLayout.DocumentLayoutBlock.LayoutListEntry", + ) + type_: str = proto.Field( + proto.STRING, + number=2, + ) + + class LayoutListEntry(proto.Message): + r"""Represents an entry in the list. + + Attributes: + blocks (MutableSequence[google.cloud.documentai_v1.types.Document.DocumentLayout.DocumentLayoutBlock]): + A list entry is a list of blocks. 
+ Repeated blocks support further hierarchies and + nested blocks. + """ + + blocks: MutableSequence[ + "Document.DocumentLayout.DocumentLayoutBlock" + ] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="Document.DocumentLayout.DocumentLayoutBlock", + ) + + text_block: "Document.DocumentLayout.DocumentLayoutBlock.LayoutTextBlock" = proto.Field( + proto.MESSAGE, + number=2, + oneof="block", + message="Document.DocumentLayout.DocumentLayoutBlock.LayoutTextBlock", + ) + table_block: "Document.DocumentLayout.DocumentLayoutBlock.LayoutTableBlock" = proto.Field( + proto.MESSAGE, + number=3, + oneof="block", + message="Document.DocumentLayout.DocumentLayoutBlock.LayoutTableBlock", + ) + list_block: "Document.DocumentLayout.DocumentLayoutBlock.LayoutListBlock" = proto.Field( + proto.MESSAGE, + number=4, + oneof="block", + message="Document.DocumentLayout.DocumentLayoutBlock.LayoutListBlock", + ) + block_id: str = proto.Field( + proto.STRING, + number=1, + ) + page_span: "Document.DocumentLayout.DocumentLayoutBlock.LayoutPageSpan" = proto.Field( + proto.MESSAGE, + number=5, + message="Document.DocumentLayout.DocumentLayoutBlock.LayoutPageSpan", + ) + + blocks: MutableSequence[ + "Document.DocumentLayout.DocumentLayoutBlock" + ] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="Document.DocumentLayout.DocumentLayoutBlock", + ) + + class ChunkedDocument(proto.Message): + r"""Represents the chunks that the document is divided into. + + Attributes: + chunks (MutableSequence[google.cloud.documentai_v1.types.Document.ChunkedDocument.Chunk]): + List of chunks. + """ + + class Chunk(proto.Message): + r"""Represents a chunk. + + Attributes: + chunk_id (str): + ID of the chunk. + source_block_ids (MutableSequence[str]): + Unused. + content (str): + Text content of the chunk. + page_span (google.cloud.documentai_v1.types.Document.ChunkedDocument.Chunk.ChunkPageSpan): + Page span of the chunk. + page_headers (MutableSequence[google.cloud.documentai_v1.types.Document.ChunkedDocument.Chunk.ChunkPageHeader]): + Page headers associated with the chunk. + page_footers (MutableSequence[google.cloud.documentai_v1.types.Document.ChunkedDocument.Chunk.ChunkPageFooter]): + Page footers associated with the chunk. + """ + + class ChunkPageSpan(proto.Message): + r"""Represents where the chunk starts and ends in the document. + + Attributes: + page_start (int): + Page where chunk starts in the document. + page_end (int): + Page where chunk ends in the document. + """ + + page_start: int = proto.Field( + proto.INT32, + number=1, + ) + page_end: int = proto.Field( + proto.INT32, + number=2, + ) + + class ChunkPageHeader(proto.Message): + r"""Represents the page header associated with the chunk. + + Attributes: + text (str): + Header in text format. + page_span (google.cloud.documentai_v1.types.Document.ChunkedDocument.Chunk.ChunkPageSpan): + Page span of the header. + """ + + text: str = proto.Field( + proto.STRING, + number=1, + ) + page_span: "Document.ChunkedDocument.Chunk.ChunkPageSpan" = proto.Field( + proto.MESSAGE, + number=2, + message="Document.ChunkedDocument.Chunk.ChunkPageSpan", + ) + + class ChunkPageFooter(proto.Message): + r"""Represents the page footer associated with the chunk. + + Attributes: + text (str): + Footer in text format. + page_span (google.cloud.documentai_v1.types.Document.ChunkedDocument.Chunk.ChunkPageSpan): + Page span of the footer. 
+ """ + + text: str = proto.Field( + proto.STRING, + number=1, + ) + page_span: "Document.ChunkedDocument.Chunk.ChunkPageSpan" = proto.Field( + proto.MESSAGE, + number=2, + message="Document.ChunkedDocument.Chunk.ChunkPageSpan", + ) + + chunk_id: str = proto.Field( + proto.STRING, + number=1, + ) + source_block_ids: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=2, + ) + content: str = proto.Field( + proto.STRING, + number=3, + ) + page_span: "Document.ChunkedDocument.Chunk.ChunkPageSpan" = proto.Field( + proto.MESSAGE, + number=4, + message="Document.ChunkedDocument.Chunk.ChunkPageSpan", + ) + page_headers: MutableSequence[ + "Document.ChunkedDocument.Chunk.ChunkPageHeader" + ] = proto.RepeatedField( + proto.MESSAGE, + number=5, + message="Document.ChunkedDocument.Chunk.ChunkPageHeader", + ) + page_footers: MutableSequence[ + "Document.ChunkedDocument.Chunk.ChunkPageFooter" + ] = proto.RepeatedField( + proto.MESSAGE, + number=6, + message="Document.ChunkedDocument.Chunk.ChunkPageFooter", + ) + + chunks: MutableSequence["Document.ChunkedDocument.Chunk"] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="Document.ChunkedDocument.Chunk", + ) + uri: str = proto.Field( proto.STRING, number=1, @@ -1868,6 +2240,16 @@ class TextChange(proto.Message): number=13, message=Revision, ) + document_layout: DocumentLayout = proto.Field( + proto.MESSAGE, + number=17, + message=DocumentLayout, + ) + chunked_document: ChunkedDocument = proto.Field( + proto.MESSAGE, + number=18, + message=ChunkedDocument, + ) __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-documentai/google/cloud/documentai_v1/types/document_processor_service.py b/packages/google-cloud-documentai/google/cloud/documentai_v1/types/document_processor_service.py index 4946d4528c0f..2d3886dfb4d4 100644 --- a/packages/google-cloud-documentai/google/cloud/documentai_v1/types/document_processor_service.py +++ b/packages/google-cloud-documentai/google/cloud/documentai_v1/types/document_processor_service.py @@ -116,6 +116,9 @@ class ProcessOptions(proto.Message): Only applicable to ``OCR_PROCESSOR`` and ``FORM_PARSER_PROCESSOR``. Returns error if set on other processor types. + layout_config (google.cloud.documentai_v1.types.ProcessOptions.LayoutConfig): + Optional. Only applicable to ``LAYOUT_PARSER_PROCESSOR``. + Returns error if set on other processor types. schema_override (google.cloud.documentai_v1.types.DocumentSchema): Optional. Override the schema of the [ProcessorVersion][google.cloud.documentai.v1.ProcessorVersion]. @@ -125,6 +128,42 @@ class ProcessOptions(proto.Message): doesn't support schema override. """ + class LayoutConfig(proto.Message): + r"""Serving config for layout parser processor. + + Attributes: + chunking_config (google.cloud.documentai_v1.types.ProcessOptions.LayoutConfig.ChunkingConfig): + Optional. Config for chunking in layout + parser processor. + """ + + class ChunkingConfig(proto.Message): + r"""Serving config for chunking. + + Attributes: + chunk_size (int): + Optional. The chunk sizes to use when + splitting documents, in order of level. + include_ancestor_headings (bool): + Optional. Whether or not to include ancestor + headings when splitting. 
+ """ + + chunk_size: int = proto.Field( + proto.INT32, + number=1, + ) + include_ancestor_headings: bool = proto.Field( + proto.BOOL, + number=2, + ) + + chunking_config: "ProcessOptions.LayoutConfig.ChunkingConfig" = proto.Field( + proto.MESSAGE, + number=1, + message="ProcessOptions.LayoutConfig.ChunkingConfig", + ) + class IndividualPageSelector(proto.Message): r"""A list of individual page numbers. @@ -160,6 +199,11 @@ class IndividualPageSelector(proto.Message): number=1, message=document_io.OcrConfig, ) + layout_config: LayoutConfig = proto.Field( + proto.MESSAGE, + number=9, + message=LayoutConfig, + ) schema_override: gcd_document_schema.DocumentSchema = proto.Field( proto.MESSAGE, number=8, diff --git a/packages/google-cloud-documentai/google/cloud/documentai_v1/types/processor.py b/packages/google-cloud-documentai/google/cloud/documentai_v1/types/processor.py index 4b2ea5bae7a5..7f6bea22ff3d 100644 --- a/packages/google-cloud-documentai/google/cloud/documentai_v1/types/processor.py +++ b/packages/google-cloud-documentai/google/cloud/documentai_v1/types/processor.py @@ -72,6 +72,10 @@ class ProcessorVersion(proto.Message): model_type (google.cloud.documentai_v1.types.ProcessorVersion.ModelType): Output only. The model type of this processor version. + satisfies_pzs (bool): + Output only. Reserved for future use. + satisfies_pzi (bool): + Output only. Reserved for future use. """ class State(proto.Enum): @@ -201,6 +205,14 @@ class DeprecationInfo(proto.Message): number=15, enum=ModelType, ) + satisfies_pzs: bool = proto.Field( + proto.BOOL, + number=16, + ) + satisfies_pzi: bool = proto.Field( + proto.BOOL, + number=17, + ) class ProcessorVersionAlias(proto.Message): @@ -256,6 +268,10 @@ class Processor(proto.Message): The `KMS key `__ used for encryption and decryption in CMEK scenarios. + satisfies_pzs (bool): + Output only. Reserved for future use. + satisfies_pzi (bool): + Output only. Reserved for future use. """ class State(proto.Enum): @@ -344,6 +360,14 @@ class State(proto.Enum): proto.STRING, number=8, ) + satisfies_pzs: bool = proto.Field( + proto.BOOL, + number=12, + ) + satisfies_pzi: bool = proto.Field( + proto.BOOL, + number=13, + ) __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-documentai/google/cloud/documentai_v1beta2/gapic_version.py b/packages/google-cloud-documentai/google/cloud/documentai_v1beta2/gapic_version.py index f8d26acc4504..e2d86ff384be 100644 --- a/packages/google-cloud-documentai/google/cloud/documentai_v1beta2/gapic_version.py +++ b/packages/google-cloud-documentai/google/cloud/documentai_v1beta2/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "2.27.1" # {x-release-please-version} +__version__ = "2.29.1" # {x-release-please-version} diff --git a/packages/google-cloud-documentai/google/cloud/documentai_v1beta3/gapic_version.py b/packages/google-cloud-documentai/google/cloud/documentai_v1beta3/gapic_version.py index f8d26acc4504..e2d86ff384be 100644 --- a/packages/google-cloud-documentai/google/cloud/documentai_v1beta3/gapic_version.py +++ b/packages/google-cloud-documentai/google/cloud/documentai_v1beta3/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "2.27.1" # {x-release-please-version} +__version__ = "2.29.1" # {x-release-please-version} diff --git a/packages/google-cloud-documentai/samples/generated_samples/snippet_metadata_google.cloud.documentai.v1.json b/packages/google-cloud-documentai/samples/generated_samples/snippet_metadata_google.cloud.documentai.v1.json index d3628830bd85..9f90a212ff44 100644 --- a/packages/google-cloud-documentai/samples/generated_samples/snippet_metadata_google.cloud.documentai.v1.json +++ b/packages/google-cloud-documentai/samples/generated_samples/snippet_metadata_google.cloud.documentai.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-documentai", - "version": "2.27.1" + "version": "2.29.1" }, "snippets": [ { diff --git a/packages/google-cloud-documentai/samples/generated_samples/snippet_metadata_google.cloud.documentai.v1beta2.json b/packages/google-cloud-documentai/samples/generated_samples/snippet_metadata_google.cloud.documentai.v1beta2.json index f3c4190e7a19..3469ef2dac36 100644 --- a/packages/google-cloud-documentai/samples/generated_samples/snippet_metadata_google.cloud.documentai.v1beta2.json +++ b/packages/google-cloud-documentai/samples/generated_samples/snippet_metadata_google.cloud.documentai.v1beta2.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-documentai", - "version": "2.27.1" + "version": "2.29.1" }, "snippets": [ { diff --git a/packages/google-cloud-documentai/samples/generated_samples/snippet_metadata_google.cloud.documentai.v1beta3.json b/packages/google-cloud-documentai/samples/generated_samples/snippet_metadata_google.cloud.documentai.v1beta3.json index df84c72f588e..088c79f4f048 100644 --- a/packages/google-cloud-documentai/samples/generated_samples/snippet_metadata_google.cloud.documentai.v1beta3.json +++ b/packages/google-cloud-documentai/samples/generated_samples/snippet_metadata_google.cloud.documentai.v1beta3.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-documentai", - "version": "2.27.1" + "version": "2.29.1" }, "snippets": [ { diff --git a/packages/google-cloud-documentai/tests/unit/gapic/documentai_v1/test_document_processor_service.py b/packages/google-cloud-documentai/tests/unit/gapic/documentai_v1/test_document_processor_service.py index 612248f31798..9e669ec099d6 100644 --- a/packages/google-cloud-documentai/tests/unit/gapic/documentai_v1/test_document_processor_service.py +++ b/packages/google-cloud-documentai/tests/unit/gapic/documentai_v1/test_document_processor_service.py @@ -2804,13 +2804,13 @@ def test_list_processor_types_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_processor_types(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -3789,13 +3789,13 @@ def test_list_processors_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_processors(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -3970,6 +3970,8 @@ def test_get_processor(request_type, transport: str = "grpc"): 
default_processor_version="default_processor_version_value", process_endpoint="process_endpoint_value", kms_key_name="kms_key_name_value", + satisfies_pzs=True, + satisfies_pzi=True, ) response = client.get_processor(request) @@ -3988,6 +3990,8 @@ def test_get_processor(request_type, transport: str = "grpc"): assert response.default_processor_version == "default_processor_version_value" assert response.process_endpoint == "process_endpoint_value" assert response.kms_key_name == "kms_key_name_value" + assert response.satisfies_pzs is True + assert response.satisfies_pzi is True def test_get_processor_empty_call(): @@ -4093,6 +4097,8 @@ async def test_get_processor_empty_call_async(): default_processor_version="default_processor_version_value", process_endpoint="process_endpoint_value", kms_key_name="kms_key_name_value", + satisfies_pzs=True, + satisfies_pzi=True, ) ) response = await client.get_processor() @@ -4173,6 +4179,8 @@ async def test_get_processor_async( default_processor_version="default_processor_version_value", process_endpoint="process_endpoint_value", kms_key_name="kms_key_name_value", + satisfies_pzs=True, + satisfies_pzi=True, ) ) response = await client.get_processor(request) @@ -4192,6 +4200,8 @@ async def test_get_processor_async( assert response.default_processor_version == "default_processor_version_value" assert response.process_endpoint == "process_endpoint_value" assert response.kms_key_name == "kms_key_name_value" + assert response.satisfies_pzs is True + assert response.satisfies_pzi is True @pytest.mark.asyncio @@ -4773,6 +4783,8 @@ def test_get_processor_version(request_type, transport: str = "grpc"): kms_key_version_name="kms_key_version_name_value", google_managed=True, model_type=processor.ProcessorVersion.ModelType.MODEL_TYPE_GENERATIVE, + satisfies_pzs=True, + satisfies_pzi=True, ) response = client.get_processor_version(request) @@ -4794,6 +4806,8 @@ def test_get_processor_version(request_type, transport: str = "grpc"): response.model_type == processor.ProcessorVersion.ModelType.MODEL_TYPE_GENERATIVE ) + assert response.satisfies_pzs is True + assert response.satisfies_pzi is True def test_get_processor_version_empty_call(): @@ -4910,6 +4924,8 @@ async def test_get_processor_version_empty_call_async(): kms_key_version_name="kms_key_version_name_value", google_managed=True, model_type=processor.ProcessorVersion.ModelType.MODEL_TYPE_GENERATIVE, + satisfies_pzs=True, + satisfies_pzi=True, ) ) response = await client.get_processor_version() @@ -4992,6 +5008,8 @@ async def test_get_processor_version_async( kms_key_version_name="kms_key_version_name_value", google_managed=True, model_type=processor.ProcessorVersion.ModelType.MODEL_TYPE_GENERATIVE, + satisfies_pzs=True, + satisfies_pzi=True, ) ) response = await client.get_processor_version(request) @@ -5014,6 +5032,8 @@ async def test_get_processor_version_async( response.model_type == processor.ProcessorVersion.ModelType.MODEL_TYPE_GENERATIVE ) + assert response.satisfies_pzs is True + assert response.satisfies_pzi is True @pytest.mark.asyncio @@ -5606,13 +5626,13 @@ def test_list_processor_versions_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_processor_versions(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 
6 @@ -6972,6 +6992,8 @@ def test_create_processor(request_type, transport: str = "grpc"): default_processor_version="default_processor_version_value", process_endpoint="process_endpoint_value", kms_key_name="kms_key_name_value", + satisfies_pzs=True, + satisfies_pzi=True, ) response = client.create_processor(request) @@ -6990,6 +7012,8 @@ def test_create_processor(request_type, transport: str = "grpc"): assert response.default_processor_version == "default_processor_version_value" assert response.process_endpoint == "process_endpoint_value" assert response.kms_key_name == "kms_key_name_value" + assert response.satisfies_pzs is True + assert response.satisfies_pzi is True def test_create_processor_empty_call(): @@ -7097,6 +7121,8 @@ async def test_create_processor_empty_call_async(): default_processor_version="default_processor_version_value", process_endpoint="process_endpoint_value", kms_key_name="kms_key_name_value", + satisfies_pzs=True, + satisfies_pzi=True, ) ) response = await client.create_processor() @@ -7177,6 +7203,8 @@ async def test_create_processor_async( default_processor_version="default_processor_version_value", process_endpoint="process_endpoint_value", kms_key_name="kms_key_name_value", + satisfies_pzs=True, + satisfies_pzi=True, ) ) response = await client.create_processor(request) @@ -7196,6 +7224,8 @@ async def test_create_processor_async( assert response.default_processor_version == "default_processor_version_value" assert response.process_endpoint == "process_endpoint_value" assert response.kms_key_name == "kms_key_name_value" + assert response.satisfies_pzs is True + assert response.satisfies_pzi is True @pytest.mark.asyncio @@ -10185,13 +10215,13 @@ def test_list_evaluations_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_evaluations(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -12387,6 +12417,8 @@ def test_get_processor_rest(request_type): default_processor_version="default_processor_version_value", process_endpoint="process_endpoint_value", kms_key_name="kms_key_name_value", + satisfies_pzs=True, + satisfies_pzi=True, ) # Wrap the value into a proper Response obj @@ -12409,6 +12441,8 @@ def test_get_processor_rest(request_type): assert response.default_processor_version == "default_processor_version_value" assert response.process_endpoint == "process_endpoint_value" assert response.kms_key_name == "kms_key_name_value" + assert response.satisfies_pzs is True + assert response.satisfies_pzi is True def test_get_processor_rest_use_cached_wrapped_rpc(): @@ -13023,6 +13057,8 @@ def test_get_processor_version_rest(request_type): kms_key_version_name="kms_key_version_name_value", google_managed=True, model_type=processor.ProcessorVersion.ModelType.MODEL_TYPE_GENERATIVE, + satisfies_pzs=True, + satisfies_pzi=True, ) # Wrap the value into a proper Response obj @@ -13048,6 +13084,8 @@ def test_get_processor_version_rest(request_type): response.model_type == processor.ProcessorVersion.ModelType.MODEL_TYPE_GENERATIVE ) + assert response.satisfies_pzs is True + assert response.satisfies_pzi is True def test_get_processor_version_rest_use_cached_wrapped_rpc(): @@ -14687,6 +14725,8 @@ def test_create_processor_rest(request_type): "process_endpoint": 
"process_endpoint_value", "create_time": {"seconds": 751, "nanos": 543}, "kms_key_name": "kms_key_name_value", + "satisfies_pzs": True, + "satisfies_pzi": True, } # The version of a generated dependency at test runtime may differ from the version used during generation. # Delete any fields which are not present in the current runtime dependency @@ -14770,6 +14810,8 @@ def get_message_fields(field): default_processor_version="default_processor_version_value", process_endpoint="process_endpoint_value", kms_key_name="kms_key_name_value", + satisfies_pzs=True, + satisfies_pzi=True, ) # Wrap the value into a proper Response obj @@ -14792,6 +14834,8 @@ def get_message_fields(field): assert response.default_processor_version == "default_processor_version_value" assert response.process_endpoint == "process_endpoint_value" assert response.kms_key_name == "kms_key_name_value" + assert response.satisfies_pzs is True + assert response.satisfies_pzi is True def test_create_processor_rest_use_cached_wrapped_rpc(): diff --git a/packages/google-cloud-documentai/tests/unit/gapic/documentai_v1beta3/test_document_processor_service.py b/packages/google-cloud-documentai/tests/unit/gapic/documentai_v1beta3/test_document_processor_service.py index 780f1a882ae2..82bd29ac3bdc 100644 --- a/packages/google-cloud-documentai/tests/unit/gapic/documentai_v1beta3/test_document_processor_service.py +++ b/packages/google-cloud-documentai/tests/unit/gapic/documentai_v1beta3/test_document_processor_service.py @@ -2812,13 +2812,13 @@ def test_list_processor_types_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_processor_types(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -3797,13 +3797,13 @@ def test_list_processors_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_processors(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -5614,13 +5614,13 @@ def test_list_processor_versions_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_processor_versions(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -10193,13 +10193,13 @@ def test_list_evaluations_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_evaluations(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 diff --git a/packages/google-cloud-documentai/tests/unit/gapic/documentai_v1beta3/test_document_service.py 
b/packages/google-cloud-documentai/tests/unit/gapic/documentai_v1beta3/test_document_service.py index b90b3f540a9e..cbd4560015a1 100644 --- a/packages/google-cloud-documentai/tests/unit/gapic/documentai_v1beta3/test_document_service.py +++ b/packages/google-cloud-documentai/tests/unit/gapic/documentai_v1beta3/test_document_service.py @@ -2714,13 +2714,13 @@ def test_list_documents_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("dataset", ""),)), ) pager = client.list_documents(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 diff --git a/packages/google-cloud-domains/domains-v1-py.tar.gz b/packages/google-cloud-domains/domains-v1-py.tar.gz index e69de29bb2d1..98580d4a5669 100644 Binary files a/packages/google-cloud-domains/domains-v1-py.tar.gz and b/packages/google-cloud-domains/domains-v1-py.tar.gz differ diff --git a/packages/google-cloud-domains/tests/unit/gapic/domains_v1/test_domains.py b/packages/google-cloud-domains/tests/unit/gapic/domains_v1/test_domains.py index 04430136c5db..88d7181d767b 100644 --- a/packages/google-cloud-domains/tests/unit/gapic/domains_v1/test_domains.py +++ b/packages/google-cloud-domains/tests/unit/gapic/domains_v1/test_domains.py @@ -3453,13 +3453,13 @@ def test_list_registrations_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_registrations(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 diff --git a/packages/google-cloud-domains/tests/unit/gapic/domains_v1beta1/test_domains.py b/packages/google-cloud-domains/tests/unit/gapic/domains_v1beta1/test_domains.py index 4e33237676c8..e1450028a641 100644 --- a/packages/google-cloud-domains/tests/unit/gapic/domains_v1beta1/test_domains.py +++ b/packages/google-cloud-domains/tests/unit/gapic/domains_v1beta1/test_domains.py @@ -3453,13 +3453,13 @@ def test_list_registrations_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_registrations(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 diff --git a/packages/google-cloud-edgecontainer/tests/unit/gapic/edgecontainer_v1/test_edge_container.py b/packages/google-cloud-edgecontainer/tests/unit/gapic/edgecontainer_v1/test_edge_container.py index 27569bddb812..334bac2f4b8a 100644 --- a/packages/google-cloud-edgecontainer/tests/unit/gapic/edgecontainer_v1/test_edge_container.py +++ b/packages/google-cloud-edgecontainer/tests/unit/gapic/edgecontainer_v1/test_edge_container.py @@ -1546,13 +1546,13 @@ def test_list_clusters_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_clusters(request={}) - assert 
pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -4887,13 +4887,13 @@ def test_list_node_pools_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_node_pools(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -6991,13 +6991,13 @@ def test_list_machines_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_machines(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -7970,13 +7970,13 @@ def test_list_vpn_connections_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_vpn_connections(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 diff --git a/packages/google-cloud-edgenetwork/CHANGELOG.md b/packages/google-cloud-edgenetwork/CHANGELOG.md index 458a5f667c14..679635270015 100644 --- a/packages/google-cloud-edgenetwork/CHANGELOG.md +++ b/packages/google-cloud-edgenetwork/CHANGELOG.md @@ -1,5 +1,12 @@ # Changelog +## [0.1.8](https://github.com/googleapis/google-cloud-python/compare/google-cloud-edgenetwork-v0.1.7...google-cloud-edgenetwork-v0.1.8) (2024-06-27) + + +### Features + +* [google-cloud-edgenetwork] A new field `bonding_type` is added to message `.google.cloud.edgenetwork.v1.Subnet` ([#12842](https://github.com/googleapis/google-cloud-python/issues/12842)) ([5f272b3](https://github.com/googleapis/google-cloud-python/commit/5f272b3293fe54dd7d73930cdd2e634b15ed3e2f)) + ## [0.1.7](https://github.com/googleapis/google-cloud-python/compare/google-cloud-edgenetwork-v0.1.6...google-cloud-edgenetwork-v0.1.7) (2024-03-22) diff --git a/packages/google-cloud-edgenetwork/README.rst b/packages/google-cloud-edgenetwork/README.rst index d6ba04f873d7..a0f379a716b1 100644 --- a/packages/google-cloud-edgenetwork/README.rst +++ b/packages/google-cloud-edgenetwork/README.rst @@ -15,7 +15,7 @@ Python Client for Distributed Cloud Edge Network API .. |versions| image:: https://img.shields.io/pypi/pyversions/google-cloud-edgenetwork.svg :target: https://pypi.org/project/google-cloud-edgenetwork/ .. _Distributed Cloud Edge Network API: https://cloud.google.com/distributed-cloud/edge/latest/docs/overview -.. _Client Library Documentation: https://cloud.google.com/python/docs/reference/google-cloud-edgenetwork/latest +.. 
_Client Library Documentation: https://cloud.google.com/python/docs/reference/google-cloud-edgenetwork/latest/summary_overview .. _Product Documentation: https://cloud.google.com/distributed-cloud/edge/latest/docs/overview Quick Start diff --git a/packages/google-cloud-edgenetwork/docs/index.rst b/packages/google-cloud-edgenetwork/docs/index.rst index 5bfc41da6ad3..4bf5c7b789d9 100644 --- a/packages/google-cloud-edgenetwork/docs/index.rst +++ b/packages/google-cloud-edgenetwork/docs/index.rst @@ -21,3 +21,8 @@ For a list of all ``google-cloud-edgenetwork`` releases: :maxdepth: 2 CHANGELOG + +.. toctree:: + :hidden: + + summary_overview.md diff --git a/packages/google-cloud-edgenetwork/docs/summary_overview.md b/packages/google-cloud-edgenetwork/docs/summary_overview.md new file mode 100644 index 000000000000..b5efb059b3ec --- /dev/null +++ b/packages/google-cloud-edgenetwork/docs/summary_overview.md @@ -0,0 +1,22 @@ +[ +This is a templated file. Adding content to this file may result in it being +reverted. Instead, if you want to place additional content, create an +"overview_content.md" file in `docs/` directory. The Sphinx tool will +pick up on the content and merge the content. +]: # + +# Distributed Cloud Edge Network API API + +Overview of the APIs available for Distributed Cloud Edge Network API API. + +## All entries + +Classes, methods and properties & attributes for +Distributed Cloud Edge Network API API. + +[classes](https://cloud.google.com/python/docs/reference/google-cloud-edgenetwork/latest/summary_class.html) + +[methods](https://cloud.google.com/python/docs/reference/google-cloud-edgenetwork/latest/summary_method.html) + +[properties and +attributes](https://cloud.google.com/python/docs/reference/google-cloud-edgenetwork/latest/summary_property.html) diff --git a/packages/google-cloud-edgenetwork/google/cloud/edgenetwork/gapic_version.py b/packages/google-cloud-edgenetwork/google/cloud/edgenetwork/gapic_version.py index 558c8aab67c5..ec8d212c9160 100644 --- a/packages/google-cloud-edgenetwork/google/cloud/edgenetwork/gapic_version.py +++ b/packages/google-cloud-edgenetwork/google/cloud/edgenetwork/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.0.0" # {x-release-please-version} +__version__ = "0.1.8" # {x-release-please-version} diff --git a/packages/google-cloud-edgenetwork/google/cloud/edgenetwork_v1/gapic_version.py b/packages/google-cloud-edgenetwork/google/cloud/edgenetwork_v1/gapic_version.py index 558c8aab67c5..ec8d212c9160 100644 --- a/packages/google-cloud-edgenetwork/google/cloud/edgenetwork_v1/gapic_version.py +++ b/packages/google-cloud-edgenetwork/google/cloud/edgenetwork_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "0.0.0" # {x-release-please-version} +__version__ = "0.1.8" # {x-release-please-version} diff --git a/packages/google-cloud-edgenetwork/google/cloud/edgenetwork_v1/types/resources.py b/packages/google-cloud-edgenetwork/google/cloud/edgenetwork_v1/types/resources.py index a75a5fe01618..89d66c38ca2b 100644 --- a/packages/google-cloud-edgenetwork/google/cloud/edgenetwork_v1/types/resources.py +++ b/packages/google-cloud-edgenetwork/google/cloud/edgenetwork_v1/types/resources.py @@ -198,11 +198,40 @@ class Subnet(proto.Message): vlan_id (int): Optional. VLAN id provided by user. If not specified we assign one automatically. + bonding_type (google.cloud.edgenetwork_v1.types.Subnet.BondingType): + Optional. A bonding type in the subnet + creation specifies whether a VLAN being created + will be present on Bonded or Non-Bonded or Both + port types. In addition, this flag is to be used + to set the specific network configuration which + clusters can then use for their workloads based + on the bonding choice. state (google.cloud.edgenetwork_v1.types.ResourceState): Output only. Current stage of the resource to the device by config push. """ + class BondingType(proto.Enum): + r"""Bonding type in the subnet. + + Values: + BONDING_TYPE_UNSPECIFIED (0): + Unspecified + Bonding type will be unspecified by default and + if the user chooses to not specify a bonding + type at time of creating the VLAN. This will be + treated as mixed bonding where the VLAN will + have both bonded and non-bonded connectivity to + machines. + BONDED (1): + Single homed. + NON_BONDED (2): + Multi homed. + """ + BONDING_TYPE_UNSPECIFIED = 0 + BONDED = 1 + NON_BONDED = 2 + name: str = proto.Field( proto.STRING, number=1, @@ -242,6 +271,11 @@ class Subnet(proto.Message): proto.INT32, number=9, ) + bonding_type: BondingType = proto.Field( + proto.ENUM, + number=11, + enum=BondingType, + ) state: "ResourceState" = proto.Field( proto.ENUM, number=10, @@ -996,6 +1030,7 @@ class BgpStatus(proto.Enum): The DOWN state indicating BGP session is not established yet. """ + UNKNOWN = 0 UP = 1 DOWN = 2 diff --git a/packages/google-cloud-edgenetwork/noxfile.py b/packages/google-cloud-edgenetwork/noxfile.py index 1e6cd48d0529..67b7265f7586 100644 --- a/packages/google-cloud-edgenetwork/noxfile.py +++ b/packages/google-cloud-edgenetwork/noxfile.py @@ -160,14 +160,28 @@ def install_unittest_dependencies(session, *constraints): session.install("-e", ".", *constraints) -def default(session): +@nox.session(python=UNIT_TEST_PYTHON_VERSIONS) +@nox.parametrize( + "protobuf_implementation", + ["python", "upb", "cpp"], +) +def unit(session, protobuf_implementation): # Install all test dependencies, then install this package in-place. + if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12"): + session.skip("cpp implementation is not supported in python 3.11+") + constraints_path = str( CURRENT_DIRECTORY / "testing" / f"constraints-{session.python}.txt" ) install_unittest_dependencies(session, "-c", constraints_path) + # TODO(https://github.com/googleapis/synthtool/issues/1976): + # Remove the 'cpp' implementation once support for Protobuf 3.x is dropped. + # The 'cpp' implementation requires Protobuf<4. + if protobuf_implementation == "cpp": + session.install("protobuf<4") + # Run py.test against the unit tests. 
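As an aside on the `Subnet.bonding_type` field and `BondingType` enum added above: the sketch below shows how the new enum might be set when creating a subnet. The project, zone, network, CIDR, and VLAN values are placeholders, and the flattened `create_subnet(parent=..., subnet=..., subnet_id=...)` call is assumed to follow the client's existing long-running-operation pattern; it is not part of this change.

```python
from google.cloud import edgenetwork_v1

client = edgenetwork_v1.EdgeNetworkClient()

# Placeholder parent zone and network resource names; substitute real resources.
parent = "projects/my-project/locations/us-central1/zones/my-zone"
network = f"{parent}/networks/my-network"

subnet = edgenetwork_v1.Subnet(
    network=network,
    ipv4_cidr=["10.0.0.0/24"],
    vlan_id=100,
    # New in this release: request single-homed (bonded) connectivity for the VLAN.
    bonding_type=edgenetwork_v1.Subnet.BondingType.BONDED,
)

operation = client.create_subnet(parent=parent, subnet=subnet, subnet_id="my-subnet")
created = operation.result()  # block on the long-running operation
print(created.bonding_type)
```

Leaving `bonding_type` unset keeps the `BONDING_TYPE_UNSPECIFIED` behavior described in the enum docstring, i.e. mixed bonded and non-bonded connectivity.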
session.run( "py.test", @@ -181,15 +195,12 @@ def default(session): "--cov-fail-under=0", os.path.join("tests", "unit"), *session.posargs, + env={ + "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation, + }, ) -@nox.session(python=UNIT_TEST_PYTHON_VERSIONS) -def unit(session): - """Run the unit test suite.""" - default(session) - - def install_systemtest_dependencies(session, *constraints): # Use pre-release gRPC for system tests. # Exclude version 1.52.0rc1 which has a known issue. @@ -358,9 +369,16 @@ def docfx(session): @nox.session(python="3.12") -def prerelease_deps(session): +@nox.parametrize( + "protobuf_implementation", + ["python", "upb", "cpp"], +) +def prerelease_deps(session, protobuf_implementation): """Run all tests with prerelease versions of dependencies installed.""" + if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12"): + session.skip("cpp implementation is not supported in python 3.11+") + # Install all dependencies session.install("-e", ".[all, tests, tracing]") unit_deps_all = UNIT_TEST_STANDARD_DEPENDENCIES + UNIT_TEST_EXTERNAL_DEPENDENCIES @@ -397,9 +415,9 @@ def prerelease_deps(session): "protobuf", # dependency of grpc "six", + "grpc-google-iam-v1", "googleapis-common-protos", - # Exclude version 1.52.0rc1 which has a known issue. See https://github.com/grpc/grpc/issues/32163 - "grpcio!=1.52.0rc1", + "grpcio", "grpcio-status", "google-api-core", "google-auth", @@ -425,4 +443,10 @@ def prerelease_deps(session): session.run("python", "-c", "import grpc; print(grpc.__version__)") session.run("python", "-c", "import google.auth; print(google.auth.__version__)") - session.run("py.test", "tests/unit") + session.run( + "py.test", + "tests/unit", + env={ + "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation, + }, + ) diff --git a/packages/google-cloud-edgenetwork/samples/generated_samples/snippet_metadata_google.cloud.edgenetwork.v1.json b/packages/google-cloud-edgenetwork/samples/generated_samples/snippet_metadata_google.cloud.edgenetwork.v1.json index 04d64b8b1330..14a96300d2ab 100644 --- a/packages/google-cloud-edgenetwork/samples/generated_samples/snippet_metadata_google.cloud.edgenetwork.v1.json +++ b/packages/google-cloud-edgenetwork/samples/generated_samples/snippet_metadata_google.cloud.edgenetwork.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-edgenetwork", - "version": "0.1.0" + "version": "0.1.8" }, "snippets": [ { diff --git a/packages/google-cloud-edgenetwork/tests/unit/gapic/edgenetwork_v1/test_edge_network.py b/packages/google-cloud-edgenetwork/tests/unit/gapic/edgenetwork_v1/test_edge_network.py index afaedb54c271..c2bad4440a94 100644 --- a/packages/google-cloud-edgenetwork/tests/unit/gapic/edgenetwork_v1/test_edge_network.py +++ b/packages/google-cloud-edgenetwork/tests/unit/gapic/edgenetwork_v1/test_edge_network.py @@ -1869,13 +1869,13 @@ def test_list_zones_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_zones(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -2807,13 +2807,13 @@ def test_list_networks_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () 
+ expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_networks(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -4879,13 +4879,13 @@ def test_list_subnets_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_subnets(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -5059,6 +5059,7 @@ def test_get_subnet(request_type, transport: str = "grpc"): ipv4_cidr=["ipv4_cidr_value"], ipv6_cidr=["ipv6_cidr_value"], vlan_id=733, + bonding_type=resources.Subnet.BondingType.BONDED, state=resources.ResourceState.STATE_PENDING, ) response = client.get_subnet(request) @@ -5077,6 +5078,7 @@ def test_get_subnet(request_type, transport: str = "grpc"): assert response.ipv4_cidr == ["ipv4_cidr_value"] assert response.ipv6_cidr == ["ipv6_cidr_value"] assert response.vlan_id == 733 + assert response.bonding_type == resources.Subnet.BondingType.BONDED assert response.state == resources.ResourceState.STATE_PENDING @@ -5182,6 +5184,7 @@ async def test_get_subnet_empty_call_async(): ipv4_cidr=["ipv4_cidr_value"], ipv6_cidr=["ipv6_cidr_value"], vlan_id=733, + bonding_type=resources.Subnet.BondingType.BONDED, state=resources.ResourceState.STATE_PENDING, ) ) @@ -5259,6 +5262,7 @@ async def test_get_subnet_async( ipv4_cidr=["ipv4_cidr_value"], ipv6_cidr=["ipv6_cidr_value"], vlan_id=733, + bonding_type=resources.Subnet.BondingType.BONDED, state=resources.ResourceState.STATE_PENDING, ) ) @@ -5278,6 +5282,7 @@ async def test_get_subnet_async( assert response.ipv4_cidr == ["ipv4_cidr_value"] assert response.ipv6_cidr == ["ipv6_cidr_value"] assert response.vlan_id == 733 + assert response.bonding_type == resources.Subnet.BondingType.BONDED assert response.state == resources.ResourceState.STATE_PENDING @@ -7009,13 +7014,13 @@ def test_list_interconnects_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_interconnects(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -8395,13 +8400,13 @@ def test_list_interconnect_attachments_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_interconnect_attachments(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -10206,13 +10211,13 @@ def test_list_routers_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_routers(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = 
list(pager) assert len(results) == 6 @@ -15321,6 +15326,7 @@ def test_get_subnet_rest(request_type): ipv4_cidr=["ipv4_cidr_value"], ipv6_cidr=["ipv6_cidr_value"], vlan_id=733, + bonding_type=resources.Subnet.BondingType.BONDED, state=resources.ResourceState.STATE_PENDING, ) @@ -15343,6 +15349,7 @@ def test_get_subnet_rest(request_type): assert response.ipv4_cidr == ["ipv4_cidr_value"] assert response.ipv6_cidr == ["ipv6_cidr_value"] assert response.vlan_id == 733 + assert response.bonding_type == resources.Subnet.BondingType.BONDED assert response.state == resources.ResourceState.STATE_PENDING @@ -15632,6 +15639,7 @@ def test_create_subnet_rest(request_type): "ipv4_cidr": ["ipv4_cidr_value1", "ipv4_cidr_value2"], "ipv6_cidr": ["ipv6_cidr_value1", "ipv6_cidr_value2"], "vlan_id": 733, + "bonding_type": 1, "state": 1, } # The version of a generated dependency at test runtime may differ from the version used during generation. @@ -16049,6 +16057,7 @@ def test_update_subnet_rest(request_type): "ipv4_cidr": ["ipv4_cidr_value1", "ipv4_cidr_value2"], "ipv6_cidr": ["ipv6_cidr_value1", "ipv6_cidr_value2"], "vlan_id": 733, + "bonding_type": 1, "state": 1, } # The version of a generated dependency at test runtime may differ from the version used during generation. diff --git a/packages/google-cloud-enterpriseknowledgegraph/tests/unit/gapic/enterpriseknowledgegraph_v1/test_enterprise_knowledge_graph_service.py b/packages/google-cloud-enterpriseknowledgegraph/tests/unit/gapic/enterpriseknowledgegraph_v1/test_enterprise_knowledge_graph_service.py index f476ade8d27d..8bc8f3647e25 100644 --- a/packages/google-cloud-enterpriseknowledgegraph/tests/unit/gapic/enterpriseknowledgegraph_v1/test_enterprise_knowledge_graph_service.py +++ b/packages/google-cloud-enterpriseknowledgegraph/tests/unit/gapic/enterpriseknowledgegraph_v1/test_enterprise_knowledge_graph_service.py @@ -2476,13 +2476,13 @@ def test_list_entity_reconciliation_jobs_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_entity_reconciliation_jobs(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 diff --git a/packages/google-cloud-essential-contacts/tests/unit/gapic/essential_contacts_v1/test_essential_contacts_service.py b/packages/google-cloud-essential-contacts/tests/unit/gapic/essential_contacts_v1/test_essential_contacts_service.py index a2f05bc0b85d..bca4aba4769f 100644 --- a/packages/google-cloud-essential-contacts/tests/unit/gapic/essential_contacts_v1/test_essential_contacts_service.py +++ b/packages/google-cloud-essential-contacts/tests/unit/gapic/essential_contacts_v1/test_essential_contacts_service.py @@ -2398,13 +2398,13 @@ def test_list_contacts_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_contacts(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -3621,13 +3621,13 @@ def test_compute_contacts_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + 
expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.compute_contacts(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 diff --git a/packages/google-cloud-eventarc/tests/unit/gapic/eventarc_v1/test_eventarc.py b/packages/google-cloud-eventarc/tests/unit/gapic/eventarc_v1/test_eventarc.py index babd3f45617d..bfc157ac6ab4 100644 --- a/packages/google-cloud-eventarc/tests/unit/gapic/eventarc_v1/test_eventarc.py +++ b/packages/google-cloud-eventarc/tests/unit/gapic/eventarc_v1/test_eventarc.py @@ -1889,13 +1889,13 @@ def test_list_triggers_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_triggers(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -4005,13 +4005,13 @@ def test_list_channels_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_channels(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -6080,13 +6080,13 @@ def test_list_providers_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_providers(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -7078,13 +7078,13 @@ def test_list_channel_connections_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_channel_connections(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 diff --git a/packages/google-cloud-filestore/tests/unit/gapic/filestore_v1/test_cloud_filestore_manager.py b/packages/google-cloud-filestore/tests/unit/gapic/filestore_v1/test_cloud_filestore_manager.py index 115f6c0bc7eb..6d5118e0e979 100644 --- a/packages/google-cloud-filestore/tests/unit/gapic/filestore_v1/test_cloud_filestore_manager.py +++ b/packages/google-cloud-filestore/tests/unit/gapic/filestore_v1/test_cloud_filestore_manager.py @@ -1620,13 +1620,13 @@ def test_list_instances_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_instances(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -4328,13 +4328,13 @@ def test_list_snapshots_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = 
tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_snapshots(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -6422,13 +6422,13 @@ def test_list_backups_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_backups(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 diff --git a/packages/google-cloud-functions/tests/unit/gapic/functions_v1/test_cloud_functions_service.py b/packages/google-cloud-functions/tests/unit/gapic/functions_v1/test_cloud_functions_service.py index 5efcec6badf5..723e267e39f3 100644 --- a/packages/google-cloud-functions/tests/unit/gapic/functions_v1/test_cloud_functions_service.py +++ b/packages/google-cloud-functions/tests/unit/gapic/functions_v1/test_cloud_functions_service.py @@ -1536,13 +1536,13 @@ def test_list_functions_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_functions(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 diff --git a/packages/google-cloud-functions/tests/unit/gapic/functions_v2/test_function_service.py b/packages/google-cloud-functions/tests/unit/gapic/functions_v2/test_function_service.py index 85f5c12faf30..84c99aad9892 100644 --- a/packages/google-cloud-functions/tests/unit/gapic/functions_v2/test_function_service.py +++ b/packages/google-cloud-functions/tests/unit/gapic/functions_v2/test_function_service.py @@ -1964,13 +1964,13 @@ def test_list_functions_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_functions(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 diff --git a/packages/google-cloud-gdchardwaremanagement/.OwlBot.yaml b/packages/google-cloud-gdchardwaremanagement/.OwlBot.yaml new file mode 100644 index 000000000000..ad3df44b591d --- /dev/null +++ b/packages/google-cloud-gdchardwaremanagement/.OwlBot.yaml @@ -0,0 +1,18 @@ +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +deep-copy-regex: + - source: /google/cloud/gdchardwaremanagement/(v.*)/.*-py + dest: /owl-bot-staging/google-cloud-gdchardwaremanagement/$1 +api-name: google-cloud-gdchardwaremanagement diff --git a/packages/google-cloud-gdchardwaremanagement/.coveragerc b/packages/google-cloud-gdchardwaremanagement/.coveragerc new file mode 100644 index 000000000000..782c0cbc9819 --- /dev/null +++ b/packages/google-cloud-gdchardwaremanagement/.coveragerc @@ -0,0 +1,13 @@ +[run] +branch = True + +[report] +show_missing = True +omit = + google/cloud/gdchardwaremanagement/__init__.py + google/cloud/gdchardwaremanagement/gapic_version.py +exclude_lines = + # Re-enable the standard pragma + pragma: NO COVER + # Ignore debug-only repr + def __repr__ diff --git a/packages/google-cloud-gdchardwaremanagement/.flake8 b/packages/google-cloud-gdchardwaremanagement/.flake8 new file mode 100644 index 000000000000..87f6e408c47d --- /dev/null +++ b/packages/google-cloud-gdchardwaremanagement/.flake8 @@ -0,0 +1,33 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Generated by synthtool. DO NOT EDIT! +[flake8] +ignore = E203, E231, E266, E501, W503 +exclude = + # Exclude generated code. + **/proto/** + **/gapic/** + **/services/** + **/types/** + *_pb2.py + + # Standard linting exemptions. + **/.nox/** + __pycache__, + .git, + *.pyc, + conf.py diff --git a/packages/google-cloud-gdchardwaremanagement/.gitignore b/packages/google-cloud-gdchardwaremanagement/.gitignore new file mode 100644 index 000000000000..b4243ced74e4 --- /dev/null +++ b/packages/google-cloud-gdchardwaremanagement/.gitignore @@ -0,0 +1,63 @@ +*.py[cod] +*.sw[op] + +# C extensions +*.so + +# Packages +*.egg +*.egg-info +dist +build +eggs +.eggs +parts +bin +var +sdist +develop-eggs +.installed.cfg +lib +lib64 +__pycache__ + +# Installer logs +pip-log.txt + +# Unit test / coverage reports +.coverage +.nox +.cache +.pytest_cache + + +# Mac +.DS_Store + +# JetBrains +.idea + +# VS Code +.vscode + +# emacs +*~ + +# Built documentation +docs/_build +bigquery/docs/generated +docs.metadata + +# Virtual environment +env/ + +# Test logs +coverage.xml +*sponge_log.xml + +# System test environment variables. +system_tests/local_test_setup + +# Make sure a generated file isn't accidentally committed. 
+pylintrc +pylintrc.test diff --git a/packages/google-cloud-gdchardwaremanagement/.repo-metadata.json b/packages/google-cloud-gdchardwaremanagement/.repo-metadata.json new file mode 100644 index 000000000000..eef547bfe0f9 --- /dev/null +++ b/packages/google-cloud-gdchardwaremanagement/.repo-metadata.json @@ -0,0 +1,17 @@ +{ + "name": "google-cloud-gdchardwaremanagement", + "name_pretty": "GDC Hardware Management API", + "api_description": "Google Distributed Cloud connected allows you to run Kubernetes clusters on dedicated hardware provided and maintained by Google that is separate from the Google Cloud data center.", + "product_documentation": "https://cloud.google.com/distributed-cloud/edge/latest/docs", + "client_documentation": "https://cloud.google.com/python/docs/reference/google-cloud-gdchardwaremanagement/latest", + "issue_tracker": "https://issuetracker.google.com/issues/new?component=1563150", + "release_level": "preview", + "language": "python", + "library_type": "GAPIC_AUTO", + "repo": "googleapis/google-cloud-python", + "distribution_name": "google-cloud-gdchardwaremanagement", + "api_id": "gdchardwaremanagement.googleapis.com", + "default_version": "v1alpha", + "codeowner_team": "", + "api_shortname": "gdchardwaremanagement" +} diff --git a/packages/google-cloud-gdchardwaremanagement/CHANGELOG.md b/packages/google-cloud-gdchardwaremanagement/CHANGELOG.md new file mode 100644 index 000000000000..8f053157d8fa --- /dev/null +++ b/packages/google-cloud-gdchardwaremanagement/CHANGELOG.md @@ -0,0 +1,17 @@ +# Changelog + +## [0.1.1](https://github.com/googleapis/google-cloud-python/compare/google-cloud-gdchardwaremanagement-v0.1.0...google-cloud-gdchardwaremanagement-v0.1.1) (2024-06-27) + + +### Features + +* add additional zone states ([a4bfef9](https://github.com/googleapis/google-cloud-python/commit/a4bfef92d5b6f30e40ef257b33748ce4b708e2ff)) + +## 0.1.0 (2024-06-26) + + +### Features + +* add initial files for google.cloud.gdchardwaremanagement.v1alpha ([#12824](https://github.com/googleapis/google-cloud-python/issues/12824)) ([6c02375](https://github.com/googleapis/google-cloud-python/commit/6c02375e05dba7005ec9137ed7c5959127a9be46)) + +## Changelog diff --git a/packages/google-cloud-gdchardwaremanagement/CODE_OF_CONDUCT.md b/packages/google-cloud-gdchardwaremanagement/CODE_OF_CONDUCT.md new file mode 100644 index 000000000000..039f43681204 --- /dev/null +++ b/packages/google-cloud-gdchardwaremanagement/CODE_OF_CONDUCT.md @@ -0,0 +1,95 @@ + +# Code of Conduct + +## Our Pledge + +In the interest of fostering an open and welcoming environment, we as +contributors and maintainers pledge to making participation in our project and +our community a harassment-free experience for everyone, regardless of age, body +size, disability, ethnicity, gender identity and expression, level of +experience, education, socio-economic status, nationality, personal appearance, +race, religion, or sexual identity and orientation. 
+ +## Our Standards + +Examples of behavior that contributes to creating a positive environment +include: + +* Using welcoming and inclusive language +* Being respectful of differing viewpoints and experiences +* Gracefully accepting constructive criticism +* Focusing on what is best for the community +* Showing empathy towards other community members + +Examples of unacceptable behavior by participants include: + +* The use of sexualized language or imagery and unwelcome sexual attention or + advances +* Trolling, insulting/derogatory comments, and personal or political attacks +* Public or private harassment +* Publishing others' private information, such as a physical or electronic + address, without explicit permission +* Other conduct which could reasonably be considered inappropriate in a + professional setting + +## Our Responsibilities + +Project maintainers are responsible for clarifying the standards of acceptable +behavior and are expected to take appropriate and fair corrective action in +response to any instances of unacceptable behavior. + +Project maintainers have the right and responsibility to remove, edit, or reject +comments, commits, code, wiki edits, issues, and other contributions that are +not aligned to this Code of Conduct, or to ban temporarily or permanently any +contributor for other behaviors that they deem inappropriate, threatening, +offensive, or harmful. + +## Scope + +This Code of Conduct applies both within project spaces and in public spaces +when an individual is representing the project or its community. Examples of +representing a project or community include using an official project e-mail +address, posting via an official social media account, or acting as an appointed +representative at an online or offline event. Representation of a project may be +further defined and clarified by project maintainers. + +This Code of Conduct also applies outside the project spaces when the Project +Steward has a reasonable belief that an individual's behavior may have a +negative impact on the project or its community. + +## Conflict Resolution + +We do not believe that all conflict is bad; healthy debate and disagreement +often yield positive results. However, it is never okay to be disrespectful or +to engage in behavior that violates the project’s code of conduct. + +If you see someone violating the code of conduct, you are encouraged to address +the behavior directly with those involved. Many issues can be resolved quickly +and easily, and this gives people more control over the outcome of their +dispute. If you are unable to resolve the matter for any reason, or if the +behavior is threatening or harassing, report it. We are dedicated to providing +an environment where participants feel welcome and safe. + + +Reports should be directed to *googleapis-stewards@google.com*, the +Project Steward(s) for *Google Cloud Client Libraries*. It is the Project Steward’s duty to +receive and address reported violations of the code of conduct. They will then +work with a committee consisting of representatives from the Open Source +Programs Office and the Google Open Source Strategy team. If for any reason you +are uncomfortable reaching out to the Project Steward, please email +opensource@google.com. + +We will investigate every complaint, but you may not receive a direct response. 
+We will use our discretion in determining when and how to follow up on reported +incidents, which may range from not taking action to permanent expulsion from +the project and project-sponsored spaces. We will notify the accused of the +report and provide them an opportunity to discuss it before any action is taken. +The identity of the reporter will be omitted from the details of the report +supplied to the accused. In potentially harmful situations, such as ongoing +harassment or threats to anyone's safety, we may take action without notice. + +## Attribution + +This Code of Conduct is adapted from the Contributor Covenant, version 1.4, +available at +https://www.contributor-covenant.org/version/1/4/code-of-conduct.html \ No newline at end of file diff --git a/packages/google-cloud-gdchardwaremanagement/CONTRIBUTING.rst b/packages/google-cloud-gdchardwaremanagement/CONTRIBUTING.rst new file mode 100644 index 000000000000..e2d3e9b6bd53 --- /dev/null +++ b/packages/google-cloud-gdchardwaremanagement/CONTRIBUTING.rst @@ -0,0 +1,271 @@ +.. Generated by synthtool. DO NOT EDIT! +############ +Contributing +############ + +#. **Please sign one of the contributor license agreements below.** +#. Fork the repo, develop and test your code changes, add docs. +#. Make sure that your commit messages clearly describe the changes. +#. Send a pull request. (Please Read: `Faster Pull Request Reviews`_) + +.. _Faster Pull Request Reviews: https://github.com/kubernetes/community/blob/master/contributors/guide/pull-requests.md#best-practices-for-faster-reviews + +.. contents:: Here are some guidelines for hacking on the Google Cloud Client libraries. + +*************** +Adding Features +*************** + +In order to add a feature: + +- The feature must be documented in both the API and narrative + documentation. + +- The feature must work fully on the following CPython versions: + 3.7, 3.8, 3.9, 3.10, 3.11 and 3.12 on both UNIX and Windows. + +- The feature must not add unnecessary dependencies (where + "unnecessary" is of course subjective, but new dependencies should + be discussed). + +**************************** +Using a Development Checkout +**************************** + +You'll have to create a development environment using a Git checkout: + +- While logged into your GitHub account, navigate to the + ``google-cloud-python`` `repo`_ on GitHub. + +- Fork and clone the ``google-cloud-python`` repository to your GitHub account by + clicking the "Fork" button. + +- Clone your fork of ``google-cloud-python`` from your GitHub account to your local + computer, substituting your account username and specifying the destination + as ``hack-on-google-cloud-python``. E.g.:: + + $ cd ${HOME} + $ git clone git@github.com:USERNAME/google-cloud-python.git hack-on-google-cloud-python + $ cd hack-on-google-cloud-python + # Configure remotes such that you can pull changes from the googleapis/google-cloud-python + # repository into your local repository. + $ git remote add upstream git@github.com:googleapis/google-cloud-python.git + # fetch and merge changes from upstream into main + $ git fetch upstream + $ git merge upstream/main + +Now your local repo is set up such that you will push changes to your GitHub +repo, from which you can submit a pull request. + +To work on the codebase and run the tests, we recommend using ``nox``, +but you can also use a ``virtualenv`` of your own creation. + +.. 
_repo: https://github.com/googleapis/google-cloud-python + +Using ``nox`` +============= + +We use `nox `__ to instrument our tests. + +- To test your changes, run unit tests with ``nox``:: + $ nox -s unit + +- To run a single unit test:: + + $ nox -s unit-3.12 -- -k + + + .. note:: + + The unit tests and system tests are described in the + ``noxfile.py`` files in each directory. + +.. nox: https://pypi.org/project/nox/ + +***************************************** +I'm getting weird errors... Can you help? +***************************************** + +If the error mentions ``Python.h`` not being found, +install ``python-dev`` and try again. +On Debian/Ubuntu:: + + $ sudo apt-get install python-dev + +************ +Coding Style +************ +- We use the automatic code formatter ``black``. You can run it using + the nox session ``blacken``. This will eliminate many lint errors. Run via:: + + $ nox -s blacken + +- PEP8 compliance is required, with exceptions defined in the linter configuration. + If you have ``nox`` installed, you can test that you have not introduced + any non-compliant code via:: + + $ nox -s lint + +- In order to make ``nox -s lint`` run faster, you can set some environment + variables:: + + export GOOGLE_CLOUD_TESTING_REMOTE="upstream" + export GOOGLE_CLOUD_TESTING_BRANCH="main" + + By doing this, you are specifying the location of the most up-to-date + version of ``google-cloud-python``. The + remote name ``upstream`` should point to the official ``googleapis`` + checkout and the branch should be the default branch on that remote (``main``). + +- This repository contains configuration for the + `pre-commit `__ tool, which automates checking + our linters during a commit. If you have it installed on your ``$PATH``, + you can enable enforcing those checks via: + +.. code-block:: bash + + $ pre-commit install + pre-commit installed at .git/hooks/pre-commit + +Exceptions to PEP8: + +- Many unit tests use a helper method, ``_call_fut`` ("FUT" is short for + "Function-Under-Test"), which is PEP8-incompliant, but more readable. + Some also use a local variable, ``MUT`` (short for "Module-Under-Test"). + +******************** +Running System Tests +******************** + +- To run system tests, you can execute:: + + # Run all system tests + $ nox -s system + + # Run a single system test + $ nox -s system-3.12 -- -k + + + .. note:: + + System tests are only configured to run under Python 3.8, 3.9, 3.10, 3.11 and 3.12. + For expediency, we do not run them in older versions of Python 3. + + This alone will not run the tests. You'll need to change some local + auth settings and change some configuration in your project to + run all the tests. + +- System tests will be run against an actual project. You should use local credentials from gcloud when possible. See `Best practices for application authentication `__. Some tests require a service account. For those tests see `Authenticating as a service account `__. + +************* +Test Coverage +************* + +- The codebase *must* have 100% test statement coverage after each commit. + You can test coverage via ``nox -s cover``. 
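Related to the unit-test workflow described above: the noxfile changes earlier in this diff replace the single `default` session with a `unit` session parametrized over protobuf implementations, exporting `PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION` for each run. A minimal sketch of that pattern follows; the dependency list and Python versions here are illustrative, not a copy of the generated noxfile.

```python
# noxfile.py (sketch)
import nox

UNIT_TEST_PYTHON_VERSIONS = ["3.8", "3.9", "3.10", "3.11", "3.12"]


@nox.session(python=UNIT_TEST_PYTHON_VERSIONS)
@nox.parametrize("protobuf_implementation", ["python", "upb", "cpp"])
def unit(session, protobuf_implementation):
    """Run the unit test suite under a specific protobuf runtime."""
    if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12"):
        session.skip("cpp implementation is not supported in python 3.11+")

    session.install("-e", ".", "pytest", "pytest-cov")
    if protobuf_implementation == "cpp":
        # The cpp implementation requires protobuf 3.x.
        session.install("protobuf<4")

    session.run(
        "py.test",
        "tests/unit",
        *session.posargs,
        env={"PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation},
    )
```

Individual parametrizations can then be selected from the command line, e.g. `nox -s "unit-3.12(protobuf_implementation='upb')"`.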
+ +****************************************************** +Documentation Coverage and Building HTML Documentation +****************************************************** + +If you fix a bug, and the bug requires an API or behavior modification, all +documentation in this package which references that API or behavior must be +changed to reflect the bug fix, ideally in the same commit that fixes the bug +or adds the feature. + +Build the docs via: + + $ nox -s docs + +************************* +Samples and code snippets +************************* + +Code samples and snippets live in the `samples/` catalogue. Feel free to +provide more examples, but make sure to write tests for those examples. +Each folder containing example code requires its own `noxfile.py` script +which automates testing. If you decide to create a new folder, you can +base it on the `samples/snippets` folder (providing `noxfile.py` and +the requirements files). + +The tests will run against a real Google Cloud Project, so you should +configure them just like the System Tests. + +- To run sample tests, you can execute:: + + # Run all tests in a folder + $ cd samples/snippets + $ nox -s py-3.8 + + # Run a single sample test + $ cd samples/snippets + $ nox -s py-3.8 -- -k + +******************************************** +Note About ``README`` as it pertains to PyPI +******************************************** + +The `description on PyPI`_ for the project comes directly from the +``README``. Due to the reStructuredText (``rst``) parser used by +PyPI, relative links which will work on GitHub (e.g. ``CONTRIBUTING.rst`` +instead of +``https://github.com/googleapis/google-cloud-python/blob/main/CONTRIBUTING.rst``) +may cause problems creating links or rendering the description. + +.. _description on PyPI: https://pypi.org/project/google-cloud-gdchardwaremanagement + + +************************* +Supported Python Versions +************************* + +We support: + +- `Python 3.7`_ +- `Python 3.8`_ +- `Python 3.9`_ +- `Python 3.10`_ +- `Python 3.11`_ +- `Python 3.12`_ + +.. _Python 3.7: https://docs.python.org/3.7/ +.. _Python 3.8: https://docs.python.org/3.8/ +.. _Python 3.9: https://docs.python.org/3.9/ +.. _Python 3.10: https://docs.python.org/3.10/ +.. _Python 3.11: https://docs.python.org/3.11/ +.. _Python 3.12: https://docs.python.org/3.12/ + + +Supported versions can be found in our ``noxfile.py`` `config`_. + +.. _config: https://github.com/googleapis/google-cloud-python/blob/main/packages/google-cloud-gdchardwaremanagement/noxfile.py + + +********** +Versioning +********** + +This library follows `Semantic Versioning`_. + +.. _Semantic Versioning: http://semver.org/ + +Some packages are currently in major version zero (``0.y.z``), which means that +anything may change at any time and the public API should not be considered +stable. 
+ +****************************** +Contributor License Agreements +****************************** + +Before we can accept your pull requests you'll need to sign a Contributor +License Agreement (CLA): + +- **If you are an individual writing original source code** and **you own the + intellectual property**, then you'll need to sign an + `individual CLA `__. +- **If you work for a company that wants to allow you to contribute your work**, + then you'll need to sign a + `corporate CLA `__. + +You can sign these electronically (just scroll to the bottom). After that, +we'll be able to accept your pull requests. diff --git a/packages/google-cloud-gdchardwaremanagement/LICENSE b/packages/google-cloud-gdchardwaremanagement/LICENSE new file mode 100644 index 000000000000..d64569567334 --- /dev/null +++ b/packages/google-cloud-gdchardwaremanagement/LICENSE @@ -0,0 +1,202 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. 
For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. 
The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+ + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/packages/google-cloud-gdchardwaremanagement/MANIFEST.in b/packages/google-cloud-gdchardwaremanagement/MANIFEST.in new file mode 100644 index 000000000000..e0a66705318e --- /dev/null +++ b/packages/google-cloud-gdchardwaremanagement/MANIFEST.in @@ -0,0 +1,25 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Generated by synthtool. DO NOT EDIT! +include README.rst LICENSE +recursive-include google *.json *.proto py.typed +recursive-include tests * +global-exclude *.py[co] +global-exclude __pycache__ + +# Exclude scripts for samples readmegen +prune scripts/readme-gen diff --git a/packages/google-cloud-gdchardwaremanagement/README.rst b/packages/google-cloud-gdchardwaremanagement/README.rst new file mode 100644 index 000000000000..c614d543d4b2 --- /dev/null +++ b/packages/google-cloud-gdchardwaremanagement/README.rst @@ -0,0 +1,108 @@ +Python Client for GDC Hardware Management API +============================================= + +|preview| |pypi| |versions| + +`GDC Hardware Management API`_: Google Distributed Cloud connected allows you to run Kubernetes clusters on dedicated hardware provided and maintained by Google that is separate from the Google Cloud data center. + +- `Client Library Documentation`_ +- `Product Documentation`_ + +.. |preview| image:: https://img.shields.io/badge/support-preview-orange.svg + :target: https://github.com/googleapis/google-cloud-python/blob/main/README.rst#stability-levels +.. 
|pypi| image:: https://img.shields.io/pypi/v/google-cloud-gdchardwaremanagement.svg + :target: https://pypi.org/project/google-cloud-gdchardwaremanagement/ +.. |versions| image:: https://img.shields.io/pypi/pyversions/google-cloud-gdchardwaremanagement.svg + :target: https://pypi.org/project/google-cloud-gdchardwaremanagement/ +.. _GDC Hardware Management API: https://cloud.google.com/distributed-cloud/edge/latest/docs +.. _Client Library Documentation: https://cloud.google.com/python/docs/reference/google-cloud-gdchardwaremanagement/latest/summary_overview +.. _Product Documentation: https://cloud.google.com/distributed-cloud/edge/latest/docs + +Quick Start +----------- + +In order to use this library, you first need to go through the following steps: + +1. `Select or create a Cloud Platform project.`_ +2. `Enable billing for your project.`_ +3. `Enable the GDC Hardware Management API.`_ +4. `Setup Authentication.`_ + +.. _Select or create a Cloud Platform project.: https://console.cloud.google.com/project +.. _Enable billing for your project.: https://cloud.google.com/billing/docs/how-to/modify-project#enable_billing_for_a_project +.. _Enable the GDC Hardware Management API.: https://cloud.google.com/distributed-cloud/edge/latest/docs +.. _Setup Authentication.: https://googleapis.dev/python/google-api-core/latest/auth.html + +Installation +~~~~~~~~~~~~ + +Install this library in a virtual environment using `venv`_. `venv`_ is a tool that +creates isolated Python environments. These isolated environments can have separate +versions of Python packages, which allows you to isolate one project's dependencies +from the dependencies of other projects. + +With `venv`_, it's possible to install this library without needing system +install permissions, and without clashing with the installed system +dependencies. + +.. _`venv`: https://docs.python.org/3/library/venv.html + + +Code samples and snippets +~~~~~~~~~~~~~~~~~~~~~~~~~ + +Code samples and snippets live in the `samples/`_ folder. + +.. _samples/: https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-gdchardwaremanagement/samples + + +Supported Python Versions +^^^^^^^^^^^^^^^^^^^^^^^^^ +Our client libraries are compatible with all current `active`_ and `maintenance`_ versions of +Python. + +Python >= 3.7 + +.. _active: https://devguide.python.org/devcycle/#in-development-main-branch +.. _maintenance: https://devguide.python.org/devcycle/#maintenance-branches + +Unsupported Python Versions +^^^^^^^^^^^^^^^^^^^^^^^^^^^ +Python <= 3.6 + +If you are using an `end-of-life`_ +version of Python, we recommend that you update as soon as possible to an actively supported version. + +.. 
_end-of-life: https://devguide.python.org/devcycle/#end-of-life-branches + +Mac/Linux +^^^^^^^^^ + +.. code-block:: console + + python3 -m venv + source /bin/activate + pip install google-cloud-gdchardwaremanagement + + +Windows +^^^^^^^ + +.. code-block:: console + + py -m venv + .\\Scripts\activate + pip install google-cloud-gdchardwaremanagement + +Next Steps +~~~~~~~~~~ + +- Read the `Client Library Documentation`_ for GDC Hardware Management API + to see other available methods on the client. +- Read the `GDC Hardware Management API Product documentation`_ to learn + more about the product and see How-to Guides. +- View this `README`_ to see the full list of Cloud + APIs that we cover. + +.. _GDC Hardware Management API Product documentation: https://cloud.google.com/distributed-cloud/edge/latest/docs +.. _README: https://github.com/googleapis/google-cloud-python/blob/main/README.rst diff --git a/packages/google-cloud-gdchardwaremanagement/docs/CHANGELOG.md b/packages/google-cloud-gdchardwaremanagement/docs/CHANGELOG.md new file mode 120000 index 000000000000..04c99a55caae --- /dev/null +++ b/packages/google-cloud-gdchardwaremanagement/docs/CHANGELOG.md @@ -0,0 +1 @@ +../CHANGELOG.md \ No newline at end of file diff --git a/packages/google-cloud-gdchardwaremanagement/docs/README.rst b/packages/google-cloud-gdchardwaremanagement/docs/README.rst new file mode 120000 index 000000000000..89a0106941ff --- /dev/null +++ b/packages/google-cloud-gdchardwaremanagement/docs/README.rst @@ -0,0 +1 @@ +../README.rst \ No newline at end of file diff --git a/packages/google-cloud-gdchardwaremanagement/docs/_static/custom.css b/packages/google-cloud-gdchardwaremanagement/docs/_static/custom.css new file mode 100644 index 000000000000..b0a295464b23 --- /dev/null +++ b/packages/google-cloud-gdchardwaremanagement/docs/_static/custom.css @@ -0,0 +1,20 @@ +div#python2-eol { + border-color: red; + border-width: medium; +} + +/* Ensure minimum width for 'Parameters' / 'Returns' column */ +dl.field-list > dt { + min-width: 100px +} + +/* Insert space between methods for readability */ +dl.method { + padding-top: 10px; + padding-bottom: 10px +} + +/* Insert empty space between classes */ +dl.class { + padding-bottom: 50px +} diff --git a/packages/google-cloud-gdchardwaremanagement/docs/_templates/layout.html b/packages/google-cloud-gdchardwaremanagement/docs/_templates/layout.html new file mode 100644 index 000000000000..6316a537f72b --- /dev/null +++ b/packages/google-cloud-gdchardwaremanagement/docs/_templates/layout.html @@ -0,0 +1,50 @@ + +{% extends "!layout.html" %} +{%- block content %} +{%- if theme_fixed_sidebar|lower == 'true' %} +
+ {{ sidebar() }} + {%- block document %} +
+ {%- if render_sidebar %} +
+ {%- endif %} + + {%- block relbar_top %} + {%- if theme_show_relbar_top|tobool %} + + {%- endif %} + {% endblock %} + +
+
+ As of January 1, 2020 this library no longer supports Python 2 on the latest released version. + Library versions released prior to that date will continue to be available. For more information please + visit Python 2 support on Google Cloud. +
+ {% block body %} {% endblock %} +
+ + {%- block relbar_bottom %} + {%- if theme_show_relbar_bottom|tobool %} + + {%- endif %} + {% endblock %} + + {%- if render_sidebar %} +
+ {%- endif %} +
+ {%- endblock %} +
+
+{%- else %} +{{ super() }} +{%- endif %} +{%- endblock %} diff --git a/packages/google-cloud-gdchardwaremanagement/docs/conf.py b/packages/google-cloud-gdchardwaremanagement/docs/conf.py new file mode 100644 index 000000000000..ecb8de7d3ca8 --- /dev/null +++ b/packages/google-cloud-gdchardwaremanagement/docs/conf.py @@ -0,0 +1,384 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# google-cloud-gdchardwaremanagement documentation build configuration file +# +# This file is execfile()d with the current directory set to its +# containing dir. +# +# Note that not all possible configuration values are present in this +# autogenerated file. +# +# All configuration values have a default; values that are commented out +# serve to show the default. + +import os +import shlex +import sys + +# If extensions (or modules to document with autodoc) are in another directory, +# add these directories to sys.path here. If the directory is relative to the +# documentation root, use os.path.abspath to make it absolute, like shown here. +sys.path.insert(0, os.path.abspath("..")) + +# For plugins that can not read conf.py. +# See also: https://github.com/docascode/sphinx-docfx-yaml/issues/85 +sys.path.insert(0, os.path.abspath(".")) + +__version__ = "" + +# -- General configuration ------------------------------------------------ + +# If your documentation needs a minimal Sphinx version, state it here. +needs_sphinx = "1.5.5" + +# Add any Sphinx extension module names here, as strings. They can be +# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom +# ones. +extensions = [ + "sphinx.ext.autodoc", + "sphinx.ext.autosummary", + "sphinx.ext.intersphinx", + "sphinx.ext.coverage", + "sphinx.ext.doctest", + "sphinx.ext.napoleon", + "sphinx.ext.todo", + "sphinx.ext.viewcode", + "recommonmark", +] + +# autodoc/autosummary flags +autoclass_content = "both" +autodoc_default_options = {"members": True} +autosummary_generate = True + + +# Add any paths that contain templates here, relative to this directory. +templates_path = ["_templates"] + +# The suffix(es) of source filenames. +# You can specify multiple suffix as a list of string: +# source_suffix = ['.rst', '.md'] +source_suffix = [".rst", ".md"] + +# The encoding of source files. +# source_encoding = 'utf-8-sig' + +# The root toctree document. +root_doc = "index" + +# General information about the project. +project = "google-cloud-gdchardwaremanagement" +copyright = "2019, Google" +author = "Google APIs" + +# The version info for the project you're documenting, acts as replacement for +# |version| and |release|, also used in various other places throughout the +# built documents. +# +# The full version, including alpha/beta/rc tags. +release = __version__ +# The short X.Y version. +version = ".".join(release.split(".")[0:2]) + +# The language for content autogenerated by Sphinx. 
Refer to documentation +# for a list of supported languages. +# +# This is also used if you do content translation via gettext catalogs. +# Usually you set "language" from the command line for these cases. +language = None + +# There are two options for replacing |today|: either, you set today to some +# non-false value, then it is used: +# today = '' +# Else, today_fmt is used as the format for a strftime call. +# today_fmt = '%B %d, %Y' + +# List of patterns, relative to source directory, that match files and +# directories to ignore when looking for source files. +exclude_patterns = [ + "_build", + "**/.nox/**/*", + "samples/AUTHORING_GUIDE.md", + "samples/CONTRIBUTING.md", + "samples/snippets/README.rst", +] + +# The reST default role (used for this markup: `text`) to use for all +# documents. +# default_role = None + +# If true, '()' will be appended to :func: etc. cross-reference text. +# add_function_parentheses = True + +# If true, the current module name will be prepended to all description +# unit titles (such as .. function::). +# add_module_names = True + +# If true, sectionauthor and moduleauthor directives will be shown in the +# output. They are ignored by default. +# show_authors = False + +# The name of the Pygments (syntax highlighting) style to use. +pygments_style = "sphinx" + +# A list of ignored prefixes for module index sorting. +# modindex_common_prefix = [] + +# If true, keep warnings as "system message" paragraphs in the built documents. +# keep_warnings = False + +# If true, `todo` and `todoList` produce output, else they produce nothing. +todo_include_todos = True + + +# -- Options for HTML output ---------------------------------------------- + +# The theme to use for HTML and HTML Help pages. See the documentation for +# a list of builtin themes. +html_theme = "alabaster" + +# Theme options are theme-specific and customize the look and feel of a theme +# further. For a list of options available for each theme, see the +# documentation. +html_theme_options = { + "description": "Google Cloud Client Libraries for google-cloud-gdchardwaremanagement", + "github_user": "googleapis", + "github_repo": "google-cloud-python", + "github_banner": True, + "font_family": "'Roboto', Georgia, sans", + "head_font_family": "'Roboto', Georgia, serif", + "code_font_family": "'Roboto Mono', 'Consolas', monospace", +} + +# Add any paths that contain custom themes here, relative to this directory. +# html_theme_path = [] + +# The name for this set of Sphinx documents. If None, it defaults to +# " v documentation". +# html_title = None + +# A shorter title for the navigation bar. Default is the same as html_title. +# html_short_title = None + +# The name of an image file (relative to this directory) to place at the top +# of the sidebar. +# html_logo = None + +# The name of an image file (within the static path) to use as favicon of the +# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 +# pixels large. +# html_favicon = None + +# Add any paths that contain custom static files (such as style sheets) here, +# relative to this directory. They are copied after the builtin static files, +# so a file named "default.css" will overwrite the builtin "default.css". +html_static_path = ["_static"] + +# Add any extra paths that contain custom files (such as robots.txt or +# .htaccess) here, relative to this directory. These files are copied +# directly to the root of the documentation. 
+# html_extra_path = [] + +# If not '', a 'Last updated on:' timestamp is inserted at every page bottom, +# using the given strftime format. +# html_last_updated_fmt = '%b %d, %Y' + +# If true, SmartyPants will be used to convert quotes and dashes to +# typographically correct entities. +# html_use_smartypants = True + +# Custom sidebar templates, maps document names to template names. +# html_sidebars = {} + +# Additional templates that should be rendered to pages, maps page names to +# template names. +# html_additional_pages = {} + +# If false, no module index is generated. +# html_domain_indices = True + +# If false, no index is generated. +# html_use_index = True + +# If true, the index is split into individual pages for each letter. +# html_split_index = False + +# If true, links to the reST sources are added to the pages. +# html_show_sourcelink = True + +# If true, "Created using Sphinx" is shown in the HTML footer. Default is True. +# html_show_sphinx = True + +# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. +# html_show_copyright = True + +# If true, an OpenSearch description file will be output, and all pages will +# contain a tag referring to it. The value of this option must be the +# base URL from which the finished HTML is served. +# html_use_opensearch = '' + +# This is the file name suffix for HTML files (e.g. ".xhtml"). +# html_file_suffix = None + +# Language to be used for generating the HTML full-text search index. +# Sphinx supports the following languages: +# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja' +# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr' +# html_search_language = 'en' + +# A dictionary with options for the search language support, empty by default. +# Now only 'ja' uses this config value +# html_search_options = {'type': 'default'} + +# The name of a javascript file (relative to the configuration directory) that +# implements a search results scorer. If empty, the default will be used. +# html_search_scorer = 'scorer.js' + +# Output file base name for HTML help builder. +htmlhelp_basename = "google-cloud-gdchardwaremanagement-doc" + +# -- Options for warnings ------------------------------------------------------ + + +suppress_warnings = [ + # Temporarily suppress this to avoid "more than one target found for + # cross-reference" warning, which are intractable for us to avoid while in + # a mono-repo. + # See https://github.com/sphinx-doc/sphinx/blob + # /2a65ffeef5c107c19084fabdd706cdff3f52d93c/sphinx/domains/python.py#L843 + "ref.python" +] + +# -- Options for LaTeX output --------------------------------------------- + +latex_elements = { + # The paper size ('letterpaper' or 'a4paper'). + #'papersize': 'letterpaper', + # The font size ('10pt', '11pt' or '12pt'). + #'pointsize': '10pt', + # Additional stuff for the LaTeX preamble. + #'preamble': '', + # Latex figure (float) alignment + #'figure_align': 'htbp', +} + +# Grouping the document tree into LaTeX files. List of tuples +# (source start file, target name, title, +# author, documentclass [howto, manual, or own class]). +latex_documents = [ + ( + root_doc, + "google-cloud-gdchardwaremanagement.tex", + "google-cloud-gdchardwaremanagement Documentation", + author, + "manual", + ) +] + +# The name of an image file (relative to this directory) to place at the top of +# the title page. +# latex_logo = None + +# For "manual" documents, if this is true, then toplevel headings are parts, +# not chapters. 
+# latex_use_parts = False + +# If true, show page references after internal links. +# latex_show_pagerefs = False + +# If true, show URL addresses after external links. +# latex_show_urls = False + +# Documents to append as an appendix to all manuals. +# latex_appendices = [] + +# If false, no module index is generated. +# latex_domain_indices = True + + +# -- Options for manual page output --------------------------------------- + +# One entry per manual page. List of tuples +# (source start file, name, description, authors, manual section). +man_pages = [ + ( + root_doc, + "google-cloud-gdchardwaremanagement", + "google-cloud-gdchardwaremanagement Documentation", + [author], + 1, + ) +] + +# If true, show URL addresses after external links. +# man_show_urls = False + + +# -- Options for Texinfo output ------------------------------------------- + +# Grouping the document tree into Texinfo files. List of tuples +# (source start file, target name, title, author, +# dir menu entry, description, category) +texinfo_documents = [ + ( + root_doc, + "google-cloud-gdchardwaremanagement", + "google-cloud-gdchardwaremanagement Documentation", + author, + "google-cloud-gdchardwaremanagement", + "google-cloud-gdchardwaremanagement Library", + "APIs", + ) +] + +# Documents to append as an appendix to all manuals. +# texinfo_appendices = [] + +# If false, no module index is generated. +# texinfo_domain_indices = True + +# How to display URL addresses: 'footnote', 'no', or 'inline'. +# texinfo_show_urls = 'footnote' + +# If true, do not generate a @detailmenu in the "Top" node's menu. +# texinfo_no_detailmenu = False + + +# Example configuration for intersphinx: refer to the Python standard library. +intersphinx_mapping = { + "python": ("https://python.readthedocs.org/en/latest/", None), + "google-auth": ("https://googleapis.dev/python/google-auth/latest/", None), + "google.api_core": ( + "https://googleapis.dev/python/google-api-core/latest/", + None, + ), + "grpc": ("https://grpc.github.io/grpc/python/", None), + "proto-plus": ("https://proto-plus-python.readthedocs.io/en/latest/", None), + "protobuf": ("https://googleapis.dev/python/protobuf/latest/", None), +} + + +# Napoleon settings +napoleon_google_docstring = True +napoleon_numpy_docstring = True +napoleon_include_private_with_doc = False +napoleon_include_special_with_doc = True +napoleon_use_admonition_for_examples = False +napoleon_use_admonition_for_notes = False +napoleon_use_admonition_for_references = False +napoleon_use_ivar = False +napoleon_use_param = True +napoleon_use_rtype = True diff --git a/packages/google-cloud-gdchardwaremanagement/docs/gdchardwaremanagement_v1alpha/gdc_hardware_management.rst b/packages/google-cloud-gdchardwaremanagement/docs/gdchardwaremanagement_v1alpha/gdc_hardware_management.rst new file mode 100644 index 000000000000..35aa45944bc7 --- /dev/null +++ b/packages/google-cloud-gdchardwaremanagement/docs/gdchardwaremanagement_v1alpha/gdc_hardware_management.rst @@ -0,0 +1,10 @@ +GDCHardwareManagement +--------------------------------------- + +.. automodule:: google.cloud.gdchardwaremanagement_v1alpha.services.gdc_hardware_management + :members: + :inherited-members: + +.. 
automodule:: google.cloud.gdchardwaremanagement_v1alpha.services.gdc_hardware_management.pagers + :members: + :inherited-members: diff --git a/packages/google-cloud-gdchardwaremanagement/docs/gdchardwaremanagement_v1alpha/services_.rst b/packages/google-cloud-gdchardwaremanagement/docs/gdchardwaremanagement_v1alpha/services_.rst new file mode 100644 index 000000000000..cdf7b7319368 --- /dev/null +++ b/packages/google-cloud-gdchardwaremanagement/docs/gdchardwaremanagement_v1alpha/services_.rst @@ -0,0 +1,6 @@ +Services for Google Cloud Gdchardwaremanagement v1alpha API +=========================================================== +.. toctree:: + :maxdepth: 2 + + gdc_hardware_management diff --git a/packages/google-cloud-gdchardwaremanagement/docs/gdchardwaremanagement_v1alpha/types_.rst b/packages/google-cloud-gdchardwaremanagement/docs/gdchardwaremanagement_v1alpha/types_.rst new file mode 100644 index 000000000000..afbdf0b1e208 --- /dev/null +++ b/packages/google-cloud-gdchardwaremanagement/docs/gdchardwaremanagement_v1alpha/types_.rst @@ -0,0 +1,6 @@ +Types for Google Cloud Gdchardwaremanagement v1alpha API +======================================================== + +.. automodule:: google.cloud.gdchardwaremanagement_v1alpha.types + :members: + :show-inheritance: diff --git a/packages/google-cloud-gdchardwaremanagement/docs/index.rst b/packages/google-cloud-gdchardwaremanagement/docs/index.rst new file mode 100644 index 000000000000..946ea27d82b3 --- /dev/null +++ b/packages/google-cloud-gdchardwaremanagement/docs/index.rst @@ -0,0 +1,28 @@ +.. include:: README.rst + +.. include:: multiprocessing.rst + + +API Reference +------------- +.. toctree:: + :maxdepth: 2 + + gdchardwaremanagement_v1alpha/services_ + gdchardwaremanagement_v1alpha/types_ + + +Changelog +--------- + +For a list of all ``google-cloud-gdchardwaremanagement`` releases: + +.. toctree:: + :maxdepth: 2 + + CHANGELOG + +.. toctree:: + :hidden: + + summary_overview.md diff --git a/packages/google-cloud-gdchardwaremanagement/docs/multiprocessing.rst b/packages/google-cloud-gdchardwaremanagement/docs/multiprocessing.rst new file mode 100644 index 000000000000..536d17b2ea65 --- /dev/null +++ b/packages/google-cloud-gdchardwaremanagement/docs/multiprocessing.rst @@ -0,0 +1,7 @@ +.. note:: + + Because this client uses :mod:`grpc` library, it is safe to + share instances across threads. In multiprocessing scenarios, the best + practice is to create client instances *after* the invocation of + :func:`os.fork` by :class:`multiprocessing.pool.Pool` or + :class:`multiprocessing.Process`. diff --git a/packages/google-cloud-gdchardwaremanagement/docs/summary_overview.md b/packages/google-cloud-gdchardwaremanagement/docs/summary_overview.md new file mode 100644 index 000000000000..778bb20d2d49 --- /dev/null +++ b/packages/google-cloud-gdchardwaremanagement/docs/summary_overview.md @@ -0,0 +1,22 @@ +[ +This is a templated file. Adding content to this file may result in it being +reverted. Instead, if you want to place additional content, create an +"overview_content.md" file in `docs/` directory. The Sphinx tool will +pick up on the content and merge the content. +]: # + +# GDC Hardware Management API API + +Overview of the APIs available for GDC Hardware Management API API. + +## All entries + +Classes, methods and properties & attributes for +GDC Hardware Management API API. 
+ +[classes](https://cloud.google.com/python/docs/reference/google-cloud-gdchardwaremanagement/latest/summary_class.html) + +[methods](https://cloud.google.com/python/docs/reference/google-cloud-gdchardwaremanagement/latest/summary_method.html) + +[properties and +attributes](https://cloud.google.com/python/docs/reference/google-cloud-gdchardwaremanagement/latest/summary_property.html) diff --git a/packages/google-cloud-gdchardwaremanagement/google/cloud/gdchardwaremanagement/__init__.py b/packages/google-cloud-gdchardwaremanagement/google/cloud/gdchardwaremanagement/__init__.py new file mode 100644 index 000000000000..8f1261e786ed --- /dev/null +++ b/packages/google-cloud-gdchardwaremanagement/google/cloud/gdchardwaremanagement/__init__.py @@ -0,0 +1,163 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from google.cloud.gdchardwaremanagement import gapic_version as package_version + +__version__ = package_version.__version__ + + +from google.cloud.gdchardwaremanagement_v1alpha.services.gdc_hardware_management.async_client import ( + GDCHardwareManagementAsyncClient, +) +from google.cloud.gdchardwaremanagement_v1alpha.services.gdc_hardware_management.client import ( + GDCHardwareManagementClient, +) +from google.cloud.gdchardwaremanagement_v1alpha.types.resources import ( + ChangeLogEntry, + Comment, + Contact, + Dimensions, + Hardware, + HardwareConfig, + HardwareGroup, + HardwareInstallationInfo, + HardwareLocation, + HardwarePhysicalInfo, + Order, + OrganizationContact, + PowerSupply, + RackSpace, + Site, + Sku, + SkuConfig, + SkuInstance, + Subnet, + TimePeriod, + Zone, + ZoneNetworkConfig, +) +from google.cloud.gdchardwaremanagement_v1alpha.types.service import ( + CreateCommentRequest, + CreateHardwareGroupRequest, + CreateHardwareRequest, + CreateOrderRequest, + CreateSiteRequest, + CreateZoneRequest, + DeleteHardwareGroupRequest, + DeleteHardwareRequest, + DeleteOrderRequest, + DeleteZoneRequest, + GetChangeLogEntryRequest, + GetCommentRequest, + GetHardwareGroupRequest, + GetHardwareRequest, + GetOrderRequest, + GetSiteRequest, + GetSkuRequest, + GetZoneRequest, + ListChangeLogEntriesRequest, + ListChangeLogEntriesResponse, + ListCommentsRequest, + ListCommentsResponse, + ListHardwareGroupsRequest, + ListHardwareGroupsResponse, + ListHardwareRequest, + ListHardwareResponse, + ListOrdersRequest, + ListOrdersResponse, + ListSitesRequest, + ListSitesResponse, + ListSkusRequest, + ListSkusResponse, + ListZonesRequest, + ListZonesResponse, + OperationMetadata, + SignalZoneStateRequest, + SubmitOrderRequest, + UpdateHardwareGroupRequest, + UpdateHardwareRequest, + UpdateOrderRequest, + UpdateSiteRequest, + UpdateZoneRequest, +) + +__all__ = ( + "GDCHardwareManagementClient", + "GDCHardwareManagementAsyncClient", + 
"ChangeLogEntry", + "Comment", + "Contact", + "Dimensions", + "Hardware", + "HardwareConfig", + "HardwareGroup", + "HardwareInstallationInfo", + "HardwareLocation", + "HardwarePhysicalInfo", + "Order", + "OrganizationContact", + "RackSpace", + "Site", + "Sku", + "SkuConfig", + "SkuInstance", + "Subnet", + "TimePeriod", + "Zone", + "ZoneNetworkConfig", + "PowerSupply", + "CreateCommentRequest", + "CreateHardwareGroupRequest", + "CreateHardwareRequest", + "CreateOrderRequest", + "CreateSiteRequest", + "CreateZoneRequest", + "DeleteHardwareGroupRequest", + "DeleteHardwareRequest", + "DeleteOrderRequest", + "DeleteZoneRequest", + "GetChangeLogEntryRequest", + "GetCommentRequest", + "GetHardwareGroupRequest", + "GetHardwareRequest", + "GetOrderRequest", + "GetSiteRequest", + "GetSkuRequest", + "GetZoneRequest", + "ListChangeLogEntriesRequest", + "ListChangeLogEntriesResponse", + "ListCommentsRequest", + "ListCommentsResponse", + "ListHardwareGroupsRequest", + "ListHardwareGroupsResponse", + "ListHardwareRequest", + "ListHardwareResponse", + "ListOrdersRequest", + "ListOrdersResponse", + "ListSitesRequest", + "ListSitesResponse", + "ListSkusRequest", + "ListSkusResponse", + "ListZonesRequest", + "ListZonesResponse", + "OperationMetadata", + "SignalZoneStateRequest", + "SubmitOrderRequest", + "UpdateHardwareGroupRequest", + "UpdateHardwareRequest", + "UpdateOrderRequest", + "UpdateSiteRequest", + "UpdateZoneRequest", +) diff --git a/packages/google-cloud-gdchardwaremanagement/google/cloud/gdchardwaremanagement/gapic_version.py b/packages/google-cloud-gdchardwaremanagement/google/cloud/gdchardwaremanagement/gapic_version.py new file mode 100644 index 000000000000..0c7cc68730c4 --- /dev/null +++ b/packages/google-cloud-gdchardwaremanagement/google/cloud/gdchardwaremanagement/gapic_version.py @@ -0,0 +1,16 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +__version__ = "0.1.1" # {x-release-please-version} diff --git a/packages/google-cloud-gdchardwaremanagement/google/cloud/gdchardwaremanagement/py.typed b/packages/google-cloud-gdchardwaremanagement/google/cloud/gdchardwaremanagement/py.typed new file mode 100644 index 000000000000..7c93e40af23e --- /dev/null +++ b/packages/google-cloud-gdchardwaremanagement/google/cloud/gdchardwaremanagement/py.typed @@ -0,0 +1,2 @@ +# Marker file for PEP 561. +# The google-cloud-gdchardwaremanagement package uses inline types. 
diff --git a/packages/google-cloud-gdchardwaremanagement/google/cloud/gdchardwaremanagement_v1alpha/__init__.py b/packages/google-cloud-gdchardwaremanagement/google/cloud/gdchardwaremanagement_v1alpha/__init__.py new file mode 100644 index 000000000000..adfdd5d5be4b --- /dev/null +++ b/packages/google-cloud-gdchardwaremanagement/google/cloud/gdchardwaremanagement_v1alpha/__init__.py @@ -0,0 +1,161 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from google.cloud.gdchardwaremanagement_v1alpha import gapic_version as package_version + +__version__ = package_version.__version__ + + +from .services.gdc_hardware_management import ( + GDCHardwareManagementAsyncClient, + GDCHardwareManagementClient, +) +from .types.resources import ( + ChangeLogEntry, + Comment, + Contact, + Dimensions, + Hardware, + HardwareConfig, + HardwareGroup, + HardwareInstallationInfo, + HardwareLocation, + HardwarePhysicalInfo, + Order, + OrganizationContact, + PowerSupply, + RackSpace, + Site, + Sku, + SkuConfig, + SkuInstance, + Subnet, + TimePeriod, + Zone, + ZoneNetworkConfig, +) +from .types.service import ( + CreateCommentRequest, + CreateHardwareGroupRequest, + CreateHardwareRequest, + CreateOrderRequest, + CreateSiteRequest, + CreateZoneRequest, + DeleteHardwareGroupRequest, + DeleteHardwareRequest, + DeleteOrderRequest, + DeleteZoneRequest, + GetChangeLogEntryRequest, + GetCommentRequest, + GetHardwareGroupRequest, + GetHardwareRequest, + GetOrderRequest, + GetSiteRequest, + GetSkuRequest, + GetZoneRequest, + ListChangeLogEntriesRequest, + ListChangeLogEntriesResponse, + ListCommentsRequest, + ListCommentsResponse, + ListHardwareGroupsRequest, + ListHardwareGroupsResponse, + ListHardwareRequest, + ListHardwareResponse, + ListOrdersRequest, + ListOrdersResponse, + ListSitesRequest, + ListSitesResponse, + ListSkusRequest, + ListSkusResponse, + ListZonesRequest, + ListZonesResponse, + OperationMetadata, + SignalZoneStateRequest, + SubmitOrderRequest, + UpdateHardwareGroupRequest, + UpdateHardwareRequest, + UpdateOrderRequest, + UpdateSiteRequest, + UpdateZoneRequest, +) + +__all__ = ( + "GDCHardwareManagementAsyncClient", + "ChangeLogEntry", + "Comment", + "Contact", + "CreateCommentRequest", + "CreateHardwareGroupRequest", + "CreateHardwareRequest", + "CreateOrderRequest", + "CreateSiteRequest", + "CreateZoneRequest", + "DeleteHardwareGroupRequest", + "DeleteHardwareRequest", + "DeleteOrderRequest", + "DeleteZoneRequest", + "Dimensions", + "GDCHardwareManagementClient", + "GetChangeLogEntryRequest", + "GetCommentRequest", + "GetHardwareGroupRequest", + "GetHardwareRequest", + "GetOrderRequest", + "GetSiteRequest", + "GetSkuRequest", + "GetZoneRequest", + "Hardware", + "HardwareConfig", + "HardwareGroup", + "HardwareInstallationInfo", + "HardwareLocation", + "HardwarePhysicalInfo", + "ListChangeLogEntriesRequest", + "ListChangeLogEntriesResponse", + "ListCommentsRequest", + "ListCommentsResponse", + 
"ListHardwareGroupsRequest", + "ListHardwareGroupsResponse", + "ListHardwareRequest", + "ListHardwareResponse", + "ListOrdersRequest", + "ListOrdersResponse", + "ListSitesRequest", + "ListSitesResponse", + "ListSkusRequest", + "ListSkusResponse", + "ListZonesRequest", + "ListZonesResponse", + "OperationMetadata", + "Order", + "OrganizationContact", + "PowerSupply", + "RackSpace", + "SignalZoneStateRequest", + "Site", + "Sku", + "SkuConfig", + "SkuInstance", + "SubmitOrderRequest", + "Subnet", + "TimePeriod", + "UpdateHardwareGroupRequest", + "UpdateHardwareRequest", + "UpdateOrderRequest", + "UpdateSiteRequest", + "UpdateZoneRequest", + "Zone", + "ZoneNetworkConfig", +) diff --git a/packages/google-cloud-gdchardwaremanagement/google/cloud/gdchardwaremanagement_v1alpha/gapic_metadata.json b/packages/google-cloud-gdchardwaremanagement/google/cloud/gdchardwaremanagement_v1alpha/gapic_metadata.json new file mode 100644 index 000000000000..b5713b6e1b21 --- /dev/null +++ b/packages/google-cloud-gdchardwaremanagement/google/cloud/gdchardwaremanagement_v1alpha/gapic_metadata.json @@ -0,0 +1,523 @@ + { + "comment": "This file maps proto services/RPCs to the corresponding library clients/methods", + "language": "python", + "libraryPackage": "google.cloud.gdchardwaremanagement_v1alpha", + "protoPackage": "google.cloud.gdchardwaremanagement.v1alpha", + "schema": "1.0", + "services": { + "GDCHardwareManagement": { + "clients": { + "grpc": { + "libraryClient": "GDCHardwareManagementClient", + "rpcs": { + "CreateComment": { + "methods": [ + "create_comment" + ] + }, + "CreateHardware": { + "methods": [ + "create_hardware" + ] + }, + "CreateHardwareGroup": { + "methods": [ + "create_hardware_group" + ] + }, + "CreateOrder": { + "methods": [ + "create_order" + ] + }, + "CreateSite": { + "methods": [ + "create_site" + ] + }, + "CreateZone": { + "methods": [ + "create_zone" + ] + }, + "DeleteHardware": { + "methods": [ + "delete_hardware" + ] + }, + "DeleteHardwareGroup": { + "methods": [ + "delete_hardware_group" + ] + }, + "DeleteOrder": { + "methods": [ + "delete_order" + ] + }, + "DeleteZone": { + "methods": [ + "delete_zone" + ] + }, + "GetChangeLogEntry": { + "methods": [ + "get_change_log_entry" + ] + }, + "GetComment": { + "methods": [ + "get_comment" + ] + }, + "GetHardware": { + "methods": [ + "get_hardware" + ] + }, + "GetHardwareGroup": { + "methods": [ + "get_hardware_group" + ] + }, + "GetOrder": { + "methods": [ + "get_order" + ] + }, + "GetSite": { + "methods": [ + "get_site" + ] + }, + "GetSku": { + "methods": [ + "get_sku" + ] + }, + "GetZone": { + "methods": [ + "get_zone" + ] + }, + "ListChangeLogEntries": { + "methods": [ + "list_change_log_entries" + ] + }, + "ListComments": { + "methods": [ + "list_comments" + ] + }, + "ListHardware": { + "methods": [ + "list_hardware" + ] + }, + "ListHardwareGroups": { + "methods": [ + "list_hardware_groups" + ] + }, + "ListOrders": { + "methods": [ + "list_orders" + ] + }, + "ListSites": { + "methods": [ + "list_sites" + ] + }, + "ListSkus": { + "methods": [ + "list_skus" + ] + }, + "ListZones": { + "methods": [ + "list_zones" + ] + }, + "SignalZoneState": { + "methods": [ + "signal_zone_state" + ] + }, + "SubmitOrder": { + "methods": [ + "submit_order" + ] + }, + "UpdateHardware": { + "methods": [ + "update_hardware" + ] + }, + "UpdateHardwareGroup": { + "methods": [ + "update_hardware_group" + ] + }, + "UpdateOrder": { + "methods": [ + "update_order" + ] + }, + "UpdateSite": { + "methods": [ + "update_site" + ] + }, + "UpdateZone": { + "methods": [ 
+ "update_zone" + ] + } + } + }, + "grpc-async": { + "libraryClient": "GDCHardwareManagementAsyncClient", + "rpcs": { + "CreateComment": { + "methods": [ + "create_comment" + ] + }, + "CreateHardware": { + "methods": [ + "create_hardware" + ] + }, + "CreateHardwareGroup": { + "methods": [ + "create_hardware_group" + ] + }, + "CreateOrder": { + "methods": [ + "create_order" + ] + }, + "CreateSite": { + "methods": [ + "create_site" + ] + }, + "CreateZone": { + "methods": [ + "create_zone" + ] + }, + "DeleteHardware": { + "methods": [ + "delete_hardware" + ] + }, + "DeleteHardwareGroup": { + "methods": [ + "delete_hardware_group" + ] + }, + "DeleteOrder": { + "methods": [ + "delete_order" + ] + }, + "DeleteZone": { + "methods": [ + "delete_zone" + ] + }, + "GetChangeLogEntry": { + "methods": [ + "get_change_log_entry" + ] + }, + "GetComment": { + "methods": [ + "get_comment" + ] + }, + "GetHardware": { + "methods": [ + "get_hardware" + ] + }, + "GetHardwareGroup": { + "methods": [ + "get_hardware_group" + ] + }, + "GetOrder": { + "methods": [ + "get_order" + ] + }, + "GetSite": { + "methods": [ + "get_site" + ] + }, + "GetSku": { + "methods": [ + "get_sku" + ] + }, + "GetZone": { + "methods": [ + "get_zone" + ] + }, + "ListChangeLogEntries": { + "methods": [ + "list_change_log_entries" + ] + }, + "ListComments": { + "methods": [ + "list_comments" + ] + }, + "ListHardware": { + "methods": [ + "list_hardware" + ] + }, + "ListHardwareGroups": { + "methods": [ + "list_hardware_groups" + ] + }, + "ListOrders": { + "methods": [ + "list_orders" + ] + }, + "ListSites": { + "methods": [ + "list_sites" + ] + }, + "ListSkus": { + "methods": [ + "list_skus" + ] + }, + "ListZones": { + "methods": [ + "list_zones" + ] + }, + "SignalZoneState": { + "methods": [ + "signal_zone_state" + ] + }, + "SubmitOrder": { + "methods": [ + "submit_order" + ] + }, + "UpdateHardware": { + "methods": [ + "update_hardware" + ] + }, + "UpdateHardwareGroup": { + "methods": [ + "update_hardware_group" + ] + }, + "UpdateOrder": { + "methods": [ + "update_order" + ] + }, + "UpdateSite": { + "methods": [ + "update_site" + ] + }, + "UpdateZone": { + "methods": [ + "update_zone" + ] + } + } + }, + "rest": { + "libraryClient": "GDCHardwareManagementClient", + "rpcs": { + "CreateComment": { + "methods": [ + "create_comment" + ] + }, + "CreateHardware": { + "methods": [ + "create_hardware" + ] + }, + "CreateHardwareGroup": { + "methods": [ + "create_hardware_group" + ] + }, + "CreateOrder": { + "methods": [ + "create_order" + ] + }, + "CreateSite": { + "methods": [ + "create_site" + ] + }, + "CreateZone": { + "methods": [ + "create_zone" + ] + }, + "DeleteHardware": { + "methods": [ + "delete_hardware" + ] + }, + "DeleteHardwareGroup": { + "methods": [ + "delete_hardware_group" + ] + }, + "DeleteOrder": { + "methods": [ + "delete_order" + ] + }, + "DeleteZone": { + "methods": [ + "delete_zone" + ] + }, + "GetChangeLogEntry": { + "methods": [ + "get_change_log_entry" + ] + }, + "GetComment": { + "methods": [ + "get_comment" + ] + }, + "GetHardware": { + "methods": [ + "get_hardware" + ] + }, + "GetHardwareGroup": { + "methods": [ + "get_hardware_group" + ] + }, + "GetOrder": { + "methods": [ + "get_order" + ] + }, + "GetSite": { + "methods": [ + "get_site" + ] + }, + "GetSku": { + "methods": [ + "get_sku" + ] + }, + "GetZone": { + "methods": [ + "get_zone" + ] + }, + "ListChangeLogEntries": { + "methods": [ + "list_change_log_entries" + ] + }, + "ListComments": { + "methods": [ + "list_comments" + ] + }, + "ListHardware": { + 
"methods": [ + "list_hardware" + ] + }, + "ListHardwareGroups": { + "methods": [ + "list_hardware_groups" + ] + }, + "ListOrders": { + "methods": [ + "list_orders" + ] + }, + "ListSites": { + "methods": [ + "list_sites" + ] + }, + "ListSkus": { + "methods": [ + "list_skus" + ] + }, + "ListZones": { + "methods": [ + "list_zones" + ] + }, + "SignalZoneState": { + "methods": [ + "signal_zone_state" + ] + }, + "SubmitOrder": { + "methods": [ + "submit_order" + ] + }, + "UpdateHardware": { + "methods": [ + "update_hardware" + ] + }, + "UpdateHardwareGroup": { + "methods": [ + "update_hardware_group" + ] + }, + "UpdateOrder": { + "methods": [ + "update_order" + ] + }, + "UpdateSite": { + "methods": [ + "update_site" + ] + }, + "UpdateZone": { + "methods": [ + "update_zone" + ] + } + } + } + } + } + } +} diff --git a/packages/google-cloud-gdchardwaremanagement/google/cloud/gdchardwaremanagement_v1alpha/gapic_version.py b/packages/google-cloud-gdchardwaremanagement/google/cloud/gdchardwaremanagement_v1alpha/gapic_version.py new file mode 100644 index 000000000000..0c7cc68730c4 --- /dev/null +++ b/packages/google-cloud-gdchardwaremanagement/google/cloud/gdchardwaremanagement_v1alpha/gapic_version.py @@ -0,0 +1,16 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +__version__ = "0.1.1" # {x-release-please-version} diff --git a/packages/google-cloud-gdchardwaremanagement/google/cloud/gdchardwaremanagement_v1alpha/py.typed b/packages/google-cloud-gdchardwaremanagement/google/cloud/gdchardwaremanagement_v1alpha/py.typed new file mode 100644 index 000000000000..7c93e40af23e --- /dev/null +++ b/packages/google-cloud-gdchardwaremanagement/google/cloud/gdchardwaremanagement_v1alpha/py.typed @@ -0,0 +1,2 @@ +# Marker file for PEP 561. +# The google-cloud-gdchardwaremanagement package uses inline types. diff --git a/packages/google-cloud-gdchardwaremanagement/google/cloud/gdchardwaremanagement_v1alpha/services/__init__.py b/packages/google-cloud-gdchardwaremanagement/google/cloud/gdchardwaremanagement_v1alpha/services/__init__.py new file mode 100644 index 000000000000..8f6cf068242c --- /dev/null +++ b/packages/google-cloud-gdchardwaremanagement/google/cloud/gdchardwaremanagement_v1alpha/services/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# diff --git a/packages/google-cloud-gdchardwaremanagement/google/cloud/gdchardwaremanagement_v1alpha/services/gdc_hardware_management/__init__.py b/packages/google-cloud-gdchardwaremanagement/google/cloud/gdchardwaremanagement_v1alpha/services/gdc_hardware_management/__init__.py new file mode 100644 index 000000000000..bfb742c90cc0 --- /dev/null +++ b/packages/google-cloud-gdchardwaremanagement/google/cloud/gdchardwaremanagement_v1alpha/services/gdc_hardware_management/__init__.py @@ -0,0 +1,22 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from .async_client import GDCHardwareManagementAsyncClient +from .client import GDCHardwareManagementClient + +__all__ = ( + "GDCHardwareManagementClient", + "GDCHardwareManagementAsyncClient", +) diff --git a/packages/google-cloud-gdchardwaremanagement/google/cloud/gdchardwaremanagement_v1alpha/services/gdc_hardware_management/async_client.py b/packages/google-cloud-gdchardwaremanagement/google/cloud/gdchardwaremanagement_v1alpha/services/gdc_hardware_management/async_client.py new file mode 100644 index 000000000000..f62757787450 --- /dev/null +++ b/packages/google-cloud-gdchardwaremanagement/google/cloud/gdchardwaremanagement_v1alpha/services/gdc_hardware_management/async_client.py @@ -0,0 +1,4802 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +import functools +import re +from typing import ( + Callable, + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, +) + +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry_async as retries +from google.api_core.client_options import ClientOptions +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.gdchardwaremanagement_v1alpha import gapic_version as package_version + +try: + OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.AsyncRetry, object, None] # type: ignore + +from google.api_core import operation # type: ignore +from google.api_core import operation_async # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +from google.type import date_pb2 # type: ignore + +from google.cloud.gdchardwaremanagement_v1alpha.services.gdc_hardware_management import ( + pagers, +) +from google.cloud.gdchardwaremanagement_v1alpha.types import resources, service + +from .client import GDCHardwareManagementClient +from .transports.base import DEFAULT_CLIENT_INFO, GDCHardwareManagementTransport +from .transports.grpc_asyncio import GDCHardwareManagementGrpcAsyncIOTransport + + +class GDCHardwareManagementAsyncClient: + """The GDC Hardware Management service.""" + + _client: GDCHardwareManagementClient + + # Copy defaults from the synchronous client for use here. + # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. 
+ DEFAULT_ENDPOINT = GDCHardwareManagementClient.DEFAULT_ENDPOINT + DEFAULT_MTLS_ENDPOINT = GDCHardwareManagementClient.DEFAULT_MTLS_ENDPOINT + _DEFAULT_ENDPOINT_TEMPLATE = GDCHardwareManagementClient._DEFAULT_ENDPOINT_TEMPLATE + _DEFAULT_UNIVERSE = GDCHardwareManagementClient._DEFAULT_UNIVERSE + + change_log_entry_path = staticmethod( + GDCHardwareManagementClient.change_log_entry_path + ) + parse_change_log_entry_path = staticmethod( + GDCHardwareManagementClient.parse_change_log_entry_path + ) + comment_path = staticmethod(GDCHardwareManagementClient.comment_path) + parse_comment_path = staticmethod(GDCHardwareManagementClient.parse_comment_path) + hardware_path = staticmethod(GDCHardwareManagementClient.hardware_path) + parse_hardware_path = staticmethod(GDCHardwareManagementClient.parse_hardware_path) + hardware_group_path = staticmethod(GDCHardwareManagementClient.hardware_group_path) + parse_hardware_group_path = staticmethod( + GDCHardwareManagementClient.parse_hardware_group_path + ) + order_path = staticmethod(GDCHardwareManagementClient.order_path) + parse_order_path = staticmethod(GDCHardwareManagementClient.parse_order_path) + site_path = staticmethod(GDCHardwareManagementClient.site_path) + parse_site_path = staticmethod(GDCHardwareManagementClient.parse_site_path) + sku_path = staticmethod(GDCHardwareManagementClient.sku_path) + parse_sku_path = staticmethod(GDCHardwareManagementClient.parse_sku_path) + zone_path = staticmethod(GDCHardwareManagementClient.zone_path) + parse_zone_path = staticmethod(GDCHardwareManagementClient.parse_zone_path) + common_billing_account_path = staticmethod( + GDCHardwareManagementClient.common_billing_account_path + ) + parse_common_billing_account_path = staticmethod( + GDCHardwareManagementClient.parse_common_billing_account_path + ) + common_folder_path = staticmethod(GDCHardwareManagementClient.common_folder_path) + parse_common_folder_path = staticmethod( + GDCHardwareManagementClient.parse_common_folder_path + ) + common_organization_path = staticmethod( + GDCHardwareManagementClient.common_organization_path + ) + parse_common_organization_path = staticmethod( + GDCHardwareManagementClient.parse_common_organization_path + ) + common_project_path = staticmethod(GDCHardwareManagementClient.common_project_path) + parse_common_project_path = staticmethod( + GDCHardwareManagementClient.parse_common_project_path + ) + common_location_path = staticmethod( + GDCHardwareManagementClient.common_location_path + ) + parse_common_location_path = staticmethod( + GDCHardwareManagementClient.parse_common_location_path + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + GDCHardwareManagementAsyncClient: The constructed client. + """ + return GDCHardwareManagementClient.from_service_account_info.__func__(GDCHardwareManagementAsyncClient, info, *args, **kwargs) # type: ignore + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. 
+ + Returns: + GDCHardwareManagementAsyncClient: The constructed client. + """ + return GDCHardwareManagementClient.from_service_account_file.__func__(GDCHardwareManagementAsyncClient, filename, *args, **kwargs) # type: ignore + + from_service_account_json = from_service_account_file + + @classmethod + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[ClientOptions] = None + ): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + return GDCHardwareManagementClient.get_mtls_endpoint_and_cert_source(client_options) # type: ignore + + @property + def transport(self) -> GDCHardwareManagementTransport: + """Returns the transport used by the client instance. + + Returns: + GDCHardwareManagementTransport: The transport used by the client instance. + """ + return self._client.transport + + @property + def api_endpoint(self): + """Return the API endpoint used by the client instance. + + Returns: + str: The API endpoint used by the client instance. + """ + return self._client._api_endpoint + + @property + def universe_domain(self) -> str: + """Return the universe domain used by the client instance. + + Returns: + str: The universe domain used + by the client instance. + """ + return self._client._universe_domain + + get_transport_class = functools.partial( + type(GDCHardwareManagementClient).get_transport_class, + type(GDCHardwareManagementClient), + ) + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[ + Union[ + str, + GDCHardwareManagementTransport, + Callable[..., GDCHardwareManagementTransport], + ] + ] = "grpc_asyncio", + client_options: Optional[ClientOptions] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the gdc hardware management async client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. 
+ transport (Optional[Union[str,GDCHardwareManagementTransport,Callable[..., GDCHardwareManagementTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport to use. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the GDCHardwareManagementTransport constructor. + If set to None, a transport is chosen automatically. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): + Custom options for the client. + + 1. The ``api_endpoint`` property can be used to override the + default endpoint provided by the client when ``transport`` is + not explicitly provided. Only if this property is not set and + ``transport`` was not explicitly provided, the endpoint is + determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment + variable, which have one of the following values: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto-switch to the + default mTLS endpoint if client certificate is present; this is + the default value). + + 2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide a client certificate for mTLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + 3. The ``universe_domain`` property can be used to override the + default "googleapis.com" universe. Note that ``api_endpoint`` + property still takes precedence; and ``universe_domain`` is + currently not supported for mTLS. + + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + """ + self._client = GDCHardwareManagementClient( + credentials=credentials, + transport=transport, + client_options=client_options, + client_info=client_info, + ) + + async def list_orders( + self, + request: Optional[Union[service.ListOrdersRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListOrdersAsyncPager: + r"""Lists orders in a given project and location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import gdchardwaremanagement_v1alpha + + async def sample_list_orders(): + # Create a client + client = gdchardwaremanagement_v1alpha.GDCHardwareManagementAsyncClient() + + # Initialize request argument(s) + request = gdchardwaremanagement_v1alpha.ListOrdersRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_orders(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.gdchardwaremanagement_v1alpha.types.ListOrdersRequest, dict]]): + The request object. A request to list orders. + parent (:class:`str`): + Required. The project and location to list orders in. + Format: ``projects/{project}/locations/{location}`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.gdchardwaremanagement_v1alpha.services.gdc_hardware_management.pagers.ListOrdersAsyncPager: + A list of orders. + + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, service.ListOrdersRequest): + request = service.ListOrdersRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.list_orders + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListOrdersAsyncPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. 
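Since every call above accepts the documented `retry` and `timeout` arguments, here is a minimal sketch (not part of the generated client) of overriding them on `list_orders`; the backoff values are illustrative assumptions, and the `async for` loop relies on the `ListOrdersAsyncPager` returned by the method.

```python
import asyncio

from google.api_core import exceptions as core_exceptions
from google.api_core.retry import if_exception_type
from google.api_core.retry_async import AsyncRetry

from google.cloud import gdchardwaremanagement_v1alpha


async def list_orders_with_retry(parent: str) -> None:
    client = gdchardwaremanagement_v1alpha.GDCHardwareManagementAsyncClient()

    # Illustrative backoff: retry only on transient UNAVAILABLE errors.
    retry = AsyncRetry(
        initial=1.0,
        maximum=10.0,
        multiplier=2.0,
        predicate=if_exception_type(core_exceptions.ServiceUnavailable),
        timeout=120.0,
    )

    pager = await client.list_orders(parent=parent, retry=retry, timeout=30.0)
    async for order in pager:
        print(order.name)


# asyncio.run(list_orders_with_retry("projects/my-project/locations/us-central1"))
```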
+ return response + + async def get_order( + self, + request: Optional[Union[service.GetOrderRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> resources.Order: + r"""Gets details of an order. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import gdchardwaremanagement_v1alpha + + async def sample_get_order(): + # Create a client + client = gdchardwaremanagement_v1alpha.GDCHardwareManagementAsyncClient() + + # Initialize request argument(s) + request = gdchardwaremanagement_v1alpha.GetOrderRequest( + name="name_value", + ) + + # Make the request + response = await client.get_order(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.gdchardwaremanagement_v1alpha.types.GetOrderRequest, dict]]): + The request object. A request to get an order. + name (:class:`str`): + Required. Name of the resource + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.gdchardwaremanagement_v1alpha.types.Order: + An order for GDC hardware. + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, service.GetOrderRequest): + request = service.GetOrderRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.get_order + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
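The `order_path`/`parse_order_path` helpers mirrored onto this client earlier in the file can build the fully qualified `name` that `get_order` requires; a brief sketch with placeholder project, location, and order IDs:

```python
import asyncio

from google.cloud import gdchardwaremanagement_v1alpha


async def get_order_by_id() -> None:
    client = gdchardwaremanagement_v1alpha.GDCHardwareManagementAsyncClient()

    # Builds "projects/my-project/locations/us-central1/orders/my-order".
    name = client.order_path("my-project", "us-central1", "my-order")

    order = await client.get_order(name=name)
    print(order.name)

    # The inverse helper splits a resource name back into its components.
    print(client.parse_order_path(order.name))


# asyncio.run(get_order_by_id())
```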
+ return response + + async def create_order( + self, + request: Optional[Union[service.CreateOrderRequest, dict]] = None, + *, + parent: Optional[str] = None, + order: Optional[resources.Order] = None, + order_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Creates a new order in a given project and location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import gdchardwaremanagement_v1alpha + + async def sample_create_order(): + # Create a client + client = gdchardwaremanagement_v1alpha.GDCHardwareManagementAsyncClient() + + # Initialize request argument(s) + order = gdchardwaremanagement_v1alpha.Order() + order.organization_contact.contacts.given_name = "given_name_value" + order.organization_contact.contacts.email = "email_value" + order.organization_contact.contacts.phone = "phone_value" + order.customer_motivation = "customer_motivation_value" + order.region_code = "region_code_value" + order.billing_id = "billing_id_value" + + request = gdchardwaremanagement_v1alpha.CreateOrderRequest( + parent="parent_value", + order=order, + ) + + # Make the request + operation = client.create_order(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.gdchardwaremanagement_v1alpha.types.CreateOrderRequest, dict]]): + The request object. A request to create an order. + parent (:class:`str`): + Required. The project and location to create the order + in. Format: ``projects/{project}/locations/{location}`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + order (:class:`google.cloud.gdchardwaremanagement_v1alpha.types.Order`): + Required. The order to create. + This corresponds to the ``order`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + order_id (:class:`str`): + Optional. ID used to uniquely identify the Order within + its parent scope. This field should contain at most 63 + characters and must start with lowercase characters. + Only lowercase characters, numbers and ``-`` are + accepted. The ``-`` character cannot be the first or the + last one. A system generated ID will be used if the + field is not set. + + The order.name field in the request will be ignored. + + This corresponds to the ``order_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. 
+ + The result type for the operation will be + :class:`google.cloud.gdchardwaremanagement_v1alpha.types.Order` + An order for GDC hardware. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, order, order_id]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, service.CreateOrderRequest): + request = service.CreateOrderRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if order is not None: + request.order = order + if order_id is not None: + request.order_id = order_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.create_order + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + resources.Order, + metadata_type=service.OperationMetadata, + ) + + # Done; return the response. + return response + + async def update_order( + self, + request: Optional[Union[service.UpdateOrderRequest, dict]] = None, + *, + order: Optional[resources.Order] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Updates the parameters of an order. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import gdchardwaremanagement_v1alpha + + async def sample_update_order(): + # Create a client + client = gdchardwaremanagement_v1alpha.GDCHardwareManagementAsyncClient() + + # Initialize request argument(s) + order = gdchardwaremanagement_v1alpha.Order() + order.organization_contact.contacts.given_name = "given_name_value" + order.organization_contact.contacts.email = "email_value" + order.organization_contact.contacts.phone = "phone_value" + order.customer_motivation = "customer_motivation_value" + order.region_code = "region_code_value" + order.billing_id = "billing_id_value" + + request = gdchardwaremanagement_v1alpha.UpdateOrderRequest( + order=order, + ) + + # Make the request + operation = client.update_order(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.gdchardwaremanagement_v1alpha.types.UpdateOrderRequest, dict]]): + The request object. A request to update an order. + order (:class:`google.cloud.gdchardwaremanagement_v1alpha.types.Order`): + Required. The order to update. + This corresponds to the ``order`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): + Required. A mask to specify the fields in the Order to + overwrite with this update. The fields specified in the + update_mask are relative to the order, not the full + request. A field will be overwritten if it is in the + mask. If you don't provide a mask then all fields will + be overwritten. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.gdchardwaremanagement_v1alpha.types.Order` + An order for GDC hardware. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([order, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, service.UpdateOrderRequest): + request = service.UpdateOrderRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if order is not None: + request.order = order + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
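Because `update_mask` is required and determines which `Order` fields are overwritten, a short sketch of a partial update may help; `customer_motivation` is taken from the generated sample above, the order name is a placeholder, and the long-running operation is awaited via `AsyncOperation.result()`.

```python
import asyncio

from google.protobuf import field_mask_pb2

from google.cloud import gdchardwaremanagement_v1alpha


async def update_order_motivation(order_name: str) -> None:
    client = gdchardwaremanagement_v1alpha.GDCHardwareManagementAsyncClient()

    # Only the fields listed in the mask are overwritten on the server.
    order = gdchardwaremanagement_v1alpha.Order(
        name=order_name,
        customer_motivation="updated motivation",
    )
    mask = field_mask_pb2.FieldMask(paths=["customer_motivation"])

    lro = await client.update_order(order=order, update_mask=mask)
    updated = await lro.result()
    print(updated.name)


# asyncio.run(update_order_motivation(
#     "projects/my-project/locations/us-central1/orders/my-order"))
```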
+ rpc = self._client._transport._wrapped_methods[ + self._client._transport.update_order + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("order.name", request.order.name),) + ), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + resources.Order, + metadata_type=service.OperationMetadata, + ) + + # Done; return the response. + return response + + async def delete_order( + self, + request: Optional[Union[service.DeleteOrderRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Deletes an order. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import gdchardwaremanagement_v1alpha + + async def sample_delete_order(): + # Create a client + client = gdchardwaremanagement_v1alpha.GDCHardwareManagementAsyncClient() + + # Initialize request argument(s) + request = gdchardwaremanagement_v1alpha.DeleteOrderRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_order(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.gdchardwaremanagement_v1alpha.types.DeleteOrderRequest, dict]]): + The request object. A request to delete an order. + name (:class:`str`): + Required. The name of the order. Format: + ``projects/{project}/locations/{location}/orders/{order}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. A typical example is to + use it as the request or the response type of an API + method. For instance: + + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); + + } + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
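As the docstring above notes, `delete_order` returns a long-running operation whose result type is `google.protobuf.Empty`, so waiting on it yields no payload; a minimal sketch (the order name is a placeholder):

```python
import asyncio

from google.cloud import gdchardwaremanagement_v1alpha


async def delete_order_and_wait(order_name: str) -> None:
    client = gdchardwaremanagement_v1alpha.GDCHardwareManagementAsyncClient()

    lro = await client.delete_order(name=order_name)

    # The operation resolves to google.protobuf.Empty, so there is no payload
    # to inspect; completing without raising means the delete succeeded.
    await lro.result()
    print(f"Deleted {order_name}")


# asyncio.run(delete_order_and_wait(
#     "projects/my-project/locations/us-central1/orders/my-order"))
```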
+ has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, service.DeleteOrderRequest): + request = service.DeleteOrderRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.delete_order + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + empty_pb2.Empty, + metadata_type=service.OperationMetadata, + ) + + # Done; return the response. + return response + + async def submit_order( + self, + request: Optional[Union[service.SubmitOrderRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Submits an order. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import gdchardwaremanagement_v1alpha + + async def sample_submit_order(): + # Create a client + client = gdchardwaremanagement_v1alpha.GDCHardwareManagementAsyncClient() + + # Initialize request argument(s) + request = gdchardwaremanagement_v1alpha.SubmitOrderRequest( + name="name_value", + ) + + # Make the request + operation = client.submit_order(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.gdchardwaremanagement_v1alpha.types.SubmitOrderRequest, dict]]): + The request object. A request to submit an order. + name (:class:`str`): + Required. The name of the order. Format: + ``projects/{project}/locations/{location}/orders/{order}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.gdchardwaremanagement_v1alpha.types.Order` + An order for GDC hardware. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, service.SubmitOrderRequest): + request = service.SubmitOrderRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.submit_order + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + resources.Order, + metadata_type=service.OperationMetadata, + ) + + # Done; return the response. + return response + + async def list_sites( + self, + request: Optional[Union[service.ListSitesRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListSitesAsyncPager: + r"""Lists sites in a given project and location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import gdchardwaremanagement_v1alpha + + async def sample_list_sites(): + # Create a client + client = gdchardwaremanagement_v1alpha.GDCHardwareManagementAsyncClient() + + # Initialize request argument(s) + request = gdchardwaremanagement_v1alpha.ListSitesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_sites(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.gdchardwaremanagement_v1alpha.types.ListSitesRequest, dict]]): + The request object. A request to list sites. + parent (:class:`str`): + Required. The project and location to list sites in. 
+ Format: ``projects/{project}/locations/{location}`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.gdchardwaremanagement_v1alpha.services.gdc_hardware_management.pagers.ListSitesAsyncPager: + A list of sites. + + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, service.ListSitesRequest): + request = service.ListSitesRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.list_sites + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListSitesAsyncPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_site( + self, + request: Optional[Union[service.GetSiteRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> resources.Site: + r"""Gets details of a site. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import gdchardwaremanagement_v1alpha + + async def sample_get_site(): + # Create a client + client = gdchardwaremanagement_v1alpha.GDCHardwareManagementAsyncClient() + + # Initialize request argument(s) + request = gdchardwaremanagement_v1alpha.GetSiteRequest( + name="name_value", + ) + + # Make the request + response = await client.get_site(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.gdchardwaremanagement_v1alpha.types.GetSiteRequest, dict]]): + The request object. A request to get a site. + name (:class:`str`): + Required. The name of the site. Format: + ``projects/{project}/locations/{location}/sites/{site}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.gdchardwaremanagement_v1alpha.types.Site: + A physical site where hardware will + be installed. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, service.GetSiteRequest): + request = service.GetSiteRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.get_site] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def create_site( + self, + request: Optional[Union[service.CreateSiteRequest, dict]] = None, + *, + parent: Optional[str] = None, + site: Optional[resources.Site] = None, + site_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Creates a new site in a given project and location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import gdchardwaremanagement_v1alpha + + async def sample_create_site(): + # Create a client + client = gdchardwaremanagement_v1alpha.GDCHardwareManagementAsyncClient() + + # Initialize request argument(s) + site = gdchardwaremanagement_v1alpha.Site() + site.organization_contact.contacts.given_name = "given_name_value" + site.organization_contact.contacts.email = "email_value" + site.organization_contact.contacts.phone = "phone_value" + site.google_maps_pin_uri = "google_maps_pin_uri_value" + + request = gdchardwaremanagement_v1alpha.CreateSiteRequest( + parent="parent_value", + site=site, + ) + + # Make the request + operation = client.create_site(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.gdchardwaremanagement_v1alpha.types.CreateSiteRequest, dict]]): + The request object. A request to create a site. + parent (:class:`str`): + Required. The project and location to create the site + in. Format: ``projects/{project}/locations/{location}`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + site (:class:`google.cloud.gdchardwaremanagement_v1alpha.types.Site`): + Required. The site to create. + This corresponds to the ``site`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + site_id (:class:`str`): + Optional. ID used to uniquely identify the Site within + its parent scope. This field should contain at most 63 + characters and must start with lowercase characters. + Only lowercase characters, numbers and ``-`` are + accepted. The ``-`` character cannot be the first or the + last one. A system generated ID will be used if the + field is not set. + + The site.name field in the request will be ignored. + + This corresponds to the ``site_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.gdchardwaremanagement_v1alpha.types.Site` + A physical site where hardware will be installed. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, site, site_id]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
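The `site_id` rules above (at most 63 characters, lowercase start, only lowercase letters, digits, and `-`, with `-` neither first nor last) can be exercised with an explicit ID; in this sketch the nested contact is passed as a dict to stay close to the fields shown in the generated sample, and the service may require additional `Site` fields not shown here.

```python
import asyncio

from google.cloud import gdchardwaremanagement_v1alpha


async def create_site_with_id(parent: str) -> None:
    client = gdchardwaremanagement_v1alpha.GDCHardwareManagementAsyncClient()

    site = gdchardwaremanagement_v1alpha.Site(
        google_maps_pin_uri="https://maps.app.goo.gl/example",
        organization_contact={
            "contacts": [
                {"given_name": "Ada", "email": "ada@example.com", "phone": "+1 555-0100"}
            ]
        },
    )

    # site_id must be <= 63 chars, start with a lowercase letter, and use only
    # lowercase letters, digits, and "-" (not as the first or last character).
    lro = await client.create_site(parent=parent, site=site, site_id="hq-site-1")
    created = await lro.result()
    print(created.name)


# asyncio.run(create_site_with_id("projects/my-project/locations/us-central1"))
```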
+ if not isinstance(request, service.CreateSiteRequest): + request = service.CreateSiteRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if site is not None: + request.site = site + if site_id is not None: + request.site_id = site_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.create_site + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + resources.Site, + metadata_type=service.OperationMetadata, + ) + + # Done; return the response. + return response + + async def update_site( + self, + request: Optional[Union[service.UpdateSiteRequest, dict]] = None, + *, + site: Optional[resources.Site] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Updates the parameters of a site. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import gdchardwaremanagement_v1alpha + + async def sample_update_site(): + # Create a client + client = gdchardwaremanagement_v1alpha.GDCHardwareManagementAsyncClient() + + # Initialize request argument(s) + site = gdchardwaremanagement_v1alpha.Site() + site.organization_contact.contacts.given_name = "given_name_value" + site.organization_contact.contacts.email = "email_value" + site.organization_contact.contacts.phone = "phone_value" + site.google_maps_pin_uri = "google_maps_pin_uri_value" + + request = gdchardwaremanagement_v1alpha.UpdateSiteRequest( + site=site, + ) + + # Make the request + operation = client.update_site(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.gdchardwaremanagement_v1alpha.types.UpdateSiteRequest, dict]]): + The request object. A request to update a site. + site (:class:`google.cloud.gdchardwaremanagement_v1alpha.types.Site`): + Required. The site to update. + This corresponds to the ``site`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): + Required. A mask to specify the fields in the Site to + overwrite with this update. 
The fields specified in the + update_mask are relative to the site, not the full + request. A field will be overwritten if it is in the + mask. If you don't provide a mask then all fields will + be overwritten. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.gdchardwaremanagement_v1alpha.types.Site` + A physical site where hardware will be installed. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([site, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, service.UpdateSiteRequest): + request = service.UpdateSiteRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if site is not None: + request.site = site + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.update_site + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("site.name", request.site.name),) + ), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + resources.Site, + metadata_type=service.OperationMetadata, + ) + + # Done; return the response. + return response + + async def list_hardware_groups( + self, + request: Optional[Union[service.ListHardwareGroupsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListHardwareGroupsAsyncPager: + r"""Lists hardware groups in a given order. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import gdchardwaremanagement_v1alpha + + async def sample_list_hardware_groups(): + # Create a client + client = gdchardwaremanagement_v1alpha.GDCHardwareManagementAsyncClient() + + # Initialize request argument(s) + request = gdchardwaremanagement_v1alpha.ListHardwareGroupsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_hardware_groups(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.gdchardwaremanagement_v1alpha.types.ListHardwareGroupsRequest, dict]]): + The request object. A request to list hardware groups. + parent (:class:`str`): + Required. The order to list hardware groups in. Format: + ``projects/{project}/locations/{location}/orders/{order}`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.gdchardwaremanagement_v1alpha.services.gdc_hardware_management.pagers.ListHardwareGroupsAsyncPager: + A list of hardware groups. + + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, service.ListHardwareGroupsRequest): + request = service.ListHardwareGroupsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.list_hardware_groups + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListHardwareGroupsAsyncPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. 
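+        # The returned AsyncPager can be consumed directly with `async for`;
+        # a minimal sketch (order name illustrative):
+        #   pager = await client.list_hardware_groups(
+        #       parent="projects/p/locations/l/orders/o")
+        #   async for hardware_group in pager:
+        #       print(hardware_group.name)
+        # Additional pages are fetched lazily as iteration proceeds.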
+ return response + + async def get_hardware_group( + self, + request: Optional[Union[service.GetHardwareGroupRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> resources.HardwareGroup: + r"""Gets details of a hardware group. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import gdchardwaremanagement_v1alpha + + async def sample_get_hardware_group(): + # Create a client + client = gdchardwaremanagement_v1alpha.GDCHardwareManagementAsyncClient() + + # Initialize request argument(s) + request = gdchardwaremanagement_v1alpha.GetHardwareGroupRequest( + name="name_value", + ) + + # Make the request + response = await client.get_hardware_group(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.gdchardwaremanagement_v1alpha.types.GetHardwareGroupRequest, dict]]): + The request object. A request to get a hardware group. + name (:class:`str`): + Required. The name of the hardware group. Format: + ``projects/{project}/locations/{location}/orders/{order}/hardwareGroups/{hardware_group}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.gdchardwaremanagement_v1alpha.types.HardwareGroup: + A group of hardware that is part of + the same order, has the same SKU, and is + delivered to the same site. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, service.GetHardwareGroupRequest): + request = service.GetHardwareGroupRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.get_hardware_group + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. 
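+        # The metadata assembled above includes the x-goog-request-params routing
+        # header populated from request.name, which helps the backend route this
+        # call to the correct resource.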
+ response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def create_hardware_group( + self, + request: Optional[Union[service.CreateHardwareGroupRequest, dict]] = None, + *, + parent: Optional[str] = None, + hardware_group: Optional[resources.HardwareGroup] = None, + hardware_group_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Creates a new hardware group in a given order. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import gdchardwaremanagement_v1alpha + + async def sample_create_hardware_group(): + # Create a client + client = gdchardwaremanagement_v1alpha.GDCHardwareManagementAsyncClient() + + # Initialize request argument(s) + hardware_group = gdchardwaremanagement_v1alpha.HardwareGroup() + hardware_group.hardware_count = 1494 + hardware_group.config.sku = "sku_value" + hardware_group.config.power_supply = "POWER_SUPPLY_DC" + hardware_group.site = "site_value" + + request = gdchardwaremanagement_v1alpha.CreateHardwareGroupRequest( + parent="parent_value", + hardware_group=hardware_group, + ) + + # Make the request + operation = client.create_hardware_group(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.gdchardwaremanagement_v1alpha.types.CreateHardwareGroupRequest, dict]]): + The request object. A request to create a hardware group. + parent (:class:`str`): + Required. The order to create the hardware group in. + Format: + ``projects/{project}/locations/{location}/orders/{order}`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + hardware_group (:class:`google.cloud.gdchardwaremanagement_v1alpha.types.HardwareGroup`): + Required. The hardware group to + create. + + This corresponds to the ``hardware_group`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + hardware_group_id (:class:`str`): + Optional. ID used to uniquely identify the HardwareGroup + within its parent scope. This field should contain at + most 63 characters and must start with lowercase + characters. Only lowercase characters, numbers and ``-`` + are accepted. The ``-`` character cannot be the first or + the last one. A system generated ID will be used if the + field is not set. + + The hardware_group.name field in the request will be + ignored. + + This corresponds to the ``hardware_group_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.gdchardwaremanagement_v1alpha.types.HardwareGroup` A group of hardware that is part of the same order, has the same SKU, and is + delivered to the same site. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, hardware_group, hardware_group_id]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, service.CreateHardwareGroupRequest): + request = service.CreateHardwareGroupRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if hardware_group is not None: + request.hardware_group = hardware_group + if hardware_group_id is not None: + request.hardware_group_id = hardware_group_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.create_hardware_group + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + resources.HardwareGroup, + metadata_type=service.OperationMetadata, + ) + + # Done; return the response. + return response + + async def update_hardware_group( + self, + request: Optional[Union[service.UpdateHardwareGroupRequest, dict]] = None, + *, + hardware_group: Optional[resources.HardwareGroup] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Updates the parameters of a hardware group. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import gdchardwaremanagement_v1alpha + + async def sample_update_hardware_group(): + # Create a client + client = gdchardwaremanagement_v1alpha.GDCHardwareManagementAsyncClient() + + # Initialize request argument(s) + hardware_group = gdchardwaremanagement_v1alpha.HardwareGroup() + hardware_group.hardware_count = 1494 + hardware_group.config.sku = "sku_value" + hardware_group.config.power_supply = "POWER_SUPPLY_DC" + hardware_group.site = "site_value" + + request = gdchardwaremanagement_v1alpha.UpdateHardwareGroupRequest( + hardware_group=hardware_group, + ) + + # Make the request + operation = client.update_hardware_group(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.gdchardwaremanagement_v1alpha.types.UpdateHardwareGroupRequest, dict]]): + The request object. A request to update a hardware group. + hardware_group (:class:`google.cloud.gdchardwaremanagement_v1alpha.types.HardwareGroup`): + Required. The hardware group to + update. + + This corresponds to the ``hardware_group`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): + Required. A mask to specify the fields in the + HardwareGroup to overwrite with this update. The fields + specified in the update_mask are relative to the + hardware group, not the full request. A field will be + overwritten if it is in the mask. If you don't provide a + mask then all fields will be overwritten. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.gdchardwaremanagement_v1alpha.types.HardwareGroup` A group of hardware that is part of the same order, has the same SKU, and is + delivered to the same site. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([hardware_group, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, service.UpdateHardwareGroupRequest): + request = service.UpdateHardwareGroupRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
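+        # A typical update_mask limits the overwrite to specific fields, e.g.
+        # field_mask_pb2.FieldMask(paths=["hardware_count", "site"]) (paths
+        # illustrative); per the docstring above, omitting the mask overwrites
+        # all fields.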
+ if hardware_group is not None: + request.hardware_group = hardware_group + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.update_hardware_group + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("hardware_group.name", request.hardware_group.name),) + ), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + resources.HardwareGroup, + metadata_type=service.OperationMetadata, + ) + + # Done; return the response. + return response + + async def delete_hardware_group( + self, + request: Optional[Union[service.DeleteHardwareGroupRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Deletes a hardware group. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import gdchardwaremanagement_v1alpha + + async def sample_delete_hardware_group(): + # Create a client + client = gdchardwaremanagement_v1alpha.GDCHardwareManagementAsyncClient() + + # Initialize request argument(s) + request = gdchardwaremanagement_v1alpha.DeleteHardwareGroupRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_hardware_group(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.gdchardwaremanagement_v1alpha.types.DeleteHardwareGroupRequest, dict]]): + The request object. A request to delete a hardware group. + name (:class:`str`): + Required. The name of the hardware group. Format: + ``projects/{project}/locations/{location}/orders/{order}/hardwareGroups/{hardware_group}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. 
A typical example is to + use it as the request or the response type of an API + method. For instance: + + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); + + } + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, service.DeleteHardwareGroupRequest): + request = service.DeleteHardwareGroupRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.delete_hardware_group + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + empty_pb2.Empty, + metadata_type=service.OperationMetadata, + ) + + # Done; return the response. + return response + + async def list_hardware( + self, + request: Optional[Union[service.ListHardwareRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListHardwareAsyncPager: + r"""Lists hardware in a given project and location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import gdchardwaremanagement_v1alpha + + async def sample_list_hardware(): + # Create a client + client = gdchardwaremanagement_v1alpha.GDCHardwareManagementAsyncClient() + + # Initialize request argument(s) + request = gdchardwaremanagement_v1alpha.ListHardwareRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_hardware(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.gdchardwaremanagement_v1alpha.types.ListHardwareRequest, dict]]): + The request object. A request to list hardware. + parent (:class:`str`): + Required. The project and location to list hardware in. 
+ Format: ``projects/{project}/locations/{location}`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.gdchardwaremanagement_v1alpha.services.gdc_hardware_management.pagers.ListHardwareAsyncPager: + A list of hardware. + + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, service.ListHardwareRequest): + request = service.ListHardwareRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.list_hardware + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListHardwareAsyncPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_hardware( + self, + request: Optional[Union[service.GetHardwareRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> resources.Hardware: + r"""Gets hardware details. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import gdchardwaremanagement_v1alpha + + async def sample_get_hardware(): + # Create a client + client = gdchardwaremanagement_v1alpha.GDCHardwareManagementAsyncClient() + + # Initialize request argument(s) + request = gdchardwaremanagement_v1alpha.GetHardwareRequest( + name="name_value", + ) + + # Make the request + response = await client.get_hardware(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.gdchardwaremanagement_v1alpha.types.GetHardwareRequest, dict]]): + The request object. A request to get hardware. + name (:class:`str`): + Required. The name of the hardware. Format: + ``projects/{project}/locations/{location}/hardware/{hardware}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.gdchardwaremanagement_v1alpha.types.Hardware: + An instance of hardware installed at + a site. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, service.GetHardwareRequest): + request = service.GetHardwareRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.get_hardware + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def create_hardware( + self, + request: Optional[Union[service.CreateHardwareRequest, dict]] = None, + *, + parent: Optional[str] = None, + hardware: Optional[resources.Hardware] = None, + hardware_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Creates new hardware in a given project and location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import gdchardwaremanagement_v1alpha + + async def sample_create_hardware(): + # Create a client + client = gdchardwaremanagement_v1alpha.GDCHardwareManagementAsyncClient() + + # Initialize request argument(s) + hardware = gdchardwaremanagement_v1alpha.Hardware() + hardware.order = "order_value" + hardware.site = "site_value" + hardware.config.sku = "sku_value" + hardware.config.power_supply = "POWER_SUPPLY_DC" + hardware.zone = "zone_value" + + request = gdchardwaremanagement_v1alpha.CreateHardwareRequest( + parent="parent_value", + hardware=hardware, + ) + + # Make the request + operation = client.create_hardware(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.gdchardwaremanagement_v1alpha.types.CreateHardwareRequest, dict]]): + The request object. A request to create hardware. + parent (:class:`str`): + Required. The project and location to create hardware + in. Format: ``projects/{project}/locations/{location}`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + hardware (:class:`google.cloud.gdchardwaremanagement_v1alpha.types.Hardware`): + Required. The resource to create. + This corresponds to the ``hardware`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + hardware_id (:class:`str`): + Optional. ID used to uniquely identify the Hardware + within its parent scope. This field should contain at + most 63 characters and must start with lowercase + characters. Only lowercase characters, numbers and ``-`` + are accepted. The ``-`` character cannot be the first or + the last one. A system generated ID will be used if the + field is not set. + + The hardware.name field in the request will be ignored. + + This corresponds to the ``hardware_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.gdchardwaremanagement_v1alpha.types.Hardware` + An instance of hardware installed at a site. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, hardware, hardware_id]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
+ if not isinstance(request, service.CreateHardwareRequest): + request = service.CreateHardwareRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if hardware is not None: + request.hardware = hardware + if hardware_id is not None: + request.hardware_id = hardware_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.create_hardware + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + resources.Hardware, + metadata_type=service.OperationMetadata, + ) + + # Done; return the response. + return response + + async def update_hardware( + self, + request: Optional[Union[service.UpdateHardwareRequest, dict]] = None, + *, + hardware: Optional[resources.Hardware] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Updates hardware parameters. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import gdchardwaremanagement_v1alpha + + async def sample_update_hardware(): + # Create a client + client = gdchardwaremanagement_v1alpha.GDCHardwareManagementAsyncClient() + + # Initialize request argument(s) + hardware = gdchardwaremanagement_v1alpha.Hardware() + hardware.order = "order_value" + hardware.site = "site_value" + hardware.config.sku = "sku_value" + hardware.config.power_supply = "POWER_SUPPLY_DC" + hardware.zone = "zone_value" + + request = gdchardwaremanagement_v1alpha.UpdateHardwareRequest( + hardware=hardware, + ) + + # Make the request + operation = client.update_hardware(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.gdchardwaremanagement_v1alpha.types.UpdateHardwareRequest, dict]]): + The request object. A request to update hardware. + hardware (:class:`google.cloud.gdchardwaremanagement_v1alpha.types.Hardware`): + Required. The hardware to update. + This corresponds to the ``hardware`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): + Required. A mask to specify the fields in the Hardware + to overwrite with this update. 
The fields specified in + the update_mask are relative to the hardware, not the + full request. A field will be overwritten if it is in + the mask. If you don't provide a mask then all fields + will be overwritten. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.gdchardwaremanagement_v1alpha.types.Hardware` + An instance of hardware installed at a site. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([hardware, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, service.UpdateHardwareRequest): + request = service.UpdateHardwareRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if hardware is not None: + request.hardware = hardware + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.update_hardware + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("hardware.name", request.hardware.name),) + ), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + resources.Hardware, + metadata_type=service.OperationMetadata, + ) + + # Done; return the response. + return response + + async def delete_hardware( + self, + request: Optional[Union[service.DeleteHardwareRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Deletes hardware. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import gdchardwaremanagement_v1alpha + + async def sample_delete_hardware(): + # Create a client + client = gdchardwaremanagement_v1alpha.GDCHardwareManagementAsyncClient() + + # Initialize request argument(s) + request = gdchardwaremanagement_v1alpha.DeleteHardwareRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_hardware(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.gdchardwaremanagement_v1alpha.types.DeleteHardwareRequest, dict]]): + The request object. A request to delete hardware. + name (:class:`str`): + Required. The name of the hardware. Format: + ``projects/{project}/locations/{location}/hardware/{hardware}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. A typical example is to + use it as the request or the response type of an API + method. For instance: + + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); + + } + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, service.DeleteHardwareRequest): + request = service.DeleteHardwareRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.delete_hardware + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. 
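+        # The future's eventual result is empty_pb2.Empty; a caller can wait for
+        # completion with, for example (name variable illustrative):
+        #   operation = await client.delete_hardware(name=hardware_name)
+        #   await operation.result()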
+ response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + empty_pb2.Empty, + metadata_type=service.OperationMetadata, + ) + + # Done; return the response. + return response + + async def list_comments( + self, + request: Optional[Union[service.ListCommentsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListCommentsAsyncPager: + r"""Lists the comments on an order. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import gdchardwaremanagement_v1alpha + + async def sample_list_comments(): + # Create a client + client = gdchardwaremanagement_v1alpha.GDCHardwareManagementAsyncClient() + + # Initialize request argument(s) + request = gdchardwaremanagement_v1alpha.ListCommentsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_comments(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.gdchardwaremanagement_v1alpha.types.ListCommentsRequest, dict]]): + The request object. A request to list comments. + parent (:class:`str`): + Required. The order to list comments on. Format: + ``projects/{project}/locations/{location}/orders/{order}`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.gdchardwaremanagement_v1alpha.services.gdc_hardware_management.pagers.ListCommentsAsyncPager: + A request to list comments. + + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, service.ListCommentsRequest): + request = service.ListCommentsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
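+        # The wrapped method applies any default retry and timeout configured for
+        # this RPC; both can also be overridden per call, e.g. (illustrative):
+        #   from google.api_core import retry_async
+        #   await client.list_comments(parent=order_name, timeout=30.0,
+        #                              retry=retry_async.AsyncRetry(initial=1.0, maximum=10.0))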
+ rpc = self._client._transport._wrapped_methods[ + self._client._transport.list_comments + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListCommentsAsyncPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_comment( + self, + request: Optional[Union[service.GetCommentRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> resources.Comment: + r"""Gets the content of a comment. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import gdchardwaremanagement_v1alpha + + async def sample_get_comment(): + # Create a client + client = gdchardwaremanagement_v1alpha.GDCHardwareManagementAsyncClient() + + # Initialize request argument(s) + request = gdchardwaremanagement_v1alpha.GetCommentRequest( + name="name_value", + ) + + # Make the request + response = await client.get_comment(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.gdchardwaremanagement_v1alpha.types.GetCommentRequest, dict]]): + The request object. A request to get a comment. + name (:class:`str`): + Required. The name of the comment. Format: + ``projects/{project}/locations/{location}/orders/{order}/comments/{comment}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.gdchardwaremanagement_v1alpha.types.Comment: + A comment on an order. + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
+ if not isinstance(request, service.GetCommentRequest): + request = service.GetCommentRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.get_comment + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def create_comment( + self, + request: Optional[Union[service.CreateCommentRequest, dict]] = None, + *, + parent: Optional[str] = None, + comment: Optional[resources.Comment] = None, + comment_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Creates a new comment on an order. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import gdchardwaremanagement_v1alpha + + async def sample_create_comment(): + # Create a client + client = gdchardwaremanagement_v1alpha.GDCHardwareManagementAsyncClient() + + # Initialize request argument(s) + comment = gdchardwaremanagement_v1alpha.Comment() + comment.text = "text_value" + + request = gdchardwaremanagement_v1alpha.CreateCommentRequest( + parent="parent_value", + comment=comment, + ) + + # Make the request + operation = client.create_comment(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.gdchardwaremanagement_v1alpha.types.CreateCommentRequest, dict]]): + The request object. A request to create a comment. + parent (:class:`str`): + Required. The order to create the comment on. Format: + ``projects/{project}/locations/{location}/orders/{order}`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + comment (:class:`google.cloud.gdchardwaremanagement_v1alpha.types.Comment`): + Required. The comment to create. + This corresponds to the ``comment`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + comment_id (:class:`str`): + Optional. ID used to uniquely identify the Comment + within its parent scope. This field should contain at + most 63 characters and must start with lowercase + characters. Only lowercase characters, numbers and ``-`` + are accepted. The ``-`` character cannot be the first or + the last one. A system generated ID will be used if the + field is not set. 
+ + The comment.name field in the request will be ignored. + + This corresponds to the ``comment_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.gdchardwaremanagement_v1alpha.types.Comment` + A comment on an order. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, comment, comment_id]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, service.CreateCommentRequest): + request = service.CreateCommentRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if comment is not None: + request.comment = comment + if comment_id is not None: + request.comment_id = comment_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.create_comment + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + resources.Comment, + metadata_type=service.OperationMetadata, + ) + + # Done; return the response. + return response + + async def list_change_log_entries( + self, + request: Optional[Union[service.ListChangeLogEntriesRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListChangeLogEntriesAsyncPager: + r"""Lists the changes made to an order. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import gdchardwaremanagement_v1alpha + + async def sample_list_change_log_entries(): + # Create a client + client = gdchardwaremanagement_v1alpha.GDCHardwareManagementAsyncClient() + + # Initialize request argument(s) + request = gdchardwaremanagement_v1alpha.ListChangeLogEntriesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_change_log_entries(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.gdchardwaremanagement_v1alpha.types.ListChangeLogEntriesRequest, dict]]): + The request object. A request to list change log entries. + parent (:class:`str`): + Required. The order to list change log entries for. + Format: + ``projects/{project}/locations/{location}/orders/{order}`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.gdchardwaremanagement_v1alpha.services.gdc_hardware_management.pagers.ListChangeLogEntriesAsyncPager: + A list of change log entries. + + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, service.ListChangeLogEntriesRequest): + request = service.ListChangeLogEntriesRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.list_change_log_entries + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListChangeLogEntriesAsyncPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def get_change_log_entry( + self, + request: Optional[Union[service.GetChangeLogEntryRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> resources.ChangeLogEntry: + r"""Gets details of a change to an order. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import gdchardwaremanagement_v1alpha + + async def sample_get_change_log_entry(): + # Create a client + client = gdchardwaremanagement_v1alpha.GDCHardwareManagementAsyncClient() + + # Initialize request argument(s) + request = gdchardwaremanagement_v1alpha.GetChangeLogEntryRequest( + name="name_value", + ) + + # Make the request + response = await client.get_change_log_entry(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.gdchardwaremanagement_v1alpha.types.GetChangeLogEntryRequest, dict]]): + The request object. A request to get a change log entry. + name (:class:`str`): + Required. The name of the change log entry. Format: + ``projects/{project}/locations/{location}/orders/{order}/changeLogEntries/{change_log_entry}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.gdchardwaremanagement_v1alpha.types.ChangeLogEntry: + A log entry of a change made to an + order. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, service.GetChangeLogEntryRequest): + request = service.GetChangeLogEntryRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.get_change_log_entry + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. 
+ response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_skus( + self, + request: Optional[Union[service.ListSkusRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListSkusAsyncPager: + r"""Lists SKUs for a given project and location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import gdchardwaremanagement_v1alpha + + async def sample_list_skus(): + # Create a client + client = gdchardwaremanagement_v1alpha.GDCHardwareManagementAsyncClient() + + # Initialize request argument(s) + request = gdchardwaremanagement_v1alpha.ListSkusRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_skus(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.gdchardwaremanagement_v1alpha.types.ListSkusRequest, dict]]): + The request object. A request to list SKUs. + parent (:class:`str`): + Required. The project and location to list SKUs in. + Format: ``projects/{project}/locations/{location}`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.gdchardwaremanagement_v1alpha.services.gdc_hardware_management.pagers.ListSkusAsyncPager: + A list of SKUs. + + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, service.ListSkusRequest): + request = service.ListSkusRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.list_skus + ] + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListSkusAsyncPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_sku( + self, + request: Optional[Union[service.GetSkuRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> resources.Sku: + r"""Gets details of an SKU. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import gdchardwaremanagement_v1alpha + + async def sample_get_sku(): + # Create a client + client = gdchardwaremanagement_v1alpha.GDCHardwareManagementAsyncClient() + + # Initialize request argument(s) + request = gdchardwaremanagement_v1alpha.GetSkuRequest( + name="name_value", + ) + + # Make the request + response = await client.get_sku(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.gdchardwaremanagement_v1alpha.types.GetSkuRequest, dict]]): + The request object. A request to get an SKU. + name (:class:`str`): + Required. The name of the SKU. Format: + ``projects/{project}/locations/{location}/skus/{sku}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.gdchardwaremanagement_v1alpha.types.Sku: + A stock keeping unit (SKU) of GDC + hardware. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, service.GetSkuRequest): + request = service.GetSkuRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = self._client._transport._wrapped_methods[self._client._transport.get_sku] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_zones( + self, + request: Optional[Union[service.ListZonesRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListZonesAsyncPager: + r"""Lists zones in a given project and location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import gdchardwaremanagement_v1alpha + + async def sample_list_zones(): + # Create a client + client = gdchardwaremanagement_v1alpha.GDCHardwareManagementAsyncClient() + + # Initialize request argument(s) + request = gdchardwaremanagement_v1alpha.ListZonesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_zones(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.gdchardwaremanagement_v1alpha.types.ListZonesRequest, dict]]): + The request object. A request to list zones. + parent (:class:`str`): + Required. The project and location to list zones in. + Format: ``projects/{project}/locations/{location}`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.gdchardwaremanagement_v1alpha.services.gdc_hardware_management.pagers.ListZonesAsyncPager: + A list of zones. + + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, service.ListZonesRequest): + request = service.ListZonesRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.list_zones + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListZonesAsyncPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_zone( + self, + request: Optional[Union[service.GetZoneRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> resources.Zone: + r"""Gets details of a zone. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import gdchardwaremanagement_v1alpha + + async def sample_get_zone(): + # Create a client + client = gdchardwaremanagement_v1alpha.GDCHardwareManagementAsyncClient() + + # Initialize request argument(s) + request = gdchardwaremanagement_v1alpha.GetZoneRequest( + name="name_value", + ) + + # Make the request + response = await client.get_zone(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.gdchardwaremanagement_v1alpha.types.GetZoneRequest, dict]]): + The request object. A request to get a zone. + name (:class:`str`): + Required. The name of the zone. Format: + ``projects/{project}/locations/{location}/zones/{zone}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.gdchardwaremanagement_v1alpha.types.Zone: + A zone holding a set of hardware. + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
+ if not isinstance(request, service.GetZoneRequest): + request = service.GetZoneRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.get_zone] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def create_zone( + self, + request: Optional[Union[service.CreateZoneRequest, dict]] = None, + *, + parent: Optional[str] = None, + zone: Optional[resources.Zone] = None, + zone_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Creates a new zone in a given project and location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import gdchardwaremanagement_v1alpha + + async def sample_create_zone(): + # Create a client + client = gdchardwaremanagement_v1alpha.GDCHardwareManagementAsyncClient() + + # Initialize request argument(s) + zone = gdchardwaremanagement_v1alpha.Zone() + zone.contacts.given_name = "given_name_value" + zone.contacts.email = "email_value" + zone.contacts.phone = "phone_value" + + request = gdchardwaremanagement_v1alpha.CreateZoneRequest( + parent="parent_value", + zone=zone, + ) + + # Make the request + operation = client.create_zone(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.gdchardwaremanagement_v1alpha.types.CreateZoneRequest, dict]]): + The request object. A request to create a zone. + parent (:class:`str`): + Required. The project and location to create the zone + in. Format: ``projects/{project}/locations/{location}`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + zone (:class:`google.cloud.gdchardwaremanagement_v1alpha.types.Zone`): + Required. The zone to create. + This corresponds to the ``zone`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + zone_id (:class:`str`): + Optional. ID used to uniquely identify the Zone within + its parent scope. This field should contain at most 63 + characters and must start with lowercase characters. + Only lowercase characters, numbers and ``-`` are + accepted. The ``-`` character cannot be the first or the + last one. 
A system generated ID will be used if the + field is not set. + + The zone.name field in the request will be ignored. + + This corresponds to the ``zone_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.gdchardwaremanagement_v1alpha.types.Zone` + A zone holding a set of hardware. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, zone, zone_id]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, service.CreateZoneRequest): + request = service.CreateZoneRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if zone is not None: + request.zone = zone + if zone_id is not None: + request.zone_id = zone_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.create_zone + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + resources.Zone, + metadata_type=service.OperationMetadata, + ) + + # Done; return the response. + return response + + async def update_zone( + self, + request: Optional[Union[service.UpdateZoneRequest, dict]] = None, + *, + zone: Optional[resources.Zone] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Updates the parameters of a zone. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import gdchardwaremanagement_v1alpha + + async def sample_update_zone(): + # Create a client + client = gdchardwaremanagement_v1alpha.GDCHardwareManagementAsyncClient() + + # Initialize request argument(s) + zone = gdchardwaremanagement_v1alpha.Zone() + zone.contacts.given_name = "given_name_value" + zone.contacts.email = "email_value" + zone.contacts.phone = "phone_value" + + request = gdchardwaremanagement_v1alpha.UpdateZoneRequest( + zone=zone, + ) + + # Make the request + operation = client.update_zone(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.gdchardwaremanagement_v1alpha.types.UpdateZoneRequest, dict]]): + The request object. A request to update a zone. + zone (:class:`google.cloud.gdchardwaremanagement_v1alpha.types.Zone`): + Required. The zone to update. + This corresponds to the ``zone`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): + Required. A mask to specify the fields in the Zone to + overwrite with this update. The fields specified in the + update_mask are relative to the zone, not the full + request. A field will be overwritten if it is in the + mask. If you don't provide a mask then all fields will + be overwritten. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.gdchardwaremanagement_v1alpha.types.Zone` + A zone holding a set of hardware. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([zone, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, service.UpdateZoneRequest): + request = service.UpdateZoneRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if zone is not None: + request.zone = zone + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.update_zone + ] + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("zone.name", request.zone.name),) + ), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + resources.Zone, + metadata_type=service.OperationMetadata, + ) + + # Done; return the response. + return response + + async def delete_zone( + self, + request: Optional[Union[service.DeleteZoneRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Deletes a zone. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import gdchardwaremanagement_v1alpha + + async def sample_delete_zone(): + # Create a client + client = gdchardwaremanagement_v1alpha.GDCHardwareManagementAsyncClient() + + # Initialize request argument(s) + request = gdchardwaremanagement_v1alpha.DeleteZoneRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_zone(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.gdchardwaremanagement_v1alpha.types.DeleteZoneRequest, dict]]): + The request object. A request to delete a zone. + name (:class:`str`): + Required. The name of the zone. Format: + ``projects/{project}/locations/{location}/zones/{zone}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. A typical example is to + use it as the request or the response type of an API + method. For instance: + + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); + + } + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." 
+ ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, service.DeleteZoneRequest): + request = service.DeleteZoneRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.delete_zone + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + empty_pb2.Empty, + metadata_type=service.OperationMetadata, + ) + + # Done; return the response. + return response + + async def signal_zone_state( + self, + request: Optional[Union[service.SignalZoneStateRequest, dict]] = None, + *, + name: Optional[str] = None, + state_signal: Optional[service.SignalZoneStateRequest.StateSignal] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Signals the state of a zone. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import gdchardwaremanagement_v1alpha + + async def sample_signal_zone_state(): + # Create a client + client = gdchardwaremanagement_v1alpha.GDCHardwareManagementAsyncClient() + + # Initialize request argument(s) + request = gdchardwaremanagement_v1alpha.SignalZoneStateRequest( + name="name_value", + state_signal="FACTORY_TURNUP_CHECKS_FAILED", + ) + + # Make the request + operation = client.signal_zone_state(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.gdchardwaremanagement_v1alpha.types.SignalZoneStateRequest, dict]]): + The request object. A request to signal the state of a + zone. + name (:class:`str`): + Required. The name of the zone. Format: + ``projects/{project}/locations/{location}/zones/{zone}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + state_signal (:class:`google.cloud.gdchardwaremanagement_v1alpha.types.SignalZoneStateRequest.StateSignal`): + Required. The state signal to send + for this zone. + + This corresponds to the ``state_signal`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.gdchardwaremanagement_v1alpha.types.Zone` + A zone holding a set of hardware. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name, state_signal]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, service.SignalZoneStateRequest): + request = service.SignalZoneStateRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + if state_signal is not None: + request.state_signal = state_signal + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.signal_zone_state + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + resources.Zone, + metadata_type=service.OperationMetadata, + ) + + # Done; return the response. + return response + + async def list_operations( + self, + request: Optional[operations_pb2.ListOperationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Lists operations that match the specified filter in the request. + + Args: + request (:class:`~.operations_pb2.ListOperationsRequest`): + The request object. Request message for + `ListOperations` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.ListOperationsResponse: + Response message for ``ListOperations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.ListOperationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_operations, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_operation( + self, + request: Optional[operations_pb2.GetOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. + + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def delete_operation( + self, + request: Optional[operations_pb2.DeleteOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a long-running operation. + + This method indicates that the client is no longer interested + in the operation result. It does not cancel the operation. + If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.DeleteOperationRequest`): + The request object. Request message for + `DeleteOperation` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. 
+ if isinstance(request, dict): + request = operations_pb2.DeleteOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.delete_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def cancel_operation( + self, + request: Optional[operations_pb2.CancelOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Starts asynchronous cancellation on a long-running operation. + + The server makes a best effort to cancel the operation, but success + is not guaranteed. If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.CancelOperationRequest`): + The request object. Request message for + `CancelOperation` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.CancelOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.cancel_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def get_location( + self, + request: Optional[locations_pb2.GetLocationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.Location: + r"""Gets information about a location. + + Args: + request (:class:`~.location_pb2.GetLocationRequest`): + The request object. Request message for + `GetLocation` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.location_pb2.Location: + Location object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. 
+ if isinstance(request, dict): + request = locations_pb2.GetLocationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_location, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_locations( + self, + request: Optional[locations_pb2.ListLocationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.ListLocationsResponse: + r"""Lists information about the supported locations for this service. + + Args: + request (:class:`~.location_pb2.ListLocationsRequest`): + The request object. Request message for + `ListLocations` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.location_pb2.ListLocationsResponse: + Response message for ``ListLocations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = locations_pb2.ListLocationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_locations, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def __aenter__(self) -> "GDCHardwareManagementAsyncClient": + return self + + async def __aexit__(self, exc_type, exc, tb): + await self.transport.close() + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("GDCHardwareManagementAsyncClient",) diff --git a/packages/google-cloud-gdchardwaremanagement/google/cloud/gdchardwaremanagement_v1alpha/services/gdc_hardware_management/client.py b/packages/google-cloud-gdchardwaremanagement/google/cloud/gdchardwaremanagement_v1alpha/services/gdc_hardware_management/client.py new file mode 100644 index 000000000000..4cfb94a1615d --- /dev/null +++ b/packages/google-cloud-gdchardwaremanagement/google/cloud/gdchardwaremanagement_v1alpha/services/gdc_hardware_management/client.py @@ -0,0 +1,5276 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +import os +import re +from typing import ( + Callable, + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, + cast, +) +import warnings + +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.gdchardwaremanagement_v1alpha import gapic_version as package_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + +from google.api_core import operation # type: ignore +from google.api_core import operation_async # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +from google.type import date_pb2 # type: ignore + +from google.cloud.gdchardwaremanagement_v1alpha.services.gdc_hardware_management import ( + pagers, +) +from google.cloud.gdchardwaremanagement_v1alpha.types import resources, service + +from .transports.base import DEFAULT_CLIENT_INFO, GDCHardwareManagementTransport +from .transports.grpc import GDCHardwareManagementGrpcTransport +from .transports.grpc_asyncio import GDCHardwareManagementGrpcAsyncIOTransport +from .transports.rest import GDCHardwareManagementRestTransport + + +class GDCHardwareManagementClientMeta(type): + 
"""Metaclass for the GDCHardwareManagement client. + + This provides class-level methods for building and retrieving + support objects (e.g. transport) without polluting the client instance + objects. + """ + + _transport_registry = ( + OrderedDict() + ) # type: Dict[str, Type[GDCHardwareManagementTransport]] + _transport_registry["grpc"] = GDCHardwareManagementGrpcTransport + _transport_registry["grpc_asyncio"] = GDCHardwareManagementGrpcAsyncIOTransport + _transport_registry["rest"] = GDCHardwareManagementRestTransport + + def get_transport_class( + cls, + label: Optional[str] = None, + ) -> Type[GDCHardwareManagementTransport]: + """Returns an appropriate transport class. + + Args: + label: The name of the desired transport. If none is + provided, then the first transport in the registry is used. + + Returns: + The transport class to use. + """ + # If a specific transport is requested, return that one. + if label: + return cls._transport_registry[label] + + # No transport is requested; return the default (that is, the first one + # in the dictionary). + return next(iter(cls._transport_registry.values())) + + +class GDCHardwareManagementClient(metaclass=GDCHardwareManagementClientMeta): + """The GDC Hardware Management service.""" + + @staticmethod + def _get_default_mtls_endpoint(api_endpoint): + """Converts api endpoint to mTLS endpoint. + + Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to + "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. + Args: + api_endpoint (Optional[str]): the api endpoint to convert. + Returns: + str: converted mTLS api endpoint. + """ + if not api_endpoint: + return api_endpoint + + mtls_endpoint_re = re.compile( + r"(?P[^.]+)(?P\.mtls)?(?P\.sandbox)?(?P\.googleapis\.com)?" + ) + + m = mtls_endpoint_re.match(api_endpoint) + name, mtls, sandbox, googledomain = m.groups() + if mtls or not googledomain: + return api_endpoint + + if sandbox: + return api_endpoint.replace( + "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" + ) + + return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") + + # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. + DEFAULT_ENDPOINT = "gdchardwaremanagement.googleapis.com" + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore + DEFAULT_ENDPOINT + ) + + _DEFAULT_ENDPOINT_TEMPLATE = "gdchardwaremanagement.{UNIVERSE_DOMAIN}" + _DEFAULT_UNIVERSE = "googleapis.com" + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + GDCHardwareManagementClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + GDCHardwareManagementClient: The constructed client. 
+ """ + credentials = service_account.Credentials.from_service_account_file(filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> GDCHardwareManagementTransport: + """Returns the transport used by the client instance. + + Returns: + GDCHardwareManagementTransport: The transport used by the client + instance. + """ + return self._transport + + @staticmethod + def change_log_entry_path( + project: str, + location: str, + order: str, + change_log_entry: str, + ) -> str: + """Returns a fully-qualified change_log_entry string.""" + return "projects/{project}/locations/{location}/orders/{order}/changeLogEntries/{change_log_entry}".format( + project=project, + location=location, + order=order, + change_log_entry=change_log_entry, + ) + + @staticmethod + def parse_change_log_entry_path(path: str) -> Dict[str, str]: + """Parses a change_log_entry path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/orders/(?P.+?)/changeLogEntries/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + + @staticmethod + def comment_path( + project: str, + location: str, + order: str, + comment: str, + ) -> str: + """Returns a fully-qualified comment string.""" + return "projects/{project}/locations/{location}/orders/{order}/comments/{comment}".format( + project=project, + location=location, + order=order, + comment=comment, + ) + + @staticmethod + def parse_comment_path(path: str) -> Dict[str, str]: + """Parses a comment path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/orders/(?P.+?)/comments/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + + @staticmethod + def hardware_path( + project: str, + location: str, + hardware: str, + ) -> str: + """Returns a fully-qualified hardware string.""" + return "projects/{project}/locations/{location}/hardware/{hardware}".format( + project=project, + location=location, + hardware=hardware, + ) + + @staticmethod + def parse_hardware_path(path: str) -> Dict[str, str]: + """Parses a hardware path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/hardware/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + + @staticmethod + def hardware_group_path( + project: str, + location: str, + order: str, + hardware_group: str, + ) -> str: + """Returns a fully-qualified hardware_group string.""" + return "projects/{project}/locations/{location}/orders/{order}/hardwareGroups/{hardware_group}".format( + project=project, + location=location, + order=order, + hardware_group=hardware_group, + ) + + @staticmethod + def parse_hardware_group_path(path: str) -> Dict[str, str]: + """Parses a hardware_group path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/orders/(?P.+?)/hardwareGroups/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + + @staticmethod + def order_path( + project: str, + location: str, + order: str, + ) -> str: + """Returns a fully-qualified order string.""" + return "projects/{project}/locations/{location}/orders/{order}".format( + project=project, + location=location, + order=order, + ) + + @staticmethod + def parse_order_path(path: str) -> Dict[str, str]: + """Parses a order path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/orders/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + + @staticmethod + def site_path( + 
+        project: str,
+        location: str,
+        site: str,
+    ) -> str:
+        """Returns a fully-qualified site string."""
+        return "projects/{project}/locations/{location}/sites/{site}".format(
+            project=project,
+            location=location,
+            site=site,
+        )
+
+    @staticmethod
+    def parse_site_path(path: str) -> Dict[str, str]:
+        """Parses a site path into its component segments."""
+        m = re.match(
+            r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/sites/(?P<site>.+?)$",
+            path,
+        )
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def sku_path(
+        project: str,
+        location: str,
+        sku: str,
+    ) -> str:
+        """Returns a fully-qualified sku string."""
+        return "projects/{project}/locations/{location}/skus/{sku}".format(
+            project=project,
+            location=location,
+            sku=sku,
+        )
+
+    @staticmethod
+    def parse_sku_path(path: str) -> Dict[str, str]:
+        """Parses a sku path into its component segments."""
+        m = re.match(
+            r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/skus/(?P<sku>.+?)$",
+            path,
+        )
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def zone_path(
+        project: str,
+        location: str,
+        zone: str,
+    ) -> str:
+        """Returns a fully-qualified zone string."""
+        return "projects/{project}/locations/{location}/zones/{zone}".format(
+            project=project,
+            location=location,
+            zone=zone,
+        )
+
+    @staticmethod
+    def parse_zone_path(path: str) -> Dict[str, str]:
+        """Parses a zone path into its component segments."""
+        m = re.match(
+            r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/zones/(?P<zone>.+?)$",
+            path,
+        )
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_billing_account_path(
+        billing_account: str,
+    ) -> str:
+        """Returns a fully-qualified billing_account string."""
+        return "billingAccounts/{billing_account}".format(
+            billing_account=billing_account,
+        )
+
+    @staticmethod
+    def parse_common_billing_account_path(path: str) -> Dict[str, str]:
+        """Parse a billing_account path into its component segments."""
+        m = re.match(r"^billingAccounts/(?P<billing_account>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_folder_path(
+        folder: str,
+    ) -> str:
+        """Returns a fully-qualified folder string."""
+        return "folders/{folder}".format(
+            folder=folder,
+        )
+
+    @staticmethod
+    def parse_common_folder_path(path: str) -> Dict[str, str]:
+        """Parse a folder path into its component segments."""
+        m = re.match(r"^folders/(?P<folder>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_organization_path(
+        organization: str,
+    ) -> str:
+        """Returns a fully-qualified organization string."""
+        return "organizations/{organization}".format(
+            organization=organization,
+        )
+
+    @staticmethod
+    def parse_common_organization_path(path: str) -> Dict[str, str]:
+        """Parse an organization path into its component segments."""
+        m = re.match(r"^organizations/(?P<organization>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_project_path(
+        project: str,
+    ) -> str:
+        """Returns a fully-qualified project string."""
+        return "projects/{project}".format(
+            project=project,
+        )
+
+    @staticmethod
+    def parse_common_project_path(path: str) -> Dict[str, str]:
+        """Parse a project path into its component segments."""
+        m = re.match(r"^projects/(?P<project>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_location_path(
+        project: str,
+        location: str,
+    ) -> str:
+        """Returns a fully-qualified location string."""
+        return "projects/{project}/locations/{location}".format(
+            project=project,
+            location=location,
+        )
+
+    @staticmethod
+    def parse_common_location_path(path: str) -> Dict[str, str]:
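The path helpers above are plain `str.format` templates with regex-based inverses; a round-trip sketch using placeholder IDs:

```python
from google.cloud import gdchardwaremanagement_v1alpha

Client = gdchardwaremanagement_v1alpha.GDCHardwareManagementClient

# Build a fully-qualified order name from its components (placeholder values).
order_name = Client.order_path("my-project", "us-central1", "my-order")
assert order_name == "projects/my-project/locations/us-central1/orders/my-order"

# parse_order_path inverts the template via the regex shown above.
assert Client.parse_order_path(order_name) == {
    "project": "my-project",
    "location": "us-central1",
    "order": "my-order",
}

# Strings that do not match the template parse to an empty dict.
assert Client.parse_order_path("not-a-resource-name") == {}
```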
"""Parse a location path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @classmethod + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[client_options_lib.ClientOptions] = None + ): + """Deprecated. Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + + warnings.warn( + "get_mtls_endpoint_and_cert_source is deprecated. Use the api_endpoint property instead.", + DeprecationWarning, + ) + if client_options is None: + client_options = client_options_lib.ClientOptions() + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_client_cert not in ("true", "false"): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError( + "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Figure out the client cert source to use. + client_cert_source = None + if use_client_cert == "true": + if client_options.client_cert_source: + client_cert_source = client_options.client_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + + # Figure out which api endpoint to use. + if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + elif use_mtls_endpoint == "always" or ( + use_mtls_endpoint == "auto" and client_cert_source + ): + api_endpoint = cls.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = cls.DEFAULT_ENDPOINT + + return api_endpoint, client_cert_source + + @staticmethod + def _read_environment_variables(): + """Returns the environment variables used by the client. + + Returns: + Tuple[bool, str, str]: returns the GOOGLE_API_USE_CLIENT_CERTIFICATE, + GOOGLE_API_USE_MTLS_ENDPOINT, and GOOGLE_CLOUD_UNIVERSE_DOMAIN environment variables. + + Raises: + ValueError: If GOOGLE_API_USE_CLIENT_CERTIFICATE is not + any of ["true", "false"]. 
+ google.auth.exceptions.MutualTLSChannelError: If GOOGLE_API_USE_MTLS_ENDPOINT + is not any of ["auto", "never", "always"]. + """ + use_client_cert = os.getenv( + "GOOGLE_API_USE_CLIENT_CERTIFICATE", "false" + ).lower() + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto").lower() + universe_domain_env = os.getenv("GOOGLE_CLOUD_UNIVERSE_DOMAIN") + if use_client_cert not in ("true", "false"): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError( + "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + return use_client_cert == "true", use_mtls_endpoint, universe_domain_env + + @staticmethod + def _get_client_cert_source(provided_cert_source, use_cert_flag): + """Return the client cert source to be used by the client. + + Args: + provided_cert_source (bytes): The client certificate source provided. + use_cert_flag (bool): A flag indicating whether to use the client certificate. + + Returns: + bytes or None: The client cert source to be used by the client. + """ + client_cert_source = None + if use_cert_flag: + if provided_cert_source: + client_cert_source = provided_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + return client_cert_source + + @staticmethod + def _get_api_endpoint( + api_override, client_cert_source, universe_domain, use_mtls_endpoint + ): + """Return the API endpoint used by the client. + + Args: + api_override (str): The API endpoint override. If specified, this is always + the return value of this function and the other arguments are not used. + client_cert_source (bytes): The client certificate source used by the client. + universe_domain (str): The universe domain used by the client. + use_mtls_endpoint (str): How to use the mTLS endpoint, which depends also on the other parameters. + Possible values are "always", "auto", or "never". + + Returns: + str: The API endpoint to be used by the client. + """ + if api_override is not None: + api_endpoint = api_override + elif use_mtls_endpoint == "always" or ( + use_mtls_endpoint == "auto" and client_cert_source + ): + _default_universe = GDCHardwareManagementClient._DEFAULT_UNIVERSE + if universe_domain != _default_universe: + raise MutualTLSChannelError( + f"mTLS is not supported in any universe other than {_default_universe}." + ) + api_endpoint = GDCHardwareManagementClient.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = ( + GDCHardwareManagementClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=universe_domain + ) + ) + return api_endpoint + + @staticmethod + def _get_universe_domain( + client_universe_domain: Optional[str], universe_domain_env: Optional[str] + ) -> str: + """Return the universe domain used by the client. + + Args: + client_universe_domain (Optional[str]): The universe domain configured via the client options. + universe_domain_env (Optional[str]): The universe domain configured via the "GOOGLE_CLOUD_UNIVERSE_DOMAIN" environment variable. + + Returns: + str: The universe domain to be used by the client. + + Raises: + ValueError: If the universe domain is an empty string. 
+ """ + universe_domain = GDCHardwareManagementClient._DEFAULT_UNIVERSE + if client_universe_domain is not None: + universe_domain = client_universe_domain + elif universe_domain_env is not None: + universe_domain = universe_domain_env + if len(universe_domain.strip()) == 0: + raise ValueError("Universe Domain cannot be an empty string.") + return universe_domain + + @staticmethod + def _compare_universes( + client_universe: str, credentials: ga_credentials.Credentials + ) -> bool: + """Returns True iff the universe domains used by the client and credentials match. + + Args: + client_universe (str): The universe domain configured via the client options. + credentials (ga_credentials.Credentials): The credentials being used in the client. + + Returns: + bool: True iff client_universe matches the universe in credentials. + + Raises: + ValueError: when client_universe does not match the universe in credentials. + """ + + default_universe = GDCHardwareManagementClient._DEFAULT_UNIVERSE + credentials_universe = getattr(credentials, "universe_domain", default_universe) + + if client_universe != credentials_universe: + raise ValueError( + "The configured universe domain " + f"({client_universe}) does not match the universe domain " + f"found in the credentials ({credentials_universe}). " + "If you haven't configured the universe domain explicitly, " + f"`{default_universe}` is the default." + ) + return True + + def _validate_universe_domain(self): + """Validates client's and credentials' universe domains are consistent. + + Returns: + bool: True iff the configured universe domain is valid. + + Raises: + ValueError: If the configured universe domain is not valid. + """ + self._is_universe_domain_valid = ( + self._is_universe_domain_valid + or GDCHardwareManagementClient._compare_universes( + self.universe_domain, self.transport._credentials + ) + ) + return self._is_universe_domain_valid + + @property + def api_endpoint(self): + """Return the API endpoint used by the client instance. + + Returns: + str: The API endpoint used by the client instance. + """ + return self._api_endpoint + + @property + def universe_domain(self) -> str: + """Return the universe domain used by the client instance. + + Returns: + str: The universe domain used by the client instance. + """ + return self._universe_domain + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[ + Union[ + str, + GDCHardwareManagementTransport, + Callable[..., GDCHardwareManagementTransport], + ] + ] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the gdc hardware management client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Optional[Union[str,GDCHardwareManagementTransport,Callable[..., GDCHardwareManagementTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the GDCHardwareManagementTransport constructor. + If set to None, a transport is chosen automatically. 
+ client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): + Custom options for the client. + + 1. The ``api_endpoint`` property can be used to override the + default endpoint provided by the client when ``transport`` is + not explicitly provided. Only if this property is not set and + ``transport`` was not explicitly provided, the endpoint is + determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment + variable, which have one of the following values: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto-switch to the + default mTLS endpoint if client certificate is present; this is + the default value). + + 2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide a client certificate for mTLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + 3. The ``universe_domain`` property can be used to override the + default "googleapis.com" universe. Note that the ``api_endpoint`` + property still takes precedence; and ``universe_domain`` is + currently not supported for mTLS. + + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + """ + self._client_options = client_options + if isinstance(self._client_options, dict): + self._client_options = client_options_lib.from_dict(self._client_options) + if self._client_options is None: + self._client_options = client_options_lib.ClientOptions() + self._client_options = cast( + client_options_lib.ClientOptions, self._client_options + ) + + universe_domain_opt = getattr(self._client_options, "universe_domain", None) + + ( + self._use_client_cert, + self._use_mtls_endpoint, + self._universe_domain_env, + ) = GDCHardwareManagementClient._read_environment_variables() + self._client_cert_source = GDCHardwareManagementClient._get_client_cert_source( + self._client_options.client_cert_source, self._use_client_cert + ) + self._universe_domain = GDCHardwareManagementClient._get_universe_domain( + universe_domain_opt, self._universe_domain_env + ) + self._api_endpoint = None # updated below, depending on `transport` + + # Initialize the universe domain validation. + self._is_universe_domain_valid = False + + api_key_value = getattr(self._client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError( + "client_options.api_key and credentials are mutually exclusive" + ) + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + transport_provided = isinstance(transport, GDCHardwareManagementTransport) + if transport_provided: + # transport is a GDCHardwareManagementTransport instance. + if credentials or self._client_options.credentials_file or api_key_value: + raise ValueError( + "When providing a transport instance, " + "provide its credentials directly." 
+ ) + if self._client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." + ) + self._transport = cast(GDCHardwareManagementTransport, transport) + self._api_endpoint = self._transport.host + + self._api_endpoint = ( + self._api_endpoint + or GDCHardwareManagementClient._get_api_endpoint( + self._client_options.api_endpoint, + self._client_cert_source, + self._universe_domain, + self._use_mtls_endpoint, + ) + ) + + if not transport_provided: + import google.auth._default # type: ignore + + if api_key_value and hasattr( + google.auth._default, "get_api_key_credentials" + ): + credentials = google.auth._default.get_api_key_credentials( + api_key_value + ) + + transport_init: Union[ + Type[GDCHardwareManagementTransport], + Callable[..., GDCHardwareManagementTransport], + ] = ( + type(self).get_transport_class(transport) + if isinstance(transport, str) or transport is None + else cast(Callable[..., GDCHardwareManagementTransport], transport) + ) + # initialize with the provided callable or the passed in class + self._transport = transport_init( + credentials=credentials, + credentials_file=self._client_options.credentials_file, + host=self._api_endpoint, + scopes=self._client_options.scopes, + client_cert_source_for_mtls=self._client_cert_source, + quota_project_id=self._client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + api_audience=self._client_options.api_audience, + ) + + def list_orders( + self, + request: Optional[Union[service.ListOrdersRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListOrdersPager: + r"""Lists orders in a given project and location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import gdchardwaremanagement_v1alpha + + def sample_list_orders(): + # Create a client + client = gdchardwaremanagement_v1alpha.GDCHardwareManagementClient() + + # Initialize request argument(s) + request = gdchardwaremanagement_v1alpha.ListOrdersRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_orders(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.gdchardwaremanagement_v1alpha.types.ListOrdersRequest, dict]): + The request object. A request to list orders. + parent (str): + Required. The project and location to list orders in. + Format: ``projects/{project}/locations/{location}`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
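The pager returned by `list_orders` fetches additional pages lazily; a sketch that consumes it both item-by-item and page-by-page (the `pages` iterator is the usual GAPIC pager surface, assumed here, and `parent` is a placeholder):

```python
from google.cloud import gdchardwaremanagement_v1alpha

client = gdchardwaremanagement_v1alpha.GDCHardwareManagementClient()
parent = "projects/my-project/locations/us-central1"  # placeholder

# Item-by-item: further pages are requested transparently.
for order in client.list_orders(parent=parent):
    print(order.name)

# Page-by-page, e.g. to inspect page-level fields such as next_page_token.
for page in client.list_orders(parent=parent).pages:
    print(len(page.orders), page.next_page_token)
```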
+ + Returns: + google.cloud.gdchardwaremanagement_v1alpha.services.gdc_hardware_management.pagers.ListOrdersPager: + A list of orders. + + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, service.ListOrdersRequest): + request = service.ListOrdersRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_orders] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListOrdersPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_order( + self, + request: Optional[Union[service.GetOrderRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> resources.Order: + r"""Gets details of an order. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import gdchardwaremanagement_v1alpha + + def sample_get_order(): + # Create a client + client = gdchardwaremanagement_v1alpha.GDCHardwareManagementClient() + + # Initialize request argument(s) + request = gdchardwaremanagement_v1alpha.GetOrderRequest( + name="name_value", + ) + + # Make the request + response = client.get_order(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.gdchardwaremanagement_v1alpha.types.GetOrderRequest, dict]): + The request object. A request to get an order. + name (str): + Required. Name of the resource + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.gdchardwaremanagement_v1alpha.types.Order: + An order for GDC hardware. + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, service.GetOrderRequest): + request = service.GetOrderRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_order] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def create_order( + self, + request: Optional[Union[service.CreateOrderRequest, dict]] = None, + *, + parent: Optional[str] = None, + order: Optional[resources.Order] = None, + order_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Creates a new order in a given project and location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import gdchardwaremanagement_v1alpha + + def sample_create_order(): + # Create a client + client = gdchardwaremanagement_v1alpha.GDCHardwareManagementClient() + + # Initialize request argument(s) + order = gdchardwaremanagement_v1alpha.Order() + order.organization_contact.contacts.given_name = "given_name_value" + order.organization_contact.contacts.email = "email_value" + order.organization_contact.contacts.phone = "phone_value" + order.customer_motivation = "customer_motivation_value" + order.region_code = "region_code_value" + order.billing_id = "billing_id_value" + + request = gdchardwaremanagement_v1alpha.CreateOrderRequest( + parent="parent_value", + order=order, + ) + + # Make the request + operation = client.create_order(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.gdchardwaremanagement_v1alpha.types.CreateOrderRequest, dict]): + The request object. 
A request to create an order. + parent (str): + Required. The project and location to create the order + in. Format: ``projects/{project}/locations/{location}`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + order (google.cloud.gdchardwaremanagement_v1alpha.types.Order): + Required. The order to create. + This corresponds to the ``order`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + order_id (str): + Optional. ID used to uniquely identify the Order within + its parent scope. This field should contain at most 63 + characters and must start with lowercase characters. + Only lowercase characters, numbers and ``-`` are + accepted. The ``-`` character cannot be the first or the + last one. A system generated ID will be used if the + field is not set. + + The order.name field in the request will be ignored. + + This corresponds to the ``order_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.gdchardwaremanagement_v1alpha.types.Order` + An order for GDC hardware. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, order, order_id]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, service.CreateOrderRequest): + request = service.CreateOrderRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if order is not None: + request.order = order + if order_id is not None: + request.order_id = order_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_order] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + resources.Order, + metadata_type=service.OperationMetadata, + ) + + # Done; return the response. 
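`create_order` returns a long-running-operation future rather than the resource itself; a sketch that waits for the result with a bounded timeout (the parent, IDs and field values are placeholders, and the contact fields shown in the generated sample above would still need to be populated):

```python
from google.cloud import gdchardwaremanagement_v1alpha

client = gdchardwaremanagement_v1alpha.GDCHardwareManagementClient()

order = gdchardwaremanagement_v1alpha.Order(
    customer_motivation="customer_motivation_value",
    region_code="region_code_value",
    billing_id="billing_id_value",
)
# order.organization_contact must also be filled in, as in the sample above.

operation = client.create_order(
    parent="projects/my-project/locations/us-central1",  # placeholder
    order=order,
    order_id="my-order",  # optional; see the format rules documented above
)

# Block until the LRO finishes (or 300 s elapse), then use the created Order.
created = operation.result(timeout=300)
print(created.name)
```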
+ return response + + def update_order( + self, + request: Optional[Union[service.UpdateOrderRequest, dict]] = None, + *, + order: Optional[resources.Order] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Updates the parameters of an order. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import gdchardwaremanagement_v1alpha + + def sample_update_order(): + # Create a client + client = gdchardwaremanagement_v1alpha.GDCHardwareManagementClient() + + # Initialize request argument(s) + order = gdchardwaremanagement_v1alpha.Order() + order.organization_contact.contacts.given_name = "given_name_value" + order.organization_contact.contacts.email = "email_value" + order.organization_contact.contacts.phone = "phone_value" + order.customer_motivation = "customer_motivation_value" + order.region_code = "region_code_value" + order.billing_id = "billing_id_value" + + request = gdchardwaremanagement_v1alpha.UpdateOrderRequest( + order=order, + ) + + # Make the request + operation = client.update_order(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.gdchardwaremanagement_v1alpha.types.UpdateOrderRequest, dict]): + The request object. A request to update an order. + order (google.cloud.gdchardwaremanagement_v1alpha.types.Order): + Required. The order to update. + This corresponds to the ``order`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Required. A mask to specify the fields in the Order to + overwrite with this update. The fields specified in the + update_mask are relative to the order, not the full + request. A field will be overwritten if it is in the + mask. If you don't provide a mask then all fields will + be overwritten. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.gdchardwaremanagement_v1alpha.types.Order` + An order for GDC hardware. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
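`update_mask` is a standard protobuf `FieldMask` whose paths are relative to the `Order`; a sketch that overwrites a single field (the resource name and value are placeholders):

```python
from google.protobuf import field_mask_pb2

from google.cloud import gdchardwaremanagement_v1alpha

client = gdchardwaremanagement_v1alpha.GDCHardwareManagementClient()

order = gdchardwaremanagement_v1alpha.Order(
    name="projects/my-project/locations/us-central1/orders/my-order",  # placeholder
    customer_motivation="updated motivation",
)

operation = client.update_order(
    order=order,
    # Only the listed paths are overwritten on the server.
    update_mask=field_mask_pb2.FieldMask(paths=["customer_motivation"]),
)
print(operation.result().customer_motivation)
```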
+ has_flattened_params = any([order, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, service.UpdateOrderRequest): + request = service.UpdateOrderRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if order is not None: + request.order = order + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update_order] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("order.name", request.order.name),) + ), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + resources.Order, + metadata_type=service.OperationMetadata, + ) + + # Done; return the response. + return response + + def delete_order( + self, + request: Optional[Union[service.DeleteOrderRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Deletes an order. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import gdchardwaremanagement_v1alpha + + def sample_delete_order(): + # Create a client + client = gdchardwaremanagement_v1alpha.GDCHardwareManagementClient() + + # Initialize request argument(s) + request = gdchardwaremanagement_v1alpha.DeleteOrderRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_order(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.gdchardwaremanagement_v1alpha.types.DeleteOrderRequest, dict]): + The request object. A request to delete an order. + name (str): + Required. The name of the order. Format: + ``projects/{project}/locations/{location}/orders/{order}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. A typical example is to + use it as the request or the response type of an API + method. For instance: + + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); + + } + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, service.DeleteOrderRequest): + request = service.DeleteOrderRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_order] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + empty_pb2.Empty, + metadata_type=service.OperationMetadata, + ) + + # Done; return the response. + return response + + def submit_order( + self, + request: Optional[Union[service.SubmitOrderRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Submits an order. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import gdchardwaremanagement_v1alpha + + def sample_submit_order(): + # Create a client + client = gdchardwaremanagement_v1alpha.GDCHardwareManagementClient() + + # Initialize request argument(s) + request = gdchardwaremanagement_v1alpha.SubmitOrderRequest( + name="name_value", + ) + + # Make the request + operation = client.submit_order(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.gdchardwaremanagement_v1alpha.types.SubmitOrderRequest, dict]): + The request object. A request to submit an order. 
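`delete_order` is also long-running, but its result type is `Empty`, so `result()` is only useful for blocking until the deletion completes; the order name is a placeholder:

```python
from google.cloud import gdchardwaremanagement_v1alpha

client = gdchardwaremanagement_v1alpha.GDCHardwareManagementClient()

operation = client.delete_order(
    name="projects/my-project/locations/us-central1/orders/my-order"  # placeholder
)

# The returned value is google.protobuf.empty_pb2.Empty; waiting is the point.
operation.result(timeout=300)
print("delete finished; metadata:", operation.metadata)
```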
+ name (str): + Required. The name of the order. Format: + ``projects/{project}/locations/{location}/orders/{order}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.gdchardwaremanagement_v1alpha.types.Order` + An order for GDC hardware. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, service.SubmitOrderRequest): + request = service.SubmitOrderRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.submit_order] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + resources.Order, + metadata_type=service.OperationMetadata, + ) + + # Done; return the response. + return response + + def list_sites( + self, + request: Optional[Union[service.ListSitesRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListSitesPager: + r"""Lists sites in a given project and location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
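Every method accepts `retry`, `timeout` and extra `metadata` overrides from `google.api_core`; a sketch applying a custom retry policy to `list_sites` (the retried error type, numbers and header below are illustrative choices, not service defaults):

```python
from google.api_core import exceptions as core_exceptions
from google.api_core import retry as retries

from google.cloud import gdchardwaremanagement_v1alpha

client = gdchardwaremanagement_v1alpha.GDCHardwareManagementClient()

custom_retry = retries.Retry(
    initial=1.0,
    maximum=10.0,
    multiplier=2.0,
    timeout=60.0,
    predicate=retries.if_exception_type(core_exceptions.ServiceUnavailable),
)

for site in client.list_sites(
    parent="projects/my-project/locations/us-central1",  # placeholder
    retry=custom_retry,
    timeout=30.0,
    # Extra metadata is appended to the routing header built by the client.
    metadata=[("x-goog-request-reason", "example")],
):
    print(site.name)
```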
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import gdchardwaremanagement_v1alpha + + def sample_list_sites(): + # Create a client + client = gdchardwaremanagement_v1alpha.GDCHardwareManagementClient() + + # Initialize request argument(s) + request = gdchardwaremanagement_v1alpha.ListSitesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_sites(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.gdchardwaremanagement_v1alpha.types.ListSitesRequest, dict]): + The request object. A request to list sites. + parent (str): + Required. The project and location to list sites in. + Format: ``projects/{project}/locations/{location}`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.gdchardwaremanagement_v1alpha.services.gdc_hardware_management.pagers.ListSitesPager: + A list of sites. + + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, service.ListSitesRequest): + request = service.ListSitesRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_sites] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListSitesPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_site( + self, + request: Optional[Union[service.GetSiteRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> resources.Site: + r"""Gets details of a site. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import gdchardwaremanagement_v1alpha + + def sample_get_site(): + # Create a client + client = gdchardwaremanagement_v1alpha.GDCHardwareManagementClient() + + # Initialize request argument(s) + request = gdchardwaremanagement_v1alpha.GetSiteRequest( + name="name_value", + ) + + # Make the request + response = client.get_site(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.gdchardwaremanagement_v1alpha.types.GetSiteRequest, dict]): + The request object. A request to get a site. + name (str): + Required. The name of the site. Format: + ``projects/{project}/locations/{location}/sites/{site}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.gdchardwaremanagement_v1alpha.types.Site: + A physical site where hardware will + be installed. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, service.GetSiteRequest): + request = service.GetSiteRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_site] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def create_site( + self, + request: Optional[Union[service.CreateSiteRequest, dict]] = None, + *, + parent: Optional[str] = None, + site: Optional[resources.Site] = None, + site_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Creates a new site in a given project and location. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import gdchardwaremanagement_v1alpha + + def sample_create_site(): + # Create a client + client = gdchardwaremanagement_v1alpha.GDCHardwareManagementClient() + + # Initialize request argument(s) + site = gdchardwaremanagement_v1alpha.Site() + site.organization_contact.contacts.given_name = "given_name_value" + site.organization_contact.contacts.email = "email_value" + site.organization_contact.contacts.phone = "phone_value" + site.google_maps_pin_uri = "google_maps_pin_uri_value" + + request = gdchardwaremanagement_v1alpha.CreateSiteRequest( + parent="parent_value", + site=site, + ) + + # Make the request + operation = client.create_site(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.gdchardwaremanagement_v1alpha.types.CreateSiteRequest, dict]): + The request object. A request to create a site. + parent (str): + Required. The project and location to create the site + in. Format: ``projects/{project}/locations/{location}`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + site (google.cloud.gdchardwaremanagement_v1alpha.types.Site): + Required. The site to create. + This corresponds to the ``site`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + site_id (str): + Optional. ID used to uniquely identify the Site within + its parent scope. This field should contain at most 63 + characters and must start with lowercase characters. + Only lowercase characters, numbers and ``-`` are + accepted. The ``-`` character cannot be the first or the + last one. A system generated ID will be used if the + field is not set. + + The site.name field in the request will be ignored. + + This corresponds to the ``site_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.gdchardwaremanagement_v1alpha.types.Site` + A physical site where hardware will be installed. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, site, site_id]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
+ if not isinstance(request, service.CreateSiteRequest): + request = service.CreateSiteRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if site is not None: + request.site = site + if site_id is not None: + request.site_id = site_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_site] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + resources.Site, + metadata_type=service.OperationMetadata, + ) + + # Done; return the response. + return response + + def update_site( + self, + request: Optional[Union[service.UpdateSiteRequest, dict]] = None, + *, + site: Optional[resources.Site] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Updates the parameters of a site. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import gdchardwaremanagement_v1alpha + + def sample_update_site(): + # Create a client + client = gdchardwaremanagement_v1alpha.GDCHardwareManagementClient() + + # Initialize request argument(s) + site = gdchardwaremanagement_v1alpha.Site() + site.organization_contact.contacts.given_name = "given_name_value" + site.organization_contact.contacts.email = "email_value" + site.organization_contact.contacts.phone = "phone_value" + site.google_maps_pin_uri = "google_maps_pin_uri_value" + + request = gdchardwaremanagement_v1alpha.UpdateSiteRequest( + site=site, + ) + + # Make the request + operation = client.update_site(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.gdchardwaremanagement_v1alpha.types.UpdateSiteRequest, dict]): + The request object. A request to update a site. + site (google.cloud.gdchardwaremanagement_v1alpha.types.Site): + Required. The site to update. + This corresponds to the ``site`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Required. A mask to specify the fields in the Site to + overwrite with this update. The fields specified in the + update_mask are relative to the site, not the full + request. A field will be overwritten if it is in the + mask. 
If you don't provide a mask then all fields will + be overwritten. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.gdchardwaremanagement_v1alpha.types.Site` + A physical site where hardware will be installed. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([site, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, service.UpdateSiteRequest): + request = service.UpdateSiteRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if site is not None: + request.site = site + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update_site] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("site.name", request.site.name),) + ), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + resources.Site, + metadata_type=service.OperationMetadata, + ) + + # Done; return the response. + return response + + def list_hardware_groups( + self, + request: Optional[Union[service.ListHardwareGroupsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListHardwareGroupsPager: + r"""Lists hardware groups in a given order. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import gdchardwaremanagement_v1alpha + + def sample_list_hardware_groups(): + # Create a client + client = gdchardwaremanagement_v1alpha.GDCHardwareManagementClient() + + # Initialize request argument(s) + request = gdchardwaremanagement_v1alpha.ListHardwareGroupsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_hardware_groups(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.gdchardwaremanagement_v1alpha.types.ListHardwareGroupsRequest, dict]): + The request object. A request to list hardware groups. + parent (str): + Required. The order to list hardware groups in. Format: + ``projects/{project}/locations/{location}/orders/{order}`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.gdchardwaremanagement_v1alpha.services.gdc_hardware_management.pagers.ListHardwareGroupsPager: + A list of hardware groups. + + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, service.ListHardwareGroupsRequest): + request = service.ListHardwareGroupsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_hardware_groups] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListHardwareGroupsPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def get_hardware_group( + self, + request: Optional[Union[service.GetHardwareGroupRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> resources.HardwareGroup: + r"""Gets details of a hardware group. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import gdchardwaremanagement_v1alpha + + def sample_get_hardware_group(): + # Create a client + client = gdchardwaremanagement_v1alpha.GDCHardwareManagementClient() + + # Initialize request argument(s) + request = gdchardwaremanagement_v1alpha.GetHardwareGroupRequest( + name="name_value", + ) + + # Make the request + response = client.get_hardware_group(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.gdchardwaremanagement_v1alpha.types.GetHardwareGroupRequest, dict]): + The request object. A request to get a hardware group. + name (str): + Required. The name of the hardware group. Format: + ``projects/{project}/locations/{location}/orders/{order}/hardwareGroups/{hardware_group}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.gdchardwaremanagement_v1alpha.types.HardwareGroup: + A group of hardware that is part of + the same order, has the same SKU, and is + delivered to the same site. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, service.GetHardwareGroupRequest): + request = service.GetHardwareGroupRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_hardware_group] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. 
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def create_hardware_group( + self, + request: Optional[Union[service.CreateHardwareGroupRequest, dict]] = None, + *, + parent: Optional[str] = None, + hardware_group: Optional[resources.HardwareGroup] = None, + hardware_group_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Creates a new hardware group in a given order. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import gdchardwaremanagement_v1alpha + + def sample_create_hardware_group(): + # Create a client + client = gdchardwaremanagement_v1alpha.GDCHardwareManagementClient() + + # Initialize request argument(s) + hardware_group = gdchardwaremanagement_v1alpha.HardwareGroup() + hardware_group.hardware_count = 1494 + hardware_group.config.sku = "sku_value" + hardware_group.config.power_supply = "POWER_SUPPLY_DC" + hardware_group.site = "site_value" + + request = gdchardwaremanagement_v1alpha.CreateHardwareGroupRequest( + parent="parent_value", + hardware_group=hardware_group, + ) + + # Make the request + operation = client.create_hardware_group(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.gdchardwaremanagement_v1alpha.types.CreateHardwareGroupRequest, dict]): + The request object. A request to create a hardware group. + parent (str): + Required. The order to create the hardware group in. + Format: + ``projects/{project}/locations/{location}/orders/{order}`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + hardware_group (google.cloud.gdchardwaremanagement_v1alpha.types.HardwareGroup): + Required. The hardware group to + create. + + This corresponds to the ``hardware_group`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + hardware_group_id (str): + Optional. ID used to uniquely identify the HardwareGroup + within its parent scope. This field should contain at + most 63 characters and must start with lowercase + characters. Only lowercase characters, numbers and ``-`` + are accepted. The ``-`` character cannot be the first or + the last one. A system generated ID will be used if the + field is not set. + + The hardware_group.name field in the request will be + ignored. + + This corresponds to the ``hardware_group_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.gdchardwaremanagement_v1alpha.types.HardwareGroup` A group of hardware that is part of the same order, has the same SKU, and is + delivered to the same site. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, hardware_group, hardware_group_id]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, service.CreateHardwareGroupRequest): + request = service.CreateHardwareGroupRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if hardware_group is not None: + request.hardware_group = hardware_group + if hardware_group_id is not None: + request.hardware_group_id = hardware_group_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_hardware_group] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + resources.HardwareGroup, + metadata_type=service.OperationMetadata, + ) + + # Done; return the response. + return response + + def update_hardware_group( + self, + request: Optional[Union[service.UpdateHardwareGroupRequest, dict]] = None, + *, + hardware_group: Optional[resources.HardwareGroup] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Updates the parameters of a hardware group. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import gdchardwaremanagement_v1alpha + + def sample_update_hardware_group(): + # Create a client + client = gdchardwaremanagement_v1alpha.GDCHardwareManagementClient() + + # Initialize request argument(s) + hardware_group = gdchardwaremanagement_v1alpha.HardwareGroup() + hardware_group.hardware_count = 1494 + hardware_group.config.sku = "sku_value" + hardware_group.config.power_supply = "POWER_SUPPLY_DC" + hardware_group.site = "site_value" + + request = gdchardwaremanagement_v1alpha.UpdateHardwareGroupRequest( + hardware_group=hardware_group, + ) + + # Make the request + operation = client.update_hardware_group(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.gdchardwaremanagement_v1alpha.types.UpdateHardwareGroupRequest, dict]): + The request object. A request to update a hardware group. + hardware_group (google.cloud.gdchardwaremanagement_v1alpha.types.HardwareGroup): + Required. The hardware group to + update. + + This corresponds to the ``hardware_group`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Required. A mask to specify the fields in the + HardwareGroup to overwrite with this update. The fields + specified in the update_mask are relative to the + hardware group, not the full request. A field will be + overwritten if it is in the mask. If you don't provide a + mask then all fields will be overwritten. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.gdchardwaremanagement_v1alpha.types.HardwareGroup` A group of hardware that is part of the same order, has the same SKU, and is + delivered to the same site. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([hardware_group, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, service.UpdateHardwareGroupRequest): + request = service.UpdateHardwareGroupRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if hardware_group is not None: + request.hardware_group = hardware_group + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = self._transport._wrapped_methods[self._transport.update_hardware_group] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("hardware_group.name", request.hardware_group.name),) + ), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + resources.HardwareGroup, + metadata_type=service.OperationMetadata, + ) + + # Done; return the response. + return response + + def delete_hardware_group( + self, + request: Optional[Union[service.DeleteHardwareGroupRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Deletes a hardware group. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import gdchardwaremanagement_v1alpha + + def sample_delete_hardware_group(): + # Create a client + client = gdchardwaremanagement_v1alpha.GDCHardwareManagementClient() + + # Initialize request argument(s) + request = gdchardwaremanagement_v1alpha.DeleteHardwareGroupRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_hardware_group(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.gdchardwaremanagement_v1alpha.types.DeleteHardwareGroupRequest, dict]): + The request object. A request to delete a hardware group. + name (str): + Required. The name of the hardware group. Format: + ``projects/{project}/locations/{location}/orders/{order}/hardwareGroups/{hardware_group}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. A typical example is to + use it as the request or the response type of an API + method. For instance: + + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); + + } + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, service.DeleteHardwareGroupRequest): + request = service.DeleteHardwareGroupRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_hardware_group] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + empty_pb2.Empty, + metadata_type=service.OperationMetadata, + ) + + # Done; return the response. + return response + + def list_hardware( + self, + request: Optional[Union[service.ListHardwareRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListHardwarePager: + r"""Lists hardware in a given project and location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import gdchardwaremanagement_v1alpha + + def sample_list_hardware(): + # Create a client + client = gdchardwaremanagement_v1alpha.GDCHardwareManagementClient() + + # Initialize request argument(s) + request = gdchardwaremanagement_v1alpha.ListHardwareRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_hardware(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.gdchardwaremanagement_v1alpha.types.ListHardwareRequest, dict]): + The request object. A request to list hardware. + parent (str): + Required. The project and location to list hardware in. + Format: ``projects/{project}/locations/{location}`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.gdchardwaremanagement_v1alpha.services.gdc_hardware_management.pagers.ListHardwarePager: + A list of hardware. 
+ + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, service.ListHardwareRequest): + request = service.ListHardwareRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_hardware] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListHardwarePager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_hardware( + self, + request: Optional[Union[service.GetHardwareRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> resources.Hardware: + r"""Gets hardware details. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import gdchardwaremanagement_v1alpha + + def sample_get_hardware(): + # Create a client + client = gdchardwaremanagement_v1alpha.GDCHardwareManagementClient() + + # Initialize request argument(s) + request = gdchardwaremanagement_v1alpha.GetHardwareRequest( + name="name_value", + ) + + # Make the request + response = client.get_hardware(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.gdchardwaremanagement_v1alpha.types.GetHardwareRequest, dict]): + The request object. A request to get hardware. + name (str): + Required. The name of the hardware. Format: + ``projects/{project}/locations/{location}/hardware/{hardware}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.gdchardwaremanagement_v1alpha.types.Hardware: + An instance of hardware installed at + a site. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, service.GetHardwareRequest): + request = service.GetHardwareRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_hardware] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def create_hardware( + self, + request: Optional[Union[service.CreateHardwareRequest, dict]] = None, + *, + parent: Optional[str] = None, + hardware: Optional[resources.Hardware] = None, + hardware_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Creates new hardware in a given project and location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import gdchardwaremanagement_v1alpha + + def sample_create_hardware(): + # Create a client + client = gdchardwaremanagement_v1alpha.GDCHardwareManagementClient() + + # Initialize request argument(s) + hardware = gdchardwaremanagement_v1alpha.Hardware() + hardware.order = "order_value" + hardware.site = "site_value" + hardware.config.sku = "sku_value" + hardware.config.power_supply = "POWER_SUPPLY_DC" + hardware.zone = "zone_value" + + request = gdchardwaremanagement_v1alpha.CreateHardwareRequest( + parent="parent_value", + hardware=hardware, + ) + + # Make the request + operation = client.create_hardware(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.gdchardwaremanagement_v1alpha.types.CreateHardwareRequest, dict]): + The request object. A request to create hardware. + parent (str): + Required. 
The project and location to create hardware + in. Format: ``projects/{project}/locations/{location}`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + hardware (google.cloud.gdchardwaremanagement_v1alpha.types.Hardware): + Required. The resource to create. + This corresponds to the ``hardware`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + hardware_id (str): + Optional. ID used to uniquely identify the Hardware + within its parent scope. This field should contain at + most 63 characters and must start with lowercase + characters. Only lowercase characters, numbers and ``-`` + are accepted. The ``-`` character cannot be the first or + the last one. A system generated ID will be used if the + field is not set. + + The hardware.name field in the request will be ignored. + + This corresponds to the ``hardware_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.gdchardwaremanagement_v1alpha.types.Hardware` + An instance of hardware installed at a site. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, hardware, hardware_id]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, service.CreateHardwareRequest): + request = service.CreateHardwareRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if hardware is not None: + request.hardware = hardware + if hardware_id is not None: + request.hardware_id = hardware_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_hardware] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + resources.Hardware, + metadata_type=service.OperationMetadata, + ) + + # Done; return the response. 
+ return response + + def update_hardware( + self, + request: Optional[Union[service.UpdateHardwareRequest, dict]] = None, + *, + hardware: Optional[resources.Hardware] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Updates hardware parameters. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import gdchardwaremanagement_v1alpha + + def sample_update_hardware(): + # Create a client + client = gdchardwaremanagement_v1alpha.GDCHardwareManagementClient() + + # Initialize request argument(s) + hardware = gdchardwaremanagement_v1alpha.Hardware() + hardware.order = "order_value" + hardware.site = "site_value" + hardware.config.sku = "sku_value" + hardware.config.power_supply = "POWER_SUPPLY_DC" + hardware.zone = "zone_value" + + request = gdchardwaremanagement_v1alpha.UpdateHardwareRequest( + hardware=hardware, + ) + + # Make the request + operation = client.update_hardware(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.gdchardwaremanagement_v1alpha.types.UpdateHardwareRequest, dict]): + The request object. A request to update hardware. + hardware (google.cloud.gdchardwaremanagement_v1alpha.types.Hardware): + Required. The hardware to update. + This corresponds to the ``hardware`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Required. A mask to specify the fields in the Hardware + to overwrite with this update. The fields specified in + the update_mask are relative to the hardware, not the + full request. A field will be overwritten if it is in + the mask. If you don't provide a mask then all fields + will be overwritten. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.gdchardwaremanagement_v1alpha.types.Hardware` + An instance of hardware installed at a site. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([hardware, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." 
+ ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, service.UpdateHardwareRequest): + request = service.UpdateHardwareRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if hardware is not None: + request.hardware = hardware + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update_hardware] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("hardware.name", request.hardware.name),) + ), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + resources.Hardware, + metadata_type=service.OperationMetadata, + ) + + # Done; return the response. + return response + + def delete_hardware( + self, + request: Optional[Union[service.DeleteHardwareRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Deletes hardware. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import gdchardwaremanagement_v1alpha + + def sample_delete_hardware(): + # Create a client + client = gdchardwaremanagement_v1alpha.GDCHardwareManagementClient() + + # Initialize request argument(s) + request = gdchardwaremanagement_v1alpha.DeleteHardwareRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_hardware(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.gdchardwaremanagement_v1alpha.types.DeleteHardwareRequest, dict]): + The request object. A request to delete hardware. + name (str): + Required. The name of the hardware. Format: + ``projects/{project}/locations/{location}/hardware/{hardware}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. 
+ + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. A typical example is to + use it as the request or the response type of an API + method. For instance: + + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); + + } + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, service.DeleteHardwareRequest): + request = service.DeleteHardwareRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_hardware] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + empty_pb2.Empty, + metadata_type=service.OperationMetadata, + ) + + # Done; return the response. + return response + + def list_comments( + self, + request: Optional[Union[service.ListCommentsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListCommentsPager: + r"""Lists the comments on an order. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import gdchardwaremanagement_v1alpha + + def sample_list_comments(): + # Create a client + client = gdchardwaremanagement_v1alpha.GDCHardwareManagementClient() + + # Initialize request argument(s) + request = gdchardwaremanagement_v1alpha.ListCommentsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_comments(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.gdchardwaremanagement_v1alpha.types.ListCommentsRequest, dict]): + The request object. A request to list comments. + parent (str): + Required. The order to list comments on. 
Format: + ``projects/{project}/locations/{location}/orders/{order}`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.gdchardwaremanagement_v1alpha.services.gdc_hardware_management.pagers.ListCommentsPager: + A request to list comments. + + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, service.ListCommentsRequest): + request = service.ListCommentsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_comments] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListCommentsPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_comment( + self, + request: Optional[Union[service.GetCommentRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> resources.Comment: + r"""Gets the content of a comment. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import gdchardwaremanagement_v1alpha + + def sample_get_comment(): + # Create a client + client = gdchardwaremanagement_v1alpha.GDCHardwareManagementClient() + + # Initialize request argument(s) + request = gdchardwaremanagement_v1alpha.GetCommentRequest( + name="name_value", + ) + + # Make the request + response = client.get_comment(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.gdchardwaremanagement_v1alpha.types.GetCommentRequest, dict]): + The request object. A request to get a comment. + name (str): + Required. The name of the comment. Format: + ``projects/{project}/locations/{location}/orders/{order}/comments/{comment}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.gdchardwaremanagement_v1alpha.types.Comment: + A comment on an order. + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, service.GetCommentRequest): + request = service.GetCommentRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_comment] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def create_comment( + self, + request: Optional[Union[service.CreateCommentRequest, dict]] = None, + *, + parent: Optional[str] = None, + comment: Optional[resources.Comment] = None, + comment_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Creates a new comment on an order. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import gdchardwaremanagement_v1alpha + + def sample_create_comment(): + # Create a client + client = gdchardwaremanagement_v1alpha.GDCHardwareManagementClient() + + # Initialize request argument(s) + comment = gdchardwaremanagement_v1alpha.Comment() + comment.text = "text_value" + + request = gdchardwaremanagement_v1alpha.CreateCommentRequest( + parent="parent_value", + comment=comment, + ) + + # Make the request + operation = client.create_comment(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.gdchardwaremanagement_v1alpha.types.CreateCommentRequest, dict]): + The request object. A request to create a comment. + parent (str): + Required. The order to create the comment on. Format: + ``projects/{project}/locations/{location}/orders/{order}`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + comment (google.cloud.gdchardwaremanagement_v1alpha.types.Comment): + Required. The comment to create. + This corresponds to the ``comment`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + comment_id (str): + Optional. ID used to uniquely identify the Comment + within its parent scope. This field should contain at + most 63 characters and must start with lowercase + characters. Only lowercase characters, numbers and ``-`` + are accepted. The ``-`` character cannot be the first or + the last one. A system generated ID will be used if the + field is not set. + + The comment.name field in the request will be ignored. + + This corresponds to the ``comment_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.gdchardwaremanagement_v1alpha.types.Comment` + A comment on an order. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, comment, comment_id]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, service.CreateCommentRequest): + request = service.CreateCommentRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if parent is not None: + request.parent = parent + if comment is not None: + request.comment = comment + if comment_id is not None: + request.comment_id = comment_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_comment] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + resources.Comment, + metadata_type=service.OperationMetadata, + ) + + # Done; return the response. + return response + + def list_change_log_entries( + self, + request: Optional[Union[service.ListChangeLogEntriesRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListChangeLogEntriesPager: + r"""Lists the changes made to an order. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import gdchardwaremanagement_v1alpha + + def sample_list_change_log_entries(): + # Create a client + client = gdchardwaremanagement_v1alpha.GDCHardwareManagementClient() + + # Initialize request argument(s) + request = gdchardwaremanagement_v1alpha.ListChangeLogEntriesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_change_log_entries(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.gdchardwaremanagement_v1alpha.types.ListChangeLogEntriesRequest, dict]): + The request object. A request to list change log entries. + parent (str): + Required. The order to list change log entries for. + Format: + ``projects/{project}/locations/{location}/orders/{order}`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.gdchardwaremanagement_v1alpha.services.gdc_hardware_management.pagers.ListChangeLogEntriesPager: + A list of change log entries. + + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, service.ListChangeLogEntriesRequest): + request = service.ListChangeLogEntriesRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_change_log_entries] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListChangeLogEntriesPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_change_log_entry( + self, + request: Optional[Union[service.GetChangeLogEntryRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> resources.ChangeLogEntry: + r"""Gets details of a change to an order. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import gdchardwaremanagement_v1alpha + + def sample_get_change_log_entry(): + # Create a client + client = gdchardwaremanagement_v1alpha.GDCHardwareManagementClient() + + # Initialize request argument(s) + request = gdchardwaremanagement_v1alpha.GetChangeLogEntryRequest( + name="name_value", + ) + + # Make the request + response = client.get_change_log_entry(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.gdchardwaremanagement_v1alpha.types.GetChangeLogEntryRequest, dict]): + The request object. A request to get a change log entry. + name (str): + Required. The name of the change log entry. Format: + ``projects/{project}/locations/{location}/orders/{order}/changeLogEntries/{change_log_entry}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.cloud.gdchardwaremanagement_v1alpha.types.ChangeLogEntry: + A log entry of a change made to an + order. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, service.GetChangeLogEntryRequest): + request = service.GetChangeLogEntryRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_change_log_entry] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def list_skus( + self, + request: Optional[Union[service.ListSkusRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListSkusPager: + r"""Lists SKUs for a given project and location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import gdchardwaremanagement_v1alpha + + def sample_list_skus(): + # Create a client + client = gdchardwaremanagement_v1alpha.GDCHardwareManagementClient() + + # Initialize request argument(s) + request = gdchardwaremanagement_v1alpha.ListSkusRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_skus(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.gdchardwaremanagement_v1alpha.types.ListSkusRequest, dict]): + The request object. A request to list SKUs. + parent (str): + Required. The project and location to list SKUs in. + Format: ``projects/{project}/locations/{location}`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.cloud.gdchardwaremanagement_v1alpha.services.gdc_hardware_management.pagers.ListSkusPager: + A list of SKUs. + + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, service.ListSkusRequest): + request = service.ListSkusRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_skus] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListSkusPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_sku( + self, + request: Optional[Union[service.GetSkuRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> resources.Sku: + r"""Gets details of an SKU. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import gdchardwaremanagement_v1alpha + + def sample_get_sku(): + # Create a client + client = gdchardwaremanagement_v1alpha.GDCHardwareManagementClient() + + # Initialize request argument(s) + request = gdchardwaremanagement_v1alpha.GetSkuRequest( + name="name_value", + ) + + # Make the request + response = client.get_sku(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.gdchardwaremanagement_v1alpha.types.GetSkuRequest, dict]): + The request object. A request to get an SKU. + name (str): + Required. The name of the SKU. Format: + ``projects/{project}/locations/{location}/skus/{sku}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. 
+ timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.gdchardwaremanagement_v1alpha.types.Sku: + A stock keeping unit (SKU) of GDC + hardware. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, service.GetSkuRequest): + request = service.GetSkuRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_sku] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def list_zones( + self, + request: Optional[Union[service.ListZonesRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListZonesPager: + r"""Lists zones in a given project and location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import gdchardwaremanagement_v1alpha + + def sample_list_zones(): + # Create a client + client = gdchardwaremanagement_v1alpha.GDCHardwareManagementClient() + + # Initialize request argument(s) + request = gdchardwaremanagement_v1alpha.ListZonesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_zones(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.gdchardwaremanagement_v1alpha.types.ListZonesRequest, dict]): + The request object. A request to list zones. + parent (str): + Required. The project and location to list zones in. + Format: ``projects/{project}/locations/{location}`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.gdchardwaremanagement_v1alpha.services.gdc_hardware_management.pagers.ListZonesPager: + A list of zones. + + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, service.ListZonesRequest): + request = service.ListZonesRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_zones] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListZonesPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_zone( + self, + request: Optional[Union[service.GetZoneRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> resources.Zone: + r"""Gets details of a zone. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import gdchardwaremanagement_v1alpha + + def sample_get_zone(): + # Create a client + client = gdchardwaremanagement_v1alpha.GDCHardwareManagementClient() + + # Initialize request argument(s) + request = gdchardwaremanagement_v1alpha.GetZoneRequest( + name="name_value", + ) + + # Make the request + response = client.get_zone(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.gdchardwaremanagement_v1alpha.types.GetZoneRequest, dict]): + The request object. A request to get a zone. + name (str): + Required. The name of the zone. Format: + ``projects/{project}/locations/{location}/zones/{zone}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.gdchardwaremanagement_v1alpha.types.Zone: + A zone holding a set of hardware. + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, service.GetZoneRequest): + request = service.GetZoneRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_zone] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def create_zone( + self, + request: Optional[Union[service.CreateZoneRequest, dict]] = None, + *, + parent: Optional[str] = None, + zone: Optional[resources.Zone] = None, + zone_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Creates a new zone in a given project and location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import gdchardwaremanagement_v1alpha + + def sample_create_zone(): + # Create a client + client = gdchardwaremanagement_v1alpha.GDCHardwareManagementClient() + + # Initialize request argument(s) + zone = gdchardwaremanagement_v1alpha.Zone() + zone.contacts.given_name = "given_name_value" + zone.contacts.email = "email_value" + zone.contacts.phone = "phone_value" + + request = gdchardwaremanagement_v1alpha.CreateZoneRequest( + parent="parent_value", + zone=zone, + ) + + # Make the request + operation = client.create_zone(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.gdchardwaremanagement_v1alpha.types.CreateZoneRequest, dict]): + The request object. A request to create a zone. + parent (str): + Required. 
The project and location to create the zone + in. Format: ``projects/{project}/locations/{location}`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + zone (google.cloud.gdchardwaremanagement_v1alpha.types.Zone): + Required. The zone to create. + This corresponds to the ``zone`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + zone_id (str): + Optional. ID used to uniquely identify the Zone within + its parent scope. This field should contain at most 63 + characters and must start with lowercase characters. + Only lowercase characters, numbers and ``-`` are + accepted. The ``-`` character cannot be the first or the + last one. A system generated ID will be used if the + field is not set. + + The zone.name field in the request will be ignored. + + This corresponds to the ``zone_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.gdchardwaremanagement_v1alpha.types.Zone` + A zone holding a set of hardware. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, zone, zone_id]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, service.CreateZoneRequest): + request = service.CreateZoneRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if zone is not None: + request.zone = zone + if zone_id is not None: + request.zone_id = zone_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_zone] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + resources.Zone, + metadata_type=service.OperationMetadata, + ) + + # Done; return the response. 
+ return response + + def update_zone( + self, + request: Optional[Union[service.UpdateZoneRequest, dict]] = None, + *, + zone: Optional[resources.Zone] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Updates the parameters of a zone. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import gdchardwaremanagement_v1alpha + + def sample_update_zone(): + # Create a client + client = gdchardwaremanagement_v1alpha.GDCHardwareManagementClient() + + # Initialize request argument(s) + zone = gdchardwaremanagement_v1alpha.Zone() + zone.contacts.given_name = "given_name_value" + zone.contacts.email = "email_value" + zone.contacts.phone = "phone_value" + + request = gdchardwaremanagement_v1alpha.UpdateZoneRequest( + zone=zone, + ) + + # Make the request + operation = client.update_zone(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.gdchardwaremanagement_v1alpha.types.UpdateZoneRequest, dict]): + The request object. A request to update a zone. + zone (google.cloud.gdchardwaremanagement_v1alpha.types.Zone): + Required. The zone to update. + This corresponds to the ``zone`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Required. A mask to specify the fields in the Zone to + overwrite with this update. The fields specified in the + update_mask are relative to the zone, not the full + request. A field will be overwritten if it is in the + mask. If you don't provide a mask then all fields will + be overwritten. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.gdchardwaremanagement_v1alpha.types.Zone` + A zone holding a set of hardware. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([zone, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
+ if not isinstance(request, service.UpdateZoneRequest): + request = service.UpdateZoneRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if zone is not None: + request.zone = zone + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update_zone] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("zone.name", request.zone.name),) + ), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + resources.Zone, + metadata_type=service.OperationMetadata, + ) + + # Done; return the response. + return response + + def delete_zone( + self, + request: Optional[Union[service.DeleteZoneRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Deletes a zone. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import gdchardwaremanagement_v1alpha + + def sample_delete_zone(): + # Create a client + client = gdchardwaremanagement_v1alpha.GDCHardwareManagementClient() + + # Initialize request argument(s) + request = gdchardwaremanagement_v1alpha.DeleteZoneRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_zone(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.gdchardwaremanagement_v1alpha.types.DeleteZoneRequest, dict]): + The request object. A request to delete a zone. + name (str): + Required. The name of the zone. Format: + ``projects/{project}/locations/{location}/zones/{zone}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. A typical example is to + use it as the request or the response type of an API + method. 
For instance: + + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); + + } + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, service.DeleteZoneRequest): + request = service.DeleteZoneRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_zone] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + empty_pb2.Empty, + metadata_type=service.OperationMetadata, + ) + + # Done; return the response. + return response + + def signal_zone_state( + self, + request: Optional[Union[service.SignalZoneStateRequest, dict]] = None, + *, + name: Optional[str] = None, + state_signal: Optional[service.SignalZoneStateRequest.StateSignal] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Signals the state of a zone. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import gdchardwaremanagement_v1alpha + + def sample_signal_zone_state(): + # Create a client + client = gdchardwaremanagement_v1alpha.GDCHardwareManagementClient() + + # Initialize request argument(s) + request = gdchardwaremanagement_v1alpha.SignalZoneStateRequest( + name="name_value", + state_signal="FACTORY_TURNUP_CHECKS_FAILED", + ) + + # Make the request + operation = client.signal_zone_state(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.gdchardwaremanagement_v1alpha.types.SignalZoneStateRequest, dict]): + The request object. A request to signal the state of a + zone. + name (str): + Required. The name of the zone. 
Format: + ``projects/{project}/locations/{location}/zones/{zone}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + state_signal (google.cloud.gdchardwaremanagement_v1alpha.types.SignalZoneStateRequest.StateSignal): + Required. The state signal to send + for this zone. + + This corresponds to the ``state_signal`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.gdchardwaremanagement_v1alpha.types.Zone` + A zone holding a set of hardware. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name, state_signal]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, service.SignalZoneStateRequest): + request = service.SignalZoneStateRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + if state_signal is not None: + request.state_signal = state_signal + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.signal_zone_state] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + resources.Zone, + metadata_type=service.OperationMetadata, + ) + + # Done; return the response. + return response + + def __enter__(self) -> "GDCHardwareManagementClient": + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! + """ + self.transport.close() + + def list_operations( + self, + request: Optional[operations_pb2.ListOperationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Lists operations that match the specified filter in the request. + + Args: + request (:class:`~.operations_pb2.ListOperationsRequest`): + The request object. 
Request message for + `ListOperations` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.ListOperationsResponse: + Response message for ``ListOperations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.ListOperationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.list_operations, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_operation( + self, + request: Optional[operations_pb2.GetOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. + + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.get_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def delete_operation( + self, + request: Optional[operations_pb2.DeleteOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a long-running operation. + + This method indicates that the client is no longer interested + in the operation result. It does not cancel the operation. 
+ If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.DeleteOperationRequest`): + The request object. Request message for + `DeleteOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.DeleteOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.delete_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def cancel_operation( + self, + request: Optional[operations_pb2.CancelOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Starts asynchronous cancellation on a long-running operation. + + The server makes a best effort to cancel the operation, but success + is not guaranteed. If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.CancelOperationRequest`): + The request object. Request message for + `CancelOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.CancelOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.cancel_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def get_location( + self, + request: Optional[locations_pb2.GetLocationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.Location: + r"""Gets information about a location. + + Args: + request (:class:`~.location_pb2.GetLocationRequest`): + The request object. 
Request message for + `GetLocation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.location_pb2.Location: + Location object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = locations_pb2.GetLocationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.get_location, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def list_locations( + self, + request: Optional[locations_pb2.ListLocationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.ListLocationsResponse: + r"""Lists information about the supported locations for this service. + + Args: + request (:class:`~.location_pb2.ListLocationsRequest`): + The request object. Request message for + `ListLocations` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.location_pb2.ListLocationsResponse: + Response message for ``ListLocations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = locations_pb2.ListLocationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.list_locations, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("GDCHardwareManagementClient",) diff --git a/packages/google-cloud-gdchardwaremanagement/google/cloud/gdchardwaremanagement_v1alpha/services/gdc_hardware_management/pagers.py b/packages/google-cloud-gdchardwaremanagement/google/cloud/gdchardwaremanagement_v1alpha/services/gdc_hardware_management/pagers.py new file mode 100644 index 000000000000..ec4afec95b57 --- /dev/null +++ b/packages/google-cloud-gdchardwaremanagement/google/cloud/gdchardwaremanagement_v1alpha/services/gdc_hardware_management/pagers.py @@ -0,0 +1,1051 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import ( + Any, + AsyncIterator, + Awaitable, + Callable, + Iterator, + Optional, + Sequence, + Tuple, +) + +from google.cloud.gdchardwaremanagement_v1alpha.types import resources, service + + +class ListOrdersPager: + """A pager for iterating through ``list_orders`` requests. + + This class thinly wraps an initial + :class:`google.cloud.gdchardwaremanagement_v1alpha.types.ListOrdersResponse` object, and + provides an ``__iter__`` method to iterate through its + ``orders`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListOrders`` requests and continue to iterate + through the ``orders`` field on the + corresponding responses. + + All the usual :class:`google.cloud.gdchardwaremanagement_v1alpha.types.ListOrdersResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., service.ListOrdersResponse], + request: service.ListOrdersRequest, + response: service.ListOrdersResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.gdchardwaremanagement_v1alpha.types.ListOrdersRequest): + The initial request object. + response (google.cloud.gdchardwaremanagement_v1alpha.types.ListOrdersResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
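+
+        Note:
+            In typical use this pager is returned by the client's
+            ``list_orders`` method rather than constructed directly.
+            Iterating it yields ``Order`` resources and fetches further
+            pages on demand, e.g. (illustrative sketch only)::
+
+                for order in client.list_orders(request=request):
+                    print(order.name)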
+ """ + self._method = method + self._request = service.ListOrdersRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[service.ListOrdersResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[resources.Order]: + for page in self.pages: + yield from page.orders + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListOrdersAsyncPager: + """A pager for iterating through ``list_orders`` requests. + + This class thinly wraps an initial + :class:`google.cloud.gdchardwaremanagement_v1alpha.types.ListOrdersResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``orders`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListOrders`` requests and continue to iterate + through the ``orders`` field on the + corresponding responses. + + All the usual :class:`google.cloud.gdchardwaremanagement_v1alpha.types.ListOrdersResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., Awaitable[service.ListOrdersResponse]], + request: service.ListOrdersRequest, + response: service.ListOrdersResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.gdchardwaremanagement_v1alpha.types.ListOrdersRequest): + The initial request object. + response (google.cloud.gdchardwaremanagement_v1alpha.types.ListOrdersResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = service.ListOrdersRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[service.ListOrdersResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + + def __aiter__(self) -> AsyncIterator[resources.Order]: + async def async_generator(): + async for page in self.pages: + for response in page.orders: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListSitesPager: + """A pager for iterating through ``list_sites`` requests. + + This class thinly wraps an initial + :class:`google.cloud.gdchardwaremanagement_v1alpha.types.ListSitesResponse` object, and + provides an ``__iter__`` method to iterate through its + ``sites`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListSites`` requests and continue to iterate + through the ``sites`` field on the + corresponding responses. 
+ + All the usual :class:`google.cloud.gdchardwaremanagement_v1alpha.types.ListSitesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., service.ListSitesResponse], + request: service.ListSitesRequest, + response: service.ListSitesResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.gdchardwaremanagement_v1alpha.types.ListSitesRequest): + The initial request object. + response (google.cloud.gdchardwaremanagement_v1alpha.types.ListSitesResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = service.ListSitesRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[service.ListSitesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[resources.Site]: + for page in self.pages: + yield from page.sites + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListSitesAsyncPager: + """A pager for iterating through ``list_sites`` requests. + + This class thinly wraps an initial + :class:`google.cloud.gdchardwaremanagement_v1alpha.types.ListSitesResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``sites`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListSites`` requests and continue to iterate + through the ``sites`` field on the + corresponding responses. + + All the usual :class:`google.cloud.gdchardwaremanagement_v1alpha.types.ListSitesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., Awaitable[service.ListSitesResponse]], + request: service.ListSitesRequest, + response: service.ListSitesResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.gdchardwaremanagement_v1alpha.types.ListSitesRequest): + The initial request object. + response (google.cloud.gdchardwaremanagement_v1alpha.types.ListSitesResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = service.ListSitesRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[service.ListSitesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + + def __aiter__(self) -> AsyncIterator[resources.Site]: + async def async_generator(): + async for page in self.pages: + for response in page.sites: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListHardwareGroupsPager: + """A pager for iterating through ``list_hardware_groups`` requests. + + This class thinly wraps an initial + :class:`google.cloud.gdchardwaremanagement_v1alpha.types.ListHardwareGroupsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``hardware_groups`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListHardwareGroups`` requests and continue to iterate + through the ``hardware_groups`` field on the + corresponding responses. + + All the usual :class:`google.cloud.gdchardwaremanagement_v1alpha.types.ListHardwareGroupsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., service.ListHardwareGroupsResponse], + request: service.ListHardwareGroupsRequest, + response: service.ListHardwareGroupsResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.gdchardwaremanagement_v1alpha.types.ListHardwareGroupsRequest): + The initial request object. + response (google.cloud.gdchardwaremanagement_v1alpha.types.ListHardwareGroupsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = service.ListHardwareGroupsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[service.ListHardwareGroupsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[resources.HardwareGroup]: + for page in self.pages: + yield from page.hardware_groups + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListHardwareGroupsAsyncPager: + """A pager for iterating through ``list_hardware_groups`` requests. + + This class thinly wraps an initial + :class:`google.cloud.gdchardwaremanagement_v1alpha.types.ListHardwareGroupsResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``hardware_groups`` field. 
+ + If there are more pages, the ``__aiter__`` method will make additional + ``ListHardwareGroups`` requests and continue to iterate + through the ``hardware_groups`` field on the + corresponding responses. + + All the usual :class:`google.cloud.gdchardwaremanagement_v1alpha.types.ListHardwareGroupsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., Awaitable[service.ListHardwareGroupsResponse]], + request: service.ListHardwareGroupsRequest, + response: service.ListHardwareGroupsResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.gdchardwaremanagement_v1alpha.types.ListHardwareGroupsRequest): + The initial request object. + response (google.cloud.gdchardwaremanagement_v1alpha.types.ListHardwareGroupsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = service.ListHardwareGroupsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[service.ListHardwareGroupsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + + def __aiter__(self) -> AsyncIterator[resources.HardwareGroup]: + async def async_generator(): + async for page in self.pages: + for response in page.hardware_groups: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListHardwarePager: + """A pager for iterating through ``list_hardware`` requests. + + This class thinly wraps an initial + :class:`google.cloud.gdchardwaremanagement_v1alpha.types.ListHardwareResponse` object, and + provides an ``__iter__`` method to iterate through its + ``hardware`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListHardware`` requests and continue to iterate + through the ``hardware`` field on the + corresponding responses. + + All the usual :class:`google.cloud.gdchardwaremanagement_v1alpha.types.ListHardwareResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., service.ListHardwareResponse], + request: service.ListHardwareRequest, + response: service.ListHardwareResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.gdchardwaremanagement_v1alpha.types.ListHardwareRequest): + The initial request object. + response (google.cloud.gdchardwaremanagement_v1alpha.types.ListHardwareResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = service.ListHardwareRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[service.ListHardwareResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[resources.Hardware]: + for page in self.pages: + yield from page.hardware + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListHardwareAsyncPager: + """A pager for iterating through ``list_hardware`` requests. + + This class thinly wraps an initial + :class:`google.cloud.gdchardwaremanagement_v1alpha.types.ListHardwareResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``hardware`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListHardware`` requests and continue to iterate + through the ``hardware`` field on the + corresponding responses. + + All the usual :class:`google.cloud.gdchardwaremanagement_v1alpha.types.ListHardwareResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., Awaitable[service.ListHardwareResponse]], + request: service.ListHardwareRequest, + response: service.ListHardwareResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.gdchardwaremanagement_v1alpha.types.ListHardwareRequest): + The initial request object. + response (google.cloud.gdchardwaremanagement_v1alpha.types.ListHardwareResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = service.ListHardwareRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[service.ListHardwareResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + + def __aiter__(self) -> AsyncIterator[resources.Hardware]: + async def async_generator(): + async for page in self.pages: + for response in page.hardware: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListCommentsPager: + """A pager for iterating through ``list_comments`` requests. + + This class thinly wraps an initial + :class:`google.cloud.gdchardwaremanagement_v1alpha.types.ListCommentsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``comments`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListComments`` requests and continue to iterate + through the ``comments`` field on the + corresponding responses. 
+ + All the usual :class:`google.cloud.gdchardwaremanagement_v1alpha.types.ListCommentsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., service.ListCommentsResponse], + request: service.ListCommentsRequest, + response: service.ListCommentsResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.gdchardwaremanagement_v1alpha.types.ListCommentsRequest): + The initial request object. + response (google.cloud.gdchardwaremanagement_v1alpha.types.ListCommentsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = service.ListCommentsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[service.ListCommentsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[resources.Comment]: + for page in self.pages: + yield from page.comments + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListCommentsAsyncPager: + """A pager for iterating through ``list_comments`` requests. + + This class thinly wraps an initial + :class:`google.cloud.gdchardwaremanagement_v1alpha.types.ListCommentsResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``comments`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListComments`` requests and continue to iterate + through the ``comments`` field on the + corresponding responses. + + All the usual :class:`google.cloud.gdchardwaremanagement_v1alpha.types.ListCommentsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., Awaitable[service.ListCommentsResponse]], + request: service.ListCommentsRequest, + response: service.ListCommentsResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.gdchardwaremanagement_v1alpha.types.ListCommentsRequest): + The initial request object. + response (google.cloud.gdchardwaremanagement_v1alpha.types.ListCommentsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = service.ListCommentsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[service.ListCommentsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + + def __aiter__(self) -> AsyncIterator[resources.Comment]: + async def async_generator(): + async for page in self.pages: + for response in page.comments: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListChangeLogEntriesPager: + """A pager for iterating through ``list_change_log_entries`` requests. + + This class thinly wraps an initial + :class:`google.cloud.gdchardwaremanagement_v1alpha.types.ListChangeLogEntriesResponse` object, and + provides an ``__iter__`` method to iterate through its + ``change_log_entries`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListChangeLogEntries`` requests and continue to iterate + through the ``change_log_entries`` field on the + corresponding responses. + + All the usual :class:`google.cloud.gdchardwaremanagement_v1alpha.types.ListChangeLogEntriesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., service.ListChangeLogEntriesResponse], + request: service.ListChangeLogEntriesRequest, + response: service.ListChangeLogEntriesResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.gdchardwaremanagement_v1alpha.types.ListChangeLogEntriesRequest): + The initial request object. + response (google.cloud.gdchardwaremanagement_v1alpha.types.ListChangeLogEntriesResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = service.ListChangeLogEntriesRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[service.ListChangeLogEntriesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[resources.ChangeLogEntry]: + for page in self.pages: + yield from page.change_log_entries + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListChangeLogEntriesAsyncPager: + """A pager for iterating through ``list_change_log_entries`` requests. + + This class thinly wraps an initial + :class:`google.cloud.gdchardwaremanagement_v1alpha.types.ListChangeLogEntriesResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``change_log_entries`` field. 
+ + If there are more pages, the ``__aiter__`` method will make additional + ``ListChangeLogEntries`` requests and continue to iterate + through the ``change_log_entries`` field on the + corresponding responses. + + All the usual :class:`google.cloud.gdchardwaremanagement_v1alpha.types.ListChangeLogEntriesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., Awaitable[service.ListChangeLogEntriesResponse]], + request: service.ListChangeLogEntriesRequest, + response: service.ListChangeLogEntriesResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.gdchardwaremanagement_v1alpha.types.ListChangeLogEntriesRequest): + The initial request object. + response (google.cloud.gdchardwaremanagement_v1alpha.types.ListChangeLogEntriesResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = service.ListChangeLogEntriesRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[service.ListChangeLogEntriesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + + def __aiter__(self) -> AsyncIterator[resources.ChangeLogEntry]: + async def async_generator(): + async for page in self.pages: + for response in page.change_log_entries: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListSkusPager: + """A pager for iterating through ``list_skus`` requests. + + This class thinly wraps an initial + :class:`google.cloud.gdchardwaremanagement_v1alpha.types.ListSkusResponse` object, and + provides an ``__iter__`` method to iterate through its + ``skus`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListSkus`` requests and continue to iterate + through the ``skus`` field on the + corresponding responses. + + All the usual :class:`google.cloud.gdchardwaremanagement_v1alpha.types.ListSkusResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., service.ListSkusResponse], + request: service.ListSkusRequest, + response: service.ListSkusResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.gdchardwaremanagement_v1alpha.types.ListSkusRequest): + The initial request object. + response (google.cloud.gdchardwaremanagement_v1alpha.types.ListSkusResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = service.ListSkusRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[service.ListSkusResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[resources.Sku]: + for page in self.pages: + yield from page.skus + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListSkusAsyncPager: + """A pager for iterating through ``list_skus`` requests. + + This class thinly wraps an initial + :class:`google.cloud.gdchardwaremanagement_v1alpha.types.ListSkusResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``skus`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListSkus`` requests and continue to iterate + through the ``skus`` field on the + corresponding responses. + + All the usual :class:`google.cloud.gdchardwaremanagement_v1alpha.types.ListSkusResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., Awaitable[service.ListSkusResponse]], + request: service.ListSkusRequest, + response: service.ListSkusResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.gdchardwaremanagement_v1alpha.types.ListSkusRequest): + The initial request object. + response (google.cloud.gdchardwaremanagement_v1alpha.types.ListSkusResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = service.ListSkusRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[service.ListSkusResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + + def __aiter__(self) -> AsyncIterator[resources.Sku]: + async def async_generator(): + async for page in self.pages: + for response in page.skus: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListZonesPager: + """A pager for iterating through ``list_zones`` requests. + + This class thinly wraps an initial + :class:`google.cloud.gdchardwaremanagement_v1alpha.types.ListZonesResponse` object, and + provides an ``__iter__`` method to iterate through its + ``zones`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListZones`` requests and continue to iterate + through the ``zones`` field on the + corresponding responses. 
+ + All the usual :class:`google.cloud.gdchardwaremanagement_v1alpha.types.ListZonesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., service.ListZonesResponse], + request: service.ListZonesRequest, + response: service.ListZonesResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.gdchardwaremanagement_v1alpha.types.ListZonesRequest): + The initial request object. + response (google.cloud.gdchardwaremanagement_v1alpha.types.ListZonesResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = service.ListZonesRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[service.ListZonesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[resources.Zone]: + for page in self.pages: + yield from page.zones + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListZonesAsyncPager: + """A pager for iterating through ``list_zones`` requests. + + This class thinly wraps an initial + :class:`google.cloud.gdchardwaremanagement_v1alpha.types.ListZonesResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``zones`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListZones`` requests and continue to iterate + through the ``zones`` field on the + corresponding responses. + + All the usual :class:`google.cloud.gdchardwaremanagement_v1alpha.types.ListZonesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., Awaitable[service.ListZonesResponse]], + request: service.ListZonesRequest, + response: service.ListZonesResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.gdchardwaremanagement_v1alpha.types.ListZonesRequest): + The initial request object. + response (google.cloud.gdchardwaremanagement_v1alpha.types.ListZonesResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = service.ListZonesRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[service.ListZonesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + + def __aiter__(self) -> AsyncIterator[resources.Zone]: + async def async_generator(): + async for page in self.pages: + for response in page.zones: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) diff --git a/packages/google-cloud-gdchardwaremanagement/google/cloud/gdchardwaremanagement_v1alpha/services/gdc_hardware_management/transports/__init__.py b/packages/google-cloud-gdchardwaremanagement/google/cloud/gdchardwaremanagement_v1alpha/services/gdc_hardware_management/transports/__init__.py new file mode 100644 index 000000000000..b0b5760d8548 --- /dev/null +++ b/packages/google-cloud-gdchardwaremanagement/google/cloud/gdchardwaremanagement_v1alpha/services/gdc_hardware_management/transports/__init__.py @@ -0,0 +1,41 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +from typing import Dict, Type + +from .base import GDCHardwareManagementTransport +from .grpc import GDCHardwareManagementGrpcTransport +from .grpc_asyncio import GDCHardwareManagementGrpcAsyncIOTransport +from .rest import ( + GDCHardwareManagementRestInterceptor, + GDCHardwareManagementRestTransport, +) + +# Compile a registry of transports. 
+_transport_registry = ( + OrderedDict() +) # type: Dict[str, Type[GDCHardwareManagementTransport]] +_transport_registry["grpc"] = GDCHardwareManagementGrpcTransport +_transport_registry["grpc_asyncio"] = GDCHardwareManagementGrpcAsyncIOTransport +_transport_registry["rest"] = GDCHardwareManagementRestTransport + +__all__ = ( + "GDCHardwareManagementTransport", + "GDCHardwareManagementGrpcTransport", + "GDCHardwareManagementGrpcAsyncIOTransport", + "GDCHardwareManagementRestTransport", + "GDCHardwareManagementRestInterceptor", +) diff --git a/packages/google-cloud-gdchardwaremanagement/google/cloud/gdchardwaremanagement_v1alpha/services/gdc_hardware_management/transports/base.py b/packages/google-cloud-gdchardwaremanagement/google/cloud/gdchardwaremanagement_v1alpha/services/gdc_hardware_management/transports/base.py new file mode 100644 index 000000000000..af0566012af8 --- /dev/null +++ b/packages/google-cloud-gdchardwaremanagement/google/cloud/gdchardwaremanagement_v1alpha/services/gdc_hardware_management/transports/base.py @@ -0,0 +1,904 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import abc +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union + +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1, operations_v1 +from google.api_core import retry as retries +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.gdchardwaremanagement_v1alpha import gapic_version as package_version +from google.cloud.gdchardwaremanagement_v1alpha.types import resources, service + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +class GDCHardwareManagementTransport(abc.ABC): + """Abstract transport class for GDCHardwareManagement.""" + + AUTH_SCOPES = ("https://www.googleapis.com/auth/cloud-platform",) + + DEFAULT_HOST: str = "gdchardwaremanagement.googleapis.com" + + def __init__( + self, + *, + host: str = DEFAULT_HOST, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + **kwargs, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'gdchardwaremanagement.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. 
These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A list of scopes. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + """ + + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} + + # Save the scopes. + self._scopes = scopes + + # If no credentials are provided, then determine the appropriate + # defaults. + if credentials and credentials_file: + raise core_exceptions.DuplicateCredentialArgs( + "'credentials_file' and 'credentials' are mutually exclusive" + ) + + if credentials_file is not None: + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, **scopes_kwargs, quota_project_id=quota_project_id + ) + elif credentials is None: + credentials, _ = google.auth.default( + **scopes_kwargs, quota_project_id=quota_project_id + ) + # Don't apply audience if the credentials file passed from user. + if hasattr(credentials, "with_gdch_audience"): + credentials = credentials.with_gdch_audience( + api_audience if api_audience else host + ) + + # If the credentials are service account credentials, then always try to use self signed JWT. + if ( + always_use_jwt_access + and isinstance(credentials, service_account.Credentials) + and hasattr(service_account.Credentials, "with_always_use_jwt_access") + ): + credentials = credentials.with_always_use_jwt_access(True) + + # Save the credentials. + self._credentials = credentials + + # Save the hostname. Default to port 443 (HTTPS) if none is specified. + if ":" not in host: + host += ":443" + self._host = host + + @property + def host(self): + return self._host + + def _prep_wrapped_messages(self, client_info): + # Precompute the wrapped methods. 
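+        # Most entries below pair an RPC with a default retry policy
+        # (exponential backoff on ServiceUnavailable, up to a 60s deadline)
+        # and a default timeout, so callers get these defaults without
+        # passing retry/timeout explicitly on every call.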
+ self._wrapped_methods = { + self.list_orders: gapic_v1.method.wrap_method( + self.list_orders, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.get_order: gapic_v1.method.wrap_method( + self.get_order, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.create_order: gapic_v1.method.wrap_method( + self.create_order, + default_timeout=60.0, + client_info=client_info, + ), + self.update_order: gapic_v1.method.wrap_method( + self.update_order, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.delete_order: gapic_v1.method.wrap_method( + self.delete_order, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.submit_order: gapic_v1.method.wrap_method( + self.submit_order, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.list_sites: gapic_v1.method.wrap_method( + self.list_sites, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.get_site: gapic_v1.method.wrap_method( + self.get_site, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.create_site: gapic_v1.method.wrap_method( + self.create_site, + default_timeout=60.0, + client_info=client_info, + ), + self.update_site: gapic_v1.method.wrap_method( + self.update_site, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.list_hardware_groups: gapic_v1.method.wrap_method( + self.list_hardware_groups, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.get_hardware_group: gapic_v1.method.wrap_method( + self.get_hardware_group, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.create_hardware_group: gapic_v1.method.wrap_method( + self.create_hardware_group, + default_timeout=60.0, + client_info=client_info, + ), + 
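+            # Note: the create_* RPCs in this mapping are wrapped with a
+            # default timeout only (no default_retry), so failed creations
+            # are not retried automatically by default.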
self.update_hardware_group: gapic_v1.method.wrap_method( + self.update_hardware_group, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.delete_hardware_group: gapic_v1.method.wrap_method( + self.delete_hardware_group, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.list_hardware: gapic_v1.method.wrap_method( + self.list_hardware, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.get_hardware: gapic_v1.method.wrap_method( + self.get_hardware, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.create_hardware: gapic_v1.method.wrap_method( + self.create_hardware, + default_timeout=60.0, + client_info=client_info, + ), + self.update_hardware: gapic_v1.method.wrap_method( + self.update_hardware, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.delete_hardware: gapic_v1.method.wrap_method( + self.delete_hardware, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.list_comments: gapic_v1.method.wrap_method( + self.list_comments, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.get_comment: gapic_v1.method.wrap_method( + self.get_comment, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.create_comment: gapic_v1.method.wrap_method( + self.create_comment, + default_timeout=60.0, + client_info=client_info, + ), + self.list_change_log_entries: gapic_v1.method.wrap_method( + self.list_change_log_entries, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.get_change_log_entry: gapic_v1.method.wrap_method( + self.get_change_log_entry, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.list_skus: gapic_v1.method.wrap_method( + self.list_skus, + 
default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.get_sku: gapic_v1.method.wrap_method( + self.get_sku, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.list_zones: gapic_v1.method.wrap_method( + self.list_zones, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.get_zone: gapic_v1.method.wrap_method( + self.get_zone, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.create_zone: gapic_v1.method.wrap_method( + self.create_zone, + default_timeout=60.0, + client_info=client_info, + ), + self.update_zone: gapic_v1.method.wrap_method( + self.update_zone, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.delete_zone: gapic_v1.method.wrap_method( + self.delete_zone, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.signal_zone_state: gapic_v1.method.wrap_method( + self.signal_zone_state, + default_timeout=None, + client_info=client_info, + ), + } + + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! 
+ """ + raise NotImplementedError() + + @property + def operations_client(self): + """Return the client designed to process long-running operations.""" + raise NotImplementedError() + + @property + def list_orders( + self, + ) -> Callable[ + [service.ListOrdersRequest], + Union[service.ListOrdersResponse, Awaitable[service.ListOrdersResponse]], + ]: + raise NotImplementedError() + + @property + def get_order( + self, + ) -> Callable[ + [service.GetOrderRequest], Union[resources.Order, Awaitable[resources.Order]] + ]: + raise NotImplementedError() + + @property + def create_order( + self, + ) -> Callable[ + [service.CreateOrderRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def update_order( + self, + ) -> Callable[ + [service.UpdateOrderRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def delete_order( + self, + ) -> Callable[ + [service.DeleteOrderRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def submit_order( + self, + ) -> Callable[ + [service.SubmitOrderRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def list_sites( + self, + ) -> Callable[ + [service.ListSitesRequest], + Union[service.ListSitesResponse, Awaitable[service.ListSitesResponse]], + ]: + raise NotImplementedError() + + @property + def get_site( + self, + ) -> Callable[ + [service.GetSiteRequest], Union[resources.Site, Awaitable[resources.Site]] + ]: + raise NotImplementedError() + + @property + def create_site( + self, + ) -> Callable[ + [service.CreateSiteRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def update_site( + self, + ) -> Callable[ + [service.UpdateSiteRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def list_hardware_groups( + self, + ) -> Callable[ + [service.ListHardwareGroupsRequest], + Union[ + service.ListHardwareGroupsResponse, + Awaitable[service.ListHardwareGroupsResponse], + ], + ]: + raise NotImplementedError() + + @property + def get_hardware_group( + self, + ) -> Callable[ + [service.GetHardwareGroupRequest], + Union[resources.HardwareGroup, Awaitable[resources.HardwareGroup]], + ]: + raise NotImplementedError() + + @property + def create_hardware_group( + self, + ) -> Callable[ + [service.CreateHardwareGroupRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def update_hardware_group( + self, + ) -> Callable[ + [service.UpdateHardwareGroupRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def delete_hardware_group( + self, + ) -> Callable[ + [service.DeleteHardwareGroupRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def list_hardware( + self, + ) -> Callable[ + [service.ListHardwareRequest], + Union[service.ListHardwareResponse, Awaitable[service.ListHardwareResponse]], + ]: + raise NotImplementedError() + + @property + def get_hardware( + self, + ) -> Callable[ + [service.GetHardwareRequest], + Union[resources.Hardware, Awaitable[resources.Hardware]], + 
]: + raise NotImplementedError() + + @property + def create_hardware( + self, + ) -> Callable[ + [service.CreateHardwareRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def update_hardware( + self, + ) -> Callable[ + [service.UpdateHardwareRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def delete_hardware( + self, + ) -> Callable[ + [service.DeleteHardwareRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def list_comments( + self, + ) -> Callable[ + [service.ListCommentsRequest], + Union[service.ListCommentsResponse, Awaitable[service.ListCommentsResponse]], + ]: + raise NotImplementedError() + + @property + def get_comment( + self, + ) -> Callable[ + [service.GetCommentRequest], + Union[resources.Comment, Awaitable[resources.Comment]], + ]: + raise NotImplementedError() + + @property + def create_comment( + self, + ) -> Callable[ + [service.CreateCommentRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def list_change_log_entries( + self, + ) -> Callable[ + [service.ListChangeLogEntriesRequest], + Union[ + service.ListChangeLogEntriesResponse, + Awaitable[service.ListChangeLogEntriesResponse], + ], + ]: + raise NotImplementedError() + + @property + def get_change_log_entry( + self, + ) -> Callable[ + [service.GetChangeLogEntryRequest], + Union[resources.ChangeLogEntry, Awaitable[resources.ChangeLogEntry]], + ]: + raise NotImplementedError() + + @property + def list_skus( + self, + ) -> Callable[ + [service.ListSkusRequest], + Union[service.ListSkusResponse, Awaitable[service.ListSkusResponse]], + ]: + raise NotImplementedError() + + @property + def get_sku( + self, + ) -> Callable[ + [service.GetSkuRequest], Union[resources.Sku, Awaitable[resources.Sku]] + ]: + raise NotImplementedError() + + @property + def list_zones( + self, + ) -> Callable[ + [service.ListZonesRequest], + Union[service.ListZonesResponse, Awaitable[service.ListZonesResponse]], + ]: + raise NotImplementedError() + + @property + def get_zone( + self, + ) -> Callable[ + [service.GetZoneRequest], Union[resources.Zone, Awaitable[resources.Zone]] + ]: + raise NotImplementedError() + + @property + def create_zone( + self, + ) -> Callable[ + [service.CreateZoneRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def update_zone( + self, + ) -> Callable[ + [service.UpdateZoneRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def delete_zone( + self, + ) -> Callable[ + [service.DeleteZoneRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def signal_zone_state( + self, + ) -> Callable[ + [service.SignalZoneStateRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], + Union[ + operations_pb2.ListOperationsResponse, + Awaitable[operations_pb2.ListOperationsResponse], + ], + ]: + raise NotImplementedError() + + @property + def get_operation( + self, + ) -> Callable[ + 
[operations_pb2.GetOperationRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None,]: + raise NotImplementedError() + + @property + def delete_operation( + self, + ) -> Callable[[operations_pb2.DeleteOperationRequest], None,]: + raise NotImplementedError() + + @property + def get_location( + self, + ) -> Callable[ + [locations_pb2.GetLocationRequest], + Union[locations_pb2.Location, Awaitable[locations_pb2.Location]], + ]: + raise NotImplementedError() + + @property + def list_locations( + self, + ) -> Callable[ + [locations_pb2.ListLocationsRequest], + Union[ + locations_pb2.ListLocationsResponse, + Awaitable[locations_pb2.ListLocationsResponse], + ], + ]: + raise NotImplementedError() + + @property + def kind(self) -> str: + raise NotImplementedError() + + +__all__ = ("GDCHardwareManagementTransport",) diff --git a/packages/google-cloud-gdchardwaremanagement/google/cloud/gdchardwaremanagement_v1alpha/services/gdc_hardware_management/transports/grpc.py b/packages/google-cloud-gdchardwaremanagement/google/cloud/gdchardwaremanagement_v1alpha/services/gdc_hardware_management/transports/grpc.py new file mode 100644 index 000000000000..2c517c4a51b1 --- /dev/null +++ b/packages/google-cloud-gdchardwaremanagement/google/cloud/gdchardwaremanagement_v1alpha/services/gdc_hardware_management/transports/grpc.py @@ -0,0 +1,1219 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, grpc_helpers, operations_v1 +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +import grpc # type: ignore + +from google.cloud.gdchardwaremanagement_v1alpha.types import resources, service + +from .base import DEFAULT_CLIENT_INFO, GDCHardwareManagementTransport + + +class GDCHardwareManagementGrpcTransport(GDCHardwareManagementTransport): + """gRPC backend transport for GDCHardwareManagement. + + The GDC Hardware Management service. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. 
+ """ + + _stubs: Dict[str, Callable] + + def __init__( + self, + *, + host: str = "gdchardwaremanagement.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'gdchardwaremanagement.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if a ``channel`` instance is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if a ``channel`` instance is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if a ``channel`` instance is provided. + channel (Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]]): + A ``Channel`` instance through which to make calls, or a Callable + that constructs and returns one. If set to None, ``self.create_channel`` + is used to create the channel. If a Callable is given, it will be called + with the same arguments as used in ``self.create_channel``. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if a ``channel`` instance is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. 
+ google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
+ and ``credentials_file`` are passed.
+ """
+ self._grpc_channel = None
+ self._ssl_channel_credentials = ssl_channel_credentials
+ self._stubs: Dict[str, Callable] = {}
+ self._operations_client: Optional[operations_v1.OperationsClient] = None
+
+ if api_mtls_endpoint:
+ warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning)
+ if client_cert_source:
+ warnings.warn("client_cert_source is deprecated", DeprecationWarning)
+
+ if isinstance(channel, grpc.Channel):
+ # Ignore credentials if a channel was passed.
+ credentials = False
+ # If a channel was explicitly provided, set it.
+ self._grpc_channel = channel
+ self._ssl_channel_credentials = None
+
+ else:
+ if api_mtls_endpoint:
+ host = api_mtls_endpoint
+
+ # Create SSL credentials with client_cert_source or application
+ # default SSL credentials.
+ if client_cert_source:
+ cert, key = client_cert_source()
+ self._ssl_channel_credentials = grpc.ssl_channel_credentials(
+ certificate_chain=cert, private_key=key
+ )
+ else:
+ self._ssl_channel_credentials = SslCredentials().ssl_credentials
+
+ else:
+ if client_cert_source_for_mtls and not ssl_channel_credentials:
+ cert, key = client_cert_source_for_mtls()
+ self._ssl_channel_credentials = grpc.ssl_channel_credentials(
+ certificate_chain=cert, private_key=key
+ )
+
+ # The base transport sets the host, credentials and scopes
+ super().__init__(
+ host=host,
+ credentials=credentials,
+ credentials_file=credentials_file,
+ scopes=scopes,
+ quota_project_id=quota_project_id,
+ client_info=client_info,
+ always_use_jwt_access=always_use_jwt_access,
+ api_audience=api_audience,
+ )
+
+ if not self._grpc_channel:
+ # initialize with the provided callable or the default channel
+ channel_init = channel or type(self).create_channel
+ self._grpc_channel = channel_init(
+ self._host,
+ # use the credentials which are saved
+ credentials=self._credentials,
+ # Set ``credentials_file`` to ``None`` here as
+ # the credentials that we saved earlier should be used.
+ credentials_file=None,
+ scopes=self._scopes,
+ ssl_credentials=self._ssl_channel_credentials,
+ quota_project_id=quota_project_id,
+ options=[
+ ("grpc.max_send_message_length", -1),
+ ("grpc.max_receive_message_length", -1),
+ ],
+ )
+
+ # Wrap messages. This must be done after self._grpc_channel exists
+ self._prep_wrapped_messages(client_info)
+
+ @classmethod
+ def create_channel(
+ cls,
+ host: str = "gdchardwaremanagement.googleapis.com",
+ credentials: Optional[ga_credentials.Credentials] = None,
+ credentials_file: Optional[str] = None,
+ scopes: Optional[Sequence[str]] = None,
+ quota_project_id: Optional[str] = None,
+ **kwargs,
+ ) -> grpc.Channel:
+ """Create and return a gRPC channel object.
+ Args:
+ host (Optional[str]): The host for the channel to use.
+ credentials (Optional[~.Credentials]): The
+ authorization credentials to attach to requests. These
+ credentials identify this application to the service. If
+ none are specified, the client will attempt to ascertain
+ the credentials from the environment.
+ credentials_file (Optional[str]): A file with credentials that can
+ be loaded with :func:`google.auth.load_credentials_from_file`.
+ This argument is mutually exclusive with credentials.
+ scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
+ service. These are only used when credentials are not specified and
+ are passed to :func:`google.auth.default`.
+ quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + grpc.Channel: A gRPC channel object. + + Raises: + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + + return grpc_helpers.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs, + ) + + @property + def grpc_channel(self) -> grpc.Channel: + """Return the channel designed to connect to this service.""" + return self._grpc_channel + + @property + def operations_client(self) -> operations_v1.OperationsClient: + """Create the client designed to process long-running operations. + + This property caches on the instance; repeated calls return the same + client. + """ + # Quick check: Only create a new client if we do not already have one. + if self._operations_client is None: + self._operations_client = operations_v1.OperationsClient(self.grpc_channel) + + # Return the client from cache. + return self._operations_client + + @property + def list_orders( + self, + ) -> Callable[[service.ListOrdersRequest], service.ListOrdersResponse]: + r"""Return a callable for the list orders method over gRPC. + + Lists orders in a given project and location. + + Returns: + Callable[[~.ListOrdersRequest], + ~.ListOrdersResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_orders" not in self._stubs: + self._stubs["list_orders"] = self.grpc_channel.unary_unary( + "/google.cloud.gdchardwaremanagement.v1alpha.GDCHardwareManagement/ListOrders", + request_serializer=service.ListOrdersRequest.serialize, + response_deserializer=service.ListOrdersResponse.deserialize, + ) + return self._stubs["list_orders"] + + @property + def get_order(self) -> Callable[[service.GetOrderRequest], resources.Order]: + r"""Return a callable for the get order method over gRPC. + + Gets details of an order. + + Returns: + Callable[[~.GetOrderRequest], + ~.Order]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_order" not in self._stubs: + self._stubs["get_order"] = self.grpc_channel.unary_unary( + "/google.cloud.gdchardwaremanagement.v1alpha.GDCHardwareManagement/GetOrder", + request_serializer=service.GetOrderRequest.serialize, + response_deserializer=resources.Order.deserialize, + ) + return self._stubs["get_order"] + + @property + def create_order( + self, + ) -> Callable[[service.CreateOrderRequest], operations_pb2.Operation]: + r"""Return a callable for the create order method over gRPC. + + Creates a new order in a given project and location. + + Returns: + Callable[[~.CreateOrderRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. 
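+ # The resulting stub is cached in ``self._stubs`` on first access, so
+ # repeated reads of this property reuse the same callable; every RPC
+ # property below follows this same lazy-caching pattern.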
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_order" not in self._stubs: + self._stubs["create_order"] = self.grpc_channel.unary_unary( + "/google.cloud.gdchardwaremanagement.v1alpha.GDCHardwareManagement/CreateOrder", + request_serializer=service.CreateOrderRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["create_order"] + + @property + def update_order( + self, + ) -> Callable[[service.UpdateOrderRequest], operations_pb2.Operation]: + r"""Return a callable for the update order method over gRPC. + + Updates the parameters of an order. + + Returns: + Callable[[~.UpdateOrderRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_order" not in self._stubs: + self._stubs["update_order"] = self.grpc_channel.unary_unary( + "/google.cloud.gdchardwaremanagement.v1alpha.GDCHardwareManagement/UpdateOrder", + request_serializer=service.UpdateOrderRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["update_order"] + + @property + def delete_order( + self, + ) -> Callable[[service.DeleteOrderRequest], operations_pb2.Operation]: + r"""Return a callable for the delete order method over gRPC. + + Deletes an order. + + Returns: + Callable[[~.DeleteOrderRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_order" not in self._stubs: + self._stubs["delete_order"] = self.grpc_channel.unary_unary( + "/google.cloud.gdchardwaremanagement.v1alpha.GDCHardwareManagement/DeleteOrder", + request_serializer=service.DeleteOrderRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["delete_order"] + + @property + def submit_order( + self, + ) -> Callable[[service.SubmitOrderRequest], operations_pb2.Operation]: + r"""Return a callable for the submit order method over gRPC. + + Submits an order. + + Returns: + Callable[[~.SubmitOrderRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "submit_order" not in self._stubs: + self._stubs["submit_order"] = self.grpc_channel.unary_unary( + "/google.cloud.gdchardwaremanagement.v1alpha.GDCHardwareManagement/SubmitOrder", + request_serializer=service.SubmitOrderRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["submit_order"] + + @property + def list_sites( + self, + ) -> Callable[[service.ListSitesRequest], service.ListSitesResponse]: + r"""Return a callable for the list sites method over gRPC. + + Lists sites in a given project and location. + + Returns: + Callable[[~.ListSitesRequest], + ~.ListSitesResponse]: + A function that, when called, will call the underlying RPC + on the server. 
+ """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_sites" not in self._stubs: + self._stubs["list_sites"] = self.grpc_channel.unary_unary( + "/google.cloud.gdchardwaremanagement.v1alpha.GDCHardwareManagement/ListSites", + request_serializer=service.ListSitesRequest.serialize, + response_deserializer=service.ListSitesResponse.deserialize, + ) + return self._stubs["list_sites"] + + @property + def get_site(self) -> Callable[[service.GetSiteRequest], resources.Site]: + r"""Return a callable for the get site method over gRPC. + + Gets details of a site. + + Returns: + Callable[[~.GetSiteRequest], + ~.Site]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_site" not in self._stubs: + self._stubs["get_site"] = self.grpc_channel.unary_unary( + "/google.cloud.gdchardwaremanagement.v1alpha.GDCHardwareManagement/GetSite", + request_serializer=service.GetSiteRequest.serialize, + response_deserializer=resources.Site.deserialize, + ) + return self._stubs["get_site"] + + @property + def create_site( + self, + ) -> Callable[[service.CreateSiteRequest], operations_pb2.Operation]: + r"""Return a callable for the create site method over gRPC. + + Creates a new site in a given project and location. + + Returns: + Callable[[~.CreateSiteRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_site" not in self._stubs: + self._stubs["create_site"] = self.grpc_channel.unary_unary( + "/google.cloud.gdchardwaremanagement.v1alpha.GDCHardwareManagement/CreateSite", + request_serializer=service.CreateSiteRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["create_site"] + + @property + def update_site( + self, + ) -> Callable[[service.UpdateSiteRequest], operations_pb2.Operation]: + r"""Return a callable for the update site method over gRPC. + + Updates the parameters of a site. + + Returns: + Callable[[~.UpdateSiteRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_site" not in self._stubs: + self._stubs["update_site"] = self.grpc_channel.unary_unary( + "/google.cloud.gdchardwaremanagement.v1alpha.GDCHardwareManagement/UpdateSite", + request_serializer=service.UpdateSiteRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["update_site"] + + @property + def list_hardware_groups( + self, + ) -> Callable[ + [service.ListHardwareGroupsRequest], service.ListHardwareGroupsResponse + ]: + r"""Return a callable for the list hardware groups method over gRPC. + + Lists hardware groups in a given order. 
+ + Returns: + Callable[[~.ListHardwareGroupsRequest], + ~.ListHardwareGroupsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_hardware_groups" not in self._stubs: + self._stubs["list_hardware_groups"] = self.grpc_channel.unary_unary( + "/google.cloud.gdchardwaremanagement.v1alpha.GDCHardwareManagement/ListHardwareGroups", + request_serializer=service.ListHardwareGroupsRequest.serialize, + response_deserializer=service.ListHardwareGroupsResponse.deserialize, + ) + return self._stubs["list_hardware_groups"] + + @property + def get_hardware_group( + self, + ) -> Callable[[service.GetHardwareGroupRequest], resources.HardwareGroup]: + r"""Return a callable for the get hardware group method over gRPC. + + Gets details of a hardware group. + + Returns: + Callable[[~.GetHardwareGroupRequest], + ~.HardwareGroup]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_hardware_group" not in self._stubs: + self._stubs["get_hardware_group"] = self.grpc_channel.unary_unary( + "/google.cloud.gdchardwaremanagement.v1alpha.GDCHardwareManagement/GetHardwareGroup", + request_serializer=service.GetHardwareGroupRequest.serialize, + response_deserializer=resources.HardwareGroup.deserialize, + ) + return self._stubs["get_hardware_group"] + + @property + def create_hardware_group( + self, + ) -> Callable[[service.CreateHardwareGroupRequest], operations_pb2.Operation]: + r"""Return a callable for the create hardware group method over gRPC. + + Creates a new hardware group in a given order. + + Returns: + Callable[[~.CreateHardwareGroupRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_hardware_group" not in self._stubs: + self._stubs["create_hardware_group"] = self.grpc_channel.unary_unary( + "/google.cloud.gdchardwaremanagement.v1alpha.GDCHardwareManagement/CreateHardwareGroup", + request_serializer=service.CreateHardwareGroupRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["create_hardware_group"] + + @property + def update_hardware_group( + self, + ) -> Callable[[service.UpdateHardwareGroupRequest], operations_pb2.Operation]: + r"""Return a callable for the update hardware group method over gRPC. + + Updates the parameters of a hardware group. + + Returns: + Callable[[~.UpdateHardwareGroupRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "update_hardware_group" not in self._stubs: + self._stubs["update_hardware_group"] = self.grpc_channel.unary_unary( + "/google.cloud.gdchardwaremanagement.v1alpha.GDCHardwareManagement/UpdateHardwareGroup", + request_serializer=service.UpdateHardwareGroupRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["update_hardware_group"] + + @property + def delete_hardware_group( + self, + ) -> Callable[[service.DeleteHardwareGroupRequest], operations_pb2.Operation]: + r"""Return a callable for the delete hardware group method over gRPC. + + Deletes a hardware group. + + Returns: + Callable[[~.DeleteHardwareGroupRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_hardware_group" not in self._stubs: + self._stubs["delete_hardware_group"] = self.grpc_channel.unary_unary( + "/google.cloud.gdchardwaremanagement.v1alpha.GDCHardwareManagement/DeleteHardwareGroup", + request_serializer=service.DeleteHardwareGroupRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["delete_hardware_group"] + + @property + def list_hardware( + self, + ) -> Callable[[service.ListHardwareRequest], service.ListHardwareResponse]: + r"""Return a callable for the list hardware method over gRPC. + + Lists hardware in a given project and location. + + Returns: + Callable[[~.ListHardwareRequest], + ~.ListHardwareResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_hardware" not in self._stubs: + self._stubs["list_hardware"] = self.grpc_channel.unary_unary( + "/google.cloud.gdchardwaremanagement.v1alpha.GDCHardwareManagement/ListHardware", + request_serializer=service.ListHardwareRequest.serialize, + response_deserializer=service.ListHardwareResponse.deserialize, + ) + return self._stubs["list_hardware"] + + @property + def get_hardware( + self, + ) -> Callable[[service.GetHardwareRequest], resources.Hardware]: + r"""Return a callable for the get hardware method over gRPC. + + Gets hardware details. + + Returns: + Callable[[~.GetHardwareRequest], + ~.Hardware]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_hardware" not in self._stubs: + self._stubs["get_hardware"] = self.grpc_channel.unary_unary( + "/google.cloud.gdchardwaremanagement.v1alpha.GDCHardwareManagement/GetHardware", + request_serializer=service.GetHardwareRequest.serialize, + response_deserializer=resources.Hardware.deserialize, + ) + return self._stubs["get_hardware"] + + @property + def create_hardware( + self, + ) -> Callable[[service.CreateHardwareRequest], operations_pb2.Operation]: + r"""Return a callable for the create hardware method over gRPC. + + Creates new hardware in a given project and location. 
+ + Returns: + Callable[[~.CreateHardwareRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_hardware" not in self._stubs: + self._stubs["create_hardware"] = self.grpc_channel.unary_unary( + "/google.cloud.gdchardwaremanagement.v1alpha.GDCHardwareManagement/CreateHardware", + request_serializer=service.CreateHardwareRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["create_hardware"] + + @property + def update_hardware( + self, + ) -> Callable[[service.UpdateHardwareRequest], operations_pb2.Operation]: + r"""Return a callable for the update hardware method over gRPC. + + Updates hardware parameters. + + Returns: + Callable[[~.UpdateHardwareRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_hardware" not in self._stubs: + self._stubs["update_hardware"] = self.grpc_channel.unary_unary( + "/google.cloud.gdchardwaremanagement.v1alpha.GDCHardwareManagement/UpdateHardware", + request_serializer=service.UpdateHardwareRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["update_hardware"] + + @property + def delete_hardware( + self, + ) -> Callable[[service.DeleteHardwareRequest], operations_pb2.Operation]: + r"""Return a callable for the delete hardware method over gRPC. + + Deletes hardware. + + Returns: + Callable[[~.DeleteHardwareRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_hardware" not in self._stubs: + self._stubs["delete_hardware"] = self.grpc_channel.unary_unary( + "/google.cloud.gdchardwaremanagement.v1alpha.GDCHardwareManagement/DeleteHardware", + request_serializer=service.DeleteHardwareRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["delete_hardware"] + + @property + def list_comments( + self, + ) -> Callable[[service.ListCommentsRequest], service.ListCommentsResponse]: + r"""Return a callable for the list comments method over gRPC. + + Lists the comments on an order. + + Returns: + Callable[[~.ListCommentsRequest], + ~.ListCommentsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "list_comments" not in self._stubs: + self._stubs["list_comments"] = self.grpc_channel.unary_unary( + "/google.cloud.gdchardwaremanagement.v1alpha.GDCHardwareManagement/ListComments", + request_serializer=service.ListCommentsRequest.serialize, + response_deserializer=service.ListCommentsResponse.deserialize, + ) + return self._stubs["list_comments"] + + @property + def get_comment(self) -> Callable[[service.GetCommentRequest], resources.Comment]: + r"""Return a callable for the get comment method over gRPC. + + Gets the content of a comment. + + Returns: + Callable[[~.GetCommentRequest], + ~.Comment]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_comment" not in self._stubs: + self._stubs["get_comment"] = self.grpc_channel.unary_unary( + "/google.cloud.gdchardwaremanagement.v1alpha.GDCHardwareManagement/GetComment", + request_serializer=service.GetCommentRequest.serialize, + response_deserializer=resources.Comment.deserialize, + ) + return self._stubs["get_comment"] + + @property + def create_comment( + self, + ) -> Callable[[service.CreateCommentRequest], operations_pb2.Operation]: + r"""Return a callable for the create comment method over gRPC. + + Creates a new comment on an order. + + Returns: + Callable[[~.CreateCommentRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_comment" not in self._stubs: + self._stubs["create_comment"] = self.grpc_channel.unary_unary( + "/google.cloud.gdchardwaremanagement.v1alpha.GDCHardwareManagement/CreateComment", + request_serializer=service.CreateCommentRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["create_comment"] + + @property + def list_change_log_entries( + self, + ) -> Callable[ + [service.ListChangeLogEntriesRequest], service.ListChangeLogEntriesResponse + ]: + r"""Return a callable for the list change log entries method over gRPC. + + Lists the changes made to an order. + + Returns: + Callable[[~.ListChangeLogEntriesRequest], + ~.ListChangeLogEntriesResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_change_log_entries" not in self._stubs: + self._stubs["list_change_log_entries"] = self.grpc_channel.unary_unary( + "/google.cloud.gdchardwaremanagement.v1alpha.GDCHardwareManagement/ListChangeLogEntries", + request_serializer=service.ListChangeLogEntriesRequest.serialize, + response_deserializer=service.ListChangeLogEntriesResponse.deserialize, + ) + return self._stubs["list_change_log_entries"] + + @property + def get_change_log_entry( + self, + ) -> Callable[[service.GetChangeLogEntryRequest], resources.ChangeLogEntry]: + r"""Return a callable for the get change log entry method over gRPC. + + Gets details of a change to an order. 
+ + Returns: + Callable[[~.GetChangeLogEntryRequest], + ~.ChangeLogEntry]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_change_log_entry" not in self._stubs: + self._stubs["get_change_log_entry"] = self.grpc_channel.unary_unary( + "/google.cloud.gdchardwaremanagement.v1alpha.GDCHardwareManagement/GetChangeLogEntry", + request_serializer=service.GetChangeLogEntryRequest.serialize, + response_deserializer=resources.ChangeLogEntry.deserialize, + ) + return self._stubs["get_change_log_entry"] + + @property + def list_skus( + self, + ) -> Callable[[service.ListSkusRequest], service.ListSkusResponse]: + r"""Return a callable for the list skus method over gRPC. + + Lists SKUs for a given project and location. + + Returns: + Callable[[~.ListSkusRequest], + ~.ListSkusResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_skus" not in self._stubs: + self._stubs["list_skus"] = self.grpc_channel.unary_unary( + "/google.cloud.gdchardwaremanagement.v1alpha.GDCHardwareManagement/ListSkus", + request_serializer=service.ListSkusRequest.serialize, + response_deserializer=service.ListSkusResponse.deserialize, + ) + return self._stubs["list_skus"] + + @property + def get_sku(self) -> Callable[[service.GetSkuRequest], resources.Sku]: + r"""Return a callable for the get sku method over gRPC. + + Gets details of an SKU. + + Returns: + Callable[[~.GetSkuRequest], + ~.Sku]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_sku" not in self._stubs: + self._stubs["get_sku"] = self.grpc_channel.unary_unary( + "/google.cloud.gdchardwaremanagement.v1alpha.GDCHardwareManagement/GetSku", + request_serializer=service.GetSkuRequest.serialize, + response_deserializer=resources.Sku.deserialize, + ) + return self._stubs["get_sku"] + + @property + def list_zones( + self, + ) -> Callable[[service.ListZonesRequest], service.ListZonesResponse]: + r"""Return a callable for the list zones method over gRPC. + + Lists zones in a given project and location. + + Returns: + Callable[[~.ListZonesRequest], + ~.ListZonesResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_zones" not in self._stubs: + self._stubs["list_zones"] = self.grpc_channel.unary_unary( + "/google.cloud.gdchardwaremanagement.v1alpha.GDCHardwareManagement/ListZones", + request_serializer=service.ListZonesRequest.serialize, + response_deserializer=service.ListZonesResponse.deserialize, + ) + return self._stubs["list_zones"] + + @property + def get_zone(self) -> Callable[[service.GetZoneRequest], resources.Zone]: + r"""Return a callable for the get zone method over gRPC. + + Gets details of a zone. 
+ + Returns: + Callable[[~.GetZoneRequest], + ~.Zone]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_zone" not in self._stubs: + self._stubs["get_zone"] = self.grpc_channel.unary_unary( + "/google.cloud.gdchardwaremanagement.v1alpha.GDCHardwareManagement/GetZone", + request_serializer=service.GetZoneRequest.serialize, + response_deserializer=resources.Zone.deserialize, + ) + return self._stubs["get_zone"] + + @property + def create_zone( + self, + ) -> Callable[[service.CreateZoneRequest], operations_pb2.Operation]: + r"""Return a callable for the create zone method over gRPC. + + Creates a new zone in a given project and location. + + Returns: + Callable[[~.CreateZoneRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_zone" not in self._stubs: + self._stubs["create_zone"] = self.grpc_channel.unary_unary( + "/google.cloud.gdchardwaremanagement.v1alpha.GDCHardwareManagement/CreateZone", + request_serializer=service.CreateZoneRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["create_zone"] + + @property + def update_zone( + self, + ) -> Callable[[service.UpdateZoneRequest], operations_pb2.Operation]: + r"""Return a callable for the update zone method over gRPC. + + Updates the parameters of a zone. + + Returns: + Callable[[~.UpdateZoneRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_zone" not in self._stubs: + self._stubs["update_zone"] = self.grpc_channel.unary_unary( + "/google.cloud.gdchardwaremanagement.v1alpha.GDCHardwareManagement/UpdateZone", + request_serializer=service.UpdateZoneRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["update_zone"] + + @property + def delete_zone( + self, + ) -> Callable[[service.DeleteZoneRequest], operations_pb2.Operation]: + r"""Return a callable for the delete zone method over gRPC. + + Deletes a zone. + + Returns: + Callable[[~.DeleteZoneRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_zone" not in self._stubs: + self._stubs["delete_zone"] = self.grpc_channel.unary_unary( + "/google.cloud.gdchardwaremanagement.v1alpha.GDCHardwareManagement/DeleteZone", + request_serializer=service.DeleteZoneRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["delete_zone"] + + @property + def signal_zone_state( + self, + ) -> Callable[[service.SignalZoneStateRequest], operations_pb2.Operation]: + r"""Return a callable for the signal zone state method over gRPC. + + Signals the state of a zone. 
+ + Returns: + Callable[[~.SignalZoneStateRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "signal_zone_state" not in self._stubs: + self._stubs["signal_zone_state"] = self.grpc_channel.unary_unary( + "/google.cloud.gdchardwaremanagement.v1alpha.GDCHardwareManagement/SignalZoneState", + request_serializer=service.SignalZoneStateRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["signal_zone_state"] + + def close(self): + self.grpc_channel.close() + + @property + def delete_operation( + self, + ) -> Callable[[operations_pb2.DeleteOperationRequest], None]: + r"""Return a callable for the delete_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_operation" not in self._stubs: + self._stubs["delete_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/DeleteOperation", + request_serializer=operations_pb2.DeleteOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["delete_operation"] + + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None]: + r"""Return a callable for the cancel_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "cancel_operation" not in self._stubs: + self._stubs["cancel_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/CancelOperation", + request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["cancel_operation"] + + @property + def get_operation( + self, + ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: + r"""Return a callable for the get_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_operation" not in self._stubs: + self._stubs["get_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/GetOperation", + request_serializer=operations_pb2.GetOperationRequest.SerializeToString, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["get_operation"] + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse + ]: + r"""Return a callable for the list_operations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
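+ # Note: this and the neighbouring Operations/Locations callables target
+ # the shared ``google.longrunning.Operations`` and
+ # ``google.cloud.location.Locations`` services rather than the
+ # GDCHardwareManagement service itself.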
+ if "list_operations" not in self._stubs: + self._stubs["list_operations"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/ListOperations", + request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, + response_deserializer=operations_pb2.ListOperationsResponse.FromString, + ) + return self._stubs["list_operations"] + + @property + def list_locations( + self, + ) -> Callable[ + [locations_pb2.ListLocationsRequest], locations_pb2.ListLocationsResponse + ]: + r"""Return a callable for the list locations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_locations" not in self._stubs: + self._stubs["list_locations"] = self.grpc_channel.unary_unary( + "/google.cloud.location.Locations/ListLocations", + request_serializer=locations_pb2.ListLocationsRequest.SerializeToString, + response_deserializer=locations_pb2.ListLocationsResponse.FromString, + ) + return self._stubs["list_locations"] + + @property + def get_location( + self, + ) -> Callable[[locations_pb2.GetLocationRequest], locations_pb2.Location]: + r"""Return a callable for the list locations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_location" not in self._stubs: + self._stubs["get_location"] = self.grpc_channel.unary_unary( + "/google.cloud.location.Locations/GetLocation", + request_serializer=locations_pb2.GetLocationRequest.SerializeToString, + response_deserializer=locations_pb2.Location.FromString, + ) + return self._stubs["get_location"] + + @property + def kind(self) -> str: + return "grpc" + + +__all__ = ("GDCHardwareManagementGrpcTransport",) diff --git a/packages/google-cloud-gdchardwaremanagement/google/cloud/gdchardwaremanagement_v1alpha/services/gdc_hardware_management/transports/grpc_asyncio.py b/packages/google-cloud-gdchardwaremanagement/google/cloud/gdchardwaremanagement_v1alpha/services/gdc_hardware_management/transports/grpc_asyncio.py new file mode 100644 index 000000000000..7789c520aab6 --- /dev/null +++ b/packages/google-cloud-gdchardwaremanagement/google/cloud/gdchardwaremanagement_v1alpha/services/gdc_hardware_management/transports/grpc_asyncio.py @@ -0,0 +1,1647 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+#
+from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union
+import warnings
+
+from google.api_core import exceptions as core_exceptions
+from google.api_core import gapic_v1, grpc_helpers_async, operations_v1
+from google.api_core import retry_async as retries
+from google.auth import credentials as ga_credentials # type: ignore
+from google.auth.transport.grpc import SslCredentials # type: ignore
+from google.cloud.location import locations_pb2 # type: ignore
+from google.longrunning import operations_pb2 # type: ignore
+import grpc # type: ignore
+from grpc.experimental import aio # type: ignore
+
+from google.cloud.gdchardwaremanagement_v1alpha.types import resources, service
+
+from .base import DEFAULT_CLIENT_INFO, GDCHardwareManagementTransport
+from .grpc import GDCHardwareManagementGrpcTransport
+
+
+class GDCHardwareManagementGrpcAsyncIOTransport(GDCHardwareManagementTransport):
+ """gRPC AsyncIO backend transport for GDCHardwareManagement.
+
+ The GDC Hardware Management service.
+
+ This class defines the same methods as the primary client, so the
+ primary client can load the underlying transport implementation
+ and call it.
+
+ It sends protocol buffers over the wire using gRPC (which is built on
+ top of HTTP/2); the ``grpcio`` package must be installed.
+ """
+
+ _grpc_channel: aio.Channel
+ _stubs: Dict[str, Callable] = {}
+
+ @classmethod
+ def create_channel(
+ cls,
+ host: str = "gdchardwaremanagement.googleapis.com",
+ credentials: Optional[ga_credentials.Credentials] = None,
+ credentials_file: Optional[str] = None,
+ scopes: Optional[Sequence[str]] = None,
+ quota_project_id: Optional[str] = None,
+ **kwargs,
+ ) -> aio.Channel:
+ """Create and return a gRPC AsyncIO channel object.
+ Args:
+ host (Optional[str]): The host for the channel to use.
+ credentials (Optional[~.Credentials]): The
+ authorization credentials to attach to requests. These
+ credentials identify this application to the service. If
+ none are specified, the client will attempt to ascertain
+ the credentials from the environment.
+ credentials_file (Optional[str]): A file with credentials that can
+ be loaded with :func:`google.auth.load_credentials_from_file`.
+ scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
+ service. These are only used when credentials are not specified and
+ are passed to :func:`google.auth.default`.
+ quota_project_id (Optional[str]): An optional project to use for billing
+ and quota.
+ kwargs (Optional[dict]): Keyword arguments, which are passed to the
+ channel creation.
+ Returns:
+ aio.Channel: A gRPC AsyncIO channel object.
+ """ + + return grpc_helpers_async.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs, + ) + + def __init__( + self, + *, + host: str = "gdchardwaremanagement.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[Union[aio.Channel, Callable[..., aio.Channel]]] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'gdchardwaremanagement.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if a ``channel`` instance is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if a ``channel`` instance is provided. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + channel (Optional[Union[aio.Channel, Callable[..., aio.Channel]]]): + A ``Channel`` instance through which to make calls, or a Callable + that constructs and returns one. If set to None, ``self.create_channel`` + is used to create the channel. If a Callable is given, it will be called + with the same arguments as used in ``self.create_channel``. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if a ``channel`` instance is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. 
+ Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + self._operations_client: Optional[operations_v1.OperationsAsyncClient] = None + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if isinstance(channel, aio.Channel): + # Ignore credentials if a channel was passed. + credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + # initialize with the provided callable or the default channel + channel_init = channel or type(self).create_channel + self._grpc_channel = channel_init( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @property + def grpc_channel(self) -> aio.Channel: + """Create the channel designed to connect to this service. + + This property caches on the instance; repeated calls return + the same channel. + """ + # Return the channel from cache. + return self._grpc_channel + + @property + def operations_client(self) -> operations_v1.OperationsAsyncClient: + """Create the client designed to process long-running operations. + + This property caches on the instance; repeated calls return the same + client. + """ + # Quick check: Only create a new client if we do not already have one. + if self._operations_client is None: + self._operations_client = operations_v1.OperationsAsyncClient( + self.grpc_channel + ) + + # Return the client from cache. 
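+        # The cached client shares this transport's gRPC channel and is what
+        # the async client surface uses to poll long-running operations
+        # returned by RPCs such as CreateOrder. A minimal usage sketch, where
+        # ``operation_name`` is a placeholder for a fully-qualified operation
+        # resource name:
+        #   operation = await self.operations_client.get_operation(name=operation_name)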
+ return self._operations_client + + @property + def list_orders( + self, + ) -> Callable[[service.ListOrdersRequest], Awaitable[service.ListOrdersResponse]]: + r"""Return a callable for the list orders method over gRPC. + + Lists orders in a given project and location. + + Returns: + Callable[[~.ListOrdersRequest], + Awaitable[~.ListOrdersResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_orders" not in self._stubs: + self._stubs["list_orders"] = self.grpc_channel.unary_unary( + "/google.cloud.gdchardwaremanagement.v1alpha.GDCHardwareManagement/ListOrders", + request_serializer=service.ListOrdersRequest.serialize, + response_deserializer=service.ListOrdersResponse.deserialize, + ) + return self._stubs["list_orders"] + + @property + def get_order( + self, + ) -> Callable[[service.GetOrderRequest], Awaitable[resources.Order]]: + r"""Return a callable for the get order method over gRPC. + + Gets details of an order. + + Returns: + Callable[[~.GetOrderRequest], + Awaitable[~.Order]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_order" not in self._stubs: + self._stubs["get_order"] = self.grpc_channel.unary_unary( + "/google.cloud.gdchardwaremanagement.v1alpha.GDCHardwareManagement/GetOrder", + request_serializer=service.GetOrderRequest.serialize, + response_deserializer=resources.Order.deserialize, + ) + return self._stubs["get_order"] + + @property + def create_order( + self, + ) -> Callable[[service.CreateOrderRequest], Awaitable[operations_pb2.Operation]]: + r"""Return a callable for the create order method over gRPC. + + Creates a new order in a given project and location. + + Returns: + Callable[[~.CreateOrderRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_order" not in self._stubs: + self._stubs["create_order"] = self.grpc_channel.unary_unary( + "/google.cloud.gdchardwaremanagement.v1alpha.GDCHardwareManagement/CreateOrder", + request_serializer=service.CreateOrderRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["create_order"] + + @property + def update_order( + self, + ) -> Callable[[service.UpdateOrderRequest], Awaitable[operations_pb2.Operation]]: + r"""Return a callable for the update order method over gRPC. + + Updates the parameters of an order. + + Returns: + Callable[[~.UpdateOrderRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
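+        # Like the other mutating RPCs on this transport, UpdateOrder resolves
+        # to a google.longrunning.Operation (note the Operation deserializer
+        # below); callers poll that operation, for example via
+        # ``operations_client`` above, until the updated Order is available.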
+ if "update_order" not in self._stubs: + self._stubs["update_order"] = self.grpc_channel.unary_unary( + "/google.cloud.gdchardwaremanagement.v1alpha.GDCHardwareManagement/UpdateOrder", + request_serializer=service.UpdateOrderRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["update_order"] + + @property + def delete_order( + self, + ) -> Callable[[service.DeleteOrderRequest], Awaitable[operations_pb2.Operation]]: + r"""Return a callable for the delete order method over gRPC. + + Deletes an order. + + Returns: + Callable[[~.DeleteOrderRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_order" not in self._stubs: + self._stubs["delete_order"] = self.grpc_channel.unary_unary( + "/google.cloud.gdchardwaremanagement.v1alpha.GDCHardwareManagement/DeleteOrder", + request_serializer=service.DeleteOrderRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["delete_order"] + + @property + def submit_order( + self, + ) -> Callable[[service.SubmitOrderRequest], Awaitable[operations_pb2.Operation]]: + r"""Return a callable for the submit order method over gRPC. + + Submits an order. + + Returns: + Callable[[~.SubmitOrderRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "submit_order" not in self._stubs: + self._stubs["submit_order"] = self.grpc_channel.unary_unary( + "/google.cloud.gdchardwaremanagement.v1alpha.GDCHardwareManagement/SubmitOrder", + request_serializer=service.SubmitOrderRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["submit_order"] + + @property + def list_sites( + self, + ) -> Callable[[service.ListSitesRequest], Awaitable[service.ListSitesResponse]]: + r"""Return a callable for the list sites method over gRPC. + + Lists sites in a given project and location. + + Returns: + Callable[[~.ListSitesRequest], + Awaitable[~.ListSitesResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_sites" not in self._stubs: + self._stubs["list_sites"] = self.grpc_channel.unary_unary( + "/google.cloud.gdchardwaremanagement.v1alpha.GDCHardwareManagement/ListSites", + request_serializer=service.ListSitesRequest.serialize, + response_deserializer=service.ListSitesResponse.deserialize, + ) + return self._stubs["list_sites"] + + @property + def get_site(self) -> Callable[[service.GetSiteRequest], Awaitable[resources.Site]]: + r"""Return a callable for the get site method over gRPC. + + Gets details of a site. + + Returns: + Callable[[~.GetSiteRequest], + Awaitable[~.Site]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_site" not in self._stubs: + self._stubs["get_site"] = self.grpc_channel.unary_unary( + "/google.cloud.gdchardwaremanagement.v1alpha.GDCHardwareManagement/GetSite", + request_serializer=service.GetSiteRequest.serialize, + response_deserializer=resources.Site.deserialize, + ) + return self._stubs["get_site"] + + @property + def create_site( + self, + ) -> Callable[[service.CreateSiteRequest], Awaitable[operations_pb2.Operation]]: + r"""Return a callable for the create site method over gRPC. + + Creates a new site in a given project and location. + + Returns: + Callable[[~.CreateSiteRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_site" not in self._stubs: + self._stubs["create_site"] = self.grpc_channel.unary_unary( + "/google.cloud.gdchardwaremanagement.v1alpha.GDCHardwareManagement/CreateSite", + request_serializer=service.CreateSiteRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["create_site"] + + @property + def update_site( + self, + ) -> Callable[[service.UpdateSiteRequest], Awaitable[operations_pb2.Operation]]: + r"""Return a callable for the update site method over gRPC. + + Updates the parameters of a site. + + Returns: + Callable[[~.UpdateSiteRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_site" not in self._stubs: + self._stubs["update_site"] = self.grpc_channel.unary_unary( + "/google.cloud.gdchardwaremanagement.v1alpha.GDCHardwareManagement/UpdateSite", + request_serializer=service.UpdateSiteRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["update_site"] + + @property + def list_hardware_groups( + self, + ) -> Callable[ + [service.ListHardwareGroupsRequest], + Awaitable[service.ListHardwareGroupsResponse], + ]: + r"""Return a callable for the list hardware groups method over gRPC. + + Lists hardware groups in a given order. + + Returns: + Callable[[~.ListHardwareGroupsRequest], + Awaitable[~.ListHardwareGroupsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_hardware_groups" not in self._stubs: + self._stubs["list_hardware_groups"] = self.grpc_channel.unary_unary( + "/google.cloud.gdchardwaremanagement.v1alpha.GDCHardwareManagement/ListHardwareGroups", + request_serializer=service.ListHardwareGroupsRequest.serialize, + response_deserializer=service.ListHardwareGroupsResponse.deserialize, + ) + return self._stubs["list_hardware_groups"] + + @property + def get_hardware_group( + self, + ) -> Callable[ + [service.GetHardwareGroupRequest], Awaitable[resources.HardwareGroup] + ]: + r"""Return a callable for the get hardware group method over gRPC. 
+ + Gets details of a hardware group. + + Returns: + Callable[[~.GetHardwareGroupRequest], + Awaitable[~.HardwareGroup]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_hardware_group" not in self._stubs: + self._stubs["get_hardware_group"] = self.grpc_channel.unary_unary( + "/google.cloud.gdchardwaremanagement.v1alpha.GDCHardwareManagement/GetHardwareGroup", + request_serializer=service.GetHardwareGroupRequest.serialize, + response_deserializer=resources.HardwareGroup.deserialize, + ) + return self._stubs["get_hardware_group"] + + @property + def create_hardware_group( + self, + ) -> Callable[ + [service.CreateHardwareGroupRequest], Awaitable[operations_pb2.Operation] + ]: + r"""Return a callable for the create hardware group method over gRPC. + + Creates a new hardware group in a given order. + + Returns: + Callable[[~.CreateHardwareGroupRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_hardware_group" not in self._stubs: + self._stubs["create_hardware_group"] = self.grpc_channel.unary_unary( + "/google.cloud.gdchardwaremanagement.v1alpha.GDCHardwareManagement/CreateHardwareGroup", + request_serializer=service.CreateHardwareGroupRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["create_hardware_group"] + + @property + def update_hardware_group( + self, + ) -> Callable[ + [service.UpdateHardwareGroupRequest], Awaitable[operations_pb2.Operation] + ]: + r"""Return a callable for the update hardware group method over gRPC. + + Updates the parameters of a hardware group. + + Returns: + Callable[[~.UpdateHardwareGroupRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_hardware_group" not in self._stubs: + self._stubs["update_hardware_group"] = self.grpc_channel.unary_unary( + "/google.cloud.gdchardwaremanagement.v1alpha.GDCHardwareManagement/UpdateHardwareGroup", + request_serializer=service.UpdateHardwareGroupRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["update_hardware_group"] + + @property + def delete_hardware_group( + self, + ) -> Callable[ + [service.DeleteHardwareGroupRequest], Awaitable[operations_pb2.Operation] + ]: + r"""Return a callable for the delete hardware group method over gRPC. + + Deletes a hardware group. + + Returns: + Callable[[~.DeleteHardwareGroupRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
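+        # The target string below is the fully-qualified gRPC method path,
+        # "/<proto package>.<service>/<RPC>", which is how gRPC routes this
+        # request to the DeleteHardwareGroup handler on the server.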
+ if "delete_hardware_group" not in self._stubs: + self._stubs["delete_hardware_group"] = self.grpc_channel.unary_unary( + "/google.cloud.gdchardwaremanagement.v1alpha.GDCHardwareManagement/DeleteHardwareGroup", + request_serializer=service.DeleteHardwareGroupRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["delete_hardware_group"] + + @property + def list_hardware( + self, + ) -> Callable[ + [service.ListHardwareRequest], Awaitable[service.ListHardwareResponse] + ]: + r"""Return a callable for the list hardware method over gRPC. + + Lists hardware in a given project and location. + + Returns: + Callable[[~.ListHardwareRequest], + Awaitable[~.ListHardwareResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_hardware" not in self._stubs: + self._stubs["list_hardware"] = self.grpc_channel.unary_unary( + "/google.cloud.gdchardwaremanagement.v1alpha.GDCHardwareManagement/ListHardware", + request_serializer=service.ListHardwareRequest.serialize, + response_deserializer=service.ListHardwareResponse.deserialize, + ) + return self._stubs["list_hardware"] + + @property + def get_hardware( + self, + ) -> Callable[[service.GetHardwareRequest], Awaitable[resources.Hardware]]: + r"""Return a callable for the get hardware method over gRPC. + + Gets hardware details. + + Returns: + Callable[[~.GetHardwareRequest], + Awaitable[~.Hardware]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_hardware" not in self._stubs: + self._stubs["get_hardware"] = self.grpc_channel.unary_unary( + "/google.cloud.gdchardwaremanagement.v1alpha.GDCHardwareManagement/GetHardware", + request_serializer=service.GetHardwareRequest.serialize, + response_deserializer=resources.Hardware.deserialize, + ) + return self._stubs["get_hardware"] + + @property + def create_hardware( + self, + ) -> Callable[[service.CreateHardwareRequest], Awaitable[operations_pb2.Operation]]: + r"""Return a callable for the create hardware method over gRPC. + + Creates new hardware in a given project and location. + + Returns: + Callable[[~.CreateHardwareRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_hardware" not in self._stubs: + self._stubs["create_hardware"] = self.grpc_channel.unary_unary( + "/google.cloud.gdchardwaremanagement.v1alpha.GDCHardwareManagement/CreateHardware", + request_serializer=service.CreateHardwareRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["create_hardware"] + + @property + def update_hardware( + self, + ) -> Callable[[service.UpdateHardwareRequest], Awaitable[operations_pb2.Operation]]: + r"""Return a callable for the update hardware method over gRPC. + + Updates hardware parameters. 
+ + Returns: + Callable[[~.UpdateHardwareRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_hardware" not in self._stubs: + self._stubs["update_hardware"] = self.grpc_channel.unary_unary( + "/google.cloud.gdchardwaremanagement.v1alpha.GDCHardwareManagement/UpdateHardware", + request_serializer=service.UpdateHardwareRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["update_hardware"] + + @property + def delete_hardware( + self, + ) -> Callable[[service.DeleteHardwareRequest], Awaitable[operations_pb2.Operation]]: + r"""Return a callable for the delete hardware method over gRPC. + + Deletes hardware. + + Returns: + Callable[[~.DeleteHardwareRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_hardware" not in self._stubs: + self._stubs["delete_hardware"] = self.grpc_channel.unary_unary( + "/google.cloud.gdchardwaremanagement.v1alpha.GDCHardwareManagement/DeleteHardware", + request_serializer=service.DeleteHardwareRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["delete_hardware"] + + @property + def list_comments( + self, + ) -> Callable[ + [service.ListCommentsRequest], Awaitable[service.ListCommentsResponse] + ]: + r"""Return a callable for the list comments method over gRPC. + + Lists the comments on an order. + + Returns: + Callable[[~.ListCommentsRequest], + Awaitable[~.ListCommentsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_comments" not in self._stubs: + self._stubs["list_comments"] = self.grpc_channel.unary_unary( + "/google.cloud.gdchardwaremanagement.v1alpha.GDCHardwareManagement/ListComments", + request_serializer=service.ListCommentsRequest.serialize, + response_deserializer=service.ListCommentsResponse.deserialize, + ) + return self._stubs["list_comments"] + + @property + def get_comment( + self, + ) -> Callable[[service.GetCommentRequest], Awaitable[resources.Comment]]: + r"""Return a callable for the get comment method over gRPC. + + Gets the content of a comment. + + Returns: + Callable[[~.GetCommentRequest], + Awaitable[~.Comment]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
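+        # GetComment uses the proto-plus ``serialize``/``deserialize`` helpers
+        # of the request and resource types; the long-running-operation RPCs in
+        # this file instead deserialize raw protobuf via ``Operation.FromString``.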
+ if "get_comment" not in self._stubs: + self._stubs["get_comment"] = self.grpc_channel.unary_unary( + "/google.cloud.gdchardwaremanagement.v1alpha.GDCHardwareManagement/GetComment", + request_serializer=service.GetCommentRequest.serialize, + response_deserializer=resources.Comment.deserialize, + ) + return self._stubs["get_comment"] + + @property + def create_comment( + self, + ) -> Callable[[service.CreateCommentRequest], Awaitable[operations_pb2.Operation]]: + r"""Return a callable for the create comment method over gRPC. + + Creates a new comment on an order. + + Returns: + Callable[[~.CreateCommentRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_comment" not in self._stubs: + self._stubs["create_comment"] = self.grpc_channel.unary_unary( + "/google.cloud.gdchardwaremanagement.v1alpha.GDCHardwareManagement/CreateComment", + request_serializer=service.CreateCommentRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["create_comment"] + + @property + def list_change_log_entries( + self, + ) -> Callable[ + [service.ListChangeLogEntriesRequest], + Awaitable[service.ListChangeLogEntriesResponse], + ]: + r"""Return a callable for the list change log entries method over gRPC. + + Lists the changes made to an order. + + Returns: + Callable[[~.ListChangeLogEntriesRequest], + Awaitable[~.ListChangeLogEntriesResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_change_log_entries" not in self._stubs: + self._stubs["list_change_log_entries"] = self.grpc_channel.unary_unary( + "/google.cloud.gdchardwaremanagement.v1alpha.GDCHardwareManagement/ListChangeLogEntries", + request_serializer=service.ListChangeLogEntriesRequest.serialize, + response_deserializer=service.ListChangeLogEntriesResponse.deserialize, + ) + return self._stubs["list_change_log_entries"] + + @property + def get_change_log_entry( + self, + ) -> Callable[ + [service.GetChangeLogEntryRequest], Awaitable[resources.ChangeLogEntry] + ]: + r"""Return a callable for the get change log entry method over gRPC. + + Gets details of a change to an order. + + Returns: + Callable[[~.GetChangeLogEntryRequest], + Awaitable[~.ChangeLogEntry]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_change_log_entry" not in self._stubs: + self._stubs["get_change_log_entry"] = self.grpc_channel.unary_unary( + "/google.cloud.gdchardwaremanagement.v1alpha.GDCHardwareManagement/GetChangeLogEntry", + request_serializer=service.GetChangeLogEntryRequest.serialize, + response_deserializer=resources.ChangeLogEntry.deserialize, + ) + return self._stubs["get_change_log_entry"] + + @property + def list_skus( + self, + ) -> Callable[[service.ListSkusRequest], Awaitable[service.ListSkusResponse]]: + r"""Return a callable for the list skus method over gRPC. 
+ + Lists SKUs for a given project and location. + + Returns: + Callable[[~.ListSkusRequest], + Awaitable[~.ListSkusResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_skus" not in self._stubs: + self._stubs["list_skus"] = self.grpc_channel.unary_unary( + "/google.cloud.gdchardwaremanagement.v1alpha.GDCHardwareManagement/ListSkus", + request_serializer=service.ListSkusRequest.serialize, + response_deserializer=service.ListSkusResponse.deserialize, + ) + return self._stubs["list_skus"] + + @property + def get_sku(self) -> Callable[[service.GetSkuRequest], Awaitable[resources.Sku]]: + r"""Return a callable for the get sku method over gRPC. + + Gets details of an SKU. + + Returns: + Callable[[~.GetSkuRequest], + Awaitable[~.Sku]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_sku" not in self._stubs: + self._stubs["get_sku"] = self.grpc_channel.unary_unary( + "/google.cloud.gdchardwaremanagement.v1alpha.GDCHardwareManagement/GetSku", + request_serializer=service.GetSkuRequest.serialize, + response_deserializer=resources.Sku.deserialize, + ) + return self._stubs["get_sku"] + + @property + def list_zones( + self, + ) -> Callable[[service.ListZonesRequest], Awaitable[service.ListZonesResponse]]: + r"""Return a callable for the list zones method over gRPC. + + Lists zones in a given project and location. + + Returns: + Callable[[~.ListZonesRequest], + Awaitable[~.ListZonesResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_zones" not in self._stubs: + self._stubs["list_zones"] = self.grpc_channel.unary_unary( + "/google.cloud.gdchardwaremanagement.v1alpha.GDCHardwareManagement/ListZones", + request_serializer=service.ListZonesRequest.serialize, + response_deserializer=service.ListZonesResponse.deserialize, + ) + return self._stubs["list_zones"] + + @property + def get_zone(self) -> Callable[[service.GetZoneRequest], Awaitable[resources.Zone]]: + r"""Return a callable for the get zone method over gRPC. + + Gets details of a zone. + + Returns: + Callable[[~.GetZoneRequest], + Awaitable[~.Zone]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_zone" not in self._stubs: + self._stubs["get_zone"] = self.grpc_channel.unary_unary( + "/google.cloud.gdchardwaremanagement.v1alpha.GDCHardwareManagement/GetZone", + request_serializer=service.GetZoneRequest.serialize, + response_deserializer=resources.Zone.deserialize, + ) + return self._stubs["get_zone"] + + @property + def create_zone( + self, + ) -> Callable[[service.CreateZoneRequest], Awaitable[operations_pb2.Operation]]: + r"""Return a callable for the create zone method over gRPC. 
+ + Creates a new zone in a given project and location. + + Returns: + Callable[[~.CreateZoneRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_zone" not in self._stubs: + self._stubs["create_zone"] = self.grpc_channel.unary_unary( + "/google.cloud.gdchardwaremanagement.v1alpha.GDCHardwareManagement/CreateZone", + request_serializer=service.CreateZoneRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["create_zone"] + + @property + def update_zone( + self, + ) -> Callable[[service.UpdateZoneRequest], Awaitable[operations_pb2.Operation]]: + r"""Return a callable for the update zone method over gRPC. + + Updates the parameters of a zone. + + Returns: + Callable[[~.UpdateZoneRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_zone" not in self._stubs: + self._stubs["update_zone"] = self.grpc_channel.unary_unary( + "/google.cloud.gdchardwaremanagement.v1alpha.GDCHardwareManagement/UpdateZone", + request_serializer=service.UpdateZoneRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["update_zone"] + + @property + def delete_zone( + self, + ) -> Callable[[service.DeleteZoneRequest], Awaitable[operations_pb2.Operation]]: + r"""Return a callable for the delete zone method over gRPC. + + Deletes a zone. + + Returns: + Callable[[~.DeleteZoneRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_zone" not in self._stubs: + self._stubs["delete_zone"] = self.grpc_channel.unary_unary( + "/google.cloud.gdchardwaremanagement.v1alpha.GDCHardwareManagement/DeleteZone", + request_serializer=service.DeleteZoneRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["delete_zone"] + + @property + def signal_zone_state( + self, + ) -> Callable[ + [service.SignalZoneStateRequest], Awaitable[operations_pb2.Operation] + ]: + r"""Return a callable for the signal zone state method over gRPC. + + Signals the state of a zone. + + Returns: + Callable[[~.SignalZoneStateRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
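+        # SignalZoneState is wrapped without a default retry and with
+        # ``default_timeout=None`` in ``_prep_wrapped_messages`` below; the
+        # read RPCs there share one retry pattern (1.0s initial backoff,
+        # 1.3x multiplier, 10.0s cap, retry only on ServiceUnavailable,
+        # 60s deadline), while the create_* RPCs also get no default retry.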
+ if "signal_zone_state" not in self._stubs: + self._stubs["signal_zone_state"] = self.grpc_channel.unary_unary( + "/google.cloud.gdchardwaremanagement.v1alpha.GDCHardwareManagement/SignalZoneState", + request_serializer=service.SignalZoneStateRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["signal_zone_state"] + + def _prep_wrapped_messages(self, client_info): + """Precompute the wrapped methods, overriding the base class method to use async wrappers.""" + self._wrapped_methods = { + self.list_orders: gapic_v1.method_async.wrap_method( + self.list_orders, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.get_order: gapic_v1.method_async.wrap_method( + self.get_order, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.create_order: gapic_v1.method_async.wrap_method( + self.create_order, + default_timeout=60.0, + client_info=client_info, + ), + self.update_order: gapic_v1.method_async.wrap_method( + self.update_order, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.delete_order: gapic_v1.method_async.wrap_method( + self.delete_order, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.submit_order: gapic_v1.method_async.wrap_method( + self.submit_order, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.list_sites: gapic_v1.method_async.wrap_method( + self.list_sites, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.get_site: gapic_v1.method_async.wrap_method( + self.get_site, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.create_site: gapic_v1.method_async.wrap_method( + self.create_site, + default_timeout=60.0, + client_info=client_info, + ), + self.update_site: gapic_v1.method_async.wrap_method( + self.update_site, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.list_hardware_groups: gapic_v1.method_async.wrap_method( + self.list_hardware_groups, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + 
predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.get_hardware_group: gapic_v1.method_async.wrap_method( + self.get_hardware_group, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.create_hardware_group: gapic_v1.method_async.wrap_method( + self.create_hardware_group, + default_timeout=60.0, + client_info=client_info, + ), + self.update_hardware_group: gapic_v1.method_async.wrap_method( + self.update_hardware_group, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.delete_hardware_group: gapic_v1.method_async.wrap_method( + self.delete_hardware_group, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.list_hardware: gapic_v1.method_async.wrap_method( + self.list_hardware, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.get_hardware: gapic_v1.method_async.wrap_method( + self.get_hardware, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.create_hardware: gapic_v1.method_async.wrap_method( + self.create_hardware, + default_timeout=60.0, + client_info=client_info, + ), + self.update_hardware: gapic_v1.method_async.wrap_method( + self.update_hardware, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.delete_hardware: gapic_v1.method_async.wrap_method( + self.delete_hardware, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.list_comments: gapic_v1.method_async.wrap_method( + self.list_comments, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.get_comment: gapic_v1.method_async.wrap_method( + self.get_comment, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.create_comment: gapic_v1.method_async.wrap_method( + self.create_comment, + default_timeout=60.0, + client_info=client_info, + ), + 
self.list_change_log_entries: gapic_v1.method_async.wrap_method( + self.list_change_log_entries, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.get_change_log_entry: gapic_v1.method_async.wrap_method( + self.get_change_log_entry, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.list_skus: gapic_v1.method_async.wrap_method( + self.list_skus, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.get_sku: gapic_v1.method_async.wrap_method( + self.get_sku, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.list_zones: gapic_v1.method_async.wrap_method( + self.list_zones, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.get_zone: gapic_v1.method_async.wrap_method( + self.get_zone, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.create_zone: gapic_v1.method_async.wrap_method( + self.create_zone, + default_timeout=60.0, + client_info=client_info, + ), + self.update_zone: gapic_v1.method_async.wrap_method( + self.update_zone, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.delete_zone: gapic_v1.method_async.wrap_method( + self.delete_zone, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.signal_zone_state: gapic_v1.method_async.wrap_method( + self.signal_zone_state, + default_timeout=None, + client_info=client_info, + ), + } + + def close(self): + return self.grpc_channel.close() + + @property + def delete_operation( + self, + ) -> Callable[[operations_pb2.DeleteOperationRequest], None]: + r"""Return a callable for the delete_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
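+        # The stubs from here on target the google.longrunning.Operations and
+        # google.cloud.location.Locations mixin services rather than
+        # GDCHardwareManagement itself, so their method paths and serializers
+        # come from operations_pb2 and locations_pb2.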
+        if "delete_operation" not in self._stubs:
+            self._stubs["delete_operation"] = self.grpc_channel.unary_unary(
+                "/google.longrunning.Operations/DeleteOperation",
+                request_serializer=operations_pb2.DeleteOperationRequest.SerializeToString,
+                response_deserializer=None,
+            )
+        return self._stubs["delete_operation"]
+
+    @property
+    def cancel_operation(
+        self,
+    ) -> Callable[[operations_pb2.CancelOperationRequest], None]:
+        r"""Return a callable for the cancel_operation method over gRPC."""
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if "cancel_operation" not in self._stubs:
+            self._stubs["cancel_operation"] = self.grpc_channel.unary_unary(
+                "/google.longrunning.Operations/CancelOperation",
+                request_serializer=operations_pb2.CancelOperationRequest.SerializeToString,
+                response_deserializer=None,
+            )
+        return self._stubs["cancel_operation"]
+
+    @property
+    def get_operation(
+        self,
+    ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]:
+        r"""Return a callable for the get_operation method over gRPC."""
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if "get_operation" not in self._stubs:
+            self._stubs["get_operation"] = self.grpc_channel.unary_unary(
+                "/google.longrunning.Operations/GetOperation",
+                request_serializer=operations_pb2.GetOperationRequest.SerializeToString,
+                response_deserializer=operations_pb2.Operation.FromString,
+            )
+        return self._stubs["get_operation"]
+
+    @property
+    def list_operations(
+        self,
+    ) -> Callable[
+        [operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse
+    ]:
+        r"""Return a callable for the list_operations method over gRPC."""
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if "list_operations" not in self._stubs:
+            self._stubs["list_operations"] = self.grpc_channel.unary_unary(
+                "/google.longrunning.Operations/ListOperations",
+                request_serializer=operations_pb2.ListOperationsRequest.SerializeToString,
+                response_deserializer=operations_pb2.ListOperationsResponse.FromString,
+            )
+        return self._stubs["list_operations"]
+
+    @property
+    def list_locations(
+        self,
+    ) -> Callable[
+        [locations_pb2.ListLocationsRequest], locations_pb2.ListLocationsResponse
+    ]:
+        r"""Return a callable for the list locations method over gRPC."""
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if "list_locations" not in self._stubs:
+            self._stubs["list_locations"] = self.grpc_channel.unary_unary(
+                "/google.cloud.location.Locations/ListLocations",
+                request_serializer=locations_pb2.ListLocationsRequest.SerializeToString,
+                response_deserializer=locations_pb2.ListLocationsResponse.FromString,
+            )
+        return self._stubs["list_locations"]
+
+    @property
+    def get_location(
+        self,
+    ) -> Callable[[locations_pb2.GetLocationRequest], locations_pb2.Location]:
+        r"""Return a callable for the get location method over gRPC."""
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_location" not in self._stubs: + self._stubs["get_location"] = self.grpc_channel.unary_unary( + "/google.cloud.location.Locations/GetLocation", + request_serializer=locations_pb2.GetLocationRequest.SerializeToString, + response_deserializer=locations_pb2.Location.FromString, + ) + return self._stubs["get_location"] + + +__all__ = ("GDCHardwareManagementGrpcAsyncIOTransport",) diff --git a/packages/google-cloud-gdchardwaremanagement/google/cloud/gdchardwaremanagement_v1alpha/services/gdc_hardware_management/transports/rest.py b/packages/google-cloud-gdchardwaremanagement/google/cloud/gdchardwaremanagement_v1alpha/services/gdc_hardware_management/transports/rest.py new file mode 100644 index 000000000000..b95064ee417e --- /dev/null +++ b/packages/google-cloud-gdchardwaremanagement/google/cloud/gdchardwaremanagement_v1alpha/services/gdc_hardware_management/transports/rest.py @@ -0,0 +1,4941 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +import dataclasses +import json # type: ignore +import re +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import ( + gapic_v1, + operations_v1, + path_template, + rest_helpers, + rest_streaming, +) +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.transport.requests import AuthorizedSession # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.protobuf import json_format +import grpc # type: ignore +from requests import __version__ as requests_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + + +from google.longrunning import operations_pb2 # type: ignore + +from google.cloud.gdchardwaremanagement_v1alpha.types import resources, service + +from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO +from .base import GDCHardwareManagementTransport + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) + + +class GDCHardwareManagementRestInterceptor: + """Interceptor for GDCHardwareManagement. + + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. 
+ Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the GDCHardwareManagementRestTransport. + + .. code-block:: python + class MyCustomGDCHardwareManagementInterceptor(GDCHardwareManagementRestInterceptor): + def pre_create_comment(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_create_comment(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_create_hardware(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_create_hardware(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_create_hardware_group(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_create_hardware_group(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_create_order(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_create_order(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_create_site(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_create_site(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_create_zone(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_create_zone(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_delete_hardware(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_delete_hardware(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_delete_hardware_group(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_delete_hardware_group(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_delete_order(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_delete_order(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_delete_zone(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_delete_zone(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_change_log_entry(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_change_log_entry(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_comment(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_comment(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_hardware(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_hardware(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_hardware_group(self, request, 
metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_hardware_group(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_order(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_order(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_site(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_site(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_sku(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_sku(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_zone(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_zone(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_change_log_entries(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_change_log_entries(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_comments(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_comments(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_hardware(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_hardware(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_hardware_groups(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_hardware_groups(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_orders(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_orders(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_sites(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_sites(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_skus(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_skus(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_zones(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_zones(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_signal_zone_state(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_signal_zone_state(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_submit_order(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_submit_order(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_update_hardware(self, request, metadata): + logging.log(f"Received request: 
{request}") + return request, metadata + + def post_update_hardware(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_update_hardware_group(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_update_hardware_group(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_update_order(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_update_order(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_update_site(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_update_site(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_update_zone(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_update_zone(self, response): + logging.log(f"Received response: {response}") + return response + + transport = GDCHardwareManagementRestTransport(interceptor=MyCustomGDCHardwareManagementInterceptor()) + client = GDCHardwareManagementClient(transport=transport) + + + """ + + def pre_create_comment( + self, request: service.CreateCommentRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[service.CreateCommentRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for create_comment + + Override in a subclass to manipulate the request or metadata + before they are sent to the GDCHardwareManagement server. + """ + return request, metadata + + def post_create_comment( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for create_comment + + Override in a subclass to manipulate the response + after it is returned by the GDCHardwareManagement server but before + it is returned to user code. + """ + return response + + def pre_create_hardware( + self, + request: service.CreateHardwareRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[service.CreateHardwareRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for create_hardware + + Override in a subclass to manipulate the request or metadata + before they are sent to the GDCHardwareManagement server. + """ + return request, metadata + + def post_create_hardware( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for create_hardware + + Override in a subclass to manipulate the response + after it is returned by the GDCHardwareManagement server but before + it is returned to user code. + """ + return response + + def pre_create_hardware_group( + self, + request: service.CreateHardwareGroupRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[service.CreateHardwareGroupRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for create_hardware_group + + Override in a subclass to manipulate the request or metadata + before they are sent to the GDCHardwareManagement server. + """ + return request, metadata + + def post_create_hardware_group( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for create_hardware_group + + Override in a subclass to manipulate the response + after it is returned by the GDCHardwareManagement server but before + it is returned to user code. 
+ """ + return response + + def pre_create_order( + self, request: service.CreateOrderRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[service.CreateOrderRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for create_order + + Override in a subclass to manipulate the request or metadata + before they are sent to the GDCHardwareManagement server. + """ + return request, metadata + + def post_create_order( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for create_order + + Override in a subclass to manipulate the response + after it is returned by the GDCHardwareManagement server but before + it is returned to user code. + """ + return response + + def pre_create_site( + self, request: service.CreateSiteRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[service.CreateSiteRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for create_site + + Override in a subclass to manipulate the request or metadata + before they are sent to the GDCHardwareManagement server. + """ + return request, metadata + + def post_create_site( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for create_site + + Override in a subclass to manipulate the response + after it is returned by the GDCHardwareManagement server but before + it is returned to user code. + """ + return response + + def pre_create_zone( + self, request: service.CreateZoneRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[service.CreateZoneRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for create_zone + + Override in a subclass to manipulate the request or metadata + before they are sent to the GDCHardwareManagement server. + """ + return request, metadata + + def post_create_zone( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for create_zone + + Override in a subclass to manipulate the response + after it is returned by the GDCHardwareManagement server but before + it is returned to user code. + """ + return response + + def pre_delete_hardware( + self, + request: service.DeleteHardwareRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[service.DeleteHardwareRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for delete_hardware + + Override in a subclass to manipulate the request or metadata + before they are sent to the GDCHardwareManagement server. + """ + return request, metadata + + def post_delete_hardware( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for delete_hardware + + Override in a subclass to manipulate the response + after it is returned by the GDCHardwareManagement server but before + it is returned to user code. + """ + return response + + def pre_delete_hardware_group( + self, + request: service.DeleteHardwareGroupRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[service.DeleteHardwareGroupRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for delete_hardware_group + + Override in a subclass to manipulate the request or metadata + before they are sent to the GDCHardwareManagement server. 
+ """ + return request, metadata + + def post_delete_hardware_group( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for delete_hardware_group + + Override in a subclass to manipulate the response + after it is returned by the GDCHardwareManagement server but before + it is returned to user code. + """ + return response + + def pre_delete_order( + self, request: service.DeleteOrderRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[service.DeleteOrderRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for delete_order + + Override in a subclass to manipulate the request or metadata + before they are sent to the GDCHardwareManagement server. + """ + return request, metadata + + def post_delete_order( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for delete_order + + Override in a subclass to manipulate the response + after it is returned by the GDCHardwareManagement server but before + it is returned to user code. + """ + return response + + def pre_delete_zone( + self, request: service.DeleteZoneRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[service.DeleteZoneRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for delete_zone + + Override in a subclass to manipulate the request or metadata + before they are sent to the GDCHardwareManagement server. + """ + return request, metadata + + def post_delete_zone( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for delete_zone + + Override in a subclass to manipulate the response + after it is returned by the GDCHardwareManagement server but before + it is returned to user code. + """ + return response + + def pre_get_change_log_entry( + self, + request: service.GetChangeLogEntryRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[service.GetChangeLogEntryRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_change_log_entry + + Override in a subclass to manipulate the request or metadata + before they are sent to the GDCHardwareManagement server. + """ + return request, metadata + + def post_get_change_log_entry( + self, response: resources.ChangeLogEntry + ) -> resources.ChangeLogEntry: + """Post-rpc interceptor for get_change_log_entry + + Override in a subclass to manipulate the response + after it is returned by the GDCHardwareManagement server but before + it is returned to user code. + """ + return response + + def pre_get_comment( + self, request: service.GetCommentRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[service.GetCommentRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_comment + + Override in a subclass to manipulate the request or metadata + before they are sent to the GDCHardwareManagement server. + """ + return request, metadata + + def post_get_comment(self, response: resources.Comment) -> resources.Comment: + """Post-rpc interceptor for get_comment + + Override in a subclass to manipulate the response + after it is returned by the GDCHardwareManagement server but before + it is returned to user code. + """ + return response + + def pre_get_hardware( + self, request: service.GetHardwareRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[service.GetHardwareRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_hardware + + Override in a subclass to manipulate the request or metadata + before they are sent to the GDCHardwareManagement server. 
+ """ + return request, metadata + + def post_get_hardware(self, response: resources.Hardware) -> resources.Hardware: + """Post-rpc interceptor for get_hardware + + Override in a subclass to manipulate the response + after it is returned by the GDCHardwareManagement server but before + it is returned to user code. + """ + return response + + def pre_get_hardware_group( + self, + request: service.GetHardwareGroupRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[service.GetHardwareGroupRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_hardware_group + + Override in a subclass to manipulate the request or metadata + before they are sent to the GDCHardwareManagement server. + """ + return request, metadata + + def post_get_hardware_group( + self, response: resources.HardwareGroup + ) -> resources.HardwareGroup: + """Post-rpc interceptor for get_hardware_group + + Override in a subclass to manipulate the response + after it is returned by the GDCHardwareManagement server but before + it is returned to user code. + """ + return response + + def pre_get_order( + self, request: service.GetOrderRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[service.GetOrderRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_order + + Override in a subclass to manipulate the request or metadata + before they are sent to the GDCHardwareManagement server. + """ + return request, metadata + + def post_get_order(self, response: resources.Order) -> resources.Order: + """Post-rpc interceptor for get_order + + Override in a subclass to manipulate the response + after it is returned by the GDCHardwareManagement server but before + it is returned to user code. + """ + return response + + def pre_get_site( + self, request: service.GetSiteRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[service.GetSiteRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_site + + Override in a subclass to manipulate the request or metadata + before they are sent to the GDCHardwareManagement server. + """ + return request, metadata + + def post_get_site(self, response: resources.Site) -> resources.Site: + """Post-rpc interceptor for get_site + + Override in a subclass to manipulate the response + after it is returned by the GDCHardwareManagement server but before + it is returned to user code. + """ + return response + + def pre_get_sku( + self, request: service.GetSkuRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[service.GetSkuRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_sku + + Override in a subclass to manipulate the request or metadata + before they are sent to the GDCHardwareManagement server. + """ + return request, metadata + + def post_get_sku(self, response: resources.Sku) -> resources.Sku: + """Post-rpc interceptor for get_sku + + Override in a subclass to manipulate the response + after it is returned by the GDCHardwareManagement server but before + it is returned to user code. + """ + return response + + def pre_get_zone( + self, request: service.GetZoneRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[service.GetZoneRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_zone + + Override in a subclass to manipulate the request or metadata + before they are sent to the GDCHardwareManagement server. 
+ """ + return request, metadata + + def post_get_zone(self, response: resources.Zone) -> resources.Zone: + """Post-rpc interceptor for get_zone + + Override in a subclass to manipulate the response + after it is returned by the GDCHardwareManagement server but before + it is returned to user code. + """ + return response + + def pre_list_change_log_entries( + self, + request: service.ListChangeLogEntriesRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[service.ListChangeLogEntriesRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_change_log_entries + + Override in a subclass to manipulate the request or metadata + before they are sent to the GDCHardwareManagement server. + """ + return request, metadata + + def post_list_change_log_entries( + self, response: service.ListChangeLogEntriesResponse + ) -> service.ListChangeLogEntriesResponse: + """Post-rpc interceptor for list_change_log_entries + + Override in a subclass to manipulate the response + after it is returned by the GDCHardwareManagement server but before + it is returned to user code. + """ + return response + + def pre_list_comments( + self, request: service.ListCommentsRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[service.ListCommentsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_comments + + Override in a subclass to manipulate the request or metadata + before they are sent to the GDCHardwareManagement server. + """ + return request, metadata + + def post_list_comments( + self, response: service.ListCommentsResponse + ) -> service.ListCommentsResponse: + """Post-rpc interceptor for list_comments + + Override in a subclass to manipulate the response + after it is returned by the GDCHardwareManagement server but before + it is returned to user code. + """ + return response + + def pre_list_hardware( + self, request: service.ListHardwareRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[service.ListHardwareRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_hardware + + Override in a subclass to manipulate the request or metadata + before they are sent to the GDCHardwareManagement server. + """ + return request, metadata + + def post_list_hardware( + self, response: service.ListHardwareResponse + ) -> service.ListHardwareResponse: + """Post-rpc interceptor for list_hardware + + Override in a subclass to manipulate the response + after it is returned by the GDCHardwareManagement server but before + it is returned to user code. + """ + return response + + def pre_list_hardware_groups( + self, + request: service.ListHardwareGroupsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[service.ListHardwareGroupsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_hardware_groups + + Override in a subclass to manipulate the request or metadata + before they are sent to the GDCHardwareManagement server. + """ + return request, metadata + + def post_list_hardware_groups( + self, response: service.ListHardwareGroupsResponse + ) -> service.ListHardwareGroupsResponse: + """Post-rpc interceptor for list_hardware_groups + + Override in a subclass to manipulate the response + after it is returned by the GDCHardwareManagement server but before + it is returned to user code. 
+ """ + return response + + def pre_list_orders( + self, request: service.ListOrdersRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[service.ListOrdersRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_orders + + Override in a subclass to manipulate the request or metadata + before they are sent to the GDCHardwareManagement server. + """ + return request, metadata + + def post_list_orders( + self, response: service.ListOrdersResponse + ) -> service.ListOrdersResponse: + """Post-rpc interceptor for list_orders + + Override in a subclass to manipulate the response + after it is returned by the GDCHardwareManagement server but before + it is returned to user code. + """ + return response + + def pre_list_sites( + self, request: service.ListSitesRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[service.ListSitesRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_sites + + Override in a subclass to manipulate the request or metadata + before they are sent to the GDCHardwareManagement server. + """ + return request, metadata + + def post_list_sites( + self, response: service.ListSitesResponse + ) -> service.ListSitesResponse: + """Post-rpc interceptor for list_sites + + Override in a subclass to manipulate the response + after it is returned by the GDCHardwareManagement server but before + it is returned to user code. + """ + return response + + def pre_list_skus( + self, request: service.ListSkusRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[service.ListSkusRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_skus + + Override in a subclass to manipulate the request or metadata + before they are sent to the GDCHardwareManagement server. + """ + return request, metadata + + def post_list_skus( + self, response: service.ListSkusResponse + ) -> service.ListSkusResponse: + """Post-rpc interceptor for list_skus + + Override in a subclass to manipulate the response + after it is returned by the GDCHardwareManagement server but before + it is returned to user code. + """ + return response + + def pre_list_zones( + self, request: service.ListZonesRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[service.ListZonesRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_zones + + Override in a subclass to manipulate the request or metadata + before they are sent to the GDCHardwareManagement server. + """ + return request, metadata + + def post_list_zones( + self, response: service.ListZonesResponse + ) -> service.ListZonesResponse: + """Post-rpc interceptor for list_zones + + Override in a subclass to manipulate the response + after it is returned by the GDCHardwareManagement server but before + it is returned to user code. + """ + return response + + def pre_signal_zone_state( + self, + request: service.SignalZoneStateRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[service.SignalZoneStateRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for signal_zone_state + + Override in a subclass to manipulate the request or metadata + before they are sent to the GDCHardwareManagement server. + """ + return request, metadata + + def post_signal_zone_state( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for signal_zone_state + + Override in a subclass to manipulate the response + after it is returned by the GDCHardwareManagement server but before + it is returned to user code. 
+ """ + return response + + def pre_submit_order( + self, request: service.SubmitOrderRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[service.SubmitOrderRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for submit_order + + Override in a subclass to manipulate the request or metadata + before they are sent to the GDCHardwareManagement server. + """ + return request, metadata + + def post_submit_order( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for submit_order + + Override in a subclass to manipulate the response + after it is returned by the GDCHardwareManagement server but before + it is returned to user code. + """ + return response + + def pre_update_hardware( + self, + request: service.UpdateHardwareRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[service.UpdateHardwareRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for update_hardware + + Override in a subclass to manipulate the request or metadata + before they are sent to the GDCHardwareManagement server. + """ + return request, metadata + + def post_update_hardware( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for update_hardware + + Override in a subclass to manipulate the response + after it is returned by the GDCHardwareManagement server but before + it is returned to user code. + """ + return response + + def pre_update_hardware_group( + self, + request: service.UpdateHardwareGroupRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[service.UpdateHardwareGroupRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for update_hardware_group + + Override in a subclass to manipulate the request or metadata + before they are sent to the GDCHardwareManagement server. + """ + return request, metadata + + def post_update_hardware_group( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for update_hardware_group + + Override in a subclass to manipulate the response + after it is returned by the GDCHardwareManagement server but before + it is returned to user code. + """ + return response + + def pre_update_order( + self, request: service.UpdateOrderRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[service.UpdateOrderRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for update_order + + Override in a subclass to manipulate the request or metadata + before they are sent to the GDCHardwareManagement server. + """ + return request, metadata + + def post_update_order( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for update_order + + Override in a subclass to manipulate the response + after it is returned by the GDCHardwareManagement server but before + it is returned to user code. + """ + return response + + def pre_update_site( + self, request: service.UpdateSiteRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[service.UpdateSiteRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for update_site + + Override in a subclass to manipulate the request or metadata + before they are sent to the GDCHardwareManagement server. 
+ """ + return request, metadata + + def post_update_site( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for update_site + + Override in a subclass to manipulate the response + after it is returned by the GDCHardwareManagement server but before + it is returned to user code. + """ + return response + + def pre_update_zone( + self, request: service.UpdateZoneRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[service.UpdateZoneRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for update_zone + + Override in a subclass to manipulate the request or metadata + before they are sent to the GDCHardwareManagement server. + """ + return request, metadata + + def post_update_zone( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for update_zone + + Override in a subclass to manipulate the response + after it is returned by the GDCHardwareManagement server but before + it is returned to user code. + """ + return response + + def pre_get_location( + self, + request: locations_pb2.GetLocationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[locations_pb2.GetLocationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_location + + Override in a subclass to manipulate the request or metadata + before they are sent to the GDCHardwareManagement server. + """ + return request, metadata + + def post_get_location( + self, response: locations_pb2.Location + ) -> locations_pb2.Location: + """Post-rpc interceptor for get_location + + Override in a subclass to manipulate the response + after it is returned by the GDCHardwareManagement server but before + it is returned to user code. + """ + return response + + def pre_list_locations( + self, + request: locations_pb2.ListLocationsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[locations_pb2.ListLocationsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_locations + + Override in a subclass to manipulate the request or metadata + before they are sent to the GDCHardwareManagement server. + """ + return request, metadata + + def post_list_locations( + self, response: locations_pb2.ListLocationsResponse + ) -> locations_pb2.ListLocationsResponse: + """Post-rpc interceptor for list_locations + + Override in a subclass to manipulate the response + after it is returned by the GDCHardwareManagement server but before + it is returned to user code. + """ + return response + + def pre_cancel_operation( + self, + request: operations_pb2.CancelOperationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[operations_pb2.CancelOperationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for cancel_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the GDCHardwareManagement server. + """ + return request, metadata + + def post_cancel_operation(self, response: None) -> None: + """Post-rpc interceptor for cancel_operation + + Override in a subclass to manipulate the response + after it is returned by the GDCHardwareManagement server but before + it is returned to user code. 
+ """ + return response + + def pre_delete_operation( + self, + request: operations_pb2.DeleteOperationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[operations_pb2.DeleteOperationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for delete_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the GDCHardwareManagement server. + """ + return request, metadata + + def post_delete_operation(self, response: None) -> None: + """Post-rpc interceptor for delete_operation + + Override in a subclass to manipulate the response + after it is returned by the GDCHardwareManagement server but before + it is returned to user code. + """ + return response + + def pre_get_operation( + self, + request: operations_pb2.GetOperationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[operations_pb2.GetOperationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the GDCHardwareManagement server. + """ + return request, metadata + + def post_get_operation( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for get_operation + + Override in a subclass to manipulate the response + after it is returned by the GDCHardwareManagement server but before + it is returned to user code. + """ + return response + + def pre_list_operations( + self, + request: operations_pb2.ListOperationsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[operations_pb2.ListOperationsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_operations + + Override in a subclass to manipulate the request or metadata + before they are sent to the GDCHardwareManagement server. + """ + return request, metadata + + def post_list_operations( + self, response: operations_pb2.ListOperationsResponse + ) -> operations_pb2.ListOperationsResponse: + """Post-rpc interceptor for list_operations + + Override in a subclass to manipulate the response + after it is returned by the GDCHardwareManagement server but before + it is returned to user code. + """ + return response + + +@dataclasses.dataclass +class GDCHardwareManagementRestStub: + _session: AuthorizedSession + _host: str + _interceptor: GDCHardwareManagementRestInterceptor + + +class GDCHardwareManagementRestTransport(GDCHardwareManagementTransport): + """REST backend transport for GDCHardwareManagement. + + The GDC Hardware Management service. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends JSON representations of protocol buffers over HTTP/1.1 + + """ + + def __init__( + self, + *, + host: str = "gdchardwaremanagement.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", + interceptor: Optional[GDCHardwareManagementRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'gdchardwaremanagement.googleapis.com'). 
+ credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional[Sequence[str]]): A list of scopes. This argument is + ignored if ``channel`` is provided. + client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. It is ignored + if ``channel`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. + """ + # Run the base constructor + # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. + # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the + # credentials object + maybe_url_match = re.match("^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host) + if maybe_url_match is None: + raise ValueError( + f"Unexpected hostname structure: {host}" + ) # pragma: NO COVER + + url_match_items = maybe_url_match.groupdict() + + host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host + + super().__init__( + host=host, + credentials=credentials, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + self._session = AuthorizedSession( + self._credentials, default_host=self.DEFAULT_HOST + ) + self._operations_client: Optional[operations_v1.AbstractOperationsClient] = None + if client_cert_source_for_mtls: + self._session.configure_mtls_channel(client_cert_source_for_mtls) + self._interceptor = interceptor or GDCHardwareManagementRestInterceptor() + self._prep_wrapped_messages(client_info) + + @property + def operations_client(self) -> operations_v1.AbstractOperationsClient: + """Create the client designed to process long-running operations. + + This property caches on the instance; repeated calls return the same + client. + """ + # Only create a new client if we do not already have one.
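+ # The http_options mapping below supplies REST bindings for the standard + # google.longrunning.Operations methods (cancel, delete, get, list) so that + # long-running operation polling uses the same v1alpha endpoints as the + # rest of this transport.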
+ if self._operations_client is None: + http_options: Dict[str, List[Dict[str, str]]] = { + "google.longrunning.Operations.CancelOperation": [ + { + "method": "post", + "uri": "/v1alpha/{name=projects/*/locations/*/operations/*}:cancel", + "body": "*", + }, + ], + "google.longrunning.Operations.DeleteOperation": [ + { + "method": "delete", + "uri": "/v1alpha/{name=projects/*/locations/*/operations/*}", + }, + ], + "google.longrunning.Operations.GetOperation": [ + { + "method": "get", + "uri": "/v1alpha/{name=projects/*/locations/*/operations/*}", + }, + ], + "google.longrunning.Operations.ListOperations": [ + { + "method": "get", + "uri": "/v1alpha/{name=projects/*/locations/*}/operations", + }, + ], + } + + rest_transport = operations_v1.OperationsRestTransport( + host=self._host, + # use the credentials which are saved + credentials=self._credentials, + scopes=self._scopes, + http_options=http_options, + path_prefix="v1alpha", + ) + + self._operations_client = operations_v1.AbstractOperationsClient( + transport=rest_transport + ) + + # Return the client from cache. + return self._operations_client + + class _CreateComment(GDCHardwareManagementRestStub): + def __hash__(self): + return hash("CreateComment") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: service.CreateCommentRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the create comment method over HTTP. + + Args: + request (~.service.CreateCommentRequest): + The request object. A request to create a comment. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1alpha/{parent=projects/*/locations/*/orders/*}/comments", + "body": "comment", + }, + ] + request, metadata = self._interceptor.pre_create_comment(request, metadata) + pb_request = service.CreateCommentRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
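+ # On success, the JSON payload is parsed into a longrunning Operation and + # passed through the post_create_comment interceptor hook before being + # returned; every stub in this transport follows this same + # transcode/send/parse pattern.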
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_create_comment(resp) + return resp + + class _CreateHardware(GDCHardwareManagementRestStub): + def __hash__(self): + return hash("CreateHardware") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: service.CreateHardwareRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the create hardware method over HTTP. + + Args: + request (~.service.CreateHardwareRequest): + The request object. A request to create hardware. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1alpha/{parent=projects/*/locations/*}/hardware", + "body": "hardware", + }, + ] + request, metadata = self._interceptor.pre_create_hardware(request, metadata) + pb_request = service.CreateHardwareRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_create_hardware(resp) + return resp + + class _CreateHardwareGroup(GDCHardwareManagementRestStub): + def __hash__(self): + return hash("CreateHardwareGroup") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: service.CreateHardwareGroupRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the create hardware group method over HTTP. + + Args: + request (~.service.CreateHardwareGroupRequest): + The request object. A request to create a hardware group. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1alpha/{parent=projects/*/locations/*/orders/*}/hardwareGroups", + "body": "hardware_group", + }, + ] + request, metadata = self._interceptor.pre_create_hardware_group( + request, metadata + ) + pb_request = service.CreateHardwareGroupRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_create_hardware_group(resp) + return resp + + class _CreateOrder(GDCHardwareManagementRestStub): + def __hash__(self): + return hash("CreateOrder") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: service.CreateOrderRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the create order method over HTTP. + + Args: + request (~.service.CreateOrderRequest): + The request object. A request to create an order. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1alpha/{parent=projects/*/locations/*}/orders", + "body": "order", + }, + ] + request, metadata = self._interceptor.pre_create_order(request, metadata) + pb_request = service.CreateOrderRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_create_order(resp) + return resp + + class _CreateSite(GDCHardwareManagementRestStub): + def __hash__(self): + return hash("CreateSite") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: service.CreateSiteRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the create site method over HTTP. + + Args: + request (~.service.CreateSiteRequest): + The request object. A request to create a site. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1alpha/{parent=projects/*/locations/*}/sites", + "body": "site", + }, + ] + request, metadata = self._interceptor.pre_create_site(request, metadata) + pb_request = service.CreateSiteRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_create_site(resp) + return resp + + class _CreateZone(GDCHardwareManagementRestStub): + def __hash__(self): + return hash("CreateZone") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: service.CreateZoneRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the create zone method over HTTP. + + Args: + request (~.service.CreateZoneRequest): + The request object. A request to create a zone. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1alpha/{parent=projects/*/locations/*}/zones", + "body": "zone", + }, + ] + request, metadata = self._interceptor.pre_create_zone(request, metadata) + pb_request = service.CreateZoneRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_create_zone(resp) + return resp + + class _DeleteHardware(GDCHardwareManagementRestStub): + def __hash__(self): + return hash("DeleteHardware") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: service.DeleteHardwareRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the delete hardware method over HTTP. + + Args: + request (~.service.DeleteHardwareRequest): + The request object. A request to delete hardware. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v1alpha/{name=projects/*/locations/*/hardware/*}", + }, + ] + request, metadata = self._interceptor.pre_delete_hardware(request, metadata) + pb_request = service.DeleteHardwareRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_delete_hardware(resp) + return resp + + class _DeleteHardwareGroup(GDCHardwareManagementRestStub): + def __hash__(self): + return hash("DeleteHardwareGroup") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: service.DeleteHardwareGroupRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the delete hardware group method over HTTP. 
+ + Args: + request (~.service.DeleteHardwareGroupRequest): + The request object. A request to delete a hardware group. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v1alpha/{name=projects/*/locations/*/orders/*/hardwareGroups/*}", + }, + ] + request, metadata = self._interceptor.pre_delete_hardware_group( + request, metadata + ) + pb_request = service.DeleteHardwareGroupRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_delete_hardware_group(resp) + return resp + + class _DeleteOrder(GDCHardwareManagementRestStub): + def __hash__(self): + return hash("DeleteOrder") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: service.DeleteOrderRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the delete order method over HTTP. + + Args: + request (~.service.DeleteOrderRequest): + The request object. A request to delete an order. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v1alpha/{name=projects/*/locations/*/orders/*}", + }, + ] + request, metadata = self._interceptor.pre_delete_order(request, metadata) + pb_request = service.DeleteOrderRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_delete_order(resp) + return resp + + class _DeleteZone(GDCHardwareManagementRestStub): + def __hash__(self): + return hash("DeleteZone") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: service.DeleteZoneRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the delete zone method over HTTP. + + Args: + request (~.service.DeleteZoneRequest): + The request object. A request to delete a zone. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v1alpha/{name=projects/*/locations/*/zones/*}", + }, + ] + request, metadata = self._interceptor.pre_delete_zone(request, metadata) + pb_request = service.DeleteZoneRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_delete_zone(resp) + return resp + + class _GetChangeLogEntry(GDCHardwareManagementRestStub): + def __hash__(self): + return hash("GetChangeLogEntry") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: service.GetChangeLogEntryRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> resources.ChangeLogEntry: + r"""Call the get change log entry method over HTTP. + + Args: + request (~.service.GetChangeLogEntryRequest): + The request object. A request to get a change log entry. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.resources.ChangeLogEntry: + A log entry of a change made to an + order. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1alpha/{name=projects/*/locations/*/orders/*/changeLogEntries/*}", + }, + ] + request, metadata = self._interceptor.pre_get_change_log_entry( + request, metadata + ) + pb_request = service.GetChangeLogEntryRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = resources.ChangeLogEntry() + pb_resp = resources.ChangeLogEntry.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_change_log_entry(resp) + return resp + + class _GetComment(GDCHardwareManagementRestStub): + def __hash__(self): + return hash("GetComment") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: service.GetCommentRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> resources.Comment: + r"""Call the get comment method over HTTP. + + Args: + request (~.service.GetCommentRequest): + The request object. A request to get a comment. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.resources.Comment: + A comment on an order. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1alpha/{name=projects/*/locations/*/orders/*/comments/*}", + }, + ] + request, metadata = self._interceptor.pre_get_comment(request, metadata) + pb_request = service.GetCommentRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = resources.Comment() + pb_resp = resources.Comment.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_comment(resp) + return resp + + class _GetHardware(GDCHardwareManagementRestStub): + def __hash__(self): + return hash("GetHardware") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: service.GetHardwareRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> resources.Hardware: + r"""Call the get hardware method over HTTP. + + Args: + request (~.service.GetHardwareRequest): + The request object. A request to get hardware. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.resources.Hardware: + An instance of hardware installed at + a site. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1alpha/{name=projects/*/locations/*/hardware/*}", + }, + ] + request, metadata = self._interceptor.pre_get_hardware(request, metadata) + pb_request = service.GetHardwareRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = resources.Hardware() + pb_resp = resources.Hardware.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_hardware(resp) + return resp + + class _GetHardwareGroup(GDCHardwareManagementRestStub): + def __hash__(self): + return hash("GetHardwareGroup") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: service.GetHardwareGroupRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> resources.HardwareGroup: + r"""Call the get hardware group method over HTTP. + + Args: + request (~.service.GetHardwareGroupRequest): + The request object. A request to get a hardware group. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.resources.HardwareGroup: + A group of hardware that is part of + the same order, has the same SKU, and is + delivered to the same site. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1alpha/{name=projects/*/locations/*/orders/*/hardwareGroups/*}", + }, + ] + request, metadata = self._interceptor.pre_get_hardware_group( + request, metadata + ) + pb_request = service.GetHardwareGroupRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = resources.HardwareGroup() + pb_resp = resources.HardwareGroup.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_hardware_group(resp) + return resp + + class _GetOrder(GDCHardwareManagementRestStub): + def __hash__(self): + return hash("GetOrder") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: service.GetOrderRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> resources.Order: + r"""Call the get order method over HTTP. + + Args: + request (~.service.GetOrderRequest): + The request object. A request to get an order. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.resources.Order: + An order for GDC hardware. 
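+
+            Example:
+                An illustrative sketch (not part of the generated surface) of
+                invoking this stub through the ``get_order`` transport property
+                defined below; it assumes ``transport`` is an already-constructed
+                REST transport instance for this service:
+
+                .. code-block:: python
+
+                    request = service.GetOrderRequest(
+                        name="projects/my-project/locations/us-central1/orders/my-order"
+                    )
+                    order = transport.get_order(request)
+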
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1alpha/{name=projects/*/locations/*/orders/*}", + }, + ] + request, metadata = self._interceptor.pre_get_order(request, metadata) + pb_request = service.GetOrderRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = resources.Order() + pb_resp = resources.Order.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_order(resp) + return resp + + class _GetSite(GDCHardwareManagementRestStub): + def __hash__(self): + return hash("GetSite") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: service.GetSiteRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> resources.Site: + r"""Call the get site method over HTTP. + + Args: + request (~.service.GetSiteRequest): + The request object. A request to get a site. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.resources.Site: + A physical site where hardware will + be installed. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1alpha/{name=projects/*/locations/*/sites/*}", + }, + ] + request, metadata = self._interceptor.pre_get_site(request, metadata) + pb_request = service.GetSiteRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = resources.Site() + pb_resp = resources.Site.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_site(resp) + return resp + + class _GetSku(GDCHardwareManagementRestStub): + def __hash__(self): + return hash("GetSku") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: service.GetSkuRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> resources.Sku: + r"""Call the get sku method over HTTP. + + Args: + request (~.service.GetSkuRequest): + The request object. A request to get an SKU. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.resources.Sku: + A stock keeping unit (SKU) of GDC + hardware. + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1alpha/{name=projects/*/locations/*/skus/*}", + }, + ] + request, metadata = self._interceptor.pre_get_sku(request, metadata) + pb_request = service.GetSkuRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
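+            # (google.api_core's from_http_response inspects the status code and
+            # error payload and returns the matching exception type, e.g.
+            # NotFound for a 404 response.)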
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = resources.Sku() + pb_resp = resources.Sku.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_sku(resp) + return resp + + class _GetZone(GDCHardwareManagementRestStub): + def __hash__(self): + return hash("GetZone") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: service.GetZoneRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> resources.Zone: + r"""Call the get zone method over HTTP. + + Args: + request (~.service.GetZoneRequest): + The request object. A request to get a zone. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.resources.Zone: + A zone holding a set of hardware. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1alpha/{name=projects/*/locations/*/zones/*}", + }, + ] + request, metadata = self._interceptor.pre_get_zone(request, metadata) + pb_request = service.GetZoneRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = resources.Zone() + pb_resp = resources.Zone.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_zone(resp) + return resp + + class _ListChangeLogEntries(GDCHardwareManagementRestStub): + def __hash__(self): + return hash("ListChangeLogEntries") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: service.ListChangeLogEntriesRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> service.ListChangeLogEntriesResponse: + r"""Call the list change log entries method over HTTP. + + Args: + request (~.service.ListChangeLogEntriesRequest): + The request object. A request to list change log entries. 
+                retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                    should be retried.
+                timeout (float): The timeout for this request.
+                metadata (Sequence[Tuple[str, str]]): Strings which should be
+                    sent along with the request as metadata.
+
+            Returns:
+                ~.service.ListChangeLogEntriesResponse:
+                    A list of change log entries.
+            """
+
+            http_options: List[Dict[str, str]] = [
+                {
+                    "method": "get",
+                    "uri": "/v1alpha/{parent=projects/*/locations/*/orders/*}/changeLogEntries",
+                },
+            ]
+            request, metadata = self._interceptor.pre_list_change_log_entries(
+                request, metadata
+            )
+            pb_request = service.ListChangeLogEntriesRequest.pb(request)
+            transcoded_request = path_template.transcode(http_options, pb_request)
+
+            uri = transcoded_request["uri"]
+            method = transcoded_request["method"]
+
+            # Jsonify the query params
+            query_params = json.loads(
+                json_format.MessageToJson(
+                    transcoded_request["query_params"],
+                    use_integers_for_enums=True,
+                )
+            )
+            query_params.update(self._get_unset_required_fields(query_params))
+
+            query_params["$alt"] = "json;enum-encoding=int"
+
+            # Send the request
+            headers = dict(metadata)
+            headers["Content-Type"] = "application/json"
+            response = getattr(self._session, method)(
+                "{host}{uri}".format(host=self._host, uri=uri),
+                timeout=timeout,
+                headers=headers,
+                params=rest_helpers.flatten_query_params(query_params, strict=True),
+            )
+
+            # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception
+            # subclass.
+            if response.status_code >= 400:
+                raise core_exceptions.from_http_response(response)
+
+            # Return the response
+            resp = service.ListChangeLogEntriesResponse()
+            pb_resp = service.ListChangeLogEntriesResponse.pb(resp)
+
+            json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True)
+            resp = self._interceptor.post_list_change_log_entries(resp)
+            return resp
+
+    class _ListComments(GDCHardwareManagementRestStub):
+        def __hash__(self):
+            return hash("ListComments")
+
+        __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {}
+
+        @classmethod
+        def _get_unset_required_fields(cls, message_dict):
+            return {
+                k: v
+                for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items()
+                if k not in message_dict
+            }
+
+        def __call__(
+            self,
+            request: service.ListCommentsRequest,
+            *,
+            retry: OptionalRetry = gapic_v1.method.DEFAULT,
+            timeout: Optional[float] = None,
+            metadata: Sequence[Tuple[str, str]] = (),
+        ) -> service.ListCommentsResponse:
+            r"""Call the list comments method over HTTP.
+
+            Args:
+                request (~.service.ListCommentsRequest):
+                    The request object. A request to list comments.
+                retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                    should be retried.
+                timeout (float): The timeout for this request.
+                metadata (Sequence[Tuple[str, str]]): Strings which should be
+                    sent along with the request as metadata.
+
+            Returns:
+                ~.service.ListCommentsResponse:
+                    A list of comments.
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1alpha/{parent=projects/*/locations/*/orders/*}/comments", + }, + ] + request, metadata = self._interceptor.pre_list_comments(request, metadata) + pb_request = service.ListCommentsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = service.ListCommentsResponse() + pb_resp = service.ListCommentsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_comments(resp) + return resp + + class _ListHardware(GDCHardwareManagementRestStub): + def __hash__(self): + return hash("ListHardware") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: service.ListHardwareRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> service.ListHardwareResponse: + r"""Call the list hardware method over HTTP. + + Args: + request (~.service.ListHardwareRequest): + The request object. A request to list hardware. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.service.ListHardwareResponse: + A list of hardware. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1alpha/{parent=projects/*/locations/*}/hardware", + }, + ] + request, metadata = self._interceptor.pre_list_hardware(request, metadata) + pb_request = service.ListHardwareRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = service.ListHardwareResponse() + pb_resp = service.ListHardwareResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_hardware(resp) + return resp + + class _ListHardwareGroups(GDCHardwareManagementRestStub): + def __hash__(self): + return hash("ListHardwareGroups") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: service.ListHardwareGroupsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> service.ListHardwareGroupsResponse: + r"""Call the list hardware groups method over HTTP. + + Args: + request (~.service.ListHardwareGroupsRequest): + The request object. A request to list hardware groups. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.service.ListHardwareGroupsResponse: + A list of hardware groups. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1alpha/{parent=projects/*/locations/*/orders/*}/hardwareGroups", + }, + ] + request, metadata = self._interceptor.pre_list_hardware_groups( + request, metadata + ) + pb_request = service.ListHardwareGroupsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = service.ListHardwareGroupsResponse() + pb_resp = service.ListHardwareGroupsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_hardware_groups(resp) + return resp + + class _ListOrders(GDCHardwareManagementRestStub): + def __hash__(self): + return hash("ListOrders") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: service.ListOrdersRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> service.ListOrdersResponse: + r"""Call the list orders method over HTTP. + + Args: + request (~.service.ListOrdersRequest): + The request object. A request to list orders. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.service.ListOrdersResponse: + A list of orders. 
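+
+            Example:
+                A minimal pagination sketch. At this layer the raw
+                ``ListOrdersResponse`` is returned (the higher-level client
+                wraps it in a pager); the loop assumes the standard
+                ``page_token``/``next_page_token`` fields and a
+                ``transport.list_orders`` property analogous to those defined
+                below:
+
+                .. code-block:: python
+
+                    request = service.ListOrdersRequest(
+                        parent="projects/my-project/locations/us-central1"
+                    )
+                    while True:
+                        response = transport.list_orders(request)
+                        for order in response.orders:
+                            print(order.name)
+                        if not response.next_page_token:
+                            break
+                        request.page_token = response.next_page_token
+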
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1alpha/{parent=projects/*/locations/*}/orders", + }, + ] + request, metadata = self._interceptor.pre_list_orders(request, metadata) + pb_request = service.ListOrdersRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = service.ListOrdersResponse() + pb_resp = service.ListOrdersResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_orders(resp) + return resp + + class _ListSites(GDCHardwareManagementRestStub): + def __hash__(self): + return hash("ListSites") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: service.ListSitesRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> service.ListSitesResponse: + r"""Call the list sites method over HTTP. + + Args: + request (~.service.ListSitesRequest): + The request object. A request to list sites. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.service.ListSitesResponse: + A list of sites. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1alpha/{parent=projects/*/locations/*}/sites", + }, + ] + request, metadata = self._interceptor.pre_list_sites(request, metadata) + pb_request = service.ListSitesRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = service.ListSitesResponse() + pb_resp = service.ListSitesResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_sites(resp) + return resp + + class _ListSkus(GDCHardwareManagementRestStub): + def __hash__(self): + return hash("ListSkus") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: service.ListSkusRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> service.ListSkusResponse: + r"""Call the list skus method over HTTP. + + Args: + request (~.service.ListSkusRequest): + The request object. A request to list SKUs. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.service.ListSkusResponse: + A list of SKUs. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1alpha/{parent=projects/*/locations/*}/skus", + }, + ] + request, metadata = self._interceptor.pre_list_skus(request, metadata) + pb_request = service.ListSkusRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = service.ListSkusResponse() + pb_resp = service.ListSkusResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_skus(resp) + return resp + + class _ListZones(GDCHardwareManagementRestStub): + def __hash__(self): + return hash("ListZones") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: service.ListZonesRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> service.ListZonesResponse: + r"""Call the list zones method over HTTP. + + Args: + request (~.service.ListZonesRequest): + The request object. A request to list zones. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.service.ListZonesResponse: + A list of zones. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1alpha/{parent=projects/*/locations/*}/zones", + }, + ] + request, metadata = self._interceptor.pre_list_zones(request, metadata) + pb_request = service.ListZonesRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = service.ListZonesResponse() + pb_resp = service.ListZonesResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_zones(resp) + return resp + + class _SignalZoneState(GDCHardwareManagementRestStub): + def __hash__(self): + return hash("SignalZoneState") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: service.SignalZoneStateRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the signal zone state method over HTTP. + + Args: + request (~.service.SignalZoneStateRequest): + The request object. A request to signal the state of a + zone. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1alpha/{name=projects/*/locations/*/zones/*}:signal", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_signal_zone_state( + request, metadata + ) + pb_request = service.SignalZoneStateRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_signal_zone_state(resp) + return resp + + class _SubmitOrder(GDCHardwareManagementRestStub): + def __hash__(self): + return hash("SubmitOrder") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: service.SubmitOrderRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the submit order method over HTTP. + + Args: + request (~.service.SubmitOrderRequest): + The request object. A request to submit an order. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
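+
+            Example:
+                An illustrative sketch. This stub returns the raw
+                ``operations_pb2.Operation``; wrapping it into an operation
+                future and polling it to completion is handled by the
+                higher-level client. It assumes a ``transport.submit_order``
+                property analogous to those defined below:
+
+                .. code-block:: python
+
+                    request = service.SubmitOrderRequest(
+                        name="projects/my-project/locations/us-central1/orders/my-order"
+                    )
+                    operation = transport.submit_order(request)
+                    # The operation name can be used to poll for completion.
+                    print(operation.name)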
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1alpha/{name=projects/*/locations/*/orders/*}:submit", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_submit_order(request, metadata) + pb_request = service.SubmitOrderRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_submit_order(resp) + return resp + + class _UpdateHardware(GDCHardwareManagementRestStub): + def __hash__(self): + return hash("UpdateHardware") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "updateMask": {}, + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: service.UpdateHardwareRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the update hardware method over HTTP. + + Args: + request (~.service.UpdateHardwareRequest): + The request object. A request to update hardware. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "patch", + "uri": "/v1alpha/{hardware.name=projects/*/locations/*/hardware/*}", + "body": "hardware", + }, + ] + request, metadata = self._interceptor.pre_update_hardware(request, metadata) + pb_request = service.UpdateHardwareRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_update_hardware(resp) + return resp + + class _UpdateHardwareGroup(GDCHardwareManagementRestStub): + def __hash__(self): + return hash("UpdateHardwareGroup") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "updateMask": {}, + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: service.UpdateHardwareGroupRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the update hardware group method over HTTP. + + Args: + request (~.service.UpdateHardwareGroupRequest): + The request object. A request to update a hardware group. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "patch", + "uri": "/v1alpha/{hardware_group.name=projects/*/locations/*/orders/*/hardwareGroups/*}", + "body": "hardware_group", + }, + ] + request, metadata = self._interceptor.pre_update_hardware_group( + request, metadata + ) + pb_request = service.UpdateHardwareGroupRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_update_hardware_group(resp) + return resp + + class _UpdateOrder(GDCHardwareManagementRestStub): + def __hash__(self): + return hash("UpdateOrder") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "updateMask": {}, + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: service.UpdateOrderRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the update order method over HTTP. + + Args: + request (~.service.UpdateOrderRequest): + The request object. A request to update an order. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "patch", + "uri": "/v1alpha/{order.name=projects/*/locations/*/orders/*}", + "body": "order", + }, + ] + request, metadata = self._interceptor.pre_update_order(request, metadata) + pb_request = service.UpdateOrderRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_update_order(resp) + return resp + + class _UpdateSite(GDCHardwareManagementRestStub): + def __hash__(self): + return hash("UpdateSite") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "updateMask": {}, + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: service.UpdateSiteRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the update site method over HTTP. + + Args: + request (~.service.UpdateSiteRequest): + The request object. A request to update a site. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "patch", + "uri": "/v1alpha/{site.name=projects/*/locations/*/sites/*}", + "body": "site", + }, + ] + request, metadata = self._interceptor.pre_update_site(request, metadata) + pb_request = service.UpdateSiteRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_update_site(resp) + return resp + + class _UpdateZone(GDCHardwareManagementRestStub): + def __hash__(self): + return hash("UpdateZone") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "updateMask": {}, + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: service.UpdateZoneRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the update zone method over HTTP. + + Args: + request (~.service.UpdateZoneRequest): + The request object. A request to update a zone. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "patch", + "uri": "/v1alpha/{zone.name=projects/*/locations/*/zones/*}", + "body": "zone", + }, + ] + request, metadata = self._interceptor.pre_update_zone(request, metadata) + pb_request = service.UpdateZoneRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_update_zone(resp) + return resp + + @property + def create_comment( + self, + ) -> Callable[[service.CreateCommentRequest], operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._CreateComment(self._session, self._host, self._interceptor) # type: ignore + + @property + def create_hardware( + self, + ) -> Callable[[service.CreateHardwareRequest], operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._CreateHardware(self._session, self._host, self._interceptor) # type: ignore + + @property + def create_hardware_group( + self, + ) -> Callable[[service.CreateHardwareGroupRequest], operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._CreateHardwareGroup(self._session, self._host, self._interceptor) # type: ignore + + @property + def create_order( + self, + ) -> Callable[[service.CreateOrderRequest], operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._CreateOrder(self._session, self._host, self._interceptor) # type: ignore + + @property + def create_site( + self, + ) -> Callable[[service.CreateSiteRequest], operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._CreateSite(self._session, self._host, self._interceptor) # type: ignore + + @property + def create_zone( + self, + ) -> Callable[[service.CreateZoneRequest], operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._CreateZone(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete_hardware( + self, + ) -> Callable[[service.DeleteHardwareRequest], operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._DeleteHardware(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete_hardware_group( + self, + ) -> Callable[[service.DeleteHardwareGroupRequest], operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._DeleteHardwareGroup(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete_order( + self, + ) -> Callable[[service.DeleteOrderRequest], operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._DeleteOrder(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete_zone( + self, + ) -> Callable[[service.DeleteZoneRequest], operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._DeleteZone(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_change_log_entry( + self, + ) -> Callable[[service.GetChangeLogEntryRequest], resources.ChangeLogEntry]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetChangeLogEntry(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_comment(self) -> Callable[[service.GetCommentRequest], resources.Comment]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetComment(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_hardware( + self, + ) -> Callable[[service.GetHardwareRequest], resources.Hardware]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetHardware(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_hardware_group( + self, + ) -> Callable[[service.GetHardwareGroupRequest], resources.HardwareGroup]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetHardwareGroup(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_order(self) -> Callable[[service.GetOrderRequest], resources.Order]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
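+        # Unary getters such as get_order return the decoded resource message directly
+        # (here resources.Order) instead of a long-running operations_pb2.Operation.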
+ # In C++ this would require a dynamic_cast + return self._GetOrder(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_site(self) -> Callable[[service.GetSiteRequest], resources.Site]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetSite(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_sku(self) -> Callable[[service.GetSkuRequest], resources.Sku]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetSku(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_zone(self) -> Callable[[service.GetZoneRequest], resources.Zone]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetZone(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_change_log_entries( + self, + ) -> Callable[ + [service.ListChangeLogEntriesRequest], service.ListChangeLogEntriesResponse + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListChangeLogEntries(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_comments( + self, + ) -> Callable[[service.ListCommentsRequest], service.ListCommentsResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListComments(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_hardware( + self, + ) -> Callable[[service.ListHardwareRequest], service.ListHardwareResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListHardware(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_hardware_groups( + self, + ) -> Callable[ + [service.ListHardwareGroupsRequest], service.ListHardwareGroupsResponse + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListHardwareGroups(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_orders( + self, + ) -> Callable[[service.ListOrdersRequest], service.ListOrdersResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListOrders(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_sites( + self, + ) -> Callable[[service.ListSitesRequest], service.ListSitesResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListSites(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_skus( + self, + ) -> Callable[[service.ListSkusRequest], service.ListSkusResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
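+        # Illustrative only: the GAPIC client layer invokes this callable roughly as
+        #
+        #     response = transport.list_skus(service.ListSkusRequest(parent=parent))
+        #
+        # and then wraps the single-page response in a pager that follows next_page_token.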
+ # In C++ this would require a dynamic_cast + return self._ListSkus(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_zones( + self, + ) -> Callable[[service.ListZonesRequest], service.ListZonesResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListZones(self._session, self._host, self._interceptor) # type: ignore + + @property + def signal_zone_state( + self, + ) -> Callable[[service.SignalZoneStateRequest], operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._SignalZoneState(self._session, self._host, self._interceptor) # type: ignore + + @property + def submit_order( + self, + ) -> Callable[[service.SubmitOrderRequest], operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._SubmitOrder(self._session, self._host, self._interceptor) # type: ignore + + @property + def update_hardware( + self, + ) -> Callable[[service.UpdateHardwareRequest], operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._UpdateHardware(self._session, self._host, self._interceptor) # type: ignore + + @property + def update_hardware_group( + self, + ) -> Callable[[service.UpdateHardwareGroupRequest], operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._UpdateHardwareGroup(self._session, self._host, self._interceptor) # type: ignore + + @property + def update_order( + self, + ) -> Callable[[service.UpdateOrderRequest], operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._UpdateOrder(self._session, self._host, self._interceptor) # type: ignore + + @property + def update_site( + self, + ) -> Callable[[service.UpdateSiteRequest], operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._UpdateSite(self._session, self._host, self._interceptor) # type: ignore + + @property + def update_zone( + self, + ) -> Callable[[service.UpdateZoneRequest], operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._UpdateZone(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_location(self): + return self._GetLocation(self._session, self._host, self._interceptor) # type: ignore + + class _GetLocation(GDCHardwareManagementRestStub): + def __call__( + self, + request: locations_pb2.GetLocationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.Location: + r"""Call the get location method over HTTP. + + Args: + request (locations_pb2.GetLocationRequest): + The request object for GetLocation method. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + locations_pb2.Location: Response from GetLocation method. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1alpha/{name=projects/*/locations/*}", + }, + ] + + request, metadata = self._interceptor.pre_get_location(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = locations_pb2.Location() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_get_location(resp) + return resp + + @property + def list_locations(self): + return self._ListLocations(self._session, self._host, self._interceptor) # type: ignore + + class _ListLocations(GDCHardwareManagementRestStub): + def __call__( + self, + request: locations_pb2.ListLocationsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.ListLocationsResponse: + r"""Call the list locations method over HTTP. + + Args: + request (locations_pb2.ListLocationsRequest): + The request object for ListLocations method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + locations_pb2.ListLocationsResponse: Response from ListLocations method. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1alpha/{name=projects/*}/locations", + }, + ] + + request, metadata = self._interceptor.pre_list_locations(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
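+            # from_http_response() builds the matching exception from the status code and
+            # JSON error payload (e.g. NotFound for 404, PermissionDenied for 403).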
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = locations_pb2.ListLocationsResponse() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_list_locations(resp) + return resp + + @property + def cancel_operation(self): + return self._CancelOperation(self._session, self._host, self._interceptor) # type: ignore + + class _CancelOperation(GDCHardwareManagementRestStub): + def __call__( + self, + request: operations_pb2.CancelOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Call the cancel operation method over HTTP. + + Args: + request (operations_pb2.CancelOperationRequest): + The request object for CancelOperation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1alpha/{name=projects/*/locations/*/operations/*}:cancel", + "body": "*", + }, + ] + + request, metadata = self._interceptor.pre_cancel_operation( + request, metadata + ) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + body = json.dumps(transcoded_request["body"]) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + return self._interceptor.post_cancel_operation(None) + + @property + def delete_operation(self): + return self._DeleteOperation(self._session, self._host, self._interceptor) # type: ignore + + class _DeleteOperation(GDCHardwareManagementRestStub): + def __call__( + self, + request: operations_pb2.DeleteOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Call the delete operation method over HTTP. + + Args: + request (operations_pb2.DeleteOperationRequest): + The request object for DeleteOperation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
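+
+            This method returns ``None``; a non-2xx HTTP status is surfaced as the
+            corresponding ``GoogleAPICallError`` subclass instead.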
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v1alpha/{name=projects/*/locations/*/operations/*}", + }, + ] + + request, metadata = self._interceptor.pre_delete_operation( + request, metadata + ) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + return self._interceptor.post_delete_operation(None) + + @property + def get_operation(self): + return self._GetOperation(self._session, self._host, self._interceptor) # type: ignore + + class _GetOperation(GDCHardwareManagementRestStub): + def __call__( + self, + request: operations_pb2.GetOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the get operation method over HTTP. + + Args: + request (operations_pb2.GetOperationRequest): + The request object for GetOperation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + operations_pb2.Operation: Response from GetOperation method. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1alpha/{name=projects/*/locations/*/operations/*}", + }, + ] + + request, metadata = self._interceptor.pre_get_operation(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = operations_pb2.Operation() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_get_operation(resp) + return resp + + @property + def list_operations(self): + return self._ListOperations(self._session, self._host, self._interceptor) # type: ignore + + class _ListOperations(GDCHardwareManagementRestStub): + def __call__( + self, + request: operations_pb2.ListOperationsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Call the list operations method over HTTP. + + Args: + request (operations_pb2.ListOperationsRequest): + The request object for ListOperations method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + operations_pb2.ListOperationsResponse: Response from ListOperations method. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1alpha/{name=projects/*/locations/*}/operations", + }, + ] + + request, metadata = self._interceptor.pre_list_operations(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = operations_pb2.ListOperationsResponse() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_list_operations(resp) + return resp + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__ = ("GDCHardwareManagementRestTransport",) diff --git a/packages/google-cloud-gdchardwaremanagement/google/cloud/gdchardwaremanagement_v1alpha/types/__init__.py b/packages/google-cloud-gdchardwaremanagement/google/cloud/gdchardwaremanagement_v1alpha/types/__init__.py new file mode 100644 index 000000000000..920359d3bd38 --- /dev/null +++ b/packages/google-cloud-gdchardwaremanagement/google/cloud/gdchardwaremanagement_v1alpha/types/__init__.py @@ -0,0 +1,150 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +from .resources import ( + ChangeLogEntry, + Comment, + Contact, + Dimensions, + Hardware, + HardwareConfig, + HardwareGroup, + HardwareInstallationInfo, + HardwareLocation, + HardwarePhysicalInfo, + Order, + OrganizationContact, + PowerSupply, + RackSpace, + Site, + Sku, + SkuConfig, + SkuInstance, + Subnet, + TimePeriod, + Zone, + ZoneNetworkConfig, +) +from .service import ( + CreateCommentRequest, + CreateHardwareGroupRequest, + CreateHardwareRequest, + CreateOrderRequest, + CreateSiteRequest, + CreateZoneRequest, + DeleteHardwareGroupRequest, + DeleteHardwareRequest, + DeleteOrderRequest, + DeleteZoneRequest, + GetChangeLogEntryRequest, + GetCommentRequest, + GetHardwareGroupRequest, + GetHardwareRequest, + GetOrderRequest, + GetSiteRequest, + GetSkuRequest, + GetZoneRequest, + ListChangeLogEntriesRequest, + ListChangeLogEntriesResponse, + ListCommentsRequest, + ListCommentsResponse, + ListHardwareGroupsRequest, + ListHardwareGroupsResponse, + ListHardwareRequest, + ListHardwareResponse, + ListOrdersRequest, + ListOrdersResponse, + ListSitesRequest, + ListSitesResponse, + ListSkusRequest, + ListSkusResponse, + ListZonesRequest, + ListZonesResponse, + OperationMetadata, + SignalZoneStateRequest, + SubmitOrderRequest, + UpdateHardwareGroupRequest, + UpdateHardwareRequest, + UpdateOrderRequest, + UpdateSiteRequest, + UpdateZoneRequest, +) + +__all__ = ( + "ChangeLogEntry", + "Comment", + "Contact", + "Dimensions", + "Hardware", + "HardwareConfig", + "HardwareGroup", + "HardwareInstallationInfo", + "HardwareLocation", + "HardwarePhysicalInfo", + "Order", + "OrganizationContact", + "RackSpace", + "Site", + "Sku", + "SkuConfig", + "SkuInstance", + "Subnet", + "TimePeriod", + "Zone", + "ZoneNetworkConfig", + "PowerSupply", + "CreateCommentRequest", + "CreateHardwareGroupRequest", + "CreateHardwareRequest", + "CreateOrderRequest", + "CreateSiteRequest", + "CreateZoneRequest", + "DeleteHardwareGroupRequest", + "DeleteHardwareRequest", + "DeleteOrderRequest", + "DeleteZoneRequest", + "GetChangeLogEntryRequest", + "GetCommentRequest", + "GetHardwareGroupRequest", + "GetHardwareRequest", + "GetOrderRequest", + "GetSiteRequest", + "GetSkuRequest", + "GetZoneRequest", + "ListChangeLogEntriesRequest", + "ListChangeLogEntriesResponse", + "ListCommentsRequest", + "ListCommentsResponse", + "ListHardwareGroupsRequest", + "ListHardwareGroupsResponse", + "ListHardwareRequest", + "ListHardwareResponse", + "ListOrdersRequest", + "ListOrdersResponse", + "ListSitesRequest", + "ListSitesResponse", + "ListSkusRequest", + "ListSkusResponse", + "ListZonesRequest", + "ListZonesResponse", + "OperationMetadata", + "SignalZoneStateRequest", + "SubmitOrderRequest", + "UpdateHardwareGroupRequest", + "UpdateHardwareRequest", + "UpdateOrderRequest", + "UpdateSiteRequest", + "UpdateZoneRequest", +) diff --git a/packages/google-cloud-gdchardwaremanagement/google/cloud/gdchardwaremanagement_v1alpha/types/resources.py b/packages/google-cloud-gdchardwaremanagement/google/cloud/gdchardwaremanagement_v1alpha/types/resources.py new file mode 100644 index 000000000000..2c0bf0948648 --- /dev/null +++ b/packages/google-cloud-gdchardwaremanagement/google/cloud/gdchardwaremanagement_v1alpha/types/resources.py @@ -0,0 +1,1505 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +from google.protobuf import timestamp_pb2 # type: ignore +from google.type import date_pb2 # type: ignore +from google.type import datetime_pb2 # type: ignore +from google.type import dayofweek_pb2 # type: ignore +from google.type import postal_address_pb2 # type: ignore +from google.type import timeofday_pb2 # type: ignore +import proto # type: ignore + +__protobuf__ = proto.module( + package="google.cloud.gdchardwaremanagement.v1alpha", + manifest={ + "PowerSupply", + "Order", + "Site", + "HardwareGroup", + "Hardware", + "Comment", + "ChangeLogEntry", + "Sku", + "Zone", + "OrganizationContact", + "Contact", + "HardwareConfig", + "SkuConfig", + "SkuInstance", + "HardwarePhysicalInfo", + "HardwareInstallationInfo", + "ZoneNetworkConfig", + "Subnet", + "TimePeriod", + "Dimensions", + "RackSpace", + "HardwareLocation", + }, +) + + +class PowerSupply(proto.Enum): + r"""The power supply options. + + Values: + POWER_SUPPLY_UNSPECIFIED (0): + Power supply is unspecified. + POWER_SUPPLY_AC (1): + AC power supply. + POWER_SUPPLY_DC (2): + DC power supply. + """ + POWER_SUPPLY_UNSPECIFIED = 0 + POWER_SUPPLY_AC = 1 + POWER_SUPPLY_DC = 2 + + +class Order(proto.Message): + r"""An order for GDC hardware. + + Attributes: + name (str): + Identifier. Name of this order. Format: + ``projects/{project}/locations/{location}/orders/{order}`` + display_name (str): + Optional. Display name of this order. + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. Time when this order was + created. + update_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. Time when this order was last + updated. + labels (MutableMapping[str, str]): + Optional. Labels associated with this order as key value + pairs. For more information about labels, see `Create and + manage + labels `__. + state (google.cloud.gdchardwaremanagement_v1alpha.types.Order.State): + Output only. State of this order. On order + creation, state will be set to DRAFT. + organization_contact (google.cloud.gdchardwaremanagement_v1alpha.types.OrganizationContact): + Required. Customer contact information. + target_workloads (MutableSequence[str]): + Optional. Customer specified workloads of + interest targeted by this order. This must + contain <= 20 elements and the length of each + element must be <= 50 characters. + customer_motivation (str): + Required. Information about the customer's + motivation for this order. The length of this + field must be <= 1000 characters. + fulfillment_time (google.protobuf.timestamp_pb2.Timestamp): + Required. Customer specified deadline by when + this order should be fulfilled. + region_code (str): + Required. `Unicode CLDR `__ region + code where this order will be deployed. For a list of valid + CLDR region codes, see the `Language Subtag + Registry `__. + order_form_uri (str): + Output only. Link to the order form. + type_ (google.cloud.gdchardwaremanagement_v1alpha.types.Order.Type): + Output only. Type of this Order. 
+ submit_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. Time when the order was + submitted. Is auto-populated to the current time + when an order is submitted. + billing_id (str): + Required. The Google Cloud Billing ID to be + charged for this order. + existing_hardware (MutableSequence[google.cloud.gdchardwaremanagement_v1alpha.types.HardwareLocation]): + Optional. Existing hardware to be removed as + part of this order. Note: any hardware removed + will be recycled unless otherwise agreed. + """ + + class State(proto.Enum): + r"""Valid states of an order. + + Values: + STATE_UNSPECIFIED (0): + State of the order is unspecified. + DRAFT (1): + Order is being drafted by the customer and + has not been submitted yet. + SUBMITTED (2): + Order has been submitted to Google. + ACCEPTED (3): + Order has been accepted by Google. + ADDITIONAL_INFO_NEEDED (4): + Order needs more information from the + customer. + BUILDING (5): + Google has initiated building hardware for + the order. + SHIPPING (6): + The hardware has been built and is being + shipped. + INSTALLING (7): + The hardware is being installed. + FAILED (8): + An error occurred in processing the order and + customer intervention is required. + PARTIALLY_COMPLETED (9): + Order has been partially completed i.e., some + hardware have been delivered and installed. + COMPLETED (10): + Order has been completed. + CANCELLED (11): + Order has been cancelled. + """ + STATE_UNSPECIFIED = 0 + DRAFT = 1 + SUBMITTED = 2 + ACCEPTED = 3 + ADDITIONAL_INFO_NEEDED = 4 + BUILDING = 5 + SHIPPING = 6 + INSTALLING = 7 + FAILED = 8 + PARTIALLY_COMPLETED = 9 + COMPLETED = 10 + CANCELLED = 11 + + class Type(proto.Enum): + r"""Valid types of an Order. + + Values: + TYPE_UNSPECIFIED (0): + Type of the order is unspecified. + PAID (1): + Paid by the customer. + POC (2): + Proof of concept for the customer. + """ + TYPE_UNSPECIFIED = 0 + PAID = 1 + POC = 2 + + name: str = proto.Field( + proto.STRING, + number=1, + ) + display_name: str = proto.Field( + proto.STRING, + number=13, + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=3, + message=timestamp_pb2.Timestamp, + ) + labels: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=4, + ) + state: State = proto.Field( + proto.ENUM, + number=5, + enum=State, + ) + organization_contact: "OrganizationContact" = proto.Field( + proto.MESSAGE, + number=6, + message="OrganizationContact", + ) + target_workloads: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=7, + ) + customer_motivation: str = proto.Field( + proto.STRING, + number=8, + ) + fulfillment_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=9, + message=timestamp_pb2.Timestamp, + ) + region_code: str = proto.Field( + proto.STRING, + number=10, + ) + order_form_uri: str = proto.Field( + proto.STRING, + number=11, + ) + type_: Type = proto.Field( + proto.ENUM, + number=12, + enum=Type, + ) + submit_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=14, + message=timestamp_pb2.Timestamp, + ) + billing_id: str = proto.Field( + proto.STRING, + number=15, + ) + existing_hardware: MutableSequence["HardwareLocation"] = proto.RepeatedField( + proto.MESSAGE, + number=16, + message="HardwareLocation", + ) + + +class Site(proto.Message): + r"""A physical site where hardware will be installed. 
+ + Attributes: + name (str): + Identifier. Name of the site. Format: + ``projects/{project}/locations/{location}/sites/{site}`` + display_name (str): + Optional. Display name of this Site. + description (str): + Optional. Description of this Site. + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. Time when this site was created. + update_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. Time when this site was last + updated. + labels (MutableMapping[str, str]): + Optional. Labels associated with this site as key value + pairs. For more information about labels, see `Create and + manage + labels `__. + organization_contact (google.cloud.gdchardwaremanagement_v1alpha.types.OrganizationContact): + Required. Contact information for this site. + google_maps_pin_uri (str): + Required. A URL to the Google Maps address location of the + site. An example value is ``https://goo.gl/maps/xxxxxxxxx``. + access_times (MutableSequence[google.cloud.gdchardwaremanagement_v1alpha.types.TimePeriod]): + Optional. The time periods when the site is + accessible. If this field is empty, the site is + accessible at all times. + notes (str): + Optional. Any additional notes for this Site. + Please include information about: + + - security or access restrictions + - any regulations affecting the technicians + visiting the site + - any special process or approval required to + move the equipment + - whether a representative will be available + during site visits + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + display_name: str = proto.Field( + proto.STRING, + number=24, + ) + description: str = proto.Field( + proto.STRING, + number=25, + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=3, + message=timestamp_pb2.Timestamp, + ) + labels: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=4, + ) + organization_contact: "OrganizationContact" = proto.Field( + proto.MESSAGE, + number=5, + message="OrganizationContact", + ) + google_maps_pin_uri: str = proto.Field( + proto.STRING, + number=6, + ) + access_times: MutableSequence["TimePeriod"] = proto.RepeatedField( + proto.MESSAGE, + number=26, + message="TimePeriod", + ) + notes: str = proto.Field( + proto.STRING, + number=27, + ) + + +class HardwareGroup(proto.Message): + r"""A group of hardware that is part of the same order, has the + same SKU, and is delivered to the same site. + + Attributes: + name (str): + Identifier. Name of this hardware group. Format: + ``projects/{project}/locations/{location}/orders/{order}/hardwareGroups/{hardware_group}`` + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. Time when this hardware group + was created. + update_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. Time when this hardware group + was last updated. + labels (MutableMapping[str, str]): + Optional. Labels associated with this hardware group as key + value pairs. For more information about labels, see `Create + and manage + labels `__. + hardware_count (int): + Required. Number of hardware in this + HardwareGroup. + config (google.cloud.gdchardwaremanagement_v1alpha.types.HardwareConfig): + Required. Configuration for hardware in this + HardwareGroup. + site (str): + Required. 
Name of the site where the hardware in this + HardwareGroup will be delivered. Format: + ``projects/{project}/locations/{location}/sites/{site}`` + state (google.cloud.gdchardwaremanagement_v1alpha.types.HardwareGroup.State): + Output only. Current state of this + HardwareGroup. + zone (str): + Optional. Name of the zone that the hardware in this + HardwareGroup belongs to. Format: + ``projects/{project}/locations/{location}/zones/{zone}`` + requested_installation_date (google.type.date_pb2.Date): + Optional. Requested installation date for the + hardware in this HardwareGroup. Filled in by the + customer. + """ + + class State(proto.Enum): + r"""Valid states of a HardwareGroup. + + Values: + STATE_UNSPECIFIED (0): + State of the HardwareGroup is unspecified. + ADDITIONAL_INFO_NEEDED (1): + More information is required from the + customer to make progress. + BUILDING (2): + Google has initiated building hardware for + this HardwareGroup. + SHIPPING (3): + The hardware has been built and is being + shipped. + INSTALLING (4): + The hardware is being installed. + PARTIALLY_INSTALLED (5): + Some hardware in the HardwareGroup have been + installed. + INSTALLED (6): + All hardware in the HardwareGroup have been + installed. + FAILED (7): + An error occurred and customer intervention + is required. + """ + STATE_UNSPECIFIED = 0 + ADDITIONAL_INFO_NEEDED = 1 + BUILDING = 2 + SHIPPING = 3 + INSTALLING = 4 + PARTIALLY_INSTALLED = 5 + INSTALLED = 6 + FAILED = 7 + + name: str = proto.Field( + proto.STRING, + number=1, + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=3, + message=timestamp_pb2.Timestamp, + ) + labels: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=4, + ) + hardware_count: int = proto.Field( + proto.INT32, + number=5, + ) + config: "HardwareConfig" = proto.Field( + proto.MESSAGE, + number=6, + message="HardwareConfig", + ) + site: str = proto.Field( + proto.STRING, + number=7, + ) + state: State = proto.Field( + proto.ENUM, + number=8, + enum=State, + ) + zone: str = proto.Field( + proto.STRING, + number=9, + ) + requested_installation_date: date_pb2.Date = proto.Field( + proto.MESSAGE, + number=10, + message=date_pb2.Date, + ) + + +class Hardware(proto.Message): + r"""An instance of hardware installed at a site. + + Attributes: + name (str): + Identifier. Name of this hardware. Format: + ``projects/{project}/locations/{location}/hardware/{hardware}`` + display_name (str): + Optional. Display name for this hardware. + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. Time when this hardware was + created. + update_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. Time when this hardware was last + updated. + labels (MutableMapping[str, str]): + Optional. Labels associated with this hardware as key value + pairs. For more information about labels, see `Create and + manage + labels `__. + order (str): + Required. Name of the order that this hardware belongs to. + Format: + ``projects/{project}/locations/{location}/orders/{order}`` + hardware_group (str): + Output only. Name for the hardware group that this hardware + belongs to. Format: + ``projects/{project}/locations/{location}/orders/{order}/hardwareGroups/{hardware_group}`` + site (str): + Required. Name for the site that this hardware belongs to. 
+ Format: + ``projects/{project}/locations/{location}/sites/{site}`` + state (google.cloud.gdchardwaremanagement_v1alpha.types.Hardware.State): + Output only. Current state for this hardware. + ciq_uri (str): + Output only. Link to the Customer Intake + Questionnaire (CIQ) sheet for this Hardware. + config (google.cloud.gdchardwaremanagement_v1alpha.types.HardwareConfig): + Required. Configuration for this hardware. + estimated_installation_date (google.type.date_pb2.Date): + Output only. Estimated installation date for + this hardware. + physical_info (google.cloud.gdchardwaremanagement_v1alpha.types.HardwarePhysicalInfo): + Optional. Physical properties of this + hardware. + installation_info (google.cloud.gdchardwaremanagement_v1alpha.types.HardwareInstallationInfo): + Optional. Information for installation of + this hardware. + zone (str): + Required. Name for the zone that this hardware belongs to. + Format: + ``projects/{project}/locations/{location}/zones/{zone}`` + requested_installation_date (google.type.date_pb2.Date): + Optional. Requested installation date for + this hardware. This is auto-populated when the + order is accepted, if the hardware's + HardwareGroup specifies this. It can also be + filled in by the customer. + actual_installation_date (google.type.date_pb2.Date): + Output only. Actual installation date for + this hardware. Filled in by Google. + """ + + class State(proto.Enum): + r"""Valid states for hardware. + + Values: + STATE_UNSPECIFIED (0): + State of the Hardware is unspecified. + ADDITIONAL_INFO_NEEDED (1): + More information is required from the + customer to make progress. + BUILDING (2): + Google has initiated building hardware for + this Hardware. + SHIPPING (3): + The hardware has been built and is being + shipped. + INSTALLING (4): + The hardware is being installed. + INSTALLED (5): + The hardware has been installed. + FAILED (6): + An error occurred and customer intervention + is required. 
+ """ + STATE_UNSPECIFIED = 0 + ADDITIONAL_INFO_NEEDED = 1 + BUILDING = 2 + SHIPPING = 3 + INSTALLING = 4 + INSTALLED = 5 + FAILED = 6 + + name: str = proto.Field( + proto.STRING, + number=1, + ) + display_name: str = proto.Field( + proto.STRING, + number=2, + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=3, + message=timestamp_pb2.Timestamp, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=4, + message=timestamp_pb2.Timestamp, + ) + labels: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=5, + ) + order: str = proto.Field( + proto.STRING, + number=6, + ) + hardware_group: str = proto.Field( + proto.STRING, + number=7, + ) + site: str = proto.Field( + proto.STRING, + number=8, + ) + state: State = proto.Field( + proto.ENUM, + number=9, + enum=State, + ) + ciq_uri: str = proto.Field( + proto.STRING, + number=10, + ) + config: "HardwareConfig" = proto.Field( + proto.MESSAGE, + number=11, + message="HardwareConfig", + ) + estimated_installation_date: date_pb2.Date = proto.Field( + proto.MESSAGE, + number=12, + message=date_pb2.Date, + ) + physical_info: "HardwarePhysicalInfo" = proto.Field( + proto.MESSAGE, + number=13, + message="HardwarePhysicalInfo", + ) + installation_info: "HardwareInstallationInfo" = proto.Field( + proto.MESSAGE, + number=14, + message="HardwareInstallationInfo", + ) + zone: str = proto.Field( + proto.STRING, + number=15, + ) + requested_installation_date: date_pb2.Date = proto.Field( + proto.MESSAGE, + number=16, + message=date_pb2.Date, + ) + actual_installation_date: date_pb2.Date = proto.Field( + proto.MESSAGE, + number=17, + message=date_pb2.Date, + ) + + +class Comment(proto.Message): + r"""A comment on an order. + + Attributes: + name (str): + Identifier. Name of this comment. Format: + ``projects/{project}/locations/{location}/orders/{order}/comments/{comment}`` + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. Time when this comment was + created. + labels (MutableMapping[str, str]): + Optional. Labels associated with this comment as key value + pairs. For more information about labels, see `Create and + manage + labels `__. + author (str): + Output only. Username of the author of this + comment. This is auto-populated from the + credentials used during creation of the comment. + text (str): + Required. Text of this comment. The length of + text must be <= 1000 characters. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + labels: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=3, + ) + author: str = proto.Field( + proto.STRING, + number=4, + ) + text: str = proto.Field( + proto.STRING, + number=5, + ) + + +class ChangeLogEntry(proto.Message): + r"""A log entry of a change made to an order. + + Attributes: + name (str): + Identifier. Name of this change log entry. Format: + ``projects/{project}/locations/{location}/orders/{order}/changeLogEntries/{change_log_entry}`` + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. Time when this change log entry + was created. + labels (MutableMapping[str, str]): + Optional. Labels associated with this change log entry as + key value pairs. For more information about labels, see + `Create and manage + labels `__. + log (str): + Output only. Content of this log entry. 
+ """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + labels: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=3, + ) + log: str = proto.Field( + proto.STRING, + number=4, + ) + + +class Sku(proto.Message): + r"""A stock keeping unit (SKU) of GDC hardware. + + Attributes: + name (str): + Identifier. Name of this SKU. Format: + ``projects/{project}/locations/{location}/skus/{sku}`` + display_name (str): + Output only. Display name of this SKU. + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. Time when this SKU was created. + update_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. Time when this SKU was last + updated. + config (google.cloud.gdchardwaremanagement_v1alpha.types.SkuConfig): + Output only. Configuration for this SKU. + instances (MutableSequence[google.cloud.gdchardwaremanagement_v1alpha.types.SkuInstance]): + Output only. Available instances of this SKU. + This field should be used for checking + availability of a SKU. + description (str): + Output only. Description of this SKU. + revision_id (str): + Output only. The SKU revision ID. A new revision is created + whenever ``config`` is updated. The format is an 8-character + hexadecimal string. + is_active (bool): + Output only. Flag to indicate whether or not + this revision is active. Only an active revision + can be used in a new Order. + type_ (google.cloud.gdchardwaremanagement_v1alpha.types.Sku.Type): + Output only. Type of this SKU. + vcpu_count (int): + Output only. The vCPU count associated with + this SKU. + """ + + class Type(proto.Enum): + r"""Valid types of a SKU. + + Values: + TYPE_UNSPECIFIED (0): + Type of the SKU is unspecified. This is not + an allowed value. + RACK (1): + Rack SKU. + SERVER (2): + Server SKU. + """ + TYPE_UNSPECIFIED = 0 + RACK = 1 + SERVER = 2 + + name: str = proto.Field( + proto.STRING, + number=1, + ) + display_name: str = proto.Field( + proto.STRING, + number=2, + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=3, + message=timestamp_pb2.Timestamp, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=4, + message=timestamp_pb2.Timestamp, + ) + config: "SkuConfig" = proto.Field( + proto.MESSAGE, + number=6, + message="SkuConfig", + ) + instances: MutableSequence["SkuInstance"] = proto.RepeatedField( + proto.MESSAGE, + number=7, + message="SkuInstance", + ) + description: str = proto.Field( + proto.STRING, + number=8, + ) + revision_id: str = proto.Field( + proto.STRING, + number=9, + ) + is_active: bool = proto.Field( + proto.BOOL, + number=10, + ) + type_: Type = proto.Field( + proto.ENUM, + number=11, + enum=Type, + ) + vcpu_count: int = proto.Field( + proto.INT32, + number=12, + ) + + +class Zone(proto.Message): + r"""A zone holding a set of hardware. + + Attributes: + name (str): + Identifier. Name of this zone. Format: + ``projects/{project}/locations/{location}/zones/{zone}`` + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. Time when this zone was created. + update_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. Time when this zone was last + updated. + labels (MutableMapping[str, str]): + Optional. Labels associated with this zone as key value + pairs. For more information about labels, see `Create and + manage + labels `__. + display_name (str): + Optional. 
Human friendly display name of this + zone. + state (google.cloud.gdchardwaremanagement_v1alpha.types.Zone.State): + Output only. Current state for this zone. + contacts (MutableSequence[google.cloud.gdchardwaremanagement_v1alpha.types.Contact]): + Required. The points of contact. + ciq_uri (str): + Output only. Link to the Customer Intake + Questionnaire (CIQ) sheet for this zone. + network_config (google.cloud.gdchardwaremanagement_v1alpha.types.ZoneNetworkConfig): + Optional. Networking configuration for this + zone. + globally_unique_id (str): + Output only. Globally unique identifier + generated for this Edge Zone. + """ + + class State(proto.Enum): + r"""Valid states for a zone. + + Values: + STATE_UNSPECIFIED (0): + State of the Zone is unspecified. + ADDITIONAL_INFO_NEEDED (1): + More information is required from the + customer to make progress. + PREPARING (2): + Google is preparing the Zone. + READY_FOR_CUSTOMER_FACTORY_TURNUP_CHECKS (5): + Factory turnup has succeeded. + READY_FOR_SITE_TURNUP (6): + The Zone is ready for site turnup. + CUSTOMER_FACTORY_TURNUP_CHECKS_FAILED (7): + The Zone failed in factory turnup checks. + ACTIVE (3): + The Zone is available to use. + CANCELLED (4): + The Zone has been cancelled. + """ + STATE_UNSPECIFIED = 0 + ADDITIONAL_INFO_NEEDED = 1 + PREPARING = 2 + READY_FOR_CUSTOMER_FACTORY_TURNUP_CHECKS = 5 + READY_FOR_SITE_TURNUP = 6 + CUSTOMER_FACTORY_TURNUP_CHECKS_FAILED = 7 + ACTIVE = 3 + CANCELLED = 4 + + name: str = proto.Field( + proto.STRING, + number=1, + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=3, + message=timestamp_pb2.Timestamp, + ) + labels: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=4, + ) + display_name: str = proto.Field( + proto.STRING, + number=5, + ) + state: State = proto.Field( + proto.ENUM, + number=8, + enum=State, + ) + contacts: MutableSequence["Contact"] = proto.RepeatedField( + proto.MESSAGE, + number=9, + message="Contact", + ) + ciq_uri: str = proto.Field( + proto.STRING, + number=10, + ) + network_config: "ZoneNetworkConfig" = proto.Field( + proto.MESSAGE, + number=11, + message="ZoneNetworkConfig", + ) + globally_unique_id: str = proto.Field( + proto.STRING, + number=12, + ) + + +class OrganizationContact(proto.Message): + r"""Contact information of the customer organization. + + Attributes: + address (google.type.postal_address_pb2.PostalAddress): + Required. The organization's address. + email (str): + Optional. The organization's email. + phone (str): + Optional. The organization's phone number. + contacts (MutableSequence[google.cloud.gdchardwaremanagement_v1alpha.types.Contact]): + Required. The individual points of contact in + the organization at this location. + """ + + address: postal_address_pb2.PostalAddress = proto.Field( + proto.MESSAGE, + number=1, + message=postal_address_pb2.PostalAddress, + ) + email: str = proto.Field( + proto.STRING, + number=2, + ) + phone: str = proto.Field( + proto.STRING, + number=3, + ) + contacts: MutableSequence["Contact"] = proto.RepeatedField( + proto.MESSAGE, + number=4, + message="Contact", + ) + + +class Contact(proto.Message): + r"""Contact details of a point of contact. + + Attributes: + given_name (str): + Required. Given name of the contact. + family_name (str): + Optional. Family name of the contact. + email (str): + Required. Email of the contact. 
+ phone (str): + Required. Phone number of the contact. + time_zone (google.type.datetime_pb2.TimeZone): + Optional. Time zone of the contact. + reachable_times (MutableSequence[google.cloud.gdchardwaremanagement_v1alpha.types.TimePeriod]): + Optional. The time periods when the contact + is reachable. If this field is empty, the + contact is reachable at all times. + """ + + given_name: str = proto.Field( + proto.STRING, + number=1, + ) + family_name: str = proto.Field( + proto.STRING, + number=2, + ) + email: str = proto.Field( + proto.STRING, + number=3, + ) + phone: str = proto.Field( + proto.STRING, + number=4, + ) + time_zone: datetime_pb2.TimeZone = proto.Field( + proto.MESSAGE, + number=5, + message=datetime_pb2.TimeZone, + ) + reachable_times: MutableSequence["TimePeriod"] = proto.RepeatedField( + proto.MESSAGE, + number=6, + message="TimePeriod", + ) + + +class HardwareConfig(proto.Message): + r"""Configuration for GDC hardware. + + Attributes: + sku (str): + Required. Reference to the SKU for this hardware. This can + point to a specific SKU revision in the form of + ``resource_name@revision_id`` as defined in + `AIP-162 `__. If no revision_id + is specified, it refers to the latest revision. + power_supply (google.cloud.gdchardwaremanagement_v1alpha.types.PowerSupply): + Required. Power supply type for this + hardware. + subscription_duration_months (int): + Optional. Subscription duration for the + hardware in months. + """ + + sku: str = proto.Field( + proto.STRING, + number=1, + ) + power_supply: "PowerSupply" = proto.Field( + proto.ENUM, + number=2, + enum="PowerSupply", + ) + subscription_duration_months: int = proto.Field( + proto.INT32, + number=3, + ) + + +class SkuConfig(proto.Message): + r"""Configuration for a SKU. + + Attributes: + cpu (str): + Information about CPU configuration. + gpu (str): + Information about GPU configuration. + ram (str): + Information about RAM configuration. + storage (str): + Information about storage configuration. + """ + + cpu: str = proto.Field( + proto.STRING, + number=1, + ) + gpu: str = proto.Field( + proto.STRING, + number=2, + ) + ram: str = proto.Field( + proto.STRING, + number=3, + ) + storage: str = proto.Field( + proto.STRING, + number=4, + ) + + +class SkuInstance(proto.Message): + r"""A specific instance of the SKU. + + Attributes: + region_code (str): + The `Unicode CLDR `__ region code + where this instance is available. + power_supply (google.cloud.gdchardwaremanagement_v1alpha.types.PowerSupply): + Power supply type for this instance. + billing_sku (str): + Reference to the corresponding SKU in the Cloud Billing API. + The estimated price information can be retrieved using that + API. Format: ``services/{service}/skus/{sku}`` + billing_sku_per_vcpu (str): + Reference to the corresponding SKU per vCPU in the Cloud + Billing API. The estimated price information can be + retrieved using that API. Format: + ``services/{service}/skus/{sku}`` + subscription_duration_months (int): + Subscription duration for the hardware in + months. + """ + + region_code: str = proto.Field( + proto.STRING, + number=1, + ) + power_supply: "PowerSupply" = proto.Field( + proto.ENUM, + number=2, + enum="PowerSupply", + ) + billing_sku: str = proto.Field( + proto.STRING, + number=3, + ) + billing_sku_per_vcpu: str = proto.Field( + proto.STRING, + number=4, + ) + subscription_duration_months: int = proto.Field( + proto.INT32, + number=5, + ) + + +class HardwarePhysicalInfo(proto.Message): + r"""Physical properties of a hardware. 
+ + Attributes: + power_receptacle (google.cloud.gdchardwaremanagement_v1alpha.types.HardwarePhysicalInfo.PowerReceptacleType): + Required. The power receptacle type. + network_uplink (google.cloud.gdchardwaremanagement_v1alpha.types.HardwarePhysicalInfo.NetworkUplinkType): + Required. Type of the uplink network + connection. + voltage (google.cloud.gdchardwaremanagement_v1alpha.types.HardwarePhysicalInfo.Voltage): + Required. Voltage of the power supply. + amperes (google.cloud.gdchardwaremanagement_v1alpha.types.HardwarePhysicalInfo.Amperes): + Required. Amperes of the power supply. + """ + + class PowerReceptacleType(proto.Enum): + r"""Valid power receptacle types. + + Values: + POWER_RECEPTACLE_TYPE_UNSPECIFIED (0): + Facility plug type is unspecified. + NEMA_5_15 (1): + NEMA 5-15. + C_13 (2): + C13. + STANDARD_EU (3): + Standard european receptacle. + """ + POWER_RECEPTACLE_TYPE_UNSPECIFIED = 0 + NEMA_5_15 = 1 + C_13 = 2 + STANDARD_EU = 3 + + class NetworkUplinkType(proto.Enum): + r"""Valid network uplink types. + + Values: + NETWORK_UPLINK_TYPE_UNSPECIFIED (0): + Network uplink type is unspecified. + RJ_45 (1): + RJ-45. + """ + NETWORK_UPLINK_TYPE_UNSPECIFIED = 0 + RJ_45 = 1 + + class Voltage(proto.Enum): + r"""Valid voltage values. + + Values: + VOLTAGE_UNSPECIFIED (0): + Voltage is unspecified. + VOLTAGE_110 (1): + 120V. + VOLTAGE_220 (3): + 220V. + """ + VOLTAGE_UNSPECIFIED = 0 + VOLTAGE_110 = 1 + VOLTAGE_220 = 3 + + class Amperes(proto.Enum): + r"""Valid amperes values. + + Values: + AMPERES_UNSPECIFIED (0): + Amperes is unspecified. + AMPERES_15 (1): + 15A. + """ + AMPERES_UNSPECIFIED = 0 + AMPERES_15 = 1 + + power_receptacle: PowerReceptacleType = proto.Field( + proto.ENUM, + number=1, + enum=PowerReceptacleType, + ) + network_uplink: NetworkUplinkType = proto.Field( + proto.ENUM, + number=2, + enum=NetworkUplinkType, + ) + voltage: Voltage = proto.Field( + proto.ENUM, + number=3, + enum=Voltage, + ) + amperes: Amperes = proto.Field( + proto.ENUM, + number=4, + enum=Amperes, + ) + + +class HardwareInstallationInfo(proto.Message): + r"""Information for installation of a Hardware. + + Attributes: + rack_location (str): + Optional. Location of the rack in the site + e.g. Floor 2, Room 201, Row 7, Rack 3. + power_distance_meters (int): + Required. Distance from the power outlet in + meters. + switch_distance_meters (int): + Required. Distance from the network switch in + meters. + rack_unit_dimensions (google.cloud.gdchardwaremanagement_v1alpha.types.Dimensions): + Required. Dimensions of the rack unit. + rack_space (google.cloud.gdchardwaremanagement_v1alpha.types.RackSpace): + Required. Rack space allocated for the + hardware. + rack_type (google.cloud.gdchardwaremanagement_v1alpha.types.HardwareInstallationInfo.RackType): + Required. Type of the rack. + """ + + class RackType(proto.Enum): + r"""Valid rack types. + + Values: + RACK_TYPE_UNSPECIFIED (0): + Rack type is unspecified. + TWO_POST (1): + Two post rack. + FOUR_POST (2): + Four post rack. 
+ """ + RACK_TYPE_UNSPECIFIED = 0 + TWO_POST = 1 + FOUR_POST = 2 + + rack_location: str = proto.Field( + proto.STRING, + number=1, + ) + power_distance_meters: int = proto.Field( + proto.INT32, + number=2, + ) + switch_distance_meters: int = proto.Field( + proto.INT32, + number=3, + ) + rack_unit_dimensions: "Dimensions" = proto.Field( + proto.MESSAGE, + number=4, + message="Dimensions", + ) + rack_space: "RackSpace" = proto.Field( + proto.MESSAGE, + number=5, + message="RackSpace", + ) + rack_type: RackType = proto.Field( + proto.ENUM, + number=6, + enum=RackType, + ) + + +class ZoneNetworkConfig(proto.Message): + r"""Networking configuration for a zone. + + Attributes: + machine_mgmt_ipv4_range (str): + Required. An IPv4 address block for machine management. + Should be a private RFC1918 or public CIDR block large + enough to allocate at least one address per machine in the + Zone. Should be in ``management_ipv4_subnet``, and disjoint + with other address ranges. + kubernetes_node_ipv4_range (str): + Required. An IPv4 address block for kubernetes nodes. Should + be a private RFC1918 or public CIDR block large enough to + allocate at least one address per machine in the Zone. + Should be in ``kubernetes_ipv4_subnet``, and disjoint with + other address ranges. + kubernetes_control_plane_ipv4_range (str): + Required. An IPv4 address block for kubernetes control + plane. Should be a private RFC1918 or public CIDR block + large enough to allocate at least one address per cluster in + the Zone. Should be in ``kubernetes_ipv4_subnet``, and + disjoint with other address ranges. + management_ipv4_subnet (google.cloud.gdchardwaremanagement_v1alpha.types.Subnet): + Required. An IPv4 subnet for the management + network. + kubernetes_ipv4_subnet (google.cloud.gdchardwaremanagement_v1alpha.types.Subnet): + Optional. An IPv4 subnet for the kubernetes + network. If unspecified, the kubernetes subnet + will be the same as the management subnet. + """ + + machine_mgmt_ipv4_range: str = proto.Field( + proto.STRING, + number=1, + ) + kubernetes_node_ipv4_range: str = proto.Field( + proto.STRING, + number=2, + ) + kubernetes_control_plane_ipv4_range: str = proto.Field( + proto.STRING, + number=3, + ) + management_ipv4_subnet: "Subnet" = proto.Field( + proto.MESSAGE, + number=4, + message="Subnet", + ) + kubernetes_ipv4_subnet: "Subnet" = proto.Field( + proto.MESSAGE, + number=5, + message="Subnet", + ) + + +class Subnet(proto.Message): + r"""Represents a subnet. + + Attributes: + address_range (str): + Required. Address range for this subnet in + CIDR notation. + default_gateway_ip_address (str): + Required. Default gateway for this subnet. + """ + + address_range: str = proto.Field( + proto.STRING, + number=1, + ) + default_gateway_ip_address: str = proto.Field( + proto.STRING, + number=2, + ) + + +class TimePeriod(proto.Message): + r"""Represents a time period in a week. + + Attributes: + start_time (google.type.timeofday_pb2.TimeOfDay): + Required. The start of the time period. + end_time (google.type.timeofday_pb2.TimeOfDay): + Required. The end of the time period. + days (MutableSequence[google.type.dayofweek_pb2.DayOfWeek]): + Required. The days of the week that the time + period is active. 
+ """ + + start_time: timeofday_pb2.TimeOfDay = proto.Field( + proto.MESSAGE, + number=1, + message=timeofday_pb2.TimeOfDay, + ) + end_time: timeofday_pb2.TimeOfDay = proto.Field( + proto.MESSAGE, + number=2, + message=timeofday_pb2.TimeOfDay, + ) + days: MutableSequence[dayofweek_pb2.DayOfWeek] = proto.RepeatedField( + proto.ENUM, + number=3, + enum=dayofweek_pb2.DayOfWeek, + ) + + +class Dimensions(proto.Message): + r"""Represents the dimensions of an object. + + Attributes: + width_inches (float): + Required. Width in inches. + height_inches (float): + Required. Height in inches. + depth_inches (float): + Required. Depth in inches. + """ + + width_inches: float = proto.Field( + proto.FLOAT, + number=1, + ) + height_inches: float = proto.Field( + proto.FLOAT, + number=2, + ) + depth_inches: float = proto.Field( + proto.FLOAT, + number=3, + ) + + +class RackSpace(proto.Message): + r"""Represents contiguous space in a rack. + + Attributes: + start_rack_unit (int): + Required. First rack unit of the rack space + (inclusive). + end_rack_unit (int): + Required. Last rack unit of the rack space + (inclusive). + """ + + start_rack_unit: int = proto.Field( + proto.INT32, + number=1, + ) + end_rack_unit: int = proto.Field( + proto.INT32, + number=2, + ) + + +class HardwareLocation(proto.Message): + r"""Represents the location of one or many hardware. + + Attributes: + site (str): + Required. Name of the site where the hardware are present. + Format: + ``projects/{project}/locations/{location}/sites/{site}`` + rack_location (str): + Required. Location of the rack in the site + e.g. Floor 2, Room 201, Row 7, Rack 3. + rack_space (MutableSequence[google.cloud.gdchardwaremanagement_v1alpha.types.RackSpace]): + Optional. Spaces occupied by the hardware in + the rack. If unset, this location is assumed to + be the entire rack. + """ + + site: str = proto.Field( + proto.STRING, + number=1, + ) + rack_location: str = proto.Field( + proto.STRING, + number=2, + ) + rack_space: MutableSequence["RackSpace"] = proto.RepeatedField( + proto.MESSAGE, + number=3, + message="RackSpace", + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-gdchardwaremanagement/google/cloud/gdchardwaremanagement_v1alpha/types/service.py b/packages/google-cloud-gdchardwaremanagement/google/cloud/gdchardwaremanagement_v1alpha/types/service.py new file mode 100644 index 000000000000..9821128023c7 --- /dev/null +++ b/packages/google-cloud-gdchardwaremanagement/google/cloud/gdchardwaremanagement_v1alpha/types/service.py @@ -0,0 +1,1443 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
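+# This module defines the request, response, and operation metadata message
+# types for the GDCHardwareManagement v1alpha service: orders, sites, hardware
+# groups, hardware, comments, change log entries, SKUs, and zones.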
+# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +import proto # type: ignore + +from google.cloud.gdchardwaremanagement_v1alpha.types import resources + +__protobuf__ = proto.module( + package="google.cloud.gdchardwaremanagement.v1alpha", + manifest={ + "ListOrdersRequest", + "ListOrdersResponse", + "GetOrderRequest", + "CreateOrderRequest", + "UpdateOrderRequest", + "DeleteOrderRequest", + "SubmitOrderRequest", + "ListSitesRequest", + "ListSitesResponse", + "GetSiteRequest", + "CreateSiteRequest", + "UpdateSiteRequest", + "ListHardwareGroupsRequest", + "ListHardwareGroupsResponse", + "GetHardwareGroupRequest", + "CreateHardwareGroupRequest", + "UpdateHardwareGroupRequest", + "DeleteHardwareGroupRequest", + "ListHardwareRequest", + "ListHardwareResponse", + "GetHardwareRequest", + "CreateHardwareRequest", + "UpdateHardwareRequest", + "DeleteHardwareRequest", + "ListCommentsRequest", + "ListCommentsResponse", + "GetCommentRequest", + "CreateCommentRequest", + "ListChangeLogEntriesRequest", + "ListChangeLogEntriesResponse", + "GetChangeLogEntryRequest", + "ListSkusRequest", + "ListSkusResponse", + "GetSkuRequest", + "ListZonesRequest", + "ListZonesResponse", + "GetZoneRequest", + "CreateZoneRequest", + "UpdateZoneRequest", + "DeleteZoneRequest", + "SignalZoneStateRequest", + "OperationMetadata", + }, +) + + +class ListOrdersRequest(proto.Message): + r"""A request to list orders. + + Attributes: + parent (str): + Required. The project and location to list orders in. + Format: ``projects/{project}/locations/{location}`` + page_size (int): + Optional. Requested page size. Server may + return fewer items than requested. If + unspecified, server will pick an appropriate + default. + page_token (str): + Optional. A token identifying a page of + results the server should return. + filter (str): + Optional. Filtering condition. See + `AIP-160 `__. + order_by (str): + Optional. Hint for how to order the results. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + filter: str = proto.Field( + proto.STRING, + number=4, + ) + order_by: str = proto.Field( + proto.STRING, + number=5, + ) + + +class ListOrdersResponse(proto.Message): + r"""A list of orders. + + Attributes: + orders (MutableSequence[google.cloud.gdchardwaremanagement_v1alpha.types.Order]): + The list of orders. + next_page_token (str): + A token identifying a page of results the + server should return. + unreachable (MutableSequence[str]): + Locations that could not be reached. + """ + + @property + def raw_page(self): + return self + + orders: MutableSequence[resources.Order] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=resources.Order, + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + unreachable: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=3, + ) + + +class GetOrderRequest(proto.Message): + r"""A request to get an order. + + Attributes: + name (str): + Required. Name of the resource + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class CreateOrderRequest(proto.Message): + r"""A request to create an order. + + Attributes: + parent (str): + Required. The project and location to create the order in. 
+ Format: ``projects/{project}/locations/{location}`` + order_id (str): + Optional. ID used to uniquely identify the Order within its + parent scope. This field should contain at most 63 + characters and must start with lowercase characters. Only + lowercase characters, numbers and ``-`` are accepted. The + ``-`` character cannot be the first or the last one. A + system generated ID will be used if the field is not set. + + The order.name field in the request will be ignored. + order (google.cloud.gdchardwaremanagement_v1alpha.types.Order): + Required. The order to create. + request_id (str): + Optional. An optional unique identifier for this request. + See `AIP-155 `__. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + order_id: str = proto.Field( + proto.STRING, + number=2, + ) + order: resources.Order = proto.Field( + proto.MESSAGE, + number=3, + message=resources.Order, + ) + request_id: str = proto.Field( + proto.STRING, + number=4, + ) + + +class UpdateOrderRequest(proto.Message): + r"""A request to update an order. + + Attributes: + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Required. A mask to specify the fields in the Order to + overwrite with this update. The fields specified in the + update_mask are relative to the order, not the full request. + A field will be overwritten if it is in the mask. If you + don't provide a mask then all fields will be overwritten. + order (google.cloud.gdchardwaremanagement_v1alpha.types.Order): + Required. The order to update. + request_id (str): + Optional. An optional unique identifier for this request. + See `AIP-155 `__. + """ + + update_mask: field_mask_pb2.FieldMask = proto.Field( + proto.MESSAGE, + number=1, + message=field_mask_pb2.FieldMask, + ) + order: resources.Order = proto.Field( + proto.MESSAGE, + number=2, + message=resources.Order, + ) + request_id: str = proto.Field( + proto.STRING, + number=3, + ) + + +class DeleteOrderRequest(proto.Message): + r"""A request to delete an order. + + Attributes: + name (str): + Required. The name of the order. Format: + ``projects/{project}/locations/{location}/orders/{order}`` + request_id (str): + Optional. An optional unique identifier for this request. + See `AIP-155 `__. + force (bool): + Optional. An option to delete any nested + resources in the Order, such as a HardwareGroup. + If true, any nested resources for this Order + will also be deleted. Otherwise, the request + will only succeed if the Order has no nested + resources. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + request_id: str = proto.Field( + proto.STRING, + number=2, + ) + force: bool = proto.Field( + proto.BOOL, + number=3, + ) + + +class SubmitOrderRequest(proto.Message): + r"""A request to submit an order. + + Attributes: + name (str): + Required. The name of the order. Format: + ``projects/{project}/locations/{location}/orders/{order}`` + request_id (str): + Optional. An optional unique identifier for this request. + See `AIP-155 `__. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + request_id: str = proto.Field( + proto.STRING, + number=2, + ) + + +class ListSitesRequest(proto.Message): + r"""A request to list sites. + + Attributes: + parent (str): + Required. The project and location to list sites in. Format: + ``projects/{project}/locations/{location}`` + page_size (int): + Optional. Requested page size. Server may + return fewer items than requested. If + unspecified, server will pick an appropriate + default. + page_token (str): + Optional. 
A token identifying a page of + results the server should return. + filter (str): + Optional. Filtering condition. See + `AIP-160 `__. + order_by (str): + Optional. Hint for how to order the results. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + filter: str = proto.Field( + proto.STRING, + number=4, + ) + order_by: str = proto.Field( + proto.STRING, + number=5, + ) + + +class ListSitesResponse(proto.Message): + r"""A list of sites. + + Attributes: + sites (MutableSequence[google.cloud.gdchardwaremanagement_v1alpha.types.Site]): + The list of sites. + next_page_token (str): + A token identifying a page of results the + server should return. + unreachable (MutableSequence[str]): + Locations that could not be reached. + """ + + @property + def raw_page(self): + return self + + sites: MutableSequence[resources.Site] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=resources.Site, + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + unreachable: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=3, + ) + + +class GetSiteRequest(proto.Message): + r"""A request to get a site. + + Attributes: + name (str): + Required. The name of the site. Format: + ``projects/{project}/locations/{location}/sites/{site}`` + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class CreateSiteRequest(proto.Message): + r"""A request to create a site. + + Attributes: + parent (str): + Required. The project and location to create the site in. + Format: ``projects/{project}/locations/{location}`` + site_id (str): + Optional. ID used to uniquely identify the Site within its + parent scope. This field should contain at most 63 + characters and must start with lowercase characters. Only + lowercase characters, numbers and ``-`` are accepted. The + ``-`` character cannot be the first or the last one. A + system generated ID will be used if the field is not set. + + The site.name field in the request will be ignored. + site (google.cloud.gdchardwaremanagement_v1alpha.types.Site): + Required. The site to create. + request_id (str): + Optional. An optional unique identifier for this request. + See `AIP-155 `__. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + site_id: str = proto.Field( + proto.STRING, + number=2, + ) + site: resources.Site = proto.Field( + proto.MESSAGE, + number=3, + message=resources.Site, + ) + request_id: str = proto.Field( + proto.STRING, + number=4, + ) + + +class UpdateSiteRequest(proto.Message): + r"""A request to update a site. + + Attributes: + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Required. A mask to specify the fields in the Site to + overwrite with this update. The fields specified in the + update_mask are relative to the site, not the full request. + A field will be overwritten if it is in the mask. If you + don't provide a mask then all fields will be overwritten. + site (google.cloud.gdchardwaremanagement_v1alpha.types.Site): + Required. The site to update. + request_id (str): + Optional. An optional unique identifier for this request. + See `AIP-155 `__. 
+ """ + + update_mask: field_mask_pb2.FieldMask = proto.Field( + proto.MESSAGE, + number=1, + message=field_mask_pb2.FieldMask, + ) + site: resources.Site = proto.Field( + proto.MESSAGE, + number=2, + message=resources.Site, + ) + request_id: str = proto.Field( + proto.STRING, + number=3, + ) + + +class ListHardwareGroupsRequest(proto.Message): + r"""A request to list hardware groups. + + Attributes: + parent (str): + Required. The order to list hardware groups in. Format: + ``projects/{project}/locations/{location}/orders/{order}`` + page_size (int): + Optional. Requested page size. Server may + return fewer items than requested. If + unspecified, server will pick an appropriate + default. + page_token (str): + Optional. A token identifying a page of + results the server should return. + filter (str): + Optional. Filtering condition. See + `AIP-160 `__. + order_by (str): + Optional. Hint for how to order the results. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + filter: str = proto.Field( + proto.STRING, + number=4, + ) + order_by: str = proto.Field( + proto.STRING, + number=5, + ) + + +class ListHardwareGroupsResponse(proto.Message): + r"""A list of hardware groups. + + Attributes: + hardware_groups (MutableSequence[google.cloud.gdchardwaremanagement_v1alpha.types.HardwareGroup]): + The list of hardware groups. + next_page_token (str): + A token identifying a page of results the + server should return. + unreachable (MutableSequence[str]): + Locations that could not be reached. + """ + + @property + def raw_page(self): + return self + + hardware_groups: MutableSequence[resources.HardwareGroup] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=resources.HardwareGroup, + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + unreachable: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=3, + ) + + +class GetHardwareGroupRequest(proto.Message): + r"""A request to get a hardware group. + + Attributes: + name (str): + Required. The name of the hardware group. Format: + ``projects/{project}/locations/{location}/orders/{order}/hardwareGroups/{hardware_group}`` + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class CreateHardwareGroupRequest(proto.Message): + r"""A request to create a hardware group. + + Attributes: + parent (str): + Required. The order to create the hardware group in. Format: + ``projects/{project}/locations/{location}/orders/{order}`` + hardware_group_id (str): + Optional. ID used to uniquely identify the HardwareGroup + within its parent scope. This field should contain at most + 63 characters and must start with lowercase characters. Only + lowercase characters, numbers and ``-`` are accepted. The + ``-`` character cannot be the first or the last one. A + system generated ID will be used if the field is not set. + + The hardware_group.name field in the request will be + ignored. + hardware_group (google.cloud.gdchardwaremanagement_v1alpha.types.HardwareGroup): + Required. The hardware group to create. + request_id (str): + Optional. An optional unique identifier for this request. + See `AIP-155 `__. 
+ """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + hardware_group_id: str = proto.Field( + proto.STRING, + number=2, + ) + hardware_group: resources.HardwareGroup = proto.Field( + proto.MESSAGE, + number=3, + message=resources.HardwareGroup, + ) + request_id: str = proto.Field( + proto.STRING, + number=4, + ) + + +class UpdateHardwareGroupRequest(proto.Message): + r"""A request to update a hardware group. + + Attributes: + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Required. A mask to specify the fields in the HardwareGroup + to overwrite with this update. The fields specified in the + update_mask are relative to the hardware group, not the full + request. A field will be overwritten if it is in the mask. + If you don't provide a mask then all fields will be + overwritten. + hardware_group (google.cloud.gdchardwaremanagement_v1alpha.types.HardwareGroup): + Required. The hardware group to update. + request_id (str): + Optional. An optional unique identifier for this request. + See `AIP-155 `__. + """ + + update_mask: field_mask_pb2.FieldMask = proto.Field( + proto.MESSAGE, + number=1, + message=field_mask_pb2.FieldMask, + ) + hardware_group: resources.HardwareGroup = proto.Field( + proto.MESSAGE, + number=2, + message=resources.HardwareGroup, + ) + request_id: str = proto.Field( + proto.STRING, + number=3, + ) + + +class DeleteHardwareGroupRequest(proto.Message): + r"""A request to delete a hardware group. + + Attributes: + name (str): + Required. The name of the hardware group. Format: + ``projects/{project}/locations/{location}/orders/{order}/hardwareGroups/{hardware_group}`` + request_id (str): + Optional. An optional unique identifier for this request. + See `AIP-155 `__. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + request_id: str = proto.Field( + proto.STRING, + number=2, + ) + + +class ListHardwareRequest(proto.Message): + r"""A request to list hardware. + + Attributes: + parent (str): + Required. The project and location to list hardware in. + Format: ``projects/{project}/locations/{location}`` + page_size (int): + Optional. Requested page size. Server may + return fewer items than requested. If + unspecified, server will pick an appropriate + default. + page_token (str): + Optional. A token identifying a page of + results the server should return. + filter (str): + Optional. Filtering condition. See + `AIP-160 `__. + order_by (str): + Optional. Hint for how to order the results. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + filter: str = proto.Field( + proto.STRING, + number=4, + ) + order_by: str = proto.Field( + proto.STRING, + number=5, + ) + + +class ListHardwareResponse(proto.Message): + r"""A list of hardware. + + Attributes: + hardware (MutableSequence[google.cloud.gdchardwaremanagement_v1alpha.types.Hardware]): + The list of hardware. + next_page_token (str): + A token identifying a page of results the + server should return. + unreachable (MutableSequence[str]): + Locations that could not be reached. 
+ """ + + @property + def raw_page(self): + return self + + hardware: MutableSequence[resources.Hardware] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=resources.Hardware, + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + unreachable: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=3, + ) + + +class GetHardwareRequest(proto.Message): + r"""A request to get hardware. + + Attributes: + name (str): + Required. The name of the hardware. Format: + ``projects/{project}/locations/{location}/hardware/{hardware}`` + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class CreateHardwareRequest(proto.Message): + r"""A request to create hardware. + + Attributes: + parent (str): + Required. The project and location to create hardware in. + Format: ``projects/{project}/locations/{location}`` + hardware_id (str): + Optional. ID used to uniquely identify the Hardware within + its parent scope. This field should contain at most 63 + characters and must start with lowercase characters. Only + lowercase characters, numbers and ``-`` are accepted. The + ``-`` character cannot be the first or the last one. A + system generated ID will be used if the field is not set. + + The hardware.name field in the request will be ignored. + hardware (google.cloud.gdchardwaremanagement_v1alpha.types.Hardware): + Required. The resource to create. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + hardware_id: str = proto.Field( + proto.STRING, + number=2, + ) + hardware: resources.Hardware = proto.Field( + proto.MESSAGE, + number=3, + message=resources.Hardware, + ) + + +class UpdateHardwareRequest(proto.Message): + r"""A request to update hardware. + + Attributes: + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Required. A mask to specify the fields in the Hardware to + overwrite with this update. The fields specified in the + update_mask are relative to the hardware, not the full + request. A field will be overwritten if it is in the mask. + If you don't provide a mask then all fields will be + overwritten. + hardware (google.cloud.gdchardwaremanagement_v1alpha.types.Hardware): + Required. The hardware to update. + request_id (str): + Optional. An optional unique identifier for this request. + See `AIP-155 `__. + """ + + update_mask: field_mask_pb2.FieldMask = proto.Field( + proto.MESSAGE, + number=1, + message=field_mask_pb2.FieldMask, + ) + hardware: resources.Hardware = proto.Field( + proto.MESSAGE, + number=2, + message=resources.Hardware, + ) + request_id: str = proto.Field( + proto.STRING, + number=3, + ) + + +class DeleteHardwareRequest(proto.Message): + r"""A request to delete hardware. + + Attributes: + name (str): + Required. The name of the hardware. Format: + ``projects/{project}/locations/{location}/hardware/{hardware}`` + request_id (str): + Optional. An optional unique identifier for this request. + See `AIP-155 `__. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + request_id: str = proto.Field( + proto.STRING, + number=2, + ) + + +class ListCommentsRequest(proto.Message): + r"""A request to list comments. + + Attributes: + parent (str): + Required. The order to list comments on. Format: + ``projects/{project}/locations/{location}/orders/{order}`` + page_size (int): + Optional. Requested page size. Server may + return fewer items than requested. If + unspecified, server will pick an appropriate + default. + page_token (str): + Optional. 
A token identifying a page of + results the server should return. + filter (str): + Optional. Filtering condition. See + `AIP-160 `__. + order_by (str): + Optional. Hint for how to order the results. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + filter: str = proto.Field( + proto.STRING, + number=4, + ) + order_by: str = proto.Field( + proto.STRING, + number=5, + ) + + +class ListCommentsResponse(proto.Message): + r"""A request to list comments. + + Attributes: + comments (MutableSequence[google.cloud.gdchardwaremanagement_v1alpha.types.Comment]): + The list of comments. + next_page_token (str): + A token identifying a page of results the + server should return. + unreachable (MutableSequence[str]): + Locations that could not be reached. + """ + + @property + def raw_page(self): + return self + + comments: MutableSequence[resources.Comment] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=resources.Comment, + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + unreachable: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=3, + ) + + +class GetCommentRequest(proto.Message): + r"""A request to get a comment. + + Attributes: + name (str): + Required. The name of the comment. Format: + ``projects/{project}/locations/{location}/orders/{order}/comments/{comment}`` + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class CreateCommentRequest(proto.Message): + r"""A request to create a comment. + + Attributes: + parent (str): + Required. The order to create the comment on. Format: + ``projects/{project}/locations/{location}/orders/{order}`` + comment_id (str): + Optional. ID used to uniquely identify the Comment within + its parent scope. This field should contain at most 63 + characters and must start with lowercase characters. Only + lowercase characters, numbers and ``-`` are accepted. The + ``-`` character cannot be the first or the last one. A + system generated ID will be used if the field is not set. + + The comment.name field in the request will be ignored. + comment (google.cloud.gdchardwaremanagement_v1alpha.types.Comment): + Required. The comment to create. + request_id (str): + Optional. An optional unique identifier for this request. + See `AIP-155 `__. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + comment_id: str = proto.Field( + proto.STRING, + number=2, + ) + comment: resources.Comment = proto.Field( + proto.MESSAGE, + number=3, + message=resources.Comment, + ) + request_id: str = proto.Field( + proto.STRING, + number=4, + ) + + +class ListChangeLogEntriesRequest(proto.Message): + r"""A request to list change log entries. + + Attributes: + parent (str): + Required. The order to list change log entries for. Format: + ``projects/{project}/locations/{location}/orders/{order}`` + page_size (int): + Optional. Requested page size. Server may + return fewer items than requested. If + unspecified, server will pick an appropriate + default. + page_token (str): + Optional. A token identifying a page of + results the server should return. + filter (str): + Optional. Filtering condition. See + `AIP-160 `__. + order_by (str): + Optional. Hint for how to order the results. 
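+
+    A minimal construction sketch (the identifiers and page size are
+    placeholders)::
+
+        request = ListChangeLogEntriesRequest(
+            parent="projects/my-project/locations/us-central1/orders/my-order",
+            page_size=50,
+        )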
+ """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + filter: str = proto.Field( + proto.STRING, + number=4, + ) + order_by: str = proto.Field( + proto.STRING, + number=5, + ) + + +class ListChangeLogEntriesResponse(proto.Message): + r"""A list of change log entries. + + Attributes: + change_log_entries (MutableSequence[google.cloud.gdchardwaremanagement_v1alpha.types.ChangeLogEntry]): + The list of change log entries. + next_page_token (str): + A token identifying a page of results the + server should return. + unreachable (MutableSequence[str]): + Locations that could not be reached. + """ + + @property + def raw_page(self): + return self + + change_log_entries: MutableSequence[resources.ChangeLogEntry] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=resources.ChangeLogEntry, + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + unreachable: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=3, + ) + + +class GetChangeLogEntryRequest(proto.Message): + r"""A request to get a change log entry. + + Attributes: + name (str): + Required. The name of the change log entry. Format: + ``projects/{project}/locations/{location}/orders/{order}/changeLogEntries/{change_log_entry}`` + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class ListSkusRequest(proto.Message): + r"""A request to list SKUs. + + Attributes: + parent (str): + Required. The project and location to list SKUs in. Format: + ``projects/{project}/locations/{location}`` + page_size (int): + Optional. Requested page size. Server may + return fewer items than requested. If + unspecified, server will pick an appropriate + default. + page_token (str): + Optional. A token identifying a page of + results the server should return. + filter (str): + Optional. Filtering condition. See + `AIP-160 `__. + order_by (str): + Optional. Hint for how to order the results. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + filter: str = proto.Field( + proto.STRING, + number=4, + ) + order_by: str = proto.Field( + proto.STRING, + number=5, + ) + + +class ListSkusResponse(proto.Message): + r"""A list of SKUs. + + Attributes: + skus (MutableSequence[google.cloud.gdchardwaremanagement_v1alpha.types.Sku]): + The list of SKUs. + next_page_token (str): + A token identifying a page of results the + server should return. + unreachable (MutableSequence[str]): + Locations that could not be reached. + """ + + @property + def raw_page(self): + return self + + skus: MutableSequence[resources.Sku] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=resources.Sku, + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + unreachable: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=3, + ) + + +class GetSkuRequest(proto.Message): + r"""A request to get an SKU. + + Attributes: + name (str): + Required. The name of the SKU. Format: + ``projects/{project}/locations/{location}/skus/{sku}`` + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class ListZonesRequest(proto.Message): + r"""A request to list zones. + + Attributes: + parent (str): + Required. The project and location to list zones in. 
Format: + ``projects/{project}/locations/{location}`` + page_size (int): + Optional. Requested page size. Server may + return fewer items than requested. If + unspecified, server will pick an appropriate + default. + page_token (str): + Optional. A token identifying a page of + results the server should return. + filter (str): + Optional. Filtering condition. See + `AIP-160 `__. + order_by (str): + Optional. Hint for how to order the results. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + filter: str = proto.Field( + proto.STRING, + number=4, + ) + order_by: str = proto.Field( + proto.STRING, + number=5, + ) + + +class ListZonesResponse(proto.Message): + r"""A list of zones. + + Attributes: + zones (MutableSequence[google.cloud.gdchardwaremanagement_v1alpha.types.Zone]): + The list of zones. + next_page_token (str): + A token identifying a page of results the + server should return. + unreachable (MutableSequence[str]): + Locations that could not be reached. + """ + + @property + def raw_page(self): + return self + + zones: MutableSequence[resources.Zone] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=resources.Zone, + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + unreachable: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=3, + ) + + +class GetZoneRequest(proto.Message): + r"""A request to get a zone. + + Attributes: + name (str): + Required. The name of the zone. Format: + ``projects/{project}/locations/{location}/zones/{zone}`` + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class CreateZoneRequest(proto.Message): + r"""A request to create a zone. + + Attributes: + parent (str): + Required. The project and location to create the zone in. + Format: ``projects/{project}/locations/{location}`` + zone_id (str): + Optional. ID used to uniquely identify the Zone within its + parent scope. This field should contain at most 63 + characters and must start with lowercase characters. Only + lowercase characters, numbers and ``-`` are accepted. The + ``-`` character cannot be the first or the last one. A + system generated ID will be used if the field is not set. + + The zone.name field in the request will be ignored. + zone (google.cloud.gdchardwaremanagement_v1alpha.types.Zone): + Required. The zone to create. + request_id (str): + Optional. An optional unique identifier for this request. + See `AIP-155 `__. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + zone_id: str = proto.Field( + proto.STRING, + number=2, + ) + zone: resources.Zone = proto.Field( + proto.MESSAGE, + number=3, + message=resources.Zone, + ) + request_id: str = proto.Field( + proto.STRING, + number=4, + ) + + +class UpdateZoneRequest(proto.Message): + r"""A request to update a zone. + + Attributes: + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Required. A mask to specify the fields in the Zone to + overwrite with this update. The fields specified in the + update_mask are relative to the zone, not the full request. + A field will be overwritten if it is in the mask. If you + don't provide a mask then all fields will be overwritten. + zone (google.cloud.gdchardwaremanagement_v1alpha.types.Zone): + Required. The zone to update. + request_id (str): + Optional. An optional unique identifier for this request. + See `AIP-155 `__. 
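+
+    A minimal construction sketch (``zone`` is assumed to be a ``Zone`` that
+    already carries the desired changes; the mask path is illustrative)::
+
+        from google.protobuf import field_mask_pb2
+
+        request = UpdateZoneRequest(
+            zone=zone,
+            update_mask=field_mask_pb2.FieldMask(paths=["display_name"]),
+        )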
+ """ + + update_mask: field_mask_pb2.FieldMask = proto.Field( + proto.MESSAGE, + number=1, + message=field_mask_pb2.FieldMask, + ) + zone: resources.Zone = proto.Field( + proto.MESSAGE, + number=2, + message=resources.Zone, + ) + request_id: str = proto.Field( + proto.STRING, + number=3, + ) + + +class DeleteZoneRequest(proto.Message): + r"""A request to delete a zone. + + Attributes: + name (str): + Required. The name of the zone. Format: + ``projects/{project}/locations/{location}/zones/{zone}`` + request_id (str): + Optional. An optional unique identifier for this request. + See `AIP-155 `__. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + request_id: str = proto.Field( + proto.STRING, + number=2, + ) + + +class SignalZoneStateRequest(proto.Message): + r"""A request to signal the state of a zone. + + Attributes: + name (str): + Required. The name of the zone. Format: + ``projects/{project}/locations/{location}/zones/{zone}`` + request_id (str): + Optional. An optional unique identifier for this request. + See `AIP-155 `__. + state_signal (google.cloud.gdchardwaremanagement_v1alpha.types.SignalZoneStateRequest.StateSignal): + Required. The state signal to send for this + zone. + """ + + class StateSignal(proto.Enum): + r"""Valid state signals for a zone. + + Values: + STATE_SIGNAL_UNSPECIFIED (0): + State signal of the zone is unspecified. + READY_FOR_SITE_TURNUP (1): + The Zone is ready for site turnup. + FACTORY_TURNUP_CHECKS_FAILED (2): + The Zone failed in factory turnup checks. + """ + STATE_SIGNAL_UNSPECIFIED = 0 + READY_FOR_SITE_TURNUP = 1 + FACTORY_TURNUP_CHECKS_FAILED = 2 + + name: str = proto.Field( + proto.STRING, + number=1, + ) + request_id: str = proto.Field( + proto.STRING, + number=2, + ) + state_signal: StateSignal = proto.Field( + proto.ENUM, + number=3, + enum=StateSignal, + ) + + +class OperationMetadata(proto.Message): + r"""Represents the metadata of a long-running operation. + + Attributes: + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time the operation was + created. + end_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time the operation finished + running. + target (str): + Output only. Server-defined resource path for + the target of the operation. + verb (str): + Output only. The verb executed by the + operation. + status_message (str): + Output only. Human-readable status of the + operation, if any. + requested_cancellation (bool): + Output only. Identifies whether the user has requested + cancellation of the operation. Operations that have been + cancelled successfully have [Operation.error][] value with a + [google.rpc.Status.code][google.rpc.Status.code] of 1, + corresponding to ``Code.CANCELLED``. + api_version (str): + Output only. API version used to start the + operation. 
+ """ + + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=1, + message=timestamp_pb2.Timestamp, + ) + end_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + target: str = proto.Field( + proto.STRING, + number=3, + ) + verb: str = proto.Field( + proto.STRING, + number=4, + ) + status_message: str = proto.Field( + proto.STRING, + number=5, + ) + requested_cancellation: bool = proto.Field( + proto.BOOL, + number=6, + ) + api_version: str = proto.Field( + proto.STRING, + number=7, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-gdchardwaremanagement/mypy.ini b/packages/google-cloud-gdchardwaremanagement/mypy.ini new file mode 100644 index 000000000000..574c5aed394b --- /dev/null +++ b/packages/google-cloud-gdchardwaremanagement/mypy.ini @@ -0,0 +1,3 @@ +[mypy] +python_version = 3.7 +namespace_packages = True diff --git a/packages/google-cloud-gdchardwaremanagement/noxfile.py b/packages/google-cloud-gdchardwaremanagement/noxfile.py new file mode 100644 index 000000000000..67b7265f7586 --- /dev/null +++ b/packages/google-cloud-gdchardwaremanagement/noxfile.py @@ -0,0 +1,452 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Generated by synthtool. DO NOT EDIT! + +from __future__ import absolute_import + +import os +import pathlib +import re +import shutil +from typing import Dict, List +import warnings + +import nox + +BLACK_VERSION = "black[jupyter]==23.7.0" +ISORT_VERSION = "isort==5.11.0" + +LINT_PATHS = ["docs", "google", "tests", "noxfile.py", "setup.py"] + + +DEFAULT_PYTHON_VERSION = "3.10" + +UNIT_TEST_PYTHON_VERSIONS: List[str] = ["3.7", "3.8", "3.9", "3.10", "3.11", "3.12"] +UNIT_TEST_STANDARD_DEPENDENCIES = [ + "mock", + "asyncmock", + "pytest", + "pytest-cov", + "pytest-asyncio", +] +UNIT_TEST_EXTERNAL_DEPENDENCIES: List[str] = [] +UNIT_TEST_LOCAL_DEPENDENCIES: List[str] = [] +UNIT_TEST_DEPENDENCIES: List[str] = [] +UNIT_TEST_EXTRAS: List[str] = [] +UNIT_TEST_EXTRAS_BY_PYTHON: Dict[str, List[str]] = {} + +SYSTEM_TEST_PYTHON_VERSIONS: List[str] = ["3.8", "3.9", "3.10", "3.11", "3.12"] +SYSTEM_TEST_STANDARD_DEPENDENCIES = [ + "mock", + "pytest", + "google-cloud-testutils", +] +SYSTEM_TEST_EXTERNAL_DEPENDENCIES: List[str] = [] +SYSTEM_TEST_LOCAL_DEPENDENCIES: List[str] = [] +SYSTEM_TEST_DEPENDENCIES: List[str] = [] +SYSTEM_TEST_EXTRAS: List[str] = [] +SYSTEM_TEST_EXTRAS_BY_PYTHON: Dict[str, List[str]] = {} + +CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute() + +# 'docfx' is excluded since it only needs to run in 'docs-presubmit' +nox.options.sessions = [ + "unit", + "system", + "cover", + "lint", + "lint_setup_py", + "blacken", + "docs", +] + +# Error if a python version is missing +nox.options.error_on_missing_interpreters = True + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def lint(session): + """Run linters. 
+ + Returns a failure if the linters find linting errors or sufficiently + serious code quality issues. + """ + session.install("flake8", BLACK_VERSION) + session.run( + "black", + "--check", + *LINT_PATHS, + ) + + session.run("flake8", "google", "tests") + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def blacken(session): + """Run black. Format code to uniform standard.""" + session.install(BLACK_VERSION) + session.run( + "black", + *LINT_PATHS, + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def format(session): + """ + Run isort to sort imports. Then run black + to format code to uniform standard. + """ + session.install(BLACK_VERSION, ISORT_VERSION) + # Use the --fss option to sort imports using strict alphabetical order. + # See https://pycqa.github.io/isort/docs/configuration/options.html#force-sort-within-sections + session.run( + "isort", + "--fss", + *LINT_PATHS, + ) + session.run( + "black", + *LINT_PATHS, + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def lint_setup_py(session): + """Verify that setup.py is valid (including RST check).""" + session.install("docutils", "pygments") + session.run("python", "setup.py", "check", "--restructuredtext", "--strict") + + +def install_unittest_dependencies(session, *constraints): + standard_deps = UNIT_TEST_STANDARD_DEPENDENCIES + UNIT_TEST_DEPENDENCIES + session.install(*standard_deps, *constraints) + + if UNIT_TEST_EXTERNAL_DEPENDENCIES: + warnings.warn( + "'unit_test_external_dependencies' is deprecated. Instead, please " + "use 'unit_test_dependencies' or 'unit_test_local_dependencies'.", + DeprecationWarning, + ) + session.install(*UNIT_TEST_EXTERNAL_DEPENDENCIES, *constraints) + + if UNIT_TEST_LOCAL_DEPENDENCIES: + session.install(*UNIT_TEST_LOCAL_DEPENDENCIES, *constraints) + + if UNIT_TEST_EXTRAS_BY_PYTHON: + extras = UNIT_TEST_EXTRAS_BY_PYTHON.get(session.python, []) + elif UNIT_TEST_EXTRAS: + extras = UNIT_TEST_EXTRAS + else: + extras = [] + + if extras: + session.install("-e", f".[{','.join(extras)}]", *constraints) + else: + session.install("-e", ".", *constraints) + + +@nox.session(python=UNIT_TEST_PYTHON_VERSIONS) +@nox.parametrize( + "protobuf_implementation", + ["python", "upb", "cpp"], +) +def unit(session, protobuf_implementation): + # Install all test dependencies, then install this package in-place. + + if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12"): + session.skip("cpp implementation is not supported in python 3.11+") + + constraints_path = str( + CURRENT_DIRECTORY / "testing" / f"constraints-{session.python}.txt" + ) + install_unittest_dependencies(session, "-c", constraints_path) + + # TODO(https://github.com/googleapis/synthtool/issues/1976): + # Remove the 'cpp' implementation once support for Protobuf 3.x is dropped. + # The 'cpp' implementation requires Protobuf<4. + if protobuf_implementation == "cpp": + session.install("protobuf<4") + + # Run py.test against the unit tests. + session.run( + "py.test", + "--quiet", + f"--junitxml=unit_{session.python}_sponge_log.xml", + "--cov=google", + "--cov=tests/unit", + "--cov-append", + "--cov-config=.coveragerc", + "--cov-report=", + "--cov-fail-under=0", + os.path.join("tests", "unit"), + *session.posargs, + env={ + "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation, + }, + ) + + +def install_systemtest_dependencies(session, *constraints): + # Use pre-release gRPC for system tests. 
+ # Exclude version 1.52.0rc1 which has a known issue. + # See https://github.com/grpc/grpc/issues/32163 + session.install("--pre", "grpcio!=1.52.0rc1") + + session.install(*SYSTEM_TEST_STANDARD_DEPENDENCIES, *constraints) + + if SYSTEM_TEST_EXTERNAL_DEPENDENCIES: + session.install(*SYSTEM_TEST_EXTERNAL_DEPENDENCIES, *constraints) + + if SYSTEM_TEST_LOCAL_DEPENDENCIES: + session.install("-e", *SYSTEM_TEST_LOCAL_DEPENDENCIES, *constraints) + + if SYSTEM_TEST_DEPENDENCIES: + session.install("-e", *SYSTEM_TEST_DEPENDENCIES, *constraints) + + if SYSTEM_TEST_EXTRAS_BY_PYTHON: + extras = SYSTEM_TEST_EXTRAS_BY_PYTHON.get(session.python, []) + elif SYSTEM_TEST_EXTRAS: + extras = SYSTEM_TEST_EXTRAS + else: + extras = [] + + if extras: + session.install("-e", f".[{','.join(extras)}]", *constraints) + else: + session.install("-e", ".", *constraints) + + +@nox.session(python=SYSTEM_TEST_PYTHON_VERSIONS) +def system(session): + """Run the system test suite.""" + constraints_path = str( + CURRENT_DIRECTORY / "testing" / f"constraints-{session.python}.txt" + ) + system_test_path = os.path.join("tests", "system.py") + system_test_folder_path = os.path.join("tests", "system") + + # Check the value of `RUN_SYSTEM_TESTS` env var. It defaults to true. + if os.environ.get("RUN_SYSTEM_TESTS", "true") == "false": + session.skip("RUN_SYSTEM_TESTS is set to false, skipping") + # Install pyopenssl for mTLS testing. + if os.environ.get("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") == "true": + session.install("pyopenssl") + + system_test_exists = os.path.exists(system_test_path) + system_test_folder_exists = os.path.exists(system_test_folder_path) + # Sanity check: only run tests if found. + if not system_test_exists and not system_test_folder_exists: + session.skip("System tests were not found") + + install_systemtest_dependencies(session, "-c", constraints_path) + + # Run py.test against the system tests. + if system_test_exists: + session.run( + "py.test", + "--quiet", + f"--junitxml=system_{session.python}_sponge_log.xml", + system_test_path, + *session.posargs, + ) + if system_test_folder_exists: + session.run( + "py.test", + "--quiet", + f"--junitxml=system_{session.python}_sponge_log.xml", + system_test_folder_path, + *session.posargs, + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def cover(session): + """Run the final coverage report. + + This outputs the coverage report aggregating coverage from the unit + test runs (not system test runs), and then erases coverage data. + """ + session.install("coverage", "pytest-cov") + session.run("coverage", "report", "--show-missing", "--fail-under=100") + + session.run("coverage", "erase") + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def docs(session): + """Build the docs for this library.""" + + session.install("-e", ".") + session.install( + # We need to pin to specific versions of the `sphinxcontrib-*` packages + # which still support sphinx 4.x. + # See https://github.com/googleapis/sphinx-docfx-yaml/issues/344 + # and https://github.com/googleapis/sphinx-docfx-yaml/issues/345. 
+ "sphinxcontrib-applehelp==1.0.4", + "sphinxcontrib-devhelp==1.0.2", + "sphinxcontrib-htmlhelp==2.0.1", + "sphinxcontrib-qthelp==1.0.3", + "sphinxcontrib-serializinghtml==1.1.5", + "sphinx==4.5.0", + "alabaster", + "recommonmark", + ) + + shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) + session.run( + "sphinx-build", + "-W", # warnings as errors + "-T", # show full traceback on exception + "-N", # no colors + "-b", + "html", + "-d", + os.path.join("docs", "_build", "doctrees", ""), + os.path.join("docs", ""), + os.path.join("docs", "_build", "html", ""), + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def docfx(session): + """Build the docfx yaml files for this library.""" + + session.install("-e", ".") + session.install( + # We need to pin to specific versions of the `sphinxcontrib-*` packages + # which still support sphinx 4.x. + # See https://github.com/googleapis/sphinx-docfx-yaml/issues/344 + # and https://github.com/googleapis/sphinx-docfx-yaml/issues/345. + "sphinxcontrib-applehelp==1.0.4", + "sphinxcontrib-devhelp==1.0.2", + "sphinxcontrib-htmlhelp==2.0.1", + "sphinxcontrib-qthelp==1.0.3", + "sphinxcontrib-serializinghtml==1.1.5", + "gcp-sphinx-docfx-yaml", + "alabaster", + "recommonmark", + ) + + shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) + session.run( + "sphinx-build", + "-T", # show full traceback on exception + "-N", # no colors + "-D", + ( + "extensions=sphinx.ext.autodoc," + "sphinx.ext.autosummary," + "docfx_yaml.extension," + "sphinx.ext.intersphinx," + "sphinx.ext.coverage," + "sphinx.ext.napoleon," + "sphinx.ext.todo," + "sphinx.ext.viewcode," + "recommonmark" + ), + "-b", + "html", + "-d", + os.path.join("docs", "_build", "doctrees", ""), + os.path.join("docs", ""), + os.path.join("docs", "_build", "html", ""), + ) + + +@nox.session(python="3.12") +@nox.parametrize( + "protobuf_implementation", + ["python", "upb", "cpp"], +) +def prerelease_deps(session, protobuf_implementation): + """Run all tests with prerelease versions of dependencies installed.""" + + if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12"): + session.skip("cpp implementation is not supported in python 3.11+") + + # Install all dependencies + session.install("-e", ".[all, tests, tracing]") + unit_deps_all = UNIT_TEST_STANDARD_DEPENDENCIES + UNIT_TEST_EXTERNAL_DEPENDENCIES + session.install(*unit_deps_all) + system_deps_all = ( + SYSTEM_TEST_STANDARD_DEPENDENCIES + + SYSTEM_TEST_EXTERNAL_DEPENDENCIES + + SYSTEM_TEST_EXTRAS + ) + session.install(*system_deps_all) + + # Because we test minimum dependency versions on the minimum Python + # version, the first version we test with in the unit tests sessions has a + # constraints file containing all dependencies and extras. + with open( + CURRENT_DIRECTORY + / "testing" + / f"constraints-{UNIT_TEST_PYTHON_VERSIONS[0]}.txt", + encoding="utf-8", + ) as constraints_file: + constraints_text = constraints_file.read() + + # Ignore leading whitespace and comment lines. 
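+    # The pattern keeps only the package name from pinned entries such as
+    # "google-api-core==2.19.0" (the token immediately before "=="); lines
+    # without an "==" pin, including comment lines, simply do not match.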
+ constraints_deps = [ + match.group(1) + for match in re.finditer( + r"^\s*(\S+)(?===\S+)", constraints_text, flags=re.MULTILINE + ) + ] + + session.install(*constraints_deps) + + prerel_deps = [ + "protobuf", + # dependency of grpc + "six", + "grpc-google-iam-v1", + "googleapis-common-protos", + "grpcio", + "grpcio-status", + "google-api-core", + "google-auth", + "proto-plus", + "google-cloud-testutils", + # dependencies of google-cloud-testutils" + "click", + ] + + for dep in prerel_deps: + session.install("--pre", "--no-deps", "--upgrade", dep) + + # Remaining dependencies + other_deps = [ + "requests", + ] + session.install(*other_deps) + + # Print out prerelease package versions + session.run( + "python", "-c", "import google.protobuf; print(google.protobuf.__version__)" + ) + session.run("python", "-c", "import grpc; print(grpc.__version__)") + session.run("python", "-c", "import google.auth; print(google.auth.__version__)") + + session.run( + "py.test", + "tests/unit", + env={ + "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation, + }, + ) diff --git a/packages/google-cloud-gdchardwaremanagement/samples/generated_samples/gdchardwaremanagement_v1alpha_generated_gdc_hardware_management_create_comment_async.py b/packages/google-cloud-gdchardwaremanagement/samples/generated_samples/gdchardwaremanagement_v1alpha_generated_gdc_hardware_management_create_comment_async.py new file mode 100644 index 000000000000..b51d09489f15 --- /dev/null +++ b/packages/google-cloud-gdchardwaremanagement/samples/generated_samples/gdchardwaremanagement_v1alpha_generated_gdc_hardware_management_create_comment_async.py @@ -0,0 +1,60 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateComment +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-gdchardwaremanagement + + +# [START gdchardwaremanagement_v1alpha_generated_GDCHardwareManagement_CreateComment_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import gdchardwaremanagement_v1alpha + + +async def sample_create_comment(): + # Create a client + client = gdchardwaremanagement_v1alpha.GDCHardwareManagementAsyncClient() + + # Initialize request argument(s) + comment = gdchardwaremanagement_v1alpha.Comment() + comment.text = "text_value" + + request = gdchardwaremanagement_v1alpha.CreateCommentRequest( + parent="parent_value", + comment=comment, + ) + + # Make the request + operation = client.create_comment(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END gdchardwaremanagement_v1alpha_generated_GDCHardwareManagement_CreateComment_async] diff --git a/packages/google-cloud-gdchardwaremanagement/samples/generated_samples/gdchardwaremanagement_v1alpha_generated_gdc_hardware_management_create_comment_sync.py b/packages/google-cloud-gdchardwaremanagement/samples/generated_samples/gdchardwaremanagement_v1alpha_generated_gdc_hardware_management_create_comment_sync.py new file mode 100644 index 000000000000..ec73172220eb --- /dev/null +++ b/packages/google-cloud-gdchardwaremanagement/samples/generated_samples/gdchardwaremanagement_v1alpha_generated_gdc_hardware_management_create_comment_sync.py @@ -0,0 +1,60 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateComment +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-gdchardwaremanagement + + +# [START gdchardwaremanagement_v1alpha_generated_GDCHardwareManagement_CreateComment_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import gdchardwaremanagement_v1alpha + + +def sample_create_comment(): + # Create a client + client = gdchardwaremanagement_v1alpha.GDCHardwareManagementClient() + + # Initialize request argument(s) + comment = gdchardwaremanagement_v1alpha.Comment() + comment.text = "text_value" + + request = gdchardwaremanagement_v1alpha.CreateCommentRequest( + parent="parent_value", + comment=comment, + ) + + # Make the request + operation = client.create_comment(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END gdchardwaremanagement_v1alpha_generated_GDCHardwareManagement_CreateComment_sync] diff --git a/packages/google-cloud-gdchardwaremanagement/samples/generated_samples/gdchardwaremanagement_v1alpha_generated_gdc_hardware_management_create_hardware_async.py b/packages/google-cloud-gdchardwaremanagement/samples/generated_samples/gdchardwaremanagement_v1alpha_generated_gdc_hardware_management_create_hardware_async.py new file mode 100644 index 000000000000..49482ac6b944 --- /dev/null +++ b/packages/google-cloud-gdchardwaremanagement/samples/generated_samples/gdchardwaremanagement_v1alpha_generated_gdc_hardware_management_create_hardware_async.py @@ -0,0 +1,64 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateHardware +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-gdchardwaremanagement + + +# [START gdchardwaremanagement_v1alpha_generated_GDCHardwareManagement_CreateHardware_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import gdchardwaremanagement_v1alpha + + +async def sample_create_hardware(): + # Create a client + client = gdchardwaremanagement_v1alpha.GDCHardwareManagementAsyncClient() + + # Initialize request argument(s) + hardware = gdchardwaremanagement_v1alpha.Hardware() + hardware.order = "order_value" + hardware.site = "site_value" + hardware.config.sku = "sku_value" + hardware.config.power_supply = "POWER_SUPPLY_DC" + hardware.zone = "zone_value" + + request = gdchardwaremanagement_v1alpha.CreateHardwareRequest( + parent="parent_value", + hardware=hardware, + ) + + # Make the request + operation = client.create_hardware(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END gdchardwaremanagement_v1alpha_generated_GDCHardwareManagement_CreateHardware_async] diff --git a/packages/google-cloud-gdchardwaremanagement/samples/generated_samples/gdchardwaremanagement_v1alpha_generated_gdc_hardware_management_create_hardware_group_async.py b/packages/google-cloud-gdchardwaremanagement/samples/generated_samples/gdchardwaremanagement_v1alpha_generated_gdc_hardware_management_create_hardware_group_async.py new file mode 100644 index 000000000000..fbb7ff9d27cd --- /dev/null +++ b/packages/google-cloud-gdchardwaremanagement/samples/generated_samples/gdchardwaremanagement_v1alpha_generated_gdc_hardware_management_create_hardware_group_async.py @@ -0,0 +1,63 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateHardwareGroup +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-gdchardwaremanagement + + +# [START gdchardwaremanagement_v1alpha_generated_GDCHardwareManagement_CreateHardwareGroup_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import gdchardwaremanagement_v1alpha + + +async def sample_create_hardware_group(): + # Create a client + client = gdchardwaremanagement_v1alpha.GDCHardwareManagementAsyncClient() + + # Initialize request argument(s) + hardware_group = gdchardwaremanagement_v1alpha.HardwareGroup() + hardware_group.hardware_count = 1494 + hardware_group.config.sku = "sku_value" + hardware_group.config.power_supply = "POWER_SUPPLY_DC" + hardware_group.site = "site_value" + + request = gdchardwaremanagement_v1alpha.CreateHardwareGroupRequest( + parent="parent_value", + hardware_group=hardware_group, + ) + + # Make the request + operation = client.create_hardware_group(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END gdchardwaremanagement_v1alpha_generated_GDCHardwareManagement_CreateHardwareGroup_async] diff --git a/packages/google-cloud-gdchardwaremanagement/samples/generated_samples/gdchardwaremanagement_v1alpha_generated_gdc_hardware_management_create_hardware_group_sync.py b/packages/google-cloud-gdchardwaremanagement/samples/generated_samples/gdchardwaremanagement_v1alpha_generated_gdc_hardware_management_create_hardware_group_sync.py new file mode 100644 index 000000000000..791d6c684b72 --- /dev/null +++ b/packages/google-cloud-gdchardwaremanagement/samples/generated_samples/gdchardwaremanagement_v1alpha_generated_gdc_hardware_management_create_hardware_group_sync.py @@ -0,0 +1,63 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateHardwareGroup +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-gdchardwaremanagement + + +# [START gdchardwaremanagement_v1alpha_generated_GDCHardwareManagement_CreateHardwareGroup_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import gdchardwaremanagement_v1alpha + + +def sample_create_hardware_group(): + # Create a client + client = gdchardwaremanagement_v1alpha.GDCHardwareManagementClient() + + # Initialize request argument(s) + hardware_group = gdchardwaremanagement_v1alpha.HardwareGroup() + hardware_group.hardware_count = 1494 + hardware_group.config.sku = "sku_value" + hardware_group.config.power_supply = "POWER_SUPPLY_DC" + hardware_group.site = "site_value" + + request = gdchardwaremanagement_v1alpha.CreateHardwareGroupRequest( + parent="parent_value", + hardware_group=hardware_group, + ) + + # Make the request + operation = client.create_hardware_group(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END gdchardwaremanagement_v1alpha_generated_GDCHardwareManagement_CreateHardwareGroup_sync] diff --git a/packages/google-cloud-gdchardwaremanagement/samples/generated_samples/gdchardwaremanagement_v1alpha_generated_gdc_hardware_management_create_hardware_sync.py b/packages/google-cloud-gdchardwaremanagement/samples/generated_samples/gdchardwaremanagement_v1alpha_generated_gdc_hardware_management_create_hardware_sync.py new file mode 100644 index 000000000000..7082594ca4b6 --- /dev/null +++ b/packages/google-cloud-gdchardwaremanagement/samples/generated_samples/gdchardwaremanagement_v1alpha_generated_gdc_hardware_management_create_hardware_sync.py @@ -0,0 +1,64 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateHardware +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-gdchardwaremanagement + + +# [START gdchardwaremanagement_v1alpha_generated_GDCHardwareManagement_CreateHardware_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import gdchardwaremanagement_v1alpha + + +def sample_create_hardware(): + # Create a client + client = gdchardwaremanagement_v1alpha.GDCHardwareManagementClient() + + # Initialize request argument(s) + hardware = gdchardwaremanagement_v1alpha.Hardware() + hardware.order = "order_value" + hardware.site = "site_value" + hardware.config.sku = "sku_value" + hardware.config.power_supply = "POWER_SUPPLY_DC" + hardware.zone = "zone_value" + + request = gdchardwaremanagement_v1alpha.CreateHardwareRequest( + parent="parent_value", + hardware=hardware, + ) + + # Make the request + operation = client.create_hardware(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END gdchardwaremanagement_v1alpha_generated_GDCHardwareManagement_CreateHardware_sync] diff --git a/packages/google-cloud-gdchardwaremanagement/samples/generated_samples/gdchardwaremanagement_v1alpha_generated_gdc_hardware_management_create_order_async.py b/packages/google-cloud-gdchardwaremanagement/samples/generated_samples/gdchardwaremanagement_v1alpha_generated_gdc_hardware_management_create_order_async.py new file mode 100644 index 000000000000..99ce6d599e40 --- /dev/null +++ b/packages/google-cloud-gdchardwaremanagement/samples/generated_samples/gdchardwaremanagement_v1alpha_generated_gdc_hardware_management_create_order_async.py @@ -0,0 +1,65 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateOrder +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-gdchardwaremanagement + + +# [START gdchardwaremanagement_v1alpha_generated_GDCHardwareManagement_CreateOrder_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import gdchardwaremanagement_v1alpha + + +async def sample_create_order(): + # Create a client + client = gdchardwaremanagement_v1alpha.GDCHardwareManagementAsyncClient() + + # Initialize request argument(s) + order = gdchardwaremanagement_v1alpha.Order() + order.organization_contact.contacts.given_name = "given_name_value" + order.organization_contact.contacts.email = "email_value" + order.organization_contact.contacts.phone = "phone_value" + order.customer_motivation = "customer_motivation_value" + order.region_code = "region_code_value" + order.billing_id = "billing_id_value" + + request = gdchardwaremanagement_v1alpha.CreateOrderRequest( + parent="parent_value", + order=order, + ) + + # Make the request + operation = client.create_order(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END gdchardwaremanagement_v1alpha_generated_GDCHardwareManagement_CreateOrder_async] diff --git a/packages/google-cloud-gdchardwaremanagement/samples/generated_samples/gdchardwaremanagement_v1alpha_generated_gdc_hardware_management_create_order_sync.py b/packages/google-cloud-gdchardwaremanagement/samples/generated_samples/gdchardwaremanagement_v1alpha_generated_gdc_hardware_management_create_order_sync.py new file mode 100644 index 000000000000..7a6d4b4cd061 --- /dev/null +++ b/packages/google-cloud-gdchardwaremanagement/samples/generated_samples/gdchardwaremanagement_v1alpha_generated_gdc_hardware_management_create_order_sync.py @@ -0,0 +1,65 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateOrder +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-gdchardwaremanagement + + +# [START gdchardwaremanagement_v1alpha_generated_GDCHardwareManagement_CreateOrder_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import gdchardwaremanagement_v1alpha + + +def sample_create_order(): + # Create a client + client = gdchardwaremanagement_v1alpha.GDCHardwareManagementClient() + + # Initialize request argument(s) + order = gdchardwaremanagement_v1alpha.Order() + order.organization_contact.contacts.given_name = "given_name_value" + order.organization_contact.contacts.email = "email_value" + order.organization_contact.contacts.phone = "phone_value" + order.customer_motivation = "customer_motivation_value" + order.region_code = "region_code_value" + order.billing_id = "billing_id_value" + + request = gdchardwaremanagement_v1alpha.CreateOrderRequest( + parent="parent_value", + order=order, + ) + + # Make the request + operation = client.create_order(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END gdchardwaremanagement_v1alpha_generated_GDCHardwareManagement_CreateOrder_sync] diff --git a/packages/google-cloud-gdchardwaremanagement/samples/generated_samples/gdchardwaremanagement_v1alpha_generated_gdc_hardware_management_create_site_async.py b/packages/google-cloud-gdchardwaremanagement/samples/generated_samples/gdchardwaremanagement_v1alpha_generated_gdc_hardware_management_create_site_async.py new file mode 100644 index 000000000000..b358b0fa5f7c --- /dev/null +++ b/packages/google-cloud-gdchardwaremanagement/samples/generated_samples/gdchardwaremanagement_v1alpha_generated_gdc_hardware_management_create_site_async.py @@ -0,0 +1,63 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateSite +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-gdchardwaremanagement + + +# [START gdchardwaremanagement_v1alpha_generated_GDCHardwareManagement_CreateSite_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import gdchardwaremanagement_v1alpha + + +async def sample_create_site(): + # Create a client + client = gdchardwaremanagement_v1alpha.GDCHardwareManagementAsyncClient() + + # Initialize request argument(s) + site = gdchardwaremanagement_v1alpha.Site() + site.organization_contact.contacts.given_name = "given_name_value" + site.organization_contact.contacts.email = "email_value" + site.organization_contact.contacts.phone = "phone_value" + site.google_maps_pin_uri = "google_maps_pin_uri_value" + + request = gdchardwaremanagement_v1alpha.CreateSiteRequest( + parent="parent_value", + site=site, + ) + + # Make the request + operation = client.create_site(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END gdchardwaremanagement_v1alpha_generated_GDCHardwareManagement_CreateSite_async] diff --git a/packages/google-cloud-gdchardwaremanagement/samples/generated_samples/gdchardwaremanagement_v1alpha_generated_gdc_hardware_management_create_site_sync.py b/packages/google-cloud-gdchardwaremanagement/samples/generated_samples/gdchardwaremanagement_v1alpha_generated_gdc_hardware_management_create_site_sync.py new file mode 100644 index 000000000000..2f54e9ff3f99 --- /dev/null +++ b/packages/google-cloud-gdchardwaremanagement/samples/generated_samples/gdchardwaremanagement_v1alpha_generated_gdc_hardware_management_create_site_sync.py @@ -0,0 +1,63 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateSite +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-gdchardwaremanagement + + +# [START gdchardwaremanagement_v1alpha_generated_GDCHardwareManagement_CreateSite_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import gdchardwaremanagement_v1alpha + + +def sample_create_site(): + # Create a client + client = gdchardwaremanagement_v1alpha.GDCHardwareManagementClient() + + # Initialize request argument(s) + site = gdchardwaremanagement_v1alpha.Site() + site.organization_contact.contacts.given_name = "given_name_value" + site.organization_contact.contacts.email = "email_value" + site.organization_contact.contacts.phone = "phone_value" + site.google_maps_pin_uri = "google_maps_pin_uri_value" + + request = gdchardwaremanagement_v1alpha.CreateSiteRequest( + parent="parent_value", + site=site, + ) + + # Make the request + operation = client.create_site(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END gdchardwaremanagement_v1alpha_generated_GDCHardwareManagement_CreateSite_sync] diff --git a/packages/google-cloud-gdchardwaremanagement/samples/generated_samples/gdchardwaremanagement_v1alpha_generated_gdc_hardware_management_create_zone_async.py b/packages/google-cloud-gdchardwaremanagement/samples/generated_samples/gdchardwaremanagement_v1alpha_generated_gdc_hardware_management_create_zone_async.py new file mode 100644 index 000000000000..02e17f561657 --- /dev/null +++ b/packages/google-cloud-gdchardwaremanagement/samples/generated_samples/gdchardwaremanagement_v1alpha_generated_gdc_hardware_management_create_zone_async.py @@ -0,0 +1,62 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateZone +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-gdchardwaremanagement + + +# [START gdchardwaremanagement_v1alpha_generated_GDCHardwareManagement_CreateZone_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import gdchardwaremanagement_v1alpha + + +async def sample_create_zone(): + # Create a client + client = gdchardwaremanagement_v1alpha.GDCHardwareManagementAsyncClient() + + # Initialize request argument(s) + zone = gdchardwaremanagement_v1alpha.Zone() + zone.contacts.given_name = "given_name_value" + zone.contacts.email = "email_value" + zone.contacts.phone = "phone_value" + + request = gdchardwaremanagement_v1alpha.CreateZoneRequest( + parent="parent_value", + zone=zone, + ) + + # Make the request + operation = client.create_zone(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END gdchardwaremanagement_v1alpha_generated_GDCHardwareManagement_CreateZone_async] diff --git a/packages/google-cloud-gdchardwaremanagement/samples/generated_samples/gdchardwaremanagement_v1alpha_generated_gdc_hardware_management_create_zone_sync.py b/packages/google-cloud-gdchardwaremanagement/samples/generated_samples/gdchardwaremanagement_v1alpha_generated_gdc_hardware_management_create_zone_sync.py new file mode 100644 index 000000000000..b544f3c754c6 --- /dev/null +++ b/packages/google-cloud-gdchardwaremanagement/samples/generated_samples/gdchardwaremanagement_v1alpha_generated_gdc_hardware_management_create_zone_sync.py @@ -0,0 +1,62 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateZone +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-gdchardwaremanagement + + +# [START gdchardwaremanagement_v1alpha_generated_GDCHardwareManagement_CreateZone_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import gdchardwaremanagement_v1alpha + + +def sample_create_zone(): + # Create a client + client = gdchardwaremanagement_v1alpha.GDCHardwareManagementClient() + + # Initialize request argument(s) + zone = gdchardwaremanagement_v1alpha.Zone() + zone.contacts.given_name = "given_name_value" + zone.contacts.email = "email_value" + zone.contacts.phone = "phone_value" + + request = gdchardwaremanagement_v1alpha.CreateZoneRequest( + parent="parent_value", + zone=zone, + ) + + # Make the request + operation = client.create_zone(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END gdchardwaremanagement_v1alpha_generated_GDCHardwareManagement_CreateZone_sync] diff --git a/packages/google-cloud-gdchardwaremanagement/samples/generated_samples/gdchardwaremanagement_v1alpha_generated_gdc_hardware_management_delete_hardware_async.py b/packages/google-cloud-gdchardwaremanagement/samples/generated_samples/gdchardwaremanagement_v1alpha_generated_gdc_hardware_management_delete_hardware_async.py new file mode 100644 index 000000000000..6ba79ac0f5e0 --- /dev/null +++ b/packages/google-cloud-gdchardwaremanagement/samples/generated_samples/gdchardwaremanagement_v1alpha_generated_gdc_hardware_management_delete_hardware_async.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteHardware +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-gdchardwaremanagement + + +# [START gdchardwaremanagement_v1alpha_generated_GDCHardwareManagement_DeleteHardware_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import gdchardwaremanagement_v1alpha + + +async def sample_delete_hardware(): + # Create a client + client = gdchardwaremanagement_v1alpha.GDCHardwareManagementAsyncClient() + + # Initialize request argument(s) + request = gdchardwaremanagement_v1alpha.DeleteHardwareRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_hardware(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END gdchardwaremanagement_v1alpha_generated_GDCHardwareManagement_DeleteHardware_async] diff --git a/packages/google-cloud-gdchardwaremanagement/samples/generated_samples/gdchardwaremanagement_v1alpha_generated_gdc_hardware_management_delete_hardware_group_async.py b/packages/google-cloud-gdchardwaremanagement/samples/generated_samples/gdchardwaremanagement_v1alpha_generated_gdc_hardware_management_delete_hardware_group_async.py new file mode 100644 index 000000000000..61722da50eb7 --- /dev/null +++ b/packages/google-cloud-gdchardwaremanagement/samples/generated_samples/gdchardwaremanagement_v1alpha_generated_gdc_hardware_management_delete_hardware_group_async.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteHardwareGroup +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-gdchardwaremanagement + + +# [START gdchardwaremanagement_v1alpha_generated_GDCHardwareManagement_DeleteHardwareGroup_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import gdchardwaremanagement_v1alpha + + +async def sample_delete_hardware_group(): + # Create a client + client = gdchardwaremanagement_v1alpha.GDCHardwareManagementAsyncClient() + + # Initialize request argument(s) + request = gdchardwaremanagement_v1alpha.DeleteHardwareGroupRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_hardware_group(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END gdchardwaremanagement_v1alpha_generated_GDCHardwareManagement_DeleteHardwareGroup_async] diff --git a/packages/google-cloud-gdchardwaremanagement/samples/generated_samples/gdchardwaremanagement_v1alpha_generated_gdc_hardware_management_delete_hardware_group_sync.py b/packages/google-cloud-gdchardwaremanagement/samples/generated_samples/gdchardwaremanagement_v1alpha_generated_gdc_hardware_management_delete_hardware_group_sync.py new file mode 100644 index 000000000000..64a2e899b3f1 --- /dev/null +++ b/packages/google-cloud-gdchardwaremanagement/samples/generated_samples/gdchardwaremanagement_v1alpha_generated_gdc_hardware_management_delete_hardware_group_sync.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteHardwareGroup +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-gdchardwaremanagement + + +# [START gdchardwaremanagement_v1alpha_generated_GDCHardwareManagement_DeleteHardwareGroup_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import gdchardwaremanagement_v1alpha + + +def sample_delete_hardware_group(): + # Create a client + client = gdchardwaremanagement_v1alpha.GDCHardwareManagementClient() + + # Initialize request argument(s) + request = gdchardwaremanagement_v1alpha.DeleteHardwareGroupRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_hardware_group(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END gdchardwaremanagement_v1alpha_generated_GDCHardwareManagement_DeleteHardwareGroup_sync] diff --git a/packages/google-cloud-gdchardwaremanagement/samples/generated_samples/gdchardwaremanagement_v1alpha_generated_gdc_hardware_management_delete_hardware_sync.py b/packages/google-cloud-gdchardwaremanagement/samples/generated_samples/gdchardwaremanagement_v1alpha_generated_gdc_hardware_management_delete_hardware_sync.py new file mode 100644 index 000000000000..73020b9cb363 --- /dev/null +++ b/packages/google-cloud-gdchardwaremanagement/samples/generated_samples/gdchardwaremanagement_v1alpha_generated_gdc_hardware_management_delete_hardware_sync.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteHardware +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-gdchardwaremanagement + + +# [START gdchardwaremanagement_v1alpha_generated_GDCHardwareManagement_DeleteHardware_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import gdchardwaremanagement_v1alpha + + +def sample_delete_hardware(): + # Create a client + client = gdchardwaremanagement_v1alpha.GDCHardwareManagementClient() + + # Initialize request argument(s) + request = gdchardwaremanagement_v1alpha.DeleteHardwareRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_hardware(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END gdchardwaremanagement_v1alpha_generated_GDCHardwareManagement_DeleteHardware_sync] diff --git a/packages/google-cloud-gdchardwaremanagement/samples/generated_samples/gdchardwaremanagement_v1alpha_generated_gdc_hardware_management_delete_order_async.py b/packages/google-cloud-gdchardwaremanagement/samples/generated_samples/gdchardwaremanagement_v1alpha_generated_gdc_hardware_management_delete_order_async.py new file mode 100644 index 000000000000..6556d40a6142 --- /dev/null +++ b/packages/google-cloud-gdchardwaremanagement/samples/generated_samples/gdchardwaremanagement_v1alpha_generated_gdc_hardware_management_delete_order_async.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteOrder +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-gdchardwaremanagement + + +# [START gdchardwaremanagement_v1alpha_generated_GDCHardwareManagement_DeleteOrder_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import gdchardwaremanagement_v1alpha + + +async def sample_delete_order(): + # Create a client + client = gdchardwaremanagement_v1alpha.GDCHardwareManagementAsyncClient() + + # Initialize request argument(s) + request = gdchardwaremanagement_v1alpha.DeleteOrderRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_order(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END gdchardwaremanagement_v1alpha_generated_GDCHardwareManagement_DeleteOrder_async] diff --git a/packages/google-cloud-gdchardwaremanagement/samples/generated_samples/gdchardwaremanagement_v1alpha_generated_gdc_hardware_management_delete_order_sync.py b/packages/google-cloud-gdchardwaremanagement/samples/generated_samples/gdchardwaremanagement_v1alpha_generated_gdc_hardware_management_delete_order_sync.py new file mode 100644 index 000000000000..9b68476b2cde --- /dev/null +++ b/packages/google-cloud-gdchardwaremanagement/samples/generated_samples/gdchardwaremanagement_v1alpha_generated_gdc_hardware_management_delete_order_sync.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteOrder +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-gdchardwaremanagement + + +# [START gdchardwaremanagement_v1alpha_generated_GDCHardwareManagement_DeleteOrder_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import gdchardwaremanagement_v1alpha + + +def sample_delete_order(): + # Create a client + client = gdchardwaremanagement_v1alpha.GDCHardwareManagementClient() + + # Initialize request argument(s) + request = gdchardwaremanagement_v1alpha.DeleteOrderRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_order(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END gdchardwaremanagement_v1alpha_generated_GDCHardwareManagement_DeleteOrder_sync] diff --git a/packages/google-cloud-gdchardwaremanagement/samples/generated_samples/gdchardwaremanagement_v1alpha_generated_gdc_hardware_management_delete_zone_async.py b/packages/google-cloud-gdchardwaremanagement/samples/generated_samples/gdchardwaremanagement_v1alpha_generated_gdc_hardware_management_delete_zone_async.py new file mode 100644 index 000000000000..4566d56182c5 --- /dev/null +++ b/packages/google-cloud-gdchardwaremanagement/samples/generated_samples/gdchardwaremanagement_v1alpha_generated_gdc_hardware_management_delete_zone_async.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteZone +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-gdchardwaremanagement + + +# [START gdchardwaremanagement_v1alpha_generated_GDCHardwareManagement_DeleteZone_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import gdchardwaremanagement_v1alpha + + +async def sample_delete_zone(): + # Create a client + client = gdchardwaremanagement_v1alpha.GDCHardwareManagementAsyncClient() + + # Initialize request argument(s) + request = gdchardwaremanagement_v1alpha.DeleteZoneRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_zone(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END gdchardwaremanagement_v1alpha_generated_GDCHardwareManagement_DeleteZone_async] diff --git a/packages/google-cloud-gdchardwaremanagement/samples/generated_samples/gdchardwaremanagement_v1alpha_generated_gdc_hardware_management_delete_zone_sync.py b/packages/google-cloud-gdchardwaremanagement/samples/generated_samples/gdchardwaremanagement_v1alpha_generated_gdc_hardware_management_delete_zone_sync.py new file mode 100644 index 000000000000..318cae5ca0e8 --- /dev/null +++ b/packages/google-cloud-gdchardwaremanagement/samples/generated_samples/gdchardwaremanagement_v1alpha_generated_gdc_hardware_management_delete_zone_sync.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteZone +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-gdchardwaremanagement + + +# [START gdchardwaremanagement_v1alpha_generated_GDCHardwareManagement_DeleteZone_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import gdchardwaremanagement_v1alpha + + +def sample_delete_zone(): + # Create a client + client = gdchardwaremanagement_v1alpha.GDCHardwareManagementClient() + + # Initialize request argument(s) + request = gdchardwaremanagement_v1alpha.DeleteZoneRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_zone(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END gdchardwaremanagement_v1alpha_generated_GDCHardwareManagement_DeleteZone_sync] diff --git a/packages/google-cloud-gdchardwaremanagement/samples/generated_samples/gdchardwaremanagement_v1alpha_generated_gdc_hardware_management_get_change_log_entry_async.py b/packages/google-cloud-gdchardwaremanagement/samples/generated_samples/gdchardwaremanagement_v1alpha_generated_gdc_hardware_management_get_change_log_entry_async.py new file mode 100644 index 000000000000..34acaec36d1a --- /dev/null +++ b/packages/google-cloud-gdchardwaremanagement/samples/generated_samples/gdchardwaremanagement_v1alpha_generated_gdc_hardware_management_get_change_log_entry_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetChangeLogEntry +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-gdchardwaremanagement + + +# [START gdchardwaremanagement_v1alpha_generated_GDCHardwareManagement_GetChangeLogEntry_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import gdchardwaremanagement_v1alpha + + +async def sample_get_change_log_entry(): + # Create a client + client = gdchardwaremanagement_v1alpha.GDCHardwareManagementAsyncClient() + + # Initialize request argument(s) + request = gdchardwaremanagement_v1alpha.GetChangeLogEntryRequest( + name="name_value", + ) + + # Make the request + response = await client.get_change_log_entry(request=request) + + # Handle the response + print(response) + +# [END gdchardwaremanagement_v1alpha_generated_GDCHardwareManagement_GetChangeLogEntry_async] diff --git a/packages/google-cloud-gdchardwaremanagement/samples/generated_samples/gdchardwaremanagement_v1alpha_generated_gdc_hardware_management_get_change_log_entry_sync.py b/packages/google-cloud-gdchardwaremanagement/samples/generated_samples/gdchardwaremanagement_v1alpha_generated_gdc_hardware_management_get_change_log_entry_sync.py new file mode 100644 index 000000000000..ff236ebad94b --- /dev/null +++ b/packages/google-cloud-gdchardwaremanagement/samples/generated_samples/gdchardwaremanagement_v1alpha_generated_gdc_hardware_management_get_change_log_entry_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetChangeLogEntry +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-gdchardwaremanagement + + +# [START gdchardwaremanagement_v1alpha_generated_GDCHardwareManagement_GetChangeLogEntry_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import gdchardwaremanagement_v1alpha + + +def sample_get_change_log_entry(): + # Create a client + client = gdchardwaremanagement_v1alpha.GDCHardwareManagementClient() + + # Initialize request argument(s) + request = gdchardwaremanagement_v1alpha.GetChangeLogEntryRequest( + name="name_value", + ) + + # Make the request + response = client.get_change_log_entry(request=request) + + # Handle the response + print(response) + +# [END gdchardwaremanagement_v1alpha_generated_GDCHardwareManagement_GetChangeLogEntry_sync] diff --git a/packages/google-cloud-gdchardwaremanagement/samples/generated_samples/gdchardwaremanagement_v1alpha_generated_gdc_hardware_management_get_comment_async.py b/packages/google-cloud-gdchardwaremanagement/samples/generated_samples/gdchardwaremanagement_v1alpha_generated_gdc_hardware_management_get_comment_async.py new file mode 100644 index 000000000000..1a39f35d7511 --- /dev/null +++ b/packages/google-cloud-gdchardwaremanagement/samples/generated_samples/gdchardwaremanagement_v1alpha_generated_gdc_hardware_management_get_comment_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetComment +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-gdchardwaremanagement + + +# [START gdchardwaremanagement_v1alpha_generated_GDCHardwareManagement_GetComment_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import gdchardwaremanagement_v1alpha + + +async def sample_get_comment(): + # Create a client + client = gdchardwaremanagement_v1alpha.GDCHardwareManagementAsyncClient() + + # Initialize request argument(s) + request = gdchardwaremanagement_v1alpha.GetCommentRequest( + name="name_value", + ) + + # Make the request + response = await client.get_comment(request=request) + + # Handle the response + print(response) + +# [END gdchardwaremanagement_v1alpha_generated_GDCHardwareManagement_GetComment_async] diff --git a/packages/google-cloud-gdchardwaremanagement/samples/generated_samples/gdchardwaremanagement_v1alpha_generated_gdc_hardware_management_get_comment_sync.py b/packages/google-cloud-gdchardwaremanagement/samples/generated_samples/gdchardwaremanagement_v1alpha_generated_gdc_hardware_management_get_comment_sync.py new file mode 100644 index 000000000000..6775d2b10941 --- /dev/null +++ b/packages/google-cloud-gdchardwaremanagement/samples/generated_samples/gdchardwaremanagement_v1alpha_generated_gdc_hardware_management_get_comment_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetComment +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-gdchardwaremanagement + + +# [START gdchardwaremanagement_v1alpha_generated_GDCHardwareManagement_GetComment_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import gdchardwaremanagement_v1alpha + + +def sample_get_comment(): + # Create a client + client = gdchardwaremanagement_v1alpha.GDCHardwareManagementClient() + + # Initialize request argument(s) + request = gdchardwaremanagement_v1alpha.GetCommentRequest( + name="name_value", + ) + + # Make the request + response = client.get_comment(request=request) + + # Handle the response + print(response) + +# [END gdchardwaremanagement_v1alpha_generated_GDCHardwareManagement_GetComment_sync] diff --git a/packages/google-cloud-gdchardwaremanagement/samples/generated_samples/gdchardwaremanagement_v1alpha_generated_gdc_hardware_management_get_hardware_async.py b/packages/google-cloud-gdchardwaremanagement/samples/generated_samples/gdchardwaremanagement_v1alpha_generated_gdc_hardware_management_get_hardware_async.py new file mode 100644 index 000000000000..0c238b3bc157 --- /dev/null +++ b/packages/google-cloud-gdchardwaremanagement/samples/generated_samples/gdchardwaremanagement_v1alpha_generated_gdc_hardware_management_get_hardware_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetHardware +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-gdchardwaremanagement + + +# [START gdchardwaremanagement_v1alpha_generated_GDCHardwareManagement_GetHardware_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import gdchardwaremanagement_v1alpha + + +async def sample_get_hardware(): + # Create a client + client = gdchardwaremanagement_v1alpha.GDCHardwareManagementAsyncClient() + + # Initialize request argument(s) + request = gdchardwaremanagement_v1alpha.GetHardwareRequest( + name="name_value", + ) + + # Make the request + response = await client.get_hardware(request=request) + + # Handle the response + print(response) + +# [END gdchardwaremanagement_v1alpha_generated_GDCHardwareManagement_GetHardware_async] diff --git a/packages/google-cloud-gdchardwaremanagement/samples/generated_samples/gdchardwaremanagement_v1alpha_generated_gdc_hardware_management_get_hardware_group_async.py b/packages/google-cloud-gdchardwaremanagement/samples/generated_samples/gdchardwaremanagement_v1alpha_generated_gdc_hardware_management_get_hardware_group_async.py new file mode 100644 index 000000000000..991d939e4504 --- /dev/null +++ b/packages/google-cloud-gdchardwaremanagement/samples/generated_samples/gdchardwaremanagement_v1alpha_generated_gdc_hardware_management_get_hardware_group_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetHardwareGroup +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-gdchardwaremanagement + + +# [START gdchardwaremanagement_v1alpha_generated_GDCHardwareManagement_GetHardwareGroup_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import gdchardwaremanagement_v1alpha + + +async def sample_get_hardware_group(): + # Create a client + client = gdchardwaremanagement_v1alpha.GDCHardwareManagementAsyncClient() + + # Initialize request argument(s) + request = gdchardwaremanagement_v1alpha.GetHardwareGroupRequest( + name="name_value", + ) + + # Make the request + response = await client.get_hardware_group(request=request) + + # Handle the response + print(response) + +# [END gdchardwaremanagement_v1alpha_generated_GDCHardwareManagement_GetHardwareGroup_async] diff --git a/packages/google-cloud-gdchardwaremanagement/samples/generated_samples/gdchardwaremanagement_v1alpha_generated_gdc_hardware_management_get_hardware_group_sync.py b/packages/google-cloud-gdchardwaremanagement/samples/generated_samples/gdchardwaremanagement_v1alpha_generated_gdc_hardware_management_get_hardware_group_sync.py new file mode 100644 index 000000000000..d128f63c92d6 --- /dev/null +++ b/packages/google-cloud-gdchardwaremanagement/samples/generated_samples/gdchardwaremanagement_v1alpha_generated_gdc_hardware_management_get_hardware_group_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetHardwareGroup +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-gdchardwaremanagement + + +# [START gdchardwaremanagement_v1alpha_generated_GDCHardwareManagement_GetHardwareGroup_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import gdchardwaremanagement_v1alpha + + +def sample_get_hardware_group(): + # Create a client + client = gdchardwaremanagement_v1alpha.GDCHardwareManagementClient() + + # Initialize request argument(s) + request = gdchardwaremanagement_v1alpha.GetHardwareGroupRequest( + name="name_value", + ) + + # Make the request + response = client.get_hardware_group(request=request) + + # Handle the response + print(response) + +# [END gdchardwaremanagement_v1alpha_generated_GDCHardwareManagement_GetHardwareGroup_sync] diff --git a/packages/google-cloud-gdchardwaremanagement/samples/generated_samples/gdchardwaremanagement_v1alpha_generated_gdc_hardware_management_get_hardware_sync.py b/packages/google-cloud-gdchardwaremanagement/samples/generated_samples/gdchardwaremanagement_v1alpha_generated_gdc_hardware_management_get_hardware_sync.py new file mode 100644 index 000000000000..1c72fd6b80f3 --- /dev/null +++ b/packages/google-cloud-gdchardwaremanagement/samples/generated_samples/gdchardwaremanagement_v1alpha_generated_gdc_hardware_management_get_hardware_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetHardware +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-gdchardwaremanagement + + +# [START gdchardwaremanagement_v1alpha_generated_GDCHardwareManagement_GetHardware_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import gdchardwaremanagement_v1alpha + + +def sample_get_hardware(): + # Create a client + client = gdchardwaremanagement_v1alpha.GDCHardwareManagementClient() + + # Initialize request argument(s) + request = gdchardwaremanagement_v1alpha.GetHardwareRequest( + name="name_value", + ) + + # Make the request + response = client.get_hardware(request=request) + + # Handle the response + print(response) + +# [END gdchardwaremanagement_v1alpha_generated_GDCHardwareManagement_GetHardware_sync] diff --git a/packages/google-cloud-gdchardwaremanagement/samples/generated_samples/gdchardwaremanagement_v1alpha_generated_gdc_hardware_management_get_order_async.py b/packages/google-cloud-gdchardwaremanagement/samples/generated_samples/gdchardwaremanagement_v1alpha_generated_gdc_hardware_management_get_order_async.py new file mode 100644 index 000000000000..3bff76a76297 --- /dev/null +++ b/packages/google-cloud-gdchardwaremanagement/samples/generated_samples/gdchardwaremanagement_v1alpha_generated_gdc_hardware_management_get_order_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetOrder +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-gdchardwaremanagement + + +# [START gdchardwaremanagement_v1alpha_generated_GDCHardwareManagement_GetOrder_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import gdchardwaremanagement_v1alpha + + +async def sample_get_order(): + # Create a client + client = gdchardwaremanagement_v1alpha.GDCHardwareManagementAsyncClient() + + # Initialize request argument(s) + request = gdchardwaremanagement_v1alpha.GetOrderRequest( + name="name_value", + ) + + # Make the request + response = await client.get_order(request=request) + + # Handle the response + print(response) + +# [END gdchardwaremanagement_v1alpha_generated_GDCHardwareManagement_GetOrder_async] diff --git a/packages/google-cloud-gdchardwaremanagement/samples/generated_samples/gdchardwaremanagement_v1alpha_generated_gdc_hardware_management_get_order_sync.py b/packages/google-cloud-gdchardwaremanagement/samples/generated_samples/gdchardwaremanagement_v1alpha_generated_gdc_hardware_management_get_order_sync.py new file mode 100644 index 000000000000..6ed4e0e86f5b --- /dev/null +++ b/packages/google-cloud-gdchardwaremanagement/samples/generated_samples/gdchardwaremanagement_v1alpha_generated_gdc_hardware_management_get_order_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetOrder +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-gdchardwaremanagement + + +# [START gdchardwaremanagement_v1alpha_generated_GDCHardwareManagement_GetOrder_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import gdchardwaremanagement_v1alpha + + +def sample_get_order(): + # Create a client + client = gdchardwaremanagement_v1alpha.GDCHardwareManagementClient() + + # Initialize request argument(s) + request = gdchardwaremanagement_v1alpha.GetOrderRequest( + name="name_value", + ) + + # Make the request + response = client.get_order(request=request) + + # Handle the response + print(response) + +# [END gdchardwaremanagement_v1alpha_generated_GDCHardwareManagement_GetOrder_sync] diff --git a/packages/google-cloud-gdchardwaremanagement/samples/generated_samples/gdchardwaremanagement_v1alpha_generated_gdc_hardware_management_get_site_async.py b/packages/google-cloud-gdchardwaremanagement/samples/generated_samples/gdchardwaremanagement_v1alpha_generated_gdc_hardware_management_get_site_async.py new file mode 100644 index 000000000000..f047be86341d --- /dev/null +++ b/packages/google-cloud-gdchardwaremanagement/samples/generated_samples/gdchardwaremanagement_v1alpha_generated_gdc_hardware_management_get_site_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetSite +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-gdchardwaremanagement + + +# [START gdchardwaremanagement_v1alpha_generated_GDCHardwareManagement_GetSite_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import gdchardwaremanagement_v1alpha + + +async def sample_get_site(): + # Create a client + client = gdchardwaremanagement_v1alpha.GDCHardwareManagementAsyncClient() + + # Initialize request argument(s) + request = gdchardwaremanagement_v1alpha.GetSiteRequest( + name="name_value", + ) + + # Make the request + response = await client.get_site(request=request) + + # Handle the response + print(response) + +# [END gdchardwaremanagement_v1alpha_generated_GDCHardwareManagement_GetSite_async] diff --git a/packages/google-cloud-gdchardwaremanagement/samples/generated_samples/gdchardwaremanagement_v1alpha_generated_gdc_hardware_management_get_site_sync.py b/packages/google-cloud-gdchardwaremanagement/samples/generated_samples/gdchardwaremanagement_v1alpha_generated_gdc_hardware_management_get_site_sync.py new file mode 100644 index 000000000000..b35e75d7a01f --- /dev/null +++ b/packages/google-cloud-gdchardwaremanagement/samples/generated_samples/gdchardwaremanagement_v1alpha_generated_gdc_hardware_management_get_site_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetSite +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-gdchardwaremanagement + + +# [START gdchardwaremanagement_v1alpha_generated_GDCHardwareManagement_GetSite_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import gdchardwaremanagement_v1alpha + + +def sample_get_site(): + # Create a client + client = gdchardwaremanagement_v1alpha.GDCHardwareManagementClient() + + # Initialize request argument(s) + request = gdchardwaremanagement_v1alpha.GetSiteRequest( + name="name_value", + ) + + # Make the request + response = client.get_site(request=request) + + # Handle the response + print(response) + +# [END gdchardwaremanagement_v1alpha_generated_GDCHardwareManagement_GetSite_sync] diff --git a/packages/google-cloud-gdchardwaremanagement/samples/generated_samples/gdchardwaremanagement_v1alpha_generated_gdc_hardware_management_get_sku_async.py b/packages/google-cloud-gdchardwaremanagement/samples/generated_samples/gdchardwaremanagement_v1alpha_generated_gdc_hardware_management_get_sku_async.py new file mode 100644 index 000000000000..e2a5ad7e30b8 --- /dev/null +++ b/packages/google-cloud-gdchardwaremanagement/samples/generated_samples/gdchardwaremanagement_v1alpha_generated_gdc_hardware_management_get_sku_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetSku +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-gdchardwaremanagement + + +# [START gdchardwaremanagement_v1alpha_generated_GDCHardwareManagement_GetSku_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import gdchardwaremanagement_v1alpha + + +async def sample_get_sku(): + # Create a client + client = gdchardwaremanagement_v1alpha.GDCHardwareManagementAsyncClient() + + # Initialize request argument(s) + request = gdchardwaremanagement_v1alpha.GetSkuRequest( + name="name_value", + ) + + # Make the request + response = await client.get_sku(request=request) + + # Handle the response + print(response) + +# [END gdchardwaremanagement_v1alpha_generated_GDCHardwareManagement_GetSku_async] diff --git a/packages/google-cloud-gdchardwaremanagement/samples/generated_samples/gdchardwaremanagement_v1alpha_generated_gdc_hardware_management_get_sku_sync.py b/packages/google-cloud-gdchardwaremanagement/samples/generated_samples/gdchardwaremanagement_v1alpha_generated_gdc_hardware_management_get_sku_sync.py new file mode 100644 index 000000000000..efe0953f896e --- /dev/null +++ b/packages/google-cloud-gdchardwaremanagement/samples/generated_samples/gdchardwaremanagement_v1alpha_generated_gdc_hardware_management_get_sku_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetSku +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-gdchardwaremanagement + + +# [START gdchardwaremanagement_v1alpha_generated_GDCHardwareManagement_GetSku_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import gdchardwaremanagement_v1alpha + + +def sample_get_sku(): + # Create a client + client = gdchardwaremanagement_v1alpha.GDCHardwareManagementClient() + + # Initialize request argument(s) + request = gdchardwaremanagement_v1alpha.GetSkuRequest( + name="name_value", + ) + + # Make the request + response = client.get_sku(request=request) + + # Handle the response + print(response) + +# [END gdchardwaremanagement_v1alpha_generated_GDCHardwareManagement_GetSku_sync] diff --git a/packages/google-cloud-gdchardwaremanagement/samples/generated_samples/gdchardwaremanagement_v1alpha_generated_gdc_hardware_management_get_zone_async.py b/packages/google-cloud-gdchardwaremanagement/samples/generated_samples/gdchardwaremanagement_v1alpha_generated_gdc_hardware_management_get_zone_async.py new file mode 100644 index 000000000000..1f435a7d0de3 --- /dev/null +++ b/packages/google-cloud-gdchardwaremanagement/samples/generated_samples/gdchardwaremanagement_v1alpha_generated_gdc_hardware_management_get_zone_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetZone +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-gdchardwaremanagement + + +# [START gdchardwaremanagement_v1alpha_generated_GDCHardwareManagement_GetZone_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import gdchardwaremanagement_v1alpha + + +async def sample_get_zone(): + # Create a client + client = gdchardwaremanagement_v1alpha.GDCHardwareManagementAsyncClient() + + # Initialize request argument(s) + request = gdchardwaremanagement_v1alpha.GetZoneRequest( + name="name_value", + ) + + # Make the request + response = await client.get_zone(request=request) + + # Handle the response + print(response) + +# [END gdchardwaremanagement_v1alpha_generated_GDCHardwareManagement_GetZone_async] diff --git a/packages/google-cloud-gdchardwaremanagement/samples/generated_samples/gdchardwaremanagement_v1alpha_generated_gdc_hardware_management_get_zone_sync.py b/packages/google-cloud-gdchardwaremanagement/samples/generated_samples/gdchardwaremanagement_v1alpha_generated_gdc_hardware_management_get_zone_sync.py new file mode 100644 index 000000000000..ce30b6e400a2 --- /dev/null +++ b/packages/google-cloud-gdchardwaremanagement/samples/generated_samples/gdchardwaremanagement_v1alpha_generated_gdc_hardware_management_get_zone_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetZone +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-gdchardwaremanagement + + +# [START gdchardwaremanagement_v1alpha_generated_GDCHardwareManagement_GetZone_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import gdchardwaremanagement_v1alpha + + +def sample_get_zone(): + # Create a client + client = gdchardwaremanagement_v1alpha.GDCHardwareManagementClient() + + # Initialize request argument(s) + request = gdchardwaremanagement_v1alpha.GetZoneRequest( + name="name_value", + ) + + # Make the request + response = client.get_zone(request=request) + + # Handle the response + print(response) + +# [END gdchardwaremanagement_v1alpha_generated_GDCHardwareManagement_GetZone_sync] diff --git a/packages/google-cloud-gdchardwaremanagement/samples/generated_samples/gdchardwaremanagement_v1alpha_generated_gdc_hardware_management_list_change_log_entries_async.py b/packages/google-cloud-gdchardwaremanagement/samples/generated_samples/gdchardwaremanagement_v1alpha_generated_gdc_hardware_management_list_change_log_entries_async.py new file mode 100644 index 000000000000..43ee46dc33c9 --- /dev/null +++ b/packages/google-cloud-gdchardwaremanagement/samples/generated_samples/gdchardwaremanagement_v1alpha_generated_gdc_hardware_management_list_change_log_entries_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListChangeLogEntries +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-gdchardwaremanagement + + +# [START gdchardwaremanagement_v1alpha_generated_GDCHardwareManagement_ListChangeLogEntries_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import gdchardwaremanagement_v1alpha + + +async def sample_list_change_log_entries(): + # Create a client + client = gdchardwaremanagement_v1alpha.GDCHardwareManagementAsyncClient() + + # Initialize request argument(s) + request = gdchardwaremanagement_v1alpha.ListChangeLogEntriesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_change_log_entries(request=request) + + # Handle the response + async for response in page_result: + print(response) + +# [END gdchardwaremanagement_v1alpha_generated_GDCHardwareManagement_ListChangeLogEntries_async] diff --git a/packages/google-cloud-gdchardwaremanagement/samples/generated_samples/gdchardwaremanagement_v1alpha_generated_gdc_hardware_management_list_change_log_entries_sync.py b/packages/google-cloud-gdchardwaremanagement/samples/generated_samples/gdchardwaremanagement_v1alpha_generated_gdc_hardware_management_list_change_log_entries_sync.py new file mode 100644 index 000000000000..bfcedf56a8a3 --- /dev/null +++ b/packages/google-cloud-gdchardwaremanagement/samples/generated_samples/gdchardwaremanagement_v1alpha_generated_gdc_hardware_management_list_change_log_entries_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListChangeLogEntries +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-gdchardwaremanagement + + +# [START gdchardwaremanagement_v1alpha_generated_GDCHardwareManagement_ListChangeLogEntries_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import gdchardwaremanagement_v1alpha + + +def sample_list_change_log_entries(): + # Create a client + client = gdchardwaremanagement_v1alpha.GDCHardwareManagementClient() + + # Initialize request argument(s) + request = gdchardwaremanagement_v1alpha.ListChangeLogEntriesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_change_log_entries(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END gdchardwaremanagement_v1alpha_generated_GDCHardwareManagement_ListChangeLogEntries_sync] diff --git a/packages/google-cloud-gdchardwaremanagement/samples/generated_samples/gdchardwaremanagement_v1alpha_generated_gdc_hardware_management_list_comments_async.py b/packages/google-cloud-gdchardwaremanagement/samples/generated_samples/gdchardwaremanagement_v1alpha_generated_gdc_hardware_management_list_comments_async.py new file mode 100644 index 000000000000..895835195598 --- /dev/null +++ b/packages/google-cloud-gdchardwaremanagement/samples/generated_samples/gdchardwaremanagement_v1alpha_generated_gdc_hardware_management_list_comments_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListComments +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-gdchardwaremanagement + + +# [START gdchardwaremanagement_v1alpha_generated_GDCHardwareManagement_ListComments_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import gdchardwaremanagement_v1alpha + + +async def sample_list_comments(): + # Create a client + client = gdchardwaremanagement_v1alpha.GDCHardwareManagementAsyncClient() + + # Initialize request argument(s) + request = gdchardwaremanagement_v1alpha.ListCommentsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_comments(request=request) + + # Handle the response + async for response in page_result: + print(response) + +# [END gdchardwaremanagement_v1alpha_generated_GDCHardwareManagement_ListComments_async] diff --git a/packages/google-cloud-gdchardwaremanagement/samples/generated_samples/gdchardwaremanagement_v1alpha_generated_gdc_hardware_management_list_comments_sync.py b/packages/google-cloud-gdchardwaremanagement/samples/generated_samples/gdchardwaremanagement_v1alpha_generated_gdc_hardware_management_list_comments_sync.py new file mode 100644 index 000000000000..9a3839cebb05 --- /dev/null +++ b/packages/google-cloud-gdchardwaremanagement/samples/generated_samples/gdchardwaremanagement_v1alpha_generated_gdc_hardware_management_list_comments_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListComments +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-gdchardwaremanagement + + +# [START gdchardwaremanagement_v1alpha_generated_GDCHardwareManagement_ListComments_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import gdchardwaremanagement_v1alpha + + +def sample_list_comments(): + # Create a client + client = gdchardwaremanagement_v1alpha.GDCHardwareManagementClient() + + # Initialize request argument(s) + request = gdchardwaremanagement_v1alpha.ListCommentsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_comments(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END gdchardwaremanagement_v1alpha_generated_GDCHardwareManagement_ListComments_sync] diff --git a/packages/google-cloud-gdchardwaremanagement/samples/generated_samples/gdchardwaremanagement_v1alpha_generated_gdc_hardware_management_list_hardware_async.py b/packages/google-cloud-gdchardwaremanagement/samples/generated_samples/gdchardwaremanagement_v1alpha_generated_gdc_hardware_management_list_hardware_async.py new file mode 100644 index 000000000000..386d212d873d --- /dev/null +++ b/packages/google-cloud-gdchardwaremanagement/samples/generated_samples/gdchardwaremanagement_v1alpha_generated_gdc_hardware_management_list_hardware_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListHardware +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-gdchardwaremanagement + + +# [START gdchardwaremanagement_v1alpha_generated_GDCHardwareManagement_ListHardware_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import gdchardwaremanagement_v1alpha + + +async def sample_list_hardware(): + # Create a client + client = gdchardwaremanagement_v1alpha.GDCHardwareManagementAsyncClient() + + # Initialize request argument(s) + request = gdchardwaremanagement_v1alpha.ListHardwareRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_hardware(request=request) + + # Handle the response + async for response in page_result: + print(response) + +# [END gdchardwaremanagement_v1alpha_generated_GDCHardwareManagement_ListHardware_async] diff --git a/packages/google-cloud-gdchardwaremanagement/samples/generated_samples/gdchardwaremanagement_v1alpha_generated_gdc_hardware_management_list_hardware_groups_async.py b/packages/google-cloud-gdchardwaremanagement/samples/generated_samples/gdchardwaremanagement_v1alpha_generated_gdc_hardware_management_list_hardware_groups_async.py new file mode 100644 index 000000000000..c6a72d5ce9f4 --- /dev/null +++ b/packages/google-cloud-gdchardwaremanagement/samples/generated_samples/gdchardwaremanagement_v1alpha_generated_gdc_hardware_management_list_hardware_groups_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListHardwareGroups +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-gdchardwaremanagement + + +# [START gdchardwaremanagement_v1alpha_generated_GDCHardwareManagement_ListHardwareGroups_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import gdchardwaremanagement_v1alpha + + +async def sample_list_hardware_groups(): + # Create a client + client = gdchardwaremanagement_v1alpha.GDCHardwareManagementAsyncClient() + + # Initialize request argument(s) + request = gdchardwaremanagement_v1alpha.ListHardwareGroupsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_hardware_groups(request=request) + + # Handle the response + async for response in page_result: + print(response) + +# [END gdchardwaremanagement_v1alpha_generated_GDCHardwareManagement_ListHardwareGroups_async] diff --git a/packages/google-cloud-gdchardwaremanagement/samples/generated_samples/gdchardwaremanagement_v1alpha_generated_gdc_hardware_management_list_hardware_groups_sync.py b/packages/google-cloud-gdchardwaremanagement/samples/generated_samples/gdchardwaremanagement_v1alpha_generated_gdc_hardware_management_list_hardware_groups_sync.py new file mode 100644 index 000000000000..3481cb20fd72 --- /dev/null +++ b/packages/google-cloud-gdchardwaremanagement/samples/generated_samples/gdchardwaremanagement_v1alpha_generated_gdc_hardware_management_list_hardware_groups_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListHardwareGroups +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-gdchardwaremanagement + + +# [START gdchardwaremanagement_v1alpha_generated_GDCHardwareManagement_ListHardwareGroups_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import gdchardwaremanagement_v1alpha + + +def sample_list_hardware_groups(): + # Create a client + client = gdchardwaremanagement_v1alpha.GDCHardwareManagementClient() + + # Initialize request argument(s) + request = gdchardwaremanagement_v1alpha.ListHardwareGroupsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_hardware_groups(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END gdchardwaremanagement_v1alpha_generated_GDCHardwareManagement_ListHardwareGroups_sync] diff --git a/packages/google-cloud-gdchardwaremanagement/samples/generated_samples/gdchardwaremanagement_v1alpha_generated_gdc_hardware_management_list_hardware_sync.py b/packages/google-cloud-gdchardwaremanagement/samples/generated_samples/gdchardwaremanagement_v1alpha_generated_gdc_hardware_management_list_hardware_sync.py new file mode 100644 index 000000000000..b3541c7b7659 --- /dev/null +++ b/packages/google-cloud-gdchardwaremanagement/samples/generated_samples/gdchardwaremanagement_v1alpha_generated_gdc_hardware_management_list_hardware_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListHardware +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-gdchardwaremanagement + + +# [START gdchardwaremanagement_v1alpha_generated_GDCHardwareManagement_ListHardware_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import gdchardwaremanagement_v1alpha + + +def sample_list_hardware(): + # Create a client + client = gdchardwaremanagement_v1alpha.GDCHardwareManagementClient() + + # Initialize request argument(s) + request = gdchardwaremanagement_v1alpha.ListHardwareRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_hardware(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END gdchardwaremanagement_v1alpha_generated_GDCHardwareManagement_ListHardware_sync] diff --git a/packages/google-cloud-gdchardwaremanagement/samples/generated_samples/gdchardwaremanagement_v1alpha_generated_gdc_hardware_management_list_orders_async.py b/packages/google-cloud-gdchardwaremanagement/samples/generated_samples/gdchardwaremanagement_v1alpha_generated_gdc_hardware_management_list_orders_async.py new file mode 100644 index 000000000000..4a81d725e0a3 --- /dev/null +++ b/packages/google-cloud-gdchardwaremanagement/samples/generated_samples/gdchardwaremanagement_v1alpha_generated_gdc_hardware_management_list_orders_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListOrders +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-gdchardwaremanagement + + +# [START gdchardwaremanagement_v1alpha_generated_GDCHardwareManagement_ListOrders_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import gdchardwaremanagement_v1alpha + + +async def sample_list_orders(): + # Create a client + client = gdchardwaremanagement_v1alpha.GDCHardwareManagementAsyncClient() + + # Initialize request argument(s) + request = gdchardwaremanagement_v1alpha.ListOrdersRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_orders(request=request) + + # Handle the response + async for response in page_result: + print(response) + +# [END gdchardwaremanagement_v1alpha_generated_GDCHardwareManagement_ListOrders_async] diff --git a/packages/google-cloud-gdchardwaremanagement/samples/generated_samples/gdchardwaremanagement_v1alpha_generated_gdc_hardware_management_list_orders_sync.py b/packages/google-cloud-gdchardwaremanagement/samples/generated_samples/gdchardwaremanagement_v1alpha_generated_gdc_hardware_management_list_orders_sync.py new file mode 100644 index 000000000000..4f0ba5331742 --- /dev/null +++ b/packages/google-cloud-gdchardwaremanagement/samples/generated_samples/gdchardwaremanagement_v1alpha_generated_gdc_hardware_management_list_orders_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListOrders +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-gdchardwaremanagement + + +# [START gdchardwaremanagement_v1alpha_generated_GDCHardwareManagement_ListOrders_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import gdchardwaremanagement_v1alpha + + +def sample_list_orders(): + # Create a client + client = gdchardwaremanagement_v1alpha.GDCHardwareManagementClient() + + # Initialize request argument(s) + request = gdchardwaremanagement_v1alpha.ListOrdersRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_orders(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END gdchardwaremanagement_v1alpha_generated_GDCHardwareManagement_ListOrders_sync] diff --git a/packages/google-cloud-gdchardwaremanagement/samples/generated_samples/gdchardwaremanagement_v1alpha_generated_gdc_hardware_management_list_sites_async.py b/packages/google-cloud-gdchardwaremanagement/samples/generated_samples/gdchardwaremanagement_v1alpha_generated_gdc_hardware_management_list_sites_async.py new file mode 100644 index 000000000000..b034202ba00f --- /dev/null +++ b/packages/google-cloud-gdchardwaremanagement/samples/generated_samples/gdchardwaremanagement_v1alpha_generated_gdc_hardware_management_list_sites_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListSites +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-gdchardwaremanagement + + +# [START gdchardwaremanagement_v1alpha_generated_GDCHardwareManagement_ListSites_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import gdchardwaremanagement_v1alpha + + +async def sample_list_sites(): + # Create a client + client = gdchardwaremanagement_v1alpha.GDCHardwareManagementAsyncClient() + + # Initialize request argument(s) + request = gdchardwaremanagement_v1alpha.ListSitesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_sites(request=request) + + # Handle the response + async for response in page_result: + print(response) + +# [END gdchardwaremanagement_v1alpha_generated_GDCHardwareManagement_ListSites_async] diff --git a/packages/google-cloud-gdchardwaremanagement/samples/generated_samples/gdchardwaremanagement_v1alpha_generated_gdc_hardware_management_list_sites_sync.py b/packages/google-cloud-gdchardwaremanagement/samples/generated_samples/gdchardwaremanagement_v1alpha_generated_gdc_hardware_management_list_sites_sync.py new file mode 100644 index 000000000000..4e11f31d63ba --- /dev/null +++ b/packages/google-cloud-gdchardwaremanagement/samples/generated_samples/gdchardwaremanagement_v1alpha_generated_gdc_hardware_management_list_sites_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListSites +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-gdchardwaremanagement + + +# [START gdchardwaremanagement_v1alpha_generated_GDCHardwareManagement_ListSites_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import gdchardwaremanagement_v1alpha + + +def sample_list_sites(): + # Create a client + client = gdchardwaremanagement_v1alpha.GDCHardwareManagementClient() + + # Initialize request argument(s) + request = gdchardwaremanagement_v1alpha.ListSitesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_sites(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END gdchardwaremanagement_v1alpha_generated_GDCHardwareManagement_ListSites_sync] diff --git a/packages/google-cloud-gdchardwaremanagement/samples/generated_samples/gdchardwaremanagement_v1alpha_generated_gdc_hardware_management_list_skus_async.py b/packages/google-cloud-gdchardwaremanagement/samples/generated_samples/gdchardwaremanagement_v1alpha_generated_gdc_hardware_management_list_skus_async.py new file mode 100644 index 000000000000..296cce10a3b3 --- /dev/null +++ b/packages/google-cloud-gdchardwaremanagement/samples/generated_samples/gdchardwaremanagement_v1alpha_generated_gdc_hardware_management_list_skus_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListSkus +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-gdchardwaremanagement + + +# [START gdchardwaremanagement_v1alpha_generated_GDCHardwareManagement_ListSkus_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import gdchardwaremanagement_v1alpha + + +async def sample_list_skus(): + # Create a client + client = gdchardwaremanagement_v1alpha.GDCHardwareManagementAsyncClient() + + # Initialize request argument(s) + request = gdchardwaremanagement_v1alpha.ListSkusRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_skus(request=request) + + # Handle the response + async for response in page_result: + print(response) + +# [END gdchardwaremanagement_v1alpha_generated_GDCHardwareManagement_ListSkus_async] diff --git a/packages/google-cloud-gdchardwaremanagement/samples/generated_samples/gdchardwaremanagement_v1alpha_generated_gdc_hardware_management_list_skus_sync.py b/packages/google-cloud-gdchardwaremanagement/samples/generated_samples/gdchardwaremanagement_v1alpha_generated_gdc_hardware_management_list_skus_sync.py new file mode 100644 index 000000000000..2e3c624c6870 --- /dev/null +++ b/packages/google-cloud-gdchardwaremanagement/samples/generated_samples/gdchardwaremanagement_v1alpha_generated_gdc_hardware_management_list_skus_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListSkus +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-gdchardwaremanagement + + +# [START gdchardwaremanagement_v1alpha_generated_GDCHardwareManagement_ListSkus_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import gdchardwaremanagement_v1alpha + + +def sample_list_skus(): + # Create a client + client = gdchardwaremanagement_v1alpha.GDCHardwareManagementClient() + + # Initialize request argument(s) + request = gdchardwaremanagement_v1alpha.ListSkusRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_skus(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END gdchardwaremanagement_v1alpha_generated_GDCHardwareManagement_ListSkus_sync] diff --git a/packages/google-cloud-gdchardwaremanagement/samples/generated_samples/gdchardwaremanagement_v1alpha_generated_gdc_hardware_management_list_zones_async.py b/packages/google-cloud-gdchardwaremanagement/samples/generated_samples/gdchardwaremanagement_v1alpha_generated_gdc_hardware_management_list_zones_async.py new file mode 100644 index 000000000000..784310004fcd --- /dev/null +++ b/packages/google-cloud-gdchardwaremanagement/samples/generated_samples/gdchardwaremanagement_v1alpha_generated_gdc_hardware_management_list_zones_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListZones +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-gdchardwaremanagement + + +# [START gdchardwaremanagement_v1alpha_generated_GDCHardwareManagement_ListZones_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import gdchardwaremanagement_v1alpha + + +async def sample_list_zones(): + # Create a client + client = gdchardwaremanagement_v1alpha.GDCHardwareManagementAsyncClient() + + # Initialize request argument(s) + request = gdchardwaremanagement_v1alpha.ListZonesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_zones(request=request) + + # Handle the response + async for response in page_result: + print(response) + +# [END gdchardwaremanagement_v1alpha_generated_GDCHardwareManagement_ListZones_async] diff --git a/packages/google-cloud-gdchardwaremanagement/samples/generated_samples/gdchardwaremanagement_v1alpha_generated_gdc_hardware_management_list_zones_sync.py b/packages/google-cloud-gdchardwaremanagement/samples/generated_samples/gdchardwaremanagement_v1alpha_generated_gdc_hardware_management_list_zones_sync.py new file mode 100644 index 000000000000..b95b08c6adcd --- /dev/null +++ b/packages/google-cloud-gdchardwaremanagement/samples/generated_samples/gdchardwaremanagement_v1alpha_generated_gdc_hardware_management_list_zones_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListZones +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-gdchardwaremanagement + + +# [START gdchardwaremanagement_v1alpha_generated_GDCHardwareManagement_ListZones_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import gdchardwaremanagement_v1alpha + + +def sample_list_zones(): + # Create a client + client = gdchardwaremanagement_v1alpha.GDCHardwareManagementClient() + + # Initialize request argument(s) + request = gdchardwaremanagement_v1alpha.ListZonesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_zones(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END gdchardwaremanagement_v1alpha_generated_GDCHardwareManagement_ListZones_sync] diff --git a/packages/google-cloud-gdchardwaremanagement/samples/generated_samples/gdchardwaremanagement_v1alpha_generated_gdc_hardware_management_signal_zone_state_async.py b/packages/google-cloud-gdchardwaremanagement/samples/generated_samples/gdchardwaremanagement_v1alpha_generated_gdc_hardware_management_signal_zone_state_async.py new file mode 100644 index 000000000000..63cb018f1818 --- /dev/null +++ b/packages/google-cloud-gdchardwaremanagement/samples/generated_samples/gdchardwaremanagement_v1alpha_generated_gdc_hardware_management_signal_zone_state_async.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for SignalZoneState +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-gdchardwaremanagement + + +# [START gdchardwaremanagement_v1alpha_generated_GDCHardwareManagement_SignalZoneState_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import gdchardwaremanagement_v1alpha + + +async def sample_signal_zone_state(): + # Create a client + client = gdchardwaremanagement_v1alpha.GDCHardwareManagementAsyncClient() + + # Initialize request argument(s) + request = gdchardwaremanagement_v1alpha.SignalZoneStateRequest( + name="name_value", + state_signal="FACTORY_TURNUP_CHECKS_FAILED", + ) + + # Make the request + operation = client.signal_zone_state(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END gdchardwaremanagement_v1alpha_generated_GDCHardwareManagement_SignalZoneState_async] diff --git a/packages/google-cloud-gdchardwaremanagement/samples/generated_samples/gdchardwaremanagement_v1alpha_generated_gdc_hardware_management_signal_zone_state_sync.py b/packages/google-cloud-gdchardwaremanagement/samples/generated_samples/gdchardwaremanagement_v1alpha_generated_gdc_hardware_management_signal_zone_state_sync.py new file mode 100644 index 000000000000..fbdb745f83fb --- /dev/null +++ b/packages/google-cloud-gdchardwaremanagement/samples/generated_samples/gdchardwaremanagement_v1alpha_generated_gdc_hardware_management_signal_zone_state_sync.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for SignalZoneState +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-gdchardwaremanagement + + +# [START gdchardwaremanagement_v1alpha_generated_GDCHardwareManagement_SignalZoneState_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import gdchardwaremanagement_v1alpha + + +def sample_signal_zone_state(): + # Create a client + client = gdchardwaremanagement_v1alpha.GDCHardwareManagementClient() + + # Initialize request argument(s) + request = gdchardwaremanagement_v1alpha.SignalZoneStateRequest( + name="name_value", + state_signal="FACTORY_TURNUP_CHECKS_FAILED", + ) + + # Make the request + operation = client.signal_zone_state(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END gdchardwaremanagement_v1alpha_generated_GDCHardwareManagement_SignalZoneState_sync] diff --git a/packages/google-cloud-gdchardwaremanagement/samples/generated_samples/gdchardwaremanagement_v1alpha_generated_gdc_hardware_management_submit_order_async.py b/packages/google-cloud-gdchardwaremanagement/samples/generated_samples/gdchardwaremanagement_v1alpha_generated_gdc_hardware_management_submit_order_async.py new file mode 100644 index 000000000000..436a60453d53 --- /dev/null +++ b/packages/google-cloud-gdchardwaremanagement/samples/generated_samples/gdchardwaremanagement_v1alpha_generated_gdc_hardware_management_submit_order_async.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for SubmitOrder +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-gdchardwaremanagement + + +# [START gdchardwaremanagement_v1alpha_generated_GDCHardwareManagement_SubmitOrder_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import gdchardwaremanagement_v1alpha + + +async def sample_submit_order(): + # Create a client + client = gdchardwaremanagement_v1alpha.GDCHardwareManagementAsyncClient() + + # Initialize request argument(s) + request = gdchardwaremanagement_v1alpha.SubmitOrderRequest( + name="name_value", + ) + + # Make the request + operation = client.submit_order(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END gdchardwaremanagement_v1alpha_generated_GDCHardwareManagement_SubmitOrder_async] diff --git a/packages/google-cloud-gdchardwaremanagement/samples/generated_samples/gdchardwaremanagement_v1alpha_generated_gdc_hardware_management_submit_order_sync.py b/packages/google-cloud-gdchardwaremanagement/samples/generated_samples/gdchardwaremanagement_v1alpha_generated_gdc_hardware_management_submit_order_sync.py new file mode 100644 index 000000000000..7a17abbfc61f --- /dev/null +++ b/packages/google-cloud-gdchardwaremanagement/samples/generated_samples/gdchardwaremanagement_v1alpha_generated_gdc_hardware_management_submit_order_sync.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for SubmitOrder +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-gdchardwaremanagement + + +# [START gdchardwaremanagement_v1alpha_generated_GDCHardwareManagement_SubmitOrder_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import gdchardwaremanagement_v1alpha + + +def sample_submit_order(): + # Create a client + client = gdchardwaremanagement_v1alpha.GDCHardwareManagementClient() + + # Initialize request argument(s) + request = gdchardwaremanagement_v1alpha.SubmitOrderRequest( + name="name_value", + ) + + # Make the request + operation = client.submit_order(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END gdchardwaremanagement_v1alpha_generated_GDCHardwareManagement_SubmitOrder_sync] diff --git a/packages/google-cloud-gdchardwaremanagement/samples/generated_samples/gdchardwaremanagement_v1alpha_generated_gdc_hardware_management_update_hardware_async.py b/packages/google-cloud-gdchardwaremanagement/samples/generated_samples/gdchardwaremanagement_v1alpha_generated_gdc_hardware_management_update_hardware_async.py new file mode 100644 index 000000000000..1ae6043ee1c8 --- /dev/null +++ b/packages/google-cloud-gdchardwaremanagement/samples/generated_samples/gdchardwaremanagement_v1alpha_generated_gdc_hardware_management_update_hardware_async.py @@ -0,0 +1,63 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateHardware +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-gdchardwaremanagement + + +# [START gdchardwaremanagement_v1alpha_generated_GDCHardwareManagement_UpdateHardware_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import gdchardwaremanagement_v1alpha + + +async def sample_update_hardware(): + # Create a client + client = gdchardwaremanagement_v1alpha.GDCHardwareManagementAsyncClient() + + # Initialize request argument(s) + hardware = gdchardwaremanagement_v1alpha.Hardware() + hardware.order = "order_value" + hardware.site = "site_value" + hardware.config.sku = "sku_value" + hardware.config.power_supply = "POWER_SUPPLY_DC" + hardware.zone = "zone_value" + + request = gdchardwaremanagement_v1alpha.UpdateHardwareRequest( + hardware=hardware, + ) + + # Make the request + operation = client.update_hardware(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END gdchardwaremanagement_v1alpha_generated_GDCHardwareManagement_UpdateHardware_async] diff --git a/packages/google-cloud-gdchardwaremanagement/samples/generated_samples/gdchardwaremanagement_v1alpha_generated_gdc_hardware_management_update_hardware_group_async.py b/packages/google-cloud-gdchardwaremanagement/samples/generated_samples/gdchardwaremanagement_v1alpha_generated_gdc_hardware_management_update_hardware_group_async.py new file mode 100644 index 000000000000..a35dcc92fbd6 --- /dev/null +++ b/packages/google-cloud-gdchardwaremanagement/samples/generated_samples/gdchardwaremanagement_v1alpha_generated_gdc_hardware_management_update_hardware_group_async.py @@ -0,0 +1,62 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateHardwareGroup +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-gdchardwaremanagement + + +# [START gdchardwaremanagement_v1alpha_generated_GDCHardwareManagement_UpdateHardwareGroup_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import gdchardwaremanagement_v1alpha + + +async def sample_update_hardware_group(): + # Create a client + client = gdchardwaremanagement_v1alpha.GDCHardwareManagementAsyncClient() + + # Initialize request argument(s) + hardware_group = gdchardwaremanagement_v1alpha.HardwareGroup() + hardware_group.hardware_count = 1494 + hardware_group.config.sku = "sku_value" + hardware_group.config.power_supply = "POWER_SUPPLY_DC" + hardware_group.site = "site_value" + + request = gdchardwaremanagement_v1alpha.UpdateHardwareGroupRequest( + hardware_group=hardware_group, + ) + + # Make the request + operation = client.update_hardware_group(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END gdchardwaremanagement_v1alpha_generated_GDCHardwareManagement_UpdateHardwareGroup_async] diff --git a/packages/google-cloud-gdchardwaremanagement/samples/generated_samples/gdchardwaremanagement_v1alpha_generated_gdc_hardware_management_update_hardware_group_sync.py b/packages/google-cloud-gdchardwaremanagement/samples/generated_samples/gdchardwaremanagement_v1alpha_generated_gdc_hardware_management_update_hardware_group_sync.py new file mode 100644 index 000000000000..d7030a04402d --- /dev/null +++ b/packages/google-cloud-gdchardwaremanagement/samples/generated_samples/gdchardwaremanagement_v1alpha_generated_gdc_hardware_management_update_hardware_group_sync.py @@ -0,0 +1,62 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateHardwareGroup +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-gdchardwaremanagement + + +# [START gdchardwaremanagement_v1alpha_generated_GDCHardwareManagement_UpdateHardwareGroup_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import gdchardwaremanagement_v1alpha + + +def sample_update_hardware_group(): + # Create a client + client = gdchardwaremanagement_v1alpha.GDCHardwareManagementClient() + + # Initialize request argument(s) + hardware_group = gdchardwaremanagement_v1alpha.HardwareGroup() + hardware_group.hardware_count = 1494 + hardware_group.config.sku = "sku_value" + hardware_group.config.power_supply = "POWER_SUPPLY_DC" + hardware_group.site = "site_value" + + request = gdchardwaremanagement_v1alpha.UpdateHardwareGroupRequest( + hardware_group=hardware_group, + ) + + # Make the request + operation = client.update_hardware_group(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END gdchardwaremanagement_v1alpha_generated_GDCHardwareManagement_UpdateHardwareGroup_sync] diff --git a/packages/google-cloud-gdchardwaremanagement/samples/generated_samples/gdchardwaremanagement_v1alpha_generated_gdc_hardware_management_update_hardware_sync.py b/packages/google-cloud-gdchardwaremanagement/samples/generated_samples/gdchardwaremanagement_v1alpha_generated_gdc_hardware_management_update_hardware_sync.py new file mode 100644 index 000000000000..332884d94ec4 --- /dev/null +++ b/packages/google-cloud-gdchardwaremanagement/samples/generated_samples/gdchardwaremanagement_v1alpha_generated_gdc_hardware_management_update_hardware_sync.py @@ -0,0 +1,63 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateHardware +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-gdchardwaremanagement + + +# [START gdchardwaremanagement_v1alpha_generated_GDCHardwareManagement_UpdateHardware_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import gdchardwaremanagement_v1alpha + + +def sample_update_hardware(): + # Create a client + client = gdchardwaremanagement_v1alpha.GDCHardwareManagementClient() + + # Initialize request argument(s) + hardware = gdchardwaremanagement_v1alpha.Hardware() + hardware.order = "order_value" + hardware.site = "site_value" + hardware.config.sku = "sku_value" + hardware.config.power_supply = "POWER_SUPPLY_DC" + hardware.zone = "zone_value" + + request = gdchardwaremanagement_v1alpha.UpdateHardwareRequest( + hardware=hardware, + ) + + # Make the request + operation = client.update_hardware(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END gdchardwaremanagement_v1alpha_generated_GDCHardwareManagement_UpdateHardware_sync] diff --git a/packages/google-cloud-gdchardwaremanagement/samples/generated_samples/gdchardwaremanagement_v1alpha_generated_gdc_hardware_management_update_order_async.py b/packages/google-cloud-gdchardwaremanagement/samples/generated_samples/gdchardwaremanagement_v1alpha_generated_gdc_hardware_management_update_order_async.py new file mode 100644 index 000000000000..9684cb29f2dd --- /dev/null +++ b/packages/google-cloud-gdchardwaremanagement/samples/generated_samples/gdchardwaremanagement_v1alpha_generated_gdc_hardware_management_update_order_async.py @@ -0,0 +1,64 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateOrder +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-gdchardwaremanagement + + +# [START gdchardwaremanagement_v1alpha_generated_GDCHardwareManagement_UpdateOrder_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import gdchardwaremanagement_v1alpha + + +async def sample_update_order(): + # Create a client + client = gdchardwaremanagement_v1alpha.GDCHardwareManagementAsyncClient() + + # Initialize request argument(s) + order = gdchardwaremanagement_v1alpha.Order() + order.organization_contact.contacts.given_name = "given_name_value" + order.organization_contact.contacts.email = "email_value" + order.organization_contact.contacts.phone = "phone_value" + order.customer_motivation = "customer_motivation_value" + order.region_code = "region_code_value" + order.billing_id = "billing_id_value" + + request = gdchardwaremanagement_v1alpha.UpdateOrderRequest( + order=order, + ) + + # Make the request + operation = client.update_order(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END gdchardwaremanagement_v1alpha_generated_GDCHardwareManagement_UpdateOrder_async] diff --git a/packages/google-cloud-gdchardwaremanagement/samples/generated_samples/gdchardwaremanagement_v1alpha_generated_gdc_hardware_management_update_order_sync.py b/packages/google-cloud-gdchardwaremanagement/samples/generated_samples/gdchardwaremanagement_v1alpha_generated_gdc_hardware_management_update_order_sync.py new file mode 100644 index 000000000000..bf7bac680e2a --- /dev/null +++ b/packages/google-cloud-gdchardwaremanagement/samples/generated_samples/gdchardwaremanagement_v1alpha_generated_gdc_hardware_management_update_order_sync.py @@ -0,0 +1,64 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateOrder +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-gdchardwaremanagement + + +# [START gdchardwaremanagement_v1alpha_generated_GDCHardwareManagement_UpdateOrder_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import gdchardwaremanagement_v1alpha + + +def sample_update_order(): + # Create a client + client = gdchardwaremanagement_v1alpha.GDCHardwareManagementClient() + + # Initialize request argument(s) + order = gdchardwaremanagement_v1alpha.Order() + order.organization_contact.contacts.given_name = "given_name_value" + order.organization_contact.contacts.email = "email_value" + order.organization_contact.contacts.phone = "phone_value" + order.customer_motivation = "customer_motivation_value" + order.region_code = "region_code_value" + order.billing_id = "billing_id_value" + + request = gdchardwaremanagement_v1alpha.UpdateOrderRequest( + order=order, + ) + + # Make the request + operation = client.update_order(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END gdchardwaremanagement_v1alpha_generated_GDCHardwareManagement_UpdateOrder_sync] diff --git a/packages/google-cloud-gdchardwaremanagement/samples/generated_samples/gdchardwaremanagement_v1alpha_generated_gdc_hardware_management_update_site_async.py b/packages/google-cloud-gdchardwaremanagement/samples/generated_samples/gdchardwaremanagement_v1alpha_generated_gdc_hardware_management_update_site_async.py new file mode 100644 index 000000000000..d1356aee37c0 --- /dev/null +++ b/packages/google-cloud-gdchardwaremanagement/samples/generated_samples/gdchardwaremanagement_v1alpha_generated_gdc_hardware_management_update_site_async.py @@ -0,0 +1,62 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateSite +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-gdchardwaremanagement + + +# [START gdchardwaremanagement_v1alpha_generated_GDCHardwareManagement_UpdateSite_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import gdchardwaremanagement_v1alpha + + +async def sample_update_site(): + # Create a client + client = gdchardwaremanagement_v1alpha.GDCHardwareManagementAsyncClient() + + # Initialize request argument(s) + site = gdchardwaremanagement_v1alpha.Site() + site.organization_contact.contacts.given_name = "given_name_value" + site.organization_contact.contacts.email = "email_value" + site.organization_contact.contacts.phone = "phone_value" + site.google_maps_pin_uri = "google_maps_pin_uri_value" + + request = gdchardwaremanagement_v1alpha.UpdateSiteRequest( + site=site, + ) + + # Make the request + operation = client.update_site(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END gdchardwaremanagement_v1alpha_generated_GDCHardwareManagement_UpdateSite_async] diff --git a/packages/google-cloud-gdchardwaremanagement/samples/generated_samples/gdchardwaremanagement_v1alpha_generated_gdc_hardware_management_update_site_sync.py b/packages/google-cloud-gdchardwaremanagement/samples/generated_samples/gdchardwaremanagement_v1alpha_generated_gdc_hardware_management_update_site_sync.py new file mode 100644 index 000000000000..5f6c99d8199f --- /dev/null +++ b/packages/google-cloud-gdchardwaremanagement/samples/generated_samples/gdchardwaremanagement_v1alpha_generated_gdc_hardware_management_update_site_sync.py @@ -0,0 +1,62 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateSite +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-gdchardwaremanagement + + +# [START gdchardwaremanagement_v1alpha_generated_GDCHardwareManagement_UpdateSite_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import gdchardwaremanagement_v1alpha + + +def sample_update_site(): + # Create a client + client = gdchardwaremanagement_v1alpha.GDCHardwareManagementClient() + + # Initialize request argument(s) + site = gdchardwaremanagement_v1alpha.Site() + site.organization_contact.contacts.given_name = "given_name_value" + site.organization_contact.contacts.email = "email_value" + site.organization_contact.contacts.phone = "phone_value" + site.google_maps_pin_uri = "google_maps_pin_uri_value" + + request = gdchardwaremanagement_v1alpha.UpdateSiteRequest( + site=site, + ) + + # Make the request + operation = client.update_site(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END gdchardwaremanagement_v1alpha_generated_GDCHardwareManagement_UpdateSite_sync] diff --git a/packages/google-cloud-gdchardwaremanagement/samples/generated_samples/gdchardwaremanagement_v1alpha_generated_gdc_hardware_management_update_zone_async.py b/packages/google-cloud-gdchardwaremanagement/samples/generated_samples/gdchardwaremanagement_v1alpha_generated_gdc_hardware_management_update_zone_async.py new file mode 100644 index 000000000000..cca930e25a20 --- /dev/null +++ b/packages/google-cloud-gdchardwaremanagement/samples/generated_samples/gdchardwaremanagement_v1alpha_generated_gdc_hardware_management_update_zone_async.py @@ -0,0 +1,61 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateZone +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-gdchardwaremanagement + + +# [START gdchardwaremanagement_v1alpha_generated_GDCHardwareManagement_UpdateZone_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import gdchardwaremanagement_v1alpha + + +async def sample_update_zone(): + # Create a client + client = gdchardwaremanagement_v1alpha.GDCHardwareManagementAsyncClient() + + # Initialize request argument(s) + zone = gdchardwaremanagement_v1alpha.Zone() + zone.contacts.given_name = "given_name_value" + zone.contacts.email = "email_value" + zone.contacts.phone = "phone_value" + + request = gdchardwaremanagement_v1alpha.UpdateZoneRequest( + zone=zone, + ) + + # Make the request + operation = client.update_zone(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END gdchardwaremanagement_v1alpha_generated_GDCHardwareManagement_UpdateZone_async] diff --git a/packages/google-cloud-gdchardwaremanagement/samples/generated_samples/gdchardwaremanagement_v1alpha_generated_gdc_hardware_management_update_zone_sync.py b/packages/google-cloud-gdchardwaremanagement/samples/generated_samples/gdchardwaremanagement_v1alpha_generated_gdc_hardware_management_update_zone_sync.py new file mode 100644 index 000000000000..91ee6adb3b98 --- /dev/null +++ b/packages/google-cloud-gdchardwaremanagement/samples/generated_samples/gdchardwaremanagement_v1alpha_generated_gdc_hardware_management_update_zone_sync.py @@ -0,0 +1,61 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateZone +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-gdchardwaremanagement + + +# [START gdchardwaremanagement_v1alpha_generated_GDCHardwareManagement_UpdateZone_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import gdchardwaremanagement_v1alpha + + +def sample_update_zone(): + # Create a client + client = gdchardwaremanagement_v1alpha.GDCHardwareManagementClient() + + # Initialize request argument(s) + zone = gdchardwaremanagement_v1alpha.Zone() + zone.contacts.given_name = "given_name_value" + zone.contacts.email = "email_value" + zone.contacts.phone = "phone_value" + + request = gdchardwaremanagement_v1alpha.UpdateZoneRequest( + zone=zone, + ) + + # Make the request + operation = client.update_zone(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END gdchardwaremanagement_v1alpha_generated_GDCHardwareManagement_UpdateZone_sync] diff --git a/packages/google-cloud-gdchardwaremanagement/samples/generated_samples/snippet_metadata_google.cloud.gdchardwaremanagement.v1alpha.json b/packages/google-cloud-gdchardwaremanagement/samples/generated_samples/snippet_metadata_google.cloud.gdchardwaremanagement.v1alpha.json new file mode 100644 index 000000000000..ec6fd1d0b74b --- /dev/null +++ b/packages/google-cloud-gdchardwaremanagement/samples/generated_samples/snippet_metadata_google.cloud.gdchardwaremanagement.v1alpha.json @@ -0,0 +1,5472 @@ +{ + "clientLibrary": { + "apis": [ + { + "id": "google.cloud.gdchardwaremanagement.v1alpha", + "version": "v1alpha" + } + ], + "language": "PYTHON", + "name": "google-cloud-gdchardwaremanagement", + "version": "0.1.1" + }, + "snippets": [ + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.gdchardwaremanagement_v1alpha.GDCHardwareManagementAsyncClient", + "shortName": "GDCHardwareManagementAsyncClient" + }, + "fullName": "google.cloud.gdchardwaremanagement_v1alpha.GDCHardwareManagementAsyncClient.create_comment", + "method": { + "fullName": "google.cloud.gdchardwaremanagement.v1alpha.GDCHardwareManagement.CreateComment", + "service": { + "fullName": "google.cloud.gdchardwaremanagement.v1alpha.GDCHardwareManagement", + "shortName": "GDCHardwareManagement" + }, + "shortName": "CreateComment" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.gdchardwaremanagement_v1alpha.types.CreateCommentRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "comment", + "type": "google.cloud.gdchardwaremanagement_v1alpha.types.Comment" + }, + { + "name": "comment_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "create_comment" + }, + "description": "Sample for CreateComment", + "file": "gdchardwaremanagement_v1alpha_generated_gdc_hardware_management_create_comment_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "gdchardwaremanagement_v1alpha_generated_GDCHardwareManagement_CreateComment_async", + "segments": [ + { + "end": 59, + "start": 27, + "type": "FULL" + }, + { + "end": 59, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 49, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 56, 
+ "start": 50, + "type": "REQUEST_EXECUTION" + }, + { + "end": 60, + "start": 57, + "type": "RESPONSE_HANDLING" + } + ], + "title": "gdchardwaremanagement_v1alpha_generated_gdc_hardware_management_create_comment_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.gdchardwaremanagement_v1alpha.GDCHardwareManagementClient", + "shortName": "GDCHardwareManagementClient" + }, + "fullName": "google.cloud.gdchardwaremanagement_v1alpha.GDCHardwareManagementClient.create_comment", + "method": { + "fullName": "google.cloud.gdchardwaremanagement.v1alpha.GDCHardwareManagement.CreateComment", + "service": { + "fullName": "google.cloud.gdchardwaremanagement.v1alpha.GDCHardwareManagement", + "shortName": "GDCHardwareManagement" + }, + "shortName": "CreateComment" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.gdchardwaremanagement_v1alpha.types.CreateCommentRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "comment", + "type": "google.cloud.gdchardwaremanagement_v1alpha.types.Comment" + }, + { + "name": "comment_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "create_comment" + }, + "description": "Sample for CreateComment", + "file": "gdchardwaremanagement_v1alpha_generated_gdc_hardware_management_create_comment_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "gdchardwaremanagement_v1alpha_generated_GDCHardwareManagement_CreateComment_sync", + "segments": [ + { + "end": 59, + "start": 27, + "type": "FULL" + }, + { + "end": 59, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 49, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 56, + "start": 50, + "type": "REQUEST_EXECUTION" + }, + { + "end": 60, + "start": 57, + "type": "RESPONSE_HANDLING" + } + ], + "title": "gdchardwaremanagement_v1alpha_generated_gdc_hardware_management_create_comment_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.gdchardwaremanagement_v1alpha.GDCHardwareManagementAsyncClient", + "shortName": "GDCHardwareManagementAsyncClient" + }, + "fullName": "google.cloud.gdchardwaremanagement_v1alpha.GDCHardwareManagementAsyncClient.create_hardware_group", + "method": { + "fullName": "google.cloud.gdchardwaremanagement.v1alpha.GDCHardwareManagement.CreateHardwareGroup", + "service": { + "fullName": "google.cloud.gdchardwaremanagement.v1alpha.GDCHardwareManagement", + "shortName": "GDCHardwareManagement" + }, + "shortName": "CreateHardwareGroup" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.gdchardwaremanagement_v1alpha.types.CreateHardwareGroupRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "hardware_group", + "type": "google.cloud.gdchardwaremanagement_v1alpha.types.HardwareGroup" + }, + { + "name": "hardware_group_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "create_hardware_group" + }, + "description": "Sample for 
CreateHardwareGroup", + "file": "gdchardwaremanagement_v1alpha_generated_gdc_hardware_management_create_hardware_group_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "gdchardwaremanagement_v1alpha_generated_GDCHardwareManagement_CreateHardwareGroup_async", + "segments": [ + { + "end": 62, + "start": 27, + "type": "FULL" + }, + { + "end": 62, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 52, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 59, + "start": 53, + "type": "REQUEST_EXECUTION" + }, + { + "end": 63, + "start": 60, + "type": "RESPONSE_HANDLING" + } + ], + "title": "gdchardwaremanagement_v1alpha_generated_gdc_hardware_management_create_hardware_group_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.gdchardwaremanagement_v1alpha.GDCHardwareManagementClient", + "shortName": "GDCHardwareManagementClient" + }, + "fullName": "google.cloud.gdchardwaremanagement_v1alpha.GDCHardwareManagementClient.create_hardware_group", + "method": { + "fullName": "google.cloud.gdchardwaremanagement.v1alpha.GDCHardwareManagement.CreateHardwareGroup", + "service": { + "fullName": "google.cloud.gdchardwaremanagement.v1alpha.GDCHardwareManagement", + "shortName": "GDCHardwareManagement" + }, + "shortName": "CreateHardwareGroup" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.gdchardwaremanagement_v1alpha.types.CreateHardwareGroupRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "hardware_group", + "type": "google.cloud.gdchardwaremanagement_v1alpha.types.HardwareGroup" + }, + { + "name": "hardware_group_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "create_hardware_group" + }, + "description": "Sample for CreateHardwareGroup", + "file": "gdchardwaremanagement_v1alpha_generated_gdc_hardware_management_create_hardware_group_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "gdchardwaremanagement_v1alpha_generated_GDCHardwareManagement_CreateHardwareGroup_sync", + "segments": [ + { + "end": 62, + "start": 27, + "type": "FULL" + }, + { + "end": 62, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 52, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 59, + "start": 53, + "type": "REQUEST_EXECUTION" + }, + { + "end": 63, + "start": 60, + "type": "RESPONSE_HANDLING" + } + ], + "title": "gdchardwaremanagement_v1alpha_generated_gdc_hardware_management_create_hardware_group_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.gdchardwaremanagement_v1alpha.GDCHardwareManagementAsyncClient", + "shortName": "GDCHardwareManagementAsyncClient" + }, + "fullName": "google.cloud.gdchardwaremanagement_v1alpha.GDCHardwareManagementAsyncClient.create_hardware", + "method": { + "fullName": "google.cloud.gdchardwaremanagement.v1alpha.GDCHardwareManagement.CreateHardware", + "service": { + "fullName": "google.cloud.gdchardwaremanagement.v1alpha.GDCHardwareManagement", + "shortName": "GDCHardwareManagement" + }, + "shortName": "CreateHardware" + }, + "parameters": [ + { + "name": "request", + 
"type": "google.cloud.gdchardwaremanagement_v1alpha.types.CreateHardwareRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "hardware", + "type": "google.cloud.gdchardwaremanagement_v1alpha.types.Hardware" + }, + { + "name": "hardware_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "create_hardware" + }, + "description": "Sample for CreateHardware", + "file": "gdchardwaremanagement_v1alpha_generated_gdc_hardware_management_create_hardware_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "gdchardwaremanagement_v1alpha_generated_GDCHardwareManagement_CreateHardware_async", + "segments": [ + { + "end": 63, + "start": 27, + "type": "FULL" + }, + { + "end": 63, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 53, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 60, + "start": 54, + "type": "REQUEST_EXECUTION" + }, + { + "end": 64, + "start": 61, + "type": "RESPONSE_HANDLING" + } + ], + "title": "gdchardwaremanagement_v1alpha_generated_gdc_hardware_management_create_hardware_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.gdchardwaremanagement_v1alpha.GDCHardwareManagementClient", + "shortName": "GDCHardwareManagementClient" + }, + "fullName": "google.cloud.gdchardwaremanagement_v1alpha.GDCHardwareManagementClient.create_hardware", + "method": { + "fullName": "google.cloud.gdchardwaremanagement.v1alpha.GDCHardwareManagement.CreateHardware", + "service": { + "fullName": "google.cloud.gdchardwaremanagement.v1alpha.GDCHardwareManagement", + "shortName": "GDCHardwareManagement" + }, + "shortName": "CreateHardware" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.gdchardwaremanagement_v1alpha.types.CreateHardwareRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "hardware", + "type": "google.cloud.gdchardwaremanagement_v1alpha.types.Hardware" + }, + { + "name": "hardware_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "create_hardware" + }, + "description": "Sample for CreateHardware", + "file": "gdchardwaremanagement_v1alpha_generated_gdc_hardware_management_create_hardware_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "gdchardwaremanagement_v1alpha_generated_GDCHardwareManagement_CreateHardware_sync", + "segments": [ + { + "end": 63, + "start": 27, + "type": "FULL" + }, + { + "end": 63, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 53, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 60, + "start": 54, + "type": "REQUEST_EXECUTION" + }, + { + "end": 64, + "start": 61, + "type": "RESPONSE_HANDLING" + } + ], + "title": "gdchardwaremanagement_v1alpha_generated_gdc_hardware_management_create_hardware_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": 
"google.cloud.gdchardwaremanagement_v1alpha.GDCHardwareManagementAsyncClient", + "shortName": "GDCHardwareManagementAsyncClient" + }, + "fullName": "google.cloud.gdchardwaremanagement_v1alpha.GDCHardwareManagementAsyncClient.create_order", + "method": { + "fullName": "google.cloud.gdchardwaremanagement.v1alpha.GDCHardwareManagement.CreateOrder", + "service": { + "fullName": "google.cloud.gdchardwaremanagement.v1alpha.GDCHardwareManagement", + "shortName": "GDCHardwareManagement" + }, + "shortName": "CreateOrder" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.gdchardwaremanagement_v1alpha.types.CreateOrderRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "order", + "type": "google.cloud.gdchardwaremanagement_v1alpha.types.Order" + }, + { + "name": "order_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "create_order" + }, + "description": "Sample for CreateOrder", + "file": "gdchardwaremanagement_v1alpha_generated_gdc_hardware_management_create_order_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "gdchardwaremanagement_v1alpha_generated_GDCHardwareManagement_CreateOrder_async", + "segments": [ + { + "end": 64, + "start": 27, + "type": "FULL" + }, + { + "end": 64, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 54, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 61, + "start": 55, + "type": "REQUEST_EXECUTION" + }, + { + "end": 65, + "start": 62, + "type": "RESPONSE_HANDLING" + } + ], + "title": "gdchardwaremanagement_v1alpha_generated_gdc_hardware_management_create_order_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.gdchardwaremanagement_v1alpha.GDCHardwareManagementClient", + "shortName": "GDCHardwareManagementClient" + }, + "fullName": "google.cloud.gdchardwaremanagement_v1alpha.GDCHardwareManagementClient.create_order", + "method": { + "fullName": "google.cloud.gdchardwaremanagement.v1alpha.GDCHardwareManagement.CreateOrder", + "service": { + "fullName": "google.cloud.gdchardwaremanagement.v1alpha.GDCHardwareManagement", + "shortName": "GDCHardwareManagement" + }, + "shortName": "CreateOrder" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.gdchardwaremanagement_v1alpha.types.CreateOrderRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "order", + "type": "google.cloud.gdchardwaremanagement_v1alpha.types.Order" + }, + { + "name": "order_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "create_order" + }, + "description": "Sample for CreateOrder", + "file": "gdchardwaremanagement_v1alpha_generated_gdc_hardware_management_create_order_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "gdchardwaremanagement_v1alpha_generated_GDCHardwareManagement_CreateOrder_sync", + "segments": [ + { + "end": 64, + "start": 27, + "type": "FULL" + }, + { + "end": 64, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, 
+ "type": "CLIENT_INITIALIZATION" + }, + { + "end": 54, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 61, + "start": 55, + "type": "REQUEST_EXECUTION" + }, + { + "end": 65, + "start": 62, + "type": "RESPONSE_HANDLING" + } + ], + "title": "gdchardwaremanagement_v1alpha_generated_gdc_hardware_management_create_order_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.gdchardwaremanagement_v1alpha.GDCHardwareManagementAsyncClient", + "shortName": "GDCHardwareManagementAsyncClient" + }, + "fullName": "google.cloud.gdchardwaremanagement_v1alpha.GDCHardwareManagementAsyncClient.create_site", + "method": { + "fullName": "google.cloud.gdchardwaremanagement.v1alpha.GDCHardwareManagement.CreateSite", + "service": { + "fullName": "google.cloud.gdchardwaremanagement.v1alpha.GDCHardwareManagement", + "shortName": "GDCHardwareManagement" + }, + "shortName": "CreateSite" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.gdchardwaremanagement_v1alpha.types.CreateSiteRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "site", + "type": "google.cloud.gdchardwaremanagement_v1alpha.types.Site" + }, + { + "name": "site_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "create_site" + }, + "description": "Sample for CreateSite", + "file": "gdchardwaremanagement_v1alpha_generated_gdc_hardware_management_create_site_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "gdchardwaremanagement_v1alpha_generated_GDCHardwareManagement_CreateSite_async", + "segments": [ + { + "end": 62, + "start": 27, + "type": "FULL" + }, + { + "end": 62, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 52, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 59, + "start": 53, + "type": "REQUEST_EXECUTION" + }, + { + "end": 63, + "start": 60, + "type": "RESPONSE_HANDLING" + } + ], + "title": "gdchardwaremanagement_v1alpha_generated_gdc_hardware_management_create_site_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.gdchardwaremanagement_v1alpha.GDCHardwareManagementClient", + "shortName": "GDCHardwareManagementClient" + }, + "fullName": "google.cloud.gdchardwaremanagement_v1alpha.GDCHardwareManagementClient.create_site", + "method": { + "fullName": "google.cloud.gdchardwaremanagement.v1alpha.GDCHardwareManagement.CreateSite", + "service": { + "fullName": "google.cloud.gdchardwaremanagement.v1alpha.GDCHardwareManagement", + "shortName": "GDCHardwareManagement" + }, + "shortName": "CreateSite" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.gdchardwaremanagement_v1alpha.types.CreateSiteRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "site", + "type": "google.cloud.gdchardwaremanagement_v1alpha.types.Site" + }, + { + "name": "site_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "create_site" + }, + "description": "Sample 
for CreateSite", + "file": "gdchardwaremanagement_v1alpha_generated_gdc_hardware_management_create_site_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "gdchardwaremanagement_v1alpha_generated_GDCHardwareManagement_CreateSite_sync", + "segments": [ + { + "end": 62, + "start": 27, + "type": "FULL" + }, + { + "end": 62, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 52, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 59, + "start": 53, + "type": "REQUEST_EXECUTION" + }, + { + "end": 63, + "start": 60, + "type": "RESPONSE_HANDLING" + } + ], + "title": "gdchardwaremanagement_v1alpha_generated_gdc_hardware_management_create_site_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.gdchardwaremanagement_v1alpha.GDCHardwareManagementAsyncClient", + "shortName": "GDCHardwareManagementAsyncClient" + }, + "fullName": "google.cloud.gdchardwaremanagement_v1alpha.GDCHardwareManagementAsyncClient.create_zone", + "method": { + "fullName": "google.cloud.gdchardwaremanagement.v1alpha.GDCHardwareManagement.CreateZone", + "service": { + "fullName": "google.cloud.gdchardwaremanagement.v1alpha.GDCHardwareManagement", + "shortName": "GDCHardwareManagement" + }, + "shortName": "CreateZone" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.gdchardwaremanagement_v1alpha.types.CreateZoneRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "zone", + "type": "google.cloud.gdchardwaremanagement_v1alpha.types.Zone" + }, + { + "name": "zone_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "create_zone" + }, + "description": "Sample for CreateZone", + "file": "gdchardwaremanagement_v1alpha_generated_gdc_hardware_management_create_zone_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "gdchardwaremanagement_v1alpha_generated_GDCHardwareManagement_CreateZone_async", + "segments": [ + { + "end": 61, + "start": 27, + "type": "FULL" + }, + { + "end": 61, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 51, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 58, + "start": 52, + "type": "REQUEST_EXECUTION" + }, + { + "end": 62, + "start": 59, + "type": "RESPONSE_HANDLING" + } + ], + "title": "gdchardwaremanagement_v1alpha_generated_gdc_hardware_management_create_zone_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.gdchardwaremanagement_v1alpha.GDCHardwareManagementClient", + "shortName": "GDCHardwareManagementClient" + }, + "fullName": "google.cloud.gdchardwaremanagement_v1alpha.GDCHardwareManagementClient.create_zone", + "method": { + "fullName": "google.cloud.gdchardwaremanagement.v1alpha.GDCHardwareManagement.CreateZone", + "service": { + "fullName": "google.cloud.gdchardwaremanagement.v1alpha.GDCHardwareManagement", + "shortName": "GDCHardwareManagement" + }, + "shortName": "CreateZone" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.gdchardwaremanagement_v1alpha.types.CreateZoneRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "zone", + 
"type": "google.cloud.gdchardwaremanagement_v1alpha.types.Zone" + }, + { + "name": "zone_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "create_zone" + }, + "description": "Sample for CreateZone", + "file": "gdchardwaremanagement_v1alpha_generated_gdc_hardware_management_create_zone_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "gdchardwaremanagement_v1alpha_generated_GDCHardwareManagement_CreateZone_sync", + "segments": [ + { + "end": 61, + "start": 27, + "type": "FULL" + }, + { + "end": 61, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 51, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 58, + "start": 52, + "type": "REQUEST_EXECUTION" + }, + { + "end": 62, + "start": 59, + "type": "RESPONSE_HANDLING" + } + ], + "title": "gdchardwaremanagement_v1alpha_generated_gdc_hardware_management_create_zone_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.gdchardwaremanagement_v1alpha.GDCHardwareManagementAsyncClient", + "shortName": "GDCHardwareManagementAsyncClient" + }, + "fullName": "google.cloud.gdchardwaremanagement_v1alpha.GDCHardwareManagementAsyncClient.delete_hardware_group", + "method": { + "fullName": "google.cloud.gdchardwaremanagement.v1alpha.GDCHardwareManagement.DeleteHardwareGroup", + "service": { + "fullName": "google.cloud.gdchardwaremanagement.v1alpha.GDCHardwareManagement", + "shortName": "GDCHardwareManagement" + }, + "shortName": "DeleteHardwareGroup" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.gdchardwaremanagement_v1alpha.types.DeleteHardwareGroupRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "delete_hardware_group" + }, + "description": "Sample for DeleteHardwareGroup", + "file": "gdchardwaremanagement_v1alpha_generated_gdc_hardware_management_delete_hardware_group_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "gdchardwaremanagement_v1alpha_generated_GDCHardwareManagement_DeleteHardwareGroup_async", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "gdchardwaremanagement_v1alpha_generated_gdc_hardware_management_delete_hardware_group_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.gdchardwaremanagement_v1alpha.GDCHardwareManagementClient", + "shortName": "GDCHardwareManagementClient" + }, + "fullName": "google.cloud.gdchardwaremanagement_v1alpha.GDCHardwareManagementClient.delete_hardware_group", + "method": { + "fullName": 
"google.cloud.gdchardwaremanagement.v1alpha.GDCHardwareManagement.DeleteHardwareGroup", + "service": { + "fullName": "google.cloud.gdchardwaremanagement.v1alpha.GDCHardwareManagement", + "shortName": "GDCHardwareManagement" + }, + "shortName": "DeleteHardwareGroup" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.gdchardwaremanagement_v1alpha.types.DeleteHardwareGroupRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "delete_hardware_group" + }, + "description": "Sample for DeleteHardwareGroup", + "file": "gdchardwaremanagement_v1alpha_generated_gdc_hardware_management_delete_hardware_group_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "gdchardwaremanagement_v1alpha_generated_GDCHardwareManagement_DeleteHardwareGroup_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "gdchardwaremanagement_v1alpha_generated_gdc_hardware_management_delete_hardware_group_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.gdchardwaremanagement_v1alpha.GDCHardwareManagementAsyncClient", + "shortName": "GDCHardwareManagementAsyncClient" + }, + "fullName": "google.cloud.gdchardwaremanagement_v1alpha.GDCHardwareManagementAsyncClient.delete_hardware", + "method": { + "fullName": "google.cloud.gdchardwaremanagement.v1alpha.GDCHardwareManagement.DeleteHardware", + "service": { + "fullName": "google.cloud.gdchardwaremanagement.v1alpha.GDCHardwareManagement", + "shortName": "GDCHardwareManagement" + }, + "shortName": "DeleteHardware" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.gdchardwaremanagement_v1alpha.types.DeleteHardwareRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "delete_hardware" + }, + "description": "Sample for DeleteHardware", + "file": "gdchardwaremanagement_v1alpha_generated_gdc_hardware_management_delete_hardware_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "gdchardwaremanagement_v1alpha_generated_GDCHardwareManagement_DeleteHardware_async", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "gdchardwaremanagement_v1alpha_generated_gdc_hardware_management_delete_hardware_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": 
"google.cloud.gdchardwaremanagement_v1alpha.GDCHardwareManagementClient", + "shortName": "GDCHardwareManagementClient" + }, + "fullName": "google.cloud.gdchardwaremanagement_v1alpha.GDCHardwareManagementClient.delete_hardware", + "method": { + "fullName": "google.cloud.gdchardwaremanagement.v1alpha.GDCHardwareManagement.DeleteHardware", + "service": { + "fullName": "google.cloud.gdchardwaremanagement.v1alpha.GDCHardwareManagement", + "shortName": "GDCHardwareManagement" + }, + "shortName": "DeleteHardware" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.gdchardwaremanagement_v1alpha.types.DeleteHardwareRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "delete_hardware" + }, + "description": "Sample for DeleteHardware", + "file": "gdchardwaremanagement_v1alpha_generated_gdc_hardware_management_delete_hardware_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "gdchardwaremanagement_v1alpha_generated_GDCHardwareManagement_DeleteHardware_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "gdchardwaremanagement_v1alpha_generated_gdc_hardware_management_delete_hardware_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.gdchardwaremanagement_v1alpha.GDCHardwareManagementAsyncClient", + "shortName": "GDCHardwareManagementAsyncClient" + }, + "fullName": "google.cloud.gdchardwaremanagement_v1alpha.GDCHardwareManagementAsyncClient.delete_order", + "method": { + "fullName": "google.cloud.gdchardwaremanagement.v1alpha.GDCHardwareManagement.DeleteOrder", + "service": { + "fullName": "google.cloud.gdchardwaremanagement.v1alpha.GDCHardwareManagement", + "shortName": "GDCHardwareManagement" + }, + "shortName": "DeleteOrder" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.gdchardwaremanagement_v1alpha.types.DeleteOrderRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "delete_order" + }, + "description": "Sample for DeleteOrder", + "file": "gdchardwaremanagement_v1alpha_generated_gdc_hardware_management_delete_order_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "gdchardwaremanagement_v1alpha_generated_GDCHardwareManagement_DeleteOrder_async", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + 
], + "title": "gdchardwaremanagement_v1alpha_generated_gdc_hardware_management_delete_order_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.gdchardwaremanagement_v1alpha.GDCHardwareManagementClient", + "shortName": "GDCHardwareManagementClient" + }, + "fullName": "google.cloud.gdchardwaremanagement_v1alpha.GDCHardwareManagementClient.delete_order", + "method": { + "fullName": "google.cloud.gdchardwaremanagement.v1alpha.GDCHardwareManagement.DeleteOrder", + "service": { + "fullName": "google.cloud.gdchardwaremanagement.v1alpha.GDCHardwareManagement", + "shortName": "GDCHardwareManagement" + }, + "shortName": "DeleteOrder" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.gdchardwaremanagement_v1alpha.types.DeleteOrderRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "delete_order" + }, + "description": "Sample for DeleteOrder", + "file": "gdchardwaremanagement_v1alpha_generated_gdc_hardware_management_delete_order_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "gdchardwaremanagement_v1alpha_generated_GDCHardwareManagement_DeleteOrder_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "gdchardwaremanagement_v1alpha_generated_gdc_hardware_management_delete_order_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.gdchardwaremanagement_v1alpha.GDCHardwareManagementAsyncClient", + "shortName": "GDCHardwareManagementAsyncClient" + }, + "fullName": "google.cloud.gdchardwaremanagement_v1alpha.GDCHardwareManagementAsyncClient.delete_zone", + "method": { + "fullName": "google.cloud.gdchardwaremanagement.v1alpha.GDCHardwareManagement.DeleteZone", + "service": { + "fullName": "google.cloud.gdchardwaremanagement.v1alpha.GDCHardwareManagement", + "shortName": "GDCHardwareManagement" + }, + "shortName": "DeleteZone" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.gdchardwaremanagement_v1alpha.types.DeleteZoneRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "delete_zone" + }, + "description": "Sample for DeleteZone", + "file": "gdchardwaremanagement_v1alpha_generated_gdc_hardware_management_delete_zone_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "gdchardwaremanagement_v1alpha_generated_GDCHardwareManagement_DeleteZone_async", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": 
"REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "gdchardwaremanagement_v1alpha_generated_gdc_hardware_management_delete_zone_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.gdchardwaremanagement_v1alpha.GDCHardwareManagementClient", + "shortName": "GDCHardwareManagementClient" + }, + "fullName": "google.cloud.gdchardwaremanagement_v1alpha.GDCHardwareManagementClient.delete_zone", + "method": { + "fullName": "google.cloud.gdchardwaremanagement.v1alpha.GDCHardwareManagement.DeleteZone", + "service": { + "fullName": "google.cloud.gdchardwaremanagement.v1alpha.GDCHardwareManagement", + "shortName": "GDCHardwareManagement" + }, + "shortName": "DeleteZone" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.gdchardwaremanagement_v1alpha.types.DeleteZoneRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "delete_zone" + }, + "description": "Sample for DeleteZone", + "file": "gdchardwaremanagement_v1alpha_generated_gdc_hardware_management_delete_zone_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "gdchardwaremanagement_v1alpha_generated_GDCHardwareManagement_DeleteZone_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "gdchardwaremanagement_v1alpha_generated_gdc_hardware_management_delete_zone_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.gdchardwaremanagement_v1alpha.GDCHardwareManagementAsyncClient", + "shortName": "GDCHardwareManagementAsyncClient" + }, + "fullName": "google.cloud.gdchardwaremanagement_v1alpha.GDCHardwareManagementAsyncClient.get_change_log_entry", + "method": { + "fullName": "google.cloud.gdchardwaremanagement.v1alpha.GDCHardwareManagement.GetChangeLogEntry", + "service": { + "fullName": "google.cloud.gdchardwaremanagement.v1alpha.GDCHardwareManagement", + "shortName": "GDCHardwareManagement" + }, + "shortName": "GetChangeLogEntry" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.gdchardwaremanagement_v1alpha.types.GetChangeLogEntryRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.gdchardwaremanagement_v1alpha.types.ChangeLogEntry", + "shortName": "get_change_log_entry" + }, + "description": "Sample for GetChangeLogEntry", + "file": "gdchardwaremanagement_v1alpha_generated_gdc_hardware_management_get_change_log_entry_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "gdchardwaremanagement_v1alpha_generated_GDCHardwareManagement_GetChangeLogEntry_async", + "segments": [ + { + 
"end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "gdchardwaremanagement_v1alpha_generated_gdc_hardware_management_get_change_log_entry_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.gdchardwaremanagement_v1alpha.GDCHardwareManagementClient", + "shortName": "GDCHardwareManagementClient" + }, + "fullName": "google.cloud.gdchardwaremanagement_v1alpha.GDCHardwareManagementClient.get_change_log_entry", + "method": { + "fullName": "google.cloud.gdchardwaremanagement.v1alpha.GDCHardwareManagement.GetChangeLogEntry", + "service": { + "fullName": "google.cloud.gdchardwaremanagement.v1alpha.GDCHardwareManagement", + "shortName": "GDCHardwareManagement" + }, + "shortName": "GetChangeLogEntry" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.gdchardwaremanagement_v1alpha.types.GetChangeLogEntryRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.gdchardwaremanagement_v1alpha.types.ChangeLogEntry", + "shortName": "get_change_log_entry" + }, + "description": "Sample for GetChangeLogEntry", + "file": "gdchardwaremanagement_v1alpha_generated_gdc_hardware_management_get_change_log_entry_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "gdchardwaremanagement_v1alpha_generated_GDCHardwareManagement_GetChangeLogEntry_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "gdchardwaremanagement_v1alpha_generated_gdc_hardware_management_get_change_log_entry_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.gdchardwaremanagement_v1alpha.GDCHardwareManagementAsyncClient", + "shortName": "GDCHardwareManagementAsyncClient" + }, + "fullName": "google.cloud.gdchardwaremanagement_v1alpha.GDCHardwareManagementAsyncClient.get_comment", + "method": { + "fullName": "google.cloud.gdchardwaremanagement.v1alpha.GDCHardwareManagement.GetComment", + "service": { + "fullName": "google.cloud.gdchardwaremanagement.v1alpha.GDCHardwareManagement", + "shortName": "GDCHardwareManagement" + }, + "shortName": "GetComment" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.gdchardwaremanagement_v1alpha.types.GetCommentRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.gdchardwaremanagement_v1alpha.types.Comment", + "shortName": "get_comment" + }, + "description": "Sample for GetComment", + "file": 
"gdchardwaremanagement_v1alpha_generated_gdc_hardware_management_get_comment_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "gdchardwaremanagement_v1alpha_generated_GDCHardwareManagement_GetComment_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "gdchardwaremanagement_v1alpha_generated_gdc_hardware_management_get_comment_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.gdchardwaremanagement_v1alpha.GDCHardwareManagementClient", + "shortName": "GDCHardwareManagementClient" + }, + "fullName": "google.cloud.gdchardwaremanagement_v1alpha.GDCHardwareManagementClient.get_comment", + "method": { + "fullName": "google.cloud.gdchardwaremanagement.v1alpha.GDCHardwareManagement.GetComment", + "service": { + "fullName": "google.cloud.gdchardwaremanagement.v1alpha.GDCHardwareManagement", + "shortName": "GDCHardwareManagement" + }, + "shortName": "GetComment" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.gdchardwaremanagement_v1alpha.types.GetCommentRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.gdchardwaremanagement_v1alpha.types.Comment", + "shortName": "get_comment" + }, + "description": "Sample for GetComment", + "file": "gdchardwaremanagement_v1alpha_generated_gdc_hardware_management_get_comment_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "gdchardwaremanagement_v1alpha_generated_GDCHardwareManagement_GetComment_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "gdchardwaremanagement_v1alpha_generated_gdc_hardware_management_get_comment_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.gdchardwaremanagement_v1alpha.GDCHardwareManagementAsyncClient", + "shortName": "GDCHardwareManagementAsyncClient" + }, + "fullName": "google.cloud.gdchardwaremanagement_v1alpha.GDCHardwareManagementAsyncClient.get_hardware_group", + "method": { + "fullName": "google.cloud.gdchardwaremanagement.v1alpha.GDCHardwareManagement.GetHardwareGroup", + "service": { + "fullName": "google.cloud.gdchardwaremanagement.v1alpha.GDCHardwareManagement", + "shortName": "GDCHardwareManagement" + }, + "shortName": "GetHardwareGroup" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.gdchardwaremanagement_v1alpha.types.GetHardwareGroupRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": 
"Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.gdchardwaremanagement_v1alpha.types.HardwareGroup", + "shortName": "get_hardware_group" + }, + "description": "Sample for GetHardwareGroup", + "file": "gdchardwaremanagement_v1alpha_generated_gdc_hardware_management_get_hardware_group_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "gdchardwaremanagement_v1alpha_generated_GDCHardwareManagement_GetHardwareGroup_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "gdchardwaremanagement_v1alpha_generated_gdc_hardware_management_get_hardware_group_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.gdchardwaremanagement_v1alpha.GDCHardwareManagementClient", + "shortName": "GDCHardwareManagementClient" + }, + "fullName": "google.cloud.gdchardwaremanagement_v1alpha.GDCHardwareManagementClient.get_hardware_group", + "method": { + "fullName": "google.cloud.gdchardwaremanagement.v1alpha.GDCHardwareManagement.GetHardwareGroup", + "service": { + "fullName": "google.cloud.gdchardwaremanagement.v1alpha.GDCHardwareManagement", + "shortName": "GDCHardwareManagement" + }, + "shortName": "GetHardwareGroup" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.gdchardwaremanagement_v1alpha.types.GetHardwareGroupRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.gdchardwaremanagement_v1alpha.types.HardwareGroup", + "shortName": "get_hardware_group" + }, + "description": "Sample for GetHardwareGroup", + "file": "gdchardwaremanagement_v1alpha_generated_gdc_hardware_management_get_hardware_group_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "gdchardwaremanagement_v1alpha_generated_GDCHardwareManagement_GetHardwareGroup_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "gdchardwaremanagement_v1alpha_generated_gdc_hardware_management_get_hardware_group_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.gdchardwaremanagement_v1alpha.GDCHardwareManagementAsyncClient", + "shortName": "GDCHardwareManagementAsyncClient" + }, + "fullName": "google.cloud.gdchardwaremanagement_v1alpha.GDCHardwareManagementAsyncClient.get_hardware", + "method": { + "fullName": "google.cloud.gdchardwaremanagement.v1alpha.GDCHardwareManagement.GetHardware", + "service": { + "fullName": "google.cloud.gdchardwaremanagement.v1alpha.GDCHardwareManagement", + "shortName": "GDCHardwareManagement" + }, + "shortName": "GetHardware" + }, + "parameters": [ + { + "name": "request", + 
"type": "google.cloud.gdchardwaremanagement_v1alpha.types.GetHardwareRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.gdchardwaremanagement_v1alpha.types.Hardware", + "shortName": "get_hardware" + }, + "description": "Sample for GetHardware", + "file": "gdchardwaremanagement_v1alpha_generated_gdc_hardware_management_get_hardware_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "gdchardwaremanagement_v1alpha_generated_GDCHardwareManagement_GetHardware_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "gdchardwaremanagement_v1alpha_generated_gdc_hardware_management_get_hardware_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.gdchardwaremanagement_v1alpha.GDCHardwareManagementClient", + "shortName": "GDCHardwareManagementClient" + }, + "fullName": "google.cloud.gdchardwaremanagement_v1alpha.GDCHardwareManagementClient.get_hardware", + "method": { + "fullName": "google.cloud.gdchardwaremanagement.v1alpha.GDCHardwareManagement.GetHardware", + "service": { + "fullName": "google.cloud.gdchardwaremanagement.v1alpha.GDCHardwareManagement", + "shortName": "GDCHardwareManagement" + }, + "shortName": "GetHardware" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.gdchardwaremanagement_v1alpha.types.GetHardwareRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.gdchardwaremanagement_v1alpha.types.Hardware", + "shortName": "get_hardware" + }, + "description": "Sample for GetHardware", + "file": "gdchardwaremanagement_v1alpha_generated_gdc_hardware_management_get_hardware_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "gdchardwaremanagement_v1alpha_generated_GDCHardwareManagement_GetHardware_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "gdchardwaremanagement_v1alpha_generated_gdc_hardware_management_get_hardware_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.gdchardwaremanagement_v1alpha.GDCHardwareManagementAsyncClient", + "shortName": "GDCHardwareManagementAsyncClient" + }, + "fullName": "google.cloud.gdchardwaremanagement_v1alpha.GDCHardwareManagementAsyncClient.get_order", + "method": { + "fullName": "google.cloud.gdchardwaremanagement.v1alpha.GDCHardwareManagement.GetOrder", + "service": { + "fullName": 
"google.cloud.gdchardwaremanagement.v1alpha.GDCHardwareManagement", + "shortName": "GDCHardwareManagement" + }, + "shortName": "GetOrder" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.gdchardwaremanagement_v1alpha.types.GetOrderRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.gdchardwaremanagement_v1alpha.types.Order", + "shortName": "get_order" + }, + "description": "Sample for GetOrder", + "file": "gdchardwaremanagement_v1alpha_generated_gdc_hardware_management_get_order_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "gdchardwaremanagement_v1alpha_generated_GDCHardwareManagement_GetOrder_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "gdchardwaremanagement_v1alpha_generated_gdc_hardware_management_get_order_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.gdchardwaremanagement_v1alpha.GDCHardwareManagementClient", + "shortName": "GDCHardwareManagementClient" + }, + "fullName": "google.cloud.gdchardwaremanagement_v1alpha.GDCHardwareManagementClient.get_order", + "method": { + "fullName": "google.cloud.gdchardwaremanagement.v1alpha.GDCHardwareManagement.GetOrder", + "service": { + "fullName": "google.cloud.gdchardwaremanagement.v1alpha.GDCHardwareManagement", + "shortName": "GDCHardwareManagement" + }, + "shortName": "GetOrder" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.gdchardwaremanagement_v1alpha.types.GetOrderRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.gdchardwaremanagement_v1alpha.types.Order", + "shortName": "get_order" + }, + "description": "Sample for GetOrder", + "file": "gdchardwaremanagement_v1alpha_generated_gdc_hardware_management_get_order_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "gdchardwaremanagement_v1alpha_generated_GDCHardwareManagement_GetOrder_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "gdchardwaremanagement_v1alpha_generated_gdc_hardware_management_get_order_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.gdchardwaremanagement_v1alpha.GDCHardwareManagementAsyncClient", + "shortName": "GDCHardwareManagementAsyncClient" + }, + "fullName": "google.cloud.gdchardwaremanagement_v1alpha.GDCHardwareManagementAsyncClient.get_site", + 
"method": { + "fullName": "google.cloud.gdchardwaremanagement.v1alpha.GDCHardwareManagement.GetSite", + "service": { + "fullName": "google.cloud.gdchardwaremanagement.v1alpha.GDCHardwareManagement", + "shortName": "GDCHardwareManagement" + }, + "shortName": "GetSite" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.gdchardwaremanagement_v1alpha.types.GetSiteRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.gdchardwaremanagement_v1alpha.types.Site", + "shortName": "get_site" + }, + "description": "Sample for GetSite", + "file": "gdchardwaremanagement_v1alpha_generated_gdc_hardware_management_get_site_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "gdchardwaremanagement_v1alpha_generated_GDCHardwareManagement_GetSite_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "gdchardwaremanagement_v1alpha_generated_gdc_hardware_management_get_site_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.gdchardwaremanagement_v1alpha.GDCHardwareManagementClient", + "shortName": "GDCHardwareManagementClient" + }, + "fullName": "google.cloud.gdchardwaremanagement_v1alpha.GDCHardwareManagementClient.get_site", + "method": { + "fullName": "google.cloud.gdchardwaremanagement.v1alpha.GDCHardwareManagement.GetSite", + "service": { + "fullName": "google.cloud.gdchardwaremanagement.v1alpha.GDCHardwareManagement", + "shortName": "GDCHardwareManagement" + }, + "shortName": "GetSite" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.gdchardwaremanagement_v1alpha.types.GetSiteRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.gdchardwaremanagement_v1alpha.types.Site", + "shortName": "get_site" + }, + "description": "Sample for GetSite", + "file": "gdchardwaremanagement_v1alpha_generated_gdc_hardware_management_get_site_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "gdchardwaremanagement_v1alpha_generated_GDCHardwareManagement_GetSite_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "gdchardwaremanagement_v1alpha_generated_gdc_hardware_management_get_site_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.gdchardwaremanagement_v1alpha.GDCHardwareManagementAsyncClient", + "shortName": "GDCHardwareManagementAsyncClient" + 
}, + "fullName": "google.cloud.gdchardwaremanagement_v1alpha.GDCHardwareManagementAsyncClient.get_sku", + "method": { + "fullName": "google.cloud.gdchardwaremanagement.v1alpha.GDCHardwareManagement.GetSku", + "service": { + "fullName": "google.cloud.gdchardwaremanagement.v1alpha.GDCHardwareManagement", + "shortName": "GDCHardwareManagement" + }, + "shortName": "GetSku" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.gdchardwaremanagement_v1alpha.types.GetSkuRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.gdchardwaremanagement_v1alpha.types.Sku", + "shortName": "get_sku" + }, + "description": "Sample for GetSku", + "file": "gdchardwaremanagement_v1alpha_generated_gdc_hardware_management_get_sku_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "gdchardwaremanagement_v1alpha_generated_GDCHardwareManagement_GetSku_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "gdchardwaremanagement_v1alpha_generated_gdc_hardware_management_get_sku_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.gdchardwaremanagement_v1alpha.GDCHardwareManagementClient", + "shortName": "GDCHardwareManagementClient" + }, + "fullName": "google.cloud.gdchardwaremanagement_v1alpha.GDCHardwareManagementClient.get_sku", + "method": { + "fullName": "google.cloud.gdchardwaremanagement.v1alpha.GDCHardwareManagement.GetSku", + "service": { + "fullName": "google.cloud.gdchardwaremanagement.v1alpha.GDCHardwareManagement", + "shortName": "GDCHardwareManagement" + }, + "shortName": "GetSku" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.gdchardwaremanagement_v1alpha.types.GetSkuRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.gdchardwaremanagement_v1alpha.types.Sku", + "shortName": "get_sku" + }, + "description": "Sample for GetSku", + "file": "gdchardwaremanagement_v1alpha_generated_gdc_hardware_management_get_sku_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "gdchardwaremanagement_v1alpha_generated_GDCHardwareManagement_GetSku_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "gdchardwaremanagement_v1alpha_generated_gdc_hardware_management_get_sku_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": 
"google.cloud.gdchardwaremanagement_v1alpha.GDCHardwareManagementAsyncClient", + "shortName": "GDCHardwareManagementAsyncClient" + }, + "fullName": "google.cloud.gdchardwaremanagement_v1alpha.GDCHardwareManagementAsyncClient.get_zone", + "method": { + "fullName": "google.cloud.gdchardwaremanagement.v1alpha.GDCHardwareManagement.GetZone", + "service": { + "fullName": "google.cloud.gdchardwaremanagement.v1alpha.GDCHardwareManagement", + "shortName": "GDCHardwareManagement" + }, + "shortName": "GetZone" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.gdchardwaremanagement_v1alpha.types.GetZoneRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.gdchardwaremanagement_v1alpha.types.Zone", + "shortName": "get_zone" + }, + "description": "Sample for GetZone", + "file": "gdchardwaremanagement_v1alpha_generated_gdc_hardware_management_get_zone_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "gdchardwaremanagement_v1alpha_generated_GDCHardwareManagement_GetZone_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "gdchardwaremanagement_v1alpha_generated_gdc_hardware_management_get_zone_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.gdchardwaremanagement_v1alpha.GDCHardwareManagementClient", + "shortName": "GDCHardwareManagementClient" + }, + "fullName": "google.cloud.gdchardwaremanagement_v1alpha.GDCHardwareManagementClient.get_zone", + "method": { + "fullName": "google.cloud.gdchardwaremanagement.v1alpha.GDCHardwareManagement.GetZone", + "service": { + "fullName": "google.cloud.gdchardwaremanagement.v1alpha.GDCHardwareManagement", + "shortName": "GDCHardwareManagement" + }, + "shortName": "GetZone" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.gdchardwaremanagement_v1alpha.types.GetZoneRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.gdchardwaremanagement_v1alpha.types.Zone", + "shortName": "get_zone" + }, + "description": "Sample for GetZone", + "file": "gdchardwaremanagement_v1alpha_generated_gdc_hardware_management_get_zone_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "gdchardwaremanagement_v1alpha_generated_GDCHardwareManagement_GetZone_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": 
"gdchardwaremanagement_v1alpha_generated_gdc_hardware_management_get_zone_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.gdchardwaremanagement_v1alpha.GDCHardwareManagementAsyncClient", + "shortName": "GDCHardwareManagementAsyncClient" + }, + "fullName": "google.cloud.gdchardwaremanagement_v1alpha.GDCHardwareManagementAsyncClient.list_change_log_entries", + "method": { + "fullName": "google.cloud.gdchardwaremanagement.v1alpha.GDCHardwareManagement.ListChangeLogEntries", + "service": { + "fullName": "google.cloud.gdchardwaremanagement.v1alpha.GDCHardwareManagement", + "shortName": "GDCHardwareManagement" + }, + "shortName": "ListChangeLogEntries" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.gdchardwaremanagement_v1alpha.types.ListChangeLogEntriesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.gdchardwaremanagement_v1alpha.services.gdc_hardware_management.pagers.ListChangeLogEntriesAsyncPager", + "shortName": "list_change_log_entries" + }, + "description": "Sample for ListChangeLogEntries", + "file": "gdchardwaremanagement_v1alpha_generated_gdc_hardware_management_list_change_log_entries_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "gdchardwaremanagement_v1alpha_generated_GDCHardwareManagement_ListChangeLogEntries_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "gdchardwaremanagement_v1alpha_generated_gdc_hardware_management_list_change_log_entries_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.gdchardwaremanagement_v1alpha.GDCHardwareManagementClient", + "shortName": "GDCHardwareManagementClient" + }, + "fullName": "google.cloud.gdchardwaremanagement_v1alpha.GDCHardwareManagementClient.list_change_log_entries", + "method": { + "fullName": "google.cloud.gdchardwaremanagement.v1alpha.GDCHardwareManagement.ListChangeLogEntries", + "service": { + "fullName": "google.cloud.gdchardwaremanagement.v1alpha.GDCHardwareManagement", + "shortName": "GDCHardwareManagement" + }, + "shortName": "ListChangeLogEntries" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.gdchardwaremanagement_v1alpha.types.ListChangeLogEntriesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.gdchardwaremanagement_v1alpha.services.gdc_hardware_management.pagers.ListChangeLogEntriesPager", + "shortName": "list_change_log_entries" + }, + "description": "Sample for ListChangeLogEntries", + "file": "gdchardwaremanagement_v1alpha_generated_gdc_hardware_management_list_change_log_entries_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": 
"gdchardwaremanagement_v1alpha_generated_GDCHardwareManagement_ListChangeLogEntries_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "gdchardwaremanagement_v1alpha_generated_gdc_hardware_management_list_change_log_entries_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.gdchardwaremanagement_v1alpha.GDCHardwareManagementAsyncClient", + "shortName": "GDCHardwareManagementAsyncClient" + }, + "fullName": "google.cloud.gdchardwaremanagement_v1alpha.GDCHardwareManagementAsyncClient.list_comments", + "method": { + "fullName": "google.cloud.gdchardwaremanagement.v1alpha.GDCHardwareManagement.ListComments", + "service": { + "fullName": "google.cloud.gdchardwaremanagement.v1alpha.GDCHardwareManagement", + "shortName": "GDCHardwareManagement" + }, + "shortName": "ListComments" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.gdchardwaremanagement_v1alpha.types.ListCommentsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.gdchardwaremanagement_v1alpha.services.gdc_hardware_management.pagers.ListCommentsAsyncPager", + "shortName": "list_comments" + }, + "description": "Sample for ListComments", + "file": "gdchardwaremanagement_v1alpha_generated_gdc_hardware_management_list_comments_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "gdchardwaremanagement_v1alpha_generated_GDCHardwareManagement_ListComments_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "gdchardwaremanagement_v1alpha_generated_gdc_hardware_management_list_comments_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.gdchardwaremanagement_v1alpha.GDCHardwareManagementClient", + "shortName": "GDCHardwareManagementClient" + }, + "fullName": "google.cloud.gdchardwaremanagement_v1alpha.GDCHardwareManagementClient.list_comments", + "method": { + "fullName": "google.cloud.gdchardwaremanagement.v1alpha.GDCHardwareManagement.ListComments", + "service": { + "fullName": "google.cloud.gdchardwaremanagement.v1alpha.GDCHardwareManagement", + "shortName": "GDCHardwareManagement" + }, + "shortName": "ListComments" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.gdchardwaremanagement_v1alpha.types.ListCommentsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": 
"google.cloud.gdchardwaremanagement_v1alpha.services.gdc_hardware_management.pagers.ListCommentsPager", + "shortName": "list_comments" + }, + "description": "Sample for ListComments", + "file": "gdchardwaremanagement_v1alpha_generated_gdc_hardware_management_list_comments_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "gdchardwaremanagement_v1alpha_generated_GDCHardwareManagement_ListComments_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "gdchardwaremanagement_v1alpha_generated_gdc_hardware_management_list_comments_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.gdchardwaremanagement_v1alpha.GDCHardwareManagementAsyncClient", + "shortName": "GDCHardwareManagementAsyncClient" + }, + "fullName": "google.cloud.gdchardwaremanagement_v1alpha.GDCHardwareManagementAsyncClient.list_hardware_groups", + "method": { + "fullName": "google.cloud.gdchardwaremanagement.v1alpha.GDCHardwareManagement.ListHardwareGroups", + "service": { + "fullName": "google.cloud.gdchardwaremanagement.v1alpha.GDCHardwareManagement", + "shortName": "GDCHardwareManagement" + }, + "shortName": "ListHardwareGroups" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.gdchardwaremanagement_v1alpha.types.ListHardwareGroupsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.gdchardwaremanagement_v1alpha.services.gdc_hardware_management.pagers.ListHardwareGroupsAsyncPager", + "shortName": "list_hardware_groups" + }, + "description": "Sample for ListHardwareGroups", + "file": "gdchardwaremanagement_v1alpha_generated_gdc_hardware_management_list_hardware_groups_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "gdchardwaremanagement_v1alpha_generated_GDCHardwareManagement_ListHardwareGroups_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "gdchardwaremanagement_v1alpha_generated_gdc_hardware_management_list_hardware_groups_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.gdchardwaremanagement_v1alpha.GDCHardwareManagementClient", + "shortName": "GDCHardwareManagementClient" + }, + "fullName": "google.cloud.gdchardwaremanagement_v1alpha.GDCHardwareManagementClient.list_hardware_groups", + "method": { + "fullName": "google.cloud.gdchardwaremanagement.v1alpha.GDCHardwareManagement.ListHardwareGroups", + "service": { + "fullName": "google.cloud.gdchardwaremanagement.v1alpha.GDCHardwareManagement", + "shortName": "GDCHardwareManagement" + }, + "shortName": "ListHardwareGroups" 
+ }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.gdchardwaremanagement_v1alpha.types.ListHardwareGroupsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.gdchardwaremanagement_v1alpha.services.gdc_hardware_management.pagers.ListHardwareGroupsPager", + "shortName": "list_hardware_groups" + }, + "description": "Sample for ListHardwareGroups", + "file": "gdchardwaremanagement_v1alpha_generated_gdc_hardware_management_list_hardware_groups_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "gdchardwaremanagement_v1alpha_generated_GDCHardwareManagement_ListHardwareGroups_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "gdchardwaremanagement_v1alpha_generated_gdc_hardware_management_list_hardware_groups_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.gdchardwaremanagement_v1alpha.GDCHardwareManagementAsyncClient", + "shortName": "GDCHardwareManagementAsyncClient" + }, + "fullName": "google.cloud.gdchardwaremanagement_v1alpha.GDCHardwareManagementAsyncClient.list_hardware", + "method": { + "fullName": "google.cloud.gdchardwaremanagement.v1alpha.GDCHardwareManagement.ListHardware", + "service": { + "fullName": "google.cloud.gdchardwaremanagement.v1alpha.GDCHardwareManagement", + "shortName": "GDCHardwareManagement" + }, + "shortName": "ListHardware" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.gdchardwaremanagement_v1alpha.types.ListHardwareRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.gdchardwaremanagement_v1alpha.services.gdc_hardware_management.pagers.ListHardwareAsyncPager", + "shortName": "list_hardware" + }, + "description": "Sample for ListHardware", + "file": "gdchardwaremanagement_v1alpha_generated_gdc_hardware_management_list_hardware_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "gdchardwaremanagement_v1alpha_generated_GDCHardwareManagement_ListHardware_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "gdchardwaremanagement_v1alpha_generated_gdc_hardware_management_list_hardware_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.gdchardwaremanagement_v1alpha.GDCHardwareManagementClient", + "shortName": "GDCHardwareManagementClient" + }, + "fullName": 
"google.cloud.gdchardwaremanagement_v1alpha.GDCHardwareManagementClient.list_hardware", + "method": { + "fullName": "google.cloud.gdchardwaremanagement.v1alpha.GDCHardwareManagement.ListHardware", + "service": { + "fullName": "google.cloud.gdchardwaremanagement.v1alpha.GDCHardwareManagement", + "shortName": "GDCHardwareManagement" + }, + "shortName": "ListHardware" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.gdchardwaremanagement_v1alpha.types.ListHardwareRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.gdchardwaremanagement_v1alpha.services.gdc_hardware_management.pagers.ListHardwarePager", + "shortName": "list_hardware" + }, + "description": "Sample for ListHardware", + "file": "gdchardwaremanagement_v1alpha_generated_gdc_hardware_management_list_hardware_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "gdchardwaremanagement_v1alpha_generated_GDCHardwareManagement_ListHardware_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "gdchardwaremanagement_v1alpha_generated_gdc_hardware_management_list_hardware_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.gdchardwaremanagement_v1alpha.GDCHardwareManagementAsyncClient", + "shortName": "GDCHardwareManagementAsyncClient" + }, + "fullName": "google.cloud.gdchardwaremanagement_v1alpha.GDCHardwareManagementAsyncClient.list_orders", + "method": { + "fullName": "google.cloud.gdchardwaremanagement.v1alpha.GDCHardwareManagement.ListOrders", + "service": { + "fullName": "google.cloud.gdchardwaremanagement.v1alpha.GDCHardwareManagement", + "shortName": "GDCHardwareManagement" + }, + "shortName": "ListOrders" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.gdchardwaremanagement_v1alpha.types.ListOrdersRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.gdchardwaremanagement_v1alpha.services.gdc_hardware_management.pagers.ListOrdersAsyncPager", + "shortName": "list_orders" + }, + "description": "Sample for ListOrders", + "file": "gdchardwaremanagement_v1alpha_generated_gdc_hardware_management_list_orders_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "gdchardwaremanagement_v1alpha_generated_GDCHardwareManagement_ListOrders_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": 
"gdchardwaremanagement_v1alpha_generated_gdc_hardware_management_list_orders_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.gdchardwaremanagement_v1alpha.GDCHardwareManagementClient", + "shortName": "GDCHardwareManagementClient" + }, + "fullName": "google.cloud.gdchardwaremanagement_v1alpha.GDCHardwareManagementClient.list_orders", + "method": { + "fullName": "google.cloud.gdchardwaremanagement.v1alpha.GDCHardwareManagement.ListOrders", + "service": { + "fullName": "google.cloud.gdchardwaremanagement.v1alpha.GDCHardwareManagement", + "shortName": "GDCHardwareManagement" + }, + "shortName": "ListOrders" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.gdchardwaremanagement_v1alpha.types.ListOrdersRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.gdchardwaremanagement_v1alpha.services.gdc_hardware_management.pagers.ListOrdersPager", + "shortName": "list_orders" + }, + "description": "Sample for ListOrders", + "file": "gdchardwaremanagement_v1alpha_generated_gdc_hardware_management_list_orders_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "gdchardwaremanagement_v1alpha_generated_GDCHardwareManagement_ListOrders_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "gdchardwaremanagement_v1alpha_generated_gdc_hardware_management_list_orders_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.gdchardwaremanagement_v1alpha.GDCHardwareManagementAsyncClient", + "shortName": "GDCHardwareManagementAsyncClient" + }, + "fullName": "google.cloud.gdchardwaremanagement_v1alpha.GDCHardwareManagementAsyncClient.list_sites", + "method": { + "fullName": "google.cloud.gdchardwaremanagement.v1alpha.GDCHardwareManagement.ListSites", + "service": { + "fullName": "google.cloud.gdchardwaremanagement.v1alpha.GDCHardwareManagement", + "shortName": "GDCHardwareManagement" + }, + "shortName": "ListSites" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.gdchardwaremanagement_v1alpha.types.ListSitesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.gdchardwaremanagement_v1alpha.services.gdc_hardware_management.pagers.ListSitesAsyncPager", + "shortName": "list_sites" + }, + "description": "Sample for ListSites", + "file": "gdchardwaremanagement_v1alpha_generated_gdc_hardware_management_list_sites_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "gdchardwaremanagement_v1alpha_generated_GDCHardwareManagement_ListSites_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": 
"CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "gdchardwaremanagement_v1alpha_generated_gdc_hardware_management_list_sites_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.gdchardwaremanagement_v1alpha.GDCHardwareManagementClient", + "shortName": "GDCHardwareManagementClient" + }, + "fullName": "google.cloud.gdchardwaremanagement_v1alpha.GDCHardwareManagementClient.list_sites", + "method": { + "fullName": "google.cloud.gdchardwaremanagement.v1alpha.GDCHardwareManagement.ListSites", + "service": { + "fullName": "google.cloud.gdchardwaremanagement.v1alpha.GDCHardwareManagement", + "shortName": "GDCHardwareManagement" + }, + "shortName": "ListSites" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.gdchardwaremanagement_v1alpha.types.ListSitesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.gdchardwaremanagement_v1alpha.services.gdc_hardware_management.pagers.ListSitesPager", + "shortName": "list_sites" + }, + "description": "Sample for ListSites", + "file": "gdchardwaremanagement_v1alpha_generated_gdc_hardware_management_list_sites_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "gdchardwaremanagement_v1alpha_generated_GDCHardwareManagement_ListSites_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "gdchardwaremanagement_v1alpha_generated_gdc_hardware_management_list_sites_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.gdchardwaremanagement_v1alpha.GDCHardwareManagementAsyncClient", + "shortName": "GDCHardwareManagementAsyncClient" + }, + "fullName": "google.cloud.gdchardwaremanagement_v1alpha.GDCHardwareManagementAsyncClient.list_skus", + "method": { + "fullName": "google.cloud.gdchardwaremanagement.v1alpha.GDCHardwareManagement.ListSkus", + "service": { + "fullName": "google.cloud.gdchardwaremanagement.v1alpha.GDCHardwareManagement", + "shortName": "GDCHardwareManagement" + }, + "shortName": "ListSkus" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.gdchardwaremanagement_v1alpha.types.ListSkusRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.gdchardwaremanagement_v1alpha.services.gdc_hardware_management.pagers.ListSkusAsyncPager", + "shortName": "list_skus" + }, + "description": "Sample for ListSkus", + "file": "gdchardwaremanagement_v1alpha_generated_gdc_hardware_management_list_skus_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": 
"gdchardwaremanagement_v1alpha_generated_GDCHardwareManagement_ListSkus_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "gdchardwaremanagement_v1alpha_generated_gdc_hardware_management_list_skus_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.gdchardwaremanagement_v1alpha.GDCHardwareManagementClient", + "shortName": "GDCHardwareManagementClient" + }, + "fullName": "google.cloud.gdchardwaremanagement_v1alpha.GDCHardwareManagementClient.list_skus", + "method": { + "fullName": "google.cloud.gdchardwaremanagement.v1alpha.GDCHardwareManagement.ListSkus", + "service": { + "fullName": "google.cloud.gdchardwaremanagement.v1alpha.GDCHardwareManagement", + "shortName": "GDCHardwareManagement" + }, + "shortName": "ListSkus" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.gdchardwaremanagement_v1alpha.types.ListSkusRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.gdchardwaremanagement_v1alpha.services.gdc_hardware_management.pagers.ListSkusPager", + "shortName": "list_skus" + }, + "description": "Sample for ListSkus", + "file": "gdchardwaremanagement_v1alpha_generated_gdc_hardware_management_list_skus_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "gdchardwaremanagement_v1alpha_generated_GDCHardwareManagement_ListSkus_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "gdchardwaremanagement_v1alpha_generated_gdc_hardware_management_list_skus_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.gdchardwaremanagement_v1alpha.GDCHardwareManagementAsyncClient", + "shortName": "GDCHardwareManagementAsyncClient" + }, + "fullName": "google.cloud.gdchardwaremanagement_v1alpha.GDCHardwareManagementAsyncClient.list_zones", + "method": { + "fullName": "google.cloud.gdchardwaremanagement.v1alpha.GDCHardwareManagement.ListZones", + "service": { + "fullName": "google.cloud.gdchardwaremanagement.v1alpha.GDCHardwareManagement", + "shortName": "GDCHardwareManagement" + }, + "shortName": "ListZones" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.gdchardwaremanagement_v1alpha.types.ListZonesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.gdchardwaremanagement_v1alpha.services.gdc_hardware_management.pagers.ListZonesAsyncPager", + 
"shortName": "list_zones" + }, + "description": "Sample for ListZones", + "file": "gdchardwaremanagement_v1alpha_generated_gdc_hardware_management_list_zones_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "gdchardwaremanagement_v1alpha_generated_GDCHardwareManagement_ListZones_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "gdchardwaremanagement_v1alpha_generated_gdc_hardware_management_list_zones_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.gdchardwaremanagement_v1alpha.GDCHardwareManagementClient", + "shortName": "GDCHardwareManagementClient" + }, + "fullName": "google.cloud.gdchardwaremanagement_v1alpha.GDCHardwareManagementClient.list_zones", + "method": { + "fullName": "google.cloud.gdchardwaremanagement.v1alpha.GDCHardwareManagement.ListZones", + "service": { + "fullName": "google.cloud.gdchardwaremanagement.v1alpha.GDCHardwareManagement", + "shortName": "GDCHardwareManagement" + }, + "shortName": "ListZones" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.gdchardwaremanagement_v1alpha.types.ListZonesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.gdchardwaremanagement_v1alpha.services.gdc_hardware_management.pagers.ListZonesPager", + "shortName": "list_zones" + }, + "description": "Sample for ListZones", + "file": "gdchardwaremanagement_v1alpha_generated_gdc_hardware_management_list_zones_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "gdchardwaremanagement_v1alpha_generated_GDCHardwareManagement_ListZones_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "gdchardwaremanagement_v1alpha_generated_gdc_hardware_management_list_zones_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.gdchardwaremanagement_v1alpha.GDCHardwareManagementAsyncClient", + "shortName": "GDCHardwareManagementAsyncClient" + }, + "fullName": "google.cloud.gdchardwaremanagement_v1alpha.GDCHardwareManagementAsyncClient.signal_zone_state", + "method": { + "fullName": "google.cloud.gdchardwaremanagement.v1alpha.GDCHardwareManagement.SignalZoneState", + "service": { + "fullName": "google.cloud.gdchardwaremanagement.v1alpha.GDCHardwareManagement", + "shortName": "GDCHardwareManagement" + }, + "shortName": "SignalZoneState" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.gdchardwaremanagement_v1alpha.types.SignalZoneStateRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "state_signal", + "type": 
"google.cloud.gdchardwaremanagement_v1alpha.types.SignalZoneStateRequest.StateSignal" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "signal_zone_state" + }, + "description": "Sample for SignalZoneState", + "file": "gdchardwaremanagement_v1alpha_generated_gdc_hardware_management_signal_zone_state_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "gdchardwaremanagement_v1alpha_generated_GDCHardwareManagement_SignalZoneState_async", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" + } + ], + "title": "gdchardwaremanagement_v1alpha_generated_gdc_hardware_management_signal_zone_state_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.gdchardwaremanagement_v1alpha.GDCHardwareManagementClient", + "shortName": "GDCHardwareManagementClient" + }, + "fullName": "google.cloud.gdchardwaremanagement_v1alpha.GDCHardwareManagementClient.signal_zone_state", + "method": { + "fullName": "google.cloud.gdchardwaremanagement.v1alpha.GDCHardwareManagement.SignalZoneState", + "service": { + "fullName": "google.cloud.gdchardwaremanagement.v1alpha.GDCHardwareManagement", + "shortName": "GDCHardwareManagement" + }, + "shortName": "SignalZoneState" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.gdchardwaremanagement_v1alpha.types.SignalZoneStateRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "state_signal", + "type": "google.cloud.gdchardwaremanagement_v1alpha.types.SignalZoneStateRequest.StateSignal" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "signal_zone_state" + }, + "description": "Sample for SignalZoneState", + "file": "gdchardwaremanagement_v1alpha_generated_gdc_hardware_management_signal_zone_state_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "gdchardwaremanagement_v1alpha_generated_GDCHardwareManagement_SignalZoneState_sync", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" + } + ], + "title": "gdchardwaremanagement_v1alpha_generated_gdc_hardware_management_signal_zone_state_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.gdchardwaremanagement_v1alpha.GDCHardwareManagementAsyncClient", + "shortName": "GDCHardwareManagementAsyncClient" + }, + "fullName": "google.cloud.gdchardwaremanagement_v1alpha.GDCHardwareManagementAsyncClient.submit_order", + "method": { + 
"fullName": "google.cloud.gdchardwaremanagement.v1alpha.GDCHardwareManagement.SubmitOrder", + "service": { + "fullName": "google.cloud.gdchardwaremanagement.v1alpha.GDCHardwareManagement", + "shortName": "GDCHardwareManagement" + }, + "shortName": "SubmitOrder" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.gdchardwaremanagement_v1alpha.types.SubmitOrderRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "submit_order" + }, + "description": "Sample for SubmitOrder", + "file": "gdchardwaremanagement_v1alpha_generated_gdc_hardware_management_submit_order_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "gdchardwaremanagement_v1alpha_generated_GDCHardwareManagement_SubmitOrder_async", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "gdchardwaremanagement_v1alpha_generated_gdc_hardware_management_submit_order_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.gdchardwaremanagement_v1alpha.GDCHardwareManagementClient", + "shortName": "GDCHardwareManagementClient" + }, + "fullName": "google.cloud.gdchardwaremanagement_v1alpha.GDCHardwareManagementClient.submit_order", + "method": { + "fullName": "google.cloud.gdchardwaremanagement.v1alpha.GDCHardwareManagement.SubmitOrder", + "service": { + "fullName": "google.cloud.gdchardwaremanagement.v1alpha.GDCHardwareManagement", + "shortName": "GDCHardwareManagement" + }, + "shortName": "SubmitOrder" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.gdchardwaremanagement_v1alpha.types.SubmitOrderRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "submit_order" + }, + "description": "Sample for SubmitOrder", + "file": "gdchardwaremanagement_v1alpha_generated_gdc_hardware_management_submit_order_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "gdchardwaremanagement_v1alpha_generated_GDCHardwareManagement_SubmitOrder_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "gdchardwaremanagement_v1alpha_generated_gdc_hardware_management_submit_order_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.gdchardwaremanagement_v1alpha.GDCHardwareManagementAsyncClient", + "shortName": 
"GDCHardwareManagementAsyncClient" + }, + "fullName": "google.cloud.gdchardwaremanagement_v1alpha.GDCHardwareManagementAsyncClient.update_hardware_group", + "method": { + "fullName": "google.cloud.gdchardwaremanagement.v1alpha.GDCHardwareManagement.UpdateHardwareGroup", + "service": { + "fullName": "google.cloud.gdchardwaremanagement.v1alpha.GDCHardwareManagement", + "shortName": "GDCHardwareManagement" + }, + "shortName": "UpdateHardwareGroup" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.gdchardwaremanagement_v1alpha.types.UpdateHardwareGroupRequest" + }, + { + "name": "hardware_group", + "type": "google.cloud.gdchardwaremanagement_v1alpha.types.HardwareGroup" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "update_hardware_group" + }, + "description": "Sample for UpdateHardwareGroup", + "file": "gdchardwaremanagement_v1alpha_generated_gdc_hardware_management_update_hardware_group_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "gdchardwaremanagement_v1alpha_generated_GDCHardwareManagement_UpdateHardwareGroup_async", + "segments": [ + { + "end": 61, + "start": 27, + "type": "FULL" + }, + { + "end": 61, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 51, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 58, + "start": 52, + "type": "REQUEST_EXECUTION" + }, + { + "end": 62, + "start": 59, + "type": "RESPONSE_HANDLING" + } + ], + "title": "gdchardwaremanagement_v1alpha_generated_gdc_hardware_management_update_hardware_group_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.gdchardwaremanagement_v1alpha.GDCHardwareManagementClient", + "shortName": "GDCHardwareManagementClient" + }, + "fullName": "google.cloud.gdchardwaremanagement_v1alpha.GDCHardwareManagementClient.update_hardware_group", + "method": { + "fullName": "google.cloud.gdchardwaremanagement.v1alpha.GDCHardwareManagement.UpdateHardwareGroup", + "service": { + "fullName": "google.cloud.gdchardwaremanagement.v1alpha.GDCHardwareManagement", + "shortName": "GDCHardwareManagement" + }, + "shortName": "UpdateHardwareGroup" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.gdchardwaremanagement_v1alpha.types.UpdateHardwareGroupRequest" + }, + { + "name": "hardware_group", + "type": "google.cloud.gdchardwaremanagement_v1alpha.types.HardwareGroup" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "update_hardware_group" + }, + "description": "Sample for UpdateHardwareGroup", + "file": "gdchardwaremanagement_v1alpha_generated_gdc_hardware_management_update_hardware_group_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "gdchardwaremanagement_v1alpha_generated_GDCHardwareManagement_UpdateHardwareGroup_sync", + "segments": [ + { + "end": 61, + "start": 27, + "type": "FULL" + }, + { + "end": 
61, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 51, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 58, + "start": 52, + "type": "REQUEST_EXECUTION" + }, + { + "end": 62, + "start": 59, + "type": "RESPONSE_HANDLING" + } + ], + "title": "gdchardwaremanagement_v1alpha_generated_gdc_hardware_management_update_hardware_group_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.gdchardwaremanagement_v1alpha.GDCHardwareManagementAsyncClient", + "shortName": "GDCHardwareManagementAsyncClient" + }, + "fullName": "google.cloud.gdchardwaremanagement_v1alpha.GDCHardwareManagementAsyncClient.update_hardware", + "method": { + "fullName": "google.cloud.gdchardwaremanagement.v1alpha.GDCHardwareManagement.UpdateHardware", + "service": { + "fullName": "google.cloud.gdchardwaremanagement.v1alpha.GDCHardwareManagement", + "shortName": "GDCHardwareManagement" + }, + "shortName": "UpdateHardware" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.gdchardwaremanagement_v1alpha.types.UpdateHardwareRequest" + }, + { + "name": "hardware", + "type": "google.cloud.gdchardwaremanagement_v1alpha.types.Hardware" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "update_hardware" + }, + "description": "Sample for UpdateHardware", + "file": "gdchardwaremanagement_v1alpha_generated_gdc_hardware_management_update_hardware_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "gdchardwaremanagement_v1alpha_generated_GDCHardwareManagement_UpdateHardware_async", + "segments": [ + { + "end": 62, + "start": 27, + "type": "FULL" + }, + { + "end": 62, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 52, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 59, + "start": 53, + "type": "REQUEST_EXECUTION" + }, + { + "end": 63, + "start": 60, + "type": "RESPONSE_HANDLING" + } + ], + "title": "gdchardwaremanagement_v1alpha_generated_gdc_hardware_management_update_hardware_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.gdchardwaremanagement_v1alpha.GDCHardwareManagementClient", + "shortName": "GDCHardwareManagementClient" + }, + "fullName": "google.cloud.gdchardwaremanagement_v1alpha.GDCHardwareManagementClient.update_hardware", + "method": { + "fullName": "google.cloud.gdchardwaremanagement.v1alpha.GDCHardwareManagement.UpdateHardware", + "service": { + "fullName": "google.cloud.gdchardwaremanagement.v1alpha.GDCHardwareManagement", + "shortName": "GDCHardwareManagement" + }, + "shortName": "UpdateHardware" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.gdchardwaremanagement_v1alpha.types.UpdateHardwareRequest" + }, + { + "name": "hardware", + "type": "google.cloud.gdchardwaremanagement_v1alpha.types.Hardware" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": 
"Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "update_hardware" + }, + "description": "Sample for UpdateHardware", + "file": "gdchardwaremanagement_v1alpha_generated_gdc_hardware_management_update_hardware_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "gdchardwaremanagement_v1alpha_generated_GDCHardwareManagement_UpdateHardware_sync", + "segments": [ + { + "end": 62, + "start": 27, + "type": "FULL" + }, + { + "end": 62, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 52, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 59, + "start": 53, + "type": "REQUEST_EXECUTION" + }, + { + "end": 63, + "start": 60, + "type": "RESPONSE_HANDLING" + } + ], + "title": "gdchardwaremanagement_v1alpha_generated_gdc_hardware_management_update_hardware_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.gdchardwaremanagement_v1alpha.GDCHardwareManagementAsyncClient", + "shortName": "GDCHardwareManagementAsyncClient" + }, + "fullName": "google.cloud.gdchardwaremanagement_v1alpha.GDCHardwareManagementAsyncClient.update_order", + "method": { + "fullName": "google.cloud.gdchardwaremanagement.v1alpha.GDCHardwareManagement.UpdateOrder", + "service": { + "fullName": "google.cloud.gdchardwaremanagement.v1alpha.GDCHardwareManagement", + "shortName": "GDCHardwareManagement" + }, + "shortName": "UpdateOrder" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.gdchardwaremanagement_v1alpha.types.UpdateOrderRequest" + }, + { + "name": "order", + "type": "google.cloud.gdchardwaremanagement_v1alpha.types.Order" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "update_order" + }, + "description": "Sample for UpdateOrder", + "file": "gdchardwaremanagement_v1alpha_generated_gdc_hardware_management_update_order_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "gdchardwaremanagement_v1alpha_generated_GDCHardwareManagement_UpdateOrder_async", + "segments": [ + { + "end": 63, + "start": 27, + "type": "FULL" + }, + { + "end": 63, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 53, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 60, + "start": 54, + "type": "REQUEST_EXECUTION" + }, + { + "end": 64, + "start": 61, + "type": "RESPONSE_HANDLING" + } + ], + "title": "gdchardwaremanagement_v1alpha_generated_gdc_hardware_management_update_order_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.gdchardwaremanagement_v1alpha.GDCHardwareManagementClient", + "shortName": "GDCHardwareManagementClient" + }, + "fullName": "google.cloud.gdchardwaremanagement_v1alpha.GDCHardwareManagementClient.update_order", + "method": { + "fullName": "google.cloud.gdchardwaremanagement.v1alpha.GDCHardwareManagement.UpdateOrder", + "service": { + "fullName": "google.cloud.gdchardwaremanagement.v1alpha.GDCHardwareManagement", + "shortName": "GDCHardwareManagement" + }, + "shortName": "UpdateOrder" + }, + "parameters": 
[ + { + "name": "request", + "type": "google.cloud.gdchardwaremanagement_v1alpha.types.UpdateOrderRequest" + }, + { + "name": "order", + "type": "google.cloud.gdchardwaremanagement_v1alpha.types.Order" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "update_order" + }, + "description": "Sample for UpdateOrder", + "file": "gdchardwaremanagement_v1alpha_generated_gdc_hardware_management_update_order_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "gdchardwaremanagement_v1alpha_generated_GDCHardwareManagement_UpdateOrder_sync", + "segments": [ + { + "end": 63, + "start": 27, + "type": "FULL" + }, + { + "end": 63, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 53, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 60, + "start": 54, + "type": "REQUEST_EXECUTION" + }, + { + "end": 64, + "start": 61, + "type": "RESPONSE_HANDLING" + } + ], + "title": "gdchardwaremanagement_v1alpha_generated_gdc_hardware_management_update_order_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.gdchardwaremanagement_v1alpha.GDCHardwareManagementAsyncClient", + "shortName": "GDCHardwareManagementAsyncClient" + }, + "fullName": "google.cloud.gdchardwaremanagement_v1alpha.GDCHardwareManagementAsyncClient.update_site", + "method": { + "fullName": "google.cloud.gdchardwaremanagement.v1alpha.GDCHardwareManagement.UpdateSite", + "service": { + "fullName": "google.cloud.gdchardwaremanagement.v1alpha.GDCHardwareManagement", + "shortName": "GDCHardwareManagement" + }, + "shortName": "UpdateSite" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.gdchardwaremanagement_v1alpha.types.UpdateSiteRequest" + }, + { + "name": "site", + "type": "google.cloud.gdchardwaremanagement_v1alpha.types.Site" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "update_site" + }, + "description": "Sample for UpdateSite", + "file": "gdchardwaremanagement_v1alpha_generated_gdc_hardware_management_update_site_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "gdchardwaremanagement_v1alpha_generated_GDCHardwareManagement_UpdateSite_async", + "segments": [ + { + "end": 61, + "start": 27, + "type": "FULL" + }, + { + "end": 61, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 51, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 58, + "start": 52, + "type": "REQUEST_EXECUTION" + }, + { + "end": 62, + "start": 59, + "type": "RESPONSE_HANDLING" + } + ], + "title": "gdchardwaremanagement_v1alpha_generated_gdc_hardware_management_update_site_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.gdchardwaremanagement_v1alpha.GDCHardwareManagementClient", + "shortName": 
"GDCHardwareManagementClient" + }, + "fullName": "google.cloud.gdchardwaremanagement_v1alpha.GDCHardwareManagementClient.update_site", + "method": { + "fullName": "google.cloud.gdchardwaremanagement.v1alpha.GDCHardwareManagement.UpdateSite", + "service": { + "fullName": "google.cloud.gdchardwaremanagement.v1alpha.GDCHardwareManagement", + "shortName": "GDCHardwareManagement" + }, + "shortName": "UpdateSite" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.gdchardwaremanagement_v1alpha.types.UpdateSiteRequest" + }, + { + "name": "site", + "type": "google.cloud.gdchardwaremanagement_v1alpha.types.Site" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "update_site" + }, + "description": "Sample for UpdateSite", + "file": "gdchardwaremanagement_v1alpha_generated_gdc_hardware_management_update_site_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "gdchardwaremanagement_v1alpha_generated_GDCHardwareManagement_UpdateSite_sync", + "segments": [ + { + "end": 61, + "start": 27, + "type": "FULL" + }, + { + "end": 61, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 51, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 58, + "start": 52, + "type": "REQUEST_EXECUTION" + }, + { + "end": 62, + "start": 59, + "type": "RESPONSE_HANDLING" + } + ], + "title": "gdchardwaremanagement_v1alpha_generated_gdc_hardware_management_update_site_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.gdchardwaremanagement_v1alpha.GDCHardwareManagementAsyncClient", + "shortName": "GDCHardwareManagementAsyncClient" + }, + "fullName": "google.cloud.gdchardwaremanagement_v1alpha.GDCHardwareManagementAsyncClient.update_zone", + "method": { + "fullName": "google.cloud.gdchardwaremanagement.v1alpha.GDCHardwareManagement.UpdateZone", + "service": { + "fullName": "google.cloud.gdchardwaremanagement.v1alpha.GDCHardwareManagement", + "shortName": "GDCHardwareManagement" + }, + "shortName": "UpdateZone" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.gdchardwaremanagement_v1alpha.types.UpdateZoneRequest" + }, + { + "name": "zone", + "type": "google.cloud.gdchardwaremanagement_v1alpha.types.Zone" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "update_zone" + }, + "description": "Sample for UpdateZone", + "file": "gdchardwaremanagement_v1alpha_generated_gdc_hardware_management_update_zone_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "gdchardwaremanagement_v1alpha_generated_GDCHardwareManagement_UpdateZone_async", + "segments": [ + { + "end": 60, + "start": 27, + "type": "FULL" + }, + { + "end": 60, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 50, + "start": 41, + "type": 
"REQUEST_INITIALIZATION" + }, + { + "end": 57, + "start": 51, + "type": "REQUEST_EXECUTION" + }, + { + "end": 61, + "start": 58, + "type": "RESPONSE_HANDLING" + } + ], + "title": "gdchardwaremanagement_v1alpha_generated_gdc_hardware_management_update_zone_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.gdchardwaremanagement_v1alpha.GDCHardwareManagementClient", + "shortName": "GDCHardwareManagementClient" + }, + "fullName": "google.cloud.gdchardwaremanagement_v1alpha.GDCHardwareManagementClient.update_zone", + "method": { + "fullName": "google.cloud.gdchardwaremanagement.v1alpha.GDCHardwareManagement.UpdateZone", + "service": { + "fullName": "google.cloud.gdchardwaremanagement.v1alpha.GDCHardwareManagement", + "shortName": "GDCHardwareManagement" + }, + "shortName": "UpdateZone" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.gdchardwaremanagement_v1alpha.types.UpdateZoneRequest" + }, + { + "name": "zone", + "type": "google.cloud.gdchardwaremanagement_v1alpha.types.Zone" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "update_zone" + }, + "description": "Sample for UpdateZone", + "file": "gdchardwaremanagement_v1alpha_generated_gdc_hardware_management_update_zone_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "gdchardwaremanagement_v1alpha_generated_GDCHardwareManagement_UpdateZone_sync", + "segments": [ + { + "end": 60, + "start": 27, + "type": "FULL" + }, + { + "end": 60, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 50, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 57, + "start": 51, + "type": "REQUEST_EXECUTION" + }, + { + "end": 61, + "start": 58, + "type": "RESPONSE_HANDLING" + } + ], + "title": "gdchardwaremanagement_v1alpha_generated_gdc_hardware_management_update_zone_sync.py" + } + ] +} diff --git a/packages/google-cloud-gdchardwaremanagement/scripts/decrypt-secrets.sh b/packages/google-cloud-gdchardwaremanagement/scripts/decrypt-secrets.sh new file mode 100755 index 000000000000..0018b421ddf8 --- /dev/null +++ b/packages/google-cloud-gdchardwaremanagement/scripts/decrypt-secrets.sh @@ -0,0 +1,46 @@ +#!/bin/bash + +# Copyright 2023 Google LLC All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )" +ROOT=$( dirname "$DIR" ) + +# Work from the project root. +cd $ROOT + +# Prevent it from overriding files. +# We recommend that sample authors use their own service account files and cloud project. 
+# In that case, they are supposed to prepare these files by themselves. +if [[ -f "testing/test-env.sh" ]] || \ + [[ -f "testing/service-account.json" ]] || \ + [[ -f "testing/client-secrets.json" ]]; then + echo "One or more target files exist, aborting." + exit 1 +fi + +# Use SECRET_MANAGER_PROJECT if set, fallback to cloud-devrel-kokoro-resources. +PROJECT_ID="${SECRET_MANAGER_PROJECT:-cloud-devrel-kokoro-resources}" + +gcloud secrets versions access latest --secret="python-docs-samples-test-env" \ + --project="${PROJECT_ID}" \ + > testing/test-env.sh +gcloud secrets versions access latest \ + --secret="python-docs-samples-service-account" \ + --project="${PROJECT_ID}" \ + > testing/service-account.json +gcloud secrets versions access latest \ + --secret="python-docs-samples-client-secrets" \ + --project="${PROJECT_ID}" \ + > testing/client-secrets.json diff --git a/packages/google-cloud-gdchardwaremanagement/scripts/fixup_gdchardwaremanagement_v1alpha_keywords.py b/packages/google-cloud-gdchardwaremanagement/scripts/fixup_gdchardwaremanagement_v1alpha_keywords.py new file mode 100644 index 000000000000..0a8496e2ccc3 --- /dev/null +++ b/packages/google-cloud-gdchardwaremanagement/scripts/fixup_gdchardwaremanagement_v1alpha_keywords.py @@ -0,0 +1,208 @@ +#! /usr/bin/env python3 +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import argparse +import os +import libcst as cst +import pathlib +import sys +from typing import (Any, Callable, Dict, List, Sequence, Tuple) + + +def partition( + predicate: Callable[[Any], bool], + iterator: Sequence[Any] +) -> Tuple[List[Any], List[Any]]: + """A stable, out-of-place partition.""" + results = ([], []) + + for i in iterator: + results[int(predicate(i))].append(i) + + # Returns trueList, falseList + return results[1], results[0] + + +class gdchardwaremanagementCallTransformer(cst.CSTTransformer): + CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata') + METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { + 'create_comment': ('parent', 'comment', 'comment_id', 'request_id', ), + 'create_hardware': ('parent', 'hardware', 'hardware_id', ), + 'create_hardware_group': ('parent', 'hardware_group', 'hardware_group_id', 'request_id', ), + 'create_order': ('parent', 'order', 'order_id', 'request_id', ), + 'create_site': ('parent', 'site', 'site_id', 'request_id', ), + 'create_zone': ('parent', 'zone', 'zone_id', 'request_id', ), + 'delete_hardware': ('name', 'request_id', ), + 'delete_hardware_group': ('name', 'request_id', ), + 'delete_order': ('name', 'request_id', 'force', ), + 'delete_zone': ('name', 'request_id', ), + 'get_change_log_entry': ('name', ), + 'get_comment': ('name', ), + 'get_hardware': ('name', ), + 'get_hardware_group': ('name', ), + 'get_order': ('name', ), + 'get_site': ('name', ), + 'get_sku': ('name', ), + 'get_zone': ('name', ), + 'list_change_log_entries': ('parent', 'page_size', 'page_token', 'filter', 'order_by', ), + 'list_comments': ('parent', 'page_size', 'page_token', 'filter', 'order_by', ), + 'list_hardware': ('parent', 'page_size', 'page_token', 'filter', 'order_by', ), + 'list_hardware_groups': ('parent', 'page_size', 'page_token', 'filter', 'order_by', ), + 'list_orders': ('parent', 'page_size', 'page_token', 'filter', 'order_by', ), + 'list_sites': ('parent', 'page_size', 'page_token', 'filter', 'order_by', ), + 'list_skus': ('parent', 'page_size', 'page_token', 'filter', 'order_by', ), + 'list_zones': ('parent', 'page_size', 'page_token', 'filter', 'order_by', ), + 'signal_zone_state': ('name', 'state_signal', 'request_id', ), + 'submit_order': ('name', 'request_id', ), + 'update_hardware': ('update_mask', 'hardware', 'request_id', ), + 'update_hardware_group': ('update_mask', 'hardware_group', 'request_id', ), + 'update_order': ('update_mask', 'order', 'request_id', ), + 'update_site': ('update_mask', 'site', 'request_id', ), + 'update_zone': ('update_mask', 'zone', 'request_id', ), + } + + def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode: + try: + key = original.func.attr.value + kword_params = self.METHOD_TO_PARAMS[key] + except (AttributeError, KeyError): + # Either not a method from the API or too convoluted to be sure. + return updated + + # If the existing code is valid, keyword args come after positional args. + # Therefore, all positional args must map to the first parameters. + args, kwargs = partition(lambda a: not bool(a.keyword), updated.args) + if any(k.keyword.value == "request" for k in kwargs): + # We've already fixed this file, don't fix it again. 
+ return updated + + kwargs, ctrl_kwargs = partition( + lambda a: a.keyword.value not in self.CTRL_PARAMS, + kwargs + ) + + args, ctrl_args = args[:len(kword_params)], args[len(kword_params):] + ctrl_kwargs.extend(cst.Arg(value=a.value, keyword=cst.Name(value=ctrl)) + for a, ctrl in zip(ctrl_args, self.CTRL_PARAMS)) + + request_arg = cst.Arg( + value=cst.Dict([ + cst.DictElement( + cst.SimpleString("'{}'".format(name)), +cst.Element(value=arg.value) + ) + # Note: the args + kwargs looks silly, but keep in mind that + # the control parameters had to be stripped out, and that + # those could have been passed positionally or by keyword. + for name, arg in zip(kword_params, args + kwargs)]), + keyword=cst.Name("request") + ) + + return updated.with_changes( + args=[request_arg] + ctrl_kwargs + ) + + +def fix_files( + in_dir: pathlib.Path, + out_dir: pathlib.Path, + *, + transformer=gdchardwaremanagementCallTransformer(), +): + """Duplicate the input dir to the output dir, fixing file method calls. + + Preconditions: + * in_dir is a real directory + * out_dir is a real, empty directory + """ + pyfile_gen = ( + pathlib.Path(os.path.join(root, f)) + for root, _, files in os.walk(in_dir) + for f in files if os.path.splitext(f)[1] == ".py" + ) + + for fpath in pyfile_gen: + with open(fpath, 'r') as f: + src = f.read() + + # Parse the code and insert method call fixes. + tree = cst.parse_module(src) + updated = tree.visit(transformer) + + # Create the path and directory structure for the new file. + updated_path = out_dir.joinpath(fpath.relative_to(in_dir)) + updated_path.parent.mkdir(parents=True, exist_ok=True) + + # Generate the updated source file at the corresponding path. + with open(updated_path, 'w') as f: + f.write(updated.code) + + +if __name__ == '__main__': + parser = argparse.ArgumentParser( + description="""Fix up source that uses the gdchardwaremanagement client library. + +The existing sources are NOT overwritten but are copied to output_dir with changes made. + +Note: This tool operates at a best-effort level at converting positional + parameters in client method calls to keyword based parameters. + Cases where it WILL FAIL include + A) * or ** expansion in a method call. + B) Calls via function or method alias (includes free function calls) + C) Indirect or dispatched calls (e.g. the method is looked up dynamically) + + These all constitute false negatives. The tool will also detect false + positives when an API method shares a name with another method. 
+""") + parser.add_argument( + '-d', + '--input-directory', + required=True, + dest='input_dir', + help='the input directory to walk for python files to fix up', + ) + parser.add_argument( + '-o', + '--output-directory', + required=True, + dest='output_dir', + help='the directory to output files fixed via un-flattening', + ) + args = parser.parse_args() + input_dir = pathlib.Path(args.input_dir) + output_dir = pathlib.Path(args.output_dir) + if not input_dir.is_dir(): + print( + f"input directory '{input_dir}' does not exist or is not a directory", + file=sys.stderr, + ) + sys.exit(-1) + + if not output_dir.is_dir(): + print( + f"output directory '{output_dir}' does not exist or is not a directory", + file=sys.stderr, + ) + sys.exit(-1) + + if os.listdir(output_dir): + print( + f"output directory '{output_dir}' is not empty", + file=sys.stderr, + ) + sys.exit(-1) + + fix_files(input_dir, output_dir) diff --git a/packages/google-cloud-gdchardwaremanagement/setup.py b/packages/google-cloud-gdchardwaremanagement/setup.py new file mode 100644 index 000000000000..89cff3e32278 --- /dev/null +++ b/packages/google-cloud-gdchardwaremanagement/setup.py @@ -0,0 +1,95 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import io +import os +import re + +import setuptools # type: ignore + +package_root = os.path.abspath(os.path.dirname(__file__)) + +name = "google-cloud-gdchardwaremanagement" + + +description = "Google Cloud Gdchardwaremanagement API client library" + +version = None + +with open( + os.path.join(package_root, "google/cloud/gdchardwaremanagement/gapic_version.py") +) as fp: + version_candidates = re.findall(r"(?<=\")\d+.\d+.\d+(?=\")", fp.read()) + assert len(version_candidates) == 1 + version = version_candidates[0] + +if version[0] == "0": + release_status = "Development Status :: 4 - Beta" +else: + release_status = "Development Status :: 5 - Production/Stable" + +dependencies = [ + "google-api-core[grpc] >= 1.34.1, <3.0.0dev,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*,!=2.8.*,!=2.9.*,!=2.10.*", + # Exclude incompatible versions of `google-auth` + # See https://github.com/googleapis/google-cloud-python/issues/12364 + "google-auth >= 2.14.1, <3.0.0dev,!=2.24.0,!=2.25.0", + "proto-plus >= 1.22.3, <2.0.0dev", + "protobuf>=3.19.5,<5.0.0dev,!=3.20.0,!=3.20.1,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", +] +url = "https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-gdchardwaremanagement" + +package_root = os.path.abspath(os.path.dirname(__file__)) + +readme_filename = os.path.join(package_root, "README.rst") +with io.open(readme_filename, encoding="utf-8") as readme_file: + readme = readme_file.read() + +packages = [ + package + for package in setuptools.find_namespace_packages() + if package.startswith("google") +] + +setuptools.setup( + name=name, + version=version, + description=description, + long_description=readme, + author="Google LLC", + author_email="googleapis-packages@google.com", + license="Apache 2.0", + url=url, + classifiers=[ + release_status, + "Intended Audience :: Developers", + "License :: OSI Approved :: Apache Software License", + "Programming Language :: Python", + "Programming Language :: Python :: 3", + "Programming Language :: Python :: 3.7", + "Programming Language :: Python :: 3.8", + "Programming Language :: Python :: 3.9", + "Programming Language :: Python :: 3.10", + "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", + "Operating System :: OS Independent", + "Topic :: Internet", + ], + platforms="Posix; MacOS X; Windows", + packages=packages, + python_requires=">=3.7", + install_requires=dependencies, + include_package_data=True, + zip_safe=False, +) diff --git a/packages/google-cloud-gdchardwaremanagement/testing/.gitignore b/packages/google-cloud-gdchardwaremanagement/testing/.gitignore new file mode 100644 index 000000000000..b05fbd630881 --- /dev/null +++ b/packages/google-cloud-gdchardwaremanagement/testing/.gitignore @@ -0,0 +1,3 @@ +test-env.sh +service-account.json +client-secrets.json \ No newline at end of file diff --git a/packages/google-cloud-gdchardwaremanagement/testing/constraints-3.10.txt b/packages/google-cloud-gdchardwaremanagement/testing/constraints-3.10.txt new file mode 100644 index 000000000000..ed7f9aed2559 --- /dev/null +++ b/packages/google-cloud-gdchardwaremanagement/testing/constraints-3.10.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. 
+google-api-core +proto-plus +protobuf diff --git a/packages/google-cloud-gdchardwaremanagement/testing/constraints-3.11.txt b/packages/google-cloud-gdchardwaremanagement/testing/constraints-3.11.txt new file mode 100644 index 000000000000..ed7f9aed2559 --- /dev/null +++ b/packages/google-cloud-gdchardwaremanagement/testing/constraints-3.11.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf diff --git a/packages/google-cloud-gdchardwaremanagement/testing/constraints-3.12.txt b/packages/google-cloud-gdchardwaremanagement/testing/constraints-3.12.txt new file mode 100644 index 000000000000..ed7f9aed2559 --- /dev/null +++ b/packages/google-cloud-gdchardwaremanagement/testing/constraints-3.12.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf diff --git a/packages/google-cloud-gdchardwaremanagement/testing/constraints-3.7.txt b/packages/google-cloud-gdchardwaremanagement/testing/constraints-3.7.txt new file mode 100644 index 000000000000..b8a550c73855 --- /dev/null +++ b/packages/google-cloud-gdchardwaremanagement/testing/constraints-3.7.txt @@ -0,0 +1,10 @@ +# This constraints file is used to check that lower bounds +# are correct in setup.py +# List all library dependencies and extras in this file. +# Pin the version to the lower bound. +# e.g., if setup.py has "google-cloud-foo >= 1.14.0, < 2.0.0dev", +# Then this file should have google-cloud-foo==1.14.0 +google-api-core==1.34.1 +google-auth==2.14.1 +proto-plus==1.22.3 +protobuf==3.19.5 diff --git a/packages/google-cloud-gdchardwaremanagement/testing/constraints-3.8.txt b/packages/google-cloud-gdchardwaremanagement/testing/constraints-3.8.txt new file mode 100644 index 000000000000..ed7f9aed2559 --- /dev/null +++ b/packages/google-cloud-gdchardwaremanagement/testing/constraints-3.8.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf diff --git a/packages/google-cloud-gdchardwaremanagement/testing/constraints-3.9.txt b/packages/google-cloud-gdchardwaremanagement/testing/constraints-3.9.txt new file mode 100644 index 000000000000..ed7f9aed2559 --- /dev/null +++ b/packages/google-cloud-gdchardwaremanagement/testing/constraints-3.9.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf diff --git a/packages/google-cloud-gdchardwaremanagement/tests/__init__.py b/packages/google-cloud-gdchardwaremanagement/tests/__init__.py new file mode 100644 index 000000000000..8f6cf068242c --- /dev/null +++ b/packages/google-cloud-gdchardwaremanagement/tests/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/packages/google-cloud-gdchardwaremanagement/tests/unit/__init__.py b/packages/google-cloud-gdchardwaremanagement/tests/unit/__init__.py new file mode 100644 index 000000000000..8f6cf068242c --- /dev/null +++ b/packages/google-cloud-gdchardwaremanagement/tests/unit/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/packages/google-cloud-gdchardwaremanagement/tests/unit/gapic/__init__.py b/packages/google-cloud-gdchardwaremanagement/tests/unit/gapic/__init__.py new file mode 100644 index 000000000000..8f6cf068242c --- /dev/null +++ b/packages/google-cloud-gdchardwaremanagement/tests/unit/gapic/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/packages/google-cloud-gdchardwaremanagement/tests/unit/gapic/gdchardwaremanagement_v1alpha/__init__.py b/packages/google-cloud-gdchardwaremanagement/tests/unit/gapic/gdchardwaremanagement_v1alpha/__init__.py new file mode 100644 index 000000000000..8f6cf068242c --- /dev/null +++ b/packages/google-cloud-gdchardwaremanagement/tests/unit/gapic/gdchardwaremanagement_v1alpha/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# diff --git a/packages/google-cloud-gdchardwaremanagement/tests/unit/gapic/gdchardwaremanagement_v1alpha/test_gdc_hardware_management.py b/packages/google-cloud-gdchardwaremanagement/tests/unit/gapic/gdchardwaremanagement_v1alpha/test_gdc_hardware_management.py new file mode 100644 index 000000000000..cd5461e2ca65 --- /dev/null +++ b/packages/google-cloud-gdchardwaremanagement/tests/unit/gapic/gdchardwaremanagement_v1alpha/test_gdc_hardware_management.py @@ -0,0 +1,29898 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import os + +# try/except added for compatibility with python < 3.8 +try: + from unittest import mock + from unittest.mock import AsyncMock # pragma: NO COVER +except ImportError: # pragma: NO COVER + import mock + +from collections.abc import Iterable +import json +import math + +from google.api_core import ( + future, + gapic_v1, + grpc_helpers, + grpc_helpers_async, + operation, + operations_v1, + path_template, +) +from google.api_core import api_core_version, client_options +from google.api_core import exceptions as core_exceptions +from google.api_core import operation_async # type: ignore +import google.auth +from google.auth import credentials as ga_credentials +from google.auth.exceptions import MutualTLSChannelError +from google.cloud.location import locations_pb2 +from google.longrunning import operations_pb2 # type: ignore +from google.oauth2 import service_account +from google.protobuf import empty_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import json_format +from google.protobuf import timestamp_pb2 # type: ignore +from google.type import date_pb2 # type: ignore +from google.type import datetime_pb2 # type: ignore +from google.type import dayofweek_pb2 # type: ignore +from google.type import postal_address_pb2 # type: ignore +from google.type import timeofday_pb2 # type: ignore +import grpc +from grpc.experimental import aio +from proto.marshal.rules import wrappers +from proto.marshal.rules.dates import DurationRule, TimestampRule +import pytest +from requests import PreparedRequest, Request, Response +from requests.sessions import Session + +from google.cloud.gdchardwaremanagement_v1alpha.services.gdc_hardware_management import ( + GDCHardwareManagementAsyncClient, + GDCHardwareManagementClient, + pagers, + transports, +) +from google.cloud.gdchardwaremanagement_v1alpha.types import resources, service + + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + + +# If default endpoint is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. 
+def modify_default_endpoint(client): + return ( + "foo.googleapis.com" + if ("localhost" in client.DEFAULT_ENDPOINT) + else client.DEFAULT_ENDPOINT + ) + + +# If default endpoint template is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint template so the client can produce a different +# mtls endpoint for endpoint testing purposes. +def modify_default_endpoint_template(client): + return ( + "test.{UNIVERSE_DOMAIN}" + if ("localhost" in client._DEFAULT_ENDPOINT_TEMPLATE) + else client._DEFAULT_ENDPOINT_TEMPLATE + ) + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert GDCHardwareManagementClient._get_default_mtls_endpoint(None) is None + assert ( + GDCHardwareManagementClient._get_default_mtls_endpoint(api_endpoint) + == api_mtls_endpoint + ) + assert ( + GDCHardwareManagementClient._get_default_mtls_endpoint(api_mtls_endpoint) + == api_mtls_endpoint + ) + assert ( + GDCHardwareManagementClient._get_default_mtls_endpoint(sandbox_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + GDCHardwareManagementClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + GDCHardwareManagementClient._get_default_mtls_endpoint(non_googleapi) + == non_googleapi + ) + + +def test__read_environment_variables(): + assert GDCHardwareManagementClient._read_environment_variables() == ( + False, + "auto", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + assert GDCHardwareManagementClient._read_environment_variables() == ( + True, + "auto", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + assert GDCHardwareManagementClient._read_environment_variables() == ( + False, + "auto", + None, + ) + + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError) as excinfo: + GDCHardwareManagementClient._read_environment_variables() + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + assert GDCHardwareManagementClient._read_environment_variables() == ( + False, + "never", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + assert GDCHardwareManagementClient._read_environment_variables() == ( + False, + "always", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}): + assert GDCHardwareManagementClient._read_environment_variables() == ( + False, + "auto", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + GDCHardwareManagementClient._read_environment_variables() + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + with mock.patch.dict(os.environ, {"GOOGLE_CLOUD_UNIVERSE_DOMAIN": "foo.com"}): + assert GDCHardwareManagementClient._read_environment_variables() == ( + False, + "auto", + "foo.com", + ) + + +def test__get_client_cert_source(): + mock_provided_cert_source = mock.Mock() + 
mock_default_cert_source = mock.Mock() + + assert GDCHardwareManagementClient._get_client_cert_source(None, False) is None + assert ( + GDCHardwareManagementClient._get_client_cert_source( + mock_provided_cert_source, False + ) + is None + ) + assert ( + GDCHardwareManagementClient._get_client_cert_source( + mock_provided_cert_source, True + ) + == mock_provided_cert_source + ) + + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", return_value=True + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=mock_default_cert_source, + ): + assert ( + GDCHardwareManagementClient._get_client_cert_source(None, True) + is mock_default_cert_source + ) + assert ( + GDCHardwareManagementClient._get_client_cert_source( + mock_provided_cert_source, "true" + ) + is mock_provided_cert_source + ) + + +@mock.patch.object( + GDCHardwareManagementClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(GDCHardwareManagementClient), +) +@mock.patch.object( + GDCHardwareManagementAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(GDCHardwareManagementAsyncClient), +) +def test__get_api_endpoint(): + api_override = "foo.com" + mock_client_cert_source = mock.Mock() + default_universe = GDCHardwareManagementClient._DEFAULT_UNIVERSE + default_endpoint = GDCHardwareManagementClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=default_universe + ) + mock_universe = "bar.com" + mock_endpoint = GDCHardwareManagementClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=mock_universe + ) + + assert ( + GDCHardwareManagementClient._get_api_endpoint( + api_override, mock_client_cert_source, default_universe, "always" + ) + == api_override + ) + assert ( + GDCHardwareManagementClient._get_api_endpoint( + None, mock_client_cert_source, default_universe, "auto" + ) + == GDCHardwareManagementClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + GDCHardwareManagementClient._get_api_endpoint( + None, None, default_universe, "auto" + ) + == default_endpoint + ) + assert ( + GDCHardwareManagementClient._get_api_endpoint( + None, None, default_universe, "always" + ) + == GDCHardwareManagementClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + GDCHardwareManagementClient._get_api_endpoint( + None, mock_client_cert_source, default_universe, "always" + ) + == GDCHardwareManagementClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + GDCHardwareManagementClient._get_api_endpoint( + None, None, mock_universe, "never" + ) + == mock_endpoint + ) + assert ( + GDCHardwareManagementClient._get_api_endpoint( + None, None, default_universe, "never" + ) + == default_endpoint + ) + + with pytest.raises(MutualTLSChannelError) as excinfo: + GDCHardwareManagementClient._get_api_endpoint( + None, mock_client_cert_source, mock_universe, "auto" + ) + assert ( + str(excinfo.value) + == "mTLS is not supported in any universe other than googleapis.com." 
+ ) + + +def test__get_universe_domain(): + client_universe_domain = "foo.com" + universe_domain_env = "bar.com" + + assert ( + GDCHardwareManagementClient._get_universe_domain( + client_universe_domain, universe_domain_env + ) + == client_universe_domain + ) + assert ( + GDCHardwareManagementClient._get_universe_domain(None, universe_domain_env) + == universe_domain_env + ) + assert ( + GDCHardwareManagementClient._get_universe_domain(None, None) + == GDCHardwareManagementClient._DEFAULT_UNIVERSE + ) + + with pytest.raises(ValueError) as excinfo: + GDCHardwareManagementClient._get_universe_domain("", None) + assert str(excinfo.value) == "Universe Domain cannot be an empty string." + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + ( + GDCHardwareManagementClient, + transports.GDCHardwareManagementGrpcTransport, + "grpc", + ), + ( + GDCHardwareManagementClient, + transports.GDCHardwareManagementRestTransport, + "rest", + ), + ], +) +def test__validate_universe_domain(client_class, transport_class, transport_name): + client = client_class( + transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) + ) + assert client._validate_universe_domain() == True + + # Test the case when universe is already validated. + assert client._validate_universe_domain() == True + + if transport_name == "grpc": + # Test the case where credentials are provided by the + # `local_channel_credentials`. The default universes in both match. + channel = grpc.secure_channel( + "http://localhost/", grpc.local_channel_credentials() + ) + client = client_class(transport=transport_class(channel=channel)) + assert client._validate_universe_domain() == True + + # Test the case where credentials do not exist: e.g. a transport is provided + # with no credentials. Validation should still succeed because there is no + # mismatch with non-existent credentials. + channel = grpc.secure_channel( + "http://localhost/", grpc.local_channel_credentials() + ) + transport = transport_class(channel=channel) + transport._credentials = None + client = client_class(transport=transport) + assert client._validate_universe_domain() == True + + # TODO: This is needed to cater for older versions of google-auth + # Make this test unconditional once the minimum supported version of + # google-auth becomes 2.23.0 or higher. + google_auth_major, google_auth_minor = [ + int(part) for part in google.auth.__version__.split(".")[0:2] + ] + if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): + credentials = ga_credentials.AnonymousCredentials() + credentials._universe_domain = "foo.com" + # Test the case when there is a universe mismatch from the credentials. + client = client_class(transport=transport_class(credentials=credentials)) + with pytest.raises(ValueError) as excinfo: + client._validate_universe_domain() + assert ( + str(excinfo.value) + == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." + ) + + # Test the case when there is a universe mismatch from the client. + # + # TODO: Make this test unconditional once the minimum supported version of + # google-api-core becomes 2.15.0 or higher. 
+ api_core_major, api_core_minor = [ + int(part) for part in api_core_version.__version__.split(".")[0:2] + ] + if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): + client = client_class( + client_options={"universe_domain": "bar.com"}, + transport=transport_class( + credentials=ga_credentials.AnonymousCredentials(), + ), + ) + with pytest.raises(ValueError) as excinfo: + client._validate_universe_domain() + assert ( + str(excinfo.value) + == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." + ) + + # Test that ValueError is raised if universe_domain is provided via client options and credentials is None + with pytest.raises(ValueError): + client._compare_universes("foo.bar", None) + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (GDCHardwareManagementClient, "grpc"), + (GDCHardwareManagementAsyncClient, "grpc_asyncio"), + (GDCHardwareManagementClient, "rest"), + ], +) +def test_gdc_hardware_management_client_from_service_account_info( + client_class, transport_name +): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_info" + ) as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info, transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + "gdchardwaremanagement.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://gdchardwaremanagement.googleapis.com" + ) + + +@pytest.mark.parametrize( + "transport_class,transport_name", + [ + (transports.GDCHardwareManagementGrpcTransport, "grpc"), + (transports.GDCHardwareManagementGrpcAsyncIOTransport, "grpc_asyncio"), + (transports.GDCHardwareManagementRestTransport, "rest"), + ], +) +def test_gdc_hardware_management_client_service_account_always_use_jwt( + transport_class, transport_name +): + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) + + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=False) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (GDCHardwareManagementClient, "grpc"), + (GDCHardwareManagementAsyncClient, "grpc_asyncio"), + (GDCHardwareManagementClient, "rest"), + ], +) +def test_gdc_hardware_management_client_from_service_account_file( + client_class, transport_name +): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_file" + ) as factory: + factory.return_value = creds + client = client_class.from_service_account_file( + "dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + client = client_class.from_service_account_json( + "dummy/file/path.json", 
transport=transport_name + ) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + "gdchardwaremanagement.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://gdchardwaremanagement.googleapis.com" + ) + + +def test_gdc_hardware_management_client_get_transport_class(): + transport = GDCHardwareManagementClient.get_transport_class() + available_transports = [ + transports.GDCHardwareManagementGrpcTransport, + transports.GDCHardwareManagementRestTransport, + ] + assert transport in available_transports + + transport = GDCHardwareManagementClient.get_transport_class("grpc") + assert transport == transports.GDCHardwareManagementGrpcTransport + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + ( + GDCHardwareManagementClient, + transports.GDCHardwareManagementGrpcTransport, + "grpc", + ), + ( + GDCHardwareManagementAsyncClient, + transports.GDCHardwareManagementGrpcAsyncIOTransport, + "grpc_asyncio", + ), + ( + GDCHardwareManagementClient, + transports.GDCHardwareManagementRestTransport, + "rest", + ), + ], +) +@mock.patch.object( + GDCHardwareManagementClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(GDCHardwareManagementClient), +) +@mock.patch.object( + GDCHardwareManagementAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(GDCHardwareManagementAsyncClient), +) +def test_gdc_hardware_management_client_client_options( + client_class, transport_class, transport_name +): + # Check that if channel is provided we won't create a new one. + with mock.patch.object(GDCHardwareManagementClient, "get_transport_class") as gtc: + transport = transport_class(credentials=ga_credentials.AnonymousCredentials()) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object(GDCHardwareManagementClient, "get_transport_class") as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. + options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name, client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + client = client_class(transport=transport_name) + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError) as excinfo: + client = client_class(transport=transport_name) + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + # Check the case api_endpoint is provided + options = client_options.ClientOptions( + api_audience="https://language.googleapis.com" + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience="https://language.googleapis.com", + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,use_client_cert_env", + [ + ( + GDCHardwareManagementClient, + transports.GDCHardwareManagementGrpcTransport, + "grpc", + "true", + ), + ( + GDCHardwareManagementAsyncClient, + transports.GDCHardwareManagementGrpcAsyncIOTransport, + "grpc_asyncio", + "true", + ), + ( + GDCHardwareManagementClient, + transports.GDCHardwareManagementGrpcTransport, + "grpc", + "false", + ), + ( + GDCHardwareManagementAsyncClient, + transports.GDCHardwareManagementGrpcAsyncIOTransport, + "grpc_asyncio", + "false", + ), + ( + GDCHardwareManagementClient, + transports.GDCHardwareManagementRestTransport, + "rest", + "true", + ), + ( + GDCHardwareManagementClient, + 
transports.GDCHardwareManagementRestTransport, + "rest", + "false", + ), + ], +) +@mock.patch.object( + GDCHardwareManagementClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(GDCHardwareManagementClient), +) +@mock.patch.object( + GDCHardwareManagementAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(GDCHardwareManagementAsyncClient), +) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_gdc_hardware_management_client_mtls_env_auto( + client_class, transport_class, transport_name, use_client_cert_env +): + # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. + + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + options = client_options.ClientOptions( + client_cert_source=client_cert_source_callback + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ) + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=client_cert_source_callback, + ): + if use_client_cert_env == "false": + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ) + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case client_cert_source and ADC client cert are not provided. 
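+ # With no certificate from either source, the client should fall back to the regular endpoint and pass no client cert to the transport.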
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class", [GDCHardwareManagementClient, GDCHardwareManagementAsyncClient] +) +@mock.patch.object( + GDCHardwareManagementClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(GDCHardwareManagementClient), +) +@mock.patch.object( + GDCHardwareManagementAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(GDCHardwareManagementAsyncClient), +) +def test_gdc_hardware_management_client_get_mtls_endpoint_and_cert_source(client_class): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. 
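+ # With a default client certificate present, "auto" should resolve to the mTLS endpoint and return the cert source.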
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=mock_client_cert_source, + ): + ( + api_endpoint, + cert_source, + ) = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + client_class.get_mtls_endpoint_and_cert_source() + + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError) as excinfo: + client_class.get_mtls_endpoint_and_cert_source() + + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + + +@pytest.mark.parametrize( + "client_class", [GDCHardwareManagementClient, GDCHardwareManagementAsyncClient] +) +@mock.patch.object( + GDCHardwareManagementClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(GDCHardwareManagementClient), +) +@mock.patch.object( + GDCHardwareManagementAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(GDCHardwareManagementAsyncClient), +) +def test_gdc_hardware_management_client_client_api_endpoint(client_class): + mock_client_cert_source = client_cert_source_callback + api_override = "foo.com" + default_universe = GDCHardwareManagementClient._DEFAULT_UNIVERSE + default_endpoint = GDCHardwareManagementClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=default_universe + ) + mock_universe = "bar.com" + mock_endpoint = GDCHardwareManagementClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=mock_universe + ) + + # If ClientOptions.api_endpoint is set and GOOGLE_API_USE_CLIENT_CERTIFICATE="true", + # use ClientOptions.api_endpoint as the api endpoint regardless. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ): + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=api_override + ) + client = client_class( + client_options=options, + credentials=ga_credentials.AnonymousCredentials(), + ) + assert client.api_endpoint == api_override + + # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="never", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + client = client_class(credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == default_endpoint + + # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="always", + # use the DEFAULT_MTLS_ENDPOINT as the api endpoint. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + client = client_class(credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + + # If ClientOptions.api_endpoint is not set, GOOGLE_API_USE_MTLS_ENDPOINT="auto" (default), + # GOOGLE_API_USE_CLIENT_CERTIFICATE="false" (default), default cert source doesn't exist, + # and ClientOptions.universe_domain="bar.com", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with universe domain as the api endpoint. + options = client_options.ClientOptions() + universe_exists = hasattr(options, "universe_domain") + if universe_exists: + options = client_options.ClientOptions(universe_domain=mock_universe) + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + else: + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + assert client.api_endpoint == ( + mock_endpoint if universe_exists else default_endpoint + ) + assert client.universe_domain == ( + mock_universe if universe_exists else default_universe + ) + + # If ClientOptions does not have a universe domain attribute and GOOGLE_API_USE_MTLS_ENDPOINT="never", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. + options = client_options.ClientOptions() + if hasattr(options, "universe_domain"): + delattr(options, "universe_domain") + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + assert client.api_endpoint == default_endpoint + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + ( + GDCHardwareManagementClient, + transports.GDCHardwareManagementGrpcTransport, + "grpc", + ), + ( + GDCHardwareManagementAsyncClient, + transports.GDCHardwareManagementGrpcAsyncIOTransport, + "grpc_asyncio", + ), + ( + GDCHardwareManagementClient, + transports.GDCHardwareManagementRestTransport, + "rest", + ), + ], +) +def test_gdc_hardware_management_client_client_options_scopes( + client_class, transport_class, transport_name +): + # Check the case scopes are provided. + options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + GDCHardwareManagementClient, + transports.GDCHardwareManagementGrpcTransport, + "grpc", + grpc_helpers, + ), + ( + GDCHardwareManagementAsyncClient, + transports.GDCHardwareManagementGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + ( + GDCHardwareManagementClient, + transports.GDCHardwareManagementRestTransport, + "rest", + None, + ), + ], +) +def test_gdc_hardware_management_client_client_options_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. 
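+ # The path should be forwarded to the transport as credentials_file rather than being loaded into a credentials object here.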
+ options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +def test_gdc_hardware_management_client_client_options_from_dict(): + with mock.patch( + "google.cloud.gdchardwaremanagement_v1alpha.services.gdc_hardware_management.transports.GDCHardwareManagementGrpcTransport.__init__" + ) as grpc_transport: + grpc_transport.return_value = None + client = GDCHardwareManagementClient( + client_options={"api_endpoint": "squid.clam.whelk"} + ) + grpc_transport.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + GDCHardwareManagementClient, + transports.GDCHardwareManagementGrpcTransport, + "grpc", + grpc_helpers, + ), + ( + GDCHardwareManagementAsyncClient, + transports.GDCHardwareManagementGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + ], +) +def test_gdc_hardware_management_client_create_channel_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. + options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # test that the credentials from file are saved and used as the credentials. 
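+ # load_credentials_from_file is mocked below, so no real key file is read; the credentials it returns must be the ones passed to create_channel.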
+ with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel" + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + file_creds = ga_credentials.AnonymousCredentials() + load_creds.return_value = (file_creds, None) + adc.return_value = (creds, None) + client = client_class(client_options=options, transport=transport_name) + create_channel.assert_called_with( + "gdchardwaremanagement.googleapis.com:443", + credentials=file_creds, + credentials_file=None, + quota_project_id=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + scopes=None, + default_host="gdchardwaremanagement.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "request_type", + [ + service.ListOrdersRequest, + dict, + ], +) +def test_list_orders(request_type, transport: str = "grpc"): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_orders), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = service.ListOrdersResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + response = client.list_orders(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = service.ListOrdersRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListOrdersPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +def test_list_orders_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_orders), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.list_orders() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == service.ListOrdersRequest() + + +def test_list_orders_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. 
+ request = service.ListOrdersRequest( + parent="parent_value", + page_token="page_token_value", + filter="filter_value", + order_by="order_by_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_orders), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.list_orders(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == service.ListOrdersRequest( + parent="parent_value", + page_token="page_token_value", + filter="filter_value", + order_by="order_by_value", + ) + + +def test_list_orders_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_orders in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.list_orders] = mock_rpc + request = {} + client.list_orders(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_orders(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_list_orders_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = GDCHardwareManagementAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_orders), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + service.ListOrdersResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + ) + response = await client.list_orders() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == service.ListOrdersRequest() + + +@pytest.mark.asyncio +async def test_list_orders_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = GDCHardwareManagementAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.list_orders + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[ + client._client._transport.list_orders + ] = mock_object + + request = {} + await client.list_orders(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + await client.list_orders(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + +@pytest.mark.asyncio +async def test_list_orders_async( + transport: str = "grpc_asyncio", request_type=service.ListOrdersRequest +): + client = GDCHardwareManagementAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_orders), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + service.ListOrdersResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + ) + response = await client.list_orders(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = service.ListOrdersRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListOrdersAsyncPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +@pytest.mark.asyncio +async def test_list_orders_async_from_dict(): + await test_list_orders_async(request_type=dict) + + +def test_list_orders_field_headers(): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.ListOrdersRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.list_orders), "__call__") as call: + call.return_value = service.ListOrdersResponse() + client.list_orders(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_orders_field_headers_async(): + client = GDCHardwareManagementAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.ListOrdersRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_orders), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + service.ListOrdersResponse() + ) + await client.list_orders(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_list_orders_flattened(): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_orders), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = service.ListOrdersResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_orders( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +def test_list_orders_flattened_error(): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_orders( + service.ListOrdersRequest(), + parent="parent_value", + ) + + +@pytest.mark.asyncio +async def test_list_orders_flattened_async(): + client = GDCHardwareManagementAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_orders), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = service.ListOrdersResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + service.ListOrdersResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_orders( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_list_orders_flattened_error_async(): + client = GDCHardwareManagementAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.list_orders( + service.ListOrdersRequest(), + parent="parent_value", + ) + + +def test_list_orders_pager(transport_name: str = "grpc"): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_orders), "__call__") as call: + # Set the response to a series of pages. + call.side_effect = ( + service.ListOrdersResponse( + orders=[ + resources.Order(), + resources.Order(), + resources.Order(), + ], + next_page_token="abc", + ), + service.ListOrdersResponse( + orders=[], + next_page_token="def", + ), + service.ListOrdersResponse( + orders=[ + resources.Order(), + ], + next_page_token="ghi", + ), + service.ListOrdersResponse( + orders=[ + resources.Order(), + resources.Order(), + ], + ), + RuntimeError, + ) + + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + ) + pager = client.list_orders(request={}) + + assert pager._metadata == expected_metadata + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, resources.Order) for i in results) + + +def test_list_orders_pages(transport_name: str = "grpc"): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_orders), "__call__") as call: + # Set the response to a series of pages. + call.side_effect = ( + service.ListOrdersResponse( + orders=[ + resources.Order(), + resources.Order(), + resources.Order(), + ], + next_page_token="abc", + ), + service.ListOrdersResponse( + orders=[], + next_page_token="def", + ), + service.ListOrdersResponse( + orders=[ + resources.Order(), + ], + next_page_token="ghi", + ), + service.ListOrdersResponse( + orders=[ + resources.Order(), + resources.Order(), + ], + ), + RuntimeError, + ) + pages = list(client.list_orders(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_list_orders_async_pager(): + client = GDCHardwareManagementAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_orders), "__call__", new_callable=mock.AsyncMock + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + service.ListOrdersResponse( + orders=[ + resources.Order(), + resources.Order(), + resources.Order(), + ], + next_page_token="abc", + ), + service.ListOrdersResponse( + orders=[], + next_page_token="def", + ), + service.ListOrdersResponse( + orders=[ + resources.Order(), + ], + next_page_token="ghi", + ), + service.ListOrdersResponse( + orders=[ + resources.Order(), + resources.Order(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_orders( + request={}, + ) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, resources.Order) for i in responses) + + +@pytest.mark.asyncio +async def test_list_orders_async_pages(): + client = GDCHardwareManagementAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_orders), "__call__", new_callable=mock.AsyncMock + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + service.ListOrdersResponse( + orders=[ + resources.Order(), + resources.Order(), + resources.Order(), + ], + next_page_token="abc", + ), + service.ListOrdersResponse( + orders=[], + next_page_token="def", + ), + service.ListOrdersResponse( + orders=[ + resources.Order(), + ], + next_page_token="ghi", + ), + service.ListOrdersResponse( + orders=[ + resources.Order(), + resources.Order(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_orders(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + service.GetOrderRequest, + dict, + ], +) +def test_get_order(request_type, transport: str = "grpc"): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_order), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = resources.Order( + name="name_value", + display_name="display_name_value", + state=resources.Order.State.DRAFT, + target_workloads=["target_workloads_value"], + customer_motivation="customer_motivation_value", + region_code="region_code_value", + order_form_uri="order_form_uri_value", + type_=resources.Order.Type.PAID, + billing_id="billing_id_value", + ) + response = client.get_order(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = service.GetOrderRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, resources.Order) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.state == resources.Order.State.DRAFT + assert response.target_workloads == ["target_workloads_value"] + assert response.customer_motivation == "customer_motivation_value" + assert response.region_code == "region_code_value" + assert response.order_form_uri == "order_form_uri_value" + assert response.type_ == resources.Order.Type.PAID + assert response.billing_id == "billing_id_value" + + +def test_get_order_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_order), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.get_order() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == service.GetOrderRequest() + + +def test_get_order_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = service.GetOrderRequest( + name="name_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_order), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.get_order(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == service.GetOrderRequest( + name="name_value", + ) + + +def test_get_order_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_order in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get_order] = mock_rpc + request = {} + client.get_order(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.get_order(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_get_order_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. 
request == None and no flattened fields passed, work. + client = GDCHardwareManagementAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_order), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + resources.Order( + name="name_value", + display_name="display_name_value", + state=resources.Order.State.DRAFT, + target_workloads=["target_workloads_value"], + customer_motivation="customer_motivation_value", + region_code="region_code_value", + order_form_uri="order_form_uri_value", + type_=resources.Order.Type.PAID, + billing_id="billing_id_value", + ) + ) + response = await client.get_order() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == service.GetOrderRequest() + + +@pytest.mark.asyncio +async def test_get_order_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = GDCHardwareManagementAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.get_order + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[ + client._client._transport.get_order + ] = mock_object + + request = {} + await client.get_order(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + await client.get_order(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + +@pytest.mark.asyncio +async def test_get_order_async( + transport: str = "grpc_asyncio", request_type=service.GetOrderRequest +): + client = GDCHardwareManagementAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_order), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + resources.Order( + name="name_value", + display_name="display_name_value", + state=resources.Order.State.DRAFT, + target_workloads=["target_workloads_value"], + customer_motivation="customer_motivation_value", + region_code="region_code_value", + order_form_uri="order_form_uri_value", + type_=resources.Order.Type.PAID, + billing_id="billing_id_value", + ) + ) + response = await client.get_order(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = service.GetOrderRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, resources.Order) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.state == resources.Order.State.DRAFT + assert response.target_workloads == ["target_workloads_value"] + assert response.customer_motivation == "customer_motivation_value" + assert response.region_code == "region_code_value" + assert response.order_form_uri == "order_form_uri_value" + assert response.type_ == resources.Order.Type.PAID + assert response.billing_id == "billing_id_value" + + +@pytest.mark.asyncio +async def test_get_order_async_from_dict(): + await test_get_order_async(request_type=dict) + + +def test_get_order_field_headers(): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.GetOrderRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_order), "__call__") as call: + call.return_value = resources.Order() + client.get_order(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_order_field_headers_async(): + client = GDCHardwareManagementAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.GetOrderRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_order), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.Order()) + await client.get_order(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_get_order_flattened(): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_order), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = resources.Order() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_order( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_get_order_flattened_error(): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_order( + service.GetOrderRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_get_order_flattened_async(): + client = GDCHardwareManagementAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_order), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = resources.Order() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.Order()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_order( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_get_order_flattened_error_async(): + client = GDCHardwareManagementAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.get_order( + service.GetOrderRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + service.CreateOrderRequest, + dict, + ], +) +def test_create_order(request_type, transport: str = "grpc"): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_order), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.create_order(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = service.CreateOrderRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_create_order_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_order), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.create_order() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == service.CreateOrderRequest() + + +def test_create_order_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = service.CreateOrderRequest( + parent="parent_value", + order_id="order_id_value", + request_id="request_id_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_order), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.create_order(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == service.CreateOrderRequest( + parent="parent_value", + order_id="order_id_value", + request_id="request_id_value", + ) + + +def test_create_order_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.create_order in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.create_order] = mock_rpc + request = {} + client.create_order(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.create_order(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_create_order_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = GDCHardwareManagementAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_order), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.create_order() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == service.CreateOrderRequest() + + +@pytest.mark.asyncio +async def test_create_order_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = GDCHardwareManagementAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.create_order + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[ + client._client._transport.create_order + ] = mock_object + + request = {} + await client.create_order(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.create_order(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + +@pytest.mark.asyncio +async def test_create_order_async( + transport: str = "grpc_asyncio", request_type=service.CreateOrderRequest +): + client = GDCHardwareManagementAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_order), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.create_order(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = service.CreateOrderRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_create_order_async_from_dict(): + await test_create_order_async(request_type=dict) + + +def test_create_order_field_headers(): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.CreateOrderRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.create_order), "__call__") as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.create_order(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_create_order_field_headers_async(): + client = GDCHardwareManagementAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.CreateOrderRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_order), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.create_order(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_create_order_flattened(): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_order), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.create_order( + parent="parent_value", + order=resources.Order(name="name_value"), + order_id="order_id_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].order + mock_val = resources.Order(name="name_value") + assert arg == mock_val + arg = args[0].order_id + mock_val = "order_id_value" + assert arg == mock_val + + +def test_create_order_flattened_error(): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_order( + service.CreateOrderRequest(), + parent="parent_value", + order=resources.Order(name="name_value"), + order_id="order_id_value", + ) + + +@pytest.mark.asyncio +async def test_create_order_flattened_async(): + client = GDCHardwareManagementAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_order), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = operations_pb2.Operation(name="operations/op") + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.create_order( + parent="parent_value", + order=resources.Order(name="name_value"), + order_id="order_id_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].order + mock_val = resources.Order(name="name_value") + assert arg == mock_val + arg = args[0].order_id + mock_val = "order_id_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_create_order_flattened_error_async(): + client = GDCHardwareManagementAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.create_order( + service.CreateOrderRequest(), + parent="parent_value", + order=resources.Order(name="name_value"), + order_id="order_id_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + service.UpdateOrderRequest, + dict, + ], +) +def test_update_order(request_type, transport: str = "grpc"): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_order), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.update_order(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = service.UpdateOrderRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_update_order_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_order), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.update_order() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == service.UpdateOrderRequest() + + +def test_update_order_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. 
+ client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = service.UpdateOrderRequest( + request_id="request_id_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_order), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.update_order(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == service.UpdateOrderRequest( + request_id="request_id_value", + ) + + +def test_update_order_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.update_order in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.update_order] = mock_rpc + request = {} + client.update_order(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.update_order(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_update_order_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = GDCHardwareManagementAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_order), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.update_order() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == service.UpdateOrderRequest() + + +@pytest.mark.asyncio +async def test_update_order_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = GDCHardwareManagementAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.update_order + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[ + client._client._transport.update_order + ] = mock_object + + request = {} + await client.update_order(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.update_order(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + +@pytest.mark.asyncio +async def test_update_order_async( + transport: str = "grpc_asyncio", request_type=service.UpdateOrderRequest +): + client = GDCHardwareManagementAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_order), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.update_order(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = service.UpdateOrderRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_update_order_async_from_dict(): + await test_update_order_async(request_type=dict) + + +def test_update_order_field_headers(): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.UpdateOrderRequest() + + request.order.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.update_order), "__call__") as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.update_order(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "order.name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_update_order_field_headers_async(): + client = GDCHardwareManagementAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.UpdateOrderRequest() + + request.order.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_order), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.update_order(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "order.name=name_value", + ) in kw["metadata"] + + +def test_update_order_flattened(): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_order), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.update_order( + order=resources.Order(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].order + mock_val = resources.Order(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + assert arg == mock_val + + +def test_update_order_flattened_error(): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_order( + service.UpdateOrderRequest(), + order=resources.Order(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +@pytest.mark.asyncio +async def test_update_order_flattened_async(): + client = GDCHardwareManagementAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_order), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = operations_pb2.Operation(name="operations/op") + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.update_order( + order=resources.Order(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].order + mock_val = resources.Order(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_update_order_flattened_error_async(): + client = GDCHardwareManagementAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.update_order( + service.UpdateOrderRequest(), + order=resources.Order(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +@pytest.mark.parametrize( + "request_type", + [ + service.DeleteOrderRequest, + dict, + ], +) +def test_delete_order(request_type, transport: str = "grpc"): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_order), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.delete_order(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = service.DeleteOrderRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_delete_order_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_order), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.delete_order() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == service.DeleteOrderRequest() + + +def test_delete_order_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. 
+ client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = service.DeleteOrderRequest( + name="name_value", + request_id="request_id_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_order), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.delete_order(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == service.DeleteOrderRequest( + name="name_value", + request_id="request_id_value", + ) + + +def test_delete_order_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete_order in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.delete_order] = mock_rpc + request = {} + client.delete_order(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete_order(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_delete_order_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = GDCHardwareManagementAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_order), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.delete_order() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == service.DeleteOrderRequest() + + +@pytest.mark.asyncio +async def test_delete_order_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = GDCHardwareManagementAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.delete_order + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[ + client._client._transport.delete_order + ] = mock_object + + request = {} + await client.delete_order(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.delete_order(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + +@pytest.mark.asyncio +async def test_delete_order_async( + transport: str = "grpc_asyncio", request_type=service.DeleteOrderRequest +): + client = GDCHardwareManagementAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_order), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.delete_order(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = service.DeleteOrderRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_delete_order_async_from_dict(): + await test_delete_order_async(request_type=dict) + + +def test_delete_order_field_headers(): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.DeleteOrderRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.delete_order), "__call__") as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.delete_order(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_delete_order_field_headers_async(): + client = GDCHardwareManagementAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.DeleteOrderRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_order), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.delete_order(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_delete_order_flattened(): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_order), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.delete_order( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_delete_order_flattened_error(): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_order( + service.DeleteOrderRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_delete_order_flattened_async(): + client = GDCHardwareManagementAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_order), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.delete_order( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_delete_order_flattened_error_async(): + client = GDCHardwareManagementAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.delete_order( + service.DeleteOrderRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + service.SubmitOrderRequest, + dict, + ], +) +def test_submit_order(request_type, transport: str = "grpc"): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.submit_order), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.submit_order(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = service.SubmitOrderRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_submit_order_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.submit_order), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.submit_order() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == service.SubmitOrderRequest() + + +def test_submit_order_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = service.SubmitOrderRequest( + name="name_value", + request_id="request_id_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.submit_order), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.submit_order(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == service.SubmitOrderRequest( + name="name_value", + request_id="request_id_value", + ) + + +def test_submit_order_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.submit_order in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.submit_order] = mock_rpc + request = {} + client.submit_order(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.submit_order(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_submit_order_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = GDCHardwareManagementAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.submit_order), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.submit_order() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == service.SubmitOrderRequest() + + +@pytest.mark.asyncio +async def test_submit_order_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = GDCHardwareManagementAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.submit_order + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[ + client._client._transport.submit_order + ] = mock_object + + request = {} + await client.submit_order(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_object.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.submit_order(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + +@pytest.mark.asyncio +async def test_submit_order_async( + transport: str = "grpc_asyncio", request_type=service.SubmitOrderRequest +): + client = GDCHardwareManagementAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.submit_order), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.submit_order(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = service.SubmitOrderRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_submit_order_async_from_dict(): + await test_submit_order_async(request_type=dict) + + +def test_submit_order_field_headers(): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.SubmitOrderRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.submit_order), "__call__") as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.submit_order(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_submit_order_field_headers_async(): + client = GDCHardwareManagementAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.SubmitOrderRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.submit_order), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.submit_order(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_submit_order_flattened(): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.submit_order), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.submit_order( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_submit_order_flattened_error(): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.submit_order( + service.SubmitOrderRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_submit_order_flattened_async(): + client = GDCHardwareManagementAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.submit_order), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.submit_order( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_submit_order_flattened_error_async(): + client = GDCHardwareManagementAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.submit_order( + service.SubmitOrderRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + service.ListSitesRequest, + dict, + ], +) +def test_list_sites(request_type, transport: str = "grpc"): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_sites), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = service.ListSitesResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + response = client.list_sites(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = service.ListSitesRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListSitesPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +def test_list_sites_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_sites), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.list_sites() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == service.ListSitesRequest() + + +def test_list_sites_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = service.ListSitesRequest( + parent="parent_value", + page_token="page_token_value", + filter="filter_value", + order_by="order_by_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_sites), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.list_sites(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == service.ListSitesRequest( + parent="parent_value", + page_token="page_token_value", + filter="filter_value", + order_by="order_by_value", + ) + + +def test_list_sites_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_sites in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.list_sites] = mock_rpc + request = {} + client.list_sites(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.list_sites(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_list_sites_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = GDCHardwareManagementAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_sites), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + service.ListSitesResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + ) + response = await client.list_sites() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == service.ListSitesRequest() + + +@pytest.mark.asyncio +async def test_list_sites_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = GDCHardwareManagementAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.list_sites + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[ + client._client._transport.list_sites + ] = mock_object + + request = {} + await client.list_sites(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + await client.list_sites(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + +@pytest.mark.asyncio +async def test_list_sites_async( + transport: str = "grpc_asyncio", request_type=service.ListSitesRequest +): + client = GDCHardwareManagementAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_sites), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + service.ListSitesResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + ) + response = await client.list_sites(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = service.ListSitesRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListSitesAsyncPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +@pytest.mark.asyncio +async def test_list_sites_async_from_dict(): + await test_list_sites_async(request_type=dict) + + +def test_list_sites_field_headers(): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.ListSitesRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_sites), "__call__") as call: + call.return_value = service.ListSitesResponse() + client.list_sites(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_sites_field_headers_async(): + client = GDCHardwareManagementAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.ListSitesRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_sites), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + service.ListSitesResponse() + ) + await client.list_sites(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_list_sites_flattened(): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_sites), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = service.ListSitesResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_sites( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +def test_list_sites_flattened_error(): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.list_sites( + service.ListSitesRequest(), + parent="parent_value", + ) + + +@pytest.mark.asyncio +async def test_list_sites_flattened_async(): + client = GDCHardwareManagementAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_sites), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = service.ListSitesResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + service.ListSitesResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_sites( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_list_sites_flattened_error_async(): + client = GDCHardwareManagementAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.list_sites( + service.ListSitesRequest(), + parent="parent_value", + ) + + +def test_list_sites_pager(transport_name: str = "grpc"): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_sites), "__call__") as call: + # Set the response to a series of pages. + call.side_effect = ( + service.ListSitesResponse( + sites=[ + resources.Site(), + resources.Site(), + resources.Site(), + ], + next_page_token="abc", + ), + service.ListSitesResponse( + sites=[], + next_page_token="def", + ), + service.ListSitesResponse( + sites=[ + resources.Site(), + ], + next_page_token="ghi", + ), + service.ListSitesResponse( + sites=[ + resources.Site(), + resources.Site(), + ], + ), + RuntimeError, + ) + + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + ) + pager = client.list_sites(request={}) + + assert pager._metadata == expected_metadata + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, resources.Site) for i in results) + + +def test_list_sites_pages(transport_name: str = "grpc"): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_sites), "__call__") as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + service.ListSitesResponse( + sites=[ + resources.Site(), + resources.Site(), + resources.Site(), + ], + next_page_token="abc", + ), + service.ListSitesResponse( + sites=[], + next_page_token="def", + ), + service.ListSitesResponse( + sites=[ + resources.Site(), + ], + next_page_token="ghi", + ), + service.ListSitesResponse( + sites=[ + resources.Site(), + resources.Site(), + ], + ), + RuntimeError, + ) + pages = list(client.list_sites(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_list_sites_async_pager(): + client = GDCHardwareManagementAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_sites), "__call__", new_callable=mock.AsyncMock + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + service.ListSitesResponse( + sites=[ + resources.Site(), + resources.Site(), + resources.Site(), + ], + next_page_token="abc", + ), + service.ListSitesResponse( + sites=[], + next_page_token="def", + ), + service.ListSitesResponse( + sites=[ + resources.Site(), + ], + next_page_token="ghi", + ), + service.ListSitesResponse( + sites=[ + resources.Site(), + resources.Site(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_sites( + request={}, + ) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, resources.Site) for i in responses) + + +@pytest.mark.asyncio +async def test_list_sites_async_pages(): + client = GDCHardwareManagementAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_sites), "__call__", new_callable=mock.AsyncMock + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + service.ListSitesResponse( + sites=[ + resources.Site(), + resources.Site(), + resources.Site(), + ], + next_page_token="abc", + ), + service.ListSitesResponse( + sites=[], + next_page_token="def", + ), + service.ListSitesResponse( + sites=[ + resources.Site(), + ], + next_page_token="ghi", + ), + service.ListSitesResponse( + sites=[ + resources.Site(), + resources.Site(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_sites(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + service.GetSiteRequest, + dict, + ], +) +def test_get_site(request_type, transport: str = "grpc"): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
+ request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_site), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = resources.Site( + name="name_value", + display_name="display_name_value", + description="description_value", + google_maps_pin_uri="google_maps_pin_uri_value", + notes="notes_value", + ) + response = client.get_site(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = service.GetSiteRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, resources.Site) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.description == "description_value" + assert response.google_maps_pin_uri == "google_maps_pin_uri_value" + assert response.notes == "notes_value" + + +def test_get_site_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_site), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.get_site() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == service.GetSiteRequest() + + +def test_get_site_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = service.GetSiteRequest( + name="name_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_site), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.get_site(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == service.GetSiteRequest( + name="name_value", + ) + + +def test_get_site_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_site in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client._transport._wrapped_methods[client._transport.get_site] = mock_rpc + request = {} + client.get_site(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.get_site(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_get_site_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = GDCHardwareManagementAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_site), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + resources.Site( + name="name_value", + display_name="display_name_value", + description="description_value", + google_maps_pin_uri="google_maps_pin_uri_value", + notes="notes_value", + ) + ) + response = await client.get_site() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == service.GetSiteRequest() + + +@pytest.mark.asyncio +async def test_get_site_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = GDCHardwareManagementAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.get_site + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[ + client._client._transport.get_site + ] = mock_object + + request = {} + await client.get_site(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + await client.get_site(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + +@pytest.mark.asyncio +async def test_get_site_async( + transport: str = "grpc_asyncio", request_type=service.GetSiteRequest +): + client = GDCHardwareManagementAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_site), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + resources.Site( + name="name_value", + display_name="display_name_value", + description="description_value", + google_maps_pin_uri="google_maps_pin_uri_value", + notes="notes_value", + ) + ) + response = await client.get_site(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = service.GetSiteRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, resources.Site) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.description == "description_value" + assert response.google_maps_pin_uri == "google_maps_pin_uri_value" + assert response.notes == "notes_value" + + +@pytest.mark.asyncio +async def test_get_site_async_from_dict(): + await test_get_site_async(request_type=dict) + + +def test_get_site_field_headers(): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.GetSiteRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_site), "__call__") as call: + call.return_value = resources.Site() + client.get_site(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_site_field_headers_async(): + client = GDCHardwareManagementAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.GetSiteRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_site), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.Site()) + await client.get_site(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_get_site_flattened(): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_site), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = resources.Site() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_site( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_get_site_flattened_error(): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_site( + service.GetSiteRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_get_site_flattened_async(): + client = GDCHardwareManagementAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_site), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = resources.Site() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.Site()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_site( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_get_site_flattened_error_async(): + client = GDCHardwareManagementAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.get_site( + service.GetSiteRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + service.CreateSiteRequest, + dict, + ], +) +def test_create_site(request_type, transport: str = "grpc"): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_site), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.create_site(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = service.CreateSiteRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_create_site_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_site), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.create_site() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == service.CreateSiteRequest() + + +def test_create_site_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = service.CreateSiteRequest( + parent="parent_value", + site_id="site_id_value", + request_id="request_id_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_site), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.create_site(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == service.CreateSiteRequest( + parent="parent_value", + site_id="site_id_value", + request_id="request_id_value", + ) + + +def test_create_site_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.create_site in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.create_site] = mock_rpc + request = {} + client.create_site(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.create_site(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_create_site_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = GDCHardwareManagementAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_site), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.create_site() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == service.CreateSiteRequest() + + +@pytest.mark.asyncio +async def test_create_site_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = GDCHardwareManagementAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.create_site + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[ + client._client._transport.create_site + ] = mock_object + + request = {} + await client.create_site(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.create_site(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + +@pytest.mark.asyncio +async def test_create_site_async( + transport: str = "grpc_asyncio", request_type=service.CreateSiteRequest +): + client = GDCHardwareManagementAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_site), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.create_site(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = service.CreateSiteRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_create_site_async_from_dict(): + await test_create_site_async(request_type=dict) + + +def test_create_site_field_headers(): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.CreateSiteRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.create_site), "__call__") as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.create_site(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_create_site_field_headers_async(): + client = GDCHardwareManagementAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.CreateSiteRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_site), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.create_site(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_create_site_flattened(): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_site), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.create_site( + parent="parent_value", + site=resources.Site(name="name_value"), + site_id="site_id_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].site + mock_val = resources.Site(name="name_value") + assert arg == mock_val + arg = args[0].site_id + mock_val = "site_id_value" + assert arg == mock_val + + +def test_create_site_flattened_error(): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_site( + service.CreateSiteRequest(), + parent="parent_value", + site=resources.Site(name="name_value"), + site_id="site_id_value", + ) + + +@pytest.mark.asyncio +async def test_create_site_flattened_async(): + client = GDCHardwareManagementAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_site), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = operations_pb2.Operation(name="operations/op") + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.create_site( + parent="parent_value", + site=resources.Site(name="name_value"), + site_id="site_id_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].site + mock_val = resources.Site(name="name_value") + assert arg == mock_val + arg = args[0].site_id + mock_val = "site_id_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_create_site_flattened_error_async(): + client = GDCHardwareManagementAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.create_site( + service.CreateSiteRequest(), + parent="parent_value", + site=resources.Site(name="name_value"), + site_id="site_id_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + service.UpdateSiteRequest, + dict, + ], +) +def test_update_site(request_type, transport: str = "grpc"): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_site), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.update_site(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = service.UpdateSiteRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_update_site_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_site), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.update_site() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == service.UpdateSiteRequest() + + +def test_update_site_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. 
+ client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = service.UpdateSiteRequest( + request_id="request_id_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_site), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.update_site(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == service.UpdateSiteRequest( + request_id="request_id_value", + ) + + +def test_update_site_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.update_site in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.update_site] = mock_rpc + request = {} + client.update_site(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.update_site(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_update_site_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = GDCHardwareManagementAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_site), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.update_site() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == service.UpdateSiteRequest() + + +@pytest.mark.asyncio +async def test_update_site_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = GDCHardwareManagementAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.update_site + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[ + client._client._transport.update_site + ] = mock_object + + request = {} + await client.update_site(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.update_site(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + +@pytest.mark.asyncio +async def test_update_site_async( + transport: str = "grpc_asyncio", request_type=service.UpdateSiteRequest +): + client = GDCHardwareManagementAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_site), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.update_site(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = service.UpdateSiteRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_update_site_async_from_dict(): + await test_update_site_async(request_type=dict) + + +def test_update_site_field_headers(): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.UpdateSiteRequest() + + request.site.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.update_site), "__call__") as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.update_site(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "site.name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_update_site_field_headers_async(): + client = GDCHardwareManagementAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.UpdateSiteRequest() + + request.site.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_site), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.update_site(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "site.name=name_value", + ) in kw["metadata"] + + +def test_update_site_flattened(): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_site), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.update_site( + site=resources.Site(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].site + mock_val = resources.Site(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + assert arg == mock_val + + +def test_update_site_flattened_error(): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_site( + service.UpdateSiteRequest(), + site=resources.Site(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +@pytest.mark.asyncio +async def test_update_site_flattened_async(): + client = GDCHardwareManagementAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_site), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = operations_pb2.Operation(name="operations/op") + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.update_site( + site=resources.Site(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].site + mock_val = resources.Site(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_update_site_flattened_error_async(): + client = GDCHardwareManagementAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.update_site( + service.UpdateSiteRequest(), + site=resources.Site(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +@pytest.mark.parametrize( + "request_type", + [ + service.ListHardwareGroupsRequest, + dict, + ], +) +def test_list_hardware_groups(request_type, transport: str = "grpc"): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_hardware_groups), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = service.ListHardwareGroupsResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + response = client.list_hardware_groups(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = service.ListHardwareGroupsRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListHardwareGroupsPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +def test_list_hardware_groups_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_hardware_groups), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.list_hardware_groups() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == service.ListHardwareGroupsRequest() + + +def test_list_hardware_groups_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = service.ListHardwareGroupsRequest( + parent="parent_value", + page_token="page_token_value", + filter="filter_value", + order_by="order_by_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_hardware_groups), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.list_hardware_groups(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == service.ListHardwareGroupsRequest( + parent="parent_value", + page_token="page_token_value", + filter="filter_value", + order_by="order_by_value", + ) + + +def test_list_hardware_groups_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.list_hardware_groups in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.list_hardware_groups + ] = mock_rpc + request = {} + client.list_hardware_groups(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_hardware_groups(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_list_hardware_groups_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = GDCHardwareManagementAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_hardware_groups), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + service.ListHardwareGroupsResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + ) + response = await client.list_hardware_groups() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == service.ListHardwareGroupsRequest() + + +@pytest.mark.asyncio +async def test_list_hardware_groups_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = GDCHardwareManagementAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.list_hardware_groups + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[ + client._client._transport.list_hardware_groups + ] = mock_object + + request = {} + await client.list_hardware_groups(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + await client.list_hardware_groups(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + +@pytest.mark.asyncio +async def test_list_hardware_groups_async( + transport: str = "grpc_asyncio", request_type=service.ListHardwareGroupsRequest +): + client = GDCHardwareManagementAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_hardware_groups), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + service.ListHardwareGroupsResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + ) + response = await client.list_hardware_groups(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = service.ListHardwareGroupsRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListHardwareGroupsAsyncPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +@pytest.mark.asyncio +async def test_list_hardware_groups_async_from_dict(): + await test_list_hardware_groups_async(request_type=dict) + + +def test_list_hardware_groups_field_headers(): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = service.ListHardwareGroupsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_hardware_groups), "__call__" + ) as call: + call.return_value = service.ListHardwareGroupsResponse() + client.list_hardware_groups(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_hardware_groups_field_headers_async(): + client = GDCHardwareManagementAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.ListHardwareGroupsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_hardware_groups), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + service.ListHardwareGroupsResponse() + ) + await client.list_hardware_groups(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_list_hardware_groups_flattened(): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_hardware_groups), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = service.ListHardwareGroupsResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_hardware_groups( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +def test_list_hardware_groups_flattened_error(): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_hardware_groups( + service.ListHardwareGroupsRequest(), + parent="parent_value", + ) + + +@pytest.mark.asyncio +async def test_list_hardware_groups_flattened_async(): + client = GDCHardwareManagementAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_hardware_groups), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = service.ListHardwareGroupsResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + service.ListHardwareGroupsResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_hardware_groups( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_list_hardware_groups_flattened_error_async(): + client = GDCHardwareManagementAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.list_hardware_groups( + service.ListHardwareGroupsRequest(), + parent="parent_value", + ) + + +def test_list_hardware_groups_pager(transport_name: str = "grpc"): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_hardware_groups), "__call__" + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + service.ListHardwareGroupsResponse( + hardware_groups=[ + resources.HardwareGroup(), + resources.HardwareGroup(), + resources.HardwareGroup(), + ], + next_page_token="abc", + ), + service.ListHardwareGroupsResponse( + hardware_groups=[], + next_page_token="def", + ), + service.ListHardwareGroupsResponse( + hardware_groups=[ + resources.HardwareGroup(), + ], + next_page_token="ghi", + ), + service.ListHardwareGroupsResponse( + hardware_groups=[ + resources.HardwareGroup(), + resources.HardwareGroup(), + ], + ), + RuntimeError, + ) + + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + ) + pager = client.list_hardware_groups(request={}) + + assert pager._metadata == expected_metadata + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, resources.HardwareGroup) for i in results) + + +def test_list_hardware_groups_pages(transport_name: str = "grpc"): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_hardware_groups), "__call__" + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + service.ListHardwareGroupsResponse( + hardware_groups=[ + resources.HardwareGroup(), + resources.HardwareGroup(), + resources.HardwareGroup(), + ], + next_page_token="abc", + ), + service.ListHardwareGroupsResponse( + hardware_groups=[], + next_page_token="def", + ), + service.ListHardwareGroupsResponse( + hardware_groups=[ + resources.HardwareGroup(), + ], + next_page_token="ghi", + ), + service.ListHardwareGroupsResponse( + hardware_groups=[ + resources.HardwareGroup(), + resources.HardwareGroup(), + ], + ), + RuntimeError, + ) + pages = list(client.list_hardware_groups(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_list_hardware_groups_async_pager(): + client = GDCHardwareManagementAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_hardware_groups), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + service.ListHardwareGroupsResponse( + hardware_groups=[ + resources.HardwareGroup(), + resources.HardwareGroup(), + resources.HardwareGroup(), + ], + next_page_token="abc", + ), + service.ListHardwareGroupsResponse( + hardware_groups=[], + next_page_token="def", + ), + service.ListHardwareGroupsResponse( + hardware_groups=[ + resources.HardwareGroup(), + ], + next_page_token="ghi", + ), + service.ListHardwareGroupsResponse( + hardware_groups=[ + resources.HardwareGroup(), + resources.HardwareGroup(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_hardware_groups( + request={}, + ) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, resources.HardwareGroup) for i in responses) + + +@pytest.mark.asyncio +async def test_list_hardware_groups_async_pages(): + client = GDCHardwareManagementAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_hardware_groups), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + service.ListHardwareGroupsResponse( + hardware_groups=[ + resources.HardwareGroup(), + resources.HardwareGroup(), + resources.HardwareGroup(), + ], + next_page_token="abc", + ), + service.ListHardwareGroupsResponse( + hardware_groups=[], + next_page_token="def", + ), + service.ListHardwareGroupsResponse( + hardware_groups=[ + resources.HardwareGroup(), + ], + next_page_token="ghi", + ), + service.ListHardwareGroupsResponse( + hardware_groups=[ + resources.HardwareGroup(), + resources.HardwareGroup(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_hardware_groups(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + service.GetHardwareGroupRequest, + dict, + ], +) +def test_get_hardware_group(request_type, transport: str = "grpc"): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_hardware_group), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = resources.HardwareGroup( + name="name_value", + hardware_count=1494, + site="site_value", + state=resources.HardwareGroup.State.ADDITIONAL_INFO_NEEDED, + zone="zone_value", + ) + response = client.get_hardware_group(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = service.GetHardwareGroupRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, resources.HardwareGroup) + assert response.name == "name_value" + assert response.hardware_count == 1494 + assert response.site == "site_value" + assert response.state == resources.HardwareGroup.State.ADDITIONAL_INFO_NEEDED + assert response.zone == "zone_value" + + +def test_get_hardware_group_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_hardware_group), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.get_hardware_group() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == service.GetHardwareGroupRequest() + + +def test_get_hardware_group_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. 
+ client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = service.GetHardwareGroupRequest( + name="name_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_hardware_group), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.get_hardware_group(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == service.GetHardwareGroupRequest( + name="name_value", + ) + + +def test_get_hardware_group_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.get_hardware_group in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.get_hardware_group + ] = mock_rpc + request = {} + client.get_hardware_group(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.get_hardware_group(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_get_hardware_group_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = GDCHardwareManagementAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_hardware_group), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + resources.HardwareGroup( + name="name_value", + hardware_count=1494, + site="site_value", + state=resources.HardwareGroup.State.ADDITIONAL_INFO_NEEDED, + zone="zone_value", + ) + ) + response = await client.get_hardware_group() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == service.GetHardwareGroupRequest() + + +@pytest.mark.asyncio +async def test_get_hardware_group_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = GDCHardwareManagementAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.get_hardware_group + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[ + client._client._transport.get_hardware_group + ] = mock_object + + request = {} + await client.get_hardware_group(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + await client.get_hardware_group(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + +@pytest.mark.asyncio +async def test_get_hardware_group_async( + transport: str = "grpc_asyncio", request_type=service.GetHardwareGroupRequest +): + client = GDCHardwareManagementAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_hardware_group), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + resources.HardwareGroup( + name="name_value", + hardware_count=1494, + site="site_value", + state=resources.HardwareGroup.State.ADDITIONAL_INFO_NEEDED, + zone="zone_value", + ) + ) + response = await client.get_hardware_group(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = service.GetHardwareGroupRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, resources.HardwareGroup) + assert response.name == "name_value" + assert response.hardware_count == 1494 + assert response.site == "site_value" + assert response.state == resources.HardwareGroup.State.ADDITIONAL_INFO_NEEDED + assert response.zone == "zone_value" + + +@pytest.mark.asyncio +async def test_get_hardware_group_async_from_dict(): + await test_get_hardware_group_async(request_type=dict) + + +def test_get_hardware_group_field_headers(): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.GetHardwareGroupRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_hardware_group), "__call__" + ) as call: + call.return_value = resources.HardwareGroup() + client.get_hardware_group(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_hardware_group_field_headers_async(): + client = GDCHardwareManagementAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.GetHardwareGroupRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_hardware_group), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + resources.HardwareGroup() + ) + await client.get_hardware_group(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_get_hardware_group_flattened(): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_hardware_group), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = resources.HardwareGroup() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_hardware_group( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_get_hardware_group_flattened_error(): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get_hardware_group( + service.GetHardwareGroupRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_get_hardware_group_flattened_async(): + client = GDCHardwareManagementAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_hardware_group), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = resources.HardwareGroup() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + resources.HardwareGroup() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_hardware_group( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_get_hardware_group_flattened_error_async(): + client = GDCHardwareManagementAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.get_hardware_group( + service.GetHardwareGroupRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + service.CreateHardwareGroupRequest, + dict, + ], +) +def test_create_hardware_group(request_type, transport: str = "grpc"): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_hardware_group), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.create_hardware_group(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = service.CreateHardwareGroupRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_create_hardware_group_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_hardware_group), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.create_hardware_group() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == service.CreateHardwareGroupRequest() + + +def test_create_hardware_group_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = service.CreateHardwareGroupRequest( + parent="parent_value", + hardware_group_id="hardware_group_id_value", + request_id="request_id_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_hardware_group), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.create_hardware_group(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == service.CreateHardwareGroupRequest( + parent="parent_value", + hardware_group_id="hardware_group_id_value", + request_id="request_id_value", + ) + + +def test_create_hardware_group_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.create_hardware_group + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.create_hardware_group + ] = mock_rpc + request = {} + client.create_hardware_group(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.create_hardware_group(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_create_hardware_group_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = GDCHardwareManagementAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_hardware_group), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
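+        # The raw Operation proto returned by the stub is wrapped by the
+        # client into an api_core future (see the
+        # isinstance(response, future.Future) assertions nearby).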
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.create_hardware_group() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == service.CreateHardwareGroupRequest() + + +@pytest.mark.asyncio +async def test_create_hardware_group_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = GDCHardwareManagementAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.create_hardware_group + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[ + client._client._transport.create_hardware_group + ] = mock_object + + request = {} + await client.create_hardware_group(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.create_hardware_group(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + +@pytest.mark.asyncio +async def test_create_hardware_group_async( + transport: str = "grpc_asyncio", request_type=service.CreateHardwareGroupRequest +): + client = GDCHardwareManagementAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_hardware_group), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.create_hardware_group(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = service.CreateHardwareGroupRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_create_hardware_group_async_from_dict(): + await test_create_hardware_group_async(request_type=dict) + + +def test_create_hardware_group_field_headers(): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.CreateHardwareGroupRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.create_hardware_group), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.create_hardware_group(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_create_hardware_group_field_headers_async(): + client = GDCHardwareManagementAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.CreateHardwareGroupRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_hardware_group), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.create_hardware_group(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_create_hardware_group_flattened(): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_hardware_group), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.create_hardware_group( + parent="parent_value", + hardware_group=resources.HardwareGroup(name="name_value"), + hardware_group_id="hardware_group_id_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].hardware_group + mock_val = resources.HardwareGroup(name="name_value") + assert arg == mock_val + arg = args[0].hardware_group_id + mock_val = "hardware_group_id_value" + assert arg == mock_val + + +def test_create_hardware_group_flattened_error(): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_hardware_group( + service.CreateHardwareGroupRequest(), + parent="parent_value", + hardware_group=resources.HardwareGroup(name="name_value"), + hardware_group_id="hardware_group_id_value", + ) + + +@pytest.mark.asyncio +async def test_create_hardware_group_flattened_async(): + client = GDCHardwareManagementAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
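+    # Note: the first call.return_value assignment in the block below is
+    # immediately overwritten by the FakeUnaryUnaryCall wrapper; the async
+    # client only ever sees the awaitable value.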
+ with mock.patch.object( + type(client.transport.create_hardware_group), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.create_hardware_group( + parent="parent_value", + hardware_group=resources.HardwareGroup(name="name_value"), + hardware_group_id="hardware_group_id_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].hardware_group + mock_val = resources.HardwareGroup(name="name_value") + assert arg == mock_val + arg = args[0].hardware_group_id + mock_val = "hardware_group_id_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_create_hardware_group_flattened_error_async(): + client = GDCHardwareManagementAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.create_hardware_group( + service.CreateHardwareGroupRequest(), + parent="parent_value", + hardware_group=resources.HardwareGroup(name="name_value"), + hardware_group_id="hardware_group_id_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + service.UpdateHardwareGroupRequest, + dict, + ], +) +def test_update_hardware_group(request_type, transport: str = "grpc"): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_hardware_group), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.update_hardware_group(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = service.UpdateHardwareGroupRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_update_hardware_group_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_hardware_group), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.update_hardware_group() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == service.UpdateHardwareGroupRequest() + + +def test_update_hardware_group_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = service.UpdateHardwareGroupRequest( + request_id="request_id_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_hardware_group), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.update_hardware_group(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == service.UpdateHardwareGroupRequest( + request_id="request_id_value", + ) + + +def test_update_hardware_group_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.update_hardware_group + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.update_hardware_group + ] = mock_rpc + request = {} + client.update_hardware_group(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.update_hardware_group(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_update_hardware_group_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = GDCHardwareManagementAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_hardware_group), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.update_hardware_group() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == service.UpdateHardwareGroupRequest() + + +@pytest.mark.asyncio +async def test_update_hardware_group_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = GDCHardwareManagementAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.update_hardware_group + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[ + client._client._transport.update_hardware_group + ] = mock_object + + request = {} + await client.update_hardware_group(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.update_hardware_group(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + +@pytest.mark.asyncio +async def test_update_hardware_group_async( + transport: str = "grpc_asyncio", request_type=service.UpdateHardwareGroupRequest +): + client = GDCHardwareManagementAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_hardware_group), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.update_hardware_group(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = service.UpdateHardwareGroupRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_update_hardware_group_async_from_dict(): + await test_update_hardware_group_async(request_type=dict) + + +def test_update_hardware_group_field_headers(): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.UpdateHardwareGroupRequest() + + request.hardware_group.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
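+    # For Update RPCs the routing header is derived from the nested resource
+    # name (hardware_group.name), as the metadata assertion below shows.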
+ with mock.patch.object( + type(client.transport.update_hardware_group), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.update_hardware_group(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "hardware_group.name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_update_hardware_group_field_headers_async(): + client = GDCHardwareManagementAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.UpdateHardwareGroupRequest() + + request.hardware_group.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_hardware_group), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.update_hardware_group(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "hardware_group.name=name_value", + ) in kw["metadata"] + + +def test_update_hardware_group_flattened(): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_hardware_group), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.update_hardware_group( + hardware_group=resources.HardwareGroup(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].hardware_group + mock_val = resources.HardwareGroup(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + assert arg == mock_val + + +def test_update_hardware_group_flattened_error(): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_hardware_group( + service.UpdateHardwareGroupRequest(), + hardware_group=resources.HardwareGroup(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +@pytest.mark.asyncio +async def test_update_hardware_group_flattened_async(): + client = GDCHardwareManagementAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.update_hardware_group), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.update_hardware_group( + hardware_group=resources.HardwareGroup(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].hardware_group + mock_val = resources.HardwareGroup(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_update_hardware_group_flattened_error_async(): + client = GDCHardwareManagementAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.update_hardware_group( + service.UpdateHardwareGroupRequest(), + hardware_group=resources.HardwareGroup(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +@pytest.mark.parametrize( + "request_type", + [ + service.DeleteHardwareGroupRequest, + dict, + ], +) +def test_delete_hardware_group(request_type, transport: str = "grpc"): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_hardware_group), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.delete_hardware_group(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = service.DeleteHardwareGroupRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_delete_hardware_group_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_hardware_group), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.delete_hardware_group() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == service.DeleteHardwareGroupRequest() + + +def test_delete_hardware_group_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = service.DeleteHardwareGroupRequest( + name="name_value", + request_id="request_id_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_hardware_group), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.delete_hardware_group(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == service.DeleteHardwareGroupRequest( + name="name_value", + request_id="request_id_value", + ) + + +def test_delete_hardware_group_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.delete_hardware_group + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.delete_hardware_group + ] = mock_rpc + request = {} + client.delete_hardware_group(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete_hardware_group(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_delete_hardware_group_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = GDCHardwareManagementAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_hardware_group), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.delete_hardware_group() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == service.DeleteHardwareGroupRequest() + + +@pytest.mark.asyncio +async def test_delete_hardware_group_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = GDCHardwareManagementAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.delete_hardware_group + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[ + client._client._transport.delete_hardware_group + ] = mock_object + + request = {} + await client.delete_hardware_group(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.delete_hardware_group(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + +@pytest.mark.asyncio +async def test_delete_hardware_group_async( + transport: str = "grpc_asyncio", request_type=service.DeleteHardwareGroupRequest +): + client = GDCHardwareManagementAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_hardware_group), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.delete_hardware_group(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = service.DeleteHardwareGroupRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_delete_hardware_group_async_from_dict(): + await test_delete_hardware_group_async(request_type=dict) + + +def test_delete_hardware_group_field_headers(): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.DeleteHardwareGroupRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.delete_hardware_group), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.delete_hardware_group(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_delete_hardware_group_field_headers_async(): + client = GDCHardwareManagementAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.DeleteHardwareGroupRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_hardware_group), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.delete_hardware_group(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_delete_hardware_group_flattened(): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_hardware_group), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.delete_hardware_group( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_delete_hardware_group_flattened_error(): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_hardware_group( + service.DeleteHardwareGroupRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_delete_hardware_group_flattened_async(): + client = GDCHardwareManagementAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_hardware_group), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
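+        # The flattened keyword argument is copied into a
+        # DeleteHardwareGroupRequest before the transport is invoked, which is
+        # what the args[0].name assertion below checks.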
+ response = await client.delete_hardware_group( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_delete_hardware_group_flattened_error_async(): + client = GDCHardwareManagementAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.delete_hardware_group( + service.DeleteHardwareGroupRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + service.ListHardwareRequest, + dict, + ], +) +def test_list_hardware(request_type, transport: str = "grpc"): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_hardware), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = service.ListHardwareResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + response = client.list_hardware(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = service.ListHardwareRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListHardwarePager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +def test_list_hardware_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_hardware), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.list_hardware() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == service.ListHardwareRequest() + + +def test_list_hardware_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = service.ListHardwareRequest( + parent="parent_value", + page_token="page_token_value", + filter="filter_value", + order_by="order_by_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.list_hardware), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.list_hardware(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == service.ListHardwareRequest( + parent="parent_value", + page_token="page_token_value", + filter="filter_value", + order_by="order_by_value", + ) + + +def test_list_hardware_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_hardware in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.list_hardware] = mock_rpc + request = {} + client.list_hardware(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_hardware(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_list_hardware_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = GDCHardwareManagementAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_hardware), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + service.ListHardwareResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + ) + response = await client.list_hardware() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == service.ListHardwareRequest() + + +@pytest.mark.asyncio +async def test_list_hardware_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = GDCHardwareManagementAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.list_hardware + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[ + client._client._transport.list_hardware + ] = mock_object + + request = {} + await client.list_hardware(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + await client.list_hardware(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + +@pytest.mark.asyncio +async def test_list_hardware_async( + transport: str = "grpc_asyncio", request_type=service.ListHardwareRequest +): + client = GDCHardwareManagementAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_hardware), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + service.ListHardwareResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + ) + response = await client.list_hardware(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = service.ListHardwareRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListHardwareAsyncPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +@pytest.mark.asyncio +async def test_list_hardware_async_from_dict(): + await test_list_hardware_async(request_type=dict) + + +def test_list_hardware_field_headers(): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.ListHardwareRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.list_hardware), "__call__") as call: + call.return_value = service.ListHardwareResponse() + client.list_hardware(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_hardware_field_headers_async(): + client = GDCHardwareManagementAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.ListHardwareRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_hardware), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + service.ListHardwareResponse() + ) + await client.list_hardware(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_list_hardware_flattened(): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_hardware), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = service.ListHardwareResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_hardware( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +def test_list_hardware_flattened_error(): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_hardware( + service.ListHardwareRequest(), + parent="parent_value", + ) + + +@pytest.mark.asyncio +async def test_list_hardware_flattened_async(): + client = GDCHardwareManagementAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_hardware), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = service.ListHardwareResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + service.ListHardwareResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_hardware( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_list_hardware_flattened_error_async(): + client = GDCHardwareManagementAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.list_hardware( + service.ListHardwareRequest(), + parent="parent_value", + ) + + +def test_list_hardware_pager(transport_name: str = "grpc"): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_hardware), "__call__") as call: + # Set the response to a series of pages. + call.side_effect = ( + service.ListHardwareResponse( + hardware=[ + resources.Hardware(), + resources.Hardware(), + resources.Hardware(), + ], + next_page_token="abc", + ), + service.ListHardwareResponse( + hardware=[], + next_page_token="def", + ), + service.ListHardwareResponse( + hardware=[ + resources.Hardware(), + ], + next_page_token="ghi", + ), + service.ListHardwareResponse( + hardware=[ + resources.Hardware(), + resources.Hardware(), + ], + ), + RuntimeError, + ) + + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + ) + pager = client.list_hardware(request={}) + + assert pager._metadata == expected_metadata + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, resources.Hardware) for i in results) + + +def test_list_hardware_pages(transport_name: str = "grpc"): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_hardware), "__call__") as call: + # Set the response to a series of pages. + call.side_effect = ( + service.ListHardwareResponse( + hardware=[ + resources.Hardware(), + resources.Hardware(), + resources.Hardware(), + ], + next_page_token="abc", + ), + service.ListHardwareResponse( + hardware=[], + next_page_token="def", + ), + service.ListHardwareResponse( + hardware=[ + resources.Hardware(), + ], + next_page_token="ghi", + ), + service.ListHardwareResponse( + hardware=[ + resources.Hardware(), + resources.Hardware(), + ], + ), + RuntimeError, + ) + pages = list(client.list_hardware(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_list_hardware_async_pager(): + client = GDCHardwareManagementAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_hardware), "__call__", new_callable=mock.AsyncMock + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + service.ListHardwareResponse( + hardware=[ + resources.Hardware(), + resources.Hardware(), + resources.Hardware(), + ], + next_page_token="abc", + ), + service.ListHardwareResponse( + hardware=[], + next_page_token="def", + ), + service.ListHardwareResponse( + hardware=[ + resources.Hardware(), + ], + next_page_token="ghi", + ), + service.ListHardwareResponse( + hardware=[ + resources.Hardware(), + resources.Hardware(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_hardware( + request={}, + ) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, resources.Hardware) for i in responses) + + +@pytest.mark.asyncio +async def test_list_hardware_async_pages(): + client = GDCHardwareManagementAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_hardware), "__call__", new_callable=mock.AsyncMock + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + service.ListHardwareResponse( + hardware=[ + resources.Hardware(), + resources.Hardware(), + resources.Hardware(), + ], + next_page_token="abc", + ), + service.ListHardwareResponse( + hardware=[], + next_page_token="def", + ), + service.ListHardwareResponse( + hardware=[ + resources.Hardware(), + ], + next_page_token="ghi", + ), + service.ListHardwareResponse( + hardware=[ + resources.Hardware(), + resources.Hardware(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_hardware(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + service.GetHardwareRequest, + dict, + ], +) +def test_get_hardware(request_type, transport: str = "grpc"): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_hardware), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = resources.Hardware( + name="name_value", + display_name="display_name_value", + order="order_value", + hardware_group="hardware_group_value", + site="site_value", + state=resources.Hardware.State.ADDITIONAL_INFO_NEEDED, + ciq_uri="ciq_uri_value", + zone="zone_value", + ) + response = client.get_hardware(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = service.GetHardwareRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, resources.Hardware) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.order == "order_value" + assert response.hardware_group == "hardware_group_value" + assert response.site == "site_value" + assert response.state == resources.Hardware.State.ADDITIONAL_INFO_NEEDED + assert response.ciq_uri == "ciq_uri_value" + assert response.zone == "zone_value" + + +def test_get_hardware_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_hardware), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.get_hardware() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == service.GetHardwareRequest() + + +def test_get_hardware_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = service.GetHardwareRequest( + name="name_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_hardware), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.get_hardware(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == service.GetHardwareRequest( + name="name_value", + ) + + +def test_get_hardware_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_hardware in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get_hardware] = mock_rpc + request = {} + client.get_hardware(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.get_hardware(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_get_hardware_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. 
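+    # Even with no request object and no flattened fields, the client is expected
+    # to send a default GetHardwareRequest() to the stub.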
+ client = GDCHardwareManagementAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_hardware), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + resources.Hardware( + name="name_value", + display_name="display_name_value", + order="order_value", + hardware_group="hardware_group_value", + site="site_value", + state=resources.Hardware.State.ADDITIONAL_INFO_NEEDED, + ciq_uri="ciq_uri_value", + zone="zone_value", + ) + ) + response = await client.get_hardware() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == service.GetHardwareRequest() + + +@pytest.mark.asyncio +async def test_get_hardware_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = GDCHardwareManagementAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.get_hardware + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[ + client._client._transport.get_hardware + ] = mock_object + + request = {} + await client.get_hardware(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + await client.get_hardware(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + +@pytest.mark.asyncio +async def test_get_hardware_async( + transport: str = "grpc_asyncio", request_type=service.GetHardwareRequest +): + client = GDCHardwareManagementAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_hardware), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + resources.Hardware( + name="name_value", + display_name="display_name_value", + order="order_value", + hardware_group="hardware_group_value", + site="site_value", + state=resources.Hardware.State.ADDITIONAL_INFO_NEEDED, + ciq_uri="ciq_uri_value", + zone="zone_value", + ) + ) + response = await client.get_hardware(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = service.GetHardwareRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, resources.Hardware) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.order == "order_value" + assert response.hardware_group == "hardware_group_value" + assert response.site == "site_value" + assert response.state == resources.Hardware.State.ADDITIONAL_INFO_NEEDED + assert response.ciq_uri == "ciq_uri_value" + assert response.zone == "zone_value" + + +@pytest.mark.asyncio +async def test_get_hardware_async_from_dict(): + await test_get_hardware_async(request_type=dict) + + +def test_get_hardware_field_headers(): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.GetHardwareRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_hardware), "__call__") as call: + call.return_value = resources.Hardware() + client.get_hardware(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_hardware_field_headers_async(): + client = GDCHardwareManagementAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.GetHardwareRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_hardware), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.Hardware()) + await client.get_hardware(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_get_hardware_flattened(): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_hardware), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = resources.Hardware() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_hardware( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_get_hardware_flattened_error(): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
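+    # Supplying both a request object and flattened keyword arguments is ambiguous,
+    # so the client is expected to reject the call with ValueError before any
+    # request is sent to the service.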
+ with pytest.raises(ValueError): + client.get_hardware( + service.GetHardwareRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_get_hardware_flattened_async(): + client = GDCHardwareManagementAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_hardware), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = resources.Hardware() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.Hardware()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_hardware( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_get_hardware_flattened_error_async(): + client = GDCHardwareManagementAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.get_hardware( + service.GetHardwareRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + service.CreateHardwareRequest, + dict, + ], +) +def test_create_hardware(request_type, transport: str = "grpc"): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_hardware), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.create_hardware(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = service.CreateHardwareRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_create_hardware_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_hardware), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.create_hardware() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == service.CreateHardwareRequest() + + +def test_create_hardware_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. 
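+    # The non-UUID4 string fields (parent, hardware_id) are pinned explicitly below
+    # so the test can verify they reach the stub verbatim; only fields annotated for
+    # automatic population under AIP-4235 should ever be filled in by the client.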
+ client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = service.CreateHardwareRequest( + parent="parent_value", + hardware_id="hardware_id_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_hardware), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.create_hardware(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == service.CreateHardwareRequest( + parent="parent_value", + hardware_id="hardware_id_value", + ) + + +def test_create_hardware_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.create_hardware in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.create_hardware] = mock_rpc + request = {} + client.create_hardware(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.create_hardware(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_create_hardware_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = GDCHardwareManagementAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_hardware), "__call__") as call: + # Designate an appropriate return value for the call. 
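+        # create_hardware is a long-running method, so the stub-level response is a
+        # raw operations_pb2.Operation rather than a resource message.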
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.create_hardware() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == service.CreateHardwareRequest() + + +@pytest.mark.asyncio +async def test_create_hardware_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = GDCHardwareManagementAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.create_hardware + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[ + client._client._transport.create_hardware + ] = mock_object + + request = {} + await client.create_hardware(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.create_hardware(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + +@pytest.mark.asyncio +async def test_create_hardware_async( + transport: str = "grpc_asyncio", request_type=service.CreateHardwareRequest +): + client = GDCHardwareManagementAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_hardware), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.create_hardware(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = service.CreateHardwareRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_create_hardware_async_from_dict(): + await test_create_hardware_async(request_type=dict) + + +def test_create_hardware_field_headers(): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.CreateHardwareRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.create_hardware), "__call__") as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.create_hardware(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_create_hardware_field_headers_async(): + client = GDCHardwareManagementAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.CreateHardwareRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_hardware), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.create_hardware(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_create_hardware_flattened(): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_hardware), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.create_hardware( + parent="parent_value", + hardware=resources.Hardware(name="name_value"), + hardware_id="hardware_id_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].hardware + mock_val = resources.Hardware(name="name_value") + assert arg == mock_val + arg = args[0].hardware_id + mock_val = "hardware_id_value" + assert arg == mock_val + + +def test_create_hardware_flattened_error(): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_hardware( + service.CreateHardwareRequest(), + parent="parent_value", + hardware=resources.Hardware(name="name_value"), + hardware_id="hardware_id_value", + ) + + +@pytest.mark.asyncio +async def test_create_hardware_flattened_async(): + client = GDCHardwareManagementAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_hardware), "__call__") as call: + # Designate an appropriate return value for the call. 
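+        # Note: the first return value assigned below is immediately overwritten by
+        # the awaitable FakeUnaryUnaryCall; only the second assignment is observed
+        # by the async client.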
+ call.return_value = operations_pb2.Operation(name="operations/op") + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.create_hardware( + parent="parent_value", + hardware=resources.Hardware(name="name_value"), + hardware_id="hardware_id_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].hardware + mock_val = resources.Hardware(name="name_value") + assert arg == mock_val + arg = args[0].hardware_id + mock_val = "hardware_id_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_create_hardware_flattened_error_async(): + client = GDCHardwareManagementAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.create_hardware( + service.CreateHardwareRequest(), + parent="parent_value", + hardware=resources.Hardware(name="name_value"), + hardware_id="hardware_id_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + service.UpdateHardwareRequest, + dict, + ], +) +def test_update_hardware(request_type, transport: str = "grpc"): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_hardware), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.update_hardware(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = service.UpdateHardwareRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_update_hardware_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_hardware), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.update_hardware() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == service.UpdateHardwareRequest() + + +def test_update_hardware_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. 
+ client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = service.UpdateHardwareRequest( + request_id="request_id_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_hardware), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.update_hardware(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == service.UpdateHardwareRequest( + request_id="request_id_value", + ) + + +def test_update_hardware_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.update_hardware in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.update_hardware] = mock_rpc + request = {} + client.update_hardware(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.update_hardware(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_update_hardware_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = GDCHardwareManagementAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_hardware), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.update_hardware() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == service.UpdateHardwareRequest() + + +@pytest.mark.asyncio +async def test_update_hardware_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = GDCHardwareManagementAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.update_hardware + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[ + client._client._transport.update_hardware + ] = mock_object + + request = {} + await client.update_hardware(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.update_hardware(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + +@pytest.mark.asyncio +async def test_update_hardware_async( + transport: str = "grpc_asyncio", request_type=service.UpdateHardwareRequest +): + client = GDCHardwareManagementAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_hardware), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.update_hardware(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = service.UpdateHardwareRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_update_hardware_async_from_dict(): + await test_update_hardware_async(request_type=dict) + + +def test_update_hardware_field_headers(): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.UpdateHardwareRequest() + + request.hardware.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
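+    # Routing parameters can come from nested fields: here the header value is
+    # derived from request.hardware.name and is asserted below as
+    # "hardware.name=name_value".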
+ with mock.patch.object(type(client.transport.update_hardware), "__call__") as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.update_hardware(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "hardware.name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_update_hardware_field_headers_async(): + client = GDCHardwareManagementAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.UpdateHardwareRequest() + + request.hardware.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_hardware), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.update_hardware(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "hardware.name=name_value", + ) in kw["metadata"] + + +def test_update_hardware_flattened(): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_hardware), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.update_hardware( + hardware=resources.Hardware(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].hardware + mock_val = resources.Hardware(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + assert arg == mock_val + + +def test_update_hardware_flattened_error(): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_hardware( + service.UpdateHardwareRequest(), + hardware=resources.Hardware(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +@pytest.mark.asyncio +async def test_update_hardware_flattened_async(): + client = GDCHardwareManagementAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_hardware), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = operations_pb2.Operation(name="operations/op") + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.update_hardware( + hardware=resources.Hardware(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].hardware + mock_val = resources.Hardware(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_update_hardware_flattened_error_async(): + client = GDCHardwareManagementAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.update_hardware( + service.UpdateHardwareRequest(), + hardware=resources.Hardware(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +@pytest.mark.parametrize( + "request_type", + [ + service.DeleteHardwareRequest, + dict, + ], +) +def test_delete_hardware(request_type, transport: str = "grpc"): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_hardware), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.delete_hardware(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = service.DeleteHardwareRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_delete_hardware_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_hardware), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.delete_hardware() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == service.DeleteHardwareRequest() + + +def test_delete_hardware_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. 
+ client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = service.DeleteHardwareRequest( + name="name_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_hardware), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.delete_hardware(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == service.DeleteHardwareRequest( + name="name_value", + ) + + +def test_delete_hardware_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete_hardware in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.delete_hardware] = mock_rpc + request = {} + client.delete_hardware(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete_hardware(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_delete_hardware_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = GDCHardwareManagementAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_hardware), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.delete_hardware() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == service.DeleteHardwareRequest() + + +@pytest.mark.asyncio +async def test_delete_hardware_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = GDCHardwareManagementAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.delete_hardware + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[ + client._client._transport.delete_hardware + ] = mock_object + + request = {} + await client.delete_hardware(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.delete_hardware(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + +@pytest.mark.asyncio +async def test_delete_hardware_async( + transport: str = "grpc_asyncio", request_type=service.DeleteHardwareRequest +): + client = GDCHardwareManagementAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_hardware), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.delete_hardware(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = service.DeleteHardwareRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_delete_hardware_async_from_dict(): + await test_delete_hardware_async(request_type=dict) + + +def test_delete_hardware_field_headers(): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.DeleteHardwareRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.delete_hardware), "__call__") as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.delete_hardware(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_delete_hardware_field_headers_async(): + client = GDCHardwareManagementAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.DeleteHardwareRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_hardware), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.delete_hardware(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_delete_hardware_flattened(): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_hardware), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.delete_hardware( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_delete_hardware_flattened_error(): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_hardware( + service.DeleteHardwareRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_delete_hardware_flattened_async(): + client = GDCHardwareManagementAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_hardware), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
+ response = await client.delete_hardware( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_delete_hardware_flattened_error_async(): + client = GDCHardwareManagementAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.delete_hardware( + service.DeleteHardwareRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + service.ListCommentsRequest, + dict, + ], +) +def test_list_comments(request_type, transport: str = "grpc"): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_comments), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = service.ListCommentsResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + response = client.list_comments(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = service.ListCommentsRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListCommentsPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +def test_list_comments_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_comments), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.list_comments() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == service.ListCommentsRequest() + + +def test_list_comments_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = service.ListCommentsRequest( + parent="parent_value", + page_token="page_token_value", + filter="filter_value", + order_by="order_by_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.list_comments), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.list_comments(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == service.ListCommentsRequest( + parent="parent_value", + page_token="page_token_value", + filter="filter_value", + order_by="order_by_value", + ) + + +def test_list_comments_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_comments in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.list_comments] = mock_rpc + request = {} + client.list_comments(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_comments(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_list_comments_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = GDCHardwareManagementAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_comments), "__call__") as call: + # Designate an appropriate return value for the call. 
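+ # FakeUnaryUnaryCall wraps the response message in an awaitable, so awaiting the mocked stub behaves like a real async unary-unary call.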
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + service.ListCommentsResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + ) + response = await client.list_comments() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == service.ListCommentsRequest() + + +@pytest.mark.asyncio +async def test_list_comments_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = GDCHardwareManagementAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.list_comments + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[ + client._client._transport.list_comments + ] = mock_object + + request = {} + await client.list_comments(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + await client.list_comments(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + +@pytest.mark.asyncio +async def test_list_comments_async( + transport: str = "grpc_asyncio", request_type=service.ListCommentsRequest +): + client = GDCHardwareManagementAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_comments), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + service.ListCommentsResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + ) + response = await client.list_comments(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = service.ListCommentsRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListCommentsAsyncPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +@pytest.mark.asyncio +async def test_list_comments_async_from_dict(): + await test_list_comments_async(request_type=dict) + + +def test_list_comments_field_headers(): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.ListCommentsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.list_comments), "__call__") as call: + call.return_value = service.ListCommentsResponse() + client.list_comments(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_comments_field_headers_async(): + client = GDCHardwareManagementAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.ListCommentsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_comments), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + service.ListCommentsResponse() + ) + await client.list_comments(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_list_comments_flattened(): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_comments), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = service.ListCommentsResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_comments( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +def test_list_comments_flattened_error(): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_comments( + service.ListCommentsRequest(), + parent="parent_value", + ) + + +@pytest.mark.asyncio +async def test_list_comments_flattened_async(): + client = GDCHardwareManagementAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_comments), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = service.ListCommentsResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + service.ListCommentsResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_comments( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_list_comments_flattened_error_async(): + client = GDCHardwareManagementAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.list_comments( + service.ListCommentsRequest(), + parent="parent_value", + ) + + +def test_list_comments_pager(transport_name: str = "grpc"): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_comments), "__call__") as call: + # Set the response to a series of pages. + call.side_effect = ( + service.ListCommentsResponse( + comments=[ + resources.Comment(), + resources.Comment(), + resources.Comment(), + ], + next_page_token="abc", + ), + service.ListCommentsResponse( + comments=[], + next_page_token="def", + ), + service.ListCommentsResponse( + comments=[ + resources.Comment(), + ], + next_page_token="ghi", + ), + service.ListCommentsResponse( + comments=[ + resources.Comment(), + resources.Comment(), + ], + ), + RuntimeError, + ) + + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + ) + pager = client.list_comments(request={}) + + assert pager._metadata == expected_metadata + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, resources.Comment) for i in results) + + +def test_list_comments_pages(transport_name: str = "grpc"): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_comments), "__call__") as call: + # Set the response to a series of pages. + call.side_effect = ( + service.ListCommentsResponse( + comments=[ + resources.Comment(), + resources.Comment(), + resources.Comment(), + ], + next_page_token="abc", + ), + service.ListCommentsResponse( + comments=[], + next_page_token="def", + ), + service.ListCommentsResponse( + comments=[ + resources.Comment(), + ], + next_page_token="ghi", + ), + service.ListCommentsResponse( + comments=[ + resources.Comment(), + resources.Comment(), + ], + ), + RuntimeError, + ) + pages = list(client.list_comments(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_list_comments_async_pager(): + client = GDCHardwareManagementAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_comments), "__call__", new_callable=mock.AsyncMock + ) as call: + # Set the response to a series of pages. 
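+ # With an iterable side_effect, each stub invocation returns the next page in order; the trailing RuntimeError is raised if the pager requests more pages than were provided.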
+ call.side_effect = ( + service.ListCommentsResponse( + comments=[ + resources.Comment(), + resources.Comment(), + resources.Comment(), + ], + next_page_token="abc", + ), + service.ListCommentsResponse( + comments=[], + next_page_token="def", + ), + service.ListCommentsResponse( + comments=[ + resources.Comment(), + ], + next_page_token="ghi", + ), + service.ListCommentsResponse( + comments=[ + resources.Comment(), + resources.Comment(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_comments( + request={}, + ) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, resources.Comment) for i in responses) + + +@pytest.mark.asyncio +async def test_list_comments_async_pages(): + client = GDCHardwareManagementAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_comments), "__call__", new_callable=mock.AsyncMock + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + service.ListCommentsResponse( + comments=[ + resources.Comment(), + resources.Comment(), + resources.Comment(), + ], + next_page_token="abc", + ), + service.ListCommentsResponse( + comments=[], + next_page_token="def", + ), + service.ListCommentsResponse( + comments=[ + resources.Comment(), + ], + next_page_token="ghi", + ), + service.ListCommentsResponse( + comments=[ + resources.Comment(), + resources.Comment(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_comments(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + service.GetCommentRequest, + dict, + ], +) +def test_get_comment(request_type, transport: str = "grpc"): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_comment), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = resources.Comment( + name="name_value", + author="author_value", + text="text_value", + ) + response = client.get_comment(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = service.GetCommentRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, resources.Comment) + assert response.name == "name_value" + assert response.author == "author_value" + assert response.text == "text_value" + + +def test_get_comment_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. 
request == None and no flattened fields passed, work. + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_comment), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.get_comment() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == service.GetCommentRequest() + + +def test_get_comment_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = service.GetCommentRequest( + name="name_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_comment), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.get_comment(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == service.GetCommentRequest( + name="name_value", + ) + + +def test_get_comment_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_comment in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get_comment] = mock_rpc + request = {} + client.get_comment(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.get_comment(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_get_comment_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = GDCHardwareManagementAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_comment), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + resources.Comment( + name="name_value", + author="author_value", + text="text_value", + ) + ) + response = await client.get_comment() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == service.GetCommentRequest() + + +@pytest.mark.asyncio +async def test_get_comment_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = GDCHardwareManagementAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.get_comment + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[ + client._client._transport.get_comment + ] = mock_object + + request = {} + await client.get_comment(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + await client.get_comment(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + +@pytest.mark.asyncio +async def test_get_comment_async( + transport: str = "grpc_asyncio", request_type=service.GetCommentRequest +): + client = GDCHardwareManagementAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_comment), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + resources.Comment( + name="name_value", + author="author_value", + text="text_value", + ) + ) + response = await client.get_comment(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = service.GetCommentRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, resources.Comment) + assert response.name == "name_value" + assert response.author == "author_value" + assert response.text == "text_value" + + +@pytest.mark.asyncio +async def test_get_comment_async_from_dict(): + await test_get_comment_async(request_type=dict) + + +def test_get_comment_field_headers(): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.GetCommentRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.get_comment), "__call__") as call: + call.return_value = resources.Comment() + client.get_comment(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_comment_field_headers_async(): + client = GDCHardwareManagementAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.GetCommentRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_comment), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.Comment()) + await client.get_comment(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_get_comment_flattened(): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_comment), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = resources.Comment() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_comment( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_get_comment_flattened_error(): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_comment( + service.GetCommentRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_get_comment_flattened_async(): + client = GDCHardwareManagementAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_comment), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = resources.Comment() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.Comment()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_comment( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_get_comment_flattened_error_async(): + client = GDCHardwareManagementAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.get_comment( + service.GetCommentRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + service.CreateCommentRequest, + dict, + ], +) +def test_create_comment(request_type, transport: str = "grpc"): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_comment), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.create_comment(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = service.CreateCommentRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_create_comment_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_comment), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.create_comment() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == service.CreateCommentRequest() + + +def test_create_comment_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = service.CreateCommentRequest( + parent="parent_value", + comment_id="comment_id_value", + request_id="request_id_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_comment), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.create_comment(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == service.CreateCommentRequest( + parent="parent_value", + comment_id="comment_id_value", + request_id="request_id_value", + ) + + +def test_create_comment_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.create_comment in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.create_comment] = mock_rpc + request = {} + client.create_comment(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.create_comment(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_create_comment_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = GDCHardwareManagementAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_comment), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.create_comment() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == service.CreateCommentRequest() + + +@pytest.mark.asyncio +async def test_create_comment_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = GDCHardwareManagementAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.create_comment + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[ + client._client._transport.create_comment + ] = mock_object + + request = {} + await client.create_comment(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_object.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.create_comment(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + +@pytest.mark.asyncio +async def test_create_comment_async( + transport: str = "grpc_asyncio", request_type=service.CreateCommentRequest +): + client = GDCHardwareManagementAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_comment), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.create_comment(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = service.CreateCommentRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_create_comment_async_from_dict(): + await test_create_comment_async(request_type=dict) + + +def test_create_comment_field_headers(): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.CreateCommentRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_comment), "__call__") as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.create_comment(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_create_comment_field_headers_async(): + client = GDCHardwareManagementAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.CreateCommentRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_comment), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.create_comment(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
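+ # The URI fields are mirrored into the "x-goog-request-params" gRPC metadata entry so the backend can route the request; kw["metadata"] holds the metadata passed to the stub.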
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_create_comment_flattened(): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_comment), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.create_comment( + parent="parent_value", + comment=resources.Comment(name="name_value"), + comment_id="comment_id_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].comment + mock_val = resources.Comment(name="name_value") + assert arg == mock_val + arg = args[0].comment_id + mock_val = "comment_id_value" + assert arg == mock_val + + +def test_create_comment_flattened_error(): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_comment( + service.CreateCommentRequest(), + parent="parent_value", + comment=resources.Comment(name="name_value"), + comment_id="comment_id_value", + ) + + +@pytest.mark.asyncio +async def test_create_comment_flattened_async(): + client = GDCHardwareManagementAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_comment), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.create_comment( + parent="parent_value", + comment=resources.Comment(name="name_value"), + comment_id="comment_id_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].comment + mock_val = resources.Comment(name="name_value") + assert arg == mock_val + arg = args[0].comment_id + mock_val = "comment_id_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_create_comment_flattened_error_async(): + client = GDCHardwareManagementAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.create_comment( + service.CreateCommentRequest(), + parent="parent_value", + comment=resources.Comment(name="name_value"), + comment_id="comment_id_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + service.ListChangeLogEntriesRequest, + dict, + ], +) +def test_list_change_log_entries(request_type, transport: str = "grpc"): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_change_log_entries), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = service.ListChangeLogEntriesResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + response = client.list_change_log_entries(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = service.ListChangeLogEntriesRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListChangeLogEntriesPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +def test_list_change_log_entries_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_change_log_entries), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.list_change_log_entries() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == service.ListChangeLogEntriesRequest() + + +def test_list_change_log_entries_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = service.ListChangeLogEntriesRequest( + parent="parent_value", + page_token="page_token_value", + filter="filter_value", + order_by="order_by_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_change_log_entries), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.list_change_log_entries(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == service.ListChangeLogEntriesRequest( + parent="parent_value", + page_token="page_token_value", + filter="filter_value", + order_by="order_by_value", + ) + + +def test_list_change_log_entries_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.list_change_log_entries + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.list_change_log_entries + ] = mock_rpc + request = {} + client.list_change_log_entries(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_change_log_entries(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_list_change_log_entries_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = GDCHardwareManagementAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_change_log_entries), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + service.ListChangeLogEntriesResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + ) + response = await client.list_change_log_entries() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == service.ListChangeLogEntriesRequest() + + +@pytest.mark.asyncio +async def test_list_change_log_entries_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = GDCHardwareManagementAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.list_change_log_entries + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[ + client._client._transport.list_change_log_entries + ] = mock_object + + request = {} + await client.list_change_log_entries(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + await client.list_change_log_entries(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + +@pytest.mark.asyncio +async def test_list_change_log_entries_async( + transport: str = "grpc_asyncio", request_type=service.ListChangeLogEntriesRequest +): + client = GDCHardwareManagementAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_change_log_entries), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + service.ListChangeLogEntriesResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + ) + response = await client.list_change_log_entries(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = service.ListChangeLogEntriesRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListChangeLogEntriesAsyncPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +@pytest.mark.asyncio +async def test_list_change_log_entries_async_from_dict(): + await test_list_change_log_entries_async(request_type=dict) + + +def test_list_change_log_entries_field_headers(): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = service.ListChangeLogEntriesRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_change_log_entries), "__call__" + ) as call: + call.return_value = service.ListChangeLogEntriesResponse() + client.list_change_log_entries(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_change_log_entries_field_headers_async(): + client = GDCHardwareManagementAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.ListChangeLogEntriesRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_change_log_entries), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + service.ListChangeLogEntriesResponse() + ) + await client.list_change_log_entries(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_list_change_log_entries_flattened(): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_change_log_entries), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = service.ListChangeLogEntriesResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_change_log_entries( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +def test_list_change_log_entries_flattened_error(): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_change_log_entries( + service.ListChangeLogEntriesRequest(), + parent="parent_value", + ) + + +@pytest.mark.asyncio +async def test_list_change_log_entries_flattened_async(): + client = GDCHardwareManagementAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_change_log_entries), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
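+ # Note: the second return_value assignment below supersedes the first; the async client only sees the FakeUnaryUnaryCall wrapper.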
+ call.return_value = service.ListChangeLogEntriesResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + service.ListChangeLogEntriesResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_change_log_entries( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_list_change_log_entries_flattened_error_async(): + client = GDCHardwareManagementAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.list_change_log_entries( + service.ListChangeLogEntriesRequest(), + parent="parent_value", + ) + + +def test_list_change_log_entries_pager(transport_name: str = "grpc"): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_change_log_entries), "__call__" + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + service.ListChangeLogEntriesResponse( + change_log_entries=[ + resources.ChangeLogEntry(), + resources.ChangeLogEntry(), + resources.ChangeLogEntry(), + ], + next_page_token="abc", + ), + service.ListChangeLogEntriesResponse( + change_log_entries=[], + next_page_token="def", + ), + service.ListChangeLogEntriesResponse( + change_log_entries=[ + resources.ChangeLogEntry(), + ], + next_page_token="ghi", + ), + service.ListChangeLogEntriesResponse( + change_log_entries=[ + resources.ChangeLogEntry(), + resources.ChangeLogEntry(), + ], + ), + RuntimeError, + ) + + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + ) + pager = client.list_change_log_entries(request={}) + + assert pager._metadata == expected_metadata + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, resources.ChangeLogEntry) for i in results) + + +def test_list_change_log_entries_pages(transport_name: str = "grpc"): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_change_log_entries), "__call__" + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + service.ListChangeLogEntriesResponse( + change_log_entries=[ + resources.ChangeLogEntry(), + resources.ChangeLogEntry(), + resources.ChangeLogEntry(), + ], + next_page_token="abc", + ), + service.ListChangeLogEntriesResponse( + change_log_entries=[], + next_page_token="def", + ), + service.ListChangeLogEntriesResponse( + change_log_entries=[ + resources.ChangeLogEntry(), + ], + next_page_token="ghi", + ), + service.ListChangeLogEntriesResponse( + change_log_entries=[ + resources.ChangeLogEntry(), + resources.ChangeLogEntry(), + ], + ), + RuntimeError, + ) + pages = list(client.list_change_log_entries(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_list_change_log_entries_async_pager(): + client = GDCHardwareManagementAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_change_log_entries), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + service.ListChangeLogEntriesResponse( + change_log_entries=[ + resources.ChangeLogEntry(), + resources.ChangeLogEntry(), + resources.ChangeLogEntry(), + ], + next_page_token="abc", + ), + service.ListChangeLogEntriesResponse( + change_log_entries=[], + next_page_token="def", + ), + service.ListChangeLogEntriesResponse( + change_log_entries=[ + resources.ChangeLogEntry(), + ], + next_page_token="ghi", + ), + service.ListChangeLogEntriesResponse( + change_log_entries=[ + resources.ChangeLogEntry(), + resources.ChangeLogEntry(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_change_log_entries( + request={}, + ) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, resources.ChangeLogEntry) for i in responses) + + +@pytest.mark.asyncio +async def test_list_change_log_entries_async_pages(): + client = GDCHardwareManagementAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_change_log_entries), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + service.ListChangeLogEntriesResponse( + change_log_entries=[ + resources.ChangeLogEntry(), + resources.ChangeLogEntry(), + resources.ChangeLogEntry(), + ], + next_page_token="abc", + ), + service.ListChangeLogEntriesResponse( + change_log_entries=[], + next_page_token="def", + ), + service.ListChangeLogEntriesResponse( + change_log_entries=[ + resources.ChangeLogEntry(), + ], + next_page_token="ghi", + ), + service.ListChangeLogEntriesResponse( + change_log_entries=[ + resources.ChangeLogEntry(), + resources.ChangeLogEntry(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_change_log_entries(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + service.GetChangeLogEntryRequest, + dict, + ], +) +def test_get_change_log_entry(request_type, transport: str = "grpc"): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_change_log_entry), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = resources.ChangeLogEntry( + name="name_value", + log="log_value", + ) + response = client.get_change_log_entry(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = service.GetChangeLogEntryRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, resources.ChangeLogEntry) + assert response.name == "name_value" + assert response.log == "log_value" + + +def test_get_change_log_entry_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_change_log_entry), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.get_change_log_entry() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == service.GetChangeLogEntryRequest() + + +def test_get_change_log_entry_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. 
+ client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = service.GetChangeLogEntryRequest( + name="name_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_change_log_entry), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.get_change_log_entry(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == service.GetChangeLogEntryRequest( + name="name_value", + ) + + +def test_get_change_log_entry_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.get_change_log_entry in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.get_change_log_entry + ] = mock_rpc + request = {} + client.get_change_log_entry(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.get_change_log_entry(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_get_change_log_entry_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = GDCHardwareManagementAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_change_log_entry), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + resources.ChangeLogEntry( + name="name_value", + log="log_value", + ) + ) + response = await client.get_change_log_entry() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == service.GetChangeLogEntryRequest() + + +@pytest.mark.asyncio +async def test_get_change_log_entry_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = GDCHardwareManagementAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.get_change_log_entry + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[ + client._client._transport.get_change_log_entry + ] = mock_object + + request = {} + await client.get_change_log_entry(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + await client.get_change_log_entry(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + +@pytest.mark.asyncio +async def test_get_change_log_entry_async( + transport: str = "grpc_asyncio", request_type=service.GetChangeLogEntryRequest +): + client = GDCHardwareManagementAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_change_log_entry), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + resources.ChangeLogEntry( + name="name_value", + log="log_value", + ) + ) + response = await client.get_change_log_entry(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = service.GetChangeLogEntryRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, resources.ChangeLogEntry) + assert response.name == "name_value" + assert response.log == "log_value" + + +@pytest.mark.asyncio +async def test_get_change_log_entry_async_from_dict(): + await test_get_change_log_entry_async(request_type=dict) + + +def test_get_change_log_entry_field_headers(): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.GetChangeLogEntryRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.get_change_log_entry), "__call__" + ) as call: + call.return_value = resources.ChangeLogEntry() + client.get_change_log_entry(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_change_log_entry_field_headers_async(): + client = GDCHardwareManagementAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.GetChangeLogEntryRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_change_log_entry), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + resources.ChangeLogEntry() + ) + await client.get_change_log_entry(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_get_change_log_entry_flattened(): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_change_log_entry), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = resources.ChangeLogEntry() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_change_log_entry( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_get_change_log_entry_flattened_error(): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_change_log_entry( + service.GetChangeLogEntryRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_get_change_log_entry_flattened_async(): + client = GDCHardwareManagementAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_change_log_entry), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = resources.ChangeLogEntry() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + resources.ChangeLogEntry() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_change_log_entry( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_get_change_log_entry_flattened_error_async(): + client = GDCHardwareManagementAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.get_change_log_entry( + service.GetChangeLogEntryRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + service.ListSkusRequest, + dict, + ], +) +def test_list_skus(request_type, transport: str = "grpc"): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_skus), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = service.ListSkusResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + response = client.list_skus(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = service.ListSkusRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListSkusPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +def test_list_skus_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_skus), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.list_skus() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == service.ListSkusRequest() + + +def test_list_skus_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = service.ListSkusRequest( + parent="parent_value", + page_token="page_token_value", + filter="filter_value", + order_by="order_by_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_skus), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.list_skus(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == service.ListSkusRequest( + parent="parent_value", + page_token="page_token_value", + filter="filter_value", + order_by="order_by_value", + ) + + +def test_list_skus_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_skus in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.list_skus] = mock_rpc + request = {} + client.list_skus(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_skus(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_list_skus_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = GDCHardwareManagementAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_skus), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + service.ListSkusResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + ) + response = await client.list_skus() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == service.ListSkusRequest() + + +@pytest.mark.asyncio +async def test_list_skus_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = GDCHardwareManagementAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.list_skus + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[ + client._client._transport.list_skus + ] = mock_object + + request = {} + await client.list_skus(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_object.call_count == 1 + + await client.list_skus(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + +@pytest.mark.asyncio +async def test_list_skus_async( + transport: str = "grpc_asyncio", request_type=service.ListSkusRequest +): + client = GDCHardwareManagementAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_skus), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + service.ListSkusResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + ) + response = await client.list_skus(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = service.ListSkusRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListSkusAsyncPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +@pytest.mark.asyncio +async def test_list_skus_async_from_dict(): + await test_list_skus_async(request_type=dict) + + +def test_list_skus_field_headers(): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.ListSkusRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_skus), "__call__") as call: + call.return_value = service.ListSkusResponse() + client.list_skus(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_skus_field_headers_async(): + client = GDCHardwareManagementAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.ListSkusRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_skus), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + service.ListSkusResponse() + ) + await client.list_skus(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_list_skus_flattened(): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_skus), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = service.ListSkusResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_skus( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +def test_list_skus_flattened_error(): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_skus( + service.ListSkusRequest(), + parent="parent_value", + ) + + +@pytest.mark.asyncio +async def test_list_skus_flattened_async(): + client = GDCHardwareManagementAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_skus), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = service.ListSkusResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + service.ListSkusResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_skus( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_list_skus_flattened_error_async(): + client = GDCHardwareManagementAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.list_skus( + service.ListSkusRequest(), + parent="parent_value", + ) + + +def test_list_skus_pager(transport_name: str = "grpc"): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_skus), "__call__") as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + service.ListSkusResponse( + skus=[ + resources.Sku(), + resources.Sku(), + resources.Sku(), + ], + next_page_token="abc", + ), + service.ListSkusResponse( + skus=[], + next_page_token="def", + ), + service.ListSkusResponse( + skus=[ + resources.Sku(), + ], + next_page_token="ghi", + ), + service.ListSkusResponse( + skus=[ + resources.Sku(), + resources.Sku(), + ], + ), + RuntimeError, + ) + + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + ) + pager = client.list_skus(request={}) + + assert pager._metadata == expected_metadata + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, resources.Sku) for i in results) + + +def test_list_skus_pages(transport_name: str = "grpc"): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_skus), "__call__") as call: + # Set the response to a series of pages. + call.side_effect = ( + service.ListSkusResponse( + skus=[ + resources.Sku(), + resources.Sku(), + resources.Sku(), + ], + next_page_token="abc", + ), + service.ListSkusResponse( + skus=[], + next_page_token="def", + ), + service.ListSkusResponse( + skus=[ + resources.Sku(), + ], + next_page_token="ghi", + ), + service.ListSkusResponse( + skus=[ + resources.Sku(), + resources.Sku(), + ], + ), + RuntimeError, + ) + pages = list(client.list_skus(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_list_skus_async_pager(): + client = GDCHardwareManagementAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_skus), "__call__", new_callable=mock.AsyncMock + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + service.ListSkusResponse( + skus=[ + resources.Sku(), + resources.Sku(), + resources.Sku(), + ], + next_page_token="abc", + ), + service.ListSkusResponse( + skus=[], + next_page_token="def", + ), + service.ListSkusResponse( + skus=[ + resources.Sku(), + ], + next_page_token="ghi", + ), + service.ListSkusResponse( + skus=[ + resources.Sku(), + resources.Sku(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_skus( + request={}, + ) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, resources.Sku) for i in responses) + + +@pytest.mark.asyncio +async def test_list_skus_async_pages(): + client = GDCHardwareManagementAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_skus), "__call__", new_callable=mock.AsyncMock + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + service.ListSkusResponse( + skus=[ + resources.Sku(), + resources.Sku(), + resources.Sku(), + ], + next_page_token="abc", + ), + service.ListSkusResponse( + skus=[], + next_page_token="def", + ), + service.ListSkusResponse( + skus=[ + resources.Sku(), + ], + next_page_token="ghi", + ), + service.ListSkusResponse( + skus=[ + resources.Sku(), + resources.Sku(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_skus(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + service.GetSkuRequest, + dict, + ], +) +def test_get_sku(request_type, transport: str = "grpc"): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_sku), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = resources.Sku( + name="name_value", + display_name="display_name_value", + description="description_value", + revision_id="revision_id_value", + is_active=True, + type_=resources.Sku.Type.RACK, + vcpu_count=1094, + ) + response = client.get_sku(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = service.GetSkuRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, resources.Sku) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.description == "description_value" + assert response.revision_id == "revision_id_value" + assert response.is_active is True + assert response.type_ == resources.Sku.Type.RACK + assert response.vcpu_count == 1094 + + +def test_get_sku_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_sku), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.get_sku() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == service.GetSkuRequest() + + +def test_get_sku_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. 
+ client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = service.GetSkuRequest( + name="name_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_sku), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.get_sku(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == service.GetSkuRequest( + name="name_value", + ) + + +def test_get_sku_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_sku in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get_sku] = mock_rpc + request = {} + client.get_sku(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.get_sku(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_get_sku_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = GDCHardwareManagementAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_sku), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + resources.Sku( + name="name_value", + display_name="display_name_value", + description="description_value", + revision_id="revision_id_value", + is_active=True, + type_=resources.Sku.Type.RACK, + vcpu_count=1094, + ) + ) + response = await client.get_sku() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == service.GetSkuRequest() + + +@pytest.mark.asyncio +async def test_get_sku_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = GDCHardwareManagementAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.get_sku + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[ + client._client._transport.get_sku + ] = mock_object + + request = {} + await client.get_sku(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + await client.get_sku(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + +@pytest.mark.asyncio +async def test_get_sku_async( + transport: str = "grpc_asyncio", request_type=service.GetSkuRequest +): + client = GDCHardwareManagementAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_sku), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + resources.Sku( + name="name_value", + display_name="display_name_value", + description="description_value", + revision_id="revision_id_value", + is_active=True, + type_=resources.Sku.Type.RACK, + vcpu_count=1094, + ) + ) + response = await client.get_sku(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = service.GetSkuRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, resources.Sku) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.description == "description_value" + assert response.revision_id == "revision_id_value" + assert response.is_active is True + assert response.type_ == resources.Sku.Type.RACK + assert response.vcpu_count == 1094 + + +@pytest.mark.asyncio +async def test_get_sku_async_from_dict(): + await test_get_sku_async(request_type=dict) + + +def test_get_sku_field_headers(): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.GetSkuRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_sku), "__call__") as call: + call.return_value = resources.Sku() + client.get_sku(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_sku_field_headers_async(): + client = GDCHardwareManagementAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.GetSkuRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_sku), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.Sku()) + await client.get_sku(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_get_sku_flattened(): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_sku), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = resources.Sku() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_sku( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_get_sku_flattened_error(): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get_sku( + service.GetSkuRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_get_sku_flattened_async(): + client = GDCHardwareManagementAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_sku), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = resources.Sku() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.Sku()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_sku( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_get_sku_flattened_error_async(): + client = GDCHardwareManagementAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.get_sku( + service.GetSkuRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + service.ListZonesRequest, + dict, + ], +) +def test_list_zones(request_type, transport: str = "grpc"): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_zones), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = service.ListZonesResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + response = client.list_zones(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = service.ListZonesRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListZonesPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +def test_list_zones_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_zones), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.list_zones() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == service.ListZonesRequest() + + +def test_list_zones_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = service.ListZonesRequest( + parent="parent_value", + page_token="page_token_value", + filter="filter_value", + order_by="order_by_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_zones), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.list_zones(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == service.ListZonesRequest( + parent="parent_value", + page_token="page_token_value", + filter="filter_value", + order_by="order_by_value", + ) + + +def test_list_zones_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_zones in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.list_zones] = mock_rpc + request = {} + client.list_zones(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_zones(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_list_zones_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = GDCHardwareManagementAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_zones), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + service.ListZonesResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + ) + response = await client.list_zones() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == service.ListZonesRequest() + + +@pytest.mark.asyncio +async def test_list_zones_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = GDCHardwareManagementAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.list_zones + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[ + client._client._transport.list_zones + ] = mock_object + + request = {} + await client.list_zones(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + await client.list_zones(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + +@pytest.mark.asyncio +async def test_list_zones_async( + transport: str = "grpc_asyncio", request_type=service.ListZonesRequest +): + client = GDCHardwareManagementAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_zones), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + service.ListZonesResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + ) + response = await client.list_zones(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = service.ListZonesRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListZonesAsyncPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +@pytest.mark.asyncio +async def test_list_zones_async_from_dict(): + await test_list_zones_async(request_type=dict) + + +def test_list_zones_field_headers(): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.ListZonesRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.list_zones), "__call__") as call: + call.return_value = service.ListZonesResponse() + client.list_zones(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_zones_field_headers_async(): + client = GDCHardwareManagementAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.ListZonesRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_zones), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + service.ListZonesResponse() + ) + await client.list_zones(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_list_zones_flattened(): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_zones), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = service.ListZonesResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_zones( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +def test_list_zones_flattened_error(): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_zones( + service.ListZonesRequest(), + parent="parent_value", + ) + + +@pytest.mark.asyncio +async def test_list_zones_flattened_async(): + client = GDCHardwareManagementAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_zones), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = service.ListZonesResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + service.ListZonesResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_zones( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_list_zones_flattened_error_async(): + client = GDCHardwareManagementAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.list_zones( + service.ListZonesRequest(), + parent="parent_value", + ) + + +def test_list_zones_pager(transport_name: str = "grpc"): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_zones), "__call__") as call: + # Set the response to a series of pages. + call.side_effect = ( + service.ListZonesResponse( + zones=[ + resources.Zone(), + resources.Zone(), + resources.Zone(), + ], + next_page_token="abc", + ), + service.ListZonesResponse( + zones=[], + next_page_token="def", + ), + service.ListZonesResponse( + zones=[ + resources.Zone(), + ], + next_page_token="ghi", + ), + service.ListZonesResponse( + zones=[ + resources.Zone(), + resources.Zone(), + ], + ), + RuntimeError, + ) + + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + ) + pager = client.list_zones(request={}) + + assert pager._metadata == expected_metadata + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, resources.Zone) for i in results) + + +def test_list_zones_pages(transport_name: str = "grpc"): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_zones), "__call__") as call: + # Set the response to a series of pages. + call.side_effect = ( + service.ListZonesResponse( + zones=[ + resources.Zone(), + resources.Zone(), + resources.Zone(), + ], + next_page_token="abc", + ), + service.ListZonesResponse( + zones=[], + next_page_token="def", + ), + service.ListZonesResponse( + zones=[ + resources.Zone(), + ], + next_page_token="ghi", + ), + service.ListZonesResponse( + zones=[ + resources.Zone(), + resources.Zone(), + ], + ), + RuntimeError, + ) + pages = list(client.list_zones(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_list_zones_async_pager(): + client = GDCHardwareManagementAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_zones), "__call__", new_callable=mock.AsyncMock + ) as call: + # Set the response to a series of pages. 
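+        # An iterable side_effect returns one item per stub call; the trailing
+        # RuntimeError is raised if the pager ever asks for more pages than the
+        # test supplies.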
+ call.side_effect = ( + service.ListZonesResponse( + zones=[ + resources.Zone(), + resources.Zone(), + resources.Zone(), + ], + next_page_token="abc", + ), + service.ListZonesResponse( + zones=[], + next_page_token="def", + ), + service.ListZonesResponse( + zones=[ + resources.Zone(), + ], + next_page_token="ghi", + ), + service.ListZonesResponse( + zones=[ + resources.Zone(), + resources.Zone(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_zones( + request={}, + ) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, resources.Zone) for i in responses) + + +@pytest.mark.asyncio +async def test_list_zones_async_pages(): + client = GDCHardwareManagementAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_zones), "__call__", new_callable=mock.AsyncMock + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + service.ListZonesResponse( + zones=[ + resources.Zone(), + resources.Zone(), + resources.Zone(), + ], + next_page_token="abc", + ), + service.ListZonesResponse( + zones=[], + next_page_token="def", + ), + service.ListZonesResponse( + zones=[ + resources.Zone(), + ], + next_page_token="ghi", + ), + service.ListZonesResponse( + zones=[ + resources.Zone(), + resources.Zone(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_zones(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + service.GetZoneRequest, + dict, + ], +) +def test_get_zone(request_type, transport: str = "grpc"): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_zone), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = resources.Zone( + name="name_value", + display_name="display_name_value", + state=resources.Zone.State.ADDITIONAL_INFO_NEEDED, + ciq_uri="ciq_uri_value", + globally_unique_id="globally_unique_id_value", + ) + response = client.get_zone(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = service.GetZoneRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. 
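+    # The values checked below are exactly those set on the mocked
+    # resources.Zone above, i.e. the client hands back the stub's response
+    # unchanged.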
+ assert isinstance(response, resources.Zone) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.state == resources.Zone.State.ADDITIONAL_INFO_NEEDED + assert response.ciq_uri == "ciq_uri_value" + assert response.globally_unique_id == "globally_unique_id_value" + + +def test_get_zone_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_zone), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.get_zone() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == service.GetZoneRequest() + + +def test_get_zone_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = service.GetZoneRequest( + name="name_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_zone), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.get_zone(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == service.GetZoneRequest( + name="name_value", + ) + + +def test_get_zone_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_zone in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get_zone] = mock_rpc + request = {} + client.get_zone(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.get_zone(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_get_zone_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. 
+ client = GDCHardwareManagementAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_zone), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + resources.Zone( + name="name_value", + display_name="display_name_value", + state=resources.Zone.State.ADDITIONAL_INFO_NEEDED, + ciq_uri="ciq_uri_value", + globally_unique_id="globally_unique_id_value", + ) + ) + response = await client.get_zone() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == service.GetZoneRequest() + + +@pytest.mark.asyncio +async def test_get_zone_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = GDCHardwareManagementAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.get_zone + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[ + client._client._transport.get_zone + ] = mock_object + + request = {} + await client.get_zone(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + await client.get_zone(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + +@pytest.mark.asyncio +async def test_get_zone_async( + transport: str = "grpc_asyncio", request_type=service.GetZoneRequest +): + client = GDCHardwareManagementAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_zone), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + resources.Zone( + name="name_value", + display_name="display_name_value", + state=resources.Zone.State.ADDITIONAL_INFO_NEEDED, + ciq_uri="ciq_uri_value", + globally_unique_id="globally_unique_id_value", + ) + ) + response = await client.get_zone(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = service.GetZoneRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, resources.Zone) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.state == resources.Zone.State.ADDITIONAL_INFO_NEEDED + assert response.ciq_uri == "ciq_uri_value" + assert response.globally_unique_id == "globally_unique_id_value" + + +@pytest.mark.asyncio +async def test_get_zone_async_from_dict(): + await test_get_zone_async(request_type=dict) + + +def test_get_zone_field_headers(): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.GetZoneRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_zone), "__call__") as call: + call.return_value = resources.Zone() + client.get_zone(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_zone_field_headers_async(): + client = GDCHardwareManagementAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.GetZoneRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_zone), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.Zone()) + await client.get_zone(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_get_zone_flattened(): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_zone), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = resources.Zone() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_zone( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_get_zone_flattened_error(): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
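+    # Passing both is rejected because the flattened `name` argument could
+    # conflict with fields already set on the request object.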
+ with pytest.raises(ValueError): + client.get_zone( + service.GetZoneRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_get_zone_flattened_async(): + client = GDCHardwareManagementAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_zone), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = resources.Zone() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.Zone()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_zone( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_get_zone_flattened_error_async(): + client = GDCHardwareManagementAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.get_zone( + service.GetZoneRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + service.CreateZoneRequest, + dict, + ], +) +def test_create_zone(request_type, transport: str = "grpc"): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_zone), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.create_zone(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = service.CreateZoneRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_create_zone_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_zone), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.create_zone() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == service.CreateZoneRequest() + + +def test_create_zone_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. 
+ client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = service.CreateZoneRequest( + parent="parent_value", + zone_id="zone_id_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_zone), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.create_zone(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == service.CreateZoneRequest( + parent="parent_value", + zone_id="zone_id_value", + ) + + +def test_create_zone_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.create_zone in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.create_zone] = mock_rpc + request = {} + client.create_zone(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.create_zone(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_create_zone_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = GDCHardwareManagementAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_zone), "__call__") as call: + # Designate an appropriate return value for the call. 
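+        # The async client awaits the stub call, so the mock must return an
+        # awaitable; FakeUnaryUnaryCall wraps the Operation message in one.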
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.create_zone() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == service.CreateZoneRequest() + + +@pytest.mark.asyncio +async def test_create_zone_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = GDCHardwareManagementAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.create_zone + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[ + client._client._transport.create_zone + ] = mock_object + + request = {} + await client.create_zone(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.create_zone(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + +@pytest.mark.asyncio +async def test_create_zone_async( + transport: str = "grpc_asyncio", request_type=service.CreateZoneRequest +): + client = GDCHardwareManagementAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_zone), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.create_zone(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = service.CreateZoneRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_create_zone_async_from_dict(): + await test_create_zone_async(request_type=dict) + + +def test_create_zone_field_headers(): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.CreateZoneRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
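+    # The x-goog-request-params metadata asserted below is how the client
+    # passes URI path parameters (here `parent`) to the backend for routing.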
+ with mock.patch.object(type(client.transport.create_zone), "__call__") as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.create_zone(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_create_zone_field_headers_async(): + client = GDCHardwareManagementAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.CreateZoneRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_zone), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.create_zone(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_create_zone_flattened(): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_zone), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.create_zone( + parent="parent_value", + zone=resources.Zone(name="name_value"), + zone_id="zone_id_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].zone + mock_val = resources.Zone(name="name_value") + assert arg == mock_val + arg = args[0].zone_id + mock_val = "zone_id_value" + assert arg == mock_val + + +def test_create_zone_flattened_error(): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_zone( + service.CreateZoneRequest(), + parent="parent_value", + zone=resources.Zone(name="name_value"), + zone_id="zone_id_value", + ) + + +@pytest.mark.asyncio +async def test_create_zone_flattened_async(): + client = GDCHardwareManagementAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_zone), "__call__") as call: + # Designate an appropriate return value for the call. 
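+        # create_zone is a long-running operation: the stub yields a raw
+        # operations_pb2.Operation, which the client wraps in a future
+        # (see the future.Future assertions in the tests above).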
+ call.return_value = operations_pb2.Operation(name="operations/op") + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.create_zone( + parent="parent_value", + zone=resources.Zone(name="name_value"), + zone_id="zone_id_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].zone + mock_val = resources.Zone(name="name_value") + assert arg == mock_val + arg = args[0].zone_id + mock_val = "zone_id_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_create_zone_flattened_error_async(): + client = GDCHardwareManagementAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.create_zone( + service.CreateZoneRequest(), + parent="parent_value", + zone=resources.Zone(name="name_value"), + zone_id="zone_id_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + service.UpdateZoneRequest, + dict, + ], +) +def test_update_zone(request_type, transport: str = "grpc"): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_zone), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.update_zone(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = service.UpdateZoneRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_update_zone_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_zone), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.update_zone() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == service.UpdateZoneRequest() + + +def test_update_zone_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. 
+ client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = service.UpdateZoneRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_zone), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.update_zone(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == service.UpdateZoneRequest() + + +def test_update_zone_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.update_zone in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.update_zone] = mock_rpc + request = {} + client.update_zone(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.update_zone(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_update_zone_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = GDCHardwareManagementAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_zone), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.update_zone() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == service.UpdateZoneRequest() + + +@pytest.mark.asyncio +async def test_update_zone_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = GDCHardwareManagementAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.update_zone + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[ + client._client._transport.update_zone + ] = mock_object + + request = {} + await client.update_zone(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.update_zone(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + +@pytest.mark.asyncio +async def test_update_zone_async( + transport: str = "grpc_asyncio", request_type=service.UpdateZoneRequest +): + client = GDCHardwareManagementAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_zone), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.update_zone(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = service.UpdateZoneRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_update_zone_async_from_dict(): + await test_update_zone_async(request_type=dict) + + +def test_update_zone_field_headers(): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.UpdateZoneRequest() + + request.zone.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
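+    # For update_zone the routing parameter lives on the nested resource, so
+    # the header asserted below is keyed "zone.name" rather than a top-level
+    # field.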
+ with mock.patch.object(type(client.transport.update_zone), "__call__") as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.update_zone(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "zone.name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_update_zone_field_headers_async(): + client = GDCHardwareManagementAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.UpdateZoneRequest() + + request.zone.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_zone), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.update_zone(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "zone.name=name_value", + ) in kw["metadata"] + + +def test_update_zone_flattened(): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_zone), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.update_zone( + zone=resources.Zone(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].zone + mock_val = resources.Zone(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + assert arg == mock_val + + +def test_update_zone_flattened_error(): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_zone( + service.UpdateZoneRequest(), + zone=resources.Zone(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +@pytest.mark.asyncio +async def test_update_zone_flattened_async(): + client = GDCHardwareManagementAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_zone), "__call__") as call: + # Designate an appropriate return value for the call. 
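+        # The flattened call below supplies the changed resource plus a
+        # FieldMask naming the fields to update; both are copied into the
+        # corresponding request fields, as the assertions verify.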
+ call.return_value = operations_pb2.Operation(name="operations/op") + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.update_zone( + zone=resources.Zone(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].zone + mock_val = resources.Zone(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_update_zone_flattened_error_async(): + client = GDCHardwareManagementAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.update_zone( + service.UpdateZoneRequest(), + zone=resources.Zone(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +@pytest.mark.parametrize( + "request_type", + [ + service.DeleteZoneRequest, + dict, + ], +) +def test_delete_zone(request_type, transport: str = "grpc"): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_zone), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.delete_zone(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = service.DeleteZoneRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_delete_zone_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_zone), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.delete_zone() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == service.DeleteZoneRequest() + + +def test_delete_zone_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. 
+ client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = service.DeleteZoneRequest( + name="name_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_zone), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.delete_zone(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == service.DeleteZoneRequest( + name="name_value", + ) + + +def test_delete_zone_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete_zone in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.delete_zone] = mock_rpc + request = {} + client.delete_zone(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete_zone(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_delete_zone_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = GDCHardwareManagementAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_zone), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.delete_zone() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == service.DeleteZoneRequest() + + +@pytest.mark.asyncio +async def test_delete_zone_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = GDCHardwareManagementAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.delete_zone + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[ + client._client._transport.delete_zone + ] = mock_object + + request = {} + await client.delete_zone(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.delete_zone(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + +@pytest.mark.asyncio +async def test_delete_zone_async( + transport: str = "grpc_asyncio", request_type=service.DeleteZoneRequest +): + client = GDCHardwareManagementAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_zone), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.delete_zone(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = service.DeleteZoneRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_delete_zone_async_from_dict(): + await test_delete_zone_async(request_type=dict) + + +def test_delete_zone_field_headers(): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.DeleteZoneRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.delete_zone), "__call__") as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.delete_zone(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_delete_zone_field_headers_async(): + client = GDCHardwareManagementAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.DeleteZoneRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_zone), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.delete_zone(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_delete_zone_flattened(): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_zone), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.delete_zone( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_delete_zone_flattened_error(): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_zone( + service.DeleteZoneRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_delete_zone_flattened_async(): + client = GDCHardwareManagementAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_zone), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.delete_zone( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_delete_zone_flattened_error_async(): + client = GDCHardwareManagementAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.delete_zone( + service.DeleteZoneRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + service.SignalZoneStateRequest, + dict, + ], +) +def test_signal_zone_state(request_type, transport: str = "grpc"): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.signal_zone_state), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.signal_zone_state(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = service.SignalZoneStateRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_signal_zone_state_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.signal_zone_state), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.signal_zone_state() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == service.SignalZoneStateRequest() + + +def test_signal_zone_state_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = service.SignalZoneStateRequest( + name="name_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.signal_zone_state), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.signal_zone_state(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == service.SignalZoneStateRequest( + name="name_value", + ) + + +def test_signal_zone_state_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.signal_zone_state in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.signal_zone_state + ] = mock_rpc + request = {} + client.signal_zone_state(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.signal_zone_state(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_signal_zone_state_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = GDCHardwareManagementAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.signal_zone_state), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.signal_zone_state() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == service.SignalZoneStateRequest() + + +@pytest.mark.asyncio +async def test_signal_zone_state_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = GDCHardwareManagementAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.signal_zone_state + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[ + client._client._transport.signal_zone_state + ] = mock_object + + request = {} + await client.signal_zone_state(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_object.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.signal_zone_state(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + +@pytest.mark.asyncio +async def test_signal_zone_state_async( + transport: str = "grpc_asyncio", request_type=service.SignalZoneStateRequest +): + client = GDCHardwareManagementAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.signal_zone_state), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.signal_zone_state(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = service.SignalZoneStateRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_signal_zone_state_async_from_dict(): + await test_signal_zone_state_async(request_type=dict) + + +def test_signal_zone_state_field_headers(): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.SignalZoneStateRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.signal_zone_state), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.signal_zone_state(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_signal_zone_state_field_headers_async(): + client = GDCHardwareManagementAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.SignalZoneStateRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.signal_zone_state), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.signal_zone_state(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
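+    # The name field is mirrored into the x-goog-request-params metadata entry for request routing.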
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_signal_zone_state_flattened(): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.signal_zone_state), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.signal_zone_state( + name="name_value", + state_signal=service.SignalZoneStateRequest.StateSignal.READY_FOR_SITE_TURNUP, + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + arg = args[0].state_signal + mock_val = service.SignalZoneStateRequest.StateSignal.READY_FOR_SITE_TURNUP + assert arg == mock_val + + +def test_signal_zone_state_flattened_error(): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.signal_zone_state( + service.SignalZoneStateRequest(), + name="name_value", + state_signal=service.SignalZoneStateRequest.StateSignal.READY_FOR_SITE_TURNUP, + ) + + +@pytest.mark.asyncio +async def test_signal_zone_state_flattened_async(): + client = GDCHardwareManagementAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.signal_zone_state), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.signal_zone_state( + name="name_value", + state_signal=service.SignalZoneStateRequest.StateSignal.READY_FOR_SITE_TURNUP, + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + arg = args[0].state_signal + mock_val = service.SignalZoneStateRequest.StateSignal.READY_FOR_SITE_TURNUP + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_signal_zone_state_flattened_error_async(): + client = GDCHardwareManagementAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.signal_zone_state( + service.SignalZoneStateRequest(), + name="name_value", + state_signal=service.SignalZoneStateRequest.StateSignal.READY_FOR_SITE_TURNUP, + ) + + +@pytest.mark.parametrize( + "request_type", + [ + service.ListOrdersRequest, + dict, + ], +) +def test_list_orders_rest(request_type): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = service.ListOrdersResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = service.ListOrdersResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_orders(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListOrdersPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +def test_list_orders_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_orders in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.list_orders] = mock_rpc + + request = {} + client.list_orders(request) + + # Establish that the underlying gRPC stub method was called. 
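+        # The mock replaced the cached wrapped method, so the call reaches it directly without re-wrapping.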
+ assert mock_rpc.call_count == 1 + + client.list_orders(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_list_orders_rest_required_fields(request_type=service.ListOrdersRequest): + transport_class = transports.GDCHardwareManagementRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_orders._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_orders._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "filter", + "order_by", + "page_size", + "page_token", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = service.ListOrdersResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
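+            # Build a minimal fake transcode result (URI, method, query params) so the mocked
+            # transport sends a GET with every request field carried as a query parameter.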
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = service.ListOrdersResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_orders(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_orders_rest_unset_required_fields(): + transport = transports.GDCHardwareManagementRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.list_orders._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "filter", + "orderBy", + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_orders_rest_interceptors(null_interceptor): + transport = transports.GDCHardwareManagementRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.GDCHardwareManagementRestInterceptor(), + ) + client = GDCHardwareManagementClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.GDCHardwareManagementRestInterceptor, "post_list_orders" + ) as post, mock.patch.object( + transports.GDCHardwareManagementRestInterceptor, "pre_list_orders" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = service.ListOrdersRequest.pb(service.ListOrdersRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = service.ListOrdersResponse.to_json( + service.ListOrdersResponse() + ) + + request = service.ListOrdersRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = service.ListOrdersResponse() + + client.list_orders( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_orders_rest_bad_request( + transport: str = "rest", request_type=service.ListOrdersRequest +): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
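+    # A mocked 400 status on the session causes the REST transport to raise core_exceptions.BadRequest.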
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_orders(request) + + +def test_list_orders_rest_flattened(): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = service.ListOrdersResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = service.ListOrdersResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.list_orders(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha/{parent=projects/*/locations/*}/orders" + % client.transport._host, + args[1], + ) + + +def test_list_orders_rest_flattened_error(transport: str = "rest"): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_orders( + service.ListOrdersRequest(), + parent="parent_value", + ) + + +def test_list_orders_rest_pager(transport: str = "rest"): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + service.ListOrdersResponse( + orders=[ + resources.Order(), + resources.Order(), + resources.Order(), + ], + next_page_token="abc", + ), + service.ListOrdersResponse( + orders=[], + next_page_token="def", + ), + service.ListOrdersResponse( + orders=[ + resources.Order(), + ], + next_page_token="ghi", + ), + service.ListOrdersResponse( + orders=[ + resources.Order(), + resources.Order(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(service.ListOrdersResponse.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "projects/sample1/locations/sample2"} + + pager = client.list_orders(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, resources.Order) for i in results) + + pages = list(client.list_orders(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + service.GetOrderRequest, + dict, + ], +) +def test_get_order_rest(request_type): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/orders/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = resources.Order( + name="name_value", + display_name="display_name_value", + state=resources.Order.State.DRAFT, + target_workloads=["target_workloads_value"], + customer_motivation="customer_motivation_value", + region_code="region_code_value", + order_form_uri="order_form_uri_value", + type_=resources.Order.Type.PAID, + billing_id="billing_id_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = resources.Order.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_order(request) + + # Establish that the response is the type that we expect. 
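+    # Each scalar field set on the mocked Order above should round-trip through the JSON response.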
+ assert isinstance(response, resources.Order) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.state == resources.Order.State.DRAFT + assert response.target_workloads == ["target_workloads_value"] + assert response.customer_motivation == "customer_motivation_value" + assert response.region_code == "region_code_value" + assert response.order_form_uri == "order_form_uri_value" + assert response.type_ == resources.Order.Type.PAID + assert response.billing_id == "billing_id_value" + + +def test_get_order_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_order in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get_order] = mock_rpc + + request = {} + client.get_order(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.get_order(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_get_order_rest_required_fields(request_type=service.GetOrderRequest): + transport_class = transports.GDCHardwareManagementRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_order._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_order._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = resources.Order() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = resources.Order.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_order(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_order_rest_unset_required_fields(): + transport = transports.GDCHardwareManagementRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_order._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_order_rest_interceptors(null_interceptor): + transport = transports.GDCHardwareManagementRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.GDCHardwareManagementRestInterceptor(), + ) + client = GDCHardwareManagementClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.GDCHardwareManagementRestInterceptor, "post_get_order" + ) as post, mock.patch.object( + transports.GDCHardwareManagementRestInterceptor, "pre_get_order" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = service.GetOrderRequest.pb(service.GetOrderRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = resources.Order.to_json(resources.Order()) + + request = service.GetOrderRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = resources.Order() + + client.get_order( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_order_rest_bad_request( + transport: str = "rest", request_type=service.GetOrderRequest +): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/orders/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_order(request) + + +def test_get_order_rest_flattened(): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. 
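+    # The flattened name argument must expand into the v1alpha GetOrder HTTP path validated below.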
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = resources.Order() + + # get arguments that satisfy an http rule for this method + sample_request = {"name": "projects/sample1/locations/sample2/orders/sample3"} + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = resources.Order.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_order(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha/{name=projects/*/locations/*/orders/*}" + % client.transport._host, + args[1], + ) + + +def test_get_order_rest_flattened_error(transport: str = "rest"): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_order( + service.GetOrderRequest(), + name="name_value", + ) + + +def test_get_order_rest_error(): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + service.CreateOrderRequest, + dict, + ], +) +def test_create_order_rest(request_type): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request_init["order"] = { + "name": "name_value", + "display_name": "display_name_value", + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "labels": {}, + "state": 1, + "organization_contact": { + "address": { + "revision": 879, + "region_code": "region_code_value", + "language_code": "language_code_value", + "postal_code": "postal_code_value", + "sorting_code": "sorting_code_value", + "administrative_area": "administrative_area_value", + "locality": "locality_value", + "sublocality": "sublocality_value", + "address_lines": ["address_lines_value1", "address_lines_value2"], + "recipients": ["recipients_value1", "recipients_value2"], + "organization": "organization_value", + }, + "email": "email_value", + "phone": "phone_value", + "contacts": [ + { + "given_name": "given_name_value", + "family_name": "family_name_value", + "email": "email_value", + "phone": "phone_value", + "time_zone": {"id": "id_value", "version": "version_value"}, + "reachable_times": [ + { + "start_time": { + "hours": 561, + "minutes": 773, + "seconds": 751, + "nanos": 543, + }, + "end_time": {}, + "days": [1], + } + ], + } + ], + }, + "target_workloads": ["target_workloads_value1", "target_workloads_value2"], + "customer_motivation": "customer_motivation_value", + "fulfillment_time": {}, + "region_code": "region_code_value", + "order_form_uri": "order_form_uri_value", + "type_": 1, + "submit_time": {}, + "billing_id": "billing_id_value", + "existing_hardware": [ + { + 
"site": "site_value", + "rack_location": "rack_location_value", + "rack_space": [{"start_rack_unit": 1613, "end_rack_unit": 1366}], + } + ], + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = service.CreateOrderRequest.meta.fields["order"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["order"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["order"][field])): + del request_init["order"][field][i][subfield] + else: + del request_init["order"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.create_order(request) + + # Establish that the response is the type that we expect. 
+ assert response.operation.name == "operations/spam" + + +def test_create_order_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.create_order in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.create_order] = mock_rpc + + request = {} + client.create_order(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.create_order(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_create_order_rest_required_fields(request_type=service.CreateOrderRequest): + transport_class = transports.GDCHardwareManagementRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_order._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_order._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "order_id", + "request_id", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.create_order(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_create_order_rest_unset_required_fields(): + transport = transports.GDCHardwareManagementRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.create_order._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "orderId", + "requestId", + ) + ) + & set( + ( + "parent", + "order", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_order_rest_interceptors(null_interceptor): + transport = transports.GDCHardwareManagementRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.GDCHardwareManagementRestInterceptor(), + ) + client = GDCHardwareManagementClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.GDCHardwareManagementRestInterceptor, "post_create_order" + ) as post, mock.patch.object( + transports.GDCHardwareManagementRestInterceptor, "pre_create_order" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = service.CreateOrderRequest.pb(service.CreateOrderRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = service.CreateOrderRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.create_order( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_create_order_rest_bad_request( + transport: str = "rest", request_type=service.CreateOrderRequest +): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.create_order(request) + + +def test_create_order_rest_flattened(): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + order=resources.Order(name="name_value"), + order_id="order_id_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.create_order(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha/{parent=projects/*/locations/*}/orders" + % client.transport._host, + args[1], + ) + + +def test_create_order_rest_flattened_error(transport: str = "rest"): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.create_order( + service.CreateOrderRequest(), + parent="parent_value", + order=resources.Order(name="name_value"), + order_id="order_id_value", + ) + + +def test_create_order_rest_error(): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + service.UpdateOrderRequest, + dict, + ], +) +def test_update_order_rest(request_type): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "order": {"name": "projects/sample1/locations/sample2/orders/sample3"} + } + request_init["order"] = { + "name": "projects/sample1/locations/sample2/orders/sample3", + "display_name": "display_name_value", + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "labels": {}, + "state": 1, + "organization_contact": { + "address": { + "revision": 879, + "region_code": "region_code_value", + "language_code": "language_code_value", + "postal_code": "postal_code_value", + "sorting_code": "sorting_code_value", + "administrative_area": "administrative_area_value", + "locality": "locality_value", + "sublocality": "sublocality_value", + "address_lines": ["address_lines_value1", "address_lines_value2"], + "recipients": ["recipients_value1", "recipients_value2"], + "organization": "organization_value", + }, + "email": "email_value", + "phone": "phone_value", + "contacts": [ + { + "given_name": "given_name_value", + "family_name": "family_name_value", + "email": "email_value", + "phone": "phone_value", + "time_zone": {"id": "id_value", "version": "version_value"}, + "reachable_times": [ + { + "start_time": { + "hours": 561, + "minutes": 773, + "seconds": 751, + "nanos": 543, + }, + "end_time": {}, + "days": [1], + } + ], + } + ], + }, + "target_workloads": ["target_workloads_value1", "target_workloads_value2"], + "customer_motivation": "customer_motivation_value", + "fulfillment_time": {}, + "region_code": "region_code_value", + "order_form_uri": "order_form_uri_value", + "type_": 1, + "submit_time": {}, + "billing_id": "billing_id_value", + "existing_hardware": [ + { + "site": "site_value", + "rack_location": "rack_location_value", + "rack_space": [{"start_rack_unit": 1613, "end_rack_unit": 1366}], + } + ], + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = service.UpdateOrderRequest.meta.fields["order"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["order"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["order"][field])): + del request_init["order"][field][i][subfield] + else: + del request_init["order"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.update_order(request) + + # Establish that the response is the type that we expect. + assert response.operation.name == "operations/spam" + + +def test_update_order_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.update_order in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client._transport._wrapped_methods[client._transport.update_order] = mock_rpc + + request = {} + client.update_order(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.update_order(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_update_order_rest_required_fields(request_type=service.UpdateOrderRequest): + transport_class = transports.GDCHardwareManagementRestTransport + + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_order._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_order._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "request_id", + "update_mask", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "patch", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.update_order(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_update_order_rest_unset_required_fields(): + transport = transports.GDCHardwareManagementRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.update_order._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "requestId", + "updateMask", + ) + ) + & set( + ( + "updateMask", + "order", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_order_rest_interceptors(null_interceptor): + transport = transports.GDCHardwareManagementRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.GDCHardwareManagementRestInterceptor(), + ) + client = GDCHardwareManagementClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.GDCHardwareManagementRestInterceptor, "post_update_order" + ) as post, mock.patch.object( + transports.GDCHardwareManagementRestInterceptor, "pre_update_order" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = service.UpdateOrderRequest.pb(service.UpdateOrderRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = service.UpdateOrderRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.update_order( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_update_order_rest_bad_request( + transport: str = "rest", request_type=service.UpdateOrderRequest +): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "order": {"name": "projects/sample1/locations/sample2/orders/sample3"} + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.update_order(request) + + +def test_update_order_rest_flattened(): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = { + "order": {"name": "projects/sample1/locations/sample2/orders/sample3"} + } + + # get truthy value for each flattened field + mock_args = dict( + order=resources.Order(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.update_order(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha/{order.name=projects/*/locations/*/orders/*}" + % client.transport._host, + args[1], + ) + + +def test_update_order_rest_flattened_error(transport: str = "rest"): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_order( + service.UpdateOrderRequest(), + order=resources.Order(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +def test_update_order_rest_error(): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + service.DeleteOrderRequest, + dict, + ], +) +def test_delete_order_rest(request_type): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/orders/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.delete_order(request) + + # Establish that the response is the type that we expect. 
+ assert response.operation.name == "operations/spam" + + +def test_delete_order_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete_order in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.delete_order] = mock_rpc + + request = {} + client.delete_order(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete_order(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_delete_order_rest_required_fields(request_type=service.DeleteOrderRequest): + transport_class = transports.GDCHardwareManagementRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_order._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_order._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "force", + "request_id", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "delete", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.delete_order(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_delete_order_rest_unset_required_fields(): + transport = transports.GDCHardwareManagementRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.delete_order._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "force", + "requestId", + ) + ) + & set(("name",)) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_order_rest_interceptors(null_interceptor): + transport = transports.GDCHardwareManagementRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.GDCHardwareManagementRestInterceptor(), + ) + client = GDCHardwareManagementClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.GDCHardwareManagementRestInterceptor, "post_delete_order" + ) as post, mock.patch.object( + transports.GDCHardwareManagementRestInterceptor, "pre_delete_order" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = service.DeleteOrderRequest.pb(service.DeleteOrderRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = service.DeleteOrderRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.delete_order( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_delete_order_rest_bad_request( + transport: str = "rest", request_type=service.DeleteOrderRequest +): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/orders/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_order(request) + + +def test_delete_order_rest_flattened(): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = {"name": "projects/sample1/locations/sample2/orders/sample3"} + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.delete_order(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha/{name=projects/*/locations/*/orders/*}" + % client.transport._host, + args[1], + ) + + +def test_delete_order_rest_flattened_error(transport: str = "rest"): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_order( + service.DeleteOrderRequest(), + name="name_value", + ) + + +def test_delete_order_rest_error(): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + service.SubmitOrderRequest, + dict, + ], +) +def test_submit_order_rest(request_type): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/orders/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.submit_order(request) + + # Establish that the response is the type that we expect. 
+ assert response.operation.name == "operations/spam" + + +def test_submit_order_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.submit_order in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.submit_order] = mock_rpc + + request = {} + client.submit_order(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.submit_order(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_submit_order_rest_required_fields(request_type=service.SubmitOrderRequest): + transport_class = transports.GDCHardwareManagementRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).submit_order._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).submit_order._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.submit_order(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_submit_order_rest_unset_required_fields(): + transport = transports.GDCHardwareManagementRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.submit_order._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_submit_order_rest_interceptors(null_interceptor): + transport = transports.GDCHardwareManagementRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.GDCHardwareManagementRestInterceptor(), + ) + client = GDCHardwareManagementClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.GDCHardwareManagementRestInterceptor, "post_submit_order" + ) as post, mock.patch.object( + transports.GDCHardwareManagementRestInterceptor, "pre_submit_order" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = service.SubmitOrderRequest.pb(service.SubmitOrderRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = service.SubmitOrderRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.submit_order( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_submit_order_rest_bad_request( + transport: str = "rest", request_type=service.SubmitOrderRequest +): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/orders/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.submit_order(request) + + +def test_submit_order_rest_flattened(): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = {"name": "projects/sample1/locations/sample2/orders/sample3"} + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.submit_order(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha/{name=projects/*/locations/*/orders/*}:submit" + % client.transport._host, + args[1], + ) + + +def test_submit_order_rest_flattened_error(transport: str = "rest"): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.submit_order( + service.SubmitOrderRequest(), + name="name_value", + ) + + +def test_submit_order_rest_error(): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + service.ListSitesRequest, + dict, + ], +) +def test_list_sites_rest(request_type): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = service.ListSitesResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = service.ListSitesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_sites(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListSitesPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +def test_list_sites_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_sites in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.list_sites] = mock_rpc + + request = {} + client.list_sites(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_sites(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_list_sites_rest_required_fields(request_type=service.ListSitesRequest): + transport_class = transports.GDCHardwareManagementRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_sites._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_sites._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "filter", + "order_by", + "page_size", + "page_token", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = service.ListSitesResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = service.ListSitesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_sites(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_sites_rest_unset_required_fields(): + transport = transports.GDCHardwareManagementRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.list_sites._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "filter", + "orderBy", + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_sites_rest_interceptors(null_interceptor): + transport = transports.GDCHardwareManagementRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.GDCHardwareManagementRestInterceptor(), + ) + client = GDCHardwareManagementClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.GDCHardwareManagementRestInterceptor, "post_list_sites" + ) as post, mock.patch.object( + transports.GDCHardwareManagementRestInterceptor, "pre_list_sites" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = service.ListSitesRequest.pb(service.ListSitesRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = service.ListSitesResponse.to_json( + service.ListSitesResponse() + ) + + request = service.ListSitesRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = service.ListSitesResponse() + + client.list_sites( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_sites_rest_bad_request( + transport: str = "rest", request_type=service.ListSitesRequest +): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_sites(request) + + +def test_list_sites_rest_flattened(): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = service.ListSitesResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = service.ListSitesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.list_sites(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha/{parent=projects/*/locations/*}/sites" % client.transport._host, + args[1], + ) + + +def test_list_sites_rest_flattened_error(transport: str = "rest"): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_sites( + service.ListSitesRequest(), + parent="parent_value", + ) + + +def test_list_sites_rest_pager(transport: str = "rest"): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+        # with mock.patch.object(path_template, 'transcode') as transcode:
+        # Set the response as a series of pages
+        response = (
+            service.ListSitesResponse(
+                sites=[
+                    resources.Site(),
+                    resources.Site(),
+                    resources.Site(),
+                ],
+                next_page_token="abc",
+            ),
+            service.ListSitesResponse(
+                sites=[],
+                next_page_token="def",
+            ),
+            service.ListSitesResponse(
+                sites=[
+                    resources.Site(),
+                ],
+                next_page_token="ghi",
+            ),
+            service.ListSitesResponse(
+                sites=[
+                    resources.Site(),
+                    resources.Site(),
+                ],
+            ),
+        )
+        # Two responses for two calls
+        response = response + response
+
+        # Wrap the values into proper Response objs
+        response = tuple(service.ListSitesResponse.to_json(x) for x in response)
+        return_values = tuple(Response() for i in response)
+        for return_val, response_val in zip(return_values, response):
+            return_val._content = response_val.encode("UTF-8")
+            return_val.status_code = 200
+        req.side_effect = return_values
+
+        sample_request = {"parent": "projects/sample1/locations/sample2"}
+
+        pager = client.list_sites(request=sample_request)
+
+        results = list(pager)
+        assert len(results) == 6
+        assert all(isinstance(i, resources.Site) for i in results)
+
+        pages = list(client.list_sites(request=sample_request).pages)
+        for page_, token in zip(pages, ["abc", "def", "ghi", ""]):
+            assert page_.raw_page.next_page_token == token
+
+
+@pytest.mark.parametrize(
+    "request_type",
+    [
+        service.GetSiteRequest,
+        dict,
+    ],
+)
+def test_get_site_rest(request_type):
+    client = GDCHardwareManagementClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {"name": "projects/sample1/locations/sample2/sites/sample3"}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(type(client.transport._session), "request") as req:
+        # Designate an appropriate value for the returned response.
+        return_value = resources.Site(
+            name="name_value",
+            display_name="display_name_value",
+            description="description_value",
+            google_maps_pin_uri="google_maps_pin_uri_value",
+            notes="notes_value",
+        )
+
+        # Wrap the value into a proper Response obj
+        response_value = Response()
+        response_value.status_code = 200
+        # Convert return value to protobuf type
+        return_value = resources.Site.pb(return_value)
+        json_return_value = json_format.MessageToJson(return_value)
+
+        response_value._content = json_return_value.encode("UTF-8")
+        req.return_value = response_value
+        response = client.get_site(request)
+
+        # Establish that the response is the type that we expect.
+ assert isinstance(response, resources.Site) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.description == "description_value" + assert response.google_maps_pin_uri == "google_maps_pin_uri_value" + assert response.notes == "notes_value" + + +def test_get_site_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_site in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get_site] = mock_rpc + + request = {} + client.get_site(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.get_site(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_get_site_rest_required_fields(request_type=service.GetSiteRequest): + transport_class = transports.GDCHardwareManagementRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_site._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_site._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = resources.Site() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = resources.Site.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_site(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_site_rest_unset_required_fields(): + transport = transports.GDCHardwareManagementRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_site._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_site_rest_interceptors(null_interceptor): + transport = transports.GDCHardwareManagementRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.GDCHardwareManagementRestInterceptor(), + ) + client = GDCHardwareManagementClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.GDCHardwareManagementRestInterceptor, "post_get_site" + ) as post, mock.patch.object( + transports.GDCHardwareManagementRestInterceptor, "pre_get_site" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = service.GetSiteRequest.pb(service.GetSiteRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = resources.Site.to_json(resources.Site()) + + request = service.GetSiteRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = resources.Site() + + client.get_site( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_site_rest_bad_request( + transport: str = "rest", request_type=service.GetSiteRequest +): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/sites/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_site(request) + + +def test_get_site_rest_flattened(): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = resources.Site() + + # get arguments that satisfy an http rule for this method + sample_request = {"name": "projects/sample1/locations/sample2/sites/sample3"} + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = resources.Site.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_site(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha/{name=projects/*/locations/*/sites/*}" % client.transport._host, + args[1], + ) + + +def test_get_site_rest_flattened_error(transport: str = "rest"): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_site( + service.GetSiteRequest(), + name="name_value", + ) + + +def test_get_site_rest_error(): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + service.CreateSiteRequest, + dict, + ], +) +def test_create_site_rest(request_type): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request_init["site"] = { + "name": "name_value", + "display_name": "display_name_value", + "description": "description_value", + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "labels": {}, + "organization_contact": { + "address": { + "revision": 879, + "region_code": "region_code_value", + "language_code": "language_code_value", + "postal_code": "postal_code_value", + "sorting_code": "sorting_code_value", + "administrative_area": "administrative_area_value", + "locality": "locality_value", + "sublocality": "sublocality_value", + "address_lines": ["address_lines_value1", "address_lines_value2"], + "recipients": ["recipients_value1", "recipients_value2"], + "organization": "organization_value", + }, + "email": "email_value", + "phone": "phone_value", + "contacts": [ + { + "given_name": "given_name_value", + "family_name": "family_name_value", + "email": "email_value", + "phone": "phone_value", + "time_zone": {"id": "id_value", "version": "version_value"}, + "reachable_times": [ + { + "start_time": { + "hours": 561, + "minutes": 773, + "seconds": 751, + "nanos": 543, + }, + "end_time": {}, + "days": [1], + } + ], + } + ], + }, + "google_maps_pin_uri": "google_maps_pin_uri_value", + "access_times": {}, + "notes": "notes_value", + } + # The version of a generated dependency at test runtime may differ from the version used during generation. 
+ # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = service.CreateSiteRequest.meta.fields["site"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["site"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["site"][field])): + del request_init["site"][field][i][subfield] + else: + del request_init["site"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.create_site(request) + + # Establish that the response is the type that we expect. 
+ assert response.operation.name == "operations/spam" + + +def test_create_site_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.create_site in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.create_site] = mock_rpc + + request = {} + client.create_site(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.create_site(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_create_site_rest_required_fields(request_type=service.CreateSiteRequest): + transport_class = transports.GDCHardwareManagementRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_site._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_site._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "request_id", + "site_id", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.create_site(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_create_site_rest_unset_required_fields(): + transport = transports.GDCHardwareManagementRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.create_site._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "requestId", + "siteId", + ) + ) + & set( + ( + "parent", + "site", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_site_rest_interceptors(null_interceptor): + transport = transports.GDCHardwareManagementRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.GDCHardwareManagementRestInterceptor(), + ) + client = GDCHardwareManagementClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.GDCHardwareManagementRestInterceptor, "post_create_site" + ) as post, mock.patch.object( + transports.GDCHardwareManagementRestInterceptor, "pre_create_site" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = service.CreateSiteRequest.pb(service.CreateSiteRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = service.CreateSiteRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.create_site( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_create_site_rest_bad_request( + transport: str = "rest", request_type=service.CreateSiteRequest +): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.create_site(request) + + +def test_create_site_rest_flattened(): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + site=resources.Site(name="name_value"), + site_id="site_id_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.create_site(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha/{parent=projects/*/locations/*}/sites" % client.transport._host, + args[1], + ) + + +def test_create_site_rest_flattened_error(transport: str = "rest"): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.create_site( + service.CreateSiteRequest(), + parent="parent_value", + site=resources.Site(name="name_value"), + site_id="site_id_value", + ) + + +def test_create_site_rest_error(): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + service.UpdateSiteRequest, + dict, + ], +) +def test_update_site_rest(request_type): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "site": {"name": "projects/sample1/locations/sample2/sites/sample3"} + } + request_init["site"] = { + "name": "projects/sample1/locations/sample2/sites/sample3", + "display_name": "display_name_value", + "description": "description_value", + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "labels": {}, + "organization_contact": { + "address": { + "revision": 879, + "region_code": "region_code_value", + "language_code": "language_code_value", + "postal_code": "postal_code_value", + "sorting_code": "sorting_code_value", + "administrative_area": "administrative_area_value", + "locality": "locality_value", + "sublocality": "sublocality_value", + "address_lines": ["address_lines_value1", "address_lines_value2"], + "recipients": ["recipients_value1", "recipients_value2"], + "organization": "organization_value", + }, + "email": "email_value", + "phone": "phone_value", + "contacts": [ + { + "given_name": "given_name_value", + "family_name": "family_name_value", + "email": "email_value", + "phone": "phone_value", + "time_zone": {"id": "id_value", "version": "version_value"}, + "reachable_times": [ + { + "start_time": { + "hours": 561, + "minutes": 773, + "seconds": 751, + "nanos": 543, + }, + "end_time": {}, + "days": [1], + } + ], + } + ], + }, + "google_maps_pin_uri": "google_maps_pin_uri_value", + "access_times": {}, + "notes": "notes_value", + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = service.UpdateSiteRequest.meta.fields["site"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["site"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["site"][field])): + del request_init["site"][field][i][subfield] + else: + del request_init["site"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.update_site(request) + + # Establish that the response is the type that we expect. + assert response.operation.name == "operations/spam" + + +def test_update_site_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.update_site in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client._transport._wrapped_methods[client._transport.update_site] = mock_rpc + + request = {} + client.update_site(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.update_site(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_update_site_rest_required_fields(request_type=service.UpdateSiteRequest): + transport_class = transports.GDCHardwareManagementRestTransport + + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_site._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_site._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "request_id", + "update_mask", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
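+            # transcode() is stubbed with a fixed uri/method here so the test can
+            # focus on what the transport actually sends; the only query parameter
+            # asserted below is the implicit "$alt=json;enum-encoding=int".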
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "patch", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.update_site(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_update_site_rest_unset_required_fields(): + transport = transports.GDCHardwareManagementRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.update_site._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "requestId", + "updateMask", + ) + ) + & set( + ( + "updateMask", + "site", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_site_rest_interceptors(null_interceptor): + transport = transports.GDCHardwareManagementRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.GDCHardwareManagementRestInterceptor(), + ) + client = GDCHardwareManagementClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.GDCHardwareManagementRestInterceptor, "post_update_site" + ) as post, mock.patch.object( + transports.GDCHardwareManagementRestInterceptor, "pre_update_site" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = service.UpdateSiteRequest.pb(service.UpdateSiteRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = service.UpdateSiteRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.update_site( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_update_site_rest_bad_request( + transport: str = "rest", request_type=service.UpdateSiteRequest +): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "site": {"name": "projects/sample1/locations/sample2/sites/sample3"} + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
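+    # google.api_core maps an HTTP 400 response to core_exceptions.BadRequest,
+    # which is the exception pytest.raises expects here.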
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.update_site(request) + + +def test_update_site_rest_flattened(): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = { + "site": {"name": "projects/sample1/locations/sample2/sites/sample3"} + } + + # get truthy value for each flattened field + mock_args = dict( + site=resources.Site(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.update_site(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha/{site.name=projects/*/locations/*/sites/*}" + % client.transport._host, + args[1], + ) + + +def test_update_site_rest_flattened_error(transport: str = "rest"): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_site( + service.UpdateSiteRequest(), + site=resources.Site(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +def test_update_site_rest_error(): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + service.ListHardwareGroupsRequest, + dict, + ], +) +def test_list_hardware_groups_rest(request_type): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2/orders/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = service.ListHardwareGroupsResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = service.ListHardwareGroupsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_hardware_groups(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListHardwareGroupsPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +def test_list_hardware_groups_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.list_hardware_groups in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.list_hardware_groups + ] = mock_rpc + + request = {} + client.list_hardware_groups(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_hardware_groups(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_list_hardware_groups_rest_required_fields( + request_type=service.ListHardwareGroupsRequest, +): + transport_class = transports.GDCHardwareManagementRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_hardware_groups._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_hardware_groups._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set( + ( + "filter", + "order_by", + "page_size", + "page_token", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = service.ListHardwareGroupsResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = service.ListHardwareGroupsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_hardware_groups(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_hardware_groups_rest_unset_required_fields(): + transport = transports.GDCHardwareManagementRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.list_hardware_groups._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "filter", + "orderBy", + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_hardware_groups_rest_interceptors(null_interceptor): + transport = transports.GDCHardwareManagementRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.GDCHardwareManagementRestInterceptor(), + ) + client = GDCHardwareManagementClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.GDCHardwareManagementRestInterceptor, "post_list_hardware_groups" + ) as post, mock.patch.object( + transports.GDCHardwareManagementRestInterceptor, "pre_list_hardware_groups" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = service.ListHardwareGroupsRequest.pb( + service.ListHardwareGroupsRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = service.ListHardwareGroupsResponse.to_json( + service.ListHardwareGroupsResponse() + ) + + request = service.ListHardwareGroupsRequest() + metadata = [ + ("key", 
"val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = service.ListHardwareGroupsResponse() + + client.list_hardware_groups( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_hardware_groups_rest_bad_request( + transport: str = "rest", request_type=service.ListHardwareGroupsRequest +): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2/orders/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_hardware_groups(request) + + +def test_list_hardware_groups_rest_flattened(): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = service.ListHardwareGroupsResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2/orders/sample3"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = service.ListHardwareGroupsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.list_hardware_groups(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha/{parent=projects/*/locations/*/orders/*}/hardwareGroups" + % client.transport._host, + args[1], + ) + + +def test_list_hardware_groups_rest_flattened_error(transport: str = "rest"): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_hardware_groups( + service.ListHardwareGroupsRequest(), + parent="parent_value", + ) + + +def test_list_hardware_groups_rest_pager(transport: str = "rest"): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + service.ListHardwareGroupsResponse( + hardware_groups=[ + resources.HardwareGroup(), + resources.HardwareGroup(), + resources.HardwareGroup(), + ], + next_page_token="abc", + ), + service.ListHardwareGroupsResponse( + hardware_groups=[], + next_page_token="def", + ), + service.ListHardwareGroupsResponse( + hardware_groups=[ + resources.HardwareGroup(), + ], + next_page_token="ghi", + ), + service.ListHardwareGroupsResponse( + hardware_groups=[ + resources.HardwareGroup(), + resources.HardwareGroup(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + service.ListHardwareGroupsResponse.to_json(x) for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "projects/sample1/locations/sample2/orders/sample3"} + + pager = client.list_hardware_groups(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, resources.HardwareGroup) for i in results) + + pages = list(client.list_hardware_groups(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + service.GetHardwareGroupRequest, + dict, + ], +) +def test_get_hardware_group_rest(request_type): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/orders/sample3/hardwareGroups/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = resources.HardwareGroup( + name="name_value", + hardware_count=1494, + site="site_value", + state=resources.HardwareGroup.State.ADDITIONAL_INFO_NEEDED, + zone="zone_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = resources.HardwareGroup.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_hardware_group(request) + + # Establish that the response is the type that we expect. 
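+    # get_hardware_group is a unary call, so the decoded resources.HardwareGroup
+    # is returned directly and every faked field can be compared one-to-one.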
+ assert isinstance(response, resources.HardwareGroup) + assert response.name == "name_value" + assert response.hardware_count == 1494 + assert response.site == "site_value" + assert response.state == resources.HardwareGroup.State.ADDITIONAL_INFO_NEEDED + assert response.zone == "zone_value" + + +def test_get_hardware_group_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.get_hardware_group in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.get_hardware_group + ] = mock_rpc + + request = {} + client.get_hardware_group(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.get_hardware_group(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_get_hardware_group_rest_required_fields( + request_type=service.GetHardwareGroupRequest, +): + transport_class = transports.GDCHardwareManagementRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_hardware_group._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_hardware_group._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = resources.HardwareGroup() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = resources.HardwareGroup.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_hardware_group(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_hardware_group_rest_unset_required_fields(): + transport = transports.GDCHardwareManagementRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_hardware_group._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_hardware_group_rest_interceptors(null_interceptor): + transport = transports.GDCHardwareManagementRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.GDCHardwareManagementRestInterceptor(), + ) + client = GDCHardwareManagementClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.GDCHardwareManagementRestInterceptor, "post_get_hardware_group" + ) as post, mock.patch.object( + transports.GDCHardwareManagementRestInterceptor, "pre_get_hardware_group" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = service.GetHardwareGroupRequest.pb( + service.GetHardwareGroupRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = resources.HardwareGroup.to_json( + resources.HardwareGroup() + ) + + request = service.GetHardwareGroupRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = resources.HardwareGroup() + + client.get_hardware_group( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_hardware_group_rest_bad_request( + transport: str = "rest", request_type=service.GetHardwareGroupRequest +): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/orders/sample3/hardwareGroups/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_hardware_group(request) + + +def test_get_hardware_group_rest_flattened(): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = resources.HardwareGroup() + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/orders/sample3/hardwareGroups/sample4" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = resources.HardwareGroup.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_hardware_group(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha/{name=projects/*/locations/*/orders/*/hardwareGroups/*}" + % client.transport._host, + args[1], + ) + + +def test_get_hardware_group_rest_flattened_error(transport: str = "rest"): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_hardware_group( + service.GetHardwareGroupRequest(), + name="name_value", + ) + + +def test_get_hardware_group_rest_error(): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + service.CreateHardwareGroupRequest, + dict, + ], +) +def test_create_hardware_group_rest(request_type): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2/orders/sample3"} + request_init["hardware_group"] = { + "name": "name_value", + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "labels": {}, + "hardware_count": 1494, + "config": { + "sku": "sku_value", + "power_supply": 1, + "subscription_duration_months": 3042, + }, + "site": "site_value", + "state": 1, + "zone": "zone_value", + "requested_installation_date": {"year": 433, "month": 550, "day": 318}, + } + # The version of a generated dependency at test runtime may differ from the version used during generation. 
+ # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = service.CreateHardwareGroupRequest.meta.fields["hardware_group"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["hardware_group"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["hardware_group"][field])): + del request_init["hardware_group"][field][i][subfield] + else: + del request_init["hardware_group"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.create_hardware_group(request) + + # Establish that the response is the type that we expect. 
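+    # create_hardware_group is a long-running operation; the wrapper returned by
+    # the client exposes the raw operations_pb2.Operation faked above via
+    # .operation, hence the name check.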
+ assert response.operation.name == "operations/spam" + + +def test_create_hardware_group_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.create_hardware_group + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.create_hardware_group + ] = mock_rpc + + request = {} + client.create_hardware_group(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.create_hardware_group(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_create_hardware_group_rest_required_fields( + request_type=service.CreateHardwareGroupRequest, +): + transport_class = transports.GDCHardwareManagementRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_hardware_group._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_hardware_group._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "hardware_group_id", + "request_id", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.create_hardware_group(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_create_hardware_group_rest_unset_required_fields(): + transport = transports.GDCHardwareManagementRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.create_hardware_group._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "hardwareGroupId", + "requestId", + ) + ) + & set( + ( + "parent", + "hardwareGroup", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_hardware_group_rest_interceptors(null_interceptor): + transport = transports.GDCHardwareManagementRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.GDCHardwareManagementRestInterceptor(), + ) + client = GDCHardwareManagementClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.GDCHardwareManagementRestInterceptor, "post_create_hardware_group" + ) as post, mock.patch.object( + transports.GDCHardwareManagementRestInterceptor, "pre_create_hardware_group" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = service.CreateHardwareGroupRequest.pb( + service.CreateHardwareGroupRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = service.CreateHardwareGroupRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.create_hardware_group( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_create_hardware_group_rest_bad_request( + transport: str = "rest", request_type=service.CreateHardwareGroupRequest +): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2/orders/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.create_hardware_group(request) + + +def test_create_hardware_group_rest_flattened(): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2/orders/sample3"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + hardware_group=resources.HardwareGroup(name="name_value"), + hardware_group_id="hardware_group_id_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.create_hardware_group(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha/{parent=projects/*/locations/*/orders/*}/hardwareGroups" + % client.transport._host, + args[1], + ) + + +def test_create_hardware_group_rest_flattened_error(transport: str = "rest"): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_hardware_group( + service.CreateHardwareGroupRequest(), + parent="parent_value", + hardware_group=resources.HardwareGroup(name="name_value"), + hardware_group_id="hardware_group_id_value", + ) + + +def test_create_hardware_group_rest_error(): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + service.UpdateHardwareGroupRequest, + dict, + ], +) +def test_update_hardware_group_rest(request_type): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "hardware_group": { + "name": "projects/sample1/locations/sample2/orders/sample3/hardwareGroups/sample4" + } + } + request_init["hardware_group"] = { + "name": "projects/sample1/locations/sample2/orders/sample3/hardwareGroups/sample4", + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "labels": {}, + "hardware_count": 1494, + "config": { + "sku": "sku_value", + "power_supply": 1, + "subscription_duration_months": 3042, + }, + "site": "site_value", + "state": 1, + "zone": "zone_value", + "requested_installation_date": {"year": 433, "month": 550, "day": 318}, + } + # The version of a generated dependency at test runtime may differ from the version used during generation. 
+ # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = service.UpdateHardwareGroupRequest.meta.fields["hardware_group"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["hardware_group"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["hardware_group"][field])): + del request_init["hardware_group"][field][i][subfield] + else: + del request_init["hardware_group"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.update_hardware_group(request) + + # Establish that the response is the type that we expect. 
+ assert response.operation.name == "operations/spam" + + +def test_update_hardware_group_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.update_hardware_group + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.update_hardware_group + ] = mock_rpc + + request = {} + client.update_hardware_group(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.update_hardware_group(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_update_hardware_group_rest_required_fields( + request_type=service.UpdateHardwareGroupRequest, +): + transport_class = transports.GDCHardwareManagementRestTransport + + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_hardware_group._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_hardware_group._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "request_id", + "update_mask", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "patch", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.update_hardware_group(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_update_hardware_group_rest_unset_required_fields(): + transport = transports.GDCHardwareManagementRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.update_hardware_group._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "requestId", + "updateMask", + ) + ) + & set( + ( + "updateMask", + "hardwareGroup", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_hardware_group_rest_interceptors(null_interceptor): + transport = transports.GDCHardwareManagementRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.GDCHardwareManagementRestInterceptor(), + ) + client = GDCHardwareManagementClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.GDCHardwareManagementRestInterceptor, "post_update_hardware_group" + ) as post, mock.patch.object( + transports.GDCHardwareManagementRestInterceptor, "pre_update_hardware_group" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = service.UpdateHardwareGroupRequest.pb( + service.UpdateHardwareGroupRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = service.UpdateHardwareGroupRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.update_hardware_group( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_update_hardware_group_rest_bad_request( + transport: str = "rest", request_type=service.UpdateHardwareGroupRequest +): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "hardware_group": { + "name": "projects/sample1/locations/sample2/orders/sample3/hardwareGroups/sample4" + } + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.update_hardware_group(request) + + +def test_update_hardware_group_rest_flattened(): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = { + "hardware_group": { + "name": "projects/sample1/locations/sample2/orders/sample3/hardwareGroups/sample4" + } + } + + # get truthy value for each flattened field + mock_args = dict( + hardware_group=resources.HardwareGroup(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.update_hardware_group(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha/{hardware_group.name=projects/*/locations/*/orders/*/hardwareGroups/*}" + % client.transport._host, + args[1], + ) + + +def test_update_hardware_group_rest_flattened_error(transport: str = "rest"): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_hardware_group( + service.UpdateHardwareGroupRequest(), + hardware_group=resources.HardwareGroup(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +def test_update_hardware_group_rest_error(): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + service.DeleteHardwareGroupRequest, + dict, + ], +) +def test_delete_hardware_group_rest(request_type): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/orders/sample3/hardwareGroups/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.delete_hardware_group(request) + + # Establish that the response is the type that we expect. + assert response.operation.name == "operations/spam" + + +def test_delete_hardware_group_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.delete_hardware_group + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.delete_hardware_group + ] = mock_rpc + + request = {} + client.delete_hardware_group(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete_hardware_group(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_delete_hardware_group_rest_required_fields( + request_type=service.DeleteHardwareGroupRequest, +): + transport_class = transports.GDCHardwareManagementRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_hardware_group._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_hardware_group._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. 
+    with mock.patch.object(Session, "request") as req:
+        # We need to mock transcode() because providing default values
+        # for required fields will fail the real version if the http_options
+        # expect actual values for those fields.
+        with mock.patch.object(path_template, "transcode") as transcode:
+            # A uri without fields and an empty body will force all the
+            # request fields to show up in the query_params.
+            pb_request = request_type.pb(request)
+            transcode_result = {
+                "uri": "v1/sample_method",
+                "method": "delete",
+                "query_params": pb_request,
+            }
+            transcode.return_value = transcode_result
+
+            response_value = Response()
+            response_value.status_code = 200
+            json_return_value = json_format.MessageToJson(return_value)
+
+            response_value._content = json_return_value.encode("UTF-8")
+            req.return_value = response_value
+
+            response = client.delete_hardware_group(request)
+
+            expected_params = [("$alt", "json;enum-encoding=int")]
+            actual_params = req.call_args.kwargs["params"]
+            assert expected_params == actual_params
+
+
+def test_delete_hardware_group_rest_unset_required_fields():
+    transport = transports.GDCHardwareManagementRestTransport(
+        credentials=ga_credentials.AnonymousCredentials
+    )
+
+    unset_fields = transport.delete_hardware_group._get_unset_required_fields({})
+    assert set(unset_fields) == (set(("requestId",)) & set(("name",)))
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_delete_hardware_group_rest_interceptors(null_interceptor):
+    transport = transports.GDCHardwareManagementRestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None
+        if null_interceptor
+        else transports.GDCHardwareManagementRestInterceptor(),
+    )
+    client = GDCHardwareManagementClient(transport=transport)
+    with mock.patch.object(
+        type(client.transport._session), "request"
+    ) as req, mock.patch.object(
+        path_template, "transcode"
+    ) as transcode, mock.patch.object(
+        operation.Operation, "_set_result_from_operation"
+    ), mock.patch.object(
+        transports.GDCHardwareManagementRestInterceptor, "post_delete_hardware_group"
+    ) as post, mock.patch.object(
+        transports.GDCHardwareManagementRestInterceptor, "pre_delete_hardware_group"
+    ) as pre:
+        pre.assert_not_called()
+        post.assert_not_called()
+        pb_message = service.DeleteHardwareGroupRequest.pb(
+            service.DeleteHardwareGroupRequest()
+        )
+        transcode.return_value = {
+            "method": "post",
+            "uri": "my_uri",
+            "body": pb_message,
+            "query_params": pb_message,
+        }
+
+        req.return_value = Response()
+        req.return_value.status_code = 200
+        req.return_value.request = PreparedRequest()
+        req.return_value._content = json_format.MessageToJson(
+            operations_pb2.Operation()
+        )
+
+        request = service.DeleteHardwareGroupRequest()
+        metadata = [
+            ("key", "val"),
+            ("cephalopod", "squid"),
+        ]
+        pre.return_value = request, metadata
+        post.return_value = operations_pb2.Operation()
+
+        client.delete_hardware_group(
+            request,
+            metadata=[
+                ("key", "val"),
+                ("cephalopod", "squid"),
+            ],
+        )
+
+        pre.assert_called_once()
+        post.assert_called_once()
+
+
+def test_delete_hardware_group_rest_bad_request(
+    transport: str = "rest", request_type=service.DeleteHardwareGroupRequest
+):
+    client = GDCHardwareManagementClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {
+        "name": "projects/sample1/locations/sample2/orders/sample3/hardwareGroups/sample4"
+    }
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a BadRequest error.
+    with mock.patch.object(Session, "request") as req, pytest.raises(
+        core_exceptions.BadRequest
+    ):
+        # Wrap the value into a proper Response obj
+        response_value = Response()
+        response_value.status_code = 400
+        response_value.request = Request()
+        req.return_value = response_value
+        client.delete_hardware_group(request)
+
+
+def test_delete_hardware_group_rest_flattened():
+    client = GDCHardwareManagementClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(type(client.transport._session), "request") as req:
+        # Designate an appropriate value for the returned response.
+        return_value = operations_pb2.Operation(name="operations/spam")
+
+        # get arguments that satisfy an http rule for this method
+        sample_request = {
+            "name": "projects/sample1/locations/sample2/orders/sample3/hardwareGroups/sample4"
+        }
+
+        # get truthy value for each flattened field
+        mock_args = dict(
+            name="name_value",
+        )
+        mock_args.update(sample_request)
+
+        # Wrap the value into a proper Response obj
+        response_value = Response()
+        response_value.status_code = 200
+        json_return_value = json_format.MessageToJson(return_value)
+        response_value._content = json_return_value.encode("UTF-8")
+        req.return_value = response_value
+
+        client.delete_hardware_group(**mock_args)
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(req.mock_calls) == 1
+        _, args, _ = req.mock_calls[0]
+        assert path_template.validate(
+            "%s/v1alpha/{name=projects/*/locations/*/orders/*/hardwareGroups/*}"
+            % client.transport._host,
+            args[1],
+        )
+
+
+def test_delete_hardware_group_rest_flattened_error(transport: str = "rest"):
+    client = GDCHardwareManagementClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.delete_hardware_group(
+            service.DeleteHardwareGroupRequest(),
+            name="name_value",
+        )
+
+
+def test_delete_hardware_group_rest_error():
+    client = GDCHardwareManagementClient(
+        credentials=ga_credentials.AnonymousCredentials(), transport="rest"
+    )
+
+
+@pytest.mark.parametrize(
+    "request_type",
+    [
+        service.ListHardwareRequest,
+        dict,
+    ],
+)
+def test_list_hardware_rest(request_type):
+    client = GDCHardwareManagementClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {"parent": "projects/sample1/locations/sample2"}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(type(client.transport._session), "request") as req:
+        # Designate an appropriate value for the returned response.
+ return_value = service.ListHardwareResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = service.ListHardwareResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_hardware(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListHardwarePager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +def test_list_hardware_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_hardware in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.list_hardware] = mock_rpc + + request = {} + client.list_hardware(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_hardware(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_list_hardware_rest_required_fields(request_type=service.ListHardwareRequest): + transport_class = transports.GDCHardwareManagementRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_hardware._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_hardware._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "filter", + "order_by", + "page_size", + "page_token", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. 
+    return_value = service.ListHardwareResponse()
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(Session, "request") as req:
+        # We need to mock transcode() because providing default values
+        # for required fields will fail the real version if the http_options
+        # expect actual values for those fields.
+        with mock.patch.object(path_template, "transcode") as transcode:
+            # A uri without fields and an empty body will force all the
+            # request fields to show up in the query_params.
+            pb_request = request_type.pb(request)
+            transcode_result = {
+                "uri": "v1/sample_method",
+                "method": "get",
+                "query_params": pb_request,
+            }
+            transcode.return_value = transcode_result
+
+            response_value = Response()
+            response_value.status_code = 200
+
+            # Convert return value to protobuf type
+            return_value = service.ListHardwareResponse.pb(return_value)
+            json_return_value = json_format.MessageToJson(return_value)
+
+            response_value._content = json_return_value.encode("UTF-8")
+            req.return_value = response_value
+
+            response = client.list_hardware(request)
+
+            expected_params = [("$alt", "json;enum-encoding=int")]
+            actual_params = req.call_args.kwargs["params"]
+            assert expected_params == actual_params
+
+
+def test_list_hardware_rest_unset_required_fields():
+    transport = transports.GDCHardwareManagementRestTransport(
+        credentials=ga_credentials.AnonymousCredentials
+    )
+
+    unset_fields = transport.list_hardware._get_unset_required_fields({})
+    assert set(unset_fields) == (
+        set(
+            (
+                "filter",
+                "orderBy",
+                "pageSize",
+                "pageToken",
+            )
+        )
+        & set(("parent",))
+    )
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_list_hardware_rest_interceptors(null_interceptor):
+    transport = transports.GDCHardwareManagementRestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None
+        if null_interceptor
+        else transports.GDCHardwareManagementRestInterceptor(),
+    )
+    client = GDCHardwareManagementClient(transport=transport)
+    with mock.patch.object(
+        type(client.transport._session), "request"
+    ) as req, mock.patch.object(
+        path_template, "transcode"
+    ) as transcode, mock.patch.object(
+        transports.GDCHardwareManagementRestInterceptor, "post_list_hardware"
+    ) as post, mock.patch.object(
+        transports.GDCHardwareManagementRestInterceptor, "pre_list_hardware"
+    ) as pre:
+        pre.assert_not_called()
+        post.assert_not_called()
+        pb_message = service.ListHardwareRequest.pb(service.ListHardwareRequest())
+        transcode.return_value = {
+            "method": "post",
+            "uri": "my_uri",
+            "body": pb_message,
+            "query_params": pb_message,
+        }
+
+        req.return_value = Response()
+        req.return_value.status_code = 200
+        req.return_value.request = PreparedRequest()
+        req.return_value._content = service.ListHardwareResponse.to_json(
+            service.ListHardwareResponse()
+        )
+
+        request = service.ListHardwareRequest()
+        metadata = [
+            ("key", "val"),
+            ("cephalopod", "squid"),
+        ]
+        pre.return_value = request, metadata
+        post.return_value = service.ListHardwareResponse()
+
+        client.list_hardware(
+            request,
+            metadata=[
+                ("key", "val"),
+                ("cephalopod", "squid"),
+            ],
+        )
+
+        pre.assert_called_once()
+        post.assert_called_once()
+
+
+def test_list_hardware_rest_bad_request(
+    transport: str = "rest", request_type=service.ListHardwareRequest
+):
+    client = GDCHardwareManagementClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {"parent": "projects/sample1/locations/sample2"}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a BadRequest error.
+    with mock.patch.object(Session, "request") as req, pytest.raises(
+        core_exceptions.BadRequest
+    ):
+        # Wrap the value into a proper Response obj
+        response_value = Response()
+        response_value.status_code = 400
+        response_value.request = Request()
+        req.return_value = response_value
+        client.list_hardware(request)
+
+
+def test_list_hardware_rest_flattened():
+    client = GDCHardwareManagementClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(type(client.transport._session), "request") as req:
+        # Designate an appropriate value for the returned response.
+        return_value = service.ListHardwareResponse()
+
+        # get arguments that satisfy an http rule for this method
+        sample_request = {"parent": "projects/sample1/locations/sample2"}
+
+        # get truthy value for each flattened field
+        mock_args = dict(
+            parent="parent_value",
+        )
+        mock_args.update(sample_request)
+
+        # Wrap the value into a proper Response obj
+        response_value = Response()
+        response_value.status_code = 200
+        # Convert return value to protobuf type
+        return_value = service.ListHardwareResponse.pb(return_value)
+        json_return_value = json_format.MessageToJson(return_value)
+        response_value._content = json_return_value.encode("UTF-8")
+        req.return_value = response_value
+
+        client.list_hardware(**mock_args)
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(req.mock_calls) == 1
+        _, args, _ = req.mock_calls[0]
+        assert path_template.validate(
+            "%s/v1alpha/{parent=projects/*/locations/*}/hardware"
+            % client.transport._host,
+            args[1],
+        )
+
+
+def test_list_hardware_rest_flattened_error(transport: str = "rest"):
+    client = GDCHardwareManagementClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.list_hardware(
+            service.ListHardwareRequest(),
+            parent="parent_value",
+        )
+
+
+def test_list_hardware_rest_pager(transport: str = "rest"):
+    client = GDCHardwareManagementClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(Session, "request") as req:
+        # TODO(kbandes): remove this mock unless there's a good reason for it.
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + service.ListHardwareResponse( + hardware=[ + resources.Hardware(), + resources.Hardware(), + resources.Hardware(), + ], + next_page_token="abc", + ), + service.ListHardwareResponse( + hardware=[], + next_page_token="def", + ), + service.ListHardwareResponse( + hardware=[ + resources.Hardware(), + ], + next_page_token="ghi", + ), + service.ListHardwareResponse( + hardware=[ + resources.Hardware(), + resources.Hardware(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(service.ListHardwareResponse.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "projects/sample1/locations/sample2"} + + pager = client.list_hardware(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, resources.Hardware) for i in results) + + pages = list(client.list_hardware(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + service.GetHardwareRequest, + dict, + ], +) +def test_get_hardware_rest(request_type): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/hardware/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = resources.Hardware( + name="name_value", + display_name="display_name_value", + order="order_value", + hardware_group="hardware_group_value", + site="site_value", + state=resources.Hardware.State.ADDITIONAL_INFO_NEEDED, + ciq_uri="ciq_uri_value", + zone="zone_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = resources.Hardware.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_hardware(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, resources.Hardware) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.order == "order_value" + assert response.hardware_group == "hardware_group_value" + assert response.site == "site_value" + assert response.state == resources.Hardware.State.ADDITIONAL_INFO_NEEDED + assert response.ciq_uri == "ciq_uri_value" + assert response.zone == "zone_value" + + +def test_get_hardware_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_hardware in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get_hardware] = mock_rpc + + request = {} + client.get_hardware(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.get_hardware(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_get_hardware_rest_required_fields(request_type=service.GetHardwareRequest): + transport_class = transports.GDCHardwareManagementRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_hardware._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_hardware._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = resources.Hardware() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = resources.Hardware.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_hardware(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_hardware_rest_unset_required_fields(): + transport = transports.GDCHardwareManagementRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_hardware._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_hardware_rest_interceptors(null_interceptor): + transport = transports.GDCHardwareManagementRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.GDCHardwareManagementRestInterceptor(), + ) + client = GDCHardwareManagementClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.GDCHardwareManagementRestInterceptor, "post_get_hardware" + ) as post, mock.patch.object( + transports.GDCHardwareManagementRestInterceptor, "pre_get_hardware" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = service.GetHardwareRequest.pb(service.GetHardwareRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = resources.Hardware.to_json(resources.Hardware()) + + request = service.GetHardwareRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = resources.Hardware() + + client.get_hardware( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_hardware_rest_bad_request( + transport: str = "rest", request_type=service.GetHardwareRequest +): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/hardware/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_hardware(request) + + +def test_get_hardware_rest_flattened(): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = resources.Hardware() + + # get arguments that satisfy an http rule for this method + sample_request = {"name": "projects/sample1/locations/sample2/hardware/sample3"} + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = resources.Hardware.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_hardware(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha/{name=projects/*/locations/*/hardware/*}" + % client.transport._host, + args[1], + ) + + +def test_get_hardware_rest_flattened_error(transport: str = "rest"): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get_hardware( + service.GetHardwareRequest(), + name="name_value", + ) + + +def test_get_hardware_rest_error(): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + service.CreateHardwareRequest, + dict, + ], +) +def test_create_hardware_rest(request_type): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request_init["hardware"] = { + "name": "name_value", + "display_name": "display_name_value", + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "labels": {}, + "order": "order_value", + "hardware_group": "hardware_group_value", + "site": "site_value", + "state": 1, + "ciq_uri": "ciq_uri_value", + "config": { + "sku": "sku_value", + "power_supply": 1, + "subscription_duration_months": 3042, + }, + "estimated_installation_date": {"year": 433, "month": 550, "day": 318}, + "physical_info": { + "power_receptacle": 1, + "network_uplink": 1, + "voltage": 1, + "amperes": 1, + }, + "installation_info": { + "rack_location": "rack_location_value", + "power_distance_meters": 2246, + "switch_distance_meters": 2347, + "rack_unit_dimensions": { + "width_inches": 0.1273, + "height_inches": 0.13620000000000002, + "depth_inches": 0.1262, + }, + "rack_space": {"start_rack_unit": 1613, "end_rack_unit": 1366}, + "rack_type": 1, + }, + "zone": "zone_value", + "requested_installation_date": {}, + "actual_installation_date": {}, + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = service.CreateHardwareRequest.meta.fields["hardware"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["hardware"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["hardware"][field])): + del request_init["hardware"][field][i][subfield] + else: + del request_init["hardware"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.create_hardware(request) + + # Establish that the response is the type that we expect. 
+ assert response.operation.name == "operations/spam" + + +def test_create_hardware_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.create_hardware in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.create_hardware] = mock_rpc + + request = {} + client.create_hardware(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.create_hardware(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_create_hardware_rest_required_fields( + request_type=service.CreateHardwareRequest, +): + transport_class = transports.GDCHardwareManagementRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_hardware._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_hardware._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("hardware_id",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.create_hardware(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_create_hardware_rest_unset_required_fields(): + transport = transports.GDCHardwareManagementRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.create_hardware._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(("hardwareId",)) + & set( + ( + "parent", + "hardware", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_hardware_rest_interceptors(null_interceptor): + transport = transports.GDCHardwareManagementRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.GDCHardwareManagementRestInterceptor(), + ) + client = GDCHardwareManagementClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.GDCHardwareManagementRestInterceptor, "post_create_hardware" + ) as post, mock.patch.object( + transports.GDCHardwareManagementRestInterceptor, "pre_create_hardware" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = service.CreateHardwareRequest.pb(service.CreateHardwareRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = service.CreateHardwareRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.create_hardware( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_create_hardware_rest_bad_request( + transport: str = "rest", request_type=service.CreateHardwareRequest +): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.create_hardware(request) + + +def test_create_hardware_rest_flattened(): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + hardware=resources.Hardware(name="name_value"), + hardware_id="hardware_id_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.create_hardware(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha/{parent=projects/*/locations/*}/hardware" + % client.transport._host, + args[1], + ) + + +def test_create_hardware_rest_flattened_error(transport: str = "rest"): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.create_hardware( + service.CreateHardwareRequest(), + parent="parent_value", + hardware=resources.Hardware(name="name_value"), + hardware_id="hardware_id_value", + ) + + +def test_create_hardware_rest_error(): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + service.UpdateHardwareRequest, + dict, + ], +) +def test_update_hardware_rest(request_type): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "hardware": {"name": "projects/sample1/locations/sample2/hardware/sample3"} + } + request_init["hardware"] = { + "name": "projects/sample1/locations/sample2/hardware/sample3", + "display_name": "display_name_value", + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "labels": {}, + "order": "order_value", + "hardware_group": "hardware_group_value", + "site": "site_value", + "state": 1, + "ciq_uri": "ciq_uri_value", + "config": { + "sku": "sku_value", + "power_supply": 1, + "subscription_duration_months": 3042, + }, + "estimated_installation_date": {"year": 433, "month": 550, "day": 318}, + "physical_info": { + "power_receptacle": 1, + "network_uplink": 1, + "voltage": 1, + "amperes": 1, + }, + "installation_info": { + "rack_location": "rack_location_value", + "power_distance_meters": 2246, + "switch_distance_meters": 2347, + "rack_unit_dimensions": { + "width_inches": 0.1273, + "height_inches": 0.13620000000000002, + "depth_inches": 0.1262, + }, + "rack_space": {"start_rack_unit": 1613, "end_rack_unit": 1366}, + "rack_type": 1, + }, + "zone": "zone_value", + "requested_installation_date": {}, + "actual_installation_date": {}, + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = service.UpdateHardwareRequest.meta.fields["hardware"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["hardware"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["hardware"][field])): + del request_init["hardware"][field][i][subfield] + else: + del request_init["hardware"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.update_hardware(request) + + # Establish that the response is the type that we expect. 
+ assert response.operation.name == "operations/spam" + + +def test_update_hardware_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.update_hardware in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.update_hardware] = mock_rpc + + request = {} + client.update_hardware(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.update_hardware(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_update_hardware_rest_required_fields( + request_type=service.UpdateHardwareRequest, +): + transport_class = transports.GDCHardwareManagementRestTransport + + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_hardware._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_hardware._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "request_id", + "update_mask", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "patch", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.update_hardware(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_update_hardware_rest_unset_required_fields(): + transport = transports.GDCHardwareManagementRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.update_hardware._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "requestId", + "updateMask", + ) + ) + & set( + ( + "updateMask", + "hardware", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_hardware_rest_interceptors(null_interceptor): + transport = transports.GDCHardwareManagementRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.GDCHardwareManagementRestInterceptor(), + ) + client = GDCHardwareManagementClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.GDCHardwareManagementRestInterceptor, "post_update_hardware" + ) as post, mock.patch.object( + transports.GDCHardwareManagementRestInterceptor, "pre_update_hardware" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = service.UpdateHardwareRequest.pb(service.UpdateHardwareRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = service.UpdateHardwareRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.update_hardware( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_update_hardware_rest_bad_request( + transport: str = "rest", request_type=service.UpdateHardwareRequest +): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "hardware": {"name": "projects/sample1/locations/sample2/hardware/sample3"} + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.update_hardware(request) + + +def test_update_hardware_rest_flattened(): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = { + "hardware": {"name": "projects/sample1/locations/sample2/hardware/sample3"} + } + + # get truthy value for each flattened field + mock_args = dict( + hardware=resources.Hardware(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.update_hardware(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha/{hardware.name=projects/*/locations/*/hardware/*}" + % client.transport._host, + args[1], + ) + + +def test_update_hardware_rest_flattened_error(transport: str = "rest"): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_hardware( + service.UpdateHardwareRequest(), + hardware=resources.Hardware(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +def test_update_hardware_rest_error(): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + service.DeleteHardwareRequest, + dict, + ], +) +def test_delete_hardware_rest(request_type): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/hardware/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.delete_hardware(request) + + # Establish that the response is the type that we expect. 
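+    # delete_hardware is a long-running operation; the client wraps the decoded
+    # operations_pb2.Operation in an operation future, so the underlying proto's
+    # name is asserted rather than the future object itself.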
+ assert response.operation.name == "operations/spam" + + +def test_delete_hardware_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete_hardware in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.delete_hardware] = mock_rpc + + request = {} + client.delete_hardware(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete_hardware(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_delete_hardware_rest_required_fields( + request_type=service.DeleteHardwareRequest, +): + transport_class = transports.GDCHardwareManagementRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_hardware._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_hardware._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
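+            # Unlike the PATCH case above, DELETE carries no request body, so the
+            # canned transcode result only needs uri, method and query_params.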
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "delete", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.delete_hardware(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_delete_hardware_rest_unset_required_fields(): + transport = transports.GDCHardwareManagementRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.delete_hardware._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId",)) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_hardware_rest_interceptors(null_interceptor): + transport = transports.GDCHardwareManagementRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.GDCHardwareManagementRestInterceptor(), + ) + client = GDCHardwareManagementClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.GDCHardwareManagementRestInterceptor, "post_delete_hardware" + ) as post, mock.patch.object( + transports.GDCHardwareManagementRestInterceptor, "pre_delete_hardware" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = service.DeleteHardwareRequest.pb(service.DeleteHardwareRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = service.DeleteHardwareRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.delete_hardware( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_delete_hardware_rest_bad_request( + transport: str = "rest", request_type=service.DeleteHardwareRequest +): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/hardware/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_hardware(request) + + +def test_delete_hardware_rest_flattened(): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = {"name": "projects/sample1/locations/sample2/hardware/sample3"} + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.delete_hardware(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha/{name=projects/*/locations/*/hardware/*}" + % client.transport._host, + args[1], + ) + + +def test_delete_hardware_rest_flattened_error(transport: str = "rest"): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_hardware( + service.DeleteHardwareRequest(), + name="name_value", + ) + + +def test_delete_hardware_rest_error(): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + service.ListCommentsRequest, + dict, + ], +) +def test_list_comments_rest(request_type): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2/orders/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = service.ListCommentsResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = service.ListCommentsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_comments(request) + + # Establish that the response is the type that we expect. 
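+    # list_comments returns a pager that lazily fetches pages; fields of the
+    # first response (next_page_token, unreachable) are surfaced directly on
+    # the pager, which is what the assertions below rely on.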
+ assert isinstance(response, pagers.ListCommentsPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +def test_list_comments_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_comments in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.list_comments] = mock_rpc + + request = {} + client.list_comments(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_comments(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_list_comments_rest_required_fields(request_type=service.ListCommentsRequest): + transport_class = transports.GDCHardwareManagementRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_comments._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_comments._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "filter", + "order_by", + "page_size", + "page_token", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = service.ListCommentsResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = service.ListCommentsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_comments(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_comments_rest_unset_required_fields(): + transport = transports.GDCHardwareManagementRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.list_comments._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "filter", + "orderBy", + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_comments_rest_interceptors(null_interceptor): + transport = transports.GDCHardwareManagementRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.GDCHardwareManagementRestInterceptor(), + ) + client = GDCHardwareManagementClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.GDCHardwareManagementRestInterceptor, "post_list_comments" + ) as post, mock.patch.object( + transports.GDCHardwareManagementRestInterceptor, "pre_list_comments" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = service.ListCommentsRequest.pb(service.ListCommentsRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = service.ListCommentsResponse.to_json( + service.ListCommentsResponse() + ) + + request = service.ListCommentsRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = service.ListCommentsResponse() + + client.list_comments( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_comments_rest_bad_request( + transport: str = "rest", request_type=service.ListCommentsRequest +): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2/orders/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_comments(request) + + +def test_list_comments_rest_flattened(): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = service.ListCommentsResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2/orders/sample3"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = service.ListCommentsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.list_comments(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha/{parent=projects/*/locations/*/orders/*}/comments" + % client.transport._host, + args[1], + ) + + +def test_list_comments_rest_flattened_error(transport: str = "rest"): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_comments( + service.ListCommentsRequest(), + parent="parent_value", + ) + + +def test_list_comments_rest_pager(transport: str = "rest"): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
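+        # Four pages are prepared below (tokens "abc", "def", "ghi", then empty)
+        # and the sequence is doubled so that list(pager) and the .pages iterator
+        # can each consume a full set of responses from req.side_effect.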
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + service.ListCommentsResponse( + comments=[ + resources.Comment(), + resources.Comment(), + resources.Comment(), + ], + next_page_token="abc", + ), + service.ListCommentsResponse( + comments=[], + next_page_token="def", + ), + service.ListCommentsResponse( + comments=[ + resources.Comment(), + ], + next_page_token="ghi", + ), + service.ListCommentsResponse( + comments=[ + resources.Comment(), + resources.Comment(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(service.ListCommentsResponse.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "projects/sample1/locations/sample2/orders/sample3"} + + pager = client.list_comments(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, resources.Comment) for i in results) + + pages = list(client.list_comments(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + service.GetCommentRequest, + dict, + ], +) +def test_get_comment_rest(request_type): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/orders/sample3/comments/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = resources.Comment( + name="name_value", + author="author_value", + text="text_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = resources.Comment.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_comment(request) + + # Establish that the response is the type that we expect. 
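+    # The JSON payload is parsed back into the proto-plus Comment type, so the
+    # scalar fields set on return_value above should round-trip unchanged.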
+ assert isinstance(response, resources.Comment) + assert response.name == "name_value" + assert response.author == "author_value" + assert response.text == "text_value" + + +def test_get_comment_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_comment in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get_comment] = mock_rpc + + request = {} + client.get_comment(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.get_comment(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_get_comment_rest_required_fields(request_type=service.GetCommentRequest): + transport_class = transports.GDCHardwareManagementRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_comment._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_comment._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = resources.Comment() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = resources.Comment.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_comment(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_comment_rest_unset_required_fields(): + transport = transports.GDCHardwareManagementRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_comment._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_comment_rest_interceptors(null_interceptor): + transport = transports.GDCHardwareManagementRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.GDCHardwareManagementRestInterceptor(), + ) + client = GDCHardwareManagementClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.GDCHardwareManagementRestInterceptor, "post_get_comment" + ) as post, mock.patch.object( + transports.GDCHardwareManagementRestInterceptor, "pre_get_comment" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = service.GetCommentRequest.pb(service.GetCommentRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = resources.Comment.to_json(resources.Comment()) + + request = service.GetCommentRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = resources.Comment() + + client.get_comment( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_comment_rest_bad_request( + transport: str = "rest", request_type=service.GetCommentRequest +): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/orders/sample3/comments/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_comment(request) + + +def test_get_comment_rest_flattened(): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = resources.Comment() + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/orders/sample3/comments/sample4" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = resources.Comment.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_comment(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha/{name=projects/*/locations/*/orders/*/comments/*}" + % client.transport._host, + args[1], + ) + + +def test_get_comment_rest_flattened_error(transport: str = "rest"): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_comment( + service.GetCommentRequest(), + name="name_value", + ) + + +def test_get_comment_rest_error(): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + service.CreateCommentRequest, + dict, + ], +) +def test_create_comment_rest(request_type): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2/orders/sample3"} + request_init["comment"] = { + "name": "name_value", + "create_time": {"seconds": 751, "nanos": 543}, + "labels": {}, + "author": "author_value", + "text": "text_value", + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = service.CreateCommentRequest.meta.fields["comment"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
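+        # proto-plus message types expose their fields via .meta.fields, while
+        # raw protobuf types expose DESCRIPTOR.fields; the check below picks the
+        # right one so nested fields can be compared against the sample request.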
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["comment"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["comment"][field])): + del request_init["comment"][field][i][subfield] + else: + del request_init["comment"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.create_comment(request) + + # Establish that the response is the type that we expect. 
+ assert response.operation.name == "operations/spam" + + +def test_create_comment_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.create_comment in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.create_comment] = mock_rpc + + request = {} + client.create_comment(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.create_comment(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_create_comment_rest_required_fields(request_type=service.CreateCommentRequest): + transport_class = transports.GDCHardwareManagementRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_comment._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_comment._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "comment_id", + "request_id", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.create_comment(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_create_comment_rest_unset_required_fields(): + transport = transports.GDCHardwareManagementRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.create_comment._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "commentId", + "requestId", + ) + ) + & set( + ( + "parent", + "comment", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_comment_rest_interceptors(null_interceptor): + transport = transports.GDCHardwareManagementRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.GDCHardwareManagementRestInterceptor(), + ) + client = GDCHardwareManagementClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.GDCHardwareManagementRestInterceptor, "post_create_comment" + ) as post, mock.patch.object( + transports.GDCHardwareManagementRestInterceptor, "pre_create_comment" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = service.CreateCommentRequest.pb(service.CreateCommentRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = service.CreateCommentRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.create_comment( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_create_comment_rest_bad_request( + transport: str = "rest", request_type=service.CreateCommentRequest +): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2/orders/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.create_comment(request) + + +def test_create_comment_rest_flattened(): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2/orders/sample3"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + comment=resources.Comment(name="name_value"), + comment_id="comment_id_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.create_comment(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha/{parent=projects/*/locations/*/orders/*}/comments" + % client.transport._host, + args[1], + ) + + +def test_create_comment_rest_flattened_error(transport: str = "rest"): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_comment( + service.CreateCommentRequest(), + parent="parent_value", + comment=resources.Comment(name="name_value"), + comment_id="comment_id_value", + ) + + +def test_create_comment_rest_error(): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + service.ListChangeLogEntriesRequest, + dict, + ], +) +def test_list_change_log_entries_rest(request_type): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2/orders/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = service.ListChangeLogEntriesResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = service.ListChangeLogEntriesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_change_log_entries(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListChangeLogEntriesPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +def test_list_change_log_entries_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.list_change_log_entries + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.list_change_log_entries + ] = mock_rpc + + request = {} + client.list_change_log_entries(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_change_log_entries(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_list_change_log_entries_rest_required_fields( + request_type=service.ListChangeLogEntriesRequest, +): + transport_class = transports.GDCHardwareManagementRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_change_log_entries._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_change_log_entries._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
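+    # Only the optional query parameters may remain unset at this point; any
+    # other leftover field would indicate a required path or body field leaking
+    # into the query string.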
+ assert not set(unset_fields) - set( + ( + "filter", + "order_by", + "page_size", + "page_token", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = service.ListChangeLogEntriesResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = service.ListChangeLogEntriesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_change_log_entries(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_change_log_entries_rest_unset_required_fields(): + transport = transports.GDCHardwareManagementRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.list_change_log_entries._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "filter", + "orderBy", + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_change_log_entries_rest_interceptors(null_interceptor): + transport = transports.GDCHardwareManagementRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.GDCHardwareManagementRestInterceptor(), + ) + client = GDCHardwareManagementClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.GDCHardwareManagementRestInterceptor, "post_list_change_log_entries" + ) as post, mock.patch.object( + transports.GDCHardwareManagementRestInterceptor, "pre_list_change_log_entries" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = service.ListChangeLogEntriesRequest.pb( + service.ListChangeLogEntriesRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = service.ListChangeLogEntriesResponse.to_json( + service.ListChangeLogEntriesResponse() + ) + + request = 
service.ListChangeLogEntriesRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = service.ListChangeLogEntriesResponse() + + client.list_change_log_entries( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_change_log_entries_rest_bad_request( + transport: str = "rest", request_type=service.ListChangeLogEntriesRequest +): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2/orders/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_change_log_entries(request) + + +def test_list_change_log_entries_rest_flattened(): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = service.ListChangeLogEntriesResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2/orders/sample3"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = service.ListChangeLogEntriesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.list_change_log_entries(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha/{parent=projects/*/locations/*/orders/*}/changeLogEntries" + % client.transport._host, + args[1], + ) + + +def test_list_change_log_entries_rest_flattened_error(transport: str = "rest"): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_change_log_entries( + service.ListChangeLogEntriesRequest(), + parent="parent_value", + ) + + +def test_list_change_log_entries_rest_pager(transport: str = "rest"): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + service.ListChangeLogEntriesResponse( + change_log_entries=[ + resources.ChangeLogEntry(), + resources.ChangeLogEntry(), + resources.ChangeLogEntry(), + ], + next_page_token="abc", + ), + service.ListChangeLogEntriesResponse( + change_log_entries=[], + next_page_token="def", + ), + service.ListChangeLogEntriesResponse( + change_log_entries=[ + resources.ChangeLogEntry(), + ], + next_page_token="ghi", + ), + service.ListChangeLogEntriesResponse( + change_log_entries=[ + resources.ChangeLogEntry(), + resources.ChangeLogEntry(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + service.ListChangeLogEntriesResponse.to_json(x) for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "projects/sample1/locations/sample2/orders/sample3"} + + pager = client.list_change_log_entries(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, resources.ChangeLogEntry) for i in results) + + pages = list(client.list_change_log_entries(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + service.GetChangeLogEntryRequest, + dict, + ], +) +def test_get_change_log_entry_rest(request_type): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/orders/sample3/changeLogEntries/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = resources.ChangeLogEntry( + name="name_value", + log="log_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = resources.ChangeLogEntry.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_change_log_entry(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, resources.ChangeLogEntry) + assert response.name == "name_value" + assert response.log == "log_value" + + +def test_get_change_log_entry_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.get_change_log_entry in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.get_change_log_entry + ] = mock_rpc + + request = {} + client.get_change_log_entry(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.get_change_log_entry(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_get_change_log_entry_rest_required_fields( + request_type=service.GetChangeLogEntryRequest, +): + transport_class = transports.GDCHardwareManagementRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_change_log_entry._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_change_log_entry._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = resources.ChangeLogEntry() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = resources.ChangeLogEntry.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_change_log_entry(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_change_log_entry_rest_unset_required_fields(): + transport = transports.GDCHardwareManagementRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_change_log_entry._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_change_log_entry_rest_interceptors(null_interceptor): + transport = transports.GDCHardwareManagementRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.GDCHardwareManagementRestInterceptor(), + ) + client = GDCHardwareManagementClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.GDCHardwareManagementRestInterceptor, "post_get_change_log_entry" + ) as post, mock.patch.object( + transports.GDCHardwareManagementRestInterceptor, "pre_get_change_log_entry" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = service.GetChangeLogEntryRequest.pb( + service.GetChangeLogEntryRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = resources.ChangeLogEntry.to_json( + resources.ChangeLogEntry() + ) + + request = service.GetChangeLogEntryRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = resources.ChangeLogEntry() + + client.get_change_log_entry( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_change_log_entry_rest_bad_request( + transport: str = "rest", request_type=service.GetChangeLogEntryRequest +): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/orders/sample3/changeLogEntries/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_change_log_entry(request) + + +def test_get_change_log_entry_rest_flattened(): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = resources.ChangeLogEntry() + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/orders/sample3/changeLogEntries/sample4" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = resources.ChangeLogEntry.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_change_log_entry(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha/{name=projects/*/locations/*/orders/*/changeLogEntries/*}" + % client.transport._host, + args[1], + ) + + +def test_get_change_log_entry_rest_flattened_error(transport: str = "rest"): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_change_log_entry( + service.GetChangeLogEntryRequest(), + name="name_value", + ) + + +def test_get_change_log_entry_rest_error(): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + service.ListSkusRequest, + dict, + ], +) +def test_list_skus_rest(request_type): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = service.ListSkusResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = service.ListSkusResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_skus(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListSkusPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +def test_list_skus_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_skus in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.list_skus] = mock_rpc + + request = {} + client.list_skus(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_skus(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_list_skus_rest_required_fields(request_type=service.ListSkusRequest): + transport_class = transports.GDCHardwareManagementRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_skus._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_skus._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "filter", + "order_by", + "page_size", + "page_token", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = service.ListSkusResponse() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = service.ListSkusResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_skus(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_skus_rest_unset_required_fields(): + transport = transports.GDCHardwareManagementRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.list_skus._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "filter", + "orderBy", + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_skus_rest_interceptors(null_interceptor): + transport = transports.GDCHardwareManagementRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.GDCHardwareManagementRestInterceptor(), + ) + client = GDCHardwareManagementClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.GDCHardwareManagementRestInterceptor, "post_list_skus" + ) as post, mock.patch.object( + transports.GDCHardwareManagementRestInterceptor, "pre_list_skus" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = service.ListSkusRequest.pb(service.ListSkusRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = service.ListSkusResponse.to_json( + service.ListSkusResponse() + ) + + request = service.ListSkusRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = service.ListSkusResponse() + + client.list_skus( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_skus_rest_bad_request( + transport: str = "rest", request_type=service.ListSkusRequest +): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_skus(request) + + +def test_list_skus_rest_flattened(): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = service.ListSkusResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = service.ListSkusResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.list_skus(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha/{parent=projects/*/locations/*}/skus" % client.transport._host, + args[1], + ) + + +def test_list_skus_rest_flattened_error(transport: str = "rest"): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_skus( + service.ListSkusRequest(), + parent="parent_value", + ) + + +def test_list_skus_rest_pager(transport: str = "rest"): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + service.ListSkusResponse( + skus=[ + resources.Sku(), + resources.Sku(), + resources.Sku(), + ], + next_page_token="abc", + ), + service.ListSkusResponse( + skus=[], + next_page_token="def", + ), + service.ListSkusResponse( + skus=[ + resources.Sku(), + ], + next_page_token="ghi", + ), + service.ListSkusResponse( + skus=[ + resources.Sku(), + resources.Sku(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(service.ListSkusResponse.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "projects/sample1/locations/sample2"} + + pager = client.list_skus(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, resources.Sku) for i in results) + + pages = list(client.list_skus(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + service.GetSkuRequest, + dict, + ], +) +def test_get_sku_rest(request_type): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/skus/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = resources.Sku( + name="name_value", + display_name="display_name_value", + description="description_value", + revision_id="revision_id_value", + is_active=True, + type_=resources.Sku.Type.RACK, + vcpu_count=1094, + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = resources.Sku.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_sku(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, resources.Sku) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.description == "description_value" + assert response.revision_id == "revision_id_value" + assert response.is_active is True + assert response.type_ == resources.Sku.Type.RACK + assert response.vcpu_count == 1094 + + +def test_get_sku_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_sku in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get_sku] = mock_rpc + + request = {} + client.get_sku(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.get_sku(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_get_sku_rest_required_fields(request_type=service.GetSkuRequest): + transport_class = transports.GDCHardwareManagementRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_sku._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_sku._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = resources.Sku() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = resources.Sku.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_sku(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_sku_rest_unset_required_fields(): + transport = transports.GDCHardwareManagementRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_sku._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_sku_rest_interceptors(null_interceptor): + transport = transports.GDCHardwareManagementRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.GDCHardwareManagementRestInterceptor(), + ) + client = GDCHardwareManagementClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.GDCHardwareManagementRestInterceptor, "post_get_sku" + ) as post, mock.patch.object( + transports.GDCHardwareManagementRestInterceptor, "pre_get_sku" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = service.GetSkuRequest.pb(service.GetSkuRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = resources.Sku.to_json(resources.Sku()) + + request = service.GetSkuRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = resources.Sku() + + client.get_sku( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_sku_rest_bad_request( + transport: str = "rest", request_type=service.GetSkuRequest +): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/skus/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_sku(request) + + +def test_get_sku_rest_flattened(): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = resources.Sku() + + # get arguments that satisfy an http rule for this method + sample_request = {"name": "projects/sample1/locations/sample2/skus/sample3"} + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = resources.Sku.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_sku(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha/{name=projects/*/locations/*/skus/*}" % client.transport._host, + args[1], + ) + + +def test_get_sku_rest_flattened_error(transport: str = "rest"): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_sku( + service.GetSkuRequest(), + name="name_value", + ) + + +def test_get_sku_rest_error(): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + service.ListZonesRequest, + dict, + ], +) +def test_list_zones_rest(request_type): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = service.ListZonesResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = service.ListZonesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_zones(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListZonesPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +def test_list_zones_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_zones in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.list_zones] = mock_rpc + + request = {} + client.list_zones(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_zones(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_list_zones_rest_required_fields(request_type=service.ListZonesRequest): + transport_class = transports.GDCHardwareManagementRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_zones._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_zones._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "filter", + "order_by", + "page_size", + "page_token", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = service.ListZonesResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = service.ListZonesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_zones(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_zones_rest_unset_required_fields(): + transport = transports.GDCHardwareManagementRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.list_zones._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "filter", + "orderBy", + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_zones_rest_interceptors(null_interceptor): + transport = transports.GDCHardwareManagementRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.GDCHardwareManagementRestInterceptor(), + ) + client = GDCHardwareManagementClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.GDCHardwareManagementRestInterceptor, "post_list_zones" + ) as post, mock.patch.object( + transports.GDCHardwareManagementRestInterceptor, "pre_list_zones" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = service.ListZonesRequest.pb(service.ListZonesRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = service.ListZonesResponse.to_json( + service.ListZonesResponse() + ) + + request = service.ListZonesRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = service.ListZonesResponse() + + client.list_zones( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_zones_rest_bad_request( + transport: str = "rest", request_type=service.ListZonesRequest +): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_zones(request) + + +def test_list_zones_rest_flattened(): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = service.ListZonesResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = service.ListZonesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.list_zones(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha/{parent=projects/*/locations/*}/zones" % client.transport._host, + args[1], + ) + + +def test_list_zones_rest_flattened_error(transport: str = "rest"): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_zones( + service.ListZonesRequest(), + parent="parent_value", + ) + + +def test_list_zones_rest_pager(transport: str = "rest"): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + service.ListZonesResponse( + zones=[ + resources.Zone(), + resources.Zone(), + resources.Zone(), + ], + next_page_token="abc", + ), + service.ListZonesResponse( + zones=[], + next_page_token="def", + ), + service.ListZonesResponse( + zones=[ + resources.Zone(), + ], + next_page_token="ghi", + ), + service.ListZonesResponse( + zones=[ + resources.Zone(), + resources.Zone(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(service.ListZonesResponse.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "projects/sample1/locations/sample2"} + + pager = client.list_zones(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, resources.Zone) for i in results) + + pages = list(client.list_zones(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + service.GetZoneRequest, + dict, + ], +) +def test_get_zone_rest(request_type): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/zones/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = resources.Zone( + name="name_value", + display_name="display_name_value", + state=resources.Zone.State.ADDITIONAL_INFO_NEEDED, + ciq_uri="ciq_uri_value", + globally_unique_id="globally_unique_id_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = resources.Zone.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_zone(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, resources.Zone) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.state == resources.Zone.State.ADDITIONAL_INFO_NEEDED + assert response.ciq_uri == "ciq_uri_value" + assert response.globally_unique_id == "globally_unique_id_value" + + +def test_get_zone_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_zone in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get_zone] = mock_rpc + + request = {} + client.get_zone(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.get_zone(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_get_zone_rest_required_fields(request_type=service.GetZoneRequest): + transport_class = transports.GDCHardwareManagementRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_zone._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_zone._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = resources.Zone() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = resources.Zone.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_zone(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_zone_rest_unset_required_fields(): + transport = transports.GDCHardwareManagementRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_zone._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_zone_rest_interceptors(null_interceptor): + transport = transports.GDCHardwareManagementRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.GDCHardwareManagementRestInterceptor(), + ) + client = GDCHardwareManagementClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.GDCHardwareManagementRestInterceptor, "post_get_zone" + ) as post, mock.patch.object( + transports.GDCHardwareManagementRestInterceptor, "pre_get_zone" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = service.GetZoneRequest.pb(service.GetZoneRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = resources.Zone.to_json(resources.Zone()) + + request = service.GetZoneRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = resources.Zone() + + client.get_zone( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_zone_rest_bad_request( + transport: str = "rest", request_type=service.GetZoneRequest +): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/zones/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_zone(request) + + +def test_get_zone_rest_flattened(): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = resources.Zone() + + # get arguments that satisfy an http rule for this method + sample_request = {"name": "projects/sample1/locations/sample2/zones/sample3"} + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = resources.Zone.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_zone(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha/{name=projects/*/locations/*/zones/*}" % client.transport._host, + args[1], + ) + + +def test_get_zone_rest_flattened_error(transport: str = "rest"): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_zone( + service.GetZoneRequest(), + name="name_value", + ) + + +def test_get_zone_rest_error(): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + service.CreateZoneRequest, + dict, + ], +) +def test_create_zone_rest(request_type): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request_init["zone"] = { + "name": "name_value", + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "labels": {}, + "display_name": "display_name_value", + "state": 1, + "contacts": [ + { + "given_name": "given_name_value", + "family_name": "family_name_value", + "email": "email_value", + "phone": "phone_value", + "time_zone": {"id": "id_value", "version": "version_value"}, + "reachable_times": [ + { + "start_time": { + "hours": 561, + "minutes": 773, + "seconds": 751, + "nanos": 543, + }, + "end_time": {}, + "days": [1], + } + ], + } + ], + "ciq_uri": "ciq_uri_value", + "network_config": { + "machine_mgmt_ipv4_range": "machine_mgmt_ipv4_range_value", + "kubernetes_node_ipv4_range": "kubernetes_node_ipv4_range_value", + "kubernetes_control_plane_ipv4_range": "kubernetes_control_plane_ipv4_range_value", + "management_ipv4_subnet": { + "address_range": "address_range_value", + "default_gateway_ip_address": "default_gateway_ip_address_value", + }, + "kubernetes_ipv4_subnet": {}, + }, + "globally_unique_id": "globally_unique_id_value", + } + # The version of a generated dependency at test runtime may differ from the version used during generation. 
+ # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = service.CreateZoneRequest.meta.fields["zone"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["zone"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["zone"][field])): + del request_init["zone"][field][i][subfield] + else: + del request_init["zone"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.create_zone(request) + + # Establish that the response is the type that we expect. 
+ assert response.operation.name == "operations/spam" + + +def test_create_zone_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.create_zone in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.create_zone] = mock_rpc + + request = {} + client.create_zone(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.create_zone(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_create_zone_rest_required_fields(request_type=service.CreateZoneRequest): + transport_class = transports.GDCHardwareManagementRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_zone._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_zone._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "request_id", + "zone_id", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
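A hedged sketch of what `path_template.transcode` (mocked in the block that follows) does for a simple http rule: URI variables are substituted from the request, the declared body field is split out, and any leftover fields are left for the query string. The http rule and field names here are made up for illustration and are not this API's real http options.

```python
# Illustration of transcoding with made-up http options and request fields.
from google.api_core import path_template

http_options = [
    {"method": "post", "uri": "/v1/{parent=projects/*}/zones", "body": "zone"}
]
transcoded = path_template.transcode(
    http_options, parent="projects/p1", zone={"name": "z1"}, request_id="abc"
)
assert transcoded["uri"] == "/v1/projects/p1/zones"
assert transcoded["body"] == {"name": "z1"}
# Remaining fields (here request_id) end up in transcoded["query_params"].
```

The tests mock `transcode` precisely so that placeholder values for required fields do not have to satisfy the real URI patterns.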
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.create_zone(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_create_zone_rest_unset_required_fields(): + transport = transports.GDCHardwareManagementRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.create_zone._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "requestId", + "zoneId", + ) + ) + & set( + ( + "parent", + "zone", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_zone_rest_interceptors(null_interceptor): + transport = transports.GDCHardwareManagementRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.GDCHardwareManagementRestInterceptor(), + ) + client = GDCHardwareManagementClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.GDCHardwareManagementRestInterceptor, "post_create_zone" + ) as post, mock.patch.object( + transports.GDCHardwareManagementRestInterceptor, "pre_create_zone" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = service.CreateZoneRequest.pb(service.CreateZoneRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = service.CreateZoneRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.create_zone( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_create_zone_rest_bad_request( + transport: str = "rest", request_type=service.CreateZoneRequest +): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
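A brief, standalone sketch of the error mapping the bad-request tests below rely on: google-api-core translates HTTP status codes into typed exceptions, so a mocked 400 response surfaces as `core_exceptions.BadRequest`. The message text is illustrative.

```python
# Illustration: HTTP 400 maps to the BadRequest exception type.
from google.api_core import exceptions as core_exceptions

exc = core_exceptions.from_http_status(400, "zone id is malformed")
assert isinstance(exc, core_exceptions.BadRequest)
assert exc.code == 400
```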
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.create_zone(request) + + +def test_create_zone_rest_flattened(): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + zone=resources.Zone(name="name_value"), + zone_id="zone_id_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.create_zone(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha/{parent=projects/*/locations/*}/zones" % client.transport._host, + args[1], + ) + + +def test_create_zone_rest_flattened_error(transport: str = "rest"): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
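An illustrative sketch (not the client's actual code) of the guard behind the `ValueError` asserted in the `*_flattened_error` tests: flattened keyword arguments are only accepted when no request object is passed.

```python
# Sketch of the request-vs-flattened-arguments guard, with made-up names.
def build_request(request=None, *, name=None):
    has_flattened_params = name is not None
    if request is not None and has_flattened_params:
        raise ValueError(
            "If the `request` argument is set, then none of "
            "the individual field arguments should be set."
        )
    return request if request is not None else {"name": name}


try:
    build_request({"name": "projects/p1/locations/l1/zones/z1"}, name="also-set")
except ValueError:
    pass  # this is the behavior the *_flattened_error tests assert on
```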
+ with pytest.raises(ValueError): + client.create_zone( + service.CreateZoneRequest(), + parent="parent_value", + zone=resources.Zone(name="name_value"), + zone_id="zone_id_value", + ) + + +def test_create_zone_rest_error(): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + service.UpdateZoneRequest, + dict, + ], +) +def test_update_zone_rest(request_type): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "zone": {"name": "projects/sample1/locations/sample2/zones/sample3"} + } + request_init["zone"] = { + "name": "projects/sample1/locations/sample2/zones/sample3", + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "labels": {}, + "display_name": "display_name_value", + "state": 1, + "contacts": [ + { + "given_name": "given_name_value", + "family_name": "family_name_value", + "email": "email_value", + "phone": "phone_value", + "time_zone": {"id": "id_value", "version": "version_value"}, + "reachable_times": [ + { + "start_time": { + "hours": 561, + "minutes": 773, + "seconds": 751, + "nanos": 543, + }, + "end_time": {}, + "days": [1], + } + ], + } + ], + "ciq_uri": "ciq_uri_value", + "network_config": { + "machine_mgmt_ipv4_range": "machine_mgmt_ipv4_range_value", + "kubernetes_node_ipv4_range": "kubernetes_node_ipv4_range_value", + "kubernetes_control_plane_ipv4_range": "kubernetes_control_plane_ipv4_range_value", + "management_ipv4_subnet": { + "address_range": "address_range_value", + "default_gateway_ip_address": "default_gateway_ip_address_value", + }, + "kubernetes_ipv4_subnet": {}, + }, + "globally_unique_id": "globally_unique_id_value", + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = service.UpdateZoneRequest.meta.fields["zone"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["zone"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["zone"][field])): + del request_init["zone"][field][i][subfield] + else: + del request_init["zone"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.update_zone(request) + + # Establish that the response is the type that we expect. + assert response.operation.name == "operations/spam" + + +def test_update_zone_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.update_zone in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client._transport._wrapped_methods[client._transport.update_zone] = mock_rpc + + request = {} + client.update_zone(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.update_zone(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_update_zone_rest_required_fields(request_type=service.UpdateZoneRequest): + transport_class = transports.GDCHardwareManagementRestTransport + + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_zone._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_zone._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "request_id", + "update_mask", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
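A standalone sketch of two protobuf JSON behaviors the required-fields tests above depend on: `MessageToJson` drops fields still at their default values (hence the "verify fields with default values are dropped" step), and it emits camelCase JSON names, which is why other assertions in this file compare names such as `requestId` and `updateMask`. `ListOperationsRequest` is used here only as a convenient well-known message, not because it belongs to this API.

```python
# Illustration with a well-known message type (not part of this API).
import json

from google.longrunning import operations_pb2
from google.protobuf import json_format

# Default-valued fields are omitted from the JSON form entirely.
empty = json.loads(json_format.MessageToJson(operations_pb2.ListOperationsRequest()))
assert empty == {}

# Populated fields appear under their camelCase JSON names.
populated = json.loads(
    json_format.MessageToJson(
        operations_pb2.ListOperationsRequest(name="operations", page_size=5)
    )
)
assert populated == {"name": "operations", "pageSize": 5}
```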
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "patch", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.update_zone(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_update_zone_rest_unset_required_fields(): + transport = transports.GDCHardwareManagementRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.update_zone._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "requestId", + "updateMask", + ) + ) + & set( + ( + "updateMask", + "zone", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_zone_rest_interceptors(null_interceptor): + transport = transports.GDCHardwareManagementRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.GDCHardwareManagementRestInterceptor(), + ) + client = GDCHardwareManagementClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.GDCHardwareManagementRestInterceptor, "post_update_zone" + ) as post, mock.patch.object( + transports.GDCHardwareManagementRestInterceptor, "pre_update_zone" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = service.UpdateZoneRequest.pb(service.UpdateZoneRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = service.UpdateZoneRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.update_zone( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_update_zone_rest_bad_request( + transport: str = "rest", request_type=service.UpdateZoneRequest +): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "zone": {"name": "projects/sample1/locations/sample2/zones/sample3"} + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.update_zone(request) + + +def test_update_zone_rest_flattened(): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = { + "zone": {"name": "projects/sample1/locations/sample2/zones/sample3"} + } + + # get truthy value for each flattened field + mock_args = dict( + zone=resources.Zone(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.update_zone(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha/{zone.name=projects/*/locations/*/zones/*}" + % client.transport._host, + args[1], + ) + + +def test_update_zone_rest_flattened_error(transport: str = "rest"): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_zone( + service.UpdateZoneRequest(), + zone=resources.Zone(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +def test_update_zone_rest_error(): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + service.DeleteZoneRequest, + dict, + ], +) +def test_delete_zone_rest(request_type): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/zones/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.delete_zone(request) + + # Establish that the response is the type that we expect. 
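A short, self-contained illustration of `path_template.validate`, the helper the flattened tests above and below use to check that the mocked request hit the expected URL; the host and resource names here are invented.

```python
# Illustration of URL validation against an http-rule style template.
from google.api_core import path_template

assert path_template.validate(
    "https://example.googleapis.com/v1alpha/{name=projects/*/locations/*/zones/*}",
    "https://example.googleapis.com/v1alpha/projects/p1/locations/us-central1/zones/z1",
)
assert not path_template.validate(
    "https://example.googleapis.com/v1alpha/{name=projects/*/locations/*/zones/*}",
    "https://example.googleapis.com/v1alpha/projects/p1/locations/us-central1",
)
```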
+ assert response.operation.name == "operations/spam" + + +def test_delete_zone_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete_zone in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.delete_zone] = mock_rpc + + request = {} + client.delete_zone(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete_zone(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_delete_zone_rest_required_fields(request_type=service.DeleteZoneRequest): + transport_class = transports.GDCHardwareManagementRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_zone._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_zone._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "delete", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.delete_zone(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_delete_zone_rest_unset_required_fields(): + transport = transports.GDCHardwareManagementRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.delete_zone._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId",)) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_zone_rest_interceptors(null_interceptor): + transport = transports.GDCHardwareManagementRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.GDCHardwareManagementRestInterceptor(), + ) + client = GDCHardwareManagementClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.GDCHardwareManagementRestInterceptor, "post_delete_zone" + ) as post, mock.patch.object( + transports.GDCHardwareManagementRestInterceptor, "pre_delete_zone" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = service.DeleteZoneRequest.pb(service.DeleteZoneRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = service.DeleteZoneRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.delete_zone( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_delete_zone_rest_bad_request( + transport: str = "rest", request_type=service.DeleteZoneRequest +): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/zones/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
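A tiny, standalone illustration of the `call_args` inspection used in the required-fields tests above: once `Session.request` is patched, the keyword arguments of the recorded call (including the `$alt` query parameter the transport always sends) can be read back from the mock.

```python
# Illustration of reading keyword arguments back from a mock call.
from unittest import mock

fake_session_request = mock.Mock()
fake_session_request(
    "POST",
    "https://example.googleapis.com/v1alpha/projects/p1/locations/l1/zones",
    params=[("$alt", "json;enum-encoding=int")],
)
assert fake_session_request.call_args.kwargs["params"] == [
    ("$alt", "json;enum-encoding=int")
]
```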
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_zone(request) + + +def test_delete_zone_rest_flattened(): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = {"name": "projects/sample1/locations/sample2/zones/sample3"} + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.delete_zone(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha/{name=projects/*/locations/*/zones/*}" % client.transport._host, + args[1], + ) + + +def test_delete_zone_rest_flattened_error(transport: str = "rest"): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_zone( + service.DeleteZoneRequest(), + name="name_value", + ) + + +def test_delete_zone_rest_error(): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + service.SignalZoneStateRequest, + dict, + ], +) +def test_signal_zone_state_rest(request_type): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/zones/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.signal_zone_state(request) + + # Establish that the response is the type that we expect. 
+ assert response.operation.name == "operations/spam" + + +def test_signal_zone_state_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.signal_zone_state in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.signal_zone_state + ] = mock_rpc + + request = {} + client.signal_zone_state(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.signal_zone_state(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_signal_zone_state_rest_required_fields( + request_type=service.SignalZoneStateRequest, +): + transport_class = transports.GDCHardwareManagementRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).signal_zone_state._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).signal_zone_state._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.signal_zone_state(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_signal_zone_state_rest_unset_required_fields(): + transport = transports.GDCHardwareManagementRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.signal_zone_state._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(()) + & set( + ( + "name", + "stateSignal", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_signal_zone_state_rest_interceptors(null_interceptor): + transport = transports.GDCHardwareManagementRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.GDCHardwareManagementRestInterceptor(), + ) + client = GDCHardwareManagementClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.GDCHardwareManagementRestInterceptor, "post_signal_zone_state" + ) as post, mock.patch.object( + transports.GDCHardwareManagementRestInterceptor, "pre_signal_zone_state" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = service.SignalZoneStateRequest.pb(service.SignalZoneStateRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = service.SignalZoneStateRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.signal_zone_state( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_signal_zone_state_rest_bad_request( + transport: str = "rest", request_type=service.SignalZoneStateRequest +): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/zones/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
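A hedged sketch of how the interceptor hooks exercised above could be used outside of tests: subclass the generated REST interceptor and override a `pre_*`/`post_*` pair, returning `(request, metadata)` and the response respectively. The logging behavior is illustrative; only the hook names, return shapes, and import path are taken from the tests in this file.

```python
# Sketch of a custom REST interceptor, reusing the transports import path
# this test module already patches.
from google.cloud.gdchardwaremanagement_v1alpha.services.gdc_hardware_management import (
    transports,
)


class LoggingInterceptor(transports.GDCHardwareManagementRestInterceptor):
    def pre_signal_zone_state(self, request, metadata):
        # Inspect or amend the outgoing request/metadata before the HTTP call.
        print("signaling zone state with metadata:", metadata)
        return request, metadata

    def post_signal_zone_state(self, response):
        # Inspect or replace the long-running operation returned by the call.
        return response
```

Such a subclass would be passed to the REST transport's `interceptor=` argument, which is how the parametrized interceptor tests above wire in theirs (or `None`).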
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.signal_zone_state(request) + + +def test_signal_zone_state_rest_flattened(): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = {"name": "projects/sample1/locations/sample2/zones/sample3"} + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + state_signal=service.SignalZoneStateRequest.StateSignal.READY_FOR_SITE_TURNUP, + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.signal_zone_state(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha/{name=projects/*/locations/*/zones/*}:signal" + % client.transport._host, + args[1], + ) + + +def test_signal_zone_state_rest_flattened_error(transport: str = "rest"): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.signal_zone_state( + service.SignalZoneStateRequest(), + name="name_value", + state_signal=service.SignalZoneStateRequest.StateSignal.READY_FOR_SITE_TURNUP, + ) + + +def test_signal_zone_state_rest_error(): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.GDCHardwareManagementGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.GDCHardwareManagementGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = GDCHardwareManagementClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. 
+ transport = transports.GDCHardwareManagementGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = GDCHardwareManagementClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = GDCHardwareManagementClient( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.GDCHardwareManagementGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = GDCHardwareManagementClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.GDCHardwareManagementGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = GDCHardwareManagementClient(transport=transport) + assert client.transport is transport + + +def test_transport_get_channel(): + # A client may be instantiated with a custom transport instance. + transport = transports.GDCHardwareManagementGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + transport = transports.GDCHardwareManagementGrpcAsyncIOTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.GDCHardwareManagementGrpcTransport, + transports.GDCHardwareManagementGrpcAsyncIOTransport, + transports.GDCHardwareManagementRestTransport, + ], +) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "rest", + ], +) +def test_transport_kind(transport_name): + transport = GDCHardwareManagementClient.get_transport_class(transport_name)( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert transport.kind == transport_name + + +def test_transport_grpc_default(): + # A client should use the gRPC transport by default. + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert isinstance( + client.transport, + transports.GDCHardwareManagementGrpcTransport, + ) + + +def test_gdc_hardware_management_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.GDCHardwareManagementTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json", + ) + + +def test_gdc_hardware_management_base_transport(): + # Instantiate the base transport. 
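A small usage sketch of `get_transport_class`, the lookup exercised by `test_transport_kind` above. It assumes the versioned package exports the client class at the package root, as these generated packages typically do; anonymous credentials are used only to avoid an ADC lookup.

```python
# Sketch: resolving a transport class by name, as test_transport_kind does.
from google.auth import credentials as ga_credentials
from google.cloud.gdchardwaremanagement_v1alpha import GDCHardwareManagementClient

rest_cls = GDCHardwareManagementClient.get_transport_class("rest")
grpc_cls = GDCHardwareManagementClient.get_transport_class("grpc")
assert rest_cls is not grpc_cls

transport = rest_cls(credentials=ga_credentials.AnonymousCredentials())
assert transport.kind == "rest"
```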
+ with mock.patch( + "google.cloud.gdchardwaremanagement_v1alpha.services.gdc_hardware_management.transports.GDCHardwareManagementTransport.__init__" + ) as Transport: + Transport.return_value = None + transport = transports.GDCHardwareManagementTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. + methods = ( + "list_orders", + "get_order", + "create_order", + "update_order", + "delete_order", + "submit_order", + "list_sites", + "get_site", + "create_site", + "update_site", + "list_hardware_groups", + "get_hardware_group", + "create_hardware_group", + "update_hardware_group", + "delete_hardware_group", + "list_hardware", + "get_hardware", + "create_hardware", + "update_hardware", + "delete_hardware", + "list_comments", + "get_comment", + "create_comment", + "list_change_log_entries", + "get_change_log_entry", + "list_skus", + "get_sku", + "list_zones", + "get_zone", + "create_zone", + "update_zone", + "delete_zone", + "signal_zone_state", + "get_location", + "list_locations", + "get_operation", + "cancel_operation", + "delete_operation", + "list_operations", + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + with pytest.raises(NotImplementedError): + transport.close() + + # Additionally, the LRO client (a property) should + # also raise NotImplementedError + with pytest.raises(NotImplementedError): + transport.operations_client + + # Catch all for all remaining methods and properties + remainder = [ + "kind", + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + + +def test_gdc_hardware_management_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch( + "google.cloud.gdchardwaremanagement_v1alpha.services.gdc_hardware_management.transports.GDCHardwareManagementTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.GDCHardwareManagementTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with( + "credentials.json", + scopes=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id="octopus", + ) + + +def test_gdc_hardware_management_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. + with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( + "google.cloud.gdchardwaremanagement_v1alpha.services.gdc_hardware_management.transports.GDCHardwareManagementTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.GDCHardwareManagementTransport() + adc.assert_called_once() + + +def test_gdc_hardware_management_auth_adc(): + # If no credentials are provided, we should use ADC credentials. 
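A compact, standalone illustration of the Application Default Credentials pattern that the comment above refers to and the next block mocks: `google.auth.default()` returns a `(credentials, project_id)` tuple, and the tests substitute anonymous credentials so no real ADC lookup happens.

```python
# Illustration of mocking google.auth.default(), as the ADC tests below do.
from unittest import mock

import google.auth
from google.auth import credentials as ga_credentials

with mock.patch.object(google.auth, "default", autospec=True) as adc:
    adc.return_value = (ga_credentials.AnonymousCredentials(), None)
    creds, project_id = google.auth.default()

assert isinstance(creds, ga_credentials.AnonymousCredentials)
assert project_id is None
```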
+ with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + GDCHardwareManagementClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id=None, + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.GDCHardwareManagementGrpcTransport, + transports.GDCHardwareManagementGrpcAsyncIOTransport, + ], +) +def test_gdc_hardware_management_transport_auth_adc(transport_class): + # If credentials and host are not provided, the transport class should use + # ADC credentials. + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + adc.assert_called_once_with( + scopes=["1", "2"], + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id="octopus", + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.GDCHardwareManagementGrpcTransport, + transports.GDCHardwareManagementGrpcAsyncIOTransport, + transports.GDCHardwareManagementRestTransport, + ], +) +def test_gdc_hardware_management_transport_auth_gdch_credentials(transport_class): + host = "https://language.com" + api_audience_tests = [None, "https://language2.com"] + api_audience_expect = [host, "https://language2.com"] + for t, e in zip(api_audience_tests, api_audience_expect): + with mock.patch.object(google.auth, "default", autospec=True) as adc: + gdch_mock = mock.MagicMock() + type(gdch_mock).with_gdch_audience = mock.PropertyMock( + return_value=gdch_mock + ) + adc.return_value = (gdch_mock, None) + transport_class(host=host, api_audience=t) + gdch_mock.with_gdch_audience.assert_called_once_with(e) + + +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.GDCHardwareManagementGrpcTransport, grpc_helpers), + (transports.GDCHardwareManagementGrpcAsyncIOTransport, grpc_helpers_async), + ], +) +def test_gdc_hardware_management_transport_create_channel( + transport_class, grpc_helpers +): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
+ with mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + + create_channel.assert_called_with( + "gdchardwaremanagement.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + scopes=["1", "2"], + default_host="gdchardwaremanagement.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.GDCHardwareManagementGrpcTransport, + transports.GDCHardwareManagementGrpcAsyncIOTransport, + ], +) +def test_gdc_hardware_management_grpc_transport_client_cert_source_for_mtls( + transport_class, +): + cred = ga_credentials.AnonymousCredentials() + + # Check ssl_channel_credentials is used if provided. + with mock.patch.object(transport_class, "create_channel") as mock_create_channel: + mock_ssl_channel_creds = mock.Mock() + transport_class( + host="squid.clam.whelk", + credentials=cred, + ssl_channel_credentials=mock_ssl_channel_creds, + ) + mock_create_channel.assert_called_once_with( + "squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_channel_creds, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls + # is used. + with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): + with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: + transport_class( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback, + ) + expected_cert, expected_key = client_cert_source_callback() + mock_ssl_cred.assert_called_once_with( + certificate_chain=expected_cert, private_key=expected_key + ) + + +def test_gdc_hardware_management_http_transport_client_cert_source_for_mtls(): + cred = ga_credentials.AnonymousCredentials() + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ) as mock_configure_mtls_channel: + transports.GDCHardwareManagementRestTransport( + credentials=cred, client_cert_source_for_mtls=client_cert_source_callback + ) + mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) + + +def test_gdc_hardware_management_rest_lro_client(): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + transport = client.transport + + # Ensure that we have a api-core operations client. + assert isinstance( + transport.operations_client, + operations_v1.AbstractOperationsClient, + ) + + # Ensure that subsequent calls to the property send the exact same object. 
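A generic sketch of the lazy-caching pattern behind the assertion that follows: the REST transport creates its LRO operations client once and then keeps returning the same object. This is only the general shape of such a property, not the transport's actual implementation.

```python
# Generic cached-property sketch; the stand-in object represents the LRO client.
class CachedOperationsClientSketch:
    def __init__(self):
        self._operations_client = None

    @property
    def operations_client(self):
        if self._operations_client is None:
            self._operations_client = object()  # built once, on first access
        return self._operations_client


sketch = CachedOperationsClientSketch()
assert sketch.operations_client is sketch.operations_client
```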
+ assert transport.operations_client is transport.operations_client + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + "rest", + ], +) +def test_gdc_hardware_management_host_no_port(transport_name): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="gdchardwaremanagement.googleapis.com" + ), + transport=transport_name, + ) + assert client.transport._host == ( + "gdchardwaremanagement.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://gdchardwaremanagement.googleapis.com" + ) + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + "rest", + ], +) +def test_gdc_hardware_management_host_with_port(transport_name): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="gdchardwaremanagement.googleapis.com:8000" + ), + transport=transport_name, + ) + assert client.transport._host == ( + "gdchardwaremanagement.googleapis.com:8000" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://gdchardwaremanagement.googleapis.com:8000" + ) + + +@pytest.mark.parametrize( + "transport_name", + [ + "rest", + ], +) +def test_gdc_hardware_management_client_transport_session_collision(transport_name): + creds1 = ga_credentials.AnonymousCredentials() + creds2 = ga_credentials.AnonymousCredentials() + client1 = GDCHardwareManagementClient( + credentials=creds1, + transport=transport_name, + ) + client2 = GDCHardwareManagementClient( + credentials=creds2, + transport=transport_name, + ) + session1 = client1.transport.list_orders._session + session2 = client2.transport.list_orders._session + assert session1 != session2 + session1 = client1.transport.get_order._session + session2 = client2.transport.get_order._session + assert session1 != session2 + session1 = client1.transport.create_order._session + session2 = client2.transport.create_order._session + assert session1 != session2 + session1 = client1.transport.update_order._session + session2 = client2.transport.update_order._session + assert session1 != session2 + session1 = client1.transport.delete_order._session + session2 = client2.transport.delete_order._session + assert session1 != session2 + session1 = client1.transport.submit_order._session + session2 = client2.transport.submit_order._session + assert session1 != session2 + session1 = client1.transport.list_sites._session + session2 = client2.transport.list_sites._session + assert session1 != session2 + session1 = client1.transport.get_site._session + session2 = client2.transport.get_site._session + assert session1 != session2 + session1 = client1.transport.create_site._session + session2 = client2.transport.create_site._session + assert session1 != session2 + session1 = client1.transport.update_site._session + session2 = client2.transport.update_site._session + assert session1 != session2 + session1 = client1.transport.list_hardware_groups._session + session2 = client2.transport.list_hardware_groups._session + assert session1 != session2 + session1 = client1.transport.get_hardware_group._session + session2 = client2.transport.get_hardware_group._session + assert session1 != session2 + session1 = client1.transport.create_hardware_group._session + session2 = 
client2.transport.create_hardware_group._session + assert session1 != session2 + session1 = client1.transport.update_hardware_group._session + session2 = client2.transport.update_hardware_group._session + assert session1 != session2 + session1 = client1.transport.delete_hardware_group._session + session2 = client2.transport.delete_hardware_group._session + assert session1 != session2 + session1 = client1.transport.list_hardware._session + session2 = client2.transport.list_hardware._session + assert session1 != session2 + session1 = client1.transport.get_hardware._session + session2 = client2.transport.get_hardware._session + assert session1 != session2 + session1 = client1.transport.create_hardware._session + session2 = client2.transport.create_hardware._session + assert session1 != session2 + session1 = client1.transport.update_hardware._session + session2 = client2.transport.update_hardware._session + assert session1 != session2 + session1 = client1.transport.delete_hardware._session + session2 = client2.transport.delete_hardware._session + assert session1 != session2 + session1 = client1.transport.list_comments._session + session2 = client2.transport.list_comments._session + assert session1 != session2 + session1 = client1.transport.get_comment._session + session2 = client2.transport.get_comment._session + assert session1 != session2 + session1 = client1.transport.create_comment._session + session2 = client2.transport.create_comment._session + assert session1 != session2 + session1 = client1.transport.list_change_log_entries._session + session2 = client2.transport.list_change_log_entries._session + assert session1 != session2 + session1 = client1.transport.get_change_log_entry._session + session2 = client2.transport.get_change_log_entry._session + assert session1 != session2 + session1 = client1.transport.list_skus._session + session2 = client2.transport.list_skus._session + assert session1 != session2 + session1 = client1.transport.get_sku._session + session2 = client2.transport.get_sku._session + assert session1 != session2 + session1 = client1.transport.list_zones._session + session2 = client2.transport.list_zones._session + assert session1 != session2 + session1 = client1.transport.get_zone._session + session2 = client2.transport.get_zone._session + assert session1 != session2 + session1 = client1.transport.create_zone._session + session2 = client2.transport.create_zone._session + assert session1 != session2 + session1 = client1.transport.update_zone._session + session2 = client2.transport.update_zone._session + assert session1 != session2 + session1 = client1.transport.delete_zone._session + session2 = client2.transport.delete_zone._session + assert session1 != session2 + session1 = client1.transport.signal_zone_state._session + session2 = client2.transport.signal_zone_state._session + assert session1 != session2 + + +def test_gdc_hardware_management_grpc_transport_channel(): + channel = grpc.secure_channel("http://localhost/", grpc.local_channel_credentials()) + + # Check that channel is used if provided. 
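+ # When a channel is supplied directly, the transport should adopt it as-is and record no ssl credentials.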
+ transport = transports.GDCHardwareManagementGrpcTransport( + host="squid.clam.whelk", + channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None + + +def test_gdc_hardware_management_grpc_asyncio_transport_channel(): + channel = aio.secure_channel("http://localhost/", grpc.local_channel_credentials()) + + # Check that channel is used if provided. + transport = transports.GDCHardwareManagementGrpcAsyncIOTransport( + host="squid.clam.whelk", + channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. +@pytest.mark.parametrize( + "transport_class", + [ + transports.GDCHardwareManagementGrpcTransport, + transports.GDCHardwareManagementGrpcAsyncIOTransport, + ], +) +def test_gdc_hardware_management_transport_channel_mtls_with_client_cert_source( + transport_class, +): + with mock.patch( + "grpc.ssl_channel_credentials", autospec=True + ) as grpc_ssl_channel_cred: + with mock.patch.object( + transport_class, "create_channel" + ) as grpc_create_channel: + mock_ssl_cred = mock.Mock() + grpc_ssl_channel_cred.return_value = mock_ssl_cred + + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + + cred = ga_credentials.AnonymousCredentials() + with pytest.warns(DeprecationWarning): + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (cred, None) + transport = transport_class( + host="squid.clam.whelk", + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=client_cert_source_callback, + ) + adc.assert_called_once() + + grpc_ssl_channel_cred.assert_called_once_with( + certificate_chain=b"cert bytes", private_key=b"key bytes" + ) + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + assert transport._ssl_channel_credentials == mock_ssl_cred + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. 
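+# Without an explicit client_cert_source, ADC-provided SslCredentials should supply the mTLS channel credentials.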
+@pytest.mark.parametrize( + "transport_class", + [ + transports.GDCHardwareManagementGrpcTransport, + transports.GDCHardwareManagementGrpcAsyncIOTransport, + ], +) +def test_gdc_hardware_management_transport_channel_mtls_with_adc(transport_class): + mock_ssl_cred = mock.Mock() + with mock.patch.multiple( + "google.auth.transport.grpc.SslCredentials", + __init__=mock.Mock(return_value=None), + ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), + ): + with mock.patch.object( + transport_class, "create_channel" + ) as grpc_create_channel: + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + mock_cred = mock.Mock() + + with pytest.warns(DeprecationWarning): + transport = transport_class( + host="squid.clam.whelk", + credentials=mock_cred, + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=None, + ) + + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=mock_cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + + +def test_gdc_hardware_management_grpc_lro_client(): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + transport = client.transport + + # Ensure that we have a api-core operations client. + assert isinstance( + transport.operations_client, + operations_v1.OperationsClient, + ) + + # Ensure that subsequent calls to the property send the exact same object. + assert transport.operations_client is transport.operations_client + + +def test_gdc_hardware_management_grpc_lro_async_client(): + client = GDCHardwareManagementAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + transport = client.transport + + # Ensure that we have a api-core operations client. + assert isinstance( + transport.operations_client, + operations_v1.OperationsAsyncClient, + ) + + # Ensure that subsequent calls to the property send the exact same object. + assert transport.operations_client is transport.operations_client + + +def test_change_log_entry_path(): + project = "squid" + location = "clam" + order = "whelk" + change_log_entry = "octopus" + expected = "projects/{project}/locations/{location}/orders/{order}/changeLogEntries/{change_log_entry}".format( + project=project, + location=location, + order=order, + change_log_entry=change_log_entry, + ) + actual = GDCHardwareManagementClient.change_log_entry_path( + project, location, order, change_log_entry + ) + assert expected == actual + + +def test_parse_change_log_entry_path(): + expected = { + "project": "oyster", + "location": "nudibranch", + "order": "cuttlefish", + "change_log_entry": "mussel", + } + path = GDCHardwareManagementClient.change_log_entry_path(**expected) + + # Check that the path construction is reversible. 
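+ # parse_change_log_entry_path should recover exactly the components used to build the path.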
+ actual = GDCHardwareManagementClient.parse_change_log_entry_path(path) + assert expected == actual + + +def test_comment_path(): + project = "winkle" + location = "nautilus" + order = "scallop" + comment = "abalone" + expected = "projects/{project}/locations/{location}/orders/{order}/comments/{comment}".format( + project=project, + location=location, + order=order, + comment=comment, + ) + actual = GDCHardwareManagementClient.comment_path(project, location, order, comment) + assert expected == actual + + +def test_parse_comment_path(): + expected = { + "project": "squid", + "location": "clam", + "order": "whelk", + "comment": "octopus", + } + path = GDCHardwareManagementClient.comment_path(**expected) + + # Check that the path construction is reversible. + actual = GDCHardwareManagementClient.parse_comment_path(path) + assert expected == actual + + +def test_hardware_path(): + project = "oyster" + location = "nudibranch" + hardware = "cuttlefish" + expected = "projects/{project}/locations/{location}/hardware/{hardware}".format( + project=project, + location=location, + hardware=hardware, + ) + actual = GDCHardwareManagementClient.hardware_path(project, location, hardware) + assert expected == actual + + +def test_parse_hardware_path(): + expected = { + "project": "mussel", + "location": "winkle", + "hardware": "nautilus", + } + path = GDCHardwareManagementClient.hardware_path(**expected) + + # Check that the path construction is reversible. + actual = GDCHardwareManagementClient.parse_hardware_path(path) + assert expected == actual + + +def test_hardware_group_path(): + project = "scallop" + location = "abalone" + order = "squid" + hardware_group = "clam" + expected = "projects/{project}/locations/{location}/orders/{order}/hardwareGroups/{hardware_group}".format( + project=project, + location=location, + order=order, + hardware_group=hardware_group, + ) + actual = GDCHardwareManagementClient.hardware_group_path( + project, location, order, hardware_group + ) + assert expected == actual + + +def test_parse_hardware_group_path(): + expected = { + "project": "whelk", + "location": "octopus", + "order": "oyster", + "hardware_group": "nudibranch", + } + path = GDCHardwareManagementClient.hardware_group_path(**expected) + + # Check that the path construction is reversible. + actual = GDCHardwareManagementClient.parse_hardware_group_path(path) + assert expected == actual + + +def test_order_path(): + project = "cuttlefish" + location = "mussel" + order = "winkle" + expected = "projects/{project}/locations/{location}/orders/{order}".format( + project=project, + location=location, + order=order, + ) + actual = GDCHardwareManagementClient.order_path(project, location, order) + assert expected == actual + + +def test_parse_order_path(): + expected = { + "project": "nautilus", + "location": "scallop", + "order": "abalone", + } + path = GDCHardwareManagementClient.order_path(**expected) + + # Check that the path construction is reversible. 
+ actual = GDCHardwareManagementClient.parse_order_path(path) + assert expected == actual + + +def test_site_path(): + project = "squid" + location = "clam" + site = "whelk" + expected = "projects/{project}/locations/{location}/sites/{site}".format( + project=project, + location=location, + site=site, + ) + actual = GDCHardwareManagementClient.site_path(project, location, site) + assert expected == actual + + +def test_parse_site_path(): + expected = { + "project": "octopus", + "location": "oyster", + "site": "nudibranch", + } + path = GDCHardwareManagementClient.site_path(**expected) + + # Check that the path construction is reversible. + actual = GDCHardwareManagementClient.parse_site_path(path) + assert expected == actual + + +def test_sku_path(): + project = "cuttlefish" + location = "mussel" + sku = "winkle" + expected = "projects/{project}/locations/{location}/skus/{sku}".format( + project=project, + location=location, + sku=sku, + ) + actual = GDCHardwareManagementClient.sku_path(project, location, sku) + assert expected == actual + + +def test_parse_sku_path(): + expected = { + "project": "nautilus", + "location": "scallop", + "sku": "abalone", + } + path = GDCHardwareManagementClient.sku_path(**expected) + + # Check that the path construction is reversible. + actual = GDCHardwareManagementClient.parse_sku_path(path) + assert expected == actual + + +def test_zone_path(): + project = "squid" + location = "clam" + zone = "whelk" + expected = "projects/{project}/locations/{location}/zones/{zone}".format( + project=project, + location=location, + zone=zone, + ) + actual = GDCHardwareManagementClient.zone_path(project, location, zone) + assert expected == actual + + +def test_parse_zone_path(): + expected = { + "project": "octopus", + "location": "oyster", + "zone": "nudibranch", + } + path = GDCHardwareManagementClient.zone_path(**expected) + + # Check that the path construction is reversible. + actual = GDCHardwareManagementClient.parse_zone_path(path) + assert expected == actual + + +def test_common_billing_account_path(): + billing_account = "cuttlefish" + expected = "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + actual = GDCHardwareManagementClient.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "mussel", + } + path = GDCHardwareManagementClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. + actual = GDCHardwareManagementClient.parse_common_billing_account_path(path) + assert expected == actual + + +def test_common_folder_path(): + folder = "winkle" + expected = "folders/{folder}".format( + folder=folder, + ) + actual = GDCHardwareManagementClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "nautilus", + } + path = GDCHardwareManagementClient.common_folder_path(**expected) + + # Check that the path construction is reversible. 
+ actual = GDCHardwareManagementClient.parse_common_folder_path(path) + assert expected == actual + + +def test_common_organization_path(): + organization = "scallop" + expected = "organizations/{organization}".format( + organization=organization, + ) + actual = GDCHardwareManagementClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "abalone", + } + path = GDCHardwareManagementClient.common_organization_path(**expected) + + # Check that the path construction is reversible. + actual = GDCHardwareManagementClient.parse_common_organization_path(path) + assert expected == actual + + +def test_common_project_path(): + project = "squid" + expected = "projects/{project}".format( + project=project, + ) + actual = GDCHardwareManagementClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "clam", + } + path = GDCHardwareManagementClient.common_project_path(**expected) + + # Check that the path construction is reversible. + actual = GDCHardwareManagementClient.parse_common_project_path(path) + assert expected == actual + + +def test_common_location_path(): + project = "whelk" + location = "octopus" + expected = "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) + actual = GDCHardwareManagementClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "oyster", + "location": "nudibranch", + } + path = GDCHardwareManagementClient.common_location_path(**expected) + + # Check that the path construction is reversible. + actual = GDCHardwareManagementClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object( + transports.GDCHardwareManagementTransport, "_prep_wrapped_messages" + ) as prep: + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object( + transports.GDCHardwareManagementTransport, "_prep_wrapped_messages" + ) as prep: + transport_class = GDCHardwareManagementClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + +@pytest.mark.asyncio +async def test_transport_close_async(): + client = GDCHardwareManagementAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + with mock.patch.object( + type(getattr(client.transport, "grpc_channel")), "close" + ) as close: + async with client: + close.assert_not_called() + close.assert_called_once() + + +def test_get_location_rest_bad_request( + transport: str = "rest", request_type=locations_pb2.GetLocationRequest +): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. 
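+ # A 400 status from the mocked session should surface as core_exceptions.BadRequest.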
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_location(request) + + +@pytest.mark.parametrize( + "request_type", + [ + locations_pb2.GetLocationRequest, + dict, + ], +) +def test_get_location_rest(request_type): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {"name": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = locations_pb2.Location() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_location(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, locations_pb2.Location) + + +def test_list_locations_rest_bad_request( + transport: str = "rest", request_type=locations_pb2.ListLocationsRequest +): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict({"name": "projects/sample1"}, request) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_locations(request) + + +@pytest.mark.parametrize( + "request_type", + [ + locations_pb2.ListLocationsRequest, + dict, + ], +) +def test_list_locations_rest(request_type): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {"name": "projects/sample1"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = locations_pb2.ListLocationsResponse() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_locations(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, locations_pb2.ListLocationsResponse) + + +def test_cancel_operation_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.CancelOperationRequest +): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2/operations/sample3"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.cancel_operation(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.CancelOperationRequest, + dict, + ], +) +def test_cancel_operation_rest(request_type): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {"name": "projects/sample1/locations/sample2/operations/sample3"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "{}" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.cancel_operation(request) + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_operation_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.DeleteOperationRequest +): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2/operations/sample3"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_operation(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.DeleteOperationRequest, + dict, + ], +) +def test_delete_operation_rest(request_type): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {"name": "projects/sample1/locations/sample2/operations/sample3"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
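+ # DeleteOperation has an empty response body, so the mocked payload is "{}" and the client returns None.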
+ return_value = None + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "{}" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.delete_operation(request) + + # Establish that the response is the type that we expect. + assert response is None + + +def test_get_operation_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.GetOperationRequest +): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2/operations/sample3"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_operation(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.GetOperationRequest, + dict, + ], +) +def test_get_operation_rest(request_type): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {"name": "projects/sample1/locations/sample2/operations/sample3"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_operation(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + + +def test_list_operations_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.ListOperationsRequest +): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_operations(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.ListOperationsRequest, + dict, + ], +) +def test_list_operations_rest(request_type): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {"name": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.ListOperationsResponse() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_operations(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) + + +def test_delete_operation(transport: str = "grpc"): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.DeleteOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.delete_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_delete_operation_async(transport: str = "grpc_asyncio"): + client = GDCHardwareManagementAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.DeleteOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_operation_field_headers(): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.DeleteOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + call.return_value = None + + client.delete_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
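+ # The routing header should appear in the call metadata as ("x-goog-request-params", "name=locations").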
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_delete_operation_field_headers_async(): + client = GDCHardwareManagementAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.DeleteOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_delete_operation_from_dict(): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + + response = client.delete_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_delete_operation_from_dict_async(): + client = GDCHardwareManagementAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_cancel_operation(transport: str = "grpc"): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.CancelOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_cancel_operation_async(transport: str = "grpc_asyncio"): + client = GDCHardwareManagementAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.CancelOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +def test_cancel_operation_field_headers(): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.CancelOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + call.return_value = None + + client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_cancel_operation_field_headers_async(): + client = GDCHardwareManagementAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.CancelOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_cancel_operation_from_dict(): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + + response = client.cancel_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_cancel_operation_from_dict_async(): + client = GDCHardwareManagementAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.cancel_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_get_operation(transport: str = "grpc"): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.GetOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation() + response = client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + + +@pytest.mark.asyncio +async def test_get_operation_async(transport: str = "grpc_asyncio"): + client = GDCHardwareManagementAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.GetOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + response = await client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + + +def test_get_operation_field_headers(): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.GetOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + call.return_value = operations_pb2.Operation() + + client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_operation_field_headers_async(): + client = GDCHardwareManagementAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.GetOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + await client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_get_operation_from_dict(): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation() + + response = client.get_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_get_operation_from_dict_async(): + client = GDCHardwareManagementAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + response = await client.get_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_list_operations(transport: str = "grpc"): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.ListOperationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.ListOperationsResponse() + response = client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) + + +@pytest.mark.asyncio +async def test_list_operations_async(transport: str = "grpc_asyncio"): + client = GDCHardwareManagementAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.ListOperationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + response = await client.list_operations(request) + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) + + +def test_list_operations_field_headers(): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.ListOperationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + call.return_value = operations_pb2.ListOperationsResponse() + + client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_operations_field_headers_async(): + client = GDCHardwareManagementAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.ListOperationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + await client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_list_operations_from_dict(): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.ListOperationsResponse() + + response = client.list_operations( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_list_operations_from_dict_async(): + client = GDCHardwareManagementAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + response = await client.list_operations( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_list_locations(transport: str = "grpc"): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = locations_pb2.ListLocationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = locations_pb2.ListLocationsResponse() + response = client.list_locations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, locations_pb2.ListLocationsResponse) + + +@pytest.mark.asyncio +async def test_list_locations_async(transport: str = "grpc_asyncio"): + client = GDCHardwareManagementAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = locations_pb2.ListLocationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + locations_pb2.ListLocationsResponse() + ) + response = await client.list_locations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, locations_pb2.ListLocationsResponse) + + +def test_list_locations_field_headers(): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = locations_pb2.ListLocationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + call.return_value = locations_pb2.ListLocationsResponse() + + client.list_locations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_locations_field_headers_async(): + client = GDCHardwareManagementAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = locations_pb2.ListLocationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + locations_pb2.ListLocationsResponse() + ) + await client.list_locations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_list_locations_from_dict(): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = locations_pb2.ListLocationsResponse() + + response = client.list_locations( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_list_locations_from_dict_async(): + client = GDCHardwareManagementAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + locations_pb2.ListLocationsResponse() + ) + response = await client.list_locations( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_get_location(transport: str = "grpc"): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = locations_pb2.GetLocationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_location), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = locations_pb2.Location() + response = client.get_location(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, locations_pb2.Location) + + +@pytest.mark.asyncio +async def test_get_location_async(transport: str = "grpc_asyncio"): + client = GDCHardwareManagementAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = locations_pb2.GetLocationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_location), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + locations_pb2.Location() + ) + response = await client.get_location(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, locations_pb2.Location) + + +def test_get_location_field_headers(): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials() + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = locations_pb2.GetLocationRequest() + request.name = "locations/abc" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_location), "__call__") as call: + call.return_value = locations_pb2.Location() + + client.get_location(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations/abc", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_location_field_headers_async(): + client = GDCHardwareManagementAsyncClient( + credentials=ga_credentials.AnonymousCredentials() + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = locations_pb2.GetLocationRequest() + request.name = "locations/abc" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_location), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + locations_pb2.Location() + ) + await client.get_location(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations/abc", + ) in kw["metadata"] + + +def test_get_location_from_dict(): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = locations_pb2.Location() + + response = client.get_location( + request={ + "name": "locations/abc", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_get_location_from_dict_async(): + client = GDCHardwareManagementAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + locations_pb2.Location() + ) + response = await client.get_location( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_transport_close(): + transports = { + "rest": "_session", + "grpc": "_grpc_channel", + } + + for transport, close_name in transports.items(): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + with mock.patch.object( + type(getattr(client.transport, close_name)), "close" + ) as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +def test_client_ctx(): + transports = [ + "rest", + "grpc", + ] + for transport in transports: + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + # Test client calls underlying transport. + with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() + + +@pytest.mark.parametrize( + "client_class,transport_class", + [ + (GDCHardwareManagementClient, transports.GDCHardwareManagementGrpcTransport), + ( + GDCHardwareManagementAsyncClient, + transports.GDCHardwareManagementGrpcAsyncIOTransport, + ), + ], +) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) diff --git a/packages/google-cloud-gke-backup/CHANGELOG.md b/packages/google-cloud-gke-backup/CHANGELOG.md index c1852b9d8853..ed4473d12f54 100644 --- a/packages/google-cloud-gke-backup/CHANGELOG.md +++ b/packages/google-cloud-gke-backup/CHANGELOG.md @@ -1,5 +1,21 @@ # Changelog +## [0.5.9](https://github.com/googleapis/google-cloud-python/compare/google-cloud-gke-backup-v0.5.8...google-cloud-gke-backup-v0.5.9) (2024-05-27) + + +### Features + +* add fine-grained restore ([9268d28](https://github.com/googleapis/google-cloud-python/commit/9268d284f74af2398f50c3faf7af3278337d3f75)) +* add gitops ([9268d28](https://github.com/googleapis/google-cloud-python/commit/9268d284f74af2398f50c3faf7af3278337d3f75)) +* add restore order ([9268d28](https://github.com/googleapis/google-cloud-python/commit/9268d284f74af2398f50c3faf7af3278337d3f75)) +* add strict-permissive mode ([9268d28](https://github.com/googleapis/google-cloud-python/commit/9268d284f74af2398f50c3faf7af3278337d3f75)) +* add volume restore flexibility 
([9268d28](https://github.com/googleapis/google-cloud-python/commit/9268d284f74af2398f50c3faf7af3278337d3f75)) + + +### Documentation + +* update duration comment to include new validation from smart scheduling ([9268d28](https://github.com/googleapis/google-cloud-python/commit/9268d284f74af2398f50c3faf7af3278337d3f75)) + ## [0.5.8](https://github.com/googleapis/google-cloud-python/compare/google-cloud-gke-backup-v0.5.7...google-cloud-gke-backup-v0.5.8) (2024-03-26) diff --git a/packages/google-cloud-gke-backup/google/cloud/gke_backup/__init__.py b/packages/google-cloud-gke-backup/google/cloud/gke_backup/__init__.py index 6b6f4ea602f5..541e3b4a9c7c 100644 --- a/packages/google-cloud-gke-backup/google/cloud/gke_backup/__init__.py +++ b/packages/google-cloud-gke-backup/google/cloud/gke_backup/__init__.py @@ -33,6 +33,7 @@ NamespacedName, NamespacedNames, Namespaces, + VolumeTypeEnum, ) from google.cloud.gke_backup_v1.types.gkebackup import ( CreateBackupPlanRequest, @@ -69,7 +70,12 @@ UpdateRestorePlanRequest, UpdateRestoreRequest, ) -from google.cloud.gke_backup_v1.types.restore import Restore, RestoreConfig +from google.cloud.gke_backup_v1.types.restore import ( + ResourceSelector, + Restore, + RestoreConfig, + VolumeDataRestorePolicyOverride, +) from google.cloud.gke_backup_v1.types.restore_plan import RestorePlan from google.cloud.gke_backup_v1.types.volume import VolumeBackup, VolumeRestore @@ -84,6 +90,7 @@ "NamespacedName", "NamespacedNames", "Namespaces", + "VolumeTypeEnum", "CreateBackupPlanRequest", "CreateBackupRequest", "CreateRestorePlanRequest", @@ -117,8 +124,10 @@ "UpdateBackupRequest", "UpdateRestorePlanRequest", "UpdateRestoreRequest", + "ResourceSelector", "Restore", "RestoreConfig", + "VolumeDataRestorePolicyOverride", "RestorePlan", "VolumeBackup", "VolumeRestore", diff --git a/packages/google-cloud-gke-backup/google/cloud/gke_backup_v1/__init__.py b/packages/google-cloud-gke-backup/google/cloud/gke_backup_v1/__init__.py index 1da59929b3be..21267dad7e6e 100644 --- a/packages/google-cloud-gke-backup/google/cloud/gke_backup_v1/__init__.py +++ b/packages/google-cloud-gke-backup/google/cloud/gke_backup_v1/__init__.py @@ -21,7 +21,13 @@ from .services.backup_for_gke import BackupForGKEAsyncClient, BackupForGKEClient from .types.backup import Backup from .types.backup_plan import BackupPlan, ExclusionWindow, RpoConfig -from .types.common import EncryptionKey, NamespacedName, NamespacedNames, Namespaces +from .types.common import ( + EncryptionKey, + NamespacedName, + NamespacedNames, + Namespaces, + VolumeTypeEnum, +) from .types.gkebackup import ( CreateBackupPlanRequest, CreateBackupRequest, @@ -57,7 +63,12 @@ UpdateRestorePlanRequest, UpdateRestoreRequest, ) -from .types.restore import Restore, RestoreConfig +from .types.restore import ( + ResourceSelector, + Restore, + RestoreConfig, + VolumeDataRestorePolicyOverride, +) from .types.restore_plan import RestorePlan from .types.volume import VolumeBackup, VolumeRestore @@ -100,6 +111,7 @@ "NamespacedNames", "Namespaces", "OperationMetadata", + "ResourceSelector", "Restore", "RestoreConfig", "RestorePlan", @@ -109,5 +121,7 @@ "UpdateRestorePlanRequest", "UpdateRestoreRequest", "VolumeBackup", + "VolumeDataRestorePolicyOverride", "VolumeRestore", + "VolumeTypeEnum", ) diff --git a/packages/google-cloud-gke-backup/google/cloud/gke_backup_v1/types/__init__.py 
b/packages/google-cloud-gke-backup/google/cloud/gke_backup_v1/types/__init__.py index b1730b38b927..ed2f2db82cf7 100644 --- a/packages/google-cloud-gke-backup/google/cloud/gke_backup_v1/types/__init__.py +++ b/packages/google-cloud-gke-backup/google/cloud/gke_backup_v1/types/__init__.py @@ -15,7 +15,13 @@ # from .backup import Backup from .backup_plan import BackupPlan, ExclusionWindow, RpoConfig -from .common import EncryptionKey, NamespacedName, NamespacedNames, Namespaces +from .common import ( + EncryptionKey, + NamespacedName, + NamespacedNames, + Namespaces, + VolumeTypeEnum, +) from .gkebackup import ( CreateBackupPlanRequest, CreateBackupRequest, @@ -51,7 +57,12 @@ UpdateRestorePlanRequest, UpdateRestoreRequest, ) -from .restore import Restore, RestoreConfig +from .restore import ( + ResourceSelector, + Restore, + RestoreConfig, + VolumeDataRestorePolicyOverride, +) from .restore_plan import RestorePlan from .volume import VolumeBackup, VolumeRestore @@ -64,6 +75,7 @@ "NamespacedName", "NamespacedNames", "Namespaces", + "VolumeTypeEnum", "CreateBackupPlanRequest", "CreateBackupRequest", "CreateRestorePlanRequest", @@ -97,8 +109,10 @@ "UpdateBackupRequest", "UpdateRestorePlanRequest", "UpdateRestoreRequest", + "ResourceSelector", "Restore", "RestoreConfig", + "VolumeDataRestorePolicyOverride", "RestorePlan", "VolumeBackup", "VolumeRestore", diff --git a/packages/google-cloud-gke-backup/google/cloud/gke_backup_v1/types/backup.py b/packages/google-cloud-gke-backup/google/cloud/gke_backup_v1/types/backup.py index d8f19545788a..44acc389b262 100644 --- a/packages/google-cloud-gke-backup/google/cloud/gke_backup_v1/types/backup.py +++ b/packages/google-cloud-gke-backup/google/cloud/gke_backup_v1/types/backup.py @@ -171,6 +171,14 @@ class Backup(proto.Message): config_backup_size_bytes (int): Output only. The size of the config backup in bytes. + permissive_mode (bool): + Output only. If false, Backup will fail when Backup for GKE + detects Kubernetes configuration that is non-standard or + requires additional setup to restore. + + Inherited from the parent BackupPlan's + [permissive_mode][google.cloud.gkebackup.v1.BackupPlan.BackupConfig.permissive_mode] + value. """ class State(proto.Enum): @@ -387,6 +395,10 @@ class ClusterMetadata(proto.Message): proto.INT64, number=27, ) + permissive_mode: bool = proto.Field( + proto.BOOL, + number=28, + ) __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-gke-backup/google/cloud/gke_backup_v1/types/backup_plan.py b/packages/google-cloud-gke-backup/google/cloud/gke_backup_v1/types/backup_plan.py index 18cb5514741b..016917c215d7 100644 --- a/packages/google-cloud-gke-backup/google/cloud/gke_backup_v1/types/backup_plan.py +++ b/packages/google-cloud-gke-backup/google/cloud/gke_backup_v1/types/backup_plan.py @@ -313,6 +313,13 @@ class BackupConfig(proto.Message): Default (empty): Config backup artifacts will not be encrypted. + permissive_mode (bool): + Optional. If false, Backups will fail when + Backup for GKE detects Kubernetes configuration + that is non-standard or requires additional + setup to restore. + + Default: False """ all_namespaces: bool = proto.Field( @@ -345,6 +352,10 @@ class BackupConfig(proto.Message): number=6, message=common.EncryptionKey, ) + permissive_mode: bool = proto.Field( + proto.BOOL, + number=7, + ) name: str = proto.Field( proto.STRING, @@ -474,9 +485,10 @@ class ExclusionWindow(proto.Message): Required. Specifies the start time of the window using time of the day in UTC. 
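# Hypothetical usage sketch (assumed usage, not taken from the patch itself): setting the
# new `permissive_mode` flag on a BackupPlan's BackupConfig; Backups created from the plan
# inherit the value into their output-only `permissive_mode` field. The project, location
# and cluster names below are placeholders.
from google.cloud import gke_backup_v1

plan = gke_backup_v1.BackupPlan(
    cluster="projects/my-project/locations/us-central1/clusters/my-cluster",
    backup_config=gke_backup_v1.BackupPlan.BackupConfig(
        all_namespaces=True,
        # Do not fail backups when non-standard Kubernetes configuration is detected.
        permissive_mode=True,
    ),
)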
duration (google.protobuf.duration_pb2.Duration): - Required. Specifies duration of the window. Restrictions for - duration based on the recurrence type to allow some time for - backup to happen: + Required. Specifies duration of the window. Duration must be + >= 5 minutes and < (target RPO - 20 minutes). Additional + restrictions based on the recurrence type to allow some time + for backup to happen: - single_occurrence_date: no restriction, but UI may warn about this when duration >= target RPO diff --git a/packages/google-cloud-gke-backup/google/cloud/gke_backup_v1/types/common.py b/packages/google-cloud-gke-backup/google/cloud/gke_backup_v1/types/common.py index 8aff0e5c1688..a16c5b650336 100644 --- a/packages/google-cloud-gke-backup/google/cloud/gke_backup_v1/types/common.py +++ b/packages/google-cloud-gke-backup/google/cloud/gke_backup_v1/types/common.py @@ -26,6 +26,7 @@ "NamespacedName", "NamespacedNames", "EncryptionKey", + "VolumeTypeEnum", }, ) @@ -98,4 +99,20 @@ class EncryptionKey(proto.Message): ) +class VolumeTypeEnum(proto.Message): + r"""Message to encapsulate VolumeType enum.""" + + class VolumeType(proto.Enum): + r"""Supported volume types. + + Values: + VOLUME_TYPE_UNSPECIFIED (0): + Default + GCE_PERSISTENT_DISK (1): + Compute Engine Persistent Disk volume + """ + VOLUME_TYPE_UNSPECIFIED = 0 + GCE_PERSISTENT_DISK = 1 + + __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-gke-backup/google/cloud/gke_backup_v1/types/restore.py b/packages/google-cloud-gke-backup/google/cloud/gke_backup_v1/types/restore.py index c289b65b4c6b..34e9b4ee51ef 100644 --- a/packages/google-cloud-gke-backup/google/cloud/gke_backup_v1/types/restore.py +++ b/packages/google-cloud-gke-backup/google/cloud/gke_backup_v1/types/restore.py @@ -27,6 +27,8 @@ manifest={ "Restore", "RestoreConfig", + "ResourceSelector", + "VolumeDataRestorePolicyOverride", }, ) @@ -108,6 +110,22 @@ class Restore(proto.Message): that etag in the request to ``UpdateRestore`` or ``DeleteRestore`` to ensure that their change will be applied to the same version of the resource. + filter (google.cloud.gke_backup_v1.types.Restore.Filter): + Optional. Immutable. Filters resources for ``Restore``. If + not specified, the scope of the restore will remain the same + as defined in the ``RestorePlan``. If this is specified, and + no resources are matched by the ``inclusion_filters`` or + everyting is excluded by the ``exclusion_filters``, nothing + will be restored. This filter can only be specified if the + value of + [namespaced_resource_restore_mode][google.cloud.gkebackup.v1.RestoreConfig.namespaced_resource_restore_mode] + is set to ``MERGE_SKIP_ON_CONFLICT``, + ``MERGE_REPLACE_VOLUME_ON_CONFLICT`` or + ``MERGE_REPLACE_ON_CONFLICT``. + volume_data_restore_policy_overrides (MutableSequence[google.cloud.gke_backup_v1.types.VolumeDataRestorePolicyOverride]): + Optional. Immutable. Overrides the volume + data restore policies selected in the Restore + Config for override-scoped resources. """ class State(proto.Enum): @@ -141,6 +159,38 @@ class State(proto.Enum): FAILED = 4 DELETING = 5 + class Filter(proto.Message): + r"""Defines the filter for ``Restore``. This filter can be used to + further refine the resource selection of the ``Restore`` beyond the + coarse-grained scope defined in the ``RestorePlan``. + ``exclusion_filters`` take precedence over ``inclusion_filters``. If + a resource matches both ``inclusion_filters`` and + ``exclusion_filters``, it will not be restored. 
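# Hypothetical usage sketch combining the new MERGE_SKIP_ON_CONFLICT restore mode
# (set on the RestorePlan's RestoreConfig) with the new fine-grained Restore.Filter
# and ResourceSelector types. Resource names, namespaces and labels are placeholders.
from google.cloud import gke_backup_v1

restore_config = gke_backup_v1.RestoreConfig(
    all_namespaces=True,
    namespaced_resource_restore_mode=(
        gke_backup_v1.RestoreConfig.NamespacedResourceRestoreMode.MERGE_SKIP_ON_CONFLICT
    ),
)

restore = gke_backup_v1.Restore(
    backup="projects/my-project/locations/us-central1/backupPlans/my-plan/backups/my-backup",
    filter=gke_backup_v1.Restore.Filter(
        inclusion_filters=[
            # Restore only resources in the "prod" namespace carrying this label.
            gke_backup_v1.ResourceSelector(namespace="prod", labels={"app": "web"}),
        ],
        exclusion_filters=[
            # Exclusion takes precedence over inclusion: never restore "legacy-config".
            gke_backup_v1.ResourceSelector(name="legacy-config"),
        ],
    ),
)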
+ + Attributes: + inclusion_filters (MutableSequence[google.cloud.gke_backup_v1.types.ResourceSelector]): + Optional. Selects resources for restoration. If specified, + only resources which match ``inclusion_filters`` will be + selected for restoration. A resource will be selected if it + matches any ``ResourceSelector`` of the + ``inclusion_filters``. + exclusion_filters (MutableSequence[google.cloud.gke_backup_v1.types.ResourceSelector]): + Optional. Excludes resources from restoration. If specified, + a resource will not be restored if it matches any + ``ResourceSelector`` of the ``exclusion_filters``. + """ + + inclusion_filters: MutableSequence["ResourceSelector"] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="ResourceSelector", + ) + exclusion_filters: MutableSequence["ResourceSelector"] = proto.RepeatedField( + proto.MESSAGE, + number=2, + message="ResourceSelector", + ) + name: str = proto.Field( proto.STRING, number=1, @@ -215,6 +265,18 @@ class State(proto.Enum): proto.STRING, number=17, ) + filter: Filter = proto.Field( + proto.MESSAGE, + number=18, + message=Filter, + ) + volume_data_restore_policy_overrides: MutableSequence[ + "VolumeDataRestorePolicyOverride" + ] = proto.RepeatedField( + proto.MESSAGE, + number=19, + message="VolumeDataRestorePolicyOverride", + ) class RestoreConfig(proto.Message): @@ -296,6 +358,14 @@ class RestoreConfig(proto.Message): matters, as changes made by a rule may impact the filtering logic of subsequent rules. An empty list means no transformation will occur. + volume_data_restore_policy_bindings (MutableSequence[google.cloud.gke_backup_v1.types.RestoreConfig.VolumeDataRestorePolicyBinding]): + Optional. A table that binds volumes by their scope to a + restore policy. Bindings must have a unique scope. Any + volumes not scoped in the bindings are subject to the policy + defined in volume_data_restore_policy. + restore_order (google.cloud.gke_backup_v1.types.RestoreConfig.RestoreOrder): + Optional. RestoreOrder contains custom + ordering to use on a Restore. """ class VolumeDataRestorePolicy(proto.Enum): @@ -380,10 +450,51 @@ class NamespacedResourceRestoreMode(proto.Enum): itself (e.g., because an out of band process creates conflicting resources), a conflict will be reported. + MERGE_SKIP_ON_CONFLICT (3): + This mode merges the backup and the target + cluster and skips the conflicting resources. If + a single resource to restore exists in the + cluster before restoration, the resource will be + skipped, otherwise it will be restored. + MERGE_REPLACE_VOLUME_ON_CONFLICT (4): + This mode merges the backup and the target cluster and skips + the conflicting resources except volume data. If a PVC to + restore already exists, this mode will restore/reconnect the + volume without overwriting the PVC. It is similar to + MERGE_SKIP_ON_CONFLICT except that it will apply the volume + data policy for the conflicting PVCs: + + - RESTORE_VOLUME_DATA_FROM_BACKUP: restore data only and + respect the reclaim policy of the original PV; + - REUSE_VOLUME_HANDLE_FROM_BACKUP: reconnect and respect + the reclaim policy of the original PV; + - NO_VOLUME_DATA_RESTORATION: new provision and respect the + reclaim policy of the original PV. Note that this mode + could cause data loss as the original PV can be retained + or deleted depending on its reclaim policy. + MERGE_REPLACE_ON_CONFLICT (5): + This mode merges the backup and the target + cluster and replaces the conflicting resources + with the ones in the backup. 
If a single + resource to restore exists in the cluster before + restoration, the resource will be replaced with + the one from the backup. To replace an existing + resource, the first attempt is to update the + resource to match the one from the backup; if + the update fails, the second attempt is to + delete the resource and restore it from the + backup. + Note that this mode could cause data loss as it + replaces the existing resources in the target + cluster, and the original PV can be retained or + deleted depending on its reclaim policy. """ NAMESPACED_RESOURCE_RESTORE_MODE_UNSPECIFIED = 0 DELETE_AND_RESTORE = 1 FAIL_ON_CONFLICT = 2 + MERGE_SKIP_ON_CONFLICT = 3 + MERGE_REPLACE_VOLUME_ON_CONFLICT = 4 + MERGE_REPLACE_ON_CONFLICT = 5 class GroupKind(proto.Message): r"""This is a direct map to the Kubernetes GroupKind type @@ -744,6 +855,81 @@ class TransformationRule(proto.Message): number=3, ) + class VolumeDataRestorePolicyBinding(proto.Message): + r"""Binds resources in the scope to the given + VolumeDataRestorePolicy. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + policy (google.cloud.gke_backup_v1.types.RestoreConfig.VolumeDataRestorePolicy): + Required. The VolumeDataRestorePolicy to + apply when restoring volumes in scope. + volume_type (google.cloud.gke_backup_v1.types.VolumeTypeEnum.VolumeType): + The volume type, as determined by the PVC's + bound PV, to apply the policy to. + + This field is a member of `oneof`_ ``scope``. + """ + + policy: "RestoreConfig.VolumeDataRestorePolicy" = proto.Field( + proto.ENUM, + number=1, + enum="RestoreConfig.VolumeDataRestorePolicy", + ) + volume_type: common.VolumeTypeEnum.VolumeType = proto.Field( + proto.ENUM, + number=2, + oneof="scope", + enum=common.VolumeTypeEnum.VolumeType, + ) + + class RestoreOrder(proto.Message): + r"""Allows customers to specify dependencies between resources + that Backup for GKE can use to compute a resasonable restore + order. + + Attributes: + group_kind_dependencies (MutableSequence[google.cloud.gke_backup_v1.types.RestoreConfig.RestoreOrder.GroupKindDependency]): + Optional. Contains a list of group kind + dependency pairs provided by the customer, that + is used by Backup for GKE to generate a group + kind restore order. + """ + + class GroupKindDependency(proto.Message): + r"""Defines a dependency between two group kinds. + + Attributes: + satisfying (google.cloud.gke_backup_v1.types.RestoreConfig.GroupKind): + Required. The satisfying group kind must be + restored first in order to satisfy the + dependency. + requiring (google.cloud.gke_backup_v1.types.RestoreConfig.GroupKind): + Required. The requiring group kind requires + that the other group kind be restored first. 
+ """ + + satisfying: "RestoreConfig.GroupKind" = proto.Field( + proto.MESSAGE, + number=1, + message="RestoreConfig.GroupKind", + ) + requiring: "RestoreConfig.GroupKind" = proto.Field( + proto.MESSAGE, + number=2, + message="RestoreConfig.GroupKind", + ) + + group_kind_dependencies: MutableSequence[ + "RestoreConfig.RestoreOrder.GroupKindDependency" + ] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="RestoreConfig.RestoreOrder.GroupKindDependency", + ) + volume_data_restore_policy: VolumeDataRestorePolicy = proto.Field( proto.ENUM, number=1, @@ -802,6 +988,107 @@ class TransformationRule(proto.Message): number=11, message=TransformationRule, ) + volume_data_restore_policy_bindings: MutableSequence[ + VolumeDataRestorePolicyBinding + ] = proto.RepeatedField( + proto.MESSAGE, + number=12, + message=VolumeDataRestorePolicyBinding, + ) + restore_order: RestoreOrder = proto.Field( + proto.MESSAGE, + number=13, + message=RestoreOrder, + ) + + +class ResourceSelector(proto.Message): + r"""Defines a selector to identify a single or a group of + resources. Conditions in the selector are optional, but at least + one field should be set to a non-empty value. If a condition is + not specified, no restrictions will be applied on that + dimension. + If more than one condition is specified, a resource will be + selected if and only if all conditions are met. + + Attributes: + group_kind (google.cloud.gke_backup_v1.types.RestoreConfig.GroupKind): + Optional. Selects resources using their + Kubernetes GroupKinds. If specified, only + resources of provided GroupKind will be + selected. + name (str): + Optional. Selects resources using their + resource names. If specified, only resources + with the provided name will be selected. + namespace (str): + Optional. Selects resources using their namespaces. This + only applies to namespace scoped resources and cannot be + used for selecting cluster scoped resources. If specified, + only resources in the provided namespace will be selected. + If not specified, the filter will apply to both cluster + scoped and namespace scoped resources (e.g. name or label). + The + `Namespace `__ + resource itself will be restored if and only if any + resources within the namespace are restored. + labels (MutableMapping[str, str]): + Optional. Selects resources using Kubernetes + `labels `__. + If specified, a resource will be selected if and only if the + resource has all of the provided labels and all the label + values match. + """ + + group_kind: "RestoreConfig.GroupKind" = proto.Field( + proto.MESSAGE, + number=1, + message="RestoreConfig.GroupKind", + ) + name: str = proto.Field( + proto.STRING, + number=2, + ) + namespace: str = proto.Field( + proto.STRING, + number=3, + ) + labels: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=4, + ) + + +class VolumeDataRestorePolicyOverride(proto.Message): + r"""Defines an override to apply a VolumeDataRestorePolicy for + scoped resources. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + policy (google.cloud.gke_backup_v1.types.RestoreConfig.VolumeDataRestorePolicy): + Required. The VolumeDataRestorePolicy to + apply when restoring volumes in scope. + selected_pvcs (google.cloud.gke_backup_v1.types.NamespacedNames): + A list of PVCs to apply the policy override + to. + + This field is a member of `oneof`_ ``scope``. 
+ """ + + policy: "RestoreConfig.VolumeDataRestorePolicy" = proto.Field( + proto.ENUM, + number=1, + enum="RestoreConfig.VolumeDataRestorePolicy", + ) + selected_pvcs: common.NamespacedNames = proto.Field( + proto.MESSAGE, + number=2, + oneof="scope", + message=common.NamespacedNames, + ) __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-gke-backup/tests/unit/gapic/gke_backup_v1/test_backup_for_gke.py b/packages/google-cloud-gke-backup/tests/unit/gapic/gke_backup_v1/test_backup_for_gke.py index d30101e36e9c..007223316713 100644 --- a/packages/google-cloud-gke-backup/tests/unit/gapic/gke_backup_v1/test_backup_for_gke.py +++ b/packages/google-cloud-gke-backup/tests/unit/gapic/gke_backup_v1/test_backup_for_gke.py @@ -1975,13 +1975,13 @@ def test_list_backup_plans_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_backup_plans(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -4146,13 +4146,13 @@ def test_list_backups_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_backups(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -4336,6 +4336,7 @@ def test_get_backup(request_type, transport: str = "grpc"): description="description_value", pod_count=971, config_backup_size_bytes=2539, + permissive_mode=True, all_namespaces=True, ) response = client.get_backup(request) @@ -4364,6 +4365,7 @@ def test_get_backup(request_type, transport: str = "grpc"): assert response.description == "description_value" assert response.pod_count == 971 assert response.config_backup_size_bytes == 2539 + assert response.permissive_mode is True def test_get_backup_empty_call(): @@ -4478,6 +4480,7 @@ async def test_get_backup_empty_call_async(): description="description_value", pod_count=971, config_backup_size_bytes=2539, + permissive_mode=True, ) ) response = await client.get_backup() @@ -4564,6 +4567,7 @@ async def test_get_backup_async( description="description_value", pod_count=971, config_backup_size_bytes=2539, + permissive_mode=True, ) ) response = await client.get_backup(request) @@ -4592,6 +4596,7 @@ async def test_get_backup_async( assert response.description == "description_value" assert response.pod_count == 971 assert response.config_backup_size_bytes == 2539 + assert response.permissive_mode is True @pytest.mark.asyncio @@ -5920,13 +5925,13 @@ def test_list_volume_backups_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_volume_backups(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -7362,13 +7367,13 @@ def test_list_restore_plans_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = 
tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_restore_plans(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -9525,13 +9530,13 @@ def test_list_restores_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_restores(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -11280,13 +11285,13 @@ def test_list_volume_restores_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_volume_restores(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -12329,6 +12334,7 @@ def test_create_backup_plan_rest(request_type): "encryption_key": { "gcp_kms_encryption_key": "gcp_kms_encryption_key_value" }, + "permissive_mode": True, }, "protected_pod_count": 2036, "state": 1, @@ -13499,6 +13505,7 @@ def test_update_backup_plan_rest(request_type): "encryption_key": { "gcp_kms_encryption_key": "gcp_kms_encryption_key_value" }, + "permissive_mode": True, }, "protected_pod_count": 2036, "state": 1, @@ -14229,6 +14236,7 @@ def test_create_backup_rest(request_type): "description": "description_value", "pod_count": 971, "config_backup_size_bytes": 2539, + "permissive_mode": True, } # The version of a generated dependency at test runtime may differ from the version used during generation. # Delete any fields which are not present in the current runtime dependency @@ -15002,6 +15010,7 @@ def test_get_backup_rest(request_type): description="description_value", pod_count=971, config_backup_size_bytes=2539, + permissive_mode=True, all_namespaces=True, ) @@ -15034,6 +15043,7 @@ def test_get_backup_rest(request_type): assert response.description == "description_value" assert response.pod_count == 971 assert response.config_backup_size_bytes == 2539 + assert response.permissive_mode is True def test_get_backup_rest_use_cached_wrapped_rpc(): @@ -15354,6 +15364,7 @@ def test_update_backup_rest(request_type): "description": "description_value", "pod_count": 971, "config_backup_size_bytes": 2539, + "permissive_mode": True, } # The version of a generated dependency at test runtime may differ from the version used during generation. 
# Delete any fields which are not present in the current runtime dependency @@ -16823,6 +16834,10 @@ def test_create_restore_plan_rest(request_type): "description": "description_value", } ], + "volume_data_restore_policy_bindings": [{"policy": 1, "volume_type": 1}], + "restore_order": { + "group_kind_dependencies": [{"satisfying": {}, "requiring": {}}] + }, }, "labels": {}, "etag": "etag_value", @@ -18002,6 +18017,10 @@ def test_update_restore_plan_rest(request_type): "description": "description_value", } ], + "volume_data_restore_policy_bindings": [{"policy": 1, "volume_type": 1}], + "restore_order": { + "group_kind_dependencies": [{"satisfying": {}, "requiring": {}}] + }, }, "labels": {}, "etag": "etag_value", @@ -18770,6 +18789,10 @@ def test_create_restore_rest(request_type): "description": "description_value", } ], + "volume_data_restore_policy_bindings": [{"policy": 1, "volume_type": 1}], + "restore_order": { + "group_kind_dependencies": [{"satisfying": {}, "requiring": {}}] + }, }, "labels": {}, "state": 1, @@ -18780,6 +18803,18 @@ def test_create_restore_rest(request_type): "resources_failed_count": 2343, "volumes_restored_count": 2394, "etag": "etag_value", + "filter": { + "inclusion_filters": [ + { + "group_kind": {}, + "name": "name_value", + "namespace": "namespace_value", + "labels": {}, + } + ], + "exclusion_filters": {}, + }, + "volume_data_restore_policy_overrides": [{"policy": 1, "selected_pvcs": {}}], } # The version of a generated dependency at test runtime may differ from the version used during generation. # Delete any fields which are not present in the current runtime dependency @@ -19948,6 +19983,10 @@ def test_update_restore_rest(request_type): "description": "description_value", } ], + "volume_data_restore_policy_bindings": [{"policy": 1, "volume_type": 1}], + "restore_order": { + "group_kind_dependencies": [{"satisfying": {}, "requiring": {}}] + }, }, "labels": {}, "state": 1, @@ -19958,6 +19997,18 @@ def test_update_restore_rest(request_type): "resources_failed_count": 2343, "volumes_restored_count": 2394, "etag": "etag_value", + "filter": { + "inclusion_filters": [ + { + "group_kind": {}, + "name": "name_value", + "namespace": "namespace_value", + "labels": {}, + } + ], + "exclusion_filters": {}, + }, + "volume_data_restore_policy_overrides": [{"policy": 1, "selected_pvcs": {}}], } # The version of a generated dependency at test runtime may differ from the version used during generation. 
# Delete any fields which are not present in the current runtime dependency diff --git a/packages/google-cloud-gke-hub/CHANGELOG.md b/packages/google-cloud-gke-hub/CHANGELOG.md index 9ae00f06b5a6..7273ff9d7a79 100644 --- a/packages/google-cloud-gke-hub/CHANGELOG.md +++ b/packages/google-cloud-gke-hub/CHANGELOG.md @@ -1,5 +1,27 @@ # Changelog +## [1.14.0](https://github.com/googleapis/google-cloud-python/compare/google-cloud-gke-hub-v1.13.1...google-cloud-gke-hub-v1.14.0) (2024-06-19) + + +### Features + +* add a new field `cluster` under `MembershipSpec` to support Config Sync cluster name selector ([319d012](https://github.com/googleapis/google-cloud-python/commit/319d01203396b85b8e725e614425c61bb7db943d)) +* add a new field `enabled` under `ConfigSync` to support Config Sync installation ([319d012](https://github.com/googleapis/google-cloud-python/commit/319d01203396b85b8e725e614425c61bb7db943d)) +* add a new field `gcp_service_account_email` under `ConfigSync` to exporting metrics ([319d012](https://github.com/googleapis/google-cloud-python/commit/319d01203396b85b8e725e614425c61bb7db943d)) +* add a new field `management` under `MembershipSpec` to support auto upgrade ([319d012](https://github.com/googleapis/google-cloud-python/commit/319d01203396b85b8e725e614425c61bb7db943d)) +* add a new field `oci` to support OCI repo configuration ([319d012](https://github.com/googleapis/google-cloud-python/commit/319d01203396b85b8e725e614425c61bb7db943d)) +* add a new field `PENDING` under `DeploymentState` enum ([319d012](https://github.com/googleapis/google-cloud-python/commit/319d01203396b85b8e725e614425c61bb7db943d)) +* add a new field `prevent_drift` under `ConfigSync` to support Config Sync admission webhook drift prevention ([319d012](https://github.com/googleapis/google-cloud-python/commit/319d01203396b85b8e725e614425c61bb7db943d)) + + +### Documentation + +* update comment for field `cluster_name` ([319d012](https://github.com/googleapis/google-cloud-python/commit/319d01203396b85b8e725e614425c61bb7db943d)) +* update comment for field `gcp_service_account_email` ([319d012](https://github.com/googleapis/google-cloud-python/commit/319d01203396b85b8e725e614425c61bb7db943d)) +* update comment for field `secret_type` ([319d012](https://github.com/googleapis/google-cloud-python/commit/319d01203396b85b8e725e614425c61bb7db943d)) +* update comment for field `secret_type` ([319d012](https://github.com/googleapis/google-cloud-python/commit/319d01203396b85b8e725e614425c61bb7db943d)) +* update comment for field `sync_state` ([319d012](https://github.com/googleapis/google-cloud-python/commit/319d01203396b85b8e725e614425c61bb7db943d)) + ## [1.13.1](https://github.com/googleapis/google-cloud-python/compare/google-cloud-gke-hub-v1.13.0...google-cloud-gke-hub-v1.13.1) 
(2024-03-05) diff --git a/packages/google-cloud-gke-hub/google/cloud/gkehub/gapic_version.py b/packages/google-cloud-gke-hub/google/cloud/gkehub/gapic_version.py index 558c8aab67c5..2159c8af6f8e 100644 --- a/packages/google-cloud-gke-hub/google/cloud/gkehub/gapic_version.py +++ b/packages/google-cloud-gke-hub/google/cloud/gkehub/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.0.0" # {x-release-please-version} +__version__ = "1.14.0" # {x-release-please-version} diff --git a/packages/google-cloud-gke-hub/google/cloud/gkehub_v1/configmanagement_v1/__init__.py b/packages/google-cloud-gke-hub/google/cloud/gkehub_v1/configmanagement_v1/__init__.py index b338df39d187..38de3abdf403 100644 --- a/packages/google-cloud-gke-hub/google/cloud/gkehub_v1/configmanagement_v1/__init__.py +++ b/packages/google-cloud-gke-hub/google/cloud/gkehub_v1/configmanagement_v1/__init__.py @@ -21,6 +21,7 @@ from .types.configmanagement import ( ConfigSync, ConfigSyncDeploymentState, + ConfigSyncError, ConfigSyncState, ConfigSyncVersion, DeploymentState, @@ -35,6 +36,7 @@ InstallError, MembershipSpec, MembershipState, + OciConfig, OperatorState, PolicyController, PolicyControllerState, @@ -46,6 +48,7 @@ __all__ = ( "ConfigSync", "ConfigSyncDeploymentState", + "ConfigSyncError", "ConfigSyncState", "ConfigSyncVersion", "DeploymentState", @@ -60,6 +63,7 @@ "InstallError", "MembershipSpec", "MembershipState", + "OciConfig", "OperatorState", "PolicyController", "PolicyControllerState", diff --git a/packages/google-cloud-gke-hub/google/cloud/gkehub_v1/configmanagement_v1/gapic_version.py b/packages/google-cloud-gke-hub/google/cloud/gkehub_v1/configmanagement_v1/gapic_version.py index 558c8aab67c5..2159c8af6f8e 100644 --- a/packages/google-cloud-gke-hub/google/cloud/gkehub_v1/configmanagement_v1/gapic_version.py +++ b/packages/google-cloud-gke-hub/google/cloud/gkehub_v1/configmanagement_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "0.0.0" # {x-release-please-version} +__version__ = "1.14.0" # {x-release-please-version} diff --git a/packages/google-cloud-gke-hub/google/cloud/gkehub_v1/configmanagement_v1/types/__init__.py b/packages/google-cloud-gke-hub/google/cloud/gkehub_v1/configmanagement_v1/types/__init__.py index 12e7b7fb338d..39c74d1a31f3 100644 --- a/packages/google-cloud-gke-hub/google/cloud/gkehub_v1/configmanagement_v1/types/__init__.py +++ b/packages/google-cloud-gke-hub/google/cloud/gkehub_v1/configmanagement_v1/types/__init__.py @@ -16,6 +16,7 @@ from .configmanagement import ( ConfigSync, ConfigSyncDeploymentState, + ConfigSyncError, ConfigSyncState, ConfigSyncVersion, DeploymentState, @@ -30,6 +31,7 @@ InstallError, MembershipSpec, MembershipState, + OciConfig, OperatorState, PolicyController, PolicyControllerState, @@ -41,6 +43,7 @@ __all__ = ( "ConfigSync", "ConfigSyncDeploymentState", + "ConfigSyncError", "ConfigSyncState", "ConfigSyncVersion", "ErrorResource", @@ -54,6 +57,7 @@ "InstallError", "MembershipSpec", "MembershipState", + "OciConfig", "OperatorState", "PolicyController", "PolicyControllerState", diff --git a/packages/google-cloud-gke-hub/google/cloud/gkehub_v1/configmanagement_v1/types/configmanagement.py b/packages/google-cloud-gke-hub/google/cloud/gkehub_v1/configmanagement_v1/types/configmanagement.py index 6ee5b1a68ae6..15eb758e4b44 100644 --- a/packages/google-cloud-gke-hub/google/cloud/gkehub_v1/configmanagement_v1/types/configmanagement.py +++ b/packages/google-cloud-gke-hub/google/cloud/gkehub_v1/configmanagement_v1/types/configmanagement.py @@ -28,6 +28,7 @@ "MembershipSpec", "ConfigSync", "GitConfig", + "OciConfig", "PolicyController", "HierarchyControllerConfig", "HierarchyControllerDeploymentState", @@ -36,6 +37,7 @@ "OperatorState", "InstallError", "ConfigSyncState", + "ConfigSyncError", "ConfigSyncVersion", "ConfigSyncDeploymentState", "SyncState", @@ -63,11 +65,14 @@ class DeploymentState(proto.Enum): ERROR (3): Deployment was attempted to be installed, but has errors + PENDING (4): + Deployment is installing or terminating """ DEPLOYMENT_STATE_UNSPECIFIED = 0 NOT_INSTALLED = 1 INSTALLED = 2 ERROR = 3 + PENDING = 4 class MembershipState(proto.Message): @@ -75,12 +80,9 @@ class MembershipState(proto.Message): Attributes: cluster_name (str): - The user-defined name for the cluster used by - ClusterSelectors to group clusters together. This should - match Membership's membership_name, unless the user - installed ACM on the cluster manually prior to enabling the - ACM hub feature. Unique within a Anthos Config Management - installation. + This field is set to the ``cluster_name`` field of the + Membership Spec if it is not empty. Otherwise, it is set to + the cluster's fleet membership name. membership_spec (google.cloud.gkehub.configmanagement_v1.types.MembershipSpec): Membership configuration in the cluster. This represents the actual state in the cluster, @@ -142,8 +144,40 @@ class MembershipSpec(proto.Message): cluster. version (str): Version of ACM installed. + cluster (str): + The user-specified cluster name used by + Config Sync cluster-name-selector annotation or + ClusterSelector, for applying configs to only a + subset of clusters. + Omit this field if the cluster's fleet + membership name is used by Config Sync + cluster-name-selector annotation or + ClusterSelector. Set this field if a name + different from the cluster's fleet membership + name is used by Config Sync + cluster-name-selector annotation or + ClusterSelector. 
+ management (google.cloud.gkehub.configmanagement_v1.types.MembershipSpec.Management): + Enables automatic Feature management. """ + class Management(proto.Enum): + r"""Whether to automatically manage the Feature. + + Values: + MANAGEMENT_UNSPECIFIED (0): + Unspecified + MANAGEMENT_AUTOMATIC (1): + Google will manage the Feature for the + cluster. + MANAGEMENT_MANUAL (2): + User will manually manage the Feature for the + cluster. + """ + MANAGEMENT_UNSPECIFIED = 0 + MANAGEMENT_AUTOMATIC = 1 + MANAGEMENT_MANUAL = 2 + config_sync: "ConfigSync" = proto.Field( proto.MESSAGE, number=1, @@ -163,17 +197,54 @@ class MembershipSpec(proto.Message): proto.STRING, number=10, ) + cluster: str = proto.Field( + proto.STRING, + number=11, + ) + management: Management = proto.Field( + proto.ENUM, + number=12, + enum=Management, + ) class ConfigSync(proto.Message): r"""Configuration for Config Sync + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + Attributes: git (google.cloud.gkehub.configmanagement_v1.types.GitConfig): Git repo configuration for the cluster. source_format (str): Specifies whether the Config Sync Repo is - in “hierarchical” or “unstructured” mode. + in "hierarchical" or "unstructured" mode. + enabled (bool): + Enables the installation of ConfigSync. + If set to true, ConfigSync resources will be + created and the other ConfigSync fields will be + applied if exist. + If set to false, all other ConfigSync fields + will be ignored, ConfigSync resources will be + deleted. + If omitted, ConfigSync resources will be managed + depends on the presence of the git or oci field. + + This field is a member of `oneof`_ ``_enabled``. + prevent_drift (bool): + Set to true to enable the Config Sync admission webhook to + prevent drifts. If set to ``false``, disables the Config + Sync admission webhook and does not prevent drifts. + oci (google.cloud.gkehub.configmanagement_v1.types.OciConfig): + OCI repo configuration for the cluster + metrics_gcp_service_account_email (str): + The Email of the Google Cloud Service Account (GSA) used for + exporting Config Sync metrics to Cloud Monitoring when + Workload Identity is enabled. The GSA should have the + Monitoring Metric Writer (roles/monitoring.metricWriter) IAM + role. The Kubernetes ServiceAccount ``default`` in the + namespace ``config-management-monitoring`` should be bound + to the GSA. """ git: "GitConfig" = proto.Field( @@ -185,6 +256,24 @@ class ConfigSync(proto.Message): proto.STRING, number=8, ) + enabled: bool = proto.Field( + proto.BOOL, + number=10, + optional=True, + ) + prevent_drift: bool = proto.Field( + proto.BOOL, + number=11, + ) + oci: "OciConfig" = proto.Field( + proto.MESSAGE, + number=12, + message="OciConfig", + ) + metrics_gcp_service_account_email: str = proto.Field( + proto.STRING, + number=15, + ) class GitConfig(proto.Message): @@ -209,13 +298,15 @@ class GitConfig(proto.Message): Default HEAD. secret_type (str): Type of secret configured for access to the - Git repo. + Git repo. Must be one of ssh, cookiefile, + gcenode, token, gcpserviceaccount or none. The + validation of this is case-sensitive. Required. https_proxy (str): URL for the HTTPS proxy to be used when communicating with the Git repo. gcp_service_account_email (str): - The GCP Service Account Email used for auth when secret_type - is gcpServiceAccount. + The Google Cloud Service Account Email used for auth when + secret_type is gcpServiceAccount. 
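# Hypothetical usage sketch of the new Config Management fields: ConfigSync
# `enabled`, `prevent_drift`, `oci` and `metrics_gcp_service_account_email`, plus
# MembershipSpec `cluster` and `management`. All values below are placeholders.
from google.cloud.gkehub_v1 import configmanagement_v1 as configmanagement

spec = configmanagement.MembershipSpec(
    version="1.17.0",
    # Name used by Config Sync cluster-name-selector annotations / ClusterSelector.
    cluster="prod-cluster",
    management=configmanagement.MembershipSpec.Management.MANAGEMENT_AUTOMATIC,
    config_sync=configmanagement.ConfigSync(
        enabled=True,
        prevent_drift=True,
        source_format="unstructured",
        oci=configmanagement.OciConfig(
            sync_repo="us-docker.pkg.dev/my-project/my-repo/my-package",
            policy_dir="configs",
            sync_wait_secs=30,
            secret_type="gcpserviceaccount",
            gcp_service_account_email="config-sync@my-project.iam.gserviceaccount.com",
        ),
        metrics_gcp_service_account_email="csm-metrics@my-project.iam.gserviceaccount.com",
    ),
)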
""" sync_repo: str = proto.Field( @@ -252,6 +343,51 @@ class GitConfig(proto.Message): ) +class OciConfig(proto.Message): + r"""OCI repo configuration for a single cluster + + Attributes: + sync_repo (str): + The OCI image repository URL for the package to sync from. + e.g. + ``LOCATION-docker.pkg.dev/PROJECT_ID/REPOSITORY_NAME/PACKAGE_NAME``. + policy_dir (str): + The absolute path of the directory that + contains the local resources. Default: the root + directory of the image. + sync_wait_secs (int): + Period in seconds between consecutive syncs. + Default: 15. + secret_type (str): + Type of secret configured for access to the + Git repo. + gcp_service_account_email (str): + The Google Cloud Service Account Email used for auth when + secret_type is gcpServiceAccount. + """ + + sync_repo: str = proto.Field( + proto.STRING, + number=1, + ) + policy_dir: str = proto.Field( + proto.STRING, + number=2, + ) + sync_wait_secs: int = proto.Field( + proto.INT64, + number=3, + ) + secret_type: str = proto.Field( + proto.STRING, + number=4, + ) + gcp_service_account_email: str = proto.Field( + proto.STRING, + number=5, + ) + + class PolicyController(proto.Message): r"""Configuration for Policy Controller @@ -464,8 +600,63 @@ class ConfigSyncState(proto.Message): sync_state (google.cloud.gkehub.configmanagement_v1.types.SyncState): The state of ConfigSync's process to sync configs to a cluster + errors (MutableSequence[google.cloud.gkehub.configmanagement_v1.types.ConfigSyncError]): + Errors pertaining to the installation of + Config Sync. + rootsync_crd (google.cloud.gkehub.configmanagement_v1.types.ConfigSyncState.CRDState): + The state of the RootSync CRD + reposync_crd (google.cloud.gkehub.configmanagement_v1.types.ConfigSyncState.CRDState): + The state of the Reposync CRD + state (google.cloud.gkehub.configmanagement_v1.types.ConfigSyncState.State): + The state of CS + This field summarizes the other fields in this + message. """ + class CRDState(proto.Enum): + r"""CRDState representing the state of a CRD + + Values: + CRD_STATE_UNSPECIFIED (0): + CRD's state cannot be determined + NOT_INSTALLED (1): + CRD is not installed + INSTALLED (2): + CRD is installed + TERMINATING (3): + CRD is terminating (i.e., it has been deleted + and is cleaning up) + INSTALLING (4): + CRD is installing + """ + CRD_STATE_UNSPECIFIED = 0 + NOT_INSTALLED = 1 + INSTALLED = 2 + TERMINATING = 3 + INSTALLING = 4 + + class State(proto.Enum): + r""" + + Values: + STATE_UNSPECIFIED (0): + CS's state cannot be determined. + CONFIG_SYNC_NOT_INSTALLED (1): + CS is not installed. + CONFIG_SYNC_INSTALLED (2): + The expected CS version is installed + successfully. + CONFIG_SYNC_ERROR (3): + CS encounters errors. + CONFIG_SYNC_PENDING (4): + CS is installing or terminating. 
+ """ + STATE_UNSPECIFIED = 0 + CONFIG_SYNC_NOT_INSTALLED = 1 + CONFIG_SYNC_INSTALLED = 2 + CONFIG_SYNC_ERROR = 3 + CONFIG_SYNC_PENDING = 4 + version: "ConfigSyncVersion" = proto.Field( proto.MESSAGE, number=1, @@ -481,6 +672,41 @@ class ConfigSyncState(proto.Message): number=3, message="SyncState", ) + errors: MutableSequence["ConfigSyncError"] = proto.RepeatedField( + proto.MESSAGE, + number=4, + message="ConfigSyncError", + ) + rootsync_crd: CRDState = proto.Field( + proto.ENUM, + number=5, + enum=CRDState, + ) + reposync_crd: CRDState = proto.Field( + proto.ENUM, + number=6, + enum=CRDState, + ) + state: State = proto.Field( + proto.ENUM, + number=7, + enum=State, + ) + + +class ConfigSyncError(proto.Message): + r"""Errors pertaining to the installation of Config Sync + + Attributes: + error_message (str): + A string representing the user facing error + message + """ + + error_message: str = proto.Field( + proto.STRING, + number=1, + ) class ConfigSyncVersion(proto.Message): @@ -502,6 +728,8 @@ class ConfigSyncVersion(proto.Message): root_reconciler (str): Version of the deployed reconciler container in root-reconciler pod + admission_webhook (str): + Version of the deployed admission_webhook pod """ importer: str = proto.Field( @@ -528,6 +756,10 @@ class ConfigSyncVersion(proto.Message): proto.STRING, number=6, ) + admission_webhook: str = proto.Field( + proto.STRING, + number=7, + ) class ConfigSyncDeploymentState(proto.Message): @@ -546,6 +778,8 @@ class ConfigSyncDeploymentState(proto.Message): Deployment state of reconciler-manager pod root_reconciler (google.cloud.gkehub.configmanagement_v1.types.DeploymentState): Deployment state of root-reconciler + admission_webhook (google.cloud.gkehub.configmanagement_v1.types.DeploymentState): + Deployment state of admission-webhook """ importer: "DeploymentState" = proto.Field( @@ -578,6 +812,11 @@ class ConfigSyncDeploymentState(proto.Message): number=6, enum="DeploymentState", ) + admission_webhook: "DeploymentState" = proto.Field( + proto.ENUM, + number=7, + enum="DeploymentState", + ) class SyncState(proto.Message): @@ -608,27 +847,26 @@ class SyncState(proto.Message): """ class SyncCode(proto.Enum): - r"""An enum representing an ACM's status syncing configs to a - cluster + r"""An enum representing Config Sync's status of syncing configs + to a cluster. Values: SYNC_CODE_UNSPECIFIED (0): - ACM cannot determine a sync code + Config Sync cannot determine a sync code SYNCED (1): - ACM successfully synced the git Repo with the - cluster + Config Sync successfully synced the git Repo + with the cluster PENDING (2): - ACM is in the progress of syncing a new - change + Config Sync is in the progress of syncing a + new change ERROR (3): - Indicates an error configuring ACM, and user - action is required + Indicates an error configuring Config Sync, + and user action is required NOT_CONFIGURED (4): - ACM has been installed (operator manifest - deployed), but not configured. 
+ Config Sync has been installed but not + configured NOT_INSTALLED (5): - ACM has not been installed (no operator pod - found) + Config Sync has not been installed UNAUTHORIZED (6): Error authorizing with the cluster UNREACHABLE (7): diff --git a/packages/google-cloud-gke-hub/google/cloud/gkehub_v1/gapic_version.py b/packages/google-cloud-gke-hub/google/cloud/gkehub_v1/gapic_version.py index 558c8aab67c5..2159c8af6f8e 100644 --- a/packages/google-cloud-gke-hub/google/cloud/gkehub_v1/gapic_version.py +++ b/packages/google-cloud-gke-hub/google/cloud/gkehub_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.0.0" # {x-release-please-version} +__version__ = "1.14.0" # {x-release-please-version} diff --git a/packages/google-cloud-gke-hub/google/cloud/gkehub_v1/multiclusteringress_v1/gapic_version.py b/packages/google-cloud-gke-hub/google/cloud/gkehub_v1/multiclusteringress_v1/gapic_version.py index 558c8aab67c5..2159c8af6f8e 100644 --- a/packages/google-cloud-gke-hub/google/cloud/gkehub_v1/multiclusteringress_v1/gapic_version.py +++ b/packages/google-cloud-gke-hub/google/cloud/gkehub_v1/multiclusteringress_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.0.0" # {x-release-please-version} +__version__ = "1.14.0" # {x-release-please-version} diff --git a/packages/google-cloud-gke-hub/google/cloud/gkehub_v1beta1/gapic_version.py b/packages/google-cloud-gke-hub/google/cloud/gkehub_v1beta1/gapic_version.py index 558c8aab67c5..2159c8af6f8e 100644 --- a/packages/google-cloud-gke-hub/google/cloud/gkehub_v1beta1/gapic_version.py +++ b/packages/google-cloud-gke-hub/google/cloud/gkehub_v1beta1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "0.0.0" # {x-release-please-version} +__version__ = "1.14.0" # {x-release-please-version} diff --git a/packages/google-cloud-gke-hub/samples/generated_samples/snippet_metadata_google.cloud.gkehub.v1.json b/packages/google-cloud-gke-hub/samples/generated_samples/snippet_metadata_google.cloud.gkehub.v1.json index f5d7aaa2ae7a..b0990ab57d74 100644 --- a/packages/google-cloud-gke-hub/samples/generated_samples/snippet_metadata_google.cloud.gkehub.v1.json +++ b/packages/google-cloud-gke-hub/samples/generated_samples/snippet_metadata_google.cloud.gkehub.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-gke-hub", - "version": "0.1.0" + "version": "1.14.0" }, "snippets": [ { diff --git a/packages/google-cloud-gke-hub/samples/generated_samples/snippet_metadata_google.cloud.gkehub.v1beta1.json b/packages/google-cloud-gke-hub/samples/generated_samples/snippet_metadata_google.cloud.gkehub.v1beta1.json index 5f7f541e37e3..dd844d9234f6 100644 --- a/packages/google-cloud-gke-hub/samples/generated_samples/snippet_metadata_google.cloud.gkehub.v1beta1.json +++ b/packages/google-cloud-gke-hub/samples/generated_samples/snippet_metadata_google.cloud.gkehub.v1beta1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-gke-hub", - "version": "0.1.0" + "version": "1.14.0" }, "snippets": [ { diff --git a/packages/google-cloud-gke-hub/tests/unit/gapic/gkehub_v1/test_gke_hub.py b/packages/google-cloud-gke-hub/tests/unit/gapic/gkehub_v1/test_gke_hub.py index f1c7963ebd83..cb4e510fe457 100644 --- a/packages/google-cloud-gke-hub/tests/unit/gapic/gkehub_v1/test_gke_hub.py +++ b/packages/google-cloud-gke-hub/tests/unit/gapic/gkehub_v1/test_gke_hub.py @@ -1480,13 +1480,13 @@ def test_list_memberships_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_memberships(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -2045,13 +2045,13 @@ def test_list_features_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_features(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 diff --git a/packages/google-cloud-gke-hub/tests/unit/gapic/gkehub_v1beta1/test_gke_hub_membership_service.py b/packages/google-cloud-gke-hub/tests/unit/gapic/gkehub_v1beta1/test_gke_hub_membership_service.py index ff0869d27837..50ce8c7942d4 100644 --- a/packages/google-cloud-gke-hub/tests/unit/gapic/gkehub_v1beta1/test_gke_hub_membership_service.py +++ b/packages/google-cloud-gke-hub/tests/unit/gapic/gkehub_v1beta1/test_gke_hub_membership_service.py @@ -1625,13 +1625,13 @@ def test_list_memberships_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_memberships(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 diff --git 
a/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud/gapic_version.py b/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud/gapic_version.py index 8ebdaa033b52..558c8aab67c5 100644 --- a/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud/gapic_version.py +++ b/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.6.10" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud_v1/gapic_version.py b/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud_v1/gapic_version.py index 8ebdaa033b52..558c8aab67c5 100644 --- a/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud_v1/gapic_version.py +++ b/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.6.10" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-gke-multicloud/samples/generated_samples/snippet_metadata_google.cloud.gkemulticloud.v1.json b/packages/google-cloud-gke-multicloud/samples/generated_samples/snippet_metadata_google.cloud.gkemulticloud.v1.json index e5e46df7d162..08dd05577dce 100644 --- a/packages/google-cloud-gke-multicloud/samples/generated_samples/snippet_metadata_google.cloud.gkemulticloud.v1.json +++ b/packages/google-cloud-gke-multicloud/samples/generated_samples/snippet_metadata_google.cloud.gkemulticloud.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-gke-multicloud", - "version": "0.6.10" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-gke-multicloud/tests/unit/gapic/gke_multicloud_v1/test_attached_clusters.py b/packages/google-cloud-gke-multicloud/tests/unit/gapic/gke_multicloud_v1/test_attached_clusters.py index 07640bee8cb8..26770ad89bd0 100644 --- a/packages/google-cloud-gke-multicloud/tests/unit/gapic/gke_multicloud_v1/test_attached_clusters.py +++ b/packages/google-cloud-gke-multicloud/tests/unit/gapic/gke_multicloud_v1/test_attached_clusters.py @@ -3257,13 +3257,13 @@ def test_list_attached_clusters_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_attached_clusters(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 diff --git a/packages/google-cloud-gke-multicloud/tests/unit/gapic/gke_multicloud_v1/test_aws_clusters.py b/packages/google-cloud-gke-multicloud/tests/unit/gapic/gke_multicloud_v1/test_aws_clusters.py index ec766359ea21..6377a34186c5 100644 --- a/packages/google-cloud-gke-multicloud/tests/unit/gapic/gke_multicloud_v1/test_aws_clusters.py +++ b/packages/google-cloud-gke-multicloud/tests/unit/gapic/gke_multicloud_v1/test_aws_clusters.py @@ -2745,13 +2745,13 @@ def test_list_aws_clusters_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( 
gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_aws_clusters(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -5993,13 +5993,13 @@ def test_list_aws_node_pools_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_aws_node_pools(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 diff --git a/packages/google-cloud-gke-multicloud/tests/unit/gapic/gke_multicloud_v1/test_azure_clusters.py b/packages/google-cloud-gke-multicloud/tests/unit/gapic/gke_multicloud_v1/test_azure_clusters.py index bbfb426ce85a..9ba683233aea 100644 --- a/packages/google-cloud-gke-multicloud/tests/unit/gapic/gke_multicloud_v1/test_azure_clusters.py +++ b/packages/google-cloud-gke-multicloud/tests/unit/gapic/gke_multicloud_v1/test_azure_clusters.py @@ -2370,13 +2370,13 @@ def test_list_azure_clients_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_azure_clients(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -4603,13 +4603,13 @@ def test_list_azure_clusters_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_azure_clusters(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -7472,13 +7472,13 @@ def test_list_azure_node_pools_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_azure_node_pools(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 diff --git a/packages/google-cloud-gsuiteaddons/tests/unit/gapic/gsuiteaddons_v1/test_g_suite_add_ons.py b/packages/google-cloud-gsuiteaddons/tests/unit/gapic/gsuiteaddons_v1/test_g_suite_add_ons.py index 4fba900660ea..e9a1940a07fc 100644 --- a/packages/google-cloud-gsuiteaddons/tests/unit/gapic/gsuiteaddons_v1/test_g_suite_add_ons.py +++ b/packages/google-cloud-gsuiteaddons/tests/unit/gapic/gsuiteaddons_v1/test_g_suite_add_ons.py @@ -3112,13 +3112,13 @@ def test_list_deployments_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_deployments(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 diff --git a/packages/google-cloud-iam/tests/unit/gapic/iam_admin_v1/test_iam.py 
b/packages/google-cloud-iam/tests/unit/gapic/iam_admin_v1/test_iam.py index b80cc892259c..0da1eac42555 100644 --- a/packages/google-cloud-iam/tests/unit/gapic/iam_admin_v1/test_iam.py +++ b/packages/google-cloud-iam/tests/unit/gapic/iam_admin_v1/test_iam.py @@ -1435,13 +1435,13 @@ def test_list_service_accounts_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("name", ""),)), ) pager = client.list_service_accounts(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -9510,10 +9510,10 @@ def test_query_grantable_roles_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () + expected_metadata = () pager = client.query_grantable_roles(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -9929,10 +9929,10 @@ def test_list_roles_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () + expected_metadata = () pager = client.list_roles(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -11936,10 +11936,10 @@ def test_query_testable_permissions_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () + expected_metadata = () pager = client.query_testable_permissions(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 diff --git a/packages/google-cloud-iam/tests/unit/gapic/iam_v2/test_policies.py b/packages/google-cloud-iam/tests/unit/gapic/iam_v2/test_policies.py index 8403e9762792..a4dd2b11ee84 100644 --- a/packages/google-cloud-iam/tests/unit/gapic/iam_v2/test_policies.py +++ b/packages/google-cloud-iam/tests/unit/gapic/iam_v2/test_policies.py @@ -1482,13 +1482,13 @@ def test_list_policies_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_policies(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 diff --git a/packages/google-cloud-iam/tests/unit/gapic/iam_v2beta/test_policies.py b/packages/google-cloud-iam/tests/unit/gapic/iam_v2beta/test_policies.py index 7c3a0a10f98c..1f3ea3b6bb4f 100644 --- a/packages/google-cloud-iam/tests/unit/gapic/iam_v2beta/test_policies.py +++ b/packages/google-cloud-iam/tests/unit/gapic/iam_v2beta/test_policies.py @@ -1459,13 +1459,13 @@ def test_list_policies_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_policies(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 diff --git a/packages/google-cloud-iap/tests/unit/gapic/iap_v1/test_identity_aware_proxy_admin_service.py b/packages/google-cloud-iap/tests/unit/gapic/iap_v1/test_identity_aware_proxy_admin_service.py index 3e92f85005c3..461e667b8a78 100644 --- 
a/packages/google-cloud-iap/tests/unit/gapic/iap_v1/test_identity_aware_proxy_admin_service.py +++ b/packages/google-cloud-iap/tests/unit/gapic/iap_v1/test_identity_aware_proxy_admin_service.py @@ -3179,13 +3179,13 @@ def test_list_tunnel_dest_groups_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_tunnel_dest_groups(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 diff --git a/packages/google-cloud-iap/tests/unit/gapic/iap_v1/test_identity_aware_proxy_o_auth_service.py b/packages/google-cloud-iap/tests/unit/gapic/iap_v1/test_identity_aware_proxy_o_auth_service.py index 264e7e67dc0d..0663cb514033 100644 --- a/packages/google-cloud-iap/tests/unit/gapic/iap_v1/test_identity_aware_proxy_o_auth_service.py +++ b/packages/google-cloud-iap/tests/unit/gapic/iap_v1/test_identity_aware_proxy_o_auth_service.py @@ -2755,13 +2755,13 @@ def test_list_identity_aware_proxy_clients_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_identity_aware_proxy_clients(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 diff --git a/packages/google-cloud-ids/tests/unit/gapic/ids_v1/test_ids.py b/packages/google-cloud-ids/tests/unit/gapic/ids_v1/test_ids.py index 451621429bf1..f61f422df53a 100644 --- a/packages/google-cloud-ids/tests/unit/gapic/ids_v1/test_ids.py +++ b/packages/google-cloud-ids/tests/unit/gapic/ids_v1/test_ids.py @@ -1448,13 +1448,13 @@ def test_list_endpoints_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_endpoints(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 diff --git a/packages/google-cloud-kms-inventory/tests/unit/gapic/kms_inventory_v1/test_key_dashboard_service.py b/packages/google-cloud-kms-inventory/tests/unit/gapic/kms_inventory_v1/test_key_dashboard_service.py index a0954160c006..84b8fe8dacba 100644 --- a/packages/google-cloud-kms-inventory/tests/unit/gapic/kms_inventory_v1/test_key_dashboard_service.py +++ b/packages/google-cloud-kms-inventory/tests/unit/gapic/kms_inventory_v1/test_key_dashboard_service.py @@ -1596,13 +1596,13 @@ def test_list_crypto_keys_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_crypto_keys(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 diff --git a/packages/google-cloud-kms-inventory/tests/unit/gapic/kms_inventory_v1/test_key_tracking_service.py b/packages/google-cloud-kms-inventory/tests/unit/gapic/kms_inventory_v1/test_key_tracking_service.py index 
4d3c8ee50fa3..0118a94954cc 100644 --- a/packages/google-cloud-kms-inventory/tests/unit/gapic/kms_inventory_v1/test_key_tracking_service.py +++ b/packages/google-cloud-kms-inventory/tests/unit/gapic/kms_inventory_v1/test_key_tracking_service.py @@ -2007,13 +2007,13 @@ def test_search_protected_resources_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("scope", ""),)), ) pager = client.search_protected_resources(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 diff --git a/packages/google-cloud-kms/CHANGELOG.md b/packages/google-cloud-kms/CHANGELOG.md index fac2806e7101..37faf3661838 100644 --- a/packages/google-cloud-kms/CHANGELOG.md +++ b/packages/google-cloud-kms/CHANGELOG.md @@ -4,6 +4,13 @@ [1]: https://pypi.org/project/google-cloud-kms/#history +## [2.24.0](https://github.com/googleapis/google-cloud-python/compare/google-cloud-kms-v2.23.0...google-cloud-kms-v2.24.0) (2024-06-24) + + +### Features + +* support Key Access Justifications policy configuration ([6945437](https://github.com/googleapis/google-cloud-python/commit/69454372b112a4fc08cd6ff1fcd0583333b22eef)) + ## [2.23.0](https://github.com/googleapis/google-cloud-python/compare/google-cloud-kms-v2.22.0...google-cloud-kms-v2.23.0) (2024-05-16) diff --git a/packages/google-cloud-kms/google/cloud/kms/__init__.py b/packages/google-cloud-kms/google/cloud/kms/__init__.py index 58112b29bdc9..fa83dff6b3b7 100644 --- a/packages/google-cloud-kms/google/cloud/kms/__init__.py +++ b/packages/google-cloud-kms/google/cloud/kms/__init__.py @@ -62,11 +62,13 @@ VerifyConnectivityResponse, ) from google.cloud.kms_v1.types.resources import ( + AccessReason, CryptoKey, CryptoKeyVersion, CryptoKeyVersionTemplate, ExternalProtectionLevelOptions, ImportJob, + KeyAccessJustificationsPolicy, KeyOperationAttestation, KeyRing, ProtectionLevel, @@ -155,9 +157,11 @@ "CryptoKeyVersionTemplate", "ExternalProtectionLevelOptions", "ImportJob", + "KeyAccessJustificationsPolicy", "KeyOperationAttestation", "KeyRing", "PublicKey", + "AccessReason", "ProtectionLevel", "AsymmetricDecryptRequest", "AsymmetricDecryptResponse", diff --git a/packages/google-cloud-kms/google/cloud/kms/gapic_version.py b/packages/google-cloud-kms/google/cloud/kms/gapic_version.py index 411b87d337df..e4ff6d180bec 100644 --- a/packages/google-cloud-kms/google/cloud/kms/gapic_version.py +++ b/packages/google-cloud-kms/google/cloud/kms/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
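The pager test hunks above (and the matching ones for IAM, IAP, IDS, KMS Inventory, and KMS below) all make the same mechanical rename: the locally built tuple is now called `expected_metadata`, presumably to keep the expected value visually distinct from the metadata the pager itself carries. A minimal sketch of the pattern being asserted, using only `google-api-core`; the resource name is a placeholder:

```python
from google.api_core import gapic_v1

# Routing headers travel as a single "x-goog-request-params" metadata entry
# built from the request fields that drive routing (here, "parent").
expected_metadata = ()
expected_metadata = tuple(expected_metadata) + (
    gapic_v1.routing_header.to_grpc_metadata(
        (("parent", "projects/my-project/locations/us-central1"),)
    ),
)
print(expected_metadata)

# In the generated tests, the pager returned by e.g. client.list_memberships()
# exposes this tuple as pager._metadata, which is what the assertions compare.
```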
# -__version__ = "2.23.0" # {x-release-please-version} +__version__ = "2.24.0" # {x-release-please-version} diff --git a/packages/google-cloud-kms/google/cloud/kms_v1/__init__.py b/packages/google-cloud-kms/google/cloud/kms_v1/__init__.py index fc211477af91..bd9152eabb1b 100644 --- a/packages/google-cloud-kms/google/cloud/kms_v1/__init__.py +++ b/packages/google-cloud-kms/google/cloud/kms_v1/__init__.py @@ -55,11 +55,13 @@ VerifyConnectivityResponse, ) from .types.resources import ( + AccessReason, CryptoKey, CryptoKeyVersion, CryptoKeyVersionTemplate, ExternalProtectionLevelOptions, ImportJob, + KeyAccessJustificationsPolicy, KeyOperationAttestation, KeyRing, ProtectionLevel, @@ -116,6 +118,7 @@ "AutokeyAsyncClient", "EkmServiceAsyncClient", "KeyManagementServiceAsyncClient", + "AccessReason", "AsymmetricDecryptRequest", "AsymmetricDecryptResponse", "AsymmetricSignRequest", @@ -157,6 +160,7 @@ "GetPublicKeyRequest", "ImportCryptoKeyVersionRequest", "ImportJob", + "KeyAccessJustificationsPolicy", "KeyHandle", "KeyManagementServiceClient", "KeyOperationAttestation", diff --git a/packages/google-cloud-kms/google/cloud/kms_v1/gapic_version.py b/packages/google-cloud-kms/google/cloud/kms_v1/gapic_version.py index 411b87d337df..e4ff6d180bec 100644 --- a/packages/google-cloud-kms/google/cloud/kms_v1/gapic_version.py +++ b/packages/google-cloud-kms/google/cloud/kms_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "2.23.0" # {x-release-please-version} +__version__ = "2.24.0" # {x-release-please-version} diff --git a/packages/google-cloud-kms/google/cloud/kms_v1/types/__init__.py b/packages/google-cloud-kms/google/cloud/kms_v1/types/__init__.py index d4354353dd63..7bd4971dac39 100644 --- a/packages/google-cloud-kms/google/cloud/kms_v1/types/__init__.py +++ b/packages/google-cloud-kms/google/cloud/kms_v1/types/__init__.py @@ -43,11 +43,13 @@ VerifyConnectivityResponse, ) from .resources import ( + AccessReason, CryptoKey, CryptoKeyVersion, CryptoKeyVersionTemplate, ExternalProtectionLevelOptions, ImportJob, + KeyAccessJustificationsPolicy, KeyOperationAttestation, KeyRing, ProtectionLevel, @@ -128,9 +130,11 @@ "CryptoKeyVersionTemplate", "ExternalProtectionLevelOptions", "ImportJob", + "KeyAccessJustificationsPolicy", "KeyOperationAttestation", "KeyRing", "PublicKey", + "AccessReason", "ProtectionLevel", "AsymmetricDecryptRequest", "AsymmetricDecryptResponse", diff --git a/packages/google-cloud-kms/google/cloud/kms_v1/types/resources.py b/packages/google-cloud-kms/google/cloud/kms_v1/types/resources.py index 03cb174aa038..d7f70db29d1a 100644 --- a/packages/google-cloud-kms/google/cloud/kms_v1/types/resources.py +++ b/packages/google-cloud-kms/google/cloud/kms_v1/types/resources.py @@ -26,6 +26,7 @@ package="google.cloud.kms.v1", manifest={ "ProtectionLevel", + "AccessReason", "KeyRing", "CryptoKey", "CryptoKeyVersionTemplate", @@ -34,6 +35,7 @@ "PublicKey", "ImportJob", "ExternalProtectionLevelOptions", + "KeyAccessJustificationsPolicy", }, ) @@ -66,6 +68,90 @@ class ProtectionLevel(proto.Enum): EXTERNAL_VPC = 4 +class AccessReason(proto.Enum): + r"""Describes the reason for a data access. Please refer to + https://cloud.google.com/assured-workloads/key-access-justifications/docs/justification-codes + for the detailed semantic meaning of justification reason codes. + + Values: + REASON_UNSPECIFIED (0): + Unspecified access reason. 
+ CUSTOMER_INITIATED_SUPPORT (1): + Customer-initiated support. + GOOGLE_INITIATED_SERVICE (2): + Google-initiated access for system management + and troubleshooting. + THIRD_PARTY_DATA_REQUEST (3): + Google-initiated access in response to a + legal request or legal process. + GOOGLE_INITIATED_REVIEW (4): + Google-initiated access for security, fraud, + abuse, or compliance purposes. + CUSTOMER_INITIATED_ACCESS (5): + Customer uses their account to perform any + access to their own data which their IAM policy + authorizes. + GOOGLE_INITIATED_SYSTEM_OPERATION (6): + Google systems access customer data to help + optimize the structure of the data or quality + for future uses by the customer. + REASON_NOT_EXPECTED (7): + No reason is expected for this key request. + MODIFIED_CUSTOMER_INITIATED_ACCESS (8): + Customer uses their account to perform any access to their + own data which their IAM policy authorizes, and one of the + following is true: + + - A Google administrator has reset the root-access account + associated with the user's organization within the past 7 + days. + - A Google-initiated emergency access operation has + interacted with a resource in the same project or folder + as the currently accessed resource within the past 7 + days. + MODIFIED_GOOGLE_INITIATED_SYSTEM_OPERATION (9): + Google systems access customer data to help optimize the + structure of the data or quality for future uses by the + customer, and one of the following is true: + + - A Google administrator has reset the root-access account + associated with the user's organization within the past 7 + days. + - A Google-initiated emergency access operation has + interacted with a resource in the same project or folder + as the currently accessed resource within the past 7 + days. + GOOGLE_RESPONSE_TO_PRODUCTION_ALERT (10): + Google-initiated access to maintain system + reliability. + CUSTOMER_AUTHORIZED_WORKFLOW_SERVICING (11): + One of the following operations is being executed while + simultaneously encountering an internal technical issue + which prevented a more precise justification code from being + generated: + + - Your account has been used to perform any access to your + own data which your IAM policy authorizes. + - An automated Google system operates on encrypted customer + data which your IAM policy authorizes. + - Customer-initiated Google support access. + - Google-initiated support access to protect system + reliability. + """ + REASON_UNSPECIFIED = 0 + CUSTOMER_INITIATED_SUPPORT = 1 + GOOGLE_INITIATED_SERVICE = 2 + THIRD_PARTY_DATA_REQUEST = 3 + GOOGLE_INITIATED_REVIEW = 4 + CUSTOMER_INITIATED_ACCESS = 5 + GOOGLE_INITIATED_SYSTEM_OPERATION = 6 + REASON_NOT_EXPECTED = 7 + MODIFIED_CUSTOMER_INITIATED_ACCESS = 8 + MODIFIED_GOOGLE_INITIATED_SYSTEM_OPERATION = 9 + GOOGLE_RESPONSE_TO_PRODUCTION_ALERT = 10 + CUSTOMER_AUTHORIZED_WORKFLOW_SERVICING = 11 + + class KeyRing(proto.Message): r"""A [KeyRing][google.cloud.kms.v1.KeyRing] is a toplevel logical grouping of [CryptoKeys][google.cloud.kms.v1.CryptoKey]. @@ -208,6 +294,19 @@ class CryptoKey(proto.Message): is non-exhaustive and may apply to additional [ProtectionLevels][google.cloud.kms.v1.ProtectionLevel] in the future. + key_access_justifications_policy (google.cloud.kms_v1.types.KeyAccessJustificationsPolicy): + Optional. The policy used for Key Access + Justifications Policy Enforcement. 
If this field + is present and this key is enrolled in Key + Access Justifications Policy Enforcement, the + policy will be evaluated in encrypt, decrypt, + and sign operations, and the operation will fail + if rejected by the policy. The policy is defined + by specifying zero or more allowed justification + codes. + https://cloud.google.com/assured-workloads/key-access-justifications/docs/justification-codes + By default, this field is absent, and all + justification codes are allowed. """ class CryptoKeyPurpose(proto.Enum): @@ -313,6 +412,11 @@ class CryptoKeyPurpose(proto.Enum): proto.STRING, number=15, ) + key_access_justifications_policy: "KeyAccessJustificationsPolicy" = proto.Field( + proto.MESSAGE, + number=17, + message="KeyAccessJustificationsPolicy", + ) class CryptoKeyVersionTemplate(proto.Message): @@ -1251,4 +1355,29 @@ class ExternalProtectionLevelOptions(proto.Message): ) +class KeyAccessJustificationsPolicy(proto.Message): + r"""A + [KeyAccessJustificationsPolicy][google.cloud.kms.v1.KeyAccessJustificationsPolicy] + specifies zero or more allowed + [AccessReason][google.cloud.kms.v1.AccessReason] values for encrypt, + decrypt, and sign operations on a + [CryptoKey][google.cloud.kms.v1.CryptoKey]. + + Attributes: + allowed_access_reasons (MutableSequence[google.cloud.kms_v1.types.AccessReason]): + The list of allowed reasons for access to a + [CryptoKey][google.cloud.kms.v1.CryptoKey]. Zero allowed + access reasons means all encrypt, decrypt, and sign + operations for the + [CryptoKey][google.cloud.kms.v1.CryptoKey] associated with + this policy will fail. + """ + + allowed_access_reasons: MutableSequence["AccessReason"] = proto.RepeatedField( + proto.ENUM, + number=1, + enum="AccessReason", + ) + + __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-kms/samples/generated_samples/snippet_metadata_google.cloud.kms.v1.json b/packages/google-cloud-kms/samples/generated_samples/snippet_metadata_google.cloud.kms.v1.json index cf29f9ff0cf2..4f9aea7ed3e6 100644 --- a/packages/google-cloud-kms/samples/generated_samples/snippet_metadata_google.cloud.kms.v1.json +++ b/packages/google-cloud-kms/samples/generated_samples/snippet_metadata_google.cloud.kms.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-kms", - "version": "2.23.0" + "version": "2.24.0" }, "snippets": [ { diff --git a/packages/google-cloud-kms/tests/unit/gapic/kms_v1/test_ekm_service.py b/packages/google-cloud-kms/tests/unit/gapic/kms_v1/test_ekm_service.py index a33d6343b314..190da882a01b 100644 --- a/packages/google-cloud-kms/tests/unit/gapic/kms_v1/test_ekm_service.py +++ b/packages/google-cloud-kms/tests/unit/gapic/kms_v1/test_ekm_service.py @@ -1526,13 +1526,13 @@ def test_list_ekm_connections_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_ekm_connections(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 diff --git a/packages/google-cloud-kms/tests/unit/gapic/kms_v1/test_key_management_service.py b/packages/google-cloud-kms/tests/unit/gapic/kms_v1/test_key_management_service.py index bcb03230bb12..1dcad5fcb794 100644 --- a/packages/google-cloud-kms/tests/unit/gapic/kms_v1/test_key_management_service.py +++ 
b/packages/google-cloud-kms/tests/unit/gapic/kms_v1/test_key_management_service.py @@ -1613,13 +1613,13 @@ def test_list_key_rings_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_key_rings(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -2185,13 +2185,13 @@ def test_list_crypto_keys_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_crypto_keys(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -2780,13 +2780,13 @@ def test_list_crypto_key_versions_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_crypto_key_versions(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -3358,13 +3358,13 @@ def test_list_import_jobs_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_import_jobs(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -16725,6 +16725,7 @@ def test_create_crypto_key_rest(request_type): "import_only": True, "destroy_scheduled_duration": {}, "crypto_key_backend": "crypto_key_backend_value", + "key_access_justifications_policy": {"allowed_access_reasons": [1]}, } # The version of a generated dependency at test runtime may differ from the version used during generation. # Delete any fields which are not present in the current runtime dependency @@ -18371,6 +18372,7 @@ def test_update_crypto_key_rest(request_type): "import_only": True, "destroy_scheduled_duration": {}, "crypto_key_backend": "crypto_key_backend_value", + "key_access_justifications_policy": {"allowed_access_reasons": [1]}, } # The version of a generated dependency at test runtime may differ from the version used during generation. 
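The `AccessReason` enum, the `KeyAccessJustificationsPolicy` message, and the new `CryptoKey.key_access_justifications_policy` field added above come together when a key is created or updated. A hedged sketch, not taken from this diff: it assumes a project enrolled in Key Access Justifications, and the project, location, and key ring names are placeholders.

```python
from google.cloud import kms_v1

client = kms_v1.KeyManagementServiceClient()

# Allow only customer-initiated access and customer-initiated support;
# encrypt, decrypt, and sign requests with any other justification code
# are rejected once the key is enrolled in KAJ policy enforcement.
kaj_policy = kms_v1.KeyAccessJustificationsPolicy(
    allowed_access_reasons=[
        kms_v1.AccessReason.CUSTOMER_INITIATED_ACCESS,
        kms_v1.AccessReason.CUSTOMER_INITIATED_SUPPORT,
    ]
)

crypto_key = kms_v1.CryptoKey(
    purpose=kms_v1.CryptoKey.CryptoKeyPurpose.ENCRYPT_DECRYPT,
    key_access_justifications_policy=kaj_policy,
)

created = client.create_crypto_key(
    parent=client.key_ring_path("my-project", "us-central1", "my-key-ring"),
    crypto_key_id="kaj-restricted-key",
    crypto_key=crypto_key,
)
print(created.key_access_justifications_policy)
```

Per the field documentation above, leaving the field unset allows all justification codes, while an explicitly empty `allowed_access_reasons` list causes every encrypt, decrypt, and sign operation on the key to fail.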
# Delete any fields which are not present in the current runtime dependency diff --git a/packages/google-cloud-life-sciences/CHANGELOG.md b/packages/google-cloud-life-sciences/CHANGELOG.md index 8b712db15357..9851061c179f 100644 --- a/packages/google-cloud-life-sciences/CHANGELOG.md +++ b/packages/google-cloud-life-sciences/CHANGELOG.md @@ -1,5 +1,12 @@ # Changelog +## [0.9.10](https://github.com/googleapis/google-cloud-python/compare/google-cloud-life-sciences-v0.9.9...google-cloud-life-sciences-v0.9.10) (2024-06-27) + + +### Documentation + +* [google-cloud-life-sciences] modify example accelerator type ([#12840](https://github.com/googleapis/google-cloud-python/issues/12840)) ([9210610](https://github.com/googleapis/google-cloud-python/commit/9210610dd2f6593dfe1c14039f9024ead8d19795)) + ## [0.9.9](https://github.com/googleapis/google-cloud-python/compare/google-cloud-life-sciences-v0.9.8...google-cloud-life-sciences-v0.9.9) (2024-03-05) diff --git a/packages/google-cloud-life-sciences/README.rst b/packages/google-cloud-life-sciences/README.rst index 83ac6d4c1a8c..3736eb8ede06 100644 --- a/packages/google-cloud-life-sciences/README.rst +++ b/packages/google-cloud-life-sciences/README.rst @@ -15,7 +15,7 @@ Python Client for Cloud Life Sciences .. |versions| image:: https://img.shields.io/pypi/pyversions/google-cloud-life-sciences.svg :target: https://pypi.org/project/google-cloud-life-sciences/ .. _Cloud Life Sciences: https://cloud.google.com/life-sciences/ -.. _Client Library Documentation: https://cloud.google.com/python/docs/reference/lifesciences/latest +.. _Client Library Documentation: https://cloud.google.com/python/docs/reference/lifesciences/latest/summary_overview .. _Product Documentation: https://cloud.google.com/life-sciences/ Quick Start diff --git a/packages/google-cloud-life-sciences/docs/index.rst b/packages/google-cloud-life-sciences/docs/index.rst index 74879abb045b..49a906cbb188 100644 --- a/packages/google-cloud-life-sciences/docs/index.rst +++ b/packages/google-cloud-life-sciences/docs/index.rst @@ -21,3 +21,8 @@ For a list of all ``google-cloud-life-sciences`` releases: :maxdepth: 2 CHANGELOG + +.. toctree:: + :hidden: + + summary_overview.md diff --git a/packages/google-cloud-life-sciences/docs/summary_overview.md b/packages/google-cloud-life-sciences/docs/summary_overview.md new file mode 100644 index 000000000000..393b50ba42ad --- /dev/null +++ b/packages/google-cloud-life-sciences/docs/summary_overview.md @@ -0,0 +1,22 @@ +[ +This is a templated file. Adding content to this file may result in it being +reverted. Instead, if you want to place additional content, create an +"overview_content.md" file in `docs/` directory. The Sphinx tool will +pick up on the content and merge the content. +]: # + +# Cloud Life Sciences API + +Overview of the APIs available for Cloud Life Sciences API. + +## All entries + +Classes, methods and properties & attributes for +Cloud Life Sciences API. 
+ +[classes](https://cloud.google.com/python/docs/reference/lifesciences/latest/summary_class.html) + +[methods](https://cloud.google.com/python/docs/reference/lifesciences/latest/summary_method.html) + +[properties and +attributes](https://cloud.google.com/python/docs/reference/lifesciences/latest/summary_property.html) diff --git a/packages/google-cloud-life-sciences/google/cloud/lifesciences/gapic_version.py b/packages/google-cloud-life-sciences/google/cloud/lifesciences/gapic_version.py index 558c8aab67c5..07e174dd263b 100644 --- a/packages/google-cloud-life-sciences/google/cloud/lifesciences/gapic_version.py +++ b/packages/google-cloud-life-sciences/google/cloud/lifesciences/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.0.0" # {x-release-please-version} +__version__ = "0.9.10" # {x-release-please-version} diff --git a/packages/google-cloud-life-sciences/google/cloud/lifesciences_v2beta/gapic_version.py b/packages/google-cloud-life-sciences/google/cloud/lifesciences_v2beta/gapic_version.py index 558c8aab67c5..07e174dd263b 100644 --- a/packages/google-cloud-life-sciences/google/cloud/lifesciences_v2beta/gapic_version.py +++ b/packages/google-cloud-life-sciences/google/cloud/lifesciences_v2beta/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.0.0" # {x-release-please-version} +__version__ = "0.9.10" # {x-release-please-version} diff --git a/packages/google-cloud-life-sciences/google/cloud/lifesciences_v2beta/types/workflows.py b/packages/google-cloud-life-sciences/google/cloud/lifesciences_v2beta/types/workflows.py index 61e954e5ea33..e1e7795a3f56 100644 --- a/packages/google-cloud-life-sciences/google/cloud/lifesciences_v2beta/types/workflows.py +++ b/packages/google-cloud-life-sciences/google/cloud/lifesciences_v2beta/types/workflows.py @@ -740,7 +740,7 @@ class Accelerator(proto.Message): Attributes: type_ (str): The accelerator type string (for example, - "nvidia-tesla-k80"). + "nvidia-tesla-t4"). Only NVIDIA GPU accelerators are currently supported. If an NVIDIA GPU is attached, the required runtime libraries will diff --git a/packages/google-cloud-life-sciences/noxfile.py b/packages/google-cloud-life-sciences/noxfile.py index 1e6cd48d0529..67b7265f7586 100644 --- a/packages/google-cloud-life-sciences/noxfile.py +++ b/packages/google-cloud-life-sciences/noxfile.py @@ -160,14 +160,28 @@ def install_unittest_dependencies(session, *constraints): session.install("-e", ".", *constraints) -def default(session): +@nox.session(python=UNIT_TEST_PYTHON_VERSIONS) +@nox.parametrize( + "protobuf_implementation", + ["python", "upb", "cpp"], +) +def unit(session, protobuf_implementation): # Install all test dependencies, then install this package in-place. 
+ if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12"): + session.skip("cpp implementation is not supported in python 3.11+") + constraints_path = str( CURRENT_DIRECTORY / "testing" / f"constraints-{session.python}.txt" ) install_unittest_dependencies(session, "-c", constraints_path) + # TODO(https://github.com/googleapis/synthtool/issues/1976): + # Remove the 'cpp' implementation once support for Protobuf 3.x is dropped. + # The 'cpp' implementation requires Protobuf<4. + if protobuf_implementation == "cpp": + session.install("protobuf<4") + # Run py.test against the unit tests. session.run( "py.test", @@ -181,15 +195,12 @@ def default(session): "--cov-fail-under=0", os.path.join("tests", "unit"), *session.posargs, + env={ + "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation, + }, ) -@nox.session(python=UNIT_TEST_PYTHON_VERSIONS) -def unit(session): - """Run the unit test suite.""" - default(session) - - def install_systemtest_dependencies(session, *constraints): # Use pre-release gRPC for system tests. # Exclude version 1.52.0rc1 which has a known issue. @@ -358,9 +369,16 @@ def docfx(session): @nox.session(python="3.12") -def prerelease_deps(session): +@nox.parametrize( + "protobuf_implementation", + ["python", "upb", "cpp"], +) +def prerelease_deps(session, protobuf_implementation): """Run all tests with prerelease versions of dependencies installed.""" + if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12"): + session.skip("cpp implementation is not supported in python 3.11+") + # Install all dependencies session.install("-e", ".[all, tests, tracing]") unit_deps_all = UNIT_TEST_STANDARD_DEPENDENCIES + UNIT_TEST_EXTERNAL_DEPENDENCIES @@ -397,9 +415,9 @@ def prerelease_deps(session): "protobuf", # dependency of grpc "six", + "grpc-google-iam-v1", "googleapis-common-protos", - # Exclude version 1.52.0rc1 which has a known issue. 
See https://github.com/grpc/grpc/issues/32163 - "grpcio!=1.52.0rc1", + "grpcio", "grpcio-status", "google-api-core", "google-auth", @@ -425,4 +443,10 @@ def prerelease_deps(session): session.run("python", "-c", "import grpc; print(grpc.__version__)") session.run("python", "-c", "import google.auth; print(google.auth.__version__)") - session.run("py.test", "tests/unit") + session.run( + "py.test", + "tests/unit", + env={ + "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation, + }, + ) diff --git a/packages/google-cloud-life-sciences/samples/generated_samples/snippet_metadata_google.cloud.lifesciences.v2beta.json b/packages/google-cloud-life-sciences/samples/generated_samples/snippet_metadata_google.cloud.lifesciences.v2beta.json index a5f3a0632b0b..3a19c1405939 100644 --- a/packages/google-cloud-life-sciences/samples/generated_samples/snippet_metadata_google.cloud.lifesciences.v2beta.json +++ b/packages/google-cloud-life-sciences/samples/generated_samples/snippet_metadata_google.cloud.lifesciences.v2beta.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-life-sciences", - "version": "0.1.0" + "version": "0.9.10" }, "snippets": [ { diff --git a/packages/google-cloud-managed-identities/tests/unit/gapic/managedidentities_v1/test_managed_identities_service.py b/packages/google-cloud-managed-identities/tests/unit/gapic/managedidentities_v1/test_managed_identities_service.py index 5a6e42f46128..b62392282002 100644 --- a/packages/google-cloud-managed-identities/tests/unit/gapic/managedidentities_v1/test_managed_identities_service.py +++ b/packages/google-cloud-managed-identities/tests/unit/gapic/managedidentities_v1/test_managed_identities_service.py @@ -2382,13 +2382,13 @@ def test_list_domains_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_domains(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 diff --git a/packages/google-cloud-managedkafka/.OwlBot.yaml b/packages/google-cloud-managedkafka/.OwlBot.yaml new file mode 100644 index 000000000000..c870b8a493df --- /dev/null +++ b/packages/google-cloud-managedkafka/.OwlBot.yaml @@ -0,0 +1,18 @@ +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
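The Life Sciences change above only swaps the documented example accelerator type from `nvidia-tesla-k80` to `nvidia-tesla-t4`; the field itself is unchanged. For illustration, a small sketch of populating the attribute, assuming `Accelerator` is exported at the `lifesciences_v2beta` level as the generated surface normally does; the count is a placeholder:

```python
from google.cloud import lifesciences_v2beta

# Only NVIDIA GPU accelerators are supported, per the field documentation;
# the type string names the GPU model to attach to the pipeline's VM.
accelerator = lifesciences_v2beta.Accelerator(
    type_="nvidia-tesla-t4",
    count=1,
)
print(accelerator)
```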
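The `noxfile.py` changes above fold the old `default` helper into a parametrized `unit` session and run the suite once per protobuf runtime, selected through the `PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION` environment variable; `prerelease_deps` gets the same treatment. A stripped-down sketch of the pattern (this is not the generated noxfile; the dependency list and test path are placeholders):

```python
import nox

UNIT_TEST_PYTHON_VERSIONS = ["3.8", "3.9", "3.10", "3.11", "3.12"]


@nox.session(python=UNIT_TEST_PYTHON_VERSIONS)
@nox.parametrize("protobuf_implementation", ["python", "upb", "cpp"])
def unit(session, protobuf_implementation):
    # The "cpp" runtime requires protobuf<4, which is not supported on 3.11+.
    if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12"):
        session.skip("cpp implementation is not supported in python 3.11+")

    session.install("-e", ".", "pytest")
    if protobuf_implementation == "cpp":
        session.install("protobuf<4")

    # The environment variable tells protobuf which runtime to load at import.
    session.run(
        "pytest",
        "tests/unit",
        env={"PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation},
    )
```

A single combination can then be selected with something like `nox -s "unit-3.12(protobuf_implementation='upb')"`.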
+ +deep-copy-regex: + - source: /google/cloud/managedkafka/(v.*)/.*-py + dest: /owl-bot-staging/google-cloud-managedkafka/$1 +api-name: google-cloud-managedkafka diff --git a/packages/google-cloud-managedkafka/.coveragerc b/packages/google-cloud-managedkafka/.coveragerc new file mode 100644 index 000000000000..a862c1bb99ef --- /dev/null +++ b/packages/google-cloud-managedkafka/.coveragerc @@ -0,0 +1,13 @@ +[run] +branch = True + +[report] +show_missing = True +omit = + google/cloud/managedkafka/__init__.py + google/cloud/managedkafka/gapic_version.py +exclude_lines = + # Re-enable the standard pragma + pragma: NO COVER + # Ignore debug-only repr + def __repr__ diff --git a/packages/google-cloud-managedkafka/.flake8 b/packages/google-cloud-managedkafka/.flake8 new file mode 100644 index 000000000000..87f6e408c47d --- /dev/null +++ b/packages/google-cloud-managedkafka/.flake8 @@ -0,0 +1,33 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Generated by synthtool. DO NOT EDIT! +[flake8] +ignore = E203, E231, E266, E501, W503 +exclude = + # Exclude generated code. + **/proto/** + **/gapic/** + **/services/** + **/types/** + *_pb2.py + + # Standard linting exemptions. + **/.nox/** + __pycache__, + .git, + *.pyc, + conf.py diff --git a/packages/google-cloud-managedkafka/.gitignore b/packages/google-cloud-managedkafka/.gitignore new file mode 100644 index 000000000000..b4243ced74e4 --- /dev/null +++ b/packages/google-cloud-managedkafka/.gitignore @@ -0,0 +1,63 @@ +*.py[cod] +*.sw[op] + +# C extensions +*.so + +# Packages +*.egg +*.egg-info +dist +build +eggs +.eggs +parts +bin +var +sdist +develop-eggs +.installed.cfg +lib +lib64 +__pycache__ + +# Installer logs +pip-log.txt + +# Unit test / coverage reports +.coverage +.nox +.cache +.pytest_cache + + +# Mac +.DS_Store + +# JetBrains +.idea + +# VS Code +.vscode + +# emacs +*~ + +# Built documentation +docs/_build +bigquery/docs/generated +docs.metadata + +# Virtual environment +env/ + +# Test logs +coverage.xml +*sponge_log.xml + +# System test environment variables. +system_tests/local_test_setup + +# Make sure a generated file isn't accidentally committed. 
+pylintrc +pylintrc.test diff --git a/packages/google-cloud-managedkafka/.repo-metadata.json b/packages/google-cloud-managedkafka/.repo-metadata.json new file mode 100644 index 000000000000..bae310658c84 --- /dev/null +++ b/packages/google-cloud-managedkafka/.repo-metadata.json @@ -0,0 +1,17 @@ +{ + "name": "google-cloud-managedkafka", + "name_pretty": "Apache Kafka for BigQuery API", + "api_description": "Apache Kafka for BigQuery is a managed cloud service that lets you ingest Kafka streams directly into Google Cloud.", + "product_documentation": "https://cloud.google.com/managed-kafka", + "client_documentation": "https://cloud.google.com/python/docs/reference/google-cloud-managedkafka/latest", + "issue_tracker": "https://issuetracker.google.com/issues/new?component=1376234", + "release_level": "preview", + "language": "python", + "library_type": "GAPIC_AUTO", + "repo": "googleapis/google-cloud-python", + "distribution_name": "google-cloud-managedkafka", + "api_id": "managedkafka.googleapis.com", + "default_version": "v1", + "codeowner_team": "", + "api_shortname": "managedkafka" +} diff --git a/packages/google-cloud-managedkafka/CHANGELOG.md b/packages/google-cloud-managedkafka/CHANGELOG.md new file mode 100644 index 000000000000..6b08cf64bbd7 --- /dev/null +++ b/packages/google-cloud-managedkafka/CHANGELOG.md @@ -0,0 +1,10 @@ +# Changelog + +## 0.1.0 (2024-06-10) + + +### Features + +* add initial files for google.cloud.managedkafka.v1 ([#12781](https://github.com/googleapis/google-cloud-python/issues/12781)) ([e05d380](https://github.com/googleapis/google-cloud-python/commit/e05d380453ee3555ecbde870a82c27023910e066)) + +## Changelog diff --git a/packages/google-cloud-managedkafka/CODE_OF_CONDUCT.md b/packages/google-cloud-managedkafka/CODE_OF_CONDUCT.md new file mode 100644 index 000000000000..039f43681204 --- /dev/null +++ b/packages/google-cloud-managedkafka/CODE_OF_CONDUCT.md @@ -0,0 +1,95 @@ + +# Code of Conduct + +## Our Pledge + +In the interest of fostering an open and welcoming environment, we as +contributors and maintainers pledge to making participation in our project and +our community a harassment-free experience for everyone, regardless of age, body +size, disability, ethnicity, gender identity and expression, level of +experience, education, socio-economic status, nationality, personal appearance, +race, religion, or sexual identity and orientation. 
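Stepping back from the boilerplate files: the `.repo-metadata.json` above registers `google-cloud-managedkafka` as a new preview client for `managedkafka.googleapis.com`, with `v1` as the default version. As a rough sketch of first use, under the assumption that the generated v1 surface exposes a `ManagedKafkaClient` with a `list_clusters` method (neither appears in this excerpt, so treat the names as assumptions), it might look like:

```python
from google.cloud import managedkafka_v1  # assumed module name for the v1 surface

# Placeholder project and region; regional APIs conventionally use a
# "projects/{project}/locations/{location}" parent.
client = managedkafka_v1.ManagedKafkaClient()  # assumed client class name

for cluster in client.list_clusters(
    parent="projects/my-project/locations/us-central1"
):
    print(cluster.name)
```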
+ +## Our Standards + +Examples of behavior that contributes to creating a positive environment +include: + +* Using welcoming and inclusive language +* Being respectful of differing viewpoints and experiences +* Gracefully accepting constructive criticism +* Focusing on what is best for the community +* Showing empathy towards other community members + +Examples of unacceptable behavior by participants include: + +* The use of sexualized language or imagery and unwelcome sexual attention or + advances +* Trolling, insulting/derogatory comments, and personal or political attacks +* Public or private harassment +* Publishing others' private information, such as a physical or electronic + address, without explicit permission +* Other conduct which could reasonably be considered inappropriate in a + professional setting + +## Our Responsibilities + +Project maintainers are responsible for clarifying the standards of acceptable +behavior and are expected to take appropriate and fair corrective action in +response to any instances of unacceptable behavior. + +Project maintainers have the right and responsibility to remove, edit, or reject +comments, commits, code, wiki edits, issues, and other contributions that are +not aligned to this Code of Conduct, or to ban temporarily or permanently any +contributor for other behaviors that they deem inappropriate, threatening, +offensive, or harmful. + +## Scope + +This Code of Conduct applies both within project spaces and in public spaces +when an individual is representing the project or its community. Examples of +representing a project or community include using an official project e-mail +address, posting via an official social media account, or acting as an appointed +representative at an online or offline event. Representation of a project may be +further defined and clarified by project maintainers. + +This Code of Conduct also applies outside the project spaces when the Project +Steward has a reasonable belief that an individual's behavior may have a +negative impact on the project or its community. + +## Conflict Resolution + +We do not believe that all conflict is bad; healthy debate and disagreement +often yield positive results. However, it is never okay to be disrespectful or +to engage in behavior that violates the project’s code of conduct. + +If you see someone violating the code of conduct, you are encouraged to address +the behavior directly with those involved. Many issues can be resolved quickly +and easily, and this gives people more control over the outcome of their +dispute. If you are unable to resolve the matter for any reason, or if the +behavior is threatening or harassing, report it. We are dedicated to providing +an environment where participants feel welcome and safe. + + +Reports should be directed to *googleapis-stewards@google.com*, the +Project Steward(s) for *Google Cloud Client Libraries*. It is the Project Steward’s duty to +receive and address reported violations of the code of conduct. They will then +work with a committee consisting of representatives from the Open Source +Programs Office and the Google Open Source Strategy team. If for any reason you +are uncomfortable reaching out to the Project Steward, please email +opensource@google.com. + +We will investigate every complaint, but you may not receive a direct response. 
+We will use our discretion in determining when and how to follow up on reported +incidents, which may range from not taking action to permanent expulsion from +the project and project-sponsored spaces. We will notify the accused of the +report and provide them an opportunity to discuss it before any action is taken. +The identity of the reporter will be omitted from the details of the report +supplied to the accused. In potentially harmful situations, such as ongoing +harassment or threats to anyone's safety, we may take action without notice. + +## Attribution + +This Code of Conduct is adapted from the Contributor Covenant, version 1.4, +available at +https://www.contributor-covenant.org/version/1/4/code-of-conduct.html \ No newline at end of file diff --git a/packages/google-cloud-managedkafka/CONTRIBUTING.rst b/packages/google-cloud-managedkafka/CONTRIBUTING.rst new file mode 100644 index 000000000000..db0fcd3deac8 --- /dev/null +++ b/packages/google-cloud-managedkafka/CONTRIBUTING.rst @@ -0,0 +1,271 @@ +.. Generated by synthtool. DO NOT EDIT! +############ +Contributing +############ + +#. **Please sign one of the contributor license agreements below.** +#. Fork the repo, develop and test your code changes, add docs. +#. Make sure that your commit messages clearly describe the changes. +#. Send a pull request. (Please Read: `Faster Pull Request Reviews`_) + +.. _Faster Pull Request Reviews: https://github.com/kubernetes/community/blob/master/contributors/guide/pull-requests.md#best-practices-for-faster-reviews + +.. contents:: Here are some guidelines for hacking on the Google Cloud Client libraries. + +*************** +Adding Features +*************** + +In order to add a feature: + +- The feature must be documented in both the API and narrative + documentation. + +- The feature must work fully on the following CPython versions: + 3.7, 3.8, 3.9, 3.10, 3.11 and 3.12 on both UNIX and Windows. + +- The feature must not add unnecessary dependencies (where + "unnecessary" is of course subjective, but new dependencies should + be discussed). + +**************************** +Using a Development Checkout +**************************** + +You'll have to create a development environment using a Git checkout: + +- While logged into your GitHub account, navigate to the + ``google-cloud-python`` `repo`_ on GitHub. + +- Fork and clone the ``google-cloud-python`` repository to your GitHub account by + clicking the "Fork" button. + +- Clone your fork of ``google-cloud-python`` from your GitHub account to your local + computer, substituting your account username and specifying the destination + as ``hack-on-google-cloud-python``. E.g.:: + + $ cd ${HOME} + $ git clone git@github.com:USERNAME/google-cloud-python.git hack-on-google-cloud-python + $ cd hack-on-google-cloud-python + # Configure remotes such that you can pull changes from the googleapis/google-cloud-python + # repository into your local repository. + $ git remote add upstream git@github.com:googleapis/google-cloud-python.git + # fetch and merge changes from upstream into main + $ git fetch upstream + $ git merge upstream/main + +Now your local repo is set up such that you will push changes to your GitHub +repo, from which you can submit a pull request. + +To work on the codebase and run the tests, we recommend using ``nox``, +but you can also use a ``virtualenv`` of your own creation. + +.. 
_repo: https://github.com/googleapis/google-cloud-python + +Using ``nox`` +============= + +We use `nox `__ to instrument our tests. + +- To test your changes, run unit tests with ``nox``:: + $ nox -s unit + +- To run a single unit test:: + + $ nox -s unit-3.12 -- -k + + + .. note:: + + The unit tests and system tests are described in the + ``noxfile.py`` files in each directory. + +.. nox: https://pypi.org/project/nox/ + +***************************************** +I'm getting weird errors... Can you help? +***************************************** + +If the error mentions ``Python.h`` not being found, +install ``python-dev`` and try again. +On Debian/Ubuntu:: + + $ sudo apt-get install python-dev + +************ +Coding Style +************ +- We use the automatic code formatter ``black``. You can run it using + the nox session ``blacken``. This will eliminate many lint errors. Run via:: + + $ nox -s blacken + +- PEP8 compliance is required, with exceptions defined in the linter configuration. + If you have ``nox`` installed, you can test that you have not introduced + any non-compliant code via:: + + $ nox -s lint + +- In order to make ``nox -s lint`` run faster, you can set some environment + variables:: + + export GOOGLE_CLOUD_TESTING_REMOTE="upstream" + export GOOGLE_CLOUD_TESTING_BRANCH="main" + + By doing this, you are specifying the location of the most up-to-date + version of ``google-cloud-python``. The + remote name ``upstream`` should point to the official ``googleapis`` + checkout and the branch should be the default branch on that remote (``main``). + +- This repository contains configuration for the + `pre-commit `__ tool, which automates checking + our linters during a commit. If you have it installed on your ``$PATH``, + you can enable enforcing those checks via: + +.. code-block:: bash + + $ pre-commit install + pre-commit installed at .git/hooks/pre-commit + +Exceptions to PEP8: + +- Many unit tests use a helper method, ``_call_fut`` ("FUT" is short for + "Function-Under-Test"), which is PEP8-incompliant, but more readable. + Some also use a local variable, ``MUT`` (short for "Module-Under-Test"). + +******************** +Running System Tests +******************** + +- To run system tests, you can execute:: + + # Run all system tests + $ nox -s system + + # Run a single system test + $ nox -s system-3.12 -- -k + + + .. note:: + + System tests are only configured to run under Python 3.8, 3.9, 3.10, 3.11 and 3.12. + For expediency, we do not run them in older versions of Python 3. + + This alone will not run the tests. You'll need to change some local + auth settings and change some configuration in your project to + run all the tests. + +- System tests will be run against an actual project. You should use local credentials from gcloud when possible. See `Best practices for application authentication `__. Some tests require a service account. For those tests see `Authenticating as a service account `__. + +************* +Test Coverage +************* + +- The codebase *must* have 100% test statement coverage after each commit. + You can test coverage via ``nox -s cover``. 
+ +****************************************************** +Documentation Coverage and Building HTML Documentation +****************************************************** + +If you fix a bug, and the bug requires an API or behavior modification, all +documentation in this package which references that API or behavior must be +changed to reflect the bug fix, ideally in the same commit that fixes the bug +or adds the feature. + +Build the docs via: + + $ nox -s docs + +************************* +Samples and code snippets +************************* + +Code samples and snippets live in the `samples/` catalogue. Feel free to +provide more examples, but make sure to write tests for those examples. +Each folder containing example code requires its own `noxfile.py` script +which automates testing. If you decide to create a new folder, you can +base it on the `samples/snippets` folder (providing `noxfile.py` and +the requirements files). + +The tests will run against a real Google Cloud Project, so you should +configure them just like the System Tests. + +- To run sample tests, you can execute:: + + # Run all tests in a folder + $ cd samples/snippets + $ nox -s py-3.8 + + # Run a single sample test + $ cd samples/snippets + $ nox -s py-3.8 -- -k + +******************************************** +Note About ``README`` as it pertains to PyPI +******************************************** + +The `description on PyPI`_ for the project comes directly from the +``README``. Due to the reStructuredText (``rst``) parser used by +PyPI, relative links which will work on GitHub (e.g. ``CONTRIBUTING.rst`` +instead of +``https://github.com/googleapis/google-cloud-python/blob/main/CONTRIBUTING.rst``) +may cause problems creating links or rendering the description. + +.. _description on PyPI: https://pypi.org/project/google-cloud-managedkafka + + +************************* +Supported Python Versions +************************* + +We support: + +- `Python 3.7`_ +- `Python 3.8`_ +- `Python 3.9`_ +- `Python 3.10`_ +- `Python 3.11`_ +- `Python 3.12`_ + +.. _Python 3.7: https://docs.python.org/3.7/ +.. _Python 3.8: https://docs.python.org/3.8/ +.. _Python 3.9: https://docs.python.org/3.9/ +.. _Python 3.10: https://docs.python.org/3.10/ +.. _Python 3.11: https://docs.python.org/3.11/ +.. _Python 3.12: https://docs.python.org/3.12/ + + +Supported versions can be found in our ``noxfile.py`` `config`_. + +.. _config: https://github.com/googleapis/google-cloud-python/blob/main/packages/google-cloud-managedkafka/noxfile.py + + +********** +Versioning +********** + +This library follows `Semantic Versioning`_. + +.. _Semantic Versioning: http://semver.org/ + +Some packages are currently in major version zero (``0.y.z``), which means that +anything may change at any time and the public API should not be considered +stable. 
+ +****************************** +Contributor License Agreements +****************************** + +Before we can accept your pull requests you'll need to sign a Contributor +License Agreement (CLA): + +- **If you are an individual writing original source code** and **you own the + intellectual property**, then you'll need to sign an + `individual CLA `__. +- **If you work for a company that wants to allow you to contribute your work**, + then you'll need to sign a + `corporate CLA `__. + +You can sign these electronically (just scroll to the bottom). After that, +we'll be able to accept your pull requests. diff --git a/packages/google-cloud-managedkafka/LICENSE b/packages/google-cloud-managedkafka/LICENSE new file mode 100644 index 000000000000..d64569567334 --- /dev/null +++ b/packages/google-cloud-managedkafka/LICENSE @@ -0,0 +1,202 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. 
For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. 
The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+ + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/packages/google-cloud-managedkafka/MANIFEST.in b/packages/google-cloud-managedkafka/MANIFEST.in new file mode 100644 index 000000000000..e0a66705318e --- /dev/null +++ b/packages/google-cloud-managedkafka/MANIFEST.in @@ -0,0 +1,25 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Generated by synthtool. DO NOT EDIT! +include README.rst LICENSE +recursive-include google *.json *.proto py.typed +recursive-include tests * +global-exclude *.py[co] +global-exclude __pycache__ + +# Exclude scripts for samples readmegen +prune scripts/readme-gen diff --git a/packages/google-cloud-managedkafka/README.rst b/packages/google-cloud-managedkafka/README.rst new file mode 100644 index 000000000000..475bfc1ae7eb --- /dev/null +++ b/packages/google-cloud-managedkafka/README.rst @@ -0,0 +1,108 @@ +Python Client for Apache Kafka for BigQuery API +=============================================== + +|preview| |pypi| |versions| + +`Apache Kafka for BigQuery API`_: Apache Kafka for BigQuery is a managed cloud service that lets you ingest Kafka streams directly into Google Cloud. + +- `Client Library Documentation`_ +- `Product Documentation`_ + +.. |preview| image:: https://img.shields.io/badge/support-preview-orange.svg + :target: https://github.com/googleapis/google-cloud-python/blob/main/README.rst#stability-levels +.. |pypi| image:: https://img.shields.io/pypi/v/google-cloud-managedkafka.svg + :target: https://pypi.org/project/google-cloud-managedkafka/ +.. 
|versions| image:: https://img.shields.io/pypi/pyversions/google-cloud-managedkafka.svg + :target: https://pypi.org/project/google-cloud-managedkafka/ +.. _Apache Kafka for BigQuery API: https://cloud.google.com/managed-kafka +.. _Client Library Documentation: https://cloud.google.com/python/docs/reference/google-cloud-managedkafka/latest/summary_overview +.. _Product Documentation: https://cloud.google.com/managed-kafka + +Quick Start +----------- + +In order to use this library, you first need to go through the following steps: + +1. `Select or create a Cloud Platform project.`_ +2. `Enable billing for your project.`_ +3. `Enable the Apache Kafka for BigQuery API.`_ +4. `Setup Authentication.`_ + +.. _Select or create a Cloud Platform project.: https://console.cloud.google.com/project +.. _Enable billing for your project.: https://cloud.google.com/billing/docs/how-to/modify-project#enable_billing_for_a_project +.. _Enable the Apache Kafka for BigQuery API.: https://cloud.google.com/managed-kafka +.. _Setup Authentication.: https://googleapis.dev/python/google-api-core/latest/auth.html + +Installation +~~~~~~~~~~~~ + +Install this library in a virtual environment using `venv`_. `venv`_ is a tool that +creates isolated Python environments. These isolated environments can have separate +versions of Python packages, which allows you to isolate one project's dependencies +from the dependencies of other projects. + +With `venv`_, it's possible to install this library without needing system +install permissions, and without clashing with the installed system +dependencies. + +.. _`venv`: https://docs.python.org/3/library/venv.html + + +Code samples and snippets +~~~~~~~~~~~~~~~~~~~~~~~~~ + +Code samples and snippets live in the `samples/`_ folder. + +.. _samples/: https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-managedkafka/samples + + +Supported Python Versions +^^^^^^^^^^^^^^^^^^^^^^^^^ +Our client libraries are compatible with all current `active`_ and `maintenance`_ versions of +Python. + +Python >= 3.7 + +.. _active: https://devguide.python.org/devcycle/#in-development-main-branch +.. _maintenance: https://devguide.python.org/devcycle/#maintenance-branches + +Unsupported Python Versions +^^^^^^^^^^^^^^^^^^^^^^^^^^^ +Python <= 3.6 + +If you are using an `end-of-life`_ +version of Python, we recommend that you update as soon as possible to an actively supported version. + +.. _end-of-life: https://devguide.python.org/devcycle/#end-of-life-branches + +Mac/Linux +^^^^^^^^^ + +.. code-block:: console + + python3 -m venv + source /bin/activate + pip install google-cloud-managedkafka + + +Windows +^^^^^^^ + +.. 
code-block:: console + + py -m venv + .\\Scripts\activate + pip install google-cloud-managedkafka + +Next Steps +~~~~~~~~~~ + +- Read the `Client Library Documentation`_ for Apache Kafka for BigQuery API + to see other available methods on the client. +- Read the `Apache Kafka for BigQuery API Product documentation`_ to learn + more about the product and see How-to Guides. +- View this `README`_ to see the full list of Cloud + APIs that we cover. + +.. _Apache Kafka for BigQuery API Product documentation: https://cloud.google.com/managed-kafka +.. _README: https://github.com/googleapis/google-cloud-python/blob/main/README.rst diff --git a/packages/google-cloud-managedkafka/docs/CHANGELOG.md b/packages/google-cloud-managedkafka/docs/CHANGELOG.md new file mode 120000 index 000000000000..04c99a55caae --- /dev/null +++ b/packages/google-cloud-managedkafka/docs/CHANGELOG.md @@ -0,0 +1 @@ +../CHANGELOG.md \ No newline at end of file diff --git a/packages/google-cloud-managedkafka/docs/README.rst b/packages/google-cloud-managedkafka/docs/README.rst new file mode 120000 index 000000000000..89a0106941ff --- /dev/null +++ b/packages/google-cloud-managedkafka/docs/README.rst @@ -0,0 +1 @@ +../README.rst \ No newline at end of file diff --git a/packages/google-cloud-managedkafka/docs/_static/custom.css b/packages/google-cloud-managedkafka/docs/_static/custom.css new file mode 100644 index 000000000000..b0a295464b23 --- /dev/null +++ b/packages/google-cloud-managedkafka/docs/_static/custom.css @@ -0,0 +1,20 @@ +div#python2-eol { + border-color: red; + border-width: medium; +} + +/* Ensure minimum width for 'Parameters' / 'Returns' column */ +dl.field-list > dt { + min-width: 100px +} + +/* Insert space between methods for readability */ +dl.method { + padding-top: 10px; + padding-bottom: 10px +} + +/* Insert empty space between classes */ +dl.class { + padding-bottom: 50px +} diff --git a/packages/google-cloud-managedkafka/docs/_templates/layout.html b/packages/google-cloud-managedkafka/docs/_templates/layout.html new file mode 100644 index 000000000000..6316a537f72b --- /dev/null +++ b/packages/google-cloud-managedkafka/docs/_templates/layout.html @@ -0,0 +1,50 @@ + +{% extends "!layout.html" %} +{%- block content %} +{%- if theme_fixed_sidebar|lower == 'true' %} +
+ {{ sidebar() }} + {%- block document %} +
+ {%- if render_sidebar %} +
+ {%- endif %} + + {%- block relbar_top %} + {%- if theme_show_relbar_top|tobool %} + + {%- endif %} + {% endblock %} + +
+
+ As of January 1, 2020 this library no longer supports Python 2 on the latest released version. + Library versions released prior to that date will continue to be available. For more information please + visit Python 2 support on Google Cloud. +
+ {% block body %} {% endblock %} +
+ + {%- block relbar_bottom %} + {%- if theme_show_relbar_bottom|tobool %} + + {%- endif %} + {% endblock %} + + {%- if render_sidebar %} +
+ {%- endif %} +
+ {%- endblock %} +
+
+{%- else %} +{{ super() }} +{%- endif %} +{%- endblock %} diff --git a/packages/google-cloud-managedkafka/docs/conf.py b/packages/google-cloud-managedkafka/docs/conf.py new file mode 100644 index 000000000000..c60ed3930cd2 --- /dev/null +++ b/packages/google-cloud-managedkafka/docs/conf.py @@ -0,0 +1,384 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# google-cloud-managedkafka documentation build configuration file +# +# This file is execfile()d with the current directory set to its +# containing dir. +# +# Note that not all possible configuration values are present in this +# autogenerated file. +# +# All configuration values have a default; values that are commented out +# serve to show the default. + +import os +import shlex +import sys + +# If extensions (or modules to document with autodoc) are in another directory, +# add these directories to sys.path here. If the directory is relative to the +# documentation root, use os.path.abspath to make it absolute, like shown here. +sys.path.insert(0, os.path.abspath("..")) + +# For plugins that can not read conf.py. +# See also: https://github.com/docascode/sphinx-docfx-yaml/issues/85 +sys.path.insert(0, os.path.abspath(".")) + +__version__ = "" + +# -- General configuration ------------------------------------------------ + +# If your documentation needs a minimal Sphinx version, state it here. +needs_sphinx = "1.5.5" + +# Add any Sphinx extension module names here, as strings. They can be +# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom +# ones. +extensions = [ + "sphinx.ext.autodoc", + "sphinx.ext.autosummary", + "sphinx.ext.intersphinx", + "sphinx.ext.coverage", + "sphinx.ext.doctest", + "sphinx.ext.napoleon", + "sphinx.ext.todo", + "sphinx.ext.viewcode", + "recommonmark", +] + +# autodoc/autosummary flags +autoclass_content = "both" +autodoc_default_options = {"members": True} +autosummary_generate = True + + +# Add any paths that contain templates here, relative to this directory. +templates_path = ["_templates"] + +# The suffix(es) of source filenames. +# You can specify multiple suffix as a list of string: +# source_suffix = ['.rst', '.md'] +source_suffix = [".rst", ".md"] + +# The encoding of source files. +# source_encoding = 'utf-8-sig' + +# The root toctree document. +root_doc = "index" + +# General information about the project. +project = "google-cloud-managedkafka" +copyright = "2019, Google" +author = "Google APIs" + +# The version info for the project you're documenting, acts as replacement for +# |version| and |release|, also used in various other places throughout the +# built documents. +# +# The full version, including alpha/beta/rc tags. +release = __version__ +# The short X.Y version. +version = ".".join(release.split(".")[0:2]) + +# The language for content autogenerated by Sphinx. 
Refer to documentation +# for a list of supported languages. +# +# This is also used if you do content translation via gettext catalogs. +# Usually you set "language" from the command line for these cases. +language = None + +# There are two options for replacing |today|: either, you set today to some +# non-false value, then it is used: +# today = '' +# Else, today_fmt is used as the format for a strftime call. +# today_fmt = '%B %d, %Y' + +# List of patterns, relative to source directory, that match files and +# directories to ignore when looking for source files. +exclude_patterns = [ + "_build", + "**/.nox/**/*", + "samples/AUTHORING_GUIDE.md", + "samples/CONTRIBUTING.md", + "samples/snippets/README.rst", +] + +# The reST default role (used for this markup: `text`) to use for all +# documents. +# default_role = None + +# If true, '()' will be appended to :func: etc. cross-reference text. +# add_function_parentheses = True + +# If true, the current module name will be prepended to all description +# unit titles (such as .. function::). +# add_module_names = True + +# If true, sectionauthor and moduleauthor directives will be shown in the +# output. They are ignored by default. +# show_authors = False + +# The name of the Pygments (syntax highlighting) style to use. +pygments_style = "sphinx" + +# A list of ignored prefixes for module index sorting. +# modindex_common_prefix = [] + +# If true, keep warnings as "system message" paragraphs in the built documents. +# keep_warnings = False + +# If true, `todo` and `todoList` produce output, else they produce nothing. +todo_include_todos = True + + +# -- Options for HTML output ---------------------------------------------- + +# The theme to use for HTML and HTML Help pages. See the documentation for +# a list of builtin themes. +html_theme = "alabaster" + +# Theme options are theme-specific and customize the look and feel of a theme +# further. For a list of options available for each theme, see the +# documentation. +html_theme_options = { + "description": "Google Cloud Client Libraries for google-cloud-managedkafka", + "github_user": "googleapis", + "github_repo": "google-cloud-python", + "github_banner": True, + "font_family": "'Roboto', Georgia, sans", + "head_font_family": "'Roboto', Georgia, serif", + "code_font_family": "'Roboto Mono', 'Consolas', monospace", +} + +# Add any paths that contain custom themes here, relative to this directory. +# html_theme_path = [] + +# The name for this set of Sphinx documents. If None, it defaults to +# " v documentation". +# html_title = None + +# A shorter title for the navigation bar. Default is the same as html_title. +# html_short_title = None + +# The name of an image file (relative to this directory) to place at the top +# of the sidebar. +# html_logo = None + +# The name of an image file (within the static path) to use as favicon of the +# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 +# pixels large. +# html_favicon = None + +# Add any paths that contain custom static files (such as style sheets) here, +# relative to this directory. They are copied after the builtin static files, +# so a file named "default.css" will overwrite the builtin "default.css". +html_static_path = ["_static"] + +# Add any extra paths that contain custom files (such as robots.txt or +# .htaccess) here, relative to this directory. These files are copied +# directly to the root of the documentation. 
+# html_extra_path = [] + +# If not '', a 'Last updated on:' timestamp is inserted at every page bottom, +# using the given strftime format. +# html_last_updated_fmt = '%b %d, %Y' + +# If true, SmartyPants will be used to convert quotes and dashes to +# typographically correct entities. +# html_use_smartypants = True + +# Custom sidebar templates, maps document names to template names. +# html_sidebars = {} + +# Additional templates that should be rendered to pages, maps page names to +# template names. +# html_additional_pages = {} + +# If false, no module index is generated. +# html_domain_indices = True + +# If false, no index is generated. +# html_use_index = True + +# If true, the index is split into individual pages for each letter. +# html_split_index = False + +# If true, links to the reST sources are added to the pages. +# html_show_sourcelink = True + +# If true, "Created using Sphinx" is shown in the HTML footer. Default is True. +# html_show_sphinx = True + +# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. +# html_show_copyright = True + +# If true, an OpenSearch description file will be output, and all pages will +# contain a tag referring to it. The value of this option must be the +# base URL from which the finished HTML is served. +# html_use_opensearch = '' + +# This is the file name suffix for HTML files (e.g. ".xhtml"). +# html_file_suffix = None + +# Language to be used for generating the HTML full-text search index. +# Sphinx supports the following languages: +# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja' +# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr' +# html_search_language = 'en' + +# A dictionary with options for the search language support, empty by default. +# Now only 'ja' uses this config value +# html_search_options = {'type': 'default'} + +# The name of a javascript file (relative to the configuration directory) that +# implements a search results scorer. If empty, the default will be used. +# html_search_scorer = 'scorer.js' + +# Output file base name for HTML help builder. +htmlhelp_basename = "google-cloud-managedkafka-doc" + +# -- Options for warnings ------------------------------------------------------ + + +suppress_warnings = [ + # Temporarily suppress this to avoid "more than one target found for + # cross-reference" warning, which are intractable for us to avoid while in + # a mono-repo. + # See https://github.com/sphinx-doc/sphinx/blob + # /2a65ffeef5c107c19084fabdd706cdff3f52d93c/sphinx/domains/python.py#L843 + "ref.python" +] + +# -- Options for LaTeX output --------------------------------------------- + +latex_elements = { + # The paper size ('letterpaper' or 'a4paper'). + #'papersize': 'letterpaper', + # The font size ('10pt', '11pt' or '12pt'). + #'pointsize': '10pt', + # Additional stuff for the LaTeX preamble. + #'preamble': '', + # Latex figure (float) alignment + #'figure_align': 'htbp', +} + +# Grouping the document tree into LaTeX files. List of tuples +# (source start file, target name, title, +# author, documentclass [howto, manual, or own class]). +latex_documents = [ + ( + root_doc, + "google-cloud-managedkafka.tex", + "google-cloud-managedkafka Documentation", + author, + "manual", + ) +] + +# The name of an image file (relative to this directory) to place at the top of +# the title page. +# latex_logo = None + +# For "manual" documents, if this is true, then toplevel headings are parts, +# not chapters. 
+# latex_use_parts = False + +# If true, show page references after internal links. +# latex_show_pagerefs = False + +# If true, show URL addresses after external links. +# latex_show_urls = False + +# Documents to append as an appendix to all manuals. +# latex_appendices = [] + +# If false, no module index is generated. +# latex_domain_indices = True + + +# -- Options for manual page output --------------------------------------- + +# One entry per manual page. List of tuples +# (source start file, name, description, authors, manual section). +man_pages = [ + ( + root_doc, + "google-cloud-managedkafka", + "google-cloud-managedkafka Documentation", + [author], + 1, + ) +] + +# If true, show URL addresses after external links. +# man_show_urls = False + + +# -- Options for Texinfo output ------------------------------------------- + +# Grouping the document tree into Texinfo files. List of tuples +# (source start file, target name, title, author, +# dir menu entry, description, category) +texinfo_documents = [ + ( + root_doc, + "google-cloud-managedkafka", + "google-cloud-managedkafka Documentation", + author, + "google-cloud-managedkafka", + "google-cloud-managedkafka Library", + "APIs", + ) +] + +# Documents to append as an appendix to all manuals. +# texinfo_appendices = [] + +# If false, no module index is generated. +# texinfo_domain_indices = True + +# How to display URL addresses: 'footnote', 'no', or 'inline'. +# texinfo_show_urls = 'footnote' + +# If true, do not generate a @detailmenu in the "Top" node's menu. +# texinfo_no_detailmenu = False + + +# Example configuration for intersphinx: refer to the Python standard library. +intersphinx_mapping = { + "python": ("https://python.readthedocs.org/en/latest/", None), + "google-auth": ("https://googleapis.dev/python/google-auth/latest/", None), + "google.api_core": ( + "https://googleapis.dev/python/google-api-core/latest/", + None, + ), + "grpc": ("https://grpc.github.io/grpc/python/", None), + "proto-plus": ("https://proto-plus-python.readthedocs.io/en/latest/", None), + "protobuf": ("https://googleapis.dev/python/protobuf/latest/", None), +} + + +# Napoleon settings +napoleon_google_docstring = True +napoleon_numpy_docstring = True +napoleon_include_private_with_doc = False +napoleon_include_special_with_doc = True +napoleon_use_admonition_for_examples = False +napoleon_use_admonition_for_notes = False +napoleon_use_admonition_for_references = False +napoleon_use_ivar = False +napoleon_use_param = True +napoleon_use_rtype = True diff --git a/packages/google-cloud-managedkafka/docs/index.rst b/packages/google-cloud-managedkafka/docs/index.rst new file mode 100644 index 000000000000..02ed22d74721 --- /dev/null +++ b/packages/google-cloud-managedkafka/docs/index.rst @@ -0,0 +1,28 @@ +.. include:: README.rst + +.. include:: multiprocessing.rst + + +API Reference +------------- +.. toctree:: + :maxdepth: 2 + + managedkafka_v1/services_ + managedkafka_v1/types_ + + +Changelog +--------- + +For a list of all ``google-cloud-managedkafka`` releases: + +.. toctree:: + :maxdepth: 2 + + CHANGELOG + +.. 
toctree:: + :hidden: + + summary_overview.md diff --git a/packages/google-cloud-managedkafka/docs/managedkafka_v1/managed_kafka.rst b/packages/google-cloud-managedkafka/docs/managedkafka_v1/managed_kafka.rst new file mode 100644 index 000000000000..5ed21891a7fc --- /dev/null +++ b/packages/google-cloud-managedkafka/docs/managedkafka_v1/managed_kafka.rst @@ -0,0 +1,10 @@ +ManagedKafka +------------------------------ + +.. automodule:: google.cloud.managedkafka_v1.services.managed_kafka + :members: + :inherited-members: + +.. automodule:: google.cloud.managedkafka_v1.services.managed_kafka.pagers + :members: + :inherited-members: diff --git a/packages/google-cloud-managedkafka/docs/managedkafka_v1/services_.rst b/packages/google-cloud-managedkafka/docs/managedkafka_v1/services_.rst new file mode 100644 index 000000000000..3b761cc00c41 --- /dev/null +++ b/packages/google-cloud-managedkafka/docs/managedkafka_v1/services_.rst @@ -0,0 +1,6 @@ +Services for Google Cloud Managedkafka v1 API +============================================= +.. toctree:: + :maxdepth: 2 + + managed_kafka diff --git a/packages/google-cloud-managedkafka/docs/managedkafka_v1/types_.rst b/packages/google-cloud-managedkafka/docs/managedkafka_v1/types_.rst new file mode 100644 index 000000000000..4e78607757cc --- /dev/null +++ b/packages/google-cloud-managedkafka/docs/managedkafka_v1/types_.rst @@ -0,0 +1,6 @@ +Types for Google Cloud Managedkafka v1 API +========================================== + +.. automodule:: google.cloud.managedkafka_v1.types + :members: + :show-inheritance: diff --git a/packages/google-cloud-managedkafka/docs/multiprocessing.rst b/packages/google-cloud-managedkafka/docs/multiprocessing.rst new file mode 100644 index 000000000000..536d17b2ea65 --- /dev/null +++ b/packages/google-cloud-managedkafka/docs/multiprocessing.rst @@ -0,0 +1,7 @@ +.. note:: + + Because this client uses :mod:`grpc` library, it is safe to + share instances across threads. In multiprocessing scenarios, the best + practice is to create client instances *after* the invocation of + :func:`os.fork` by :class:`multiprocessing.pool.Pool` or + :class:`multiprocessing.Process`. diff --git a/packages/google-cloud-managedkafka/docs/summary_overview.md b/packages/google-cloud-managedkafka/docs/summary_overview.md new file mode 100644 index 000000000000..3f9a67edeb83 --- /dev/null +++ b/packages/google-cloud-managedkafka/docs/summary_overview.md @@ -0,0 +1,22 @@ +[ +This is a templated file. Adding content to this file may result in it being +reverted. Instead, if you want to place additional content, create an +"overview_content.md" file in `docs/` directory. The Sphinx tool will +pick up on the content and merge the content. +]: # + +# Apache Kafka for BigQuery API API + +Overview of the APIs available for Apache Kafka for BigQuery API API. + +## All entries + +Classes, methods and properties & attributes for +Apache Kafka for BigQuery API API. 
+ +[classes](https://cloud.google.com/python/docs/reference/google-cloud-managedkafka/latest/summary_class.html) + +[methods](https://cloud.google.com/python/docs/reference/google-cloud-managedkafka/latest/summary_method.html) + +[properties and +attributes](https://cloud.google.com/python/docs/reference/google-cloud-managedkafka/latest/summary_property.html) diff --git a/packages/google-cloud-managedkafka/google/cloud/managedkafka/__init__.py b/packages/google-cloud-managedkafka/google/cloud/managedkafka/__init__.py new file mode 100644 index 000000000000..71d4f54c5d5f --- /dev/null +++ b/packages/google-cloud-managedkafka/google/cloud/managedkafka/__init__.py @@ -0,0 +1,91 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from google.cloud.managedkafka import gapic_version as package_version + +__version__ = package_version.__version__ + + +from google.cloud.managedkafka_v1.services.managed_kafka.async_client import ( + ManagedKafkaAsyncClient, +) +from google.cloud.managedkafka_v1.services.managed_kafka.client import ( + ManagedKafkaClient, +) +from google.cloud.managedkafka_v1.types.managed_kafka import ( + CreateClusterRequest, + CreateTopicRequest, + DeleteClusterRequest, + DeleteConsumerGroupRequest, + DeleteTopicRequest, + GetClusterRequest, + GetConsumerGroupRequest, + GetTopicRequest, + ListClustersRequest, + ListClustersResponse, + ListConsumerGroupsRequest, + ListConsumerGroupsResponse, + ListTopicsRequest, + ListTopicsResponse, + UpdateClusterRequest, + UpdateConsumerGroupRequest, + UpdateTopicRequest, +) +from google.cloud.managedkafka_v1.types.resources import ( + AccessConfig, + CapacityConfig, + Cluster, + ConsumerGroup, + ConsumerPartitionMetadata, + ConsumerTopicMetadata, + GcpConfig, + NetworkConfig, + OperationMetadata, + RebalanceConfig, + Topic, +) + +__all__ = ( + "ManagedKafkaClient", + "ManagedKafkaAsyncClient", + "CreateClusterRequest", + "CreateTopicRequest", + "DeleteClusterRequest", + "DeleteConsumerGroupRequest", + "DeleteTopicRequest", + "GetClusterRequest", + "GetConsumerGroupRequest", + "GetTopicRequest", + "ListClustersRequest", + "ListClustersResponse", + "ListConsumerGroupsRequest", + "ListConsumerGroupsResponse", + "ListTopicsRequest", + "ListTopicsResponse", + "UpdateClusterRequest", + "UpdateConsumerGroupRequest", + "UpdateTopicRequest", + "AccessConfig", + "CapacityConfig", + "Cluster", + "ConsumerGroup", + "ConsumerPartitionMetadata", + "ConsumerTopicMetadata", + "GcpConfig", + "NetworkConfig", + "OperationMetadata", + "RebalanceConfig", + "Topic", +) diff --git a/packages/google-cloud-managedkafka/google/cloud/managedkafka/gapic_version.py b/packages/google-cloud-managedkafka/google/cloud/managedkafka/gapic_version.py new file mode 100644 index 000000000000..33d37a7b677b --- 
/dev/null +++ b/packages/google-cloud-managedkafka/google/cloud/managedkafka/gapic_version.py @@ -0,0 +1,16 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +__version__ = "0.1.0" # {x-release-please-version} diff --git a/packages/google-cloud-managedkafka/google/cloud/managedkafka/py.typed b/packages/google-cloud-managedkafka/google/cloud/managedkafka/py.typed new file mode 100644 index 000000000000..07f293eccc33 --- /dev/null +++ b/packages/google-cloud-managedkafka/google/cloud/managedkafka/py.typed @@ -0,0 +1,2 @@ +# Marker file for PEP 561. +# The google-cloud-managedkafka package uses inline types. diff --git a/packages/google-cloud-managedkafka/google/cloud/managedkafka_v1/__init__.py b/packages/google-cloud-managedkafka/google/cloud/managedkafka_v1/__init__.py new file mode 100644 index 000000000000..df91e6ae3dc5 --- /dev/null +++ b/packages/google-cloud-managedkafka/google/cloud/managedkafka_v1/__init__.py @@ -0,0 +1,86 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from google.cloud.managedkafka_v1 import gapic_version as package_version + +__version__ = package_version.__version__ + + +from .services.managed_kafka import ManagedKafkaAsyncClient, ManagedKafkaClient +from .types.managed_kafka import ( + CreateClusterRequest, + CreateTopicRequest, + DeleteClusterRequest, + DeleteConsumerGroupRequest, + DeleteTopicRequest, + GetClusterRequest, + GetConsumerGroupRequest, + GetTopicRequest, + ListClustersRequest, + ListClustersResponse, + ListConsumerGroupsRequest, + ListConsumerGroupsResponse, + ListTopicsRequest, + ListTopicsResponse, + UpdateClusterRequest, + UpdateConsumerGroupRequest, + UpdateTopicRequest, +) +from .types.resources import ( + AccessConfig, + CapacityConfig, + Cluster, + ConsumerGroup, + ConsumerPartitionMetadata, + ConsumerTopicMetadata, + GcpConfig, + NetworkConfig, + OperationMetadata, + RebalanceConfig, + Topic, +) + +__all__ = ( + "ManagedKafkaAsyncClient", + "AccessConfig", + "CapacityConfig", + "Cluster", + "ConsumerGroup", + "ConsumerPartitionMetadata", + "ConsumerTopicMetadata", + "CreateClusterRequest", + "CreateTopicRequest", + "DeleteClusterRequest", + "DeleteConsumerGroupRequest", + "DeleteTopicRequest", + "GcpConfig", + "GetClusterRequest", + "GetConsumerGroupRequest", + "GetTopicRequest", + "ListClustersRequest", + "ListClustersResponse", + "ListConsumerGroupsRequest", + "ListConsumerGroupsResponse", + "ListTopicsRequest", + "ListTopicsResponse", + "ManagedKafkaClient", + "NetworkConfig", + "OperationMetadata", + "RebalanceConfig", + "Topic", + "UpdateClusterRequest", + "UpdateConsumerGroupRequest", + "UpdateTopicRequest", +) diff --git a/packages/google-cloud-managedkafka/google/cloud/managedkafka_v1/gapic_metadata.json b/packages/google-cloud-managedkafka/google/cloud/managedkafka_v1/gapic_metadata.json new file mode 100644 index 000000000000..f17b184ab212 --- /dev/null +++ b/packages/google-cloud-managedkafka/google/cloud/managedkafka_v1/gapic_metadata.json @@ -0,0 +1,238 @@ + { + "comment": "This file maps proto services/RPCs to the corresponding library clients/methods", + "language": "python", + "libraryPackage": "google.cloud.managedkafka_v1", + "protoPackage": "google.cloud.managedkafka.v1", + "schema": "1.0", + "services": { + "ManagedKafka": { + "clients": { + "grpc": { + "libraryClient": "ManagedKafkaClient", + "rpcs": { + "CreateCluster": { + "methods": [ + "create_cluster" + ] + }, + "CreateTopic": { + "methods": [ + "create_topic" + ] + }, + "DeleteCluster": { + "methods": [ + "delete_cluster" + ] + }, + "DeleteConsumerGroup": { + "methods": [ + "delete_consumer_group" + ] + }, + "DeleteTopic": { + "methods": [ + "delete_topic" + ] + }, + "GetCluster": { + "methods": [ + "get_cluster" + ] + }, + "GetConsumerGroup": { + "methods": [ + "get_consumer_group" + ] + }, + "GetTopic": { + "methods": [ + "get_topic" + ] + }, + "ListClusters": { + "methods": [ + "list_clusters" + ] + }, + "ListConsumerGroups": { + "methods": [ + "list_consumer_groups" + ] + }, + "ListTopics": { + "methods": [ + "list_topics" + ] + }, + "UpdateCluster": { + "methods": [ + "update_cluster" + ] + }, + "UpdateConsumerGroup": { + "methods": [ + "update_consumer_group" + ] + }, + "UpdateTopic": { + "methods": [ + "update_topic" + ] + } + } + }, + "grpc-async": { + "libraryClient": "ManagedKafkaAsyncClient", + "rpcs": { + "CreateCluster": { + "methods": [ + "create_cluster" + ] + }, + "CreateTopic": { + "methods": [ + "create_topic" + ] + }, + "DeleteCluster": { + "methods": [ + "delete_cluster" + ] + }, + "DeleteConsumerGroup": { + 
"methods": [ + "delete_consumer_group" + ] + }, + "DeleteTopic": { + "methods": [ + "delete_topic" + ] + }, + "GetCluster": { + "methods": [ + "get_cluster" + ] + }, + "GetConsumerGroup": { + "methods": [ + "get_consumer_group" + ] + }, + "GetTopic": { + "methods": [ + "get_topic" + ] + }, + "ListClusters": { + "methods": [ + "list_clusters" + ] + }, + "ListConsumerGroups": { + "methods": [ + "list_consumer_groups" + ] + }, + "ListTopics": { + "methods": [ + "list_topics" + ] + }, + "UpdateCluster": { + "methods": [ + "update_cluster" + ] + }, + "UpdateConsumerGroup": { + "methods": [ + "update_consumer_group" + ] + }, + "UpdateTopic": { + "methods": [ + "update_topic" + ] + } + } + }, + "rest": { + "libraryClient": "ManagedKafkaClient", + "rpcs": { + "CreateCluster": { + "methods": [ + "create_cluster" + ] + }, + "CreateTopic": { + "methods": [ + "create_topic" + ] + }, + "DeleteCluster": { + "methods": [ + "delete_cluster" + ] + }, + "DeleteConsumerGroup": { + "methods": [ + "delete_consumer_group" + ] + }, + "DeleteTopic": { + "methods": [ + "delete_topic" + ] + }, + "GetCluster": { + "methods": [ + "get_cluster" + ] + }, + "GetConsumerGroup": { + "methods": [ + "get_consumer_group" + ] + }, + "GetTopic": { + "methods": [ + "get_topic" + ] + }, + "ListClusters": { + "methods": [ + "list_clusters" + ] + }, + "ListConsumerGroups": { + "methods": [ + "list_consumer_groups" + ] + }, + "ListTopics": { + "methods": [ + "list_topics" + ] + }, + "UpdateCluster": { + "methods": [ + "update_cluster" + ] + }, + "UpdateConsumerGroup": { + "methods": [ + "update_consumer_group" + ] + }, + "UpdateTopic": { + "methods": [ + "update_topic" + ] + } + } + } + } + } + } +} diff --git a/packages/google-cloud-managedkafka/google/cloud/managedkafka_v1/gapic_version.py b/packages/google-cloud-managedkafka/google/cloud/managedkafka_v1/gapic_version.py new file mode 100644 index 000000000000..33d37a7b677b --- /dev/null +++ b/packages/google-cloud-managedkafka/google/cloud/managedkafka_v1/gapic_version.py @@ -0,0 +1,16 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +__version__ = "0.1.0" # {x-release-please-version} diff --git a/packages/google-cloud-managedkafka/google/cloud/managedkafka_v1/py.typed b/packages/google-cloud-managedkafka/google/cloud/managedkafka_v1/py.typed new file mode 100644 index 000000000000..07f293eccc33 --- /dev/null +++ b/packages/google-cloud-managedkafka/google/cloud/managedkafka_v1/py.typed @@ -0,0 +1,2 @@ +# Marker file for PEP 561. +# The google-cloud-managedkafka package uses inline types. 
diff --git a/packages/google-cloud-managedkafka/google/cloud/managedkafka_v1/services/__init__.py b/packages/google-cloud-managedkafka/google/cloud/managedkafka_v1/services/__init__.py new file mode 100644 index 000000000000..8f6cf068242c --- /dev/null +++ b/packages/google-cloud-managedkafka/google/cloud/managedkafka_v1/services/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/packages/google-cloud-managedkafka/google/cloud/managedkafka_v1/services/managed_kafka/__init__.py b/packages/google-cloud-managedkafka/google/cloud/managedkafka_v1/services/managed_kafka/__init__.py new file mode 100644 index 000000000000..9fa0aae85827 --- /dev/null +++ b/packages/google-cloud-managedkafka/google/cloud/managedkafka_v1/services/managed_kafka/__init__.py @@ -0,0 +1,22 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from .async_client import ManagedKafkaAsyncClient +from .client import ManagedKafkaClient + +__all__ = ( + "ManagedKafkaClient", + "ManagedKafkaAsyncClient", +) diff --git a/packages/google-cloud-managedkafka/google/cloud/managedkafka_v1/services/managed_kafka/async_client.py b/packages/google-cloud-managedkafka/google/cloud/managedkafka_v1/services/managed_kafka/async_client.py new file mode 100644 index 000000000000..64c5024923c4 --- /dev/null +++ b/packages/google-cloud-managedkafka/google/cloud/managedkafka_v1/services/managed_kafka/async_client.py @@ -0,0 +1,2286 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +import functools +import re +from typing import ( + Callable, + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, +) + +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry_async as retries +from google.api_core.client_options import ClientOptions +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.managedkafka_v1 import gapic_version as package_version + +try: + OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.AsyncRetry, object, None] # type: ignore + +from google.api_core import operation # type: ignore +from google.api_core import operation_async # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore + +from google.cloud.managedkafka_v1.services.managed_kafka import pagers +from google.cloud.managedkafka_v1.types import managed_kafka, resources + +from .client import ManagedKafkaClient +from .transports.base import DEFAULT_CLIENT_INFO, ManagedKafkaTransport +from .transports.grpc_asyncio import ManagedKafkaGrpcAsyncIOTransport + + +class ManagedKafkaAsyncClient: + """The service that a client application uses to manage Apache + Kafka clusters, topics and consumer groups. + """ + + _client: ManagedKafkaClient + + # Copy defaults from the synchronous client for use here. + # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. 
+ DEFAULT_ENDPOINT = ManagedKafkaClient.DEFAULT_ENDPOINT + DEFAULT_MTLS_ENDPOINT = ManagedKafkaClient.DEFAULT_MTLS_ENDPOINT + _DEFAULT_ENDPOINT_TEMPLATE = ManagedKafkaClient._DEFAULT_ENDPOINT_TEMPLATE + _DEFAULT_UNIVERSE = ManagedKafkaClient._DEFAULT_UNIVERSE + + cluster_path = staticmethod(ManagedKafkaClient.cluster_path) + parse_cluster_path = staticmethod(ManagedKafkaClient.parse_cluster_path) + consumer_group_path = staticmethod(ManagedKafkaClient.consumer_group_path) + parse_consumer_group_path = staticmethod( + ManagedKafkaClient.parse_consumer_group_path + ) + crypto_key_path = staticmethod(ManagedKafkaClient.crypto_key_path) + parse_crypto_key_path = staticmethod(ManagedKafkaClient.parse_crypto_key_path) + topic_path = staticmethod(ManagedKafkaClient.topic_path) + parse_topic_path = staticmethod(ManagedKafkaClient.parse_topic_path) + common_billing_account_path = staticmethod( + ManagedKafkaClient.common_billing_account_path + ) + parse_common_billing_account_path = staticmethod( + ManagedKafkaClient.parse_common_billing_account_path + ) + common_folder_path = staticmethod(ManagedKafkaClient.common_folder_path) + parse_common_folder_path = staticmethod(ManagedKafkaClient.parse_common_folder_path) + common_organization_path = staticmethod(ManagedKafkaClient.common_organization_path) + parse_common_organization_path = staticmethod( + ManagedKafkaClient.parse_common_organization_path + ) + common_project_path = staticmethod(ManagedKafkaClient.common_project_path) + parse_common_project_path = staticmethod( + ManagedKafkaClient.parse_common_project_path + ) + common_location_path = staticmethod(ManagedKafkaClient.common_location_path) + parse_common_location_path = staticmethod( + ManagedKafkaClient.parse_common_location_path + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + ManagedKafkaAsyncClient: The constructed client. + """ + return ManagedKafkaClient.from_service_account_info.__func__(ManagedKafkaAsyncClient, info, *args, **kwargs) # type: ignore + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + ManagedKafkaAsyncClient: The constructed client. + """ + return ManagedKafkaClient.from_service_account_file.__func__(ManagedKafkaAsyncClient, filename, *args, **kwargs) # type: ignore + + from_service_account_json = from_service_account_file + + @classmethod + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[ClientOptions] = None + ): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. 
+ + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + return ManagedKafkaClient.get_mtls_endpoint_and_cert_source(client_options) # type: ignore + + @property + def transport(self) -> ManagedKafkaTransport: + """Returns the transport used by the client instance. + + Returns: + ManagedKafkaTransport: The transport used by the client instance. + """ + return self._client.transport + + @property + def api_endpoint(self): + """Return the API endpoint used by the client instance. + + Returns: + str: The API endpoint used by the client instance. + """ + return self._client._api_endpoint + + @property + def universe_domain(self) -> str: + """Return the universe domain used by the client instance. + + Returns: + str: The universe domain used + by the client instance. + """ + return self._client._universe_domain + + get_transport_class = functools.partial( + type(ManagedKafkaClient).get_transport_class, type(ManagedKafkaClient) + ) + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[ + Union[str, ManagedKafkaTransport, Callable[..., ManagedKafkaTransport]] + ] = "grpc_asyncio", + client_options: Optional[ClientOptions] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the managed kafka async client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Optional[Union[str,ManagedKafkaTransport,Callable[..., ManagedKafkaTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport to use. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the ManagedKafkaTransport constructor. + If set to None, a transport is chosen automatically. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): + Custom options for the client. + + 1. The ``api_endpoint`` property can be used to override the + default endpoint provided by the client when ``transport`` is + not explicitly provided. 
Only if this property is not set and + ``transport`` was not explicitly provided, the endpoint is + determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment + variable, which have one of the following values: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto-switch to the + default mTLS endpoint if client certificate is present; this is + the default value). + + 2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide a client certificate for mTLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + 3. The ``universe_domain`` property can be used to override the + default "googleapis.com" universe. Note that ``api_endpoint`` + property still takes precedence; and ``universe_domain`` is + currently not supported for mTLS. + + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + """ + self._client = ManagedKafkaClient( + credentials=credentials, + transport=transport, + client_options=client_options, + client_info=client_info, + ) + + async def list_clusters( + self, + request: Optional[Union[managed_kafka.ListClustersRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListClustersAsyncPager: + r"""Lists the clusters in a given project and location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import managedkafka_v1 + + async def sample_list_clusters(): + # Create a client + client = managedkafka_v1.ManagedKafkaAsyncClient() + + # Initialize request argument(s) + request = managedkafka_v1.ListClustersRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_clusters(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.managedkafka_v1.types.ListClustersRequest, dict]]): + The request object. Request for ListClusters. + parent (:class:`str`): + Required. The parent location whose clusters are to be + listed. Structured like + ``projects/{project}/locations/{location}``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
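# Usage sketch: constructing the async client with explicit ClientOptions, as described in
# the constructor docstring above. The endpoint shown is the service's default
# ("managedkafka.googleapis.com"); substitute a regional or private endpoint if needed.

from google.api_core.client_options import ClientOptions
from google.cloud import managedkafka_v1

client = managedkafka_v1.ManagedKafkaAsyncClient(
    transport="grpc_asyncio",  # the default transport for the async client
    client_options=ClientOptions(api_endpoint="managedkafka.googleapis.com"),
)
print(client.api_endpoint)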
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.managedkafka_v1.services.managed_kafka.pagers.ListClustersAsyncPager: + Response for ListClusters. + + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, managed_kafka.ListClustersRequest): + request = managed_kafka.ListClustersRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.list_clusters + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListClustersAsyncPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_cluster( + self, + request: Optional[Union[managed_kafka.GetClusterRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> resources.Cluster: + r"""Returns the properties of a single cluster. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import managedkafka_v1 + + async def sample_get_cluster(): + # Create a client + client = managedkafka_v1.ManagedKafkaAsyncClient() + + # Initialize request argument(s) + request = managedkafka_v1.GetClusterRequest( + name="name_value", + ) + + # Make the request + response = await client.get_cluster(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.managedkafka_v1.types.GetClusterRequest, dict]]): + The request object. Request for GetCluster. + name (:class:`str`): + Required. The name of the cluster + whose configuration to return. 
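# Usage sketch: listing clusters with the flattened ``parent`` argument instead of a
# request object, assuming Application Default Credentials and a hypothetical
# project/location.

import asyncio

from google.cloud import managedkafka_v1


async def demo_list_clusters() -> None:
    client = managedkafka_v1.ManagedKafkaAsyncClient()
    # The pager transparently fetches additional pages as iteration proceeds.
    pager = await client.list_clusters(parent="projects/my-project/locations/us-central1")
    async for cluster in pager:
        print(cluster.name, cluster.state)


asyncio.run(demo_list_clusters())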
+ + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.managedkafka_v1.types.Cluster: + An Apache Kafka cluster deployed in a + location. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, managed_kafka.GetClusterRequest): + request = managed_kafka.GetClusterRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.get_cluster + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def create_cluster( + self, + request: Optional[Union[managed_kafka.CreateClusterRequest, dict]] = None, + *, + parent: Optional[str] = None, + cluster: Optional[resources.Cluster] = None, + cluster_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Creates a new cluster in a given project and + location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
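# Usage sketch: fetching a single cluster by its full resource name, with a per-call
# timeout. The project, location, and cluster IDs are hypothetical placeholders.

import asyncio

from google.cloud import managedkafka_v1


async def demo_get_cluster() -> None:
    client = managedkafka_v1.ManagedKafkaAsyncClient()
    cluster = await client.get_cluster(
        name="projects/my-project/locations/us-central1/clusters/my-cluster",
        timeout=30.0,  # fail the call if no response arrives within 30 seconds
    )
    print(cluster.capacity_config.vcpu_count, cluster.capacity_config.memory_bytes)


asyncio.run(demo_get_cluster())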
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import managedkafka_v1 + + async def sample_create_cluster(): + # Create a client + client = managedkafka_v1.ManagedKafkaAsyncClient() + + # Initialize request argument(s) + cluster = managedkafka_v1.Cluster() + cluster.gcp_config.access_config.network_configs.subnet = "subnet_value" + cluster.capacity_config.vcpu_count = 1094 + cluster.capacity_config.memory_bytes = 1311 + + request = managedkafka_v1.CreateClusterRequest( + parent="parent_value", + cluster_id="cluster_id_value", + cluster=cluster, + ) + + # Make the request + operation = client.create_cluster(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.managedkafka_v1.types.CreateClusterRequest, dict]]): + The request object. Request for CreateCluster. + parent (:class:`str`): + Required. The parent region in which to create the + cluster. Structured like + ``projects/{project}/locations/{location}``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + cluster (:class:`google.cloud.managedkafka_v1.types.Cluster`): + Required. Configuration of the cluster to create. Its + ``name`` field is ignored. + + This corresponds to the ``cluster`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + cluster_id (:class:`str`): + Required. The ID to use for the cluster, which will + become the final component of the cluster's name. The ID + must be 1-63 characters long, and match the regular + expression ``[a-z]([-a-z0-9]*[a-z0-9])?`` to comply with + RFC 1035. + + This value is structured like: ``my-cluster-id``. + + This corresponds to the ``cluster_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.managedkafka_v1.types.Cluster` An + Apache Kafka cluster deployed in a location. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, cluster, cluster_id]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, managed_kafka.CreateClusterRequest): + request = managed_kafka.CreateClusterRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if parent is not None: + request.parent = parent + if cluster is not None: + request.cluster = cluster + if cluster_id is not None: + request.cluster_id = cluster_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.create_cluster + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + resources.Cluster, + metadata_type=resources.OperationMetadata, + ) + + # Done; return the response. + return response + + async def update_cluster( + self, + request: Optional[Union[managed_kafka.UpdateClusterRequest, dict]] = None, + *, + cluster: Optional[resources.Cluster] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Updates the properties of a single cluster. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import managedkafka_v1 + + async def sample_update_cluster(): + # Create a client + client = managedkafka_v1.ManagedKafkaAsyncClient() + + # Initialize request argument(s) + cluster = managedkafka_v1.Cluster() + cluster.gcp_config.access_config.network_configs.subnet = "subnet_value" + cluster.capacity_config.vcpu_count = 1094 + cluster.capacity_config.memory_bytes = 1311 + + request = managedkafka_v1.UpdateClusterRequest( + cluster=cluster, + ) + + # Make the request + operation = client.update_cluster(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.managedkafka_v1.types.UpdateClusterRequest, dict]]): + The request object. Request for UpdateCluster. + cluster (:class:`google.cloud.managedkafka_v1.types.Cluster`): + Required. The cluster to update. Its ``name`` field must + be populated. + + This corresponds to the ``cluster`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): + Required. Field mask is used to specify the fields to be + overwritten in the cluster resource by the update. The + fields specified in the update_mask are relative to the + resource, not the full request. A field will be + overwritten if it is in the mask. The mask is required + and a value of \* will update all fields. 
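# Usage sketch: creating a cluster and waiting on the returned long-running operation.
# The request is passed in dict form (equivalent to CreateClusterRequest); the project,
# subnet, and sizing values are hypothetical placeholders to adapt to a real environment.

import asyncio

from google.cloud import managedkafka_v1


async def demo_create_cluster() -> None:
    client = managedkafka_v1.ManagedKafkaAsyncClient()
    operation = await client.create_cluster(
        request={
            "parent": "projects/my-project/locations/us-central1",
            "cluster_id": "my-cluster",
            "cluster": {
                "gcp_config": {
                    "access_config": {
                        "network_configs": [
                            {"subnet": "projects/my-project/regions/us-central1/subnetworks/default"}
                        ]
                    }
                },
                "capacity_config": {"vcpu_count": 3, "memory_bytes": 3 * 1024**3},
            },
        }
    )
    print("Waiting for cluster creation...")
    cluster = await operation.result()  # resolves to a Cluster once the LRO completes
    print(cluster.name)


asyncio.run(demo_create_cluster())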
+ + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.managedkafka_v1.types.Cluster` An + Apache Kafka cluster deployed in a location. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([cluster, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, managed_kafka.UpdateClusterRequest): + request = managed_kafka.UpdateClusterRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if cluster is not None: + request.cluster = cluster + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.update_cluster + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("cluster.name", request.cluster.name),) + ), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + resources.Cluster, + metadata_type=resources.OperationMetadata, + ) + + # Done; return the response. + return response + + async def delete_cluster( + self, + request: Optional[Union[managed_kafka.DeleteClusterRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Deletes a single cluster. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
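# Usage sketch: updating only the labels of an existing cluster. The field mask restricts
# the update to the listed paths, as the docstring above describes; the cluster name and
# label values are hypothetical placeholders.

import asyncio

from google.protobuf import field_mask_pb2

from google.cloud import managedkafka_v1


async def demo_update_cluster() -> None:
    client = managedkafka_v1.ManagedKafkaAsyncClient()
    operation = await client.update_cluster(
        cluster=managedkafka_v1.Cluster(
            name="projects/my-project/locations/us-central1/clusters/my-cluster",
            labels={"env": "dev"},
        ),
        update_mask=field_mask_pb2.FieldMask(paths=["labels"]),
    )
    updated = await operation.result()
    print(updated.labels)


asyncio.run(demo_update_cluster())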
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import managedkafka_v1 + + async def sample_delete_cluster(): + # Create a client + client = managedkafka_v1.ManagedKafkaAsyncClient() + + # Initialize request argument(s) + request = managedkafka_v1.DeleteClusterRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_cluster(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.managedkafka_v1.types.DeleteClusterRequest, dict]]): + The request object. Request for DeleteCluster. + name (:class:`str`): + Required. The name of the cluster to + delete. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. A typical example is to + use it as the request or the response type of an API + method. For instance: + + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); + + } + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, managed_kafka.DeleteClusterRequest): + request = managed_kafka.DeleteClusterRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.delete_cluster + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + empty_pb2.Empty, + metadata_type=resources.OperationMetadata, + ) + + # Done; return the response. 
+ return response + + async def list_topics( + self, + request: Optional[Union[managed_kafka.ListTopicsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListTopicsAsyncPager: + r"""Lists the topics in a given cluster. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import managedkafka_v1 + + async def sample_list_topics(): + # Create a client + client = managedkafka_v1.ManagedKafkaAsyncClient() + + # Initialize request argument(s) + request = managedkafka_v1.ListTopicsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_topics(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.managedkafka_v1.types.ListTopicsRequest, dict]]): + The request object. Request for ListTopics. + parent (:class:`str`): + Required. The parent cluster whose topics are to be + listed. Structured like + ``projects/{project}/locations/{location}/clusters/{cluster}``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.managedkafka_v1.services.managed_kafka.pagers.ListTopicsAsyncPager: + Response for ListTopics. + + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, managed_kafka.ListTopicsRequest): + request = managed_kafka.ListTopicsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.list_topics + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. 
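# Usage sketch: deleting a cluster and blocking until the long-running operation finishes.
# The resource name is a hypothetical placeholder; the operation resolves to an Empty
# message rather than a resource.

import asyncio

from google.cloud import managedkafka_v1


async def demo_delete_cluster() -> None:
    client = managedkafka_v1.ManagedKafkaAsyncClient()
    operation = await client.delete_cluster(
        name="projects/my-project/locations/us-central1/clusters/my-cluster",
    )
    await operation.result()  # completes with google.protobuf.Empty
    print("Cluster deleted.")


asyncio.run(demo_delete_cluster())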
+ response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListTopicsAsyncPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_topic( + self, + request: Optional[Union[managed_kafka.GetTopicRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> resources.Topic: + r"""Returns the properties of a single topic. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import managedkafka_v1 + + async def sample_get_topic(): + # Create a client + client = managedkafka_v1.ManagedKafkaAsyncClient() + + # Initialize request argument(s) + request = managedkafka_v1.GetTopicRequest( + name="name_value", + ) + + # Make the request + response = await client.get_topic(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.managedkafka_v1.types.GetTopicRequest, dict]]): + The request object. Request for GetTopic. + name (:class:`str`): + Required. The name of the topic whose + configuration to return. Structured + like: + + projects/{project}/locations/{location}/clusters/{cluster}/topics/{topic}. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.managedkafka_v1.types.Topic: + A Kafka topic in a given cluster. + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, managed_kafka.GetTopicRequest): + request = managed_kafka.GetTopicRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.get_topic + ] + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def create_topic( + self, + request: Optional[Union[managed_kafka.CreateTopicRequest, dict]] = None, + *, + parent: Optional[str] = None, + topic: Optional[resources.Topic] = None, + topic_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> resources.Topic: + r"""Creates a new topic in a given project and location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import managedkafka_v1 + + async def sample_create_topic(): + # Create a client + client = managedkafka_v1.ManagedKafkaAsyncClient() + + # Initialize request argument(s) + topic = managedkafka_v1.Topic() + topic.partition_count = 1634 + topic.replication_factor = 1912 + + request = managedkafka_v1.CreateTopicRequest( + parent="parent_value", + topic_id="topic_id_value", + topic=topic, + ) + + # Make the request + response = await client.create_topic(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.managedkafka_v1.types.CreateTopicRequest, dict]]): + The request object. Request for CreateTopic. + parent (:class:`str`): + Required. The parent cluster in which to create the + topic. Structured like + ``projects/{project}/locations/{location}/clusters/{cluster}``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + topic (:class:`google.cloud.managedkafka_v1.types.Topic`): + Required. Configuration of the topic to create. Its + ``name`` field is ignored. + + This corresponds to the ``topic`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + topic_id (:class:`str`): + Required. The ID to use for the topic, which will become + the final component of the topic's name. + + This value is structured like: ``my-topic-name``. + + This corresponds to the ``topic_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.managedkafka_v1.types.Topic: + A Kafka topic in a given cluster. + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
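# Usage sketch: enumerating the topics of a cluster and then fetching one of them by name.
# The cluster and topic IDs are hypothetical placeholders.

import asyncio

from google.cloud import managedkafka_v1

CLUSTER = "projects/my-project/locations/us-central1/clusters/my-cluster"


async def demo_topics() -> None:
    client = managedkafka_v1.ManagedKafkaAsyncClient()
    pager = await client.list_topics(parent=CLUSTER)
    async for topic in pager:
        print(topic.name, topic.partition_count)

    topic = await client.get_topic(name=f"{CLUSTER}/topics/my-topic")
    print(topic.replication_factor)


asyncio.run(demo_topics())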
+ has_flattened_params = any([parent, topic, topic_id]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, managed_kafka.CreateTopicRequest): + request = managed_kafka.CreateTopicRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if topic is not None: + request.topic = topic + if topic_id is not None: + request.topic_id = topic_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.create_topic + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def update_topic( + self, + request: Optional[Union[managed_kafka.UpdateTopicRequest, dict]] = None, + *, + topic: Optional[resources.Topic] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> resources.Topic: + r"""Updates the properties of a single topic. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import managedkafka_v1 + + async def sample_update_topic(): + # Create a client + client = managedkafka_v1.ManagedKafkaAsyncClient() + + # Initialize request argument(s) + topic = managedkafka_v1.Topic() + topic.partition_count = 1634 + topic.replication_factor = 1912 + + request = managedkafka_v1.UpdateTopicRequest( + topic=topic, + ) + + # Make the request + response = await client.update_topic(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.managedkafka_v1.types.UpdateTopicRequest, dict]]): + The request object. Request for UpdateTopic. + topic (:class:`google.cloud.managedkafka_v1.types.Topic`): + Required. The topic to update. Its ``name`` field must + be populated. + + This corresponds to the ``topic`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): + Required. Field mask is used to specify the fields to be + overwritten in the Topic resource by the update. The + fields specified in the update_mask are relative to the + resource, not the full request. A field will be + overwritten if it is in the mask. 
The mask is required + and a value of \* will update all fields. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.managedkafka_v1.types.Topic: + A Kafka topic in a given cluster. + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([topic, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, managed_kafka.UpdateTopicRequest): + request = managed_kafka.UpdateTopicRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if topic is not None: + request.topic = topic + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.update_topic + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("topic.name", request.topic.name),) + ), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def delete_topic( + self, + request: Optional[Union[managed_kafka.DeleteTopicRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a single topic. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import managedkafka_v1 + + async def sample_delete_topic(): + # Create a client + client = managedkafka_v1.ManagedKafkaAsyncClient() + + # Initialize request argument(s) + request = managedkafka_v1.DeleteTopicRequest( + name="name_value", + ) + + # Make the request + await client.delete_topic(request=request) + + Args: + request (Optional[Union[google.cloud.managedkafka_v1.types.DeleteTopicRequest, dict]]): + The request object. Request for DeleteTopic. + name (:class:`str`): + Required. The name of the topic to delete. + ``projects/{project}/locations/{location}/clusters/{cluster}/topics/{topic}``. 
+ + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, managed_kafka.DeleteTopicRequest): + request = managed_kafka.DeleteTopicRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.delete_topic + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def list_consumer_groups( + self, + request: Optional[Union[managed_kafka.ListConsumerGroupsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListConsumerGroupsAsyncPager: + r"""Lists the consumer groups in a given cluster. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import managedkafka_v1 + + async def sample_list_consumer_groups(): + # Create a client + client = managedkafka_v1.ManagedKafkaAsyncClient() + + # Initialize request argument(s) + request = managedkafka_v1.ListConsumerGroupsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_consumer_groups(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.managedkafka_v1.types.ListConsumerGroupsRequest, dict]]): + The request object. Request for ListConsumerGroups. + parent (:class:`str`): + Required. The parent cluster whose consumer groups are + to be listed. Structured like + ``projects/{project}/locations/{location}/clusters/{cluster}``. 
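# Usage sketch: creating a topic, growing its partition count with a field-mask update,
# and finally deleting it. IDs and sizing are hypothetical placeholders; note that
# create_topic/update_topic return the Topic directly (no long-running operation), unlike
# the cluster methods above.

import asyncio

from google.protobuf import field_mask_pb2

from google.cloud import managedkafka_v1

CLUSTER = "projects/my-project/locations/us-central1/clusters/my-cluster"


async def demo_topic_lifecycle() -> None:
    client = managedkafka_v1.ManagedKafkaAsyncClient()

    topic = await client.create_topic(
        parent=CLUSTER,
        topic_id="orders",
        topic=managedkafka_v1.Topic(partition_count=3, replication_factor=3),
    )

    # Partition counts can typically only grow, so only increase this value.
    topic = await client.update_topic(
        topic=managedkafka_v1.Topic(name=topic.name, partition_count=6),
        update_mask=field_mask_pb2.FieldMask(paths=["partition_count"]),
    )

    await client.delete_topic(name=topic.name)


asyncio.run(demo_topic_lifecycle())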
+ + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.managedkafka_v1.services.managed_kafka.pagers.ListConsumerGroupsAsyncPager: + Response for ListConsumerGroups. + + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, managed_kafka.ListConsumerGroupsRequest): + request = managed_kafka.ListConsumerGroupsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.list_consumer_groups + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListConsumerGroupsAsyncPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_consumer_group( + self, + request: Optional[Union[managed_kafka.GetConsumerGroupRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> resources.ConsumerGroup: + r"""Returns the properties of a single consumer group. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
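# Usage sketch: listing consumer groups with an explicit retry policy and per-call
# timeout, illustrating the ``retry``/``timeout`` parameters documented on every method
# above. The backoff values are arbitrary placeholders.

import asyncio

from google.api_core import retry_async

from google.cloud import managedkafka_v1


async def demo_list_consumer_groups() -> None:
    client = managedkafka_v1.ManagedKafkaAsyncClient()
    pager = await client.list_consumer_groups(
        parent="projects/my-project/locations/us-central1/clusters/my-cluster",
        retry=retry_async.AsyncRetry(initial=1.0, maximum=10.0, multiplier=2.0, timeout=60.0),
        timeout=60.0,
    )
    async for group in pager:
        print(group.name)


asyncio.run(demo_list_consumer_groups())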
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import managedkafka_v1 + + async def sample_get_consumer_group(): + # Create a client + client = managedkafka_v1.ManagedKafkaAsyncClient() + + # Initialize request argument(s) + request = managedkafka_v1.GetConsumerGroupRequest( + name="name_value", + ) + + # Make the request + response = await client.get_consumer_group(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.managedkafka_v1.types.GetConsumerGroupRequest, dict]]): + The request object. Request for GetConsumerGroup. + name (:class:`str`): + Required. The name of the consumer group whose + configuration to return. + ``projects/{project}/locations/{location}/clusters/{cluster}/consumerGroups/{consumerGroup}``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.managedkafka_v1.types.ConsumerGroup: + A Kafka consumer group in a given + cluster. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, managed_kafka.GetConsumerGroupRequest): + request = managed_kafka.GetConsumerGroupRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.get_consumer_group + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def update_consumer_group( + self, + request: Optional[Union[managed_kafka.UpdateConsumerGroupRequest, dict]] = None, + *, + consumer_group: Optional[resources.ConsumerGroup] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> resources.ConsumerGroup: + r"""Updates the properties of a single consumer group. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import managedkafka_v1 + + async def sample_update_consumer_group(): + # Create a client + client = managedkafka_v1.ManagedKafkaAsyncClient() + + # Initialize request argument(s) + request = managedkafka_v1.UpdateConsumerGroupRequest( + ) + + # Make the request + response = await client.update_consumer_group(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.managedkafka_v1.types.UpdateConsumerGroupRequest, dict]]): + The request object. Request for UpdateConsumerGroup. + consumer_group (:class:`google.cloud.managedkafka_v1.types.ConsumerGroup`): + Required. The consumer group to update. Its ``name`` + field must be populated. + + This corresponds to the ``consumer_group`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): + Required. Field mask is used to specify the fields to be + overwritten in the ConsumerGroup resource by the update. + The fields specified in the update_mask are relative to + the resource, not the full request. A field will be + overwritten if it is in the mask. The mask is required + and a value of \* will update all fields. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.managedkafka_v1.types.ConsumerGroup: + A Kafka consumer group in a given + cluster. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([consumer_group, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, managed_kafka.UpdateConsumerGroupRequest): + request = managed_kafka.UpdateConsumerGroupRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if consumer_group is not None: + request.consumer_group = consumer_group + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.update_consumer_group + ] + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("consumer_group.name", request.consumer_group.name),) + ), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def delete_consumer_group( + self, + request: Optional[Union[managed_kafka.DeleteConsumerGroupRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a single consumer group. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import managedkafka_v1 + + async def sample_delete_consumer_group(): + # Create a client + client = managedkafka_v1.ManagedKafkaAsyncClient() + + # Initialize request argument(s) + request = managedkafka_v1.DeleteConsumerGroupRequest( + name="name_value", + ) + + # Make the request + await client.delete_consumer_group(request=request) + + Args: + request (Optional[Union[google.cloud.managedkafka_v1.types.DeleteConsumerGroupRequest, dict]]): + The request object. Request for DeleteConsumerGroup. + name (:class:`str`): + Required. The name of the consumer group to delete. + ``projects/{project}/locations/{location}/clusters/{cluster}/consumerGroups/{consumerGroup}``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, managed_kafka.DeleteConsumerGroupRequest): + request = managed_kafka.DeleteConsumerGroupRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.delete_consumer_group + ] + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def list_operations( + self, + request: Optional[operations_pb2.ListOperationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Lists operations that match the specified filter in the request. + + Args: + request (:class:`~.operations_pb2.ListOperationsRequest`): + The request object. Request message for + `ListOperations` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.ListOperationsResponse: + Response message for ``ListOperations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.ListOperationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_operations, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_operation( + self, + request: Optional[operations_pb2.GetOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. + + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def delete_operation( + self, + request: Optional[operations_pb2.DeleteOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a long-running operation. + + This method indicates that the client is no longer interested + in the operation result. It does not cancel the operation. + If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.DeleteOperationRequest`): + The request object. Request message for + `DeleteOperation` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.DeleteOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.delete_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def cancel_operation( + self, + request: Optional[operations_pb2.CancelOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Starts asynchronous cancellation on a long-running operation. + + The server makes a best effort to cancel the operation, but success + is not guaranteed. If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.CancelOperationRequest`): + The request object. Request message for + `CancelOperation` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.CancelOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = gapic_v1.method_async.wrap_method( + self._client._transport.cancel_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def get_location( + self, + request: Optional[locations_pb2.GetLocationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.Location: + r"""Gets information about a location. + + Args: + request (:class:`~.location_pb2.GetLocationRequest`): + The request object. Request message for + `GetLocation` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.location_pb2.Location: + Location object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = locations_pb2.GetLocationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_location, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_locations( + self, + request: Optional[locations_pb2.ListLocationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.ListLocationsResponse: + r"""Lists information about the supported locations for this service. + + Args: + request (:class:`~.location_pb2.ListLocationsRequest`): + The request object. Request message for + `ListLocations` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.location_pb2.ListLocationsResponse: + Response message for ``ListLocations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = locations_pb2.ListLocationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_locations, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def __aenter__(self) -> "ManagedKafkaAsyncClient": + return self + + async def __aexit__(self, exc_type, exc, tb): + await self.transport.close() + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("ManagedKafkaAsyncClient",) diff --git a/packages/google-cloud-managedkafka/google/cloud/managedkafka_v1/services/managed_kafka/client.py b/packages/google-cloud-managedkafka/google/cloud/managedkafka_v1/services/managed_kafka/client.py new file mode 100644 index 000000000000..7368ddf2b27d --- /dev/null +++ b/packages/google-cloud-managedkafka/google/cloud/managedkafka_v1/services/managed_kafka/client.py @@ -0,0 +1,2734 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +import os +import re +from typing import ( + Callable, + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, + cast, +) +import warnings + +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.managedkafka_v1 import gapic_version as package_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + +from google.api_core import operation # type: ignore +from google.api_core import operation_async # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore + +from google.cloud.managedkafka_v1.services.managed_kafka import pagers +from google.cloud.managedkafka_v1.types import managed_kafka, resources + +from .transports.base import DEFAULT_CLIENT_INFO, ManagedKafkaTransport +from .transports.grpc import ManagedKafkaGrpcTransport +from .transports.grpc_asyncio import ManagedKafkaGrpcAsyncIOTransport +from .transports.rest import ManagedKafkaRestTransport + + +class ManagedKafkaClientMeta(type): + """Metaclass for the ManagedKafka client. + + This provides class-level methods for building and retrieving + support objects (e.g. transport) without polluting the client instance + objects. + """ + + _transport_registry = OrderedDict() # type: Dict[str, Type[ManagedKafkaTransport]] + _transport_registry["grpc"] = ManagedKafkaGrpcTransport + _transport_registry["grpc_asyncio"] = ManagedKafkaGrpcAsyncIOTransport + _transport_registry["rest"] = ManagedKafkaRestTransport + + def get_transport_class( + cls, + label: Optional[str] = None, + ) -> Type[ManagedKafkaTransport]: + """Returns an appropriate transport class. + + Args: + label: The name of the desired transport. If none is + provided, then the first transport in the registry is used. + + Returns: + The transport class to use. + """ + # If a specific transport is requested, return that one. + if label: + return cls._transport_registry[label] + + # No transport is requested; return the default (that is, the first one + # in the dictionary). + return next(iter(cls._transport_registry.values())) + + +class ManagedKafkaClient(metaclass=ManagedKafkaClientMeta): + """The service that a client application uses to manage Apache + Kafka clusters, topics and consumer groups. + """ + + @staticmethod + def _get_default_mtls_endpoint(api_endpoint): + """Converts api endpoint to mTLS endpoint. + + Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to + "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. + Args: + api_endpoint (Optional[str]): the api endpoint to convert. + Returns: + str: converted mTLS api endpoint. 
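+                For example, "managedkafka.googleapis.com" would be converted to
+                "managedkafka.mtls.googleapis.com", and "managedkafka.sandbox.googleapis.com"
+                to "managedkafka.mtls.sandbox.googleapis.com".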
+ """ + if not api_endpoint: + return api_endpoint + + mtls_endpoint_re = re.compile( + r"(?P[^.]+)(?P\.mtls)?(?P\.sandbox)?(?P\.googleapis\.com)?" + ) + + m = mtls_endpoint_re.match(api_endpoint) + name, mtls, sandbox, googledomain = m.groups() + if mtls or not googledomain: + return api_endpoint + + if sandbox: + return api_endpoint.replace( + "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" + ) + + return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") + + # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. + DEFAULT_ENDPOINT = "managedkafka.googleapis.com" + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore + DEFAULT_ENDPOINT + ) + + _DEFAULT_ENDPOINT_TEMPLATE = "managedkafka.{UNIVERSE_DOMAIN}" + _DEFAULT_UNIVERSE = "googleapis.com" + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + ManagedKafkaClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + ManagedKafkaClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_file(filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> ManagedKafkaTransport: + """Returns the transport used by the client instance. + + Returns: + ManagedKafkaTransport: The transport used by the client + instance. 
+ """ + return self._transport + + @staticmethod + def cluster_path( + project: str, + location: str, + cluster: str, + ) -> str: + """Returns a fully-qualified cluster string.""" + return "projects/{project}/locations/{location}/clusters/{cluster}".format( + project=project, + location=location, + cluster=cluster, + ) + + @staticmethod + def parse_cluster_path(path: str) -> Dict[str, str]: + """Parses a cluster path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/clusters/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + + @staticmethod + def consumer_group_path( + project: str, + location: str, + cluster: str, + consumer_group: str, + ) -> str: + """Returns a fully-qualified consumer_group string.""" + return "projects/{project}/locations/{location}/clusters/{cluster}/consumerGroups/{consumer_group}".format( + project=project, + location=location, + cluster=cluster, + consumer_group=consumer_group, + ) + + @staticmethod + def parse_consumer_group_path(path: str) -> Dict[str, str]: + """Parses a consumer_group path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/clusters/(?P.+?)/consumerGroups/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + + @staticmethod + def crypto_key_path( + project: str, + location: str, + key_ring: str, + crypto_key: str, + ) -> str: + """Returns a fully-qualified crypto_key string.""" + return "projects/{project}/locations/{location}/keyRings/{key_ring}/cryptoKeys/{crypto_key}".format( + project=project, + location=location, + key_ring=key_ring, + crypto_key=crypto_key, + ) + + @staticmethod + def parse_crypto_key_path(path: str) -> Dict[str, str]: + """Parses a crypto_key path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/keyRings/(?P.+?)/cryptoKeys/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + + @staticmethod + def topic_path( + project: str, + location: str, + cluster: str, + topic: str, + ) -> str: + """Returns a fully-qualified topic string.""" + return "projects/{project}/locations/{location}/clusters/{cluster}/topics/{topic}".format( + project=project, + location=location, + cluster=cluster, + topic=topic, + ) + + @staticmethod + def parse_topic_path(path: str) -> Dict[str, str]: + """Parses a topic path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/clusters/(?P.+?)/topics/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + + @staticmethod + def common_billing_account_path( + billing_account: str, + ) -> str: + """Returns a fully-qualified billing_account string.""" + return "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + + @staticmethod + def parse_common_billing_account_path(path: str) -> Dict[str, str]: + """Parse a billing_account path into its component segments.""" + m = re.match(r"^billingAccounts/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_folder_path( + folder: str, + ) -> str: + """Returns a fully-qualified folder string.""" + return "folders/{folder}".format( + folder=folder, + ) + + @staticmethod + def parse_common_folder_path(path: str) -> Dict[str, str]: + """Parse a folder path into its component segments.""" + m = re.match(r"^folders/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_organization_path( + organization: str, + ) -> str: + """Returns a fully-qualified organization string.""" + return 
"organizations/{organization}".format( + organization=organization, + ) + + @staticmethod + def parse_common_organization_path(path: str) -> Dict[str, str]: + """Parse a organization path into its component segments.""" + m = re.match(r"^organizations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_project_path( + project: str, + ) -> str: + """Returns a fully-qualified project string.""" + return "projects/{project}".format( + project=project, + ) + + @staticmethod + def parse_common_project_path(path: str) -> Dict[str, str]: + """Parse a project path into its component segments.""" + m = re.match(r"^projects/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_location_path( + project: str, + location: str, + ) -> str: + """Returns a fully-qualified location string.""" + return "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) + + @staticmethod + def parse_common_location_path(path: str) -> Dict[str, str]: + """Parse a location path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @classmethod + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[client_options_lib.ClientOptions] = None + ): + """Deprecated. Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + + warnings.warn( + "get_mtls_endpoint_and_cert_source is deprecated. Use the api_endpoint property instead.", + DeprecationWarning, + ) + if client_options is None: + client_options = client_options_lib.ClientOptions() + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_client_cert not in ("true", "false"): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError( + "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Figure out the client cert source to use. 
+ client_cert_source = None + if use_client_cert == "true": + if client_options.client_cert_source: + client_cert_source = client_options.client_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + + # Figure out which api endpoint to use. + if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + elif use_mtls_endpoint == "always" or ( + use_mtls_endpoint == "auto" and client_cert_source + ): + api_endpoint = cls.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = cls.DEFAULT_ENDPOINT + + return api_endpoint, client_cert_source + + @staticmethod + def _read_environment_variables(): + """Returns the environment variables used by the client. + + Returns: + Tuple[bool, str, str]: returns the GOOGLE_API_USE_CLIENT_CERTIFICATE, + GOOGLE_API_USE_MTLS_ENDPOINT, and GOOGLE_CLOUD_UNIVERSE_DOMAIN environment variables. + + Raises: + ValueError: If GOOGLE_API_USE_CLIENT_CERTIFICATE is not + any of ["true", "false"]. + google.auth.exceptions.MutualTLSChannelError: If GOOGLE_API_USE_MTLS_ENDPOINT + is not any of ["auto", "never", "always"]. + """ + use_client_cert = os.getenv( + "GOOGLE_API_USE_CLIENT_CERTIFICATE", "false" + ).lower() + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto").lower() + universe_domain_env = os.getenv("GOOGLE_CLOUD_UNIVERSE_DOMAIN") + if use_client_cert not in ("true", "false"): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError( + "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + return use_client_cert == "true", use_mtls_endpoint, universe_domain_env + + @staticmethod + def _get_client_cert_source(provided_cert_source, use_cert_flag): + """Return the client cert source to be used by the client. + + Args: + provided_cert_source (bytes): The client certificate source provided. + use_cert_flag (bool): A flag indicating whether to use the client certificate. + + Returns: + bytes or None: The client cert source to be used by the client. + """ + client_cert_source = None + if use_cert_flag: + if provided_cert_source: + client_cert_source = provided_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + return client_cert_source + + @staticmethod + def _get_api_endpoint( + api_override, client_cert_source, universe_domain, use_mtls_endpoint + ): + """Return the API endpoint used by the client. + + Args: + api_override (str): The API endpoint override. If specified, this is always + the return value of this function and the other arguments are not used. + client_cert_source (bytes): The client certificate source used by the client. + universe_domain (str): The universe domain used by the client. + use_mtls_endpoint (str): How to use the mTLS endpoint, which depends also on the other parameters. + Possible values are "always", "auto", or "never". + + Returns: + str: The API endpoint to be used by the client. + """ + if api_override is not None: + api_endpoint = api_override + elif use_mtls_endpoint == "always" or ( + use_mtls_endpoint == "auto" and client_cert_source + ): + _default_universe = ManagedKafkaClient._DEFAULT_UNIVERSE + if universe_domain != _default_universe: + raise MutualTLSChannelError( + f"mTLS is not supported in any universe other than {_default_universe}." 
+ ) + api_endpoint = ManagedKafkaClient.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = ManagedKafkaClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=universe_domain + ) + return api_endpoint + + @staticmethod + def _get_universe_domain( + client_universe_domain: Optional[str], universe_domain_env: Optional[str] + ) -> str: + """Return the universe domain used by the client. + + Args: + client_universe_domain (Optional[str]): The universe domain configured via the client options. + universe_domain_env (Optional[str]): The universe domain configured via the "GOOGLE_CLOUD_UNIVERSE_DOMAIN" environment variable. + + Returns: + str: The universe domain to be used by the client. + + Raises: + ValueError: If the universe domain is an empty string. + """ + universe_domain = ManagedKafkaClient._DEFAULT_UNIVERSE + if client_universe_domain is not None: + universe_domain = client_universe_domain + elif universe_domain_env is not None: + universe_domain = universe_domain_env + if len(universe_domain.strip()) == 0: + raise ValueError("Universe Domain cannot be an empty string.") + return universe_domain + + @staticmethod + def _compare_universes( + client_universe: str, credentials: ga_credentials.Credentials + ) -> bool: + """Returns True iff the universe domains used by the client and credentials match. + + Args: + client_universe (str): The universe domain configured via the client options. + credentials (ga_credentials.Credentials): The credentials being used in the client. + + Returns: + bool: True iff client_universe matches the universe in credentials. + + Raises: + ValueError: when client_universe does not match the universe in credentials. + """ + + default_universe = ManagedKafkaClient._DEFAULT_UNIVERSE + credentials_universe = getattr(credentials, "universe_domain", default_universe) + + if client_universe != credentials_universe: + raise ValueError( + "The configured universe domain " + f"({client_universe}) does not match the universe domain " + f"found in the credentials ({credentials_universe}). " + "If you haven't configured the universe domain explicitly, " + f"`{default_universe}` is the default." + ) + return True + + def _validate_universe_domain(self): + """Validates client's and credentials' universe domains are consistent. + + Returns: + bool: True iff the configured universe domain is valid. + + Raises: + ValueError: If the configured universe domain is not valid. + """ + self._is_universe_domain_valid = ( + self._is_universe_domain_valid + or ManagedKafkaClient._compare_universes( + self.universe_domain, self.transport._credentials + ) + ) + return self._is_universe_domain_valid + + @property + def api_endpoint(self): + """Return the API endpoint used by the client instance. + + Returns: + str: The API endpoint used by the client instance. + """ + return self._api_endpoint + + @property + def universe_domain(self) -> str: + """Return the universe domain used by the client instance. + + Returns: + str: The universe domain used by the client instance. + """ + return self._universe_domain + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[ + Union[str, ManagedKafkaTransport, Callable[..., ManagedKafkaTransport]] + ] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the managed kafka client. 
+ + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Optional[Union[str,ManagedKafkaTransport,Callable[..., ManagedKafkaTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the ManagedKafkaTransport constructor. + If set to None, a transport is chosen automatically. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): + Custom options for the client. + + 1. The ``api_endpoint`` property can be used to override the + default endpoint provided by the client when ``transport`` is + not explicitly provided. Only if this property is not set and + ``transport`` was not explicitly provided, the endpoint is + determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment + variable, which have one of the following values: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto-switch to the + default mTLS endpoint if client certificate is present; this is + the default value). + + 2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide a client certificate for mTLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + 3. The ``universe_domain`` property can be used to override the + default "googleapis.com" universe. Note that the ``api_endpoint`` + property still takes precedence; and ``universe_domain`` is + currently not supported for mTLS. + + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + """ + self._client_options = client_options + if isinstance(self._client_options, dict): + self._client_options = client_options_lib.from_dict(self._client_options) + if self._client_options is None: + self._client_options = client_options_lib.ClientOptions() + self._client_options = cast( + client_options_lib.ClientOptions, self._client_options + ) + + universe_domain_opt = getattr(self._client_options, "universe_domain", None) + + ( + self._use_client_cert, + self._use_mtls_endpoint, + self._universe_domain_env, + ) = ManagedKafkaClient._read_environment_variables() + self._client_cert_source = ManagedKafkaClient._get_client_cert_source( + self._client_options.client_cert_source, self._use_client_cert + ) + self._universe_domain = ManagedKafkaClient._get_universe_domain( + universe_domain_opt, self._universe_domain_env + ) + self._api_endpoint = None # updated below, depending on `transport` + + # Initialize the universe domain validation. 
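+        # (Note: the comparison itself runs lazily in _validate_universe_domain(), which
+        # checks the configured universe, e.g. "googleapis.com", against the credentials'
+        # universe_domain before a request is sent and caches a successful result.)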
+ self._is_universe_domain_valid = False + + api_key_value = getattr(self._client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError( + "client_options.api_key and credentials are mutually exclusive" + ) + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + transport_provided = isinstance(transport, ManagedKafkaTransport) + if transport_provided: + # transport is a ManagedKafkaTransport instance. + if credentials or self._client_options.credentials_file or api_key_value: + raise ValueError( + "When providing a transport instance, " + "provide its credentials directly." + ) + if self._client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." + ) + self._transport = cast(ManagedKafkaTransport, transport) + self._api_endpoint = self._transport.host + + self._api_endpoint = self._api_endpoint or ManagedKafkaClient._get_api_endpoint( + self._client_options.api_endpoint, + self._client_cert_source, + self._universe_domain, + self._use_mtls_endpoint, + ) + + if not transport_provided: + import google.auth._default # type: ignore + + if api_key_value and hasattr( + google.auth._default, "get_api_key_credentials" + ): + credentials = google.auth._default.get_api_key_credentials( + api_key_value + ) + + transport_init: Union[ + Type[ManagedKafkaTransport], Callable[..., ManagedKafkaTransport] + ] = ( + type(self).get_transport_class(transport) + if isinstance(transport, str) or transport is None + else cast(Callable[..., ManagedKafkaTransport], transport) + ) + # initialize with the provided callable or the passed in class + self._transport = transport_init( + credentials=credentials, + credentials_file=self._client_options.credentials_file, + host=self._api_endpoint, + scopes=self._client_options.scopes, + client_cert_source_for_mtls=self._client_cert_source, + quota_project_id=self._client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + api_audience=self._client_options.api_audience, + ) + + def list_clusters( + self, + request: Optional[Union[managed_kafka.ListClustersRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListClustersPager: + r"""Lists the clusters in a given project and location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import managedkafka_v1 + + def sample_list_clusters(): + # Create a client + client = managedkafka_v1.ManagedKafkaClient() + + # Initialize request argument(s) + request = managedkafka_v1.ListClustersRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_clusters(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.managedkafka_v1.types.ListClustersRequest, dict]): + The request object. Request for ListClusters. + parent (str): + Required. The parent location whose clusters are to be + listed. Structured like + ``projects/{project}/locations/{location}``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.managedkafka_v1.services.managed_kafka.pagers.ListClustersPager: + Response for ListClusters. + + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, managed_kafka.ListClustersRequest): + request = managed_kafka.ListClustersRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_clusters] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListClustersPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_cluster( + self, + request: Optional[Union[managed_kafka.GetClusterRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> resources.Cluster: + r"""Returns the properties of a single cluster. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import managedkafka_v1 + + def sample_get_cluster(): + # Create a client + client = managedkafka_v1.ManagedKafkaClient() + + # Initialize request argument(s) + request = managedkafka_v1.GetClusterRequest( + name="name_value", + ) + + # Make the request + response = client.get_cluster(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.managedkafka_v1.types.GetClusterRequest, dict]): + The request object. Request for GetCluster. + name (str): + Required. The name of the cluster + whose configuration to return. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.managedkafka_v1.types.Cluster: + An Apache Kafka cluster deployed in a + location. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, managed_kafka.GetClusterRequest): + request = managed_kafka.GetClusterRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_cluster] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def create_cluster( + self, + request: Optional[Union[managed_kafka.CreateClusterRequest, dict]] = None, + *, + parent: Optional[str] = None, + cluster: Optional[resources.Cluster] = None, + cluster_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Creates a new cluster in a given project and + location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import managedkafka_v1 + + def sample_create_cluster(): + # Create a client + client = managedkafka_v1.ManagedKafkaClient() + + # Initialize request argument(s) + cluster = managedkafka_v1.Cluster() + cluster.gcp_config.access_config.network_configs.subnet = "subnet_value" + cluster.capacity_config.vcpu_count = 1094 + cluster.capacity_config.memory_bytes = 1311 + + request = managedkafka_v1.CreateClusterRequest( + parent="parent_value", + cluster_id="cluster_id_value", + cluster=cluster, + ) + + # Make the request + operation = client.create_cluster(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.managedkafka_v1.types.CreateClusterRequest, dict]): + The request object. Request for CreateCluster. + parent (str): + Required. The parent region in which to create the + cluster. Structured like + ``projects/{project}/locations/{location}``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + cluster (google.cloud.managedkafka_v1.types.Cluster): + Required. Configuration of the cluster to create. Its + ``name`` field is ignored. + + This corresponds to the ``cluster`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + cluster_id (str): + Required. The ID to use for the cluster, which will + become the final component of the cluster's name. The ID + must be 1-63 characters long, and match the regular + expression ``[a-z]([-a-z0-9]*[a-z0-9])?`` to comply with + RFC 1035. + + This value is structured like: ``my-cluster-id``. + + This corresponds to the ``cluster_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.managedkafka_v1.types.Cluster` An + Apache Kafka cluster deployed in a location. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, cluster, cluster_id]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, managed_kafka.CreateClusterRequest): + request = managed_kafka.CreateClusterRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
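+        # (Illustrative, with hypothetical IDs: a flattened call such as
+        #  client.create_cluster(parent="projects/my-project/locations/us-central1",
+        #  cluster=cluster, cluster_id="my-cluster-id") is folded into the corresponding
+        #  request fields below.)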
+ if parent is not None: + request.parent = parent + if cluster is not None: + request.cluster = cluster + if cluster_id is not None: + request.cluster_id = cluster_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_cluster] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + resources.Cluster, + metadata_type=resources.OperationMetadata, + ) + + # Done; return the response. + return response + + def update_cluster( + self, + request: Optional[Union[managed_kafka.UpdateClusterRequest, dict]] = None, + *, + cluster: Optional[resources.Cluster] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Updates the properties of a single cluster. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import managedkafka_v1 + + def sample_update_cluster(): + # Create a client + client = managedkafka_v1.ManagedKafkaClient() + + # Initialize request argument(s) + cluster = managedkafka_v1.Cluster() + cluster.gcp_config.access_config.network_configs.subnet = "subnet_value" + cluster.capacity_config.vcpu_count = 1094 + cluster.capacity_config.memory_bytes = 1311 + + request = managedkafka_v1.UpdateClusterRequest( + cluster=cluster, + ) + + # Make the request + operation = client.update_cluster(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.managedkafka_v1.types.UpdateClusterRequest, dict]): + The request object. Request for UpdateCluster. + cluster (google.cloud.managedkafka_v1.types.Cluster): + Required. The cluster to update. Its ``name`` field must + be populated. + + This corresponds to the ``cluster`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Required. Field mask is used to specify the fields to be + overwritten in the cluster resource by the update. The + fields specified in the update_mask are relative to the + resource, not the full request. A field will be + overwritten if it is in the mask. The mask is required + and a value of \* will update all fields. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.managedkafka_v1.types.Cluster` An + Apache Kafka cluster deployed in a location. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([cluster, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, managed_kafka.UpdateClusterRequest): + request = managed_kafka.UpdateClusterRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if cluster is not None: + request.cluster = cluster + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update_cluster] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("cluster.name", request.cluster.name),) + ), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + resources.Cluster, + metadata_type=resources.OperationMetadata, + ) + + # Done; return the response. + return response + + def delete_cluster( + self, + request: Optional[Union[managed_kafka.DeleteClusterRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Deletes a single cluster. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import managedkafka_v1 + + def sample_delete_cluster(): + # Create a client + client = managedkafka_v1.ManagedKafkaClient() + + # Initialize request argument(s) + request = managedkafka_v1.DeleteClusterRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_cluster(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.managedkafka_v1.types.DeleteClusterRequest, dict]): + The request object. Request for DeleteCluster. + name (str): + Required. The name of the cluster to + delete. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. A typical example is to + use it as the request or the response type of an API + method. For instance: + + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); + + } + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, managed_kafka.DeleteClusterRequest): + request = managed_kafka.DeleteClusterRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_cluster] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + empty_pb2.Empty, + metadata_type=resources.OperationMetadata, + ) + + # Done; return the response. 
+ return response + + def list_topics( + self, + request: Optional[Union[managed_kafka.ListTopicsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListTopicsPager: + r"""Lists the topics in a given cluster. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import managedkafka_v1 + + def sample_list_topics(): + # Create a client + client = managedkafka_v1.ManagedKafkaClient() + + # Initialize request argument(s) + request = managedkafka_v1.ListTopicsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_topics(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.managedkafka_v1.types.ListTopicsRequest, dict]): + The request object. Request for ListTopics. + parent (str): + Required. The parent cluster whose topics are to be + listed. Structured like + ``projects/{project}/locations/{location}/clusters/{cluster}``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.managedkafka_v1.services.managed_kafka.pagers.ListTopicsPager: + Response for ListTopics. + + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, managed_kafka.ListTopicsRequest): + request = managed_kafka.ListTopicsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_topics] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. 
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListTopicsPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_topic( + self, + request: Optional[Union[managed_kafka.GetTopicRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> resources.Topic: + r"""Returns the properties of a single topic. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import managedkafka_v1 + + def sample_get_topic(): + # Create a client + client = managedkafka_v1.ManagedKafkaClient() + + # Initialize request argument(s) + request = managedkafka_v1.GetTopicRequest( + name="name_value", + ) + + # Make the request + response = client.get_topic(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.managedkafka_v1.types.GetTopicRequest, dict]): + The request object. Request for GetTopic. + name (str): + Required. The name of the topic whose + configuration to return. Structured + like: + + projects/{project}/locations/{location}/clusters/{cluster}/topics/{topic}. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.managedkafka_v1.types.Topic: + A Kafka topic in a given cluster. + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, managed_kafka.GetTopicRequest): + request = managed_kafka.GetTopicRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_topic] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. 
+ self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def create_topic( + self, + request: Optional[Union[managed_kafka.CreateTopicRequest, dict]] = None, + *, + parent: Optional[str] = None, + topic: Optional[resources.Topic] = None, + topic_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> resources.Topic: + r"""Creates a new topic in a given project and location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import managedkafka_v1 + + def sample_create_topic(): + # Create a client + client = managedkafka_v1.ManagedKafkaClient() + + # Initialize request argument(s) + topic = managedkafka_v1.Topic() + topic.partition_count = 1634 + topic.replication_factor = 1912 + + request = managedkafka_v1.CreateTopicRequest( + parent="parent_value", + topic_id="topic_id_value", + topic=topic, + ) + + # Make the request + response = client.create_topic(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.managedkafka_v1.types.CreateTopicRequest, dict]): + The request object. Request for CreateTopic. + parent (str): + Required. The parent cluster in which to create the + topic. Structured like + ``projects/{project}/locations/{location}/clusters/{cluster}``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + topic (google.cloud.managedkafka_v1.types.Topic): + Required. Configuration of the topic to create. Its + ``name`` field is ignored. + + This corresponds to the ``topic`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + topic_id (str): + Required. The ID to use for the topic, which will become + the final component of the topic's name. + + This value is structured like: ``my-topic-name``. + + This corresponds to the ``topic_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.managedkafka_v1.types.Topic: + A Kafka topic in a given cluster. + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, topic, topic_id]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
+ if not isinstance(request, managed_kafka.CreateTopicRequest): + request = managed_kafka.CreateTopicRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if topic is not None: + request.topic = topic + if topic_id is not None: + request.topic_id = topic_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_topic] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def update_topic( + self, + request: Optional[Union[managed_kafka.UpdateTopicRequest, dict]] = None, + *, + topic: Optional[resources.Topic] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> resources.Topic: + r"""Updates the properties of a single topic. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import managedkafka_v1 + + def sample_update_topic(): + # Create a client + client = managedkafka_v1.ManagedKafkaClient() + + # Initialize request argument(s) + topic = managedkafka_v1.Topic() + topic.partition_count = 1634 + topic.replication_factor = 1912 + + request = managedkafka_v1.UpdateTopicRequest( + topic=topic, + ) + + # Make the request + response = client.update_topic(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.managedkafka_v1.types.UpdateTopicRequest, dict]): + The request object. Request for UpdateTopic. + topic (google.cloud.managedkafka_v1.types.Topic): + Required. The topic to update. Its ``name`` field must + be populated. + + This corresponds to the ``topic`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Required. Field mask is used to specify the fields to be + overwritten in the Topic resource by the update. The + fields specified in the update_mask are relative to the + resource, not the full request. A field will be + overwritten if it is in the mask. The mask is required + and a value of \* will update all fields. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.cloud.managedkafka_v1.types.Topic: + A Kafka topic in a given cluster. + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([topic, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, managed_kafka.UpdateTopicRequest): + request = managed_kafka.UpdateTopicRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if topic is not None: + request.topic = topic + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update_topic] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("topic.name", request.topic.name),) + ), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def delete_topic( + self, + request: Optional[Union[managed_kafka.DeleteTopicRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a single topic. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import managedkafka_v1 + + def sample_delete_topic(): + # Create a client + client = managedkafka_v1.ManagedKafkaClient() + + # Initialize request argument(s) + request = managedkafka_v1.DeleteTopicRequest( + name="name_value", + ) + + # Make the request + client.delete_topic(request=request) + + Args: + request (Union[google.cloud.managedkafka_v1.types.DeleteTopicRequest, dict]): + The request object. Request for DeleteTopic. + name (str): + Required. The name of the topic to delete. + ``projects/{project}/locations/{location}/clusters/{cluster}/topics/{topic}``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, managed_kafka.DeleteTopicRequest): + request = managed_kafka.DeleteTopicRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_topic] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def list_consumer_groups( + self, + request: Optional[Union[managed_kafka.ListConsumerGroupsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListConsumerGroupsPager: + r"""Lists the consumer groups in a given cluster. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import managedkafka_v1 + + def sample_list_consumer_groups(): + # Create a client + client = managedkafka_v1.ManagedKafkaClient() + + # Initialize request argument(s) + request = managedkafka_v1.ListConsumerGroupsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_consumer_groups(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.managedkafka_v1.types.ListConsumerGroupsRequest, dict]): + The request object. Request for ListConsumerGroups. + parent (str): + Required. The parent cluster whose consumer groups are + to be listed. Structured like + ``projects/{project}/locations/{location}/clusters/{cluster}``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.managedkafka_v1.services.managed_kafka.pagers.ListConsumerGroupsPager: + Response for ListConsumerGroups. + + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. 
+ # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, managed_kafka.ListConsumerGroupsRequest): + request = managed_kafka.ListConsumerGroupsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_consumer_groups] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListConsumerGroupsPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_consumer_group( + self, + request: Optional[Union[managed_kafka.GetConsumerGroupRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> resources.ConsumerGroup: + r"""Returns the properties of a single consumer group. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import managedkafka_v1 + + def sample_get_consumer_group(): + # Create a client + client = managedkafka_v1.ManagedKafkaClient() + + # Initialize request argument(s) + request = managedkafka_v1.GetConsumerGroupRequest( + name="name_value", + ) + + # Make the request + response = client.get_consumer_group(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.managedkafka_v1.types.GetConsumerGroupRequest, dict]): + The request object. Request for GetConsumerGroup. + name (str): + Required. The name of the consumer group whose + configuration to return. + ``projects/{project}/locations/{location}/clusters/{cluster}/consumerGroups/{consumerGroup}``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.managedkafka_v1.types.ConsumerGroup: + A Kafka consumer group in a given + cluster. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, managed_kafka.GetConsumerGroupRequest): + request = managed_kafka.GetConsumerGroupRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_consumer_group] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def update_consumer_group( + self, + request: Optional[Union[managed_kafka.UpdateConsumerGroupRequest, dict]] = None, + *, + consumer_group: Optional[resources.ConsumerGroup] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> resources.ConsumerGroup: + r"""Updates the properties of a single consumer group. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import managedkafka_v1 + + def sample_update_consumer_group(): + # Create a client + client = managedkafka_v1.ManagedKafkaClient() + + # Initialize request argument(s) + request = managedkafka_v1.UpdateConsumerGroupRequest( + ) + + # Make the request + response = client.update_consumer_group(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.managedkafka_v1.types.UpdateConsumerGroupRequest, dict]): + The request object. Request for UpdateConsumerGroup. + consumer_group (google.cloud.managedkafka_v1.types.ConsumerGroup): + Required. The consumer group to update. Its ``name`` + field must be populated. + + This corresponds to the ``consumer_group`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Required. 
Field mask is used to specify the fields to be + overwritten in the ConsumerGroup resource by the update. + The fields specified in the update_mask are relative to + the resource, not the full request. A field will be + overwritten if it is in the mask. The mask is required + and a value of \* will update all fields. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.managedkafka_v1.types.ConsumerGroup: + A Kafka consumer group in a given + cluster. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([consumer_group, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, managed_kafka.UpdateConsumerGroupRequest): + request = managed_kafka.UpdateConsumerGroupRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if consumer_group is not None: + request.consumer_group = consumer_group + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update_consumer_group] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("consumer_group.name", request.consumer_group.name),) + ), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def delete_consumer_group( + self, + request: Optional[Union[managed_kafka.DeleteConsumerGroupRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a single consumer group. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import managedkafka_v1 + + def sample_delete_consumer_group(): + # Create a client + client = managedkafka_v1.ManagedKafkaClient() + + # Initialize request argument(s) + request = managedkafka_v1.DeleteConsumerGroupRequest( + name="name_value", + ) + + # Make the request + client.delete_consumer_group(request=request) + + Args: + request (Union[google.cloud.managedkafka_v1.types.DeleteConsumerGroupRequest, dict]): + The request object. Request for DeleteConsumerGroup. + name (str): + Required. The name of the consumer group to delete. + ``projects/{project}/locations/{location}/clusters/{cluster}/consumerGroups/{consumerGroup}``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, managed_kafka.DeleteConsumerGroupRequest): + request = managed_kafka.DeleteConsumerGroupRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_consumer_group] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def __enter__(self) -> "ManagedKafkaClient": + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! + """ + self.transport.close() + + def list_operations( + self, + request: Optional[operations_pb2.ListOperationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Lists operations that match the specified filter in the request. + + Args: + request (:class:`~.operations_pb2.ListOperationsRequest`): + The request object. Request message for + `ListOperations` method. 
+ retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.ListOperationsResponse: + Response message for ``ListOperations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.ListOperationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.list_operations, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_operation( + self, + request: Optional[operations_pb2.GetOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. + + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.get_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def delete_operation( + self, + request: Optional[operations_pb2.DeleteOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a long-running operation. + + This method indicates that the client is no longer interested + in the operation result. It does not cancel the operation. + If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. 
+ + Args: + request (:class:`~.operations_pb2.DeleteOperationRequest`): + The request object. Request message for + `DeleteOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.DeleteOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.delete_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def cancel_operation( + self, + request: Optional[operations_pb2.CancelOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Starts asynchronous cancellation on a long-running operation. + + The server makes a best effort to cancel the operation, but success + is not guaranteed. If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.CancelOperationRequest`): + The request object. Request message for + `CancelOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.CancelOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.cancel_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def get_location( + self, + request: Optional[locations_pb2.GetLocationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.Location: + r"""Gets information about a location. + + Args: + request (:class:`~.location_pb2.GetLocationRequest`): + The request object. Request message for + `GetLocation` method. 
+ retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.location_pb2.Location: + Location object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = locations_pb2.GetLocationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.get_location, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def list_locations( + self, + request: Optional[locations_pb2.ListLocationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.ListLocationsResponse: + r"""Lists information about the supported locations for this service. + + Args: + request (:class:`~.location_pb2.ListLocationsRequest`): + The request object. Request message for + `ListLocations` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.location_pb2.ListLocationsResponse: + Response message for ``ListLocations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = locations_pb2.ListLocationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.list_locations, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("ManagedKafkaClient",) diff --git a/packages/google-cloud-managedkafka/google/cloud/managedkafka_v1/services/managed_kafka/pagers.py b/packages/google-cloud-managedkafka/google/cloud/managedkafka_v1/services/managed_kafka/pagers.py new file mode 100644 index 000000000000..b08710fe82dc --- /dev/null +++ b/packages/google-cloud-managedkafka/google/cloud/managedkafka_v1/services/managed_kafka/pagers.py @@ -0,0 +1,411 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import ( + Any, + AsyncIterator, + Awaitable, + Callable, + Iterator, + Optional, + Sequence, + Tuple, +) + +from google.cloud.managedkafka_v1.types import managed_kafka, resources + + +class ListClustersPager: + """A pager for iterating through ``list_clusters`` requests. + + This class thinly wraps an initial + :class:`google.cloud.managedkafka_v1.types.ListClustersResponse` object, and + provides an ``__iter__`` method to iterate through its + ``clusters`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListClusters`` requests and continue to iterate + through the ``clusters`` field on the + corresponding responses. + + All the usual :class:`google.cloud.managedkafka_v1.types.ListClustersResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., managed_kafka.ListClustersResponse], + request: managed_kafka.ListClustersRequest, + response: managed_kafka.ListClustersResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.managedkafka_v1.types.ListClustersRequest): + The initial request object. + response (google.cloud.managedkafka_v1.types.ListClustersResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = managed_kafka.ListClustersRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[managed_kafka.ListClustersResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[resources.Cluster]: + for page in self.pages: + yield from page.clusters + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListClustersAsyncPager: + """A pager for iterating through ``list_clusters`` requests. + + This class thinly wraps an initial + :class:`google.cloud.managedkafka_v1.types.ListClustersResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``clusters`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListClusters`` requests and continue to iterate + through the ``clusters`` field on the + corresponding responses. + + All the usual :class:`google.cloud.managedkafka_v1.types.ListClustersResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., Awaitable[managed_kafka.ListClustersResponse]], + request: managed_kafka.ListClustersRequest, + response: managed_kafka.ListClustersResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.managedkafka_v1.types.ListClustersRequest): + The initial request object. + response (google.cloud.managedkafka_v1.types.ListClustersResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = managed_kafka.ListClustersRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[managed_kafka.ListClustersResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + + def __aiter__(self) -> AsyncIterator[resources.Cluster]: + async def async_generator(): + async for page in self.pages: + for response in page.clusters: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListTopicsPager: + """A pager for iterating through ``list_topics`` requests. + + This class thinly wraps an initial + :class:`google.cloud.managedkafka_v1.types.ListTopicsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``topics`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListTopics`` requests and continue to iterate + through the ``topics`` field on the + corresponding responses. 
+ + All the usual :class:`google.cloud.managedkafka_v1.types.ListTopicsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., managed_kafka.ListTopicsResponse], + request: managed_kafka.ListTopicsRequest, + response: managed_kafka.ListTopicsResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.managedkafka_v1.types.ListTopicsRequest): + The initial request object. + response (google.cloud.managedkafka_v1.types.ListTopicsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = managed_kafka.ListTopicsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[managed_kafka.ListTopicsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[resources.Topic]: + for page in self.pages: + yield from page.topics + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListTopicsAsyncPager: + """A pager for iterating through ``list_topics`` requests. + + This class thinly wraps an initial + :class:`google.cloud.managedkafka_v1.types.ListTopicsResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``topics`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListTopics`` requests and continue to iterate + through the ``topics`` field on the + corresponding responses. + + All the usual :class:`google.cloud.managedkafka_v1.types.ListTopicsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., Awaitable[managed_kafka.ListTopicsResponse]], + request: managed_kafka.ListTopicsRequest, + response: managed_kafka.ListTopicsResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.managedkafka_v1.types.ListTopicsRequest): + The initial request object. + response (google.cloud.managedkafka_v1.types.ListTopicsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = managed_kafka.ListTopicsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[managed_kafka.ListTopicsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + + def __aiter__(self) -> AsyncIterator[resources.Topic]: + async def async_generator(): + async for page in self.pages: + for response in page.topics: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListConsumerGroupsPager: + """A pager for iterating through ``list_consumer_groups`` requests. + + This class thinly wraps an initial + :class:`google.cloud.managedkafka_v1.types.ListConsumerGroupsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``consumer_groups`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListConsumerGroups`` requests and continue to iterate + through the ``consumer_groups`` field on the + corresponding responses. + + All the usual :class:`google.cloud.managedkafka_v1.types.ListConsumerGroupsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., managed_kafka.ListConsumerGroupsResponse], + request: managed_kafka.ListConsumerGroupsRequest, + response: managed_kafka.ListConsumerGroupsResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.managedkafka_v1.types.ListConsumerGroupsRequest): + The initial request object. + response (google.cloud.managedkafka_v1.types.ListConsumerGroupsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = managed_kafka.ListConsumerGroupsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[managed_kafka.ListConsumerGroupsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[resources.ConsumerGroup]: + for page in self.pages: + yield from page.consumer_groups + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListConsumerGroupsAsyncPager: + """A pager for iterating through ``list_consumer_groups`` requests. + + This class thinly wraps an initial + :class:`google.cloud.managedkafka_v1.types.ListConsumerGroupsResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``consumer_groups`` field. 
+ + If there are more pages, the ``__aiter__`` method will make additional + ``ListConsumerGroups`` requests and continue to iterate + through the ``consumer_groups`` field on the + corresponding responses. + + All the usual :class:`google.cloud.managedkafka_v1.types.ListConsumerGroupsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., Awaitable[managed_kafka.ListConsumerGroupsResponse]], + request: managed_kafka.ListConsumerGroupsRequest, + response: managed_kafka.ListConsumerGroupsResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.managedkafka_v1.types.ListConsumerGroupsRequest): + The initial request object. + response (google.cloud.managedkafka_v1.types.ListConsumerGroupsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = managed_kafka.ListConsumerGroupsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[managed_kafka.ListConsumerGroupsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + + def __aiter__(self) -> AsyncIterator[resources.ConsumerGroup]: + async def async_generator(): + async for page in self.pages: + for response in page.consumer_groups: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) diff --git a/packages/google-cloud-managedkafka/google/cloud/managedkafka_v1/services/managed_kafka/transports/__init__.py b/packages/google-cloud-managedkafka/google/cloud/managedkafka_v1/services/managed_kafka/transports/__init__.py new file mode 100644 index 000000000000..fd5098983f13 --- /dev/null +++ b/packages/google-cloud-managedkafka/google/cloud/managedkafka_v1/services/managed_kafka/transports/__init__.py @@ -0,0 +1,36 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +from typing import Dict, Type + +from .base import ManagedKafkaTransport +from .grpc import ManagedKafkaGrpcTransport +from .grpc_asyncio import ManagedKafkaGrpcAsyncIOTransport +from .rest import ManagedKafkaRestInterceptor, ManagedKafkaRestTransport + +# Compile a registry of transports. 
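+# The string keys ("grpc", "grpc_asyncio", "rest") name the available
+# transports; looking one up yields the concrete class to instantiate, e.g.
+# ``_transport_registry["grpc"]`` is ``ManagedKafkaGrpcTransport`` (see the
+# assignments below).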
+_transport_registry = OrderedDict() # type: Dict[str, Type[ManagedKafkaTransport]] +_transport_registry["grpc"] = ManagedKafkaGrpcTransport +_transport_registry["grpc_asyncio"] = ManagedKafkaGrpcAsyncIOTransport +_transport_registry["rest"] = ManagedKafkaRestTransport + +__all__ = ( + "ManagedKafkaTransport", + "ManagedKafkaGrpcTransport", + "ManagedKafkaGrpcAsyncIOTransport", + "ManagedKafkaRestTransport", + "ManagedKafkaRestInterceptor", +) diff --git a/packages/google-cloud-managedkafka/google/cloud/managedkafka_v1/services/managed_kafka/transports/base.py b/packages/google-cloud-managedkafka/google/cloud/managedkafka_v1/services/managed_kafka/transports/base.py new file mode 100644 index 000000000000..0b0ea6ee322e --- /dev/null +++ b/packages/google-cloud-managedkafka/google/cloud/managedkafka_v1/services/managed_kafka/transports/base.py @@ -0,0 +1,466 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import abc +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union + +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1, operations_v1 +from google.api_core import retry as retries +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.oauth2 import service_account # type: ignore +from google.protobuf import empty_pb2 # type: ignore + +from google.cloud.managedkafka_v1 import gapic_version as package_version +from google.cloud.managedkafka_v1.types import managed_kafka, resources + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +class ManagedKafkaTransport(abc.ABC): + """Abstract transport class for ManagedKafka.""" + + AUTH_SCOPES = ("https://www.googleapis.com/auth/cloud-platform",) + + DEFAULT_HOST: str = "managedkafka.googleapis.com" + + def __init__( + self, + *, + host: str = DEFAULT_HOST, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + **kwargs, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'managedkafka.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. 
+ credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A list of scopes. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + """ + + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} + + # Save the scopes. + self._scopes = scopes + + # If no credentials are provided, then determine the appropriate + # defaults. + if credentials and credentials_file: + raise core_exceptions.DuplicateCredentialArgs( + "'credentials_file' and 'credentials' are mutually exclusive" + ) + + if credentials_file is not None: + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, **scopes_kwargs, quota_project_id=quota_project_id + ) + elif credentials is None: + credentials, _ = google.auth.default( + **scopes_kwargs, quota_project_id=quota_project_id + ) + # Don't apply audience if the credentials file passed from user. + if hasattr(credentials, "with_gdch_audience"): + credentials = credentials.with_gdch_audience( + api_audience if api_audience else host + ) + + # If the credentials are service account credentials, then always try to use self signed JWT. + if ( + always_use_jwt_access + and isinstance(credentials, service_account.Credentials) + and hasattr(service_account.Credentials, "with_always_use_jwt_access") + ): + credentials = credentials.with_always_use_jwt_access(True) + + # Save the credentials. + self._credentials = credentials + + # Save the hostname. Default to port 443 (HTTPS) if none is specified. + if ":" not in host: + host += ":443" + self._host = host + + @property + def host(self): + return self._host + + def _prep_wrapped_messages(self, client_info): + # Precompute the wrapped methods. 
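+ # Read-only RPCs (the List*/Get* methods) are wrapped with a retry policy
+ # that retries on ServiceUnavailable using exponential backoff (initial
+ # 1.0s, multiplier 1.3, capped at 10.0s) under an overall 60s deadline;
+ # mutating RPCs are given a plain 60s default timeout.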
+ self._wrapped_methods = { + self.list_clusters: gapic_v1.method.wrap_method( + self.list_clusters, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.get_cluster: gapic_v1.method.wrap_method( + self.get_cluster, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.create_cluster: gapic_v1.method.wrap_method( + self.create_cluster, + default_timeout=60.0, + client_info=client_info, + ), + self.update_cluster: gapic_v1.method.wrap_method( + self.update_cluster, + default_timeout=60.0, + client_info=client_info, + ), + self.delete_cluster: gapic_v1.method.wrap_method( + self.delete_cluster, + default_timeout=60.0, + client_info=client_info, + ), + self.list_topics: gapic_v1.method.wrap_method( + self.list_topics, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.get_topic: gapic_v1.method.wrap_method( + self.get_topic, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.create_topic: gapic_v1.method.wrap_method( + self.create_topic, + default_timeout=60.0, + client_info=client_info, + ), + self.update_topic: gapic_v1.method.wrap_method( + self.update_topic, + default_timeout=60.0, + client_info=client_info, + ), + self.delete_topic: gapic_v1.method.wrap_method( + self.delete_topic, + default_timeout=60.0, + client_info=client_info, + ), + self.list_consumer_groups: gapic_v1.method.wrap_method( + self.list_consumer_groups, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.get_consumer_group: gapic_v1.method.wrap_method( + self.get_consumer_group, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.update_consumer_group: gapic_v1.method.wrap_method( + self.update_consumer_group, + default_timeout=60.0, + client_info=client_info, + ), + self.delete_consumer_group: gapic_v1.method.wrap_method( + self.delete_consumer_group, + default_timeout=60.0, + client_info=client_info, + ), + } + + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! 
+ """ + raise NotImplementedError() + + @property + def operations_client(self): + """Return the client designed to process long-running operations.""" + raise NotImplementedError() + + @property + def list_clusters( + self, + ) -> Callable[ + [managed_kafka.ListClustersRequest], + Union[ + managed_kafka.ListClustersResponse, + Awaitable[managed_kafka.ListClustersResponse], + ], + ]: + raise NotImplementedError() + + @property + def get_cluster( + self, + ) -> Callable[ + [managed_kafka.GetClusterRequest], + Union[resources.Cluster, Awaitable[resources.Cluster]], + ]: + raise NotImplementedError() + + @property + def create_cluster( + self, + ) -> Callable[ + [managed_kafka.CreateClusterRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def update_cluster( + self, + ) -> Callable[ + [managed_kafka.UpdateClusterRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def delete_cluster( + self, + ) -> Callable[ + [managed_kafka.DeleteClusterRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def list_topics( + self, + ) -> Callable[ + [managed_kafka.ListTopicsRequest], + Union[ + managed_kafka.ListTopicsResponse, + Awaitable[managed_kafka.ListTopicsResponse], + ], + ]: + raise NotImplementedError() + + @property + def get_topic( + self, + ) -> Callable[ + [managed_kafka.GetTopicRequest], + Union[resources.Topic, Awaitable[resources.Topic]], + ]: + raise NotImplementedError() + + @property + def create_topic( + self, + ) -> Callable[ + [managed_kafka.CreateTopicRequest], + Union[resources.Topic, Awaitable[resources.Topic]], + ]: + raise NotImplementedError() + + @property + def update_topic( + self, + ) -> Callable[ + [managed_kafka.UpdateTopicRequest], + Union[resources.Topic, Awaitable[resources.Topic]], + ]: + raise NotImplementedError() + + @property + def delete_topic( + self, + ) -> Callable[ + [managed_kafka.DeleteTopicRequest], + Union[empty_pb2.Empty, Awaitable[empty_pb2.Empty]], + ]: + raise NotImplementedError() + + @property + def list_consumer_groups( + self, + ) -> Callable[ + [managed_kafka.ListConsumerGroupsRequest], + Union[ + managed_kafka.ListConsumerGroupsResponse, + Awaitable[managed_kafka.ListConsumerGroupsResponse], + ], + ]: + raise NotImplementedError() + + @property + def get_consumer_group( + self, + ) -> Callable[ + [managed_kafka.GetConsumerGroupRequest], + Union[resources.ConsumerGroup, Awaitable[resources.ConsumerGroup]], + ]: + raise NotImplementedError() + + @property + def update_consumer_group( + self, + ) -> Callable[ + [managed_kafka.UpdateConsumerGroupRequest], + Union[resources.ConsumerGroup, Awaitable[resources.ConsumerGroup]], + ]: + raise NotImplementedError() + + @property + def delete_consumer_group( + self, + ) -> Callable[ + [managed_kafka.DeleteConsumerGroupRequest], + Union[empty_pb2.Empty, Awaitable[empty_pb2.Empty]], + ]: + raise NotImplementedError() + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], + Union[ + operations_pb2.ListOperationsResponse, + Awaitable[operations_pb2.ListOperationsResponse], + ], + ]: + raise NotImplementedError() + + @property + def get_operation( + self, + ) -> Callable[ + [operations_pb2.GetOperationRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + 
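+ # The remaining mixin methods (operation cancellation and deletion,
+ # location lookup and listing, and the ``kind`` property) are likewise
+ # declared abstract here; each concrete transport supplies the
+ # implementation.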
@property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None,]: + raise NotImplementedError() + + @property + def delete_operation( + self, + ) -> Callable[[operations_pb2.DeleteOperationRequest], None,]: + raise NotImplementedError() + + @property + def get_location( + self, + ) -> Callable[ + [locations_pb2.GetLocationRequest], + Union[locations_pb2.Location, Awaitable[locations_pb2.Location]], + ]: + raise NotImplementedError() + + @property + def list_locations( + self, + ) -> Callable[ + [locations_pb2.ListLocationsRequest], + Union[ + locations_pb2.ListLocationsResponse, + Awaitable[locations_pb2.ListLocationsResponse], + ], + ]: + raise NotImplementedError() + + @property + def kind(self) -> str: + raise NotImplementedError() + + +__all__ = ("ManagedKafkaTransport",) diff --git a/packages/google-cloud-managedkafka/google/cloud/managedkafka_v1/services/managed_kafka/transports/grpc.py b/packages/google-cloud-managedkafka/google/cloud/managedkafka_v1/services/managed_kafka/transports/grpc.py new file mode 100644 index 000000000000..c6dc921612f2 --- /dev/null +++ b/packages/google-cloud-managedkafka/google/cloud/managedkafka_v1/services/managed_kafka/transports/grpc.py @@ -0,0 +1,737 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, grpc_helpers, operations_v1 +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore +import grpc # type: ignore + +from google.cloud.managedkafka_v1.types import managed_kafka, resources + +from .base import DEFAULT_CLIENT_INFO, ManagedKafkaTransport + + +class ManagedKafkaGrpcTransport(ManagedKafkaTransport): + """gRPC backend transport for ManagedKafka. + + The service that a client application uses to manage Apache + Kafka clusters, topics and consumer groups. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. 
+ """ + + _stubs: Dict[str, Callable] + + def __init__( + self, + *, + host: str = "managedkafka.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'managedkafka.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if a ``channel`` instance is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if a ``channel`` instance is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if a ``channel`` instance is provided. + channel (Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]]): + A ``Channel`` instance through which to make calls, or a Callable + that constructs and returns one. If set to None, ``self.create_channel`` + is used to create the channel. If a Callable is given, it will be called + with the same arguments as used in ``self.create_channel``. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if a ``channel`` instance is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. 
+ google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + self._operations_client: Optional[operations_v1.OperationsClient] = None + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if isinstance(channel, grpc.Channel): + # Ignore credentials if a channel was passed. + credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + # initialize with the provided callable or the default channel + channel_init = channel or type(self).create_channel + self._grpc_channel = channel_init( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @classmethod + def create_channel( + cls, + host: str = "managedkafka.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> grpc.Channel: + """Create and return a gRPC channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. 
+ quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + grpc.Channel: A gRPC channel object. + + Raises: + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + + return grpc_helpers.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs, + ) + + @property + def grpc_channel(self) -> grpc.Channel: + """Return the channel designed to connect to this service.""" + return self._grpc_channel + + @property + def operations_client(self) -> operations_v1.OperationsClient: + """Create the client designed to process long-running operations. + + This property caches on the instance; repeated calls return the same + client. + """ + # Quick check: Only create a new client if we do not already have one. + if self._operations_client is None: + self._operations_client = operations_v1.OperationsClient(self.grpc_channel) + + # Return the client from cache. + return self._operations_client + + @property + def list_clusters( + self, + ) -> Callable[ + [managed_kafka.ListClustersRequest], managed_kafka.ListClustersResponse + ]: + r"""Return a callable for the list clusters method over gRPC. + + Lists the clusters in a given project and location. + + Returns: + Callable[[~.ListClustersRequest], + ~.ListClustersResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_clusters" not in self._stubs: + self._stubs["list_clusters"] = self.grpc_channel.unary_unary( + "/google.cloud.managedkafka.v1.ManagedKafka/ListClusters", + request_serializer=managed_kafka.ListClustersRequest.serialize, + response_deserializer=managed_kafka.ListClustersResponse.deserialize, + ) + return self._stubs["list_clusters"] + + @property + def get_cluster( + self, + ) -> Callable[[managed_kafka.GetClusterRequest], resources.Cluster]: + r"""Return a callable for the get cluster method over gRPC. + + Returns the properties of a single cluster. + + Returns: + Callable[[~.GetClusterRequest], + ~.Cluster]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_cluster" not in self._stubs: + self._stubs["get_cluster"] = self.grpc_channel.unary_unary( + "/google.cloud.managedkafka.v1.ManagedKafka/GetCluster", + request_serializer=managed_kafka.GetClusterRequest.serialize, + response_deserializer=resources.Cluster.deserialize, + ) + return self._stubs["get_cluster"] + + @property + def create_cluster( + self, + ) -> Callable[[managed_kafka.CreateClusterRequest], operations_pb2.Operation]: + r"""Return a callable for the create cluster method over gRPC. + + Creates a new cluster in a given project and + location. + + Returns: + Callable[[~.CreateClusterRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. 
+ """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_cluster" not in self._stubs: + self._stubs["create_cluster"] = self.grpc_channel.unary_unary( + "/google.cloud.managedkafka.v1.ManagedKafka/CreateCluster", + request_serializer=managed_kafka.CreateClusterRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["create_cluster"] + + @property + def update_cluster( + self, + ) -> Callable[[managed_kafka.UpdateClusterRequest], operations_pb2.Operation]: + r"""Return a callable for the update cluster method over gRPC. + + Updates the properties of a single cluster. + + Returns: + Callable[[~.UpdateClusterRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_cluster" not in self._stubs: + self._stubs["update_cluster"] = self.grpc_channel.unary_unary( + "/google.cloud.managedkafka.v1.ManagedKafka/UpdateCluster", + request_serializer=managed_kafka.UpdateClusterRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["update_cluster"] + + @property + def delete_cluster( + self, + ) -> Callable[[managed_kafka.DeleteClusterRequest], operations_pb2.Operation]: + r"""Return a callable for the delete cluster method over gRPC. + + Deletes a single cluster. + + Returns: + Callable[[~.DeleteClusterRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_cluster" not in self._stubs: + self._stubs["delete_cluster"] = self.grpc_channel.unary_unary( + "/google.cloud.managedkafka.v1.ManagedKafka/DeleteCluster", + request_serializer=managed_kafka.DeleteClusterRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["delete_cluster"] + + @property + def list_topics( + self, + ) -> Callable[[managed_kafka.ListTopicsRequest], managed_kafka.ListTopicsResponse]: + r"""Return a callable for the list topics method over gRPC. + + Lists the topics in a given cluster. + + Returns: + Callable[[~.ListTopicsRequest], + ~.ListTopicsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_topics" not in self._stubs: + self._stubs["list_topics"] = self.grpc_channel.unary_unary( + "/google.cloud.managedkafka.v1.ManagedKafka/ListTopics", + request_serializer=managed_kafka.ListTopicsRequest.serialize, + response_deserializer=managed_kafka.ListTopicsResponse.deserialize, + ) + return self._stubs["list_topics"] + + @property + def get_topic(self) -> Callable[[managed_kafka.GetTopicRequest], resources.Topic]: + r"""Return a callable for the get topic method over gRPC. + + Returns the properties of a single topic. 
+ + Returns: + Callable[[~.GetTopicRequest], + ~.Topic]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_topic" not in self._stubs: + self._stubs["get_topic"] = self.grpc_channel.unary_unary( + "/google.cloud.managedkafka.v1.ManagedKafka/GetTopic", + request_serializer=managed_kafka.GetTopicRequest.serialize, + response_deserializer=resources.Topic.deserialize, + ) + return self._stubs["get_topic"] + + @property + def create_topic( + self, + ) -> Callable[[managed_kafka.CreateTopicRequest], resources.Topic]: + r"""Return a callable for the create topic method over gRPC. + + Creates a new topic in a given project and location. + + Returns: + Callable[[~.CreateTopicRequest], + ~.Topic]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_topic" not in self._stubs: + self._stubs["create_topic"] = self.grpc_channel.unary_unary( + "/google.cloud.managedkafka.v1.ManagedKafka/CreateTopic", + request_serializer=managed_kafka.CreateTopicRequest.serialize, + response_deserializer=resources.Topic.deserialize, + ) + return self._stubs["create_topic"] + + @property + def update_topic( + self, + ) -> Callable[[managed_kafka.UpdateTopicRequest], resources.Topic]: + r"""Return a callable for the update topic method over gRPC. + + Updates the properties of a single topic. + + Returns: + Callable[[~.UpdateTopicRequest], + ~.Topic]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_topic" not in self._stubs: + self._stubs["update_topic"] = self.grpc_channel.unary_unary( + "/google.cloud.managedkafka.v1.ManagedKafka/UpdateTopic", + request_serializer=managed_kafka.UpdateTopicRequest.serialize, + response_deserializer=resources.Topic.deserialize, + ) + return self._stubs["update_topic"] + + @property + def delete_topic( + self, + ) -> Callable[[managed_kafka.DeleteTopicRequest], empty_pb2.Empty]: + r"""Return a callable for the delete topic method over gRPC. + + Deletes a single topic. + + Returns: + Callable[[~.DeleteTopicRequest], + ~.Empty]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_topic" not in self._stubs: + self._stubs["delete_topic"] = self.grpc_channel.unary_unary( + "/google.cloud.managedkafka.v1.ManagedKafka/DeleteTopic", + request_serializer=managed_kafka.DeleteTopicRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs["delete_topic"] + + @property + def list_consumer_groups( + self, + ) -> Callable[ + [managed_kafka.ListConsumerGroupsRequest], + managed_kafka.ListConsumerGroupsResponse, + ]: + r"""Return a callable for the list consumer groups method over gRPC. + + Lists the consumer groups in a given cluster. 
+ + Returns: + Callable[[~.ListConsumerGroupsRequest], + ~.ListConsumerGroupsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_consumer_groups" not in self._stubs: + self._stubs["list_consumer_groups"] = self.grpc_channel.unary_unary( + "/google.cloud.managedkafka.v1.ManagedKafka/ListConsumerGroups", + request_serializer=managed_kafka.ListConsumerGroupsRequest.serialize, + response_deserializer=managed_kafka.ListConsumerGroupsResponse.deserialize, + ) + return self._stubs["list_consumer_groups"] + + @property + def get_consumer_group( + self, + ) -> Callable[[managed_kafka.GetConsumerGroupRequest], resources.ConsumerGroup]: + r"""Return a callable for the get consumer group method over gRPC. + + Returns the properties of a single consumer group. + + Returns: + Callable[[~.GetConsumerGroupRequest], + ~.ConsumerGroup]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_consumer_group" not in self._stubs: + self._stubs["get_consumer_group"] = self.grpc_channel.unary_unary( + "/google.cloud.managedkafka.v1.ManagedKafka/GetConsumerGroup", + request_serializer=managed_kafka.GetConsumerGroupRequest.serialize, + response_deserializer=resources.ConsumerGroup.deserialize, + ) + return self._stubs["get_consumer_group"] + + @property + def update_consumer_group( + self, + ) -> Callable[[managed_kafka.UpdateConsumerGroupRequest], resources.ConsumerGroup]: + r"""Return a callable for the update consumer group method over gRPC. + + Updates the properties of a single consumer group. + + Returns: + Callable[[~.UpdateConsumerGroupRequest], + ~.ConsumerGroup]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_consumer_group" not in self._stubs: + self._stubs["update_consumer_group"] = self.grpc_channel.unary_unary( + "/google.cloud.managedkafka.v1.ManagedKafka/UpdateConsumerGroup", + request_serializer=managed_kafka.UpdateConsumerGroupRequest.serialize, + response_deserializer=resources.ConsumerGroup.deserialize, + ) + return self._stubs["update_consumer_group"] + + @property + def delete_consumer_group( + self, + ) -> Callable[[managed_kafka.DeleteConsumerGroupRequest], empty_pb2.Empty]: + r"""Return a callable for the delete consumer group method over gRPC. + + Deletes a single consumer group. + + Returns: + Callable[[~.DeleteConsumerGroupRequest], + ~.Empty]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
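+ # As with every RPC on this transport, the stub is created lazily on
+ # first property access and cached in ``self._stubs`` so later lookups
+ # reuse the same callable.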
+ if "delete_consumer_group" not in self._stubs: + self._stubs["delete_consumer_group"] = self.grpc_channel.unary_unary( + "/google.cloud.managedkafka.v1.ManagedKafka/DeleteConsumerGroup", + request_serializer=managed_kafka.DeleteConsumerGroupRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs["delete_consumer_group"] + + def close(self): + self.grpc_channel.close() + + @property + def delete_operation( + self, + ) -> Callable[[operations_pb2.DeleteOperationRequest], None]: + r"""Return a callable for the delete_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_operation" not in self._stubs: + self._stubs["delete_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/DeleteOperation", + request_serializer=operations_pb2.DeleteOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["delete_operation"] + + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None]: + r"""Return a callable for the cancel_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "cancel_operation" not in self._stubs: + self._stubs["cancel_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/CancelOperation", + request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["cancel_operation"] + + @property + def get_operation( + self, + ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: + r"""Return a callable for the get_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_operation" not in self._stubs: + self._stubs["get_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/GetOperation", + request_serializer=operations_pb2.GetOperationRequest.SerializeToString, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["get_operation"] + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse + ]: + r"""Return a callable for the list_operations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
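+ # The operations and locations stubs target the
+ # google.longrunning.Operations and google.cloud.location.Locations mixin
+ # services rather than the ManagedKafka service itself.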
+ if "list_operations" not in self._stubs: + self._stubs["list_operations"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/ListOperations", + request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, + response_deserializer=operations_pb2.ListOperationsResponse.FromString, + ) + return self._stubs["list_operations"] + + @property + def list_locations( + self, + ) -> Callable[ + [locations_pb2.ListLocationsRequest], locations_pb2.ListLocationsResponse + ]: + r"""Return a callable for the list locations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_locations" not in self._stubs: + self._stubs["list_locations"] = self.grpc_channel.unary_unary( + "/google.cloud.location.Locations/ListLocations", + request_serializer=locations_pb2.ListLocationsRequest.SerializeToString, + response_deserializer=locations_pb2.ListLocationsResponse.FromString, + ) + return self._stubs["list_locations"] + + @property + def get_location( + self, + ) -> Callable[[locations_pb2.GetLocationRequest], locations_pb2.Location]: + r"""Return a callable for the list locations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_location" not in self._stubs: + self._stubs["get_location"] = self.grpc_channel.unary_unary( + "/google.cloud.location.Locations/GetLocation", + request_serializer=locations_pb2.GetLocationRequest.SerializeToString, + response_deserializer=locations_pb2.Location.FromString, + ) + return self._stubs["get_location"] + + @property + def kind(self) -> str: + return "grpc" + + +__all__ = ("ManagedKafkaGrpcTransport",) diff --git a/packages/google-cloud-managedkafka/google/cloud/managedkafka_v1/services/managed_kafka/transports/grpc_asyncio.py b/packages/google-cloud-managedkafka/google/cloud/managedkafka_v1/services/managed_kafka/transports/grpc_asyncio.py new file mode 100644 index 000000000000..50ff49fee37d --- /dev/null +++ b/packages/google-cloud-managedkafka/google/cloud/managedkafka_v1/services/managed_kafka/transports/grpc_asyncio.py @@ -0,0 +1,885 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1, grpc_helpers_async, operations_v1 +from google.api_core import retry_async as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore +import grpc # type: ignore +from grpc.experimental import aio # type: ignore + +from google.cloud.managedkafka_v1.types import managed_kafka, resources + +from .base import DEFAULT_CLIENT_INFO, ManagedKafkaTransport +from .grpc import ManagedKafkaGrpcTransport + + +class ManagedKafkaGrpcAsyncIOTransport(ManagedKafkaTransport): + """gRPC AsyncIO backend transport for ManagedKafka. + + The service that a client application uses to manage Apache + Kafka clusters, topics and consumer groups. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + + _grpc_channel: aio.Channel + _stubs: Dict[str, Callable] = {} + + @classmethod + def create_channel( + cls, + host: str = "managedkafka.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> aio.Channel: + """Create and return a gRPC AsyncIO channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + aio.Channel: A gRPC AsyncIO channel object. 
+ """ + + return grpc_helpers_async.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs, + ) + + def __init__( + self, + *, + host: str = "managedkafka.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[Union[aio.Channel, Callable[..., aio.Channel]]] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'managedkafka.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if a ``channel`` instance is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if a ``channel`` instance is provided. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + channel (Optional[Union[aio.Channel, Callable[..., aio.Channel]]]): + A ``Channel`` instance through which to make calls, or a Callable + that constructs and returns one. If set to None, ``self.create_channel`` + is used to create the channel. If a Callable is given, it will be called + with the same arguments as used in ``self.create_channel``. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if a ``channel`` instance is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. 
+ Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + self._operations_client: Optional[operations_v1.OperationsAsyncClient] = None + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if isinstance(channel, aio.Channel): + # Ignore credentials if a channel was passed. + credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + # initialize with the provided callable or the default channel + channel_init = channel or type(self).create_channel + self._grpc_channel = channel_init( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @property + def grpc_channel(self) -> aio.Channel: + """Create the channel designed to connect to this service. + + This property caches on the instance; repeated calls return + the same channel. + """ + # Return the channel from cache. + return self._grpc_channel + + @property + def operations_client(self) -> operations_v1.OperationsAsyncClient: + """Create the client designed to process long-running operations. + + This property caches on the instance; repeated calls return the same + client. + """ + # Quick check: Only create a new client if we do not already have one. + if self._operations_client is None: + self._operations_client = operations_v1.OperationsAsyncClient( + self.grpc_channel + ) + + # Return the client from cache. 
+ return self._operations_client + + @property + def list_clusters( + self, + ) -> Callable[ + [managed_kafka.ListClustersRequest], + Awaitable[managed_kafka.ListClustersResponse], + ]: + r"""Return a callable for the list clusters method over gRPC. + + Lists the clusters in a given project and location. + + Returns: + Callable[[~.ListClustersRequest], + Awaitable[~.ListClustersResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_clusters" not in self._stubs: + self._stubs["list_clusters"] = self.grpc_channel.unary_unary( + "/google.cloud.managedkafka.v1.ManagedKafka/ListClusters", + request_serializer=managed_kafka.ListClustersRequest.serialize, + response_deserializer=managed_kafka.ListClustersResponse.deserialize, + ) + return self._stubs["list_clusters"] + + @property + def get_cluster( + self, + ) -> Callable[[managed_kafka.GetClusterRequest], Awaitable[resources.Cluster]]: + r"""Return a callable for the get cluster method over gRPC. + + Returns the properties of a single cluster. + + Returns: + Callable[[~.GetClusterRequest], + Awaitable[~.Cluster]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_cluster" not in self._stubs: + self._stubs["get_cluster"] = self.grpc_channel.unary_unary( + "/google.cloud.managedkafka.v1.ManagedKafka/GetCluster", + request_serializer=managed_kafka.GetClusterRequest.serialize, + response_deserializer=resources.Cluster.deserialize, + ) + return self._stubs["get_cluster"] + + @property + def create_cluster( + self, + ) -> Callable[ + [managed_kafka.CreateClusterRequest], Awaitable[operations_pb2.Operation] + ]: + r"""Return a callable for the create cluster method over gRPC. + + Creates a new cluster in a given project and + location. + + Returns: + Callable[[~.CreateClusterRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_cluster" not in self._stubs: + self._stubs["create_cluster"] = self.grpc_channel.unary_unary( + "/google.cloud.managedkafka.v1.ManagedKafka/CreateCluster", + request_serializer=managed_kafka.CreateClusterRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["create_cluster"] + + @property + def update_cluster( + self, + ) -> Callable[ + [managed_kafka.UpdateClusterRequest], Awaitable[operations_pb2.Operation] + ]: + r"""Return a callable for the update cluster method over gRPC. + + Updates the properties of a single cluster. + + Returns: + Callable[[~.UpdateClusterRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
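+        # Illustrative usage sketch (not generated code), assuming an existing
+        # transport instance and a populated request message:
+        #
+        #     request = managed_kafka.UpdateClusterRequest()  # fields omitted
+        #     rpc = transport.update_cluster
+        #     operation = await rpc(request)
+        #
+        # The awaited result is a raw ``operations_pb2.Operation``; the async
+        # client normally wraps it before handing it back to user code.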
+ if "update_cluster" not in self._stubs: + self._stubs["update_cluster"] = self.grpc_channel.unary_unary( + "/google.cloud.managedkafka.v1.ManagedKafka/UpdateCluster", + request_serializer=managed_kafka.UpdateClusterRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["update_cluster"] + + @property + def delete_cluster( + self, + ) -> Callable[ + [managed_kafka.DeleteClusterRequest], Awaitable[operations_pb2.Operation] + ]: + r"""Return a callable for the delete cluster method over gRPC. + + Deletes a single cluster. + + Returns: + Callable[[~.DeleteClusterRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_cluster" not in self._stubs: + self._stubs["delete_cluster"] = self.grpc_channel.unary_unary( + "/google.cloud.managedkafka.v1.ManagedKafka/DeleteCluster", + request_serializer=managed_kafka.DeleteClusterRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["delete_cluster"] + + @property + def list_topics( + self, + ) -> Callable[ + [managed_kafka.ListTopicsRequest], Awaitable[managed_kafka.ListTopicsResponse] + ]: + r"""Return a callable for the list topics method over gRPC. + + Lists the topics in a given cluster. + + Returns: + Callable[[~.ListTopicsRequest], + Awaitable[~.ListTopicsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_topics" not in self._stubs: + self._stubs["list_topics"] = self.grpc_channel.unary_unary( + "/google.cloud.managedkafka.v1.ManagedKafka/ListTopics", + request_serializer=managed_kafka.ListTopicsRequest.serialize, + response_deserializer=managed_kafka.ListTopicsResponse.deserialize, + ) + return self._stubs["list_topics"] + + @property + def get_topic( + self, + ) -> Callable[[managed_kafka.GetTopicRequest], Awaitable[resources.Topic]]: + r"""Return a callable for the get topic method over gRPC. + + Returns the properties of a single topic. + + Returns: + Callable[[~.GetTopicRequest], + Awaitable[~.Topic]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_topic" not in self._stubs: + self._stubs["get_topic"] = self.grpc_channel.unary_unary( + "/google.cloud.managedkafka.v1.ManagedKafka/GetTopic", + request_serializer=managed_kafka.GetTopicRequest.serialize, + response_deserializer=resources.Topic.deserialize, + ) + return self._stubs["get_topic"] + + @property + def create_topic( + self, + ) -> Callable[[managed_kafka.CreateTopicRequest], Awaitable[resources.Topic]]: + r"""Return a callable for the create topic method over gRPC. + + Creates a new topic in a given project and location. + + Returns: + Callable[[~.CreateTopicRequest], + Awaitable[~.Topic]]: + A function that, when called, will call the underlying RPC + on the server. 
+ """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_topic" not in self._stubs: + self._stubs["create_topic"] = self.grpc_channel.unary_unary( + "/google.cloud.managedkafka.v1.ManagedKafka/CreateTopic", + request_serializer=managed_kafka.CreateTopicRequest.serialize, + response_deserializer=resources.Topic.deserialize, + ) + return self._stubs["create_topic"] + + @property + def update_topic( + self, + ) -> Callable[[managed_kafka.UpdateTopicRequest], Awaitable[resources.Topic]]: + r"""Return a callable for the update topic method over gRPC. + + Updates the properties of a single topic. + + Returns: + Callable[[~.UpdateTopicRequest], + Awaitable[~.Topic]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_topic" not in self._stubs: + self._stubs["update_topic"] = self.grpc_channel.unary_unary( + "/google.cloud.managedkafka.v1.ManagedKafka/UpdateTopic", + request_serializer=managed_kafka.UpdateTopicRequest.serialize, + response_deserializer=resources.Topic.deserialize, + ) + return self._stubs["update_topic"] + + @property + def delete_topic( + self, + ) -> Callable[[managed_kafka.DeleteTopicRequest], Awaitable[empty_pb2.Empty]]: + r"""Return a callable for the delete topic method over gRPC. + + Deletes a single topic. + + Returns: + Callable[[~.DeleteTopicRequest], + Awaitable[~.Empty]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_topic" not in self._stubs: + self._stubs["delete_topic"] = self.grpc_channel.unary_unary( + "/google.cloud.managedkafka.v1.ManagedKafka/DeleteTopic", + request_serializer=managed_kafka.DeleteTopicRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs["delete_topic"] + + @property + def list_consumer_groups( + self, + ) -> Callable[ + [managed_kafka.ListConsumerGroupsRequest], + Awaitable[managed_kafka.ListConsumerGroupsResponse], + ]: + r"""Return a callable for the list consumer groups method over gRPC. + + Lists the consumer groups in a given cluster. + + Returns: + Callable[[~.ListConsumerGroupsRequest], + Awaitable[~.ListConsumerGroupsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "list_consumer_groups" not in self._stubs: + self._stubs["list_consumer_groups"] = self.grpc_channel.unary_unary( + "/google.cloud.managedkafka.v1.ManagedKafka/ListConsumerGroups", + request_serializer=managed_kafka.ListConsumerGroupsRequest.serialize, + response_deserializer=managed_kafka.ListConsumerGroupsResponse.deserialize, + ) + return self._stubs["list_consumer_groups"] + + @property + def get_consumer_group( + self, + ) -> Callable[ + [managed_kafka.GetConsumerGroupRequest], Awaitable[resources.ConsumerGroup] + ]: + r"""Return a callable for the get consumer group method over gRPC. + + Returns the properties of a single consumer group. + + Returns: + Callable[[~.GetConsumerGroupRequest], + Awaitable[~.ConsumerGroup]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_consumer_group" not in self._stubs: + self._stubs["get_consumer_group"] = self.grpc_channel.unary_unary( + "/google.cloud.managedkafka.v1.ManagedKafka/GetConsumerGroup", + request_serializer=managed_kafka.GetConsumerGroupRequest.serialize, + response_deserializer=resources.ConsumerGroup.deserialize, + ) + return self._stubs["get_consumer_group"] + + @property + def update_consumer_group( + self, + ) -> Callable[ + [managed_kafka.UpdateConsumerGroupRequest], Awaitable[resources.ConsumerGroup] + ]: + r"""Return a callable for the update consumer group method over gRPC. + + Updates the properties of a single consumer group. + + Returns: + Callable[[~.UpdateConsumerGroupRequest], + Awaitable[~.ConsumerGroup]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_consumer_group" not in self._stubs: + self._stubs["update_consumer_group"] = self.grpc_channel.unary_unary( + "/google.cloud.managedkafka.v1.ManagedKafka/UpdateConsumerGroup", + request_serializer=managed_kafka.UpdateConsumerGroupRequest.serialize, + response_deserializer=resources.ConsumerGroup.deserialize, + ) + return self._stubs["update_consumer_group"] + + @property + def delete_consumer_group( + self, + ) -> Callable[ + [managed_kafka.DeleteConsumerGroupRequest], Awaitable[empty_pb2.Empty] + ]: + r"""Return a callable for the delete consumer group method over gRPC. + + Deletes a single consumer group. + + Returns: + Callable[[~.DeleteConsumerGroupRequest], + Awaitable[~.Empty]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "delete_consumer_group" not in self._stubs: + self._stubs["delete_consumer_group"] = self.grpc_channel.unary_unary( + "/google.cloud.managedkafka.v1.ManagedKafka/DeleteConsumerGroup", + request_serializer=managed_kafka.DeleteConsumerGroupRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs["delete_consumer_group"] + + def _prep_wrapped_messages(self, client_info): + """Precompute the wrapped methods, overriding the base class method to use async wrappers.""" + self._wrapped_methods = { + self.list_clusters: gapic_v1.method_async.wrap_method( + self.list_clusters, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.get_cluster: gapic_v1.method_async.wrap_method( + self.get_cluster, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.create_cluster: gapic_v1.method_async.wrap_method( + self.create_cluster, + default_timeout=60.0, + client_info=client_info, + ), + self.update_cluster: gapic_v1.method_async.wrap_method( + self.update_cluster, + default_timeout=60.0, + client_info=client_info, + ), + self.delete_cluster: gapic_v1.method_async.wrap_method( + self.delete_cluster, + default_timeout=60.0, + client_info=client_info, + ), + self.list_topics: gapic_v1.method_async.wrap_method( + self.list_topics, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.get_topic: gapic_v1.method_async.wrap_method( + self.get_topic, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.create_topic: gapic_v1.method_async.wrap_method( + self.create_topic, + default_timeout=60.0, + client_info=client_info, + ), + self.update_topic: gapic_v1.method_async.wrap_method( + self.update_topic, + default_timeout=60.0, + client_info=client_info, + ), + self.delete_topic: gapic_v1.method_async.wrap_method( + self.delete_topic, + default_timeout=60.0, + client_info=client_info, + ), + self.list_consumer_groups: gapic_v1.method_async.wrap_method( + self.list_consumer_groups, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.get_consumer_group: gapic_v1.method_async.wrap_method( + self.get_consumer_group, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.update_consumer_group: gapic_v1.method_async.wrap_method( + self.update_consumer_group, + default_timeout=60.0, + client_info=client_info, + ), + self.delete_consumer_group: gapic_v1.method_async.wrap_method( + self.delete_consumer_group, + 
default_timeout=60.0, + client_info=client_info, + ), + } + + def close(self): + return self.grpc_channel.close() + + @property + def delete_operation( + self, + ) -> Callable[[operations_pb2.DeleteOperationRequest], None]: + r"""Return a callable for the delete_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_operation" not in self._stubs: + self._stubs["delete_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/DeleteOperation", + request_serializer=operations_pb2.DeleteOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["delete_operation"] + + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None]: + r"""Return a callable for the cancel_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "cancel_operation" not in self._stubs: + self._stubs["cancel_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/CancelOperation", + request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["cancel_operation"] + + @property + def get_operation( + self, + ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: + r"""Return a callable for the get_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_operation" not in self._stubs: + self._stubs["get_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/GetOperation", + request_serializer=operations_pb2.GetOperationRequest.SerializeToString, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["get_operation"] + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse + ]: + r"""Return a callable for the list_operations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_operations" not in self._stubs: + self._stubs["list_operations"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/ListOperations", + request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, + response_deserializer=operations_pb2.ListOperationsResponse.FromString, + ) + return self._stubs["list_operations"] + + @property + def list_locations( + self, + ) -> Callable[ + [locations_pb2.ListLocationsRequest], locations_pb2.ListLocationsResponse + ]: + r"""Return a callable for the list locations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+        if "list_locations" not in self._stubs:
+            self._stubs["list_locations"] = self.grpc_channel.unary_unary(
+                "/google.cloud.location.Locations/ListLocations",
+                request_serializer=locations_pb2.ListLocationsRequest.SerializeToString,
+                response_deserializer=locations_pb2.ListLocationsResponse.FromString,
+            )
+        return self._stubs["list_locations"]
+
+    @property
+    def get_location(
+        self,
+    ) -> Callable[[locations_pb2.GetLocationRequest], locations_pb2.Location]:
+        r"""Return a callable for the get location method over gRPC."""
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if "get_location" not in self._stubs:
+            self._stubs["get_location"] = self.grpc_channel.unary_unary(
+                "/google.cloud.location.Locations/GetLocation",
+                request_serializer=locations_pb2.GetLocationRequest.SerializeToString,
+                response_deserializer=locations_pb2.Location.FromString,
+            )
+        return self._stubs["get_location"]
+
+
+__all__ = ("ManagedKafkaGrpcAsyncIOTransport",)
diff --git a/packages/google-cloud-managedkafka/google/cloud/managedkafka_v1/services/managed_kafka/transports/rest.py b/packages/google-cloud-managedkafka/google/cloud/managedkafka_v1/services/managed_kafka/transports/rest.py
new file mode 100644
index 000000000000..22610c7c0ae5
--- /dev/null
+++ b/packages/google-cloud-managedkafka/google/cloud/managedkafka_v1/services/managed_kafka/transports/rest.py
@@ -0,0 +1,2506 @@
+# -*- coding: utf-8 -*-
+# Copyright 2024 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# + +import dataclasses +import json # type: ignore +import re +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import ( + gapic_v1, + operations_v1, + path_template, + rest_helpers, + rest_streaming, +) +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.transport.requests import AuthorizedSession # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.protobuf import json_format +import grpc # type: ignore +from requests import __version__ as requests_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + + +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore + +from google.cloud.managedkafka_v1.types import managed_kafka, resources + +from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO +from .base import ManagedKafkaTransport + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) + + +class ManagedKafkaRestInterceptor: + """Interceptor for ManagedKafka. + + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. + Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the ManagedKafkaRestTransport. + + .. 
code-block:: python + class MyCustomManagedKafkaInterceptor(ManagedKafkaRestInterceptor): + def pre_create_cluster(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_create_cluster(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_create_topic(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_create_topic(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_delete_cluster(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_delete_cluster(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_delete_consumer_group(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def pre_delete_topic(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def pre_get_cluster(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_cluster(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_consumer_group(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_consumer_group(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_topic(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_topic(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_clusters(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_clusters(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_consumer_groups(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_consumer_groups(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_topics(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_topics(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_update_cluster(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_update_cluster(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_update_consumer_group(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_update_consumer_group(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_update_topic(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_update_topic(self, response): + logging.log(f"Received response: {response}") + return response + + transport = ManagedKafkaRestTransport(interceptor=MyCustomManagedKafkaInterceptor()) + client = ManagedKafkaClient(transport=transport) + + + """ + + def pre_create_cluster( + self, + request: managed_kafka.CreateClusterRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[managed_kafka.CreateClusterRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc 
interceptor for create_cluster + + Override in a subclass to manipulate the request or metadata + before they are sent to the ManagedKafka server. + """ + return request, metadata + + def post_create_cluster( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for create_cluster + + Override in a subclass to manipulate the response + after it is returned by the ManagedKafka server but before + it is returned to user code. + """ + return response + + def pre_create_topic( + self, + request: managed_kafka.CreateTopicRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[managed_kafka.CreateTopicRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for create_topic + + Override in a subclass to manipulate the request or metadata + before they are sent to the ManagedKafka server. + """ + return request, metadata + + def post_create_topic(self, response: resources.Topic) -> resources.Topic: + """Post-rpc interceptor for create_topic + + Override in a subclass to manipulate the response + after it is returned by the ManagedKafka server but before + it is returned to user code. + """ + return response + + def pre_delete_cluster( + self, + request: managed_kafka.DeleteClusterRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[managed_kafka.DeleteClusterRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for delete_cluster + + Override in a subclass to manipulate the request or metadata + before they are sent to the ManagedKafka server. + """ + return request, metadata + + def post_delete_cluster( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for delete_cluster + + Override in a subclass to manipulate the response + after it is returned by the ManagedKafka server but before + it is returned to user code. + """ + return response + + def pre_delete_consumer_group( + self, + request: managed_kafka.DeleteConsumerGroupRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[managed_kafka.DeleteConsumerGroupRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for delete_consumer_group + + Override in a subclass to manipulate the request or metadata + before they are sent to the ManagedKafka server. + """ + return request, metadata + + def pre_delete_topic( + self, + request: managed_kafka.DeleteTopicRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[managed_kafka.DeleteTopicRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for delete_topic + + Override in a subclass to manipulate the request or metadata + before they are sent to the ManagedKafka server. + """ + return request, metadata + + def pre_get_cluster( + self, + request: managed_kafka.GetClusterRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[managed_kafka.GetClusterRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_cluster + + Override in a subclass to manipulate the request or metadata + before they are sent to the ManagedKafka server. + """ + return request, metadata + + def post_get_cluster(self, response: resources.Cluster) -> resources.Cluster: + """Post-rpc interceptor for get_cluster + + Override in a subclass to manipulate the response + after it is returned by the ManagedKafka server but before + it is returned to user code. 
+ """ + return response + + def pre_get_consumer_group( + self, + request: managed_kafka.GetConsumerGroupRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[managed_kafka.GetConsumerGroupRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_consumer_group + + Override in a subclass to manipulate the request or metadata + before they are sent to the ManagedKafka server. + """ + return request, metadata + + def post_get_consumer_group( + self, response: resources.ConsumerGroup + ) -> resources.ConsumerGroup: + """Post-rpc interceptor for get_consumer_group + + Override in a subclass to manipulate the response + after it is returned by the ManagedKafka server but before + it is returned to user code. + """ + return response + + def pre_get_topic( + self, + request: managed_kafka.GetTopicRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[managed_kafka.GetTopicRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_topic + + Override in a subclass to manipulate the request or metadata + before they are sent to the ManagedKafka server. + """ + return request, metadata + + def post_get_topic(self, response: resources.Topic) -> resources.Topic: + """Post-rpc interceptor for get_topic + + Override in a subclass to manipulate the response + after it is returned by the ManagedKafka server but before + it is returned to user code. + """ + return response + + def pre_list_clusters( + self, + request: managed_kafka.ListClustersRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[managed_kafka.ListClustersRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_clusters + + Override in a subclass to manipulate the request or metadata + before they are sent to the ManagedKafka server. + """ + return request, metadata + + def post_list_clusters( + self, response: managed_kafka.ListClustersResponse + ) -> managed_kafka.ListClustersResponse: + """Post-rpc interceptor for list_clusters + + Override in a subclass to manipulate the response + after it is returned by the ManagedKafka server but before + it is returned to user code. + """ + return response + + def pre_list_consumer_groups( + self, + request: managed_kafka.ListConsumerGroupsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[managed_kafka.ListConsumerGroupsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_consumer_groups + + Override in a subclass to manipulate the request or metadata + before they are sent to the ManagedKafka server. + """ + return request, metadata + + def post_list_consumer_groups( + self, response: managed_kafka.ListConsumerGroupsResponse + ) -> managed_kafka.ListConsumerGroupsResponse: + """Post-rpc interceptor for list_consumer_groups + + Override in a subclass to manipulate the response + after it is returned by the ManagedKafka server but before + it is returned to user code. + """ + return response + + def pre_list_topics( + self, + request: managed_kafka.ListTopicsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[managed_kafka.ListTopicsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_topics + + Override in a subclass to manipulate the request or metadata + before they are sent to the ManagedKafka server. 
+ """ + return request, metadata + + def post_list_topics( + self, response: managed_kafka.ListTopicsResponse + ) -> managed_kafka.ListTopicsResponse: + """Post-rpc interceptor for list_topics + + Override in a subclass to manipulate the response + after it is returned by the ManagedKafka server but before + it is returned to user code. + """ + return response + + def pre_update_cluster( + self, + request: managed_kafka.UpdateClusterRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[managed_kafka.UpdateClusterRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for update_cluster + + Override in a subclass to manipulate the request or metadata + before they are sent to the ManagedKafka server. + """ + return request, metadata + + def post_update_cluster( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for update_cluster + + Override in a subclass to manipulate the response + after it is returned by the ManagedKafka server but before + it is returned to user code. + """ + return response + + def pre_update_consumer_group( + self, + request: managed_kafka.UpdateConsumerGroupRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[managed_kafka.UpdateConsumerGroupRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for update_consumer_group + + Override in a subclass to manipulate the request or metadata + before they are sent to the ManagedKafka server. + """ + return request, metadata + + def post_update_consumer_group( + self, response: resources.ConsumerGroup + ) -> resources.ConsumerGroup: + """Post-rpc interceptor for update_consumer_group + + Override in a subclass to manipulate the response + after it is returned by the ManagedKafka server but before + it is returned to user code. + """ + return response + + def pre_update_topic( + self, + request: managed_kafka.UpdateTopicRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[managed_kafka.UpdateTopicRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for update_topic + + Override in a subclass to manipulate the request or metadata + before they are sent to the ManagedKafka server. + """ + return request, metadata + + def post_update_topic(self, response: resources.Topic) -> resources.Topic: + """Post-rpc interceptor for update_topic + + Override in a subclass to manipulate the response + after it is returned by the ManagedKafka server but before + it is returned to user code. + """ + return response + + def pre_get_location( + self, + request: locations_pb2.GetLocationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[locations_pb2.GetLocationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_location + + Override in a subclass to manipulate the request or metadata + before they are sent to the ManagedKafka server. + """ + return request, metadata + + def post_get_location( + self, response: locations_pb2.Location + ) -> locations_pb2.Location: + """Post-rpc interceptor for get_location + + Override in a subclass to manipulate the response + after it is returned by the ManagedKafka server but before + it is returned to user code. 
+ """ + return response + + def pre_list_locations( + self, + request: locations_pb2.ListLocationsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[locations_pb2.ListLocationsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_locations + + Override in a subclass to manipulate the request or metadata + before they are sent to the ManagedKafka server. + """ + return request, metadata + + def post_list_locations( + self, response: locations_pb2.ListLocationsResponse + ) -> locations_pb2.ListLocationsResponse: + """Post-rpc interceptor for list_locations + + Override in a subclass to manipulate the response + after it is returned by the ManagedKafka server but before + it is returned to user code. + """ + return response + + def pre_cancel_operation( + self, + request: operations_pb2.CancelOperationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[operations_pb2.CancelOperationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for cancel_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the ManagedKafka server. + """ + return request, metadata + + def post_cancel_operation(self, response: None) -> None: + """Post-rpc interceptor for cancel_operation + + Override in a subclass to manipulate the response + after it is returned by the ManagedKafka server but before + it is returned to user code. + """ + return response + + def pre_delete_operation( + self, + request: operations_pb2.DeleteOperationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[operations_pb2.DeleteOperationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for delete_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the ManagedKafka server. + """ + return request, metadata + + def post_delete_operation(self, response: None) -> None: + """Post-rpc interceptor for delete_operation + + Override in a subclass to manipulate the response + after it is returned by the ManagedKafka server but before + it is returned to user code. + """ + return response + + def pre_get_operation( + self, + request: operations_pb2.GetOperationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[operations_pb2.GetOperationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the ManagedKafka server. + """ + return request, metadata + + def post_get_operation( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for get_operation + + Override in a subclass to manipulate the response + after it is returned by the ManagedKafka server but before + it is returned to user code. + """ + return response + + def pre_list_operations( + self, + request: operations_pb2.ListOperationsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[operations_pb2.ListOperationsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_operations + + Override in a subclass to manipulate the request or metadata + before they are sent to the ManagedKafka server. + """ + return request, metadata + + def post_list_operations( + self, response: operations_pb2.ListOperationsResponse + ) -> operations_pb2.ListOperationsResponse: + """Post-rpc interceptor for list_operations + + Override in a subclass to manipulate the response + after it is returned by the ManagedKafka server but before + it is returned to user code. 
+ """ + return response + + +@dataclasses.dataclass +class ManagedKafkaRestStub: + _session: AuthorizedSession + _host: str + _interceptor: ManagedKafkaRestInterceptor + + +class ManagedKafkaRestTransport(ManagedKafkaTransport): + """REST backend transport for ManagedKafka. + + The service that a client application uses to manage Apache + Kafka clusters, topics and consumer groups. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends JSON representations of protocol buffers over HTTP/1.1 + + """ + + def __init__( + self, + *, + host: str = "managedkafka.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", + interceptor: Optional[ManagedKafkaRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'managedkafka.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. It is ignored + if ``channel`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. + """ + # Run the base constructor + # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. 
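+        # Illustrative construction sketch (not part of this module); the
+        # anonymous credentials and localhost endpoint below are assumptions
+        # for a local test server:
+        #
+        #     from google.auth.credentials import AnonymousCredentials
+        #
+        #     transport = ManagedKafkaRestTransport(
+        #         host="localhost:8080",
+        #         credentials=AnonymousCredentials(),
+        #         url_scheme="http",
+        #     )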
+        # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the
+        # credentials object
+        maybe_url_match = re.match("^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host)
+        if maybe_url_match is None:
+            raise ValueError(
+                f"Unexpected hostname structure: {host}"
+            )  # pragma: NO COVER
+
+        url_match_items = maybe_url_match.groupdict()
+
+        host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host
+
+        super().__init__(
+            host=host,
+            credentials=credentials,
+            client_info=client_info,
+            always_use_jwt_access=always_use_jwt_access,
+            api_audience=api_audience,
+        )
+        self._session = AuthorizedSession(
+            self._credentials, default_host=self.DEFAULT_HOST
+        )
+        self._operations_client: Optional[operations_v1.AbstractOperationsClient] = None
+        if client_cert_source_for_mtls:
+            self._session.configure_mtls_channel(client_cert_source_for_mtls)
+        self._interceptor = interceptor or ManagedKafkaRestInterceptor()
+        self._prep_wrapped_messages(client_info)
+
+    @property
+    def operations_client(self) -> operations_v1.AbstractOperationsClient:
+        """Create the client designed to process long-running operations.
+
+        This property caches on the instance; repeated calls return the same
+        client.
+        """
+        # Only create a new client if we do not already have one.
+        if self._operations_client is None:
+            http_options: Dict[str, List[Dict[str, str]]] = {
+                "google.longrunning.Operations.CancelOperation": [
+                    {
+                        "method": "post",
+                        "uri": "/v1/{name=projects/*/locations/*/operations/*}:cancel",
+                        "body": "*",
+                    },
+                ],
+                "google.longrunning.Operations.DeleteOperation": [
+                    {
+                        "method": "delete",
+                        "uri": "/v1/{name=projects/*/locations/*/operations/*}",
+                    },
+                ],
+                "google.longrunning.Operations.GetOperation": [
+                    {
+                        "method": "get",
+                        "uri": "/v1/{name=projects/*/locations/*/operations/*}",
+                    },
+                ],
+                "google.longrunning.Operations.ListOperations": [
+                    {
+                        "method": "get",
+                        "uri": "/v1/{name=projects/*/locations/*}/operations",
+                    },
+                ],
+            }
+
+            rest_transport = operations_v1.OperationsRestTransport(
+                host=self._host,
+                # use the credentials which are saved
+                credentials=self._credentials,
+                scopes=self._scopes,
+                http_options=http_options,
+                path_prefix="v1",
+            )
+
+            self._operations_client = operations_v1.AbstractOperationsClient(
+                transport=rest_transport
+            )
+
+        # Return the client from cache.
+        return self._operations_client
+
+    class _CreateCluster(ManagedKafkaRestStub):
+        def __hash__(self):
+            return hash("CreateCluster")
+
+        __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {
+            "clusterId": "",
+        }
+
+        @classmethod
+        def _get_unset_required_fields(cls, message_dict):
+            return {
+                k: v
+                for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items()
+                if k not in message_dict
+            }
+
+        def __call__(
+            self,
+            request: managed_kafka.CreateClusterRequest,
+            *,
+            retry: OptionalRetry = gapic_v1.method.DEFAULT,
+            timeout: Optional[float] = None,
+            metadata: Sequence[Tuple[str, str]] = (),
+        ) -> operations_pb2.Operation:
+            r"""Call the create cluster method over HTTP.
+
+            Args:
+                request (~.managed_kafka.CreateClusterRequest):
+                    The request object. Request for CreateCluster.
+                retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                    should be retried.
+                timeout (float): The timeout for this request.
+                metadata (Sequence[Tuple[str, str]]): Strings which should be
+                    sent along with the request as metadata.
+
+            Returns:
+                ~.operations_pb2.Operation:
+                    This resource represents a
+                    long-running operation that is the
+                    result of a network API call.
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{parent=projects/*/locations/*}/clusters", + "body": "cluster", + }, + ] + request, metadata = self._interceptor.pre_create_cluster(request, metadata) + pb_request = managed_kafka.CreateClusterRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_create_cluster(resp) + return resp + + class _CreateTopic(ManagedKafkaRestStub): + def __hash__(self): + return hash("CreateTopic") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "topicId": "", + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: managed_kafka.CreateTopicRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> resources.Topic: + r"""Call the create topic method over HTTP. + + Args: + request (~.managed_kafka.CreateTopicRequest): + The request object. Request for CreateTopic. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.resources.Topic: + A Kafka topic in a given cluster. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{parent=projects/*/locations/*/clusters/*}/topics", + "body": "topic", + }, + ] + request, metadata = self._interceptor.pre_create_topic(request, metadata) + pb_request = managed_kafka.CreateTopicRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = resources.Topic() + pb_resp = resources.Topic.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_create_topic(resp) + return resp + + class _DeleteCluster(ManagedKafkaRestStub): + def __hash__(self): + return hash("DeleteCluster") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: managed_kafka.DeleteClusterRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the delete cluster method over HTTP. + + Args: + request (~.managed_kafka.DeleteClusterRequest): + The request object. Request for DeleteCluster. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v1/{name=projects/*/locations/*/clusters/*}", + }, + ] + request, metadata = self._interceptor.pre_delete_cluster(request, metadata) + pb_request = managed_kafka.DeleteClusterRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_delete_cluster(resp) + return resp + + class _DeleteConsumerGroup(ManagedKafkaRestStub): + def __hash__(self): + return hash("DeleteConsumerGroup") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: managed_kafka.DeleteConsumerGroupRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ): + r"""Call the delete consumer group method over HTTP. + + Args: + request (~.managed_kafka.DeleteConsumerGroupRequest): + The request object. Request for DeleteConsumerGroup. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v1/{name=projects/*/locations/*/clusters/*/consumerGroups/*}", + }, + ] + request, metadata = self._interceptor.pre_delete_consumer_group( + request, metadata + ) + pb_request = managed_kafka.DeleteConsumerGroupRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + class _DeleteTopic(ManagedKafkaRestStub): + def __hash__(self): + return hash("DeleteTopic") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: managed_kafka.DeleteTopicRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ): + r"""Call the delete topic method over HTTP. + + Args: + request (~.managed_kafka.DeleteTopicRequest): + The request object. Request for DeleteTopic. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v1/{name=projects/*/locations/*/clusters/*/topics/*}", + }, + ] + request, metadata = self._interceptor.pre_delete_topic(request, metadata) + pb_request = managed_kafka.DeleteTopicRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
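+            # Illustrative only: a missing topic surfaces to the caller as
+            # ``google.api_core.exceptions.NotFound``, because
+            # ``from_http_response`` maps the HTTP status code to the matching
+            # ``GoogleAPICallError`` subclass. ``client`` and ``topic_name``
+            # below are assumed to exist:
+            #
+            #     try:
+            #         client.delete_topic(name=topic_name)
+            #     except core_exceptions.NotFound:
+            #         ...  # handle the missing resource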
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + class _GetCluster(ManagedKafkaRestStub): + def __hash__(self): + return hash("GetCluster") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: managed_kafka.GetClusterRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> resources.Cluster: + r"""Call the get cluster method over HTTP. + + Args: + request (~.managed_kafka.GetClusterRequest): + The request object. Request for GetCluster. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.resources.Cluster: + An Apache Kafka cluster deployed in a + location. + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/clusters/*}", + }, + ] + request, metadata = self._interceptor.pre_get_cluster(request, metadata) + pb_request = managed_kafka.GetClusterRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = resources.Cluster() + pb_resp = resources.Cluster.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_cluster(resp) + return resp + + class _GetConsumerGroup(ManagedKafkaRestStub): + def __hash__(self): + return hash("GetConsumerGroup") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: managed_kafka.GetConsumerGroupRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> resources.ConsumerGroup: + r"""Call the get consumer group method over HTTP. + + Args: + request (~.managed_kafka.GetConsumerGroupRequest): + The request object. Request for GetConsumerGroup. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.resources.ConsumerGroup: + A Kafka consumer group in a given + cluster. + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/clusters/*/consumerGroups/*}", + }, + ] + request, metadata = self._interceptor.pre_get_consumer_group( + request, metadata + ) + pb_request = managed_kafka.GetConsumerGroupRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = resources.ConsumerGroup() + pb_resp = resources.ConsumerGroup.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_consumer_group(resp) + return resp + + class _GetTopic(ManagedKafkaRestStub): + def __hash__(self): + return hash("GetTopic") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: managed_kafka.GetTopicRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> resources.Topic: + r"""Call the get topic method over HTTP. + + Args: + request (~.managed_kafka.GetTopicRequest): + The request object. Request for GetTopic. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.resources.Topic: + A Kafka topic in a given cluster. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/clusters/*/topics/*}", + }, + ] + request, metadata = self._interceptor.pre_get_topic(request, metadata) + pb_request = managed_kafka.GetTopicRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = resources.Topic() + pb_resp = resources.Topic.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_topic(resp) + return resp + + class _ListClusters(ManagedKafkaRestStub): + def __hash__(self): + return hash("ListClusters") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: managed_kafka.ListClustersRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> managed_kafka.ListClustersResponse: + r"""Call the list clusters method over HTTP. + + Args: + request (~.managed_kafka.ListClustersRequest): + The request object. Request for ListClusters. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.managed_kafka.ListClustersResponse: + Response for ListClusters. 
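Each stub above repeats the same pattern: transcode the request against its `http_options`, serialize the leftover fields as JSON query parameters, send the HTTP call, and parse the body back into a proto message. A rough, illustrative sketch of just the transcoding step, using `ListClustersRequest` with placeholder resource names (the generated code above remains the authoritative flow):

```python
from google.api_core import path_template
from google.cloud.managedkafka_v1.types import managed_kafka

http_options = [
    {"method": "get", "uri": "/v1/{parent=projects/*/locations/*}/clusters"},
]
request = managed_kafka.ListClustersRequest(
    parent="projects/my-project/locations/us-central1",
    page_size=10,
)

# transcode() picks the matching rule and splices the path fields into the URI;
# fields not consumed by the path (such as page_size) are left for the query string.
transcoded = path_template.transcode(
    http_options, managed_kafka.ListClustersRequest.pb(request)
)
print(transcoded["method"])  # get
print(transcoded["uri"])     # /v1/projects/my-project/locations/us-central1/clusters
```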
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{parent=projects/*/locations/*}/clusters", + }, + ] + request, metadata = self._interceptor.pre_list_clusters(request, metadata) + pb_request = managed_kafka.ListClustersRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = managed_kafka.ListClustersResponse() + pb_resp = managed_kafka.ListClustersResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_clusters(resp) + return resp + + class _ListConsumerGroups(ManagedKafkaRestStub): + def __hash__(self): + return hash("ListConsumerGroups") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: managed_kafka.ListConsumerGroupsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> managed_kafka.ListConsumerGroupsResponse: + r"""Call the list consumer groups method over HTTP. + + Args: + request (~.managed_kafka.ListConsumerGroupsRequest): + The request object. Request for ListConsumerGroups. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.managed_kafka.ListConsumerGroupsResponse: + Response for ListConsumerGroups. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{parent=projects/*/locations/*/clusters/*}/consumerGroups", + }, + ] + request, metadata = self._interceptor.pre_list_consumer_groups( + request, metadata + ) + pb_request = managed_kafka.ListConsumerGroupsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = managed_kafka.ListConsumerGroupsResponse() + pb_resp = managed_kafka.ListConsumerGroupsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_consumer_groups(resp) + return resp + + class _ListTopics(ManagedKafkaRestStub): + def __hash__(self): + return hash("ListTopics") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: managed_kafka.ListTopicsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> managed_kafka.ListTopicsResponse: + r"""Call the list topics method over HTTP. + + Args: + request (~.managed_kafka.ListTopicsRequest): + The request object. Request for ListTopics. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.managed_kafka.ListTopicsResponse: + Response for ListTopics. 
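Callers normally do not drive `page_token` by hand: the client-level `list_*` methods wrap these stubs in pagers that follow `next_page_token` automatically. A minimal sketch, assuming `google-cloud-managedkafka` is installed; the parent cluster name is a placeholder:

```python
from google.cloud import managedkafka_v1

client = managedkafka_v1.ManagedKafkaClient(transport="rest")
parent = "projects/my-project/locations/us-central1/clusters/my-cluster-id"

# The pager transparently issues additional ListTopics calls as needed.
for topic in client.list_topics(parent=parent):
    print(topic.name)
```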
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{parent=projects/*/locations/*/clusters/*}/topics", + }, + ] + request, metadata = self._interceptor.pre_list_topics(request, metadata) + pb_request = managed_kafka.ListTopicsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = managed_kafka.ListTopicsResponse() + pb_resp = managed_kafka.ListTopicsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_topics(resp) + return resp + + class _UpdateCluster(ManagedKafkaRestStub): + def __hash__(self): + return hash("UpdateCluster") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "updateMask": {}, + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: managed_kafka.UpdateClusterRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the update cluster method over HTTP. + + Args: + request (~.managed_kafka.UpdateClusterRequest): + The request object. Request for UpdateCluster. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "patch", + "uri": "/v1/{cluster.name=projects/*/locations/*/clusters/*}", + "body": "cluster", + }, + ] + request, metadata = self._interceptor.pre_update_cluster(request, metadata) + pb_request = managed_kafka.UpdateClusterRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_update_cluster(resp) + return resp + + class _UpdateConsumerGroup(ManagedKafkaRestStub): + def __hash__(self): + return hash("UpdateConsumerGroup") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "updateMask": {}, + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: managed_kafka.UpdateConsumerGroupRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> resources.ConsumerGroup: + r"""Call the update consumer group method over HTTP. + + Args: + request (~.managed_kafka.UpdateConsumerGroupRequest): + The request object. Request for UpdateConsumerGroup. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.resources.ConsumerGroup: + A Kafka consumer group in a given + cluster. 
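The `_UpdateCluster` stub above is the one update in this file that goes through a long-running operation, and its `__REQUIRED_FIELDS_DEFAULT_VALUES` shows that `updateMask` is always sent. A hedged sketch of a partial update at the client surface, with placeholder resource names:

```python
from google.cloud import managedkafka_v1
from google.protobuf import field_mask_pb2

client = managedkafka_v1.ManagedKafkaClient(transport="rest")

cluster = managedkafka_v1.Cluster(
    name="projects/my-project/locations/us-central1/clusters/my-cluster-id",
    labels={"env": "dev"},
)
operation = client.update_cluster(
    cluster=cluster,
    update_mask=field_mask_pb2.FieldMask(paths=["labels"]),  # only labels change
)
updated = operation.result()  # resolves to the updated Cluster
```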
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "patch", + "uri": "/v1/{consumer_group.name=projects/*/locations/*/clusters/*/consumerGroups/*}", + "body": "consumer_group", + }, + ] + request, metadata = self._interceptor.pre_update_consumer_group( + request, metadata + ) + pb_request = managed_kafka.UpdateConsumerGroupRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = resources.ConsumerGroup() + pb_resp = resources.ConsumerGroup.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_update_consumer_group(resp) + return resp + + class _UpdateTopic(ManagedKafkaRestStub): + def __hash__(self): + return hash("UpdateTopic") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "updateMask": {}, + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: managed_kafka.UpdateTopicRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> resources.Topic: + r"""Call the update topic method over HTTP. + + Args: + request (~.managed_kafka.UpdateTopicRequest): + The request object. Request for UpdateTopic. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.resources.Topic: + A Kafka topic in a given cluster. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "patch", + "uri": "/v1/{topic.name=projects/*/locations/*/clusters/*/topics/*}", + "body": "topic", + }, + ] + request, metadata = self._interceptor.pre_update_topic(request, metadata) + pb_request = managed_kafka.UpdateTopicRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = resources.Topic() + pb_resp = resources.Topic.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_update_topic(resp) + return resp + + @property + def create_cluster( + self, + ) -> Callable[[managed_kafka.CreateClusterRequest], operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._CreateCluster(self._session, self._host, self._interceptor) # type: ignore + + @property + def create_topic( + self, + ) -> Callable[[managed_kafka.CreateTopicRequest], resources.Topic]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._CreateTopic(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete_cluster( + self, + ) -> Callable[[managed_kafka.DeleteClusterRequest], operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._DeleteCluster(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete_consumer_group( + self, + ) -> Callable[[managed_kafka.DeleteConsumerGroupRequest], empty_pb2.Empty]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._DeleteConsumerGroup(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete_topic( + self, + ) -> Callable[[managed_kafka.DeleteTopicRequest], empty_pb2.Empty]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._DeleteTopic(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_cluster( + self, + ) -> Callable[[managed_kafka.GetClusterRequest], resources.Cluster]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetCluster(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_consumer_group( + self, + ) -> Callable[[managed_kafka.GetConsumerGroupRequest], resources.ConsumerGroup]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetConsumerGroup(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_topic(self) -> Callable[[managed_kafka.GetTopicRequest], resources.Topic]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetTopic(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_clusters( + self, + ) -> Callable[ + [managed_kafka.ListClustersRequest], managed_kafka.ListClustersResponse + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListClusters(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_consumer_groups( + self, + ) -> Callable[ + [managed_kafka.ListConsumerGroupsRequest], + managed_kafka.ListConsumerGroupsResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListConsumerGroups(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_topics( + self, + ) -> Callable[[managed_kafka.ListTopicsRequest], managed_kafka.ListTopicsResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListTopics(self._session, self._host, self._interceptor) # type: ignore + + @property + def update_cluster( + self, + ) -> Callable[[managed_kafka.UpdateClusterRequest], operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._UpdateCluster(self._session, self._host, self._interceptor) # type: ignore + + @property + def update_consumer_group( + self, + ) -> Callable[[managed_kafka.UpdateConsumerGroupRequest], resources.ConsumerGroup]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._UpdateConsumerGroup(self._session, self._host, self._interceptor) # type: ignore + + @property + def update_topic( + self, + ) -> Callable[[managed_kafka.UpdateTopicRequest], resources.Topic]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._UpdateTopic(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_location(self): + return self._GetLocation(self._session, self._host, self._interceptor) # type: ignore + + class _GetLocation(ManagedKafkaRestStub): + def __call__( + self, + request: locations_pb2.GetLocationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.Location: + r"""Call the get location method over HTTP. + + Args: + request (locations_pb2.GetLocationRequest): + The request object for GetLocation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + locations_pb2.Location: Response from GetLocation method. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*}", + }, + ] + + request, metadata = self._interceptor.pre_get_location(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = locations_pb2.Location() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_get_location(resp) + return resp + + @property + def list_locations(self): + return self._ListLocations(self._session, self._host, self._interceptor) # type: ignore + + class _ListLocations(ManagedKafkaRestStub): + def __call__( + self, + request: locations_pb2.ListLocationsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.ListLocationsResponse: + r"""Call the list locations method over HTTP. + + Args: + request (locations_pb2.ListLocationsRequest): + The request object for ListLocations method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + locations_pb2.ListLocationsResponse: Response from ListLocations method. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*}/locations", + }, + ] + + request, metadata = self._interceptor.pre_list_locations(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = locations_pb2.ListLocationsResponse() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_list_locations(resp) + return resp + + @property + def cancel_operation(self): + return self._CancelOperation(self._session, self._host, self._interceptor) # type: ignore + + class _CancelOperation(ManagedKafkaRestStub): + def __call__( + self, + request: operations_pb2.CancelOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Call the cancel operation method over HTTP. + + Args: + request (operations_pb2.CancelOperationRequest): + The request object for CancelOperation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{name=projects/*/locations/*/operations/*}:cancel", + "body": "*", + }, + ] + + request, metadata = self._interceptor.pre_cancel_operation( + request, metadata + ) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + body = json.dumps(transcoded_request["body"]) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + return self._interceptor.post_cancel_operation(None) + + @property + def delete_operation(self): + return self._DeleteOperation(self._session, self._host, self._interceptor) # type: ignore + + class _DeleteOperation(ManagedKafkaRestStub): + def __call__( + self, + request: operations_pb2.DeleteOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Call the delete operation method over HTTP. + + Args: + request (operations_pb2.DeleteOperationRequest): + The request object for DeleteOperation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v1/{name=projects/*/locations/*/operations/*}", + }, + ] + + request, metadata = self._interceptor.pre_delete_operation( + request, metadata + ) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + return self._interceptor.post_delete_operation(None) + + @property + def get_operation(self): + return self._GetOperation(self._session, self._host, self._interceptor) # type: ignore + + class _GetOperation(ManagedKafkaRestStub): + def __call__( + self, + request: operations_pb2.GetOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the get operation method over HTTP. + + Args: + request (operations_pb2.GetOperationRequest): + The request object for GetOperation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + operations_pb2.Operation: Response from GetOperation method. 
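The operation stubs in this block surface on the client as mixin methods (`get_operation`, `cancel_operation`, and so on) that take the raw `operations_pb2` request messages. A small sketch, assuming those mixins are exposed on `ManagedKafkaClient` as in other GAPIC clients; the operation name is a placeholder:

```python
from google.cloud import managedkafka_v1
from google.longrunning import operations_pb2

client = managedkafka_v1.ManagedKafkaClient(transport="rest")

op = client.get_operation(
    request=operations_pb2.GetOperationRequest(
        name="projects/my-project/locations/us-central1/operations/operation-123"
    )
)
if not op.done:
    client.cancel_operation(
        request=operations_pb2.CancelOperationRequest(name=op.name)
    )
```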
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/operations/*}", + }, + ] + + request, metadata = self._interceptor.pre_get_operation(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = operations_pb2.Operation() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_get_operation(resp) + return resp + + @property + def list_operations(self): + return self._ListOperations(self._session, self._host, self._interceptor) # type: ignore + + class _ListOperations(ManagedKafkaRestStub): + def __call__( + self, + request: operations_pb2.ListOperationsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Call the list operations method over HTTP. + + Args: + request (operations_pb2.ListOperationsRequest): + The request object for ListOperations method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + operations_pb2.ListOperationsResponse: Response from ListOperations method. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*}/operations", + }, + ] + + request, metadata = self._interceptor.pre_list_operations(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = operations_pb2.ListOperationsResponse() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_list_operations(resp) + return resp + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__ = ("ManagedKafkaRestTransport",) diff --git a/packages/google-cloud-managedkafka/google/cloud/managedkafka_v1/types/__init__.py b/packages/google-cloud-managedkafka/google/cloud/managedkafka_v1/types/__init__.py new file mode 100644 index 000000000000..5c8f1d8b7fc1 --- /dev/null +++ b/packages/google-cloud-managedkafka/google/cloud/managedkafka_v1/types/__init__.py @@ -0,0 +1,78 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from .managed_kafka import ( + CreateClusterRequest, + CreateTopicRequest, + DeleteClusterRequest, + DeleteConsumerGroupRequest, + DeleteTopicRequest, + GetClusterRequest, + GetConsumerGroupRequest, + GetTopicRequest, + ListClustersRequest, + ListClustersResponse, + ListConsumerGroupsRequest, + ListConsumerGroupsResponse, + ListTopicsRequest, + ListTopicsResponse, + UpdateClusterRequest, + UpdateConsumerGroupRequest, + UpdateTopicRequest, +) +from .resources import ( + AccessConfig, + CapacityConfig, + Cluster, + ConsumerGroup, + ConsumerPartitionMetadata, + ConsumerTopicMetadata, + GcpConfig, + NetworkConfig, + OperationMetadata, + RebalanceConfig, + Topic, +) + +__all__ = ( + "CreateClusterRequest", + "CreateTopicRequest", + "DeleteClusterRequest", + "DeleteConsumerGroupRequest", + "DeleteTopicRequest", + "GetClusterRequest", + "GetConsumerGroupRequest", + "GetTopicRequest", + "ListClustersRequest", + "ListClustersResponse", + "ListConsumerGroupsRequest", + "ListConsumerGroupsResponse", + "ListTopicsRequest", + "ListTopicsResponse", + "UpdateClusterRequest", + "UpdateConsumerGroupRequest", + "UpdateTopicRequest", + "AccessConfig", + "CapacityConfig", + "Cluster", + "ConsumerGroup", + "ConsumerPartitionMetadata", + "ConsumerTopicMetadata", + "GcpConfig", + "NetworkConfig", + "OperationMetadata", + "RebalanceConfig", + "Topic", +) diff --git a/packages/google-cloud-managedkafka/google/cloud/managedkafka_v1/types/managed_kafka.py b/packages/google-cloud-managedkafka/google/cloud/managedkafka_v1/types/managed_kafka.py new file mode 100644 index 000000000000..85b456c9cdb5 --- /dev/null +++ b/packages/google-cloud-managedkafka/google/cloud/managedkafka_v1/types/managed_kafka.py @@ -0,0 +1,577 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +from google.protobuf import field_mask_pb2 # type: ignore +import proto # type: ignore + +from google.cloud.managedkafka_v1.types import resources + +__protobuf__ = proto.module( + package="google.cloud.managedkafka.v1", + manifest={ + "ListClustersRequest", + "ListClustersResponse", + "GetClusterRequest", + "CreateClusterRequest", + "UpdateClusterRequest", + "DeleteClusterRequest", + "ListTopicsRequest", + "ListTopicsResponse", + "GetTopicRequest", + "CreateTopicRequest", + "UpdateTopicRequest", + "DeleteTopicRequest", + "ListConsumerGroupsRequest", + "ListConsumerGroupsResponse", + "GetConsumerGroupRequest", + "UpdateConsumerGroupRequest", + "DeleteConsumerGroupRequest", + }, +) + + +class ListClustersRequest(proto.Message): + r"""Request for ListClusters. + + Attributes: + parent (str): + Required. The parent location whose clusters are to be + listed. Structured like + ``projects/{project}/locations/{location}``. + page_size (int): + Optional. The maximum number of clusters to + return. The service may return fewer than this + value. If unspecified, server will pick an + appropriate default. + page_token (str): + Optional. A page token, received from a previous + ``ListClusters`` call. Provide this to retrieve the + subsequent page. + + When paginating, all other parameters provided to + ``ListClusters`` must match the call that provided the page + token. + filter (str): + Optional. Filter expression for the result. + order_by (str): + Optional. Order by fields for the result. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + filter: str = proto.Field( + proto.STRING, + number=4, + ) + order_by: str = proto.Field( + proto.STRING, + number=5, + ) + + +class ListClustersResponse(proto.Message): + r"""Response for ListClusters. + + Attributes: + clusters (MutableSequence[google.cloud.managedkafka_v1.types.Cluster]): + The list of Clusters in the requested parent. + next_page_token (str): + A token that can be sent as ``page_token`` to retrieve the + next page of results. If this field is omitted, there are no + more results. + unreachable (MutableSequence[str]): + Locations that could not be reached. + """ + + @property + def raw_page(self): + return self + + clusters: MutableSequence[resources.Cluster] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=resources.Cluster, + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + unreachable: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=3, + ) + + +class GetClusterRequest(proto.Message): + r"""Request for GetCluster. + + Attributes: + name (str): + Required. The name of the cluster whose + configuration to return. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class CreateClusterRequest(proto.Message): + r"""Request for CreateCluster. 
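`ListClustersRequest` exposes `page_size`, `page_token`, `filter`, and `order_by` directly, which matters when paginating by hand rather than through the client's pager. A sketch of building the message defined above; the parent is a placeholder, and the filter grammar is not specified in this file, so that field is left unset:

```python
from google.cloud.managedkafka_v1.types import managed_kafka

request = managed_kafka.ListClustersRequest(
    parent="projects/my-project/locations/us-central1",
    page_size=25,
    order_by="name",
)
# When paginating manually, copy next_page_token from the previous
# ListClustersResponse into page_token and keep every other field identical.
```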
+ + Attributes: + parent (str): + Required. The parent region in which to create the cluster. + Structured like ``projects/{project}/locations/{location}``. + cluster_id (str): + Required. The ID to use for the cluster, which will become + the final component of the cluster's name. The ID must be + 1-63 characters long, and match the regular expression + ``[a-z]([-a-z0-9]*[a-z0-9])?`` to comply with RFC 1035. + + This value is structured like: ``my-cluster-id``. + cluster (google.cloud.managedkafka_v1.types.Cluster): + Required. Configuration of the cluster to create. Its + ``name`` field is ignored. + request_id (str): + Optional. An optional request ID to identify + requests. Specify a unique request ID to avoid + duplication of requests. If a request times out + or fails, retrying with the same ID allows the + server to recognize the previous attempt. For at + least 60 minutes, the server ignores duplicate + requests bearing the same ID. + + For example, consider a situation where you make + an initial request and the request times out. If + you make the request again with the same request + ID within 60 minutes of the last request, the + server checks if an original operation with the + same request ID was received. If so, the server + ignores the second request. + + The request ID must be a valid UUID. A zero UUID + is not supported + (00000000-0000-0000-0000-000000000000). + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + cluster_id: str = proto.Field( + proto.STRING, + number=2, + ) + cluster: resources.Cluster = proto.Field( + proto.MESSAGE, + number=3, + message=resources.Cluster, + ) + request_id: str = proto.Field( + proto.STRING, + number=4, + ) + + +class UpdateClusterRequest(proto.Message): + r"""Request for UpdateCluster. + + Attributes: + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Required. Field mask is used to specify the fields to be + overwritten in the cluster resource by the update. The + fields specified in the update_mask are relative to the + resource, not the full request. A field will be overwritten + if it is in the mask. The mask is required and a value of \* + will update all fields. + cluster (google.cloud.managedkafka_v1.types.Cluster): + Required. The cluster to update. Its ``name`` field must be + populated. + request_id (str): + Optional. An optional request ID to identify + requests. Specify a unique request ID to avoid + duplication of requests. If a request times out + or fails, retrying with the same ID allows the + server to recognize the previous attempt. For at + least 60 minutes, the server ignores duplicate + requests bearing the same ID. + + For example, consider a situation where you make + an initial request and the request times out. If + you make the request again with the same request + ID within 60 minutes of the last request, the + server checks if an original operation with the + same request ID was received. If so, the server + ignores the second request. + + The request ID must be a valid UUID. A zero UUID + is not supported + (00000000-0000-0000-0000-000000000000). + """ + + update_mask: field_mask_pb2.FieldMask = proto.Field( + proto.MESSAGE, + number=1, + message=field_mask_pb2.FieldMask, + ) + cluster: resources.Cluster = proto.Field( + proto.MESSAGE, + number=2, + message=resources.Cluster, + ) + request_id: str = proto.Field( + proto.STRING, + number=3, + ) + + +class DeleteClusterRequest(proto.Message): + r"""Request for DeleteCluster. + + Attributes: + name (str): + Required. 
The name of the cluster to delete. + request_id (str): + Optional. An optional request ID to identify + requests. Specify a unique request ID to avoid + duplication of requests. If a request times out + or fails, retrying with the same ID allows the + server to recognize the previous attempt. For at + least 60 minutes, the server ignores duplicate + requests bearing the same ID. + + For example, consider a situation where you make + an initial request and the request times out. If + you make the request again with the same request + ID within 60 minutes of the last request, the + server checks if an original operation with the + same request ID was received. If so, the server + ignores the second request. + + The request ID must be a valid UUID. A zero UUID + is not supported + (00000000-0000-0000-0000-000000000000). + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + request_id: str = proto.Field( + proto.STRING, + number=2, + ) + + +class ListTopicsRequest(proto.Message): + r"""Request for ListTopics. + + Attributes: + parent (str): + Required. The parent cluster whose topics are to be listed. + Structured like + ``projects/{project}/locations/{location}/clusters/{cluster}``. + page_size (int): + Optional. The maximum number of topics to + return. The service may return fewer than this + value. If unset or zero, all topics for the + parent is returned. + page_token (str): + Optional. A page token, received from a previous + ``ListTopics`` call. Provide this to retrieve the subsequent + page. + + When paginating, all other parameters provided to + ``ListTopics`` must match the call that provided the page + token. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + + +class ListTopicsResponse(proto.Message): + r"""Response for ListTopics. + + Attributes: + topics (MutableSequence[google.cloud.managedkafka_v1.types.Topic]): + The list of topics in the requested parent. + The order of the topics is unspecified. + next_page_token (str): + A token that can be sent as ``page_token`` to retrieve the + next page of results. If this field is omitted, there are no + more results. + """ + + @property + def raw_page(self): + return self + + topics: MutableSequence[resources.Topic] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=resources.Topic, + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + + +class GetTopicRequest(proto.Message): + r"""Request for GetTopic. + + Attributes: + name (str): + Required. The name of the topic whose + configuration to return. Structured like: + + projects/{project}/locations/{location}/clusters/{cluster}/topics/{topic}. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class CreateTopicRequest(proto.Message): + r"""Request for CreateTopic. + + Attributes: + parent (str): + Required. The parent cluster in which to create the topic. + Structured like + ``projects/{project}/locations/{location}/clusters/{cluster}``. + topic_id (str): + Required. The ID to use for the topic, which will become the + final component of the topic's name. + + This value is structured like: ``my-topic-name``. + topic (google.cloud.managedkafka_v1.types.Topic): + Required. Configuration of the topic to create. Its ``name`` + field is ignored. 
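`CreateTopicRequest` pairs the parent cluster with a caller-chosen `topic_id` and the topic configuration whose `name` is ignored, as noted above. A hedged sketch; `partition_count` and `replication_factor` are assumed `Topic` field names that do not appear in this excerpt, and the resource names are placeholders:

```python
from google.cloud import managedkafka_v1

client = managedkafka_v1.ManagedKafkaClient(transport="rest")

request = managedkafka_v1.CreateTopicRequest(
    parent="projects/my-project/locations/us-central1/clusters/my-cluster-id",
    topic_id="my-topic-name",
    topic=managedkafka_v1.Topic(
        partition_count=3,     # assumed field name
        replication_factor=3,  # assumed field name
    ),
)
topic = client.create_topic(request=request)
print(topic.name)  # ends with .../topics/my-topic-name
```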
+ """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + topic_id: str = proto.Field( + proto.STRING, + number=2, + ) + topic: resources.Topic = proto.Field( + proto.MESSAGE, + number=3, + message=resources.Topic, + ) + + +class UpdateTopicRequest(proto.Message): + r"""Request for UpdateTopic. + + Attributes: + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Required. Field mask is used to specify the fields to be + overwritten in the Topic resource by the update. The fields + specified in the update_mask are relative to the resource, + not the full request. A field will be overwritten if it is + in the mask. The mask is required and a value of \* will + update all fields. + topic (google.cloud.managedkafka_v1.types.Topic): + Required. The topic to update. Its ``name`` field must be + populated. + """ + + update_mask: field_mask_pb2.FieldMask = proto.Field( + proto.MESSAGE, + number=1, + message=field_mask_pb2.FieldMask, + ) + topic: resources.Topic = proto.Field( + proto.MESSAGE, + number=2, + message=resources.Topic, + ) + + +class DeleteTopicRequest(proto.Message): + r"""Request for DeleteTopic. + + Attributes: + name (str): + Required. The name of the topic to delete. + ``projects/{project}/locations/{location}/clusters/{cluster}/topics/{topic}``. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class ListConsumerGroupsRequest(proto.Message): + r"""Request for ListConsumerGroups. + + Attributes: + parent (str): + Required. The parent cluster whose consumer groups are to be + listed. Structured like + ``projects/{project}/locations/{location}/clusters/{cluster}``. + page_size (int): + Optional. The maximum number of consumer + groups to return. The service may return fewer + than this value. If unset or zero, all consumer + groups for the parent is returned. + page_token (str): + Optional. A page token, received from a previous + ``ListConsumerGroups`` call. Provide this to retrieve the + subsequent page. + + When paginating, all other parameters provided to + ``ListConsumerGroups`` must match the call that provided the + page token. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + + +class ListConsumerGroupsResponse(proto.Message): + r"""Response for ListConsumerGroups. + + Attributes: + consumer_groups (MutableSequence[google.cloud.managedkafka_v1.types.ConsumerGroup]): + The list of consumer group in the requested + parent. The order of the consumer groups is + unspecified. + next_page_token (str): + A token that can be sent as ``page_token`` to retrieve the + next page of results. If this field is omitted, there are no + more results. + """ + + @property + def raw_page(self): + return self + + consumer_groups: MutableSequence[resources.ConsumerGroup] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=resources.ConsumerGroup, + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + + +class GetConsumerGroupRequest(proto.Message): + r"""Request for GetConsumerGroup. + + Attributes: + name (str): + Required. The name of the consumer group whose configuration + to return. + ``projects/{project}/locations/{location}/clusters/{cluster}/consumerGroups/{consumerGroup}``. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class UpdateConsumerGroupRequest(proto.Message): + r"""Request for UpdateConsumerGroup. 
+ + Attributes: + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Required. Field mask is used to specify the fields to be + overwritten in the ConsumerGroup resource by the update. The + fields specified in the update_mask are relative to the + resource, not the full request. A field will be overwritten + if it is in the mask. The mask is required and a value of \* + will update all fields. + consumer_group (google.cloud.managedkafka_v1.types.ConsumerGroup): + Required. The consumer group to update. Its ``name`` field + must be populated. + """ + + update_mask: field_mask_pb2.FieldMask = proto.Field( + proto.MESSAGE, + number=1, + message=field_mask_pb2.FieldMask, + ) + consumer_group: resources.ConsumerGroup = proto.Field( + proto.MESSAGE, + number=2, + message=resources.ConsumerGroup, + ) + + +class DeleteConsumerGroupRequest(proto.Message): + r"""Request for DeleteConsumerGroup. + + Attributes: + name (str): + Required. The name of the consumer group to delete. + ``projects/{project}/locations/{location}/clusters/{cluster}/consumerGroups/{consumerGroup}``. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-managedkafka/google/cloud/managedkafka_v1/types/resources.py b/packages/google-cloud-managedkafka/google/cloud/managedkafka_v1/types/resources.py new file mode 100644 index 000000000000..21d8aad67564 --- /dev/null +++ b/packages/google-cloud-managedkafka/google/cloud/managedkafka_v1/types/resources.py @@ -0,0 +1,440 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +from google.protobuf import timestamp_pb2 # type: ignore +import proto # type: ignore + +__protobuf__ = proto.module( + package="google.cloud.managedkafka.v1", + manifest={ + "Cluster", + "CapacityConfig", + "RebalanceConfig", + "NetworkConfig", + "AccessConfig", + "GcpConfig", + "Topic", + "ConsumerTopicMetadata", + "ConsumerPartitionMetadata", + "ConsumerGroup", + "OperationMetadata", + }, +) + + +class Cluster(proto.Message): + r"""An Apache Kafka cluster deployed in a location. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + gcp_config (google.cloud.managedkafka_v1.types.GcpConfig): + Required. Configuration properties for a + Kafka cluster deployed to Google Cloud Platform. + + This field is a member of `oneof`_ ``platform_config``. + name (str): + Identifier. The name of the cluster. Structured like: + projects/{project_number}/locations/{location}/clusters/{cluster_id} + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time when the cluster was + created. + update_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. 
The time when the cluster was + last updated. + labels (MutableMapping[str, str]): + Optional. Labels as key value pairs. + capacity_config (google.cloud.managedkafka_v1.types.CapacityConfig): + Required. Capacity configuration for the + Kafka cluster. + rebalance_config (google.cloud.managedkafka_v1.types.RebalanceConfig): + Optional. Rebalance configuration for the + Kafka cluster. + state (google.cloud.managedkafka_v1.types.Cluster.State): + Output only. The current state of the + cluster. + """ + + class State(proto.Enum): + r"""The state of the cluster. + + Values: + STATE_UNSPECIFIED (0): + A state was not specified. + CREATING (1): + The cluster is being created. + ACTIVE (2): + The cluster is active. + DELETING (3): + The cluster is being deleted. + """ + STATE_UNSPECIFIED = 0 + CREATING = 1 + ACTIVE = 2 + DELETING = 3 + + gcp_config: "GcpConfig" = proto.Field( + proto.MESSAGE, + number=9, + oneof="platform_config", + message="GcpConfig", + ) + name: str = proto.Field( + proto.STRING, + number=1, + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=3, + message=timestamp_pb2.Timestamp, + ) + labels: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=4, + ) + capacity_config: "CapacityConfig" = proto.Field( + proto.MESSAGE, + number=5, + message="CapacityConfig", + ) + rebalance_config: "RebalanceConfig" = proto.Field( + proto.MESSAGE, + number=8, + message="RebalanceConfig", + ) + state: State = proto.Field( + proto.ENUM, + number=10, + enum=State, + ) + + +class CapacityConfig(proto.Message): + r"""A capacity configuration of a Kafka cluster. + + Attributes: + vcpu_count (int): + Required. The number of vCPUs to provision + for the cluster. Minimum: 3. + memory_bytes (int): + Required. The memory to provision for the + cluster in bytes. The CPU:memory ratio + (vCPU:GiB) must be between 1:1 and 1:8. Minimum: + 3221225472 (3 GiB). + """ + + vcpu_count: int = proto.Field( + proto.INT64, + number=1, + ) + memory_bytes: int = proto.Field( + proto.INT64, + number=2, + ) + + +class RebalanceConfig(proto.Message): + r"""Defines rebalancing behavior of a Kafka cluster. + + Attributes: + mode (google.cloud.managedkafka_v1.types.RebalanceConfig.Mode): + Optional. The rebalance behavior for the cluster. When not + specified, defaults to ``NO_REBALANCE``. + """ + + class Mode(proto.Enum): + r"""The partition rebalance mode for the cluster. + + Values: + MODE_UNSPECIFIED (0): + A mode was not specified. Do not use. + NO_REBALANCE (1): + Do not rebalance automatically. + AUTO_REBALANCE_ON_SCALE_UP (2): + Automatically rebalance topic partitions + among brokers when the cluster is scaled up. + """ + MODE_UNSPECIFIED = 0 + NO_REBALANCE = 1 + AUTO_REBALANCE_ON_SCALE_UP = 2 + + mode: Mode = proto.Field( + proto.ENUM, + number=1, + enum=Mode, + ) + + +class NetworkConfig(proto.Message): + r"""The configuration of a Virtual Private Cloud (VPC) network + that can access the Kafka cluster. + + Attributes: + subnet (str): + Required. Name of the VPC subnet in which to create Private + Service Connect (PSC) endpoints for the Kafka brokers and + bootstrap address. Structured like: + projects/{project}/regions/{region}/subnetworks/{subnet_id} + + The subnet must be located in the same region as the Kafka + cluster. The project may differ. Multiple subnets from the + same parent network must not be specified. 
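            For illustration, a subnet name in this form is typically
            wired into the cluster's access configuration as sketched
            below (placeholder project, region and subnet names)::

                from google.cloud import managedkafka_v1

                # One NetworkConfig per subnet that should reach the cluster.
                network_config = managedkafka_v1.NetworkConfig(
                    subnet="projects/my-project/regions/us-central1/subnetworks/my-subnet",
                )
                gcp_config = managedkafka_v1.GcpConfig(
                    access_config=managedkafka_v1.AccessConfig(
                        network_configs=[network_config],
                    ),
                )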
+ + The CIDR range of the subnet must be within the IPv4 address + ranges for private networks, as specified in RFC 1918. + """ + + subnet: str = proto.Field( + proto.STRING, + number=2, + ) + + +class AccessConfig(proto.Message): + r"""The configuration of access to the Kafka cluster. + + Attributes: + network_configs (MutableSequence[google.cloud.managedkafka_v1.types.NetworkConfig]): + Required. Virtual Private Cloud (VPC) + networks that must be granted direct access to + the Kafka cluster. Minimum of 1 network is + required. Maximum 10 networks can be specified. + """ + + network_configs: MutableSequence["NetworkConfig"] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="NetworkConfig", + ) + + +class GcpConfig(proto.Message): + r"""Configuration properties for a Kafka cluster deployed to + Google Cloud Platform. + + Attributes: + access_config (google.cloud.managedkafka_v1.types.AccessConfig): + Required. Access configuration for the Kafka + cluster. + kms_key (str): + Optional. Immutable. The Cloud KMS Key name to use for + encryption. The key must be located in the same region as + the cluster and cannot be changed. Structured like: + projects/{project}/locations/{location}/keyRings/{key_ring}/cryptoKeys/{crypto_key}. + Note that the project component only accepts a project ID, + and not a project number. + """ + + access_config: "AccessConfig" = proto.Field( + proto.MESSAGE, + number=3, + message="AccessConfig", + ) + kms_key: str = proto.Field( + proto.STRING, + number=2, + ) + + +class Topic(proto.Message): + r"""A Kafka topic in a given cluster. + + Attributes: + name (str): + Identifier. The name of the topic. The ``topic`` segment is + used when connecting directly to the cluster. Structured + like: + projects/{project}/locations/{location}/clusters/{cluster}/topics/{topic} + partition_count (int): + Required. The number of partitions this topic + has. The partition count can only be increased, + not decreased. Please note that if partitions + are increased for a topic that has a key, the + partitioning logic or the ordering of the + messages will be affected. + replication_factor (int): + Required. Immutable. The number of replicas + of each partition. A replication factor of 3 is + recommended for high availability. + configs (MutableMapping[str, str]): + Optional. Configurations for the topic that are overridden + from the cluster defaults. The key of the map is a Kafka + topic property name, for example: ``cleanup.policy``, + ``compression.type``. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + partition_count: int = proto.Field( + proto.INT32, + number=2, + ) + replication_factor: int = proto.Field( + proto.INT32, + number=3, + ) + configs: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=4, + ) + + +class ConsumerTopicMetadata(proto.Message): + r"""Metadata for a consumer group corresponding to a specific + topic. + + Attributes: + partitions (MutableMapping[int, google.cloud.managedkafka_v1.types.ConsumerPartitionMetadata]): + Optional. Metadata for this consumer group + and topic for all partition indexes it has + metadata for. + """ + + partitions: MutableMapping[int, "ConsumerPartitionMetadata"] = proto.MapField( + proto.INT32, + proto.MESSAGE, + number=1, + message="ConsumerPartitionMetadata", + ) + + +class ConsumerPartitionMetadata(proto.Message): + r"""Metadata for a consumer group corresponding to a specific + partition. + + Attributes: + offset (int): + Required. 
The offset for this partition, or 0 + if no offset has been committed. + metadata (str): + Optional. The associated metadata for this + partition, or empty if it does not exist. + """ + + offset: int = proto.Field( + proto.INT64, + number=1, + ) + metadata: str = proto.Field( + proto.STRING, + number=2, + ) + + +class ConsumerGroup(proto.Message): + r"""A Kafka consumer group in a given cluster. + + Attributes: + name (str): + Identifier. The name of the consumer group. The + ``consumer_group`` segment is used when connecting directly + to the cluster. Structured like: + projects/{project}/locations/{location}/clusters/{cluster}/consumerGroups/{consumer_group} + topics (MutableMapping[str, google.cloud.managedkafka_v1.types.ConsumerTopicMetadata]): + Optional. Metadata for this consumer group + for all topics it has metadata for. The key of + the map is a topic name, structured like: + + projects/{project}/locations/{location}/clusters/{cluster}/topics/{topic} + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + topics: MutableMapping[str, "ConsumerTopicMetadata"] = proto.MapField( + proto.STRING, + proto.MESSAGE, + number=2, + message="ConsumerTopicMetadata", + ) + + +class OperationMetadata(proto.Message): + r"""Represents the metadata of the long-running operation. + + Attributes: + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time the operation was + created. + end_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time the operation finished + running. + target (str): + Output only. Server-defined resource path for + the target of the operation. + verb (str): + Output only. Name of the verb executed by the + operation. + status_message (str): + Output only. Human-readable status of the + operation, if any. + requested_cancellation (bool): + Output only. Identifies whether the user has requested + cancellation of the operation. Operations that have been + cancelled successfully have [Operation.error][] value with a + [google.rpc.Status.code][google.rpc.Status.code] of 1, + corresponding to ``Code.CANCELLED``. + api_version (str): + Output only. API version used to start the + operation. + """ + + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=1, + message=timestamp_pb2.Timestamp, + ) + end_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + target: str = proto.Field( + proto.STRING, + number=3, + ) + verb: str = proto.Field( + proto.STRING, + number=4, + ) + status_message: str = proto.Field( + proto.STRING, + number=5, + ) + requested_cancellation: bool = proto.Field( + proto.BOOL, + number=6, + ) + api_version: str = proto.Field( + proto.STRING, + number=7, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-managedkafka/mypy.ini b/packages/google-cloud-managedkafka/mypy.ini new file mode 100644 index 000000000000..574c5aed394b --- /dev/null +++ b/packages/google-cloud-managedkafka/mypy.ini @@ -0,0 +1,3 @@ +[mypy] +python_version = 3.7 +namespace_packages = True diff --git a/packages/google-cloud-managedkafka/noxfile.py b/packages/google-cloud-managedkafka/noxfile.py new file mode 100644 index 000000000000..1e6cd48d0529 --- /dev/null +++ b/packages/google-cloud-managedkafka/noxfile.py @@ -0,0 +1,428 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Generated by synthtool. DO NOT EDIT! + +from __future__ import absolute_import + +import os +import pathlib +import re +import shutil +from typing import Dict, List +import warnings + +import nox + +BLACK_VERSION = "black[jupyter]==23.7.0" +ISORT_VERSION = "isort==5.11.0" + +LINT_PATHS = ["docs", "google", "tests", "noxfile.py", "setup.py"] + + +DEFAULT_PYTHON_VERSION = "3.10" + +UNIT_TEST_PYTHON_VERSIONS: List[str] = ["3.7", "3.8", "3.9", "3.10", "3.11", "3.12"] +UNIT_TEST_STANDARD_DEPENDENCIES = [ + "mock", + "asyncmock", + "pytest", + "pytest-cov", + "pytest-asyncio", +] +UNIT_TEST_EXTERNAL_DEPENDENCIES: List[str] = [] +UNIT_TEST_LOCAL_DEPENDENCIES: List[str] = [] +UNIT_TEST_DEPENDENCIES: List[str] = [] +UNIT_TEST_EXTRAS: List[str] = [] +UNIT_TEST_EXTRAS_BY_PYTHON: Dict[str, List[str]] = {} + +SYSTEM_TEST_PYTHON_VERSIONS: List[str] = ["3.8", "3.9", "3.10", "3.11", "3.12"] +SYSTEM_TEST_STANDARD_DEPENDENCIES = [ + "mock", + "pytest", + "google-cloud-testutils", +] +SYSTEM_TEST_EXTERNAL_DEPENDENCIES: List[str] = [] +SYSTEM_TEST_LOCAL_DEPENDENCIES: List[str] = [] +SYSTEM_TEST_DEPENDENCIES: List[str] = [] +SYSTEM_TEST_EXTRAS: List[str] = [] +SYSTEM_TEST_EXTRAS_BY_PYTHON: Dict[str, List[str]] = {} + +CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute() + +# 'docfx' is excluded since it only needs to run in 'docs-presubmit' +nox.options.sessions = [ + "unit", + "system", + "cover", + "lint", + "lint_setup_py", + "blacken", + "docs", +] + +# Error if a python version is missing +nox.options.error_on_missing_interpreters = True + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def lint(session): + """Run linters. + + Returns a failure if the linters find linting errors or sufficiently + serious code quality issues. + """ + session.install("flake8", BLACK_VERSION) + session.run( + "black", + "--check", + *LINT_PATHS, + ) + + session.run("flake8", "google", "tests") + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def blacken(session): + """Run black. Format code to uniform standard.""" + session.install(BLACK_VERSION) + session.run( + "black", + *LINT_PATHS, + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def format(session): + """ + Run isort to sort imports. Then run black + to format code to uniform standard. + """ + session.install(BLACK_VERSION, ISORT_VERSION) + # Use the --fss option to sort imports using strict alphabetical order. 
+ # See https://pycqa.github.io/isort/docs/configuration/options.html#force-sort-within-sections + session.run( + "isort", + "--fss", + *LINT_PATHS, + ) + session.run( + "black", + *LINT_PATHS, + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def lint_setup_py(session): + """Verify that setup.py is valid (including RST check).""" + session.install("docutils", "pygments") + session.run("python", "setup.py", "check", "--restructuredtext", "--strict") + + +def install_unittest_dependencies(session, *constraints): + standard_deps = UNIT_TEST_STANDARD_DEPENDENCIES + UNIT_TEST_DEPENDENCIES + session.install(*standard_deps, *constraints) + + if UNIT_TEST_EXTERNAL_DEPENDENCIES: + warnings.warn( + "'unit_test_external_dependencies' is deprecated. Instead, please " + "use 'unit_test_dependencies' or 'unit_test_local_dependencies'.", + DeprecationWarning, + ) + session.install(*UNIT_TEST_EXTERNAL_DEPENDENCIES, *constraints) + + if UNIT_TEST_LOCAL_DEPENDENCIES: + session.install(*UNIT_TEST_LOCAL_DEPENDENCIES, *constraints) + + if UNIT_TEST_EXTRAS_BY_PYTHON: + extras = UNIT_TEST_EXTRAS_BY_PYTHON.get(session.python, []) + elif UNIT_TEST_EXTRAS: + extras = UNIT_TEST_EXTRAS + else: + extras = [] + + if extras: + session.install("-e", f".[{','.join(extras)}]", *constraints) + else: + session.install("-e", ".", *constraints) + + +def default(session): + # Install all test dependencies, then install this package in-place. + + constraints_path = str( + CURRENT_DIRECTORY / "testing" / f"constraints-{session.python}.txt" + ) + install_unittest_dependencies(session, "-c", constraints_path) + + # Run py.test against the unit tests. + session.run( + "py.test", + "--quiet", + f"--junitxml=unit_{session.python}_sponge_log.xml", + "--cov=google", + "--cov=tests/unit", + "--cov-append", + "--cov-config=.coveragerc", + "--cov-report=", + "--cov-fail-under=0", + os.path.join("tests", "unit"), + *session.posargs, + ) + + +@nox.session(python=UNIT_TEST_PYTHON_VERSIONS) +def unit(session): + """Run the unit test suite.""" + default(session) + + +def install_systemtest_dependencies(session, *constraints): + # Use pre-release gRPC for system tests. + # Exclude version 1.52.0rc1 which has a known issue. + # See https://github.com/grpc/grpc/issues/32163 + session.install("--pre", "grpcio!=1.52.0rc1") + + session.install(*SYSTEM_TEST_STANDARD_DEPENDENCIES, *constraints) + + if SYSTEM_TEST_EXTERNAL_DEPENDENCIES: + session.install(*SYSTEM_TEST_EXTERNAL_DEPENDENCIES, *constraints) + + if SYSTEM_TEST_LOCAL_DEPENDENCIES: + session.install("-e", *SYSTEM_TEST_LOCAL_DEPENDENCIES, *constraints) + + if SYSTEM_TEST_DEPENDENCIES: + session.install("-e", *SYSTEM_TEST_DEPENDENCIES, *constraints) + + if SYSTEM_TEST_EXTRAS_BY_PYTHON: + extras = SYSTEM_TEST_EXTRAS_BY_PYTHON.get(session.python, []) + elif SYSTEM_TEST_EXTRAS: + extras = SYSTEM_TEST_EXTRAS + else: + extras = [] + + if extras: + session.install("-e", f".[{','.join(extras)}]", *constraints) + else: + session.install("-e", ".", *constraints) + + +@nox.session(python=SYSTEM_TEST_PYTHON_VERSIONS) +def system(session): + """Run the system test suite.""" + constraints_path = str( + CURRENT_DIRECTORY / "testing" / f"constraints-{session.python}.txt" + ) + system_test_path = os.path.join("tests", "system.py") + system_test_folder_path = os.path.join("tests", "system") + + # Check the value of `RUN_SYSTEM_TESTS` env var. It defaults to true. 
+ if os.environ.get("RUN_SYSTEM_TESTS", "true") == "false": + session.skip("RUN_SYSTEM_TESTS is set to false, skipping") + # Install pyopenssl for mTLS testing. + if os.environ.get("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") == "true": + session.install("pyopenssl") + + system_test_exists = os.path.exists(system_test_path) + system_test_folder_exists = os.path.exists(system_test_folder_path) + # Sanity check: only run tests if found. + if not system_test_exists and not system_test_folder_exists: + session.skip("System tests were not found") + + install_systemtest_dependencies(session, "-c", constraints_path) + + # Run py.test against the system tests. + if system_test_exists: + session.run( + "py.test", + "--quiet", + f"--junitxml=system_{session.python}_sponge_log.xml", + system_test_path, + *session.posargs, + ) + if system_test_folder_exists: + session.run( + "py.test", + "--quiet", + f"--junitxml=system_{session.python}_sponge_log.xml", + system_test_folder_path, + *session.posargs, + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def cover(session): + """Run the final coverage report. + + This outputs the coverage report aggregating coverage from the unit + test runs (not system test runs), and then erases coverage data. + """ + session.install("coverage", "pytest-cov") + session.run("coverage", "report", "--show-missing", "--fail-under=100") + + session.run("coverage", "erase") + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def docs(session): + """Build the docs for this library.""" + + session.install("-e", ".") + session.install( + # We need to pin to specific versions of the `sphinxcontrib-*` packages + # which still support sphinx 4.x. + # See https://github.com/googleapis/sphinx-docfx-yaml/issues/344 + # and https://github.com/googleapis/sphinx-docfx-yaml/issues/345. + "sphinxcontrib-applehelp==1.0.4", + "sphinxcontrib-devhelp==1.0.2", + "sphinxcontrib-htmlhelp==2.0.1", + "sphinxcontrib-qthelp==1.0.3", + "sphinxcontrib-serializinghtml==1.1.5", + "sphinx==4.5.0", + "alabaster", + "recommonmark", + ) + + shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) + session.run( + "sphinx-build", + "-W", # warnings as errors + "-T", # show full traceback on exception + "-N", # no colors + "-b", + "html", + "-d", + os.path.join("docs", "_build", "doctrees", ""), + os.path.join("docs", ""), + os.path.join("docs", "_build", "html", ""), + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def docfx(session): + """Build the docfx yaml files for this library.""" + + session.install("-e", ".") + session.install( + # We need to pin to specific versions of the `sphinxcontrib-*` packages + # which still support sphinx 4.x. + # See https://github.com/googleapis/sphinx-docfx-yaml/issues/344 + # and https://github.com/googleapis/sphinx-docfx-yaml/issues/345. 
+ "sphinxcontrib-applehelp==1.0.4", + "sphinxcontrib-devhelp==1.0.2", + "sphinxcontrib-htmlhelp==2.0.1", + "sphinxcontrib-qthelp==1.0.3", + "sphinxcontrib-serializinghtml==1.1.5", + "gcp-sphinx-docfx-yaml", + "alabaster", + "recommonmark", + ) + + shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) + session.run( + "sphinx-build", + "-T", # show full traceback on exception + "-N", # no colors + "-D", + ( + "extensions=sphinx.ext.autodoc," + "sphinx.ext.autosummary," + "docfx_yaml.extension," + "sphinx.ext.intersphinx," + "sphinx.ext.coverage," + "sphinx.ext.napoleon," + "sphinx.ext.todo," + "sphinx.ext.viewcode," + "recommonmark" + ), + "-b", + "html", + "-d", + os.path.join("docs", "_build", "doctrees", ""), + os.path.join("docs", ""), + os.path.join("docs", "_build", "html", ""), + ) + + +@nox.session(python="3.12") +def prerelease_deps(session): + """Run all tests with prerelease versions of dependencies installed.""" + + # Install all dependencies + session.install("-e", ".[all, tests, tracing]") + unit_deps_all = UNIT_TEST_STANDARD_DEPENDENCIES + UNIT_TEST_EXTERNAL_DEPENDENCIES + session.install(*unit_deps_all) + system_deps_all = ( + SYSTEM_TEST_STANDARD_DEPENDENCIES + + SYSTEM_TEST_EXTERNAL_DEPENDENCIES + + SYSTEM_TEST_EXTRAS + ) + session.install(*system_deps_all) + + # Because we test minimum dependency versions on the minimum Python + # version, the first version we test with in the unit tests sessions has a + # constraints file containing all dependencies and extras. + with open( + CURRENT_DIRECTORY + / "testing" + / f"constraints-{UNIT_TEST_PYTHON_VERSIONS[0]}.txt", + encoding="utf-8", + ) as constraints_file: + constraints_text = constraints_file.read() + + # Ignore leading whitespace and comment lines. + constraints_deps = [ + match.group(1) + for match in re.finditer( + r"^\s*(\S+)(?===\S+)", constraints_text, flags=re.MULTILINE + ) + ] + + session.install(*constraints_deps) + + prerel_deps = [ + "protobuf", + # dependency of grpc + "six", + "googleapis-common-protos", + # Exclude version 1.52.0rc1 which has a known issue. 
See https://github.com/grpc/grpc/issues/32163 + "grpcio!=1.52.0rc1", + "grpcio-status", + "google-api-core", + "google-auth", + "proto-plus", + "google-cloud-testutils", + # dependencies of google-cloud-testutils" + "click", + ] + + for dep in prerel_deps: + session.install("--pre", "--no-deps", "--upgrade", dep) + + # Remaining dependencies + other_deps = [ + "requests", + ] + session.install(*other_deps) + + # Print out prerelease package versions + session.run( + "python", "-c", "import google.protobuf; print(google.protobuf.__version__)" + ) + session.run("python", "-c", "import grpc; print(grpc.__version__)") + session.run("python", "-c", "import google.auth; print(google.auth.__version__)") + + session.run("py.test", "tests/unit") diff --git a/packages/google-cloud-managedkafka/samples/generated_samples/managedkafka_v1_generated_managed_kafka_create_cluster_async.py b/packages/google-cloud-managedkafka/samples/generated_samples/managedkafka_v1_generated_managed_kafka_create_cluster_async.py new file mode 100644 index 000000000000..79d7b5375b03 --- /dev/null +++ b/packages/google-cloud-managedkafka/samples/generated_samples/managedkafka_v1_generated_managed_kafka_create_cluster_async.py @@ -0,0 +1,63 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateCluster +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-managedkafka + + +# [START managedkafka_v1_generated_ManagedKafka_CreateCluster_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import managedkafka_v1 + + +async def sample_create_cluster(): + # Create a client + client = managedkafka_v1.ManagedKafkaAsyncClient() + + # Initialize request argument(s) + cluster = managedkafka_v1.Cluster() + cluster.gcp_config.access_config.network_configs.subnet = "subnet_value" + cluster.capacity_config.vcpu_count = 1094 + cluster.capacity_config.memory_bytes = 1311 + + request = managedkafka_v1.CreateClusterRequest( + parent="parent_value", + cluster_id="cluster_id_value", + cluster=cluster, + ) + + # Make the request + operation = client.create_cluster(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END managedkafka_v1_generated_ManagedKafka_CreateCluster_async] diff --git a/packages/google-cloud-managedkafka/samples/generated_samples/managedkafka_v1_generated_managed_kafka_create_cluster_sync.py b/packages/google-cloud-managedkafka/samples/generated_samples/managedkafka_v1_generated_managed_kafka_create_cluster_sync.py new file mode 100644 index 000000000000..434b20484afc --- /dev/null +++ b/packages/google-cloud-managedkafka/samples/generated_samples/managedkafka_v1_generated_managed_kafka_create_cluster_sync.py @@ -0,0 +1,63 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateCluster +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-managedkafka + + +# [START managedkafka_v1_generated_ManagedKafka_CreateCluster_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import managedkafka_v1 + + +def sample_create_cluster(): + # Create a client + client = managedkafka_v1.ManagedKafkaClient() + + # Initialize request argument(s) + cluster = managedkafka_v1.Cluster() + cluster.gcp_config.access_config.network_configs.subnet = "subnet_value" + cluster.capacity_config.vcpu_count = 1094 + cluster.capacity_config.memory_bytes = 1311 + + request = managedkafka_v1.CreateClusterRequest( + parent="parent_value", + cluster_id="cluster_id_value", + cluster=cluster, + ) + + # Make the request + operation = client.create_cluster(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END managedkafka_v1_generated_ManagedKafka_CreateCluster_sync] diff --git a/packages/google-cloud-managedkafka/samples/generated_samples/managedkafka_v1_generated_managed_kafka_create_topic_async.py b/packages/google-cloud-managedkafka/samples/generated_samples/managedkafka_v1_generated_managed_kafka_create_topic_async.py new file mode 100644 index 000000000000..8b360e8118a3 --- /dev/null +++ b/packages/google-cloud-managedkafka/samples/generated_samples/managedkafka_v1_generated_managed_kafka_create_topic_async.py @@ -0,0 +1,58 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateTopic +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-managedkafka + + +# [START managedkafka_v1_generated_ManagedKafka_CreateTopic_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import managedkafka_v1 + + +async def sample_create_topic(): + # Create a client + client = managedkafka_v1.ManagedKafkaAsyncClient() + + # Initialize request argument(s) + topic = managedkafka_v1.Topic() + topic.partition_count = 1634 + topic.replication_factor = 1912 + + request = managedkafka_v1.CreateTopicRequest( + parent="parent_value", + topic_id="topic_id_value", + topic=topic, + ) + + # Make the request + response = await client.create_topic(request=request) + + # Handle the response + print(response) + +# [END managedkafka_v1_generated_ManagedKafka_CreateTopic_async] diff --git a/packages/google-cloud-managedkafka/samples/generated_samples/managedkafka_v1_generated_managed_kafka_create_topic_sync.py b/packages/google-cloud-managedkafka/samples/generated_samples/managedkafka_v1_generated_managed_kafka_create_topic_sync.py new file mode 100644 index 000000000000..f6d5af066487 --- /dev/null +++ b/packages/google-cloud-managedkafka/samples/generated_samples/managedkafka_v1_generated_managed_kafka_create_topic_sync.py @@ -0,0 +1,58 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateTopic +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-managedkafka + + +# [START managedkafka_v1_generated_ManagedKafka_CreateTopic_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import managedkafka_v1 + + +def sample_create_topic(): + # Create a client + client = managedkafka_v1.ManagedKafkaClient() + + # Initialize request argument(s) + topic = managedkafka_v1.Topic() + topic.partition_count = 1634 + topic.replication_factor = 1912 + + request = managedkafka_v1.CreateTopicRequest( + parent="parent_value", + topic_id="topic_id_value", + topic=topic, + ) + + # Make the request + response = client.create_topic(request=request) + + # Handle the response + print(response) + +# [END managedkafka_v1_generated_ManagedKafka_CreateTopic_sync] diff --git a/packages/google-cloud-managedkafka/samples/generated_samples/managedkafka_v1_generated_managed_kafka_delete_cluster_async.py b/packages/google-cloud-managedkafka/samples/generated_samples/managedkafka_v1_generated_managed_kafka_delete_cluster_async.py new file mode 100644 index 000000000000..2bd140df30fd --- /dev/null +++ b/packages/google-cloud-managedkafka/samples/generated_samples/managedkafka_v1_generated_managed_kafka_delete_cluster_async.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteCluster +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-managedkafka + + +# [START managedkafka_v1_generated_ManagedKafka_DeleteCluster_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import managedkafka_v1 + + +async def sample_delete_cluster(): + # Create a client + client = managedkafka_v1.ManagedKafkaAsyncClient() + + # Initialize request argument(s) + request = managedkafka_v1.DeleteClusterRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_cluster(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END managedkafka_v1_generated_ManagedKafka_DeleteCluster_async] diff --git a/packages/google-cloud-managedkafka/samples/generated_samples/managedkafka_v1_generated_managed_kafka_delete_cluster_sync.py b/packages/google-cloud-managedkafka/samples/generated_samples/managedkafka_v1_generated_managed_kafka_delete_cluster_sync.py new file mode 100644 index 000000000000..80e7233c911d --- /dev/null +++ b/packages/google-cloud-managedkafka/samples/generated_samples/managedkafka_v1_generated_managed_kafka_delete_cluster_sync.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteCluster +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-managedkafka + + +# [START managedkafka_v1_generated_ManagedKafka_DeleteCluster_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import managedkafka_v1 + + +def sample_delete_cluster(): + # Create a client + client = managedkafka_v1.ManagedKafkaClient() + + # Initialize request argument(s) + request = managedkafka_v1.DeleteClusterRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_cluster(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END managedkafka_v1_generated_ManagedKafka_DeleteCluster_sync] diff --git a/packages/google-cloud-managedkafka/samples/generated_samples/managedkafka_v1_generated_managed_kafka_delete_consumer_group_async.py b/packages/google-cloud-managedkafka/samples/generated_samples/managedkafka_v1_generated_managed_kafka_delete_consumer_group_async.py new file mode 100644 index 000000000000..a028587e98b0 --- /dev/null +++ b/packages/google-cloud-managedkafka/samples/generated_samples/managedkafka_v1_generated_managed_kafka_delete_consumer_group_async.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteConsumerGroup +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-managedkafka + + +# [START managedkafka_v1_generated_ManagedKafka_DeleteConsumerGroup_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import managedkafka_v1 + + +async def sample_delete_consumer_group(): + # Create a client + client = managedkafka_v1.ManagedKafkaAsyncClient() + + # Initialize request argument(s) + request = managedkafka_v1.DeleteConsumerGroupRequest( + name="name_value", + ) + + # Make the request + await client.delete_consumer_group(request=request) + + +# [END managedkafka_v1_generated_ManagedKafka_DeleteConsumerGroup_async] diff --git a/packages/google-cloud-managedkafka/samples/generated_samples/managedkafka_v1_generated_managed_kafka_delete_consumer_group_sync.py b/packages/google-cloud-managedkafka/samples/generated_samples/managedkafka_v1_generated_managed_kafka_delete_consumer_group_sync.py new file mode 100644 index 000000000000..39dbfea7b954 --- /dev/null +++ b/packages/google-cloud-managedkafka/samples/generated_samples/managedkafka_v1_generated_managed_kafka_delete_consumer_group_sync.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteConsumerGroup +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-managedkafka + + +# [START managedkafka_v1_generated_ManagedKafka_DeleteConsumerGroup_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import managedkafka_v1 + + +def sample_delete_consumer_group(): + # Create a client + client = managedkafka_v1.ManagedKafkaClient() + + # Initialize request argument(s) + request = managedkafka_v1.DeleteConsumerGroupRequest( + name="name_value", + ) + + # Make the request + client.delete_consumer_group(request=request) + + +# [END managedkafka_v1_generated_ManagedKafka_DeleteConsumerGroup_sync] diff --git a/packages/google-cloud-managedkafka/samples/generated_samples/managedkafka_v1_generated_managed_kafka_delete_topic_async.py b/packages/google-cloud-managedkafka/samples/generated_samples/managedkafka_v1_generated_managed_kafka_delete_topic_async.py new file mode 100644 index 000000000000..314d0637ea83 --- /dev/null +++ b/packages/google-cloud-managedkafka/samples/generated_samples/managedkafka_v1_generated_managed_kafka_delete_topic_async.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteTopic +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-managedkafka + + +# [START managedkafka_v1_generated_ManagedKafka_DeleteTopic_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import managedkafka_v1 + + +async def sample_delete_topic(): + # Create a client + client = managedkafka_v1.ManagedKafkaAsyncClient() + + # Initialize request argument(s) + request = managedkafka_v1.DeleteTopicRequest( + name="name_value", + ) + + # Make the request + await client.delete_topic(request=request) + + +# [END managedkafka_v1_generated_ManagedKafka_DeleteTopic_async] diff --git a/packages/google-cloud-managedkafka/samples/generated_samples/managedkafka_v1_generated_managed_kafka_delete_topic_sync.py b/packages/google-cloud-managedkafka/samples/generated_samples/managedkafka_v1_generated_managed_kafka_delete_topic_sync.py new file mode 100644 index 000000000000..55ca1768a6a7 --- /dev/null +++ b/packages/google-cloud-managedkafka/samples/generated_samples/managedkafka_v1_generated_managed_kafka_delete_topic_sync.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteTopic +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-managedkafka + + +# [START managedkafka_v1_generated_ManagedKafka_DeleteTopic_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import managedkafka_v1 + + +def sample_delete_topic(): + # Create a client + client = managedkafka_v1.ManagedKafkaClient() + + # Initialize request argument(s) + request = managedkafka_v1.DeleteTopicRequest( + name="name_value", + ) + + # Make the request + client.delete_topic(request=request) + + +# [END managedkafka_v1_generated_ManagedKafka_DeleteTopic_sync] diff --git a/packages/google-cloud-managedkafka/samples/generated_samples/managedkafka_v1_generated_managed_kafka_get_cluster_async.py b/packages/google-cloud-managedkafka/samples/generated_samples/managedkafka_v1_generated_managed_kafka_get_cluster_async.py new file mode 100644 index 000000000000..0a976de646d7 --- /dev/null +++ b/packages/google-cloud-managedkafka/samples/generated_samples/managedkafka_v1_generated_managed_kafka_get_cluster_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetCluster +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-managedkafka + + +# [START managedkafka_v1_generated_ManagedKafka_GetCluster_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import managedkafka_v1 + + +async def sample_get_cluster(): + # Create a client + client = managedkafka_v1.ManagedKafkaAsyncClient() + + # Initialize request argument(s) + request = managedkafka_v1.GetClusterRequest( + name="name_value", + ) + + # Make the request + response = await client.get_cluster(request=request) + + # Handle the response + print(response) + +# [END managedkafka_v1_generated_ManagedKafka_GetCluster_async] diff --git a/packages/google-cloud-managedkafka/samples/generated_samples/managedkafka_v1_generated_managed_kafka_get_cluster_sync.py b/packages/google-cloud-managedkafka/samples/generated_samples/managedkafka_v1_generated_managed_kafka_get_cluster_sync.py new file mode 100644 index 000000000000..91e0634abb3f --- /dev/null +++ b/packages/google-cloud-managedkafka/samples/generated_samples/managedkafka_v1_generated_managed_kafka_get_cluster_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetCluster +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-managedkafka + + +# [START managedkafka_v1_generated_ManagedKafka_GetCluster_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import managedkafka_v1 + + +def sample_get_cluster(): + # Create a client + client = managedkafka_v1.ManagedKafkaClient() + + # Initialize request argument(s) + request = managedkafka_v1.GetClusterRequest( + name="name_value", + ) + + # Make the request + response = client.get_cluster(request=request) + + # Handle the response + print(response) + +# [END managedkafka_v1_generated_ManagedKafka_GetCluster_sync] diff --git a/packages/google-cloud-managedkafka/samples/generated_samples/managedkafka_v1_generated_managed_kafka_get_consumer_group_async.py b/packages/google-cloud-managedkafka/samples/generated_samples/managedkafka_v1_generated_managed_kafka_get_consumer_group_async.py new file mode 100644 index 000000000000..a2c263fe1e4c --- /dev/null +++ b/packages/google-cloud-managedkafka/samples/generated_samples/managedkafka_v1_generated_managed_kafka_get_consumer_group_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetConsumerGroup +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-managedkafka + + +# [START managedkafka_v1_generated_ManagedKafka_GetConsumerGroup_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import managedkafka_v1 + + +async def sample_get_consumer_group(): + # Create a client + client = managedkafka_v1.ManagedKafkaAsyncClient() + + # Initialize request argument(s) + request = managedkafka_v1.GetConsumerGroupRequest( + name="name_value", + ) + + # Make the request + response = await client.get_consumer_group(request=request) + + # Handle the response + print(response) + +# [END managedkafka_v1_generated_ManagedKafka_GetConsumerGroup_async] diff --git a/packages/google-cloud-managedkafka/samples/generated_samples/managedkafka_v1_generated_managed_kafka_get_consumer_group_sync.py b/packages/google-cloud-managedkafka/samples/generated_samples/managedkafka_v1_generated_managed_kafka_get_consumer_group_sync.py new file mode 100644 index 000000000000..57864a86c774 --- /dev/null +++ b/packages/google-cloud-managedkafka/samples/generated_samples/managedkafka_v1_generated_managed_kafka_get_consumer_group_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetConsumerGroup +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-managedkafka + + +# [START managedkafka_v1_generated_ManagedKafka_GetConsumerGroup_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import managedkafka_v1 + + +def sample_get_consumer_group(): + # Create a client + client = managedkafka_v1.ManagedKafkaClient() + + # Initialize request argument(s) + request = managedkafka_v1.GetConsumerGroupRequest( + name="name_value", + ) + + # Make the request + response = client.get_consumer_group(request=request) + + # Handle the response + print(response) + +# [END managedkafka_v1_generated_ManagedKafka_GetConsumerGroup_sync] diff --git a/packages/google-cloud-managedkafka/samples/generated_samples/managedkafka_v1_generated_managed_kafka_get_topic_async.py b/packages/google-cloud-managedkafka/samples/generated_samples/managedkafka_v1_generated_managed_kafka_get_topic_async.py new file mode 100644 index 000000000000..6f12e700c5cf --- /dev/null +++ b/packages/google-cloud-managedkafka/samples/generated_samples/managedkafka_v1_generated_managed_kafka_get_topic_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetTopic +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-managedkafka + + +# [START managedkafka_v1_generated_ManagedKafka_GetTopic_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
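+#   For illustration only (hypothetical values): topic names follow the
+#   pattern
+#   "projects/{project}/locations/{location}/clusters/{cluster}/topics/{topic}";
+#   substitute a name of that shape for the "name_value" placeholder below.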
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import managedkafka_v1 + + +async def sample_get_topic(): + # Create a client + client = managedkafka_v1.ManagedKafkaAsyncClient() + + # Initialize request argument(s) + request = managedkafka_v1.GetTopicRequest( + name="name_value", + ) + + # Make the request + response = await client.get_topic(request=request) + + # Handle the response + print(response) + +# [END managedkafka_v1_generated_ManagedKafka_GetTopic_async] diff --git a/packages/google-cloud-managedkafka/samples/generated_samples/managedkafka_v1_generated_managed_kafka_get_topic_sync.py b/packages/google-cloud-managedkafka/samples/generated_samples/managedkafka_v1_generated_managed_kafka_get_topic_sync.py new file mode 100644 index 000000000000..7eadf98d88b8 --- /dev/null +++ b/packages/google-cloud-managedkafka/samples/generated_samples/managedkafka_v1_generated_managed_kafka_get_topic_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetTopic +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-managedkafka + + +# [START managedkafka_v1_generated_ManagedKafka_GetTopic_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
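+#   Illustrative sketch (assuming the standard generated resource-path
+#   helpers are present on this client): the placeholder name below could be
+#   built with hypothetical values as
+#       name = managedkafka_v1.ManagedKafkaClient.topic_path(
+#           "my-project", "us-central1", "my-cluster", "my-topic")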
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import managedkafka_v1 + + +def sample_get_topic(): + # Create a client + client = managedkafka_v1.ManagedKafkaClient() + + # Initialize request argument(s) + request = managedkafka_v1.GetTopicRequest( + name="name_value", + ) + + # Make the request + response = client.get_topic(request=request) + + # Handle the response + print(response) + +# [END managedkafka_v1_generated_ManagedKafka_GetTopic_sync] diff --git a/packages/google-cloud-managedkafka/samples/generated_samples/managedkafka_v1_generated_managed_kafka_list_clusters_async.py b/packages/google-cloud-managedkafka/samples/generated_samples/managedkafka_v1_generated_managed_kafka_list_clusters_async.py new file mode 100644 index 000000000000..c83b4c8c6e2d --- /dev/null +++ b/packages/google-cloud-managedkafka/samples/generated_samples/managedkafka_v1_generated_managed_kafka_list_clusters_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListClusters +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-managedkafka + + +# [START managedkafka_v1_generated_ManagedKafka_ListClusters_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
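+#   For illustration only (hypothetical values): the "parent_value"
+#   placeholder below is a location, e.g.
+#   "projects/my-project/locations/us-central1". Note also that, depending on
+#   the library version, the paginated async call may need to be awaited
+#   (page_result = await client.list_clusters(request=request)) before
+#   iterating with "async for".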
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import managedkafka_v1 + + +async def sample_list_clusters(): + # Create a client + client = managedkafka_v1.ManagedKafkaAsyncClient() + + # Initialize request argument(s) + request = managedkafka_v1.ListClustersRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_clusters(request=request) + + # Handle the response + async for response in page_result: + print(response) + +# [END managedkafka_v1_generated_ManagedKafka_ListClusters_async] diff --git a/packages/google-cloud-managedkafka/samples/generated_samples/managedkafka_v1_generated_managed_kafka_list_clusters_sync.py b/packages/google-cloud-managedkafka/samples/generated_samples/managedkafka_v1_generated_managed_kafka_list_clusters_sync.py new file mode 100644 index 000000000000..cea10cc11f0c --- /dev/null +++ b/packages/google-cloud-managedkafka/samples/generated_samples/managedkafka_v1_generated_managed_kafka_list_clusters_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListClusters +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-managedkafka + + +# [START managedkafka_v1_generated_ManagedKafka_ListClusters_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
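+#   Illustrative sketch (optional request fields, hypothetical values): the
+#   page size can be bounded, e.g.
+#       request = managedkafka_v1.ListClustersRequest(
+#           parent="projects/my-project/locations/us-central1",
+#           page_size=25,
+#       )
+#   and the returned pager can also be consumed page by page via
+#   "for page in page_result.pages".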
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import managedkafka_v1 + + +def sample_list_clusters(): + # Create a client + client = managedkafka_v1.ManagedKafkaClient() + + # Initialize request argument(s) + request = managedkafka_v1.ListClustersRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_clusters(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END managedkafka_v1_generated_ManagedKafka_ListClusters_sync] diff --git a/packages/google-cloud-managedkafka/samples/generated_samples/managedkafka_v1_generated_managed_kafka_list_consumer_groups_async.py b/packages/google-cloud-managedkafka/samples/generated_samples/managedkafka_v1_generated_managed_kafka_list_consumer_groups_async.py new file mode 100644 index 000000000000..8642c6a36dc6 --- /dev/null +++ b/packages/google-cloud-managedkafka/samples/generated_samples/managedkafka_v1_generated_managed_kafka_list_consumer_groups_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListConsumerGroups +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-managedkafka + + +# [START managedkafka_v1_generated_ManagedKafka_ListConsumerGroups_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
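+#   For illustration only (hypothetical values): the "parent_value"
+#   placeholder below is a cluster resource name, e.g.
+#   "projects/my-project/locations/us-central1/clusters/my-cluster".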
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import managedkafka_v1 + + +async def sample_list_consumer_groups(): + # Create a client + client = managedkafka_v1.ManagedKafkaAsyncClient() + + # Initialize request argument(s) + request = managedkafka_v1.ListConsumerGroupsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_consumer_groups(request=request) + + # Handle the response + async for response in page_result: + print(response) + +# [END managedkafka_v1_generated_ManagedKafka_ListConsumerGroups_async] diff --git a/packages/google-cloud-managedkafka/samples/generated_samples/managedkafka_v1_generated_managed_kafka_list_consumer_groups_sync.py b/packages/google-cloud-managedkafka/samples/generated_samples/managedkafka_v1_generated_managed_kafka_list_consumer_groups_sync.py new file mode 100644 index 000000000000..e84fb24f13e2 --- /dev/null +++ b/packages/google-cloud-managedkafka/samples/generated_samples/managedkafka_v1_generated_managed_kafka_list_consumer_groups_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListConsumerGroups +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-managedkafka + + +# [START managedkafka_v1_generated_ManagedKafka_ListConsumerGroups_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import managedkafka_v1 + + +def sample_list_consumer_groups(): + # Create a client + client = managedkafka_v1.ManagedKafkaClient() + + # Initialize request argument(s) + request = managedkafka_v1.ListConsumerGroupsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_consumer_groups(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END managedkafka_v1_generated_ManagedKafka_ListConsumerGroups_sync] diff --git a/packages/google-cloud-managedkafka/samples/generated_samples/managedkafka_v1_generated_managed_kafka_list_topics_async.py b/packages/google-cloud-managedkafka/samples/generated_samples/managedkafka_v1_generated_managed_kafka_list_topics_async.py new file mode 100644 index 000000000000..2709cdd159ae --- /dev/null +++ b/packages/google-cloud-managedkafka/samples/generated_samples/managedkafka_v1_generated_managed_kafka_list_topics_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListTopics +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-managedkafka + + +# [START managedkafka_v1_generated_ManagedKafka_ListTopics_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
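+#   For illustration only (hypothetical values): the parent here is a cluster
+#   resource name such as
+#   "projects/my-project/locations/us-central1/clusters/my-cluster"; the
+#   async pager below can also be consumed page by page with
+#   "async for page in page_result.pages".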
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import managedkafka_v1 + + +async def sample_list_topics(): + # Create a client + client = managedkafka_v1.ManagedKafkaAsyncClient() + + # Initialize request argument(s) + request = managedkafka_v1.ListTopicsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_topics(request=request) + + # Handle the response + async for response in page_result: + print(response) + +# [END managedkafka_v1_generated_ManagedKafka_ListTopics_async] diff --git a/packages/google-cloud-managedkafka/samples/generated_samples/managedkafka_v1_generated_managed_kafka_list_topics_sync.py b/packages/google-cloud-managedkafka/samples/generated_samples/managedkafka_v1_generated_managed_kafka_list_topics_sync.py new file mode 100644 index 000000000000..6fb3377eda05 --- /dev/null +++ b/packages/google-cloud-managedkafka/samples/generated_samples/managedkafka_v1_generated_managed_kafka_list_topics_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListTopics +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-managedkafka + + +# [START managedkafka_v1_generated_ManagedKafka_ListTopics_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import managedkafka_v1 + + +def sample_list_topics(): + # Create a client + client = managedkafka_v1.ManagedKafkaClient() + + # Initialize request argument(s) + request = managedkafka_v1.ListTopicsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_topics(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END managedkafka_v1_generated_ManagedKafka_ListTopics_sync] diff --git a/packages/google-cloud-managedkafka/samples/generated_samples/managedkafka_v1_generated_managed_kafka_update_cluster_async.py b/packages/google-cloud-managedkafka/samples/generated_samples/managedkafka_v1_generated_managed_kafka_update_cluster_async.py new file mode 100644 index 000000000000..6924fca95523 --- /dev/null +++ b/packages/google-cloud-managedkafka/samples/generated_samples/managedkafka_v1_generated_managed_kafka_update_cluster_async.py @@ -0,0 +1,61 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateCluster +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-managedkafka + + +# [START managedkafka_v1_generated_ManagedKafka_UpdateCluster_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
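+#   Illustrative sketch (hypothetical field paths; UpdateClusterRequest also
+#   accepts an update_mask): to restrict which fields are written, something
+#   like the following is typically added to the request below:
+#       from google.protobuf import field_mask_pb2
+#       request = managedkafka_v1.UpdateClusterRequest(
+#           cluster=cluster,
+#           update_mask=field_mask_pb2.FieldMask(
+#               paths=["capacity_config.vcpu_count",
+#                      "capacity_config.memory_bytes"]),
+#       )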
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import managedkafka_v1 + + +async def sample_update_cluster(): + # Create a client + client = managedkafka_v1.ManagedKafkaAsyncClient() + + # Initialize request argument(s) + cluster = managedkafka_v1.Cluster() + cluster.gcp_config.access_config.network_configs.subnet = "subnet_value" + cluster.capacity_config.vcpu_count = 1094 + cluster.capacity_config.memory_bytes = 1311 + + request = managedkafka_v1.UpdateClusterRequest( + cluster=cluster, + ) + + # Make the request + operation = client.update_cluster(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END managedkafka_v1_generated_ManagedKafka_UpdateCluster_async] diff --git a/packages/google-cloud-managedkafka/samples/generated_samples/managedkafka_v1_generated_managed_kafka_update_cluster_sync.py b/packages/google-cloud-managedkafka/samples/generated_samples/managedkafka_v1_generated_managed_kafka_update_cluster_sync.py new file mode 100644 index 000000000000..4ac865e49d44 --- /dev/null +++ b/packages/google-cloud-managedkafka/samples/generated_samples/managedkafka_v1_generated_managed_kafka_update_cluster_sync.py @@ -0,0 +1,61 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateCluster +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-managedkafka + + +# [START managedkafka_v1_generated_ManagedKafka_UpdateCluster_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
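+#   For illustration only: the numeric placeholders below (vcpu_count = 1094,
+#   memory_bytes = 1311) are generated filler, not realistic sizes;
+#   vcpu_count is a count of vCPUs and memory_bytes is a size in bytes
+#   (for example, hypothetically, 3 vCPUs and 3221225472 bytes, i.e. 3 GiB).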
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import managedkafka_v1 + + +def sample_update_cluster(): + # Create a client + client = managedkafka_v1.ManagedKafkaClient() + + # Initialize request argument(s) + cluster = managedkafka_v1.Cluster() + cluster.gcp_config.access_config.network_configs.subnet = "subnet_value" + cluster.capacity_config.vcpu_count = 1094 + cluster.capacity_config.memory_bytes = 1311 + + request = managedkafka_v1.UpdateClusterRequest( + cluster=cluster, + ) + + # Make the request + operation = client.update_cluster(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END managedkafka_v1_generated_ManagedKafka_UpdateCluster_sync] diff --git a/packages/google-cloud-managedkafka/samples/generated_samples/managedkafka_v1_generated_managed_kafka_update_consumer_group_async.py b/packages/google-cloud-managedkafka/samples/generated_samples/managedkafka_v1_generated_managed_kafka_update_consumer_group_async.py new file mode 100644 index 000000000000..9c64f598b850 --- /dev/null +++ b/packages/google-cloud-managedkafka/samples/generated_samples/managedkafka_v1_generated_managed_kafka_update_consumer_group_async.py @@ -0,0 +1,51 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateConsumerGroup +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-managedkafka + + +# [START managedkafka_v1_generated_ManagedKafka_UpdateConsumerGroup_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
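+#   For illustration only: the empty UpdateConsumerGroupRequest() placeholder
+#   below would typically be populated with a ConsumerGroup message whose
+#   "name" field is set to the full consumer group resource name, plus an
+#   update_mask listing the fields to change.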
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import managedkafka_v1 + + +async def sample_update_consumer_group(): + # Create a client + client = managedkafka_v1.ManagedKafkaAsyncClient() + + # Initialize request argument(s) + request = managedkafka_v1.UpdateConsumerGroupRequest( + ) + + # Make the request + response = await client.update_consumer_group(request=request) + + # Handle the response + print(response) + +# [END managedkafka_v1_generated_ManagedKafka_UpdateConsumerGroup_async] diff --git a/packages/google-cloud-managedkafka/samples/generated_samples/managedkafka_v1_generated_managed_kafka_update_consumer_group_sync.py b/packages/google-cloud-managedkafka/samples/generated_samples/managedkafka_v1_generated_managed_kafka_update_consumer_group_sync.py new file mode 100644 index 000000000000..bc96b7c4c2ac --- /dev/null +++ b/packages/google-cloud-managedkafka/samples/generated_samples/managedkafka_v1_generated_managed_kafka_update_consumer_group_sync.py @@ -0,0 +1,51 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateConsumerGroup +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-managedkafka + + +# [START managedkafka_v1_generated_ManagedKafka_UpdateConsumerGroup_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import managedkafka_v1 + + +def sample_update_consumer_group(): + # Create a client + client = managedkafka_v1.ManagedKafkaClient() + + # Initialize request argument(s) + request = managedkafka_v1.UpdateConsumerGroupRequest( + ) + + # Make the request + response = client.update_consumer_group(request=request) + + # Handle the response + print(response) + +# [END managedkafka_v1_generated_ManagedKafka_UpdateConsumerGroup_sync] diff --git a/packages/google-cloud-managedkafka/samples/generated_samples/managedkafka_v1_generated_managed_kafka_update_topic_async.py b/packages/google-cloud-managedkafka/samples/generated_samples/managedkafka_v1_generated_managed_kafka_update_topic_async.py new file mode 100644 index 000000000000..d922730296be --- /dev/null +++ b/packages/google-cloud-managedkafka/samples/generated_samples/managedkafka_v1_generated_managed_kafka_update_topic_async.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateTopic +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-managedkafka + + +# [START managedkafka_v1_generated_ManagedKafka_UpdateTopic_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
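+#   Illustrative sketch (hypothetical field path; an update_mask is normally
+#   supplied alongside the topic below):
+#       from google.protobuf import field_mask_pb2
+#       request = managedkafka_v1.UpdateTopicRequest(
+#           topic=topic,
+#           update_mask=field_mask_pb2.FieldMask(paths=["partition_count"]),
+#       )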
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import managedkafka_v1 + + +async def sample_update_topic(): + # Create a client + client = managedkafka_v1.ManagedKafkaAsyncClient() + + # Initialize request argument(s) + topic = managedkafka_v1.Topic() + topic.partition_count = 1634 + topic.replication_factor = 1912 + + request = managedkafka_v1.UpdateTopicRequest( + topic=topic, + ) + + # Make the request + response = await client.update_topic(request=request) + + # Handle the response + print(response) + +# [END managedkafka_v1_generated_ManagedKafka_UpdateTopic_async] diff --git a/packages/google-cloud-managedkafka/samples/generated_samples/managedkafka_v1_generated_managed_kafka_update_topic_sync.py b/packages/google-cloud-managedkafka/samples/generated_samples/managedkafka_v1_generated_managed_kafka_update_topic_sync.py new file mode 100644 index 000000000000..64f56b52277e --- /dev/null +++ b/packages/google-cloud-managedkafka/samples/generated_samples/managedkafka_v1_generated_managed_kafka_update_topic_sync.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateTopic +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-managedkafka + + +# [START managedkafka_v1_generated_ManagedKafka_UpdateTopic_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
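+#   Illustrative sketch (hypothetical endpoint value): a non-default API
+#   endpoint can be supplied at client construction time, e.g.
+#       from google.api_core.client_options import ClientOptions
+#       client = managedkafka_v1.ManagedKafkaClient(
+#           client_options=ClientOptions(
+#               api_endpoint="us-central1-managedkafka.googleapis.com"))
+#   as described at the client_options link in the next note.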
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import managedkafka_v1 + + +def sample_update_topic(): + # Create a client + client = managedkafka_v1.ManagedKafkaClient() + + # Initialize request argument(s) + topic = managedkafka_v1.Topic() + topic.partition_count = 1634 + topic.replication_factor = 1912 + + request = managedkafka_v1.UpdateTopicRequest( + topic=topic, + ) + + # Make the request + response = client.update_topic(request=request) + + # Handle the response + print(response) + +# [END managedkafka_v1_generated_ManagedKafka_UpdateTopic_sync] diff --git a/packages/google-cloud-managedkafka/samples/generated_samples/snippet_metadata_google.cloud.managedkafka.v1.json b/packages/google-cloud-managedkafka/samples/generated_samples/snippet_metadata_google.cloud.managedkafka.v1.json new file mode 100644 index 000000000000..37b09a104f58 --- /dev/null +++ b/packages/google-cloud-managedkafka/samples/generated_samples/snippet_metadata_google.cloud.managedkafka.v1.json @@ -0,0 +1,2313 @@ +{ + "clientLibrary": { + "apis": [ + { + "id": "google.cloud.managedkafka.v1", + "version": "v1" + } + ], + "language": "PYTHON", + "name": "google-cloud-managedkafka", + "version": "0.1.0" + }, + "snippets": [ + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.managedkafka_v1.ManagedKafkaAsyncClient", + "shortName": "ManagedKafkaAsyncClient" + }, + "fullName": "google.cloud.managedkafka_v1.ManagedKafkaAsyncClient.create_cluster", + "method": { + "fullName": "google.cloud.managedkafka.v1.ManagedKafka.CreateCluster", + "service": { + "fullName": "google.cloud.managedkafka.v1.ManagedKafka", + "shortName": "ManagedKafka" + }, + "shortName": "CreateCluster" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.managedkafka_v1.types.CreateClusterRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "cluster", + "type": "google.cloud.managedkafka_v1.types.Cluster" + }, + { + "name": "cluster_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "create_cluster" + }, + "description": "Sample for CreateCluster", + "file": "managedkafka_v1_generated_managed_kafka_create_cluster_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "managedkafka_v1_generated_ManagedKafka_CreateCluster_async", + "segments": [ + { + "end": 62, + "start": 27, + "type": "FULL" + }, + { + "end": 62, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 52, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 59, + "start": 53, + "type": "REQUEST_EXECUTION" + }, + { + "end": 63, + "start": 60, + "type": "RESPONSE_HANDLING" + } + ], + "title": "managedkafka_v1_generated_managed_kafka_create_cluster_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.managedkafka_v1.ManagedKafkaClient", + "shortName": "ManagedKafkaClient" + }, + "fullName": "google.cloud.managedkafka_v1.ManagedKafkaClient.create_cluster", + "method": { + "fullName": 
"google.cloud.managedkafka.v1.ManagedKafka.CreateCluster", + "service": { + "fullName": "google.cloud.managedkafka.v1.ManagedKafka", + "shortName": "ManagedKafka" + }, + "shortName": "CreateCluster" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.managedkafka_v1.types.CreateClusterRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "cluster", + "type": "google.cloud.managedkafka_v1.types.Cluster" + }, + { + "name": "cluster_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "create_cluster" + }, + "description": "Sample for CreateCluster", + "file": "managedkafka_v1_generated_managed_kafka_create_cluster_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "managedkafka_v1_generated_ManagedKafka_CreateCluster_sync", + "segments": [ + { + "end": 62, + "start": 27, + "type": "FULL" + }, + { + "end": 62, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 52, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 59, + "start": 53, + "type": "REQUEST_EXECUTION" + }, + { + "end": 63, + "start": 60, + "type": "RESPONSE_HANDLING" + } + ], + "title": "managedkafka_v1_generated_managed_kafka_create_cluster_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.managedkafka_v1.ManagedKafkaAsyncClient", + "shortName": "ManagedKafkaAsyncClient" + }, + "fullName": "google.cloud.managedkafka_v1.ManagedKafkaAsyncClient.create_topic", + "method": { + "fullName": "google.cloud.managedkafka.v1.ManagedKafka.CreateTopic", + "service": { + "fullName": "google.cloud.managedkafka.v1.ManagedKafka", + "shortName": "ManagedKafka" + }, + "shortName": "CreateTopic" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.managedkafka_v1.types.CreateTopicRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "topic", + "type": "google.cloud.managedkafka_v1.types.Topic" + }, + { + "name": "topic_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.managedkafka_v1.types.Topic", + "shortName": "create_topic" + }, + "description": "Sample for CreateTopic", + "file": "managedkafka_v1_generated_managed_kafka_create_topic_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "managedkafka_v1_generated_ManagedKafka_CreateTopic_async", + "segments": [ + { + "end": 57, + "start": 27, + "type": "FULL" + }, + { + "end": 57, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 51, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 54, + "start": 52, + "type": "REQUEST_EXECUTION" + }, + { + "end": 58, + "start": 55, + "type": "RESPONSE_HANDLING" + } + ], + "title": "managedkafka_v1_generated_managed_kafka_create_topic_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.managedkafka_v1.ManagedKafkaClient", + "shortName": "ManagedKafkaClient" + }, + "fullName": 
"google.cloud.managedkafka_v1.ManagedKafkaClient.create_topic", + "method": { + "fullName": "google.cloud.managedkafka.v1.ManagedKafka.CreateTopic", + "service": { + "fullName": "google.cloud.managedkafka.v1.ManagedKafka", + "shortName": "ManagedKafka" + }, + "shortName": "CreateTopic" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.managedkafka_v1.types.CreateTopicRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "topic", + "type": "google.cloud.managedkafka_v1.types.Topic" + }, + { + "name": "topic_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.managedkafka_v1.types.Topic", + "shortName": "create_topic" + }, + "description": "Sample for CreateTopic", + "file": "managedkafka_v1_generated_managed_kafka_create_topic_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "managedkafka_v1_generated_ManagedKafka_CreateTopic_sync", + "segments": [ + { + "end": 57, + "start": 27, + "type": "FULL" + }, + { + "end": 57, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 51, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 54, + "start": 52, + "type": "REQUEST_EXECUTION" + }, + { + "end": 58, + "start": 55, + "type": "RESPONSE_HANDLING" + } + ], + "title": "managedkafka_v1_generated_managed_kafka_create_topic_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.managedkafka_v1.ManagedKafkaAsyncClient", + "shortName": "ManagedKafkaAsyncClient" + }, + "fullName": "google.cloud.managedkafka_v1.ManagedKafkaAsyncClient.delete_cluster", + "method": { + "fullName": "google.cloud.managedkafka.v1.ManagedKafka.DeleteCluster", + "service": { + "fullName": "google.cloud.managedkafka.v1.ManagedKafka", + "shortName": "ManagedKafka" + }, + "shortName": "DeleteCluster" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.managedkafka_v1.types.DeleteClusterRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "delete_cluster" + }, + "description": "Sample for DeleteCluster", + "file": "managedkafka_v1_generated_managed_kafka_delete_cluster_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "managedkafka_v1_generated_ManagedKafka_DeleteCluster_async", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "managedkafka_v1_generated_managed_kafka_delete_cluster_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.managedkafka_v1.ManagedKafkaClient", + "shortName": "ManagedKafkaClient" + }, + "fullName": "google.cloud.managedkafka_v1.ManagedKafkaClient.delete_cluster", + "method": { + 
"fullName": "google.cloud.managedkafka.v1.ManagedKafka.DeleteCluster", + "service": { + "fullName": "google.cloud.managedkafka.v1.ManagedKafka", + "shortName": "ManagedKafka" + }, + "shortName": "DeleteCluster" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.managedkafka_v1.types.DeleteClusterRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "delete_cluster" + }, + "description": "Sample for DeleteCluster", + "file": "managedkafka_v1_generated_managed_kafka_delete_cluster_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "managedkafka_v1_generated_ManagedKafka_DeleteCluster_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "managedkafka_v1_generated_managed_kafka_delete_cluster_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.managedkafka_v1.ManagedKafkaAsyncClient", + "shortName": "ManagedKafkaAsyncClient" + }, + "fullName": "google.cloud.managedkafka_v1.ManagedKafkaAsyncClient.delete_consumer_group", + "method": { + "fullName": "google.cloud.managedkafka.v1.ManagedKafka.DeleteConsumerGroup", + "service": { + "fullName": "google.cloud.managedkafka.v1.ManagedKafka", + "shortName": "ManagedKafka" + }, + "shortName": "DeleteConsumerGroup" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.managedkafka_v1.types.DeleteConsumerGroupRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "shortName": "delete_consumer_group" + }, + "description": "Sample for DeleteConsumerGroup", + "file": "managedkafka_v1_generated_managed_kafka_delete_consumer_group_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "managedkafka_v1_generated_ManagedKafka_DeleteConsumerGroup_async", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "managedkafka_v1_generated_managed_kafka_delete_consumer_group_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.managedkafka_v1.ManagedKafkaClient", + "shortName": "ManagedKafkaClient" + }, + "fullName": "google.cloud.managedkafka_v1.ManagedKafkaClient.delete_consumer_group", + "method": { + "fullName": "google.cloud.managedkafka.v1.ManagedKafka.DeleteConsumerGroup", + "service": { + "fullName": "google.cloud.managedkafka.v1.ManagedKafka", + "shortName": "ManagedKafka" + }, + "shortName": "DeleteConsumerGroup" + }, + 
"parameters": [ + { + "name": "request", + "type": "google.cloud.managedkafka_v1.types.DeleteConsumerGroupRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "shortName": "delete_consumer_group" + }, + "description": "Sample for DeleteConsumerGroup", + "file": "managedkafka_v1_generated_managed_kafka_delete_consumer_group_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "managedkafka_v1_generated_ManagedKafka_DeleteConsumerGroup_sync", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "managedkafka_v1_generated_managed_kafka_delete_consumer_group_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.managedkafka_v1.ManagedKafkaAsyncClient", + "shortName": "ManagedKafkaAsyncClient" + }, + "fullName": "google.cloud.managedkafka_v1.ManagedKafkaAsyncClient.delete_topic", + "method": { + "fullName": "google.cloud.managedkafka.v1.ManagedKafka.DeleteTopic", + "service": { + "fullName": "google.cloud.managedkafka.v1.ManagedKafka", + "shortName": "ManagedKafka" + }, + "shortName": "DeleteTopic" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.managedkafka_v1.types.DeleteTopicRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "shortName": "delete_topic" + }, + "description": "Sample for DeleteTopic", + "file": "managedkafka_v1_generated_managed_kafka_delete_topic_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "managedkafka_v1_generated_ManagedKafka_DeleteTopic_async", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "managedkafka_v1_generated_managed_kafka_delete_topic_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.managedkafka_v1.ManagedKafkaClient", + "shortName": "ManagedKafkaClient" + }, + "fullName": "google.cloud.managedkafka_v1.ManagedKafkaClient.delete_topic", + "method": { + "fullName": "google.cloud.managedkafka.v1.ManagedKafka.DeleteTopic", + "service": { + "fullName": "google.cloud.managedkafka.v1.ManagedKafka", + "shortName": "ManagedKafka" + }, + "shortName": "DeleteTopic" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.managedkafka_v1.types.DeleteTopicRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "shortName": 
"delete_topic" + }, + "description": "Sample for DeleteTopic", + "file": "managedkafka_v1_generated_managed_kafka_delete_topic_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "managedkafka_v1_generated_ManagedKafka_DeleteTopic_sync", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "managedkafka_v1_generated_managed_kafka_delete_topic_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.managedkafka_v1.ManagedKafkaAsyncClient", + "shortName": "ManagedKafkaAsyncClient" + }, + "fullName": "google.cloud.managedkafka_v1.ManagedKafkaAsyncClient.get_cluster", + "method": { + "fullName": "google.cloud.managedkafka.v1.ManagedKafka.GetCluster", + "service": { + "fullName": "google.cloud.managedkafka.v1.ManagedKafka", + "shortName": "ManagedKafka" + }, + "shortName": "GetCluster" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.managedkafka_v1.types.GetClusterRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.managedkafka_v1.types.Cluster", + "shortName": "get_cluster" + }, + "description": "Sample for GetCluster", + "file": "managedkafka_v1_generated_managed_kafka_get_cluster_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "managedkafka_v1_generated_ManagedKafka_GetCluster_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "managedkafka_v1_generated_managed_kafka_get_cluster_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.managedkafka_v1.ManagedKafkaClient", + "shortName": "ManagedKafkaClient" + }, + "fullName": "google.cloud.managedkafka_v1.ManagedKafkaClient.get_cluster", + "method": { + "fullName": "google.cloud.managedkafka.v1.ManagedKafka.GetCluster", + "service": { + "fullName": "google.cloud.managedkafka.v1.ManagedKafka", + "shortName": "ManagedKafka" + }, + "shortName": "GetCluster" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.managedkafka_v1.types.GetClusterRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.managedkafka_v1.types.Cluster", + "shortName": "get_cluster" + }, + "description": "Sample for GetCluster", + "file": "managedkafka_v1_generated_managed_kafka_get_cluster_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "managedkafka_v1_generated_ManagedKafka_GetCluster_sync", + "segments": 
[ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "managedkafka_v1_generated_managed_kafka_get_cluster_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.managedkafka_v1.ManagedKafkaAsyncClient", + "shortName": "ManagedKafkaAsyncClient" + }, + "fullName": "google.cloud.managedkafka_v1.ManagedKafkaAsyncClient.get_consumer_group", + "method": { + "fullName": "google.cloud.managedkafka.v1.ManagedKafka.GetConsumerGroup", + "service": { + "fullName": "google.cloud.managedkafka.v1.ManagedKafka", + "shortName": "ManagedKafka" + }, + "shortName": "GetConsumerGroup" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.managedkafka_v1.types.GetConsumerGroupRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.managedkafka_v1.types.ConsumerGroup", + "shortName": "get_consumer_group" + }, + "description": "Sample for GetConsumerGroup", + "file": "managedkafka_v1_generated_managed_kafka_get_consumer_group_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "managedkafka_v1_generated_ManagedKafka_GetConsumerGroup_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "managedkafka_v1_generated_managed_kafka_get_consumer_group_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.managedkafka_v1.ManagedKafkaClient", + "shortName": "ManagedKafkaClient" + }, + "fullName": "google.cloud.managedkafka_v1.ManagedKafkaClient.get_consumer_group", + "method": { + "fullName": "google.cloud.managedkafka.v1.ManagedKafka.GetConsumerGroup", + "service": { + "fullName": "google.cloud.managedkafka.v1.ManagedKafka", + "shortName": "ManagedKafka" + }, + "shortName": "GetConsumerGroup" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.managedkafka_v1.types.GetConsumerGroupRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.managedkafka_v1.types.ConsumerGroup", + "shortName": "get_consumer_group" + }, + "description": "Sample for GetConsumerGroup", + "file": "managedkafka_v1_generated_managed_kafka_get_consumer_group_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "managedkafka_v1_generated_ManagedKafka_GetConsumerGroup_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + 
"start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "managedkafka_v1_generated_managed_kafka_get_consumer_group_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.managedkafka_v1.ManagedKafkaAsyncClient", + "shortName": "ManagedKafkaAsyncClient" + }, + "fullName": "google.cloud.managedkafka_v1.ManagedKafkaAsyncClient.get_topic", + "method": { + "fullName": "google.cloud.managedkafka.v1.ManagedKafka.GetTopic", + "service": { + "fullName": "google.cloud.managedkafka.v1.ManagedKafka", + "shortName": "ManagedKafka" + }, + "shortName": "GetTopic" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.managedkafka_v1.types.GetTopicRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.managedkafka_v1.types.Topic", + "shortName": "get_topic" + }, + "description": "Sample for GetTopic", + "file": "managedkafka_v1_generated_managed_kafka_get_topic_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "managedkafka_v1_generated_ManagedKafka_GetTopic_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "managedkafka_v1_generated_managed_kafka_get_topic_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.managedkafka_v1.ManagedKafkaClient", + "shortName": "ManagedKafkaClient" + }, + "fullName": "google.cloud.managedkafka_v1.ManagedKafkaClient.get_topic", + "method": { + "fullName": "google.cloud.managedkafka.v1.ManagedKafka.GetTopic", + "service": { + "fullName": "google.cloud.managedkafka.v1.ManagedKafka", + "shortName": "ManagedKafka" + }, + "shortName": "GetTopic" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.managedkafka_v1.types.GetTopicRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.managedkafka_v1.types.Topic", + "shortName": "get_topic" + }, + "description": "Sample for GetTopic", + "file": "managedkafka_v1_generated_managed_kafka_get_topic_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "managedkafka_v1_generated_ManagedKafka_GetTopic_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": 
"managedkafka_v1_generated_managed_kafka_get_topic_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.managedkafka_v1.ManagedKafkaAsyncClient", + "shortName": "ManagedKafkaAsyncClient" + }, + "fullName": "google.cloud.managedkafka_v1.ManagedKafkaAsyncClient.list_clusters", + "method": { + "fullName": "google.cloud.managedkafka.v1.ManagedKafka.ListClusters", + "service": { + "fullName": "google.cloud.managedkafka.v1.ManagedKafka", + "shortName": "ManagedKafka" + }, + "shortName": "ListClusters" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.managedkafka_v1.types.ListClustersRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.managedkafka_v1.services.managed_kafka.pagers.ListClustersAsyncPager", + "shortName": "list_clusters" + }, + "description": "Sample for ListClusters", + "file": "managedkafka_v1_generated_managed_kafka_list_clusters_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "managedkafka_v1_generated_ManagedKafka_ListClusters_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "managedkafka_v1_generated_managed_kafka_list_clusters_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.managedkafka_v1.ManagedKafkaClient", + "shortName": "ManagedKafkaClient" + }, + "fullName": "google.cloud.managedkafka_v1.ManagedKafkaClient.list_clusters", + "method": { + "fullName": "google.cloud.managedkafka.v1.ManagedKafka.ListClusters", + "service": { + "fullName": "google.cloud.managedkafka.v1.ManagedKafka", + "shortName": "ManagedKafka" + }, + "shortName": "ListClusters" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.managedkafka_v1.types.ListClustersRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.managedkafka_v1.services.managed_kafka.pagers.ListClustersPager", + "shortName": "list_clusters" + }, + "description": "Sample for ListClusters", + "file": "managedkafka_v1_generated_managed_kafka_list_clusters_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "managedkafka_v1_generated_ManagedKafka_ListClusters_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "managedkafka_v1_generated_managed_kafka_list_clusters_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": 
{ + "fullName": "google.cloud.managedkafka_v1.ManagedKafkaAsyncClient", + "shortName": "ManagedKafkaAsyncClient" + }, + "fullName": "google.cloud.managedkafka_v1.ManagedKafkaAsyncClient.list_consumer_groups", + "method": { + "fullName": "google.cloud.managedkafka.v1.ManagedKafka.ListConsumerGroups", + "service": { + "fullName": "google.cloud.managedkafka.v1.ManagedKafka", + "shortName": "ManagedKafka" + }, + "shortName": "ListConsumerGroups" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.managedkafka_v1.types.ListConsumerGroupsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.managedkafka_v1.services.managed_kafka.pagers.ListConsumerGroupsAsyncPager", + "shortName": "list_consumer_groups" + }, + "description": "Sample for ListConsumerGroups", + "file": "managedkafka_v1_generated_managed_kafka_list_consumer_groups_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "managedkafka_v1_generated_ManagedKafka_ListConsumerGroups_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "managedkafka_v1_generated_managed_kafka_list_consumer_groups_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.managedkafka_v1.ManagedKafkaClient", + "shortName": "ManagedKafkaClient" + }, + "fullName": "google.cloud.managedkafka_v1.ManagedKafkaClient.list_consumer_groups", + "method": { + "fullName": "google.cloud.managedkafka.v1.ManagedKafka.ListConsumerGroups", + "service": { + "fullName": "google.cloud.managedkafka.v1.ManagedKafka", + "shortName": "ManagedKafka" + }, + "shortName": "ListConsumerGroups" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.managedkafka_v1.types.ListConsumerGroupsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.managedkafka_v1.services.managed_kafka.pagers.ListConsumerGroupsPager", + "shortName": "list_consumer_groups" + }, + "description": "Sample for ListConsumerGroups", + "file": "managedkafka_v1_generated_managed_kafka_list_consumer_groups_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "managedkafka_v1_generated_ManagedKafka_ListConsumerGroups_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "managedkafka_v1_generated_managed_kafka_list_consumer_groups_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + 
"fullName": "google.cloud.managedkafka_v1.ManagedKafkaAsyncClient", + "shortName": "ManagedKafkaAsyncClient" + }, + "fullName": "google.cloud.managedkafka_v1.ManagedKafkaAsyncClient.list_topics", + "method": { + "fullName": "google.cloud.managedkafka.v1.ManagedKafka.ListTopics", + "service": { + "fullName": "google.cloud.managedkafka.v1.ManagedKafka", + "shortName": "ManagedKafka" + }, + "shortName": "ListTopics" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.managedkafka_v1.types.ListTopicsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.managedkafka_v1.services.managed_kafka.pagers.ListTopicsAsyncPager", + "shortName": "list_topics" + }, + "description": "Sample for ListTopics", + "file": "managedkafka_v1_generated_managed_kafka_list_topics_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "managedkafka_v1_generated_ManagedKafka_ListTopics_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "managedkafka_v1_generated_managed_kafka_list_topics_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.managedkafka_v1.ManagedKafkaClient", + "shortName": "ManagedKafkaClient" + }, + "fullName": "google.cloud.managedkafka_v1.ManagedKafkaClient.list_topics", + "method": { + "fullName": "google.cloud.managedkafka.v1.ManagedKafka.ListTopics", + "service": { + "fullName": "google.cloud.managedkafka.v1.ManagedKafka", + "shortName": "ManagedKafka" + }, + "shortName": "ListTopics" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.managedkafka_v1.types.ListTopicsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.managedkafka_v1.services.managed_kafka.pagers.ListTopicsPager", + "shortName": "list_topics" + }, + "description": "Sample for ListTopics", + "file": "managedkafka_v1_generated_managed_kafka_list_topics_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "managedkafka_v1_generated_ManagedKafka_ListTopics_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "managedkafka_v1_generated_managed_kafka_list_topics_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.managedkafka_v1.ManagedKafkaAsyncClient", + "shortName": "ManagedKafkaAsyncClient" + }, + "fullName": 
"google.cloud.managedkafka_v1.ManagedKafkaAsyncClient.update_cluster", + "method": { + "fullName": "google.cloud.managedkafka.v1.ManagedKafka.UpdateCluster", + "service": { + "fullName": "google.cloud.managedkafka.v1.ManagedKafka", + "shortName": "ManagedKafka" + }, + "shortName": "UpdateCluster" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.managedkafka_v1.types.UpdateClusterRequest" + }, + { + "name": "cluster", + "type": "google.cloud.managedkafka_v1.types.Cluster" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "update_cluster" + }, + "description": "Sample for UpdateCluster", + "file": "managedkafka_v1_generated_managed_kafka_update_cluster_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "managedkafka_v1_generated_ManagedKafka_UpdateCluster_async", + "segments": [ + { + "end": 60, + "start": 27, + "type": "FULL" + }, + { + "end": 60, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 50, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 57, + "start": 51, + "type": "REQUEST_EXECUTION" + }, + { + "end": 61, + "start": 58, + "type": "RESPONSE_HANDLING" + } + ], + "title": "managedkafka_v1_generated_managed_kafka_update_cluster_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.managedkafka_v1.ManagedKafkaClient", + "shortName": "ManagedKafkaClient" + }, + "fullName": "google.cloud.managedkafka_v1.ManagedKafkaClient.update_cluster", + "method": { + "fullName": "google.cloud.managedkafka.v1.ManagedKafka.UpdateCluster", + "service": { + "fullName": "google.cloud.managedkafka.v1.ManagedKafka", + "shortName": "ManagedKafka" + }, + "shortName": "UpdateCluster" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.managedkafka_v1.types.UpdateClusterRequest" + }, + { + "name": "cluster", + "type": "google.cloud.managedkafka_v1.types.Cluster" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "update_cluster" + }, + "description": "Sample for UpdateCluster", + "file": "managedkafka_v1_generated_managed_kafka_update_cluster_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "managedkafka_v1_generated_ManagedKafka_UpdateCluster_sync", + "segments": [ + { + "end": 60, + "start": 27, + "type": "FULL" + }, + { + "end": 60, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 50, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 57, + "start": 51, + "type": "REQUEST_EXECUTION" + }, + { + "end": 61, + "start": 58, + "type": "RESPONSE_HANDLING" + } + ], + "title": "managedkafka_v1_generated_managed_kafka_update_cluster_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.managedkafka_v1.ManagedKafkaAsyncClient", + 
"shortName": "ManagedKafkaAsyncClient" + }, + "fullName": "google.cloud.managedkafka_v1.ManagedKafkaAsyncClient.update_consumer_group", + "method": { + "fullName": "google.cloud.managedkafka.v1.ManagedKafka.UpdateConsumerGroup", + "service": { + "fullName": "google.cloud.managedkafka.v1.ManagedKafka", + "shortName": "ManagedKafka" + }, + "shortName": "UpdateConsumerGroup" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.managedkafka_v1.types.UpdateConsumerGroupRequest" + }, + { + "name": "consumer_group", + "type": "google.cloud.managedkafka_v1.types.ConsumerGroup" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.managedkafka_v1.types.ConsumerGroup", + "shortName": "update_consumer_group" + }, + "description": "Sample for UpdateConsumerGroup", + "file": "managedkafka_v1_generated_managed_kafka_update_consumer_group_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "managedkafka_v1_generated_ManagedKafka_UpdateConsumerGroup_async", + "segments": [ + { + "end": 50, + "start": 27, + "type": "FULL" + }, + { + "end": 50, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 51, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "managedkafka_v1_generated_managed_kafka_update_consumer_group_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.managedkafka_v1.ManagedKafkaClient", + "shortName": "ManagedKafkaClient" + }, + "fullName": "google.cloud.managedkafka_v1.ManagedKafkaClient.update_consumer_group", + "method": { + "fullName": "google.cloud.managedkafka.v1.ManagedKafka.UpdateConsumerGroup", + "service": { + "fullName": "google.cloud.managedkafka.v1.ManagedKafka", + "shortName": "ManagedKafka" + }, + "shortName": "UpdateConsumerGroup" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.managedkafka_v1.types.UpdateConsumerGroupRequest" + }, + { + "name": "consumer_group", + "type": "google.cloud.managedkafka_v1.types.ConsumerGroup" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.managedkafka_v1.types.ConsumerGroup", + "shortName": "update_consumer_group" + }, + "description": "Sample for UpdateConsumerGroup", + "file": "managedkafka_v1_generated_managed_kafka_update_consumer_group_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "managedkafka_v1_generated_ManagedKafka_UpdateConsumerGroup_sync", + "segments": [ + { + "end": 50, + "start": 27, + "type": "FULL" + }, + { + "end": 50, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 51, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": 
"managedkafka_v1_generated_managed_kafka_update_consumer_group_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.managedkafka_v1.ManagedKafkaAsyncClient", + "shortName": "ManagedKafkaAsyncClient" + }, + "fullName": "google.cloud.managedkafka_v1.ManagedKafkaAsyncClient.update_topic", + "method": { + "fullName": "google.cloud.managedkafka.v1.ManagedKafka.UpdateTopic", + "service": { + "fullName": "google.cloud.managedkafka.v1.ManagedKafka", + "shortName": "ManagedKafka" + }, + "shortName": "UpdateTopic" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.managedkafka_v1.types.UpdateTopicRequest" + }, + { + "name": "topic", + "type": "google.cloud.managedkafka_v1.types.Topic" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.managedkafka_v1.types.Topic", + "shortName": "update_topic" + }, + "description": "Sample for UpdateTopic", + "file": "managedkafka_v1_generated_managed_kafka_update_topic_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "managedkafka_v1_generated_ManagedKafka_UpdateTopic_async", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 49, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 50, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "managedkafka_v1_generated_managed_kafka_update_topic_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.managedkafka_v1.ManagedKafkaClient", + "shortName": "ManagedKafkaClient" + }, + "fullName": "google.cloud.managedkafka_v1.ManagedKafkaClient.update_topic", + "method": { + "fullName": "google.cloud.managedkafka.v1.ManagedKafka.UpdateTopic", + "service": { + "fullName": "google.cloud.managedkafka.v1.ManagedKafka", + "shortName": "ManagedKafka" + }, + "shortName": "UpdateTopic" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.managedkafka_v1.types.UpdateTopicRequest" + }, + { + "name": "topic", + "type": "google.cloud.managedkafka_v1.types.Topic" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.managedkafka_v1.types.Topic", + "shortName": "update_topic" + }, + "description": "Sample for UpdateTopic", + "file": "managedkafka_v1_generated_managed_kafka_update_topic_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "managedkafka_v1_generated_ManagedKafka_UpdateTopic_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 49, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 50, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": 
"RESPONSE_HANDLING" + } + ], + "title": "managedkafka_v1_generated_managed_kafka_update_topic_sync.py" + } + ] +} diff --git a/packages/google-cloud-managedkafka/scripts/decrypt-secrets.sh b/packages/google-cloud-managedkafka/scripts/decrypt-secrets.sh new file mode 100755 index 000000000000..0018b421ddf8 --- /dev/null +++ b/packages/google-cloud-managedkafka/scripts/decrypt-secrets.sh @@ -0,0 +1,46 @@ +#!/bin/bash + +# Copyright 2023 Google LLC All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )" +ROOT=$( dirname "$DIR" ) + +# Work from the project root. +cd $ROOT + +# Prevent it from overriding files. +# We recommend that sample authors use their own service account files and cloud project. +# In that case, they are supposed to prepare these files by themselves. +if [[ -f "testing/test-env.sh" ]] || \ + [[ -f "testing/service-account.json" ]] || \ + [[ -f "testing/client-secrets.json" ]]; then + echo "One or more target files exist, aborting." + exit 1 +fi + +# Use SECRET_MANAGER_PROJECT if set, fallback to cloud-devrel-kokoro-resources. +PROJECT_ID="${SECRET_MANAGER_PROJECT:-cloud-devrel-kokoro-resources}" + +gcloud secrets versions access latest --secret="python-docs-samples-test-env" \ + --project="${PROJECT_ID}" \ + > testing/test-env.sh +gcloud secrets versions access latest \ + --secret="python-docs-samples-service-account" \ + --project="${PROJECT_ID}" \ + > testing/service-account.json +gcloud secrets versions access latest \ + --secret="python-docs-samples-client-secrets" \ + --project="${PROJECT_ID}" \ + > testing/client-secrets.json diff --git a/packages/google-cloud-managedkafka/scripts/fixup_managedkafka_v1_keywords.py b/packages/google-cloud-managedkafka/scripts/fixup_managedkafka_v1_keywords.py new file mode 100644 index 000000000000..8eafa521a8b4 --- /dev/null +++ b/packages/google-cloud-managedkafka/scripts/fixup_managedkafka_v1_keywords.py @@ -0,0 +1,189 @@ +#! /usr/bin/env python3 +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import argparse +import os +import libcst as cst +import pathlib +import sys +from typing import (Any, Callable, Dict, List, Sequence, Tuple) + + +def partition( + predicate: Callable[[Any], bool], + iterator: Sequence[Any] +) -> Tuple[List[Any], List[Any]]: + """A stable, out-of-place partition.""" + results = ([], []) + + for i in iterator: + results[int(predicate(i))].append(i) + + # Returns trueList, falseList + return results[1], results[0] + + +class managedkafkaCallTransformer(cst.CSTTransformer): + CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata') + METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { + 'create_cluster': ('parent', 'cluster_id', 'cluster', 'request_id', ), + 'create_topic': ('parent', 'topic_id', 'topic', ), + 'delete_cluster': ('name', 'request_id', ), + 'delete_consumer_group': ('name', ), + 'delete_topic': ('name', ), + 'get_cluster': ('name', ), + 'get_consumer_group': ('name', ), + 'get_topic': ('name', ), + 'list_clusters': ('parent', 'page_size', 'page_token', 'filter', 'order_by', ), + 'list_consumer_groups': ('parent', 'page_size', 'page_token', ), + 'list_topics': ('parent', 'page_size', 'page_token', ), + 'update_cluster': ('update_mask', 'cluster', 'request_id', ), + 'update_consumer_group': ('update_mask', 'consumer_group', ), + 'update_topic': ('update_mask', 'topic', ), + } + + def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode: + try: + key = original.func.attr.value + kword_params = self.METHOD_TO_PARAMS[key] + except (AttributeError, KeyError): + # Either not a method from the API or too convoluted to be sure. + return updated + + # If the existing code is valid, keyword args come after positional args. + # Therefore, all positional args must map to the first parameters. + args, kwargs = partition(lambda a: not bool(a.keyword), updated.args) + if any(k.keyword.value == "request" for k in kwargs): + # We've already fixed this file, don't fix it again. + return updated + + kwargs, ctrl_kwargs = partition( + lambda a: a.keyword.value not in self.CTRL_PARAMS, + kwargs + ) + + args, ctrl_args = args[:len(kword_params)], args[len(kword_params):] + ctrl_kwargs.extend(cst.Arg(value=a.value, keyword=cst.Name(value=ctrl)) + for a, ctrl in zip(ctrl_args, self.CTRL_PARAMS)) + + request_arg = cst.Arg( + value=cst.Dict([ + cst.DictElement( + cst.SimpleString("'{}'".format(name)), +cst.Element(value=arg.value) + ) + # Note: the args + kwargs looks silly, but keep in mind that + # the control parameters had to be stripped out, and that + # those could have been passed positionally or by keyword. + for name, arg in zip(kword_params, args + kwargs)]), + keyword=cst.Name("request") + ) + + return updated.with_changes( + args=[request_arg] + ctrl_kwargs + ) + + +def fix_files( + in_dir: pathlib.Path, + out_dir: pathlib.Path, + *, + transformer=managedkafkaCallTransformer(), +): + """Duplicate the input dir to the output dir, fixing file method calls. + + Preconditions: + * in_dir is a real directory + * out_dir is a real, empty directory + """ + pyfile_gen = ( + pathlib.Path(os.path.join(root, f)) + for root, _, files in os.walk(in_dir) + for f in files if os.path.splitext(f)[1] == ".py" + ) + + for fpath in pyfile_gen: + with open(fpath, 'r') as f: + src = f.read() + + # Parse the code and insert method call fixes. + tree = cst.parse_module(src) + updated = tree.visit(transformer) + + # Create the path and directory structure for the new file. 
+ updated_path = out_dir.joinpath(fpath.relative_to(in_dir)) + updated_path.parent.mkdir(parents=True, exist_ok=True) + + # Generate the updated source file at the corresponding path. + with open(updated_path, 'w') as f: + f.write(updated.code) + + +if __name__ == '__main__': + parser = argparse.ArgumentParser( + description="""Fix up source that uses the managedkafka client library. + +The existing sources are NOT overwritten but are copied to output_dir with changes made. + +Note: This tool operates at a best-effort level at converting positional + parameters in client method calls to keyword based parameters. + Cases where it WILL FAIL include + A) * or ** expansion in a method call. + B) Calls via function or method alias (includes free function calls) + C) Indirect or dispatched calls (e.g. the method is looked up dynamically) + + These all constitute false negatives. The tool will also detect false + positives when an API method shares a name with another method. +""") + parser.add_argument( + '-d', + '--input-directory', + required=True, + dest='input_dir', + help='the input directory to walk for python files to fix up', + ) + parser.add_argument( + '-o', + '--output-directory', + required=True, + dest='output_dir', + help='the directory to output files fixed via un-flattening', + ) + args = parser.parse_args() + input_dir = pathlib.Path(args.input_dir) + output_dir = pathlib.Path(args.output_dir) + if not input_dir.is_dir(): + print( + f"input directory '{input_dir}' does not exist or is not a directory", + file=sys.stderr, + ) + sys.exit(-1) + + if not output_dir.is_dir(): + print( + f"output directory '{output_dir}' does not exist or is not a directory", + file=sys.stderr, + ) + sys.exit(-1) + + if os.listdir(output_dir): + print( + f"output directory '{output_dir}' is not empty", + file=sys.stderr, + ) + sys.exit(-1) + + fix_files(input_dir, output_dir) diff --git a/packages/google-cloud-managedkafka/setup.py b/packages/google-cloud-managedkafka/setup.py new file mode 100644 index 000000000000..52573a9aa1d3 --- /dev/null +++ b/packages/google-cloud-managedkafka/setup.py @@ -0,0 +1,95 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
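To make the libcst transformer in fixup_managedkafka_v1_keywords.py concrete, here is a hedged before/after sketch of the rewrite it performs: positional arguments to a recognized client method are folded into a single `request` dict keyed by that method's entry in `METHOD_TO_PARAMS`, while the control parameters (`retry`, `timeout`, `metadata`) stay as keyword arguments. The `client` and `topic` names and the file paths are illustrative only.

```python
# Illustrative input, e.g. old_samples/create_topic_call.py, written against
# an older surface that accepted flattened positional arguments:
client.create_topic(
    "projects/p/locations/l/clusters/c",  # parent
    "my-topic",                           # topic_id
    topic,                                # topic
    timeout=30.0,
)

# After running
#   python fixup_managedkafka_v1_keywords.py -d old_samples -o fixed_samples
# the copy written to fixed_samples/create_topic_call.py reads:
client.create_topic(
    request={
        "parent": "projects/p/locations/l/clusters/c",
        "topic_id": "my-topic",
        "topic": topic,
    },
    timeout=30.0,
)
```

Calls that already pass `request=` are left untouched, as are calls the transformer cannot match to an entry in `METHOD_TO_PARAMS`.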
+# +import io +import os +import re + +import setuptools # type: ignore + +package_root = os.path.abspath(os.path.dirname(__file__)) + +name = "google-cloud-managedkafka" + + +description = "Google Cloud Managedkafka API client library" + +version = None + +with open( + os.path.join(package_root, "google/cloud/managedkafka/gapic_version.py") +) as fp: + version_candidates = re.findall(r"(?<=\")\d+.\d+.\d+(?=\")", fp.read()) + assert len(version_candidates) == 1 + version = version_candidates[0] + +if version[0] == "0": + release_status = "Development Status :: 4 - Beta" +else: + release_status = "Development Status :: 5 - Production/Stable" + +dependencies = [ + "google-api-core[grpc] >= 1.34.1, <3.0.0dev,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*,!=2.8.*,!=2.9.*,!=2.10.*", + # Exclude incompatible versions of `google-auth` + # See https://github.com/googleapis/google-cloud-python/issues/12364 + "google-auth >= 2.14.1, <3.0.0dev,!=2.24.0,!=2.25.0", + "proto-plus >= 1.22.3, <2.0.0dev", + "protobuf>=3.19.5,<5.0.0dev,!=3.20.0,!=3.20.1,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", +] +url = "https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-managedkafka" + +package_root = os.path.abspath(os.path.dirname(__file__)) + +readme_filename = os.path.join(package_root, "README.rst") +with io.open(readme_filename, encoding="utf-8") as readme_file: + readme = readme_file.read() + +packages = [ + package + for package in setuptools.find_namespace_packages() + if package.startswith("google") +] + +setuptools.setup( + name=name, + version=version, + description=description, + long_description=readme, + author="Google LLC", + author_email="googleapis-packages@google.com", + license="Apache 2.0", + url=url, + classifiers=[ + release_status, + "Intended Audience :: Developers", + "License :: OSI Approved :: Apache Software License", + "Programming Language :: Python", + "Programming Language :: Python :: 3", + "Programming Language :: Python :: 3.7", + "Programming Language :: Python :: 3.8", + "Programming Language :: Python :: 3.9", + "Programming Language :: Python :: 3.10", + "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", + "Operating System :: OS Independent", + "Topic :: Internet", + ], + platforms="Posix; MacOS X; Windows", + packages=packages, + python_requires=">=3.7", + install_requires=dependencies, + include_package_data=True, + zip_safe=False, +) diff --git a/packages/google-cloud-managedkafka/testing/.gitignore b/packages/google-cloud-managedkafka/testing/.gitignore new file mode 100644 index 000000000000..b05fbd630881 --- /dev/null +++ b/packages/google-cloud-managedkafka/testing/.gitignore @@ -0,0 +1,3 @@ +test-env.sh +service-account.json +client-secrets.json \ No newline at end of file diff --git a/packages/google-cloud-managedkafka/testing/constraints-3.10.txt b/packages/google-cloud-managedkafka/testing/constraints-3.10.txt new file mode 100644 index 000000000000..ed7f9aed2559 --- /dev/null +++ b/packages/google-cloud-managedkafka/testing/constraints-3.10.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. 
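For reference, the version lookup in setup.py above amounts to pulling the single quoted MAJOR.MINOR.PATCH string out of gapic_version.py and mapping a leading "0" to the Beta classifier. A minimal sketch, using a made-up gapic_version.py body:

```python
import re

# Hypothetical stand-in for google/cloud/managedkafka/gapic_version.py.
gapic_version_py = '__version__ = "0.1.0"  # {x-release-please-version}\n'

# Same lookup as setup.py: a digits.digits.digits run between double quotes
# (the unescaped dots match any character, which is harmless here).
version_candidates = re.findall(r"(?<=\")\d+.\d+.\d+(?=\")", gapic_version_py)
assert len(version_candidates) == 1
version = version_candidates[0]

release_status = (
    "Development Status :: 4 - Beta"
    if version[0] == "0"
    else "Development Status :: 5 - Production/Stable"
)
print(version, release_status)  # 0.1.0 Development Status :: 4 - Beta
```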
+google-api-core +proto-plus +protobuf diff --git a/packages/google-cloud-managedkafka/testing/constraints-3.11.txt b/packages/google-cloud-managedkafka/testing/constraints-3.11.txt new file mode 100644 index 000000000000..ed7f9aed2559 --- /dev/null +++ b/packages/google-cloud-managedkafka/testing/constraints-3.11.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf diff --git a/packages/google-cloud-managedkafka/testing/constraints-3.12.txt b/packages/google-cloud-managedkafka/testing/constraints-3.12.txt new file mode 100644 index 000000000000..ed7f9aed2559 --- /dev/null +++ b/packages/google-cloud-managedkafka/testing/constraints-3.12.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf diff --git a/packages/google-cloud-managedkafka/testing/constraints-3.7.txt b/packages/google-cloud-managedkafka/testing/constraints-3.7.txt new file mode 100644 index 000000000000..b8a550c73855 --- /dev/null +++ b/packages/google-cloud-managedkafka/testing/constraints-3.7.txt @@ -0,0 +1,10 @@ +# This constraints file is used to check that lower bounds +# are correct in setup.py +# List all library dependencies and extras in this file. +# Pin the version to the lower bound. +# e.g., if setup.py has "google-cloud-foo >= 1.14.0, < 2.0.0dev", +# Then this file should have google-cloud-foo==1.14.0 +google-api-core==1.34.1 +google-auth==2.14.1 +proto-plus==1.22.3 +protobuf==3.19.5 diff --git a/packages/google-cloud-managedkafka/testing/constraints-3.8.txt b/packages/google-cloud-managedkafka/testing/constraints-3.8.txt new file mode 100644 index 000000000000..ed7f9aed2559 --- /dev/null +++ b/packages/google-cloud-managedkafka/testing/constraints-3.8.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf diff --git a/packages/google-cloud-managedkafka/testing/constraints-3.9.txt b/packages/google-cloud-managedkafka/testing/constraints-3.9.txt new file mode 100644 index 000000000000..ed7f9aed2559 --- /dev/null +++ b/packages/google-cloud-managedkafka/testing/constraints-3.9.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf diff --git a/packages/google-cloud-managedkafka/tests/__init__.py b/packages/google-cloud-managedkafka/tests/__init__.py new file mode 100644 index 000000000000..8f6cf068242c --- /dev/null +++ b/packages/google-cloud-managedkafka/tests/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# diff --git a/packages/google-cloud-managedkafka/tests/unit/__init__.py b/packages/google-cloud-managedkafka/tests/unit/__init__.py new file mode 100644 index 000000000000..8f6cf068242c --- /dev/null +++ b/packages/google-cloud-managedkafka/tests/unit/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/packages/google-cloud-managedkafka/tests/unit/gapic/__init__.py b/packages/google-cloud-managedkafka/tests/unit/gapic/__init__.py new file mode 100644 index 000000000000..8f6cf068242c --- /dev/null +++ b/packages/google-cloud-managedkafka/tests/unit/gapic/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/packages/google-cloud-managedkafka/tests/unit/gapic/managedkafka_v1/__init__.py b/packages/google-cloud-managedkafka/tests/unit/gapic/managedkafka_v1/__init__.py new file mode 100644 index 000000000000..8f6cf068242c --- /dev/null +++ b/packages/google-cloud-managedkafka/tests/unit/gapic/managedkafka_v1/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/packages/google-cloud-managedkafka/tests/unit/gapic/managedkafka_v1/test_managed_kafka.py b/packages/google-cloud-managedkafka/tests/unit/gapic/managedkafka_v1/test_managed_kafka.py new file mode 100644 index 000000000000..84cf39d36388 --- /dev/null +++ b/packages/google-cloud-managedkafka/tests/unit/gapic/managedkafka_v1/test_managed_kafka.py @@ -0,0 +1,14228 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import os + +# try/except added for compatibility with python < 3.8 +try: + from unittest import mock + from unittest.mock import AsyncMock # pragma: NO COVER +except ImportError: # pragma: NO COVER + import mock + +from collections.abc import Iterable +import json +import math + +from google.api_core import ( + future, + gapic_v1, + grpc_helpers, + grpc_helpers_async, + operation, + operations_v1, + path_template, +) +from google.api_core import api_core_version, client_options +from google.api_core import exceptions as core_exceptions +from google.api_core import operation_async # type: ignore +import google.auth +from google.auth import credentials as ga_credentials +from google.auth.exceptions import MutualTLSChannelError +from google.cloud.location import locations_pb2 +from google.longrunning import operations_pb2 # type: ignore +from google.oauth2 import service_account +from google.protobuf import empty_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import json_format +from google.protobuf import timestamp_pb2 # type: ignore +import grpc +from grpc.experimental import aio +from proto.marshal.rules import wrappers +from proto.marshal.rules.dates import DurationRule, TimestampRule +import pytest +from requests import PreparedRequest, Request, Response +from requests.sessions import Session + +from google.cloud.managedkafka_v1.services.managed_kafka import ( + ManagedKafkaAsyncClient, + ManagedKafkaClient, + pagers, + transports, +) +from google.cloud.managedkafka_v1.types import managed_kafka, resources + + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + + +# If default endpoint is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. +def modify_default_endpoint(client): + return ( + "foo.googleapis.com" + if ("localhost" in client.DEFAULT_ENDPOINT) + else client.DEFAULT_ENDPOINT + ) + + +# If default endpoint template is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint template so the client can produce a different +# mtls endpoint for endpoint testing purposes. 
+def modify_default_endpoint_template(client): + return ( + "test.{UNIVERSE_DOMAIN}" + if ("localhost" in client._DEFAULT_ENDPOINT_TEMPLATE) + else client._DEFAULT_ENDPOINT_TEMPLATE + ) + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert ManagedKafkaClient._get_default_mtls_endpoint(None) is None + assert ( + ManagedKafkaClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint + ) + assert ( + ManagedKafkaClient._get_default_mtls_endpoint(api_mtls_endpoint) + == api_mtls_endpoint + ) + assert ( + ManagedKafkaClient._get_default_mtls_endpoint(sandbox_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + ManagedKafkaClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) + == sandbox_mtls_endpoint + ) + assert ManagedKafkaClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi + + +def test__read_environment_variables(): + assert ManagedKafkaClient._read_environment_variables() == (False, "auto", None) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + assert ManagedKafkaClient._read_environment_variables() == (True, "auto", None) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + assert ManagedKafkaClient._read_environment_variables() == (False, "auto", None) + + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError) as excinfo: + ManagedKafkaClient._read_environment_variables() + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + assert ManagedKafkaClient._read_environment_variables() == ( + False, + "never", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + assert ManagedKafkaClient._read_environment_variables() == ( + False, + "always", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}): + assert ManagedKafkaClient._read_environment_variables() == (False, "auto", None) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + ManagedKafkaClient._read_environment_variables() + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + with mock.patch.dict(os.environ, {"GOOGLE_CLOUD_UNIVERSE_DOMAIN": "foo.com"}): + assert ManagedKafkaClient._read_environment_variables() == ( + False, + "auto", + "foo.com", + ) + + +def test__get_client_cert_source(): + mock_provided_cert_source = mock.Mock() + mock_default_cert_source = mock.Mock() + + assert ManagedKafkaClient._get_client_cert_source(None, False) is None + assert ( + ManagedKafkaClient._get_client_cert_source(mock_provided_cert_source, False) + is None + ) + assert ( + ManagedKafkaClient._get_client_cert_source(mock_provided_cert_source, True) + == mock_provided_cert_source + ) + + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", return_value=True + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=mock_default_cert_source, + ): + assert ( + 
ManagedKafkaClient._get_client_cert_source(None, True) + is mock_default_cert_source + ) + assert ( + ManagedKafkaClient._get_client_cert_source( + mock_provided_cert_source, "true" + ) + is mock_provided_cert_source + ) + + +@mock.patch.object( + ManagedKafkaClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(ManagedKafkaClient), +) +@mock.patch.object( + ManagedKafkaAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(ManagedKafkaAsyncClient), +) +def test__get_api_endpoint(): + api_override = "foo.com" + mock_client_cert_source = mock.Mock() + default_universe = ManagedKafkaClient._DEFAULT_UNIVERSE + default_endpoint = ManagedKafkaClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=default_universe + ) + mock_universe = "bar.com" + mock_endpoint = ManagedKafkaClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=mock_universe + ) + + assert ( + ManagedKafkaClient._get_api_endpoint( + api_override, mock_client_cert_source, default_universe, "always" + ) + == api_override + ) + assert ( + ManagedKafkaClient._get_api_endpoint( + None, mock_client_cert_source, default_universe, "auto" + ) + == ManagedKafkaClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + ManagedKafkaClient._get_api_endpoint(None, None, default_universe, "auto") + == default_endpoint + ) + assert ( + ManagedKafkaClient._get_api_endpoint(None, None, default_universe, "always") + == ManagedKafkaClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + ManagedKafkaClient._get_api_endpoint( + None, mock_client_cert_source, default_universe, "always" + ) + == ManagedKafkaClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + ManagedKafkaClient._get_api_endpoint(None, None, mock_universe, "never") + == mock_endpoint + ) + assert ( + ManagedKafkaClient._get_api_endpoint(None, None, default_universe, "never") + == default_endpoint + ) + + with pytest.raises(MutualTLSChannelError) as excinfo: + ManagedKafkaClient._get_api_endpoint( + None, mock_client_cert_source, mock_universe, "auto" + ) + assert ( + str(excinfo.value) + == "mTLS is not supported in any universe other than googleapis.com." + ) + + +def test__get_universe_domain(): + client_universe_domain = "foo.com" + universe_domain_env = "bar.com" + + assert ( + ManagedKafkaClient._get_universe_domain( + client_universe_domain, universe_domain_env + ) + == client_universe_domain + ) + assert ( + ManagedKafkaClient._get_universe_domain(None, universe_domain_env) + == universe_domain_env + ) + assert ( + ManagedKafkaClient._get_universe_domain(None, None) + == ManagedKafkaClient._DEFAULT_UNIVERSE + ) + + with pytest.raises(ValueError) as excinfo: + ManagedKafkaClient._get_universe_domain("", None) + assert str(excinfo.value) == "Universe Domain cannot be an empty string." + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (ManagedKafkaClient, transports.ManagedKafkaGrpcTransport, "grpc"), + (ManagedKafkaClient, transports.ManagedKafkaRestTransport, "rest"), + ], +) +def test__validate_universe_domain(client_class, transport_class, transport_name): + client = client_class( + transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) + ) + assert client._validate_universe_domain() == True + + # Test the case when universe is already validated. + assert client._validate_universe_domain() == True + + if transport_name == "grpc": + # Test the case where credentials are provided by the + # `local_channel_credentials`. The default universes in both match. 
+ channel = grpc.secure_channel( + "http://localhost/", grpc.local_channel_credentials() + ) + client = client_class(transport=transport_class(channel=channel)) + assert client._validate_universe_domain() == True + + # Test the case where credentials do not exist: e.g. a transport is provided + # with no credentials. Validation should still succeed because there is no + # mismatch with non-existent credentials. + channel = grpc.secure_channel( + "http://localhost/", grpc.local_channel_credentials() + ) + transport = transport_class(channel=channel) + transport._credentials = None + client = client_class(transport=transport) + assert client._validate_universe_domain() == True + + # TODO: This is needed to cater for older versions of google-auth + # Make this test unconditional once the minimum supported version of + # google-auth becomes 2.23.0 or higher. + google_auth_major, google_auth_minor = [ + int(part) for part in google.auth.__version__.split(".")[0:2] + ] + if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): + credentials = ga_credentials.AnonymousCredentials() + credentials._universe_domain = "foo.com" + # Test the case when there is a universe mismatch from the credentials. + client = client_class(transport=transport_class(credentials=credentials)) + with pytest.raises(ValueError) as excinfo: + client._validate_universe_domain() + assert ( + str(excinfo.value) + == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." + ) + + # Test the case when there is a universe mismatch from the client. + # + # TODO: Make this test unconditional once the minimum supported version of + # google-api-core becomes 2.15.0 or higher. + api_core_major, api_core_minor = [ + int(part) for part in api_core_version.__version__.split(".")[0:2] + ] + if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): + client = client_class( + client_options={"universe_domain": "bar.com"}, + transport=transport_class( + credentials=ga_credentials.AnonymousCredentials(), + ), + ) + with pytest.raises(ValueError) as excinfo: + client._validate_universe_domain() + assert ( + str(excinfo.value) + == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." 
+ ) + + # Test that ValueError is raised if universe_domain is provided via client options and credentials is None + with pytest.raises(ValueError): + client._compare_universes("foo.bar", None) + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (ManagedKafkaClient, "grpc"), + (ManagedKafkaAsyncClient, "grpc_asyncio"), + (ManagedKafkaClient, "rest"), + ], +) +def test_managed_kafka_client_from_service_account_info(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_info" + ) as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info, transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + "managedkafka.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://managedkafka.googleapis.com" + ) + + +@pytest.mark.parametrize( + "transport_class,transport_name", + [ + (transports.ManagedKafkaGrpcTransport, "grpc"), + (transports.ManagedKafkaGrpcAsyncIOTransport, "grpc_asyncio"), + (transports.ManagedKafkaRestTransport, "rest"), + ], +) +def test_managed_kafka_client_service_account_always_use_jwt( + transport_class, transport_name +): + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) + + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=False) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (ManagedKafkaClient, "grpc"), + (ManagedKafkaAsyncClient, "grpc_asyncio"), + (ManagedKafkaClient, "rest"), + ], +) +def test_managed_kafka_client_from_service_account_file(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_file" + ) as factory: + factory.return_value = creds + client = client_class.from_service_account_file( + "dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + client = client_class.from_service_account_json( + "dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + "managedkafka.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://managedkafka.googleapis.com" + ) + + +def test_managed_kafka_client_get_transport_class(): + transport = ManagedKafkaClient.get_transport_class() + available_transports = [ + transports.ManagedKafkaGrpcTransport, + transports.ManagedKafkaRestTransport, + ] + assert transport in available_transports + + transport = ManagedKafkaClient.get_transport_class("grpc") + assert transport == transports.ManagedKafkaGrpcTransport + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (ManagedKafkaClient, 
transports.ManagedKafkaGrpcTransport, "grpc"), + ( + ManagedKafkaAsyncClient, + transports.ManagedKafkaGrpcAsyncIOTransport, + "grpc_asyncio", + ), + (ManagedKafkaClient, transports.ManagedKafkaRestTransport, "rest"), + ], +) +@mock.patch.object( + ManagedKafkaClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(ManagedKafkaClient), +) +@mock.patch.object( + ManagedKafkaAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(ManagedKafkaAsyncClient), +) +def test_managed_kafka_client_client_options( + client_class, transport_class, transport_name +): + # Check that if channel is provided we won't create a new one. + with mock.patch.object(ManagedKafkaClient, "get_transport_class") as gtc: + transport = transport_class(credentials=ga_credentials.AnonymousCredentials()) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object(ManagedKafkaClient, "get_transport_class") as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. + options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name, client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + client = client_class(transport=transport_name) + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError) as excinfo: + client = client_class(transport=transport_name) + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + # Check the case api_endpoint is provided + options = client_options.ClientOptions( + api_audience="https://language.googleapis.com" + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience="https://language.googleapis.com", + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,use_client_cert_env", + [ + (ManagedKafkaClient, transports.ManagedKafkaGrpcTransport, "grpc", "true"), + ( + ManagedKafkaAsyncClient, + transports.ManagedKafkaGrpcAsyncIOTransport, + "grpc_asyncio", + "true", + ), + (ManagedKafkaClient, transports.ManagedKafkaGrpcTransport, "grpc", "false"), + ( + ManagedKafkaAsyncClient, + transports.ManagedKafkaGrpcAsyncIOTransport, + "grpc_asyncio", + "false", + ), + (ManagedKafkaClient, transports.ManagedKafkaRestTransport, "rest", "true"), + (ManagedKafkaClient, transports.ManagedKafkaRestTransport, "rest", "false"), + ], +) +@mock.patch.object( + ManagedKafkaClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(ManagedKafkaClient), +) +@mock.patch.object( + ManagedKafkaAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(ManagedKafkaAsyncClient), +) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_managed_kafka_client_mtls_env_auto( + client_class, transport_class, transport_name, use_client_cert_env +): + # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. + + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + options = client_options.ClientOptions( + client_cert_source=client_cert_source_callback + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ) + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=client_cert_source_callback, + ): + if use_client_cert_env == "false": + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ) + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case client_cert_source and ADC client cert are not provided. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize("client_class", [ManagedKafkaClient, ManagedKafkaAsyncClient]) +@mock.patch.object( + ManagedKafkaClient, "DEFAULT_ENDPOINT", modify_default_endpoint(ManagedKafkaClient) +) +@mock.patch.object( + ManagedKafkaAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(ManagedKafkaAsyncClient), +) +def test_managed_kafka_client_get_mtls_endpoint_and_cert_source(client_class): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=mock_client_cert_source, + ): + ( + api_endpoint, + cert_source, + ) = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + client_class.get_mtls_endpoint_and_cert_source() + + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError) as excinfo: + client_class.get_mtls_endpoint_and_cert_source() + + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + + +@pytest.mark.parametrize("client_class", [ManagedKafkaClient, ManagedKafkaAsyncClient]) +@mock.patch.object( + ManagedKafkaClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(ManagedKafkaClient), +) +@mock.patch.object( + ManagedKafkaAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(ManagedKafkaAsyncClient), +) +def test_managed_kafka_client_client_api_endpoint(client_class): + mock_client_cert_source = client_cert_source_callback + api_override = "foo.com" + default_universe = ManagedKafkaClient._DEFAULT_UNIVERSE + default_endpoint = ManagedKafkaClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=default_universe + ) + mock_universe = "bar.com" + mock_endpoint = ManagedKafkaClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=mock_universe + ) + + # If ClientOptions.api_endpoint is set and GOOGLE_API_USE_CLIENT_CERTIFICATE="true", + # use ClientOptions.api_endpoint as the api endpoint regardless. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ): + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=api_override + ) + client = client_class( + client_options=options, + credentials=ga_credentials.AnonymousCredentials(), + ) + assert client.api_endpoint == api_override + + # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="never", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + client = client_class(credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == default_endpoint + + # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="always", + # use the DEFAULT_MTLS_ENDPOINT as the api endpoint. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + client = client_class(credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + + # If ClientOptions.api_endpoint is not set, GOOGLE_API_USE_MTLS_ENDPOINT="auto" (default), + # GOOGLE_API_USE_CLIENT_CERTIFICATE="false" (default), default cert source doesn't exist, + # and ClientOptions.universe_domain="bar.com", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with universe domain as the api endpoint. 
+ options = client_options.ClientOptions() + universe_exists = hasattr(options, "universe_domain") + if universe_exists: + options = client_options.ClientOptions(universe_domain=mock_universe) + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + else: + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + assert client.api_endpoint == ( + mock_endpoint if universe_exists else default_endpoint + ) + assert client.universe_domain == ( + mock_universe if universe_exists else default_universe + ) + + # If ClientOptions does not have a universe domain attribute and GOOGLE_API_USE_MTLS_ENDPOINT="never", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. + options = client_options.ClientOptions() + if hasattr(options, "universe_domain"): + delattr(options, "universe_domain") + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + assert client.api_endpoint == default_endpoint + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (ManagedKafkaClient, transports.ManagedKafkaGrpcTransport, "grpc"), + ( + ManagedKafkaAsyncClient, + transports.ManagedKafkaGrpcAsyncIOTransport, + "grpc_asyncio", + ), + (ManagedKafkaClient, transports.ManagedKafkaRestTransport, "rest"), + ], +) +def test_managed_kafka_client_client_options_scopes( + client_class, transport_class, transport_name +): + # Check the case scopes are provided. + options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + ManagedKafkaClient, + transports.ManagedKafkaGrpcTransport, + "grpc", + grpc_helpers, + ), + ( + ManagedKafkaAsyncClient, + transports.ManagedKafkaGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + (ManagedKafkaClient, transports.ManagedKafkaRestTransport, "rest", None), + ], +) +def test_managed_kafka_client_client_options_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. 
+ options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +def test_managed_kafka_client_client_options_from_dict(): + with mock.patch( + "google.cloud.managedkafka_v1.services.managed_kafka.transports.ManagedKafkaGrpcTransport.__init__" + ) as grpc_transport: + grpc_transport.return_value = None + client = ManagedKafkaClient(client_options={"api_endpoint": "squid.clam.whelk"}) + grpc_transport.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + ManagedKafkaClient, + transports.ManagedKafkaGrpcTransport, + "grpc", + grpc_helpers, + ), + ( + ManagedKafkaAsyncClient, + transports.ManagedKafkaGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + ], +) +def test_managed_kafka_client_create_channel_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. + options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # test that the credentials from file are saved and used as the credentials. 
+ with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel" + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + file_creds = ga_credentials.AnonymousCredentials() + load_creds.return_value = (file_creds, None) + adc.return_value = (creds, None) + client = client_class(client_options=options, transport=transport_name) + create_channel.assert_called_with( + "managedkafka.googleapis.com:443", + credentials=file_creds, + credentials_file=None, + quota_project_id=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + scopes=None, + default_host="managedkafka.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "request_type", + [ + managed_kafka.ListClustersRequest, + dict, + ], +) +def test_list_clusters(request_type, transport: str = "grpc"): + client = ManagedKafkaClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_clusters), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = managed_kafka.ListClustersResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + response = client.list_clusters(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = managed_kafka.ListClustersRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListClustersPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +def test_list_clusters_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ManagedKafkaClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_clusters), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.list_clusters() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == managed_kafka.ListClustersRequest() + + +def test_list_clusters_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = ManagedKafkaClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. 
+ request = managed_kafka.ListClustersRequest( + parent="parent_value", + page_token="page_token_value", + filter="filter_value", + order_by="order_by_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_clusters), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.list_clusters(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == managed_kafka.ListClustersRequest( + parent="parent_value", + page_token="page_token_value", + filter="filter_value", + order_by="order_by_value", + ) + + +def test_list_clusters_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ManagedKafkaClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_clusters in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.list_clusters] = mock_rpc + request = {} + client.list_clusters(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_clusters(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_list_clusters_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ManagedKafkaAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_clusters), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + managed_kafka.ListClustersResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + ) + response = await client.list_clusters() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == managed_kafka.ListClustersRequest() + + +@pytest.mark.asyncio +async def test_list_clusters_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = ManagedKafkaAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.list_clusters + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[ + client._client._transport.list_clusters + ] = mock_object + + request = {} + await client.list_clusters(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + await client.list_clusters(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + +@pytest.mark.asyncio +async def test_list_clusters_async( + transport: str = "grpc_asyncio", request_type=managed_kafka.ListClustersRequest +): + client = ManagedKafkaAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_clusters), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + managed_kafka.ListClustersResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + ) + response = await client.list_clusters(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = managed_kafka.ListClustersRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListClustersAsyncPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +@pytest.mark.asyncio +async def test_list_clusters_async_from_dict(): + await test_list_clusters_async(request_type=dict) + + +def test_list_clusters_field_headers(): + client = ManagedKafkaClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = managed_kafka.ListClustersRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.list_clusters), "__call__") as call: + call.return_value = managed_kafka.ListClustersResponse() + client.list_clusters(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_clusters_field_headers_async(): + client = ManagedKafkaAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = managed_kafka.ListClustersRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_clusters), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + managed_kafka.ListClustersResponse() + ) + await client.list_clusters(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_list_clusters_flattened(): + client = ManagedKafkaClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_clusters), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = managed_kafka.ListClustersResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_clusters( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +def test_list_clusters_flattened_error(): + client = ManagedKafkaClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_clusters( + managed_kafka.ListClustersRequest(), + parent="parent_value", + ) + + +@pytest.mark.asyncio +async def test_list_clusters_flattened_async(): + client = ManagedKafkaAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_clusters), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = managed_kafka.ListClustersResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + managed_kafka.ListClustersResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_clusters( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_list_clusters_flattened_error_async(): + client = ManagedKafkaAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.list_clusters( + managed_kafka.ListClustersRequest(), + parent="parent_value", + ) + + +def test_list_clusters_pager(transport_name: str = "grpc"): + client = ManagedKafkaClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_clusters), "__call__") as call: + # Set the response to a series of pages. + call.side_effect = ( + managed_kafka.ListClustersResponse( + clusters=[ + resources.Cluster(), + resources.Cluster(), + resources.Cluster(), + ], + next_page_token="abc", + ), + managed_kafka.ListClustersResponse( + clusters=[], + next_page_token="def", + ), + managed_kafka.ListClustersResponse( + clusters=[ + resources.Cluster(), + ], + next_page_token="ghi", + ), + managed_kafka.ListClustersResponse( + clusters=[ + resources.Cluster(), + resources.Cluster(), + ], + ), + RuntimeError, + ) + + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + ) + pager = client.list_clusters(request={}) + + assert pager._metadata == expected_metadata + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, resources.Cluster) for i in results) + + +def test_list_clusters_pages(transport_name: str = "grpc"): + client = ManagedKafkaClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_clusters), "__call__") as call: + # Set the response to a series of pages. + call.side_effect = ( + managed_kafka.ListClustersResponse( + clusters=[ + resources.Cluster(), + resources.Cluster(), + resources.Cluster(), + ], + next_page_token="abc", + ), + managed_kafka.ListClustersResponse( + clusters=[], + next_page_token="def", + ), + managed_kafka.ListClustersResponse( + clusters=[ + resources.Cluster(), + ], + next_page_token="ghi", + ), + managed_kafka.ListClustersResponse( + clusters=[ + resources.Cluster(), + resources.Cluster(), + ], + ), + RuntimeError, + ) + pages = list(client.list_clusters(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_list_clusters_async_pager(): + client = ManagedKafkaAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_clusters), "__call__", new_callable=mock.AsyncMock + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + managed_kafka.ListClustersResponse( + clusters=[ + resources.Cluster(), + resources.Cluster(), + resources.Cluster(), + ], + next_page_token="abc", + ), + managed_kafka.ListClustersResponse( + clusters=[], + next_page_token="def", + ), + managed_kafka.ListClustersResponse( + clusters=[ + resources.Cluster(), + ], + next_page_token="ghi", + ), + managed_kafka.ListClustersResponse( + clusters=[ + resources.Cluster(), + resources.Cluster(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_clusters( + request={}, + ) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, resources.Cluster) for i in responses) + + +@pytest.mark.asyncio +async def test_list_clusters_async_pages(): + client = ManagedKafkaAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_clusters), "__call__", new_callable=mock.AsyncMock + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + managed_kafka.ListClustersResponse( + clusters=[ + resources.Cluster(), + resources.Cluster(), + resources.Cluster(), + ], + next_page_token="abc", + ), + managed_kafka.ListClustersResponse( + clusters=[], + next_page_token="def", + ), + managed_kafka.ListClustersResponse( + clusters=[ + resources.Cluster(), + ], + next_page_token="ghi", + ), + managed_kafka.ListClustersResponse( + clusters=[ + resources.Cluster(), + resources.Cluster(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_clusters(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + managed_kafka.GetClusterRequest, + dict, + ], +) +def test_get_cluster(request_type, transport: str = "grpc"): + client = ManagedKafkaClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_cluster), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = resources.Cluster( + name="name_value", + state=resources.Cluster.State.CREATING, + ) + response = client.get_cluster(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = managed_kafka.GetClusterRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, resources.Cluster) + assert response.name == "name_value" + assert response.state == resources.Cluster.State.CREATING + + +def test_get_cluster_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. 
request == None and no flattened fields passed, work. + client = ManagedKafkaClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_cluster), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.get_cluster() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == managed_kafka.GetClusterRequest() + + +def test_get_cluster_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = ManagedKafkaClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = managed_kafka.GetClusterRequest( + name="name_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_cluster), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.get_cluster(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == managed_kafka.GetClusterRequest( + name="name_value", + ) + + +def test_get_cluster_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ManagedKafkaClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_cluster in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get_cluster] = mock_rpc + request = {} + client.get_cluster(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.get_cluster(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_get_cluster_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ManagedKafkaAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_cluster), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + resources.Cluster( + name="name_value", + state=resources.Cluster.State.CREATING, + ) + ) + response = await client.get_cluster() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == managed_kafka.GetClusterRequest() + + +@pytest.mark.asyncio +async def test_get_cluster_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = ManagedKafkaAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.get_cluster + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[ + client._client._transport.get_cluster + ] = mock_object + + request = {} + await client.get_cluster(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + await client.get_cluster(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + +@pytest.mark.asyncio +async def test_get_cluster_async( + transport: str = "grpc_asyncio", request_type=managed_kafka.GetClusterRequest +): + client = ManagedKafkaAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_cluster), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + resources.Cluster( + name="name_value", + state=resources.Cluster.State.CREATING, + ) + ) + response = await client.get_cluster(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = managed_kafka.GetClusterRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, resources.Cluster) + assert response.name == "name_value" + assert response.state == resources.Cluster.State.CREATING + + +@pytest.mark.asyncio +async def test_get_cluster_async_from_dict(): + await test_get_cluster_async(request_type=dict) + + +def test_get_cluster_field_headers(): + client = ManagedKafkaClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = managed_kafka.GetClusterRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.get_cluster), "__call__") as call: + call.return_value = resources.Cluster() + client.get_cluster(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_cluster_field_headers_async(): + client = ManagedKafkaAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = managed_kafka.GetClusterRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_cluster), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.Cluster()) + await client.get_cluster(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_get_cluster_flattened(): + client = ManagedKafkaClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_cluster), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = resources.Cluster() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_cluster( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_get_cluster_flattened_error(): + client = ManagedKafkaClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_cluster( + managed_kafka.GetClusterRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_get_cluster_flattened_async(): + client = ManagedKafkaAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_cluster), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = resources.Cluster() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.Cluster()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_cluster( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_get_cluster_flattened_error_async(): + client = ManagedKafkaAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.get_cluster( + managed_kafka.GetClusterRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + managed_kafka.CreateClusterRequest, + dict, + ], +) +def test_create_cluster(request_type, transport: str = "grpc"): + client = ManagedKafkaClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_cluster), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.create_cluster(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = managed_kafka.CreateClusterRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_create_cluster_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ManagedKafkaClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_cluster), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.create_cluster() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == managed_kafka.CreateClusterRequest() + + +def test_create_cluster_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = ManagedKafkaClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = managed_kafka.CreateClusterRequest( + parent="parent_value", + cluster_id="cluster_id_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_cluster), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.create_cluster(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == managed_kafka.CreateClusterRequest( + parent="parent_value", + cluster_id="cluster_id_value", + ) + + +def test_create_cluster_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ManagedKafkaClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.create_cluster in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.create_cluster] = mock_rpc + request = {} + client.create_cluster(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.create_cluster(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_create_cluster_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ManagedKafkaAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_cluster), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.create_cluster() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == managed_kafka.CreateClusterRequest() + + +@pytest.mark.asyncio +async def test_create_cluster_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = ManagedKafkaAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.create_cluster + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[ + client._client._transport.create_cluster + ] = mock_object + + request = {} + await client.create_cluster(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_object.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.create_cluster(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + +@pytest.mark.asyncio +async def test_create_cluster_async( + transport: str = "grpc_asyncio", request_type=managed_kafka.CreateClusterRequest +): + client = ManagedKafkaAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_cluster), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.create_cluster(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = managed_kafka.CreateClusterRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_create_cluster_async_from_dict(): + await test_create_cluster_async(request_type=dict) + + +def test_create_cluster_field_headers(): + client = ManagedKafkaClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = managed_kafka.CreateClusterRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_cluster), "__call__") as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.create_cluster(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_create_cluster_field_headers_async(): + client = ManagedKafkaAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = managed_kafka.CreateClusterRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_cluster), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.create_cluster(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_create_cluster_flattened(): + client = ManagedKafkaClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_cluster), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.create_cluster( + parent="parent_value", + cluster=resources.Cluster( + gcp_config=resources.GcpConfig( + access_config=resources.AccessConfig( + network_configs=[resources.NetworkConfig(subnet="subnet_value")] + ) + ) + ), + cluster_id="cluster_id_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].cluster + mock_val = resources.Cluster( + gcp_config=resources.GcpConfig( + access_config=resources.AccessConfig( + network_configs=[resources.NetworkConfig(subnet="subnet_value")] + ) + ) + ) + assert arg == mock_val + arg = args[0].cluster_id + mock_val = "cluster_id_value" + assert arg == mock_val + + +def test_create_cluster_flattened_error(): + client = ManagedKafkaClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_cluster( + managed_kafka.CreateClusterRequest(), + parent="parent_value", + cluster=resources.Cluster( + gcp_config=resources.GcpConfig( + access_config=resources.AccessConfig( + network_configs=[resources.NetworkConfig(subnet="subnet_value")] + ) + ) + ), + cluster_id="cluster_id_value", + ) + + +@pytest.mark.asyncio +async def test_create_cluster_flattened_async(): + client = ManagedKafkaAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_cluster), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.create_cluster( + parent="parent_value", + cluster=resources.Cluster( + gcp_config=resources.GcpConfig( + access_config=resources.AccessConfig( + network_configs=[resources.NetworkConfig(subnet="subnet_value")] + ) + ) + ), + cluster_id="cluster_id_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].cluster + mock_val = resources.Cluster( + gcp_config=resources.GcpConfig( + access_config=resources.AccessConfig( + network_configs=[resources.NetworkConfig(subnet="subnet_value")] + ) + ) + ) + assert arg == mock_val + arg = args[0].cluster_id + mock_val = "cluster_id_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_create_cluster_flattened_error_async(): + client = ManagedKafkaAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.create_cluster( + managed_kafka.CreateClusterRequest(), + parent="parent_value", + cluster=resources.Cluster( + gcp_config=resources.GcpConfig( + access_config=resources.AccessConfig( + network_configs=[resources.NetworkConfig(subnet="subnet_value")] + ) + ) + ), + cluster_id="cluster_id_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + managed_kafka.UpdateClusterRequest, + dict, + ], +) +def test_update_cluster(request_type, transport: str = "grpc"): + client = ManagedKafkaClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_cluster), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.update_cluster(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = managed_kafka.UpdateClusterRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_update_cluster_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ManagedKafkaClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_cluster), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.update_cluster() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == managed_kafka.UpdateClusterRequest() + + +def test_update_cluster_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = ManagedKafkaClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = managed_kafka.UpdateClusterRequest() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.update_cluster), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.update_cluster(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == managed_kafka.UpdateClusterRequest() + + +def test_update_cluster_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ManagedKafkaClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.update_cluster in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.update_cluster] = mock_rpc + request = {} + client.update_cluster(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.update_cluster(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_update_cluster_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ManagedKafkaAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_cluster), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.update_cluster() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == managed_kafka.UpdateClusterRequest() + + +@pytest.mark.asyncio +async def test_update_cluster_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = ManagedKafkaAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.update_cluster + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[ + client._client._transport.update_cluster + ] = mock_object + + request = {} + await client.update_cluster(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.update_cluster(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + +@pytest.mark.asyncio +async def test_update_cluster_async( + transport: str = "grpc_asyncio", request_type=managed_kafka.UpdateClusterRequest +): + client = ManagedKafkaAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_cluster), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.update_cluster(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = managed_kafka.UpdateClusterRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_update_cluster_async_from_dict(): + await test_update_cluster_async(request_type=dict) + + +def test_update_cluster_field_headers(): + client = ManagedKafkaClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = managed_kafka.UpdateClusterRequest() + + request.cluster.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.update_cluster), "__call__") as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.update_cluster(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "cluster.name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_update_cluster_field_headers_async(): + client = ManagedKafkaAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = managed_kafka.UpdateClusterRequest() + + request.cluster.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_cluster), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.update_cluster(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "cluster.name=name_value", + ) in kw["metadata"] + + +def test_update_cluster_flattened(): + client = ManagedKafkaClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_cluster), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.update_cluster( + cluster=resources.Cluster( + gcp_config=resources.GcpConfig( + access_config=resources.AccessConfig( + network_configs=[resources.NetworkConfig(subnet="subnet_value")] + ) + ) + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].cluster + mock_val = resources.Cluster( + gcp_config=resources.GcpConfig( + access_config=resources.AccessConfig( + network_configs=[resources.NetworkConfig(subnet="subnet_value")] + ) + ) + ) + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + assert arg == mock_val + + +def test_update_cluster_flattened_error(): + client = ManagedKafkaClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.update_cluster( + managed_kafka.UpdateClusterRequest(), + cluster=resources.Cluster( + gcp_config=resources.GcpConfig( + access_config=resources.AccessConfig( + network_configs=[resources.NetworkConfig(subnet="subnet_value")] + ) + ) + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +@pytest.mark.asyncio +async def test_update_cluster_flattened_async(): + client = ManagedKafkaAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_cluster), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.update_cluster( + cluster=resources.Cluster( + gcp_config=resources.GcpConfig( + access_config=resources.AccessConfig( + network_configs=[resources.NetworkConfig(subnet="subnet_value")] + ) + ) + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].cluster + mock_val = resources.Cluster( + gcp_config=resources.GcpConfig( + access_config=resources.AccessConfig( + network_configs=[resources.NetworkConfig(subnet="subnet_value")] + ) + ) + ) + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_update_cluster_flattened_error_async(): + client = ManagedKafkaAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.update_cluster( + managed_kafka.UpdateClusterRequest(), + cluster=resources.Cluster( + gcp_config=resources.GcpConfig( + access_config=resources.AccessConfig( + network_configs=[resources.NetworkConfig(subnet="subnet_value")] + ) + ) + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +@pytest.mark.parametrize( + "request_type", + [ + managed_kafka.DeleteClusterRequest, + dict, + ], +) +def test_delete_cluster(request_type, transport: str = "grpc"): + client = ManagedKafkaClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_cluster), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.delete_cluster(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = managed_kafka.DeleteClusterRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, future.Future) + + +def test_delete_cluster_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ManagedKafkaClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_cluster), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.delete_cluster() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == managed_kafka.DeleteClusterRequest() + + +def test_delete_cluster_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = ManagedKafkaClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = managed_kafka.DeleteClusterRequest( + name="name_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_cluster), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.delete_cluster(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == managed_kafka.DeleteClusterRequest( + name="name_value", + ) + + +def test_delete_cluster_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ManagedKafkaClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete_cluster in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.delete_cluster] = mock_rpc + request = {} + client.delete_cluster(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete_cluster(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_delete_cluster_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ManagedKafkaAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.delete_cluster), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.delete_cluster() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == managed_kafka.DeleteClusterRequest() + + +@pytest.mark.asyncio +async def test_delete_cluster_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = ManagedKafkaAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.delete_cluster + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[ + client._client._transport.delete_cluster + ] = mock_object + + request = {} + await client.delete_cluster(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.delete_cluster(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + +@pytest.mark.asyncio +async def test_delete_cluster_async( + transport: str = "grpc_asyncio", request_type=managed_kafka.DeleteClusterRequest +): + client = ManagedKafkaAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_cluster), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.delete_cluster(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = managed_kafka.DeleteClusterRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_delete_cluster_async_from_dict(): + await test_delete_cluster_async(request_type=dict) + + +def test_delete_cluster_field_headers(): + client = ManagedKafkaClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = managed_kafka.DeleteClusterRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.delete_cluster), "__call__") as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.delete_cluster(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_delete_cluster_field_headers_async(): + client = ManagedKafkaAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = managed_kafka.DeleteClusterRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_cluster), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.delete_cluster(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_delete_cluster_flattened(): + client = ManagedKafkaClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_cluster), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.delete_cluster( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_delete_cluster_flattened_error(): + client = ManagedKafkaClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_cluster( + managed_kafka.DeleteClusterRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_delete_cluster_flattened_async(): + client = ManagedKafkaAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_cluster), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.delete_cluster( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_delete_cluster_flattened_error_async(): + client = ManagedKafkaAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.delete_cluster( + managed_kafka.DeleteClusterRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + managed_kafka.ListTopicsRequest, + dict, + ], +) +def test_list_topics(request_type, transport: str = "grpc"): + client = ManagedKafkaClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_topics), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = managed_kafka.ListTopicsResponse( + next_page_token="next_page_token_value", + ) + response = client.list_topics(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = managed_kafka.ListTopicsRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListTopicsPager) + assert response.next_page_token == "next_page_token_value" + + +def test_list_topics_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ManagedKafkaClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_topics), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.list_topics() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == managed_kafka.ListTopicsRequest() + + +def test_list_topics_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = ManagedKafkaClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = managed_kafka.ListTopicsRequest( + parent="parent_value", + page_token="page_token_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_topics), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.list_topics(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == managed_kafka.ListTopicsRequest( + parent="parent_value", + page_token="page_token_value", + ) + + +def test_list_topics_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ManagedKafkaClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_topics in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.list_topics] = mock_rpc + request = {} + client.list_topics(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_topics(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_list_topics_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ManagedKafkaAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_topics), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + managed_kafka.ListTopicsResponse( + next_page_token="next_page_token_value", + ) + ) + response = await client.list_topics() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == managed_kafka.ListTopicsRequest() + + +@pytest.mark.asyncio +async def test_list_topics_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = ManagedKafkaAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.list_topics + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[ + client._client._transport.list_topics + ] = mock_object + + request = {} + await client.list_topics(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_object.call_count == 1 + + await client.list_topics(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + +@pytest.mark.asyncio +async def test_list_topics_async( + transport: str = "grpc_asyncio", request_type=managed_kafka.ListTopicsRequest +): + client = ManagedKafkaAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_topics), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + managed_kafka.ListTopicsResponse( + next_page_token="next_page_token_value", + ) + ) + response = await client.list_topics(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = managed_kafka.ListTopicsRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListTopicsAsyncPager) + assert response.next_page_token == "next_page_token_value" + + +@pytest.mark.asyncio +async def test_list_topics_async_from_dict(): + await test_list_topics_async(request_type=dict) + + +def test_list_topics_field_headers(): + client = ManagedKafkaClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = managed_kafka.ListTopicsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_topics), "__call__") as call: + call.return_value = managed_kafka.ListTopicsResponse() + client.list_topics(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_topics_field_headers_async(): + client = ManagedKafkaAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = managed_kafka.ListTopicsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_topics), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + managed_kafka.ListTopicsResponse() + ) + await client.list_topics(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_list_topics_flattened(): + client = ManagedKafkaClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_topics), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = managed_kafka.ListTopicsResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_topics( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +def test_list_topics_flattened_error(): + client = ManagedKafkaClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_topics( + managed_kafka.ListTopicsRequest(), + parent="parent_value", + ) + + +@pytest.mark.asyncio +async def test_list_topics_flattened_async(): + client = ManagedKafkaAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_topics), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = managed_kafka.ListTopicsResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + managed_kafka.ListTopicsResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_topics( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_list_topics_flattened_error_async(): + client = ManagedKafkaAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.list_topics( + managed_kafka.ListTopicsRequest(), + parent="parent_value", + ) + + +def test_list_topics_pager(transport_name: str = "grpc"): + client = ManagedKafkaClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_topics), "__call__") as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + managed_kafka.ListTopicsResponse( + topics=[ + resources.Topic(), + resources.Topic(), + resources.Topic(), + ], + next_page_token="abc", + ), + managed_kafka.ListTopicsResponse( + topics=[], + next_page_token="def", + ), + managed_kafka.ListTopicsResponse( + topics=[ + resources.Topic(), + ], + next_page_token="ghi", + ), + managed_kafka.ListTopicsResponse( + topics=[ + resources.Topic(), + resources.Topic(), + ], + ), + RuntimeError, + ) + + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + ) + pager = client.list_topics(request={}) + + assert pager._metadata == expected_metadata + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, resources.Topic) for i in results) + + +def test_list_topics_pages(transport_name: str = "grpc"): + client = ManagedKafkaClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_topics), "__call__") as call: + # Set the response to a series of pages. + call.side_effect = ( + managed_kafka.ListTopicsResponse( + topics=[ + resources.Topic(), + resources.Topic(), + resources.Topic(), + ], + next_page_token="abc", + ), + managed_kafka.ListTopicsResponse( + topics=[], + next_page_token="def", + ), + managed_kafka.ListTopicsResponse( + topics=[ + resources.Topic(), + ], + next_page_token="ghi", + ), + managed_kafka.ListTopicsResponse( + topics=[ + resources.Topic(), + resources.Topic(), + ], + ), + RuntimeError, + ) + pages = list(client.list_topics(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_list_topics_async_pager(): + client = ManagedKafkaAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_topics), "__call__", new_callable=mock.AsyncMock + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + managed_kafka.ListTopicsResponse( + topics=[ + resources.Topic(), + resources.Topic(), + resources.Topic(), + ], + next_page_token="abc", + ), + managed_kafka.ListTopicsResponse( + topics=[], + next_page_token="def", + ), + managed_kafka.ListTopicsResponse( + topics=[ + resources.Topic(), + ], + next_page_token="ghi", + ), + managed_kafka.ListTopicsResponse( + topics=[ + resources.Topic(), + resources.Topic(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_topics( + request={}, + ) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, resources.Topic) for i in responses) + + +@pytest.mark.asyncio +async def test_list_topics_async_pages(): + client = ManagedKafkaAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_topics), "__call__", new_callable=mock.AsyncMock + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + managed_kafka.ListTopicsResponse( + topics=[ + resources.Topic(), + resources.Topic(), + resources.Topic(), + ], + next_page_token="abc", + ), + managed_kafka.ListTopicsResponse( + topics=[], + next_page_token="def", + ), + managed_kafka.ListTopicsResponse( + topics=[ + resources.Topic(), + ], + next_page_token="ghi", + ), + managed_kafka.ListTopicsResponse( + topics=[ + resources.Topic(), + resources.Topic(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_topics(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + managed_kafka.GetTopicRequest, + dict, + ], +) +def test_get_topic(request_type, transport: str = "grpc"): + client = ManagedKafkaClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_topic), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = resources.Topic( + name="name_value", + partition_count=1634, + replication_factor=1912, + ) + response = client.get_topic(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = managed_kafka.GetTopicRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, resources.Topic) + assert response.name == "name_value" + assert response.partition_count == 1634 + assert response.replication_factor == 1912 + + +def test_get_topic_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ManagedKafkaClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_topic), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.get_topic() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == managed_kafka.GetTopicRequest() + + +def test_get_topic_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = ManagedKafkaClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = managed_kafka.GetTopicRequest( + name="name_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.get_topic), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.get_topic(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == managed_kafka.GetTopicRequest( + name="name_value", + ) + + +def test_get_topic_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ManagedKafkaClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_topic in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get_topic] = mock_rpc + request = {} + client.get_topic(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.get_topic(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_get_topic_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ManagedKafkaAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_topic), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + resources.Topic( + name="name_value", + partition_count=1634, + replication_factor=1912, + ) + ) + response = await client.get_topic() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == managed_kafka.GetTopicRequest() + + +@pytest.mark.asyncio +async def test_get_topic_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = ManagedKafkaAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.get_topic + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[ + client._client._transport.get_topic + ] = mock_object + + request = {} + await client.get_topic(request) + + # Establish that the underlying gRPC stub method was called. 
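+        # AwaitableMock records each invocation and returns an empty iterator from
+        # __await__, so the call counts below reflect reuse of the cached wrapper
+        # (wrapper_fn is never invoked again after client construction).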
+ assert mock_object.call_count == 1 + + await client.get_topic(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + +@pytest.mark.asyncio +async def test_get_topic_async( + transport: str = "grpc_asyncio", request_type=managed_kafka.GetTopicRequest +): + client = ManagedKafkaAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_topic), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + resources.Topic( + name="name_value", + partition_count=1634, + replication_factor=1912, + ) + ) + response = await client.get_topic(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = managed_kafka.GetTopicRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, resources.Topic) + assert response.name == "name_value" + assert response.partition_count == 1634 + assert response.replication_factor == 1912 + + +@pytest.mark.asyncio +async def test_get_topic_async_from_dict(): + await test_get_topic_async(request_type=dict) + + +def test_get_topic_field_headers(): + client = ManagedKafkaClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = managed_kafka.GetTopicRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_topic), "__call__") as call: + call.return_value = resources.Topic() + client.get_topic(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_topic_field_headers_async(): + client = ManagedKafkaAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = managed_kafka.GetTopicRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_topic), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.Topic()) + await client.get_topic(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
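+        # Routing parameters travel as gRPC metadata on the stub call, e.g.
+        #     ("x-goog-request-params", "name=name_value")
+        # derived from request.name set above.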
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_get_topic_flattened(): + client = ManagedKafkaClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_topic), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = resources.Topic() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_topic( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_get_topic_flattened_error(): + client = ManagedKafkaClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_topic( + managed_kafka.GetTopicRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_get_topic_flattened_async(): + client = ManagedKafkaAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_topic), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = resources.Topic() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.Topic()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_topic( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_get_topic_flattened_error_async(): + client = ManagedKafkaAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.get_topic( + managed_kafka.GetTopicRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + managed_kafka.CreateTopicRequest, + dict, + ], +) +def test_create_topic(request_type, transport: str = "grpc"): + client = ManagedKafkaClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_topic), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = resources.Topic( + name="name_value", + partition_count=1634, + replication_factor=1912, + ) + response = client.create_topic(request) + + # Establish that the underlying gRPC stub method was called. 
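+        # An empty CreateTopicRequest() is rebuilt below and compared against the
+        # request object the client actually handed to the transport.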
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = managed_kafka.CreateTopicRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, resources.Topic) + assert response.name == "name_value" + assert response.partition_count == 1634 + assert response.replication_factor == 1912 + + +def test_create_topic_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ManagedKafkaClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_topic), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.create_topic() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == managed_kafka.CreateTopicRequest() + + +def test_create_topic_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = ManagedKafkaClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = managed_kafka.CreateTopicRequest( + parent="parent_value", + topic_id="topic_id_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_topic), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.create_topic(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == managed_kafka.CreateTopicRequest( + parent="parent_value", + topic_id="topic_id_value", + ) + + +def test_create_topic_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ManagedKafkaClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.create_topic in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.create_topic] = mock_rpc + request = {} + client.create_topic(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.create_topic(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_create_topic_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. 
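+    # Mirrors test_create_topic_empty_call, but the fake response is wrapped in
+    # FakeUnaryUnaryCall so the grpc_asyncio transport can await it.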
+ client = ManagedKafkaAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_topic), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + resources.Topic( + name="name_value", + partition_count=1634, + replication_factor=1912, + ) + ) + response = await client.create_topic() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == managed_kafka.CreateTopicRequest() + + +@pytest.mark.asyncio +async def test_create_topic_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = ManagedKafkaAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.create_topic + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[ + client._client._transport.create_topic + ] = mock_object + + request = {} + await client.create_topic(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + await client.create_topic(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + +@pytest.mark.asyncio +async def test_create_topic_async( + transport: str = "grpc_asyncio", request_type=managed_kafka.CreateTopicRequest +): + client = ManagedKafkaAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_topic), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + resources.Topic( + name="name_value", + partition_count=1634, + replication_factor=1912, + ) + ) + response = await client.create_topic(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = managed_kafka.CreateTopicRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. 
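+    # Awaiting the FakeUnaryUnaryCall above resolves to the Topic designated as
+    # the return value, so its scalar fields should round-trip unchanged.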
+ assert isinstance(response, resources.Topic) + assert response.name == "name_value" + assert response.partition_count == 1634 + assert response.replication_factor == 1912 + + +@pytest.mark.asyncio +async def test_create_topic_async_from_dict(): + await test_create_topic_async(request_type=dict) + + +def test_create_topic_field_headers(): + client = ManagedKafkaClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = managed_kafka.CreateTopicRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_topic), "__call__") as call: + call.return_value = resources.Topic() + client.create_topic(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_create_topic_field_headers_async(): + client = ManagedKafkaAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = managed_kafka.CreateTopicRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_topic), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.Topic()) + await client.create_topic(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_create_topic_flattened(): + client = ManagedKafkaClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_topic), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = resources.Topic() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.create_topic( + parent="parent_value", + topic=resources.Topic(name="name_value"), + topic_id="topic_id_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].topic + mock_val = resources.Topic(name="name_value") + assert arg == mock_val + arg = args[0].topic_id + mock_val = "topic_id_value" + assert arg == mock_val + + +def test_create_topic_flattened_error(): + client = ManagedKafkaClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
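+    # Supplying both is ambiguous, so the client is expected to raise ValueError
+    # rather than merging the two.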
+ with pytest.raises(ValueError): + client.create_topic( + managed_kafka.CreateTopicRequest(), + parent="parent_value", + topic=resources.Topic(name="name_value"), + topic_id="topic_id_value", + ) + + +@pytest.mark.asyncio +async def test_create_topic_flattened_async(): + client = ManagedKafkaAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_topic), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = resources.Topic() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.Topic()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.create_topic( + parent="parent_value", + topic=resources.Topic(name="name_value"), + topic_id="topic_id_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].topic + mock_val = resources.Topic(name="name_value") + assert arg == mock_val + arg = args[0].topic_id + mock_val = "topic_id_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_create_topic_flattened_error_async(): + client = ManagedKafkaAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.create_topic( + managed_kafka.CreateTopicRequest(), + parent="parent_value", + topic=resources.Topic(name="name_value"), + topic_id="topic_id_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + managed_kafka.UpdateTopicRequest, + dict, + ], +) +def test_update_topic(request_type, transport: str = "grpc"): + client = ManagedKafkaClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_topic), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = resources.Topic( + name="name_value", + partition_count=1634, + replication_factor=1912, + ) + response = client.update_topic(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = managed_kafka.UpdateTopicRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, resources.Topic) + assert response.name == "name_value" + assert response.partition_count == 1634 + assert response.replication_factor == 1912 + + +def test_update_topic_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ManagedKafkaClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
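+    # Calling update_topic() with no arguments should still send a default
+    # UpdateTopicRequest(), which the assertion below verifies.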
+ with mock.patch.object(type(client.transport.update_topic), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.update_topic() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == managed_kafka.UpdateTopicRequest() + + +def test_update_topic_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = ManagedKafkaClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = managed_kafka.UpdateTopicRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_topic), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.update_topic(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == managed_kafka.UpdateTopicRequest() + + +def test_update_topic_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ManagedKafkaClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.update_topic in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.update_topic] = mock_rpc + request = {} + client.update_topic(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.update_topic(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_update_topic_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ManagedKafkaAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_topic), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + resources.Topic( + name="name_value", + partition_count=1634, + replication_factor=1912, + ) + ) + response = await client.update_topic() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == managed_kafka.UpdateTopicRequest() + + +@pytest.mark.asyncio +async def test_update_topic_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = ManagedKafkaAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.update_topic + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[ + client._client._transport.update_topic + ] = mock_object + + request = {} + await client.update_topic(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + await client.update_topic(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + +@pytest.mark.asyncio +async def test_update_topic_async( + transport: str = "grpc_asyncio", request_type=managed_kafka.UpdateTopicRequest +): + client = ManagedKafkaAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_topic), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + resources.Topic( + name="name_value", + partition_count=1634, + replication_factor=1912, + ) + ) + response = await client.update_topic(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = managed_kafka.UpdateTopicRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, resources.Topic) + assert response.name == "name_value" + assert response.partition_count == 1634 + assert response.replication_factor == 1912 + + +@pytest.mark.asyncio +async def test_update_topic_async_from_dict(): + await test_update_topic_async(request_type=dict) + + +def test_update_topic_field_headers(): + client = ManagedKafkaClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = managed_kafka.UpdateTopicRequest() + + request.topic.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
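+    # For UpdateTopic the routing key comes from a nested field, so the
+    # request.topic.name value set above should appear in the metadata as
+    # "topic.name=name_value".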
+ with mock.patch.object(type(client.transport.update_topic), "__call__") as call: + call.return_value = resources.Topic() + client.update_topic(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "topic.name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_update_topic_field_headers_async(): + client = ManagedKafkaAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = managed_kafka.UpdateTopicRequest() + + request.topic.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_topic), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.Topic()) + await client.update_topic(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "topic.name=name_value", + ) in kw["metadata"] + + +def test_update_topic_flattened(): + client = ManagedKafkaClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_topic), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = resources.Topic() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.update_topic( + topic=resources.Topic(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].topic + mock_val = resources.Topic(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + assert arg == mock_val + + +def test_update_topic_flattened_error(): + client = ManagedKafkaClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_topic( + managed_kafka.UpdateTopicRequest(), + topic=resources.Topic(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +@pytest.mark.asyncio +async def test_update_topic_flattened_async(): + client = ManagedKafkaAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_topic), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = resources.Topic() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.Topic()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
+ response = await client.update_topic( + topic=resources.Topic(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].topic + mock_val = resources.Topic(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_update_topic_flattened_error_async(): + client = ManagedKafkaAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.update_topic( + managed_kafka.UpdateTopicRequest(), + topic=resources.Topic(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +@pytest.mark.parametrize( + "request_type", + [ + managed_kafka.DeleteTopicRequest, + dict, + ], +) +def test_delete_topic(request_type, transport: str = "grpc"): + client = ManagedKafkaClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_topic), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.delete_topic(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = managed_kafka.DeleteTopicRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_topic_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ManagedKafkaClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_topic), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.delete_topic() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == managed_kafka.DeleteTopicRequest() + + +def test_delete_topic_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = ManagedKafkaClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = managed_kafka.DeleteTopicRequest( + name="name_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.delete_topic), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.delete_topic(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == managed_kafka.DeleteTopicRequest( + name="name_value", + ) + + +def test_delete_topic_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ManagedKafkaClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete_topic in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.delete_topic] = mock_rpc + request = {} + client.delete_topic(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.delete_topic(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_delete_topic_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ManagedKafkaAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_topic), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_topic() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == managed_kafka.DeleteTopicRequest() + + +@pytest.mark.asyncio +async def test_delete_topic_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = ManagedKafkaAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.delete_topic + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[ + client._client._transport.delete_topic + ] = mock_object + + request = {} + await client.delete_topic(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_object.call_count == 1 + + await client.delete_topic(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + +@pytest.mark.asyncio +async def test_delete_topic_async( + transport: str = "grpc_asyncio", request_type=managed_kafka.DeleteTopicRequest +): + client = ManagedKafkaAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_topic), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_topic(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = managed_kafka.DeleteTopicRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_delete_topic_async_from_dict(): + await test_delete_topic_async(request_type=dict) + + +def test_delete_topic_field_headers(): + client = ManagedKafkaClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = managed_kafka.DeleteTopicRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_topic), "__call__") as call: + call.return_value = None + client.delete_topic(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_delete_topic_field_headers_async(): + client = ManagedKafkaAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = managed_kafka.DeleteTopicRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_topic), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_topic(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_delete_topic_flattened(): + client = ManagedKafkaClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_topic), "__call__") as call: + # Designate an appropriate return value for the call. 
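+        # DeleteTopic has no response body, so the stub simply returns None and
+        # only the outgoing request's name field is inspected.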
+ call.return_value = None + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.delete_topic( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_delete_topic_flattened_error(): + client = ManagedKafkaClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_topic( + managed_kafka.DeleteTopicRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_delete_topic_flattened_async(): + client = ManagedKafkaAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_topic), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.delete_topic( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_delete_topic_flattened_error_async(): + client = ManagedKafkaAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.delete_topic( + managed_kafka.DeleteTopicRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + managed_kafka.ListConsumerGroupsRequest, + dict, + ], +) +def test_list_consumer_groups(request_type, transport: str = "grpc"): + client = ManagedKafkaClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_consumer_groups), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = managed_kafka.ListConsumerGroupsResponse( + next_page_token="next_page_token_value", + ) + response = client.list_consumer_groups(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = managed_kafka.ListConsumerGroupsRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListConsumerGroupsPager) + assert response.next_page_token == "next_page_token_value" + + +def test_list_consumer_groups_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. 
+ client = ManagedKafkaClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_consumer_groups), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.list_consumer_groups() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == managed_kafka.ListConsumerGroupsRequest() + + +def test_list_consumer_groups_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = ManagedKafkaClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = managed_kafka.ListConsumerGroupsRequest( + parent="parent_value", + page_token="page_token_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_consumer_groups), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.list_consumer_groups(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == managed_kafka.ListConsumerGroupsRequest( + parent="parent_value", + page_token="page_token_value", + ) + + +def test_list_consumer_groups_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ManagedKafkaClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.list_consumer_groups in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.list_consumer_groups + ] = mock_rpc + request = {} + client.list_consumer_groups(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_consumer_groups(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_list_consumer_groups_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ManagedKafkaAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_consumer_groups), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + managed_kafka.ListConsumerGroupsResponse( + next_page_token="next_page_token_value", + ) + ) + response = await client.list_consumer_groups() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == managed_kafka.ListConsumerGroupsRequest() + + +@pytest.mark.asyncio +async def test_list_consumer_groups_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = ManagedKafkaAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.list_consumer_groups + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[ + client._client._transport.list_consumer_groups + ] = mock_object + + request = {} + await client.list_consumer_groups(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + await client.list_consumer_groups(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + +@pytest.mark.asyncio +async def test_list_consumer_groups_async( + transport: str = "grpc_asyncio", + request_type=managed_kafka.ListConsumerGroupsRequest, +): + client = ManagedKafkaAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_consumer_groups), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + managed_kafka.ListConsumerGroupsResponse( + next_page_token="next_page_token_value", + ) + ) + response = await client.list_consumer_groups(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = managed_kafka.ListConsumerGroupsRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListConsumerGroupsAsyncPager) + assert response.next_page_token == "next_page_token_value" + + +@pytest.mark.asyncio +async def test_list_consumer_groups_async_from_dict(): + await test_list_consumer_groups_async(request_type=dict) + + +def test_list_consumer_groups_field_headers(): + client = ManagedKafkaClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = managed_kafka.ListConsumerGroupsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_consumer_groups), "__call__" + ) as call: + call.return_value = managed_kafka.ListConsumerGroupsResponse() + client.list_consumer_groups(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_consumer_groups_field_headers_async(): + client = ManagedKafkaAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = managed_kafka.ListConsumerGroupsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_consumer_groups), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + managed_kafka.ListConsumerGroupsResponse() + ) + await client.list_consumer_groups(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_list_consumer_groups_flattened(): + client = ManagedKafkaClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_consumer_groups), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = managed_kafka.ListConsumerGroupsResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_consumer_groups( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +def test_list_consumer_groups_flattened_error(): + client = ManagedKafkaClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_consumer_groups( + managed_kafka.ListConsumerGroupsRequest(), + parent="parent_value", + ) + + +@pytest.mark.asyncio +async def test_list_consumer_groups_flattened_async(): + client = ManagedKafkaAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_consumer_groups), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = managed_kafka.ListConsumerGroupsResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + managed_kafka.ListConsumerGroupsResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
+ response = await client.list_consumer_groups( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_list_consumer_groups_flattened_error_async(): + client = ManagedKafkaAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.list_consumer_groups( + managed_kafka.ListConsumerGroupsRequest(), + parent="parent_value", + ) + + +def test_list_consumer_groups_pager(transport_name: str = "grpc"): + client = ManagedKafkaClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_consumer_groups), "__call__" + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + managed_kafka.ListConsumerGroupsResponse( + consumer_groups=[ + resources.ConsumerGroup(), + resources.ConsumerGroup(), + resources.ConsumerGroup(), + ], + next_page_token="abc", + ), + managed_kafka.ListConsumerGroupsResponse( + consumer_groups=[], + next_page_token="def", + ), + managed_kafka.ListConsumerGroupsResponse( + consumer_groups=[ + resources.ConsumerGroup(), + ], + next_page_token="ghi", + ), + managed_kafka.ListConsumerGroupsResponse( + consumer_groups=[ + resources.ConsumerGroup(), + resources.ConsumerGroup(), + ], + ), + RuntimeError, + ) + + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + ) + pager = client.list_consumer_groups(request={}) + + assert pager._metadata == expected_metadata + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, resources.ConsumerGroup) for i in results) + + +def test_list_consumer_groups_pages(transport_name: str = "grpc"): + client = ManagedKafkaClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_consumer_groups), "__call__" + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + managed_kafka.ListConsumerGroupsResponse( + consumer_groups=[ + resources.ConsumerGroup(), + resources.ConsumerGroup(), + resources.ConsumerGroup(), + ], + next_page_token="abc", + ), + managed_kafka.ListConsumerGroupsResponse( + consumer_groups=[], + next_page_token="def", + ), + managed_kafka.ListConsumerGroupsResponse( + consumer_groups=[ + resources.ConsumerGroup(), + ], + next_page_token="ghi", + ), + managed_kafka.ListConsumerGroupsResponse( + consumer_groups=[ + resources.ConsumerGroup(), + resources.ConsumerGroup(), + ], + ), + RuntimeError, + ) + pages = list(client.list_consumer_groups(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_list_consumer_groups_async_pager(): + client = ManagedKafkaAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_consumer_groups), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + managed_kafka.ListConsumerGroupsResponse( + consumer_groups=[ + resources.ConsumerGroup(), + resources.ConsumerGroup(), + resources.ConsumerGroup(), + ], + next_page_token="abc", + ), + managed_kafka.ListConsumerGroupsResponse( + consumer_groups=[], + next_page_token="def", + ), + managed_kafka.ListConsumerGroupsResponse( + consumer_groups=[ + resources.ConsumerGroup(), + ], + next_page_token="ghi", + ), + managed_kafka.ListConsumerGroupsResponse( + consumer_groups=[ + resources.ConsumerGroup(), + resources.ConsumerGroup(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_consumer_groups( + request={}, + ) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, resources.ConsumerGroup) for i in responses) + + +@pytest.mark.asyncio +async def test_list_consumer_groups_async_pages(): + client = ManagedKafkaAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_consumer_groups), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + managed_kafka.ListConsumerGroupsResponse( + consumer_groups=[ + resources.ConsumerGroup(), + resources.ConsumerGroup(), + resources.ConsumerGroup(), + ], + next_page_token="abc", + ), + managed_kafka.ListConsumerGroupsResponse( + consumer_groups=[], + next_page_token="def", + ), + managed_kafka.ListConsumerGroupsResponse( + consumer_groups=[ + resources.ConsumerGroup(), + ], + next_page_token="ghi", + ), + managed_kafka.ListConsumerGroupsResponse( + consumer_groups=[ + resources.ConsumerGroup(), + resources.ConsumerGroup(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_consumer_groups(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + managed_kafka.GetConsumerGroupRequest, + dict, + ], +) +def test_get_consumer_group(request_type, transport: str = "grpc"): + client = ManagedKafkaClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_consumer_group), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = resources.ConsumerGroup( + name="name_value", + ) + response = client.get_consumer_group(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = managed_kafka.GetConsumerGroupRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, resources.ConsumerGroup) + assert response.name == "name_value" + + +def test_get_consumer_group_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ManagedKafkaClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_consumer_group), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.get_consumer_group() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == managed_kafka.GetConsumerGroupRequest() + + +def test_get_consumer_group_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = ManagedKafkaClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = managed_kafka.GetConsumerGroupRequest( + name="name_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_consumer_group), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.get_consumer_group(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == managed_kafka.GetConsumerGroupRequest( + name="name_value", + ) + + +def test_get_consumer_group_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ManagedKafkaClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.get_consumer_group in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.get_consumer_group + ] = mock_rpc + request = {} + client.get_consumer_group(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.get_consumer_group(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_get_consumer_group_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. 
+ client = ManagedKafkaAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_consumer_group), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + resources.ConsumerGroup( + name="name_value", + ) + ) + response = await client.get_consumer_group() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == managed_kafka.GetConsumerGroupRequest() + + +@pytest.mark.asyncio +async def test_get_consumer_group_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = ManagedKafkaAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.get_consumer_group + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[ + client._client._transport.get_consumer_group + ] = mock_object + + request = {} + await client.get_consumer_group(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + await client.get_consumer_group(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + +@pytest.mark.asyncio +async def test_get_consumer_group_async( + transport: str = "grpc_asyncio", request_type=managed_kafka.GetConsumerGroupRequest +): + client = ManagedKafkaAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_consumer_group), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + resources.ConsumerGroup( + name="name_value", + ) + ) + response = await client.get_consumer_group(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = managed_kafka.GetConsumerGroupRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, resources.ConsumerGroup) + assert response.name == "name_value" + + +@pytest.mark.asyncio +async def test_get_consumer_group_async_from_dict(): + await test_get_consumer_group_async(request_type=dict) + + +def test_get_consumer_group_field_headers(): + client = ManagedKafkaClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. 
Set these to a non-empty value. + request = managed_kafka.GetConsumerGroupRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_consumer_group), "__call__" + ) as call: + call.return_value = resources.ConsumerGroup() + client.get_consumer_group(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_consumer_group_field_headers_async(): + client = ManagedKafkaAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = managed_kafka.GetConsumerGroupRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_consumer_group), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + resources.ConsumerGroup() + ) + await client.get_consumer_group(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_get_consumer_group_flattened(): + client = ManagedKafkaClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_consumer_group), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = resources.ConsumerGroup() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_consumer_group( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_get_consumer_group_flattened_error(): + client = ManagedKafkaClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_consumer_group( + managed_kafka.GetConsumerGroupRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_get_consumer_group_flattened_async(): + client = ManagedKafkaAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_consumer_group), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = resources.ConsumerGroup() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + resources.ConsumerGroup() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
+ response = await client.get_consumer_group( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_get_consumer_group_flattened_error_async(): + client = ManagedKafkaAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.get_consumer_group( + managed_kafka.GetConsumerGroupRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + managed_kafka.UpdateConsumerGroupRequest, + dict, + ], +) +def test_update_consumer_group(request_type, transport: str = "grpc"): + client = ManagedKafkaClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_consumer_group), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = resources.ConsumerGroup( + name="name_value", + ) + response = client.update_consumer_group(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = managed_kafka.UpdateConsumerGroupRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, resources.ConsumerGroup) + assert response.name == "name_value" + + +def test_update_consumer_group_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ManagedKafkaClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_consumer_group), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.update_consumer_group() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == managed_kafka.UpdateConsumerGroupRequest() + + +def test_update_consumer_group_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = ManagedKafkaClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = managed_kafka.UpdateConsumerGroupRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_consumer_group), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.update_consumer_group(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == managed_kafka.UpdateConsumerGroupRequest() + + +def test_update_consumer_group_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ManagedKafkaClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.update_consumer_group + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.update_consumer_group + ] = mock_rpc + request = {} + client.update_consumer_group(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.update_consumer_group(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_update_consumer_group_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ManagedKafkaAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_consumer_group), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + resources.ConsumerGroup( + name="name_value", + ) + ) + response = await client.update_consumer_group() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == managed_kafka.UpdateConsumerGroupRequest() + + +@pytest.mark.asyncio +async def test_update_consumer_group_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = ManagedKafkaAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.update_consumer_group + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[ + client._client._transport.update_consumer_group + ] = mock_object + + request = {} + await client.update_consumer_group(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_object.call_count == 1 + + await client.update_consumer_group(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + +@pytest.mark.asyncio +async def test_update_consumer_group_async( + transport: str = "grpc_asyncio", + request_type=managed_kafka.UpdateConsumerGroupRequest, +): + client = ManagedKafkaAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_consumer_group), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + resources.ConsumerGroup( + name="name_value", + ) + ) + response = await client.update_consumer_group(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = managed_kafka.UpdateConsumerGroupRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, resources.ConsumerGroup) + assert response.name == "name_value" + + +@pytest.mark.asyncio +async def test_update_consumer_group_async_from_dict(): + await test_update_consumer_group_async(request_type=dict) + + +def test_update_consumer_group_field_headers(): + client = ManagedKafkaClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = managed_kafka.UpdateConsumerGroupRequest() + + request.consumer_group.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_consumer_group), "__call__" + ) as call: + call.return_value = resources.ConsumerGroup() + client.update_consumer_group(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "consumer_group.name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_update_consumer_group_field_headers_async(): + client = ManagedKafkaAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = managed_kafka.UpdateConsumerGroupRequest() + + request.consumer_group.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_consumer_group), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + resources.ConsumerGroup() + ) + await client.update_consumer_group(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "consumer_group.name=name_value", + ) in kw["metadata"] + + +def test_update_consumer_group_flattened(): + client = ManagedKafkaClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_consumer_group), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = resources.ConsumerGroup() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.update_consumer_group( + consumer_group=resources.ConsumerGroup(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].consumer_group + mock_val = resources.ConsumerGroup(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + assert arg == mock_val + + +def test_update_consumer_group_flattened_error(): + client = ManagedKafkaClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_consumer_group( + managed_kafka.UpdateConsumerGroupRequest(), + consumer_group=resources.ConsumerGroup(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +@pytest.mark.asyncio +async def test_update_consumer_group_flattened_async(): + client = ManagedKafkaAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_consumer_group), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = resources.ConsumerGroup() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + resources.ConsumerGroup() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.update_consumer_group( + consumer_group=resources.ConsumerGroup(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].consumer_group + mock_val = resources.ConsumerGroup(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_update_consumer_group_flattened_error_async(): + client = ManagedKafkaAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.update_consumer_group( + managed_kafka.UpdateConsumerGroupRequest(), + consumer_group=resources.ConsumerGroup(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +@pytest.mark.parametrize( + "request_type", + [ + managed_kafka.DeleteConsumerGroupRequest, + dict, + ], +) +def test_delete_consumer_group(request_type, transport: str = "grpc"): + client = ManagedKafkaClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_consumer_group), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.delete_consumer_group(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = managed_kafka.DeleteConsumerGroupRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_consumer_group_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ManagedKafkaClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_consumer_group), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.delete_consumer_group() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == managed_kafka.DeleteConsumerGroupRequest() + + +def test_delete_consumer_group_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = ManagedKafkaClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = managed_kafka.DeleteConsumerGroupRequest( + name="name_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_consumer_group), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.delete_consumer_group(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == managed_kafka.DeleteConsumerGroupRequest( + name="name_value", + ) + + +def test_delete_consumer_group_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ManagedKafkaClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.delete_consumer_group + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.delete_consumer_group + ] = mock_rpc + request = {} + client.delete_consumer_group(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.delete_consumer_group(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_delete_consumer_group_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ManagedKafkaAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_consumer_group), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_consumer_group() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == managed_kafka.DeleteConsumerGroupRequest() + + +@pytest.mark.asyncio +async def test_delete_consumer_group_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = ManagedKafkaAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.delete_consumer_group + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[ + client._client._transport.delete_consumer_group + ] = mock_object + + request = {} + await client.delete_consumer_group(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_object.call_count == 1 + + await client.delete_consumer_group(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + +@pytest.mark.asyncio +async def test_delete_consumer_group_async( + transport: str = "grpc_asyncio", + request_type=managed_kafka.DeleteConsumerGroupRequest, +): + client = ManagedKafkaAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_consumer_group), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_consumer_group(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = managed_kafka.DeleteConsumerGroupRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_delete_consumer_group_async_from_dict(): + await test_delete_consumer_group_async(request_type=dict) + + +def test_delete_consumer_group_field_headers(): + client = ManagedKafkaClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = managed_kafka.DeleteConsumerGroupRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_consumer_group), "__call__" + ) as call: + call.return_value = None + client.delete_consumer_group(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_delete_consumer_group_field_headers_async(): + client = ManagedKafkaAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = managed_kafka.DeleteConsumerGroupRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_consumer_group), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_consumer_group(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_delete_consumer_group_flattened(): + client = ManagedKafkaClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_consumer_group), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = None + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.delete_consumer_group( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_delete_consumer_group_flattened_error(): + client = ManagedKafkaClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_consumer_group( + managed_kafka.DeleteConsumerGroupRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_delete_consumer_group_flattened_async(): + client = ManagedKafkaAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_consumer_group), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = None + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.delete_consumer_group( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_delete_consumer_group_flattened_error_async(): + client = ManagedKafkaAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.delete_consumer_group( + managed_kafka.DeleteConsumerGroupRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + managed_kafka.ListClustersRequest, + dict, + ], +) +def test_list_clusters_rest(request_type): + client = ManagedKafkaClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = managed_kafka.ListClustersResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = managed_kafka.ListClustersResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_clusters(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListClustersPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +def test_list_clusters_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ManagedKafkaClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_clusters in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.list_clusters] = mock_rpc + + request = {} + client.list_clusters(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_clusters(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_list_clusters_rest_required_fields( + request_type=managed_kafka.ListClustersRequest, +): + transport_class = transports.ManagedKafkaRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_clusters._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_clusters._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "filter", + "order_by", + "page_size", + "page_token", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = ManagedKafkaClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. 
+ return_value = managed_kafka.ListClustersResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = managed_kafka.ListClustersResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_clusters(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_clusters_rest_unset_required_fields(): + transport = transports.ManagedKafkaRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.list_clusters._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "filter", + "orderBy", + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_clusters_rest_interceptors(null_interceptor): + transport = transports.ManagedKafkaRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.ManagedKafkaRestInterceptor(), + ) + client = ManagedKafkaClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ManagedKafkaRestInterceptor, "post_list_clusters" + ) as post, mock.patch.object( + transports.ManagedKafkaRestInterceptor, "pre_list_clusters" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = managed_kafka.ListClustersRequest.pb( + managed_kafka.ListClustersRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = managed_kafka.ListClustersResponse.to_json( + managed_kafka.ListClustersResponse() + ) + + request = managed_kafka.ListClustersRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = managed_kafka.ListClustersResponse() + + client.list_clusters( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_clusters_rest_bad_request( + transport: str = "rest", request_type=managed_kafka.ListClustersRequest +): + client = ManagedKafkaClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": 
"projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_clusters(request) + + +def test_list_clusters_rest_flattened(): + client = ManagedKafkaClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = managed_kafka.ListClustersResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = managed_kafka.ListClustersResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.list_clusters(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*}/clusters" % client.transport._host, + args[1], + ) + + +def test_list_clusters_rest_flattened_error(transport: str = "rest"): + client = ManagedKafkaClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_clusters( + managed_kafka.ListClustersRequest(), + parent="parent_value", + ) + + +def test_list_clusters_rest_pager(transport: str = "rest"): + client = ManagedKafkaClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + managed_kafka.ListClustersResponse( + clusters=[ + resources.Cluster(), + resources.Cluster(), + resources.Cluster(), + ], + next_page_token="abc", + ), + managed_kafka.ListClustersResponse( + clusters=[], + next_page_token="def", + ), + managed_kafka.ListClustersResponse( + clusters=[ + resources.Cluster(), + ], + next_page_token="ghi", + ), + managed_kafka.ListClustersResponse( + clusters=[ + resources.Cluster(), + resources.Cluster(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + managed_kafka.ListClustersResponse.to_json(x) for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "projects/sample1/locations/sample2"} + + pager = client.list_clusters(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, resources.Cluster) for i in results) + + pages = list(client.list_clusters(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + managed_kafka.GetClusterRequest, + dict, + ], +) +def test_get_cluster_rest(request_type): + client = ManagedKafkaClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/clusters/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = resources.Cluster( + name="name_value", + state=resources.Cluster.State.CREATING, + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = resources.Cluster.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_cluster(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, resources.Cluster) + assert response.name == "name_value" + assert response.state == resources.Cluster.State.CREATING + + +def test_get_cluster_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ManagedKafkaClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_cluster in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get_cluster] = mock_rpc + + request = {} + client.get_cluster(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.get_cluster(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_get_cluster_rest_required_fields(request_type=managed_kafka.GetClusterRequest): + transport_class = transports.ManagedKafkaRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_cluster._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_cluster._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = ManagedKafkaClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = resources.Cluster() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = resources.Cluster.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_cluster(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_cluster_rest_unset_required_fields(): + transport = transports.ManagedKafkaRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_cluster._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_cluster_rest_interceptors(null_interceptor): + transport = transports.ManagedKafkaRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.ManagedKafkaRestInterceptor(), + ) + client = ManagedKafkaClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ManagedKafkaRestInterceptor, "post_get_cluster" + ) as post, mock.patch.object( + transports.ManagedKafkaRestInterceptor, "pre_get_cluster" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = managed_kafka.GetClusterRequest.pb( + managed_kafka.GetClusterRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = resources.Cluster.to_json(resources.Cluster()) + + request = managed_kafka.GetClusterRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = resources.Cluster() + + client.get_cluster( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_cluster_rest_bad_request( + transport: str = "rest", request_type=managed_kafka.GetClusterRequest +): + client = ManagedKafkaClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/clusters/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_cluster(request) + + +def test_get_cluster_rest_flattened(): + client = ManagedKafkaClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = resources.Cluster() + + # get arguments that satisfy an http rule for this method + sample_request = {"name": "projects/sample1/locations/sample2/clusters/sample3"} + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = resources.Cluster.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_cluster(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/clusters/*}" % client.transport._host, + args[1], + ) + + +def test_get_cluster_rest_flattened_error(transport: str = "rest"): + client = ManagedKafkaClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_cluster( + managed_kafka.GetClusterRequest(), + name="name_value", + ) + + +def test_get_cluster_rest_error(): + client = ManagedKafkaClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + managed_kafka.CreateClusterRequest, + dict, + ], +) +def test_create_cluster_rest(request_type): + client = ManagedKafkaClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request_init["cluster"] = { + "gcp_config": { + "access_config": {"network_configs": [{"subnet": "subnet_value"}]}, + "kms_key": "kms_key_value", + }, + "name": "name_value", + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "labels": {}, + "capacity_config": {"vcpu_count": 1094, "memory_bytes": 1311}, + "rebalance_config": {"mode": 1}, + "state": 1, + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = managed_kafka.CreateClusterRequest.meta.fields["cluster"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
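+ # proto-plus message classes expose their fields via `meta.fields`, while
+ # raw protobuf classes expose them via `DESCRIPTOR.fields`; the
+ # `hasattr(field.message, "DESCRIPTOR")` check below distinguishes the two.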
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["cluster"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["cluster"][field])): + del request_init["cluster"][field][i][subfield] + else: + del request_init["cluster"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.create_cluster(request) + + # Establish that the response is the type that we expect. + assert response.operation.name == "operations/spam" + + +def test_create_cluster_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ManagedKafkaClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.create_cluster in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client._transport._wrapped_methods[client._transport.create_cluster] = mock_rpc + + request = {} + client.create_cluster(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.create_cluster(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_create_cluster_rest_required_fields( + request_type=managed_kafka.CreateClusterRequest, +): + transport_class = transports.ManagedKafkaRestTransport + + request_init = {} + request_init["parent"] = "" + request_init["cluster_id"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + assert "clusterId" not in jsonified_request + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_cluster._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + assert "clusterId" in jsonified_request + assert jsonified_request["clusterId"] == request_init["cluster_id"] + + jsonified_request["parent"] = "parent_value" + jsonified_request["clusterId"] = "cluster_id_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_cluster._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "cluster_id", + "request_id", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + assert "clusterId" in jsonified_request + assert jsonified_request["clusterId"] == "cluster_id_value" + + client = ManagedKafkaClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
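+ # `cluster_id` is a required query parameter, so it is still expected on
+ # the wire as an empty "clusterId" value (the request leaves it empty),
+ # together with the standard "$alt=json;enum-encoding=int" parameter.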
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.create_cluster(request) + + expected_params = [ + ( + "clusterId", + "", + ), + ("$alt", "json;enum-encoding=int"), + ] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_create_cluster_rest_unset_required_fields(): + transport = transports.ManagedKafkaRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.create_cluster._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "clusterId", + "requestId", + ) + ) + & set( + ( + "parent", + "clusterId", + "cluster", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_cluster_rest_interceptors(null_interceptor): + transport = transports.ManagedKafkaRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.ManagedKafkaRestInterceptor(), + ) + client = ManagedKafkaClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.ManagedKafkaRestInterceptor, "post_create_cluster" + ) as post, mock.patch.object( + transports.ManagedKafkaRestInterceptor, "pre_create_cluster" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = managed_kafka.CreateClusterRequest.pb( + managed_kafka.CreateClusterRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = managed_kafka.CreateClusterRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.create_cluster( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_create_cluster_rest_bad_request( + transport: str = "rest", request_type=managed_kafka.CreateClusterRequest +): + client = ManagedKafkaClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
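+ # Returning a 400 status makes the REST transport raise
+ # core_exceptions.BadRequest, which the pytest.raises context asserts.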
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.create_cluster(request) + + +def test_create_cluster_rest_flattened(): + client = ManagedKafkaClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + cluster=resources.Cluster( + gcp_config=resources.GcpConfig( + access_config=resources.AccessConfig( + network_configs=[resources.NetworkConfig(subnet="subnet_value")] + ) + ) + ), + cluster_id="cluster_id_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.create_cluster(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*}/clusters" % client.transport._host, + args[1], + ) + + +def test_create_cluster_rest_flattened_error(transport: str = "rest"): + client = ManagedKafkaClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_cluster( + managed_kafka.CreateClusterRequest(), + parent="parent_value", + cluster=resources.Cluster( + gcp_config=resources.GcpConfig( + access_config=resources.AccessConfig( + network_configs=[resources.NetworkConfig(subnet="subnet_value")] + ) + ) + ), + cluster_id="cluster_id_value", + ) + + +def test_create_cluster_rest_error(): + client = ManagedKafkaClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + managed_kafka.UpdateClusterRequest, + dict, + ], +) +def test_update_cluster_rest(request_type): + client = ManagedKafkaClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "cluster": {"name": "projects/sample1/locations/sample2/clusters/sample3"} + } + request_init["cluster"] = { + "gcp_config": { + "access_config": {"network_configs": [{"subnet": "subnet_value"}]}, + "kms_key": "kms_key_value", + }, + "name": "projects/sample1/locations/sample2/clusters/sample3", + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "labels": {}, + "capacity_config": {"vcpu_count": 1094, "memory_bytes": 1311}, + "rebalance_config": {"mode": 1}, + "state": 1, + } + # The version of a generated dependency at test runtime may differ from the version used during generation. 
+ # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = managed_kafka.UpdateClusterRequest.meta.fields["cluster"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["cluster"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["cluster"][field])): + del request_init["cluster"][field][i][subfield] + else: + del request_init["cluster"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.update_cluster(request) + + # Establish that the response is the type that we expect. 
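+ # create/update/delete are long-running methods, so the client wraps the
+ # response in an operation future; the assertion below checks the name of
+ # the underlying Operation proto rather than a concrete resource type.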
+ assert response.operation.name == "operations/spam" + + +def test_update_cluster_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ManagedKafkaClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.update_cluster in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.update_cluster] = mock_rpc + + request = {} + client.update_cluster(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.update_cluster(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_update_cluster_rest_required_fields( + request_type=managed_kafka.UpdateClusterRequest, +): + transport_class = transports.ManagedKafkaRestTransport + + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_cluster._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_cluster._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "request_id", + "update_mask", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + + client = ManagedKafkaClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "patch", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.update_cluster(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_update_cluster_rest_unset_required_fields(): + transport = transports.ManagedKafkaRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.update_cluster._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "requestId", + "updateMask", + ) + ) + & set( + ( + "updateMask", + "cluster", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_cluster_rest_interceptors(null_interceptor): + transport = transports.ManagedKafkaRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.ManagedKafkaRestInterceptor(), + ) + client = ManagedKafkaClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.ManagedKafkaRestInterceptor, "post_update_cluster" + ) as post, mock.patch.object( + transports.ManagedKafkaRestInterceptor, "pre_update_cluster" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = managed_kafka.UpdateClusterRequest.pb( + managed_kafka.UpdateClusterRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = managed_kafka.UpdateClusterRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.update_cluster( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_update_cluster_rest_bad_request( + transport: str = "rest", request_type=managed_kafka.UpdateClusterRequest +): + client = ManagedKafkaClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "cluster": {"name": "projects/sample1/locations/sample2/clusters/sample3"} + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.update_cluster(request) + + +def test_update_cluster_rest_flattened(): + client = ManagedKafkaClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = { + "cluster": {"name": "projects/sample1/locations/sample2/clusters/sample3"} + } + + # get truthy value for each flattened field + mock_args = dict( + cluster=resources.Cluster( + gcp_config=resources.GcpConfig( + access_config=resources.AccessConfig( + network_configs=[resources.NetworkConfig(subnet="subnet_value")] + ) + ) + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.update_cluster(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{cluster.name=projects/*/locations/*/clusters/*}" + % client.transport._host, + args[1], + ) + + +def test_update_cluster_rest_flattened_error(transport: str = "rest"): + client = ManagedKafkaClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_cluster( + managed_kafka.UpdateClusterRequest(), + cluster=resources.Cluster( + gcp_config=resources.GcpConfig( + access_config=resources.AccessConfig( + network_configs=[resources.NetworkConfig(subnet="subnet_value")] + ) + ) + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +def test_update_cluster_rest_error(): + client = ManagedKafkaClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + managed_kafka.DeleteClusterRequest, + dict, + ], +) +def test_delete_cluster_rest(request_type): + client = ManagedKafkaClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/clusters/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.delete_cluster(request) + + # Establish that the response is the type that we expect. + assert response.operation.name == "operations/spam" + + +def test_delete_cluster_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ManagedKafkaClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete_cluster in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.delete_cluster] = mock_rpc + + request = {} + client.delete_cluster(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete_cluster(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_delete_cluster_rest_required_fields( + request_type=managed_kafka.DeleteClusterRequest, +): + transport_class = transports.ManagedKafkaRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_cluster._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_cluster._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = ManagedKafkaClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. 
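+ # `request` is patched on the requests `Session` class itself, so the fake
+ # response is returned no matter which session instance the transport
+ # creates for this call.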
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "delete", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.delete_cluster(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_delete_cluster_rest_unset_required_fields(): + transport = transports.ManagedKafkaRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.delete_cluster._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId",)) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_cluster_rest_interceptors(null_interceptor): + transport = transports.ManagedKafkaRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.ManagedKafkaRestInterceptor(), + ) + client = ManagedKafkaClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.ManagedKafkaRestInterceptor, "post_delete_cluster" + ) as post, mock.patch.object( + transports.ManagedKafkaRestInterceptor, "pre_delete_cluster" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = managed_kafka.DeleteClusterRequest.pb( + managed_kafka.DeleteClusterRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = managed_kafka.DeleteClusterRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.delete_cluster( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_delete_cluster_rest_bad_request( + transport: str = "rest", request_type=managed_kafka.DeleteClusterRequest +): + client = ManagedKafkaClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/clusters/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_cluster(request) + + +def test_delete_cluster_rest_flattened(): + client = ManagedKafkaClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = {"name": "projects/sample1/locations/sample2/clusters/sample3"} + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.delete_cluster(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/clusters/*}" % client.transport._host, + args[1], + ) + + +def test_delete_cluster_rest_flattened_error(transport: str = "rest"): + client = ManagedKafkaClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_cluster( + managed_kafka.DeleteClusterRequest(), + name="name_value", + ) + + +def test_delete_cluster_rest_error(): + client = ManagedKafkaClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + managed_kafka.ListTopicsRequest, + dict, + ], +) +def test_list_topics_rest(request_type): + client = ManagedKafkaClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2/clusters/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = managed_kafka.ListTopicsResponse( + next_page_token="next_page_token_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = managed_kafka.ListTopicsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_topics(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListTopicsPager) + assert response.next_page_token == "next_page_token_value" + + +def test_list_topics_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ManagedKafkaClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_topics in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.list_topics] = mock_rpc + + request = {} + client.list_topics(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_topics(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_list_topics_rest_required_fields(request_type=managed_kafka.ListTopicsRequest): + transport_class = transports.ManagedKafkaRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_topics._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_topics._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "page_size", + "page_token", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = ManagedKafkaClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = managed_kafka.ListTopicsResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = managed_kafka.ListTopicsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_topics(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_topics_rest_unset_required_fields(): + transport = transports.ManagedKafkaRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.list_topics._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_topics_rest_interceptors(null_interceptor): + transport = transports.ManagedKafkaRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.ManagedKafkaRestInterceptor(), + ) + client = ManagedKafkaClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ManagedKafkaRestInterceptor, "post_list_topics" + ) as post, mock.patch.object( + transports.ManagedKafkaRestInterceptor, "pre_list_topics" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = managed_kafka.ListTopicsRequest.pb( + managed_kafka.ListTopicsRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = managed_kafka.ListTopicsResponse.to_json( + managed_kafka.ListTopicsResponse() + ) + + request = managed_kafka.ListTopicsRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = managed_kafka.ListTopicsResponse() + + client.list_topics( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_topics_rest_bad_request( + transport: str = "rest", request_type=managed_kafka.ListTopicsRequest +): + client = ManagedKafkaClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2/clusters/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_topics(request) + + +def test_list_topics_rest_flattened(): + client = ManagedKafkaClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = managed_kafka.ListTopicsResponse() + + # get arguments that satisfy an http rule for this method + sample_request = { + "parent": "projects/sample1/locations/sample2/clusters/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = managed_kafka.ListTopicsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.list_topics(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*/clusters/*}/topics" + % client.transport._host, + args[1], + ) + + +def test_list_topics_rest_flattened_error(transport: str = "rest"): + client = ManagedKafkaClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_topics( + managed_kafka.ListTopicsRequest(), + parent="parent_value", + ) + + +def test_list_topics_rest_pager(transport: str = "rest"): + client = ManagedKafkaClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
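+ # The four pages below are duplicated because the pager is consumed twice:
+ # once flattened via `list(pager)` and once page-by-page via `.pages`, and
+ # each pass issues its own sequence of HTTP requests.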
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + managed_kafka.ListTopicsResponse( + topics=[ + resources.Topic(), + resources.Topic(), + resources.Topic(), + ], + next_page_token="abc", + ), + managed_kafka.ListTopicsResponse( + topics=[], + next_page_token="def", + ), + managed_kafka.ListTopicsResponse( + topics=[ + resources.Topic(), + ], + next_page_token="ghi", + ), + managed_kafka.ListTopicsResponse( + topics=[ + resources.Topic(), + resources.Topic(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(managed_kafka.ListTopicsResponse.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = { + "parent": "projects/sample1/locations/sample2/clusters/sample3" + } + + pager = client.list_topics(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, resources.Topic) for i in results) + + pages = list(client.list_topics(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + managed_kafka.GetTopicRequest, + dict, + ], +) +def test_get_topic_rest(request_type): + client = ManagedKafkaClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/clusters/sample3/topics/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = resources.Topic( + name="name_value", + partition_count=1634, + replication_factor=1912, + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = resources.Topic.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_topic(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, resources.Topic) + assert response.name == "name_value" + assert response.partition_count == 1634 + assert response.replication_factor == 1912 + + +def test_get_topic_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ManagedKafkaClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_topic in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get_topic] = mock_rpc + + request = {} + client.get_topic(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.get_topic(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_get_topic_rest_required_fields(request_type=managed_kafka.GetTopicRequest): + transport_class = transports.ManagedKafkaRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_topic._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_topic._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = ManagedKafkaClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = resources.Topic() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = resources.Topic.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_topic(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_topic_rest_unset_required_fields(): + transport = transports.ManagedKafkaRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_topic._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_topic_rest_interceptors(null_interceptor): + transport = transports.ManagedKafkaRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.ManagedKafkaRestInterceptor(), + ) + client = ManagedKafkaClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ManagedKafkaRestInterceptor, "post_get_topic" + ) as post, mock.patch.object( + transports.ManagedKafkaRestInterceptor, "pre_get_topic" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = managed_kafka.GetTopicRequest.pb(managed_kafka.GetTopicRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = resources.Topic.to_json(resources.Topic()) + + request = managed_kafka.GetTopicRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = resources.Topic() + + client.get_topic( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_topic_rest_bad_request( + transport: str = "rest", request_type=managed_kafka.GetTopicRequest +): + client = ManagedKafkaClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/clusters/sample3/topics/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_topic(request) + + +def test_get_topic_rest_flattened(): + client = ManagedKafkaClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. 
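+    # Flattened-call shape exercised here (sketch): passing `name=...` directly
+    # is shorthand for building the request object yourself, i.e.
+    #
+    #   client.get_topic(name="projects/.../topics/...")
+    #   # is equivalent to
+    #   client.get_topic(request=managed_kafka.GetTopicRequest(name="projects/.../topics/..."))
+    #
+    # The path_template.validate() assertion below checks the resulting URI
+    # against the v1 HTTP rule for GetTopic.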
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = resources.Topic() + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/clusters/sample3/topics/sample4" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = resources.Topic.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_topic(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/clusters/*/topics/*}" + % client.transport._host, + args[1], + ) + + +def test_get_topic_rest_flattened_error(transport: str = "rest"): + client = ManagedKafkaClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_topic( + managed_kafka.GetTopicRequest(), + name="name_value", + ) + + +def test_get_topic_rest_error(): + client = ManagedKafkaClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + managed_kafka.CreateTopicRequest, + dict, + ], +) +def test_create_topic_rest(request_type): + client = ManagedKafkaClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2/clusters/sample3"} + request_init["topic"] = { + "name": "name_value", + "partition_count": 1634, + "replication_factor": 1912, + "configs": {}, + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = managed_kafka.CreateTopicRequest.meta.fields["topic"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
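+        # Detection heuristic used below: proto-plus message classes expose
+        # their fields via `.meta.fields`, while raw protobuf (`*_pb2`) classes
+        # expose them via `.DESCRIPTOR.fields`, so the absence of a DESCRIPTOR
+        # attribute is treated as "this is a proto-plus type".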
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["topic"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["topic"][field])): + del request_init["topic"][field][i][subfield] + else: + del request_init["topic"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = resources.Topic( + name="name_value", + partition_count=1634, + replication_factor=1912, + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = resources.Topic.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.create_topic(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, resources.Topic) + assert response.name == "name_value" + assert response.partition_count == 1634 + assert response.replication_factor == 1912 + + +def test_create_topic_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ManagedKafkaClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.create_topic in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.create_topic] = mock_rpc + + request = {} + client.create_topic(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.create_topic(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_create_topic_rest_required_fields( + request_type=managed_kafka.CreateTopicRequest, +): + transport_class = transports.ManagedKafkaRestTransport + + request_init = {} + request_init["parent"] = "" + request_init["topic_id"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + assert "topicId" not in jsonified_request + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_topic._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + assert "topicId" in jsonified_request + assert jsonified_request["topicId"] == request_init["topic_id"] + + jsonified_request["parent"] = "parent_value" + jsonified_request["topicId"] = "topic_id_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_topic._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("topic_id",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + assert "topicId" in jsonified_request + assert jsonified_request["topicId"] == "topic_id_value" + + client = ManagedKafkaClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = resources.Topic() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
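+        # Note: `topic_id` is a required query-string parameter for CreateTopic,
+        # so the transport is expected to send it even while it still holds its
+        # default value (""); the expected_params assertion below depends on that.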
+ with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = resources.Topic.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.create_topic(request) + + expected_params = [ + ( + "topicId", + "", + ), + ("$alt", "json;enum-encoding=int"), + ] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_create_topic_rest_unset_required_fields(): + transport = transports.ManagedKafkaRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.create_topic._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(("topicId",)) + & set( + ( + "parent", + "topicId", + "topic", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_topic_rest_interceptors(null_interceptor): + transport = transports.ManagedKafkaRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.ManagedKafkaRestInterceptor(), + ) + client = ManagedKafkaClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ManagedKafkaRestInterceptor, "post_create_topic" + ) as post, mock.patch.object( + transports.ManagedKafkaRestInterceptor, "pre_create_topic" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = managed_kafka.CreateTopicRequest.pb( + managed_kafka.CreateTopicRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = resources.Topic.to_json(resources.Topic()) + + request = managed_kafka.CreateTopicRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = resources.Topic() + + client.create_topic( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_create_topic_rest_bad_request( + transport: str = "rest", request_type=managed_kafka.CreateTopicRequest +): + client = ManagedKafkaClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2/clusters/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.create_topic(request) + + +def test_create_topic_rest_flattened(): + client = ManagedKafkaClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = resources.Topic() + + # get arguments that satisfy an http rule for this method + sample_request = { + "parent": "projects/sample1/locations/sample2/clusters/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + topic=resources.Topic(name="name_value"), + topic_id="topic_id_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = resources.Topic.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.create_topic(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*/clusters/*}/topics" + % client.transport._host, + args[1], + ) + + +def test_create_topic_rest_flattened_error(transport: str = "rest"): + client = ManagedKafkaClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_topic( + managed_kafka.CreateTopicRequest(), + parent="parent_value", + topic=resources.Topic(name="name_value"), + topic_id="topic_id_value", + ) + + +def test_create_topic_rest_error(): + client = ManagedKafkaClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + managed_kafka.UpdateTopicRequest, + dict, + ], +) +def test_update_topic_rest(request_type): + client = ManagedKafkaClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "topic": { + "name": "projects/sample1/locations/sample2/clusters/sample3/topics/sample4" + } + } + request_init["topic"] = { + "name": "projects/sample1/locations/sample2/clusters/sample3/topics/sample4", + "partition_count": 1634, + "replication_factor": 1912, + "configs": {}, + } + # The version of a generated dependency at test runtime may differ from the version used during generation. 
+ # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = managed_kafka.UpdateTopicRequest.meta.fields["topic"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["topic"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["topic"][field])): + del request_init["topic"][field][i][subfield] + else: + del request_init["topic"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = resources.Topic( + name="name_value", + partition_count=1634, + replication_factor=1912, + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = resources.Topic.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.update_topic(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, resources.Topic) + assert response.name == "name_value" + assert response.partition_count == 1634 + assert response.replication_factor == 1912 + + +def test_update_topic_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ManagedKafkaClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.update_topic in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.update_topic] = mock_rpc + + request = {} + client.update_topic(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.update_topic(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_update_topic_rest_required_fields( + request_type=managed_kafka.UpdateTopicRequest, +): + transport_class = transports.ManagedKafkaRestTransport + + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_topic._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_topic._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("update_mask",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + + client = ManagedKafkaClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = resources.Topic() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
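+            # UpdateTopic has no required path parameters (hence the empty
+            # request_init above); `update_mask` is the only required field the
+            # transport may still have to supply as a query parameter, as the
+            # companion *_unset_required_fields test below also checks.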
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "patch", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = resources.Topic.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.update_topic(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_update_topic_rest_unset_required_fields(): + transport = transports.ManagedKafkaRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.update_topic._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(("updateMask",)) + & set( + ( + "updateMask", + "topic", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_topic_rest_interceptors(null_interceptor): + transport = transports.ManagedKafkaRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.ManagedKafkaRestInterceptor(), + ) + client = ManagedKafkaClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ManagedKafkaRestInterceptor, "post_update_topic" + ) as post, mock.patch.object( + transports.ManagedKafkaRestInterceptor, "pre_update_topic" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = managed_kafka.UpdateTopicRequest.pb( + managed_kafka.UpdateTopicRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = resources.Topic.to_json(resources.Topic()) + + request = managed_kafka.UpdateTopicRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = resources.Topic() + + client.update_topic( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_update_topic_rest_bad_request( + transport: str = "rest", request_type=managed_kafka.UpdateTopicRequest +): + client = ManagedKafkaClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "topic": { + "name": "projects/sample1/locations/sample2/clusters/sample3/topics/sample4" + } + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.update_topic(request) + + +def test_update_topic_rest_flattened(): + client = ManagedKafkaClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = resources.Topic() + + # get arguments that satisfy an http rule for this method + sample_request = { + "topic": { + "name": "projects/sample1/locations/sample2/clusters/sample3/topics/sample4" + } + } + + # get truthy value for each flattened field + mock_args = dict( + topic=resources.Topic(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = resources.Topic.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.update_topic(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{topic.name=projects/*/locations/*/clusters/*/topics/*}" + % client.transport._host, + args[1], + ) + + +def test_update_topic_rest_flattened_error(transport: str = "rest"): + client = ManagedKafkaClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_topic( + managed_kafka.UpdateTopicRequest(), + topic=resources.Topic(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +def test_update_topic_rest_error(): + client = ManagedKafkaClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + managed_kafka.DeleteTopicRequest, + dict, + ], +) +def test_delete_topic_rest(request_type): + client = ManagedKafkaClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/clusters/sample3/topics/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.delete_topic(request) + + # Establish that the response is the type that we expect. 
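+    # DeleteTopic has no response payload, so the client surfaces it as None;
+    # the empty-string body installed on the mock above stands in for the
+    # Empty message a real server would return.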
+ assert response is None + + +def test_delete_topic_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ManagedKafkaClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete_topic in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.delete_topic] = mock_rpc + + request = {} + client.delete_topic(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.delete_topic(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_delete_topic_rest_required_fields( + request_type=managed_kafka.DeleteTopicRequest, +): + transport_class = transports.ManagedKafkaRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_topic._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_topic._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = ManagedKafkaClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = None + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "delete", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.delete_topic(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_delete_topic_rest_unset_required_fields(): + transport = transports.ManagedKafkaRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.delete_topic._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_topic_rest_interceptors(null_interceptor): + transport = transports.ManagedKafkaRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.ManagedKafkaRestInterceptor(), + ) + client = ManagedKafkaClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ManagedKafkaRestInterceptor, "pre_delete_topic" + ) as pre: + pre.assert_not_called() + pb_message = managed_kafka.DeleteTopicRequest.pb( + managed_kafka.DeleteTopicRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + + request = managed_kafka.DeleteTopicRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + + client.delete_topic( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + + +def test_delete_topic_rest_bad_request( + transport: str = "rest", request_type=managed_kafka.DeleteTopicRequest +): + client = ManagedKafkaClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/clusters/sample3/topics/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_topic(request) + + +def test_delete_topic_rest_flattened(): + client = ManagedKafkaClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = None + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/clusters/sample3/topics/sample4" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.delete_topic(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/clusters/*/topics/*}" + % client.transport._host, + args[1], + ) + + +def test_delete_topic_rest_flattened_error(transport: str = "rest"): + client = ManagedKafkaClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_topic( + managed_kafka.DeleteTopicRequest(), + name="name_value", + ) + + +def test_delete_topic_rest_error(): + client = ManagedKafkaClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + managed_kafka.ListConsumerGroupsRequest, + dict, + ], +) +def test_list_consumer_groups_rest(request_type): + client = ManagedKafkaClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2/clusters/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = managed_kafka.ListConsumerGroupsResponse( + next_page_token="next_page_token_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = managed_kafka.ListConsumerGroupsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_consumer_groups(request) + + # Establish that the response is the type that we expect. 
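+    # For list methods the client wraps the first response in a pager; the page
+    # token from the mocked response is surfaced through the pager's
+    # `next_page_token` attribute, which is what the assertions check.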
+ assert isinstance(response, pagers.ListConsumerGroupsPager) + assert response.next_page_token == "next_page_token_value" + + +def test_list_consumer_groups_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ManagedKafkaClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.list_consumer_groups in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.list_consumer_groups + ] = mock_rpc + + request = {} + client.list_consumer_groups(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_consumer_groups(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_list_consumer_groups_rest_required_fields( + request_type=managed_kafka.ListConsumerGroupsRequest, +): + transport_class = transports.ManagedKafkaRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_consumer_groups._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_consumer_groups._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "page_size", + "page_token", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = ManagedKafkaClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = managed_kafka.ListConsumerGroupsResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = managed_kafka.ListConsumerGroupsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_consumer_groups(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_consumer_groups_rest_unset_required_fields(): + transport = transports.ManagedKafkaRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.list_consumer_groups._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_consumer_groups_rest_interceptors(null_interceptor): + transport = transports.ManagedKafkaRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.ManagedKafkaRestInterceptor(), + ) + client = ManagedKafkaClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ManagedKafkaRestInterceptor, "post_list_consumer_groups" + ) as post, mock.patch.object( + transports.ManagedKafkaRestInterceptor, "pre_list_consumer_groups" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = managed_kafka.ListConsumerGroupsRequest.pb( + managed_kafka.ListConsumerGroupsRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = managed_kafka.ListConsumerGroupsResponse.to_json( + managed_kafka.ListConsumerGroupsResponse() + ) + + request = managed_kafka.ListConsumerGroupsRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = managed_kafka.ListConsumerGroupsResponse() + + client.list_consumer_groups( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_consumer_groups_rest_bad_request( + transport: str = "rest", request_type=managed_kafka.ListConsumerGroupsRequest +): + client = ManagedKafkaClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2/clusters/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_consumer_groups(request) + + +def test_list_consumer_groups_rest_flattened(): + client = ManagedKafkaClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = managed_kafka.ListConsumerGroupsResponse() + + # get arguments that satisfy an http rule for this method + sample_request = { + "parent": "projects/sample1/locations/sample2/clusters/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = managed_kafka.ListConsumerGroupsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.list_consumer_groups(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*/clusters/*}/consumerGroups" + % client.transport._host, + args[1], + ) + + +def test_list_consumer_groups_rest_flattened_error(transport: str = "rest"): + client = ManagedKafkaClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_consumer_groups( + managed_kafka.ListConsumerGroupsRequest(), + parent="parent_value", + ) + + +def test_list_consumer_groups_rest_pager(transport: str = "rest"): + client = ManagedKafkaClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
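+        # Consumption patterns exercised below (sketch): iterating the pager
+        # yields individual ConsumerGroup items across all mocked pages, while
+        # `.pages` yields the raw per-call responses, e.g.
+        #
+        #   for group in client.list_consumer_groups(request=...):       # items
+        #       ...
+        #   for page in client.list_consumer_groups(request=...).pages:  # pages
+        #       page.raw_page.next_page_token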
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + managed_kafka.ListConsumerGroupsResponse( + consumer_groups=[ + resources.ConsumerGroup(), + resources.ConsumerGroup(), + resources.ConsumerGroup(), + ], + next_page_token="abc", + ), + managed_kafka.ListConsumerGroupsResponse( + consumer_groups=[], + next_page_token="def", + ), + managed_kafka.ListConsumerGroupsResponse( + consumer_groups=[ + resources.ConsumerGroup(), + ], + next_page_token="ghi", + ), + managed_kafka.ListConsumerGroupsResponse( + consumer_groups=[ + resources.ConsumerGroup(), + resources.ConsumerGroup(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + managed_kafka.ListConsumerGroupsResponse.to_json(x) for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = { + "parent": "projects/sample1/locations/sample2/clusters/sample3" + } + + pager = client.list_consumer_groups(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, resources.ConsumerGroup) for i in results) + + pages = list(client.list_consumer_groups(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + managed_kafka.GetConsumerGroupRequest, + dict, + ], +) +def test_get_consumer_group_rest(request_type): + client = ManagedKafkaClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/clusters/sample3/consumerGroups/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = resources.ConsumerGroup( + name="name_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = resources.ConsumerGroup.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_consumer_group(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, resources.ConsumerGroup) + assert response.name == "name_value" + + +def test_get_consumer_group_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ManagedKafkaClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.get_consumer_group in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.get_consumer_group + ] = mock_rpc + + request = {} + client.get_consumer_group(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.get_consumer_group(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_get_consumer_group_rest_required_fields( + request_type=managed_kafka.GetConsumerGroupRequest, +): + transport_class = transports.ManagedKafkaRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_consumer_group._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_consumer_group._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = ManagedKafkaClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = resources.ConsumerGroup() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = resources.ConsumerGroup.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_consumer_group(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_consumer_group_rest_unset_required_fields(): + transport = transports.ManagedKafkaRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_consumer_group._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_consumer_group_rest_interceptors(null_interceptor): + transport = transports.ManagedKafkaRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.ManagedKafkaRestInterceptor(), + ) + client = ManagedKafkaClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ManagedKafkaRestInterceptor, "post_get_consumer_group" + ) as post, mock.patch.object( + transports.ManagedKafkaRestInterceptor, "pre_get_consumer_group" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = managed_kafka.GetConsumerGroupRequest.pb( + managed_kafka.GetConsumerGroupRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = resources.ConsumerGroup.to_json( + resources.ConsumerGroup() + ) + + request = managed_kafka.GetConsumerGroupRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = resources.ConsumerGroup() + + client.get_consumer_group( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_consumer_group_rest_bad_request( + transport: str = "rest", request_type=managed_kafka.GetConsumerGroupRequest +): + client = ManagedKafkaClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/clusters/sample3/consumerGroups/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
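+    # A 400 status on the mocked response is translated by the REST transport
+    # into the corresponding google.api_core exception (here BadRequest), which
+    # is what pytest.raises() captures.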
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_consumer_group(request) + + +def test_get_consumer_group_rest_flattened(): + client = ManagedKafkaClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = resources.ConsumerGroup() + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/clusters/sample3/consumerGroups/sample4" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = resources.ConsumerGroup.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_consumer_group(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/clusters/*/consumerGroups/*}" + % client.transport._host, + args[1], + ) + + +def test_get_consumer_group_rest_flattened_error(transport: str = "rest"): + client = ManagedKafkaClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_consumer_group( + managed_kafka.GetConsumerGroupRequest(), + name="name_value", + ) + + +def test_get_consumer_group_rest_error(): + client = ManagedKafkaClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + managed_kafka.UpdateConsumerGroupRequest, + dict, + ], +) +def test_update_consumer_group_rest(request_type): + client = ManagedKafkaClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "consumer_group": { + "name": "projects/sample1/locations/sample2/clusters/sample3/consumerGroups/sample4" + } + } + request_init["consumer_group"] = { + "name": "projects/sample1/locations/sample2/clusters/sample3/consumerGroups/sample4", + "topics": {}, + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = managed_kafka.UpdateConsumerGroupRequest.meta.fields["consumer_group"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. 
+ # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["consumer_group"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["consumer_group"][field])): + del request_init["consumer_group"][field][i][subfield] + else: + del request_init["consumer_group"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = resources.ConsumerGroup( + name="name_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = resources.ConsumerGroup.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.update_consumer_group(request) + + # Establish that the response is the type that we expect. 
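The pruning loop above guards against version skew: the sample request is generated against one version of the proto, while the proto installed at test time may be older and lack some nested fields, so any `(field, subfield)` pair unknown to the runtime schema is deleted before the request object is built. The same idea in isolation, using plain dictionaries and no protobuf dependency:

```python
# Fields the runtime copy of the proto is assumed to know about (illustrative).
runtime_schema = {"consumer_group": {"name", "topics"}}
sample_request = {"consumer_group": {"name": "n", "topics": {}, "new_field": "x"}}

for field, value in sample_request.items():
    if isinstance(value, dict):
        for subfield in list(value):  # copy keys so we can delete while iterating
            if subfield not in runtime_schema.get(field, set()):
                del value[subfield]  # drop subfields the runtime proto lacks

assert sample_request == {"consumer_group": {"name": "n", "topics": {}}}
```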
+ assert isinstance(response, resources.ConsumerGroup) + assert response.name == "name_value" + + +def test_update_consumer_group_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ManagedKafkaClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.update_consumer_group + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.update_consumer_group + ] = mock_rpc + + request = {} + client.update_consumer_group(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.update_consumer_group(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_update_consumer_group_rest_required_fields( + request_type=managed_kafka.UpdateConsumerGroupRequest, +): + transport_class = transports.ManagedKafkaRestTransport + + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_consumer_group._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_consumer_group._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("update_mask",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + + client = ManagedKafkaClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = resources.ConsumerGroup() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "patch", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = resources.ConsumerGroup.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.update_consumer_group(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_update_consumer_group_rest_unset_required_fields(): + transport = transports.ManagedKafkaRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.update_consumer_group._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(("updateMask",)) + & set( + ( + "updateMask", + "consumerGroup", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_consumer_group_rest_interceptors(null_interceptor): + transport = transports.ManagedKafkaRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.ManagedKafkaRestInterceptor(), + ) + client = ManagedKafkaClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ManagedKafkaRestInterceptor, "post_update_consumer_group" + ) as post, mock.patch.object( + transports.ManagedKafkaRestInterceptor, "pre_update_consumer_group" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = managed_kafka.UpdateConsumerGroupRequest.pb( + managed_kafka.UpdateConsumerGroupRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = resources.ConsumerGroup.to_json( + resources.ConsumerGroup() + ) + + request = managed_kafka.UpdateConsumerGroupRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = resources.ConsumerGroup() + + client.update_consumer_group( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_update_consumer_group_rest_bad_request( + transport: str = "rest", request_type=managed_kafka.UpdateConsumerGroupRequest +): + client = ManagedKafkaClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "consumer_group": { + "name": "projects/sample1/locations/sample2/clusters/sample3/consumerGroups/sample4" + } + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
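The required-fields tests above stub `path_template.transcode` because the placeholder values they inject would not satisfy the real transcoder. The stubbed result stands in for roughly this shape; the values below are illustrative, not produced by the library:

```python
# Approximate shape of a transcode result for GetConsumerGroup: an HTTP verb,
# a URI with path parameters bound, and whatever fields remain as query
# parameters (patch/post methods additionally carry a "body" entry).
transcode_result = {
    "uri": "v1/projects/p/locations/l/clusters/c/consumerGroups/g",
    "method": "get",
    "query_params": {},  # fields not bound into the URI would land here
}

url = "https://managedkafka.googleapis.com/" + transcode_result["uri"]
print(transcode_result["method"].upper(), url)
```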
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.update_consumer_group(request) + + +def test_update_consumer_group_rest_flattened(): + client = ManagedKafkaClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = resources.ConsumerGroup() + + # get arguments that satisfy an http rule for this method + sample_request = { + "consumer_group": { + "name": "projects/sample1/locations/sample2/clusters/sample3/consumerGroups/sample4" + } + } + + # get truthy value for each flattened field + mock_args = dict( + consumer_group=resources.ConsumerGroup(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = resources.ConsumerGroup.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.update_consumer_group(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{consumer_group.name=projects/*/locations/*/clusters/*/consumerGroups/*}" + % client.transport._host, + args[1], + ) + + +def test_update_consumer_group_rest_flattened_error(transport: str = "rest"): + client = ManagedKafkaClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_consumer_group( + managed_kafka.UpdateConsumerGroupRequest(), + consumer_group=resources.ConsumerGroup(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +def test_update_consumer_group_rest_error(): + client = ManagedKafkaClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + managed_kafka.DeleteConsumerGroupRequest, + dict, + ], +) +def test_delete_consumer_group_rest(request_type): + client = ManagedKafkaClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/clusters/sample3/consumerGroups/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = None + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.delete_consumer_group(request) + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_consumer_group_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ManagedKafkaClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.delete_consumer_group + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.delete_consumer_group + ] = mock_rpc + + request = {} + client.delete_consumer_group(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.delete_consumer_group(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_delete_consumer_group_rest_required_fields( + request_type=managed_kafka.DeleteConsumerGroupRequest, +): + transport_class = transports.ManagedKafkaRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_consumer_group._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_consumer_group._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = ManagedKafkaClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = None + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "delete", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.delete_consumer_group(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_delete_consumer_group_rest_unset_required_fields(): + transport = transports.ManagedKafkaRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.delete_consumer_group._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_consumer_group_rest_interceptors(null_interceptor): + transport = transports.ManagedKafkaRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.ManagedKafkaRestInterceptor(), + ) + client = ManagedKafkaClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ManagedKafkaRestInterceptor, "pre_delete_consumer_group" + ) as pre: + pre.assert_not_called() + pb_message = managed_kafka.DeleteConsumerGroupRequest.pb( + managed_kafka.DeleteConsumerGroupRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + + request = managed_kafka.DeleteConsumerGroupRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + + client.delete_consumer_group( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + + +def test_delete_consumer_group_rest_bad_request( + transport: str = "rest", request_type=managed_kafka.DeleteConsumerGroupRequest +): + client = ManagedKafkaClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/clusters/sample3/consumerGroups/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_consumer_group(request) + + +def test_delete_consumer_group_rest_flattened(): + client = ManagedKafkaClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
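The `*_rest_interceptors` tests in this hunk verify that a configured interceptor's `pre_*` hook runs before the HTTP call and may rewrite the request and metadata, while the `post_*` hook runs on the response afterwards; delete, which returns no body, only gets a `pre` hook. A minimal sketch of that contract with local stand-in names, not the generated `ManagedKafkaRestInterceptor`:

```python
class LoggingInterceptor:
    def pre_get_consumer_group(self, request, metadata):
        # "pre" may rewrite the request and metadata before the HTTP call.
        return request, list(metadata) + [("x-debug", "1")]

    def post_get_consumer_group(self, response):
        # "post" may rewrite the response after the HTTP call.
        return response


def call_with_interceptor(interceptor, request, send):
    request, metadata = interceptor.pre_get_consumer_group(request, ())
    response = send(request, metadata)  # stands in for the HTTP round trip
    return interceptor.post_get_consumer_group(response)


result = call_with_interceptor(
    LoggingInterceptor(),
    {"name": "projects/p/locations/l/clusters/c/consumerGroups/g"},
    lambda req, md: {"name": req["name"]},
)
assert result["name"].endswith("/consumerGroups/g")
```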
+ return_value = None + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/clusters/sample3/consumerGroups/sample4" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.delete_consumer_group(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/clusters/*/consumerGroups/*}" + % client.transport._host, + args[1], + ) + + +def test_delete_consumer_group_rest_flattened_error(transport: str = "rest"): + client = ManagedKafkaClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_consumer_group( + managed_kafka.DeleteConsumerGroupRequest(), + name="name_value", + ) + + +def test_delete_consumer_group_rest_error(): + client = ManagedKafkaClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.ManagedKafkaGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = ManagedKafkaClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.ManagedKafkaGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = ManagedKafkaClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. + transport = transports.ManagedKafkaGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = ManagedKafkaClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = ManagedKafkaClient( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.ManagedKafkaGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = ManagedKafkaClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. 
+ transport = transports.ManagedKafkaGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = ManagedKafkaClient(transport=transport) + assert client.transport is transport + + +def test_transport_get_channel(): + # A client may be instantiated with a custom transport instance. + transport = transports.ManagedKafkaGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + transport = transports.ManagedKafkaGrpcAsyncIOTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.ManagedKafkaGrpcTransport, + transports.ManagedKafkaGrpcAsyncIOTransport, + transports.ManagedKafkaRestTransport, + ], +) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "rest", + ], +) +def test_transport_kind(transport_name): + transport = ManagedKafkaClient.get_transport_class(transport_name)( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert transport.kind == transport_name + + +def test_transport_grpc_default(): + # A client should use the gRPC transport by default. + client = ManagedKafkaClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert isinstance( + client.transport, + transports.ManagedKafkaGrpcTransport, + ) + + +def test_managed_kafka_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.ManagedKafkaTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json", + ) + + +def test_managed_kafka_base_transport(): + # Instantiate the base transport. + with mock.patch( + "google.cloud.managedkafka_v1.services.managed_kafka.transports.ManagedKafkaTransport.__init__" + ) as Transport: + Transport.return_value = None + transport = transports.ManagedKafkaTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. 
+ methods = ( + "list_clusters", + "get_cluster", + "create_cluster", + "update_cluster", + "delete_cluster", + "list_topics", + "get_topic", + "create_topic", + "update_topic", + "delete_topic", + "list_consumer_groups", + "get_consumer_group", + "update_consumer_group", + "delete_consumer_group", + "get_location", + "list_locations", + "get_operation", + "cancel_operation", + "delete_operation", + "list_operations", + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + with pytest.raises(NotImplementedError): + transport.close() + + # Additionally, the LRO client (a property) should + # also raise NotImplementedError + with pytest.raises(NotImplementedError): + transport.operations_client + + # Catch all for all remaining methods and properties + remainder = [ + "kind", + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + + +def test_managed_kafka_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch( + "google.cloud.managedkafka_v1.services.managed_kafka.transports.ManagedKafkaTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.ManagedKafkaTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with( + "credentials.json", + scopes=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id="octopus", + ) + + +def test_managed_kafka_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. + with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( + "google.cloud.managedkafka_v1.services.managed_kafka.transports.ManagedKafkaTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.ManagedKafkaTransport() + adc.assert_called_once() + + +def test_managed_kafka_auth_adc(): + # If no credentials are provided, we should use ADC credentials. + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + ManagedKafkaClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id=None, + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.ManagedKafkaGrpcTransport, + transports.ManagedKafkaGrpcAsyncIOTransport, + ], +) +def test_managed_kafka_transport_auth_adc(transport_class): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
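`test_managed_kafka_base_transport` above checks that the abstract transport declares every RPC but implements none of them: each method raises `NotImplementedError` until a concrete gRPC or REST transport overrides it. A toy version of that contract, with illustrative names rather than the real base class:

```python
class BaseTransport:
    # RPC names the abstract transport declares but does not implement.
    _RPCS = ("list_clusters", "get_consumer_group", "delete_consumer_group")

    def __getattr__(self, name):
        if name in self._RPCS:
            def not_implemented(*args, **kwargs):
                raise NotImplementedError(name)
            return not_implemented
        raise AttributeError(name)


transport = BaseTransport()
try:
    transport.get_consumer_group(request=object())
except NotImplementedError as exc:
    print("unimplemented RPC:", exc)
```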
+ with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + adc.assert_called_once_with( + scopes=["1", "2"], + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id="octopus", + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.ManagedKafkaGrpcTransport, + transports.ManagedKafkaGrpcAsyncIOTransport, + transports.ManagedKafkaRestTransport, + ], +) +def test_managed_kafka_transport_auth_gdch_credentials(transport_class): + host = "https://language.com" + api_audience_tests = [None, "https://language2.com"] + api_audience_expect = [host, "https://language2.com"] + for t, e in zip(api_audience_tests, api_audience_expect): + with mock.patch.object(google.auth, "default", autospec=True) as adc: + gdch_mock = mock.MagicMock() + type(gdch_mock).with_gdch_audience = mock.PropertyMock( + return_value=gdch_mock + ) + adc.return_value = (gdch_mock, None) + transport_class(host=host, api_audience=t) + gdch_mock.with_gdch_audience.assert_called_once_with(e) + + +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.ManagedKafkaGrpcTransport, grpc_helpers), + (transports.ManagedKafkaGrpcAsyncIOTransport, grpc_helpers_async), + ], +) +def test_managed_kafka_transport_create_channel(transport_class, grpc_helpers): + # If credentials and host are not provided, the transport class should use + # ADC credentials. + with mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + + create_channel.assert_called_with( + "managedkafka.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + scopes=["1", "2"], + default_host="managedkafka.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "transport_class", + [transports.ManagedKafkaGrpcTransport, transports.ManagedKafkaGrpcAsyncIOTransport], +) +def test_managed_kafka_grpc_transport_client_cert_source_for_mtls(transport_class): + cred = ga_credentials.AnonymousCredentials() + + # Check ssl_channel_credentials is used if provided. + with mock.patch.object(transport_class, "create_channel") as mock_create_channel: + mock_ssl_channel_creds = mock.Mock() + transport_class( + host="squid.clam.whelk", + credentials=cred, + ssl_channel_credentials=mock_ssl_channel_creds, + ) + mock_create_channel.assert_called_once_with( + "squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_channel_creds, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls + # is used. 
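The mTLS tests in this region pass `client_cert_source_callback`, defined earlier in this test module, wherever a client certificate source is needed. The convention is simply a zero-argument callable returning a `(certificate_chain, private_key)` byte pair, which is what the later assertion against `b"cert bytes"`/`b"key bytes"` relies on:

```python
# Shape of the client certificate source used by the mTLS tests: a callable
# with no arguments returning (certificate_chain_bytes, private_key_bytes).
# The byte values are placeholders matching what the tests assert against.
def client_cert_source_callback():
    return b"cert bytes", b"key bytes"


cert, key = client_cert_source_callback()
assert isinstance(cert, bytes) and isinstance(key, bytes)
print(len(cert), len(key))
```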
+ with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): + with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: + transport_class( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback, + ) + expected_cert, expected_key = client_cert_source_callback() + mock_ssl_cred.assert_called_once_with( + certificate_chain=expected_cert, private_key=expected_key + ) + + +def test_managed_kafka_http_transport_client_cert_source_for_mtls(): + cred = ga_credentials.AnonymousCredentials() + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ) as mock_configure_mtls_channel: + transports.ManagedKafkaRestTransport( + credentials=cred, client_cert_source_for_mtls=client_cert_source_callback + ) + mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) + + +def test_managed_kafka_rest_lro_client(): + client = ManagedKafkaClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + transport = client.transport + + # Ensure that we have a api-core operations client. + assert isinstance( + transport.operations_client, + operations_v1.AbstractOperationsClient, + ) + + # Ensure that subsequent calls to the property send the exact same object. + assert transport.operations_client is transport.operations_client + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + "rest", + ], +) +def test_managed_kafka_host_no_port(transport_name): + client = ManagedKafkaClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="managedkafka.googleapis.com" + ), + transport=transport_name, + ) + assert client.transport._host == ( + "managedkafka.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://managedkafka.googleapis.com" + ) + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + "rest", + ], +) +def test_managed_kafka_host_with_port(transport_name): + client = ManagedKafkaClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="managedkafka.googleapis.com:8000" + ), + transport=transport_name, + ) + assert client.transport._host == ( + "managedkafka.googleapis.com:8000" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://managedkafka.googleapis.com:8000" + ) + + +@pytest.mark.parametrize( + "transport_name", + [ + "rest", + ], +) +def test_managed_kafka_client_transport_session_collision(transport_name): + creds1 = ga_credentials.AnonymousCredentials() + creds2 = ga_credentials.AnonymousCredentials() + client1 = ManagedKafkaClient( + credentials=creds1, + transport=transport_name, + ) + client2 = ManagedKafkaClient( + credentials=creds2, + transport=transport_name, + ) + session1 = client1.transport.list_clusters._session + session2 = client2.transport.list_clusters._session + assert session1 != session2 + session1 = client1.transport.get_cluster._session + session2 = client2.transport.get_cluster._session + assert session1 != session2 + session1 = client1.transport.create_cluster._session + session2 = client2.transport.create_cluster._session + assert session1 != session2 + session1 = client1.transport.update_cluster._session + session2 = client2.transport.update_cluster._session + assert session1 != session2 + session1 = 
client1.transport.delete_cluster._session + session2 = client2.transport.delete_cluster._session + assert session1 != session2 + session1 = client1.transport.list_topics._session + session2 = client2.transport.list_topics._session + assert session1 != session2 + session1 = client1.transport.get_topic._session + session2 = client2.transport.get_topic._session + assert session1 != session2 + session1 = client1.transport.create_topic._session + session2 = client2.transport.create_topic._session + assert session1 != session2 + session1 = client1.transport.update_topic._session + session2 = client2.transport.update_topic._session + assert session1 != session2 + session1 = client1.transport.delete_topic._session + session2 = client2.transport.delete_topic._session + assert session1 != session2 + session1 = client1.transport.list_consumer_groups._session + session2 = client2.transport.list_consumer_groups._session + assert session1 != session2 + session1 = client1.transport.get_consumer_group._session + session2 = client2.transport.get_consumer_group._session + assert session1 != session2 + session1 = client1.transport.update_consumer_group._session + session2 = client2.transport.update_consumer_group._session + assert session1 != session2 + session1 = client1.transport.delete_consumer_group._session + session2 = client2.transport.delete_consumer_group._session + assert session1 != session2 + + +def test_managed_kafka_grpc_transport_channel(): + channel = grpc.secure_channel("http://localhost/", grpc.local_channel_credentials()) + + # Check that channel is used if provided. + transport = transports.ManagedKafkaGrpcTransport( + host="squid.clam.whelk", + channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None + + +def test_managed_kafka_grpc_asyncio_transport_channel(): + channel = aio.secure_channel("http://localhost/", grpc.local_channel_credentials()) + + # Check that channel is used if provided. + transport = transports.ManagedKafkaGrpcAsyncIOTransport( + host="squid.clam.whelk", + channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. 
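The `host_no_port`/`host_with_port` tests earlier in this hunk pin down how an explicit `api_endpoint` is normalized per transport: gRPC transports get `:443` appended when no port is given, while the REST transport is addressed as an `https://` URL. A small local illustration of that mapping, not the library's own resolution code:

```python
def resolve_host(api_endpoint: str, transport: str) -> str:
    # gRPC hosts default to port 443; REST hosts are addressed as https:// URLs.
    if transport in ("grpc", "grpc_asyncio"):
        return api_endpoint if ":" in api_endpoint else f"{api_endpoint}:443"
    return api_endpoint if api_endpoint.startswith("http") else f"https://{api_endpoint}"


assert resolve_host("managedkafka.googleapis.com", "grpc") == "managedkafka.googleapis.com:443"
assert resolve_host("managedkafka.googleapis.com", "rest") == "https://managedkafka.googleapis.com"
assert resolve_host("managedkafka.googleapis.com:8000", "rest") == "https://managedkafka.googleapis.com:8000"
```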
+@pytest.mark.parametrize( + "transport_class", + [transports.ManagedKafkaGrpcTransport, transports.ManagedKafkaGrpcAsyncIOTransport], +) +def test_managed_kafka_transport_channel_mtls_with_client_cert_source(transport_class): + with mock.patch( + "grpc.ssl_channel_credentials", autospec=True + ) as grpc_ssl_channel_cred: + with mock.patch.object( + transport_class, "create_channel" + ) as grpc_create_channel: + mock_ssl_cred = mock.Mock() + grpc_ssl_channel_cred.return_value = mock_ssl_cred + + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + + cred = ga_credentials.AnonymousCredentials() + with pytest.warns(DeprecationWarning): + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (cred, None) + transport = transport_class( + host="squid.clam.whelk", + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=client_cert_source_callback, + ) + adc.assert_called_once() + + grpc_ssl_channel_cred.assert_called_once_with( + certificate_chain=b"cert bytes", private_key=b"key bytes" + ) + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + assert transport._ssl_channel_credentials == mock_ssl_cred + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. +@pytest.mark.parametrize( + "transport_class", + [transports.ManagedKafkaGrpcTransport, transports.ManagedKafkaGrpcAsyncIOTransport], +) +def test_managed_kafka_transport_channel_mtls_with_adc(transport_class): + mock_ssl_cred = mock.Mock() + with mock.patch.multiple( + "google.auth.transport.grpc.SslCredentials", + __init__=mock.Mock(return_value=None), + ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), + ): + with mock.patch.object( + transport_class, "create_channel" + ) as grpc_create_channel: + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + mock_cred = mock.Mock() + + with pytest.warns(DeprecationWarning): + transport = transport_class( + host="squid.clam.whelk", + credentials=mock_cred, + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=None, + ) + + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=mock_cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + + +def test_managed_kafka_grpc_lro_client(): + client = ManagedKafkaClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + transport = client.transport + + # Ensure that we have a api-core operations client. + assert isinstance( + transport.operations_client, + operations_v1.OperationsClient, + ) + + # Ensure that subsequent calls to the property send the exact same object. 
+ assert transport.operations_client is transport.operations_client + + +def test_managed_kafka_grpc_lro_async_client(): + client = ManagedKafkaAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + transport = client.transport + + # Ensure that we have a api-core operations client. + assert isinstance( + transport.operations_client, + operations_v1.OperationsAsyncClient, + ) + + # Ensure that subsequent calls to the property send the exact same object. + assert transport.operations_client is transport.operations_client + + +def test_cluster_path(): + project = "squid" + location = "clam" + cluster = "whelk" + expected = "projects/{project}/locations/{location}/clusters/{cluster}".format( + project=project, + location=location, + cluster=cluster, + ) + actual = ManagedKafkaClient.cluster_path(project, location, cluster) + assert expected == actual + + +def test_parse_cluster_path(): + expected = { + "project": "octopus", + "location": "oyster", + "cluster": "nudibranch", + } + path = ManagedKafkaClient.cluster_path(**expected) + + # Check that the path construction is reversible. + actual = ManagedKafkaClient.parse_cluster_path(path) + assert expected == actual + + +def test_consumer_group_path(): + project = "cuttlefish" + location = "mussel" + cluster = "winkle" + consumer_group = "nautilus" + expected = "projects/{project}/locations/{location}/clusters/{cluster}/consumerGroups/{consumer_group}".format( + project=project, + location=location, + cluster=cluster, + consumer_group=consumer_group, + ) + actual = ManagedKafkaClient.consumer_group_path( + project, location, cluster, consumer_group + ) + assert expected == actual + + +def test_parse_consumer_group_path(): + expected = { + "project": "scallop", + "location": "abalone", + "cluster": "squid", + "consumer_group": "clam", + } + path = ManagedKafkaClient.consumer_group_path(**expected) + + # Check that the path construction is reversible. + actual = ManagedKafkaClient.parse_consumer_group_path(path) + assert expected == actual + + +def test_crypto_key_path(): + project = "whelk" + location = "octopus" + key_ring = "oyster" + crypto_key = "nudibranch" + expected = "projects/{project}/locations/{location}/keyRings/{key_ring}/cryptoKeys/{crypto_key}".format( + project=project, + location=location, + key_ring=key_ring, + crypto_key=crypto_key, + ) + actual = ManagedKafkaClient.crypto_key_path(project, location, key_ring, crypto_key) + assert expected == actual + + +def test_parse_crypto_key_path(): + expected = { + "project": "cuttlefish", + "location": "mussel", + "key_ring": "winkle", + "crypto_key": "nautilus", + } + path = ManagedKafkaClient.crypto_key_path(**expected) + + # Check that the path construction is reversible. + actual = ManagedKafkaClient.parse_crypto_key_path(path) + assert expected == actual + + +def test_topic_path(): + project = "scallop" + location = "abalone" + cluster = "squid" + topic = "clam" + expected = "projects/{project}/locations/{location}/clusters/{cluster}/topics/{topic}".format( + project=project, + location=location, + cluster=cluster, + topic=topic, + ) + actual = ManagedKafkaClient.topic_path(project, location, cluster, topic) + assert expected == actual + + +def test_parse_topic_path(): + expected = { + "project": "whelk", + "location": "octopus", + "cluster": "oyster", + "topic": "nudibranch", + } + path = ManagedKafkaClient.topic_path(**expected) + + # Check that the path construction is reversible. 
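The `*_path`/`parse_*_path` tests in this stretch check a simple invariant: building a resource path from its components and parsing it back is a lossless round trip. The same round trip in isolation, using only the consumer-group template shown in the tests; the helpers are local stand-ins, not the client's classmethods:

```python
import re

# Template taken verbatim from the consumer_group_path test.
TEMPLATE = (
    "projects/{project}/locations/{location}"
    "/clusters/{cluster}/consumerGroups/{consumer_group}"
)
PATTERN = re.compile(
    r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)"
    r"/clusters/(?P<cluster>.+?)/consumerGroups/(?P<consumer_group>.+?)$"
)


def build_path(**kwargs):
    return TEMPLATE.format(**kwargs)


def parse_path(path):
    match = PATTERN.match(path)
    return match.groupdict() if match else {}


fields = {"project": "p", "location": "l", "cluster": "c", "consumer_group": "g"}
assert parse_path(build_path(**fields)) == fields  # lossless round trip
```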
+ actual = ManagedKafkaClient.parse_topic_path(path) + assert expected == actual + + +def test_common_billing_account_path(): + billing_account = "cuttlefish" + expected = "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + actual = ManagedKafkaClient.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "mussel", + } + path = ManagedKafkaClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. + actual = ManagedKafkaClient.parse_common_billing_account_path(path) + assert expected == actual + + +def test_common_folder_path(): + folder = "winkle" + expected = "folders/{folder}".format( + folder=folder, + ) + actual = ManagedKafkaClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "nautilus", + } + path = ManagedKafkaClient.common_folder_path(**expected) + + # Check that the path construction is reversible. + actual = ManagedKafkaClient.parse_common_folder_path(path) + assert expected == actual + + +def test_common_organization_path(): + organization = "scallop" + expected = "organizations/{organization}".format( + organization=organization, + ) + actual = ManagedKafkaClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "abalone", + } + path = ManagedKafkaClient.common_organization_path(**expected) + + # Check that the path construction is reversible. + actual = ManagedKafkaClient.parse_common_organization_path(path) + assert expected == actual + + +def test_common_project_path(): + project = "squid" + expected = "projects/{project}".format( + project=project, + ) + actual = ManagedKafkaClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "clam", + } + path = ManagedKafkaClient.common_project_path(**expected) + + # Check that the path construction is reversible. + actual = ManagedKafkaClient.parse_common_project_path(path) + assert expected == actual + + +def test_common_location_path(): + project = "whelk" + location = "octopus" + expected = "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) + actual = ManagedKafkaClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "oyster", + "location": "nudibranch", + } + path = ManagedKafkaClient.common_location_path(**expected) + + # Check that the path construction is reversible. 
+ actual = ManagedKafkaClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object( + transports.ManagedKafkaTransport, "_prep_wrapped_messages" + ) as prep: + client = ManagedKafkaClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object( + transports.ManagedKafkaTransport, "_prep_wrapped_messages" + ) as prep: + transport_class = ManagedKafkaClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + +@pytest.mark.asyncio +async def test_transport_close_async(): + client = ManagedKafkaAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + with mock.patch.object( + type(getattr(client.transport, "grpc_channel")), "close" + ) as close: + async with client: + close.assert_not_called() + close.assert_called_once() + + +def test_get_location_rest_bad_request( + transport: str = "rest", request_type=locations_pb2.GetLocationRequest +): + client = ManagedKafkaClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_location(request) + + +@pytest.mark.parametrize( + "request_type", + [ + locations_pb2.GetLocationRequest, + dict, + ], +) +def test_get_location_rest(request_type): + client = ManagedKafkaClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {"name": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = locations_pb2.Location() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_location(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, locations_pb2.Location) + + +def test_list_locations_rest_bad_request( + transport: str = "rest", request_type=locations_pb2.ListLocationsRequest +): + client = ManagedKafkaClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict({"name": "projects/sample1"}, request) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_locations(request) + + +@pytest.mark.parametrize( + "request_type", + [ + locations_pb2.ListLocationsRequest, + dict, + ], +) +def test_list_locations_rest(request_type): + client = ManagedKafkaClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {"name": "projects/sample1"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = locations_pb2.ListLocationsResponse() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_locations(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, locations_pb2.ListLocationsResponse) + + +def test_cancel_operation_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.CancelOperationRequest +): + client = ManagedKafkaClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2/operations/sample3"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.cancel_operation(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.CancelOperationRequest, + dict, + ], +) +def test_cancel_operation_rest(request_type): + client = ManagedKafkaClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {"name": "projects/sample1/locations/sample2/operations/sample3"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "{}" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.cancel_operation(request) + + # Establish that the response is the type that we expect. 
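The location/operation mixin tests in this stretch build their requests with `json_format.ParseDict` on plain `*_pb2` protobuf messages rather than the proto-plus types used elsewhere in the file. A standalone example of that construction, assuming `googleapis-common-protos` is installed:

```python
from google.longrunning import operations_pb2
from google.protobuf import json_format

# Populate a plain protobuf request message from a dict, as the mixin tests do.
request = operations_pb2.GetOperationRequest()
json_format.ParseDict(
    {"name": "projects/sample1/locations/sample2/operations/sample3"}, request
)
print(request.name)
```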
+ assert response is None + + +def test_delete_operation_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.DeleteOperationRequest +): + client = ManagedKafkaClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2/operations/sample3"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_operation(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.DeleteOperationRequest, + dict, + ], +) +def test_delete_operation_rest(request_type): + client = ManagedKafkaClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {"name": "projects/sample1/locations/sample2/operations/sample3"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "{}" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.delete_operation(request) + + # Establish that the response is the type that we expect. + assert response is None + + +def test_get_operation_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.GetOperationRequest +): + client = ManagedKafkaClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2/operations/sample3"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_operation(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.GetOperationRequest, + dict, + ], +) +def test_get_operation_rest(request_type): + client = ManagedKafkaClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {"name": "projects/sample1/locations/sample2/operations/sample3"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = operations_pb2.Operation() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_operation(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + + +def test_list_operations_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.ListOperationsRequest +): + client = ManagedKafkaClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_operations(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.ListOperationsRequest, + dict, + ], +) +def test_list_operations_rest(request_type): + client = ManagedKafkaClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {"name": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.ListOperationsResponse() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_operations(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) + + +def test_delete_operation(transport: str = "grpc"): + client = ManagedKafkaClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.DeleteOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.delete_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert response is None + + +@pytest.mark.asyncio +async def test_delete_operation_async(transport: str = "grpc_asyncio"): + client = ManagedKafkaAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.DeleteOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_operation_field_headers(): + client = ManagedKafkaClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.DeleteOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + call.return_value = None + + client.delete_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_delete_operation_field_headers_async(): + client = ManagedKafkaAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.DeleteOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_delete_operation_from_dict(): + client = ManagedKafkaClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = None + + response = client.delete_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_delete_operation_from_dict_async(): + client = ManagedKafkaAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_cancel_operation(transport: str = "grpc"): + client = ManagedKafkaClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.CancelOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_cancel_operation_async(transport: str = "grpc_asyncio"): + client = ManagedKafkaAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.CancelOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +def test_cancel_operation_field_headers(): + client = ManagedKafkaClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.CancelOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + call.return_value = None + + client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_cancel_operation_field_headers_async(): + client = ManagedKafkaAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.CancelOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_cancel_operation_from_dict(): + client = ManagedKafkaClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + + response = client.cancel_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_cancel_operation_from_dict_async(): + client = ManagedKafkaAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.cancel_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_get_operation(transport: str = "grpc"): + client = ManagedKafkaClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.GetOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation() + response = client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + + +@pytest.mark.asyncio +async def test_get_operation_async(transport: str = "grpc_asyncio"): + client = ManagedKafkaAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.GetOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + response = await client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + + +def test_get_operation_field_headers(): + client = ManagedKafkaClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.GetOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + call.return_value = operations_pb2.Operation() + + client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_operation_field_headers_async(): + client = ManagedKafkaAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.GetOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + await client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_get_operation_from_dict(): + client = ManagedKafkaClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation() + + response = client.get_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_get_operation_from_dict_async(): + client = ManagedKafkaAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + response = await client.get_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_list_operations(transport: str = "grpc"): + client = ManagedKafkaClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.ListOperationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.ListOperationsResponse() + response = client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) + + +@pytest.mark.asyncio +async def test_list_operations_async(transport: str = "grpc_asyncio"): + client = ManagedKafkaAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.ListOperationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + response = await client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) + + +def test_list_operations_field_headers(): + client = ManagedKafkaClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.ListOperationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + call.return_value = operations_pb2.ListOperationsResponse() + + client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_operations_field_headers_async(): + client = ManagedKafkaAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = operations_pb2.ListOperationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + await client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_list_operations_from_dict(): + client = ManagedKafkaClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.ListOperationsResponse() + + response = client.list_operations( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_list_operations_from_dict_async(): + client = ManagedKafkaAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + response = await client.list_operations( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_list_locations(transport: str = "grpc"): + client = ManagedKafkaClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = locations_pb2.ListLocationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = locations_pb2.ListLocationsResponse() + response = client.list_locations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, locations_pb2.ListLocationsResponse) + + +@pytest.mark.asyncio +async def test_list_locations_async(transport: str = "grpc_asyncio"): + client = ManagedKafkaAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = locations_pb2.ListLocationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + locations_pb2.ListLocationsResponse() + ) + response = await client.list_locations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, locations_pb2.ListLocationsResponse) + + +def test_list_locations_field_headers(): + client = ManagedKafkaClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = locations_pb2.ListLocationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + call.return_value = locations_pb2.ListLocationsResponse() + + client.list_locations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_locations_field_headers_async(): + client = ManagedKafkaAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = locations_pb2.ListLocationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + locations_pb2.ListLocationsResponse() + ) + await client.list_locations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_list_locations_from_dict(): + client = ManagedKafkaClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = locations_pb2.ListLocationsResponse() + + response = client.list_locations( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_list_locations_from_dict_async(): + client = ManagedKafkaAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + locations_pb2.ListLocationsResponse() + ) + response = await client.list_locations( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_get_location(transport: str = "grpc"): + client = ManagedKafkaClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = locations_pb2.GetLocationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_location), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = locations_pb2.Location() + response = client.get_location(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, locations_pb2.Location) + + +@pytest.mark.asyncio +async def test_get_location_async(transport: str = "grpc_asyncio"): + client = ManagedKafkaAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = locations_pb2.GetLocationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_location), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + locations_pb2.Location() + ) + response = await client.get_location(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, locations_pb2.Location) + + +def test_get_location_field_headers(): + client = ManagedKafkaClient(credentials=ga_credentials.AnonymousCredentials()) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = locations_pb2.GetLocationRequest() + request.name = "locations/abc" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_location), "__call__") as call: + call.return_value = locations_pb2.Location() + + client.get_location(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations/abc", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_location_field_headers_async(): + client = ManagedKafkaAsyncClient(credentials=ga_credentials.AnonymousCredentials()) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = locations_pb2.GetLocationRequest() + request.name = "locations/abc" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.get_location), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + locations_pb2.Location() + ) + await client.get_location(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations/abc", + ) in kw["metadata"] + + +def test_get_location_from_dict(): + client = ManagedKafkaClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = locations_pb2.Location() + + response = client.get_location( + request={ + "name": "locations/abc", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_get_location_from_dict_async(): + client = ManagedKafkaAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + locations_pb2.Location() + ) + response = await client.get_location( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_transport_close(): + transports = { + "rest": "_session", + "grpc": "_grpc_channel", + } + + for transport, close_name in transports.items(): + client = ManagedKafkaClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + with mock.patch.object( + type(getattr(client.transport, close_name)), "close" + ) as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +def test_client_ctx(): + transports = [ + "rest", + "grpc", + ] + for transport in transports: + client = ManagedKafkaClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + # Test client calls underlying transport. 
+ with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() + + +@pytest.mark.parametrize( + "client_class,transport_class", + [ + (ManagedKafkaClient, transports.ManagedKafkaGrpcTransport), + (ManagedKafkaAsyncClient, transports.ManagedKafkaGrpcAsyncIOTransport), + ], +) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) diff --git a/packages/google-cloud-memcache/tests/unit/gapic/memcache_v1/test_cloud_memcache.py b/packages/google-cloud-memcache/tests/unit/gapic/memcache_v1/test_cloud_memcache.py index e2fe87ca749a..24171812cc79 100644 --- a/packages/google-cloud-memcache/tests/unit/gapic/memcache_v1/test_cloud_memcache.py +++ b/packages/google-cloud-memcache/tests/unit/gapic/memcache_v1/test_cloud_memcache.py @@ -1546,13 +1546,13 @@ def test_list_instances_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_instances(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 diff --git a/packages/google-cloud-memcache/tests/unit/gapic/memcache_v1beta2/test_cloud_memcache.py b/packages/google-cloud-memcache/tests/unit/gapic/memcache_v1beta2/test_cloud_memcache.py index 460447319f53..f862928da3ff 100644 --- a/packages/google-cloud-memcache/tests/unit/gapic/memcache_v1beta2/test_cloud_memcache.py +++ b/packages/google-cloud-memcache/tests/unit/gapic/memcache_v1beta2/test_cloud_memcache.py @@ -1546,13 +1546,13 @@ def test_list_instances_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_instances(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 diff --git a/packages/google-cloud-migrationcenter/tests/unit/gapic/migrationcenter_v1/test_migration_center.py b/packages/google-cloud-migrationcenter/tests/unit/gapic/migrationcenter_v1/test_migration_center.py index 15cf60bbbe1c..300a1c4ddd0a 100644 --- a/packages/google-cloud-migrationcenter/tests/unit/gapic/migrationcenter_v1/test_migration_center.py +++ b/packages/google-cloud-migrationcenter/tests/unit/gapic/migrationcenter_v1/test_migration_center.py @@ -1573,13 +1573,13 @@ def test_list_assets_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + 
expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_assets(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -5084,13 +5084,13 @@ def test_list_import_jobs_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_import_jobs(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -8038,13 +8038,13 @@ def test_list_import_data_files_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_import_data_files(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -9426,13 +9426,13 @@ def test_list_groups_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_groups(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -12323,13 +12323,13 @@ def test_list_error_frames_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_error_frames(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -13268,13 +13268,13 @@ def test_list_sources_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_sources(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -15411,13 +15411,13 @@ def test_list_preference_sets_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_preference_sets(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -19197,13 +19197,13 @@ def test_list_report_configs_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_report_configs(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) 
assert len(results) == 6 @@ -20947,13 +20947,13 @@ def test_list_reports_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_reports(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 diff --git a/packages/google-cloud-monitoring-dashboards/tests/unit/gapic/monitoring_dashboard_v1/test_dashboards_service.py b/packages/google-cloud-monitoring-dashboards/tests/unit/gapic/monitoring_dashboard_v1/test_dashboards_service.py index 7b84af2fb072..9f70e80c9eea 100644 --- a/packages/google-cloud-monitoring-dashboards/tests/unit/gapic/monitoring_dashboard_v1/test_dashboards_service.py +++ b/packages/google-cloud-monitoring-dashboards/tests/unit/gapic/monitoring_dashboard_v1/test_dashboards_service.py @@ -1989,13 +1989,13 @@ def test_list_dashboards_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_dashboards(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 diff --git a/packages/google-cloud-monitoring/CHANGELOG.md b/packages/google-cloud-monitoring/CHANGELOG.md index db82d46471c1..50c8bb17d835 100644 --- a/packages/google-cloud-monitoring/CHANGELOG.md +++ b/packages/google-cloud-monitoring/CHANGELOG.md @@ -4,6 +4,13 @@ [1]: https://pypi.org/project/google-cloud-monitoring/#history +## [2.22.0](https://github.com/googleapis/google-cloud-python/compare/google-cloud-monitoring-v2.21.0...google-cloud-monitoring-v2.22.0) (2024-06-24) + + +### Features + +* Add support to add links in AlertPolicy ([7fcde4f](https://github.com/googleapis/google-cloud-python/commit/7fcde4f8c1d8cbc5351cb3fb799450bbb78d5a2a)) + ## [2.21.0](https://github.com/googleapis/google-cloud-python/compare/google-cloud-monitoring-v2.20.0...google-cloud-monitoring-v2.21.0) (2024-04-17) diff --git a/packages/google-cloud-monitoring/google/cloud/monitoring/gapic_version.py b/packages/google-cloud-monitoring/google/cloud/monitoring/gapic_version.py index 558c8aab67c5..aa0e1f9b4b88 100644 --- a/packages/google-cloud-monitoring/google/cloud/monitoring/gapic_version.py +++ b/packages/google-cloud-monitoring/google/cloud/monitoring/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "0.0.0" # {x-release-please-version} +__version__ = "2.22.0" # {x-release-please-version} diff --git a/packages/google-cloud-monitoring/google/cloud/monitoring_v3/gapic_version.py b/packages/google-cloud-monitoring/google/cloud/monitoring_v3/gapic_version.py index 558c8aab67c5..aa0e1f9b4b88 100644 --- a/packages/google-cloud-monitoring/google/cloud/monitoring_v3/gapic_version.py +++ b/packages/google-cloud-monitoring/google/cloud/monitoring_v3/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.0.0" # {x-release-please-version} +__version__ = "2.22.0" # {x-release-please-version} diff --git a/packages/google-cloud-monitoring/google/cloud/monitoring_v3/types/alert.py b/packages/google-cloud-monitoring/google/cloud/monitoring_v3/types/alert.py index 867e60d83e07..20718f7e94a8 100644 --- a/packages/google-cloud-monitoring/google/cloud/monitoring_v3/types/alert.py +++ b/packages/google-cloud-monitoring/google/cloud/monitoring_v3/types/alert.py @@ -243,8 +243,38 @@ class Documentation(proto.Message): variables `__. If this field is missing or empty, a default subject line will be generated. + links (MutableSequence[google.cloud.monitoring_v3.types.AlertPolicy.Documentation.Link]): + Optional. Links to content such as playbooks, + repositories, and other resources. This field + can contain up to 3 entries. """ + class Link(proto.Message): + r"""Links to content such as playbooks, repositories, and other + resources. + + Attributes: + display_name (str): + A short display name for the link. The + display name must not be empty or exceed 63 + characters. Example: "playbook". + url (str): + The url of a webpage. A url can be templatized by using + variables in the path or the query parameters. The total + length of a URL should not exceed 2083 characters before and + after variable expansion. Example: + "https://my_domain.com/playbook?name=${resource.name}". 
+ """ + + display_name: str = proto.Field( + proto.STRING, + number=1, + ) + url: str = proto.Field( + proto.STRING, + number=2, + ) + content: str = proto.Field( proto.STRING, number=1, @@ -257,6 +287,11 @@ class Documentation(proto.Message): proto.STRING, number=3, ) + links: MutableSequence["AlertPolicy.Documentation.Link"] = proto.RepeatedField( + proto.MESSAGE, + number=4, + message="AlertPolicy.Documentation.Link", + ) class Condition(proto.Message): r"""A condition is a true/false test that determines when an diff --git a/packages/google-cloud-monitoring/samples/generated_samples/snippet_metadata_google.monitoring.v3.json b/packages/google-cloud-monitoring/samples/generated_samples/snippet_metadata_google.monitoring.v3.json index 04888a5ff523..cfe64565e3cc 100644 --- a/packages/google-cloud-monitoring/samples/generated_samples/snippet_metadata_google.monitoring.v3.json +++ b/packages/google-cloud-monitoring/samples/generated_samples/snippet_metadata_google.monitoring.v3.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-monitoring", - "version": "0.1.0" + "version": "2.22.0" }, "snippets": [ { diff --git a/packages/google-cloud-monitoring/tests/unit/gapic/monitoring_v3/test_alert_policy_service.py b/packages/google-cloud-monitoring/tests/unit/gapic/monitoring_v3/test_alert_policy_service.py index 49821bb197da..23812b682a72 100644 --- a/packages/google-cloud-monitoring/tests/unit/gapic/monitoring_v3/test_alert_policy_service.py +++ b/packages/google-cloud-monitoring/tests/unit/gapic/monitoring_v3/test_alert_policy_service.py @@ -1577,13 +1577,13 @@ def test_list_alert_policies_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("name", ""),)), ) pager = client.list_alert_policies(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 diff --git a/packages/google-cloud-monitoring/tests/unit/gapic/monitoring_v3/test_group_service.py b/packages/google-cloud-monitoring/tests/unit/gapic/monitoring_v3/test_group_service.py index 52d241d6f9c4..18cd4534722a 100644 --- a/packages/google-cloud-monitoring/tests/unit/gapic/monitoring_v3/test_group_service.py +++ b/packages/google-cloud-monitoring/tests/unit/gapic/monitoring_v3/test_group_service.py @@ -1494,13 +1494,13 @@ def test_list_groups_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("name", ""),)), ) pager = client.list_groups(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -3598,13 +3598,13 @@ def test_list_group_members_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("name", ""),)), ) pager = client.list_group_members(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 diff --git a/packages/google-cloud-monitoring/tests/unit/gapic/monitoring_v3/test_metric_service.py 
b/packages/google-cloud-monitoring/tests/unit/gapic/monitoring_v3/test_metric_service.py index e996d139a936..83642e80b470 100644 --- a/packages/google-cloud-monitoring/tests/unit/gapic/monitoring_v3/test_metric_service.py +++ b/packages/google-cloud-monitoring/tests/unit/gapic/monitoring_v3/test_metric_service.py @@ -1542,13 +1542,13 @@ def test_list_monitored_resource_descriptors_pager(transport_name: str = "grpc") RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("name", ""),)), ) pager = client.list_monitored_resource_descriptors(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -2556,13 +2556,13 @@ def test_list_metric_descriptors_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("name", ""),)), ) pager = client.list_metric_descriptors(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -4417,13 +4417,13 @@ def test_list_time_series_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("name", ""),)), ) pager = client.list_time_series(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 diff --git a/packages/google-cloud-monitoring/tests/unit/gapic/monitoring_v3/test_notification_channel_service.py b/packages/google-cloud-monitoring/tests/unit/gapic/monitoring_v3/test_notification_channel_service.py index d6364d361b33..e91cb4c6b2e1 100644 --- a/packages/google-cloud-monitoring/tests/unit/gapic/monitoring_v3/test_notification_channel_service.py +++ b/packages/google-cloud-monitoring/tests/unit/gapic/monitoring_v3/test_notification_channel_service.py @@ -1613,13 +1613,13 @@ def test_list_notification_channel_descriptors_pager(transport_name: str = "grpc RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("name", ""),)), ) pager = client.list_notification_channel_descriptors(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -2637,13 +2637,13 @@ def test_list_notification_channels_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("name", ""),)), ) pager = client.list_notification_channels(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 diff --git a/packages/google-cloud-monitoring/tests/unit/gapic/monitoring_v3/test_query_service.py b/packages/google-cloud-monitoring/tests/unit/gapic/monitoring_v3/test_query_service.py index cfebb83d99fa..07ce7ca91a60 100644 --- a/packages/google-cloud-monitoring/tests/unit/gapic/monitoring_v3/test_query_service.py +++ 
b/packages/google-cloud-monitoring/tests/unit/gapic/monitoring_v3/test_query_service.py @@ -1421,13 +1421,13 @@ def test_query_time_series_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("name", ""),)), ) pager = client.query_time_series(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 diff --git a/packages/google-cloud-monitoring/tests/unit/gapic/monitoring_v3/test_service_monitoring_service.py b/packages/google-cloud-monitoring/tests/unit/gapic/monitoring_v3/test_service_monitoring_service.py index 385c2b9a2c1f..3c80982ff82b 100644 --- a/packages/google-cloud-monitoring/tests/unit/gapic/monitoring_v3/test_service_monitoring_service.py +++ b/packages/google-cloud-monitoring/tests/unit/gapic/monitoring_v3/test_service_monitoring_service.py @@ -2316,13 +2316,13 @@ def test_list_services_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_services(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -4444,13 +4444,13 @@ def test_list_service_level_objectives_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_service_level_objectives(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 diff --git a/packages/google-cloud-monitoring/tests/unit/gapic/monitoring_v3/test_snooze_service.py b/packages/google-cloud-monitoring/tests/unit/gapic/monitoring_v3/test_snooze_service.py index 69aa5afb0d27..115175e3ef0d 100644 --- a/packages/google-cloud-monitoring/tests/unit/gapic/monitoring_v3/test_snooze_service.py +++ b/packages/google-cloud-monitoring/tests/unit/gapic/monitoring_v3/test_snooze_service.py @@ -1889,13 +1889,13 @@ def test_list_snoozes_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_snoozes(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 diff --git a/packages/google-cloud-monitoring/tests/unit/gapic/monitoring_v3/test_uptime_check_service.py b/packages/google-cloud-monitoring/tests/unit/gapic/monitoring_v3/test_uptime_check_service.py index 7a2fbb48ea1e..753fceaaab4c 100644 --- a/packages/google-cloud-monitoring/tests/unit/gapic/monitoring_v3/test_uptime_check_service.py +++ b/packages/google-cloud-monitoring/tests/unit/gapic/monitoring_v3/test_uptime_check_service.py @@ -1569,13 +1569,13 @@ def test_list_uptime_check_configs_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( 
gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_uptime_check_configs(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -3643,10 +3643,10 @@ def test_list_uptime_check_ips_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () + expected_metadata = () pager = client.list_uptime_check_ips(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 diff --git a/packages/google-cloud-monitoring/tests/unit/test_query.py b/packages/google-cloud-monitoring/tests/unit/test_query.py index 13d7cb2aff39..49ba02f0d53d 100644 --- a/packages/google-cloud-monitoring/tests/unit/test_query.py +++ b/packages/google-cloud-monitoring/tests/unit/test_query.py @@ -15,11 +15,12 @@ from __future__ import absolute_import import datetime -import google.auth.credentials as ga_credentials -import grpc import unittest from unittest import mock +import google.auth.credentials as ga_credentials +import grpc + from google.cloud import monitoring_v3 as monitoring_v3 from google.cloud.monitoring_v3 import MetricServiceClient from google.cloud.monitoring_v3.services.metric_service.transports import ( diff --git a/packages/google-cloud-netapp/CHANGELOG.md b/packages/google-cloud-netapp/CHANGELOG.md index bdec34015699..97589e682191 100644 --- a/packages/google-cloud-netapp/CHANGELOG.md +++ b/packages/google-cloud-netapp/CHANGELOG.md @@ -1,5 +1,15 @@ # Changelog +## [0.3.10](https://github.com/googleapis/google-cloud-python/compare/google-cloud-netapp-v0.3.9...google-cloud-netapp-v0.3.10) (2024-05-17) + + +### Features + +* Add a new Service Level FLEX ([74a7e9c](https://github.com/googleapis/google-cloud-python/commit/74a7e9c313c2d6301982eded0e46bc5176d2737b)) +* Add backup chain bytes to BackupConfig in Volume ([74a7e9c](https://github.com/googleapis/google-cloud-python/commit/74a7e9c313c2d6301982eded0e46bc5176d2737b)) +* Add Location metadata support ([74a7e9c](https://github.com/googleapis/google-cloud-python/commit/74a7e9c313c2d6301982eded0e46bc5176d2737b)) +* Add Tiering Policy to Volume ([74a7e9c](https://github.com/googleapis/google-cloud-python/commit/74a7e9c313c2d6301982eded0e46bc5176d2737b)) + ## [0.3.9](https://github.com/googleapis/google-cloud-python/compare/google-cloud-netapp-v0.3.8...google-cloud-netapp-v0.3.9) (2024-03-22) diff --git a/packages/google-cloud-netapp/google/cloud/netapp/__init__.py b/packages/google-cloud-netapp/google/cloud/netapp/__init__.py index 3d3ff409bdcc..911af7583e73 100644 --- a/packages/google-cloud-netapp/google/cloud/netapp/__init__.py +++ b/packages/google-cloud-netapp/google/cloud/netapp/__init__.py @@ -57,7 +57,11 @@ UpdateBackupVaultRequest, ) from google.cloud.netapp_v1.types.cloud_netapp_service import OperationMetadata -from google.cloud.netapp_v1.types.common import EncryptionType, ServiceLevel +from google.cloud.netapp_v1.types.common import ( + EncryptionType, + LocationMetadata, + ServiceLevel, +) from google.cloud.netapp_v1.types.kms import ( CreateKmsConfigRequest, DeleteKmsConfigRequest, @@ -123,6 +127,7 @@ 
SimpleExportPolicyRule, SMBSettings, SnapshotPolicy, + TieringPolicy, UpdateVolumeRequest, Volume, WeeklySchedule, @@ -160,6 +165,7 @@ "ListBackupVaultsResponse", "UpdateBackupVaultRequest", "OperationMetadata", + "LocationMetadata", "EncryptionType", "ServiceLevel", "CreateKmsConfigRequest", @@ -213,6 +219,7 @@ "RevertVolumeRequest", "SimpleExportPolicyRule", "SnapshotPolicy", + "TieringPolicy", "UpdateVolumeRequest", "Volume", "WeeklySchedule", diff --git a/packages/google-cloud-netapp/google/cloud/netapp_v1/__init__.py b/packages/google-cloud-netapp/google/cloud/netapp_v1/__init__.py index bf13bbfbeb86..e55949c3cc22 100644 --- a/packages/google-cloud-netapp/google/cloud/netapp_v1/__init__.py +++ b/packages/google-cloud-netapp/google/cloud/netapp_v1/__init__.py @@ -56,7 +56,7 @@ UpdateBackupVaultRequest, ) from .types.cloud_netapp_service import OperationMetadata -from .types.common import EncryptionType, ServiceLevel +from .types.common import EncryptionType, LocationMetadata, ServiceLevel from .types.kms import ( CreateKmsConfigRequest, DeleteKmsConfigRequest, @@ -122,6 +122,7 @@ SimpleExportPolicyRule, SMBSettings, SnapshotPolicy, + TieringPolicy, UpdateVolumeRequest, Volume, WeeklySchedule, @@ -187,6 +188,7 @@ "ListStoragePoolsResponse", "ListVolumesRequest", "ListVolumesResponse", + "LocationMetadata", "MonthlySchedule", "MountOption", "NetAppClient", @@ -206,6 +208,7 @@ "SnapshotPolicy", "StopReplicationRequest", "StoragePool", + "TieringPolicy", "TransferStats", "UpdateActiveDirectoryRequest", "UpdateBackupPolicyRequest", diff --git a/packages/google-cloud-netapp/google/cloud/netapp_v1/services/net_app/async_client.py b/packages/google-cloud-netapp/google/cloud/netapp_v1/services/net_app/async_client.py index 8f48f0d2bf35..794a07e0a390 100644 --- a/packages/google-cloud-netapp/google/cloud/netapp_v1/services/net_app/async_client.py +++ b/packages/google-cloud-netapp/google/cloud/netapp_v1/services/net_app/async_client.py @@ -439,7 +439,7 @@ async def sample_create_storage_pool(): # Initialize request argument(s) storage_pool = netapp_v1.StoragePool() - storage_pool.service_level = "STANDARD" + storage_pool.service_level = "FLEX" storage_pool.capacity_gib = 1247 storage_pool.network = "network_value" @@ -698,7 +698,7 @@ async def sample_update_storage_pool(): # Initialize request argument(s) storage_pool = netapp_v1.StoragePool() - storage_pool.service_level = "STANDARD" + storage_pool.service_level = "FLEX" storage_pool.capacity_gib = 1247 storage_pool.network = "network_value" diff --git a/packages/google-cloud-netapp/google/cloud/netapp_v1/services/net_app/client.py b/packages/google-cloud-netapp/google/cloud/netapp_v1/services/net_app/client.py index 6360e2bd7f6d..095e9842f68c 100644 --- a/packages/google-cloud-netapp/google/cloud/netapp_v1/services/net_app/client.py +++ b/packages/google-cloud-netapp/google/cloud/netapp_v1/services/net_app/client.py @@ -1044,7 +1044,7 @@ def sample_create_storage_pool(): # Initialize request argument(s) storage_pool = netapp_v1.StoragePool() - storage_pool.service_level = "STANDARD" + storage_pool.service_level = "FLEX" storage_pool.capacity_gib = 1247 storage_pool.network = "network_value" @@ -1297,7 +1297,7 @@ def sample_update_storage_pool(): # Initialize request argument(s) storage_pool = netapp_v1.StoragePool() - storage_pool.service_level = "STANDARD" + storage_pool.service_level = "FLEX" storage_pool.capacity_gib = 1247 storage_pool.network = "network_value" diff --git 
a/packages/google-cloud-netapp/google/cloud/netapp_v1/types/__init__.py b/packages/google-cloud-netapp/google/cloud/netapp_v1/types/__init__.py index 3b6055f17bb3..fbdb02042fae 100644 --- a/packages/google-cloud-netapp/google/cloud/netapp_v1/types/__init__.py +++ b/packages/google-cloud-netapp/google/cloud/netapp_v1/types/__init__.py @@ -50,7 +50,7 @@ UpdateBackupVaultRequest, ) from .cloud_netapp_service import OperationMetadata -from .common import EncryptionType, ServiceLevel +from .common import EncryptionType, LocationMetadata, ServiceLevel from .kms import ( CreateKmsConfigRequest, DeleteKmsConfigRequest, @@ -116,6 +116,7 @@ SimpleExportPolicyRule, SMBSettings, SnapshotPolicy, + TieringPolicy, UpdateVolumeRequest, Volume, WeeklySchedule, @@ -151,6 +152,7 @@ "ListBackupVaultsResponse", "UpdateBackupVaultRequest", "OperationMetadata", + "LocationMetadata", "EncryptionType", "ServiceLevel", "CreateKmsConfigRequest", @@ -204,6 +206,7 @@ "RevertVolumeRequest", "SimpleExportPolicyRule", "SnapshotPolicy", + "TieringPolicy", "UpdateVolumeRequest", "Volume", "WeeklySchedule", diff --git a/packages/google-cloud-netapp/google/cloud/netapp_v1/types/common.py b/packages/google-cloud-netapp/google/cloud/netapp_v1/types/common.py index 5b8db752a1ad..72cff8db6750 100644 --- a/packages/google-cloud-netapp/google/cloud/netapp_v1/types/common.py +++ b/packages/google-cloud-netapp/google/cloud/netapp_v1/types/common.py @@ -24,6 +24,7 @@ manifest={ "ServiceLevel", "EncryptionType", + "LocationMetadata", }, ) @@ -40,11 +41,14 @@ class ServiceLevel(proto.Enum): Extreme service level. STANDARD (3): Standard service level. + FLEX (4): + Flex service level. """ SERVICE_LEVEL_UNSPECIFIED = 0 PREMIUM = 1 EXTREME = 2 STANDARD = 3 + FLEX = 4 class EncryptionType(proto.Enum): @@ -65,4 +69,21 @@ class EncryptionType(proto.Enum): CLOUD_KMS = 2 +class LocationMetadata(proto.Message): + r"""Metadata for a given + [google.cloud.location.Location][google.cloud.location.Location]. + + Attributes: + supported_service_levels (MutableSequence[google.cloud.netapp_v1.types.ServiceLevel]): + Output only. Supported service levels in a + location. + """ + + supported_service_levels: MutableSequence["ServiceLevel"] = proto.RepeatedField( + proto.ENUM, + number=1, + enum="ServiceLevel", + ) + + __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-netapp/google/cloud/netapp_v1/types/volume.py b/packages/google-cloud-netapp/google/cloud/netapp_v1/types/volume.py index 254af0289deb..acf2cd735fd0 100644 --- a/packages/google-cloud-netapp/google/cloud/netapp_v1/types/volume.py +++ b/packages/google-cloud-netapp/google/cloud/netapp_v1/types/volume.py @@ -49,6 +49,7 @@ "MountOption", "RestoreParameters", "BackupConfig", + "TieringPolicy", }, ) @@ -444,6 +445,10 @@ class Volume(proto.Message): restricted_actions (MutableSequence[google.cloud.netapp_v1.types.RestrictedAction]): Optional. List of actions that are restricted on this volume. + tiering_policy (google.cloud.netapp_v1.types.TieringPolicy): + Tiering policy for the volume. + + This field is a member of `oneof`_ ``_tiering_policy``. """ class State(proto.Enum): @@ -615,6 +620,12 @@ class State(proto.Enum): number=31, enum="RestrictedAction", ) + tiering_policy: "TieringPolicy" = proto.Field( + proto.MESSAGE, + number=34, + optional=True, + message="TieringPolicy", + ) class ExportPolicy(proto.Message): @@ -1100,6 +1111,12 @@ class BackupConfig(proto.Message): nil when there's no backup policy attached. 
This field is a member of `oneof`_ ``_scheduled_backup_enabled``. + backup_chain_bytes (int): + Output only. Total size of all backups in a + chain in bytes = baseline backup size + + sum(incremental backup size). + + This field is a member of `oneof`_ ``_backup_chain_bytes``. """ backup_policies: MutableSequence[str] = proto.RepeatedField( @@ -1115,6 +1132,61 @@ class BackupConfig(proto.Message): number=3, optional=True, ) + backup_chain_bytes: int = proto.Field( + proto.INT64, + number=4, + optional=True, + ) + + +class TieringPolicy(proto.Message): + r"""Defines tiering policy for the volume. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + tier_action (google.cloud.netapp_v1.types.TieringPolicy.TierAction): + Optional. Flag indicating if the volume has + tiering policy enable/pause. Default is PAUSED. + + This field is a member of `oneof`_ ``_tier_action``. + cooling_threshold_days (int): + Optional. Time in days to mark the volume's + data block as cold and make it eligible for + tiering, can be range from 7-183. Default is 31. + + This field is a member of `oneof`_ ``_cooling_threshold_days``. + """ + + class TierAction(proto.Enum): + r"""Tier action for the volume. + + Values: + TIER_ACTION_UNSPECIFIED (0): + Unspecified. + ENABLED (1): + When tiering is enabled, new cold data will + be tiered. + PAUSED (2): + When paused, tiering won't be performed on + new data. Existing data stays tiered until + accessed. + """ + TIER_ACTION_UNSPECIFIED = 0 + ENABLED = 1 + PAUSED = 2 + + tier_action: TierAction = proto.Field( + proto.ENUM, + number=1, + optional=True, + enum=TierAction, + ) + cooling_threshold_days: int = proto.Field( + proto.INT32, + number=2, + optional=True, + ) __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-netapp/samples/generated_samples/netapp_v1_generated_net_app_create_storage_pool_async.py b/packages/google-cloud-netapp/samples/generated_samples/netapp_v1_generated_net_app_create_storage_pool_async.py index acf3c174c969..def924e9d789 100644 --- a/packages/google-cloud-netapp/samples/generated_samples/netapp_v1_generated_net_app_create_storage_pool_async.py +++ b/packages/google-cloud-netapp/samples/generated_samples/netapp_v1_generated_net_app_create_storage_pool_async.py @@ -40,7 +40,7 @@ async def sample_create_storage_pool(): # Initialize request argument(s) storage_pool = netapp_v1.StoragePool() - storage_pool.service_level = "STANDARD" + storage_pool.service_level = "FLEX" storage_pool.capacity_gib = 1247 storage_pool.network = "network_value" diff --git a/packages/google-cloud-netapp/samples/generated_samples/netapp_v1_generated_net_app_create_storage_pool_sync.py b/packages/google-cloud-netapp/samples/generated_samples/netapp_v1_generated_net_app_create_storage_pool_sync.py index eed99b09331d..76472bb263ea 100644 --- a/packages/google-cloud-netapp/samples/generated_samples/netapp_v1_generated_net_app_create_storage_pool_sync.py +++ b/packages/google-cloud-netapp/samples/generated_samples/netapp_v1_generated_net_app_create_storage_pool_sync.py @@ -40,7 +40,7 @@ def sample_create_storage_pool(): # Initialize request argument(s) storage_pool = netapp_v1.StoragePool() - storage_pool.service_level = "STANDARD" + storage_pool.service_level = "FLEX" storage_pool.capacity_gib = 1247 storage_pool.network = "network_value" diff --git 
a/packages/google-cloud-netapp/samples/generated_samples/netapp_v1_generated_net_app_update_storage_pool_async.py b/packages/google-cloud-netapp/samples/generated_samples/netapp_v1_generated_net_app_update_storage_pool_async.py index ca11d34cf69c..f874ce5faf26 100644 --- a/packages/google-cloud-netapp/samples/generated_samples/netapp_v1_generated_net_app_update_storage_pool_async.py +++ b/packages/google-cloud-netapp/samples/generated_samples/netapp_v1_generated_net_app_update_storage_pool_async.py @@ -40,7 +40,7 @@ async def sample_update_storage_pool(): # Initialize request argument(s) storage_pool = netapp_v1.StoragePool() - storage_pool.service_level = "STANDARD" + storage_pool.service_level = "FLEX" storage_pool.capacity_gib = 1247 storage_pool.network = "network_value" diff --git a/packages/google-cloud-netapp/samples/generated_samples/netapp_v1_generated_net_app_update_storage_pool_sync.py b/packages/google-cloud-netapp/samples/generated_samples/netapp_v1_generated_net_app_update_storage_pool_sync.py index 68cbc4fdb89e..2440d5071d6a 100644 --- a/packages/google-cloud-netapp/samples/generated_samples/netapp_v1_generated_net_app_update_storage_pool_sync.py +++ b/packages/google-cloud-netapp/samples/generated_samples/netapp_v1_generated_net_app_update_storage_pool_sync.py @@ -40,7 +40,7 @@ def sample_update_storage_pool(): # Initialize request argument(s) storage_pool = netapp_v1.StoragePool() - storage_pool.service_level = "STANDARD" + storage_pool.service_level = "FLEX" storage_pool.capacity_gib = 1247 storage_pool.network = "network_value" diff --git a/packages/google-cloud-netapp/tests/unit/gapic/netapp_v1/test_net_app.py b/packages/google-cloud-netapp/tests/unit/gapic/netapp_v1/test_net_app.py index fd7b53faf4dc..cf1b2874291e 100644 --- a/packages/google-cloud-netapp/tests/unit/gapic/netapp_v1/test_net_app.py +++ b/packages/google-cloud-netapp/tests/unit/gapic/netapp_v1/test_net_app.py @@ -1520,13 +1520,13 @@ def test_list_storage_pools_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_storage_pools(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -3740,13 +3740,13 @@ def test_list_volumes_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_volumes(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -6212,13 +6212,13 @@ def test_list_snapshots_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_snapshots(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -8328,13 +8328,13 @@ def test_list_active_directories_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( 
gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_active_directories(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -10595,13 +10595,13 @@ def test_list_kms_configs_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_kms_configs(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -13377,13 +13377,13 @@ def test_list_replications_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_replications(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -17296,13 +17296,13 @@ def test_list_backup_vaults_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_backup_vaults(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -19450,13 +19450,13 @@ def test_list_backups_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_backups(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -21627,13 +21627,13 @@ def test_list_backup_policies_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_backup_policies(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -25228,8 +25228,10 @@ def test_create_volume_rest(request_type): "backup_policies": ["backup_policies_value1", "backup_policies_value2"], "backup_vault": "backup_vault_value", "scheduled_backup_enabled": True, + "backup_chain_bytes": 1886, }, "restricted_actions": [1], + "tiering_policy": {"tier_action": 1, "cooling_threshold_days": 2343}, } # The version of a generated dependency at test runtime may differ from the version used during generation. 
# Delete any fields which are not present in the current runtime dependency @@ -25706,8 +25708,10 @@ def test_update_volume_rest(request_type): "backup_policies": ["backup_policies_value1", "backup_policies_value2"], "backup_vault": "backup_vault_value", "scheduled_backup_enabled": True, + "backup_chain_bytes": 1886, }, "restricted_actions": [1], + "tiering_policy": {"tier_action": 1, "cooling_threshold_days": 2343}, } # The version of a generated dependency at test runtime may differ from the version used during generation. # Delete any fields which are not present in the current runtime dependency diff --git a/packages/google-cloud-network-connectivity/tests/unit/gapic/networkconnectivity_v1/test_hub_service.py b/packages/google-cloud-network-connectivity/tests/unit/gapic/networkconnectivity_v1/test_hub_service.py index a22d157b75ac..8849d705d609 100644 --- a/packages/google-cloud-network-connectivity/tests/unit/gapic/networkconnectivity_v1/test_hub_service.py +++ b/packages/google-cloud-network-connectivity/tests/unit/gapic/networkconnectivity_v1/test_hub_service.py @@ -1486,13 +1486,13 @@ def test_list_hubs_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_hubs(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -3576,13 +3576,13 @@ def test_list_hub_spokes_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("name", ""),)), ) pager = client.list_hub_spokes(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -4146,13 +4146,13 @@ def test_list_spokes_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_spokes(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -7802,13 +7802,13 @@ def test_list_routes_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_routes(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -8394,13 +8394,13 @@ def test_list_route_tables_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_route_tables(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -9348,13 +9348,13 @@ def test_list_groups_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + 
expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_groups(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 diff --git a/packages/google-cloud-network-connectivity/tests/unit/gapic/networkconnectivity_v1/test_policy_based_routing_service.py b/packages/google-cloud-network-connectivity/tests/unit/gapic/networkconnectivity_v1/test_policy_based_routing_service.py index d07d11c85882..436c306b3aa4 100644 --- a/packages/google-cloud-network-connectivity/tests/unit/gapic/networkconnectivity_v1/test_policy_based_routing_service.py +++ b/packages/google-cloud-network-connectivity/tests/unit/gapic/networkconnectivity_v1/test_policy_based_routing_service.py @@ -1609,13 +1609,13 @@ def test_list_policy_based_routes_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_policy_based_routes(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 diff --git a/packages/google-cloud-network-connectivity/tests/unit/gapic/networkconnectivity_v1alpha1/test_hub_service.py b/packages/google-cloud-network-connectivity/tests/unit/gapic/networkconnectivity_v1alpha1/test_hub_service.py index 3889fa768b71..27c27df1b5ca 100644 --- a/packages/google-cloud-network-connectivity/tests/unit/gapic/networkconnectivity_v1alpha1/test_hub_service.py +++ b/packages/google-cloud-network-connectivity/tests/unit/gapic/networkconnectivity_v1alpha1/test_hub_service.py @@ -1482,13 +1482,13 @@ def test_list_hubs_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_hubs(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -3572,13 +3572,13 @@ def test_list_spokes_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_spokes(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 diff --git a/packages/google-cloud-network-management/google/cloud/network_management_v1/types/connectivity_test.py b/packages/google-cloud-network-management/google/cloud/network_management_v1/types/connectivity_test.py index b500fa10e786..4f46fe720b10 100644 --- a/packages/google-cloud-network-management/google/cloud/network_management_v1/types/connectivity_test.py +++ b/packages/google-cloud-network-management/google/cloud/network_management_v1/types/connectivity_test.py @@ -221,7 +221,7 @@ class Endpoint(proto.Message): provide forwarding information in the control plane. 
Format: - projects/{project}/global/forwardingRules/{id} + projects/{project}/global/forwardingRules/{id} or projects/{project}/regions/{region}/forwardingRules/{id} forwarding_rule_target (google.cloud.network_management_v1.types.Endpoint.ForwardingRuleTarget): diff --git a/packages/google-cloud-network-management/tests/unit/gapic/network_management_v1/test_reachability_service.py b/packages/google-cloud-network-management/tests/unit/gapic/network_management_v1/test_reachability_service.py index e233c5eef22d..f9575d2945d0 100644 --- a/packages/google-cloud-network-management/tests/unit/gapic/network_management_v1/test_reachability_service.py +++ b/packages/google-cloud-network-management/tests/unit/gapic/network_management_v1/test_reachability_service.py @@ -1650,13 +1650,13 @@ def test_list_connectivity_tests_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_connectivity_tests(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 diff --git a/packages/google-cloud-network-security/tests/unit/gapic/network_security_v1/test_network_security.py b/packages/google-cloud-network-security/tests/unit/gapic/network_security_v1/test_network_security.py index 487d2ef16d52..d2abcfd520ba 100644 --- a/packages/google-cloud-network-security/tests/unit/gapic/network_security_v1/test_network_security.py +++ b/packages/google-cloud-network-security/tests/unit/gapic/network_security_v1/test_network_security.py @@ -1604,13 +1604,13 @@ def test_list_authorization_policies_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_authorization_policies(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -3827,13 +3827,13 @@ def test_list_server_tls_policies_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_server_tls_policies(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -6030,13 +6030,13 @@ def test_list_client_tls_policies_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_client_tls_policies(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 diff --git a/packages/google-cloud-network-security/tests/unit/gapic/network_security_v1beta1/test_network_security.py b/packages/google-cloud-network-security/tests/unit/gapic/network_security_v1beta1/test_network_security.py index ac861fb8f8da..b3f5257f5c48 100644 --- a/packages/google-cloud-network-security/tests/unit/gapic/network_security_v1beta1/test_network_security.py +++ 
b/packages/google-cloud-network-security/tests/unit/gapic/network_security_v1beta1/test_network_security.py @@ -1571,13 +1571,13 @@ def test_list_authorization_policies_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_authorization_policies(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -3794,13 +3794,13 @@ def test_list_server_tls_policies_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_server_tls_policies(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -5997,13 +5997,13 @@ def test_list_client_tls_policies_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_client_tls_policies(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 diff --git a/packages/google-cloud-network-services/CHANGELOG.md b/packages/google-cloud-network-services/CHANGELOG.md index c4d68c15128c..798a109a6580 100644 --- a/packages/google-cloud-network-services/CHANGELOG.md +++ b/packages/google-cloud-network-services/CHANGELOG.md @@ -1,5 +1,25 @@ # Changelog +## [0.5.12](https://github.com/googleapis/google-cloud-python/compare/google-cloud-network-services-v0.5.11...google-cloud-network-services-v0.5.12) (2024-06-12) + + +### Documentation + +* [google-cloud-network-services] Add a comment for the NetworkServices service ([#12794](https://github.com/googleapis/google-cloud-python/issues/12794)) ([4a672dc](https://github.com/googleapis/google-cloud-python/commit/4a672dc077d173d170d6cb4888cd7f4643ce04c6)) + +## [0.5.11](https://github.com/googleapis/google-cloud-python/compare/google-cloud-network-services-v0.5.10...google-cloud-network-services-v0.5.11) (2024-06-05) + + +### Features + +* A new field `metadata` is added to message `.google.cloud.networkservices.v1.LbRouteExtension` ([d219016](https://github.com/googleapis/google-cloud-python/commit/d219016564091c27dc38c75ab1a29da658c98533)) +* A new field `metadata` is added to message `.google.cloud.networkservices.v1.LbTrafficExtension` ([d219016](https://github.com/googleapis/google-cloud-python/commit/d219016564091c27dc38c75ab1a29da658c98533)) + + +### Documentation + +* update the comments for dep resources ([d219016](https://github.com/googleapis/google-cloud-python/commit/d219016564091c27dc38c75ab1a29da658c98533)) + ## 
[0.5.10](https://github.com/googleapis/google-cloud-python/compare/google-cloud-network-services-v0.5.9...google-cloud-network-services-v0.5.10) (2024-03-25) diff --git a/packages/google-cloud-network-services/google/cloud/network_services/gapic_version.py b/packages/google-cloud-network-services/google/cloud/network_services/gapic_version.py index 558c8aab67c5..cc43a639a105 100644 --- a/packages/google-cloud-network-services/google/cloud/network_services/gapic_version.py +++ b/packages/google-cloud-network-services/google/cloud/network_services/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.0.0" # {x-release-please-version} +__version__ = "0.5.12" # {x-release-please-version} diff --git a/packages/google-cloud-network-services/google/cloud/network_services_v1/gapic_version.py b/packages/google-cloud-network-services/google/cloud/network_services_v1/gapic_version.py index 558c8aab67c5..cc43a639a105 100644 --- a/packages/google-cloud-network-services/google/cloud/network_services_v1/gapic_version.py +++ b/packages/google-cloud-network-services/google/cloud/network_services_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.0.0" # {x-release-please-version} +__version__ = "0.5.12" # {x-release-please-version} diff --git a/packages/google-cloud-network-services/google/cloud/network_services_v1/services/dep_service/async_client.py b/packages/google-cloud-network-services/google/cloud/network_services_v1/services/dep_service/async_client.py index 76a6091f6160..ea08b350be26 100644 --- a/packages/google-cloud-network-services/google/cloud/network_services_v1/services/dep_service/async_client.py +++ b/packages/google-cloud-network-services/google/cloud/network_services_v1/services/dep_service/async_client.py @@ -51,6 +51,7 @@ from google.longrunning import operations_pb2 # type: ignore from google.protobuf import empty_pb2 # type: ignore from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import struct_pb2 # type: ignore from google.protobuf import timestamp_pb2 # type: ignore from google.cloud.network_services_v1.services.dep_service import pagers @@ -714,7 +715,7 @@ async def sample_update_lb_traffic_extension(): on the ``request`` instance; if ``request`` is provided, this should not be set. update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): - Required. Used to specify the fields to be overwritten + Optional. Used to specify the fields to be overwritten in the ``LbTrafficExtension`` resource by the update. The fields specified in the update_mask are relative to the resource, not the full request. A field is @@ -1366,7 +1367,7 @@ async def sample_update_lb_route_extension(): on the ``request`` instance; if ``request`` is provided, this should not be set. update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): - Required. Used to specify the fields to be overwritten + Optional. Used to specify the fields to be overwritten in the ``LbRouteExtension`` resource by the update. The fields specified in the update_mask are relative to the resource, not the full request. 
A field is overwritten diff --git a/packages/google-cloud-network-services/google/cloud/network_services_v1/services/dep_service/client.py b/packages/google-cloud-network-services/google/cloud/network_services_v1/services/dep_service/client.py index 921a1a64906e..36f6d8fea289 100644 --- a/packages/google-cloud-network-services/google/cloud/network_services_v1/services/dep_service/client.py +++ b/packages/google-cloud-network-services/google/cloud/network_services_v1/services/dep_service/client.py @@ -56,6 +56,7 @@ from google.longrunning import operations_pb2 # type: ignore from google.protobuf import empty_pb2 # type: ignore from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import struct_pb2 # type: ignore from google.protobuf import timestamp_pb2 # type: ignore from google.cloud.network_services_v1.services.dep_service import pagers @@ -1146,7 +1147,7 @@ def sample_update_lb_traffic_extension(): on the ``request`` instance; if ``request`` is provided, this should not be set. update_mask (google.protobuf.field_mask_pb2.FieldMask): - Required. Used to specify the fields to be overwritten + Optional. Used to specify the fields to be overwritten in the ``LbTrafficExtension`` resource by the update. The fields specified in the update_mask are relative to the resource, not the full request. A field is @@ -1789,7 +1790,7 @@ def sample_update_lb_route_extension(): on the ``request`` instance; if ``request`` is provided, this should not be set. update_mask (google.protobuf.field_mask_pb2.FieldMask): - Required. Used to specify the fields to be overwritten + Optional. Used to specify the fields to be overwritten in the ``LbRouteExtension`` resource by the update. The fields specified in the update_mask are relative to the resource, not the full request. 
A field is overwritten diff --git a/packages/google-cloud-network-services/google/cloud/network_services_v1/services/dep_service/transports/rest.py b/packages/google-cloud-network-services/google/cloud/network_services_v1/services/dep_service/transports/rest.py index 6a688877fcd5..6cba38871715 100644 --- a/packages/google-cloud-network-services/google/cloud/network_services_v1/services/dep_service/transports/rest.py +++ b/packages/google-cloud-network-services/google/cloud/network_services_v1/services/dep_service/transports/rest.py @@ -1478,9 +1478,7 @@ class _UpdateLbRouteExtension(DepServiceRestStub): def __hash__(self): return hash("UpdateLbRouteExtension") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - "updateMask": {}, - } + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} @classmethod def _get_unset_required_fields(cls, message_dict): @@ -1575,9 +1573,7 @@ class _UpdateLbTrafficExtension(DepServiceRestStub): def __hash__(self): return hash("UpdateLbTrafficExtension") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - "updateMask": {}, - } + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} @classmethod def _get_unset_required_fields(cls, message_dict): diff --git a/packages/google-cloud-network-services/google/cloud/network_services_v1/services/network_services/async_client.py b/packages/google-cloud-network-services/google/cloud/network_services_v1/services/network_services/async_client.py index ea046c8484de..673494b5c28a 100644 --- a/packages/google-cloud-network-services/google/cloud/network_services_v1/services/network_services/async_client.py +++ b/packages/google-cloud-network-services/google/cloud/network_services_v1/services/network_services/async_client.py @@ -82,7 +82,7 @@ class NetworkServicesAsyncClient: - """""" + """Service describing handlers for resources.""" _client: NetworkServicesClient diff --git a/packages/google-cloud-network-services/google/cloud/network_services_v1/services/network_services/client.py b/packages/google-cloud-network-services/google/cloud/network_services_v1/services/network_services/client.py index 7545fe20f535..cf31c99181fb 100644 --- a/packages/google-cloud-network-services/google/cloud/network_services_v1/services/network_services/client.py +++ b/packages/google-cloud-network-services/google/cloud/network_services_v1/services/network_services/client.py @@ -125,7 +125,7 @@ def get_transport_class( class NetworkServicesClient(metaclass=NetworkServicesClientMeta): - """""" + """Service describing handlers for resources.""" @staticmethod def _get_default_mtls_endpoint(api_endpoint): diff --git a/packages/google-cloud-network-services/google/cloud/network_services_v1/services/network_services/transports/grpc.py b/packages/google-cloud-network-services/google/cloud/network_services_v1/services/network_services/transports/grpc.py index c7d5b9068214..0b44b15e333b 100644 --- a/packages/google-cloud-network-services/google/cloud/network_services_v1/services/network_services/transports/grpc.py +++ b/packages/google-cloud-network-services/google/cloud/network_services_v1/services/network_services/transports/grpc.py @@ -53,6 +53,8 @@ class NetworkServicesGrpcTransport(NetworkServicesTransport): """gRPC backend transport for NetworkServices. + Service describing handlers for resources. + This class defines the same methods as the primary client, so the primary client can load the underlying transport implementation and call it. 
diff --git a/packages/google-cloud-network-services/google/cloud/network_services_v1/services/network_services/transports/grpc_asyncio.py b/packages/google-cloud-network-services/google/cloud/network_services_v1/services/network_services/transports/grpc_asyncio.py index 7a94786469b1..56f0b6376ee3 100644 --- a/packages/google-cloud-network-services/google/cloud/network_services_v1/services/network_services/transports/grpc_asyncio.py +++ b/packages/google-cloud-network-services/google/cloud/network_services_v1/services/network_services/transports/grpc_asyncio.py @@ -56,6 +56,8 @@ class NetworkServicesGrpcAsyncIOTransport(NetworkServicesTransport): """gRPC AsyncIO backend transport for NetworkServices. + Service describing handlers for resources. + This class defines the same methods as the primary client, so the primary client can load the underlying transport implementation and call it. diff --git a/packages/google-cloud-network-services/google/cloud/network_services_v1/services/network_services/transports/rest.py b/packages/google-cloud-network-services/google/cloud/network_services_v1/services/network_services/transports/rest.py index d3fdc5234460..32cf531a6972 100644 --- a/packages/google-cloud-network-services/google/cloud/network_services_v1/services/network_services/transports/rest.py +++ b/packages/google-cloud-network-services/google/cloud/network_services_v1/services/network_services/transports/rest.py @@ -1496,6 +1496,8 @@ class NetworkServicesRestStub: class NetworkServicesRestTransport(NetworkServicesTransport): """REST backend transport for NetworkServices. + Service describing handlers for resources. + This class defines the same methods as the primary client, so the primary client can load the underlying transport implementation and call it. diff --git a/packages/google-cloud-network-services/google/cloud/network_services_v1/types/dep.py b/packages/google-cloud-network-services/google/cloud/network_services_v1/types/dep.py index c26f82828c9a..4a53c636c866 100644 --- a/packages/google-cloud-network-services/google/cloud/network_services_v1/types/dep.py +++ b/packages/google-cloud-network-services/google/cloud/network_services_v1/types/dep.py @@ -19,6 +19,7 @@ from google.protobuf import duration_pb2 # type: ignore from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import struct_pb2 # type: ignore from google.protobuf import timestamp_pb2 # type: ignore import proto # type: ignore @@ -179,8 +180,8 @@ class Extension(proto.Message): supported_events (MutableSequence[google.cloud.network_services_v1.types.EventType]): Optional. A set of events during request or response processing for which this extension is called. This field is - required for the ``LbTrafficExtension`` resource. It's not - relevant for the ``LbRouteExtension`` resource. + required for the ``LbTrafficExtension`` resource. It must + not be set for the ``LbRouteExtension`` resource. timeout (google.protobuf.duration_pb2.Duration): Optional. Specifies the timeout for each individual message on the stream. The timeout @@ -305,6 +306,15 @@ class LbTrafficExtension(proto.Message): ``EXTERNAL_MANAGED``. For more information, refer to `Choosing a load balancer `__. + metadata (google.protobuf.struct_pb2.Struct): + Optional. The metadata provided here is included in the + ``ProcessingRequest.metadata_context.filter_metadata`` map + field. The metadata is available under the key + ``com.google.lb_traffic_extension.``. 
The + following variables are supported in the metadata: + + ``{forwarding_rule_id}`` - substituted with the forwarding + rule's fully qualified resource name. """ name: str = proto.Field( @@ -344,6 +354,11 @@ class LbTrafficExtension(proto.Message): number=8, enum="LoadBalancingScheme", ) + metadata: struct_pb2.Struct = proto.Field( + proto.MESSAGE, + number=10, + message=struct_pb2.Struct, + ) class ListLbTrafficExtensionsRequest(proto.Message): @@ -498,7 +513,7 @@ class UpdateLbTrafficExtensionRequest(proto.Message): Attributes: update_mask (google.protobuf.field_mask_pb2.FieldMask): - Required. Used to specify the fields to be overwritten in + Optional. Used to specify the fields to be overwritten in the ``LbTrafficExtension`` resource by the update. The fields specified in the update_mask are relative to the resource, not the full request. A field is overwritten if it @@ -630,6 +645,17 @@ class LbRouteExtension(proto.Message): ``EXTERNAL_MANAGED``. For more information, refer to `Choosing a load balancer `__. + metadata (google.protobuf.struct_pb2.Struct): + Optional. The metadata provided here is included as part of + the ``metadata_context`` (of type + ``google.protobuf.Struct``) in the ``ProcessingRequest`` + message sent to the extension server. The metadata is + available under the namespace + ``com.google.lb_route_extension.``. The + following variables are supported in the metadata Struct: + + ``{forwarding_rule_id}`` - substituted with the forwarding + rule's fully qualified resource name. """ name: str = proto.Field( @@ -669,6 +695,11 @@ class LbRouteExtension(proto.Message): number=8, enum="LoadBalancingScheme", ) + metadata: struct_pb2.Struct = proto.Field( + proto.MESSAGE, + number=10, + message=struct_pb2.Struct, + ) class ListLbRouteExtensionsRequest(proto.Message): @@ -823,7 +854,7 @@ class UpdateLbRouteExtensionRequest(proto.Message): Attributes: update_mask (google.protobuf.field_mask_pb2.FieldMask): - Required. Used to specify the fields to be overwritten in + Optional. Used to specify the fields to be overwritten in the ``LbRouteExtension`` resource by the update. The fields specified in the update_mask are relative to the resource, not the full request. 
A field is overwritten if it is in the diff --git a/packages/google-cloud-network-services/samples/generated_samples/snippet_metadata_google.cloud.networkservices.v1.json b/packages/google-cloud-network-services/samples/generated_samples/snippet_metadata_google.cloud.networkservices.v1.json index 5efe8fa4280b..3ac26a8ec5dc 100644 --- a/packages/google-cloud-network-services/samples/generated_samples/snippet_metadata_google.cloud.networkservices.v1.json +++ b/packages/google-cloud-network-services/samples/generated_samples/snippet_metadata_google.cloud.networkservices.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-network-services", - "version": "0.1.0" + "version": "0.5.12" }, "snippets": [ { diff --git a/packages/google-cloud-network-services/scripts/fixup_network_services_v1_keywords.py b/packages/google-cloud-network-services/scripts/fixup_network_services_v1_keywords.py index 380d90eaecf1..22a35f3c0c14 100644 --- a/packages/google-cloud-network-services/scripts/fixup_network_services_v1_keywords.py +++ b/packages/google-cloud-network-services/scripts/fixup_network_services_v1_keywords.py @@ -83,8 +83,8 @@ class network_servicesCallTransformer(cst.CSTTransformer): 'update_gateway': ('gateway', 'update_mask', ), 'update_grpc_route': ('grpc_route', 'update_mask', ), 'update_http_route': ('http_route', 'update_mask', ), - 'update_lb_route_extension': ('update_mask', 'lb_route_extension', 'request_id', ), - 'update_lb_traffic_extension': ('update_mask', 'lb_traffic_extension', 'request_id', ), + 'update_lb_route_extension': ('lb_route_extension', 'update_mask', 'request_id', ), + 'update_lb_traffic_extension': ('lb_traffic_extension', 'update_mask', 'request_id', ), 'update_mesh': ('mesh', 'update_mask', ), 'update_tcp_route': ('tcp_route', 'update_mask', ), 'update_tls_route': ('tls_route', 'update_mask', ), diff --git a/packages/google-cloud-network-services/tests/unit/gapic/network_services_v1/test_dep_service.py b/packages/google-cloud-network-services/tests/unit/gapic/network_services_v1/test_dep_service.py index 941811e60be8..8520369230fd 100644 --- a/packages/google-cloud-network-services/tests/unit/gapic/network_services_v1/test_dep_service.py +++ b/packages/google-cloud-network-services/tests/unit/gapic/network_services_v1/test_dep_service.py @@ -51,6 +51,7 @@ from google.protobuf import empty_pb2 # type: ignore from google.protobuf import field_mask_pb2 # type: ignore from google.protobuf import json_format +from google.protobuf import struct_pb2 # type: ignore from google.protobuf import timestamp_pb2 # type: ignore import grpc from grpc.experimental import aio @@ -1535,13 +1536,13 @@ def test_list_lb_traffic_extensions_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_lb_traffic_extensions(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -3747,13 +3748,13 @@ def test_list_lb_route_extensions_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_lb_route_extensions(request={}) - assert pager._metadata == metadata + assert pager._metadata == 
expected_metadata results = list(pager) assert len(results) == 6 @@ -6266,6 +6267,7 @@ def test_create_lb_traffic_extension_rest(request_type): } ], "load_balancing_scheme": 1, + "metadata": {"fields": {}}, } # The version of a generated dependency at test runtime may differ from the version used during generation. # Delete any fields which are not present in the current runtime dependency @@ -6714,6 +6716,7 @@ def test_update_lb_traffic_extension_rest(request_type): } ], "load_balancing_scheme": 1, + "metadata": {"fields": {}}, } # The version of a generated dependency at test runtime may differ from the version used during generation. # Delete any fields which are not present in the current runtime dependency @@ -6936,12 +6939,7 @@ def test_update_lb_traffic_extension_rest_unset_required_fields(): "updateMask", ) ) - & set( - ( - "updateMask", - "lbTrafficExtension", - ) - ) + & set(("lbTrafficExtension",)) ) @@ -8154,6 +8152,7 @@ def test_create_lb_route_extension_rest(request_type): } ], "load_balancing_scheme": 1, + "metadata": {"fields": {}}, } # The version of a generated dependency at test runtime may differ from the version used during generation. # Delete any fields which are not present in the current runtime dependency @@ -8599,6 +8598,7 @@ def test_update_lb_route_extension_rest(request_type): } ], "load_balancing_scheme": 1, + "metadata": {"fields": {}}, } # The version of a generated dependency at test runtime may differ from the version used during generation. # Delete any fields which are not present in the current runtime dependency @@ -8819,12 +8819,7 @@ def test_update_lb_route_extension_rest_unset_required_fields(): "updateMask", ) ) - & set( - ( - "updateMask", - "lbRouteExtension", - ) - ) + & set(("lbRouteExtension",)) ) diff --git a/packages/google-cloud-network-services/tests/unit/gapic/network_services_v1/test_network_services.py b/packages/google-cloud-network-services/tests/unit/gapic/network_services_v1/test_network_services.py index 8c08e4f4534a..7561bcd01d2d 100644 --- a/packages/google-cloud-network-services/tests/unit/gapic/network_services_v1/test_network_services.py +++ b/packages/google-cloud-network-services/tests/unit/gapic/network_services_v1/test_network_services.py @@ -1612,13 +1612,13 @@ def test_list_endpoint_policies_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_endpoint_policies(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -3809,13 +3809,13 @@ def test_list_gateways_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_gateways(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -5902,13 +5902,13 @@ def test_list_grpc_routes_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_grpc_routes(request={}) - assert pager._metadata == metadata 
+ assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -8054,13 +8054,13 @@ def test_list_http_routes_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_http_routes(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -10204,13 +10204,13 @@ def test_list_tcp_routes_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_tcp_routes(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -12291,13 +12291,13 @@ def test_list_tls_routes_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_tls_routes(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -14404,13 +14404,13 @@ def test_list_service_bindings_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_service_bindings(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -16181,13 +16181,13 @@ def test_list_meshes_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_meshes(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 diff --git a/packages/google-cloud-notebooks/tests/unit/gapic/notebooks_v1/test_managed_notebook_service.py b/packages/google-cloud-notebooks/tests/unit/gapic/notebooks_v1/test_managed_notebook_service.py index 5f1b957a3fbf..ed5b83eca02b 100644 --- a/packages/google-cloud-notebooks/tests/unit/gapic/notebooks_v1/test_managed_notebook_service.py +++ b/packages/google-cloud-notebooks/tests/unit/gapic/notebooks_v1/test_managed_notebook_service.py @@ -1572,13 +1572,13 @@ def test_list_runtimes_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_runtimes(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 diff --git a/packages/google-cloud-notebooks/tests/unit/gapic/notebooks_v1/test_notebook_service.py b/packages/google-cloud-notebooks/tests/unit/gapic/notebooks_v1/test_notebook_service.py index dbb0a8e76b41..a0cd43e6a216 
100644 --- a/packages/google-cloud-notebooks/tests/unit/gapic/notebooks_v1/test_notebook_service.py +++ b/packages/google-cloud-notebooks/tests/unit/gapic/notebooks_v1/test_notebook_service.py @@ -1546,13 +1546,13 @@ def test_list_instances_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_instances(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -8708,13 +8708,13 @@ def test_list_environments_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_environments(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -10472,13 +10472,13 @@ def test_list_schedules_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_schedules(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -12481,13 +12481,13 @@ def test_list_executions_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_executions(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 diff --git a/packages/google-cloud-notebooks/tests/unit/gapic/notebooks_v1beta1/test_notebook_service.py b/packages/google-cloud-notebooks/tests/unit/gapic/notebooks_v1beta1/test_notebook_service.py index fb07e7acdb10..9e826bf15fe7 100644 --- a/packages/google-cloud-notebooks/tests/unit/gapic/notebooks_v1beta1/test_notebook_service.py +++ b/packages/google-cloud-notebooks/tests/unit/gapic/notebooks_v1beta1/test_notebook_service.py @@ -1487,13 +1487,13 @@ def test_list_instances_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_instances(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -6265,13 +6265,13 @@ def test_list_environments_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_environments(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 diff --git a/packages/google-cloud-notebooks/tests/unit/gapic/notebooks_v2/test_notebook_service.py 
b/packages/google-cloud-notebooks/tests/unit/gapic/notebooks_v2/test_notebook_service.py index cdefc0bf1e40..62999d1c70e4 100644 --- a/packages/google-cloud-notebooks/tests/unit/gapic/notebooks_v2/test_notebook_service.py +++ b/packages/google-cloud-notebooks/tests/unit/gapic/notebooks_v2/test_notebook_service.py @@ -1578,13 +1578,13 @@ def test_list_instances_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_instances(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 diff --git a/packages/google-cloud-orchestration-airflow/tests/unit/gapic/service_v1/test_environments.py b/packages/google-cloud-orchestration-airflow/tests/unit/gapic/service_v1/test_environments.py index 55ecdd5a3e33..5655d2c079ee 100644 --- a/packages/google-cloud-orchestration-airflow/tests/unit/gapic/service_v1/test_environments.py +++ b/packages/google-cloud-orchestration-airflow/tests/unit/gapic/service_v1/test_environments.py @@ -2321,13 +2321,13 @@ def test_list_environments_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_environments(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -4645,13 +4645,13 @@ def test_list_workloads_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_workloads(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -6034,13 +6034,13 @@ def test_list_user_workloads_secrets_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_user_workloads_secrets(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -8197,13 +8197,13 @@ def test_list_user_workloads_config_maps_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_user_workloads_config_maps(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 diff --git a/packages/google-cloud-orchestration-airflow/tests/unit/gapic/service_v1/test_image_versions.py b/packages/google-cloud-orchestration-airflow/tests/unit/gapic/service_v1/test_image_versions.py index e8e6df8abb48..c7bfdc3319a4 100644 --- a/packages/google-cloud-orchestration-airflow/tests/unit/gapic/service_v1/test_image_versions.py +++ 
b/packages/google-cloud-orchestration-airflow/tests/unit/gapic/service_v1/test_image_versions.py @@ -1546,13 +1546,13 @@ def test_list_image_versions_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_image_versions(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 diff --git a/packages/google-cloud-orchestration-airflow/tests/unit/gapic/service_v1beta1/test_environments.py b/packages/google-cloud-orchestration-airflow/tests/unit/gapic/service_v1beta1/test_environments.py index 7c6bd89280ae..5b0c6ad52bfc 100644 --- a/packages/google-cloud-orchestration-airflow/tests/unit/gapic/service_v1beta1/test_environments.py +++ b/packages/google-cloud-orchestration-airflow/tests/unit/gapic/service_v1beta1/test_environments.py @@ -2324,13 +2324,13 @@ def test_list_environments_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_environments(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -5242,13 +5242,13 @@ def test_list_workloads_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_workloads(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -6631,13 +6631,13 @@ def test_list_user_workloads_secrets_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_user_workloads_secrets(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -8794,13 +8794,13 @@ def test_list_user_workloads_config_maps_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_user_workloads_config_maps(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 diff --git a/packages/google-cloud-orchestration-airflow/tests/unit/gapic/service_v1beta1/test_image_versions.py b/packages/google-cloud-orchestration-airflow/tests/unit/gapic/service_v1beta1/test_image_versions.py index 56d69049c969..ba178970a565 100644 --- a/packages/google-cloud-orchestration-airflow/tests/unit/gapic/service_v1beta1/test_image_versions.py +++ b/packages/google-cloud-orchestration-airflow/tests/unit/gapic/service_v1beta1/test_image_versions.py @@ -1546,13 +1546,13 @@ def test_list_image_versions_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) 
+ ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_image_versions(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 diff --git a/packages/google-cloud-os-config/tests/unit/gapic/osconfig_v1/test_os_config_service.py b/packages/google-cloud-os-config/tests/unit/gapic/osconfig_v1/test_os_config_service.py index 97ebd7bf4cad..3d893556e952 100644 --- a/packages/google-cloud-os-config/tests/unit/gapic/osconfig_v1/test_os_config_service.py +++ b/packages/google-cloud-os-config/tests/unit/gapic/osconfig_v1/test_os_config_service.py @@ -2622,13 +2622,13 @@ def test_list_patch_jobs_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_patch_jobs(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -3211,13 +3211,13 @@ def test_list_patch_job_instance_details_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_patch_job_instance_details(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -4631,13 +4631,13 @@ def test_list_patch_deployments_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_patch_deployments(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 diff --git a/packages/google-cloud-os-config/tests/unit/gapic/osconfig_v1/test_os_config_zonal_service.py b/packages/google-cloud-os-config/tests/unit/gapic/osconfig_v1/test_os_config_zonal_service.py index 0579cce24e77..ba4c23bc42f6 100644 --- a/packages/google-cloud-os-config/tests/unit/gapic/osconfig_v1/test_os_config_zonal_service.py +++ b/packages/google-cloud-os-config/tests/unit/gapic/osconfig_v1/test_os_config_zonal_service.py @@ -2908,13 +2908,13 @@ def test_list_os_policy_assignments_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_os_policy_assignments(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -3513,13 +3513,13 @@ def test_list_os_policy_assignment_revisions_pager(transport_name: str = "grpc") RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("name", ""),)), ) pager = client.list_os_policy_assignment_revisions(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results 
= list(pager) assert len(results) == 6 @@ -4935,13 +4935,13 @@ def test_list_os_policy_assignment_reports_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_os_policy_assignment_reports(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -5877,13 +5877,13 @@ def test_list_inventories_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_inventories(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -6859,13 +6859,13 @@ def test_list_vulnerability_reports_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_vulnerability_reports(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 diff --git a/packages/google-cloud-os-config/tests/unit/gapic/osconfig_v1alpha/test_os_config_zonal_service.py b/packages/google-cloud-os-config/tests/unit/gapic/osconfig_v1alpha/test_os_config_zonal_service.py index 2ffd682dadd9..d4afeabd98bf 100644 --- a/packages/google-cloud-os-config/tests/unit/gapic/osconfig_v1alpha/test_os_config_zonal_service.py +++ b/packages/google-cloud-os-config/tests/unit/gapic/osconfig_v1alpha/test_os_config_zonal_service.py @@ -2910,13 +2910,13 @@ def test_list_os_policy_assignments_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_os_policy_assignments(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -3515,13 +3515,13 @@ def test_list_os_policy_assignment_revisions_pager(transport_name: str = "grpc") RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("name", ""),)), ) pager = client.list_os_policy_assignment_revisions(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -4969,13 +4969,13 @@ def test_list_instance_os_policies_compliances_pager(transport_name: str = "grpc RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_instance_os_policies_compliances(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -6000,13 +6000,13 @@ def test_list_os_policy_assignment_reports_pager(transport_name: str = "grpc"): RuntimeError, 
) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_os_policy_assignment_reports(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -6942,13 +6942,13 @@ def test_list_inventories_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_inventories(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -7924,13 +7924,13 @@ def test_list_vulnerability_reports_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_vulnerability_reports(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 diff --git a/packages/google-cloud-os-login/CHANGELOG.md b/packages/google-cloud-os-login/CHANGELOG.md index 6c48623c6d01..ef0e5aa3a2e3 100644 --- a/packages/google-cloud-os-login/CHANGELOG.md +++ b/packages/google-cloud-os-login/CHANGELOG.md @@ -4,6 +4,13 @@ [1]: https://pypi.org/project/google-cloud-oslogin/#history +## [2.14.4](https://github.com/googleapis/google-cloud-python/compare/google-cloud-os-login-v2.14.3...google-cloud-os-login-v2.14.4) (2024-06-27) + + +### Documentation + +* [google-cloud-os-login] A comment for field `parent` in message `.google.cloud.oslogin.v1beta.SignSshPublicKeyRequest` is changed ([#12831](https://github.com/googleapis/google-cloud-python/issues/12831)) ([7ce3bf4](https://github.com/googleapis/google-cloud-python/commit/7ce3bf4332dcebf5cdb2e2165003367134e9a6c2)) + ## [2.14.3](https://github.com/googleapis/google-cloud-python/compare/google-cloud-os-login-v2.14.2...google-cloud-os-login-v2.14.3) (2024-03-05) diff --git a/packages/google-cloud-os-login/README.rst b/packages/google-cloud-os-login/README.rst index d2307da89516..f6dc1c1f2a0d 100644 --- a/packages/google-cloud-os-login/README.rst +++ b/packages/google-cloud-os-login/README.rst @@ -15,7 +15,7 @@ Python Client for Google Cloud OS Login .. |versions| image:: https://img.shields.io/pypi/pyversions/google-cloud-os-login.svg :target: https://pypi.org/project/google-cloud-os-login/ .. _Google Cloud OS Login: https://cloud.google.com/compute/docs/oslogin/ -.. _Client Library Documentation: https://cloud.google.com/python/docs/reference/oslogin/latest +.. _Client Library Documentation: https://cloud.google.com/python/docs/reference/oslogin/latest/summary_overview .. 
_Product Documentation: https://cloud.google.com/compute/docs/oslogin/ Quick Start diff --git a/packages/google-cloud-os-login/docs/index.rst b/packages/google-cloud-os-login/docs/index.rst index c18563cc44f9..85ac2194cb99 100644 --- a/packages/google-cloud-os-login/docs/index.rst +++ b/packages/google-cloud-os-login/docs/index.rst @@ -22,3 +22,8 @@ For a list of all ``google-cloud-os-login`` releases: :maxdepth: 2 CHANGELOG + +.. toctree:: + :hidden: + + summary_overview.md diff --git a/packages/google-cloud-os-login/docs/summary_overview.md b/packages/google-cloud-os-login/docs/summary_overview.md new file mode 100644 index 000000000000..726a45ced6ba --- /dev/null +++ b/packages/google-cloud-os-login/docs/summary_overview.md @@ -0,0 +1,22 @@ +[ +This is a templated file. Adding content to this file may result in it being +reverted. Instead, if you want to place additional content, create an +"overview_content.md" file in `docs/` directory. The Sphinx tool will +pick up on the content and merge the content. +]: # + +# Google Cloud OS Login API + +Overview of the APIs available for Google Cloud OS Login API. + +## All entries + +Classes, methods and properties & attributes for +Google Cloud OS Login API. + +[classes](https://cloud.google.com/python/docs/reference/oslogin/latest/summary_class.html) + +[methods](https://cloud.google.com/python/docs/reference/oslogin/latest/summary_method.html) + +[properties and +attributes](https://cloud.google.com/python/docs/reference/oslogin/latest/summary_property.html) diff --git a/packages/google-cloud-os-login/google/cloud/oslogin/gapic_version.py b/packages/google-cloud-os-login/google/cloud/oslogin/gapic_version.py index 558c8aab67c5..e37a1814aa29 100644 --- a/packages/google-cloud-os-login/google/cloud/oslogin/gapic_version.py +++ b/packages/google-cloud-os-login/google/cloud/oslogin/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.0.0" # {x-release-please-version} +__version__ = "2.14.4" # {x-release-please-version} diff --git a/packages/google-cloud-os-login/google/cloud/oslogin_v1/gapic_version.py b/packages/google-cloud-os-login/google/cloud/oslogin_v1/gapic_version.py index 558c8aab67c5..e37a1814aa29 100644 --- a/packages/google-cloud-os-login/google/cloud/oslogin_v1/gapic_version.py +++ b/packages/google-cloud-os-login/google/cloud/oslogin_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.0.0" # {x-release-please-version} +__version__ = "2.14.4" # {x-release-please-version} diff --git a/packages/google-cloud-os-login/noxfile.py b/packages/google-cloud-os-login/noxfile.py index 1e6cd48d0529..67b7265f7586 100644 --- a/packages/google-cloud-os-login/noxfile.py +++ b/packages/google-cloud-os-login/noxfile.py @@ -160,14 +160,28 @@ def install_unittest_dependencies(session, *constraints): session.install("-e", ".", *constraints) -def default(session): +@nox.session(python=UNIT_TEST_PYTHON_VERSIONS) +@nox.parametrize( + "protobuf_implementation", + ["python", "upb", "cpp"], +) +def unit(session, protobuf_implementation): # Install all test dependencies, then install this package in-place. 
+ if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12"): + session.skip("cpp implementation is not supported in python 3.11+") + constraints_path = str( CURRENT_DIRECTORY / "testing" / f"constraints-{session.python}.txt" ) install_unittest_dependencies(session, "-c", constraints_path) + # TODO(https://github.com/googleapis/synthtool/issues/1976): + # Remove the 'cpp' implementation once support for Protobuf 3.x is dropped. + # The 'cpp' implementation requires Protobuf<4. + if protobuf_implementation == "cpp": + session.install("protobuf<4") + # Run py.test against the unit tests. session.run( "py.test", @@ -181,15 +195,12 @@ def default(session): "--cov-fail-under=0", os.path.join("tests", "unit"), *session.posargs, + env={ + "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation, + }, ) -@nox.session(python=UNIT_TEST_PYTHON_VERSIONS) -def unit(session): - """Run the unit test suite.""" - default(session) - - def install_systemtest_dependencies(session, *constraints): # Use pre-release gRPC for system tests. # Exclude version 1.52.0rc1 which has a known issue. @@ -358,9 +369,16 @@ def docfx(session): @nox.session(python="3.12") -def prerelease_deps(session): +@nox.parametrize( + "protobuf_implementation", + ["python", "upb", "cpp"], +) +def prerelease_deps(session, protobuf_implementation): """Run all tests with prerelease versions of dependencies installed.""" + if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12"): + session.skip("cpp implementation is not supported in python 3.11+") + # Install all dependencies session.install("-e", ".[all, tests, tracing]") unit_deps_all = UNIT_TEST_STANDARD_DEPENDENCIES + UNIT_TEST_EXTERNAL_DEPENDENCIES @@ -397,9 +415,9 @@ def prerelease_deps(session): "protobuf", # dependency of grpc "six", + "grpc-google-iam-v1", "googleapis-common-protos", - # Exclude version 1.52.0rc1 which has a known issue. 
See https://github.com/grpc/grpc/issues/32163 - "grpcio!=1.52.0rc1", + "grpcio", "grpcio-status", "google-api-core", "google-auth", @@ -425,4 +443,10 @@ def prerelease_deps(session): session.run("python", "-c", "import grpc; print(grpc.__version__)") session.run("python", "-c", "import google.auth; print(google.auth.__version__)") - session.run("py.test", "tests/unit") + session.run( + "py.test", + "tests/unit", + env={ + "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation, + }, + ) diff --git a/packages/google-cloud-os-login/samples/generated_samples/snippet_metadata_google.cloud.oslogin.v1.json b/packages/google-cloud-os-login/samples/generated_samples/snippet_metadata_google.cloud.oslogin.v1.json index 92b367f767a3..3de8efd2a769 100644 --- a/packages/google-cloud-os-login/samples/generated_samples/snippet_metadata_google.cloud.oslogin.v1.json +++ b/packages/google-cloud-os-login/samples/generated_samples/snippet_metadata_google.cloud.oslogin.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-os-login", - "version": "0.1.0" + "version": "2.14.4" }, "snippets": [ { diff --git a/packages/google-cloud-parallelstore/google/cloud/parallelstore/gapic_version.py b/packages/google-cloud-parallelstore/google/cloud/parallelstore/gapic_version.py index 364164ddb134..558c8aab67c5 100644 --- a/packages/google-cloud-parallelstore/google/cloud/parallelstore/gapic_version.py +++ b/packages/google-cloud-parallelstore/google/cloud/parallelstore/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.2.0" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-parallelstore/google/cloud/parallelstore_v1beta/gapic_version.py b/packages/google-cloud-parallelstore/google/cloud/parallelstore_v1beta/gapic_version.py index 364164ddb134..558c8aab67c5 100644 --- a/packages/google-cloud-parallelstore/google/cloud/parallelstore_v1beta/gapic_version.py +++ b/packages/google-cloud-parallelstore/google/cloud/parallelstore_v1beta/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
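[Editorial aside, not part of the patch.] The noxfile.py changes just above (and repeated for other packages later in this diff) parametrize the `unit` and `prerelease_deps` sessions over the protobuf runtime implementation and pass the choice through the `PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION` environment variable. A minimal sketch of how a test could confirm which implementation protobuf actually picked up is below; `api_implementation` is an internal protobuf helper, so treat this as illustrative rather than a stable API.

```python
import os

from google.protobuf.internal import api_implementation

# The env var must be set before protobuf is first imported to take effect;
# the parametrized nox sessions do this by passing env= to session.run.
requested = os.environ.get("PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION", "<unset>")
active = api_implementation.Type()  # typically "python", "upb", or "cpp"
print(f"requested={requested} active={active}")
```

With the parametrization in place, a single implementation can be selected on the command line with something like `nox -s "unit-3.10(protobuf_implementation='upb')"` (exact session naming depends on the installed nox version).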
# -__version__ = "0.2.0" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-parallelstore/samples/generated_samples/snippet_metadata_google.cloud.parallelstore.v1beta.json b/packages/google-cloud-parallelstore/samples/generated_samples/snippet_metadata_google.cloud.parallelstore.v1beta.json index dae5753d8994..01334a8f8053 100644 --- a/packages/google-cloud-parallelstore/samples/generated_samples/snippet_metadata_google.cloud.parallelstore.v1beta.json +++ b/packages/google-cloud-parallelstore/samples/generated_samples/snippet_metadata_google.cloud.parallelstore.v1beta.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-parallelstore", - "version": "0.2.0" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-parallelstore/tests/unit/gapic/parallelstore_v1beta/test_parallelstore.py b/packages/google-cloud-parallelstore/tests/unit/gapic/parallelstore_v1beta/test_parallelstore.py index 7d5ff2951562..6a59d744ff76 100644 --- a/packages/google-cloud-parallelstore/tests/unit/gapic/parallelstore_v1beta/test_parallelstore.py +++ b/packages/google-cloud-parallelstore/tests/unit/gapic/parallelstore_v1beta/test_parallelstore.py @@ -1543,13 +1543,13 @@ def test_list_instances_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_instances(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 diff --git a/packages/google-cloud-policysimulator/tests/unit/gapic/policysimulator_v1/test_simulator.py b/packages/google-cloud-policysimulator/tests/unit/gapic/policysimulator_v1/test_simulator.py index 38d8fd3c6a43..dccc48abbe3b 100644 --- a/packages/google-cloud-policysimulator/tests/unit/gapic/policysimulator_v1/test_simulator.py +++ b/packages/google-cloud-policysimulator/tests/unit/gapic/policysimulator_v1/test_simulator.py @@ -2263,13 +2263,13 @@ def test_list_replay_results_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_replay_results(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 diff --git a/packages/google-cloud-private-ca/security-privateca-v1-py.tar.gz b/packages/google-cloud-private-ca/security-privateca-v1-py.tar.gz new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/packages/google-cloud-private-ca/tests/unit/gapic/privateca_v1/test_certificate_authority_service.py b/packages/google-cloud-private-ca/tests/unit/gapic/privateca_v1/test_certificate_authority_service.py index b17dbaf03ced..a659b075f25a 100644 --- a/packages/google-cloud-private-ca/tests/unit/gapic/privateca_v1/test_certificate_authority_service.py +++ b/packages/google-cloud-private-ca/tests/unit/gapic/privateca_v1/test_certificate_authority_service.py @@ -2500,13 +2500,13 @@ def test_list_certificates_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", 
""),)), ) pager = client.list_certificates(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -6362,13 +6362,13 @@ def test_list_certificate_authorities_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_certificate_authorities(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -9273,13 +9273,13 @@ def test_list_ca_pools_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_ca_pools(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -11021,13 +11021,13 @@ def test_list_certificate_revocation_lists_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_certificate_revocation_lists(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -13245,13 +13245,13 @@ def test_list_certificate_templates_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_certificate_templates(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 diff --git a/packages/google-cloud-private-ca/tests/unit/gapic/privateca_v1beta1/test_certificate_authority_service.py b/packages/google-cloud-private-ca/tests/unit/gapic/privateca_v1beta1/test_certificate_authority_service.py index b79fd12a3743..985a23b3ccb8 100644 --- a/packages/google-cloud-private-ca/tests/unit/gapic/privateca_v1beta1/test_certificate_authority_service.py +++ b/packages/google-cloud-private-ca/tests/unit/gapic/privateca_v1beta1/test_certificate_authority_service.py @@ -2463,13 +2463,13 @@ def test_list_certificates_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_certificates(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -6295,13 +6295,13 @@ def test_list_certificate_authorities_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_certificate_authorities(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert 
len(results) == 6 @@ -8503,13 +8503,13 @@ def test_list_certificate_revocation_lists_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_certificate_revocation_lists(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -9913,13 +9913,13 @@ def test_list_reusable_configs_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_reusable_configs(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 diff --git a/packages/google-cloud-private-catalog/tests/unit/gapic/privatecatalog_v1beta1/test_private_catalog.py b/packages/google-cloud-private-catalog/tests/unit/gapic/privatecatalog_v1beta1/test_private_catalog.py index 41a96e9c70ed..133b2255e673 100644 --- a/packages/google-cloud-private-catalog/tests/unit/gapic/privatecatalog_v1beta1/test_private_catalog.py +++ b/packages/google-cloud-private-catalog/tests/unit/gapic/privatecatalog_v1beta1/test_private_catalog.py @@ -1444,13 +1444,13 @@ def test_search_catalogs_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("resource", ""),)), ) pager = client.search_catalogs(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -1925,13 +1925,13 @@ def test_search_products_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("resource", ""),)), ) pager = client.search_products(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -2406,13 +2406,13 @@ def test_search_versions_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("resource", ""),)), ) pager = client.search_versions(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 diff --git a/packages/google-cloud-public-ca/google/cloud/security/publicca/gapic_version.py b/packages/google-cloud-public-ca/google/cloud/security/publicca/gapic_version.py index 3425e3287cda..558c8aab67c5 100644 --- a/packages/google-cloud-public-ca/google/cloud/security/publicca/gapic_version.py +++ b/packages/google-cloud-public-ca/google/cloud/security/publicca/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "0.3.10" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-public-ca/google/cloud/security/publicca_v1/gapic_version.py b/packages/google-cloud-public-ca/google/cloud/security/publicca_v1/gapic_version.py index 3425e3287cda..558c8aab67c5 100644 --- a/packages/google-cloud-public-ca/google/cloud/security/publicca_v1/gapic_version.py +++ b/packages/google-cloud-public-ca/google/cloud/security/publicca_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.3.10" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-public-ca/google/cloud/security/publicca_v1beta1/gapic_version.py b/packages/google-cloud-public-ca/google/cloud/security/publicca_v1beta1/gapic_version.py index 3425e3287cda..558c8aab67c5 100644 --- a/packages/google-cloud-public-ca/google/cloud/security/publicca_v1beta1/gapic_version.py +++ b/packages/google-cloud-public-ca/google/cloud/security/publicca_v1beta1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.3.10" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-public-ca/samples/generated_samples/snippet_metadata_google.cloud.security.publicca.v1.json b/packages/google-cloud-public-ca/samples/generated_samples/snippet_metadata_google.cloud.security.publicca.v1.json index 44fad66d5ee7..3150579e1285 100644 --- a/packages/google-cloud-public-ca/samples/generated_samples/snippet_metadata_google.cloud.security.publicca.v1.json +++ b/packages/google-cloud-public-ca/samples/generated_samples/snippet_metadata_google.cloud.security.publicca.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-security-publicca", - "version": "0.3.10" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-public-ca/samples/generated_samples/snippet_metadata_google.cloud.security.publicca.v1beta1.json b/packages/google-cloud-public-ca/samples/generated_samples/snippet_metadata_google.cloud.security.publicca.v1beta1.json index d2c5574111ac..b4590ffb004f 100644 --- a/packages/google-cloud-public-ca/samples/generated_samples/snippet_metadata_google.cloud.security.publicca.v1beta1.json +++ b/packages/google-cloud-public-ca/samples/generated_samples/snippet_metadata_google.cloud.security.publicca.v1beta1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-public-ca", - "version": "0.3.10" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-rapidmigrationassessment/tests/unit/gapic/rapidmigrationassessment_v1/test_rapid_migration_assessment.py b/packages/google-cloud-rapidmigrationassessment/tests/unit/gapic/rapidmigrationassessment_v1/test_rapid_migration_assessment.py index ed9a4aeef8ab..539843b1103a 100644 --- a/packages/google-cloud-rapidmigrationassessment/tests/unit/gapic/rapidmigrationassessment_v1/test_rapid_migration_assessment.py +++ b/packages/google-cloud-rapidmigrationassessment/tests/unit/gapic/rapidmigrationassessment_v1/test_rapid_migration_assessment.py @@ -2801,13 +2801,13 @@ def test_list_collectors_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( 
gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_collectors(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 diff --git a/packages/google-cloud-recaptcha-enterprise/CHANGELOG.md b/packages/google-cloud-recaptcha-enterprise/CHANGELOG.md index 6b0a21362242..2fbe586ca8e4 100644 --- a/packages/google-cloud-recaptcha-enterprise/CHANGELOG.md +++ b/packages/google-cloud-recaptcha-enterprise/CHANGELOG.md @@ -1,5 +1,12 @@ # Changelog +## [1.21.0](https://github.com/googleapis/google-cloud-python/compare/google-cloud-recaptcha-enterprise-v1.20.0...google-cloud-recaptcha-enterprise-v1.21.0) (2024-06-27) + + +### Features + +* [google-cloud-recaptcha-enterprise] added SMS Toll Fraud assessment ([#12838](https://github.com/googleapis/google-cloud-python/issues/12838)) ([386499f](https://github.com/googleapis/google-cloud-python/commit/386499fe11525d2646b79f9e41829b2c055f63b2)) + ## [1.20.0](https://github.com/googleapis/google-cloud-python/compare/google-cloud-recaptcha-enterprise-v1.19.1...google-cloud-recaptcha-enterprise-v1.20.0) (2024-04-24) diff --git a/packages/google-cloud-recaptcha-enterprise/README.rst b/packages/google-cloud-recaptcha-enterprise/README.rst index ba4d9166d434..5a19245ba7a0 100644 --- a/packages/google-cloud-recaptcha-enterprise/README.rst +++ b/packages/google-cloud-recaptcha-enterprise/README.rst @@ -15,7 +15,7 @@ Python Client for reCAPTCHA Enterprise .. |versions| image:: https://img.shields.io/pypi/pyversions/google-cloud-recaptcha-enterprise.svg :target: https://pypi.org/project/google-cloud-recaptcha-enterprise/ .. _reCAPTCHA Enterprise: https://cloud.google.com/recaptcha-enterprise -.. _Client Library Documentation: https://cloud.google.com/python/docs/reference/recaptchaenterprise/latest +.. _Client Library Documentation: https://cloud.google.com/python/docs/reference/recaptchaenterprise/latest/summary_overview .. _Product Documentation: https://cloud.google.com/recaptcha-enterprise Quick Start diff --git a/packages/google-cloud-recaptcha-enterprise/docs/index.rst b/packages/google-cloud-recaptcha-enterprise/docs/index.rst index b08590a94948..25c957ba45c7 100644 --- a/packages/google-cloud-recaptcha-enterprise/docs/index.rst +++ b/packages/google-cloud-recaptcha-enterprise/docs/index.rst @@ -21,3 +21,8 @@ For a list of all ``google-cloud-recaptcha-enterprise`` releases: :maxdepth: 2 CHANGELOG + +.. toctree:: + :hidden: + + summary_overview.md diff --git a/packages/google-cloud-recaptcha-enterprise/docs/summary_overview.md b/packages/google-cloud-recaptcha-enterprise/docs/summary_overview.md new file mode 100644 index 000000000000..b9b12d0ef7dc --- /dev/null +++ b/packages/google-cloud-recaptcha-enterprise/docs/summary_overview.md @@ -0,0 +1,22 @@ +[ +This is a templated file. Adding content to this file may result in it being +reverted. 
Instead, if you want to place additional content, create an +"overview_content.md" file in `docs/` directory. The Sphinx tool will +pick up on the content and merge the content. +]: # + +# reCAPTCHA Enterprise API + +Overview of the APIs available for reCAPTCHA Enterprise API. + +## All entries + +Classes, methods and properties & attributes for +reCAPTCHA Enterprise API. + +[classes](https://cloud.google.com/python/docs/reference/recaptchaenterprise/latest/summary_class.html) + +[methods](https://cloud.google.com/python/docs/reference/recaptchaenterprise/latest/summary_method.html) + +[properties and +attributes](https://cloud.google.com/python/docs/reference/recaptchaenterprise/latest/summary_property.html) diff --git a/packages/google-cloud-recaptcha-enterprise/google/cloud/recaptchaenterprise/__init__.py b/packages/google-cloud-recaptcha-enterprise/google/cloud/recaptchaenterprise/__init__.py index 9317ce66092d..f01824e24d20 100644 --- a/packages/google-cloud-recaptcha-enterprise/google/cloud/recaptchaenterprise/__init__.py +++ b/packages/google-cloud-recaptcha-enterprise/google/cloud/recaptchaenterprise/__init__.py @@ -60,6 +60,7 @@ ListRelatedAccountGroupsResponse, Metrics, MigrateKeyRequest, + PhoneFraudAssessment, PrivatePasswordLeakVerification, RelatedAccountGroup, RelatedAccountGroupMembership, @@ -72,6 +73,7 @@ ScoreMetrics, SearchRelatedAccountGroupMembershipsRequest, SearchRelatedAccountGroupMembershipsResponse, + SmsTollFraudVerdict, TestingOptions, TokenProperties, TransactionData, @@ -122,6 +124,7 @@ "ListRelatedAccountGroupsResponse", "Metrics", "MigrateKeyRequest", + "PhoneFraudAssessment", "PrivatePasswordLeakVerification", "RelatedAccountGroup", "RelatedAccountGroupMembership", @@ -134,6 +137,7 @@ "ScoreMetrics", "SearchRelatedAccountGroupMembershipsRequest", "SearchRelatedAccountGroupMembershipsResponse", + "SmsTollFraudVerdict", "TestingOptions", "TokenProperties", "TransactionData", diff --git a/packages/google-cloud-recaptcha-enterprise/google/cloud/recaptchaenterprise/gapic_version.py b/packages/google-cloud-recaptcha-enterprise/google/cloud/recaptchaenterprise/gapic_version.py index 558c8aab67c5..785067d93b3c 100644 --- a/packages/google-cloud-recaptcha-enterprise/google/cloud/recaptchaenterprise/gapic_version.py +++ b/packages/google-cloud-recaptcha-enterprise/google/cloud/recaptchaenterprise/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "0.0.0" # {x-release-please-version} +__version__ = "1.21.0" # {x-release-please-version} diff --git a/packages/google-cloud-recaptcha-enterprise/google/cloud/recaptchaenterprise_v1/__init__.py b/packages/google-cloud-recaptcha-enterprise/google/cloud/recaptchaenterprise_v1/__init__.py index e02c0de848fe..32e4af3d140c 100644 --- a/packages/google-cloud-recaptcha-enterprise/google/cloud/recaptchaenterprise_v1/__init__.py +++ b/packages/google-cloud-recaptcha-enterprise/google/cloud/recaptchaenterprise_v1/__init__.py @@ -58,6 +58,7 @@ ListRelatedAccountGroupsResponse, Metrics, MigrateKeyRequest, + PhoneFraudAssessment, PrivatePasswordLeakVerification, RelatedAccountGroup, RelatedAccountGroupMembership, @@ -70,6 +71,7 @@ ScoreMetrics, SearchRelatedAccountGroupMembershipsRequest, SearchRelatedAccountGroupMembershipsResponse, + SmsTollFraudVerdict, TestingOptions, TokenProperties, TransactionData, @@ -119,6 +121,7 @@ "ListRelatedAccountGroupsResponse", "Metrics", "MigrateKeyRequest", + "PhoneFraudAssessment", "PrivatePasswordLeakVerification", "RecaptchaEnterpriseServiceClient", "RelatedAccountGroup", @@ -132,6 +135,7 @@ "ScoreMetrics", "SearchRelatedAccountGroupMembershipsRequest", "SearchRelatedAccountGroupMembershipsResponse", + "SmsTollFraudVerdict", "TestingOptions", "TokenProperties", "TransactionData", diff --git a/packages/google-cloud-recaptcha-enterprise/google/cloud/recaptchaenterprise_v1/gapic_version.py b/packages/google-cloud-recaptcha-enterprise/google/cloud/recaptchaenterprise_v1/gapic_version.py index 558c8aab67c5..785067d93b3c 100644 --- a/packages/google-cloud-recaptcha-enterprise/google/cloud/recaptchaenterprise_v1/gapic_version.py +++ b/packages/google-cloud-recaptcha-enterprise/google/cloud/recaptchaenterprise_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "0.0.0" # {x-release-please-version} +__version__ = "1.21.0" # {x-release-please-version} diff --git a/packages/google-cloud-recaptcha-enterprise/google/cloud/recaptchaenterprise_v1/types/__init__.py b/packages/google-cloud-recaptcha-enterprise/google/cloud/recaptchaenterprise_v1/types/__init__.py index 51cf59b9d955..c035b7638c0e 100644 --- a/packages/google-cloud-recaptcha-enterprise/google/cloud/recaptchaenterprise_v1/types/__init__.py +++ b/packages/google-cloud-recaptcha-enterprise/google/cloud/recaptchaenterprise_v1/types/__init__.py @@ -49,6 +49,7 @@ ListRelatedAccountGroupsResponse, Metrics, MigrateKeyRequest, + PhoneFraudAssessment, PrivatePasswordLeakVerification, RelatedAccountGroup, RelatedAccountGroupMembership, @@ -61,6 +62,7 @@ ScoreMetrics, SearchRelatedAccountGroupMembershipsRequest, SearchRelatedAccountGroupMembershipsResponse, + SmsTollFraudVerdict, TestingOptions, TokenProperties, TransactionData, @@ -109,6 +111,7 @@ "ListRelatedAccountGroupsResponse", "Metrics", "MigrateKeyRequest", + "PhoneFraudAssessment", "PrivatePasswordLeakVerification", "RelatedAccountGroup", "RelatedAccountGroupMembership", @@ -121,6 +124,7 @@ "ScoreMetrics", "SearchRelatedAccountGroupMembershipsRequest", "SearchRelatedAccountGroupMembershipsResponse", + "SmsTollFraudVerdict", "TestingOptions", "TokenProperties", "TransactionData", diff --git a/packages/google-cloud-recaptcha-enterprise/google/cloud/recaptchaenterprise_v1/types/recaptchaenterprise.py b/packages/google-cloud-recaptcha-enterprise/google/cloud/recaptchaenterprise_v1/types/recaptchaenterprise.py index d48abbc9acf9..8f4b9a655592 100644 --- a/packages/google-cloud-recaptcha-enterprise/google/cloud/recaptchaenterprise_v1/types/recaptchaenterprise.py +++ b/packages/google-cloud-recaptcha-enterprise/google/cloud/recaptchaenterprise_v1/types/recaptchaenterprise.py @@ -41,6 +41,8 @@ "TokenProperties", "FraudPreventionAssessment", "FraudSignals", + "SmsTollFraudVerdict", + "PhoneFraudAssessment", "AccountDefenderAssessment", "CreateKeyRequest", "ListKeysRequest", @@ -674,6 +676,10 @@ class Assessment(proto.Message): fraud_signals (google.cloud.recaptchaenterprise_v1.types.FraudSignals): Output only. Fraud Signals specific to the users involved in a payment transaction. + phone_fraud_assessment (google.cloud.recaptchaenterprise_v1.types.PhoneFraudAssessment): + Output only. Assessment returned when a site key, a token, + and a phone number as ``user_id`` are provided. Account + defender and SMS toll fraud protection need to be enabled. """ name: str = proto.Field( @@ -725,6 +731,11 @@ class Assessment(proto.Message): number=13, message="FraudSignals", ) + phone_fraud_assessment: "PhoneFraudAssessment" = proto.Field( + proto.MESSAGE, + number=12, + message="PhoneFraudAssessment", + ) class Event(proto.Message): @@ -1615,6 +1626,58 @@ class CardLabel(proto.Enum): ) +class SmsTollFraudVerdict(proto.Message): + r"""Information about SMS toll fraud. + + Attributes: + risk (float): + Output only. Probability of an SMS event + being fraudulent. Values are from 0.0 (lowest) + to 1.0 (highest). + reasons (MutableSequence[google.cloud.recaptchaenterprise_v1.types.SmsTollFraudVerdict.SmsTollFraudReason]): + Output only. Reasons contributing to the SMS + toll fraud verdict. + """ + + class SmsTollFraudReason(proto.Enum): + r"""Reasons contributing to the SMS toll fraud verdict. 
+ + Values: + SMS_TOLL_FRAUD_REASON_UNSPECIFIED (0): + Default unspecified reason + INVALID_PHONE_NUMBER (1): + The provided phone number was invalid + """ + SMS_TOLL_FRAUD_REASON_UNSPECIFIED = 0 + INVALID_PHONE_NUMBER = 1 + + risk: float = proto.Field( + proto.FLOAT, + number=1, + ) + reasons: MutableSequence[SmsTollFraudReason] = proto.RepeatedField( + proto.ENUM, + number=2, + enum=SmsTollFraudReason, + ) + + +class PhoneFraudAssessment(proto.Message): + r"""Assessment for Phone Fraud + + Attributes: + sms_toll_fraud_verdict (google.cloud.recaptchaenterprise_v1.types.SmsTollFraudVerdict): + Output only. Assessment of this phone event + for risk of SMS toll fraud. + """ + + sms_toll_fraud_verdict: "SmsTollFraudVerdict" = proto.Field( + proto.MESSAGE, + number=1, + message="SmsTollFraudVerdict", + ) + + class AccountDefenderAssessment(proto.Message): r"""Account defender risk assessment. diff --git a/packages/google-cloud-recaptcha-enterprise/noxfile.py b/packages/google-cloud-recaptcha-enterprise/noxfile.py index 1e6cd48d0529..67b7265f7586 100644 --- a/packages/google-cloud-recaptcha-enterprise/noxfile.py +++ b/packages/google-cloud-recaptcha-enterprise/noxfile.py @@ -160,14 +160,28 @@ def install_unittest_dependencies(session, *constraints): session.install("-e", ".", *constraints) -def default(session): +@nox.session(python=UNIT_TEST_PYTHON_VERSIONS) +@nox.parametrize( + "protobuf_implementation", + ["python", "upb", "cpp"], +) +def unit(session, protobuf_implementation): # Install all test dependencies, then install this package in-place. + if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12"): + session.skip("cpp implementation is not supported in python 3.11+") + constraints_path = str( CURRENT_DIRECTORY / "testing" / f"constraints-{session.python}.txt" ) install_unittest_dependencies(session, "-c", constraints_path) + # TODO(https://github.com/googleapis/synthtool/issues/1976): + # Remove the 'cpp' implementation once support for Protobuf 3.x is dropped. + # The 'cpp' implementation requires Protobuf<4. + if protobuf_implementation == "cpp": + session.install("protobuf<4") + # Run py.test against the unit tests. session.run( "py.test", @@ -181,15 +195,12 @@ def default(session): "--cov-fail-under=0", os.path.join("tests", "unit"), *session.posargs, + env={ + "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation, + }, ) -@nox.session(python=UNIT_TEST_PYTHON_VERSIONS) -def unit(session): - """Run the unit test suite.""" - default(session) - - def install_systemtest_dependencies(session, *constraints): # Use pre-release gRPC for system tests. # Exclude version 1.52.0rc1 which has a known issue. @@ -358,9 +369,16 @@ def docfx(session): @nox.session(python="3.12") -def prerelease_deps(session): +@nox.parametrize( + "protobuf_implementation", + ["python", "upb", "cpp"], +) +def prerelease_deps(session, protobuf_implementation): """Run all tests with prerelease versions of dependencies installed.""" + if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12"): + session.skip("cpp implementation is not supported in python 3.11+") + # Install all dependencies session.install("-e", ".[all, tests, tracing]") unit_deps_all = UNIT_TEST_STANDARD_DEPENDENCIES + UNIT_TEST_EXTERNAL_DEPENDENCIES @@ -397,9 +415,9 @@ def prerelease_deps(session): "protobuf", # dependency of grpc "six", + "grpc-google-iam-v1", "googleapis-common-protos", - # Exclude version 1.52.0rc1 which has a known issue. 
See https://github.com/grpc/grpc/issues/32163 - "grpcio!=1.52.0rc1", + "grpcio", "grpcio-status", "google-api-core", "google-auth", @@ -425,4 +443,10 @@ def prerelease_deps(session): session.run("python", "-c", "import grpc; print(grpc.__version__)") session.run("python", "-c", "import google.auth; print(google.auth.__version__)") - session.run("py.test", "tests/unit") + session.run( + "py.test", + "tests/unit", + env={ + "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation, + }, + ) diff --git a/packages/google-cloud-recaptcha-enterprise/samples/generated_samples/snippet_metadata_google.cloud.recaptchaenterprise.v1.json b/packages/google-cloud-recaptcha-enterprise/samples/generated_samples/snippet_metadata_google.cloud.recaptchaenterprise.v1.json index aa554428ea59..4e9642cd442b 100644 --- a/packages/google-cloud-recaptcha-enterprise/samples/generated_samples/snippet_metadata_google.cloud.recaptchaenterprise.v1.json +++ b/packages/google-cloud-recaptcha-enterprise/samples/generated_samples/snippet_metadata_google.cloud.recaptchaenterprise.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-recaptcha-enterprise", - "version": "0.1.0" + "version": "1.21.0" }, "snippets": [ { diff --git a/packages/google-cloud-recaptcha-enterprise/tests/unit/gapic/recaptchaenterprise_v1/test_recaptcha_enterprise_service.py b/packages/google-cloud-recaptcha-enterprise/tests/unit/gapic/recaptchaenterprise_v1/test_recaptcha_enterprise_service.py index a5627dfe3e0e..c31c7630c22a 100644 --- a/packages/google-cloud-recaptcha-enterprise/tests/unit/gapic/recaptchaenterprise_v1/test_recaptcha_enterprise_service.py +++ b/packages/google-cloud-recaptcha-enterprise/tests/unit/gapic/recaptchaenterprise_v1/test_recaptcha_enterprise_service.py @@ -2738,13 +2738,13 @@ def test_list_keys_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_keys(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -5898,13 +5898,13 @@ def test_list_firewall_policies_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_firewall_policies(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -8084,13 +8084,13 @@ def test_list_related_account_groups_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_related_account_groups(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -8693,13 +8693,13 @@ def test_list_related_account_group_memberships_pager(transport_name: str = "grp RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) 
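
The reworked unit and prerelease_deps sessions above parametrize over protobuf implementations and forward PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION to py.test. A small sketch for confirming inside a test run that protobuf actually picked the requested implementation is below; it relies on protobuf's internal api_implementation module, which is not a public API and may change.

```python
import os

from google.protobuf.internal import api_implementation


def assert_expected_protobuf_implementation() -> None:
    """Fail fast if protobuf is not using the implementation the session requested.

    PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION is the env var set by the nox
    sessions above; api_implementation.Type() reports what protobuf chose
    at import time ("python", "upb", or "cpp").
    """
    requested = os.environ.get("PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION")
    actual = api_implementation.Type()
    if requested and requested != actual:
        raise RuntimeError(
            f"protobuf implementation mismatch: requested {requested!r}, got {actual!r}"
        )
```
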
pager = client.list_related_account_group_memberships(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -9320,13 +9320,13 @@ def test_search_related_account_group_memberships_pager(transport_name: str = "g RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("project", ""),)), ) pager = client.search_related_account_group_memberships(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 diff --git a/packages/google-cloud-recommendations-ai/tests/unit/gapic/recommendationengine_v1beta1/test_catalog_service.py b/packages/google-cloud-recommendations-ai/tests/unit/gapic/recommendationengine_v1beta1/test_catalog_service.py index 78588ca5c3d1..b29bb64d4e2a 100644 --- a/packages/google-cloud-recommendations-ai/tests/unit/gapic/recommendationengine_v1beta1/test_catalog_service.py +++ b/packages/google-cloud-recommendations-ai/tests/unit/gapic/recommendationengine_v1beta1/test_catalog_service.py @@ -2394,13 +2394,13 @@ def test_list_catalog_items_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_catalog_items(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 diff --git a/packages/google-cloud-recommendations-ai/tests/unit/gapic/recommendationengine_v1beta1/test_prediction_api_key_registry.py b/packages/google-cloud-recommendations-ai/tests/unit/gapic/recommendationengine_v1beta1/test_prediction_api_key_registry.py index e0006ce86fb3..6bd3ae81e2fc 100644 --- a/packages/google-cloud-recommendations-ai/tests/unit/gapic/recommendationengine_v1beta1/test_prediction_api_key_registry.py +++ b/packages/google-cloud-recommendations-ai/tests/unit/gapic/recommendationengine_v1beta1/test_prediction_api_key_registry.py @@ -2099,13 +2099,13 @@ def test_list_prediction_api_key_registrations_pager(transport_name: str = "grpc RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_prediction_api_key_registrations(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 diff --git a/packages/google-cloud-recommendations-ai/tests/unit/gapic/recommendationengine_v1beta1/test_prediction_service.py b/packages/google-cloud-recommendations-ai/tests/unit/gapic/recommendationengine_v1beta1/test_prediction_service.py index f66db3751468..1ef84e411858 100644 --- a/packages/google-cloud-recommendations-ai/tests/unit/gapic/recommendationengine_v1beta1/test_prediction_service.py +++ b/packages/google-cloud-recommendations-ai/tests/unit/gapic/recommendationengine_v1beta1/test_prediction_service.py @@ -1594,13 +1594,13 @@ def test_predict_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("name", ""),)), ) 
pager = client.predict(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 diff --git a/packages/google-cloud-recommendations-ai/tests/unit/gapic/recommendationengine_v1beta1/test_user_event_service.py b/packages/google-cloud-recommendations-ai/tests/unit/gapic/recommendationengine_v1beta1/test_user_event_service.py index d28e420c1a3c..d87a07944612 100644 --- a/packages/google-cloud-recommendations-ai/tests/unit/gapic/recommendationengine_v1beta1/test_user_event_service.py +++ b/packages/google-cloud-recommendations-ai/tests/unit/gapic/recommendationengine_v1beta1/test_user_event_service.py @@ -2403,13 +2403,13 @@ def test_list_user_events_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_user_events(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 diff --git a/packages/google-cloud-recommender/tests/unit/gapic/recommender_v1/test_recommender.py b/packages/google-cloud-recommender/tests/unit/gapic/recommender_v1/test_recommender.py index e75ad1da504f..0339bb126528 100644 --- a/packages/google-cloud-recommender/tests/unit/gapic/recommender_v1/test_recommender.py +++ b/packages/google-cloud-recommender/tests/unit/gapic/recommender_v1/test_recommender.py @@ -1504,13 +1504,13 @@ def test_list_insights_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_insights(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -2938,13 +2938,13 @@ def test_list_recommendations_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_recommendations(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 diff --git a/packages/google-cloud-recommender/tests/unit/gapic/recommender_v1beta1/test_recommender.py b/packages/google-cloud-recommender/tests/unit/gapic/recommender_v1beta1/test_recommender.py index 5ca0fcbe1a84..935837c7fd3e 100644 --- a/packages/google-cloud-recommender/tests/unit/gapic/recommender_v1beta1/test_recommender.py +++ b/packages/google-cloud-recommender/tests/unit/gapic/recommender_v1beta1/test_recommender.py @@ -1504,13 +1504,13 @@ def test_list_insights_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_insights(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -2938,13 +2938,13 @@ def test_list_recommendations_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + 
expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_recommendations(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -6772,10 +6772,10 @@ def test_list_recommenders_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () + expected_metadata = () pager = client.list_recommenders(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -7208,10 +7208,10 @@ def test_list_insight_types_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () + expected_metadata = () pager = client.list_insight_types(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 diff --git a/packages/google-cloud-redis-cluster/CHANGELOG.md b/packages/google-cloud-redis-cluster/CHANGELOG.md index f508072b1048..9edf58c367cd 100644 --- a/packages/google-cloud-redis-cluster/CHANGELOG.md +++ b/packages/google-cloud-redis-cluster/CHANGELOG.md @@ -1,5 +1,19 @@ # Changelog +## [0.1.6](https://github.com/googleapis/google-cloud-python/compare/google-cloud-redis-cluster-v0.1.5...google-cloud-redis-cluster-v0.1.6) (2024-06-05) + + +### Features + +* [Memorystore for Redis Cluster] Add persistence support ([6a70a4b](https://github.com/googleapis/google-cloud-python/commit/6a70a4b7c968ba28af488f6ab5ac78c66a8c2b98)) +* [Memorystore for Redis Cluster] Add support for different node types ([6a70a4b](https://github.com/googleapis/google-cloud-python/commit/6a70a4b7c968ba28af488f6ab5ac78c66a8c2b98)) +* [Memorystore for Redis Cluster] Get details of certificate authority from redis cluster ([6a70a4b](https://github.com/googleapis/google-cloud-python/commit/6a70a4b7c968ba28af488f6ab5ac78c66a8c2b98)) + + +### Documentation + +* [Memorystore for Redis Cluster] size_gb field shows the size of the cluster rounded up to the next integer, precise_size_gb field will show the exact size of the cluster ([6a70a4b](https://github.com/googleapis/google-cloud-python/commit/6a70a4b7c968ba28af488f6ab5ac78c66a8c2b98)) + ## [0.1.5](https://github.com/googleapis/google-cloud-python/compare/google-cloud-redis-cluster-v0.1.4...google-cloud-redis-cluster-v0.1.5) (2024-03-05) diff --git a/packages/google-cloud-redis-cluster/google/cloud/redis_cluster/__init__.py b/packages/google-cloud-redis-cluster/google/cloud/redis_cluster/__init__.py index d88d0b9dc375..035c70c004a0 100644 --- a/packages/google-cloud-redis-cluster/google/cloud/redis_cluster/__init__.py +++ b/packages/google-cloud-redis-cluster/google/cloud/redis_cluster/__init__.py @@ -26,27 +26,35 @@ ) from google.cloud.redis_cluster_v1.types.cloud_redis_cluster import ( AuthorizationMode, + CertificateAuthority, Cluster, + ClusterPersistenceConfig, CreateClusterRequest, DeleteClusterRequest, DiscoveryEndpoint, + GetClusterCertificateAuthorityRequest, GetClusterRequest, ListClustersRequest, ListClustersResponse, + NodeType, OperationMetadata, PscConfig, PscConnection, TransitEncryptionMode, 
UpdateClusterRequest, + ZoneDistributionConfig, ) __all__ = ( "CloudRedisClusterClient", "CloudRedisClusterAsyncClient", + "CertificateAuthority", "Cluster", + "ClusterPersistenceConfig", "CreateClusterRequest", "DeleteClusterRequest", "DiscoveryEndpoint", + "GetClusterCertificateAuthorityRequest", "GetClusterRequest", "ListClustersRequest", "ListClustersResponse", @@ -54,6 +62,8 @@ "PscConfig", "PscConnection", "UpdateClusterRequest", + "ZoneDistributionConfig", "AuthorizationMode", + "NodeType", "TransitEncryptionMode", ) diff --git a/packages/google-cloud-redis-cluster/google/cloud/redis_cluster/gapic_version.py b/packages/google-cloud-redis-cluster/google/cloud/redis_cluster/gapic_version.py index 558c8aab67c5..51d2795b9d6b 100644 --- a/packages/google-cloud-redis-cluster/google/cloud/redis_cluster/gapic_version.py +++ b/packages/google-cloud-redis-cluster/google/cloud/redis_cluster/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.0.0" # {x-release-please-version} +__version__ = "0.1.6" # {x-release-please-version} diff --git a/packages/google-cloud-redis-cluster/google/cloud/redis_cluster_v1/__init__.py b/packages/google-cloud-redis-cluster/google/cloud/redis_cluster_v1/__init__.py index 929c2d593c14..6122779a1997 100644 --- a/packages/google-cloud-redis-cluster/google/cloud/redis_cluster_v1/__init__.py +++ b/packages/google-cloud-redis-cluster/google/cloud/redis_cluster_v1/__init__.py @@ -24,34 +24,44 @@ ) from .types.cloud_redis_cluster import ( AuthorizationMode, + CertificateAuthority, Cluster, + ClusterPersistenceConfig, CreateClusterRequest, DeleteClusterRequest, DiscoveryEndpoint, + GetClusterCertificateAuthorityRequest, GetClusterRequest, ListClustersRequest, ListClustersResponse, + NodeType, OperationMetadata, PscConfig, PscConnection, TransitEncryptionMode, UpdateClusterRequest, + ZoneDistributionConfig, ) __all__ = ( "CloudRedisClusterAsyncClient", "AuthorizationMode", + "CertificateAuthority", "CloudRedisClusterClient", "Cluster", + "ClusterPersistenceConfig", "CreateClusterRequest", "DeleteClusterRequest", "DiscoveryEndpoint", + "GetClusterCertificateAuthorityRequest", "GetClusterRequest", "ListClustersRequest", "ListClustersResponse", + "NodeType", "OperationMetadata", "PscConfig", "PscConnection", "TransitEncryptionMode", "UpdateClusterRequest", + "ZoneDistributionConfig", ) diff --git a/packages/google-cloud-redis-cluster/google/cloud/redis_cluster_v1/gapic_metadata.json b/packages/google-cloud-redis-cluster/google/cloud/redis_cluster_v1/gapic_metadata.json index bfb2ea820e75..c134ce3dedb6 100644 --- a/packages/google-cloud-redis-cluster/google/cloud/redis_cluster_v1/gapic_metadata.json +++ b/packages/google-cloud-redis-cluster/google/cloud/redis_cluster_v1/gapic_metadata.json @@ -25,6 +25,11 @@ "get_cluster" ] }, + "GetClusterCertificateAuthority": { + "methods": [ + "get_cluster_certificate_authority" + ] + }, "ListClusters": { "methods": [ "list_clusters" @@ -55,6 +60,11 @@ "get_cluster" ] }, + "GetClusterCertificateAuthority": { + "methods": [ + "get_cluster_certificate_authority" + ] + }, "ListClusters": { "methods": [ "list_clusters" @@ -85,6 +95,11 @@ "get_cluster" ] }, + "GetClusterCertificateAuthority": { + "methods": [ + "get_cluster_certificate_authority" + ] + }, "ListClusters": { "methods": [ "list_clusters" diff --git a/packages/google-cloud-redis-cluster/google/cloud/redis_cluster_v1/gapic_version.py 
b/packages/google-cloud-redis-cluster/google/cloud/redis_cluster_v1/gapic_version.py index 558c8aab67c5..51d2795b9d6b 100644 --- a/packages/google-cloud-redis-cluster/google/cloud/redis_cluster_v1/gapic_version.py +++ b/packages/google-cloud-redis-cluster/google/cloud/redis_cluster_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.0.0" # {x-release-please-version} +__version__ = "0.1.6" # {x-release-please-version} diff --git a/packages/google-cloud-redis-cluster/google/cloud/redis_cluster_v1/services/cloud_redis_cluster/async_client.py b/packages/google-cloud-redis-cluster/google/cloud/redis_cluster_v1/services/cloud_redis_cluster/async_client.py index d09f07c1566a..39126efb1d99 100644 --- a/packages/google-cloud-redis-cluster/google/cloud/redis_cluster_v1/services/cloud_redis_cluster/async_client.py +++ b/packages/google-cloud-redis-cluster/google/cloud/redis_cluster_v1/services/cloud_redis_cluster/async_client.py @@ -100,6 +100,12 @@ class CloudRedisClusterAsyncClient: _DEFAULT_ENDPOINT_TEMPLATE = CloudRedisClusterClient._DEFAULT_ENDPOINT_TEMPLATE _DEFAULT_UNIVERSE = CloudRedisClusterClient._DEFAULT_UNIVERSE + certificate_authority_path = staticmethod( + CloudRedisClusterClient.certificate_authority_path + ) + parse_certificate_authority_path = staticmethod( + CloudRedisClusterClient.parse_certificate_authority_path + ) cluster_path = staticmethod(CloudRedisClusterClient.cluster_path) parse_cluster_path = staticmethod(CloudRedisClusterClient.parse_cluster_path) common_billing_account_path = staticmethod( @@ -969,6 +975,117 @@ async def sample_create_cluster(): # Done; return the response. return response + async def get_cluster_certificate_authority( + self, + request: Optional[ + Union[cloud_redis_cluster.GetClusterCertificateAuthorityRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> cloud_redis_cluster.CertificateAuthority: + r"""Gets the details of certificate authority information + for Redis cluster. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import redis_cluster_v1 + + async def sample_get_cluster_certificate_authority(): + # Create a client + client = redis_cluster_v1.CloudRedisClusterAsyncClient() + + # Initialize request argument(s) + request = redis_cluster_v1.GetClusterCertificateAuthorityRequest( + name="name_value", + ) + + # Make the request + response = await client.get_cluster_certificate_authority(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.redis_cluster_v1.types.GetClusterCertificateAuthorityRequest, dict]]): + The request object. Request for + [GetClusterCertificateAuthorityRequest][CloudRedis.GetClusterCertificateAuthorityRequest]. + name (:class:`str`): + Required. 
Redis cluster certificate authority resource + name using the form: + ``projects/{project_id}/locations/{location_id}/clusters/{cluster_id}/certificateAuthority`` + where ``location_id`` refers to a GCP region. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.redis_cluster_v1.types.CertificateAuthority: + Redis cluster certificate authority + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance( + request, cloud_redis_cluster.GetClusterCertificateAuthorityRequest + ): + request = cloud_redis_cluster.GetClusterCertificateAuthorityRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.get_cluster_certificate_authority + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
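
The new get_cluster_certificate_authority method returns a CertificateAuthority whose managed_server_ca carries PEM-encoded CA chains (ca_certs, each a CertChain with certificates ordered leaf to root). A rough sketch using the synchronous client and the new certificate_authority_path helper follows; the project, location, and cluster IDs are placeholders.

```python
from google.cloud import redis_cluster_v1


def fetch_cluster_ca_pem(project: str, location: str, cluster: str) -> str:
    """Return the managed server CA chain for a Redis cluster as one PEM string."""
    client = redis_cluster_v1.CloudRedisClusterClient()
    # Builds projects/{project}/locations/{location}/clusters/{cluster}/certificateAuthority.
    name = client.certificate_authority_path(project, location, cluster)
    ca = client.get_cluster_certificate_authority(name=name)
    pem_blocks = []
    for chain in ca.managed_server_ca.ca_certs:
        # Each chain holds PEM strings ordered from leaf to root.
        pem_blocks.extend(chain.certificates)
    return "\n".join(pem_blocks)


# Example with placeholder IDs:
# pem = fetch_cluster_ca_pem("my-project", "us-central1", "my-cluster")
```
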
+ return response + async def list_operations( self, request: Optional[operations_pb2.ListOperationsRequest] = None, diff --git a/packages/google-cloud-redis-cluster/google/cloud/redis_cluster_v1/services/cloud_redis_cluster/client.py b/packages/google-cloud-redis-cluster/google/cloud/redis_cluster_v1/services/cloud_redis_cluster/client.py index 4145237dcd72..e0dcd55a8ed8 100644 --- a/packages/google-cloud-redis-cluster/google/cloud/redis_cluster_v1/services/cloud_redis_cluster/client.py +++ b/packages/google-cloud-redis-cluster/google/cloud/redis_cluster_v1/services/cloud_redis_cluster/client.py @@ -220,6 +220,28 @@ def transport(self) -> CloudRedisClusterTransport: """ return self._transport + @staticmethod + def certificate_authority_path( + project: str, + location: str, + cluster: str, + ) -> str: + """Returns a fully-qualified certificate_authority string.""" + return "projects/{project}/locations/{location}/clusters/{cluster}/certificateAuthority".format( + project=project, + location=location, + cluster=cluster, + ) + + @staticmethod + def parse_certificate_authority_path(path: str) -> Dict[str, str]: + """Parses a certificate_authority path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/clusters/(?P.+?)/certificateAuthority$", + path, + ) + return m.groupdict() if m else {} + @staticmethod def cluster_path( project: str, @@ -1375,6 +1397,116 @@ def sample_create_cluster(): # Done; return the response. return response + def get_cluster_certificate_authority( + self, + request: Optional[ + Union[cloud_redis_cluster.GetClusterCertificateAuthorityRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> cloud_redis_cluster.CertificateAuthority: + r"""Gets the details of certificate authority information + for Redis cluster. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import redis_cluster_v1 + + def sample_get_cluster_certificate_authority(): + # Create a client + client = redis_cluster_v1.CloudRedisClusterClient() + + # Initialize request argument(s) + request = redis_cluster_v1.GetClusterCertificateAuthorityRequest( + name="name_value", + ) + + # Make the request + response = client.get_cluster_certificate_authority(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.redis_cluster_v1.types.GetClusterCertificateAuthorityRequest, dict]): + The request object. Request for + [GetClusterCertificateAuthorityRequest][CloudRedis.GetClusterCertificateAuthorityRequest]. + name (str): + Required. Redis cluster certificate authority resource + name using the form: + ``projects/{project_id}/locations/{location_id}/clusters/{cluster_id}/certificateAuthority`` + where ``location_id`` refers to a GCP region. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.redis_cluster_v1.types.CertificateAuthority: + Redis cluster certificate authority + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance( + request, cloud_redis_cluster.GetClusterCertificateAuthorityRequest + ): + request = cloud_redis_cluster.GetClusterCertificateAuthorityRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.get_cluster_certificate_authority + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + def __enter__(self) -> "CloudRedisClusterClient": return self diff --git a/packages/google-cloud-redis-cluster/google/cloud/redis_cluster_v1/services/cloud_redis_cluster/transports/base.py b/packages/google-cloud-redis-cluster/google/cloud/redis_cluster_v1/services/cloud_redis_cluster/transports/base.py index 534e48b609fb..0f1e7f17cc1a 100644 --- a/packages/google-cloud-redis-cluster/google/cloud/redis_cluster_v1/services/cloud_redis_cluster/transports/base.py +++ b/packages/google-cloud-redis-cluster/google/cloud/redis_cluster_v1/services/cloud_redis_cluster/transports/base.py @@ -153,6 +153,11 @@ def _prep_wrapped_messages(self, client_info): default_timeout=600.0, client_info=client_info, ), + self.get_cluster_certificate_authority: gapic_v1.method.wrap_method( + self.get_cluster_certificate_authority, + default_timeout=600.0, + client_info=client_info, + ), } def close(self): @@ -217,6 +222,18 @@ def create_cluster( ]: raise NotImplementedError() + @property + def get_cluster_certificate_authority( + self, + ) -> Callable[ + [cloud_redis_cluster.GetClusterCertificateAuthorityRequest], + Union[ + cloud_redis_cluster.CertificateAuthority, + Awaitable[cloud_redis_cluster.CertificateAuthority], + ], + ]: + raise NotImplementedError() + @property def list_operations( self, diff --git a/packages/google-cloud-redis-cluster/google/cloud/redis_cluster_v1/services/cloud_redis_cluster/transports/grpc.py b/packages/google-cloud-redis-cluster/google/cloud/redis_cluster_v1/services/cloud_redis_cluster/transports/grpc.py index ae7000cede07..dfda013dbd0d 100644 --- a/packages/google-cloud-redis-cluster/google/cloud/redis_cluster_v1/services/cloud_redis_cluster/transports/grpc.py +++ b/packages/google-cloud-redis-cluster/google/cloud/redis_cluster_v1/services/cloud_redis_cluster/transports/grpc.py @@ -434,6 +434,39 @@ def create_cluster( ) return self._stubs["create_cluster"] + @property + def get_cluster_certificate_authority( + self, + ) -> Callable[ + [cloud_redis_cluster.GetClusterCertificateAuthorityRequest], + cloud_redis_cluster.CertificateAuthority, + ]: + r"""Return a callable for the get cluster certificate + authority method over gRPC. + + Gets the details of certificate authority information + for Redis cluster. + + Returns: + Callable[[~.GetClusterCertificateAuthorityRequest], + ~.CertificateAuthority]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "get_cluster_certificate_authority" not in self._stubs: + self._stubs[ + "get_cluster_certificate_authority" + ] = self.grpc_channel.unary_unary( + "/google.cloud.redis.cluster.v1.CloudRedisCluster/GetClusterCertificateAuthority", + request_serializer=cloud_redis_cluster.GetClusterCertificateAuthorityRequest.serialize, + response_deserializer=cloud_redis_cluster.CertificateAuthority.deserialize, + ) + return self._stubs["get_cluster_certificate_authority"] + def close(self): self.grpc_channel.close() diff --git a/packages/google-cloud-redis-cluster/google/cloud/redis_cluster_v1/services/cloud_redis_cluster/transports/grpc_asyncio.py b/packages/google-cloud-redis-cluster/google/cloud/redis_cluster_v1/services/cloud_redis_cluster/transports/grpc_asyncio.py index 5fc7418b7bd2..dcc783f975fb 100644 --- a/packages/google-cloud-redis-cluster/google/cloud/redis_cluster_v1/services/cloud_redis_cluster/transports/grpc_asyncio.py +++ b/packages/google-cloud-redis-cluster/google/cloud/redis_cluster_v1/services/cloud_redis_cluster/transports/grpc_asyncio.py @@ -448,6 +448,39 @@ def create_cluster( ) return self._stubs["create_cluster"] + @property + def get_cluster_certificate_authority( + self, + ) -> Callable[ + [cloud_redis_cluster.GetClusterCertificateAuthorityRequest], + Awaitable[cloud_redis_cluster.CertificateAuthority], + ]: + r"""Return a callable for the get cluster certificate + authority method over gRPC. + + Gets the details of certificate authority information + for Redis cluster. + + Returns: + Callable[[~.GetClusterCertificateAuthorityRequest], + Awaitable[~.CertificateAuthority]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "get_cluster_certificate_authority" not in self._stubs: + self._stubs[ + "get_cluster_certificate_authority" + ] = self.grpc_channel.unary_unary( + "/google.cloud.redis.cluster.v1.CloudRedisCluster/GetClusterCertificateAuthority", + request_serializer=cloud_redis_cluster.GetClusterCertificateAuthorityRequest.serialize, + response_deserializer=cloud_redis_cluster.CertificateAuthority.deserialize, + ) + return self._stubs["get_cluster_certificate_authority"] + def _prep_wrapped_messages(self, client_info): """Precompute the wrapped methods, overriding the base class method to use async wrappers.""" self._wrapped_methods = { @@ -476,6 +509,11 @@ def _prep_wrapped_messages(self, client_info): default_timeout=600.0, client_info=client_info, ), + self.get_cluster_certificate_authority: gapic_v1.method_async.wrap_method( + self.get_cluster_certificate_authority, + default_timeout=600.0, + client_info=client_info, + ), } def close(self): diff --git a/packages/google-cloud-redis-cluster/google/cloud/redis_cluster_v1/services/cloud_redis_cluster/transports/rest.py b/packages/google-cloud-redis-cluster/google/cloud/redis_cluster_v1/services/cloud_redis_cluster/transports/rest.py index 68b357f9bdd4..b280f43ab323 100644 --- a/packages/google-cloud-redis-cluster/google/cloud/redis_cluster_v1/services/cloud_redis_cluster/transports/rest.py +++ b/packages/google-cloud-redis-cluster/google/cloud/redis_cluster_v1/services/cloud_redis_cluster/transports/rest.py @@ -96,6 +96,14 @@ def post_get_cluster(self, response): logging.log(f"Received response: {response}") return response + def pre_get_cluster_certificate_authority(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_cluster_certificate_authority(self, response): + logging.log(f"Received response: {response}") + return response + def pre_list_clusters(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata @@ -187,6 +195,32 @@ def post_get_cluster( """ return response + def pre_get_cluster_certificate_authority( + self, + request: cloud_redis_cluster.GetClusterCertificateAuthorityRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + cloud_redis_cluster.GetClusterCertificateAuthorityRequest, + Sequence[Tuple[str, str]], + ]: + """Pre-rpc interceptor for get_cluster_certificate_authority + + Override in a subclass to manipulate the request or metadata + before they are sent to the CloudRedisCluster server. + """ + return request, metadata + + def post_get_cluster_certificate_authority( + self, response: cloud_redis_cluster.CertificateAuthority + ) -> cloud_redis_cluster.CertificateAuthority: + """Post-rpc interceptor for get_cluster_certificate_authority + + Override in a subclass to manipulate the response + after it is returned by the CloudRedisCluster server but before + it is returned to user code. 
+ """ + return response + def pre_list_clusters( self, request: cloud_redis_cluster.ListClustersRequest, @@ -810,6 +844,97 @@ def __call__( resp = self._interceptor.post_get_cluster(resp) return resp + class _GetClusterCertificateAuthority(CloudRedisClusterRestStub): + def __hash__(self): + return hash("GetClusterCertificateAuthority") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: cloud_redis_cluster.GetClusterCertificateAuthorityRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> cloud_redis_cluster.CertificateAuthority: + r"""Call the get cluster certificate + authority method over HTTP. + + Args: + request (~.cloud_redis_cluster.GetClusterCertificateAuthorityRequest): + The request object. Request for + [GetClusterCertificateAuthorityRequest][CloudRedis.GetClusterCertificateAuthorityRequest]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.cloud_redis_cluster.CertificateAuthority: + Redis cluster certificate authority + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/clusters/*/certificateAuthority}", + }, + ] + request, metadata = self._interceptor.pre_get_cluster_certificate_authority( + request, metadata + ) + pb_request = cloud_redis_cluster.GetClusterCertificateAuthorityRequest.pb( + request + ) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = cloud_redis_cluster.CertificateAuthority() + pb_resp = cloud_redis_cluster.CertificateAuthority.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_cluster_certificate_authority(resp) + return resp + class _ListClusters(CloudRedisClusterRestStub): def __hash__(self): return hash("ListClusters") @@ -1014,6 +1139,17 @@ def get_cluster( # In C++ this would require a dynamic_cast return self._GetCluster(self._session, self._host, self._interceptor) # type: ignore + @property + def get_cluster_certificate_authority( + self, + ) -> Callable[ + [cloud_redis_cluster.GetClusterCertificateAuthorityRequest], + cloud_redis_cluster.CertificateAuthority, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetClusterCertificateAuthority(self._session, self._host, self._interceptor) # type: ignore + @property def list_clusters( self, diff --git a/packages/google-cloud-redis-cluster/google/cloud/redis_cluster_v1/types/__init__.py b/packages/google-cloud-redis-cluster/google/cloud/redis_cluster_v1/types/__init__.py index 6b145cb8610b..b7bd90233770 100644 --- a/packages/google-cloud-redis-cluster/google/cloud/redis_cluster_v1/types/__init__.py +++ b/packages/google-cloud-redis-cluster/google/cloud/redis_cluster_v1/types/__init__.py @@ -15,25 +15,33 @@ # from .cloud_redis_cluster import ( AuthorizationMode, + CertificateAuthority, Cluster, + ClusterPersistenceConfig, CreateClusterRequest, DeleteClusterRequest, DiscoveryEndpoint, + GetClusterCertificateAuthorityRequest, GetClusterRequest, ListClustersRequest, ListClustersResponse, + NodeType, OperationMetadata, PscConfig, PscConnection, TransitEncryptionMode, UpdateClusterRequest, + ZoneDistributionConfig, ) __all__ = ( + "CertificateAuthority", "Cluster", + "ClusterPersistenceConfig", "CreateClusterRequest", "DeleteClusterRequest", "DiscoveryEndpoint", + "GetClusterCertificateAuthorityRequest", "GetClusterRequest", "ListClustersRequest", "ListClustersResponse", @@ -41,6 +49,8 @@ "PscConfig", "PscConnection", "UpdateClusterRequest", + "ZoneDistributionConfig", "AuthorizationMode", + "NodeType", "TransitEncryptionMode", ) diff --git a/packages/google-cloud-redis-cluster/google/cloud/redis_cluster_v1/types/cloud_redis_cluster.py b/packages/google-cloud-redis-cluster/google/cloud/redis_cluster_v1/types/cloud_redis_cluster.py index 075f7649ebfc..9b7b1155cf1d 100644 --- a/packages/google-cloud-redis-cluster/google/cloud/redis_cluster_v1/types/cloud_redis_cluster.py +++ b/packages/google-cloud-redis-cluster/google/cloud/redis_cluster_v1/types/cloud_redis_cluster.py @@ -25,6 +25,7 @@ package="google.cloud.redis.cluster.v1", manifest={ "AuthorizationMode", + "NodeType", "TransitEncryptionMode", "CreateClusterRequest", "ListClustersRequest", @@ -32,11 +33,15 @@ "UpdateClusterRequest", "GetClusterRequest", "DeleteClusterRequest", + "GetClusterCertificateAuthorityRequest", "Cluster", "PscConfig", "DiscoveryEndpoint", "PscConnection", "OperationMetadata", + "CertificateAuthority", + "ClusterPersistenceConfig", + "ZoneDistributionConfig", }, ) @@ -57,6 +62,28 @@ class AuthorizationMode(proto.Enum): AUTH_MODE_DISABLED = 2 +class NodeType(proto.Enum): + r"""NodeType of a redis cluster node, + + Values: + NODE_TYPE_UNSPECIFIED (0): + No description available. 
+ REDIS_SHARED_CORE_NANO (1): + Redis shared core nano node_type. + REDIS_HIGHMEM_MEDIUM (2): + Redis highmem medium node_type. + REDIS_HIGHMEM_XLARGE (3): + Redis highmem xlarge node_type. + REDIS_STANDARD_SMALL (4): + Redis standard small node_type. + """ + NODE_TYPE_UNSPECIFIED = 0 + REDIS_SHARED_CORE_NANO = 1 + REDIS_HIGHMEM_MEDIUM = 2 + REDIS_HIGHMEM_XLARGE = 3 + REDIS_STANDARD_SMALL = 4 + + class TransitEncryptionMode(proto.Enum): r"""Available mode of in-transit encryption. @@ -270,6 +297,24 @@ class DeleteClusterRequest(proto.Message): ) +class GetClusterCertificateAuthorityRequest(proto.Message): + r"""Request for + [GetClusterCertificateAuthorityRequest][CloudRedis.GetClusterCertificateAuthorityRequest]. + + Attributes: + name (str): + Required. Redis cluster certificate authority resource name + using the form: + ``projects/{project_id}/locations/{location_id}/clusters/{cluster_id}/certificateAuthority`` + where ``location_id`` refers to a GCP region. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + class Cluster(proto.Message): r"""A cluster instance. @@ -305,7 +350,7 @@ class Cluster(proto.Message): disabled for the cluster. size_gb (int): Output only. Redis memory size in GB for the - entire cluster. + entire cluster rounded up to the next integer. This field is a member of `oneof`_ ``_size_gb``. shard_count (int): @@ -330,6 +375,30 @@ class Cluster(proto.Message): state_info (google.cloud.redis_cluster_v1.types.Cluster.StateInfo): Output only. Additional information about the current state of the cluster. + node_type (google.cloud.redis_cluster_v1.types.NodeType): + Optional. The type of a redis node in the + cluster. NodeType determines the underlying + machine-type of a redis node. + persistence_config (google.cloud.redis_cluster_v1.types.ClusterPersistenceConfig): + Optional. Persistence config (RDB, AOF) for + the cluster. + redis_configs (MutableMapping[str, str]): + Optional. Key/Value pairs of customer + overrides for mutable Redis Configs + precise_size_gb (float): + Output only. Precise value of redis memory + size in GB for the entire cluster. + + This field is a member of `oneof`_ ``_precise_size_gb``. + zone_distribution_config (google.cloud.redis_cluster_v1.types.ZoneDistributionConfig): + Optional. This config will be used to + determine how the customer wants us to + distribute cluster resources within the region. + deletion_protection_enabled (bool): + Optional. The delete operation will fail when + the value is set to true. + + This field is a member of `oneof`_ ``_deletion_protection_enabled``. 
""" class State(proto.Enum): @@ -466,6 +535,36 @@ class UpdateInfo(proto.Message): number=18, message=StateInfo, ) + node_type: "NodeType" = proto.Field( + proto.ENUM, + number=19, + enum="NodeType", + ) + persistence_config: "ClusterPersistenceConfig" = proto.Field( + proto.MESSAGE, + number=20, + message="ClusterPersistenceConfig", + ) + redis_configs: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=21, + ) + precise_size_gb: float = proto.Field( + proto.DOUBLE, + number=22, + optional=True, + ) + zone_distribution_config: "ZoneDistributionConfig" = proto.Field( + proto.MESSAGE, + number=23, + message="ZoneDistributionConfig", + ) + deletion_protection_enabled: bool = proto.Field( + proto.BOOL, + number=25, + optional=True, + ) class PscConfig(proto.Message): @@ -625,4 +724,242 @@ class OperationMetadata(proto.Message): ) +class CertificateAuthority(proto.Message): + r"""Redis cluster certificate authority + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + managed_server_ca (google.cloud.redis_cluster_v1.types.CertificateAuthority.ManagedCertificateAuthority): + + This field is a member of `oneof`_ ``server_ca``. + name (str): + Identifier. Unique name of the resource in this scope + including project, location and cluster using the form: + ``projects/{project}/locations/{location}/clusters/{cluster}/certificateAuthority`` + """ + + class ManagedCertificateAuthority(proto.Message): + r""" + + Attributes: + ca_certs (MutableSequence[google.cloud.redis_cluster_v1.types.CertificateAuthority.ManagedCertificateAuthority.CertChain]): + The PEM encoded CA certificate chains for + redis managed server authentication + """ + + class CertChain(proto.Message): + r""" + + Attributes: + certificates (MutableSequence[str]): + The certificates that form the CA chain, from + leaf to root order. + """ + + certificates: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=1, + ) + + ca_certs: MutableSequence[ + "CertificateAuthority.ManagedCertificateAuthority.CertChain" + ] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="CertificateAuthority.ManagedCertificateAuthority.CertChain", + ) + + managed_server_ca: ManagedCertificateAuthority = proto.Field( + proto.MESSAGE, + number=1, + oneof="server_ca", + message=ManagedCertificateAuthority, + ) + name: str = proto.Field( + proto.STRING, + number=2, + ) + + +class ClusterPersistenceConfig(proto.Message): + r"""Configuration of the persistence functionality. + + Attributes: + mode (google.cloud.redis_cluster_v1.types.ClusterPersistenceConfig.PersistenceMode): + Optional. The mode of persistence. + rdb_config (google.cloud.redis_cluster_v1.types.ClusterPersistenceConfig.RDBConfig): + Optional. RDB configuration. This field will + be ignored if mode is not RDB. + aof_config (google.cloud.redis_cluster_v1.types.ClusterPersistenceConfig.AOFConfig): + Optional. AOF configuration. This field will + be ignored if mode is not AOF. + """ + + class PersistenceMode(proto.Enum): + r"""Available persistence modes. + + Values: + PERSISTENCE_MODE_UNSPECIFIED (0): + Not set. + DISABLED (1): + Persistence is disabled, and any snapshot + data is deleted. + RDB (2): + RDB based persistence is enabled. + AOF (3): + AOF based persistence is enabled. 
+ """ + PERSISTENCE_MODE_UNSPECIFIED = 0 + DISABLED = 1 + RDB = 2 + AOF = 3 + + class RDBConfig(proto.Message): + r"""Configuration of the RDB based persistence. + + Attributes: + rdb_snapshot_period (google.cloud.redis_cluster_v1.types.ClusterPersistenceConfig.RDBConfig.SnapshotPeriod): + Optional. Period between RDB snapshots. + rdb_snapshot_start_time (google.protobuf.timestamp_pb2.Timestamp): + Optional. The time that the first snapshot + was/will be attempted, and to which future + snapshots will be aligned. If not provided, the + current time will be used. + """ + + class SnapshotPeriod(proto.Enum): + r"""Available snapshot periods. + + Values: + SNAPSHOT_PERIOD_UNSPECIFIED (0): + Not set. + ONE_HOUR (1): + One hour. + SIX_HOURS (2): + Six hours. + TWELVE_HOURS (3): + Twelve hours. + TWENTY_FOUR_HOURS (4): + Twenty four hours. + """ + SNAPSHOT_PERIOD_UNSPECIFIED = 0 + ONE_HOUR = 1 + SIX_HOURS = 2 + TWELVE_HOURS = 3 + TWENTY_FOUR_HOURS = 4 + + rdb_snapshot_period: "ClusterPersistenceConfig.RDBConfig.SnapshotPeriod" = ( + proto.Field( + proto.ENUM, + number=1, + enum="ClusterPersistenceConfig.RDBConfig.SnapshotPeriod", + ) + ) + rdb_snapshot_start_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + + class AOFConfig(proto.Message): + r"""Configuration of the AOF based persistence. + + Attributes: + append_fsync (google.cloud.redis_cluster_v1.types.ClusterPersistenceConfig.AOFConfig.AppendFsync): + Optional. fsync configuration. + """ + + class AppendFsync(proto.Enum): + r"""Available fsync modes. + + Values: + APPEND_FSYNC_UNSPECIFIED (0): + Not set. Default: EVERYSEC + NO (1): + Never fsync. Normally Linux will flush data + every 30 seconds with this configuration, but + it's up to the kernel's exact tuning. + EVERYSEC (2): + fsync every second. Fast enough, and you may + lose 1 second of data if there is a disaster + ALWAYS (3): + fsync every time new commands are appended to + the AOF. It has the best data loss protection at + the cost of performance + """ + APPEND_FSYNC_UNSPECIFIED = 0 + NO = 1 + EVERYSEC = 2 + ALWAYS = 3 + + append_fsync: "ClusterPersistenceConfig.AOFConfig.AppendFsync" = proto.Field( + proto.ENUM, + number=1, + enum="ClusterPersistenceConfig.AOFConfig.AppendFsync", + ) + + mode: PersistenceMode = proto.Field( + proto.ENUM, + number=1, + enum=PersistenceMode, + ) + rdb_config: RDBConfig = proto.Field( + proto.MESSAGE, + number=2, + message=RDBConfig, + ) + aof_config: AOFConfig = proto.Field( + proto.MESSAGE, + number=3, + message=AOFConfig, + ) + + +class ZoneDistributionConfig(proto.Message): + r"""Zone distribution config for allocation of cluster resources. + + Attributes: + mode (google.cloud.redis_cluster_v1.types.ZoneDistributionConfig.ZoneDistributionMode): + Optional. The mode of zone distribution. Defaults to + MULTI_ZONE, when not specified. + zone (str): + Optional. When SINGLE ZONE distribution is selected, zone + field would be used to allocate all resources in that zone. + This is not applicable to MULTI_ZONE, and would be ignored + for MULTI_ZONE clusters. + """ + + class ZoneDistributionMode(proto.Enum): + r"""Defines various modes of zone distribution. + Currently supports two modes, can be expanded in future to + support more types of distribution modes. + design doc: go/same-zone-cluster + + Values: + ZONE_DISTRIBUTION_MODE_UNSPECIFIED (0): + Not Set. Default: MULTI_ZONE + MULTI_ZONE (1): + Distribute all resources across 3 zones + picked at random, within the region. 
+ SINGLE_ZONE (2): + Distribute all resources in a single zone. + The zone field must be specified, when this mode + is selected. + """ + ZONE_DISTRIBUTION_MODE_UNSPECIFIED = 0 + MULTI_ZONE = 1 + SINGLE_ZONE = 2 + + mode: ZoneDistributionMode = proto.Field( + proto.ENUM, + number=1, + enum=ZoneDistributionMode, + ) + zone: str = proto.Field( + proto.STRING, + number=2, + ) + + __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-redis-cluster/google/cloud/redis_cluster_v1beta1/__init__.py b/packages/google-cloud-redis-cluster/google/cloud/redis_cluster_v1beta1/__init__.py index 70363e520fad..57add71f7bec 100644 --- a/packages/google-cloud-redis-cluster/google/cloud/redis_cluster_v1beta1/__init__.py +++ b/packages/google-cloud-redis-cluster/google/cloud/redis_cluster_v1beta1/__init__.py @@ -24,34 +24,44 @@ ) from .types.cloud_redis_cluster import ( AuthorizationMode, + CertificateAuthority, Cluster, + ClusterPersistenceConfig, CreateClusterRequest, DeleteClusterRequest, DiscoveryEndpoint, + GetClusterCertificateAuthorityRequest, GetClusterRequest, ListClustersRequest, ListClustersResponse, + NodeType, OperationMetadata, PscConfig, PscConnection, TransitEncryptionMode, UpdateClusterRequest, + ZoneDistributionConfig, ) __all__ = ( "CloudRedisClusterAsyncClient", "AuthorizationMode", + "CertificateAuthority", "CloudRedisClusterClient", "Cluster", + "ClusterPersistenceConfig", "CreateClusterRequest", "DeleteClusterRequest", "DiscoveryEndpoint", + "GetClusterCertificateAuthorityRequest", "GetClusterRequest", "ListClustersRequest", "ListClustersResponse", + "NodeType", "OperationMetadata", "PscConfig", "PscConnection", "TransitEncryptionMode", "UpdateClusterRequest", + "ZoneDistributionConfig", ) diff --git a/packages/google-cloud-redis-cluster/google/cloud/redis_cluster_v1beta1/gapic_metadata.json b/packages/google-cloud-redis-cluster/google/cloud/redis_cluster_v1beta1/gapic_metadata.json index 673d281018ed..57715b8b0180 100644 --- a/packages/google-cloud-redis-cluster/google/cloud/redis_cluster_v1beta1/gapic_metadata.json +++ b/packages/google-cloud-redis-cluster/google/cloud/redis_cluster_v1beta1/gapic_metadata.json @@ -25,6 +25,11 @@ "get_cluster" ] }, + "GetClusterCertificateAuthority": { + "methods": [ + "get_cluster_certificate_authority" + ] + }, "ListClusters": { "methods": [ "list_clusters" @@ -55,6 +60,11 @@ "get_cluster" ] }, + "GetClusterCertificateAuthority": { + "methods": [ + "get_cluster_certificate_authority" + ] + }, "ListClusters": { "methods": [ "list_clusters" @@ -85,6 +95,11 @@ "get_cluster" ] }, + "GetClusterCertificateAuthority": { + "methods": [ + "get_cluster_certificate_authority" + ] + }, "ListClusters": { "methods": [ "list_clusters" diff --git a/packages/google-cloud-redis-cluster/google/cloud/redis_cluster_v1beta1/gapic_version.py b/packages/google-cloud-redis-cluster/google/cloud/redis_cluster_v1beta1/gapic_version.py index 558c8aab67c5..51d2795b9d6b 100644 --- a/packages/google-cloud-redis-cluster/google/cloud/redis_cluster_v1beta1/gapic_version.py +++ b/packages/google-cloud-redis-cluster/google/cloud/redis_cluster_v1beta1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
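
The Cluster message above gains node_type, persistence_config, redis_configs, precise_size_gb, zone_distribution_config, and deletion_protection_enabled. A minimal sketch of populating the new writable fields when building a cluster resource is below; required networking and sizing fields are omitted for brevity and would still need to be set before the message is passed to create_cluster.

```python
from google.cloud import redis_cluster_v1

# Only the newly added fields are populated here; required networking and
# sizing fields are intentionally left out of this sketch.
cluster = redis_cluster_v1.Cluster(
    node_type=redis_cluster_v1.NodeType.REDIS_HIGHMEM_MEDIUM,
    persistence_config=redis_cluster_v1.ClusterPersistenceConfig(
        mode=redis_cluster_v1.ClusterPersistenceConfig.PersistenceMode.RDB,
        rdb_config=redis_cluster_v1.ClusterPersistenceConfig.RDBConfig(
            rdb_snapshot_period=(
                redis_cluster_v1.ClusterPersistenceConfig.RDBConfig.SnapshotPeriod.SIX_HOURS
            ),
        ),
    ),
    zone_distribution_config=redis_cluster_v1.ZoneDistributionConfig(
        mode=redis_cluster_v1.ZoneDistributionConfig.ZoneDistributionMode.MULTI_ZONE,
    ),
    # With deletion protection enabled, delete operations on the cluster fail.
    deletion_protection_enabled=True,
)
```
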
# -__version__ = "0.0.0" # {x-release-please-version} +__version__ = "0.1.6" # {x-release-please-version} diff --git a/packages/google-cloud-redis-cluster/google/cloud/redis_cluster_v1beta1/services/cloud_redis_cluster/async_client.py b/packages/google-cloud-redis-cluster/google/cloud/redis_cluster_v1beta1/services/cloud_redis_cluster/async_client.py index d550bc94fc30..0df6afc949c2 100644 --- a/packages/google-cloud-redis-cluster/google/cloud/redis_cluster_v1beta1/services/cloud_redis_cluster/async_client.py +++ b/packages/google-cloud-redis-cluster/google/cloud/redis_cluster_v1beta1/services/cloud_redis_cluster/async_client.py @@ -100,6 +100,12 @@ class CloudRedisClusterAsyncClient: _DEFAULT_ENDPOINT_TEMPLATE = CloudRedisClusterClient._DEFAULT_ENDPOINT_TEMPLATE _DEFAULT_UNIVERSE = CloudRedisClusterClient._DEFAULT_UNIVERSE + certificate_authority_path = staticmethod( + CloudRedisClusterClient.certificate_authority_path + ) + parse_certificate_authority_path = staticmethod( + CloudRedisClusterClient.parse_certificate_authority_path + ) cluster_path = staticmethod(CloudRedisClusterClient.cluster_path) parse_cluster_path = staticmethod(CloudRedisClusterClient.parse_cluster_path) common_billing_account_path = staticmethod( @@ -969,6 +975,117 @@ async def sample_create_cluster(): # Done; return the response. return response + async def get_cluster_certificate_authority( + self, + request: Optional[ + Union[cloud_redis_cluster.GetClusterCertificateAuthorityRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> cloud_redis_cluster.CertificateAuthority: + r"""Gets the details of certificate authority information + for Redis cluster. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import redis_cluster_v1beta1 + + async def sample_get_cluster_certificate_authority(): + # Create a client + client = redis_cluster_v1beta1.CloudRedisClusterAsyncClient() + + # Initialize request argument(s) + request = redis_cluster_v1beta1.GetClusterCertificateAuthorityRequest( + name="name_value", + ) + + # Make the request + response = await client.get_cluster_certificate_authority(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.redis_cluster_v1beta1.types.GetClusterCertificateAuthorityRequest, dict]]): + The request object. Request for + [GetClusterCertificateAuthorityRequest][CloudRedis.GetClusterCertificateAuthorityRequest]. + name (:class:`str`): + Required. Redis cluster certificate authority resource + name using the form: + ``projects/{project_id}/locations/{location_id}/clusters/{cluster_id}/certificateAuthority`` + where ``location_id`` refers to a GCP region. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.redis_cluster_v1beta1.types.CertificateAuthority: + Redis cluster certificate authority + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance( + request, cloud_redis_cluster.GetClusterCertificateAuthorityRequest + ): + request = cloud_redis_cluster.GetClusterCertificateAuthorityRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.get_cluster_certificate_authority + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + async def list_operations( self, request: Optional[operations_pb2.ListOperationsRequest] = None, diff --git a/packages/google-cloud-redis-cluster/google/cloud/redis_cluster_v1beta1/services/cloud_redis_cluster/client.py b/packages/google-cloud-redis-cluster/google/cloud/redis_cluster_v1beta1/services/cloud_redis_cluster/client.py index 62d2caba35f4..a0edca844b7a 100644 --- a/packages/google-cloud-redis-cluster/google/cloud/redis_cluster_v1beta1/services/cloud_redis_cluster/client.py +++ b/packages/google-cloud-redis-cluster/google/cloud/redis_cluster_v1beta1/services/cloud_redis_cluster/client.py @@ -220,6 +220,28 @@ def transport(self) -> CloudRedisClusterTransport: """ return self._transport + @staticmethod + def certificate_authority_path( + project: str, + location: str, + cluster: str, + ) -> str: + """Returns a fully-qualified certificate_authority string.""" + return "projects/{project}/locations/{location}/clusters/{cluster}/certificateAuthority".format( + project=project, + location=location, + cluster=cluster, + ) + + @staticmethod + def parse_certificate_authority_path(path: str) -> Dict[str, str]: + """Parses a certificate_authority path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/clusters/(?P.+?)/certificateAuthority$", + path, + ) + return m.groupdict() if m else {} + @staticmethod def cluster_path( project: str, @@ -1375,6 +1397,116 @@ def sample_create_cluster(): # Done; return the response. 
return response + def get_cluster_certificate_authority( + self, + request: Optional[ + Union[cloud_redis_cluster.GetClusterCertificateAuthorityRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> cloud_redis_cluster.CertificateAuthority: + r"""Gets the details of certificate authority information + for Redis cluster. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import redis_cluster_v1beta1 + + def sample_get_cluster_certificate_authority(): + # Create a client + client = redis_cluster_v1beta1.CloudRedisClusterClient() + + # Initialize request argument(s) + request = redis_cluster_v1beta1.GetClusterCertificateAuthorityRequest( + name="name_value", + ) + + # Make the request + response = client.get_cluster_certificate_authority(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.redis_cluster_v1beta1.types.GetClusterCertificateAuthorityRequest, dict]): + The request object. Request for + [GetClusterCertificateAuthorityRequest][CloudRedis.GetClusterCertificateAuthorityRequest]. + name (str): + Required. Redis cluster certificate authority resource + name using the form: + ``projects/{project_id}/locations/{location_id}/clusters/{cluster_id}/certificateAuthority`` + where ``location_id`` refers to a GCP region. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.redis_cluster_v1beta1.types.CertificateAuthority: + Redis cluster certificate authority + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance( + request, cloud_redis_cluster.GetClusterCertificateAuthorityRequest + ): + request = cloud_redis_cluster.GetClusterCertificateAuthorityRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.get_cluster_certificate_authority + ] + + # Certain fields should be provided within the metadata header; + # add these here. 
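        # The routing header below serializes the request's `name` into an
        # "x-goog-request-params" metadata entry, which the service uses for
        # request routing.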
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + def __enter__(self) -> "CloudRedisClusterClient": return self diff --git a/packages/google-cloud-redis-cluster/google/cloud/redis_cluster_v1beta1/services/cloud_redis_cluster/transports/base.py b/packages/google-cloud-redis-cluster/google/cloud/redis_cluster_v1beta1/services/cloud_redis_cluster/transports/base.py index 0f53895e948e..5e9b6b07334e 100644 --- a/packages/google-cloud-redis-cluster/google/cloud/redis_cluster_v1beta1/services/cloud_redis_cluster/transports/base.py +++ b/packages/google-cloud-redis-cluster/google/cloud/redis_cluster_v1beta1/services/cloud_redis_cluster/transports/base.py @@ -153,6 +153,11 @@ def _prep_wrapped_messages(self, client_info): default_timeout=600.0, client_info=client_info, ), + self.get_cluster_certificate_authority: gapic_v1.method.wrap_method( + self.get_cluster_certificate_authority, + default_timeout=600.0, + client_info=client_info, + ), } def close(self): @@ -217,6 +222,18 @@ def create_cluster( ]: raise NotImplementedError() + @property + def get_cluster_certificate_authority( + self, + ) -> Callable[ + [cloud_redis_cluster.GetClusterCertificateAuthorityRequest], + Union[ + cloud_redis_cluster.CertificateAuthority, + Awaitable[cloud_redis_cluster.CertificateAuthority], + ], + ]: + raise NotImplementedError() + @property def list_operations( self, diff --git a/packages/google-cloud-redis-cluster/google/cloud/redis_cluster_v1beta1/services/cloud_redis_cluster/transports/grpc.py b/packages/google-cloud-redis-cluster/google/cloud/redis_cluster_v1beta1/services/cloud_redis_cluster/transports/grpc.py index 1962178cdfb1..1172034d147e 100644 --- a/packages/google-cloud-redis-cluster/google/cloud/redis_cluster_v1beta1/services/cloud_redis_cluster/transports/grpc.py +++ b/packages/google-cloud-redis-cluster/google/cloud/redis_cluster_v1beta1/services/cloud_redis_cluster/transports/grpc.py @@ -434,6 +434,39 @@ def create_cluster( ) return self._stubs["create_cluster"] + @property + def get_cluster_certificate_authority( + self, + ) -> Callable[ + [cloud_redis_cluster.GetClusterCertificateAuthorityRequest], + cloud_redis_cluster.CertificateAuthority, + ]: + r"""Return a callable for the get cluster certificate + authority method over gRPC. + + Gets the details of certificate authority information + for Redis cluster. + + Returns: + Callable[[~.GetClusterCertificateAuthorityRequest], + ~.CertificateAuthority]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
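        # The stub is created lazily on first access and cached in self._stubs,
        # so subsequent calls reuse the same channel callable instead of
        # registering a new one on the gRPC channel.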
+ if "get_cluster_certificate_authority" not in self._stubs: + self._stubs[ + "get_cluster_certificate_authority" + ] = self.grpc_channel.unary_unary( + "/google.cloud.redis.cluster.v1beta1.CloudRedisCluster/GetClusterCertificateAuthority", + request_serializer=cloud_redis_cluster.GetClusterCertificateAuthorityRequest.serialize, + response_deserializer=cloud_redis_cluster.CertificateAuthority.deserialize, + ) + return self._stubs["get_cluster_certificate_authority"] + def close(self): self.grpc_channel.close() diff --git a/packages/google-cloud-redis-cluster/google/cloud/redis_cluster_v1beta1/services/cloud_redis_cluster/transports/grpc_asyncio.py b/packages/google-cloud-redis-cluster/google/cloud/redis_cluster_v1beta1/services/cloud_redis_cluster/transports/grpc_asyncio.py index f1e95ba4b184..4d75303454ef 100644 --- a/packages/google-cloud-redis-cluster/google/cloud/redis_cluster_v1beta1/services/cloud_redis_cluster/transports/grpc_asyncio.py +++ b/packages/google-cloud-redis-cluster/google/cloud/redis_cluster_v1beta1/services/cloud_redis_cluster/transports/grpc_asyncio.py @@ -448,6 +448,39 @@ def create_cluster( ) return self._stubs["create_cluster"] + @property + def get_cluster_certificate_authority( + self, + ) -> Callable[ + [cloud_redis_cluster.GetClusterCertificateAuthorityRequest], + Awaitable[cloud_redis_cluster.CertificateAuthority], + ]: + r"""Return a callable for the get cluster certificate + authority method over gRPC. + + Gets the details of certificate authority information + for Redis cluster. + + Returns: + Callable[[~.GetClusterCertificateAuthorityRequest], + Awaitable[~.CertificateAuthority]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "get_cluster_certificate_authority" not in self._stubs: + self._stubs[ + "get_cluster_certificate_authority" + ] = self.grpc_channel.unary_unary( + "/google.cloud.redis.cluster.v1beta1.CloudRedisCluster/GetClusterCertificateAuthority", + request_serializer=cloud_redis_cluster.GetClusterCertificateAuthorityRequest.serialize, + response_deserializer=cloud_redis_cluster.CertificateAuthority.deserialize, + ) + return self._stubs["get_cluster_certificate_authority"] + def _prep_wrapped_messages(self, client_info): """Precompute the wrapped methods, overriding the base class method to use async wrappers.""" self._wrapped_methods = { @@ -476,6 +509,11 @@ def _prep_wrapped_messages(self, client_info): default_timeout=600.0, client_info=client_info, ), + self.get_cluster_certificate_authority: gapic_v1.method_async.wrap_method( + self.get_cluster_certificate_authority, + default_timeout=600.0, + client_info=client_info, + ), } def close(self): diff --git a/packages/google-cloud-redis-cluster/google/cloud/redis_cluster_v1beta1/services/cloud_redis_cluster/transports/rest.py b/packages/google-cloud-redis-cluster/google/cloud/redis_cluster_v1beta1/services/cloud_redis_cluster/transports/rest.py index 418694c46220..1ee6e010f804 100644 --- a/packages/google-cloud-redis-cluster/google/cloud/redis_cluster_v1beta1/services/cloud_redis_cluster/transports/rest.py +++ b/packages/google-cloud-redis-cluster/google/cloud/redis_cluster_v1beta1/services/cloud_redis_cluster/transports/rest.py @@ -96,6 +96,14 @@ def post_get_cluster(self, response): logging.log(f"Received response: {response}") return response + def pre_get_cluster_certificate_authority(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_cluster_certificate_authority(self, response): + logging.log(f"Received response: {response}") + return response + def pre_list_clusters(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata @@ -187,6 +195,32 @@ def post_get_cluster( """ return response + def pre_get_cluster_certificate_authority( + self, + request: cloud_redis_cluster.GetClusterCertificateAuthorityRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + cloud_redis_cluster.GetClusterCertificateAuthorityRequest, + Sequence[Tuple[str, str]], + ]: + """Pre-rpc interceptor for get_cluster_certificate_authority + + Override in a subclass to manipulate the request or metadata + before they are sent to the CloudRedisCluster server. + """ + return request, metadata + + def post_get_cluster_certificate_authority( + self, response: cloud_redis_cluster.CertificateAuthority + ) -> cloud_redis_cluster.CertificateAuthority: + """Post-rpc interceptor for get_cluster_certificate_authority + + Override in a subclass to manipulate the response + after it is returned by the CloudRedisCluster server but before + it is returned to user code. 
+ """ + return response + def pre_list_clusters( self, request: cloud_redis_cluster.ListClustersRequest, @@ -810,6 +844,97 @@ def __call__( resp = self._interceptor.post_get_cluster(resp) return resp + class _GetClusterCertificateAuthority(CloudRedisClusterRestStub): + def __hash__(self): + return hash("GetClusterCertificateAuthority") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: cloud_redis_cluster.GetClusterCertificateAuthorityRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> cloud_redis_cluster.CertificateAuthority: + r"""Call the get cluster certificate + authority method over HTTP. + + Args: + request (~.cloud_redis_cluster.GetClusterCertificateAuthorityRequest): + The request object. Request for + [GetClusterCertificateAuthorityRequest][CloudRedis.GetClusterCertificateAuthorityRequest]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.cloud_redis_cluster.CertificateAuthority: + Redis cluster certificate authority + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1beta1/{name=projects/*/locations/*/clusters/*/certificateAuthority}", + }, + ] + request, metadata = self._interceptor.pre_get_cluster_certificate_authority( + request, metadata + ) + pb_request = cloud_redis_cluster.GetClusterCertificateAuthorityRequest.pb( + request + ) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = cloud_redis_cluster.CertificateAuthority() + pb_resp = cloud_redis_cluster.CertificateAuthority.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_cluster_certificate_authority(resp) + return resp + class _ListClusters(CloudRedisClusterRestStub): def __hash__(self): return hash("ListClusters") @@ -1014,6 +1139,17 @@ def get_cluster( # In C++ this would require a dynamic_cast return self._GetCluster(self._session, self._host, self._interceptor) # type: ignore + @property + def get_cluster_certificate_authority( + self, + ) -> Callable[ + [cloud_redis_cluster.GetClusterCertificateAuthorityRequest], + cloud_redis_cluster.CertificateAuthority, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetClusterCertificateAuthority(self._session, self._host, self._interceptor) # type: ignore + @property def list_clusters( self, diff --git a/packages/google-cloud-redis-cluster/google/cloud/redis_cluster_v1beta1/types/__init__.py b/packages/google-cloud-redis-cluster/google/cloud/redis_cluster_v1beta1/types/__init__.py index 6b145cb8610b..b7bd90233770 100644 --- a/packages/google-cloud-redis-cluster/google/cloud/redis_cluster_v1beta1/types/__init__.py +++ b/packages/google-cloud-redis-cluster/google/cloud/redis_cluster_v1beta1/types/__init__.py @@ -15,25 +15,33 @@ # from .cloud_redis_cluster import ( AuthorizationMode, + CertificateAuthority, Cluster, + ClusterPersistenceConfig, CreateClusterRequest, DeleteClusterRequest, DiscoveryEndpoint, + GetClusterCertificateAuthorityRequest, GetClusterRequest, ListClustersRequest, ListClustersResponse, + NodeType, OperationMetadata, PscConfig, PscConnection, TransitEncryptionMode, UpdateClusterRequest, + ZoneDistributionConfig, ) __all__ = ( + "CertificateAuthority", "Cluster", + "ClusterPersistenceConfig", "CreateClusterRequest", "DeleteClusterRequest", "DiscoveryEndpoint", + "GetClusterCertificateAuthorityRequest", "GetClusterRequest", "ListClustersRequest", "ListClustersResponse", @@ -41,6 +49,8 @@ "PscConfig", "PscConnection", "UpdateClusterRequest", + "ZoneDistributionConfig", "AuthorizationMode", + "NodeType", "TransitEncryptionMode", ) diff --git a/packages/google-cloud-redis-cluster/google/cloud/redis_cluster_v1beta1/types/cloud_redis_cluster.py b/packages/google-cloud-redis-cluster/google/cloud/redis_cluster_v1beta1/types/cloud_redis_cluster.py index d5847f4c8bcf..bb33c98a38fe 100644 --- a/packages/google-cloud-redis-cluster/google/cloud/redis_cluster_v1beta1/types/cloud_redis_cluster.py +++ b/packages/google-cloud-redis-cluster/google/cloud/redis_cluster_v1beta1/types/cloud_redis_cluster.py @@ -25,6 +25,7 @@ package="google.cloud.redis.cluster.v1beta1", manifest={ "AuthorizationMode", + "NodeType", "TransitEncryptionMode", "CreateClusterRequest", "ListClustersRequest", @@ -32,11 +33,15 @@ "UpdateClusterRequest", "GetClusterRequest", "DeleteClusterRequest", + "GetClusterCertificateAuthorityRequest", "Cluster", "PscConfig", "DiscoveryEndpoint", "PscConnection", "OperationMetadata", + "CertificateAuthority", + "ClusterPersistenceConfig", + "ZoneDistributionConfig", }, ) @@ -57,6 +62,28 @@ class AuthorizationMode(proto.Enum): AUTH_MODE_DISABLED = 2 +class NodeType(proto.Enum): + r"""NodeType of a redis cluster node, + + Values: + NODE_TYPE_UNSPECIFIED (0): 
+ No description available. + REDIS_SHARED_CORE_NANO (1): + Redis shared core nano node_type. + REDIS_HIGHMEM_MEDIUM (2): + Redis highmem medium node_type. + REDIS_HIGHMEM_XLARGE (3): + Redis highmem xlarge node_type. + REDIS_STANDARD_SMALL (4): + Redis standard small node_type. + """ + NODE_TYPE_UNSPECIFIED = 0 + REDIS_SHARED_CORE_NANO = 1 + REDIS_HIGHMEM_MEDIUM = 2 + REDIS_HIGHMEM_XLARGE = 3 + REDIS_STANDARD_SMALL = 4 + + class TransitEncryptionMode(proto.Enum): r"""Available mode of in-transit encryption. @@ -270,6 +297,24 @@ class DeleteClusterRequest(proto.Message): ) +class GetClusterCertificateAuthorityRequest(proto.Message): + r"""Request for + [GetClusterCertificateAuthorityRequest][CloudRedis.GetClusterCertificateAuthorityRequest]. + + Attributes: + name (str): + Required. Redis cluster certificate authority resource name + using the form: + ``projects/{project_id}/locations/{location_id}/clusters/{cluster_id}/certificateAuthority`` + where ``location_id`` refers to a GCP region. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + class Cluster(proto.Message): r"""A cluster instance. @@ -305,7 +350,7 @@ class Cluster(proto.Message): disabled for the cluster. size_gb (int): Output only. Redis memory size in GB for the - entire cluster. + entire cluster rounded up to the next integer. This field is a member of `oneof`_ ``_size_gb``. shard_count (int): @@ -330,6 +375,30 @@ class Cluster(proto.Message): state_info (google.cloud.redis_cluster_v1beta1.types.Cluster.StateInfo): Output only. Additional information about the current state of the cluster. + node_type (google.cloud.redis_cluster_v1beta1.types.NodeType): + Optional. The type of a redis node in the + cluster. NodeType determines the underlying + machine-type of a redis node. + persistence_config (google.cloud.redis_cluster_v1beta1.types.ClusterPersistenceConfig): + Optional. Persistence config (RDB, AOF) for + the cluster. + redis_configs (MutableMapping[str, str]): + Optional. Key/Value pairs of customer + overrides for mutable Redis Configs + precise_size_gb (float): + Output only. Precise value of redis memory + size in GB for the entire cluster. + + This field is a member of `oneof`_ ``_precise_size_gb``. + zone_distribution_config (google.cloud.redis_cluster_v1beta1.types.ZoneDistributionConfig): + Optional. This config will be used to + determine how the customer wants us to + distribute cluster resources within the region. + deletion_protection_enabled (bool): + Optional. The delete operation will fail when + the value is set to true. + + This field is a member of `oneof`_ ``_deletion_protection_enabled``. 
""" class State(proto.Enum): @@ -466,6 +535,36 @@ class UpdateInfo(proto.Message): number=18, message=StateInfo, ) + node_type: "NodeType" = proto.Field( + proto.ENUM, + number=19, + enum="NodeType", + ) + persistence_config: "ClusterPersistenceConfig" = proto.Field( + proto.MESSAGE, + number=20, + message="ClusterPersistenceConfig", + ) + redis_configs: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=21, + ) + precise_size_gb: float = proto.Field( + proto.DOUBLE, + number=22, + optional=True, + ) + zone_distribution_config: "ZoneDistributionConfig" = proto.Field( + proto.MESSAGE, + number=23, + message="ZoneDistributionConfig", + ) + deletion_protection_enabled: bool = proto.Field( + proto.BOOL, + number=25, + optional=True, + ) class PscConfig(proto.Message): @@ -625,4 +724,242 @@ class OperationMetadata(proto.Message): ) +class CertificateAuthority(proto.Message): + r"""Redis cluster certificate authority + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + managed_server_ca (google.cloud.redis_cluster_v1beta1.types.CertificateAuthority.ManagedCertificateAuthority): + + This field is a member of `oneof`_ ``server_ca``. + name (str): + Identifier. Unique name of the resource in this scope + including project, location and cluster using the form: + ``projects/{project}/locations/{location}/clusters/{cluster}/certificateAuthority`` + """ + + class ManagedCertificateAuthority(proto.Message): + r""" + + Attributes: + ca_certs (MutableSequence[google.cloud.redis_cluster_v1beta1.types.CertificateAuthority.ManagedCertificateAuthority.CertChain]): + The PEM encoded CA certificate chains for + redis managed server authentication + """ + + class CertChain(proto.Message): + r""" + + Attributes: + certificates (MutableSequence[str]): + The certificates that form the CA chain, from + leaf to root order. + """ + + certificates: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=1, + ) + + ca_certs: MutableSequence[ + "CertificateAuthority.ManagedCertificateAuthority.CertChain" + ] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="CertificateAuthority.ManagedCertificateAuthority.CertChain", + ) + + managed_server_ca: ManagedCertificateAuthority = proto.Field( + proto.MESSAGE, + number=1, + oneof="server_ca", + message=ManagedCertificateAuthority, + ) + name: str = proto.Field( + proto.STRING, + number=2, + ) + + +class ClusterPersistenceConfig(proto.Message): + r"""Configuration of the persistence functionality. + + Attributes: + mode (google.cloud.redis_cluster_v1beta1.types.ClusterPersistenceConfig.PersistenceMode): + Optional. The mode of persistence. + rdb_config (google.cloud.redis_cluster_v1beta1.types.ClusterPersistenceConfig.RDBConfig): + Optional. RDB configuration. This field will + be ignored if mode is not RDB. + aof_config (google.cloud.redis_cluster_v1beta1.types.ClusterPersistenceConfig.AOFConfig): + Optional. AOF configuration. This field will + be ignored if mode is not AOF. + """ + + class PersistenceMode(proto.Enum): + r"""Available persistence modes. + + Values: + PERSISTENCE_MODE_UNSPECIFIED (0): + Not set. + DISABLED (1): + Persistence is disabled, and any snapshot + data is deleted. + RDB (2): + RDB based persistence is enabled. + AOF (3): + AOF based persistence is enabled. 
+ """ + PERSISTENCE_MODE_UNSPECIFIED = 0 + DISABLED = 1 + RDB = 2 + AOF = 3 + + class RDBConfig(proto.Message): + r"""Configuration of the RDB based persistence. + + Attributes: + rdb_snapshot_period (google.cloud.redis_cluster_v1beta1.types.ClusterPersistenceConfig.RDBConfig.SnapshotPeriod): + Optional. Period between RDB snapshots. + rdb_snapshot_start_time (google.protobuf.timestamp_pb2.Timestamp): + Optional. The time that the first snapshot + was/will be attempted, and to which future + snapshots will be aligned. If not provided, the + current time will be used. + """ + + class SnapshotPeriod(proto.Enum): + r"""Available snapshot periods. + + Values: + SNAPSHOT_PERIOD_UNSPECIFIED (0): + Not set. + ONE_HOUR (1): + One hour. + SIX_HOURS (2): + Six hours. + TWELVE_HOURS (3): + Twelve hours. + TWENTY_FOUR_HOURS (4): + Twenty four hours. + """ + SNAPSHOT_PERIOD_UNSPECIFIED = 0 + ONE_HOUR = 1 + SIX_HOURS = 2 + TWELVE_HOURS = 3 + TWENTY_FOUR_HOURS = 4 + + rdb_snapshot_period: "ClusterPersistenceConfig.RDBConfig.SnapshotPeriod" = ( + proto.Field( + proto.ENUM, + number=1, + enum="ClusterPersistenceConfig.RDBConfig.SnapshotPeriod", + ) + ) + rdb_snapshot_start_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + + class AOFConfig(proto.Message): + r"""Configuration of the AOF based persistence. + + Attributes: + append_fsync (google.cloud.redis_cluster_v1beta1.types.ClusterPersistenceConfig.AOFConfig.AppendFsync): + Optional. fsync configuration. + """ + + class AppendFsync(proto.Enum): + r"""Available fsync modes. + + Values: + APPEND_FSYNC_UNSPECIFIED (0): + Not set. Default: EVERYSEC + NO (1): + Never fsync. Normally Linux will flush data + every 30 seconds with this configuration, but + it's up to the kernel's exact tuning. + EVERYSEC (2): + fsync every second. Fast enough, and you may + lose 1 second of data if there is a disaster + ALWAYS (3): + fsync every time new commands are appended to + the AOF. It has the best data loss protection at + the cost of performance + """ + APPEND_FSYNC_UNSPECIFIED = 0 + NO = 1 + EVERYSEC = 2 + ALWAYS = 3 + + append_fsync: "ClusterPersistenceConfig.AOFConfig.AppendFsync" = proto.Field( + proto.ENUM, + number=1, + enum="ClusterPersistenceConfig.AOFConfig.AppendFsync", + ) + + mode: PersistenceMode = proto.Field( + proto.ENUM, + number=1, + enum=PersistenceMode, + ) + rdb_config: RDBConfig = proto.Field( + proto.MESSAGE, + number=2, + message=RDBConfig, + ) + aof_config: AOFConfig = proto.Field( + proto.MESSAGE, + number=3, + message=AOFConfig, + ) + + +class ZoneDistributionConfig(proto.Message): + r"""Zone distribution config for allocation of cluster resources. + + Attributes: + mode (google.cloud.redis_cluster_v1beta1.types.ZoneDistributionConfig.ZoneDistributionMode): + Optional. The mode of zone distribution. Defaults to + MULTI_ZONE, when not specified. + zone (str): + Optional. When SINGLE ZONE distribution is selected, zone + field would be used to allocate all resources in that zone. + This is not applicable to MULTI_ZONE, and would be ignored + for MULTI_ZONE clusters. + """ + + class ZoneDistributionMode(proto.Enum): + r"""Defines various modes of zone distribution. + Currently supports two modes, can be expanded in future to + support more types of distribution modes. + design doc: go/same-zone-cluster + + Values: + ZONE_DISTRIBUTION_MODE_UNSPECIFIED (0): + Not Set. Default: MULTI_ZONE + MULTI_ZONE (1): + Distribute all resources across 3 zones + picked at random, within the region. 
+ SINGLE_ZONE (2): + Distribute all resources in a single zone. + The zone field must be specified, when this mode + is selected. + """ + ZONE_DISTRIBUTION_MODE_UNSPECIFIED = 0 + MULTI_ZONE = 1 + SINGLE_ZONE = 2 + + mode: ZoneDistributionMode = proto.Field( + proto.ENUM, + number=1, + enum=ZoneDistributionMode, + ) + zone: str = proto.Field( + proto.STRING, + number=2, + ) + + __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-redis-cluster/samples/generated_samples/redis_v1_generated_cloud_redis_cluster_get_cluster_certificate_authority_async.py b/packages/google-cloud-redis-cluster/samples/generated_samples/redis_v1_generated_cloud_redis_cluster_get_cluster_certificate_authority_async.py new file mode 100644 index 000000000000..17d24731b57e --- /dev/null +++ b/packages/google-cloud-redis-cluster/samples/generated_samples/redis_v1_generated_cloud_redis_cluster_get_cluster_certificate_authority_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetClusterCertificateAuthority +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-redis-cluster + + +# [START redis_v1_generated_CloudRedisCluster_GetClusterCertificateAuthority_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import redis_cluster_v1 + + +async def sample_get_cluster_certificate_authority(): + # Create a client + client = redis_cluster_v1.CloudRedisClusterAsyncClient() + + # Initialize request argument(s) + request = redis_cluster_v1.GetClusterCertificateAuthorityRequest( + name="name_value", + ) + + # Make the request + response = await client.get_cluster_certificate_authority(request=request) + + # Handle the response + print(response) + +# [END redis_v1_generated_CloudRedisCluster_GetClusterCertificateAuthority_async] diff --git a/packages/google-cloud-redis-cluster/samples/generated_samples/redis_v1_generated_cloud_redis_cluster_get_cluster_certificate_authority_sync.py b/packages/google-cloud-redis-cluster/samples/generated_samples/redis_v1_generated_cloud_redis_cluster_get_cluster_certificate_authority_sync.py new file mode 100644 index 000000000000..e374fc6a5282 --- /dev/null +++ b/packages/google-cloud-redis-cluster/samples/generated_samples/redis_v1_generated_cloud_redis_cluster_get_cluster_certificate_authority_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetClusterCertificateAuthority +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-redis-cluster + + +# [START redis_v1_generated_CloudRedisCluster_GetClusterCertificateAuthority_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import redis_cluster_v1 + + +def sample_get_cluster_certificate_authority(): + # Create a client + client = redis_cluster_v1.CloudRedisClusterClient() + + # Initialize request argument(s) + request = redis_cluster_v1.GetClusterCertificateAuthorityRequest( + name="name_value", + ) + + # Make the request + response = client.get_cluster_certificate_authority(request=request) + + # Handle the response + print(response) + +# [END redis_v1_generated_CloudRedisCluster_GetClusterCertificateAuthority_sync] diff --git a/packages/google-cloud-redis-cluster/samples/generated_samples/redis_v1beta1_generated_cloud_redis_cluster_get_cluster_certificate_authority_async.py b/packages/google-cloud-redis-cluster/samples/generated_samples/redis_v1beta1_generated_cloud_redis_cluster_get_cluster_certificate_authority_async.py new file mode 100644 index 000000000000..14ad74faa0ee --- /dev/null +++ b/packages/google-cloud-redis-cluster/samples/generated_samples/redis_v1beta1_generated_cloud_redis_cluster_get_cluster_certificate_authority_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetClusterCertificateAuthority +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-redis-cluster + + +# [START redis_v1beta1_generated_CloudRedisCluster_GetClusterCertificateAuthority_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import redis_cluster_v1beta1 + + +async def sample_get_cluster_certificate_authority(): + # Create a client + client = redis_cluster_v1beta1.CloudRedisClusterAsyncClient() + + # Initialize request argument(s) + request = redis_cluster_v1beta1.GetClusterCertificateAuthorityRequest( + name="name_value", + ) + + # Make the request + response = await client.get_cluster_certificate_authority(request=request) + + # Handle the response + print(response) + +# [END redis_v1beta1_generated_CloudRedisCluster_GetClusterCertificateAuthority_async] diff --git a/packages/google-cloud-redis-cluster/samples/generated_samples/redis_v1beta1_generated_cloud_redis_cluster_get_cluster_certificate_authority_sync.py b/packages/google-cloud-redis-cluster/samples/generated_samples/redis_v1beta1_generated_cloud_redis_cluster_get_cluster_certificate_authority_sync.py new file mode 100644 index 000000000000..8c998f848da0 --- /dev/null +++ b/packages/google-cloud-redis-cluster/samples/generated_samples/redis_v1beta1_generated_cloud_redis_cluster_get_cluster_certificate_authority_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetClusterCertificateAuthority +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-redis-cluster + + +# [START redis_v1beta1_generated_CloudRedisCluster_GetClusterCertificateAuthority_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import redis_cluster_v1beta1 + + +def sample_get_cluster_certificate_authority(): + # Create a client + client = redis_cluster_v1beta1.CloudRedisClusterClient() + + # Initialize request argument(s) + request = redis_cluster_v1beta1.GetClusterCertificateAuthorityRequest( + name="name_value", + ) + + # Make the request + response = client.get_cluster_certificate_authority(request=request) + + # Handle the response + print(response) + +# [END redis_v1beta1_generated_CloudRedisCluster_GetClusterCertificateAuthority_sync] diff --git a/packages/google-cloud-redis-cluster/samples/generated_samples/snippet_metadata_google.cloud.redis.cluster.v1.json b/packages/google-cloud-redis-cluster/samples/generated_samples/snippet_metadata_google.cloud.redis.cluster.v1.json index 8abe75407a19..a719c7081bc1 100644 --- a/packages/google-cloud-redis-cluster/samples/generated_samples/snippet_metadata_google.cloud.redis.cluster.v1.json +++ b/packages/google-cloud-redis-cluster/samples/generated_samples/snippet_metadata_google.cloud.redis.cluster.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-redis-cluster", - "version": "0.1.0" + "version": "0.1.6" }, "snippets": [ { @@ -349,6 +349,167 @@ ], "title": "redis_v1_generated_cloud_redis_cluster_delete_cluster_sync.py" }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.redis_cluster_v1.CloudRedisClusterAsyncClient", + "shortName": "CloudRedisClusterAsyncClient" + }, + "fullName": "google.cloud.redis_cluster_v1.CloudRedisClusterAsyncClient.get_cluster_certificate_authority", + "method": { + "fullName": "google.cloud.redis.cluster.v1.CloudRedisCluster.GetClusterCertificateAuthority", + "service": { + "fullName": "google.cloud.redis.cluster.v1.CloudRedisCluster", + "shortName": "CloudRedisCluster" + }, + "shortName": "GetClusterCertificateAuthority" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.redis_cluster_v1.types.GetClusterCertificateAuthorityRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.redis_cluster_v1.types.CertificateAuthority", + "shortName": "get_cluster_certificate_authority" + }, + "description": "Sample for GetClusterCertificateAuthority", + "file": "redis_v1_generated_cloud_redis_cluster_get_cluster_certificate_authority_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "redis_v1_generated_CloudRedisCluster_GetClusterCertificateAuthority_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "redis_v1_generated_cloud_redis_cluster_get_cluster_certificate_authority_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": 
"google.cloud.redis_cluster_v1.CloudRedisClusterClient", + "shortName": "CloudRedisClusterClient" + }, + "fullName": "google.cloud.redis_cluster_v1.CloudRedisClusterClient.get_cluster_certificate_authority", + "method": { + "fullName": "google.cloud.redis.cluster.v1.CloudRedisCluster.GetClusterCertificateAuthority", + "service": { + "fullName": "google.cloud.redis.cluster.v1.CloudRedisCluster", + "shortName": "CloudRedisCluster" + }, + "shortName": "GetClusterCertificateAuthority" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.redis_cluster_v1.types.GetClusterCertificateAuthorityRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.redis_cluster_v1.types.CertificateAuthority", + "shortName": "get_cluster_certificate_authority" + }, + "description": "Sample for GetClusterCertificateAuthority", + "file": "redis_v1_generated_cloud_redis_cluster_get_cluster_certificate_authority_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "redis_v1_generated_CloudRedisCluster_GetClusterCertificateAuthority_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "redis_v1_generated_cloud_redis_cluster_get_cluster_certificate_authority_sync.py" + }, { "canonical": true, "clientMethod": { diff --git a/packages/google-cloud-redis-cluster/samples/generated_samples/snippet_metadata_google.cloud.redis.cluster.v1beta1.json b/packages/google-cloud-redis-cluster/samples/generated_samples/snippet_metadata_google.cloud.redis.cluster.v1beta1.json index 534bbe2d1802..3b39112e53cb 100644 --- a/packages/google-cloud-redis-cluster/samples/generated_samples/snippet_metadata_google.cloud.redis.cluster.v1beta1.json +++ b/packages/google-cloud-redis-cluster/samples/generated_samples/snippet_metadata_google.cloud.redis.cluster.v1beta1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-redis-cluster", - "version": "0.1.0" + "version": "0.1.6" }, "snippets": [ { @@ -349,6 +349,167 @@ ], "title": "redis_v1beta1_generated_cloud_redis_cluster_delete_cluster_sync.py" }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.redis_cluster_v1beta1.CloudRedisClusterAsyncClient", + "shortName": "CloudRedisClusterAsyncClient" + }, + "fullName": "google.cloud.redis_cluster_v1beta1.CloudRedisClusterAsyncClient.get_cluster_certificate_authority", + "method": { + "fullName": "google.cloud.redis.cluster.v1beta1.CloudRedisCluster.GetClusterCertificateAuthority", + "service": { + "fullName": "google.cloud.redis.cluster.v1beta1.CloudRedisCluster", + "shortName": "CloudRedisCluster" + }, + "shortName": "GetClusterCertificateAuthority" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.redis_cluster_v1beta1.types.GetClusterCertificateAuthorityRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": 
"metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.redis_cluster_v1beta1.types.CertificateAuthority", + "shortName": "get_cluster_certificate_authority" + }, + "description": "Sample for GetClusterCertificateAuthority", + "file": "redis_v1beta1_generated_cloud_redis_cluster_get_cluster_certificate_authority_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "redis_v1beta1_generated_CloudRedisCluster_GetClusterCertificateAuthority_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "redis_v1beta1_generated_cloud_redis_cluster_get_cluster_certificate_authority_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.redis_cluster_v1beta1.CloudRedisClusterClient", + "shortName": "CloudRedisClusterClient" + }, + "fullName": "google.cloud.redis_cluster_v1beta1.CloudRedisClusterClient.get_cluster_certificate_authority", + "method": { + "fullName": "google.cloud.redis.cluster.v1beta1.CloudRedisCluster.GetClusterCertificateAuthority", + "service": { + "fullName": "google.cloud.redis.cluster.v1beta1.CloudRedisCluster", + "shortName": "CloudRedisCluster" + }, + "shortName": "GetClusterCertificateAuthority" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.redis_cluster_v1beta1.types.GetClusterCertificateAuthorityRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.redis_cluster_v1beta1.types.CertificateAuthority", + "shortName": "get_cluster_certificate_authority" + }, + "description": "Sample for GetClusterCertificateAuthority", + "file": "redis_v1beta1_generated_cloud_redis_cluster_get_cluster_certificate_authority_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "redis_v1beta1_generated_CloudRedisCluster_GetClusterCertificateAuthority_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "redis_v1beta1_generated_cloud_redis_cluster_get_cluster_certificate_authority_sync.py" + }, { "canonical": true, "clientMethod": { diff --git a/packages/google-cloud-redis-cluster/scripts/fixup_redis_cluster_v1_keywords.py b/packages/google-cloud-redis-cluster/scripts/fixup_redis_cluster_v1_keywords.py index d7116f5cea62..287e2046204c 100644 --- a/packages/google-cloud-redis-cluster/scripts/fixup_redis_cluster_v1_keywords.py +++ b/packages/google-cloud-redis-cluster/scripts/fixup_redis_cluster_v1_keywords.py @@ -42,6 +42,7 @@ class redis_clusterCallTransformer(cst.CSTTransformer): 'create_cluster': ('parent', 'cluster_id', 'cluster', 'request_id', ), 'delete_cluster': ('name', 'request_id', ), 
'get_cluster': ('name', ), + 'get_cluster_certificate_authority': ('name', ), 'list_clusters': ('parent', 'page_size', 'page_token', ), 'update_cluster': ('update_mask', 'cluster', 'request_id', ), } diff --git a/packages/google-cloud-redis-cluster/scripts/fixup_redis_cluster_v1beta1_keywords.py b/packages/google-cloud-redis-cluster/scripts/fixup_redis_cluster_v1beta1_keywords.py index d7116f5cea62..287e2046204c 100644 --- a/packages/google-cloud-redis-cluster/scripts/fixup_redis_cluster_v1beta1_keywords.py +++ b/packages/google-cloud-redis-cluster/scripts/fixup_redis_cluster_v1beta1_keywords.py @@ -42,6 +42,7 @@ class redis_clusterCallTransformer(cst.CSTTransformer): 'create_cluster': ('parent', 'cluster_id', 'cluster', 'request_id', ), 'delete_cluster': ('name', 'request_id', ), 'get_cluster': ('name', ), + 'get_cluster_certificate_authority': ('name', ), 'list_clusters': ('parent', 'page_size', 'page_token', ), 'update_cluster': ('update_mask', 'cluster', 'request_id', ), } diff --git a/packages/google-cloud-redis-cluster/tests/unit/gapic/redis_cluster_v1/test_cloud_redis_cluster.py b/packages/google-cloud-redis-cluster/tests/unit/gapic/redis_cluster_v1/test_cloud_redis_cluster.py index 741a2d05bb06..a12c5b80831f 100644 --- a/packages/google-cloud-redis-cluster/tests/unit/gapic/redis_cluster_v1/test_cloud_redis_cluster.py +++ b/packages/google-cloud-redis-cluster/tests/unit/gapic/redis_cluster_v1/test_cloud_redis_cluster.py @@ -1583,13 +1583,13 @@ def test_list_clusters_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_clusters(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -1765,6 +1765,9 @@ def test_get_cluster(request_type, transport: str = "grpc"): transit_encryption_mode=cloud_redis_cluster.TransitEncryptionMode.TRANSIT_ENCRYPTION_MODE_DISABLED, size_gb=739, shard_count=1178, + node_type=cloud_redis_cluster.NodeType.REDIS_SHARED_CORE_NANO, + precise_size_gb=0.15810000000000002, + deletion_protection_enabled=True, ) response = client.get_cluster(request) @@ -1790,6 +1793,9 @@ def test_get_cluster(request_type, transport: str = "grpc"): ) assert response.size_gb == 739 assert response.shard_count == 1178 + assert response.node_type == cloud_redis_cluster.NodeType.REDIS_SHARED_CORE_NANO + assert math.isclose(response.precise_size_gb, 0.15810000000000002, rel_tol=1e-6) + assert response.deletion_protection_enabled is True def test_get_cluster_empty_call(): @@ -1896,6 +1902,9 @@ async def test_get_cluster_empty_call_async(): transit_encryption_mode=cloud_redis_cluster.TransitEncryptionMode.TRANSIT_ENCRYPTION_MODE_DISABLED, size_gb=739, shard_count=1178, + node_type=cloud_redis_cluster.NodeType.REDIS_SHARED_CORE_NANO, + precise_size_gb=0.15810000000000002, + deletion_protection_enabled=True, ) ) response = await client.get_cluster() @@ -1976,6 +1985,9 @@ async def test_get_cluster_async( transit_encryption_mode=cloud_redis_cluster.TransitEncryptionMode.TRANSIT_ENCRYPTION_MODE_DISABLED, size_gb=739, shard_count=1178, + node_type=cloud_redis_cluster.NodeType.REDIS_SHARED_CORE_NANO, + precise_size_gb=0.15810000000000002, + deletion_protection_enabled=True, ) ) response = await client.get_cluster(request) @@ -2002,6 +2014,9 @@ async def test_get_cluster_async( ) assert response.size_gb == 
739 assert response.shard_count == 1178 + assert response.node_type == cloud_redis_cluster.NodeType.REDIS_SHARED_CORE_NANO + assert math.isclose(response.precise_size_gb, 0.15810000000000002, rel_tol=1e-6) + assert response.deletion_protection_enabled is True @pytest.mark.asyncio @@ -3298,6 +3313,399 @@ async def test_create_cluster_flattened_error_async(): ) +@pytest.mark.parametrize( + "request_type", + [ + cloud_redis_cluster.GetClusterCertificateAuthorityRequest, + dict, + ], +) +def test_get_cluster_certificate_authority(request_type, transport: str = "grpc"): + client = CloudRedisClusterClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_cluster_certificate_authority), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = cloud_redis_cluster.CertificateAuthority( + name="name_value", + ) + response = client.get_cluster_certificate_authority(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = cloud_redis_cluster.GetClusterCertificateAuthorityRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, cloud_redis_cluster.CertificateAuthority) + assert response.name == "name_value" + + +def test_get_cluster_certificate_authority_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = CloudRedisClusterClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_cluster_certificate_authority), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.get_cluster_certificate_authority() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cloud_redis_cluster.GetClusterCertificateAuthorityRequest() + + +def test_get_cluster_certificate_authority_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = CloudRedisClusterClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = cloud_redis_cluster.GetClusterCertificateAuthorityRequest( + name="name_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_cluster_certificate_authority), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.get_cluster_certificate_authority(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cloud_redis_cluster.GetClusterCertificateAuthorityRequest( + name="name_value", + ) + + +def test_get_cluster_certificate_authority_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = CloudRedisClusterClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.get_cluster_certificate_authority + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.get_cluster_certificate_authority + ] = mock_rpc + request = {} + client.get_cluster_certificate_authority(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.get_cluster_certificate_authority(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_get_cluster_certificate_authority_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = CloudRedisClusterAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_cluster_certificate_authority), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cloud_redis_cluster.CertificateAuthority( + name="name_value", + ) + ) + response = await client.get_cluster_certificate_authority() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cloud_redis_cluster.GetClusterCertificateAuthorityRequest() + + +@pytest.mark.asyncio +async def test_get_cluster_certificate_authority_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = CloudRedisClusterAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.get_cluster_certificate_authority + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[ + client._client._transport.get_cluster_certificate_authority + ] = mock_object + + request = {} + await client.get_cluster_certificate_authority(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + await client.get_cluster_certificate_authority(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + +@pytest.mark.asyncio +async def test_get_cluster_certificate_authority_async( + transport: str = "grpc_asyncio", + request_type=cloud_redis_cluster.GetClusterCertificateAuthorityRequest, +): + client = CloudRedisClusterAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_cluster_certificate_authority), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cloud_redis_cluster.CertificateAuthority( + name="name_value", + ) + ) + response = await client.get_cluster_certificate_authority(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = cloud_redis_cluster.GetClusterCertificateAuthorityRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, cloud_redis_cluster.CertificateAuthority) + assert response.name == "name_value" + + +@pytest.mark.asyncio +async def test_get_cluster_certificate_authority_async_from_dict(): + await test_get_cluster_certificate_authority_async(request_type=dict) + + +def test_get_cluster_certificate_authority_field_headers(): + client = CloudRedisClusterClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = cloud_redis_cluster.GetClusterCertificateAuthorityRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_cluster_certificate_authority), "__call__" + ) as call: + call.return_value = cloud_redis_cluster.CertificateAuthority() + client.get_cluster_certificate_authority(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_cluster_certificate_authority_field_headers_async(): + client = CloudRedisClusterAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloud_redis_cluster.GetClusterCertificateAuthorityRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_cluster_certificate_authority), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cloud_redis_cluster.CertificateAuthority() + ) + await client.get_cluster_certificate_authority(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_get_cluster_certificate_authority_flattened(): + client = CloudRedisClusterClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_cluster_certificate_authority), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = cloud_redis_cluster.CertificateAuthority() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_cluster_certificate_authority( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_get_cluster_certificate_authority_flattened_error(): + client = CloudRedisClusterClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_cluster_certificate_authority( + cloud_redis_cluster.GetClusterCertificateAuthorityRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_get_cluster_certificate_authority_flattened_async(): + client = CloudRedisClusterAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_cluster_certificate_authority), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = cloud_redis_cluster.CertificateAuthority() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cloud_redis_cluster.CertificateAuthority() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_cluster_certificate_authority( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_get_cluster_certificate_authority_flattened_error_async(): + client = CloudRedisClusterAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.get_cluster_certificate_authority( + cloud_redis_cluster.GetClusterCertificateAuthorityRequest(), + name="name_value", + ) + + @pytest.mark.parametrize( "request_type", [ @@ -3305,42 +3713,442 @@ async def test_create_cluster_flattened_error_async(): dict, ], ) -def test_list_clusters_rest(request_type): +def test_list_clusters_rest(request_type): + client = CloudRedisClusterClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = cloud_redis_cluster.ListClustersResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = cloud_redis_cluster.ListClustersResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_clusters(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListClustersPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +def test_list_clusters_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = CloudRedisClusterClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_clusters in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client._transport._wrapped_methods[client._transport.list_clusters] = mock_rpc + + request = {} + client.list_clusters(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_clusters(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_list_clusters_rest_required_fields( + request_type=cloud_redis_cluster.ListClustersRequest, +): + transport_class = transports.CloudRedisClusterRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_clusters._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_clusters._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "page_size", + "page_token", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = CloudRedisClusterClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = cloud_redis_cluster.ListClustersResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = cloud_redis_cluster.ListClustersResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_clusters(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_clusters_rest_unset_required_fields(): + transport = transports.CloudRedisClusterRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.list_clusters._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_clusters_rest_interceptors(null_interceptor): + transport = transports.CloudRedisClusterRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.CloudRedisClusterRestInterceptor(), + ) + client = CloudRedisClusterClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.CloudRedisClusterRestInterceptor, "post_list_clusters" + ) as post, mock.patch.object( + transports.CloudRedisClusterRestInterceptor, "pre_list_clusters" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = cloud_redis_cluster.ListClustersRequest.pb( + cloud_redis_cluster.ListClustersRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = cloud_redis_cluster.ListClustersResponse.to_json( + cloud_redis_cluster.ListClustersResponse() + ) + + request = cloud_redis_cluster.ListClustersRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = cloud_redis_cluster.ListClustersResponse() + + client.list_clusters( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_clusters_rest_bad_request( + transport: str = "rest", request_type=cloud_redis_cluster.ListClustersRequest +): + client = CloudRedisClusterClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_clusters(request) + + +def test_list_clusters_rest_flattened(): + client = CloudRedisClusterClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = cloud_redis_cluster.ListClustersResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = cloud_redis_cluster.ListClustersResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.list_clusters(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*}/clusters" % client.transport._host, + args[1], + ) + + +def test_list_clusters_rest_flattened_error(transport: str = "rest"): + client = CloudRedisClusterClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_clusters( + cloud_redis_cluster.ListClustersRequest(), + parent="parent_value", + ) + + +def test_list_clusters_rest_pager(transport: str = "rest"): + client = CloudRedisClusterClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + cloud_redis_cluster.ListClustersResponse( + clusters=[ + cloud_redis_cluster.Cluster(), + cloud_redis_cluster.Cluster(), + cloud_redis_cluster.Cluster(), + ], + next_page_token="abc", + ), + cloud_redis_cluster.ListClustersResponse( + clusters=[], + next_page_token="def", + ), + cloud_redis_cluster.ListClustersResponse( + clusters=[ + cloud_redis_cluster.Cluster(), + ], + next_page_token="ghi", + ), + cloud_redis_cluster.ListClustersResponse( + clusters=[ + cloud_redis_cluster.Cluster(), + cloud_redis_cluster.Cluster(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + cloud_redis_cluster.ListClustersResponse.to_json(x) for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "projects/sample1/locations/sample2"} + + pager = client.list_clusters(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, cloud_redis_cluster.Cluster) for i in results) + + pages = list(client.list_clusters(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + cloud_redis_cluster.GetClusterRequest, + dict, + ], +) +def test_get_cluster_rest(request_type): client = CloudRedisClusterClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} + request_init = {"name": "projects/sample1/locations/sample2/clusters/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = cloud_redis_cluster.ListClustersResponse( - next_page_token="next_page_token_value", - unreachable=["unreachable_value"], + return_value = cloud_redis_cluster.Cluster( + name="name_value", + state=cloud_redis_cluster.Cluster.State.CREATING, + uid="uid_value", + replica_count=1384, + authorization_mode=cloud_redis_cluster.AuthorizationMode.AUTH_MODE_IAM_AUTH, + transit_encryption_mode=cloud_redis_cluster.TransitEncryptionMode.TRANSIT_ENCRYPTION_MODE_DISABLED, + size_gb=739, + shard_count=1178, + node_type=cloud_redis_cluster.NodeType.REDIS_SHARED_CORE_NANO, + precise_size_gb=0.15810000000000002, + deletion_protection_enabled=True, ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = cloud_redis_cluster.ListClustersResponse.pb(return_value) + return_value = cloud_redis_cluster.Cluster.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.list_clusters(request) + response = client.get_cluster(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListClustersPager) - assert response.next_page_token == "next_page_token_value" - assert response.unreachable == ["unreachable_value"] + assert isinstance(response, cloud_redis_cluster.Cluster) + assert response.name == "name_value" + assert response.state == cloud_redis_cluster.Cluster.State.CREATING + assert response.uid == "uid_value" + assert response.replica_count == 1384 + assert ( + response.authorization_mode + == cloud_redis_cluster.AuthorizationMode.AUTH_MODE_IAM_AUTH + ) + assert ( + response.transit_encryption_mode + == cloud_redis_cluster.TransitEncryptionMode.TRANSIT_ENCRYPTION_MODE_DISABLED + ) + assert response.size_gb == 739 + assert response.shard_count == 1178 + assert response.node_type == cloud_redis_cluster.NodeType.REDIS_SHARED_CORE_NANO + assert math.isclose(response.precise_size_gb, 0.15810000000000002, rel_tol=1e-6) + assert response.deletion_protection_enabled is True -def test_list_clusters_rest_use_cached_wrapped_rpc(): +def test_get_cluster_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -3354,35 +4162,35 @@ def test_list_clusters_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.list_clusters in client._transport._wrapped_methods + assert client._transport.get_cluster in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.list_clusters] = mock_rpc + client._transport._wrapped_methods[client._transport.get_cluster] = mock_rpc request = {} - client.list_clusters(request) + client.get_cluster(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.list_clusters(request) + client.get_cluster(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_list_clusters_rest_required_fields( - request_type=cloud_redis_cluster.ListClustersRequest, +def test_get_cluster_rest_required_fields( + request_type=cloud_redis_cluster.GetClusterRequest, ): transport_class = transports.CloudRedisClusterRestTransport request_init = {} - request_init["parent"] = "" + request_init["name"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -3393,28 +4201,21 @@ def test_list_clusters_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_clusters._get_unset_required_fields(jsonified_request) + ).get_cluster._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["parent"] = "parent_value" + jsonified_request["name"] = "name_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_clusters._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. 
- assert not set(unset_fields) - set( - ( - "page_size", - "page_token", - ) - ) + ).get_cluster._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" client = CloudRedisClusterClient( credentials=ga_credentials.AnonymousCredentials(), @@ -3423,7 +4224,7 @@ def test_list_clusters_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = cloud_redis_cluster.ListClustersResponse() + return_value = cloud_redis_cluster.Cluster() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -3444,38 +4245,30 @@ def test_list_clusters_rest_required_fields( response_value.status_code = 200 # Convert return value to protobuf type - return_value = cloud_redis_cluster.ListClustersResponse.pb(return_value) + return_value = cloud_redis_cluster.Cluster.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.list_clusters(request) + response = client.get_cluster(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_list_clusters_rest_unset_required_fields(): +def test_get_cluster_rest_unset_required_fields(): transport = transports.CloudRedisClusterRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.list_clusters._get_unset_required_fields({}) - assert set(unset_fields) == ( - set( - ( - "pageSize", - "pageToken", - ) - ) - & set(("parent",)) - ) + unset_fields = transport.get_cluster._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_clusters_rest_interceptors(null_interceptor): +def test_get_cluster_rest_interceptors(null_interceptor): transport = transports.CloudRedisClusterRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -3488,14 +4281,14 @@ def test_list_clusters_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.CloudRedisClusterRestInterceptor, "post_list_clusters" + transports.CloudRedisClusterRestInterceptor, "post_get_cluster" ) as post, mock.patch.object( - transports.CloudRedisClusterRestInterceptor, "pre_list_clusters" + transports.CloudRedisClusterRestInterceptor, "pre_get_cluster" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = cloud_redis_cluster.ListClustersRequest.pb( - cloud_redis_cluster.ListClustersRequest() + pb_message = cloud_redis_cluster.GetClusterRequest.pb( + cloud_redis_cluster.GetClusterRequest() ) transcode.return_value = { "method": "post", @@ -3507,19 +4300,19 @@ def test_list_clusters_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = cloud_redis_cluster.ListClustersResponse.to_json( - cloud_redis_cluster.ListClustersResponse() + 
req.return_value._content = cloud_redis_cluster.Cluster.to_json( + cloud_redis_cluster.Cluster() ) - request = cloud_redis_cluster.ListClustersRequest() + request = cloud_redis_cluster.GetClusterRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = cloud_redis_cluster.ListClustersResponse() + post.return_value = cloud_redis_cluster.Cluster() - client.list_clusters( + client.get_cluster( request, metadata=[ ("key", "val"), @@ -3531,8 +4324,8 @@ def test_list_clusters_rest_interceptors(null_interceptor): post.assert_called_once() -def test_list_clusters_rest_bad_request( - transport: str = "rest", request_type=cloud_redis_cluster.ListClustersRequest +def test_get_cluster_rest_bad_request( + transport: str = "rest", request_type=cloud_redis_cluster.GetClusterRequest ): client = CloudRedisClusterClient( credentials=ga_credentials.AnonymousCredentials(), @@ -3540,7 +4333,7 @@ def test_list_clusters_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} + request_init = {"name": "projects/sample1/locations/sample2/clusters/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -3552,10 +4345,10 @@ def test_list_clusters_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.list_clusters(request) + client.get_cluster(request) -def test_list_clusters_rest_flattened(): +def test_get_cluster_rest_flattened(): client = CloudRedisClusterClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -3564,14 +4357,14 @@ def test_list_clusters_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = cloud_redis_cluster.ListClustersResponse() + return_value = cloud_redis_cluster.Cluster() # get arguments that satisfy an http rule for this method - sample_request = {"parent": "projects/sample1/locations/sample2"} + sample_request = {"name": "projects/sample1/locations/sample2/clusters/sample3"} # get truthy value for each flattened field mock_args = dict( - parent="parent_value", + name="name_value", ) mock_args.update(sample_request) @@ -3579,24 +4372,24 @@ def test_list_clusters_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = cloud_redis_cluster.ListClustersResponse.pb(return_value) + return_value = cloud_redis_cluster.Cluster.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.list_clusters(**mock_args) + client.get_cluster(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{parent=projects/*/locations/*}/clusters" % client.transport._host, + "%s/v1/{name=projects/*/locations/*/clusters/*}" % client.transport._host, args[1], ) -def test_list_clusters_rest_flattened_error(transport: str = "rest"): +def test_get_cluster_rest_flattened_error(transport: str = "rest"): client = CloudRedisClusterClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -3605,136 +4398,160 @@ def test_list_clusters_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.list_clusters( - cloud_redis_cluster.ListClustersRequest(), - parent="parent_value", + client.get_cluster( + cloud_redis_cluster.GetClusterRequest(), + name="name_value", ) -def test_list_clusters_rest_pager(transport: str = "rest"): +def test_get_cluster_rest_error(): client = CloudRedisClusterClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. - # with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - cloud_redis_cluster.ListClustersResponse( - clusters=[ - cloud_redis_cluster.Cluster(), - cloud_redis_cluster.Cluster(), - cloud_redis_cluster.Cluster(), - ], - next_page_token="abc", - ), - cloud_redis_cluster.ListClustersResponse( - clusters=[], - next_page_token="def", - ), - cloud_redis_cluster.ListClustersResponse( - clusters=[ - cloud_redis_cluster.Cluster(), - ], - next_page_token="ghi", - ), - cloud_redis_cluster.ListClustersResponse( - clusters=[ - cloud_redis_cluster.Cluster(), - cloud_redis_cluster.Cluster(), - ], - ), - ) - # Two responses for two calls - response = response + response - - # Wrap the values into proper Response objs - response = tuple( - cloud_redis_cluster.ListClustersResponse.to_json(x) for x in response - ) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode("UTF-8") - return_val.status_code = 200 - req.side_effect = return_values - - sample_request = {"parent": "projects/sample1/locations/sample2"} - - pager = client.list_clusters(request=sample_request) - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, cloud_redis_cluster.Cluster) for i in results) - - pages = list(client.list_clusters(request=sample_request).pages) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token - @pytest.mark.parametrize( "request_type", [ - cloud_redis_cluster.GetClusterRequest, + cloud_redis_cluster.UpdateClusterRequest, dict, ], ) -def test_get_cluster_rest(request_type): +def test_update_cluster_rest(request_type): client = CloudRedisClusterClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/clusters/sample3"} + request_init = { + "cluster": {"name": "projects/sample1/locations/sample2/clusters/sample3"} + } + request_init["cluster"] = { + "name": 
"projects/sample1/locations/sample2/clusters/sample3", + "create_time": {"seconds": 751, "nanos": 543}, + "state": 1, + "uid": "uid_value", + "replica_count": 1384, + "authorization_mode": 1, + "transit_encryption_mode": 1, + "size_gb": 739, + "shard_count": 1178, + "psc_configs": [{"network": "network_value"}], + "discovery_endpoints": [ + {"address": "address_value", "port": 453, "psc_config": {}} + ], + "psc_connections": [ + { + "psc_connection_id": "psc_connection_id_value", + "address": "address_value", + "forwarding_rule": "forwarding_rule_value", + "project_id": "project_id_value", + "network": "network_value", + } + ], + "state_info": { + "update_info": {"target_shard_count": 1920, "target_replica_count": 2126} + }, + "node_type": 1, + "persistence_config": { + "mode": 1, + "rdb_config": {"rdb_snapshot_period": 1, "rdb_snapshot_start_time": {}}, + "aof_config": {"append_fsync": 1}, + }, + "redis_configs": {}, + "precise_size_gb": 0.15810000000000002, + "zone_distribution_config": {"mode": 1, "zone": "zone_value"}, + "deletion_protection_enabled": True, + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = cloud_redis_cluster.UpdateClusterRequest.meta.fields["cluster"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["cluster"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = 
subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["cluster"][field])): + del request_init["cluster"][field][i][subfield] + else: + del request_init["cluster"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = cloud_redis_cluster.Cluster( - name="name_value", - state=cloud_redis_cluster.Cluster.State.CREATING, - uid="uid_value", - replica_count=1384, - authorization_mode=cloud_redis_cluster.AuthorizationMode.AUTH_MODE_IAM_AUTH, - transit_encryption_mode=cloud_redis_cluster.TransitEncryptionMode.TRANSIT_ENCRYPTION_MODE_DISABLED, - size_gb=739, - shard_count=1178, - ) + return_value = operations_pb2.Operation(name="operations/spam") # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - # Convert return value to protobuf type - return_value = cloud_redis_cluster.Cluster.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.get_cluster(request) + response = client.update_cluster(request) # Establish that the response is the type that we expect. - assert isinstance(response, cloud_redis_cluster.Cluster) - assert response.name == "name_value" - assert response.state == cloud_redis_cluster.Cluster.State.CREATING - assert response.uid == "uid_value" - assert response.replica_count == 1384 - assert ( - response.authorization_mode - == cloud_redis_cluster.AuthorizationMode.AUTH_MODE_IAM_AUTH - ) - assert ( - response.transit_encryption_mode - == cloud_redis_cluster.TransitEncryptionMode.TRANSIT_ENCRYPTION_MODE_DISABLED - ) - assert response.size_gb == 739 - assert response.shard_count == 1178 + assert response.operation.name == "operations/spam" -def test_get_cluster_rest_use_cached_wrapped_rpc(): +def test_update_cluster_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -3748,35 +4565,38 @@ def test_get_cluster_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.get_cluster in client._transport._wrapped_methods + assert client._transport.update_cluster in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.get_cluster] = mock_rpc + client._transport._wrapped_methods[client._transport.update_cluster] = mock_rpc request = {} - client.get_cluster(request) + client.update_cluster(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.get_cluster(request) + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.update_cluster(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_get_cluster_rest_required_fields( - request_type=cloud_redis_cluster.GetClusterRequest, +def test_update_cluster_rest_required_fields( + request_type=cloud_redis_cluster.UpdateClusterRequest, ): transport_class = transports.CloudRedisClusterRestTransport request_init = {} - request_init["name"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -3787,21 +4607,24 @@ def test_get_cluster_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_cluster._get_unset_required_fields(jsonified_request) + ).update_cluster._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["name"] = "name_value" - unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_cluster._get_unset_required_fields(jsonified_request) + ).update_cluster._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "request_id", + "update_mask", + ) + ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" client = CloudRedisClusterClient( credentials=ga_credentials.AnonymousCredentials(), @@ -3810,7 +4633,7 @@ def test_get_cluster_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = cloud_redis_cluster.Cluster() + return_value = operations_pb2.Operation(name="operations/spam") # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -3822,39 +4645,50 @@ def test_get_cluster_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "get", + "method": "patch", "query_params": pb_request, } + transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = cloud_redis_cluster.Cluster.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.get_cluster(request) + response = client.update_cluster(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_get_cluster_rest_unset_required_fields(): +def test_update_cluster_rest_unset_required_fields(): transport = transports.CloudRedisClusterRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.get_cluster._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name",))) + unset_fields = transport.update_cluster._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "requestId", + "updateMask", + ) + ) + & set( + ( + "updateMask", + "cluster", + ) + ) + ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_cluster_rest_interceptors(null_interceptor): +def test_update_cluster_rest_interceptors(null_interceptor): transport = transports.CloudRedisClusterRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -3867,14 +4701,16 @@ def test_get_cluster_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.CloudRedisClusterRestInterceptor, "post_get_cluster" + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.CloudRedisClusterRestInterceptor, "post_update_cluster" ) as post, mock.patch.object( - transports.CloudRedisClusterRestInterceptor, "pre_get_cluster" + transports.CloudRedisClusterRestInterceptor, "pre_update_cluster" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = cloud_redis_cluster.GetClusterRequest.pb( - cloud_redis_cluster.GetClusterRequest() + pb_message = cloud_redis_cluster.UpdateClusterRequest.pb( + cloud_redis_cluster.UpdateClusterRequest() ) transcode.return_value = { "method": "post", @@ -3886,19 +4722,19 @@ def test_get_cluster_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = cloud_redis_cluster.Cluster.to_json( - cloud_redis_cluster.Cluster() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() ) - request = cloud_redis_cluster.GetClusterRequest() + request = cloud_redis_cluster.UpdateClusterRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = cloud_redis_cluster.Cluster() + post.return_value = operations_pb2.Operation() - client.get_cluster( + client.update_cluster( request, metadata=[ ("key", "val"), @@ -3910,8 +4746,8 @@ def test_get_cluster_rest_interceptors(null_interceptor): post.assert_called_once() -def 
test_get_cluster_rest_bad_request( - transport: str = "rest", request_type=cloud_redis_cluster.GetClusterRequest +def test_update_cluster_rest_bad_request( + transport: str = "rest", request_type=cloud_redis_cluster.UpdateClusterRequest ): client = CloudRedisClusterClient( credentials=ga_credentials.AnonymousCredentials(), @@ -3919,7 +4755,9 @@ def test_get_cluster_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/clusters/sample3"} + request_init = { + "cluster": {"name": "projects/sample1/locations/sample2/clusters/sample3"} + } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -3931,10 +4769,10 @@ def test_get_cluster_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.get_cluster(request) + client.update_cluster(request) -def test_get_cluster_rest_flattened(): +def test_update_cluster_rest_flattened(): client = CloudRedisClusterClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -3943,39 +4781,41 @@ def test_get_cluster_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = cloud_redis_cluster.Cluster() + return_value = operations_pb2.Operation(name="operations/spam") # get arguments that satisfy an http rule for this method - sample_request = {"name": "projects/sample1/locations/sample2/clusters/sample3"} + sample_request = { + "cluster": {"name": "projects/sample1/locations/sample2/clusters/sample3"} + } # get truthy value for each flattened field mock_args = dict( - name="name_value", + cluster=cloud_redis_cluster.Cluster(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) mock_args.update(sample_request) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - # Convert return value to protobuf type - return_value = cloud_redis_cluster.Cluster.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.get_cluster(**mock_args) + client.update_cluster(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{name=projects/*/locations/*/clusters/*}" % client.transport._host, + "%s/v1/{cluster.name=projects/*/locations/*/clusters/*}" + % client.transport._host, args[1], ) -def test_get_cluster_rest_flattened_error(transport: str = "rest"): +def test_update_cluster_rest_flattened_error(transport: str = "rest"): client = CloudRedisClusterClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -3984,13 +4824,14 @@ def test_get_cluster_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.get_cluster( - cloud_redis_cluster.GetClusterRequest(), - name="name_value", + client.update_cluster( + cloud_redis_cluster.UpdateClusterRequest(), + cluster=cloud_redis_cluster.Cluster(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) -def test_get_cluster_rest_error(): +def test_update_cluster_rest_error(): client = CloudRedisClusterClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -3999,114 +4840,18 @@ def test_get_cluster_rest_error(): @pytest.mark.parametrize( "request_type", [ - cloud_redis_cluster.UpdateClusterRequest, + cloud_redis_cluster.DeleteClusterRequest, dict, ], ) -def test_update_cluster_rest(request_type): +def test_delete_cluster_rest(request_type): client = CloudRedisClusterClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = { - "cluster": {"name": "projects/sample1/locations/sample2/clusters/sample3"} - } - request_init["cluster"] = { - "name": "projects/sample1/locations/sample2/clusters/sample3", - "create_time": {"seconds": 751, "nanos": 543}, - "state": 1, - "uid": "uid_value", - "replica_count": 1384, - "authorization_mode": 1, - "transit_encryption_mode": 1, - "size_gb": 739, - "shard_count": 1178, - "psc_configs": [{"network": "network_value"}], - "discovery_endpoints": [ - {"address": "address_value", "port": 453, "psc_config": {}} - ], - "psc_connections": [ - { - "psc_connection_id": "psc_connection_id_value", - "address": "address_value", - "forwarding_rule": "forwarding_rule_value", - "project_id": "project_id_value", - "network": "network_value", - } - ], - "state_info": { - "update_info": {"target_shard_count": 1920, "target_replica_count": 2126} - }, - } - # The version of a generated dependency at test runtime may differ from the version used during generation. - # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = cloud_redis_cluster.UpdateClusterRequest.meta.fields["cluster"] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. 
- message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["cluster"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - { - "field": field, - "subfield": subfield, - "is_repeated": is_repeated, - } - ) - - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["cluster"][field])): - del request_init["cluster"][field][i][subfield] - else: - del request_init["cluster"][field][subfield] + request_init = {"name": "projects/sample1/locations/sample2/clusters/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. @@ -4121,13 +4866,13 @@ def get_message_fields(field): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.update_cluster(request) + response = client.delete_cluster(request) # Establish that the response is the type that we expect. assert response.operation.name == "operations/spam" -def test_update_cluster_rest_use_cached_wrapped_rpc(): +def test_delete_cluster_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -4141,17 +4886,17 @@ def test_update_cluster_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.update_cluster in client._transport._wrapped_methods + assert client._transport.delete_cluster in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client._transport._wrapped_methods[client._transport.update_cluster] = mock_rpc + client._transport._wrapped_methods[client._transport.delete_cluster] = mock_rpc request = {} - client.update_cluster(request) + client.delete_cluster(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 @@ -4160,19 +4905,20 @@ def test_update_cluster_rest_use_cached_wrapped_rpc(): # subsequent calls should use the cached wrapper wrapper_fn.reset_mock() - client.update_cluster(request) + client.delete_cluster(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_update_cluster_rest_required_fields( - request_type=cloud_redis_cluster.UpdateClusterRequest, +def test_delete_cluster_rest_required_fields( + request_type=cloud_redis_cluster.DeleteClusterRequest, ): transport_class = transports.CloudRedisClusterRestTransport request_init = {} + request_init["name"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -4183,24 +4929,23 @@ def test_update_cluster_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).update_cluster._get_unset_required_fields(jsonified_request) + ).delete_cluster._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present + jsonified_request["name"] = "name_value" + unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).update_cluster._get_unset_required_fields(jsonified_request) + ).delete_cluster._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. 
- assert not set(unset_fields) - set( - ( - "request_id", - "update_mask", - ) - ) + assert not set(unset_fields) - set(("request_id",)) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" client = CloudRedisClusterClient( credentials=ga_credentials.AnonymousCredentials(), @@ -4221,10 +4966,9 @@ def test_update_cluster_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "patch", + "method": "delete", "query_params": pb_request, } - transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() @@ -4234,37 +4978,24 @@ def test_update_cluster_rest_required_fields( response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.update_cluster(request) + response = client.delete_cluster(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_update_cluster_rest_unset_required_fields(): +def test_delete_cluster_rest_unset_required_fields(): transport = transports.CloudRedisClusterRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.update_cluster._get_unset_required_fields({}) - assert set(unset_fields) == ( - set( - ( - "requestId", - "updateMask", - ) - ) - & set( - ( - "updateMask", - "cluster", - ) - ) - ) - + unset_fields = transport.delete_cluster._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId",)) & set(("name",))) + @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_update_cluster_rest_interceptors(null_interceptor): +def test_delete_cluster_rest_interceptors(null_interceptor): transport = transports.CloudRedisClusterRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -4279,14 +5010,14 @@ def test_update_cluster_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( operation.Operation, "_set_result_from_operation" ), mock.patch.object( - transports.CloudRedisClusterRestInterceptor, "post_update_cluster" + transports.CloudRedisClusterRestInterceptor, "post_delete_cluster" ) as post, mock.patch.object( - transports.CloudRedisClusterRestInterceptor, "pre_update_cluster" + transports.CloudRedisClusterRestInterceptor, "pre_delete_cluster" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = cloud_redis_cluster.UpdateClusterRequest.pb( - cloud_redis_cluster.UpdateClusterRequest() + pb_message = cloud_redis_cluster.DeleteClusterRequest.pb( + cloud_redis_cluster.DeleteClusterRequest() ) transcode.return_value = { "method": "post", @@ -4302,7 +5033,7 @@ def test_update_cluster_rest_interceptors(null_interceptor): operations_pb2.Operation() ) - request = cloud_redis_cluster.UpdateClusterRequest() + request = cloud_redis_cluster.DeleteClusterRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), @@ -4310,7 +5041,7 @@ def test_update_cluster_rest_interceptors(null_interceptor): pre.return_value = request, metadata post.return_value = operations_pb2.Operation() - client.update_cluster( + client.delete_cluster( request, metadata=[ ("key", "val"), @@ -4322,8 +5053,8 @@ def test_update_cluster_rest_interceptors(null_interceptor): post.assert_called_once() -def test_update_cluster_rest_bad_request( - transport: str = "rest", 
request_type=cloud_redis_cluster.UpdateClusterRequest +def test_delete_cluster_rest_bad_request( + transport: str = "rest", request_type=cloud_redis_cluster.DeleteClusterRequest ): client = CloudRedisClusterClient( credentials=ga_credentials.AnonymousCredentials(), @@ -4331,9 +5062,7 @@ def test_update_cluster_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = { - "cluster": {"name": "projects/sample1/locations/sample2/clusters/sample3"} - } + request_init = {"name": "projects/sample1/locations/sample2/clusters/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -4345,10 +5074,10 @@ def test_update_cluster_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.update_cluster(request) + client.delete_cluster(request) -def test_update_cluster_rest_flattened(): +def test_delete_cluster_rest_flattened(): client = CloudRedisClusterClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -4360,14 +5089,11 @@ def test_update_cluster_rest_flattened(): return_value = operations_pb2.Operation(name="operations/spam") # get arguments that satisfy an http rule for this method - sample_request = { - "cluster": {"name": "projects/sample1/locations/sample2/clusters/sample3"} - } + sample_request = {"name": "projects/sample1/locations/sample2/clusters/sample3"} # get truthy value for each flattened field mock_args = dict( - cluster=cloud_redis_cluster.Cluster(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + name="name_value", ) mock_args.update(sample_request) @@ -4378,20 +5104,19 @@ def test_update_cluster_rest_flattened(): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.update_cluster(**mock_args) + client.delete_cluster(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{cluster.name=projects/*/locations/*/clusters/*}" - % client.transport._host, + "%s/v1/{name=projects/*/locations/*/clusters/*}" % client.transport._host, args[1], ) -def test_update_cluster_rest_flattened_error(transport: str = "rest"): +def test_delete_cluster_rest_flattened_error(transport: str = "rest"): client = CloudRedisClusterClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -4400,14 +5125,13 @@ def test_update_cluster_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.update_cluster( - cloud_redis_cluster.UpdateClusterRequest(), - cluster=cloud_redis_cluster.Cluster(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + client.delete_cluster( + cloud_redis_cluster.DeleteClusterRequest(), + name="name_value", ) -def test_update_cluster_rest_error(): +def test_delete_cluster_rest_error(): client = CloudRedisClusterClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -4416,18 +5140,122 @@ def test_update_cluster_rest_error(): @pytest.mark.parametrize( "request_type", [ - cloud_redis_cluster.DeleteClusterRequest, + cloud_redis_cluster.CreateClusterRequest, dict, ], ) -def test_delete_cluster_rest(request_type): +def test_create_cluster_rest(request_type): client = CloudRedisClusterClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/clusters/sample3"} + request_init = {"parent": "projects/sample1/locations/sample2"} + request_init["cluster"] = { + "name": "name_value", + "create_time": {"seconds": 751, "nanos": 543}, + "state": 1, + "uid": "uid_value", + "replica_count": 1384, + "authorization_mode": 1, + "transit_encryption_mode": 1, + "size_gb": 739, + "shard_count": 1178, + "psc_configs": [{"network": "network_value"}], + "discovery_endpoints": [ + {"address": "address_value", "port": 453, "psc_config": {}} + ], + "psc_connections": [ + { + "psc_connection_id": "psc_connection_id_value", + "address": "address_value", + "forwarding_rule": "forwarding_rule_value", + "project_id": "project_id_value", + "network": "network_value", + } + ], + "state_info": { + "update_info": {"target_shard_count": 1920, "target_replica_count": 2126} + }, + "node_type": 1, + "persistence_config": { + "mode": 1, + "rdb_config": {"rdb_snapshot_period": 1, "rdb_snapshot_start_time": {}}, + "aof_config": {"append_fsync": 1}, + }, + "redis_configs": {}, + "precise_size_gb": 0.15810000000000002, + "zone_distribution_config": {"mode": 1, "zone": "zone_value"}, + "deletion_protection_enabled": True, + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = cloud_redis_cluster.CreateClusterRequest.meta.fields["cluster"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["cluster"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["cluster"][field])): + del request_init["cluster"][field][i][subfield] + else: + del request_init["cluster"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. @@ -4442,13 +5270,13 @@ def test_delete_cluster_rest(request_type): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.delete_cluster(request) + response = client.create_cluster(request) # Establish that the response is the type that we expect. assert response.operation.name == "operations/spam" -def test_delete_cluster_rest_use_cached_wrapped_rpc(): +def test_create_cluster_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -4462,17 +5290,17 @@ def test_delete_cluster_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.delete_cluster in client._transport._wrapped_methods + assert client._transport.create_cluster in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client._transport._wrapped_methods[client._transport.delete_cluster] = mock_rpc + client._transport._wrapped_methods[client._transport.create_cluster] = mock_rpc request = {} - client.delete_cluster(request) + client.create_cluster(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 @@ -4481,20 +5309,21 @@ def test_delete_cluster_rest_use_cached_wrapped_rpc(): # subsequent calls should use the cached wrapper wrapper_fn.reset_mock() - client.delete_cluster(request) + client.create_cluster(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_delete_cluster_rest_required_fields( - request_type=cloud_redis_cluster.DeleteClusterRequest, +def test_create_cluster_rest_required_fields( + request_type=cloud_redis_cluster.CreateClusterRequest, ): transport_class = transports.CloudRedisClusterRestTransport request_init = {} - request_init["name"] = "" + request_init["parent"] = "" + request_init["cluster_id"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -4502,26 +5331,37 @@ def test_delete_cluster_rest_required_fields( ) # verify fields with default values are dropped + assert "clusterId" not in jsonified_request unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).delete_cluster._get_unset_required_fields(jsonified_request) + ).create_cluster._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present + assert "clusterId" in jsonified_request + assert jsonified_request["clusterId"] == request_init["cluster_id"] - jsonified_request["name"] = "name_value" + jsonified_request["parent"] = "parent_value" + jsonified_request["clusterId"] = "cluster_id_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).delete_cluster._get_unset_required_fields(jsonified_request) + ).create_cluster._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. 
- assert not set(unset_fields) - set(("request_id",)) + assert not set(unset_fields) - set( + ( + "cluster_id", + "request_id", + ) + ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + assert "clusterId" in jsonified_request + assert jsonified_request["clusterId"] == "cluster_id_value" client = CloudRedisClusterClient( credentials=ga_credentials.AnonymousCredentials(), @@ -4542,9 +5382,10 @@ def test_delete_cluster_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "delete", + "method": "post", "query_params": pb_request, } + transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() @@ -4554,24 +5395,44 @@ def test_delete_cluster_rest_required_fields( response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.delete_cluster(request) + response = client.create_cluster(request) - expected_params = [("$alt", "json;enum-encoding=int")] + expected_params = [ + ( + "clusterId", + "", + ), + ("$alt", "json;enum-encoding=int"), + ] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_delete_cluster_rest_unset_required_fields(): +def test_create_cluster_rest_unset_required_fields(): transport = transports.CloudRedisClusterRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.delete_cluster._get_unset_required_fields({}) - assert set(unset_fields) == (set(("requestId",)) & set(("name",))) + unset_fields = transport.create_cluster._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "clusterId", + "requestId", + ) + ) + & set( + ( + "parent", + "clusterId", + "cluster", + ) + ) + ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_delete_cluster_rest_interceptors(null_interceptor): +def test_create_cluster_rest_interceptors(null_interceptor): transport = transports.CloudRedisClusterRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -4586,14 +5447,14 @@ def test_delete_cluster_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( operation.Operation, "_set_result_from_operation" ), mock.patch.object( - transports.CloudRedisClusterRestInterceptor, "post_delete_cluster" + transports.CloudRedisClusterRestInterceptor, "post_create_cluster" ) as post, mock.patch.object( - transports.CloudRedisClusterRestInterceptor, "pre_delete_cluster" + transports.CloudRedisClusterRestInterceptor, "pre_create_cluster" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = cloud_redis_cluster.DeleteClusterRequest.pb( - cloud_redis_cluster.DeleteClusterRequest() + pb_message = cloud_redis_cluster.CreateClusterRequest.pb( + cloud_redis_cluster.CreateClusterRequest() ) transcode.return_value = { "method": "post", @@ -4609,7 +5470,7 @@ def test_delete_cluster_rest_interceptors(null_interceptor): operations_pb2.Operation() ) - request = cloud_redis_cluster.DeleteClusterRequest() + request = cloud_redis_cluster.CreateClusterRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), @@ -4617,7 +5478,7 @@ def test_delete_cluster_rest_interceptors(null_interceptor): pre.return_value = request, metadata post.return_value = 
operations_pb2.Operation() - client.delete_cluster( + client.create_cluster( request, metadata=[ ("key", "val"), @@ -4629,8 +5490,8 @@ def test_delete_cluster_rest_interceptors(null_interceptor): post.assert_called_once() -def test_delete_cluster_rest_bad_request( - transport: str = "rest", request_type=cloud_redis_cluster.DeleteClusterRequest +def test_create_cluster_rest_bad_request( + transport: str = "rest", request_type=cloud_redis_cluster.CreateClusterRequest ): client = CloudRedisClusterClient( credentials=ga_credentials.AnonymousCredentials(), @@ -4638,7 +5499,7 @@ def test_delete_cluster_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/clusters/sample3"} + request_init = {"parent": "projects/sample1/locations/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -4650,10 +5511,10 @@ def test_delete_cluster_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.delete_cluster(request) + client.create_cluster(request) -def test_delete_cluster_rest_flattened(): +def test_create_cluster_rest_flattened(): client = CloudRedisClusterClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -4665,11 +5526,13 @@ def test_delete_cluster_rest_flattened(): return_value = operations_pb2.Operation(name="operations/spam") # get arguments that satisfy an http rule for this method - sample_request = {"name": "projects/sample1/locations/sample2/clusters/sample3"} + sample_request = {"parent": "projects/sample1/locations/sample2"} # get truthy value for each flattened field mock_args = dict( - name="name_value", + parent="parent_value", + cluster=cloud_redis_cluster.Cluster(name="name_value"), + cluster_id="cluster_id_value", ) mock_args.update(sample_request) @@ -4680,19 +5543,19 @@ def test_delete_cluster_rest_flattened(): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.delete_cluster(**mock_args) + client.create_cluster(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{name=projects/*/locations/*/clusters/*}" % client.transport._host, + "%s/v1/{parent=projects/*/locations/*}/clusters" % client.transport._host, args[1], ) -def test_delete_cluster_rest_flattened_error(transport: str = "rest"): +def test_create_cluster_rest_flattened_error(transport: str = "rest"): client = CloudRedisClusterClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -4701,13 +5564,15 @@ def test_delete_cluster_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.delete_cluster( - cloud_redis_cluster.DeleteClusterRequest(), - name="name_value", + client.create_cluster( + cloud_redis_cluster.CreateClusterRequest(), + parent="parent_value", + cluster=cloud_redis_cluster.Cluster(name="name_value"), + cluster_id="cluster_id_value", ) -def test_delete_cluster_rest_error(): +def test_create_cluster_rest_error(): client = CloudRedisClusterClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -4716,133 +5581,46 @@ def test_delete_cluster_rest_error(): @pytest.mark.parametrize( "request_type", [ - cloud_redis_cluster.CreateClusterRequest, + cloud_redis_cluster.GetClusterCertificateAuthorityRequest, dict, ], ) -def test_create_cluster_rest(request_type): +def test_get_cluster_certificate_authority_rest(request_type): client = CloudRedisClusterClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} - request_init["cluster"] = { - "name": "name_value", - "create_time": {"seconds": 751, "nanos": 543}, - "state": 1, - "uid": "uid_value", - "replica_count": 1384, - "authorization_mode": 1, - "transit_encryption_mode": 1, - "size_gb": 739, - "shard_count": 1178, - "psc_configs": [{"network": "network_value"}], - "discovery_endpoints": [ - {"address": "address_value", "port": 453, "psc_config": {}} - ], - "psc_connections": [ - { - "psc_connection_id": "psc_connection_id_value", - "address": "address_value", - "forwarding_rule": "forwarding_rule_value", - "project_id": "project_id_value", - "network": "network_value", - } - ], - "state_info": { - "update_info": {"target_shard_count": 1920, "target_replica_count": 2126} - }, + request_init = { + "name": "projects/sample1/locations/sample2/clusters/sample3/certificateAuthority" } - # The version of a generated dependency at test runtime may differ from the version used during generation. - # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = cloud_redis_cluster.CreateClusterRequest.meta.fields["cluster"] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. 
- message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["cluster"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - { - "field": field, - "subfield": subfield, - "is_repeated": is_repeated, - } - ) - - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["cluster"][field])): - del request_init["cluster"][field][i][subfield] - else: - del request_init["cluster"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = cloud_redis_cluster.CertificateAuthority( + name="name_value", + ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 + # Convert return value to protobuf type + return_value = cloud_redis_cluster.CertificateAuthority.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.create_cluster(request) + response = client.get_cluster_certificate_authority(request) # Establish that the response is the type that we expect. 
- assert response.operation.name == "operations/spam" + assert isinstance(response, cloud_redis_cluster.CertificateAuthority) + assert response.name == "name_value" -def test_create_cluster_rest_use_cached_wrapped_rpc(): +def test_get_cluster_certificate_authority_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -4856,40 +5634,40 @@ def test_create_cluster_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.create_cluster in client._transport._wrapped_methods + assert ( + client._transport.get_cluster_certificate_authority + in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.create_cluster] = mock_rpc + client._transport._wrapped_methods[ + client._transport.get_cluster_certificate_authority + ] = mock_rpc request = {} - client.create_cluster(request) + client.get_cluster_certificate_authority(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.create_cluster(request) + client.get_cluster_certificate_authority(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_create_cluster_rest_required_fields( - request_type=cloud_redis_cluster.CreateClusterRequest, +def test_get_cluster_certificate_authority_rest_required_fields( + request_type=cloud_redis_cluster.GetClusterCertificateAuthorityRequest, ): transport_class = transports.CloudRedisClusterRestTransport request_init = {} - request_init["parent"] = "" - request_init["cluster_id"] = "" + request_init["name"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -4897,37 +5675,24 @@ def test_create_cluster_rest_required_fields( ) # verify fields with default values are dropped - assert "clusterId" not in jsonified_request unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).create_cluster._get_unset_required_fields(jsonified_request) + ).get_cluster_certificate_authority._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - assert "clusterId" in jsonified_request - assert jsonified_request["clusterId"] == request_init["cluster_id"] - jsonified_request["parent"] = "parent_value" - jsonified_request["clusterId"] = "cluster_id_value" + jsonified_request["name"] = "name_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).create_cluster._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. 
- assert not set(unset_fields) - set( - ( - "cluster_id", - "request_id", - ) - ) + ).get_cluster_certificate_authority._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" - assert "clusterId" in jsonified_request - assert jsonified_request["clusterId"] == "cluster_id_value" + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" client = CloudRedisClusterClient( credentials=ga_credentials.AnonymousCredentials(), @@ -4936,7 +5701,7 @@ def test_create_cluster_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = cloud_redis_cluster.CertificateAuthority() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -4948,57 +5713,41 @@ def test_create_cluster_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "post", + "method": "get", "query_params": pb_request, } - transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = cloud_redis_cluster.CertificateAuthority.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.create_cluster(request) + response = client.get_cluster_certificate_authority(request) - expected_params = [ - ( - "clusterId", - "", - ), - ("$alt", "json;enum-encoding=int"), - ] + expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_create_cluster_rest_unset_required_fields(): +def test_get_cluster_certificate_authority_rest_unset_required_fields(): transport = transports.CloudRedisClusterRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.create_cluster._get_unset_required_fields({}) - assert set(unset_fields) == ( - set( - ( - "clusterId", - "requestId", - ) - ) - & set( - ( - "parent", - "clusterId", - "cluster", - ) - ) + unset_fields = ( + transport.get_cluster_certificate_authority._get_unset_required_fields({}) ) + assert set(unset_fields) == (set(()) & set(("name",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_create_cluster_rest_interceptors(null_interceptor): +def test_get_cluster_certificate_authority_rest_interceptors(null_interceptor): transport = transports.CloudRedisClusterRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -5011,16 +5760,16 @@ def test_create_cluster_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - operation.Operation, "_set_result_from_operation" - ), mock.patch.object( - transports.CloudRedisClusterRestInterceptor, "post_create_cluster" + transports.CloudRedisClusterRestInterceptor, + "post_get_cluster_certificate_authority", ) as post, mock.patch.object( - transports.CloudRedisClusterRestInterceptor, "pre_create_cluster" + 
transports.CloudRedisClusterRestInterceptor, + "pre_get_cluster_certificate_authority", ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = cloud_redis_cluster.CreateClusterRequest.pb( - cloud_redis_cluster.CreateClusterRequest() + pb_message = cloud_redis_cluster.GetClusterCertificateAuthorityRequest.pb( + cloud_redis_cluster.GetClusterCertificateAuthorityRequest() ) transcode.return_value = { "method": "post", @@ -5032,19 +5781,19 @@ def test_create_cluster_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = json_format.MessageToJson( - operations_pb2.Operation() + req.return_value._content = cloud_redis_cluster.CertificateAuthority.to_json( + cloud_redis_cluster.CertificateAuthority() ) - request = cloud_redis_cluster.CreateClusterRequest() + request = cloud_redis_cluster.GetClusterCertificateAuthorityRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() + post.return_value = cloud_redis_cluster.CertificateAuthority() - client.create_cluster( + client.get_cluster_certificate_authority( request, metadata=[ ("key", "val"), @@ -5056,8 +5805,9 @@ def test_create_cluster_rest_interceptors(null_interceptor): post.assert_called_once() -def test_create_cluster_rest_bad_request( - transport: str = "rest", request_type=cloud_redis_cluster.CreateClusterRequest +def test_get_cluster_certificate_authority_rest_bad_request( + transport: str = "rest", + request_type=cloud_redis_cluster.GetClusterCertificateAuthorityRequest, ): client = CloudRedisClusterClient( credentials=ga_credentials.AnonymousCredentials(), @@ -5065,7 +5815,9 @@ def test_create_cluster_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} + request_init = { + "name": "projects/sample1/locations/sample2/clusters/sample3/certificateAuthority" + } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -5077,10 +5829,10 @@ def test_create_cluster_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.create_cluster(request) + client.get_cluster_certificate_authority(request) -def test_create_cluster_rest_flattened(): +def test_get_cluster_certificate_authority_rest_flattened(): client = CloudRedisClusterClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -5089,39 +5841,44 @@ def test_create_cluster_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = operations_pb2.Operation(name="operations/spam") + return_value = cloud_redis_cluster.CertificateAuthority() # get arguments that satisfy an http rule for this method - sample_request = {"parent": "projects/sample1/locations/sample2"} + sample_request = { + "name": "projects/sample1/locations/sample2/clusters/sample3/certificateAuthority" + } # get truthy value for each flattened field mock_args = dict( - parent="parent_value", - cluster=cloud_redis_cluster.Cluster(name="name_value"), - cluster_id="cluster_id_value", + name="name_value", ) mock_args.update(sample_request) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 + # Convert return value to protobuf type + return_value = cloud_redis_cluster.CertificateAuthority.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.create_cluster(**mock_args) + client.get_cluster_certificate_authority(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{parent=projects/*/locations/*}/clusters" % client.transport._host, + "%s/v1/{name=projects/*/locations/*/clusters/*/certificateAuthority}" + % client.transport._host, args[1], ) -def test_create_cluster_rest_flattened_error(transport: str = "rest"): +def test_get_cluster_certificate_authority_rest_flattened_error( + transport: str = "rest", +): client = CloudRedisClusterClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -5130,15 +5887,13 @@ def test_create_cluster_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.create_cluster( - cloud_redis_cluster.CreateClusterRequest(), - parent="parent_value", - cluster=cloud_redis_cluster.Cluster(name="name_value"), - cluster_id="cluster_id_value", + client.get_cluster_certificate_authority( + cloud_redis_cluster.GetClusterCertificateAuthorityRequest(), + name="name_value", ) -def test_create_cluster_rest_error(): +def test_get_cluster_certificate_authority_rest_error(): client = CloudRedisClusterClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -5288,6 +6043,7 @@ def test_cloud_redis_cluster_base_transport(): "update_cluster", "delete_cluster", "create_cluster", + "get_cluster_certificate_authority", "get_location", "list_locations", "get_operation", @@ -5591,6 +6347,9 @@ def test_cloud_redis_cluster_client_transport_session_collision(transport_name): session1 = client1.transport.create_cluster._session session2 = client2.transport.create_cluster._session assert session1 != session2 + session1 = client1.transport.get_cluster_certificate_authority._session + session2 = client2.transport.get_cluster_certificate_authority._session + assert session1 != session2 def test_cloud_redis_cluster_grpc_transport_channel(): @@ -5753,25 +6512,53 @@ def test_cloud_redis_cluster_grpc_lro_async_client(): assert transport.operations_client is transport.operations_client -def test_cluster_path(): +def test_certificate_authority_path(): project = "squid" location = "clam" cluster = "whelk" - expected = "projects/{project}/locations/{location}/clusters/{cluster}".format( + expected = "projects/{project}/locations/{location}/clusters/{cluster}/certificateAuthority".format( project=project, location=location, cluster=cluster, ) - actual = CloudRedisClusterClient.cluster_path(project, location, cluster) + actual = CloudRedisClusterClient.certificate_authority_path( + project, location, cluster + ) assert expected == actual -def test_parse_cluster_path(): +def test_parse_certificate_authority_path(): expected = { "project": "octopus", "location": "oyster", "cluster": "nudibranch", } + path = CloudRedisClusterClient.certificate_authority_path(**expected) + + # Check that the path construction is reversible. + actual = CloudRedisClusterClient.parse_certificate_authority_path(path) + assert expected == actual + + +def test_cluster_path(): + project = "cuttlefish" + location = "mussel" + cluster = "winkle" + expected = "projects/{project}/locations/{location}/clusters/{cluster}".format( + project=project, + location=location, + cluster=cluster, + ) + actual = CloudRedisClusterClient.cluster_path(project, location, cluster) + assert expected == actual + + +def test_parse_cluster_path(): + expected = { + "project": "nautilus", + "location": "scallop", + "cluster": "abalone", + } path = CloudRedisClusterClient.cluster_path(**expected) # Check that the path construction is reversible. 
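The hunk above introduces tests for the new certificateAuthority resource-name helpers and asserts that path construction is reversible. As a minimal, standalone sketch of that round trip only — the helper names and template come from the tests above, but the regex-based parser below is an assumption for illustration, not the generated client's actual implementation:

import re

# Hypothetical stand-ins illustrating the build/parse round trip that
# test_certificate_authority_path and test_parse_certificate_authority_path assert.
_CA_TEMPLATE = (
    "projects/{project}/locations/{location}/clusters/{cluster}/certificateAuthority"
)
_CA_PATTERN = re.compile(
    r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)"
    r"/clusters/(?P<cluster>.+?)/certificateAuthority$"
)

def certificate_authority_path(project: str, location: str, cluster: str) -> str:
    # Build the fully qualified resource name from its components.
    return _CA_TEMPLATE.format(project=project, location=location, cluster=cluster)

def parse_certificate_authority_path(path: str) -> dict:
    # Recover the components; return an empty dict if the path does not match.
    match = _CA_PATTERN.match(path)
    return match.groupdict() if match else {}

# Round trip, mirroring the reversibility check in the tests above.
expected = {"project": "octopus", "location": "oyster", "cluster": "nudibranch"}
assert parse_certificate_authority_path(
    certificate_authority_path(**expected)
) == expected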
@@ -5780,7 +6567,7 @@ def test_parse_cluster_path(): def test_common_billing_account_path(): - billing_account = "cuttlefish" + billing_account = "squid" expected = "billingAccounts/{billing_account}".format( billing_account=billing_account, ) @@ -5790,7 +6577,7 @@ def test_common_billing_account_path(): def test_parse_common_billing_account_path(): expected = { - "billing_account": "mussel", + "billing_account": "clam", } path = CloudRedisClusterClient.common_billing_account_path(**expected) @@ -5800,7 +6587,7 @@ def test_parse_common_billing_account_path(): def test_common_folder_path(): - folder = "winkle" + folder = "whelk" expected = "folders/{folder}".format( folder=folder, ) @@ -5810,7 +6597,7 @@ def test_common_folder_path(): def test_parse_common_folder_path(): expected = { - "folder": "nautilus", + "folder": "octopus", } path = CloudRedisClusterClient.common_folder_path(**expected) @@ -5820,7 +6607,7 @@ def test_parse_common_folder_path(): def test_common_organization_path(): - organization = "scallop" + organization = "oyster" expected = "organizations/{organization}".format( organization=organization, ) @@ -5830,7 +6617,7 @@ def test_common_organization_path(): def test_parse_common_organization_path(): expected = { - "organization": "abalone", + "organization": "nudibranch", } path = CloudRedisClusterClient.common_organization_path(**expected) @@ -5840,7 +6627,7 @@ def test_parse_common_organization_path(): def test_common_project_path(): - project = "squid" + project = "cuttlefish" expected = "projects/{project}".format( project=project, ) @@ -5850,7 +6637,7 @@ def test_common_project_path(): def test_parse_common_project_path(): expected = { - "project": "clam", + "project": "mussel", } path = CloudRedisClusterClient.common_project_path(**expected) @@ -5860,8 +6647,8 @@ def test_parse_common_project_path(): def test_common_location_path(): - project = "whelk" - location = "octopus" + project = "winkle" + location = "nautilus" expected = "projects/{project}/locations/{location}".format( project=project, location=location, @@ -5872,8 +6659,8 @@ def test_common_location_path(): def test_parse_common_location_path(): expected = { - "project": "oyster", - "location": "nudibranch", + "project": "scallop", + "location": "abalone", } path = CloudRedisClusterClient.common_location_path(**expected) diff --git a/packages/google-cloud-redis-cluster/tests/unit/gapic/redis_cluster_v1beta1/test_cloud_redis_cluster.py b/packages/google-cloud-redis-cluster/tests/unit/gapic/redis_cluster_v1beta1/test_cloud_redis_cluster.py index 4999ed891671..8ed0407f51da 100644 --- a/packages/google-cloud-redis-cluster/tests/unit/gapic/redis_cluster_v1beta1/test_cloud_redis_cluster.py +++ b/packages/google-cloud-redis-cluster/tests/unit/gapic/redis_cluster_v1beta1/test_cloud_redis_cluster.py @@ -1583,13 +1583,13 @@ def test_list_clusters_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_clusters(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -1765,6 +1765,9 @@ def test_get_cluster(request_type, transport: str = "grpc"): transit_encryption_mode=cloud_redis_cluster.TransitEncryptionMode.TRANSIT_ENCRYPTION_MODE_DISABLED, size_gb=739, shard_count=1178, + 
node_type=cloud_redis_cluster.NodeType.REDIS_SHARED_CORE_NANO, + precise_size_gb=0.15810000000000002, + deletion_protection_enabled=True, ) response = client.get_cluster(request) @@ -1790,6 +1793,9 @@ def test_get_cluster(request_type, transport: str = "grpc"): ) assert response.size_gb == 739 assert response.shard_count == 1178 + assert response.node_type == cloud_redis_cluster.NodeType.REDIS_SHARED_CORE_NANO + assert math.isclose(response.precise_size_gb, 0.15810000000000002, rel_tol=1e-6) + assert response.deletion_protection_enabled is True def test_get_cluster_empty_call(): @@ -1896,6 +1902,9 @@ async def test_get_cluster_empty_call_async(): transit_encryption_mode=cloud_redis_cluster.TransitEncryptionMode.TRANSIT_ENCRYPTION_MODE_DISABLED, size_gb=739, shard_count=1178, + node_type=cloud_redis_cluster.NodeType.REDIS_SHARED_CORE_NANO, + precise_size_gb=0.15810000000000002, + deletion_protection_enabled=True, ) ) response = await client.get_cluster() @@ -1976,6 +1985,9 @@ async def test_get_cluster_async( transit_encryption_mode=cloud_redis_cluster.TransitEncryptionMode.TRANSIT_ENCRYPTION_MODE_DISABLED, size_gb=739, shard_count=1178, + node_type=cloud_redis_cluster.NodeType.REDIS_SHARED_CORE_NANO, + precise_size_gb=0.15810000000000002, + deletion_protection_enabled=True, ) ) response = await client.get_cluster(request) @@ -2002,6 +2014,9 @@ async def test_get_cluster_async( ) assert response.size_gb == 739 assert response.shard_count == 1178 + assert response.node_type == cloud_redis_cluster.NodeType.REDIS_SHARED_CORE_NANO + assert math.isclose(response.precise_size_gb, 0.15810000000000002, rel_tol=1e-6) + assert response.deletion_protection_enabled is True @pytest.mark.asyncio @@ -3298,6 +3313,399 @@ async def test_create_cluster_flattened_error_async(): ) +@pytest.mark.parametrize( + "request_type", + [ + cloud_redis_cluster.GetClusterCertificateAuthorityRequest, + dict, + ], +) +def test_get_cluster_certificate_authority(request_type, transport: str = "grpc"): + client = CloudRedisClusterClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_cluster_certificate_authority), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = cloud_redis_cluster.CertificateAuthority( + name="name_value", + ) + response = client.get_cluster_certificate_authority(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = cloud_redis_cluster.GetClusterCertificateAuthorityRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, cloud_redis_cluster.CertificateAuthority) + assert response.name == "name_value" + + +def test_get_cluster_certificate_authority_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = CloudRedisClusterClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.get_cluster_certificate_authority), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.get_cluster_certificate_authority() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cloud_redis_cluster.GetClusterCertificateAuthorityRequest() + + +def test_get_cluster_certificate_authority_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = CloudRedisClusterClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = cloud_redis_cluster.GetClusterCertificateAuthorityRequest( + name="name_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_cluster_certificate_authority), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.get_cluster_certificate_authority(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cloud_redis_cluster.GetClusterCertificateAuthorityRequest( + name="name_value", + ) + + +def test_get_cluster_certificate_authority_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = CloudRedisClusterClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.get_cluster_certificate_authority + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.get_cluster_certificate_authority + ] = mock_rpc + request = {} + client.get_cluster_certificate_authority(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.get_cluster_certificate_authority(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_get_cluster_certificate_authority_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = CloudRedisClusterAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_cluster_certificate_authority), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cloud_redis_cluster.CertificateAuthority( + name="name_value", + ) + ) + response = await client.get_cluster_certificate_authority() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cloud_redis_cluster.GetClusterCertificateAuthorityRequest() + + +@pytest.mark.asyncio +async def test_get_cluster_certificate_authority_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = CloudRedisClusterAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.get_cluster_certificate_authority + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[ + client._client._transport.get_cluster_certificate_authority + ] = mock_object + + request = {} + await client.get_cluster_certificate_authority(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + await client.get_cluster_certificate_authority(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + +@pytest.mark.asyncio +async def test_get_cluster_certificate_authority_async( + transport: str = "grpc_asyncio", + request_type=cloud_redis_cluster.GetClusterCertificateAuthorityRequest, +): + client = CloudRedisClusterAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_cluster_certificate_authority), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cloud_redis_cluster.CertificateAuthority( + name="name_value", + ) + ) + response = await client.get_cluster_certificate_authority(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = cloud_redis_cluster.GetClusterCertificateAuthorityRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, cloud_redis_cluster.CertificateAuthority) + assert response.name == "name_value" + + +@pytest.mark.asyncio +async def test_get_cluster_certificate_authority_async_from_dict(): + await test_get_cluster_certificate_authority_async(request_type=dict) + + +def test_get_cluster_certificate_authority_field_headers(): + client = CloudRedisClusterClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = cloud_redis_cluster.GetClusterCertificateAuthorityRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_cluster_certificate_authority), "__call__" + ) as call: + call.return_value = cloud_redis_cluster.CertificateAuthority() + client.get_cluster_certificate_authority(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_cluster_certificate_authority_field_headers_async(): + client = CloudRedisClusterAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloud_redis_cluster.GetClusterCertificateAuthorityRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_cluster_certificate_authority), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cloud_redis_cluster.CertificateAuthority() + ) + await client.get_cluster_certificate_authority(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_get_cluster_certificate_authority_flattened(): + client = CloudRedisClusterClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_cluster_certificate_authority), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = cloud_redis_cluster.CertificateAuthority() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_cluster_certificate_authority( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_get_cluster_certificate_authority_flattened_error(): + client = CloudRedisClusterClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_cluster_certificate_authority( + cloud_redis_cluster.GetClusterCertificateAuthorityRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_get_cluster_certificate_authority_flattened_async(): + client = CloudRedisClusterAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_cluster_certificate_authority), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = cloud_redis_cluster.CertificateAuthority() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cloud_redis_cluster.CertificateAuthority() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_cluster_certificate_authority( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_get_cluster_certificate_authority_flattened_error_async(): + client = CloudRedisClusterAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.get_cluster_certificate_authority( + cloud_redis_cluster.GetClusterCertificateAuthorityRequest(), + name="name_value", + ) + + @pytest.mark.parametrize( "request_type", [ @@ -3305,42 +3713,443 @@ async def test_create_cluster_flattened_error_async(): dict, ], ) -def test_list_clusters_rest(request_type): +def test_list_clusters_rest(request_type): + client = CloudRedisClusterClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = cloud_redis_cluster.ListClustersResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = cloud_redis_cluster.ListClustersResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_clusters(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListClustersPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +def test_list_clusters_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = CloudRedisClusterClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_clusters in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client._transport._wrapped_methods[client._transport.list_clusters] = mock_rpc + + request = {} + client.list_clusters(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_clusters(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_list_clusters_rest_required_fields( + request_type=cloud_redis_cluster.ListClustersRequest, +): + transport_class = transports.CloudRedisClusterRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_clusters._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_clusters._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "page_size", + "page_token", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = CloudRedisClusterClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = cloud_redis_cluster.ListClustersResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = cloud_redis_cluster.ListClustersResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_clusters(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_clusters_rest_unset_required_fields(): + transport = transports.CloudRedisClusterRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.list_clusters._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_clusters_rest_interceptors(null_interceptor): + transport = transports.CloudRedisClusterRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.CloudRedisClusterRestInterceptor(), + ) + client = CloudRedisClusterClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.CloudRedisClusterRestInterceptor, "post_list_clusters" + ) as post, mock.patch.object( + transports.CloudRedisClusterRestInterceptor, "pre_list_clusters" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = cloud_redis_cluster.ListClustersRequest.pb( + cloud_redis_cluster.ListClustersRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = cloud_redis_cluster.ListClustersResponse.to_json( + cloud_redis_cluster.ListClustersResponse() + ) + + request = cloud_redis_cluster.ListClustersRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = cloud_redis_cluster.ListClustersResponse() + + client.list_clusters( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_clusters_rest_bad_request( + transport: str = "rest", request_type=cloud_redis_cluster.ListClustersRequest +): + client = CloudRedisClusterClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
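    # A worked note on the *_unset_required_fields assertion a few lines up: the
    # intersection
    #
    #     set(("pageSize", "pageToken")) & set(("parent",))
    #
    # is the empty set, so the assertion simply requires
    # `_get_unset_required_fields({})` to report nothing for list_clusters.  The
    # two operands appear to be the method's optional query parameters and its
    # required fields respectively, so a non-empty result would indicate a
    # required field that could be left unset.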
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_clusters(request) + + +def test_list_clusters_rest_flattened(): + client = CloudRedisClusterClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = cloud_redis_cluster.ListClustersResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = cloud_redis_cluster.ListClustersResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.list_clusters(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1beta1/{parent=projects/*/locations/*}/clusters" + % client.transport._host, + args[1], + ) + + +def test_list_clusters_rest_flattened_error(transport: str = "rest"): + client = CloudRedisClusterClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_clusters( + cloud_redis_cluster.ListClustersRequest(), + parent="parent_value", + ) + + +def test_list_clusters_rest_pager(transport: str = "rest"): + client = CloudRedisClusterClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
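    # Reading aid for the pager test below: the mocked transport returns four
    # ListClustersResponse pages containing 3, 0, 1 and 2 clusters, chained by
    # next_page_token values "abc", "def", "ghi" and finally "" (an empty token
    # ends pagination).  Iterating the pager therefore yields
    # 3 + 0 + 1 + 2 == 6 Cluster messages, matching the `len(results) == 6`
    # assertion, and the `.pages` loop checks that each raw page still carries
    # its token.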
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + cloud_redis_cluster.ListClustersResponse( + clusters=[ + cloud_redis_cluster.Cluster(), + cloud_redis_cluster.Cluster(), + cloud_redis_cluster.Cluster(), + ], + next_page_token="abc", + ), + cloud_redis_cluster.ListClustersResponse( + clusters=[], + next_page_token="def", + ), + cloud_redis_cluster.ListClustersResponse( + clusters=[ + cloud_redis_cluster.Cluster(), + ], + next_page_token="ghi", + ), + cloud_redis_cluster.ListClustersResponse( + clusters=[ + cloud_redis_cluster.Cluster(), + cloud_redis_cluster.Cluster(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + cloud_redis_cluster.ListClustersResponse.to_json(x) for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "projects/sample1/locations/sample2"} + + pager = client.list_clusters(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, cloud_redis_cluster.Cluster) for i in results) + + pages = list(client.list_clusters(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + cloud_redis_cluster.GetClusterRequest, + dict, + ], +) +def test_get_cluster_rest(request_type): client = CloudRedisClusterClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} + request_init = {"name": "projects/sample1/locations/sample2/clusters/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = cloud_redis_cluster.ListClustersResponse( - next_page_token="next_page_token_value", - unreachable=["unreachable_value"], + return_value = cloud_redis_cluster.Cluster( + name="name_value", + state=cloud_redis_cluster.Cluster.State.CREATING, + uid="uid_value", + replica_count=1384, + authorization_mode=cloud_redis_cluster.AuthorizationMode.AUTH_MODE_IAM_AUTH, + transit_encryption_mode=cloud_redis_cluster.TransitEncryptionMode.TRANSIT_ENCRYPTION_MODE_DISABLED, + size_gb=739, + shard_count=1178, + node_type=cloud_redis_cluster.NodeType.REDIS_SHARED_CORE_NANO, + precise_size_gb=0.15810000000000002, + deletion_protection_enabled=True, ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = cloud_redis_cluster.ListClustersResponse.pb(return_value) + return_value = cloud_redis_cluster.Cluster.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.list_clusters(request) + response = client.get_cluster(request) # Establish that the response is the type that we expect. 
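    # The REST tests in this hunk all fake the HTTP layer the same way: build
    # the expected proto-plus message, convert it with `<Message>.pb(...)`,
    # serialize it via `json_format.MessageToJson`, and plant the bytes on a
    # requests `Response` object (`response_value._content = ...`) that the
    # mocked session returns.  The client then parses that JSON back into the
    # typed response inspected by the assertions that follow.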
- assert isinstance(response, pagers.ListClustersPager) - assert response.next_page_token == "next_page_token_value" - assert response.unreachable == ["unreachable_value"] + assert isinstance(response, cloud_redis_cluster.Cluster) + assert response.name == "name_value" + assert response.state == cloud_redis_cluster.Cluster.State.CREATING + assert response.uid == "uid_value" + assert response.replica_count == 1384 + assert ( + response.authorization_mode + == cloud_redis_cluster.AuthorizationMode.AUTH_MODE_IAM_AUTH + ) + assert ( + response.transit_encryption_mode + == cloud_redis_cluster.TransitEncryptionMode.TRANSIT_ENCRYPTION_MODE_DISABLED + ) + assert response.size_gb == 739 + assert response.shard_count == 1178 + assert response.node_type == cloud_redis_cluster.NodeType.REDIS_SHARED_CORE_NANO + assert math.isclose(response.precise_size_gb, 0.15810000000000002, rel_tol=1e-6) + assert response.deletion_protection_enabled is True -def test_list_clusters_rest_use_cached_wrapped_rpc(): +def test_get_cluster_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -3354,35 +4163,35 @@ def test_list_clusters_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.list_clusters in client._transport._wrapped_methods + assert client._transport.get_cluster in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.list_clusters] = mock_rpc + client._transport._wrapped_methods[client._transport.get_cluster] = mock_rpc request = {} - client.list_clusters(request) + client.get_cluster(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.list_clusters(request) + client.get_cluster(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_list_clusters_rest_required_fields( - request_type=cloud_redis_cluster.ListClustersRequest, +def test_get_cluster_rest_required_fields( + request_type=cloud_redis_cluster.GetClusterRequest, ): transport_class = transports.CloudRedisClusterRestTransport request_init = {} - request_init["parent"] = "" + request_init["name"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -3393,28 +4202,21 @@ def test_list_clusters_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_clusters._get_unset_required_fields(jsonified_request) + ).get_cluster._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["parent"] = "parent_value" + jsonified_request["name"] = "name_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_clusters._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. 
- assert not set(unset_fields) - set( - ( - "page_size", - "page_token", - ) - ) + ).get_cluster._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" client = CloudRedisClusterClient( credentials=ga_credentials.AnonymousCredentials(), @@ -3423,7 +4225,7 @@ def test_list_clusters_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = cloud_redis_cluster.ListClustersResponse() + return_value = cloud_redis_cluster.Cluster() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -3444,38 +4246,30 @@ def test_list_clusters_rest_required_fields( response_value.status_code = 200 # Convert return value to protobuf type - return_value = cloud_redis_cluster.ListClustersResponse.pb(return_value) + return_value = cloud_redis_cluster.Cluster.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.list_clusters(request) + response = client.get_cluster(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_list_clusters_rest_unset_required_fields(): +def test_get_cluster_rest_unset_required_fields(): transport = transports.CloudRedisClusterRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.list_clusters._get_unset_required_fields({}) - assert set(unset_fields) == ( - set( - ( - "pageSize", - "pageToken", - ) - ) - & set(("parent",)) - ) + unset_fields = transport.get_cluster._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_clusters_rest_interceptors(null_interceptor): +def test_get_cluster_rest_interceptors(null_interceptor): transport = transports.CloudRedisClusterRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -3488,14 +4282,14 @@ def test_list_clusters_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.CloudRedisClusterRestInterceptor, "post_list_clusters" + transports.CloudRedisClusterRestInterceptor, "post_get_cluster" ) as post, mock.patch.object( - transports.CloudRedisClusterRestInterceptor, "pre_list_clusters" + transports.CloudRedisClusterRestInterceptor, "pre_get_cluster" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = cloud_redis_cluster.ListClustersRequest.pb( - cloud_redis_cluster.ListClustersRequest() + pb_message = cloud_redis_cluster.GetClusterRequest.pb( + cloud_redis_cluster.GetClusterRequest() ) transcode.return_value = { "method": "post", @@ -3507,19 +4301,19 @@ def test_list_clusters_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = cloud_redis_cluster.ListClustersResponse.to_json( - cloud_redis_cluster.ListClustersResponse() + 
req.return_value._content = cloud_redis_cluster.Cluster.to_json( + cloud_redis_cluster.Cluster() ) - request = cloud_redis_cluster.ListClustersRequest() + request = cloud_redis_cluster.GetClusterRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = cloud_redis_cluster.ListClustersResponse() + post.return_value = cloud_redis_cluster.Cluster() - client.list_clusters( + client.get_cluster( request, metadata=[ ("key", "val"), @@ -3531,8 +4325,8 @@ def test_list_clusters_rest_interceptors(null_interceptor): post.assert_called_once() -def test_list_clusters_rest_bad_request( - transport: str = "rest", request_type=cloud_redis_cluster.ListClustersRequest +def test_get_cluster_rest_bad_request( + transport: str = "rest", request_type=cloud_redis_cluster.GetClusterRequest ): client = CloudRedisClusterClient( credentials=ga_credentials.AnonymousCredentials(), @@ -3540,7 +4334,7 @@ def test_list_clusters_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} + request_init = {"name": "projects/sample1/locations/sample2/clusters/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -3552,10 +4346,10 @@ def test_list_clusters_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.list_clusters(request) + client.get_cluster(request) -def test_list_clusters_rest_flattened(): +def test_get_cluster_rest_flattened(): client = CloudRedisClusterClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -3564,14 +4358,14 @@ def test_list_clusters_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = cloud_redis_cluster.ListClustersResponse() + return_value = cloud_redis_cluster.Cluster() # get arguments that satisfy an http rule for this method - sample_request = {"parent": "projects/sample1/locations/sample2"} + sample_request = {"name": "projects/sample1/locations/sample2/clusters/sample3"} # get truthy value for each flattened field mock_args = dict( - parent="parent_value", + name="name_value", ) mock_args.update(sample_request) @@ -3579,25 +4373,25 @@ def test_list_clusters_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = cloud_redis_cluster.ListClustersResponse.pb(return_value) + return_value = cloud_redis_cluster.Cluster.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.list_clusters(**mock_args) + client.get_cluster(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1beta1/{parent=projects/*/locations/*}/clusters" + "%s/v1beta1/{name=projects/*/locations/*/clusters/*}" % client.transport._host, args[1], ) -def test_list_clusters_rest_flattened_error(transport: str = "rest"): +def test_get_cluster_rest_flattened_error(transport: str = "rest"): client = CloudRedisClusterClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -3606,136 +4400,160 @@ def test_list_clusters_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.list_clusters( - cloud_redis_cluster.ListClustersRequest(), - parent="parent_value", + client.get_cluster( + cloud_redis_cluster.GetClusterRequest(), + name="name_value", ) -def test_list_clusters_rest_pager(transport: str = "rest"): +def test_get_cluster_rest_error(): client = CloudRedisClusterClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. - # with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - cloud_redis_cluster.ListClustersResponse( - clusters=[ - cloud_redis_cluster.Cluster(), - cloud_redis_cluster.Cluster(), - cloud_redis_cluster.Cluster(), - ], - next_page_token="abc", - ), - cloud_redis_cluster.ListClustersResponse( - clusters=[], - next_page_token="def", - ), - cloud_redis_cluster.ListClustersResponse( - clusters=[ - cloud_redis_cluster.Cluster(), - ], - next_page_token="ghi", - ), - cloud_redis_cluster.ListClustersResponse( - clusters=[ - cloud_redis_cluster.Cluster(), - cloud_redis_cluster.Cluster(), - ], - ), - ) - # Two responses for two calls - response = response + response - - # Wrap the values into proper Response objs - response = tuple( - cloud_redis_cluster.ListClustersResponse.to_json(x) for x in response - ) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode("UTF-8") - return_val.status_code = 200 - req.side_effect = return_values - - sample_request = {"parent": "projects/sample1/locations/sample2"} - - pager = client.list_clusters(request=sample_request) - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, cloud_redis_cluster.Cluster) for i in results) - - pages = list(client.list_clusters(request=sample_request).pages) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token - @pytest.mark.parametrize( "request_type", [ - cloud_redis_cluster.GetClusterRequest, + cloud_redis_cluster.UpdateClusterRequest, dict, ], ) -def test_get_cluster_rest(request_type): +def test_update_cluster_rest(request_type): client = CloudRedisClusterClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/clusters/sample3"} + request_init = { + "cluster": {"name": "projects/sample1/locations/sample2/clusters/sample3"} + } + request_init["cluster"] = { + "name": 
"projects/sample1/locations/sample2/clusters/sample3", + "create_time": {"seconds": 751, "nanos": 543}, + "state": 1, + "uid": "uid_value", + "replica_count": 1384, + "authorization_mode": 1, + "transit_encryption_mode": 1, + "size_gb": 739, + "shard_count": 1178, + "psc_configs": [{"network": "network_value"}], + "discovery_endpoints": [ + {"address": "address_value", "port": 453, "psc_config": {}} + ], + "psc_connections": [ + { + "psc_connection_id": "psc_connection_id_value", + "address": "address_value", + "forwarding_rule": "forwarding_rule_value", + "project_id": "project_id_value", + "network": "network_value", + } + ], + "state_info": { + "update_info": {"target_shard_count": 1920, "target_replica_count": 2126} + }, + "node_type": 1, + "persistence_config": { + "mode": 1, + "rdb_config": {"rdb_snapshot_period": 1, "rdb_snapshot_start_time": {}}, + "aof_config": {"append_fsync": 1}, + }, + "redis_configs": {}, + "precise_size_gb": 0.15810000000000002, + "zone_distribution_config": {"mode": 1, "zone": "zone_value"}, + "deletion_protection_enabled": True, + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = cloud_redis_cluster.UpdateClusterRequest.meta.fields["cluster"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["cluster"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = 
subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["cluster"][field])): + del request_init["cluster"][field][i][subfield] + else: + del request_init["cluster"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = cloud_redis_cluster.Cluster( - name="name_value", - state=cloud_redis_cluster.Cluster.State.CREATING, - uid="uid_value", - replica_count=1384, - authorization_mode=cloud_redis_cluster.AuthorizationMode.AUTH_MODE_IAM_AUTH, - transit_encryption_mode=cloud_redis_cluster.TransitEncryptionMode.TRANSIT_ENCRYPTION_MODE_DISABLED, - size_gb=739, - shard_count=1178, - ) + return_value = operations_pb2.Operation(name="operations/spam") # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - # Convert return value to protobuf type - return_value = cloud_redis_cluster.Cluster.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.get_cluster(request) + response = client.update_cluster(request) # Establish that the response is the type that we expect. - assert isinstance(response, cloud_redis_cluster.Cluster) - assert response.name == "name_value" - assert response.state == cloud_redis_cluster.Cluster.State.CREATING - assert response.uid == "uid_value" - assert response.replica_count == 1384 - assert ( - response.authorization_mode - == cloud_redis_cluster.AuthorizationMode.AUTH_MODE_IAM_AUTH - ) - assert ( - response.transit_encryption_mode - == cloud_redis_cluster.TransitEncryptionMode.TRANSIT_ENCRYPTION_MODE_DISABLED - ) - assert response.size_gb == 739 - assert response.shard_count == 1178 + assert response.operation.name == "operations/spam" -def test_get_cluster_rest_use_cached_wrapped_rpc(): +def test_update_cluster_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -3749,35 +4567,38 @@ def test_get_cluster_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.get_cluster in client._transport._wrapped_methods + assert client._transport.update_cluster in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.get_cluster] = mock_rpc + client._transport._wrapped_methods[client._transport.update_cluster] = mock_rpc request = {} - client.get_cluster(request) + client.update_cluster(request) # Establish that the underlying gRPC stub method was called. 
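    # Context for the request_init pruning in test_update_cluster_rest above:
    # the sample "cluster" dict is written against the proto schema used at
    # generation time, but the cloud_redis_cluster dependency installed at test
    # time may be older and lack some nested fields.  get_message_fields()
    # collects the (field, subfield) pairs that actually exist at runtime, and
    # any sample subfield not in that list is deleted so that
    # request_type(**request_init) does not trip over fields the runtime schema
    # does not know about (see the gapic-generator-python issue #1748 link in
    # the generated comments).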
assert mock_rpc.call_count == 1 - client.get_cluster(request) + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.update_cluster(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_get_cluster_rest_required_fields( - request_type=cloud_redis_cluster.GetClusterRequest, +def test_update_cluster_rest_required_fields( + request_type=cloud_redis_cluster.UpdateClusterRequest, ): transport_class = transports.CloudRedisClusterRestTransport request_init = {} - request_init["name"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -3788,21 +4609,24 @@ def test_get_cluster_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_cluster._get_unset_required_fields(jsonified_request) + ).update_cluster._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["name"] = "name_value" - unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_cluster._get_unset_required_fields(jsonified_request) + ).update_cluster._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "request_id", + "update_mask", + ) + ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" client = CloudRedisClusterClient( credentials=ga_credentials.AnonymousCredentials(), @@ -3811,7 +4635,7 @@ def test_get_cluster_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = cloud_redis_cluster.Cluster() + return_value = operations_pb2.Operation(name="operations/spam") # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -3823,39 +4647,50 @@ def test_get_cluster_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "get", + "method": "patch", "query_params": pb_request, } + transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = cloud_redis_cluster.Cluster.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.get_cluster(request) + response = client.update_cluster(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_get_cluster_rest_unset_required_fields(): +def test_update_cluster_rest_unset_required_fields(): transport = transports.CloudRedisClusterRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.get_cluster._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name",))) + unset_fields = transport.update_cluster._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "requestId", + "updateMask", + ) + ) + & set( + ( + "updateMask", + "cluster", + ) + ) + ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_cluster_rest_interceptors(null_interceptor): +def test_update_cluster_rest_interceptors(null_interceptor): transport = transports.CloudRedisClusterRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -3868,14 +4703,16 @@ def test_get_cluster_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.CloudRedisClusterRestInterceptor, "post_get_cluster" + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.CloudRedisClusterRestInterceptor, "post_update_cluster" ) as post, mock.patch.object( - transports.CloudRedisClusterRestInterceptor, "pre_get_cluster" + transports.CloudRedisClusterRestInterceptor, "pre_update_cluster" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = cloud_redis_cluster.GetClusterRequest.pb( - cloud_redis_cluster.GetClusterRequest() + pb_message = cloud_redis_cluster.UpdateClusterRequest.pb( + cloud_redis_cluster.UpdateClusterRequest() ) transcode.return_value = { "method": "post", @@ -3887,19 +4724,19 @@ def test_get_cluster_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = cloud_redis_cluster.Cluster.to_json( - cloud_redis_cluster.Cluster() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() ) - request = cloud_redis_cluster.GetClusterRequest() + request = cloud_redis_cluster.UpdateClusterRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = cloud_redis_cluster.Cluster() + post.return_value = operations_pb2.Operation() - client.get_cluster( + client.update_cluster( request, metadata=[ ("key", "val"), @@ -3911,8 +4748,8 @@ def test_get_cluster_rest_interceptors(null_interceptor): post.assert_called_once() -def 
test_get_cluster_rest_bad_request( - transport: str = "rest", request_type=cloud_redis_cluster.GetClusterRequest +def test_update_cluster_rest_bad_request( + transport: str = "rest", request_type=cloud_redis_cluster.UpdateClusterRequest ): client = CloudRedisClusterClient( credentials=ga_credentials.AnonymousCredentials(), @@ -3920,7 +4757,9 @@ def test_get_cluster_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/clusters/sample3"} + request_init = { + "cluster": {"name": "projects/sample1/locations/sample2/clusters/sample3"} + } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -3932,10 +4771,10 @@ def test_get_cluster_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.get_cluster(request) + client.update_cluster(request) -def test_get_cluster_rest_flattened(): +def test_update_cluster_rest_flattened(): client = CloudRedisClusterClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -3944,40 +4783,41 @@ def test_get_cluster_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = cloud_redis_cluster.Cluster() + return_value = operations_pb2.Operation(name="operations/spam") # get arguments that satisfy an http rule for this method - sample_request = {"name": "projects/sample1/locations/sample2/clusters/sample3"} + sample_request = { + "cluster": {"name": "projects/sample1/locations/sample2/clusters/sample3"} + } # get truthy value for each flattened field mock_args = dict( - name="name_value", + cluster=cloud_redis_cluster.Cluster(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) mock_args.update(sample_request) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - # Convert return value to protobuf type - return_value = cloud_redis_cluster.Cluster.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.get_cluster(**mock_args) + client.update_cluster(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1beta1/{name=projects/*/locations/*/clusters/*}" + "%s/v1beta1/{cluster.name=projects/*/locations/*/clusters/*}" % client.transport._host, args[1], ) -def test_get_cluster_rest_flattened_error(transport: str = "rest"): +def test_update_cluster_rest_flattened_error(transport: str = "rest"): client = CloudRedisClusterClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -3986,13 +4826,14 @@ def test_get_cluster_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.get_cluster( - cloud_redis_cluster.GetClusterRequest(), - name="name_value", + client.update_cluster( + cloud_redis_cluster.UpdateClusterRequest(), + cluster=cloud_redis_cluster.Cluster(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) -def test_get_cluster_rest_error(): +def test_update_cluster_rest_error(): client = CloudRedisClusterClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -4001,114 +4842,18 @@ def test_get_cluster_rest_error(): @pytest.mark.parametrize( "request_type", [ - cloud_redis_cluster.UpdateClusterRequest, + cloud_redis_cluster.DeleteClusterRequest, dict, ], ) -def test_update_cluster_rest(request_type): +def test_delete_cluster_rest(request_type): client = CloudRedisClusterClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = { - "cluster": {"name": "projects/sample1/locations/sample2/clusters/sample3"} - } - request_init["cluster"] = { - "name": "projects/sample1/locations/sample2/clusters/sample3", - "create_time": {"seconds": 751, "nanos": 543}, - "state": 1, - "uid": "uid_value", - "replica_count": 1384, - "authorization_mode": 1, - "transit_encryption_mode": 1, - "size_gb": 739, - "shard_count": 1178, - "psc_configs": [{"network": "network_value"}], - "discovery_endpoints": [ - {"address": "address_value", "port": 453, "psc_config": {}} - ], - "psc_connections": [ - { - "psc_connection_id": "psc_connection_id_value", - "address": "address_value", - "forwarding_rule": "forwarding_rule_value", - "project_id": "project_id_value", - "network": "network_value", - } - ], - "state_info": { - "update_info": {"target_shard_count": 1920, "target_replica_count": 2126} - }, - } - # The version of a generated dependency at test runtime may differ from the version used during generation. - # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = cloud_redis_cluster.UpdateClusterRequest.meta.fields["cluster"] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. 
- message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["cluster"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - { - "field": field, - "subfield": subfield, - "is_repeated": is_repeated, - } - ) - - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["cluster"][field])): - del request_init["cluster"][field][i][subfield] - else: - del request_init["cluster"][field][subfield] + request_init = {"name": "projects/sample1/locations/sample2/clusters/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. @@ -4123,13 +4868,13 @@ def get_message_fields(field): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.update_cluster(request) + response = client.delete_cluster(request) # Establish that the response is the type that we expect. assert response.operation.name == "operations/spam" -def test_update_cluster_rest_use_cached_wrapped_rpc(): +def test_delete_cluster_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -4143,17 +4888,17 @@ def test_update_cluster_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.update_cluster in client._transport._wrapped_methods + assert client._transport.delete_cluster in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client._transport._wrapped_methods[client._transport.update_cluster] = mock_rpc + client._transport._wrapped_methods[client._transport.delete_cluster] = mock_rpc request = {} - client.update_cluster(request) + client.delete_cluster(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 @@ -4162,19 +4907,20 @@ def test_update_cluster_rest_use_cached_wrapped_rpc(): # subsequent calls should use the cached wrapper wrapper_fn.reset_mock() - client.update_cluster(request) + client.delete_cluster(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_update_cluster_rest_required_fields( - request_type=cloud_redis_cluster.UpdateClusterRequest, +def test_delete_cluster_rest_required_fields( + request_type=cloud_redis_cluster.DeleteClusterRequest, ): transport_class = transports.CloudRedisClusterRestTransport request_init = {} + request_init["name"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -4185,24 +4931,23 @@ def test_update_cluster_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).update_cluster._get_unset_required_fields(jsonified_request) + ).delete_cluster._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present + jsonified_request["name"] = "name_value" + unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).update_cluster._get_unset_required_fields(jsonified_request) + ).delete_cluster._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. 
- assert not set(unset_fields) - set( - ( - "request_id", - "update_mask", - ) - ) + assert not set(unset_fields) - set(("request_id",)) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" client = CloudRedisClusterClient( credentials=ga_credentials.AnonymousCredentials(), @@ -4223,10 +4968,9 @@ def test_update_cluster_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "patch", + "method": "delete", "query_params": pb_request, } - transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() @@ -4236,37 +4980,24 @@ def test_update_cluster_rest_required_fields( response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.update_cluster(request) + response = client.delete_cluster(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_update_cluster_rest_unset_required_fields(): - transport = transports.CloudRedisClusterRestTransport( - credentials=ga_credentials.AnonymousCredentials - ) - - unset_fields = transport.update_cluster._get_unset_required_fields({}) - assert set(unset_fields) == ( - set( - ( - "requestId", - "updateMask", - ) - ) - & set( - ( - "updateMask", - "cluster", - ) - ) +def test_delete_cluster_rest_unset_required_fields(): + transport = transports.CloudRedisClusterRestTransport( + credentials=ga_credentials.AnonymousCredentials ) + unset_fields = transport.delete_cluster._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId",)) & set(("name",))) + @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_update_cluster_rest_interceptors(null_interceptor): +def test_delete_cluster_rest_interceptors(null_interceptor): transport = transports.CloudRedisClusterRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -4281,14 +5012,14 @@ def test_update_cluster_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( operation.Operation, "_set_result_from_operation" ), mock.patch.object( - transports.CloudRedisClusterRestInterceptor, "post_update_cluster" + transports.CloudRedisClusterRestInterceptor, "post_delete_cluster" ) as post, mock.patch.object( - transports.CloudRedisClusterRestInterceptor, "pre_update_cluster" + transports.CloudRedisClusterRestInterceptor, "pre_delete_cluster" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = cloud_redis_cluster.UpdateClusterRequest.pb( - cloud_redis_cluster.UpdateClusterRequest() + pb_message = cloud_redis_cluster.DeleteClusterRequest.pb( + cloud_redis_cluster.DeleteClusterRequest() ) transcode.return_value = { "method": "post", @@ -4304,7 +5035,7 @@ def test_update_cluster_rest_interceptors(null_interceptor): operations_pb2.Operation() ) - request = cloud_redis_cluster.UpdateClusterRequest() + request = cloud_redis_cluster.DeleteClusterRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), @@ -4312,7 +5043,7 @@ def test_update_cluster_rest_interceptors(null_interceptor): pre.return_value = request, metadata post.return_value = operations_pb2.Operation() - client.update_cluster( + client.delete_cluster( request, metadata=[ ("key", "val"), @@ -4324,8 +5055,8 @@ def 
test_update_cluster_rest_interceptors(null_interceptor): post.assert_called_once() -def test_update_cluster_rest_bad_request( - transport: str = "rest", request_type=cloud_redis_cluster.UpdateClusterRequest +def test_delete_cluster_rest_bad_request( + transport: str = "rest", request_type=cloud_redis_cluster.DeleteClusterRequest ): client = CloudRedisClusterClient( credentials=ga_credentials.AnonymousCredentials(), @@ -4333,9 +5064,7 @@ def test_update_cluster_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = { - "cluster": {"name": "projects/sample1/locations/sample2/clusters/sample3"} - } + request_init = {"name": "projects/sample1/locations/sample2/clusters/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -4347,10 +5076,10 @@ def test_update_cluster_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.update_cluster(request) + client.delete_cluster(request) -def test_update_cluster_rest_flattened(): +def test_delete_cluster_rest_flattened(): client = CloudRedisClusterClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -4362,14 +5091,11 @@ def test_update_cluster_rest_flattened(): return_value = operations_pb2.Operation(name="operations/spam") # get arguments that satisfy an http rule for this method - sample_request = { - "cluster": {"name": "projects/sample1/locations/sample2/clusters/sample3"} - } + sample_request = {"name": "projects/sample1/locations/sample2/clusters/sample3"} # get truthy value for each flattened field mock_args = dict( - cluster=cloud_redis_cluster.Cluster(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + name="name_value", ) mock_args.update(sample_request) @@ -4380,20 +5106,20 @@ def test_update_cluster_rest_flattened(): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.update_cluster(**mock_args) + client.delete_cluster(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1beta1/{cluster.name=projects/*/locations/*/clusters/*}" + "%s/v1beta1/{name=projects/*/locations/*/clusters/*}" % client.transport._host, args[1], ) -def test_update_cluster_rest_flattened_error(transport: str = "rest"): +def test_delete_cluster_rest_flattened_error(transport: str = "rest"): client = CloudRedisClusterClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -4402,14 +5128,13 @@ def test_update_cluster_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.update_cluster( - cloud_redis_cluster.UpdateClusterRequest(), - cluster=cloud_redis_cluster.Cluster(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + client.delete_cluster( + cloud_redis_cluster.DeleteClusterRequest(), + name="name_value", ) -def test_update_cluster_rest_error(): +def test_delete_cluster_rest_error(): client = CloudRedisClusterClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -4418,18 +5143,122 @@ def test_update_cluster_rest_error(): @pytest.mark.parametrize( "request_type", [ - cloud_redis_cluster.DeleteClusterRequest, + cloud_redis_cluster.CreateClusterRequest, dict, ], ) -def test_delete_cluster_rest(request_type): +def test_create_cluster_rest(request_type): client = CloudRedisClusterClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/clusters/sample3"} + request_init = {"parent": "projects/sample1/locations/sample2"} + request_init["cluster"] = { + "name": "name_value", + "create_time": {"seconds": 751, "nanos": 543}, + "state": 1, + "uid": "uid_value", + "replica_count": 1384, + "authorization_mode": 1, + "transit_encryption_mode": 1, + "size_gb": 739, + "shard_count": 1178, + "psc_configs": [{"network": "network_value"}], + "discovery_endpoints": [ + {"address": "address_value", "port": 453, "psc_config": {}} + ], + "psc_connections": [ + { + "psc_connection_id": "psc_connection_id_value", + "address": "address_value", + "forwarding_rule": "forwarding_rule_value", + "project_id": "project_id_value", + "network": "network_value", + } + ], + "state_info": { + "update_info": {"target_shard_count": 1920, "target_replica_count": 2126} + }, + "node_type": 1, + "persistence_config": { + "mode": 1, + "rdb_config": {"rdb_snapshot_period": 1, "rdb_snapshot_start_time": {}}, + "aof_config": {"append_fsync": 1}, + }, + "redis_configs": {}, + "precise_size_gb": 0.15810000000000002, + "zone_distribution_config": {"mode": 1, "zone": "zone_value"}, + "deletion_protection_enabled": True, + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = cloud_redis_cluster.CreateClusterRequest.meta.fields["cluster"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["cluster"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["cluster"][field])): + del request_init["cluster"][field][i][subfield] + else: + del request_init["cluster"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. @@ -4444,13 +5273,13 @@ def test_delete_cluster_rest(request_type): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.delete_cluster(request) + response = client.create_cluster(request) # Establish that the response is the type that we expect. assert response.operation.name == "operations/spam" -def test_delete_cluster_rest_use_cached_wrapped_rpc(): +def test_create_cluster_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -4464,17 +5293,17 @@ def test_delete_cluster_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.delete_cluster in client._transport._wrapped_methods + assert client._transport.create_cluster in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
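Aside: the generated test above prunes, from the sample `cluster` dict, any nested keys that the protobuf/proto-plus message installed at test time does not define (see gapic-generator-python#1748). Below is a minimal, dependency-free sketch of that pruning idea only; the helper name and sample data are illustrative and are not part of the generated tests.

```python
def prune_unknown_subfields(sample, known_subfields):
    """Drop sub-keys of each field in `sample` that are not known at runtime.

    `sample` maps field name -> dict or list of dicts (like request_init["cluster"]).
    `known_subfields` maps field name -> set of sub-field names the runtime message defines.
    """
    for field, value in sample.items():
        allowed = known_subfields.get(field, set())
        # Treat repeated message fields (lists of dicts) and singular ones uniformly.
        items = value if isinstance(value, list) else [value]
        for item in items:
            if isinstance(item, dict):
                for subfield in list(item):  # copy keys so deletion is safe mid-iteration
                    if subfield not in allowed:
                        del item[subfield]
    return sample


# Example: a sub-field unknown to the runtime message is removed before building the request.
sample = {"psc_configs": [{"network": "n1", "field_added_later": "x"}]}
print(prune_unknown_subfields(sample, {"psc_configs": {"network"}}))
# -> {'psc_configs': [{'network': 'n1'}]}
```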
) - client._transport._wrapped_methods[client._transport.delete_cluster] = mock_rpc + client._transport._wrapped_methods[client._transport.create_cluster] = mock_rpc request = {} - client.delete_cluster(request) + client.create_cluster(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 @@ -4483,20 +5312,21 @@ def test_delete_cluster_rest_use_cached_wrapped_rpc(): # subsequent calls should use the cached wrapper wrapper_fn.reset_mock() - client.delete_cluster(request) + client.create_cluster(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_delete_cluster_rest_required_fields( - request_type=cloud_redis_cluster.DeleteClusterRequest, +def test_create_cluster_rest_required_fields( + request_type=cloud_redis_cluster.CreateClusterRequest, ): transport_class = transports.CloudRedisClusterRestTransport request_init = {} - request_init["name"] = "" + request_init["parent"] = "" + request_init["cluster_id"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -4504,26 +5334,37 @@ def test_delete_cluster_rest_required_fields( ) # verify fields with default values are dropped + assert "clusterId" not in jsonified_request unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).delete_cluster._get_unset_required_fields(jsonified_request) + ).create_cluster._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present + assert "clusterId" in jsonified_request + assert jsonified_request["clusterId"] == request_init["cluster_id"] - jsonified_request["name"] = "name_value" + jsonified_request["parent"] = "parent_value" + jsonified_request["clusterId"] = "cluster_id_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).delete_cluster._get_unset_required_fields(jsonified_request) + ).create_cluster._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. 
- assert not set(unset_fields) - set(("request_id",)) + assert not set(unset_fields) - set( + ( + "cluster_id", + "request_id", + ) + ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + assert "clusterId" in jsonified_request + assert jsonified_request["clusterId"] == "cluster_id_value" client = CloudRedisClusterClient( credentials=ga_credentials.AnonymousCredentials(), @@ -4544,9 +5385,10 @@ def test_delete_cluster_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "delete", + "method": "post", "query_params": pb_request, } + transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() @@ -4556,24 +5398,44 @@ def test_delete_cluster_rest_required_fields( response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.delete_cluster(request) + response = client.create_cluster(request) - expected_params = [("$alt", "json;enum-encoding=int")] + expected_params = [ + ( + "clusterId", + "", + ), + ("$alt", "json;enum-encoding=int"), + ] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_delete_cluster_rest_unset_required_fields(): +def test_create_cluster_rest_unset_required_fields(): transport = transports.CloudRedisClusterRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.delete_cluster._get_unset_required_fields({}) - assert set(unset_fields) == (set(("requestId",)) & set(("name",))) + unset_fields = transport.create_cluster._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "clusterId", + "requestId", + ) + ) + & set( + ( + "parent", + "clusterId", + "cluster", + ) + ) + ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_delete_cluster_rest_interceptors(null_interceptor): +def test_create_cluster_rest_interceptors(null_interceptor): transport = transports.CloudRedisClusterRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -4588,14 +5450,14 @@ def test_delete_cluster_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( operation.Operation, "_set_result_from_operation" ), mock.patch.object( - transports.CloudRedisClusterRestInterceptor, "post_delete_cluster" + transports.CloudRedisClusterRestInterceptor, "post_create_cluster" ) as post, mock.patch.object( - transports.CloudRedisClusterRestInterceptor, "pre_delete_cluster" + transports.CloudRedisClusterRestInterceptor, "pre_create_cluster" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = cloud_redis_cluster.DeleteClusterRequest.pb( - cloud_redis_cluster.DeleteClusterRequest() + pb_message = cloud_redis_cluster.CreateClusterRequest.pb( + cloud_redis_cluster.CreateClusterRequest() ) transcode.return_value = { "method": "post", @@ -4611,7 +5473,7 @@ def test_delete_cluster_rest_interceptors(null_interceptor): operations_pb2.Operation() ) - request = cloud_redis_cluster.DeleteClusterRequest() + request = cloud_redis_cluster.CreateClusterRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), @@ -4619,7 +5481,7 @@ def test_delete_cluster_rest_interceptors(null_interceptor): pre.return_value = request, metadata post.return_value = 
operations_pb2.Operation() - client.delete_cluster( + client.create_cluster( request, metadata=[ ("key", "val"), @@ -4631,8 +5493,8 @@ def test_delete_cluster_rest_interceptors(null_interceptor): post.assert_called_once() -def test_delete_cluster_rest_bad_request( - transport: str = "rest", request_type=cloud_redis_cluster.DeleteClusterRequest +def test_create_cluster_rest_bad_request( + transport: str = "rest", request_type=cloud_redis_cluster.CreateClusterRequest ): client = CloudRedisClusterClient( credentials=ga_credentials.AnonymousCredentials(), @@ -4640,7 +5502,7 @@ def test_delete_cluster_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/clusters/sample3"} + request_init = {"parent": "projects/sample1/locations/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -4652,10 +5514,10 @@ def test_delete_cluster_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.delete_cluster(request) + client.create_cluster(request) -def test_delete_cluster_rest_flattened(): +def test_create_cluster_rest_flattened(): client = CloudRedisClusterClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -4667,11 +5529,13 @@ def test_delete_cluster_rest_flattened(): return_value = operations_pb2.Operation(name="operations/spam") # get arguments that satisfy an http rule for this method - sample_request = {"name": "projects/sample1/locations/sample2/clusters/sample3"} + sample_request = {"parent": "projects/sample1/locations/sample2"} # get truthy value for each flattened field mock_args = dict( - name="name_value", + parent="parent_value", + cluster=cloud_redis_cluster.Cluster(name="name_value"), + cluster_id="cluster_id_value", ) mock_args.update(sample_request) @@ -4682,20 +5546,20 @@ def test_delete_cluster_rest_flattened(): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.delete_cluster(**mock_args) + client.create_cluster(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1beta1/{name=projects/*/locations/*/clusters/*}" + "%s/v1beta1/{parent=projects/*/locations/*}/clusters" % client.transport._host, args[1], ) -def test_delete_cluster_rest_flattened_error(transport: str = "rest"): +def test_create_cluster_rest_flattened_error(transport: str = "rest"): client = CloudRedisClusterClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -4704,13 +5568,15 @@ def test_delete_cluster_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.delete_cluster( - cloud_redis_cluster.DeleteClusterRequest(), - name="name_value", + client.create_cluster( + cloud_redis_cluster.CreateClusterRequest(), + parent="parent_value", + cluster=cloud_redis_cluster.Cluster(name="name_value"), + cluster_id="cluster_id_value", ) -def test_delete_cluster_rest_error(): +def test_create_cluster_rest_error(): client = CloudRedisClusterClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -4719,133 +5585,46 @@ def test_delete_cluster_rest_error(): @pytest.mark.parametrize( "request_type", [ - cloud_redis_cluster.CreateClusterRequest, + cloud_redis_cluster.GetClusterCertificateAuthorityRequest, dict, ], ) -def test_create_cluster_rest(request_type): +def test_get_cluster_certificate_authority_rest(request_type): client = CloudRedisClusterClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} - request_init["cluster"] = { - "name": "name_value", - "create_time": {"seconds": 751, "nanos": 543}, - "state": 1, - "uid": "uid_value", - "replica_count": 1384, - "authorization_mode": 1, - "transit_encryption_mode": 1, - "size_gb": 739, - "shard_count": 1178, - "psc_configs": [{"network": "network_value"}], - "discovery_endpoints": [ - {"address": "address_value", "port": 453, "psc_config": {}} - ], - "psc_connections": [ - { - "psc_connection_id": "psc_connection_id_value", - "address": "address_value", - "forwarding_rule": "forwarding_rule_value", - "project_id": "project_id_value", - "network": "network_value", - } - ], - "state_info": { - "update_info": {"target_shard_count": 1920, "target_replica_count": 2126} - }, + request_init = { + "name": "projects/sample1/locations/sample2/clusters/sample3/certificateAuthority" } - # The version of a generated dependency at test runtime may differ from the version used during generation. - # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = cloud_redis_cluster.CreateClusterRequest.meta.fields["cluster"] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. 
- message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["cluster"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - { - "field": field, - "subfield": subfield, - "is_repeated": is_repeated, - } - ) - - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["cluster"][field])): - del request_init["cluster"][field][i][subfield] - else: - del request_init["cluster"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = cloud_redis_cluster.CertificateAuthority( + name="name_value", + ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 + # Convert return value to protobuf type + return_value = cloud_redis_cluster.CertificateAuthority.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.create_cluster(request) + response = client.get_cluster_certificate_authority(request) # Establish that the response is the type that we expect. 
- assert response.operation.name == "operations/spam" + assert isinstance(response, cloud_redis_cluster.CertificateAuthority) + assert response.name == "name_value" -def test_create_cluster_rest_use_cached_wrapped_rpc(): +def test_get_cluster_certificate_authority_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -4859,40 +5638,40 @@ def test_create_cluster_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.create_cluster in client._transport._wrapped_methods + assert ( + client._transport.get_cluster_certificate_authority + in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.create_cluster] = mock_rpc + client._transport._wrapped_methods[ + client._transport.get_cluster_certificate_authority + ] = mock_rpc request = {} - client.create_cluster(request) + client.get_cluster_certificate_authority(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.create_cluster(request) + client.get_cluster_certificate_authority(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_create_cluster_rest_required_fields( - request_type=cloud_redis_cluster.CreateClusterRequest, +def test_get_cluster_certificate_authority_rest_required_fields( + request_type=cloud_redis_cluster.GetClusterCertificateAuthorityRequest, ): transport_class = transports.CloudRedisClusterRestTransport request_init = {} - request_init["parent"] = "" - request_init["cluster_id"] = "" + request_init["name"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -4900,37 +5679,24 @@ def test_create_cluster_rest_required_fields( ) # verify fields with default values are dropped - assert "clusterId" not in jsonified_request unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).create_cluster._get_unset_required_fields(jsonified_request) + ).get_cluster_certificate_authority._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - assert "clusterId" in jsonified_request - assert jsonified_request["clusterId"] == request_init["cluster_id"] - jsonified_request["parent"] = "parent_value" - jsonified_request["clusterId"] = "cluster_id_value" + jsonified_request["name"] = "name_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).create_cluster._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. 
- assert not set(unset_fields) - set( - ( - "cluster_id", - "request_id", - ) - ) + ).get_cluster_certificate_authority._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" - assert "clusterId" in jsonified_request - assert jsonified_request["clusterId"] == "cluster_id_value" + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" client = CloudRedisClusterClient( credentials=ga_credentials.AnonymousCredentials(), @@ -4939,7 +5705,7 @@ def test_create_cluster_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = cloud_redis_cluster.CertificateAuthority() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -4951,57 +5717,41 @@ def test_create_cluster_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "post", + "method": "get", "query_params": pb_request, } - transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = cloud_redis_cluster.CertificateAuthority.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.create_cluster(request) + response = client.get_cluster_certificate_authority(request) - expected_params = [ - ( - "clusterId", - "", - ), - ("$alt", "json;enum-encoding=int"), - ] + expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_create_cluster_rest_unset_required_fields(): +def test_get_cluster_certificate_authority_rest_unset_required_fields(): transport = transports.CloudRedisClusterRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.create_cluster._get_unset_required_fields({}) - assert set(unset_fields) == ( - set( - ( - "clusterId", - "requestId", - ) - ) - & set( - ( - "parent", - "clusterId", - "cluster", - ) - ) + unset_fields = ( + transport.get_cluster_certificate_authority._get_unset_required_fields({}) ) + assert set(unset_fields) == (set(()) & set(("name",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_create_cluster_rest_interceptors(null_interceptor): +def test_get_cluster_certificate_authority_rest_interceptors(null_interceptor): transport = transports.CloudRedisClusterRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -5014,16 +5764,16 @@ def test_create_cluster_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - operation.Operation, "_set_result_from_operation" - ), mock.patch.object( - transports.CloudRedisClusterRestInterceptor, "post_create_cluster" + transports.CloudRedisClusterRestInterceptor, + "post_get_cluster_certificate_authority", ) as post, mock.patch.object( - transports.CloudRedisClusterRestInterceptor, "pre_create_cluster" + 
transports.CloudRedisClusterRestInterceptor, + "pre_get_cluster_certificate_authority", ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = cloud_redis_cluster.CreateClusterRequest.pb( - cloud_redis_cluster.CreateClusterRequest() + pb_message = cloud_redis_cluster.GetClusterCertificateAuthorityRequest.pb( + cloud_redis_cluster.GetClusterCertificateAuthorityRequest() ) transcode.return_value = { "method": "post", @@ -5035,19 +5785,19 @@ def test_create_cluster_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = json_format.MessageToJson( - operations_pb2.Operation() + req.return_value._content = cloud_redis_cluster.CertificateAuthority.to_json( + cloud_redis_cluster.CertificateAuthority() ) - request = cloud_redis_cluster.CreateClusterRequest() + request = cloud_redis_cluster.GetClusterCertificateAuthorityRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() + post.return_value = cloud_redis_cluster.CertificateAuthority() - client.create_cluster( + client.get_cluster_certificate_authority( request, metadata=[ ("key", "val"), @@ -5059,8 +5809,9 @@ def test_create_cluster_rest_interceptors(null_interceptor): post.assert_called_once() -def test_create_cluster_rest_bad_request( - transport: str = "rest", request_type=cloud_redis_cluster.CreateClusterRequest +def test_get_cluster_certificate_authority_rest_bad_request( + transport: str = "rest", + request_type=cloud_redis_cluster.GetClusterCertificateAuthorityRequest, ): client = CloudRedisClusterClient( credentials=ga_credentials.AnonymousCredentials(), @@ -5068,7 +5819,9 @@ def test_create_cluster_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} + request_init = { + "name": "projects/sample1/locations/sample2/clusters/sample3/certificateAuthority" + } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -5080,10 +5833,10 @@ def test_create_cluster_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.create_cluster(request) + client.get_cluster_certificate_authority(request) -def test_create_cluster_rest_flattened(): +def test_get_cluster_certificate_authority_rest_flattened(): client = CloudRedisClusterClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -5092,40 +5845,44 @@ def test_create_cluster_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = operations_pb2.Operation(name="operations/spam") + return_value = cloud_redis_cluster.CertificateAuthority() # get arguments that satisfy an http rule for this method - sample_request = {"parent": "projects/sample1/locations/sample2"} + sample_request = { + "name": "projects/sample1/locations/sample2/clusters/sample3/certificateAuthority" + } # get truthy value for each flattened field mock_args = dict( - parent="parent_value", - cluster=cloud_redis_cluster.Cluster(name="name_value"), - cluster_id="cluster_id_value", + name="name_value", ) mock_args.update(sample_request) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 + # Convert return value to protobuf type + return_value = cloud_redis_cluster.CertificateAuthority.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.create_cluster(**mock_args) + client.get_cluster_certificate_authority(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1beta1/{parent=projects/*/locations/*}/clusters" + "%s/v1beta1/{name=projects/*/locations/*/clusters/*/certificateAuthority}" % client.transport._host, args[1], ) -def test_create_cluster_rest_flattened_error(transport: str = "rest"): +def test_get_cluster_certificate_authority_rest_flattened_error( + transport: str = "rest", +): client = CloudRedisClusterClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -5134,15 +5891,13 @@ def test_create_cluster_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.create_cluster( - cloud_redis_cluster.CreateClusterRequest(), - parent="parent_value", - cluster=cloud_redis_cluster.Cluster(name="name_value"), - cluster_id="cluster_id_value", + client.get_cluster_certificate_authority( + cloud_redis_cluster.GetClusterCertificateAuthorityRequest(), + name="name_value", ) -def test_create_cluster_rest_error(): +def test_get_cluster_certificate_authority_rest_error(): client = CloudRedisClusterClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -5292,6 +6047,7 @@ def test_cloud_redis_cluster_base_transport(): "update_cluster", "delete_cluster", "create_cluster", + "get_cluster_certificate_authority", "get_location", "list_locations", "get_operation", @@ -5595,6 +6351,9 @@ def test_cloud_redis_cluster_client_transport_session_collision(transport_name): session1 = client1.transport.create_cluster._session session2 = client2.transport.create_cluster._session assert session1 != session2 + session1 = client1.transport.get_cluster_certificate_authority._session + session2 = client2.transport.get_cluster_certificate_authority._session + assert session1 != session2 def test_cloud_redis_cluster_grpc_transport_channel(): @@ -5757,25 +6516,53 @@ def test_cloud_redis_cluster_grpc_lro_async_client(): assert transport.operations_client is transport.operations_client -def test_cluster_path(): +def test_certificate_authority_path(): project = "squid" location = "clam" cluster = "whelk" - expected = "projects/{project}/locations/{location}/clusters/{cluster}".format( + expected = "projects/{project}/locations/{location}/clusters/{cluster}/certificateAuthority".format( project=project, location=location, cluster=cluster, ) - actual = CloudRedisClusterClient.cluster_path(project, location, cluster) + actual = CloudRedisClusterClient.certificate_authority_path( + project, location, cluster + ) assert expected == actual -def test_parse_cluster_path(): +def test_parse_certificate_authority_path(): expected = { "project": "octopus", "location": "oyster", "cluster": "nudibranch", } + path = CloudRedisClusterClient.certificate_authority_path(**expected) + + # Check that the path construction is reversible. + actual = CloudRedisClusterClient.parse_certificate_authority_path(path) + assert expected == actual + + +def test_cluster_path(): + project = "cuttlefish" + location = "mussel" + cluster = "winkle" + expected = "projects/{project}/locations/{location}/clusters/{cluster}".format( + project=project, + location=location, + cluster=cluster, + ) + actual = CloudRedisClusterClient.cluster_path(project, location, cluster) + assert expected == actual + + +def test_parse_cluster_path(): + expected = { + "project": "nautilus", + "location": "scallop", + "cluster": "abalone", + } path = CloudRedisClusterClient.cluster_path(**expected) # Check that the path construction is reversible. 
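Aside: the new path-helper tests assert the `certificateAuthority` resource-name format and that parsing reverses construction. The snippet below is a rough, illustrative build/parse pair that would satisfy the same round-trip check; it is not the generated client's implementation, and the regex and return-`{}`-on-mismatch behavior are assumptions modeled on typical GAPIC path helpers.

```python
import re

_TEMPLATE = (
    "projects/{project}/locations/{location}/clusters/{cluster}/certificateAuthority"
)
_PATTERN = re.compile(
    r"^projects/(?P<project>[^/]+)/locations/(?P<location>[^/]+)"
    r"/clusters/(?P<cluster>[^/]+)/certificateAuthority$"
)


def certificate_authority_path(project: str, location: str, cluster: str) -> str:
    # Mirrors the format string asserted in test_certificate_authority_path.
    return _TEMPLATE.format(project=project, location=location, cluster=cluster)


def parse_certificate_authority_path(path: str) -> dict:
    # Return the captured components, or an empty dict when the path does not match.
    m = _PATTERN.match(path)
    return m.groupdict() if m else {}


# Round trip, matching the reversibility check in test_parse_certificate_authority_path.
expected = {"project": "octopus", "location": "oyster", "cluster": "nudibranch"}
assert parse_certificate_authority_path(certificate_authority_path(**expected)) == expected
```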
@@ -5784,7 +6571,7 @@ def test_parse_cluster_path(): def test_common_billing_account_path(): - billing_account = "cuttlefish" + billing_account = "squid" expected = "billingAccounts/{billing_account}".format( billing_account=billing_account, ) @@ -5794,7 +6581,7 @@ def test_common_billing_account_path(): def test_parse_common_billing_account_path(): expected = { - "billing_account": "mussel", + "billing_account": "clam", } path = CloudRedisClusterClient.common_billing_account_path(**expected) @@ -5804,7 +6591,7 @@ def test_parse_common_billing_account_path(): def test_common_folder_path(): - folder = "winkle" + folder = "whelk" expected = "folders/{folder}".format( folder=folder, ) @@ -5814,7 +6601,7 @@ def test_common_folder_path(): def test_parse_common_folder_path(): expected = { - "folder": "nautilus", + "folder": "octopus", } path = CloudRedisClusterClient.common_folder_path(**expected) @@ -5824,7 +6611,7 @@ def test_parse_common_folder_path(): def test_common_organization_path(): - organization = "scallop" + organization = "oyster" expected = "organizations/{organization}".format( organization=organization, ) @@ -5834,7 +6621,7 @@ def test_common_organization_path(): def test_parse_common_organization_path(): expected = { - "organization": "abalone", + "organization": "nudibranch", } path = CloudRedisClusterClient.common_organization_path(**expected) @@ -5844,7 +6631,7 @@ def test_parse_common_organization_path(): def test_common_project_path(): - project = "squid" + project = "cuttlefish" expected = "projects/{project}".format( project=project, ) @@ -5854,7 +6641,7 @@ def test_common_project_path(): def test_parse_common_project_path(): expected = { - "project": "clam", + "project": "mussel", } path = CloudRedisClusterClient.common_project_path(**expected) @@ -5864,8 +6651,8 @@ def test_parse_common_project_path(): def test_common_location_path(): - project = "whelk" - location = "octopus" + project = "winkle" + location = "nautilus" expected = "projects/{project}/locations/{location}".format( project=project, location=location, @@ -5876,8 +6663,8 @@ def test_common_location_path(): def test_parse_common_location_path(): expected = { - "project": "oyster", - "location": "nudibranch", + "project": "scallop", + "location": "abalone", } path = CloudRedisClusterClient.common_location_path(**expected) diff --git a/packages/google-cloud-redis/tests/unit/gapic/redis_v1/test_cloud_redis.py b/packages/google-cloud-redis/tests/unit/gapic/redis_v1/test_cloud_redis.py index d18897abac83..667cbb2db06c 100644 --- a/packages/google-cloud-redis/tests/unit/gapic/redis_v1/test_cloud_redis.py +++ b/packages/google-cloud-redis/tests/unit/gapic/redis_v1/test_cloud_redis.py @@ -1505,13 +1505,13 @@ def test_list_instances_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_instances(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 diff --git a/packages/google-cloud-redis/tests/unit/gapic/redis_v1beta1/test_cloud_redis.py b/packages/google-cloud-redis/tests/unit/gapic/redis_v1beta1/test_cloud_redis.py index 28a17bf65f26..c20afc47e1af 100644 --- a/packages/google-cloud-redis/tests/unit/gapic/redis_v1beta1/test_cloud_redis.py +++ b/packages/google-cloud-redis/tests/unit/gapic/redis_v1beta1/test_cloud_redis.py @@ 
-1505,13 +1505,13 @@ def test_list_instances_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_instances(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 diff --git a/packages/google-cloud-resource-manager/tests/unit/gapic/resourcemanager_v3/test_folders.py b/packages/google-cloud-resource-manager/tests/unit/gapic/resourcemanager_v3/test_folders.py index be128ba1e9c5..c3958d0bf85e 100644 --- a/packages/google-cloud-resource-manager/tests/unit/gapic/resourcemanager_v3/test_folders.py +++ b/packages/google-cloud-resource-manager/tests/unit/gapic/resourcemanager_v3/test_folders.py @@ -1798,10 +1798,10 @@ def test_list_folders_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () + expected_metadata = () pager = client.list_folders(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -2295,10 +2295,10 @@ def test_search_folders_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () + expected_metadata = () pager = client.search_folders(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 diff --git a/packages/google-cloud-resource-manager/tests/unit/gapic/resourcemanager_v3/test_organizations.py b/packages/google-cloud-resource-manager/tests/unit/gapic/resourcemanager_v3/test_organizations.py index 22f0672a309b..45b21aedab77 100644 --- a/packages/google-cloud-resource-manager/tests/unit/gapic/resourcemanager_v3/test_organizations.py +++ b/packages/google-cloud-resource-manager/tests/unit/gapic/resourcemanager_v3/test_organizations.py @@ -1877,10 +1877,10 @@ def test_search_organizations_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () + expected_metadata = () pager = client.search_organizations(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 diff --git a/packages/google-cloud-resource-manager/tests/unit/gapic/resourcemanager_v3/test_projects.py b/packages/google-cloud-resource-manager/tests/unit/gapic/resourcemanager_v3/test_projects.py index 672902059055..3184549c201e 100644 --- a/packages/google-cloud-resource-manager/tests/unit/gapic/resourcemanager_v3/test_projects.py +++ b/packages/google-cloud-resource-manager/tests/unit/gapic/resourcemanager_v3/test_projects.py @@ -1816,10 +1816,10 @@ def test_list_projects_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () + expected_metadata = () pager = client.list_projects(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -2313,10 +2313,10 @@ def test_search_projects_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () + expected_metadata = () pager = client.search_projects(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 diff --git a/packages/google-cloud-resource-manager/tests/unit/gapic/resourcemanager_v3/test_tag_bindings.py 
b/packages/google-cloud-resource-manager/tests/unit/gapic/resourcemanager_v3/test_tag_bindings.py index 71fc1966da06..e0f4d2d0e1eb 100644 --- a/packages/google-cloud-resource-manager/tests/unit/gapic/resourcemanager_v3/test_tag_bindings.py +++ b/packages/google-cloud-resource-manager/tests/unit/gapic/resourcemanager_v3/test_tag_bindings.py @@ -1458,10 +1458,10 @@ def test_list_tag_bindings_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () + expected_metadata = () pager = client.list_tag_bindings(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -2694,10 +2694,10 @@ def test_list_effective_tags_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () + expected_metadata = () pager = client.list_effective_tags(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 diff --git a/packages/google-cloud-resource-manager/tests/unit/gapic/resourcemanager_v3/test_tag_holds.py b/packages/google-cloud-resource-manager/tests/unit/gapic/resourcemanager_v3/test_tag_holds.py index 7b257c7d61d6..bc2681b87d61 100644 --- a/packages/google-cloud-resource-manager/tests/unit/gapic/resourcemanager_v3/test_tag_holds.py +++ b/packages/google-cloud-resource-manager/tests/unit/gapic/resourcemanager_v3/test_tag_holds.py @@ -2233,13 +2233,13 @@ def test_list_tag_holds_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_tag_holds(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 diff --git a/packages/google-cloud-resource-manager/tests/unit/gapic/resourcemanager_v3/test_tag_keys.py b/packages/google-cloud-resource-manager/tests/unit/gapic/resourcemanager_v3/test_tag_keys.py index ea29786b66b3..17b3e4c5d655 100644 --- a/packages/google-cloud-resource-manager/tests/unit/gapic/resourcemanager_v3/test_tag_keys.py +++ b/packages/google-cloud-resource-manager/tests/unit/gapic/resourcemanager_v3/test_tag_keys.py @@ -1417,10 +1417,10 @@ def test_list_tag_keys_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () + expected_metadata = () pager = client.list_tag_keys(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 diff --git a/packages/google-cloud-resource-manager/tests/unit/gapic/resourcemanager_v3/test_tag_values.py b/packages/google-cloud-resource-manager/tests/unit/gapic/resourcemanager_v3/test_tag_values.py index a98e041ec376..fbf6eeec28a1 100644 --- a/packages/google-cloud-resource-manager/tests/unit/gapic/resourcemanager_v3/test_tag_values.py +++ b/packages/google-cloud-resource-manager/tests/unit/gapic/resourcemanager_v3/test_tag_values.py @@ -1439,10 +1439,10 @@ def test_list_tag_values_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () + expected_metadata = () pager = client.list_tag_values(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 diff --git a/packages/google-cloud-resource-settings/CHANGELOG.md b/packages/google-cloud-resource-settings/CHANGELOG.md index cea659fa64a8..ba5f29546428 100644 
--- a/packages/google-cloud-resource-settings/CHANGELOG.md +++ b/packages/google-cloud-resource-settings/CHANGELOG.md @@ -1,5 +1,12 @@ # Changelog +## [1.9.4](https://github.com/googleapis/google-cloud-python/compare/google-cloud-resource-settings-v1.9.3...google-cloud-resource-settings-v1.9.4) (2024-06-05) + + +### Documentation + +* [google-cloud-resource-settings] Resource Settings is deprecated. As of November 7, 2023, no organizations will be onboarded for any of the enabled settings, and the service will be shut down on October 1, 2024 ([#12766](https://github.com/googleapis/google-cloud-python/issues/12766)) ([d2a2825](https://github.com/googleapis/google-cloud-python/commit/d2a282512c457c5b348aeef118b6ea7df5a2bb6f)) + ## [1.9.3](https://github.com/googleapis/google-cloud-python/compare/google-cloud-resource-settings-v1.9.2...google-cloud-resource-settings-v1.9.3) (2024-03-05) diff --git a/packages/google-cloud-resource-settings/google/cloud/resourcesettings/gapic_version.py b/packages/google-cloud-resource-settings/google/cloud/resourcesettings/gapic_version.py index 558c8aab67c5..1cb991d3a3f6 100644 --- a/packages/google-cloud-resource-settings/google/cloud/resourcesettings/gapic_version.py +++ b/packages/google-cloud-resource-settings/google/cloud/resourcesettings/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.0.0" # {x-release-please-version} +__version__ = "1.9.4" # {x-release-please-version} diff --git a/packages/google-cloud-resource-settings/google/cloud/resourcesettings_v1/gapic_version.py b/packages/google-cloud-resource-settings/google/cloud/resourcesettings_v1/gapic_version.py index 558c8aab67c5..1cb991d3a3f6 100644 --- a/packages/google-cloud-resource-settings/google/cloud/resourcesettings_v1/gapic_version.py +++ b/packages/google-cloud-resource-settings/google/cloud/resourcesettings_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.0.0" # {x-release-please-version} +__version__ = "1.9.4" # {x-release-please-version} diff --git a/packages/google-cloud-resource-settings/google/cloud/resourcesettings_v1/services/resource_settings_service/async_client.py b/packages/google-cloud-resource-settings/google/cloud/resourcesettings_v1/services/resource_settings_service/async_client.py index 89d1a0f34a04..55fca55a416b 100644 --- a/packages/google-cloud-resource-settings/google/cloud/resourcesettings_v1/services/resource_settings_service/async_client.py +++ b/packages/google-cloud-resource-settings/google/cloud/resourcesettings_v1/services/resource_settings_service/async_client.py @@ -65,6 +65,10 @@ class ResourceSettingsServiceAsyncClient: ``parent`` resource is not in a Cloud Organization. For all requests, returns a ``google.rpc.Status`` with ``google.rpc.Code.INVALID_ARGUMENT`` if the request is malformed. + (== deprecation_description Resource Settings is deprecated. As of + November 7, 2023, no organizations will be onboarded for any of the + enabled settings, and the service will be shut down on October 1, + 2024. 
==) """ _client: ResourceSettingsServiceClient diff --git a/packages/google-cloud-resource-settings/google/cloud/resourcesettings_v1/services/resource_settings_service/client.py b/packages/google-cloud-resource-settings/google/cloud/resourcesettings_v1/services/resource_settings_service/client.py index 75d8c8b6f892..6a65bb939826 100644 --- a/packages/google-cloud-resource-settings/google/cloud/resourcesettings_v1/services/resource_settings_service/client.py +++ b/packages/google-cloud-resource-settings/google/cloud/resourcesettings_v1/services/resource_settings_service/client.py @@ -108,6 +108,10 @@ class ResourceSettingsServiceClient(metaclass=ResourceSettingsServiceClientMeta) ``parent`` resource is not in a Cloud Organization. For all requests, returns a ``google.rpc.Status`` with ``google.rpc.Code.INVALID_ARGUMENT`` if the request is malformed. + (== deprecation_description Resource Settings is deprecated. As of + November 7, 2023, no organizations will be onboarded for any of the + enabled settings, and the service will be shut down on October 1, + 2024. ==) """ @staticmethod diff --git a/packages/google-cloud-resource-settings/google/cloud/resourcesettings_v1/services/resource_settings_service/transports/grpc.py b/packages/google-cloud-resource-settings/google/cloud/resourcesettings_v1/services/resource_settings_service/transports/grpc.py index 7f8890e5e4c8..5a935fde5ca1 100644 --- a/packages/google-cloud-resource-settings/google/cloud/resourcesettings_v1/services/resource_settings_service/transports/grpc.py +++ b/packages/google-cloud-resource-settings/google/cloud/resourcesettings_v1/services/resource_settings_service/transports/grpc.py @@ -43,6 +43,10 @@ class ResourceSettingsServiceGrpcTransport(ResourceSettingsServiceTransport): ``parent`` resource is not in a Cloud Organization. For all requests, returns a ``google.rpc.Status`` with ``google.rpc.Code.INVALID_ARGUMENT`` if the request is malformed. + (== deprecation_description Resource Settings is deprecated. As of + November 7, 2023, no organizations will be onboarded for any of the + enabled settings, and the service will be shut down on October 1, + 2024. ==) This class defines the same methods as the primary client, so the primary client can load the underlying transport implementation diff --git a/packages/google-cloud-resource-settings/google/cloud/resourcesettings_v1/services/resource_settings_service/transports/grpc_asyncio.py b/packages/google-cloud-resource-settings/google/cloud/resourcesettings_v1/services/resource_settings_service/transports/grpc_asyncio.py index 9038cdb466cf..bafd8279545e 100644 --- a/packages/google-cloud-resource-settings/google/cloud/resourcesettings_v1/services/resource_settings_service/transports/grpc_asyncio.py +++ b/packages/google-cloud-resource-settings/google/cloud/resourcesettings_v1/services/resource_settings_service/transports/grpc_asyncio.py @@ -46,6 +46,10 @@ class ResourceSettingsServiceGrpcAsyncIOTransport(ResourceSettingsServiceTranspo ``parent`` resource is not in a Cloud Organization. For all requests, returns a ``google.rpc.Status`` with ``google.rpc.Code.INVALID_ARGUMENT`` if the request is malformed. + (== deprecation_description Resource Settings is deprecated. As of + November 7, 2023, no organizations will be onboarded for any of the + enabled settings, and the service will be shut down on October 1, + 2024. 
==) This class defines the same methods as the primary client, so the primary client can load the underlying transport implementation diff --git a/packages/google-cloud-resource-settings/google/cloud/resourcesettings_v1/services/resource_settings_service/transports/rest.py b/packages/google-cloud-resource-settings/google/cloud/resourcesettings_v1/services/resource_settings_service/transports/rest.py index aa2f0dc486c0..6ec6f335405c 100644 --- a/packages/google-cloud-resource-settings/google/cloud/resourcesettings_v1/services/resource_settings_service/transports/rest.py +++ b/packages/google-cloud-resource-settings/google/cloud/resourcesettings_v1/services/resource_settings_service/transports/rest.py @@ -186,6 +186,10 @@ class ResourceSettingsServiceRestTransport(ResourceSettingsServiceTransport): ``parent`` resource is not in a Cloud Organization. For all requests, returns a ``google.rpc.Status`` with ``google.rpc.Code.INVALID_ARGUMENT`` if the request is malformed. + (== deprecation_description Resource Settings is deprecated. As of + November 7, 2023, no organizations will be onboarded for any of the + enabled settings, and the service will be shut down on October 1, + 2024. ==) This class defines the same methods as the primary client, so the primary client can load the underlying transport implementation diff --git a/packages/google-cloud-resource-settings/samples/generated_samples/snippet_metadata_google.cloud.resourcesettings.v1.json b/packages/google-cloud-resource-settings/samples/generated_samples/snippet_metadata_google.cloud.resourcesettings.v1.json index 3465a96568ae..b4fc9a408ed3 100644 --- a/packages/google-cloud-resource-settings/samples/generated_samples/snippet_metadata_google.cloud.resourcesettings.v1.json +++ b/packages/google-cloud-resource-settings/samples/generated_samples/snippet_metadata_google.cloud.resourcesettings.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-resource-settings", - "version": "0.1.0" + "version": "1.9.4" }, "snippets": [ { diff --git a/packages/google-cloud-resource-settings/tests/unit/gapic/resourcesettings_v1/test_resource_settings_service.py b/packages/google-cloud-resource-settings/tests/unit/gapic/resourcesettings_v1/test_resource_settings_service.py index 2a4af7581511..76e01d2c2877 100644 --- a/packages/google-cloud-resource-settings/tests/unit/gapic/resourcesettings_v1/test_resource_settings_service.py +++ b/packages/google-cloud-resource-settings/tests/unit/gapic/resourcesettings_v1/test_resource_settings_service.py @@ -1596,13 +1596,13 @@ def test_list_settings_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_settings(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 diff --git a/packages/google-cloud-retail/CHANGELOG.md b/packages/google-cloud-retail/CHANGELOG.md index 8be9bec95433..a945b07bb901 100644 --- a/packages/google-cloud-retail/CHANGELOG.md +++ b/packages/google-cloud-retail/CHANGELOG.md @@ -1,5 +1,22 @@ # Changelog +## [1.21.0](https://github.com/googleapis/google-cloud-python/compare/google-cloud-retail-v1.20.1...google-cloud-retail-v1.21.0) (2024-06-10) + + +### Features + +* add page_categories to control condition 
([ca7628f](https://github.com/googleapis/google-cloud-python/commit/ca7628f136ef27511ab5426f706e82b7f3999785)) +* add product purge API ([ca7628f](https://github.com/googleapis/google-cloud-python/commit/ca7628f136ef27511ab5426f706e82b7f3999785)) +* allow to skip denylist postfiltering in recommendations ([ca7628f](https://github.com/googleapis/google-cloud-python/commit/ca7628f136ef27511ab5426f706e82b7f3999785)) +* support attribute suggestion in autocomplete ([ca7628f](https://github.com/googleapis/google-cloud-python/commit/ca7628f136ef27511ab5426f706e82b7f3999785)) +* support frequent bought together model config ([ca7628f](https://github.com/googleapis/google-cloud-python/commit/ca7628f136ef27511ab5426f706e82b7f3999785)) +* support merged facets ([ca7628f](https://github.com/googleapis/google-cloud-python/commit/ca7628f136ef27511ab5426f706e82b7f3999785)) + + +### Documentation + +* keep the API doc up-to-date with recent changes ([ca7628f](https://github.com/googleapis/google-cloud-python/commit/ca7628f136ef27511ab5426f706e82b7f3999785)) + ## [1.20.1](https://github.com/googleapis/google-cloud-python/compare/google-cloud-retail-v1.20.0...google-cloud-retail-v1.20.1) (2024-03-05) diff --git a/packages/google-cloud-retail/docs/retail_v2alpha/branch_service.rst b/packages/google-cloud-retail/docs/retail_v2alpha/branch_service.rst new file mode 100644 index 000000000000..8e4c38fac248 --- /dev/null +++ b/packages/google-cloud-retail/docs/retail_v2alpha/branch_service.rst @@ -0,0 +1,6 @@ +BranchService +------------------------------- + +.. automodule:: google.cloud.retail_v2alpha.services.branch_service + :members: + :inherited-members: diff --git a/packages/google-cloud-retail/docs/retail_v2alpha/project_service.rst b/packages/google-cloud-retail/docs/retail_v2alpha/project_service.rst new file mode 100644 index 000000000000..96e35a1b3c5a --- /dev/null +++ b/packages/google-cloud-retail/docs/retail_v2alpha/project_service.rst @@ -0,0 +1,6 @@ +ProjectService +-------------------------------- + +.. 
automodule:: google.cloud.retail_v2alpha.services.project_service + :members: + :inherited-members: diff --git a/packages/google-cloud-retail/docs/retail_v2alpha/services_.rst b/packages/google-cloud-retail/docs/retail_v2alpha/services_.rst index 1cb0a4f0b623..e553f71a69c2 100644 --- a/packages/google-cloud-retail/docs/retail_v2alpha/services_.rst +++ b/packages/google-cloud-retail/docs/retail_v2alpha/services_.rst @@ -4,6 +4,7 @@ Services for Google Cloud Retail v2alpha API :maxdepth: 2 analytics_service + branch_service catalog_service completion_service control_service @@ -11,6 +12,7 @@ Services for Google Cloud Retail v2alpha API model_service prediction_service product_service + project_service search_service serving_config_service user_event_service diff --git a/packages/google-cloud-retail/google/cloud/retail/__init__.py b/packages/google-cloud-retail/google/cloud/retail/__init__.py index 56c1c44fd3f5..4ab2ad33dce8 100644 --- a/packages/google-cloud-retail/google/cloud/retail/__init__.py +++ b/packages/google-cloud-retail/google/cloud/retail/__init__.py @@ -195,6 +195,9 @@ from google.cloud.retail_v2.types.promotion import Promotion from google.cloud.retail_v2.types.purge_config import ( PurgeMetadata, + PurgeProductsMetadata, + PurgeProductsRequest, + PurgeProductsResponse, PurgeUserEventsRequest, PurgeUserEventsResponse, ) @@ -355,6 +358,9 @@ "UpdateProductRequest", "Promotion", "PurgeMetadata", + "PurgeProductsMetadata", + "PurgeProductsRequest", + "PurgeProductsResponse", "PurgeUserEventsRequest", "PurgeUserEventsResponse", "ExperimentInfo", diff --git a/packages/google-cloud-retail/google/cloud/retail/gapic_version.py b/packages/google-cloud-retail/google/cloud/retail/gapic_version.py index 558c8aab67c5..785067d93b3c 100644 --- a/packages/google-cloud-retail/google/cloud/retail/gapic_version.py +++ b/packages/google-cloud-retail/google/cloud/retail/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "0.0.0" # {x-release-please-version} +__version__ = "1.21.0" # {x-release-please-version} diff --git a/packages/google-cloud-retail/google/cloud/retail_v2/__init__.py b/packages/google-cloud-retail/google/cloud/retail_v2/__init__.py index 2d2486e51a4f..b15ab210720f 100644 --- a/packages/google-cloud-retail/google/cloud/retail_v2/__init__.py +++ b/packages/google-cloud-retail/google/cloud/retail_v2/__init__.py @@ -164,6 +164,9 @@ from .types.promotion import Promotion from .types.purge_config import ( PurgeMetadata, + PurgeProductsMetadata, + PurgeProductsRequest, + PurgeProductsResponse, PurgeUserEventsRequest, PurgeUserEventsResponse, ) @@ -297,6 +300,9 @@ "Promotion", "PurchaseTransaction", "PurgeMetadata", + "PurgeProductsMetadata", + "PurgeProductsRequest", + "PurgeProductsResponse", "PurgeUserEventsRequest", "PurgeUserEventsResponse", "Rating", diff --git a/packages/google-cloud-retail/google/cloud/retail_v2/gapic_metadata.json b/packages/google-cloud-retail/google/cloud/retail_v2/gapic_metadata.json index 703f91957548..f9d0979e7517 100644 --- a/packages/google-cloud-retail/google/cloud/retail_v2/gapic_metadata.json +++ b/packages/google-cloud-retail/google/cloud/retail_v2/gapic_metadata.json @@ -579,6 +579,11 @@ "list_products" ] }, + "PurgeProducts": { + "methods": [ + "purge_products" + ] + }, "RemoveFulfillmentPlaces": { "methods": [ "remove_fulfillment_places" @@ -639,6 +644,11 @@ "list_products" ] }, + "PurgeProducts": { + "methods": [ + "purge_products" + ] + }, "RemoveFulfillmentPlaces": { "methods": [ "remove_fulfillment_places" @@ -699,6 +709,11 @@ "list_products" ] }, + "PurgeProducts": { + "methods": [ + "purge_products" + ] + }, "RemoveFulfillmentPlaces": { "methods": [ "remove_fulfillment_places" diff --git a/packages/google-cloud-retail/google/cloud/retail_v2/gapic_version.py b/packages/google-cloud-retail/google/cloud/retail_v2/gapic_version.py index 558c8aab67c5..785067d93b3c 100644 --- a/packages/google-cloud-retail/google/cloud/retail_v2/gapic_version.py +++ b/packages/google-cloud-retail/google/cloud/retail_v2/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "0.0.0" # {x-release-please-version} +__version__ = "1.21.0" # {x-release-please-version} diff --git a/packages/google-cloud-retail/google/cloud/retail_v2/services/product_service/async_client.py b/packages/google-cloud-retail/google/cloud/retail_v2/services/product_service/async_client.py index e51f5e8f1afa..a0c843ff9ffb 100644 --- a/packages/google-cloud-retail/google/cloud/retail_v2/services/product_service/async_client.py +++ b/packages/google-cloud-retail/google/cloud/retail_v2/services/product_service/async_client.py @@ -53,10 +53,10 @@ from google.protobuf import wrappers_pb2 # type: ignore from google.cloud.retail_v2.services.product_service import pagers +from google.cloud.retail_v2.types import product_service, promotion, purge_config from google.cloud.retail_v2.types import common, import_config from google.cloud.retail_v2.types import product from google.cloud.retail_v2.types import product as gcr_product -from google.cloud.retail_v2.types import product_service, promotion from .client import ProductServiceClient from .transports.base import DEFAULT_CLIENT_INFO, ProductServiceTransport @@ -929,6 +929,125 @@ async def sample_delete_product(): metadata=metadata, ) + async def purge_products( + self, + request: Optional[Union[purge_config.PurgeProductsRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Permanently deletes all selected + [Product][google.cloud.retail.v2.Product]s under a branch. + + This process is asynchronous. If the request is valid, the + removal will be enqueued and processed offline. Depending on the + number of [Product][google.cloud.retail.v2.Product]s, this + operation could take hours to complete. Before the operation + completes, some [Product][google.cloud.retail.v2.Product]s may + still be returned by + [ProductService.GetProduct][google.cloud.retail.v2.ProductService.GetProduct] + or + [ProductService.ListProducts][google.cloud.retail.v2.ProductService.ListProducts]. + + Depending on the number of + [Product][google.cloud.retail.v2.Product]s, this operation could + take hours to complete. To get a sample of + [Product][google.cloud.retail.v2.Product]s that would be + deleted, set + [PurgeProductsRequest.force][google.cloud.retail.v2.PurgeProductsRequest.force] + to false. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import retail_v2 + + async def sample_purge_products(): + # Create a client + client = retail_v2.ProductServiceAsyncClient() + + # Initialize request argument(s) + request = retail_v2.PurgeProductsRequest( + parent="parent_value", + filter="filter_value", + ) + + # Make the request + operation = client.purge_products(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.retail_v2.types.PurgeProductsRequest, dict]]): + The request object. 
Request message for PurgeProducts + method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.retail_v2.types.PurgeProductsResponse` Response of the PurgeProductsRequest. If the long running operation is + successfully done, then this message is returned by + the google.longrunning.Operations.response field. + + """ + # Create or coerce a protobuf request object. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, purge_config.PurgeProductsRequest): + request = purge_config.PurgeProductsRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.purge_products + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + purge_config.PurgeProductsResponse, + metadata_type=purge_config.PurgeProductsMetadata, + ) + + # Done; return the response. + return response + async def import_products( self, request: Optional[Union[import_config.ImportProductsRequest, dict]] = None, @@ -1312,10 +1431,11 @@ async def add_fulfillment_places( timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> operation_async.AsyncOperation: - r"""It is recommended to use the + r"""We recommend that you use the [ProductService.AddLocalInventories][google.cloud.retail.v2.ProductService.AddLocalInventories] - method instead of - [ProductService.AddFulfillmentPlaces][google.cloud.retail.v2.ProductService.AddFulfillmentPlaces]. + method instead of the + [ProductService.AddFulfillmentPlaces][google.cloud.retail.v2.ProductService.AddFulfillmentPlaces] + method. [ProductService.AddLocalInventories][google.cloud.retail.v2.ProductService.AddLocalInventories] achieves the same results but provides more fine-grained control over ingesting local inventory data. @@ -1475,10 +1595,11 @@ async def remove_fulfillment_places( timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> operation_async.AsyncOperation: - r"""It is recommended to use the + r"""We recommend that you use the [ProductService.RemoveLocalInventories][google.cloud.retail.v2.ProductService.RemoveLocalInventories] - method instead of - [ProductService.RemoveFulfillmentPlaces][google.cloud.retail.v2.ProductService.RemoveFulfillmentPlaces]. + method instead of the + [ProductService.RemoveFulfillmentPlaces][google.cloud.retail.v2.ProductService.RemoveFulfillmentPlaces] + method. 
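The async implementation above wraps PurgeProducts in a long-running operation whose result type is PurgeProductsResponse and whose metadata type is PurgeProductsMetadata. As the docstring notes, leaving PurgeProductsRequest.force unset (false) only reports a sample of the products that would be deleted. A minimal sketch of such a dry run with the synchronous client, assuming placeholder resource names and filter syntax:

```python
from google.cloud import retail_v2

def preview_purge(branch: str) -> None:
    """Dry-run PurgeProducts: with force left as False, the operation reports a
    sample of matching products instead of deleting them (illustrative values)."""
    client = retail_v2.ProductServiceClient()
    request = retail_v2.PurgeProductsRequest(
        parent=branch,  # projects/*/locations/*/catalogs/*/branches/*
        filter="*",     # placeholder; see the PurgeProductsRequest reference for supported filters
        force=False,    # preview only; set to True to actually delete
    )
    operation = client.purge_products(request=request)
    print(operation.result())  # PurgeProductsResponse
```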
[ProductService.RemoveLocalInventories][google.cloud.retail.v2.ProductService.RemoveLocalInventories] achieves the same results but provides more fine-grained control over ingesting local inventory data. diff --git a/packages/google-cloud-retail/google/cloud/retail_v2/services/product_service/client.py b/packages/google-cloud-retail/google/cloud/retail_v2/services/product_service/client.py index f7c29dc2f9ec..30bf98f2d2db 100644 --- a/packages/google-cloud-retail/google/cloud/retail_v2/services/product_service/client.py +++ b/packages/google-cloud-retail/google/cloud/retail_v2/services/product_service/client.py @@ -58,10 +58,10 @@ from google.protobuf import wrappers_pb2 # type: ignore from google.cloud.retail_v2.services.product_service import pagers +from google.cloud.retail_v2.types import product_service, promotion, purge_config from google.cloud.retail_v2.types import common, import_config from google.cloud.retail_v2.types import product from google.cloud.retail_v2.types import product as gcr_product -from google.cloud.retail_v2.types import product_service, promotion from .transports.base import DEFAULT_CLIENT_INFO, ProductServiceTransport from .transports.grpc import ProductServiceGrpcTransport @@ -1360,6 +1360,123 @@ def sample_delete_product(): metadata=metadata, ) + def purge_products( + self, + request: Optional[Union[purge_config.PurgeProductsRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Permanently deletes all selected + [Product][google.cloud.retail.v2.Product]s under a branch. + + This process is asynchronous. If the request is valid, the + removal will be enqueued and processed offline. Depending on the + number of [Product][google.cloud.retail.v2.Product]s, this + operation could take hours to complete. Before the operation + completes, some [Product][google.cloud.retail.v2.Product]s may + still be returned by + [ProductService.GetProduct][google.cloud.retail.v2.ProductService.GetProduct] + or + [ProductService.ListProducts][google.cloud.retail.v2.ProductService.ListProducts]. + + Depending on the number of + [Product][google.cloud.retail.v2.Product]s, this operation could + take hours to complete. To get a sample of + [Product][google.cloud.retail.v2.Product]s that would be + deleted, set + [PurgeProductsRequest.force][google.cloud.retail.v2.PurgeProductsRequest.force] + to false. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import retail_v2 + + def sample_purge_products(): + # Create a client + client = retail_v2.ProductServiceClient() + + # Initialize request argument(s) + request = retail_v2.PurgeProductsRequest( + parent="parent_value", + filter="filter_value", + ) + + # Make the request + operation = client.purge_products(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.retail_v2.types.PurgeProductsRequest, dict]): + The request object. Request message for PurgeProducts + method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.retail_v2.types.PurgeProductsResponse` Response of the PurgeProductsRequest. If the long running operation is + successfully done, then this message is returned by + the google.longrunning.Operations.response field. + + """ + # Create or coerce a protobuf request object. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, purge_config.PurgeProductsRequest): + request = purge_config.PurgeProductsRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.purge_products] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + purge_config.PurgeProductsResponse, + metadata_type=purge_config.PurgeProductsMetadata, + ) + + # Done; return the response. + return response + def import_products( self, request: Optional[Union[import_config.ImportProductsRequest, dict]] = None, @@ -1738,10 +1855,11 @@ def add_fulfillment_places( timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> operation.Operation: - r"""It is recommended to use the + r"""We recommend that you use the [ProductService.AddLocalInventories][google.cloud.retail.v2.ProductService.AddLocalInventories] - method instead of - [ProductService.AddFulfillmentPlaces][google.cloud.retail.v2.ProductService.AddFulfillmentPlaces]. + method instead of the + [ProductService.AddFulfillmentPlaces][google.cloud.retail.v2.ProductService.AddFulfillmentPlaces] + method. [ProductService.AddLocalInventories][google.cloud.retail.v2.ProductService.AddLocalInventories] achieves the same results but provides more fine-grained control over ingesting local inventory data. 
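Since the add_fulfillment_places documentation above now points callers to AddLocalInventories for finer-grained inventory ingestion, a hedged sketch of the recommended call follows; the product name, place ID, and price are placeholders, and the field set is assumed from the published retail_v2 types:

```python
from google.cloud import retail_v2

def add_store_inventory(product_name: str) -> None:
    """Sketch of AddLocalInventories, the documented replacement for
    AddFulfillmentPlaces; all values below are illustrative."""
    client = retail_v2.ProductServiceClient()
    request = retail_v2.AddLocalInventoriesRequest(
        product=product_name,  # full Product resource name
        local_inventories=[
            retail_v2.LocalInventory(
                place_id="store-123",
                price_info=retail_v2.PriceInfo(price=79.99, currency_code="USD"),
            )
        ],
        allow_missing=True,
    )
    operation = client.add_local_inventories(request=request)
    operation.result()  # wait for the long-running operation to finish
```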
@@ -1898,10 +2016,11 @@ def remove_fulfillment_places( timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> operation.Operation: - r"""It is recommended to use the + r"""We recommend that you use the [ProductService.RemoveLocalInventories][google.cloud.retail.v2.ProductService.RemoveLocalInventories] - method instead of - [ProductService.RemoveFulfillmentPlaces][google.cloud.retail.v2.ProductService.RemoveFulfillmentPlaces]. + method instead of the + [ProductService.RemoveFulfillmentPlaces][google.cloud.retail.v2.ProductService.RemoveFulfillmentPlaces] + method. [ProductService.RemoveLocalInventories][google.cloud.retail.v2.ProductService.RemoveLocalInventories] achieves the same results but provides more fine-grained control over ingesting local inventory data. diff --git a/packages/google-cloud-retail/google/cloud/retail_v2/services/product_service/transports/base.py b/packages/google-cloud-retail/google/cloud/retail_v2/services/product_service/transports/base.py index 5e23fad37a1b..09621f95ac88 100644 --- a/packages/google-cloud-retail/google/cloud/retail_v2/services/product_service/transports/base.py +++ b/packages/google-cloud-retail/google/cloud/retail_v2/services/product_service/transports/base.py @@ -31,7 +31,7 @@ from google.cloud.retail_v2.types import import_config from google.cloud.retail_v2.types import product from google.cloud.retail_v2.types import product as gcr_product -from google.cloud.retail_v2.types import product_service +from google.cloud.retail_v2.types import product_service, purge_config DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( gapic_version=package_version.__version__ @@ -157,6 +157,11 @@ def _prep_wrapped_messages(self, client_info): default_timeout=None, client_info=client_info, ), + self.purge_products: gapic_v1.method.wrap_method( + self.purge_products, + default_timeout=None, + client_info=client_info, + ), self.import_products: gapic_v1.method.wrap_method( self.import_products, default_retry=retries.Retry( @@ -261,6 +266,15 @@ def delete_product( ]: raise NotImplementedError() + @property + def purge_products( + self, + ) -> Callable[ + [purge_config.PurgeProductsRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + @property def import_products( self, diff --git a/packages/google-cloud-retail/google/cloud/retail_v2/services/product_service/transports/grpc.py b/packages/google-cloud-retail/google/cloud/retail_v2/services/product_service/transports/grpc.py index 1ceda4705ac2..1ad4d0002276 100644 --- a/packages/google-cloud-retail/google/cloud/retail_v2/services/product_service/transports/grpc.py +++ b/packages/google-cloud-retail/google/cloud/retail_v2/services/product_service/transports/grpc.py @@ -28,7 +28,7 @@ from google.cloud.retail_v2.types import import_config from google.cloud.retail_v2.types import product from google.cloud.retail_v2.types import product as gcr_product -from google.cloud.retail_v2.types import product_service +from google.cloud.retail_v2.types import product_service, purge_config from .base import DEFAULT_CLIENT_INFO, ProductServiceTransport @@ -387,6 +387,51 @@ def delete_product( ) return self._stubs["delete_product"] + @property + def purge_products( + self, + ) -> Callable[[purge_config.PurgeProductsRequest], operations_pb2.Operation]: + r"""Return a callable for the purge products method over gRPC. + + Permanently deletes all selected + [Product][google.cloud.retail.v2.Product]s under a branch. 
+ + This process is asynchronous. If the request is valid, the + removal will be enqueued and processed offline. Depending on the + number of [Product][google.cloud.retail.v2.Product]s, this + operation could take hours to complete. Before the operation + completes, some [Product][google.cloud.retail.v2.Product]s may + still be returned by + [ProductService.GetProduct][google.cloud.retail.v2.ProductService.GetProduct] + or + [ProductService.ListProducts][google.cloud.retail.v2.ProductService.ListProducts]. + + Depending on the number of + [Product][google.cloud.retail.v2.Product]s, this operation could + take hours to complete. To get a sample of + [Product][google.cloud.retail.v2.Product]s that would be + deleted, set + [PurgeProductsRequest.force][google.cloud.retail.v2.PurgeProductsRequest.force] + to false. + + Returns: + Callable[[~.PurgeProductsRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "purge_products" not in self._stubs: + self._stubs["purge_products"] = self.grpc_channel.unary_unary( + "/google.cloud.retail.v2.ProductService/PurgeProducts", + request_serializer=purge_config.PurgeProductsRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["purge_products"] + @property def import_products( self, @@ -507,10 +552,11 @@ def add_fulfillment_places( ]: r"""Return a callable for the add fulfillment places method over gRPC. - It is recommended to use the + We recommend that you use the [ProductService.AddLocalInventories][google.cloud.retail.v2.ProductService.AddLocalInventories] - method instead of - [ProductService.AddFulfillmentPlaces][google.cloud.retail.v2.ProductService.AddFulfillmentPlaces]. + method instead of the + [ProductService.AddFulfillmentPlaces][google.cloud.retail.v2.ProductService.AddFulfillmentPlaces] + method. [ProductService.AddLocalInventories][google.cloud.retail.v2.ProductService.AddLocalInventories] achieves the same results but provides more fine-grained control over ingesting local inventory data. @@ -565,10 +611,11 @@ def remove_fulfillment_places( ]: r"""Return a callable for the remove fulfillment places method over gRPC. - It is recommended to use the + We recommend that you use the [ProductService.RemoveLocalInventories][google.cloud.retail.v2.ProductService.RemoveLocalInventories] - method instead of - [ProductService.RemoveFulfillmentPlaces][google.cloud.retail.v2.ProductService.RemoveFulfillmentPlaces]. + method instead of the + [ProductService.RemoveFulfillmentPlaces][google.cloud.retail.v2.ProductService.RemoveFulfillmentPlaces] + method. [ProductService.RemoveLocalInventories][google.cloud.retail.v2.ProductService.RemoveLocalInventories] achieves the same results but provides more fine-grained control over ingesting local inventory data. 
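The gRPC transport above builds its PurgeProducts callable lazily and caches it in self._stubs, so later property accesses reuse the same channel stub. The stripped-down sketch below only illustrates that caching pattern; the class is hypothetical and not part of google-cloud-retail:

```python
import grpc

class LazyStubCache:
    """Hypothetical helper mirroring the lazy unary-unary stub caching used by
    the generated transports."""

    def __init__(self, channel: grpc.Channel):
        self._channel = channel
        self._stubs: dict = {}

    def unary_unary(self, name: str, path: str, serializer, deserializer):
        # Create the callable once, then return the cached instance on later calls.
        if name not in self._stubs:
            self._stubs[name] = self._channel.unary_unary(
                path,
                request_serializer=serializer,
                response_deserializer=deserializer,
            )
        return self._stubs[name]
```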
diff --git a/packages/google-cloud-retail/google/cloud/retail_v2/services/product_service/transports/grpc_asyncio.py b/packages/google-cloud-retail/google/cloud/retail_v2/services/product_service/transports/grpc_asyncio.py index cd891c79a9ed..ea98fad70c3e 100644 --- a/packages/google-cloud-retail/google/cloud/retail_v2/services/product_service/transports/grpc_asyncio.py +++ b/packages/google-cloud-retail/google/cloud/retail_v2/services/product_service/transports/grpc_asyncio.py @@ -30,7 +30,7 @@ from google.cloud.retail_v2.types import import_config from google.cloud.retail_v2.types import product from google.cloud.retail_v2.types import product as gcr_product -from google.cloud.retail_v2.types import product_service +from google.cloud.retail_v2.types import product_service, purge_config from .base import DEFAULT_CLIENT_INFO, ProductServiceTransport from .grpc import ProductServiceGrpcTransport @@ -398,6 +398,53 @@ def delete_product( ) return self._stubs["delete_product"] + @property + def purge_products( + self, + ) -> Callable[ + [purge_config.PurgeProductsRequest], Awaitable[operations_pb2.Operation] + ]: + r"""Return a callable for the purge products method over gRPC. + + Permanently deletes all selected + [Product][google.cloud.retail.v2.Product]s under a branch. + + This process is asynchronous. If the request is valid, the + removal will be enqueued and processed offline. Depending on the + number of [Product][google.cloud.retail.v2.Product]s, this + operation could take hours to complete. Before the operation + completes, some [Product][google.cloud.retail.v2.Product]s may + still be returned by + [ProductService.GetProduct][google.cloud.retail.v2.ProductService.GetProduct] + or + [ProductService.ListProducts][google.cloud.retail.v2.ProductService.ListProducts]. + + Depending on the number of + [Product][google.cloud.retail.v2.Product]s, this operation could + take hours to complete. To get a sample of + [Product][google.cloud.retail.v2.Product]s that would be + deleted, set + [PurgeProductsRequest.force][google.cloud.retail.v2.PurgeProductsRequest.force] + to false. + + Returns: + Callable[[~.PurgeProductsRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "purge_products" not in self._stubs: + self._stubs["purge_products"] = self.grpc_channel.unary_unary( + "/google.cloud.retail.v2.ProductService/PurgeProducts", + request_serializer=purge_config.PurgeProductsRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["purge_products"] + @property def import_products( self, @@ -523,10 +570,11 @@ def add_fulfillment_places( ]: r"""Return a callable for the add fulfillment places method over gRPC. - It is recommended to use the + We recommend that you use the [ProductService.AddLocalInventories][google.cloud.retail.v2.ProductService.AddLocalInventories] - method instead of - [ProductService.AddFulfillmentPlaces][google.cloud.retail.v2.ProductService.AddFulfillmentPlaces]. + method instead of the + [ProductService.AddFulfillmentPlaces][google.cloud.retail.v2.ProductService.AddFulfillmentPlaces] + method. 
[ProductService.AddLocalInventories][google.cloud.retail.v2.ProductService.AddLocalInventories] achieves the same results but provides more fine-grained control over ingesting local inventory data. @@ -582,10 +630,11 @@ def remove_fulfillment_places( ]: r"""Return a callable for the remove fulfillment places method over gRPC. - It is recommended to use the + We recommend that you use the [ProductService.RemoveLocalInventories][google.cloud.retail.v2.ProductService.RemoveLocalInventories] - method instead of - [ProductService.RemoveFulfillmentPlaces][google.cloud.retail.v2.ProductService.RemoveFulfillmentPlaces]. + method instead of the + [ProductService.RemoveFulfillmentPlaces][google.cloud.retail.v2.ProductService.RemoveFulfillmentPlaces] + method. [ProductService.RemoveLocalInventories][google.cloud.retail.v2.ProductService.RemoveLocalInventories] achieves the same results but provides more fine-grained control over ingesting local inventory data. @@ -777,6 +826,11 @@ def _prep_wrapped_messages(self, client_info): default_timeout=None, client_info=client_info, ), + self.purge_products: gapic_v1.method_async.wrap_method( + self.purge_products, + default_timeout=None, + client_info=client_info, + ), self.import_products: gapic_v1.method_async.wrap_method( self.import_products, default_retry=retries.AsyncRetry( diff --git a/packages/google-cloud-retail/google/cloud/retail_v2/services/product_service/transports/rest.py b/packages/google-cloud-retail/google/cloud/retail_v2/services/product_service/transports/rest.py index 74f2fe3011a6..2f547d840189 100644 --- a/packages/google-cloud-retail/google/cloud/retail_v2/services/product_service/transports/rest.py +++ b/packages/google-cloud-retail/google/cloud/retail_v2/services/product_service/transports/rest.py @@ -49,7 +49,7 @@ from google.cloud.retail_v2.types import import_config from google.cloud.retail_v2.types import product from google.cloud.retail_v2.types import product as gcr_product -from google.cloud.retail_v2.types import product_service +from google.cloud.retail_v2.types import product_service, purge_config from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO from .base import ProductServiceTransport @@ -128,6 +128,14 @@ def post_list_products(self, response): logging.log(f"Received response: {response}") return response + def pre_purge_products(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_purge_products(self, response): + logging.log(f"Received response: {response}") + return response + def pre_remove_fulfillment_places(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata @@ -312,6 +320,29 @@ def post_list_products( """ return response + def pre_purge_products( + self, + request: purge_config.PurgeProductsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[purge_config.PurgeProductsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for purge_products + + Override in a subclass to manipulate the request or metadata + before they are sent to the ProductService server. + """ + return request, metadata + + def post_purge_products( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for purge_products + + Override in a subclass to manipulate the response + after it is returned by the ProductService server but before + it is returned to user code. 
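Because pre_purge_products and post_purge_products follow the existing interceptor hooks, a subclass can observe or adjust purge traffic without touching client code. A hedged sketch, assuming the generated ProductServiceRestInterceptor and ProductServiceRestTransport names and default credentials:

```python
from google.cloud import retail_v2
from google.cloud.retail_v2.services.product_service.transports.rest import (
    ProductServiceRestInterceptor,
    ProductServiceRestTransport,
)

class PurgeLoggingInterceptor(ProductServiceRestInterceptor):
    """Overrides only the purge hooks added in this change (logging only)."""

    def pre_purge_products(self, request, metadata):
        print(f"purge requested for parent={request.parent}")
        return request, metadata

    def post_purge_products(self, response):
        print(f"purge operation started: {response.name}")
        return response

client = retail_v2.ProductServiceClient(
    transport=ProductServiceRestTransport(interceptor=PurgeLoggingInterceptor())
)
```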
+ """ + return response + def pre_remove_fulfillment_places( self, request: product_service.RemoveFulfillmentPlacesRequest, @@ -1252,6 +1283,100 @@ def __call__( resp = self._interceptor.post_list_products(resp) return resp + class _PurgeProducts(ProductServiceRestStub): + def __hash__(self): + return hash("PurgeProducts") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: purge_config.PurgeProductsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the purge products method over HTTP. + + Args: + request (~.purge_config.PurgeProductsRequest): + The request object. Request message for PurgeProducts + method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v2/{parent=projects/*/locations/*/catalogs/*/branches/*}/products:purge", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_purge_products(request, metadata) + pb_request = purge_config.PurgeProductsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_purge_products(resp) + return resp + class _RemoveFulfillmentPlaces(ProductServiceRestStub): def __hash__(self): return hash("RemoveFulfillmentPlaces") @@ -1700,6 +1825,14 @@ def list_products( # In C++ this would require a dynamic_cast return self._ListProducts(self._session, self._host, self._interceptor) # type: ignore + @property + def purge_products( + self, + ) -> Callable[[purge_config.PurgeProductsRequest], operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._PurgeProducts(self._session, self._host, self._interceptor) # type: ignore + @property def remove_fulfillment_places( self, diff --git a/packages/google-cloud-retail/google/cloud/retail_v2/types/__init__.py b/packages/google-cloud-retail/google/cloud/retail_v2/types/__init__.py index 8f1422781401..8ac7d937c80f 100644 --- a/packages/google-cloud-retail/google/cloud/retail_v2/types/__init__.py +++ b/packages/google-cloud-retail/google/cloud/retail_v2/types/__init__.py @@ -132,7 +132,14 @@ UpdateProductRequest, ) from .promotion import Promotion -from .purge_config import PurgeMetadata, PurgeUserEventsRequest, PurgeUserEventsResponse +from .purge_config import ( + PurgeMetadata, + PurgeProductsMetadata, + PurgeProductsRequest, + PurgeProductsResponse, + PurgeUserEventsRequest, + PurgeUserEventsResponse, +) from .search_service import ExperimentInfo, SearchRequest, SearchResponse from .serving_config import ServingConfig from .serving_config_service import ( @@ -261,6 +268,9 @@ "UpdateProductRequest", "Promotion", "PurgeMetadata", + "PurgeProductsMetadata", + "PurgeProductsRequest", + "PurgeProductsResponse", "PurgeUserEventsRequest", "PurgeUserEventsResponse", "ExperimentInfo", diff --git a/packages/google-cloud-retail/google/cloud/retail_v2/types/catalog.py b/packages/google-cloud-retail/google/cloud/retail_v2/types/catalog.py index 18755fbb58f0..1a2cf0d282d8 100644 --- a/packages/google-cloud-retail/google/cloud/retail_v2/types/catalog.py +++ b/packages/google-cloud-retail/google/cloud/retail_v2/types/catalog.py @@ -17,6 +17,7 @@ from typing import MutableMapping, MutableSequence +from google.protobuf import timestamp_pb2 # type: ignore import proto # type: ignore from google.cloud.retail_v2.types import common, import_config @@ -162,7 +163,10 @@ class CatalogAttribute(proto.Message): faceted, or boosted in [SearchService.Search][google.cloud.retail.v2.SearchService.Search]. - Must be specified, otherwise throws INVALID_FORMAT error. + Must be specified when + [AttributesConfig.attribute_config_level][google.cloud.retail.v2.AttributesConfig.attribute_config_level] + is CATALOG_LEVEL_ATTRIBUTE_CONFIG, otherwise throws + INVALID_FORMAT error. dynamic_facetable_option (google.cloud.retail_v2.types.CatalogAttribute.DynamicFacetableOption): If DYNAMIC_FACETABLE_ENABLED, attribute values are available for dynamic facet. Could only be DYNAMIC_FACETABLE_DISABLED @@ -185,7 +189,10 @@ class CatalogAttribute(proto.Message): as there are no text values associated to numerical attributes. - Must be specified, otherwise throws INVALID_FORMAT error. + Must be specified, when + [AttributesConfig.attribute_config_level][google.cloud.retail.v2.AttributesConfig.attribute_config_level] + is CATALOG_LEVEL_ATTRIBUTE_CONFIG, otherwise throws + INVALID_FORMAT error. exact_searchable_option (google.cloud.retail_v2.types.CatalogAttribute.ExactSearchableOption): If EXACT_SEARCHABLE_ENABLED, attribute values will be exact searchable. This property only applies to textual custom @@ -197,6 +204,8 @@ class CatalogAttribute(proto.Message): the search results. If unset, the server behavior defaults to [RETRIEVABLE_DISABLED][google.cloud.retail.v2.CatalogAttribute.RetrievableOption.RETRIEVABLE_DISABLED]. + facet_config (google.cloud.retail_v2.types.CatalogAttribute.FacetConfig): + Contains facet options. 
""" class AttributeType(proto.Enum): @@ -298,6 +307,213 @@ class RetrievableOption(proto.Enum): RETRIEVABLE_ENABLED = 1 RETRIEVABLE_DISABLED = 2 + class FacetConfig(proto.Message): + r"""Possible options for the facet that corresponds to the + current attribute config. + + Attributes: + facet_intervals (MutableSequence[google.cloud.retail_v2.types.Interval]): + If you don't set the facet + [SearchRequest.FacetSpec.FacetKey.intervals][google.cloud.retail.v2.SearchRequest.FacetSpec.FacetKey.intervals] + in the request to a numerical attribute, then we use the + computed intervals with rounded bounds obtained from all its + product numerical attribute values. The computed intervals + might not be ideal for some attributes. Therefore, we give + you the option to overwrite them with the facet_intervals + field. The maximum of facet intervals per + [CatalogAttribute][google.cloud.retail.v2.CatalogAttribute] + is 40. Each interval must have a lower bound or an upper + bound. If both bounds are provided, then the lower bound + must be smaller or equal than the upper bound. + ignored_facet_values (MutableSequence[google.cloud.retail_v2.types.CatalogAttribute.FacetConfig.IgnoredFacetValues]): + Each instance represents a list of attribute values to + ignore as facet values for a specific time range. The + maximum number of instances per + [CatalogAttribute][google.cloud.retail.v2.CatalogAttribute] + is 25. + merged_facet_values (MutableSequence[google.cloud.retail_v2.types.CatalogAttribute.FacetConfig.MergedFacetValue]): + Each instance replaces a list of facet values by a merged + facet value. If a facet value is not in any list, then it + will stay the same. To avoid conflicts, only paths of length + 1 are accepted. In other words, if "dark_blue" merged into + "BLUE", then the latter can't merge into "blues" because + this would create a path of length 2. The maximum number of + instances of MergedFacetValue per + [CatalogAttribute][google.cloud.retail.v2.CatalogAttribute] + is 100. This feature is available only for textual custom + attributes. + merged_facet (google.cloud.retail_v2.types.CatalogAttribute.FacetConfig.MergedFacet): + Use this field only if you want to merge a + facet key into another facet key. + rerank_config (google.cloud.retail_v2.types.CatalogAttribute.FacetConfig.RerankConfig): + Set this field only if you want to rerank + based on facet values engaged by the user for + the current key. This option is only possible + for custom facetable textual keys. + """ + + class IgnoredFacetValues(proto.Message): + r"""[Facet values][google.cloud.retail.v2.SearchResponse.Facet.values] + to ignore on [facets][google.cloud.retail.v2.SearchResponse.Facet] + during the specified time range for the given + [SearchResponse.Facet.key][google.cloud.retail.v2.SearchResponse.Facet.key] + attribute. + + Attributes: + values (MutableSequence[str]): + List of facet values to ignore for the + following time range. The facet values are the + same as the attribute values. There is a limit + of 10 values per instance of IgnoredFacetValues. + Each value can have at most 128 characters. + start_time (google.protobuf.timestamp_pb2.Timestamp): + Time range for the current list of facet + values to ignore. If multiple time ranges are + specified for an facet value for the current + attribute, consider all of them. If both are + empty, ignore always. If start time and end time + are set, then start time must be before end + time. 
+ If start time is not empty and end time is + empty, then will ignore these facet values after + the start time. + end_time (google.protobuf.timestamp_pb2.Timestamp): + If start time is empty and end time is not + empty, then ignore these facet values before end + time. + """ + + values: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=1, + ) + start_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + end_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=3, + message=timestamp_pb2.Timestamp, + ) + + class MergedFacetValue(proto.Message): + r"""Replaces a set of textual facet values by the same (possibly + different) merged facet value. Each facet value should appear at + most once as a value per + [CatalogAttribute][google.cloud.retail.v2.CatalogAttribute]. This + feature is available only for textual custom attributes. + + Attributes: + values (MutableSequence[str]): + All the facet values that are replaces by the same + [merged_value][google.cloud.retail.v2.CatalogAttribute.FacetConfig.MergedFacetValue.merged_value] + that follows. The maximum number of values per + MergedFacetValue is 25. Each value can have up to 128 + characters. + merged_value (str): + All the previous values are replaced by this merged facet + value. This merged_value must be non-empty and can have up + to 128 characters. + """ + + values: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=1, + ) + merged_value: str = proto.Field( + proto.STRING, + number=2, + ) + + class MergedFacet(proto.Message): + r"""The current facet key (i.e. attribute config) maps into the + [merged_facet_key][google.cloud.retail.v2.CatalogAttribute.FacetConfig.MergedFacet.merged_facet_key]. + A facet key can have at most one child. The current facet key and + the merged facet key need both to be textual custom attributes or + both numerical custom attributes (same type). + + Attributes: + merged_facet_key (str): + The merged facet key should be a valid facet + key that is different than the facet key of the + current catalog attribute. We refer this is + merged facet key as the child of the current + catalog attribute. This merged facet key can't + be a parent of another facet key (i.e. no + directed path of length 2). This merged facet + key needs to be either a textual custom + attribute or a numerical custom attribute. + """ + + merged_facet_key: str = proto.Field( + proto.STRING, + number=1, + ) + + class RerankConfig(proto.Message): + r"""Options to rerank based on facet values engaged by the user for the + current key. That key needs to be a custom textual key and + facetable. To use this control, you also need to pass all the facet + keys engaged by the user in the request using the field + [SearchRequest.FacetSpec]. In particular, if you don't pass the + facet keys engaged that you want to rerank on, this control won't be + effective. Moreover, to obtain better results, the facet values that + you want to rerank on should be close to English (ideally made of + words, underscores, and spaces). + + Attributes: + rerank_facet (bool): + If set to true, then we also rerank the + dynamic facets based on the facet values engaged + by the user for the current attribute key during + serving. + facet_values (MutableSequence[str]): + If empty, rerank on all facet values for the + current key. Otherwise, will rerank on the facet + values from this list only. 
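Putting the FacetConfig pieces described above together, a textual custom attribute could merge stray facet values and rerank on values the user engaged with. A hedged sketch; the attribute key and facet values are placeholders:

```python
from google.cloud import retail_v2

# Illustrative only: merge "dark_blue" and "navy" into "BLUE" and rerank the
# facet on values engaged by the user.
attribute = retail_v2.CatalogAttribute(
    key="attributes.colorFamily",
    facet_config=retail_v2.CatalogAttribute.FacetConfig(
        merged_facet_values=[
            retail_v2.CatalogAttribute.FacetConfig.MergedFacetValue(
                values=["dark_blue", "navy"],
                merged_value="BLUE",
            )
        ],
        rerank_config=retail_v2.CatalogAttribute.FacetConfig.RerankConfig(
            rerank_facet=True,
        ),
    ),
)
```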
+ """ + + rerank_facet: bool = proto.Field( + proto.BOOL, + number=1, + ) + facet_values: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=2, + ) + + facet_intervals: MutableSequence[common.Interval] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=common.Interval, + ) + ignored_facet_values: MutableSequence[ + "CatalogAttribute.FacetConfig.IgnoredFacetValues" + ] = proto.RepeatedField( + proto.MESSAGE, + number=2, + message="CatalogAttribute.FacetConfig.IgnoredFacetValues", + ) + merged_facet_values: MutableSequence[ + "CatalogAttribute.FacetConfig.MergedFacetValue" + ] = proto.RepeatedField( + proto.MESSAGE, + number=3, + message="CatalogAttribute.FacetConfig.MergedFacetValue", + ) + merged_facet: "CatalogAttribute.FacetConfig.MergedFacet" = proto.Field( + proto.MESSAGE, + number=4, + message="CatalogAttribute.FacetConfig.MergedFacet", + ) + rerank_config: "CatalogAttribute.FacetConfig.RerankConfig" = proto.Field( + proto.MESSAGE, + number=5, + message="CatalogAttribute.FacetConfig.RerankConfig", + ) + key: str = proto.Field( proto.STRING, number=1, @@ -336,6 +552,11 @@ class RetrievableOption(proto.Enum): number=12, enum=RetrievableOption, ) + facet_config: FacetConfig = proto.Field( + proto.MESSAGE, + number=13, + message=FacetConfig, + ) class AttributesConfig(proto.Message): @@ -428,7 +649,7 @@ class CompletionConfig(proto.Message): Can use [GetOperation][google.longrunning.Operations.GetOperation] - API to retrieve the latest state of the Long Running + API method to retrieve the latest state of the Long Running Operation. denylist_input_config (google.cloud.retail_v2.types.CompletionDataInputConfig): Output only. The source data for the latest diff --git a/packages/google-cloud-retail/google/cloud/retail_v2/types/common.py b/packages/google-cloud-retail/google/cloud/retail_v2/types/common.py index 78379e96b85b..5f0ffcdcb277 100644 --- a/packages/google-cloud-retail/google/cloud/retail_v2/types/common.py +++ b/packages/google-cloud-retail/google/cloud/retail_v2/types/common.py @@ -137,6 +137,11 @@ class Condition(proto.Message): Range of time(s) specifying when Condition is active. Condition true if any time range matches. + page_categories (MutableSequence[str]): + Used to support browse uses cases. A list (up to 10 entries) + of categories or departments. The format should be the same + as + [UserEvent.page_categories][google.cloud.retail.v2.UserEvent.page_categories]; """ class QueryTerm(proto.Message): @@ -197,6 +202,10 @@ class TimeRange(proto.Message): number=3, message=TimeRange, ) + page_categories: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=4, + ) class Rule(proto.Message): @@ -251,6 +260,16 @@ class Rule(proto.Message): Treats a set of terms as synonyms of one another. + This field is a member of `oneof`_ ``action``. + force_return_facet_action (google.cloud.retail_v2.types.Rule.ForceReturnFacetAction): + Force returns an attribute as a facet in the + request. + + This field is a member of `oneof`_ ``action``. + remove_facet_action (google.cloud.retail_v2.types.Rule.RemoveFacetAction): + Remove an attribute as a facet in the request + (if present). + This field is a member of `oneof`_ ``action``. condition (google.cloud.retail_v2.types.Condition): Required. The condition that triggers the @@ -322,6 +341,7 @@ class FilterAction(proto.Message): provided with the SearchRequest. The AND operator is used to combine the query's existing filters with the filter rule(s). NOTE: May result in 0 results when filters conflict. 
+ - Action Result: Filters the returned objects to be ONLY those that passed the filter. @@ -334,8 +354,8 @@ class FilterAction(proto.Message): must be set. - Filter syntax is identical to [SearchRequest.filter][google.cloud.retail.v2.SearchRequest.filter]. - See more details at the Retail Search `user - guide `__. + For more information, see + `Filter `__. - To filter products with product ID "product_1" or "product_2", and color "Red" or "Blue": *(id: ANY("product_1", "product_2"))* *AND* *(colorFamilies: @@ -350,11 +370,8 @@ class FilterAction(proto.Message): class RedirectAction(proto.Message): r"""Redirects a shopper to a specific page. - - Rule Condition: - - - Must specify - [Condition.query_terms][google.cloud.retail.v2.Condition.query_terms]. - + - Rule Condition: Must specify + [Condition.query_terms][google.cloud.retail.v2.Condition.query_terms]. - Action Input: Request Query - Action Result: Redirects shopper to provided uri. @@ -494,6 +511,108 @@ class IgnoreAction(proto.Message): number=1, ) + class ForceReturnFacetAction(proto.Message): + r"""Force returns an attribute/facet in the request around a certain + position or above. + + - Rule Condition: Must specify non-empty + [Condition.query_terms][google.cloud.retail.v2.Condition.query_terms] + (for search only) or + [Condition.page_categories][google.cloud.retail.v2.Condition.page_categories] + (for browse only), but can't specify both. + + - Action Inputs: attribute name, position + + - Action Result: Will force return a facet key around a certain + position or above if the condition is satisfied. + + Example: Suppose the query is "shoes", the + [Condition.query_terms][google.cloud.retail.v2.Condition.query_terms] + is "shoes", the + [ForceReturnFacetAction.FacetPositionAdjustment.attribute_name][google.cloud.retail.v2.Rule.ForceReturnFacetAction.FacetPositionAdjustment.attribute_name] + is "size" and the + [ForceReturnFacetAction.FacetPositionAdjustment.position][google.cloud.retail.v2.Rule.ForceReturnFacetAction.FacetPositionAdjustment.position] + is 8. + + Two cases: a) The facet key "size" is not already in the top 8 + slots, then the facet "size" will appear at a position close to 8. + b) The facet key "size" in among the top 8 positions in the request, + then it will stay at its current rank. + + Attributes: + facet_position_adjustments (MutableSequence[google.cloud.retail_v2.types.Rule.ForceReturnFacetAction.FacetPositionAdjustment]): + Each instance corresponds to a force return + attribute for the given condition. There can't + be more 3 instances here. + """ + + class FacetPositionAdjustment(proto.Message): + r"""Each facet position adjustment consists of a single attribute + name (i.e. facet key) along with a specified position. + + Attributes: + attribute_name (str): + The attribute name to force return as a + facet. Each attribute name should be a valid + attribute name, be non-empty and contain at most + 80 characters long. + position (int): + This is the position in the request as + explained above. It should be strictly positive + be at most 100. 
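The ForceReturnFacetAction and Condition.page_categories additions combine naturally into a browse-only rule. A hedged sketch; the page category and facet key are placeholders:

```python
from google.cloud import retail_v2

# Illustrative only: for browse traffic on one page category, force the "size"
# facet to appear at or around position 8.
rule = retail_v2.Rule(
    condition=retail_v2.Condition(
        page_categories=["Apparel > Shoes"],
    ),
    force_return_facet_action=retail_v2.Rule.ForceReturnFacetAction(
        facet_position_adjustments=[
            retail_v2.Rule.ForceReturnFacetAction.FacetPositionAdjustment(
                attribute_name="size",
                position=8,
            )
        ]
    ),
)
```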
+ """ + + attribute_name: str = proto.Field( + proto.STRING, + number=1, + ) + position: int = proto.Field( + proto.INT32, + number=2, + ) + + facet_position_adjustments: MutableSequence[ + "Rule.ForceReturnFacetAction.FacetPositionAdjustment" + ] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="Rule.ForceReturnFacetAction.FacetPositionAdjustment", + ) + + class RemoveFacetAction(proto.Message): + r"""Removes an attribute/facet in the request if is present. + + - Rule Condition: Must specify non-empty + [Condition.query_terms][google.cloud.retail.v2.Condition.query_terms] + (for search only) or + [Condition.page_categories][google.cloud.retail.v2.Condition.page_categories] + (for browse only), but can't specify both. + + - Action Input: attribute name + + - Action Result: Will remove the attribute (as a facet) from the + request if it is present. + + Example: Suppose the query is "shoes", the + [Condition.query_terms][google.cloud.retail.v2.Condition.query_terms] + is "shoes" and the attribute name "size", then facet key "size" will + be removed from the request (if it is present). + + Attributes: + attribute_names (MutableSequence[str]): + The attribute names (i.e. facet keys) to + remove from the dynamic facets (if present in + the request). There can't be more 3 attribute + names. Each attribute name should be a valid + attribute name, be non-empty and contain at most + 80 characters. + """ + + attribute_names: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=1, + ) + boost_action: BoostAction = proto.Field( proto.MESSAGE, number=2, @@ -542,6 +661,18 @@ class IgnoreAction(proto.Message): oneof="action", message=TwowaySynonymsAction, ) + force_return_facet_action: ForceReturnFacetAction = proto.Field( + proto.MESSAGE, + number=12, + oneof="action", + message=ForceReturnFacetAction, + ) + remove_facet_action: RemoveFacetAction = proto.Field( + proto.MESSAGE, + number=13, + oneof="action", + message=RemoveFacetAction, + ) condition: "Condition" = proto.Field( proto.MESSAGE, number=1, diff --git a/packages/google-cloud-retail/google/cloud/retail_v2/types/completion_service.py b/packages/google-cloud-retail/google/cloud/retail_v2/types/completion_service.py index 5deb4846c381..50c5a03a1ce6 100644 --- a/packages/google-cloud-retail/google/cloud/retail_v2/types/completion_service.py +++ b/packages/google-cloud-retail/google/cloud/retail_v2/types/completion_service.py @@ -106,12 +106,16 @@ class CompleteQueryRequest(proto.Message): The maximum allowed max suggestions is 20. If it is set higher, it will be capped by 20. + enable_attribute_suggestions (bool): + If true, attribute suggestions are enabled + and provided in response. + This field is only available for "cloud-retail" + dataset. entity (str): - The entity for customers that may run multiple different - entities, domains, sites or regions, for example, - ``Google US``, ``Google Ads``, ``Waymo``, ``google.com``, - ``youtube.com``, etc. If this is set, it should be exactly - matched with + The entity for customers who run multiple entities, domains, + sites, or regions, for example, ``Google US``, + ``Google Ads``, ``Waymo``, ``google.com``, ``youtube.com``, + etc. If this is set, it must be an exact match with [UserEvent.entity][google.cloud.retail.v2.UserEvent.entity] to get per-entity autocomplete results. 
""" @@ -144,6 +148,10 @@ class CompleteQueryRequest(proto.Message): proto.INT32, number=5, ) + enable_attribute_suggestions: bool = proto.Field( + proto.BOOL, + number=9, + ) entity: str = proto.Field( proto.STRING, number=10, @@ -164,10 +172,10 @@ class CompleteQueryResponse(proto.Message): for search events resulting from this completion, which enables accurate attribution of complete model performance. recent_search_results (MutableSequence[google.cloud.retail_v2.types.CompleteQueryResponse.RecentSearchResult]): - Matched recent searches of this user. The maximum number of - recent searches is 10. This field is a restricted feature. - Contact Retail Search support team if you are interested in - enabling it. + Deprecated. Matched recent searches of this user. The + maximum number of recent searches is 10. This field is a + restricted feature. If you want to enable it, contact Retail + Search support. This feature is only available when [CompleteQueryRequest.visitor_id][google.cloud.retail.v2.CompleteQueryRequest.visitor_id] @@ -219,7 +227,7 @@ class CompletionResult(proto.Message): ) class RecentSearchResult(proto.Message): - r"""Recent search of this user. + r"""Deprecated: Recent search of this user. Attributes: recent_search (str): diff --git a/packages/google-cloud-retail/google/cloud/retail_v2/types/import_config.py b/packages/google-cloud-retail/google/cloud/retail_v2/types/import_config.py index 351a6f5c5b4d..4068da9f850b 100644 --- a/packages/google-cloud-retail/google/cloud/retail_v2/types/import_config.py +++ b/packages/google-cloud-retail/google/cloud/retail_v2/types/import_config.py @@ -273,7 +273,9 @@ class ImportProductsRequest(proto.Message): during the Import. update_mask (google.protobuf.field_mask_pb2.FieldMask): Indicates which fields in the provided imported ``products`` - to update. If not set, all fields are updated. + to update. If not set, all fields are updated. If provided, + only the existing product fields are updated. Missing + products will not be created. reconciliation_mode (google.cloud.retail_v2.types.ImportProductsRequest.ReconciliationMode): The mode of reconciliation between existing products and the products to be imported. Defaults to @@ -288,10 +290,16 @@ class ImportProductsRequest(proto.Message): ``projects/{project}/topics/{topic}``. It has to be within the same project as [ImportProductsRequest.parent][google.cloud.retail.v2.ImportProductsRequest.parent]. - Make sure that + Make sure that both + ``cloud-retail-customer-data-access@system.gserviceaccount.com`` + and ``service-@gcp-sa-retail.iam.gserviceaccount.com`` - has the ``pubsub.topics.publish`` IAM permission on the + have the ``pubsub.topics.publish`` IAM permission on the topic. + + Only supported when + [ImportProductsRequest.reconciliation_mode][google.cloud.retail.v2.ImportProductsRequest.reconciliation_mode] + is set to ``FULL``. """ class ReconciliationMode(proto.Enum): diff --git a/packages/google-cloud-retail/google/cloud/retail_v2/types/model.py b/packages/google-cloud-retail/google/cloud/retail_v2/types/model.py index b8d8adf471da..3c33aa3ca20c 100644 --- a/packages/google-cloud-retail/google/cloud/retail_v2/types/model.py +++ b/packages/google-cloud-retail/google/cloud/retail_v2/types/model.py @@ -150,6 +150,8 @@ class Model(proto.Message): Output only. The list of valid serving configs associated with the PageOptimizationConfig. + model_features_config (google.cloud.retail_v2.types.Model.ModelFeaturesConfig): + Optional. Additional model features config. 
""" class ServingState(proto.Enum): @@ -235,6 +237,27 @@ class DataState(proto.Enum): DATA_OK = 1 DATA_ERROR = 2 + class ContextProductsType(proto.Enum): + r"""Use single or multiple context products for recommendations. + + Values: + CONTEXT_PRODUCTS_TYPE_UNSPECIFIED (0): + Unspecified default value, should never be explicitly set. + Defaults to + [MULTIPLE_CONTEXT_PRODUCTS][google.cloud.retail.v2.Model.ContextProductsType.MULTIPLE_CONTEXT_PRODUCTS]. + SINGLE_CONTEXT_PRODUCT (1): + Use only a single product as context for the + recommendation. Typically used on pages like + add-to-cart or product details. + MULTIPLE_CONTEXT_PRODUCTS (2): + Use one or multiple products as context for + the recommendation. Typically used on shopping + cart pages. + """ + CONTEXT_PRODUCTS_TYPE_UNSPECIFIED = 0 + SINGLE_CONTEXT_PRODUCT = 1 + MULTIPLE_CONTEXT_PRODUCTS = 2 + class ServingConfigList(proto.Message): r"""Represents an ordered combination of valid serving configs, which can be used for ``PAGE_OPTIMIZATION`` recommendations. @@ -250,6 +273,45 @@ class ServingConfigList(proto.Message): number=1, ) + class FrequentlyBoughtTogetherFeaturesConfig(proto.Message): + r"""Additional configs for the frequently-bought-together model + type. + + Attributes: + context_products_type (google.cloud.retail_v2.types.Model.ContextProductsType): + Optional. Specifies the context of the model when it is used + in predict requests. Can only be set for the + ``frequently-bought-together`` type. If it isn't specified, + it defaults to + [MULTIPLE_CONTEXT_PRODUCTS][google.cloud.retail.v2.Model.ContextProductsType.MULTIPLE_CONTEXT_PRODUCTS]. + """ + + context_products_type: "Model.ContextProductsType" = proto.Field( + proto.ENUM, + number=2, + enum="Model.ContextProductsType", + ) + + class ModelFeaturesConfig(proto.Message): + r"""Additional model features config. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + frequently_bought_together_config (google.cloud.retail_v2.types.Model.FrequentlyBoughtTogetherFeaturesConfig): + Additional configs for + frequently-bought-together models. + + This field is a member of `oneof`_ ``type_dedicated_config``. + """ + + frequently_bought_together_config: "Model.FrequentlyBoughtTogetherFeaturesConfig" = proto.Field( + proto.MESSAGE, + number=1, + oneof="type_dedicated_config", + message="Model.FrequentlyBoughtTogetherFeaturesConfig", + ) + name: str = proto.Field( proto.STRING, number=1, @@ -315,6 +377,11 @@ class ServingConfigList(proto.Message): number=19, message=ServingConfigList, ) + model_features_config: ModelFeaturesConfig = proto.Field( + proto.MESSAGE, + number=22, + message=ModelFeaturesConfig, + ) __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-retail/google/cloud/retail_v2/types/product.py b/packages/google-cloud-retail/google/cloud/retail_v2/types/product.py index 2552342c56c6..433773fb7714 100644 --- a/packages/google-cloud-retail/google/cloud/retail_v2/types/product.py +++ b/packages/google-cloud-retail/google/cloud/retail_v2/types/product.py @@ -46,28 +46,25 @@ class Product(proto.Message): Attributes: expire_time (google.protobuf.timestamp_pb2.Timestamp): - The timestamp when this product becomes unavailable for - [SearchService.Search][google.cloud.retail.v2.SearchService.Search]. 
- Note that this is only applicable to - [Type.PRIMARY][google.cloud.retail.v2.Product.Type.PRIMARY] - and - [Type.COLLECTION][google.cloud.retail.v2.Product.Type.COLLECTION], - and ignored for - [Type.VARIANT][google.cloud.retail.v2.Product.Type.VARIANT]. + Note that this field is applied in the following ways: + + - If the [Product][google.cloud.retail.v2.Product] is + already expired when it is uploaded, this product is not + indexed for search. + + - If the [Product][google.cloud.retail.v2.Product] is not + expired when it is uploaded, only the + [Type.PRIMARY][google.cloud.retail.v2.Product.Type.PRIMARY]'s + and + [Type.COLLECTION][google.cloud.retail.v2.Product.Type.COLLECTION]'s + expireTime is respected, and + [Type.VARIANT][google.cloud.retail.v2.Product.Type.VARIANT]'s + expireTime is not used. + In general, we suggest the users to delete the stale products explicitly, instead of using this field to determine staleness. - If it is set, the [Product][google.cloud.retail.v2.Product] - is not available for - [SearchService.Search][google.cloud.retail.v2.SearchService.Search] - after - [expire_time][google.cloud.retail.v2.Product.expire_time]. - However, the product can still be retrieved by - [ProductService.GetProduct][google.cloud.retail.v2.ProductService.GetProduct] - and - [ProductService.ListProducts][google.cloud.retail.v2.ProductService.ListProducts]. - [expire_time][google.cloud.retail.v2.Product.expire_time] must be later than [available_time][google.cloud.retail.v2.Product.available_time] @@ -219,9 +216,10 @@ class Product(proto.Message): INVALID_ARGUMENT error is returned. At most 250 values are allowed per - [Product][google.cloud.retail.v2.Product]. Empty values are - not allowed. Each value must be a UTF-8 encoded string with - a length limit of 5,000 characters. Otherwise, an + [Product][google.cloud.retail.v2.Product] unless overridden + through the Google Cloud console. Empty values are not + allowed. Each value must be a UTF-8 encoded string with a + length limit of 5,000 characters. Otherwise, an INVALID_ARGUMENT error is returned. Corresponding properties: Google Merchant Center property @@ -242,10 +240,10 @@ class Product(proto.Message): brands (MutableSequence[str]): The brands of the product. - A maximum of 30 brands are allowed. Each brand must be a - UTF-8 encoded string with a length limit of 1,000 - characters. Otherwise, an INVALID_ARGUMENT error is - returned. + A maximum of 30 brands are allowed unless overridden through + the Google Cloud console. Each brand must be a UTF-8 encoded + string with a length limit of 1,000 characters. Otherwise, + an INVALID_ARGUMENT error is returned. Corresponding properties: Google Merchant Center property `brand `__. diff --git a/packages/google-cloud-retail/google/cloud/retail_v2/types/promotion.py b/packages/google-cloud-retail/google/cloud/retail_v2/types/promotion.py index a70eda92cd27..7363d14bd15e 100644 --- a/packages/google-cloud-retail/google/cloud/retail_v2/types/promotion.py +++ b/packages/google-cloud-retail/google/cloud/retail_v2/types/promotion.py @@ -40,8 +40,8 @@ class Promotion(proto.Message): ID_1_LIKE_THIS. Otherwise, an INVALID_ARGUMENT error is returned. - Google Merchant Center property - `promotion `__. + Corresponds to Google Merchant Center property + `promotion_id `__. 
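To make the expire_time semantics above concrete, a small sketch (SKU, title, and brand are placeholders): an unexpired PRIMARY product stays indexed until the timestamp passes, after which it can still be fetched with GetProduct or ListProducts but is no longer returned by Search.

```python
import datetime

from google.cloud import retail_v2

product = retail_v2.Product(
    id="sku-123",
    type_=retail_v2.Product.Type.PRIMARY,
    title="Trail running shoe",
    brands=["ExampleBrand"],  # at most 30 unless the limit is raised in the console
    expire_time=datetime.datetime(2030, 1, 1, tzinfo=datetime.timezone.utc),
)
```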
""" promotion_id: str = proto.Field( diff --git a/packages/google-cloud-retail/google/cloud/retail_v2/types/purge_config.py b/packages/google-cloud-retail/google/cloud/retail_v2/types/purge_config.py index 9fc5987c02f5..863ffe21ba85 100644 --- a/packages/google-cloud-retail/google/cloud/retail_v2/types/purge_config.py +++ b/packages/google-cloud-retail/google/cloud/retail_v2/types/purge_config.py @@ -17,12 +17,16 @@ from typing import MutableMapping, MutableSequence +from google.protobuf import timestamp_pb2 # type: ignore import proto # type: ignore __protobuf__ = proto.module( package="google.cloud.retail.v2", manifest={ "PurgeMetadata", + "PurgeProductsMetadata", + "PurgeProductsRequest", + "PurgeProductsResponse", "PurgeUserEventsRequest", "PurgeUserEventsResponse", }, @@ -37,6 +41,145 @@ class PurgeMetadata(proto.Message): """ +class PurgeProductsMetadata(proto.Message): + r"""Metadata related to the progress of the PurgeProducts + operation. This will be returned by the + google.longrunning.Operation.metadata field. + + Attributes: + create_time (google.protobuf.timestamp_pb2.Timestamp): + Operation create time. + update_time (google.protobuf.timestamp_pb2.Timestamp): + Operation last update time. If the operation + is done, this is also the finish time. + success_count (int): + Count of entries that were deleted + successfully. + failure_count (int): + Count of entries that encountered errors + while processing. + """ + + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=1, + message=timestamp_pb2.Timestamp, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + success_count: int = proto.Field( + proto.INT64, + number=3, + ) + failure_count: int = proto.Field( + proto.INT64, + number=4, + ) + + +class PurgeProductsRequest(proto.Message): + r"""Request message for PurgeProducts method. + + Attributes: + parent (str): + Required. The resource name of the branch under which the + products are created. The format is + ``projects/${projectId}/locations/global/catalogs/${catalogId}/branches/${branchId}`` + filter (str): + Required. The filter string to specify the products to be + deleted with a length limit of 5,000 characters. + + Empty string filter is not allowed. "*" implies delete all + items in a branch. + + The eligible fields for filtering are: + + - ``availability``: Double quoted + [Product.availability][google.cloud.retail.v2.Product.availability] + string. + - ``create_time`` : in ISO 8601 "zulu" format. + + Supported syntax: + + - Comparators (">", "<", ">=", "<=", "="). Examples: + + - create_time <= "2015-02-13T17:05:46Z" + - availability = "IN_STOCK" + + - Conjunctions ("AND") Examples: + + - create_time <= "2015-02-13T17:05:46Z" AND availability + = "PREORDER" + + - Disjunctions ("OR") Examples: + + - create_time <= "2015-02-13T17:05:46Z" OR availability + = "IN_STOCK" + + - Can support nested queries. Examples: + + - (create_time <= "2015-02-13T17:05:46Z" AND + availability = "PREORDER") OR (create_time >= + "2015-02-14T13:03:32Z" AND availability = "IN_STOCK") + + - Filter Limits: + + - Filter should not contain more than 6 conditions. + - Max nesting depth should not exceed 2 levels. + + Examples queries: + + - Delete back order products created before a timestamp. + create_time <= "2015-02-13T17:05:46Z" OR availability = + "BACKORDER". + force (bool): + Actually perform the purge. 
If ``force`` is set to false, + the method will return the expected purge count without + deleting any products. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + filter: str = proto.Field( + proto.STRING, + number=2, + ) + force: bool = proto.Field( + proto.BOOL, + number=3, + ) + + +class PurgeProductsResponse(proto.Message): + r"""Response of the PurgeProductsRequest. If the long running + operation is successfully done, then this message is returned by + the google.longrunning.Operations.response field. + + Attributes: + purge_count (int): + The total count of products purged as a + result of the operation. + purge_sample (MutableSequence[str]): + A sample of the product names that will be deleted. Only + populated if ``force`` is set to false. A max of 100 names + will be returned and the names are chosen at random. + """ + + purge_count: int = proto.Field( + proto.INT64, + number=1, + ) + purge_sample: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=2, + ) + + class PurgeUserEventsRequest(proto.Message): r"""Request message for PurgeUserEvents method. diff --git a/packages/google-cloud-retail/google/cloud/retail_v2/types/search_service.py b/packages/google-cloud-retail/google/cloud/retail_v2/types/search_service.py index 7957bf274367..f08b624defdb 100644 --- a/packages/google-cloud-retail/google/cloud/retail_v2/types/search_service.py +++ b/packages/google-cloud-retail/google/cloud/retail_v2/types/search_service.py @@ -50,7 +50,7 @@ class SearchRequest(proto.Message): or the name of the legacy placement resource, such as ``projects/*/locations/global/catalogs/default_catalog/placements/default_search``. This field is used to identify the serving config name and - the set of models that will be used to make the search. + the set of models that are used to make the search. branch (str): The branch resource name, such as ``projects/*/locations/global/catalogs/default_catalog/branches/0``. @@ -111,8 +111,8 @@ class SearchRequest(proto.Message): The filter syntax consists of an expression language for constructing a predicate from one or more fields of the products being filtered. Filter expression is - case-sensitive. See more details at this `user - guide `__. + case-sensitive. For more information, see + `Filter `__. If this field is unrecognizable, an INVALID_ARGUMENT is returned. @@ -121,24 +121,22 @@ class SearchRequest(proto.Message): search without checking any filters on the search page. The filter applied to every search request when quality - improvement such as query expansion is needed. For example, - if a query does not have enough results, an expanded query - with - [SearchRequest.canonical_filter][google.cloud.retail.v2.SearchRequest.canonical_filter] - will be returned as a supplement of the original query. This - field is strongly recommended to achieve high search - quality. - - See - [SearchRequest.filter][google.cloud.retail.v2.SearchRequest.filter] - for more details about filter syntax. + improvement such as query expansion is needed. In the case a + query does not have a sufficient amount of results this + filter will be used to determine whether or not to enable + the query expansion flow. The original filter will still be + used for the query expanded search. This field is strongly + recommended to achieve high search quality. + + For more information about filter syntax, see + [SearchRequest.filter][google.cloud.retail.v2.SearchRequest.filter]. order_by (str): The order in which products are returned. 
Products can be ordered by a field in an [Product][google.cloud.retail.v2.Product] object. Leave it unset if ordered by relevance. OrderBy expression is - case-sensitive. See more details at this `user - guide `__. + case-sensitive. For more information, see + `Order `__. If this field is unrecognizable, an INVALID_ARGUMENT is returned. @@ -157,9 +155,9 @@ class SearchRequest(proto.Message): facets. Notice that only textual facets can be dynamically generated. boost_spec (google.cloud.retail_v2.types.SearchRequest.BoostSpec): - Boost specification to boost certain products. See more - details at this `user - guide `__. + Boost specification to boost certain products. For more + information, see `Boost + results `__. Notice that if both [ServingConfig.boost_control_ids][google.cloud.retail.v2.ServingConfig.boost_control_ids] @@ -171,9 +169,9 @@ class SearchRequest(proto.Message): boost scores from all matched boost conditions. query_expansion_spec (google.cloud.retail_v2.types.SearchRequest.QueryExpansionSpec): The query expansion specification that specifies the - conditions under which query expansion will occur. See more - details at this `user - guide `__. + conditions under which query expansion occurs. For more + information, see `Query + expansion `__. variant_rollup_keys (MutableSequence[str]): The keys to fetch and rollup the matching [variant][google.cloud.retail.v2.Product.Type.VARIANT] @@ -259,9 +257,9 @@ class SearchRequest(proto.Message): If this field is set to an invalid value other than these, an INVALID_ARGUMENT error is returned. page_categories (MutableSequence[str]): - The categories associated with a category page. Required for - category navigation queries to achieve good search quality. - The format should be the same as + The categories associated with a category page. Must be set + for category navigation queries to achieve good search + quality. The format should be the same as [UserEvent.page_categories][google.cloud.retail.v2.UserEvent.page_categories]; To represent full path of category, use '>' sign to separate @@ -306,9 +304,9 @@ class SearchRequest(proto.Message): - Keys must start with a lowercase letter or international character. - See `Google Cloud - Document `__ - for more details. + For more information, see `Requirements for + labels `__ + in the Resource Manager documentation. spell_correction_spec (google.cloud.retail_v2.types.SearchRequest.SpellCorrectionSpec): The spell correction specification that specifies the mode under which spell correction @@ -416,18 +414,17 @@ class FacetSpec(proto.Message): enable_dynamic_position (bool): Enables dynamic position for this facet. If set to true, the position of this facet among all facets in the response is - determined by Google Retail Search. It will be ordered - together with dynamic facets if dynamic facets is enabled. - If set to false, the position of this facet in the response - will be the same as in the request, and it will be ranked - before the facets with dynamic position enable and all - dynamic facets. + determined by Google Retail Search. It is ordered together + with dynamic facets if dynamic facets is enabled. If set to + false, the position of this facet in the response is the + same as in the request, and it is ranked before the facets + with dynamic position enable and all dynamic facets. For example, you may always want to have rating facet returned in the response, but it's not necessarily to always display the rating facet at the top. 
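Before the facet-position discussion continues below, a hedged sketch pulling together the filter, order_by, and page_categories fields documented above; the project and visitor IDs are placeholders.

```python
from google.cloud import retail_v2

client = retail_v2.SearchServiceClient()
request = retail_v2.SearchRequest(
    placement="projects/my-project/locations/global/catalogs/default_catalog/servingConfigs/default_search",
    branch="projects/my-project/locations/global/catalogs/default_catalog/branches/default_branch",
    visitor_id="visitor-123",
    query="shoes",
    filter='colorFamilies: ANY("Blue")',
    order_by="price desc",
    page_categories=["Sales > 2017 Black Friday Deals"],
)
for result in client.search(request=request):  # pager yields SearchResult messages
    print(result.id)
```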
In that case, you can set enable_dynamic_position to true so that the position of - rating facet in response will be determined by Google Retail + rating facet in response is determined by Google Retail Search. Another example, assuming you have the following facets in @@ -439,14 +436,14 @@ class FacetSpec(proto.Message): - "brands", enable_dynamic_position = false - And also you have a dynamic facets enable, which will - generate a facet 'gender'. Then the final order of the - facets in the response can be ("price", "brands", "rating", - "gender") or ("price", "brands", "gender", "rating") depends - on how Google Retail Search orders "gender" and "rating" - facets. However, notice that "price" and "brands" will - always be ranked at 1st and 2nd position since their - enable_dynamic_position are false. + And also you have a dynamic facets enable, which generates a + facet "gender". Then, the final order of the facets in the + response can be ("price", "brands", "rating", "gender") or + ("price", "brands", "gender", "rating") depends on how + Google Retail Search orders "gender" and "rating" facets. + However, notice that "price" and "brands" are always ranked + at first and second position because their + enable_dynamic_position values are false. """ class FacetKey(proto.Message): @@ -505,15 +502,15 @@ class FacetKey(proto.Message): For all numerical facet keys that appear in the list of products from the catalog, the - percentiles 0, 10, 30, 50, 70, 90 and 100 are + percentiles 0, 10, 30, 50, 70, 90, and 100 are computed from their distribution weekly. If the model assigns a high score to a numerical facet key and its intervals are not specified in the - search request, these percentiles will become - the bounds for its intervals and will be - returned in the response. If the facet key - intervals are specified in the request, then the - specified intervals will be returned instead. + search request, these percentiles become the + bounds for its intervals and are returned in the + response. If the facet key intervals are + specified in the request, then the specified + intervals are returned instead. restricted_values (MutableSequence[str]): Only get facet for the given restricted values. For example, when using "pickupInStore" as key and set restricted values @@ -547,17 +544,16 @@ class FacetKey(proto.Message): "categories" has three values "Women > Shoe", "Women > Dress" and "Men > Shoe". If set "prefixes" to "Women", the "categories" facet - will give only "Women > Shoe" and "Women > - Dress". Only supported on textual fields. - Maximum is 10. + gives only "Women > Shoe" and "Women > Dress". + Only supported on textual fields. Maximum is 10. contains (MutableSequence[str]): Only get facet values that contains the given strings. For example, suppose "categories" has three values "Women > Shoe", "Women > Dress" and "Men > Shoe". If set "contains" to "Shoe", the - "categories" facet will give only "Women > Shoe" - and "Men > Shoe". Only supported on textual - fields. Maximum is 10. + "categories" facet gives only "Women > Shoe" and + "Men > Shoe". Only supported on textual fields. + Maximum is 10. case_insensitive (bool): True to make facet keys case insensitive when getting faceting values with prefixes or @@ -586,7 +582,7 @@ class FacetKey(proto.Message): [FacetSpec.FacetKey.restricted_values][google.cloud.retail.v2.SearchRequest.FacetSpec.FacetKey.restricted_values]. query (str): The query that is used to compute facet for the given facet - key. 
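A small sketch of the facet controls described above (an editorial aside): a "categories" facet limited to values containing "Shoe" and pinned by disabling dynamic position. The placement and visitor ID are placeholders.

```python
from google.cloud import retail_v2

facet_spec = retail_v2.SearchRequest.FacetSpec(
    facet_key=retail_v2.SearchRequest.FacetSpec.FacetKey(
        key="categories",
        contains=["Shoe"],
        case_insensitive=True,
    ),
    enable_dynamic_position=False,  # keep this facet where the request puts it
)
request = retail_v2.SearchRequest(
    placement="projects/my-project/locations/global/catalogs/default_catalog/servingConfigs/default_search",
    visitor_id="visitor-123",
    query="shoes",
    facet_specs=[facet_spec],
)
```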
When provided, it will override the default behavior of + key. When provided, it overrides the default behavior of facet computation. The query syntax is the same as a filter expression. See [SearchRequest.filter][google.cloud.retail.v2.SearchRequest.filter] @@ -597,9 +593,9 @@ class FacetKey(proto.Message): In the response, [SearchResponse.Facet.values.value][google.cloud.retail.v2.SearchResponse.Facet.FacetValue.value] - will be always "1" and + is always "1" and [SearchResponse.Facet.values.count][google.cloud.retail.v2.SearchResponse.Facet.FacetValue.count] - will be the number of results that match the query. + is the number of results that match the query. For example, you can set a customized facet for "shipToStore", where @@ -607,7 +603,7 @@ class FacetKey(proto.Message): is "customizedShipToStore", and [FacetKey.query][google.cloud.retail.v2.SearchRequest.FacetSpec.FacetKey.query] is "availability: ANY("IN_STOCK") AND shipToStore: - ANY("123")". Then the facet will count the products that are + ANY("123")". Then the facet counts the products that are both in stock and ship to store "123". return_min_max (bool): Returns the min and max value for each @@ -1364,7 +1360,7 @@ def raw_page(self): class ExperimentInfo(proto.Message): - r"""Metadata for active A/B testing [Experiments][]. + r"""Metadata for active A/B testing [Experiment][]. .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields @@ -1393,8 +1389,8 @@ class ServingConfigExperiment(proto.Message): ``projects/*/locations/*/catalogs/*/servingConfigs/*``. experiment_serving_config (str): The fully qualified resource name of the serving config - [VariantArm.serving_config_id][] responsible for generating - the search response. For example: + [Experiment.VariantArm.serving_config_id][] responsible for + generating the search response. For example: ``projects/*/locations/*/catalogs/*/servingConfigs/*``. """ diff --git a/packages/google-cloud-retail/google/cloud/retail_v2/types/serving_config.py b/packages/google-cloud-retail/google/cloud/retail_v2/types/serving_config.py index 511bf4240e1b..c911cf8e858b 100644 --- a/packages/google-cloud-retail/google/cloud/retail_v2/types/serving_config.py +++ b/packages/google-cloud-retail/google/cloud/retail_v2/types/serving_config.py @@ -233,6 +233,10 @@ class ServingConfig(proto.Message): [solution_types][google.cloud.retail.v2.ServingConfig.solution_types] is [SOLUTION_TYPE_RECOMMENDATION][google.cloud.retail.v2main.SolutionType.SOLUTION_TYPE_RECOMMENDATION]. + ignore_recs_denylist (bool): + When the flag is enabled, the products in the + denylist will not be filtered out in the + recommendation filtering results. personalization_spec (google.cloud.retail_v2.types.SearchRequest.PersonalizationSpec): The specification for personalization spec. 
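Sketch of the new ignore_recs_denylist flag documented above, assuming an existing recommendation serving config; the resource name is a placeholder.

```python
from google.protobuf import field_mask_pb2

from google.cloud import retail_v2

client = retail_v2.ServingConfigServiceClient()
client.update_serving_config(
    serving_config=retail_v2.ServingConfig(
        name="projects/my-project/locations/global/catalogs/default_catalog/servingConfigs/my_recs_config",
        ignore_recs_denylist=True,  # stop filtering denylisted products out of results
    ),
    update_mask=field_mask_pb2.FieldMask(paths=["ignore_recs_denylist"]),
)
```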
@@ -341,6 +345,10 @@ class DiversityType(proto.Enum): proto.STRING, number=16, ) + ignore_recs_denylist: bool = proto.Field( + proto.BOOL, + number=24, + ) personalization_spec: search_service.SearchRequest.PersonalizationSpec = ( proto.Field( proto.MESSAGE, diff --git a/packages/google-cloud-retail/google/cloud/retail_v2/types/user_event.py b/packages/google-cloud-retail/google/cloud/retail_v2/types/user_event.py index 66fba74151c2..ade42f1063d6 100644 --- a/packages/google-cloud-retail/google/cloud/retail_v2/types/user_event.py +++ b/packages/google-cloud-retail/google/cloud/retail_v2/types/user_event.py @@ -44,6 +44,7 @@ class UserEvent(proto.Message): Required. User event type. Allowed values are: - ``add-to-cart``: Products being added to cart. + - ``remove-from-cart``: Products being removed from cart. - ``category-page-view``: Special pages such as sale or promotion pages viewed. - ``detail-page-view``: Products detail page viewed. @@ -295,8 +296,8 @@ class UserEvent(proto.Message): The entity for customers that may run multiple different entities, domains, sites or regions, for example, ``Google US``, ``Google Ads``, ``Waymo``, ``google.com``, - ``youtube.com``, etc. It is recommended to set this field to - get better per-entity search, completion and prediction + ``youtube.com``, etc. We recommend that you set this field + to get better per-entity search, completion, and prediction results. """ diff --git a/packages/google-cloud-retail/google/cloud/retail_v2alpha/__init__.py b/packages/google-cloud-retail/google/cloud/retail_v2alpha/__init__.py index 954421dca3be..be8e3e66a81b 100644 --- a/packages/google-cloud-retail/google/cloud/retail_v2alpha/__init__.py +++ b/packages/google-cloud-retail/google/cloud/retail_v2alpha/__init__.py @@ -22,6 +22,7 @@ AnalyticsServiceAsyncClient, AnalyticsServiceClient, ) +from .services.branch_service import BranchServiceAsyncClient, BranchServiceClient from .services.catalog_service import CatalogServiceAsyncClient, CatalogServiceClient from .services.completion_service import ( CompletionServiceAsyncClient, @@ -38,6 +39,7 @@ PredictionServiceClient, ) from .services.product_service import ProductServiceAsyncClient, ProductServiceClient +from .services.project_service import ProjectServiceAsyncClient, ProjectServiceClient from .services.search_service import SearchServiceAsyncClient, SearchServiceClient from .services.serving_config_service import ( ServingConfigServiceAsyncClient, @@ -47,6 +49,12 @@ UserEventServiceAsyncClient, UserEventServiceClient, ) +from .types.branch import Branch, BranchView +from .types.branch_service import ( + GetBranchRequest, + ListBranchesRequest, + ListBranchesResponse, +) from .types.catalog import ( AttributesConfig, Catalog, @@ -183,6 +191,20 @@ SetInventoryResponse, UpdateProductRequest, ) +from .types.project import AlertConfig, LoggingConfig, Project +from .types.project_service import ( + AcceptTermsRequest, + EnrollSolutionMetadata, + EnrollSolutionRequest, + EnrollSolutionResponse, + GetAlertConfigRequest, + GetLoggingConfigRequest, + GetProjectRequest, + ListEnrolledSolutionsRequest, + ListEnrolledSolutionsResponse, + UpdateAlertConfigRequest, + UpdateLoggingConfigRequest, +) from .types.promotion import Promotion from .types.purge_config import ( PurgeMetadata, @@ -220,6 +242,7 @@ __all__ = ( "AnalyticsServiceAsyncClient", + "BranchServiceAsyncClient", "CatalogServiceAsyncClient", "CompletionServiceAsyncClient", "ControlServiceAsyncClient", @@ -227,9 +250,11 @@ "ModelServiceAsyncClient", 
"PredictionServiceAsyncClient", "ProductServiceAsyncClient", + "ProjectServiceAsyncClient", "SearchServiceAsyncClient", "ServingConfigServiceAsyncClient", "UserEventServiceAsyncClient", + "AcceptTermsRequest", "AddCatalogAttributeRequest", "AddControlRequest", "AddFulfillmentPlacesMetadata", @@ -238,6 +263,7 @@ "AddLocalInventoriesMetadata", "AddLocalInventoriesRequest", "AddLocalInventoriesResponse", + "AlertConfig", "AnalyticsServiceClient", "AttributeConfigLevel", "AttributesConfig", @@ -246,6 +272,9 @@ "BatchRemoveCatalogAttributesResponse", "BigQueryOutputResult", "BigQuerySource", + "Branch", + "BranchServiceClient", + "BranchView", "Catalog", "CatalogAttribute", "CatalogServiceClient", @@ -273,6 +302,9 @@ "DeleteModelRequest", "DeleteProductRequest", "DeleteServingConfigRequest", + "EnrollSolutionMetadata", + "EnrollSolutionRequest", + "EnrollSolutionResponse", "ExperimentInfo", "ExportAnalyticsMetricsRequest", "ExportAnalyticsMetricsResponse", @@ -283,13 +315,17 @@ "FulfillmentInfo", "GcsOutputResult", "GcsSource", + "GetAlertConfigRequest", "GetAttributesConfigRequest", + "GetBranchRequest", "GetCompletionConfigRequest", "GetControlRequest", "GetDefaultBranchRequest", "GetDefaultBranchResponse", + "GetLoggingConfigRequest", "GetModelRequest", "GetProductRequest", + "GetProjectRequest", "GetServingConfigRequest", "Image", "ImportCompletionDataRequest", @@ -301,10 +337,14 @@ "ImportUserEventsRequest", "ImportUserEventsResponse", "Interval", + "ListBranchesRequest", + "ListBranchesResponse", "ListCatalogsRequest", "ListCatalogsResponse", "ListControlsRequest", "ListControlsResponse", + "ListEnrolledSolutionsRequest", + "ListEnrolledSolutionsResponse", "ListMerchantCenterAccountLinksRequest", "ListMerchantCenterAccountLinksResponse", "ListModelsRequest", @@ -314,6 +354,7 @@ "ListServingConfigsRequest", "ListServingConfigsResponse", "LocalInventory", + "LoggingConfig", "MerchantCenterAccountLink", "MerchantCenterAccountLinkServiceClient", "MerchantCenterFeedFilter", @@ -334,6 +375,8 @@ "ProductInputConfig", "ProductLevelConfig", "ProductServiceClient", + "Project", + "ProjectServiceClient", "Promotion", "PurchaseTransaction", "PurgeMetadata", @@ -373,10 +416,12 @@ "TuneModelMetadata", "TuneModelRequest", "TuneModelResponse", + "UpdateAlertConfigRequest", "UpdateAttributesConfigRequest", "UpdateCatalogRequest", "UpdateCompletionConfigRequest", "UpdateControlRequest", + "UpdateLoggingConfigRequest", "UpdateModelRequest", "UpdateProductRequest", "UpdateServingConfigRequest", diff --git a/packages/google-cloud-retail/google/cloud/retail_v2alpha/gapic_metadata.json b/packages/google-cloud-retail/google/cloud/retail_v2alpha/gapic_metadata.json index d4859b9820be..ae379f6da49b 100644 --- a/packages/google-cloud-retail/google/cloud/retail_v2alpha/gapic_metadata.json +++ b/packages/google-cloud-retail/google/cloud/retail_v2alpha/gapic_metadata.json @@ -39,6 +39,55 @@ } } }, + "BranchService": { + "clients": { + "grpc": { + "libraryClient": "BranchServiceClient", + "rpcs": { + "GetBranch": { + "methods": [ + "get_branch" + ] + }, + "ListBranches": { + "methods": [ + "list_branches" + ] + } + } + }, + "grpc-async": { + "libraryClient": "BranchServiceAsyncClient", + "rpcs": { + "GetBranch": { + "methods": [ + "get_branch" + ] + }, + "ListBranches": { + "methods": [ + "list_branches" + ] + } + } + }, + "rest": { + "libraryClient": "BranchServiceClient", + "rpcs": { + "GetBranch": { + "methods": [ + "get_branch" + ] + }, + "ListBranches": { + "methods": [ + "list_branches" + ] + } + } + } + } + }, 
"CatalogService": { "clients": { "grpc": { @@ -817,6 +866,145 @@ } } }, + "ProjectService": { + "clients": { + "grpc": { + "libraryClient": "ProjectServiceClient", + "rpcs": { + "AcceptTerms": { + "methods": [ + "accept_terms" + ] + }, + "EnrollSolution": { + "methods": [ + "enroll_solution" + ] + }, + "GetAlertConfig": { + "methods": [ + "get_alert_config" + ] + }, + "GetLoggingConfig": { + "methods": [ + "get_logging_config" + ] + }, + "GetProject": { + "methods": [ + "get_project" + ] + }, + "ListEnrolledSolutions": { + "methods": [ + "list_enrolled_solutions" + ] + }, + "UpdateAlertConfig": { + "methods": [ + "update_alert_config" + ] + }, + "UpdateLoggingConfig": { + "methods": [ + "update_logging_config" + ] + } + } + }, + "grpc-async": { + "libraryClient": "ProjectServiceAsyncClient", + "rpcs": { + "AcceptTerms": { + "methods": [ + "accept_terms" + ] + }, + "EnrollSolution": { + "methods": [ + "enroll_solution" + ] + }, + "GetAlertConfig": { + "methods": [ + "get_alert_config" + ] + }, + "GetLoggingConfig": { + "methods": [ + "get_logging_config" + ] + }, + "GetProject": { + "methods": [ + "get_project" + ] + }, + "ListEnrolledSolutions": { + "methods": [ + "list_enrolled_solutions" + ] + }, + "UpdateAlertConfig": { + "methods": [ + "update_alert_config" + ] + }, + "UpdateLoggingConfig": { + "methods": [ + "update_logging_config" + ] + } + } + }, + "rest": { + "libraryClient": "ProjectServiceClient", + "rpcs": { + "AcceptTerms": { + "methods": [ + "accept_terms" + ] + }, + "EnrollSolution": { + "methods": [ + "enroll_solution" + ] + }, + "GetAlertConfig": { + "methods": [ + "get_alert_config" + ] + }, + "GetLoggingConfig": { + "methods": [ + "get_logging_config" + ] + }, + "GetProject": { + "methods": [ + "get_project" + ] + }, + "ListEnrolledSolutions": { + "methods": [ + "list_enrolled_solutions" + ] + }, + "UpdateAlertConfig": { + "methods": [ + "update_alert_config" + ] + }, + "UpdateLoggingConfig": { + "methods": [ + "update_logging_config" + ] + } + } + } + } + }, "SearchService": { "clients": { "grpc": { diff --git a/packages/google-cloud-retail/google/cloud/retail_v2alpha/gapic_version.py b/packages/google-cloud-retail/google/cloud/retail_v2alpha/gapic_version.py index 558c8aab67c5..785067d93b3c 100644 --- a/packages/google-cloud-retail/google/cloud/retail_v2alpha/gapic_version.py +++ b/packages/google-cloud-retail/google/cloud/retail_v2alpha/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.0.0" # {x-release-please-version} +__version__ = "1.21.0" # {x-release-please-version} diff --git a/packages/google-cloud-retail/google/cloud/retail_v2alpha/services/branch_service/__init__.py b/packages/google-cloud-retail/google/cloud/retail_v2alpha/services/branch_service/__init__.py new file mode 100644 index 000000000000..5e716a164744 --- /dev/null +++ b/packages/google-cloud-retail/google/cloud/retail_v2alpha/services/branch_service/__init__.py @@ -0,0 +1,22 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from .async_client import BranchServiceAsyncClient +from .client import BranchServiceClient + +__all__ = ( + "BranchServiceClient", + "BranchServiceAsyncClient", +) diff --git a/packages/google-cloud-retail/google/cloud/retail_v2alpha/services/branch_service/async_client.py b/packages/google-cloud-retail/google/cloud/retail_v2alpha/services/branch_service/async_client.py new file mode 100644 index 000000000000..60ff49f4751c --- /dev/null +++ b/packages/google-cloud-retail/google/cloud/retail_v2alpha/services/branch_service/async_client.py @@ -0,0 +1,621 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +import functools +import re +from typing import ( + Callable, + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, +) + +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry_async as retries +from google.api_core.client_options import ClientOptions +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.retail_v2alpha import gapic_version as package_version + +try: + OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.AsyncRetry, object, None] # type: ignore + +from google.cloud.location import locations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore + +from google.cloud.retail_v2alpha.types import branch, branch_service + +from .client import BranchServiceClient +from .transports.base import DEFAULT_CLIENT_INFO, BranchServiceTransport +from .transports.grpc_asyncio import BranchServiceGrpcAsyncIOTransport + + +class BranchServiceAsyncClient: + """Service for [Branch][google.cloud.retail.v2alpha.Branch] Management + + [Branch][google.cloud.retail.v2alpha.Branch]es are automatically + created when a [Catalog][google.cloud.retail.v2alpha.Catalog] is + created. There are fixed three branches in each catalog, and may use + [ListBranches][google.cloud.retail.v2alpha.BranchService.ListBranches] + method to get the details of all branches. + """ + + _client: BranchServiceClient + + # Copy defaults from the synchronous client for use here. + # Note: DEFAULT_ENDPOINT is deprecated. 
Use _DEFAULT_ENDPOINT_TEMPLATE instead. + DEFAULT_ENDPOINT = BranchServiceClient.DEFAULT_ENDPOINT + DEFAULT_MTLS_ENDPOINT = BranchServiceClient.DEFAULT_MTLS_ENDPOINT + _DEFAULT_ENDPOINT_TEMPLATE = BranchServiceClient._DEFAULT_ENDPOINT_TEMPLATE + _DEFAULT_UNIVERSE = BranchServiceClient._DEFAULT_UNIVERSE + + branch_path = staticmethod(BranchServiceClient.branch_path) + parse_branch_path = staticmethod(BranchServiceClient.parse_branch_path) + catalog_path = staticmethod(BranchServiceClient.catalog_path) + parse_catalog_path = staticmethod(BranchServiceClient.parse_catalog_path) + product_path = staticmethod(BranchServiceClient.product_path) + parse_product_path = staticmethod(BranchServiceClient.parse_product_path) + common_billing_account_path = staticmethod( + BranchServiceClient.common_billing_account_path + ) + parse_common_billing_account_path = staticmethod( + BranchServiceClient.parse_common_billing_account_path + ) + common_folder_path = staticmethod(BranchServiceClient.common_folder_path) + parse_common_folder_path = staticmethod( + BranchServiceClient.parse_common_folder_path + ) + common_organization_path = staticmethod( + BranchServiceClient.common_organization_path + ) + parse_common_organization_path = staticmethod( + BranchServiceClient.parse_common_organization_path + ) + common_project_path = staticmethod(BranchServiceClient.common_project_path) + parse_common_project_path = staticmethod( + BranchServiceClient.parse_common_project_path + ) + common_location_path = staticmethod(BranchServiceClient.common_location_path) + parse_common_location_path = staticmethod( + BranchServiceClient.parse_common_location_path + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + BranchServiceAsyncClient: The constructed client. + """ + return BranchServiceClient.from_service_account_info.__func__(BranchServiceAsyncClient, info, *args, **kwargs) # type: ignore + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + BranchServiceAsyncClient: The constructed client. + """ + return BranchServiceClient.from_service_account_file.__func__(BranchServiceAsyncClient, filename, *args, **kwargs) # type: ignore + + from_service_account_json = from_service_account_file + + @classmethod + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[ClientOptions] = None + ): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. 
+ (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + return BranchServiceClient.get_mtls_endpoint_and_cert_source(client_options) # type: ignore + + @property + def transport(self) -> BranchServiceTransport: + """Returns the transport used by the client instance. + + Returns: + BranchServiceTransport: The transport used by the client instance. + """ + return self._client.transport + + @property + def api_endpoint(self): + """Return the API endpoint used by the client instance. + + Returns: + str: The API endpoint used by the client instance. + """ + return self._client._api_endpoint + + @property + def universe_domain(self) -> str: + """Return the universe domain used by the client instance. + + Returns: + str: The universe domain used + by the client instance. + """ + return self._client._universe_domain + + get_transport_class = functools.partial( + type(BranchServiceClient).get_transport_class, type(BranchServiceClient) + ) + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[ + Union[str, BranchServiceTransport, Callable[..., BranchServiceTransport]] + ] = "grpc_asyncio", + client_options: Optional[ClientOptions] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the branch service async client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Optional[Union[str,BranchServiceTransport,Callable[..., BranchServiceTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport to use. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the BranchServiceTransport constructor. + If set to None, a transport is chosen automatically. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): + Custom options for the client. + + 1. The ``api_endpoint`` property can be used to override the + default endpoint provided by the client when ``transport`` is + not explicitly provided. Only if this property is not set and + ``transport`` was not explicitly provided, the endpoint is + determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment + variable, which have one of the following values: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto-switch to the + default mTLS endpoint if client certificate is present; this is + the default value). + + 2. 
If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide a client certificate for mTLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + 3. The ``universe_domain`` property can be used to override the + default "googleapis.com" universe. Note that ``api_endpoint`` + property still takes precedence; and ``universe_domain`` is + currently not supported for mTLS. + + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + """ + self._client = BranchServiceClient( + credentials=credentials, + transport=transport, + client_options=client_options, + client_info=client_info, + ) + + async def list_branches( + self, + request: Optional[Union[branch_service.ListBranchesRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> branch_service.ListBranchesResponse: + r"""Lists all [Branch][google.cloud.retail.v2alpha.Branch]s under + the specified parent + [Catalog][google.cloud.retail.v2alpha.Catalog]. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import retail_v2alpha + + async def sample_list_branches(): + # Create a client + client = retail_v2alpha.BranchServiceAsyncClient() + + # Initialize request argument(s) + request = retail_v2alpha.ListBranchesRequest( + parent="parent_value", + ) + + # Make the request + response = await client.list_branches(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.retail_v2alpha.types.ListBranchesRequest, dict]]): + The request object. Request for + [BranchService.ListBranches][google.cloud.retail.v2alpha.BranchService.ListBranches] + method. + parent (:class:`str`): + Required. The parent catalog resource + name. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.retail_v2alpha.types.ListBranchesResponse: + Response for + [BranchService.ListBranches][google.cloud.retail.v2alpha.BranchService.ListBranches] + method. + + """ + # Create or coerce a protobuf request object. 
+ # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, branch_service.ListBranchesRequest): + request = branch_service.ListBranchesRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.list_branches + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_branch( + self, + request: Optional[Union[branch_service.GetBranchRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> branch.Branch: + r"""Retrieves a [Branch][google.cloud.retail.v2alpha.Branch]. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import retail_v2alpha + + async def sample_get_branch(): + # Create a client + client = retail_v2alpha.BranchServiceAsyncClient() + + # Initialize request argument(s) + request = retail_v2alpha.GetBranchRequest( + name="name_value", + ) + + # Make the request + response = await client.get_branch(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.retail_v2alpha.types.GetBranchRequest, dict]]): + The request object. Request for + [BranchService.GetBranch][google.cloud.retail.v2alpha.BranchService.GetBranch] + method. + name (:class:`str`): + Required. The name of the branch to retrieve. Format: + ``projects/*/locations/global/catalogs/default_catalog/branches/some_branch_id``. + + "default_branch" can be used as a special branch_id, it + returns the default branch that has been set for the + catalog. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.cloud.retail_v2alpha.types.Branch: + A data branch that stores + [Product][google.cloud.retail.v2alpha.Product]s. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, branch_service.GetBranchRequest): + request = branch_service.GetBranchRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.get_branch + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_operations( + self, + request: Optional[operations_pb2.ListOperationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Lists operations that match the specified filter in the request. + + Args: + request (:class:`~.operations_pb2.ListOperationsRequest`): + The request object. Request message for + `ListOperations` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.ListOperationsResponse: + Response message for ``ListOperations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.ListOperationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_operations, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def get_operation( + self, + request: Optional[operations_pb2.GetOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. + + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def __aenter__(self) -> "BranchServiceAsyncClient": + return self + + async def __aexit__(self, exc_type, exc, tb): + await self.transport.close() + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("BranchServiceAsyncClient",) diff --git a/packages/google-cloud-retail/google/cloud/retail_v2alpha/services/branch_service/client.py b/packages/google-cloud-retail/google/cloud/retail_v2alpha/services/branch_service/client.py new file mode 100644 index 000000000000..d470e68b0e9b --- /dev/null +++ b/packages/google-cloud-retail/google/cloud/retail_v2alpha/services/branch_service/client.py @@ -0,0 +1,1086 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +import os +import re +from typing import ( + Callable, + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, + cast, +) +import warnings + +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.retail_v2alpha import gapic_version as package_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + +from google.cloud.location import locations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore + +from google.cloud.retail_v2alpha.types import branch, branch_service + +from .transports.base import DEFAULT_CLIENT_INFO, BranchServiceTransport +from .transports.grpc import BranchServiceGrpcTransport +from .transports.grpc_asyncio import BranchServiceGrpcAsyncIOTransport +from .transports.rest import BranchServiceRestTransport + + +class BranchServiceClientMeta(type): + """Metaclass for the BranchService client. + + This provides class-level methods for building and retrieving + support objects (e.g. transport) without polluting the client instance + objects. + """ + + _transport_registry = OrderedDict() # type: Dict[str, Type[BranchServiceTransport]] + _transport_registry["grpc"] = BranchServiceGrpcTransport + _transport_registry["grpc_asyncio"] = BranchServiceGrpcAsyncIOTransport + _transport_registry["rest"] = BranchServiceRestTransport + + def get_transport_class( + cls, + label: Optional[str] = None, + ) -> Type[BranchServiceTransport]: + """Returns an appropriate transport class. + + Args: + label: The name of the desired transport. If none is + provided, then the first transport in the registry is used. + + Returns: + The transport class to use. + """ + # If a specific transport is requested, return that one. + if label: + return cls._transport_registry[label] + + # No transport is requested; return the default (that is, the first one + # in the dictionary). + return next(iter(cls._transport_registry.values())) + + +class BranchServiceClient(metaclass=BranchServiceClientMeta): + """Service for [Branch][google.cloud.retail.v2alpha.Branch] Management + + [Branch][google.cloud.retail.v2alpha.Branch]es are automatically + created when a [Catalog][google.cloud.retail.v2alpha.Catalog] is + created. There are fixed three branches in each catalog, and may use + [ListBranches][google.cloud.retail.v2alpha.BranchService.ListBranches] + method to get the details of all branches. + """ + + @staticmethod + def _get_default_mtls_endpoint(api_endpoint): + """Converts api endpoint to mTLS endpoint. + + Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to + "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. + Args: + api_endpoint (Optional[str]): the api endpoint to convert. + Returns: + str: converted mTLS api endpoint. 
+        """
+        if not api_endpoint:
+            return api_endpoint
+
+        mtls_endpoint_re = re.compile(
+            r"(?P<name>[^.]+)(?P<mtls>\.mtls)?(?P<sandbox>\.sandbox)?(?P<googledomain>\.googleapis\.com)?"
+        )
+
+        m = mtls_endpoint_re.match(api_endpoint)
+        name, mtls, sandbox, googledomain = m.groups()
+        if mtls or not googledomain:
+            return api_endpoint
+
+        if sandbox:
+            return api_endpoint.replace(
+                "sandbox.googleapis.com", "mtls.sandbox.googleapis.com"
+            )
+
+        return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com")
+
+    # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead.
+    DEFAULT_ENDPOINT = "retail.googleapis.com"
+    DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__(  # type: ignore
+        DEFAULT_ENDPOINT
+    )
+
+    _DEFAULT_ENDPOINT_TEMPLATE = "retail.{UNIVERSE_DOMAIN}"
+    _DEFAULT_UNIVERSE = "googleapis.com"
+
+    @classmethod
+    def from_service_account_info(cls, info: dict, *args, **kwargs):
+        """Creates an instance of this client using the provided credentials
+            info.
+
+        Args:
+            info (dict): The service account private key info.
+            args: Additional arguments to pass to the constructor.
+            kwargs: Additional arguments to pass to the constructor.
+
+        Returns:
+            BranchServiceClient: The constructed client.
+        """
+        credentials = service_account.Credentials.from_service_account_info(info)
+        kwargs["credentials"] = credentials
+        return cls(*args, **kwargs)
+
+    @classmethod
+    def from_service_account_file(cls, filename: str, *args, **kwargs):
+        """Creates an instance of this client using the provided credentials
+            file.
+
+        Args:
+            filename (str): The path to the service account private key json
+                file.
+            args: Additional arguments to pass to the constructor.
+            kwargs: Additional arguments to pass to the constructor.
+
+        Returns:
+            BranchServiceClient: The constructed client.
+        """
+        credentials = service_account.Credentials.from_service_account_file(filename)
+        kwargs["credentials"] = credentials
+        return cls(*args, **kwargs)
+
+    from_service_account_json = from_service_account_file
+
+    @property
+    def transport(self) -> BranchServiceTransport:
+        """Returns the transport used by the client instance.
+
+        Returns:
+            BranchServiceTransport: The transport used by the client
+                instance.
+        """
+        return self._transport
+
+    @staticmethod
+    def branch_path(
+        project: str,
+        location: str,
+        catalog: str,
+        branch: str,
+    ) -> str:
+        """Returns a fully-qualified branch string."""
+        return "projects/{project}/locations/{location}/catalogs/{catalog}/branches/{branch}".format(
+            project=project,
+            location=location,
+            catalog=catalog,
+            branch=branch,
+        )
+
+    @staticmethod
+    def parse_branch_path(path: str) -> Dict[str, str]:
+        """Parses a branch path into its component segments."""
+        m = re.match(
+            r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/catalogs/(?P<catalog>.+?)/branches/(?P<branch>.+?)$",
+            path,
+        )
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def catalog_path(
+        project: str,
+        location: str,
+        catalog: str,
+    ) -> str:
+        """Returns a fully-qualified catalog string."""
+        return "projects/{project}/locations/{location}/catalogs/{catalog}".format(
+            project=project,
+            location=location,
+            catalog=catalog,
+        )
+
+    @staticmethod
+    def parse_catalog_path(path: str) -> Dict[str, str]:
+        """Parses a catalog path into its component segments."""
+        m = re.match(
+            r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/catalogs/(?P<catalog>.+?)$",
+            path,
+        )
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def product_path(
+        project: str,
+        location: str,
+        catalog: str,
+        branch: str,
+        product: str,
+    ) -> str:
+        """Returns a fully-qualified product string."""
+        return "projects/{project}/locations/{location}/catalogs/{catalog}/branches/{branch}/products/{product}".format(
+            project=project,
+            location=location,
+            catalog=catalog,
+            branch=branch,
+            product=product,
+        )
+
+    @staticmethod
+    def parse_product_path(path: str) -> Dict[str, str]:
+        """Parses a product path into its component segments."""
+        m = re.match(
+            r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/catalogs/(?P<catalog>.+?)/branches/(?P<branch>.+?)/products/(?P<product>.+?)$",
+            path,
+        )
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_billing_account_path(
+        billing_account: str,
+    ) -> str:
+        """Returns a fully-qualified billing_account string."""
+        return "billingAccounts/{billing_account}".format(
+            billing_account=billing_account,
+        )
+
+    @staticmethod
+    def parse_common_billing_account_path(path: str) -> Dict[str, str]:
+        """Parse a billing_account path into its component segments."""
+        m = re.match(r"^billingAccounts/(?P<billing_account>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_folder_path(
+        folder: str,
+    ) -> str:
+        """Returns a fully-qualified folder string."""
+        return "folders/{folder}".format(
+            folder=folder,
+        )
+
+    @staticmethod
+    def parse_common_folder_path(path: str) -> Dict[str, str]:
+        """Parse a folder path into its component segments."""
+        m = re.match(r"^folders/(?P<folder>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_organization_path(
+        organization: str,
+    ) -> str:
+        """Returns a fully-qualified organization string."""
+        return "organizations/{organization}".format(
+            organization=organization,
+        )
+
+    @staticmethod
+    def parse_common_organization_path(path: str) -> Dict[str, str]:
+        """Parse an organization path into its component segments."""
+        m = re.match(r"^organizations/(?P<organization>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_project_path(
+        project: str,
+    ) -> str:
+        """Returns a fully-qualified project string."""
+        return "projects/{project}".format(
+            project=project,
+        )
+
+    @staticmethod
+    def parse_common_project_path(path: str) -> Dict[str, str]:
+        """Parse a project path into its component segments."""
+        m = re.match(r"^projects/(?P<project>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_location_path(
+        project: str,
+        location: str,
+    ) -> str:
+        """Returns a fully-qualified location string."""
+        return "projects/{project}/locations/{location}".format(
+            project=project,
+            location=location,
+        )
+
+    @staticmethod
+    def parse_common_location_path(path: str) -> Dict[str, str]:
+        """Parse a location path into its component segments."""
+        m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @classmethod
+    def get_mtls_endpoint_and_cert_source(
+        cls, client_options: Optional[client_options_lib.ClientOptions] = None
+    ):
+        """Deprecated. Return the API endpoint and client cert source for mutual TLS.
+
+        The client cert source is determined in the following order:
+        (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the
+        client cert source is None.
+        (2) if `client_options.client_cert_source` is provided, use the provided one; if the
+        default client cert source exists, use the default one; otherwise the client cert
+        source is None.
+
+        The API endpoint is determined in the following order:
+        (1) if `client_options.api_endpoint` is provided, use the provided one.
+        (2) if `GOOGLE_API_USE_MTLS_ENDPOINT` environment variable is "always", use the
+        default mTLS endpoint; if the environment variable is "never", use the default API
+        endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise
+        use the default API endpoint.
+
+        More details can be found at https://google.aip.dev/auth/4114.
+
+        Args:
+            client_options (google.api_core.client_options.ClientOptions): Custom options for the
+                client. Only the `api_endpoint` and `client_cert_source` properties may be used
+                in this method.
+
+        Returns:
+            Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the
+                client cert source to use.
+
+        Raises:
+            google.auth.exceptions.MutualTLSChannelError: If any errors happen.
+        """
+
+        warnings.warn(
+            "get_mtls_endpoint_and_cert_source is deprecated. Use the api_endpoint property instead.",
+            DeprecationWarning,
+        )
+        if client_options is None:
+            client_options = client_options_lib.ClientOptions()
+        use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false")
+        use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto")
+        if use_client_cert not in ("true", "false"):
+            raise ValueError(
+                "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`"
+            )
+        if use_mtls_endpoint not in ("auto", "never", "always"):
+            raise MutualTLSChannelError(
+                "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`"
+            )
+
+        # Figure out the client cert source to use.
+        client_cert_source = None
+        if use_client_cert == "true":
+            if client_options.client_cert_source:
+                client_cert_source = client_options.client_cert_source
+            elif mtls.has_default_client_cert_source():
+                client_cert_source = mtls.default_client_cert_source()
+
+        # Figure out which api endpoint to use.
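+        # An explicit ``client_options.api_endpoint`` always wins; otherwise the
+        # mTLS endpoint is chosen when ``GOOGLE_API_USE_MTLS_ENDPOINT`` is "always",
+        # or is "auto" and a client certificate source was found; the regular
+        # endpoint is used in every other case.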
+ if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + elif use_mtls_endpoint == "always" or ( + use_mtls_endpoint == "auto" and client_cert_source + ): + api_endpoint = cls.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = cls.DEFAULT_ENDPOINT + + return api_endpoint, client_cert_source + + @staticmethod + def _read_environment_variables(): + """Returns the environment variables used by the client. + + Returns: + Tuple[bool, str, str]: returns the GOOGLE_API_USE_CLIENT_CERTIFICATE, + GOOGLE_API_USE_MTLS_ENDPOINT, and GOOGLE_CLOUD_UNIVERSE_DOMAIN environment variables. + + Raises: + ValueError: If GOOGLE_API_USE_CLIENT_CERTIFICATE is not + any of ["true", "false"]. + google.auth.exceptions.MutualTLSChannelError: If GOOGLE_API_USE_MTLS_ENDPOINT + is not any of ["auto", "never", "always"]. + """ + use_client_cert = os.getenv( + "GOOGLE_API_USE_CLIENT_CERTIFICATE", "false" + ).lower() + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto").lower() + universe_domain_env = os.getenv("GOOGLE_CLOUD_UNIVERSE_DOMAIN") + if use_client_cert not in ("true", "false"): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError( + "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + return use_client_cert == "true", use_mtls_endpoint, universe_domain_env + + @staticmethod + def _get_client_cert_source(provided_cert_source, use_cert_flag): + """Return the client cert source to be used by the client. + + Args: + provided_cert_source (bytes): The client certificate source provided. + use_cert_flag (bool): A flag indicating whether to use the client certificate. + + Returns: + bytes or None: The client cert source to be used by the client. + """ + client_cert_source = None + if use_cert_flag: + if provided_cert_source: + client_cert_source = provided_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + return client_cert_source + + @staticmethod + def _get_api_endpoint( + api_override, client_cert_source, universe_domain, use_mtls_endpoint + ): + """Return the API endpoint used by the client. + + Args: + api_override (str): The API endpoint override. If specified, this is always + the return value of this function and the other arguments are not used. + client_cert_source (bytes): The client certificate source used by the client. + universe_domain (str): The universe domain used by the client. + use_mtls_endpoint (str): How to use the mTLS endpoint, which depends also on the other parameters. + Possible values are "always", "auto", or "never". + + Returns: + str: The API endpoint to be used by the client. + """ + if api_override is not None: + api_endpoint = api_override + elif use_mtls_endpoint == "always" or ( + use_mtls_endpoint == "auto" and client_cert_source + ): + _default_universe = BranchServiceClient._DEFAULT_UNIVERSE + if universe_domain != _default_universe: + raise MutualTLSChannelError( + f"mTLS is not supported in any universe other than {_default_universe}." 
+ ) + api_endpoint = BranchServiceClient.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = BranchServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=universe_domain + ) + return api_endpoint + + @staticmethod + def _get_universe_domain( + client_universe_domain: Optional[str], universe_domain_env: Optional[str] + ) -> str: + """Return the universe domain used by the client. + + Args: + client_universe_domain (Optional[str]): The universe domain configured via the client options. + universe_domain_env (Optional[str]): The universe domain configured via the "GOOGLE_CLOUD_UNIVERSE_DOMAIN" environment variable. + + Returns: + str: The universe domain to be used by the client. + + Raises: + ValueError: If the universe domain is an empty string. + """ + universe_domain = BranchServiceClient._DEFAULT_UNIVERSE + if client_universe_domain is not None: + universe_domain = client_universe_domain + elif universe_domain_env is not None: + universe_domain = universe_domain_env + if len(universe_domain.strip()) == 0: + raise ValueError("Universe Domain cannot be an empty string.") + return universe_domain + + @staticmethod + def _compare_universes( + client_universe: str, credentials: ga_credentials.Credentials + ) -> bool: + """Returns True iff the universe domains used by the client and credentials match. + + Args: + client_universe (str): The universe domain configured via the client options. + credentials (ga_credentials.Credentials): The credentials being used in the client. + + Returns: + bool: True iff client_universe matches the universe in credentials. + + Raises: + ValueError: when client_universe does not match the universe in credentials. + """ + + default_universe = BranchServiceClient._DEFAULT_UNIVERSE + credentials_universe = getattr(credentials, "universe_domain", default_universe) + + if client_universe != credentials_universe: + raise ValueError( + "The configured universe domain " + f"({client_universe}) does not match the universe domain " + f"found in the credentials ({credentials_universe}). " + "If you haven't configured the universe domain explicitly, " + f"`{default_universe}` is the default." + ) + return True + + def _validate_universe_domain(self): + """Validates client's and credentials' universe domains are consistent. + + Returns: + bool: True iff the configured universe domain is valid. + + Raises: + ValueError: If the configured universe domain is not valid. + """ + self._is_universe_domain_valid = ( + self._is_universe_domain_valid + or BranchServiceClient._compare_universes( + self.universe_domain, self.transport._credentials + ) + ) + return self._is_universe_domain_valid + + @property + def api_endpoint(self): + """Return the API endpoint used by the client instance. + + Returns: + str: The API endpoint used by the client instance. + """ + return self._api_endpoint + + @property + def universe_domain(self) -> str: + """Return the universe domain used by the client instance. + + Returns: + str: The universe domain used by the client instance. + """ + return self._universe_domain + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[ + Union[str, BranchServiceTransport, Callable[..., BranchServiceTransport]] + ] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the branch service client. 
+ + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Optional[Union[str,BranchServiceTransport,Callable[..., BranchServiceTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the BranchServiceTransport constructor. + If set to None, a transport is chosen automatically. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): + Custom options for the client. + + 1. The ``api_endpoint`` property can be used to override the + default endpoint provided by the client when ``transport`` is + not explicitly provided. Only if this property is not set and + ``transport`` was not explicitly provided, the endpoint is + determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment + variable, which have one of the following values: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto-switch to the + default mTLS endpoint if client certificate is present; this is + the default value). + + 2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide a client certificate for mTLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + 3. The ``universe_domain`` property can be used to override the + default "googleapis.com" universe. Note that the ``api_endpoint`` + property still takes precedence; and ``universe_domain`` is + currently not supported for mTLS. + + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + """ + self._client_options = client_options + if isinstance(self._client_options, dict): + self._client_options = client_options_lib.from_dict(self._client_options) + if self._client_options is None: + self._client_options = client_options_lib.ClientOptions() + self._client_options = cast( + client_options_lib.ClientOptions, self._client_options + ) + + universe_domain_opt = getattr(self._client_options, "universe_domain", None) + + ( + self._use_client_cert, + self._use_mtls_endpoint, + self._universe_domain_env, + ) = BranchServiceClient._read_environment_variables() + self._client_cert_source = BranchServiceClient._get_client_cert_source( + self._client_options.client_cert_source, self._use_client_cert + ) + self._universe_domain = BranchServiceClient._get_universe_domain( + universe_domain_opt, self._universe_domain_env + ) + self._api_endpoint = None # updated below, depending on `transport` + + # Initialize the universe domain validation. 
+ self._is_universe_domain_valid = False + + api_key_value = getattr(self._client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError( + "client_options.api_key and credentials are mutually exclusive" + ) + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + transport_provided = isinstance(transport, BranchServiceTransport) + if transport_provided: + # transport is a BranchServiceTransport instance. + if credentials or self._client_options.credentials_file or api_key_value: + raise ValueError( + "When providing a transport instance, " + "provide its credentials directly." + ) + if self._client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." + ) + self._transport = cast(BranchServiceTransport, transport) + self._api_endpoint = self._transport.host + + self._api_endpoint = ( + self._api_endpoint + or BranchServiceClient._get_api_endpoint( + self._client_options.api_endpoint, + self._client_cert_source, + self._universe_domain, + self._use_mtls_endpoint, + ) + ) + + if not transport_provided: + import google.auth._default # type: ignore + + if api_key_value and hasattr( + google.auth._default, "get_api_key_credentials" + ): + credentials = google.auth._default.get_api_key_credentials( + api_key_value + ) + + transport_init: Union[ + Type[BranchServiceTransport], Callable[..., BranchServiceTransport] + ] = ( + type(self).get_transport_class(transport) + if isinstance(transport, str) or transport is None + else cast(Callable[..., BranchServiceTransport], transport) + ) + # initialize with the provided callable or the passed in class + self._transport = transport_init( + credentials=credentials, + credentials_file=self._client_options.credentials_file, + host=self._api_endpoint, + scopes=self._client_options.scopes, + client_cert_source_for_mtls=self._client_cert_source, + quota_project_id=self._client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + api_audience=self._client_options.api_audience, + ) + + def list_branches( + self, + request: Optional[Union[branch_service.ListBranchesRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> branch_service.ListBranchesResponse: + r"""Lists all [Branch][google.cloud.retail.v2alpha.Branch]s under + the specified parent + [Catalog][google.cloud.retail.v2alpha.Catalog]. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import retail_v2alpha + + def sample_list_branches(): + # Create a client + client = retail_v2alpha.BranchServiceClient() + + # Initialize request argument(s) + request = retail_v2alpha.ListBranchesRequest( + parent="parent_value", + ) + + # Make the request + response = client.list_branches(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.retail_v2alpha.types.ListBranchesRequest, dict]): + The request object. Request for + [BranchService.ListBranches][google.cloud.retail.v2alpha.BranchService.ListBranches] + method. + parent (str): + Required. The parent catalog resource + name. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.retail_v2alpha.types.ListBranchesResponse: + Response for + [BranchService.ListBranches][google.cloud.retail.v2alpha.BranchService.ListBranches] + method. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, branch_service.ListBranchesRequest): + request = branch_service.ListBranchesRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_branches] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_branch( + self, + request: Optional[Union[branch_service.GetBranchRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> branch.Branch: + r"""Retrieves a [Branch][google.cloud.retail.v2alpha.Branch]. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import retail_v2alpha + + def sample_get_branch(): + # Create a client + client = retail_v2alpha.BranchServiceClient() + + # Initialize request argument(s) + request = retail_v2alpha.GetBranchRequest( + name="name_value", + ) + + # Make the request + response = client.get_branch(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.retail_v2alpha.types.GetBranchRequest, dict]): + The request object. Request for + [BranchService.GetBranch][google.cloud.retail.v2alpha.BranchService.GetBranch] + method. + name (str): + Required. The name of the branch to retrieve. Format: + ``projects/*/locations/global/catalogs/default_catalog/branches/some_branch_id``. + + "default_branch" can be used as a special branch_id, it + returns the default branch that has been set for the + catalog. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.retail_v2alpha.types.Branch: + A data branch that stores + [Product][google.cloud.retail.v2alpha.Product]s. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, branch_service.GetBranchRequest): + request = branch_service.GetBranchRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_branch] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def __enter__(self) -> "BranchServiceClient": + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! 
+ """ + self.transport.close() + + def list_operations( + self, + request: Optional[operations_pb2.ListOperationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Lists operations that match the specified filter in the request. + + Args: + request (:class:`~.operations_pb2.ListOperationsRequest`): + The request object. Request message for + `ListOperations` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.ListOperationsResponse: + Response message for ``ListOperations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.ListOperationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.list_operations, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_operation( + self, + request: Optional[operations_pb2.GetOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. + + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.get_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("BranchServiceClient",) diff --git a/packages/google-cloud-retail/google/cloud/retail_v2alpha/services/branch_service/transports/__init__.py b/packages/google-cloud-retail/google/cloud/retail_v2alpha/services/branch_service/transports/__init__.py new file mode 100644 index 000000000000..0eaca8040150 --- /dev/null +++ b/packages/google-cloud-retail/google/cloud/retail_v2alpha/services/branch_service/transports/__init__.py @@ -0,0 +1,36 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +from typing import Dict, Type + +from .base import BranchServiceTransport +from .grpc import BranchServiceGrpcTransport +from .grpc_asyncio import BranchServiceGrpcAsyncIOTransport +from .rest import BranchServiceRestInterceptor, BranchServiceRestTransport + +# Compile a registry of transports. +_transport_registry = OrderedDict() # type: Dict[str, Type[BranchServiceTransport]] +_transport_registry["grpc"] = BranchServiceGrpcTransport +_transport_registry["grpc_asyncio"] = BranchServiceGrpcAsyncIOTransport +_transport_registry["rest"] = BranchServiceRestTransport + +__all__ = ( + "BranchServiceTransport", + "BranchServiceGrpcTransport", + "BranchServiceGrpcAsyncIOTransport", + "BranchServiceRestTransport", + "BranchServiceRestInterceptor", +) diff --git a/packages/google-cloud-retail/google/cloud/retail_v2alpha/services/branch_service/transports/base.py b/packages/google-cloud-retail/google/cloud/retail_v2alpha/services/branch_service/transports/base.py new file mode 100644 index 000000000000..151eac982526 --- /dev/null +++ b/packages/google-cloud-retail/google/cloud/retail_v2alpha/services/branch_service/transports/base.py @@ -0,0 +1,199 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import abc +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union + +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.retail_v2alpha import gapic_version as package_version +from google.cloud.retail_v2alpha.types import branch, branch_service + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +class BranchServiceTransport(abc.ABC): + """Abstract transport class for BranchService.""" + + AUTH_SCOPES = ("https://www.googleapis.com/auth/cloud-platform",) + + DEFAULT_HOST: str = "retail.googleapis.com" + + def __init__( + self, + *, + host: str = DEFAULT_HOST, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + **kwargs, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'retail.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A list of scopes. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + """ + + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} + + # Save the scopes. + self._scopes = scopes + + # If no credentials are provided, then determine the appropriate + # defaults. + if credentials and credentials_file: + raise core_exceptions.DuplicateCredentialArgs( + "'credentials_file' and 'credentials' are mutually exclusive" + ) + + if credentials_file is not None: + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, **scopes_kwargs, quota_project_id=quota_project_id + ) + elif credentials is None: + credentials, _ = google.auth.default( + **scopes_kwargs, quota_project_id=quota_project_id + ) + # Don't apply audience if the credentials file passed from user. 
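+            # The GDC-H audience below is applied only when the credential type
+            # exposes ``with_gdch_audience``; the audience falls back to the host
+            # when no ``api_audience`` is given.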
+ if hasattr(credentials, "with_gdch_audience"): + credentials = credentials.with_gdch_audience( + api_audience if api_audience else host + ) + + # If the credentials are service account credentials, then always try to use self signed JWT. + if ( + always_use_jwt_access + and isinstance(credentials, service_account.Credentials) + and hasattr(service_account.Credentials, "with_always_use_jwt_access") + ): + credentials = credentials.with_always_use_jwt_access(True) + + # Save the credentials. + self._credentials = credentials + + # Save the hostname. Default to port 443 (HTTPS) if none is specified. + if ":" not in host: + host += ":443" + self._host = host + + @property + def host(self): + return self._host + + def _prep_wrapped_messages(self, client_info): + # Precompute the wrapped methods. + self._wrapped_methods = { + self.list_branches: gapic_v1.method.wrap_method( + self.list_branches, + default_timeout=None, + client_info=client_info, + ), + self.get_branch: gapic_v1.method.wrap_method( + self.get_branch, + default_timeout=None, + client_info=client_info, + ), + } + + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! + """ + raise NotImplementedError() + + @property + def list_branches( + self, + ) -> Callable[ + [branch_service.ListBranchesRequest], + Union[ + branch_service.ListBranchesResponse, + Awaitable[branch_service.ListBranchesResponse], + ], + ]: + raise NotImplementedError() + + @property + def get_branch( + self, + ) -> Callable[ + [branch_service.GetBranchRequest], + Union[branch.Branch, Awaitable[branch.Branch]], + ]: + raise NotImplementedError() + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], + Union[ + operations_pb2.ListOperationsResponse, + Awaitable[operations_pb2.ListOperationsResponse], + ], + ]: + raise NotImplementedError() + + @property + def get_operation( + self, + ) -> Callable[ + [operations_pb2.GetOperationRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def kind(self) -> str: + raise NotImplementedError() + + +__all__ = ("BranchServiceTransport",) diff --git a/packages/google-cloud-retail/google/cloud/retail_v2alpha/services/branch_service/transports/grpc.py b/packages/google-cloud-retail/google/cloud/retail_v2alpha/services/branch_service/transports/grpc.py new file mode 100644 index 000000000000..6cf1954cfbf1 --- /dev/null +++ b/packages/google-cloud-retail/google/cloud/retail_v2alpha/services/branch_service/transports/grpc.py @@ -0,0 +1,342 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from typing import Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, grpc_helpers +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +import grpc # type: ignore + +from google.cloud.retail_v2alpha.types import branch, branch_service + +from .base import DEFAULT_CLIENT_INFO, BranchServiceTransport + + +class BranchServiceGrpcTransport(BranchServiceTransport): + """gRPC backend transport for BranchService. + + Service for [Branch][google.cloud.retail.v2alpha.Branch] Management + + [Branch][google.cloud.retail.v2alpha.Branch]es are automatically + created when a [Catalog][google.cloud.retail.v2alpha.Catalog] is + created. There are fixed three branches in each catalog, and may use + [ListBranches][google.cloud.retail.v2alpha.BranchService.ListBranches] + method to get the details of all branches. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + + _stubs: Dict[str, Callable] + + def __init__( + self, + *, + host: str = "retail.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'retail.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if a ``channel`` instance is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if a ``channel`` instance is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if a ``channel`` instance is provided. + channel (Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]]): + A ``Channel`` instance through which to make calls, or a Callable + that constructs and returns one. If set to None, ``self.create_channel`` + is used to create the channel. If a Callable is given, it will be called + with the same arguments as used in ``self.create_channel``. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. 
+ If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if a ``channel`` instance is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if isinstance(channel, grpc.Channel): + # Ignore credentials if a channel was passed. + credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + # initialize with the provided callable or the default channel + channel_init = channel or type(self).create_channel + self._grpc_channel = channel_init( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. 
+ credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @classmethod + def create_channel( + cls, + host: str = "retail.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> grpc.Channel: + """Create and return a gRPC channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + grpc.Channel: A gRPC channel object. + + Raises: + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + + return grpc_helpers.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs, + ) + + @property + def grpc_channel(self) -> grpc.Channel: + """Return the channel designed to connect to this service.""" + return self._grpc_channel + + @property + def list_branches( + self, + ) -> Callable[ + [branch_service.ListBranchesRequest], branch_service.ListBranchesResponse + ]: + r"""Return a callable for the list branches method over gRPC. + + Lists all [Branch][google.cloud.retail.v2alpha.Branch]s under + the specified parent + [Catalog][google.cloud.retail.v2alpha.Catalog]. + + Returns: + Callable[[~.ListBranchesRequest], + ~.ListBranchesResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_branches" not in self._stubs: + self._stubs["list_branches"] = self.grpc_channel.unary_unary( + "/google.cloud.retail.v2alpha.BranchService/ListBranches", + request_serializer=branch_service.ListBranchesRequest.serialize, + response_deserializer=branch_service.ListBranchesResponse.deserialize, + ) + return self._stubs["list_branches"] + + @property + def get_branch(self) -> Callable[[branch_service.GetBranchRequest], branch.Branch]: + r"""Return a callable for the get branch method over gRPC. + + Retrieves a [Branch][google.cloud.retail.v2alpha.Branch]. 
+ + Returns: + Callable[[~.GetBranchRequest], + ~.Branch]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_branch" not in self._stubs: + self._stubs["get_branch"] = self.grpc_channel.unary_unary( + "/google.cloud.retail.v2alpha.BranchService/GetBranch", + request_serializer=branch_service.GetBranchRequest.serialize, + response_deserializer=branch.Branch.deserialize, + ) + return self._stubs["get_branch"] + + def close(self): + self.grpc_channel.close() + + @property + def get_operation( + self, + ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: + r"""Return a callable for the get_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_operation" not in self._stubs: + self._stubs["get_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/GetOperation", + request_serializer=operations_pb2.GetOperationRequest.SerializeToString, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["get_operation"] + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse + ]: + r"""Return a callable for the list_operations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_operations" not in self._stubs: + self._stubs["list_operations"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/ListOperations", + request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, + response_deserializer=operations_pb2.ListOperationsResponse.FromString, + ) + return self._stubs["list_operations"] + + @property + def kind(self) -> str: + return "grpc" + + +__all__ = ("BranchServiceGrpcTransport",) diff --git a/packages/google-cloud-retail/google/cloud/retail_v2alpha/services/branch_service/transports/grpc_asyncio.py b/packages/google-cloud-retail/google/cloud/retail_v2alpha/services/branch_service/transports/grpc_asyncio.py new file mode 100644 index 000000000000..0b883d737551 --- /dev/null +++ b/packages/google-cloud-retail/google/cloud/retail_v2alpha/services/branch_service/transports/grpc_asyncio.py @@ -0,0 +1,360 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1, grpc_helpers_async +from google.api_core import retry_async as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +import grpc # type: ignore +from grpc.experimental import aio # type: ignore + +from google.cloud.retail_v2alpha.types import branch, branch_service + +from .base import DEFAULT_CLIENT_INFO, BranchServiceTransport +from .grpc import BranchServiceGrpcTransport + + +class BranchServiceGrpcAsyncIOTransport(BranchServiceTransport): + """gRPC AsyncIO backend transport for BranchService. + + Service for [Branch][google.cloud.retail.v2alpha.Branch] Management + + [Branch][google.cloud.retail.v2alpha.Branch]es are automatically + created when a [Catalog][google.cloud.retail.v2alpha.Catalog] is + created. There are fixed three branches in each catalog, and may use + [ListBranches][google.cloud.retail.v2alpha.BranchService.ListBranches] + method to get the details of all branches. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + + _grpc_channel: aio.Channel + _stubs: Dict[str, Callable] = {} + + @classmethod + def create_channel( + cls, + host: str = "retail.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> aio.Channel: + """Create and return a gRPC AsyncIO channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + aio.Channel: A gRPC AsyncIO channel object. 
+ """ + + return grpc_helpers_async.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs, + ) + + def __init__( + self, + *, + host: str = "retail.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[Union[aio.Channel, Callable[..., aio.Channel]]] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'retail.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if a ``channel`` instance is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if a ``channel`` instance is provided. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + channel (Optional[Union[aio.Channel, Callable[..., aio.Channel]]]): + A ``Channel`` instance through which to make calls, or a Callable + that constructs and returns one. If set to None, ``self.create_channel`` + is used to create the channel. If a Callable is given, it will be called + with the same arguments as used in ``self.create_channel``. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if a ``channel`` instance is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. 
+ Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if isinstance(channel, aio.Channel): + # Ignore credentials if a channel was passed. + credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + # initialize with the provided callable or the default channel + channel_init = channel or type(self).create_channel + self._grpc_channel = channel_init( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @property + def grpc_channel(self) -> aio.Channel: + """Create the channel designed to connect to this service. + + This property caches on the instance; repeated calls return + the same channel. + """ + # Return the channel from cache. + return self._grpc_channel + + @property + def list_branches( + self, + ) -> Callable[ + [branch_service.ListBranchesRequest], + Awaitable[branch_service.ListBranchesResponse], + ]: + r"""Return a callable for the list branches method over gRPC. + + Lists all [Branch][google.cloud.retail.v2alpha.Branch]s under + the specified parent + [Catalog][google.cloud.retail.v2alpha.Catalog]. + + Returns: + Callable[[~.ListBranchesRequest], + Awaitable[~.ListBranchesResponse]]: + A function that, when called, will call the underlying RPC + on the server. 
+ """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_branches" not in self._stubs: + self._stubs["list_branches"] = self.grpc_channel.unary_unary( + "/google.cloud.retail.v2alpha.BranchService/ListBranches", + request_serializer=branch_service.ListBranchesRequest.serialize, + response_deserializer=branch_service.ListBranchesResponse.deserialize, + ) + return self._stubs["list_branches"] + + @property + def get_branch( + self, + ) -> Callable[[branch_service.GetBranchRequest], Awaitable[branch.Branch]]: + r"""Return a callable for the get branch method over gRPC. + + Retrieves a [Branch][google.cloud.retail.v2alpha.Branch]. + + Returns: + Callable[[~.GetBranchRequest], + Awaitable[~.Branch]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_branch" not in self._stubs: + self._stubs["get_branch"] = self.grpc_channel.unary_unary( + "/google.cloud.retail.v2alpha.BranchService/GetBranch", + request_serializer=branch_service.GetBranchRequest.serialize, + response_deserializer=branch.Branch.deserialize, + ) + return self._stubs["get_branch"] + + def _prep_wrapped_messages(self, client_info): + """Precompute the wrapped methods, overriding the base class method to use async wrappers.""" + self._wrapped_methods = { + self.list_branches: gapic_v1.method_async.wrap_method( + self.list_branches, + default_timeout=None, + client_info=client_info, + ), + self.get_branch: gapic_v1.method_async.wrap_method( + self.get_branch, + default_timeout=None, + client_info=client_info, + ), + } + + def close(self): + return self.grpc_channel.close() + + @property + def get_operation( + self, + ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: + r"""Return a callable for the get_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_operation" not in self._stubs: + self._stubs["get_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/GetOperation", + request_serializer=operations_pb2.GetOperationRequest.SerializeToString, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["get_operation"] + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse + ]: + r"""Return a callable for the list_operations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "list_operations" not in self._stubs: + self._stubs["list_operations"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/ListOperations", + request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, + response_deserializer=operations_pb2.ListOperationsResponse.FromString, + ) + return self._stubs["list_operations"] + + +__all__ = ("BranchServiceGrpcAsyncIOTransport",) diff --git a/packages/google-cloud-retail/google/cloud/retail_v2alpha/services/branch_service/transports/rest.py b/packages/google-cloud-retail/google/cloud/retail_v2alpha/services/branch_service/transports/rest.py new file mode 100644 index 000000000000..d5989f6296d4 --- /dev/null +++ b/packages/google-cloud-retail/google/cloud/retail_v2alpha/services/branch_service/transports/rest.py @@ -0,0 +1,643 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +import dataclasses +import json # type: ignore +import re +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, path_template, rest_helpers, rest_streaming +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.transport.requests import AuthorizedSession # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.protobuf import json_format +import grpc # type: ignore +from requests import __version__ as requests_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + + +from google.longrunning import operations_pb2 # type: ignore + +from google.cloud.retail_v2alpha.types import branch, branch_service + +from .base import BranchServiceTransport +from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) + + +class BranchServiceRestInterceptor: + """Interceptor for BranchService. + + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. + Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the BranchServiceRestTransport. + + .. 
code-block:: python + class MyCustomBranchServiceInterceptor(BranchServiceRestInterceptor): + def pre_get_branch(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_branch(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_branches(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_branches(self, response): + logging.log(f"Received response: {response}") + return response + + transport = BranchServiceRestTransport(interceptor=MyCustomBranchServiceInterceptor()) + client = BranchServiceClient(transport=transport) + + + """ + + def pre_get_branch( + self, + request: branch_service.GetBranchRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[branch_service.GetBranchRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_branch + + Override in a subclass to manipulate the request or metadata + before they are sent to the BranchService server. + """ + return request, metadata + + def post_get_branch(self, response: branch.Branch) -> branch.Branch: + """Post-rpc interceptor for get_branch + + Override in a subclass to manipulate the response + after it is returned by the BranchService server but before + it is returned to user code. + """ + return response + + def pre_list_branches( + self, + request: branch_service.ListBranchesRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[branch_service.ListBranchesRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_branches + + Override in a subclass to manipulate the request or metadata + before they are sent to the BranchService server. + """ + return request, metadata + + def post_list_branches( + self, response: branch_service.ListBranchesResponse + ) -> branch_service.ListBranchesResponse: + """Post-rpc interceptor for list_branches + + Override in a subclass to manipulate the response + after it is returned by the BranchService server but before + it is returned to user code. + """ + return response + + def pre_get_operation( + self, + request: operations_pb2.GetOperationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[operations_pb2.GetOperationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the BranchService server. + """ + return request, metadata + + def post_get_operation( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for get_operation + + Override in a subclass to manipulate the response + after it is returned by the BranchService server but before + it is returned to user code. + """ + return response + + def pre_list_operations( + self, + request: operations_pb2.ListOperationsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[operations_pb2.ListOperationsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_operations + + Override in a subclass to manipulate the request or metadata + before they are sent to the BranchService server. + """ + return request, metadata + + def post_list_operations( + self, response: operations_pb2.ListOperationsResponse + ) -> operations_pb2.ListOperationsResponse: + """Post-rpc interceptor for list_operations + + Override in a subclass to manipulate the response + after it is returned by the BranchService server but before + it is returned to user code. 
+ """ + return response + + +@dataclasses.dataclass +class BranchServiceRestStub: + _session: AuthorizedSession + _host: str + _interceptor: BranchServiceRestInterceptor + + +class BranchServiceRestTransport(BranchServiceTransport): + """REST backend transport for BranchService. + + Service for [Branch][google.cloud.retail.v2alpha.Branch] Management + + [Branch][google.cloud.retail.v2alpha.Branch]es are automatically + created when a [Catalog][google.cloud.retail.v2alpha.Catalog] is + created. There are fixed three branches in each catalog, and may use + [ListBranches][google.cloud.retail.v2alpha.BranchService.ListBranches] + method to get the details of all branches. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends JSON representations of protocol buffers over HTTP/1.1 + + """ + + def __init__( + self, + *, + host: str = "retail.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", + interceptor: Optional[BranchServiceRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'retail.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. It is ignored + if ``channel`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. + """ + # Run the base constructor + # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. 
+        # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the
+        # credentials object
+        maybe_url_match = re.match("^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host)
+        if maybe_url_match is None:
+            raise ValueError(
+                f"Unexpected hostname structure: {host}"
+            )  # pragma: NO COVER
+
+        url_match_items = maybe_url_match.groupdict()
+
+        host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host
+
+        super().__init__(
+            host=host,
+            credentials=credentials,
+            client_info=client_info,
+            always_use_jwt_access=always_use_jwt_access,
+            api_audience=api_audience,
+        )
+        self._session = AuthorizedSession(
+            self._credentials, default_host=self.DEFAULT_HOST
+        )
+        if client_cert_source_for_mtls:
+            self._session.configure_mtls_channel(client_cert_source_for_mtls)
+        self._interceptor = interceptor or BranchServiceRestInterceptor()
+        self._prep_wrapped_messages(client_info)
+
+    class _GetBranch(BranchServiceRestStub):
+        def __hash__(self):
+            return hash("GetBranch")
+
+        __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {}
+
+        @classmethod
+        def _get_unset_required_fields(cls, message_dict):
+            return {
+                k: v
+                for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items()
+                if k not in message_dict
+            }
+
+        def __call__(
+            self,
+            request: branch_service.GetBranchRequest,
+            *,
+            retry: OptionalRetry = gapic_v1.method.DEFAULT,
+            timeout: Optional[float] = None,
+            metadata: Sequence[Tuple[str, str]] = (),
+        ) -> branch.Branch:
+            r"""Call the get branch method over HTTP.
+
+            Args:
+                request (~.branch_service.GetBranchRequest):
+                    The request object. Request for
+                    [BranchService.GetBranch][google.cloud.retail.v2alpha.BranchService.GetBranch]
+                    method.
+                retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                    should be retried.
+                timeout (float): The timeout for this request.
+                metadata (Sequence[Tuple[str, str]]): Strings which should be
+                    sent along with the request as metadata.
+
+            Returns:
+                ~.branch.Branch:
+                    A data branch that stores
+                    [Product][google.cloud.retail.v2alpha.Product]s.
+
+            """
+
+            http_options: List[Dict[str, str]] = [
+                {
+                    "method": "get",
+                    "uri": "/v2alpha/{name=projects/*/locations/*/catalogs/*/branches/*}",
+                },
+            ]
+            request, metadata = self._interceptor.pre_get_branch(request, metadata)
+            pb_request = branch_service.GetBranchRequest.pb(request)
+            transcoded_request = path_template.transcode(http_options, pb_request)
+
+            uri = transcoded_request["uri"]
+            method = transcoded_request["method"]
+
+            # Jsonify the query params
+            query_params = json.loads(
+                json_format.MessageToJson(
+                    transcoded_request["query_params"],
+                    use_integers_for_enums=True,
+                )
+            )
+            query_params.update(self._get_unset_required_fields(query_params))
+
+            query_params["$alt"] = "json;enum-encoding=int"
+
+            # Send the request
+            headers = dict(metadata)
+            headers["Content-Type"] = "application/json"
+            response = getattr(self._session, method)(
+                "{host}{uri}".format(host=self._host, uri=uri),
+                timeout=timeout,
+                headers=headers,
+                params=rest_helpers.flatten_query_params(query_params, strict=True),
+            )
+
+            # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception
+            # subclass.
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = branch.Branch() + pb_resp = branch.Branch.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_branch(resp) + return resp + + class _ListBranches(BranchServiceRestStub): + def __hash__(self): + return hash("ListBranches") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: branch_service.ListBranchesRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> branch_service.ListBranchesResponse: + r"""Call the list branches method over HTTP. + + Args: + request (~.branch_service.ListBranchesRequest): + The request object. Request for + [BranchService.ListBranches][google.cloud.retail.v2alpha.BranchService.ListBranches] + method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.branch_service.ListBranchesResponse: + Response for + [BranchService.ListBranches][google.cloud.retail.v2alpha.BranchService.ListBranches] + method. + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v2alpha/{parent=projects/*/locations/*/catalogs/*}/branches", + }, + ] + request, metadata = self._interceptor.pre_list_branches(request, metadata) + pb_request = branch_service.ListBranchesRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = branch_service.ListBranchesResponse() + pb_resp = branch_service.ListBranchesResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_branches(resp) + return resp + + @property + def get_branch(self) -> Callable[[branch_service.GetBranchRequest], branch.Branch]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._GetBranch(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_branches( + self, + ) -> Callable[ + [branch_service.ListBranchesRequest], branch_service.ListBranchesResponse + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListBranches(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_operation(self): + return self._GetOperation(self._session, self._host, self._interceptor) # type: ignore + + class _GetOperation(BranchServiceRestStub): + def __call__( + self, + request: operations_pb2.GetOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the get operation method over HTTP. + + Args: + request (operations_pb2.GetOperationRequest): + The request object for GetOperation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + operations_pb2.Operation: Response from GetOperation method. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v2alpha/{name=projects/*/locations/*/catalogs/*/branches/*/operations/*}", + }, + { + "method": "get", + "uri": "/v2alpha/{name=projects/*/locations/*/catalogs/*/branches/*/places/*/operations/*}", + }, + { + "method": "get", + "uri": "/v2alpha/{name=projects/*/locations/*/catalogs/*/operations/*}", + }, + { + "method": "get", + "uri": "/v2alpha/{name=projects/*/locations/*/operations/*}", + }, + { + "method": "get", + "uri": "/v2alpha/{name=projects/*/operations/*}", + }, + ] + + request, metadata = self._interceptor.pre_get_operation(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = operations_pb2.Operation() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_get_operation(resp) + return resp + + @property + def list_operations(self): + return self._ListOperations(self._session, self._host, self._interceptor) # type: ignore + + class _ListOperations(BranchServiceRestStub): + def __call__( + self, + request: operations_pb2.ListOperationsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Call the list operations method over HTTP. 
+ + Args: + request (operations_pb2.ListOperationsRequest): + The request object for ListOperations method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + operations_pb2.ListOperationsResponse: Response from ListOperations method. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v2alpha/{name=projects/*/locations/*/catalogs/*}/operations", + }, + { + "method": "get", + "uri": "/v2alpha/{name=projects/*/locations/*}/operations", + }, + { + "method": "get", + "uri": "/v2alpha/{name=projects/*}/operations", + }, + ] + + request, metadata = self._interceptor.pre_list_operations(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = operations_pb2.ListOperationsResponse() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_list_operations(resp) + return resp + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__ = ("BranchServiceRestTransport",) diff --git a/packages/google-cloud-retail/google/cloud/retail_v2alpha/services/merchant_center_account_link_service/async_client.py b/packages/google-cloud-retail/google/cloud/retail_v2alpha/services/merchant_center_account_link_service/async_client.py index b9d96f6b83ec..a502b766a910 100644 --- a/packages/google-cloud-retail/google/cloud/retail_v2alpha/services/merchant_center_account_link_service/async_client.py +++ b/packages/google-cloud-retail/google/cloud/retail_v2alpha/services/merchant_center_account_link_service/async_client.py @@ -346,7 +346,7 @@ async def sample_list_merchant_center_account_links(): parent (:class:`str`): Required. The parent Catalog of the resource. It must match this format: - projects/{PROJECT_NUMBER}/locations/global/catalogs/{CATALOG_ID} + ``projects/{PROJECT_NUMBER}/locations/global/catalogs/{CATALOG_ID}`` This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this @@ -478,7 +478,7 @@ async def sample_create_merchant_center_account_link(): parent (:class:`str`): Required. The branch resource where this MerchantCenterAccountLink will be created. Format: - projects/{PROJECT_NUMBER}/locations/global/catalogs/{CATALOG_ID}} + ``projects/{PROJECT_NUMBER}/locations/global/catalogs/{CATALOG_ID}`` This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this @@ -507,9 +507,9 @@ async def sample_create_merchant_center_account_link(): An object representing a long-running operation. 
The result type for the operation will be :class:`google.cloud.retail_v2alpha.types.MerchantCenterAccountLink` Represents a link between a Merchant Center account and a branch. - Once a link is established, products from the linked - merchant center account will be streamed to the - linked branch. + After a link is established, products from the linked + Merchant Center account are streamed to the linked + branch. """ # Create or coerce a protobuf request object. @@ -623,7 +623,7 @@ async def sample_delete_merchant_center_account_link(): method. name (:class:`str`): Required. Full resource name. Format: - projects/{project_number}/locations/{location_id}/catalogs/{catalog_id}/merchantCenterAccountLinks/{merchant_center_account_link_id} + ``projects/{project_number}/locations/{location_id}/catalogs/{catalog_id}/merchantCenterAccountLinks/{merchant_center_account_link_id}`` This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this diff --git a/packages/google-cloud-retail/google/cloud/retail_v2alpha/services/merchant_center_account_link_service/client.py b/packages/google-cloud-retail/google/cloud/retail_v2alpha/services/merchant_center_account_link_service/client.py index 4c5ca94c77c5..7b018ce55405 100644 --- a/packages/google-cloud-retail/google/cloud/retail_v2alpha/services/merchant_center_account_link_service/client.py +++ b/packages/google-cloud-retail/google/cloud/retail_v2alpha/services/merchant_center_account_link_service/client.py @@ -786,7 +786,7 @@ def sample_list_merchant_center_account_links(): parent (str): Required. The parent Catalog of the resource. It must match this format: - projects/{PROJECT_NUMBER}/locations/global/catalogs/{CATALOG_ID} + ``projects/{PROJECT_NUMBER}/locations/global/catalogs/{CATALOG_ID}`` This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this @@ -917,7 +917,7 @@ def sample_create_merchant_center_account_link(): parent (str): Required. The branch resource where this MerchantCenterAccountLink will be created. Format: - projects/{PROJECT_NUMBER}/locations/global/catalogs/{CATALOG_ID}} + ``projects/{PROJECT_NUMBER}/locations/global/catalogs/{CATALOG_ID}`` This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this @@ -946,9 +946,9 @@ def sample_create_merchant_center_account_link(): An object representing a long-running operation. The result type for the operation will be :class:`google.cloud.retail_v2alpha.types.MerchantCenterAccountLink` Represents a link between a Merchant Center account and a branch. - Once a link is established, products from the linked - merchant center account will be streamed to the - linked branch. + After a link is established, products from the linked + Merchant Center account are streamed to the linked + branch. """ # Create or coerce a protobuf request object. @@ -1061,7 +1061,7 @@ def sample_delete_merchant_center_account_link(): method. name (str): Required. Full resource name. 
Format: - projects/{project_number}/locations/{location_id}/catalogs/{catalog_id}/merchantCenterAccountLinks/{merchant_center_account_link_id} + ``projects/{project_number}/locations/{location_id}/catalogs/{catalog_id}/merchantCenterAccountLinks/{merchant_center_account_link_id}`` This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this diff --git a/packages/google-cloud-retail/google/cloud/retail_v2alpha/services/product_service/async_client.py b/packages/google-cloud-retail/google/cloud/retail_v2alpha/services/product_service/async_client.py index 3fb0d23ab89f..8fe2f92602da 100644 --- a/packages/google-cloud-retail/google/cloud/retail_v2alpha/services/product_service/async_client.py +++ b/packages/google-cloud-retail/google/cloud/retail_v2alpha/services/product_service/async_client.py @@ -1435,10 +1435,11 @@ async def add_fulfillment_places( timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> operation_async.AsyncOperation: - r"""It is recommended to use the + r"""We recommend that you use the [ProductService.AddLocalInventories][google.cloud.retail.v2alpha.ProductService.AddLocalInventories] - method instead of - [ProductService.AddFulfillmentPlaces][google.cloud.retail.v2alpha.ProductService.AddFulfillmentPlaces]. + method instead of the + [ProductService.AddFulfillmentPlaces][google.cloud.retail.v2alpha.ProductService.AddFulfillmentPlaces] + method. [ProductService.AddLocalInventories][google.cloud.retail.v2alpha.ProductService.AddLocalInventories] achieves the same results but provides more fine-grained control over ingesting local inventory data. @@ -1598,10 +1599,11 @@ async def remove_fulfillment_places( timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> operation_async.AsyncOperation: - r"""It is recommended to use the + r"""We recommend that you use the [ProductService.RemoveLocalInventories][google.cloud.retail.v2alpha.ProductService.RemoveLocalInventories] - method instead of - [ProductService.RemoveFulfillmentPlaces][google.cloud.retail.v2alpha.ProductService.RemoveFulfillmentPlaces]. + method instead of the + [ProductService.RemoveFulfillmentPlaces][google.cloud.retail.v2alpha.ProductService.RemoveFulfillmentPlaces] + method. [ProductService.RemoveLocalInventories][google.cloud.retail.v2alpha.ProductService.RemoveLocalInventories] achieves the same results but provides more fine-grained control over ingesting local inventory data. diff --git a/packages/google-cloud-retail/google/cloud/retail_v2alpha/services/product_service/client.py b/packages/google-cloud-retail/google/cloud/retail_v2alpha/services/product_service/client.py index 7a1a3001b2ab..012ce1b461ea 100644 --- a/packages/google-cloud-retail/google/cloud/retail_v2alpha/services/product_service/client.py +++ b/packages/google-cloud-retail/google/cloud/retail_v2alpha/services/product_service/client.py @@ -1859,10 +1859,11 @@ def add_fulfillment_places( timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> operation.Operation: - r"""It is recommended to use the + r"""We recommend that you use the [ProductService.AddLocalInventories][google.cloud.retail.v2alpha.ProductService.AddLocalInventories] - method instead of - [ProductService.AddFulfillmentPlaces][google.cloud.retail.v2alpha.ProductService.AddFulfillmentPlaces]. 
+ method instead of the + [ProductService.AddFulfillmentPlaces][google.cloud.retail.v2alpha.ProductService.AddFulfillmentPlaces] + method. [ProductService.AddLocalInventories][google.cloud.retail.v2alpha.ProductService.AddLocalInventories] achieves the same results but provides more fine-grained control over ingesting local inventory data. @@ -2019,10 +2020,11 @@ def remove_fulfillment_places( timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> operation.Operation: - r"""It is recommended to use the + r"""We recommend that you use the [ProductService.RemoveLocalInventories][google.cloud.retail.v2alpha.ProductService.RemoveLocalInventories] - method instead of - [ProductService.RemoveFulfillmentPlaces][google.cloud.retail.v2alpha.ProductService.RemoveFulfillmentPlaces]. + method instead of the + [ProductService.RemoveFulfillmentPlaces][google.cloud.retail.v2alpha.ProductService.RemoveFulfillmentPlaces] + method. [ProductService.RemoveLocalInventories][google.cloud.retail.v2alpha.ProductService.RemoveLocalInventories] achieves the same results but provides more fine-grained control over ingesting local inventory data. diff --git a/packages/google-cloud-retail/google/cloud/retail_v2alpha/services/product_service/transports/grpc.py b/packages/google-cloud-retail/google/cloud/retail_v2alpha/services/product_service/transports/grpc.py index 6805e45464e4..103e0998c8e7 100644 --- a/packages/google-cloud-retail/google/cloud/retail_v2alpha/services/product_service/transports/grpc.py +++ b/packages/google-cloud-retail/google/cloud/retail_v2alpha/services/product_service/transports/grpc.py @@ -552,10 +552,11 @@ def add_fulfillment_places( ]: r"""Return a callable for the add fulfillment places method over gRPC. - It is recommended to use the + We recommend that you use the [ProductService.AddLocalInventories][google.cloud.retail.v2alpha.ProductService.AddLocalInventories] - method instead of - [ProductService.AddFulfillmentPlaces][google.cloud.retail.v2alpha.ProductService.AddFulfillmentPlaces]. + method instead of the + [ProductService.AddFulfillmentPlaces][google.cloud.retail.v2alpha.ProductService.AddFulfillmentPlaces] + method. [ProductService.AddLocalInventories][google.cloud.retail.v2alpha.ProductService.AddLocalInventories] achieves the same results but provides more fine-grained control over ingesting local inventory data. @@ -610,10 +611,11 @@ def remove_fulfillment_places( ]: r"""Return a callable for the remove fulfillment places method over gRPC. - It is recommended to use the + We recommend that you use the [ProductService.RemoveLocalInventories][google.cloud.retail.v2alpha.ProductService.RemoveLocalInventories] - method instead of - [ProductService.RemoveFulfillmentPlaces][google.cloud.retail.v2alpha.ProductService.RemoveFulfillmentPlaces]. + method instead of the + [ProductService.RemoveFulfillmentPlaces][google.cloud.retail.v2alpha.ProductService.RemoveFulfillmentPlaces] + method. [ProductService.RemoveLocalInventories][google.cloud.retail.v2alpha.ProductService.RemoveLocalInventories] achieves the same results but provides more fine-grained control over ingesting local inventory data. 
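
To make the recommendation above concrete, here is a sketch of the preferred ``AddLocalInventories`` call path. The client constructor, request fields, and long-running-operation handling follow the generated surface; the project, catalog, product, and store identifiers are placeholders.

```python
# Sketch of the recommended path (placeholder resource names and place_id).
# add_local_inventories() returns a long-running operation.
from google.cloud import retail_v2alpha

client = retail_v2alpha.ProductServiceClient()

request = retail_v2alpha.AddLocalInventoriesRequest(
    product=(
        "projects/123/locations/global/catalogs/default_catalog"
        "/branches/default_branch/products/p123"
    ),
    local_inventories=[retail_v2alpha.LocalInventory(place_id="store-1")],
)

operation = client.add_local_inventories(request=request)
response = operation.result()  # blocks until the operation completes
```
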
diff --git a/packages/google-cloud-retail/google/cloud/retail_v2alpha/services/product_service/transports/grpc_asyncio.py b/packages/google-cloud-retail/google/cloud/retail_v2alpha/services/product_service/transports/grpc_asyncio.py index 4e8983f2c6f4..4b646db79285 100644 --- a/packages/google-cloud-retail/google/cloud/retail_v2alpha/services/product_service/transports/grpc_asyncio.py +++ b/packages/google-cloud-retail/google/cloud/retail_v2alpha/services/product_service/transports/grpc_asyncio.py @@ -570,10 +570,11 @@ def add_fulfillment_places( ]: r"""Return a callable for the add fulfillment places method over gRPC. - It is recommended to use the + We recommend that you use the [ProductService.AddLocalInventories][google.cloud.retail.v2alpha.ProductService.AddLocalInventories] - method instead of - [ProductService.AddFulfillmentPlaces][google.cloud.retail.v2alpha.ProductService.AddFulfillmentPlaces]. + method instead of the + [ProductService.AddFulfillmentPlaces][google.cloud.retail.v2alpha.ProductService.AddFulfillmentPlaces] + method. [ProductService.AddLocalInventories][google.cloud.retail.v2alpha.ProductService.AddLocalInventories] achieves the same results but provides more fine-grained control over ingesting local inventory data. @@ -629,10 +630,11 @@ def remove_fulfillment_places( ]: r"""Return a callable for the remove fulfillment places method over gRPC. - It is recommended to use the + We recommend that you use the [ProductService.RemoveLocalInventories][google.cloud.retail.v2alpha.ProductService.RemoveLocalInventories] - method instead of - [ProductService.RemoveFulfillmentPlaces][google.cloud.retail.v2alpha.ProductService.RemoveFulfillmentPlaces]. + method instead of the + [ProductService.RemoveFulfillmentPlaces][google.cloud.retail.v2alpha.ProductService.RemoveFulfillmentPlaces] + method. [ProductService.RemoveLocalInventories][google.cloud.retail.v2alpha.ProductService.RemoveLocalInventories] achieves the same results but provides more fine-grained control over ingesting local inventory data. diff --git a/packages/google-cloud-retail/google/cloud/retail_v2alpha/services/project_service/__init__.py b/packages/google-cloud-retail/google/cloud/retail_v2alpha/services/project_service/__init__.py new file mode 100644 index 000000000000..245aea0dfd81 --- /dev/null +++ b/packages/google-cloud-retail/google/cloud/retail_v2alpha/services/project_service/__init__.py @@ -0,0 +1,22 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from .async_client import ProjectServiceAsyncClient +from .client import ProjectServiceClient + +__all__ = ( + "ProjectServiceClient", + "ProjectServiceAsyncClient", +) diff --git a/packages/google-cloud-retail/google/cloud/retail_v2alpha/services/project_service/async_client.py b/packages/google-cloud-retail/google/cloud/retail_v2alpha/services/project_service/async_client.py new file mode 100644 index 000000000000..1755111bee4e --- /dev/null +++ b/packages/google-cloud-retail/google/cloud/retail_v2alpha/services/project_service/async_client.py @@ -0,0 +1,1322 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +import functools +import re +from typing import ( + Callable, + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, +) + +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry_async as retries +from google.api_core.client_options import ClientOptions +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.retail_v2alpha import gapic_version as package_version + +try: + OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.AsyncRetry, object, None] # type: ignore + +from google.api_core import operation # type: ignore +from google.api_core import operation_async # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore + +from google.cloud.retail_v2alpha.types import common +from google.cloud.retail_v2alpha.types import project +from google.cloud.retail_v2alpha.types import project as gcr_project +from google.cloud.retail_v2alpha.types import project_service + +from .client import ProjectServiceClient +from .transports.base import DEFAULT_CLIENT_INFO, ProjectServiceTransport +from .transports.grpc_asyncio import ProjectServiceGrpcAsyncIOTransport + + +class ProjectServiceAsyncClient: + """Service for settings at Project level.""" + + _client: ProjectServiceClient + + # Copy defaults from the synchronous client for use here. + # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. 
+ DEFAULT_ENDPOINT = ProjectServiceClient.DEFAULT_ENDPOINT + DEFAULT_MTLS_ENDPOINT = ProjectServiceClient.DEFAULT_MTLS_ENDPOINT + _DEFAULT_ENDPOINT_TEMPLATE = ProjectServiceClient._DEFAULT_ENDPOINT_TEMPLATE + _DEFAULT_UNIVERSE = ProjectServiceClient._DEFAULT_UNIVERSE + + alert_config_path = staticmethod(ProjectServiceClient.alert_config_path) + parse_alert_config_path = staticmethod(ProjectServiceClient.parse_alert_config_path) + logging_config_path = staticmethod(ProjectServiceClient.logging_config_path) + parse_logging_config_path = staticmethod( + ProjectServiceClient.parse_logging_config_path + ) + retail_project_path = staticmethod(ProjectServiceClient.retail_project_path) + parse_retail_project_path = staticmethod( + ProjectServiceClient.parse_retail_project_path + ) + common_billing_account_path = staticmethod( + ProjectServiceClient.common_billing_account_path + ) + parse_common_billing_account_path = staticmethod( + ProjectServiceClient.parse_common_billing_account_path + ) + common_folder_path = staticmethod(ProjectServiceClient.common_folder_path) + parse_common_folder_path = staticmethod( + ProjectServiceClient.parse_common_folder_path + ) + common_organization_path = staticmethod( + ProjectServiceClient.common_organization_path + ) + parse_common_organization_path = staticmethod( + ProjectServiceClient.parse_common_organization_path + ) + common_project_path = staticmethod(ProjectServiceClient.common_project_path) + parse_common_project_path = staticmethod( + ProjectServiceClient.parse_common_project_path + ) + common_location_path = staticmethod(ProjectServiceClient.common_location_path) + parse_common_location_path = staticmethod( + ProjectServiceClient.parse_common_location_path + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + ProjectServiceAsyncClient: The constructed client. + """ + return ProjectServiceClient.from_service_account_info.__func__(ProjectServiceAsyncClient, info, *args, **kwargs) # type: ignore + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + ProjectServiceAsyncClient: The constructed client. + """ + return ProjectServiceClient.from_service_account_file.__func__(ProjectServiceAsyncClient, filename, *args, **kwargs) # type: ignore + + from_service_account_json = from_service_account_file + + @classmethod + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[ClientOptions] = None + ): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. 
+ + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + return ProjectServiceClient.get_mtls_endpoint_and_cert_source(client_options) # type: ignore + + @property + def transport(self) -> ProjectServiceTransport: + """Returns the transport used by the client instance. + + Returns: + ProjectServiceTransport: The transport used by the client instance. + """ + return self._client.transport + + @property + def api_endpoint(self): + """Return the API endpoint used by the client instance. + + Returns: + str: The API endpoint used by the client instance. + """ + return self._client._api_endpoint + + @property + def universe_domain(self) -> str: + """Return the universe domain used by the client instance. + + Returns: + str: The universe domain used + by the client instance. + """ + return self._client._universe_domain + + get_transport_class = functools.partial( + type(ProjectServiceClient).get_transport_class, type(ProjectServiceClient) + ) + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[ + Union[str, ProjectServiceTransport, Callable[..., ProjectServiceTransport]] + ] = "grpc_asyncio", + client_options: Optional[ClientOptions] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the project service async client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Optional[Union[str,ProjectServiceTransport,Callable[..., ProjectServiceTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport to use. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the ProjectServiceTransport constructor. + If set to None, a transport is chosen automatically. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): + Custom options for the client. + + 1. The ``api_endpoint`` property can be used to override the + default endpoint provided by the client when ``transport`` is + not explicitly provided. 
Only if this property is not set and + ``transport`` was not explicitly provided, the endpoint is + determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment + variable, which have one of the following values: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto-switch to the + default mTLS endpoint if client certificate is present; this is + the default value). + + 2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide a client certificate for mTLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + 3. The ``universe_domain`` property can be used to override the + default "googleapis.com" universe. Note that ``api_endpoint`` + property still takes precedence; and ``universe_domain`` is + currently not supported for mTLS. + + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + """ + self._client = ProjectServiceClient( + credentials=credentials, + transport=transport, + client_options=client_options, + client_info=client_info, + ) + + async def get_project( + self, + request: Optional[Union[project_service.GetProjectRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> project.Project: + r"""Gets the project. + + Throws ``NOT_FOUND`` if the project wasn't initialized for the + Retail API service. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import retail_v2alpha + + async def sample_get_project(): + # Create a client + client = retail_v2alpha.ProjectServiceAsyncClient() + + # Initialize request argument(s) + request = retail_v2alpha.GetProjectRequest( + name="name_value", + ) + + # Make the request + response = await client.get_project(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.retail_v2alpha.types.GetProjectRequest, dict]]): + The request object. Request for GetProject method. + name (:class:`str`): + Required. Full resource name of the project. Format: + ``projects/{project_number_or_id}/retailProject`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
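As a sketch of the `client_options` behavior described above: when `api_endpoint` is set explicitly, it takes precedence over the `GOOGLE_API_USE_MTLS_ENDPOINT` resolution. The endpoint value here is illustrative only.

```python
from google.api_core.client_options import ClientOptions
from google.cloud import retail_v2alpha

# Explicit endpoint override; the mTLS auto-switching logic is not consulted.
options = ClientOptions(api_endpoint="retail.googleapis.com")
client = retail_v2alpha.ProjectServiceAsyncClient(client_options=options)
```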
+ + Returns: + google.cloud.retail_v2alpha.types.Project: + Metadata that describes a Cloud + Retail Project. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, project_service.GetProjectRequest): + request = project_service.GetProjectRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.get_project + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def accept_terms( + self, + request: Optional[Union[project_service.AcceptTermsRequest, dict]] = None, + *, + project: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> gcr_project.Project: + r"""Accepts service terms for this project. + By making requests to this API, you agree to the terms + of service linked below. + https://cloud.google.com/retail/data-use-terms + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import retail_v2alpha + + async def sample_accept_terms(): + # Create a client + client = retail_v2alpha.ProjectServiceAsyncClient() + + # Initialize request argument(s) + request = retail_v2alpha.AcceptTermsRequest( + project="project_value", + ) + + # Make the request + response = await client.accept_terms(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.retail_v2alpha.types.AcceptTermsRequest, dict]]): + The request object. Request for AcceptTerms method. + project (:class:`str`): + Required. Full resource name of the project. Format: + ``projects/{project_number_or_id}/retailProject`` + + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
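A sketch of the flattened-argument form of `get_project` described above (the helper name and project number are hypothetical):

```python
from google.cloud import retail_v2alpha

async def sample_get_project_flattened(project_number: str):  # hypothetical helper
    client = retail_v2alpha.ProjectServiceAsyncClient()
    # Flattened form: pass `name` directly instead of a GetProjectRequest.
    # Passing both `request=` and `name=` raises ValueError, per the check above.
    return await client.get_project(
        name=f"projects/{project_number}/retailProject"
    )
```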
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.retail_v2alpha.types.Project: + Metadata that describes a Cloud + Retail Project. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, project_service.AcceptTermsRequest): + request = project_service.AcceptTermsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.accept_terms + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("project", request.project),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def enroll_solution( + self, + request: Optional[Union[project_service.EnrollSolutionRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""The method enrolls a solution of type [Retail + Search][google.cloud.retail.v2alpha.SolutionType.SOLUTION_TYPE_SEARCH] + into a project. + + The [Recommendations AI solution + type][google.cloud.retail.v2alpha.SolutionType.SOLUTION_TYPE_RECOMMENDATION] + is enrolled by default when your project enables Retail API, so + you don't need to call the enrollSolution method for + recommendations. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import retail_v2alpha + + async def sample_enroll_solution(): + # Create a client + client = retail_v2alpha.ProjectServiceAsyncClient() + + # Initialize request argument(s) + request = retail_v2alpha.EnrollSolutionRequest( + project="project_value", + solution="SOLUTION_TYPE_SEARCH", + ) + + # Make the request + operation = client.enroll_solution(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.retail_v2alpha.types.EnrollSolutionRequest, dict]]): + The request object. Request for EnrollSolution method. 
+ retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.retail_v2alpha.types.EnrollSolutionResponse` + Response for EnrollSolution method. + + """ + # Create or coerce a protobuf request object. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, project_service.EnrollSolutionRequest): + request = project_service.EnrollSolutionRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.enroll_solution + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("project", request.project),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + project_service.EnrollSolutionResponse, + metadata_type=project_service.EnrollSolutionMetadata, + ) + + # Done; return the response. + return response + + async def list_enrolled_solutions( + self, + request: Optional[ + Union[project_service.ListEnrolledSolutionsRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> project_service.ListEnrolledSolutionsResponse: + r"""Lists all the retail API solutions the project has + enrolled. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import retail_v2alpha + + async def sample_list_enrolled_solutions(): + # Create a client + client = retail_v2alpha.ProjectServiceAsyncClient() + + # Initialize request argument(s) + request = retail_v2alpha.ListEnrolledSolutionsRequest( + parent="parent_value", + ) + + # Make the request + response = await client.list_enrolled_solutions(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.retail_v2alpha.types.ListEnrolledSolutionsRequest, dict]]): + The request object. Request for ListEnrolledSolutions + method. + parent (:class:`str`): + Required. Full resource name of parent. Format: + ``projects/{project_number_or_id}`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
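A sketch of consuming the long-running operation returned by `enroll_solution`, assuming the usual `google.api_core.operation_async.AsyncOperation` polling semantics (the helper name is hypothetical; `project` keeps the placeholder value from the generated sample):

```python
from google.cloud import retail_v2alpha

async def sample_enroll_search():  # hypothetical helper
    client = retail_v2alpha.ProjectServiceAsyncClient()
    request = retail_v2alpha.EnrollSolutionRequest(
        project="project_value",  # placeholder from the generated sample
        solution="SOLUTION_TYPE_SEARCH",
    )
    # Awaiting the call yields an AsyncOperation; result() polls until the
    # operation completes and returns an EnrollSolutionResponse.
    operation = await client.enroll_solution(request=request)
    return await operation.result()
```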
+ retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.retail_v2alpha.types.ListEnrolledSolutionsResponse: + Response for ListEnrolledSolutions + method. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, project_service.ListEnrolledSolutionsRequest): + request = project_service.ListEnrolledSolutionsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.list_enrolled_solutions + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_logging_config( + self, + request: Optional[Union[project_service.GetLoggingConfigRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> project.LoggingConfig: + r"""Gets the + [LoggingConfig][google.cloud.retail.v2alpha.LoggingConfig] of + the requested project. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import retail_v2alpha + + async def sample_get_logging_config(): + # Create a client + client = retail_v2alpha.ProjectServiceAsyncClient() + + # Initialize request argument(s) + request = retail_v2alpha.GetLoggingConfigRequest( + name="name_value", + ) + + # Make the request + response = await client.get_logging_config(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.retail_v2alpha.types.GetLoggingConfigRequest, dict]]): + The request object. Request for + [ProjectService.GetLoggingConfig][google.cloud.retail.v2alpha.ProjectService.GetLoggingConfig] + method. + name (:class:`str`): + Required. Full LoggingConfig resource name. 
Format: + projects/{project_number}/loggingConfig + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.retail_v2alpha.types.LoggingConfig: + Project level logging config to + control what level of log will be + generated and written to Cloud Logging. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, project_service.GetLoggingConfigRequest): + request = project_service.GetLoggingConfigRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.get_logging_config + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def update_logging_config( + self, + request: Optional[ + Union[project_service.UpdateLoggingConfigRequest, dict] + ] = None, + *, + logging_config: Optional[project.LoggingConfig] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> project.LoggingConfig: + r"""Updates the + [LoggingConfig][google.cloud.retail.v2alpha.LoggingConfig] of + the requested project. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import retail_v2alpha + + async def sample_update_logging_config(): + # Create a client + client = retail_v2alpha.ProjectServiceAsyncClient() + + # Initialize request argument(s) + logging_config = retail_v2alpha.LoggingConfig() + logging_config.name = "name_value" + + request = retail_v2alpha.UpdateLoggingConfigRequest( + logging_config=logging_config, + ) + + # Make the request + response = await client.update_logging_config(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.retail_v2alpha.types.UpdateLoggingConfigRequest, dict]]): + The request object. Request for + [ProjectService.UpdateLoggingConfig][google.cloud.retail.v2alpha.ProjectService.UpdateLoggingConfig] + method. + logging_config (:class:`google.cloud.retail_v2alpha.types.LoggingConfig`): + Required. The + [LoggingConfig][google.cloud.retail.v2alpha.LoggingConfig] + to update. + + If the caller does not have permission to update the + [LoggingConfig][google.cloud.retail.v2alpha.LoggingConfig], + then a PERMISSION_DENIED error is returned. + + If the + [LoggingConfig][google.cloud.retail.v2alpha.LoggingConfig] + to update does not exist, a NOT_FOUND error is returned. + + This corresponds to the ``logging_config`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): + Indicates which fields in the provided + [LoggingConfig][google.cloud.retail.v2alpha.LoggingConfig] + to update. The following are the only supported fields: + + - [LoggingConfig.default_log_generation_rule][google.cloud.retail.v2alpha.LoggingConfig.default_log_generation_rule] + - [LoggingConfig.service_log_generation_rules][google.cloud.retail.v2alpha.LoggingConfig.service_log_generation_rules] + + If not set, all supported fields are updated. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.retail_v2alpha.types.LoggingConfig: + Project level logging config to + control what level of log will be + generated and written to Cloud Logging. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([logging_config, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, project_service.UpdateLoggingConfigRequest): + request = project_service.UpdateLoggingConfigRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if logging_config is not None: + request.logging_config = logging_config + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.update_logging_config + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("logging_config.name", request.logging_config.name),) + ), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_alert_config( + self, + request: Optional[Union[project_service.GetAlertConfigRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> project.AlertConfig: + r"""Get the [AlertConfig][google.cloud.retail.v2alpha.AlertConfig] + of the requested project. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import retail_v2alpha + + async def sample_get_alert_config(): + # Create a client + client = retail_v2alpha.ProjectServiceAsyncClient() + + # Initialize request argument(s) + request = retail_v2alpha.GetAlertConfigRequest( + name="name_value", + ) + + # Make the request + response = await client.get_alert_config(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.retail_v2alpha.types.GetAlertConfigRequest, dict]]): + The request object. Request for + [ProjectService.GetAlertConfig][google.cloud.retail.v2alpha.ProjectService.GetAlertConfig] + method. + name (:class:`str`): + Required. Full AlertConfig resource name. Format: + projects/{project_number}/alertConfig + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.retail_v2alpha.types.AlertConfig: + Project level alert config. + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
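A sketch of the flattened `update_logging_config` call shown above, restricting the update to one of the supported fields via a `FieldMask` (the helper name and project number are hypothetical):

```python
from google.protobuf import field_mask_pb2

from google.cloud import retail_v2alpha

async def sample_update_log_rule(project_number: str):  # hypothetical helper
    client = retail_v2alpha.ProjectServiceAsyncClient()
    logging_config = retail_v2alpha.LoggingConfig(
        name=f"projects/{project_number}/loggingConfig"
    )
    # Only the listed field is updated; unlisted fields are left untouched.
    mask = field_mask_pb2.FieldMask(paths=["default_log_generation_rule"])
    return await client.update_logging_config(
        logging_config=logging_config, update_mask=mask
    )
```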
+ if not isinstance(request, project_service.GetAlertConfigRequest): + request = project_service.GetAlertConfigRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.get_alert_config + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def update_alert_config( + self, + request: Optional[Union[project_service.UpdateAlertConfigRequest, dict]] = None, + *, + alert_config: Optional[project.AlertConfig] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> project.AlertConfig: + r"""Update the alert config of the requested project. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import retail_v2alpha + + async def sample_update_alert_config(): + # Create a client + client = retail_v2alpha.ProjectServiceAsyncClient() + + # Initialize request argument(s) + alert_config = retail_v2alpha.AlertConfig() + alert_config.name = "name_value" + + request = retail_v2alpha.UpdateAlertConfigRequest( + alert_config=alert_config, + ) + + # Make the request + response = await client.update_alert_config(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.retail_v2alpha.types.UpdateAlertConfigRequest, dict]]): + The request object. Request for + [ProjectService.UpdateAlertConfig][google.cloud.retail.v2alpha.ProjectService.UpdateAlertConfig] + method. + alert_config (:class:`google.cloud.retail_v2alpha.types.AlertConfig`): + Required. The + [AlertConfig][google.cloud.retail.v2alpha.AlertConfig] + to update. + + If the caller does not have permission to update the + [AlertConfig][google.cloud.retail.v2alpha.AlertConfig], + then a PERMISSION_DENIED error is returned. + + If the + [AlertConfig][google.cloud.retail.v2alpha.AlertConfig] + to update does not exist, a NOT_FOUND error is returned. + + This corresponds to the ``alert_config`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): + Indicates which fields in the provided + [AlertConfig][google.cloud.retail.v2alpha.AlertConfig] + to update. If not set, all supported fields are updated. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.retail_v2alpha.types.AlertConfig: + Project level alert config. + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([alert_config, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, project_service.UpdateAlertConfigRequest): + request = project_service.UpdateAlertConfigRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if alert_config is not None: + request.alert_config = alert_config + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.update_alert_config + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("alert_config.name", request.alert_config.name),) + ), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_operations( + self, + request: Optional[operations_pb2.ListOperationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Lists operations that match the specified filter in the request. + + Args: + request (:class:`~.operations_pb2.ListOperationsRequest`): + The request object. Request message for + `ListOperations` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.ListOperationsResponse: + Response message for ``ListOperations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.ListOperationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_operations, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_operation( + self, + request: Optional[operations_pb2.GetOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. + + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def __aenter__(self) -> "ProjectServiceAsyncClient": + return self + + async def __aexit__(self, exc_type, exc, tb): + await self.transport.close() + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("ProjectServiceAsyncClient",) diff --git a/packages/google-cloud-retail/google/cloud/retail_v2alpha/services/project_service/client.py b/packages/google-cloud-retail/google/cloud/retail_v2alpha/services/project_service/client.py new file mode 100644 index 000000000000..00e50534f649 --- /dev/null +++ b/packages/google-cloud-retail/google/cloud/retail_v2alpha/services/project_service/client.py @@ -0,0 +1,1741 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
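The async client above supports `async with`, which closes the underlying transport on exit via `__aenter__`/`__aexit__`. A minimal sketch (the project number is a placeholder):

```python
from google.cloud import retail_v2alpha

async def main():
    # The transport is closed automatically when the block exits.
    async with retail_v2alpha.ProjectServiceAsyncClient() as client:
        project = await client.get_project(
            name="projects/123456789/retailProject"  # illustrative only
        )
        print(project)
```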
+# +from collections import OrderedDict +import os +import re +from typing import ( + Callable, + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, + cast, +) +import warnings + +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.retail_v2alpha import gapic_version as package_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + +from google.api_core import operation # type: ignore +from google.api_core import operation_async # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore + +from google.cloud.retail_v2alpha.types import common +from google.cloud.retail_v2alpha.types import project +from google.cloud.retail_v2alpha.types import project as gcr_project +from google.cloud.retail_v2alpha.types import project_service + +from .transports.base import DEFAULT_CLIENT_INFO, ProjectServiceTransport +from .transports.grpc import ProjectServiceGrpcTransport +from .transports.grpc_asyncio import ProjectServiceGrpcAsyncIOTransport +from .transports.rest import ProjectServiceRestTransport + + +class ProjectServiceClientMeta(type): + """Metaclass for the ProjectService client. + + This provides class-level methods for building and retrieving + support objects (e.g. transport) without polluting the client instance + objects. + """ + + _transport_registry = ( + OrderedDict() + ) # type: Dict[str, Type[ProjectServiceTransport]] + _transport_registry["grpc"] = ProjectServiceGrpcTransport + _transport_registry["grpc_asyncio"] = ProjectServiceGrpcAsyncIOTransport + _transport_registry["rest"] = ProjectServiceRestTransport + + def get_transport_class( + cls, + label: Optional[str] = None, + ) -> Type[ProjectServiceTransport]: + """Returns an appropriate transport class. + + Args: + label: The name of the desired transport. If none is + provided, then the first transport in the registry is used. + + Returns: + The transport class to use. + """ + # If a specific transport is requested, return that one. + if label: + return cls._transport_registry[label] + + # No transport is requested; return the default (that is, the first one + # in the dictionary). + return next(iter(cls._transport_registry.values())) + + +class ProjectServiceClient(metaclass=ProjectServiceClientMeta): + """Service for settings at Project level.""" + + @staticmethod + def _get_default_mtls_endpoint(api_endpoint): + """Converts api endpoint to mTLS endpoint. + + Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to + "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. + Args: + api_endpoint (Optional[str]): the api endpoint to convert. + Returns: + str: converted mTLS api endpoint. 
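The metaclass registry above maps the labels "grpc", "grpc_asyncio", and "rest" to transport classes; passing one of these labels selects a transport explicitly. A minimal sketch:

```python
from google.cloud import retail_v2alpha

# Request the REST transport instead of the default (first registered) transport.
client = retail_v2alpha.ProjectServiceClient(transport="rest")
```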
+ """ + if not api_endpoint: + return api_endpoint + + mtls_endpoint_re = re.compile( + r"(?P[^.]+)(?P\.mtls)?(?P\.sandbox)?(?P\.googleapis\.com)?" + ) + + m = mtls_endpoint_re.match(api_endpoint) + name, mtls, sandbox, googledomain = m.groups() + if mtls or not googledomain: + return api_endpoint + + if sandbox: + return api_endpoint.replace( + "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" + ) + + return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") + + # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. + DEFAULT_ENDPOINT = "retail.googleapis.com" + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore + DEFAULT_ENDPOINT + ) + + _DEFAULT_ENDPOINT_TEMPLATE = "retail.{UNIVERSE_DOMAIN}" + _DEFAULT_UNIVERSE = "googleapis.com" + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + ProjectServiceClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + ProjectServiceClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_file(filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> ProjectServiceTransport: + """Returns the transport used by the client instance. + + Returns: + ProjectServiceTransport: The transport used by the client + instance. 
+ """ + return self._transport + + @staticmethod + def alert_config_path( + project: str, + ) -> str: + """Returns a fully-qualified alert_config string.""" + return "projects/{project}/alertConfig".format( + project=project, + ) + + @staticmethod + def parse_alert_config_path(path: str) -> Dict[str, str]: + """Parses a alert_config path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/alertConfig$", path) + return m.groupdict() if m else {} + + @staticmethod + def logging_config_path( + project: str, + ) -> str: + """Returns a fully-qualified logging_config string.""" + return "projects/{project}/loggingConfig".format( + project=project, + ) + + @staticmethod + def parse_logging_config_path(path: str) -> Dict[str, str]: + """Parses a logging_config path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/loggingConfig$", path) + return m.groupdict() if m else {} + + @staticmethod + def retail_project_path( + project: str, + ) -> str: + """Returns a fully-qualified retail_project string.""" + return "projects/{project}/retailProject".format( + project=project, + ) + + @staticmethod + def parse_retail_project_path(path: str) -> Dict[str, str]: + """Parses a retail_project path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/retailProject$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_billing_account_path( + billing_account: str, + ) -> str: + """Returns a fully-qualified billing_account string.""" + return "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + + @staticmethod + def parse_common_billing_account_path(path: str) -> Dict[str, str]: + """Parse a billing_account path into its component segments.""" + m = re.match(r"^billingAccounts/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_folder_path( + folder: str, + ) -> str: + """Returns a fully-qualified folder string.""" + return "folders/{folder}".format( + folder=folder, + ) + + @staticmethod + def parse_common_folder_path(path: str) -> Dict[str, str]: + """Parse a folder path into its component segments.""" + m = re.match(r"^folders/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_organization_path( + organization: str, + ) -> str: + """Returns a fully-qualified organization string.""" + return "organizations/{organization}".format( + organization=organization, + ) + + @staticmethod + def parse_common_organization_path(path: str) -> Dict[str, str]: + """Parse a organization path into its component segments.""" + m = re.match(r"^organizations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_project_path( + project: str, + ) -> str: + """Returns a fully-qualified project string.""" + return "projects/{project}".format( + project=project, + ) + + @staticmethod + def parse_common_project_path(path: str) -> Dict[str, str]: + """Parse a project path into its component segments.""" + m = re.match(r"^projects/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_location_path( + project: str, + location: str, + ) -> str: + """Returns a fully-qualified location string.""" + return "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) + + @staticmethod + def parse_common_location_path(path: str) -> Dict[str, str]: + """Parse a location path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) + return m.groupdict() if m 
else {} + + @classmethod + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[client_options_lib.ClientOptions] = None + ): + """Deprecated. Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + + warnings.warn( + "get_mtls_endpoint_and_cert_source is deprecated. Use the api_endpoint property instead.", + DeprecationWarning, + ) + if client_options is None: + client_options = client_options_lib.ClientOptions() + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_client_cert not in ("true", "false"): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError( + "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Figure out the client cert source to use. + client_cert_source = None + if use_client_cert == "true": + if client_options.client_cert_source: + client_cert_source = client_options.client_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + + # Figure out which api endpoint to use. + if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + elif use_mtls_endpoint == "always" or ( + use_mtls_endpoint == "auto" and client_cert_source + ): + api_endpoint = cls.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = cls.DEFAULT_ENDPOINT + + return api_endpoint, client_cert_source + + @staticmethod + def _read_environment_variables(): + """Returns the environment variables used by the client. + + Returns: + Tuple[bool, str, str]: returns the GOOGLE_API_USE_CLIENT_CERTIFICATE, + GOOGLE_API_USE_MTLS_ENDPOINT, and GOOGLE_CLOUD_UNIVERSE_DOMAIN environment variables. + + Raises: + ValueError: If GOOGLE_API_USE_CLIENT_CERTIFICATE is not + any of ["true", "false"]. + google.auth.exceptions.MutualTLSChannelError: If GOOGLE_API_USE_MTLS_ENDPOINT + is not any of ["auto", "never", "always"]. 
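A sketch of the environment-driven endpoint selection described above, assuming a default client certificate source is available on the machine; with "never" the regular endpoint would be used instead. The variables must be set before the client is constructed.

```python
import os

from google.cloud import retail_v2alpha

os.environ["GOOGLE_API_USE_CLIENT_CERTIFICATE"] = "true"
os.environ["GOOGLE_API_USE_MTLS_ENDPOINT"] = "auto"

# With a default client certificate present, "auto" resolves to the mTLS endpoint.
client = retail_v2alpha.ProjectServiceClient()
print(client.api_endpoint)
```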
+ """ + use_client_cert = os.getenv( + "GOOGLE_API_USE_CLIENT_CERTIFICATE", "false" + ).lower() + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto").lower() + universe_domain_env = os.getenv("GOOGLE_CLOUD_UNIVERSE_DOMAIN") + if use_client_cert not in ("true", "false"): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError( + "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + return use_client_cert == "true", use_mtls_endpoint, universe_domain_env + + @staticmethod + def _get_client_cert_source(provided_cert_source, use_cert_flag): + """Return the client cert source to be used by the client. + + Args: + provided_cert_source (bytes): The client certificate source provided. + use_cert_flag (bool): A flag indicating whether to use the client certificate. + + Returns: + bytes or None: The client cert source to be used by the client. + """ + client_cert_source = None + if use_cert_flag: + if provided_cert_source: + client_cert_source = provided_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + return client_cert_source + + @staticmethod + def _get_api_endpoint( + api_override, client_cert_source, universe_domain, use_mtls_endpoint + ): + """Return the API endpoint used by the client. + + Args: + api_override (str): The API endpoint override. If specified, this is always + the return value of this function and the other arguments are not used. + client_cert_source (bytes): The client certificate source used by the client. + universe_domain (str): The universe domain used by the client. + use_mtls_endpoint (str): How to use the mTLS endpoint, which depends also on the other parameters. + Possible values are "always", "auto", or "never". + + Returns: + str: The API endpoint to be used by the client. + """ + if api_override is not None: + api_endpoint = api_override + elif use_mtls_endpoint == "always" or ( + use_mtls_endpoint == "auto" and client_cert_source + ): + _default_universe = ProjectServiceClient._DEFAULT_UNIVERSE + if universe_domain != _default_universe: + raise MutualTLSChannelError( + f"mTLS is not supported in any universe other than {_default_universe}." + ) + api_endpoint = ProjectServiceClient.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = ProjectServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=universe_domain + ) + return api_endpoint + + @staticmethod + def _get_universe_domain( + client_universe_domain: Optional[str], universe_domain_env: Optional[str] + ) -> str: + """Return the universe domain used by the client. + + Args: + client_universe_domain (Optional[str]): The universe domain configured via the client options. + universe_domain_env (Optional[str]): The universe domain configured via the "GOOGLE_CLOUD_UNIVERSE_DOMAIN" environment variable. + + Returns: + str: The universe domain to be used by the client. + + Raises: + ValueError: If the universe domain is an empty string. 
+ """ + universe_domain = ProjectServiceClient._DEFAULT_UNIVERSE + if client_universe_domain is not None: + universe_domain = client_universe_domain + elif universe_domain_env is not None: + universe_domain = universe_domain_env + if len(universe_domain.strip()) == 0: + raise ValueError("Universe Domain cannot be an empty string.") + return universe_domain + + @staticmethod + def _compare_universes( + client_universe: str, credentials: ga_credentials.Credentials + ) -> bool: + """Returns True iff the universe domains used by the client and credentials match. + + Args: + client_universe (str): The universe domain configured via the client options. + credentials (ga_credentials.Credentials): The credentials being used in the client. + + Returns: + bool: True iff client_universe matches the universe in credentials. + + Raises: + ValueError: when client_universe does not match the universe in credentials. + """ + + default_universe = ProjectServiceClient._DEFAULT_UNIVERSE + credentials_universe = getattr(credentials, "universe_domain", default_universe) + + if client_universe != credentials_universe: + raise ValueError( + "The configured universe domain " + f"({client_universe}) does not match the universe domain " + f"found in the credentials ({credentials_universe}). " + "If you haven't configured the universe domain explicitly, " + f"`{default_universe}` is the default." + ) + return True + + def _validate_universe_domain(self): + """Validates client's and credentials' universe domains are consistent. + + Returns: + bool: True iff the configured universe domain is valid. + + Raises: + ValueError: If the configured universe domain is not valid. + """ + self._is_universe_domain_valid = ( + self._is_universe_domain_valid + or ProjectServiceClient._compare_universes( + self.universe_domain, self.transport._credentials + ) + ) + return self._is_universe_domain_valid + + @property + def api_endpoint(self): + """Return the API endpoint used by the client instance. + + Returns: + str: The API endpoint used by the client instance. + """ + return self._api_endpoint + + @property + def universe_domain(self) -> str: + """Return the universe domain used by the client instance. + + Returns: + str: The universe domain used by the client instance. + """ + return self._universe_domain + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[ + Union[str, ProjectServiceTransport, Callable[..., ProjectServiceTransport]] + ] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the project service client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Optional[Union[str,ProjectServiceTransport,Callable[..., ProjectServiceTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the ProjectServiceTransport constructor. + If set to None, a transport is chosen automatically. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): + Custom options for the client. + + 1. 
The ``api_endpoint`` property can be used to override the + default endpoint provided by the client when ``transport`` is + not explicitly provided. Only if this property is not set and + ``transport`` was not explicitly provided, the endpoint is + determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment + variable, which have one of the following values: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto-switch to the + default mTLS endpoint if client certificate is present; this is + the default value). + + 2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide a client certificate for mTLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + 3. The ``universe_domain`` property can be used to override the + default "googleapis.com" universe. Note that the ``api_endpoint`` + property still takes precedence; and ``universe_domain`` is + currently not supported for mTLS. + + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + """ + self._client_options = client_options + if isinstance(self._client_options, dict): + self._client_options = client_options_lib.from_dict(self._client_options) + if self._client_options is None: + self._client_options = client_options_lib.ClientOptions() + self._client_options = cast( + client_options_lib.ClientOptions, self._client_options + ) + + universe_domain_opt = getattr(self._client_options, "universe_domain", None) + + ( + self._use_client_cert, + self._use_mtls_endpoint, + self._universe_domain_env, + ) = ProjectServiceClient._read_environment_variables() + self._client_cert_source = ProjectServiceClient._get_client_cert_source( + self._client_options.client_cert_source, self._use_client_cert + ) + self._universe_domain = ProjectServiceClient._get_universe_domain( + universe_domain_opt, self._universe_domain_env + ) + self._api_endpoint = None # updated below, depending on `transport` + + # Initialize the universe domain validation. + self._is_universe_domain_valid = False + + api_key_value = getattr(self._client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError( + "client_options.api_key and credentials are mutually exclusive" + ) + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + transport_provided = isinstance(transport, ProjectServiceTransport) + if transport_provided: + # transport is a ProjectServiceTransport instance. + if credentials or self._client_options.credentials_file or api_key_value: + raise ValueError( + "When providing a transport instance, " + "provide its credentials directly." + ) + if self._client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." 
+ ) + self._transport = cast(ProjectServiceTransport, transport) + self._api_endpoint = self._transport.host + + self._api_endpoint = ( + self._api_endpoint + or ProjectServiceClient._get_api_endpoint( + self._client_options.api_endpoint, + self._client_cert_source, + self._universe_domain, + self._use_mtls_endpoint, + ) + ) + + if not transport_provided: + import google.auth._default # type: ignore + + if api_key_value and hasattr( + google.auth._default, "get_api_key_credentials" + ): + credentials = google.auth._default.get_api_key_credentials( + api_key_value + ) + + transport_init: Union[ + Type[ProjectServiceTransport], Callable[..., ProjectServiceTransport] + ] = ( + type(self).get_transport_class(transport) + if isinstance(transport, str) or transport is None + else cast(Callable[..., ProjectServiceTransport], transport) + ) + # initialize with the provided callable or the passed in class + self._transport = transport_init( + credentials=credentials, + credentials_file=self._client_options.credentials_file, + host=self._api_endpoint, + scopes=self._client_options.scopes, + client_cert_source_for_mtls=self._client_cert_source, + quota_project_id=self._client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + api_audience=self._client_options.api_audience, + ) + + def get_project( + self, + request: Optional[Union[project_service.GetProjectRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> project.Project: + r"""Gets the project. + + Throws ``NOT_FOUND`` if the project wasn't initialized for the + Retail API service. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import retail_v2alpha + + def sample_get_project(): + # Create a client + client = retail_v2alpha.ProjectServiceClient() + + # Initialize request argument(s) + request = retail_v2alpha.GetProjectRequest( + name="name_value", + ) + + # Make the request + response = client.get_project(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.retail_v2alpha.types.GetProjectRequest, dict]): + The request object. Request for GetProject method. + name (str): + Required. Full resource name of the project. Format: + ``projects/{project_number_or_id}/retailProject`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.retail_v2alpha.types.Project: + Metadata that describes a Cloud + Retail Project. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
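As a usage sketch for the endpoint and universe-domain resolution above: the constructor combines the `GOOGLE_API_USE_CLIENT_CERTIFICATE`, `GOOGLE_API_USE_MTLS_ENDPOINT`, and `GOOGLE_CLOUD_UNIVERSE_DOMAIN` environment variables with any `ClientOptions` values, with an explicit `api_endpoint` taking precedence. This is illustrative only and assumes a `google-api-core` version recent enough to accept `ClientOptions(universe_domain=...)`.

```python
import os

from google.api_core.client_options import ClientOptions
from google.cloud import retail_v2alpha

# "auto" (the default) switches to the mTLS endpoint only when a client
# certificate is configured; "never"/"always" force one endpoint or the other.
os.environ["GOOGLE_API_USE_MTLS_ENDPOINT"] = "auto"

# An explicit api_endpoint would override the value derived from the
# universe domain; "googleapis.com" is simply the default universe.
options = ClientOptions(universe_domain="googleapis.com")

client = retail_v2alpha.ProjectServiceClient(client_options=options)
print(client.api_endpoint, client.universe_domain)
```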
+ has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, project_service.GetProjectRequest): + request = project_service.GetProjectRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_project] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def accept_terms( + self, + request: Optional[Union[project_service.AcceptTermsRequest, dict]] = None, + *, + project: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> gcr_project.Project: + r"""Accepts service terms for this project. + By making requests to this API, you agree to the terms + of service linked below. + https://cloud.google.com/retail/data-use-terms + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import retail_v2alpha + + def sample_accept_terms(): + # Create a client + client = retail_v2alpha.ProjectServiceClient() + + # Initialize request argument(s) + request = retail_v2alpha.AcceptTermsRequest( + project="project_value", + ) + + # Make the request + response = client.accept_terms(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.retail_v2alpha.types.AcceptTermsRequest, dict]): + The request object. Request for AcceptTerms method. + project (str): + Required. Full resource name of the project. Format: + ``projects/{project_number_or_id}/retailProject`` + + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.retail_v2alpha.types.Project: + Metadata that describes a Cloud + Retail Project. + + """ + # Create or coerce a protobuf request object. 
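To illustrate the flattened-argument check above: each RPC accepts either a fully built request object or the documented flattened fields, never both in the same call. The resource name below is a placeholder.

```python
from google.cloud import retail_v2alpha

client = retail_v2alpha.ProjectServiceClient()

# Option 1: pass the flattened field directly.
project = client.get_project(name="projects/123/retailProject")

# Option 2: build the request object explicitly.
request = retail_v2alpha.GetProjectRequest(name="projects/123/retailProject")
project = client.get_project(request=request)

# Passing both at once would raise ValueError, as enforced by the
# has_flattened_params check in the generated method body.
```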
+ # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, project_service.AcceptTermsRequest): + request = project_service.AcceptTermsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.accept_terms] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("project", request.project),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def enroll_solution( + self, + request: Optional[Union[project_service.EnrollSolutionRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""The method enrolls a solution of type [Retail + Search][google.cloud.retail.v2alpha.SolutionType.SOLUTION_TYPE_SEARCH] + into a project. + + The [Recommendations AI solution + type][google.cloud.retail.v2alpha.SolutionType.SOLUTION_TYPE_RECOMMENDATION] + is enrolled by default when your project enables Retail API, so + you don't need to call the enrollSolution method for + recommendations. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import retail_v2alpha + + def sample_enroll_solution(): + # Create a client + client = retail_v2alpha.ProjectServiceClient() + + # Initialize request argument(s) + request = retail_v2alpha.EnrollSolutionRequest( + project="project_value", + solution="SOLUTION_TYPE_SEARCH", + ) + + # Make the request + operation = client.enroll_solution(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.retail_v2alpha.types.EnrollSolutionRequest, dict]): + The request object. Request for EnrollSolution method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. 
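Every method in this client also takes the optional `retry`, `timeout`, and `metadata` arguments described in the docstrings. A minimal sketch of overriding them, assuming `google-api-core`'s `Retry` helper; the resource name is a placeholder.

```python
from google.api_core import exceptions as core_exceptions
from google.api_core import retry as retries
from google.cloud import retail_v2alpha

client = retail_v2alpha.ProjectServiceClient()

# Retry transient errors with exponential backoff, giving up after 60 seconds.
custom_retry = retries.Retry(
    predicate=retries.if_exception_type(
        core_exceptions.ServiceUnavailable,
        core_exceptions.DeadlineExceeded,
    ),
    initial=1.0,
    maximum=10.0,
    multiplier=2.0,
    timeout=60.0,
)

project = client.get_project(
    name="projects/123/retailProject",
    retry=custom_retry,
    timeout=30.0,
    metadata=(("x-custom-header", "example"),),
)
```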
+ + The result type for the operation will be + :class:`google.cloud.retail_v2alpha.types.EnrollSolutionResponse` + Response for EnrollSolution method. + + """ + # Create or coerce a protobuf request object. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, project_service.EnrollSolutionRequest): + request = project_service.EnrollSolutionRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.enroll_solution] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("project", request.project),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + project_service.EnrollSolutionResponse, + metadata_type=project_service.EnrollSolutionMetadata, + ) + + # Done; return the response. + return response + + def list_enrolled_solutions( + self, + request: Optional[ + Union[project_service.ListEnrolledSolutionsRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> project_service.ListEnrolledSolutionsResponse: + r"""Lists all the retail API solutions the project has + enrolled. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import retail_v2alpha + + def sample_list_enrolled_solutions(): + # Create a client + client = retail_v2alpha.ProjectServiceClient() + + # Initialize request argument(s) + request = retail_v2alpha.ListEnrolledSolutionsRequest( + parent="parent_value", + ) + + # Make the request + response = client.list_enrolled_solutions(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.retail_v2alpha.types.ListEnrolledSolutionsRequest, dict]): + The request object. Request for ListEnrolledSolutions + method. + parent (str): + Required. Full resource name of parent. Format: + ``projects/{project_number_or_id}`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.retail_v2alpha.types.ListEnrolledSolutionsResponse: + Response for ListEnrolledSolutions + method. + + """ + # Create or coerce a protobuf request object. 
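Because `enroll_solution` is wrapped into a long-running operation (via `operation.from_gapic` above), callers usually block on the returned future. A sketch with placeholder values; the result resolves to an `EnrollSolutionResponse`.

```python
from google.cloud import retail_v2alpha

client = retail_v2alpha.ProjectServiceClient()

request = retail_v2alpha.EnrollSolutionRequest(
    project="projects/123",  # placeholder project resource name
    solution="SOLUTION_TYPE_SEARCH",
)

operation = client.enroll_solution(request=request)

# Poll the operation; result() re-raises any server-side error.
print("done yet?", operation.done())
response = operation.result(timeout=300)
print(response)
```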
+ # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, project_service.ListEnrolledSolutionsRequest): + request = project_service.ListEnrolledSolutionsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_enrolled_solutions] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_logging_config( + self, + request: Optional[Union[project_service.GetLoggingConfigRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> project.LoggingConfig: + r"""Gets the + [LoggingConfig][google.cloud.retail.v2alpha.LoggingConfig] of + the requested project. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import retail_v2alpha + + def sample_get_logging_config(): + # Create a client + client = retail_v2alpha.ProjectServiceClient() + + # Initialize request argument(s) + request = retail_v2alpha.GetLoggingConfigRequest( + name="name_value", + ) + + # Make the request + response = client.get_logging_config(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.retail_v2alpha.types.GetLoggingConfigRequest, dict]): + The request object. Request for + [ProjectService.GetLoggingConfig][google.cloud.retail.v2alpha.ProjectService.GetLoggingConfig] + method. + name (str): + Required. Full LoggingConfig resource name. Format: + projects/{project_number}/loggingConfig + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
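The RPCs in this service address singleton resources under a project, using the name formats quoted in the docstrings. A small sketch of building those names; the project number is a placeholder.

```python
from google.cloud import retail_v2alpha

project_number = "123"  # placeholder

retail_project_name = f"projects/{project_number}/retailProject"
logging_config_name = f"projects/{project_number}/loggingConfig"

client = retail_v2alpha.ProjectServiceClient()
logging_config = client.get_logging_config(name=logging_config_name)
print(logging_config)
```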
+ + Returns: + google.cloud.retail_v2alpha.types.LoggingConfig: + Project level logging config to + control what level of log will be + generated and written to Cloud Logging. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, project_service.GetLoggingConfigRequest): + request = project_service.GetLoggingConfigRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_logging_config] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def update_logging_config( + self, + request: Optional[ + Union[project_service.UpdateLoggingConfigRequest, dict] + ] = None, + *, + logging_config: Optional[project.LoggingConfig] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> project.LoggingConfig: + r"""Updates the + [LoggingConfig][google.cloud.retail.v2alpha.LoggingConfig] of + the requested project. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import retail_v2alpha + + def sample_update_logging_config(): + # Create a client + client = retail_v2alpha.ProjectServiceClient() + + # Initialize request argument(s) + logging_config = retail_v2alpha.LoggingConfig() + logging_config.name = "name_value" + + request = retail_v2alpha.UpdateLoggingConfigRequest( + logging_config=logging_config, + ) + + # Make the request + response = client.update_logging_config(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.retail_v2alpha.types.UpdateLoggingConfigRequest, dict]): + The request object. Request for + [ProjectService.UpdateLoggingConfig][google.cloud.retail.v2alpha.ProjectService.UpdateLoggingConfig] + method. + logging_config (google.cloud.retail_v2alpha.types.LoggingConfig): + Required. The + [LoggingConfig][google.cloud.retail.v2alpha.LoggingConfig] + to update. 
+ + If the caller does not have permission to update the + [LoggingConfig][google.cloud.retail.v2alpha.LoggingConfig], + then a PERMISSION_DENIED error is returned. + + If the + [LoggingConfig][google.cloud.retail.v2alpha.LoggingConfig] + to update does not exist, a NOT_FOUND error is returned. + + This corresponds to the ``logging_config`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Indicates which fields in the provided + [LoggingConfig][google.cloud.retail.v2alpha.LoggingConfig] + to update. The following are the only supported fields: + + - [LoggingConfig.default_log_generation_rule][google.cloud.retail.v2alpha.LoggingConfig.default_log_generation_rule] + - [LoggingConfig.service_log_generation_rules][google.cloud.retail.v2alpha.LoggingConfig.service_log_generation_rules] + + If not set, all supported fields are updated. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.retail_v2alpha.types.LoggingConfig: + Project level logging config to + control what level of log will be + generated and written to Cloud Logging. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([logging_config, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, project_service.UpdateLoggingConfigRequest): + request = project_service.UpdateLoggingConfigRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if logging_config is not None: + request.logging_config = logging_config + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update_logging_config] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("logging_config.name", request.logging_config.name),) + ), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_alert_config( + self, + request: Optional[Union[project_service.GetAlertConfigRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> project.AlertConfig: + r"""Get the [AlertConfig][google.cloud.retail.v2alpha.AlertConfig] + of the requested project. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import retail_v2alpha + + def sample_get_alert_config(): + # Create a client + client = retail_v2alpha.ProjectServiceClient() + + # Initialize request argument(s) + request = retail_v2alpha.GetAlertConfigRequest( + name="name_value", + ) + + # Make the request + response = client.get_alert_config(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.retail_v2alpha.types.GetAlertConfigRequest, dict]): + The request object. Request for + [ProjectService.GetAlertConfig][google.cloud.retail.v2alpha.ProjectService.GetAlertConfig] + method. + name (str): + Required. Full AlertConfig resource name. Format: + projects/{project_number}/alertConfig + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.retail_v2alpha.types.AlertConfig: + Project level alert config. + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, project_service.GetAlertConfigRequest): + request = project_service.GetAlertConfigRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_alert_config] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def update_alert_config( + self, + request: Optional[Union[project_service.UpdateAlertConfigRequest, dict]] = None, + *, + alert_config: Optional[project.AlertConfig] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> project.AlertConfig: + r"""Update the alert config of the requested project. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import retail_v2alpha + + def sample_update_alert_config(): + # Create a client + client = retail_v2alpha.ProjectServiceClient() + + # Initialize request argument(s) + alert_config = retail_v2alpha.AlertConfig() + alert_config.name = "name_value" + + request = retail_v2alpha.UpdateAlertConfigRequest( + alert_config=alert_config, + ) + + # Make the request + response = client.update_alert_config(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.retail_v2alpha.types.UpdateAlertConfigRequest, dict]): + The request object. Request for + [ProjectService.UpdateAlertConfig][google.cloud.retail.v2alpha.ProjectService.UpdateAlertConfig] + method. + alert_config (google.cloud.retail_v2alpha.types.AlertConfig): + Required. The + [AlertConfig][google.cloud.retail.v2alpha.AlertConfig] + to update. + + If the caller does not have permission to update the + [AlertConfig][google.cloud.retail.v2alpha.AlertConfig], + then a PERMISSION_DENIED error is returned. + + If the + [AlertConfig][google.cloud.retail.v2alpha.AlertConfig] + to update does not exist, a NOT_FOUND error is returned. + + This corresponds to the ``alert_config`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Indicates which fields in the provided + [AlertConfig][google.cloud.retail.v2alpha.AlertConfig] + to update. If not set, all supported fields are updated. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.retail_v2alpha.types.AlertConfig: + Project level alert config. + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([alert_config, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, project_service.UpdateAlertConfigRequest): + request = project_service.UpdateAlertConfigRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if alert_config is not None: + request.alert_config = alert_config + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
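Both `update_logging_config` and `update_alert_config` accept an optional `update_mask` restricting which fields are written; omitting it updates all supported fields. A sketch with a placeholder resource name, using one of the supported paths listed above.

```python
from google.protobuf import field_mask_pb2

from google.cloud import retail_v2alpha

client = retail_v2alpha.ProjectServiceClient()

logging_config = retail_v2alpha.LoggingConfig(
    name="projects/123/loggingConfig",  # placeholder project number
)

# Only the listed field is written on the server side.
updated = client.update_logging_config(
    logging_config=logging_config,
    update_mask=field_mask_pb2.FieldMask(
        paths=["default_log_generation_rule"]
    ),
)
print(updated)
```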
+ rpc = self._transport._wrapped_methods[self._transport.update_alert_config] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("alert_config.name", request.alert_config.name),) + ), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def __enter__(self) -> "ProjectServiceClient": + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! + """ + self.transport.close() + + def list_operations( + self, + request: Optional[operations_pb2.ListOperationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Lists operations that match the specified filter in the request. + + Args: + request (:class:`~.operations_pb2.ListOperationsRequest`): + The request object. Request message for + `ListOperations` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.ListOperationsResponse: + Response message for ``ListOperations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.ListOperationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.list_operations, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_operation( + self, + request: Optional[operations_pb2.GetOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. + + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. 
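The `__enter__`/`__exit__` pair above makes the client usable as a context manager that closes its transport on exit. A sketch, only appropriate when the transport is not shared with other clients; the resource name is a placeholder.

```python
from google.cloud import retail_v2alpha

# The underlying transport is closed when the block exits.
with retail_v2alpha.ProjectServiceClient() as client:
    project = client.get_project(name="projects/123/retailProject")
    print(project)
```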
+ """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.get_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("ProjectServiceClient",) diff --git a/packages/google-cloud-retail/google/cloud/retail_v2alpha/services/project_service/transports/__init__.py b/packages/google-cloud-retail/google/cloud/retail_v2alpha/services/project_service/transports/__init__.py new file mode 100644 index 000000000000..bfc15c764467 --- /dev/null +++ b/packages/google-cloud-retail/google/cloud/retail_v2alpha/services/project_service/transports/__init__.py @@ -0,0 +1,36 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +from typing import Dict, Type + +from .base import ProjectServiceTransport +from .grpc import ProjectServiceGrpcTransport +from .grpc_asyncio import ProjectServiceGrpcAsyncIOTransport +from .rest import ProjectServiceRestInterceptor, ProjectServiceRestTransport + +# Compile a registry of transports. +_transport_registry = OrderedDict() # type: Dict[str, Type[ProjectServiceTransport]] +_transport_registry["grpc"] = ProjectServiceGrpcTransport +_transport_registry["grpc_asyncio"] = ProjectServiceGrpcAsyncIOTransport +_transport_registry["rest"] = ProjectServiceRestTransport + +__all__ = ( + "ProjectServiceTransport", + "ProjectServiceGrpcTransport", + "ProjectServiceGrpcAsyncIOTransport", + "ProjectServiceRestTransport", + "ProjectServiceRestInterceptor", +) diff --git a/packages/google-cloud-retail/google/cloud/retail_v2alpha/services/project_service/transports/base.py b/packages/google-cloud-retail/google/cloud/retail_v2alpha/services/project_service/transports/base.py new file mode 100644 index 000000000000..8c333736ca16 --- /dev/null +++ b/packages/google-cloud-retail/google/cloud/retail_v2alpha/services/project_service/transports/base.py @@ -0,0 +1,290 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import abc +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union + +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1, operations_v1 +from google.api_core import retry as retries +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.retail_v2alpha import gapic_version as package_version +from google.cloud.retail_v2alpha.types import project +from google.cloud.retail_v2alpha.types import project as gcr_project +from google.cloud.retail_v2alpha.types import project_service + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +class ProjectServiceTransport(abc.ABC): + """Abstract transport class for ProjectService.""" + + AUTH_SCOPES = ("https://www.googleapis.com/auth/cloud-platform",) + + DEFAULT_HOST: str = "retail.googleapis.com" + + def __init__( + self, + *, + host: str = DEFAULT_HOST, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + **kwargs, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'retail.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A list of scopes. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + """ + + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} + + # Save the scopes. + self._scopes = scopes + + # If no credentials are provided, then determine the appropriate + # defaults. 
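The credential resolution that follows prefers explicitly supplied credentials, then a `credentials_file`, then Application Default Credentials. A sketch using a service-account key file; the key path and resource name are placeholders.

```python
from google.oauth2 import service_account

from google.cloud import retail_v2alpha

# Any google.auth credentials object works here; the key path is a placeholder.
credentials = service_account.Credentials.from_service_account_file(
    "/path/to/service-account.json",
    scopes=["https://www.googleapis.com/auth/cloud-platform"],
)

client = retail_v2alpha.ProjectServiceClient(credentials=credentials)
print(client.get_project(name="projects/123/retailProject"))
```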
+ if credentials and credentials_file: + raise core_exceptions.DuplicateCredentialArgs( + "'credentials_file' and 'credentials' are mutually exclusive" + ) + + if credentials_file is not None: + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, **scopes_kwargs, quota_project_id=quota_project_id + ) + elif credentials is None: + credentials, _ = google.auth.default( + **scopes_kwargs, quota_project_id=quota_project_id + ) + # Don't apply audience if the credentials file passed from user. + if hasattr(credentials, "with_gdch_audience"): + credentials = credentials.with_gdch_audience( + api_audience if api_audience else host + ) + + # If the credentials are service account credentials, then always try to use self signed JWT. + if ( + always_use_jwt_access + and isinstance(credentials, service_account.Credentials) + and hasattr(service_account.Credentials, "with_always_use_jwt_access") + ): + credentials = credentials.with_always_use_jwt_access(True) + + # Save the credentials. + self._credentials = credentials + + # Save the hostname. Default to port 443 (HTTPS) if none is specified. + if ":" not in host: + host += ":443" + self._host = host + + @property + def host(self): + return self._host + + def _prep_wrapped_messages(self, client_info): + # Precompute the wrapped methods. + self._wrapped_methods = { + self.get_project: gapic_v1.method.wrap_method( + self.get_project, + default_timeout=None, + client_info=client_info, + ), + self.accept_terms: gapic_v1.method.wrap_method( + self.accept_terms, + default_timeout=None, + client_info=client_info, + ), + self.enroll_solution: gapic_v1.method.wrap_method( + self.enroll_solution, + default_timeout=None, + client_info=client_info, + ), + self.list_enrolled_solutions: gapic_v1.method.wrap_method( + self.list_enrolled_solutions, + default_timeout=None, + client_info=client_info, + ), + self.get_logging_config: gapic_v1.method.wrap_method( + self.get_logging_config, + default_timeout=None, + client_info=client_info, + ), + self.update_logging_config: gapic_v1.method.wrap_method( + self.update_logging_config, + default_timeout=None, + client_info=client_info, + ), + self.get_alert_config: gapic_v1.method.wrap_method( + self.get_alert_config, + default_timeout=None, + client_info=client_info, + ), + self.update_alert_config: gapic_v1.method.wrap_method( + self.update_alert_config, + default_timeout=None, + client_info=client_info, + ), + } + + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! 
+ """ + raise NotImplementedError() + + @property + def operations_client(self): + """Return the client designed to process long-running operations.""" + raise NotImplementedError() + + @property + def get_project( + self, + ) -> Callable[ + [project_service.GetProjectRequest], + Union[project.Project, Awaitable[project.Project]], + ]: + raise NotImplementedError() + + @property + def accept_terms( + self, + ) -> Callable[ + [project_service.AcceptTermsRequest], + Union[gcr_project.Project, Awaitable[gcr_project.Project]], + ]: + raise NotImplementedError() + + @property + def enroll_solution( + self, + ) -> Callable[ + [project_service.EnrollSolutionRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def list_enrolled_solutions( + self, + ) -> Callable[ + [project_service.ListEnrolledSolutionsRequest], + Union[ + project_service.ListEnrolledSolutionsResponse, + Awaitable[project_service.ListEnrolledSolutionsResponse], + ], + ]: + raise NotImplementedError() + + @property + def get_logging_config( + self, + ) -> Callable[ + [project_service.GetLoggingConfigRequest], + Union[project.LoggingConfig, Awaitable[project.LoggingConfig]], + ]: + raise NotImplementedError() + + @property + def update_logging_config( + self, + ) -> Callable[ + [project_service.UpdateLoggingConfigRequest], + Union[project.LoggingConfig, Awaitable[project.LoggingConfig]], + ]: + raise NotImplementedError() + + @property + def get_alert_config( + self, + ) -> Callable[ + [project_service.GetAlertConfigRequest], + Union[project.AlertConfig, Awaitable[project.AlertConfig]], + ]: + raise NotImplementedError() + + @property + def update_alert_config( + self, + ) -> Callable[ + [project_service.UpdateAlertConfigRequest], + Union[project.AlertConfig, Awaitable[project.AlertConfig]], + ]: + raise NotImplementedError() + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], + Union[ + operations_pb2.ListOperationsResponse, + Awaitable[operations_pb2.ListOperationsResponse], + ], + ]: + raise NotImplementedError() + + @property + def get_operation( + self, + ) -> Callable[ + [operations_pb2.GetOperationRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def kind(self) -> str: + raise NotImplementedError() + + +__all__ = ("ProjectServiceTransport",) diff --git a/packages/google-cloud-retail/google/cloud/retail_v2alpha/services/project_service/transports/grpc.py b/packages/google-cloud-retail/google/cloud/retail_v2alpha/services/project_service/transports/grpc.py new file mode 100644 index 000000000000..d8fdd4e2b496 --- /dev/null +++ b/packages/google-cloud-retail/google/cloud/retail_v2alpha/services/project_service/transports/grpc.py @@ -0,0 +1,530 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from typing import Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, grpc_helpers, operations_v1 +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +import grpc # type: ignore + +from google.cloud.retail_v2alpha.types import project +from google.cloud.retail_v2alpha.types import project as gcr_project +from google.cloud.retail_v2alpha.types import project_service + +from .base import DEFAULT_CLIENT_INFO, ProjectServiceTransport + + +class ProjectServiceGrpcTransport(ProjectServiceTransport): + """gRPC backend transport for ProjectService. + + Service for settings at Project level. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + + _stubs: Dict[str, Callable] + + def __init__( + self, + *, + host: str = "retail.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'retail.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if a ``channel`` instance is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if a ``channel`` instance is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if a ``channel`` instance is provided. + channel (Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]]): + A ``Channel`` instance through which to make calls, or a Callable + that constructs and returns one. If set to None, ``self.create_channel`` + is used to create the channel. If a Callable is given, it will be called + with the same arguments as used in ``self.create_channel``. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. 
A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if a ``channel`` instance is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + self._operations_client: Optional[operations_v1.OperationsClient] = None + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if isinstance(channel, grpc.Channel): + # Ignore credentials if a channel was passed. + credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + # initialize with the provided callable or the default channel + channel_init = channel or type(self).create_channel + self._grpc_channel = channel_init( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. 
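When a pre-built transport instance is supplied, the checks earlier in the client constructor require credentials and scopes to be attached to the transport itself rather than passed to the client. A minimal sketch using the gRPC transport defined here; credentials fall back to Application Default Credentials.

```python
from google.cloud import retail_v2alpha
from google.cloud.retail_v2alpha.services.project_service.transports import (
    ProjectServiceGrpcTransport,
)

# The transport resolves its own credentials (ADC here); do not also pass
# credentials to the client when handing it a transport instance.
transport = ProjectServiceGrpcTransport(host="retail.googleapis.com")
client = retail_v2alpha.ProjectServiceClient(transport=transport)
print(client.api_endpoint)
```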
This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @classmethod + def create_channel( + cls, + host: str = "retail.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> grpc.Channel: + """Create and return a gRPC channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + grpc.Channel: A gRPC channel object. + + Raises: + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + + return grpc_helpers.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs, + ) + + @property + def grpc_channel(self) -> grpc.Channel: + """Return the channel designed to connect to this service.""" + return self._grpc_channel + + @property + def operations_client(self) -> operations_v1.OperationsClient: + """Create the client designed to process long-running operations. + + This property caches on the instance; repeated calls return the same + client. + """ + # Quick check: Only create a new client if we do not already have one. + if self._operations_client is None: + self._operations_client = operations_v1.OperationsClient(self.grpc_channel) + + # Return the client from cache. + return self._operations_client + + @property + def get_project( + self, + ) -> Callable[[project_service.GetProjectRequest], project.Project]: + r"""Return a callable for the get project method over gRPC. + + Gets the project. + + Throws ``NOT_FOUND`` if the project wasn't initialized for the + Retail API service. + + Returns: + Callable[[~.GetProjectRequest], + ~.Project]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_project" not in self._stubs: + self._stubs["get_project"] = self.grpc_channel.unary_unary( + "/google.cloud.retail.v2alpha.ProjectService/GetProject", + request_serializer=project_service.GetProjectRequest.serialize, + response_deserializer=project.Project.deserialize, + ) + return self._stubs["get_project"] + + @property + def accept_terms( + self, + ) -> Callable[[project_service.AcceptTermsRequest], gcr_project.Project]: + r"""Return a callable for the accept terms method over gRPC. 
+ + Accepts service terms for this project. + By making requests to this API, you agree to the terms + of service linked below. + https://cloud.google.com/retail/data-use-terms + + Returns: + Callable[[~.AcceptTermsRequest], + ~.Project]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "accept_terms" not in self._stubs: + self._stubs["accept_terms"] = self.grpc_channel.unary_unary( + "/google.cloud.retail.v2alpha.ProjectService/AcceptTerms", + request_serializer=project_service.AcceptTermsRequest.serialize, + response_deserializer=gcr_project.Project.deserialize, + ) + return self._stubs["accept_terms"] + + @property + def enroll_solution( + self, + ) -> Callable[[project_service.EnrollSolutionRequest], operations_pb2.Operation]: + r"""Return a callable for the enroll solution method over gRPC. + + The method enrolls a solution of type [Retail + Search][google.cloud.retail.v2alpha.SolutionType.SOLUTION_TYPE_SEARCH] + into a project. + + The [Recommendations AI solution + type][google.cloud.retail.v2alpha.SolutionType.SOLUTION_TYPE_RECOMMENDATION] + is enrolled by default when your project enables Retail API, so + you don't need to call the enrollSolution method for + recommendations. + + Returns: + Callable[[~.EnrollSolutionRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "enroll_solution" not in self._stubs: + self._stubs["enroll_solution"] = self.grpc_channel.unary_unary( + "/google.cloud.retail.v2alpha.ProjectService/EnrollSolution", + request_serializer=project_service.EnrollSolutionRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["enroll_solution"] + + @property + def list_enrolled_solutions( + self, + ) -> Callable[ + [project_service.ListEnrolledSolutionsRequest], + project_service.ListEnrolledSolutionsResponse, + ]: + r"""Return a callable for the list enrolled solutions method over gRPC. + + Lists all the retail API solutions the project has + enrolled. + + Returns: + Callable[[~.ListEnrolledSolutionsRequest], + ~.ListEnrolledSolutionsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_enrolled_solutions" not in self._stubs: + self._stubs["list_enrolled_solutions"] = self.grpc_channel.unary_unary( + "/google.cloud.retail.v2alpha.ProjectService/ListEnrolledSolutions", + request_serializer=project_service.ListEnrolledSolutionsRequest.serialize, + response_deserializer=project_service.ListEnrolledSolutionsResponse.deserialize, + ) + return self._stubs["list_enrolled_solutions"] + + @property + def get_logging_config( + self, + ) -> Callable[[project_service.GetLoggingConfigRequest], project.LoggingConfig]: + r"""Return a callable for the get logging config method over gRPC. 
+ + Gets the + [LoggingConfig][google.cloud.retail.v2alpha.LoggingConfig] of + the requested project. + + Returns: + Callable[[~.GetLoggingConfigRequest], + ~.LoggingConfig]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_logging_config" not in self._stubs: + self._stubs["get_logging_config"] = self.grpc_channel.unary_unary( + "/google.cloud.retail.v2alpha.ProjectService/GetLoggingConfig", + request_serializer=project_service.GetLoggingConfigRequest.serialize, + response_deserializer=project.LoggingConfig.deserialize, + ) + return self._stubs["get_logging_config"] + + @property + def update_logging_config( + self, + ) -> Callable[[project_service.UpdateLoggingConfigRequest], project.LoggingConfig]: + r"""Return a callable for the update logging config method over gRPC. + + Updates the + [LoggingConfig][google.cloud.retail.v2alpha.LoggingConfig] of + the requested project. + + Returns: + Callable[[~.UpdateLoggingConfigRequest], + ~.LoggingConfig]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_logging_config" not in self._stubs: + self._stubs["update_logging_config"] = self.grpc_channel.unary_unary( + "/google.cloud.retail.v2alpha.ProjectService/UpdateLoggingConfig", + request_serializer=project_service.UpdateLoggingConfigRequest.serialize, + response_deserializer=project.LoggingConfig.deserialize, + ) + return self._stubs["update_logging_config"] + + @property + def get_alert_config( + self, + ) -> Callable[[project_service.GetAlertConfigRequest], project.AlertConfig]: + r"""Return a callable for the get alert config method over gRPC. + + Get the [AlertConfig][google.cloud.retail.v2alpha.AlertConfig] + of the requested project. + + Returns: + Callable[[~.GetAlertConfigRequest], + ~.AlertConfig]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_alert_config" not in self._stubs: + self._stubs["get_alert_config"] = self.grpc_channel.unary_unary( + "/google.cloud.retail.v2alpha.ProjectService/GetAlertConfig", + request_serializer=project_service.GetAlertConfigRequest.serialize, + response_deserializer=project.AlertConfig.deserialize, + ) + return self._stubs["get_alert_config"] + + @property + def update_alert_config( + self, + ) -> Callable[[project_service.UpdateAlertConfigRequest], project.AlertConfig]: + r"""Return a callable for the update alert config method over gRPC. + + Update the alert config of the requested project. + + Returns: + Callable[[~.UpdateAlertConfigRequest], + ~.AlertConfig]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
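+        # The stub is created lazily on first access and cached in
+        # self._stubs, so later reads of this property reuse the same
+        # bound gRPC callable instead of rebuilding it.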
+ if "update_alert_config" not in self._stubs: + self._stubs["update_alert_config"] = self.grpc_channel.unary_unary( + "/google.cloud.retail.v2alpha.ProjectService/UpdateAlertConfig", + request_serializer=project_service.UpdateAlertConfigRequest.serialize, + response_deserializer=project.AlertConfig.deserialize, + ) + return self._stubs["update_alert_config"] + + def close(self): + self.grpc_channel.close() + + @property + def get_operation( + self, + ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: + r"""Return a callable for the get_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_operation" not in self._stubs: + self._stubs["get_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/GetOperation", + request_serializer=operations_pb2.GetOperationRequest.SerializeToString, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["get_operation"] + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse + ]: + r"""Return a callable for the list_operations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_operations" not in self._stubs: + self._stubs["list_operations"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/ListOperations", + request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, + response_deserializer=operations_pb2.ListOperationsResponse.FromString, + ) + return self._stubs["list_operations"] + + @property + def kind(self) -> str: + return "grpc" + + +__all__ = ("ProjectServiceGrpcTransport",) diff --git a/packages/google-cloud-retail/google/cloud/retail_v2alpha/services/project_service/transports/grpc_asyncio.py b/packages/google-cloud-retail/google/cloud/retail_v2alpha/services/project_service/transports/grpc_asyncio.py new file mode 100644 index 000000000000..0b6be2981e35 --- /dev/null +++ b/packages/google-cloud-retail/google/cloud/retail_v2alpha/services/project_service/transports/grpc_asyncio.py @@ -0,0 +1,587 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
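Both transport classes in this change are normally constructed for the caller by the GAPIC client, but they can also be built explicitly and injected, as the interceptor example later in this diff does for the REST transport. The snippet below is a minimal, hedged sketch of that wiring for the synchronous gRPC transport defined above; the import paths and the `projects/.../retailProject` resource name are assumptions based on the usual GAPIC layout, not taken from this diff.

```python
# Hedged sketch: building the gRPC transport explicitly and handing it to the
# client (assumed import paths; requires application default credentials).
from google.cloud.retail_v2alpha import ProjectServiceClient
from google.cloud.retail_v2alpha.services.project_service.transports import (
    ProjectServiceGrpcTransport,
)
from google.cloud.retail_v2alpha.types import project_service

transport = ProjectServiceGrpcTransport(host="retail.googleapis.com")
client = ProjectServiceClient(transport=transport)

# Placeholder resource name; substitute the real project number.
request = project_service.GetProjectRequest(
    name="projects/PROJECT_NUMBER/retailProject"
)
response = client.get_project(request=request)
```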
+# +from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1, grpc_helpers_async, operations_v1 +from google.api_core import retry_async as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +import grpc # type: ignore +from grpc.experimental import aio # type: ignore + +from google.cloud.retail_v2alpha.types import project +from google.cloud.retail_v2alpha.types import project as gcr_project +from google.cloud.retail_v2alpha.types import project_service + +from .base import DEFAULT_CLIENT_INFO, ProjectServiceTransport +from .grpc import ProjectServiceGrpcTransport + + +class ProjectServiceGrpcAsyncIOTransport(ProjectServiceTransport): + """gRPC AsyncIO backend transport for ProjectService. + + Service for settings at Project level. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + + _grpc_channel: aio.Channel + _stubs: Dict[str, Callable] = {} + + @classmethod + def create_channel( + cls, + host: str = "retail.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> aio.Channel: + """Create and return a gRPC AsyncIO channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + aio.Channel: A gRPC AsyncIO channel object. 
+ """ + + return grpc_helpers_async.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs, + ) + + def __init__( + self, + *, + host: str = "retail.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[Union[aio.Channel, Callable[..., aio.Channel]]] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'retail.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if a ``channel`` instance is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if a ``channel`` instance is provided. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + channel (Optional[Union[aio.Channel, Callable[..., aio.Channel]]]): + A ``Channel`` instance through which to make calls, or a Callable + that constructs and returns one. If set to None, ``self.create_channel`` + is used to create the channel. If a Callable is given, it will be called + with the same arguments as used in ``self.create_channel``. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if a ``channel`` instance is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. 
+ Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + self._operations_client: Optional[operations_v1.OperationsAsyncClient] = None + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if isinstance(channel, aio.Channel): + # Ignore credentials if a channel was passed. + credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + # initialize with the provided callable or the default channel + channel_init = channel or type(self).create_channel + self._grpc_channel = channel_init( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @property + def grpc_channel(self) -> aio.Channel: + """Create the channel designed to connect to this service. + + This property caches on the instance; repeated calls return + the same channel. + """ + # Return the channel from cache. + return self._grpc_channel + + @property + def operations_client(self) -> operations_v1.OperationsAsyncClient: + """Create the client designed to process long-running operations. + + This property caches on the instance; repeated calls return the same + client. + """ + # Quick check: Only create a new client if we do not already have one. + if self._operations_client is None: + self._operations_client = operations_v1.OperationsAsyncClient( + self.grpc_channel + ) + + # Return the client from cache. 
+ return self._operations_client + + @property + def get_project( + self, + ) -> Callable[[project_service.GetProjectRequest], Awaitable[project.Project]]: + r"""Return a callable for the get project method over gRPC. + + Gets the project. + + Throws ``NOT_FOUND`` if the project wasn't initialized for the + Retail API service. + + Returns: + Callable[[~.GetProjectRequest], + Awaitable[~.Project]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_project" not in self._stubs: + self._stubs["get_project"] = self.grpc_channel.unary_unary( + "/google.cloud.retail.v2alpha.ProjectService/GetProject", + request_serializer=project_service.GetProjectRequest.serialize, + response_deserializer=project.Project.deserialize, + ) + return self._stubs["get_project"] + + @property + def accept_terms( + self, + ) -> Callable[[project_service.AcceptTermsRequest], Awaitable[gcr_project.Project]]: + r"""Return a callable for the accept terms method over gRPC. + + Accepts service terms for this project. + By making requests to this API, you agree to the terms + of service linked below. + https://cloud.google.com/retail/data-use-terms + + Returns: + Callable[[~.AcceptTermsRequest], + Awaitable[~.Project]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "accept_terms" not in self._stubs: + self._stubs["accept_terms"] = self.grpc_channel.unary_unary( + "/google.cloud.retail.v2alpha.ProjectService/AcceptTerms", + request_serializer=project_service.AcceptTermsRequest.serialize, + response_deserializer=gcr_project.Project.deserialize, + ) + return self._stubs["accept_terms"] + + @property + def enroll_solution( + self, + ) -> Callable[ + [project_service.EnrollSolutionRequest], Awaitable[operations_pb2.Operation] + ]: + r"""Return a callable for the enroll solution method over gRPC. + + The method enrolls a solution of type [Retail + Search][google.cloud.retail.v2alpha.SolutionType.SOLUTION_TYPE_SEARCH] + into a project. + + The [Recommendations AI solution + type][google.cloud.retail.v2alpha.SolutionType.SOLUTION_TYPE_RECOMMENDATION] + is enrolled by default when your project enables Retail API, so + you don't need to call the enrollSolution method for + recommendations. + + Returns: + Callable[[~.EnrollSolutionRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
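+        # Because this stub is created on the grpc.aio channel, invoking the
+        # returned callable produces an awaitable call object, which matches
+        # the Awaitable[...] return annotation above.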
+ if "enroll_solution" not in self._stubs: + self._stubs["enroll_solution"] = self.grpc_channel.unary_unary( + "/google.cloud.retail.v2alpha.ProjectService/EnrollSolution", + request_serializer=project_service.EnrollSolutionRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["enroll_solution"] + + @property + def list_enrolled_solutions( + self, + ) -> Callable[ + [project_service.ListEnrolledSolutionsRequest], + Awaitable[project_service.ListEnrolledSolutionsResponse], + ]: + r"""Return a callable for the list enrolled solutions method over gRPC. + + Lists all the retail API solutions the project has + enrolled. + + Returns: + Callable[[~.ListEnrolledSolutionsRequest], + Awaitable[~.ListEnrolledSolutionsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_enrolled_solutions" not in self._stubs: + self._stubs["list_enrolled_solutions"] = self.grpc_channel.unary_unary( + "/google.cloud.retail.v2alpha.ProjectService/ListEnrolledSolutions", + request_serializer=project_service.ListEnrolledSolutionsRequest.serialize, + response_deserializer=project_service.ListEnrolledSolutionsResponse.deserialize, + ) + return self._stubs["list_enrolled_solutions"] + + @property + def get_logging_config( + self, + ) -> Callable[ + [project_service.GetLoggingConfigRequest], Awaitable[project.LoggingConfig] + ]: + r"""Return a callable for the get logging config method over gRPC. + + Gets the + [LoggingConfig][google.cloud.retail.v2alpha.LoggingConfig] of + the requested project. + + Returns: + Callable[[~.GetLoggingConfigRequest], + Awaitable[~.LoggingConfig]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_logging_config" not in self._stubs: + self._stubs["get_logging_config"] = self.grpc_channel.unary_unary( + "/google.cloud.retail.v2alpha.ProjectService/GetLoggingConfig", + request_serializer=project_service.GetLoggingConfigRequest.serialize, + response_deserializer=project.LoggingConfig.deserialize, + ) + return self._stubs["get_logging_config"] + + @property + def update_logging_config( + self, + ) -> Callable[ + [project_service.UpdateLoggingConfigRequest], Awaitable[project.LoggingConfig] + ]: + r"""Return a callable for the update logging config method over gRPC. + + Updates the + [LoggingConfig][google.cloud.retail.v2alpha.LoggingConfig] of + the requested project. + + Returns: + Callable[[~.UpdateLoggingConfigRequest], + Awaitable[~.LoggingConfig]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
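+        # The serialize/deserialize hooks used here are the proto-plus
+        # helpers generated on the request and response message classes.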
+ if "update_logging_config" not in self._stubs: + self._stubs["update_logging_config"] = self.grpc_channel.unary_unary( + "/google.cloud.retail.v2alpha.ProjectService/UpdateLoggingConfig", + request_serializer=project_service.UpdateLoggingConfigRequest.serialize, + response_deserializer=project.LoggingConfig.deserialize, + ) + return self._stubs["update_logging_config"] + + @property + def get_alert_config( + self, + ) -> Callable[ + [project_service.GetAlertConfigRequest], Awaitable[project.AlertConfig] + ]: + r"""Return a callable for the get alert config method over gRPC. + + Get the [AlertConfig][google.cloud.retail.v2alpha.AlertConfig] + of the requested project. + + Returns: + Callable[[~.GetAlertConfigRequest], + Awaitable[~.AlertConfig]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_alert_config" not in self._stubs: + self._stubs["get_alert_config"] = self.grpc_channel.unary_unary( + "/google.cloud.retail.v2alpha.ProjectService/GetAlertConfig", + request_serializer=project_service.GetAlertConfigRequest.serialize, + response_deserializer=project.AlertConfig.deserialize, + ) + return self._stubs["get_alert_config"] + + @property + def update_alert_config( + self, + ) -> Callable[ + [project_service.UpdateAlertConfigRequest], Awaitable[project.AlertConfig] + ]: + r"""Return a callable for the update alert config method over gRPC. + + Update the alert config of the requested project. + + Returns: + Callable[[~.UpdateAlertConfigRequest], + Awaitable[~.AlertConfig]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "update_alert_config" not in self._stubs: + self._stubs["update_alert_config"] = self.grpc_channel.unary_unary( + "/google.cloud.retail.v2alpha.ProjectService/UpdateAlertConfig", + request_serializer=project_service.UpdateAlertConfigRequest.serialize, + response_deserializer=project.AlertConfig.deserialize, + ) + return self._stubs["update_alert_config"] + + def _prep_wrapped_messages(self, client_info): + """Precompute the wrapped methods, overriding the base class method to use async wrappers.""" + self._wrapped_methods = { + self.get_project: gapic_v1.method_async.wrap_method( + self.get_project, + default_timeout=None, + client_info=client_info, + ), + self.accept_terms: gapic_v1.method_async.wrap_method( + self.accept_terms, + default_timeout=None, + client_info=client_info, + ), + self.enroll_solution: gapic_v1.method_async.wrap_method( + self.enroll_solution, + default_timeout=None, + client_info=client_info, + ), + self.list_enrolled_solutions: gapic_v1.method_async.wrap_method( + self.list_enrolled_solutions, + default_timeout=None, + client_info=client_info, + ), + self.get_logging_config: gapic_v1.method_async.wrap_method( + self.get_logging_config, + default_timeout=None, + client_info=client_info, + ), + self.update_logging_config: gapic_v1.method_async.wrap_method( + self.update_logging_config, + default_timeout=None, + client_info=client_info, + ), + self.get_alert_config: gapic_v1.method_async.wrap_method( + self.get_alert_config, + default_timeout=None, + client_info=client_info, + ), + self.update_alert_config: gapic_v1.method_async.wrap_method( + self.update_alert_config, + default_timeout=None, + client_info=client_info, + ), + } + + def close(self): + return self.grpc_channel.close() + + @property + def get_operation( + self, + ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: + r"""Return a callable for the get_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_operation" not in self._stubs: + self._stubs["get_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/GetOperation", + request_serializer=operations_pb2.GetOperationRequest.SerializeToString, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["get_operation"] + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse + ]: + r"""Return a callable for the list_operations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
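+        # The long-running-operations messages come from operations_pb2 and
+        # are plain protobuf (not proto-plus), so this stub serializes with
+        # SerializeToString/FromString directly.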
+ if "list_operations" not in self._stubs: + self._stubs["list_operations"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/ListOperations", + request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, + response_deserializer=operations_pb2.ListOperationsResponse.FromString, + ) + return self._stubs["list_operations"] + + +__all__ = ("ProjectServiceGrpcAsyncIOTransport",) diff --git a/packages/google-cloud-retail/google/cloud/retail_v2alpha/services/project_service/transports/rest.py b/packages/google-cloud-retail/google/cloud/retail_v2alpha/services/project_service/transports/rest.py new file mode 100644 index 000000000000..b6b9897ba698 --- /dev/null +++ b/packages/google-cloud-retail/google/cloud/retail_v2alpha/services/project_service/transports/rest.py @@ -0,0 +1,1506 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +import dataclasses +import json # type: ignore +import re +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import ( + gapic_v1, + operations_v1, + path_template, + rest_helpers, + rest_streaming, +) +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.transport.requests import AuthorizedSession # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.protobuf import json_format +import grpc # type: ignore +from requests import __version__ as requests_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + + +from google.longrunning import operations_pb2 # type: ignore + +from google.cloud.retail_v2alpha.types import project +from google.cloud.retail_v2alpha.types import project as gcr_project +from google.cloud.retail_v2alpha.types import project_service + +from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO +from .base import ProjectServiceTransport + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) + + +class ProjectServiceRestInterceptor: + """Interceptor for ProjectService. + + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. + Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the ProjectServiceRestTransport. + + .. 
code-block:: python + class MyCustomProjectServiceInterceptor(ProjectServiceRestInterceptor): + def pre_accept_terms(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_accept_terms(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_enroll_solution(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_enroll_solution(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_alert_config(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_alert_config(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_logging_config(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_logging_config(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_project(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_project(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_enrolled_solutions(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_enrolled_solutions(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_update_alert_config(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_update_alert_config(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_update_logging_config(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_update_logging_config(self, response): + logging.log(f"Received response: {response}") + return response + + transport = ProjectServiceRestTransport(interceptor=MyCustomProjectServiceInterceptor()) + client = ProjectServiceClient(transport=transport) + + + """ + + def pre_accept_terms( + self, + request: project_service.AcceptTermsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[project_service.AcceptTermsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for accept_terms + + Override in a subclass to manipulate the request or metadata + before they are sent to the ProjectService server. + """ + return request, metadata + + def post_accept_terms(self, response: gcr_project.Project) -> gcr_project.Project: + """Post-rpc interceptor for accept_terms + + Override in a subclass to manipulate the response + after it is returned by the ProjectService server but before + it is returned to user code. + """ + return response + + def pre_enroll_solution( + self, + request: project_service.EnrollSolutionRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[project_service.EnrollSolutionRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for enroll_solution + + Override in a subclass to manipulate the request or metadata + before they are sent to the ProjectService server. 
+ """ + return request, metadata + + def post_enroll_solution( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for enroll_solution + + Override in a subclass to manipulate the response + after it is returned by the ProjectService server but before + it is returned to user code. + """ + return response + + def pre_get_alert_config( + self, + request: project_service.GetAlertConfigRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[project_service.GetAlertConfigRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_alert_config + + Override in a subclass to manipulate the request or metadata + before they are sent to the ProjectService server. + """ + return request, metadata + + def post_get_alert_config( + self, response: project.AlertConfig + ) -> project.AlertConfig: + """Post-rpc interceptor for get_alert_config + + Override in a subclass to manipulate the response + after it is returned by the ProjectService server but before + it is returned to user code. + """ + return response + + def pre_get_logging_config( + self, + request: project_service.GetLoggingConfigRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[project_service.GetLoggingConfigRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_logging_config + + Override in a subclass to manipulate the request or metadata + before they are sent to the ProjectService server. + """ + return request, metadata + + def post_get_logging_config( + self, response: project.LoggingConfig + ) -> project.LoggingConfig: + """Post-rpc interceptor for get_logging_config + + Override in a subclass to manipulate the response + after it is returned by the ProjectService server but before + it is returned to user code. + """ + return response + + def pre_get_project( + self, + request: project_service.GetProjectRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[project_service.GetProjectRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_project + + Override in a subclass to manipulate the request or metadata + before they are sent to the ProjectService server. + """ + return request, metadata + + def post_get_project(self, response: project.Project) -> project.Project: + """Post-rpc interceptor for get_project + + Override in a subclass to manipulate the response + after it is returned by the ProjectService server but before + it is returned to user code. + """ + return response + + def pre_list_enrolled_solutions( + self, + request: project_service.ListEnrolledSolutionsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[project_service.ListEnrolledSolutionsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_enrolled_solutions + + Override in a subclass to manipulate the request or metadata + before they are sent to the ProjectService server. + """ + return request, metadata + + def post_list_enrolled_solutions( + self, response: project_service.ListEnrolledSolutionsResponse + ) -> project_service.ListEnrolledSolutionsResponse: + """Post-rpc interceptor for list_enrolled_solutions + + Override in a subclass to manipulate the response + after it is returned by the ProjectService server but before + it is returned to user code. 
+ """ + return response + + def pre_update_alert_config( + self, + request: project_service.UpdateAlertConfigRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[project_service.UpdateAlertConfigRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for update_alert_config + + Override in a subclass to manipulate the request or metadata + before they are sent to the ProjectService server. + """ + return request, metadata + + def post_update_alert_config( + self, response: project.AlertConfig + ) -> project.AlertConfig: + """Post-rpc interceptor for update_alert_config + + Override in a subclass to manipulate the response + after it is returned by the ProjectService server but before + it is returned to user code. + """ + return response + + def pre_update_logging_config( + self, + request: project_service.UpdateLoggingConfigRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[project_service.UpdateLoggingConfigRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for update_logging_config + + Override in a subclass to manipulate the request or metadata + before they are sent to the ProjectService server. + """ + return request, metadata + + def post_update_logging_config( + self, response: project.LoggingConfig + ) -> project.LoggingConfig: + """Post-rpc interceptor for update_logging_config + + Override in a subclass to manipulate the response + after it is returned by the ProjectService server but before + it is returned to user code. + """ + return response + + def pre_get_operation( + self, + request: operations_pb2.GetOperationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[operations_pb2.GetOperationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the ProjectService server. + """ + return request, metadata + + def post_get_operation( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for get_operation + + Override in a subclass to manipulate the response + after it is returned by the ProjectService server but before + it is returned to user code. + """ + return response + + def pre_list_operations( + self, + request: operations_pb2.ListOperationsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[operations_pb2.ListOperationsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_operations + + Override in a subclass to manipulate the request or metadata + before they are sent to the ProjectService server. + """ + return request, metadata + + def post_list_operations( + self, response: operations_pb2.ListOperationsResponse + ) -> operations_pb2.ListOperationsResponse: + """Post-rpc interceptor for list_operations + + Override in a subclass to manipulate the response + after it is returned by the ProjectService server but before + it is returned to user code. + """ + return response + + +@dataclasses.dataclass +class ProjectServiceRestStub: + _session: AuthorizedSession + _host: str + _interceptor: ProjectServiceRestInterceptor + + +class ProjectServiceRestTransport(ProjectServiceTransport): + """REST backend transport for ProjectService. + + Service for settings at Project level. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. 
+ + It sends JSON representations of protocol buffers over HTTP/1.1 + + """ + + def __init__( + self, + *, + host: str = "retail.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", + interceptor: Optional[ProjectServiceRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'retail.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. It is ignored + if ``channel`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. + """ + # Run the base constructor + # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. + # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the + # credentials object + maybe_url_match = re.match("^(?Phttp(?:s)?://)?(?P.*)$", host) + if maybe_url_match is None: + raise ValueError( + f"Unexpected hostname structure: {host}" + ) # pragma: NO COVER + + url_match_items = maybe_url_match.groupdict() + + host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host + + super().__init__( + host=host, + credentials=credentials, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + self._session = AuthorizedSession( + self._credentials, default_host=self.DEFAULT_HOST + ) + self._operations_client: Optional[operations_v1.AbstractOperationsClient] = None + if client_cert_source_for_mtls: + self._session.configure_mtls_channel(client_cert_source_for_mtls) + self._interceptor = interceptor or ProjectServiceRestInterceptor() + self._prep_wrapped_messages(client_info) + + @property + def operations_client(self) -> operations_v1.AbstractOperationsClient: + """Create the client designed to process long-running operations. + + This property caches on the instance; repeated calls return the same + client. 
+ """ + # Only create a new client if we do not already have one. + if self._operations_client is None: + http_options: Dict[str, List[Dict[str, str]]] = { + "google.longrunning.Operations.GetOperation": [ + { + "method": "get", + "uri": "/v2alpha/{name=projects/*/locations/*/catalogs/*/branches/*/operations/*}", + }, + { + "method": "get", + "uri": "/v2alpha/{name=projects/*/locations/*/catalogs/*/branches/*/places/*/operations/*}", + }, + { + "method": "get", + "uri": "/v2alpha/{name=projects/*/locations/*/catalogs/*/operations/*}", + }, + { + "method": "get", + "uri": "/v2alpha/{name=projects/*/locations/*/operations/*}", + }, + { + "method": "get", + "uri": "/v2alpha/{name=projects/*/operations/*}", + }, + ], + "google.longrunning.Operations.ListOperations": [ + { + "method": "get", + "uri": "/v2alpha/{name=projects/*/locations/*/catalogs/*}/operations", + }, + { + "method": "get", + "uri": "/v2alpha/{name=projects/*/locations/*}/operations", + }, + { + "method": "get", + "uri": "/v2alpha/{name=projects/*}/operations", + }, + ], + } + + rest_transport = operations_v1.OperationsRestTransport( + host=self._host, + # use the credentials which are saved + credentials=self._credentials, + scopes=self._scopes, + http_options=http_options, + path_prefix="v2alpha", + ) + + self._operations_client = operations_v1.AbstractOperationsClient( + transport=rest_transport + ) + + # Return the client from cache. + return self._operations_client + + class _AcceptTerms(ProjectServiceRestStub): + def __hash__(self): + return hash("AcceptTerms") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: project_service.AcceptTermsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> gcr_project.Project: + r"""Call the accept terms method over HTTP. + + Args: + request (~.project_service.AcceptTermsRequest): + The request object. Request for AcceptTerms method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.gcr_project.Project: + Metadata that describes a Cloud + Retail Project. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v2alpha/{project=projects/*/retailProject}:acceptTerms", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_accept_terms(request, metadata) + pb_request = project_service.AcceptTermsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = gcr_project.Project() + pb_resp = gcr_project.Project.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_accept_terms(resp) + return resp + + class _EnrollSolution(ProjectServiceRestStub): + def __hash__(self): + return hash("EnrollSolution") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: project_service.EnrollSolutionRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the enroll solution method over HTTP. + + Args: + request (~.project_service.EnrollSolutionRequest): + The request object. Request for EnrollSolution method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v2alpha/{project=projects/*}:enrollSolution", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_enroll_solution(request, metadata) + pb_request = project_service.EnrollSolutionRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_enroll_solution(resp) + return resp + + class _GetAlertConfig(ProjectServiceRestStub): + def __hash__(self): + return hash("GetAlertConfig") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: project_service.GetAlertConfigRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> project.AlertConfig: + r"""Call the get alert config method over HTTP. + + Args: + request (~.project_service.GetAlertConfigRequest): + The request object. Request for + [ProjectService.GetAlertConfig][google.cloud.retail.v2alpha.ProjectService.GetAlertConfig] + method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.project.AlertConfig: + Project level alert config. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v2alpha/{name=projects/*/alertConfig}", + }, + ] + request, metadata = self._interceptor.pre_get_alert_config( + request, metadata + ) + pb_request = project_service.GetAlertConfigRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = project.AlertConfig() + pb_resp = project.AlertConfig.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_alert_config(resp) + return resp + + class _GetLoggingConfig(ProjectServiceRestStub): + def __hash__(self): + return hash("GetLoggingConfig") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: project_service.GetLoggingConfigRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> project.LoggingConfig: + r"""Call the get logging config method over HTTP. + + Args: + request (~.project_service.GetLoggingConfigRequest): + The request object. Request for + [ProjectService.GetLoggingConfig][google.cloud.retail.v2alpha.ProjectService.GetLoggingConfig] + method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.project.LoggingConfig: + Project level logging config to + control what level of log will be + generated and written to Cloud Logging. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v2alpha/{name=projects/*/loggingConfig}", + }, + ] + request, metadata = self._interceptor.pre_get_logging_config( + request, metadata + ) + pb_request = project_service.GetLoggingConfigRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = project.LoggingConfig() + pb_resp = project.LoggingConfig.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_logging_config(resp) + return resp + + class _GetProject(ProjectServiceRestStub): + def __hash__(self): + return hash("GetProject") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: project_service.GetProjectRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> project.Project: + r"""Call the get project method over HTTP. + + Args: + request (~.project_service.GetProjectRequest): + The request object. Request for GetProject method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.project.Project: + Metadata that describes a Cloud + Retail Project. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v2alpha/{name=projects/*/retailProject}", + }, + ] + request, metadata = self._interceptor.pre_get_project(request, metadata) + pb_request = project_service.GetProjectRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = project.Project() + pb_resp = project.Project.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_project(resp) + return resp + + class _ListEnrolledSolutions(ProjectServiceRestStub): + def __hash__(self): + return hash("ListEnrolledSolutions") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: project_service.ListEnrolledSolutionsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> project_service.ListEnrolledSolutionsResponse: + r"""Call the list enrolled solutions method over HTTP. + + Args: + request (~.project_service.ListEnrolledSolutionsRequest): + The request object. Request for ListEnrolledSolutions + method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.project_service.ListEnrolledSolutionsResponse: + Response for ListEnrolledSolutions + method. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v2alpha/{parent=projects/*}:enrolledSolutions", + }, + ] + request, metadata = self._interceptor.pre_list_enrolled_solutions( + request, metadata + ) + pb_request = project_service.ListEnrolledSolutionsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = project_service.ListEnrolledSolutionsResponse() + pb_resp = project_service.ListEnrolledSolutionsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_enrolled_solutions(resp) + return resp + + class _UpdateAlertConfig(ProjectServiceRestStub): + def __hash__(self): + return hash("UpdateAlertConfig") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: project_service.UpdateAlertConfigRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> project.AlertConfig: + r"""Call the update alert config method over HTTP. + + Args: + request (~.project_service.UpdateAlertConfigRequest): + The request object. Request for + [ProjectService.UpdateAlertConfig][google.cloud.retail.v2alpha.ProjectService.UpdateAlertConfig] + method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.project.AlertConfig: + Project level alert config. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "patch", + "uri": "/v2alpha/{alert_config.name=projects/*/alertConfig}", + "body": "alert_config", + }, + ] + request, metadata = self._interceptor.pre_update_alert_config( + request, metadata + ) + pb_request = project_service.UpdateAlertConfigRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = project.AlertConfig() + pb_resp = project.AlertConfig.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_update_alert_config(resp) + return resp + + class _UpdateLoggingConfig(ProjectServiceRestStub): + def __hash__(self): + return hash("UpdateLoggingConfig") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: project_service.UpdateLoggingConfigRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> project.LoggingConfig: + r"""Call the update logging config method over HTTP. + + Args: + request (~.project_service.UpdateLoggingConfigRequest): + The request object. Request for + [ProjectService.UpdateLoggingConfig][google.cloud.retail.v2alpha.ProjectService.UpdateLoggingConfig] + method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.project.LoggingConfig: + Project level logging config to + control what level of log will be + generated and written to Cloud Logging. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "patch", + "uri": "/v2alpha/{logging_config.name=projects/*/loggingConfig}", + "body": "logging_config", + }, + ] + request, metadata = self._interceptor.pre_update_logging_config( + request, metadata + ) + pb_request = project_service.UpdateLoggingConfigRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = project.LoggingConfig() + pb_resp = project.LoggingConfig.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_update_logging_config(resp) + return resp + + @property + def accept_terms( + self, + ) -> Callable[[project_service.AcceptTermsRequest], gcr_project.Project]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._AcceptTerms(self._session, self._host, self._interceptor) # type: ignore + + @property + def enroll_solution( + self, + ) -> Callable[[project_service.EnrollSolutionRequest], operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._EnrollSolution(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_alert_config( + self, + ) -> Callable[[project_service.GetAlertConfigRequest], project.AlertConfig]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetAlertConfig(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_logging_config( + self, + ) -> Callable[[project_service.GetLoggingConfigRequest], project.LoggingConfig]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetLoggingConfig(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_project( + self, + ) -> Callable[[project_service.GetProjectRequest], project.Project]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._GetProject(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_enrolled_solutions( + self, + ) -> Callable[ + [project_service.ListEnrolledSolutionsRequest], + project_service.ListEnrolledSolutionsResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListEnrolledSolutions(self._session, self._host, self._interceptor) # type: ignore + + @property + def update_alert_config( + self, + ) -> Callable[[project_service.UpdateAlertConfigRequest], project.AlertConfig]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._UpdateAlertConfig(self._session, self._host, self._interceptor) # type: ignore + + @property + def update_logging_config( + self, + ) -> Callable[[project_service.UpdateLoggingConfigRequest], project.LoggingConfig]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._UpdateLoggingConfig(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_operation(self): + return self._GetOperation(self._session, self._host, self._interceptor) # type: ignore + + class _GetOperation(ProjectServiceRestStub): + def __call__( + self, + request: operations_pb2.GetOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the get operation method over HTTP. + + Args: + request (operations_pb2.GetOperationRequest): + The request object for GetOperation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + operations_pb2.Operation: Response from GetOperation method. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v2alpha/{name=projects/*/locations/*/catalogs/*/branches/*/operations/*}", + }, + { + "method": "get", + "uri": "/v2alpha/{name=projects/*/locations/*/catalogs/*/branches/*/places/*/operations/*}", + }, + { + "method": "get", + "uri": "/v2alpha/{name=projects/*/locations/*/catalogs/*/operations/*}", + }, + { + "method": "get", + "uri": "/v2alpha/{name=projects/*/locations/*/operations/*}", + }, + { + "method": "get", + "uri": "/v2alpha/{name=projects/*/operations/*}", + }, + ] + + request, metadata = self._interceptor.pre_get_operation(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = operations_pb2.Operation() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_get_operation(resp) + return resp + + @property + def list_operations(self): + return self._ListOperations(self._session, self._host, self._interceptor) # type: ignore + + class _ListOperations(ProjectServiceRestStub): + def __call__( + self, + request: operations_pb2.ListOperationsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Call the list operations method over HTTP. + + Args: + request (operations_pb2.ListOperationsRequest): + The request object for ListOperations method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + operations_pb2.ListOperationsResponse: Response from ListOperations method. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v2alpha/{name=projects/*/locations/*/catalogs/*}/operations", + }, + { + "method": "get", + "uri": "/v2alpha/{name=projects/*/locations/*}/operations", + }, + { + "method": "get", + "uri": "/v2alpha/{name=projects/*}/operations", + }, + ] + + request, metadata = self._interceptor.pre_list_operations(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = operations_pb2.ListOperationsResponse() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_list_operations(resp) + return resp + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__ = ("ProjectServiceRestTransport",) diff --git a/packages/google-cloud-retail/google/cloud/retail_v2alpha/types/__init__.py b/packages/google-cloud-retail/google/cloud/retail_v2alpha/types/__init__.py index 9ce2164ddb67..8e2838eb82f6 100644 --- a/packages/google-cloud-retail/google/cloud/retail_v2alpha/types/__init__.py +++ b/packages/google-cloud-retail/google/cloud/retail_v2alpha/types/__init__.py @@ -13,6 +13,8 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# +from .branch import Branch, BranchView +from .branch_service import GetBranchRequest, ListBranchesRequest, ListBranchesResponse from .catalog import ( AttributesConfig, Catalog, @@ -149,6 +151,20 @@ SetInventoryResponse, UpdateProductRequest, ) +from .project import AlertConfig, LoggingConfig, Project +from .project_service import ( + AcceptTermsRequest, + EnrollSolutionMetadata, + EnrollSolutionRequest, + EnrollSolutionResponse, + GetAlertConfigRequest, + GetLoggingConfigRequest, + GetProjectRequest, + ListEnrolledSolutionsRequest, + ListEnrolledSolutionsResponse, + UpdateAlertConfigRequest, + UpdateLoggingConfigRequest, +) from .promotion import Promotion from .purge_config import ( PurgeMetadata, @@ -180,6 +196,11 @@ ) __all__ = ( + "Branch", + "BranchView", + "GetBranchRequest", + "ListBranchesRequest", + "ListBranchesResponse", "AttributesConfig", "Catalog", "CatalogAttribute", @@ -298,6 +319,20 @@ "SetInventoryRequest", "SetInventoryResponse", "UpdateProductRequest", + "AlertConfig", + "LoggingConfig", + "Project", + "AcceptTermsRequest", + "EnrollSolutionMetadata", + "EnrollSolutionRequest", + "EnrollSolutionResponse", + "GetAlertConfigRequest", + "GetLoggingConfigRequest", + "GetProjectRequest", + "ListEnrolledSolutionsRequest", + "ListEnrolledSolutionsResponse", + "UpdateAlertConfigRequest", + "UpdateLoggingConfigRequest", "Promotion", "PurgeMetadata", "PurgeProductsMetadata", diff --git a/packages/google-cloud-retail/google/cloud/retail_v2alpha/types/branch.py b/packages/google-cloud-retail/google/cloud/retail_v2alpha/types/branch.py new file mode 100644 index 000000000000..18bc4a6a677f --- /dev/null +++ b/packages/google-cloud-retail/google/cloud/retail_v2alpha/types/branch.py @@ -0,0 +1,329 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +from google.protobuf import timestamp_pb2 # type: ignore +import proto # type: ignore + +from google.cloud.retail_v2alpha.types import product + +__protobuf__ = proto.module( + package="google.cloud.retail.v2alpha", + manifest={ + "BranchView", + "Branch", + }, +) + + +class BranchView(proto.Enum): + r"""A view that specifies different level of fields of a + [Branch][google.cloud.retail.v2alpha.Branch] to show in responses. + + Values: + BRANCH_VIEW_UNSPECIFIED (0): + The value when it's unspecified. This + defaults to the BASIC view. + BRANCH_VIEW_BASIC (1): + Includes basic metadata about the branch, but not + statistical fields. See documentation of fields of + [Branch][google.cloud.retail.v2alpha.Branch] to find what + fields are excluded from BASIC view. + BRANCH_VIEW_FULL (2): + Includes all fields of a + [Branch][google.cloud.retail.v2alpha.Branch]. 
+ """ + BRANCH_VIEW_UNSPECIFIED = 0 + BRANCH_VIEW_BASIC = 1 + BRANCH_VIEW_FULL = 2 + + +class Branch(proto.Message): + r"""A data branch that stores + [Product][google.cloud.retail.v2alpha.Product]s. + + Attributes: + name (str): + Immutable. Full resource name of the branch, such as + ``projects/*/locations/global/catalogs/default_catalog/branches/branch_id``. + display_name (str): + Output only. Human readable name of the + branch to display in the UI. + is_default (bool): + Output only. Indicates whether this branch is + set as the default branch of its parent catalog. + last_product_import_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. Timestamp of last import through + [ProductService.ImportProducts][google.cloud.retail.v2alpha.ProductService.ImportProducts]. + Empty value means no import has been made to this branch. + product_count_stats (MutableSequence[google.cloud.retail_v2alpha.types.Branch.ProductCountStatistic]): + Output only. Statistics for number of products in the + branch, provided for different + [scopes][google.cloud.retail.v2alpha.Branch.ProductCountStatistic.ProductCountScope]. + + This field is not populated in [BranchView.BASIC][] view. + quality_metrics (MutableSequence[google.cloud.retail_v2alpha.types.Branch.QualityMetric]): + Output only. The quality metrics measured among products of + this branch. + + See + [QualityMetric.requirement_key][google.cloud.retail.v2alpha.Branch.QualityMetric.requirement_key] + for supported metrics. Metrics could be missing if failed to + retrieve. + + This field is not populated in [BranchView.BASIC][] view. + """ + + class ProductCountStatistic(proto.Message): + r"""A statistic about the number of products in a branch. + + Attributes: + scope (google.cloud.retail_v2alpha.types.Branch.ProductCountStatistic.ProductCountScope): + [ProductCountScope] of the [counts]. + counts (MutableMapping[str, int]): + The number of products in + [scope][google.cloud.retail.v2alpha.Branch.ProductCountStatistic.scope] + broken down into different groups. + + The key is a group representing a set of products, and the + value is the number of products in that group. Note: keys in + this map may change over time. + + Possible keys: + + - "primary-in-stock", products have + [Product.Type.PRIMARY][google.cloud.retail.v2alpha.Product.Type.PRIMARY] + type and + [Product.Availability.IN_STOCK][google.cloud.retail.v2alpha.Product.Availability.IN_STOCK] + availability. + + - "primary-out-of-stock", products have + [Product.Type.PRIMARY][google.cloud.retail.v2alpha.Product.Type.PRIMARY] + type and + [Product.Availability.OUT_OF_STOCK][google.cloud.retail.v2alpha.Product.Availability.OUT_OF_STOCK] + availability. + + - "primary-preorder", products have + [Product.Type.PRIMARY][google.cloud.retail.v2alpha.Product.Type.PRIMARY] + type and + [Product.Availability.PREORDER][google.cloud.retail.v2alpha.Product.Availability.PREORDER] + availability. + + - "primary-backorder", products have + [Product.Type.PRIMARY][google.cloud.retail.v2alpha.Product.Type.PRIMARY] + type and + [Product.Availability.BACKORDER][google.cloud.retail.v2alpha.Product.Availability.BACKORDER] + availability. + + - "variant-in-stock", products have + [Product.Type.VARIANT][google.cloud.retail.v2alpha.Product.Type.VARIANT] + type and + [Product.Availability.IN_STOCK][google.cloud.retail.v2alpha.Product.Availability.IN_STOCK] + availability. 
+ + - "variant-out-of-stock", products have + [Product.Type.VARIANT][google.cloud.retail.v2alpha.Product.Type.VARIANT] + type and + [Product.Availability.OUT_OF_STOCK][google.cloud.retail.v2alpha.Product.Availability.OUT_OF_STOCK] + availability. + + - "variant-preorder", products have + [Product.Type.VARIANT][google.cloud.retail.v2alpha.Product.Type.VARIANT] + type and + [Product.Availability.PREORDER][google.cloud.retail.v2alpha.Product.Availability.PREORDER] + availability. + + - "variant-backorder", products have + [Product.Type.VARIANT][google.cloud.retail.v2alpha.Product.Type.VARIANT] + type and + [Product.Availability.BACKORDER][google.cloud.retail.v2alpha.Product.Availability.BACKORDER] + availability. + + - "price-discounted", products have + [Product.price_info.price] < + [Product.price_info.original_price]. + """ + + class ProductCountScope(proto.Enum): + r"""Scope of what products are included for this count. + + Values: + PRODUCT_COUNT_SCOPE_UNSPECIFIED (0): + Default value for enum. This value is not + used in the API response. + ALL_PRODUCTS (1): + Scope for all existing products in the + branch. Useful for understanding how many + products there are in a branch. + LAST_24_HOUR_UPDATE (2): + Scope for products created or updated in the + last 24 hours. + """ + PRODUCT_COUNT_SCOPE_UNSPECIFIED = 0 + ALL_PRODUCTS = 1 + LAST_24_HOUR_UPDATE = 2 + + scope: "Branch.ProductCountStatistic.ProductCountScope" = proto.Field( + proto.ENUM, + number=1, + enum="Branch.ProductCountStatistic.ProductCountScope", + ) + counts: MutableMapping[str, int] = proto.MapField( + proto.STRING, + proto.INT64, + number=2, + ) + + class QualityMetric(proto.Message): + r"""Metric measured on a group of + [Product][google.cloud.retail.v2alpha.Product]s against a certain + quality requirement. Contains the number of products that pass the + check and the number of products that don't. + + Attributes: + requirement_key (str): + The key that represents a quality requirement rule. + + Supported keys: + + - "has-valid-uri": product has a valid and accessible + [uri][google.cloud.retail.v2alpha.Product.uri]. + + - "available-expire-time-conformance": + [Product.available_time][google.cloud.retail.v2alpha.Product.available_time] + is early than "now", and + [Product.expire_time][google.cloud.retail.v2alpha.Product.expire_time] + is greater than "now". + + - "has-searchable-attributes": product has at least one + [attribute][google.cloud.retail.v2alpha.Product.attributes] + set to searchable. + + - "has-description": product has non-empty + [description][google.cloud.retail.v2alpha.Product.description]. + + - "has-at-least-bigram-title": Product + [title][google.cloud.retail.v2alpha.Product.title] has at + least two words. A comprehensive title helps to improve + search quality. + + - "variant-has-image": the + [variant][google.cloud.retail.v2alpha.Product.Type.VARIANT] + products has at least one + [image][google.cloud.retail.v2alpha.Product.images]. You + may ignore this metric if all your products are at + [primary][google.cloud.retail.v2alpha.Product.Type.PRIMARY] + level. + + - "variant-has-price-info": the + [variant][google.cloud.retail.v2alpha.Product.Type.VARIANT] + products has + [price_info][google.cloud.retail.v2alpha.Product.price_info] + set. You may ignore this metric if all your products are + at + [primary][google.cloud.retail.v2alpha.Product.Type.PRIMARY] + level. + + - "has-publish-time": product has non-empty + [publish_time][google.cloud.retail.v2alpha.Product.publish_time]. 
+ qualified_product_count (int): + Number of products passing the quality + requirement check. We only check searchable + products. + unqualified_product_count (int): + Number of products failing the quality + requirement check. We only check searchable + products. + suggested_quality_percent_threshold (float): + Value from 0 to 100 representing the suggested percentage of + products that meet the quality requirements to get good + search and recommendation performance. 100 \* + (qualified_product_count) / (qualified_product_count + + unqualified_product_count) should be greater or equal to + this suggestion. + unqualified_sample_products (MutableSequence[google.cloud.retail_v2alpha.types.Product]): + A list of a maximum of 100 sample products that do not + qualify for this requirement. + + This field is only populated in the response to + [BranchService.GetBranch][google.cloud.retail.v2alpha.BranchService.GetBranch] + API, and is always empty for + [BranchService.ListBranches][google.cloud.retail.v2alpha.BranchService.ListBranches]. + + Only the following fields are set in the + [Product][google.cloud.retail.v2alpha.Product]. + + - [Product.name][google.cloud.retail.v2alpha.Product.name] + - [Product.id][google.cloud.retail.v2alpha.Product.id] + - [Product.title][google.cloud.retail.v2alpha.Product.title] + """ + + requirement_key: str = proto.Field( + proto.STRING, + number=1, + ) + qualified_product_count: int = proto.Field( + proto.INT32, + number=2, + ) + unqualified_product_count: int = proto.Field( + proto.INT32, + number=3, + ) + suggested_quality_percent_threshold: float = proto.Field( + proto.DOUBLE, + number=4, + ) + unqualified_sample_products: MutableSequence[ + product.Product + ] = proto.RepeatedField( + proto.MESSAGE, + number=5, + message=product.Product, + ) + + name: str = proto.Field( + proto.STRING, + number=1, + ) + display_name: str = proto.Field( + proto.STRING, + number=2, + ) + is_default: bool = proto.Field( + proto.BOOL, + number=3, + ) + last_product_import_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=5, + message=timestamp_pb2.Timestamp, + ) + product_count_stats: MutableSequence[ProductCountStatistic] = proto.RepeatedField( + proto.MESSAGE, + number=7, + message=ProductCountStatistic, + ) + quality_metrics: MutableSequence[QualityMetric] = proto.RepeatedField( + proto.MESSAGE, + number=6, + message=QualityMetric, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-retail/google/cloud/retail_v2alpha/types/branch_service.py b/packages/google-cloud-retail/google/cloud/retail_v2alpha/types/branch_service.py new file mode 100644 index 000000000000..b23ee2c5a44e --- /dev/null +++ b/packages/google-cloud-retail/google/cloud/retail_v2alpha/types/branch_service.py @@ -0,0 +1,110 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + +from google.cloud.retail_v2alpha.types import branch + +__protobuf__ = proto.module( + package="google.cloud.retail.v2alpha", + manifest={ + "ListBranchesRequest", + "ListBranchesResponse", + "GetBranchRequest", + }, +) + + +class ListBranchesRequest(proto.Message): + r"""Request for + [BranchService.ListBranches][google.cloud.retail.v2alpha.BranchService.ListBranches] + method. + + Attributes: + parent (str): + Required. The parent catalog resource name. + view (google.cloud.retail_v2alpha.types.BranchView): + The view to apply to the returned + [Branch][google.cloud.retail.v2alpha.Branch]. Defaults to + [Branch.BranchView.BASIC] if unspecified. See documentation + of fields of [Branch][google.cloud.retail.v2alpha.Branch] to + find what fields are excluded from BASIC view. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + view: branch.BranchView = proto.Field( + proto.ENUM, + number=2, + enum=branch.BranchView, + ) + + +class ListBranchesResponse(proto.Message): + r"""Response for + [BranchService.ListBranches][google.cloud.retail.v2alpha.BranchService.ListBranches] + method. + + Attributes: + branches (MutableSequence[google.cloud.retail_v2alpha.types.Branch]): + The Branches. + """ + + branches: MutableSequence[branch.Branch] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=branch.Branch, + ) + + +class GetBranchRequest(proto.Message): + r"""Request for + [BranchService.GetBranch][google.cloud.retail.v2alpha.BranchService.GetBranch] + method. + + Attributes: + name (str): + Required. The name of the branch to retrieve. Format: + ``projects/*/locations/global/catalogs/default_catalog/branches/some_branch_id``. + + "default_branch" can be used as a special branch_id, it + returns the default branch that has been set for the + catalog. + view (google.cloud.retail_v2alpha.types.BranchView): + The view to apply to the returned + [Branch][google.cloud.retail.v2alpha.Branch]. Defaults to + [Branch.BranchView.BASIC] if unspecified. See documentation + of fields of [Branch][google.cloud.retail.v2alpha.Branch] to + find what fields are excluded from BASIC view. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + view: branch.BranchView = proto.Field( + proto.ENUM, + number=2, + enum=branch.BranchView, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-retail/google/cloud/retail_v2alpha/types/catalog.py b/packages/google-cloud-retail/google/cloud/retail_v2alpha/types/catalog.py index 479bec13352f..82ffd028d04c 100644 --- a/packages/google-cloud-retail/google/cloud/retail_v2alpha/types/catalog.py +++ b/packages/google-cloud-retail/google/cloud/retail_v2alpha/types/catalog.py @@ -17,6 +17,7 @@ from typing import MutableMapping, MutableSequence +from google.protobuf import timestamp_pb2 # type: ignore import proto # type: ignore from google.cloud.retail_v2alpha.types import common, import_config @@ -168,7 +169,10 @@ class CatalogAttribute(proto.Message): faceted, or boosted in [SearchService.Search][google.cloud.retail.v2alpha.SearchService.Search]. - Must be specified, otherwise throws INVALID_FORMAT error. + Must be specified when + [AttributesConfig.attribute_config_level][google.cloud.retail.v2alpha.AttributesConfig.attribute_config_level] + is CATALOG_LEVEL_ATTRIBUTE_CONFIG, otherwise throws + INVALID_FORMAT error. 
dynamic_facetable_option (google.cloud.retail_v2alpha.types.CatalogAttribute.DynamicFacetableOption): If DYNAMIC_FACETABLE_ENABLED, attribute values are available for dynamic facet. Could only be DYNAMIC_FACETABLE_DISABLED @@ -191,7 +195,10 @@ class CatalogAttribute(proto.Message): as there are no text values associated to numerical attributes. - Must be specified, otherwise throws INVALID_FORMAT error. + Must be specified, when + [AttributesConfig.attribute_config_level][google.cloud.retail.v2alpha.AttributesConfig.attribute_config_level] + is CATALOG_LEVEL_ATTRIBUTE_CONFIG, otherwise throws + INVALID_FORMAT error. recommendations_filtering_option (google.cloud.retail_v2alpha.types.RecommendationsFilteringOption): When [AttributesConfig.attribute_config_level][google.cloud.retail.v2alpha.AttributesConfig.attribute_config_level] @@ -211,6 +218,8 @@ class CatalogAttribute(proto.Message): the search results. If unset, the server behavior defaults to [RETRIEVABLE_DISABLED][google.cloud.retail.v2alpha.CatalogAttribute.RetrievableOption.RETRIEVABLE_DISABLED]. + facet_config (google.cloud.retail_v2alpha.types.CatalogAttribute.FacetConfig): + Contains facet options. """ class AttributeType(proto.Enum): @@ -312,6 +321,214 @@ class RetrievableOption(proto.Enum): RETRIEVABLE_ENABLED = 1 RETRIEVABLE_DISABLED = 2 + class FacetConfig(proto.Message): + r"""Possible options for the facet that corresponds to the + current attribute config. + + Attributes: + facet_intervals (MutableSequence[google.cloud.retail_v2alpha.types.Interval]): + If you don't set the facet + [SearchRequest.FacetSpec.FacetKey.intervals][google.cloud.retail.v2alpha.SearchRequest.FacetSpec.FacetKey.intervals] + in the request to a numerical attribute, then we use the + computed intervals with rounded bounds obtained from all its + product numerical attribute values. The computed intervals + might not be ideal for some attributes. Therefore, we give + you the option to overwrite them with the facet_intervals + field. The maximum of facet intervals per + [CatalogAttribute][google.cloud.retail.v2alpha.CatalogAttribute] + is 40. Each interval must have a lower bound or an upper + bound. If both bounds are provided, then the lower bound + must be smaller or equal than the upper bound. + ignored_facet_values (MutableSequence[google.cloud.retail_v2alpha.types.CatalogAttribute.FacetConfig.IgnoredFacetValues]): + Each instance represents a list of attribute values to + ignore as facet values for a specific time range. The + maximum number of instances per + [CatalogAttribute][google.cloud.retail.v2alpha.CatalogAttribute] + is 25. + merged_facet_values (MutableSequence[google.cloud.retail_v2alpha.types.CatalogAttribute.FacetConfig.MergedFacetValue]): + Each instance replaces a list of facet values by a merged + facet value. If a facet value is not in any list, then it + will stay the same. To avoid conflicts, only paths of length + 1 are accepted. In other words, if "dark_blue" merged into + "BLUE", then the latter can't merge into "blues" because + this would create a path of length 2. The maximum number of + instances of MergedFacetValue per + [CatalogAttribute][google.cloud.retail.v2alpha.CatalogAttribute] + is 100. This feature is available only for textual custom + attributes. + merged_facet (google.cloud.retail_v2alpha.types.CatalogAttribute.FacetConfig.MergedFacet): + Use this field only if you want to merge a + facet key into another facet key. 
+ rerank_config (google.cloud.retail_v2alpha.types.CatalogAttribute.FacetConfig.RerankConfig): + Set this field only if you want to rerank + based on facet values engaged by the user for + the current key. This option is only possible + for custom facetable textual keys. + """ + + class IgnoredFacetValues(proto.Message): + r"""[Facet + values][google.cloud.retail.v2alpha.SearchResponse.Facet.values] to + ignore on [facets][google.cloud.retail.v2alpha.SearchResponse.Facet] + during the specified time range for the given + [SearchResponse.Facet.key][google.cloud.retail.v2alpha.SearchResponse.Facet.key] + attribute. + + Attributes: + values (MutableSequence[str]): + List of facet values to ignore for the + following time range. The facet values are the + same as the attribute values. There is a limit + of 10 values per instance of IgnoredFacetValues. + Each value can have at most 128 characters. + start_time (google.protobuf.timestamp_pb2.Timestamp): + Time range for the current list of facet + values to ignore. If multiple time ranges are + specified for an facet value for the current + attribute, consider all of them. If both are + empty, ignore always. If start time and end time + are set, then start time must be before end + time. + If start time is not empty and end time is + empty, then will ignore these facet values after + the start time. + end_time (google.protobuf.timestamp_pb2.Timestamp): + If start time is empty and end time is not + empty, then ignore these facet values before end + time. + """ + + values: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=1, + ) + start_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + end_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=3, + message=timestamp_pb2.Timestamp, + ) + + class MergedFacetValue(proto.Message): + r"""Replaces a set of textual facet values by the same (possibly + different) merged facet value. Each facet value should appear at + most once as a value per + [CatalogAttribute][google.cloud.retail.v2alpha.CatalogAttribute]. + This feature is available only for textual custom attributes. + + Attributes: + values (MutableSequence[str]): + All the facet values that are replaces by the same + [merged_value][google.cloud.retail.v2alpha.CatalogAttribute.FacetConfig.MergedFacetValue.merged_value] + that follows. The maximum number of values per + MergedFacetValue is 25. Each value can have up to 128 + characters. + merged_value (str): + All the previous values are replaced by this merged facet + value. This merged_value must be non-empty and can have up + to 128 characters. + """ + + values: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=1, + ) + merged_value: str = proto.Field( + proto.STRING, + number=2, + ) + + class MergedFacet(proto.Message): + r"""The current facet key (i.e. attribute config) maps into the + [merged_facet_key][google.cloud.retail.v2alpha.CatalogAttribute.FacetConfig.MergedFacet.merged_facet_key]. + A facet key can have at most one child. The current facet key and + the merged facet key need both to be textual custom attributes or + both numerical custom attributes (same type). + + Attributes: + merged_facet_key (str): + The merged facet key should be a valid facet + key that is different than the facet key of the + current catalog attribute. We refer this is + merged facet key as the child of the current + catalog attribute. 
This merged facet key can't + be a parent of another facet key (i.e. no + directed path of length 2). This merged facet + key needs to be either a textual custom + attribute or a numerical custom attribute. + """ + + merged_facet_key: str = proto.Field( + proto.STRING, + number=1, + ) + + class RerankConfig(proto.Message): + r"""Options to rerank based on facet values engaged by the user for the + current key. That key needs to be a custom textual key and + facetable. To use this control, you also need to pass all the facet + keys engaged by the user in the request using the field + [SearchRequest.FacetSpec]. In particular, if you don't pass the + facet keys engaged that you want to rerank on, this control won't be + effective. Moreover, to obtain better results, the facet values that + you want to rerank on should be close to English (ideally made of + words, underscores, and spaces). + + Attributes: + rerank_facet (bool): + If set to true, then we also rerank the + dynamic facets based on the facet values engaged + by the user for the current attribute key during + serving. + facet_values (MutableSequence[str]): + If empty, rerank on all facet values for the + current key. Otherwise, will rerank on the facet + values from this list only. + """ + + rerank_facet: bool = proto.Field( + proto.BOOL, + number=1, + ) + facet_values: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=2, + ) + + facet_intervals: MutableSequence[common.Interval] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=common.Interval, + ) + ignored_facet_values: MutableSequence[ + "CatalogAttribute.FacetConfig.IgnoredFacetValues" + ] = proto.RepeatedField( + proto.MESSAGE, + number=2, + message="CatalogAttribute.FacetConfig.IgnoredFacetValues", + ) + merged_facet_values: MutableSequence[ + "CatalogAttribute.FacetConfig.MergedFacetValue" + ] = proto.RepeatedField( + proto.MESSAGE, + number=3, + message="CatalogAttribute.FacetConfig.MergedFacetValue", + ) + merged_facet: "CatalogAttribute.FacetConfig.MergedFacet" = proto.Field( + proto.MESSAGE, + number=4, + message="CatalogAttribute.FacetConfig.MergedFacet", + ) + rerank_config: "CatalogAttribute.FacetConfig.RerankConfig" = proto.Field( + proto.MESSAGE, + number=5, + message="CatalogAttribute.FacetConfig.RerankConfig", + ) + key: str = proto.Field( proto.STRING, number=1, @@ -357,6 +574,11 @@ class RetrievableOption(proto.Enum): number=12, enum=RetrievableOption, ) + facet_config: FacetConfig = proto.Field( + proto.MESSAGE, + number=13, + message=FacetConfig, + ) class AttributesConfig(proto.Message): @@ -449,7 +671,7 @@ class CompletionConfig(proto.Message): Can use [GetOperation][google.longrunning.Operations.GetOperation] - API to retrieve the latest state of the Long Running + API method to retrieve the latest state of the Long Running Operation. denylist_input_config (google.cloud.retail_v2alpha.types.CompletionDataInputConfig): Output only. The source data for the latest @@ -526,12 +748,12 @@ class CompletionConfig(proto.Message): class MerchantCenterLink(proto.Message): r"""Represents a link between a Merchant Center account and a - branch. Once a link is established, products from the linked - merchant center account will be streamed to the linked branch. + branch. After a link is established, products from the linked + Merchant Center account are streamed to the linked branch. Attributes: merchant_center_account_id (int): - Required. The linked `Merchant center account + Required. The linked `Merchant Center account ID `__. 
The account must be a standalone account or a sub-account of a MCA. @@ -542,7 +764,7 @@ class MerchantCenterLink(proto.Message): configured default branch. However, changing the default branch later on won't change the linked branch here. - A single branch ID can only have one linked merchant center + A single branch ID can only have one linked Merchant Center account ID. destinations (MutableSequence[str]): String representing the destination to import for, all if @@ -661,10 +883,10 @@ class Catalog(proto.Message): Required. The product level configuration. merchant_center_linking_config (google.cloud.retail_v2alpha.types.MerchantCenterLinkingConfig): The Merchant Center linking configuration. - Once a link is added, the data stream from + After a link is added, the data stream from Merchant Center to Cloud Retail will be enabled automatically. The requester must have access to - the merchant center account in order to make + the Merchant Center account in order to make changes to this field. """ diff --git a/packages/google-cloud-retail/google/cloud/retail_v2alpha/types/common.py b/packages/google-cloud-retail/google/cloud/retail_v2alpha/types/common.py index a5d5ef6d6176..5ef0e4c47aac 100644 --- a/packages/google-cloud-retail/google/cloud/retail_v2alpha/types/common.py +++ b/packages/google-cloud-retail/google/cloud/retail_v2alpha/types/common.py @@ -137,6 +137,11 @@ class Condition(proto.Message): Range of time(s) specifying when Condition is active. Condition true if any time range matches. + page_categories (MutableSequence[str]): + Used to support browse uses cases. A list (up to 10 entries) + of categories or departments. The format should be the same + as + [UserEvent.page_categories][google.cloud.retail.v2alpha.UserEvent.page_categories]; """ class QueryTerm(proto.Message): @@ -197,6 +202,10 @@ class TimeRange(proto.Message): number=3, message=TimeRange, ) + page_categories: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=4, + ) class Rule(proto.Message): @@ -252,6 +261,16 @@ class Rule(proto.Message): Treats a set of terms as synonyms of one another. + This field is a member of `oneof`_ ``action``. + force_return_facet_action (google.cloud.retail_v2alpha.types.Rule.ForceReturnFacetAction): + Force returns an attribute as a facet in the + request. + + This field is a member of `oneof`_ ``action``. + remove_facet_action (google.cloud.retail_v2alpha.types.Rule.RemoveFacetAction): + Remove an attribute as a facet in the request + (if present). + This field is a member of `oneof`_ ``action``. condition (google.cloud.retail_v2alpha.types.Condition): Required. The condition that triggers the @@ -323,6 +342,7 @@ class FilterAction(proto.Message): provided with the SearchRequest. The AND operator is used to combine the query's existing filters with the filter rule(s). NOTE: May result in 0 results when filters conflict. + - Action Result: Filters the returned objects to be ONLY those that passed the filter. @@ -335,8 +355,8 @@ class FilterAction(proto.Message): must be set. - Filter syntax is identical to [SearchRequest.filter][google.cloud.retail.v2alpha.SearchRequest.filter]. - See more details at the Retail Search `user - guide `__. + For more information, see + `Filter `__. - To filter products with product ID "product_1" or "product_2", and color "Red" or "Blue": *(id: ANY("product_1", "product_2"))* *AND* *(colorFamilies: @@ -351,11 +371,8 @@ class FilterAction(proto.Message): class RedirectAction(proto.Message): r"""Redirects a shopper to a specific page. 
-
-        -  Rule Condition:
-
-           -  Must specify
-              [Condition.query_terms][google.cloud.retail.v2alpha.Condition.query_terms].
-
+        -  Rule Condition: Must specify
+           [Condition.query_terms][google.cloud.retail.v2alpha.Condition.query_terms].
         -  Action Input: Request Query
         -  Action Result: Redirects shopper to provided uri.
@@ -495,6 +512,108 @@ class IgnoreAction(proto.Message):
             number=1,
         )
 
+    class ForceReturnFacetAction(proto.Message):
+        r"""Force returns an attribute/facet in the request around a certain
+        position or above.
+
+        -  Rule Condition: Must specify non-empty
+           [Condition.query_terms][google.cloud.retail.v2alpha.Condition.query_terms]
+           (for search only) or
+           [Condition.page_categories][google.cloud.retail.v2alpha.Condition.page_categories]
+           (for browse only), but can't specify both.
+
+        -  Action Inputs: attribute name, position
+
+        -  Action Result: Will force return a facet key around a certain
+           position or above if the condition is satisfied.
+
+        Example: Suppose the query is "shoes", the
+        [Condition.query_terms][google.cloud.retail.v2alpha.Condition.query_terms]
+        is "shoes", the
+        [ForceReturnFacetAction.FacetPositionAdjustment.attribute_name][google.cloud.retail.v2alpha.Rule.ForceReturnFacetAction.FacetPositionAdjustment.attribute_name]
+        is "size" and the
+        [ForceReturnFacetAction.FacetPositionAdjustment.position][google.cloud.retail.v2alpha.Rule.ForceReturnFacetAction.FacetPositionAdjustment.position]
+        is 8.
+
+        Two cases: a) If the facet key "size" is not already in the top 8
+        slots, then the facet "size" will appear at a position close to 8.
+        b) If the facet key "size" is among the top 8 positions in the
+        request, then it will stay at its current rank.
+
+        Attributes:
+            facet_position_adjustments (MutableSequence[google.cloud.retail_v2alpha.types.Rule.ForceReturnFacetAction.FacetPositionAdjustment]):
+                Each instance corresponds to a force return
+                attribute for the given condition. There can't
+                be more than 3 instances here.
+        """
+
+        class FacetPositionAdjustment(proto.Message):
+            r"""Each facet position adjustment consists of a single attribute
+            name (i.e. facet key) along with a specified position.
+
+            Attributes:
+                attribute_name (str):
+                    The attribute name to force return as a
+                    facet. Each attribute name should be a valid
+                    attribute name, be non-empty and contain at most
+                    80 characters.
+                position (int):
+                    This is the position in the request as
+                    explained above. It should be strictly positive
+                    and at most 100.
+            """
+
+            attribute_name: str = proto.Field(
+                proto.STRING,
+                number=1,
+            )
+            position: int = proto.Field(
+                proto.INT32,
+                number=2,
+            )
+
+        facet_position_adjustments: MutableSequence[
+            "Rule.ForceReturnFacetAction.FacetPositionAdjustment"
+        ] = proto.RepeatedField(
+            proto.MESSAGE,
+            number=1,
+            message="Rule.ForceReturnFacetAction.FacetPositionAdjustment",
+        )
+
+    class RemoveFacetAction(proto.Message):
+        r"""Removes an attribute/facet in the request if it is present.
+
+        -  Rule Condition: Must specify non-empty
+           [Condition.query_terms][google.cloud.retail.v2alpha.Condition.query_terms]
+           (for search only) or
+           [Condition.page_categories][google.cloud.retail.v2alpha.Condition.page_categories]
+           (for browse only), but can't specify both.
+
+        -  Action Input: attribute name
+
+        -  Action Result: Will remove the attribute (as a facet) from the
+           request if it is present.
+ + Example: Suppose the query is "shoes", the + [Condition.query_terms][google.cloud.retail.v2alpha.Condition.query_terms] + is "shoes" and the attribute name "size", then facet key "size" will + be removed from the request (if it is present). + + Attributes: + attribute_names (MutableSequence[str]): + The attribute names (i.e. facet keys) to + remove from the dynamic facets (if present in + the request). There can't be more 3 attribute + names. Each attribute name should be a valid + attribute name, be non-empty and contain at most + 80 characters. + """ + + attribute_names: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=1, + ) + boost_action: BoostAction = proto.Field( proto.MESSAGE, number=2, @@ -543,6 +662,18 @@ class IgnoreAction(proto.Message): oneof="action", message=TwowaySynonymsAction, ) + force_return_facet_action: ForceReturnFacetAction = proto.Field( + proto.MESSAGE, + number=12, + oneof="action", + message=ForceReturnFacetAction, + ) + remove_facet_action: RemoveFacetAction = proto.Field( + proto.MESSAGE, + number=13, + oneof="action", + message=RemoveFacetAction, + ) condition: "Condition" = proto.Field( proto.MESSAGE, number=1, diff --git a/packages/google-cloud-retail/google/cloud/retail_v2alpha/types/completion_service.py b/packages/google-cloud-retail/google/cloud/retail_v2alpha/types/completion_service.py index f8c1a1868a80..1db0c4f677b5 100644 --- a/packages/google-cloud-retail/google/cloud/retail_v2alpha/types/completion_service.py +++ b/packages/google-cloud-retail/google/cloud/retail_v2alpha/types/completion_service.py @@ -112,11 +112,10 @@ class CompleteQueryRequest(proto.Message): This field is only available for "cloud-retail" dataset. entity (str): - The entity for customers that may run multiple different - entities, domains, sites or regions, for example, - ``Google US``, ``Google Ads``, ``Waymo``, ``google.com``, - ``youtube.com``, etc. If this is set, it should be exactly - matched with + The entity for customers who run multiple entities, domains, + sites, or regions, for example, ``Google US``, + ``Google Ads``, ``Waymo``, ``google.com``, ``youtube.com``, + etc. If this is set, it must be an exact match with [UserEvent.entity][google.cloud.retail.v2alpha.UserEvent.entity] to get per-entity autocomplete results. """ @@ -173,10 +172,10 @@ class CompleteQueryResponse(proto.Message): for search events resulting from this completion, which enables accurate attribution of complete model performance. recent_search_results (MutableSequence[google.cloud.retail_v2alpha.types.CompleteQueryResponse.RecentSearchResult]): - Matched recent searches of this user. The maximum number of - recent searches is 10. This field is a restricted feature. - Contact Retail Search support team if you are interested in - enabling it. + Deprecated. Matched recent searches of this user. The + maximum number of recent searches is 10. This field is a + restricted feature. If you want to enable it, contact Retail + Search support. This feature is only available when [CompleteQueryRequest.visitor_id][google.cloud.retail.v2alpha.CompleteQueryRequest.visitor_id] @@ -229,14 +228,16 @@ class CompletionResult(proto.Message): search with this suggestion term for each facet. This is an experimental feature for limited - customers. Please reach out to the support team - if you would like to receive this information. + customers. If you want to receive this facet + information, reach out to the Retail support + team. 
total_product_count (int): Total number of products associated with a search with this suggestion. This is an experimental feature for limited - customers. Please reach out to the support team - if you would like to receive this information. + customers. If you want to receive this product + count information, reach out to the Retail + support team. """ suggestion: str = proto.Field( @@ -262,7 +263,7 @@ class CompletionResult(proto.Message): ) class RecentSearchResult(proto.Message): - r"""Recent search of this user. + r"""Deprecated: Recent search of this user. Attributes: recent_search (str): diff --git a/packages/google-cloud-retail/google/cloud/retail_v2alpha/types/import_config.py b/packages/google-cloud-retail/google/cloud/retail_v2alpha/types/import_config.py index 1551bb6b59e9..cfc43d67fe0e 100644 --- a/packages/google-cloud-retail/google/cloud/retail_v2alpha/types/import_config.py +++ b/packages/google-cloud-retail/google/cloud/retail_v2alpha/types/import_config.py @@ -276,7 +276,9 @@ class ImportProductsRequest(proto.Message): during the Import. update_mask (google.protobuf.field_mask_pb2.FieldMask): Indicates which fields in the provided imported ``products`` - to update. If not set, all fields are updated. + to update. If not set, all fields are updated. If provided, + only the existing product fields are updated. Missing + products will not be created. reconciliation_mode (google.cloud.retail_v2alpha.types.ImportProductsRequest.ReconciliationMode): The mode of reconciliation between existing products and the products to be imported. Defaults to @@ -291,10 +293,16 @@ class ImportProductsRequest(proto.Message): ``projects/{project}/topics/{topic}``. It has to be within the same project as [ImportProductsRequest.parent][google.cloud.retail.v2alpha.ImportProductsRequest.parent]. - Make sure that + Make sure that both + ``cloud-retail-customer-data-access@system.gserviceaccount.com`` + and ``service-@gcp-sa-retail.iam.gserviceaccount.com`` - has the ``pubsub.topics.publish`` IAM permission on the + have the ``pubsub.topics.publish`` IAM permission on the topic. + + Only supported when + [ImportProductsRequest.reconciliation_mode][google.cloud.retail.v2alpha.ImportProductsRequest.reconciliation_mode] + is set to ``FULL``. skip_default_branch_protection (bool): If true, this performs the FULL import even if it would delete a large proportion of the products in the default diff --git a/packages/google-cloud-retail/google/cloud/retail_v2alpha/types/merchant_center_account_link.py b/packages/google-cloud-retail/google/cloud/retail_v2alpha/types/merchant_center_account_link.py index a31a499c1ca9..5ce9491edc22 100644 --- a/packages/google-cloud-retail/google/cloud/retail_v2alpha/types/merchant_center_account_link.py +++ b/packages/google-cloud-retail/google/cloud/retail_v2alpha/types/merchant_center_account_link.py @@ -31,8 +31,8 @@ class MerchantCenterAccountLink(proto.Message): r"""Represents a link between a Merchant Center account and a - branch. Once a link is established, products from the linked - merchant center account will be streamed to the linked branch. + branch. After a link is established, products from the linked + Merchant Center account are streamed to the linked branch. Attributes: name (str): @@ -53,14 +53,14 @@ class MerchantCenterAccountLink(proto.Message): The account must be a standalone account or a sub-account of a MCA. branch_id (str): - Required. The branch id (e.g. 0/1/2) within the catalog that + Required. The branch ID (e.g. 
0/1/2) within the catalog that products from merchant_center_account_id are streamed to. When updating this field, an empty value will use the currently configured default branch. However, changing the default branch later on won't change the linked branch here. - A single branch id can only have one linked merchant center - account id. + A single branch ID can only have one linked Merchant Center + account ID. feed_label (str): The FeedLabel used to perform filtering. Note: this replaces `region_id `__. @@ -86,7 +86,10 @@ class MerchantCenterAccountLink(proto.Message): Output only. Represents the state of the link. project_id (str): - Output only. GCP project ID. + Output only. Google Cloud project ID. + source (str): + Optional. An optional arbitrary string that + could be used as a tag for tracking link source. """ class State(proto.Enum): @@ -165,6 +168,10 @@ class MerchantCenterFeedFilter(proto.Message): proto.STRING, number=9, ) + source: str = proto.Field( + proto.STRING, + number=10, + ) class CreateMerchantCenterAccountLinkMetadata(proto.Message): diff --git a/packages/google-cloud-retail/google/cloud/retail_v2alpha/types/merchant_center_account_link_service.py b/packages/google-cloud-retail/google/cloud/retail_v2alpha/types/merchant_center_account_link_service.py index 7cafec8cf07d..8d9588cd6977 100644 --- a/packages/google-cloud-retail/google/cloud/retail_v2alpha/types/merchant_center_account_link_service.py +++ b/packages/google-cloud-retail/google/cloud/retail_v2alpha/types/merchant_center_account_link_service.py @@ -43,7 +43,7 @@ class ListMerchantCenterAccountLinksRequest(proto.Message): parent (str): Required. The parent Catalog of the resource. It must match this format: - projects/{PROJECT_NUMBER}/locations/global/catalogs/{CATALOG_ID} + ``projects/{PROJECT_NUMBER}/locations/global/catalogs/{CATALOG_ID}`` """ parent: str = proto.Field( @@ -80,7 +80,7 @@ class CreateMerchantCenterAccountLinkRequest(proto.Message): parent (str): Required. The branch resource where this MerchantCenterAccountLink will be created. Format: - projects/{PROJECT_NUMBER}/locations/global/catalogs/{CATALOG_ID}} + ``projects/{PROJECT_NUMBER}/locations/global/catalogs/{CATALOG_ID}`` merchant_center_account_link (google.cloud.retail_v2alpha.types.MerchantCenterAccountLink): Required. The [MerchantCenterAccountLink][google.cloud.retail.v2alpha.MerchantCenterAccountLink] @@ -111,7 +111,7 @@ class DeleteMerchantCenterAccountLinkRequest(proto.Message): Attributes: name (str): Required. Full resource name. Format: - projects/{project_number}/locations/{location_id}/catalogs/{catalog_id}/merchantCenterAccountLinks/{merchant_center_account_link_id} + ``projects/{project_number}/locations/{location_id}/catalogs/{catalog_id}/merchantCenterAccountLinks/{merchant_center_account_link_id}`` """ name: str = proto.Field( diff --git a/packages/google-cloud-retail/google/cloud/retail_v2alpha/types/model.py b/packages/google-cloud-retail/google/cloud/retail_v2alpha/types/model.py index 163fec24cf15..ccccb18de456 100644 --- a/packages/google-cloud-retail/google/cloud/retail_v2alpha/types/model.py +++ b/packages/google-cloud-retail/google/cloud/retail_v2alpha/types/model.py @@ -157,6 +157,8 @@ class Model(proto.Message): Output only. The list of valid serving configs associated with the PageOptimizationConfig. + model_features_config (google.cloud.retail_v2alpha.types.Model.ModelFeaturesConfig): + Optional. Additional model features config. 
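Tying the Merchant Center link fields above together, a hedged sketch that sets the new ``source`` tag at creation time; the ``merchant_center_account_id`` value and its numeric type are assumptions (the field is elided here), and the parent string is a placeholder.

```python
# Illustrative sketch: create a Merchant Center link into branch "0" and tag
# where the link came from via the new `source` field. The account ID below is
# a placeholder and its numeric type is assumed.
from google.cloud import retail_v2alpha

link = retail_v2alpha.MerchantCenterAccountLink(
    merchant_center_account_id=123456789,
    branch_id="0",
    feed_label="US",
    source="partner-console",
)
request = retail_v2alpha.CreateMerchantCenterAccountLinkRequest(
    parent="projects/123/locations/global/catalogs/default_catalog",
    merchant_center_account_link=link,
)
```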
""" class ServingState(proto.Enum): @@ -242,6 +244,27 @@ class DataState(proto.Enum): DATA_OK = 1 DATA_ERROR = 2 + class ContextProductsType(proto.Enum): + r"""Use single or multiple context products for recommendations. + + Values: + CONTEXT_PRODUCTS_TYPE_UNSPECIFIED (0): + Unspecified default value, should never be explicitly set. + Defaults to + [MULTIPLE_CONTEXT_PRODUCTS][google.cloud.retail.v2alpha.Model.ContextProductsType.MULTIPLE_CONTEXT_PRODUCTS]. + SINGLE_CONTEXT_PRODUCT (1): + Use only a single product as context for the + recommendation. Typically used on pages like + add-to-cart or product details. + MULTIPLE_CONTEXT_PRODUCTS (2): + Use one or multiple products as context for + the recommendation. Typically used on shopping + cart pages. + """ + CONTEXT_PRODUCTS_TYPE_UNSPECIFIED = 0 + SINGLE_CONTEXT_PRODUCT = 1 + MULTIPLE_CONTEXT_PRODUCTS = 2 + class PageOptimizationConfig(proto.Message): r"""The PageOptimizationConfig for model training. @@ -512,6 +535,45 @@ class ServingConfigList(proto.Message): number=1, ) + class FrequentlyBoughtTogetherFeaturesConfig(proto.Message): + r"""Additional configs for the frequently-bought-together model + type. + + Attributes: + context_products_type (google.cloud.retail_v2alpha.types.Model.ContextProductsType): + Optional. Specifies the context of the model when it is used + in predict requests. Can only be set for the + ``frequently-bought-together`` type. If it isn't specified, + it defaults to + [MULTIPLE_CONTEXT_PRODUCTS][google.cloud.retail.v2alpha.Model.ContextProductsType.MULTIPLE_CONTEXT_PRODUCTS]. + """ + + context_products_type: "Model.ContextProductsType" = proto.Field( + proto.ENUM, + number=2, + enum="Model.ContextProductsType", + ) + + class ModelFeaturesConfig(proto.Message): + r"""Additional model features config. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + frequently_bought_together_config (google.cloud.retail_v2alpha.types.Model.FrequentlyBoughtTogetherFeaturesConfig): + Additional configs for + frequently-bought-together models. + + This field is a member of `oneof`_ ``type_dedicated_config``. + """ + + frequently_bought_together_config: "Model.FrequentlyBoughtTogetherFeaturesConfig" = proto.Field( + proto.MESSAGE, + number=1, + oneof="type_dedicated_config", + message="Model.FrequentlyBoughtTogetherFeaturesConfig", + ) + page_optimization_config: PageOptimizationConfig = proto.Field( proto.MESSAGE, number=17, @@ -583,6 +645,11 @@ class ServingConfigList(proto.Message): number=19, message=ServingConfigList, ) + model_features_config: ModelFeaturesConfig = proto.Field( + proto.MESSAGE, + number=22, + message=ModelFeaturesConfig, + ) __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-retail/google/cloud/retail_v2alpha/types/product.py b/packages/google-cloud-retail/google/cloud/retail_v2alpha/types/product.py index 42c105f91350..605ab0503ede 100644 --- a/packages/google-cloud-retail/google/cloud/retail_v2alpha/types/product.py +++ b/packages/google-cloud-retail/google/cloud/retail_v2alpha/types/product.py @@ -46,29 +46,25 @@ class Product(proto.Message): Attributes: expire_time (google.protobuf.timestamp_pb2.Timestamp): - The timestamp when this product becomes unavailable for - [SearchService.Search][google.cloud.retail.v2alpha.SearchService.Search]. 
- Note that this is only applicable to - [Type.PRIMARY][google.cloud.retail.v2alpha.Product.Type.PRIMARY] - and - [Type.COLLECTION][google.cloud.retail.v2alpha.Product.Type.COLLECTION], - and ignored for - [Type.VARIANT][google.cloud.retail.v2alpha.Product.Type.VARIANT]. + Note that this field is applied in the following ways: + + - If the [Product][google.cloud.retail.v2alpha.Product] is + already expired when it is uploaded, this product is not + indexed for search. + + - If the [Product][google.cloud.retail.v2alpha.Product] is + not expired when it is uploaded, only the + [Type.PRIMARY][google.cloud.retail.v2alpha.Product.Type.PRIMARY]'s + and + [Type.COLLECTION][google.cloud.retail.v2alpha.Product.Type.COLLECTION]'s + expireTime is respected, and + [Type.VARIANT][google.cloud.retail.v2alpha.Product.Type.VARIANT]'s + expireTime is not used. + In general, we suggest the users to delete the stale products explicitly, instead of using this field to determine staleness. - If it is set, the - [Product][google.cloud.retail.v2alpha.Product] is not - available for - [SearchService.Search][google.cloud.retail.v2alpha.SearchService.Search] - after - [expire_time][google.cloud.retail.v2alpha.Product.expire_time]. - However, the product can still be retrieved by - [ProductService.GetProduct][google.cloud.retail.v2alpha.ProductService.GetProduct] - and - [ProductService.ListProducts][google.cloud.retail.v2alpha.ProductService.ListProducts]. - [expire_time][google.cloud.retail.v2alpha.Product.expire_time] must be later than [available_time][google.cloud.retail.v2alpha.Product.available_time] @@ -221,7 +217,8 @@ class Product(proto.Message): INVALID_ARGUMENT error is returned. At most 250 values are allowed per - [Product][google.cloud.retail.v2alpha.Product]. Empty values + [Product][google.cloud.retail.v2alpha.Product] unless + overridden through the Google Cloud console. Empty values are not allowed. Each value must be a UTF-8 encoded string with a length limit of 5,000 characters. Otherwise, an INVALID_ARGUMENT error is returned. @@ -244,10 +241,10 @@ class Product(proto.Message): brands (MutableSequence[str]): The brands of the product. - A maximum of 30 brands are allowed. Each brand must be a - UTF-8 encoded string with a length limit of 1,000 - characters. Otherwise, an INVALID_ARGUMENT error is - returned. + A maximum of 30 brands are allowed unless overridden through + the Google Cloud console. Each brand must be a UTF-8 encoded + string with a length limit of 1,000 characters. Otherwise, + an INVALID_ARGUMENT error is returned. Corresponding properties: Google Merchant Center property `brand `__. diff --git a/packages/google-cloud-retail/google/cloud/retail_v2alpha/types/project.py b/packages/google-cloud-retail/google/cloud/retail_v2alpha/types/project.py new file mode 100644 index 000000000000..0e8eba624573 --- /dev/null +++ b/packages/google-cloud-retail/google/cloud/retail_v2alpha/types/project.py @@ -0,0 +1,279 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + +from google.cloud.retail_v2alpha.types import common + +__protobuf__ = proto.module( + package="google.cloud.retail.v2alpha", + manifest={ + "LoggingConfig", + "Project", + "AlertConfig", + }, +) + + +class LoggingConfig(proto.Message): + r"""Project level logging config to control what level of log + will be generated and written to Cloud Logging. + + Attributes: + name (str): + Required. Immutable. The name of the LoggingConfig singleton + resource. Format: `projects/*/loggingConfig` + default_log_generation_rule (google.cloud.retail_v2alpha.types.LoggingConfig.LogGenerationRule): + The log generation rule that applies by default to all + services supporting log generation. It can be overridden by + [ServiceLogGenerationRule][google.cloud.retail.v2alpha.LoggingConfig.ServiceLogGenerationRule] + for service level control. + service_log_generation_rules (MutableSequence[google.cloud.retail_v2alpha.types.LoggingConfig.ServiceLogGenerationRule]): + Controls logging configurations more granularly for each + supported service. + + This overrides the + [default_log_generation_rule][google.cloud.retail.v2alpha.LoggingConfig.default_log_generation_rule] + for the services specified. For those not mentioned, they + will fallback to the default log generation rule. + """ + + class LoggingLevel(proto.Enum): + r"""The setting to control log generation. + + Values: + LOGGING_LEVEL_UNSPECIFIED (0): + Default value. Defaults to ``LOG_FOR_WARNINGS_AND_ABOVE`` if + unset. + LOGGING_DISABLED (1): + No log will be generated and sent to Cloud + Logging. + LOG_ERRORS_AND_ABOVE (2): + Log for operations resulted in fatal error. + LOG_WARNINGS_AND_ABOVE (3): + In addition to ``LOG_ERRORS_AND_ABOVE``, also log for + operations that have soft errors, quality suggestions. + LOG_ALL (4): + Log all operations, including successful + ones. + """ + LOGGING_LEVEL_UNSPECIFIED = 0 + LOGGING_DISABLED = 1 + LOG_ERRORS_AND_ABOVE = 2 + LOG_WARNINGS_AND_ABOVE = 3 + LOG_ALL = 4 + + class LogGenerationRule(proto.Message): + r"""The logging configurations for services supporting log + generation. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + logging_level (google.cloud.retail_v2alpha.types.LoggingConfig.LoggingLevel): + The logging level. + + By default it is set to ``LOG_WARNINGS_AND_ABOVE``. + info_log_sample_rate (float): + The log sample rate for INFO level log entries. You can use + this to reduce the number of entries generated for INFO + level logs. + + DO NOT set this field if the + [logging_level][google.cloud.retail.v2alpha.LoggingConfig.LogGenerationRule.logging_level] + is not + [LoggingLevel.LOG_ALL][google.cloud.retail.v2alpha.LoggingConfig.LoggingLevel.LOG_ALL]. + Otherwise, an INVALID_ARGUMENT error is returned. + + Sample rate for INFO logs defaults to 1 when unset (generate + and send all INFO logs to Cloud Logging). Its value must be + greater than 0 and less than or equal to 1. + + This field is a member of `oneof`_ ``_info_log_sample_rate``. 
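A hedged sketch of a project-level logging configuration built from the messages above: warnings and above by default, with full logging and INFO sampling for one service. It assumes the new ``LoggingConfig`` types are exported at the package level like existing types, and uses the ``ServiceLogGenerationRule`` fields defined just below.

```python
# Sketch only: default rule logs warnings and above; PredictionService logs
# everything but samples INFO entries at 10%. info_log_sample_rate is only
# valid together with LOG_ALL, per the docstring above.
from google.cloud import retail_v2alpha

logging_config = retail_v2alpha.LoggingConfig(
    name="projects/123456/loggingConfig",
    default_log_generation_rule=retail_v2alpha.LoggingConfig.LogGenerationRule(
        logging_level=retail_v2alpha.LoggingConfig.LoggingLevel.LOG_WARNINGS_AND_ABOVE,
    ),
    service_log_generation_rules=[
        retail_v2alpha.LoggingConfig.ServiceLogGenerationRule(
            service_name="PredictionService",
            log_generation_rule=retail_v2alpha.LoggingConfig.LogGenerationRule(
                logging_level=retail_v2alpha.LoggingConfig.LoggingLevel.LOG_ALL,
                info_log_sample_rate=0.1,
            ),
        )
    ],
)
```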
+ """ + + logging_level: "LoggingConfig.LoggingLevel" = proto.Field( + proto.ENUM, + number=1, + enum="LoggingConfig.LoggingLevel", + ) + info_log_sample_rate: float = proto.Field( + proto.FLOAT, + number=2, + optional=True, + ) + + class ServiceLogGenerationRule(proto.Message): + r"""The granular logging configurations for supported services. + + Attributes: + service_name (str): + Required. Supported service names: + + "CatalogService", + "CompletionService", + "ControlService", + "MerchantCenterStreaming", + "ModelService", + "PredictionService", + "ProductService", + "ServingConfigService", + "UserEventService", + log_generation_rule (google.cloud.retail_v2alpha.types.LoggingConfig.LogGenerationRule): + The log generation rule that applies to this + service. + """ + + service_name: str = proto.Field( + proto.STRING, + number=1, + ) + log_generation_rule: "LoggingConfig.LogGenerationRule" = proto.Field( + proto.MESSAGE, + number=3, + message="LoggingConfig.LogGenerationRule", + ) + + name: str = proto.Field( + proto.STRING, + number=1, + ) + default_log_generation_rule: LogGenerationRule = proto.Field( + proto.MESSAGE, + number=2, + message=LogGenerationRule, + ) + service_log_generation_rules: MutableSequence[ + ServiceLogGenerationRule + ] = proto.RepeatedField( + proto.MESSAGE, + number=4, + message=ServiceLogGenerationRule, + ) + + +class Project(proto.Message): + r"""Metadata that describes a Cloud Retail Project. + + Attributes: + name (str): + Output only. Full resource name of the retail project, such + as ``projects/{project_id_or_number}/retailProject``. + enrolled_solutions (MutableSequence[google.cloud.retail_v2alpha.types.SolutionType]): + Output only. Retail API solutions that the + project has enrolled. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + enrolled_solutions: MutableSequence[common.SolutionType] = proto.RepeatedField( + proto.ENUM, + number=2, + enum=common.SolutionType, + ) + + +class AlertConfig(proto.Message): + r"""Project level alert config. + + Attributes: + name (str): + Required. Immutable. The name of the AlertConfig singleton + resource. Format: `projects/*/alertConfig` + alert_policies (MutableSequence[google.cloud.retail_v2alpha.types.AlertConfig.AlertPolicy]): + Alert policies for a customer. They must be unique by + [AlertPolicy.alert_group] + """ + + class AlertPolicy(proto.Message): + r"""Alert policy for a customer. + + Attributes: + alert_group (str): + The feature that provides alerting capability. Supported + value is only ``search-data-quality`` for now. + enroll_status (google.cloud.retail_v2alpha.types.AlertConfig.AlertPolicy.EnrollStatus): + The enrollment status of a customer. + recipients (MutableSequence[google.cloud.retail_v2alpha.types.AlertConfig.AlertPolicy.Recipient]): + Recipients for the alert policy. + One alert policy should not exceed 20 + recipients. + """ + + class EnrollStatus(proto.Enum): + r"""The enrollment status enum for alert policy. + + Values: + ENROLL_STATUS_UNSPECIFIED (0): + Default value. Used for customers who have + not responded to the alert policy. + ENROLLED (1): + Customer is enrolled in this policy. + DECLINED (2): + Customer declined this policy. + """ + ENROLL_STATUS_UNSPECIFIED = 0 + ENROLLED = 1 + DECLINED = 2 + + class Recipient(proto.Message): + r"""Recipient contact information. + + Attributes: + email_address (str): + Email address of the recipient. 
+ """ + + email_address: str = proto.Field( + proto.STRING, + number=1, + ) + + alert_group: str = proto.Field( + proto.STRING, + number=1, + ) + enroll_status: "AlertConfig.AlertPolicy.EnrollStatus" = proto.Field( + proto.ENUM, + number=2, + enum="AlertConfig.AlertPolicy.EnrollStatus", + ) + recipients: MutableSequence[ + "AlertConfig.AlertPolicy.Recipient" + ] = proto.RepeatedField( + proto.MESSAGE, + number=3, + message="AlertConfig.AlertPolicy.Recipient", + ) + + name: str = proto.Field( + proto.STRING, + number=1, + ) + alert_policies: MutableSequence[AlertPolicy] = proto.RepeatedField( + proto.MESSAGE, + number=2, + message=AlertPolicy, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-retail/google/cloud/retail_v2alpha/types/project_service.py b/packages/google-cloud-retail/google/cloud/retail_v2alpha/types/project_service.py new file mode 100644 index 000000000000..3e7de622b793 --- /dev/null +++ b/packages/google-cloud-retail/google/cloud/retail_v2alpha/types/project_service.py @@ -0,0 +1,262 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +from google.protobuf import field_mask_pb2 # type: ignore +import proto # type: ignore + +from google.cloud.retail_v2alpha.types import common +from google.cloud.retail_v2alpha.types import project as gcr_project + +__protobuf__ = proto.module( + package="google.cloud.retail.v2alpha", + manifest={ + "GetProjectRequest", + "AcceptTermsRequest", + "EnrollSolutionRequest", + "EnrollSolutionResponse", + "EnrollSolutionMetadata", + "ListEnrolledSolutionsRequest", + "ListEnrolledSolutionsResponse", + "GetLoggingConfigRequest", + "UpdateLoggingConfigRequest", + "GetAlertConfigRequest", + "UpdateAlertConfigRequest", + }, +) + + +class GetProjectRequest(proto.Message): + r"""Request for GetProject method. + + Attributes: + name (str): + Required. Full resource name of the project. Format: + ``projects/{project_number_or_id}/retailProject`` + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class AcceptTermsRequest(proto.Message): + r"""Request for AcceptTerms method. + + Attributes: + project (str): + Required. Full resource name of the project. Format: + ``projects/{project_number_or_id}/retailProject`` + """ + + project: str = proto.Field( + proto.STRING, + number=1, + ) + + +class EnrollSolutionRequest(proto.Message): + r"""Request for EnrollSolution method. + + Attributes: + project (str): + Required. Full resource name of parent. Format: + ``projects/{project_number_or_id}`` + solution (google.cloud.retail_v2alpha.types.SolutionType): + Required. Solution to enroll. 
+ """ + + project: str = proto.Field( + proto.STRING, + number=1, + ) + solution: common.SolutionType = proto.Field( + proto.ENUM, + number=2, + enum=common.SolutionType, + ) + + +class EnrollSolutionResponse(proto.Message): + r"""Response for EnrollSolution method. + + Attributes: + enrolled_solution (google.cloud.retail_v2alpha.types.SolutionType): + Retail API solution that the project has + enrolled. + """ + + enrolled_solution: common.SolutionType = proto.Field( + proto.ENUM, + number=1, + enum=common.SolutionType, + ) + + +class EnrollSolutionMetadata(proto.Message): + r"""Metadata related to the EnrollSolution method. + This will be returned by the + google.longrunning.Operation.metadata field. + + """ + + +class ListEnrolledSolutionsRequest(proto.Message): + r"""Request for ListEnrolledSolutions method. + + Attributes: + parent (str): + Required. Full resource name of parent. Format: + ``projects/{project_number_or_id}`` + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + + +class ListEnrolledSolutionsResponse(proto.Message): + r"""Response for ListEnrolledSolutions method. + + Attributes: + enrolled_solutions (MutableSequence[google.cloud.retail_v2alpha.types.SolutionType]): + Retail API solutions that the project has + enrolled. + """ + + enrolled_solutions: MutableSequence[common.SolutionType] = proto.RepeatedField( + proto.ENUM, + number=1, + enum=common.SolutionType, + ) + + +class GetLoggingConfigRequest(proto.Message): + r"""Request for + [ProjectService.GetLoggingConfig][google.cloud.retail.v2alpha.ProjectService.GetLoggingConfig] + method. + + Attributes: + name (str): + Required. Full LoggingConfig resource name. Format: + projects/{project_number}/loggingConfig + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class UpdateLoggingConfigRequest(proto.Message): + r"""Request for + [ProjectService.UpdateLoggingConfig][google.cloud.retail.v2alpha.ProjectService.UpdateLoggingConfig] + method. + + Attributes: + logging_config (google.cloud.retail_v2alpha.types.LoggingConfig): + Required. The + [LoggingConfig][google.cloud.retail.v2alpha.LoggingConfig] + to update. + + If the caller does not have permission to update the + [LoggingConfig][google.cloud.retail.v2alpha.LoggingConfig], + then a PERMISSION_DENIED error is returned. + + If the + [LoggingConfig][google.cloud.retail.v2alpha.LoggingConfig] + to update does not exist, a NOT_FOUND error is returned. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Indicates which fields in the provided + [LoggingConfig][google.cloud.retail.v2alpha.LoggingConfig] + to update. The following are the only supported fields: + + - [LoggingConfig.default_log_generation_rule][google.cloud.retail.v2alpha.LoggingConfig.default_log_generation_rule] + - [LoggingConfig.service_log_generation_rules][google.cloud.retail.v2alpha.LoggingConfig.service_log_generation_rules] + + If not set, all supported fields are updated. + """ + + logging_config: gcr_project.LoggingConfig = proto.Field( + proto.MESSAGE, + number=1, + message=gcr_project.LoggingConfig, + ) + update_mask: field_mask_pb2.FieldMask = proto.Field( + proto.MESSAGE, + number=2, + message=field_mask_pb2.FieldMask, + ) + + +class GetAlertConfigRequest(proto.Message): + r"""Request for + [ProjectService.GetAlertConfig][google.cloud.retail.v2alpha.ProjectService.GetAlertConfig] + method. + + Attributes: + name (str): + Required. Full AlertConfig resource name. 
Format: + projects/{project_number}/alertConfig + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class UpdateAlertConfigRequest(proto.Message): + r"""Request for + [ProjectService.UpdateAlertConfig][google.cloud.retail.v2alpha.ProjectService.UpdateAlertConfig] + method. + + Attributes: + alert_config (google.cloud.retail_v2alpha.types.AlertConfig): + Required. The + [AlertConfig][google.cloud.retail.v2alpha.AlertConfig] to + update. + + If the caller does not have permission to update the + [AlertConfig][google.cloud.retail.v2alpha.AlertConfig], then + a PERMISSION_DENIED error is returned. + + If the + [AlertConfig][google.cloud.retail.v2alpha.AlertConfig] to + update does not exist, a NOT_FOUND error is returned. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Indicates which fields in the provided + [AlertConfig][google.cloud.retail.v2alpha.AlertConfig] to + update. If not set, all supported fields are updated. + """ + + alert_config: gcr_project.AlertConfig = proto.Field( + proto.MESSAGE, + number=1, + message=gcr_project.AlertConfig, + ) + update_mask: field_mask_pb2.FieldMask = proto.Field( + proto.MESSAGE, + number=2, + message=field_mask_pb2.FieldMask, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-retail/google/cloud/retail_v2alpha/types/promotion.py b/packages/google-cloud-retail/google/cloud/retail_v2alpha/types/promotion.py index 0dfb06286c3c..90b02efd1752 100644 --- a/packages/google-cloud-retail/google/cloud/retail_v2alpha/types/promotion.py +++ b/packages/google-cloud-retail/google/cloud/retail_v2alpha/types/promotion.py @@ -40,8 +40,8 @@ class Promotion(proto.Message): ID_1_LIKE_THIS. Otherwise, an INVALID_ARGUMENT error is returned. - Google Merchant Center property - `promotion `__. + Corresponds to Google Merchant Center property + `promotion_id `__. """ promotion_id: str = proto.Field( diff --git a/packages/google-cloud-retail/google/cloud/retail_v2alpha/types/search_service.py b/packages/google-cloud-retail/google/cloud/retail_v2alpha/types/search_service.py index 1740890a6456..f9fc2a076ff0 100644 --- a/packages/google-cloud-retail/google/cloud/retail_v2alpha/types/search_service.py +++ b/packages/google-cloud-retail/google/cloud/retail_v2alpha/types/search_service.py @@ -50,7 +50,7 @@ class SearchRequest(proto.Message): or the name of the legacy placement resource, such as ``projects/*/locations/global/catalogs/default_catalog/placements/default_search``. This field is used to identify the serving config name and - the set of models that will be used to make the search. + the set of models that are used to make the search. branch (str): The branch resource name, such as ``projects/*/locations/global/catalogs/default_catalog/branches/0``. @@ -113,8 +113,8 @@ class SearchRequest(proto.Message): The filter syntax consists of an expression language for constructing a predicate from one or more fields of the products being filtered. Filter expression is - case-sensitive. See more details at this `user - guide `__. + case-sensitive. For more information, see + `Filter `__. If this field is unrecognizable, an INVALID_ARGUMENT is returned. @@ -123,24 +123,22 @@ class SearchRequest(proto.Message): search without checking any filters on the search page. The filter applied to every search request when quality - improvement such as query expansion is needed. 
For example, - if a query does not have enough results, an expanded query - with - [SearchRequest.canonical_filter][google.cloud.retail.v2alpha.SearchRequest.canonical_filter] - will be returned as a supplement of the original query. This - field is strongly recommended to achieve high search - quality. - - See - [SearchRequest.filter][google.cloud.retail.v2alpha.SearchRequest.filter] - for more details about filter syntax. + improvement such as query expansion is needed. In the case a + query does not have a sufficient amount of results this + filter will be used to determine whether or not to enable + the query expansion flow. The original filter will still be + used for the query expanded search. This field is strongly + recommended to achieve high search quality. + + For more information about filter syntax, see + [SearchRequest.filter][google.cloud.retail.v2alpha.SearchRequest.filter]. order_by (str): The order in which products are returned. Products can be ordered by a field in an [Product][google.cloud.retail.v2alpha.Product] object. Leave it unset if ordered by relevance. OrderBy expression is - case-sensitive. See more details at this `user - guide `__. + case-sensitive. For more information, see + `Order `__. If this field is unrecognizable, an INVALID_ARGUMENT is returned. @@ -159,9 +157,9 @@ class SearchRequest(proto.Message): facets. Notice that only textual facets can be dynamically generated. boost_spec (google.cloud.retail_v2alpha.types.SearchRequest.BoostSpec): - Boost specification to boost certain products. See more - details at this `user - guide `__. + Boost specification to boost certain products. For more + information, see `Boost + results `__. Notice that if both [ServingConfig.boost_control_ids][google.cloud.retail.v2alpha.ServingConfig.boost_control_ids] @@ -173,18 +171,18 @@ class SearchRequest(proto.Message): boost scores from all matched boost conditions. query_expansion_spec (google.cloud.retail_v2alpha.types.SearchRequest.QueryExpansionSpec): The query expansion specification that specifies the - conditions under which query expansion will occur. See more - details at this `user - guide `__. + conditions under which query expansion occurs. For more + information, see `Query + expansion `__. relevance_threshold (google.cloud.retail_v2alpha.types.SearchRequest.RelevanceThreshold): The relevance threshold of the search results. Defaults to [RelevanceThreshold.HIGH][google.cloud.retail.v2alpha.SearchRequest.RelevanceThreshold.HIGH], which means only the most relevant results are shown, and - the least number of results are returned. See more details - at this `user - guide `__. + the least number of results are returned. For more + information, see `Adjust result + size `__. variant_rollup_keys (MutableSequence[str]): The keys to fetch and rollup the matching [variant][google.cloud.retail.v2alpha.Product.Type.VARIANT] @@ -319,9 +317,9 @@ class SearchRequest(proto.Message): - Keys must start with a lowercase letter or international character. - See `Google Cloud - Document `__ - for more details. + For more information, see `Requirements for + labels `__ + in the Resource Manager documentation. spell_correction_spec (google.cloud.retail_v2alpha.types.SearchRequest.SpellCorrectionSpec): The spell correction specification that specifies the mode under which spell correction @@ -453,18 +451,17 @@ class FacetSpec(proto.Message): enable_dynamic_position (bool): Enables dynamic position for this facet. 
If set to true, the position of this facet among all facets in the response is - determined by Google Retail Search. It will be ordered - together with dynamic facets if dynamic facets is enabled. - If set to false, the position of this facet in the response - will be the same as in the request, and it will be ranked - before the facets with dynamic position enable and all - dynamic facets. + determined by Google Retail Search. It is ordered together + with dynamic facets if dynamic facets is enabled. If set to + false, the position of this facet in the response is the + same as in the request, and it is ranked before the facets + with dynamic position enable and all dynamic facets. For example, you may always want to have rating facet returned in the response, but it's not necessarily to always display the rating facet at the top. In that case, you can set enable_dynamic_position to true so that the position of - rating facet in response will be determined by Google Retail + rating facet in response is determined by Google Retail Search. Another example, assuming you have the following facets in @@ -476,14 +473,14 @@ class FacetSpec(proto.Message): - "brands", enable_dynamic_position = false - And also you have a dynamic facets enable, which will - generate a facet 'gender'. Then the final order of the - facets in the response can be ("price", "brands", "rating", - "gender") or ("price", "brands", "gender", "rating") depends - on how Google Retail Search orders "gender" and "rating" - facets. However, notice that "price" and "brands" will - always be ranked at 1st and 2nd position since their - enable_dynamic_position are false. + And also you have a dynamic facets enable, which generates a + facet "gender". Then, the final order of the facets in the + response can be ("price", "brands", "rating", "gender") or + ("price", "brands", "gender", "rating") depends on how + Google Retail Search orders "gender" and "rating" facets. + However, notice that "price" and "brands" are always ranked + at first and second position because their + enable_dynamic_position values are false. """ class FacetKey(proto.Message): @@ -543,15 +540,15 @@ class FacetKey(proto.Message): For all numerical facet keys that appear in the list of products from the catalog, the - percentiles 0, 10, 30, 50, 70, 90 and 100 are + percentiles 0, 10, 30, 50, 70, 90, and 100 are computed from their distribution weekly. If the model assigns a high score to a numerical facet key and its intervals are not specified in the - search request, these percentiles will become - the bounds for its intervals and will be - returned in the response. If the facet key - intervals are specified in the request, then the - specified intervals will be returned instead. + search request, these percentiles become the + bounds for its intervals and are returned in the + response. If the facet key intervals are + specified in the request, then the specified + intervals are returned instead. restricted_values (MutableSequence[str]): Only get facet for the given restricted values. For example, when using "pickupInStore" as key and set restricted values @@ -585,17 +582,16 @@ class FacetKey(proto.Message): "categories" has three values "Women > Shoe", "Women > Dress" and "Men > Shoe". If set "prefixes" to "Women", the "categories" facet - will give only "Women > Shoe" and "Women > - Dress". Only supported on textual fields. - Maximum is 10. + gives only "Women > Shoe" and "Women > Dress". + Only supported on textual fields. Maximum is 10. 
contains (MutableSequence[str]): Only get facet values that contains the given strings. For example, suppose "categories" has three values "Women > Shoe", "Women > Dress" and "Men > Shoe". If set "contains" to "Shoe", the - "categories" facet will give only "Women > Shoe" - and "Men > Shoe". Only supported on textual - fields. Maximum is 10. + "categories" facet gives only "Women > Shoe" and + "Men > Shoe". Only supported on textual fields. + Maximum is 10. case_insensitive (bool): True to make facet keys case insensitive when getting faceting values with prefixes or @@ -624,7 +620,7 @@ class FacetKey(proto.Message): [FacetSpec.FacetKey.restricted_values][google.cloud.retail.v2alpha.SearchRequest.FacetSpec.FacetKey.restricted_values]. query (str): The query that is used to compute facet for the given facet - key. When provided, it will override the default behavior of + key. When provided, it overrides the default behavior of facet computation. The query syntax is the same as a filter expression. See [SearchRequest.filter][google.cloud.retail.v2alpha.SearchRequest.filter] @@ -635,9 +631,9 @@ class FacetKey(proto.Message): In the response, [SearchResponse.Facet.values.value][google.cloud.retail.v2alpha.SearchResponse.Facet.FacetValue.value] - will be always "1" and + is always "1" and [SearchResponse.Facet.values.count][google.cloud.retail.v2alpha.SearchResponse.Facet.FacetValue.count] - will be the number of results that match the query. + is the number of results that match the query. For example, you can set a customized facet for "shipToStore", where @@ -645,7 +641,7 @@ class FacetKey(proto.Message): is "customizedShipToStore", and [FacetKey.query][google.cloud.retail.v2alpha.SearchRequest.FacetSpec.FacetKey.query] is "availability: ANY("IN_STOCK") AND shipToStore: - ANY("123")". Then the facet will count the products that are + ANY("123")". Then the facet counts the products that are both in stock and ship to store "123". return_min_max (bool): Returns the min and max value for each @@ -1410,7 +1406,9 @@ def raw_page(self): class ExperimentInfo(proto.Message): - r"""Metadata for active A/B testing [Experiments][]. + r"""Metadata for active A/B testing + [Experiment][google.cloud.retail.v2alpha.Experiment]. + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields @@ -1439,8 +1437,8 @@ class ServingConfigExperiment(proto.Message): ``projects/*/locations/*/catalogs/*/servingConfigs/*``. experiment_serving_config (str): The fully qualified resource name of the serving config - [VariantArm.serving_config_id][] responsible for generating - the search response. For example: + [Experiment.VariantArm.serving_config_id][google.cloud.retail.v2alpha.Experiment.VariantArm.serving_config_id] + responsible for generating the search response. For example: ``projects/*/locations/*/catalogs/*/servingConfigs/*``. 
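A hedged sketch of the customized ``shipToStore`` facet described above; the ``facet_key`` wrapper field on ``FacetSpec`` and the ``key`` field on ``FacetKey`` are assumed from the unchanged parts of this message.

```python
# Sketch only: a facet whose single value counts products that are both in
# stock and ship to store "123", mirroring the query example above.
from google.cloud import retail_v2alpha

facet_spec = retail_v2alpha.SearchRequest.FacetSpec(
    facet_key=retail_v2alpha.SearchRequest.FacetSpec.FacetKey(
        key="customizedShipToStore",
        query='availability: ANY("IN_STOCK") AND shipToStore: ANY("123")',
    ),
)
```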
""" diff --git a/packages/google-cloud-retail/google/cloud/retail_v2alpha/types/serving_config.py b/packages/google-cloud-retail/google/cloud/retail_v2alpha/types/serving_config.py index ddf649a612d3..62e7dc92f026 100644 --- a/packages/google-cloud-retail/google/cloud/retail_v2alpha/types/serving_config.py +++ b/packages/google-cloud-retail/google/cloud/retail_v2alpha/types/serving_config.py @@ -235,6 +235,10 @@ class ServingConfig(proto.Message): [solution_types][google.cloud.retail.v2alpha.ServingConfig.solution_types] is [SOLUTION_TYPE_RECOMMENDATION][google.cloud.retail.v2main.SolutionType.SOLUTION_TYPE_RECOMMENDATION]. + ignore_recs_denylist (bool): + When the flag is enabled, the products in the + denylist will not be filtered out in the + recommendation filtering results. personalization_spec (google.cloud.retail_v2alpha.types.SearchRequest.PersonalizationSpec): The specification for personalization spec. @@ -343,6 +347,10 @@ class DiversityType(proto.Enum): proto.STRING, number=16, ) + ignore_recs_denylist: bool = proto.Field( + proto.BOOL, + number=24, + ) personalization_spec: search_service.SearchRequest.PersonalizationSpec = ( proto.Field( proto.MESSAGE, diff --git a/packages/google-cloud-retail/google/cloud/retail_v2alpha/types/user_event.py b/packages/google-cloud-retail/google/cloud/retail_v2alpha/types/user_event.py index cacb6496f1e7..7c5ef8b61f18 100644 --- a/packages/google-cloud-retail/google/cloud/retail_v2alpha/types/user_event.py +++ b/packages/google-cloud-retail/google/cloud/retail_v2alpha/types/user_event.py @@ -44,6 +44,7 @@ class UserEvent(proto.Message): Required. User event type. Allowed values are: - ``add-to-cart``: Products being added to cart. + - ``remove-from-cart``: Products being removed from cart. - ``category-page-view``: Special pages such as sale or promotion pages viewed. - ``detail-page-view``: Products detail page viewed. @@ -295,8 +296,8 @@ class UserEvent(proto.Message): The entity for customers that may run multiple different entities, domains, sites or regions, for example, ``Google US``, ``Google Ads``, ``Waymo``, ``google.com``, - ``youtube.com``, etc. It is recommended to set this field to - get better per-entity search, completion and prediction + ``youtube.com``, etc. We recommend that you set this field + to get better per-entity search, completion, and prediction results. 
""" diff --git a/packages/google-cloud-retail/google/cloud/retail_v2beta/__init__.py b/packages/google-cloud-retail/google/cloud/retail_v2beta/__init__.py index 128970abc926..ce478497840c 100644 --- a/packages/google-cloud-retail/google/cloud/retail_v2beta/__init__.py +++ b/packages/google-cloud-retail/google/cloud/retail_v2beta/__init__.py @@ -171,6 +171,9 @@ from .types.promotion import Promotion from .types.purge_config import ( PurgeMetadata, + PurgeProductsMetadata, + PurgeProductsRequest, + PurgeProductsResponse, PurgeUserEventsRequest, PurgeUserEventsResponse, ) @@ -311,6 +314,9 @@ "Promotion", "PurchaseTransaction", "PurgeMetadata", + "PurgeProductsMetadata", + "PurgeProductsRequest", + "PurgeProductsResponse", "PurgeUserEventsRequest", "PurgeUserEventsResponse", "Rating", diff --git a/packages/google-cloud-retail/google/cloud/retail_v2beta/gapic_metadata.json b/packages/google-cloud-retail/google/cloud/retail_v2beta/gapic_metadata.json index 39759c99190c..8e2a46be9ba2 100644 --- a/packages/google-cloud-retail/google/cloud/retail_v2beta/gapic_metadata.json +++ b/packages/google-cloud-retail/google/cloud/retail_v2beta/gapic_metadata.json @@ -594,6 +594,11 @@ "list_products" ] }, + "PurgeProducts": { + "methods": [ + "purge_products" + ] + }, "RemoveFulfillmentPlaces": { "methods": [ "remove_fulfillment_places" @@ -654,6 +659,11 @@ "list_products" ] }, + "PurgeProducts": { + "methods": [ + "purge_products" + ] + }, "RemoveFulfillmentPlaces": { "methods": [ "remove_fulfillment_places" @@ -714,6 +724,11 @@ "list_products" ] }, + "PurgeProducts": { + "methods": [ + "purge_products" + ] + }, "RemoveFulfillmentPlaces": { "methods": [ "remove_fulfillment_places" diff --git a/packages/google-cloud-retail/google/cloud/retail_v2beta/gapic_version.py b/packages/google-cloud-retail/google/cloud/retail_v2beta/gapic_version.py index 558c8aab67c5..785067d93b3c 100644 --- a/packages/google-cloud-retail/google/cloud/retail_v2beta/gapic_version.py +++ b/packages/google-cloud-retail/google/cloud/retail_v2beta/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "0.0.0" # {x-release-please-version} +__version__ = "1.21.0" # {x-release-please-version} diff --git a/packages/google-cloud-retail/google/cloud/retail_v2beta/services/product_service/async_client.py b/packages/google-cloud-retail/google/cloud/retail_v2beta/services/product_service/async_client.py index 00ce917fe5a2..3afeb0d25350 100644 --- a/packages/google-cloud-retail/google/cloud/retail_v2beta/services/product_service/async_client.py +++ b/packages/google-cloud-retail/google/cloud/retail_v2beta/services/product_service/async_client.py @@ -53,10 +53,10 @@ from google.protobuf import wrappers_pb2 # type: ignore from google.cloud.retail_v2beta.services.product_service import pagers +from google.cloud.retail_v2beta.types import product_service, promotion, purge_config from google.cloud.retail_v2beta.types import common, import_config from google.cloud.retail_v2beta.types import product from google.cloud.retail_v2beta.types import product as gcr_product -from google.cloud.retail_v2beta.types import product_service, promotion from .client import ProductServiceClient from .transports.base import DEFAULT_CLIENT_INFO, ProductServiceTransport @@ -931,6 +931,125 @@ async def sample_delete_product(): metadata=metadata, ) + async def purge_products( + self, + request: Optional[Union[purge_config.PurgeProductsRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Permanently deletes all selected + [Product][google.cloud.retail.v2beta.Product]s under a branch. + + This process is asynchronous. If the request is valid, the + removal will be enqueued and processed offline. Depending on the + number of [Product][google.cloud.retail.v2beta.Product]s, this + operation could take hours to complete. Before the operation + completes, some [Product][google.cloud.retail.v2beta.Product]s + may still be returned by + [ProductService.GetProduct][google.cloud.retail.v2beta.ProductService.GetProduct] + or + [ProductService.ListProducts][google.cloud.retail.v2beta.ProductService.ListProducts]. + + Depending on the number of + [Product][google.cloud.retail.v2beta.Product]s, this operation + could take hours to complete. To get a sample of + [Product][google.cloud.retail.v2beta.Product]s that would be + deleted, set + [PurgeProductsRequest.force][google.cloud.retail.v2beta.PurgeProductsRequest.force] + to false. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import retail_v2beta + + async def sample_purge_products(): + # Create a client + client = retail_v2beta.ProductServiceAsyncClient() + + # Initialize request argument(s) + request = retail_v2beta.PurgeProductsRequest( + parent="parent_value", + filter="filter_value", + ) + + # Make the request + operation = client.purge_products(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.retail_v2beta.types.PurgeProductsRequest, dict]]): + The request object. Request message for PurgeProducts + method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.retail_v2beta.types.PurgeProductsResponse` Response of the PurgeProductsRequest. If the long running operation is + successfully done, then this message is returned by + the google.longrunning.Operations.response field. + + """ + # Create or coerce a protobuf request object. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, purge_config.PurgeProductsRequest): + request = purge_config.PurgeProductsRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.purge_products + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + purge_config.PurgeProductsResponse, + metadata_type=purge_config.PurgeProductsMetadata, + ) + + # Done; return the response. + return response + async def import_products( self, request: Optional[Union[import_config.ImportProductsRequest, dict]] = None, @@ -1314,10 +1433,11 @@ async def add_fulfillment_places( timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> operation_async.AsyncOperation: - r"""It is recommended to use the + r"""We recommend that you use the [ProductService.AddLocalInventories][google.cloud.retail.v2beta.ProductService.AddLocalInventories] - method instead of - [ProductService.AddFulfillmentPlaces][google.cloud.retail.v2beta.ProductService.AddFulfillmentPlaces]. + method instead of the + [ProductService.AddFulfillmentPlaces][google.cloud.retail.v2beta.ProductService.AddFulfillmentPlaces] + method. 
[ProductService.AddLocalInventories][google.cloud.retail.v2beta.ProductService.AddLocalInventories] achieves the same results but provides more fine-grained control over ingesting local inventory data. @@ -1477,10 +1597,11 @@ async def remove_fulfillment_places( timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> operation_async.AsyncOperation: - r"""It is recommended to use the + r"""We recommend that you use the [ProductService.RemoveLocalInventories][google.cloud.retail.v2beta.ProductService.RemoveLocalInventories] - method instead of - [ProductService.RemoveFulfillmentPlaces][google.cloud.retail.v2beta.ProductService.RemoveFulfillmentPlaces]. + method instead of the + [ProductService.RemoveFulfillmentPlaces][google.cloud.retail.v2beta.ProductService.RemoveFulfillmentPlaces] + method. [ProductService.RemoveLocalInventories][google.cloud.retail.v2beta.ProductService.RemoveLocalInventories] achieves the same results but provides more fine-grained control over ingesting local inventory data. diff --git a/packages/google-cloud-retail/google/cloud/retail_v2beta/services/product_service/client.py b/packages/google-cloud-retail/google/cloud/retail_v2beta/services/product_service/client.py index 0773662d0bed..aaad2192d6de 100644 --- a/packages/google-cloud-retail/google/cloud/retail_v2beta/services/product_service/client.py +++ b/packages/google-cloud-retail/google/cloud/retail_v2beta/services/product_service/client.py @@ -58,10 +58,10 @@ from google.protobuf import wrappers_pb2 # type: ignore from google.cloud.retail_v2beta.services.product_service import pagers +from google.cloud.retail_v2beta.types import product_service, promotion, purge_config from google.cloud.retail_v2beta.types import common, import_config from google.cloud.retail_v2beta.types import product from google.cloud.retail_v2beta.types import product as gcr_product -from google.cloud.retail_v2beta.types import product_service, promotion from .transports.base import DEFAULT_CLIENT_INFO, ProductServiceTransport from .transports.grpc import ProductServiceGrpcTransport @@ -1362,6 +1362,123 @@ def sample_delete_product(): metadata=metadata, ) + def purge_products( + self, + request: Optional[Union[purge_config.PurgeProductsRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Permanently deletes all selected + [Product][google.cloud.retail.v2beta.Product]s under a branch. + + This process is asynchronous. If the request is valid, the + removal will be enqueued and processed offline. Depending on the + number of [Product][google.cloud.retail.v2beta.Product]s, this + operation could take hours to complete. Before the operation + completes, some [Product][google.cloud.retail.v2beta.Product]s + may still be returned by + [ProductService.GetProduct][google.cloud.retail.v2beta.ProductService.GetProduct] + or + [ProductService.ListProducts][google.cloud.retail.v2beta.ProductService.ListProducts]. + + Depending on the number of + [Product][google.cloud.retail.v2beta.Product]s, this operation + could take hours to complete. To get a sample of + [Product][google.cloud.retail.v2beta.Product]s that would be + deleted, set + [PurgeProductsRequest.force][google.cloud.retail.v2beta.PurgeProductsRequest.force] + to false. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import retail_v2beta + + def sample_purge_products(): + # Create a client + client = retail_v2beta.ProductServiceClient() + + # Initialize request argument(s) + request = retail_v2beta.PurgeProductsRequest( + parent="parent_value", + filter="filter_value", + ) + + # Make the request + operation = client.purge_products(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.retail_v2beta.types.PurgeProductsRequest, dict]): + The request object. Request message for PurgeProducts + method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.retail_v2beta.types.PurgeProductsResponse` Response of the PurgeProductsRequest. If the long running operation is + successfully done, then this message is returned by + the google.longrunning.Operations.response field. + + """ + # Create or coerce a protobuf request object. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, purge_config.PurgeProductsRequest): + request = purge_config.PurgeProductsRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.purge_products] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + purge_config.PurgeProductsResponse, + metadata_type=purge_config.PurgeProductsMetadata, + ) + + # Done; return the response. + return response + def import_products( self, request: Optional[Union[import_config.ImportProductsRequest, dict]] = None, @@ -1740,10 +1857,11 @@ def add_fulfillment_places( timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> operation.Operation: - r"""It is recommended to use the + r"""We recommend that you use the [ProductService.AddLocalInventories][google.cloud.retail.v2beta.ProductService.AddLocalInventories] - method instead of - [ProductService.AddFulfillmentPlaces][google.cloud.retail.v2beta.ProductService.AddFulfillmentPlaces]. 
+ method instead of the + [ProductService.AddFulfillmentPlaces][google.cloud.retail.v2beta.ProductService.AddFulfillmentPlaces] + method. [ProductService.AddLocalInventories][google.cloud.retail.v2beta.ProductService.AddLocalInventories] achieves the same results but provides more fine-grained control over ingesting local inventory data. @@ -1900,10 +2018,11 @@ def remove_fulfillment_places( timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> operation.Operation: - r"""It is recommended to use the + r"""We recommend that you use the [ProductService.RemoveLocalInventories][google.cloud.retail.v2beta.ProductService.RemoveLocalInventories] - method instead of - [ProductService.RemoveFulfillmentPlaces][google.cloud.retail.v2beta.ProductService.RemoveFulfillmentPlaces]. + method instead of the + [ProductService.RemoveFulfillmentPlaces][google.cloud.retail.v2beta.ProductService.RemoveFulfillmentPlaces] + method. [ProductService.RemoveLocalInventories][google.cloud.retail.v2beta.ProductService.RemoveLocalInventories] achieves the same results but provides more fine-grained control over ingesting local inventory data. diff --git a/packages/google-cloud-retail/google/cloud/retail_v2beta/services/product_service/transports/base.py b/packages/google-cloud-retail/google/cloud/retail_v2beta/services/product_service/transports/base.py index 0808585ebe13..4847d10380b7 100644 --- a/packages/google-cloud-retail/google/cloud/retail_v2beta/services/product_service/transports/base.py +++ b/packages/google-cloud-retail/google/cloud/retail_v2beta/services/product_service/transports/base.py @@ -31,7 +31,7 @@ from google.cloud.retail_v2beta.types import import_config from google.cloud.retail_v2beta.types import product from google.cloud.retail_v2beta.types import product as gcr_product -from google.cloud.retail_v2beta.types import product_service +from google.cloud.retail_v2beta.types import product_service, purge_config DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( gapic_version=package_version.__version__ @@ -157,6 +157,11 @@ def _prep_wrapped_messages(self, client_info): default_timeout=None, client_info=client_info, ), + self.purge_products: gapic_v1.method.wrap_method( + self.purge_products, + default_timeout=None, + client_info=client_info, + ), self.import_products: gapic_v1.method.wrap_method( self.import_products, default_retry=retries.Retry( @@ -261,6 +266,15 @@ def delete_product( ]: raise NotImplementedError() + @property + def purge_products( + self, + ) -> Callable[ + [purge_config.PurgeProductsRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + @property def import_products( self, diff --git a/packages/google-cloud-retail/google/cloud/retail_v2beta/services/product_service/transports/grpc.py b/packages/google-cloud-retail/google/cloud/retail_v2beta/services/product_service/transports/grpc.py index 128d4c6080ce..ad9d71b61512 100644 --- a/packages/google-cloud-retail/google/cloud/retail_v2beta/services/product_service/transports/grpc.py +++ b/packages/google-cloud-retail/google/cloud/retail_v2beta/services/product_service/transports/grpc.py @@ -28,7 +28,7 @@ from google.cloud.retail_v2beta.types import import_config from google.cloud.retail_v2beta.types import product from google.cloud.retail_v2beta.types import product as gcr_product -from google.cloud.retail_v2beta.types import product_service +from google.cloud.retail_v2beta.types import product_service, purge_config from 
.base import DEFAULT_CLIENT_INFO, ProductServiceTransport @@ -387,6 +387,51 @@ def delete_product( ) return self._stubs["delete_product"] + @property + def purge_products( + self, + ) -> Callable[[purge_config.PurgeProductsRequest], operations_pb2.Operation]: + r"""Return a callable for the purge products method over gRPC. + + Permanently deletes all selected + [Product][google.cloud.retail.v2beta.Product]s under a branch. + + This process is asynchronous. If the request is valid, the + removal will be enqueued and processed offline. Depending on the + number of [Product][google.cloud.retail.v2beta.Product]s, this + operation could take hours to complete. Before the operation + completes, some [Product][google.cloud.retail.v2beta.Product]s + may still be returned by + [ProductService.GetProduct][google.cloud.retail.v2beta.ProductService.GetProduct] + or + [ProductService.ListProducts][google.cloud.retail.v2beta.ProductService.ListProducts]. + + Depending on the number of + [Product][google.cloud.retail.v2beta.Product]s, this operation + could take hours to complete. To get a sample of + [Product][google.cloud.retail.v2beta.Product]s that would be + deleted, set + [PurgeProductsRequest.force][google.cloud.retail.v2beta.PurgeProductsRequest.force] + to false. + + Returns: + Callable[[~.PurgeProductsRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "purge_products" not in self._stubs: + self._stubs["purge_products"] = self.grpc_channel.unary_unary( + "/google.cloud.retail.v2beta.ProductService/PurgeProducts", + request_serializer=purge_config.PurgeProductsRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["purge_products"] + @property def import_products( self, @@ -507,10 +552,11 @@ def add_fulfillment_places( ]: r"""Return a callable for the add fulfillment places method over gRPC. - It is recommended to use the + We recommend that you use the [ProductService.AddLocalInventories][google.cloud.retail.v2beta.ProductService.AddLocalInventories] - method instead of - [ProductService.AddFulfillmentPlaces][google.cloud.retail.v2beta.ProductService.AddFulfillmentPlaces]. + method instead of the + [ProductService.AddFulfillmentPlaces][google.cloud.retail.v2beta.ProductService.AddFulfillmentPlaces] + method. [ProductService.AddLocalInventories][google.cloud.retail.v2beta.ProductService.AddLocalInventories] achieves the same results but provides more fine-grained control over ingesting local inventory data. @@ -565,10 +611,11 @@ def remove_fulfillment_places( ]: r"""Return a callable for the remove fulfillment places method over gRPC. - It is recommended to use the + We recommend that you use the [ProductService.RemoveLocalInventories][google.cloud.retail.v2beta.ProductService.RemoveLocalInventories] - method instead of - [ProductService.RemoveFulfillmentPlaces][google.cloud.retail.v2beta.ProductService.RemoveFulfillmentPlaces]. + method instead of the + [ProductService.RemoveFulfillmentPlaces][google.cloud.retail.v2beta.ProductService.RemoveFulfillmentPlaces] + method. [ProductService.RemoveLocalInventories][google.cloud.retail.v2beta.ProductService.RemoveLocalInventories] achieves the same results but provides more fine-grained control over ingesting local inventory data. 
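A brief usage sketch of the new PurgeProducts surface follows, for orientation while reviewing the transport changes. It is a sketch rather than generated code from this change: the project, catalog, and branch IDs are placeholders, the filter reuses the ``availability = "BACKORDER"`` example from the request documentation, and the dry run relies on the documented behavior that ``force=False`` only reports the expected purge count and a sample of product names.

.. code-block:: python

    from google.cloud import retail_v2beta


    def preview_then_purge(parent: str) -> None:
        # Placeholder parent, e.g.
        # "projects/my-project/locations/global/catalogs/default_catalog/branches/0"
        client = retail_v2beta.ProductServiceClient()

        # Dry run: with force=False the operation reports what would be
        # deleted (purge_count plus a purge_sample of product names)
        # without deleting anything.
        preview = client.purge_products(
            request=retail_v2beta.PurgeProductsRequest(
                parent=parent,
                filter='availability = "BACKORDER"',
                force=False,
            )
        ).result()
        print(f"Would delete {preview.purge_count} products")
        for name in preview.purge_sample:
            print(f"  sample: {name}")

        # Actual purge: force=True permanently deletes the matching products.
        purged = client.purge_products(
            request=retail_v2beta.PurgeProductsRequest(
                parent=parent,
                filter='availability = "BACKORDER"',
                force=True,
            )
        ).result()
        print(f"Deleted {purged.purge_count} products")

The same flow applies to ``ProductServiceAsyncClient``, awaiting ``purge_products`` and the returned operation as shown in the generated async sample earlier in this diff.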
diff --git a/packages/google-cloud-retail/google/cloud/retail_v2beta/services/product_service/transports/grpc_asyncio.py b/packages/google-cloud-retail/google/cloud/retail_v2beta/services/product_service/transports/grpc_asyncio.py index 971d2891db9e..22c787fffef0 100644 --- a/packages/google-cloud-retail/google/cloud/retail_v2beta/services/product_service/transports/grpc_asyncio.py +++ b/packages/google-cloud-retail/google/cloud/retail_v2beta/services/product_service/transports/grpc_asyncio.py @@ -30,7 +30,7 @@ from google.cloud.retail_v2beta.types import import_config from google.cloud.retail_v2beta.types import product from google.cloud.retail_v2beta.types import product as gcr_product -from google.cloud.retail_v2beta.types import product_service +from google.cloud.retail_v2beta.types import product_service, purge_config from .base import DEFAULT_CLIENT_INFO, ProductServiceTransport from .grpc import ProductServiceGrpcTransport @@ -398,6 +398,53 @@ def delete_product( ) return self._stubs["delete_product"] + @property + def purge_products( + self, + ) -> Callable[ + [purge_config.PurgeProductsRequest], Awaitable[operations_pb2.Operation] + ]: + r"""Return a callable for the purge products method over gRPC. + + Permanently deletes all selected + [Product][google.cloud.retail.v2beta.Product]s under a branch. + + This process is asynchronous. If the request is valid, the + removal will be enqueued and processed offline. Depending on the + number of [Product][google.cloud.retail.v2beta.Product]s, this + operation could take hours to complete. Before the operation + completes, some [Product][google.cloud.retail.v2beta.Product]s + may still be returned by + [ProductService.GetProduct][google.cloud.retail.v2beta.ProductService.GetProduct] + or + [ProductService.ListProducts][google.cloud.retail.v2beta.ProductService.ListProducts]. + + Depending on the number of + [Product][google.cloud.retail.v2beta.Product]s, this operation + could take hours to complete. To get a sample of + [Product][google.cloud.retail.v2beta.Product]s that would be + deleted, set + [PurgeProductsRequest.force][google.cloud.retail.v2beta.PurgeProductsRequest.force] + to false. + + Returns: + Callable[[~.PurgeProductsRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "purge_products" not in self._stubs: + self._stubs["purge_products"] = self.grpc_channel.unary_unary( + "/google.cloud.retail.v2beta.ProductService/PurgeProducts", + request_serializer=purge_config.PurgeProductsRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["purge_products"] + @property def import_products( self, @@ -523,10 +570,11 @@ def add_fulfillment_places( ]: r"""Return a callable for the add fulfillment places method over gRPC. - It is recommended to use the + We recommend that you use the [ProductService.AddLocalInventories][google.cloud.retail.v2beta.ProductService.AddLocalInventories] - method instead of - [ProductService.AddFulfillmentPlaces][google.cloud.retail.v2beta.ProductService.AddFulfillmentPlaces]. + method instead of the + [ProductService.AddFulfillmentPlaces][google.cloud.retail.v2beta.ProductService.AddFulfillmentPlaces] + method. 
[ProductService.AddLocalInventories][google.cloud.retail.v2beta.ProductService.AddLocalInventories] achieves the same results but provides more fine-grained control over ingesting local inventory data. @@ -582,10 +630,11 @@ def remove_fulfillment_places( ]: r"""Return a callable for the remove fulfillment places method over gRPC. - It is recommended to use the + We recommend that you use the [ProductService.RemoveLocalInventories][google.cloud.retail.v2beta.ProductService.RemoveLocalInventories] - method instead of - [ProductService.RemoveFulfillmentPlaces][google.cloud.retail.v2beta.ProductService.RemoveFulfillmentPlaces]. + method instead of the + [ProductService.RemoveFulfillmentPlaces][google.cloud.retail.v2beta.ProductService.RemoveFulfillmentPlaces] + method. [ProductService.RemoveLocalInventories][google.cloud.retail.v2beta.ProductService.RemoveLocalInventories] achieves the same results but provides more fine-grained control over ingesting local inventory data. @@ -777,6 +826,11 @@ def _prep_wrapped_messages(self, client_info): default_timeout=None, client_info=client_info, ), + self.purge_products: gapic_v1.method_async.wrap_method( + self.purge_products, + default_timeout=None, + client_info=client_info, + ), self.import_products: gapic_v1.method_async.wrap_method( self.import_products, default_retry=retries.AsyncRetry( diff --git a/packages/google-cloud-retail/google/cloud/retail_v2beta/services/product_service/transports/rest.py b/packages/google-cloud-retail/google/cloud/retail_v2beta/services/product_service/transports/rest.py index 0accd9e13afe..3361abecf7eb 100644 --- a/packages/google-cloud-retail/google/cloud/retail_v2beta/services/product_service/transports/rest.py +++ b/packages/google-cloud-retail/google/cloud/retail_v2beta/services/product_service/transports/rest.py @@ -49,7 +49,7 @@ from google.cloud.retail_v2beta.types import import_config from google.cloud.retail_v2beta.types import product from google.cloud.retail_v2beta.types import product as gcr_product -from google.cloud.retail_v2beta.types import product_service +from google.cloud.retail_v2beta.types import product_service, purge_config from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO from .base import ProductServiceTransport @@ -128,6 +128,14 @@ def post_list_products(self, response): logging.log(f"Received response: {response}") return response + def pre_purge_products(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_purge_products(self, response): + logging.log(f"Received response: {response}") + return response + def pre_remove_fulfillment_places(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata @@ -312,6 +320,29 @@ def post_list_products( """ return response + def pre_purge_products( + self, + request: purge_config.PurgeProductsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[purge_config.PurgeProductsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for purge_products + + Override in a subclass to manipulate the request or metadata + before they are sent to the ProductService server. + """ + return request, metadata + + def post_purge_products( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for purge_products + + Override in a subclass to manipulate the response + after it is returned by the ProductService server but before + it is returned to user code. 
+ """ + return response + def pre_remove_fulfillment_places( self, request: product_service.RemoveFulfillmentPlacesRequest, @@ -1252,6 +1283,100 @@ def __call__( resp = self._interceptor.post_list_products(resp) return resp + class _PurgeProducts(ProductServiceRestStub): + def __hash__(self): + return hash("PurgeProducts") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: purge_config.PurgeProductsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the purge products method over HTTP. + + Args: + request (~.purge_config.PurgeProductsRequest): + The request object. Request message for PurgeProducts + method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v2beta/{parent=projects/*/locations/*/catalogs/*/branches/*}/products:purge", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_purge_products(request, metadata) + pb_request = purge_config.PurgeProductsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_purge_products(resp) + return resp + class _RemoveFulfillmentPlaces(ProductServiceRestStub): def __hash__(self): return hash("RemoveFulfillmentPlaces") @@ -1700,6 +1825,14 @@ def list_products( # In C++ this would require a dynamic_cast return self._ListProducts(self._session, self._host, self._interceptor) # type: ignore + @property + def purge_products( + self, + ) -> Callable[[purge_config.PurgeProductsRequest], operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._PurgeProducts(self._session, self._host, self._interceptor) # type: ignore + @property def remove_fulfillment_places( self, diff --git a/packages/google-cloud-retail/google/cloud/retail_v2beta/types/__init__.py b/packages/google-cloud-retail/google/cloud/retail_v2beta/types/__init__.py index a1e9e0cd1aba..0ac5d54037d9 100644 --- a/packages/google-cloud-retail/google/cloud/retail_v2beta/types/__init__.py +++ b/packages/google-cloud-retail/google/cloud/retail_v2beta/types/__init__.py @@ -139,7 +139,14 @@ UpdateProductRequest, ) from .promotion import Promotion -from .purge_config import PurgeMetadata, PurgeUserEventsRequest, PurgeUserEventsResponse +from .purge_config import ( + PurgeMetadata, + PurgeProductsMetadata, + PurgeProductsRequest, + PurgeProductsResponse, + PurgeUserEventsRequest, + PurgeUserEventsResponse, +) from .search_service import ExperimentInfo, SearchRequest, SearchResponse from .serving_config import ServingConfig from .serving_config_service import ( @@ -275,6 +282,9 @@ "UpdateProductRequest", "Promotion", "PurgeMetadata", + "PurgeProductsMetadata", + "PurgeProductsRequest", + "PurgeProductsResponse", "PurgeUserEventsRequest", "PurgeUserEventsResponse", "ExperimentInfo", diff --git a/packages/google-cloud-retail/google/cloud/retail_v2beta/types/catalog.py b/packages/google-cloud-retail/google/cloud/retail_v2beta/types/catalog.py index 78960bc50d9e..fac5f7dc2caa 100644 --- a/packages/google-cloud-retail/google/cloud/retail_v2beta/types/catalog.py +++ b/packages/google-cloud-retail/google/cloud/retail_v2beta/types/catalog.py @@ -17,6 +17,7 @@ from typing import MutableMapping, MutableSequence +from google.protobuf import timestamp_pb2 # type: ignore import proto # type: ignore from google.cloud.retail_v2beta.types import common, import_config @@ -167,7 +168,10 @@ class CatalogAttribute(proto.Message): faceted, or boosted in [SearchService.Search][google.cloud.retail.v2beta.SearchService.Search]. - Must be specified, otherwise throws INVALID_FORMAT error. + Must be specified when + [AttributesConfig.attribute_config_level][google.cloud.retail.v2beta.AttributesConfig.attribute_config_level] + is CATALOG_LEVEL_ATTRIBUTE_CONFIG, otherwise throws + INVALID_FORMAT error. dynamic_facetable_option (google.cloud.retail_v2beta.types.CatalogAttribute.DynamicFacetableOption): If DYNAMIC_FACETABLE_ENABLED, attribute values are available for dynamic facet. Could only be DYNAMIC_FACETABLE_DISABLED @@ -190,7 +194,10 @@ class CatalogAttribute(proto.Message): as there are no text values associated to numerical attributes. - Must be specified, otherwise throws INVALID_FORMAT error. + Must be specified, when + [AttributesConfig.attribute_config_level][google.cloud.retail.v2beta.AttributesConfig.attribute_config_level] + is CATALOG_LEVEL_ATTRIBUTE_CONFIG, otherwise throws + INVALID_FORMAT error. recommendations_filtering_option (google.cloud.retail_v2beta.types.RecommendationsFilteringOption): When [AttributesConfig.attribute_config_level][google.cloud.retail.v2beta.AttributesConfig.attribute_config_level] @@ -210,6 +217,8 @@ class CatalogAttribute(proto.Message): the search results. If unset, the server behavior defaults to [RETRIEVABLE_DISABLED][google.cloud.retail.v2beta.CatalogAttribute.RetrievableOption.RETRIEVABLE_DISABLED]. + facet_config (google.cloud.retail_v2beta.types.CatalogAttribute.FacetConfig): + Contains facet options. 
""" class AttributeType(proto.Enum): @@ -311,6 +320,214 @@ class RetrievableOption(proto.Enum): RETRIEVABLE_ENABLED = 1 RETRIEVABLE_DISABLED = 2 + class FacetConfig(proto.Message): + r"""Possible options for the facet that corresponds to the + current attribute config. + + Attributes: + facet_intervals (MutableSequence[google.cloud.retail_v2beta.types.Interval]): + If you don't set the facet + [SearchRequest.FacetSpec.FacetKey.intervals][google.cloud.retail.v2beta.SearchRequest.FacetSpec.FacetKey.intervals] + in the request to a numerical attribute, then we use the + computed intervals with rounded bounds obtained from all its + product numerical attribute values. The computed intervals + might not be ideal for some attributes. Therefore, we give + you the option to overwrite them with the facet_intervals + field. The maximum of facet intervals per + [CatalogAttribute][google.cloud.retail.v2beta.CatalogAttribute] + is 40. Each interval must have a lower bound or an upper + bound. If both bounds are provided, then the lower bound + must be smaller or equal than the upper bound. + ignored_facet_values (MutableSequence[google.cloud.retail_v2beta.types.CatalogAttribute.FacetConfig.IgnoredFacetValues]): + Each instance represents a list of attribute values to + ignore as facet values for a specific time range. The + maximum number of instances per + [CatalogAttribute][google.cloud.retail.v2beta.CatalogAttribute] + is 25. + merged_facet_values (MutableSequence[google.cloud.retail_v2beta.types.CatalogAttribute.FacetConfig.MergedFacetValue]): + Each instance replaces a list of facet values by a merged + facet value. If a facet value is not in any list, then it + will stay the same. To avoid conflicts, only paths of length + 1 are accepted. In other words, if "dark_blue" merged into + "BLUE", then the latter can't merge into "blues" because + this would create a path of length 2. The maximum number of + instances of MergedFacetValue per + [CatalogAttribute][google.cloud.retail.v2beta.CatalogAttribute] + is 100. This feature is available only for textual custom + attributes. + merged_facet (google.cloud.retail_v2beta.types.CatalogAttribute.FacetConfig.MergedFacet): + Use this field only if you want to merge a + facet key into another facet key. + rerank_config (google.cloud.retail_v2beta.types.CatalogAttribute.FacetConfig.RerankConfig): + Set this field only if you want to rerank + based on facet values engaged by the user for + the current key. This option is only possible + for custom facetable textual keys. + """ + + class IgnoredFacetValues(proto.Message): + r"""[Facet + values][google.cloud.retail.v2beta.SearchResponse.Facet.values] to + ignore on [facets][google.cloud.retail.v2beta.SearchResponse.Facet] + during the specified time range for the given + [SearchResponse.Facet.key][google.cloud.retail.v2beta.SearchResponse.Facet.key] + attribute. + + Attributes: + values (MutableSequence[str]): + List of facet values to ignore for the + following time range. The facet values are the + same as the attribute values. There is a limit + of 10 values per instance of IgnoredFacetValues. + Each value can have at most 128 characters. + start_time (google.protobuf.timestamp_pb2.Timestamp): + Time range for the current list of facet + values to ignore. If multiple time ranges are + specified for an facet value for the current + attribute, consider all of them. If both are + empty, ignore always. If start time and end time + are set, then start time must be before end + time. 
+ If start time is not empty and end time is + empty, then ignore these facet values after + the start time. + end_time (google.protobuf.timestamp_pb2.Timestamp): + If start time is empty and end time is not + empty, then ignore these facet values before end + time. + """ + + values: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=1, + ) + start_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + end_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=3, + message=timestamp_pb2.Timestamp, + ) + + class MergedFacetValue(proto.Message): + r"""Replaces a set of textual facet values by the same (possibly + different) merged facet value. Each facet value should appear at + most once as a value per + [CatalogAttribute][google.cloud.retail.v2beta.CatalogAttribute]. + This feature is available only for textual custom attributes. + + Attributes: + values (MutableSequence[str]): + All the facet values that are replaced by the same + [merged_value][google.cloud.retail.v2beta.CatalogAttribute.FacetConfig.MergedFacetValue.merged_value] + that follows. The maximum number of values per + MergedFacetValue is 25. Each value can have up to 128 + characters. + merged_value (str): + All the previous values are replaced by this merged facet + value. This merged_value must be non-empty and can have up + to 128 characters. + """ + + values: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=1, + ) + merged_value: str = proto.Field( + proto.STRING, + number=2, + ) + + class MergedFacet(proto.Message): + r"""The current facet key (i.e. attribute config) maps into the + [merged_facet_key][google.cloud.retail.v2beta.CatalogAttribute.FacetConfig.MergedFacet.merged_facet_key]. + A facet key can have at most one child. The current facet key and + the merged facet key need both to be textual custom attributes or + both numerical custom attributes (same type). + + Attributes: + merged_facet_key (str): + The merged facet key should be a valid facet + key that is different from the facet key of the + current catalog attribute. We refer to this + merged facet key as the child of the current + catalog attribute. This merged facet key can't + be a parent of another facet key (i.e. no + directed path of length 2). This merged facet + key needs to be either a textual custom + attribute or a numerical custom attribute. + """ + + merged_facet_key: str = proto.Field( + proto.STRING, + number=1, + ) + + class RerankConfig(proto.Message): + r"""Options to rerank based on facet values engaged by the user for the + current key. That key needs to be a custom textual key and + facetable. To use this control, you also need to pass all the facet + keys engaged by the user in the request using the field + [SearchRequest.FacetSpec]. In particular, if you don't pass the + facet keys engaged that you want to rerank on, this control won't be + effective. Moreover, to obtain better results, the facet values that + you want to rerank on should be close to English (ideally made of + words, underscores, and spaces). + + Attributes: + rerank_facet (bool): + If set to true, then we also rerank the + dynamic facets based on the facet values engaged + by the user for the current attribute key during + serving. + facet_values (MutableSequence[str]): + If empty, rerank on all facet values for the + current key. Otherwise, rerank on the facet + values from this list only. 
+ """ + + rerank_facet: bool = proto.Field( + proto.BOOL, + number=1, + ) + facet_values: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=2, + ) + + facet_intervals: MutableSequence[common.Interval] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=common.Interval, + ) + ignored_facet_values: MutableSequence[ + "CatalogAttribute.FacetConfig.IgnoredFacetValues" + ] = proto.RepeatedField( + proto.MESSAGE, + number=2, + message="CatalogAttribute.FacetConfig.IgnoredFacetValues", + ) + merged_facet_values: MutableSequence[ + "CatalogAttribute.FacetConfig.MergedFacetValue" + ] = proto.RepeatedField( + proto.MESSAGE, + number=3, + message="CatalogAttribute.FacetConfig.MergedFacetValue", + ) + merged_facet: "CatalogAttribute.FacetConfig.MergedFacet" = proto.Field( + proto.MESSAGE, + number=4, + message="CatalogAttribute.FacetConfig.MergedFacet", + ) + rerank_config: "CatalogAttribute.FacetConfig.RerankConfig" = proto.Field( + proto.MESSAGE, + number=5, + message="CatalogAttribute.FacetConfig.RerankConfig", + ) + key: str = proto.Field( proto.STRING, number=1, @@ -356,6 +573,11 @@ class RetrievableOption(proto.Enum): number=12, enum=RetrievableOption, ) + facet_config: FacetConfig = proto.Field( + proto.MESSAGE, + number=13, + message=FacetConfig, + ) class AttributesConfig(proto.Message): @@ -448,7 +670,7 @@ class CompletionConfig(proto.Message): Can use [GetOperation][google.longrunning.Operations.GetOperation] - API to retrieve the latest state of the Long Running + API method to retrieve the latest state of the Long Running Operation. denylist_input_config (google.cloud.retail_v2beta.types.CompletionDataInputConfig): Output only. The source data for the latest @@ -525,12 +747,12 @@ class CompletionConfig(proto.Message): class MerchantCenterLink(proto.Message): r"""Represents a link between a Merchant Center account and a - branch. Once a link is established, products from the linked - merchant center account will be streamed to the linked branch. + branch. After a link is established, products from the linked + Merchant Center account are streamed to the linked branch. Attributes: merchant_center_account_id (int): - Required. The linked `Merchant center account + Required. The linked `Merchant Center account ID `__. The account must be a standalone account or a sub-account of a MCA. @@ -541,7 +763,7 @@ class MerchantCenterLink(proto.Message): configured default branch. However, changing the default branch later on won't change the linked branch here. - A single branch ID can only have one linked merchant center + A single branch ID can only have one linked Merchant Center account ID. destinations (MutableSequence[str]): String representing the destination to import for, all if @@ -660,10 +882,10 @@ class Catalog(proto.Message): Required. The product level configuration. merchant_center_linking_config (google.cloud.retail_v2beta.types.MerchantCenterLinkingConfig): The Merchant Center linking configuration. - Once a link is added, the data stream from + After a link is added, the data stream from Merchant Center to Cloud Retail will be enabled automatically. The requester must have access to - the merchant center account in order to make + the Merchant Center account in order to make changes to this field. 
""" diff --git a/packages/google-cloud-retail/google/cloud/retail_v2beta/types/common.py b/packages/google-cloud-retail/google/cloud/retail_v2beta/types/common.py index 1580f596a7ef..52fc452d6d12 100644 --- a/packages/google-cloud-retail/google/cloud/retail_v2beta/types/common.py +++ b/packages/google-cloud-retail/google/cloud/retail_v2beta/types/common.py @@ -137,6 +137,11 @@ class Condition(proto.Message): Range of time(s) specifying when Condition is active. Condition true if any time range matches. + page_categories (MutableSequence[str]): + Used to support browse uses cases. A list (up to 10 entries) + of categories or departments. The format should be the same + as + [UserEvent.page_categories][google.cloud.retail.v2beta.UserEvent.page_categories]; """ class QueryTerm(proto.Message): @@ -197,6 +202,10 @@ class TimeRange(proto.Message): number=3, message=TimeRange, ) + page_categories: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=4, + ) class Rule(proto.Message): @@ -251,6 +260,16 @@ class Rule(proto.Message): Treats a set of terms as synonyms of one another. + This field is a member of `oneof`_ ``action``. + force_return_facet_action (google.cloud.retail_v2beta.types.Rule.ForceReturnFacetAction): + Force returns an attribute as a facet in the + request. + + This field is a member of `oneof`_ ``action``. + remove_facet_action (google.cloud.retail_v2beta.types.Rule.RemoveFacetAction): + Remove an attribute as a facet in the request + (if present). + This field is a member of `oneof`_ ``action``. condition (google.cloud.retail_v2beta.types.Condition): Required. The condition that triggers the @@ -322,6 +341,7 @@ class FilterAction(proto.Message): provided with the SearchRequest. The AND operator is used to combine the query's existing filters with the filter rule(s). NOTE: May result in 0 results when filters conflict. + - Action Result: Filters the returned objects to be ONLY those that passed the filter. @@ -334,8 +354,8 @@ class FilterAction(proto.Message): must be set. - Filter syntax is identical to [SearchRequest.filter][google.cloud.retail.v2beta.SearchRequest.filter]. - See more details at the Retail Search `user - guide `__. + For more information, see + `Filter `__. - To filter products with product ID "product_1" or "product_2", and color "Red" or "Blue": *(id: ANY("product_1", "product_2"))* *AND* *(colorFamilies: @@ -350,11 +370,8 @@ class FilterAction(proto.Message): class RedirectAction(proto.Message): r"""Redirects a shopper to a specific page. - - Rule Condition: - - - Must specify - [Condition.query_terms][google.cloud.retail.v2beta.Condition.query_terms]. - + - Rule Condition: Must specify + [Condition.query_terms][google.cloud.retail.v2beta.Condition.query_terms]. - Action Input: Request Query - Action Result: Redirects shopper to provided uri. @@ -494,6 +511,108 @@ class IgnoreAction(proto.Message): number=1, ) + class ForceReturnFacetAction(proto.Message): + r"""Force returns an attribute/facet in the request around a certain + position or above. + + - Rule Condition: Must specify non-empty + [Condition.query_terms][google.cloud.retail.v2beta.Condition.query_terms] + (for search only) or + [Condition.page_categories][google.cloud.retail.v2beta.Condition.page_categories] + (for browse only), but can't specify both. + + - Action Inputs: attribute name, position + + - Action Result: Will force return a facet key around a certain + position or above if the condition is satisfied. 
+ + Example: Suppose the query is "shoes", the + [Condition.query_terms][google.cloud.retail.v2beta.Condition.query_terms] + is "shoes", the + [ForceReturnFacetAction.FacetPositionAdjustment.attribute_name][google.cloud.retail.v2beta.Rule.ForceReturnFacetAction.FacetPositionAdjustment.attribute_name] + is "size" and the + [ForceReturnFacetAction.FacetPositionAdjustment.position][google.cloud.retail.v2beta.Rule.ForceReturnFacetAction.FacetPositionAdjustment.position] + is 8. + + Two cases: a) The facet key "size" is not already in the top 8 + slots, then the facet "size" will appear at a position close to 8. + b) The facet key "size" is among the top 8 positions in the request, + then it will stay at its current rank. + + Attributes: + facet_position_adjustments (MutableSequence[google.cloud.retail_v2beta.types.Rule.ForceReturnFacetAction.FacetPositionAdjustment]): + Each instance corresponds to a force return + attribute for the given condition. There can't + be more than 3 instances here. + """ + + class FacetPositionAdjustment(proto.Message): + r"""Each facet position adjustment consists of a single attribute + name (i.e. facet key) along with a specified position. + + Attributes: + attribute_name (str): + The attribute name to force return as a + facet. Each attribute name should be a valid + attribute name, be non-empty and contain at most + 80 characters. + position (int): + This is the position in the request as + explained above. It should be strictly positive + and at most 100. + """ + + attribute_name: str = proto.Field( + proto.STRING, + number=1, + ) + position: int = proto.Field( + proto.INT32, + number=2, + ) + + facet_position_adjustments: MutableSequence[ + "Rule.ForceReturnFacetAction.FacetPositionAdjustment" + ] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="Rule.ForceReturnFacetAction.FacetPositionAdjustment", + ) + + class RemoveFacetAction(proto.Message): + r"""Removes an attribute/facet in the request if it is present. + + - Rule Condition: Must specify non-empty + [Condition.query_terms][google.cloud.retail.v2beta.Condition.query_terms] + (for search only) or + [Condition.page_categories][google.cloud.retail.v2beta.Condition.page_categories] + (for browse only), but can't specify both. + + - Action Input: attribute name + + - Action Result: Will remove the attribute (as a facet) from the + request if it is present. + + Example: Suppose the query is "shoes", the + [Condition.query_terms][google.cloud.retail.v2beta.Condition.query_terms] + is "shoes" and the attribute name "size", then facet key "size" will + be removed from the request (if it is present). + + Attributes: + attribute_names (MutableSequence[str]): + The attribute names (i.e. facet keys) to + remove from the dynamic facets (if present in + the request). There can't be more than 3 attribute + names. Each attribute name should be a valid + attribute name, be non-empty and contain at most + 80 characters. 
+ """ + + attribute_names: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=1, + ) + boost_action: BoostAction = proto.Field( proto.MESSAGE, number=2, @@ -542,6 +661,18 @@ class IgnoreAction(proto.Message): oneof="action", message=TwowaySynonymsAction, ) + force_return_facet_action: ForceReturnFacetAction = proto.Field( + proto.MESSAGE, + number=12, + oneof="action", + message=ForceReturnFacetAction, + ) + remove_facet_action: RemoveFacetAction = proto.Field( + proto.MESSAGE, + number=13, + oneof="action", + message=RemoveFacetAction, + ) condition: "Condition" = proto.Field( proto.MESSAGE, number=1, diff --git a/packages/google-cloud-retail/google/cloud/retail_v2beta/types/completion_service.py b/packages/google-cloud-retail/google/cloud/retail_v2beta/types/completion_service.py index ddb25089bb4e..6b721043b018 100644 --- a/packages/google-cloud-retail/google/cloud/retail_v2beta/types/completion_service.py +++ b/packages/google-cloud-retail/google/cloud/retail_v2beta/types/completion_service.py @@ -106,12 +106,16 @@ class CompleteQueryRequest(proto.Message): The maximum allowed max suggestions is 20. If it is set higher, it will be capped by 20. + enable_attribute_suggestions (bool): + If true, attribute suggestions are enabled + and provided in response. + This field is only available for "cloud-retail" + dataset. entity (str): - The entity for customers that may run multiple different - entities, domains, sites or regions, for example, - ``Google US``, ``Google Ads``, ``Waymo``, ``google.com``, - ``youtube.com``, etc. If this is set, it should be exactly - matched with + The entity for customers who run multiple entities, domains, + sites, or regions, for example, ``Google US``, + ``Google Ads``, ``Waymo``, ``google.com``, ``youtube.com``, + etc. If this is set, it must be an exact match with [UserEvent.entity][google.cloud.retail.v2beta.UserEvent.entity] to get per-entity autocomplete results. """ @@ -144,6 +148,10 @@ class CompleteQueryRequest(proto.Message): proto.INT32, number=5, ) + enable_attribute_suggestions: bool = proto.Field( + proto.BOOL, + number=9, + ) entity: str = proto.Field( proto.STRING, number=10, @@ -164,10 +172,10 @@ class CompleteQueryResponse(proto.Message): for search events resulting from this completion, which enables accurate attribution of complete model performance. recent_search_results (MutableSequence[google.cloud.retail_v2beta.types.CompleteQueryResponse.RecentSearchResult]): - Matched recent searches of this user. The maximum number of - recent searches is 10. This field is a restricted feature. - Contact Retail Search support team if you are interested in - enabling it. + Deprecated. Matched recent searches of this user. The + maximum number of recent searches is 10. This field is a + restricted feature. If you want to enable it, contact Retail + Search support. This feature is only available when [CompleteQueryRequest.visitor_id][google.cloud.retail.v2beta.CompleteQueryRequest.visitor_id] @@ -219,7 +227,7 @@ class CompletionResult(proto.Message): ) class RecentSearchResult(proto.Message): - r"""Recent search of this user. + r"""Deprecated: Recent search of this user. 
Attributes: recent_search (str): diff --git a/packages/google-cloud-retail/google/cloud/retail_v2beta/types/import_config.py b/packages/google-cloud-retail/google/cloud/retail_v2beta/types/import_config.py index 2fb89f766bc5..1ccf02734486 100644 --- a/packages/google-cloud-retail/google/cloud/retail_v2beta/types/import_config.py +++ b/packages/google-cloud-retail/google/cloud/retail_v2beta/types/import_config.py @@ -275,7 +275,9 @@ class ImportProductsRequest(proto.Message): during the Import. update_mask (google.protobuf.field_mask_pb2.FieldMask): Indicates which fields in the provided imported ``products`` - to update. If not set, all fields are updated. + to update. If not set, all fields are updated. If provided, + only the existing product fields are updated. Missing + products will not be created. reconciliation_mode (google.cloud.retail_v2beta.types.ImportProductsRequest.ReconciliationMode): The mode of reconciliation between existing products and the products to be imported. Defaults to @@ -290,10 +292,16 @@ class ImportProductsRequest(proto.Message): ``projects/{project}/topics/{topic}``. It has to be within the same project as [ImportProductsRequest.parent][google.cloud.retail.v2beta.ImportProductsRequest.parent]. - Make sure that + Make sure that both + ``cloud-retail-customer-data-access@system.gserviceaccount.com`` + and ``service-@gcp-sa-retail.iam.gserviceaccount.com`` - has the ``pubsub.topics.publish`` IAM permission on the + have the ``pubsub.topics.publish`` IAM permission on the topic. + + Only supported when + [ImportProductsRequest.reconciliation_mode][google.cloud.retail.v2beta.ImportProductsRequest.reconciliation_mode] + is set to ``FULL``. """ class ReconciliationMode(proto.Enum): diff --git a/packages/google-cloud-retail/google/cloud/retail_v2beta/types/model.py b/packages/google-cloud-retail/google/cloud/retail_v2beta/types/model.py index 31d0fba44946..15909e2c7d9a 100644 --- a/packages/google-cloud-retail/google/cloud/retail_v2beta/types/model.py +++ b/packages/google-cloud-retail/google/cloud/retail_v2beta/types/model.py @@ -150,6 +150,8 @@ class Model(proto.Message): Output only. The list of valid serving configs associated with the PageOptimizationConfig. + model_features_config (google.cloud.retail_v2beta.types.Model.ModelFeaturesConfig): + Optional. Additional model features config. """ class ServingState(proto.Enum): @@ -235,6 +237,27 @@ class DataState(proto.Enum): DATA_OK = 1 DATA_ERROR = 2 + class ContextProductsType(proto.Enum): + r"""Use single or multiple context products for recommendations. + + Values: + CONTEXT_PRODUCTS_TYPE_UNSPECIFIED (0): + Unspecified default value, should never be explicitly set. + Defaults to + [MULTIPLE_CONTEXT_PRODUCTS][google.cloud.retail.v2beta.Model.ContextProductsType.MULTIPLE_CONTEXT_PRODUCTS]. + SINGLE_CONTEXT_PRODUCT (1): + Use only a single product as context for the + recommendation. Typically used on pages like + add-to-cart or product details. + MULTIPLE_CONTEXT_PRODUCTS (2): + Use one or multiple products as context for + the recommendation. Typically used on shopping + cart pages. + """ + CONTEXT_PRODUCTS_TYPE_UNSPECIFIED = 0 + SINGLE_CONTEXT_PRODUCT = 1 + MULTIPLE_CONTEXT_PRODUCTS = 2 + class ServingConfigList(proto.Message): r"""Represents an ordered combination of valid serving configs, which can be used for ``PAGE_OPTIMIZATION`` recommendations. 
@@ -250,6 +273,45 @@ class ServingConfigList(proto.Message): number=1, ) + class FrequentlyBoughtTogetherFeaturesConfig(proto.Message): + r"""Additional configs for the frequently-bought-together model + type. + + Attributes: + context_products_type (google.cloud.retail_v2beta.types.Model.ContextProductsType): + Optional. Specifies the context of the model when it is used + in predict requests. Can only be set for the + ``frequently-bought-together`` type. If it isn't specified, + it defaults to + [MULTIPLE_CONTEXT_PRODUCTS][google.cloud.retail.v2beta.Model.ContextProductsType.MULTIPLE_CONTEXT_PRODUCTS]. + """ + + context_products_type: "Model.ContextProductsType" = proto.Field( + proto.ENUM, + number=2, + enum="Model.ContextProductsType", + ) + + class ModelFeaturesConfig(proto.Message): + r"""Additional model features config. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + frequently_bought_together_config (google.cloud.retail_v2beta.types.Model.FrequentlyBoughtTogetherFeaturesConfig): + Additional configs for + frequently-bought-together models. + + This field is a member of `oneof`_ ``type_dedicated_config``. + """ + + frequently_bought_together_config: "Model.FrequentlyBoughtTogetherFeaturesConfig" = proto.Field( + proto.MESSAGE, + number=1, + oneof="type_dedicated_config", + message="Model.FrequentlyBoughtTogetherFeaturesConfig", + ) + name: str = proto.Field( proto.STRING, number=1, @@ -315,6 +377,11 @@ class ServingConfigList(proto.Message): number=19, message=ServingConfigList, ) + model_features_config: ModelFeaturesConfig = proto.Field( + proto.MESSAGE, + number=22, + message=ModelFeaturesConfig, + ) __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-retail/google/cloud/retail_v2beta/types/product.py b/packages/google-cloud-retail/google/cloud/retail_v2beta/types/product.py index ff245afa0979..93555c3686ab 100644 --- a/packages/google-cloud-retail/google/cloud/retail_v2beta/types/product.py +++ b/packages/google-cloud-retail/google/cloud/retail_v2beta/types/product.py @@ -46,29 +46,25 @@ class Product(proto.Message): Attributes: expire_time (google.protobuf.timestamp_pb2.Timestamp): - The timestamp when this product becomes unavailable for - [SearchService.Search][google.cloud.retail.v2beta.SearchService.Search]. - Note that this is only applicable to - [Type.PRIMARY][google.cloud.retail.v2beta.Product.Type.PRIMARY] - and - [Type.COLLECTION][google.cloud.retail.v2beta.Product.Type.COLLECTION], - and ignored for - [Type.VARIANT][google.cloud.retail.v2beta.Product.Type.VARIANT]. + Note that this field is applied in the following ways: + + - If the [Product][google.cloud.retail.v2beta.Product] is + already expired when it is uploaded, this product is not + indexed for search. + + - If the [Product][google.cloud.retail.v2beta.Product] is + not expired when it is uploaded, only the + [Type.PRIMARY][google.cloud.retail.v2beta.Product.Type.PRIMARY]'s + and + [Type.COLLECTION][google.cloud.retail.v2beta.Product.Type.COLLECTION]'s + expireTime is respected, and + [Type.VARIANT][google.cloud.retail.v2beta.Product.Type.VARIANT]'s + expireTime is not used. + In general, we suggest the users to delete the stale products explicitly, instead of using this field to determine staleness. 
- If it is set, the - [Product][google.cloud.retail.v2beta.Product] is not - available for - [SearchService.Search][google.cloud.retail.v2beta.SearchService.Search] - after - [expire_time][google.cloud.retail.v2beta.Product.expire_time]. - However, the product can still be retrieved by - [ProductService.GetProduct][google.cloud.retail.v2beta.ProductService.GetProduct] - and - [ProductService.ListProducts][google.cloud.retail.v2beta.ProductService.ListProducts]. - [expire_time][google.cloud.retail.v2beta.Product.expire_time] must be later than [available_time][google.cloud.retail.v2beta.Product.available_time] @@ -221,7 +217,8 @@ class Product(proto.Message): INVALID_ARGUMENT error is returned. At most 250 values are allowed per - [Product][google.cloud.retail.v2beta.Product]. Empty values + [Product][google.cloud.retail.v2beta.Product] unless + overridden through the Google Cloud console. Empty values are not allowed. Each value must be a UTF-8 encoded string with a length limit of 5,000 characters. Otherwise, an INVALID_ARGUMENT error is returned. @@ -244,10 +241,10 @@ class Product(proto.Message): brands (MutableSequence[str]): The brands of the product. - A maximum of 30 brands are allowed. Each brand must be a - UTF-8 encoded string with a length limit of 1,000 - characters. Otherwise, an INVALID_ARGUMENT error is - returned. + A maximum of 30 brands are allowed unless overridden through + the Google Cloud console. Each brand must be a UTF-8 encoded + string with a length limit of 1,000 characters. Otherwise, + an INVALID_ARGUMENT error is returned. Corresponding properties: Google Merchant Center property `brand `__. diff --git a/packages/google-cloud-retail/google/cloud/retail_v2beta/types/promotion.py b/packages/google-cloud-retail/google/cloud/retail_v2beta/types/promotion.py index 2616ff09d556..33dfd0c2be26 100644 --- a/packages/google-cloud-retail/google/cloud/retail_v2beta/types/promotion.py +++ b/packages/google-cloud-retail/google/cloud/retail_v2beta/types/promotion.py @@ -40,8 +40,8 @@ class Promotion(proto.Message): ID_1_LIKE_THIS. Otherwise, an INVALID_ARGUMENT error is returned. - Google Merchant Center property - `promotion `__. + Corresponds to Google Merchant Center property + `promotion_id `__. """ promotion_id: str = proto.Field( diff --git a/packages/google-cloud-retail/google/cloud/retail_v2beta/types/purge_config.py b/packages/google-cloud-retail/google/cloud/retail_v2beta/types/purge_config.py index 3afe0bf0a149..535f7423d845 100644 --- a/packages/google-cloud-retail/google/cloud/retail_v2beta/types/purge_config.py +++ b/packages/google-cloud-retail/google/cloud/retail_v2beta/types/purge_config.py @@ -17,12 +17,16 @@ from typing import MutableMapping, MutableSequence +from google.protobuf import timestamp_pb2 # type: ignore import proto # type: ignore __protobuf__ = proto.module( package="google.cloud.retail.v2beta", manifest={ "PurgeMetadata", + "PurgeProductsMetadata", + "PurgeProductsRequest", + "PurgeProductsResponse", "PurgeUserEventsRequest", "PurgeUserEventsResponse", }, @@ -37,6 +41,145 @@ class PurgeMetadata(proto.Message): """ +class PurgeProductsMetadata(proto.Message): + r"""Metadata related to the progress of the PurgeProducts + operation. This will be returned by the + google.longrunning.Operation.metadata field. + + Attributes: + create_time (google.protobuf.timestamp_pb2.Timestamp): + Operation create time. + update_time (google.protobuf.timestamp_pb2.Timestamp): + Operation last update time. 
If the operation + is done, this is also the finish time. + success_count (int): + Count of entries that were deleted + successfully. + failure_count (int): + Count of entries that encountered errors + while processing. + """ + + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=1, + message=timestamp_pb2.Timestamp, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + success_count: int = proto.Field( + proto.INT64, + number=3, + ) + failure_count: int = proto.Field( + proto.INT64, + number=4, + ) + + +class PurgeProductsRequest(proto.Message): + r"""Request message for PurgeProducts method. + + Attributes: + parent (str): + Required. The resource name of the branch under which the + products are created. The format is + ``projects/${projectId}/locations/global/catalogs/${catalogId}/branches/${branchId}`` + filter (str): + Required. The filter string to specify the products to be + deleted with a length limit of 5,000 characters. + + Empty string filter is not allowed. "*" implies delete all + items in a branch. + + The eligible fields for filtering are: + + - ``availability``: Double quoted + [Product.availability][google.cloud.retail.v2beta.Product.availability] + string. + - ``create_time`` : in ISO 8601 "zulu" format. + + Supported syntax: + + - Comparators (">", "<", ">=", "<=", "="). Examples: + + - create_time <= "2015-02-13T17:05:46Z" + - availability = "IN_STOCK" + + - Conjunctions ("AND") Examples: + + - create_time <= "2015-02-13T17:05:46Z" AND availability + = "PREORDER" + + - Disjunctions ("OR") Examples: + + - create_time <= "2015-02-13T17:05:46Z" OR availability + = "IN_STOCK" + + - Can support nested queries. Examples: + + - (create_time <= "2015-02-13T17:05:46Z" AND + availability = "PREORDER") OR (create_time >= + "2015-02-14T13:03:32Z" AND availability = "IN_STOCK") + + - Filter Limits: + + - Filter should not contain more than 6 conditions. + - Max nesting depth should not exceed 2 levels. + + Examples queries: + + - Delete back order products created before a timestamp. + create_time <= "2015-02-13T17:05:46Z" OR availability = + "BACKORDER". + force (bool): + Actually perform the purge. If ``force`` is set to false, + the method will return the expected purge count without + deleting any products. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + filter: str = proto.Field( + proto.STRING, + number=2, + ) + force: bool = proto.Field( + proto.BOOL, + number=3, + ) + + +class PurgeProductsResponse(proto.Message): + r"""Response of the PurgeProductsRequest. If the long running + operation is successfully done, then this message is returned by + the google.longrunning.Operations.response field. + + Attributes: + purge_count (int): + The total count of products purged as a + result of the operation. + purge_sample (MutableSequence[str]): + A sample of the product names that will be deleted. Only + populated if ``force`` is set to false. A max of 100 names + will be returned and the names are chosen at random. + """ + + purge_count: int = proto.Field( + proto.INT64, + number=1, + ) + purge_sample: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=2, + ) + + class PurgeUserEventsRequest(proto.Message): r"""Request message for PurgeUserEvents method. 
diff --git a/packages/google-cloud-retail/google/cloud/retail_v2beta/types/search_service.py b/packages/google-cloud-retail/google/cloud/retail_v2beta/types/search_service.py index 898686181305..ddc8d1ce8851 100644 --- a/packages/google-cloud-retail/google/cloud/retail_v2beta/types/search_service.py +++ b/packages/google-cloud-retail/google/cloud/retail_v2beta/types/search_service.py @@ -50,7 +50,7 @@ class SearchRequest(proto.Message): or the name of the legacy placement resource, such as ``projects/*/locations/global/catalogs/default_catalog/placements/default_search``. This field is used to identify the serving config name and - the set of models that will be used to make the search. + the set of models that are used to make the search. branch (str): The branch resource name, such as ``projects/*/locations/global/catalogs/default_catalog/branches/0``. @@ -113,8 +113,8 @@ class SearchRequest(proto.Message): The filter syntax consists of an expression language for constructing a predicate from one or more fields of the products being filtered. Filter expression is - case-sensitive. See more details at this `user - guide `__. + case-sensitive. For more information, see + `Filter `__. If this field is unrecognizable, an INVALID_ARGUMENT is returned. @@ -123,24 +123,22 @@ class SearchRequest(proto.Message): search without checking any filters on the search page. The filter applied to every search request when quality - improvement such as query expansion is needed. For example, - if a query does not have enough results, an expanded query - with - [SearchRequest.canonical_filter][google.cloud.retail.v2beta.SearchRequest.canonical_filter] - will be returned as a supplement of the original query. This - field is strongly recommended to achieve high search - quality. - - See - [SearchRequest.filter][google.cloud.retail.v2beta.SearchRequest.filter] - for more details about filter syntax. + improvement such as query expansion is needed. In the case a + query does not have a sufficient amount of results this + filter will be used to determine whether or not to enable + the query expansion flow. The original filter will still be + used for the query expanded search. This field is strongly + recommended to achieve high search quality. + + For more information about filter syntax, see + [SearchRequest.filter][google.cloud.retail.v2beta.SearchRequest.filter]. order_by (str): The order in which products are returned. Products can be ordered by a field in an [Product][google.cloud.retail.v2beta.Product] object. Leave it unset if ordered by relevance. OrderBy expression is - case-sensitive. See more details at this `user - guide `__. + case-sensitive. For more information, see + `Order `__. If this field is unrecognizable, an INVALID_ARGUMENT is returned. @@ -159,9 +157,9 @@ class SearchRequest(proto.Message): facets. Notice that only textual facets can be dynamically generated. boost_spec (google.cloud.retail_v2beta.types.SearchRequest.BoostSpec): - Boost specification to boost certain products. See more - details at this `user - guide `__. + Boost specification to boost certain products. For more + information, see `Boost + results `__. Notice that if both [ServingConfig.boost_control_ids][google.cloud.retail.v2beta.ServingConfig.boost_control_ids] @@ -173,9 +171,9 @@ class SearchRequest(proto.Message): boost scores from all matched boost conditions. 
query_expansion_spec (google.cloud.retail_v2beta.types.SearchRequest.QueryExpansionSpec): The query expansion specification that specifies the - conditions under which query expansion will occur. See more - details at this `user - guide `__. + conditions under which query expansion occurs. For more + information, see `Query + expansion `__. variant_rollup_keys (MutableSequence[str]): The keys to fetch and rollup the matching [variant][google.cloud.retail.v2beta.Product.Type.VARIANT] @@ -310,9 +308,9 @@ class SearchRequest(proto.Message): - Keys must start with a lowercase letter or international character. - See `Google Cloud - Document `__ - for more details. + For more information, see `Requirements for + labels `__ + in the Resource Manager documentation. spell_correction_spec (google.cloud.retail_v2beta.types.SearchRequest.SpellCorrectionSpec): The spell correction specification that specifies the mode under which spell correction @@ -420,18 +418,17 @@ class FacetSpec(proto.Message): enable_dynamic_position (bool): Enables dynamic position for this facet. If set to true, the position of this facet among all facets in the response is - determined by Google Retail Search. It will be ordered - together with dynamic facets if dynamic facets is enabled. - If set to false, the position of this facet in the response - will be the same as in the request, and it will be ranked - before the facets with dynamic position enable and all - dynamic facets. + determined by Google Retail Search. It is ordered together + with dynamic facets if dynamic facets is enabled. If set to + false, the position of this facet in the response is the + same as in the request, and it is ranked before the facets + with dynamic position enable and all dynamic facets. For example, you may always want to have rating facet returned in the response, but it's not necessarily to always display the rating facet at the top. In that case, you can set enable_dynamic_position to true so that the position of - rating facet in response will be determined by Google Retail + rating facet in response is determined by Google Retail Search. Another example, assuming you have the following facets in @@ -443,14 +440,14 @@ class FacetSpec(proto.Message): - "brands", enable_dynamic_position = false - And also you have a dynamic facets enable, which will - generate a facet 'gender'. Then the final order of the - facets in the response can be ("price", "brands", "rating", - "gender") or ("price", "brands", "gender", "rating") depends - on how Google Retail Search orders "gender" and "rating" - facets. However, notice that "price" and "brands" will - always be ranked at 1st and 2nd position since their - enable_dynamic_position are false. + And also you have a dynamic facets enable, which generates a + facet "gender". Then, the final order of the facets in the + response can be ("price", "brands", "rating", "gender") or + ("price", "brands", "gender", "rating") depends on how + Google Retail Search orders "gender" and "rating" facets. + However, notice that "price" and "brands" are always ranked + at first and second position because their + enable_dynamic_position values are false. """ class FacetKey(proto.Message): @@ -510,15 +507,15 @@ class FacetKey(proto.Message): For all numerical facet keys that appear in the list of products from the catalog, the - percentiles 0, 10, 30, 50, 70, 90 and 100 are + percentiles 0, 10, 30, 50, 70, 90, and 100 are computed from their distribution weekly. 
If the model assigns a high score to a numerical facet key and its intervals are not specified in the - search request, these percentiles will become - the bounds for its intervals and will be - returned in the response. If the facet key - intervals are specified in the request, then the - specified intervals will be returned instead. + search request, these percentiles become the + bounds for its intervals and are returned in the + response. If the facet key intervals are + specified in the request, then the specified + intervals are returned instead. restricted_values (MutableSequence[str]): Only get facet for the given restricted values. For example, when using "pickupInStore" as key and set restricted values @@ -552,17 +549,16 @@ class FacetKey(proto.Message): "categories" has three values "Women > Shoe", "Women > Dress" and "Men > Shoe". If set "prefixes" to "Women", the "categories" facet - will give only "Women > Shoe" and "Women > - Dress". Only supported on textual fields. - Maximum is 10. + gives only "Women > Shoe" and "Women > Dress". + Only supported on textual fields. Maximum is 10. contains (MutableSequence[str]): Only get facet values that contains the given strings. For example, suppose "categories" has three values "Women > Shoe", "Women > Dress" and "Men > Shoe". If set "contains" to "Shoe", the - "categories" facet will give only "Women > Shoe" - and "Men > Shoe". Only supported on textual - fields. Maximum is 10. + "categories" facet gives only "Women > Shoe" and + "Men > Shoe". Only supported on textual fields. + Maximum is 10. case_insensitive (bool): True to make facet keys case insensitive when getting faceting values with prefixes or @@ -591,7 +587,7 @@ class FacetKey(proto.Message): [FacetSpec.FacetKey.restricted_values][google.cloud.retail.v2beta.SearchRequest.FacetSpec.FacetKey.restricted_values]. query (str): The query that is used to compute facet for the given facet - key. When provided, it will override the default behavior of + key. When provided, it overrides the default behavior of facet computation. The query syntax is the same as a filter expression. See [SearchRequest.filter][google.cloud.retail.v2beta.SearchRequest.filter] @@ -602,9 +598,9 @@ class FacetKey(proto.Message): In the response, [SearchResponse.Facet.values.value][google.cloud.retail.v2beta.SearchResponse.Facet.FacetValue.value] - will be always "1" and + is always "1" and [SearchResponse.Facet.values.count][google.cloud.retail.v2beta.SearchResponse.Facet.FacetValue.count] - will be the number of results that match the query. + is the number of results that match the query. For example, you can set a customized facet for "shipToStore", where @@ -612,7 +608,7 @@ class FacetKey(proto.Message): is "customizedShipToStore", and [FacetKey.query][google.cloud.retail.v2beta.SearchRequest.FacetSpec.FacetKey.query] is "availability: ANY("IN_STOCK") AND shipToStore: - ANY("123")". Then the facet will count the products that are + ANY("123")". Then the facet counts the products that are both in stock and ship to store "123". return_min_max (bool): Returns the min and max value for each @@ -1371,7 +1367,7 @@ def raw_page(self): class ExperimentInfo(proto.Message): - r"""Metadata for active A/B testing [Experiments][]. + r"""Metadata for active A/B testing [Experiment][]. .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields @@ -1400,8 +1396,8 @@ class ServingConfigExperiment(proto.Message): ``projects/*/locations/*/catalogs/*/servingConfigs/*``. experiment_serving_config (str): The fully qualified resource name of the serving config - [VariantArm.serving_config_id][] responsible for generating - the search response. For example: + [Experiment.VariantArm.serving_config_id][] responsible for + generating the search response. For example: ``projects/*/locations/*/catalogs/*/servingConfigs/*``. """ diff --git a/packages/google-cloud-retail/google/cloud/retail_v2beta/types/serving_config.py b/packages/google-cloud-retail/google/cloud/retail_v2beta/types/serving_config.py index 80bc0c6423c6..f50be79acc7a 100644 --- a/packages/google-cloud-retail/google/cloud/retail_v2beta/types/serving_config.py +++ b/packages/google-cloud-retail/google/cloud/retail_v2beta/types/serving_config.py @@ -235,6 +235,10 @@ class ServingConfig(proto.Message): [solution_types][google.cloud.retail.v2beta.ServingConfig.solution_types] is [SOLUTION_TYPE_RECOMMENDATION][google.cloud.retail.v2main.SolutionType.SOLUTION_TYPE_RECOMMENDATION]. + ignore_recs_denylist (bool): + When the flag is enabled, the products in the + denylist will not be filtered out in the + recommendation filtering results. personalization_spec (google.cloud.retail_v2beta.types.SearchRequest.PersonalizationSpec): The specification for personalization spec. @@ -343,6 +347,10 @@ class DiversityType(proto.Enum): proto.STRING, number=16, ) + ignore_recs_denylist: bool = proto.Field( + proto.BOOL, + number=24, + ) personalization_spec: search_service.SearchRequest.PersonalizationSpec = ( proto.Field( proto.MESSAGE, diff --git a/packages/google-cloud-retail/google/cloud/retail_v2beta/types/user_event.py b/packages/google-cloud-retail/google/cloud/retail_v2beta/types/user_event.py index 7b821e893111..043e8b1cb440 100644 --- a/packages/google-cloud-retail/google/cloud/retail_v2beta/types/user_event.py +++ b/packages/google-cloud-retail/google/cloud/retail_v2beta/types/user_event.py @@ -44,6 +44,7 @@ class UserEvent(proto.Message): Required. User event type. Allowed values are: - ``add-to-cart``: Products being added to cart. + - ``remove-from-cart``: Products being removed from cart. - ``category-page-view``: Special pages such as sale or promotion pages viewed. - ``detail-page-view``: Products detail page viewed. @@ -295,8 +296,8 @@ class UserEvent(proto.Message): The entity for customers that may run multiple different entities, domains, sites or regions, for example, ``Google US``, ``Google Ads``, ``Waymo``, ``google.com``, - ``youtube.com``, etc. It is recommended to set this field to - get better per-entity search, completion and prediction + ``youtube.com``, etc. We recommend that you set this field + to get better per-entity search, completion, and prediction results. 
""" diff --git a/packages/google-maps-mapsplatformdatasets/samples/generated_samples/mapsplatformdatasets_v1alpha_generated_maps_platform_datasets_v1_alpha_create_dataset_sync.py b/packages/google-cloud-retail/samples/generated_samples/retail_v2_generated_product_service_purge_products_async.py similarity index 71% rename from packages/google-maps-mapsplatformdatasets/samples/generated_samples/mapsplatformdatasets_v1alpha_generated_maps_platform_datasets_v1_alpha_create_dataset_sync.py rename to packages/google-cloud-retail/samples/generated_samples/retail_v2_generated_product_service_purge_products_async.py index 8906d65cd900..32e4f920a275 100644 --- a/packages/google-maps-mapsplatformdatasets/samples/generated_samples/mapsplatformdatasets_v1alpha_generated_maps_platform_datasets_v1_alpha_create_dataset_sync.py +++ b/packages/google-cloud-retail/samples/generated_samples/retail_v2_generated_product_service_purge_products_async.py @@ -15,15 +15,15 @@ # # Generated code. DO NOT EDIT! # -# Snippet for CreateDataset +# Snippet for PurgeProducts # NOTE: This snippet has been automatically generated for illustrative purposes only. # It may require modifications to work in your environment. # To install the latest published package dependency, execute the following: -# python3 -m pip install google-maps-mapsplatformdatasets +# python3 -m pip install google-cloud-retail -# [START mapsplatformdatasets_v1alpha_generated_MapsPlatformDatasetsV1Alpha_CreateDataset_sync] +# [START retail_v2_generated_ProductService_PurgeProducts_async] # This snippet has been automatically generated and should be regarded as a # code template only. # It will require modifications to work: @@ -31,22 +31,27 @@ # - It may require specifying regional endpoints when creating the service # client as shown in: # https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.maps import mapsplatformdatasets_v1alpha +from google.cloud import retail_v2 -def sample_create_dataset(): +async def sample_purge_products(): # Create a client - client = mapsplatformdatasets_v1alpha.MapsPlatformDatasetsV1AlphaClient() + client = retail_v2.ProductServiceAsyncClient() # Initialize request argument(s) - request = mapsplatformdatasets_v1alpha.CreateDatasetRequest( + request = retail_v2.PurgeProductsRequest( parent="parent_value", + filter="filter_value", ) # Make the request - response = client.create_dataset(request=request) + operation = client.purge_products(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() # Handle the response print(response) -# [END mapsplatformdatasets_v1alpha_generated_MapsPlatformDatasetsV1Alpha_CreateDataset_sync] +# [END retail_v2_generated_ProductService_PurgeProducts_async] diff --git a/packages/google-maps-mapsplatformdatasets/samples/generated_samples/mapsplatformdatasets_v1alpha_generated_maps_platform_datasets_v1_alpha_create_dataset_async.py b/packages/google-cloud-retail/samples/generated_samples/retail_v2_generated_product_service_purge_products_sync.py similarity index 71% rename from packages/google-maps-mapsplatformdatasets/samples/generated_samples/mapsplatformdatasets_v1alpha_generated_maps_platform_datasets_v1_alpha_create_dataset_async.py rename to packages/google-cloud-retail/samples/generated_samples/retail_v2_generated_product_service_purge_products_sync.py index cb6bd2c2d7ca..b4f86f516e64 100644 --- 
a/packages/google-maps-mapsplatformdatasets/samples/generated_samples/mapsplatformdatasets_v1alpha_generated_maps_platform_datasets_v1_alpha_create_dataset_async.py +++ b/packages/google-cloud-retail/samples/generated_samples/retail_v2_generated_product_service_purge_products_sync.py @@ -15,15 +15,15 @@ # # Generated code. DO NOT EDIT! # -# Snippet for CreateDataset +# Snippet for PurgeProducts # NOTE: This snippet has been automatically generated for illustrative purposes only. # It may require modifications to work in your environment. # To install the latest published package dependency, execute the following: -# python3 -m pip install google-maps-mapsplatformdatasets +# python3 -m pip install google-cloud-retail -# [START mapsplatformdatasets_v1alpha_generated_MapsPlatformDatasetsV1Alpha_CreateDataset_async] +# [START retail_v2_generated_ProductService_PurgeProducts_sync] # This snippet has been automatically generated and should be regarded as a # code template only. # It will require modifications to work: @@ -31,22 +31,27 @@ # - It may require specifying regional endpoints when creating the service # client as shown in: # https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.maps import mapsplatformdatasets_v1alpha +from google.cloud import retail_v2 -async def sample_create_dataset(): +def sample_purge_products(): # Create a client - client = mapsplatformdatasets_v1alpha.MapsPlatformDatasetsV1AlphaAsyncClient() + client = retail_v2.ProductServiceClient() # Initialize request argument(s) - request = mapsplatformdatasets_v1alpha.CreateDatasetRequest( + request = retail_v2.PurgeProductsRequest( parent="parent_value", + filter="filter_value", ) # Make the request - response = await client.create_dataset(request=request) + operation = client.purge_products(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() # Handle the response print(response) -# [END mapsplatformdatasets_v1alpha_generated_MapsPlatformDatasetsV1Alpha_CreateDataset_async] +# [END retail_v2_generated_ProductService_PurgeProducts_sync] diff --git a/packages/google-cloud-retail/samples/generated_samples/retail_v2alpha_generated_branch_service_get_branch_async.py b/packages/google-cloud-retail/samples/generated_samples/retail_v2alpha_generated_branch_service_get_branch_async.py new file mode 100644 index 000000000000..45cb39c36750 --- /dev/null +++ b/packages/google-cloud-retail/samples/generated_samples/retail_v2alpha_generated_branch_service_get_branch_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetBranch +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-retail + + +# [START retail_v2alpha_generated_BranchService_GetBranch_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import retail_v2alpha + + +async def sample_get_branch(): + # Create a client + client = retail_v2alpha.BranchServiceAsyncClient() + + # Initialize request argument(s) + request = retail_v2alpha.GetBranchRequest( + name="name_value", + ) + + # Make the request + response = await client.get_branch(request=request) + + # Handle the response + print(response) + +# [END retail_v2alpha_generated_BranchService_GetBranch_async] diff --git a/packages/google-cloud-retail/samples/generated_samples/retail_v2alpha_generated_branch_service_get_branch_sync.py b/packages/google-cloud-retail/samples/generated_samples/retail_v2alpha_generated_branch_service_get_branch_sync.py new file mode 100644 index 000000000000..58a9b2a16561 --- /dev/null +++ b/packages/google-cloud-retail/samples/generated_samples/retail_v2alpha_generated_branch_service_get_branch_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetBranch +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-retail + + +# [START retail_v2alpha_generated_BranchService_GetBranch_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import retail_v2alpha + + +def sample_get_branch(): + # Create a client + client = retail_v2alpha.BranchServiceClient() + + # Initialize request argument(s) + request = retail_v2alpha.GetBranchRequest( + name="name_value", + ) + + # Make the request + response = client.get_branch(request=request) + + # Handle the response + print(response) + +# [END retail_v2alpha_generated_BranchService_GetBranch_sync] diff --git a/packages/google-cloud-retail/samples/generated_samples/retail_v2alpha_generated_branch_service_list_branches_async.py b/packages/google-cloud-retail/samples/generated_samples/retail_v2alpha_generated_branch_service_list_branches_async.py new file mode 100644 index 000000000000..b42552f68786 --- /dev/null +++ b/packages/google-cloud-retail/samples/generated_samples/retail_v2alpha_generated_branch_service_list_branches_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListBranches +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-retail + + +# [START retail_v2alpha_generated_BranchService_ListBranches_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import retail_v2alpha + + +async def sample_list_branches(): + # Create a client + client = retail_v2alpha.BranchServiceAsyncClient() + + # Initialize request argument(s) + request = retail_v2alpha.ListBranchesRequest( + parent="parent_value", + ) + + # Make the request + response = await client.list_branches(request=request) + + # Handle the response + print(response) + +# [END retail_v2alpha_generated_BranchService_ListBranches_async] diff --git a/packages/google-cloud-retail/samples/generated_samples/retail_v2alpha_generated_branch_service_list_branches_sync.py b/packages/google-cloud-retail/samples/generated_samples/retail_v2alpha_generated_branch_service_list_branches_sync.py new file mode 100644 index 000000000000..2924b44e0427 --- /dev/null +++ b/packages/google-cloud-retail/samples/generated_samples/retail_v2alpha_generated_branch_service_list_branches_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListBranches +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-retail + + +# [START retail_v2alpha_generated_BranchService_ListBranches_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import retail_v2alpha + + +def sample_list_branches(): + # Create a client + client = retail_v2alpha.BranchServiceClient() + + # Initialize request argument(s) + request = retail_v2alpha.ListBranchesRequest( + parent="parent_value", + ) + + # Make the request + response = client.list_branches(request=request) + + # Handle the response + print(response) + +# [END retail_v2alpha_generated_BranchService_ListBranches_sync] diff --git a/packages/google-cloud-retail/samples/generated_samples/retail_v2alpha_generated_project_service_accept_terms_async.py b/packages/google-cloud-retail/samples/generated_samples/retail_v2alpha_generated_project_service_accept_terms_async.py new file mode 100644 index 000000000000..b01fcece7e54 --- /dev/null +++ b/packages/google-cloud-retail/samples/generated_samples/retail_v2alpha_generated_project_service_accept_terms_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for AcceptTerms +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-retail + + +# [START retail_v2alpha_generated_ProjectService_AcceptTerms_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import retail_v2alpha + + +async def sample_accept_terms(): + # Create a client + client = retail_v2alpha.ProjectServiceAsyncClient() + + # Initialize request argument(s) + request = retail_v2alpha.AcceptTermsRequest( + project="project_value", + ) + + # Make the request + response = await client.accept_terms(request=request) + + # Handle the response + print(response) + +# [END retail_v2alpha_generated_ProjectService_AcceptTerms_async] diff --git a/packages/google-cloud-retail/samples/generated_samples/retail_v2alpha_generated_project_service_accept_terms_sync.py b/packages/google-cloud-retail/samples/generated_samples/retail_v2alpha_generated_project_service_accept_terms_sync.py new file mode 100644 index 000000000000..52cc29b36dcf --- /dev/null +++ b/packages/google-cloud-retail/samples/generated_samples/retail_v2alpha_generated_project_service_accept_terms_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for AcceptTerms +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-retail + + +# [START retail_v2alpha_generated_ProjectService_AcceptTerms_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import retail_v2alpha + + +def sample_accept_terms(): + # Create a client + client = retail_v2alpha.ProjectServiceClient() + + # Initialize request argument(s) + request = retail_v2alpha.AcceptTermsRequest( + project="project_value", + ) + + # Make the request + response = client.accept_terms(request=request) + + # Handle the response + print(response) + +# [END retail_v2alpha_generated_ProjectService_AcceptTerms_sync] diff --git a/packages/google-cloud-retail/samples/generated_samples/retail_v2alpha_generated_project_service_enroll_solution_async.py b/packages/google-cloud-retail/samples/generated_samples/retail_v2alpha_generated_project_service_enroll_solution_async.py new file mode 100644 index 000000000000..c3d517a04276 --- /dev/null +++ b/packages/google-cloud-retail/samples/generated_samples/retail_v2alpha_generated_project_service_enroll_solution_async.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for EnrollSolution +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-retail + + +# [START retail_v2alpha_generated_ProjectService_EnrollSolution_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import retail_v2alpha + + +async def sample_enroll_solution(): + # Create a client + client = retail_v2alpha.ProjectServiceAsyncClient() + + # Initialize request argument(s) + request = retail_v2alpha.EnrollSolutionRequest( + project="project_value", + solution="SOLUTION_TYPE_SEARCH", + ) + + # Make the request + operation = client.enroll_solution(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END retail_v2alpha_generated_ProjectService_EnrollSolution_async] diff --git a/packages/google-cloud-retail/samples/generated_samples/retail_v2alpha_generated_project_service_enroll_solution_sync.py b/packages/google-cloud-retail/samples/generated_samples/retail_v2alpha_generated_project_service_enroll_solution_sync.py new file mode 100644 index 000000000000..5c6ddc3949a4 --- /dev/null +++ b/packages/google-cloud-retail/samples/generated_samples/retail_v2alpha_generated_project_service_enroll_solution_sync.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for EnrollSolution +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-retail + + +# [START retail_v2alpha_generated_ProjectService_EnrollSolution_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import retail_v2alpha + + +def sample_enroll_solution(): + # Create a client + client = retail_v2alpha.ProjectServiceClient() + + # Initialize request argument(s) + request = retail_v2alpha.EnrollSolutionRequest( + project="project_value", + solution="SOLUTION_TYPE_SEARCH", + ) + + # Make the request + operation = client.enroll_solution(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END retail_v2alpha_generated_ProjectService_EnrollSolution_sync] diff --git a/packages/google-cloud-retail/samples/generated_samples/retail_v2alpha_generated_project_service_get_alert_config_async.py b/packages/google-cloud-retail/samples/generated_samples/retail_v2alpha_generated_project_service_get_alert_config_async.py new file mode 100644 index 000000000000..530a9b2051af --- /dev/null +++ b/packages/google-cloud-retail/samples/generated_samples/retail_v2alpha_generated_project_service_get_alert_config_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetAlertConfig +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-retail + + +# [START retail_v2alpha_generated_ProjectService_GetAlertConfig_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import retail_v2alpha + + +async def sample_get_alert_config(): + # Create a client + client = retail_v2alpha.ProjectServiceAsyncClient() + + # Initialize request argument(s) + request = retail_v2alpha.GetAlertConfigRequest( + name="name_value", + ) + + # Make the request + response = await client.get_alert_config(request=request) + + # Handle the response + print(response) + +# [END retail_v2alpha_generated_ProjectService_GetAlertConfig_async] diff --git a/packages/google-cloud-retail/samples/generated_samples/retail_v2alpha_generated_project_service_get_alert_config_sync.py b/packages/google-cloud-retail/samples/generated_samples/retail_v2alpha_generated_project_service_get_alert_config_sync.py new file mode 100644 index 000000000000..edc5770c2cdb --- /dev/null +++ b/packages/google-cloud-retail/samples/generated_samples/retail_v2alpha_generated_project_service_get_alert_config_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetAlertConfig +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-retail + + +# [START retail_v2alpha_generated_ProjectService_GetAlertConfig_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import retail_v2alpha + + +def sample_get_alert_config(): + # Create a client + client = retail_v2alpha.ProjectServiceClient() + + # Initialize request argument(s) + request = retail_v2alpha.GetAlertConfigRequest( + name="name_value", + ) + + # Make the request + response = client.get_alert_config(request=request) + + # Handle the response + print(response) + +# [END retail_v2alpha_generated_ProjectService_GetAlertConfig_sync] diff --git a/packages/google-cloud-retail/samples/generated_samples/retail_v2alpha_generated_project_service_get_logging_config_async.py b/packages/google-cloud-retail/samples/generated_samples/retail_v2alpha_generated_project_service_get_logging_config_async.py new file mode 100644 index 000000000000..826e45098361 --- /dev/null +++ b/packages/google-cloud-retail/samples/generated_samples/retail_v2alpha_generated_project_service_get_logging_config_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetLoggingConfig +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-retail + + +# [START retail_v2alpha_generated_ProjectService_GetLoggingConfig_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import retail_v2alpha + + +async def sample_get_logging_config(): + # Create a client + client = retail_v2alpha.ProjectServiceAsyncClient() + + # Initialize request argument(s) + request = retail_v2alpha.GetLoggingConfigRequest( + name="name_value", + ) + + # Make the request + response = await client.get_logging_config(request=request) + + # Handle the response + print(response) + +# [END retail_v2alpha_generated_ProjectService_GetLoggingConfig_async] diff --git a/packages/google-cloud-retail/samples/generated_samples/retail_v2alpha_generated_project_service_get_logging_config_sync.py b/packages/google-cloud-retail/samples/generated_samples/retail_v2alpha_generated_project_service_get_logging_config_sync.py new file mode 100644 index 000000000000..79bd937304f0 --- /dev/null +++ b/packages/google-cloud-retail/samples/generated_samples/retail_v2alpha_generated_project_service_get_logging_config_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetLoggingConfig +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-retail + + +# [START retail_v2alpha_generated_ProjectService_GetLoggingConfig_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import retail_v2alpha + + +def sample_get_logging_config(): + # Create a client + client = retail_v2alpha.ProjectServiceClient() + + # Initialize request argument(s) + request = retail_v2alpha.GetLoggingConfigRequest( + name="name_value", + ) + + # Make the request + response = client.get_logging_config(request=request) + + # Handle the response + print(response) + +# [END retail_v2alpha_generated_ProjectService_GetLoggingConfig_sync] diff --git a/packages/google-cloud-retail/samples/generated_samples/retail_v2alpha_generated_project_service_get_project_async.py b/packages/google-cloud-retail/samples/generated_samples/retail_v2alpha_generated_project_service_get_project_async.py new file mode 100644 index 000000000000..da9a884ca2f8 --- /dev/null +++ b/packages/google-cloud-retail/samples/generated_samples/retail_v2alpha_generated_project_service_get_project_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetProject +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-retail + + +# [START retail_v2alpha_generated_ProjectService_GetProject_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import retail_v2alpha + + +async def sample_get_project(): + # Create a client + client = retail_v2alpha.ProjectServiceAsyncClient() + + # Initialize request argument(s) + request = retail_v2alpha.GetProjectRequest( + name="name_value", + ) + + # Make the request + response = await client.get_project(request=request) + + # Handle the response + print(response) + +# [END retail_v2alpha_generated_ProjectService_GetProject_async] diff --git a/packages/google-cloud-retail/samples/generated_samples/retail_v2alpha_generated_project_service_get_project_sync.py b/packages/google-cloud-retail/samples/generated_samples/retail_v2alpha_generated_project_service_get_project_sync.py new file mode 100644 index 000000000000..a918ce691ab5 --- /dev/null +++ b/packages/google-cloud-retail/samples/generated_samples/retail_v2alpha_generated_project_service_get_project_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetProject +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-retail + + +# [START retail_v2alpha_generated_ProjectService_GetProject_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import retail_v2alpha + + +def sample_get_project(): + # Create a client + client = retail_v2alpha.ProjectServiceClient() + + # Initialize request argument(s) + request = retail_v2alpha.GetProjectRequest( + name="name_value", + ) + + # Make the request + response = client.get_project(request=request) + + # Handle the response + print(response) + +# [END retail_v2alpha_generated_ProjectService_GetProject_sync] diff --git a/packages/google-cloud-retail/samples/generated_samples/retail_v2alpha_generated_project_service_list_enrolled_solutions_async.py b/packages/google-cloud-retail/samples/generated_samples/retail_v2alpha_generated_project_service_list_enrolled_solutions_async.py new file mode 100644 index 000000000000..74a371efb17f --- /dev/null +++ b/packages/google-cloud-retail/samples/generated_samples/retail_v2alpha_generated_project_service_list_enrolled_solutions_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListEnrolledSolutions +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-retail + + +# [START retail_v2alpha_generated_ProjectService_ListEnrolledSolutions_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import retail_v2alpha + + +async def sample_list_enrolled_solutions(): + # Create a client + client = retail_v2alpha.ProjectServiceAsyncClient() + + # Initialize request argument(s) + request = retail_v2alpha.ListEnrolledSolutionsRequest( + parent="parent_value", + ) + + # Make the request + response = await client.list_enrolled_solutions(request=request) + + # Handle the response + print(response) + +# [END retail_v2alpha_generated_ProjectService_ListEnrolledSolutions_async] diff --git a/packages/google-cloud-retail/samples/generated_samples/retail_v2alpha_generated_project_service_list_enrolled_solutions_sync.py b/packages/google-cloud-retail/samples/generated_samples/retail_v2alpha_generated_project_service_list_enrolled_solutions_sync.py new file mode 100644 index 000000000000..42b621c7f0e8 --- /dev/null +++ b/packages/google-cloud-retail/samples/generated_samples/retail_v2alpha_generated_project_service_list_enrolled_solutions_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListEnrolledSolutions +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-retail + + +# [START retail_v2alpha_generated_ProjectService_ListEnrolledSolutions_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import retail_v2alpha + + +def sample_list_enrolled_solutions(): + # Create a client + client = retail_v2alpha.ProjectServiceClient() + + # Initialize request argument(s) + request = retail_v2alpha.ListEnrolledSolutionsRequest( + parent="parent_value", + ) + + # Make the request + response = client.list_enrolled_solutions(request=request) + + # Handle the response + print(response) + +# [END retail_v2alpha_generated_ProjectService_ListEnrolledSolutions_sync] diff --git a/packages/google-cloud-retail/samples/generated_samples/retail_v2alpha_generated_project_service_update_alert_config_async.py b/packages/google-cloud-retail/samples/generated_samples/retail_v2alpha_generated_project_service_update_alert_config_async.py new file mode 100644 index 000000000000..fd101041e9ce --- /dev/null +++ b/packages/google-cloud-retail/samples/generated_samples/retail_v2alpha_generated_project_service_update_alert_config_async.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateAlertConfig +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-retail + + +# [START retail_v2alpha_generated_ProjectService_UpdateAlertConfig_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import retail_v2alpha + + +async def sample_update_alert_config(): + # Create a client + client = retail_v2alpha.ProjectServiceAsyncClient() + + # Initialize request argument(s) + alert_config = retail_v2alpha.AlertConfig() + alert_config.name = "name_value" + + request = retail_v2alpha.UpdateAlertConfigRequest( + alert_config=alert_config, + ) + + # Make the request + response = await client.update_alert_config(request=request) + + # Handle the response + print(response) + +# [END retail_v2alpha_generated_ProjectService_UpdateAlertConfig_async] diff --git a/packages/google-cloud-retail/samples/generated_samples/retail_v2alpha_generated_project_service_update_alert_config_sync.py b/packages/google-cloud-retail/samples/generated_samples/retail_v2alpha_generated_project_service_update_alert_config_sync.py new file mode 100644 index 000000000000..eceb0063e05c --- /dev/null +++ b/packages/google-cloud-retail/samples/generated_samples/retail_v2alpha_generated_project_service_update_alert_config_sync.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateAlertConfig +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-retail + + +# [START retail_v2alpha_generated_ProjectService_UpdateAlertConfig_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import retail_v2alpha + + +def sample_update_alert_config(): + # Create a client + client = retail_v2alpha.ProjectServiceClient() + + # Initialize request argument(s) + alert_config = retail_v2alpha.AlertConfig() + alert_config.name = "name_value" + + request = retail_v2alpha.UpdateAlertConfigRequest( + alert_config=alert_config, + ) + + # Make the request + response = client.update_alert_config(request=request) + + # Handle the response + print(response) + +# [END retail_v2alpha_generated_ProjectService_UpdateAlertConfig_sync] diff --git a/packages/google-cloud-retail/samples/generated_samples/retail_v2alpha_generated_project_service_update_logging_config_async.py b/packages/google-cloud-retail/samples/generated_samples/retail_v2alpha_generated_project_service_update_logging_config_async.py new file mode 100644 index 000000000000..6ad466d26e26 --- /dev/null +++ b/packages/google-cloud-retail/samples/generated_samples/retail_v2alpha_generated_project_service_update_logging_config_async.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateLoggingConfig +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-retail + + +# [START retail_v2alpha_generated_ProjectService_UpdateLoggingConfig_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import retail_v2alpha + + +async def sample_update_logging_config(): + # Create a client + client = retail_v2alpha.ProjectServiceAsyncClient() + + # Initialize request argument(s) + logging_config = retail_v2alpha.LoggingConfig() + logging_config.name = "name_value" + + request = retail_v2alpha.UpdateLoggingConfigRequest( + logging_config=logging_config, + ) + + # Make the request + response = await client.update_logging_config(request=request) + + # Handle the response + print(response) + +# [END retail_v2alpha_generated_ProjectService_UpdateLoggingConfig_async] diff --git a/packages/google-cloud-retail/samples/generated_samples/retail_v2alpha_generated_project_service_update_logging_config_sync.py b/packages/google-cloud-retail/samples/generated_samples/retail_v2alpha_generated_project_service_update_logging_config_sync.py new file mode 100644 index 000000000000..5ce9ba215610 --- /dev/null +++ b/packages/google-cloud-retail/samples/generated_samples/retail_v2alpha_generated_project_service_update_logging_config_sync.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateLoggingConfig +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-retail + + +# [START retail_v2alpha_generated_ProjectService_UpdateLoggingConfig_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import retail_v2alpha + + +def sample_update_logging_config(): + # Create a client + client = retail_v2alpha.ProjectServiceClient() + + # Initialize request argument(s) + logging_config = retail_v2alpha.LoggingConfig() + logging_config.name = "name_value" + + request = retail_v2alpha.UpdateLoggingConfigRequest( + logging_config=logging_config, + ) + + # Make the request + response = client.update_logging_config(request=request) + + # Handle the response + print(response) + +# [END retail_v2alpha_generated_ProjectService_UpdateLoggingConfig_sync] diff --git a/packages/google-cloud-retail/samples/generated_samples/retail_v2beta_generated_product_service_purge_products_async.py b/packages/google-cloud-retail/samples/generated_samples/retail_v2beta_generated_product_service_purge_products_async.py new file mode 100644 index 000000000000..74eb25535b49 --- /dev/null +++ b/packages/google-cloud-retail/samples/generated_samples/retail_v2beta_generated_product_service_purge_products_async.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for PurgeProducts +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-retail + + +# [START retail_v2beta_generated_ProductService_PurgeProducts_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import retail_v2beta + + +async def sample_purge_products(): + # Create a client + client = retail_v2beta.ProductServiceAsyncClient() + + # Initialize request argument(s) + request = retail_v2beta.PurgeProductsRequest( + parent="parent_value", + filter="filter_value", + ) + + # Make the request + operation = client.purge_products(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END retail_v2beta_generated_ProductService_PurgeProducts_async] diff --git a/packages/google-cloud-retail/samples/generated_samples/retail_v2beta_generated_product_service_purge_products_sync.py b/packages/google-cloud-retail/samples/generated_samples/retail_v2beta_generated_product_service_purge_products_sync.py new file mode 100644 index 000000000000..faace146193f --- /dev/null +++ b/packages/google-cloud-retail/samples/generated_samples/retail_v2beta_generated_product_service_purge_products_sync.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for PurgeProducts +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-retail + + +# [START retail_v2beta_generated_ProductService_PurgeProducts_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import retail_v2beta + + +def sample_purge_products(): + # Create a client + client = retail_v2beta.ProductServiceClient() + + # Initialize request argument(s) + request = retail_v2beta.PurgeProductsRequest( + parent="parent_value", + filter="filter_value", + ) + + # Make the request + operation = client.purge_products(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END retail_v2beta_generated_ProductService_PurgeProducts_sync] diff --git a/packages/google-cloud-retail/samples/generated_samples/snippet_metadata_google.cloud.retail.v2.json b/packages/google-cloud-retail/samples/generated_samples/snippet_metadata_google.cloud.retail.v2.json index bfc4c4d9a930..2a73956bd4a2 100644 --- a/packages/google-cloud-retail/samples/generated_samples/snippet_metadata_google.cloud.retail.v2.json +++ b/packages/google-cloud-retail/samples/generated_samples/snippet_metadata_google.cloud.retail.v2.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-retail", - "version": "0.1.0" + "version": "1.21.0" }, "snippets": [ { @@ -5638,6 +5638,159 @@ ], "title": "retail_v2_generated_product_service_list_products_sync.py" }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.retail_v2.ProductServiceAsyncClient", + "shortName": "ProductServiceAsyncClient" + }, + "fullName": "google.cloud.retail_v2.ProductServiceAsyncClient.purge_products", + "method": { + "fullName": "google.cloud.retail.v2.ProductService.PurgeProducts", + "service": { + "fullName": "google.cloud.retail.v2.ProductService", + "shortName": "ProductService" + }, + "shortName": "PurgeProducts" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.retail_v2.types.PurgeProductsRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "purge_products" + }, + "description": "Sample for PurgeProducts", + "file": "retail_v2_generated_product_service_purge_products_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "retail_v2_generated_ProductService_PurgeProducts_async", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" + } + ], + "title": "retail_v2_generated_product_service_purge_products_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.retail_v2.ProductServiceClient", + "shortName": "ProductServiceClient" + }, + "fullName": "google.cloud.retail_v2.ProductServiceClient.purge_products", + "method": { + "fullName": "google.cloud.retail.v2.ProductService.PurgeProducts", + "service": { + "fullName": "google.cloud.retail.v2.ProductService", + "shortName": "ProductService" + }, + "shortName": 
"PurgeProducts" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.retail_v2.types.PurgeProductsRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "purge_products" + }, + "description": "Sample for PurgeProducts", + "file": "retail_v2_generated_product_service_purge_products_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "retail_v2_generated_ProductService_PurgeProducts_sync", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" + } + ], + "title": "retail_v2_generated_product_service_purge_products_sync.py" + }, { "canonical": true, "clientMethod": { diff --git a/packages/google-cloud-retail/samples/generated_samples/snippet_metadata_google.cloud.retail.v2alpha.json b/packages/google-cloud-retail/samples/generated_samples/snippet_metadata_google.cloud.retail.v2alpha.json index aa123bb5763d..e1f1d61a2415 100644 --- a/packages/google-cloud-retail/samples/generated_samples/snippet_metadata_google.cloud.retail.v2alpha.json +++ b/packages/google-cloud-retail/samples/generated_samples/snippet_metadata_google.cloud.retail.v2alpha.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-retail", - "version": "0.1.0" + "version": "1.21.0" }, "snippets": [ { @@ -169,22 +169,26 @@ "clientMethod": { "async": true, "client": { - "fullName": "google.cloud.retail_v2alpha.CatalogServiceAsyncClient", - "shortName": "CatalogServiceAsyncClient" + "fullName": "google.cloud.retail_v2alpha.BranchServiceAsyncClient", + "shortName": "BranchServiceAsyncClient" }, - "fullName": "google.cloud.retail_v2alpha.CatalogServiceAsyncClient.add_catalog_attribute", + "fullName": "google.cloud.retail_v2alpha.BranchServiceAsyncClient.get_branch", "method": { - "fullName": "google.cloud.retail.v2alpha.CatalogService.AddCatalogAttribute", + "fullName": "google.cloud.retail.v2alpha.BranchService.GetBranch", "service": { - "fullName": "google.cloud.retail.v2alpha.CatalogService", - "shortName": "CatalogService" + "fullName": "google.cloud.retail.v2alpha.BranchService", + "shortName": "BranchService" }, - "shortName": "AddCatalogAttribute" + "shortName": "GetBranch" }, "parameters": [ { "name": "request", - "type": "google.cloud.retail_v2alpha.types.AddCatalogAttributeRequest" + "type": "google.cloud.retail_v2alpha.types.GetBranchRequest" + }, + { + "name": "name", + "type": "str" }, { "name": "retry", @@ -199,22 +203,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.retail_v2alpha.types.AttributesConfig", - "shortName": "add_catalog_attribute" + "resultType": "google.cloud.retail_v2alpha.types.Branch", + "shortName": "get_branch" }, - "description": "Sample for AddCatalogAttribute", - "file": "retail_v2alpha_generated_catalog_service_add_catalog_attribute_async.py", + "description": "Sample for GetBranch", + "file": "retail_v2alpha_generated_branch_service_get_branch_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": 
"retail_v2alpha_generated_CatalogService_AddCatalogAttribute_async", + "regionTag": "retail_v2alpha_generated_BranchService_GetBranch_async", "segments": [ { - "end": 55, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 55, + "end": 51, "start": 27, "type": "SHORT" }, @@ -224,43 +228,47 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 49, + "end": 45, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 52, - "start": 50, + "end": 48, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 56, - "start": 53, + "end": 52, + "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "retail_v2alpha_generated_catalog_service_add_catalog_attribute_async.py" + "title": "retail_v2alpha_generated_branch_service_get_branch_async.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.retail_v2alpha.CatalogServiceClient", - "shortName": "CatalogServiceClient" + "fullName": "google.cloud.retail_v2alpha.BranchServiceClient", + "shortName": "BranchServiceClient" }, - "fullName": "google.cloud.retail_v2alpha.CatalogServiceClient.add_catalog_attribute", + "fullName": "google.cloud.retail_v2alpha.BranchServiceClient.get_branch", "method": { - "fullName": "google.cloud.retail.v2alpha.CatalogService.AddCatalogAttribute", + "fullName": "google.cloud.retail.v2alpha.BranchService.GetBranch", "service": { - "fullName": "google.cloud.retail.v2alpha.CatalogService", - "shortName": "CatalogService" + "fullName": "google.cloud.retail.v2alpha.BranchService", + "shortName": "BranchService" }, - "shortName": "AddCatalogAttribute" + "shortName": "GetBranch" }, "parameters": [ { "name": "request", - "type": "google.cloud.retail_v2alpha.types.AddCatalogAttributeRequest" + "type": "google.cloud.retail_v2alpha.types.GetBranchRequest" + }, + { + "name": "name", + "type": "str" }, { "name": "retry", @@ -275,22 +283,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.retail_v2alpha.types.AttributesConfig", - "shortName": "add_catalog_attribute" + "resultType": "google.cloud.retail_v2alpha.types.Branch", + "shortName": "get_branch" }, - "description": "Sample for AddCatalogAttribute", - "file": "retail_v2alpha_generated_catalog_service_add_catalog_attribute_sync.py", + "description": "Sample for GetBranch", + "file": "retail_v2alpha_generated_branch_service_get_branch_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "retail_v2alpha_generated_CatalogService_AddCatalogAttribute_sync", + "regionTag": "retail_v2alpha_generated_BranchService_GetBranch_sync", "segments": [ { - "end": 55, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 55, + "end": 51, "start": 27, "type": "SHORT" }, @@ -300,44 +308,48 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 49, + "end": 45, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 52, - "start": 50, + "end": 48, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 56, - "start": 53, + "end": 52, + "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "retail_v2alpha_generated_catalog_service_add_catalog_attribute_sync.py" + "title": "retail_v2alpha_generated_branch_service_get_branch_sync.py" }, { "canonical": true, "clientMethod": { "async": true, "client": { - "fullName": "google.cloud.retail_v2alpha.CatalogServiceAsyncClient", - "shortName": "CatalogServiceAsyncClient" + "fullName": "google.cloud.retail_v2alpha.BranchServiceAsyncClient", + "shortName": "BranchServiceAsyncClient" }, - "fullName": 
"google.cloud.retail_v2alpha.CatalogServiceAsyncClient.batch_remove_catalog_attributes", + "fullName": "google.cloud.retail_v2alpha.BranchServiceAsyncClient.list_branches", "method": { - "fullName": "google.cloud.retail.v2alpha.CatalogService.BatchRemoveCatalogAttributes", + "fullName": "google.cloud.retail.v2alpha.BranchService.ListBranches", "service": { - "fullName": "google.cloud.retail.v2alpha.CatalogService", - "shortName": "CatalogService" + "fullName": "google.cloud.retail.v2alpha.BranchService", + "shortName": "BranchService" }, - "shortName": "BatchRemoveCatalogAttributes" + "shortName": "ListBranches" }, "parameters": [ { "name": "request", - "type": "google.cloud.retail_v2alpha.types.BatchRemoveCatalogAttributesRequest" + "type": "google.cloud.retail_v2alpha.types.ListBranchesRequest" + }, + { + "name": "parent", + "type": "str" }, { "name": "retry", @@ -352,22 +364,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.retail_v2alpha.types.BatchRemoveCatalogAttributesResponse", - "shortName": "batch_remove_catalog_attributes" + "resultType": "google.cloud.retail_v2alpha.types.ListBranchesResponse", + "shortName": "list_branches" }, - "description": "Sample for BatchRemoveCatalogAttributes", - "file": "retail_v2alpha_generated_catalog_service_batch_remove_catalog_attributes_async.py", + "description": "Sample for ListBranches", + "file": "retail_v2alpha_generated_branch_service_list_branches_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "retail_v2alpha_generated_CatalogService_BatchRemoveCatalogAttributes_async", + "regionTag": "retail_v2alpha_generated_BranchService_ListBranches_async", "segments": [ { - "end": 52, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 52, + "end": 51, "start": 27, "type": "SHORT" }, @@ -377,43 +389,47 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 46, + "end": 45, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 49, - "start": 47, + "end": 48, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 53, - "start": 50, + "end": 52, + "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "retail_v2alpha_generated_catalog_service_batch_remove_catalog_attributes_async.py" + "title": "retail_v2alpha_generated_branch_service_list_branches_async.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.retail_v2alpha.CatalogServiceClient", - "shortName": "CatalogServiceClient" + "fullName": "google.cloud.retail_v2alpha.BranchServiceClient", + "shortName": "BranchServiceClient" }, - "fullName": "google.cloud.retail_v2alpha.CatalogServiceClient.batch_remove_catalog_attributes", + "fullName": "google.cloud.retail_v2alpha.BranchServiceClient.list_branches", "method": { - "fullName": "google.cloud.retail.v2alpha.CatalogService.BatchRemoveCatalogAttributes", + "fullName": "google.cloud.retail.v2alpha.BranchService.ListBranches", "service": { - "fullName": "google.cloud.retail.v2alpha.CatalogService", - "shortName": "CatalogService" + "fullName": "google.cloud.retail.v2alpha.BranchService", + "shortName": "BranchService" }, - "shortName": "BatchRemoveCatalogAttributes" + "shortName": "ListBranches" }, "parameters": [ { "name": "request", - "type": "google.cloud.retail_v2alpha.types.BatchRemoveCatalogAttributesRequest" + "type": "google.cloud.retail_v2alpha.types.ListBranchesRequest" + }, + { + "name": "parent", + "type": "str" }, { "name": "retry", @@ -428,22 +444,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": 
"google.cloud.retail_v2alpha.types.BatchRemoveCatalogAttributesResponse", - "shortName": "batch_remove_catalog_attributes" + "resultType": "google.cloud.retail_v2alpha.types.ListBranchesResponse", + "shortName": "list_branches" }, - "description": "Sample for BatchRemoveCatalogAttributes", - "file": "retail_v2alpha_generated_catalog_service_batch_remove_catalog_attributes_sync.py", + "description": "Sample for ListBranches", + "file": "retail_v2alpha_generated_branch_service_list_branches_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "retail_v2alpha_generated_CatalogService_BatchRemoveCatalogAttributes_sync", + "regionTag": "retail_v2alpha_generated_BranchService_ListBranches_sync", "segments": [ { - "end": 52, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 52, + "end": 51, "start": 27, "type": "SHORT" }, @@ -453,22 +469,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 46, + "end": 45, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 49, - "start": 47, + "end": 48, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 53, - "start": 50, + "end": 52, + "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "retail_v2alpha_generated_catalog_service_batch_remove_catalog_attributes_sync.py" + "title": "retail_v2alpha_generated_branch_service_list_branches_sync.py" }, { "canonical": true, @@ -478,23 +494,19 @@ "fullName": "google.cloud.retail_v2alpha.CatalogServiceAsyncClient", "shortName": "CatalogServiceAsyncClient" }, - "fullName": "google.cloud.retail_v2alpha.CatalogServiceAsyncClient.get_attributes_config", + "fullName": "google.cloud.retail_v2alpha.CatalogServiceAsyncClient.add_catalog_attribute", "method": { - "fullName": "google.cloud.retail.v2alpha.CatalogService.GetAttributesConfig", + "fullName": "google.cloud.retail.v2alpha.CatalogService.AddCatalogAttribute", "service": { "fullName": "google.cloud.retail.v2alpha.CatalogService", "shortName": "CatalogService" }, - "shortName": "GetAttributesConfig" + "shortName": "AddCatalogAttribute" }, "parameters": [ { "name": "request", - "type": "google.cloud.retail_v2alpha.types.GetAttributesConfigRequest" - }, - { - "name": "name", - "type": "str" + "type": "google.cloud.retail_v2alpha.types.AddCatalogAttributeRequest" }, { "name": "retry", @@ -510,21 +522,21 @@ } ], "resultType": "google.cloud.retail_v2alpha.types.AttributesConfig", - "shortName": "get_attributes_config" + "shortName": "add_catalog_attribute" }, - "description": "Sample for GetAttributesConfig", - "file": "retail_v2alpha_generated_catalog_service_get_attributes_config_async.py", + "description": "Sample for AddCatalogAttribute", + "file": "retail_v2alpha_generated_catalog_service_add_catalog_attribute_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "retail_v2alpha_generated_CatalogService_GetAttributesConfig_async", + "regionTag": "retail_v2alpha_generated_CatalogService_AddCatalogAttribute_async", "segments": [ { - "end": 51, + "end": 55, "start": 27, "type": "FULL" }, { - "end": 51, + "end": 55, "start": 27, "type": "SHORT" }, @@ -534,22 +546,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 45, + "end": 49, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 48, - "start": 46, + "end": 52, + "start": 50, "type": "REQUEST_EXECUTION" }, { - "end": 52, - "start": 49, + "end": 56, + "start": 53, "type": "RESPONSE_HANDLING" } ], - "title": "retail_v2alpha_generated_catalog_service_get_attributes_config_async.py" + "title": 
"retail_v2alpha_generated_catalog_service_add_catalog_attribute_async.py" }, { "canonical": true, @@ -558,23 +570,19 @@ "fullName": "google.cloud.retail_v2alpha.CatalogServiceClient", "shortName": "CatalogServiceClient" }, - "fullName": "google.cloud.retail_v2alpha.CatalogServiceClient.get_attributes_config", + "fullName": "google.cloud.retail_v2alpha.CatalogServiceClient.add_catalog_attribute", "method": { - "fullName": "google.cloud.retail.v2alpha.CatalogService.GetAttributesConfig", + "fullName": "google.cloud.retail.v2alpha.CatalogService.AddCatalogAttribute", "service": { "fullName": "google.cloud.retail.v2alpha.CatalogService", "shortName": "CatalogService" }, - "shortName": "GetAttributesConfig" + "shortName": "AddCatalogAttribute" }, "parameters": [ { "name": "request", - "type": "google.cloud.retail_v2alpha.types.GetAttributesConfigRequest" - }, - { - "name": "name", - "type": "str" + "type": "google.cloud.retail_v2alpha.types.AddCatalogAttributeRequest" }, { "name": "retry", @@ -590,21 +598,21 @@ } ], "resultType": "google.cloud.retail_v2alpha.types.AttributesConfig", - "shortName": "get_attributes_config" + "shortName": "add_catalog_attribute" }, - "description": "Sample for GetAttributesConfig", - "file": "retail_v2alpha_generated_catalog_service_get_attributes_config_sync.py", + "description": "Sample for AddCatalogAttribute", + "file": "retail_v2alpha_generated_catalog_service_add_catalog_attribute_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "retail_v2alpha_generated_CatalogService_GetAttributesConfig_sync", + "regionTag": "retail_v2alpha_generated_CatalogService_AddCatalogAttribute_sync", "segments": [ { - "end": 51, + "end": 55, "start": 27, "type": "FULL" }, { - "end": 51, + "end": 55, "start": 27, "type": "SHORT" }, @@ -614,22 +622,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 45, + "end": 49, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 48, - "start": 46, + "end": 52, + "start": 50, "type": "REQUEST_EXECUTION" }, { - "end": 52, - "start": 49, + "end": 56, + "start": 53, "type": "RESPONSE_HANDLING" } ], - "title": "retail_v2alpha_generated_catalog_service_get_attributes_config_sync.py" + "title": "retail_v2alpha_generated_catalog_service_add_catalog_attribute_sync.py" }, { "canonical": true, @@ -639,23 +647,19 @@ "fullName": "google.cloud.retail_v2alpha.CatalogServiceAsyncClient", "shortName": "CatalogServiceAsyncClient" }, - "fullName": "google.cloud.retail_v2alpha.CatalogServiceAsyncClient.get_completion_config", + "fullName": "google.cloud.retail_v2alpha.CatalogServiceAsyncClient.batch_remove_catalog_attributes", "method": { - "fullName": "google.cloud.retail.v2alpha.CatalogService.GetCompletionConfig", + "fullName": "google.cloud.retail.v2alpha.CatalogService.BatchRemoveCatalogAttributes", "service": { "fullName": "google.cloud.retail.v2alpha.CatalogService", "shortName": "CatalogService" }, - "shortName": "GetCompletionConfig" + "shortName": "BatchRemoveCatalogAttributes" }, "parameters": [ { "name": "request", - "type": "google.cloud.retail_v2alpha.types.GetCompletionConfigRequest" - }, - { - "name": "name", - "type": "str" + "type": "google.cloud.retail_v2alpha.types.BatchRemoveCatalogAttributesRequest" }, { "name": "retry", @@ -670,22 +674,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.retail_v2alpha.types.CompletionConfig", - "shortName": "get_completion_config" + "resultType": "google.cloud.retail_v2alpha.types.BatchRemoveCatalogAttributesResponse", + "shortName": 
"batch_remove_catalog_attributes" }, - "description": "Sample for GetCompletionConfig", - "file": "retail_v2alpha_generated_catalog_service_get_completion_config_async.py", + "description": "Sample for BatchRemoveCatalogAttributes", + "file": "retail_v2alpha_generated_catalog_service_batch_remove_catalog_attributes_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "retail_v2alpha_generated_CatalogService_GetCompletionConfig_async", + "regionTag": "retail_v2alpha_generated_CatalogService_BatchRemoveCatalogAttributes_async", "segments": [ { - "end": 51, + "end": 52, "start": 27, "type": "FULL" }, { - "end": 51, + "end": 52, "start": 27, "type": "SHORT" }, @@ -695,22 +699,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 45, + "end": 46, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 48, - "start": 46, + "end": 49, + "start": 47, "type": "REQUEST_EXECUTION" }, { - "end": 52, - "start": 49, + "end": 53, + "start": 50, "type": "RESPONSE_HANDLING" } ], - "title": "retail_v2alpha_generated_catalog_service_get_completion_config_async.py" + "title": "retail_v2alpha_generated_catalog_service_batch_remove_catalog_attributes_async.py" }, { "canonical": true, @@ -719,23 +723,19 @@ "fullName": "google.cloud.retail_v2alpha.CatalogServiceClient", "shortName": "CatalogServiceClient" }, - "fullName": "google.cloud.retail_v2alpha.CatalogServiceClient.get_completion_config", + "fullName": "google.cloud.retail_v2alpha.CatalogServiceClient.batch_remove_catalog_attributes", "method": { - "fullName": "google.cloud.retail.v2alpha.CatalogService.GetCompletionConfig", + "fullName": "google.cloud.retail.v2alpha.CatalogService.BatchRemoveCatalogAttributes", "service": { "fullName": "google.cloud.retail.v2alpha.CatalogService", "shortName": "CatalogService" }, - "shortName": "GetCompletionConfig" + "shortName": "BatchRemoveCatalogAttributes" }, "parameters": [ { "name": "request", - "type": "google.cloud.retail_v2alpha.types.GetCompletionConfigRequest" - }, - { - "name": "name", - "type": "str" + "type": "google.cloud.retail_v2alpha.types.BatchRemoveCatalogAttributesRequest" }, { "name": "retry", @@ -750,22 +750,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.retail_v2alpha.types.CompletionConfig", - "shortName": "get_completion_config" + "resultType": "google.cloud.retail_v2alpha.types.BatchRemoveCatalogAttributesResponse", + "shortName": "batch_remove_catalog_attributes" }, - "description": "Sample for GetCompletionConfig", - "file": "retail_v2alpha_generated_catalog_service_get_completion_config_sync.py", + "description": "Sample for BatchRemoveCatalogAttributes", + "file": "retail_v2alpha_generated_catalog_service_batch_remove_catalog_attributes_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "retail_v2alpha_generated_CatalogService_GetCompletionConfig_sync", + "regionTag": "retail_v2alpha_generated_CatalogService_BatchRemoveCatalogAttributes_sync", "segments": [ { - "end": 51, + "end": 52, "start": 27, "type": "FULL" }, { - "end": 51, + "end": 52, "start": 27, "type": "SHORT" }, @@ -775,22 +775,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 45, + "end": 46, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 48, - "start": 46, + "end": 49, + "start": 47, "type": "REQUEST_EXECUTION" }, { - "end": 52, - "start": 49, + "end": 53, + "start": 50, "type": "RESPONSE_HANDLING" } ], - "title": "retail_v2alpha_generated_catalog_service_get_completion_config_sync.py" + "title": 
"retail_v2alpha_generated_catalog_service_batch_remove_catalog_attributes_sync.py" }, { "canonical": true, @@ -800,22 +800,22 @@ "fullName": "google.cloud.retail_v2alpha.CatalogServiceAsyncClient", "shortName": "CatalogServiceAsyncClient" }, - "fullName": "google.cloud.retail_v2alpha.CatalogServiceAsyncClient.get_default_branch", + "fullName": "google.cloud.retail_v2alpha.CatalogServiceAsyncClient.get_attributes_config", "method": { - "fullName": "google.cloud.retail.v2alpha.CatalogService.GetDefaultBranch", + "fullName": "google.cloud.retail.v2alpha.CatalogService.GetAttributesConfig", "service": { "fullName": "google.cloud.retail.v2alpha.CatalogService", "shortName": "CatalogService" }, - "shortName": "GetDefaultBranch" + "shortName": "GetAttributesConfig" }, "parameters": [ { "name": "request", - "type": "google.cloud.retail_v2alpha.types.GetDefaultBranchRequest" + "type": "google.cloud.retail_v2alpha.types.GetAttributesConfigRequest" }, { - "name": "catalog", + "name": "name", "type": "str" }, { @@ -831,22 +831,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.retail_v2alpha.types.GetDefaultBranchResponse", - "shortName": "get_default_branch" + "resultType": "google.cloud.retail_v2alpha.types.AttributesConfig", + "shortName": "get_attributes_config" }, - "description": "Sample for GetDefaultBranch", - "file": "retail_v2alpha_generated_catalog_service_get_default_branch_async.py", + "description": "Sample for GetAttributesConfig", + "file": "retail_v2alpha_generated_catalog_service_get_attributes_config_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "retail_v2alpha_generated_CatalogService_GetDefaultBranch_async", + "regionTag": "retail_v2alpha_generated_CatalogService_GetAttributesConfig_async", "segments": [ { - "end": 50, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 50, + "end": 51, "start": 27, "type": "SHORT" }, @@ -856,22 +856,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 44, + "end": 45, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 47, - "start": 45, + "end": 48, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 51, - "start": 48, + "end": 52, + "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "retail_v2alpha_generated_catalog_service_get_default_branch_async.py" + "title": "retail_v2alpha_generated_catalog_service_get_attributes_config_async.py" }, { "canonical": true, @@ -880,22 +880,22 @@ "fullName": "google.cloud.retail_v2alpha.CatalogServiceClient", "shortName": "CatalogServiceClient" }, - "fullName": "google.cloud.retail_v2alpha.CatalogServiceClient.get_default_branch", + "fullName": "google.cloud.retail_v2alpha.CatalogServiceClient.get_attributes_config", "method": { - "fullName": "google.cloud.retail.v2alpha.CatalogService.GetDefaultBranch", + "fullName": "google.cloud.retail.v2alpha.CatalogService.GetAttributesConfig", "service": { "fullName": "google.cloud.retail.v2alpha.CatalogService", "shortName": "CatalogService" }, - "shortName": "GetDefaultBranch" + "shortName": "GetAttributesConfig" }, "parameters": [ { "name": "request", - "type": "google.cloud.retail_v2alpha.types.GetDefaultBranchRequest" + "type": "google.cloud.retail_v2alpha.types.GetAttributesConfigRequest" }, { - "name": "catalog", + "name": "name", "type": "str" }, { @@ -911,22 +911,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.retail_v2alpha.types.GetDefaultBranchResponse", - "shortName": "get_default_branch" + "resultType": 
"google.cloud.retail_v2alpha.types.AttributesConfig", + "shortName": "get_attributes_config" }, - "description": "Sample for GetDefaultBranch", - "file": "retail_v2alpha_generated_catalog_service_get_default_branch_sync.py", + "description": "Sample for GetAttributesConfig", + "file": "retail_v2alpha_generated_catalog_service_get_attributes_config_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "retail_v2alpha_generated_CatalogService_GetDefaultBranch_sync", + "regionTag": "retail_v2alpha_generated_CatalogService_GetAttributesConfig_sync", "segments": [ { - "end": 50, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 50, + "end": 51, "start": 27, "type": "SHORT" }, @@ -936,22 +936,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 44, + "end": 45, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 47, - "start": 45, + "end": 48, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 51, - "start": 48, + "end": 52, + "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "retail_v2alpha_generated_catalog_service_get_default_branch_sync.py" + "title": "retail_v2alpha_generated_catalog_service_get_attributes_config_sync.py" }, { "canonical": true, @@ -961,22 +961,22 @@ "fullName": "google.cloud.retail_v2alpha.CatalogServiceAsyncClient", "shortName": "CatalogServiceAsyncClient" }, - "fullName": "google.cloud.retail_v2alpha.CatalogServiceAsyncClient.list_catalogs", + "fullName": "google.cloud.retail_v2alpha.CatalogServiceAsyncClient.get_completion_config", "method": { - "fullName": "google.cloud.retail.v2alpha.CatalogService.ListCatalogs", + "fullName": "google.cloud.retail.v2alpha.CatalogService.GetCompletionConfig", "service": { "fullName": "google.cloud.retail.v2alpha.CatalogService", "shortName": "CatalogService" }, - "shortName": "ListCatalogs" + "shortName": "GetCompletionConfig" }, "parameters": [ { "name": "request", - "type": "google.cloud.retail_v2alpha.types.ListCatalogsRequest" + "type": "google.cloud.retail_v2alpha.types.GetCompletionConfigRequest" }, { - "name": "parent", + "name": "name", "type": "str" }, { @@ -992,22 +992,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.retail_v2alpha.services.catalog_service.pagers.ListCatalogsAsyncPager", - "shortName": "list_catalogs" + "resultType": "google.cloud.retail_v2alpha.types.CompletionConfig", + "shortName": "get_completion_config" }, - "description": "Sample for ListCatalogs", - "file": "retail_v2alpha_generated_catalog_service_list_catalogs_async.py", + "description": "Sample for GetCompletionConfig", + "file": "retail_v2alpha_generated_catalog_service_get_completion_config_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "retail_v2alpha_generated_CatalogService_ListCatalogs_async", + "regionTag": "retail_v2alpha_generated_CatalogService_GetCompletionConfig_async", "segments": [ { - "end": 52, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 52, + "end": 51, "start": 27, "type": "SHORT" }, @@ -1027,12 +1027,12 @@ "type": "REQUEST_EXECUTION" }, { - "end": 53, + "end": 52, "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "retail_v2alpha_generated_catalog_service_list_catalogs_async.py" + "title": "retail_v2alpha_generated_catalog_service_get_completion_config_async.py" }, { "canonical": true, @@ -1041,22 +1041,22 @@ "fullName": "google.cloud.retail_v2alpha.CatalogServiceClient", "shortName": "CatalogServiceClient" }, - "fullName": "google.cloud.retail_v2alpha.CatalogServiceClient.list_catalogs", + "fullName": 
"google.cloud.retail_v2alpha.CatalogServiceClient.get_completion_config", "method": { - "fullName": "google.cloud.retail.v2alpha.CatalogService.ListCatalogs", + "fullName": "google.cloud.retail.v2alpha.CatalogService.GetCompletionConfig", "service": { "fullName": "google.cloud.retail.v2alpha.CatalogService", "shortName": "CatalogService" }, - "shortName": "ListCatalogs" + "shortName": "GetCompletionConfig" }, "parameters": [ { "name": "request", - "type": "google.cloud.retail_v2alpha.types.ListCatalogsRequest" + "type": "google.cloud.retail_v2alpha.types.GetCompletionConfigRequest" }, { - "name": "parent", + "name": "name", "type": "str" }, { @@ -1072,22 +1072,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.retail_v2alpha.services.catalog_service.pagers.ListCatalogsPager", - "shortName": "list_catalogs" + "resultType": "google.cloud.retail_v2alpha.types.CompletionConfig", + "shortName": "get_completion_config" }, - "description": "Sample for ListCatalogs", - "file": "retail_v2alpha_generated_catalog_service_list_catalogs_sync.py", + "description": "Sample for GetCompletionConfig", + "file": "retail_v2alpha_generated_catalog_service_get_completion_config_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "retail_v2alpha_generated_CatalogService_ListCatalogs_sync", + "regionTag": "retail_v2alpha_generated_CatalogService_GetCompletionConfig_sync", "segments": [ { - "end": 52, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 52, + "end": 51, "start": 27, "type": "SHORT" }, @@ -1107,12 +1107,12 @@ "type": "REQUEST_EXECUTION" }, { - "end": 53, + "end": 52, "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "retail_v2alpha_generated_catalog_service_list_catalogs_sync.py" + "title": "retail_v2alpha_generated_catalog_service_get_completion_config_sync.py" }, { "canonical": true, @@ -1122,19 +1122,23 @@ "fullName": "google.cloud.retail_v2alpha.CatalogServiceAsyncClient", "shortName": "CatalogServiceAsyncClient" }, - "fullName": "google.cloud.retail_v2alpha.CatalogServiceAsyncClient.remove_catalog_attribute", + "fullName": "google.cloud.retail_v2alpha.CatalogServiceAsyncClient.get_default_branch", "method": { - "fullName": "google.cloud.retail.v2alpha.CatalogService.RemoveCatalogAttribute", + "fullName": "google.cloud.retail.v2alpha.CatalogService.GetDefaultBranch", "service": { "fullName": "google.cloud.retail.v2alpha.CatalogService", "shortName": "CatalogService" }, - "shortName": "RemoveCatalogAttribute" + "shortName": "GetDefaultBranch" }, "parameters": [ { "name": "request", - "type": "google.cloud.retail_v2alpha.types.RemoveCatalogAttributeRequest" + "type": "google.cloud.retail_v2alpha.types.GetDefaultBranchRequest" + }, + { + "name": "catalog", + "type": "str" }, { "name": "retry", @@ -1149,22 +1153,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.retail_v2alpha.types.AttributesConfig", - "shortName": "remove_catalog_attribute" + "resultType": "google.cloud.retail_v2alpha.types.GetDefaultBranchResponse", + "shortName": "get_default_branch" }, - "description": "Sample for RemoveCatalogAttribute", - "file": "retail_v2alpha_generated_catalog_service_remove_catalog_attribute_async.py", + "description": "Sample for GetDefaultBranch", + "file": "retail_v2alpha_generated_catalog_service_get_default_branch_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "retail_v2alpha_generated_CatalogService_RemoveCatalogAttribute_async", + "regionTag": 
"retail_v2alpha_generated_CatalogService_GetDefaultBranch_async", "segments": [ { - "end": 52, + "end": 50, "start": 27, "type": "FULL" }, { - "end": 52, + "end": 50, "start": 27, "type": "SHORT" }, @@ -1174,22 +1178,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 46, + "end": 44, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 49, - "start": 47, + "end": 47, + "start": 45, "type": "REQUEST_EXECUTION" }, { - "end": 53, - "start": 50, + "end": 51, + "start": 48, "type": "RESPONSE_HANDLING" } ], - "title": "retail_v2alpha_generated_catalog_service_remove_catalog_attribute_async.py" + "title": "retail_v2alpha_generated_catalog_service_get_default_branch_async.py" }, { "canonical": true, @@ -1198,19 +1202,23 @@ "fullName": "google.cloud.retail_v2alpha.CatalogServiceClient", "shortName": "CatalogServiceClient" }, - "fullName": "google.cloud.retail_v2alpha.CatalogServiceClient.remove_catalog_attribute", + "fullName": "google.cloud.retail_v2alpha.CatalogServiceClient.get_default_branch", "method": { - "fullName": "google.cloud.retail.v2alpha.CatalogService.RemoveCatalogAttribute", + "fullName": "google.cloud.retail.v2alpha.CatalogService.GetDefaultBranch", "service": { "fullName": "google.cloud.retail.v2alpha.CatalogService", "shortName": "CatalogService" }, - "shortName": "RemoveCatalogAttribute" + "shortName": "GetDefaultBranch" }, "parameters": [ { "name": "request", - "type": "google.cloud.retail_v2alpha.types.RemoveCatalogAttributeRequest" + "type": "google.cloud.retail_v2alpha.types.GetDefaultBranchRequest" + }, + { + "name": "catalog", + "type": "str" }, { "name": "retry", @@ -1225,22 +1233,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.retail_v2alpha.types.AttributesConfig", - "shortName": "remove_catalog_attribute" + "resultType": "google.cloud.retail_v2alpha.types.GetDefaultBranchResponse", + "shortName": "get_default_branch" }, - "description": "Sample for RemoveCatalogAttribute", - "file": "retail_v2alpha_generated_catalog_service_remove_catalog_attribute_sync.py", + "description": "Sample for GetDefaultBranch", + "file": "retail_v2alpha_generated_catalog_service_get_default_branch_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "retail_v2alpha_generated_CatalogService_RemoveCatalogAttribute_sync", + "regionTag": "retail_v2alpha_generated_CatalogService_GetDefaultBranch_sync", "segments": [ { - "end": 52, + "end": 50, "start": 27, "type": "FULL" }, { - "end": 52, + "end": 50, "start": 27, "type": "SHORT" }, @@ -1250,22 +1258,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 46, + "end": 44, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 49, - "start": 47, + "end": 47, + "start": 45, "type": "REQUEST_EXECUTION" }, { - "end": 53, - "start": 50, + "end": 51, + "start": 48, "type": "RESPONSE_HANDLING" } ], - "title": "retail_v2alpha_generated_catalog_service_remove_catalog_attribute_sync.py" + "title": "retail_v2alpha_generated_catalog_service_get_default_branch_sync.py" }, { "canonical": true, @@ -1275,19 +1283,23 @@ "fullName": "google.cloud.retail_v2alpha.CatalogServiceAsyncClient", "shortName": "CatalogServiceAsyncClient" }, - "fullName": "google.cloud.retail_v2alpha.CatalogServiceAsyncClient.replace_catalog_attribute", + "fullName": "google.cloud.retail_v2alpha.CatalogServiceAsyncClient.list_catalogs", "method": { - "fullName": "google.cloud.retail.v2alpha.CatalogService.ReplaceCatalogAttribute", + "fullName": "google.cloud.retail.v2alpha.CatalogService.ListCatalogs", "service": { "fullName": 
"google.cloud.retail.v2alpha.CatalogService", "shortName": "CatalogService" }, - "shortName": "ReplaceCatalogAttribute" + "shortName": "ListCatalogs" }, "parameters": [ { "name": "request", - "type": "google.cloud.retail_v2alpha.types.ReplaceCatalogAttributeRequest" + "type": "google.cloud.retail_v2alpha.types.ListCatalogsRequest" + }, + { + "name": "parent", + "type": "str" }, { "name": "retry", @@ -1302,22 +1314,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.retail_v2alpha.types.AttributesConfig", - "shortName": "replace_catalog_attribute" + "resultType": "google.cloud.retail_v2alpha.services.catalog_service.pagers.ListCatalogsAsyncPager", + "shortName": "list_catalogs" }, - "description": "Sample for ReplaceCatalogAttribute", - "file": "retail_v2alpha_generated_catalog_service_replace_catalog_attribute_async.py", + "description": "Sample for ListCatalogs", + "file": "retail_v2alpha_generated_catalog_service_list_catalogs_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "retail_v2alpha_generated_CatalogService_ReplaceCatalogAttribute_async", + "regionTag": "retail_v2alpha_generated_CatalogService_ListCatalogs_async", "segments": [ { - "end": 55, + "end": 52, "start": 27, "type": "FULL" }, { - "end": 55, + "end": 52, "start": 27, "type": "SHORT" }, @@ -1327,22 +1339,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 49, + "end": 45, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 52, - "start": 50, + "end": 48, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 56, - "start": 53, + "end": 53, + "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "retail_v2alpha_generated_catalog_service_replace_catalog_attribute_async.py" + "title": "retail_v2alpha_generated_catalog_service_list_catalogs_async.py" }, { "canonical": true, @@ -1351,19 +1363,23 @@ "fullName": "google.cloud.retail_v2alpha.CatalogServiceClient", "shortName": "CatalogServiceClient" }, - "fullName": "google.cloud.retail_v2alpha.CatalogServiceClient.replace_catalog_attribute", + "fullName": "google.cloud.retail_v2alpha.CatalogServiceClient.list_catalogs", "method": { - "fullName": "google.cloud.retail.v2alpha.CatalogService.ReplaceCatalogAttribute", + "fullName": "google.cloud.retail.v2alpha.CatalogService.ListCatalogs", "service": { "fullName": "google.cloud.retail.v2alpha.CatalogService", "shortName": "CatalogService" }, - "shortName": "ReplaceCatalogAttribute" + "shortName": "ListCatalogs" }, "parameters": [ { "name": "request", - "type": "google.cloud.retail_v2alpha.types.ReplaceCatalogAttributeRequest" + "type": "google.cloud.retail_v2alpha.types.ListCatalogsRequest" + }, + { + "name": "parent", + "type": "str" }, { "name": "retry", @@ -1378,22 +1394,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.retail_v2alpha.types.AttributesConfig", - "shortName": "replace_catalog_attribute" + "resultType": "google.cloud.retail_v2alpha.services.catalog_service.pagers.ListCatalogsPager", + "shortName": "list_catalogs" }, - "description": "Sample for ReplaceCatalogAttribute", - "file": "retail_v2alpha_generated_catalog_service_replace_catalog_attribute_sync.py", + "description": "Sample for ListCatalogs", + "file": "retail_v2alpha_generated_catalog_service_list_catalogs_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "retail_v2alpha_generated_CatalogService_ReplaceCatalogAttribute_sync", + "regionTag": "retail_v2alpha_generated_CatalogService_ListCatalogs_sync", "segments": [ { - "end": 55, + "end": 52, 
"start": 27, "type": "FULL" }, { - "end": 55, + "end": 52, "start": 27, "type": "SHORT" }, @@ -1403,22 +1419,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 49, + "end": 45, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 52, - "start": 50, + "end": 48, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 56, - "start": 53, + "end": 53, + "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "retail_v2alpha_generated_catalog_service_replace_catalog_attribute_sync.py" + "title": "retail_v2alpha_generated_catalog_service_list_catalogs_sync.py" }, { "canonical": true, @@ -1428,23 +1444,19 @@ "fullName": "google.cloud.retail_v2alpha.CatalogServiceAsyncClient", "shortName": "CatalogServiceAsyncClient" }, - "fullName": "google.cloud.retail_v2alpha.CatalogServiceAsyncClient.set_default_branch", + "fullName": "google.cloud.retail_v2alpha.CatalogServiceAsyncClient.remove_catalog_attribute", "method": { - "fullName": "google.cloud.retail.v2alpha.CatalogService.SetDefaultBranch", + "fullName": "google.cloud.retail.v2alpha.CatalogService.RemoveCatalogAttribute", "service": { "fullName": "google.cloud.retail.v2alpha.CatalogService", "shortName": "CatalogService" }, - "shortName": "SetDefaultBranch" + "shortName": "RemoveCatalogAttribute" }, "parameters": [ { "name": "request", - "type": "google.cloud.retail_v2alpha.types.SetDefaultBranchRequest" - }, - { - "name": "catalog", - "type": "str" + "type": "google.cloud.retail_v2alpha.types.RemoveCatalogAttributeRequest" }, { "name": "retry", @@ -1459,21 +1471,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "shortName": "set_default_branch" + "resultType": "google.cloud.retail_v2alpha.types.AttributesConfig", + "shortName": "remove_catalog_attribute" }, - "description": "Sample for SetDefaultBranch", - "file": "retail_v2alpha_generated_catalog_service_set_default_branch_async.py", + "description": "Sample for RemoveCatalogAttribute", + "file": "retail_v2alpha_generated_catalog_service_remove_catalog_attribute_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "retail_v2alpha_generated_CatalogService_SetDefaultBranch_async", + "regionTag": "retail_v2alpha_generated_CatalogService_RemoveCatalogAttribute_async", "segments": [ { - "end": 48, + "end": 52, "start": 27, "type": "FULL" }, { - "end": 48, + "end": 52, "start": 27, "type": "SHORT" }, @@ -1483,20 +1496,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 44, + "end": 46, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "start": 45, + "end": 49, + "start": 47, "type": "REQUEST_EXECUTION" }, { - "end": 49, + "end": 53, + "start": 50, "type": "RESPONSE_HANDLING" } ], - "title": "retail_v2alpha_generated_catalog_service_set_default_branch_async.py" + "title": "retail_v2alpha_generated_catalog_service_remove_catalog_attribute_async.py" }, { "canonical": true, @@ -1505,23 +1520,19 @@ "fullName": "google.cloud.retail_v2alpha.CatalogServiceClient", "shortName": "CatalogServiceClient" }, - "fullName": "google.cloud.retail_v2alpha.CatalogServiceClient.set_default_branch", + "fullName": "google.cloud.retail_v2alpha.CatalogServiceClient.remove_catalog_attribute", "method": { - "fullName": "google.cloud.retail.v2alpha.CatalogService.SetDefaultBranch", + "fullName": "google.cloud.retail.v2alpha.CatalogService.RemoveCatalogAttribute", "service": { "fullName": "google.cloud.retail.v2alpha.CatalogService", "shortName": "CatalogService" }, - "shortName": "SetDefaultBranch" + "shortName": "RemoveCatalogAttribute" }, "parameters": [ { "name": "request", - "type": 
"google.cloud.retail_v2alpha.types.SetDefaultBranchRequest" - }, - { - "name": "catalog", - "type": "str" + "type": "google.cloud.retail_v2alpha.types.RemoveCatalogAttributeRequest" }, { "name": "retry", @@ -1536,21 +1547,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "shortName": "set_default_branch" + "resultType": "google.cloud.retail_v2alpha.types.AttributesConfig", + "shortName": "remove_catalog_attribute" }, - "description": "Sample for SetDefaultBranch", - "file": "retail_v2alpha_generated_catalog_service_set_default_branch_sync.py", + "description": "Sample for RemoveCatalogAttribute", + "file": "retail_v2alpha_generated_catalog_service_remove_catalog_attribute_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "retail_v2alpha_generated_CatalogService_SetDefaultBranch_sync", + "regionTag": "retail_v2alpha_generated_CatalogService_RemoveCatalogAttribute_sync", "segments": [ { - "end": 48, + "end": 52, "start": 27, "type": "FULL" }, { - "end": 48, + "end": 52, "start": 27, "type": "SHORT" }, @@ -1560,20 +1572,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 44, + "end": 46, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "start": 45, + "end": 49, + "start": 47, "type": "REQUEST_EXECUTION" }, { - "end": 49, + "end": 53, + "start": 50, "type": "RESPONSE_HANDLING" } ], - "title": "retail_v2alpha_generated_catalog_service_set_default_branch_sync.py" + "title": "retail_v2alpha_generated_catalog_service_remove_catalog_attribute_sync.py" }, { "canonical": true, @@ -1583,27 +1597,19 @@ "fullName": "google.cloud.retail_v2alpha.CatalogServiceAsyncClient", "shortName": "CatalogServiceAsyncClient" }, - "fullName": "google.cloud.retail_v2alpha.CatalogServiceAsyncClient.update_attributes_config", + "fullName": "google.cloud.retail_v2alpha.CatalogServiceAsyncClient.replace_catalog_attribute", "method": { - "fullName": "google.cloud.retail.v2alpha.CatalogService.UpdateAttributesConfig", + "fullName": "google.cloud.retail.v2alpha.CatalogService.ReplaceCatalogAttribute", "service": { "fullName": "google.cloud.retail.v2alpha.CatalogService", "shortName": "CatalogService" }, - "shortName": "UpdateAttributesConfig" + "shortName": "ReplaceCatalogAttribute" }, "parameters": [ { "name": "request", - "type": "google.cloud.retail_v2alpha.types.UpdateAttributesConfigRequest" - }, - { - "name": "attributes_config", - "type": "google.cloud.retail_v2alpha.types.AttributesConfig" - }, - { - "name": "update_mask", - "type": "google.protobuf.field_mask_pb2.FieldMask" + "type": "google.cloud.retail_v2alpha.types.ReplaceCatalogAttributeRequest" }, { "name": "retry", @@ -1619,21 +1625,21 @@ } ], "resultType": "google.cloud.retail_v2alpha.types.AttributesConfig", - "shortName": "update_attributes_config" + "shortName": "replace_catalog_attribute" }, - "description": "Sample for UpdateAttributesConfig", - "file": "retail_v2alpha_generated_catalog_service_update_attributes_config_async.py", + "description": "Sample for ReplaceCatalogAttribute", + "file": "retail_v2alpha_generated_catalog_service_replace_catalog_attribute_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "retail_v2alpha_generated_CatalogService_UpdateAttributesConfig_async", + "regionTag": "retail_v2alpha_generated_CatalogService_ReplaceCatalogAttribute_async", "segments": [ { - "end": 54, + "end": 55, "start": 27, "type": "FULL" }, { - "end": 54, + "end": 55, "start": 27, "type": "SHORT" }, @@ -1643,22 +1649,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 48, + "end": 49, "start": 41, "type": 
"REQUEST_INITIALIZATION" }, { - "end": 51, - "start": 49, + "end": 52, + "start": 50, "type": "REQUEST_EXECUTION" }, { - "end": 55, - "start": 52, + "end": 56, + "start": 53, "type": "RESPONSE_HANDLING" } ], - "title": "retail_v2alpha_generated_catalog_service_update_attributes_config_async.py" + "title": "retail_v2alpha_generated_catalog_service_replace_catalog_attribute_async.py" }, { "canonical": true, @@ -1667,27 +1673,19 @@ "fullName": "google.cloud.retail_v2alpha.CatalogServiceClient", "shortName": "CatalogServiceClient" }, - "fullName": "google.cloud.retail_v2alpha.CatalogServiceClient.update_attributes_config", + "fullName": "google.cloud.retail_v2alpha.CatalogServiceClient.replace_catalog_attribute", "method": { - "fullName": "google.cloud.retail.v2alpha.CatalogService.UpdateAttributesConfig", + "fullName": "google.cloud.retail.v2alpha.CatalogService.ReplaceCatalogAttribute", "service": { "fullName": "google.cloud.retail.v2alpha.CatalogService", "shortName": "CatalogService" }, - "shortName": "UpdateAttributesConfig" + "shortName": "ReplaceCatalogAttribute" }, "parameters": [ { "name": "request", - "type": "google.cloud.retail_v2alpha.types.UpdateAttributesConfigRequest" - }, - { - "name": "attributes_config", - "type": "google.cloud.retail_v2alpha.types.AttributesConfig" - }, - { - "name": "update_mask", - "type": "google.protobuf.field_mask_pb2.FieldMask" + "type": "google.cloud.retail_v2alpha.types.ReplaceCatalogAttributeRequest" }, { "name": "retry", @@ -1703,21 +1701,21 @@ } ], "resultType": "google.cloud.retail_v2alpha.types.AttributesConfig", - "shortName": "update_attributes_config" + "shortName": "replace_catalog_attribute" }, - "description": "Sample for UpdateAttributesConfig", - "file": "retail_v2alpha_generated_catalog_service_update_attributes_config_sync.py", + "description": "Sample for ReplaceCatalogAttribute", + "file": "retail_v2alpha_generated_catalog_service_replace_catalog_attribute_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "retail_v2alpha_generated_CatalogService_UpdateAttributesConfig_sync", + "regionTag": "retail_v2alpha_generated_CatalogService_ReplaceCatalogAttribute_sync", "segments": [ { - "end": 54, + "end": 55, "start": 27, "type": "FULL" }, { - "end": 54, + "end": 55, "start": 27, "type": "SHORT" }, @@ -1727,22 +1725,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 48, + "end": 49, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 51, - "start": 49, + "end": 52, + "start": 50, "type": "REQUEST_EXECUTION" }, { - "end": 55, - "start": 52, + "end": 56, + "start": 53, "type": "RESPONSE_HANDLING" } ], - "title": "retail_v2alpha_generated_catalog_service_update_attributes_config_sync.py" + "title": "retail_v2alpha_generated_catalog_service_replace_catalog_attribute_sync.py" }, { "canonical": true, @@ -1752,27 +1750,23 @@ "fullName": "google.cloud.retail_v2alpha.CatalogServiceAsyncClient", "shortName": "CatalogServiceAsyncClient" }, - "fullName": "google.cloud.retail_v2alpha.CatalogServiceAsyncClient.update_catalog", + "fullName": "google.cloud.retail_v2alpha.CatalogServiceAsyncClient.set_default_branch", "method": { - "fullName": "google.cloud.retail.v2alpha.CatalogService.UpdateCatalog", + "fullName": "google.cloud.retail.v2alpha.CatalogService.SetDefaultBranch", "service": { "fullName": "google.cloud.retail.v2alpha.CatalogService", "shortName": "CatalogService" }, - "shortName": "UpdateCatalog" + "shortName": "SetDefaultBranch" }, "parameters": [ { "name": "request", - "type": 
"google.cloud.retail_v2alpha.types.UpdateCatalogRequest" + "type": "google.cloud.retail_v2alpha.types.SetDefaultBranchRequest" }, { "name": "catalog", - "type": "google.cloud.retail_v2alpha.types.Catalog" - }, - { - "name": "update_mask", - "type": "google.protobuf.field_mask_pb2.FieldMask" + "type": "str" }, { "name": "retry", @@ -1787,22 +1781,21 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.retail_v2alpha.types.Catalog", - "shortName": "update_catalog" + "shortName": "set_default_branch" }, - "description": "Sample for UpdateCatalog", - "file": "retail_v2alpha_generated_catalog_service_update_catalog_async.py", + "description": "Sample for SetDefaultBranch", + "file": "retail_v2alpha_generated_catalog_service_set_default_branch_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "retail_v2alpha_generated_CatalogService_UpdateCatalog_async", + "regionTag": "retail_v2alpha_generated_CatalogService_SetDefaultBranch_async", "segments": [ { - "end": 55, + "end": 48, "start": 27, "type": "FULL" }, { - "end": 55, + "end": 48, "start": 27, "type": "SHORT" }, @@ -1812,22 +1805,20 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 49, + "end": 44, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 52, - "start": 50, + "start": 45, "type": "REQUEST_EXECUTION" }, { - "end": 56, - "start": 53, + "end": 49, "type": "RESPONSE_HANDLING" } ], - "title": "retail_v2alpha_generated_catalog_service_update_catalog_async.py" + "title": "retail_v2alpha_generated_catalog_service_set_default_branch_async.py" }, { "canonical": true, @@ -1836,27 +1827,23 @@ "fullName": "google.cloud.retail_v2alpha.CatalogServiceClient", "shortName": "CatalogServiceClient" }, - "fullName": "google.cloud.retail_v2alpha.CatalogServiceClient.update_catalog", + "fullName": "google.cloud.retail_v2alpha.CatalogServiceClient.set_default_branch", "method": { - "fullName": "google.cloud.retail.v2alpha.CatalogService.UpdateCatalog", + "fullName": "google.cloud.retail.v2alpha.CatalogService.SetDefaultBranch", "service": { "fullName": "google.cloud.retail.v2alpha.CatalogService", "shortName": "CatalogService" }, - "shortName": "UpdateCatalog" + "shortName": "SetDefaultBranch" }, "parameters": [ { "name": "request", - "type": "google.cloud.retail_v2alpha.types.UpdateCatalogRequest" + "type": "google.cloud.retail_v2alpha.types.SetDefaultBranchRequest" }, { "name": "catalog", - "type": "google.cloud.retail_v2alpha.types.Catalog" - }, - { - "name": "update_mask", - "type": "google.protobuf.field_mask_pb2.FieldMask" + "type": "str" }, { "name": "retry", @@ -1871,22 +1858,21 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.retail_v2alpha.types.Catalog", - "shortName": "update_catalog" + "shortName": "set_default_branch" }, - "description": "Sample for UpdateCatalog", - "file": "retail_v2alpha_generated_catalog_service_update_catalog_sync.py", + "description": "Sample for SetDefaultBranch", + "file": "retail_v2alpha_generated_catalog_service_set_default_branch_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "retail_v2alpha_generated_CatalogService_UpdateCatalog_sync", + "regionTag": "retail_v2alpha_generated_CatalogService_SetDefaultBranch_sync", "segments": [ { - "end": 55, + "end": 48, "start": 27, "type": "FULL" }, { - "end": 55, + "end": 48, "start": 27, "type": "SHORT" }, @@ -1896,22 +1882,20 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 49, + "end": 44, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 52, - "start": 50, + 
"start": 45, "type": "REQUEST_EXECUTION" }, { - "end": 56, - "start": 53, + "end": 49, "type": "RESPONSE_HANDLING" } ], - "title": "retail_v2alpha_generated_catalog_service_update_catalog_sync.py" + "title": "retail_v2alpha_generated_catalog_service_set_default_branch_sync.py" }, { "canonical": true, @@ -1921,23 +1905,23 @@ "fullName": "google.cloud.retail_v2alpha.CatalogServiceAsyncClient", "shortName": "CatalogServiceAsyncClient" }, - "fullName": "google.cloud.retail_v2alpha.CatalogServiceAsyncClient.update_completion_config", + "fullName": "google.cloud.retail_v2alpha.CatalogServiceAsyncClient.update_attributes_config", "method": { - "fullName": "google.cloud.retail.v2alpha.CatalogService.UpdateCompletionConfig", + "fullName": "google.cloud.retail.v2alpha.CatalogService.UpdateAttributesConfig", "service": { "fullName": "google.cloud.retail.v2alpha.CatalogService", "shortName": "CatalogService" }, - "shortName": "UpdateCompletionConfig" + "shortName": "UpdateAttributesConfig" }, "parameters": [ { "name": "request", - "type": "google.cloud.retail_v2alpha.types.UpdateCompletionConfigRequest" + "type": "google.cloud.retail_v2alpha.types.UpdateAttributesConfigRequest" }, { - "name": "completion_config", - "type": "google.cloud.retail_v2alpha.types.CompletionConfig" + "name": "attributes_config", + "type": "google.cloud.retail_v2alpha.types.AttributesConfig" }, { "name": "update_mask", @@ -1956,14 +1940,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.retail_v2alpha.types.CompletionConfig", - "shortName": "update_completion_config" + "resultType": "google.cloud.retail_v2alpha.types.AttributesConfig", + "shortName": "update_attributes_config" }, - "description": "Sample for UpdateCompletionConfig", - "file": "retail_v2alpha_generated_catalog_service_update_completion_config_async.py", + "description": "Sample for UpdateAttributesConfig", + "file": "retail_v2alpha_generated_catalog_service_update_attributes_config_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "retail_v2alpha_generated_CatalogService_UpdateCompletionConfig_async", + "regionTag": "retail_v2alpha_generated_CatalogService_UpdateAttributesConfig_async", "segments": [ { "end": 54, @@ -1996,7 +1980,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "retail_v2alpha_generated_catalog_service_update_completion_config_async.py" + "title": "retail_v2alpha_generated_catalog_service_update_attributes_config_async.py" }, { "canonical": true, @@ -2005,23 +1989,23 @@ "fullName": "google.cloud.retail_v2alpha.CatalogServiceClient", "shortName": "CatalogServiceClient" }, - "fullName": "google.cloud.retail_v2alpha.CatalogServiceClient.update_completion_config", + "fullName": "google.cloud.retail_v2alpha.CatalogServiceClient.update_attributes_config", "method": { - "fullName": "google.cloud.retail.v2alpha.CatalogService.UpdateCompletionConfig", + "fullName": "google.cloud.retail.v2alpha.CatalogService.UpdateAttributesConfig", "service": { "fullName": "google.cloud.retail.v2alpha.CatalogService", "shortName": "CatalogService" }, - "shortName": "UpdateCompletionConfig" + "shortName": "UpdateAttributesConfig" }, "parameters": [ { "name": "request", - "type": "google.cloud.retail_v2alpha.types.UpdateCompletionConfigRequest" + "type": "google.cloud.retail_v2alpha.types.UpdateAttributesConfigRequest" }, { - "name": "completion_config", - "type": "google.cloud.retail_v2alpha.types.CompletionConfig" + "name": "attributes_config", + "type": "google.cloud.retail_v2alpha.types.AttributesConfig" }, { 
"name": "update_mask", @@ -2040,14 +2024,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.retail_v2alpha.types.CompletionConfig", - "shortName": "update_completion_config" + "resultType": "google.cloud.retail_v2alpha.types.AttributesConfig", + "shortName": "update_attributes_config" }, - "description": "Sample for UpdateCompletionConfig", - "file": "retail_v2alpha_generated_catalog_service_update_completion_config_sync.py", + "description": "Sample for UpdateAttributesConfig", + "file": "retail_v2alpha_generated_catalog_service_update_attributes_config_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "retail_v2alpha_generated_CatalogService_UpdateCompletionConfig_sync", + "regionTag": "retail_v2alpha_generated_CatalogService_UpdateAttributesConfig_sync", "segments": [ { "end": 54, @@ -2080,29 +2064,37 @@ "type": "RESPONSE_HANDLING" } ], - "title": "retail_v2alpha_generated_catalog_service_update_completion_config_sync.py" + "title": "retail_v2alpha_generated_catalog_service_update_attributes_config_sync.py" }, { "canonical": true, "clientMethod": { "async": true, "client": { - "fullName": "google.cloud.retail_v2alpha.CompletionServiceAsyncClient", - "shortName": "CompletionServiceAsyncClient" + "fullName": "google.cloud.retail_v2alpha.CatalogServiceAsyncClient", + "shortName": "CatalogServiceAsyncClient" }, - "fullName": "google.cloud.retail_v2alpha.CompletionServiceAsyncClient.complete_query", + "fullName": "google.cloud.retail_v2alpha.CatalogServiceAsyncClient.update_catalog", "method": { - "fullName": "google.cloud.retail.v2alpha.CompletionService.CompleteQuery", + "fullName": "google.cloud.retail.v2alpha.CatalogService.UpdateCatalog", "service": { - "fullName": "google.cloud.retail.v2alpha.CompletionService", - "shortName": "CompletionService" + "fullName": "google.cloud.retail.v2alpha.CatalogService", + "shortName": "CatalogService" }, - "shortName": "CompleteQuery" + "shortName": "UpdateCatalog" }, "parameters": [ { "name": "request", - "type": "google.cloud.retail_v2alpha.types.CompleteQueryRequest" + "type": "google.cloud.retail_v2alpha.types.UpdateCatalogRequest" + }, + { + "name": "catalog", + "type": "google.cloud.retail_v2alpha.types.Catalog" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" }, { "name": "retry", @@ -2117,22 +2109,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.retail_v2alpha.types.CompleteQueryResponse", - "shortName": "complete_query" + "resultType": "google.cloud.retail_v2alpha.types.Catalog", + "shortName": "update_catalog" }, - "description": "Sample for CompleteQuery", - "file": "retail_v2alpha_generated_completion_service_complete_query_async.py", + "description": "Sample for UpdateCatalog", + "file": "retail_v2alpha_generated_catalog_service_update_catalog_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "retail_v2alpha_generated_CompletionService_CompleteQuery_async", + "regionTag": "retail_v2alpha_generated_CatalogService_UpdateCatalog_async", "segments": [ { - "end": 52, + "end": 55, "start": 27, "type": "FULL" }, { - "end": 52, + "end": 55, "start": 27, "type": "SHORT" }, @@ -2142,43 +2134,51 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 46, + "end": 49, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 49, - "start": 47, + "end": 52, + "start": 50, "type": "REQUEST_EXECUTION" }, { - "end": 53, - "start": 50, + "end": 56, + "start": 53, "type": "RESPONSE_HANDLING" } ], - "title": 
"retail_v2alpha_generated_completion_service_complete_query_async.py" + "title": "retail_v2alpha_generated_catalog_service_update_catalog_async.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.retail_v2alpha.CompletionServiceClient", - "shortName": "CompletionServiceClient" + "fullName": "google.cloud.retail_v2alpha.CatalogServiceClient", + "shortName": "CatalogServiceClient" }, - "fullName": "google.cloud.retail_v2alpha.CompletionServiceClient.complete_query", + "fullName": "google.cloud.retail_v2alpha.CatalogServiceClient.update_catalog", "method": { - "fullName": "google.cloud.retail.v2alpha.CompletionService.CompleteQuery", + "fullName": "google.cloud.retail.v2alpha.CatalogService.UpdateCatalog", "service": { - "fullName": "google.cloud.retail.v2alpha.CompletionService", - "shortName": "CompletionService" + "fullName": "google.cloud.retail.v2alpha.CatalogService", + "shortName": "CatalogService" }, - "shortName": "CompleteQuery" + "shortName": "UpdateCatalog" }, "parameters": [ { "name": "request", - "type": "google.cloud.retail_v2alpha.types.CompleteQueryRequest" + "type": "google.cloud.retail_v2alpha.types.UpdateCatalogRequest" + }, + { + "name": "catalog", + "type": "google.cloud.retail_v2alpha.types.Catalog" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" }, { "name": "retry", @@ -2193,22 +2193,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.retail_v2alpha.types.CompleteQueryResponse", - "shortName": "complete_query" + "resultType": "google.cloud.retail_v2alpha.types.Catalog", + "shortName": "update_catalog" }, - "description": "Sample for CompleteQuery", - "file": "retail_v2alpha_generated_completion_service_complete_query_sync.py", + "description": "Sample for UpdateCatalog", + "file": "retail_v2alpha_generated_catalog_service_update_catalog_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "retail_v2alpha_generated_CompletionService_CompleteQuery_sync", + "regionTag": "retail_v2alpha_generated_CatalogService_UpdateCatalog_sync", "segments": [ { - "end": 52, + "end": 55, "start": 27, "type": "FULL" }, { - "end": 52, + "end": 55, "start": 27, "type": "SHORT" }, @@ -2218,44 +2218,52 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 46, + "end": 49, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 49, - "start": 47, + "end": 52, + "start": 50, "type": "REQUEST_EXECUTION" }, { - "end": 53, - "start": 50, + "end": 56, + "start": 53, "type": "RESPONSE_HANDLING" } ], - "title": "retail_v2alpha_generated_completion_service_complete_query_sync.py" + "title": "retail_v2alpha_generated_catalog_service_update_catalog_sync.py" }, { "canonical": true, "clientMethod": { "async": true, "client": { - "fullName": "google.cloud.retail_v2alpha.CompletionServiceAsyncClient", - "shortName": "CompletionServiceAsyncClient" + "fullName": "google.cloud.retail_v2alpha.CatalogServiceAsyncClient", + "shortName": "CatalogServiceAsyncClient" }, - "fullName": "google.cloud.retail_v2alpha.CompletionServiceAsyncClient.import_completion_data", + "fullName": "google.cloud.retail_v2alpha.CatalogServiceAsyncClient.update_completion_config", "method": { - "fullName": "google.cloud.retail.v2alpha.CompletionService.ImportCompletionData", + "fullName": "google.cloud.retail.v2alpha.CatalogService.UpdateCompletionConfig", "service": { - "fullName": "google.cloud.retail.v2alpha.CompletionService", - "shortName": "CompletionService" + "fullName": 
"google.cloud.retail.v2alpha.CatalogService", + "shortName": "CatalogService" }, - "shortName": "ImportCompletionData" + "shortName": "UpdateCompletionConfig" }, "parameters": [ { "name": "request", - "type": "google.cloud.retail_v2alpha.types.ImportCompletionDataRequest" + "type": "google.cloud.retail_v2alpha.types.UpdateCompletionConfigRequest" + }, + { + "name": "completion_config", + "type": "google.cloud.retail_v2alpha.types.CompletionConfig" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" }, { "name": "retry", @@ -2270,22 +2278,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "import_completion_data" + "resultType": "google.cloud.retail_v2alpha.types.CompletionConfig", + "shortName": "update_completion_config" }, - "description": "Sample for ImportCompletionData", - "file": "retail_v2alpha_generated_completion_service_import_completion_data_async.py", + "description": "Sample for UpdateCompletionConfig", + "file": "retail_v2alpha_generated_catalog_service_update_completion_config_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "retail_v2alpha_generated_CompletionService_ImportCompletionData_async", + "regionTag": "retail_v2alpha_generated_CatalogService_UpdateCompletionConfig_async", "segments": [ { - "end": 60, + "end": 54, "start": 27, "type": "FULL" }, { - "end": 60, + "end": 54, "start": 27, "type": "SHORT" }, @@ -2295,43 +2303,51 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 50, + "end": 48, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 57, - "start": 51, + "end": 51, + "start": 49, "type": "REQUEST_EXECUTION" }, { - "end": 61, - "start": 58, + "end": 55, + "start": 52, "type": "RESPONSE_HANDLING" } ], - "title": "retail_v2alpha_generated_completion_service_import_completion_data_async.py" + "title": "retail_v2alpha_generated_catalog_service_update_completion_config_async.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.retail_v2alpha.CompletionServiceClient", - "shortName": "CompletionServiceClient" + "fullName": "google.cloud.retail_v2alpha.CatalogServiceClient", + "shortName": "CatalogServiceClient" }, - "fullName": "google.cloud.retail_v2alpha.CompletionServiceClient.import_completion_data", + "fullName": "google.cloud.retail_v2alpha.CatalogServiceClient.update_completion_config", "method": { - "fullName": "google.cloud.retail.v2alpha.CompletionService.ImportCompletionData", + "fullName": "google.cloud.retail.v2alpha.CatalogService.UpdateCompletionConfig", "service": { - "fullName": "google.cloud.retail.v2alpha.CompletionService", - "shortName": "CompletionService" + "fullName": "google.cloud.retail.v2alpha.CatalogService", + "shortName": "CatalogService" }, - "shortName": "ImportCompletionData" + "shortName": "UpdateCompletionConfig" }, "parameters": [ { "name": "request", - "type": "google.cloud.retail_v2alpha.types.ImportCompletionDataRequest" + "type": "google.cloud.retail_v2alpha.types.UpdateCompletionConfigRequest" + }, + { + "name": "completion_config", + "type": "google.cloud.retail_v2alpha.types.CompletionConfig" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" }, { "name": "retry", @@ -2346,22 +2362,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.api_core.operation.Operation", - "shortName": "import_completion_data" + "resultType": "google.cloud.retail_v2alpha.types.CompletionConfig", + "shortName": 
"update_completion_config" }, - "description": "Sample for ImportCompletionData", - "file": "retail_v2alpha_generated_completion_service_import_completion_data_sync.py", + "description": "Sample for UpdateCompletionConfig", + "file": "retail_v2alpha_generated_catalog_service_update_completion_config_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "retail_v2alpha_generated_CompletionService_ImportCompletionData_sync", + "regionTag": "retail_v2alpha_generated_CatalogService_UpdateCompletionConfig_sync", "segments": [ { - "end": 60, + "end": 54, "start": 27, "type": "FULL" }, { - "end": 60, + "end": 54, "start": 27, "type": "SHORT" }, @@ -2371,56 +2387,1666 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 50, + "end": 48, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 57, - "start": 51, + "end": 51, + "start": 49, "type": "REQUEST_EXECUTION" }, { - "end": 61, - "start": 58, + "end": 55, + "start": 52, + "type": "RESPONSE_HANDLING" + } + ], + "title": "retail_v2alpha_generated_catalog_service_update_completion_config_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.retail_v2alpha.CompletionServiceAsyncClient", + "shortName": "CompletionServiceAsyncClient" + }, + "fullName": "google.cloud.retail_v2alpha.CompletionServiceAsyncClient.complete_query", + "method": { + "fullName": "google.cloud.retail.v2alpha.CompletionService.CompleteQuery", + "service": { + "fullName": "google.cloud.retail.v2alpha.CompletionService", + "shortName": "CompletionService" + }, + "shortName": "CompleteQuery" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.retail_v2alpha.types.CompleteQueryRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.retail_v2alpha.types.CompleteQueryResponse", + "shortName": "complete_query" + }, + "description": "Sample for CompleteQuery", + "file": "retail_v2alpha_generated_completion_service_complete_query_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "retail_v2alpha_generated_CompletionService_CompleteQuery_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "retail_v2alpha_generated_completion_service_complete_query_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.retail_v2alpha.CompletionServiceClient", + "shortName": "CompletionServiceClient" + }, + "fullName": "google.cloud.retail_v2alpha.CompletionServiceClient.complete_query", + "method": { + "fullName": "google.cloud.retail.v2alpha.CompletionService.CompleteQuery", + "service": { + "fullName": "google.cloud.retail.v2alpha.CompletionService", + "shortName": "CompletionService" + }, + "shortName": "CompleteQuery" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.retail_v2alpha.types.CompleteQueryRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + 
"type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.retail_v2alpha.types.CompleteQueryResponse", + "shortName": "complete_query" + }, + "description": "Sample for CompleteQuery", + "file": "retail_v2alpha_generated_completion_service_complete_query_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "retail_v2alpha_generated_CompletionService_CompleteQuery_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "retail_v2alpha_generated_completion_service_complete_query_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.retail_v2alpha.CompletionServiceAsyncClient", + "shortName": "CompletionServiceAsyncClient" + }, + "fullName": "google.cloud.retail_v2alpha.CompletionServiceAsyncClient.import_completion_data", + "method": { + "fullName": "google.cloud.retail.v2alpha.CompletionService.ImportCompletionData", + "service": { + "fullName": "google.cloud.retail.v2alpha.CompletionService", + "shortName": "CompletionService" + }, + "shortName": "ImportCompletionData" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.retail_v2alpha.types.ImportCompletionDataRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "import_completion_data" + }, + "description": "Sample for ImportCompletionData", + "file": "retail_v2alpha_generated_completion_service_import_completion_data_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "retail_v2alpha_generated_CompletionService_ImportCompletionData_async", + "segments": [ + { + "end": 60, + "start": 27, + "type": "FULL" + }, + { + "end": 60, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 50, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 57, + "start": 51, + "type": "REQUEST_EXECUTION" + }, + { + "end": 61, + "start": 58, + "type": "RESPONSE_HANDLING" + } + ], + "title": "retail_v2alpha_generated_completion_service_import_completion_data_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.retail_v2alpha.CompletionServiceClient", + "shortName": "CompletionServiceClient" + }, + "fullName": "google.cloud.retail_v2alpha.CompletionServiceClient.import_completion_data", + "method": { + "fullName": "google.cloud.retail.v2alpha.CompletionService.ImportCompletionData", + "service": { + "fullName": "google.cloud.retail.v2alpha.CompletionService", + "shortName": "CompletionService" + }, + "shortName": "ImportCompletionData" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.retail_v2alpha.types.ImportCompletionDataRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": 
"google.api_core.operation.Operation", + "shortName": "import_completion_data" + }, + "description": "Sample for ImportCompletionData", + "file": "retail_v2alpha_generated_completion_service_import_completion_data_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "retail_v2alpha_generated_CompletionService_ImportCompletionData_sync", + "segments": [ + { + "end": 60, + "start": 27, + "type": "FULL" + }, + { + "end": 60, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 50, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 57, + "start": 51, + "type": "REQUEST_EXECUTION" + }, + { + "end": 61, + "start": 58, + "type": "RESPONSE_HANDLING" + } + ], + "title": "retail_v2alpha_generated_completion_service_import_completion_data_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.retail_v2alpha.ControlServiceAsyncClient", + "shortName": "ControlServiceAsyncClient" + }, + "fullName": "google.cloud.retail_v2alpha.ControlServiceAsyncClient.create_control", + "method": { + "fullName": "google.cloud.retail.v2alpha.ControlService.CreateControl", + "service": { + "fullName": "google.cloud.retail.v2alpha.ControlService", + "shortName": "ControlService" + }, + "shortName": "CreateControl" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.retail_v2alpha.types.CreateControlRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "control", + "type": "google.cloud.retail_v2alpha.types.Control" + }, + { + "name": "control_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.retail_v2alpha.types.Control", + "shortName": "create_control" + }, + "description": "Sample for CreateControl", + "file": "retail_v2alpha_generated_control_service_create_control_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "retail_v2alpha_generated_ControlService_CreateControl_async", + "segments": [ + { + "end": 58, + "start": 27, + "type": "FULL" + }, + { + "end": 58, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 52, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 55, + "start": 53, + "type": "REQUEST_EXECUTION" + }, + { + "end": 59, + "start": 56, + "type": "RESPONSE_HANDLING" + } + ], + "title": "retail_v2alpha_generated_control_service_create_control_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.retail_v2alpha.ControlServiceClient", + "shortName": "ControlServiceClient" + }, + "fullName": "google.cloud.retail_v2alpha.ControlServiceClient.create_control", + "method": { + "fullName": "google.cloud.retail.v2alpha.ControlService.CreateControl", + "service": { + "fullName": "google.cloud.retail.v2alpha.ControlService", + "shortName": "ControlService" + }, + "shortName": "CreateControl" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.retail_v2alpha.types.CreateControlRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "control", + "type": "google.cloud.retail_v2alpha.types.Control" + }, + { + "name": "control_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { 
+ "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.retail_v2alpha.types.Control", + "shortName": "create_control" + }, + "description": "Sample for CreateControl", + "file": "retail_v2alpha_generated_control_service_create_control_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "retail_v2alpha_generated_ControlService_CreateControl_sync", + "segments": [ + { + "end": 58, + "start": 27, + "type": "FULL" + }, + { + "end": 58, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 52, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 55, + "start": 53, + "type": "REQUEST_EXECUTION" + }, + { + "end": 59, + "start": 56, + "type": "RESPONSE_HANDLING" + } + ], + "title": "retail_v2alpha_generated_control_service_create_control_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.retail_v2alpha.ControlServiceAsyncClient", + "shortName": "ControlServiceAsyncClient" + }, + "fullName": "google.cloud.retail_v2alpha.ControlServiceAsyncClient.delete_control", + "method": { + "fullName": "google.cloud.retail.v2alpha.ControlService.DeleteControl", + "service": { + "fullName": "google.cloud.retail.v2alpha.ControlService", + "shortName": "ControlService" + }, + "shortName": "DeleteControl" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.retail_v2alpha.types.DeleteControlRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "shortName": "delete_control" + }, + "description": "Sample for DeleteControl", + "file": "retail_v2alpha_generated_control_service_delete_control_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "retail_v2alpha_generated_ControlService_DeleteControl_async", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "retail_v2alpha_generated_control_service_delete_control_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.retail_v2alpha.ControlServiceClient", + "shortName": "ControlServiceClient" + }, + "fullName": "google.cloud.retail_v2alpha.ControlServiceClient.delete_control", + "method": { + "fullName": "google.cloud.retail.v2alpha.ControlService.DeleteControl", + "service": { + "fullName": "google.cloud.retail.v2alpha.ControlService", + "shortName": "ControlService" + }, + "shortName": "DeleteControl" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.retail_v2alpha.types.DeleteControlRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "shortName": "delete_control" + }, + "description": "Sample for DeleteControl", + "file": 
"retail_v2alpha_generated_control_service_delete_control_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "retail_v2alpha_generated_ControlService_DeleteControl_sync", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "retail_v2alpha_generated_control_service_delete_control_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.retail_v2alpha.ControlServiceAsyncClient", + "shortName": "ControlServiceAsyncClient" + }, + "fullName": "google.cloud.retail_v2alpha.ControlServiceAsyncClient.get_control", + "method": { + "fullName": "google.cloud.retail.v2alpha.ControlService.GetControl", + "service": { + "fullName": "google.cloud.retail.v2alpha.ControlService", + "shortName": "ControlService" + }, + "shortName": "GetControl" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.retail_v2alpha.types.GetControlRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.retail_v2alpha.types.Control", + "shortName": "get_control" + }, + "description": "Sample for GetControl", + "file": "retail_v2alpha_generated_control_service_get_control_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "retail_v2alpha_generated_ControlService_GetControl_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "retail_v2alpha_generated_control_service_get_control_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.retail_v2alpha.ControlServiceClient", + "shortName": "ControlServiceClient" + }, + "fullName": "google.cloud.retail_v2alpha.ControlServiceClient.get_control", + "method": { + "fullName": "google.cloud.retail.v2alpha.ControlService.GetControl", + "service": { + "fullName": "google.cloud.retail.v2alpha.ControlService", + "shortName": "ControlService" + }, + "shortName": "GetControl" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.retail_v2alpha.types.GetControlRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.retail_v2alpha.types.Control", + "shortName": "get_control" + }, + "description": "Sample for GetControl", + "file": "retail_v2alpha_generated_control_service_get_control_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "retail_v2alpha_generated_ControlService_GetControl_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": 
"FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "retail_v2alpha_generated_control_service_get_control_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.retail_v2alpha.ControlServiceAsyncClient", + "shortName": "ControlServiceAsyncClient" + }, + "fullName": "google.cloud.retail_v2alpha.ControlServiceAsyncClient.list_controls", + "method": { + "fullName": "google.cloud.retail.v2alpha.ControlService.ListControls", + "service": { + "fullName": "google.cloud.retail.v2alpha.ControlService", + "shortName": "ControlService" + }, + "shortName": "ListControls" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.retail_v2alpha.types.ListControlsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.retail_v2alpha.services.control_service.pagers.ListControlsAsyncPager", + "shortName": "list_controls" + }, + "description": "Sample for ListControls", + "file": "retail_v2alpha_generated_control_service_list_controls_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "retail_v2alpha_generated_ControlService_ListControls_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "retail_v2alpha_generated_control_service_list_controls_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.retail_v2alpha.ControlServiceClient", + "shortName": "ControlServiceClient" + }, + "fullName": "google.cloud.retail_v2alpha.ControlServiceClient.list_controls", + "method": { + "fullName": "google.cloud.retail.v2alpha.ControlService.ListControls", + "service": { + "fullName": "google.cloud.retail.v2alpha.ControlService", + "shortName": "ControlService" + }, + "shortName": "ListControls" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.retail_v2alpha.types.ListControlsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.retail_v2alpha.services.control_service.pagers.ListControlsPager", + "shortName": "list_controls" + }, + "description": "Sample for ListControls", + "file": "retail_v2alpha_generated_control_service_list_controls_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "retail_v2alpha_generated_ControlService_ListControls_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": 
"CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "retail_v2alpha_generated_control_service_list_controls_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.retail_v2alpha.ControlServiceAsyncClient", + "shortName": "ControlServiceAsyncClient" + }, + "fullName": "google.cloud.retail_v2alpha.ControlServiceAsyncClient.update_control", + "method": { + "fullName": "google.cloud.retail.v2alpha.ControlService.UpdateControl", + "service": { + "fullName": "google.cloud.retail.v2alpha.ControlService", + "shortName": "ControlService" + }, + "shortName": "UpdateControl" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.retail_v2alpha.types.UpdateControlRequest" + }, + { + "name": "control", + "type": "google.cloud.retail_v2alpha.types.Control" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.retail_v2alpha.types.Control", + "shortName": "update_control" + }, + "description": "Sample for UpdateControl", + "file": "retail_v2alpha_generated_control_service_update_control_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "retail_v2alpha_generated_ControlService_UpdateControl_async", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 50, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 51, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" + } + ], + "title": "retail_v2alpha_generated_control_service_update_control_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.retail_v2alpha.ControlServiceClient", + "shortName": "ControlServiceClient" + }, + "fullName": "google.cloud.retail_v2alpha.ControlServiceClient.update_control", + "method": { + "fullName": "google.cloud.retail.v2alpha.ControlService.UpdateControl", + "service": { + "fullName": "google.cloud.retail.v2alpha.ControlService", + "shortName": "ControlService" + }, + "shortName": "UpdateControl" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.retail_v2alpha.types.UpdateControlRequest" + }, + { + "name": "control", + "type": "google.cloud.retail_v2alpha.types.Control" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.retail_v2alpha.types.Control", + "shortName": "update_control" + }, + "description": "Sample for UpdateControl", + "file": "retail_v2alpha_generated_control_service_update_control_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "retail_v2alpha_generated_ControlService_UpdateControl_sync", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + 
"start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 50, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 51, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" + } + ], + "title": "retail_v2alpha_generated_control_service_update_control_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.retail_v2alpha.MerchantCenterAccountLinkServiceAsyncClient", + "shortName": "MerchantCenterAccountLinkServiceAsyncClient" + }, + "fullName": "google.cloud.retail_v2alpha.MerchantCenterAccountLinkServiceAsyncClient.create_merchant_center_account_link", + "method": { + "fullName": "google.cloud.retail.v2alpha.MerchantCenterAccountLinkService.CreateMerchantCenterAccountLink", + "service": { + "fullName": "google.cloud.retail.v2alpha.MerchantCenterAccountLinkService", + "shortName": "MerchantCenterAccountLinkService" + }, + "shortName": "CreateMerchantCenterAccountLink" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.retail_v2alpha.types.CreateMerchantCenterAccountLinkRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "merchant_center_account_link", + "type": "google.cloud.retail_v2alpha.types.MerchantCenterAccountLink" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "create_merchant_center_account_link" + }, + "description": "Sample for CreateMerchantCenterAccountLink", + "file": "retail_v2alpha_generated_merchant_center_account_link_service_create_merchant_center_account_link_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "retail_v2alpha_generated_MerchantCenterAccountLinkService_CreateMerchantCenterAccountLink_async", + "segments": [ + { + "end": 60, + "start": 27, + "type": "FULL" + }, + { + "end": 60, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 50, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 57, + "start": 51, + "type": "REQUEST_EXECUTION" + }, + { + "end": 61, + "start": 58, + "type": "RESPONSE_HANDLING" + } + ], + "title": "retail_v2alpha_generated_merchant_center_account_link_service_create_merchant_center_account_link_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.retail_v2alpha.MerchantCenterAccountLinkServiceClient", + "shortName": "MerchantCenterAccountLinkServiceClient" + }, + "fullName": "google.cloud.retail_v2alpha.MerchantCenterAccountLinkServiceClient.create_merchant_center_account_link", + "method": { + "fullName": "google.cloud.retail.v2alpha.MerchantCenterAccountLinkService.CreateMerchantCenterAccountLink", + "service": { + "fullName": "google.cloud.retail.v2alpha.MerchantCenterAccountLinkService", + "shortName": "MerchantCenterAccountLinkService" + }, + "shortName": "CreateMerchantCenterAccountLink" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.retail_v2alpha.types.CreateMerchantCenterAccountLinkRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "merchant_center_account_link", + "type": "google.cloud.retail_v2alpha.types.MerchantCenterAccountLink" + }, + { + "name": "retry", + 
"type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "create_merchant_center_account_link" + }, + "description": "Sample for CreateMerchantCenterAccountLink", + "file": "retail_v2alpha_generated_merchant_center_account_link_service_create_merchant_center_account_link_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "retail_v2alpha_generated_MerchantCenterAccountLinkService_CreateMerchantCenterAccountLink_sync", + "segments": [ + { + "end": 60, + "start": 27, + "type": "FULL" + }, + { + "end": 60, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 50, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 57, + "start": 51, + "type": "REQUEST_EXECUTION" + }, + { + "end": 61, + "start": 58, + "type": "RESPONSE_HANDLING" + } + ], + "title": "retail_v2alpha_generated_merchant_center_account_link_service_create_merchant_center_account_link_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.retail_v2alpha.MerchantCenterAccountLinkServiceAsyncClient", + "shortName": "MerchantCenterAccountLinkServiceAsyncClient" + }, + "fullName": "google.cloud.retail_v2alpha.MerchantCenterAccountLinkServiceAsyncClient.delete_merchant_center_account_link", + "method": { + "fullName": "google.cloud.retail.v2alpha.MerchantCenterAccountLinkService.DeleteMerchantCenterAccountLink", + "service": { + "fullName": "google.cloud.retail.v2alpha.MerchantCenterAccountLinkService", + "shortName": "MerchantCenterAccountLinkService" + }, + "shortName": "DeleteMerchantCenterAccountLink" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.retail_v2alpha.types.DeleteMerchantCenterAccountLinkRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "shortName": "delete_merchant_center_account_link" + }, + "description": "Sample for DeleteMerchantCenterAccountLink", + "file": "retail_v2alpha_generated_merchant_center_account_link_service_delete_merchant_center_account_link_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "retail_v2alpha_generated_MerchantCenterAccountLinkService_DeleteMerchantCenterAccountLink_async", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "retail_v2alpha_generated_merchant_center_account_link_service_delete_merchant_center_account_link_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.retail_v2alpha.MerchantCenterAccountLinkServiceClient", + "shortName": "MerchantCenterAccountLinkServiceClient" + }, + "fullName": "google.cloud.retail_v2alpha.MerchantCenterAccountLinkServiceClient.delete_merchant_center_account_link", + "method": { + "fullName": 
"google.cloud.retail.v2alpha.MerchantCenterAccountLinkService.DeleteMerchantCenterAccountLink", + "service": { + "fullName": "google.cloud.retail.v2alpha.MerchantCenterAccountLinkService", + "shortName": "MerchantCenterAccountLinkService" + }, + "shortName": "DeleteMerchantCenterAccountLink" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.retail_v2alpha.types.DeleteMerchantCenterAccountLinkRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "shortName": "delete_merchant_center_account_link" + }, + "description": "Sample for DeleteMerchantCenterAccountLink", + "file": "retail_v2alpha_generated_merchant_center_account_link_service_delete_merchant_center_account_link_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "retail_v2alpha_generated_MerchantCenterAccountLinkService_DeleteMerchantCenterAccountLink_sync", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "retail_v2alpha_generated_merchant_center_account_link_service_delete_merchant_center_account_link_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.retail_v2alpha.MerchantCenterAccountLinkServiceAsyncClient", + "shortName": "MerchantCenterAccountLinkServiceAsyncClient" + }, + "fullName": "google.cloud.retail_v2alpha.MerchantCenterAccountLinkServiceAsyncClient.list_merchant_center_account_links", + "method": { + "fullName": "google.cloud.retail.v2alpha.MerchantCenterAccountLinkService.ListMerchantCenterAccountLinks", + "service": { + "fullName": "google.cloud.retail.v2alpha.MerchantCenterAccountLinkService", + "shortName": "MerchantCenterAccountLinkService" + }, + "shortName": "ListMerchantCenterAccountLinks" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.retail_v2alpha.types.ListMerchantCenterAccountLinksRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.retail_v2alpha.types.ListMerchantCenterAccountLinksResponse", + "shortName": "list_merchant_center_account_links" + }, + "description": "Sample for ListMerchantCenterAccountLinks", + "file": "retail_v2alpha_generated_merchant_center_account_link_service_list_merchant_center_account_links_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "retail_v2alpha_generated_MerchantCenterAccountLinkService_ListMerchantCenterAccountLinks_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": 
"retail_v2alpha_generated_merchant_center_account_link_service_list_merchant_center_account_links_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.retail_v2alpha.MerchantCenterAccountLinkServiceClient", + "shortName": "MerchantCenterAccountLinkServiceClient" + }, + "fullName": "google.cloud.retail_v2alpha.MerchantCenterAccountLinkServiceClient.list_merchant_center_account_links", + "method": { + "fullName": "google.cloud.retail.v2alpha.MerchantCenterAccountLinkService.ListMerchantCenterAccountLinks", + "service": { + "fullName": "google.cloud.retail.v2alpha.MerchantCenterAccountLinkService", + "shortName": "MerchantCenterAccountLinkService" + }, + "shortName": "ListMerchantCenterAccountLinks" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.retail_v2alpha.types.ListMerchantCenterAccountLinksRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.retail_v2alpha.types.ListMerchantCenterAccountLinksResponse", + "shortName": "list_merchant_center_account_links" + }, + "description": "Sample for ListMerchantCenterAccountLinks", + "file": "retail_v2alpha_generated_merchant_center_account_link_service_list_merchant_center_account_links_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "retail_v2alpha_generated_MerchantCenterAccountLinkService_ListMerchantCenterAccountLinks_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "retail_v2alpha_generated_completion_service_import_completion_data_sync.py" + "title": "retail_v2alpha_generated_merchant_center_account_link_service_list_merchant_center_account_links_sync.py" }, { "canonical": true, "clientMethod": { "async": true, "client": { - "fullName": "google.cloud.retail_v2alpha.ControlServiceAsyncClient", - "shortName": "ControlServiceAsyncClient" + "fullName": "google.cloud.retail_v2alpha.ModelServiceAsyncClient", + "shortName": "ModelServiceAsyncClient" }, - "fullName": "google.cloud.retail_v2alpha.ControlServiceAsyncClient.create_control", + "fullName": "google.cloud.retail_v2alpha.ModelServiceAsyncClient.create_model", "method": { - "fullName": "google.cloud.retail.v2alpha.ControlService.CreateControl", + "fullName": "google.cloud.retail.v2alpha.ModelService.CreateModel", "service": { - "fullName": "google.cloud.retail.v2alpha.ControlService", - "shortName": "ControlService" + "fullName": "google.cloud.retail.v2alpha.ModelService", + "shortName": "ModelService" }, - "shortName": "CreateControl" + "shortName": "CreateModel" }, "parameters": [ { "name": "request", - "type": "google.cloud.retail_v2alpha.types.CreateControlRequest" + "type": "google.cloud.retail_v2alpha.types.CreateModelRequest" }, { "name": "parent", "type": "str" }, { - "name": "control", - "type": "google.cloud.retail_v2alpha.types.Control" - }, - { - "name": "control_id", - "type": "str" + "name": "model", + "type": "google.cloud.retail_v2alpha.types.Model" }, { "name": "retry", @@ -2435,22 +4061,22 
@@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.retail_v2alpha.types.Control", - "shortName": "create_control" + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "create_model" }, - "description": "Sample for CreateControl", - "file": "retail_v2alpha_generated_control_service_create_control_async.py", + "description": "Sample for CreateModel", + "file": "retail_v2alpha_generated_model_service_create_model_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "retail_v2alpha_generated_ControlService_CreateControl_async", + "regionTag": "retail_v2alpha_generated_ModelService_CreateModel_async", "segments": [ { - "end": 58, + "end": 64, "start": 27, "type": "FULL" }, { - "end": 58, + "end": 64, "start": 27, "type": "SHORT" }, @@ -2460,55 +4086,51 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 52, + "end": 54, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 55, - "start": 53, + "end": 61, + "start": 55, "type": "REQUEST_EXECUTION" }, { - "end": 59, - "start": 56, + "end": 65, + "start": 62, "type": "RESPONSE_HANDLING" } ], - "title": "retail_v2alpha_generated_control_service_create_control_async.py" + "title": "retail_v2alpha_generated_model_service_create_model_async.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.retail_v2alpha.ControlServiceClient", - "shortName": "ControlServiceClient" + "fullName": "google.cloud.retail_v2alpha.ModelServiceClient", + "shortName": "ModelServiceClient" }, - "fullName": "google.cloud.retail_v2alpha.ControlServiceClient.create_control", + "fullName": "google.cloud.retail_v2alpha.ModelServiceClient.create_model", "method": { - "fullName": "google.cloud.retail.v2alpha.ControlService.CreateControl", + "fullName": "google.cloud.retail.v2alpha.ModelService.CreateModel", "service": { - "fullName": "google.cloud.retail.v2alpha.ControlService", - "shortName": "ControlService" + "fullName": "google.cloud.retail.v2alpha.ModelService", + "shortName": "ModelService" }, - "shortName": "CreateControl" + "shortName": "CreateModel" }, "parameters": [ { "name": "request", - "type": "google.cloud.retail_v2alpha.types.CreateControlRequest" + "type": "google.cloud.retail_v2alpha.types.CreateModelRequest" }, { "name": "parent", "type": "str" }, { - "name": "control", - "type": "google.cloud.retail_v2alpha.types.Control" - }, - { - "name": "control_id", - "type": "str" + "name": "model", + "type": "google.cloud.retail_v2alpha.types.Model" }, { "name": "retry", @@ -2523,22 +4145,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.retail_v2alpha.types.Control", - "shortName": "create_control" + "resultType": "google.api_core.operation.Operation", + "shortName": "create_model" }, - "description": "Sample for CreateControl", - "file": "retail_v2alpha_generated_control_service_create_control_sync.py", + "description": "Sample for CreateModel", + "file": "retail_v2alpha_generated_model_service_create_model_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "retail_v2alpha_generated_ControlService_CreateControl_sync", + "regionTag": "retail_v2alpha_generated_ModelService_CreateModel_sync", "segments": [ { - "end": 58, + "end": 64, "start": 27, "type": "FULL" }, { - "end": 58, + "end": 64, "start": 27, "type": "SHORT" }, @@ -2548,44 +4170,44 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 52, + "end": 54, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 55, - "start": 53, + "end": 61, + "start": 55, 
"type": "REQUEST_EXECUTION" }, { - "end": 59, - "start": 56, + "end": 65, + "start": 62, "type": "RESPONSE_HANDLING" } ], - "title": "retail_v2alpha_generated_control_service_create_control_sync.py" + "title": "retail_v2alpha_generated_model_service_create_model_sync.py" }, { "canonical": true, "clientMethod": { "async": true, "client": { - "fullName": "google.cloud.retail_v2alpha.ControlServiceAsyncClient", - "shortName": "ControlServiceAsyncClient" + "fullName": "google.cloud.retail_v2alpha.ModelServiceAsyncClient", + "shortName": "ModelServiceAsyncClient" }, - "fullName": "google.cloud.retail_v2alpha.ControlServiceAsyncClient.delete_control", + "fullName": "google.cloud.retail_v2alpha.ModelServiceAsyncClient.delete_model", "method": { - "fullName": "google.cloud.retail.v2alpha.ControlService.DeleteControl", + "fullName": "google.cloud.retail.v2alpha.ModelService.DeleteModel", "service": { - "fullName": "google.cloud.retail.v2alpha.ControlService", - "shortName": "ControlService" + "fullName": "google.cloud.retail.v2alpha.ModelService", + "shortName": "ModelService" }, - "shortName": "DeleteControl" + "shortName": "DeleteModel" }, "parameters": [ { "name": "request", - "type": "google.cloud.retail_v2alpha.types.DeleteControlRequest" + "type": "google.cloud.retail_v2alpha.types.DeleteModelRequest" }, { "name": "name", @@ -2604,13 +4226,13 @@ "type": "Sequence[Tuple[str, str]" } ], - "shortName": "delete_control" + "shortName": "delete_model" }, - "description": "Sample for DeleteControl", - "file": "retail_v2alpha_generated_control_service_delete_control_async.py", + "description": "Sample for DeleteModel", + "file": "retail_v2alpha_generated_model_service_delete_model_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "retail_v2alpha_generated_ControlService_DeleteControl_async", + "regionTag": "retail_v2alpha_generated_ModelService_DeleteModel_async", "segments": [ { "end": 49, @@ -2641,28 +4263,28 @@ "type": "RESPONSE_HANDLING" } ], - "title": "retail_v2alpha_generated_control_service_delete_control_async.py" + "title": "retail_v2alpha_generated_model_service_delete_model_async.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.retail_v2alpha.ControlServiceClient", - "shortName": "ControlServiceClient" + "fullName": "google.cloud.retail_v2alpha.ModelServiceClient", + "shortName": "ModelServiceClient" }, - "fullName": "google.cloud.retail_v2alpha.ControlServiceClient.delete_control", + "fullName": "google.cloud.retail_v2alpha.ModelServiceClient.delete_model", "method": { - "fullName": "google.cloud.retail.v2alpha.ControlService.DeleteControl", + "fullName": "google.cloud.retail.v2alpha.ModelService.DeleteModel", "service": { - "fullName": "google.cloud.retail.v2alpha.ControlService", - "shortName": "ControlService" + "fullName": "google.cloud.retail.v2alpha.ModelService", + "shortName": "ModelService" }, - "shortName": "DeleteControl" + "shortName": "DeleteModel" }, "parameters": [ { "name": "request", - "type": "google.cloud.retail_v2alpha.types.DeleteControlRequest" + "type": "google.cloud.retail_v2alpha.types.DeleteModelRequest" }, { "name": "name", @@ -2681,13 +4303,13 @@ "type": "Sequence[Tuple[str, str]" } ], - "shortName": "delete_control" + "shortName": "delete_model" }, - "description": "Sample for DeleteControl", - "file": "retail_v2alpha_generated_control_service_delete_control_sync.py", + "description": "Sample for DeleteModel", + "file": "retail_v2alpha_generated_model_service_delete_model_sync.py", "language": 
"PYTHON", "origin": "API_DEFINITION", - "regionTag": "retail_v2alpha_generated_ControlService_DeleteControl_sync", + "regionTag": "retail_v2alpha_generated_ModelService_DeleteModel_sync", "segments": [ { "end": 49, @@ -2718,29 +4340,29 @@ "type": "RESPONSE_HANDLING" } ], - "title": "retail_v2alpha_generated_control_service_delete_control_sync.py" + "title": "retail_v2alpha_generated_model_service_delete_model_sync.py" }, { "canonical": true, "clientMethod": { "async": true, "client": { - "fullName": "google.cloud.retail_v2alpha.ControlServiceAsyncClient", - "shortName": "ControlServiceAsyncClient" + "fullName": "google.cloud.retail_v2alpha.ModelServiceAsyncClient", + "shortName": "ModelServiceAsyncClient" }, - "fullName": "google.cloud.retail_v2alpha.ControlServiceAsyncClient.get_control", + "fullName": "google.cloud.retail_v2alpha.ModelServiceAsyncClient.get_model", "method": { - "fullName": "google.cloud.retail.v2alpha.ControlService.GetControl", + "fullName": "google.cloud.retail.v2alpha.ModelService.GetModel", "service": { - "fullName": "google.cloud.retail.v2alpha.ControlService", - "shortName": "ControlService" + "fullName": "google.cloud.retail.v2alpha.ModelService", + "shortName": "ModelService" }, - "shortName": "GetControl" + "shortName": "GetModel" }, "parameters": [ { "name": "request", - "type": "google.cloud.retail_v2alpha.types.GetControlRequest" + "type": "google.cloud.retail_v2alpha.types.GetModelRequest" }, { "name": "name", @@ -2759,14 +4381,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.retail_v2alpha.types.Control", - "shortName": "get_control" + "resultType": "google.cloud.retail_v2alpha.types.Model", + "shortName": "get_model" }, - "description": "Sample for GetControl", - "file": "retail_v2alpha_generated_control_service_get_control_async.py", + "description": "Sample for GetModel", + "file": "retail_v2alpha_generated_model_service_get_model_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "retail_v2alpha_generated_ControlService_GetControl_async", + "regionTag": "retail_v2alpha_generated_ModelService_GetModel_async", "segments": [ { "end": 51, @@ -2799,28 +4421,28 @@ "type": "RESPONSE_HANDLING" } ], - "title": "retail_v2alpha_generated_control_service_get_control_async.py" + "title": "retail_v2alpha_generated_model_service_get_model_async.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.retail_v2alpha.ControlServiceClient", - "shortName": "ControlServiceClient" + "fullName": "google.cloud.retail_v2alpha.ModelServiceClient", + "shortName": "ModelServiceClient" }, - "fullName": "google.cloud.retail_v2alpha.ControlServiceClient.get_control", + "fullName": "google.cloud.retail_v2alpha.ModelServiceClient.get_model", "method": { - "fullName": "google.cloud.retail.v2alpha.ControlService.GetControl", + "fullName": "google.cloud.retail.v2alpha.ModelService.GetModel", "service": { - "fullName": "google.cloud.retail.v2alpha.ControlService", - "shortName": "ControlService" + "fullName": "google.cloud.retail.v2alpha.ModelService", + "shortName": "ModelService" }, - "shortName": "GetControl" + "shortName": "GetModel" }, "parameters": [ { "name": "request", - "type": "google.cloud.retail_v2alpha.types.GetControlRequest" + "type": "google.cloud.retail_v2alpha.types.GetModelRequest" }, { "name": "name", @@ -2839,14 +4461,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.retail_v2alpha.types.Control", - "shortName": "get_control" + "resultType": 
"google.cloud.retail_v2alpha.types.Model", + "shortName": "get_model" }, - "description": "Sample for GetControl", - "file": "retail_v2alpha_generated_control_service_get_control_sync.py", + "description": "Sample for GetModel", + "file": "retail_v2alpha_generated_model_service_get_model_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "retail_v2alpha_generated_ControlService_GetControl_sync", + "regionTag": "retail_v2alpha_generated_ModelService_GetModel_sync", "segments": [ { "end": 51, @@ -2879,29 +4501,29 @@ "type": "RESPONSE_HANDLING" } ], - "title": "retail_v2alpha_generated_control_service_get_control_sync.py" + "title": "retail_v2alpha_generated_model_service_get_model_sync.py" }, { "canonical": true, "clientMethod": { "async": true, "client": { - "fullName": "google.cloud.retail_v2alpha.ControlServiceAsyncClient", - "shortName": "ControlServiceAsyncClient" + "fullName": "google.cloud.retail_v2alpha.ModelServiceAsyncClient", + "shortName": "ModelServiceAsyncClient" }, - "fullName": "google.cloud.retail_v2alpha.ControlServiceAsyncClient.list_controls", + "fullName": "google.cloud.retail_v2alpha.ModelServiceAsyncClient.list_models", "method": { - "fullName": "google.cloud.retail.v2alpha.ControlService.ListControls", + "fullName": "google.cloud.retail.v2alpha.ModelService.ListModels", "service": { - "fullName": "google.cloud.retail.v2alpha.ControlService", - "shortName": "ControlService" + "fullName": "google.cloud.retail.v2alpha.ModelService", + "shortName": "ModelService" }, - "shortName": "ListControls" + "shortName": "ListModels" }, "parameters": [ { "name": "request", - "type": "google.cloud.retail_v2alpha.types.ListControlsRequest" + "type": "google.cloud.retail_v2alpha.types.ListModelsRequest" }, { "name": "parent", @@ -2920,14 +4542,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.retail_v2alpha.services.control_service.pagers.ListControlsAsyncPager", - "shortName": "list_controls" + "resultType": "google.cloud.retail_v2alpha.services.model_service.pagers.ListModelsAsyncPager", + "shortName": "list_models" }, - "description": "Sample for ListControls", - "file": "retail_v2alpha_generated_control_service_list_controls_async.py", + "description": "Sample for ListModels", + "file": "retail_v2alpha_generated_model_service_list_models_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "retail_v2alpha_generated_ControlService_ListControls_async", + "regionTag": "retail_v2alpha_generated_ModelService_ListModels_async", "segments": [ { "end": 52, @@ -2960,28 +4582,28 @@ "type": "RESPONSE_HANDLING" } ], - "title": "retail_v2alpha_generated_control_service_list_controls_async.py" + "title": "retail_v2alpha_generated_model_service_list_models_async.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.retail_v2alpha.ControlServiceClient", - "shortName": "ControlServiceClient" + "fullName": "google.cloud.retail_v2alpha.ModelServiceClient", + "shortName": "ModelServiceClient" }, - "fullName": "google.cloud.retail_v2alpha.ControlServiceClient.list_controls", + "fullName": "google.cloud.retail_v2alpha.ModelServiceClient.list_models", "method": { - "fullName": "google.cloud.retail.v2alpha.ControlService.ListControls", + "fullName": "google.cloud.retail.v2alpha.ModelService.ListModels", "service": { - "fullName": "google.cloud.retail.v2alpha.ControlService", - "shortName": "ControlService" + "fullName": "google.cloud.retail.v2alpha.ModelService", + "shortName": "ModelService" }, - 
"shortName": "ListControls" + "shortName": "ListModels" }, "parameters": [ { "name": "request", - "type": "google.cloud.retail_v2alpha.types.ListControlsRequest" + "type": "google.cloud.retail_v2alpha.types.ListModelsRequest" }, { "name": "parent", @@ -3000,14 +4622,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.retail_v2alpha.services.control_service.pagers.ListControlsPager", - "shortName": "list_controls" + "resultType": "google.cloud.retail_v2alpha.services.model_service.pagers.ListModelsPager", + "shortName": "list_models" }, - "description": "Sample for ListControls", - "file": "retail_v2alpha_generated_control_service_list_controls_sync.py", + "description": "Sample for ListModels", + "file": "retail_v2alpha_generated_model_service_list_models_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "retail_v2alpha_generated_ControlService_ListControls_sync", + "regionTag": "retail_v2alpha_generated_ModelService_ListModels_sync", "segments": [ { "end": 52, @@ -3040,37 +4662,33 @@ "type": "RESPONSE_HANDLING" } ], - "title": "retail_v2alpha_generated_control_service_list_controls_sync.py" + "title": "retail_v2alpha_generated_model_service_list_models_sync.py" }, { "canonical": true, "clientMethod": { "async": true, "client": { - "fullName": "google.cloud.retail_v2alpha.ControlServiceAsyncClient", - "shortName": "ControlServiceAsyncClient" + "fullName": "google.cloud.retail_v2alpha.ModelServiceAsyncClient", + "shortName": "ModelServiceAsyncClient" }, - "fullName": "google.cloud.retail_v2alpha.ControlServiceAsyncClient.update_control", + "fullName": "google.cloud.retail_v2alpha.ModelServiceAsyncClient.pause_model", "method": { - "fullName": "google.cloud.retail.v2alpha.ControlService.UpdateControl", + "fullName": "google.cloud.retail.v2alpha.ModelService.PauseModel", "service": { - "fullName": "google.cloud.retail.v2alpha.ControlService", - "shortName": "ControlService" + "fullName": "google.cloud.retail.v2alpha.ModelService", + "shortName": "ModelService" }, - "shortName": "UpdateControl" + "shortName": "PauseModel" }, "parameters": [ { "name": "request", - "type": "google.cloud.retail_v2alpha.types.UpdateControlRequest" - }, - { - "name": "control", - "type": "google.cloud.retail_v2alpha.types.Control" + "type": "google.cloud.retail_v2alpha.types.PauseModelRequest" }, { - "name": "update_mask", - "type": "google.protobuf.field_mask_pb2.FieldMask" + "name": "name", + "type": "str" }, { "name": "retry", @@ -3085,22 +4703,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.retail_v2alpha.types.Control", - "shortName": "update_control" + "resultType": "google.cloud.retail_v2alpha.types.Model", + "shortName": "pause_model" }, - "description": "Sample for UpdateControl", - "file": "retail_v2alpha_generated_control_service_update_control_async.py", + "description": "Sample for PauseModel", + "file": "retail_v2alpha_generated_model_service_pause_model_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "retail_v2alpha_generated_ControlService_UpdateControl_async", + "regionTag": "retail_v2alpha_generated_ModelService_PauseModel_async", "segments": [ { - "end": 56, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 56, + "end": 51, "start": 27, "type": "SHORT" }, @@ -3110,51 +4728,47 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 50, + "end": 45, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 53, - "start": 51, + "end": 48, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 
57, - "start": 54, + "end": 52, + "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "retail_v2alpha_generated_control_service_update_control_async.py" + "title": "retail_v2alpha_generated_model_service_pause_model_async.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.retail_v2alpha.ControlServiceClient", - "shortName": "ControlServiceClient" + "fullName": "google.cloud.retail_v2alpha.ModelServiceClient", + "shortName": "ModelServiceClient" }, - "fullName": "google.cloud.retail_v2alpha.ControlServiceClient.update_control", + "fullName": "google.cloud.retail_v2alpha.ModelServiceClient.pause_model", "method": { - "fullName": "google.cloud.retail.v2alpha.ControlService.UpdateControl", + "fullName": "google.cloud.retail.v2alpha.ModelService.PauseModel", "service": { - "fullName": "google.cloud.retail.v2alpha.ControlService", - "shortName": "ControlService" + "fullName": "google.cloud.retail.v2alpha.ModelService", + "shortName": "ModelService" }, - "shortName": "UpdateControl" + "shortName": "PauseModel" }, "parameters": [ { "name": "request", - "type": "google.cloud.retail_v2alpha.types.UpdateControlRequest" - }, - { - "name": "control", - "type": "google.cloud.retail_v2alpha.types.Control" + "type": "google.cloud.retail_v2alpha.types.PauseModelRequest" }, { - "name": "update_mask", - "type": "google.protobuf.field_mask_pb2.FieldMask" + "name": "name", + "type": "str" }, { "name": "retry", @@ -3169,22 +4783,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.retail_v2alpha.types.Control", - "shortName": "update_control" + "resultType": "google.cloud.retail_v2alpha.types.Model", + "shortName": "pause_model" }, - "description": "Sample for UpdateControl", - "file": "retail_v2alpha_generated_control_service_update_control_sync.py", + "description": "Sample for PauseModel", + "file": "retail_v2alpha_generated_model_service_pause_model_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "retail_v2alpha_generated_ControlService_UpdateControl_sync", + "regionTag": "retail_v2alpha_generated_ModelService_PauseModel_sync", "segments": [ { - "end": 56, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 56, + "end": 51, "start": 27, "type": "SHORT" }, @@ -3194,53 +4808,49 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 50, + "end": 45, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 53, - "start": 51, + "end": 48, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 57, - "start": 54, + "end": 52, + "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "retail_v2alpha_generated_control_service_update_control_sync.py" + "title": "retail_v2alpha_generated_model_service_pause_model_sync.py" }, { "canonical": true, "clientMethod": { "async": true, "client": { - "fullName": "google.cloud.retail_v2alpha.MerchantCenterAccountLinkServiceAsyncClient", - "shortName": "MerchantCenterAccountLinkServiceAsyncClient" + "fullName": "google.cloud.retail_v2alpha.ModelServiceAsyncClient", + "shortName": "ModelServiceAsyncClient" }, - "fullName": "google.cloud.retail_v2alpha.MerchantCenterAccountLinkServiceAsyncClient.create_merchant_center_account_link", + "fullName": "google.cloud.retail_v2alpha.ModelServiceAsyncClient.resume_model", "method": { - "fullName": "google.cloud.retail.v2alpha.MerchantCenterAccountLinkService.CreateMerchantCenterAccountLink", + "fullName": "google.cloud.retail.v2alpha.ModelService.ResumeModel", "service": { - "fullName": 
"google.cloud.retail.v2alpha.MerchantCenterAccountLinkService", - "shortName": "MerchantCenterAccountLinkService" + "fullName": "google.cloud.retail.v2alpha.ModelService", + "shortName": "ModelService" }, - "shortName": "CreateMerchantCenterAccountLink" + "shortName": "ResumeModel" }, "parameters": [ { "name": "request", - "type": "google.cloud.retail_v2alpha.types.CreateMerchantCenterAccountLinkRequest" + "type": "google.cloud.retail_v2alpha.types.ResumeModelRequest" }, { - "name": "parent", + "name": "name", "type": "str" }, - { - "name": "merchant_center_account_link", - "type": "google.cloud.retail_v2alpha.types.MerchantCenterAccountLink" - }, { "name": "retry", "type": "google.api_core.retry.Retry" @@ -3254,22 +4864,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "create_merchant_center_account_link" + "resultType": "google.cloud.retail_v2alpha.types.Model", + "shortName": "resume_model" }, - "description": "Sample for CreateMerchantCenterAccountLink", - "file": "retail_v2alpha_generated_merchant_center_account_link_service_create_merchant_center_account_link_async.py", + "description": "Sample for ResumeModel", + "file": "retail_v2alpha_generated_model_service_resume_model_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "retail_v2alpha_generated_MerchantCenterAccountLinkService_CreateMerchantCenterAccountLink_async", + "regionTag": "retail_v2alpha_generated_ModelService_ResumeModel_async", "segments": [ { - "end": 60, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 60, + "end": 51, "start": 27, "type": "SHORT" }, @@ -3279,52 +4889,48 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 50, + "end": 45, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 57, - "start": 51, + "end": 48, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 61, - "start": 58, + "end": 52, + "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "retail_v2alpha_generated_merchant_center_account_link_service_create_merchant_center_account_link_async.py" + "title": "retail_v2alpha_generated_model_service_resume_model_async.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.retail_v2alpha.MerchantCenterAccountLinkServiceClient", - "shortName": "MerchantCenterAccountLinkServiceClient" + "fullName": "google.cloud.retail_v2alpha.ModelServiceClient", + "shortName": "ModelServiceClient" }, - "fullName": "google.cloud.retail_v2alpha.MerchantCenterAccountLinkServiceClient.create_merchant_center_account_link", + "fullName": "google.cloud.retail_v2alpha.ModelServiceClient.resume_model", "method": { - "fullName": "google.cloud.retail.v2alpha.MerchantCenterAccountLinkService.CreateMerchantCenterAccountLink", + "fullName": "google.cloud.retail.v2alpha.ModelService.ResumeModel", "service": { - "fullName": "google.cloud.retail.v2alpha.MerchantCenterAccountLinkService", - "shortName": "MerchantCenterAccountLinkService" + "fullName": "google.cloud.retail.v2alpha.ModelService", + "shortName": "ModelService" }, - "shortName": "CreateMerchantCenterAccountLink" + "shortName": "ResumeModel" }, "parameters": [ { "name": "request", - "type": "google.cloud.retail_v2alpha.types.CreateMerchantCenterAccountLinkRequest" + "type": "google.cloud.retail_v2alpha.types.ResumeModelRequest" }, { - "name": "parent", + "name": "name", "type": "str" }, - { - "name": "merchant_center_account_link", - "type": "google.cloud.retail_v2alpha.types.MerchantCenterAccountLink" - }, { "name": 
"retry", "type": "google.api_core.retry.Retry" @@ -3338,22 +4944,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.api_core.operation.Operation", - "shortName": "create_merchant_center_account_link" + "resultType": "google.cloud.retail_v2alpha.types.Model", + "shortName": "resume_model" }, - "description": "Sample for CreateMerchantCenterAccountLink", - "file": "retail_v2alpha_generated_merchant_center_account_link_service_create_merchant_center_account_link_sync.py", + "description": "Sample for ResumeModel", + "file": "retail_v2alpha_generated_model_service_resume_model_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "retail_v2alpha_generated_MerchantCenterAccountLinkService_CreateMerchantCenterAccountLink_sync", + "regionTag": "retail_v2alpha_generated_ModelService_ResumeModel_sync", "segments": [ { - "end": 60, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 60, + "end": 51, "start": 27, "type": "SHORT" }, @@ -3363,44 +4969,44 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 50, + "end": 45, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 57, - "start": 51, + "end": 48, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 61, - "start": 58, + "end": 52, + "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "retail_v2alpha_generated_merchant_center_account_link_service_create_merchant_center_account_link_sync.py" + "title": "retail_v2alpha_generated_model_service_resume_model_sync.py" }, { "canonical": true, "clientMethod": { "async": true, "client": { - "fullName": "google.cloud.retail_v2alpha.MerchantCenterAccountLinkServiceAsyncClient", - "shortName": "MerchantCenterAccountLinkServiceAsyncClient" + "fullName": "google.cloud.retail_v2alpha.ModelServiceAsyncClient", + "shortName": "ModelServiceAsyncClient" }, - "fullName": "google.cloud.retail_v2alpha.MerchantCenterAccountLinkServiceAsyncClient.delete_merchant_center_account_link", + "fullName": "google.cloud.retail_v2alpha.ModelServiceAsyncClient.tune_model", "method": { - "fullName": "google.cloud.retail.v2alpha.MerchantCenterAccountLinkService.DeleteMerchantCenterAccountLink", + "fullName": "google.cloud.retail.v2alpha.ModelService.TuneModel", "service": { - "fullName": "google.cloud.retail.v2alpha.MerchantCenterAccountLinkService", - "shortName": "MerchantCenterAccountLinkService" + "fullName": "google.cloud.retail.v2alpha.ModelService", + "shortName": "ModelService" }, - "shortName": "DeleteMerchantCenterAccountLink" + "shortName": "TuneModel" }, "parameters": [ { "name": "request", - "type": "google.cloud.retail_v2alpha.types.DeleteMerchantCenterAccountLinkRequest" + "type": "google.cloud.retail_v2alpha.types.TuneModelRequest" }, { "name": "name", @@ -3419,21 +5025,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "shortName": "delete_merchant_center_account_link" + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "tune_model" }, - "description": "Sample for DeleteMerchantCenterAccountLink", - "file": "retail_v2alpha_generated_merchant_center_account_link_service_delete_merchant_center_account_link_async.py", + "description": "Sample for TuneModel", + "file": "retail_v2alpha_generated_model_service_tune_model_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "retail_v2alpha_generated_MerchantCenterAccountLinkService_DeleteMerchantCenterAccountLink_async", + "regionTag": "retail_v2alpha_generated_ModelService_TuneModel_async", "segments": [ { - "end": 49, + "end": 55, "start": 27, "type": "FULL" }, { - 
"end": 49, + "end": 55, "start": 27, "type": "SHORT" }, @@ -3448,36 +5055,38 @@ "type": "REQUEST_INITIALIZATION" }, { + "end": 52, "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 50, + "end": 56, + "start": 53, "type": "RESPONSE_HANDLING" } ], - "title": "retail_v2alpha_generated_merchant_center_account_link_service_delete_merchant_center_account_link_async.py" + "title": "retail_v2alpha_generated_model_service_tune_model_async.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.retail_v2alpha.MerchantCenterAccountLinkServiceClient", - "shortName": "MerchantCenterAccountLinkServiceClient" + "fullName": "google.cloud.retail_v2alpha.ModelServiceClient", + "shortName": "ModelServiceClient" }, - "fullName": "google.cloud.retail_v2alpha.MerchantCenterAccountLinkServiceClient.delete_merchant_center_account_link", + "fullName": "google.cloud.retail_v2alpha.ModelServiceClient.tune_model", "method": { - "fullName": "google.cloud.retail.v2alpha.MerchantCenterAccountLinkService.DeleteMerchantCenterAccountLink", + "fullName": "google.cloud.retail.v2alpha.ModelService.TuneModel", "service": { - "fullName": "google.cloud.retail.v2alpha.MerchantCenterAccountLinkService", - "shortName": "MerchantCenterAccountLinkService" + "fullName": "google.cloud.retail.v2alpha.ModelService", + "shortName": "ModelService" }, - "shortName": "DeleteMerchantCenterAccountLink" + "shortName": "TuneModel" }, "parameters": [ { "name": "request", - "type": "google.cloud.retail_v2alpha.types.DeleteMerchantCenterAccountLinkRequest" + "type": "google.cloud.retail_v2alpha.types.TuneModelRequest" }, { "name": "name", @@ -3496,21 +5105,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "shortName": "delete_merchant_center_account_link" + "resultType": "google.api_core.operation.Operation", + "shortName": "tune_model" }, - "description": "Sample for DeleteMerchantCenterAccountLink", - "file": "retail_v2alpha_generated_merchant_center_account_link_service_delete_merchant_center_account_link_sync.py", + "description": "Sample for TuneModel", + "file": "retail_v2alpha_generated_model_service_tune_model_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "retail_v2alpha_generated_MerchantCenterAccountLinkService_DeleteMerchantCenterAccountLink_sync", + "regionTag": "retail_v2alpha_generated_ModelService_TuneModel_sync", "segments": [ { - "end": 49, + "end": 55, "start": 27, "type": "FULL" }, { - "end": 49, + "end": 55, "start": 27, "type": "SHORT" }, @@ -3525,41 +5135,47 @@ "type": "REQUEST_INITIALIZATION" }, { + "end": 52, "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 50, + "end": 56, + "start": 53, "type": "RESPONSE_HANDLING" } ], - "title": "retail_v2alpha_generated_merchant_center_account_link_service_delete_merchant_center_account_link_sync.py" + "title": "retail_v2alpha_generated_model_service_tune_model_sync.py" }, { "canonical": true, "clientMethod": { "async": true, "client": { - "fullName": "google.cloud.retail_v2alpha.MerchantCenterAccountLinkServiceAsyncClient", - "shortName": "MerchantCenterAccountLinkServiceAsyncClient" + "fullName": "google.cloud.retail_v2alpha.ModelServiceAsyncClient", + "shortName": "ModelServiceAsyncClient" }, - "fullName": "google.cloud.retail_v2alpha.MerchantCenterAccountLinkServiceAsyncClient.list_merchant_center_account_links", + "fullName": "google.cloud.retail_v2alpha.ModelServiceAsyncClient.update_model", "method": { - "fullName": "google.cloud.retail.v2alpha.MerchantCenterAccountLinkService.ListMerchantCenterAccountLinks", + 
"fullName": "google.cloud.retail.v2alpha.ModelService.UpdateModel", "service": { - "fullName": "google.cloud.retail.v2alpha.MerchantCenterAccountLinkService", - "shortName": "MerchantCenterAccountLinkService" + "fullName": "google.cloud.retail.v2alpha.ModelService", + "shortName": "ModelService" }, - "shortName": "ListMerchantCenterAccountLinks" + "shortName": "UpdateModel" }, "parameters": [ { "name": "request", - "type": "google.cloud.retail_v2alpha.types.ListMerchantCenterAccountLinksRequest" + "type": "google.cloud.retail_v2alpha.types.UpdateModelRequest" }, { - "name": "parent", - "type": "str" + "name": "model", + "type": "google.cloud.retail_v2alpha.types.Model" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" }, { "name": "retry", @@ -3574,22 +5190,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.retail_v2alpha.types.ListMerchantCenterAccountLinksResponse", - "shortName": "list_merchant_center_account_links" + "resultType": "google.cloud.retail_v2alpha.types.Model", + "shortName": "update_model" }, - "description": "Sample for ListMerchantCenterAccountLinks", - "file": "retail_v2alpha_generated_merchant_center_account_link_service_list_merchant_center_account_links_async.py", + "description": "Sample for UpdateModel", + "file": "retail_v2alpha_generated_model_service_update_model_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "retail_v2alpha_generated_MerchantCenterAccountLinkService_ListMerchantCenterAccountLinks_async", + "regionTag": "retail_v2alpha_generated_ModelService_UpdateModel_async", "segments": [ { - "end": 51, + "end": 59, "start": 27, "type": "FULL" }, { - "end": 51, + "end": 59, "start": 27, "type": "SHORT" }, @@ -3599,47 +5215,51 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 45, + "end": 53, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 48, - "start": 46, + "end": 56, + "start": 54, "type": "REQUEST_EXECUTION" }, { - "end": 52, - "start": 49, + "end": 60, + "start": 57, "type": "RESPONSE_HANDLING" } ], - "title": "retail_v2alpha_generated_merchant_center_account_link_service_list_merchant_center_account_links_async.py" + "title": "retail_v2alpha_generated_model_service_update_model_async.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.retail_v2alpha.MerchantCenterAccountLinkServiceClient", - "shortName": "MerchantCenterAccountLinkServiceClient" + "fullName": "google.cloud.retail_v2alpha.ModelServiceClient", + "shortName": "ModelServiceClient" }, - "fullName": "google.cloud.retail_v2alpha.MerchantCenterAccountLinkServiceClient.list_merchant_center_account_links", + "fullName": "google.cloud.retail_v2alpha.ModelServiceClient.update_model", "method": { - "fullName": "google.cloud.retail.v2alpha.MerchantCenterAccountLinkService.ListMerchantCenterAccountLinks", + "fullName": "google.cloud.retail.v2alpha.ModelService.UpdateModel", "service": { - "fullName": "google.cloud.retail.v2alpha.MerchantCenterAccountLinkService", - "shortName": "MerchantCenterAccountLinkService" + "fullName": "google.cloud.retail.v2alpha.ModelService", + "shortName": "ModelService" }, - "shortName": "ListMerchantCenterAccountLinks" + "shortName": "UpdateModel" }, "parameters": [ { "name": "request", - "type": "google.cloud.retail_v2alpha.types.ListMerchantCenterAccountLinksRequest" + "type": "google.cloud.retail_v2alpha.types.UpdateModelRequest" }, { - "name": "parent", - "type": "str" + "name": "model", + "type": 
"google.cloud.retail_v2alpha.types.Model" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" }, { "name": "retry", @@ -3654,22 +5274,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.retail_v2alpha.types.ListMerchantCenterAccountLinksResponse", - "shortName": "list_merchant_center_account_links" + "resultType": "google.cloud.retail_v2alpha.types.Model", + "shortName": "update_model" }, - "description": "Sample for ListMerchantCenterAccountLinks", - "file": "retail_v2alpha_generated_merchant_center_account_link_service_list_merchant_center_account_links_sync.py", + "description": "Sample for UpdateModel", + "file": "retail_v2alpha_generated_model_service_update_model_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "retail_v2alpha_generated_MerchantCenterAccountLinkService_ListMerchantCenterAccountLinks_sync", + "regionTag": "retail_v2alpha_generated_ModelService_UpdateModel_sync", "segments": [ { - "end": 51, + "end": 59, "start": 27, "type": "FULL" }, { - "end": 51, + "end": 59, "start": 27, "type": "SHORT" }, @@ -3679,52 +5299,44 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 45, + "end": 53, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 48, - "start": 46, + "end": 56, + "start": 54, "type": "REQUEST_EXECUTION" }, { - "end": 52, - "start": 49, + "end": 60, + "start": 57, "type": "RESPONSE_HANDLING" } ], - "title": "retail_v2alpha_generated_merchant_center_account_link_service_list_merchant_center_account_links_sync.py" + "title": "retail_v2alpha_generated_model_service_update_model_sync.py" }, { "canonical": true, "clientMethod": { "async": true, "client": { - "fullName": "google.cloud.retail_v2alpha.ModelServiceAsyncClient", - "shortName": "ModelServiceAsyncClient" + "fullName": "google.cloud.retail_v2alpha.PredictionServiceAsyncClient", + "shortName": "PredictionServiceAsyncClient" }, - "fullName": "google.cloud.retail_v2alpha.ModelServiceAsyncClient.create_model", + "fullName": "google.cloud.retail_v2alpha.PredictionServiceAsyncClient.predict", "method": { - "fullName": "google.cloud.retail.v2alpha.ModelService.CreateModel", + "fullName": "google.cloud.retail.v2alpha.PredictionService.Predict", "service": { - "fullName": "google.cloud.retail.v2alpha.ModelService", - "shortName": "ModelService" + "fullName": "google.cloud.retail.v2alpha.PredictionService", + "shortName": "PredictionService" }, - "shortName": "CreateModel" + "shortName": "Predict" }, "parameters": [ { "name": "request", - "type": "google.cloud.retail_v2alpha.types.CreateModelRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "model", - "type": "google.cloud.retail_v2alpha.types.Model" + "type": "google.cloud.retail_v2alpha.types.PredictRequest" }, { "name": "retry", @@ -3739,22 +5351,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "create_model" + "resultType": "google.cloud.retail_v2alpha.types.PredictResponse", + "shortName": "predict" }, - "description": "Sample for CreateModel", - "file": "retail_v2alpha_generated_model_service_create_model_async.py", + "description": "Sample for Predict", + "file": "retail_v2alpha_generated_prediction_service_predict_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "retail_v2alpha_generated_ModelService_CreateModel_async", + "regionTag": "retail_v2alpha_generated_PredictionService_Predict_async", "segments": [ { - "end": 64, + "end": 56, "start": 27, 
"type": "FULL" }, { - "end": 64, + "end": 56, "start": 27, "type": "SHORT" }, @@ -3764,51 +5376,43 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 54, + "end": 50, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 61, - "start": 55, + "end": 53, + "start": 51, "type": "REQUEST_EXECUTION" }, { - "end": 65, - "start": 62, + "end": 57, + "start": 54, "type": "RESPONSE_HANDLING" } ], - "title": "retail_v2alpha_generated_model_service_create_model_async.py" + "title": "retail_v2alpha_generated_prediction_service_predict_async.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.retail_v2alpha.ModelServiceClient", - "shortName": "ModelServiceClient" + "fullName": "google.cloud.retail_v2alpha.PredictionServiceClient", + "shortName": "PredictionServiceClient" }, - "fullName": "google.cloud.retail_v2alpha.ModelServiceClient.create_model", + "fullName": "google.cloud.retail_v2alpha.PredictionServiceClient.predict", "method": { - "fullName": "google.cloud.retail.v2alpha.ModelService.CreateModel", + "fullName": "google.cloud.retail.v2alpha.PredictionService.Predict", "service": { - "fullName": "google.cloud.retail.v2alpha.ModelService", - "shortName": "ModelService" + "fullName": "google.cloud.retail.v2alpha.PredictionService", + "shortName": "PredictionService" }, - "shortName": "CreateModel" + "shortName": "Predict" }, "parameters": [ { "name": "request", - "type": "google.cloud.retail_v2alpha.types.CreateModelRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "model", - "type": "google.cloud.retail_v2alpha.types.Model" + "type": "google.cloud.retail_v2alpha.types.PredictRequest" }, { "name": "retry", @@ -3823,22 +5427,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.api_core.operation.Operation", - "shortName": "create_model" + "resultType": "google.cloud.retail_v2alpha.types.PredictResponse", + "shortName": "predict" }, - "description": "Sample for CreateModel", - "file": "retail_v2alpha_generated_model_service_create_model_sync.py", + "description": "Sample for Predict", + "file": "retail_v2alpha_generated_prediction_service_predict_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "retail_v2alpha_generated_ModelService_CreateModel_sync", + "regionTag": "retail_v2alpha_generated_PredictionService_Predict_sync", "segments": [ { - "end": 64, + "end": 56, "start": 27, "type": "FULL" }, { - "end": 64, + "end": 56, "start": 27, "type": "SHORT" }, @@ -3848,47 +5452,47 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 54, + "end": 50, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 61, - "start": 55, + "end": 53, + "start": 51, "type": "REQUEST_EXECUTION" }, { - "end": 65, - "start": 62, + "end": 57, + "start": 54, "type": "RESPONSE_HANDLING" } ], - "title": "retail_v2alpha_generated_model_service_create_model_sync.py" + "title": "retail_v2alpha_generated_prediction_service_predict_sync.py" }, { "canonical": true, "clientMethod": { "async": true, "client": { - "fullName": "google.cloud.retail_v2alpha.ModelServiceAsyncClient", - "shortName": "ModelServiceAsyncClient" + "fullName": "google.cloud.retail_v2alpha.ProductServiceAsyncClient", + "shortName": "ProductServiceAsyncClient" }, - "fullName": "google.cloud.retail_v2alpha.ModelServiceAsyncClient.delete_model", + "fullName": "google.cloud.retail_v2alpha.ProductServiceAsyncClient.add_fulfillment_places", "method": { - "fullName": "google.cloud.retail.v2alpha.ModelService.DeleteModel", + "fullName": 
"google.cloud.retail.v2alpha.ProductService.AddFulfillmentPlaces", "service": { - "fullName": "google.cloud.retail.v2alpha.ModelService", - "shortName": "ModelService" + "fullName": "google.cloud.retail.v2alpha.ProductService", + "shortName": "ProductService" }, - "shortName": "DeleteModel" + "shortName": "AddFulfillmentPlaces" }, "parameters": [ { "name": "request", - "type": "google.cloud.retail_v2alpha.types.DeleteModelRequest" + "type": "google.cloud.retail_v2alpha.types.AddFulfillmentPlacesRequest" }, { - "name": "name", + "name": "product", "type": "str" }, { @@ -3904,21 +5508,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "shortName": "delete_model" + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "add_fulfillment_places" }, - "description": "Sample for DeleteModel", - "file": "retail_v2alpha_generated_model_service_delete_model_async.py", + "description": "Sample for AddFulfillmentPlaces", + "file": "retail_v2alpha_generated_product_service_add_fulfillment_places_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "retail_v2alpha_generated_ModelService_DeleteModel_async", + "regionTag": "retail_v2alpha_generated_ProductService_AddFulfillmentPlaces_async", "segments": [ { - "end": 49, + "end": 57, "start": 27, "type": "FULL" }, { - "end": 49, + "end": 57, "start": 27, "type": "SHORT" }, @@ -3928,44 +5533,46 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 45, + "end": 47, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "start": 46, + "end": 54, + "start": 48, "type": "REQUEST_EXECUTION" }, { - "end": 50, + "end": 58, + "start": 55, "type": "RESPONSE_HANDLING" } ], - "title": "retail_v2alpha_generated_model_service_delete_model_async.py" + "title": "retail_v2alpha_generated_product_service_add_fulfillment_places_async.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.retail_v2alpha.ModelServiceClient", - "shortName": "ModelServiceClient" + "fullName": "google.cloud.retail_v2alpha.ProductServiceClient", + "shortName": "ProductServiceClient" }, - "fullName": "google.cloud.retail_v2alpha.ModelServiceClient.delete_model", + "fullName": "google.cloud.retail_v2alpha.ProductServiceClient.add_fulfillment_places", "method": { - "fullName": "google.cloud.retail.v2alpha.ModelService.DeleteModel", + "fullName": "google.cloud.retail.v2alpha.ProductService.AddFulfillmentPlaces", "service": { - "fullName": "google.cloud.retail.v2alpha.ModelService", - "shortName": "ModelService" + "fullName": "google.cloud.retail.v2alpha.ProductService", + "shortName": "ProductService" }, - "shortName": "DeleteModel" + "shortName": "AddFulfillmentPlaces" }, "parameters": [ { "name": "request", - "type": "google.cloud.retail_v2alpha.types.DeleteModelRequest" + "type": "google.cloud.retail_v2alpha.types.AddFulfillmentPlacesRequest" }, { - "name": "name", + "name": "product", "type": "str" }, { @@ -3981,21 +5588,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "shortName": "delete_model" + "resultType": "google.api_core.operation.Operation", + "shortName": "add_fulfillment_places" }, - "description": "Sample for DeleteModel", - "file": "retail_v2alpha_generated_model_service_delete_model_sync.py", + "description": "Sample for AddFulfillmentPlaces", + "file": "retail_v2alpha_generated_product_service_add_fulfillment_places_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "retail_v2alpha_generated_ModelService_DeleteModel_sync", + "regionTag": 
"retail_v2alpha_generated_ProductService_AddFulfillmentPlaces_sync", "segments": [ { - "end": 49, + "end": 57, "start": 27, "type": "FULL" }, { - "end": 49, + "end": 57, "start": 27, "type": "SHORT" }, @@ -4005,45 +5613,47 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 45, + "end": 47, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "start": 46, + "end": 54, + "start": 48, "type": "REQUEST_EXECUTION" }, { - "end": 50, + "end": 58, + "start": 55, "type": "RESPONSE_HANDLING" } ], - "title": "retail_v2alpha_generated_model_service_delete_model_sync.py" + "title": "retail_v2alpha_generated_product_service_add_fulfillment_places_sync.py" }, { "canonical": true, "clientMethod": { "async": true, "client": { - "fullName": "google.cloud.retail_v2alpha.ModelServiceAsyncClient", - "shortName": "ModelServiceAsyncClient" + "fullName": "google.cloud.retail_v2alpha.ProductServiceAsyncClient", + "shortName": "ProductServiceAsyncClient" }, - "fullName": "google.cloud.retail_v2alpha.ModelServiceAsyncClient.get_model", + "fullName": "google.cloud.retail_v2alpha.ProductServiceAsyncClient.add_local_inventories", "method": { - "fullName": "google.cloud.retail.v2alpha.ModelService.GetModel", + "fullName": "google.cloud.retail.v2alpha.ProductService.AddLocalInventories", "service": { - "fullName": "google.cloud.retail.v2alpha.ModelService", - "shortName": "ModelService" + "fullName": "google.cloud.retail.v2alpha.ProductService", + "shortName": "ProductService" }, - "shortName": "GetModel" + "shortName": "AddLocalInventories" }, "parameters": [ { "name": "request", - "type": "google.cloud.retail_v2alpha.types.GetModelRequest" + "type": "google.cloud.retail_v2alpha.types.AddLocalInventoriesRequest" }, { - "name": "name", + "name": "product", "type": "str" }, { @@ -4059,22 +5669,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.retail_v2alpha.types.Model", - "shortName": "get_model" + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "add_local_inventories" }, - "description": "Sample for GetModel", - "file": "retail_v2alpha_generated_model_service_get_model_async.py", + "description": "Sample for AddLocalInventories", + "file": "retail_v2alpha_generated_product_service_add_local_inventories_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "retail_v2alpha_generated_ModelService_GetModel_async", + "regionTag": "retail_v2alpha_generated_ProductService_AddLocalInventories_async", "segments": [ { - "end": 51, + "end": 55, "start": 27, "type": "FULL" }, { - "end": 51, + "end": 55, "start": 27, "type": "SHORT" }, @@ -4089,41 +5699,41 @@ "type": "REQUEST_INITIALIZATION" }, { - "end": 48, + "end": 52, "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 52, - "start": 49, + "end": 56, + "start": 53, "type": "RESPONSE_HANDLING" } ], - "title": "retail_v2alpha_generated_model_service_get_model_async.py" + "title": "retail_v2alpha_generated_product_service_add_local_inventories_async.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.retail_v2alpha.ModelServiceClient", - "shortName": "ModelServiceClient" + "fullName": "google.cloud.retail_v2alpha.ProductServiceClient", + "shortName": "ProductServiceClient" }, - "fullName": "google.cloud.retail_v2alpha.ModelServiceClient.get_model", + "fullName": "google.cloud.retail_v2alpha.ProductServiceClient.add_local_inventories", "method": { - "fullName": "google.cloud.retail.v2alpha.ModelService.GetModel", + "fullName": 
"google.cloud.retail.v2alpha.ProductService.AddLocalInventories", "service": { - "fullName": "google.cloud.retail.v2alpha.ModelService", - "shortName": "ModelService" + "fullName": "google.cloud.retail.v2alpha.ProductService", + "shortName": "ProductService" }, - "shortName": "GetModel" + "shortName": "AddLocalInventories" }, "parameters": [ { "name": "request", - "type": "google.cloud.retail_v2alpha.types.GetModelRequest" + "type": "google.cloud.retail_v2alpha.types.AddLocalInventoriesRequest" }, { - "name": "name", + "name": "product", "type": "str" }, { @@ -4139,22 +5749,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.retail_v2alpha.types.Model", - "shortName": "get_model" + "resultType": "google.api_core.operation.Operation", + "shortName": "add_local_inventories" }, - "description": "Sample for GetModel", - "file": "retail_v2alpha_generated_model_service_get_model_sync.py", + "description": "Sample for AddLocalInventories", + "file": "retail_v2alpha_generated_product_service_add_local_inventories_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "retail_v2alpha_generated_ModelService_GetModel_sync", + "regionTag": "retail_v2alpha_generated_ProductService_AddLocalInventories_sync", "segments": [ { - "end": 51, + "end": 55, "start": 27, "type": "FULL" }, { - "end": 51, + "end": 55, "start": 27, "type": "SHORT" }, @@ -4169,44 +5779,52 @@ "type": "REQUEST_INITIALIZATION" }, { - "end": 48, + "end": 52, "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 52, - "start": 49, + "end": 56, + "start": 53, "type": "RESPONSE_HANDLING" } ], - "title": "retail_v2alpha_generated_model_service_get_model_sync.py" + "title": "retail_v2alpha_generated_product_service_add_local_inventories_sync.py" }, { "canonical": true, "clientMethod": { "async": true, "client": { - "fullName": "google.cloud.retail_v2alpha.ModelServiceAsyncClient", - "shortName": "ModelServiceAsyncClient" + "fullName": "google.cloud.retail_v2alpha.ProductServiceAsyncClient", + "shortName": "ProductServiceAsyncClient" }, - "fullName": "google.cloud.retail_v2alpha.ModelServiceAsyncClient.list_models", + "fullName": "google.cloud.retail_v2alpha.ProductServiceAsyncClient.create_product", "method": { - "fullName": "google.cloud.retail.v2alpha.ModelService.ListModels", + "fullName": "google.cloud.retail.v2alpha.ProductService.CreateProduct", "service": { - "fullName": "google.cloud.retail.v2alpha.ModelService", - "shortName": "ModelService" + "fullName": "google.cloud.retail.v2alpha.ProductService", + "shortName": "ProductService" }, - "shortName": "ListModels" + "shortName": "CreateProduct" }, "parameters": [ { "name": "request", - "type": "google.cloud.retail_v2alpha.types.ListModelsRequest" + "type": "google.cloud.retail_v2alpha.types.CreateProductRequest" }, { "name": "parent", "type": "str" }, + { + "name": "product", + "type": "google.cloud.retail_v2alpha.types.Product" + }, + { + "name": "product_id", + "type": "str" + }, { "name": "retry", "type": "google.api_core.retry.Retry" @@ -4220,22 +5838,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.retail_v2alpha.services.model_service.pagers.ListModelsAsyncPager", - "shortName": "list_models" + "resultType": "google.cloud.retail_v2alpha.types.Product", + "shortName": "create_product" }, - "description": "Sample for ListModels", - "file": "retail_v2alpha_generated_model_service_list_models_async.py", + "description": "Sample for CreateProduct", + "file": 
"retail_v2alpha_generated_product_service_create_product_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "retail_v2alpha_generated_ModelService_ListModels_async", + "regionTag": "retail_v2alpha_generated_ProductService_CreateProduct_async", "segments": [ { - "end": 52, + "end": 56, "start": 27, "type": "FULL" }, { - "end": 52, + "end": 56, "start": 27, "type": "SHORT" }, @@ -4245,48 +5863,56 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 45, + "end": 50, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 48, - "start": 46, + "end": 53, + "start": 51, "type": "REQUEST_EXECUTION" }, { - "end": 53, - "start": 49, + "end": 57, + "start": 54, "type": "RESPONSE_HANDLING" } ], - "title": "retail_v2alpha_generated_model_service_list_models_async.py" + "title": "retail_v2alpha_generated_product_service_create_product_async.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.retail_v2alpha.ModelServiceClient", - "shortName": "ModelServiceClient" + "fullName": "google.cloud.retail_v2alpha.ProductServiceClient", + "shortName": "ProductServiceClient" }, - "fullName": "google.cloud.retail_v2alpha.ModelServiceClient.list_models", + "fullName": "google.cloud.retail_v2alpha.ProductServiceClient.create_product", "method": { - "fullName": "google.cloud.retail.v2alpha.ModelService.ListModels", + "fullName": "google.cloud.retail.v2alpha.ProductService.CreateProduct", "service": { - "fullName": "google.cloud.retail.v2alpha.ModelService", - "shortName": "ModelService" + "fullName": "google.cloud.retail.v2alpha.ProductService", + "shortName": "ProductService" }, - "shortName": "ListModels" + "shortName": "CreateProduct" }, "parameters": [ { "name": "request", - "type": "google.cloud.retail_v2alpha.types.ListModelsRequest" + "type": "google.cloud.retail_v2alpha.types.CreateProductRequest" }, { "name": "parent", "type": "str" }, + { + "name": "product", + "type": "google.cloud.retail_v2alpha.types.Product" + }, + { + "name": "product_id", + "type": "str" + }, { "name": "retry", "type": "google.api_core.retry.Retry" @@ -4300,22 +5926,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.retail_v2alpha.services.model_service.pagers.ListModelsPager", - "shortName": "list_models" + "resultType": "google.cloud.retail_v2alpha.types.Product", + "shortName": "create_product" }, - "description": "Sample for ListModels", - "file": "retail_v2alpha_generated_model_service_list_models_sync.py", + "description": "Sample for CreateProduct", + "file": "retail_v2alpha_generated_product_service_create_product_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "retail_v2alpha_generated_ModelService_ListModels_sync", + "regionTag": "retail_v2alpha_generated_ProductService_CreateProduct_sync", "segments": [ { - "end": 52, + "end": 56, "start": 27, "type": "FULL" }, { - "end": 52, + "end": 56, "start": 27, "type": "SHORT" }, @@ -4325,44 +5951,44 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 45, + "end": 50, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 48, - "start": 46, + "end": 53, + "start": 51, "type": "REQUEST_EXECUTION" }, { - "end": 53, - "start": 49, + "end": 57, + "start": 54, "type": "RESPONSE_HANDLING" } ], - "title": "retail_v2alpha_generated_model_service_list_models_sync.py" + "title": "retail_v2alpha_generated_product_service_create_product_sync.py" }, { "canonical": true, "clientMethod": { "async": true, "client": { - "fullName": "google.cloud.retail_v2alpha.ModelServiceAsyncClient", - 
"shortName": "ModelServiceAsyncClient" + "fullName": "google.cloud.retail_v2alpha.ProductServiceAsyncClient", + "shortName": "ProductServiceAsyncClient" }, - "fullName": "google.cloud.retail_v2alpha.ModelServiceAsyncClient.pause_model", + "fullName": "google.cloud.retail_v2alpha.ProductServiceAsyncClient.delete_product", "method": { - "fullName": "google.cloud.retail.v2alpha.ModelService.PauseModel", + "fullName": "google.cloud.retail.v2alpha.ProductService.DeleteProduct", "service": { - "fullName": "google.cloud.retail.v2alpha.ModelService", - "shortName": "ModelService" + "fullName": "google.cloud.retail.v2alpha.ProductService", + "shortName": "ProductService" }, - "shortName": "PauseModel" + "shortName": "DeleteProduct" }, "parameters": [ { "name": "request", - "type": "google.cloud.retail_v2alpha.types.PauseModelRequest" + "type": "google.cloud.retail_v2alpha.types.DeleteProductRequest" }, { "name": "name", @@ -4381,22 +6007,21 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.retail_v2alpha.types.Model", - "shortName": "pause_model" + "shortName": "delete_product" }, - "description": "Sample for PauseModel", - "file": "retail_v2alpha_generated_model_service_pause_model_async.py", + "description": "Sample for DeleteProduct", + "file": "retail_v2alpha_generated_product_service_delete_product_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "retail_v2alpha_generated_ModelService_PauseModel_async", + "regionTag": "retail_v2alpha_generated_ProductService_DeleteProduct_async", "segments": [ { - "end": 51, + "end": 49, "start": 27, "type": "FULL" }, { - "end": 51, + "end": 49, "start": 27, "type": "SHORT" }, @@ -4411,38 +6036,36 @@ "type": "REQUEST_INITIALIZATION" }, { - "end": 48, "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 52, - "start": 49, + "end": 50, "type": "RESPONSE_HANDLING" } ], - "title": "retail_v2alpha_generated_model_service_pause_model_async.py" + "title": "retail_v2alpha_generated_product_service_delete_product_async.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.retail_v2alpha.ModelServiceClient", - "shortName": "ModelServiceClient" + "fullName": "google.cloud.retail_v2alpha.ProductServiceClient", + "shortName": "ProductServiceClient" }, - "fullName": "google.cloud.retail_v2alpha.ModelServiceClient.pause_model", + "fullName": "google.cloud.retail_v2alpha.ProductServiceClient.delete_product", "method": { - "fullName": "google.cloud.retail.v2alpha.ModelService.PauseModel", + "fullName": "google.cloud.retail.v2alpha.ProductService.DeleteProduct", "service": { - "fullName": "google.cloud.retail.v2alpha.ModelService", - "shortName": "ModelService" + "fullName": "google.cloud.retail.v2alpha.ProductService", + "shortName": "ProductService" }, - "shortName": "PauseModel" + "shortName": "DeleteProduct" }, "parameters": [ { "name": "request", - "type": "google.cloud.retail_v2alpha.types.PauseModelRequest" + "type": "google.cloud.retail_v2alpha.types.DeleteProductRequest" }, { "name": "name", @@ -4461,22 +6084,21 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.retail_v2alpha.types.Model", - "shortName": "pause_model" + "shortName": "delete_product" }, - "description": "Sample for PauseModel", - "file": "retail_v2alpha_generated_model_service_pause_model_sync.py", + "description": "Sample for DeleteProduct", + "file": "retail_v2alpha_generated_product_service_delete_product_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": 
"retail_v2alpha_generated_ModelService_PauseModel_sync", + "regionTag": "retail_v2alpha_generated_ProductService_DeleteProduct_sync", "segments": [ { - "end": 51, + "end": 49, "start": 27, "type": "FULL" }, { - "end": 51, + "end": 49, "start": 27, "type": "SHORT" }, @@ -4491,39 +6113,37 @@ "type": "REQUEST_INITIALIZATION" }, { - "end": 48, "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 52, - "start": 49, + "end": 50, "type": "RESPONSE_HANDLING" } ], - "title": "retail_v2alpha_generated_model_service_pause_model_sync.py" + "title": "retail_v2alpha_generated_product_service_delete_product_sync.py" }, { "canonical": true, "clientMethod": { "async": true, "client": { - "fullName": "google.cloud.retail_v2alpha.ModelServiceAsyncClient", - "shortName": "ModelServiceAsyncClient" + "fullName": "google.cloud.retail_v2alpha.ProductServiceAsyncClient", + "shortName": "ProductServiceAsyncClient" }, - "fullName": "google.cloud.retail_v2alpha.ModelServiceAsyncClient.resume_model", + "fullName": "google.cloud.retail_v2alpha.ProductServiceAsyncClient.get_product", "method": { - "fullName": "google.cloud.retail.v2alpha.ModelService.ResumeModel", + "fullName": "google.cloud.retail.v2alpha.ProductService.GetProduct", "service": { - "fullName": "google.cloud.retail.v2alpha.ModelService", - "shortName": "ModelService" + "fullName": "google.cloud.retail.v2alpha.ProductService", + "shortName": "ProductService" }, - "shortName": "ResumeModel" + "shortName": "GetProduct" }, "parameters": [ { "name": "request", - "type": "google.cloud.retail_v2alpha.types.ResumeModelRequest" + "type": "google.cloud.retail_v2alpha.types.GetProductRequest" }, { "name": "name", @@ -4542,14 +6162,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.retail_v2alpha.types.Model", - "shortName": "resume_model" + "resultType": "google.cloud.retail_v2alpha.types.Product", + "shortName": "get_product" }, - "description": "Sample for ResumeModel", - "file": "retail_v2alpha_generated_model_service_resume_model_async.py", + "description": "Sample for GetProduct", + "file": "retail_v2alpha_generated_product_service_get_product_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "retail_v2alpha_generated_ModelService_ResumeModel_async", + "regionTag": "retail_v2alpha_generated_ProductService_GetProduct_async", "segments": [ { "end": 51, @@ -4582,28 +6202,28 @@ "type": "RESPONSE_HANDLING" } ], - "title": "retail_v2alpha_generated_model_service_resume_model_async.py" + "title": "retail_v2alpha_generated_product_service_get_product_async.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.retail_v2alpha.ModelServiceClient", - "shortName": "ModelServiceClient" + "fullName": "google.cloud.retail_v2alpha.ProductServiceClient", + "shortName": "ProductServiceClient" }, - "fullName": "google.cloud.retail_v2alpha.ModelServiceClient.resume_model", + "fullName": "google.cloud.retail_v2alpha.ProductServiceClient.get_product", "method": { - "fullName": "google.cloud.retail.v2alpha.ModelService.ResumeModel", + "fullName": "google.cloud.retail.v2alpha.ProductService.GetProduct", "service": { - "fullName": "google.cloud.retail.v2alpha.ModelService", - "shortName": "ModelService" + "fullName": "google.cloud.retail.v2alpha.ProductService", + "shortName": "ProductService" }, - "shortName": "ResumeModel" + "shortName": "GetProduct" }, "parameters": [ { "name": "request", - "type": "google.cloud.retail_v2alpha.types.ResumeModelRequest" + "type": 
"google.cloud.retail_v2alpha.types.GetProductRequest" }, { "name": "name", @@ -4622,14 +6242,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.retail_v2alpha.types.Model", - "shortName": "resume_model" + "resultType": "google.cloud.retail_v2alpha.types.Product", + "shortName": "get_product" }, - "description": "Sample for ResumeModel", - "file": "retail_v2alpha_generated_model_service_resume_model_sync.py", + "description": "Sample for GetProduct", + "file": "retail_v2alpha_generated_product_service_get_product_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "retail_v2alpha_generated_ModelService_ResumeModel_sync", + "regionTag": "retail_v2alpha_generated_ProductService_GetProduct_sync", "segments": [ { "end": 51, @@ -4662,33 +6282,29 @@ "type": "RESPONSE_HANDLING" } ], - "title": "retail_v2alpha_generated_model_service_resume_model_sync.py" + "title": "retail_v2alpha_generated_product_service_get_product_sync.py" }, { "canonical": true, "clientMethod": { "async": true, "client": { - "fullName": "google.cloud.retail_v2alpha.ModelServiceAsyncClient", - "shortName": "ModelServiceAsyncClient" + "fullName": "google.cloud.retail_v2alpha.ProductServiceAsyncClient", + "shortName": "ProductServiceAsyncClient" }, - "fullName": "google.cloud.retail_v2alpha.ModelServiceAsyncClient.tune_model", + "fullName": "google.cloud.retail_v2alpha.ProductServiceAsyncClient.import_products", "method": { - "fullName": "google.cloud.retail.v2alpha.ModelService.TuneModel", + "fullName": "google.cloud.retail.v2alpha.ProductService.ImportProducts", "service": { - "fullName": "google.cloud.retail.v2alpha.ModelService", - "shortName": "ModelService" + "fullName": "google.cloud.retail.v2alpha.ProductService", + "shortName": "ProductService" }, - "shortName": "TuneModel" + "shortName": "ImportProducts" }, "parameters": [ { "name": "request", - "type": "google.cloud.retail_v2alpha.types.TuneModelRequest" - }, - { - "name": "name", - "type": "str" + "type": "google.cloud.retail_v2alpha.types.ImportProductsRequest" }, { "name": "retry", @@ -4704,21 +6320,21 @@ } ], "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "tune_model" + "shortName": "import_products" }, - "description": "Sample for TuneModel", - "file": "retail_v2alpha_generated_model_service_tune_model_async.py", + "description": "Sample for ImportProducts", + "file": "retail_v2alpha_generated_product_service_import_products_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "retail_v2alpha_generated_ModelService_TuneModel_async", + "regionTag": "retail_v2alpha_generated_ProductService_ImportProducts_async", "segments": [ { - "end": 55, + "end": 59, "start": 27, "type": "FULL" }, { - "end": 55, + "end": 59, "start": 27, "type": "SHORT" }, @@ -4728,47 +6344,43 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 45, + "end": 49, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 52, - "start": 46, + "end": 56, + "start": 50, "type": "REQUEST_EXECUTION" }, { - "end": 56, - "start": 53, + "end": 60, + "start": 57, "type": "RESPONSE_HANDLING" } ], - "title": "retail_v2alpha_generated_model_service_tune_model_async.py" + "title": "retail_v2alpha_generated_product_service_import_products_async.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.retail_v2alpha.ModelServiceClient", - "shortName": "ModelServiceClient" + "fullName": "google.cloud.retail_v2alpha.ProductServiceClient", + "shortName": "ProductServiceClient" }, - 
"fullName": "google.cloud.retail_v2alpha.ModelServiceClient.tune_model", + "fullName": "google.cloud.retail_v2alpha.ProductServiceClient.import_products", "method": { - "fullName": "google.cloud.retail.v2alpha.ModelService.TuneModel", + "fullName": "google.cloud.retail.v2alpha.ProductService.ImportProducts", "service": { - "fullName": "google.cloud.retail.v2alpha.ModelService", - "shortName": "ModelService" + "fullName": "google.cloud.retail.v2alpha.ProductService", + "shortName": "ProductService" }, - "shortName": "TuneModel" + "shortName": "ImportProducts" }, "parameters": [ { "name": "request", - "type": "google.cloud.retail_v2alpha.types.TuneModelRequest" - }, - { - "name": "name", - "type": "str" + "type": "google.cloud.retail_v2alpha.types.ImportProductsRequest" }, { "name": "retry", @@ -4784,21 +6396,21 @@ } ], "resultType": "google.api_core.operation.Operation", - "shortName": "tune_model" + "shortName": "import_products" }, - "description": "Sample for TuneModel", - "file": "retail_v2alpha_generated_model_service_tune_model_sync.py", + "description": "Sample for ImportProducts", + "file": "retail_v2alpha_generated_product_service_import_products_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "retail_v2alpha_generated_ModelService_TuneModel_sync", + "regionTag": "retail_v2alpha_generated_ProductService_ImportProducts_sync", "segments": [ { - "end": 55, + "end": 59, "start": 27, "type": "FULL" }, { - "end": 55, + "end": 59, "start": 27, "type": "SHORT" }, @@ -4808,52 +6420,48 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 45, + "end": 49, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 52, - "start": 46, + "end": 56, + "start": 50, "type": "REQUEST_EXECUTION" }, { - "end": 56, - "start": 53, + "end": 60, + "start": 57, "type": "RESPONSE_HANDLING" } ], - "title": "retail_v2alpha_generated_model_service_tune_model_sync.py" + "title": "retail_v2alpha_generated_product_service_import_products_sync.py" }, { "canonical": true, "clientMethod": { "async": true, "client": { - "fullName": "google.cloud.retail_v2alpha.ModelServiceAsyncClient", - "shortName": "ModelServiceAsyncClient" + "fullName": "google.cloud.retail_v2alpha.ProductServiceAsyncClient", + "shortName": "ProductServiceAsyncClient" }, - "fullName": "google.cloud.retail_v2alpha.ModelServiceAsyncClient.update_model", + "fullName": "google.cloud.retail_v2alpha.ProductServiceAsyncClient.list_products", "method": { - "fullName": "google.cloud.retail.v2alpha.ModelService.UpdateModel", + "fullName": "google.cloud.retail.v2alpha.ProductService.ListProducts", "service": { - "fullName": "google.cloud.retail.v2alpha.ModelService", - "shortName": "ModelService" + "fullName": "google.cloud.retail.v2alpha.ProductService", + "shortName": "ProductService" }, - "shortName": "UpdateModel" + "shortName": "ListProducts" }, "parameters": [ { "name": "request", - "type": "google.cloud.retail_v2alpha.types.UpdateModelRequest" - }, - { - "name": "model", - "type": "google.cloud.retail_v2alpha.types.Model" + "type": "google.cloud.retail_v2alpha.types.ListProductsRequest" }, { - "name": "update_mask", - "type": "google.protobuf.field_mask_pb2.FieldMask" + "name": "parent", + "type": "str" }, { "name": "retry", @@ -4868,22 +6476,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.retail_v2alpha.types.Model", - "shortName": "update_model" + "resultType": "google.cloud.retail_v2alpha.services.product_service.pagers.ListProductsAsyncPager", + "shortName": "list_products" }, - "description": 
"Sample for UpdateModel", - "file": "retail_v2alpha_generated_model_service_update_model_async.py", + "description": "Sample for ListProducts", + "file": "retail_v2alpha_generated_product_service_list_products_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "retail_v2alpha_generated_ModelService_UpdateModel_async", + "regionTag": "retail_v2alpha_generated_ProductService_ListProducts_async", "segments": [ { - "end": 59, + "end": 52, "start": 27, "type": "FULL" }, { - "end": 59, + "end": 52, "start": 27, "type": "SHORT" }, @@ -4893,51 +6501,47 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 53, + "end": 45, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 56, - "start": 54, + "end": 48, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 60, - "start": 57, + "end": 53, + "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "retail_v2alpha_generated_model_service_update_model_async.py" + "title": "retail_v2alpha_generated_product_service_list_products_async.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.retail_v2alpha.ModelServiceClient", - "shortName": "ModelServiceClient" + "fullName": "google.cloud.retail_v2alpha.ProductServiceClient", + "shortName": "ProductServiceClient" }, - "fullName": "google.cloud.retail_v2alpha.ModelServiceClient.update_model", + "fullName": "google.cloud.retail_v2alpha.ProductServiceClient.list_products", "method": { - "fullName": "google.cloud.retail.v2alpha.ModelService.UpdateModel", + "fullName": "google.cloud.retail.v2alpha.ProductService.ListProducts", "service": { - "fullName": "google.cloud.retail.v2alpha.ModelService", - "shortName": "ModelService" + "fullName": "google.cloud.retail.v2alpha.ProductService", + "shortName": "ProductService" }, - "shortName": "UpdateModel" + "shortName": "ListProducts" }, "parameters": [ { "name": "request", - "type": "google.cloud.retail_v2alpha.types.UpdateModelRequest" - }, - { - "name": "model", - "type": "google.cloud.retail_v2alpha.types.Model" + "type": "google.cloud.retail_v2alpha.types.ListProductsRequest" }, { - "name": "update_mask", - "type": "google.protobuf.field_mask_pb2.FieldMask" + "name": "parent", + "type": "str" }, { "name": "retry", @@ -4952,22 +6556,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.retail_v2alpha.types.Model", - "shortName": "update_model" + "resultType": "google.cloud.retail_v2alpha.services.product_service.pagers.ListProductsPager", + "shortName": "list_products" }, - "description": "Sample for UpdateModel", - "file": "retail_v2alpha_generated_model_service_update_model_sync.py", + "description": "Sample for ListProducts", + "file": "retail_v2alpha_generated_product_service_list_products_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "retail_v2alpha_generated_ModelService_UpdateModel_sync", + "regionTag": "retail_v2alpha_generated_ProductService_ListProducts_sync", "segments": [ { - "end": 59, + "end": 52, "start": 27, "type": "FULL" }, { - "end": 59, + "end": 52, "start": 27, "type": "SHORT" }, @@ -4977,44 +6581,44 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 53, + "end": 45, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 56, - "start": 54, + "end": 48, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 60, - "start": 57, + "end": 53, + "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "retail_v2alpha_generated_model_service_update_model_sync.py" + "title": 
"retail_v2alpha_generated_product_service_list_products_sync.py" }, { "canonical": true, "clientMethod": { "async": true, "client": { - "fullName": "google.cloud.retail_v2alpha.PredictionServiceAsyncClient", - "shortName": "PredictionServiceAsyncClient" + "fullName": "google.cloud.retail_v2alpha.ProductServiceAsyncClient", + "shortName": "ProductServiceAsyncClient" }, - "fullName": "google.cloud.retail_v2alpha.PredictionServiceAsyncClient.predict", + "fullName": "google.cloud.retail_v2alpha.ProductServiceAsyncClient.purge_products", "method": { - "fullName": "google.cloud.retail.v2alpha.PredictionService.Predict", + "fullName": "google.cloud.retail.v2alpha.ProductService.PurgeProducts", "service": { - "fullName": "google.cloud.retail.v2alpha.PredictionService", - "shortName": "PredictionService" + "fullName": "google.cloud.retail.v2alpha.ProductService", + "shortName": "ProductService" }, - "shortName": "Predict" + "shortName": "PurgeProducts" }, "parameters": [ { "name": "request", - "type": "google.cloud.retail_v2alpha.types.PredictRequest" + "type": "google.cloud.retail_v2alpha.types.PurgeProductsRequest" }, { "name": "retry", @@ -5029,14 +6633,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.retail_v2alpha.types.PredictResponse", - "shortName": "predict" + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "purge_products" }, - "description": "Sample for Predict", - "file": "retail_v2alpha_generated_prediction_service_predict_async.py", + "description": "Sample for PurgeProducts", + "file": "retail_v2alpha_generated_product_service_purge_products_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "retail_v2alpha_generated_PredictionService_Predict_async", + "regionTag": "retail_v2alpha_generated_ProductService_PurgeProducts_async", "segments": [ { "end": 56, @@ -5054,13 +6658,13 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 50, + "end": 46, "start": 41, "type": "REQUEST_INITIALIZATION" }, { "end": 53, - "start": 51, + "start": 47, "type": "REQUEST_EXECUTION" }, { @@ -5069,28 +6673,28 @@ "type": "RESPONSE_HANDLING" } ], - "title": "retail_v2alpha_generated_prediction_service_predict_async.py" + "title": "retail_v2alpha_generated_product_service_purge_products_async.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.retail_v2alpha.PredictionServiceClient", - "shortName": "PredictionServiceClient" + "fullName": "google.cloud.retail_v2alpha.ProductServiceClient", + "shortName": "ProductServiceClient" }, - "fullName": "google.cloud.retail_v2alpha.PredictionServiceClient.predict", + "fullName": "google.cloud.retail_v2alpha.ProductServiceClient.purge_products", "method": { - "fullName": "google.cloud.retail.v2alpha.PredictionService.Predict", + "fullName": "google.cloud.retail.v2alpha.ProductService.PurgeProducts", "service": { - "fullName": "google.cloud.retail.v2alpha.PredictionService", - "shortName": "PredictionService" + "fullName": "google.cloud.retail.v2alpha.ProductService", + "shortName": "ProductService" }, - "shortName": "Predict" + "shortName": "PurgeProducts" }, "parameters": [ { "name": "request", - "type": "google.cloud.retail_v2alpha.types.PredictRequest" + "type": "google.cloud.retail_v2alpha.types.PurgeProductsRequest" }, { "name": "retry", @@ -5105,14 +6709,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.retail_v2alpha.types.PredictResponse", - "shortName": "predict" + "resultType": "google.api_core.operation.Operation", + 
"shortName": "purge_products" }, - "description": "Sample for Predict", - "file": "retail_v2alpha_generated_prediction_service_predict_sync.py", + "description": "Sample for PurgeProducts", + "file": "retail_v2alpha_generated_product_service_purge_products_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "retail_v2alpha_generated_PredictionService_Predict_sync", + "regionTag": "retail_v2alpha_generated_ProductService_PurgeProducts_sync", "segments": [ { "end": 56, @@ -5130,13 +6734,13 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 50, + "end": 46, "start": 41, "type": "REQUEST_INITIALIZATION" }, { "end": 53, - "start": 51, + "start": 47, "type": "REQUEST_EXECUTION" }, { @@ -5145,7 +6749,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "retail_v2alpha_generated_prediction_service_predict_sync.py" + "title": "retail_v2alpha_generated_product_service_purge_products_sync.py" }, { "canonical": true, @@ -5155,19 +6759,19 @@ "fullName": "google.cloud.retail_v2alpha.ProductServiceAsyncClient", "shortName": "ProductServiceAsyncClient" }, - "fullName": "google.cloud.retail_v2alpha.ProductServiceAsyncClient.add_fulfillment_places", + "fullName": "google.cloud.retail_v2alpha.ProductServiceAsyncClient.remove_fulfillment_places", "method": { - "fullName": "google.cloud.retail.v2alpha.ProductService.AddFulfillmentPlaces", + "fullName": "google.cloud.retail.v2alpha.ProductService.RemoveFulfillmentPlaces", "service": { "fullName": "google.cloud.retail.v2alpha.ProductService", "shortName": "ProductService" }, - "shortName": "AddFulfillmentPlaces" + "shortName": "RemoveFulfillmentPlaces" }, "parameters": [ { "name": "request", - "type": "google.cloud.retail_v2alpha.types.AddFulfillmentPlacesRequest" + "type": "google.cloud.retail_v2alpha.types.RemoveFulfillmentPlacesRequest" }, { "name": "product", @@ -5187,13 +6791,13 @@ } ], "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "add_fulfillment_places" + "shortName": "remove_fulfillment_places" }, - "description": "Sample for AddFulfillmentPlaces", - "file": "retail_v2alpha_generated_product_service_add_fulfillment_places_async.py", + "description": "Sample for RemoveFulfillmentPlaces", + "file": "retail_v2alpha_generated_product_service_remove_fulfillment_places_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "retail_v2alpha_generated_ProductService_AddFulfillmentPlaces_async", + "regionTag": "retail_v2alpha_generated_ProductService_RemoveFulfillmentPlaces_async", "segments": [ { "end": 57, @@ -5226,7 +6830,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "retail_v2alpha_generated_product_service_add_fulfillment_places_async.py" + "title": "retail_v2alpha_generated_product_service_remove_fulfillment_places_async.py" }, { "canonical": true, @@ -5235,19 +6839,19 @@ "fullName": "google.cloud.retail_v2alpha.ProductServiceClient", "shortName": "ProductServiceClient" }, - "fullName": "google.cloud.retail_v2alpha.ProductServiceClient.add_fulfillment_places", + "fullName": "google.cloud.retail_v2alpha.ProductServiceClient.remove_fulfillment_places", "method": { - "fullName": "google.cloud.retail.v2alpha.ProductService.AddFulfillmentPlaces", + "fullName": "google.cloud.retail.v2alpha.ProductService.RemoveFulfillmentPlaces", "service": { "fullName": "google.cloud.retail.v2alpha.ProductService", "shortName": "ProductService" }, - "shortName": "AddFulfillmentPlaces" + "shortName": "RemoveFulfillmentPlaces" }, "parameters": [ { "name": "request", - "type": 
"google.cloud.retail_v2alpha.types.AddFulfillmentPlacesRequest" + "type": "google.cloud.retail_v2alpha.types.RemoveFulfillmentPlacesRequest" }, { "name": "product", @@ -5267,13 +6871,13 @@ } ], "resultType": "google.api_core.operation.Operation", - "shortName": "add_fulfillment_places" + "shortName": "remove_fulfillment_places" }, - "description": "Sample for AddFulfillmentPlaces", - "file": "retail_v2alpha_generated_product_service_add_fulfillment_places_sync.py", + "description": "Sample for RemoveFulfillmentPlaces", + "file": "retail_v2alpha_generated_product_service_remove_fulfillment_places_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "retail_v2alpha_generated_ProductService_AddFulfillmentPlaces_sync", + "regionTag": "retail_v2alpha_generated_ProductService_RemoveFulfillmentPlaces_sync", "segments": [ { "end": 57, @@ -5306,7 +6910,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "retail_v2alpha_generated_product_service_add_fulfillment_places_sync.py" + "title": "retail_v2alpha_generated_product_service_remove_fulfillment_places_sync.py" }, { "canonical": true, @@ -5316,19 +6920,19 @@ "fullName": "google.cloud.retail_v2alpha.ProductServiceAsyncClient", "shortName": "ProductServiceAsyncClient" }, - "fullName": "google.cloud.retail_v2alpha.ProductServiceAsyncClient.add_local_inventories", + "fullName": "google.cloud.retail_v2alpha.ProductServiceAsyncClient.remove_local_inventories", "method": { - "fullName": "google.cloud.retail.v2alpha.ProductService.AddLocalInventories", + "fullName": "google.cloud.retail.v2alpha.ProductService.RemoveLocalInventories", "service": { "fullName": "google.cloud.retail.v2alpha.ProductService", "shortName": "ProductService" }, - "shortName": "AddLocalInventories" + "shortName": "RemoveLocalInventories" }, "parameters": [ { "name": "request", - "type": "google.cloud.retail_v2alpha.types.AddLocalInventoriesRequest" + "type": "google.cloud.retail_v2alpha.types.RemoveLocalInventoriesRequest" }, { "name": "product", @@ -5348,21 +6952,21 @@ } ], "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "add_local_inventories" + "shortName": "remove_local_inventories" }, - "description": "Sample for AddLocalInventories", - "file": "retail_v2alpha_generated_product_service_add_local_inventories_async.py", + "description": "Sample for RemoveLocalInventories", + "file": "retail_v2alpha_generated_product_service_remove_local_inventories_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "retail_v2alpha_generated_ProductService_AddLocalInventories_async", + "regionTag": "retail_v2alpha_generated_ProductService_RemoveLocalInventories_async", "segments": [ { - "end": 55, + "end": 56, "start": 27, "type": "FULL" }, { - "end": 55, + "end": 56, "start": 27, "type": "SHORT" }, @@ -5372,22 +6976,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 45, + "end": 46, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 52, - "start": 46, + "end": 53, + "start": 47, "type": "REQUEST_EXECUTION" }, { - "end": 56, - "start": 53, + "end": 57, + "start": 54, "type": "RESPONSE_HANDLING" } ], - "title": "retail_v2alpha_generated_product_service_add_local_inventories_async.py" + "title": "retail_v2alpha_generated_product_service_remove_local_inventories_async.py" }, { "canonical": true, @@ -5396,19 +7000,19 @@ "fullName": "google.cloud.retail_v2alpha.ProductServiceClient", "shortName": "ProductServiceClient" }, - "fullName": "google.cloud.retail_v2alpha.ProductServiceClient.add_local_inventories", + 
"fullName": "google.cloud.retail_v2alpha.ProductServiceClient.remove_local_inventories", "method": { - "fullName": "google.cloud.retail.v2alpha.ProductService.AddLocalInventories", + "fullName": "google.cloud.retail.v2alpha.ProductService.RemoveLocalInventories", "service": { "fullName": "google.cloud.retail.v2alpha.ProductService", "shortName": "ProductService" }, - "shortName": "AddLocalInventories" + "shortName": "RemoveLocalInventories" }, "parameters": [ { "name": "request", - "type": "google.cloud.retail_v2alpha.types.AddLocalInventoriesRequest" + "type": "google.cloud.retail_v2alpha.types.RemoveLocalInventoriesRequest" }, { "name": "product", @@ -5428,21 +7032,21 @@ } ], "resultType": "google.api_core.operation.Operation", - "shortName": "add_local_inventories" + "shortName": "remove_local_inventories" }, - "description": "Sample for AddLocalInventories", - "file": "retail_v2alpha_generated_product_service_add_local_inventories_sync.py", + "description": "Sample for RemoveLocalInventories", + "file": "retail_v2alpha_generated_product_service_remove_local_inventories_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "retail_v2alpha_generated_ProductService_AddLocalInventories_sync", + "regionTag": "retail_v2alpha_generated_ProductService_RemoveLocalInventories_sync", "segments": [ { - "end": 55, + "end": 56, "start": 27, "type": "FULL" }, { - "end": 55, + "end": 56, "start": 27, "type": "SHORT" }, @@ -5452,22 +7056,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 45, + "end": 46, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 52, - "start": 46, + "end": 53, + "start": 47, "type": "REQUEST_EXECUTION" }, { - "end": 56, - "start": 53, + "end": 57, + "start": 54, "type": "RESPONSE_HANDLING" } ], - "title": "retail_v2alpha_generated_product_service_add_local_inventories_sync.py" + "title": "retail_v2alpha_generated_product_service_remove_local_inventories_sync.py" }, { "canonical": true, @@ -5477,31 +7081,27 @@ "fullName": "google.cloud.retail_v2alpha.ProductServiceAsyncClient", "shortName": "ProductServiceAsyncClient" }, - "fullName": "google.cloud.retail_v2alpha.ProductServiceAsyncClient.create_product", + "fullName": "google.cloud.retail_v2alpha.ProductServiceAsyncClient.set_inventory", "method": { - "fullName": "google.cloud.retail.v2alpha.ProductService.CreateProduct", + "fullName": "google.cloud.retail.v2alpha.ProductService.SetInventory", "service": { "fullName": "google.cloud.retail.v2alpha.ProductService", "shortName": "ProductService" }, - "shortName": "CreateProduct" + "shortName": "SetInventory" }, "parameters": [ { "name": "request", - "type": "google.cloud.retail_v2alpha.types.CreateProductRequest" - }, - { - "name": "parent", - "type": "str" + "type": "google.cloud.retail_v2alpha.types.SetInventoryRequest" }, { - "name": "product", + "name": "inventory", "type": "google.cloud.retail_v2alpha.types.Product" }, { - "name": "product_id", - "type": "str" + "name": "set_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" }, { "name": "retry", @@ -5516,22 +7116,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.retail_v2alpha.types.Product", - "shortName": "create_product" + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "set_inventory" }, - "description": "Sample for CreateProduct", - "file": "retail_v2alpha_generated_product_service_create_product_async.py", + "description": "Sample for SetInventory", + "file": 
"retail_v2alpha_generated_product_service_set_inventory_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "retail_v2alpha_generated_ProductService_CreateProduct_async", + "regionTag": "retail_v2alpha_generated_ProductService_SetInventory_async", "segments": [ { - "end": 56, + "end": 58, "start": 27, "type": "FULL" }, { - "end": 56, + "end": 58, "start": 27, "type": "SHORT" }, @@ -5541,22 +7141,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 50, + "end": 48, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 53, - "start": 51, + "end": 55, + "start": 49, "type": "REQUEST_EXECUTION" }, { - "end": 57, - "start": 54, + "end": 59, + "start": 56, "type": "RESPONSE_HANDLING" } ], - "title": "retail_v2alpha_generated_product_service_create_product_async.py" + "title": "retail_v2alpha_generated_product_service_set_inventory_async.py" }, { "canonical": true, @@ -5565,31 +7165,27 @@ "fullName": "google.cloud.retail_v2alpha.ProductServiceClient", "shortName": "ProductServiceClient" }, - "fullName": "google.cloud.retail_v2alpha.ProductServiceClient.create_product", + "fullName": "google.cloud.retail_v2alpha.ProductServiceClient.set_inventory", "method": { - "fullName": "google.cloud.retail.v2alpha.ProductService.CreateProduct", + "fullName": "google.cloud.retail.v2alpha.ProductService.SetInventory", "service": { "fullName": "google.cloud.retail.v2alpha.ProductService", "shortName": "ProductService" }, - "shortName": "CreateProduct" + "shortName": "SetInventory" }, "parameters": [ { - "name": "request", - "type": "google.cloud.retail_v2alpha.types.CreateProductRequest" - }, - { - "name": "parent", - "type": "str" + "name": "request", + "type": "google.cloud.retail_v2alpha.types.SetInventoryRequest" }, { - "name": "product", + "name": "inventory", "type": "google.cloud.retail_v2alpha.types.Product" }, { - "name": "product_id", - "type": "str" + "name": "set_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" }, { "name": "retry", @@ -5604,22 +7200,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.retail_v2alpha.types.Product", - "shortName": "create_product" + "resultType": "google.api_core.operation.Operation", + "shortName": "set_inventory" }, - "description": "Sample for CreateProduct", - "file": "retail_v2alpha_generated_product_service_create_product_sync.py", + "description": "Sample for SetInventory", + "file": "retail_v2alpha_generated_product_service_set_inventory_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "retail_v2alpha_generated_ProductService_CreateProduct_sync", + "regionTag": "retail_v2alpha_generated_ProductService_SetInventory_sync", "segments": [ { - "end": 56, + "end": 58, "start": 27, "type": "FULL" }, { - "end": 56, + "end": 58, "start": 27, "type": "SHORT" }, @@ -5629,22 +7225,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 50, + "end": 48, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 53, - "start": 51, + "end": 55, + "start": 49, "type": "REQUEST_EXECUTION" }, { - "end": 57, - "start": 54, + "end": 59, + "start": 56, "type": "RESPONSE_HANDLING" } ], - "title": "retail_v2alpha_generated_product_service_create_product_sync.py" + "title": "retail_v2alpha_generated_product_service_set_inventory_sync.py" }, { "canonical": true, @@ -5654,23 +7250,27 @@ "fullName": "google.cloud.retail_v2alpha.ProductServiceAsyncClient", "shortName": "ProductServiceAsyncClient" }, - "fullName": "google.cloud.retail_v2alpha.ProductServiceAsyncClient.delete_product", + "fullName": 
"google.cloud.retail_v2alpha.ProductServiceAsyncClient.update_product", "method": { - "fullName": "google.cloud.retail.v2alpha.ProductService.DeleteProduct", + "fullName": "google.cloud.retail.v2alpha.ProductService.UpdateProduct", "service": { "fullName": "google.cloud.retail.v2alpha.ProductService", "shortName": "ProductService" }, - "shortName": "DeleteProduct" + "shortName": "UpdateProduct" }, "parameters": [ { "name": "request", - "type": "google.cloud.retail_v2alpha.types.DeleteProductRequest" + "type": "google.cloud.retail_v2alpha.types.UpdateProductRequest" }, { - "name": "name", - "type": "str" + "name": "product", + "type": "google.cloud.retail_v2alpha.types.Product" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" }, { "name": "retry", @@ -5685,21 +7285,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "shortName": "delete_product" + "resultType": "google.cloud.retail_v2alpha.types.Product", + "shortName": "update_product" }, - "description": "Sample for DeleteProduct", - "file": "retail_v2alpha_generated_product_service_delete_product_async.py", + "description": "Sample for UpdateProduct", + "file": "retail_v2alpha_generated_product_service_update_product_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "retail_v2alpha_generated_ProductService_DeleteProduct_async", + "regionTag": "retail_v2alpha_generated_ProductService_UpdateProduct_async", "segments": [ { - "end": 49, + "end": 54, "start": 27, "type": "FULL" }, { - "end": 49, + "end": 54, "start": 27, "type": "SHORT" }, @@ -5709,20 +7310,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 45, + "end": 48, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "start": 46, + "end": 51, + "start": 49, "type": "REQUEST_EXECUTION" }, { - "end": 50, + "end": 55, + "start": 52, "type": "RESPONSE_HANDLING" } ], - "title": "retail_v2alpha_generated_product_service_delete_product_async.py" + "title": "retail_v2alpha_generated_product_service_update_product_async.py" }, { "canonical": true, @@ -5731,23 +7334,27 @@ "fullName": "google.cloud.retail_v2alpha.ProductServiceClient", "shortName": "ProductServiceClient" }, - "fullName": "google.cloud.retail_v2alpha.ProductServiceClient.delete_product", + "fullName": "google.cloud.retail_v2alpha.ProductServiceClient.update_product", "method": { - "fullName": "google.cloud.retail.v2alpha.ProductService.DeleteProduct", + "fullName": "google.cloud.retail.v2alpha.ProductService.UpdateProduct", "service": { "fullName": "google.cloud.retail.v2alpha.ProductService", "shortName": "ProductService" }, - "shortName": "DeleteProduct" + "shortName": "UpdateProduct" }, "parameters": [ { "name": "request", - "type": "google.cloud.retail_v2alpha.types.DeleteProductRequest" + "type": "google.cloud.retail_v2alpha.types.UpdateProductRequest" }, { - "name": "name", - "type": "str" + "name": "product", + "type": "google.cloud.retail_v2alpha.types.Product" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" }, { "name": "retry", @@ -5762,21 +7369,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "shortName": "delete_product" + "resultType": "google.cloud.retail_v2alpha.types.Product", + "shortName": "update_product" }, - "description": "Sample for DeleteProduct", - "file": "retail_v2alpha_generated_product_service_delete_product_sync.py", + "description": "Sample for UpdateProduct", + "file": "retail_v2alpha_generated_product_service_update_product_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": 
"retail_v2alpha_generated_ProductService_DeleteProduct_sync", + "regionTag": "retail_v2alpha_generated_ProductService_UpdateProduct_sync", "segments": [ { - "end": 49, + "end": 54, "start": 27, "type": "FULL" }, { - "end": 49, + "end": 54, "start": 27, "type": "SHORT" }, @@ -5786,45 +7394,47 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 45, + "end": 48, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "start": 46, + "end": 51, + "start": 49, "type": "REQUEST_EXECUTION" }, { - "end": 50, + "end": 55, + "start": 52, "type": "RESPONSE_HANDLING" } ], - "title": "retail_v2alpha_generated_product_service_delete_product_sync.py" + "title": "retail_v2alpha_generated_product_service_update_product_sync.py" }, { "canonical": true, "clientMethod": { "async": true, "client": { - "fullName": "google.cloud.retail_v2alpha.ProductServiceAsyncClient", - "shortName": "ProductServiceAsyncClient" + "fullName": "google.cloud.retail_v2alpha.ProjectServiceAsyncClient", + "shortName": "ProjectServiceAsyncClient" }, - "fullName": "google.cloud.retail_v2alpha.ProductServiceAsyncClient.get_product", + "fullName": "google.cloud.retail_v2alpha.ProjectServiceAsyncClient.accept_terms", "method": { - "fullName": "google.cloud.retail.v2alpha.ProductService.GetProduct", + "fullName": "google.cloud.retail.v2alpha.ProjectService.AcceptTerms", "service": { - "fullName": "google.cloud.retail.v2alpha.ProductService", - "shortName": "ProductService" + "fullName": "google.cloud.retail.v2alpha.ProjectService", + "shortName": "ProjectService" }, - "shortName": "GetProduct" + "shortName": "AcceptTerms" }, "parameters": [ { "name": "request", - "type": "google.cloud.retail_v2alpha.types.GetProductRequest" + "type": "google.cloud.retail_v2alpha.types.AcceptTermsRequest" }, { - "name": "name", + "name": "project", "type": "str" }, { @@ -5840,14 +7450,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.retail_v2alpha.types.Product", - "shortName": "get_product" + "resultType": "google.cloud.retail_v2alpha.types.Project", + "shortName": "accept_terms" }, - "description": "Sample for GetProduct", - "file": "retail_v2alpha_generated_product_service_get_product_async.py", + "description": "Sample for AcceptTerms", + "file": "retail_v2alpha_generated_project_service_accept_terms_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "retail_v2alpha_generated_ProductService_GetProduct_async", + "regionTag": "retail_v2alpha_generated_ProjectService_AcceptTerms_async", "segments": [ { "end": 51, @@ -5880,31 +7490,31 @@ "type": "RESPONSE_HANDLING" } ], - "title": "retail_v2alpha_generated_product_service_get_product_async.py" + "title": "retail_v2alpha_generated_project_service_accept_terms_async.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.retail_v2alpha.ProductServiceClient", - "shortName": "ProductServiceClient" + "fullName": "google.cloud.retail_v2alpha.ProjectServiceClient", + "shortName": "ProjectServiceClient" }, - "fullName": "google.cloud.retail_v2alpha.ProductServiceClient.get_product", + "fullName": "google.cloud.retail_v2alpha.ProjectServiceClient.accept_terms", "method": { - "fullName": "google.cloud.retail.v2alpha.ProductService.GetProduct", + "fullName": "google.cloud.retail.v2alpha.ProjectService.AcceptTerms", "service": { - "fullName": "google.cloud.retail.v2alpha.ProductService", - "shortName": "ProductService" + "fullName": "google.cloud.retail.v2alpha.ProjectService", + "shortName": "ProjectService" }, - "shortName": "GetProduct" + 
"shortName": "AcceptTerms" }, "parameters": [ { "name": "request", - "type": "google.cloud.retail_v2alpha.types.GetProductRequest" + "type": "google.cloud.retail_v2alpha.types.AcceptTermsRequest" }, { - "name": "name", + "name": "project", "type": "str" }, { @@ -5920,14 +7530,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.retail_v2alpha.types.Product", - "shortName": "get_product" + "resultType": "google.cloud.retail_v2alpha.types.Project", + "shortName": "accept_terms" }, - "description": "Sample for GetProduct", - "file": "retail_v2alpha_generated_product_service_get_product_sync.py", + "description": "Sample for AcceptTerms", + "file": "retail_v2alpha_generated_project_service_accept_terms_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "retail_v2alpha_generated_ProductService_GetProduct_sync", + "regionTag": "retail_v2alpha_generated_ProjectService_AcceptTerms_sync", "segments": [ { "end": 51, @@ -5960,29 +7570,29 @@ "type": "RESPONSE_HANDLING" } ], - "title": "retail_v2alpha_generated_product_service_get_product_sync.py" + "title": "retail_v2alpha_generated_project_service_accept_terms_sync.py" }, { "canonical": true, "clientMethod": { "async": true, "client": { - "fullName": "google.cloud.retail_v2alpha.ProductServiceAsyncClient", - "shortName": "ProductServiceAsyncClient" + "fullName": "google.cloud.retail_v2alpha.ProjectServiceAsyncClient", + "shortName": "ProjectServiceAsyncClient" }, - "fullName": "google.cloud.retail_v2alpha.ProductServiceAsyncClient.import_products", + "fullName": "google.cloud.retail_v2alpha.ProjectServiceAsyncClient.enroll_solution", "method": { - "fullName": "google.cloud.retail.v2alpha.ProductService.ImportProducts", + "fullName": "google.cloud.retail.v2alpha.ProjectService.EnrollSolution", "service": { - "fullName": "google.cloud.retail.v2alpha.ProductService", - "shortName": "ProductService" + "fullName": "google.cloud.retail.v2alpha.ProjectService", + "shortName": "ProjectService" }, - "shortName": "ImportProducts" + "shortName": "EnrollSolution" }, "parameters": [ { "name": "request", - "type": "google.cloud.retail_v2alpha.types.ImportProductsRequest" + "type": "google.cloud.retail_v2alpha.types.EnrollSolutionRequest" }, { "name": "retry", @@ -5998,21 +7608,21 @@ } ], "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "import_products" + "shortName": "enroll_solution" }, - "description": "Sample for ImportProducts", - "file": "retail_v2alpha_generated_product_service_import_products_async.py", + "description": "Sample for EnrollSolution", + "file": "retail_v2alpha_generated_project_service_enroll_solution_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "retail_v2alpha_generated_ProductService_ImportProducts_async", + "regionTag": "retail_v2alpha_generated_ProjectService_EnrollSolution_async", "segments": [ { - "end": 59, + "end": 56, "start": 27, "type": "FULL" }, { - "end": 59, + "end": 56, "start": 27, "type": "SHORT" }, @@ -6022,43 +7632,43 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 49, + "end": 46, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 56, - "start": 50, + "end": 53, + "start": 47, "type": "REQUEST_EXECUTION" }, { - "end": 60, - "start": 57, + "end": 57, + "start": 54, "type": "RESPONSE_HANDLING" } ], - "title": "retail_v2alpha_generated_product_service_import_products_async.py" + "title": "retail_v2alpha_generated_project_service_enroll_solution_async.py" }, { "canonical": true, "clientMethod": { "client": { - 
"fullName": "google.cloud.retail_v2alpha.ProductServiceClient", - "shortName": "ProductServiceClient" + "fullName": "google.cloud.retail_v2alpha.ProjectServiceClient", + "shortName": "ProjectServiceClient" }, - "fullName": "google.cloud.retail_v2alpha.ProductServiceClient.import_products", + "fullName": "google.cloud.retail_v2alpha.ProjectServiceClient.enroll_solution", "method": { - "fullName": "google.cloud.retail.v2alpha.ProductService.ImportProducts", + "fullName": "google.cloud.retail.v2alpha.ProjectService.EnrollSolution", "service": { - "fullName": "google.cloud.retail.v2alpha.ProductService", - "shortName": "ProductService" + "fullName": "google.cloud.retail.v2alpha.ProjectService", + "shortName": "ProjectService" }, - "shortName": "ImportProducts" + "shortName": "EnrollSolution" }, "parameters": [ { "name": "request", - "type": "google.cloud.retail_v2alpha.types.ImportProductsRequest" + "type": "google.cloud.retail_v2alpha.types.EnrollSolutionRequest" }, { "name": "retry", @@ -6074,21 +7684,21 @@ } ], "resultType": "google.api_core.operation.Operation", - "shortName": "import_products" + "shortName": "enroll_solution" }, - "description": "Sample for ImportProducts", - "file": "retail_v2alpha_generated_product_service_import_products_sync.py", + "description": "Sample for EnrollSolution", + "file": "retail_v2alpha_generated_project_service_enroll_solution_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "retail_v2alpha_generated_ProductService_ImportProducts_sync", + "regionTag": "retail_v2alpha_generated_ProjectService_EnrollSolution_sync", "segments": [ { - "end": 59, + "end": 56, "start": 27, "type": "FULL" }, { - "end": 59, + "end": 56, "start": 27, "type": "SHORT" }, @@ -6098,47 +7708,47 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 49, + "end": 46, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 56, - "start": 50, + "end": 53, + "start": 47, "type": "REQUEST_EXECUTION" }, { - "end": 60, - "start": 57, + "end": 57, + "start": 54, "type": "RESPONSE_HANDLING" } ], - "title": "retail_v2alpha_generated_product_service_import_products_sync.py" + "title": "retail_v2alpha_generated_project_service_enroll_solution_sync.py" }, { "canonical": true, "clientMethod": { "async": true, "client": { - "fullName": "google.cloud.retail_v2alpha.ProductServiceAsyncClient", - "shortName": "ProductServiceAsyncClient" + "fullName": "google.cloud.retail_v2alpha.ProjectServiceAsyncClient", + "shortName": "ProjectServiceAsyncClient" }, - "fullName": "google.cloud.retail_v2alpha.ProductServiceAsyncClient.list_products", + "fullName": "google.cloud.retail_v2alpha.ProjectServiceAsyncClient.get_alert_config", "method": { - "fullName": "google.cloud.retail.v2alpha.ProductService.ListProducts", + "fullName": "google.cloud.retail.v2alpha.ProjectService.GetAlertConfig", "service": { - "fullName": "google.cloud.retail.v2alpha.ProductService", - "shortName": "ProductService" + "fullName": "google.cloud.retail.v2alpha.ProjectService", + "shortName": "ProjectService" }, - "shortName": "ListProducts" + "shortName": "GetAlertConfig" }, "parameters": [ { "name": "request", - "type": "google.cloud.retail_v2alpha.types.ListProductsRequest" + "type": "google.cloud.retail_v2alpha.types.GetAlertConfigRequest" }, { - "name": "parent", + "name": "name", "type": "str" }, { @@ -6154,22 +7764,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.retail_v2alpha.services.product_service.pagers.ListProductsAsyncPager", - "shortName": "list_products" + "resultType": 
"google.cloud.retail_v2alpha.types.AlertConfig", + "shortName": "get_alert_config" }, - "description": "Sample for ListProducts", - "file": "retail_v2alpha_generated_product_service_list_products_async.py", + "description": "Sample for GetAlertConfig", + "file": "retail_v2alpha_generated_project_service_get_alert_config_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "retail_v2alpha_generated_ProductService_ListProducts_async", + "regionTag": "retail_v2alpha_generated_ProjectService_GetAlertConfig_async", "segments": [ { - "end": 52, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 52, + "end": 51, "start": 27, "type": "SHORT" }, @@ -6189,36 +7799,36 @@ "type": "REQUEST_EXECUTION" }, { - "end": 53, + "end": 52, "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "retail_v2alpha_generated_product_service_list_products_async.py" + "title": "retail_v2alpha_generated_project_service_get_alert_config_async.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.retail_v2alpha.ProductServiceClient", - "shortName": "ProductServiceClient" + "fullName": "google.cloud.retail_v2alpha.ProjectServiceClient", + "shortName": "ProjectServiceClient" }, - "fullName": "google.cloud.retail_v2alpha.ProductServiceClient.list_products", + "fullName": "google.cloud.retail_v2alpha.ProjectServiceClient.get_alert_config", "method": { - "fullName": "google.cloud.retail.v2alpha.ProductService.ListProducts", + "fullName": "google.cloud.retail.v2alpha.ProjectService.GetAlertConfig", "service": { - "fullName": "google.cloud.retail.v2alpha.ProductService", - "shortName": "ProductService" + "fullName": "google.cloud.retail.v2alpha.ProjectService", + "shortName": "ProjectService" }, - "shortName": "ListProducts" + "shortName": "GetAlertConfig" }, "parameters": [ { "name": "request", - "type": "google.cloud.retail_v2alpha.types.ListProductsRequest" + "type": "google.cloud.retail_v2alpha.types.GetAlertConfigRequest" }, { - "name": "parent", + "name": "name", "type": "str" }, { @@ -6234,22 +7844,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.retail_v2alpha.services.product_service.pagers.ListProductsPager", - "shortName": "list_products" + "resultType": "google.cloud.retail_v2alpha.types.AlertConfig", + "shortName": "get_alert_config" }, - "description": "Sample for ListProducts", - "file": "retail_v2alpha_generated_product_service_list_products_sync.py", + "description": "Sample for GetAlertConfig", + "file": "retail_v2alpha_generated_project_service_get_alert_config_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "retail_v2alpha_generated_ProductService_ListProducts_sync", + "regionTag": "retail_v2alpha_generated_ProjectService_GetAlertConfig_sync", "segments": [ { - "end": 52, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 52, + "end": 51, "start": 27, "type": "SHORT" }, @@ -6269,34 +7879,38 @@ "type": "REQUEST_EXECUTION" }, { - "end": 53, + "end": 52, "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "retail_v2alpha_generated_product_service_list_products_sync.py" + "title": "retail_v2alpha_generated_project_service_get_alert_config_sync.py" }, { "canonical": true, "clientMethod": { "async": true, "client": { - "fullName": "google.cloud.retail_v2alpha.ProductServiceAsyncClient", - "shortName": "ProductServiceAsyncClient" + "fullName": "google.cloud.retail_v2alpha.ProjectServiceAsyncClient", + "shortName": "ProjectServiceAsyncClient" }, - "fullName": 
"google.cloud.retail_v2alpha.ProductServiceAsyncClient.purge_products", + "fullName": "google.cloud.retail_v2alpha.ProjectServiceAsyncClient.get_logging_config", "method": { - "fullName": "google.cloud.retail.v2alpha.ProductService.PurgeProducts", + "fullName": "google.cloud.retail.v2alpha.ProjectService.GetLoggingConfig", "service": { - "fullName": "google.cloud.retail.v2alpha.ProductService", - "shortName": "ProductService" + "fullName": "google.cloud.retail.v2alpha.ProjectService", + "shortName": "ProjectService" }, - "shortName": "PurgeProducts" + "shortName": "GetLoggingConfig" }, "parameters": [ { "name": "request", - "type": "google.cloud.retail_v2alpha.types.PurgeProductsRequest" + "type": "google.cloud.retail_v2alpha.types.GetLoggingConfigRequest" + }, + { + "name": "name", + "type": "str" }, { "name": "retry", @@ -6311,22 +7925,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "purge_products" + "resultType": "google.cloud.retail_v2alpha.types.LoggingConfig", + "shortName": "get_logging_config" }, - "description": "Sample for PurgeProducts", - "file": "retail_v2alpha_generated_product_service_purge_products_async.py", + "description": "Sample for GetLoggingConfig", + "file": "retail_v2alpha_generated_project_service_get_logging_config_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "retail_v2alpha_generated_ProductService_PurgeProducts_async", + "regionTag": "retail_v2alpha_generated_ProjectService_GetLoggingConfig_async", "segments": [ { - "end": 56, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 56, + "end": 51, "start": 27, "type": "SHORT" }, @@ -6336,43 +7950,47 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 46, + "end": 45, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 53, - "start": 47, + "end": 48, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 57, - "start": 54, + "end": 52, + "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "retail_v2alpha_generated_product_service_purge_products_async.py" + "title": "retail_v2alpha_generated_project_service_get_logging_config_async.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.retail_v2alpha.ProductServiceClient", - "shortName": "ProductServiceClient" + "fullName": "google.cloud.retail_v2alpha.ProjectServiceClient", + "shortName": "ProjectServiceClient" }, - "fullName": "google.cloud.retail_v2alpha.ProductServiceClient.purge_products", + "fullName": "google.cloud.retail_v2alpha.ProjectServiceClient.get_logging_config", "method": { - "fullName": "google.cloud.retail.v2alpha.ProductService.PurgeProducts", + "fullName": "google.cloud.retail.v2alpha.ProjectService.GetLoggingConfig", "service": { - "fullName": "google.cloud.retail.v2alpha.ProductService", - "shortName": "ProductService" + "fullName": "google.cloud.retail.v2alpha.ProjectService", + "shortName": "ProjectService" }, - "shortName": "PurgeProducts" + "shortName": "GetLoggingConfig" }, "parameters": [ { "name": "request", - "type": "google.cloud.retail_v2alpha.types.PurgeProductsRequest" + "type": "google.cloud.retail_v2alpha.types.GetLoggingConfigRequest" + }, + { + "name": "name", + "type": "str" }, { "name": "retry", @@ -6387,22 +8005,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.api_core.operation.Operation", - "shortName": "purge_products" + "resultType": "google.cloud.retail_v2alpha.types.LoggingConfig", + "shortName": "get_logging_config" }, - "description": "Sample 
for PurgeProducts", - "file": "retail_v2alpha_generated_product_service_purge_products_sync.py", + "description": "Sample for GetLoggingConfig", + "file": "retail_v2alpha_generated_project_service_get_logging_config_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "retail_v2alpha_generated_ProductService_PurgeProducts_sync", + "regionTag": "retail_v2alpha_generated_ProjectService_GetLoggingConfig_sync", "segments": [ { - "end": 56, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 56, + "end": 51, "start": 27, "type": "SHORT" }, @@ -6412,47 +8030,47 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 46, + "end": 45, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 53, - "start": 47, + "end": 48, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 57, - "start": 54, + "end": 52, + "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "retail_v2alpha_generated_product_service_purge_products_sync.py" + "title": "retail_v2alpha_generated_project_service_get_logging_config_sync.py" }, { "canonical": true, "clientMethod": { "async": true, "client": { - "fullName": "google.cloud.retail_v2alpha.ProductServiceAsyncClient", - "shortName": "ProductServiceAsyncClient" + "fullName": "google.cloud.retail_v2alpha.ProjectServiceAsyncClient", + "shortName": "ProjectServiceAsyncClient" }, - "fullName": "google.cloud.retail_v2alpha.ProductServiceAsyncClient.remove_fulfillment_places", + "fullName": "google.cloud.retail_v2alpha.ProjectServiceAsyncClient.get_project", "method": { - "fullName": "google.cloud.retail.v2alpha.ProductService.RemoveFulfillmentPlaces", + "fullName": "google.cloud.retail.v2alpha.ProjectService.GetProject", "service": { - "fullName": "google.cloud.retail.v2alpha.ProductService", - "shortName": "ProductService" + "fullName": "google.cloud.retail.v2alpha.ProjectService", + "shortName": "ProjectService" }, - "shortName": "RemoveFulfillmentPlaces" + "shortName": "GetProject" }, "parameters": [ { "name": "request", - "type": "google.cloud.retail_v2alpha.types.RemoveFulfillmentPlacesRequest" + "type": "google.cloud.retail_v2alpha.types.GetProjectRequest" }, { - "name": "product", + "name": "name", "type": "str" }, { @@ -6468,22 +8086,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "remove_fulfillment_places" + "resultType": "google.cloud.retail_v2alpha.types.Project", + "shortName": "get_project" }, - "description": "Sample for RemoveFulfillmentPlaces", - "file": "retail_v2alpha_generated_product_service_remove_fulfillment_places_async.py", + "description": "Sample for GetProject", + "file": "retail_v2alpha_generated_project_service_get_project_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "retail_v2alpha_generated_ProductService_RemoveFulfillmentPlaces_async", + "regionTag": "retail_v2alpha_generated_ProjectService_GetProject_async", "segments": [ { - "end": 57, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 57, + "end": 51, "start": 27, "type": "SHORT" }, @@ -6493,46 +8111,46 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 47, + "end": 45, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 54, - "start": 48, + "end": 48, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 58, - "start": 55, + "end": 52, + "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "retail_v2alpha_generated_product_service_remove_fulfillment_places_async.py" + "title": "retail_v2alpha_generated_project_service_get_project_async.py" 
}, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.retail_v2alpha.ProductServiceClient", - "shortName": "ProductServiceClient" + "fullName": "google.cloud.retail_v2alpha.ProjectServiceClient", + "shortName": "ProjectServiceClient" }, - "fullName": "google.cloud.retail_v2alpha.ProductServiceClient.remove_fulfillment_places", + "fullName": "google.cloud.retail_v2alpha.ProjectServiceClient.get_project", "method": { - "fullName": "google.cloud.retail.v2alpha.ProductService.RemoveFulfillmentPlaces", + "fullName": "google.cloud.retail.v2alpha.ProjectService.GetProject", "service": { - "fullName": "google.cloud.retail.v2alpha.ProductService", - "shortName": "ProductService" + "fullName": "google.cloud.retail.v2alpha.ProjectService", + "shortName": "ProjectService" }, - "shortName": "RemoveFulfillmentPlaces" + "shortName": "GetProject" }, "parameters": [ { "name": "request", - "type": "google.cloud.retail_v2alpha.types.RemoveFulfillmentPlacesRequest" + "type": "google.cloud.retail_v2alpha.types.GetProjectRequest" }, { - "name": "product", + "name": "name", "type": "str" }, { @@ -6548,22 +8166,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.api_core.operation.Operation", - "shortName": "remove_fulfillment_places" + "resultType": "google.cloud.retail_v2alpha.types.Project", + "shortName": "get_project" }, - "description": "Sample for RemoveFulfillmentPlaces", - "file": "retail_v2alpha_generated_product_service_remove_fulfillment_places_sync.py", + "description": "Sample for GetProject", + "file": "retail_v2alpha_generated_project_service_get_project_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "retail_v2alpha_generated_ProductService_RemoveFulfillmentPlaces_sync", + "regionTag": "retail_v2alpha_generated_ProjectService_GetProject_sync", "segments": [ { - "end": 57, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 57, + "end": 51, "start": 27, "type": "SHORT" }, @@ -6573,47 +8191,47 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 47, + "end": 45, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 54, - "start": 48, + "end": 48, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 58, - "start": 55, + "end": 52, + "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "retail_v2alpha_generated_product_service_remove_fulfillment_places_sync.py" + "title": "retail_v2alpha_generated_project_service_get_project_sync.py" }, { "canonical": true, "clientMethod": { "async": true, "client": { - "fullName": "google.cloud.retail_v2alpha.ProductServiceAsyncClient", - "shortName": "ProductServiceAsyncClient" + "fullName": "google.cloud.retail_v2alpha.ProjectServiceAsyncClient", + "shortName": "ProjectServiceAsyncClient" }, - "fullName": "google.cloud.retail_v2alpha.ProductServiceAsyncClient.remove_local_inventories", + "fullName": "google.cloud.retail_v2alpha.ProjectServiceAsyncClient.list_enrolled_solutions", "method": { - "fullName": "google.cloud.retail.v2alpha.ProductService.RemoveLocalInventories", + "fullName": "google.cloud.retail.v2alpha.ProjectService.ListEnrolledSolutions", "service": { - "fullName": "google.cloud.retail.v2alpha.ProductService", - "shortName": "ProductService" + "fullName": "google.cloud.retail.v2alpha.ProjectService", + "shortName": "ProjectService" }, - "shortName": "RemoveLocalInventories" + "shortName": "ListEnrolledSolutions" }, "parameters": [ { "name": "request", - "type": "google.cloud.retail_v2alpha.types.RemoveLocalInventoriesRequest" + "type": 
"google.cloud.retail_v2alpha.types.ListEnrolledSolutionsRequest" }, { - "name": "product", + "name": "parent", "type": "str" }, { @@ -6629,22 +8247,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "remove_local_inventories" + "resultType": "google.cloud.retail_v2alpha.types.ListEnrolledSolutionsResponse", + "shortName": "list_enrolled_solutions" }, - "description": "Sample for RemoveLocalInventories", - "file": "retail_v2alpha_generated_product_service_remove_local_inventories_async.py", + "description": "Sample for ListEnrolledSolutions", + "file": "retail_v2alpha_generated_project_service_list_enrolled_solutions_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "retail_v2alpha_generated_ProductService_RemoveLocalInventories_async", + "regionTag": "retail_v2alpha_generated_ProjectService_ListEnrolledSolutions_async", "segments": [ { - "end": 56, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 56, + "end": 51, "start": 27, "type": "SHORT" }, @@ -6654,46 +8272,46 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 46, + "end": 45, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 53, - "start": 47, + "end": 48, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 57, - "start": 54, + "end": 52, + "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "retail_v2alpha_generated_product_service_remove_local_inventories_async.py" + "title": "retail_v2alpha_generated_project_service_list_enrolled_solutions_async.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.retail_v2alpha.ProductServiceClient", - "shortName": "ProductServiceClient" + "fullName": "google.cloud.retail_v2alpha.ProjectServiceClient", + "shortName": "ProjectServiceClient" }, - "fullName": "google.cloud.retail_v2alpha.ProductServiceClient.remove_local_inventories", + "fullName": "google.cloud.retail_v2alpha.ProjectServiceClient.list_enrolled_solutions", "method": { - "fullName": "google.cloud.retail.v2alpha.ProductService.RemoveLocalInventories", + "fullName": "google.cloud.retail.v2alpha.ProjectService.ListEnrolledSolutions", "service": { - "fullName": "google.cloud.retail.v2alpha.ProductService", - "shortName": "ProductService" + "fullName": "google.cloud.retail.v2alpha.ProjectService", + "shortName": "ProjectService" }, - "shortName": "RemoveLocalInventories" + "shortName": "ListEnrolledSolutions" }, "parameters": [ { "name": "request", - "type": "google.cloud.retail_v2alpha.types.RemoveLocalInventoriesRequest" + "type": "google.cloud.retail_v2alpha.types.ListEnrolledSolutionsRequest" }, { - "name": "product", + "name": "parent", "type": "str" }, { @@ -6709,22 +8327,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.api_core.operation.Operation", - "shortName": "remove_local_inventories" + "resultType": "google.cloud.retail_v2alpha.types.ListEnrolledSolutionsResponse", + "shortName": "list_enrolled_solutions" }, - "description": "Sample for RemoveLocalInventories", - "file": "retail_v2alpha_generated_product_service_remove_local_inventories_sync.py", + "description": "Sample for ListEnrolledSolutions", + "file": "retail_v2alpha_generated_project_service_list_enrolled_solutions_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "retail_v2alpha_generated_ProductService_RemoveLocalInventories_sync", + "regionTag": "retail_v2alpha_generated_ProjectService_ListEnrolledSolutions_sync", "segments": [ { - "end": 56, + "end": 51, 
"start": 27, "type": "FULL" }, { - "end": 56, + "end": 51, "start": 27, "type": "SHORT" }, @@ -6734,51 +8352,51 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 46, + "end": 45, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 53, - "start": 47, + "end": 48, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 57, - "start": 54, + "end": 52, + "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "retail_v2alpha_generated_product_service_remove_local_inventories_sync.py" + "title": "retail_v2alpha_generated_project_service_list_enrolled_solutions_sync.py" }, { "canonical": true, "clientMethod": { "async": true, "client": { - "fullName": "google.cloud.retail_v2alpha.ProductServiceAsyncClient", - "shortName": "ProductServiceAsyncClient" + "fullName": "google.cloud.retail_v2alpha.ProjectServiceAsyncClient", + "shortName": "ProjectServiceAsyncClient" }, - "fullName": "google.cloud.retail_v2alpha.ProductServiceAsyncClient.set_inventory", + "fullName": "google.cloud.retail_v2alpha.ProjectServiceAsyncClient.update_alert_config", "method": { - "fullName": "google.cloud.retail.v2alpha.ProductService.SetInventory", + "fullName": "google.cloud.retail.v2alpha.ProjectService.UpdateAlertConfig", "service": { - "fullName": "google.cloud.retail.v2alpha.ProductService", - "shortName": "ProductService" + "fullName": "google.cloud.retail.v2alpha.ProjectService", + "shortName": "ProjectService" }, - "shortName": "SetInventory" + "shortName": "UpdateAlertConfig" }, "parameters": [ { "name": "request", - "type": "google.cloud.retail_v2alpha.types.SetInventoryRequest" + "type": "google.cloud.retail_v2alpha.types.UpdateAlertConfigRequest" }, { - "name": "inventory", - "type": "google.cloud.retail_v2alpha.types.Product" + "name": "alert_config", + "type": "google.cloud.retail_v2alpha.types.AlertConfig" }, { - "name": "set_mask", + "name": "update_mask", "type": "google.protobuf.field_mask_pb2.FieldMask" }, { @@ -6794,22 +8412,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "set_inventory" + "resultType": "google.cloud.retail_v2alpha.types.AlertConfig", + "shortName": "update_alert_config" }, - "description": "Sample for SetInventory", - "file": "retail_v2alpha_generated_product_service_set_inventory_async.py", + "description": "Sample for UpdateAlertConfig", + "file": "retail_v2alpha_generated_project_service_update_alert_config_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "retail_v2alpha_generated_ProductService_SetInventory_async", + "regionTag": "retail_v2alpha_generated_ProjectService_UpdateAlertConfig_async", "segments": [ { - "end": 58, + "end": 54, "start": 27, "type": "FULL" }, { - "end": 58, + "end": 54, "start": 27, "type": "SHORT" }, @@ -6824,45 +8442,45 @@ "type": "REQUEST_INITIALIZATION" }, { - "end": 55, + "end": 51, "start": 49, "type": "REQUEST_EXECUTION" }, { - "end": 59, - "start": 56, + "end": 55, + "start": 52, "type": "RESPONSE_HANDLING" } ], - "title": "retail_v2alpha_generated_product_service_set_inventory_async.py" + "title": "retail_v2alpha_generated_project_service_update_alert_config_async.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.retail_v2alpha.ProductServiceClient", - "shortName": "ProductServiceClient" + "fullName": "google.cloud.retail_v2alpha.ProjectServiceClient", + "shortName": "ProjectServiceClient" }, - "fullName": "google.cloud.retail_v2alpha.ProductServiceClient.set_inventory", + "fullName": 
"google.cloud.retail_v2alpha.ProjectServiceClient.update_alert_config", "method": { - "fullName": "google.cloud.retail.v2alpha.ProductService.SetInventory", + "fullName": "google.cloud.retail.v2alpha.ProjectService.UpdateAlertConfig", "service": { - "fullName": "google.cloud.retail.v2alpha.ProductService", - "shortName": "ProductService" + "fullName": "google.cloud.retail.v2alpha.ProjectService", + "shortName": "ProjectService" }, - "shortName": "SetInventory" + "shortName": "UpdateAlertConfig" }, "parameters": [ { "name": "request", - "type": "google.cloud.retail_v2alpha.types.SetInventoryRequest" + "type": "google.cloud.retail_v2alpha.types.UpdateAlertConfigRequest" }, { - "name": "inventory", - "type": "google.cloud.retail_v2alpha.types.Product" + "name": "alert_config", + "type": "google.cloud.retail_v2alpha.types.AlertConfig" }, { - "name": "set_mask", + "name": "update_mask", "type": "google.protobuf.field_mask_pb2.FieldMask" }, { @@ -6878,22 +8496,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.api_core.operation.Operation", - "shortName": "set_inventory" + "resultType": "google.cloud.retail_v2alpha.types.AlertConfig", + "shortName": "update_alert_config" }, - "description": "Sample for SetInventory", - "file": "retail_v2alpha_generated_product_service_set_inventory_sync.py", + "description": "Sample for UpdateAlertConfig", + "file": "retail_v2alpha_generated_project_service_update_alert_config_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "retail_v2alpha_generated_ProductService_SetInventory_sync", + "regionTag": "retail_v2alpha_generated_ProjectService_UpdateAlertConfig_sync", "segments": [ { - "end": 58, + "end": 54, "start": 27, "type": "FULL" }, { - "end": 58, + "end": 54, "start": 27, "type": "SHORT" }, @@ -6908,43 +8526,43 @@ "type": "REQUEST_INITIALIZATION" }, { - "end": 55, + "end": 51, "start": 49, "type": "REQUEST_EXECUTION" }, { - "end": 59, - "start": 56, + "end": 55, + "start": 52, "type": "RESPONSE_HANDLING" } ], - "title": "retail_v2alpha_generated_product_service_set_inventory_sync.py" + "title": "retail_v2alpha_generated_project_service_update_alert_config_sync.py" }, { "canonical": true, "clientMethod": { "async": true, "client": { - "fullName": "google.cloud.retail_v2alpha.ProductServiceAsyncClient", - "shortName": "ProductServiceAsyncClient" + "fullName": "google.cloud.retail_v2alpha.ProjectServiceAsyncClient", + "shortName": "ProjectServiceAsyncClient" }, - "fullName": "google.cloud.retail_v2alpha.ProductServiceAsyncClient.update_product", + "fullName": "google.cloud.retail_v2alpha.ProjectServiceAsyncClient.update_logging_config", "method": { - "fullName": "google.cloud.retail.v2alpha.ProductService.UpdateProduct", + "fullName": "google.cloud.retail.v2alpha.ProjectService.UpdateLoggingConfig", "service": { - "fullName": "google.cloud.retail.v2alpha.ProductService", - "shortName": "ProductService" + "fullName": "google.cloud.retail.v2alpha.ProjectService", + "shortName": "ProjectService" }, - "shortName": "UpdateProduct" + "shortName": "UpdateLoggingConfig" }, "parameters": [ { "name": "request", - "type": "google.cloud.retail_v2alpha.types.UpdateProductRequest" + "type": "google.cloud.retail_v2alpha.types.UpdateLoggingConfigRequest" }, { - "name": "product", - "type": "google.cloud.retail_v2alpha.types.Product" + "name": "logging_config", + "type": "google.cloud.retail_v2alpha.types.LoggingConfig" }, { "name": "update_mask", @@ -6963,14 +8581,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": 
"google.cloud.retail_v2alpha.types.Product", - "shortName": "update_product" + "resultType": "google.cloud.retail_v2alpha.types.LoggingConfig", + "shortName": "update_logging_config" }, - "description": "Sample for UpdateProduct", - "file": "retail_v2alpha_generated_product_service_update_product_async.py", + "description": "Sample for UpdateLoggingConfig", + "file": "retail_v2alpha_generated_project_service_update_logging_config_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "retail_v2alpha_generated_ProductService_UpdateProduct_async", + "regionTag": "retail_v2alpha_generated_ProjectService_UpdateLoggingConfig_async", "segments": [ { "end": 54, @@ -7003,32 +8621,32 @@ "type": "RESPONSE_HANDLING" } ], - "title": "retail_v2alpha_generated_product_service_update_product_async.py" + "title": "retail_v2alpha_generated_project_service_update_logging_config_async.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.retail_v2alpha.ProductServiceClient", - "shortName": "ProductServiceClient" + "fullName": "google.cloud.retail_v2alpha.ProjectServiceClient", + "shortName": "ProjectServiceClient" }, - "fullName": "google.cloud.retail_v2alpha.ProductServiceClient.update_product", + "fullName": "google.cloud.retail_v2alpha.ProjectServiceClient.update_logging_config", "method": { - "fullName": "google.cloud.retail.v2alpha.ProductService.UpdateProduct", + "fullName": "google.cloud.retail.v2alpha.ProjectService.UpdateLoggingConfig", "service": { - "fullName": "google.cloud.retail.v2alpha.ProductService", - "shortName": "ProductService" + "fullName": "google.cloud.retail.v2alpha.ProjectService", + "shortName": "ProjectService" }, - "shortName": "UpdateProduct" + "shortName": "UpdateLoggingConfig" }, "parameters": [ { "name": "request", - "type": "google.cloud.retail_v2alpha.types.UpdateProductRequest" + "type": "google.cloud.retail_v2alpha.types.UpdateLoggingConfigRequest" }, { - "name": "product", - "type": "google.cloud.retail_v2alpha.types.Product" + "name": "logging_config", + "type": "google.cloud.retail_v2alpha.types.LoggingConfig" }, { "name": "update_mask", @@ -7047,14 +8665,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.retail_v2alpha.types.Product", - "shortName": "update_product" + "resultType": "google.cloud.retail_v2alpha.types.LoggingConfig", + "shortName": "update_logging_config" }, - "description": "Sample for UpdateProduct", - "file": "retail_v2alpha_generated_product_service_update_product_sync.py", + "description": "Sample for UpdateLoggingConfig", + "file": "retail_v2alpha_generated_project_service_update_logging_config_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "retail_v2alpha_generated_ProductService_UpdateProduct_sync", + "regionTag": "retail_v2alpha_generated_ProjectService_UpdateLoggingConfig_sync", "segments": [ { "end": 54, @@ -7087,7 +8705,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "retail_v2alpha_generated_product_service_update_product_sync.py" + "title": "retail_v2alpha_generated_project_service_update_logging_config_sync.py" }, { "canonical": true, diff --git a/packages/google-cloud-retail/samples/generated_samples/snippet_metadata_google.cloud.retail.v2beta.json b/packages/google-cloud-retail/samples/generated_samples/snippet_metadata_google.cloud.retail.v2beta.json index ef74a8c47298..f0ca2075a908 100644 --- a/packages/google-cloud-retail/samples/generated_samples/snippet_metadata_google.cloud.retail.v2beta.json +++ 
b/packages/google-cloud-retail/samples/generated_samples/snippet_metadata_google.cloud.retail.v2beta.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-retail", - "version": "0.1.0" + "version": "1.21.0" }, "snippets": [ { @@ -5791,6 +5791,159 @@ ], "title": "retail_v2beta_generated_product_service_list_products_sync.py" }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.retail_v2beta.ProductServiceAsyncClient", + "shortName": "ProductServiceAsyncClient" + }, + "fullName": "google.cloud.retail_v2beta.ProductServiceAsyncClient.purge_products", + "method": { + "fullName": "google.cloud.retail.v2beta.ProductService.PurgeProducts", + "service": { + "fullName": "google.cloud.retail.v2beta.ProductService", + "shortName": "ProductService" + }, + "shortName": "PurgeProducts" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.retail_v2beta.types.PurgeProductsRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "purge_products" + }, + "description": "Sample for PurgeProducts", + "file": "retail_v2beta_generated_product_service_purge_products_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "retail_v2beta_generated_ProductService_PurgeProducts_async", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" + } + ], + "title": "retail_v2beta_generated_product_service_purge_products_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.retail_v2beta.ProductServiceClient", + "shortName": "ProductServiceClient" + }, + "fullName": "google.cloud.retail_v2beta.ProductServiceClient.purge_products", + "method": { + "fullName": "google.cloud.retail.v2beta.ProductService.PurgeProducts", + "service": { + "fullName": "google.cloud.retail.v2beta.ProductService", + "shortName": "ProductService" + }, + "shortName": "PurgeProducts" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.retail_v2beta.types.PurgeProductsRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "purge_products" + }, + "description": "Sample for PurgeProducts", + "file": "retail_v2beta_generated_product_service_purge_products_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "retail_v2beta_generated_ProductService_PurgeProducts_sync", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" + } + ], + "title": 
"retail_v2beta_generated_product_service_purge_products_sync.py" + }, { "canonical": true, "clientMethod": { diff --git a/packages/google-cloud-retail/scripts/fixup_retail_v2_keywords.py b/packages/google-cloud-retail/scripts/fixup_retail_v2_keywords.py index 30ca98583335..53664c5299f2 100644 --- a/packages/google-cloud-retail/scripts/fixup_retail_v2_keywords.py +++ b/packages/google-cloud-retail/scripts/fixup_retail_v2_keywords.py @@ -44,7 +44,7 @@ class retailCallTransformer(cst.CSTTransformer): 'add_fulfillment_places': ('product', 'type_', 'place_ids', 'add_time', 'allow_missing', ), 'add_local_inventories': ('product', 'local_inventories', 'add_mask', 'add_time', 'allow_missing', ), 'collect_user_event': ('parent', 'user_event', 'prebuilt_rule', 'uri', 'ets', 'raw_json', ), - 'complete_query': ('catalog', 'query', 'visitor_id', 'language_codes', 'device_type', 'dataset', 'max_suggestions', 'entity', ), + 'complete_query': ('catalog', 'query', 'visitor_id', 'language_codes', 'device_type', 'dataset', 'max_suggestions', 'enable_attribute_suggestions', 'entity', ), 'create_control': ('parent', 'control', 'control_id', ), 'create_model': ('parent', 'model', 'dry_run', ), 'create_product': ('parent', 'product', 'product_id', ), @@ -71,6 +71,7 @@ class retailCallTransformer(cst.CSTTransformer): 'list_serving_configs': ('parent', 'page_size', 'page_token', ), 'pause_model': ('name', ), 'predict': ('placement', 'user_event', 'page_size', 'page_token', 'filter', 'validate_only', 'params', 'labels', ), + 'purge_products': ('parent', 'filter', 'force', ), 'purge_user_events': ('parent', 'filter', 'force', ), 'rejoin_user_events': ('parent', 'user_event_rejoin_scope', ), 'remove_catalog_attribute': ('attributes_config', 'key', ), diff --git a/packages/google-cloud-retail/scripts/fixup_retail_v2alpha_keywords.py b/packages/google-cloud-retail/scripts/fixup_retail_v2alpha_keywords.py index 44e518bf6c6a..a68b750b304a 100644 --- a/packages/google-cloud-retail/scripts/fixup_retail_v2alpha_keywords.py +++ b/packages/google-cloud-retail/scripts/fixup_retail_v2alpha_keywords.py @@ -39,6 +39,7 @@ def partition( class retailCallTransformer(cst.CSTTransformer): CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata') METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { + 'accept_terms': ('project', ), 'add_catalog_attribute': ('attributes_config', 'catalog_attribute', ), 'add_control': ('serving_config', 'control_id', ), 'add_fulfillment_places': ('product', 'type_', 'place_ids', 'add_time', 'allow_missing', ), @@ -56,19 +57,26 @@ class retailCallTransformer(cst.CSTTransformer): 'delete_model': ('name', ), 'delete_product': ('name', 'force', ), 'delete_serving_config': ('name', ), + 'enroll_solution': ('project', 'solution', ), 'export_analytics_metrics': ('catalog', 'output_config', 'filter', ), + 'get_alert_config': ('name', ), 'get_attributes_config': ('name', ), + 'get_branch': ('name', 'view', ), 'get_completion_config': ('name', ), 'get_control': ('name', ), 'get_default_branch': ('catalog', ), + 'get_logging_config': ('name', ), 'get_model': ('name', ), 'get_product': ('name', ), + 'get_project': ('name', ), 'get_serving_config': ('name', ), 'import_completion_data': ('parent', 'input_config', 'notification_pubsub_topic', ), 'import_products': ('parent', 'input_config', 'request_id', 'errors_config', 'update_mask', 'reconciliation_mode', 'notification_pubsub_topic', 'skip_default_branch_protection', ), 'import_user_events': ('parent', 'input_config', 'errors_config', ), + 'list_branches': ('parent', 'view', 
), 'list_catalogs': ('parent', 'page_size', 'page_token', ), 'list_controls': ('parent', 'page_size', 'page_token', 'filter', ), + 'list_enrolled_solutions': ('parent', ), 'list_merchant_center_account_links': ('parent', ), 'list_models': ('parent', 'page_size', 'page_token', ), 'list_products': ('parent', 'page_size', 'page_token', 'filter', 'read_mask', 'require_total_size', ), @@ -88,10 +96,12 @@ class retailCallTransformer(cst.CSTTransformer): 'set_default_branch': ('catalog', 'branch_id', 'note', 'force', ), 'set_inventory': ('inventory', 'set_mask', 'set_time', 'allow_missing', ), 'tune_model': ('name', ), + 'update_alert_config': ('alert_config', 'update_mask', ), 'update_attributes_config': ('attributes_config', 'update_mask', ), 'update_catalog': ('catalog', 'update_mask', ), 'update_completion_config': ('completion_config', 'update_mask', ), 'update_control': ('control', 'update_mask', ), + 'update_logging_config': ('logging_config', 'update_mask', ), 'update_model': ('model', 'update_mask', ), 'update_product': ('product', 'update_mask', 'allow_missing', ), 'update_serving_config': ('serving_config', 'update_mask', ), diff --git a/packages/google-cloud-retail/scripts/fixup_retail_v2beta_keywords.py b/packages/google-cloud-retail/scripts/fixup_retail_v2beta_keywords.py index d6cd2a3151f1..03bc7e8515b3 100644 --- a/packages/google-cloud-retail/scripts/fixup_retail_v2beta_keywords.py +++ b/packages/google-cloud-retail/scripts/fixup_retail_v2beta_keywords.py @@ -45,7 +45,7 @@ class retailCallTransformer(cst.CSTTransformer): 'add_local_inventories': ('product', 'local_inventories', 'add_mask', 'add_time', 'allow_missing', ), 'batch_remove_catalog_attributes': ('attributes_config', 'attribute_keys', ), 'collect_user_event': ('parent', 'user_event', 'prebuilt_rule', 'uri', 'ets', 'raw_json', ), - 'complete_query': ('catalog', 'query', 'visitor_id', 'language_codes', 'device_type', 'dataset', 'max_suggestions', 'entity', ), + 'complete_query': ('catalog', 'query', 'visitor_id', 'language_codes', 'device_type', 'dataset', 'max_suggestions', 'enable_attribute_suggestions', 'entity', ), 'create_control': ('parent', 'control', 'control_id', ), 'create_model': ('parent', 'model', 'dry_run', ), 'create_product': ('parent', 'product', 'product_id', ), @@ -72,6 +72,7 @@ class retailCallTransformer(cst.CSTTransformer): 'list_serving_configs': ('parent', 'page_size', 'page_token', ), 'pause_model': ('name', ), 'predict': ('placement', 'user_event', 'page_size', 'page_token', 'filter', 'validate_only', 'params', 'labels', ), + 'purge_products': ('parent', 'filter', 'force', ), 'purge_user_events': ('parent', 'filter', 'force', ), 'rejoin_user_events': ('parent', 'user_event_rejoin_scope', ), 'remove_catalog_attribute': ('attributes_config', 'key', ), diff --git a/packages/google-cloud-retail/tests/unit/gapic/retail_v2/test_catalog_service.py b/packages/google-cloud-retail/tests/unit/gapic/retail_v2/test_catalog_service.py index 3d342325a808..1951ae59ef7b 100644 --- a/packages/google-cloud-retail/tests/unit/gapic/retail_v2/test_catalog_service.py +++ b/packages/google-cloud-retail/tests/unit/gapic/retail_v2/test_catalog_service.py @@ -1531,13 +1531,13 @@ def test_list_catalogs_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_catalogs(request={}) - assert pager._metadata == metadata + assert 
pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 diff --git a/packages/google-cloud-retail/tests/unit/gapic/retail_v2/test_completion_service.py b/packages/google-cloud-retail/tests/unit/gapic/retail_v2/test_completion_service.py index 31503e91f01b..9c1e0a6c6c57 100644 --- a/packages/google-cloud-retail/tests/unit/gapic/retail_v2/test_completion_service.py +++ b/packages/google-cloud-retail/tests/unit/gapic/retail_v2/test_completion_service.py @@ -1885,6 +1885,7 @@ def test_complete_query_rest_required_fields( ( "dataset", "device_type", + "enable_attribute_suggestions", "entity", "language_codes", "max_suggestions", @@ -1958,6 +1959,7 @@ def test_complete_query_rest_unset_required_fields(): ( "dataset", "deviceType", + "enableAttributeSuggestions", "entity", "languageCodes", "maxSuggestions", diff --git a/packages/google-cloud-retail/tests/unit/gapic/retail_v2/test_control_service.py b/packages/google-cloud-retail/tests/unit/gapic/retail_v2/test_control_service.py index a3887f3aeea6..c42307022c9b 100644 --- a/packages/google-cloud-retail/tests/unit/gapic/retail_v2/test_control_service.py +++ b/packages/google-cloud-retail/tests/unit/gapic/retail_v2/test_control_service.py @@ -3135,13 +3135,13 @@ def test_list_controls_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_controls(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -3335,11 +3335,20 @@ def test_create_control_rest(request_type): "twoway_synonyms_action": { "synonyms": ["synonyms_value1", "synonyms_value2"] }, + "force_return_facet_action": { + "facet_position_adjustments": [ + {"attribute_name": "attribute_name_value", "position": 885} + ] + }, + "remove_facet_action": { + "attribute_names": ["attribute_names_value1", "attribute_names_value2"] + }, "condition": { "query_terms": [{"value": "value_value", "full_match": True}], "active_time_range": [ {"start_time": {"seconds": 751, "nanos": 543}, "end_time": {}} ], + "page_categories": ["page_categories_value1", "page_categories_value2"], }, }, "name": "name_value", @@ -4095,11 +4104,20 @@ def test_update_control_rest(request_type): "twoway_synonyms_action": { "synonyms": ["synonyms_value1", "synonyms_value2"] }, + "force_return_facet_action": { + "facet_position_adjustments": [ + {"attribute_name": "attribute_name_value", "position": 885} + ] + }, + "remove_facet_action": { + "attribute_names": ["attribute_names_value1", "attribute_names_value2"] + }, "condition": { "query_terms": [{"value": "value_value", "full_match": True}], "active_time_range": [ {"start_time": {"seconds": 751, "nanos": 543}, "end_time": {}} ], + "page_categories": ["page_categories_value1", "page_categories_value2"], }, }, "name": "projects/sample1/locations/sample2/catalogs/sample3/controls/sample4", diff --git a/packages/google-cloud-retail/tests/unit/gapic/retail_v2/test_model_service.py b/packages/google-cloud-retail/tests/unit/gapic/retail_v2/test_model_service.py index dc4c849c035d..7153fb343b47 100644 --- a/packages/google-cloud-retail/tests/unit/gapic/retail_v2/test_model_service.py +++ b/packages/google-cloud-retail/tests/unit/gapic/retail_v2/test_model_service.py @@ -3513,13 +3513,13 @@ def test_list_models_pager(transport_name: str = "grpc"): RuntimeError, ) - 
metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_models(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -4498,6 +4498,9 @@ def test_create_model_rest(request_type): ] } ], + "model_features_config": { + "frequently_bought_together_config": {"context_products_type": 1} + }, } # The version of a generated dependency at test runtime may differ from the version used during generation. # Delete any fields which are not present in the current runtime dependency @@ -6561,6 +6564,9 @@ def test_update_model_rest(request_type): ] } ], + "model_features_config": { + "frequently_bought_together_config": {"context_products_type": 1} + }, } # The version of a generated dependency at test runtime may differ from the version used during generation. # Delete any fields which are not present in the current runtime dependency diff --git a/packages/google-cloud-retail/tests/unit/gapic/retail_v2/test_product_service.py b/packages/google-cloud-retail/tests/unit/gapic/retail_v2/test_product_service.py index 0b40fe284057..0a496005a46b 100644 --- a/packages/google-cloud-retail/tests/unit/gapic/retail_v2/test_product_service.py +++ b/packages/google-cloud-retail/tests/unit/gapic/retail_v2/test_product_service.py @@ -64,10 +64,10 @@ pagers, transports, ) +from google.cloud.retail_v2.types import product_service, promotion, purge_config from google.cloud.retail_v2.types import common, import_config from google.cloud.retail_v2.types import product from google.cloud.retail_v2.types import product as gcr_product -from google.cloud.retail_v2.types import product_service, promotion def client_cert_source_callback(): @@ -2475,13 +2475,13 @@ def test_list_products_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_products(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -3445,6 +3445,295 @@ async def test_delete_product_flattened_error_async(): ) +@pytest.mark.parametrize( + "request_type", + [ + purge_config.PurgeProductsRequest, + dict, + ], +) +def test_purge_products(request_type, transport: str = "grpc"): + client = ProductServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.purge_products), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.purge_products(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = purge_config.PurgeProductsRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, future.Future) + + +def test_purge_products_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ProductServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.purge_products), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.purge_products() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == purge_config.PurgeProductsRequest() + + +def test_purge_products_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = ProductServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = purge_config.PurgeProductsRequest( + parent="parent_value", + filter="filter_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.purge_products), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.purge_products(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == purge_config.PurgeProductsRequest( + parent="parent_value", + filter="filter_value", + ) + + +def test_purge_products_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ProductServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.purge_products in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.purge_products] = mock_rpc + request = {} + client.purge_products(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.purge_products(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_purge_products_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. 
+ client = ProductServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.purge_products), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.purge_products() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == purge_config.PurgeProductsRequest() + + +@pytest.mark.asyncio +async def test_purge_products_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = ProductServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.purge_products + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[ + client._client._transport.purge_products + ] = mock_object + + request = {} + await client.purge_products(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.purge_products(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + +@pytest.mark.asyncio +async def test_purge_products_async( + transport: str = "grpc_asyncio", request_type=purge_config.PurgeProductsRequest +): + client = ProductServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.purge_products), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.purge_products(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = purge_config.PurgeProductsRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_purge_products_async_from_dict(): + await test_purge_products_async(request_type=dict) + + +def test_purge_products_field_headers(): + client = ProductServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = purge_config.PurgeProductsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.purge_products), "__call__") as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.purge_products(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_purge_products_field_headers_async(): + client = ProductServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = purge_config.PurgeProductsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.purge_products), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.purge_products(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + @pytest.mark.parametrize( "request_type", [ @@ -7738,6 +8027,267 @@ def test_delete_product_rest_error(): ) +@pytest.mark.parametrize( + "request_type", + [ + purge_config.PurgeProductsRequest, + dict, + ], +) +def test_purge_products_rest(request_type): + client = ProductServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "parent": "projects/sample1/locations/sample2/catalogs/sample3/branches/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.purge_products(request) + + # Establish that the response is the type that we expect. 
+ assert response.operation.name == "operations/spam" + + +def test_purge_products_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ProductServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.purge_products in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.purge_products] = mock_rpc + + request = {} + client.purge_products(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.purge_products(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_purge_products_rest_required_fields( + request_type=purge_config.PurgeProductsRequest, +): + transport_class = transports.ProductServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request_init["filter"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).purge_products._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + jsonified_request["filter"] = "filter_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).purge_products._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + assert "filter" in jsonified_request + assert jsonified_request["filter"] == "filter_value" + + client = ProductServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.purge_products(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_purge_products_rest_unset_required_fields(): + transport = transports.ProductServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.purge_products._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(()) + & set( + ( + "parent", + "filter", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_purge_products_rest_interceptors(null_interceptor): + transport = transports.ProductServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.ProductServiceRestInterceptor(), + ) + client = ProductServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.ProductServiceRestInterceptor, "post_purge_products" + ) as post, mock.patch.object( + transports.ProductServiceRestInterceptor, "pre_purge_products" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = purge_config.PurgeProductsRequest.pb( + purge_config.PurgeProductsRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = purge_config.PurgeProductsRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.purge_products( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_purge_products_rest_bad_request( + transport: str = "rest", request_type=purge_config.PurgeProductsRequest +): + client = ProductServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "parent": "projects/sample1/locations/sample2/catalogs/sample3/branches/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.purge_products(request) + + +def test_purge_products_rest_error(): + client = ProductServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + @pytest.mark.parametrize( "request_type", [ @@ -9746,6 +10296,7 @@ def test_product_service_base_transport(): "list_products", "update_product", "delete_product", + "purge_products", "import_products", "set_inventory", "add_fulfillment_places", @@ -10049,6 +10600,9 @@ def test_product_service_client_transport_session_collision(transport_name): session1 = client1.transport.delete_product._session session2 = client2.transport.delete_product._session assert session1 != session2 + session1 = client1.transport.purge_products._session + session2 = client2.transport.purge_products._session + assert session1 != session2 session1 = client1.transport.import_products._session session2 = client2.transport.import_products._session assert session1 != session2 diff --git a/packages/google-cloud-retail/tests/unit/gapic/retail_v2/test_search_service.py b/packages/google-cloud-retail/tests/unit/gapic/retail_v2/test_search_service.py index 119646a5b127..5b7d4b01df32 100644 --- a/packages/google-cloud-retail/tests/unit/gapic/retail_v2/test_search_service.py +++ b/packages/google-cloud-retail/tests/unit/gapic/retail_v2/test_search_service.py @@ -1477,13 +1477,13 @@ def test_search_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("placement", ""),)), ) pager = client.search(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 diff --git a/packages/google-cloud-retail/tests/unit/gapic/retail_v2/test_serving_config_service.py b/packages/google-cloud-retail/tests/unit/gapic/retail_v2/test_serving_config_service.py index de358f71c0b0..11fbf6488c65 100644 --- a/packages/google-cloud-retail/tests/unit/gapic/retail_v2/test_serving_config_service.py +++ b/packages/google-cloud-retail/tests/unit/gapic/retail_v2/test_serving_config_service.py @@ -1229,6 +1229,7 @@ def test_create_serving_config(request_type, transport: str = "grpc"): diversity_level="diversity_level_value", diversity_type=gcr_serving_config.ServingConfig.DiversityType.RULE_BASED_DIVERSITY, enable_category_filter_level="enable_category_filter_level_value", + ignore_recs_denylist=True, solution_types=[common.SolutionType.SOLUTION_TYPE_RECOMMENDATION], ) response = client.create_serving_config(request) @@ -1262,6 +1263,7 @@ def test_create_serving_config(request_type, transport: str = "grpc"): == gcr_serving_config.ServingConfig.DiversityType.RULE_BASED_DIVERSITY ) assert response.enable_category_filter_level == "enable_category_filter_level_value" + assert response.ignore_recs_denylist is True assert response.solution_types == [common.SolutionType.SOLUTION_TYPE_RECOMMENDATION] @@ -1390,6 +1392,7 @@ async def test_create_serving_config_empty_call_async(): diversity_level="diversity_level_value", diversity_type=gcr_serving_config.ServingConfig.DiversityType.RULE_BASED_DIVERSITY, 
enable_category_filter_level="enable_category_filter_level_value", + ignore_recs_denylist=True, solution_types=[common.SolutionType.SOLUTION_TYPE_RECOMMENDATION], ) ) @@ -1482,6 +1485,7 @@ async def test_create_serving_config_async( diversity_level="diversity_level_value", diversity_type=gcr_serving_config.ServingConfig.DiversityType.RULE_BASED_DIVERSITY, enable_category_filter_level="enable_category_filter_level_value", + ignore_recs_denylist=True, solution_types=[common.SolutionType.SOLUTION_TYPE_RECOMMENDATION], ) ) @@ -1516,6 +1520,7 @@ async def test_create_serving_config_async( == gcr_serving_config.ServingConfig.DiversityType.RULE_BASED_DIVERSITY ) assert response.enable_category_filter_level == "enable_category_filter_level_value" + assert response.ignore_recs_denylist is True assert response.solution_types == [common.SolutionType.SOLUTION_TYPE_RECOMMENDATION] @@ -2111,6 +2116,7 @@ def test_update_serving_config(request_type, transport: str = "grpc"): diversity_level="diversity_level_value", diversity_type=gcr_serving_config.ServingConfig.DiversityType.RULE_BASED_DIVERSITY, enable_category_filter_level="enable_category_filter_level_value", + ignore_recs_denylist=True, solution_types=[common.SolutionType.SOLUTION_TYPE_RECOMMENDATION], ) response = client.update_serving_config(request) @@ -2144,6 +2150,7 @@ def test_update_serving_config(request_type, transport: str = "grpc"): == gcr_serving_config.ServingConfig.DiversityType.RULE_BASED_DIVERSITY ) assert response.enable_category_filter_level == "enable_category_filter_level_value" + assert response.ignore_recs_denylist is True assert response.solution_types == [common.SolutionType.SOLUTION_TYPE_RECOMMENDATION] @@ -2266,6 +2273,7 @@ async def test_update_serving_config_empty_call_async(): diversity_level="diversity_level_value", diversity_type=gcr_serving_config.ServingConfig.DiversityType.RULE_BASED_DIVERSITY, enable_category_filter_level="enable_category_filter_level_value", + ignore_recs_denylist=True, solution_types=[common.SolutionType.SOLUTION_TYPE_RECOMMENDATION], ) ) @@ -2358,6 +2366,7 @@ async def test_update_serving_config_async( diversity_level="diversity_level_value", diversity_type=gcr_serving_config.ServingConfig.DiversityType.RULE_BASED_DIVERSITY, enable_category_filter_level="enable_category_filter_level_value", + ignore_recs_denylist=True, solution_types=[common.SolutionType.SOLUTION_TYPE_RECOMMENDATION], ) ) @@ -2392,6 +2401,7 @@ async def test_update_serving_config_async( == gcr_serving_config.ServingConfig.DiversityType.RULE_BASED_DIVERSITY ) assert response.enable_category_filter_level == "enable_category_filter_level_value" + assert response.ignore_recs_denylist is True assert response.solution_types == [common.SolutionType.SOLUTION_TYPE_RECOMMENDATION] @@ -2600,6 +2610,7 @@ def test_get_serving_config(request_type, transport: str = "grpc"): diversity_level="diversity_level_value", diversity_type=serving_config.ServingConfig.DiversityType.RULE_BASED_DIVERSITY, enable_category_filter_level="enable_category_filter_level_value", + ignore_recs_denylist=True, solution_types=[common.SolutionType.SOLUTION_TYPE_RECOMMENDATION], ) response = client.get_serving_config(request) @@ -2633,6 +2644,7 @@ def test_get_serving_config(request_type, transport: str = "grpc"): == serving_config.ServingConfig.DiversityType.RULE_BASED_DIVERSITY ) assert response.enable_category_filter_level == "enable_category_filter_level_value" + assert response.ignore_recs_denylist is True assert response.solution_types == 
[common.SolutionType.SOLUTION_TYPE_RECOMMENDATION] @@ -2758,6 +2770,7 @@ async def test_get_serving_config_empty_call_async(): diversity_level="diversity_level_value", diversity_type=serving_config.ServingConfig.DiversityType.RULE_BASED_DIVERSITY, enable_category_filter_level="enable_category_filter_level_value", + ignore_recs_denylist=True, solution_types=[common.SolutionType.SOLUTION_TYPE_RECOMMENDATION], ) ) @@ -2850,6 +2863,7 @@ async def test_get_serving_config_async( diversity_level="diversity_level_value", diversity_type=serving_config.ServingConfig.DiversityType.RULE_BASED_DIVERSITY, enable_category_filter_level="enable_category_filter_level_value", + ignore_recs_denylist=True, solution_types=[common.SolutionType.SOLUTION_TYPE_RECOMMENDATION], ) ) @@ -2884,6 +2898,7 @@ async def test_get_serving_config_async( == serving_config.ServingConfig.DiversityType.RULE_BASED_DIVERSITY ) assert response.enable_category_filter_level == "enable_category_filter_level_value" + assert response.ignore_recs_denylist is True assert response.solution_types == [common.SolutionType.SOLUTION_TYPE_RECOMMENDATION] @@ -3476,13 +3491,13 @@ def test_list_serving_configs_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_serving_configs(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -3672,6 +3687,7 @@ def test_add_control(request_type, transport: str = "grpc"): diversity_level="diversity_level_value", diversity_type=gcr_serving_config.ServingConfig.DiversityType.RULE_BASED_DIVERSITY, enable_category_filter_level="enable_category_filter_level_value", + ignore_recs_denylist=True, solution_types=[common.SolutionType.SOLUTION_TYPE_RECOMMENDATION], ) response = client.add_control(request) @@ -3705,6 +3721,7 @@ def test_add_control(request_type, transport: str = "grpc"): == gcr_serving_config.ServingConfig.DiversityType.RULE_BASED_DIVERSITY ) assert response.enable_category_filter_level == "enable_category_filter_level_value" + assert response.ignore_recs_denylist is True assert response.solution_types == [common.SolutionType.SOLUTION_TYPE_RECOMMENDATION] @@ -3822,6 +3839,7 @@ async def test_add_control_empty_call_async(): diversity_level="diversity_level_value", diversity_type=gcr_serving_config.ServingConfig.DiversityType.RULE_BASED_DIVERSITY, enable_category_filter_level="enable_category_filter_level_value", + ignore_recs_denylist=True, solution_types=[common.SolutionType.SOLUTION_TYPE_RECOMMENDATION], ) ) @@ -3912,6 +3930,7 @@ async def test_add_control_async( diversity_level="diversity_level_value", diversity_type=gcr_serving_config.ServingConfig.DiversityType.RULE_BASED_DIVERSITY, enable_category_filter_level="enable_category_filter_level_value", + ignore_recs_denylist=True, solution_types=[common.SolutionType.SOLUTION_TYPE_RECOMMENDATION], ) ) @@ -3946,6 +3965,7 @@ async def test_add_control_async( == gcr_serving_config.ServingConfig.DiversityType.RULE_BASED_DIVERSITY ) assert response.enable_category_filter_level == "enable_category_filter_level_value" + assert response.ignore_recs_denylist is True assert response.solution_types == [common.SolutionType.SOLUTION_TYPE_RECOMMENDATION] @@ -4134,6 +4154,7 @@ def test_remove_control(request_type, transport: str = "grpc"): diversity_level="diversity_level_value", 
diversity_type=gcr_serving_config.ServingConfig.DiversityType.RULE_BASED_DIVERSITY, enable_category_filter_level="enable_category_filter_level_value", + ignore_recs_denylist=True, solution_types=[common.SolutionType.SOLUTION_TYPE_RECOMMENDATION], ) response = client.remove_control(request) @@ -4167,6 +4188,7 @@ def test_remove_control(request_type, transport: str = "grpc"): == gcr_serving_config.ServingConfig.DiversityType.RULE_BASED_DIVERSITY ) assert response.enable_category_filter_level == "enable_category_filter_level_value" + assert response.ignore_recs_denylist is True assert response.solution_types == [common.SolutionType.SOLUTION_TYPE_RECOMMENDATION] @@ -4284,6 +4306,7 @@ async def test_remove_control_empty_call_async(): diversity_level="diversity_level_value", diversity_type=gcr_serving_config.ServingConfig.DiversityType.RULE_BASED_DIVERSITY, enable_category_filter_level="enable_category_filter_level_value", + ignore_recs_denylist=True, solution_types=[common.SolutionType.SOLUTION_TYPE_RECOMMENDATION], ) ) @@ -4374,6 +4397,7 @@ async def test_remove_control_async( diversity_level="diversity_level_value", diversity_type=gcr_serving_config.ServingConfig.DiversityType.RULE_BASED_DIVERSITY, enable_category_filter_level="enable_category_filter_level_value", + ignore_recs_denylist=True, solution_types=[common.SolutionType.SOLUTION_TYPE_RECOMMENDATION], ) ) @@ -4408,6 +4432,7 @@ async def test_remove_control_async( == gcr_serving_config.ServingConfig.DiversityType.RULE_BASED_DIVERSITY ) assert response.enable_category_filter_level == "enable_category_filter_level_value" + assert response.ignore_recs_denylist is True assert response.solution_types == [common.SolutionType.SOLUTION_TYPE_RECOMMENDATION] @@ -4613,6 +4638,7 @@ def test_create_serving_config_rest(request_type): "diversity_level": "diversity_level_value", "diversity_type": 2, "enable_category_filter_level": "enable_category_filter_level_value", + "ignore_recs_denylist": True, "personalization_spec": {"mode": 1}, "solution_types": [1], } @@ -4707,6 +4733,7 @@ def get_message_fields(field): diversity_level="diversity_level_value", diversity_type=gcr_serving_config.ServingConfig.DiversityType.RULE_BASED_DIVERSITY, enable_category_filter_level="enable_category_filter_level_value", + ignore_recs_denylist=True, solution_types=[common.SolutionType.SOLUTION_TYPE_RECOMMENDATION], ) @@ -4744,6 +4771,7 @@ def get_message_fields(field): == gcr_serving_config.ServingConfig.DiversityType.RULE_BASED_DIVERSITY ) assert response.enable_category_filter_level == "enable_category_filter_level_value" + assert response.ignore_recs_denylist is True assert response.solution_types == [common.SolutionType.SOLUTION_TYPE_RECOMMENDATION] @@ -5402,6 +5430,7 @@ def test_update_serving_config_rest(request_type): "diversity_level": "diversity_level_value", "diversity_type": 2, "enable_category_filter_level": "enable_category_filter_level_value", + "ignore_recs_denylist": True, "personalization_spec": {"mode": 1}, "solution_types": [1], } @@ -5496,6 +5525,7 @@ def get_message_fields(field): diversity_level="diversity_level_value", diversity_type=gcr_serving_config.ServingConfig.DiversityType.RULE_BASED_DIVERSITY, enable_category_filter_level="enable_category_filter_level_value", + ignore_recs_denylist=True, solution_types=[common.SolutionType.SOLUTION_TYPE_RECOMMENDATION], ) @@ -5533,6 +5563,7 @@ def get_message_fields(field): == gcr_serving_config.ServingConfig.DiversityType.RULE_BASED_DIVERSITY ) assert response.enable_category_filter_level == 
"enable_category_filter_level_value" + assert response.ignore_recs_denylist is True assert response.solution_types == [common.SolutionType.SOLUTION_TYPE_RECOMMENDATION] @@ -5852,6 +5883,7 @@ def test_get_serving_config_rest(request_type): diversity_level="diversity_level_value", diversity_type=serving_config.ServingConfig.DiversityType.RULE_BASED_DIVERSITY, enable_category_filter_level="enable_category_filter_level_value", + ignore_recs_denylist=True, solution_types=[common.SolutionType.SOLUTION_TYPE_RECOMMENDATION], ) @@ -5889,6 +5921,7 @@ def test_get_serving_config_rest(request_type): == serving_config.ServingConfig.DiversityType.RULE_BASED_DIVERSITY ) assert response.enable_category_filter_level == "enable_category_filter_level_value" + assert response.ignore_recs_denylist is True assert response.solution_types == [common.SolutionType.SOLUTION_TYPE_RECOMMENDATION] @@ -6595,6 +6628,7 @@ def test_add_control_rest(request_type): diversity_level="diversity_level_value", diversity_type=gcr_serving_config.ServingConfig.DiversityType.RULE_BASED_DIVERSITY, enable_category_filter_level="enable_category_filter_level_value", + ignore_recs_denylist=True, solution_types=[common.SolutionType.SOLUTION_TYPE_RECOMMENDATION], ) @@ -6632,6 +6666,7 @@ def test_add_control_rest(request_type): == gcr_serving_config.ServingConfig.DiversityType.RULE_BASED_DIVERSITY ) assert response.enable_category_filter_level == "enable_category_filter_level_value" + assert response.ignore_recs_denylist is True assert response.solution_types == [common.SolutionType.SOLUTION_TYPE_RECOMMENDATION] @@ -6954,6 +6989,7 @@ def test_remove_control_rest(request_type): diversity_level="diversity_level_value", diversity_type=gcr_serving_config.ServingConfig.DiversityType.RULE_BASED_DIVERSITY, enable_category_filter_level="enable_category_filter_level_value", + ignore_recs_denylist=True, solution_types=[common.SolutionType.SOLUTION_TYPE_RECOMMENDATION], ) @@ -6991,6 +7027,7 @@ def test_remove_control_rest(request_type): == gcr_serving_config.ServingConfig.DiversityType.RULE_BASED_DIVERSITY ) assert response.enable_category_filter_level == "enable_category_filter_level_value" + assert response.ignore_recs_denylist is True assert response.solution_types == [common.SolutionType.SOLUTION_TYPE_RECOMMENDATION] diff --git a/packages/google-cloud-retail/tests/unit/gapic/retail_v2alpha/test_branch_service.py b/packages/google-cloud-retail/tests/unit/gapic/retail_v2alpha/test_branch_service.py new file mode 100644 index 000000000000..a487c3f18923 --- /dev/null +++ b/packages/google-cloud-retail/tests/unit/gapic/retail_v2alpha/test_branch_service.py @@ -0,0 +1,3704 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+#
+import os
+
+# try/except added for compatibility with python < 3.8
+try:
+    from unittest import mock
+    from unittest.mock import AsyncMock  # pragma: NO COVER
+except ImportError:  # pragma: NO COVER
+    import mock
+
+from collections.abc import Iterable
+import json
+import math
+
+from google.api_core import gapic_v1, grpc_helpers, grpc_helpers_async, path_template
+from google.api_core import api_core_version, client_options
+from google.api_core import exceptions as core_exceptions
+import google.auth
+from google.auth import credentials as ga_credentials
+from google.auth.exceptions import MutualTLSChannelError
+from google.cloud.location import locations_pb2
+from google.longrunning import operations_pb2  # type: ignore
+from google.oauth2 import service_account
+from google.protobuf import json_format
+from google.protobuf import timestamp_pb2  # type: ignore
+import grpc
+from grpc.experimental import aio
+from proto.marshal.rules import wrappers
+from proto.marshal.rules.dates import DurationRule, TimestampRule
+import pytest
+from requests import PreparedRequest, Request, Response
+from requests.sessions import Session
+
+from google.cloud.retail_v2alpha.services.branch_service import (
+    BranchServiceAsyncClient,
+    BranchServiceClient,
+    transports,
+)
+from google.cloud.retail_v2alpha.types import branch, branch_service
+
+
+def client_cert_source_callback():
+    return b"cert bytes", b"key bytes"
+
+
+# If default endpoint is localhost, then default mtls endpoint will be the same.
+# This method modifies the default endpoint so the client can produce a different
+# mtls endpoint for endpoint testing purposes.
+def modify_default_endpoint(client):
+    return (
+        "foo.googleapis.com"
+        if ("localhost" in client.DEFAULT_ENDPOINT)
+        else client.DEFAULT_ENDPOINT
+    )
+
+
+# If default endpoint template is localhost, then default mtls endpoint will be the same.
+# This method modifies the default endpoint template so the client can produce a different
+# mtls endpoint for endpoint testing purposes.
+def modify_default_endpoint_template(client): + return ( + "test.{UNIVERSE_DOMAIN}" + if ("localhost" in client._DEFAULT_ENDPOINT_TEMPLATE) + else client._DEFAULT_ENDPOINT_TEMPLATE + ) + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert BranchServiceClient._get_default_mtls_endpoint(None) is None + assert ( + BranchServiceClient._get_default_mtls_endpoint(api_endpoint) + == api_mtls_endpoint + ) + assert ( + BranchServiceClient._get_default_mtls_endpoint(api_mtls_endpoint) + == api_mtls_endpoint + ) + assert ( + BranchServiceClient._get_default_mtls_endpoint(sandbox_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + BranchServiceClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + BranchServiceClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi + ) + + +def test__read_environment_variables(): + assert BranchServiceClient._read_environment_variables() == (False, "auto", None) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + assert BranchServiceClient._read_environment_variables() == (True, "auto", None) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + assert BranchServiceClient._read_environment_variables() == ( + False, + "auto", + None, + ) + + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError) as excinfo: + BranchServiceClient._read_environment_variables() + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + assert BranchServiceClient._read_environment_variables() == ( + False, + "never", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + assert BranchServiceClient._read_environment_variables() == ( + False, + "always", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}): + assert BranchServiceClient._read_environment_variables() == ( + False, + "auto", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + BranchServiceClient._read_environment_variables() + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + with mock.patch.dict(os.environ, {"GOOGLE_CLOUD_UNIVERSE_DOMAIN": "foo.com"}): + assert BranchServiceClient._read_environment_variables() == ( + False, + "auto", + "foo.com", + ) + + +def test__get_client_cert_source(): + mock_provided_cert_source = mock.Mock() + mock_default_cert_source = mock.Mock() + + assert BranchServiceClient._get_client_cert_source(None, False) is None + assert ( + BranchServiceClient._get_client_cert_source(mock_provided_cert_source, False) + is None + ) + assert ( + BranchServiceClient._get_client_cert_source(mock_provided_cert_source, True) + == mock_provided_cert_source + ) + + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", return_value=True + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + 
return_value=mock_default_cert_source, + ): + assert ( + BranchServiceClient._get_client_cert_source(None, True) + is mock_default_cert_source + ) + assert ( + BranchServiceClient._get_client_cert_source( + mock_provided_cert_source, "true" + ) + is mock_provided_cert_source + ) + + +@mock.patch.object( + BranchServiceClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(BranchServiceClient), +) +@mock.patch.object( + BranchServiceAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(BranchServiceAsyncClient), +) +def test__get_api_endpoint(): + api_override = "foo.com" + mock_client_cert_source = mock.Mock() + default_universe = BranchServiceClient._DEFAULT_UNIVERSE + default_endpoint = BranchServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=default_universe + ) + mock_universe = "bar.com" + mock_endpoint = BranchServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=mock_universe + ) + + assert ( + BranchServiceClient._get_api_endpoint( + api_override, mock_client_cert_source, default_universe, "always" + ) + == api_override + ) + assert ( + BranchServiceClient._get_api_endpoint( + None, mock_client_cert_source, default_universe, "auto" + ) + == BranchServiceClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + BranchServiceClient._get_api_endpoint(None, None, default_universe, "auto") + == default_endpoint + ) + assert ( + BranchServiceClient._get_api_endpoint(None, None, default_universe, "always") + == BranchServiceClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + BranchServiceClient._get_api_endpoint( + None, mock_client_cert_source, default_universe, "always" + ) + == BranchServiceClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + BranchServiceClient._get_api_endpoint(None, None, mock_universe, "never") + == mock_endpoint + ) + assert ( + BranchServiceClient._get_api_endpoint(None, None, default_universe, "never") + == default_endpoint + ) + + with pytest.raises(MutualTLSChannelError) as excinfo: + BranchServiceClient._get_api_endpoint( + None, mock_client_cert_source, mock_universe, "auto" + ) + assert ( + str(excinfo.value) + == "mTLS is not supported in any universe other than googleapis.com." + ) + + +def test__get_universe_domain(): + client_universe_domain = "foo.com" + universe_domain_env = "bar.com" + + assert ( + BranchServiceClient._get_universe_domain( + client_universe_domain, universe_domain_env + ) + == client_universe_domain + ) + assert ( + BranchServiceClient._get_universe_domain(None, universe_domain_env) + == universe_domain_env + ) + assert ( + BranchServiceClient._get_universe_domain(None, None) + == BranchServiceClient._DEFAULT_UNIVERSE + ) + + with pytest.raises(ValueError) as excinfo: + BranchServiceClient._get_universe_domain("", None) + assert str(excinfo.value) == "Universe Domain cannot be an empty string." + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (BranchServiceClient, transports.BranchServiceGrpcTransport, "grpc"), + (BranchServiceClient, transports.BranchServiceRestTransport, "rest"), + ], +) +def test__validate_universe_domain(client_class, transport_class, transport_name): + client = client_class( + transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) + ) + assert client._validate_universe_domain() == True + + # Test the case when universe is already validated. 
+ assert client._validate_universe_domain() == True + + if transport_name == "grpc": + # Test the case where credentials are provided by the + # `local_channel_credentials`. The default universes in both match. + channel = grpc.secure_channel( + "http://localhost/", grpc.local_channel_credentials() + ) + client = client_class(transport=transport_class(channel=channel)) + assert client._validate_universe_domain() == True + + # Test the case where credentials do not exist: e.g. a transport is provided + # with no credentials. Validation should still succeed because there is no + # mismatch with non-existent credentials. + channel = grpc.secure_channel( + "http://localhost/", grpc.local_channel_credentials() + ) + transport = transport_class(channel=channel) + transport._credentials = None + client = client_class(transport=transport) + assert client._validate_universe_domain() == True + + # TODO: This is needed to cater for older versions of google-auth + # Make this test unconditional once the minimum supported version of + # google-auth becomes 2.23.0 or higher. + google_auth_major, google_auth_minor = [ + int(part) for part in google.auth.__version__.split(".")[0:2] + ] + if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): + credentials = ga_credentials.AnonymousCredentials() + credentials._universe_domain = "foo.com" + # Test the case when there is a universe mismatch from the credentials. + client = client_class(transport=transport_class(credentials=credentials)) + with pytest.raises(ValueError) as excinfo: + client._validate_universe_domain() + assert ( + str(excinfo.value) + == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." + ) + + # Test the case when there is a universe mismatch from the client. + # + # TODO: Make this test unconditional once the minimum supported version of + # google-api-core becomes 2.15.0 or higher. + api_core_major, api_core_minor = [ + int(part) for part in api_core_version.__version__.split(".")[0:2] + ] + if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): + client = client_class( + client_options={"universe_domain": "bar.com"}, + transport=transport_class( + credentials=ga_credentials.AnonymousCredentials(), + ), + ) + with pytest.raises(ValueError) as excinfo: + client._validate_universe_domain() + assert ( + str(excinfo.value) + == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." 
+ ) + + # Test that ValueError is raised if universe_domain is provided via client options and credentials is None + with pytest.raises(ValueError): + client._compare_universes("foo.bar", None) + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (BranchServiceClient, "grpc"), + (BranchServiceAsyncClient, "grpc_asyncio"), + (BranchServiceClient, "rest"), + ], +) +def test_branch_service_client_from_service_account_info(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_info" + ) as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info, transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + "retail.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://retail.googleapis.com" + ) + + +@pytest.mark.parametrize( + "transport_class,transport_name", + [ + (transports.BranchServiceGrpcTransport, "grpc"), + (transports.BranchServiceGrpcAsyncIOTransport, "grpc_asyncio"), + (transports.BranchServiceRestTransport, "rest"), + ], +) +def test_branch_service_client_service_account_always_use_jwt( + transport_class, transport_name +): + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) + + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=False) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (BranchServiceClient, "grpc"), + (BranchServiceAsyncClient, "grpc_asyncio"), + (BranchServiceClient, "rest"), + ], +) +def test_branch_service_client_from_service_account_file(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_file" + ) as factory: + factory.return_value = creds + client = client_class.from_service_account_file( + "dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + client = client_class.from_service_account_json( + "dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + "retail.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://retail.googleapis.com" + ) + + +def test_branch_service_client_get_transport_class(): + transport = BranchServiceClient.get_transport_class() + available_transports = [ + transports.BranchServiceGrpcTransport, + transports.BranchServiceRestTransport, + ] + assert transport in available_transports + + transport = BranchServiceClient.get_transport_class("grpc") + assert transport == transports.BranchServiceGrpcTransport + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (BranchServiceClient, 
transports.BranchServiceGrpcTransport, "grpc"), + ( + BranchServiceAsyncClient, + transports.BranchServiceGrpcAsyncIOTransport, + "grpc_asyncio", + ), + (BranchServiceClient, transports.BranchServiceRestTransport, "rest"), + ], +) +@mock.patch.object( + BranchServiceClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(BranchServiceClient), +) +@mock.patch.object( + BranchServiceAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(BranchServiceAsyncClient), +) +def test_branch_service_client_client_options( + client_class, transport_class, transport_name +): + # Check that if channel is provided we won't create a new one. + with mock.patch.object(BranchServiceClient, "get_transport_class") as gtc: + transport = transport_class(credentials=ga_credentials.AnonymousCredentials()) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object(BranchServiceClient, "get_transport_class") as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. + options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name, client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + client = client_class(transport=transport_name) + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError) as excinfo: + client = client_class(transport=transport_name) + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + # Check the case api_endpoint is provided + options = client_options.ClientOptions( + api_audience="https://language.googleapis.com" + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience="https://language.googleapis.com", + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,use_client_cert_env", + [ + (BranchServiceClient, transports.BranchServiceGrpcTransport, "grpc", "true"), + ( + BranchServiceAsyncClient, + transports.BranchServiceGrpcAsyncIOTransport, + "grpc_asyncio", + "true", + ), + (BranchServiceClient, transports.BranchServiceGrpcTransport, "grpc", "false"), + ( + BranchServiceAsyncClient, + transports.BranchServiceGrpcAsyncIOTransport, + "grpc_asyncio", + "false", + ), + (BranchServiceClient, transports.BranchServiceRestTransport, "rest", "true"), + (BranchServiceClient, transports.BranchServiceRestTransport, "rest", "false"), + ], +) +@mock.patch.object( + BranchServiceClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(BranchServiceClient), +) +@mock.patch.object( + BranchServiceAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(BranchServiceAsyncClient), +) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_branch_service_client_mtls_env_auto( + client_class, transport_class, transport_name, use_client_cert_env +): + # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. + + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + options = client_options.ClientOptions( + client_cert_source=client_cert_source_callback + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ) + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=client_cert_source_callback, + ): + if use_client_cert_env == "false": + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ) + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case client_cert_source and ADC client cert are not provided. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class", [BranchServiceClient, BranchServiceAsyncClient] +) +@mock.patch.object( + BranchServiceClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(BranchServiceClient), +) +@mock.patch.object( + BranchServiceAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(BranchServiceAsyncClient), +) +def test_branch_service_client_get_mtls_endpoint_and_cert_source(client_class): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=mock_client_cert_source, + ): + ( + api_endpoint, + cert_source, + ) = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + client_class.get_mtls_endpoint_and_cert_source() + + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError) as excinfo: + client_class.get_mtls_endpoint_and_cert_source() + + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + + +@pytest.mark.parametrize( + "client_class", [BranchServiceClient, BranchServiceAsyncClient] +) +@mock.patch.object( + BranchServiceClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(BranchServiceClient), +) +@mock.patch.object( + BranchServiceAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(BranchServiceAsyncClient), +) +def test_branch_service_client_client_api_endpoint(client_class): + mock_client_cert_source = client_cert_source_callback + api_override = "foo.com" + default_universe = BranchServiceClient._DEFAULT_UNIVERSE + default_endpoint = BranchServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=default_universe + ) + mock_universe = "bar.com" + mock_endpoint = BranchServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=mock_universe + ) + + # If ClientOptions.api_endpoint is set and GOOGLE_API_USE_CLIENT_CERTIFICATE="true", + # use ClientOptions.api_endpoint as the api endpoint regardless. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ): + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=api_override + ) + client = client_class( + client_options=options, + credentials=ga_credentials.AnonymousCredentials(), + ) + assert client.api_endpoint == api_override + + # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="never", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + client = client_class(credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == default_endpoint + + # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="always", + # use the DEFAULT_MTLS_ENDPOINT as the api endpoint. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + client = client_class(credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + + # If ClientOptions.api_endpoint is not set, GOOGLE_API_USE_MTLS_ENDPOINT="auto" (default), + # GOOGLE_API_USE_CLIENT_CERTIFICATE="false" (default), default cert source doesn't exist, + # and ClientOptions.universe_domain="bar.com", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with universe domain as the api endpoint. 
+ options = client_options.ClientOptions() + universe_exists = hasattr(options, "universe_domain") + if universe_exists: + options = client_options.ClientOptions(universe_domain=mock_universe) + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + else: + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + assert client.api_endpoint == ( + mock_endpoint if universe_exists else default_endpoint + ) + assert client.universe_domain == ( + mock_universe if universe_exists else default_universe + ) + + # If ClientOptions does not have a universe domain attribute and GOOGLE_API_USE_MTLS_ENDPOINT="never", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. + options = client_options.ClientOptions() + if hasattr(options, "universe_domain"): + delattr(options, "universe_domain") + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + assert client.api_endpoint == default_endpoint + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (BranchServiceClient, transports.BranchServiceGrpcTransport, "grpc"), + ( + BranchServiceAsyncClient, + transports.BranchServiceGrpcAsyncIOTransport, + "grpc_asyncio", + ), + (BranchServiceClient, transports.BranchServiceRestTransport, "rest"), + ], +) +def test_branch_service_client_client_options_scopes( + client_class, transport_class, transport_name +): + # Check the case scopes are provided. + options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + BranchServiceClient, + transports.BranchServiceGrpcTransport, + "grpc", + grpc_helpers, + ), + ( + BranchServiceAsyncClient, + transports.BranchServiceGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + (BranchServiceClient, transports.BranchServiceRestTransport, "rest", None), + ], +) +def test_branch_service_client_client_options_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. 
+ options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +def test_branch_service_client_client_options_from_dict(): + with mock.patch( + "google.cloud.retail_v2alpha.services.branch_service.transports.BranchServiceGrpcTransport.__init__" + ) as grpc_transport: + grpc_transport.return_value = None + client = BranchServiceClient( + client_options={"api_endpoint": "squid.clam.whelk"} + ) + grpc_transport.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + BranchServiceClient, + transports.BranchServiceGrpcTransport, + "grpc", + grpc_helpers, + ), + ( + BranchServiceAsyncClient, + transports.BranchServiceGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + ], +) +def test_branch_service_client_create_channel_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. + options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # test that the credentials from file are saved and used as the credentials. 
+ with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel" + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + file_creds = ga_credentials.AnonymousCredentials() + load_creds.return_value = (file_creds, None) + adc.return_value = (creds, None) + client = client_class(client_options=options, transport=transport_name) + create_channel.assert_called_with( + "retail.googleapis.com:443", + credentials=file_creds, + credentials_file=None, + quota_project_id=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + scopes=None, + default_host="retail.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "request_type", + [ + branch_service.ListBranchesRequest, + dict, + ], +) +def test_list_branches(request_type, transport: str = "grpc"): + client = BranchServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_branches), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = branch_service.ListBranchesResponse() + response = client.list_branches(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = branch_service.ListBranchesRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, branch_service.ListBranchesResponse) + + +def test_list_branches_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = BranchServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_branches), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.list_branches() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == branch_service.ListBranchesRequest() + + +def test_list_branches_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = BranchServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = branch_service.ListBranchesRequest( + parent="parent_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.list_branches), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.list_branches(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == branch_service.ListBranchesRequest( + parent="parent_value", + ) + + +def test_list_branches_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BranchServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_branches in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.list_branches] = mock_rpc + request = {} + client.list_branches(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_branches(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_list_branches_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = BranchServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_branches), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + branch_service.ListBranchesResponse() + ) + response = await client.list_branches() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == branch_service.ListBranchesRequest() + + +@pytest.mark.asyncio +async def test_list_branches_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = BranchServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.list_branches + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[ + client._client._transport.list_branches + ] = mock_object + + request = {} + await client.list_branches(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_object.call_count == 1 + + await client.list_branches(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + +@pytest.mark.asyncio +async def test_list_branches_async( + transport: str = "grpc_asyncio", request_type=branch_service.ListBranchesRequest +): + client = BranchServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_branches), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + branch_service.ListBranchesResponse() + ) + response = await client.list_branches(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = branch_service.ListBranchesRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, branch_service.ListBranchesResponse) + + +@pytest.mark.asyncio +async def test_list_branches_async_from_dict(): + await test_list_branches_async(request_type=dict) + + +def test_list_branches_field_headers(): + client = BranchServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = branch_service.ListBranchesRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_branches), "__call__") as call: + call.return_value = branch_service.ListBranchesResponse() + client.list_branches(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_branches_field_headers_async(): + client = BranchServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = branch_service.ListBranchesRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_branches), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + branch_service.ListBranchesResponse() + ) + await client.list_branches(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
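+    # The routing parameters are attached by the client as gRPC metadata under
+    # the "x-goog-request-params" key, derived from the request's ``parent`` field.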
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_list_branches_flattened(): + client = BranchServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_branches), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = branch_service.ListBranchesResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_branches( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +def test_list_branches_flattened_error(): + client = BranchServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_branches( + branch_service.ListBranchesRequest(), + parent="parent_value", + ) + + +@pytest.mark.asyncio +async def test_list_branches_flattened_async(): + client = BranchServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_branches), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = branch_service.ListBranchesResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + branch_service.ListBranchesResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_branches( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_list_branches_flattened_error_async(): + client = BranchServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.list_branches( + branch_service.ListBranchesRequest(), + parent="parent_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + branch_service.GetBranchRequest, + dict, + ], +) +def test_get_branch(request_type, transport: str = "grpc"): + client = BranchServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_branch), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = branch.Branch( + name="name_value", + display_name="display_name_value", + is_default=True, + ) + response = client.get_branch(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = branch_service.GetBranchRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, branch.Branch) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.is_default is True + + +def test_get_branch_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = BranchServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_branch), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.get_branch() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == branch_service.GetBranchRequest() + + +def test_get_branch_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = BranchServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = branch_service.GetBranchRequest( + name="name_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_branch), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.get_branch(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == branch_service.GetBranchRequest( + name="name_value", + ) + + +def test_get_branch_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BranchServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_branch in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get_branch] = mock_rpc + request = {} + client.get_branch(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.get_branch(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_get_branch_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. 
+ client = BranchServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_branch), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + branch.Branch( + name="name_value", + display_name="display_name_value", + is_default=True, + ) + ) + response = await client.get_branch() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == branch_service.GetBranchRequest() + + +@pytest.mark.asyncio +async def test_get_branch_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = BranchServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.get_branch + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[ + client._client._transport.get_branch + ] = mock_object + + request = {} + await client.get_branch(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + await client.get_branch(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + +@pytest.mark.asyncio +async def test_get_branch_async( + transport: str = "grpc_asyncio", request_type=branch_service.GetBranchRequest +): + client = BranchServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_branch), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + branch.Branch( + name="name_value", + display_name="display_name_value", + is_default=True, + ) + ) + response = await client.get_branch(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = branch_service.GetBranchRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, branch.Branch) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.is_default is True + + +@pytest.mark.asyncio +async def test_get_branch_async_from_dict(): + await test_get_branch_async(request_type=dict) + + +def test_get_branch_field_headers(): + client = BranchServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = branch_service.GetBranchRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_branch), "__call__") as call: + call.return_value = branch.Branch() + client.get_branch(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_branch_field_headers_async(): + client = BranchServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = branch_service.GetBranchRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_branch), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(branch.Branch()) + await client.get_branch(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_get_branch_flattened(): + client = BranchServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_branch), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = branch.Branch() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_branch( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_get_branch_flattened_error(): + client = BranchServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_branch( + branch_service.GetBranchRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_get_branch_flattened_async(): + client = BranchServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.get_branch), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = branch.Branch() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(branch.Branch()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_branch( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_get_branch_flattened_error_async(): + client = BranchServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.get_branch( + branch_service.GetBranchRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + branch_service.ListBranchesRequest, + dict, + ], +) +def test_list_branches_rest(request_type): + client = BranchServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2/catalogs/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = branch_service.ListBranchesResponse() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = branch_service.ListBranchesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_branches(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, branch_service.ListBranchesResponse) + + +def test_list_branches_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BranchServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_branches in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.list_branches] = mock_rpc + + request = {} + client.list_branches(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.list_branches(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_list_branches_rest_required_fields( + request_type=branch_service.ListBranchesRequest, +): + transport_class = transports.BranchServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_branches._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_branches._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("view",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = BranchServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = branch_service.ListBranchesResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
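+            # path_template.transcode normally maps the request onto the URI
+            # template from the method's http options; returning a fixed
+            # uri/method here keeps the assertion focused on query_params.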
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = branch_service.ListBranchesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_branches(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_branches_rest_unset_required_fields(): + transport = transports.BranchServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.list_branches._get_unset_required_fields({}) + assert set(unset_fields) == (set(("view",)) & set(("parent",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_branches_rest_interceptors(null_interceptor): + transport = transports.BranchServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.BranchServiceRestInterceptor(), + ) + client = BranchServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.BranchServiceRestInterceptor, "post_list_branches" + ) as post, mock.patch.object( + transports.BranchServiceRestInterceptor, "pre_list_branches" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = branch_service.ListBranchesRequest.pb( + branch_service.ListBranchesRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = branch_service.ListBranchesResponse.to_json( + branch_service.ListBranchesResponse() + ) + + request = branch_service.ListBranchesRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = branch_service.ListBranchesResponse() + + client.list_branches( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_branches_rest_bad_request( + transport: str = "rest", request_type=branch_service.ListBranchesRequest +): + client = BranchServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2/catalogs/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_branches(request) + + +def test_list_branches_rest_flattened(): + client = BranchServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = branch_service.ListBranchesResponse() + + # get arguments that satisfy an http rule for this method + sample_request = { + "parent": "projects/sample1/locations/sample2/catalogs/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = branch_service.ListBranchesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.list_branches(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v2alpha/{parent=projects/*/locations/*/catalogs/*}/branches" + % client.transport._host, + args[1], + ) + + +def test_list_branches_rest_flattened_error(transport: str = "rest"): + client = BranchServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_branches( + branch_service.ListBranchesRequest(), + parent="parent_value", + ) + + +def test_list_branches_rest_error(): + client = BranchServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + branch_service.GetBranchRequest, + dict, + ], +) +def test_get_branch_rest(request_type): + client = BranchServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/catalogs/sample3/branches/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = branch.Branch( + name="name_value", + display_name="display_name_value", + is_default=True, + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = branch.Branch.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_branch(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, branch.Branch) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.is_default is True + + +def test_get_branch_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BranchServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_branch in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get_branch] = mock_rpc + + request = {} + client.get_branch(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.get_branch(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_get_branch_rest_required_fields(request_type=branch_service.GetBranchRequest): + transport_class = transports.BranchServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_branch._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_branch._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("view",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = BranchServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = branch.Branch() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = branch.Branch.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_branch(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_branch_rest_unset_required_fields(): + transport = transports.BranchServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_branch._get_unset_required_fields({}) + assert set(unset_fields) == (set(("view",)) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_branch_rest_interceptors(null_interceptor): + transport = transports.BranchServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.BranchServiceRestInterceptor(), + ) + client = BranchServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.BranchServiceRestInterceptor, "post_get_branch" + ) as post, mock.patch.object( + transports.BranchServiceRestInterceptor, "pre_get_branch" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = branch_service.GetBranchRequest.pb( + branch_service.GetBranchRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = branch.Branch.to_json(branch.Branch()) + + request = branch_service.GetBranchRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = branch.Branch() + + client.get_branch( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_branch_rest_bad_request( + transport: str = "rest", request_type=branch_service.GetBranchRequest +): + client = BranchServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/catalogs/sample3/branches/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_branch(request) + + +def test_get_branch_rest_flattened(): + client = BranchServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = branch.Branch() + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/catalogs/sample3/branches/sample4" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = branch.Branch.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_branch(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v2alpha/{name=projects/*/locations/*/catalogs/*/branches/*}" + % client.transport._host, + args[1], + ) + + +def test_get_branch_rest_flattened_error(transport: str = "rest"): + client = BranchServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_branch( + branch_service.GetBranchRequest(), + name="name_value", + ) + + +def test_get_branch_rest_error(): + client = BranchServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.BranchServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = BranchServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.BranchServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = BranchServiceClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. + transport = transports.BranchServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = BranchServiceClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. 
+ options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = BranchServiceClient( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.BranchServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = BranchServiceClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.BranchServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = BranchServiceClient(transport=transport) + assert client.transport is transport + + +def test_transport_get_channel(): + # A client may be instantiated with a custom transport instance. + transport = transports.BranchServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + transport = transports.BranchServiceGrpcAsyncIOTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.BranchServiceGrpcTransport, + transports.BranchServiceGrpcAsyncIOTransport, + transports.BranchServiceRestTransport, + ], +) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "rest", + ], +) +def test_transport_kind(transport_name): + transport = BranchServiceClient.get_transport_class(transport_name)( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert transport.kind == transport_name + + +def test_transport_grpc_default(): + # A client should use the gRPC transport by default. + client = BranchServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert isinstance( + client.transport, + transports.BranchServiceGrpcTransport, + ) + + +def test_branch_service_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.BranchServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json", + ) + + +def test_branch_service_base_transport(): + # Instantiate the base transport. + with mock.patch( + "google.cloud.retail_v2alpha.services.branch_service.transports.BranchServiceTransport.__init__" + ) as Transport: + Transport.return_value = None + transport = transports.BranchServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. 
+ methods = ( + "list_branches", + "get_branch", + "get_operation", + "list_operations", + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + with pytest.raises(NotImplementedError): + transport.close() + + # Catch all for all remaining methods and properties + remainder = [ + "kind", + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + + +def test_branch_service_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch( + "google.cloud.retail_v2alpha.services.branch_service.transports.BranchServiceTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.BranchServiceTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with( + "credentials.json", + scopes=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id="octopus", + ) + + +def test_branch_service_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. + with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( + "google.cloud.retail_v2alpha.services.branch_service.transports.BranchServiceTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.BranchServiceTransport() + adc.assert_called_once() + + +def test_branch_service_auth_adc(): + # If no credentials are provided, we should use ADC credentials. + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + BranchServiceClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id=None, + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.BranchServiceGrpcTransport, + transports.BranchServiceGrpcAsyncIOTransport, + ], +) +def test_branch_service_transport_auth_adc(transport_class): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
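+    # google.auth.default is patched so no real ADC lookup happens; the transport
+    # is expected to forward its scopes and quota_project_id to that call.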
+ with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + adc.assert_called_once_with( + scopes=["1", "2"], + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id="octopus", + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.BranchServiceGrpcTransport, + transports.BranchServiceGrpcAsyncIOTransport, + transports.BranchServiceRestTransport, + ], +) +def test_branch_service_transport_auth_gdch_credentials(transport_class): + host = "https://language.com" + api_audience_tests = [None, "https://language2.com"] + api_audience_expect = [host, "https://language2.com"] + for t, e in zip(api_audience_tests, api_audience_expect): + with mock.patch.object(google.auth, "default", autospec=True) as adc: + gdch_mock = mock.MagicMock() + type(gdch_mock).with_gdch_audience = mock.PropertyMock( + return_value=gdch_mock + ) + adc.return_value = (gdch_mock, None) + transport_class(host=host, api_audience=t) + gdch_mock.with_gdch_audience.assert_called_once_with(e) + + +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.BranchServiceGrpcTransport, grpc_helpers), + (transports.BranchServiceGrpcAsyncIOTransport, grpc_helpers_async), + ], +) +def test_branch_service_transport_create_channel(transport_class, grpc_helpers): + # If credentials and host are not provided, the transport class should use + # ADC credentials. + with mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + + create_channel.assert_called_with( + "retail.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + scopes=["1", "2"], + default_host="retail.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.BranchServiceGrpcTransport, + transports.BranchServiceGrpcAsyncIOTransport, + ], +) +def test_branch_service_grpc_transport_client_cert_source_for_mtls(transport_class): + cred = ga_credentials.AnonymousCredentials() + + # Check ssl_channel_credentials is used if provided. + with mock.patch.object(transport_class, "create_channel") as mock_create_channel: + mock_ssl_channel_creds = mock.Mock() + transport_class( + host="squid.clam.whelk", + credentials=cred, + ssl_channel_credentials=mock_ssl_channel_creds, + ) + mock_create_channel.assert_called_once_with( + "squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_channel_creds, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls + # is used. 
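+    # The callback returns (cert, key) bytes, which the transport should pass to
+    # grpc.ssl_channel_credentials as certificate_chain / private_key.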
+ with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): + with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: + transport_class( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback, + ) + expected_cert, expected_key = client_cert_source_callback() + mock_ssl_cred.assert_called_once_with( + certificate_chain=expected_cert, private_key=expected_key + ) + + +def test_branch_service_http_transport_client_cert_source_for_mtls(): + cred = ga_credentials.AnonymousCredentials() + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ) as mock_configure_mtls_channel: + transports.BranchServiceRestTransport( + credentials=cred, client_cert_source_for_mtls=client_cert_source_callback + ) + mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + "rest", + ], +) +def test_branch_service_host_no_port(transport_name): + client = BranchServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="retail.googleapis.com" + ), + transport=transport_name, + ) + assert client.transport._host == ( + "retail.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://retail.googleapis.com" + ) + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + "rest", + ], +) +def test_branch_service_host_with_port(transport_name): + client = BranchServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="retail.googleapis.com:8000" + ), + transport=transport_name, + ) + assert client.transport._host == ( + "retail.googleapis.com:8000" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://retail.googleapis.com:8000" + ) + + +@pytest.mark.parametrize( + "transport_name", + [ + "rest", + ], +) +def test_branch_service_client_transport_session_collision(transport_name): + creds1 = ga_credentials.AnonymousCredentials() + creds2 = ga_credentials.AnonymousCredentials() + client1 = BranchServiceClient( + credentials=creds1, + transport=transport_name, + ) + client2 = BranchServiceClient( + credentials=creds2, + transport=transport_name, + ) + session1 = client1.transport.list_branches._session + session2 = client2.transport.list_branches._session + assert session1 != session2 + session1 = client1.transport.get_branch._session + session2 = client2.transport.get_branch._session + assert session1 != session2 + + +def test_branch_service_grpc_transport_channel(): + channel = grpc.secure_channel("http://localhost/", grpc.local_channel_credentials()) + + # Check that channel is used if provided. + transport = transports.BranchServiceGrpcTransport( + host="squid.clam.whelk", + channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None + + +def test_branch_service_grpc_asyncio_transport_channel(): + channel = aio.secure_channel("http://localhost/", grpc.local_channel_credentials()) + + # Check that channel is used if provided. 
+ transport = transports.BranchServiceGrpcAsyncIOTransport( + host="squid.clam.whelk", + channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. +@pytest.mark.parametrize( + "transport_class", + [ + transports.BranchServiceGrpcTransport, + transports.BranchServiceGrpcAsyncIOTransport, + ], +) +def test_branch_service_transport_channel_mtls_with_client_cert_source(transport_class): + with mock.patch( + "grpc.ssl_channel_credentials", autospec=True + ) as grpc_ssl_channel_cred: + with mock.patch.object( + transport_class, "create_channel" + ) as grpc_create_channel: + mock_ssl_cred = mock.Mock() + grpc_ssl_channel_cred.return_value = mock_ssl_cred + + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + + cred = ga_credentials.AnonymousCredentials() + with pytest.warns(DeprecationWarning): + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (cred, None) + transport = transport_class( + host="squid.clam.whelk", + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=client_cert_source_callback, + ) + adc.assert_called_once() + + grpc_ssl_channel_cred.assert_called_once_with( + certificate_chain=b"cert bytes", private_key=b"key bytes" + ) + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + assert transport._ssl_channel_credentials == mock_ssl_cred + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. 
+@pytest.mark.parametrize( + "transport_class", + [ + transports.BranchServiceGrpcTransport, + transports.BranchServiceGrpcAsyncIOTransport, + ], +) +def test_branch_service_transport_channel_mtls_with_adc(transport_class): + mock_ssl_cred = mock.Mock() + with mock.patch.multiple( + "google.auth.transport.grpc.SslCredentials", + __init__=mock.Mock(return_value=None), + ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), + ): + with mock.patch.object( + transport_class, "create_channel" + ) as grpc_create_channel: + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + mock_cred = mock.Mock() + + with pytest.warns(DeprecationWarning): + transport = transport_class( + host="squid.clam.whelk", + credentials=mock_cred, + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=None, + ) + + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=mock_cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + + +def test_branch_path(): + project = "squid" + location = "clam" + catalog = "whelk" + branch = "octopus" + expected = "projects/{project}/locations/{location}/catalogs/{catalog}/branches/{branch}".format( + project=project, + location=location, + catalog=catalog, + branch=branch, + ) + actual = BranchServiceClient.branch_path(project, location, catalog, branch) + assert expected == actual + + +def test_parse_branch_path(): + expected = { + "project": "oyster", + "location": "nudibranch", + "catalog": "cuttlefish", + "branch": "mussel", + } + path = BranchServiceClient.branch_path(**expected) + + # Check that the path construction is reversible. + actual = BranchServiceClient.parse_branch_path(path) + assert expected == actual + + +def test_catalog_path(): + project = "winkle" + location = "nautilus" + catalog = "scallop" + expected = "projects/{project}/locations/{location}/catalogs/{catalog}".format( + project=project, + location=location, + catalog=catalog, + ) + actual = BranchServiceClient.catalog_path(project, location, catalog) + assert expected == actual + + +def test_parse_catalog_path(): + expected = { + "project": "abalone", + "location": "squid", + "catalog": "clam", + } + path = BranchServiceClient.catalog_path(**expected) + + # Check that the path construction is reversible. + actual = BranchServiceClient.parse_catalog_path(path) + assert expected == actual + + +def test_product_path(): + project = "whelk" + location = "octopus" + catalog = "oyster" + branch = "nudibranch" + product = "cuttlefish" + expected = "projects/{project}/locations/{location}/catalogs/{catalog}/branches/{branch}/products/{product}".format( + project=project, + location=location, + catalog=catalog, + branch=branch, + product=product, + ) + actual = BranchServiceClient.product_path( + project, location, catalog, branch, product + ) + assert expected == actual + + +def test_parse_product_path(): + expected = { + "project": "mussel", + "location": "winkle", + "catalog": "nautilus", + "branch": "scallop", + "product": "abalone", + } + path = BranchServiceClient.product_path(**expected) + + # Check that the path construction is reversible. 
+ actual = BranchServiceClient.parse_product_path(path) + assert expected == actual + + +def test_common_billing_account_path(): + billing_account = "squid" + expected = "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + actual = BranchServiceClient.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "clam", + } + path = BranchServiceClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. + actual = BranchServiceClient.parse_common_billing_account_path(path) + assert expected == actual + + +def test_common_folder_path(): + folder = "whelk" + expected = "folders/{folder}".format( + folder=folder, + ) + actual = BranchServiceClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "octopus", + } + path = BranchServiceClient.common_folder_path(**expected) + + # Check that the path construction is reversible. + actual = BranchServiceClient.parse_common_folder_path(path) + assert expected == actual + + +def test_common_organization_path(): + organization = "oyster" + expected = "organizations/{organization}".format( + organization=organization, + ) + actual = BranchServiceClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "nudibranch", + } + path = BranchServiceClient.common_organization_path(**expected) + + # Check that the path construction is reversible. + actual = BranchServiceClient.parse_common_organization_path(path) + assert expected == actual + + +def test_common_project_path(): + project = "cuttlefish" + expected = "projects/{project}".format( + project=project, + ) + actual = BranchServiceClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "mussel", + } + path = BranchServiceClient.common_project_path(**expected) + + # Check that the path construction is reversible. + actual = BranchServiceClient.parse_common_project_path(path) + assert expected == actual + + +def test_common_location_path(): + project = "winkle" + location = "nautilus" + expected = "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) + actual = BranchServiceClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "scallop", + "location": "abalone", + } + path = BranchServiceClient.common_location_path(**expected) + + # Check that the path construction is reversible. 
+ actual = BranchServiceClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object( + transports.BranchServiceTransport, "_prep_wrapped_messages" + ) as prep: + client = BranchServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object( + transports.BranchServiceTransport, "_prep_wrapped_messages" + ) as prep: + transport_class = BranchServiceClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + +@pytest.mark.asyncio +async def test_transport_close_async(): + client = BranchServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + with mock.patch.object( + type(getattr(client.transport, "grpc_channel")), "close" + ) as close: + async with client: + close.assert_not_called() + close.assert_called_once() + + +def test_get_operation_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.GetOperationRequest +): + client = BranchServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict( + { + "name": "projects/sample1/locations/sample2/catalogs/sample3/branches/sample4/operations/sample5" + }, + request, + ) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_operation(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.GetOperationRequest, + dict, + ], +) +def test_get_operation_rest(request_type): + client = BranchServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = { + "name": "projects/sample1/locations/sample2/catalogs/sample3/branches/sample4/operations/sample5" + } + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_operation(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + + +def test_list_operations_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.ListOperationsRequest +): + client = BranchServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2/catalogs/sample3"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. 
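# ---------------------------------------------------------------------------
# Editor's note: illustrative sketch, not part of the patch above or below.
# The *_rest tests fake HTTP traffic by patching requests.Session.request and
# returning a hand-built Response whose _content carries the JSON payload.
# The same pattern in miniature (fetch_operation and the URL are hypothetical
# stand-ins, not the generated client):
import json
from unittest import mock

from requests import Response, Session


def fetch_operation(name: str) -> dict:
    # Stand-in for a client method: one HTTP GET issued through a Session.
    resp = Session().request("GET", "https://retail.googleapis.com/v2alpha/" + name)
    resp.raise_for_status()
    return resp.json()


def test_fetch_operation_mocked():
    fake = Response()
    fake.status_code = 200
    fake._content = json.dumps({"name": "operations/sample1", "done": True}).encode(
        "utf-8"
    )
    with mock.patch.object(Session, "request", return_value=fake):
        assert fetch_operation("operations/sample1")["done"] is True
# ---------------------------------------------------------------------------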
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_operations(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.ListOperationsRequest, + dict, + ], +) +def test_list_operations_rest(request_type): + client = BranchServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {"name": "projects/sample1/locations/sample2/catalogs/sample3"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.ListOperationsResponse() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_operations(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) + + +def test_get_operation(transport: str = "grpc"): + client = BranchServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.GetOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation() + response = client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + + +@pytest.mark.asyncio +async def test_get_operation_async(transport: str = "grpc_asyncio"): + client = BranchServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.GetOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + response = await client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, operations_pb2.Operation) + + +def test_get_operation_field_headers(): + client = BranchServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.GetOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + call.return_value = operations_pb2.Operation() + + client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_operation_field_headers_async(): + client = BranchServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.GetOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + await client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_get_operation_from_dict(): + client = BranchServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation() + + response = client.get_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_get_operation_from_dict_async(): + client = BranchServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + response = await client.get_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_list_operations(transport: str = "grpc"): + client = BranchServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.ListOperationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.ListOperationsResponse() + response = client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) + + +@pytest.mark.asyncio +async def test_list_operations_async(transport: str = "grpc_asyncio"): + client = BranchServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.ListOperationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + response = await client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) + + +def test_list_operations_field_headers(): + client = BranchServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.ListOperationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + call.return_value = operations_pb2.ListOperationsResponse() + + client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_operations_field_headers_async(): + client = BranchServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.ListOperationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + await client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
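# ---------------------------------------------------------------------------
# Editor's note: illustrative sketch, not part of the patch above or below.
# The *_field_headers tests assert that the resource name from the request is
# echoed into gRPC metadata as x-goog-request-params. The helper the generated
# clients use for this is google.api_core's routing_header module (it also
# appears in the pager tests further down in this diff):
from google.api_core import gapic_v1

metadata_entry = gapic_v1.routing_header.to_grpc_metadata((("name", "locations"),))
# Produces the (key, value) pair the tests look for in kw["metadata"]:
assert metadata_entry == ("x-goog-request-params", "name=locations")
# ---------------------------------------------------------------------------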
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_list_operations_from_dict(): + client = BranchServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.ListOperationsResponse() + + response = client.list_operations( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_list_operations_from_dict_async(): + client = BranchServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + response = await client.list_operations( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_transport_close(): + transports = { + "rest": "_session", + "grpc": "_grpc_channel", + } + + for transport, close_name in transports.items(): + client = BranchServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + with mock.patch.object( + type(getattr(client.transport, close_name)), "close" + ) as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +def test_client_ctx(): + transports = [ + "rest", + "grpc", + ] + for transport in transports: + client = BranchServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + # Test client calls underlying transport. 
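# ---------------------------------------------------------------------------
# Editor's note: illustrative sketch, not part of the patch above or below.
# What test_transport_close / test_client_ctx pin down in practice: using the
# client as a context manager closes the underlying transport (gRPC channel or
# REST session) on exit. The import path below is assumed by analogy with the
# project_service imports later in this diff:
from google.auth import credentials as ga_credentials

from google.cloud.retail_v2alpha.services.branch_service import BranchServiceClient

with BranchServiceClient(
    credentials=ga_credentials.AnonymousCredentials(), transport="grpc"
) as client:
    # Issue RPCs here; the channel stays open for the duration of the block.
    pass
# On leaving the block, client.transport.close() has been called for us.
# ---------------------------------------------------------------------------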
+ with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() + + +@pytest.mark.parametrize( + "client_class,transport_class", + [ + (BranchServiceClient, transports.BranchServiceGrpcTransport), + (BranchServiceAsyncClient, transports.BranchServiceGrpcAsyncIOTransport), + ], +) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) diff --git a/packages/google-cloud-retail/tests/unit/gapic/retail_v2alpha/test_catalog_service.py b/packages/google-cloud-retail/tests/unit/gapic/retail_v2alpha/test_catalog_service.py index 987010b94ecf..6ff947e0dee4 100644 --- a/packages/google-cloud-retail/tests/unit/gapic/retail_v2alpha/test_catalog_service.py +++ b/packages/google-cloud-retail/tests/unit/gapic/retail_v2alpha/test_catalog_service.py @@ -1531,13 +1531,13 @@ def test_list_catalogs_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_catalogs(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 diff --git a/packages/google-cloud-retail/tests/unit/gapic/retail_v2alpha/test_control_service.py b/packages/google-cloud-retail/tests/unit/gapic/retail_v2alpha/test_control_service.py index 55d8f0680367..0583008c6d59 100644 --- a/packages/google-cloud-retail/tests/unit/gapic/retail_v2alpha/test_control_service.py +++ b/packages/google-cloud-retail/tests/unit/gapic/retail_v2alpha/test_control_service.py @@ -3183,13 +3183,13 @@ def test_list_controls_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_controls(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -3412,11 +3412,20 @@ def test_create_control_rest(request_type): "twoway_synonyms_action": { "synonyms": ["synonyms_value1", "synonyms_value2"] }, + "force_return_facet_action": { + "facet_position_adjustments": [ + {"attribute_name": "attribute_name_value", "position": 885} + ] + }, + "remove_facet_action": { + "attribute_names": ["attribute_names_value1", "attribute_names_value2"] + }, "condition": { "query_terms": [{"value": "value_value", "full_match": True}], "active_time_range": [ {"start_time": {"seconds": 751, "nanos": 543}, "end_time": {}} ], + "page_categories": ["page_categories_value1", 
"page_categories_value2"], }, }, "name": "name_value", @@ -4209,11 +4218,20 @@ def test_update_control_rest(request_type): "twoway_synonyms_action": { "synonyms": ["synonyms_value1", "synonyms_value2"] }, + "force_return_facet_action": { + "facet_position_adjustments": [ + {"attribute_name": "attribute_name_value", "position": 885} + ] + }, + "remove_facet_action": { + "attribute_names": ["attribute_names_value1", "attribute_names_value2"] + }, "condition": { "query_terms": [{"value": "value_value", "full_match": True}], "active_time_range": [ {"start_time": {"seconds": 751, "nanos": 543}, "end_time": {}} ], + "page_categories": ["page_categories_value1", "page_categories_value2"], }, }, "name": "projects/sample1/locations/sample2/catalogs/sample3/controls/sample4", diff --git a/packages/google-cloud-retail/tests/unit/gapic/retail_v2alpha/test_merchant_center_account_link_service.py b/packages/google-cloud-retail/tests/unit/gapic/retail_v2alpha/test_merchant_center_account_link_service.py index b878cf717ff2..5b409e88c2cd 100644 --- a/packages/google-cloud-retail/tests/unit/gapic/retail_v2alpha/test_merchant_center_account_link_service.py +++ b/packages/google-cloud-retail/tests/unit/gapic/retail_v2alpha/test_merchant_center_account_link_service.py @@ -2845,6 +2845,7 @@ def test_create_merchant_center_account_link_rest(request_type): ], "state": 1, "project_id": "project_id_value", + "source": "source_value", } # The version of a generated dependency at test runtime may differ from the version used during generation. # Delete any fields which are not present in the current runtime dependency diff --git a/packages/google-cloud-retail/tests/unit/gapic/retail_v2alpha/test_model_service.py b/packages/google-cloud-retail/tests/unit/gapic/retail_v2alpha/test_model_service.py index 1a5c1292d625..40f75d7aafd9 100644 --- a/packages/google-cloud-retail/tests/unit/gapic/retail_v2alpha/test_model_service.py +++ b/packages/google-cloud-retail/tests/unit/gapic/retail_v2alpha/test_model_service.py @@ -3537,13 +3537,13 @@ def test_list_models_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_models(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -4557,6 +4557,9 @@ def test_create_model_rest(request_type): ] } ], + "model_features_config": { + "frequently_bought_together_config": {"context_products_type": 1} + }, } # The version of a generated dependency at test runtime may differ from the version used during generation. # Delete any fields which are not present in the current runtime dependency @@ -6639,6 +6642,9 @@ def test_update_model_rest(request_type): ] } ], + "model_features_config": { + "frequently_bought_together_config": {"context_products_type": 1} + }, } # The version of a generated dependency at test runtime may differ from the version used during generation. 
# Delete any fields which are not present in the current runtime dependency diff --git a/packages/google-cloud-retail/tests/unit/gapic/retail_v2alpha/test_product_service.py b/packages/google-cloud-retail/tests/unit/gapic/retail_v2alpha/test_product_service.py index 72c0b2f9100f..618936a58f65 100644 --- a/packages/google-cloud-retail/tests/unit/gapic/retail_v2alpha/test_product_service.py +++ b/packages/google-cloud-retail/tests/unit/gapic/retail_v2alpha/test_product_service.py @@ -2480,13 +2480,13 @@ def test_list_products_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_products(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 diff --git a/packages/google-maps-mapsplatformdatasets/tests/unit/gapic/mapsplatformdatasets_v1alpha/test_maps_platform_datasets_v1_alpha.py b/packages/google-cloud-retail/tests/unit/gapic/retail_v2alpha/test_project_service.py similarity index 62% rename from packages/google-maps-mapsplatformdatasets/tests/unit/gapic/mapsplatformdatasets_v1alpha/test_maps_platform_datasets_v1_alpha.py rename to packages/google-cloud-retail/tests/unit/gapic/retail_v2alpha/test_project_service.py index 170ed21e2655..4eaa545f4e52 100644 --- a/packages/google-maps-mapsplatformdatasets/tests/unit/gapic/mapsplatformdatasets_v1alpha/test_maps_platform_datasets_v1_alpha.py +++ b/packages/google-cloud-retail/tests/unit/gapic/retail_v2alpha/test_project_service.py @@ -26,16 +26,26 @@ import json import math -from google.api_core import gapic_v1, grpc_helpers, grpc_helpers_async, path_template +from google.api_core import ( + future, + gapic_v1, + grpc_helpers, + grpc_helpers_async, + operation, + operations_v1, + path_template, +) from google.api_core import api_core_version, client_options from google.api_core import exceptions as core_exceptions +from google.api_core import operation_async # type: ignore import google.auth from google.auth import credentials as ga_credentials from google.auth.exceptions import MutualTLSChannelError +from google.cloud.location import locations_pb2 +from google.longrunning import operations_pb2 # type: ignore from google.oauth2 import service_account from google.protobuf import field_mask_pb2 # type: ignore from google.protobuf import json_format -from google.protobuf import timestamp_pb2 # type: ignore import grpc from grpc.experimental import aio from proto.marshal.rules import wrappers @@ -44,16 +54,15 @@ from requests import PreparedRequest, Request, Response from requests.sessions import Session -from google.maps.mapsplatformdatasets_v1alpha.services.maps_platform_datasets_v1_alpha import ( - MapsPlatformDatasetsV1AlphaAsyncClient, - MapsPlatformDatasetsV1AlphaClient, - pagers, +from google.cloud.retail_v2alpha.services.project_service import ( + ProjectServiceAsyncClient, + ProjectServiceClient, transports, ) -from google.maps.mapsplatformdatasets_v1alpha.types import dataset as gmm_dataset -from google.maps.mapsplatformdatasets_v1alpha.types import maps_platform_datasets -from google.maps.mapsplatformdatasets_v1alpha.types import data_source -from google.maps.mapsplatformdatasets_v1alpha.types import dataset +from google.cloud.retail_v2alpha.types import common +from google.cloud.retail_v2alpha.types import project +from 
google.cloud.retail_v2alpha.types import project as gcr_project +from google.cloud.retail_v2alpha.types import project_service def client_cert_source_callback(): @@ -89,47 +98,40 @@ def test__get_default_mtls_endpoint(): sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" non_googleapi = "api.example.com" - assert MapsPlatformDatasetsV1AlphaClient._get_default_mtls_endpoint(None) is None + assert ProjectServiceClient._get_default_mtls_endpoint(None) is None assert ( - MapsPlatformDatasetsV1AlphaClient._get_default_mtls_endpoint(api_endpoint) + ProjectServiceClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint ) assert ( - MapsPlatformDatasetsV1AlphaClient._get_default_mtls_endpoint(api_mtls_endpoint) + ProjectServiceClient._get_default_mtls_endpoint(api_mtls_endpoint) == api_mtls_endpoint ) assert ( - MapsPlatformDatasetsV1AlphaClient._get_default_mtls_endpoint(sandbox_endpoint) + ProjectServiceClient._get_default_mtls_endpoint(sandbox_endpoint) == sandbox_mtls_endpoint ) assert ( - MapsPlatformDatasetsV1AlphaClient._get_default_mtls_endpoint( - sandbox_mtls_endpoint - ) + ProjectServiceClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) == sandbox_mtls_endpoint ) assert ( - MapsPlatformDatasetsV1AlphaClient._get_default_mtls_endpoint(non_googleapi) - == non_googleapi + ProjectServiceClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi ) def test__read_environment_variables(): - assert MapsPlatformDatasetsV1AlphaClient._read_environment_variables() == ( - False, - "auto", - None, - ) + assert ProjectServiceClient._read_environment_variables() == (False, "auto", None) with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - assert MapsPlatformDatasetsV1AlphaClient._read_environment_variables() == ( + assert ProjectServiceClient._read_environment_variables() == ( True, "auto", None, ) with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): - assert MapsPlatformDatasetsV1AlphaClient._read_environment_variables() == ( + assert ProjectServiceClient._read_environment_variables() == ( False, "auto", None, @@ -139,28 +141,28 @@ def test__read_environment_variables(): os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} ): with pytest.raises(ValueError) as excinfo: - MapsPlatformDatasetsV1AlphaClient._read_environment_variables() + ProjectServiceClient._read_environment_variables() assert ( str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" ) with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - assert MapsPlatformDatasetsV1AlphaClient._read_environment_variables() == ( + assert ProjectServiceClient._read_environment_variables() == ( False, "never", None, ) with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - assert MapsPlatformDatasetsV1AlphaClient._read_environment_variables() == ( + assert ProjectServiceClient._read_environment_variables() == ( False, "always", None, ) with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}): - assert MapsPlatformDatasetsV1AlphaClient._read_environment_variables() == ( + assert ProjectServiceClient._read_environment_variables() == ( False, "auto", None, @@ -168,14 +170,14 @@ def test__read_environment_variables(): with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): with pytest.raises(MutualTLSChannelError) as excinfo: - MapsPlatformDatasetsV1AlphaClient._read_environment_variables() + 
ProjectServiceClient._read_environment_variables() assert ( str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" ) with mock.patch.dict(os.environ, {"GOOGLE_CLOUD_UNIVERSE_DOMAIN": "foo.com"}): - assert MapsPlatformDatasetsV1AlphaClient._read_environment_variables() == ( + assert ProjectServiceClient._read_environment_variables() == ( False, "auto", "foo.com", @@ -186,19 +188,13 @@ def test__get_client_cert_source(): mock_provided_cert_source = mock.Mock() mock_default_cert_source = mock.Mock() + assert ProjectServiceClient._get_client_cert_source(None, False) is None assert ( - MapsPlatformDatasetsV1AlphaClient._get_client_cert_source(None, False) is None - ) - assert ( - MapsPlatformDatasetsV1AlphaClient._get_client_cert_source( - mock_provided_cert_source, False - ) + ProjectServiceClient._get_client_cert_source(mock_provided_cert_source, False) is None ) assert ( - MapsPlatformDatasetsV1AlphaClient._get_client_cert_source( - mock_provided_cert_source, True - ) + ProjectServiceClient._get_client_cert_source(mock_provided_cert_source, True) == mock_provided_cert_source ) @@ -210,11 +206,11 @@ def test__get_client_cert_source(): return_value=mock_default_cert_source, ): assert ( - MapsPlatformDatasetsV1AlphaClient._get_client_cert_source(None, True) + ProjectServiceClient._get_client_cert_source(None, True) is mock_default_cert_source ) assert ( - MapsPlatformDatasetsV1AlphaClient._get_client_cert_source( + ProjectServiceClient._get_client_cert_source( mock_provided_cert_source, "true" ) is mock_provided_cert_source @@ -222,74 +218,64 @@ def test__get_client_cert_source(): @mock.patch.object( - MapsPlatformDatasetsV1AlphaClient, + ProjectServiceClient, "_DEFAULT_ENDPOINT_TEMPLATE", - modify_default_endpoint_template(MapsPlatformDatasetsV1AlphaClient), + modify_default_endpoint_template(ProjectServiceClient), ) @mock.patch.object( - MapsPlatformDatasetsV1AlphaAsyncClient, + ProjectServiceAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", - modify_default_endpoint_template(MapsPlatformDatasetsV1AlphaAsyncClient), + modify_default_endpoint_template(ProjectServiceAsyncClient), ) def test__get_api_endpoint(): api_override = "foo.com" mock_client_cert_source = mock.Mock() - default_universe = MapsPlatformDatasetsV1AlphaClient._DEFAULT_UNIVERSE - default_endpoint = ( - MapsPlatformDatasetsV1AlphaClient._DEFAULT_ENDPOINT_TEMPLATE.format( - UNIVERSE_DOMAIN=default_universe - ) + default_universe = ProjectServiceClient._DEFAULT_UNIVERSE + default_endpoint = ProjectServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=default_universe ) mock_universe = "bar.com" - mock_endpoint = MapsPlatformDatasetsV1AlphaClient._DEFAULT_ENDPOINT_TEMPLATE.format( + mock_endpoint = ProjectServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( UNIVERSE_DOMAIN=mock_universe ) assert ( - MapsPlatformDatasetsV1AlphaClient._get_api_endpoint( + ProjectServiceClient._get_api_endpoint( api_override, mock_client_cert_source, default_universe, "always" ) == api_override ) assert ( - MapsPlatformDatasetsV1AlphaClient._get_api_endpoint( + ProjectServiceClient._get_api_endpoint( None, mock_client_cert_source, default_universe, "auto" ) - == MapsPlatformDatasetsV1AlphaClient.DEFAULT_MTLS_ENDPOINT + == ProjectServiceClient.DEFAULT_MTLS_ENDPOINT ) assert ( - MapsPlatformDatasetsV1AlphaClient._get_api_endpoint( - None, None, default_universe, "auto" - ) + ProjectServiceClient._get_api_endpoint(None, None, default_universe, "auto") == default_endpoint ) assert ( - 
MapsPlatformDatasetsV1AlphaClient._get_api_endpoint( - None, None, default_universe, "always" - ) - == MapsPlatformDatasetsV1AlphaClient.DEFAULT_MTLS_ENDPOINT + ProjectServiceClient._get_api_endpoint(None, None, default_universe, "always") + == ProjectServiceClient.DEFAULT_MTLS_ENDPOINT ) assert ( - MapsPlatformDatasetsV1AlphaClient._get_api_endpoint( + ProjectServiceClient._get_api_endpoint( None, mock_client_cert_source, default_universe, "always" ) - == MapsPlatformDatasetsV1AlphaClient.DEFAULT_MTLS_ENDPOINT + == ProjectServiceClient.DEFAULT_MTLS_ENDPOINT ) assert ( - MapsPlatformDatasetsV1AlphaClient._get_api_endpoint( - None, None, mock_universe, "never" - ) + ProjectServiceClient._get_api_endpoint(None, None, mock_universe, "never") == mock_endpoint ) assert ( - MapsPlatformDatasetsV1AlphaClient._get_api_endpoint( - None, None, default_universe, "never" - ) + ProjectServiceClient._get_api_endpoint(None, None, default_universe, "never") == default_endpoint ) with pytest.raises(MutualTLSChannelError) as excinfo: - MapsPlatformDatasetsV1AlphaClient._get_api_endpoint( + ProjectServiceClient._get_api_endpoint( None, mock_client_cert_source, mock_universe, "auto" ) assert ( @@ -303,40 +289,30 @@ def test__get_universe_domain(): universe_domain_env = "bar.com" assert ( - MapsPlatformDatasetsV1AlphaClient._get_universe_domain( + ProjectServiceClient._get_universe_domain( client_universe_domain, universe_domain_env ) == client_universe_domain ) assert ( - MapsPlatformDatasetsV1AlphaClient._get_universe_domain( - None, universe_domain_env - ) + ProjectServiceClient._get_universe_domain(None, universe_domain_env) == universe_domain_env ) assert ( - MapsPlatformDatasetsV1AlphaClient._get_universe_domain(None, None) - == MapsPlatformDatasetsV1AlphaClient._DEFAULT_UNIVERSE + ProjectServiceClient._get_universe_domain(None, None) + == ProjectServiceClient._DEFAULT_UNIVERSE ) with pytest.raises(ValueError) as excinfo: - MapsPlatformDatasetsV1AlphaClient._get_universe_domain("", None) + ProjectServiceClient._get_universe_domain("", None) assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
@pytest.mark.parametrize( "client_class,transport_class,transport_name", [ - ( - MapsPlatformDatasetsV1AlphaClient, - transports.MapsPlatformDatasetsV1AlphaGrpcTransport, - "grpc", - ), - ( - MapsPlatformDatasetsV1AlphaClient, - transports.MapsPlatformDatasetsV1AlphaRestTransport, - "rest", - ), + (ProjectServiceClient, transports.ProjectServiceGrpcTransport, "grpc"), + (ProjectServiceClient, transports.ProjectServiceRestTransport, "rest"), ], ) def test__validate_universe_domain(client_class, transport_class, transport_name): @@ -415,14 +391,12 @@ def test__validate_universe_domain(client_class, transport_class, transport_name @pytest.mark.parametrize( "client_class,transport_name", [ - (MapsPlatformDatasetsV1AlphaClient, "grpc"), - (MapsPlatformDatasetsV1AlphaAsyncClient, "grpc_asyncio"), - (MapsPlatformDatasetsV1AlphaClient, "rest"), + (ProjectServiceClient, "grpc"), + (ProjectServiceAsyncClient, "grpc_asyncio"), + (ProjectServiceClient, "rest"), ], ) -def test_maps_platform_datasets_v1_alpha_client_from_service_account_info( - client_class, transport_name -): +def test_project_service_client_from_service_account_info(client_class, transport_name): creds = ga_credentials.AnonymousCredentials() with mock.patch.object( service_account.Credentials, "from_service_account_info" @@ -434,21 +408,21 @@ def test_maps_platform_datasets_v1_alpha_client_from_service_account_info( assert isinstance(client, client_class) assert client.transport._host == ( - "mapsplatformdatasets.googleapis.com:443" + "retail.googleapis.com:443" if transport_name in ["grpc", "grpc_asyncio"] - else "https://mapsplatformdatasets.googleapis.com" + else "https://retail.googleapis.com" ) @pytest.mark.parametrize( "transport_class,transport_name", [ - (transports.MapsPlatformDatasetsV1AlphaGrpcTransport, "grpc"), - (transports.MapsPlatformDatasetsV1AlphaGrpcAsyncIOTransport, "grpc_asyncio"), - (transports.MapsPlatformDatasetsV1AlphaRestTransport, "rest"), + (transports.ProjectServiceGrpcTransport, "grpc"), + (transports.ProjectServiceGrpcAsyncIOTransport, "grpc_asyncio"), + (transports.ProjectServiceRestTransport, "rest"), ], ) -def test_maps_platform_datasets_v1_alpha_client_service_account_always_use_jwt( +def test_project_service_client_service_account_always_use_jwt( transport_class, transport_name ): with mock.patch.object( @@ -469,14 +443,12 @@ def test_maps_platform_datasets_v1_alpha_client_service_account_always_use_jwt( @pytest.mark.parametrize( "client_class,transport_name", [ - (MapsPlatformDatasetsV1AlphaClient, "grpc"), - (MapsPlatformDatasetsV1AlphaAsyncClient, "grpc_asyncio"), - (MapsPlatformDatasetsV1AlphaClient, "rest"), + (ProjectServiceClient, "grpc"), + (ProjectServiceAsyncClient, "grpc_asyncio"), + (ProjectServiceClient, "rest"), ], ) -def test_maps_platform_datasets_v1_alpha_client_from_service_account_file( - client_class, transport_name -): +def test_project_service_client_from_service_account_file(client_class, transport_name): creds = ga_credentials.AnonymousCredentials() with mock.patch.object( service_account.Credentials, "from_service_account_file" @@ -495,69 +467,57 @@ def test_maps_platform_datasets_v1_alpha_client_from_service_account_file( assert isinstance(client, client_class) assert client.transport._host == ( - "mapsplatformdatasets.googleapis.com:443" + "retail.googleapis.com:443" if transport_name in ["grpc", "grpc_asyncio"] - else 
"https://mapsplatformdatasets.googleapis.com" + else "https://retail.googleapis.com" ) -def test_maps_platform_datasets_v1_alpha_client_get_transport_class(): - transport = MapsPlatformDatasetsV1AlphaClient.get_transport_class() +def test_project_service_client_get_transport_class(): + transport = ProjectServiceClient.get_transport_class() available_transports = [ - transports.MapsPlatformDatasetsV1AlphaGrpcTransport, - transports.MapsPlatformDatasetsV1AlphaRestTransport, + transports.ProjectServiceGrpcTransport, + transports.ProjectServiceRestTransport, ] assert transport in available_transports - transport = MapsPlatformDatasetsV1AlphaClient.get_transport_class("grpc") - assert transport == transports.MapsPlatformDatasetsV1AlphaGrpcTransport + transport = ProjectServiceClient.get_transport_class("grpc") + assert transport == transports.ProjectServiceGrpcTransport @pytest.mark.parametrize( "client_class,transport_class,transport_name", [ + (ProjectServiceClient, transports.ProjectServiceGrpcTransport, "grpc"), ( - MapsPlatformDatasetsV1AlphaClient, - transports.MapsPlatformDatasetsV1AlphaGrpcTransport, - "grpc", - ), - ( - MapsPlatformDatasetsV1AlphaAsyncClient, - transports.MapsPlatformDatasetsV1AlphaGrpcAsyncIOTransport, + ProjectServiceAsyncClient, + transports.ProjectServiceGrpcAsyncIOTransport, "grpc_asyncio", ), - ( - MapsPlatformDatasetsV1AlphaClient, - transports.MapsPlatformDatasetsV1AlphaRestTransport, - "rest", - ), + (ProjectServiceClient, transports.ProjectServiceRestTransport, "rest"), ], ) @mock.patch.object( - MapsPlatformDatasetsV1AlphaClient, + ProjectServiceClient, "_DEFAULT_ENDPOINT_TEMPLATE", - modify_default_endpoint_template(MapsPlatformDatasetsV1AlphaClient), + modify_default_endpoint_template(ProjectServiceClient), ) @mock.patch.object( - MapsPlatformDatasetsV1AlphaAsyncClient, + ProjectServiceAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", - modify_default_endpoint_template(MapsPlatformDatasetsV1AlphaAsyncClient), + modify_default_endpoint_template(ProjectServiceAsyncClient), ) -def test_maps_platform_datasets_v1_alpha_client_client_options( +def test_project_service_client_client_options( client_class, transport_class, transport_name ): # Check that if channel is provided we won't create a new one. - with mock.patch.object( - MapsPlatformDatasetsV1AlphaClient, "get_transport_class" - ) as gtc: + with mock.patch.object(ProjectServiceClient, "get_transport_class") as gtc: transport = transport_class(credentials=ga_credentials.AnonymousCredentials()) client = client_class(transport=transport) gtc.assert_not_called() # Check that if channel is provided via str we will create a new one. 
- with mock.patch.object( - MapsPlatformDatasetsV1AlphaClient, "get_transport_class" - ) as gtc: + with mock.patch.object(ProjectServiceClient, "get_transport_class") as gtc: client = client_class(transport=transport_name) gtc.assert_called() @@ -680,56 +640,36 @@ def test_maps_platform_datasets_v1_alpha_client_client_options( @pytest.mark.parametrize( "client_class,transport_class,transport_name,use_client_cert_env", [ + (ProjectServiceClient, transports.ProjectServiceGrpcTransport, "grpc", "true"), ( - MapsPlatformDatasetsV1AlphaClient, - transports.MapsPlatformDatasetsV1AlphaGrpcTransport, - "grpc", - "true", - ), - ( - MapsPlatformDatasetsV1AlphaAsyncClient, - transports.MapsPlatformDatasetsV1AlphaGrpcAsyncIOTransport, + ProjectServiceAsyncClient, + transports.ProjectServiceGrpcAsyncIOTransport, "grpc_asyncio", "true", ), + (ProjectServiceClient, transports.ProjectServiceGrpcTransport, "grpc", "false"), ( - MapsPlatformDatasetsV1AlphaClient, - transports.MapsPlatformDatasetsV1AlphaGrpcTransport, - "grpc", - "false", - ), - ( - MapsPlatformDatasetsV1AlphaAsyncClient, - transports.MapsPlatformDatasetsV1AlphaGrpcAsyncIOTransport, + ProjectServiceAsyncClient, + transports.ProjectServiceGrpcAsyncIOTransport, "grpc_asyncio", "false", ), - ( - MapsPlatformDatasetsV1AlphaClient, - transports.MapsPlatformDatasetsV1AlphaRestTransport, - "rest", - "true", - ), - ( - MapsPlatformDatasetsV1AlphaClient, - transports.MapsPlatformDatasetsV1AlphaRestTransport, - "rest", - "false", - ), + (ProjectServiceClient, transports.ProjectServiceRestTransport, "rest", "true"), + (ProjectServiceClient, transports.ProjectServiceRestTransport, "rest", "false"), ], ) @mock.patch.object( - MapsPlatformDatasetsV1AlphaClient, + ProjectServiceClient, "_DEFAULT_ENDPOINT_TEMPLATE", - modify_default_endpoint_template(MapsPlatformDatasetsV1AlphaClient), + modify_default_endpoint_template(ProjectServiceClient), ) @mock.patch.object( - MapsPlatformDatasetsV1AlphaAsyncClient, + ProjectServiceAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", - modify_default_endpoint_template(MapsPlatformDatasetsV1AlphaAsyncClient), + modify_default_endpoint_template(ProjectServiceAsyncClient), ) @mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) -def test_maps_platform_datasets_v1_alpha_client_mtls_env_auto( +def test_project_service_client_mtls_env_auto( client_class, transport_class, transport_name, use_client_cert_env ): # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default @@ -832,22 +772,19 @@ def test_maps_platform_datasets_v1_alpha_client_mtls_env_auto( @pytest.mark.parametrize( - "client_class", - [MapsPlatformDatasetsV1AlphaClient, MapsPlatformDatasetsV1AlphaAsyncClient], + "client_class", [ProjectServiceClient, ProjectServiceAsyncClient] ) @mock.patch.object( - MapsPlatformDatasetsV1AlphaClient, + ProjectServiceClient, "DEFAULT_ENDPOINT", - modify_default_endpoint(MapsPlatformDatasetsV1AlphaClient), + modify_default_endpoint(ProjectServiceClient), ) @mock.patch.object( - MapsPlatformDatasetsV1AlphaAsyncClient, + ProjectServiceAsyncClient, "DEFAULT_ENDPOINT", - modify_default_endpoint(MapsPlatformDatasetsV1AlphaAsyncClient), + modify_default_endpoint(ProjectServiceAsyncClient), ) -def test_maps_platform_datasets_v1_alpha_client_get_mtls_endpoint_and_cert_source( - client_class, -): +def test_project_service_client_get_mtls_endpoint_and_cert_source(client_class): mock_client_cert_source = mock.Mock() # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". 
@@ -939,30 +876,27 @@ def test_maps_platform_datasets_v1_alpha_client_get_mtls_endpoint_and_cert_sourc @pytest.mark.parametrize( - "client_class", - [MapsPlatformDatasetsV1AlphaClient, MapsPlatformDatasetsV1AlphaAsyncClient], + "client_class", [ProjectServiceClient, ProjectServiceAsyncClient] ) @mock.patch.object( - MapsPlatformDatasetsV1AlphaClient, + ProjectServiceClient, "_DEFAULT_ENDPOINT_TEMPLATE", - modify_default_endpoint_template(MapsPlatformDatasetsV1AlphaClient), + modify_default_endpoint_template(ProjectServiceClient), ) @mock.patch.object( - MapsPlatformDatasetsV1AlphaAsyncClient, + ProjectServiceAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", - modify_default_endpoint_template(MapsPlatformDatasetsV1AlphaAsyncClient), + modify_default_endpoint_template(ProjectServiceAsyncClient), ) -def test_maps_platform_datasets_v1_alpha_client_client_api_endpoint(client_class): +def test_project_service_client_client_api_endpoint(client_class): mock_client_cert_source = client_cert_source_callback api_override = "foo.com" - default_universe = MapsPlatformDatasetsV1AlphaClient._DEFAULT_UNIVERSE - default_endpoint = ( - MapsPlatformDatasetsV1AlphaClient._DEFAULT_ENDPOINT_TEMPLATE.format( - UNIVERSE_DOMAIN=default_universe - ) + default_universe = ProjectServiceClient._DEFAULT_UNIVERSE + default_endpoint = ProjectServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=default_universe ) mock_universe = "bar.com" - mock_endpoint = MapsPlatformDatasetsV1AlphaClient._DEFAULT_ENDPOINT_TEMPLATE.format( + mock_endpoint = ProjectServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( UNIVERSE_DOMAIN=mock_universe ) @@ -1030,24 +964,16 @@ def test_maps_platform_datasets_v1_alpha_client_client_api_endpoint(client_class @pytest.mark.parametrize( "client_class,transport_class,transport_name", [ + (ProjectServiceClient, transports.ProjectServiceGrpcTransport, "grpc"), ( - MapsPlatformDatasetsV1AlphaClient, - transports.MapsPlatformDatasetsV1AlphaGrpcTransport, - "grpc", - ), - ( - MapsPlatformDatasetsV1AlphaAsyncClient, - transports.MapsPlatformDatasetsV1AlphaGrpcAsyncIOTransport, + ProjectServiceAsyncClient, + transports.ProjectServiceGrpcAsyncIOTransport, "grpc_asyncio", ), - ( - MapsPlatformDatasetsV1AlphaClient, - transports.MapsPlatformDatasetsV1AlphaRestTransport, - "rest", - ), + (ProjectServiceClient, transports.ProjectServiceRestTransport, "rest"), ], ) -def test_maps_platform_datasets_v1_alpha_client_client_options_scopes( +def test_project_service_client_client_options_scopes( client_class, transport_class, transport_name ): # Check the case scopes are provided. 
@@ -1076,26 +1002,21 @@ def test_maps_platform_datasets_v1_alpha_client_client_options_scopes( "client_class,transport_class,transport_name,grpc_helpers", [ ( - MapsPlatformDatasetsV1AlphaClient, - transports.MapsPlatformDatasetsV1AlphaGrpcTransport, + ProjectServiceClient, + transports.ProjectServiceGrpcTransport, "grpc", grpc_helpers, ), ( - MapsPlatformDatasetsV1AlphaAsyncClient, - transports.MapsPlatformDatasetsV1AlphaGrpcAsyncIOTransport, + ProjectServiceAsyncClient, + transports.ProjectServiceGrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async, ), - ( - MapsPlatformDatasetsV1AlphaClient, - transports.MapsPlatformDatasetsV1AlphaRestTransport, - "rest", - None, - ), + (ProjectServiceClient, transports.ProjectServiceRestTransport, "rest", None), ], ) -def test_maps_platform_datasets_v1_alpha_client_client_options_credentials_file( +def test_project_service_client_client_options_credentials_file( client_class, transport_class, transport_name, grpc_helpers ): # Check the case credentials file is provided. @@ -1119,12 +1040,12 @@ def test_maps_platform_datasets_v1_alpha_client_client_options_credentials_file( ) -def test_maps_platform_datasets_v1_alpha_client_client_options_from_dict(): +def test_project_service_client_client_options_from_dict(): with mock.patch( - "google.maps.mapsplatformdatasets_v1alpha.services.maps_platform_datasets_v1_alpha.transports.MapsPlatformDatasetsV1AlphaGrpcTransport.__init__" + "google.cloud.retail_v2alpha.services.project_service.transports.ProjectServiceGrpcTransport.__init__" ) as grpc_transport: grpc_transport.return_value = None - client = MapsPlatformDatasetsV1AlphaClient( + client = ProjectServiceClient( client_options={"api_endpoint": "squid.clam.whelk"} ) grpc_transport.assert_called_once_with( @@ -1144,20 +1065,20 @@ def test_maps_platform_datasets_v1_alpha_client_client_options_from_dict(): "client_class,transport_class,transport_name,grpc_helpers", [ ( - MapsPlatformDatasetsV1AlphaClient, - transports.MapsPlatformDatasetsV1AlphaGrpcTransport, + ProjectServiceClient, + transports.ProjectServiceGrpcTransport, "grpc", grpc_helpers, ), ( - MapsPlatformDatasetsV1AlphaAsyncClient, - transports.MapsPlatformDatasetsV1AlphaGrpcAsyncIOTransport, + ProjectServiceAsyncClient, + transports.ProjectServiceGrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async, ), ], ) -def test_maps_platform_datasets_v1_alpha_client_create_channel_credentials_file( +def test_project_service_client_create_channel_credentials_file( client_class, transport_class, transport_name, grpc_helpers ): # Check the case credentials file is provided. 
@@ -1194,13 +1115,13 @@ def test_maps_platform_datasets_v1_alpha_client_create_channel_credentials_file( adc.return_value = (creds, None) client = client_class(client_options=options, transport=transport_name) create_channel.assert_called_with( - "mapsplatformdatasets.googleapis.com:443", + "retail.googleapis.com:443", credentials=file_creds, credentials_file=None, quota_project_id=None, default_scopes=("https://www.googleapis.com/auth/cloud-platform",), scopes=None, - default_host="mapsplatformdatasets.googleapis.com", + default_host="retail.googleapis.com", ssl_credentials=None, options=[ ("grpc.max_send_message_length", -1), @@ -1212,12 +1133,12 @@ def test_maps_platform_datasets_v1_alpha_client_create_channel_credentials_file( @pytest.mark.parametrize( "request_type", [ - maps_platform_datasets.CreateDatasetRequest, + project_service.GetProjectRequest, dict, ], ) -def test_create_dataset(request_type, transport: str = "grpc"): - client = MapsPlatformDatasetsV1AlphaClient( +def test_get_project(request_type, transport: str = "grpc"): + client = ProjectServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -1227,59 +1148,51 @@ def test_create_dataset(request_type, transport: str = "grpc"): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.create_dataset), "__call__") as call: + with mock.patch.object(type(client.transport.get_project), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = gmm_dataset.Dataset( + call.return_value = project.Project( name="name_value", - display_name="display_name_value", - description="description_value", - version_id="version_id_value", - usage=[gmm_dataset.Usage.USAGE_DATA_DRIVEN_STYLING], - status=gmm_dataset.State.STATE_IMPORTING, - version_description="version_description_value", + enrolled_solutions=[common.SolutionType.SOLUTION_TYPE_RECOMMENDATION], ) - response = client.create_dataset(request) + response = client.get_project(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = maps_platform_datasets.CreateDatasetRequest() + request = project_service.GetProjectRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, gmm_dataset.Dataset) + assert isinstance(response, project.Project) assert response.name == "name_value" - assert response.display_name == "display_name_value" - assert response.description == "description_value" - assert response.version_id == "version_id_value" - assert response.usage == [gmm_dataset.Usage.USAGE_DATA_DRIVEN_STYLING] - assert response.status == gmm_dataset.State.STATE_IMPORTING - assert response.version_description == "version_description_value" + assert response.enrolled_solutions == [ + common.SolutionType.SOLUTION_TYPE_RECOMMENDATION + ] -def test_create_dataset_empty_call(): +def test_get_project_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. - client = MapsPlatformDatasetsV1AlphaClient( + client = ProjectServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.create_dataset), "__call__") as call: + with mock.patch.object(type(client.transport.get_project), "__call__") as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.create_dataset() + client.get_project() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == maps_platform_datasets.CreateDatasetRequest() + assert args[0] == project_service.GetProjectRequest() -def test_create_dataset_non_empty_request_with_auto_populated_field(): +def test_get_project_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. - client = MapsPlatformDatasetsV1AlphaClient( + client = ProjectServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) @@ -1287,28 +1200,28 @@ def test_create_dataset_non_empty_request_with_auto_populated_field(): # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = maps_platform_datasets.CreateDatasetRequest( - parent="parent_value", + request = project_service.GetProjectRequest( + name="name_value", ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.create_dataset), "__call__") as call: + with mock.patch.object(type(client.transport.get_project), "__call__") as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.create_dataset(request=request) + client.get_project(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == maps_platform_datasets.CreateDatasetRequest( - parent="parent_value", + assert args[0] == project_service.GetProjectRequest( + name="name_value", ) -def test_create_dataset_use_cached_wrapped_rpc(): +def test_get_project_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = MapsPlatformDatasetsV1AlphaClient( + client = ProjectServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) @@ -1318,21 +1231,21 @@ def test_create_dataset_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.create_dataset in client._transport._wrapped_methods + assert client._transport.get_project in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.create_dataset] = mock_rpc + client._transport._wrapped_methods[client._transport.get_project] = mock_rpc request = {} - client.create_dataset(request) + client.get_project(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.create_dataset(request) + client.get_project(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -1340,42 +1253,37 @@ def test_create_dataset_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_create_dataset_empty_call_async(): +async def test_get_project_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. - client = MapsPlatformDatasetsV1AlphaAsyncClient( + client = ProjectServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), transport="grpc_asyncio", ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.create_dataset), "__call__") as call: + with mock.patch.object(type(client.transport.get_project), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - gmm_dataset.Dataset( + project.Project( name="name_value", - display_name="display_name_value", - description="description_value", - version_id="version_id_value", - usage=[gmm_dataset.Usage.USAGE_DATA_DRIVEN_STYLING], - status=gmm_dataset.State.STATE_IMPORTING, - version_description="version_description_value", + enrolled_solutions=[common.SolutionType.SOLUTION_TYPE_RECOMMENDATION], ) ) - response = await client.create_dataset() + response = await client.get_project() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == maps_platform_datasets.CreateDatasetRequest() + assert args[0] == project_service.GetProjectRequest() @pytest.mark.asyncio -async def test_create_dataset_async_use_cached_wrapped_rpc( +async def test_get_project_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = MapsPlatformDatasetsV1AlphaAsyncClient( + client = ProjectServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -1386,7 +1294,7 @@ async def test_create_dataset_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.create_dataset + client._client._transport.get_project in client._client._transport._wrapped_methods ) @@ -1398,16 +1306,16 @@ def __await__(self): mock_object = AwaitableMock() client._client._transport._wrapped_methods[ - client._client._transport.create_dataset + client._client._transport.get_project ] = mock_object request = {} - await client.create_dataset(request) + await client.get_project(request) # Establish that the underlying gRPC stub method was called. 
assert mock_object.call_count == 1 - await client.create_dataset(request) + await client.get_project(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -1415,11 +1323,10 @@ def __await__(self): @pytest.mark.asyncio -async def test_create_dataset_async( - transport: str = "grpc_asyncio", - request_type=maps_platform_datasets.CreateDatasetRequest, +async def test_get_project_async( + transport: str = "grpc_asyncio", request_type=project_service.GetProjectRequest ): - client = MapsPlatformDatasetsV1AlphaAsyncClient( + client = ProjectServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -1429,58 +1336,50 @@ async def test_create_dataset_async( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.create_dataset), "__call__") as call: + with mock.patch.object(type(client.transport.get_project), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - gmm_dataset.Dataset( + project.Project( name="name_value", - display_name="display_name_value", - description="description_value", - version_id="version_id_value", - usage=[gmm_dataset.Usage.USAGE_DATA_DRIVEN_STYLING], - status=gmm_dataset.State.STATE_IMPORTING, - version_description="version_description_value", + enrolled_solutions=[common.SolutionType.SOLUTION_TYPE_RECOMMENDATION], ) ) - response = await client.create_dataset(request) + response = await client.get_project(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = maps_platform_datasets.CreateDatasetRequest() + request = project_service.GetProjectRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, gmm_dataset.Dataset) + assert isinstance(response, project.Project) assert response.name == "name_value" - assert response.display_name == "display_name_value" - assert response.description == "description_value" - assert response.version_id == "version_id_value" - assert response.usage == [gmm_dataset.Usage.USAGE_DATA_DRIVEN_STYLING] - assert response.status == gmm_dataset.State.STATE_IMPORTING - assert response.version_description == "version_description_value" + assert response.enrolled_solutions == [ + common.SolutionType.SOLUTION_TYPE_RECOMMENDATION + ] @pytest.mark.asyncio -async def test_create_dataset_async_from_dict(): - await test_create_dataset_async(request_type=dict) +async def test_get_project_async_from_dict(): + await test_get_project_async(request_type=dict) -def test_create_dataset_field_headers(): - client = MapsPlatformDatasetsV1AlphaClient( +def test_get_project_field_headers(): + client = ProjectServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = maps_platform_datasets.CreateDatasetRequest() + request = project_service.GetProjectRequest() - request.parent = "parent_value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.create_dataset), "__call__") as call: - call.return_value = gmm_dataset.Dataset() - client.create_dataset(request) + with mock.patch.object(type(client.transport.get_project), "__call__") as call: + call.return_value = project.Project() + client.get_project(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -1491,26 +1390,26 @@ def test_create_dataset_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "parent=parent_value", + "name=name_value", ) in kw["metadata"] @pytest.mark.asyncio -async def test_create_dataset_field_headers_async(): - client = MapsPlatformDatasetsV1AlphaAsyncClient( +async def test_get_project_field_headers_async(): + client = ProjectServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = maps_platform_datasets.CreateDatasetRequest() + request = project_service.GetProjectRequest() - request.parent = "parent_value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.create_dataset), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gmm_dataset.Dataset()) - await client.create_dataset(request) + with mock.patch.object(type(client.transport.get_project), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(project.Project()) + await client.get_project(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -1521,109 +1420,99 @@ async def test_create_dataset_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "parent=parent_value", + "name=name_value", ) in kw["metadata"] -def test_create_dataset_flattened(): - client = MapsPlatformDatasetsV1AlphaClient( +def test_get_project_flattened(): + client = ProjectServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.create_dataset), "__call__") as call: + with mock.patch.object(type(client.transport.get_project), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = gmm_dataset.Dataset() + call.return_value = project.Project() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.create_dataset( - parent="parent_value", - dataset=gmm_dataset.Dataset(name="name_value"), + client.get_project( + name="name_value", ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" - assert arg == mock_val - arg = args[0].dataset - mock_val = gmm_dataset.Dataset(name="name_value") + arg = args[0].name + mock_val = "name_value" assert arg == mock_val -def test_create_dataset_flattened_error(): - client = MapsPlatformDatasetsV1AlphaClient( +def test_get_project_flattened_error(): + client = ProjectServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened # fields is an error. 
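The field_headers tests above pin down a GAPIC convention: whatever appears in the request path (`name`, `project`, `parent`, `dataset.name`) must also be echoed into the `x-goog-request-params` gRPC metadata entry so the backend can route the call. Assuming the `google.api_core` routing helper used later in this file behaves as shown there, a one-assertion sketch of that entry's shape:

```python
from google.api_core.gapic_v1 import routing_header


def test_routing_header_shape():
    # to_grpc_metadata builds the ("x-goog-request-params", "k=v") pair that
    # the field_headers tests expect to find in kw["metadata"].
    entry = routing_header.to_grpc_metadata((("name", "name_value"),))
    assert entry == ("x-goog-request-params", "name=name_value")
```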
with pytest.raises(ValueError): - client.create_dataset( - maps_platform_datasets.CreateDatasetRequest(), - parent="parent_value", - dataset=gmm_dataset.Dataset(name="name_value"), + client.get_project( + project_service.GetProjectRequest(), + name="name_value", ) @pytest.mark.asyncio -async def test_create_dataset_flattened_async(): - client = MapsPlatformDatasetsV1AlphaAsyncClient( +async def test_get_project_flattened_async(): + client = ProjectServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.create_dataset), "__call__") as call: + with mock.patch.object(type(client.transport.get_project), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = gmm_dataset.Dataset() + call.return_value = project.Project() - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gmm_dataset.Dataset()) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(project.Project()) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.create_dataset( - parent="parent_value", - dataset=gmm_dataset.Dataset(name="name_value"), + response = await client.get_project( + name="name_value", ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" - assert arg == mock_val - arg = args[0].dataset - mock_val = gmm_dataset.Dataset(name="name_value") + arg = args[0].name + mock_val = "name_value" assert arg == mock_val @pytest.mark.asyncio -async def test_create_dataset_flattened_error_async(): - client = MapsPlatformDatasetsV1AlphaAsyncClient( +async def test_get_project_flattened_error_async(): + client = ProjectServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.create_dataset( - maps_platform_datasets.CreateDatasetRequest(), - parent="parent_value", - dataset=gmm_dataset.Dataset(name="name_value"), + await client.get_project( + project_service.GetProjectRequest(), + name="name_value", ) @pytest.mark.parametrize( "request_type", [ - maps_platform_datasets.UpdateDatasetMetadataRequest, + project_service.AcceptTermsRequest, dict, ], ) -def test_update_dataset_metadata(request_type, transport: str = "grpc"): - client = MapsPlatformDatasetsV1AlphaClient( +def test_accept_terms(request_type, transport: str = "grpc"): + client = ProjectServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -1633,63 +1522,51 @@ def test_update_dataset_metadata(request_type, transport: str = "grpc"): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_dataset_metadata), "__call__" - ) as call: + with mock.patch.object(type(client.transport.accept_terms), "__call__") as call: # Designate an appropriate return value for the call. 
- call.return_value = gmm_dataset.Dataset( + call.return_value = gcr_project.Project( name="name_value", - display_name="display_name_value", - description="description_value", - version_id="version_id_value", - usage=[gmm_dataset.Usage.USAGE_DATA_DRIVEN_STYLING], - status=gmm_dataset.State.STATE_IMPORTING, - version_description="version_description_value", + enrolled_solutions=[common.SolutionType.SOLUTION_TYPE_RECOMMENDATION], ) - response = client.update_dataset_metadata(request) + response = client.accept_terms(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = maps_platform_datasets.UpdateDatasetMetadataRequest() + request = project_service.AcceptTermsRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, gmm_dataset.Dataset) + assert isinstance(response, gcr_project.Project) assert response.name == "name_value" - assert response.display_name == "display_name_value" - assert response.description == "description_value" - assert response.version_id == "version_id_value" - assert response.usage == [gmm_dataset.Usage.USAGE_DATA_DRIVEN_STYLING] - assert response.status == gmm_dataset.State.STATE_IMPORTING - assert response.version_description == "version_description_value" + assert response.enrolled_solutions == [ + common.SolutionType.SOLUTION_TYPE_RECOMMENDATION + ] -def test_update_dataset_metadata_empty_call(): +def test_accept_terms_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. - client = MapsPlatformDatasetsV1AlphaClient( + client = ProjectServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_dataset_metadata), "__call__" - ) as call: + with mock.patch.object(type(client.transport.accept_terms), "__call__") as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.update_dataset_metadata() + client.accept_terms() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == maps_platform_datasets.UpdateDatasetMetadataRequest() + assert args[0] == project_service.AcceptTermsRequest() -def test_update_dataset_metadata_non_empty_request_with_auto_populated_field(): +def test_accept_terms_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. - client = MapsPlatformDatasetsV1AlphaClient( + client = ProjectServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) @@ -1697,26 +1574,28 @@ def test_update_dataset_metadata_non_empty_request_with_auto_populated_field(): # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = maps_platform_datasets.UpdateDatasetMetadataRequest() + request = project_service.AcceptTermsRequest( + project="project_value", + ) # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.update_dataset_metadata), "__call__" - ) as call: + with mock.patch.object(type(client.transport.accept_terms), "__call__") as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.update_dataset_metadata(request=request) + client.accept_terms(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == maps_platform_datasets.UpdateDatasetMetadataRequest() + assert args[0] == project_service.AcceptTermsRequest( + project="project_value", + ) -def test_update_dataset_metadata_use_cached_wrapped_rpc(): +def test_accept_terms_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = MapsPlatformDatasetsV1AlphaClient( + client = ProjectServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) @@ -1726,26 +1605,21 @@ def test_update_dataset_metadata_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert ( - client._transport.update_dataset_metadata - in client._transport._wrapped_methods - ) + assert client._transport.accept_terms in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.update_dataset_metadata - ] = mock_rpc + client._transport._wrapped_methods[client._transport.accept_terms] = mock_rpc request = {} - client.update_dataset_metadata(request) + client.accept_terms(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.update_dataset_metadata(request) + client.accept_terms(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -1753,44 +1627,37 @@ def test_update_dataset_metadata_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_update_dataset_metadata_empty_call_async(): +async def test_accept_terms_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. - client = MapsPlatformDatasetsV1AlphaAsyncClient( + client = ProjectServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), transport="grpc_asyncio", ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_dataset_metadata), "__call__" - ) as call: + with mock.patch.object(type(client.transport.accept_terms), "__call__") as call: # Designate an appropriate return value for the call. 
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - gmm_dataset.Dataset( + gcr_project.Project( name="name_value", - display_name="display_name_value", - description="description_value", - version_id="version_id_value", - usage=[gmm_dataset.Usage.USAGE_DATA_DRIVEN_STYLING], - status=gmm_dataset.State.STATE_IMPORTING, - version_description="version_description_value", + enrolled_solutions=[common.SolutionType.SOLUTION_TYPE_RECOMMENDATION], ) ) - response = await client.update_dataset_metadata() + response = await client.accept_terms() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == maps_platform_datasets.UpdateDatasetMetadataRequest() + assert args[0] == project_service.AcceptTermsRequest() @pytest.mark.asyncio -async def test_update_dataset_metadata_async_use_cached_wrapped_rpc( +async def test_accept_terms_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = MapsPlatformDatasetsV1AlphaAsyncClient( + client = ProjectServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -1801,7 +1668,7 @@ async def test_update_dataset_metadata_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.update_dataset_metadata + client._client._transport.accept_terms in client._client._transport._wrapped_methods ) @@ -1813,16 +1680,16 @@ def __await__(self): mock_object = AwaitableMock() client._client._transport._wrapped_methods[ - client._client._transport.update_dataset_metadata + client._client._transport.accept_terms ] = mock_object request = {} - await client.update_dataset_metadata(request) + await client.accept_terms(request) # Establish that the underlying gRPC stub method was called. assert mock_object.call_count == 1 - await client.update_dataset_metadata(request) + await client.accept_terms(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -1830,11 +1697,10 @@ def __await__(self): @pytest.mark.asyncio -async def test_update_dataset_metadata_async( - transport: str = "grpc_asyncio", - request_type=maps_platform_datasets.UpdateDatasetMetadataRequest, +async def test_accept_terms_async( + transport: str = "grpc_asyncio", request_type=project_service.AcceptTermsRequest ): - client = MapsPlatformDatasetsV1AlphaAsyncClient( + client = ProjectServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -1844,62 +1710,50 @@ async def test_update_dataset_metadata_async( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_dataset_metadata), "__call__" - ) as call: + with mock.patch.object(type(client.transport.accept_terms), "__call__") as call: # Designate an appropriate return value for the call. 
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - gmm_dataset.Dataset( + gcr_project.Project( name="name_value", - display_name="display_name_value", - description="description_value", - version_id="version_id_value", - usage=[gmm_dataset.Usage.USAGE_DATA_DRIVEN_STYLING], - status=gmm_dataset.State.STATE_IMPORTING, - version_description="version_description_value", + enrolled_solutions=[common.SolutionType.SOLUTION_TYPE_RECOMMENDATION], ) ) - response = await client.update_dataset_metadata(request) + response = await client.accept_terms(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = maps_platform_datasets.UpdateDatasetMetadataRequest() + request = project_service.AcceptTermsRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, gmm_dataset.Dataset) + assert isinstance(response, gcr_project.Project) assert response.name == "name_value" - assert response.display_name == "display_name_value" - assert response.description == "description_value" - assert response.version_id == "version_id_value" - assert response.usage == [gmm_dataset.Usage.USAGE_DATA_DRIVEN_STYLING] - assert response.status == gmm_dataset.State.STATE_IMPORTING - assert response.version_description == "version_description_value" + assert response.enrolled_solutions == [ + common.SolutionType.SOLUTION_TYPE_RECOMMENDATION + ] @pytest.mark.asyncio -async def test_update_dataset_metadata_async_from_dict(): - await test_update_dataset_metadata_async(request_type=dict) +async def test_accept_terms_async_from_dict(): + await test_accept_terms_async(request_type=dict) -def test_update_dataset_metadata_field_headers(): - client = MapsPlatformDatasetsV1AlphaClient( +def test_accept_terms_field_headers(): + client = ProjectServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = maps_platform_datasets.UpdateDatasetMetadataRequest() + request = project_service.AcceptTermsRequest() - request.dataset.name = "name_value" + request.project = "project_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_dataset_metadata), "__call__" - ) as call: - call.return_value = gmm_dataset.Dataset() - client.update_dataset_metadata(request) + with mock.patch.object(type(client.transport.accept_terms), "__call__") as call: + call.return_value = gcr_project.Project() + client.accept_terms(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -1910,28 +1764,26 @@ def test_update_dataset_metadata_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "dataset.name=name_value", + "project=project_value", ) in kw["metadata"] @pytest.mark.asyncio -async def test_update_dataset_metadata_field_headers_async(): - client = MapsPlatformDatasetsV1AlphaAsyncClient( +async def test_accept_terms_field_headers_async(): + client = ProjectServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. 
- request = maps_platform_datasets.UpdateDatasetMetadataRequest() + request = project_service.AcceptTermsRequest() - request.dataset.name = "name_value" + request.project = "project_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_dataset_metadata), "__call__" - ) as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gmm_dataset.Dataset()) - await client.update_dataset_metadata(request) + with mock.patch.object(type(client.transport.accept_terms), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gcr_project.Project()) + await client.accept_terms(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -1942,113 +1794,99 @@ async def test_update_dataset_metadata_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "dataset.name=name_value", + "project=project_value", ) in kw["metadata"] -def test_update_dataset_metadata_flattened(): - client = MapsPlatformDatasetsV1AlphaClient( +def test_accept_terms_flattened(): + client = ProjectServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_dataset_metadata), "__call__" - ) as call: + with mock.patch.object(type(client.transport.accept_terms), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = gmm_dataset.Dataset() + call.return_value = gcr_project.Project() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.update_dataset_metadata( - dataset=gmm_dataset.Dataset(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + client.accept_terms( + project="project_value", ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - arg = args[0].dataset - mock_val = gmm_dataset.Dataset(name="name_value") - assert arg == mock_val - arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + arg = args[0].project + mock_val = "project_value" assert arg == mock_val -def test_update_dataset_metadata_flattened_error(): - client = MapsPlatformDatasetsV1AlphaClient( +def test_accept_terms_flattened_error(): + client = ProjectServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.update_dataset_metadata( - maps_platform_datasets.UpdateDatasetMetadataRequest(), - dataset=gmm_dataset.Dataset(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + client.accept_terms( + project_service.AcceptTermsRequest(), + project="project_value", ) @pytest.mark.asyncio -async def test_update_dataset_metadata_flattened_async(): - client = MapsPlatformDatasetsV1AlphaAsyncClient( +async def test_accept_terms_flattened_async(): + client = ProjectServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.update_dataset_metadata), "__call__" - ) as call: + with mock.patch.object(type(client.transport.accept_terms), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = gmm_dataset.Dataset() + call.return_value = gcr_project.Project() - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gmm_dataset.Dataset()) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gcr_project.Project()) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.update_dataset_metadata( - dataset=gmm_dataset.Dataset(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + response = await client.accept_terms( + project="project_value", ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - arg = args[0].dataset - mock_val = gmm_dataset.Dataset(name="name_value") - assert arg == mock_val - arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + arg = args[0].project + mock_val = "project_value" assert arg == mock_val @pytest.mark.asyncio -async def test_update_dataset_metadata_flattened_error_async(): - client = MapsPlatformDatasetsV1AlphaAsyncClient( +async def test_accept_terms_flattened_error_async(): + client = ProjectServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.update_dataset_metadata( - maps_platform_datasets.UpdateDatasetMetadataRequest(), - dataset=gmm_dataset.Dataset(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + await client.accept_terms( + project_service.AcceptTermsRequest(), + project="project_value", ) @pytest.mark.parametrize( "request_type", [ - maps_platform_datasets.GetDatasetRequest, + project_service.EnrollSolutionRequest, dict, ], ) -def test_get_dataset(request_type, transport: str = "grpc"): - client = MapsPlatformDatasetsV1AlphaClient( +def test_enroll_solution(request_type, transport: str = "grpc"): + client = ProjectServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -2058,59 +1896,44 @@ def test_get_dataset(request_type, transport: str = "grpc"): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_dataset), "__call__") as call: + with mock.patch.object(type(client.transport.enroll_solution), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = dataset.Dataset( - name="name_value", - display_name="display_name_value", - description="description_value", - version_id="version_id_value", - usage=[dataset.Usage.USAGE_DATA_DRIVEN_STYLING], - status=dataset.State.STATE_IMPORTING, - version_description="version_description_value", - ) - response = client.get_dataset(request) + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.enroll_solution(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = maps_platform_datasets.GetDatasetRequest() + request = project_service.EnrollSolutionRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, dataset.Dataset) - assert response.name == "name_value" - assert response.display_name == "display_name_value" - assert response.description == "description_value" - assert response.version_id == "version_id_value" - assert response.usage == [dataset.Usage.USAGE_DATA_DRIVEN_STYLING] - assert response.status == dataset.State.STATE_IMPORTING - assert response.version_description == "version_description_value" + assert isinstance(response, future.Future) -def test_get_dataset_empty_call(): +def test_enroll_solution_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. - client = MapsPlatformDatasetsV1AlphaClient( + client = ProjectServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_dataset), "__call__") as call: + with mock.patch.object(type(client.transport.enroll_solution), "__call__") as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.get_dataset() + client.enroll_solution() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == maps_platform_datasets.GetDatasetRequest() + assert args[0] == project_service.EnrollSolutionRequest() -def test_get_dataset_non_empty_request_with_auto_populated_field(): +def test_enroll_solution_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. - client = MapsPlatformDatasetsV1AlphaClient( + client = ProjectServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) @@ -2118,28 +1941,28 @@ def test_get_dataset_non_empty_request_with_auto_populated_field(): # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = maps_platform_datasets.GetDatasetRequest( - name="name_value", + request = project_service.EnrollSolutionRequest( + project="project_value", ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_dataset), "__call__") as call: + with mock.patch.object(type(client.transport.enroll_solution), "__call__") as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client.get_dataset(request=request) + client.enroll_solution(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == maps_platform_datasets.GetDatasetRequest( - name="name_value", + assert args[0] == project_service.EnrollSolutionRequest( + project="project_value", ) -def test_get_dataset_use_cached_wrapped_rpc(): +def test_enroll_solution_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = MapsPlatformDatasetsV1AlphaClient( + client = ProjectServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) @@ -2149,21 +1972,25 @@ def test_get_dataset_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.get_dataset in client._transport._wrapped_methods + assert client._transport.enroll_solution in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.get_dataset] = mock_rpc + client._transport._wrapped_methods[client._transport.enroll_solution] = mock_rpc request = {} - client.get_dataset(request) + client.enroll_solution(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.get_dataset(request) + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.enroll_solution(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -2171,42 +1998,34 @@ def test_get_dataset_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_get_dataset_empty_call_async(): +async def test_enroll_solution_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. - client = MapsPlatformDatasetsV1AlphaAsyncClient( + client = ProjectServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), transport="grpc_asyncio", ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_dataset), "__call__") as call: + with mock.patch.object(type(client.transport.enroll_solution), "__call__") as call: # Designate an appropriate return value for the call. 
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - dataset.Dataset( - name="name_value", - display_name="display_name_value", - description="description_value", - version_id="version_id_value", - usage=[dataset.Usage.USAGE_DATA_DRIVEN_STYLING], - status=dataset.State.STATE_IMPORTING, - version_description="version_description_value", - ) + operations_pb2.Operation(name="operations/spam") ) - response = await client.get_dataset() + response = await client.enroll_solution() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == maps_platform_datasets.GetDatasetRequest() + assert args[0] == project_service.EnrollSolutionRequest() @pytest.mark.asyncio -async def test_get_dataset_async_use_cached_wrapped_rpc( +async def test_enroll_solution_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = MapsPlatformDatasetsV1AlphaAsyncClient( + client = ProjectServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -2217,7 +2036,7 @@ async def test_get_dataset_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.get_dataset + client._client._transport.enroll_solution in client._client._transport._wrapped_methods ) @@ -2229,16 +2048,20 @@ def __await__(self): mock_object = AwaitableMock() client._client._transport._wrapped_methods[ - client._client._transport.get_dataset + client._client._transport.enroll_solution ] = mock_object request = {} - await client.get_dataset(request) + await client.enroll_solution(request) # Establish that the underlying gRPC stub method was called. assert mock_object.call_count == 1 - await client.get_dataset(request) + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.enroll_solution(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -2246,11 +2069,10 @@ def __await__(self): @pytest.mark.asyncio -async def test_get_dataset_async( - transport: str = "grpc_asyncio", - request_type=maps_platform_datasets.GetDatasetRequest, +async def test_enroll_solution_async( + transport: str = "grpc_asyncio", request_type=project_service.EnrollSolutionRequest ): - client = MapsPlatformDatasetsV1AlphaAsyncClient( + client = ProjectServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -2260,58 +2082,43 @@ async def test_get_dataset_async( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_dataset), "__call__") as call: + with mock.patch.object(type(client.transport.enroll_solution), "__call__") as call: # Designate an appropriate return value for the call. 
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - dataset.Dataset( - name="name_value", - display_name="display_name_value", - description="description_value", - version_id="version_id_value", - usage=[dataset.Usage.USAGE_DATA_DRIVEN_STYLING], - status=dataset.State.STATE_IMPORTING, - version_description="version_description_value", - ) + operations_pb2.Operation(name="operations/spam") ) - response = await client.get_dataset(request) + response = await client.enroll_solution(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = maps_platform_datasets.GetDatasetRequest() + request = project_service.EnrollSolutionRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, dataset.Dataset) - assert response.name == "name_value" - assert response.display_name == "display_name_value" - assert response.description == "description_value" - assert response.version_id == "version_id_value" - assert response.usage == [dataset.Usage.USAGE_DATA_DRIVEN_STYLING] - assert response.status == dataset.State.STATE_IMPORTING - assert response.version_description == "version_description_value" + assert isinstance(response, future.Future) @pytest.mark.asyncio -async def test_get_dataset_async_from_dict(): - await test_get_dataset_async(request_type=dict) +async def test_enroll_solution_async_from_dict(): + await test_enroll_solution_async(request_type=dict) -def test_get_dataset_field_headers(): - client = MapsPlatformDatasetsV1AlphaClient( +def test_enroll_solution_field_headers(): + client = ProjectServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = maps_platform_datasets.GetDatasetRequest() + request = project_service.EnrollSolutionRequest() - request.name = "name_value" + request.project = "project_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_dataset), "__call__") as call: - call.return_value = dataset.Dataset() - client.get_dataset(request) + with mock.patch.object(type(client.transport.enroll_solution), "__call__") as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.enroll_solution(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -2322,26 +2129,28 @@ def test_get_dataset_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "name=name_value", + "project=project_value", ) in kw["metadata"] @pytest.mark.asyncio -async def test_get_dataset_field_headers_async(): - client = MapsPlatformDatasetsV1AlphaAsyncClient( +async def test_enroll_solution_field_headers_async(): + client = ProjectServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = maps_platform_datasets.GetDatasetRequest() + request = project_service.EnrollSolutionRequest() - request.name = "name_value" + request.project = "project_value" # Mock the actual call within the gRPC stub, and fake the request. 
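`enroll_solution` is a long-running method, so these tests return a bare `operations_pb2.Operation` from the mocked stub and then check that the client surface hands back a `future.Future` wrapper rather than the raw proto. As a small, self-contained look at the proto half of that contract (assuming only the standard `google.longrunning` protos that ship with the common protobuf packages):

```python
from google.longrunning import operations_pb2


def test_operation_proto_defaults():
    # The mocked stub returns this raw proto; the generated client is then
    # responsible for wrapping it in an operation future.
    op = operations_pb2.Operation(name="operations/spam")
    assert op.name == "operations/spam"
    assert not op.done  # not marked complete until the backend says so
```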
- with mock.patch.object(type(client.transport.get_dataset), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dataset.Dataset()) - await client.get_dataset(request) + with mock.patch.object(type(client.transport.enroll_solution), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.enroll_solution(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -2352,99 +2161,19 @@ async def test_get_dataset_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "name=name_value", + "project=project_value", ) in kw["metadata"] -def test_get_dataset_flattened(): - client = MapsPlatformDatasetsV1AlphaClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_dataset), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = dataset.Dataset() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.get_dataset( - name="name_value", - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" - assert arg == mock_val - - -def test_get_dataset_flattened_error(): - client = MapsPlatformDatasetsV1AlphaClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.get_dataset( - maps_platform_datasets.GetDatasetRequest(), - name="name_value", - ) - - -@pytest.mark.asyncio -async def test_get_dataset_flattened_async(): - client = MapsPlatformDatasetsV1AlphaAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_dataset), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = dataset.Dataset() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dataset.Dataset()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.get_dataset( - name="name_value", - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" - assert arg == mock_val - - -@pytest.mark.asyncio -async def test_get_dataset_flattened_error_async(): - client = MapsPlatformDatasetsV1AlphaAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - await client.get_dataset( - maps_platform_datasets.GetDatasetRequest(), - name="name_value", - ) - - @pytest.mark.parametrize( "request_type", [ - maps_platform_datasets.ListDatasetVersionsRequest, + project_service.ListEnrolledSolutionsRequest, dict, ], ) -def test_list_dataset_versions(request_type, transport: str = "grpc"): - client = MapsPlatformDatasetsV1AlphaClient( +def test_list_enrolled_solutions(request_type, transport: str = "grpc"): + client = ProjectServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -2455,50 +2184,52 @@ def test_list_dataset_versions(request_type, transport: str = "grpc"): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_dataset_versions), "__call__" + type(client.transport.list_enrolled_solutions), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = maps_platform_datasets.ListDatasetVersionsResponse( - next_page_token="next_page_token_value", + call.return_value = project_service.ListEnrolledSolutionsResponse( + enrolled_solutions=[common.SolutionType.SOLUTION_TYPE_RECOMMENDATION], ) - response = client.list_dataset_versions(request) + response = client.list_enrolled_solutions(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = maps_platform_datasets.ListDatasetVersionsRequest() + request = project_service.ListEnrolledSolutionsRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListDatasetVersionsPager) - assert response.next_page_token == "next_page_token_value" + assert isinstance(response, project_service.ListEnrolledSolutionsResponse) + assert response.enrolled_solutions == [ + common.SolutionType.SOLUTION_TYPE_RECOMMENDATION + ] -def test_list_dataset_versions_empty_call(): +def test_list_enrolled_solutions_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. - client = MapsPlatformDatasetsV1AlphaClient( + client = ProjectServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_dataset_versions), "__call__" + type(client.transport.list_enrolled_solutions), "__call__" ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.list_dataset_versions() + client.list_enrolled_solutions() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == maps_platform_datasets.ListDatasetVersionsRequest() + assert args[0] == project_service.ListEnrolledSolutionsRequest() -def test_list_dataset_versions_non_empty_request_with_auto_populated_field(): +def test_list_enrolled_solutions_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. 
- client = MapsPlatformDatasetsV1AlphaClient( + client = ProjectServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) @@ -2506,32 +2237,30 @@ def test_list_dataset_versions_non_empty_request_with_auto_populated_field(): # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = maps_platform_datasets.ListDatasetVersionsRequest( - name="name_value", - page_token="page_token_value", + request = project_service.ListEnrolledSolutionsRequest( + parent="parent_value", ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_dataset_versions), "__call__" + type(client.transport.list_enrolled_solutions), "__call__" ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.list_dataset_versions(request=request) + client.list_enrolled_solutions(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == maps_platform_datasets.ListDatasetVersionsRequest( - name="name_value", - page_token="page_token_value", + assert args[0] == project_service.ListEnrolledSolutionsRequest( + parent="parent_value", ) -def test_list_dataset_versions_use_cached_wrapped_rpc(): +def test_list_enrolled_solutions_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = MapsPlatformDatasetsV1AlphaClient( + client = ProjectServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) @@ -2542,7 +2271,7 @@ def test_list_dataset_versions_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.list_dataset_versions + client._transport.list_enrolled_solutions in client._transport._wrapped_methods ) @@ -2552,15 +2281,15 @@ def test_list_dataset_versions_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.list_dataset_versions + client._transport.list_enrolled_solutions ] = mock_rpc request = {} - client.list_dataset_versions(request) + client.list_enrolled_solutions(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.list_dataset_versions(request) + client.list_enrolled_solutions(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -2568,38 +2297,38 @@ def test_list_dataset_versions_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_list_dataset_versions_empty_call_async(): +async def test_list_enrolled_solutions_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. - client = MapsPlatformDatasetsV1AlphaAsyncClient( + client = ProjectServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), transport="grpc_asyncio", ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_dataset_versions), "__call__" + type(client.transport.list_enrolled_solutions), "__call__" ) as call: # Designate an appropriate return value for the call. 
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - maps_platform_datasets.ListDatasetVersionsResponse( - next_page_token="next_page_token_value", + project_service.ListEnrolledSolutionsResponse( + enrolled_solutions=[common.SolutionType.SOLUTION_TYPE_RECOMMENDATION], ) ) - response = await client.list_dataset_versions() + response = await client.list_enrolled_solutions() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == maps_platform_datasets.ListDatasetVersionsRequest() + assert args[0] == project_service.ListEnrolledSolutionsRequest() @pytest.mark.asyncio -async def test_list_dataset_versions_async_use_cached_wrapped_rpc( +async def test_list_enrolled_solutions_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = MapsPlatformDatasetsV1AlphaAsyncClient( + client = ProjectServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -2610,7 +2339,7 @@ async def test_list_dataset_versions_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.list_dataset_versions + client._client._transport.list_enrolled_solutions in client._client._transport._wrapped_methods ) @@ -2622,16 +2351,16 @@ def __await__(self): mock_object = AwaitableMock() client._client._transport._wrapped_methods[ - client._client._transport.list_dataset_versions + client._client._transport.list_enrolled_solutions ] = mock_object request = {} - await client.list_dataset_versions(request) + await client.list_enrolled_solutions(request) # Establish that the underlying gRPC stub method was called. assert mock_object.call_count == 1 - await client.list_dataset_versions(request) + await client.list_enrolled_solutions(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -2639,11 +2368,11 @@ def __await__(self): @pytest.mark.asyncio -async def test_list_dataset_versions_async( +async def test_list_enrolled_solutions_async( transport: str = "grpc_asyncio", - request_type=maps_platform_datasets.ListDatasetVersionsRequest, + request_type=project_service.ListEnrolledSolutionsRequest, ): - client = MapsPlatformDatasetsV1AlphaAsyncClient( + client = ProjectServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -2654,49 +2383,51 @@ async def test_list_dataset_versions_async( # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_dataset_versions), "__call__" + type(client.transport.list_enrolled_solutions), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - maps_platform_datasets.ListDatasetVersionsResponse( - next_page_token="next_page_token_value", + project_service.ListEnrolledSolutionsResponse( + enrolled_solutions=[common.SolutionType.SOLUTION_TYPE_RECOMMENDATION], ) ) - response = await client.list_dataset_versions(request) + response = await client.list_enrolled_solutions(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = maps_platform_datasets.ListDatasetVersionsRequest() + request = project_service.ListEnrolledSolutionsRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListDatasetVersionsAsyncPager) - assert response.next_page_token == "next_page_token_value" + assert isinstance(response, project_service.ListEnrolledSolutionsResponse) + assert response.enrolled_solutions == [ + common.SolutionType.SOLUTION_TYPE_RECOMMENDATION + ] @pytest.mark.asyncio -async def test_list_dataset_versions_async_from_dict(): - await test_list_dataset_versions_async(request_type=dict) +async def test_list_enrolled_solutions_async_from_dict(): + await test_list_enrolled_solutions_async(request_type=dict) -def test_list_dataset_versions_field_headers(): - client = MapsPlatformDatasetsV1AlphaClient( +def test_list_enrolled_solutions_field_headers(): + client = ProjectServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = maps_platform_datasets.ListDatasetVersionsRequest() + request = project_service.ListEnrolledSolutionsRequest() - request.name = "name_value" + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_dataset_versions), "__call__" + type(client.transport.list_enrolled_solutions), "__call__" ) as call: - call.return_value = maps_platform_datasets.ListDatasetVersionsResponse() - client.list_dataset_versions(request) + call.return_value = project_service.ListEnrolledSolutionsResponse() + client.list_enrolled_solutions(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -2707,30 +2438,30 @@ def test_list_dataset_versions_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "name=name_value", + "parent=parent_value", ) in kw["metadata"] @pytest.mark.asyncio -async def test_list_dataset_versions_field_headers_async(): - client = MapsPlatformDatasetsV1AlphaAsyncClient( +async def test_list_enrolled_solutions_field_headers_async(): + client = ProjectServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = maps_platform_datasets.ListDatasetVersionsRequest() + request = project_service.ListEnrolledSolutionsRequest() - request.name = "name_value" + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_dataset_versions), "__call__" + type(client.transport.list_enrolled_solutions), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - maps_platform_datasets.ListDatasetVersionsResponse() + project_service.ListEnrolledSolutionsResponse() ) - await client.list_dataset_versions(request) + await client.list_enrolled_solutions(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) @@ -2741,353 +2472,159 @@ async def test_list_dataset_versions_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "name=name_value", + "parent=parent_value", ) in kw["metadata"] -def test_list_dataset_versions_flattened(): - client = MapsPlatformDatasetsV1AlphaClient( +def test_list_enrolled_solutions_flattened(): + client = ProjectServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_dataset_versions), "__call__" + type(client.transport.list_enrolled_solutions), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = maps_platform_datasets.ListDatasetVersionsResponse() + call.return_value = project_service.ListEnrolledSolutionsResponse() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.list_dataset_versions( - name="name_value", + client.list_enrolled_solutions( + parent="parent_value", ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" + arg = args[0].parent + mock_val = "parent_value" assert arg == mock_val -def test_list_dataset_versions_flattened_error(): - client = MapsPlatformDatasetsV1AlphaClient( +def test_list_enrolled_solutions_flattened_error(): + client = ProjectServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.list_dataset_versions( - maps_platform_datasets.ListDatasetVersionsRequest(), - name="name_value", + client.list_enrolled_solutions( + project_service.ListEnrolledSolutionsRequest(), + parent="parent_value", ) @pytest.mark.asyncio -async def test_list_dataset_versions_flattened_async(): - client = MapsPlatformDatasetsV1AlphaAsyncClient( +async def test_list_enrolled_solutions_flattened_async(): + client = ProjectServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_dataset_versions), "__call__" + type(client.transport.list_enrolled_solutions), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = maps_platform_datasets.ListDatasetVersionsResponse() + call.return_value = project_service.ListEnrolledSolutionsResponse() call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - maps_platform_datasets.ListDatasetVersionsResponse() + project_service.ListEnrolledSolutionsResponse() ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.list_dataset_versions( - name="name_value", + response = await client.list_enrolled_solutions( + parent="parent_value", ) # Establish that the underlying call was made with the expected # request object values. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" + arg = args[0].parent + mock_val = "parent_value" assert arg == mock_val @pytest.mark.asyncio -async def test_list_dataset_versions_flattened_error_async(): - client = MapsPlatformDatasetsV1AlphaAsyncClient( +async def test_list_enrolled_solutions_flattened_error_async(): + client = ProjectServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.list_dataset_versions( - maps_platform_datasets.ListDatasetVersionsRequest(), - name="name_value", + await client.list_enrolled_solutions( + project_service.ListEnrolledSolutionsRequest(), + parent="parent_value", ) -def test_list_dataset_versions_pager(transport_name: str = "grpc"): - client = MapsPlatformDatasetsV1AlphaClient( +@pytest.mark.parametrize( + "request_type", + [ + project_service.GetLoggingConfigRequest, + dict, + ], +) +def test_get_logging_config(request_type, transport: str = "grpc"): + client = ProjectServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, + transport=transport, ) + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_dataset_versions), "__call__" + type(client.transport.get_logging_config), "__call__" ) as call: - # Set the response to a series of pages. - call.side_effect = ( - maps_platform_datasets.ListDatasetVersionsResponse( - datasets=[ - dataset.Dataset(), - dataset.Dataset(), - dataset.Dataset(), - ], - next_page_token="abc", - ), - maps_platform_datasets.ListDatasetVersionsResponse( - datasets=[], - next_page_token="def", - ), - maps_platform_datasets.ListDatasetVersionsResponse( - datasets=[ - dataset.Dataset(), - ], - next_page_token="ghi", - ), - maps_platform_datasets.ListDatasetVersionsResponse( - datasets=[ - dataset.Dataset(), - dataset.Dataset(), - ], - ), - RuntimeError, + # Designate an appropriate return value for the call. + call.return_value = project.LoggingConfig( + name="name_value", ) - - metadata = () - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("name", ""),)), - ) - pager = client.list_dataset_versions(request={}) - - assert pager._metadata == metadata - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, dataset.Dataset) for i in results) - - -def test_list_dataset_versions_pages(transport_name: str = "grpc"): - client = MapsPlatformDatasetsV1AlphaClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_dataset_versions), "__call__" - ) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - maps_platform_datasets.ListDatasetVersionsResponse( - datasets=[ - dataset.Dataset(), - dataset.Dataset(), - dataset.Dataset(), - ], - next_page_token="abc", - ), - maps_platform_datasets.ListDatasetVersionsResponse( - datasets=[], - next_page_token="def", - ), - maps_platform_datasets.ListDatasetVersionsResponse( - datasets=[ - dataset.Dataset(), - ], - next_page_token="ghi", - ), - maps_platform_datasets.ListDatasetVersionsResponse( - datasets=[ - dataset.Dataset(), - dataset.Dataset(), - ], - ), - RuntimeError, - ) - pages = list(client.list_dataset_versions(request={}).pages) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token - - -@pytest.mark.asyncio -async def test_list_dataset_versions_async_pager(): - client = MapsPlatformDatasetsV1AlphaAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_dataset_versions), - "__call__", - new_callable=mock.AsyncMock, - ) as call: - # Set the response to a series of pages. - call.side_effect = ( - maps_platform_datasets.ListDatasetVersionsResponse( - datasets=[ - dataset.Dataset(), - dataset.Dataset(), - dataset.Dataset(), - ], - next_page_token="abc", - ), - maps_platform_datasets.ListDatasetVersionsResponse( - datasets=[], - next_page_token="def", - ), - maps_platform_datasets.ListDatasetVersionsResponse( - datasets=[ - dataset.Dataset(), - ], - next_page_token="ghi", - ), - maps_platform_datasets.ListDatasetVersionsResponse( - datasets=[ - dataset.Dataset(), - dataset.Dataset(), - ], - ), - RuntimeError, - ) - async_pager = await client.list_dataset_versions( - request={}, - ) - assert async_pager.next_page_token == "abc" - responses = [] - async for response in async_pager: # pragma: no branch - responses.append(response) - - assert len(responses) == 6 - assert all(isinstance(i, dataset.Dataset) for i in responses) - - -@pytest.mark.asyncio -async def test_list_dataset_versions_async_pages(): - client = MapsPlatformDatasetsV1AlphaAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_dataset_versions), - "__call__", - new_callable=mock.AsyncMock, - ) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - maps_platform_datasets.ListDatasetVersionsResponse( - datasets=[ - dataset.Dataset(), - dataset.Dataset(), - dataset.Dataset(), - ], - next_page_token="abc", - ), - maps_platform_datasets.ListDatasetVersionsResponse( - datasets=[], - next_page_token="def", - ), - maps_platform_datasets.ListDatasetVersionsResponse( - datasets=[ - dataset.Dataset(), - ], - next_page_token="ghi", - ), - maps_platform_datasets.ListDatasetVersionsResponse( - datasets=[ - dataset.Dataset(), - dataset.Dataset(), - ], - ), - RuntimeError, - ) - pages = [] - # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` - # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 - async for page_ in ( # pragma: no branch - await client.list_dataset_versions(request={}) - ).pages: - pages.append(page_) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token - - -@pytest.mark.parametrize( - "request_type", - [ - maps_platform_datasets.ListDatasetsRequest, - dict, - ], -) -def test_list_datasets(request_type, transport: str = "grpc"): - client = MapsPlatformDatasetsV1AlphaClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_datasets), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = maps_platform_datasets.ListDatasetsResponse( - next_page_token="next_page_token_value", - ) - response = client.list_datasets(request) + response = client.get_logging_config(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = maps_platform_datasets.ListDatasetsRequest() + request = project_service.GetLoggingConfigRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListDatasetsPager) - assert response.next_page_token == "next_page_token_value" + assert isinstance(response, project.LoggingConfig) + assert response.name == "name_value" -def test_list_datasets_empty_call(): +def test_get_logging_config_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. - client = MapsPlatformDatasetsV1AlphaClient( + client = ProjectServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_datasets), "__call__") as call: + with mock.patch.object( + type(client.transport.get_logging_config), "__call__" + ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client.list_datasets() + client.get_logging_config() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == maps_platform_datasets.ListDatasetsRequest() + assert args[0] == project_service.GetLoggingConfigRequest() -def test_list_datasets_non_empty_request_with_auto_populated_field(): +def test_get_logging_config_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. - client = MapsPlatformDatasetsV1AlphaClient( + client = ProjectServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) @@ -3095,30 +2632,30 @@ def test_list_datasets_non_empty_request_with_auto_populated_field(): # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = maps_platform_datasets.ListDatasetsRequest( - parent="parent_value", - page_token="page_token_value", + request = project_service.GetLoggingConfigRequest( + name="name_value", ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_datasets), "__call__") as call: + with mock.patch.object( + type(client.transport.get_logging_config), "__call__" + ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.list_datasets(request=request) + client.get_logging_config(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == maps_platform_datasets.ListDatasetsRequest( - parent="parent_value", - page_token="page_token_value", + assert args[0] == project_service.GetLoggingConfigRequest( + name="name_value", ) -def test_list_datasets_use_cached_wrapped_rpc(): +def test_get_logging_config_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = MapsPlatformDatasetsV1AlphaClient( + client = ProjectServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) @@ -3128,21 +2665,25 @@ def test_list_datasets_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.list_datasets in client._transport._wrapped_methods + assert ( + client._transport.get_logging_config in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.list_datasets] = mock_rpc + client._transport._wrapped_methods[ + client._transport.get_logging_config + ] = mock_rpc request = {} - client.list_datasets(request) + client.get_logging_config(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.list_datasets(request) + client.get_logging_config(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -3150,36 +2691,38 @@ def test_list_datasets_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_list_datasets_empty_call_async(): +async def test_get_logging_config_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. 
request == None and no flattened fields passed, work. - client = MapsPlatformDatasetsV1AlphaAsyncClient( + client = ProjectServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), transport="grpc_asyncio", ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_datasets), "__call__") as call: + with mock.patch.object( + type(client.transport.get_logging_config), "__call__" + ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - maps_platform_datasets.ListDatasetsResponse( - next_page_token="next_page_token_value", + project.LoggingConfig( + name="name_value", ) ) - response = await client.list_datasets() + response = await client.get_logging_config() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == maps_platform_datasets.ListDatasetsRequest() + assert args[0] == project_service.GetLoggingConfigRequest() @pytest.mark.asyncio -async def test_list_datasets_async_use_cached_wrapped_rpc( +async def test_get_logging_config_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = MapsPlatformDatasetsV1AlphaAsyncClient( + client = ProjectServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -3190,7 +2733,7 @@ async def test_list_datasets_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.list_datasets + client._client._transport.get_logging_config in client._client._transport._wrapped_methods ) @@ -3202,16 +2745,16 @@ def __await__(self): mock_object = AwaitableMock() client._client._transport._wrapped_methods[ - client._client._transport.list_datasets + client._client._transport.get_logging_config ] = mock_object request = {} - await client.list_datasets(request) + await client.get_logging_config(request) # Establish that the underlying gRPC stub method was called. assert mock_object.call_count == 1 - await client.list_datasets(request) + await client.get_logging_config(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -3219,11 +2762,11 @@ def __await__(self): @pytest.mark.asyncio -async def test_list_datasets_async( +async def test_get_logging_config_async( transport: str = "grpc_asyncio", - request_type=maps_platform_datasets.ListDatasetsRequest, + request_type=project_service.GetLoggingConfigRequest, ): - client = MapsPlatformDatasetsV1AlphaAsyncClient( + client = ProjectServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -3233,46 +2776,50 @@ async def test_list_datasets_async( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_datasets), "__call__") as call: + with mock.patch.object( + type(client.transport.get_logging_config), "__call__" + ) as call: # Designate an appropriate return value for the call. 
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - maps_platform_datasets.ListDatasetsResponse( - next_page_token="next_page_token_value", + project.LoggingConfig( + name="name_value", ) ) - response = await client.list_datasets(request) + response = await client.get_logging_config(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = maps_platform_datasets.ListDatasetsRequest() + request = project_service.GetLoggingConfigRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListDatasetsAsyncPager) - assert response.next_page_token == "next_page_token_value" + assert isinstance(response, project.LoggingConfig) + assert response.name == "name_value" @pytest.mark.asyncio -async def test_list_datasets_async_from_dict(): - await test_list_datasets_async(request_type=dict) +async def test_get_logging_config_async_from_dict(): + await test_get_logging_config_async(request_type=dict) -def test_list_datasets_field_headers(): - client = MapsPlatformDatasetsV1AlphaClient( +def test_get_logging_config_field_headers(): + client = ProjectServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = maps_platform_datasets.ListDatasetsRequest() + request = project_service.GetLoggingConfigRequest() - request.parent = "parent_value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_datasets), "__call__") as call: - call.return_value = maps_platform_datasets.ListDatasetsResponse() - client.list_datasets(request) + with mock.patch.object( + type(client.transport.get_logging_config), "__call__" + ) as call: + call.return_value = project.LoggingConfig() + client.get_logging_config(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -3283,28 +2830,30 @@ def test_list_datasets_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "parent=parent_value", + "name=name_value", ) in kw["metadata"] @pytest.mark.asyncio -async def test_list_datasets_field_headers_async(): - client = MapsPlatformDatasetsV1AlphaAsyncClient( +async def test_get_logging_config_field_headers_async(): + client = ProjectServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = maps_platform_datasets.ListDatasetsRequest() + request = project_service.GetLoggingConfigRequest() - request.parent = "parent_value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_datasets), "__call__") as call: + with mock.patch.object( + type(client.transport.get_logging_config), "__call__" + ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - maps_platform_datasets.ListDatasetsResponse() + project.LoggingConfig() ) - await client.list_datasets(request) + await client.get_logging_config(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) @@ -3315,291 +2864,105 @@ async def test_list_datasets_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "parent=parent_value", + "name=name_value", ) in kw["metadata"] -def test_list_datasets_flattened(): - client = MapsPlatformDatasetsV1AlphaClient( +def test_get_logging_config_flattened(): + client = ProjectServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_datasets), "__call__") as call: + with mock.patch.object( + type(client.transport.get_logging_config), "__call__" + ) as call: # Designate an appropriate return value for the call. - call.return_value = maps_platform_datasets.ListDatasetsResponse() + call.return_value = project.LoggingConfig() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.list_datasets( - parent="parent_value", + client.get_logging_config( + name="name_value", ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" + arg = args[0].name + mock_val = "name_value" assert arg == mock_val -def test_list_datasets_flattened_error(): - client = MapsPlatformDatasetsV1AlphaClient( +def test_get_logging_config_flattened_error(): + client = ProjectServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.list_datasets( - maps_platform_datasets.ListDatasetsRequest(), - parent="parent_value", + client.get_logging_config( + project_service.GetLoggingConfigRequest(), + name="name_value", ) @pytest.mark.asyncio -async def test_list_datasets_flattened_async(): - client = MapsPlatformDatasetsV1AlphaAsyncClient( +async def test_get_logging_config_flattened_async(): + client = ProjectServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_datasets), "__call__") as call: + with mock.patch.object( + type(client.transport.get_logging_config), "__call__" + ) as call: # Designate an appropriate return value for the call. - call.return_value = maps_platform_datasets.ListDatasetsResponse() + call.return_value = project.LoggingConfig() call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - maps_platform_datasets.ListDatasetsResponse() + project.LoggingConfig() ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.list_datasets( - parent="parent_value", + response = await client.get_logging_config( + name="name_value", ) # Establish that the underlying call was made with the expected # request object values. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" + arg = args[0].name + mock_val = "name_value" assert arg == mock_val @pytest.mark.asyncio -async def test_list_datasets_flattened_error_async(): - client = MapsPlatformDatasetsV1AlphaAsyncClient( +async def test_get_logging_config_flattened_error_async(): + client = ProjectServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.list_datasets( - maps_platform_datasets.ListDatasetsRequest(), - parent="parent_value", - ) - - -def test_list_datasets_pager(transport_name: str = "grpc"): - client = MapsPlatformDatasetsV1AlphaClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_datasets), "__call__") as call: - # Set the response to a series of pages. - call.side_effect = ( - maps_platform_datasets.ListDatasetsResponse( - datasets=[ - dataset.Dataset(), - dataset.Dataset(), - dataset.Dataset(), - ], - next_page_token="abc", - ), - maps_platform_datasets.ListDatasetsResponse( - datasets=[], - next_page_token="def", - ), - maps_platform_datasets.ListDatasetsResponse( - datasets=[ - dataset.Dataset(), - ], - next_page_token="ghi", - ), - maps_platform_datasets.ListDatasetsResponse( - datasets=[ - dataset.Dataset(), - dataset.Dataset(), - ], - ), - RuntimeError, - ) - - metadata = () - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), - ) - pager = client.list_datasets(request={}) - - assert pager._metadata == metadata - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, dataset.Dataset) for i in results) - - -def test_list_datasets_pages(transport_name: str = "grpc"): - client = MapsPlatformDatasetsV1AlphaClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_datasets), "__call__") as call: - # Set the response to a series of pages. - call.side_effect = ( - maps_platform_datasets.ListDatasetsResponse( - datasets=[ - dataset.Dataset(), - dataset.Dataset(), - dataset.Dataset(), - ], - next_page_token="abc", - ), - maps_platform_datasets.ListDatasetsResponse( - datasets=[], - next_page_token="def", - ), - maps_platform_datasets.ListDatasetsResponse( - datasets=[ - dataset.Dataset(), - ], - next_page_token="ghi", - ), - maps_platform_datasets.ListDatasetsResponse( - datasets=[ - dataset.Dataset(), - dataset.Dataset(), - ], - ), - RuntimeError, - ) - pages = list(client.list_datasets(request={}).pages) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token - - -@pytest.mark.asyncio -async def test_list_datasets_async_pager(): - client = MapsPlatformDatasetsV1AlphaAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_datasets), "__call__", new_callable=mock.AsyncMock - ) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - maps_platform_datasets.ListDatasetsResponse( - datasets=[ - dataset.Dataset(), - dataset.Dataset(), - dataset.Dataset(), - ], - next_page_token="abc", - ), - maps_platform_datasets.ListDatasetsResponse( - datasets=[], - next_page_token="def", - ), - maps_platform_datasets.ListDatasetsResponse( - datasets=[ - dataset.Dataset(), - ], - next_page_token="ghi", - ), - maps_platform_datasets.ListDatasetsResponse( - datasets=[ - dataset.Dataset(), - dataset.Dataset(), - ], - ), - RuntimeError, - ) - async_pager = await client.list_datasets( - request={}, - ) - assert async_pager.next_page_token == "abc" - responses = [] - async for response in async_pager: # pragma: no branch - responses.append(response) - - assert len(responses) == 6 - assert all(isinstance(i, dataset.Dataset) for i in responses) - - -@pytest.mark.asyncio -async def test_list_datasets_async_pages(): - client = MapsPlatformDatasetsV1AlphaAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_datasets), "__call__", new_callable=mock.AsyncMock - ) as call: - # Set the response to a series of pages. - call.side_effect = ( - maps_platform_datasets.ListDatasetsResponse( - datasets=[ - dataset.Dataset(), - dataset.Dataset(), - dataset.Dataset(), - ], - next_page_token="abc", - ), - maps_platform_datasets.ListDatasetsResponse( - datasets=[], - next_page_token="def", - ), - maps_platform_datasets.ListDatasetsResponse( - datasets=[ - dataset.Dataset(), - ], - next_page_token="ghi", - ), - maps_platform_datasets.ListDatasetsResponse( - datasets=[ - dataset.Dataset(), - dataset.Dataset(), - ], - ), - RuntimeError, + await client.get_logging_config( + project_service.GetLoggingConfigRequest(), + name="name_value", ) - pages = [] - # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` - # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 - async for page_ in ( # pragma: no branch - await client.list_datasets(request={}) - ).pages: - pages.append(page_) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token @pytest.mark.parametrize( "request_type", [ - maps_platform_datasets.DeleteDatasetRequest, + project_service.UpdateLoggingConfigRequest, dict, ], ) -def test_delete_dataset(request_type, transport: str = "grpc"): - client = MapsPlatformDatasetsV1AlphaClient( +def test_update_logging_config(request_type, transport: str = "grpc"): + client = ProjectServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -3609,44 +2972,51 @@ def test_delete_dataset(request_type, transport: str = "grpc"): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_dataset), "__call__") as call: + with mock.patch.object( + type(client.transport.update_logging_config), "__call__" + ) as call: # Designate an appropriate return value for the call. - call.return_value = None - response = client.delete_dataset(request) + call.return_value = project.LoggingConfig( + name="name_value", + ) + response = client.update_logging_config(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = maps_platform_datasets.DeleteDatasetRequest() + request = project_service.UpdateLoggingConfigRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert response is None + assert isinstance(response, project.LoggingConfig) + assert response.name == "name_value" -def test_delete_dataset_empty_call(): +def test_update_logging_config_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. - client = MapsPlatformDatasetsV1AlphaClient( + client = ProjectServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_dataset), "__call__") as call: + with mock.patch.object( + type(client.transport.update_logging_config), "__call__" + ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.delete_dataset() + client.update_logging_config() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == maps_platform_datasets.DeleteDatasetRequest() + assert args[0] == project_service.UpdateLoggingConfigRequest() -def test_delete_dataset_non_empty_request_with_auto_populated_field(): +def test_update_logging_config_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. - client = MapsPlatformDatasetsV1AlphaClient( + client = ProjectServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) @@ -3654,28 +3024,26 @@ def test_delete_dataset_non_empty_request_with_auto_populated_field(): # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = maps_platform_datasets.DeleteDatasetRequest( - name="name_value", - ) + request = project_service.UpdateLoggingConfigRequest() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_dataset), "__call__") as call: + with mock.patch.object( + type(client.transport.update_logging_config), "__call__" + ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client.delete_dataset(request=request) + client.update_logging_config(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == maps_platform_datasets.DeleteDatasetRequest( - name="name_value", - ) + assert args[0] == project_service.UpdateLoggingConfigRequest() -def test_delete_dataset_use_cached_wrapped_rpc(): +def test_update_logging_config_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = MapsPlatformDatasetsV1AlphaClient( + client = ProjectServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) @@ -3685,21 +3053,26 @@ def test_delete_dataset_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.delete_dataset in client._transport._wrapped_methods + assert ( + client._transport.update_logging_config + in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.delete_dataset] = mock_rpc + client._transport._wrapped_methods[ + client._transport.update_logging_config + ] = mock_rpc request = {} - client.delete_dataset(request) + client.update_logging_config(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.delete_dataset(request) + client.update_logging_config(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -3707,32 +3080,38 @@ def test_delete_dataset_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_delete_dataset_empty_call_async(): +async def test_update_logging_config_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. - client = MapsPlatformDatasetsV1AlphaAsyncClient( + client = ProjectServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), transport="grpc_asyncio", ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_dataset), "__call__") as call: + with mock.patch.object( + type(client.transport.update_logging_config), "__call__" + ) as call: # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_dataset() + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + project.LoggingConfig( + name="name_value", + ) + ) + response = await client.update_logging_config() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == maps_platform_datasets.DeleteDatasetRequest() + assert args[0] == project_service.UpdateLoggingConfigRequest() @pytest.mark.asyncio -async def test_delete_dataset_async_use_cached_wrapped_rpc( +async def test_update_logging_config_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = MapsPlatformDatasetsV1AlphaAsyncClient( + client = ProjectServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -3743,7 +3122,7 @@ async def test_delete_dataset_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.delete_dataset + client._client._transport.update_logging_config in client._client._transport._wrapped_methods ) @@ -3755,16 +3134,16 @@ def __await__(self): mock_object = AwaitableMock() client._client._transport._wrapped_methods[ - client._client._transport.delete_dataset + client._client._transport.update_logging_config ] = mock_object request = {} - await client.delete_dataset(request) + await client.update_logging_config(request) # Establish that the underlying gRPC stub method was called. assert mock_object.call_count == 1 - await client.delete_dataset(request) + await client.update_logging_config(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -3772,11 +3151,11 @@ def __await__(self): @pytest.mark.asyncio -async def test_delete_dataset_async( +async def test_update_logging_config_async( transport: str = "grpc_asyncio", - request_type=maps_platform_datasets.DeleteDatasetRequest, + request_type=project_service.UpdateLoggingConfigRequest, ): - client = MapsPlatformDatasetsV1AlphaAsyncClient( + client = ProjectServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -3786,41 +3165,50 @@ async def test_delete_dataset_async( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_dataset), "__call__") as call: + with mock.patch.object( + type(client.transport.update_logging_config), "__call__" + ) as call: # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_dataset(request) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + project.LoggingConfig( + name="name_value", + ) + ) + response = await client.update_logging_config(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = maps_platform_datasets.DeleteDatasetRequest() + request = project_service.UpdateLoggingConfigRequest() assert args[0] == request # Establish that the response is the type that we expect. 
- assert response is None + assert isinstance(response, project.LoggingConfig) + assert response.name == "name_value" @pytest.mark.asyncio -async def test_delete_dataset_async_from_dict(): - await test_delete_dataset_async(request_type=dict) +async def test_update_logging_config_async_from_dict(): + await test_update_logging_config_async(request_type=dict) -def test_delete_dataset_field_headers(): - client = MapsPlatformDatasetsV1AlphaClient( +def test_update_logging_config_field_headers(): + client = ProjectServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = maps_platform_datasets.DeleteDatasetRequest() + request = project_service.UpdateLoggingConfigRequest() - request.name = "name_value" + request.logging_config.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_dataset), "__call__") as call: - call.return_value = None - client.delete_dataset(request) + with mock.patch.object( + type(client.transport.update_logging_config), "__call__" + ) as call: + call.return_value = project.LoggingConfig() + client.update_logging_config(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -3831,26 +3219,30 @@ def test_delete_dataset_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "name=name_value", + "logging_config.name=name_value", ) in kw["metadata"] @pytest.mark.asyncio -async def test_delete_dataset_field_headers_async(): - client = MapsPlatformDatasetsV1AlphaAsyncClient( +async def test_update_logging_config_field_headers_async(): + client = ProjectServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = maps_platform_datasets.DeleteDatasetRequest() + request = project_service.UpdateLoggingConfigRequest() - request.name = "name_value" + request.logging_config.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_dataset), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.delete_dataset(request) + with mock.patch.object( + type(client.transport.update_logging_config), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + project.LoggingConfig() + ) + await client.update_logging_config(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -3861,99 +3253,115 @@ async def test_delete_dataset_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "name=name_value", + "logging_config.name=name_value", ) in kw["metadata"] -def test_delete_dataset_flattened(): - client = MapsPlatformDatasetsV1AlphaClient( +def test_update_logging_config_flattened(): + client = ProjectServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_dataset), "__call__") as call: + with mock.patch.object( + type(client.transport.update_logging_config), "__call__" + ) as call: # Designate an appropriate return value for the call. 
- call.return_value = None + call.return_value = project.LoggingConfig() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.delete_dataset( - name="name_value", + client.update_logging_config( + logging_config=project.LoggingConfig(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" + arg = args[0].logging_config + mock_val = project.LoggingConfig(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) assert arg == mock_val -def test_delete_dataset_flattened_error(): - client = MapsPlatformDatasetsV1AlphaClient( +def test_update_logging_config_flattened_error(): + client = ProjectServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.delete_dataset( - maps_platform_datasets.DeleteDatasetRequest(), - name="name_value", + client.update_logging_config( + project_service.UpdateLoggingConfigRequest(), + logging_config=project.LoggingConfig(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) @pytest.mark.asyncio -async def test_delete_dataset_flattened_async(): - client = MapsPlatformDatasetsV1AlphaAsyncClient( +async def test_update_logging_config_flattened_async(): + client = ProjectServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_dataset), "__call__") as call: + with mock.patch.object( + type(client.transport.update_logging_config), "__call__" + ) as call: # Designate an appropriate return value for the call. - call.return_value = None + call.return_value = project.LoggingConfig() - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + project.LoggingConfig() + ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.delete_dataset( - name="name_value", + response = await client.update_logging_config( + logging_config=project.LoggingConfig(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" + arg = args[0].logging_config + mock_val = project.LoggingConfig(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) assert arg == mock_val @pytest.mark.asyncio -async def test_delete_dataset_flattened_error_async(): - client = MapsPlatformDatasetsV1AlphaAsyncClient( +async def test_update_logging_config_flattened_error_async(): + client = ProjectServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - await client.delete_dataset( - maps_platform_datasets.DeleteDatasetRequest(), - name="name_value", + await client.update_logging_config( + project_service.UpdateLoggingConfigRequest(), + logging_config=project.LoggingConfig(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) @pytest.mark.parametrize( "request_type", [ - maps_platform_datasets.DeleteDatasetVersionRequest, + project_service.GetAlertConfigRequest, dict, ], ) -def test_delete_dataset_version(request_type, transport: str = "grpc"): - client = MapsPlatformDatasetsV1AlphaClient( +def test_get_alert_config(request_type, transport: str = "grpc"): + client = ProjectServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -3963,48 +3371,47 @@ def test_delete_dataset_version(request_type, transport: str = "grpc"): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_dataset_version), "__call__" - ) as call: + with mock.patch.object(type(client.transport.get_alert_config), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = None - response = client.delete_dataset_version(request) + call.return_value = project.AlertConfig( + name="name_value", + ) + response = client.get_alert_config(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = maps_platform_datasets.DeleteDatasetVersionRequest() + request = project_service.GetAlertConfigRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert response is None + assert isinstance(response, project.AlertConfig) + assert response.name == "name_value" -def test_delete_dataset_version_empty_call(): +def test_get_alert_config_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. - client = MapsPlatformDatasetsV1AlphaClient( + client = ProjectServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_dataset_version), "__call__" - ) as call: + with mock.patch.object(type(client.transport.get_alert_config), "__call__") as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.delete_dataset_version() + client.get_alert_config() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == maps_platform_datasets.DeleteDatasetVersionRequest() + assert args[0] == project_service.GetAlertConfigRequest() -def test_delete_dataset_version_non_empty_request_with_auto_populated_field(): +def test_get_alert_config_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. - client = MapsPlatformDatasetsV1AlphaClient( + client = ProjectServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) @@ -4012,30 +3419,28 @@ def test_delete_dataset_version_non_empty_request_with_auto_populated_field(): # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. 
- request = maps_platform_datasets.DeleteDatasetVersionRequest( + request = project_service.GetAlertConfigRequest( name="name_value", ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_dataset_version), "__call__" - ) as call: + with mock.patch.object(type(client.transport.get_alert_config), "__call__") as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.delete_dataset_version(request=request) + client.get_alert_config(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == maps_platform_datasets.DeleteDatasetVersionRequest( + assert args[0] == project_service.GetAlertConfigRequest( name="name_value", ) -def test_delete_dataset_version_use_cached_wrapped_rpc(): +def test_get_alert_config_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = MapsPlatformDatasetsV1AlphaClient( + client = ProjectServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) @@ -4045,10 +3450,7 @@ def test_delete_dataset_version_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert ( - client._transport.delete_dataset_version - in client._transport._wrapped_methods - ) + assert client._transport.get_alert_config in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() @@ -4056,15 +3458,15 @@ def test_delete_dataset_version_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.delete_dataset_version + client._transport.get_alert_config ] = mock_rpc request = {} - client.delete_dataset_version(request) + client.get_alert_config(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.delete_dataset_version(request) + client.get_alert_config(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -4072,34 +3474,36 @@ def test_delete_dataset_version_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_delete_dataset_version_empty_call_async(): +async def test_get_alert_config_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. - client = MapsPlatformDatasetsV1AlphaAsyncClient( + client = ProjectServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), transport="grpc_asyncio", ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_dataset_version), "__call__" - ) as call: + with mock.patch.object(type(client.transport.get_alert_config), "__call__") as call: # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_dataset_version() + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + project.AlertConfig( + name="name_value", + ) + ) + response = await client.get_alert_config() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == maps_platform_datasets.DeleteDatasetVersionRequest() + assert args[0] == project_service.GetAlertConfigRequest() @pytest.mark.asyncio -async def test_delete_dataset_version_async_use_cached_wrapped_rpc( +async def test_get_alert_config_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = MapsPlatformDatasetsV1AlphaAsyncClient( + client = ProjectServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -4110,7 +3514,7 @@ async def test_delete_dataset_version_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.delete_dataset_version + client._client._transport.get_alert_config in client._client._transport._wrapped_methods ) @@ -4122,16 +3526,16 @@ def __await__(self): mock_object = AwaitableMock() client._client._transport._wrapped_methods[ - client._client._transport.delete_dataset_version + client._client._transport.get_alert_config ] = mock_object request = {} - await client.delete_dataset_version(request) + await client.get_alert_config(request) # Establish that the underlying gRPC stub method was called. assert mock_object.call_count == 1 - await client.delete_dataset_version(request) + await client.get_alert_config(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -4139,11 +3543,10 @@ def __await__(self): @pytest.mark.asyncio -async def test_delete_dataset_version_async( - transport: str = "grpc_asyncio", - request_type=maps_platform_datasets.DeleteDatasetVersionRequest, +async def test_get_alert_config_async( + transport: str = "grpc_asyncio", request_type=project_service.GetAlertConfigRequest ): - client = MapsPlatformDatasetsV1AlphaAsyncClient( + client = ProjectServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -4153,45 +3556,46 @@ async def test_delete_dataset_version_async( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_dataset_version), "__call__" - ) as call: + with mock.patch.object(type(client.transport.get_alert_config), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_dataset_version(request) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + project.AlertConfig( + name="name_value", + ) + ) + response = await client.get_alert_config(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = maps_platform_datasets.DeleteDatasetVersionRequest() + request = project_service.GetAlertConfigRequest() assert args[0] == request # Establish that the response is the type that we expect. 
- assert response is None + assert isinstance(response, project.AlertConfig) + assert response.name == "name_value" @pytest.mark.asyncio -async def test_delete_dataset_version_async_from_dict(): - await test_delete_dataset_version_async(request_type=dict) +async def test_get_alert_config_async_from_dict(): + await test_get_alert_config_async(request_type=dict) -def test_delete_dataset_version_field_headers(): - client = MapsPlatformDatasetsV1AlphaClient( +def test_get_alert_config_field_headers(): + client = ProjectServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = maps_platform_datasets.DeleteDatasetVersionRequest() + request = project_service.GetAlertConfigRequest() request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_dataset_version), "__call__" - ) as call: - call.return_value = None - client.delete_dataset_version(request) + with mock.patch.object(type(client.transport.get_alert_config), "__call__") as call: + call.return_value = project.AlertConfig() + client.get_alert_config(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -4207,23 +3611,21 @@ def test_delete_dataset_version_field_headers(): @pytest.mark.asyncio -async def test_delete_dataset_version_field_headers_async(): - client = MapsPlatformDatasetsV1AlphaAsyncClient( +async def test_get_alert_config_field_headers_async(): + client = ProjectServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = maps_platform_datasets.DeleteDatasetVersionRequest() + request = project_service.GetAlertConfigRequest() request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_dataset_version), "__call__" - ) as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.delete_dataset_version(request) + with mock.patch.object(type(client.transport.get_alert_config), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(project.AlertConfig()) + await client.get_alert_config(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -4238,20 +3640,18 @@ async def test_delete_dataset_version_field_headers_async(): ) in kw["metadata"] -def test_delete_dataset_version_flattened(): - client = MapsPlatformDatasetsV1AlphaClient( +def test_get_alert_config_flattened(): + client = ProjectServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_dataset_version), "__call__" - ) as call: + with mock.patch.object(type(client.transport.get_alert_config), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = None + call.return_value = project.AlertConfig() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
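# A short sketch of the implicit routing-header behavior checked by the field-headers
# tests above, assuming this module's existing imports. When a request field that is
# part of the URI (here `name`) is set, the client is expected to attach an
# "x-goog-request-params" metadata entry carrying that value.
def _sketch_routing_header_for_get_alert_config():
    client = ProjectServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    request = project_service.GetAlertConfigRequest(name="name_value")
    with mock.patch.object(
        type(client.transport.get_alert_config), "__call__"
    ) as call:
        call.return_value = project.AlertConfig()
        client.get_alert_config(request)
        # The metadata passed to the stub should contain the routing header.
        _, _, kw = call.mock_calls[0]
        assert ("x-goog-request-params", "name=name_value") in kw["metadata"]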
- client.delete_dataset_version( + client.get_alert_config( name="name_value", ) @@ -4264,37 +3664,35 @@ def test_delete_dataset_version_flattened(): assert arg == mock_val -def test_delete_dataset_version_flattened_error(): - client = MapsPlatformDatasetsV1AlphaClient( +def test_get_alert_config_flattened_error(): + client = ProjectServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.delete_dataset_version( - maps_platform_datasets.DeleteDatasetVersionRequest(), + client.get_alert_config( + project_service.GetAlertConfigRequest(), name="name_value", ) @pytest.mark.asyncio -async def test_delete_dataset_version_flattened_async(): - client = MapsPlatformDatasetsV1AlphaAsyncClient( +async def test_get_alert_config_flattened_async(): + client = ProjectServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_dataset_version), "__call__" - ) as call: + with mock.patch.object(type(client.transport.get_alert_config), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = None + call.return_value = project.AlertConfig() - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(project.AlertConfig()) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.delete_dataset_version( + response = await client.get_alert_config( name="name_value", ) @@ -4308,16 +3706,16 @@ async def test_delete_dataset_version_flattened_async(): @pytest.mark.asyncio -async def test_delete_dataset_version_flattened_error_async(): - client = MapsPlatformDatasetsV1AlphaAsyncClient( +async def test_get_alert_config_flattened_error_async(): + client = ProjectServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - await client.delete_dataset_version( - maps_platform_datasets.DeleteDatasetVersionRequest(), + await client.get_alert_config( + project_service.GetAlertConfigRequest(), name="name_value", ) @@ -4325,143 +3723,95 @@ async def test_delete_dataset_version_flattened_error_async(): @pytest.mark.parametrize( "request_type", [ - maps_platform_datasets.CreateDatasetRequest, + project_service.UpdateAlertConfigRequest, dict, ], ) -def test_create_dataset_rest(request_type): - client = MapsPlatformDatasetsV1AlphaClient( +def test_update_alert_config(request_type, transport: str = "grpc"): + client = ProjectServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", + transport=transport, ) - # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1"} - request_init["dataset"] = { - "name": "name_value", - "display_name": "display_name_value", - "description": "description_value", - "version_id": "version_id_value", - "usage": [1], - "local_file_source": {"filename": "filename_value", "file_format": 1}, - "gcs_source": {"input_uri": "input_uri_value", "file_format": 1}, - "status": 1, - "create_time": {"seconds": 751, "nanos": 543}, - "update_time": {}, - "version_create_time": {}, - "version_description": "version_description_value", - } - # The version of a generated dependency at test runtime may differ from the version used during generation. - # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() - # Determine if the message type is proto-plus or protobuf - test_field = maps_platform_datasets.CreateDatasetRequest.meta.fields["dataset"] + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_alert_config), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = project.AlertConfig( + name="name_value", + ) + response = client.update_alert_config(request) - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. - message_fields = [] + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = project_service.UpdateAlertConfigRequest() + assert args[0] == request - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + # Establish that the response is the type that we expect. 
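# A brief sketch of the flattened-argument convention exercised by the *_flattened
# tests above, assuming this module's existing imports. A method accepts either an
# explicit request object or individual keyword arguments, but never both at once.
def _sketch_flattened_vs_request_object():
    client = ProjectServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    with mock.patch.object(
        type(client.transport.get_alert_config), "__call__"
    ) as call:
        call.return_value = project.AlertConfig()
        # Either of these forms is accepted on its own...
        client.get_alert_config(name="name_value")
        client.get_alert_config(
            request=project_service.GetAlertConfigRequest(name="name_value")
        )
        # ...but mixing them raises ValueError before any RPC is attempted.
        with pytest.raises(ValueError):
            client.get_alert_config(
                project_service.GetAlertConfigRequest(name="name_value"),
                name="name_value",
            )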
+ assert isinstance(response, project.AlertConfig) + assert response.name == "name_value" - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] +def test_update_alert_config_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ProjectServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) - subfields_not_in_runtime = [] + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_alert_config), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.update_alert_config() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == project_service.UpdateAlertConfigRequest() - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["dataset"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - { - "field": field, - "subfield": subfield, - "is_repeated": is_repeated, - } - ) +def test_update_alert_config_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = ProjectServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["dataset"][field])): - del request_init["dataset"][field][i][subfield] - else: - del request_init["dataset"][field][subfield] - request = request_type(**request_init) + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = project_service.UpdateAlertConfigRequest() - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. 
- return_value = gmm_dataset.Dataset( - name="name_value", - display_name="display_name_value", - description="description_value", - version_id="version_id_value", - usage=[gmm_dataset.Usage.USAGE_DATA_DRIVEN_STYLING], - status=gmm_dataset.State.STATE_IMPORTING, - version_description="version_description_value", + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_alert_config), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = gmm_dataset.Dataset.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - response = client.create_dataset(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, gmm_dataset.Dataset) - assert response.name == "name_value" - assert response.display_name == "display_name_value" - assert response.description == "description_value" - assert response.version_id == "version_id_value" - assert response.usage == [gmm_dataset.Usage.USAGE_DATA_DRIVEN_STYLING] - assert response.status == gmm_dataset.State.STATE_IMPORTING - assert response.version_description == "version_description_value" + client.update_alert_config(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == project_service.UpdateAlertConfigRequest() -def test_create_dataset_rest_use_cached_wrapped_rpc(): +def test_update_alert_config_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = MapsPlatformDatasetsV1AlphaClient( + client = ProjectServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", + transport="grpc", ) # Should wrap all calls on client creation @@ -4469,402 +3819,350 @@ def test_create_dataset_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.create_dataset in client._transport._wrapped_methods + assert ( + client._transport.update_alert_config in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.create_dataset] = mock_rpc - + client._transport._wrapped_methods[ + client._transport.update_alert_config + ] = mock_rpc request = {} - client.create_dataset(request) + client.update_alert_config(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.create_dataset(request) + client.update_alert_config(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_create_dataset_rest_required_fields( - request_type=maps_platform_datasets.CreateDatasetRequest, -): - transport_class = transports.MapsPlatformDatasetsV1AlphaRestTransport - - request_init = {} - request_init["parent"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) +@pytest.mark.asyncio +async def test_update_alert_config_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ProjectServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", ) - # verify fields with default values are dropped - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).create_dataset._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_alert_config), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + project.AlertConfig( + name="name_value", + ) + ) + response = await client.update_alert_config() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == project_service.UpdateAlertConfigRequest() - jsonified_request["parent"] = "parent_value" - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).create_dataset._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) +@pytest.mark.asyncio +async def test_update_alert_config_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = ProjectServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() - client = MapsPlatformDatasetsV1AlphaClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type(**request_init) + # Ensure method has been cached + assert ( + client._client._transport.update_alert_config + in client._client._transport._wrapped_methods + ) - # Designate an appropriate value for the returned response. - return_value = gmm_dataset.Dataset() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. 
- with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "post", - "query_params": pb_request, - } - transcode_result["body"] = pb_request - transcode.return_value = transcode_result + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) - response_value = Response() - response_value.status_code = 200 + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[ + client._client._transport.update_alert_config + ] = mock_object - # Convert return value to protobuf type - return_value = gmm_dataset.Dataset.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) + request = {} + await client.update_alert_config(request) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 - response = client.create_dataset(request) + await client.update_alert_config(request) - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 -def test_create_dataset_rest_unset_required_fields(): - transport = transports.MapsPlatformDatasetsV1AlphaRestTransport( - credentials=ga_credentials.AnonymousCredentials +@pytest.mark.asyncio +async def test_update_alert_config_async( + transport: str = "grpc_asyncio", + request_type=project_service.UpdateAlertConfigRequest, +): + client = ProjectServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - unset_fields = transport.create_dataset._get_unset_required_fields({}) - assert set(unset_fields) == ( - set(()) - & set( - ( - "parent", - "dataset", + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_alert_config), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + project.AlertConfig( + name="name_value", ) ) - ) + response = await client.update_alert_config(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = project_service.UpdateAlertConfigRequest() + assert args[0] == request -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_create_dataset_rest_interceptors(null_interceptor): - transport = transports.MapsPlatformDatasetsV1AlphaRestTransport( + # Establish that the response is the type that we expect. 
+ assert isinstance(response, project.AlertConfig) + assert response.name == "name_value" + + +@pytest.mark.asyncio +async def test_update_alert_config_async_from_dict(): + await test_update_alert_config_async(request_type=dict) + + +def test_update_alert_config_field_headers(): + client = ProjectServiceClient( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.MapsPlatformDatasetsV1AlphaRestInterceptor(), ) - client = MapsPlatformDatasetsV1AlphaClient(transport=transport) - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - transports.MapsPlatformDatasetsV1AlphaRestInterceptor, "post_create_dataset" - ) as post, mock.patch.object( - transports.MapsPlatformDatasetsV1AlphaRestInterceptor, "pre_create_dataset" - ) as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = maps_platform_datasets.CreateDatasetRequest.pb( - maps_platform_datasets.CreateDatasetRequest() - ) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = gmm_dataset.Dataset.to_json(gmm_dataset.Dataset()) + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = project_service.UpdateAlertConfigRequest() - request = maps_platform_datasets.CreateDatasetRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = gmm_dataset.Dataset() + request.alert_config.name = "name_value" - client.create_dataset( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], - ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_alert_config), "__call__" + ) as call: + call.return_value = project.AlertConfig() + client.update_alert_config(request) - pre.assert_called_once() - post.assert_called_once() + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "alert_config.name=name_value", + ) in kw["metadata"] -def test_create_dataset_rest_bad_request( - transport: str = "rest", request_type=maps_platform_datasets.CreateDatasetRequest -): - client = MapsPlatformDatasetsV1AlphaClient( + +@pytest.mark.asyncio +async def test_update_alert_config_field_headers_async(): + client = ProjectServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, ) - # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1"} - request = request_type(**request_init) + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = project_service.UpdateAlertConfigRequest() - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.create_dataset(request) + request.alert_config.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_alert_config), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(project.AlertConfig()) + await client.update_alert_config(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "alert_config.name=name_value", + ) in kw["metadata"] -def test_create_dataset_rest_flattened(): - client = MapsPlatformDatasetsV1AlphaClient( +def test_update_alert_config_flattened(): + client = ProjectServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", ) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = gmm_dataset.Dataset() + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_alert_config), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = project.AlertConfig() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.update_alert_config( + alert_config=project.AlertConfig(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) - # get arguments that satisfy an http rule for this method - sample_request = {"parent": "projects/sample1"} + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].alert_config + mock_val = project.AlertConfig(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + assert arg == mock_val - # get truthy value for each flattened field - mock_args = dict( - parent="parent_value", - dataset=gmm_dataset.Dataset(name="name_value"), + +def test_update_alert_config_flattened_error(): + client = ProjectServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
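# A compact sketch of a partial update via FieldMask, mirroring the flattened call in
# test_update_alert_config_flattened above and assuming this module's existing imports
# (field_mask_pb2 comes from google.protobuf). The mask path "paths_value" is the same
# placeholder the tests use; only the paths listed in the mask are meant to be updated
# server-side, and the RPC is mocked here.
def _sketch_update_alert_config_with_field_mask():
    client = ProjectServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    with mock.patch.object(
        type(client.transport.update_alert_config), "__call__"
    ) as call:
        call.return_value = project.AlertConfig(name="name_value")
        response = client.update_alert_config(
            alert_config=project.AlertConfig(name="name_value"),
            update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]),
        )
        assert response.name == "name_value"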
+ with pytest.raises(ValueError): + client.update_alert_config( + project_service.UpdateAlertConfigRequest(), + alert_config=project.AlertConfig(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) - mock_args.update(sample_request) - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = gmm_dataset.Dataset.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - client.create_dataset(**mock_args) +@pytest.mark.asyncio +async def test_update_alert_config_flattened_async(): + client = ProjectServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_alert_config), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = project.AlertConfig() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(project.AlertConfig()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.update_alert_config( + alert_config=project.AlertConfig(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) # Establish that the underlying call was made with the expected # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1alpha/{parent=projects/*}/datasets" % client.transport._host, args[1] - ) + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].alert_config + mock_val = project.AlertConfig(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + assert arg == mock_val -def test_create_dataset_rest_flattened_error(transport: str = "rest"): - client = MapsPlatformDatasetsV1AlphaClient( +@pytest.mark.asyncio +async def test_update_alert_config_flattened_error_async(): + client = ProjectServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, ) # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.create_dataset( - maps_platform_datasets.CreateDatasetRequest(), - parent="parent_value", - dataset=gmm_dataset.Dataset(name="name_value"), + await client.update_alert_config( + project_service.UpdateAlertConfigRequest(), + alert_config=project.AlertConfig(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) -def test_create_dataset_rest_error(): - client = MapsPlatformDatasetsV1AlphaClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - - @pytest.mark.parametrize( "request_type", [ - maps_platform_datasets.UpdateDatasetMetadataRequest, + project_service.GetProjectRequest, dict, ], ) -def test_update_dataset_metadata_rest(request_type): - client = MapsPlatformDatasetsV1AlphaClient( +def test_get_project_rest(request_type): + client = ProjectServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = {"dataset": {"name": "projects/sample1/datasets/sample2"}} - request_init["dataset"] = { - "name": "projects/sample1/datasets/sample2", - "display_name": "display_name_value", - "description": "description_value", - "version_id": "version_id_value", - "usage": [1], - "local_file_source": {"filename": "filename_value", "file_format": 1}, - "gcs_source": {"input_uri": "input_uri_value", "file_format": 1}, - "status": 1, - "create_time": {"seconds": 751, "nanos": 543}, - "update_time": {}, - "version_create_time": {}, - "version_description": "version_description_value", - } - # The version of a generated dependency at test runtime may differ from the version used during generation. - # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = maps_platform_datasets.UpdateDatasetMetadataRequest.meta.fields[ - "dataset" - ] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. 
- message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["dataset"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - { - "field": field, - "subfield": subfield, - "is_repeated": is_repeated, - } - ) - - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["dataset"][field])): - del request_init["dataset"][field][i][subfield] - else: - del request_init["dataset"][field][subfield] + request_init = {"name": "projects/sample1/retailProject"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = gmm_dataset.Dataset( + return_value = project.Project( name="name_value", - display_name="display_name_value", - description="description_value", - version_id="version_id_value", - usage=[gmm_dataset.Usage.USAGE_DATA_DRIVEN_STYLING], - status=gmm_dataset.State.STATE_IMPORTING, - version_description="version_description_value", + enrolled_solutions=[common.SolutionType.SOLUTION_TYPE_RECOMMENDATION], ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = gmm_dataset.Dataset.pb(return_value) + return_value = project.Project.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.update_dataset_metadata(request) + response = client.get_project(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, gmm_dataset.Dataset) + assert isinstance(response, project.Project) assert response.name == "name_value" - assert response.display_name == "display_name_value" - assert response.description == "description_value" - assert response.version_id == "version_id_value" - assert response.usage == [gmm_dataset.Usage.USAGE_DATA_DRIVEN_STYLING] - assert response.status == gmm_dataset.State.STATE_IMPORTING - assert response.version_description == "version_description_value" + assert response.enrolled_solutions == [ + common.SolutionType.SOLUTION_TYPE_RECOMMENDATION + ] -def test_update_dataset_metadata_rest_use_cached_wrapped_rpc(): +def test_get_project_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = MapsPlatformDatasetsV1AlphaClient( + client = ProjectServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) @@ -4874,39 +4172,35 @@ def test_update_dataset_metadata_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert ( - client._transport.update_dataset_metadata - in client._transport._wrapped_methods - ) + assert client._transport.get_project in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.update_dataset_metadata - ] = mock_rpc + client._transport._wrapped_methods[client._transport.get_project] = mock_rpc request = {} - client.update_dataset_metadata(request) + client.get_project(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.update_dataset_metadata(request) + client.get_project(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_update_dataset_metadata_rest_required_fields( - request_type=maps_platform_datasets.UpdateDatasetMetadataRequest, +def test_get_project_rest_required_fields( + request_type=project_service.GetProjectRequest, ): - transport_class = transports.MapsPlatformDatasetsV1AlphaRestTransport + transport_class = transports.ProjectServiceRestTransport request_init = {} + request_init["name"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -4917,28 +4211,30 @@ def test_update_dataset_metadata_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).update_dataset_metadata._get_unset_required_fields(jsonified_request) + ).get_project._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present + jsonified_request["name"] = "name_value" + unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).update_dataset_metadata._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. 
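# A minimal sketch of how the REST tests above fake an HTTP response, assuming this
# module's existing imports (Response from requests, json_format from google.protobuf).
# The proto-plus message is converted to its underlying protobuf form, serialized to
# JSON, and handed back as the response body that the client then deserializes.
def _sketch_fake_rest_response_body():
    return_value = project.Project(name="name_value")
    response_value = Response()
    response_value.status_code = 200
    # Convert the proto-plus wrapper to the raw protobuf message before serializing.
    json_return_value = json_format.MessageToJson(project.Project.pb(return_value))
    response_value._content = json_return_value.encode("UTF-8")
    assert response_value.json()["name"] == "name_value"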
- assert not set(unset_fields) - set(("update_mask",)) + ).get_project._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" - client = MapsPlatformDatasetsV1AlphaClient( + client = ProjectServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = gmm_dataset.Dataset() + return_value = project.Project() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -4950,62 +4246,59 @@ def test_update_dataset_metadata_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "patch", + "method": "get", "query_params": pb_request, } - transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = gmm_dataset.Dataset.pb(return_value) + return_value = project.Project.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.update_dataset_metadata(request) + response = client.get_project(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_update_dataset_metadata_rest_unset_required_fields(): - transport = transports.MapsPlatformDatasetsV1AlphaRestTransport( +def test_get_project_rest_unset_required_fields(): + transport = transports.ProjectServiceRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.update_dataset_metadata._get_unset_required_fields({}) - assert set(unset_fields) == (set(("updateMask",)) & set(("dataset",))) + unset_fields = transport.get_project._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_update_dataset_metadata_rest_interceptors(null_interceptor): - transport = transports.MapsPlatformDatasetsV1AlphaRestTransport( +def test_get_project_rest_interceptors(null_interceptor): + transport = transports.ProjectServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None if null_interceptor - else transports.MapsPlatformDatasetsV1AlphaRestInterceptor(), + else transports.ProjectServiceRestInterceptor(), ) - client = MapsPlatformDatasetsV1AlphaClient(transport=transport) + client = ProjectServiceClient(transport=transport) with mock.patch.object( type(client.transport._session), "request" ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.MapsPlatformDatasetsV1AlphaRestInterceptor, - "post_update_dataset_metadata", + transports.ProjectServiceRestInterceptor, "post_get_project" ) as post, mock.patch.object( - transports.MapsPlatformDatasetsV1AlphaRestInterceptor, - "pre_update_dataset_metadata", + transports.ProjectServiceRestInterceptor, "pre_get_project" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = maps_platform_datasets.UpdateDatasetMetadataRequest.pb( - 
maps_platform_datasets.UpdateDatasetMetadataRequest() + pb_message = project_service.GetProjectRequest.pb( + project_service.GetProjectRequest() ) transcode.return_value = { "method": "post", @@ -5017,17 +4310,17 @@ def test_update_dataset_metadata_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = gmm_dataset.Dataset.to_json(gmm_dataset.Dataset()) + req.return_value._content = project.Project.to_json(project.Project()) - request = maps_platform_datasets.UpdateDatasetMetadataRequest() + request = project_service.GetProjectRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = gmm_dataset.Dataset() + post.return_value = project.Project() - client.update_dataset_metadata( + client.get_project( request, metadata=[ ("key", "val"), @@ -5039,17 +4332,16 @@ def test_update_dataset_metadata_rest_interceptors(null_interceptor): post.assert_called_once() -def test_update_dataset_metadata_rest_bad_request( - transport: str = "rest", - request_type=maps_platform_datasets.UpdateDatasetMetadataRequest, +def test_get_project_rest_bad_request( + transport: str = "rest", request_type=project_service.GetProjectRequest ): - client = MapsPlatformDatasetsV1AlphaClient( + client = ProjectServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # send a request that will satisfy transcoding - request_init = {"dataset": {"name": "projects/sample1/datasets/sample2"}} + request_init = {"name": "projects/sample1/retailProject"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -5061,11 +4353,11 @@ def test_update_dataset_metadata_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.update_dataset_metadata(request) + client.get_project(request) -def test_update_dataset_metadata_rest_flattened(): - client = MapsPlatformDatasetsV1AlphaClient( +def test_get_project_rest_flattened(): + client = ProjectServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) @@ -5073,15 +4365,14 @@ def test_update_dataset_metadata_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
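# A hedged sketch of a user-supplied REST interceptor, based on the hook names patched
# in the interceptor test above (pre_get_project / post_get_project). The hook
# signatures are assumed from how the mocks are used there: the pre hook returns
# (request, metadata) and the post hook returns the response.
class _PassthroughProjectServiceRestInterceptor(
    transports.ProjectServiceRestInterceptor
):
    def pre_get_project(self, request, metadata):
        # Inspect or adjust the request and metadata before the HTTP call is made.
        return request, metadata

    def post_get_project(self, response):
        # Inspect or transform the deserialized response after the HTTP call.
        return response

# It would be wired in the same way the test constructs its transport:
#     transports.ProjectServiceRestTransport(
#         credentials=ga_credentials.AnonymousCredentials(),
#         interceptor=_PassthroughProjectServiceRestInterceptor(),
#     )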
- return_value = gmm_dataset.Dataset() + return_value = project.Project() # get arguments that satisfy an http rule for this method - sample_request = {"dataset": {"name": "projects/sample1/datasets/sample2"}} + sample_request = {"name": "projects/sample1/retailProject"} # get truthy value for each flattened field mock_args = dict( - dataset=gmm_dataset.Dataset(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + name="name_value", ) mock_args.update(sample_request) @@ -5089,25 +4380,25 @@ def test_update_dataset_metadata_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = gmm_dataset.Dataset.pb(return_value) + return_value = project.Project.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.update_dataset_metadata(**mock_args) + client.get_project(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1alpha/{dataset.name=projects/*/datasets/*}" % client.transport._host, + "%s/v2alpha/{name=projects/*/retailProject}" % client.transport._host, args[1], ) -def test_update_dataset_metadata_rest_flattened_error(transport: str = "rest"): - client = MapsPlatformDatasetsV1AlphaClient( +def test_get_project_rest_flattened_error(transport: str = "rest"): + client = ProjectServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -5115,15 +4406,14 @@ def test_update_dataset_metadata_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.update_dataset_metadata( - maps_platform_datasets.UpdateDatasetMetadataRequest(), - dataset=gmm_dataset.Dataset(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + client.get_project( + project_service.GetProjectRequest(), + name="name_value", ) -def test_update_dataset_metadata_rest_error(): - client = MapsPlatformDatasetsV1AlphaClient( +def test_get_project_rest_error(): + client = ProjectServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -5131,60 +4421,52 @@ def test_update_dataset_metadata_rest_error(): @pytest.mark.parametrize( "request_type", [ - maps_platform_datasets.GetDatasetRequest, + project_service.AcceptTermsRequest, dict, ], ) -def test_get_dataset_rest(request_type): - client = MapsPlatformDatasetsV1AlphaClient( +def test_accept_terms_rest(request_type): + client = ProjectServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/datasets/sample2"} + request_init = {"project": "projects/sample1/retailProject"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
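# A small sketch of the URI-template check used by the REST flattened tests above,
# assuming this module's existing imports (path_template comes from google.api_core).
# The host below is a placeholder, not a real endpoint.
def _sketch_path_template_validate():
    host = "https://example.googleapis.com"  # placeholder host, assumption only
    template = "%s/v2alpha/{name=projects/*/retailProject}" % host
    assert path_template.validate(
        template, "%s/v2alpha/projects/sample1/retailProject" % host
    )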
- return_value = dataset.Dataset( + return_value = gcr_project.Project( name="name_value", - display_name="display_name_value", - description="description_value", - version_id="version_id_value", - usage=[dataset.Usage.USAGE_DATA_DRIVEN_STYLING], - status=dataset.State.STATE_IMPORTING, - version_description="version_description_value", + enrolled_solutions=[common.SolutionType.SOLUTION_TYPE_RECOMMENDATION], ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = dataset.Dataset.pb(return_value) + return_value = gcr_project.Project.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.get_dataset(request) + response = client.accept_terms(request) # Establish that the response is the type that we expect. - assert isinstance(response, dataset.Dataset) + assert isinstance(response, gcr_project.Project) assert response.name == "name_value" - assert response.display_name == "display_name_value" - assert response.description == "description_value" - assert response.version_id == "version_id_value" - assert response.usage == [dataset.Usage.USAGE_DATA_DRIVEN_STYLING] - assert response.status == dataset.State.STATE_IMPORTING - assert response.version_description == "version_description_value" + assert response.enrolled_solutions == [ + common.SolutionType.SOLUTION_TYPE_RECOMMENDATION + ] -def test_get_dataset_rest_use_cached_wrapped_rpc(): +def test_accept_terms_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = MapsPlatformDatasetsV1AlphaClient( + client = ProjectServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) @@ -5194,35 +4476,35 @@ def test_get_dataset_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.get_dataset in client._transport._wrapped_methods + assert client._transport.accept_terms in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.get_dataset] = mock_rpc + client._transport._wrapped_methods[client._transport.accept_terms] = mock_rpc request = {} - client.get_dataset(request) + client.accept_terms(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.get_dataset(request) + client.accept_terms(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_get_dataset_rest_required_fields( - request_type=maps_platform_datasets.GetDatasetRequest, +def test_accept_terms_rest_required_fields( + request_type=project_service.AcceptTermsRequest, ): - transport_class = transports.MapsPlatformDatasetsV1AlphaRestTransport + transport_class = transports.ProjectServiceRestTransport request_init = {} - request_init["name"] = "" + request_init["project"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -5233,32 +4515,30 @@ def test_get_dataset_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_dataset._get_unset_required_fields(jsonified_request) + ).accept_terms._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["name"] = "name_value" + jsonified_request["project"] = "project_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_dataset._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("published_usage",)) + ).accept_terms._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" + assert "project" in jsonified_request + assert jsonified_request["project"] == "project_value" - client = MapsPlatformDatasetsV1AlphaClient( + client = ProjectServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = dataset.Dataset() + return_value = gcr_project.Project() # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -5270,59 +4550,60 @@ def test_get_dataset_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "get", + "method": "post", "query_params": pb_request, } + transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = dataset.Dataset.pb(return_value) + return_value = gcr_project.Project.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.get_dataset(request) + response = client.accept_terms(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_get_dataset_rest_unset_required_fields(): - transport = transports.MapsPlatformDatasetsV1AlphaRestTransport( +def test_accept_terms_rest_unset_required_fields(): + transport = transports.ProjectServiceRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.get_dataset._get_unset_required_fields({}) - assert set(unset_fields) == (set(("publishedUsage",)) & set(("name",))) + unset_fields = transport.accept_terms._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("project",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_dataset_rest_interceptors(null_interceptor): - transport = transports.MapsPlatformDatasetsV1AlphaRestTransport( +def test_accept_terms_rest_interceptors(null_interceptor): + transport = transports.ProjectServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None if null_interceptor - else transports.MapsPlatformDatasetsV1AlphaRestInterceptor(), + else transports.ProjectServiceRestInterceptor(), ) - client = MapsPlatformDatasetsV1AlphaClient(transport=transport) + client = ProjectServiceClient(transport=transport) with mock.patch.object( type(client.transport._session), "request" ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.MapsPlatformDatasetsV1AlphaRestInterceptor, "post_get_dataset" + transports.ProjectServiceRestInterceptor, "post_accept_terms" ) as post, mock.patch.object( - transports.MapsPlatformDatasetsV1AlphaRestInterceptor, "pre_get_dataset" + transports.ProjectServiceRestInterceptor, "pre_accept_terms" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = maps_platform_datasets.GetDatasetRequest.pb( - maps_platform_datasets.GetDatasetRequest() + pb_message = project_service.AcceptTermsRequest.pb( + project_service.AcceptTermsRequest() ) transcode.return_value = { "method": "post", @@ -5334,17 +4615,17 @@ def test_get_dataset_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = dataset.Dataset.to_json(dataset.Dataset()) + req.return_value._content = gcr_project.Project.to_json(gcr_project.Project()) - request = maps_platform_datasets.GetDatasetRequest() + request = project_service.AcceptTermsRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = dataset.Dataset() + post.return_value = 
gcr_project.Project() - client.get_dataset( + client.accept_terms( request, metadata=[ ("key", "val"), @@ -5356,16 +4637,16 @@ def test_get_dataset_rest_interceptors(null_interceptor): post.assert_called_once() -def test_get_dataset_rest_bad_request( - transport: str = "rest", request_type=maps_platform_datasets.GetDatasetRequest +def test_accept_terms_rest_bad_request( + transport: str = "rest", request_type=project_service.AcceptTermsRequest ): - client = MapsPlatformDatasetsV1AlphaClient( + client = ProjectServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/datasets/sample2"} + request_init = {"project": "projects/sample1/retailProject"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -5377,11 +4658,11 @@ def test_get_dataset_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.get_dataset(request) + client.accept_terms(request) -def test_get_dataset_rest_flattened(): - client = MapsPlatformDatasetsV1AlphaClient( +def test_accept_terms_rest_flattened(): + client = ProjectServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) @@ -5389,14 +4670,14 @@ def test_get_dataset_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = dataset.Dataset() + return_value = gcr_project.Project() # get arguments that satisfy an http rule for this method - sample_request = {"name": "projects/sample1/datasets/sample2"} + sample_request = {"project": "projects/sample1/retailProject"} # get truthy value for each flattened field mock_args = dict( - name="name_value", + project="project_value", ) mock_args.update(sample_request) @@ -5404,24 +4685,26 @@ def test_get_dataset_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = dataset.Dataset.pb(return_value) + return_value = gcr_project.Project.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.get_dataset(**mock_args) + client.accept_terms(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1alpha/{name=projects/*/datasets/*}" % client.transport._host, args[1] + "%s/v2alpha/{project=projects/*/retailProject}:acceptTerms" + % client.transport._host, + args[1], ) -def test_get_dataset_rest_flattened_error(transport: str = "rest"): - client = MapsPlatformDatasetsV1AlphaClient( +def test_accept_terms_rest_flattened_error(transport: str = "rest"): + client = ProjectServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -5429,14 +4712,14 @@ def test_get_dataset_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
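The flattened-call tests assert the transcoded URI against the method's http rule, e.g. `%s/v2alpha/{project=projects/*/retailProject}:acceptTerms` above, via `path_template.validate`. A toy validator that captures the idea (each `*` matches exactly one path segment); the host is a placeholder and the handling is deliberately simplified compared to `google.api_core`:

```python
import re


def matches_http_rule(template, uri):
    """Toy check that a transcoded URI satisfies an http rule template."""
    # Reduce each {name=pattern} capture to its pattern, then let every `*`
    # match exactly one path segment.
    pattern = re.sub(r"\{[^}=]+=([^}]+)\}", r"\1", template)
    pattern = re.escape(pattern).replace(r"\*", r"[^/]+")
    return re.fullmatch(pattern, uri) is not None


host = "https://example.googleapis.com"  # placeholder host
template = "%s/v2alpha/{project=projects/*/retailProject}:acceptTerms" % host

assert matches_http_rule(template, host + "/v2alpha/projects/sample1/retailProject:acceptTerms")
assert not matches_http_rule(template, host + "/v2alpha/projects/a/b/retailProject:acceptTerms")
```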
with pytest.raises(ValueError): - client.get_dataset( - maps_platform_datasets.GetDatasetRequest(), - name="name_value", + client.accept_terms( + project_service.AcceptTermsRequest(), + project="project_value", ) -def test_get_dataset_rest_error(): - client = MapsPlatformDatasetsV1AlphaClient( +def test_accept_terms_rest_error(): + client = ProjectServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -5444,50 +4727,43 @@ def test_get_dataset_rest_error(): @pytest.mark.parametrize( "request_type", [ - maps_platform_datasets.ListDatasetVersionsRequest, + project_service.EnrollSolutionRequest, dict, ], ) -def test_list_dataset_versions_rest(request_type): - client = MapsPlatformDatasetsV1AlphaClient( +def test_enroll_solution_rest(request_type): + client = ProjectServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/datasets/sample2"} + request_init = {"project": "projects/sample1"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = maps_platform_datasets.ListDatasetVersionsResponse( - next_page_token="next_page_token_value", - ) + return_value = operations_pb2.Operation(name="operations/spam") # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - # Convert return value to protobuf type - return_value = maps_platform_datasets.ListDatasetVersionsResponse.pb( - return_value - ) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.list_dataset_versions(request) + response = client.enroll_solution(request) # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListDatasetVersionsPager) - assert response.next_page_token == "next_page_token_value" + assert response.operation.name == "operations/spam" -def test_list_dataset_versions_rest_use_cached_wrapped_rpc(): +def test_enroll_solution_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = MapsPlatformDatasetsV1AlphaClient( + client = ProjectServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) @@ -5497,40 +4773,39 @@ def test_list_dataset_versions_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert ( - client._transport.list_dataset_versions - in client._transport._wrapped_methods - ) + assert client._transport.enroll_solution in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.list_dataset_versions - ] = mock_rpc + client._transport._wrapped_methods[client._transport.enroll_solution] = mock_rpc request = {} - client.list_dataset_versions(request) + client.enroll_solution(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.list_dataset_versions(request) + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.enroll_solution(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_list_dataset_versions_rest_required_fields( - request_type=maps_platform_datasets.ListDatasetVersionsRequest, +def test_enroll_solution_rest_required_fields( + request_type=project_service.EnrollSolutionRequest, ): - transport_class = transports.MapsPlatformDatasetsV1AlphaRestTransport + transport_class = transports.ProjectServiceRestTransport request_init = {} - request_init["name"] = "" + request_init["project"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -5541,37 +4816,30 @@ def test_list_dataset_versions_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_dataset_versions._get_unset_required_fields(jsonified_request) + ).enroll_solution._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["name"] = "name_value" + jsonified_request["project"] = "project_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_dataset_versions._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set( - ( - "page_size", - "page_token", - ) - ) + ).enroll_solution._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" + assert "project" in jsonified_request + assert jsonified_request["project"] == "project_value" - client = MapsPlatformDatasetsV1AlphaClient( + client = ProjectServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = maps_platform_datasets.ListDatasetVersionsResponse() + return_value = operations_pb2.Operation(name="operations/spam") # Mock the http request call within the method and fake a response. 
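The cached-wrapper tests assert that each RPC is wrapped once, when the transport is constructed, and that repeated calls reuse that wrapper; for operation-returning methods such as `enroll_solution`, the hunk above additionally resets the wrapper mock because the LRO machinery builds one extra cached wrapper on the first call. A toy model of the invariant being checked, with `wrap` and `Transport` as hypothetical stand-ins for `gapic_v1.method.wrap_method` and the generated transport:

```python
from unittest import mock


def wrap(func):
    """Stand-in for gapic_v1.method.wrap_method: returns the wrapped callable."""
    return func


class Transport:
    def __init__(self):
        # Mirror _prep_wrapped_messages: every RPC is wrapped exactly once,
        # at construction time, and stored in _wrapped_methods.
        self._wrapped_methods = {self.echo: wrap(self.echo)}

    def echo(self, request):
        return request


with mock.patch(f"{__name__}.wrap", side_effect=lambda f: f) as wrapper_fn:
    transport = Transport()
    assert wrapper_fn.call_count == 1           # wrapped once, up front
    assert transport.echo in transport._wrapped_methods

    wrapped = transport._wrapped_methods[transport.echo]
    assert wrapped({"project": "projects/sample1"}) == {"project": "projects/sample1"}
    assert wrapped({"project": "projects/sample2"}) == {"project": "projects/sample2"}
    assert wrapper_fn.call_count == 1           # no re-wrapping on later calls
```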
with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -5583,71 +4851,67 @@ def test_list_dataset_versions_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "get", + "method": "post", "query_params": pb_request, } + transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = maps_platform_datasets.ListDatasetVersionsResponse.pb( - return_value - ) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.list_dataset_versions(request) + response = client.enroll_solution(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_list_dataset_versions_rest_unset_required_fields(): - transport = transports.MapsPlatformDatasetsV1AlphaRestTransport( +def test_enroll_solution_rest_unset_required_fields(): + transport = transports.ProjectServiceRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.list_dataset_versions._get_unset_required_fields({}) + unset_fields = transport.enroll_solution._get_unset_required_fields({}) assert set(unset_fields) == ( - set( + set(()) + & set( ( - "pageSize", - "pageToken", + "project", + "solution", ) ) - & set(("name",)) ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_dataset_versions_rest_interceptors(null_interceptor): - transport = transports.MapsPlatformDatasetsV1AlphaRestTransport( +def test_enroll_solution_rest_interceptors(null_interceptor): + transport = transports.ProjectServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None if null_interceptor - else transports.MapsPlatformDatasetsV1AlphaRestInterceptor(), + else transports.ProjectServiceRestInterceptor(), ) - client = MapsPlatformDatasetsV1AlphaClient(transport=transport) + client = ProjectServiceClient(transport=transport) with mock.patch.object( type(client.transport._session), "request" ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.MapsPlatformDatasetsV1AlphaRestInterceptor, - "post_list_dataset_versions", + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.ProjectServiceRestInterceptor, "post_enroll_solution" ) as post, mock.patch.object( - transports.MapsPlatformDatasetsV1AlphaRestInterceptor, - "pre_list_dataset_versions", + transports.ProjectServiceRestInterceptor, "pre_enroll_solution" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = maps_platform_datasets.ListDatasetVersionsRequest.pb( - maps_platform_datasets.ListDatasetVersionsRequest() + pb_message = project_service.EnrollSolutionRequest.pb( + project_service.EnrollSolutionRequest() ) transcode.return_value = { "method": "post", @@ -5659,21 +4923,19 @@ def test_list_dataset_versions_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = ( - maps_platform_datasets.ListDatasetVersionsResponse.to_json( - maps_platform_datasets.ListDatasetVersionsResponse() - ) + req.return_value._content = json_format.MessageToJson( + 
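The `*_rest_unset_required_fields` assertions compare against an intersection of two literal sets, which appear to mirror the method's default-bearing parameters on one side and its required fields on the other. In every hunk shown here the two sets are disjoint, so each expected value reduces to the empty set:

```python
# The literal intersections asserted in these hunks all evaluate to set():
assert (set(()) & set(("project", "solution"))) == set()           # enroll_solution
assert (set(("updateMask",)) & set(("loggingConfig",))) == set()   # update_logging_config
assert (set(("publishedUsage",)) & set(("name",))) == set()        # old get_dataset

# A field would only survive the intersection if it were listed on both sides,
# e.g. a hypothetical required field that is also a default-bearing parameter:
assert (set(("pageSize", "filter")) & set(("filter",))) == {"filter"}
```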
operations_pb2.Operation() ) - request = maps_platform_datasets.ListDatasetVersionsRequest() + request = project_service.EnrollSolutionRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = maps_platform_datasets.ListDatasetVersionsResponse() + post.return_value = operations_pb2.Operation() - client.list_dataset_versions( + client.enroll_solution( request, metadata=[ ("key", "val"), @@ -5685,17 +4947,16 @@ def test_list_dataset_versions_rest_interceptors(null_interceptor): post.assert_called_once() -def test_list_dataset_versions_rest_bad_request( - transport: str = "rest", - request_type=maps_platform_datasets.ListDatasetVersionsRequest, +def test_enroll_solution_rest_bad_request( + transport: str = "rest", request_type=project_service.EnrollSolutionRequest ): - client = MapsPlatformDatasetsV1AlphaClient( + client = ProjectServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/datasets/sample2"} + request_init = {"project": "projects/sample1"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -5707,141 +4968,24 @@ def test_list_dataset_versions_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.list_dataset_versions(request) - - -def test_list_dataset_versions_rest_flattened(): - client = MapsPlatformDatasetsV1AlphaClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = maps_platform_datasets.ListDatasetVersionsResponse() - - # get arguments that satisfy an http rule for this method - sample_request = {"name": "projects/sample1/datasets/sample2"} - - # get truthy value for each flattened field - mock_args = dict( - name="name_value", - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = maps_platform_datasets.ListDatasetVersionsResponse.pb( - return_value - ) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - client.list_dataset_versions(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1alpha/{name=projects/*/datasets/*}:listVersions" - % client.transport._host, - args[1], - ) - - -def test_list_dataset_versions_rest_flattened_error(transport: str = "rest"): - client = MapsPlatformDatasetsV1AlphaClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
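The interceptor tests patch the `pre_*` and `post_*` hooks on the REST interceptor, drive one call through the client, and assert each hook ran exactly once. The shape of that check, reduced to a hypothetical `Interceptor`/`Client` pair rather than the generated `ProjectServiceRestInterceptor`:

```python
from unittest import mock


class Interceptor:
    """Hypothetical stand-in for a *RestInterceptor with pre/post hooks."""

    def pre_call(self, request, metadata):
        return request, metadata

    def post_call(self, response):
        return response


class Client:
    def __init__(self, interceptor):
        self._interceptor = interceptor

    def call(self, request, metadata=()):
        request, metadata = self._interceptor.pre_call(request, metadata)
        response = {"echo": request}        # stand-in for the HTTP round trip
        return self._interceptor.post_call(response)


with mock.patch.object(Interceptor, "pre_call") as pre, mock.patch.object(
    Interceptor, "post_call"
) as post:
    pre.return_value = ({"project": "projects/sample1"}, [("key", "val")])
    post.return_value = {"done": True}

    client = Client(Interceptor())
    result = client.call({"project": "projects/sample1"}, metadata=[("key", "val")])

    assert result == {"done": True}
    pre.assert_called_once()
    post.assert_called_once()
```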
- with pytest.raises(ValueError): - client.list_dataset_versions( - maps_platform_datasets.ListDatasetVersionsRequest(), - name="name_value", - ) + client.enroll_solution(request) -def test_list_dataset_versions_rest_pager(transport: str = "rest"): - client = MapsPlatformDatasetsV1AlphaClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, +def test_enroll_solution_rest_error(): + client = ProjectServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. - # with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - maps_platform_datasets.ListDatasetVersionsResponse( - datasets=[ - dataset.Dataset(), - dataset.Dataset(), - dataset.Dataset(), - ], - next_page_token="abc", - ), - maps_platform_datasets.ListDatasetVersionsResponse( - datasets=[], - next_page_token="def", - ), - maps_platform_datasets.ListDatasetVersionsResponse( - datasets=[ - dataset.Dataset(), - ], - next_page_token="ghi", - ), - maps_platform_datasets.ListDatasetVersionsResponse( - datasets=[ - dataset.Dataset(), - dataset.Dataset(), - ], - ), - ) - # Two responses for two calls - response = response + response - - # Wrap the values into proper Response objs - response = tuple( - maps_platform_datasets.ListDatasetVersionsResponse.to_json(x) - for x in response - ) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode("UTF-8") - return_val.status_code = 200 - req.side_effect = return_values - - sample_request = {"name": "projects/sample1/datasets/sample2"} - - pager = client.list_dataset_versions(request=sample_request) - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, dataset.Dataset) for i in results) - - pages = list(client.list_dataset_versions(request=sample_request).pages) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token - @pytest.mark.parametrize( "request_type", [ - maps_platform_datasets.ListDatasetsRequest, + project_service.ListEnrolledSolutionsRequest, dict, ], ) -def test_list_datasets_rest(request_type): - client = MapsPlatformDatasetsV1AlphaClient( +def test_list_enrolled_solutions_rest(request_type): + client = ProjectServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) @@ -5853,31 +4997,33 @@ def test_list_datasets_rest(request_type): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
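The deleted `test_list_dataset_versions_rest_pager` exercised pagination: a series of responses with `next_page_token` values `"abc"`, `"def"`, `"ghi"`, `""`, six items in total. The new `EnrollSolution` RPC has no pager, but the behaviour the old test checked looks like this minimal model:

```python
class Pager:
    """Minimal model of a List*Pager: fetch pages until the token runs out."""

    def __init__(self, fetch_page):
        self._fetch_page = fetch_page

    @property
    def pages(self):
        token = None
        while True:
            page = self._fetch_page(token)
            yield page
            token = page["next_page_token"]
            if not token:
                return

    def __iter__(self):
        for page in self.pages:
            yield from page["items"]


responses = {
    None: {"items": [1, 2, 3], "next_page_token": "abc"},
    "abc": {"items": [], "next_page_token": "def"},
    "def": {"items": [4], "next_page_token": "ghi"},
    "ghi": {"items": [5, 6], "next_page_token": ""},
}

assert list(Pager(lambda token: responses[token])) == [1, 2, 3, 4, 5, 6]
assert [p["next_page_token"] for p in Pager(lambda token: responses[token]).pages] == [
    "abc",
    "def",
    "ghi",
    "",
]
```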
- return_value = maps_platform_datasets.ListDatasetsResponse( - next_page_token="next_page_token_value", + return_value = project_service.ListEnrolledSolutionsResponse( + enrolled_solutions=[common.SolutionType.SOLUTION_TYPE_RECOMMENDATION], ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = maps_platform_datasets.ListDatasetsResponse.pb(return_value) + return_value = project_service.ListEnrolledSolutionsResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.list_datasets(request) + response = client.list_enrolled_solutions(request) # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListDatasetsPager) - assert response.next_page_token == "next_page_token_value" + assert isinstance(response, project_service.ListEnrolledSolutionsResponse) + assert response.enrolled_solutions == [ + common.SolutionType.SOLUTION_TYPE_RECOMMENDATION + ] -def test_list_datasets_rest_use_cached_wrapped_rpc(): +def test_list_enrolled_solutions_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = MapsPlatformDatasetsV1AlphaClient( + client = ProjectServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) @@ -5887,32 +5033,37 @@ def test_list_datasets_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.list_datasets in client._transport._wrapped_methods + assert ( + client._transport.list_enrolled_solutions + in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.list_datasets] = mock_rpc + client._transport._wrapped_methods[ + client._transport.list_enrolled_solutions + ] = mock_rpc request = {} - client.list_datasets(request) + client.list_enrolled_solutions(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.list_datasets(request) + client.list_enrolled_solutions(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_list_datasets_rest_required_fields( - request_type=maps_platform_datasets.ListDatasetsRequest, +def test_list_enrolled_solutions_rest_required_fields( + request_type=project_service.ListEnrolledSolutionsRequest, ): - transport_class = transports.MapsPlatformDatasetsV1AlphaRestTransport + transport_class = transports.ProjectServiceRestTransport request_init = {} request_init["parent"] = "" @@ -5926,7 +5077,7 @@ def test_list_datasets_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_datasets._get_unset_required_fields(jsonified_request) + ).list_enrolled_solutions._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present @@ -5935,28 +5086,21 @@ def test_list_datasets_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_datasets._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set( - ( - "page_size", - "page_token", - ) - ) + ).list_enrolled_solutions._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone assert "parent" in jsonified_request assert jsonified_request["parent"] == "parent_value" - client = MapsPlatformDatasetsV1AlphaClient( + client = ProjectServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = maps_platform_datasets.ListDatasetsResponse() + return_value = project_service.ListEnrolledSolutionsResponse() # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -5977,58 +5121,52 @@ def test_list_datasets_rest_required_fields( response_value.status_code = 200 # Convert return value to protobuf type - return_value = maps_platform_datasets.ListDatasetsResponse.pb(return_value) + return_value = project_service.ListEnrolledSolutionsResponse.pb( + return_value + ) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.list_datasets(request) + response = client.list_enrolled_solutions(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_list_datasets_rest_unset_required_fields(): - transport = transports.MapsPlatformDatasetsV1AlphaRestTransport( +def test_list_enrolled_solutions_rest_unset_required_fields(): + transport = transports.ProjectServiceRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.list_datasets._get_unset_required_fields({}) - assert set(unset_fields) == ( - set( - ( - "pageSize", - "pageToken", - ) - ) - & set(("parent",)) - ) + unset_fields = transport.list_enrolled_solutions._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("parent",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_datasets_rest_interceptors(null_interceptor): - transport = transports.MapsPlatformDatasetsV1AlphaRestTransport( +def test_list_enrolled_solutions_rest_interceptors(null_interceptor): + transport = transports.ProjectServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None if null_interceptor - else transports.MapsPlatformDatasetsV1AlphaRestInterceptor(), + else transports.ProjectServiceRestInterceptor(), ) - client = MapsPlatformDatasetsV1AlphaClient(transport=transport) + client = ProjectServiceClient(transport=transport) with mock.patch.object( type(client.transport._session), "request" ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.MapsPlatformDatasetsV1AlphaRestInterceptor, "post_list_datasets" + transports.ProjectServiceRestInterceptor, "post_list_enrolled_solutions" ) as post, mock.patch.object( - transports.MapsPlatformDatasetsV1AlphaRestInterceptor, "pre_list_datasets" + transports.ProjectServiceRestInterceptor, "pre_list_enrolled_solutions" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = maps_platform_datasets.ListDatasetsRequest.pb( - maps_platform_datasets.ListDatasetsRequest() + pb_message = project_service.ListEnrolledSolutionsRequest.pb( + project_service.ListEnrolledSolutionsRequest() ) transcode.return_value = { "method": "post", @@ -6040,19 +5178,21 @@ def test_list_datasets_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = maps_platform_datasets.ListDatasetsResponse.to_json( - maps_platform_datasets.ListDatasetsResponse() + req.return_value._content = ( + project_service.ListEnrolledSolutionsResponse.to_json( + project_service.ListEnrolledSolutionsResponse() + ) ) - request = maps_platform_datasets.ListDatasetsRequest() + request = project_service.ListEnrolledSolutionsRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata 
- post.return_value = maps_platform_datasets.ListDatasetsResponse() + post.return_value = project_service.ListEnrolledSolutionsResponse() - client.list_datasets( + client.list_enrolled_solutions( request, metadata=[ ("key", "val"), @@ -6064,10 +5204,10 @@ def test_list_datasets_rest_interceptors(null_interceptor): post.assert_called_once() -def test_list_datasets_rest_bad_request( - transport: str = "rest", request_type=maps_platform_datasets.ListDatasetsRequest +def test_list_enrolled_solutions_rest_bad_request( + transport: str = "rest", request_type=project_service.ListEnrolledSolutionsRequest ): - client = MapsPlatformDatasetsV1AlphaClient( + client = ProjectServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -6085,11 +5225,11 @@ def test_list_datasets_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.list_datasets(request) + client.list_enrolled_solutions(request) -def test_list_datasets_rest_flattened(): - client = MapsPlatformDatasetsV1AlphaClient( +def test_list_enrolled_solutions_rest_flattened(): + client = ProjectServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) @@ -6097,7 +5237,7 @@ def test_list_datasets_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = maps_platform_datasets.ListDatasetsResponse() + return_value = project_service.ListEnrolledSolutionsResponse() # get arguments that satisfy an http rule for this method sample_request = {"parent": "projects/sample1"} @@ -6112,24 +5252,25 @@ def test_list_datasets_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = maps_platform_datasets.ListDatasetsResponse.pb(return_value) + return_value = project_service.ListEnrolledSolutionsResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.list_datasets(**mock_args) + client.list_enrolled_solutions(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1alpha/{parent=projects/*}/datasets" % client.transport._host, args[1] + "%s/v2alpha/{parent=projects/*}:enrolledSolutions" % client.transport._host, + args[1], ) -def test_list_datasets_rest_flattened_error(transport: str = "rest"): - client = MapsPlatformDatasetsV1AlphaClient( +def test_list_enrolled_solutions_rest_flattened_error(transport: str = "rest"): + client = ProjectServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -6137,115 +5278,63 @@ def test_list_datasets_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.list_datasets( - maps_platform_datasets.ListDatasetsRequest(), + client.list_enrolled_solutions( + project_service.ListEnrolledSolutionsRequest(), parent="parent_value", ) -def test_list_datasets_rest_pager(transport: str = "rest"): - client = MapsPlatformDatasetsV1AlphaClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, +def test_list_enrolled_solutions_rest_error(): + client = ProjectServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. - # with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - maps_platform_datasets.ListDatasetsResponse( - datasets=[ - dataset.Dataset(), - dataset.Dataset(), - dataset.Dataset(), - ], - next_page_token="abc", - ), - maps_platform_datasets.ListDatasetsResponse( - datasets=[], - next_page_token="def", - ), - maps_platform_datasets.ListDatasetsResponse( - datasets=[ - dataset.Dataset(), - ], - next_page_token="ghi", - ), - maps_platform_datasets.ListDatasetsResponse( - datasets=[ - dataset.Dataset(), - dataset.Dataset(), - ], - ), - ) - # Two responses for two calls - response = response + response - - # Wrap the values into proper Response objs - response = tuple( - maps_platform_datasets.ListDatasetsResponse.to_json(x) for x in response - ) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode("UTF-8") - return_val.status_code = 200 - req.side_effect = return_values - - sample_request = {"parent": "projects/sample1"} - - pager = client.list_datasets(request=sample_request) - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, dataset.Dataset) for i in results) - - pages = list(client.list_datasets(request=sample_request).pages) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token - @pytest.mark.parametrize( "request_type", [ - maps_platform_datasets.DeleteDatasetRequest, + project_service.GetLoggingConfigRequest, dict, ], ) -def test_delete_dataset_rest(request_type): - client = MapsPlatformDatasetsV1AlphaClient( +def test_get_logging_config_rest(request_type): + client = ProjectServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/datasets/sample2"} + request_init = {"name": "projects/sample1/loggingConfig"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = None + return_value = project.LoggingConfig( + name="name_value", + ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - json_return_value = "" + # Convert return value to protobuf type + return_value = project.LoggingConfig.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.delete_dataset(request) + response = client.get_logging_config(request) # Establish that the response is the type that we expect. - assert response is None + assert isinstance(response, project.LoggingConfig) + assert response.name == "name_value" -def test_delete_dataset_rest_use_cached_wrapped_rpc(): +def test_get_logging_config_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = MapsPlatformDatasetsV1AlphaClient( + client = ProjectServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) @@ -6255,32 +5344,36 @@ def test_delete_dataset_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.delete_dataset in client._transport._wrapped_methods + assert ( + client._transport.get_logging_config in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.delete_dataset] = mock_rpc + client._transport._wrapped_methods[ + client._transport.get_logging_config + ] = mock_rpc request = {} - client.delete_dataset(request) + client.get_logging_config(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.delete_dataset(request) + client.get_logging_config(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_delete_dataset_rest_required_fields( - request_type=maps_platform_datasets.DeleteDatasetRequest, +def test_get_logging_config_rest_required_fields( + request_type=project_service.GetLoggingConfigRequest, ): - transport_class = transports.MapsPlatformDatasetsV1AlphaRestTransport + transport_class = transports.ProjectServiceRestTransport request_init = {} request_init["name"] = "" @@ -6294,7 +5387,7 @@ def test_delete_dataset_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).delete_dataset._get_unset_required_fields(jsonified_request) + ).get_logging_config._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present @@ -6303,23 +5396,21 @@ def test_delete_dataset_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).delete_dataset._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. 
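Throughout these hunks the canned response body is produced by converting the expected message to its protobuf form and serializing it with `json_format.MessageToJson`, so the client exercises its real JSON decoding path. A small round-trip sketch using the stock `Struct` type as a stand-in for the generated `LoggingConfig`:

```python
from google.protobuf import json_format, struct_pb2

# Stand-in payload; the real tests serialize the generated LoggingConfig /
# Project messages the same way.
expected = struct_pb2.Struct()
expected.update({"name": "projects/sample1/loggingConfig"})

json_return_value = json_format.MessageToJson(expected)
body = json_return_value.encode("UTF-8")            # installed as response._content

round_tripped = json_format.Parse(body.decode("UTF-8"), struct_pb2.Struct())
assert round_tripped == expected
```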
- assert not set(unset_fields) - set(("force",)) + ).get_logging_config._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone assert "name" in jsonified_request assert jsonified_request["name"] == "name_value" - client = MapsPlatformDatasetsV1AlphaClient( + client = ProjectServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = None + return_value = project.LoggingConfig() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -6331,53 +5422,59 @@ def test_delete_dataset_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "delete", + "method": "get", "query_params": pb_request, } transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 - json_return_value = "" + + # Convert return value to protobuf type + return_value = project.LoggingConfig.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.delete_dataset(request) + response = client.get_logging_config(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_delete_dataset_rest_unset_required_fields(): - transport = transports.MapsPlatformDatasetsV1AlphaRestTransport( +def test_get_logging_config_rest_unset_required_fields(): + transport = transports.ProjectServiceRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.delete_dataset._get_unset_required_fields({}) - assert set(unset_fields) == (set(("force",)) & set(("name",))) + unset_fields = transport.get_logging_config._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_delete_dataset_rest_interceptors(null_interceptor): - transport = transports.MapsPlatformDatasetsV1AlphaRestTransport( +def test_get_logging_config_rest_interceptors(null_interceptor): + transport = transports.ProjectServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None if null_interceptor - else transports.MapsPlatformDatasetsV1AlphaRestInterceptor(), + else transports.ProjectServiceRestInterceptor(), ) - client = MapsPlatformDatasetsV1AlphaClient(transport=transport) + client = ProjectServiceClient(transport=transport) with mock.patch.object( type(client.transport._session), "request" ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.MapsPlatformDatasetsV1AlphaRestInterceptor, "pre_delete_dataset" + transports.ProjectServiceRestInterceptor, "post_get_logging_config" + ) as post, mock.patch.object( + transports.ProjectServiceRestInterceptor, "pre_get_logging_config" ) as pre: pre.assert_not_called() - pb_message = maps_platform_datasets.DeleteDatasetRequest.pb( - maps_platform_datasets.DeleteDatasetRequest() + post.assert_not_called() + pb_message = project_service.GetLoggingConfigRequest.pb( + project_service.GetLoggingConfigRequest() ) transcode.return_value = { "method": 
"post", @@ -6389,15 +5486,19 @@ def test_delete_dataset_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() + req.return_value._content = project.LoggingConfig.to_json( + project.LoggingConfig() + ) - request = maps_platform_datasets.DeleteDatasetRequest() + request = project_service.GetLoggingConfigRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata + post.return_value = project.LoggingConfig() - client.delete_dataset( + client.get_logging_config( request, metadata=[ ("key", "val"), @@ -6406,18 +5507,19 @@ def test_delete_dataset_rest_interceptors(null_interceptor): ) pre.assert_called_once() + post.assert_called_once() -def test_delete_dataset_rest_bad_request( - transport: str = "rest", request_type=maps_platform_datasets.DeleteDatasetRequest +def test_get_logging_config_rest_bad_request( + transport: str = "rest", request_type=project_service.GetLoggingConfigRequest ): - client = MapsPlatformDatasetsV1AlphaClient( + client = ProjectServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/datasets/sample2"} + request_init = {"name": "projects/sample1/loggingConfig"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -6429,11 +5531,11 @@ def test_delete_dataset_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.delete_dataset(request) + client.get_logging_config(request) -def test_delete_dataset_rest_flattened(): - client = MapsPlatformDatasetsV1AlphaClient( +def test_get_logging_config_rest_flattened(): + client = ProjectServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) @@ -6441,10 +5543,10 @@ def test_delete_dataset_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = None + return_value = project.LoggingConfig() # get arguments that satisfy an http rule for this method - sample_request = {"name": "projects/sample1/datasets/sample2"} + sample_request = {"name": "projects/sample1/loggingConfig"} # get truthy value for each flattened field mock_args = dict( @@ -6455,23 +5557,26 @@ def test_delete_dataset_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - json_return_value = "" + # Convert return value to protobuf type + return_value = project.LoggingConfig.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.delete_dataset(**mock_args) + client.get_logging_config(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1alpha/{name=projects/*/datasets/*}" % client.transport._host, args[1] + "%s/v2alpha/{name=projects/*/loggingConfig}" % client.transport._host, + args[1], ) -def test_delete_dataset_rest_flattened_error(transport: str = "rest"): - client = MapsPlatformDatasetsV1AlphaClient( +def test_get_logging_config_rest_flattened_error(transport: str = "rest"): + client = ProjectServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -6479,14 +5584,14 @@ def test_delete_dataset_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.delete_dataset( - maps_platform_datasets.DeleteDatasetRequest(), + client.get_logging_config( + project_service.GetLoggingConfigRequest(), name="name_value", ) -def test_delete_dataset_rest_error(): - client = MapsPlatformDatasetsV1AlphaClient( +def test_get_logging_config_rest_error(): + client = ProjectServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -6494,86 +5599,169 @@ def test_delete_dataset_rest_error(): @pytest.mark.parametrize( "request_type", [ - maps_platform_datasets.DeleteDatasetVersionRequest, + project_service.UpdateLoggingConfigRequest, dict, ], ) -def test_delete_dataset_version_rest(request_type): - client = MapsPlatformDatasetsV1AlphaClient( +def test_update_logging_config_rest(request_type): + client = ProjectServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/datasets/sample2"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = None + request_init = {"logging_config": {"name": "projects/sample1/loggingConfig"}} + request_init["logging_config"] = { + "name": "projects/sample1/loggingConfig", + "default_log_generation_rule": { + "logging_level": 1, + "info_log_sample_rate": 0.21050000000000002, + }, + "service_log_generation_rules": [ + {"service_name": "service_name_value", "log_generation_rule": {}} + ], + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = "" + # Determine if the message type is proto-plus or protobuf + test_field = project_service.UpdateLoggingConfigRequest.meta.fields[ + "logging_config" + ] - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - response = client.delete_dataset_version(request) + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] - # Establish that the response is the type that we expect. 
- assert response is None + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields -def test_delete_dataset_version_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = MapsPlatformDatasetsV1AlphaClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() + subfields_not_in_runtime = [] - # Ensure method has been cached - assert ( - client._transport.delete_dataset_version - in client._transport._wrapped_methods - ) + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["logging_config"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["logging_config"][field])): + del request_init["logging_config"][field][i][subfield] + else: + del request_init["logging_config"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
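The block above prunes sample-request subfields that the installed version of the dependency no longer defines, distinguishing proto-plus messages (`meta.fields`) from raw protobuf ones (`DESCRIPTOR.fields`). A simplified, dict-only model of that pruning step, with the runtime schema represented as a set of `(field, subfield)` pairs:

```python
def prune_unknown_subfields(request_init, runtime_nested_fields):
    """Drop (field, subfield) pairs the runtime message no longer defines."""
    pruned = {}
    for field, value in request_init.items():
        if isinstance(value, dict):
            pruned[field] = {
                sub: v for sub, v in value.items() if (field, sub) in runtime_nested_fields
            }
        elif isinstance(value, list):
            pruned[field] = [
                {sub: v for sub, v in item.items() if (field, sub) in runtime_nested_fields}
                if isinstance(item, dict)
                else item
                for item in value
            ]
        else:
            pruned[field] = value
    return pruned


runtime_nested_fields = {("default_log_generation_rule", "logging_level")}
sample = {
    "name": "projects/sample1/loggingConfig",
    "default_log_generation_rule": {"logging_level": 1, "info_log_sample_rate": 0.21},
}

assert prune_unknown_subfields(sample, runtime_nested_fields) == {
    "name": "projects/sample1/loggingConfig",
    "default_log_generation_rule": {"logging_level": 1},
}
```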
+ return_value = project.LoggingConfig( + name="name_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = project.LoggingConfig.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.update_logging_config(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, project.LoggingConfig) + assert response.name == "name_value" + + +def test_update_logging_config_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ProjectServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.update_logging_config + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.delete_dataset_version + client._transport.update_logging_config ] = mock_rpc request = {} - client.delete_dataset_version(request) + client.update_logging_config(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.delete_dataset_version(request) + client.update_logging_config(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_delete_dataset_version_rest_required_fields( - request_type=maps_platform_datasets.DeleteDatasetVersionRequest, +def test_update_logging_config_rest_required_fields( + request_type=project_service.UpdateLoggingConfigRequest, ): - transport_class = transports.MapsPlatformDatasetsV1AlphaRestTransport + transport_class = transports.ProjectServiceRestTransport request_init = {} - request_init["name"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -6584,30 +5772,28 @@ def test_delete_dataset_version_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).delete_dataset_version._get_unset_required_fields(jsonified_request) + ).update_logging_config._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["name"] = "name_value" - unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).delete_dataset_version._get_unset_required_fields(jsonified_request) + ).update_logging_config._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set(("update_mask",)) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" - client = MapsPlatformDatasetsV1AlphaClient( + client = ProjectServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = None + return_value = project.LoggingConfig() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -6619,54 +5805,60 @@ def test_delete_dataset_version_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "delete", + "method": "patch", "query_params": pb_request, } + transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 - json_return_value = "" + + # Convert return value to protobuf type + return_value = project.LoggingConfig.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.delete_dataset_version(request) + response = client.update_logging_config(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_delete_dataset_version_rest_unset_required_fields(): - transport = transports.MapsPlatformDatasetsV1AlphaRestTransport( +def test_update_logging_config_rest_unset_required_fields(): + transport = transports.ProjectServiceRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.delete_dataset_version._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name",))) + unset_fields = transport.update_logging_config._get_unset_required_fields({}) + assert set(unset_fields) == (set(("updateMask",)) & set(("loggingConfig",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_delete_dataset_version_rest_interceptors(null_interceptor): - transport = transports.MapsPlatformDatasetsV1AlphaRestTransport( +def test_update_logging_config_rest_interceptors(null_interceptor): + transport = transports.ProjectServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None if null_interceptor - else transports.MapsPlatformDatasetsV1AlphaRestInterceptor(), + else transports.ProjectServiceRestInterceptor(), ) - client = MapsPlatformDatasetsV1AlphaClient(transport=transport) + client = ProjectServiceClient(transport=transport) with mock.patch.object( type(client.transport._session), "request" ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.MapsPlatformDatasetsV1AlphaRestInterceptor, - "pre_delete_dataset_version", + transports.ProjectServiceRestInterceptor, "post_update_logging_config" + ) as post, mock.patch.object( + transports.ProjectServiceRestInterceptor, "pre_update_logging_config" ) as pre: pre.assert_not_called() - pb_message = maps_platform_datasets.DeleteDatasetVersionRequest.pb( - maps_platform_datasets.DeleteDatasetVersionRequest() + post.assert_not_called() + pb_message = project_service.UpdateLoggingConfigRequest.pb( 
+ project_service.UpdateLoggingConfigRequest() ) transcode.return_value = { "method": "post", @@ -6678,15 +5870,19 @@ def test_delete_dataset_version_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() + req.return_value._content = project.LoggingConfig.to_json( + project.LoggingConfig() + ) - request = maps_platform_datasets.DeleteDatasetVersionRequest() + request = project_service.UpdateLoggingConfigRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata + post.return_value = project.LoggingConfig() - client.delete_dataset_version( + client.update_logging_config( request, metadata=[ ("key", "val"), @@ -6695,19 +5891,19 @@ def test_delete_dataset_version_rest_interceptors(null_interceptor): ) pre.assert_called_once() + post.assert_called_once() -def test_delete_dataset_version_rest_bad_request( - transport: str = "rest", - request_type=maps_platform_datasets.DeleteDatasetVersionRequest, +def test_update_logging_config_rest_bad_request( + transport: str = "rest", request_type=project_service.UpdateLoggingConfigRequest ): - client = MapsPlatformDatasetsV1AlphaClient( + client = ProjectServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/datasets/sample2"} + request_init = {"logging_config": {"name": "projects/sample1/loggingConfig"}} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -6719,11 +5915,11 @@ def test_delete_dataset_version_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.delete_dataset_version(request) + client.update_logging_config(request) -def test_delete_dataset_version_rest_flattened(): - client = MapsPlatformDatasetsV1AlphaClient( +def test_update_logging_config_rest_flattened(): + client = ProjectServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) @@ -6731,39 +5927,42 @@ def test_delete_dataset_version_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = None + return_value = project.LoggingConfig() # get arguments that satisfy an http rule for this method - sample_request = {"name": "projects/sample1/datasets/sample2"} + sample_request = {"logging_config": {"name": "projects/sample1/loggingConfig"}} # get truthy value for each flattened field mock_args = dict( - name="name_value", + logging_config=project.LoggingConfig(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) mock_args.update(sample_request) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - json_return_value = "" + # Convert return value to protobuf type + return_value = project.LoggingConfig.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.delete_dataset_version(**mock_args) + client.update_logging_config(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
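The flattened `update_logging_config` call below passes the resource together with a `FieldMask`, so only the masked paths are meant to change on the server. A toy, dict-based illustration of that update-mask idea (not the service's actual merge logic):

```python
def apply_update_mask(stored, update, paths):
    """Copy only the masked top-level paths from the update onto the resource."""
    merged = dict(stored)
    for path in paths:
        if path in update:
            merged[path] = update[path]
    return merged


stored = {
    "name": "projects/sample1/loggingConfig",
    "default_log_generation_rule": {"logging_level": 1},
}
update = {
    "default_log_generation_rule": {"logging_level": 2},
    "service_log_generation_rules": [],
}

assert apply_update_mask(stored, update, ["default_log_generation_rule"]) == {
    "name": "projects/sample1/loggingConfig",
    "default_log_generation_rule": {"logging_level": 2},
}
```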
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1alpha/{name=projects/*/datasets/*}:deleteVersion" + "%s/v2alpha/{logging_config.name=projects/*/loggingConfig}" % client.transport._host, args[1], ) -def test_delete_dataset_version_rest_flattened_error(transport: str = "rest"): - client = MapsPlatformDatasetsV1AlphaClient( +def test_update_logging_config_rest_flattened_error(transport: str = "rest"): + client = ProjectServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -6771,747 +5970,1926 @@ def test_delete_dataset_version_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.delete_dataset_version( - maps_platform_datasets.DeleteDatasetVersionRequest(), - name="name_value", + client.update_logging_config( + project_service.UpdateLoggingConfigRequest(), + logging_config=project.LoggingConfig(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) -def test_delete_dataset_version_rest_error(): - client = MapsPlatformDatasetsV1AlphaClient( +def test_update_logging_config_rest_error(): + client = ProjectServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) -def test_credentials_transport_error(): - # It is an error to provide credentials and a transport instance. - transport = transports.MapsPlatformDatasetsV1AlphaGrpcTransport( +@pytest.mark.parametrize( + "request_type", + [ + project_service.GetAlertConfigRequest, + dict, + ], +) +def test_get_alert_config_rest(request_type): + client = ProjectServiceClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) - with pytest.raises(ValueError): - client = MapsPlatformDatasetsV1AlphaClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - # It is an error to provide a credentials file and a transport instance. - transport = transports.MapsPlatformDatasetsV1AlphaGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = MapsPlatformDatasetsV1AlphaClient( - client_options={"credentials_file": "credentials.json"}, - transport=transport, - ) + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/alertConfig"} + request = request_type(**request_init) - # It is an error to provide an api_key and a transport instance. - transport = transports.MapsPlatformDatasetsV1AlphaGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - options = client_options.ClientOptions() - options.api_key = "api_key" - with pytest.raises(ValueError): - client = MapsPlatformDatasetsV1AlphaClient( - client_options=options, - transport=transport, + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = project.AlertConfig( + name="name_value", ) - # It is an error to provide an api_key and a credential. 
- options = client_options.ClientOptions() - options.api_key = "api_key" - with pytest.raises(ValueError): - client = MapsPlatformDatasetsV1AlphaClient( - client_options=options, credentials=ga_credentials.AnonymousCredentials() - ) + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = project.AlertConfig.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) - # It is an error to provide scopes and a transport instance. - transport = transports.MapsPlatformDatasetsV1AlphaGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = MapsPlatformDatasetsV1AlphaClient( - client_options={"scopes": ["1", "2"]}, - transport=transport, - ) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_alert_config(request) + # Establish that the response is the type that we expect. + assert isinstance(response, project.AlertConfig) + assert response.name == "name_value" -def test_transport_instance(): - # A client may be instantiated with a custom transport instance. - transport = transports.MapsPlatformDatasetsV1AlphaGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - client = MapsPlatformDatasetsV1AlphaClient(transport=transport) - assert client.transport is transport +def test_get_alert_config_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ProjectServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) -def test_transport_get_channel(): - # A client may be instantiated with a custom transport instance. - transport = transports.MapsPlatformDatasetsV1AlphaGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - channel = transport.grpc_channel - assert channel + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() - transport = transports.MapsPlatformDatasetsV1AlphaGrpcAsyncIOTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - channel = transport.grpc_channel - assert channel + # Ensure method has been cached + assert client._transport.get_alert_config in client._transport._wrapped_methods + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.get_alert_config + ] = mock_rpc -@pytest.mark.parametrize( - "transport_class", - [ - transports.MapsPlatformDatasetsV1AlphaGrpcTransport, - transports.MapsPlatformDatasetsV1AlphaGrpcAsyncIOTransport, - transports.MapsPlatformDatasetsV1AlphaRestTransport, - ], -) -def test_transport_adc(transport_class): - # Test default credentials are used if not provided. - with mock.patch.object(google.auth, "default") as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class() - adc.assert_called_once() + request = {} + client.get_alert_config(request) + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 -@pytest.mark.parametrize( - "transport_name", - [ - "grpc", - "rest", - ], -) -def test_transport_kind(transport_name): - transport = MapsPlatformDatasetsV1AlphaClient.get_transport_class(transport_name)( - credentials=ga_credentials.AnonymousCredentials(), - ) - assert transport.kind == transport_name + client.get_alert_config(request) + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 -def test_transport_grpc_default(): - # A client should use the gRPC transport by default. - client = MapsPlatformDatasetsV1AlphaClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - assert isinstance( - client.transport, - transports.MapsPlatformDatasetsV1AlphaGrpcTransport, - ) +def test_get_alert_config_rest_required_fields( + request_type=project_service.GetAlertConfigRequest, +): + transport_class = transports.ProjectServiceRestTransport -def test_maps_platform_datasets_v1_alpha_base_transport_error(): - # Passing both a credentials object and credentials_file should raise an error - with pytest.raises(core_exceptions.DuplicateCredentialArgs): - transport = transports.MapsPlatformDatasetsV1AlphaTransport( - credentials=ga_credentials.AnonymousCredentials(), - credentials_file="credentials.json", - ) + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + # verify fields with default values are dropped -def test_maps_platform_datasets_v1_alpha_base_transport(): - # Instantiate the base transport. - with mock.patch( - "google.maps.mapsplatformdatasets_v1alpha.services.maps_platform_datasets_v1_alpha.transports.MapsPlatformDatasetsV1AlphaTransport.__init__" - ) as Transport: - Transport.return_value = None - transport = transports.MapsPlatformDatasetsV1AlphaTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_alert_config._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) - # Every method on the transport should just blindly - # raise NotImplementedError. 
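The `*_use_cached_wrapped_rpc` tests above rely on the transport building its method wrappers once, at client construction, and storing them keyed by the bound transport method; the test swaps one cached entry for a mock and confirms that later calls reuse it without creating a new wrapper. The standalone sketch below illustrates only that caching idea; `SimpleTransport` and `wrap_method` are hypothetical stand-ins, not the library's classes (the real wrappers are built in `_prep_wrapped_messages`).

```python
# Minimal sketch of the "wrap once, reuse forever" pattern the test asserts.
# All names here are hypothetical stand-ins for the generated transport code.
from unittest import mock


def wrap_method(func):
    """Stand-in for a wrapper factory that would add retry/timeout handling."""
    def wrapped(request):
        return func(request)
    return wrapped


class SimpleTransport:
    def __init__(self):
        # Wrappers are created exactly once, when the transport is built.
        self._wrapped_methods = {
            self.get_alert_config: wrap_method(self.get_alert_config)
        }

    def get_alert_config(self, request):
        return {"name": "projects/sample1/alertConfig"}


transport = SimpleTransport()

# Replace the cached wrapper with a mock, as the test above does.
mock_rpc = mock.Mock()
transport._wrapped_methods[transport.get_alert_config] = mock_rpc

# Both calls go through the same cached entry; no new wrapper is created.
transport._wrapped_methods[transport.get_alert_config]({})
transport._wrapped_methods[transport.get_alert_config]({})
assert mock_rpc.call_count == 2
```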
- methods = ( - "create_dataset", - "update_dataset_metadata", - "get_dataset", - "list_dataset_versions", - "list_datasets", - "delete_dataset", - "delete_dataset_version", - ) - for method in methods: - with pytest.raises(NotImplementedError): - getattr(transport, method)(request=object()) + # verify required fields with default values are now present - with pytest.raises(NotImplementedError): - transport.close() + jsonified_request["name"] = "name_value" - # Catch all for all remaining methods and properties - remainder = [ - "kind", - ] - for r in remainder: - with pytest.raises(NotImplementedError): - getattr(transport, r)() + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_alert_config._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" -def test_maps_platform_datasets_v1_alpha_base_transport_with_credentials_file(): - # Instantiate the base transport with a credentials file - with mock.patch.object( - google.auth, "load_credentials_from_file", autospec=True - ) as load_creds, mock.patch( - "google.maps.mapsplatformdatasets_v1alpha.services.maps_platform_datasets_v1_alpha.transports.MapsPlatformDatasetsV1AlphaTransport._prep_wrapped_messages" - ) as Transport: - Transport.return_value = None - load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.MapsPlatformDatasetsV1AlphaTransport( - credentials_file="credentials.json", - quota_project_id="octopus", + client = ProjectServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = project.AlertConfig() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = project.AlertConfig.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_alert_config(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_alert_config_rest_unset_required_fields(): + transport = transports.ProjectServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_alert_config._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_alert_config_rest_interceptors(null_interceptor): + transport = transports.ProjectServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.ProjectServiceRestInterceptor(), + ) + client = ProjectServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ProjectServiceRestInterceptor, "post_get_alert_config" + ) as post, mock.patch.object( + transports.ProjectServiceRestInterceptor, "pre_get_alert_config" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = project_service.GetAlertConfigRequest.pb( + project_service.GetAlertConfigRequest() ) - load_creds.assert_called_once_with( - "credentials.json", - scopes=None, - default_scopes=("https://www.googleapis.com/auth/cloud-platform",), - quota_project_id="octopus", + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = project.AlertConfig.to_json(project.AlertConfig()) + + request = project_service.GetAlertConfigRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = project.AlertConfig() + + client.get_alert_config( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], ) + pre.assert_called_once() + post.assert_called_once() -def test_maps_platform_datasets_v1_alpha_base_transport_with_adc(): - # Test the default credentials are used if credentials and credentials_file are None. 
- with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( - "google.maps.mapsplatformdatasets_v1alpha.services.maps_platform_datasets_v1_alpha.transports.MapsPlatformDatasetsV1AlphaTransport._prep_wrapped_messages" - ) as Transport: - Transport.return_value = None - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.MapsPlatformDatasetsV1AlphaTransport() - adc.assert_called_once() +def test_get_alert_config_rest_bad_request( + transport: str = "rest", request_type=project_service.GetAlertConfigRequest +): + client = ProjectServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) -def test_maps_platform_datasets_v1_alpha_auth_adc(): - # If no credentials are provided, we should use ADC credentials. - with mock.patch.object(google.auth, "default", autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - MapsPlatformDatasetsV1AlphaClient() - adc.assert_called_once_with( - scopes=None, - default_scopes=("https://www.googleapis.com/auth/cloud-platform",), - quota_project_id=None, + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/alertConfig"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_alert_config(request) + + +def test_get_alert_config_rest_flattened(): + client = ProjectServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = project.AlertConfig() + + # get arguments that satisfy an http rule for this method + sample_request = {"name": "projects/sample1/alertConfig"} + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = project.AlertConfig.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + client.get_alert_config(**mock_args) -@pytest.mark.parametrize( - "transport_class", - [ - transports.MapsPlatformDatasetsV1AlphaGrpcTransport, - transports.MapsPlatformDatasetsV1AlphaGrpcAsyncIOTransport, - ], -) -def test_maps_platform_datasets_v1_alpha_transport_auth_adc(transport_class): - # If credentials and host are not provided, the transport class should use - # ADC credentials. 
- with mock.patch.object(google.auth, "default", autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class(quota_project_id="octopus", scopes=["1", "2"]) - adc.assert_called_once_with( - scopes=["1", "2"], - default_scopes=("https://www.googleapis.com/auth/cloud-platform",), - quota_project_id="octopus", + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v2alpha/{name=projects/*/alertConfig}" % client.transport._host, args[1] ) -@pytest.mark.parametrize( - "transport_class", - [ - transports.MapsPlatformDatasetsV1AlphaGrpcTransport, - transports.MapsPlatformDatasetsV1AlphaGrpcAsyncIOTransport, - transports.MapsPlatformDatasetsV1AlphaRestTransport, - ], -) -def test_maps_platform_datasets_v1_alpha_transport_auth_gdch_credentials( - transport_class, -): - host = "https://language.com" - api_audience_tests = [None, "https://language2.com"] - api_audience_expect = [host, "https://language2.com"] - for t, e in zip(api_audience_tests, api_audience_expect): - with mock.patch.object(google.auth, "default", autospec=True) as adc: - gdch_mock = mock.MagicMock() - type(gdch_mock).with_gdch_audience = mock.PropertyMock( - return_value=gdch_mock - ) - adc.return_value = (gdch_mock, None) - transport_class(host=host, api_audience=t) - gdch_mock.with_gdch_audience.assert_called_once_with(e) +def test_get_alert_config_rest_flattened_error(transport: str = "rest"): + client = ProjectServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_alert_config( + project_service.GetAlertConfigRequest(), + name="name_value", + ) + + +def test_get_alert_config_rest_error(): + client = ProjectServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) @pytest.mark.parametrize( - "transport_class,grpc_helpers", + "request_type", [ - (transports.MapsPlatformDatasetsV1AlphaGrpcTransport, grpc_helpers), - ( - transports.MapsPlatformDatasetsV1AlphaGrpcAsyncIOTransport, - grpc_helpers_async, - ), + project_service.UpdateAlertConfigRequest, + dict, ], ) -def test_maps_platform_datasets_v1_alpha_transport_create_channel( - transport_class, grpc_helpers -): - # If credentials and host are not provided, the transport class should use - # ADC credentials. 
- with mock.patch.object( - google.auth, "default", autospec=True - ) as adc, mock.patch.object( - grpc_helpers, "create_channel", autospec=True - ) as create_channel: - creds = ga_credentials.AnonymousCredentials() - adc.return_value = (creds, None) - transport_class(quota_project_id="octopus", scopes=["1", "2"]) +def test_update_alert_config_rest(request_type): + client = ProjectServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) - create_channel.assert_called_with( - "mapsplatformdatasets.googleapis.com:443", - credentials=creds, - credentials_file=None, - quota_project_id="octopus", - default_scopes=("https://www.googleapis.com/auth/cloud-platform",), - scopes=["1", "2"], - default_host="mapsplatformdatasets.googleapis.com", - ssl_credentials=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) + # send a request that will satisfy transcoding + request_init = {"alert_config": {"name": "projects/sample1/alertConfig"}} + request_init["alert_config"] = { + "name": "projects/sample1/alertConfig", + "alert_policies": [ + { + "alert_group": "alert_group_value", + "enroll_status": 1, + "recipients": [{"email_address": "email_address_value"}], + } + ], + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = project_service.UpdateAlertConfigRequest.meta.fields["alert_config"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["alert_config"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["alert_config"][field])): + del request_init["alert_config"][field][i][subfield] + else: + del request_init["alert_config"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = project.AlertConfig( + name="name_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = project.AlertConfig.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.update_alert_config(request) + + # Establish that the response is the type that we expect. 
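The block above guards against version skew between the generated test and the protobuf/proto-plus dependency installed at test time: any subfield in the hand-written sample request that the runtime message type does not define is deleted before the request object is built. Below is a condensed, dependency-free sketch of that pruning step; `prune_unknown_subfields`, `sample`, and `known` are illustrative names only, and the set of known pairs stands in for what the real test derives from the message descriptor.

```python
# Hypothetical, self-contained version of the subfield-pruning loop above.
# `runtime_nested_fields` stands in for the (field, subfield) pairs derived
# from the runtime message descriptor; anything not listed is stripped.
def prune_unknown_subfields(sample, runtime_nested_fields):
    for field, value in list(sample.items()):
        items = value if isinstance(value, list) else [value]
        for item in items:
            if not isinstance(item, dict):
                continue
            for subfield in list(item.keys()):
                if (field, subfield) not in runtime_nested_fields:
                    del item[subfield]
    return sample


sample = {
    "alert_policies": [
        {"alert_group": "alert_group_value", "brand_new_field": "not in runtime"}
    ]
}
known = {("alert_policies", "alert_group"), ("alert_policies", "enroll_status")}
assert prune_unknown_subfields(sample, known) == {
    "alert_policies": [{"alert_group": "alert_group_value"}]
}
```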
+ assert isinstance(response, project.AlertConfig) + assert response.name == "name_value" + + +def test_update_alert_config_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ProjectServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.update_alert_config in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.update_alert_config + ] = mock_rpc + + request = {} + client.update_alert_config(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.update_alert_config(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_update_alert_config_rest_required_fields( + request_type=project_service.UpdateAlertConfigRequest, +): + transport_class = transports.ProjectServiceRestTransport + + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_alert_config._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_alert_config._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("update_mask",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + + client = ProjectServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = project.AlertConfig() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "patch", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = project.AlertConfig.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.update_alert_config(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_update_alert_config_rest_unset_required_fields(): + transport = transports.ProjectServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.update_alert_config._get_unset_required_fields({}) + assert set(unset_fields) == (set(("updateMask",)) & set(("alertConfig",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_alert_config_rest_interceptors(null_interceptor): + transport = transports.ProjectServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.ProjectServiceRestInterceptor(), + ) + client = ProjectServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ProjectServiceRestInterceptor, "post_update_alert_config" + ) as post, mock.patch.object( + transports.ProjectServiceRestInterceptor, "pre_update_alert_config" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = project_service.UpdateAlertConfigRequest.pb( + project_service.UpdateAlertConfigRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = project.AlertConfig.to_json(project.AlertConfig()) + + request = project_service.UpdateAlertConfigRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = project.AlertConfig() + + client.update_alert_config( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_update_alert_config_rest_bad_request( + transport: str = "rest", request_type=project_service.UpdateAlertConfigRequest +): + client = ProjectServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"alert_config": {"name": "projects/sample1/alertConfig"}} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
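A recurring assertion shape in the `*_unset_required_fields` tests is `set(unset_fields) == (set((...)) & set((...)))`. The right-hand side is ordinary Python set intersection, and in the cases shown here the two operands are disjoint, so each assertion effectively requires `unset_fields` to be empty. The snippet below just spells out that arithmetic; it is safe to run on its own and makes no claims about the private `_get_unset_required_fields` helper itself.

```python
# The expected-value expressions in the *_unset_required_fields tests are
# plain Python set arithmetic; both of these reduce to the empty set, so the
# assertions effectively say "no required fields are left unset".
assert set(()) & set(("name",)) == set()
assert set(("updateMask",)) & set(("alertConfig",)) == set()

# The trailing comma is what makes these one-element tuples; without it the
# string itself is iterated, giving a set of characters instead.
assert set(("name",)) == {"name"}
assert set(("name")) == {"n", "a", "m", "e"}
```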
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.update_alert_config(request) + + +def test_update_alert_config_rest_flattened(): + client = ProjectServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = project.AlertConfig() + + # get arguments that satisfy an http rule for this method + sample_request = {"alert_config": {"name": "projects/sample1/alertConfig"}} + + # get truthy value for each flattened field + mock_args = dict( + alert_config=project.AlertConfig(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = project.AlertConfig.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.update_alert_config(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v2alpha/{alert_config.name=projects/*/alertConfig}" + % client.transport._host, + args[1], + ) + + +def test_update_alert_config_rest_flattened_error(transport: str = "rest"): + client = ProjectServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_alert_config( + project_service.UpdateAlertConfigRequest(), + alert_config=project.AlertConfig(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +def test_update_alert_config_rest_error(): + client = ProjectServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.ProjectServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = ProjectServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.ProjectServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = ProjectServiceClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. 
+ transport = transports.ProjectServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = ProjectServiceClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = ProjectServiceClient( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.ProjectServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = ProjectServiceClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.ProjectServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = ProjectServiceClient(transport=transport) + assert client.transport is transport + + +def test_transport_get_channel(): + # A client may be instantiated with a custom transport instance. + transport = transports.ProjectServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + transport = transports.ProjectServiceGrpcAsyncIOTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.ProjectServiceGrpcTransport, + transports.ProjectServiceGrpcAsyncIOTransport, + transports.ProjectServiceRestTransport, + ], +) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "rest", + ], +) +def test_transport_kind(transport_name): + transport = ProjectServiceClient.get_transport_class(transport_name)( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert transport.kind == transport_name + + +def test_transport_grpc_default(): + # A client should use the gRPC transport by default. + client = ProjectServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert isinstance( + client.transport, + transports.ProjectServiceGrpcTransport, + ) + + +def test_project_service_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.ProjectServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json", + ) + + +def test_project_service_base_transport(): + # Instantiate the base transport. + with mock.patch( + "google.cloud.retail_v2alpha.services.project_service.transports.ProjectServiceTransport.__init__" + ) as Transport: + Transport.return_value = None + transport = transports.ProjectServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. 
+ methods = ( + "get_project", + "accept_terms", + "enroll_solution", + "list_enrolled_solutions", + "get_logging_config", + "update_logging_config", + "get_alert_config", + "update_alert_config", + "get_operation", + "list_operations", + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + with pytest.raises(NotImplementedError): + transport.close() + + # Additionally, the LRO client (a property) should + # also raise NotImplementedError + with pytest.raises(NotImplementedError): + transport.operations_client + + # Catch all for all remaining methods and properties + remainder = [ + "kind", + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + + +def test_project_service_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch( + "google.cloud.retail_v2alpha.services.project_service.transports.ProjectServiceTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.ProjectServiceTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with( + "credentials.json", + scopes=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id="octopus", + ) + + +def test_project_service_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. + with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( + "google.cloud.retail_v2alpha.services.project_service.transports.ProjectServiceTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.ProjectServiceTransport() + adc.assert_called_once() + + +def test_project_service_auth_adc(): + # If no credentials are provided, we should use ADC credentials. + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + ProjectServiceClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id=None, + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.ProjectServiceGrpcTransport, + transports.ProjectServiceGrpcAsyncIOTransport, + ], +) +def test_project_service_transport_auth_adc(transport_class): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
+ with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + adc.assert_called_once_with( + scopes=["1", "2"], + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id="octopus", + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.ProjectServiceGrpcTransport, + transports.ProjectServiceGrpcAsyncIOTransport, + transports.ProjectServiceRestTransport, + ], +) +def test_project_service_transport_auth_gdch_credentials(transport_class): + host = "https://language.com" + api_audience_tests = [None, "https://language2.com"] + api_audience_expect = [host, "https://language2.com"] + for t, e in zip(api_audience_tests, api_audience_expect): + with mock.patch.object(google.auth, "default", autospec=True) as adc: + gdch_mock = mock.MagicMock() + type(gdch_mock).with_gdch_audience = mock.PropertyMock( + return_value=gdch_mock + ) + adc.return_value = (gdch_mock, None) + transport_class(host=host, api_audience=t) + gdch_mock.with_gdch_audience.assert_called_once_with(e) + + +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.ProjectServiceGrpcTransport, grpc_helpers), + (transports.ProjectServiceGrpcAsyncIOTransport, grpc_helpers_async), + ], +) +def test_project_service_transport_create_channel(transport_class, grpc_helpers): + # If credentials and host are not provided, the transport class should use + # ADC credentials. + with mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + + create_channel.assert_called_with( + "retail.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + scopes=["1", "2"], + default_host="retail.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.ProjectServiceGrpcTransport, + transports.ProjectServiceGrpcAsyncIOTransport, + ], +) +def test_project_service_grpc_transport_client_cert_source_for_mtls(transport_class): + cred = ga_credentials.AnonymousCredentials() + + # Check ssl_channel_credentials is used if provided. + with mock.patch.object(transport_class, "create_channel") as mock_create_channel: + mock_ssl_channel_creds = mock.Mock() + transport_class( + host="squid.clam.whelk", + credentials=cred, + ssl_channel_credentials=mock_ssl_channel_creds, + ) + mock_create_channel.assert_called_once_with( + "squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_channel_creds, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls + # is used. 
+ with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): + with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: + transport_class( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback, + ) + expected_cert, expected_key = client_cert_source_callback() + mock_ssl_cred.assert_called_once_with( + certificate_chain=expected_cert, private_key=expected_key + ) + + +def test_project_service_http_transport_client_cert_source_for_mtls(): + cred = ga_credentials.AnonymousCredentials() + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ) as mock_configure_mtls_channel: + transports.ProjectServiceRestTransport( + credentials=cred, client_cert_source_for_mtls=client_cert_source_callback + ) + mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) + + +def test_project_service_rest_lro_client(): + client = ProjectServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + transport = client.transport + + # Ensure that we have a api-core operations client. + assert isinstance( + transport.operations_client, + operations_v1.AbstractOperationsClient, + ) + + # Ensure that subsequent calls to the property send the exact same object. + assert transport.operations_client is transport.operations_client + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + "rest", + ], +) +def test_project_service_host_no_port(transport_name): + client = ProjectServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="retail.googleapis.com" + ), + transport=transport_name, + ) + assert client.transport._host == ( + "retail.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://retail.googleapis.com" + ) + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + "rest", + ], +) +def test_project_service_host_with_port(transport_name): + client = ProjectServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="retail.googleapis.com:8000" + ), + transport=transport_name, + ) + assert client.transport._host == ( + "retail.googleapis.com:8000" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://retail.googleapis.com:8000" + ) + + +@pytest.mark.parametrize( + "transport_name", + [ + "rest", + ], +) +def test_project_service_client_transport_session_collision(transport_name): + creds1 = ga_credentials.AnonymousCredentials() + creds2 = ga_credentials.AnonymousCredentials() + client1 = ProjectServiceClient( + credentials=creds1, + transport=transport_name, + ) + client2 = ProjectServiceClient( + credentials=creds2, + transport=transport_name, + ) + session1 = client1.transport.get_project._session + session2 = client2.transport.get_project._session + assert session1 != session2 + session1 = client1.transport.accept_terms._session + session2 = client2.transport.accept_terms._session + assert session1 != session2 + session1 = client1.transport.enroll_solution._session + session2 = client2.transport.enroll_solution._session + assert session1 != session2 + session1 = client1.transport.list_enrolled_solutions._session + session2 = client2.transport.list_enrolled_solutions._session + assert session1 != session2 + session1 = 
client1.transport.get_logging_config._session + session2 = client2.transport.get_logging_config._session + assert session1 != session2 + session1 = client1.transport.update_logging_config._session + session2 = client2.transport.update_logging_config._session + assert session1 != session2 + session1 = client1.transport.get_alert_config._session + session2 = client2.transport.get_alert_config._session + assert session1 != session2 + session1 = client1.transport.update_alert_config._session + session2 = client2.transport.update_alert_config._session + assert session1 != session2 + + +def test_project_service_grpc_transport_channel(): + channel = grpc.secure_channel("http://localhost/", grpc.local_channel_credentials()) + + # Check that channel is used if provided. + transport = transports.ProjectServiceGrpcTransport( + host="squid.clam.whelk", + channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None + + +def test_project_service_grpc_asyncio_transport_channel(): + channel = aio.secure_channel("http://localhost/", grpc.local_channel_credentials()) + + # Check that channel is used if provided. + transport = transports.ProjectServiceGrpcAsyncIOTransport( + host="squid.clam.whelk", + channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. +@pytest.mark.parametrize( + "transport_class", + [ + transports.ProjectServiceGrpcTransport, + transports.ProjectServiceGrpcAsyncIOTransport, + ], +) +def test_project_service_transport_channel_mtls_with_client_cert_source( + transport_class, +): + with mock.patch( + "grpc.ssl_channel_credentials", autospec=True + ) as grpc_ssl_channel_cred: + with mock.patch.object( + transport_class, "create_channel" + ) as grpc_create_channel: + mock_ssl_cred = mock.Mock() + grpc_ssl_channel_cred.return_value = mock_ssl_cred + + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + + cred = ga_credentials.AnonymousCredentials() + with pytest.warns(DeprecationWarning): + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (cred, None) + transport = transport_class( + host="squid.clam.whelk", + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=client_cert_source_callback, + ) + adc.assert_called_once() + + grpc_ssl_channel_cred.assert_called_once_with( + certificate_chain=b"cert bytes", private_key=b"key bytes" + ) + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + assert transport._ssl_channel_credentials == mock_ssl_cred + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. 
+@pytest.mark.parametrize( + "transport_class", + [ + transports.ProjectServiceGrpcTransport, + transports.ProjectServiceGrpcAsyncIOTransport, + ], +) +def test_project_service_transport_channel_mtls_with_adc(transport_class): + mock_ssl_cred = mock.Mock() + with mock.patch.multiple( + "google.auth.transport.grpc.SslCredentials", + __init__=mock.Mock(return_value=None), + ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), + ): + with mock.patch.object( + transport_class, "create_channel" + ) as grpc_create_channel: + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + mock_cred = mock.Mock() + + with pytest.warns(DeprecationWarning): + transport = transport_class( + host="squid.clam.whelk", + credentials=mock_cred, + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=None, + ) + + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=mock_cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + + +def test_project_service_grpc_lro_client(): + client = ProjectServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + transport = client.transport + + # Ensure that we have a api-core operations client. + assert isinstance( + transport.operations_client, + operations_v1.OperationsClient, + ) + + # Ensure that subsequent calls to the property send the exact same object. + assert transport.operations_client is transport.operations_client + + +def test_project_service_grpc_lro_async_client(): + client = ProjectServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + transport = client.transport + + # Ensure that we have a api-core operations client. + assert isinstance( + transport.operations_client, + operations_v1.OperationsAsyncClient, + ) + + # Ensure that subsequent calls to the property send the exact same object. + assert transport.operations_client is transport.operations_client + + +def test_alert_config_path(): + project = "squid" + expected = "projects/{project}/alertConfig".format( + project=project, + ) + actual = ProjectServiceClient.alert_config_path(project) + assert expected == actual + + +def test_parse_alert_config_path(): + expected = { + "project": "clam", + } + path = ProjectServiceClient.alert_config_path(**expected) + + # Check that the path construction is reversible. + actual = ProjectServiceClient.parse_alert_config_path(path) + assert expected == actual + + +def test_logging_config_path(): + project = "whelk" + expected = "projects/{project}/loggingConfig".format( + project=project, + ) + actual = ProjectServiceClient.logging_config_path(project) + assert expected == actual + + +def test_parse_logging_config_path(): + expected = { + "project": "octopus", + } + path = ProjectServiceClient.logging_config_path(**expected) + + # Check that the path construction is reversible. 
+ actual = ProjectServiceClient.parse_logging_config_path(path) + assert expected == actual + + +def test_retail_project_path(): + project = "oyster" + expected = "projects/{project}/retailProject".format( + project=project, + ) + actual = ProjectServiceClient.retail_project_path(project) + assert expected == actual + + +def test_parse_retail_project_path(): + expected = { + "project": "nudibranch", + } + path = ProjectServiceClient.retail_project_path(**expected) + + # Check that the path construction is reversible. + actual = ProjectServiceClient.parse_retail_project_path(path) + assert expected == actual + + +def test_common_billing_account_path(): + billing_account = "cuttlefish" + expected = "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + actual = ProjectServiceClient.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "mussel", + } + path = ProjectServiceClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. + actual = ProjectServiceClient.parse_common_billing_account_path(path) + assert expected == actual + + +def test_common_folder_path(): + folder = "winkle" + expected = "folders/{folder}".format( + folder=folder, + ) + actual = ProjectServiceClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "nautilus", + } + path = ProjectServiceClient.common_folder_path(**expected) + + # Check that the path construction is reversible. + actual = ProjectServiceClient.parse_common_folder_path(path) + assert expected == actual + + +def test_common_organization_path(): + organization = "scallop" + expected = "organizations/{organization}".format( + organization=organization, + ) + actual = ProjectServiceClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "abalone", + } + path = ProjectServiceClient.common_organization_path(**expected) + + # Check that the path construction is reversible. + actual = ProjectServiceClient.parse_common_organization_path(path) + assert expected == actual + + +def test_common_project_path(): + project = "squid" + expected = "projects/{project}".format( + project=project, + ) + actual = ProjectServiceClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "clam", + } + path = ProjectServiceClient.common_project_path(**expected) + + # Check that the path construction is reversible. 
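The path-helper tests in this stretch pin down a round-trip contract: a `*_path` classmethod formats resource IDs into a template such as `projects/{project}/alertConfig`, and the matching `parse_*_path` recovers the original arguments. The pair below is a hypothetical illustration of that contract, not the generated client's implementation.

```python
# Hypothetical build/parse pair showing the round-trip the path tests assert;
# the generated client exposes equivalent classmethods on ProjectServiceClient.
import re

ALERT_CONFIG_TEMPLATE = "projects/{project}/alertConfig"
ALERT_CONFIG_PATTERN = re.compile(r"^projects/(?P<project>[^/]+)/alertConfig$")


def alert_config_path(project: str) -> str:
    return ALERT_CONFIG_TEMPLATE.format(project=project)


def parse_alert_config_path(path: str) -> dict:
    match = ALERT_CONFIG_PATTERN.match(path)
    return dict(match.groupdict()) if match else {}


# Build then parse recovers the original arguments, as the tests require.
assert alert_config_path("squid") == "projects/squid/alertConfig"
assert parse_alert_config_path("projects/clam/alertConfig") == {"project": "clam"}
```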
+ actual = ProjectServiceClient.parse_common_project_path(path) + assert expected == actual + + +def test_common_location_path(): + project = "whelk" + location = "octopus" + expected = "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) + actual = ProjectServiceClient.common_location_path(project, location) + assert expected == actual -@pytest.mark.parametrize( - "transport_class", - [ - transports.MapsPlatformDatasetsV1AlphaGrpcTransport, - transports.MapsPlatformDatasetsV1AlphaGrpcAsyncIOTransport, - ], -) -def test_maps_platform_datasets_v1_alpha_grpc_transport_client_cert_source_for_mtls( - transport_class, -): - cred = ga_credentials.AnonymousCredentials() +def test_parse_common_location_path(): + expected = { + "project": "oyster", + "location": "nudibranch", + } + path = ProjectServiceClient.common_location_path(**expected) - # Check ssl_channel_credentials is used if provided. - with mock.patch.object(transport_class, "create_channel") as mock_create_channel: - mock_ssl_channel_creds = mock.Mock() - transport_class( - host="squid.clam.whelk", - credentials=cred, - ssl_channel_credentials=mock_ssl_channel_creds, - ) - mock_create_channel.assert_called_once_with( - "squid.clam.whelk:443", - credentials=cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_channel_creds, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) + # Check that the path construction is reversible. + actual = ProjectServiceClient.parse_common_location_path(path) + assert expected == actual - # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls - # is used. - with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): - with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: - transport_class( - credentials=cred, - client_cert_source_for_mtls=client_cert_source_callback, - ) - expected_cert, expected_key = client_cert_source_callback() - mock_ssl_cred.assert_called_once_with( - certificate_chain=expected_cert, private_key=expected_key - ) +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() -def test_maps_platform_datasets_v1_alpha_http_transport_client_cert_source_for_mtls(): - cred = ga_credentials.AnonymousCredentials() - with mock.patch( - "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" - ) as mock_configure_mtls_channel: - transports.MapsPlatformDatasetsV1AlphaRestTransport( - credentials=cred, client_cert_source_for_mtls=client_cert_source_callback + with mock.patch.object( + transports.ProjectServiceTransport, "_prep_wrapped_messages" + ) as prep: + client = ProjectServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, ) - mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) + prep.assert_called_once_with(client_info) + + with mock.patch.object( + transports.ProjectServiceTransport, "_prep_wrapped_messages" + ) as prep: + transport_class = ProjectServiceClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) -@pytest.mark.parametrize( - "transport_name", - [ - "grpc", - "grpc_asyncio", - "rest", - ], -) -def test_maps_platform_datasets_v1_alpha_host_no_port(transport_name): - client = MapsPlatformDatasetsV1AlphaClient( 
+@pytest.mark.asyncio +async def test_transport_close_async(): + client = ProjectServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), - client_options=client_options.ClientOptions( - api_endpoint="mapsplatformdatasets.googleapis.com" - ), - transport=transport_name, - ) - assert client.transport._host == ( - "mapsplatformdatasets.googleapis.com:443" - if transport_name in ["grpc", "grpc_asyncio"] - else "https://mapsplatformdatasets.googleapis.com" + transport="grpc_asyncio", ) + with mock.patch.object( + type(getattr(client.transport, "grpc_channel")), "close" + ) as close: + async with client: + close.assert_not_called() + close.assert_called_once() -@pytest.mark.parametrize( - "transport_name", - [ - "grpc", - "grpc_asyncio", - "rest", - ], -) -def test_maps_platform_datasets_v1_alpha_host_with_port(transport_name): - client = MapsPlatformDatasetsV1AlphaClient( +def test_get_operation_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.GetOperationRequest +): + client = ProjectServiceClient( credentials=ga_credentials.AnonymousCredentials(), - client_options=client_options.ClientOptions( - api_endpoint="mapsplatformdatasets.googleapis.com:8000" - ), - transport=transport_name, + transport=transport, ) - assert client.transport._host == ( - "mapsplatformdatasets.googleapis.com:8000" - if transport_name in ["grpc", "grpc_asyncio"] - else "https://mapsplatformdatasets.googleapis.com:8000" + + request = request_type() + request = json_format.ParseDict( + { + "name": "projects/sample1/locations/sample2/catalogs/sample3/branches/sample4/operations/sample5" + }, + request, ) + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_operation(request) + @pytest.mark.parametrize( - "transport_name", + "request_type", [ - "rest", + operations_pb2.GetOperationRequest, + dict, ], ) -def test_maps_platform_datasets_v1_alpha_client_transport_session_collision( - transport_name, -): - creds1 = ga_credentials.AnonymousCredentials() - creds2 = ga_credentials.AnonymousCredentials() - client1 = MapsPlatformDatasetsV1AlphaClient( - credentials=creds1, - transport=transport_name, - ) - client2 = MapsPlatformDatasetsV1AlphaClient( - credentials=creds2, - transport=transport_name, +def test_get_operation_rest(request_type): + client = ProjectServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) - session1 = client1.transport.create_dataset._session - session2 = client2.transport.create_dataset._session - assert session1 != session2 - session1 = client1.transport.update_dataset_metadata._session - session2 = client2.transport.update_dataset_metadata._session - assert session1 != session2 - session1 = client1.transport.get_dataset._session - session2 = client2.transport.get_dataset._session - assert session1 != session2 - session1 = client1.transport.list_dataset_versions._session - session2 = client2.transport.list_dataset_versions._session - assert session1 != session2 - session1 = client1.transport.list_datasets._session - session2 = client2.transport.list_datasets._session - assert session1 != session2 - session1 = client1.transport.delete_dataset._session - session2 = client2.transport.delete_dataset._session - assert session1 != session2 - session1 = client1.transport.delete_dataset_version._session - session2 = client2.transport.delete_dataset_version._session - assert session1 != session2 + request_init = { + "name": "projects/sample1/locations/sample2/catalogs/sample3/branches/sample4/operations/sample5" + } + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation() + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) -def test_maps_platform_datasets_v1_alpha_grpc_transport_channel(): - channel = grpc.secure_channel("http://localhost/", grpc.local_channel_credentials()) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value - # Check that channel is used if provided. - transport = transports.MapsPlatformDatasetsV1AlphaGrpcTransport( - host="squid.clam.whelk", - channel=channel, - ) - assert transport.grpc_channel == channel - assert transport._host == "squid.clam.whelk:443" - assert transport._ssl_channel_credentials == None + response = client.get_operation(request) + # Establish that the response is the type that we expect. 
+ assert isinstance(response, operations_pb2.Operation) -def test_maps_platform_datasets_v1_alpha_grpc_asyncio_transport_channel(): - channel = aio.secure_channel("http://localhost/", grpc.local_channel_credentials()) - # Check that channel is used if provided. - transport = transports.MapsPlatformDatasetsV1AlphaGrpcAsyncIOTransport( - host="squid.clam.whelk", - channel=channel, +def test_list_operations_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.ListOperationsRequest +): + client = ProjectServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - assert transport.grpc_channel == channel - assert transport._host == "squid.clam.whelk:443" - assert transport._ssl_channel_credentials == None + + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2/catalogs/sample3"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_operations(request) -# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are -# removed from grpc/grpc_asyncio transport constructor. @pytest.mark.parametrize( - "transport_class", + "request_type", [ - transports.MapsPlatformDatasetsV1AlphaGrpcTransport, - transports.MapsPlatformDatasetsV1AlphaGrpcAsyncIOTransport, + operations_pb2.ListOperationsRequest, + dict, ], ) -def test_maps_platform_datasets_v1_alpha_transport_channel_mtls_with_client_cert_source( - transport_class, -): - with mock.patch( - "grpc.ssl_channel_credentials", autospec=True - ) as grpc_ssl_channel_cred: - with mock.patch.object( - transport_class, "create_channel" - ) as grpc_create_channel: - mock_ssl_cred = mock.Mock() - grpc_ssl_channel_cred.return_value = mock_ssl_cred +def test_list_operations_rest(request_type): + client = ProjectServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {"name": "projects/sample1/locations/sample2/catalogs/sample3"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = operations_pb2.ListOperationsResponse() - mock_grpc_channel = mock.Mock() - grpc_create_channel.return_value = mock_grpc_channel + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) - cred = ga_credentials.AnonymousCredentials() - with pytest.warns(DeprecationWarning): - with mock.patch.object(google.auth, "default") as adc: - adc.return_value = (cred, None) - transport = transport_class( - host="squid.clam.whelk", - api_mtls_endpoint="mtls.squid.clam.whelk", - client_cert_source=client_cert_source_callback, - ) - adc.assert_called_once() + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value - grpc_ssl_channel_cred.assert_called_once_with( - certificate_chain=b"cert bytes", private_key=b"key bytes" - ) - grpc_create_channel.assert_called_once_with( - "mtls.squid.clam.whelk:443", - credentials=cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_cred, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - assert transport.grpc_channel == mock_grpc_channel - assert transport._ssl_channel_credentials == mock_ssl_cred + response = client.list_operations(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) + + +def test_get_operation(transport: str = "grpc"): + client = ProjectServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.GetOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation() + response = client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) -# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are -# removed from grpc/grpc_asyncio transport constructor. 
-@pytest.mark.parametrize( - "transport_class", - [ - transports.MapsPlatformDatasetsV1AlphaGrpcTransport, - transports.MapsPlatformDatasetsV1AlphaGrpcAsyncIOTransport, - ], -) -def test_maps_platform_datasets_v1_alpha_transport_channel_mtls_with_adc( - transport_class, -): - mock_ssl_cred = mock.Mock() - with mock.patch.multiple( - "google.auth.transport.grpc.SslCredentials", - __init__=mock.Mock(return_value=None), - ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), - ): - with mock.patch.object( - transport_class, "create_channel" - ) as grpc_create_channel: - mock_grpc_channel = mock.Mock() - grpc_create_channel.return_value = mock_grpc_channel - mock_cred = mock.Mock() +@pytest.mark.asyncio +async def test_get_operation_async(transport: str = "grpc_asyncio"): + client = ProjectServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) - with pytest.warns(DeprecationWarning): - transport = transport_class( - host="squid.clam.whelk", - credentials=mock_cred, - api_mtls_endpoint="mtls.squid.clam.whelk", - client_cert_source=None, - ) + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.GetOperationRequest() - grpc_create_channel.assert_called_once_with( - "mtls.squid.clam.whelk:443", - credentials=mock_cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_cred, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - assert transport.grpc_channel == mock_grpc_channel + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + response = await client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) -def test_dataset_path(): - project = "squid" - dataset = "clam" - expected = "projects/{project}/datasets/{dataset}".format( - project=project, - dataset=dataset, + +def test_get_operation_field_headers(): + client = ProjectServiceClient( + credentials=ga_credentials.AnonymousCredentials(), ) - actual = MapsPlatformDatasetsV1AlphaClient.dataset_path(project, dataset) - assert expected == actual + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.GetOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + call.return_value = operations_pb2.Operation() -def test_parse_dataset_path(): - expected = { - "project": "whelk", - "dataset": "octopus", - } - path = MapsPlatformDatasetsV1AlphaClient.dataset_path(**expected) + client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request - # Check that the path construction is reversible. 
- actual = MapsPlatformDatasetsV1AlphaClient.parse_dataset_path(path) - assert expected == actual + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] -def test_common_billing_account_path(): - billing_account = "oyster" - expected = "billingAccounts/{billing_account}".format( - billing_account=billing_account, - ) - actual = MapsPlatformDatasetsV1AlphaClient.common_billing_account_path( - billing_account +@pytest.mark.asyncio +async def test_get_operation_field_headers_async(): + client = ProjectServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), ) - assert expected == actual + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.GetOperationRequest() + request.name = "locations" -def test_parse_common_billing_account_path(): - expected = { - "billing_account": "nudibranch", - } - path = MapsPlatformDatasetsV1AlphaClient.common_billing_account_path(**expected) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + await client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request - # Check that the path construction is reversible. - actual = MapsPlatformDatasetsV1AlphaClient.parse_common_billing_account_path(path) - assert expected == actual + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] -def test_common_folder_path(): - folder = "cuttlefish" - expected = "folders/{folder}".format( - folder=folder, +def test_get_operation_from_dict(): + client = ProjectServiceClient( + credentials=ga_credentials.AnonymousCredentials(), ) - actual = MapsPlatformDatasetsV1AlphaClient.common_folder_path(folder) - assert expected == actual + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation() + response = client.get_operation( + request={ + "name": "locations", + } + ) + call.assert_called() -def test_parse_common_folder_path(): - expected = { - "folder": "mussel", - } - path = MapsPlatformDatasetsV1AlphaClient.common_folder_path(**expected) - # Check that the path construction is reversible. - actual = MapsPlatformDatasetsV1AlphaClient.parse_common_folder_path(path) - assert expected == actual +@pytest.mark.asyncio +async def test_get_operation_from_dict_async(): + client = ProjectServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + response = await client.get_operation( + request={ + "name": "locations", + } + ) + call.assert_called() -def test_common_organization_path(): - organization = "winkle" - expected = "organizations/{organization}".format( - organization=organization, +def test_list_operations(transport: str = "grpc"): + client = ProjectServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - actual = MapsPlatformDatasetsV1AlphaClient.common_organization_path(organization) - assert expected == actual + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.ListOperationsRequest() -def test_parse_common_organization_path(): - expected = { - "organization": "nautilus", - } - path = MapsPlatformDatasetsV1AlphaClient.common_organization_path(**expected) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.ListOperationsResponse() + response = client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request - # Check that the path construction is reversible. - actual = MapsPlatformDatasetsV1AlphaClient.parse_common_organization_path(path) - assert expected == actual + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) -def test_common_project_path(): - project = "scallop" - expected = "projects/{project}".format( - project=project, +@pytest.mark.asyncio +async def test_list_operations_async(transport: str = "grpc_asyncio"): + client = ProjectServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - actual = MapsPlatformDatasetsV1AlphaClient.common_project_path(project) - assert expected == actual + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.ListOperationsRequest() -def test_parse_common_project_path(): - expected = { - "project": "abalone", - } - path = MapsPlatformDatasetsV1AlphaClient.common_project_path(**expected) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + response = await client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request - # Check that the path construction is reversible. - actual = MapsPlatformDatasetsV1AlphaClient.parse_common_project_path(path) - assert expected == actual + # Establish that the response is the type that we expect. 
+ assert isinstance(response, operations_pb2.ListOperationsResponse) -def test_common_location_path(): - project = "squid" - location = "clam" - expected = "projects/{project}/locations/{location}".format( - project=project, - location=location, +def test_list_operations_field_headers(): + client = ProjectServiceClient( + credentials=ga_credentials.AnonymousCredentials(), ) - actual = MapsPlatformDatasetsV1AlphaClient.common_location_path(project, location) - assert expected == actual + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.ListOperationsRequest() + request.name = "locations" -def test_parse_common_location_path(): - expected = { - "project": "whelk", - "location": "octopus", - } - path = MapsPlatformDatasetsV1AlphaClient.common_location_path(**expected) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + call.return_value = operations_pb2.ListOperationsResponse() - # Check that the path construction is reversible. - actual = MapsPlatformDatasetsV1AlphaClient.parse_common_location_path(path) - assert expected == actual + client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] -def test_client_with_default_client_info(): - client_info = gapic_v1.client_info.ClientInfo() - with mock.patch.object( - transports.MapsPlatformDatasetsV1AlphaTransport, "_prep_wrapped_messages" - ) as prep: - client = MapsPlatformDatasetsV1AlphaClient( - credentials=ga_credentials.AnonymousCredentials(), - client_info=client_info, +@pytest.mark.asyncio +async def test_list_operations_field_headers_async(): + client = ProjectServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.ListOperationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() ) - prep.assert_called_once_with(client_info) + await client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request - with mock.patch.object( - transports.MapsPlatformDatasetsV1AlphaTransport, "_prep_wrapped_messages" - ) as prep: - transport_class = MapsPlatformDatasetsV1AlphaClient.get_transport_class() - transport = transport_class( - credentials=ga_credentials.AnonymousCredentials(), - client_info=client_info, + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_list_operations_from_dict(): + client = ProjectServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.ListOperationsResponse() + + response = client.list_operations( + request={ + "name": "locations", + } ) - prep.assert_called_once_with(client_info) + call.assert_called() @pytest.mark.asyncio -async def test_transport_close_async(): - client = MapsPlatformDatasetsV1AlphaAsyncClient( +async def test_list_operations_from_dict_async(): + client = ProjectServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", ) - with mock.patch.object( - type(getattr(client.transport, "grpc_channel")), "close" - ) as close: - async with client: - close.assert_not_called() - close.assert_called_once() + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + response = await client.list_operations( + request={ + "name": "locations", + } + ) + call.assert_called() def test_transport_close(): @@ -7521,7 +7899,7 @@ def test_transport_close(): } for transport, close_name in transports.items(): - client = MapsPlatformDatasetsV1AlphaClient( + client = ProjectServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport ) with mock.patch.object( @@ -7538,7 +7916,7 @@ def test_client_ctx(): "grpc", ] for transport in transports: - client = MapsPlatformDatasetsV1AlphaClient( + client = ProjectServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport ) # Test client calls underlying transport. 
@@ -7552,14 +7930,8 @@ def test_client_ctx(): @pytest.mark.parametrize( "client_class,transport_class", [ - ( - MapsPlatformDatasetsV1AlphaClient, - transports.MapsPlatformDatasetsV1AlphaGrpcTransport, - ), - ( - MapsPlatformDatasetsV1AlphaAsyncClient, - transports.MapsPlatformDatasetsV1AlphaGrpcAsyncIOTransport, - ), + (ProjectServiceClient, transports.ProjectServiceGrpcTransport), + (ProjectServiceAsyncClient, transports.ProjectServiceGrpcAsyncIOTransport), ], ) def test_api_key_credentials(client_class, transport_class): diff --git a/packages/google-cloud-retail/tests/unit/gapic/retail_v2alpha/test_search_service.py b/packages/google-cloud-retail/tests/unit/gapic/retail_v2alpha/test_search_service.py index 4d174b905830..b2b3e692f807 100644 --- a/packages/google-cloud-retail/tests/unit/gapic/retail_v2alpha/test_search_service.py +++ b/packages/google-cloud-retail/tests/unit/gapic/retail_v2alpha/test_search_service.py @@ -1477,13 +1477,13 @@ def test_search_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("placement", ""),)), ) pager = client.search(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 diff --git a/packages/google-cloud-retail/tests/unit/gapic/retail_v2alpha/test_serving_config_service.py b/packages/google-cloud-retail/tests/unit/gapic/retail_v2alpha/test_serving_config_service.py index 64305bed41ed..aee5123f8ed7 100644 --- a/packages/google-cloud-retail/tests/unit/gapic/retail_v2alpha/test_serving_config_service.py +++ b/packages/google-cloud-retail/tests/unit/gapic/retail_v2alpha/test_serving_config_service.py @@ -1229,6 +1229,7 @@ def test_create_serving_config(request_type, transport: str = "grpc"): diversity_level="diversity_level_value", diversity_type=gcr_serving_config.ServingConfig.DiversityType.RULE_BASED_DIVERSITY, enable_category_filter_level="enable_category_filter_level_value", + ignore_recs_denylist=True, solution_types=[common.SolutionType.SOLUTION_TYPE_RECOMMENDATION], ) response = client.create_serving_config(request) @@ -1262,6 +1263,7 @@ def test_create_serving_config(request_type, transport: str = "grpc"): == gcr_serving_config.ServingConfig.DiversityType.RULE_BASED_DIVERSITY ) assert response.enable_category_filter_level == "enable_category_filter_level_value" + assert response.ignore_recs_denylist is True assert response.solution_types == [common.SolutionType.SOLUTION_TYPE_RECOMMENDATION] @@ -1390,6 +1392,7 @@ async def test_create_serving_config_empty_call_async(): diversity_level="diversity_level_value", diversity_type=gcr_serving_config.ServingConfig.DiversityType.RULE_BASED_DIVERSITY, enable_category_filter_level="enable_category_filter_level_value", + ignore_recs_denylist=True, solution_types=[common.SolutionType.SOLUTION_TYPE_RECOMMENDATION], ) ) @@ -1482,6 +1485,7 @@ async def test_create_serving_config_async( diversity_level="diversity_level_value", diversity_type=gcr_serving_config.ServingConfig.DiversityType.RULE_BASED_DIVERSITY, enable_category_filter_level="enable_category_filter_level_value", + ignore_recs_denylist=True, solution_types=[common.SolutionType.SOLUTION_TYPE_RECOMMENDATION], ) ) @@ -1516,6 +1520,7 @@ async def test_create_serving_config_async( == gcr_serving_config.ServingConfig.DiversityType.RULE_BASED_DIVERSITY ) assert response.enable_category_filter_level == 
"enable_category_filter_level_value" + assert response.ignore_recs_denylist is True assert response.solution_types == [common.SolutionType.SOLUTION_TYPE_RECOMMENDATION] @@ -2111,6 +2116,7 @@ def test_update_serving_config(request_type, transport: str = "grpc"): diversity_level="diversity_level_value", diversity_type=gcr_serving_config.ServingConfig.DiversityType.RULE_BASED_DIVERSITY, enable_category_filter_level="enable_category_filter_level_value", + ignore_recs_denylist=True, solution_types=[common.SolutionType.SOLUTION_TYPE_RECOMMENDATION], ) response = client.update_serving_config(request) @@ -2144,6 +2150,7 @@ def test_update_serving_config(request_type, transport: str = "grpc"): == gcr_serving_config.ServingConfig.DiversityType.RULE_BASED_DIVERSITY ) assert response.enable_category_filter_level == "enable_category_filter_level_value" + assert response.ignore_recs_denylist is True assert response.solution_types == [common.SolutionType.SOLUTION_TYPE_RECOMMENDATION] @@ -2266,6 +2273,7 @@ async def test_update_serving_config_empty_call_async(): diversity_level="diversity_level_value", diversity_type=gcr_serving_config.ServingConfig.DiversityType.RULE_BASED_DIVERSITY, enable_category_filter_level="enable_category_filter_level_value", + ignore_recs_denylist=True, solution_types=[common.SolutionType.SOLUTION_TYPE_RECOMMENDATION], ) ) @@ -2358,6 +2366,7 @@ async def test_update_serving_config_async( diversity_level="diversity_level_value", diversity_type=gcr_serving_config.ServingConfig.DiversityType.RULE_BASED_DIVERSITY, enable_category_filter_level="enable_category_filter_level_value", + ignore_recs_denylist=True, solution_types=[common.SolutionType.SOLUTION_TYPE_RECOMMENDATION], ) ) @@ -2392,6 +2401,7 @@ async def test_update_serving_config_async( == gcr_serving_config.ServingConfig.DiversityType.RULE_BASED_DIVERSITY ) assert response.enable_category_filter_level == "enable_category_filter_level_value" + assert response.ignore_recs_denylist is True assert response.solution_types == [common.SolutionType.SOLUTION_TYPE_RECOMMENDATION] @@ -2600,6 +2610,7 @@ def test_get_serving_config(request_type, transport: str = "grpc"): diversity_level="diversity_level_value", diversity_type=serving_config.ServingConfig.DiversityType.RULE_BASED_DIVERSITY, enable_category_filter_level="enable_category_filter_level_value", + ignore_recs_denylist=True, solution_types=[common.SolutionType.SOLUTION_TYPE_RECOMMENDATION], ) response = client.get_serving_config(request) @@ -2633,6 +2644,7 @@ def test_get_serving_config(request_type, transport: str = "grpc"): == serving_config.ServingConfig.DiversityType.RULE_BASED_DIVERSITY ) assert response.enable_category_filter_level == "enable_category_filter_level_value" + assert response.ignore_recs_denylist is True assert response.solution_types == [common.SolutionType.SOLUTION_TYPE_RECOMMENDATION] @@ -2758,6 +2770,7 @@ async def test_get_serving_config_empty_call_async(): diversity_level="diversity_level_value", diversity_type=serving_config.ServingConfig.DiversityType.RULE_BASED_DIVERSITY, enable_category_filter_level="enable_category_filter_level_value", + ignore_recs_denylist=True, solution_types=[common.SolutionType.SOLUTION_TYPE_RECOMMENDATION], ) ) @@ -2850,6 +2863,7 @@ async def test_get_serving_config_async( diversity_level="diversity_level_value", diversity_type=serving_config.ServingConfig.DiversityType.RULE_BASED_DIVERSITY, enable_category_filter_level="enable_category_filter_level_value", + ignore_recs_denylist=True, 
solution_types=[common.SolutionType.SOLUTION_TYPE_RECOMMENDATION], ) ) @@ -2884,6 +2898,7 @@ async def test_get_serving_config_async( == serving_config.ServingConfig.DiversityType.RULE_BASED_DIVERSITY ) assert response.enable_category_filter_level == "enable_category_filter_level_value" + assert response.ignore_recs_denylist is True assert response.solution_types == [common.SolutionType.SOLUTION_TYPE_RECOMMENDATION] @@ -3476,13 +3491,13 @@ def test_list_serving_configs_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_serving_configs(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -3672,6 +3687,7 @@ def test_add_control(request_type, transport: str = "grpc"): diversity_level="diversity_level_value", diversity_type=gcr_serving_config.ServingConfig.DiversityType.RULE_BASED_DIVERSITY, enable_category_filter_level="enable_category_filter_level_value", + ignore_recs_denylist=True, solution_types=[common.SolutionType.SOLUTION_TYPE_RECOMMENDATION], ) response = client.add_control(request) @@ -3705,6 +3721,7 @@ def test_add_control(request_type, transport: str = "grpc"): == gcr_serving_config.ServingConfig.DiversityType.RULE_BASED_DIVERSITY ) assert response.enable_category_filter_level == "enable_category_filter_level_value" + assert response.ignore_recs_denylist is True assert response.solution_types == [common.SolutionType.SOLUTION_TYPE_RECOMMENDATION] @@ -3822,6 +3839,7 @@ async def test_add_control_empty_call_async(): diversity_level="diversity_level_value", diversity_type=gcr_serving_config.ServingConfig.DiversityType.RULE_BASED_DIVERSITY, enable_category_filter_level="enable_category_filter_level_value", + ignore_recs_denylist=True, solution_types=[common.SolutionType.SOLUTION_TYPE_RECOMMENDATION], ) ) @@ -3912,6 +3930,7 @@ async def test_add_control_async( diversity_level="diversity_level_value", diversity_type=gcr_serving_config.ServingConfig.DiversityType.RULE_BASED_DIVERSITY, enable_category_filter_level="enable_category_filter_level_value", + ignore_recs_denylist=True, solution_types=[common.SolutionType.SOLUTION_TYPE_RECOMMENDATION], ) ) @@ -3946,6 +3965,7 @@ async def test_add_control_async( == gcr_serving_config.ServingConfig.DiversityType.RULE_BASED_DIVERSITY ) assert response.enable_category_filter_level == "enable_category_filter_level_value" + assert response.ignore_recs_denylist is True assert response.solution_types == [common.SolutionType.SOLUTION_TYPE_RECOMMENDATION] @@ -4134,6 +4154,7 @@ def test_remove_control(request_type, transport: str = "grpc"): diversity_level="diversity_level_value", diversity_type=gcr_serving_config.ServingConfig.DiversityType.RULE_BASED_DIVERSITY, enable_category_filter_level="enable_category_filter_level_value", + ignore_recs_denylist=True, solution_types=[common.SolutionType.SOLUTION_TYPE_RECOMMENDATION], ) response = client.remove_control(request) @@ -4167,6 +4188,7 @@ def test_remove_control(request_type, transport: str = "grpc"): == gcr_serving_config.ServingConfig.DiversityType.RULE_BASED_DIVERSITY ) assert response.enable_category_filter_level == "enable_category_filter_level_value" + assert response.ignore_recs_denylist is True assert response.solution_types == [common.SolutionType.SOLUTION_TYPE_RECOMMENDATION] @@ -4284,6 +4306,7 @@ async 
def test_remove_control_empty_call_async(): diversity_level="diversity_level_value", diversity_type=gcr_serving_config.ServingConfig.DiversityType.RULE_BASED_DIVERSITY, enable_category_filter_level="enable_category_filter_level_value", + ignore_recs_denylist=True, solution_types=[common.SolutionType.SOLUTION_TYPE_RECOMMENDATION], ) ) @@ -4374,6 +4397,7 @@ async def test_remove_control_async( diversity_level="diversity_level_value", diversity_type=gcr_serving_config.ServingConfig.DiversityType.RULE_BASED_DIVERSITY, enable_category_filter_level="enable_category_filter_level_value", + ignore_recs_denylist=True, solution_types=[common.SolutionType.SOLUTION_TYPE_RECOMMENDATION], ) ) @@ -4408,6 +4432,7 @@ async def test_remove_control_async( == gcr_serving_config.ServingConfig.DiversityType.RULE_BASED_DIVERSITY ) assert response.enable_category_filter_level == "enable_category_filter_level_value" + assert response.ignore_recs_denylist is True assert response.solution_types == [common.SolutionType.SOLUTION_TYPE_RECOMMENDATION] @@ -4613,6 +4638,7 @@ def test_create_serving_config_rest(request_type): "diversity_level": "diversity_level_value", "diversity_type": 2, "enable_category_filter_level": "enable_category_filter_level_value", + "ignore_recs_denylist": True, "personalization_spec": {"mode": 1}, "solution_types": [1], } @@ -4707,6 +4733,7 @@ def get_message_fields(field): diversity_level="diversity_level_value", diversity_type=gcr_serving_config.ServingConfig.DiversityType.RULE_BASED_DIVERSITY, enable_category_filter_level="enable_category_filter_level_value", + ignore_recs_denylist=True, solution_types=[common.SolutionType.SOLUTION_TYPE_RECOMMENDATION], ) @@ -4744,6 +4771,7 @@ def get_message_fields(field): == gcr_serving_config.ServingConfig.DiversityType.RULE_BASED_DIVERSITY ) assert response.enable_category_filter_level == "enable_category_filter_level_value" + assert response.ignore_recs_denylist is True assert response.solution_types == [common.SolutionType.SOLUTION_TYPE_RECOMMENDATION] @@ -5402,6 +5430,7 @@ def test_update_serving_config_rest(request_type): "diversity_level": "diversity_level_value", "diversity_type": 2, "enable_category_filter_level": "enable_category_filter_level_value", + "ignore_recs_denylist": True, "personalization_spec": {"mode": 1}, "solution_types": [1], } @@ -5496,6 +5525,7 @@ def get_message_fields(field): diversity_level="diversity_level_value", diversity_type=gcr_serving_config.ServingConfig.DiversityType.RULE_BASED_DIVERSITY, enable_category_filter_level="enable_category_filter_level_value", + ignore_recs_denylist=True, solution_types=[common.SolutionType.SOLUTION_TYPE_RECOMMENDATION], ) @@ -5533,6 +5563,7 @@ def get_message_fields(field): == gcr_serving_config.ServingConfig.DiversityType.RULE_BASED_DIVERSITY ) assert response.enable_category_filter_level == "enable_category_filter_level_value" + assert response.ignore_recs_denylist is True assert response.solution_types == [common.SolutionType.SOLUTION_TYPE_RECOMMENDATION] @@ -5852,6 +5883,7 @@ def test_get_serving_config_rest(request_type): diversity_level="diversity_level_value", diversity_type=serving_config.ServingConfig.DiversityType.RULE_BASED_DIVERSITY, enable_category_filter_level="enable_category_filter_level_value", + ignore_recs_denylist=True, solution_types=[common.SolutionType.SOLUTION_TYPE_RECOMMENDATION], ) @@ -5889,6 +5921,7 @@ def test_get_serving_config_rest(request_type): == serving_config.ServingConfig.DiversityType.RULE_BASED_DIVERSITY ) assert response.enable_category_filter_level == 
"enable_category_filter_level_value" + assert response.ignore_recs_denylist is True assert response.solution_types == [common.SolutionType.SOLUTION_TYPE_RECOMMENDATION] @@ -6595,6 +6628,7 @@ def test_add_control_rest(request_type): diversity_level="diversity_level_value", diversity_type=gcr_serving_config.ServingConfig.DiversityType.RULE_BASED_DIVERSITY, enable_category_filter_level="enable_category_filter_level_value", + ignore_recs_denylist=True, solution_types=[common.SolutionType.SOLUTION_TYPE_RECOMMENDATION], ) @@ -6632,6 +6666,7 @@ def test_add_control_rest(request_type): == gcr_serving_config.ServingConfig.DiversityType.RULE_BASED_DIVERSITY ) assert response.enable_category_filter_level == "enable_category_filter_level_value" + assert response.ignore_recs_denylist is True assert response.solution_types == [common.SolutionType.SOLUTION_TYPE_RECOMMENDATION] @@ -6954,6 +6989,7 @@ def test_remove_control_rest(request_type): diversity_level="diversity_level_value", diversity_type=gcr_serving_config.ServingConfig.DiversityType.RULE_BASED_DIVERSITY, enable_category_filter_level="enable_category_filter_level_value", + ignore_recs_denylist=True, solution_types=[common.SolutionType.SOLUTION_TYPE_RECOMMENDATION], ) @@ -6991,6 +7027,7 @@ def test_remove_control_rest(request_type): == gcr_serving_config.ServingConfig.DiversityType.RULE_BASED_DIVERSITY ) assert response.enable_category_filter_level == "enable_category_filter_level_value" + assert response.ignore_recs_denylist is True assert response.solution_types == [common.SolutionType.SOLUTION_TYPE_RECOMMENDATION] diff --git a/packages/google-cloud-retail/tests/unit/gapic/retail_v2beta/test_catalog_service.py b/packages/google-cloud-retail/tests/unit/gapic/retail_v2beta/test_catalog_service.py index f5d88c7565d4..f8d669a4bb9f 100644 --- a/packages/google-cloud-retail/tests/unit/gapic/retail_v2beta/test_catalog_service.py +++ b/packages/google-cloud-retail/tests/unit/gapic/retail_v2beta/test_catalog_service.py @@ -1531,13 +1531,13 @@ def test_list_catalogs_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_catalogs(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 diff --git a/packages/google-cloud-retail/tests/unit/gapic/retail_v2beta/test_completion_service.py b/packages/google-cloud-retail/tests/unit/gapic/retail_v2beta/test_completion_service.py index d4de73cd4cf4..012b23b1b645 100644 --- a/packages/google-cloud-retail/tests/unit/gapic/retail_v2beta/test_completion_service.py +++ b/packages/google-cloud-retail/tests/unit/gapic/retail_v2beta/test_completion_service.py @@ -1885,6 +1885,7 @@ def test_complete_query_rest_required_fields( ( "dataset", "device_type", + "enable_attribute_suggestions", "entity", "language_codes", "max_suggestions", @@ -1958,6 +1959,7 @@ def test_complete_query_rest_unset_required_fields(): ( "dataset", "deviceType", + "enableAttributeSuggestions", "entity", "languageCodes", "maxSuggestions", diff --git a/packages/google-cloud-retail/tests/unit/gapic/retail_v2beta/test_control_service.py b/packages/google-cloud-retail/tests/unit/gapic/retail_v2beta/test_control_service.py index 6d9f7647bd44..0a77e73bf443 100644 --- a/packages/google-cloud-retail/tests/unit/gapic/retail_v2beta/test_control_service.py +++ 
b/packages/google-cloud-retail/tests/unit/gapic/retail_v2beta/test_control_service.py @@ -3183,13 +3183,13 @@ def test_list_controls_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_controls(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -3412,11 +3412,20 @@ def test_create_control_rest(request_type): "twoway_synonyms_action": { "synonyms": ["synonyms_value1", "synonyms_value2"] }, + "force_return_facet_action": { + "facet_position_adjustments": [ + {"attribute_name": "attribute_name_value", "position": 885} + ] + }, + "remove_facet_action": { + "attribute_names": ["attribute_names_value1", "attribute_names_value2"] + }, "condition": { "query_terms": [{"value": "value_value", "full_match": True}], "active_time_range": [ {"start_time": {"seconds": 751, "nanos": 543}, "end_time": {}} ], + "page_categories": ["page_categories_value1", "page_categories_value2"], }, }, "name": "name_value", @@ -4209,11 +4218,20 @@ def test_update_control_rest(request_type): "twoway_synonyms_action": { "synonyms": ["synonyms_value1", "synonyms_value2"] }, + "force_return_facet_action": { + "facet_position_adjustments": [ + {"attribute_name": "attribute_name_value", "position": 885} + ] + }, + "remove_facet_action": { + "attribute_names": ["attribute_names_value1", "attribute_names_value2"] + }, "condition": { "query_terms": [{"value": "value_value", "full_match": True}], "active_time_range": [ {"start_time": {"seconds": 751, "nanos": 543}, "end_time": {}} ], + "page_categories": ["page_categories_value1", "page_categories_value2"], }, }, "name": "projects/sample1/locations/sample2/catalogs/sample3/controls/sample4", diff --git a/packages/google-cloud-retail/tests/unit/gapic/retail_v2beta/test_model_service.py b/packages/google-cloud-retail/tests/unit/gapic/retail_v2beta/test_model_service.py index 05e75d4edb9e..61dc74116faa 100644 --- a/packages/google-cloud-retail/tests/unit/gapic/retail_v2beta/test_model_service.py +++ b/packages/google-cloud-retail/tests/unit/gapic/retail_v2beta/test_model_service.py @@ -3513,13 +3513,13 @@ def test_list_models_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_models(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -4498,6 +4498,9 @@ def test_create_model_rest(request_type): ] } ], + "model_features_config": { + "frequently_bought_together_config": {"context_products_type": 1} + }, } # The version of a generated dependency at test runtime may differ from the version used during generation. # Delete any fields which are not present in the current runtime dependency @@ -6561,6 +6564,9 @@ def test_update_model_rest(request_type): ] } ], + "model_features_config": { + "frequently_bought_together_config": {"context_products_type": 1} + }, } # The version of a generated dependency at test runtime may differ from the version used during generation. 
# Delete any fields which are not present in the current runtime dependency diff --git a/packages/google-cloud-retail/tests/unit/gapic/retail_v2beta/test_product_service.py b/packages/google-cloud-retail/tests/unit/gapic/retail_v2beta/test_product_service.py index c3e5b88f9d82..e7eed26a815a 100644 --- a/packages/google-cloud-retail/tests/unit/gapic/retail_v2beta/test_product_service.py +++ b/packages/google-cloud-retail/tests/unit/gapic/retail_v2beta/test_product_service.py @@ -64,10 +64,10 @@ pagers, transports, ) +from google.cloud.retail_v2beta.types import product_service, promotion, purge_config from google.cloud.retail_v2beta.types import common, import_config from google.cloud.retail_v2beta.types import product from google.cloud.retail_v2beta.types import product as gcr_product -from google.cloud.retail_v2beta.types import product_service, promotion def client_cert_source_callback(): @@ -2475,13 +2475,13 @@ def test_list_products_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_products(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -3445,6 +3445,295 @@ async def test_delete_product_flattened_error_async(): ) +@pytest.mark.parametrize( + "request_type", + [ + purge_config.PurgeProductsRequest, + dict, + ], +) +def test_purge_products(request_type, transport: str = "grpc"): + client = ProductServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.purge_products), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.purge_products(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = purge_config.PurgeProductsRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_purge_products_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ProductServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.purge_products), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.purge_products() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == purge_config.PurgeProductsRequest() + + +def test_purge_products_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. 
+ client = ProductServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = purge_config.PurgeProductsRequest( + parent="parent_value", + filter="filter_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.purge_products), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.purge_products(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == purge_config.PurgeProductsRequest( + parent="parent_value", + filter="filter_value", + ) + + +def test_purge_products_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ProductServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.purge_products in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.purge_products] = mock_rpc + request = {} + client.purge_products(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.purge_products(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_purge_products_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ProductServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.purge_products), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.purge_products() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == purge_config.PurgeProductsRequest() + + +@pytest.mark.asyncio +async def test_purge_products_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = ProductServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.purge_products + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[ + client._client._transport.purge_products + ] = mock_object + + request = {} + await client.purge_products(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.purge_products(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + +@pytest.mark.asyncio +async def test_purge_products_async( + transport: str = "grpc_asyncio", request_type=purge_config.PurgeProductsRequest +): + client = ProductServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.purge_products), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.purge_products(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = purge_config.PurgeProductsRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_purge_products_async_from_dict(): + await test_purge_products_async(request_type=dict) + + +def test_purge_products_field_headers(): + client = ProductServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = purge_config.PurgeProductsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.purge_products), "__call__") as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.purge_products(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_purge_products_field_headers_async(): + client = ProductServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = purge_config.PurgeProductsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.purge_products), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.purge_products(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + @pytest.mark.parametrize( "request_type", [ @@ -7738,6 +8027,267 @@ def test_delete_product_rest_error(): ) +@pytest.mark.parametrize( + "request_type", + [ + purge_config.PurgeProductsRequest, + dict, + ], +) +def test_purge_products_rest(request_type): + client = ProductServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "parent": "projects/sample1/locations/sample2/catalogs/sample3/branches/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.purge_products(request) + + # Establish that the response is the type that we expect. 
+ assert response.operation.name == "operations/spam" + + +def test_purge_products_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ProductServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.purge_products in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.purge_products] = mock_rpc + + request = {} + client.purge_products(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.purge_products(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_purge_products_rest_required_fields( + request_type=purge_config.PurgeProductsRequest, +): + transport_class = transports.ProductServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request_init["filter"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).purge_products._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + jsonified_request["filter"] = "filter_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).purge_products._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + assert "filter" in jsonified_request + assert jsonified_request["filter"] == "filter_value" + + client = ProductServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.purge_products(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_purge_products_rest_unset_required_fields(): + transport = transports.ProductServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.purge_products._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(()) + & set( + ( + "parent", + "filter", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_purge_products_rest_interceptors(null_interceptor): + transport = transports.ProductServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.ProductServiceRestInterceptor(), + ) + client = ProductServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.ProductServiceRestInterceptor, "post_purge_products" + ) as post, mock.patch.object( + transports.ProductServiceRestInterceptor, "pre_purge_products" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = purge_config.PurgeProductsRequest.pb( + purge_config.PurgeProductsRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = purge_config.PurgeProductsRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.purge_products( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_purge_products_rest_bad_request( + transport: str = "rest", request_type=purge_config.PurgeProductsRequest +): + client = ProductServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "parent": "projects/sample1/locations/sample2/catalogs/sample3/branches/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
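
For orientation, the tests in this section mock out the transport entirely; against a live backend the new PurgeProducts RPC would be driven roughly as sketched below. This is a minimal, hypothetical sketch: the parent path and filter string are placeholders, and application-default credentials are assumed.

```python
from google.cloud import retail_v2beta
from google.cloud.retail_v2beta.types import purge_config

# Placeholder branch resource name and filter; both fields are required,
# as the required-fields test in this section checks.
client = retail_v2beta.ProductServiceClient()
request = purge_config.PurgeProductsRequest(
    parent="projects/my-project/locations/global/catalogs/default_catalog/branches/0",
    filter="*",  # illustrative only; see the service docs for accepted filter syntax
)

# purge_products returns a long-running operation; wait for it to finish.
operation = client.purge_products(request=request)
response = operation.result()
print(response)
```
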
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.purge_products(request) + + +def test_purge_products_rest_error(): + client = ProductServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + @pytest.mark.parametrize( "request_type", [ @@ -9746,6 +10296,7 @@ def test_product_service_base_transport(): "list_products", "update_product", "delete_product", + "purge_products", "import_products", "set_inventory", "add_fulfillment_places", @@ -10049,6 +10600,9 @@ def test_product_service_client_transport_session_collision(transport_name): session1 = client1.transport.delete_product._session session2 = client2.transport.delete_product._session assert session1 != session2 + session1 = client1.transport.purge_products._session + session2 = client2.transport.purge_products._session + assert session1 != session2 session1 = client1.transport.import_products._session session2 = client2.transport.import_products._session assert session1 != session2 diff --git a/packages/google-cloud-retail/tests/unit/gapic/retail_v2beta/test_search_service.py b/packages/google-cloud-retail/tests/unit/gapic/retail_v2beta/test_search_service.py index fc98b6e6b9bb..ceb57adeac0e 100644 --- a/packages/google-cloud-retail/tests/unit/gapic/retail_v2beta/test_search_service.py +++ b/packages/google-cloud-retail/tests/unit/gapic/retail_v2beta/test_search_service.py @@ -1477,13 +1477,13 @@ def test_search_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("placement", ""),)), ) pager = client.search(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 diff --git a/packages/google-cloud-retail/tests/unit/gapic/retail_v2beta/test_serving_config_service.py b/packages/google-cloud-retail/tests/unit/gapic/retail_v2beta/test_serving_config_service.py index 9f1d2f4c96f2..95c30d1c63b7 100644 --- a/packages/google-cloud-retail/tests/unit/gapic/retail_v2beta/test_serving_config_service.py +++ b/packages/google-cloud-retail/tests/unit/gapic/retail_v2beta/test_serving_config_service.py @@ -1229,6 +1229,7 @@ def test_create_serving_config(request_type, transport: str = "grpc"): diversity_level="diversity_level_value", diversity_type=gcr_serving_config.ServingConfig.DiversityType.RULE_BASED_DIVERSITY, enable_category_filter_level="enable_category_filter_level_value", + ignore_recs_denylist=True, solution_types=[common.SolutionType.SOLUTION_TYPE_RECOMMENDATION], ) response = client.create_serving_config(request) @@ -1262,6 +1263,7 @@ def test_create_serving_config(request_type, transport: str = "grpc"): == gcr_serving_config.ServingConfig.DiversityType.RULE_BASED_DIVERSITY ) assert response.enable_category_filter_level == "enable_category_filter_level_value" + assert response.ignore_recs_denylist is True assert response.solution_types == [common.SolutionType.SOLUTION_TYPE_RECOMMENDATION] @@ -1390,6 +1392,7 @@ async def test_create_serving_config_empty_call_async(): diversity_level="diversity_level_value", diversity_type=gcr_serving_config.ServingConfig.DiversityType.RULE_BASED_DIVERSITY, 
enable_category_filter_level="enable_category_filter_level_value", + ignore_recs_denylist=True, solution_types=[common.SolutionType.SOLUTION_TYPE_RECOMMENDATION], ) ) @@ -1482,6 +1485,7 @@ async def test_create_serving_config_async( diversity_level="diversity_level_value", diversity_type=gcr_serving_config.ServingConfig.DiversityType.RULE_BASED_DIVERSITY, enable_category_filter_level="enable_category_filter_level_value", + ignore_recs_denylist=True, solution_types=[common.SolutionType.SOLUTION_TYPE_RECOMMENDATION], ) ) @@ -1516,6 +1520,7 @@ async def test_create_serving_config_async( == gcr_serving_config.ServingConfig.DiversityType.RULE_BASED_DIVERSITY ) assert response.enable_category_filter_level == "enable_category_filter_level_value" + assert response.ignore_recs_denylist is True assert response.solution_types == [common.SolutionType.SOLUTION_TYPE_RECOMMENDATION] @@ -2111,6 +2116,7 @@ def test_update_serving_config(request_type, transport: str = "grpc"): diversity_level="diversity_level_value", diversity_type=gcr_serving_config.ServingConfig.DiversityType.RULE_BASED_DIVERSITY, enable_category_filter_level="enable_category_filter_level_value", + ignore_recs_denylist=True, solution_types=[common.SolutionType.SOLUTION_TYPE_RECOMMENDATION], ) response = client.update_serving_config(request) @@ -2144,6 +2150,7 @@ def test_update_serving_config(request_type, transport: str = "grpc"): == gcr_serving_config.ServingConfig.DiversityType.RULE_BASED_DIVERSITY ) assert response.enable_category_filter_level == "enable_category_filter_level_value" + assert response.ignore_recs_denylist is True assert response.solution_types == [common.SolutionType.SOLUTION_TYPE_RECOMMENDATION] @@ -2266,6 +2273,7 @@ async def test_update_serving_config_empty_call_async(): diversity_level="diversity_level_value", diversity_type=gcr_serving_config.ServingConfig.DiversityType.RULE_BASED_DIVERSITY, enable_category_filter_level="enable_category_filter_level_value", + ignore_recs_denylist=True, solution_types=[common.SolutionType.SOLUTION_TYPE_RECOMMENDATION], ) ) @@ -2358,6 +2366,7 @@ async def test_update_serving_config_async( diversity_level="diversity_level_value", diversity_type=gcr_serving_config.ServingConfig.DiversityType.RULE_BASED_DIVERSITY, enable_category_filter_level="enable_category_filter_level_value", + ignore_recs_denylist=True, solution_types=[common.SolutionType.SOLUTION_TYPE_RECOMMENDATION], ) ) @@ -2392,6 +2401,7 @@ async def test_update_serving_config_async( == gcr_serving_config.ServingConfig.DiversityType.RULE_BASED_DIVERSITY ) assert response.enable_category_filter_level == "enable_category_filter_level_value" + assert response.ignore_recs_denylist is True assert response.solution_types == [common.SolutionType.SOLUTION_TYPE_RECOMMENDATION] @@ -2600,6 +2610,7 @@ def test_get_serving_config(request_type, transport: str = "grpc"): diversity_level="diversity_level_value", diversity_type=serving_config.ServingConfig.DiversityType.RULE_BASED_DIVERSITY, enable_category_filter_level="enable_category_filter_level_value", + ignore_recs_denylist=True, solution_types=[common.SolutionType.SOLUTION_TYPE_RECOMMENDATION], ) response = client.get_serving_config(request) @@ -2633,6 +2644,7 @@ def test_get_serving_config(request_type, transport: str = "grpc"): == serving_config.ServingConfig.DiversityType.RULE_BASED_DIVERSITY ) assert response.enable_category_filter_level == "enable_category_filter_level_value" + assert response.ignore_recs_denylist is True assert response.solution_types == 
[common.SolutionType.SOLUTION_TYPE_RECOMMENDATION] @@ -2758,6 +2770,7 @@ async def test_get_serving_config_empty_call_async(): diversity_level="diversity_level_value", diversity_type=serving_config.ServingConfig.DiversityType.RULE_BASED_DIVERSITY, enable_category_filter_level="enable_category_filter_level_value", + ignore_recs_denylist=True, solution_types=[common.SolutionType.SOLUTION_TYPE_RECOMMENDATION], ) ) @@ -2850,6 +2863,7 @@ async def test_get_serving_config_async( diversity_level="diversity_level_value", diversity_type=serving_config.ServingConfig.DiversityType.RULE_BASED_DIVERSITY, enable_category_filter_level="enable_category_filter_level_value", + ignore_recs_denylist=True, solution_types=[common.SolutionType.SOLUTION_TYPE_RECOMMENDATION], ) ) @@ -2884,6 +2898,7 @@ async def test_get_serving_config_async( == serving_config.ServingConfig.DiversityType.RULE_BASED_DIVERSITY ) assert response.enable_category_filter_level == "enable_category_filter_level_value" + assert response.ignore_recs_denylist is True assert response.solution_types == [common.SolutionType.SOLUTION_TYPE_RECOMMENDATION] @@ -3476,13 +3491,13 @@ def test_list_serving_configs_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_serving_configs(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -3672,6 +3687,7 @@ def test_add_control(request_type, transport: str = "grpc"): diversity_level="diversity_level_value", diversity_type=gcr_serving_config.ServingConfig.DiversityType.RULE_BASED_DIVERSITY, enable_category_filter_level="enable_category_filter_level_value", + ignore_recs_denylist=True, solution_types=[common.SolutionType.SOLUTION_TYPE_RECOMMENDATION], ) response = client.add_control(request) @@ -3705,6 +3721,7 @@ def test_add_control(request_type, transport: str = "grpc"): == gcr_serving_config.ServingConfig.DiversityType.RULE_BASED_DIVERSITY ) assert response.enable_category_filter_level == "enable_category_filter_level_value" + assert response.ignore_recs_denylist is True assert response.solution_types == [common.SolutionType.SOLUTION_TYPE_RECOMMENDATION] @@ -3822,6 +3839,7 @@ async def test_add_control_empty_call_async(): diversity_level="diversity_level_value", diversity_type=gcr_serving_config.ServingConfig.DiversityType.RULE_BASED_DIVERSITY, enable_category_filter_level="enable_category_filter_level_value", + ignore_recs_denylist=True, solution_types=[common.SolutionType.SOLUTION_TYPE_RECOMMENDATION], ) ) @@ -3912,6 +3930,7 @@ async def test_add_control_async( diversity_level="diversity_level_value", diversity_type=gcr_serving_config.ServingConfig.DiversityType.RULE_BASED_DIVERSITY, enable_category_filter_level="enable_category_filter_level_value", + ignore_recs_denylist=True, solution_types=[common.SolutionType.SOLUTION_TYPE_RECOMMENDATION], ) ) @@ -3946,6 +3965,7 @@ async def test_add_control_async( == gcr_serving_config.ServingConfig.DiversityType.RULE_BASED_DIVERSITY ) assert response.enable_category_filter_level == "enable_category_filter_level_value" + assert response.ignore_recs_denylist is True assert response.solution_types == [common.SolutionType.SOLUTION_TYPE_RECOMMENDATION] @@ -4134,6 +4154,7 @@ def test_remove_control(request_type, transport: str = "grpc"): diversity_level="diversity_level_value", 
diversity_type=gcr_serving_config.ServingConfig.DiversityType.RULE_BASED_DIVERSITY, enable_category_filter_level="enable_category_filter_level_value", + ignore_recs_denylist=True, solution_types=[common.SolutionType.SOLUTION_TYPE_RECOMMENDATION], ) response = client.remove_control(request) @@ -4167,6 +4188,7 @@ def test_remove_control(request_type, transport: str = "grpc"): == gcr_serving_config.ServingConfig.DiversityType.RULE_BASED_DIVERSITY ) assert response.enable_category_filter_level == "enable_category_filter_level_value" + assert response.ignore_recs_denylist is True assert response.solution_types == [common.SolutionType.SOLUTION_TYPE_RECOMMENDATION] @@ -4284,6 +4306,7 @@ async def test_remove_control_empty_call_async(): diversity_level="diversity_level_value", diversity_type=gcr_serving_config.ServingConfig.DiversityType.RULE_BASED_DIVERSITY, enable_category_filter_level="enable_category_filter_level_value", + ignore_recs_denylist=True, solution_types=[common.SolutionType.SOLUTION_TYPE_RECOMMENDATION], ) ) @@ -4374,6 +4397,7 @@ async def test_remove_control_async( diversity_level="diversity_level_value", diversity_type=gcr_serving_config.ServingConfig.DiversityType.RULE_BASED_DIVERSITY, enable_category_filter_level="enable_category_filter_level_value", + ignore_recs_denylist=True, solution_types=[common.SolutionType.SOLUTION_TYPE_RECOMMENDATION], ) ) @@ -4408,6 +4432,7 @@ async def test_remove_control_async( == gcr_serving_config.ServingConfig.DiversityType.RULE_BASED_DIVERSITY ) assert response.enable_category_filter_level == "enable_category_filter_level_value" + assert response.ignore_recs_denylist is True assert response.solution_types == [common.SolutionType.SOLUTION_TYPE_RECOMMENDATION] @@ -4613,6 +4638,7 @@ def test_create_serving_config_rest(request_type): "diversity_level": "diversity_level_value", "diversity_type": 2, "enable_category_filter_level": "enable_category_filter_level_value", + "ignore_recs_denylist": True, "personalization_spec": {"mode": 1}, "solution_types": [1], } @@ -4707,6 +4733,7 @@ def get_message_fields(field): diversity_level="diversity_level_value", diversity_type=gcr_serving_config.ServingConfig.DiversityType.RULE_BASED_DIVERSITY, enable_category_filter_level="enable_category_filter_level_value", + ignore_recs_denylist=True, solution_types=[common.SolutionType.SOLUTION_TYPE_RECOMMENDATION], ) @@ -4744,6 +4771,7 @@ def get_message_fields(field): == gcr_serving_config.ServingConfig.DiversityType.RULE_BASED_DIVERSITY ) assert response.enable_category_filter_level == "enable_category_filter_level_value" + assert response.ignore_recs_denylist is True assert response.solution_types == [common.SolutionType.SOLUTION_TYPE_RECOMMENDATION] @@ -5402,6 +5430,7 @@ def test_update_serving_config_rest(request_type): "diversity_level": "diversity_level_value", "diversity_type": 2, "enable_category_filter_level": "enable_category_filter_level_value", + "ignore_recs_denylist": True, "personalization_spec": {"mode": 1}, "solution_types": [1], } @@ -5496,6 +5525,7 @@ def get_message_fields(field): diversity_level="diversity_level_value", diversity_type=gcr_serving_config.ServingConfig.DiversityType.RULE_BASED_DIVERSITY, enable_category_filter_level="enable_category_filter_level_value", + ignore_recs_denylist=True, solution_types=[common.SolutionType.SOLUTION_TYPE_RECOMMENDATION], ) @@ -5533,6 +5563,7 @@ def get_message_fields(field): == gcr_serving_config.ServingConfig.DiversityType.RULE_BASED_DIVERSITY ) assert response.enable_category_filter_level == 
"enable_category_filter_level_value" + assert response.ignore_recs_denylist is True assert response.solution_types == [common.SolutionType.SOLUTION_TYPE_RECOMMENDATION] @@ -5852,6 +5883,7 @@ def test_get_serving_config_rest(request_type): diversity_level="diversity_level_value", diversity_type=serving_config.ServingConfig.DiversityType.RULE_BASED_DIVERSITY, enable_category_filter_level="enable_category_filter_level_value", + ignore_recs_denylist=True, solution_types=[common.SolutionType.SOLUTION_TYPE_RECOMMENDATION], ) @@ -5889,6 +5921,7 @@ def test_get_serving_config_rest(request_type): == serving_config.ServingConfig.DiversityType.RULE_BASED_DIVERSITY ) assert response.enable_category_filter_level == "enable_category_filter_level_value" + assert response.ignore_recs_denylist is True assert response.solution_types == [common.SolutionType.SOLUTION_TYPE_RECOMMENDATION] @@ -6595,6 +6628,7 @@ def test_add_control_rest(request_type): diversity_level="diversity_level_value", diversity_type=gcr_serving_config.ServingConfig.DiversityType.RULE_BASED_DIVERSITY, enable_category_filter_level="enable_category_filter_level_value", + ignore_recs_denylist=True, solution_types=[common.SolutionType.SOLUTION_TYPE_RECOMMENDATION], ) @@ -6632,6 +6666,7 @@ def test_add_control_rest(request_type): == gcr_serving_config.ServingConfig.DiversityType.RULE_BASED_DIVERSITY ) assert response.enable_category_filter_level == "enable_category_filter_level_value" + assert response.ignore_recs_denylist is True assert response.solution_types == [common.SolutionType.SOLUTION_TYPE_RECOMMENDATION] @@ -6954,6 +6989,7 @@ def test_remove_control_rest(request_type): diversity_level="diversity_level_value", diversity_type=gcr_serving_config.ServingConfig.DiversityType.RULE_BASED_DIVERSITY, enable_category_filter_level="enable_category_filter_level_value", + ignore_recs_denylist=True, solution_types=[common.SolutionType.SOLUTION_TYPE_RECOMMENDATION], ) @@ -6991,6 +7027,7 @@ def test_remove_control_rest(request_type): == gcr_serving_config.ServingConfig.DiversityType.RULE_BASED_DIVERSITY ) assert response.enable_category_filter_level == "enable_category_filter_level_value" + assert response.ignore_recs_denylist is True assert response.solution_types == [common.SolutionType.SOLUTION_TYPE_RECOMMENDATION] diff --git a/packages/google-cloud-run/tests/unit/gapic/run_v2/test_executions.py b/packages/google-cloud-run/tests/unit/gapic/run_v2/test_executions.py index ee9aae49d3e7..bc7f95d982a6 100644 --- a/packages/google-cloud-run/tests/unit/gapic/run_v2/test_executions.py +++ b/packages/google-cloud-run/tests/unit/gapic/run_v2/test_executions.py @@ -1941,13 +1941,13 @@ def test_list_executions_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_executions(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 diff --git a/packages/google-cloud-run/tests/unit/gapic/run_v2/test_jobs.py b/packages/google-cloud-run/tests/unit/gapic/run_v2/test_jobs.py index e5b1097be26b..e7a428882179 100644 --- a/packages/google-cloud-run/tests/unit/gapic/run_v2/test_jobs.py +++ b/packages/google-cloud-run/tests/unit/gapic/run_v2/test_jobs.py @@ -2164,10 +2164,10 @@ def test_list_jobs_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () + 
expected_metadata = () pager = client.list_jobs(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 diff --git a/packages/google-cloud-run/tests/unit/gapic/run_v2/test_revisions.py b/packages/google-cloud-run/tests/unit/gapic/run_v2/test_revisions.py index 20676f75f5d9..930187bb2c55 100644 --- a/packages/google-cloud-run/tests/unit/gapic/run_v2/test_revisions.py +++ b/packages/google-cloud-run/tests/unit/gapic/run_v2/test_revisions.py @@ -1883,10 +1883,10 @@ def test_list_revisions_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () + expected_metadata = () pager = client.list_revisions(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 diff --git a/packages/google-cloud-run/tests/unit/gapic/run_v2/test_services.py b/packages/google-cloud-run/tests/unit/gapic/run_v2/test_services.py index 4f7c934099d2..ef8e7a2559ae 100644 --- a/packages/google-cloud-run/tests/unit/gapic/run_v2/test_services.py +++ b/packages/google-cloud-run/tests/unit/gapic/run_v2/test_services.py @@ -2233,10 +2233,10 @@ def test_list_services_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () + expected_metadata = () pager = client.list_services(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 diff --git a/packages/google-cloud-run/tests/unit/gapic/run_v2/test_tasks.py b/packages/google-cloud-run/tests/unit/gapic/run_v2/test_tasks.py index 421d2658f22e..4be4754866af 100644 --- a/packages/google-cloud-run/tests/unit/gapic/run_v2/test_tasks.py +++ b/packages/google-cloud-run/tests/unit/gapic/run_v2/test_tasks.py @@ -1898,13 +1898,13 @@ def test_list_tasks_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_tasks(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 diff --git a/packages/google-cloud-scheduler/tests/unit/gapic/scheduler_v1/test_cloud_scheduler.py b/packages/google-cloud-scheduler/tests/unit/gapic/scheduler_v1/test_cloud_scheduler.py index 3334549a3a29..bd60106b5d7a 100644 --- a/packages/google-cloud-scheduler/tests/unit/gapic/scheduler_v1/test_cloud_scheduler.py +++ b/packages/google-cloud-scheduler/tests/unit/gapic/scheduler_v1/test_cloud_scheduler.py @@ -1531,13 +1531,13 @@ def test_list_jobs_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_jobs(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 diff --git a/packages/google-cloud-scheduler/tests/unit/gapic/scheduler_v1beta1/test_cloud_scheduler.py b/packages/google-cloud-scheduler/tests/unit/gapic/scheduler_v1beta1/test_cloud_scheduler.py index fe34a9a5c9e7..89b66eec0113 100644 --- a/packages/google-cloud-scheduler/tests/unit/gapic/scheduler_v1beta1/test_cloud_scheduler.py +++ 
b/packages/google-cloud-scheduler/tests/unit/gapic/scheduler_v1beta1/test_cloud_scheduler.py @@ -1533,13 +1533,13 @@ def test_list_jobs_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_jobs(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 diff --git a/packages/google-cloud-secret-manager/tests/unit/gapic/secretmanager_v1/test_secret_manager_service.py b/packages/google-cloud-secret-manager/tests/unit/gapic/secretmanager_v1/test_secret_manager_service.py index aadfc59a3fb7..902b57b76cf2 100644 --- a/packages/google-cloud-secret-manager/tests/unit/gapic/secretmanager_v1/test_secret_manager_service.py +++ b/packages/google-cloud-secret-manager/tests/unit/gapic/secretmanager_v1/test_secret_manager_service.py @@ -1607,13 +1607,13 @@ def test_list_secrets_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_secrets(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -4106,13 +4106,13 @@ def test_list_secret_versions_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_secret_versions(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 diff --git a/packages/google-cloud-secret-manager/tests/unit/gapic/secretmanager_v1beta1/test_secret_manager_service.py b/packages/google-cloud-secret-manager/tests/unit/gapic/secretmanager_v1beta1/test_secret_manager_service.py index c1478dcb970a..44e6f96fd717 100644 --- a/packages/google-cloud-secret-manager/tests/unit/gapic/secretmanager_v1beta1/test_secret_manager_service.py +++ b/packages/google-cloud-secret-manager/tests/unit/gapic/secretmanager_v1beta1/test_secret_manager_service.py @@ -1603,13 +1603,13 @@ def test_list_secrets_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_secrets(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -4073,13 +4073,13 @@ def test_list_secret_versions_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_secret_versions(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 diff --git a/packages/google-cloud-secret-manager/tests/unit/gapic/secretmanager_v1beta2/test_secret_manager_service.py 
b/packages/google-cloud-secret-manager/tests/unit/gapic/secretmanager_v1beta2/test_secret_manager_service.py index 4276626bced6..2622fd162920 100644 --- a/packages/google-cloud-secret-manager/tests/unit/gapic/secretmanager_v1beta2/test_secret_manager_service.py +++ b/packages/google-cloud-secret-manager/tests/unit/gapic/secretmanager_v1beta2/test_secret_manager_service.py @@ -1607,13 +1607,13 @@ def test_list_secrets_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_secrets(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -4106,13 +4106,13 @@ def test_list_secret_versions_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_secret_versions(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 diff --git a/packages/google-cloud-securesourcemanager/tests/unit/gapic/securesourcemanager_v1/test_secure_source_manager.py b/packages/google-cloud-securesourcemanager/tests/unit/gapic/securesourcemanager_v1/test_secure_source_manager.py index 855f017748fb..977f2379745d 100644 --- a/packages/google-cloud-securesourcemanager/tests/unit/gapic/securesourcemanager_v1/test_secure_source_manager.py +++ b/packages/google-cloud-securesourcemanager/tests/unit/gapic/securesourcemanager_v1/test_secure_source_manager.py @@ -1620,13 +1620,13 @@ def test_list_instances_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_instances(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -3363,13 +3363,13 @@ def test_list_repositories_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_repositories(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 diff --git a/packages/google-cloud-securitycenter/CHANGELOG.md b/packages/google-cloud-securitycenter/CHANGELOG.md index b9cb57d672e5..734ecc365ddd 100644 --- a/packages/google-cloud-securitycenter/CHANGELOG.md +++ b/packages/google-cloud-securitycenter/CHANGELOG.md @@ -4,6 +4,21 @@ [1]: https://pypi.org/project/google-cloud-securitycenter/#history +## [1.32.0](https://github.com/googleapis/google-cloud-python/compare/google-cloud-securitycenter-v1.31.0...google-cloud-securitycenter-v1.32.0) (2024-06-27) + + +### Features + +* Add toxic_combination and group_memberships fields to finding 
([83c851e](https://github.com/googleapis/google-cloud-python/commit/83c851e2aa497b3e5ae940f71dff4c45fa00be8f)) +* added cloud provider field to list findings response ([83c851e](https://github.com/googleapis/google-cloud-python/commit/83c851e2aa497b3e5ae940f71dff4c45fa00be8f)) +* added http configuration rule to ResourceValueConfig and ValuedResource API methods ([83c851e](https://github.com/googleapis/google-cloud-python/commit/83c851e2aa497b3e5ae940f71dff4c45fa00be8f)) +* added toxic combination field to finding ([83c851e](https://github.com/googleapis/google-cloud-python/commit/83c851e2aa497b3e5ae940f71dff4c45fa00be8f)) + + +### Documentation + +* Updated comments for ResourceValueConfig ([83c851e](https://github.com/googleapis/google-cloud-python/commit/83c851e2aa497b3e5ae940f71dff4c45fa00be8f)) + ## [1.31.0](https://github.com/googleapis/google-cloud-python/compare/google-cloud-securitycenter-v1.30.1...google-cloud-securitycenter-v1.31.0) (2024-04-17) diff --git a/packages/google-cloud-securitycenter/README.rst b/packages/google-cloud-securitycenter/README.rst index d3c1cb8c8128..af0c588c2fcc 100644 --- a/packages/google-cloud-securitycenter/README.rst +++ b/packages/google-cloud-securitycenter/README.rst @@ -15,7 +15,7 @@ Python Client for Google Cloud Security Command Center .. |versions| image:: https://img.shields.io/pypi/pyversions/google-cloud-securitycenter.svg :target: https://pypi.org/project/google-cloud-securitycenter/ .. _Google Cloud Security Command Center: https://cloud.google.com/security-command-center -.. _Client Library Documentation: https://cloud.google.com/python/docs/reference/securitycenter/latest +.. _Client Library Documentation: https://cloud.google.com/python/docs/reference/securitycenter/latest/summary_overview .. _Product Documentation: https://cloud.google.com/security-command-center Quick Start diff --git a/packages/google-cloud-securitycenter/docs/index.rst b/packages/google-cloud-securitycenter/docs/index.rst index 34ebeb002c30..c1b3693d06f9 100644 --- a/packages/google-cloud-securitycenter/docs/index.rst +++ b/packages/google-cloud-securitycenter/docs/index.rst @@ -48,3 +48,8 @@ For a list of all ``google-cloud-securitycenter`` releases: :maxdepth: 2 CHANGELOG + +.. toctree:: + :hidden: + + summary_overview.md diff --git a/packages/google-cloud-securitycenter/docs/summary_overview.md b/packages/google-cloud-securitycenter/docs/summary_overview.md new file mode 100644 index 000000000000..0413be9f945b --- /dev/null +++ b/packages/google-cloud-securitycenter/docs/summary_overview.md @@ -0,0 +1,22 @@ +[ +This is a templated file. Adding content to this file may result in it being +reverted. Instead, if you want to place additional content, create an +"overview_content.md" file in `docs/` directory. The Sphinx tool will +pick up on the content and merge the content. 
+]: # + +# Google Cloud Security Command Center API + +Overview of the APIs available for Google Cloud Security Command Center API. + +## All entries + +Classes, methods and properties & attributes for +Google Cloud Security Command Center API. + +[classes](https://cloud.google.com/python/docs/reference/securitycenter/latest/summary_class.html) + +[methods](https://cloud.google.com/python/docs/reference/securitycenter/latest/summary_method.html) + +[properties and +attributes](https://cloud.google.com/python/docs/reference/securitycenter/latest/summary_property.html) diff --git a/packages/google-cloud-securitycenter/google/cloud/securitycenter/__init__.py b/packages/google-cloud-securitycenter/google/cloud/securitycenter/__init__.py index 06dd2796035d..ee3535d57479 100644 --- a/packages/google-cloud-securitycenter/google/cloud/securitycenter/__init__.py +++ b/packages/google-cloud-securitycenter/google/cloud/securitycenter/__init__.py @@ -62,6 +62,7 @@ from google.cloud.securitycenter_v1.types.file import File from google.cloud.securitycenter_v1.types.finding import Finding from google.cloud.securitycenter_v1.types.folder import Folder +from google.cloud.securitycenter_v1.types.group_membership import GroupMembership from google.cloud.securitycenter_v1.types.iam_binding import IamBinding from google.cloud.securitycenter_v1.types.indicator import Indicator from google.cloud.securitycenter_v1.types.kernel_rootkit import KernelRootkit @@ -152,6 +153,7 @@ UpdateSourceRequest, ) from google.cloud.securitycenter_v1.types.source import Source +from google.cloud.securitycenter_v1.types.toxic_combination import ToxicCombination from google.cloud.securitycenter_v1.types.vulnerability import ( Cve, Cvssv3, @@ -191,6 +193,7 @@ "File", "Finding", "Folder", + "GroupMembership", "IamBinding", "Indicator", "KernelRootkit", @@ -271,6 +274,7 @@ "UpdateSecurityMarksRequest", "UpdateSourceRequest", "Source", + "ToxicCombination", "Cve", "Cvssv3", "Package", diff --git a/packages/google-cloud-securitycenter/google/cloud/securitycenter/gapic_version.py b/packages/google-cloud-securitycenter/google/cloud/securitycenter/gapic_version.py index 558c8aab67c5..b864a194c452 100644 --- a/packages/google-cloud-securitycenter/google/cloud/securitycenter/gapic_version.py +++ b/packages/google-cloud-securitycenter/google/cloud/securitycenter/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "0.0.0" # {x-release-please-version} +__version__ = "1.32.0" # {x-release-please-version} diff --git a/packages/google-cloud-securitycenter/google/cloud/securitycenter_v1/__init__.py b/packages/google-cloud-securitycenter/google/cloud/securitycenter_v1/__init__.py index f28322b2d9b8..87cb7ea4cf0f 100644 --- a/packages/google-cloud-securitycenter/google/cloud/securitycenter_v1/__init__.py +++ b/packages/google-cloud-securitycenter/google/cloud/securitycenter_v1/__init__.py @@ -46,6 +46,7 @@ from .types.file import File from .types.finding import Finding from .types.folder import Folder +from .types.group_membership import GroupMembership from .types.iam_binding import IamBinding from .types.indicator import Indicator from .types.kernel_rootkit import KernelRootkit @@ -128,6 +129,7 @@ UpdateSourceRequest, ) from .types.source import Source +from .types.toxic_combination import ToxicCombination from .types.vulnerability import ( Cve, Cvssv3, @@ -191,6 +193,7 @@ "GroupAssetsResponse", "GroupFindingsRequest", "GroupFindingsResponse", + "GroupMembership", "GroupResult", "IamBinding", "Indicator", @@ -243,6 +246,7 @@ "SimulateSecurityHealthAnalyticsCustomModuleRequest", "SimulateSecurityHealthAnalyticsCustomModuleResponse", "Source", + "ToxicCombination", "UpdateBigQueryExportRequest", "UpdateExternalSystemRequest", "UpdateFindingRequest", diff --git a/packages/google-cloud-securitycenter/google/cloud/securitycenter_v1/gapic_version.py b/packages/google-cloud-securitycenter/google/cloud/securitycenter_v1/gapic_version.py index 558c8aab67c5..b864a194c452 100644 --- a/packages/google-cloud-securitycenter/google/cloud/securitycenter_v1/gapic_version.py +++ b/packages/google-cloud-securitycenter/google/cloud/securitycenter_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "0.0.0" # {x-release-please-version} +__version__ = "1.32.0" # {x-release-please-version} diff --git a/packages/google-cloud-securitycenter/google/cloud/securitycenter_v1/services/security_center/async_client.py b/packages/google-cloud-securitycenter/google/cloud/securitycenter_v1/services/security_center/async_client.py index f231ad730c77..96c094b2c71b 100644 --- a/packages/google-cloud-securitycenter/google/cloud/securitycenter_v1/services/security_center/async_client.py +++ b/packages/google-cloud-securitycenter/google/cloud/securitycenter_v1/services/security_center/async_client.py @@ -75,6 +75,7 @@ security_health_analytics_custom_config, ) from google.cloud.securitycenter_v1.types import ( + group_membership, iam_binding, indicator, kernel_rootkit, @@ -87,6 +88,7 @@ security_posture, securitycenter_service, ) +from google.cloud.securitycenter_v1.types import toxic_combination, vulnerability from google.cloud.securitycenter_v1.types import external_system as gcs_external_system from google.cloud.securitycenter_v1.types import ( notification_config as gcs_notification_config, @@ -111,7 +113,6 @@ from google.cloud.securitycenter_v1.types import security_marks from google.cloud.securitycenter_v1.types import source from google.cloud.securitycenter_v1.types import source as gcs_source -from google.cloud.securitycenter_v1.types import vulnerability from .client import SecurityCenterClient from .transports.base import DEFAULT_CLIENT_INFO, SecurityCenterTransport diff --git a/packages/google-cloud-securitycenter/google/cloud/securitycenter_v1/services/security_center/client.py b/packages/google-cloud-securitycenter/google/cloud/securitycenter_v1/services/security_center/client.py index 77539d8ff143..024b043fb602 100644 --- a/packages/google-cloud-securitycenter/google/cloud/securitycenter_v1/services/security_center/client.py +++ b/packages/google-cloud-securitycenter/google/cloud/securitycenter_v1/services/security_center/client.py @@ -79,6 +79,7 @@ security_health_analytics_custom_config, ) from google.cloud.securitycenter_v1.types import ( + group_membership, iam_binding, indicator, kernel_rootkit, @@ -91,6 +92,7 @@ security_posture, securitycenter_service, ) +from google.cloud.securitycenter_v1.types import toxic_combination, vulnerability from google.cloud.securitycenter_v1.types import external_system as gcs_external_system from google.cloud.securitycenter_v1.types import ( notification_config as gcs_notification_config, @@ -115,7 +117,6 @@ from google.cloud.securitycenter_v1.types import security_marks from google.cloud.securitycenter_v1.types import source from google.cloud.securitycenter_v1.types import source as gcs_source -from google.cloud.securitycenter_v1.types import vulnerability from .transports.base import DEFAULT_CLIENT_INFO, SecurityCenterTransport from .transports.grpc import SecurityCenterGrpcTransport diff --git a/packages/google-cloud-securitycenter/google/cloud/securitycenter_v1/types/__init__.py b/packages/google-cloud-securitycenter/google/cloud/securitycenter_v1/types/__init__.py index fb359bf9b61a..4972daed9e1e 100644 --- a/packages/google-cloud-securitycenter/google/cloud/securitycenter_v1/types/__init__.py +++ b/packages/google-cloud-securitycenter/google/cloud/securitycenter_v1/types/__init__.py @@ -40,6 +40,7 @@ from .file import File from .finding import Finding from .folder import Folder +from .group_membership import GroupMembership from .iam_binding import IamBinding from .indicator import Indicator from .kernel_rootkit import 
KernelRootkit @@ -120,6 +121,7 @@ UpdateSourceRequest, ) from .source import Source +from .toxic_combination import ToxicCombination from .vulnerability import ( Cve, Cvssv3, @@ -157,6 +159,7 @@ "File", "Finding", "Folder", + "GroupMembership", "IamBinding", "Indicator", "KernelRootkit", @@ -237,6 +240,7 @@ "UpdateSecurityMarksRequest", "UpdateSourceRequest", "Source", + "ToxicCombination", "Cve", "Cvssv3", "Package", diff --git a/packages/google-cloud-securitycenter/google/cloud/securitycenter_v1/types/finding.py b/packages/google-cloud-securitycenter/google/cloud/securitycenter_v1/types/finding.py index a08aa0725a6d..7aa2540a17ba 100644 --- a/packages/google-cloud-securitycenter/google/cloud/securitycenter_v1/types/finding.py +++ b/packages/google-cloud-securitycenter/google/cloud/securitycenter_v1/types/finding.py @@ -27,7 +27,12 @@ contact_details, container, ) -from google.cloud.securitycenter_v1.types import external_system, file, iam_binding +from google.cloud.securitycenter_v1.types import ( + external_system, + file, + group_membership, + iam_binding, +) from google.cloud.securitycenter_v1.types import ( backup_disaster_recovery as gcs_backup_disaster_recovery, ) @@ -44,6 +49,9 @@ from google.cloud.securitycenter_v1.types import ( security_posture as gcs_security_posture, ) +from google.cloud.securitycenter_v1.types import ( + toxic_combination as gcs_toxic_combination, +) from google.cloud.securitycenter_v1.types import vulnerability as gcs_vulnerability from google.cloud.securitycenter_v1.types import access as gcs_access from google.cloud.securitycenter_v1.types import application as gcs_application @@ -268,6 +276,20 @@ class Finding(proto.Message): Fields related to Cloud Armor findings. notebook (google.cloud.securitycenter_v1.types.Notebook): Notebook associated with the finding. + toxic_combination (google.cloud.securitycenter_v1.types.ToxicCombination): + Contains details about a group of security + issues that, when the issues occur together, + represent a greater risk than when the issues + occur independently. A group of such issues is + referred to as a toxic combination. + This field cannot be updated. Its value is + ignored in all update requests. + group_memberships (MutableSequence[google.cloud.securitycenter_v1.types.GroupMembership]): + Contains details about groups of which this + finding is a member. A group is a collection of + findings that are related in some way. This + field cannot be updated. Its value is ignored in + all update requests. """ class State(proto.Enum): @@ -419,6 +441,12 @@ class FindingClass(proto.Enum): POSTURE_VIOLATION (6): Describes a potential security risk due to a change in the security posture. + TOXIC_COMBINATION (7): + Describes a group of security issues that, + when the issues occur together, represent a + greater risk than when the issues occur + independently. A group of such issues is + referred to as a toxic combination. 
""" FINDING_CLASS_UNSPECIFIED = 0 THREAT = 1 @@ -427,6 +455,7 @@ class FindingClass(proto.Enum): OBSERVATION = 4 SCC_ERROR = 5 POSTURE_VIOLATION = 6 + TOXIC_COMBINATION = 7 name: str = proto.Field( proto.STRING, @@ -656,6 +685,18 @@ class FindingClass(proto.Enum): number=63, message=gcs_notebook.Notebook, ) + toxic_combination: gcs_toxic_combination.ToxicCombination = proto.Field( + proto.MESSAGE, + number=64, + message=gcs_toxic_combination.ToxicCombination, + ) + group_memberships: MutableSequence[ + group_membership.GroupMembership + ] = proto.RepeatedField( + proto.MESSAGE, + number=65, + message=group_membership.GroupMembership, + ) __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-securitycenter/google/cloud/securitycenter_v1/types/group_membership.py b/packages/google-cloud-securitycenter/google/cloud/securitycenter_v1/types/group_membership.py new file mode 100644 index 000000000000..395c6bc4e6b2 --- /dev/null +++ b/packages/google-cloud-securitycenter/google/cloud/securitycenter_v1/types/group_membership.py @@ -0,0 +1,65 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + +__protobuf__ = proto.module( + package="google.cloud.securitycenter.v1", + manifest={ + "GroupMembership", + }, +) + + +class GroupMembership(proto.Message): + r"""Contains details about groups of which this finding is a + member. A group is a collection of findings that are related in + some way. + + Attributes: + group_type (google.cloud.securitycenter_v1.types.GroupMembership.GroupType): + Type of group. + group_id (str): + ID of the group. + """ + + class GroupType(proto.Enum): + r"""Possible types of groups. + + Values: + GROUP_TYPE_UNSPECIFIED (0): + Default value. + GROUP_TYPE_TOXIC_COMBINATION (1): + Group represents a toxic combination. + """ + GROUP_TYPE_UNSPECIFIED = 0 + GROUP_TYPE_TOXIC_COMBINATION = 1 + + group_type: GroupType = proto.Field( + proto.ENUM, + number=1, + enum=GroupType, + ) + group_id: str = proto.Field( + proto.STRING, + number=2, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-securitycenter/google/cloud/securitycenter_v1/types/toxic_combination.py b/packages/google-cloud-securitycenter/google/cloud/securitycenter_v1/types/toxic_combination.py new file mode 100644 index 000000000000..e6c3ef7404f7 --- /dev/null +++ b/packages/google-cloud-securitycenter/google/cloud/securitycenter_v1/types/toxic_combination.py @@ -0,0 +1,59 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + +__protobuf__ = proto.module( + package="google.cloud.securitycenter.v1", + manifest={ + "ToxicCombination", + }, +) + + +class ToxicCombination(proto.Message): + r"""Contains details about a group of security issues that, when + the issues occur together, represent a greater risk than when + the issues occur independently. A group of such issues is + referred to as a toxic combination. + + Attributes: + attack_exposure_score (float): + The `Attack exposure + score `__ + of this toxic combination. The score is a measure of how + much this toxic combination exposes one or more high-value + resources to potential attack. + related_findings (MutableSequence[str]): + List of resource names of findings associated with this + toxic combination. For example, + ``organizations/123/sources/456/findings/789``. + """ + + attack_exposure_score: float = proto.Field( + proto.DOUBLE, + number=1, + ) + related_findings: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=2, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-securitycenter/google/cloud/securitycenter_v1beta1/gapic_version.py b/packages/google-cloud-securitycenter/google/cloud/securitycenter_v1beta1/gapic_version.py index 558c8aab67c5..b864a194c452 100644 --- a/packages/google-cloud-securitycenter/google/cloud/securitycenter_v1beta1/gapic_version.py +++ b/packages/google-cloud-securitycenter/google/cloud/securitycenter_v1beta1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.0.0" # {x-release-please-version} +__version__ = "1.32.0" # {x-release-please-version} diff --git a/packages/google-cloud-securitycenter/google/cloud/securitycenter_v1p1beta1/gapic_version.py b/packages/google-cloud-securitycenter/google/cloud/securitycenter_v1p1beta1/gapic_version.py index 558c8aab67c5..b864a194c452 100644 --- a/packages/google-cloud-securitycenter/google/cloud/securitycenter_v1p1beta1/gapic_version.py +++ b/packages/google-cloud-securitycenter/google/cloud/securitycenter_v1p1beta1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
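
Taken together with the new Finding fields above, the GroupMembership and ToxicCombination messages can be populated as in the following minimal sketch; every name, ID, and score here is an illustrative placeholder.

```python
from google.cloud import securitycenter_v1

# All values below are placeholders for illustration only.
toxic = securitycenter_v1.ToxicCombination(
    attack_exposure_score=72.5,
    related_findings=["organizations/123/sources/456/findings/789"],
)
membership = securitycenter_v1.GroupMembership(
    group_type=securitycenter_v1.GroupMembership.GroupType.GROUP_TYPE_TOXIC_COMBINATION,
    group_id="toxic-combination-group",
)
finding = securitycenter_v1.Finding(
    name="organizations/123/sources/456/findings/789",
    finding_class=securitycenter_v1.Finding.FindingClass.TOXIC_COMBINATION,
    toxic_combination=toxic,
    group_memberships=[membership],
)
print(finding)
```

As the field documentation above notes, both fields are output-only: their values are ignored in all update requests.
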
# -__version__ = "0.0.0" # {x-release-please-version} +__version__ = "1.32.0" # {x-release-please-version} diff --git a/packages/google-cloud-securitycenter/google/cloud/securitycenter_v2/__init__.py b/packages/google-cloud-securitycenter/google/cloud/securitycenter_v2/__init__.py index 309dec9d5a5d..10e9a22fcd18 100644 --- a/packages/google-cloud-securitycenter/google/cloud/securitycenter_v2/__init__.py +++ b/packages/google-cloud-securitycenter/google/cloud/securitycenter_v2/__init__.py @@ -25,6 +25,13 @@ from .types.attack_path import AttackPath from .types.backup_disaster_recovery import BackupDisasterRecovery from .types.bigquery_export import BigQueryExport +from .types.cloud_armor import ( + AdaptiveProtection, + Attack, + CloudArmor, + Requests, + SecurityPolicy, +) from .types.cloud_dlp_data_profile import CloudDlpDataProfile from .types.cloud_dlp_inspection import CloudDlpInspection from .types.compliance import Compliance @@ -36,6 +43,8 @@ from .types.external_system import ExternalSystem from .types.file import File from .types.finding import Finding +from .types.folder import Folder +from .types.group_membership import GroupMembership from .types.iam_binding import IamBinding from .types.indicator import Indicator from .types.kernel_rootkit import KernelRootkit @@ -45,11 +54,19 @@ from .types.log_entry import CloudLoggingEntry, LogEntry from .types.mitre_attack import MitreAttack from .types.mute_config import MuteConfig +from .types.notebook import Notebook from .types.notification_config import NotificationConfig from .types.notification_message import NotificationMessage from .types.org_policy import OrgPolicy from .types.process import EnvironmentVariable, Process -from .types.resource import Resource +from .types.resource import ( + AwsMetadata, + AzureMetadata, + CloudProvider, + GcpMetadata, + Resource, + ResourcePath, +) from .types.resource_value_config import ResourceValue, ResourceValueConfig from .types.security_marks import SecurityMarks from .types.security_posture import SecurityPosture @@ -107,6 +124,7 @@ ) from .types.simulation import Simulation from .types.source import Source +from .types.toxic_combination import ToxicCombination from .types.valued_resource import ResourceValueConfigMetadata, ValuedResource from .types.vulnerability import ( Cve, @@ -120,18 +138,24 @@ __all__ = ( "SecurityCenterAsyncClient", "Access", + "AdaptiveProtection", "Application", + "Attack", "AttackExposure", "AttackPath", + "AwsMetadata", + "AzureMetadata", "BackupDisasterRecovery", "BatchCreateResourceValueConfigsRequest", "BatchCreateResourceValueConfigsResponse", "BigQueryExport", "BulkMuteFindingsRequest", "BulkMuteFindingsResponse", + "CloudArmor", "CloudDlpDataProfile", "CloudDlpInspection", "CloudLoggingEntry", + "CloudProvider", "Compliance", "Connection", "Contact", @@ -156,6 +180,8 @@ "ExternalSystem", "File", "Finding", + "Folder", + "GcpMetadata", "Geolocation", "GetBigQueryExportRequest", "GetMuteConfigRequest", @@ -166,6 +192,7 @@ "GetValuedResourceRequest", "GroupFindingsRequest", "GroupFindingsResponse", + "GroupMembership", "GroupResult", "IamBinding", "Indicator", @@ -192,25 +219,30 @@ "LogEntry", "MitreAttack", "MuteConfig", + "Notebook", "NotificationConfig", "NotificationMessage", "OrgPolicy", "Package", "Process", "Reference", + "Requests", "Resource", + "ResourcePath", "ResourceValue", "ResourceValueConfig", "ResourceValueConfigMetadata", "SecurityBulletin", "SecurityCenterClient", "SecurityMarks", + "SecurityPolicy", "SecurityPosture", 
"ServiceAccountDelegationInfo", "SetFindingStateRequest", "SetMuteRequest", "Simulation", "Source", + "ToxicCombination", "UpdateBigQueryExportRequest", "UpdateExternalSystemRequest", "UpdateFindingRequest", diff --git a/packages/google-cloud-securitycenter/google/cloud/securitycenter_v2/gapic_version.py b/packages/google-cloud-securitycenter/google/cloud/securitycenter_v2/gapic_version.py index 558c8aab67c5..b864a194c452 100644 --- a/packages/google-cloud-securitycenter/google/cloud/securitycenter_v2/gapic_version.py +++ b/packages/google-cloud-securitycenter/google/cloud/securitycenter_v2/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.0.0" # {x-release-please-version} +__version__ = "1.32.0" # {x-release-please-version} diff --git a/packages/google-cloud-securitycenter/google/cloud/securitycenter_v2/services/security_center/async_client.py b/packages/google-cloud-securitycenter/google/cloud/securitycenter_v2/services/security_center/async_client.py index a3cd5532fa80..a6e5f97e89b3 100644 --- a/packages/google-cloud-securitycenter/google/cloud/securitycenter_v2/services/security_center/async_client.py +++ b/packages/google-cloud-securitycenter/google/cloud/securitycenter_v2/services/security_center/async_client.py @@ -60,6 +60,7 @@ attack_path, backup_disaster_recovery, bigquery_export, + cloud_armor, cloud_dlp_data_profile, cloud_dlp_inspection, compliance, @@ -69,6 +70,7 @@ exfiltration, ) from google.cloud.securitycenter_v2.types import ( + group_membership, iam_binding, indicator, kernel_rootkit, @@ -82,6 +84,11 @@ securitycenter_service, simulation, ) +from google.cloud.securitycenter_v2.types import ( + toxic_combination, + valued_resource, + vulnerability, +) from google.cloud.securitycenter_v2.types import external_system as gcs_external_system from google.cloud.securitycenter_v2.types import ( notification_config as gcs_notification_config, @@ -95,13 +102,13 @@ from google.cloud.securitycenter_v2.types import finding as gcs_finding from google.cloud.securitycenter_v2.types import mute_config from google.cloud.securitycenter_v2.types import mute_config as gcs_mute_config +from google.cloud.securitycenter_v2.types import notebook from google.cloud.securitycenter_v2.types import notification_config -from google.cloud.securitycenter_v2.types import org_policy, process +from google.cloud.securitycenter_v2.types import org_policy, process, resource from google.cloud.securitycenter_v2.types import resource_value_config from google.cloud.securitycenter_v2.types import security_marks from google.cloud.securitycenter_v2.types import source from google.cloud.securitycenter_v2.types import source as gcs_source -from google.cloud.securitycenter_v2.types import valued_resource, vulnerability from .client import SecurityCenterClient from .transports.base import DEFAULT_CLIENT_INFO, SecurityCenterTransport @@ -2522,8 +2529,8 @@ async def sample_get_resource_value_config(): Returns: google.cloud.securitycenter_v2.types.ResourceValueConfig: - A resource value config (RVC) is a - mapping configuration of user's + A resource value configuration (RVC) + is a mapping configuration of user's resources to resource values. Used in Attack path simulations. @@ -2781,14 +2788,6 @@ async def sample_group_findings(): syntax: comma separated list of fields. For example: "parent,resource_name". 
- The following fields are supported: - - - resource_name - - category - - state - - parent - - severity - This corresponds to the ``group_by`` field on the ``request`` instance; if ``request`` is provided, this should not be set. @@ -5141,9 +5140,13 @@ async def sample_update_resource_value_config(): on the ``request`` instance; if ``request`` is provided, this should not be set. update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): - The list of fields to be updated. - If empty all mutable fields will be - updated. + The list of fields to be updated. If empty all mutable + fields will be updated. + + To update nested fields, include the top level field in + the mask For example, to update + gcp_metadata.resource_type, include the "gcp_metadata" + field mask This corresponds to the ``update_mask`` field on the ``request`` instance; if ``request`` is provided, this @@ -5156,8 +5159,8 @@ async def sample_update_resource_value_config(): Returns: google.cloud.securitycenter_v2.types.ResourceValueConfig: - A resource value config (RVC) is a - mapping configuration of user's + A resource value configuration (RVC) + is a mapping configuration of user's resources to resource values. Used in Attack path simulations. diff --git a/packages/google-cloud-securitycenter/google/cloud/securitycenter_v2/services/security_center/client.py b/packages/google-cloud-securitycenter/google/cloud/securitycenter_v2/services/security_center/client.py index 19ffa241ccf3..75abe19a7d55 100644 --- a/packages/google-cloud-securitycenter/google/cloud/securitycenter_v2/services/security_center/client.py +++ b/packages/google-cloud-securitycenter/google/cloud/securitycenter_v2/services/security_center/client.py @@ -65,6 +65,7 @@ attack_path, backup_disaster_recovery, bigquery_export, + cloud_armor, cloud_dlp_data_profile, cloud_dlp_inspection, compliance, @@ -74,6 +75,7 @@ exfiltration, ) from google.cloud.securitycenter_v2.types import ( + group_membership, iam_binding, indicator, kernel_rootkit, @@ -87,6 +89,11 @@ securitycenter_service, simulation, ) +from google.cloud.securitycenter_v2.types import ( + toxic_combination, + valued_resource, + vulnerability, +) from google.cloud.securitycenter_v2.types import external_system as gcs_external_system from google.cloud.securitycenter_v2.types import ( notification_config as gcs_notification_config, @@ -100,13 +107,13 @@ from google.cloud.securitycenter_v2.types import finding as gcs_finding from google.cloud.securitycenter_v2.types import mute_config from google.cloud.securitycenter_v2.types import mute_config as gcs_mute_config +from google.cloud.securitycenter_v2.types import notebook from google.cloud.securitycenter_v2.types import notification_config -from google.cloud.securitycenter_v2.types import org_policy, process +from google.cloud.securitycenter_v2.types import org_policy, process, resource from google.cloud.securitycenter_v2.types import resource_value_config from google.cloud.securitycenter_v2.types import security_marks from google.cloud.securitycenter_v2.types import source from google.cloud.securitycenter_v2.types import source as gcs_source -from google.cloud.securitycenter_v2.types import valued_resource, vulnerability from .transports.base import DEFAULT_CLIENT_INFO, SecurityCenterTransport from .transports.grpc import SecurityCenterGrpcTransport @@ -3211,8 +3218,8 @@ def sample_get_resource_value_config(): Returns: google.cloud.securitycenter_v2.types.ResourceValueConfig: - A resource value config (RVC) is a - mapping configuration of user's + 
A resource value configuration (RVC) + is a mapping configuration of user's resources to resource values. Used in Attack path simulations. @@ -3466,14 +3473,6 @@ def sample_group_findings(): syntax: comma separated list of fields. For example: "parent,resource_name". - The following fields are supported: - - - resource_name - - category - - state - - parent - - severity - This corresponds to the ``group_by`` field on the ``request`` instance; if ``request`` is provided, this should not be set. @@ -5830,9 +5829,13 @@ def sample_update_resource_value_config(): on the ``request`` instance; if ``request`` is provided, this should not be set. update_mask (google.protobuf.field_mask_pb2.FieldMask): - The list of fields to be updated. - If empty all mutable fields will be - updated. + The list of fields to be updated. If empty all mutable + fields will be updated. + + To update nested fields, include the top level field in + the mask For example, to update + gcp_metadata.resource_type, include the "gcp_metadata" + field mask This corresponds to the ``update_mask`` field on the ``request`` instance; if ``request`` is provided, this @@ -5845,8 +5848,8 @@ def sample_update_resource_value_config(): Returns: google.cloud.securitycenter_v2.types.ResourceValueConfig: - A resource value config (RVC) is a - mapping configuration of user's + A resource value configuration (RVC) + is a mapping configuration of user's resources to resource values. Used in Attack path simulations. diff --git a/packages/google-cloud-securitycenter/google/cloud/securitycenter_v2/services/security_center/transports/rest.py b/packages/google-cloud-securitycenter/google/cloud/securitycenter_v2/services/security_center/transports/rest.py index 242131a7405f..4761b615b968 100644 --- a/packages/google-cloud-securitycenter/google/cloud/securitycenter_v2/services/security_center/transports/rest.py +++ b/packages/google-cloud-securitycenter/google/cloud/securitycenter_v2/services/security_center/transports/rest.py @@ -1605,6 +1605,11 @@ def __call__( "uri": "/v2/{parent=organizations/*}/resourceValueConfigs:batchCreate", "body": "*", }, + { + "method": "post", + "uri": "/v2/{parent=organizations/*/locations/*}/resourceValueConfigs:batchCreate", + "body": "*", + }, ] ( request, @@ -2661,6 +2666,10 @@ def __call__( "method": "delete", "uri": "/v2/{name=organizations/*/resourceValueConfigs/*}", }, + { + "method": "delete", + "uri": "/v2/{name=organizations/*/locations/*/resourceValueConfigs/*}", + }, ] request, metadata = self._interceptor.pre_delete_resource_value_config( request, metadata @@ -3212,8 +3221,8 @@ def __call__( Returns: ~.resource_value_config.ResourceValueConfig: - A resource value config (RVC) is a - mapping configuration of user's + A resource value configuration (RVC) + is a mapping configuration of user's resources to resource values. Used in Attack path simulations. 
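The hunks above add `organizations/*/locations/*` REST routes for resource value configurations. As a minimal illustrative sketch (not part of the change itself), the snippet below shows how such a location-scoped configuration might be read through the v2 client; the organization, location, and config IDs are placeholders, and the `transport="rest"` argument and flattened `name` parameter are assumed from the usual generated GAPIC surface.

```python
# Illustrative sketch only: reads a resource value configuration through the
# new location-based REST route added above. All resource IDs are placeholders.
from google.cloud import securitycenter_v2


def get_location_scoped_rvc() -> None:
    # transport="rest" sends the request through the REST bindings patched above.
    client = securitycenter_v2.SecurityCenterClient(transport="rest")

    name = "organizations/123/locations/global/resourceValueConfigs/456"
    config = client.get_resource_value_config(name=name)

    # A resource value configuration (RVC) maps a user's resources to resource
    # values for use in attack path simulations.
    print(config.name, config.resource_value)
```
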
@@ -3224,6 +3233,10 @@ def __call__( "method": "get", "uri": "/v2/{name=organizations/*/resourceValueConfigs/*}", }, + { + "method": "get", + "uri": "/v2/{name=organizations/*/locations/*/resourceValueConfigs/*}", + }, ] request, metadata = self._interceptor.pre_get_resource_value_config( request, metadata @@ -3317,6 +3330,10 @@ def __call__( "method": "get", "uri": "/v2/{name=organizations/*/simulations/*}", }, + { + "method": "get", + "uri": "/v2/{name=organizations/*/locations/*/simulations/*}", + }, ] request, metadata = self._interceptor.pre_get_simulation(request, metadata) pb_request = securitycenter_service.GetSimulationRequest.pb(request) @@ -3496,6 +3513,10 @@ def __call__( "method": "get", "uri": "/v2/{name=organizations/*/simulations/*/valuedResources/*}", }, + { + "method": "get", + "uri": "/v2/{name=organizations/*/locations/*/simulations/*/valuedResources/*}", + }, ] request, metadata = self._interceptor.pre_get_valued_resource( request, metadata @@ -3712,10 +3733,18 @@ def __call__( "method": "get", "uri": "/v2/{parent=organizations/*/simulations/*/valuedResources/*}/attackPaths", }, + { + "method": "get", + "uri": "/v2/{parent=organizations/*/locations/*/simulations/*/valuedResources/*}/attackPaths", + }, { "method": "get", "uri": "/v2/{parent=organizations/*/simulations/*/attackExposureResults/*}/attackPaths", }, + { + "method": "get", + "uri": "/v2/{parent=organizations/*/locations/*/simulations/*/attackExposureResults/*}/attackPaths", + }, ] request, metadata = self._interceptor.pre_list_attack_paths( request, metadata @@ -4226,6 +4255,10 @@ def __call__( "method": "get", "uri": "/v2/{parent=organizations/*}/resourceValueConfigs", }, + { + "method": "get", + "uri": "/v2/{parent=organizations/*/locations/*}/resourceValueConfigs", + }, ] request, metadata = self._interceptor.pre_list_resource_value_configs( request, metadata @@ -5609,8 +5642,8 @@ def __call__( Returns: ~.gcs_resource_value_config.ResourceValueConfig: - A resource value config (RVC) is a - mapping configuration of user's + A resource value configuration (RVC) + is a mapping configuration of user's resources to resource values. Used in Attack path simulations. 
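The routes above likewise extend simulations, valued resources, and attack paths to location-scoped parents. A hedged sketch of walking attack paths under such a parent follows; the simulation and valued-resource IDs are invented, and the pager-style iteration and flattened `parent` argument are assumptions based on the standard generated surface.

```python
# Illustrative sketch only: lists attack paths under a location-scoped valued
# resource, matching the "organizations/*/locations/*/simulations/*" routes
# added above. IDs are placeholders.
from google.cloud import securitycenter_v2


def list_location_scoped_attack_paths() -> None:
    client = securitycenter_v2.SecurityCenterClient(transport="rest")

    parent = (
        "organizations/123/locations/global/"
        "simulations/sim-1/valuedResources/res-1"
    )
    # The generated method returns a pager; iterating it fetches pages lazily.
    for attack_path in client.list_attack_paths(parent=parent):
        print(attack_path.name)
```
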
@@ -5622,6 +5655,11 @@ def __call__( "uri": "/v2/{resource_value_config.name=organizations/*/resourceValueConfigs/*}", "body": "resource_value_config", }, + { + "method": "patch", + "uri": "/v2/{resource_value_config.name=organizations/*/locations/*/resourceValueConfigs/*}", + "body": "resource_value_config", + }, ] request, metadata = self._interceptor.pre_update_resource_value_config( request, metadata diff --git a/packages/google-cloud-securitycenter/google/cloud/securitycenter_v2/types/__init__.py b/packages/google-cloud-securitycenter/google/cloud/securitycenter_v2/types/__init__.py index b02c85e16a50..a091eb96721b 100644 --- a/packages/google-cloud-securitycenter/google/cloud/securitycenter_v2/types/__init__.py +++ b/packages/google-cloud-securitycenter/google/cloud/securitycenter_v2/types/__init__.py @@ -19,6 +19,13 @@ from .attack_path import AttackPath from .backup_disaster_recovery import BackupDisasterRecovery from .bigquery_export import BigQueryExport +from .cloud_armor import ( + AdaptiveProtection, + Attack, + CloudArmor, + Requests, + SecurityPolicy, +) from .cloud_dlp_data_profile import CloudDlpDataProfile from .cloud_dlp_inspection import CloudDlpInspection from .compliance import Compliance @@ -30,6 +37,8 @@ from .external_system import ExternalSystem from .file import File from .finding import Finding +from .folder import Folder +from .group_membership import GroupMembership from .iam_binding import IamBinding from .indicator import Indicator from .kernel_rootkit import KernelRootkit @@ -39,11 +48,19 @@ from .log_entry import CloudLoggingEntry, LogEntry from .mitre_attack import MitreAttack from .mute_config import MuteConfig +from .notebook import Notebook from .notification_config import NotificationConfig from .notification_message import NotificationMessage from .org_policy import OrgPolicy from .process import EnvironmentVariable, Process -from .resource import Resource +from .resource import ( + AwsMetadata, + AzureMetadata, + CloudProvider, + GcpMetadata, + Resource, + ResourcePath, +) from .resource_value_config import ResourceValue, ResourceValueConfig from .security_marks import SecurityMarks from .security_posture import SecurityPosture @@ -101,6 +118,7 @@ ) from .simulation import Simulation from .source import Source +from .toxic_combination import ToxicCombination from .valued_resource import ResourceValueConfigMetadata, ValuedResource from .vulnerability import ( Cve, @@ -120,6 +138,11 @@ "AttackPath", "BackupDisasterRecovery", "BigQueryExport", + "AdaptiveProtection", + "Attack", + "CloudArmor", + "Requests", + "SecurityPolicy", "CloudDlpDataProfile", "CloudDlpInspection", "Compliance", @@ -133,6 +156,8 @@ "ExternalSystem", "File", "Finding", + "Folder", + "GroupMembership", "IamBinding", "Indicator", "KernelRootkit", @@ -143,12 +168,18 @@ "LogEntry", "MitreAttack", "MuteConfig", + "Notebook", "NotificationConfig", "NotificationMessage", "OrgPolicy", "EnvironmentVariable", "Process", + "AwsMetadata", + "AzureMetadata", + "GcpMetadata", "Resource", + "ResourcePath", + "CloudProvider", "ResourceValueConfig", "ResourceValue", "SecurityMarks", @@ -205,6 +236,7 @@ "UpdateSourceRequest", "Simulation", "Source", + "ToxicCombination", "ResourceValueConfigMetadata", "ValuedResource", "Cve", diff --git a/packages/google-cloud-securitycenter/google/cloud/securitycenter_v2/types/cloud_armor.py b/packages/google-cloud-securitycenter/google/cloud/securitycenter_v2/types/cloud_armor.py new file mode 100644 index 000000000000..7a208bf9c56f --- /dev/null +++ 
b/packages/google-cloud-securitycenter/google/cloud/securitycenter_v2/types/cloud_armor.py @@ -0,0 +1,215 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +from google.protobuf import duration_pb2 # type: ignore +import proto # type: ignore + +__protobuf__ = proto.module( + package="google.cloud.securitycenter.v2", + manifest={ + "CloudArmor", + "SecurityPolicy", + "Requests", + "AdaptiveProtection", + "Attack", + }, +) + + +class CloudArmor(proto.Message): + r"""Fields related to Google Cloud Armor findings. + + Attributes: + security_policy (google.cloud.securitycenter_v2.types.SecurityPolicy): + Information about the `Google Cloud Armor security + policy `__ + relevant to the finding. + requests (google.cloud.securitycenter_v2.types.Requests): + Information about incoming requests evaluated by `Google + Cloud Armor security + policies `__. + adaptive_protection (google.cloud.securitycenter_v2.types.AdaptiveProtection): + Information about potential Layer 7 DDoS attacks identified + by `Google Cloud Armor Adaptive + Protection `__. + attack (google.cloud.securitycenter_v2.types.Attack): + Information about DDoS attack volume and + classification. + threat_vector (str): + Distinguish between volumetric & protocol DDoS attack and + application layer attacks. For example, "L3_4" for Layer 3 + and Layer 4 DDoS attacks, or "L_7" for Layer 7 DDoS attacks. + duration (google.protobuf.duration_pb2.Duration): + Duration of attack from the start until the + current moment (updated every 5 minutes). + """ + + security_policy: "SecurityPolicy" = proto.Field( + proto.MESSAGE, + number=1, + message="SecurityPolicy", + ) + requests: "Requests" = proto.Field( + proto.MESSAGE, + number=2, + message="Requests", + ) + adaptive_protection: "AdaptiveProtection" = proto.Field( + proto.MESSAGE, + number=3, + message="AdaptiveProtection", + ) + attack: "Attack" = proto.Field( + proto.MESSAGE, + number=4, + message="Attack", + ) + threat_vector: str = proto.Field( + proto.STRING, + number=5, + ) + duration: duration_pb2.Duration = proto.Field( + proto.MESSAGE, + number=6, + message=duration_pb2.Duration, + ) + + +class SecurityPolicy(proto.Message): + r"""Information about the `Google Cloud Armor security + policy `__ + relevant to the finding. + + Attributes: + name (str): + The name of the Google Cloud Armor security + policy, for example, "my-security-policy". + type_ (str): + The type of Google Cloud Armor security + policy for example, 'backend security policy', + 'edge security policy', 'network edge security + policy', or 'always-on DDoS protection'. + preview (bool): + Whether or not the associated rule or policy + is in preview mode. 
+ """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + type_: str = proto.Field( + proto.STRING, + number=2, + ) + preview: bool = proto.Field( + proto.BOOL, + number=3, + ) + + +class Requests(proto.Message): + r"""Information about the requests relevant to the finding. + + Attributes: + ratio (float): + For 'Increasing deny ratio', the ratio is the + denied traffic divided by the allowed traffic. + For 'Allowed traffic spike', the ratio is the + allowed traffic in the short term divided by + allowed traffic in the long term. + short_term_allowed (int): + Allowed RPS (requests per second) in the + short term. + long_term_allowed (int): + Allowed RPS (requests per second) over the + long term. + long_term_denied (int): + Denied RPS (requests per second) over the + long term. + """ + + ratio: float = proto.Field( + proto.DOUBLE, + number=1, + ) + short_term_allowed: int = proto.Field( + proto.INT32, + number=2, + ) + long_term_allowed: int = proto.Field( + proto.INT32, + number=3, + ) + long_term_denied: int = proto.Field( + proto.INT32, + number=4, + ) + + +class AdaptiveProtection(proto.Message): + r"""Information about `Google Cloud Armor Adaptive + Protection `__. + + Attributes: + confidence (float): + A score of 0 means that there is low confidence that the + detected event is an actual attack. A score of 1 means that + there is high confidence that the detected event is an + attack. See the `Adaptive Protection + documentation `__ + for further explanation. + """ + + confidence: float = proto.Field( + proto.DOUBLE, + number=1, + ) + + +class Attack(proto.Message): + r"""Information about DDoS attack volume and classification. + + Attributes: + volume_pps (int): + Total PPS (packets per second) volume of + attack. + volume_bps (int): + Total BPS (bytes per second) volume of + attack. + classification (str): + Type of attack, for example, 'SYN-flood', + 'NTP-udp', or 'CHARGEN-udp'. 
+ """ + + volume_pps: int = proto.Field( + proto.INT32, + number=1, + ) + volume_bps: int = proto.Field( + proto.INT32, + number=2, + ) + classification: str = proto.Field( + proto.STRING, + number=3, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-securitycenter/google/cloud/securitycenter_v2/types/finding.py b/packages/google-cloud-securitycenter/google/cloud/securitycenter_v2/types/finding.py index ae7258129f30..e355720e1846 100644 --- a/packages/google-cloud-securitycenter/google/cloud/securitycenter_v2/types/finding.py +++ b/packages/google-cloud-securitycenter/google/cloud/securitycenter_v2/types/finding.py @@ -27,7 +27,12 @@ contact_details, container, ) -from google.cloud.securitycenter_v2.types import external_system, file, iam_binding +from google.cloud.securitycenter_v2.types import ( + external_system, + file, + group_membership, + iam_binding, +) from google.cloud.securitycenter_v2.types import attack_exposure as gcs_attack_exposure from google.cloud.securitycenter_v2.types import ( backup_disaster_recovery as gcs_backup_disaster_recovery, @@ -45,13 +50,18 @@ from google.cloud.securitycenter_v2.types import ( security_posture as gcs_security_posture, ) +from google.cloud.securitycenter_v2.types import ( + toxic_combination as gcs_toxic_combination, +) from google.cloud.securitycenter_v2.types import vulnerability as gcs_vulnerability from google.cloud.securitycenter_v2.types import access as gcs_access from google.cloud.securitycenter_v2.types import application as gcs_application +from google.cloud.securitycenter_v2.types import cloud_armor as gcs_cloud_armor from google.cloud.securitycenter_v2.types import database as gcs_database from google.cloud.securitycenter_v2.types import indicator as gcs_indicator from google.cloud.securitycenter_v2.types import kubernetes as gcs_kubernetes from google.cloud.securitycenter_v2.types import load_balancer, log_entry +from google.cloud.securitycenter_v2.types import notebook as gcs_notebook from google.cloud.securitycenter_v2.types import org_policy, process __protobuf__ = proto.module( @@ -302,6 +312,24 @@ class Finding(proto.Message): load_balancers (MutableSequence[google.cloud.securitycenter_v2.types.LoadBalancer]): The load balancers associated with the finding. + cloud_armor (google.cloud.securitycenter_v2.types.CloudArmor): + Fields related to Cloud Armor findings. + notebook (google.cloud.securitycenter_v2.types.Notebook): + Notebook associated with the finding. + toxic_combination (google.cloud.securitycenter_v2.types.ToxicCombination): + Contains details about a group of security + issues that, when the issues occur together, + represent a greater risk than when the issues + occur independently. A group of such issues is + referred to as a toxic combination. + This field cannot be updated. Its value is + ignored in all update requests. + group_memberships (MutableSequence[google.cloud.securitycenter_v2.types.GroupMembership]): + Contains details about groups of which this + finding is a member. A group is a collection of + findings that are related in some way. This + field cannot be updated. Its value is ignored in + all update requests. """ class State(proto.Enum): @@ -453,6 +481,10 @@ class FindingClass(proto.Enum): POSTURE_VIOLATION (6): Describes a potential security risk due to a change in the security posture. + TOXIC_COMBINATION (7): + Describes a combination of security issues + that represent a more severe security problem + when taken together. 
""" FINDING_CLASS_UNSPECIFIED = 0 THREAT = 1 @@ -461,6 +493,7 @@ class FindingClass(proto.Enum): OBSERVATION = 4 SCC_ERROR = 5 POSTURE_VIOLATION = 6 + TOXIC_COMBINATION = 7 name: str = proto.Field( proto.STRING, @@ -685,6 +718,28 @@ class FindingClass(proto.Enum): number=50, message=load_balancer.LoadBalancer, ) + cloud_armor: gcs_cloud_armor.CloudArmor = proto.Field( + proto.MESSAGE, + number=51, + message=gcs_cloud_armor.CloudArmor, + ) + notebook: gcs_notebook.Notebook = proto.Field( + proto.MESSAGE, + number=55, + message=gcs_notebook.Notebook, + ) + toxic_combination: gcs_toxic_combination.ToxicCombination = proto.Field( + proto.MESSAGE, + number=56, + message=gcs_toxic_combination.ToxicCombination, + ) + group_memberships: MutableSequence[ + group_membership.GroupMembership + ] = proto.RepeatedField( + proto.MESSAGE, + number=57, + message=group_membership.GroupMembership, + ) __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-securitycenter/google/cloud/securitycenter_v2/types/folder.py b/packages/google-cloud-securitycenter/google/cloud/securitycenter_v2/types/folder.py new file mode 100644 index 000000000000..803c34df2cbf --- /dev/null +++ b/packages/google-cloud-securitycenter/google/cloud/securitycenter_v2/types/folder.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + +__protobuf__ = proto.module( + package="google.cloud.securitycenter.v2", + manifest={ + "Folder", + }, +) + + +class Folder(proto.Message): + r"""Message that contains the resource name and display name of a + folder resource. + + Attributes: + resource_folder (str): + Full resource name of this folder. See: + https://cloud.google.com/apis/design/resource_names#full_resource_name + resource_folder_display_name (str): + The user defined display name for this + folder. + """ + + resource_folder: str = proto.Field( + proto.STRING, + number=1, + ) + resource_folder_display_name: str = proto.Field( + proto.STRING, + number=2, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-securitycenter/google/cloud/securitycenter_v2/types/group_membership.py b/packages/google-cloud-securitycenter/google/cloud/securitycenter_v2/types/group_membership.py new file mode 100644 index 000000000000..18a9ac0e5b39 --- /dev/null +++ b/packages/google-cloud-securitycenter/google/cloud/securitycenter_v2/types/group_membership.py @@ -0,0 +1,65 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + +__protobuf__ = proto.module( + package="google.cloud.securitycenter.v2", + manifest={ + "GroupMembership", + }, +) + + +class GroupMembership(proto.Message): + r"""Contains details about groups of which this finding is a + member. A group is a collection of findings that are related in + some way. + + Attributes: + group_type (google.cloud.securitycenter_v2.types.GroupMembership.GroupType): + Type of group. + group_id (str): + ID of the group. + """ + + class GroupType(proto.Enum): + r"""Possible types of groups. + + Values: + GROUP_TYPE_UNSPECIFIED (0): + Default value. + GROUP_TYPE_TOXIC_COMBINATION (1): + Group represents a toxic combination. + """ + GROUP_TYPE_UNSPECIFIED = 0 + GROUP_TYPE_TOXIC_COMBINATION = 1 + + group_type: GroupType = proto.Field( + proto.ENUM, + number=1, + enum=GroupType, + ) + group_id: str = proto.Field( + proto.STRING, + number=2, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-securitycenter/google/cloud/securitycenter_v2/types/mitre_attack.py b/packages/google-cloud-securitycenter/google/cloud/securitycenter_v2/types/mitre_attack.py index d32e0b7cdc90..6fcfb196ace4 100644 --- a/packages/google-cloud-securitycenter/google/cloud/securitycenter_v2/types/mitre_attack.py +++ b/packages/google-cloud-securitycenter/google/cloud/securitycenter_v2/types/mitre_attack.py @@ -111,7 +111,7 @@ class Tactic(proto.Enum): class Technique(proto.Enum): r"""MITRE ATT&CK techniques that can be referenced by SCC findings. 
See: https://attack.mitre.org/techniques/enterprise/ - Next ID: 59 + Next ID: 63 Values: TECHNIQUE_UNSPECIFIED (0): @@ -132,6 +132,8 @@ class Technique(proto.Enum): T1059 UNIX_SHELL (7): T1059.004 + PYTHON (59): + T1059.006 PERMISSION_GROUPS_DISCOVERY (18): T1069 CLOUD_GROUPS (19): @@ -230,8 +232,14 @@ class Technique(proto.Enum): T1595 SCANNING_IP_BLOCKS (2): T1595.001 + CONTAINER_ADMINISTRATION_COMMAND (60): + T1613 + ESCAPE_TO_HOST (61): + T1611 CONTAINER_AND_RESOURCE_DISCOVERY (57): T1613 + STEAL_OR_FORGE_AUTHENTICATION_CERTIFICATES (62): + T1649 """ TECHNIQUE_UNSPECIFIED = 0 MASQUERADING = 49 @@ -242,6 +250,7 @@ class Technique(proto.Enum): PROCESS_DISCOVERY = 56 COMMAND_AND_SCRIPTING_INTERPRETER = 6 UNIX_SHELL = 7 + PYTHON = 59 PERMISSION_GROUPS_DISCOVERY = 18 CLOUD_GROUPS = 19 APPLICATION_LAYER_PROTOCOL = 45 @@ -291,7 +300,10 @@ class Technique(proto.Enum): OBTAIN_CAPABILITIES = 43 ACTIVE_SCANNING = 1 SCANNING_IP_BLOCKS = 2 + CONTAINER_ADMINISTRATION_COMMAND = 60 + ESCAPE_TO_HOST = 61 CONTAINER_AND_RESOURCE_DISCOVERY = 57 + STEAL_OR_FORGE_AUTHENTICATION_CERTIFICATES = 62 primary_tactic: Tactic = proto.Field( proto.ENUM, diff --git a/packages/google-cloud-securitycenter/google/cloud/securitycenter_v2/types/notebook.py b/packages/google-cloud-securitycenter/google/cloud/securitycenter_v2/types/notebook.py new file mode 100644 index 000000000000..b1b43b121c24 --- /dev/null +++ b/packages/google-cloud-securitycenter/google/cloud/securitycenter_v2/types/notebook.py @@ -0,0 +1,70 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +from google.protobuf import timestamp_pb2 # type: ignore +import proto # type: ignore + +__protobuf__ = proto.module( + package="google.cloud.securitycenter.v2", + manifest={ + "Notebook", + }, +) + + +class Notebook(proto.Message): + r"""Represents a Jupyter notebook IPYNB file, such as a `Colab + Enterprise + notebook `__ file, + that is associated with a finding. + + Attributes: + name (str): + The name of the notebook. + service (str): + The source notebook service, for example, + "Colab Enterprise". + last_author (str): + The user ID of the latest author to modify + the notebook. + notebook_update_time (google.protobuf.timestamp_pb2.Timestamp): + The most recent time the notebook was + updated. 
+ """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + service: str = proto.Field( + proto.STRING, + number=2, + ) + last_author: str = proto.Field( + proto.STRING, + number=3, + ) + notebook_update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=4, + message=timestamp_pb2.Timestamp, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-securitycenter/google/cloud/securitycenter_v2/types/resource.py b/packages/google-cloud-securitycenter/google/cloud/securitycenter_v2/types/resource.py index e05881566ed9..06bb401f4d6f 100644 --- a/packages/google-cloud-securitycenter/google/cloud/securitycenter_v2/types/resource.py +++ b/packages/google-cloud-securitycenter/google/cloud/securitycenter_v2/types/resource.py @@ -19,17 +19,50 @@ import proto # type: ignore +from google.cloud.securitycenter_v2.types import folder + __protobuf__ = proto.module( package="google.cloud.securitycenter.v2", manifest={ + "CloudProvider", "Resource", + "GcpMetadata", + "AwsMetadata", + "AzureMetadata", + "ResourcePath", }, ) +class CloudProvider(proto.Enum): + r"""The cloud provider the finding pertains to. + + Values: + CLOUD_PROVIDER_UNSPECIFIED (0): + The cloud provider is unspecified. + GOOGLE_CLOUD_PLATFORM (1): + The cloud provider is Google Cloud Platform. + AMAZON_WEB_SERVICES (2): + The cloud provider is Amazon Web Services. + MICROSOFT_AZURE (3): + The cloud provider is Microsoft Azure. + """ + CLOUD_PROVIDER_UNSPECIFIED = 0 + GOOGLE_CLOUD_PLATFORM = 1 + AMAZON_WEB_SERVICES = 2 + MICROSOFT_AZURE = 3 + + class Resource(proto.Message): r"""Information related to the Google Cloud resource. + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + Attributes: name (str): The full resource name of the resource. See: @@ -38,6 +71,42 @@ class Resource(proto.Message): The human readable name of the resource. type_ (str): The full resource type of the resource. + cloud_provider (google.cloud.securitycenter_v2.types.CloudProvider): + Indicates which cloud provider the finding is + from. + service (str): + The service or resource provider associated + with the resource. + location (str): + The region or location of the service (if + applicable). + gcp_metadata (google.cloud.securitycenter_v2.types.GcpMetadata): + The GCP metadata associated with the finding. + + This field is a member of `oneof`_ ``cloud_provider_metadata``. + aws_metadata (google.cloud.securitycenter_v2.types.AwsMetadata): + The AWS metadata associated with the finding. + + This field is a member of `oneof`_ ``cloud_provider_metadata``. + azure_metadata (google.cloud.securitycenter_v2.types.AzureMetadata): + The Azure metadata associated with the + finding. + + This field is a member of `oneof`_ ``cloud_provider_metadata``. + resource_path (google.cloud.securitycenter_v2.types.ResourcePath): + Provides the path to the resource within the + resource hierarchy. + resource_path_string (str): + A string representation of the resource path. For Google + Cloud, it has the format of + organizations/{organization_id}/folders/{folder_id}/folders/{folder_id}/projects/{project_id} + where there can be any number of folders. 
For AWS, it has + the format of + org/{organization_id}/ou/{organizational_unit_id}/ou/{organizational_unit_id}/account/{account_id} + where there can be any number of organizational units. For + Azure, it has the format of + mg/{management_group_id}/mg/{management_group_id}/subscription/{subscription_id}/rg/{resource_group_name} + where there can be any number of management groups. """ name: str = proto.Field( @@ -52,6 +121,373 @@ class Resource(proto.Message): proto.STRING, number=3, ) + cloud_provider: "CloudProvider" = proto.Field( + proto.ENUM, + number=4, + enum="CloudProvider", + ) + service: str = proto.Field( + proto.STRING, + number=5, + ) + location: str = proto.Field( + proto.STRING, + number=6, + ) + gcp_metadata: "GcpMetadata" = proto.Field( + proto.MESSAGE, + number=7, + oneof="cloud_provider_metadata", + message="GcpMetadata", + ) + aws_metadata: "AwsMetadata" = proto.Field( + proto.MESSAGE, + number=8, + oneof="cloud_provider_metadata", + message="AwsMetadata", + ) + azure_metadata: "AzureMetadata" = proto.Field( + proto.MESSAGE, + number=9, + oneof="cloud_provider_metadata", + message="AzureMetadata", + ) + resource_path: "ResourcePath" = proto.Field( + proto.MESSAGE, + number=10, + message="ResourcePath", + ) + resource_path_string: str = proto.Field( + proto.STRING, + number=11, + ) + + +class GcpMetadata(proto.Message): + r"""GCP metadata associated with the resource, only applicable if + the finding's cloud provider is Google Cloud Platform. + + Attributes: + project (str): + The full resource name of project that the + resource belongs to. + project_display_name (str): + The project ID that the resource belongs to. + parent (str): + The full resource name of resource's parent. + parent_display_name (str): + The human readable name of resource's parent. + folders (MutableSequence[google.cloud.securitycenter_v2.types.Folder]): + Output only. Contains a Folder message for + each folder in the assets ancestry. The first + folder is the deepest nested folder, and the + last folder is the folder directly under the + Organization. + organization (str): + The name of the organization that the + resource belongs to. + """ + + project: str = proto.Field( + proto.STRING, + number=1, + ) + project_display_name: str = proto.Field( + proto.STRING, + number=2, + ) + parent: str = proto.Field( + proto.STRING, + number=3, + ) + parent_display_name: str = proto.Field( + proto.STRING, + number=4, + ) + folders: MutableSequence[folder.Folder] = proto.RepeatedField( + proto.MESSAGE, + number=5, + message=folder.Folder, + ) + organization: str = proto.Field( + proto.STRING, + number=6, + ) + + +class AwsMetadata(proto.Message): + r"""AWS metadata associated with the resource, only applicable if + the finding's cloud provider is Amazon Web Services. + + Attributes: + organization (google.cloud.securitycenter_v2.types.AwsMetadata.AwsOrganization): + The AWS organization associated with the + resource. + organizational_units (MutableSequence[google.cloud.securitycenter_v2.types.AwsMetadata.AwsOrganizationalUnit]): + A list of AWS organizational units associated + with the resource, ordered from lowest level + (closest to the account) to highest level. + account (google.cloud.securitycenter_v2.types.AwsMetadata.AwsAccount): + The AWS account associated with the resource. 
+ """ + + class AwsOrganization(proto.Message): + r"""An organization is a collection of accounts that are + centrally managed together using consolidated billing, organized + hierarchically with organizational units (OUs), and controlled + with policies. + + Attributes: + id (str): + The unique identifier (ID) for the + organization. The regex pattern for an + organization ID string requires "o-" followed by + from 10 to 32 lowercase letters or digits. + """ + + id: str = proto.Field( + proto.STRING, + number=1, + ) + + class AwsOrganizationalUnit(proto.Message): + r"""An Organizational Unit (OU) is a container of AWS accounts + within a root of an organization. Policies that are attached to + an OU apply to all accounts contained in that OU and in any + child OUs. + + Attributes: + id (str): + The unique identifier (ID) associated with + this OU. The regex pattern for an organizational + unit ID string requires "ou-" followed by from 4 + to 32 lowercase letters or digits (the ID of the + root that contains the OU). This string is + followed by a second "-" dash and from 8 to 32 + additional lowercase letters or digits. For + example, "ou-ab12-cd34ef56". + name (str): + The friendly name of the OU. + """ + + id: str = proto.Field( + proto.STRING, + number=1, + ) + name: str = proto.Field( + proto.STRING, + number=2, + ) + + class AwsAccount(proto.Message): + r"""An AWS account that is a member of an organization. + + Attributes: + id (str): + The unique identifier (ID) of the account, + containing exactly 12 digits. + name (str): + The friendly name of this account. + """ + + id: str = proto.Field( + proto.STRING, + number=1, + ) + name: str = proto.Field( + proto.STRING, + number=2, + ) + + organization: AwsOrganization = proto.Field( + proto.MESSAGE, + number=1, + message=AwsOrganization, + ) + organizational_units: MutableSequence[AwsOrganizationalUnit] = proto.RepeatedField( + proto.MESSAGE, + number=2, + message=AwsOrganizationalUnit, + ) + account: AwsAccount = proto.Field( + proto.MESSAGE, + number=3, + message=AwsAccount, + ) + + +class AzureMetadata(proto.Message): + r"""Azure metadata associated with the resource, only applicable + if the finding's cloud provider is Microsoft Azure. + + Attributes: + management_groups (MutableSequence[google.cloud.securitycenter_v2.types.AzureMetadata.AzureManagementGroup]): + A list of Azure management groups associated + with the resource, ordered from lowest level + (closest to the subscription) to highest level. + subscription (google.cloud.securitycenter_v2.types.AzureMetadata.AzureSubscription): + The Azure subscription associated with the + resource. + resource_group (google.cloud.securitycenter_v2.types.AzureMetadata.AzureResourceGroup): + The Azure resource group associated with the + resource. + """ + + class AzureManagementGroup(proto.Message): + r"""Represents an Azure management group. + + Attributes: + id (str): + The UUID of the Azure management group, for + example, "20000000-0001-0000-0000-000000000000". + display_name (str): + The display name of the Azure management + group. + """ + + id: str = proto.Field( + proto.STRING, + number=1, + ) + display_name: str = proto.Field( + proto.STRING, + number=2, + ) + + class AzureSubscription(proto.Message): + r"""Represents an Azure subscription. + + Attributes: + id (str): + The UUID of the Azure subscription, for + example, "291bba3f-e0a5-47bc-a099-3bdcb2a50a05". + display_name (str): + The display name of the Azure subscription. 
+ """ + + id: str = proto.Field( + proto.STRING, + number=1, + ) + display_name: str = proto.Field( + proto.STRING, + number=2, + ) + + class AzureResourceGroup(proto.Message): + r"""Represents an Azure resource group. + + Attributes: + name (str): + The name of the Azure resource group. This is + not a UUID. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + management_groups: MutableSequence[AzureManagementGroup] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=AzureManagementGroup, + ) + subscription: AzureSubscription = proto.Field( + proto.MESSAGE, + number=2, + message=AzureSubscription, + ) + resource_group: AzureResourceGroup = proto.Field( + proto.MESSAGE, + number=3, + message=AzureResourceGroup, + ) + + +class ResourcePath(proto.Message): + r"""Represents the path of resources leading up to the resource + this finding is about. + + Attributes: + nodes (MutableSequence[google.cloud.securitycenter_v2.types.ResourcePath.ResourcePathNode]): + The list of nodes that make the up resource + path, ordered from lowest level to highest + level. + """ + + class ResourcePathNodeType(proto.Enum): + r"""The type of resource the node represents. + + Values: + RESOURCE_PATH_NODE_TYPE_UNSPECIFIED (0): + Node type is unspecified. + GCP_ORGANIZATION (1): + The node represents a Google Cloud + organization. + GCP_FOLDER (2): + The node represents a Google Cloud folder. + GCP_PROJECT (3): + The node represents a Google Cloud project. + AWS_ORGANIZATION (4): + The node represents an AWS organization. + AWS_ORGANIZATIONAL_UNIT (5): + The node represents an AWS organizational + unit. + AWS_ACCOUNT (6): + The node represents an AWS account. + AZURE_MANAGEMENT_GROUP (7): + The node represents an Azure management + group. + AZURE_SUBSCRIPTION (8): + The node represents an Azure subscription. + AZURE_RESOURCE_GROUP (9): + The node represents an Azure resource group. + """ + RESOURCE_PATH_NODE_TYPE_UNSPECIFIED = 0 + GCP_ORGANIZATION = 1 + GCP_FOLDER = 2 + GCP_PROJECT = 3 + AWS_ORGANIZATION = 4 + AWS_ORGANIZATIONAL_UNIT = 5 + AWS_ACCOUNT = 6 + AZURE_MANAGEMENT_GROUP = 7 + AZURE_SUBSCRIPTION = 8 + AZURE_RESOURCE_GROUP = 9 + + class ResourcePathNode(proto.Message): + r"""A node within the resource path. Each node represents a + resource within the resource hierarchy. + + Attributes: + node_type (google.cloud.securitycenter_v2.types.ResourcePath.ResourcePathNodeType): + The type of resource this node represents. + id (str): + The ID of the resource this node represents. + display_name (str): + The display name of the resource this node + represents. 
+ """ + + node_type: "ResourcePath.ResourcePathNodeType" = proto.Field( + proto.ENUM, + number=1, + enum="ResourcePath.ResourcePathNodeType", + ) + id: str = proto.Field( + proto.STRING, + number=2, + ) + display_name: str = proto.Field( + proto.STRING, + number=3, + ) + + nodes: MutableSequence[ResourcePathNode] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=ResourcePathNode, + ) __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-securitycenter/google/cloud/securitycenter_v2/types/resource_value_config.py b/packages/google-cloud-securitycenter/google/cloud/securitycenter_v2/types/resource_value_config.py index db5bd490b4f7..01e4060766ff 100644 --- a/packages/google-cloud-securitycenter/google/cloud/securitycenter_v2/types/resource_value_config.py +++ b/packages/google-cloud-securitycenter/google/cloud/securitycenter_v2/types/resource_value_config.py @@ -20,6 +20,8 @@ from google.protobuf import timestamp_pb2 # type: ignore import proto # type: ignore +from google.cloud.securitycenter_v2.types import resource + __protobuf__ = proto.module( package="google.cloud.securitycenter.v2", manifest={ @@ -53,48 +55,52 @@ class ResourceValue(proto.Enum): class ResourceValueConfig(proto.Message): - r"""A resource value config (RVC) is a mapping configuration of - user's resources to resource values. Used in Attack path - simulations. + r"""A resource value configuration (RVC) is a mapping + configuration of user's resources to resource values. Used in + Attack path simulations. Attributes: name (str): - Name for the resource value config + Name for the resource value configuration resource_value (google.cloud.securitycenter_v2.types.ResourceValue): Resource value level this expression represents Only required when there is no SDP mapping in the request tag_values (MutableSequence[str]): Required. Tag values combined with AND to check against. - Values in the form "tagValues/123" E.g. [ "tagValues/123", - "tagValues/456", "tagValues/789" ] + Values in the form "tagValues/123" Example: [ + "tagValues/123", "tagValues/456", "tagValues/789" ] https://cloud.google.com/resource-manager/docs/tags/tags-creating-and-managing resource_type (str): Apply resource_value only to resources that match - resource_type. resource_type will be checked with "AND" of - other resources. E.g. "storage.googleapis.com/Bucket" with - resource_value "HIGH" will apply "HIGH" value only to + resource_type. resource_type will be checked with AND of + other resources. For example, + "storage.googleapis.com/Bucket" with resource_value "HIGH" + will apply "HIGH" value only to "storage.googleapis.com/Bucket" resources. scope (str): - Project or folder to scope this config to. - For example, "project/456" would apply this - config only to resources in "project/456" - scope will be checked with "AND" of other - resources. + Project or folder to scope this configuration + to. For example, "project/456" would apply this + configuration only to resources in "project/456" + scope will be checked with AND of + other resources. resource_labels_selector (MutableMapping[str, str]): List of resource labels to search for, evaluated with AND. - E.g. 
"resource_labels_selector": {"key": "value", "env": - "prod"} will match resources with labels "key": "value" AND - "env": "prod" + For example, "resource_labels_selector": {"key": "value", + "env": "prod"} will match resources with labels "key": + "value" AND "env": "prod" https://cloud.google.com/resource-manager/docs/creating-managing-labels description (str): - Description of the resource value config. + Description of the resource value + configuration. create_time (google.protobuf.timestamp_pb2.Timestamp): Output only. Timestamp this resource value - config was created. + configuration was created. update_time (google.protobuf.timestamp_pb2.Timestamp): Output only. Timestamp this resource value - config was last updated. + configuration was last updated. + cloud_provider (google.cloud.securitycenter_v2.types.CloudProvider): + Cloud provider this configuration applies to sensitive_data_protection_mapping (google.cloud.securitycenter_v2.types.ResourceValueConfig.SensitiveDataProtectionMapping): A mapping of the sensitivity on Sensitive Data Protection finding to resource values. This mapping can only be used in @@ -168,6 +174,11 @@ class SensitiveDataProtectionMapping(proto.Message): number=9, message=timestamp_pb2.Timestamp, ) + cloud_provider: resource.CloudProvider = proto.Field( + proto.ENUM, + number=10, + enum=resource.CloudProvider, + ) sensitive_data_protection_mapping: SensitiveDataProtectionMapping = proto.Field( proto.MESSAGE, number=11, diff --git a/packages/google-cloud-securitycenter/google/cloud/securitycenter_v2/types/securitycenter_service.py b/packages/google-cloud-securitycenter/google/cloud/securitycenter_v2/types/securitycenter_service.py index 9229052e10e1..ffb631a1e35e 100644 --- a/packages/google-cloud-securitycenter/google/cloud/securitycenter_v2/types/securitycenter_service.py +++ b/packages/google-cloud-securitycenter/google/cloud/securitycenter_v2/types/securitycenter_service.py @@ -32,6 +32,7 @@ from google.cloud.securitycenter_v2.types import attack_path, bigquery_export from google.cloud.securitycenter_v2.types import finding as gcs_finding from google.cloud.securitycenter_v2.types import mute_config as gcs_mute_config +from google.cloud.securitycenter_v2.types import resource as gcs_resource from google.cloud.securitycenter_v2.types import source as gcs_source from google.cloud.securitycenter_v2.types import valued_resource @@ -670,14 +671,6 @@ class GroupFindingsRequest(proto.Message): for grouping. The string value should follow SQL syntax: comma separated list of fields. For example: "parent,resource_name". - - The following fields are supported: - - - resource_name - - category - - state - - parent - - severity page_token (str): The value returned by the last ``GroupFindingsResponse``; indicates that this is a continuation of a prior @@ -1126,6 +1119,13 @@ class Resource(proto.Message): r"""Information related to the Google Cloud resource that is associated with this finding. + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + Attributes: name (str): The full resource name of the resource. 
See: @@ -1134,6 +1134,42 @@ class Resource(proto.Message): The human readable name of the resource. type_ (str): The full resource type of the resource. + cloud_provider (google.cloud.securitycenter_v2.types.CloudProvider): + Indicates which cloud provider the finding is + from. + service (str): + The service or resource provider associated + with the resource. + location (str): + The region or location of the service (if + applicable). + gcp_metadata (google.cloud.securitycenter_v2.types.GcpMetadata): + The GCP metadata associated with the finding. + + This field is a member of `oneof`_ ``cloud_provider_metadata``. + aws_metadata (google.cloud.securitycenter_v2.types.AwsMetadata): + The AWS metadata associated with the finding. + + This field is a member of `oneof`_ ``cloud_provider_metadata``. + azure_metadata (google.cloud.securitycenter_v2.types.AzureMetadata): + The Azure metadata associated with the + finding. + + This field is a member of `oneof`_ ``cloud_provider_metadata``. + resource_path (google.cloud.securitycenter_v2.types.ResourcePath): + Provides the path to the resource within the + resource hierarchy. + resource_path_string (str): + A string representation of the resource path. For Google + Cloud, it has the format of + organizations/{organization_id}/folders/{folder_id}/folders/{folder_id}/projects/{project_id} + where there can be any number of folders. For AWS, it has + the format of + org/{organization_id}/ou/{organizational_unit_id}/ou/{organizational_unit_id}/account/{account_id} + where there can be any number of organizational units. For + Azure, it has the format of + mg/{management_group_id}/mg/{management_group_id}/subscription/{subscription_id}/rg/{resource_group_name} + where there can be any number of management groups. """ name: str = proto.Field( @@ -1148,6 +1184,46 @@ class Resource(proto.Message): proto.STRING, number=3, ) + cloud_provider: gcs_resource.CloudProvider = proto.Field( + proto.ENUM, + number=4, + enum=gcs_resource.CloudProvider, + ) + service: str = proto.Field( + proto.STRING, + number=5, + ) + location: str = proto.Field( + proto.STRING, + number=6, + ) + gcp_metadata: gcs_resource.GcpMetadata = proto.Field( + proto.MESSAGE, + number=7, + oneof="cloud_provider_metadata", + message=gcs_resource.GcpMetadata, + ) + aws_metadata: gcs_resource.AwsMetadata = proto.Field( + proto.MESSAGE, + number=8, + oneof="cloud_provider_metadata", + message=gcs_resource.AwsMetadata, + ) + azure_metadata: gcs_resource.AzureMetadata = proto.Field( + proto.MESSAGE, + number=9, + oneof="cloud_provider_metadata", + message=gcs_resource.AzureMetadata, + ) + resource_path: gcs_resource.ResourcePath = proto.Field( + proto.MESSAGE, + number=10, + message=gcs_resource.ResourcePath, + ) + resource_path_string: str = proto.Field( + proto.STRING, + number=11, + ) finding: gcs_finding.Finding = proto.Field( proto.MESSAGE, @@ -1767,8 +1843,12 @@ class UpdateResourceValueConfigRequest(proto.Message): Required. The resource value config being updated. update_mask (google.protobuf.field_mask_pb2.FieldMask): - The list of fields to be updated. - If empty all mutable fields will be updated. + The list of fields to be updated. If empty all mutable + fields will be updated. 
+ + To update nested fields, include the top level field in the + mask For example, to update gcp_metadata.resource_type, + include the "gcp_metadata" field mask """ resource_value_config: gcs_resource_value_config.ResourceValueConfig = proto.Field( diff --git a/packages/google-cloud-securitycenter/google/cloud/securitycenter_v2/types/simulation.py b/packages/google-cloud-securitycenter/google/cloud/securitycenter_v2/types/simulation.py index 5683d8f4591e..922002db1ae2 100644 --- a/packages/google-cloud-securitycenter/google/cloud/securitycenter_v2/types/simulation.py +++ b/packages/google-cloud-securitycenter/google/cloud/securitycenter_v2/types/simulation.py @@ -20,7 +20,7 @@ from google.protobuf import timestamp_pb2 # type: ignore import proto # type: ignore -from google.cloud.securitycenter_v2.types import valued_resource +from google.cloud.securitycenter_v2.types import resource, valued_resource __protobuf__ = proto.module( package="google.cloud.securitycenter.v2", @@ -43,6 +43,9 @@ class Simulation(proto.Message): resource_value_configs_metadata (MutableSequence[google.cloud.securitycenter_v2.types.ResourceValueConfigMetadata]): Resource value configurations' metadata used in this simulation. Maximum of 100. + cloud_provider (google.cloud.securitycenter_v2.types.CloudProvider): + Indicates which cloud provider was used in + this simulation. """ name: str = proto.Field( @@ -61,6 +64,11 @@ class Simulation(proto.Message): number=3, message=valued_resource.ResourceValueConfigMetadata, ) + cloud_provider: resource.CloudProvider = proto.Field( + proto.ENUM, + number=4, + enum=resource.CloudProvider, + ) __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-securitycenter/google/cloud/securitycenter_v2/types/toxic_combination.py b/packages/google-cloud-securitycenter/google/cloud/securitycenter_v2/types/toxic_combination.py new file mode 100644 index 000000000000..3b12e64bf6bc --- /dev/null +++ b/packages/google-cloud-securitycenter/google/cloud/securitycenter_v2/types/toxic_combination.py @@ -0,0 +1,59 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + +__protobuf__ = proto.module( + package="google.cloud.securitycenter.v2", + manifest={ + "ToxicCombination", + }, +) + + +class ToxicCombination(proto.Message): + r"""Contains details about a group of security issues that, when + the issues occur together, represent a greater risk than when + the issues occur independently. A group of such issues is + referred to as a toxic combination. + + Attributes: + attack_exposure_score (float): + The `Attack exposure + score `__ + of this toxic combination. The score is a measure of how + much this toxic combination exposes one or more high-value + resources to potential attack. 
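# Illustrative sketch (not an official sample) of the `update_mask` behaviour
# described above: an empty mask updates all mutable fields, and nested fields
# are covered by listing their top-level parent. Field and module names come
# from the types in this diff; the resource name is a placeholder.
from google.protobuf import field_mask_pb2

from google.cloud.securitycenter_v2.types import resource_value_config
from google.cloud.securitycenter_v2.types import securitycenter_service

config = resource_value_config.ResourceValueConfig(
    name="organizations/123/resourceValueConfigs/456",  # placeholder name
    description="configuration for high-value production resources",
    sensitive_data_protection_mapping=resource_value_config.ResourceValueConfig.SensitiveDataProtectionMapping(
        high_sensitivity_mapping=1,  # enum value, mirroring the generated tests
    ),
)

request = securitycenter_service.UpdateResourceValueConfigRequest(
    resource_value_config=config,
    # Listing the top-level "sensitive_data_protection_mapping" field is enough
    # to update its nested sub-fields; an empty mask would update everything.
    update_mask=field_mask_pb2.FieldMask(
        paths=["description", "sensitive_data_protection_mapping"]
    ),
)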
+ related_findings (MutableSequence[str]): + List of resource names of findings associated with this + toxic combination. For example, + ``organizations/123/sources/456/findings/789``. + """ + + attack_exposure_score: float = proto.Field( + proto.DOUBLE, + number=1, + ) + related_findings: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=2, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-securitycenter/noxfile.py b/packages/google-cloud-securitycenter/noxfile.py index 1e6cd48d0529..67b7265f7586 100644 --- a/packages/google-cloud-securitycenter/noxfile.py +++ b/packages/google-cloud-securitycenter/noxfile.py @@ -160,14 +160,28 @@ def install_unittest_dependencies(session, *constraints): session.install("-e", ".", *constraints) -def default(session): +@nox.session(python=UNIT_TEST_PYTHON_VERSIONS) +@nox.parametrize( + "protobuf_implementation", + ["python", "upb", "cpp"], +) +def unit(session, protobuf_implementation): # Install all test dependencies, then install this package in-place. + if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12"): + session.skip("cpp implementation is not supported in python 3.11+") + constraints_path = str( CURRENT_DIRECTORY / "testing" / f"constraints-{session.python}.txt" ) install_unittest_dependencies(session, "-c", constraints_path) + # TODO(https://github.com/googleapis/synthtool/issues/1976): + # Remove the 'cpp' implementation once support for Protobuf 3.x is dropped. + # The 'cpp' implementation requires Protobuf<4. + if protobuf_implementation == "cpp": + session.install("protobuf<4") + # Run py.test against the unit tests. session.run( "py.test", @@ -181,15 +195,12 @@ def default(session): "--cov-fail-under=0", os.path.join("tests", "unit"), *session.posargs, + env={ + "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation, + }, ) -@nox.session(python=UNIT_TEST_PYTHON_VERSIONS) -def unit(session): - """Run the unit test suite.""" - default(session) - - def install_systemtest_dependencies(session, *constraints): # Use pre-release gRPC for system tests. # Exclude version 1.52.0rc1 which has a known issue. @@ -358,9 +369,16 @@ def docfx(session): @nox.session(python="3.12") -def prerelease_deps(session): +@nox.parametrize( + "protobuf_implementation", + ["python", "upb", "cpp"], +) +def prerelease_deps(session, protobuf_implementation): """Run all tests with prerelease versions of dependencies installed.""" + if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12"): + session.skip("cpp implementation is not supported in python 3.11+") + # Install all dependencies session.install("-e", ".[all, tests, tracing]") unit_deps_all = UNIT_TEST_STANDARD_DEPENDENCIES + UNIT_TEST_EXTERNAL_DEPENDENCIES @@ -397,9 +415,9 @@ def prerelease_deps(session): "protobuf", # dependency of grpc "six", + "grpc-google-iam-v1", "googleapis-common-protos", - # Exclude version 1.52.0rc1 which has a known issue. 
See https://github.com/grpc/grpc/issues/32163 - "grpcio!=1.52.0rc1", + "grpcio", "grpcio-status", "google-api-core", "google-auth", @@ -425,4 +443,10 @@ def prerelease_deps(session): session.run("python", "-c", "import grpc; print(grpc.__version__)") session.run("python", "-c", "import google.auth; print(google.auth.__version__)") - session.run("py.test", "tests/unit") + session.run( + "py.test", + "tests/unit", + env={ + "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation, + }, + ) diff --git a/packages/google-cloud-securitycenter/samples/generated_samples/snippet_metadata_google.cloud.securitycenter.v1.json b/packages/google-cloud-securitycenter/samples/generated_samples/snippet_metadata_google.cloud.securitycenter.v1.json index 278ec40bb216..7fa250638bd3 100644 --- a/packages/google-cloud-securitycenter/samples/generated_samples/snippet_metadata_google.cloud.securitycenter.v1.json +++ b/packages/google-cloud-securitycenter/samples/generated_samples/snippet_metadata_google.cloud.securitycenter.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-securitycenter", - "version": "0.1.0" + "version": "1.32.0" }, "snippets": [ { diff --git a/packages/google-cloud-securitycenter/samples/generated_samples/snippet_metadata_google.cloud.securitycenter.v1beta1.json b/packages/google-cloud-securitycenter/samples/generated_samples/snippet_metadata_google.cloud.securitycenter.v1beta1.json index a4c1295eaf99..0ed65184da36 100644 --- a/packages/google-cloud-securitycenter/samples/generated_samples/snippet_metadata_google.cloud.securitycenter.v1beta1.json +++ b/packages/google-cloud-securitycenter/samples/generated_samples/snippet_metadata_google.cloud.securitycenter.v1beta1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-securitycenter", - "version": "0.1.0" + "version": "1.32.0" }, "snippets": [ { diff --git a/packages/google-cloud-securitycenter/samples/generated_samples/snippet_metadata_google.cloud.securitycenter.v1p1beta1.json b/packages/google-cloud-securitycenter/samples/generated_samples/snippet_metadata_google.cloud.securitycenter.v1p1beta1.json index 774592ce11f7..4e368637c88c 100644 --- a/packages/google-cloud-securitycenter/samples/generated_samples/snippet_metadata_google.cloud.securitycenter.v1p1beta1.json +++ b/packages/google-cloud-securitycenter/samples/generated_samples/snippet_metadata_google.cloud.securitycenter.v1p1beta1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-securitycenter", - "version": "0.1.0" + "version": "1.32.0" }, "snippets": [ { diff --git a/packages/google-cloud-securitycenter/samples/generated_samples/snippet_metadata_google.cloud.securitycenter.v2.json b/packages/google-cloud-securitycenter/samples/generated_samples/snippet_metadata_google.cloud.securitycenter.v2.json index 6672c033126b..130474c1b2a3 100644 --- a/packages/google-cloud-securitycenter/samples/generated_samples/snippet_metadata_google.cloud.securitycenter.v2.json +++ b/packages/google-cloud-securitycenter/samples/generated_samples/snippet_metadata_google.cloud.securitycenter.v2.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-securitycenter", - "version": "0.1.0" + "version": "1.32.0" }, "snippets": [ { diff --git a/packages/google-cloud-securitycenter/tests/unit/gapic/securitycenter_v1/test_security_center.py b/packages/google-cloud-securitycenter/tests/unit/gapic/securitycenter_v1/test_security_center.py index c87ed051c8da..7762f8a850e5 100644 --- 
a/packages/google-cloud-securitycenter/tests/unit/gapic/securitycenter_v1/test_security_center.py +++ b/packages/google-cloud-securitycenter/tests/unit/gapic/securitycenter_v1/test_security_center.py @@ -89,6 +89,7 @@ security_health_analytics_custom_config, ) from google.cloud.securitycenter_v1.types import ( + group_membership, iam_binding, indicator, kernel_rootkit, @@ -102,6 +103,7 @@ security_posture, securitycenter_service, ) +from google.cloud.securitycenter_v1.types import toxic_combination, vulnerability from google.cloud.securitycenter_v1.types import external_system as gcs_external_system from google.cloud.securitycenter_v1.types import ( notification_config as gcs_notification_config, @@ -127,7 +129,6 @@ from google.cloud.securitycenter_v1.types import security_marks from google.cloud.securitycenter_v1.types import source from google.cloud.securitycenter_v1.types import source as gcs_source -from google.cloud.securitycenter_v1.types import vulnerability def client_cert_source_callback(): @@ -8551,13 +8552,13 @@ def test_group_assets_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.group_assets(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -9132,13 +9133,13 @@ def test_group_findings_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.group_findings(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -9621,13 +9622,13 @@ def test_list_assets_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_assets(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -10266,15 +10267,15 @@ def test_list_descendant_security_health_analytics_custom_modules_pager( RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_descendant_security_health_analytics_custom_modules( request={} ) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -10786,13 +10787,13 @@ def test_list_findings_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_findings(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -11380,13 +11381,13 @@ def test_list_mute_configs_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + 
expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_mute_configs(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -11973,13 +11974,13 @@ def test_list_notification_configs_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_notification_configs(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -12620,15 +12621,15 @@ def test_list_effective_security_health_analytics_custom_modules_pager( RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_effective_security_health_analytics_custom_modules( request={} ) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -13264,13 +13265,13 @@ def test_list_security_health_analytics_custom_modules_pager( RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_security_health_analytics_custom_modules(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -13848,13 +13849,13 @@ def test_list_sources_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_sources(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -21610,13 +21611,13 @@ def test_list_big_query_exports_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_big_query_exports(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -23243,6 +23244,11 @@ def test_create_finding_rest(request_type): "last_author": "last_author_value", "notebook_update_time": {}, }, + "toxic_combination": { + "attack_exposure_score": 0.2253, + "related_findings": ["related_findings_value1", "related_findings_value2"], + }, + "group_memberships": [{"group_type": 1, "group_id": "group_id_value"}], } # The version of a generated dependency at test runtime may differ from the version used during generation. 
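# Illustrative sketch (not an official sample) of populating the new v1 Finding
# fields that the REST test payload above exercises as a plain dict. Import
# paths mirror the test module's own imports; the names and values below are
# placeholders.
from google.cloud.securitycenter_v1.types import finding as gcs_finding
from google.cloud.securitycenter_v1.types import group_membership, toxic_combination

finding = gcs_finding.Finding(
    name="organizations/123/sources/456/findings/789",
    toxic_combination=toxic_combination.ToxicCombination(
        attack_exposure_score=0.2253,
        related_findings=["organizations/123/sources/456/findings/790"],
    ),
    group_memberships=[
        # group_type=1 mirrors the enum value used in the generated test payload.
        group_membership.GroupMembership(group_type=1, group_id="group_id_value"),
    ],
)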
# Delete any fields which are not present in the current runtime dependency @@ -34472,6 +34478,11 @@ def test_update_finding_rest(request_type): "last_author": "last_author_value", "notebook_update_time": {}, }, + "toxic_combination": { + "attack_exposure_score": 0.2253, + "related_findings": ["related_findings_value1", "related_findings_value2"], + }, + "group_memberships": [{"group_type": 1, "group_id": "group_id_value"}], } # The version of a generated dependency at test runtime may differ from the version used during generation. # Delete any fields which are not present in the current runtime dependency diff --git a/packages/google-cloud-securitycenter/tests/unit/gapic/securitycenter_v1beta1/test_security_center.py b/packages/google-cloud-securitycenter/tests/unit/gapic/securitycenter_v1beta1/test_security_center.py index 183a8c713675..3482d84b0330 100644 --- a/packages/google-cloud-securitycenter/tests/unit/gapic/securitycenter_v1beta1/test_security_center.py +++ b/packages/google-cloud-securitycenter/tests/unit/gapic/securitycenter_v1beta1/test_security_center.py @@ -3436,13 +3436,13 @@ def test_group_assets_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.group_assets(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -4012,13 +4012,13 @@ def test_group_findings_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.group_findings(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -4501,13 +4501,13 @@ def test_list_assets_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_assets(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -4996,13 +4996,13 @@ def test_list_findings_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_findings(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -5558,13 +5558,13 @@ def test_list_sources_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_sources(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 diff --git a/packages/google-cloud-securitycenter/tests/unit/gapic/securitycenter_v1p1beta1/test_security_center.py 
b/packages/google-cloud-securitycenter/tests/unit/gapic/securitycenter_v1p1beta1/test_security_center.py index 492a1027e77d..c4f0d6ac7521 100644 --- a/packages/google-cloud-securitycenter/tests/unit/gapic/securitycenter_v1p1beta1/test_security_center.py +++ b/packages/google-cloud-securitycenter/tests/unit/gapic/securitycenter_v1p1beta1/test_security_center.py @@ -4709,13 +4709,13 @@ def test_group_assets_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.group_assets(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -5290,13 +5290,13 @@ def test_group_findings_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.group_findings(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -5861,13 +5861,13 @@ def test_list_assets_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_assets(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -6438,13 +6438,13 @@ def test_list_findings_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_findings(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -7035,13 +7035,13 @@ def test_list_notification_configs_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_notification_configs(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -7607,13 +7607,13 @@ def test_list_sources_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_sources(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 diff --git a/packages/google-cloud-securitycenter/tests/unit/gapic/securitycenter_v2/test_security_center.py b/packages/google-cloud-securitycenter/tests/unit/gapic/securitycenter_v2/test_security_center.py index 0494cd96b11f..2bcf99cc136e 100644 --- a/packages/google-cloud-securitycenter/tests/unit/gapic/securitycenter_v2/test_security_center.py +++ 
b/packages/google-cloud-securitycenter/tests/unit/gapic/securitycenter_v2/test_security_center.py @@ -46,6 +46,7 @@ from google.iam.v1 import policy_pb2 # type: ignore from google.longrunning import operations_pb2 # type: ignore from google.oauth2 import service_account +from google.protobuf import duration_pb2 # type: ignore from google.protobuf import empty_pb2 # type: ignore from google.protobuf import field_mask_pb2 # type: ignore from google.protobuf import json_format @@ -73,6 +74,7 @@ attack_path, backup_disaster_recovery, bigquery_export, + cloud_armor, cloud_dlp_data_profile, cloud_dlp_inspection, compliance, @@ -83,6 +85,7 @@ exfiltration, ) from google.cloud.securitycenter_v2.types import ( + group_membership, iam_binding, indicator, kernel_rootkit, @@ -97,6 +100,11 @@ securitycenter_service, simulation, ) +from google.cloud.securitycenter_v2.types import ( + toxic_combination, + valued_resource, + vulnerability, +) from google.cloud.securitycenter_v2.types import external_system as gcs_external_system from google.cloud.securitycenter_v2.types import ( notification_config as gcs_notification_config, @@ -111,13 +119,13 @@ from google.cloud.securitycenter_v2.types import finding as gcs_finding from google.cloud.securitycenter_v2.types import mute_config from google.cloud.securitycenter_v2.types import mute_config as gcs_mute_config +from google.cloud.securitycenter_v2.types import notebook from google.cloud.securitycenter_v2.types import notification_config -from google.cloud.securitycenter_v2.types import org_policy, process +from google.cloud.securitycenter_v2.types import org_policy, process, resource from google.cloud.securitycenter_v2.types import resource_value_config from google.cloud.securitycenter_v2.types import security_marks from google.cloud.securitycenter_v2.types import source from google.cloud.securitycenter_v2.types import source as gcs_source -from google.cloud.securitycenter_v2.types import valued_resource, vulnerability def client_cert_source_callback(): @@ -6133,6 +6141,7 @@ def test_get_simulation(request_type, transport: str = "grpc"): # Designate an appropriate return value for the call. call.return_value = simulation.Simulation( name="name_value", + cloud_provider=resource.CloudProvider.GOOGLE_CLOUD_PLATFORM, ) response = client.get_simulation(request) @@ -6145,6 +6154,7 @@ def test_get_simulation(request_type, transport: str = "grpc"): # Establish that the response is the type that we expect. assert isinstance(response, simulation.Simulation) assert response.name == "name_value" + assert response.cloud_provider == resource.CloudProvider.GOOGLE_CLOUD_PLATFORM def test_get_simulation_empty_call(): @@ -6244,6 +6254,7 @@ async def test_get_simulation_empty_call_async(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( simulation.Simulation( name="name_value", + cloud_provider=resource.CloudProvider.GOOGLE_CLOUD_PLATFORM, ) ) response = await client.get_simulation() @@ -6318,6 +6329,7 @@ async def test_get_simulation_async( call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( simulation.Simulation( name="name_value", + cloud_provider=resource.CloudProvider.GOOGLE_CLOUD_PLATFORM, ) ) response = await client.get_simulation(request) @@ -6331,6 +6343,7 @@ async def test_get_simulation_async( # Establish that the response is the type that we expect. 
assert isinstance(response, simulation.Simulation) assert response.name == "name_value" + assert response.cloud_provider == resource.CloudProvider.GOOGLE_CLOUD_PLATFORM @pytest.mark.asyncio @@ -8121,6 +8134,7 @@ def test_get_resource_value_config(request_type, transport: str = "grpc"): resource_type="resource_type_value", scope="scope_value", description="description_value", + cloud_provider=resource.CloudProvider.GOOGLE_CLOUD_PLATFORM, ) response = client.get_resource_value_config(request) @@ -8138,6 +8152,7 @@ def test_get_resource_value_config(request_type, transport: str = "grpc"): assert response.resource_type == "resource_type_value" assert response.scope == "scope_value" assert response.description == "description_value" + assert response.cloud_provider == resource.CloudProvider.GOOGLE_CLOUD_PLATFORM def test_get_resource_value_config_empty_call(): @@ -8253,6 +8268,7 @@ async def test_get_resource_value_config_empty_call_async(): resource_type="resource_type_value", scope="scope_value", description="description_value", + cloud_provider=resource.CloudProvider.GOOGLE_CLOUD_PLATFORM, ) ) response = await client.get_resource_value_config() @@ -8334,6 +8350,7 @@ async def test_get_resource_value_config_async( resource_type="resource_type_value", scope="scope_value", description="description_value", + cloud_provider=resource.CloudProvider.GOOGLE_CLOUD_PLATFORM, ) ) response = await client.get_resource_value_config(request) @@ -8352,6 +8369,7 @@ async def test_get_resource_value_config_async( assert response.resource_type == "resource_type_value" assert response.scope == "scope_value" assert response.description == "description_value" + assert response.cloud_provider == resource.CloudProvider.GOOGLE_CLOUD_PLATFORM @pytest.mark.asyncio @@ -9317,13 +9335,13 @@ def test_group_findings_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.group_findings(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -9903,13 +9921,13 @@ def test_list_attack_paths_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_attack_paths(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -10496,13 +10514,13 @@ def test_list_big_query_exports_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_big_query_exports(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -11073,13 +11091,13 @@ def test_list_findings_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_findings(request={}) - assert pager._metadata == metadata + assert 
pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -11672,10 +11690,10 @@ def test_list_mute_configs_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () + expected_metadata = () pager = client.list_mute_configs(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -12262,13 +12280,13 @@ def test_list_notification_configs_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_notification_configs(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -12859,13 +12877,13 @@ def test_list_resource_value_configs_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_resource_value_configs(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -13431,13 +13449,13 @@ def test_list_sources_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_sources(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -14027,13 +14045,13 @@ def test_list_valued_resources_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_valued_resources(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -18080,6 +18098,7 @@ def test_update_resource_value_config(request_type, transport: str = "grpc"): resource_type="resource_type_value", scope="scope_value", description="description_value", + cloud_provider=resource.CloudProvider.GOOGLE_CLOUD_PLATFORM, ) response = client.update_resource_value_config(request) @@ -18097,6 +18116,7 @@ def test_update_resource_value_config(request_type, transport: str = "grpc"): assert response.resource_type == "resource_type_value" assert response.scope == "scope_value" assert response.description == "description_value" + assert response.cloud_provider == resource.CloudProvider.GOOGLE_CLOUD_PLATFORM def test_update_resource_value_config_empty_call(): @@ -18208,6 +18228,7 @@ async def test_update_resource_value_config_empty_call_async(): resource_type="resource_type_value", scope="scope_value", description="description_value", + cloud_provider=resource.CloudProvider.GOOGLE_CLOUD_PLATFORM, ) ) response = await client.update_resource_value_config() @@ -18289,6 +18310,7 @@ async def test_update_resource_value_config_async( resource_type="resource_type_value", scope="scope_value", description="description_value", + cloud_provider=resource.CloudProvider.GOOGLE_CLOUD_PLATFORM, ) ) response = 
await client.update_resource_value_config(request) @@ -18307,6 +18329,7 @@ async def test_update_resource_value_config_async( assert response.resource_type == "resource_type_value" assert response.scope == "scope_value" assert response.description == "description_value" + assert response.cloud_provider == resource.CloudProvider.GOOGLE_CLOUD_PLATFORM @pytest.mark.asyncio @@ -20648,6 +20671,38 @@ def test_create_finding_rest(request_type): } ], "load_balancers": [{"name": "name_value"}], + "cloud_armor": { + "security_policy": { + "name": "name_value", + "type_": "type__value", + "preview": True, + }, + "requests": { + "ratio": 0.543, + "short_term_allowed": 1934, + "long_term_allowed": 1806, + "long_term_denied": 1679, + }, + "adaptive_protection": {"confidence": 0.1038}, + "attack": { + "volume_pps": 1098, + "volume_bps": 1084, + "classification": "classification_value", + }, + "threat_vector": "threat_vector_value", + "duration": {"seconds": 751, "nanos": 543}, + }, + "notebook": { + "name": "name_value", + "service": "service_value", + "last_author": "last_author_value", + "notebook_update_time": {}, + }, + "toxic_combination": { + "attack_exposure_score": 0.2253, + "related_findings": ["related_findings_value1", "related_findings_value2"], + }, + "group_memberships": [{"group_type": 1, "group_id": "group_id_value"}], } # The version of a generated dependency at test runtime may differ from the version used during generation. # Delete any fields which are not present in the current runtime dependency @@ -23815,6 +23870,7 @@ def test_get_simulation_rest(request_type): # Designate an appropriate value for the returned response. return_value = simulation.Simulation( name="name_value", + cloud_provider=resource.CloudProvider.GOOGLE_CLOUD_PLATFORM, ) # Wrap the value into a proper Response obj @@ -23831,6 +23887,7 @@ def test_get_simulation_rest(request_type): # Establish that the response is the type that we expect. 
assert isinstance(response, simulation.Simulation) assert response.name == "name_value" + assert response.cloud_provider == resource.CloudProvider.GOOGLE_CLOUD_PLATFORM def test_get_simulation_rest_use_cached_wrapped_rpc(): @@ -25376,6 +25433,7 @@ def test_get_resource_value_config_rest(request_type): resource_type="resource_type_value", scope="scope_value", description="description_value", + cloud_provider=resource.CloudProvider.GOOGLE_CLOUD_PLATFORM, ) # Wrap the value into a proper Response obj @@ -25397,6 +25455,7 @@ def test_get_resource_value_config_rest(request_type): assert response.resource_type == "resource_type_value" assert response.scope == "scope_value" assert response.description == "description_value" + assert response.cloud_provider == resource.CloudProvider.GOOGLE_CLOUD_PLATFORM def test_get_resource_value_config_rest_use_cached_wrapped_rpc(): @@ -31877,6 +31936,38 @@ def test_update_finding_rest(request_type): } ], "load_balancers": [{"name": "name_value"}], + "cloud_armor": { + "security_policy": { + "name": "name_value", + "type_": "type__value", + "preview": True, + }, + "requests": { + "ratio": 0.543, + "short_term_allowed": 1934, + "long_term_allowed": 1806, + "long_term_denied": 1679, + }, + "adaptive_protection": {"confidence": 0.1038}, + "attack": { + "volume_pps": 1098, + "volume_bps": 1084, + "classification": "classification_value", + }, + "threat_vector": "threat_vector_value", + "duration": {"seconds": 751, "nanos": 543}, + }, + "notebook": { + "name": "name_value", + "service": "service_value", + "last_author": "last_author_value", + "notebook_update_time": {}, + }, + "toxic_combination": { + "attack_exposure_score": 0.2253, + "related_findings": ["related_findings_value1", "related_findings_value2"], + }, + "group_memberships": [{"group_type": 1, "group_id": "group_id_value"}], } # The version of a generated dependency at test runtime may differ from the version used during generation. 
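# Illustrative sketch (not an official sample) of the v2 Cloud Armor payload
# that the REST tests above express as a plain dict. The message names are
# taken from the `cloud_armor` types module imported by this test and should be
# treated as an assumption if they differ; the values mirror the test payload.
from google.protobuf import duration_pb2

from google.cloud.securitycenter_v2.types import cloud_armor

armor = cloud_armor.CloudArmor(
    security_policy=cloud_armor.SecurityPolicy(
        name="name_value",
        type_="type__value",  # proto field `type` is exposed as `type_`
        preview=True,
    ),
    requests=cloud_armor.Requests(
        ratio=0.543,
        short_term_allowed=1934,
        long_term_allowed=1806,
        long_term_denied=1679,
    ),
    adaptive_protection=cloud_armor.AdaptiveProtection(confidence=0.1038),
    attack=cloud_armor.Attack(
        volume_pps=1098, volume_bps=1084, classification="classification_value"
    ),
    threat_vector="threat_vector_value",
    duration=duration_pb2.Duration(seconds=751, nanos=543),
)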
# Delete any fields which are not present in the current runtime dependency @@ -33100,6 +33191,7 @@ def test_update_resource_value_config_rest(request_type): "description": "description_value", "create_time": {"seconds": 751, "nanos": 543}, "update_time": {}, + "cloud_provider": 1, "sensitive_data_protection_mapping": { "high_sensitivity_mapping": 1, "medium_sensitivity_mapping": 1, @@ -33188,6 +33280,7 @@ def get_message_fields(field): resource_type="resource_type_value", scope="scope_value", description="description_value", + cloud_provider=resource.CloudProvider.GOOGLE_CLOUD_PLATFORM, ) # Wrap the value into a proper Response obj @@ -33209,6 +33302,7 @@ def get_message_fields(field): assert response.resource_type == "resource_type_value" assert response.scope == "scope_value" assert response.description == "description_value" + assert response.cloud_provider == resource.CloudProvider.GOOGLE_CLOUD_PLATFORM def test_update_resource_value_config_rest_use_cached_wrapped_rpc(): diff --git a/packages/google-cloud-securitycentermanagement/CHANGELOG.md b/packages/google-cloud-securitycentermanagement/CHANGELOG.md index 28784dae2002..e7742fff0ec7 100644 --- a/packages/google-cloud-securitycentermanagement/CHANGELOG.md +++ b/packages/google-cloud-securitycentermanagement/CHANGELOG.md @@ -1,5 +1,39 @@ # Changelog +## [0.1.12](https://github.com/googleapis/google-cloud-python/compare/google-cloud-securitycentermanagement-v0.1.11...google-cloud-securitycentermanagement-v0.1.12) (2024-06-24) + + +### Features + +* add `show_eligible_modules_only` field to `GetSecurityCenterServiceRequest` message ([2e0f94e](https://github.com/googleapis/google-cloud-python/commit/2e0f94e0f96054a884af7fe8ae80612e04faa91a)) +* add `TOXIC_COMBINATION` to `FindingClass` enum ([2e0f94e](https://github.com/googleapis/google-cloud-python/commit/2e0f94e0f96054a884af7fe8ae80612e04faa91a)) + +## [0.1.11](https://github.com/googleapis/google-cloud-python/compare/google-cloud-securitycentermanagement-v0.1.10...google-cloud-securitycentermanagement-v0.1.11) (2024-06-19) + + +### Features + +* add an INGEST_ONLY EnablementState ([5363fa3](https://github.com/googleapis/google-cloud-python/commit/5363fa34a5c2bb524321d0b09c5f467e784ddb3c)) + +## [0.1.10](https://github.com/googleapis/google-cloud-python/compare/google-cloud-securitycentermanagement-v0.1.9...google-cloud-securitycentermanagement-v0.1.10) (2024-06-10) + + +### Documentation + +* minor docs formatting in `UpdateSecurityCenterServiceRequest.validate_only` ([01e36a7](https://github.com/googleapis/google-cloud-python/commit/01e36a7b4a7e58ff48fcf4dc1098f4447a7e70f6)) + +## [0.1.9](https://github.com/googleapis/google-cloud-python/compare/google-cloud-securitycentermanagement-v0.1.8...google-cloud-securitycentermanagement-v0.1.9) (2024-05-27) + + +### Features + +* add support for new Security Center Management APIs ([9896255](https://github.com/googleapis/google-cloud-python/commit/98962551bbe4c8901950a9769c7d5fd4369f2ef5)) + + +### Documentation + +* update comment formatting throughout API 
([9896255](https://github.com/googleapis/google-cloud-python/commit/98962551bbe4c8901950a9769c7d5fd4369f2ef5)) + ## [0.1.8](https://github.com/googleapis/google-cloud-python/compare/google-cloud-securitycentermanagement-v0.1.7...google-cloud-securitycentermanagement-v0.1.8) (2024-03-22) diff --git a/packages/google-cloud-securitycentermanagement/google/cloud/securitycentermanagement/__init__.py b/packages/google-cloud-securitycentermanagement/google/cloud/securitycentermanagement/__init__.py index 7958d222a182..95b9b5d5551c 100644 --- a/packages/google-cloud-securitycentermanagement/google/cloud/securitycentermanagement/__init__.py +++ b/packages/google-cloud-securitycentermanagement/google/cloud/securitycentermanagement/__init__.py @@ -36,6 +36,7 @@ GetEffectiveEventThreatDetectionCustomModuleRequest, GetEffectiveSecurityHealthAnalyticsCustomModuleRequest, GetEventThreatDetectionCustomModuleRequest, + GetSecurityCenterServiceRequest, GetSecurityHealthAnalyticsCustomModuleRequest, ListDescendantEventThreatDetectionCustomModulesRequest, ListDescendantEventThreatDetectionCustomModulesResponse, @@ -47,13 +48,17 @@ ListEffectiveSecurityHealthAnalyticsCustomModulesResponse, ListEventThreatDetectionCustomModulesRequest, ListEventThreatDetectionCustomModulesResponse, + ListSecurityCenterServicesRequest, + ListSecurityCenterServicesResponse, ListSecurityHealthAnalyticsCustomModulesRequest, ListSecurityHealthAnalyticsCustomModulesResponse, + SecurityCenterService, SecurityHealthAnalyticsCustomModule, SimulatedFinding, SimulateSecurityHealthAnalyticsCustomModuleRequest, SimulateSecurityHealthAnalyticsCustomModuleResponse, UpdateEventThreatDetectionCustomModuleRequest, + UpdateSecurityCenterServiceRequest, UpdateSecurityHealthAnalyticsCustomModuleRequest, ValidateEventThreatDetectionCustomModuleRequest, ValidateEventThreatDetectionCustomModuleResponse, @@ -73,6 +78,7 @@ "GetEffectiveEventThreatDetectionCustomModuleRequest", "GetEffectiveSecurityHealthAnalyticsCustomModuleRequest", "GetEventThreatDetectionCustomModuleRequest", + "GetSecurityCenterServiceRequest", "GetSecurityHealthAnalyticsCustomModuleRequest", "ListDescendantEventThreatDetectionCustomModulesRequest", "ListDescendantEventThreatDetectionCustomModulesResponse", @@ -84,13 +90,17 @@ "ListEffectiveSecurityHealthAnalyticsCustomModulesResponse", "ListEventThreatDetectionCustomModulesRequest", "ListEventThreatDetectionCustomModulesResponse", + "ListSecurityCenterServicesRequest", + "ListSecurityCenterServicesResponse", "ListSecurityHealthAnalyticsCustomModulesRequest", "ListSecurityHealthAnalyticsCustomModulesResponse", + "SecurityCenterService", "SecurityHealthAnalyticsCustomModule", "SimulatedFinding", "SimulateSecurityHealthAnalyticsCustomModuleRequest", "SimulateSecurityHealthAnalyticsCustomModuleResponse", "UpdateEventThreatDetectionCustomModuleRequest", + "UpdateSecurityCenterServiceRequest", "UpdateSecurityHealthAnalyticsCustomModuleRequest", "ValidateEventThreatDetectionCustomModuleRequest", "ValidateEventThreatDetectionCustomModuleResponse", diff --git a/packages/google-cloud-securitycentermanagement/google/cloud/securitycentermanagement/gapic_version.py b/packages/google-cloud-securitycentermanagement/google/cloud/securitycentermanagement/gapic_version.py index 558c8aab67c5..17bbab4c1877 100644 --- a/packages/google-cloud-securitycentermanagement/google/cloud/securitycentermanagement/gapic_version.py +++ 
b/packages/google-cloud-securitycentermanagement/google/cloud/securitycentermanagement/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.0.0" # {x-release-please-version} +__version__ = "0.1.12" # {x-release-please-version} diff --git a/packages/google-cloud-securitycentermanagement/google/cloud/securitycentermanagement_v1/__init__.py b/packages/google-cloud-securitycentermanagement/google/cloud/securitycentermanagement_v1/__init__.py index cc27c8a38f21..738ef14cc631 100644 --- a/packages/google-cloud-securitycentermanagement/google/cloud/securitycentermanagement_v1/__init__.py +++ b/packages/google-cloud-securitycentermanagement/google/cloud/securitycentermanagement_v1/__init__.py @@ -34,6 +34,7 @@ GetEffectiveEventThreatDetectionCustomModuleRequest, GetEffectiveSecurityHealthAnalyticsCustomModuleRequest, GetEventThreatDetectionCustomModuleRequest, + GetSecurityCenterServiceRequest, GetSecurityHealthAnalyticsCustomModuleRequest, ListDescendantEventThreatDetectionCustomModulesRequest, ListDescendantEventThreatDetectionCustomModulesResponse, @@ -45,13 +46,17 @@ ListEffectiveSecurityHealthAnalyticsCustomModulesResponse, ListEventThreatDetectionCustomModulesRequest, ListEventThreatDetectionCustomModulesResponse, + ListSecurityCenterServicesRequest, + ListSecurityCenterServicesResponse, ListSecurityHealthAnalyticsCustomModulesRequest, ListSecurityHealthAnalyticsCustomModulesResponse, + SecurityCenterService, SecurityHealthAnalyticsCustomModule, SimulatedFinding, SimulateSecurityHealthAnalyticsCustomModuleRequest, SimulateSecurityHealthAnalyticsCustomModuleResponse, UpdateEventThreatDetectionCustomModuleRequest, + UpdateSecurityCenterServiceRequest, UpdateSecurityHealthAnalyticsCustomModuleRequest, ValidateEventThreatDetectionCustomModuleRequest, ValidateEventThreatDetectionCustomModuleResponse, @@ -70,6 +75,7 @@ "GetEffectiveEventThreatDetectionCustomModuleRequest", "GetEffectiveSecurityHealthAnalyticsCustomModuleRequest", "GetEventThreatDetectionCustomModuleRequest", + "GetSecurityCenterServiceRequest", "GetSecurityHealthAnalyticsCustomModuleRequest", "ListDescendantEventThreatDetectionCustomModulesRequest", "ListDescendantEventThreatDetectionCustomModulesResponse", @@ -81,14 +87,18 @@ "ListEffectiveSecurityHealthAnalyticsCustomModulesResponse", "ListEventThreatDetectionCustomModulesRequest", "ListEventThreatDetectionCustomModulesResponse", + "ListSecurityCenterServicesRequest", + "ListSecurityCenterServicesResponse", "ListSecurityHealthAnalyticsCustomModulesRequest", "ListSecurityHealthAnalyticsCustomModulesResponse", "SecurityCenterManagementClient", + "SecurityCenterService", "SecurityHealthAnalyticsCustomModule", "SimulateSecurityHealthAnalyticsCustomModuleRequest", "SimulateSecurityHealthAnalyticsCustomModuleResponse", "SimulatedFinding", "UpdateEventThreatDetectionCustomModuleRequest", + "UpdateSecurityCenterServiceRequest", "UpdateSecurityHealthAnalyticsCustomModuleRequest", "ValidateEventThreatDetectionCustomModuleRequest", "ValidateEventThreatDetectionCustomModuleResponse", diff --git a/packages/google-cloud-securitycentermanagement/google/cloud/securitycentermanagement_v1/gapic_metadata.json b/packages/google-cloud-securitycentermanagement/google/cloud/securitycentermanagement_v1/gapic_metadata.json index bc19f51f6a72..c0e4de2bab59 100644 --- a/packages/google-cloud-securitycentermanagement/google/cloud/securitycentermanagement_v1/gapic_metadata.json +++ 
b/packages/google-cloud-securitycentermanagement/google/cloud/securitycentermanagement_v1/gapic_metadata.json @@ -45,6 +45,11 @@ "get_event_threat_detection_custom_module" ] }, + "GetSecurityCenterService": { + "methods": [ + "get_security_center_service" + ] + }, "GetSecurityHealthAnalyticsCustomModule": { "methods": [ "get_security_health_analytics_custom_module" @@ -75,6 +80,11 @@ "list_event_threat_detection_custom_modules" ] }, + "ListSecurityCenterServices": { + "methods": [ + "list_security_center_services" + ] + }, "ListSecurityHealthAnalyticsCustomModules": { "methods": [ "list_security_health_analytics_custom_modules" @@ -90,6 +100,11 @@ "update_event_threat_detection_custom_module" ] }, + "UpdateSecurityCenterService": { + "methods": [ + "update_security_center_service" + ] + }, "UpdateSecurityHealthAnalyticsCustomModule": { "methods": [ "update_security_health_analytics_custom_module" @@ -140,6 +155,11 @@ "get_event_threat_detection_custom_module" ] }, + "GetSecurityCenterService": { + "methods": [ + "get_security_center_service" + ] + }, "GetSecurityHealthAnalyticsCustomModule": { "methods": [ "get_security_health_analytics_custom_module" @@ -170,6 +190,11 @@ "list_event_threat_detection_custom_modules" ] }, + "ListSecurityCenterServices": { + "methods": [ + "list_security_center_services" + ] + }, "ListSecurityHealthAnalyticsCustomModules": { "methods": [ "list_security_health_analytics_custom_modules" @@ -185,6 +210,11 @@ "update_event_threat_detection_custom_module" ] }, + "UpdateSecurityCenterService": { + "methods": [ + "update_security_center_service" + ] + }, "UpdateSecurityHealthAnalyticsCustomModule": { "methods": [ "update_security_health_analytics_custom_module" @@ -235,6 +265,11 @@ "get_event_threat_detection_custom_module" ] }, + "GetSecurityCenterService": { + "methods": [ + "get_security_center_service" + ] + }, "GetSecurityHealthAnalyticsCustomModule": { "methods": [ "get_security_health_analytics_custom_module" @@ -265,6 +300,11 @@ "list_event_threat_detection_custom_modules" ] }, + "ListSecurityCenterServices": { + "methods": [ + "list_security_center_services" + ] + }, "ListSecurityHealthAnalyticsCustomModules": { "methods": [ "list_security_health_analytics_custom_modules" @@ -280,6 +320,11 @@ "update_event_threat_detection_custom_module" ] }, + "UpdateSecurityCenterService": { + "methods": [ + "update_security_center_service" + ] + }, "UpdateSecurityHealthAnalyticsCustomModule": { "methods": [ "update_security_health_analytics_custom_module" diff --git a/packages/google-cloud-securitycentermanagement/google/cloud/securitycentermanagement_v1/gapic_version.py b/packages/google-cloud-securitycentermanagement/google/cloud/securitycentermanagement_v1/gapic_version.py index 558c8aab67c5..17bbab4c1877 100644 --- a/packages/google-cloud-securitycentermanagement/google/cloud/securitycentermanagement_v1/gapic_version.py +++ b/packages/google-cloud-securitycentermanagement/google/cloud/securitycentermanagement_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
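# Illustrative sketch only: the gapic_metadata entries above register the new
# Get/List/UpdateSecurityCenterService RPCs. The client and method names come
# from that metadata; the flattened `parent` argument and the parent format are
# assumptions based on the other list methods in this service.
from google.cloud import securitycentermanagement_v1

client = securitycentermanagement_v1.SecurityCenterManagementClient()
parent = "projects/my-project/locations/global"  # hypothetical parent
for service in client.list_security_center_services(parent=parent):
    print(service.name)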
# -__version__ = "0.0.0" # {x-release-please-version} +__version__ = "0.1.12" # {x-release-please-version} diff --git a/packages/google-cloud-securitycentermanagement/google/cloud/securitycentermanagement_v1/services/security_center_management/async_client.py b/packages/google-cloud-securitycentermanagement/google/cloud/securitycentermanagement_v1/services/security_center_management/async_client.py index b66ea2201a49..3d4af5708ac4 100644 --- a/packages/google-cloud-securitycentermanagement/google/cloud/securitycentermanagement_v1/services/security_center_management/async_client.py +++ b/packages/google-cloud-securitycentermanagement/google/cloud/securitycentermanagement_v1/services/security_center_management/async_client.py @@ -92,6 +92,12 @@ class SecurityCenterManagementAsyncClient: ) finding_path = staticmethod(SecurityCenterManagementClient.finding_path) parse_finding_path = staticmethod(SecurityCenterManagementClient.parse_finding_path) + security_center_service_path = staticmethod( + SecurityCenterManagementClient.security_center_service_path + ) + parse_security_center_service_path = staticmethod( + SecurityCenterManagementClient.parse_security_center_service_path + ) security_health_analytics_custom_module_path = staticmethod( SecurityCenterManagementClient.security_health_analytics_custom_module_path ) @@ -352,12 +358,12 @@ async def sample_list_effective_security_health_analytics_custom_modules(): Security Health Analytics custom modules. parent (:class:`str`): - Required. Name of parent to list - effective custom modules. Its format is - "organizations/{organization}/locations/{location}", - "folders/{folder}/locations/{location}", - or - "projects/{project}/locations/{location}" + Required. Name of parent to list effective custom + modules. specified in one of the following formats: + + - ``organizations/{organization}/locations/{location}`` + - ``folders/{folder}/locations/{location}`` or + ``projects/{project}/locations/{location}`` This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this @@ -487,13 +493,12 @@ async def sample_get_effective_security_health_analytics_custom_module(): The request object. Message for getting a EffectiveSecurityHealthAnalyticsCustomModule name (:class:`str`): - Required. The resource name of the SHA custom module. - - Its format is: + Required. The full resource name of the custom module, + specified in one of the following formats: - - "organizations/{organization}/locations/{location}/effectiveSecurityHealthAnalyticsCustomModules/{module_id}". - - "folders/{folder}/locations/{location}/effectiveSecurityHealthAnalyticsCustomModules/{module_id}". - - "projects/{project}/locations/{location}/effectiveSecurityHealthAnalyticsCustomModules/{module_id}". + - ``organizations/organization/{location}/effectiveSecurityHealthAnalyticsCustomModules/{effective_security_health_analytics_custom_module}`` + - ``folders/folder/{location}/effectiveSecurityHealthAnalyticsCustomModules/{effective_security_health_analytics_custom_module}`` + - ``projects/project/{location}/effectiveSecurityHealthAnalyticsCustomModules/{effective_security_health_analytics_custom_module}`` This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this @@ -625,12 +630,13 @@ async def sample_list_security_health_analytics_custom_modules(): The request object. Request message for listing Security Health Analytics custom modules. parent (:class:`str`): - Required. Name of parent to list - custom modules. 
Its format is - "organizations/{organization}/locations/{location}", - "folders/{folder}/locations/{location}", - or - "projects/{project}/locations/{location}" + Required. Name of parent organization, folder, or + project in which to list custom modules, specified in + one of the following formats: + + - ``organizations/{organization}/locations/{location}`` + - ``folders/{folder}/locations/{location}`` + - ``projects/{project}/locations/{location}`` This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this @@ -761,12 +767,13 @@ async def sample_list_descendant_security_health_analytics_custom_modules(): descendant Security Health Analytics custom modules. parent (:class:`str`): - Required. Name of parent to list - custom modules. Its format is - "organizations/{organization}/locations/{location}", - "folders/{folder}/locations/{location}", - or - "projects/{project}/locations/{location}" + Required. Name of the parent organization, folder, or + project in which to list custom modules, specified in + one of the following formats: + + - ``organizations/{organization}/locations/{location}`` + - ``folders/{folder}/locations/{location}`` + - ``projects/{project}/locations/{location}`` This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this @@ -1025,12 +1032,13 @@ async def sample_create_security_health_analytics_custom_module(): The request object. Message for creating a SecurityHealthAnalyticsCustomModule parent (:class:`str`): - Required. Name of the parent for the - module. Its format is - "organizations/{organization}/locations/{location}", - "folders/{folder}/locations/{location}", - or - "projects/{project}/locations/{location}" + Required. Name of the parent organization, folder, or + project of the module, specified in one of the following + formats: + + - ``organizations/{organization}/locations/{location}`` + - ``folders/{folder}/locations/{location}`` + - ``projects/{project}/locations/{location}`` This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this @@ -1319,9 +1327,9 @@ async def sample_delete_security_health_analytics_custom_module(): Its format is: - - "organizations/{organization}/locations/{location}/securityHealthAnalyticsCustomModules/{security_health_analytics_custom_module}". - - "folders/{folder}/locations/{location}/securityHealthAnalyticsCustomModules/{security_health_analytics_custom_module}". - - "projects/{project}/locations/{location}/securityHealthAnalyticsCustomModules/{security_health_analytics_custom_module}". + - ``organizations/{organization}/locations/{location}/securityHealthAnalyticsCustomModules/{security_health_analytics_custom_module}``. + - ``folders/{folder}/locations/{location}/securityHealthAnalyticsCustomModules/{security_health_analytics_custom_module}``. + - ``projects/{project}/locations/{location}/securityHealthAnalyticsCustomModules/{security_health_analytics_custom_module}``. This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this @@ -1579,12 +1587,11 @@ async def sample_list_effective_event_threat_detection_custom_modules(): The request object. Request message for listing effective Event Threat Detection custom modules. parent (:class:`str`): - Required. Name of parent to list - effective custom modules. 
Its format is - "organizations/{organization}/locations/{location}", - "folders/{folder}/locations/{location}", - or - "projects/{project}/locations/{location}" + Required. Name of parent to list effective custom + modules. Its format is + ``organizations/{organization}/locations/{location}``, + ``folders/{folder}/locations/{location}``, or + ``projects/{project}/locations/{location}`` This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this @@ -1724,9 +1731,9 @@ async def sample_get_effective_event_threat_detection_custom_module(): Its format is: - - "organizations/{organization}/locations/{location}/effectiveEventThreatDetectionCustomModules/{effective_event_threat_detection_custom_module}". - - "folders/{folder}/locations/{location}/effectiveEventThreatDetectionCustomModules/{effective_event_threat_detection_custom_module}". - - "projects/{project}/locations/{location}/effectiveEventThreatDetectionCustomModules/{effective_event_threat_detection_custom_module}". + - ``organizations/{organization}/locations/{location}/effectiveEventThreatDetectionCustomModules/{effective_event_threat_detection_custom_module}``. + - ``folders/{folder}/locations/{location}/effectiveEventThreatDetectionCustomModules/{effective_event_threat_detection_custom_module}``. + - ``projects/{project}/locations/{location}/effectiveEventThreatDetectionCustomModules/{effective_event_threat_detection_custom_module}``. This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this @@ -1854,12 +1861,11 @@ async def sample_list_event_threat_detection_custom_modules(): The request object. Request message for listing Event Threat Detection custom modules. parent (:class:`str`): - Required. Name of parent to list - custom modules. Its format is - "organizations/{organization}/locations/{location}", - "folders/{folder}/locations/{location}", - or - "projects/{project}/locations/{location}" + Required. Name of parent to list custom modules. Its + format is + ``organizations/{organization}/locations/{location}``, + ``folders/{folder}/locations/{location}``, or + ``projects/{project}/locations/{location}`` This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this @@ -1992,12 +1998,11 @@ async def sample_list_descendant_event_threat_detection_custom_modules(): descendant Event Threat Detection custom modules. parent (:class:`str`): - Required. Name of parent to list - custom modules. Its format is - "organizations/{organization}/locations/{location}", - "folders/{folder}/locations/{location}", - or - "projects/{project}/locations/{location}" + Required. Name of parent to list custom modules. Its + format is + ``organizations/{organization}/locations/{location}``, + ``folders/{folder}/locations/{location}``, or + ``projects/{project}/locations/{location}`` This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this @@ -2130,9 +2135,9 @@ async def sample_get_event_threat_detection_custom_module(): Its format is: - - "organizations/{organization}/locations/{location}/eventThreatDetectionCustomModules/{event_threat_detection_custom_module}". - - "folders/{folder}/locations/{location}/eventThreatDetectionCustomModules/{event_threat_detection_custom_module}". - - "projects/{project}/locations/{location}/eventThreatDetectionCustomModules/{event_threat_detection_custom_module}". 
+ - ``organizations/{organization}/locations/{location}/eventThreatDetectionCustomModules/{event_threat_detection_custom_module}``. + - ``folders/{folder}/locations/{location}/eventThreatDetectionCustomModules/{event_threat_detection_custom_module}``. + - ``projects/{project}/locations/{location}/eventThreatDetectionCustomModules/{event_threat_detection_custom_module}``. This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this @@ -2260,12 +2265,10 @@ async def sample_create_event_threat_detection_custom_module(): The request object. Message for creating a EventThreatDetectionCustomModule parent (:class:`str`): - Required. Name of parent for the - module. Its format is - "organizations/{organization}/locations/{location}", - "folders/{folder}/locations/{location}", - or - "projects/{project}/locations/{location}" + Required. Name of parent for the module. Its format is + ``organizations/{organization}/locations/{location}``, + ``folders/{folder}/locations/{location}``, or + ``projects/{project}/locations/{location}`` This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this @@ -2550,9 +2553,9 @@ async def sample_delete_event_threat_detection_custom_module(): Its format is: - - "organizations/{organization}/locations/{location}/eventThreatDetectionCustomModules/{event_threat_detection_custom_module}". - - "folders/{folder}/locations/{location}/eventThreatDetectionCustomModules/{event_threat_detection_custom_module}". - - "projects/{project}/locations/{location}/eventThreatDetectionCustomModules/{event_threat_detection_custom_module}". + - ``organizations/{organization}/locations/{location}/eventThreatDetectionCustomModules/{event_threat_detection_custom_module}``. + - ``folders/{folder}/locations/{location}/eventThreatDetectionCustomModules/{event_threat_detection_custom_module}``. + - ``projects/{project}/locations/{location}/eventThreatDetectionCustomModules/{event_threat_detection_custom_module}``. This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this @@ -2708,6 +2711,413 @@ async def sample_validate_event_threat_detection_custom_module(): # Done; return the response. return response + async def get_security_center_service( + self, + request: Optional[ + Union[security_center_management.GetSecurityCenterServiceRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> security_center_management.SecurityCenterService: + r"""Gets service settings for the specified Security + Command Center service. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import securitycentermanagement_v1 + + async def sample_get_security_center_service(): + # Create a client + client = securitycentermanagement_v1.SecurityCenterManagementAsyncClient() + + # Initialize request argument(s) + request = securitycentermanagement_v1.GetSecurityCenterServiceRequest( + name="name_value", + ) + + # Make the request + response = await client.get_security_center_service(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.securitycentermanagement_v1.types.GetSecurityCenterServiceRequest, dict]]): + The request object. Request message for getting a + Security Command Center service. + name (:class:`str`): + Required. The Security Command Center service to + retrieve. + + Formats: + + - organizations/{organization}/locations/{location}/securityCenterServices/{service} + - folders/{folder}/locations/{location}/securityCenterServices/{service} + - projects/{project}/locations/{location}/securityCenterServices/{service} + + The possible values for id {service} are: + + - container-threat-detection + - event-threat-detection + - security-health-analytics + - vm-threat-detection + - web-security-scanner + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.securitycentermanagement_v1.types.SecurityCenterService: + Represents a particular Security + Command Center service. This includes + settings information such as top-level + enablement in addition to individual + module settings. Service settings can be + configured at the organization, folder, + or project level. Service settings at + the organization or folder level are + inherited by those in child folders and + projects. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance( + request, security_center_management.GetSecurityCenterServiceRequest + ): + request = security_center_management.GetSecurityCenterServiceRequest( + request + ) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.get_security_center_service + ] + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_security_center_services( + self, + request: Optional[ + Union[security_center_management.ListSecurityCenterServicesRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListSecurityCenterServicesAsyncPager: + r"""Returns a list of all Security Command Center + services for the given parent. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import securitycentermanagement_v1 + + async def sample_list_security_center_services(): + # Create a client + client = securitycentermanagement_v1.SecurityCenterManagementAsyncClient() + + # Initialize request argument(s) + request = securitycentermanagement_v1.ListSecurityCenterServicesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_security_center_services(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.securitycentermanagement_v1.types.ListSecurityCenterServicesRequest, dict]]): + The request object. Request message for listing Security + Command Center services. + parent (:class:`str`): + Required. The name of the parent to list Security + Command Center services. + + Formats: + + - organizations/{organization}/locations/{location} + - folders/{folder}/locations/{location} + - projects/{project}/locations/{location} + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.securitycentermanagement_v1.services.security_center_management.pagers.ListSecurityCenterServicesAsyncPager: + Response message for listing Security + Command Center services. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
+ if not isinstance( + request, security_center_management.ListSecurityCenterServicesRequest + ): + request = security_center_management.ListSecurityCenterServicesRequest( + request + ) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.list_security_center_services + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListSecurityCenterServicesAsyncPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def update_security_center_service( + self, + request: Optional[ + Union[security_center_management.UpdateSecurityCenterServiceRequest, dict] + ] = None, + *, + security_center_service: Optional[ + security_center_management.SecurityCenterService + ] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> security_center_management.SecurityCenterService: + r"""Updates a Security Command Center service using the + given update mask. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import securitycentermanagement_v1 + + async def sample_update_security_center_service(): + # Create a client + client = securitycentermanagement_v1.SecurityCenterManagementAsyncClient() + + # Initialize request argument(s) + request = securitycentermanagement_v1.UpdateSecurityCenterServiceRequest( + ) + + # Make the request + response = await client.update_security_center_service(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.securitycentermanagement_v1.types.UpdateSecurityCenterServiceRequest, dict]]): + The request object. Request message for updating a + Security Command Center service. + security_center_service (:class:`google.cloud.securitycentermanagement_v1.types.SecurityCenterService`): + Required. The updated service. + This corresponds to the ``security_center_service`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): + Required. The list of fields to be updated. 
Possible + values: + + - "intended_enablement_state" + - "modules" + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.securitycentermanagement_v1.types.SecurityCenterService: + Represents a particular Security + Command Center service. This includes + settings information such as top-level + enablement in addition to individual + module settings. Service settings can be + configured at the organization, folder, + or project level. Service settings at + the organization or folder level are + inherited by those in child folders and + projects. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([security_center_service, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance( + request, security_center_management.UpdateSecurityCenterServiceRequest + ): + request = security_center_management.UpdateSecurityCenterServiceRequest( + request + ) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if security_center_service is not None: + request.security_center_service = security_center_service + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.update_security_center_service + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + ( + ( + "security_center_service.name", + request.security_center_service.name, + ), + ) + ), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + async def get_location( self, request: Optional[locations_pb2.GetLocationRequest] = None, diff --git a/packages/google-cloud-securitycentermanagement/google/cloud/securitycentermanagement_v1/services/security_center_management/client.py b/packages/google-cloud-securitycentermanagement/google/cloud/securitycentermanagement_v1/services/security_center_management/client.py index d6c0738a6290..c54e80c8e4a8 100644 --- a/packages/google-cloud-securitycentermanagement/google/cloud/securitycentermanagement_v1/services/security_center_management/client.py +++ b/packages/google-cloud-securitycentermanagement/google/cloud/securitycentermanagement_v1/services/security_center_management/client.py @@ -284,6 +284,28 @@ def parse_finding_path(path: str) -> Dict[str, str]: ) return m.groupdict() if m else {} + @staticmethod + def security_center_service_path( + project: str, + location: str, + service: str, + ) -> str: + """Returns a fully-qualified security_center_service string.""" + return "projects/{project}/locations/{location}/securityCenterServices/{service}".format( + project=project, + location=location, + service=service, + ) + + @staticmethod + def parse_security_center_service_path(path: str) -> Dict[str, str]: + """Parses a security_center_service path into its component segments.""" + m = re.match( + r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/securityCenterServices/(?P<service>.+?)$", + path, + ) + return m.groupdict() if m else {} + @staticmethod def security_health_analytics_custom_module_path( organization: str, @@ -840,12 +862,12 @@ def sample_list_effective_security_health_analytics_custom_modules(): Security Health Analytics custom modules. parent (str): - Required. Name of parent to list - effective custom modules. Its format is - "organizations/{organization}/locations/{location}", - "folders/{folder}/locations/{location}", - or - "projects/{project}/locations/{location}" + Required. Name of parent to list effective custom + modules, specified in one of the following formats: + + - ``organizations/{organization}/locations/{location}`` + - ``folders/{folder}/locations/{location}`` + - ``projects/{project}/locations/{location}`` This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this @@ -974,13 +996,12 @@ def sample_get_effective_security_health_analytics_custom_module(): The request object. Message for getting a EffectiveSecurityHealthAnalyticsCustomModule name (str): - Required. The resource name of the SHA custom module. + Required. The full resource name of the custom module, + specified in one of the following formats: - Its format is: - - - "organizations/{organization}/locations/{location}/effectiveSecurityHealthAnalyticsCustomModules/{module_id}". - - "folders/{folder}/locations/{location}/effectiveSecurityHealthAnalyticsCustomModules/{module_id}". - - "projects/{project}/locations/{location}/effectiveSecurityHealthAnalyticsCustomModules/{module_id}".
+ - ``organizations/{organization}/locations/{location}/effectiveSecurityHealthAnalyticsCustomModules/{effective_security_health_analytics_custom_module}`` + - ``folders/{folder}/locations/{location}/effectiveSecurityHealthAnalyticsCustomModules/{effective_security_health_analytics_custom_module}`` + - ``projects/{project}/locations/{location}/effectiveSecurityHealthAnalyticsCustomModules/{effective_security_health_analytics_custom_module}`` This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this @@ -1111,12 +1132,13 @@ def sample_list_security_health_analytics_custom_modules(): The request object. Request message for listing Security Health Analytics custom modules. parent (str): - Required. Name of parent to list - custom modules. Its format is - "organizations/{organization}/locations/{location}", - "folders/{folder}/locations/{location}", - or - "projects/{project}/locations/{location}" + Required. Name of parent organization, folder, or + project in which to list custom modules, specified in + one of the following formats: + + - ``organizations/{organization}/locations/{location}`` + - ``folders/{folder}/locations/{location}`` + - ``projects/{project}/locations/{location}`` This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this @@ -1246,12 +1268,13 @@ def sample_list_descendant_security_health_analytics_custom_modules(): descendant Security Health Analytics custom modules. parent (str): - Required. Name of parent to list - custom modules. Its format is - "organizations/{organization}/locations/{location}", - "folders/{folder}/locations/{location}", - or - "projects/{project}/locations/{location}" + Required. Name of the parent organization, folder, or + project in which to list custom modules, specified in + one of the following formats: + + - ``organizations/{organization}/locations/{location}`` + - ``folders/{folder}/locations/{location}`` + - ``projects/{project}/locations/{location}`` This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this @@ -1508,12 +1531,13 @@ def sample_create_security_health_analytics_custom_module(): The request object. Message for creating a SecurityHealthAnalyticsCustomModule parent (str): - Required. Name of the parent for the - module. Its format is - "organizations/{organization}/locations/{location}", - "folders/{folder}/locations/{location}", - or - "projects/{project}/locations/{location}" + Required. Name of the parent organization, folder, or + project of the module, specified in one of the following + formats: + + - ``organizations/{organization}/locations/{location}`` + - ``folders/{folder}/locations/{location}`` + - ``projects/{project}/locations/{location}`` This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this @@ -1800,9 +1824,9 @@ def sample_delete_security_health_analytics_custom_module(): Its format is: - - "organizations/{organization}/locations/{location}/securityHealthAnalyticsCustomModules/{security_health_analytics_custom_module}". - - "folders/{folder}/locations/{location}/securityHealthAnalyticsCustomModules/{security_health_analytics_custom_module}". - - "projects/{project}/locations/{location}/securityHealthAnalyticsCustomModules/{security_health_analytics_custom_module}". + - ``organizations/{organization}/locations/{location}/securityHealthAnalyticsCustomModules/{security_health_analytics_custom_module}``.
+ - ``folders/{folder}/locations/{location}/securityHealthAnalyticsCustomModules/{security_health_analytics_custom_module}``. + - ``projects/{project}/locations/{location}/securityHealthAnalyticsCustomModules/{security_health_analytics_custom_module}``. This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this @@ -2058,12 +2082,11 @@ def sample_list_effective_event_threat_detection_custom_modules(): The request object. Request message for listing effective Event Threat Detection custom modules. parent (str): - Required. Name of parent to list - effective custom modules. Its format is - "organizations/{organization}/locations/{location}", - "folders/{folder}/locations/{location}", - or - "projects/{project}/locations/{location}" + Required. Name of parent to list effective custom + modules. Its format is + ``organizations/{organization}/locations/{location}``, + ``folders/{folder}/locations/{location}``, or + ``projects/{project}/locations/{location}`` This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this @@ -2202,9 +2225,9 @@ def sample_get_effective_event_threat_detection_custom_module(): Its format is: - - "organizations/{organization}/locations/{location}/effectiveEventThreatDetectionCustomModules/{effective_event_threat_detection_custom_module}". - - "folders/{folder}/locations/{location}/effectiveEventThreatDetectionCustomModules/{effective_event_threat_detection_custom_module}". - - "projects/{project}/locations/{location}/effectiveEventThreatDetectionCustomModules/{effective_event_threat_detection_custom_module}". + - ``organizations/{organization}/locations/{location}/effectiveEventThreatDetectionCustomModules/{effective_event_threat_detection_custom_module}``. + - ``folders/{folder}/locations/{location}/effectiveEventThreatDetectionCustomModules/{effective_event_threat_detection_custom_module}``. + - ``projects/{project}/locations/{location}/effectiveEventThreatDetectionCustomModules/{effective_event_threat_detection_custom_module}``. This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this @@ -2331,12 +2354,11 @@ def sample_list_event_threat_detection_custom_modules(): The request object. Request message for listing Event Threat Detection custom modules. parent (str): - Required. Name of parent to list - custom modules. Its format is - "organizations/{organization}/locations/{location}", - "folders/{folder}/locations/{location}", - or - "projects/{project}/locations/{location}" + Required. Name of parent to list custom modules. Its + format is + ``organizations/{organization}/locations/{location}``, + ``folders/{folder}/locations/{location}``, or + ``projects/{project}/locations/{location}`` This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this @@ -2468,12 +2490,11 @@ def sample_list_descendant_event_threat_detection_custom_modules(): descendant Event Threat Detection custom modules. parent (str): - Required. Name of parent to list - custom modules. Its format is - "organizations/{organization}/locations/{location}", - "folders/{folder}/locations/{location}", - or - "projects/{project}/locations/{location}" + Required. Name of parent to list custom modules. 
Its + format is + ``organizations/{organization}/locations/{location}``, + ``folders/{folder}/locations/{location}``, or + ``projects/{project}/locations/{location}`` This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this @@ -2605,9 +2626,9 @@ def sample_get_event_threat_detection_custom_module(): Its format is: - - "organizations/{organization}/locations/{location}/eventThreatDetectionCustomModules/{event_threat_detection_custom_module}". - - "folders/{folder}/locations/{location}/eventThreatDetectionCustomModules/{event_threat_detection_custom_module}". - - "projects/{project}/locations/{location}/eventThreatDetectionCustomModules/{event_threat_detection_custom_module}". + - ``organizations/{organization}/locations/{location}/eventThreatDetectionCustomModules/{event_threat_detection_custom_module}``. + - ``folders/{folder}/locations/{location}/eventThreatDetectionCustomModules/{event_threat_detection_custom_module}``. + - ``projects/{project}/locations/{location}/eventThreatDetectionCustomModules/{event_threat_detection_custom_module}``. This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this @@ -2734,12 +2755,10 @@ def sample_create_event_threat_detection_custom_module(): The request object. Message for creating a EventThreatDetectionCustomModule parent (str): - Required. Name of parent for the - module. Its format is - "organizations/{organization}/locations/{location}", - "folders/{folder}/locations/{location}", - or - "projects/{project}/locations/{location}" + Required. Name of parent for the module. Its format is + ``organizations/{organization}/locations/{location}``, + ``folders/{folder}/locations/{location}``, or + ``projects/{project}/locations/{location}`` This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this @@ -3022,9 +3041,9 @@ def sample_delete_event_threat_detection_custom_module(): Its format is: - - "organizations/{organization}/locations/{location}/eventThreatDetectionCustomModules/{event_threat_detection_custom_module}". - - "folders/{folder}/locations/{location}/eventThreatDetectionCustomModules/{event_threat_detection_custom_module}". - - "projects/{project}/locations/{location}/eventThreatDetectionCustomModules/{event_threat_detection_custom_module}". + - ``organizations/{organization}/locations/{location}/eventThreatDetectionCustomModules/{event_threat_detection_custom_module}``. + - ``folders/{folder}/locations/{location}/eventThreatDetectionCustomModules/{event_threat_detection_custom_module}``. + - ``projects/{project}/locations/{location}/eventThreatDetectionCustomModules/{event_threat_detection_custom_module}``. This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this @@ -3179,6 +3198,410 @@ def sample_validate_event_threat_detection_custom_module(): # Done; return the response. return response + def get_security_center_service( + self, + request: Optional[ + Union[security_center_management.GetSecurityCenterServiceRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> security_center_management.SecurityCenterService: + r"""Gets service settings for the specified Security + Command Center service. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import securitycentermanagement_v1 + + def sample_get_security_center_service(): + # Create a client + client = securitycentermanagement_v1.SecurityCenterManagementClient() + + # Initialize request argument(s) + request = securitycentermanagement_v1.GetSecurityCenterServiceRequest( + name="name_value", + ) + + # Make the request + response = client.get_security_center_service(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.securitycentermanagement_v1.types.GetSecurityCenterServiceRequest, dict]): + The request object. Request message for getting a + Security Command Center service. + name (str): + Required. The Security Command Center service to + retrieve. + + Formats: + + - organizations/{organization}/locations/{location}/securityCenterServices/{service} + - folders/{folder}/locations/{location}/securityCenterServices/{service} + - projects/{project}/locations/{location}/securityCenterServices/{service} + + The possible values for id {service} are: + + - container-threat-detection + - event-threat-detection + - security-health-analytics + - vm-threat-detection + - web-security-scanner + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.securitycentermanagement_v1.types.SecurityCenterService: + Represents a particular Security + Command Center service. This includes + settings information such as top-level + enablement in addition to individual + module settings. Service settings can be + configured at the organization, folder, + or project level. Service settings at + the organization or folder level are + inherited by those in child folders and + projects. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance( + request, security_center_management.GetSecurityCenterServiceRequest + ): + request = security_center_management.GetSecurityCenterServiceRequest( + request + ) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = self._transport._wrapped_methods[ + self._transport.get_security_center_service + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def list_security_center_services( + self, + request: Optional[ + Union[security_center_management.ListSecurityCenterServicesRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListSecurityCenterServicesPager: + r"""Returns a list of all Security Command Center + services for the given parent. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import securitycentermanagement_v1 + + def sample_list_security_center_services(): + # Create a client + client = securitycentermanagement_v1.SecurityCenterManagementClient() + + # Initialize request argument(s) + request = securitycentermanagement_v1.ListSecurityCenterServicesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_security_center_services(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.securitycentermanagement_v1.types.ListSecurityCenterServicesRequest, dict]): + The request object. Request message for listing Security + Command Center services. + parent (str): + Required. The name of the parent to list Security + Command Center services. + + Formats: + + - organizations/{organization}/locations/{location} + - folders/{folder}/locations/{location} + - projects/{project}/locations/{location} + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.securitycentermanagement_v1.services.security_center_management.pagers.ListSecurityCenterServicesPager: + Response message for listing Security + Command Center services. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." 
+ ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance( + request, security_center_management.ListSecurityCenterServicesRequest + ): + request = security_center_management.ListSecurityCenterServicesRequest( + request + ) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.list_security_center_services + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListSecurityCenterServicesPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def update_security_center_service( + self, + request: Optional[ + Union[security_center_management.UpdateSecurityCenterServiceRequest, dict] + ] = None, + *, + security_center_service: Optional[ + security_center_management.SecurityCenterService + ] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> security_center_management.SecurityCenterService: + r"""Updates a Security Command Center service using the + given update mask. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import securitycentermanagement_v1 + + def sample_update_security_center_service(): + # Create a client + client = securitycentermanagement_v1.SecurityCenterManagementClient() + + # Initialize request argument(s) + request = securitycentermanagement_v1.UpdateSecurityCenterServiceRequest( + ) + + # Make the request + response = client.update_security_center_service(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.securitycentermanagement_v1.types.UpdateSecurityCenterServiceRequest, dict]): + The request object. Request message for updating a + Security Command Center service. + security_center_service (google.cloud.securitycentermanagement_v1.types.SecurityCenterService): + Required. The updated service. + This corresponds to the ``security_center_service`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Required. The list of fields to be updated. 
Possible + values: + + - "intended_enablement_state" + - "modules" + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.securitycentermanagement_v1.types.SecurityCenterService: + Represents a particular Security + Command Center service. This includes + settings information such as top-level + enablement in addition to individual + module settings. Service settings can be + configured at the organization, folder, + or project level. Service settings at + the organization or folder level are + inherited by those in child folders and + projects. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([security_center_service, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance( + request, security_center_management.UpdateSecurityCenterServiceRequest + ): + request = security_center_management.UpdateSecurityCenterServiceRequest( + request + ) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if security_center_service is not None: + request.security_center_service = security_center_service + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.update_security_center_service + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + ( + ( + "security_center_service.name", + request.security_center_service.name, + ), + ) + ), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + def __enter__(self) -> "SecurityCenterManagementClient": return self diff --git a/packages/google-cloud-securitycentermanagement/google/cloud/securitycentermanagement_v1/services/security_center_management/pagers.py b/packages/google-cloud-securitycentermanagement/google/cloud/securitycentermanagement_v1/services/security_center_management/pagers.py index b025ee372009..6417b7bf188d 100644 --- a/packages/google-cloud-securitycentermanagement/google/cloud/securitycentermanagement_v1/services/security_center_management/pagers.py +++ b/packages/google-cloud-securitycentermanagement/google/cloud/securitycentermanagement_v1/services/security_center_management/pagers.py @@ -951,3 +951,146 @@ async def async_generator(): def __repr__(self) -> str: return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListSecurityCenterServicesPager: + """A pager for iterating through ``list_security_center_services`` requests. + + This class thinly wraps an initial + :class:`google.cloud.securitycentermanagement_v1.types.ListSecurityCenterServicesResponse` object, and + provides an ``__iter__`` method to iterate through its + ``security_center_services`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListSecurityCenterServices`` requests and continue to iterate + through the ``security_center_services`` field on the + corresponding responses. + + All the usual :class:`google.cloud.securitycentermanagement_v1.types.ListSecurityCenterServicesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[ + ..., security_center_management.ListSecurityCenterServicesResponse + ], + request: security_center_management.ListSecurityCenterServicesRequest, + response: security_center_management.ListSecurityCenterServicesResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.securitycentermanagement_v1.types.ListSecurityCenterServicesRequest): + The initial request object. + response (google.cloud.securitycentermanagement_v1.types.ListSecurityCenterServicesResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = security_center_management.ListSecurityCenterServicesRequest( + request + ) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages( + self, + ) -> Iterator[security_center_management.ListSecurityCenterServicesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[security_center_management.SecurityCenterService]: + for page in self.pages: + yield from page.security_center_services + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListSecurityCenterServicesAsyncPager: + """A pager for iterating through ``list_security_center_services`` requests. 
+ + This class thinly wraps an initial + :class:`google.cloud.securitycentermanagement_v1.types.ListSecurityCenterServicesResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``security_center_services`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListSecurityCenterServices`` requests and continue to iterate + through the ``security_center_services`` field on the + corresponding responses. + + All the usual :class:`google.cloud.securitycentermanagement_v1.types.ListSecurityCenterServicesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[ + ..., + Awaitable[security_center_management.ListSecurityCenterServicesResponse], + ], + request: security_center_management.ListSecurityCenterServicesRequest, + response: security_center_management.ListSecurityCenterServicesResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.securitycentermanagement_v1.types.ListSecurityCenterServicesRequest): + The initial request object. + response (google.cloud.securitycentermanagement_v1.types.ListSecurityCenterServicesResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = security_center_management.ListSecurityCenterServicesRequest( + request + ) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages( + self, + ) -> AsyncIterator[security_center_management.ListSecurityCenterServicesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + + def __aiter__( + self, + ) -> AsyncIterator[security_center_management.SecurityCenterService]: + async def async_generator(): + async for page in self.pages: + for response in page.security_center_services: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) diff --git a/packages/google-cloud-securitycentermanagement/google/cloud/securitycentermanagement_v1/services/security_center_management/transports/base.py b/packages/google-cloud-securitycentermanagement/google/cloud/securitycentermanagement_v1/services/security_center_management/transports/base.py index 3a112ef4289c..0f23f0463c60 100644 --- a/packages/google-cloud-securitycentermanagement/google/cloud/securitycentermanagement_v1/services/security_center_management/transports/base.py +++ b/packages/google-cloud-securitycentermanagement/google/cloud/securitycentermanagement_v1/services/security_center_management/transports/base.py @@ -338,6 +338,21 @@ def _prep_wrapped_messages(self, client_info): default_timeout=60.0, client_info=client_info, ), + self.get_security_center_service: gapic_v1.method.wrap_method( + self.get_security_center_service, + default_timeout=None, + client_info=client_info, + ), + self.list_security_center_services: gapic_v1.method.wrap_method( + self.list_security_center_services, + 
default_timeout=None, + client_info=client_info, + ), + self.update_security_center_service: gapic_v1.method.wrap_method( + self.update_security_center_service, + default_timeout=None, + client_info=client_info, + ), } def close(self): @@ -591,6 +606,42 @@ def validate_event_threat_detection_custom_module( ]: raise NotImplementedError() + @property + def get_security_center_service( + self, + ) -> Callable[ + [security_center_management.GetSecurityCenterServiceRequest], + Union[ + security_center_management.SecurityCenterService, + Awaitable[security_center_management.SecurityCenterService], + ], + ]: + raise NotImplementedError() + + @property + def list_security_center_services( + self, + ) -> Callable[ + [security_center_management.ListSecurityCenterServicesRequest], + Union[ + security_center_management.ListSecurityCenterServicesResponse, + Awaitable[security_center_management.ListSecurityCenterServicesResponse], + ], + ]: + raise NotImplementedError() + + @property + def update_security_center_service( + self, + ) -> Callable[ + [security_center_management.UpdateSecurityCenterServiceRequest], + Union[ + security_center_management.SecurityCenterService, + Awaitable[security_center_management.SecurityCenterService], + ], + ]: + raise NotImplementedError() + @property def get_location( self, diff --git a/packages/google-cloud-securitycentermanagement/google/cloud/securitycentermanagement_v1/services/security_center_management/transports/grpc.py b/packages/google-cloud-securitycentermanagement/google/cloud/securitycentermanagement_v1/services/security_center_management/transports/grpc.py index dd52711692bf..acb3569edc83 100644 --- a/packages/google-cloud-securitycentermanagement/google/cloud/securitycentermanagement_v1/services/security_center_management/transports/grpc.py +++ b/packages/google-cloud-securitycentermanagement/google/cloud/securitycentermanagement_v1/services/security_center_management/transports/grpc.py @@ -882,6 +882,100 @@ def validate_event_threat_detection_custom_module( ) return self._stubs["validate_event_threat_detection_custom_module"] + @property + def get_security_center_service( + self, + ) -> Callable[ + [security_center_management.GetSecurityCenterServiceRequest], + security_center_management.SecurityCenterService, + ]: + r"""Return a callable for the get security center service method over gRPC. + + Gets service settings for the specified Security + Command Center service. + + Returns: + Callable[[~.GetSecurityCenterServiceRequest], + ~.SecurityCenterService]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "get_security_center_service" not in self._stubs: + self._stubs["get_security_center_service"] = self.grpc_channel.unary_unary( + "/google.cloud.securitycentermanagement.v1.SecurityCenterManagement/GetSecurityCenterService", + request_serializer=security_center_management.GetSecurityCenterServiceRequest.serialize, + response_deserializer=security_center_management.SecurityCenterService.deserialize, + ) + return self._stubs["get_security_center_service"] + + @property + def list_security_center_services( + self, + ) -> Callable[ + [security_center_management.ListSecurityCenterServicesRequest], + security_center_management.ListSecurityCenterServicesResponse, + ]: + r"""Return a callable for the list security center services method over gRPC. + + Returns a list of all Security Command Center + services for the given parent. + + Returns: + Callable[[~.ListSecurityCenterServicesRequest], + ~.ListSecurityCenterServicesResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_security_center_services" not in self._stubs: + self._stubs[ + "list_security_center_services" + ] = self.grpc_channel.unary_unary( + "/google.cloud.securitycentermanagement.v1.SecurityCenterManagement/ListSecurityCenterServices", + request_serializer=security_center_management.ListSecurityCenterServicesRequest.serialize, + response_deserializer=security_center_management.ListSecurityCenterServicesResponse.deserialize, + ) + return self._stubs["list_security_center_services"] + + @property + def update_security_center_service( + self, + ) -> Callable[ + [security_center_management.UpdateSecurityCenterServiceRequest], + security_center_management.SecurityCenterService, + ]: + r"""Return a callable for the update security center service method over gRPC. + + Updates a Security Command Center service using the + given update mask. + + Returns: + Callable[[~.UpdateSecurityCenterServiceRequest], + ~.SecurityCenterService]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "update_security_center_service" not in self._stubs: + self._stubs[ + "update_security_center_service" + ] = self.grpc_channel.unary_unary( + "/google.cloud.securitycentermanagement.v1.SecurityCenterManagement/UpdateSecurityCenterService", + request_serializer=security_center_management.UpdateSecurityCenterServiceRequest.serialize, + response_deserializer=security_center_management.SecurityCenterService.deserialize, + ) + return self._stubs["update_security_center_service"] + def close(self): self.grpc_channel.close() diff --git a/packages/google-cloud-securitycentermanagement/google/cloud/securitycentermanagement_v1/services/security_center_management/transports/grpc_asyncio.py b/packages/google-cloud-securitycentermanagement/google/cloud/securitycentermanagement_v1/services/security_center_management/transports/grpc_asyncio.py index 645dc4aca1ae..f83d907e2b5c 100644 --- a/packages/google-cloud-securitycentermanagement/google/cloud/securitycentermanagement_v1/services/security_center_management/transports/grpc_asyncio.py +++ b/packages/google-cloud-securitycentermanagement/google/cloud/securitycentermanagement_v1/services/security_center_management/transports/grpc_asyncio.py @@ -904,6 +904,100 @@ def validate_event_threat_detection_custom_module( ) return self._stubs["validate_event_threat_detection_custom_module"] + @property + def get_security_center_service( + self, + ) -> Callable[ + [security_center_management.GetSecurityCenterServiceRequest], + Awaitable[security_center_management.SecurityCenterService], + ]: + r"""Return a callable for the get security center service method over gRPC. + + Gets service settings for the specified Security + Command Center service. + + Returns: + Callable[[~.GetSecurityCenterServiceRequest], + Awaitable[~.SecurityCenterService]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_security_center_service" not in self._stubs: + self._stubs["get_security_center_service"] = self.grpc_channel.unary_unary( + "/google.cloud.securitycentermanagement.v1.SecurityCenterManagement/GetSecurityCenterService", + request_serializer=security_center_management.GetSecurityCenterServiceRequest.serialize, + response_deserializer=security_center_management.SecurityCenterService.deserialize, + ) + return self._stubs["get_security_center_service"] + + @property + def list_security_center_services( + self, + ) -> Callable[ + [security_center_management.ListSecurityCenterServicesRequest], + Awaitable[security_center_management.ListSecurityCenterServicesResponse], + ]: + r"""Return a callable for the list security center services method over gRPC. + + Returns a list of all Security Command Center + services for the given parent. + + Returns: + Callable[[~.ListSecurityCenterServicesRequest], + Awaitable[~.ListSecurityCenterServicesResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "list_security_center_services" not in self._stubs: + self._stubs[ + "list_security_center_services" + ] = self.grpc_channel.unary_unary( + "/google.cloud.securitycentermanagement.v1.SecurityCenterManagement/ListSecurityCenterServices", + request_serializer=security_center_management.ListSecurityCenterServicesRequest.serialize, + response_deserializer=security_center_management.ListSecurityCenterServicesResponse.deserialize, + ) + return self._stubs["list_security_center_services"] + + @property + def update_security_center_service( + self, + ) -> Callable[ + [security_center_management.UpdateSecurityCenterServiceRequest], + Awaitable[security_center_management.SecurityCenterService], + ]: + r"""Return a callable for the update security center service method over gRPC. + + Updates a Security Command Center service using the + given update mask. + + Returns: + Callable[[~.UpdateSecurityCenterServiceRequest], + Awaitable[~.SecurityCenterService]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_security_center_service" not in self._stubs: + self._stubs[ + "update_security_center_service" + ] = self.grpc_channel.unary_unary( + "/google.cloud.securitycentermanagement.v1.SecurityCenterManagement/UpdateSecurityCenterService", + request_serializer=security_center_management.UpdateSecurityCenterServiceRequest.serialize, + response_deserializer=security_center_management.SecurityCenterService.deserialize, + ) + return self._stubs["update_security_center_service"] + def _prep_wrapped_messages(self, client_info): """Precompute the wrapped methods, overriding the base class method to use async wrappers.""" self._wrapped_methods = { @@ -1117,6 +1211,21 @@ def _prep_wrapped_messages(self, client_info): default_timeout=60.0, client_info=client_info, ), + self.get_security_center_service: gapic_v1.method_async.wrap_method( + self.get_security_center_service, + default_timeout=None, + client_info=client_info, + ), + self.list_security_center_services: gapic_v1.method_async.wrap_method( + self.list_security_center_services, + default_timeout=None, + client_info=client_info, + ), + self.update_security_center_service: gapic_v1.method_async.wrap_method( + self.update_security_center_service, + default_timeout=None, + client_info=client_info, + ), } def close(self): diff --git a/packages/google-cloud-securitycentermanagement/google/cloud/securitycentermanagement_v1/services/security_center_management/transports/rest.py b/packages/google-cloud-securitycentermanagement/google/cloud/securitycentermanagement_v1/services/security_center_management/transports/rest.py index 3b8eaf788f4e..454987a100c8 100644 --- a/packages/google-cloud-securitycentermanagement/google/cloud/securitycentermanagement_v1/services/security_center_management/transports/rest.py +++ b/packages/google-cloud-securitycentermanagement/google/cloud/securitycentermanagement_v1/services/security_center_management/transports/rest.py @@ -114,6 +114,14 @@ def post_get_event_threat_detection_custom_module(self, response): logging.log(f"Received response: {response}") return response + def pre_get_security_center_service(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_security_center_service(self, response): + logging.log(f"Received response: 
{response}") + return response + def pre_get_security_health_analytics_custom_module(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata @@ -162,6 +170,14 @@ def post_list_event_threat_detection_custom_modules(self, response): logging.log(f"Received response: {response}") return response + def pre_list_security_center_services(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_security_center_services(self, response): + logging.log(f"Received response: {response}") + return response + def pre_list_security_health_analytics_custom_modules(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata @@ -186,6 +202,14 @@ def post_update_event_threat_detection_custom_module(self, response): logging.log(f"Received response: {response}") return response + def pre_update_security_center_service(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_update_security_center_service(self, response): + logging.log(f"Received response: {response}") + return response + def pre_update_security_health_analytics_custom_module(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata @@ -370,6 +394,32 @@ def post_get_event_threat_detection_custom_module( """ return response + def pre_get_security_center_service( + self, + request: security_center_management.GetSecurityCenterServiceRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + security_center_management.GetSecurityCenterServiceRequest, + Sequence[Tuple[str, str]], + ]: + """Pre-rpc interceptor for get_security_center_service + + Override in a subclass to manipulate the request or metadata + before they are sent to the SecurityCenterManagement server. + """ + return request, metadata + + def post_get_security_center_service( + self, response: security_center_management.SecurityCenterService + ) -> security_center_management.SecurityCenterService: + """Post-rpc interceptor for get_security_center_service + + Override in a subclass to manipulate the response + after it is returned by the SecurityCenterManagement server but before + it is returned to user code. + """ + return response + def pre_get_security_health_analytics_custom_module( self, request: security_center_management.GetSecurityHealthAnalyticsCustomModuleRequest, @@ -539,6 +589,32 @@ def post_list_event_threat_detection_custom_modules( """ return response + def pre_list_security_center_services( + self, + request: security_center_management.ListSecurityCenterServicesRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + security_center_management.ListSecurityCenterServicesRequest, + Sequence[Tuple[str, str]], + ]: + """Pre-rpc interceptor for list_security_center_services + + Override in a subclass to manipulate the request or metadata + before they are sent to the SecurityCenterManagement server. + """ + return request, metadata + + def post_list_security_center_services( + self, response: security_center_management.ListSecurityCenterServicesResponse + ) -> security_center_management.ListSecurityCenterServicesResponse: + """Post-rpc interceptor for list_security_center_services + + Override in a subclass to manipulate the response + after it is returned by the SecurityCenterManagement server but before + it is returned to user code. 
+ """ + return response + def pre_list_security_health_analytics_custom_modules( self, request: security_center_management.ListSecurityHealthAnalyticsCustomModulesRequest, @@ -619,6 +695,32 @@ def post_update_event_threat_detection_custom_module( """ return response + def pre_update_security_center_service( + self, + request: security_center_management.UpdateSecurityCenterServiceRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + security_center_management.UpdateSecurityCenterServiceRequest, + Sequence[Tuple[str, str]], + ]: + """Pre-rpc interceptor for update_security_center_service + + Override in a subclass to manipulate the request or metadata + before they are sent to the SecurityCenterManagement server. + """ + return request, metadata + + def post_update_security_center_service( + self, response: security_center_management.SecurityCenterService + ) -> security_center_management.SecurityCenterService: + """Post-rpc interceptor for update_security_center_service + + Override in a subclass to manipulate the response + after it is returned by the SecurityCenterManagement server but before + it is returned to user code. + """ + return response + def pre_update_security_health_analytics_custom_module( self, request: security_center_management.UpdateSecurityHealthAnalyticsCustomModuleRequest, @@ -1599,6 +1701,115 @@ def __call__( resp = self._interceptor.post_get_event_threat_detection_custom_module(resp) return resp + class _GetSecurityCenterService(SecurityCenterManagementRestStub): + def __hash__(self): + return hash("GetSecurityCenterService") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: security_center_management.GetSecurityCenterServiceRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> security_center_management.SecurityCenterService: + r"""Call the get security center + service method over HTTP. + + Args: + request (~.security_center_management.GetSecurityCenterServiceRequest): + The request object. Request message for getting a + Security Command Center service. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.security_center_management.SecurityCenterService: + Represents a particular Security + Command Center service. This includes + settings information such as top-level + enablement in addition to individual + module settings. Service settings can be + configured at the organization, folder, + or project level. Service settings at + the organization or folder level are + inherited by those in child folders and + projects. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/securityCenterServices/*}", + }, + { + "method": "get", + "uri": "/v1/{name=folders/*/locations/*/securityCenterServices/*}", + }, + { + "method": "get", + "uri": "/v1/{name=organizations/*/locations/*/securityCenterServices/*}", + }, + ] + request, metadata = self._interceptor.pre_get_security_center_service( + request, metadata + ) + pb_request = security_center_management.GetSecurityCenterServiceRequest.pb( + request + ) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = security_center_management.SecurityCenterService() + pb_resp = security_center_management.SecurityCenterService.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_security_center_service(resp) + return resp + class _GetSecurityHealthAnalyticsCustomModule(SecurityCenterManagementRestStub): def __hash__(self): return hash("GetSecurityHealthAnalyticsCustomModule") @@ -2288,6 +2499,109 @@ def __call__( ) return resp + class _ListSecurityCenterServices(SecurityCenterManagementRestStub): + def __hash__(self): + return hash("ListSecurityCenterServices") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: security_center_management.ListSecurityCenterServicesRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> security_center_management.ListSecurityCenterServicesResponse: + r"""Call the list security center + services method over HTTP. + + Args: + request (~.security_center_management.ListSecurityCenterServicesRequest): + The request object. Request message for listing Security + Command Center services. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.security_center_management.ListSecurityCenterServicesResponse: + Response message for listing Security + Command Center services. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{parent=projects/*/locations/*}/securityCenterServices", + }, + { + "method": "get", + "uri": "/v1/{parent=folders/*/locations/*}/securityCenterServices", + }, + { + "method": "get", + "uri": "/v1/{parent=organizations/*/locations/*}/securityCenterServices", + }, + ] + request, metadata = self._interceptor.pre_list_security_center_services( + request, metadata + ) + pb_request = ( + security_center_management.ListSecurityCenterServicesRequest.pb(request) + ) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = security_center_management.ListSecurityCenterServicesResponse() + pb_resp = security_center_management.ListSecurityCenterServicesResponse.pb( + resp + ) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_security_center_services(resp) + return resp + class _ListSecurityHealthAnalyticsCustomModules(SecurityCenterManagementRestStub): def __hash__(self): return hash("ListSecurityHealthAnalyticsCustomModules") @@ -2651,6 +2965,128 @@ def __call__( ) return resp + class _UpdateSecurityCenterService(SecurityCenterManagementRestStub): + def __hash__(self): + return hash("UpdateSecurityCenterService") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "updateMask": {}, + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: security_center_management.UpdateSecurityCenterServiceRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> security_center_management.SecurityCenterService: + r"""Call the update security center + service method over HTTP. + + Args: + request (~.security_center_management.UpdateSecurityCenterServiceRequest): + The request object. Request message for updating a + Security Command Center service. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.security_center_management.SecurityCenterService: + Represents a particular Security + Command Center service. This includes + settings information such as top-level + enablement in addition to individual + module settings. Service settings can be + configured at the organization, folder, + or project level. 
Service settings at + the organization or folder level are + inherited by those in child folders and + projects. + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "patch", + "uri": "/v1/{security_center_service.name=projects/*/locations/*/securityCenterServices/*}", + "body": "security_center_service", + }, + { + "method": "patch", + "uri": "/v1/{security_center_service.name=folders/*/locations/*/securityCenterServices/*}", + "body": "security_center_service", + }, + { + "method": "patch", + "uri": "/v1/{security_center_service.name=organizations/*/locations/*/securityCenterServices/*}", + "body": "security_center_service", + }, + ] + request, metadata = self._interceptor.pre_update_security_center_service( + request, metadata + ) + pb_request = ( + security_center_management.UpdateSecurityCenterServiceRequest.pb( + request + ) + ) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = security_center_management.SecurityCenterService() + pb_resp = security_center_management.SecurityCenterService.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_update_security_center_service(resp) + return resp + class _UpdateSecurityHealthAnalyticsCustomModule(SecurityCenterManagementRestStub): def __hash__(self): return hash("UpdateSecurityHealthAnalyticsCustomModule") @@ -2982,6 +3418,17 @@ def get_event_threat_detection_custom_module( # In C++ this would require a dynamic_cast return self._GetEventThreatDetectionCustomModule(self._session, self._host, self._interceptor) # type: ignore + @property + def get_security_center_service( + self, + ) -> Callable[ + [security_center_management.GetSecurityCenterServiceRequest], + security_center_management.SecurityCenterService, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
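Each `__call__` above starts by mapping the request onto one of its declared `http_options` via `path_template.transcode`, which selects the matching URI pattern, fills the path from the request's `name` (or `parent` / `security_center_service.name`), and leaves the remaining fields as query parameters or, for the PATCH binding, the request body. A small sketch of that step in isolation, reusing the GET bindings declared for `GetSecurityCenterService`; the resource values are placeholders:

```python
from google.api_core import path_template
from google.cloud import securitycentermanagement_v1 as scm

# The same GET bindings declared in _GetSecurityCenterService above.
http_options = [
    {"method": "get", "uri": "/v1/{name=projects/*/locations/*/securityCenterServices/*}"},
    {"method": "get", "uri": "/v1/{name=folders/*/locations/*/securityCenterServices/*}"},
    {"method": "get", "uri": "/v1/{name=organizations/*/locations/*/securityCenterServices/*}"},
]

request = scm.GetSecurityCenterServiceRequest(
    # "my-project" and "global" are illustrative placeholders.
    name="projects/my-project/locations/global/securityCenterServices/event-threat-detection",
    show_eligible_modules_only=True,
)

transcoded = path_template.transcode(
    http_options, scm.GetSecurityCenterServiceRequest.pb(request)
)
print(transcoded["method"])        # "get"
print(transcoded["uri"])           # "/v1/projects/my-project/.../event-threat-detection"
print(transcoded["query_params"])  # leftover fields, e.g. show_eligible_modules_only
```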
+ # In C++ this would require a dynamic_cast + return self._GetSecurityCenterService(self._session, self._host, self._interceptor) # type: ignore + @property def get_security_health_analytics_custom_module( self, @@ -3056,6 +3503,17 @@ def list_event_threat_detection_custom_modules( # In C++ this would require a dynamic_cast return self._ListEventThreatDetectionCustomModules(self._session, self._host, self._interceptor) # type: ignore + @property + def list_security_center_services( + self, + ) -> Callable[ + [security_center_management.ListSecurityCenterServicesRequest], + security_center_management.ListSecurityCenterServicesResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListSecurityCenterServices(self._session, self._host, self._interceptor) # type: ignore + @property def list_security_health_analytics_custom_modules( self, @@ -3089,6 +3547,17 @@ def update_event_threat_detection_custom_module( # In C++ this would require a dynamic_cast return self._UpdateEventThreatDetectionCustomModule(self._session, self._host, self._interceptor) # type: ignore + @property + def update_security_center_service( + self, + ) -> Callable[ + [security_center_management.UpdateSecurityCenterServiceRequest], + security_center_management.SecurityCenterService, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._UpdateSecurityCenterService(self._session, self._host, self._interceptor) # type: ignore + @property def update_security_health_analytics_custom_module( self, diff --git a/packages/google-cloud-securitycentermanagement/google/cloud/securitycentermanagement_v1/types/__init__.py b/packages/google-cloud-securitycentermanagement/google/cloud/securitycentermanagement_v1/types/__init__.py index 80f7a171e41c..2f11cc8f5e3b 100644 --- a/packages/google-cloud-securitycentermanagement/google/cloud/securitycentermanagement_v1/types/__init__.py +++ b/packages/google-cloud-securitycentermanagement/google/cloud/securitycentermanagement_v1/types/__init__.py @@ -25,6 +25,7 @@ GetEffectiveEventThreatDetectionCustomModuleRequest, GetEffectiveSecurityHealthAnalyticsCustomModuleRequest, GetEventThreatDetectionCustomModuleRequest, + GetSecurityCenterServiceRequest, GetSecurityHealthAnalyticsCustomModuleRequest, ListDescendantEventThreatDetectionCustomModulesRequest, ListDescendantEventThreatDetectionCustomModulesResponse, @@ -36,13 +37,17 @@ ListEffectiveSecurityHealthAnalyticsCustomModulesResponse, ListEventThreatDetectionCustomModulesRequest, ListEventThreatDetectionCustomModulesResponse, + ListSecurityCenterServicesRequest, + ListSecurityCenterServicesResponse, ListSecurityHealthAnalyticsCustomModulesRequest, ListSecurityHealthAnalyticsCustomModulesResponse, + SecurityCenterService, SecurityHealthAnalyticsCustomModule, SimulatedFinding, SimulateSecurityHealthAnalyticsCustomModuleRequest, SimulateSecurityHealthAnalyticsCustomModuleResponse, UpdateEventThreatDetectionCustomModuleRequest, + UpdateSecurityCenterServiceRequest, UpdateSecurityHealthAnalyticsCustomModuleRequest, ValidateEventThreatDetectionCustomModuleRequest, ValidateEventThreatDetectionCustomModuleResponse, @@ -60,6 +65,7 @@ "GetEffectiveEventThreatDetectionCustomModuleRequest", "GetEffectiveSecurityHealthAnalyticsCustomModuleRequest", "GetEventThreatDetectionCustomModuleRequest", + "GetSecurityCenterServiceRequest", 
"GetSecurityHealthAnalyticsCustomModuleRequest", "ListDescendantEventThreatDetectionCustomModulesRequest", "ListDescendantEventThreatDetectionCustomModulesResponse", @@ -71,13 +77,17 @@ "ListEffectiveSecurityHealthAnalyticsCustomModulesResponse", "ListEventThreatDetectionCustomModulesRequest", "ListEventThreatDetectionCustomModulesResponse", + "ListSecurityCenterServicesRequest", + "ListSecurityCenterServicesResponse", "ListSecurityHealthAnalyticsCustomModulesRequest", "ListSecurityHealthAnalyticsCustomModulesResponse", + "SecurityCenterService", "SecurityHealthAnalyticsCustomModule", "SimulatedFinding", "SimulateSecurityHealthAnalyticsCustomModuleRequest", "SimulateSecurityHealthAnalyticsCustomModuleResponse", "UpdateEventThreatDetectionCustomModuleRequest", + "UpdateSecurityCenterServiceRequest", "UpdateSecurityHealthAnalyticsCustomModuleRequest", "ValidateEventThreatDetectionCustomModuleRequest", "ValidateEventThreatDetectionCustomModuleResponse", diff --git a/packages/google-cloud-securitycentermanagement/google/cloud/securitycentermanagement_v1/types/security_center_management.py b/packages/google-cloud-securitycentermanagement/google/cloud/securitycentermanagement_v1/types/security_center_management.py index 3b4258f2fdd0..93c715ebdc85 100644 --- a/packages/google-cloud-securitycentermanagement/google/cloud/securitycentermanagement_v1/types/security_center_management.py +++ b/packages/google-cloud-securitycentermanagement/google/cloud/securitycentermanagement_v1/types/security_center_management.py @@ -29,6 +29,7 @@ __protobuf__ = proto.module( package="google.cloud.securitycentermanagement.v1", manifest={ + "SecurityCenterService", "EffectiveSecurityHealthAnalyticsCustomModule", "ListEffectiveSecurityHealthAnalyticsCustomModulesRequest", "ListEffectiveSecurityHealthAnalyticsCustomModulesResponse", @@ -61,10 +62,162 @@ "DeleteEventThreatDetectionCustomModuleRequest", "ValidateEventThreatDetectionCustomModuleRequest", "ValidateEventThreatDetectionCustomModuleResponse", + "GetSecurityCenterServiceRequest", + "ListSecurityCenterServicesRequest", + "ListSecurityCenterServicesResponse", + "UpdateSecurityCenterServiceRequest", }, ) +class SecurityCenterService(proto.Message): + r"""Represents a particular Security Command Center service. This + includes settings information such as top-level enablement in + addition to individual module settings. Service settings can be + configured at the organization, folder, or project level. + Service settings at the organization or folder level are + inherited by those in child folders and projects. + + Attributes: + name (str): + Identifier. The name of the service. + + Its format is: + + - organizations/{organization}/locations/{location}/securityCenterServices/{service} + - folders/{folder}/locations/{location}/securityCenterServices/{service} + - projects/{project}/locations/{location}/securityCenterServices/{service} + + The possible values for id {service} are: + + - container-threat-detection + - event-threat-detection + - security-health-analytics + - vm-threat-detection + - web-security-scanner + intended_enablement_state (google.cloud.securitycentermanagement_v1.types.SecurityCenterService.EnablementState): + Optional. The intended state of enablement for the service + at its level of the resource hierarchy. A DISABLED state + will override all module enablement_states to DISABLED. + effective_enablement_state (google.cloud.securitycentermanagement_v1.types.SecurityCenterService.EnablementState): + Output only. 
The effective enablement state + for the service at its level of the resource + hierarchy. If the intended state is set to + INHERITED, the effective state will be inherited + from the enablement state of an ancestor. This + state may differ from the intended enablement + state due to billing eligibility or onboarding + status. + modules (MutableMapping[str, google.cloud.securitycentermanagement_v1.types.SecurityCenterService.ModuleSettings]): + Optional. The configurations including the + state of enablement for the service's different + modules. The absence of a module in the map + implies its configuration is inherited from its + parents. + update_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time the service was last + updated. This could be due to an explicit user + update or due to a side effect of another system + change such as billing subscription expiry. + service_config (google.protobuf.struct_pb2.Struct): + Optional. Additional service specific + configuration. Not all services will utilize + this field. + """ + + class EnablementState(proto.Enum): + r"""Represents the possible intended states of enablement for a + service or module. + + Values: + ENABLEMENT_STATE_UNSPECIFIED (0): + Default value. This value is unused. + INHERITED (1): + State is inherited from the parent resource. + Not a valid effective enablement state. + ENABLED (2): + State is enabled. + DISABLED (3): + State is disabled. + INGEST_ONLY (4): + SCC is configured to ingest findings from this service but + not enable this service. Not a valid + intended_enablement_state (that is, this is a readonly + state). + """ + + ENABLEMENT_STATE_UNSPECIFIED = 0 + INHERITED = 1 + ENABLED = 2 + DISABLED = 3 + INGEST_ONLY = 4 + + class ModuleSettings(proto.Message): + r"""The settings for individual modules. + + Attributes: + intended_enablement_state (google.cloud.securitycentermanagement_v1.types.SecurityCenterService.EnablementState): + Optional. The intended state of enablement + for the module at its level of the resource + hierarchy. + effective_enablement_state (google.cloud.securitycentermanagement_v1.types.SecurityCenterService.EnablementState): + Output only. The effective enablement state + for the module at its level of the resource + hierarchy. If the intended state is set to + INHERITED, the effective state will be inherited + from the enablement state of an ancestor. This + state may + differ from the intended enablement state due to + billing eligibility or onboarding status. 
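Taken together, `SecurityCenterService`, its nested `EnablementState` values, and the per-module `ModuleSettings` map describe a service's configuration at one level of the resource hierarchy. A hedged construction sketch using only the fields documented here; it assumes these types are re-exported at the `securitycentermanagement_v1` package level (as the request types are in the generated samples), and the resource values and module key are illustrative placeholders:

```python
from google.cloud import securitycentermanagement_v1 as scm

service = scm.SecurityCenterService(
    # "my-project" and "global" are placeholders; "event-threat-detection" is
    # one of the documented {service} ids.
    name=(
        "projects/my-project/locations/global/"
        "securityCenterServices/event-threat-detection"
    ),
    intended_enablement_state=scm.SecurityCenterService.EnablementState.ENABLED,
    modules={
        # "some-module" is a hypothetical key; leaving a module out of the map
        # means its configuration is inherited from the parent.
        "some-module": scm.SecurityCenterService.ModuleSettings(
            intended_enablement_state=scm.SecurityCenterService.EnablementState.DISABLED,
        ),
    },
)
print(service)
```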
+ """ + + intended_enablement_state: "SecurityCenterService.EnablementState" = ( + proto.Field( + proto.ENUM, + number=1, + enum="SecurityCenterService.EnablementState", + ) + ) + effective_enablement_state: "SecurityCenterService.EnablementState" = ( + proto.Field( + proto.ENUM, + number=2, + enum="SecurityCenterService.EnablementState", + ) + ) + + name: str = proto.Field( + proto.STRING, + number=1, + ) + intended_enablement_state: EnablementState = proto.Field( + proto.ENUM, + number=2, + enum=EnablementState, + ) + effective_enablement_state: EnablementState = proto.Field( + proto.ENUM, + number=3, + enum=EnablementState, + ) + modules: MutableMapping[str, ModuleSettings] = proto.MapField( + proto.STRING, + proto.MESSAGE, + number=4, + message=ModuleSettings, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=5, + message=timestamp_pb2.Timestamp, + ) + service_config: struct_pb2.Struct = proto.Field( + proto.MESSAGE, + number=6, + message=struct_pb2.Struct, + ) + + class EffectiveSecurityHealthAnalyticsCustomModule(proto.Message): r"""An EffectiveSecurityHealthAnalyticsCustomModule is the representation of a Security Health Analytics custom module at a @@ -80,13 +233,12 @@ class EffectiveSecurityHealthAnalyticsCustomModule(proto.Message): Attributes: name (str): - Identifier. The resource name of the custom module. Its - format is - "organizations/{organization}/locations/{location}/effectiveSecurityHealthAnalyticsCustomModules/{effective_security_health_analytics_custom_module}", - or - "folders/{folder}/locations/{location}/effectiveSecurityHealthAnalyticsCustomModules/{effective_security_health_analytics_custom_module}", - or - "projects/{project}/locations/{location}/effectiveSecurityHealthAnalyticsCustomModules/{effective_security_health_analytics_custom_module}". + Identifier. The full resource name of the custom module, + specified in one of the following formats: + + - ``organizations/organization/{location}/effectiveSecurityHealthAnalyticsCustomModules/{effective_security_health_analytics_custom_module}`` + - ``folders/folder/{location}/effectiveSecurityHealthAnalyticsCustomModules/{effective_security_health_analytics_custom_module}`` + - ``projects/project/{location}/effectiveSecurityHealthAnalyticsCustomModules/{effective_security_health_analytics_custom_module}`` custom_config (google.cloud.securitycentermanagement_v1.types.CustomConfig): Output only. The user-specified configuration for the module. @@ -113,6 +265,7 @@ class EnablementState(proto.Enum): DISABLED (2): The module is disabled at the given level. """ + ENABLEMENT_STATE_UNSPECIFIED = 0 ENABLED = 1 DISABLED = 2 @@ -143,12 +296,12 @@ class ListEffectiveSecurityHealthAnalyticsCustomModulesRequest(proto.Message): Attributes: parent (str): - Required. Name of parent to list effective - custom modules. Its format is - "organizations/{organization}/locations/{location}", - "folders/{folder}/locations/{location}", - or - "projects/{project}/locations/{location}". + Required. Name of parent to list effective custom modules. + specified in one of the following formats: + + - ``organizations/{organization}/locations/{location}`` + - ``folders/{folder}/locations/{location}`` or + ``projects/{project}/locations/{location}`` page_size (int): Optional. The maximum number of results to return in a single response. Default is 10, @@ -208,13 +361,12 @@ class GetEffectiveSecurityHealthAnalyticsCustomModuleRequest(proto.Message): Attributes: name (str): - Required. 
The resource name of the SHA custom module. + Required. The full resource name of the custom module, + specified in one of the following formats: - Its format is: - - - "organizations/{organization}/locations/{location}/effectiveSecurityHealthAnalyticsCustomModules/{module_id}". - - "folders/{folder}/locations/{location}/effectiveSecurityHealthAnalyticsCustomModules/{module_id}". - - "projects/{project}/locations/{location}/effectiveSecurityHealthAnalyticsCustomModules/{module_id}". + - ``organizations/organization/{location}/effectiveSecurityHealthAnalyticsCustomModules/{effective_security_health_analytics_custom_module}`` + - ``folders/folder/{location}/effectiveSecurityHealthAnalyticsCustomModules/{effective_security_health_analytics_custom_module}`` + - ``projects/project/{location}/effectiveSecurityHealthAnalyticsCustomModules/{effective_security_health_analytics_custom_module}`` """ name: str = proto.Field( @@ -233,16 +385,12 @@ class SecurityHealthAnalyticsCustomModule(proto.Message): Attributes: name (str): - Identifier. The resource name of the custom module. Its - format is - "organizations/{organization}/locations/{location}/securityHealthAnalyticsCustomModules/{security_health_analytics_custom_module}", - or - "folders/{folder}/locations/{location}/securityHealthAnalyticsCustomModules/{security_health_analytics_custom_module}", - or - "projects/{project}/locations/{location}/securityHealthAnalyticsCustomModules/{security_health_analytics_custom_module}" - - The id {customModule} is server-generated and is not user - settable. It will be a numeric id containing 1-20 digits. + Identifier. The full resource name of the custom module, + specified in one of the following formats: + + - ``organizations/{organization}/locations/{location}/securityHealthAnalyticsCustomModules/{security_health_analytics_custom_module}`` + - ``folders/{folder}/locations/{location}/securityHealthAnalyticsCustomModules/{security_health_analytics_custom_module}`` + - ``projects/{project}/locations/{location}/securityHealthAnalyticsCustomModules/{security_health_analytics_custom_module}`` display_name (str): Optional. The display name of the Security Health Analytics custom module. This display @@ -293,6 +441,7 @@ class EnablementState(proto.Enum): to the INHERITED state will result in an INVALID_ARGUMENT error. """ + ENABLEMENT_STATE_UNSPECIFIED = 0 ENABLED = 1 DISABLED = 2 @@ -384,6 +533,7 @@ class Severity(proto.Enum): LOW (4): Low severity. """ + SEVERITY_UNSPECIFIED = 0 CRITICAL = 1 HIGH = 2 @@ -485,12 +635,13 @@ class ListSecurityHealthAnalyticsCustomModulesRequest(proto.Message): Attributes: parent (str): - Required. Name of parent to list custom - modules. Its format is - "organizations/{organization}/locations/{location}", - "folders/{folder}/locations/{location}", - or - "projects/{project}/locations/{location}". + Required. Name of parent organization, folder, or project in + which to list custom modules, specified in one of the + following formats: + + - ``organizations/{organization}/locations/{location}`` + - ``folders/{folder}/locations/{location}`` + - ``projects/{project}/locations/{location}`` page_size (int): Optional. The maximum number of results to return in a single response. Default is 10, @@ -550,12 +701,13 @@ class ListDescendantSecurityHealthAnalyticsCustomModulesRequest(proto.Message): Attributes: parent (str): - Required. Name of parent to list custom - modules. 
Its format is - "organizations/{organization}/locations/{location}", - "folders/{folder}/locations/{location}", - or - "projects/{project}/locations/{location}". + Required. Name of the parent organization, folder, or + project in which to list custom modules, specified in one of + the following formats: + + - ``organizations/{organization}/locations/{location}`` + - ``folders/{folder}/locations/{location}`` + - ``projects/{project}/locations/{location}`` page_size (int): Optional. The maximum number of results to return in a single response. Default is 10, @@ -628,12 +780,13 @@ class CreateSecurityHealthAnalyticsCustomModuleRequest(proto.Message): Attributes: parent (str): - Required. Name of the parent for the module. - Its format is - "organizations/{organization}/locations/{location}", - "folders/{folder}/locations/{location}", - or - "projects/{project}/locations/{location}". + Required. Name of the parent organization, folder, or + project of the module, specified in one of the following + formats: + + - ``organizations/{organization}/locations/{location}`` + - ``folders/{folder}/locations/{location}`` + - ``projects/{project}/locations/{location}`` security_health_analytics_custom_module (google.cloud.securitycentermanagement_v1.types.SecurityHealthAnalyticsCustomModule): Required. The resource being created validate_only (bool): @@ -687,10 +840,9 @@ class UpdateSecurityHealthAnalyticsCustomModuleRequest(proto.Message): indicates the request is valid while an error response indicates the request is invalid. Note that a subsequent request to actually update the - module could still fail because 1. the state - could have changed (e.g. IAM permission lost) or - 2. A failure occurred while trying to update the - module. + module could still fail because + 1. the state could have changed (e.g. IAM permission lost) or + 2. A failure occurred while trying to update the module. """ update_mask: field_mask_pb2.FieldMask = proto.Field( @@ -720,9 +872,9 @@ class DeleteSecurityHealthAnalyticsCustomModuleRequest(proto.Message): Its format is: - - "organizations/{organization}/locations/{location}/securityHealthAnalyticsCustomModules/{security_health_analytics_custom_module}". - - "folders/{folder}/locations/{location}/securityHealthAnalyticsCustomModules/{security_health_analytics_custom_module}". - - "projects/{project}/locations/{location}/securityHealthAnalyticsCustomModules/{security_health_analytics_custom_module}". + - ``organizations/{organization}/locations/{location}/securityHealthAnalyticsCustomModules/{security_health_analytics_custom_module}``. + - ``folders/{folder}/locations/{location}/securityHealthAnalyticsCustomModules/{security_health_analytics_custom_module}``. + - ``projects/{project}/locations/{location}/securityHealthAnalyticsCustomModules/{security_health_analytics_custom_module}``. validate_only (bool): Optional. When set to true, only validations (including IAM checks) will done for the request @@ -730,10 +882,9 @@ class DeleteSecurityHealthAnalyticsCustomModuleRequest(proto.Message): indicates the request is valid while an error response indicates the request is invalid. Note that a subsequent request to actually delete the - module could still fail because 1. the state - could have changed (e.g. IAM permission lost) or - 2. A failure occurred while trying to delete the - module. + module could still fail because + 1. the state could have changed (e.g. IAM permission lost) or + 2. A failure occurred while trying to delete the module. 
""" name: str = proto.Field( @@ -824,15 +975,15 @@ class SimulatedFinding(proto.Message): Identifier. The `relative resource name `__ of the finding. Example: - "organizations/{organization_id}/sources/{source_id}/findings/{finding_id}", - "folders/{folder_id}/sources/{source_id}/findings/{finding_id}", - "projects/{project_id}/sources/{source_id}/findings/{finding_id}". + ``organizations/{organization_id}/sources/{source_id}/findings/{finding_id}``, + ``folders/{folder_id}/sources/{source_id}/findings/{finding_id}``, + ``projects/{project_id}/sources/{source_id}/findings/{finding_id}``. parent (str): The relative resource name of the source the finding belongs to. See: https://cloud.google.com/apis/design/resource_names#relative_resource_name This field is immutable after creation time. For example: - "organizations/{organization_id}/sources/{source_id}". + ``organizations/{organization_id}/sources/{source_id}`` resource_name (str): For findings on Google Cloud resources, the full resource name of the Google Cloud resource this finding is for. See: @@ -884,6 +1035,7 @@ class State(proto.Enum): non-issue or otherwise addressed and is no longer active. """ + STATE_UNSPECIFIED = 0 ACTIVE = 1 INACTIVE = 2 @@ -970,6 +1122,7 @@ class Severity(proto.Enum): access to an environment but is not able to access data, execute code, or create resources. """ + SEVERITY_UNSPECIFIED = 0 CRITICAL = 1 HIGH = 2 @@ -1001,7 +1154,12 @@ class FindingClass(proto.Enum): POSTURE_VIOLATION (6): Describes a potential security risk due to a change in the security posture. + TOXIC_COMBINATION (7): + Describes a combination of security issues + that represent a more severe security problem + when taken together. """ + FINDING_CLASS_UNSPECIFIED = 0 THREAT = 1 VULNERABILITY = 2 @@ -1009,6 +1167,7 @@ class FindingClass(proto.Enum): OBSERVATION = 4 SCC_ERROR = 5 POSTURE_VIOLATION = 6 + TOXIC_COMBINATION = 7 name: str = proto.Field( proto.STRING, @@ -1133,9 +1292,9 @@ class EffectiveEventThreatDetectionCustomModule(proto.Message): Its format is: - - "organizations/{organization}/locations/{location}/effectiveEventThreatDetectionCustomModules/{effective_event_threat_detection_custom_module}". - - "folders/{folder}/locations/{location}/effectiveEventThreatDetectionCustomModules/{effective_event_threat_detection_custom_module}". - - "projects/{project}/locations/{location}/effectiveEventThreatDetectionCustomModules/{effective_event_threat_detection_custom_module}". + - ``organizations/{organization}/locations/{location}/effectiveEventThreatDetectionCustomModules/{effective_event_threat_detection_custom_module}``. + - ``folders/{folder}/locations/{location}/effectiveEventThreatDetectionCustomModules/{effective_event_threat_detection_custom_module}``. + - ``projects/{project}/locations/{location}/effectiveEventThreatDetectionCustomModules/{effective_event_threat_detection_custom_module}``. config (google.protobuf.struct_pb2.Struct): Output only. Config for the effective module. enablement_state (google.cloud.securitycentermanagement_v1.types.EffectiveEventThreatDetectionCustomModule.EnablementState): @@ -1162,6 +1321,7 @@ class EnablementState(proto.Enum): DISABLED (2): The module is disabled at the given level. """ + ENABLEMENT_STATE_UNSPECIFIED = 0 ENABLED = 1 DISABLED = 2 @@ -1200,12 +1360,11 @@ class ListEffectiveEventThreatDetectionCustomModulesRequest(proto.Message): Attributes: parent (str): - Required. Name of parent to list effective - custom modules. 
Its format is - "organizations/{organization}/locations/{location}", - "folders/{folder}/locations/{location}", - or - "projects/{project}/locations/{location}". + Required. Name of parent to list effective custom modules. + Its format is + ``organizations/{organization}/locations/{location}``, + ``folders/{folder}/locations/{location}``, or + ``projects/{project}/locations/{location}`` page_size (int): Optional. The maximum number of results to return in a single response. Default is 10, @@ -1269,9 +1428,9 @@ class GetEffectiveEventThreatDetectionCustomModuleRequest(proto.Message): Its format is: - - "organizations/{organization}/locations/{location}/effectiveEventThreatDetectionCustomModules/{effective_event_threat_detection_custom_module}". - - "folders/{folder}/locations/{location}/effectiveEventThreatDetectionCustomModules/{effective_event_threat_detection_custom_module}". - - "projects/{project}/locations/{location}/effectiveEventThreatDetectionCustomModules/{effective_event_threat_detection_custom_module}". + - ``organizations/{organization}/locations/{location}/effectiveEventThreatDetectionCustomModules/{effective_event_threat_detection_custom_module}``. + - ``folders/{folder}/locations/{location}/effectiveEventThreatDetectionCustomModules/{effective_event_threat_detection_custom_module}``. + - ``projects/{project}/locations/{location}/effectiveEventThreatDetectionCustomModules/{effective_event_threat_detection_custom_module}``. """ name: str = proto.Field( @@ -1292,9 +1451,9 @@ class EventThreatDetectionCustomModule(proto.Message): Its format is: - - "organizations/{organization}/locations/{location}/eventThreatDetectionCustomModules/{event_threat_detection_custom_module}". - - "folders/{folder}/locations/{location}/eventThreatDetectionCustomModules/{event_threat_detection_custom_module}". - - "projects/{project}/locations/{location}/eventThreatDetectionCustomModules/{event_threat_detection_custom_module}". + - ``organizations/{organization}/locations/{location}/eventThreatDetectionCustomModules/{event_threat_detection_custom_module}``. + - ``folders/{folder}/locations/{location}/eventThreatDetectionCustomModules/{event_threat_detection_custom_module}``. + - ``projects/{project}/locations/{location}/eventThreatDetectionCustomModules/{event_threat_detection_custom_module}``. config (google.protobuf.struct_pb2.Struct): Optional. Config for the module. For the resident module, its config value is defined at @@ -1345,6 +1504,7 @@ class EnablementState(proto.Enum): parent) to the INHERITED state will result in an error. """ + ENABLEMENT_STATE_UNSPECIFIED = 0 ENABLED = 1 DISABLED = 2 @@ -1397,12 +1557,10 @@ class ListEventThreatDetectionCustomModulesRequest(proto.Message): Attributes: parent (str): - Required. Name of parent to list custom - modules. Its format is - "organizations/{organization}/locations/{location}", - "folders/{folder}/locations/{location}", - or - "projects/{project}/locations/{location}". + Required. Name of parent to list custom modules. Its format + is ``organizations/{organization}/locations/{location}``, + ``folders/{folder}/locations/{location}``, or + ``projects/{project}/locations/{location}`` page_size (int): Optional. The maximum number of modules to return. The service may return fewer than this @@ -1468,12 +1626,10 @@ class ListDescendantEventThreatDetectionCustomModulesRequest(proto.Message): Attributes: parent (str): - Required. Name of parent to list custom - modules. 
Its format is - "organizations/{organization}/locations/{location}", - "folders/{folder}/locations/{location}", - or - "projects/{project}/locations/{location}". + Required. Name of parent to list custom modules. Its format + is ``organizations/{organization}/locations/{location}``, + ``folders/{folder}/locations/{location}``, or + ``projects/{project}/locations/{location}`` page_size (int): Optional. The maximum number of modules to return. The service may return fewer than this @@ -1537,9 +1693,9 @@ class GetEventThreatDetectionCustomModuleRequest(proto.Message): Its format is: - - "organizations/{organization}/locations/{location}/eventThreatDetectionCustomModules/{event_threat_detection_custom_module}". - - "folders/{folder}/locations/{location}/eventThreatDetectionCustomModules/{event_threat_detection_custom_module}". - - "projects/{project}/locations/{location}/eventThreatDetectionCustomModules/{event_threat_detection_custom_module}". + - ``organizations/{organization}/locations/{location}/eventThreatDetectionCustomModules/{event_threat_detection_custom_module}``. + - ``folders/{folder}/locations/{location}/eventThreatDetectionCustomModules/{event_threat_detection_custom_module}``. + - ``projects/{project}/locations/{location}/eventThreatDetectionCustomModules/{event_threat_detection_custom_module}``. """ name: str = proto.Field( @@ -1553,12 +1709,10 @@ class CreateEventThreatDetectionCustomModuleRequest(proto.Message): Attributes: parent (str): - Required. Name of parent for the module. Its - format is - "organizations/{organization}/locations/{location}", - "folders/{folder}/locations/{location}", - or - "projects/{project}/locations/{location}". + Required. Name of parent for the module. Its format is + ``organizations/{organization}/locations/{location}``, + ``folders/{folder}/locations/{location}``, or + ``projects/{project}/locations/{location}`` event_threat_detection_custom_module (google.cloud.securitycentermanagement_v1.types.EventThreatDetectionCustomModule): Required. The module to create. The event_threat_detection_custom_module.name will be ignored @@ -1570,10 +1724,9 @@ class CreateEventThreatDetectionCustomModuleRequest(proto.Message): indicates the request is valid while an error response indicates the request is invalid. Note that a subsequent request to actually create the - module could still fail because 1. the state - could have changed (e.g. IAM permission lost) or - 2. A failure occurred during creation of the - module. + module could still fail because + 1. the state could have changed (e.g. IAM permission lost) or + 2. A failure occurred during creation of the module. """ parent: str = proto.Field( @@ -1613,10 +1766,9 @@ class UpdateEventThreatDetectionCustomModuleRequest(proto.Message): indicates the request is valid while an error response indicates the request is invalid. Note that a subsequent request to actually update the - module could still fail because 1. the state - could have changed (e.g. IAM permission lost) or - 2. A failure occurred while trying to update the - module. + module could still fail because + 1. the state could have changed (e.g. IAM permission lost) or + 2. A failure occurred while trying to update the module. """ update_mask: field_mask_pb2.FieldMask = proto.Field( @@ -1646,9 +1798,9 @@ class DeleteEventThreatDetectionCustomModuleRequest(proto.Message): Its format is: - - "organizations/{organization}/locations/{location}/eventThreatDetectionCustomModules/{event_threat_detection_custom_module}". 
- - "folders/{folder}/locations/{location}/eventThreatDetectionCustomModules/{event_threat_detection_custom_module}". - - "projects/{project}/locations/{location}/eventThreatDetectionCustomModules/{event_threat_detection_custom_module}". + - ``organizations/{organization}/locations/{location}/eventThreatDetectionCustomModules/{event_threat_detection_custom_module}``. + - ``folders/{folder}/locations/{location}/eventThreatDetectionCustomModules/{event_threat_detection_custom_module}``. + - ``projects/{project}/locations/{location}/eventThreatDetectionCustomModules/{event_threat_detection_custom_module}``. validate_only (bool): Optional. When set to true, only validations (including IAM checks) will done for the request @@ -1656,10 +1808,9 @@ class DeleteEventThreatDetectionCustomModuleRequest(proto.Message): indicates the request is valid while an error response indicates the request is invalid. Note that a subsequent request to actually delete the - module could still fail because 1. the state - could have changed (e.g. IAM permission lost) or - 2. A failure occurred while trying to delete the - module. + module could still fail because + 1. the state could have changed (e.g. IAM permission lost) or + 2. A failure occurred while trying to delete the module. """ name: str = proto.Field( @@ -1682,7 +1833,7 @@ class ValidateEventThreatDetectionCustomModuleRequest(proto.Message): Its format is: - - "organizations/{organization}/locations/{location}". + - ``organizations/{organization}/locations/{location}``. raw_text (str): Required. The raw text of the module's contents. Used to generate error messages. @@ -1794,4 +1945,160 @@ class Position(proto.Message): ) +class GetSecurityCenterServiceRequest(proto.Message): + r"""Request message for getting a Security Command Center + service. + + Attributes: + name (str): + Required. The Security Command Center service to retrieve. + + Formats: + + - organizations/{organization}/locations/{location}/securityCenterServices/{service} + - folders/{folder}/locations/{location}/securityCenterServices/{service} + - projects/{project}/locations/{location}/securityCenterServices/{service} + + The possible values for id {service} are: + + - container-threat-detection + - event-threat-detection + - security-health-analytics + - vm-threat-detection + - web-security-scanner + show_eligible_modules_only (bool): + Flag that, when set, will be used to filter + the ModuleSettings that are in scope. The + default setting is that all modules will be + shown. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + show_eligible_modules_only: bool = proto.Field( + proto.BOOL, + number=2, + ) + + +class ListSecurityCenterServicesRequest(proto.Message): + r"""Request message for listing Security Command Center services. + + Attributes: + parent (str): + Required. The name of the parent to list Security Command + Center services. + + Formats: + + - organizations/{organization}/locations/{location} + - folders/{folder}/locations/{location} + - projects/{project}/locations/{location} + page_size (int): + Optional. The maximum number of results to + return in a single response. Default is 10, + minimum is 1, maximum is 1000. + page_token (str): + Optional. The value returned by the last call + indicating a continuation. + show_eligible_modules_only (bool): + Flag that, when set, will be used to filter + the ModuleSettings that are in scope. The + default setting is that all modules will be + shown. 
+ """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + show_eligible_modules_only: bool = proto.Field( + proto.BOOL, + number=4, + ) + + +class ListSecurityCenterServicesResponse(proto.Message): + r"""Response message for listing Security Command Center + services. + + Attributes: + security_center_services (MutableSequence[google.cloud.securitycentermanagement_v1.types.SecurityCenterService]): + The list of services. + next_page_token (str): + A token identifying a page of results the + server should return. + """ + + @property + def raw_page(self): + return self + + security_center_services: MutableSequence[ + "SecurityCenterService" + ] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="SecurityCenterService", + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + + +class UpdateSecurityCenterServiceRequest(proto.Message): + r"""Request message for updating a Security Command Center + service. + + Attributes: + security_center_service (google.cloud.securitycentermanagement_v1.types.SecurityCenterService): + Required. The updated service. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Required. The list of fields to be updated. Possible values: + + - "intended_enablement_state" + - "modules". + validate_only (bool): + Optional. When set to true, only validations + (including IAM checks) will be done for the + request (service will not be updated). An OK + response indicates that the request is valid, + while an error response indicates that the + request is invalid. Note that a subsequent + request to actually update the service could + still fail for one of the following reasons: + + - The state could have changed (e.g. IAM + permission lost). + - A failure occurred while trying to delete the + module. + """ + + security_center_service: "SecurityCenterService" = proto.Field( + proto.MESSAGE, + number=1, + message="SecurityCenterService", + ) + update_mask: field_mask_pb2.FieldMask = proto.Field( + proto.MESSAGE, + number=2, + message=field_mask_pb2.FieldMask, + ) + validate_only: bool = proto.Field( + proto.BOOL, + number=3, + ) + + __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-securitycentermanagement/samples/generated_samples/securitycentermanagement_v1_generated_security_center_management_get_security_center_service_async.py b/packages/google-cloud-securitycentermanagement/samples/generated_samples/securitycentermanagement_v1_generated_security_center_management_get_security_center_service_async.py new file mode 100644 index 000000000000..93c73c2212dc --- /dev/null +++ b/packages/google-cloud-securitycentermanagement/samples/generated_samples/securitycentermanagement_v1_generated_security_center_management_get_security_center_service_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetSecurityCenterService +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-securitycentermanagement + + +# [START securitycentermanagement_v1_generated_SecurityCenterManagement_GetSecurityCenterService_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import securitycentermanagement_v1 + + +async def sample_get_security_center_service(): + # Create a client + client = securitycentermanagement_v1.SecurityCenterManagementAsyncClient() + + # Initialize request argument(s) + request = securitycentermanagement_v1.GetSecurityCenterServiceRequest( + name="name_value", + ) + + # Make the request + response = await client.get_security_center_service(request=request) + + # Handle the response + print(response) + +# [END securitycentermanagement_v1_generated_SecurityCenterManagement_GetSecurityCenterService_async] diff --git a/packages/google-cloud-securitycentermanagement/samples/generated_samples/securitycentermanagement_v1_generated_security_center_management_get_security_center_service_sync.py b/packages/google-cloud-securitycentermanagement/samples/generated_samples/securitycentermanagement_v1_generated_security_center_management_get_security_center_service_sync.py new file mode 100644 index 000000000000..30ac10e393f7 --- /dev/null +++ b/packages/google-cloud-securitycentermanagement/samples/generated_samples/securitycentermanagement_v1_generated_security_center_management_get_security_center_service_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetSecurityCenterService +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-securitycentermanagement + + +# [START securitycentermanagement_v1_generated_SecurityCenterManagement_GetSecurityCenterService_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. 
+# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import securitycentermanagement_v1 + + +def sample_get_security_center_service(): + # Create a client + client = securitycentermanagement_v1.SecurityCenterManagementClient() + + # Initialize request argument(s) + request = securitycentermanagement_v1.GetSecurityCenterServiceRequest( + name="name_value", + ) + + # Make the request + response = client.get_security_center_service(request=request) + + # Handle the response + print(response) + +# [END securitycentermanagement_v1_generated_SecurityCenterManagement_GetSecurityCenterService_sync] diff --git a/packages/google-cloud-securitycentermanagement/samples/generated_samples/securitycentermanagement_v1_generated_security_center_management_list_security_center_services_async.py b/packages/google-cloud-securitycentermanagement/samples/generated_samples/securitycentermanagement_v1_generated_security_center_management_list_security_center_services_async.py new file mode 100644 index 000000000000..9d075d51faf3 --- /dev/null +++ b/packages/google-cloud-securitycentermanagement/samples/generated_samples/securitycentermanagement_v1_generated_security_center_management_list_security_center_services_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListSecurityCenterServices +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-securitycentermanagement + + +# [START securitycentermanagement_v1_generated_SecurityCenterManagement_ListSecurityCenterServices_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import securitycentermanagement_v1 + + +async def sample_list_security_center_services(): + # Create a client + client = securitycentermanagement_v1.SecurityCenterManagementAsyncClient() + + # Initialize request argument(s) + request = securitycentermanagement_v1.ListSecurityCenterServicesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_security_center_services(request=request) + + # Handle the response + async for response in page_result: + print(response) + +# [END securitycentermanagement_v1_generated_SecurityCenterManagement_ListSecurityCenterServices_async] diff --git a/packages/google-cloud-securitycentermanagement/samples/generated_samples/securitycentermanagement_v1_generated_security_center_management_list_security_center_services_sync.py b/packages/google-cloud-securitycentermanagement/samples/generated_samples/securitycentermanagement_v1_generated_security_center_management_list_security_center_services_sync.py new file mode 100644 index 000000000000..3126f08db8d7 --- /dev/null +++ b/packages/google-cloud-securitycentermanagement/samples/generated_samples/securitycentermanagement_v1_generated_security_center_management_list_security_center_services_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListSecurityCenterServices +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-securitycentermanagement + + +# [START securitycentermanagement_v1_generated_SecurityCenterManagement_ListSecurityCenterServices_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import securitycentermanagement_v1 + + +def sample_list_security_center_services(): + # Create a client + client = securitycentermanagement_v1.SecurityCenterManagementClient() + + # Initialize request argument(s) + request = securitycentermanagement_v1.ListSecurityCenterServicesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_security_center_services(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END securitycentermanagement_v1_generated_SecurityCenterManagement_ListSecurityCenterServices_sync] diff --git a/packages/google-cloud-securitycentermanagement/samples/generated_samples/securitycentermanagement_v1_generated_security_center_management_update_security_center_service_async.py b/packages/google-cloud-securitycentermanagement/samples/generated_samples/securitycentermanagement_v1_generated_security_center_management_update_security_center_service_async.py new file mode 100644 index 000000000000..700624c7a5d2 --- /dev/null +++ b/packages/google-cloud-securitycentermanagement/samples/generated_samples/securitycentermanagement_v1_generated_security_center_management_update_security_center_service_async.py @@ -0,0 +1,51 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateSecurityCenterService +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-securitycentermanagement + + +# [START securitycentermanagement_v1_generated_SecurityCenterManagement_UpdateSecurityCenterService_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import securitycentermanagement_v1 + + +async def sample_update_security_center_service(): + # Create a client + client = securitycentermanagement_v1.SecurityCenterManagementAsyncClient() + + # Initialize request argument(s) + request = securitycentermanagement_v1.UpdateSecurityCenterServiceRequest( + ) + + # Make the request + response = await client.update_security_center_service(request=request) + + # Handle the response + print(response) + +# [END securitycentermanagement_v1_generated_SecurityCenterManagement_UpdateSecurityCenterService_async] diff --git a/packages/google-cloud-securitycentermanagement/samples/generated_samples/securitycentermanagement_v1_generated_security_center_management_update_security_center_service_sync.py b/packages/google-cloud-securitycentermanagement/samples/generated_samples/securitycentermanagement_v1_generated_security_center_management_update_security_center_service_sync.py new file mode 100644 index 000000000000..be7f13462e91 --- /dev/null +++ b/packages/google-cloud-securitycentermanagement/samples/generated_samples/securitycentermanagement_v1_generated_security_center_management_update_security_center_service_sync.py @@ -0,0 +1,51 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateSecurityCenterService +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-securitycentermanagement + + +# [START securitycentermanagement_v1_generated_SecurityCenterManagement_UpdateSecurityCenterService_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import securitycentermanagement_v1 + + +def sample_update_security_center_service(): + # Create a client + client = securitycentermanagement_v1.SecurityCenterManagementClient() + + # Initialize request argument(s) + request = securitycentermanagement_v1.UpdateSecurityCenterServiceRequest( + ) + + # Make the request + response = client.update_security_center_service(request=request) + + # Handle the response + print(response) + +# [END securitycentermanagement_v1_generated_SecurityCenterManagement_UpdateSecurityCenterService_sync] diff --git a/packages/google-cloud-securitycentermanagement/samples/generated_samples/snippet_metadata_google.cloud.securitycentermanagement.v1.json b/packages/google-cloud-securitycentermanagement/samples/generated_samples/snippet_metadata_google.cloud.securitycentermanagement.v1.json index 8e50d8fef945..2bd560d49ab1 100644 --- a/packages/google-cloud-securitycentermanagement/samples/generated_samples/snippet_metadata_google.cloud.securitycentermanagement.v1.json +++ b/packages/google-cloud-securitycentermanagement/samples/generated_samples/snippet_metadata_google.cloud.securitycentermanagement.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-securitycentermanagement", - "version": "0.1.0" + "version": "0.1.12" }, "snippets": [ { @@ -1142,6 +1142,167 @@ ], "title": "securitycentermanagement_v1_generated_security_center_management_get_event_threat_detection_custom_module_sync.py" }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.securitycentermanagement_v1.SecurityCenterManagementAsyncClient", + "shortName": "SecurityCenterManagementAsyncClient" + }, + "fullName": "google.cloud.securitycentermanagement_v1.SecurityCenterManagementAsyncClient.get_security_center_service", + "method": { + "fullName": "google.cloud.securitycentermanagement.v1.SecurityCenterManagement.GetSecurityCenterService", + "service": { + "fullName": "google.cloud.securitycentermanagement.v1.SecurityCenterManagement", + "shortName": "SecurityCenterManagement" + }, + "shortName": "GetSecurityCenterService" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.securitycentermanagement_v1.types.GetSecurityCenterServiceRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.securitycentermanagement_v1.types.SecurityCenterService", + "shortName": "get_security_center_service" + }, + "description": "Sample for GetSecurityCenterService", + "file": "securitycentermanagement_v1_generated_security_center_management_get_security_center_service_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "securitycentermanagement_v1_generated_SecurityCenterManagement_GetSecurityCenterService_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + 
"end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "securitycentermanagement_v1_generated_security_center_management_get_security_center_service_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.securitycentermanagement_v1.SecurityCenterManagementClient", + "shortName": "SecurityCenterManagementClient" + }, + "fullName": "google.cloud.securitycentermanagement_v1.SecurityCenterManagementClient.get_security_center_service", + "method": { + "fullName": "google.cloud.securitycentermanagement.v1.SecurityCenterManagement.GetSecurityCenterService", + "service": { + "fullName": "google.cloud.securitycentermanagement.v1.SecurityCenterManagement", + "shortName": "SecurityCenterManagement" + }, + "shortName": "GetSecurityCenterService" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.securitycentermanagement_v1.types.GetSecurityCenterServiceRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.securitycentermanagement_v1.types.SecurityCenterService", + "shortName": "get_security_center_service" + }, + "description": "Sample for GetSecurityCenterService", + "file": "securitycentermanagement_v1_generated_security_center_management_get_security_center_service_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "securitycentermanagement_v1_generated_SecurityCenterManagement_GetSecurityCenterService_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "securitycentermanagement_v1_generated_security_center_management_get_security_center_service_sync.py" + }, { "canonical": true, "clientMethod": { @@ -2108,6 +2269,167 @@ ], "title": "securitycentermanagement_v1_generated_security_center_management_list_event_threat_detection_custom_modules_sync.py" }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.securitycentermanagement_v1.SecurityCenterManagementAsyncClient", + "shortName": "SecurityCenterManagementAsyncClient" + }, + "fullName": "google.cloud.securitycentermanagement_v1.SecurityCenterManagementAsyncClient.list_security_center_services", + "method": { + "fullName": "google.cloud.securitycentermanagement.v1.SecurityCenterManagement.ListSecurityCenterServices", + "service": { + "fullName": "google.cloud.securitycentermanagement.v1.SecurityCenterManagement", + "shortName": "SecurityCenterManagement" + }, + "shortName": "ListSecurityCenterServices" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.securitycentermanagement_v1.types.ListSecurityCenterServicesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": 
"google.cloud.securitycentermanagement_v1.services.security_center_management.pagers.ListSecurityCenterServicesAsyncPager", + "shortName": "list_security_center_services" + }, + "description": "Sample for ListSecurityCenterServices", + "file": "securitycentermanagement_v1_generated_security_center_management_list_security_center_services_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "securitycentermanagement_v1_generated_SecurityCenterManagement_ListSecurityCenterServices_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "securitycentermanagement_v1_generated_security_center_management_list_security_center_services_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.securitycentermanagement_v1.SecurityCenterManagementClient", + "shortName": "SecurityCenterManagementClient" + }, + "fullName": "google.cloud.securitycentermanagement_v1.SecurityCenterManagementClient.list_security_center_services", + "method": { + "fullName": "google.cloud.securitycentermanagement.v1.SecurityCenterManagement.ListSecurityCenterServices", + "service": { + "fullName": "google.cloud.securitycentermanagement.v1.SecurityCenterManagement", + "shortName": "SecurityCenterManagement" + }, + "shortName": "ListSecurityCenterServices" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.securitycentermanagement_v1.types.ListSecurityCenterServicesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.securitycentermanagement_v1.services.security_center_management.pagers.ListSecurityCenterServicesPager", + "shortName": "list_security_center_services" + }, + "description": "Sample for ListSecurityCenterServices", + "file": "securitycentermanagement_v1_generated_security_center_management_list_security_center_services_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "securitycentermanagement_v1_generated_SecurityCenterManagement_ListSecurityCenterServices_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "securitycentermanagement_v1_generated_security_center_management_list_security_center_services_sync.py" + }, { "canonical": true, "clientMethod": { @@ -2615,6 +2937,175 @@ ], "title": "securitycentermanagement_v1_generated_security_center_management_update_event_threat_detection_custom_module_sync.py" }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.securitycentermanagement_v1.SecurityCenterManagementAsyncClient", + "shortName": "SecurityCenterManagementAsyncClient" + }, + "fullName": 
"google.cloud.securitycentermanagement_v1.SecurityCenterManagementAsyncClient.update_security_center_service", + "method": { + "fullName": "google.cloud.securitycentermanagement.v1.SecurityCenterManagement.UpdateSecurityCenterService", + "service": { + "fullName": "google.cloud.securitycentermanagement.v1.SecurityCenterManagement", + "shortName": "SecurityCenterManagement" + }, + "shortName": "UpdateSecurityCenterService" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.securitycentermanagement_v1.types.UpdateSecurityCenterServiceRequest" + }, + { + "name": "security_center_service", + "type": "google.cloud.securitycentermanagement_v1.types.SecurityCenterService" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.securitycentermanagement_v1.types.SecurityCenterService", + "shortName": "update_security_center_service" + }, + "description": "Sample for UpdateSecurityCenterService", + "file": "securitycentermanagement_v1_generated_security_center_management_update_security_center_service_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "securitycentermanagement_v1_generated_SecurityCenterManagement_UpdateSecurityCenterService_async", + "segments": [ + { + "end": 50, + "start": 27, + "type": "FULL" + }, + { + "end": 50, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 51, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "securitycentermanagement_v1_generated_security_center_management_update_security_center_service_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.securitycentermanagement_v1.SecurityCenterManagementClient", + "shortName": "SecurityCenterManagementClient" + }, + "fullName": "google.cloud.securitycentermanagement_v1.SecurityCenterManagementClient.update_security_center_service", + "method": { + "fullName": "google.cloud.securitycentermanagement.v1.SecurityCenterManagement.UpdateSecurityCenterService", + "service": { + "fullName": "google.cloud.securitycentermanagement.v1.SecurityCenterManagement", + "shortName": "SecurityCenterManagement" + }, + "shortName": "UpdateSecurityCenterService" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.securitycentermanagement_v1.types.UpdateSecurityCenterServiceRequest" + }, + { + "name": "security_center_service", + "type": "google.cloud.securitycentermanagement_v1.types.SecurityCenterService" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.securitycentermanagement_v1.types.SecurityCenterService", + "shortName": "update_security_center_service" + }, + "description": "Sample for UpdateSecurityCenterService", + "file": "securitycentermanagement_v1_generated_security_center_management_update_security_center_service_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + 
"regionTag": "securitycentermanagement_v1_generated_SecurityCenterManagement_UpdateSecurityCenterService_sync", + "segments": [ + { + "end": 50, + "start": 27, + "type": "FULL" + }, + { + "end": 50, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 51, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "securitycentermanagement_v1_generated_security_center_management_update_security_center_service_sync.py" + }, { "canonical": true, "clientMethod": { diff --git a/packages/google-cloud-securitycentermanagement/scripts/fixup_securitycentermanagement_v1_keywords.py b/packages/google-cloud-securitycentermanagement/scripts/fixup_securitycentermanagement_v1_keywords.py index 71591f8a08b6..c2c2a955cca7 100644 --- a/packages/google-cloud-securitycentermanagement/scripts/fixup_securitycentermanagement_v1_keywords.py +++ b/packages/google-cloud-securitycentermanagement/scripts/fixup_securitycentermanagement_v1_keywords.py @@ -46,15 +46,18 @@ class securitycentermanagementCallTransformer(cst.CSTTransformer): 'get_effective_event_threat_detection_custom_module': ('name', ), 'get_effective_security_health_analytics_custom_module': ('name', ), 'get_event_threat_detection_custom_module': ('name', ), + 'get_security_center_service': ('name', 'show_eligible_modules_only', ), 'get_security_health_analytics_custom_module': ('name', ), 'list_descendant_event_threat_detection_custom_modules': ('parent', 'page_size', 'page_token', ), 'list_descendant_security_health_analytics_custom_modules': ('parent', 'page_size', 'page_token', ), 'list_effective_event_threat_detection_custom_modules': ('parent', 'page_size', 'page_token', ), 'list_effective_security_health_analytics_custom_modules': ('parent', 'page_size', 'page_token', ), 'list_event_threat_detection_custom_modules': ('parent', 'page_size', 'page_token', ), + 'list_security_center_services': ('parent', 'page_size', 'page_token', 'show_eligible_modules_only', ), 'list_security_health_analytics_custom_modules': ('parent', 'page_size', 'page_token', ), 'simulate_security_health_analytics_custom_module': ('parent', 'custom_config', 'resource', ), 'update_event_threat_detection_custom_module': ('update_mask', 'event_threat_detection_custom_module', 'validate_only', ), + 'update_security_center_service': ('security_center_service', 'update_mask', 'validate_only', ), 'update_security_health_analytics_custom_module': ('update_mask', 'security_health_analytics_custom_module', 'validate_only', ), 'validate_event_threat_detection_custom_module': ('parent', 'raw_text', 'type_', ), } diff --git a/packages/google-cloud-securitycentermanagement/tests/unit/gapic/securitycentermanagement_v1/test_security_center_management.py b/packages/google-cloud-securitycentermanagement/tests/unit/gapic/securitycentermanagement_v1/test_security_center_management.py index ddcb2d73df08..4c10a35d4b18 100644 --- a/packages/google-cloud-securitycentermanagement/tests/unit/gapic/securitycentermanagement_v1/test_security_center_management.py +++ b/packages/google-cloud-securitycentermanagement/tests/unit/gapic/securitycentermanagement_v1/test_security_center_management.py @@ -1682,15 +1682,15 @@ def test_list_effective_security_health_analytics_custom_modules_pager( RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata 
= tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_effective_security_health_analytics_custom_modules( request={} ) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -2784,13 +2784,13 @@ def test_list_security_health_analytics_custom_modules_pager( RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_security_health_analytics_custom_modules(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -3443,15 +3443,15 @@ def test_list_descendant_security_health_analytics_custom_modules_pager( RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_descendant_security_health_analytics_custom_modules( request={} ) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -6383,13 +6383,13 @@ def test_list_effective_event_threat_detection_custom_modules_pager( RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_effective_event_threat_detection_custom_modules(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -7479,13 +7479,13 @@ def test_list_event_threat_detection_custom_modules_pager(transport_name: str = RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_event_threat_detection_custom_modules(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -8124,13 +8124,13 @@ def test_list_descendant_event_threat_detection_custom_modules_pager( RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_descendant_event_threat_detection_custom_modules(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -10424,6 +10424,1463 @@ async def test_validate_event_threat_detection_custom_module_field_headers_async ) in kw["metadata"] +@pytest.mark.parametrize( + "request_type", + [ + security_center_management.GetSecurityCenterServiceRequest, + dict, + ], +) +def test_get_security_center_service(request_type, transport: str = "grpc"): + client = SecurityCenterManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
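+    # Patching "__call__" on the transport's wrapped RPC keeps the test
+    # hermetic: no gRPC channel traffic occurs, and the canned
+    # SecurityCenterService configured below is returned directly by the stub.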
+ with mock.patch.object( + type(client.transport.get_security_center_service), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = security_center_management.SecurityCenterService( + name="name_value", + intended_enablement_state=security_center_management.SecurityCenterService.EnablementState.INHERITED, + effective_enablement_state=security_center_management.SecurityCenterService.EnablementState.INHERITED, + ) + response = client.get_security_center_service(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = security_center_management.GetSecurityCenterServiceRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, security_center_management.SecurityCenterService) + assert response.name == "name_value" + assert ( + response.intended_enablement_state + == security_center_management.SecurityCenterService.EnablementState.INHERITED + ) + assert ( + response.effective_enablement_state + == security_center_management.SecurityCenterService.EnablementState.INHERITED + ) + + +def test_get_security_center_service_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = SecurityCenterManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_security_center_service), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.get_security_center_service() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == security_center_management.GetSecurityCenterServiceRequest() + + +def test_get_security_center_service_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = SecurityCenterManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = security_center_management.GetSecurityCenterServiceRequest( + name="name_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_security_center_service), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.get_security_center_service(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == security_center_management.GetSecurityCenterServiceRequest( + name="name_value", + ) + + +def test_get_security_center_service_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = SecurityCenterManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.get_security_center_service + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.get_security_center_service + ] = mock_rpc + request = {} + client.get_security_center_service(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.get_security_center_service(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_get_security_center_service_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = SecurityCenterManagementAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_security_center_service), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
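+        # The async client awaits the transport call, so the canned response is
+        # wrapped in grpc_helpers_async.FakeUnaryUnaryCall to behave like an
+        # awaitable unary-unary gRPC call.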
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + security_center_management.SecurityCenterService( + name="name_value", + intended_enablement_state=security_center_management.SecurityCenterService.EnablementState.INHERITED, + effective_enablement_state=security_center_management.SecurityCenterService.EnablementState.INHERITED, + ) + ) + response = await client.get_security_center_service() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == security_center_management.GetSecurityCenterServiceRequest() + + +@pytest.mark.asyncio +async def test_get_security_center_service_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = SecurityCenterManagementAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.get_security_center_service + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[ + client._client._transport.get_security_center_service + ] = mock_object + + request = {} + await client.get_security_center_service(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + await client.get_security_center_service(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + +@pytest.mark.asyncio +async def test_get_security_center_service_async( + transport: str = "grpc_asyncio", + request_type=security_center_management.GetSecurityCenterServiceRequest, +): + client = SecurityCenterManagementAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_security_center_service), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + security_center_management.SecurityCenterService( + name="name_value", + intended_enablement_state=security_center_management.SecurityCenterService.EnablementState.INHERITED, + effective_enablement_state=security_center_management.SecurityCenterService.EnablementState.INHERITED, + ) + ) + response = await client.get_security_center_service(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = security_center_management.GetSecurityCenterServiceRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, security_center_management.SecurityCenterService) + assert response.name == "name_value" + assert ( + response.intended_enablement_state + == security_center_management.SecurityCenterService.EnablementState.INHERITED + ) + assert ( + response.effective_enablement_state + == security_center_management.SecurityCenterService.EnablementState.INHERITED + ) + + +@pytest.mark.asyncio +async def test_get_security_center_service_async_from_dict(): + await test_get_security_center_service_async(request_type=dict) + + +def test_get_security_center_service_field_headers(): + client = SecurityCenterManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = security_center_management.GetSecurityCenterServiceRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_security_center_service), "__call__" + ) as call: + call.return_value = security_center_management.SecurityCenterService() + client.get_security_center_service(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_security_center_service_field_headers_async(): + client = SecurityCenterManagementAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = security_center_management.GetSecurityCenterServiceRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_security_center_service), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + security_center_management.SecurityCenterService() + ) + await client.get_security_center_service(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_get_security_center_service_flattened(): + client = SecurityCenterManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_security_center_service), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = security_center_management.SecurityCenterService() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_security_center_service( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
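+        # The flattened name= keyword is merged into a
+        # GetSecurityCenterServiceRequest by the client before the transport
+        # stub is invoked, so the captured args[0] is a populated request.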
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_get_security_center_service_flattened_error(): + client = SecurityCenterManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_security_center_service( + security_center_management.GetSecurityCenterServiceRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_get_security_center_service_flattened_async(): + client = SecurityCenterManagementAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_security_center_service), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = security_center_management.SecurityCenterService() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + security_center_management.SecurityCenterService() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_security_center_service( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_get_security_center_service_flattened_error_async(): + client = SecurityCenterManagementAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.get_security_center_service( + security_center_management.GetSecurityCenterServiceRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + security_center_management.ListSecurityCenterServicesRequest, + dict, + ], +) +def test_list_security_center_services(request_type, transport: str = "grpc"): + client = SecurityCenterManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_security_center_services), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = ( + security_center_management.ListSecurityCenterServicesResponse( + next_page_token="next_page_token_value", + ) + ) + response = client.list_security_center_services(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = security_center_management.ListSecurityCenterServicesRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. 
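+    # List RPCs are surfaced through a pager that lazily fetches additional
+    # pages as the caller iterates, rather than returning the raw
+    # ListSecurityCenterServicesResponse directly.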
+ assert isinstance(response, pagers.ListSecurityCenterServicesPager) + assert response.next_page_token == "next_page_token_value" + + +def test_list_security_center_services_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = SecurityCenterManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_security_center_services), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.list_security_center_services() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == security_center_management.ListSecurityCenterServicesRequest() + + +def test_list_security_center_services_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = SecurityCenterManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = security_center_management.ListSecurityCenterServicesRequest( + parent="parent_value", + page_token="page_token_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_security_center_services), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.list_security_center_services(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == security_center_management.ListSecurityCenterServicesRequest( + parent="parent_value", + page_token="page_token_value", + ) + + +def test_list_security_center_services_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = SecurityCenterManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.list_security_center_services + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.list_security_center_services + ] = mock_rpc + request = {} + client.list_security_center_services(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.list_security_center_services(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_list_security_center_services_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = SecurityCenterManagementAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_security_center_services), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + security_center_management.ListSecurityCenterServicesResponse( + next_page_token="next_page_token_value", + ) + ) + response = await client.list_security_center_services() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == security_center_management.ListSecurityCenterServicesRequest() + + +@pytest.mark.asyncio +async def test_list_security_center_services_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = SecurityCenterManagementAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.list_security_center_services + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[ + client._client._transport.list_security_center_services + ] = mock_object + + request = {} + await client.list_security_center_services(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + await client.list_security_center_services(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + +@pytest.mark.asyncio +async def test_list_security_center_services_async( + transport: str = "grpc_asyncio", + request_type=security_center_management.ListSecurityCenterServicesRequest, +): + client = SecurityCenterManagementAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_security_center_services), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
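+        # (Wrapping the response in FakeUnaryUnaryCall lets the mocked stub be
+        # awaited, roughly standing in for the call object a real grpc.aio
+        # unary-unary stub would return.)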
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + security_center_management.ListSecurityCenterServicesResponse( + next_page_token="next_page_token_value", + ) + ) + response = await client.list_security_center_services(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = security_center_management.ListSecurityCenterServicesRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListSecurityCenterServicesAsyncPager) + assert response.next_page_token == "next_page_token_value" + + +@pytest.mark.asyncio +async def test_list_security_center_services_async_from_dict(): + await test_list_security_center_services_async(request_type=dict) + + +def test_list_security_center_services_field_headers(): + client = SecurityCenterManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = security_center_management.ListSecurityCenterServicesRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_security_center_services), "__call__" + ) as call: + call.return_value = ( + security_center_management.ListSecurityCenterServicesResponse() + ) + client.list_security_center_services(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_security_center_services_field_headers_async(): + client = SecurityCenterManagementAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = security_center_management.ListSecurityCenterServicesRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_security_center_services), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + security_center_management.ListSecurityCenterServicesResponse() + ) + await client.list_security_center_services(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_list_security_center_services_flattened(): + client = SecurityCenterManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_security_center_services), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = ( + security_center_management.ListSecurityCenterServicesResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_security_center_services( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +def test_list_security_center_services_flattened_error(): + client = SecurityCenterManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_security_center_services( + security_center_management.ListSecurityCenterServicesRequest(), + parent="parent_value", + ) + + +@pytest.mark.asyncio +async def test_list_security_center_services_flattened_async(): + client = SecurityCenterManagementAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_security_center_services), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = ( + security_center_management.ListSecurityCenterServicesResponse() + ) + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + security_center_management.ListSecurityCenterServicesResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_security_center_services( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_list_security_center_services_flattened_error_async(): + client = SecurityCenterManagementAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.list_security_center_services( + security_center_management.ListSecurityCenterServicesRequest(), + parent="parent_value", + ) + + +def test_list_security_center_services_pager(transport_name: str = "grpc"): + client = SecurityCenterManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_security_center_services), "__call__" + ) as call: + # Set the response to a series of pages. 
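+        # (Each element of side_effect is consumed by one page fetch; the
+        # trailing RuntimeError would only surface if the pager requested more
+        # pages than the four staged here.)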
+ call.side_effect = ( + security_center_management.ListSecurityCenterServicesResponse( + security_center_services=[ + security_center_management.SecurityCenterService(), + security_center_management.SecurityCenterService(), + security_center_management.SecurityCenterService(), + ], + next_page_token="abc", + ), + security_center_management.ListSecurityCenterServicesResponse( + security_center_services=[], + next_page_token="def", + ), + security_center_management.ListSecurityCenterServicesResponse( + security_center_services=[ + security_center_management.SecurityCenterService(), + ], + next_page_token="ghi", + ), + security_center_management.ListSecurityCenterServicesResponse( + security_center_services=[ + security_center_management.SecurityCenterService(), + security_center_management.SecurityCenterService(), + ], + ), + RuntimeError, + ) + + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + ) + pager = client.list_security_center_services(request={}) + + assert pager._metadata == expected_metadata + + results = list(pager) + assert len(results) == 6 + assert all( + isinstance(i, security_center_management.SecurityCenterService) + for i in results + ) + + +def test_list_security_center_services_pages(transport_name: str = "grpc"): + client = SecurityCenterManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_security_center_services), "__call__" + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + security_center_management.ListSecurityCenterServicesResponse( + security_center_services=[ + security_center_management.SecurityCenterService(), + security_center_management.SecurityCenterService(), + security_center_management.SecurityCenterService(), + ], + next_page_token="abc", + ), + security_center_management.ListSecurityCenterServicesResponse( + security_center_services=[], + next_page_token="def", + ), + security_center_management.ListSecurityCenterServicesResponse( + security_center_services=[ + security_center_management.SecurityCenterService(), + ], + next_page_token="ghi", + ), + security_center_management.ListSecurityCenterServicesResponse( + security_center_services=[ + security_center_management.SecurityCenterService(), + security_center_management.SecurityCenterService(), + ], + ), + RuntimeError, + ) + pages = list(client.list_security_center_services(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_list_security_center_services_async_pager(): + client = SecurityCenterManagementAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_security_center_services), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + security_center_management.ListSecurityCenterServicesResponse( + security_center_services=[ + security_center_management.SecurityCenterService(), + security_center_management.SecurityCenterService(), + security_center_management.SecurityCenterService(), + ], + next_page_token="abc", + ), + security_center_management.ListSecurityCenterServicesResponse( + security_center_services=[], + next_page_token="def", + ), + security_center_management.ListSecurityCenterServicesResponse( + security_center_services=[ + security_center_management.SecurityCenterService(), + ], + next_page_token="ghi", + ), + security_center_management.ListSecurityCenterServicesResponse( + security_center_services=[ + security_center_management.SecurityCenterService(), + security_center_management.SecurityCenterService(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_security_center_services( + request={}, + ) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all( + isinstance(i, security_center_management.SecurityCenterService) + for i in responses + ) + + +@pytest.mark.asyncio +async def test_list_security_center_services_async_pages(): + client = SecurityCenterManagementAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_security_center_services), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + security_center_management.ListSecurityCenterServicesResponse( + security_center_services=[ + security_center_management.SecurityCenterService(), + security_center_management.SecurityCenterService(), + security_center_management.SecurityCenterService(), + ], + next_page_token="abc", + ), + security_center_management.ListSecurityCenterServicesResponse( + security_center_services=[], + next_page_token="def", + ), + security_center_management.ListSecurityCenterServicesResponse( + security_center_services=[ + security_center_management.SecurityCenterService(), + ], + next_page_token="ghi", + ), + security_center_management.ListSecurityCenterServicesResponse( + security_center_services=[ + security_center_management.SecurityCenterService(), + security_center_management.SecurityCenterService(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_security_center_services(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + security_center_management.UpdateSecurityCenterServiceRequest, + dict, + ], +) +def test_update_security_center_service(request_type, transport: str = "grpc"): + client = SecurityCenterManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
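+    # (Outside of these mocked tests, an update is typically issued as the
+    # resource plus a FieldMask naming the fields to change.  A hypothetical
+    # sketch using the same flattened arguments exercised further below:
+    #
+    #     client.update_security_center_service(
+    #         security_center_service=security_center_management.SecurityCenterService(
+    #             name="name_value",
+    #         ),
+    #         update_mask=field_mask_pb2.FieldMask(
+    #             paths=["intended_enablement_state"]
+    #         ),
+    #     )
+    # )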
+ request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_security_center_service), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = security_center_management.SecurityCenterService( + name="name_value", + intended_enablement_state=security_center_management.SecurityCenterService.EnablementState.INHERITED, + effective_enablement_state=security_center_management.SecurityCenterService.EnablementState.INHERITED, + ) + response = client.update_security_center_service(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = security_center_management.UpdateSecurityCenterServiceRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, security_center_management.SecurityCenterService) + assert response.name == "name_value" + assert ( + response.intended_enablement_state + == security_center_management.SecurityCenterService.EnablementState.INHERITED + ) + assert ( + response.effective_enablement_state + == security_center_management.SecurityCenterService.EnablementState.INHERITED + ) + + +def test_update_security_center_service_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = SecurityCenterManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_security_center_service), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.update_security_center_service() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert ( + args[0] == security_center_management.UpdateSecurityCenterServiceRequest() + ) + + +def test_update_security_center_service_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = SecurityCenterManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = security_center_management.UpdateSecurityCenterServiceRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_security_center_service), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.update_security_center_service(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert ( + args[0] == security_center_management.UpdateSecurityCenterServiceRequest() + ) + + +def test_update_security_center_service_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = SecurityCenterManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.update_security_center_service + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.update_security_center_service + ] = mock_rpc + request = {} + client.update_security_center_service(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.update_security_center_service(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_update_security_center_service_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = SecurityCenterManagementAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_security_center_service), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + security_center_management.SecurityCenterService( + name="name_value", + intended_enablement_state=security_center_management.SecurityCenterService.EnablementState.INHERITED, + effective_enablement_state=security_center_management.SecurityCenterService.EnablementState.INHERITED, + ) + ) + response = await client.update_security_center_service() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert ( + args[0] == security_center_management.UpdateSecurityCenterServiceRequest() + ) + + +@pytest.mark.asyncio +async def test_update_security_center_service_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = SecurityCenterManagementAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.update_security_center_service + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[ + client._client._transport.update_security_center_service + ] = mock_object + + request = {} + await client.update_security_center_service(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + await client.update_security_center_service(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + +@pytest.mark.asyncio +async def test_update_security_center_service_async( + transport: str = "grpc_asyncio", + request_type=security_center_management.UpdateSecurityCenterServiceRequest, +): + client = SecurityCenterManagementAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_security_center_service), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + security_center_management.SecurityCenterService( + name="name_value", + intended_enablement_state=security_center_management.SecurityCenterService.EnablementState.INHERITED, + effective_enablement_state=security_center_management.SecurityCenterService.EnablementState.INHERITED, + ) + ) + response = await client.update_security_center_service(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = security_center_management.UpdateSecurityCenterServiceRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, security_center_management.SecurityCenterService) + assert response.name == "name_value" + assert ( + response.intended_enablement_state + == security_center_management.SecurityCenterService.EnablementState.INHERITED + ) + assert ( + response.effective_enablement_state + == security_center_management.SecurityCenterService.EnablementState.INHERITED + ) + + +@pytest.mark.asyncio +async def test_update_security_center_service_async_from_dict(): + await test_update_security_center_service_async(request_type=dict) + + +def test_update_security_center_service_field_headers(): + client = SecurityCenterManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = security_center_management.UpdateSecurityCenterServiceRequest() + + request.security_center_service.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_security_center_service), "__call__" + ) as call: + call.return_value = security_center_management.SecurityCenterService() + client.update_security_center_service(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "security_center_service.name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_update_security_center_service_field_headers_async(): + client = SecurityCenterManagementAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = security_center_management.UpdateSecurityCenterServiceRequest() + + request.security_center_service.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_security_center_service), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + security_center_management.SecurityCenterService() + ) + await client.update_security_center_service(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "security_center_service.name=name_value", + ) in kw["metadata"] + + +def test_update_security_center_service_flattened(): + client = SecurityCenterManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_security_center_service), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = security_center_management.SecurityCenterService() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
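+        # (The flattened keyword arguments are folded into the request message
+        # handed to the stub; the arg/mock_val checks below confirm that each
+        # keyword landed on the corresponding request field.)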
+ client.update_security_center_service( + security_center_service=security_center_management.SecurityCenterService( + name="name_value" + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].security_center_service + mock_val = security_center_management.SecurityCenterService(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + assert arg == mock_val + + +def test_update_security_center_service_flattened_error(): + client = SecurityCenterManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_security_center_service( + security_center_management.UpdateSecurityCenterServiceRequest(), + security_center_service=security_center_management.SecurityCenterService( + name="name_value" + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +@pytest.mark.asyncio +async def test_update_security_center_service_flattened_async(): + client = SecurityCenterManagementAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_security_center_service), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = security_center_management.SecurityCenterService() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + security_center_management.SecurityCenterService() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.update_security_center_service( + security_center_service=security_center_management.SecurityCenterService( + name="name_value" + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].security_center_service + mock_val = security_center_management.SecurityCenterService(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_update_security_center_service_flattened_error_async(): + client = SecurityCenterManagementAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
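+    # (Callers pick one style per call: either a prebuilt request,
+    #
+    #     await client.update_security_center_service(request=request_obj)
+    #
+    # or the flattened keywords,
+    #
+    #     await client.update_security_center_service(
+    #         security_center_service=svc, update_mask=mask
+    #     )
+    #
+    # where request_obj, svc and mask are placeholders.  Passing both at once
+    # is rejected, as the check below shows.)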
+ with pytest.raises(ValueError): + await client.update_security_center_service( + security_center_management.UpdateSecurityCenterServiceRequest(), + security_center_service=security_center_management.SecurityCenterService( + name="name_value" + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + @pytest.mark.parametrize( "request_type", [ @@ -10431,7 +11888,1225 @@ async def test_validate_event_threat_detection_custom_module_field_headers_async dict, ], ) -def test_list_effective_security_health_analytics_custom_modules_rest(request_type): +def test_list_effective_security_health_analytics_custom_modules_rest(request_type): + client = SecurityCenterManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = security_center_management.ListEffectiveSecurityHealthAnalyticsCustomModulesResponse( + next_page_token="next_page_token_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = security_center_management.ListEffectiveSecurityHealthAnalyticsCustomModulesResponse.pb( + return_value + ) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_effective_security_health_analytics_custom_modules( + request + ) + + # Establish that the response is the type that we expect. + assert isinstance( + response, pagers.ListEffectiveSecurityHealthAnalyticsCustomModulesPager + ) + assert response.next_page_token == "next_page_token_value" + + +def test_list_effective_security_health_analytics_custom_modules_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = SecurityCenterManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.list_effective_security_health_analytics_custom_modules + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.list_effective_security_health_analytics_custom_modules + ] = mock_rpc + + request = {} + client.list_effective_security_health_analytics_custom_modules(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.list_effective_security_health_analytics_custom_modules(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_list_effective_security_health_analytics_custom_modules_rest_required_fields( + request_type=security_center_management.ListEffectiveSecurityHealthAnalyticsCustomModulesRequest, +): + transport_class = transports.SecurityCenterManagementRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_effective_security_health_analytics_custom_modules._get_unset_required_fields( + jsonified_request + ) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_effective_security_health_analytics_custom_modules._get_unset_required_fields( + jsonified_request + ) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "page_size", + "page_token", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = SecurityCenterManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = ( + security_center_management.ListEffectiveSecurityHealthAnalyticsCustomModulesResponse() + ) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
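+            # (path_template.transcode normally matches the request against the
+            # method's http rule and returns the target uri, HTTP method and
+            # query params; stubbing it with the minimal dict below keeps the
+            # focus on the query parameters the transport would send.)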
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = security_center_management.ListEffectiveSecurityHealthAnalyticsCustomModulesResponse.pb( + return_value + ) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_effective_security_health_analytics_custom_modules( + request + ) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_effective_security_health_analytics_custom_modules_rest_unset_required_fields(): + transport = transports.SecurityCenterManagementRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.list_effective_security_health_analytics_custom_modules._get_unset_required_fields( + {} + ) + assert set(unset_fields) == ( + set( + ( + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_effective_security_health_analytics_custom_modules_rest_interceptors( + null_interceptor, +): + transport = transports.SecurityCenterManagementRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=( + None + if null_interceptor + else transports.SecurityCenterManagementRestInterceptor() + ), + ) + client = SecurityCenterManagementClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.SecurityCenterManagementRestInterceptor, + "post_list_effective_security_health_analytics_custom_modules", + ) as post, mock.patch.object( + transports.SecurityCenterManagementRestInterceptor, + "pre_list_effective_security_health_analytics_custom_modules", + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = security_center_management.ListEffectiveSecurityHealthAnalyticsCustomModulesRequest.pb( + security_center_management.ListEffectiveSecurityHealthAnalyticsCustomModulesRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = security_center_management.ListEffectiveSecurityHealthAnalyticsCustomModulesResponse.to_json( + security_center_management.ListEffectiveSecurityHealthAnalyticsCustomModulesResponse() + ) + + request = ( + security_center_management.ListEffectiveSecurityHealthAnalyticsCustomModulesRequest() + ) + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = ( + security_center_management.ListEffectiveSecurityHealthAnalyticsCustomModulesResponse() + ) + + client.list_effective_security_health_analytics_custom_modules( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_effective_security_health_analytics_custom_modules_rest_bad_request( + transport: str = "rest", + 
request_type=security_center_management.ListEffectiveSecurityHealthAnalyticsCustomModulesRequest, +): + client = SecurityCenterManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_effective_security_health_analytics_custom_modules(request) + + +def test_list_effective_security_health_analytics_custom_modules_rest_flattened(): + client = SecurityCenterManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = ( + security_center_management.ListEffectiveSecurityHealthAnalyticsCustomModulesResponse() + ) + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = security_center_management.ListEffectiveSecurityHealthAnalyticsCustomModulesResponse.pb( + return_value + ) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.list_effective_security_health_analytics_custom_modules(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*}/effectiveSecurityHealthAnalyticsCustomModules" + % client.transport._host, + args[1], + ) + + +def test_list_effective_security_health_analytics_custom_modules_rest_flattened_error( + transport: str = "rest", +): + client = SecurityCenterManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_effective_security_health_analytics_custom_modules( + security_center_management.ListEffectiveSecurityHealthAnalyticsCustomModulesRequest(), + parent="parent_value", + ) + + +def test_list_effective_security_health_analytics_custom_modules_rest_pager( + transport: str = "rest", +): + client = SecurityCenterManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + security_center_management.ListEffectiveSecurityHealthAnalyticsCustomModulesResponse( + effective_security_health_analytics_custom_modules=[ + security_center_management.EffectiveSecurityHealthAnalyticsCustomModule(), + security_center_management.EffectiveSecurityHealthAnalyticsCustomModule(), + security_center_management.EffectiveSecurityHealthAnalyticsCustomModule(), + ], + next_page_token="abc", + ), + security_center_management.ListEffectiveSecurityHealthAnalyticsCustomModulesResponse( + effective_security_health_analytics_custom_modules=[], + next_page_token="def", + ), + security_center_management.ListEffectiveSecurityHealthAnalyticsCustomModulesResponse( + effective_security_health_analytics_custom_modules=[ + security_center_management.EffectiveSecurityHealthAnalyticsCustomModule(), + ], + next_page_token="ghi", + ), + security_center_management.ListEffectiveSecurityHealthAnalyticsCustomModulesResponse( + effective_security_health_analytics_custom_modules=[ + security_center_management.EffectiveSecurityHealthAnalyticsCustomModule(), + security_center_management.EffectiveSecurityHealthAnalyticsCustomModule(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + security_center_management.ListEffectiveSecurityHealthAnalyticsCustomModulesResponse.to_json( + x + ) + for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "projects/sample1/locations/sample2"} + + pager = client.list_effective_security_health_analytics_custom_modules( + request=sample_request + ) + + results = list(pager) + assert len(results) == 6 + assert all( + isinstance( + i, + security_center_management.EffectiveSecurityHealthAnalyticsCustomModule, + ) + for i in results + ) + + pages = list( + client.list_effective_security_health_analytics_custom_modules( + request=sample_request + ).pages + ) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + security_center_management.GetEffectiveSecurityHealthAnalyticsCustomModuleRequest, + dict, + ], +) +def test_get_effective_security_health_analytics_custom_module_rest(request_type): + client = SecurityCenterManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/effectiveSecurityHealthAnalyticsCustomModules/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = security_center_management.EffectiveSecurityHealthAnalyticsCustomModule( + name="name_value", + enablement_state=security_center_management.EffectiveSecurityHealthAnalyticsCustomModule.EnablementState.ENABLED, + display_name="display_name_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = ( + security_center_management.EffectiveSecurityHealthAnalyticsCustomModule.pb( + return_value + ) + ) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_effective_security_health_analytics_custom_module(request) + + # Establish that the response is the type that we expect. + assert isinstance( + response, + security_center_management.EffectiveSecurityHealthAnalyticsCustomModule, + ) + assert response.name == "name_value" + assert ( + response.enablement_state + == security_center_management.EffectiveSecurityHealthAnalyticsCustomModule.EnablementState.ENABLED + ) + assert response.display_name == "display_name_value" + + +def test_get_effective_security_health_analytics_custom_module_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = SecurityCenterManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.get_effective_security_health_analytics_custom_module + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.get_effective_security_health_analytics_custom_module + ] = mock_rpc + + request = {} + client.get_effective_security_health_analytics_custom_module(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get_effective_security_health_analytics_custom_module(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_get_effective_security_health_analytics_custom_module_rest_required_fields( + request_type=security_center_management.GetEffectiveSecurityHealthAnalyticsCustomModuleRequest, +): + transport_class = transports.SecurityCenterManagementRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_effective_security_health_analytics_custom_module._get_unset_required_fields( + jsonified_request + ) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_effective_security_health_analytics_custom_module._get_unset_required_fields( + jsonified_request + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = SecurityCenterManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = ( + security_center_management.EffectiveSecurityHealthAnalyticsCustomModule() + ) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = security_center_management.EffectiveSecurityHealthAnalyticsCustomModule.pb( + return_value + ) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_effective_security_health_analytics_custom_module( + request + ) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_effective_security_health_analytics_custom_module_rest_unset_required_fields(): + transport = transports.SecurityCenterManagementRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_effective_security_health_analytics_custom_module._get_unset_required_fields( + {} + ) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_effective_security_health_analytics_custom_module_rest_interceptors( + null_interceptor, +): + transport = transports.SecurityCenterManagementRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=( + None + if null_interceptor + else transports.SecurityCenterManagementRestInterceptor() + ), + ) + client = SecurityCenterManagementClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.SecurityCenterManagementRestInterceptor, + "post_get_effective_security_health_analytics_custom_module", + ) as post, mock.patch.object( + transports.SecurityCenterManagementRestInterceptor, + "pre_get_effective_security_health_analytics_custom_module", + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = security_center_management.GetEffectiveSecurityHealthAnalyticsCustomModuleRequest.pb( + security_center_management.GetEffectiveSecurityHealthAnalyticsCustomModuleRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = security_center_management.EffectiveSecurityHealthAnalyticsCustomModule.to_json( + security_center_management.EffectiveSecurityHealthAnalyticsCustomModule() + ) + + request = ( + security_center_management.GetEffectiveSecurityHealthAnalyticsCustomModuleRequest() + ) + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = ( + security_center_management.EffectiveSecurityHealthAnalyticsCustomModule() + ) + + client.get_effective_security_health_analytics_custom_module( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_effective_security_health_analytics_custom_module_rest_bad_request( + transport: str = "rest", + request_type=security_center_management.GetEffectiveSecurityHealthAnalyticsCustomModuleRequest, +): + client = 
SecurityCenterManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/effectiveSecurityHealthAnalyticsCustomModules/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_effective_security_health_analytics_custom_module(request) + + +def test_get_effective_security_health_analytics_custom_module_rest_flattened(): + client = SecurityCenterManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = ( + security_center_management.EffectiveSecurityHealthAnalyticsCustomModule() + ) + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/effectiveSecurityHealthAnalyticsCustomModules/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = ( + security_center_management.EffectiveSecurityHealthAnalyticsCustomModule.pb( + return_value + ) + ) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_effective_security_health_analytics_custom_module(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/effectiveSecurityHealthAnalyticsCustomModules/*}" + % client.transport._host, + args[1], + ) + + +def test_get_effective_security_health_analytics_custom_module_rest_flattened_error( + transport: str = "rest", +): + client = SecurityCenterManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get_effective_security_health_analytics_custom_module( + security_center_management.GetEffectiveSecurityHealthAnalyticsCustomModuleRequest(), + name="name_value", + ) + + +def test_get_effective_security_health_analytics_custom_module_rest_error(): + client = SecurityCenterManagementClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + security_center_management.ListSecurityHealthAnalyticsCustomModulesRequest, + dict, + ], +) +def test_list_security_health_analytics_custom_modules_rest(request_type): + client = SecurityCenterManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = ( + security_center_management.ListSecurityHealthAnalyticsCustomModulesResponse( + next_page_token="next_page_token_value", + ) + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = security_center_management.ListSecurityHealthAnalyticsCustomModulesResponse.pb( + return_value + ) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_security_health_analytics_custom_modules(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListSecurityHealthAnalyticsCustomModulesPager) + assert response.next_page_token == "next_page_token_value" + + +def test_list_security_health_analytics_custom_modules_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = SecurityCenterManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.list_security_health_analytics_custom_modules + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.list_security_health_analytics_custom_modules + ] = mock_rpc + + request = {} + client.list_security_health_analytics_custom_modules(request) + + # Establish that the underlying gRPC stub method was called. 
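+        # In sketch form, the caching being exercised here is roughly:
+        #
+        #     wrapped = client._transport._wrapped_methods[stub]  # built once by _prep_wrapped_messages
+        #     wrapped(request)                                    # later calls reuse the cached wrapper
+        #
+        # so the call above should reach the substituted mock exactly once, and the second
+        # call below should reach it again without wrap_method being invoked another time.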
+ assert mock_rpc.call_count == 1 + + client.list_security_health_analytics_custom_modules(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_list_security_health_analytics_custom_modules_rest_required_fields( + request_type=security_center_management.ListSecurityHealthAnalyticsCustomModulesRequest, +): + transport_class = transports.SecurityCenterManagementRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_security_health_analytics_custom_modules._get_unset_required_fields( + jsonified_request + ) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_security_health_analytics_custom_modules._get_unset_required_fields( + jsonified_request + ) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "page_size", + "page_token", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = SecurityCenterManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = ( + security_center_management.ListSecurityHealthAnalyticsCustomModulesResponse() + ) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
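+            # A minimal stand-in for what path_template.transcode would otherwise return,
+            # which for a real GET resolves the http rule into something like:
+            #
+            #     {"uri": "v1/<expanded path>", "method": "get",
+            #      "query_params": <request fields not bound in the path>}
+            #
+            # Pinning it to a fixed uri keeps this test independent of the http_options, and
+            # with the request fields left at their defaults the only query parameter the
+            # transport is expected to send is the "$alt" entry asserted below.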
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = security_center_management.ListSecurityHealthAnalyticsCustomModulesResponse.pb( + return_value + ) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_security_health_analytics_custom_modules(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_security_health_analytics_custom_modules_rest_unset_required_fields(): + transport = transports.SecurityCenterManagementRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.list_security_health_analytics_custom_modules._get_unset_required_fields( + {} + ) + assert set(unset_fields) == ( + set( + ( + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_security_health_analytics_custom_modules_rest_interceptors( + null_interceptor, +): + transport = transports.SecurityCenterManagementRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=( + None + if null_interceptor + else transports.SecurityCenterManagementRestInterceptor() + ), + ) + client = SecurityCenterManagementClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.SecurityCenterManagementRestInterceptor, + "post_list_security_health_analytics_custom_modules", + ) as post, mock.patch.object( + transports.SecurityCenterManagementRestInterceptor, + "pre_list_security_health_analytics_custom_modules", + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = security_center_management.ListSecurityHealthAnalyticsCustomModulesRequest.pb( + security_center_management.ListSecurityHealthAnalyticsCustomModulesRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = security_center_management.ListSecurityHealthAnalyticsCustomModulesResponse.to_json( + security_center_management.ListSecurityHealthAnalyticsCustomModulesResponse() + ) + + request = ( + security_center_management.ListSecurityHealthAnalyticsCustomModulesRequest() + ) + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = ( + security_center_management.ListSecurityHealthAnalyticsCustomModulesResponse() + ) + + client.list_security_health_analytics_custom_modules( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_security_health_analytics_custom_modules_rest_bad_request( + transport: str = "rest", + request_type=security_center_management.ListSecurityHealthAnalyticsCustomModulesRequest, +): + client = SecurityCenterManagementClient( + 
credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_security_health_analytics_custom_modules(request) + + +def test_list_security_health_analytics_custom_modules_rest_flattened(): + client = SecurityCenterManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = ( + security_center_management.ListSecurityHealthAnalyticsCustomModulesResponse() + ) + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = security_center_management.ListSecurityHealthAnalyticsCustomModulesResponse.pb( + return_value + ) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.list_security_health_analytics_custom_modules(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*}/securityHealthAnalyticsCustomModules" + % client.transport._host, + args[1], + ) + + +def test_list_security_health_analytics_custom_modules_rest_flattened_error( + transport: str = "rest", +): + client = SecurityCenterManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_security_health_analytics_custom_modules( + security_center_management.ListSecurityHealthAnalyticsCustomModulesRequest(), + parent="parent_value", + ) + + +def test_list_security_health_analytics_custom_modules_rest_pager( + transport: str = "rest", +): + client = SecurityCenterManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
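+        # The pager exercise below fakes pagination purely at the HTTP layer: four
+        # ListSecurityHealthAnalyticsCustomModulesResponse pages holding 3 + 0 + 1 + 2
+        # modules, chained by next_page_token values "abc" -> "def" -> "ghi" -> "", are
+        # serialized to JSON and fed to Session.request via side_effect (twice, once for
+        # list(pager) and once for .pages). Iterating the pager should therefore yield six
+        # modules, and iterating .pages should surface those tokens in order.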
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + security_center_management.ListSecurityHealthAnalyticsCustomModulesResponse( + security_health_analytics_custom_modules=[ + security_center_management.SecurityHealthAnalyticsCustomModule(), + security_center_management.SecurityHealthAnalyticsCustomModule(), + security_center_management.SecurityHealthAnalyticsCustomModule(), + ], + next_page_token="abc", + ), + security_center_management.ListSecurityHealthAnalyticsCustomModulesResponse( + security_health_analytics_custom_modules=[], + next_page_token="def", + ), + security_center_management.ListSecurityHealthAnalyticsCustomModulesResponse( + security_health_analytics_custom_modules=[ + security_center_management.SecurityHealthAnalyticsCustomModule(), + ], + next_page_token="ghi", + ), + security_center_management.ListSecurityHealthAnalyticsCustomModulesResponse( + security_health_analytics_custom_modules=[ + security_center_management.SecurityHealthAnalyticsCustomModule(), + security_center_management.SecurityHealthAnalyticsCustomModule(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + security_center_management.ListSecurityHealthAnalyticsCustomModulesResponse.to_json( + x + ) + for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "projects/sample1/locations/sample2"} + + pager = client.list_security_health_analytics_custom_modules( + request=sample_request + ) + + results = list(pager) + assert len(results) == 6 + assert all( + isinstance( + i, security_center_management.SecurityHealthAnalyticsCustomModule + ) + for i in results + ) + + pages = list( + client.list_security_health_analytics_custom_modules( + request=sample_request + ).pages + ) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + security_center_management.ListDescendantSecurityHealthAnalyticsCustomModulesRequest, + dict, + ], +) +def test_list_descendant_security_health_analytics_custom_modules_rest(request_type): client = SecurityCenterManagementClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -10444,7 +13119,7 @@ def test_list_effective_security_health_analytics_custom_modules_rest(request_ty # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = security_center_management.ListEffectiveSecurityHealthAnalyticsCustomModulesResponse( + return_value = security_center_management.ListDescendantSecurityHealthAnalyticsCustomModulesResponse( next_page_token="next_page_token_value", ) @@ -10452,25 +13127,25 @@ def test_list_effective_security_health_analytics_custom_modules_rest(request_ty response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = security_center_management.ListEffectiveSecurityHealthAnalyticsCustomModulesResponse.pb( + return_value = security_center_management.ListDescendantSecurityHealthAnalyticsCustomModulesResponse.pb( return_value ) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.list_effective_security_health_analytics_custom_modules( + response = client.list_descendant_security_health_analytics_custom_modules( request ) # Establish that the response is the type that we expect. assert isinstance( - response, pagers.ListEffectiveSecurityHealthAnalyticsCustomModulesPager + response, pagers.ListDescendantSecurityHealthAnalyticsCustomModulesPager ) assert response.next_page_token == "next_page_token_value" -def test_list_effective_security_health_analytics_custom_modules_rest_use_cached_wrapped_rpc(): +def test_list_descendant_security_health_analytics_custom_modules_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -10485,7 +13160,7 @@ def test_list_effective_security_health_analytics_custom_modules_rest_use_cached # Ensure method has been cached assert ( - client._transport.list_effective_security_health_analytics_custom_modules + client._transport.list_descendant_security_health_analytics_custom_modules in client._transport._wrapped_methods ) @@ -10495,24 +13170,24 @@ def test_list_effective_security_health_analytics_custom_modules_rest_use_cached "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.list_effective_security_health_analytics_custom_modules + client._transport.list_descendant_security_health_analytics_custom_modules ] = mock_rpc request = {} - client.list_effective_security_health_analytics_custom_modules(request) + client.list_descendant_security_health_analytics_custom_modules(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.list_effective_security_health_analytics_custom_modules(request) + client.list_descendant_security_health_analytics_custom_modules(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_list_effective_security_health_analytics_custom_modules_rest_required_fields( - request_type=security_center_management.ListEffectiveSecurityHealthAnalyticsCustomModulesRequest, +def test_list_descendant_security_health_analytics_custom_modules_rest_required_fields( + request_type=security_center_management.ListDescendantSecurityHealthAnalyticsCustomModulesRequest, ): transport_class = transports.SecurityCenterManagementRestTransport @@ -10528,7 +13203,7 @@ def test_list_effective_security_health_analytics_custom_modules_rest_required_f unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_effective_security_health_analytics_custom_modules._get_unset_required_fields( + ).list_descendant_security_health_analytics_custom_modules._get_unset_required_fields( jsonified_request ) jsonified_request.update(unset_fields) @@ -10539,7 +13214,7 @@ def test_list_effective_security_health_analytics_custom_modules_rest_required_f unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_effective_security_health_analytics_custom_modules._get_unset_required_fields( + ).list_descendant_security_health_analytics_custom_modules._get_unset_required_fields( jsonified_request ) # Check that path parameters and body parameters are not mixing in. @@ -10563,7 +13238,7 @@ def test_list_effective_security_health_analytics_custom_modules_rest_required_f # Designate an appropriate value for the returned response. return_value = ( - security_center_management.ListEffectiveSecurityHealthAnalyticsCustomModulesResponse() + security_center_management.ListDescendantSecurityHealthAnalyticsCustomModulesResponse() ) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -10585,7 +13260,7 @@ def test_list_effective_security_health_analytics_custom_modules_rest_required_f response_value.status_code = 200 # Convert return value to protobuf type - return_value = security_center_management.ListEffectiveSecurityHealthAnalyticsCustomModulesResponse.pb( + return_value = security_center_management.ListDescendantSecurityHealthAnalyticsCustomModulesResponse.pb( return_value ) json_return_value = json_format.MessageToJson(return_value) @@ -10593,7 +13268,7 @@ def test_list_effective_security_health_analytics_custom_modules_rest_required_f response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.list_effective_security_health_analytics_custom_modules( + response = client.list_descendant_security_health_analytics_custom_modules( request ) @@ -10602,12 +13277,12 @@ def test_list_effective_security_health_analytics_custom_modules_rest_required_f assert expected_params == actual_params -def test_list_effective_security_health_analytics_custom_modules_rest_unset_required_fields(): +def test_list_descendant_security_health_analytics_custom_modules_rest_unset_required_fields(): transport = transports.SecurityCenterManagementRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.list_effective_security_health_analytics_custom_modules._get_unset_required_fields( + unset_fields = transport.list_descendant_security_health_analytics_custom_modules._get_unset_required_fields( {} ) assert set(unset_fields) == ( @@ -10622,14 +13297,16 @@ def test_list_effective_security_health_analytics_custom_modules_rest_unset_requ @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_effective_security_health_analytics_custom_modules_rest_interceptors( +def test_list_descendant_security_health_analytics_custom_modules_rest_interceptors( null_interceptor, ): transport = transports.SecurityCenterManagementRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.SecurityCenterManagementRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.SecurityCenterManagementRestInterceptor() + ), ) client = SecurityCenterManagementClient(transport=transport) with mock.patch.object( @@ -10638,15 +13315,15 @@ def test_list_effective_security_health_analytics_custom_modules_rest_intercepto path_template, "transcode" ) as transcode, mock.patch.object( transports.SecurityCenterManagementRestInterceptor, - "post_list_effective_security_health_analytics_custom_modules", + "post_list_descendant_security_health_analytics_custom_modules", ) as post, mock.patch.object( transports.SecurityCenterManagementRestInterceptor, - "pre_list_effective_security_health_analytics_custom_modules", + "pre_list_descendant_security_health_analytics_custom_modules", ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = security_center_management.ListEffectiveSecurityHealthAnalyticsCustomModulesRequest.pb( - security_center_management.ListEffectiveSecurityHealthAnalyticsCustomModulesRequest() + pb_message = security_center_management.ListDescendantSecurityHealthAnalyticsCustomModulesRequest.pb( + security_center_management.ListDescendantSecurityHealthAnalyticsCustomModulesRequest() ) transcode.return_value = { "method": "post", @@ -10658,12 +13335,12 @@ def test_list_effective_security_health_analytics_custom_modules_rest_intercepto req.return_value = 
Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = security_center_management.ListEffectiveSecurityHealthAnalyticsCustomModulesResponse.to_json( - security_center_management.ListEffectiveSecurityHealthAnalyticsCustomModulesResponse() + req.return_value._content = security_center_management.ListDescendantSecurityHealthAnalyticsCustomModulesResponse.to_json( + security_center_management.ListDescendantSecurityHealthAnalyticsCustomModulesResponse() ) request = ( - security_center_management.ListEffectiveSecurityHealthAnalyticsCustomModulesRequest() + security_center_management.ListDescendantSecurityHealthAnalyticsCustomModulesRequest() ) metadata = [ ("key", "val"), @@ -10671,10 +13348,10 @@ def test_list_effective_security_health_analytics_custom_modules_rest_intercepto ] pre.return_value = request, metadata post.return_value = ( - security_center_management.ListEffectiveSecurityHealthAnalyticsCustomModulesResponse() + security_center_management.ListDescendantSecurityHealthAnalyticsCustomModulesResponse() ) - client.list_effective_security_health_analytics_custom_modules( + client.list_descendant_security_health_analytics_custom_modules( request, metadata=[ ("key", "val"), @@ -10686,9 +13363,9 @@ def test_list_effective_security_health_analytics_custom_modules_rest_intercepto post.assert_called_once() -def test_list_effective_security_health_analytics_custom_modules_rest_bad_request( +def test_list_descendant_security_health_analytics_custom_modules_rest_bad_request( transport: str = "rest", - request_type=security_center_management.ListEffectiveSecurityHealthAnalyticsCustomModulesRequest, + request_type=security_center_management.ListDescendantSecurityHealthAnalyticsCustomModulesRequest, ): client = SecurityCenterManagementClient( credentials=ga_credentials.AnonymousCredentials(), @@ -10708,10 +13385,10 @@ def test_list_effective_security_health_analytics_custom_modules_rest_bad_reques response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.list_effective_security_health_analytics_custom_modules(request) + client.list_descendant_security_health_analytics_custom_modules(request) -def test_list_effective_security_health_analytics_custom_modules_rest_flattened(): +def test_list_descendant_security_health_analytics_custom_modules_rest_flattened(): client = SecurityCenterManagementClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -10721,7 +13398,7 @@ def test_list_effective_security_health_analytics_custom_modules_rest_flattened( with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
return_value = ( - security_center_management.ListEffectiveSecurityHealthAnalyticsCustomModulesResponse() + security_center_management.ListDescendantSecurityHealthAnalyticsCustomModulesResponse() ) # get arguments that satisfy an http rule for this method @@ -10737,27 +13414,27 @@ def test_list_effective_security_health_analytics_custom_modules_rest_flattened( response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = security_center_management.ListEffectiveSecurityHealthAnalyticsCustomModulesResponse.pb( + return_value = security_center_management.ListDescendantSecurityHealthAnalyticsCustomModulesResponse.pb( return_value ) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.list_effective_security_health_analytics_custom_modules(**mock_args) + client.list_descendant_security_health_analytics_custom_modules(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{parent=projects/*/locations/*}/effectiveSecurityHealthAnalyticsCustomModules" + "%s/v1/{parent=projects/*/locations/*}/securityHealthAnalyticsCustomModules:listDescendant" % client.transport._host, args[1], ) -def test_list_effective_security_health_analytics_custom_modules_rest_flattened_error( +def test_list_descendant_security_health_analytics_custom_modules_rest_flattened_error( transport: str = "rest", ): client = SecurityCenterManagementClient( @@ -10768,13 +13445,13 @@ def test_list_effective_security_health_analytics_custom_modules_rest_flattened_ # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.list_effective_security_health_analytics_custom_modules( - security_center_management.ListEffectiveSecurityHealthAnalyticsCustomModulesRequest(), + client.list_descendant_security_health_analytics_custom_modules( + security_center_management.ListDescendantSecurityHealthAnalyticsCustomModulesRequest(), parent="parent_value", ) -def test_list_effective_security_health_analytics_custom_modules_rest_pager( +def test_list_descendant_security_health_analytics_custom_modules_rest_pager( transport: str = "rest", ): client = SecurityCenterManagementClient( @@ -10788,28 +13465,28 @@ def test_list_effective_security_health_analytics_custom_modules_rest_pager( # with mock.patch.object(path_template, 'transcode') as transcode: # Set the response as a series of pages response = ( - security_center_management.ListEffectiveSecurityHealthAnalyticsCustomModulesResponse( - effective_security_health_analytics_custom_modules=[ - security_center_management.EffectiveSecurityHealthAnalyticsCustomModule(), - security_center_management.EffectiveSecurityHealthAnalyticsCustomModule(), - security_center_management.EffectiveSecurityHealthAnalyticsCustomModule(), + security_center_management.ListDescendantSecurityHealthAnalyticsCustomModulesResponse( + security_health_analytics_custom_modules=[ + security_center_management.SecurityHealthAnalyticsCustomModule(), + security_center_management.SecurityHealthAnalyticsCustomModule(), + security_center_management.SecurityHealthAnalyticsCustomModule(), ], next_page_token="abc", ), - security_center_management.ListEffectiveSecurityHealthAnalyticsCustomModulesResponse( - effective_security_health_analytics_custom_modules=[], + security_center_management.ListDescendantSecurityHealthAnalyticsCustomModulesResponse( + security_health_analytics_custom_modules=[], next_page_token="def", ), - security_center_management.ListEffectiveSecurityHealthAnalyticsCustomModulesResponse( - effective_security_health_analytics_custom_modules=[ - security_center_management.EffectiveSecurityHealthAnalyticsCustomModule(), + security_center_management.ListDescendantSecurityHealthAnalyticsCustomModulesResponse( + security_health_analytics_custom_modules=[ + security_center_management.SecurityHealthAnalyticsCustomModule(), ], next_page_token="ghi", ), - security_center_management.ListEffectiveSecurityHealthAnalyticsCustomModulesResponse( - effective_security_health_analytics_custom_modules=[ - security_center_management.EffectiveSecurityHealthAnalyticsCustomModule(), - security_center_management.EffectiveSecurityHealthAnalyticsCustomModule(), + security_center_management.ListDescendantSecurityHealthAnalyticsCustomModulesResponse( + security_health_analytics_custom_modules=[ + security_center_management.SecurityHealthAnalyticsCustomModule(), + security_center_management.SecurityHealthAnalyticsCustomModule(), ], ), ) @@ -10818,7 +13495,7 @@ def test_list_effective_security_health_analytics_custom_modules_rest_pager( # Wrap the values into proper Response objs response = tuple( - security_center_management.ListEffectiveSecurityHealthAnalyticsCustomModulesResponse.to_json( + security_center_management.ListDescendantSecurityHealthAnalyticsCustomModulesResponse.to_json( x ) for x in response @@ -10831,7 +13508,7 @@ def test_list_effective_security_health_analytics_custom_modules_rest_pager( sample_request = {"parent": "projects/sample1/locations/sample2"} - pager = client.list_effective_security_health_analytics_custom_modules( + pager = 
client.list_descendant_security_health_analytics_custom_modules( request=sample_request ) @@ -10839,14 +13516,13 @@ def test_list_effective_security_health_analytics_custom_modules_rest_pager( assert len(results) == 6 assert all( isinstance( - i, - security_center_management.EffectiveSecurityHealthAnalyticsCustomModule, + i, security_center_management.SecurityHealthAnalyticsCustomModule ) for i in results ) pages = list( - client.list_effective_security_health_analytics_custom_modules( + client.list_descendant_security_health_analytics_custom_modules( request=sample_request ).pages ) @@ -10857,11 +13533,11 @@ def test_list_effective_security_health_analytics_custom_modules_rest_pager( @pytest.mark.parametrize( "request_type", [ - security_center_management.GetEffectiveSecurityHealthAnalyticsCustomModuleRequest, + security_center_management.GetSecurityHealthAnalyticsCustomModuleRequest, dict, ], ) -def test_get_effective_security_health_analytics_custom_module_rest(request_type): +def test_get_security_health_analytics_custom_module_rest(request_type): client = SecurityCenterManagementClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -10869,17 +13545,19 @@ def test_get_effective_security_health_analytics_custom_module_rest(request_type # send a request that will satisfy transcoding request_init = { - "name": "projects/sample1/locations/sample2/effectiveSecurityHealthAnalyticsCustomModules/sample3" + "name": "projects/sample1/locations/sample2/securityHealthAnalyticsCustomModules/sample3" } request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = security_center_management.EffectiveSecurityHealthAnalyticsCustomModule( + return_value = security_center_management.SecurityHealthAnalyticsCustomModule( name="name_value", - enablement_state=security_center_management.EffectiveSecurityHealthAnalyticsCustomModule.EnablementState.ENABLED, display_name="display_name_value", + enablement_state=security_center_management.SecurityHealthAnalyticsCustomModule.EnablementState.ENABLED, + last_editor="last_editor_value", + ancestor_module="ancestor_module_value", ) # Wrap the value into a proper Response obj @@ -10887,7 +13565,7 @@ def test_get_effective_security_health_analytics_custom_module_rest(request_type response_value.status_code = 200 # Convert return value to protobuf type return_value = ( - security_center_management.EffectiveSecurityHealthAnalyticsCustomModule.pb( + security_center_management.SecurityHealthAnalyticsCustomModule.pb( return_value ) ) @@ -10895,22 +13573,23 @@ def test_get_effective_security_health_analytics_custom_module_rest(request_type response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.get_effective_security_health_analytics_custom_module(request) + response = client.get_security_health_analytics_custom_module(request) # Establish that the response is the type that we expect. 
assert isinstance( - response, - security_center_management.EffectiveSecurityHealthAnalyticsCustomModule, + response, security_center_management.SecurityHealthAnalyticsCustomModule ) assert response.name == "name_value" + assert response.display_name == "display_name_value" assert ( response.enablement_state - == security_center_management.EffectiveSecurityHealthAnalyticsCustomModule.EnablementState.ENABLED + == security_center_management.SecurityHealthAnalyticsCustomModule.EnablementState.ENABLED ) - assert response.display_name == "display_name_value" + assert response.last_editor == "last_editor_value" + assert response.ancestor_module == "ancestor_module_value" -def test_get_effective_security_health_analytics_custom_module_rest_use_cached_wrapped_rpc(): +def test_get_security_health_analytics_custom_module_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -10925,7 +13604,7 @@ def test_get_effective_security_health_analytics_custom_module_rest_use_cached_w # Ensure method has been cached assert ( - client._transport.get_effective_security_health_analytics_custom_module + client._transport.get_security_health_analytics_custom_module in client._transport._wrapped_methods ) @@ -10935,24 +13614,24 @@ def test_get_effective_security_health_analytics_custom_module_rest_use_cached_w "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.get_effective_security_health_analytics_custom_module + client._transport.get_security_health_analytics_custom_module ] = mock_rpc request = {} - client.get_effective_security_health_analytics_custom_module(request) + client.get_security_health_analytics_custom_module(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.get_effective_security_health_analytics_custom_module(request) + client.get_security_health_analytics_custom_module(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_get_effective_security_health_analytics_custom_module_rest_required_fields( - request_type=security_center_management.GetEffectiveSecurityHealthAnalyticsCustomModuleRequest, +def test_get_security_health_analytics_custom_module_rest_required_fields( + request_type=security_center_management.GetSecurityHealthAnalyticsCustomModuleRequest, ): transport_class = transports.SecurityCenterManagementRestTransport @@ -10968,7 +13647,7 @@ def test_get_effective_security_health_analytics_custom_module_rest_required_fie unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_effective_security_health_analytics_custom_module._get_unset_required_fields( + ).get_security_health_analytics_custom_module._get_unset_required_fields( jsonified_request ) jsonified_request.update(unset_fields) @@ -10979,7 +13658,7 @@ def test_get_effective_security_health_analytics_custom_module_rest_required_fie unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_effective_security_health_analytics_custom_module._get_unset_required_fields( + ).get_security_health_analytics_custom_module._get_unset_required_fields( jsonified_request ) jsonified_request.update(unset_fields) @@ -10995,9 +13674,7 @@ def test_get_effective_security_health_analytics_custom_module_rest_required_fie request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = ( - security_center_management.EffectiveSecurityHealthAnalyticsCustomModule() - ) + return_value = security_center_management.SecurityHealthAnalyticsCustomModule() # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -11018,43 +13695,45 @@ def test_get_effective_security_health_analytics_custom_module_rest_required_fie response_value.status_code = 200 # Convert return value to protobuf type - return_value = security_center_management.EffectiveSecurityHealthAnalyticsCustomModule.pb( - return_value + return_value = ( + security_center_management.SecurityHealthAnalyticsCustomModule.pb( + return_value + ) ) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.get_effective_security_health_analytics_custom_module( - request - ) + response = client.get_security_health_analytics_custom_module(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_get_effective_security_health_analytics_custom_module_rest_unset_required_fields(): +def test_get_security_health_analytics_custom_module_rest_unset_required_fields(): transport = transports.SecurityCenterManagementRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.get_effective_security_health_analytics_custom_module._get_unset_required_fields( + unset_fields = transport.get_security_health_analytics_custom_module._get_unset_required_fields( {} ) assert set(unset_fields) == (set(()) & set(("name",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_effective_security_health_analytics_custom_module_rest_interceptors( +def test_get_security_health_analytics_custom_module_rest_interceptors( null_interceptor, ): transport = transports.SecurityCenterManagementRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.SecurityCenterManagementRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.SecurityCenterManagementRestInterceptor() + ), ) client = SecurityCenterManagementClient(transport=transport) with mock.patch.object( @@ -11063,15 +13742,15 @@ def test_get_effective_security_health_analytics_custom_module_rest_interceptors path_template, "transcode" ) as transcode, mock.patch.object( transports.SecurityCenterManagementRestInterceptor, - "post_get_effective_security_health_analytics_custom_module", + "post_get_security_health_analytics_custom_module", ) as post, mock.patch.object( transports.SecurityCenterManagementRestInterceptor, - "pre_get_effective_security_health_analytics_custom_module", + "pre_get_security_health_analytics_custom_module", ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = security_center_management.GetEffectiveSecurityHealthAnalyticsCustomModuleRequest.pb( - security_center_management.GetEffectiveSecurityHealthAnalyticsCustomModuleRequest() + pb_message = security_center_management.GetSecurityHealthAnalyticsCustomModuleRequest.pb( + security_center_management.GetSecurityHealthAnalyticsCustomModuleRequest() ) transcode.return_value = { "method": "post", @@ -11083,12 +13762,14 @@ def test_get_effective_security_health_analytics_custom_module_rest_interceptors req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = security_center_management.EffectiveSecurityHealthAnalyticsCustomModule.to_json( - 
security_center_management.EffectiveSecurityHealthAnalyticsCustomModule() + req.return_value._content = ( + security_center_management.SecurityHealthAnalyticsCustomModule.to_json( + security_center_management.SecurityHealthAnalyticsCustomModule() + ) ) request = ( - security_center_management.GetEffectiveSecurityHealthAnalyticsCustomModuleRequest() + security_center_management.GetSecurityHealthAnalyticsCustomModuleRequest() ) metadata = [ ("key", "val"), @@ -11096,10 +13777,10 @@ def test_get_effective_security_health_analytics_custom_module_rest_interceptors ] pre.return_value = request, metadata post.return_value = ( - security_center_management.EffectiveSecurityHealthAnalyticsCustomModule() + security_center_management.SecurityHealthAnalyticsCustomModule() ) - client.get_effective_security_health_analytics_custom_module( + client.get_security_health_analytics_custom_module( request, metadata=[ ("key", "val"), @@ -11111,9 +13792,9 @@ def test_get_effective_security_health_analytics_custom_module_rest_interceptors post.assert_called_once() -def test_get_effective_security_health_analytics_custom_module_rest_bad_request( +def test_get_security_health_analytics_custom_module_rest_bad_request( transport: str = "rest", - request_type=security_center_management.GetEffectiveSecurityHealthAnalyticsCustomModuleRequest, + request_type=security_center_management.GetSecurityHealthAnalyticsCustomModuleRequest, ): client = SecurityCenterManagementClient( credentials=ga_credentials.AnonymousCredentials(), @@ -11122,7 +13803,7 @@ def test_get_effective_security_health_analytics_custom_module_rest_bad_request( # send a request that will satisfy transcoding request_init = { - "name": "projects/sample1/locations/sample2/effectiveSecurityHealthAnalyticsCustomModules/sample3" + "name": "projects/sample1/locations/sample2/securityHealthAnalyticsCustomModules/sample3" } request = request_type(**request_init) @@ -11135,10 +13816,10 @@ def test_get_effective_security_health_analytics_custom_module_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.get_effective_security_health_analytics_custom_module(request) + client.get_security_health_analytics_custom_module(request) -def test_get_effective_security_health_analytics_custom_module_rest_flattened(): +def test_get_security_health_analytics_custom_module_rest_flattened(): client = SecurityCenterManagementClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -11147,13 +13828,11 @@ def test_get_effective_security_health_analytics_custom_module_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = ( - security_center_management.EffectiveSecurityHealthAnalyticsCustomModule() - ) + return_value = security_center_management.SecurityHealthAnalyticsCustomModule() # get arguments that satisfy an http rule for this method sample_request = { - "name": "projects/sample1/locations/sample2/effectiveSecurityHealthAnalyticsCustomModules/sample3" + "name": "projects/sample1/locations/sample2/securityHealthAnalyticsCustomModules/sample3" } # get truthy value for each flattened field @@ -11167,7 +13846,7 @@ def test_get_effective_security_health_analytics_custom_module_rest_flattened(): response_value.status_code = 200 # Convert return value to protobuf type return_value = ( - security_center_management.EffectiveSecurityHealthAnalyticsCustomModule.pb( + security_center_management.SecurityHealthAnalyticsCustomModule.pb( return_value ) ) @@ -11175,20 +13854,20 @@ def test_get_effective_security_health_analytics_custom_module_rest_flattened(): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.get_effective_security_health_analytics_custom_module(**mock_args) + client.get_security_health_analytics_custom_module(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{name=projects/*/locations/*/effectiveSecurityHealthAnalyticsCustomModules/*}" + "%s/v1/{name=projects/*/locations/*/securityHealthAnalyticsCustomModules/*}" % client.transport._host, args[1], ) -def test_get_effective_security_health_analytics_custom_module_rest_flattened_error( +def test_get_security_health_analytics_custom_module_rest_flattened_error( transport: str = "rest", ): client = SecurityCenterManagementClient( @@ -11199,13 +13878,13 @@ def test_get_effective_security_health_analytics_custom_module_rest_flattened_er # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.get_effective_security_health_analytics_custom_module( - security_center_management.GetEffectiveSecurityHealthAnalyticsCustomModuleRequest(), + client.get_security_health_analytics_custom_module( + security_center_management.GetSecurityHealthAnalyticsCustomModuleRequest(), name="name_value", ) -def test_get_effective_security_health_analytics_custom_module_rest_error(): +def test_get_security_health_analytics_custom_module_rest_error(): client = SecurityCenterManagementClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -11214,11 +13893,11 @@ def test_get_effective_security_health_analytics_custom_module_rest_error(): @pytest.mark.parametrize( "request_type", [ - security_center_management.ListSecurityHealthAnalyticsCustomModulesRequest, + security_center_management.CreateSecurityHealthAnalyticsCustomModuleRequest, dict, ], ) -def test_list_security_health_analytics_custom_modules_rest(request_type): +def test_create_security_health_analytics_custom_module_rest(request_type): client = SecurityCenterManagementClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -11226,36 +13905,152 @@ def test_list_security_health_analytics_custom_modules_rest(request_type): # send a request that will satisfy transcoding request_init = {"parent": "projects/sample1/locations/sample2"} + request_init["security_health_analytics_custom_module"] = { + "name": "name_value", + "display_name": "display_name_value", + "enablement_state": 1, + "update_time": {"seconds": 751, "nanos": 543}, + "last_editor": "last_editor_value", + "ancestor_module": "ancestor_module_value", + "custom_config": { + "predicate": { + "expression": "expression_value", + "title": "title_value", + "description": "description_value", + "location": "location_value", + }, + "custom_output": { + "properties": [{"name": "name_value", "value_expression": {}}] + }, + "resource_selector": { + "resource_types": ["resource_types_value1", "resource_types_value2"] + }, + "severity": 1, + "description": "description_value", + "recommendation": "recommendation_value", + }, + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = security_center_management.CreateSecurityHealthAnalyticsCustomModuleRequest.meta.fields[ + "security_health_analytics_custom_module" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
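+        # (Proto-plus message types expose nested fields via .meta.fields, while raw
+        # protobuf types expose them via .DESCRIPTOR.fields; both cases are handled below.)
+        # The (field, subfield) pairs gathered with this helper are compared against the
+        # literal request_init above, and any subfield the installed proto library does not
+        # define -- for example one introduced after the runtime dependency was released --
+        # is deleted before the request object is constructed, keeping the test valid
+        # across dependency versions.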
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "security_health_analytics_custom_module" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, + len(request_init["security_health_analytics_custom_module"][field]), + ): + del request_init["security_health_analytics_custom_module"][field][ + i + ][subfield] + else: + del request_init["security_health_analytics_custom_module"][field][ + subfield + ] request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = ( - security_center_management.ListSecurityHealthAnalyticsCustomModulesResponse( - next_page_token="next_page_token_value", - ) + return_value = security_center_management.SecurityHealthAnalyticsCustomModule( + name="name_value", + display_name="display_name_value", + enablement_state=security_center_management.SecurityHealthAnalyticsCustomModule.EnablementState.ENABLED, + last_editor="last_editor_value", + ancestor_module="ancestor_module_value", ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = security_center_management.ListSecurityHealthAnalyticsCustomModulesResponse.pb( - return_value + return_value = ( + security_center_management.SecurityHealthAnalyticsCustomModule.pb( + return_value + ) ) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.list_security_health_analytics_custom_modules(request) + response = client.create_security_health_analytics_custom_module(request) # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListSecurityHealthAnalyticsCustomModulesPager) - assert response.next_page_token == "next_page_token_value" + assert isinstance( + response, security_center_management.SecurityHealthAnalyticsCustomModule + ) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert ( + response.enablement_state + == security_center_management.SecurityHealthAnalyticsCustomModule.EnablementState.ENABLED + ) + assert response.last_editor == "last_editor_value" + assert response.ancestor_module == "ancestor_module_value" -def test_list_security_health_analytics_custom_modules_rest_use_cached_wrapped_rpc(): +def test_create_security_health_analytics_custom_module_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -11270,7 +14065,7 @@ def test_list_security_health_analytics_custom_modules_rest_use_cached_wrapped_r # Ensure method has been cached assert ( - client._transport.list_security_health_analytics_custom_modules + client._transport.create_security_health_analytics_custom_module in client._transport._wrapped_methods ) @@ -11280,24 +14075,24 @@ def test_list_security_health_analytics_custom_modules_rest_use_cached_wrapped_r "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.list_security_health_analytics_custom_modules + client._transport.create_security_health_analytics_custom_module ] = mock_rpc request = {} - client.list_security_health_analytics_custom_modules(request) + client.create_security_health_analytics_custom_module(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.list_security_health_analytics_custom_modules(request) + client.create_security_health_analytics_custom_module(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_list_security_health_analytics_custom_modules_rest_required_fields( - request_type=security_center_management.ListSecurityHealthAnalyticsCustomModulesRequest, +def test_create_security_health_analytics_custom_module_rest_required_fields( + request_type=security_center_management.CreateSecurityHealthAnalyticsCustomModuleRequest, ): transport_class = transports.SecurityCenterManagementRestTransport @@ -11313,7 +14108,7 @@ def test_list_security_health_analytics_custom_modules_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_security_health_analytics_custom_modules._get_unset_required_fields( + ).create_security_health_analytics_custom_module._get_unset_required_fields( jsonified_request ) jsonified_request.update(unset_fields) @@ -11324,16 +14119,11 @@ def test_list_security_health_analytics_custom_modules_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_security_health_analytics_custom_modules._get_unset_required_fields( - jsonified_request - ) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set( - ( - "page_size", - "page_token", - ) + ).create_security_health_analytics_custom_module._get_unset_required_fields( + jsonified_request ) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("validate_only",)) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone @@ -11347,9 +14137,7 @@ def test_list_security_health_analytics_custom_modules_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = ( - security_center_management.ListSecurityHealthAnalyticsCustomModulesResponse() - ) + return_value = security_center_management.SecurityHealthAnalyticsCustomModule() # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -11361,58 +14149,63 @@ def test_list_security_health_analytics_custom_modules_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "get", + "method": "post", "query_params": pb_request, } + transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = security_center_management.ListSecurityHealthAnalyticsCustomModulesResponse.pb( - return_value + return_value = ( + security_center_management.SecurityHealthAnalyticsCustomModule.pb( + return_value + ) ) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.list_security_health_analytics_custom_modules(request) + response = client.create_security_health_analytics_custom_module(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_list_security_health_analytics_custom_modules_rest_unset_required_fields(): +def test_create_security_health_analytics_custom_module_rest_unset_required_fields(): transport = transports.SecurityCenterManagementRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.list_security_health_analytics_custom_modules._get_unset_required_fields( + unset_fields = transport.create_security_health_analytics_custom_module._get_unset_required_fields( {} ) assert set(unset_fields) == ( - set( + set(("validateOnly",)) + & set( ( - "pageSize", - "pageToken", + "parent", + "securityHealthAnalyticsCustomModule", ) ) - & set(("parent",)) ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_security_health_analytics_custom_modules_rest_interceptors( +def test_create_security_health_analytics_custom_module_rest_interceptors( null_interceptor, ): transport = transports.SecurityCenterManagementRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.SecurityCenterManagementRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.SecurityCenterManagementRestInterceptor() + ), ) client = SecurityCenterManagementClient(transport=transport) with mock.patch.object( @@ -11421,15 +14214,15 @@ def test_list_security_health_analytics_custom_modules_rest_interceptors( path_template, "transcode" ) as transcode, mock.patch.object( transports.SecurityCenterManagementRestInterceptor, - "post_list_security_health_analytics_custom_modules", + "post_create_security_health_analytics_custom_module", ) as post, mock.patch.object( transports.SecurityCenterManagementRestInterceptor, - "pre_list_security_health_analytics_custom_modules", + "pre_create_security_health_analytics_custom_module", ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = security_center_management.ListSecurityHealthAnalyticsCustomModulesRequest.pb( - security_center_management.ListSecurityHealthAnalyticsCustomModulesRequest() + pb_message = security_center_management.CreateSecurityHealthAnalyticsCustomModuleRequest.pb( + security_center_management.CreateSecurityHealthAnalyticsCustomModuleRequest() ) transcode.return_value = { "method": "post", @@ -11441,12 +14234,14 @@ def 
test_list_security_health_analytics_custom_modules_rest_interceptors( req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = security_center_management.ListSecurityHealthAnalyticsCustomModulesResponse.to_json( - security_center_management.ListSecurityHealthAnalyticsCustomModulesResponse() + req.return_value._content = ( + security_center_management.SecurityHealthAnalyticsCustomModule.to_json( + security_center_management.SecurityHealthAnalyticsCustomModule() + ) ) request = ( - security_center_management.ListSecurityHealthAnalyticsCustomModulesRequest() + security_center_management.CreateSecurityHealthAnalyticsCustomModuleRequest() ) metadata = [ ("key", "val"), @@ -11454,10 +14249,10 @@ def test_list_security_health_analytics_custom_modules_rest_interceptors( ] pre.return_value = request, metadata post.return_value = ( - security_center_management.ListSecurityHealthAnalyticsCustomModulesResponse() + security_center_management.SecurityHealthAnalyticsCustomModule() ) - client.list_security_health_analytics_custom_modules( + client.create_security_health_analytics_custom_module( request, metadata=[ ("key", "val"), @@ -11469,9 +14264,9 @@ def test_list_security_health_analytics_custom_modules_rest_interceptors( post.assert_called_once() -def test_list_security_health_analytics_custom_modules_rest_bad_request( +def test_create_security_health_analytics_custom_module_rest_bad_request( transport: str = "rest", - request_type=security_center_management.ListSecurityHealthAnalyticsCustomModulesRequest, + request_type=security_center_management.CreateSecurityHealthAnalyticsCustomModuleRequest, ): client = SecurityCenterManagementClient( credentials=ga_credentials.AnonymousCredentials(), @@ -11491,10 +14286,10 @@ def test_list_security_health_analytics_custom_modules_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.list_security_health_analytics_custom_modules(request) + client.create_security_health_analytics_custom_module(request) -def test_list_security_health_analytics_custom_modules_rest_flattened(): +def test_create_security_health_analytics_custom_module_rest_flattened(): client = SecurityCenterManagementClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -11503,9 +14298,7 @@ def test_list_security_health_analytics_custom_modules_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = ( - security_center_management.ListSecurityHealthAnalyticsCustomModulesResponse() - ) + return_value = security_center_management.SecurityHealthAnalyticsCustomModule() # get arguments that satisfy an http rule for this method sample_request = {"parent": "projects/sample1/locations/sample2"} @@ -11513,6 +14306,9 @@ def test_list_security_health_analytics_custom_modules_rest_flattened(): # get truthy value for each flattened field mock_args = dict( parent="parent_value", + security_health_analytics_custom_module=security_center_management.SecurityHealthAnalyticsCustomModule( + name="name_value" + ), ) mock_args.update(sample_request) @@ -11520,14 +14316,16 @@ def test_list_security_health_analytics_custom_modules_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = security_center_management.ListSecurityHealthAnalyticsCustomModulesResponse.pb( - return_value + return_value = ( + security_center_management.SecurityHealthAnalyticsCustomModule.pb( + return_value + ) ) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.list_security_health_analytics_custom_modules(**mock_args) + client.create_security_health_analytics_custom_module(**mock_args) # Establish that the underlying call was made with the expected # request object values. @@ -11540,7 +14338,7 @@ def test_list_security_health_analytics_custom_modules_rest_flattened(): ) -def test_list_security_health_analytics_custom_modules_rest_flattened_error( +def test_create_security_health_analytics_custom_module_rest_flattened_error( transport: str = "rest", ): client = SecurityCenterManagementClient( @@ -11551,138 +14349,186 @@ def test_list_security_health_analytics_custom_modules_rest_flattened_error( # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.list_security_health_analytics_custom_modules( - security_center_management.ListSecurityHealthAnalyticsCustomModulesRequest(), + client.create_security_health_analytics_custom_module( + security_center_management.CreateSecurityHealthAnalyticsCustomModuleRequest(), parent="parent_value", + security_health_analytics_custom_module=security_center_management.SecurityHealthAnalyticsCustomModule( + name="name_value" + ), ) -def test_list_security_health_analytics_custom_modules_rest_pager( - transport: str = "rest", -): +def test_create_security_health_analytics_custom_module_rest_error(): client = SecurityCenterManagementClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. 
- # with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - security_center_management.ListSecurityHealthAnalyticsCustomModulesResponse( - security_health_analytics_custom_modules=[ - security_center_management.SecurityHealthAnalyticsCustomModule(), - security_center_management.SecurityHealthAnalyticsCustomModule(), - security_center_management.SecurityHealthAnalyticsCustomModule(), - ], - next_page_token="abc", - ), - security_center_management.ListSecurityHealthAnalyticsCustomModulesResponse( - security_health_analytics_custom_modules=[], - next_page_token="def", - ), - security_center_management.ListSecurityHealthAnalyticsCustomModulesResponse( - security_health_analytics_custom_modules=[ - security_center_management.SecurityHealthAnalyticsCustomModule(), - ], - next_page_token="ghi", - ), - security_center_management.ListSecurityHealthAnalyticsCustomModulesResponse( - security_health_analytics_custom_modules=[ - security_center_management.SecurityHealthAnalyticsCustomModule(), - security_center_management.SecurityHealthAnalyticsCustomModule(), - ], - ), - ) - # Two responses for two calls - response = response + response - - # Wrap the values into proper Response objs - response = tuple( - security_center_management.ListSecurityHealthAnalyticsCustomModulesResponse.to_json( - x - ) - for x in response - ) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode("UTF-8") - return_val.status_code = 200 - req.side_effect = return_values - - sample_request = {"parent": "projects/sample1/locations/sample2"} - - pager = client.list_security_health_analytics_custom_modules( - request=sample_request - ) - - results = list(pager) - assert len(results) == 6 - assert all( - isinstance( - i, security_center_management.SecurityHealthAnalyticsCustomModule - ) - for i in results - ) - - pages = list( - client.list_security_health_analytics_custom_modules( - request=sample_request - ).pages - ) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token - @pytest.mark.parametrize( "request_type", [ - security_center_management.ListDescendantSecurityHealthAnalyticsCustomModulesRequest, + security_center_management.UpdateSecurityHealthAnalyticsCustomModuleRequest, dict, ], ) -def test_list_descendant_security_health_analytics_custom_modules_rest(request_type): +def test_update_security_health_analytics_custom_module_rest(request_type): client = SecurityCenterManagementClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) - # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} + # send a request that will satisfy transcoding + request_init = { + "security_health_analytics_custom_module": { + "name": "projects/sample1/locations/sample2/securityHealthAnalyticsCustomModules/sample3" + } + } + request_init["security_health_analytics_custom_module"] = { + "name": "projects/sample1/locations/sample2/securityHealthAnalyticsCustomModules/sample3", + "display_name": "display_name_value", + "enablement_state": 1, + "update_time": {"seconds": 751, "nanos": 543}, + "last_editor": "last_editor_value", + "ancestor_module": "ancestor_module_value", + "custom_config": { + "predicate": { + "expression": "expression_value", + "title": "title_value", + "description": "description_value", + "location": "location_value", + 
}, + "custom_output": { + "properties": [{"name": "name_value", "value_expression": {}}] + }, + "resource_selector": { + "resource_types": ["resource_types_value1", "resource_types_value2"] + }, + "severity": 1, + "description": "description_value", + "recommendation": "recommendation_value", + }, + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = security_center_management.UpdateSecurityHealthAnalyticsCustomModuleRequest.meta.fields[ + "security_health_analytics_custom_module" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "security_health_analytics_custom_module" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, + len(request_init["security_health_analytics_custom_module"][field]), + ): + del request_init["security_health_analytics_custom_module"][field][ + i + ][subfield] + else: + del request_init["security_health_analytics_custom_module"][field][ + subfield + ] request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = security_center_management.ListDescendantSecurityHealthAnalyticsCustomModulesResponse( - next_page_token="next_page_token_value", + return_value = security_center_management.SecurityHealthAnalyticsCustomModule( + name="name_value", + display_name="display_name_value", + enablement_state=security_center_management.SecurityHealthAnalyticsCustomModule.EnablementState.ENABLED, + last_editor="last_editor_value", + ancestor_module="ancestor_module_value", ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = security_center_management.ListDescendantSecurityHealthAnalyticsCustomModulesResponse.pb( - return_value + return_value = ( + security_center_management.SecurityHealthAnalyticsCustomModule.pb( + return_value + ) ) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.list_descendant_security_health_analytics_custom_modules( - request - ) + response = client.update_security_health_analytics_custom_module(request) # Establish that the response is the type that we expect. assert isinstance( - response, pagers.ListDescendantSecurityHealthAnalyticsCustomModulesPager + response, security_center_management.SecurityHealthAnalyticsCustomModule ) - assert response.next_page_token == "next_page_token_value" + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert ( + response.enablement_state + == security_center_management.SecurityHealthAnalyticsCustomModule.EnablementState.ENABLED + ) + assert response.last_editor == "last_editor_value" + assert response.ancestor_module == "ancestor_module_value" -def test_list_descendant_security_health_analytics_custom_modules_rest_use_cached_wrapped_rpc(): +def test_update_security_health_analytics_custom_module_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -11697,7 +14543,7 @@ def test_list_descendant_security_health_analytics_custom_modules_rest_use_cache # Ensure method has been cached assert ( - client._transport.list_descendant_security_health_analytics_custom_modules + client._transport.update_security_health_analytics_custom_module in client._transport._wrapped_methods ) @@ -11707,29 +14553,28 @@ def test_list_descendant_security_health_analytics_custom_modules_rest_use_cache "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.list_descendant_security_health_analytics_custom_modules + client._transport.update_security_health_analytics_custom_module ] = mock_rpc request = {} - client.list_descendant_security_health_analytics_custom_modules(request) + client.update_security_health_analytics_custom_module(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.list_descendant_security_health_analytics_custom_modules(request) + client.update_security_health_analytics_custom_module(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_list_descendant_security_health_analytics_custom_modules_rest_required_fields( - request_type=security_center_management.ListDescendantSecurityHealthAnalyticsCustomModulesRequest, +def test_update_security_health_analytics_custom_module_rest_required_fields( + request_type=security_center_management.UpdateSecurityHealthAnalyticsCustomModuleRequest, ): transport_class = transports.SecurityCenterManagementRestTransport request_init = {} - request_init["parent"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -11740,32 +14585,28 @@ def test_list_descendant_security_health_analytics_custom_modules_rest_required_ unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_descendant_security_health_analytics_custom_modules._get_unset_required_fields( + ).update_security_health_analytics_custom_module._get_unset_required_fields( jsonified_request ) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["parent"] = "parent_value" - unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_descendant_security_health_analytics_custom_modules._get_unset_required_fields( + ).update_security_health_analytics_custom_module._get_unset_required_fields( jsonified_request ) # Check that path parameters and body parameters are not mixing in. assert not set(unset_fields) - set( ( - "page_size", - "page_token", + "update_mask", + "validate_only", ) ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" client = SecurityCenterManagementClient( credentials=ga_credentials.AnonymousCredentials(), @@ -11774,9 +14615,7 @@ def test_list_descendant_security_health_analytics_custom_modules_rest_required_ request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = ( - security_center_management.ListDescendantSecurityHealthAnalyticsCustomModulesResponse() - ) + return_value = security_center_management.SecurityHealthAnalyticsCustomModule() # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -11788,60 +14627,68 @@ def test_list_descendant_security_health_analytics_custom_modules_rest_required_ pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "get", + "method": "patch", "query_params": pb_request, } + transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = security_center_management.ListDescendantSecurityHealthAnalyticsCustomModulesResponse.pb( - return_value + return_value = ( + security_center_management.SecurityHealthAnalyticsCustomModule.pb( + return_value + ) ) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.list_descendant_security_health_analytics_custom_modules( - request - ) + response = client.update_security_health_analytics_custom_module(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_list_descendant_security_health_analytics_custom_modules_rest_unset_required_fields(): +def test_update_security_health_analytics_custom_module_rest_unset_required_fields(): transport = transports.SecurityCenterManagementRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.list_descendant_security_health_analytics_custom_modules._get_unset_required_fields( + unset_fields = transport.update_security_health_analytics_custom_module._get_unset_required_fields( {} ) assert set(unset_fields) == ( set( ( - "pageSize", - "pageToken", + "updateMask", + "validateOnly", + ) + ) + & set( + ( + "updateMask", + "securityHealthAnalyticsCustomModule", ) ) - & set(("parent",)) ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_descendant_security_health_analytics_custom_modules_rest_interceptors( +def test_update_security_health_analytics_custom_module_rest_interceptors( null_interceptor, ): transport = transports.SecurityCenterManagementRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.SecurityCenterManagementRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.SecurityCenterManagementRestInterceptor() + ), ) client = SecurityCenterManagementClient(transport=transport) with mock.patch.object( @@ -11850,15 +14697,15 @@ def test_list_descendant_security_health_analytics_custom_modules_rest_intercept path_template, "transcode" ) as transcode, mock.patch.object( transports.SecurityCenterManagementRestInterceptor, - "post_list_descendant_security_health_analytics_custom_modules", + "post_update_security_health_analytics_custom_module", ) as post, mock.patch.object( transports.SecurityCenterManagementRestInterceptor, - "pre_list_descendant_security_health_analytics_custom_modules", + "pre_update_security_health_analytics_custom_module", ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = security_center_management.ListDescendantSecurityHealthAnalyticsCustomModulesRequest.pb( - security_center_management.ListDescendantSecurityHealthAnalyticsCustomModulesRequest() + pb_message = security_center_management.UpdateSecurityHealthAnalyticsCustomModuleRequest.pb( + 
security_center_management.UpdateSecurityHealthAnalyticsCustomModuleRequest() ) transcode.return_value = { "method": "post", @@ -11870,12 +14717,14 @@ def test_list_descendant_security_health_analytics_custom_modules_rest_intercept req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = security_center_management.ListDescendantSecurityHealthAnalyticsCustomModulesResponse.to_json( - security_center_management.ListDescendantSecurityHealthAnalyticsCustomModulesResponse() + req.return_value._content = ( + security_center_management.SecurityHealthAnalyticsCustomModule.to_json( + security_center_management.SecurityHealthAnalyticsCustomModule() + ) ) request = ( - security_center_management.ListDescendantSecurityHealthAnalyticsCustomModulesRequest() + security_center_management.UpdateSecurityHealthAnalyticsCustomModuleRequest() ) metadata = [ ("key", "val"), @@ -11883,10 +14732,10 @@ def test_list_descendant_security_health_analytics_custom_modules_rest_intercept ] pre.return_value = request, metadata post.return_value = ( - security_center_management.ListDescendantSecurityHealthAnalyticsCustomModulesResponse() + security_center_management.SecurityHealthAnalyticsCustomModule() ) - client.list_descendant_security_health_analytics_custom_modules( + client.update_security_health_analytics_custom_module( request, metadata=[ ("key", "val"), @@ -11898,9 +14747,9 @@ def test_list_descendant_security_health_analytics_custom_modules_rest_intercept post.assert_called_once() -def test_list_descendant_security_health_analytics_custom_modules_rest_bad_request( +def test_update_security_health_analytics_custom_module_rest_bad_request( transport: str = "rest", - request_type=security_center_management.ListDescendantSecurityHealthAnalyticsCustomModulesRequest, + request_type=security_center_management.UpdateSecurityHealthAnalyticsCustomModuleRequest, ): client = SecurityCenterManagementClient( credentials=ga_credentials.AnonymousCredentials(), @@ -11908,7 +14757,11 @@ def test_list_descendant_security_health_analytics_custom_modules_rest_bad_reque ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} + request_init = { + "security_health_analytics_custom_module": { + "name": "projects/sample1/locations/sample2/securityHealthAnalyticsCustomModules/sample3" + } + } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -11920,10 +14773,10 @@ def test_list_descendant_security_health_analytics_custom_modules_rest_bad_reque response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.list_descendant_security_health_analytics_custom_modules(request) + client.update_security_health_analytics_custom_module(request) -def test_list_descendant_security_health_analytics_custom_modules_rest_flattened(): +def test_update_security_health_analytics_custom_module_rest_flattened(): client = SecurityCenterManagementClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -11932,16 +14785,21 @@ def test_list_descendant_security_health_analytics_custom_modules_rest_flattened # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = ( - security_center_management.ListDescendantSecurityHealthAnalyticsCustomModulesResponse() - ) + return_value = security_center_management.SecurityHealthAnalyticsCustomModule() # get arguments that satisfy an http rule for this method - sample_request = {"parent": "projects/sample1/locations/sample2"} + sample_request = { + "security_health_analytics_custom_module": { + "name": "projects/sample1/locations/sample2/securityHealthAnalyticsCustomModules/sample3" + } + } # get truthy value for each flattened field mock_args = dict( - parent="parent_value", + security_health_analytics_custom_module=security_center_management.SecurityHealthAnalyticsCustomModule( + name="name_value" + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) mock_args.update(sample_request) @@ -11949,27 +14807,29 @@ def test_list_descendant_security_health_analytics_custom_modules_rest_flattened response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = security_center_management.ListDescendantSecurityHealthAnalyticsCustomModulesResponse.pb( - return_value + return_value = ( + security_center_management.SecurityHealthAnalyticsCustomModule.pb( + return_value + ) ) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.list_descendant_security_health_analytics_custom_modules(**mock_args) + client.update_security_health_analytics_custom_module(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{parent=projects/*/locations/*}/securityHealthAnalyticsCustomModules:listDescendant" + "%s/v1/{security_health_analytics_custom_module.name=projects/*/locations/*/securityHealthAnalyticsCustomModules/*}" % client.transport._host, args[1], ) -def test_list_descendant_security_health_analytics_custom_modules_rest_flattened_error( +def test_update_security_health_analytics_custom_module_rest_flattened_error( transport: str = "rest", ): client = SecurityCenterManagementClient( @@ -11980,99 +14840,29 @@ def test_list_descendant_security_health_analytics_custom_modules_rest_flattened # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.list_descendant_security_health_analytics_custom_modules( - security_center_management.ListDescendantSecurityHealthAnalyticsCustomModulesRequest(), - parent="parent_value", - ) - - -def test_list_descendant_security_health_analytics_custom_modules_rest_pager( - transport: str = "rest", -): - client = SecurityCenterManagementClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. 
- # with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - security_center_management.ListDescendantSecurityHealthAnalyticsCustomModulesResponse( - security_health_analytics_custom_modules=[ - security_center_management.SecurityHealthAnalyticsCustomModule(), - security_center_management.SecurityHealthAnalyticsCustomModule(), - security_center_management.SecurityHealthAnalyticsCustomModule(), - ], - next_page_token="abc", - ), - security_center_management.ListDescendantSecurityHealthAnalyticsCustomModulesResponse( - security_health_analytics_custom_modules=[], - next_page_token="def", - ), - security_center_management.ListDescendantSecurityHealthAnalyticsCustomModulesResponse( - security_health_analytics_custom_modules=[ - security_center_management.SecurityHealthAnalyticsCustomModule(), - ], - next_page_token="ghi", - ), - security_center_management.ListDescendantSecurityHealthAnalyticsCustomModulesResponse( - security_health_analytics_custom_modules=[ - security_center_management.SecurityHealthAnalyticsCustomModule(), - security_center_management.SecurityHealthAnalyticsCustomModule(), - ], - ), - ) - # Two responses for two calls - response = response + response - - # Wrap the values into proper Response objs - response = tuple( - security_center_management.ListDescendantSecurityHealthAnalyticsCustomModulesResponse.to_json( - x - ) - for x in response - ) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode("UTF-8") - return_val.status_code = 200 - req.side_effect = return_values - - sample_request = {"parent": "projects/sample1/locations/sample2"} - - pager = client.list_descendant_security_health_analytics_custom_modules( - request=sample_request - ) - - results = list(pager) - assert len(results) == 6 - assert all( - isinstance( - i, security_center_management.SecurityHealthAnalyticsCustomModule - ) - for i in results - ) - - pages = list( - client.list_descendant_security_health_analytics_custom_modules( - request=sample_request - ).pages + client.update_security_health_analytics_custom_module( + security_center_management.UpdateSecurityHealthAnalyticsCustomModuleRequest(), + security_health_analytics_custom_module=security_center_management.SecurityHealthAnalyticsCustomModule( + name="name_value" + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token + + +def test_update_security_health_analytics_custom_module_rest_error(): + client = SecurityCenterManagementClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) @pytest.mark.parametrize( "request_type", [ - security_center_management.GetSecurityHealthAnalyticsCustomModuleRequest, + security_center_management.DeleteSecurityHealthAnalyticsCustomModuleRequest, dict, ], ) -def test_get_security_health_analytics_custom_module_rest(request_type): +def test_delete_security_health_analytics_custom_module_rest(request_type): client = SecurityCenterManagementClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -12087,44 +14877,22 @@ def test_get_security_health_analytics_custom_module_rest(request_type): # Mock the http request call within the method and fake a response. 
with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = security_center_management.SecurityHealthAnalyticsCustomModule( - name="name_value", - display_name="display_name_value", - enablement_state=security_center_management.SecurityHealthAnalyticsCustomModule.EnablementState.ENABLED, - last_editor="last_editor_value", - ancestor_module="ancestor_module_value", - ) + return_value = None # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - # Convert return value to protobuf type - return_value = ( - security_center_management.SecurityHealthAnalyticsCustomModule.pb( - return_value - ) - ) - json_return_value = json_format.MessageToJson(return_value) + json_return_value = "" response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.get_security_health_analytics_custom_module(request) + response = client.delete_security_health_analytics_custom_module(request) # Establish that the response is the type that we expect. - assert isinstance( - response, security_center_management.SecurityHealthAnalyticsCustomModule - ) - assert response.name == "name_value" - assert response.display_name == "display_name_value" - assert ( - response.enablement_state - == security_center_management.SecurityHealthAnalyticsCustomModule.EnablementState.ENABLED - ) - assert response.last_editor == "last_editor_value" - assert response.ancestor_module == "ancestor_module_value" + assert response is None -def test_get_security_health_analytics_custom_module_rest_use_cached_wrapped_rpc(): +def test_delete_security_health_analytics_custom_module_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -12139,7 +14907,7 @@ def test_get_security_health_analytics_custom_module_rest_use_cached_wrapped_rpc # Ensure method has been cached assert ( - client._transport.get_security_health_analytics_custom_module + client._transport.delete_security_health_analytics_custom_module in client._transport._wrapped_methods ) @@ -12149,24 +14917,24 @@ def test_get_security_health_analytics_custom_module_rest_use_cached_wrapped_rpc "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.get_security_health_analytics_custom_module + client._transport.delete_security_health_analytics_custom_module ] = mock_rpc request = {} - client.get_security_health_analytics_custom_module(request) + client.delete_security_health_analytics_custom_module(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.get_security_health_analytics_custom_module(request) + client.delete_security_health_analytics_custom_module(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_get_security_health_analytics_custom_module_rest_required_fields( - request_type=security_center_management.GetSecurityHealthAnalyticsCustomModuleRequest, +def test_delete_security_health_analytics_custom_module_rest_required_fields( + request_type=security_center_management.DeleteSecurityHealthAnalyticsCustomModuleRequest, ): transport_class = transports.SecurityCenterManagementRestTransport @@ -12182,7 +14950,7 @@ def test_get_security_health_analytics_custom_module_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_security_health_analytics_custom_module._get_unset_required_fields( + ).delete_security_health_analytics_custom_module._get_unset_required_fields( jsonified_request ) jsonified_request.update(unset_fields) @@ -12193,9 +14961,11 @@ def test_get_security_health_analytics_custom_module_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_security_health_analytics_custom_module._get_unset_required_fields( + ).delete_security_health_analytics_custom_module._get_unset_required_fields( jsonified_request ) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("validate_only",)) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone @@ -12209,7 +14979,7 @@ def test_get_security_health_analytics_custom_module_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = security_center_management.SecurityHealthAnalyticsCustomModule() + return_value = None # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -12221,52 +14991,47 @@ def test_get_security_health_analytics_custom_module_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "get", + "method": "delete", "query_params": pb_request, } transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = ( - security_center_management.SecurityHealthAnalyticsCustomModule.pb( - return_value - ) - ) - json_return_value = json_format.MessageToJson(return_value) + json_return_value = "" response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.get_security_health_analytics_custom_module(request) + response = client.delete_security_health_analytics_custom_module(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_get_security_health_analytics_custom_module_rest_unset_required_fields(): +def test_delete_security_health_analytics_custom_module_rest_unset_required_fields(): transport = transports.SecurityCenterManagementRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.get_security_health_analytics_custom_module._get_unset_required_fields( + unset_fields = transport.delete_security_health_analytics_custom_module._get_unset_required_fields( {} ) - assert set(unset_fields) == (set(()) & set(("name",))) + assert set(unset_fields) == (set(("validateOnly",)) & set(("name",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_security_health_analytics_custom_module_rest_interceptors( +def test_delete_security_health_analytics_custom_module_rest_interceptors( null_interceptor, ): transport = transports.SecurityCenterManagementRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.SecurityCenterManagementRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.SecurityCenterManagementRestInterceptor() + ), ) client = SecurityCenterManagementClient(transport=transport) with mock.patch.object( @@ -12275,15 +15040,11 @@ def test_get_security_health_analytics_custom_module_rest_interceptors( path_template, "transcode" ) as transcode, mock.patch.object( transports.SecurityCenterManagementRestInterceptor, - "post_get_security_health_analytics_custom_module", - ) as post, mock.patch.object( - transports.SecurityCenterManagementRestInterceptor, - "pre_get_security_health_analytics_custom_module", + "pre_delete_security_health_analytics_custom_module", ) as pre: pre.assert_not_called() - post.assert_not_called() - pb_message = security_center_management.GetSecurityHealthAnalyticsCustomModuleRequest.pb( - security_center_management.GetSecurityHealthAnalyticsCustomModuleRequest() + pb_message = security_center_management.DeleteSecurityHealthAnalyticsCustomModuleRequest.pb( + security_center_management.DeleteSecurityHealthAnalyticsCustomModuleRequest() ) transcode.return_value = { "method": "post", @@ -12295,25 +15056,17 @@ def test_get_security_health_analytics_custom_module_rest_interceptors( req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = ( - 
security_center_management.SecurityHealthAnalyticsCustomModule.to_json( - security_center_management.SecurityHealthAnalyticsCustomModule() - ) - ) request = ( - security_center_management.GetSecurityHealthAnalyticsCustomModuleRequest() + security_center_management.DeleteSecurityHealthAnalyticsCustomModuleRequest() ) metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = ( - security_center_management.SecurityHealthAnalyticsCustomModule() - ) - client.get_security_health_analytics_custom_module( + client.delete_security_health_analytics_custom_module( request, metadata=[ ("key", "val"), @@ -12322,12 +15075,11 @@ def test_get_security_health_analytics_custom_module_rest_interceptors( ) pre.assert_called_once() - post.assert_called_once() -def test_get_security_health_analytics_custom_module_rest_bad_request( +def test_delete_security_health_analytics_custom_module_rest_bad_request( transport: str = "rest", - request_type=security_center_management.GetSecurityHealthAnalyticsCustomModuleRequest, + request_type=security_center_management.DeleteSecurityHealthAnalyticsCustomModuleRequest, ): client = SecurityCenterManagementClient( credentials=ga_credentials.AnonymousCredentials(), @@ -12349,10 +15101,10 @@ def test_get_security_health_analytics_custom_module_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.get_security_health_analytics_custom_module(request) + client.delete_security_health_analytics_custom_module(request) -def test_get_security_health_analytics_custom_module_rest_flattened(): +def test_delete_security_health_analytics_custom_module_rest_flattened(): client = SecurityCenterManagementClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -12361,7 +15113,7 @@ def test_get_security_health_analytics_custom_module_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = security_center_management.SecurityHealthAnalyticsCustomModule() + return_value = None # get arguments that satisfy an http rule for this method sample_request = { @@ -12377,17 +15129,11 @@ def test_get_security_health_analytics_custom_module_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - # Convert return value to protobuf type - return_value = ( - security_center_management.SecurityHealthAnalyticsCustomModule.pb( - return_value - ) - ) - json_return_value = json_format.MessageToJson(return_value) + json_return_value = "" response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.get_security_health_analytics_custom_module(**mock_args) + client.delete_security_health_analytics_custom_module(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
@@ -12400,7 +15146,7 @@ def test_get_security_health_analytics_custom_module_rest_flattened(): ) -def test_get_security_health_analytics_custom_module_rest_flattened_error( +def test_delete_security_health_analytics_custom_module_rest_flattened_error( transport: str = "rest", ): client = SecurityCenterManagementClient( @@ -12411,13 +15157,13 @@ def test_get_security_health_analytics_custom_module_rest_flattened_error( # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.get_security_health_analytics_custom_module( - security_center_management.GetSecurityHealthAnalyticsCustomModuleRequest(), + client.delete_security_health_analytics_custom_module( + security_center_management.DeleteSecurityHealthAnalyticsCustomModuleRequest(), name="name_value", ) -def test_get_security_health_analytics_custom_module_rest_error(): +def test_delete_security_health_analytics_custom_module_rest_error(): client = SecurityCenterManagementClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -12425,165 +15171,49 @@ def test_get_security_health_analytics_custom_module_rest_error(): @pytest.mark.parametrize( "request_type", - [ - security_center_management.CreateSecurityHealthAnalyticsCustomModuleRequest, - dict, - ], -) -def test_create_security_health_analytics_custom_module_rest(request_type): - client = SecurityCenterManagementClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} - request_init["security_health_analytics_custom_module"] = { - "name": "name_value", - "display_name": "display_name_value", - "enablement_state": 1, - "update_time": {"seconds": 751, "nanos": 543}, - "last_editor": "last_editor_value", - "ancestor_module": "ancestor_module_value", - "custom_config": { - "predicate": { - "expression": "expression_value", - "title": "title_value", - "description": "description_value", - "location": "location_value", - }, - "custom_output": { - "properties": [{"name": "name_value", "value_expression": {}}] - }, - "resource_selector": { - "resource_types": ["resource_types_value1", "resource_types_value2"] - }, - "severity": 1, - "description": "description_value", - "recommendation": "recommendation_value", - }, - } - # The version of a generated dependency at test runtime may differ from the version used during generation. - # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = security_center_management.CreateSecurityHealthAnalyticsCustomModuleRequest.meta.fields[ - "security_health_analytics_custom_module" - ] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. 
- message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init[ - "security_health_analytics_custom_module" - ].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - { - "field": field, - "subfield": subfield, - "is_repeated": is_repeated, - } - ) - - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range( - 0, - len(request_init["security_health_analytics_custom_module"][field]), - ): - del request_init["security_health_analytics_custom_module"][field][ - i - ][subfield] - else: - del request_init["security_health_analytics_custom_module"][field][ - subfield - ] + [ + security_center_management.SimulateSecurityHealthAnalyticsCustomModuleRequest, + dict, + ], +) +def test_simulate_security_health_analytics_custom_module_rest(request_type): + client = SecurityCenterManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = security_center_management.SecurityHealthAnalyticsCustomModule( - name="name_value", - display_name="display_name_value", - enablement_state=security_center_management.SecurityHealthAnalyticsCustomModule.EnablementState.ENABLED, - last_editor="last_editor_value", - ancestor_module="ancestor_module_value", + return_value = ( + security_center_management.SimulateSecurityHealthAnalyticsCustomModuleResponse() ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = ( - security_center_management.SecurityHealthAnalyticsCustomModule.pb( - return_value - ) + return_value = security_center_management.SimulateSecurityHealthAnalyticsCustomModuleResponse.pb( + return_value ) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.create_security_health_analytics_custom_module(request) + response = client.simulate_security_health_analytics_custom_module(request) # Establish that the response is the type that we expect. assert isinstance( - response, security_center_management.SecurityHealthAnalyticsCustomModule - ) - assert response.name == "name_value" - assert response.display_name == "display_name_value" - assert ( - response.enablement_state - == security_center_management.SecurityHealthAnalyticsCustomModule.EnablementState.ENABLED + response, + security_center_management.SimulateSecurityHealthAnalyticsCustomModuleResponse, ) - assert response.last_editor == "last_editor_value" - assert response.ancestor_module == "ancestor_module_value" -def test_create_security_health_analytics_custom_module_rest_use_cached_wrapped_rpc(): +def test_simulate_security_health_analytics_custom_module_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -12598,7 +15228,7 @@ def test_create_security_health_analytics_custom_module_rest_use_cached_wrapped_ # Ensure method has been cached assert ( - client._transport.create_security_health_analytics_custom_module + client._transport.simulate_security_health_analytics_custom_module in client._transport._wrapped_methods ) @@ -12608,24 +15238,24 @@ def test_create_security_health_analytics_custom_module_rest_use_cached_wrapped_ "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.create_security_health_analytics_custom_module + client._transport.simulate_security_health_analytics_custom_module ] = mock_rpc request = {} - client.create_security_health_analytics_custom_module(request) + client.simulate_security_health_analytics_custom_module(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.create_security_health_analytics_custom_module(request) + client.simulate_security_health_analytics_custom_module(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_create_security_health_analytics_custom_module_rest_required_fields( - request_type=security_center_management.CreateSecurityHealthAnalyticsCustomModuleRequest, +def test_simulate_security_health_analytics_custom_module_rest_required_fields( + request_type=security_center_management.SimulateSecurityHealthAnalyticsCustomModuleRequest, ): transport_class = transports.SecurityCenterManagementRestTransport @@ -12641,7 +15271,7 @@ def test_create_security_health_analytics_custom_module_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).create_security_health_analytics_custom_module._get_unset_required_fields( + ).simulate_security_health_analytics_custom_module._get_unset_required_fields( jsonified_request ) jsonified_request.update(unset_fields) @@ -12652,11 +15282,9 @@ def test_create_security_health_analytics_custom_module_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).create_security_health_analytics_custom_module._get_unset_required_fields( + ).simulate_security_health_analytics_custom_module._get_unset_required_fields( jsonified_request ) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("validate_only",)) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone @@ -12670,7 +15298,9 @@ def test_create_security_health_analytics_custom_module_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = security_center_management.SecurityHealthAnalyticsCustomModule() + return_value = ( + security_center_management.SimulateSecurityHealthAnalyticsCustomModuleResponse() + ) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -12692,51 +15322,52 @@ def test_create_security_health_analytics_custom_module_rest_required_fields( response_value.status_code = 200 # Convert return value to protobuf type - return_value = ( - security_center_management.SecurityHealthAnalyticsCustomModule.pb( - return_value - ) + return_value = security_center_management.SimulateSecurityHealthAnalyticsCustomModuleResponse.pb( + return_value ) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.create_security_health_analytics_custom_module(request) + response = client.simulate_security_health_analytics_custom_module(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_create_security_health_analytics_custom_module_rest_unset_required_fields(): +def test_simulate_security_health_analytics_custom_module_rest_unset_required_fields(): transport = transports.SecurityCenterManagementRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.create_security_health_analytics_custom_module._get_unset_required_fields( + unset_fields = transport.simulate_security_health_analytics_custom_module._get_unset_required_fields( {} ) assert set(unset_fields) == ( - set(("validateOnly",)) + set(()) & set( ( "parent", - "securityHealthAnalyticsCustomModule", + "customConfig", + "resource", ) ) ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_create_security_health_analytics_custom_module_rest_interceptors( +def test_simulate_security_health_analytics_custom_module_rest_interceptors( null_interceptor, ): transport = transports.SecurityCenterManagementRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.SecurityCenterManagementRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.SecurityCenterManagementRestInterceptor() + ), ) client = SecurityCenterManagementClient(transport=transport) with mock.patch.object( @@ -12745,15 +15376,15 @@ def test_create_security_health_analytics_custom_module_rest_interceptors( path_template, "transcode" ) as transcode, mock.patch.object( transports.SecurityCenterManagementRestInterceptor, - "post_create_security_health_analytics_custom_module", + "post_simulate_security_health_analytics_custom_module", ) as post, mock.patch.object( transports.SecurityCenterManagementRestInterceptor, - "pre_create_security_health_analytics_custom_module", + "pre_simulate_security_health_analytics_custom_module", ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = security_center_management.CreateSecurityHealthAnalyticsCustomModuleRequest.pb( - security_center_management.CreateSecurityHealthAnalyticsCustomModuleRequest() + pb_message = security_center_management.SimulateSecurityHealthAnalyticsCustomModuleRequest.pb( + security_center_management.SimulateSecurityHealthAnalyticsCustomModuleRequest() ) transcode.return_value = { "method": "post", @@ -12765,14 +15396,12 @@ def test_create_security_health_analytics_custom_module_rest_interceptors( req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = ( - 
security_center_management.SecurityHealthAnalyticsCustomModule.to_json( - security_center_management.SecurityHealthAnalyticsCustomModule() - ) + req.return_value._content = security_center_management.SimulateSecurityHealthAnalyticsCustomModuleResponse.to_json( + security_center_management.SimulateSecurityHealthAnalyticsCustomModuleResponse() ) request = ( - security_center_management.CreateSecurityHealthAnalyticsCustomModuleRequest() + security_center_management.SimulateSecurityHealthAnalyticsCustomModuleRequest() ) metadata = [ ("key", "val"), @@ -12780,10 +15409,10 @@ def test_create_security_health_analytics_custom_module_rest_interceptors( ] pre.return_value = request, metadata post.return_value = ( - security_center_management.SecurityHealthAnalyticsCustomModule() + security_center_management.SimulateSecurityHealthAnalyticsCustomModuleResponse() ) - client.create_security_health_analytics_custom_module( + client.simulate_security_health_analytics_custom_module( request, metadata=[ ("key", "val"), @@ -12795,9 +15424,9 @@ def test_create_security_health_analytics_custom_module_rest_interceptors( post.assert_called_once() -def test_create_security_health_analytics_custom_module_rest_bad_request( +def test_simulate_security_health_analytics_custom_module_rest_bad_request( transport: str = "rest", - request_type=security_center_management.CreateSecurityHealthAnalyticsCustomModuleRequest, + request_type=security_center_management.SimulateSecurityHealthAnalyticsCustomModuleRequest, ): client = SecurityCenterManagementClient( credentials=ga_credentials.AnonymousCredentials(), @@ -12817,10 +15446,10 @@ def test_create_security_health_analytics_custom_module_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.create_security_health_analytics_custom_module(request) + client.simulate_security_health_analytics_custom_module(request) -def test_create_security_health_analytics_custom_module_rest_flattened(): +def test_simulate_security_health_analytics_custom_module_rest_flattened(): client = SecurityCenterManagementClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -12829,7 +15458,9 @@ def test_create_security_health_analytics_custom_module_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = security_center_management.SecurityHealthAnalyticsCustomModule() + return_value = ( + security_center_management.SimulateSecurityHealthAnalyticsCustomModuleResponse() + ) # get arguments that satisfy an http rule for this method sample_request = {"parent": "projects/sample1/locations/sample2"} @@ -12837,8 +15468,11 @@ def test_create_security_health_analytics_custom_module_rest_flattened(): # get truthy value for each flattened field mock_args = dict( parent="parent_value", - security_health_analytics_custom_module=security_center_management.SecurityHealthAnalyticsCustomModule( - name="name_value" + custom_config=security_center_management.CustomConfig( + predicate=expr_pb2.Expr(expression="expression_value") + ), + resource=security_center_management.SimulateSecurityHealthAnalyticsCustomModuleRequest.SimulatedResource( + resource_type="resource_type_value" ), ) mock_args.update(sample_request) @@ -12847,29 +15481,27 @@ def test_create_security_health_analytics_custom_module_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = ( - security_center_management.SecurityHealthAnalyticsCustomModule.pb( - return_value - ) + return_value = security_center_management.SimulateSecurityHealthAnalyticsCustomModuleResponse.pb( + return_value ) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.create_security_health_analytics_custom_module(**mock_args) + client.simulate_security_health_analytics_custom_module(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{parent=projects/*/locations/*}/securityHealthAnalyticsCustomModules" + "%s/v1/{parent=projects/*/locations/*}/securityHealthAnalyticsCustomModules:simulate" % client.transport._host, args[1], ) -def test_create_security_health_analytics_custom_module_rest_flattened_error( +def test_simulate_security_health_analytics_custom_module_rest_flattened_error( transport: str = "rest", ): client = SecurityCenterManagementClient( @@ -12880,186 +15512,69 @@ def test_create_security_health_analytics_custom_module_rest_flattened_error( # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.create_security_health_analytics_custom_module( - security_center_management.CreateSecurityHealthAnalyticsCustomModuleRequest(), + client.simulate_security_health_analytics_custom_module( + security_center_management.SimulateSecurityHealthAnalyticsCustomModuleRequest(), parent="parent_value", - security_health_analytics_custom_module=security_center_management.SecurityHealthAnalyticsCustomModule( - name="name_value" + custom_config=security_center_management.CustomConfig( + predicate=expr_pb2.Expr(expression="expression_value") + ), + resource=security_center_management.SimulateSecurityHealthAnalyticsCustomModuleRequest.SimulatedResource( + resource_type="resource_type_value" ), ) - -def test_create_security_health_analytics_custom_module_rest_error(): - client = SecurityCenterManagementClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - - -@pytest.mark.parametrize( - "request_type", - [ - security_center_management.UpdateSecurityHealthAnalyticsCustomModuleRequest, - dict, - ], -) -def test_update_security_health_analytics_custom_module_rest(request_type): - client = SecurityCenterManagementClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = { - "security_health_analytics_custom_module": { - "name": "projects/sample1/locations/sample2/securityHealthAnalyticsCustomModules/sample3" - } - } - request_init["security_health_analytics_custom_module"] = { - "name": "projects/sample1/locations/sample2/securityHealthAnalyticsCustomModules/sample3", - "display_name": "display_name_value", - "enablement_state": 1, - "update_time": {"seconds": 751, "nanos": 543}, - "last_editor": "last_editor_value", - "ancestor_module": "ancestor_module_value", - "custom_config": { - "predicate": { - "expression": "expression_value", - "title": "title_value", - "description": "description_value", - "location": "location_value", - }, - "custom_output": { - "properties": [{"name": "name_value", "value_expression": {}}] - }, - "resource_selector": { - "resource_types": ["resource_types_value1", "resource_types_value2"] - }, - "severity": 1, - "description": "description_value", - "recommendation": "recommendation_value", - }, - } - # The version of a generated dependency at test runtime may differ from the version used during generation. - # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = security_center_management.UpdateSecurityHealthAnalyticsCustomModuleRequest.meta.fields[ - "security_health_analytics_custom_module" - ] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. 
- message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init[ - "security_health_analytics_custom_module" - ].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - { - "field": field, - "subfield": subfield, - "is_repeated": is_repeated, - } - ) - - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range( - 0, - len(request_init["security_health_analytics_custom_module"][field]), - ): - del request_init["security_health_analytics_custom_module"][field][ - i - ][subfield] - else: - del request_init["security_health_analytics_custom_module"][field][ - subfield - ] + +def test_simulate_security_health_analytics_custom_module_rest_error(): + client = SecurityCenterManagementClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + security_center_management.ListEffectiveEventThreatDetectionCustomModulesRequest, + dict, + ], +) +def test_list_effective_event_threat_detection_custom_modules_rest(request_type): + client = SecurityCenterManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = security_center_management.SecurityHealthAnalyticsCustomModule( - name="name_value", - display_name="display_name_value", - enablement_state=security_center_management.SecurityHealthAnalyticsCustomModule.EnablementState.ENABLED, - last_editor="last_editor_value", - ancestor_module="ancestor_module_value", + return_value = security_center_management.ListEffectiveEventThreatDetectionCustomModulesResponse( + next_page_token="next_page_token_value", ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = ( - security_center_management.SecurityHealthAnalyticsCustomModule.pb( - return_value - ) + return_value = security_center_management.ListEffectiveEventThreatDetectionCustomModulesResponse.pb( + return_value ) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.update_security_health_analytics_custom_module(request) + response = client.list_effective_event_threat_detection_custom_modules(request) # Establish that the response is the type that we expect. assert isinstance( - response, security_center_management.SecurityHealthAnalyticsCustomModule - ) - assert response.name == "name_value" - assert response.display_name == "display_name_value" - assert ( - response.enablement_state - == security_center_management.SecurityHealthAnalyticsCustomModule.EnablementState.ENABLED + response, pagers.ListEffectiveEventThreatDetectionCustomModulesPager ) - assert response.last_editor == "last_editor_value" - assert response.ancestor_module == "ancestor_module_value" + assert response.next_page_token == "next_page_token_value" -def test_update_security_health_analytics_custom_module_rest_use_cached_wrapped_rpc(): +def test_list_effective_event_threat_detection_custom_modules_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -13074,7 +15589,7 @@ def test_update_security_health_analytics_custom_module_rest_use_cached_wrapped_ # Ensure method has been cached assert ( - client._transport.update_security_health_analytics_custom_module + client._transport.list_effective_event_threat_detection_custom_modules in client._transport._wrapped_methods ) @@ -13084,28 +15599,29 @@ def test_update_security_health_analytics_custom_module_rest_use_cached_wrapped_ "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.update_security_health_analytics_custom_module + client._transport.list_effective_event_threat_detection_custom_modules ] = mock_rpc request = {} - client.update_security_health_analytics_custom_module(request) + client.list_effective_event_threat_detection_custom_modules(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.update_security_health_analytics_custom_module(request) + client.list_effective_event_threat_detection_custom_modules(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_update_security_health_analytics_custom_module_rest_required_fields( - request_type=security_center_management.UpdateSecurityHealthAnalyticsCustomModuleRequest, +def test_list_effective_event_threat_detection_custom_modules_rest_required_fields( + request_type=security_center_management.ListEffectiveEventThreatDetectionCustomModulesRequest, ): transport_class = transports.SecurityCenterManagementRestTransport request_init = {} + request_init["parent"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -13116,28 +15632,32 @@ def test_update_security_health_analytics_custom_module_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).update_security_health_analytics_custom_module._get_unset_required_fields( + ).list_effective_event_threat_detection_custom_modules._get_unset_required_fields( jsonified_request ) jsonified_request.update(unset_fields) # verify required fields with default values are now present + jsonified_request["parent"] = "parent_value" + unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).update_security_health_analytics_custom_module._get_unset_required_fields( + ).list_effective_event_threat_detection_custom_modules._get_unset_required_fields( jsonified_request ) # Check that path parameters and body parameters are not mixing in. assert not set(unset_fields) - set( ( - "update_mask", - "validate_only", + "page_size", + "page_token", ) ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" client = SecurityCenterManagementClient( credentials=ga_credentials.AnonymousCredentials(), @@ -13146,7 +15666,9 @@ def test_update_security_health_analytics_custom_module_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = security_center_management.SecurityHealthAnalyticsCustomModule() + return_value = ( + security_center_management.ListEffectiveEventThreatDetectionCustomModulesResponse() + ) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -13158,66 +15680,62 @@ def test_update_security_health_analytics_custom_module_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "patch", + "method": "get", "query_params": pb_request, } - transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = ( - security_center_management.SecurityHealthAnalyticsCustomModule.pb( - return_value - ) + return_value = security_center_management.ListEffectiveEventThreatDetectionCustomModulesResponse.pb( + return_value ) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.update_security_health_analytics_custom_module(request) + response = client.list_effective_event_threat_detection_custom_modules( + request + ) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_update_security_health_analytics_custom_module_rest_unset_required_fields(): +def test_list_effective_event_threat_detection_custom_modules_rest_unset_required_fields(): transport = transports.SecurityCenterManagementRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.update_security_health_analytics_custom_module._get_unset_required_fields( + unset_fields = transport.list_effective_event_threat_detection_custom_modules._get_unset_required_fields( {} ) assert set(unset_fields) == ( set( ( - "updateMask", - "validateOnly", - ) - ) - & set( - ( - "updateMask", - "securityHealthAnalyticsCustomModule", + "pageSize", + "pageToken", ) ) + & set(("parent",)) ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_update_security_health_analytics_custom_module_rest_interceptors( +def test_list_effective_event_threat_detection_custom_modules_rest_interceptors( null_interceptor, ): transport = transports.SecurityCenterManagementRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.SecurityCenterManagementRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.SecurityCenterManagementRestInterceptor() + ), ) client = SecurityCenterManagementClient(transport=transport) with mock.patch.object( @@ -13226,15 +15744,15 @@ def test_update_security_health_analytics_custom_module_rest_interceptors( path_template, "transcode" ) as transcode, mock.patch.object( transports.SecurityCenterManagementRestInterceptor, - "post_update_security_health_analytics_custom_module", + "post_list_effective_event_threat_detection_custom_modules", ) as post, mock.patch.object( transports.SecurityCenterManagementRestInterceptor, - "pre_update_security_health_analytics_custom_module", + "pre_list_effective_event_threat_detection_custom_modules", ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = security_center_management.UpdateSecurityHealthAnalyticsCustomModuleRequest.pb( - security_center_management.UpdateSecurityHealthAnalyticsCustomModuleRequest() + pb_message = security_center_management.ListEffectiveEventThreatDetectionCustomModulesRequest.pb( + 
security_center_management.ListEffectiveEventThreatDetectionCustomModulesRequest() ) transcode.return_value = { "method": "post", @@ -13246,14 +15764,12 @@ def test_update_security_health_analytics_custom_module_rest_interceptors( req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = ( - security_center_management.SecurityHealthAnalyticsCustomModule.to_json( - security_center_management.SecurityHealthAnalyticsCustomModule() - ) + req.return_value._content = security_center_management.ListEffectiveEventThreatDetectionCustomModulesResponse.to_json( + security_center_management.ListEffectiveEventThreatDetectionCustomModulesResponse() ) request = ( - security_center_management.UpdateSecurityHealthAnalyticsCustomModuleRequest() + security_center_management.ListEffectiveEventThreatDetectionCustomModulesRequest() ) metadata = [ ("key", "val"), @@ -13261,10 +15777,10 @@ def test_update_security_health_analytics_custom_module_rest_interceptors( ] pre.return_value = request, metadata post.return_value = ( - security_center_management.SecurityHealthAnalyticsCustomModule() + security_center_management.ListEffectiveEventThreatDetectionCustomModulesResponse() ) - client.update_security_health_analytics_custom_module( + client.list_effective_event_threat_detection_custom_modules( request, metadata=[ ("key", "val"), @@ -13276,9 +15792,9 @@ def test_update_security_health_analytics_custom_module_rest_interceptors( post.assert_called_once() -def test_update_security_health_analytics_custom_module_rest_bad_request( +def test_list_effective_event_threat_detection_custom_modules_rest_bad_request( transport: str = "rest", - request_type=security_center_management.UpdateSecurityHealthAnalyticsCustomModuleRequest, + request_type=security_center_management.ListEffectiveEventThreatDetectionCustomModulesRequest, ): client = SecurityCenterManagementClient( credentials=ga_credentials.AnonymousCredentials(), @@ -13286,11 +15802,7 @@ def test_update_security_health_analytics_custom_module_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = { - "security_health_analytics_custom_module": { - "name": "projects/sample1/locations/sample2/securityHealthAnalyticsCustomModules/sample3" - } - } + request_init = {"parent": "projects/sample1/locations/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -13302,10 +15814,10 @@ def test_update_security_health_analytics_custom_module_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.update_security_health_analytics_custom_module(request) + client.list_effective_event_threat_detection_custom_modules(request) -def test_update_security_health_analytics_custom_module_rest_flattened(): +def test_list_effective_event_threat_detection_custom_modules_rest_flattened(): client = SecurityCenterManagementClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -13314,21 +15826,16 @@ def test_update_security_health_analytics_custom_module_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = security_center_management.SecurityHealthAnalyticsCustomModule() + return_value = ( + security_center_management.ListEffectiveEventThreatDetectionCustomModulesResponse() + ) # get arguments that satisfy an http rule for this method - sample_request = { - "security_health_analytics_custom_module": { - "name": "projects/sample1/locations/sample2/securityHealthAnalyticsCustomModules/sample3" - } - } + sample_request = {"parent": "projects/sample1/locations/sample2"} # get truthy value for each flattened field mock_args = dict( - security_health_analytics_custom_module=security_center_management.SecurityHealthAnalyticsCustomModule( - name="name_value" - ), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + parent="parent_value", ) mock_args.update(sample_request) @@ -13336,29 +15843,44 @@ def test_update_security_health_analytics_custom_module_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = ( - security_center_management.SecurityHealthAnalyticsCustomModule.pb( - return_value - ) + return_value = security_center_management.ListEffectiveEventThreatDetectionCustomModulesResponse.pb( + return_value ) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.update_security_health_analytics_custom_module(**mock_args) + client.list_effective_event_threat_detection_custom_modules(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{security_health_analytics_custom_module.name=projects/*/locations/*/securityHealthAnalyticsCustomModules/*}" + "%s/v1/{parent=projects/*/locations/*}/effectiveEventThreatDetectionCustomModules" % client.transport._host, args[1], ) -def test_update_security_health_analytics_custom_module_rest_flattened_error( +def test_list_effective_event_threat_detection_custom_modules_rest_flattened_error( + transport: str = "rest", +): + client = SecurityCenterManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_effective_event_threat_detection_custom_modules( + security_center_management.ListEffectiveEventThreatDetectionCustomModulesRequest(), + parent="parent_value", + ) + + +def test_list_effective_event_threat_detection_custom_modules_rest_pager( transport: str = "rest", ): client = SecurityCenterManagementClient( @@ -13366,32 +15888,85 @@ def test_update_security_health_analytics_custom_module_rest_flattened_error( transport=transport, ) - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.update_security_health_analytics_custom_module( - security_center_management.UpdateSecurityHealthAnalyticsCustomModuleRequest(), - security_health_analytics_custom_module=security_center_management.SecurityHealthAnalyticsCustomModule( - name="name_value" - ), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + security_center_management.ListEffectiveEventThreatDetectionCustomModulesResponse( + effective_event_threat_detection_custom_modules=[ + security_center_management.EffectiveEventThreatDetectionCustomModule(), + security_center_management.EffectiveEventThreatDetectionCustomModule(), + security_center_management.EffectiveEventThreatDetectionCustomModule(), + ], + next_page_token="abc", + ), + security_center_management.ListEffectiveEventThreatDetectionCustomModulesResponse( + effective_event_threat_detection_custom_modules=[], + next_page_token="def", + ), + security_center_management.ListEffectiveEventThreatDetectionCustomModulesResponse( + effective_event_threat_detection_custom_modules=[ + security_center_management.EffectiveEventThreatDetectionCustomModule(), + ], + next_page_token="ghi", + ), + security_center_management.ListEffectiveEventThreatDetectionCustomModulesResponse( + effective_event_threat_detection_custom_modules=[ + security_center_management.EffectiveEventThreatDetectionCustomModule(), + security_center_management.EffectiveEventThreatDetectionCustomModule(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + security_center_management.ListEffectiveEventThreatDetectionCustomModulesResponse.to_json( + x + ) + for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "projects/sample1/locations/sample2"} + + pager = client.list_effective_event_threat_detection_custom_modules( + request=sample_request ) + results = list(pager) + assert len(results) == 6 + assert all( + isinstance( + i, security_center_management.EffectiveEventThreatDetectionCustomModule + ) + for i in results + ) -def test_update_security_health_analytics_custom_module_rest_error(): - client = SecurityCenterManagementClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) + pages = list( + client.list_effective_event_threat_detection_custom_modules( + request=sample_request + ).pages + ) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token @pytest.mark.parametrize( "request_type", [ - security_center_management.DeleteSecurityHealthAnalyticsCustomModuleRequest, + security_center_management.GetEffectiveEventThreatDetectionCustomModuleRequest, dict, ], ) -def test_delete_security_health_analytics_custom_module_rest(request_type): +def test_get_effective_event_threat_detection_custom_module_rest(request_type): client = SecurityCenterManagementClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -13399,29 +15974,51 @@ def test_delete_security_health_analytics_custom_module_rest(request_type): # send a request that will satisfy transcoding request_init = { - "name": "projects/sample1/locations/sample2/securityHealthAnalyticsCustomModules/sample3" + "name": "projects/sample1/locations/sample2/effectiveEventThreatDetectionCustomModules/sample3" } request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = None + return_value = security_center_management.EffectiveEventThreatDetectionCustomModule( + name="name_value", + enablement_state=security_center_management.EffectiveEventThreatDetectionCustomModule.EnablementState.ENABLED, + type_="type__value", + display_name="display_name_value", + description="description_value", + ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - json_return_value = "" + # Convert return value to protobuf type + return_value = ( + security_center_management.EffectiveEventThreatDetectionCustomModule.pb( + return_value + ) + ) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.delete_security_health_analytics_custom_module(request) + response = client.get_effective_event_threat_detection_custom_module(request) # Establish that the response is the type that we expect. - assert response is None + assert isinstance( + response, security_center_management.EffectiveEventThreatDetectionCustomModule + ) + assert response.name == "name_value" + assert ( + response.enablement_state + == security_center_management.EffectiveEventThreatDetectionCustomModule.EnablementState.ENABLED + ) + assert response.type_ == "type__value" + assert response.display_name == "display_name_value" + assert response.description == "description_value" -def test_delete_security_health_analytics_custom_module_rest_use_cached_wrapped_rpc(): +def test_get_effective_event_threat_detection_custom_module_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -13436,7 +16033,7 @@ def test_delete_security_health_analytics_custom_module_rest_use_cached_wrapped_ # Ensure method has been cached assert ( - client._transport.delete_security_health_analytics_custom_module + client._transport.get_effective_event_threat_detection_custom_module in client._transport._wrapped_methods ) @@ -13446,24 +16043,24 @@ def test_delete_security_health_analytics_custom_module_rest_use_cached_wrapped_ "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.delete_security_health_analytics_custom_module + client._transport.get_effective_event_threat_detection_custom_module ] = mock_rpc request = {} - client.delete_security_health_analytics_custom_module(request) + client.get_effective_event_threat_detection_custom_module(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.delete_security_health_analytics_custom_module(request) + client.get_effective_event_threat_detection_custom_module(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_delete_security_health_analytics_custom_module_rest_required_fields( - request_type=security_center_management.DeleteSecurityHealthAnalyticsCustomModuleRequest, +def test_get_effective_event_threat_detection_custom_module_rest_required_fields( + request_type=security_center_management.GetEffectiveEventThreatDetectionCustomModuleRequest, ): transport_class = transports.SecurityCenterManagementRestTransport @@ -13479,7 +16076,7 @@ def test_delete_security_health_analytics_custom_module_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).delete_security_health_analytics_custom_module._get_unset_required_fields( + ).get_effective_event_threat_detection_custom_module._get_unset_required_fields( jsonified_request ) jsonified_request.update(unset_fields) @@ -13490,11 +16087,9 @@ def test_delete_security_health_analytics_custom_module_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).delete_security_health_analytics_custom_module._get_unset_required_fields( + ).get_effective_event_threat_detection_custom_module._get_unset_required_fields( jsonified_request ) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("validate_only",)) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone @@ -13508,7 +16103,9 @@ def test_delete_security_health_analytics_custom_module_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = None + return_value = ( + security_center_management.EffectiveEventThreatDetectionCustomModule() + ) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -13520,45 +16117,56 @@ def test_delete_security_health_analytics_custom_module_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "delete", + "method": "get", "query_params": pb_request, } transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 - json_return_value = "" + + # Convert return value to protobuf type + return_value = ( + security_center_management.EffectiveEventThreatDetectionCustomModule.pb( + return_value + ) + ) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.delete_security_health_analytics_custom_module(request) + response = client.get_effective_event_threat_detection_custom_module( + request + ) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_delete_security_health_analytics_custom_module_rest_unset_required_fields(): +def test_get_effective_event_threat_detection_custom_module_rest_unset_required_fields(): transport = transports.SecurityCenterManagementRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.delete_security_health_analytics_custom_module._get_unset_required_fields( + unset_fields = transport.get_effective_event_threat_detection_custom_module._get_unset_required_fields( {} ) - assert set(unset_fields) == (set(("validateOnly",)) & set(("name",))) + assert set(unset_fields) == (set(()) & set(("name",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_delete_security_health_analytics_custom_module_rest_interceptors( +def test_get_effective_event_threat_detection_custom_module_rest_interceptors( null_interceptor, ): transport = transports.SecurityCenterManagementRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.SecurityCenterManagementRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.SecurityCenterManagementRestInterceptor() + ), ) client = SecurityCenterManagementClient(transport=transport) with mock.patch.object( @@ -13567,11 +16175,15 @@ def test_delete_security_health_analytics_custom_module_rest_interceptors( path_template, "transcode" ) as transcode, mock.patch.object( transports.SecurityCenterManagementRestInterceptor, - "pre_delete_security_health_analytics_custom_module", + "post_get_effective_event_threat_detection_custom_module", + ) as post, mock.patch.object( + transports.SecurityCenterManagementRestInterceptor, + "pre_get_effective_event_threat_detection_custom_module", ) as pre: pre.assert_not_called() - pb_message = security_center_management.DeleteSecurityHealthAnalyticsCustomModuleRequest.pb( - security_center_management.DeleteSecurityHealthAnalyticsCustomModuleRequest() + post.assert_not_called() + pb_message = security_center_management.GetEffectiveEventThreatDetectionCustomModuleRequest.pb( + security_center_management.GetEffectiveEventThreatDetectionCustomModuleRequest() ) transcode.return_value = { "method": "post", @@ -13583,17 +16195,23 @@ def test_delete_security_health_analytics_custom_module_rest_interceptors( req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = 
PreparedRequest() + req.return_value._content = security_center_management.EffectiveEventThreatDetectionCustomModule.to_json( + security_center_management.EffectiveEventThreatDetectionCustomModule() + ) request = ( - security_center_management.DeleteSecurityHealthAnalyticsCustomModuleRequest() + security_center_management.GetEffectiveEventThreatDetectionCustomModuleRequest() ) metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata + post.return_value = ( + security_center_management.EffectiveEventThreatDetectionCustomModule() + ) - client.delete_security_health_analytics_custom_module( + client.get_effective_event_threat_detection_custom_module( request, metadata=[ ("key", "val"), @@ -13602,11 +16220,12 @@ def test_delete_security_health_analytics_custom_module_rest_interceptors( ) pre.assert_called_once() + post.assert_called_once() -def test_delete_security_health_analytics_custom_module_rest_bad_request( +def test_get_effective_event_threat_detection_custom_module_rest_bad_request( transport: str = "rest", - request_type=security_center_management.DeleteSecurityHealthAnalyticsCustomModuleRequest, + request_type=security_center_management.GetEffectiveEventThreatDetectionCustomModuleRequest, ): client = SecurityCenterManagementClient( credentials=ga_credentials.AnonymousCredentials(), @@ -13615,7 +16234,7 @@ def test_delete_security_health_analytics_custom_module_rest_bad_request( # send a request that will satisfy transcoding request_init = { - "name": "projects/sample1/locations/sample2/securityHealthAnalyticsCustomModules/sample3" + "name": "projects/sample1/locations/sample2/effectiveEventThreatDetectionCustomModules/sample3" } request = request_type(**request_init) @@ -13628,10 +16247,10 @@ def test_delete_security_health_analytics_custom_module_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.delete_security_health_analytics_custom_module(request) + client.get_effective_event_threat_detection_custom_module(request) -def test_delete_security_health_analytics_custom_module_rest_flattened(): +def test_get_effective_event_threat_detection_custom_module_rest_flattened(): client = SecurityCenterManagementClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -13640,11 +16259,13 @@ def test_delete_security_health_analytics_custom_module_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = None + return_value = ( + security_center_management.EffectiveEventThreatDetectionCustomModule() + ) # get arguments that satisfy an http rule for this method sample_request = { - "name": "projects/sample1/locations/sample2/securityHealthAnalyticsCustomModules/sample3" + "name": "projects/sample1/locations/sample2/effectiveEventThreatDetectionCustomModules/sample3" } # get truthy value for each flattened field @@ -13656,24 +16277,30 @@ def test_delete_security_health_analytics_custom_module_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - json_return_value = "" + # Convert return value to protobuf type + return_value = ( + security_center_management.EffectiveEventThreatDetectionCustomModule.pb( + return_value + ) + ) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.delete_security_health_analytics_custom_module(**mock_args) + client.get_effective_event_threat_detection_custom_module(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{name=projects/*/locations/*/securityHealthAnalyticsCustomModules/*}" + "%s/v1/{name=projects/*/locations/*/effectiveEventThreatDetectionCustomModules/*}" % client.transport._host, args[1], ) -def test_delete_security_health_analytics_custom_module_rest_flattened_error( +def test_get_effective_event_threat_detection_custom_module_rest_flattened_error( transport: str = "rest", ): client = SecurityCenterManagementClient( @@ -13684,13 +16311,13 @@ def test_delete_security_health_analytics_custom_module_rest_flattened_error( # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.delete_security_health_analytics_custom_module( - security_center_management.DeleteSecurityHealthAnalyticsCustomModuleRequest(), + client.get_effective_event_threat_detection_custom_module( + security_center_management.GetEffectiveEventThreatDetectionCustomModuleRequest(), name="name_value", ) -def test_delete_security_health_analytics_custom_module_rest_error(): +def test_get_effective_event_threat_detection_custom_module_rest_error(): client = SecurityCenterManagementClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -13699,11 +16326,11 @@ def test_delete_security_health_analytics_custom_module_rest_error(): @pytest.mark.parametrize( "request_type", [ - security_center_management.SimulateSecurityHealthAnalyticsCustomModuleRequest, + security_center_management.ListEventThreatDetectionCustomModulesRequest, dict, ], ) -def test_simulate_security_health_analytics_custom_module_rest(request_type): +def test_list_event_threat_detection_custom_modules_rest(request_type): client = SecurityCenterManagementClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -13717,30 +16344,32 @@ def test_simulate_security_health_analytics_custom_module_rest(request_type): with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
return_value = ( - security_center_management.SimulateSecurityHealthAnalyticsCustomModuleResponse() + security_center_management.ListEventThreatDetectionCustomModulesResponse( + next_page_token="next_page_token_value", + ) ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = security_center_management.SimulateSecurityHealthAnalyticsCustomModuleResponse.pb( - return_value + return_value = ( + security_center_management.ListEventThreatDetectionCustomModulesResponse.pb( + return_value + ) ) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.simulate_security_health_analytics_custom_module(request) + response = client.list_event_threat_detection_custom_modules(request) # Establish that the response is the type that we expect. - assert isinstance( - response, - security_center_management.SimulateSecurityHealthAnalyticsCustomModuleResponse, - ) + assert isinstance(response, pagers.ListEventThreatDetectionCustomModulesPager) + assert response.next_page_token == "next_page_token_value" -def test_simulate_security_health_analytics_custom_module_rest_use_cached_wrapped_rpc(): +def test_list_event_threat_detection_custom_modules_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -13755,7 +16384,7 @@ def test_simulate_security_health_analytics_custom_module_rest_use_cached_wrappe # Ensure method has been cached assert ( - client._transport.simulate_security_health_analytics_custom_module + client._transport.list_event_threat_detection_custom_modules in client._transport._wrapped_methods ) @@ -13765,24 +16394,24 @@ def test_simulate_security_health_analytics_custom_module_rest_use_cached_wrappe "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.simulate_security_health_analytics_custom_module + client._transport.list_event_threat_detection_custom_modules ] = mock_rpc request = {} - client.simulate_security_health_analytics_custom_module(request) + client.list_event_threat_detection_custom_modules(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.simulate_security_health_analytics_custom_module(request) + client.list_event_threat_detection_custom_modules(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_simulate_security_health_analytics_custom_module_rest_required_fields( - request_type=security_center_management.SimulateSecurityHealthAnalyticsCustomModuleRequest, +def test_list_event_threat_detection_custom_modules_rest_required_fields( + request_type=security_center_management.ListEventThreatDetectionCustomModulesRequest, ): transport_class = transports.SecurityCenterManagementRestTransport @@ -13798,7 +16427,7 @@ def test_simulate_security_health_analytics_custom_module_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).simulate_security_health_analytics_custom_module._get_unset_required_fields( + ).list_event_threat_detection_custom_modules._get_unset_required_fields( jsonified_request ) jsonified_request.update(unset_fields) @@ -13809,9 +16438,16 @@ def test_simulate_security_health_analytics_custom_module_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).simulate_security_health_analytics_custom_module._get_unset_required_fields( + ).list_event_threat_detection_custom_modules._get_unset_required_fields( jsonified_request ) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "page_size", + "page_token", + ) + ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone @@ -13826,7 +16462,7 @@ def test_simulate_security_health_analytics_custom_module_rest_required_fields( # Designate an appropriate value for the returned response. return_value = ( - security_center_management.SimulateSecurityHealthAnalyticsCustomModuleResponse() + security_center_management.ListEventThreatDetectionCustomModulesResponse() ) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -13839,17 +16475,16 @@ def test_simulate_security_health_analytics_custom_module_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "post", + "method": "get", "query_params": pb_request, } - transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = security_center_management.SimulateSecurityHealthAnalyticsCustomModuleResponse.pb( + return_value = security_center_management.ListEventThreatDetectionCustomModulesResponse.pb( return_value ) json_return_value = json_format.MessageToJson(return_value) @@ -13857,42 +16492,43 @@ def test_simulate_security_health_analytics_custom_module_rest_required_fields( response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.simulate_security_health_analytics_custom_module(request) + response = client.list_event_threat_detection_custom_modules(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_simulate_security_health_analytics_custom_module_rest_unset_required_fields(): +def test_list_event_threat_detection_custom_modules_rest_unset_required_fields(): transport = transports.SecurityCenterManagementRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.simulate_security_health_analytics_custom_module._get_unset_required_fields( - {} + unset_fields = ( + transport.list_event_threat_detection_custom_modules._get_unset_required_fields( + {} + ) ) assert set(unset_fields) == ( - set(()) - & set( + set( ( - "parent", - "customConfig", - "resource", + "pageSize", + "pageToken", ) ) + & set(("parent",)) ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_simulate_security_health_analytics_custom_module_rest_interceptors( - null_interceptor, -): +def test_list_event_threat_detection_custom_modules_rest_interceptors(null_interceptor): transport = transports.SecurityCenterManagementRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.SecurityCenterManagementRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.SecurityCenterManagementRestInterceptor() + ), ) client = SecurityCenterManagementClient(transport=transport) with mock.patch.object( @@ -13901,15 +16537,15 @@ def test_simulate_security_health_analytics_custom_module_rest_interceptors( path_template, "transcode" ) as transcode, mock.patch.object( transports.SecurityCenterManagementRestInterceptor, - "post_simulate_security_health_analytics_custom_module", + "post_list_event_threat_detection_custom_modules", ) as post, mock.patch.object( transports.SecurityCenterManagementRestInterceptor, - "pre_simulate_security_health_analytics_custom_module", + "pre_list_event_threat_detection_custom_modules", ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = security_center_management.SimulateSecurityHealthAnalyticsCustomModuleRequest.pb( - security_center_management.SimulateSecurityHealthAnalyticsCustomModuleRequest() + pb_message = security_center_management.ListEventThreatDetectionCustomModulesRequest.pb( + security_center_management.ListEventThreatDetectionCustomModulesRequest() ) transcode.return_value = { "method": 
"post", @@ -13921,12 +16557,12 @@ def test_simulate_security_health_analytics_custom_module_rest_interceptors( req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = security_center_management.SimulateSecurityHealthAnalyticsCustomModuleResponse.to_json( - security_center_management.SimulateSecurityHealthAnalyticsCustomModuleResponse() + req.return_value._content = security_center_management.ListEventThreatDetectionCustomModulesResponse.to_json( + security_center_management.ListEventThreatDetectionCustomModulesResponse() ) request = ( - security_center_management.SimulateSecurityHealthAnalyticsCustomModuleRequest() + security_center_management.ListEventThreatDetectionCustomModulesRequest() ) metadata = [ ("key", "val"), @@ -13934,10 +16570,10 @@ def test_simulate_security_health_analytics_custom_module_rest_interceptors( ] pre.return_value = request, metadata post.return_value = ( - security_center_management.SimulateSecurityHealthAnalyticsCustomModuleResponse() + security_center_management.ListEventThreatDetectionCustomModulesResponse() ) - client.simulate_security_health_analytics_custom_module( + client.list_event_threat_detection_custom_modules( request, metadata=[ ("key", "val"), @@ -13949,9 +16585,9 @@ def test_simulate_security_health_analytics_custom_module_rest_interceptors( post.assert_called_once() -def test_simulate_security_health_analytics_custom_module_rest_bad_request( +def test_list_event_threat_detection_custom_modules_rest_bad_request( transport: str = "rest", - request_type=security_center_management.SimulateSecurityHealthAnalyticsCustomModuleRequest, + request_type=security_center_management.ListEventThreatDetectionCustomModulesRequest, ): client = SecurityCenterManagementClient( credentials=ga_credentials.AnonymousCredentials(), @@ -13971,10 +16607,10 @@ def test_simulate_security_health_analytics_custom_module_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.simulate_security_health_analytics_custom_module(request) + client.list_event_threat_detection_custom_modules(request) -def test_simulate_security_health_analytics_custom_module_rest_flattened(): +def test_list_event_threat_detection_custom_modules_rest_flattened(): client = SecurityCenterManagementClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -13984,7 +16620,7 @@ def test_simulate_security_health_analytics_custom_module_rest_flattened(): with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
return_value = ( - security_center_management.SimulateSecurityHealthAnalyticsCustomModuleResponse() + security_center_management.ListEventThreatDetectionCustomModulesResponse() ) # get arguments that satisfy an http rule for this method @@ -13993,12 +16629,6 @@ def test_simulate_security_health_analytics_custom_module_rest_flattened(): # get truthy value for each flattened field mock_args = dict( parent="parent_value", - custom_config=security_center_management.CustomConfig( - predicate=expr_pb2.Expr(expression="expression_value") - ), - resource=security_center_management.SimulateSecurityHealthAnalyticsCustomModuleRequest.SimulatedResource( - resource_type="resource_type_value" - ), ) mock_args.update(sample_request) @@ -14006,27 +16636,29 @@ def test_simulate_security_health_analytics_custom_module_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = security_center_management.SimulateSecurityHealthAnalyticsCustomModuleResponse.pb( - return_value + return_value = ( + security_center_management.ListEventThreatDetectionCustomModulesResponse.pb( + return_value + ) ) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.simulate_security_health_analytics_custom_module(**mock_args) + client.list_event_threat_detection_custom_modules(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{parent=projects/*/locations/*}/securityHealthAnalyticsCustomModules:simulate" + "%s/v1/{parent=projects/*/locations/*}/eventThreatDetectionCustomModules" % client.transport._host, args[1], ) -def test_simulate_security_health_analytics_custom_module_rest_flattened_error( +def test_list_event_threat_detection_custom_modules_rest_flattened_error( transport: str = "rest", ): client = SecurityCenterManagementClient( @@ -14037,32 +16669,95 @@ def test_simulate_security_health_analytics_custom_module_rest_flattened_error( # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.simulate_security_health_analytics_custom_module( - security_center_management.SimulateSecurityHealthAnalyticsCustomModuleRequest(), + client.list_event_threat_detection_custom_modules( + security_center_management.ListEventThreatDetectionCustomModulesRequest(), parent="parent_value", - custom_config=security_center_management.CustomConfig( - predicate=expr_pb2.Expr(expression="expression_value") - ), - resource=security_center_management.SimulateSecurityHealthAnalyticsCustomModuleRequest.SimulatedResource( - resource_type="resource_type_value" - ), ) -def test_simulate_security_health_analytics_custom_module_rest_error(): +def test_list_event_threat_detection_custom_modules_rest_pager(transport: str = "rest"): client = SecurityCenterManagementClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + security_center_management.ListEventThreatDetectionCustomModulesResponse( + event_threat_detection_custom_modules=[ + security_center_management.EventThreatDetectionCustomModule(), + security_center_management.EventThreatDetectionCustomModule(), + security_center_management.EventThreatDetectionCustomModule(), + ], + next_page_token="abc", + ), + security_center_management.ListEventThreatDetectionCustomModulesResponse( + event_threat_detection_custom_modules=[], + next_page_token="def", + ), + security_center_management.ListEventThreatDetectionCustomModulesResponse( + event_threat_detection_custom_modules=[ + security_center_management.EventThreatDetectionCustomModule(), + ], + next_page_token="ghi", + ), + security_center_management.ListEventThreatDetectionCustomModulesResponse( + event_threat_detection_custom_modules=[ + security_center_management.EventThreatDetectionCustomModule(), + security_center_management.EventThreatDetectionCustomModule(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + security_center_management.ListEventThreatDetectionCustomModulesResponse.to_json( + x + ) + for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "projects/sample1/locations/sample2"} + + pager = client.list_event_threat_detection_custom_modules( + request=sample_request + ) + + results = list(pager) + assert len(results) == 6 + assert all( + isinstance(i, security_center_management.EventThreatDetectionCustomModule) + for i in results + ) + + pages = list( + client.list_event_threat_detection_custom_modules( + request=sample_request + ).pages + ) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + @pytest.mark.parametrize( "request_type", [ - security_center_management.ListEffectiveEventThreatDetectionCustomModulesRequest, + security_center_management.ListDescendantEventThreatDetectionCustomModulesRequest, dict, ], ) -def test_list_effective_event_threat_detection_custom_modules_rest(request_type): +def test_list_descendant_event_threat_detection_custom_modules_rest(request_type): client = SecurityCenterManagementClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -14075,7 +16770,7 @@ def test_list_effective_event_threat_detection_custom_modules_rest(request_type) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = security_center_management.ListEffectiveEventThreatDetectionCustomModulesResponse( + return_value = security_center_management.ListDescendantEventThreatDetectionCustomModulesResponse( next_page_token="next_page_token_value", ) @@ -14083,23 +16778,23 @@ def test_list_effective_event_threat_detection_custom_modules_rest(request_type) response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = security_center_management.ListEffectiveEventThreatDetectionCustomModulesResponse.pb( + return_value = security_center_management.ListDescendantEventThreatDetectionCustomModulesResponse.pb( return_value ) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.list_effective_event_threat_detection_custom_modules(request) + response = client.list_descendant_event_threat_detection_custom_modules(request) # Establish that the response is the type that we expect. assert isinstance( - response, pagers.ListEffectiveEventThreatDetectionCustomModulesPager + response, pagers.ListDescendantEventThreatDetectionCustomModulesPager ) assert response.next_page_token == "next_page_token_value" -def test_list_effective_event_threat_detection_custom_modules_rest_use_cached_wrapped_rpc(): +def test_list_descendant_event_threat_detection_custom_modules_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -14114,7 +16809,7 @@ def test_list_effective_event_threat_detection_custom_modules_rest_use_cached_wr # Ensure method has been cached assert ( - client._transport.list_effective_event_threat_detection_custom_modules + client._transport.list_descendant_event_threat_detection_custom_modules in client._transport._wrapped_methods ) @@ -14124,24 +16819,24 @@ def test_list_effective_event_threat_detection_custom_modules_rest_use_cached_wr "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.list_effective_event_threat_detection_custom_modules + client._transport.list_descendant_event_threat_detection_custom_modules ] = mock_rpc request = {} - client.list_effective_event_threat_detection_custom_modules(request) + client.list_descendant_event_threat_detection_custom_modules(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.list_effective_event_threat_detection_custom_modules(request) + client.list_descendant_event_threat_detection_custom_modules(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_list_effective_event_threat_detection_custom_modules_rest_required_fields( - request_type=security_center_management.ListEffectiveEventThreatDetectionCustomModulesRequest, +def test_list_descendant_event_threat_detection_custom_modules_rest_required_fields( + request_type=security_center_management.ListDescendantEventThreatDetectionCustomModulesRequest, ): transport_class = transports.SecurityCenterManagementRestTransport @@ -14157,7 +16852,7 @@ def test_list_effective_event_threat_detection_custom_modules_rest_required_fiel unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_effective_event_threat_detection_custom_modules._get_unset_required_fields( + ).list_descendant_event_threat_detection_custom_modules._get_unset_required_fields( jsonified_request ) jsonified_request.update(unset_fields) @@ -14168,7 +16863,7 @@ def test_list_effective_event_threat_detection_custom_modules_rest_required_fiel unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_effective_event_threat_detection_custom_modules._get_unset_required_fields( + ).list_descendant_event_threat_detection_custom_modules._get_unset_required_fields( jsonified_request ) # Check that path parameters and body parameters are not mixing in. @@ -14192,7 +16887,7 @@ def test_list_effective_event_threat_detection_custom_modules_rest_required_fiel # Designate an appropriate value for the returned response. return_value = ( - security_center_management.ListEffectiveEventThreatDetectionCustomModulesResponse() + security_center_management.ListDescendantEventThreatDetectionCustomModulesResponse() ) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -14214,7 +16909,7 @@ def test_list_effective_event_threat_detection_custom_modules_rest_required_fiel response_value.status_code = 200 # Convert return value to protobuf type - return_value = security_center_management.ListEffectiveEventThreatDetectionCustomModulesResponse.pb( + return_value = security_center_management.ListDescendantEventThreatDetectionCustomModulesResponse.pb( return_value ) json_return_value = json_format.MessageToJson(return_value) @@ -14222,7 +16917,7 @@ def test_list_effective_event_threat_detection_custom_modules_rest_required_fiel response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.list_effective_event_threat_detection_custom_modules( + response = client.list_descendant_event_threat_detection_custom_modules( request ) @@ -14231,12 +16926,12 @@ def test_list_effective_event_threat_detection_custom_modules_rest_required_fiel assert expected_params == actual_params -def test_list_effective_event_threat_detection_custom_modules_rest_unset_required_fields(): +def test_list_descendant_event_threat_detection_custom_modules_rest_unset_required_fields(): transport = transports.SecurityCenterManagementRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.list_effective_event_threat_detection_custom_modules._get_unset_required_fields( + unset_fields = transport.list_descendant_event_threat_detection_custom_modules._get_unset_required_fields( {} ) assert set(unset_fields) == ( @@ -14251,14 +16946,16 @@ def test_list_effective_event_threat_detection_custom_modules_rest_unset_require @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_effective_event_threat_detection_custom_modules_rest_interceptors( +def test_list_descendant_event_threat_detection_custom_modules_rest_interceptors( null_interceptor, ): transport = transports.SecurityCenterManagementRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.SecurityCenterManagementRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.SecurityCenterManagementRestInterceptor() + ), ) client = SecurityCenterManagementClient(transport=transport) with mock.patch.object( @@ -14267,15 +16964,15 @@ def test_list_effective_event_threat_detection_custom_modules_rest_interceptors( path_template, "transcode" ) as transcode, mock.patch.object( transports.SecurityCenterManagementRestInterceptor, - "post_list_effective_event_threat_detection_custom_modules", + "post_list_descendant_event_threat_detection_custom_modules", ) as post, mock.patch.object( transports.SecurityCenterManagementRestInterceptor, - "pre_list_effective_event_threat_detection_custom_modules", + "pre_list_descendant_event_threat_detection_custom_modules", ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = security_center_management.ListEffectiveEventThreatDetectionCustomModulesRequest.pb( - security_center_management.ListEffectiveEventThreatDetectionCustomModulesRequest() + pb_message = security_center_management.ListDescendantEventThreatDetectionCustomModulesRequest.pb( + security_center_management.ListDescendantEventThreatDetectionCustomModulesRequest() ) transcode.return_value = { "method": "post", @@ -14287,12 +16984,12 @@ def test_list_effective_event_threat_detection_custom_modules_rest_interceptors( req.return_value = Response() req.return_value.status_code = 200 
req.return_value.request = PreparedRequest() - req.return_value._content = security_center_management.ListEffectiveEventThreatDetectionCustomModulesResponse.to_json( - security_center_management.ListEffectiveEventThreatDetectionCustomModulesResponse() + req.return_value._content = security_center_management.ListDescendantEventThreatDetectionCustomModulesResponse.to_json( + security_center_management.ListDescendantEventThreatDetectionCustomModulesResponse() ) request = ( - security_center_management.ListEffectiveEventThreatDetectionCustomModulesRequest() + security_center_management.ListDescendantEventThreatDetectionCustomModulesRequest() ) metadata = [ ("key", "val"), @@ -14300,10 +16997,10 @@ def test_list_effective_event_threat_detection_custom_modules_rest_interceptors( ] pre.return_value = request, metadata post.return_value = ( - security_center_management.ListEffectiveEventThreatDetectionCustomModulesResponse() + security_center_management.ListDescendantEventThreatDetectionCustomModulesResponse() ) - client.list_effective_event_threat_detection_custom_modules( + client.list_descendant_event_threat_detection_custom_modules( request, metadata=[ ("key", "val"), @@ -14315,9 +17012,9 @@ def test_list_effective_event_threat_detection_custom_modules_rest_interceptors( post.assert_called_once() -def test_list_effective_event_threat_detection_custom_modules_rest_bad_request( +def test_list_descendant_event_threat_detection_custom_modules_rest_bad_request( transport: str = "rest", - request_type=security_center_management.ListEffectiveEventThreatDetectionCustomModulesRequest, + request_type=security_center_management.ListDescendantEventThreatDetectionCustomModulesRequest, ): client = SecurityCenterManagementClient( credentials=ga_credentials.AnonymousCredentials(), @@ -14337,10 +17034,10 @@ def test_list_effective_event_threat_detection_custom_modules_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.list_effective_event_threat_detection_custom_modules(request) + client.list_descendant_event_threat_detection_custom_modules(request) -def test_list_effective_event_threat_detection_custom_modules_rest_flattened(): +def test_list_descendant_event_threat_detection_custom_modules_rest_flattened(): client = SecurityCenterManagementClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -14350,7 +17047,7 @@ def test_list_effective_event_threat_detection_custom_modules_rest_flattened(): with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
return_value = ( - security_center_management.ListEffectiveEventThreatDetectionCustomModulesResponse() + security_center_management.ListDescendantEventThreatDetectionCustomModulesResponse() ) # get arguments that satisfy an http rule for this method @@ -14366,27 +17063,27 @@ def test_list_effective_event_threat_detection_custom_modules_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = security_center_management.ListEffectiveEventThreatDetectionCustomModulesResponse.pb( + return_value = security_center_management.ListDescendantEventThreatDetectionCustomModulesResponse.pb( return_value ) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.list_effective_event_threat_detection_custom_modules(**mock_args) + client.list_descendant_event_threat_detection_custom_modules(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{parent=projects/*/locations/*}/effectiveEventThreatDetectionCustomModules" + "%s/v1/{parent=projects/*/locations/*}/eventThreatDetectionCustomModules:listDescendant" % client.transport._host, args[1], ) -def test_list_effective_event_threat_detection_custom_modules_rest_flattened_error( +def test_list_descendant_event_threat_detection_custom_modules_rest_flattened_error( transport: str = "rest", ): client = SecurityCenterManagementClient( @@ -14397,13 +17094,13 @@ def test_list_effective_event_threat_detection_custom_modules_rest_flattened_err # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.list_effective_event_threat_detection_custom_modules( - security_center_management.ListEffectiveEventThreatDetectionCustomModulesRequest(), + client.list_descendant_event_threat_detection_custom_modules( + security_center_management.ListDescendantEventThreatDetectionCustomModulesRequest(), parent="parent_value", ) -def test_list_effective_event_threat_detection_custom_modules_rest_pager( +def test_list_descendant_event_threat_detection_custom_modules_rest_pager( transport: str = "rest", ): client = SecurityCenterManagementClient( @@ -14417,28 +17114,28 @@ def test_list_effective_event_threat_detection_custom_modules_rest_pager( # with mock.patch.object(path_template, 'transcode') as transcode: # Set the response as a series of pages response = ( - security_center_management.ListEffectiveEventThreatDetectionCustomModulesResponse( - effective_event_threat_detection_custom_modules=[ - security_center_management.EffectiveEventThreatDetectionCustomModule(), - security_center_management.EffectiveEventThreatDetectionCustomModule(), - security_center_management.EffectiveEventThreatDetectionCustomModule(), + security_center_management.ListDescendantEventThreatDetectionCustomModulesResponse( + event_threat_detection_custom_modules=[ + security_center_management.EventThreatDetectionCustomModule(), + security_center_management.EventThreatDetectionCustomModule(), + security_center_management.EventThreatDetectionCustomModule(), ], next_page_token="abc", ), - security_center_management.ListEffectiveEventThreatDetectionCustomModulesResponse( - effective_event_threat_detection_custom_modules=[], + security_center_management.ListDescendantEventThreatDetectionCustomModulesResponse( + event_threat_detection_custom_modules=[], next_page_token="def", ), - security_center_management.ListEffectiveEventThreatDetectionCustomModulesResponse( - effective_event_threat_detection_custom_modules=[ - security_center_management.EffectiveEventThreatDetectionCustomModule(), + security_center_management.ListDescendantEventThreatDetectionCustomModulesResponse( + event_threat_detection_custom_modules=[ + security_center_management.EventThreatDetectionCustomModule(), ], next_page_token="ghi", ), - security_center_management.ListEffectiveEventThreatDetectionCustomModulesResponse( - effective_event_threat_detection_custom_modules=[ - security_center_management.EffectiveEventThreatDetectionCustomModule(), - security_center_management.EffectiveEventThreatDetectionCustomModule(), + security_center_management.ListDescendantEventThreatDetectionCustomModulesResponse( + event_threat_detection_custom_modules=[ + security_center_management.EventThreatDetectionCustomModule(), + security_center_management.EventThreatDetectionCustomModule(), ], ), ) @@ -14447,7 +17144,7 @@ def test_list_effective_event_threat_detection_custom_modules_rest_pager( # Wrap the values into proper Response objs response = tuple( - security_center_management.ListEffectiveEventThreatDetectionCustomModulesResponse.to_json( + security_center_management.ListDescendantEventThreatDetectionCustomModulesResponse.to_json( x ) for x in response @@ -14460,21 +17157,19 @@ def test_list_effective_event_threat_detection_custom_modules_rest_pager( sample_request = {"parent": "projects/sample1/locations/sample2"} - pager = client.list_effective_event_threat_detection_custom_modules( + pager = client.list_descendant_event_threat_detection_custom_modules( request=sample_request ) results = list(pager) assert len(results) == 6 assert all( - 
isinstance( - i, security_center_management.EffectiveEventThreatDetectionCustomModule - ) + isinstance(i, security_center_management.EventThreatDetectionCustomModule) for i in results ) pages = list( - client.list_effective_event_threat_detection_custom_modules( + client.list_descendant_event_threat_detection_custom_modules( request=sample_request ).pages ) @@ -14485,11 +17180,11 @@ def test_list_effective_event_threat_detection_custom_modules_rest_pager( @pytest.mark.parametrize( "request_type", [ - security_center_management.GetEffectiveEventThreatDetectionCustomModuleRequest, + security_center_management.GetEventThreatDetectionCustomModuleRequest, dict, ], ) -def test_get_effective_event_threat_detection_custom_module_rest(request_type): +def test_get_event_threat_detection_custom_module_rest(request_type): client = SecurityCenterManagementClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -14497,51 +17192,53 @@ def test_get_effective_event_threat_detection_custom_module_rest(request_type): # send a request that will satisfy transcoding request_init = { - "name": "projects/sample1/locations/sample2/effectiveEventThreatDetectionCustomModules/sample3" + "name": "projects/sample1/locations/sample2/eventThreatDetectionCustomModules/sample3" } request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = security_center_management.EffectiveEventThreatDetectionCustomModule( + return_value = security_center_management.EventThreatDetectionCustomModule( name="name_value", - enablement_state=security_center_management.EffectiveEventThreatDetectionCustomModule.EnablementState.ENABLED, + ancestor_module="ancestor_module_value", + enablement_state=security_center_management.EventThreatDetectionCustomModule.EnablementState.ENABLED, type_="type__value", display_name="display_name_value", description="description_value", + last_editor="last_editor_value", ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = ( - security_center_management.EffectiveEventThreatDetectionCustomModule.pb( - return_value - ) + return_value = security_center_management.EventThreatDetectionCustomModule.pb( + return_value ) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.get_effective_event_threat_detection_custom_module(request) + response = client.get_event_threat_detection_custom_module(request) # Establish that the response is the type that we expect. 
assert isinstance( - response, security_center_management.EffectiveEventThreatDetectionCustomModule + response, security_center_management.EventThreatDetectionCustomModule ) assert response.name == "name_value" + assert response.ancestor_module == "ancestor_module_value" assert ( response.enablement_state - == security_center_management.EffectiveEventThreatDetectionCustomModule.EnablementState.ENABLED + == security_center_management.EventThreatDetectionCustomModule.EnablementState.ENABLED ) assert response.type_ == "type__value" assert response.display_name == "display_name_value" assert response.description == "description_value" + assert response.last_editor == "last_editor_value" -def test_get_effective_event_threat_detection_custom_module_rest_use_cached_wrapped_rpc(): +def test_get_event_threat_detection_custom_module_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -14556,7 +17253,7 @@ def test_get_effective_event_threat_detection_custom_module_rest_use_cached_wrap # Ensure method has been cached assert ( - client._transport.get_effective_event_threat_detection_custom_module + client._transport.get_event_threat_detection_custom_module in client._transport._wrapped_methods ) @@ -14566,24 +17263,24 @@ def test_get_effective_event_threat_detection_custom_module_rest_use_cached_wrap "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.get_effective_event_threat_detection_custom_module + client._transport.get_event_threat_detection_custom_module ] = mock_rpc request = {} - client.get_effective_event_threat_detection_custom_module(request) + client.get_event_threat_detection_custom_module(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.get_effective_event_threat_detection_custom_module(request) + client.get_event_threat_detection_custom_module(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_get_effective_event_threat_detection_custom_module_rest_required_fields( - request_type=security_center_management.GetEffectiveEventThreatDetectionCustomModuleRequest, +def test_get_event_threat_detection_custom_module_rest_required_fields( + request_type=security_center_management.GetEventThreatDetectionCustomModuleRequest, ): transport_class = transports.SecurityCenterManagementRestTransport @@ -14599,7 +17296,7 @@ def test_get_effective_event_threat_detection_custom_module_rest_required_fields unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_effective_event_threat_detection_custom_module._get_unset_required_fields( + ).get_event_threat_detection_custom_module._get_unset_required_fields( jsonified_request ) jsonified_request.update(unset_fields) @@ -14610,7 +17307,7 @@ def test_get_effective_event_threat_detection_custom_module_rest_required_fields unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_effective_event_threat_detection_custom_module._get_unset_required_fields( + ).get_event_threat_detection_custom_module._get_unset_required_fields( jsonified_request ) jsonified_request.update(unset_fields) @@ -14626,9 +17323,7 @@ def test_get_effective_event_threat_detection_custom_module_rest_required_fields request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = ( - security_center_management.EffectiveEventThreatDetectionCustomModule() - ) + return_value = security_center_management.EventThreatDetectionCustomModule() # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -14650,7 +17345,7 @@ def test_get_effective_event_threat_detection_custom_module_rest_required_fields # Convert return value to protobuf type return_value = ( - security_center_management.EffectiveEventThreatDetectionCustomModule.pb( + security_center_management.EventThreatDetectionCustomModule.pb( return_value ) ) @@ -14659,35 +17354,35 @@ def test_get_effective_event_threat_detection_custom_module_rest_required_fields response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.get_effective_event_threat_detection_custom_module( - request - ) + response = client.get_event_threat_detection_custom_module(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_get_effective_event_threat_detection_custom_module_rest_unset_required_fields(): +def test_get_event_threat_detection_custom_module_rest_unset_required_fields(): transport = transports.SecurityCenterManagementRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.get_effective_event_threat_detection_custom_module._get_unset_required_fields( - {} + unset_fields = ( + transport.get_event_threat_detection_custom_module._get_unset_required_fields( + {} + ) ) assert set(unset_fields) == (set(()) & set(("name",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_effective_event_threat_detection_custom_module_rest_interceptors( - null_interceptor, -): +def test_get_event_threat_detection_custom_module_rest_interceptors(null_interceptor): transport = transports.SecurityCenterManagementRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.SecurityCenterManagementRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.SecurityCenterManagementRestInterceptor() + ), ) client = SecurityCenterManagementClient(transport=transport) with mock.patch.object( @@ -14696,15 +17391,17 @@ def test_get_effective_event_threat_detection_custom_module_rest_interceptors( path_template, "transcode" ) as transcode, mock.patch.object( transports.SecurityCenterManagementRestInterceptor, - "post_get_effective_event_threat_detection_custom_module", + "post_get_event_threat_detection_custom_module", ) as post, mock.patch.object( transports.SecurityCenterManagementRestInterceptor, - "pre_get_effective_event_threat_detection_custom_module", + "pre_get_event_threat_detection_custom_module", ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = security_center_management.GetEffectiveEventThreatDetectionCustomModuleRequest.pb( - security_center_management.GetEffectiveEventThreatDetectionCustomModuleRequest() + pb_message = ( + security_center_management.GetEventThreatDetectionCustomModuleRequest.pb( + security_center_management.GetEventThreatDetectionCustomModuleRequest() + ) ) transcode.return_value = { "method": "post", @@ -14716,12 +17413,14 @@ def test_get_effective_event_threat_detection_custom_module_rest_interceptors( req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = security_center_management.EffectiveEventThreatDetectionCustomModule.to_json( - security_center_management.EffectiveEventThreatDetectionCustomModule() + 
req.return_value._content = ( + security_center_management.EventThreatDetectionCustomModule.to_json( + security_center_management.EventThreatDetectionCustomModule() + ) ) request = ( - security_center_management.GetEffectiveEventThreatDetectionCustomModuleRequest() + security_center_management.GetEventThreatDetectionCustomModuleRequest() ) metadata = [ ("key", "val"), @@ -14729,10 +17428,10 @@ def test_get_effective_event_threat_detection_custom_module_rest_interceptors( ] pre.return_value = request, metadata post.return_value = ( - security_center_management.EffectiveEventThreatDetectionCustomModule() + security_center_management.EventThreatDetectionCustomModule() ) - client.get_effective_event_threat_detection_custom_module( + client.get_event_threat_detection_custom_module( request, metadata=[ ("key", "val"), @@ -14744,9 +17443,9 @@ def test_get_effective_event_threat_detection_custom_module_rest_interceptors( post.assert_called_once() -def test_get_effective_event_threat_detection_custom_module_rest_bad_request( +def test_get_event_threat_detection_custom_module_rest_bad_request( transport: str = "rest", - request_type=security_center_management.GetEffectiveEventThreatDetectionCustomModuleRequest, + request_type=security_center_management.GetEventThreatDetectionCustomModuleRequest, ): client = SecurityCenterManagementClient( credentials=ga_credentials.AnonymousCredentials(), @@ -14755,7 +17454,7 @@ def test_get_effective_event_threat_detection_custom_module_rest_bad_request( # send a request that will satisfy transcoding request_init = { - "name": "projects/sample1/locations/sample2/effectiveEventThreatDetectionCustomModules/sample3" + "name": "projects/sample1/locations/sample2/eventThreatDetectionCustomModules/sample3" } request = request_type(**request_init) @@ -14768,10 +17467,10 @@ def test_get_effective_event_threat_detection_custom_module_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.get_effective_event_threat_detection_custom_module(request) + client.get_event_threat_detection_custom_module(request) -def test_get_effective_event_threat_detection_custom_module_rest_flattened(): +def test_get_event_threat_detection_custom_module_rest_flattened(): client = SecurityCenterManagementClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -14780,13 +17479,11 @@ def test_get_effective_event_threat_detection_custom_module_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = ( - security_center_management.EffectiveEventThreatDetectionCustomModule() - ) + return_value = security_center_management.EventThreatDetectionCustomModule() # get arguments that satisfy an http rule for this method sample_request = { - "name": "projects/sample1/locations/sample2/effectiveEventThreatDetectionCustomModules/sample3" + "name": "projects/sample1/locations/sample2/eventThreatDetectionCustomModules/sample3" } # get truthy value for each flattened field @@ -14799,29 +17496,27 @@ def test_get_effective_event_threat_detection_custom_module_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = ( - security_center_management.EffectiveEventThreatDetectionCustomModule.pb( - return_value - ) + return_value = security_center_management.EventThreatDetectionCustomModule.pb( + return_value ) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.get_effective_event_threat_detection_custom_module(**mock_args) + client.get_event_threat_detection_custom_module(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{name=projects/*/locations/*/effectiveEventThreatDetectionCustomModules/*}" + "%s/v1/{name=projects/*/locations/*/eventThreatDetectionCustomModules/*}" % client.transport._host, args[1], ) -def test_get_effective_event_threat_detection_custom_module_rest_flattened_error( +def test_get_event_threat_detection_custom_module_rest_flattened_error( transport: str = "rest", ): client = SecurityCenterManagementClient( @@ -14832,13 +17527,13 @@ def test_get_effective_event_threat_detection_custom_module_rest_flattened_error # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.get_effective_event_threat_detection_custom_module( - security_center_management.GetEffectiveEventThreatDetectionCustomModuleRequest(), + client.get_event_threat_detection_custom_module( + security_center_management.GetEventThreatDetectionCustomModuleRequest(), name="name_value", ) -def test_get_effective_event_threat_detection_custom_module_rest_error(): +def test_get_event_threat_detection_custom_module_rest_error(): client = SecurityCenterManagementClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -14847,11 +17542,11 @@ def test_get_effective_event_threat_detection_custom_module_rest_error(): @pytest.mark.parametrize( "request_type", [ - security_center_management.ListEventThreatDetectionCustomModulesRequest, + security_center_management.CreateEventThreatDetectionCustomModuleRequest, dict, ], ) -def test_list_event_threat_detection_custom_modules_rest(request_type): +def test_create_event_threat_detection_custom_module_rest(request_type): client = SecurityCenterManagementClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -14859,38 +17554,139 @@ def test_list_event_threat_detection_custom_modules_rest(request_type): # send a request that will satisfy transcoding request_init = {"parent": "projects/sample1/locations/sample2"} + request_init["event_threat_detection_custom_module"] = { + "name": "name_value", + "config": {"fields": {}}, + "ancestor_module": "ancestor_module_value", + "enablement_state": 1, + "type_": "type__value", + "display_name": "display_name_value", + "description": "description_value", + "update_time": {"seconds": 751, "nanos": 543}, + "last_editor": "last_editor_value", + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = security_center_management.CreateEventThreatDetectionCustomModuleRequest.meta.fields[ + "event_threat_detection_custom_module" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "event_threat_detection_custom_module" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, len(request_init["event_threat_detection_custom_module"][field]) + ): + del request_init["event_threat_detection_custom_module"][field][i][ + subfield + ] + else: + del request_init["event_threat_detection_custom_module"][field][ + subfield + ] request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = ( - security_center_management.ListEventThreatDetectionCustomModulesResponse( - next_page_token="next_page_token_value", - ) + return_value = security_center_management.EventThreatDetectionCustomModule( + name="name_value", + ancestor_module="ancestor_module_value", + enablement_state=security_center_management.EventThreatDetectionCustomModule.EnablementState.ENABLED, + type_="type__value", + display_name="display_name_value", + description="description_value", + last_editor="last_editor_value", ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = ( - security_center_management.ListEventThreatDetectionCustomModulesResponse.pb( - return_value - ) + return_value = security_center_management.EventThreatDetectionCustomModule.pb( + return_value ) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.list_event_threat_detection_custom_modules(request) + response = client.create_event_threat_detection_custom_module(request) # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListEventThreatDetectionCustomModulesPager) - assert response.next_page_token == "next_page_token_value" + assert isinstance( + response, security_center_management.EventThreatDetectionCustomModule + ) + assert response.name == "name_value" + assert response.ancestor_module == "ancestor_module_value" + assert ( + response.enablement_state + == security_center_management.EventThreatDetectionCustomModule.EnablementState.ENABLED + ) + assert response.type_ == "type__value" + assert response.display_name == "display_name_value" + assert response.description == "description_value" + assert response.last_editor == "last_editor_value" -def test_list_event_threat_detection_custom_modules_rest_use_cached_wrapped_rpc(): +def test_create_event_threat_detection_custom_module_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -14905,7 +17701,7 @@ def test_list_event_threat_detection_custom_modules_rest_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._transport.list_event_threat_detection_custom_modules + client._transport.create_event_threat_detection_custom_module in client._transport._wrapped_methods ) @@ -14915,24 +17711,24 @@ def test_list_event_threat_detection_custom_modules_rest_use_cached_wrapped_rpc( "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.list_event_threat_detection_custom_modules + client._transport.create_event_threat_detection_custom_module ] = mock_rpc request = {} - client.list_event_threat_detection_custom_modules(request) + client.create_event_threat_detection_custom_module(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.list_event_threat_detection_custom_modules(request) + client.create_event_threat_detection_custom_module(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_list_event_threat_detection_custom_modules_rest_required_fields( - request_type=security_center_management.ListEventThreatDetectionCustomModulesRequest, +def test_create_event_threat_detection_custom_module_rest_required_fields( + request_type=security_center_management.CreateEventThreatDetectionCustomModuleRequest, ): transport_class = transports.SecurityCenterManagementRestTransport @@ -14948,7 +17744,7 @@ def test_list_event_threat_detection_custom_modules_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_event_threat_detection_custom_modules._get_unset_required_fields( + ).create_event_threat_detection_custom_module._get_unset_required_fields( jsonified_request ) jsonified_request.update(unset_fields) @@ -14959,16 +17755,11 @@ def test_list_event_threat_detection_custom_modules_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_event_threat_detection_custom_modules._get_unset_required_fields( + ).create_event_threat_detection_custom_module._get_unset_required_fields( jsonified_request ) # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set( - ( - "page_size", - "page_token", - ) - ) + assert not set(unset_fields) - set(("validate_only",)) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone @@ -14982,9 +17773,7 @@ def test_list_event_threat_detection_custom_modules_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = ( - security_center_management.ListEventThreatDetectionCustomModulesResponse() - ) + return_value = security_center_management.EventThreatDetectionCustomModule() # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -14996,58 +17785,63 @@ def test_list_event_threat_detection_custom_modules_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "get", + "method": "post", "query_params": pb_request, } + transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = security_center_management.ListEventThreatDetectionCustomModulesResponse.pb( - return_value + return_value = ( + security_center_management.EventThreatDetectionCustomModule.pb( + return_value + ) ) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.list_event_threat_detection_custom_modules(request) + response = client.create_event_threat_detection_custom_module(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_list_event_threat_detection_custom_modules_rest_unset_required_fields(): +def test_create_event_threat_detection_custom_module_rest_unset_required_fields(): transport = transports.SecurityCenterManagementRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = ( - transport.list_event_threat_detection_custom_modules._get_unset_required_fields( - {} - ) + unset_fields = transport.create_event_threat_detection_custom_module._get_unset_required_fields( + {} ) assert set(unset_fields) == ( - set( + set(("validateOnly",)) + & set( ( - "pageSize", - "pageToken", + "parent", + "eventThreatDetectionCustomModule", ) ) - & set(("parent",)) ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_event_threat_detection_custom_modules_rest_interceptors(null_interceptor): +def test_create_event_threat_detection_custom_module_rest_interceptors( + null_interceptor, +): transport = transports.SecurityCenterManagementRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.SecurityCenterManagementRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.SecurityCenterManagementRestInterceptor() + ), ) client = SecurityCenterManagementClient(transport=transport) with mock.patch.object( @@ -15056,15 +17850,15 @@ def test_list_event_threat_detection_custom_modules_rest_interceptors(null_inter path_template, "transcode" ) as transcode, mock.patch.object( transports.SecurityCenterManagementRestInterceptor, - "post_list_event_threat_detection_custom_modules", + "post_create_event_threat_detection_custom_module", ) as post, mock.patch.object( transports.SecurityCenterManagementRestInterceptor, - "pre_list_event_threat_detection_custom_modules", + "pre_create_event_threat_detection_custom_module", ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = security_center_management.ListEventThreatDetectionCustomModulesRequest.pb( - security_center_management.ListEventThreatDetectionCustomModulesRequest() + pb_message = security_center_management.CreateEventThreatDetectionCustomModuleRequest.pb( + security_center_management.CreateEventThreatDetectionCustomModuleRequest() ) transcode.return_value = { "method": "post", @@ -15076,12 +17870,14 @@ def 
test_list_event_threat_detection_custom_modules_rest_interceptors(null_inter req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = security_center_management.ListEventThreatDetectionCustomModulesResponse.to_json( - security_center_management.ListEventThreatDetectionCustomModulesResponse() + req.return_value._content = ( + security_center_management.EventThreatDetectionCustomModule.to_json( + security_center_management.EventThreatDetectionCustomModule() + ) ) request = ( - security_center_management.ListEventThreatDetectionCustomModulesRequest() + security_center_management.CreateEventThreatDetectionCustomModuleRequest() ) metadata = [ ("key", "val"), @@ -15089,10 +17885,10 @@ def test_list_event_threat_detection_custom_modules_rest_interceptors(null_inter ] pre.return_value = request, metadata post.return_value = ( - security_center_management.ListEventThreatDetectionCustomModulesResponse() + security_center_management.EventThreatDetectionCustomModule() ) - client.list_event_threat_detection_custom_modules( + client.create_event_threat_detection_custom_module( request, metadata=[ ("key", "val"), @@ -15104,9 +17900,9 @@ def test_list_event_threat_detection_custom_modules_rest_interceptors(null_inter post.assert_called_once() -def test_list_event_threat_detection_custom_modules_rest_bad_request( +def test_create_event_threat_detection_custom_module_rest_bad_request( transport: str = "rest", - request_type=security_center_management.ListEventThreatDetectionCustomModulesRequest, + request_type=security_center_management.CreateEventThreatDetectionCustomModuleRequest, ): client = SecurityCenterManagementClient( credentials=ga_credentials.AnonymousCredentials(), @@ -15126,10 +17922,10 @@ def test_list_event_threat_detection_custom_modules_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.list_event_threat_detection_custom_modules(request) + client.create_event_threat_detection_custom_module(request) -def test_list_event_threat_detection_custom_modules_rest_flattened(): +def test_create_event_threat_detection_custom_module_rest_flattened(): client = SecurityCenterManagementClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -15138,9 +17934,7 @@ def test_list_event_threat_detection_custom_modules_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = ( - security_center_management.ListEventThreatDetectionCustomModulesResponse() - ) + return_value = security_center_management.EventThreatDetectionCustomModule() # get arguments that satisfy an http rule for this method sample_request = {"parent": "projects/sample1/locations/sample2"} @@ -15148,6 +17942,9 @@ def test_list_event_threat_detection_custom_modules_rest_flattened(): # get truthy value for each flattened field mock_args = dict( parent="parent_value", + event_threat_detection_custom_module=security_center_management.EventThreatDetectionCustomModule( + name="name_value" + ), ) mock_args.update(sample_request) @@ -15155,16 +17952,14 @@ def test_list_event_threat_detection_custom_modules_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = ( - security_center_management.ListEventThreatDetectionCustomModulesResponse.pb( - return_value - ) + return_value = security_center_management.EventThreatDetectionCustomModule.pb( + return_value ) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.list_event_threat_detection_custom_modules(**mock_args) + client.create_event_threat_detection_custom_module(**mock_args) # Establish that the underlying call was made with the expected # request object values. @@ -15177,7 +17972,7 @@ def test_list_event_threat_detection_custom_modules_rest_flattened(): ) -def test_list_event_threat_detection_custom_modules_rest_flattened_error( +def test_create_event_threat_detection_custom_module_rest_flattened_error( transport: str = "rest", ): client = SecurityCenterManagementClient( @@ -15188,132 +17983,173 @@ def test_list_event_threat_detection_custom_modules_rest_flattened_error( # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.list_event_threat_detection_custom_modules( - security_center_management.ListEventThreatDetectionCustomModulesRequest(), + client.create_event_threat_detection_custom_module( + security_center_management.CreateEventThreatDetectionCustomModuleRequest(), parent="parent_value", + event_threat_detection_custom_module=security_center_management.EventThreatDetectionCustomModule( + name="name_value" + ), ) -def test_list_event_threat_detection_custom_modules_rest_pager(transport: str = "rest"): +def test_create_event_threat_detection_custom_module_rest_error(): client = SecurityCenterManagementClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. 
- # with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - security_center_management.ListEventThreatDetectionCustomModulesResponse( - event_threat_detection_custom_modules=[ - security_center_management.EventThreatDetectionCustomModule(), - security_center_management.EventThreatDetectionCustomModule(), - security_center_management.EventThreatDetectionCustomModule(), - ], - next_page_token="abc", - ), - security_center_management.ListEventThreatDetectionCustomModulesResponse( - event_threat_detection_custom_modules=[], - next_page_token="def", - ), - security_center_management.ListEventThreatDetectionCustomModulesResponse( - event_threat_detection_custom_modules=[ - security_center_management.EventThreatDetectionCustomModule(), - ], - next_page_token="ghi", - ), - security_center_management.ListEventThreatDetectionCustomModulesResponse( - event_threat_detection_custom_modules=[ - security_center_management.EventThreatDetectionCustomModule(), - security_center_management.EventThreatDetectionCustomModule(), - ], - ), - ) - # Two responses for two calls - response = response + response - - # Wrap the values into proper Response objs - response = tuple( - security_center_management.ListEventThreatDetectionCustomModulesResponse.to_json( - x - ) - for x in response - ) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode("UTF-8") - return_val.status_code = 200 - req.side_effect = return_values - - sample_request = {"parent": "projects/sample1/locations/sample2"} - - pager = client.list_event_threat_detection_custom_modules( - request=sample_request - ) - - results = list(pager) - assert len(results) == 6 - assert all( - isinstance(i, security_center_management.EventThreatDetectionCustomModule) - for i in results - ) - - pages = list( - client.list_event_threat_detection_custom_modules( - request=sample_request - ).pages - ) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token - @pytest.mark.parametrize( "request_type", [ - security_center_management.ListDescendantEventThreatDetectionCustomModulesRequest, + security_center_management.UpdateEventThreatDetectionCustomModuleRequest, dict, ], ) -def test_list_descendant_event_threat_detection_custom_modules_rest(request_type): +def test_update_event_threat_detection_custom_module_rest(request_type): client = SecurityCenterManagementClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} + request_init = { + "event_threat_detection_custom_module": { + "name": "projects/sample1/locations/sample2/eventThreatDetectionCustomModules/sample3" + } + } + request_init["event_threat_detection_custom_module"] = { + "name": "projects/sample1/locations/sample2/eventThreatDetectionCustomModules/sample3", + "config": {"fields": {}}, + "ancestor_module": "ancestor_module_value", + "enablement_state": 1, + "type_": "type__value", + "display_name": "display_name_value", + "description": "description_value", + "update_time": {"seconds": 751, "nanos": 543}, + "last_editor": "last_editor_value", + } + # The version of a generated dependency at test runtime may differ from the version used during generation. 
+ # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = security_center_management.UpdateEventThreatDetectionCustomModuleRequest.meta.fields[ + "event_threat_detection_custom_module" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "event_threat_detection_custom_module" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, len(request_init["event_threat_detection_custom_module"][field]) + ): + del request_init["event_threat_detection_custom_module"][field][i][ + subfield + ] + else: + del request_init["event_threat_detection_custom_module"][field][ + subfield + ] request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
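# --- Illustrative sketch (not part of the generated diff) -------------------
# A stdlib-only version of the field-pruning idea above: drop keys from a
# sample request dict that the schema known at runtime does not define, so the
# test still builds a valid request when the runtime dependency is older than
# the one used at generation time.  `RUNTIME_SCHEMA` is a hypothetical schema,
# not the real proto descriptor.
RUNTIME_SCHEMA = {
    "event_threat_detection_custom_module": {"name", "config", "display_name"},
}


def prune_unknown_subfields(request_init, schema):
    for field, known_subfields in schema.items():
        value = request_init.get(field)
        items = value if isinstance(value, list) else [value] if isinstance(value, dict) else []
        for item in items:
            for subfield in list(item):
                if subfield not in known_subfields:
                    del item[subfield]
    return request_init


sample = {
    "event_threat_detection_custom_module": {
        "name": "projects/p/locations/l/eventThreatDetectionCustomModules/m",
        "display_name": "demo",
        "brand_new_field": "unknown at runtime",
    }
}
prune_unknown_subfields(sample, RUNTIME_SCHEMA)
assert "brand_new_field" not in sample["event_threat_detection_custom_module"]
# -----------------------------------------------------------------------------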
- return_value = security_center_management.ListDescendantEventThreatDetectionCustomModulesResponse( - next_page_token="next_page_token_value", + return_value = security_center_management.EventThreatDetectionCustomModule( + name="name_value", + ancestor_module="ancestor_module_value", + enablement_state=security_center_management.EventThreatDetectionCustomModule.EnablementState.ENABLED, + type_="type__value", + display_name="display_name_value", + description="description_value", + last_editor="last_editor_value", ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = security_center_management.ListDescendantEventThreatDetectionCustomModulesResponse.pb( + return_value = security_center_management.EventThreatDetectionCustomModule.pb( return_value ) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.list_descendant_event_threat_detection_custom_modules(request) + response = client.update_event_threat_detection_custom_module(request) # Establish that the response is the type that we expect. assert isinstance( - response, pagers.ListDescendantEventThreatDetectionCustomModulesPager + response, security_center_management.EventThreatDetectionCustomModule ) - assert response.next_page_token == "next_page_token_value" + assert response.name == "name_value" + assert response.ancestor_module == "ancestor_module_value" + assert ( + response.enablement_state + == security_center_management.EventThreatDetectionCustomModule.EnablementState.ENABLED + ) + assert response.type_ == "type__value" + assert response.display_name == "display_name_value" + assert response.description == "description_value" + assert response.last_editor == "last_editor_value" -def test_list_descendant_event_threat_detection_custom_modules_rest_use_cached_wrapped_rpc(): +def test_update_event_threat_detection_custom_module_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -15328,7 +18164,7 @@ def test_list_descendant_event_threat_detection_custom_modules_rest_use_cached_w # Ensure method has been cached assert ( - client._transport.list_descendant_event_threat_detection_custom_modules + client._transport.update_event_threat_detection_custom_module in client._transport._wrapped_methods ) @@ -15338,29 +18174,28 @@ def test_list_descendant_event_threat_detection_custom_modules_rest_use_cached_w "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.list_descendant_event_threat_detection_custom_modules + client._transport.update_event_threat_detection_custom_module ] = mock_rpc request = {} - client.list_descendant_event_threat_detection_custom_modules(request) + client.update_event_threat_detection_custom_module(request) # Establish that the underlying gRPC stub method was called. 
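# --- Illustrative sketch (not part of the generated diff) -------------------
# The "cached wrapped rpc" behaviour asserted by the *_use_cached_wrapped_rpc
# tests: the wrapper is created once when the client is constructed, and later
# calls look it up instead of re-wrapping.  `wrap_method`, `ToyTransport` and
# `ToyClient` are stand-ins for the gapic machinery.
from unittest import mock


def wrap_method(func):
    # Stand-in for google.api_core.gapic_v1.method.wrap_method.
    return func


class ToyTransport:
    def __init__(self):
        self.update_module = lambda request: "response"
        self._wrapped_methods = {self.update_module: wrap_method(self.update_module)}


class ToyClient:
    def __init__(self):
        self._transport = ToyTransport()

    def update_module(self, request):
        return self._transport._wrapped_methods[self._transport.update_module](request)


with mock.patch(f"{__name__}.wrap_method") as wrapper_fn:
    client = ToyClient()
    assert wrapper_fn.call_count == 1  # wrapped once, at construction

    mock_rpc = mock.Mock(return_value="foo")
    client._transport._wrapped_methods[client._transport.update_module] = mock_rpc
    client.update_module({})
    client.update_module({})
    assert mock_rpc.call_count == 2    # the cached wrapper is reused
    assert wrapper_fn.call_count == 1  # no new wrapper was created for the calls
# -----------------------------------------------------------------------------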
assert mock_rpc.call_count == 1 - client.list_descendant_event_threat_detection_custom_modules(request) + client.update_event_threat_detection_custom_module(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_list_descendant_event_threat_detection_custom_modules_rest_required_fields( - request_type=security_center_management.ListDescendantEventThreatDetectionCustomModulesRequest, +def test_update_event_threat_detection_custom_module_rest_required_fields( + request_type=security_center_management.UpdateEventThreatDetectionCustomModuleRequest, ): transport_class = transports.SecurityCenterManagementRestTransport request_init = {} - request_init["parent"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -15371,32 +18206,28 @@ def test_list_descendant_event_threat_detection_custom_modules_rest_required_fie unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_descendant_event_threat_detection_custom_modules._get_unset_required_fields( + ).update_event_threat_detection_custom_module._get_unset_required_fields( jsonified_request ) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["parent"] = "parent_value" - unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_descendant_event_threat_detection_custom_modules._get_unset_required_fields( + ).update_event_threat_detection_custom_module._get_unset_required_fields( jsonified_request ) # Check that path parameters and body parameters are not mixing in. assert not set(unset_fields) - set( ( - "page_size", - "page_token", + "update_mask", + "validate_only", ) ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" client = SecurityCenterManagementClient( credentials=ga_credentials.AnonymousCredentials(), @@ -15405,9 +18236,7 @@ def test_list_descendant_event_threat_detection_custom_modules_rest_required_fie request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = ( - security_center_management.ListDescendantEventThreatDetectionCustomModulesResponse() - ) + return_value = security_center_management.EventThreatDetectionCustomModule() # Mock the http request call within the method and fake a response. 
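# --- Illustrative sketch (not part of the generated diff) -------------------
# The required-field bookkeeping used in the *_rest_required_fields tests,
# reduced to plain sets: the request is serialized to a JSON-style dict and the
# transport reports which required fields are still unset so the test can fill
# them in.  The field names here are illustrative, not the real service config.
REQUIRED = {"parent"}
OPTIONAL_QUERY_PARAMS = {"pageSize", "pageToken"}


def get_unset_required_fields(jsonified_request):
    return {f for f in REQUIRED if f not in jsonified_request}


jsonified_request = {}
assert get_unset_required_fields(jsonified_request) == {"parent"}

jsonified_request["parent"] = "parent_value"
# Only optional query parameters may remain unset; required values that were
# filled in must survive untouched.
assert not get_unset_required_fields(jsonified_request) - OPTIONAL_QUERY_PARAMS
assert jsonified_request["parent"] == "parent_value"
# -----------------------------------------------------------------------------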
with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -15419,60 +18248,68 @@ def test_list_descendant_event_threat_detection_custom_modules_rest_required_fie pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "get", + "method": "patch", "query_params": pb_request, } + transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = security_center_management.ListDescendantEventThreatDetectionCustomModulesResponse.pb( - return_value + return_value = ( + security_center_management.EventThreatDetectionCustomModule.pb( + return_value + ) ) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.list_descendant_event_threat_detection_custom_modules( - request - ) + response = client.update_event_threat_detection_custom_module(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_list_descendant_event_threat_detection_custom_modules_rest_unset_required_fields(): +def test_update_event_threat_detection_custom_module_rest_unset_required_fields(): transport = transports.SecurityCenterManagementRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.list_descendant_event_threat_detection_custom_modules._get_unset_required_fields( + unset_fields = transport.update_event_threat_detection_custom_module._get_unset_required_fields( {} ) assert set(unset_fields) == ( set( ( - "pageSize", - "pageToken", + "updateMask", + "validateOnly", + ) + ) + & set( + ( + "updateMask", + "eventThreatDetectionCustomModule", ) ) - & set(("parent",)) ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_descendant_event_threat_detection_custom_modules_rest_interceptors( +def test_update_event_threat_detection_custom_module_rest_interceptors( null_interceptor, ): transport = transports.SecurityCenterManagementRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.SecurityCenterManagementRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.SecurityCenterManagementRestInterceptor() + ), ) client = SecurityCenterManagementClient(transport=transport) with mock.patch.object( @@ -15481,15 +18318,15 @@ def test_list_descendant_event_threat_detection_custom_modules_rest_interceptors path_template, "transcode" ) as transcode, mock.patch.object( transports.SecurityCenterManagementRestInterceptor, - "post_list_descendant_event_threat_detection_custom_modules", + "post_update_event_threat_detection_custom_module", ) as post, mock.patch.object( transports.SecurityCenterManagementRestInterceptor, - "pre_list_descendant_event_threat_detection_custom_modules", + "pre_update_event_threat_detection_custom_module", ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = security_center_management.ListDescendantEventThreatDetectionCustomModulesRequest.pb( - security_center_management.ListDescendantEventThreatDetectionCustomModulesRequest() + pb_message = security_center_management.UpdateEventThreatDetectionCustomModuleRequest.pb( + security_center_management.UpdateEventThreatDetectionCustomModuleRequest() ) 
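# --- Illustrative sketch (not part of the generated diff) -------------------
# Why `transcode` is mocked above: the REST transport normally maps a request
# message onto {method, uri, query_params, body} and then issues one HTTP call
# from that mapping.  `send` below is a stand-in for the generated transport
# method, not the real implementation.
from unittest import mock


def transcode(http_options, message):
    raise NotImplementedError("stand-in for the real transcoding helper")


def send(session, message):
    t = transcode(None, message)
    return session.request(
        t["method"], t["uri"], params=t["query_params"], data=t.get("body")
    )


session = mock.Mock()
with mock.patch(f"{__name__}.transcode") as fake_transcode:
    fake_transcode.return_value = {
        "uri": "v1/sample_method",
        "method": "patch",
        "query_params": {"validateOnly": True},
        "body": b"{}",
    }
    send(session, message={"name": "modules/sample"})

session.request.assert_called_once_with(
    "patch", "v1/sample_method", params={"validateOnly": True}, data=b"{}"
)
# -----------------------------------------------------------------------------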
transcode.return_value = { "method": "post", @@ -15501,12 +18338,14 @@ def test_list_descendant_event_threat_detection_custom_modules_rest_interceptors req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = security_center_management.ListDescendantEventThreatDetectionCustomModulesResponse.to_json( - security_center_management.ListDescendantEventThreatDetectionCustomModulesResponse() + req.return_value._content = ( + security_center_management.EventThreatDetectionCustomModule.to_json( + security_center_management.EventThreatDetectionCustomModule() + ) ) request = ( - security_center_management.ListDescendantEventThreatDetectionCustomModulesRequest() + security_center_management.UpdateEventThreatDetectionCustomModuleRequest() ) metadata = [ ("key", "val"), @@ -15514,10 +18353,10 @@ def test_list_descendant_event_threat_detection_custom_modules_rest_interceptors ] pre.return_value = request, metadata post.return_value = ( - security_center_management.ListDescendantEventThreatDetectionCustomModulesResponse() + security_center_management.EventThreatDetectionCustomModule() ) - client.list_descendant_event_threat_detection_custom_modules( + client.update_event_threat_detection_custom_module( request, metadata=[ ("key", "val"), @@ -15529,9 +18368,9 @@ def test_list_descendant_event_threat_detection_custom_modules_rest_interceptors post.assert_called_once() -def test_list_descendant_event_threat_detection_custom_modules_rest_bad_request( +def test_update_event_threat_detection_custom_module_rest_bad_request( transport: str = "rest", - request_type=security_center_management.ListDescendantEventThreatDetectionCustomModulesRequest, + request_type=security_center_management.UpdateEventThreatDetectionCustomModuleRequest, ): client = SecurityCenterManagementClient( credentials=ga_credentials.AnonymousCredentials(), @@ -15539,7 +18378,11 @@ def test_list_descendant_event_threat_detection_custom_modules_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} + request_init = { + "event_threat_detection_custom_module": { + "name": "projects/sample1/locations/sample2/eventThreatDetectionCustomModules/sample3" + } + } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -15551,10 +18394,10 @@ def test_list_descendant_event_threat_detection_custom_modules_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.list_descendant_event_threat_detection_custom_modules(request) + client.update_event_threat_detection_custom_module(request) -def test_list_descendant_event_threat_detection_custom_modules_rest_flattened(): +def test_update_event_threat_detection_custom_module_rest_flattened(): client = SecurityCenterManagementClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -15563,16 +18406,21 @@ def test_list_descendant_event_threat_detection_custom_modules_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
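# --- Illustrative sketch (not part of the generated diff) -------------------
# The interceptor contract checked by the *_rest_interceptors tests: `pre_*`
# may rewrite the (request, metadata) pair before the HTTP call and `post_*`
# may rewrite the response afterwards; the test only asserts each hook fired
# exactly once.  `Interceptor` and `call_with_interceptor` are stand-ins.
from unittest import mock


class Interceptor:
    def pre_update(self, request, metadata):
        return request, metadata

    def post_update(self, response):
        return response


def call_with_interceptor(interceptor, request, metadata):
    request, metadata = interceptor.pre_update(request, metadata)
    response = {"name": "name_value"}  # pretend HTTP round trip
    return interceptor.post_update(response)


interceptor = Interceptor()
with mock.patch.object(interceptor, "pre_update", wraps=interceptor.pre_update) as pre, \
     mock.patch.object(interceptor, "post_update", wraps=interceptor.post_update) as post:
    call_with_interceptor(interceptor, request={}, metadata=[("key", "val")])
    pre.assert_called_once()
    post.assert_called_once()
# -----------------------------------------------------------------------------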
- return_value = ( - security_center_management.ListDescendantEventThreatDetectionCustomModulesResponse() - ) + return_value = security_center_management.EventThreatDetectionCustomModule() # get arguments that satisfy an http rule for this method - sample_request = {"parent": "projects/sample1/locations/sample2"} + sample_request = { + "event_threat_detection_custom_module": { + "name": "projects/sample1/locations/sample2/eventThreatDetectionCustomModules/sample3" + } + } # get truthy value for each flattened field mock_args = dict( - parent="parent_value", + event_threat_detection_custom_module=security_center_management.EventThreatDetectionCustomModule( + name="name_value" + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) mock_args.update(sample_request) @@ -15580,27 +18428,27 @@ def test_list_descendant_event_threat_detection_custom_modules_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = security_center_management.ListDescendantEventThreatDetectionCustomModulesResponse.pb( + return_value = security_center_management.EventThreatDetectionCustomModule.pb( return_value ) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.list_descendant_event_threat_detection_custom_modules(**mock_args) + client.update_event_threat_detection_custom_module(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{parent=projects/*/locations/*}/eventThreatDetectionCustomModules:listDescendant" + "%s/v1/{event_threat_detection_custom_module.name=projects/*/locations/*/eventThreatDetectionCustomModules/*}" % client.transport._host, args[1], ) -def test_list_descendant_event_threat_detection_custom_modules_rest_flattened_error( +def test_update_event_threat_detection_custom_module_rest_flattened_error( transport: str = "rest", ): client = SecurityCenterManagementClient( @@ -15611,97 +18459,29 @@ def test_list_descendant_event_threat_detection_custom_modules_rest_flattened_er # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.list_descendant_event_threat_detection_custom_modules( - security_center_management.ListDescendantEventThreatDetectionCustomModulesRequest(), - parent="parent_value", + client.update_event_threat_detection_custom_module( + security_center_management.UpdateEventThreatDetectionCustomModuleRequest(), + event_threat_detection_custom_module=security_center_management.EventThreatDetectionCustomModule( + name="name_value" + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) -def test_list_descendant_event_threat_detection_custom_modules_rest_pager( - transport: str = "rest", -): +def test_update_event_threat_detection_custom_module_rest_error(): client = SecurityCenterManagementClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. 
- # with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - security_center_management.ListDescendantEventThreatDetectionCustomModulesResponse( - event_threat_detection_custom_modules=[ - security_center_management.EventThreatDetectionCustomModule(), - security_center_management.EventThreatDetectionCustomModule(), - security_center_management.EventThreatDetectionCustomModule(), - ], - next_page_token="abc", - ), - security_center_management.ListDescendantEventThreatDetectionCustomModulesResponse( - event_threat_detection_custom_modules=[], - next_page_token="def", - ), - security_center_management.ListDescendantEventThreatDetectionCustomModulesResponse( - event_threat_detection_custom_modules=[ - security_center_management.EventThreatDetectionCustomModule(), - ], - next_page_token="ghi", - ), - security_center_management.ListDescendantEventThreatDetectionCustomModulesResponse( - event_threat_detection_custom_modules=[ - security_center_management.EventThreatDetectionCustomModule(), - security_center_management.EventThreatDetectionCustomModule(), - ], - ), - ) - # Two responses for two calls - response = response + response - - # Wrap the values into proper Response objs - response = tuple( - security_center_management.ListDescendantEventThreatDetectionCustomModulesResponse.to_json( - x - ) - for x in response - ) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode("UTF-8") - return_val.status_code = 200 - req.side_effect = return_values - - sample_request = {"parent": "projects/sample1/locations/sample2"} - - pager = client.list_descendant_event_threat_detection_custom_modules( - request=sample_request - ) - - results = list(pager) - assert len(results) == 6 - assert all( - isinstance(i, security_center_management.EventThreatDetectionCustomModule) - for i in results - ) - - pages = list( - client.list_descendant_event_threat_detection_custom_modules( - request=sample_request - ).pages - ) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token - @pytest.mark.parametrize( "request_type", [ - security_center_management.GetEventThreatDetectionCustomModuleRequest, + security_center_management.DeleteEventThreatDetectionCustomModuleRequest, dict, ], ) -def test_get_event_threat_detection_custom_module_rest(request_type): +def test_delete_event_threat_detection_custom_module_rest(request_type): client = SecurityCenterManagementClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -15716,46 +18496,22 @@ def test_get_event_threat_detection_custom_module_rest(request_type): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = security_center_management.EventThreatDetectionCustomModule( - name="name_value", - ancestor_module="ancestor_module_value", - enablement_state=security_center_management.EventThreatDetectionCustomModule.EnablementState.ENABLED, - type_="type__value", - display_name="display_name_value", - description="description_value", - last_editor="last_editor_value", - ) + return_value = None # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - # Convert return value to protobuf type - return_value = security_center_management.EventThreatDetectionCustomModule.pb( - return_value - ) - json_return_value = json_format.MessageToJson(return_value) + json_return_value = "" response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.get_event_threat_detection_custom_module(request) + response = client.delete_event_threat_detection_custom_module(request) # Establish that the response is the type that we expect. - assert isinstance( - response, security_center_management.EventThreatDetectionCustomModule - ) - assert response.name == "name_value" - assert response.ancestor_module == "ancestor_module_value" - assert ( - response.enablement_state - == security_center_management.EventThreatDetectionCustomModule.EnablementState.ENABLED - ) - assert response.type_ == "type__value" - assert response.display_name == "display_name_value" - assert response.description == "description_value" - assert response.last_editor == "last_editor_value" + assert response is None -def test_get_event_threat_detection_custom_module_rest_use_cached_wrapped_rpc(): +def test_delete_event_threat_detection_custom_module_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -15770,7 +18526,7 @@ def test_get_event_threat_detection_custom_module_rest_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.get_event_threat_detection_custom_module + client._transport.delete_event_threat_detection_custom_module in client._transport._wrapped_methods ) @@ -15780,24 +18536,24 @@ def test_get_event_threat_detection_custom_module_rest_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.get_event_threat_detection_custom_module + client._transport.delete_event_threat_detection_custom_module ] = mock_rpc request = {} - client.get_event_threat_detection_custom_module(request) + client.delete_event_threat_detection_custom_module(request) # Establish that the underlying gRPC stub method was called. 
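# --- Illustrative sketch (not part of the generated diff) -------------------
# The Empty-response convention used above for delete: the server returns
# HTTP 200 with an empty body, and the client surfaces that as `None`.
# `delete_resource` is a stand-in for the generated client method.
import json
from unittest import mock


def delete_resource(session, name):
    resp = session.request("DELETE", f"https://example.test/v1/{name}")
    return json.loads(resp.text) if resp.text else None


session = mock.Mock()
session.request.return_value = mock.Mock(status_code=200, text="")

assert delete_resource(session, "projects/p/locations/l/modules/m") is None
# -----------------------------------------------------------------------------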
assert mock_rpc.call_count == 1 - client.get_event_threat_detection_custom_module(request) + client.delete_event_threat_detection_custom_module(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_get_event_threat_detection_custom_module_rest_required_fields( - request_type=security_center_management.GetEventThreatDetectionCustomModuleRequest, +def test_delete_event_threat_detection_custom_module_rest_required_fields( + request_type=security_center_management.DeleteEventThreatDetectionCustomModuleRequest, ): transport_class = transports.SecurityCenterManagementRestTransport @@ -15813,7 +18569,7 @@ def test_get_event_threat_detection_custom_module_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_event_threat_detection_custom_module._get_unset_required_fields( + ).delete_event_threat_detection_custom_module._get_unset_required_fields( jsonified_request ) jsonified_request.update(unset_fields) @@ -15824,9 +18580,11 @@ def test_get_event_threat_detection_custom_module_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_event_threat_detection_custom_module._get_unset_required_fields( + ).delete_event_threat_detection_custom_module._get_unset_required_fields( jsonified_request ) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("validate_only",)) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone @@ -15840,7 +18598,7 @@ def test_get_event_threat_detection_custom_module_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = security_center_management.EventThreatDetectionCustomModule() + return_value = None # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -15852,52 +18610,47 @@ def test_get_event_threat_detection_custom_module_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "get", + "method": "delete", "query_params": pb_request, } transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = ( - security_center_management.EventThreatDetectionCustomModule.pb( - return_value - ) - ) - json_return_value = json_format.MessageToJson(return_value) + json_return_value = "" response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.get_event_threat_detection_custom_module(request) + response = client.delete_event_threat_detection_custom_module(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_get_event_threat_detection_custom_module_rest_unset_required_fields(): +def test_delete_event_threat_detection_custom_module_rest_unset_required_fields(): transport = transports.SecurityCenterManagementRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = ( - transport.get_event_threat_detection_custom_module._get_unset_required_fields( - {} - ) + unset_fields = transport.delete_event_threat_detection_custom_module._get_unset_required_fields( + {} ) - assert set(unset_fields) == (set(()) & set(("name",))) + assert set(unset_fields) == (set(("validateOnly",)) & set(("name",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_event_threat_detection_custom_module_rest_interceptors(null_interceptor): +def test_delete_event_threat_detection_custom_module_rest_interceptors( + null_interceptor, +): transport = transports.SecurityCenterManagementRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.SecurityCenterManagementRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.SecurityCenterManagementRestInterceptor() + ), ) client = SecurityCenterManagementClient(transport=transport) with mock.patch.object( @@ -15906,17 +18659,11 @@ def test_get_event_threat_detection_custom_module_rest_interceptors(null_interce path_template, "transcode" ) as transcode, mock.patch.object( transports.SecurityCenterManagementRestInterceptor, - "post_get_event_threat_detection_custom_module", - ) as post, mock.patch.object( - transports.SecurityCenterManagementRestInterceptor, - "pre_get_event_threat_detection_custom_module", + "pre_delete_event_threat_detection_custom_module", ) as pre: pre.assert_not_called() - post.assert_not_called() - pb_message = ( - security_center_management.GetEventThreatDetectionCustomModuleRequest.pb( - security_center_management.GetEventThreatDetectionCustomModuleRequest() - ) + pb_message = security_center_management.DeleteEventThreatDetectionCustomModuleRequest.pb( + security_center_management.DeleteEventThreatDetectionCustomModuleRequest() ) transcode.return_value = { "method": "post", @@ -15928,25 +18675,17 @@ def test_get_event_threat_detection_custom_module_rest_interceptors(null_interce req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = ( - 
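# --- Illustrative sketch (not part of the generated diff) -------------------
# The set arithmetic in the unset_required_fields assertions can look opaque;
# it reads roughly as "fields sent in the query string or body" intersected
# with "required fields".  For the delete RPC the only required field (`name`)
# travels in the URL path, so the intersection is empty; for the update RPC
# earlier in the diff, `updateMask` is both a query parameter and required, so
# it survives the intersection.
query_or_body_params = {"validateOnly"}
required_fields = {"name"}
assert query_or_body_params & required_fields == set()

update_params = {"updateMask", "validateOnly"}
update_required = {"updateMask", "eventThreatDetectionCustomModule"}
assert update_params & update_required == {"updateMask"}
# -----------------------------------------------------------------------------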
security_center_management.EventThreatDetectionCustomModule.to_json( - security_center_management.EventThreatDetectionCustomModule() - ) - ) request = ( - security_center_management.GetEventThreatDetectionCustomModuleRequest() + security_center_management.DeleteEventThreatDetectionCustomModuleRequest() ) metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = ( - security_center_management.EventThreatDetectionCustomModule() - ) - client.get_event_threat_detection_custom_module( + client.delete_event_threat_detection_custom_module( request, metadata=[ ("key", "val"), @@ -15955,12 +18694,11 @@ def test_get_event_threat_detection_custom_module_rest_interceptors(null_interce ) pre.assert_called_once() - post.assert_called_once() -def test_get_event_threat_detection_custom_module_rest_bad_request( +def test_delete_event_threat_detection_custom_module_rest_bad_request( transport: str = "rest", - request_type=security_center_management.GetEventThreatDetectionCustomModuleRequest, + request_type=security_center_management.DeleteEventThreatDetectionCustomModuleRequest, ): client = SecurityCenterManagementClient( credentials=ga_credentials.AnonymousCredentials(), @@ -15982,10 +18720,10 @@ def test_get_event_threat_detection_custom_module_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.get_event_threat_detection_custom_module(request) + client.delete_event_threat_detection_custom_module(request) -def test_get_event_threat_detection_custom_module_rest_flattened(): +def test_delete_event_threat_detection_custom_module_rest_flattened(): client = SecurityCenterManagementClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -15994,7 +18732,7 @@ def test_get_event_threat_detection_custom_module_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = security_center_management.EventThreatDetectionCustomModule() + return_value = None # get arguments that satisfy an http rule for this method sample_request = { @@ -16010,15 +18748,11 @@ def test_get_event_threat_detection_custom_module_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - # Convert return value to protobuf type - return_value = security_center_management.EventThreatDetectionCustomModule.pb( - return_value - ) - json_return_value = json_format.MessageToJson(return_value) + json_return_value = "" response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.get_event_threat_detection_custom_module(**mock_args) + client.delete_event_threat_detection_custom_module(**mock_args) # Establish that the underlying call was made with the expected # request object values. @@ -16031,7 +18765,7 @@ def test_get_event_threat_detection_custom_module_rest_flattened(): ) -def test_get_event_threat_detection_custom_module_rest_flattened_error( +def test_delete_event_threat_detection_custom_module_rest_flattened_error( transport: str = "rest", ): client = SecurityCenterManagementClient( @@ -16042,13 +18776,13 @@ def test_get_event_threat_detection_custom_module_rest_flattened_error( # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.get_event_threat_detection_custom_module( - security_center_management.GetEventThreatDetectionCustomModuleRequest(), + client.delete_event_threat_detection_custom_module( + security_center_management.DeleteEventThreatDetectionCustomModuleRequest(), name="name_value", ) -def test_get_event_threat_detection_custom_module_rest_error(): +def test_delete_event_threat_detection_custom_module_rest_error(): client = SecurityCenterManagementClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -16057,11 +18791,11 @@ def test_get_event_threat_detection_custom_module_rest_error(): @pytest.mark.parametrize( "request_type", [ - security_center_management.CreateEventThreatDetectionCustomModuleRequest, + security_center_management.ValidateEventThreatDetectionCustomModuleRequest, dict, ], ) -def test_create_event_threat_detection_custom_module_rest(request_type): +def test_validate_event_threat_detection_custom_module_rest(request_type): client = SecurityCenterManagementClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -16069,139 +18803,36 @@ def test_create_event_threat_detection_custom_module_rest(request_type): # send a request that will satisfy transcoding request_init = {"parent": "projects/sample1/locations/sample2"} - request_init["event_threat_detection_custom_module"] = { - "name": "name_value", - "config": {"fields": {}}, - "ancestor_module": "ancestor_module_value", - "enablement_state": 1, - "type_": "type__value", - "display_name": "display_name_value", - "description": "description_value", - "update_time": {"seconds": 751, "nanos": 543}, - "last_editor": "last_editor_value", - } - # The version of a generated dependency at test runtime may differ from the version used during generation. - # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = security_center_management.CreateEventThreatDetectionCustomModuleRequest.meta.fields[ - "event_threat_detection_custom_module" - ] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. 
- message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init[ - "event_threat_detection_custom_module" - ].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - { - "field": field, - "subfield": subfield, - "is_repeated": is_repeated, - } - ) - - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range( - 0, len(request_init["event_threat_detection_custom_module"][field]) - ): - del request_init["event_threat_detection_custom_module"][field][i][ - subfield - ] - else: - del request_init["event_threat_detection_custom_module"][field][ - subfield - ] request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = security_center_management.EventThreatDetectionCustomModule( - name="name_value", - ancestor_module="ancestor_module_value", - enablement_state=security_center_management.EventThreatDetectionCustomModule.EnablementState.ENABLED, - type_="type__value", - display_name="display_name_value", - description="description_value", - last_editor="last_editor_value", + return_value = ( + security_center_management.ValidateEventThreatDetectionCustomModuleResponse() ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = security_center_management.EventThreatDetectionCustomModule.pb( + return_value = security_center_management.ValidateEventThreatDetectionCustomModuleResponse.pb( return_value ) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.create_event_threat_detection_custom_module(request) + response = client.validate_event_threat_detection_custom_module(request) # Establish that the response is the type that we expect. assert isinstance( - response, security_center_management.EventThreatDetectionCustomModule - ) - assert response.name == "name_value" - assert response.ancestor_module == "ancestor_module_value" - assert ( - response.enablement_state - == security_center_management.EventThreatDetectionCustomModule.EnablementState.ENABLED + response, + security_center_management.ValidateEventThreatDetectionCustomModuleResponse, ) - assert response.type_ == "type__value" - assert response.display_name == "display_name_value" - assert response.description == "description_value" - assert response.last_editor == "last_editor_value" -def test_create_event_threat_detection_custom_module_rest_use_cached_wrapped_rpc(): +def test_validate_event_threat_detection_custom_module_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -16216,7 +18847,7 @@ def test_create_event_threat_detection_custom_module_rest_use_cached_wrapped_rpc # Ensure method has been cached assert ( - client._transport.create_event_threat_detection_custom_module + client._transport.validate_event_threat_detection_custom_module in client._transport._wrapped_methods ) @@ -16226,29 +18857,31 @@ def test_create_event_threat_detection_custom_module_rest_use_cached_wrapped_rpc "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.create_event_threat_detection_custom_module + client._transport.validate_event_threat_detection_custom_module ] = mock_rpc request = {} - client.create_event_threat_detection_custom_module(request) + client.validate_event_threat_detection_custom_module(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.create_event_threat_detection_custom_module(request) + client.validate_event_threat_detection_custom_module(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_create_event_threat_detection_custom_module_rest_required_fields( - request_type=security_center_management.CreateEventThreatDetectionCustomModuleRequest, +def test_validate_event_threat_detection_custom_module_rest_required_fields( + request_type=security_center_management.ValidateEventThreatDetectionCustomModuleRequest, ): transport_class = transports.SecurityCenterManagementRestTransport request_init = {} request_init["parent"] = "" + request_init["raw_text"] = "" + request_init["type_"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -16259,7 +18892,7 @@ def test_create_event_threat_detection_custom_module_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).create_event_threat_detection_custom_module._get_unset_required_fields( + ).validate_event_threat_detection_custom_module._get_unset_required_fields( jsonified_request ) jsonified_request.update(unset_fields) @@ -16267,19 +18900,23 @@ def test_create_event_threat_detection_custom_module_rest_required_fields( # verify required fields with default values are now present jsonified_request["parent"] = "parent_value" + jsonified_request["rawText"] = "raw_text_value" + jsonified_request["type"] = "type__value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).create_event_threat_detection_custom_module._get_unset_required_fields( + ).validate_event_threat_detection_custom_module._get_unset_required_fields( jsonified_request ) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("validate_only",)) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone assert "parent" in jsonified_request assert jsonified_request["parent"] == "parent_value" + assert "rawText" in jsonified_request + assert jsonified_request["rawText"] == "raw_text_value" + assert "type" in jsonified_request + assert jsonified_request["type"] == "type__value" client = SecurityCenterManagementClient( credentials=ga_credentials.AnonymousCredentials(), @@ -16288,7 +18925,9 @@ def test_create_event_threat_detection_custom_module_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = security_center_management.EventThreatDetectionCustomModule() + return_value = ( + security_center_management.ValidateEventThreatDetectionCustomModuleResponse() + ) # Mock the http request call within the method and fake a response. 
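# --- Illustrative sketch (not part of the generated diff) -------------------
# The switch from `raw_text` in the request dict to `rawText` in the jsonified
# request above comes from protobuf's JSON mapping, which lowerCamelCases field
# names by default (the real test uses json_format.MessageToJson).  A tiny
# stdlib illustration of that rename:
def to_lower_camel(snake_name):
    first, *rest = snake_name.split("_")
    return first + "".join(part.title() for part in rest)


assert to_lower_camel("raw_text") == "rawText"
assert to_lower_camel("validate_only") == "validateOnly"
assert to_lower_camel("parent") == "parent"
# -----------------------------------------------------------------------------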
with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -16310,51 +18949,52 @@ def test_create_event_threat_detection_custom_module_rest_required_fields( response_value.status_code = 200 # Convert return value to protobuf type - return_value = ( - security_center_management.EventThreatDetectionCustomModule.pb( - return_value - ) + return_value = security_center_management.ValidateEventThreatDetectionCustomModuleResponse.pb( + return_value ) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.create_event_threat_detection_custom_module(request) + response = client.validate_event_threat_detection_custom_module(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_create_event_threat_detection_custom_module_rest_unset_required_fields(): +def test_validate_event_threat_detection_custom_module_rest_unset_required_fields(): transport = transports.SecurityCenterManagementRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.create_event_threat_detection_custom_module._get_unset_required_fields( + unset_fields = transport.validate_event_threat_detection_custom_module._get_unset_required_fields( {} ) assert set(unset_fields) == ( - set(("validateOnly",)) + set(()) & set( ( "parent", - "eventThreatDetectionCustomModule", + "rawText", + "type", ) ) ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_create_event_threat_detection_custom_module_rest_interceptors( +def test_validate_event_threat_detection_custom_module_rest_interceptors( null_interceptor, ): transport = transports.SecurityCenterManagementRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.SecurityCenterManagementRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.SecurityCenterManagementRestInterceptor() + ), ) client = SecurityCenterManagementClient(transport=transport) with mock.patch.object( @@ -16363,15 +19003,15 @@ def test_create_event_threat_detection_custom_module_rest_interceptors( path_template, "transcode" ) as transcode, mock.patch.object( transports.SecurityCenterManagementRestInterceptor, - "post_create_event_threat_detection_custom_module", + "post_validate_event_threat_detection_custom_module", ) as post, mock.patch.object( transports.SecurityCenterManagementRestInterceptor, - "pre_create_event_threat_detection_custom_module", + "pre_validate_event_threat_detection_custom_module", ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = security_center_management.CreateEventThreatDetectionCustomModuleRequest.pb( - security_center_management.CreateEventThreatDetectionCustomModuleRequest() + pb_message = security_center_management.ValidateEventThreatDetectionCustomModuleRequest.pb( + security_center_management.ValidateEventThreatDetectionCustomModuleRequest() ) transcode.return_value = { "method": "post", @@ -16383,14 +19023,12 @@ def test_create_event_threat_detection_custom_module_rest_interceptors( req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = ( - security_center_management.EventThreatDetectionCustomModule.to_json( - 
security_center_management.EventThreatDetectionCustomModule() - ) + req.return_value._content = security_center_management.ValidateEventThreatDetectionCustomModuleResponse.to_json( + security_center_management.ValidateEventThreatDetectionCustomModuleResponse() ) request = ( - security_center_management.CreateEventThreatDetectionCustomModuleRequest() + security_center_management.ValidateEventThreatDetectionCustomModuleRequest() ) metadata = [ ("key", "val"), @@ -16398,10 +19036,10 @@ def test_create_event_threat_detection_custom_module_rest_interceptors( ] pre.return_value = request, metadata post.return_value = ( - security_center_management.EventThreatDetectionCustomModule() + security_center_management.ValidateEventThreatDetectionCustomModuleResponse() ) - client.create_event_threat_detection_custom_module( + client.validate_event_threat_detection_custom_module( request, metadata=[ ("key", "val"), @@ -16413,9 +19051,9 @@ def test_create_event_threat_detection_custom_module_rest_interceptors( post.assert_called_once() -def test_create_event_threat_detection_custom_module_rest_bad_request( +def test_validate_event_threat_detection_custom_module_rest_bad_request( transport: str = "rest", - request_type=security_center_management.CreateEventThreatDetectionCustomModuleRequest, + request_type=security_center_management.ValidateEventThreatDetectionCustomModuleRequest, ): client = SecurityCenterManagementClient( credentials=ga_credentials.AnonymousCredentials(), @@ -16435,77 +19073,10 @@ def test_create_event_threat_detection_custom_module_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.create_event_threat_detection_custom_module(request) - - -def test_create_event_threat_detection_custom_module_rest_flattened(): - client = SecurityCenterManagementClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = security_center_management.EventThreatDetectionCustomModule() - - # get arguments that satisfy an http rule for this method - sample_request = {"parent": "projects/sample1/locations/sample2"} - - # get truthy value for each flattened field - mock_args = dict( - parent="parent_value", - event_threat_detection_custom_module=security_center_management.EventThreatDetectionCustomModule( - name="name_value" - ), - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = security_center_management.EventThreatDetectionCustomModule.pb( - return_value - ) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - client.create_event_threat_detection_custom_module(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{parent=projects/*/locations/*}/eventThreatDetectionCustomModules" - % client.transport._host, - args[1], - ) - - -def test_create_event_threat_detection_custom_module_rest_flattened_error( - transport: str = "rest", -): - client = SecurityCenterManagementClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.create_event_threat_detection_custom_module( - security_center_management.CreateEventThreatDetectionCustomModuleRequest(), - parent="parent_value", - event_threat_detection_custom_module=security_center_management.EventThreatDetectionCustomModule( - name="name_value" - ), - ) + client.validate_event_threat_detection_custom_module(request) -def test_create_event_threat_detection_custom_module_rest_error(): +def test_validate_event_threat_detection_custom_module_rest_error(): client = SecurityCenterManagementClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -16514,11 +19085,11 @@ def test_create_event_threat_detection_custom_module_rest_error(): @pytest.mark.parametrize( "request_type", [ - security_center_management.UpdateEventThreatDetectionCustomModuleRequest, + security_center_management.GetSecurityCenterServiceRequest, dict, ], ) -def test_update_event_threat_detection_custom_module_rest(request_type): +def test_get_security_center_service_rest(request_type): client = SecurityCenterManagementClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -16526,143 +19097,44 @@ def test_update_event_threat_detection_custom_module_rest(request_type): # send a request that will satisfy transcoding request_init = { - "event_threat_detection_custom_module": { - "name": "projects/sample1/locations/sample2/eventThreatDetectionCustomModules/sample3" - } - } - request_init["event_threat_detection_custom_module"] = { - "name": "projects/sample1/locations/sample2/eventThreatDetectionCustomModules/sample3", - "config": {"fields": {}}, - "ancestor_module": "ancestor_module_value", - "enablement_state": 1, - "type_": "type__value", - "display_name": "display_name_value", - "description": "description_value", - "update_time": {"seconds": 751, "nanos": 543}, - "last_editor": "last_editor_value", + "name": "projects/sample1/locations/sample2/securityCenterServices/sample3" } - # The version of a generated dependency at test runtime may differ from the version used during generation. - # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = security_center_management.UpdateEventThreatDetectionCustomModuleRequest.meta.fields[ - "event_threat_detection_custom_module" - ] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. 
- message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init[ - "event_threat_detection_custom_module" - ].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - { - "field": field, - "subfield": subfield, - "is_repeated": is_repeated, - } - ) - - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range( - 0, len(request_init["event_threat_detection_custom_module"][field]) - ): - del request_init["event_threat_detection_custom_module"][field][i][ - subfield - ] - else: - del request_init["event_threat_detection_custom_module"][field][ - subfield - ] request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = security_center_management.EventThreatDetectionCustomModule( + return_value = security_center_management.SecurityCenterService( name="name_value", - ancestor_module="ancestor_module_value", - enablement_state=security_center_management.EventThreatDetectionCustomModule.EnablementState.ENABLED, - type_="type__value", - display_name="display_name_value", - description="description_value", - last_editor="last_editor_value", + intended_enablement_state=security_center_management.SecurityCenterService.EnablementState.INHERITED, + effective_enablement_state=security_center_management.SecurityCenterService.EnablementState.INHERITED, ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = security_center_management.EventThreatDetectionCustomModule.pb( - return_value - ) + return_value = security_center_management.SecurityCenterService.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.update_event_threat_detection_custom_module(request) + response = client.get_security_center_service(request) # Establish that the response is the type that we expect. - assert isinstance( - response, security_center_management.EventThreatDetectionCustomModule - ) + assert isinstance(response, security_center_management.SecurityCenterService) assert response.name == "name_value" - assert response.ancestor_module == "ancestor_module_value" assert ( - response.enablement_state - == security_center_management.EventThreatDetectionCustomModule.EnablementState.ENABLED + response.intended_enablement_state + == security_center_management.SecurityCenterService.EnablementState.INHERITED + ) + assert ( + response.effective_enablement_state + == security_center_management.SecurityCenterService.EnablementState.INHERITED ) - assert response.type_ == "type__value" - assert response.display_name == "display_name_value" - assert response.description == "description_value" - assert response.last_editor == "last_editor_value" -def test_update_event_threat_detection_custom_module_rest_use_cached_wrapped_rpc(): +def test_get_security_center_service_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -16677,7 +19149,7 @@ def test_update_event_threat_detection_custom_module_rest_use_cached_wrapped_rpc # Ensure method has been cached assert ( - client._transport.update_event_threat_detection_custom_module + client._transport.get_security_center_service in client._transport._wrapped_methods ) @@ -16687,28 +19159,29 @@ def test_update_event_threat_detection_custom_module_rest_use_cached_wrapped_rpc "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.update_event_threat_detection_custom_module + client._transport.get_security_center_service ] = mock_rpc request = {} - client.update_event_threat_detection_custom_module(request) + client.get_security_center_service(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.update_event_threat_detection_custom_module(request) + client.get_security_center_service(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_update_event_threat_detection_custom_module_rest_required_fields( - request_type=security_center_management.UpdateEventThreatDetectionCustomModuleRequest, +def test_get_security_center_service_rest_required_fields( + request_type=security_center_management.GetSecurityCenterServiceRequest, ): transport_class = transports.SecurityCenterManagementRestTransport request_init = {} + request_init["name"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -16719,28 +19192,23 @@ def test_update_event_threat_detection_custom_module_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).update_event_threat_detection_custom_module._get_unset_required_fields( - jsonified_request - ) + ).get_security_center_service._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present + jsonified_request["name"] = "name_value" + unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).update_event_threat_detection_custom_module._get_unset_required_fields( - jsonified_request - ) + ).get_security_center_service._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set( - ( - "update_mask", - "validate_only", - ) - ) + assert not set(unset_fields) - set(("show_eligible_modules_only",)) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" client = SecurityCenterManagementClient( credentials=ga_credentials.AnonymousCredentials(), @@ -16749,7 +19217,7 @@ def test_update_event_threat_detection_custom_module_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = security_center_management.EventThreatDetectionCustomModule() + return_value = security_center_management.SecurityCenterService() # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -16761,66 +19229,48 @@ def test_update_event_threat_detection_custom_module_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "patch", + "method": "get", "query_params": pb_request, } - transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = ( - security_center_management.EventThreatDetectionCustomModule.pb( - return_value - ) + return_value = security_center_management.SecurityCenterService.pb( + return_value ) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.update_event_threat_detection_custom_module(request) + response = client.get_security_center_service(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_update_event_threat_detection_custom_module_rest_unset_required_fields(): +def test_get_security_center_service_rest_unset_required_fields(): transport = transports.SecurityCenterManagementRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.update_event_threat_detection_custom_module._get_unset_required_fields( - {} - ) - assert set(unset_fields) == ( - set( - ( - "updateMask", - "validateOnly", - ) - ) - & set( - ( - "updateMask", - "eventThreatDetectionCustomModule", - ) - ) - ) + unset_fields = transport.get_security_center_service._get_unset_required_fields({}) + assert set(unset_fields) == (set(("showEligibleModulesOnly",)) & set(("name",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_update_event_threat_detection_custom_module_rest_interceptors( - null_interceptor, -): +def test_get_security_center_service_rest_interceptors(null_interceptor): transport = transports.SecurityCenterManagementRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.SecurityCenterManagementRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.SecurityCenterManagementRestInterceptor() + ), ) client = SecurityCenterManagementClient(transport=transport) with mock.patch.object( @@ -16829,15 +19279,15 @@ def test_update_event_threat_detection_custom_module_rest_interceptors( path_template, "transcode" ) as transcode, mock.patch.object( transports.SecurityCenterManagementRestInterceptor, - "post_update_event_threat_detection_custom_module", + "post_get_security_center_service", ) as post, mock.patch.object( transports.SecurityCenterManagementRestInterceptor, - "pre_update_event_threat_detection_custom_module", + "pre_get_security_center_service", ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = security_center_management.UpdateEventThreatDetectionCustomModuleRequest.pb( - security_center_management.UpdateEventThreatDetectionCustomModuleRequest() + pb_message = security_center_management.GetSecurityCenterServiceRequest.pb( + security_center_management.GetSecurityCenterServiceRequest() ) transcode.return_value = { "method": "post", @@ -16850,24 +19300,20 @@ def test_update_event_threat_detection_custom_module_rest_interceptors( req.return_value.status_code = 200 
req.return_value.request = PreparedRequest() req.return_value._content = ( - security_center_management.EventThreatDetectionCustomModule.to_json( - security_center_management.EventThreatDetectionCustomModule() + security_center_management.SecurityCenterService.to_json( + security_center_management.SecurityCenterService() ) ) - request = ( - security_center_management.UpdateEventThreatDetectionCustomModuleRequest() - ) + request = security_center_management.GetSecurityCenterServiceRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = ( - security_center_management.EventThreatDetectionCustomModule() - ) + post.return_value = security_center_management.SecurityCenterService() - client.update_event_threat_detection_custom_module( + client.get_security_center_service( request, metadata=[ ("key", "val"), @@ -16879,9 +19325,9 @@ def test_update_event_threat_detection_custom_module_rest_interceptors( post.assert_called_once() -def test_update_event_threat_detection_custom_module_rest_bad_request( +def test_get_security_center_service_rest_bad_request( transport: str = "rest", - request_type=security_center_management.UpdateEventThreatDetectionCustomModuleRequest, + request_type=security_center_management.GetSecurityCenterServiceRequest, ): client = SecurityCenterManagementClient( credentials=ga_credentials.AnonymousCredentials(), @@ -16890,9 +19336,7 @@ def test_update_event_threat_detection_custom_module_rest_bad_request( # send a request that will satisfy transcoding request_init = { - "event_threat_detection_custom_module": { - "name": "projects/sample1/locations/sample2/eventThreatDetectionCustomModules/sample3" - } + "name": "projects/sample1/locations/sample2/securityCenterServices/sample3" } request = request_type(**request_init) @@ -16905,10 +19349,10 @@ def test_update_event_threat_detection_custom_module_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.update_event_threat_detection_custom_module(request) + client.get_security_center_service(request) -def test_update_event_threat_detection_custom_module_rest_flattened(): +def test_get_security_center_service_rest_flattened(): client = SecurityCenterManagementClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -16917,21 +19361,16 @@ def test_update_event_threat_detection_custom_module_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = security_center_management.EventThreatDetectionCustomModule() + return_value = security_center_management.SecurityCenterService() # get arguments that satisfy an http rule for this method sample_request = { - "event_threat_detection_custom_module": { - "name": "projects/sample1/locations/sample2/eventThreatDetectionCustomModules/sample3" - } + "name": "projects/sample1/locations/sample2/securityCenterServices/sample3" } # get truthy value for each flattened field mock_args = dict( - event_threat_detection_custom_module=security_center_management.EventThreatDetectionCustomModule( - name="name_value" - ), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + name="name_value", ) mock_args.update(sample_request) @@ -16939,29 +19378,25 @@ def test_update_event_threat_detection_custom_module_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = security_center_management.EventThreatDetectionCustomModule.pb( - return_value - ) + return_value = security_center_management.SecurityCenterService.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.update_event_threat_detection_custom_module(**mock_args) + client.get_security_center_service(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{event_threat_detection_custom_module.name=projects/*/locations/*/eventThreatDetectionCustomModules/*}" + "%s/v1/{name=projects/*/locations/*/securityCenterServices/*}" % client.transport._host, args[1], ) -def test_update_event_threat_detection_custom_module_rest_flattened_error( - transport: str = "rest", -): +def test_get_security_center_service_rest_flattened_error(transport: str = "rest"): client = SecurityCenterManagementClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -16970,16 +19405,13 @@ def test_update_event_threat_detection_custom_module_rest_flattened_error( # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.update_event_threat_detection_custom_module( - security_center_management.UpdateEventThreatDetectionCustomModuleRequest(), - event_threat_detection_custom_module=security_center_management.EventThreatDetectionCustomModule( - name="name_value" - ), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + client.get_security_center_service( + security_center_management.GetSecurityCenterServiceRequest(), + name="name_value", ) -def test_update_event_threat_detection_custom_module_rest_error(): +def test_get_security_center_service_rest_error(): client = SecurityCenterManagementClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -16988,41 +19420,46 @@ def test_update_event_threat_detection_custom_module_rest_error(): @pytest.mark.parametrize( "request_type", [ - security_center_management.DeleteEventThreatDetectionCustomModuleRequest, + security_center_management.ListSecurityCenterServicesRequest, dict, ], ) -def test_delete_event_threat_detection_custom_module_rest(request_type): +def test_list_security_center_services_rest(request_type): client = SecurityCenterManagementClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = { - "name": "projects/sample1/locations/sample2/eventThreatDetectionCustomModules/sample3" - } + request_init = {"parent": "projects/sample1/locations/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = None + return_value = security_center_management.ListSecurityCenterServicesResponse( + next_page_token="next_page_token_value", + ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - json_return_value = "" + # Convert return value to protobuf type + return_value = security_center_management.ListSecurityCenterServicesResponse.pb( + return_value + ) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.delete_event_threat_detection_custom_module(request) + response = client.list_security_center_services(request) # Establish that the response is the type that we expect. - assert response is None + assert isinstance(response, pagers.ListSecurityCenterServicesPager) + assert response.next_page_token == "next_page_token_value" -def test_delete_event_threat_detection_custom_module_rest_use_cached_wrapped_rpc(): +def test_list_security_center_services_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -17037,7 +19474,7 @@ def test_delete_event_threat_detection_custom_module_rest_use_cached_wrapped_rpc # Ensure method has been cached assert ( - client._transport.delete_event_threat_detection_custom_module + client._transport.list_security_center_services in client._transport._wrapped_methods ) @@ -17047,29 +19484,29 @@ def test_delete_event_threat_detection_custom_module_rest_use_cached_wrapped_rpc "foo" # operation_request.operation in compute client(s) expect a string. 
) client._transport._wrapped_methods[ - client._transport.delete_event_threat_detection_custom_module + client._transport.list_security_center_services ] = mock_rpc request = {} - client.delete_event_threat_detection_custom_module(request) + client.list_security_center_services(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.delete_event_threat_detection_custom_module(request) + client.list_security_center_services(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_delete_event_threat_detection_custom_module_rest_required_fields( - request_type=security_center_management.DeleteEventThreatDetectionCustomModuleRequest, +def test_list_security_center_services_rest_required_fields( + request_type=security_center_management.ListSecurityCenterServicesRequest, ): transport_class = transports.SecurityCenterManagementRestTransport request_init = {} - request_init["name"] = "" + request_init["parent"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -17080,27 +19517,29 @@ def test_delete_event_threat_detection_custom_module_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).delete_event_threat_detection_custom_module._get_unset_required_fields( - jsonified_request - ) + ).list_security_center_services._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["name"] = "name_value" + jsonified_request["parent"] = "parent_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).delete_event_threat_detection_custom_module._get_unset_required_fields( - jsonified_request - ) + ).list_security_center_services._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("validate_only",)) + assert not set(unset_fields) - set( + ( + "page_size", + "page_token", + "show_eligible_modules_only", + ) + ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" client = SecurityCenterManagementClient( credentials=ga_credentials.AnonymousCredentials(), @@ -17109,7 +19548,7 @@ def test_delete_event_threat_detection_custom_module_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = None + return_value = security_center_management.ListSecurityCenterServicesResponse() # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -17121,45 +19560,61 @@ def test_delete_event_threat_detection_custom_module_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "delete", + "method": "get", "query_params": pb_request, } transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 - json_return_value = "" + + # Convert return value to protobuf type + return_value = ( + security_center_management.ListSecurityCenterServicesResponse.pb( + return_value + ) + ) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.delete_event_threat_detection_custom_module(request) + response = client.list_security_center_services(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_delete_event_threat_detection_custom_module_rest_unset_required_fields(): +def test_list_security_center_services_rest_unset_required_fields(): transport = transports.SecurityCenterManagementRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.delete_event_threat_detection_custom_module._get_unset_required_fields( + unset_fields = transport.list_security_center_services._get_unset_required_fields( {} ) - assert set(unset_fields) == (set(("validateOnly",)) & set(("name",))) + assert set(unset_fields) == ( + set( + ( + "pageSize", + "pageToken", + "showEligibleModulesOnly", + ) + ) + & set(("parent",)) + ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_delete_event_threat_detection_custom_module_rest_interceptors( - null_interceptor, -): +def test_list_security_center_services_rest_interceptors(null_interceptor): transport = transports.SecurityCenterManagementRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.SecurityCenterManagementRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.SecurityCenterManagementRestInterceptor() + ), ) client = SecurityCenterManagementClient(transport=transport) with mock.patch.object( @@ -17168,11 +19623,15 @@ def test_delete_event_threat_detection_custom_module_rest_interceptors( path_template, "transcode" ) as transcode, mock.patch.object( transports.SecurityCenterManagementRestInterceptor, - "pre_delete_event_threat_detection_custom_module", + "post_list_security_center_services", + ) as post, mock.patch.object( + transports.SecurityCenterManagementRestInterceptor, + "pre_list_security_center_services", ) as pre: pre.assert_not_called() - pb_message = security_center_management.DeleteEventThreatDetectionCustomModuleRequest.pb( - security_center_management.DeleteEventThreatDetectionCustomModuleRequest() + post.assert_not_called() + pb_message = security_center_management.ListSecurityCenterServicesRequest.pb( + security_center_management.ListSecurityCenterServicesRequest() ) transcode.return_value = { "method": "post", @@ -17184,17 +19643,23 @@ def test_delete_event_threat_detection_custom_module_rest_interceptors( req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - - request = ( - security_center_management.DeleteEventThreatDetectionCustomModuleRequest() + 
req.return_value._content = ( + security_center_management.ListSecurityCenterServicesResponse.to_json( + security_center_management.ListSecurityCenterServicesResponse() + ) ) + + request = security_center_management.ListSecurityCenterServicesRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata + post.return_value = ( + security_center_management.ListSecurityCenterServicesResponse() + ) - client.delete_event_threat_detection_custom_module( + client.list_security_center_services( request, metadata=[ ("key", "val"), @@ -17203,11 +19668,12 @@ def test_delete_event_threat_detection_custom_module_rest_interceptors( ) pre.assert_called_once() + post.assert_called_once() -def test_delete_event_threat_detection_custom_module_rest_bad_request( +def test_list_security_center_services_rest_bad_request( transport: str = "rest", - request_type=security_center_management.DeleteEventThreatDetectionCustomModuleRequest, + request_type=security_center_management.ListSecurityCenterServicesRequest, ): client = SecurityCenterManagementClient( credentials=ga_credentials.AnonymousCredentials(), @@ -17215,9 +19681,7 @@ def test_delete_event_threat_detection_custom_module_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = { - "name": "projects/sample1/locations/sample2/eventThreatDetectionCustomModules/sample3" - } + request_init = {"parent": "projects/sample1/locations/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -17229,10 +19693,10 @@ def test_delete_event_threat_detection_custom_module_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.delete_event_threat_detection_custom_module(request) + client.list_security_center_services(request) -def test_delete_event_threat_detection_custom_module_rest_flattened(): +def test_list_security_center_services_rest_flattened(): client = SecurityCenterManagementClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -17241,42 +19705,42 @@ def test_delete_event_threat_detection_custom_module_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = None + return_value = security_center_management.ListSecurityCenterServicesResponse() # get arguments that satisfy an http rule for this method - sample_request = { - "name": "projects/sample1/locations/sample2/eventThreatDetectionCustomModules/sample3" - } + sample_request = {"parent": "projects/sample1/locations/sample2"} # get truthy value for each flattened field mock_args = dict( - name="name_value", + parent="parent_value", ) mock_args.update(sample_request) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - json_return_value = "" + # Convert return value to protobuf type + return_value = security_center_management.ListSecurityCenterServicesResponse.pb( + return_value + ) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.delete_event_threat_detection_custom_module(**mock_args) + client.list_security_center_services(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{name=projects/*/locations/*/eventThreatDetectionCustomModules/*}" + "%s/v1/{parent=projects/*/locations/*}/securityCenterServices" % client.transport._host, args[1], ) -def test_delete_event_threat_detection_custom_module_rest_flattened_error( - transport: str = "rest", -): +def test_list_security_center_services_rest_flattened_error(transport: str = "rest"): client = SecurityCenterManagementClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -17285,63 +19749,215 @@ def test_delete_event_threat_detection_custom_module_rest_flattened_error( # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.delete_event_threat_detection_custom_module( - security_center_management.DeleteEventThreatDetectionCustomModuleRequest(), - name="name_value", + client.list_security_center_services( + security_center_management.ListSecurityCenterServicesRequest(), + parent="parent_value", + ) + + +def test_list_security_center_services_rest_pager(transport: str = "rest"): + client = SecurityCenterManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + security_center_management.ListSecurityCenterServicesResponse( + security_center_services=[ + security_center_management.SecurityCenterService(), + security_center_management.SecurityCenterService(), + security_center_management.SecurityCenterService(), + ], + next_page_token="abc", + ), + security_center_management.ListSecurityCenterServicesResponse( + security_center_services=[], + next_page_token="def", + ), + security_center_management.ListSecurityCenterServicesResponse( + security_center_services=[ + security_center_management.SecurityCenterService(), + ], + next_page_token="ghi", + ), + security_center_management.ListSecurityCenterServicesResponse( + security_center_services=[ + security_center_management.SecurityCenterService(), + security_center_management.SecurityCenterService(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + security_center_management.ListSecurityCenterServicesResponse.to_json(x) + for x in response ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + sample_request = {"parent": "projects/sample1/locations/sample2"} + + pager = client.list_security_center_services(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all( + isinstance(i, security_center_management.SecurityCenterService) + for i in results + ) -def test_delete_event_threat_detection_custom_module_rest_error(): - client = SecurityCenterManagementClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) + pages = list(client.list_security_center_services(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert 
page_.raw_page.next_page_token == token @pytest.mark.parametrize( "request_type", [ - security_center_management.ValidateEventThreatDetectionCustomModuleRequest, + security_center_management.UpdateSecurityCenterServiceRequest, dict, ], ) -def test_validate_event_threat_detection_custom_module_rest(request_type): +def test_update_security_center_service_rest(request_type): client = SecurityCenterManagementClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} + request_init = { + "security_center_service": { + "name": "projects/sample1/locations/sample2/securityCenterServices/sample3" + } + } + request_init["security_center_service"] = { + "name": "projects/sample1/locations/sample2/securityCenterServices/sample3", + "intended_enablement_state": 1, + "effective_enablement_state": 1, + "modules": {}, + "update_time": {"seconds": 751, "nanos": 543}, + "service_config": {"fields": {}}, + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = ( + security_center_management.UpdateSecurityCenterServiceRequest.meta.fields[ + "security_center_service" + ] + ) + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "security_center_service" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + 
subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["security_center_service"][field])): + del request_init["security_center_service"][field][i][subfield] + else: + del request_init["security_center_service"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = ( - security_center_management.ValidateEventThreatDetectionCustomModuleResponse() + return_value = security_center_management.SecurityCenterService( + name="name_value", + intended_enablement_state=security_center_management.SecurityCenterService.EnablementState.INHERITED, + effective_enablement_state=security_center_management.SecurityCenterService.EnablementState.INHERITED, ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = security_center_management.ValidateEventThreatDetectionCustomModuleResponse.pb( - return_value - ) + return_value = security_center_management.SecurityCenterService.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.validate_event_threat_detection_custom_module(request) + response = client.update_security_center_service(request) # Establish that the response is the type that we expect. - assert isinstance( - response, - security_center_management.ValidateEventThreatDetectionCustomModuleResponse, + assert isinstance(response, security_center_management.SecurityCenterService) + assert response.name == "name_value" + assert ( + response.intended_enablement_state + == security_center_management.SecurityCenterService.EnablementState.INHERITED + ) + assert ( + response.effective_enablement_state + == security_center_management.SecurityCenterService.EnablementState.INHERITED ) -def test_validate_event_threat_detection_custom_module_rest_use_cached_wrapped_rpc(): +def test_update_security_center_service_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -17356,7 +19972,7 @@ def test_validate_event_threat_detection_custom_module_rest_use_cached_wrapped_r # Ensure method has been cached assert ( - client._transport.validate_event_threat_detection_custom_module + client._transport.update_security_center_service in client._transport._wrapped_methods ) @@ -17366,31 +19982,28 @@ def test_validate_event_threat_detection_custom_module_rest_use_cached_wrapped_r "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.validate_event_threat_detection_custom_module + client._transport.update_security_center_service ] = mock_rpc request = {} - client.validate_event_threat_detection_custom_module(request) + client.update_security_center_service(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.validate_event_threat_detection_custom_module(request) + client.update_security_center_service(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_validate_event_threat_detection_custom_module_rest_required_fields( - request_type=security_center_management.ValidateEventThreatDetectionCustomModuleRequest, +def test_update_security_center_service_rest_required_fields( + request_type=security_center_management.UpdateSecurityCenterServiceRequest, ): transport_class = transports.SecurityCenterManagementRestTransport request_init = {} - request_init["parent"] = "" - request_init["raw_text"] = "" - request_init["type_"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -17401,31 +20014,24 @@ def test_validate_event_threat_detection_custom_module_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).validate_event_threat_detection_custom_module._get_unset_required_fields( - jsonified_request - ) + ).update_security_center_service._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["parent"] = "parent_value" - jsonified_request["rawText"] = "raw_text_value" - jsonified_request["type"] = "type__value" - unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).validate_event_threat_detection_custom_module._get_unset_required_fields( - jsonified_request + ).update_security_center_service._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "update_mask", + "validate_only", + ) ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" - assert "rawText" in jsonified_request - assert jsonified_request["rawText"] == "raw_text_value" - assert "type" in jsonified_request - assert jsonified_request["type"] == "type__value" client = SecurityCenterManagementClient( credentials=ga_credentials.AnonymousCredentials(), @@ -17434,9 +20040,7 @@ def test_validate_event_threat_detection_custom_module_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = ( - security_center_management.ValidateEventThreatDetectionCustomModuleResponse() - ) + return_value = security_center_management.SecurityCenterService() # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -17448,7 +20052,7 @@ def test_validate_event_threat_detection_custom_module_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "post", + "method": "patch", "query_params": pb_request, } transcode_result["body"] = pb_request @@ -17458,7 +20062,7 @@ def test_validate_event_threat_detection_custom_module_rest_required_fields( response_value.status_code = 200 # Convert return value to protobuf type - return_value = security_center_management.ValidateEventThreatDetectionCustomModuleResponse.pb( + return_value = security_center_management.SecurityCenterService.pb( return_value ) json_return_value = json_format.MessageToJson(return_value) @@ -17466,42 +20070,46 @@ def test_validate_event_threat_detection_custom_module_rest_required_fields( response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.validate_event_threat_detection_custom_module(request) + response = client.update_security_center_service(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_validate_event_threat_detection_custom_module_rest_unset_required_fields(): +def test_update_security_center_service_rest_unset_required_fields(): transport = transports.SecurityCenterManagementRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.validate_event_threat_detection_custom_module._get_unset_required_fields( + unset_fields = transport.update_security_center_service._get_unset_required_fields( {} ) assert set(unset_fields) == ( - set(()) + set( + ( + "updateMask", + "validateOnly", + ) + ) & set( ( - "parent", - "rawText", - "type", + "securityCenterService", + "updateMask", ) ) ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_validate_event_threat_detection_custom_module_rest_interceptors( - null_interceptor, -): +def test_update_security_center_service_rest_interceptors(null_interceptor): transport = transports.SecurityCenterManagementRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.SecurityCenterManagementRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.SecurityCenterManagementRestInterceptor() + ), ) client = SecurityCenterManagementClient(transport=transport) with mock.patch.object( @@ -17510,15 +20118,15 @@ def test_validate_event_threat_detection_custom_module_rest_interceptors( path_template, "transcode" ) as transcode, mock.patch.object( transports.SecurityCenterManagementRestInterceptor, - "post_validate_event_threat_detection_custom_module", + "post_update_security_center_service", ) as post, mock.patch.object( transports.SecurityCenterManagementRestInterceptor, - "pre_validate_event_threat_detection_custom_module", + "pre_update_security_center_service", ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = security_center_management.ValidateEventThreatDetectionCustomModuleRequest.pb( - security_center_management.ValidateEventThreatDetectionCustomModuleRequest() + pb_message = security_center_management.UpdateSecurityCenterServiceRequest.pb( + security_center_management.UpdateSecurityCenterServiceRequest() ) transcode.return_value = { "method": "post", @@ -17530,23 +20138,21 @@ def 
test_validate_event_threat_detection_custom_module_rest_interceptors( req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = security_center_management.ValidateEventThreatDetectionCustomModuleResponse.to_json( - security_center_management.ValidateEventThreatDetectionCustomModuleResponse() + req.return_value._content = ( + security_center_management.SecurityCenterService.to_json( + security_center_management.SecurityCenterService() + ) ) - request = ( - security_center_management.ValidateEventThreatDetectionCustomModuleRequest() - ) + request = security_center_management.UpdateSecurityCenterServiceRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = ( - security_center_management.ValidateEventThreatDetectionCustomModuleResponse() - ) + post.return_value = security_center_management.SecurityCenterService() - client.validate_event_threat_detection_custom_module( + client.update_security_center_service( request, metadata=[ ("key", "val"), @@ -17558,9 +20164,9 @@ def test_validate_event_threat_detection_custom_module_rest_interceptors( post.assert_called_once() -def test_validate_event_threat_detection_custom_module_rest_bad_request( +def test_update_security_center_service_rest_bad_request( transport: str = "rest", - request_type=security_center_management.ValidateEventThreatDetectionCustomModuleRequest, + request_type=security_center_management.UpdateSecurityCenterServiceRequest, ): client = SecurityCenterManagementClient( credentials=ga_credentials.AnonymousCredentials(), @@ -17568,7 +20174,11 @@ def test_validate_event_threat_detection_custom_module_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} + request_init = { + "security_center_service": { + "name": "projects/sample1/locations/sample2/securityCenterServices/sample3" + } + } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -17580,10 +20190,77 @@ def test_validate_event_threat_detection_custom_module_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.validate_event_threat_detection_custom_module(request) + client.update_security_center_service(request) -def test_validate_event_threat_detection_custom_module_rest_error(): +def test_update_security_center_service_rest_flattened(): + client = SecurityCenterManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = security_center_management.SecurityCenterService() + + # get arguments that satisfy an http rule for this method + sample_request = { + "security_center_service": { + "name": "projects/sample1/locations/sample2/securityCenterServices/sample3" + } + } + + # get truthy value for each flattened field + mock_args = dict( + security_center_service=security_center_management.SecurityCenterService( + name="name_value" + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = security_center_management.SecurityCenterService.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.update_security_center_service(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{security_center_service.name=projects/*/locations/*/securityCenterServices/*}" + % client.transport._host, + args[1], + ) + + +def test_update_security_center_service_rest_flattened_error(transport: str = "rest"): + client = SecurityCenterManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_security_center_service( + security_center_management.UpdateSecurityCenterServiceRequest(), + security_center_service=security_center_management.SecurityCenterService( + name="name_value" + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +def test_update_security_center_service_rest_error(): client = SecurityCenterManagementClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -17746,6 +20423,9 @@ def test_security_center_management_base_transport(): "update_event_threat_detection_custom_module", "delete_event_threat_detection_custom_module", "validate_event_threat_detection_custom_module", + "get_security_center_service", + "list_security_center_services", + "update_security_center_service", "get_location", "list_locations", ) @@ -18092,6 +20772,15 @@ def test_security_center_management_client_transport_session_collision(transport session1 = client1.transport.validate_event_threat_detection_custom_module._session session2 = client2.transport.validate_event_threat_detection_custom_module._session assert session1 != session2 + session1 = client1.transport.get_security_center_service._session + session2 = client2.transport.get_security_center_service._session + assert session1 != session2 + session1 = client1.transport.list_security_center_services._session + session2 = client2.transport.list_security_center_services._session + assert session1 != session2 + session1 = client1.transport.update_security_center_service._session + session2 = client2.transport.update_security_center_service._session + assert session1 != session2 def test_security_center_management_grpc_transport_channel(): @@ -18346,10 +21035,38 @@ def test_parse_finding_path(): assert expected == actual -def test_security_health_analytics_custom_module_path(): - organization = "squid" +def test_security_center_service_path(): 
+ project = "squid" location = "clam" - security_health_analytics_custom_module = "whelk" + service = "whelk" + expected = "projects/{project}/locations/{location}/securityCenterServices/{service}".format( + project=project, + location=location, + service=service, + ) + actual = SecurityCenterManagementClient.security_center_service_path( + project, location, service + ) + assert expected == actual + + +def test_parse_security_center_service_path(): + expected = { + "project": "octopus", + "location": "oyster", + "service": "nudibranch", + } + path = SecurityCenterManagementClient.security_center_service_path(**expected) + + # Check that the path construction is reversible. + actual = SecurityCenterManagementClient.parse_security_center_service_path(path) + assert expected == actual + + +def test_security_health_analytics_custom_module_path(): + organization = "cuttlefish" + location = "mussel" + security_health_analytics_custom_module = "winkle" expected = "organizations/{organization}/locations/{location}/securityHealthAnalyticsCustomModules/{security_health_analytics_custom_module}".format( organization=organization, location=location, @@ -18365,9 +21082,9 @@ def test_security_health_analytics_custom_module_path(): def test_parse_security_health_analytics_custom_module_path(): expected = { - "organization": "octopus", - "location": "oyster", - "security_health_analytics_custom_module": "nudibranch", + "organization": "nautilus", + "location": "scallop", + "security_health_analytics_custom_module": "abalone", } path = SecurityCenterManagementClient.security_health_analytics_custom_module_path( **expected @@ -18381,7 +21098,7 @@ def test_parse_security_health_analytics_custom_module_path(): def test_common_billing_account_path(): - billing_account = "cuttlefish" + billing_account = "squid" expected = "billingAccounts/{billing_account}".format( billing_account=billing_account, ) @@ -18391,7 +21108,7 @@ def test_common_billing_account_path(): def test_parse_common_billing_account_path(): expected = { - "billing_account": "mussel", + "billing_account": "clam", } path = SecurityCenterManagementClient.common_billing_account_path(**expected) @@ -18401,7 +21118,7 @@ def test_parse_common_billing_account_path(): def test_common_folder_path(): - folder = "winkle" + folder = "whelk" expected = "folders/{folder}".format( folder=folder, ) @@ -18411,7 +21128,7 @@ def test_common_folder_path(): def test_parse_common_folder_path(): expected = { - "folder": "nautilus", + "folder": "octopus", } path = SecurityCenterManagementClient.common_folder_path(**expected) @@ -18421,7 +21138,7 @@ def test_parse_common_folder_path(): def test_common_organization_path(): - organization = "scallop" + organization = "oyster" expected = "organizations/{organization}".format( organization=organization, ) @@ -18431,7 +21148,7 @@ def test_common_organization_path(): def test_parse_common_organization_path(): expected = { - "organization": "abalone", + "organization": "nudibranch", } path = SecurityCenterManagementClient.common_organization_path(**expected) @@ -18441,7 +21158,7 @@ def test_parse_common_organization_path(): def test_common_project_path(): - project = "squid" + project = "cuttlefish" expected = "projects/{project}".format( project=project, ) @@ -18451,7 +21168,7 @@ def test_common_project_path(): def test_parse_common_project_path(): expected = { - "project": "clam", + "project": "mussel", } path = SecurityCenterManagementClient.common_project_path(**expected) @@ -18461,8 +21178,8 @@ def 
test_parse_common_project_path(): def test_common_location_path(): - project = "whelk" - location = "octopus" + project = "winkle" + location = "nautilus" expected = "projects/{project}/locations/{location}".format( project=project, location=location, @@ -18473,8 +21190,8 @@ def test_common_location_path(): def test_parse_common_location_path(): expected = { - "project": "oyster", - "location": "nudibranch", + "project": "scallop", + "location": "abalone", } path = SecurityCenterManagementClient.common_location_path(**expected) diff --git a/packages/google-cloud-service-control/google/cloud/servicecontrol_v1/types/service_controller.py b/packages/google-cloud-service-control/google/cloud/servicecontrol_v1/types/service_controller.py index 2ee129a72b45..93b6d50fbbd8 100644 --- a/packages/google-cloud-service-control/google/cloud/servicecontrol_v1/types/service_controller.py +++ b/packages/google-cloud-service-control/google/cloud/servicecontrol_v1/types/service_controller.py @@ -137,7 +137,7 @@ class ConsumerInfo(proto.Message): project_number (int): The Google cloud project number, e.g. 1234567890. A value of 0 indicates no project - number is found. + number is found. NOTE: This field is deprecated after we support flexible consumer id. New code should not depend diff --git a/packages/google-cloud-service-directory/google/cloud/servicedirectory/gapic_version.py b/packages/google-cloud-service-directory/google/cloud/servicedirectory/gapic_version.py index 4dbfb640abb7..558c8aab67c5 100644 --- a/packages/google-cloud-service-directory/google/cloud/servicedirectory/gapic_version.py +++ b/packages/google-cloud-service-directory/google/cloud/servicedirectory/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "1.11.4" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-service-directory/google/cloud/servicedirectory_v1/gapic_version.py b/packages/google-cloud-service-directory/google/cloud/servicedirectory_v1/gapic_version.py index 4dbfb640abb7..558c8aab67c5 100644 --- a/packages/google-cloud-service-directory/google/cloud/servicedirectory_v1/gapic_version.py +++ b/packages/google-cloud-service-directory/google/cloud/servicedirectory_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "1.11.4" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-service-directory/google/cloud/servicedirectory_v1beta1/gapic_version.py b/packages/google-cloud-service-directory/google/cloud/servicedirectory_v1beta1/gapic_version.py index 4dbfb640abb7..558c8aab67c5 100644 --- a/packages/google-cloud-service-directory/google/cloud/servicedirectory_v1beta1/gapic_version.py +++ b/packages/google-cloud-service-directory/google/cloud/servicedirectory_v1beta1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "1.11.4" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-service-directory/samples/generated_samples/snippet_metadata_google.cloud.servicedirectory.v1.json b/packages/google-cloud-service-directory/samples/generated_samples/snippet_metadata_google.cloud.servicedirectory.v1.json index f6573864bd97..a0b20f7bef18 100644 --- a/packages/google-cloud-service-directory/samples/generated_samples/snippet_metadata_google.cloud.servicedirectory.v1.json +++ b/packages/google-cloud-service-directory/samples/generated_samples/snippet_metadata_google.cloud.servicedirectory.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-service-directory", - "version": "1.11.4" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-service-directory/samples/generated_samples/snippet_metadata_google.cloud.servicedirectory.v1beta1.json b/packages/google-cloud-service-directory/samples/generated_samples/snippet_metadata_google.cloud.servicedirectory.v1beta1.json index 0c712a3b2269..8cb020ecf895 100644 --- a/packages/google-cloud-service-directory/samples/generated_samples/snippet_metadata_google.cloud.servicedirectory.v1beta1.json +++ b/packages/google-cloud-service-directory/samples/generated_samples/snippet_metadata_google.cloud.servicedirectory.v1beta1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-service-directory", - "version": "1.11.4" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-service-directory/tests/unit/gapic/servicedirectory_v1/test_registration_service.py b/packages/google-cloud-service-directory/tests/unit/gapic/servicedirectory_v1/test_registration_service.py index 8058cf3b7373..b60e1c8f4aa9 100644 --- a/packages/google-cloud-service-directory/tests/unit/gapic/servicedirectory_v1/test_registration_service.py +++ b/packages/google-cloud-service-directory/tests/unit/gapic/servicedirectory_v1/test_registration_service.py @@ -2008,13 +2008,13 @@ def test_list_namespaces_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_namespaces(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -4077,13 +4077,13 @@ def test_list_services_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_services(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -6156,13 +6156,13 @@ def test_list_endpoints_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_endpoints(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 diff --git a/packages/google-cloud-service-directory/tests/unit/gapic/servicedirectory_v1beta1/test_registration_service.py 
b/packages/google-cloud-service-directory/tests/unit/gapic/servicedirectory_v1beta1/test_registration_service.py index 3773deb4d6b6..b755fbf03f1e 100644 --- a/packages/google-cloud-service-directory/tests/unit/gapic/servicedirectory_v1beta1/test_registration_service.py +++ b/packages/google-cloud-service-directory/tests/unit/gapic/servicedirectory_v1beta1/test_registration_service.py @@ -2009,13 +2009,13 @@ def test_list_namespaces_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_namespaces(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -4078,13 +4078,13 @@ def test_list_services_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_services(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -6157,13 +6157,13 @@ def test_list_endpoints_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_endpoints(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 diff --git a/packages/google-cloud-service-management/tests/unit/gapic/servicemanagement_v1/test_service_manager.py b/packages/google-cloud-service-management/tests/unit/gapic/servicemanagement_v1/test_service_manager.py index 047e9704ddd3..9529efbb4987 100644 --- a/packages/google-cloud-service-management/tests/unit/gapic/servicemanagement_v1/test_service_manager.py +++ b/packages/google-cloud-service-management/tests/unit/gapic/servicemanagement_v1/test_service_manager.py @@ -1523,10 +1523,10 @@ def test_list_services_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () + expected_metadata = () pager = client.list_services(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -3524,13 +3524,13 @@ def test_list_service_configs_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("service_name", ""),)), ) pager = client.list_service_configs(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -5381,13 +5381,13 @@ def test_list_service_rollouts_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("service_name", ""),)), ) pager = client.list_service_rollouts(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 diff --git 
a/packages/google-cloud-service-usage/tests/unit/gapic/service_usage_v1/test_service_usage.py b/packages/google-cloud-service-usage/tests/unit/gapic/service_usage_v1/test_service_usage.py index c379d9685a17..3bdc8ae8df02 100644 --- a/packages/google-cloud-service-usage/tests/unit/gapic/service_usage_v1/test_service_usage.py +++ b/packages/google-cloud-service-usage/tests/unit/gapic/service_usage_v1/test_service_usage.py @@ -2304,13 +2304,13 @@ def test_list_services_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_services(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 diff --git a/packages/google-cloud-servicehealth/tests/unit/gapic/servicehealth_v1/test_service_health.py b/packages/google-cloud-servicehealth/tests/unit/gapic/servicehealth_v1/test_service_health.py index 41795f7be861..3e38e0722381 100644 --- a/packages/google-cloud-servicehealth/tests/unit/gapic/servicehealth_v1/test_service_health.py +++ b/packages/google-cloud-servicehealth/tests/unit/gapic/servicehealth_v1/test_service_health.py @@ -1529,13 +1529,13 @@ def test_list_events_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_events(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -2536,13 +2536,13 @@ def test_list_organization_events_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_organization_events(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -3576,13 +3576,13 @@ def test_list_organization_impacts_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_organization_impacts(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 diff --git a/packages/google-cloud-speech/tests/unit/gapic/speech_v1/test_adaptation.py b/packages/google-cloud-speech/tests/unit/gapic/speech_v1/test_adaptation.py index 67c6950a9525..4051438c9085 100644 --- a/packages/google-cloud-speech/tests/unit/gapic/speech_v1/test_adaptation.py +++ b/packages/google-cloud-speech/tests/unit/gapic/speech_v1/test_adaptation.py @@ -2270,13 +2270,13 @@ def test_list_phrase_set_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_phrase_set(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert 
len(results) == 6 @@ -4423,13 +4423,13 @@ def test_list_custom_classes_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_custom_classes(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 diff --git a/packages/google-cloud-speech/tests/unit/gapic/speech_v1p1beta1/test_adaptation.py b/packages/google-cloud-speech/tests/unit/gapic/speech_v1p1beta1/test_adaptation.py index 949f4c4a6fdc..206c957ebb31 100644 --- a/packages/google-cloud-speech/tests/unit/gapic/speech_v1p1beta1/test_adaptation.py +++ b/packages/google-cloud-speech/tests/unit/gapic/speech_v1p1beta1/test_adaptation.py @@ -2270,13 +2270,13 @@ def test_list_phrase_set_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_phrase_set(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -4423,13 +4423,13 @@ def test_list_custom_classes_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_custom_classes(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 diff --git a/packages/google-cloud-speech/tests/unit/gapic/speech_v2/test_speech.py b/packages/google-cloud-speech/tests/unit/gapic/speech_v2/test_speech.py index 51700ffbeeba..69964be92fbb 100644 --- a/packages/google-cloud-speech/tests/unit/gapic/speech_v2/test_speech.py +++ b/packages/google-cloud-speech/tests/unit/gapic/speech_v2/test_speech.py @@ -1883,13 +1883,13 @@ def test_list_recognizers_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_recognizers(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -6169,13 +6169,13 @@ def test_list_custom_classes_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_custom_classes(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -8740,13 +8740,13 @@ def test_list_phrase_sets_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_phrase_sets(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert 
len(results) == 6 diff --git a/packages/google-cloud-storage-control/.repo-metadata.json b/packages/google-cloud-storage-control/.repo-metadata.json index 26074d1b7a9a..3a9972009fd5 100644 --- a/packages/google-cloud-storage-control/.repo-metadata.json +++ b/packages/google-cloud-storage-control/.repo-metadata.json @@ -5,7 +5,7 @@ "product_documentation": "https://cloud.google.com/storage/docs/reference/rpc/google.storage.control.v2", "client_documentation": "https://cloud.google.com/python/docs/reference/google-cloud-storage-control/latest", "issue_tracker": "https://issuetracker.google.com/issues/new?component=187243&template=1162869", - "release_level": "preview", + "release_level": "stable", "language": "python", "library_type": "GAPIC_AUTO", "repo": "googleapis/google-cloud-python", diff --git a/packages/google-cloud-storage-control/CHANGELOG.md b/packages/google-cloud-storage-control/CHANGELOG.md index 210d28cf9ca3..e1d044a373a7 100644 --- a/packages/google-cloud-storage-control/CHANGELOG.md +++ b/packages/google-cloud-storage-control/CHANGELOG.md @@ -1,5 +1,19 @@ # Changelog +## [1.0.1](https://github.com/googleapis/google-cloud-python/compare/google-cloud-storage-control-v1.0.0...google-cloud-storage-control-v1.0.1) (2024-06-24) + + +### Documentation + +* remove allowlist note from Folders RPCs ([41a3afd](https://github.com/googleapis/google-cloud-python/commit/41a3afda46a7c3f02bff3f92f15cd49daf92083e)) + +## [1.0.0](https://github.com/googleapis/google-cloud-python/compare/google-cloud-storage-control-v0.2.0...google-cloud-storage-control-v1.0.0) (2024-06-19) + + +### Features + +* bump release level to production/stable ([4a15440](https://github.com/googleapis/google-cloud-python/commit/4a154403f07321af6ea051fa81b58ee2651de34f)) + ## [0.2.0](https://github.com/googleapis/google-cloud-python/compare/google-cloud-storage-control-v0.1.3...google-cloud-storage-control-v0.2.0) (2024-05-07) diff --git a/packages/google-cloud-storage-control/README.rst b/packages/google-cloud-storage-control/README.rst index eaafbaf329dc..3e4fe7d3258c 100644 --- a/packages/google-cloud-storage-control/README.rst +++ b/packages/google-cloud-storage-control/README.rst @@ -1,14 +1,14 @@ Python Client for Storage Control API ===================================== -|preview| |pypi| |versions| +|stable| |pypi| |versions| `Storage Control API`_: Lets you perform metadata-specific, control plane, and long-running operations apart from the Storage API. Separating these operations from the Storage API improves API standardization and lets you run faster releases. - `Client Library Documentation`_ - `Product Documentation`_ -.. |preview| image:: https://img.shields.io/badge/support-preview-orange.svg +.. |stable| image:: https://img.shields.io/badge/support-stable-gold.svg :target: https://github.com/googleapis/google-cloud-python/blob/main/README.rst#stability-levels .. 
|pypi| image:: https://img.shields.io/pypi/v/google-cloud-storage-control.svg :target: https://pypi.org/project/google-cloud-storage-control/ diff --git a/packages/google-cloud-storage-control/docs/index.rst b/packages/google-cloud-storage-control/docs/index.rst index 78fa0cfde468..fb858cb6a5f3 100644 --- a/packages/google-cloud-storage-control/docs/index.rst +++ b/packages/google-cloud-storage-control/docs/index.rst @@ -21,3 +21,8 @@ For a list of all ``google-cloud-storage-control`` releases: :maxdepth: 2 CHANGELOG + +.. toctree:: + :hidden: + + summary_overview.md diff --git a/packages/google-cloud-storage-control/docs/summary_overview.md b/packages/google-cloud-storage-control/docs/summary_overview.md new file mode 100644 index 000000000000..27c41565a47a --- /dev/null +++ b/packages/google-cloud-storage-control/docs/summary_overview.md @@ -0,0 +1,22 @@ +[ +This is a templated file. Adding content to this file may result in it being +reverted. Instead, if you want to place additional content, create an +"overview_content.md" file in `docs/` directory. The Sphinx tool will +pick up on the content and merge the content. +]: # + +# Storage Control API API + +Overview of the APIs available for Storage Control API API. + +## All entries + +Classes, methods and properties & attributes for +Storage Control API API. + +[classes](https://cloud.google.com/python/docs/reference/google-cloud-storage-control/latest/summary_class.html) + +[methods](https://cloud.google.com/python/docs/reference/google-cloud-storage-control/latest/summary_method.html) + +[properties and +attributes](https://cloud.google.com/python/docs/reference/google-cloud-storage-control/latest/summary_property.html) diff --git a/packages/google-cloud-storage-control/google/cloud/storage_control/gapic_version.py b/packages/google-cloud-storage-control/google/cloud/storage_control/gapic_version.py index 558c8aab67c5..4d5271385a9a 100644 --- a/packages/google-cloud-storage-control/google/cloud/storage_control/gapic_version.py +++ b/packages/google-cloud-storage-control/google/cloud/storage_control/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.0.0" # {x-release-please-version} +__version__ = "1.0.1" # {x-release-please-version} diff --git a/packages/google-cloud-storage-control/google/cloud/storage_control_v2/gapic_version.py b/packages/google-cloud-storage-control/google/cloud/storage_control_v2/gapic_version.py index 558c8aab67c5..4d5271385a9a 100644 --- a/packages/google-cloud-storage-control/google/cloud/storage_control_v2/gapic_version.py +++ b/packages/google-cloud-storage-control/google/cloud/storage_control_v2/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "0.0.0" # {x-release-please-version} +__version__ = "1.0.1" # {x-release-please-version} diff --git a/packages/google-cloud-storage-control/google/cloud/storage_control_v2/services/storage_control/async_client.py b/packages/google-cloud-storage-control/google/cloud/storage_control_v2/services/storage_control/async_client.py index 167fc9687889..099b0af28ca1 100644 --- a/packages/google-cloud-storage-control/google/cloud/storage_control_v2/services/storage_control/async_client.py +++ b/packages/google-cloud-storage-control/google/cloud/storage_control_v2/services/storage_control/async_client.py @@ -286,7 +286,6 @@ async def create_folder( ) -> storage_control.Folder: r"""Creates a new folder. This operation is only applicable to a hierarchical namespace enabled bucket. - Hierarchical namespace buckets are in allowlist preview. .. code-block:: python @@ -320,8 +319,6 @@ async def sample_create_folder(): The request object. Request message for CreateFolder. This operation is only applicable to a hierarchical namespace enabled bucket. - Hierarchical namespace buckets are in - allowlist preview. parent (:class:`str`): Required. Name of the bucket in which the folder will reside. The bucket must @@ -361,8 +358,7 @@ async def sample_create_folder(): google.cloud.storage_control_v2.types.Folder: A folder resource. This resource can only exist in a hierarchical namespace - enabled bucket. Hierarchical namespace - buckets are in allowlist preview. + enabled bucket. """ # Create or coerce a protobuf request object. @@ -423,8 +419,7 @@ async def delete_folder( ) -> None: r"""Permanently deletes an empty folder. This operation is only applicable to a hierarchical namespace enabled - bucket. Hierarchical namespace buckets are in allowlist - preview. + bucket. .. code-block:: python @@ -454,8 +449,6 @@ async def sample_delete_folder(): The request object. Request message for DeleteFolder. This operation is only applicable to a hierarchical namespace enabled bucket. - Hierarchical namespace buckets are in - allowlist preview. name (:class:`str`): Required. Name of the folder. Format: ``projects/{project}/buckets/{bucket}/folders/{folder}`` @@ -520,8 +513,7 @@ async def get_folder( ) -> storage_control.Folder: r"""Returns metadata for the specified folder. This operation is only applicable to a hierarchical namespace - enabled bucket. Hierarchical namespace buckets are in - allowlist preview. + enabled bucket. .. code-block:: python @@ -554,8 +546,6 @@ async def sample_get_folder(): The request object. Request message for GetFolder. This operation is only applicable to a hierarchical namespace enabled bucket. - Hierarchical namespace buckets are in - allowlist preview. name (:class:`str`): Required. Name of the folder. Format: ``projects/{project}/buckets/{bucket}/folders/{folder}`` @@ -573,8 +563,7 @@ async def sample_get_folder(): google.cloud.storage_control_v2.types.Folder: A folder resource. This resource can only exist in a hierarchical namespace - enabled bucket. Hierarchical namespace - buckets are in allowlist preview. + enabled bucket. """ # Create or coerce a protobuf request object. @@ -631,7 +620,6 @@ async def list_folders( ) -> pagers.ListFoldersAsyncPager: r"""Retrieves a list of folders. This operation is only applicable to a hierarchical namespace enabled bucket. - Hierarchical namespace buckets are in allowlist preview. .. code-block:: python @@ -665,8 +653,6 @@ async def sample_list_folders(): The request object. Request message for ListFolders. 
This operation is only applicable to a hierarchical namespace enabled bucket. - Hierarchical namespace buckets are in - allowlist preview. parent (:class:`str`): Required. Name of the bucket in which to look for folders. The bucket must be @@ -754,7 +740,6 @@ async def rename_folder( enabled bucket. During a rename, the source and destination folders are locked until the long running operation completes. - Hierarchical namespace buckets are in allowlist preview. .. code-block:: python @@ -792,8 +777,6 @@ async def sample_rename_folder(): The request object. Request message for RenameFolder. This operation is only applicable to a hierarchical namespace enabled bucket. - Hierarchical namespace buckets are in - allowlist preview. name (:class:`str`): Required. Name of the source folder being renamed. Format: @@ -818,8 +801,7 @@ async def sample_rename_folder(): An object representing a long-running operation. The result type for the operation will be :class:`google.cloud.storage_control_v2.types.Folder` A folder resource. This resource can only exist in a hierarchical namespace - enabled bucket. Hierarchical namespace buckets are in - allowlist preview. + enabled bucket. """ # Create or coerce a protobuf request object. diff --git a/packages/google-cloud-storage-control/google/cloud/storage_control_v2/services/storage_control/client.py b/packages/google-cloud-storage-control/google/cloud/storage_control_v2/services/storage_control/client.py index 2d20feebe4b5..1b3204fd93ae 100644 --- a/packages/google-cloud-storage-control/google/cloud/storage_control_v2/services/storage_control/client.py +++ b/packages/google-cloud-storage-control/google/cloud/storage_control_v2/services/storage_control/client.py @@ -737,7 +737,6 @@ def create_folder( ) -> storage_control.Folder: r"""Creates a new folder. This operation is only applicable to a hierarchical namespace enabled bucket. - Hierarchical namespace buckets are in allowlist preview. .. code-block:: python @@ -771,8 +770,6 @@ def sample_create_folder(): The request object. Request message for CreateFolder. This operation is only applicable to a hierarchical namespace enabled bucket. - Hierarchical namespace buckets are in - allowlist preview. parent (str): Required. Name of the bucket in which the folder will reside. The bucket must @@ -812,8 +809,7 @@ def sample_create_folder(): google.cloud.storage_control_v2.types.Folder: A folder resource. This resource can only exist in a hierarchical namespace - enabled bucket. Hierarchical namespace - buckets are in allowlist preview. + enabled bucket. """ # Create or coerce a protobuf request object. @@ -883,8 +879,7 @@ def delete_folder( ) -> None: r"""Permanently deletes an empty folder. This operation is only applicable to a hierarchical namespace enabled - bucket. Hierarchical namespace buckets are in allowlist - preview. + bucket. .. code-block:: python @@ -914,8 +909,6 @@ def sample_delete_folder(): The request object. Request message for DeleteFolder. This operation is only applicable to a hierarchical namespace enabled bucket. - Hierarchical namespace buckets are in - allowlist preview. name (str): Required. Name of the folder. Format: ``projects/{project}/buckets/{bucket}/folders/{folder}`` @@ -991,8 +984,7 @@ def get_folder( ) -> storage_control.Folder: r"""Returns metadata for the specified folder. This operation is only applicable to a hierarchical namespace - enabled bucket. Hierarchical namespace buckets are in - allowlist preview. + enabled bucket. .. 
code-block:: python @@ -1025,8 +1017,6 @@ def sample_get_folder(): The request object. Request message for GetFolder. This operation is only applicable to a hierarchical namespace enabled bucket. - Hierarchical namespace buckets are in - allowlist preview. name (str): Required. Name of the folder. Format: ``projects/{project}/buckets/{bucket}/folders/{folder}`` @@ -1044,8 +1034,7 @@ def sample_get_folder(): google.cloud.storage_control_v2.types.Folder: A folder resource. This resource can only exist in a hierarchical namespace - enabled bucket. Hierarchical namespace - buckets are in allowlist preview. + enabled bucket. """ # Create or coerce a protobuf request object. @@ -1113,7 +1102,6 @@ def list_folders( ) -> pagers.ListFoldersPager: r"""Retrieves a list of folders. This operation is only applicable to a hierarchical namespace enabled bucket. - Hierarchical namespace buckets are in allowlist preview. .. code-block:: python @@ -1147,8 +1135,6 @@ def sample_list_folders(): The request object. Request message for ListFolders. This operation is only applicable to a hierarchical namespace enabled bucket. - Hierarchical namespace buckets are in - allowlist preview. parent (str): Required. Name of the bucket in which to look for folders. The bucket must be @@ -1245,7 +1231,6 @@ def rename_folder( enabled bucket. During a rename, the source and destination folders are locked until the long running operation completes. - Hierarchical namespace buckets are in allowlist preview. .. code-block:: python @@ -1283,8 +1268,6 @@ def sample_rename_folder(): The request object. Request message for RenameFolder. This operation is only applicable to a hierarchical namespace enabled bucket. - Hierarchical namespace buckets are in - allowlist preview. name (str): Required. Name of the source folder being renamed. Format: @@ -1309,8 +1292,7 @@ def sample_rename_folder(): An object representing a long-running operation. The result type for the operation will be :class:`google.cloud.storage_control_v2.types.Folder` A folder resource. This resource can only exist in a hierarchical namespace - enabled bucket. Hierarchical namespace buckets are in - allowlist preview. + enabled bucket. """ # Create or coerce a protobuf request object. diff --git a/packages/google-cloud-storage-control/google/cloud/storage_control_v2/services/storage_control/transports/grpc.py b/packages/google-cloud-storage-control/google/cloud/storage_control_v2/services/storage_control/transports/grpc.py index 2a20d44ef2e8..778a994e6b7c 100644 --- a/packages/google-cloud-storage-control/google/cloud/storage_control_v2/services/storage_control/transports/grpc.py +++ b/packages/google-cloud-storage-control/google/cloud/storage_control_v2/services/storage_control/transports/grpc.py @@ -259,7 +259,6 @@ def create_folder( Creates a new folder. This operation is only applicable to a hierarchical namespace enabled bucket. - Hierarchical namespace buckets are in allowlist preview. Returns: Callable[[~.CreateFolderRequest], @@ -287,8 +286,7 @@ def delete_folder( Permanently deletes an empty folder. This operation is only applicable to a hierarchical namespace enabled - bucket. Hierarchical namespace buckets are in allowlist - preview. + bucket. Returns: Callable[[~.DeleteFolderRequest], @@ -316,8 +314,7 @@ def get_folder( Returns metadata for the specified folder. This operation is only applicable to a hierarchical namespace - enabled bucket. Hierarchical namespace buckets are in - allowlist preview. + enabled bucket. 
Returns: Callable[[~.GetFolderRequest], @@ -347,7 +344,6 @@ def list_folders( Retrieves a list of folders. This operation is only applicable to a hierarchical namespace enabled bucket. - Hierarchical namespace buckets are in allowlist preview. Returns: Callable[[~.ListFoldersRequest], @@ -378,7 +374,6 @@ def rename_folder( enabled bucket. During a rename, the source and destination folders are locked until the long running operation completes. - Hierarchical namespace buckets are in allowlist preview. Returns: Callable[[~.RenameFolderRequest], diff --git a/packages/google-cloud-storage-control/google/cloud/storage_control_v2/services/storage_control/transports/grpc_asyncio.py b/packages/google-cloud-storage-control/google/cloud/storage_control_v2/services/storage_control/transports/grpc_asyncio.py index f352d367ee96..8da2e9eb2eed 100644 --- a/packages/google-cloud-storage-control/google/cloud/storage_control_v2/services/storage_control/transports/grpc_asyncio.py +++ b/packages/google-cloud-storage-control/google/cloud/storage_control_v2/services/storage_control/transports/grpc_asyncio.py @@ -267,7 +267,6 @@ def create_folder( Creates a new folder. This operation is only applicable to a hierarchical namespace enabled bucket. - Hierarchical namespace buckets are in allowlist preview. Returns: Callable[[~.CreateFolderRequest], @@ -295,8 +294,7 @@ def delete_folder( Permanently deletes an empty folder. This operation is only applicable to a hierarchical namespace enabled - bucket. Hierarchical namespace buckets are in allowlist - preview. + bucket. Returns: Callable[[~.DeleteFolderRequest], @@ -326,8 +324,7 @@ def get_folder( Returns metadata for the specified folder. This operation is only applicable to a hierarchical namespace - enabled bucket. Hierarchical namespace buckets are in - allowlist preview. + enabled bucket. Returns: Callable[[~.GetFolderRequest], @@ -358,7 +355,6 @@ def list_folders( Retrieves a list of folders. This operation is only applicable to a hierarchical namespace enabled bucket. - Hierarchical namespace buckets are in allowlist preview. Returns: Callable[[~.ListFoldersRequest], @@ -391,7 +387,6 @@ def rename_folder( enabled bucket. During a rename, the source and destination folders are locked until the long running operation completes. - Hierarchical namespace buckets are in allowlist preview. Returns: Callable[[~.RenameFolderRequest], diff --git a/packages/google-cloud-storage-control/google/cloud/storage_control_v2/types/storage_control.py b/packages/google-cloud-storage-control/google/cloud/storage_control_v2/types/storage_control.py index 7d1af2a8b4e6..2f91e06f3de7 100644 --- a/packages/google-cloud-storage-control/google/cloud/storage_control_v2/types/storage_control.py +++ b/packages/google-cloud-storage-control/google/cloud/storage_control_v2/types/storage_control.py @@ -63,7 +63,6 @@ class PendingRenameInfo(proto.Message): class Folder(proto.Message): r"""A folder resource. This resource can only exist in a hierarchical namespace enabled bucket. - Hierarchical namespace buckets are in allowlist preview. Attributes: name (str): @@ -116,7 +115,6 @@ class Folder(proto.Message): class GetFolderRequest(proto.Message): r"""Request message for GetFolder. This operation is only applicable to a hierarchical namespace enabled bucket. - Hierarchical namespace buckets are in allowlist preview. .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields @@ -166,7 +164,6 @@ class GetFolderRequest(proto.Message): class CreateFolderRequest(proto.Message): r"""Request message for CreateFolder. This operation is only applicable to a hierarchical namespace enabled bucket. - Hierarchical namespace buckets are in allowlist preview. Attributes: parent (str): @@ -220,7 +217,6 @@ class CreateFolderRequest(proto.Message): class DeleteFolderRequest(proto.Message): r"""Request message for DeleteFolder. This operation is only applicable to a hierarchical namespace enabled bucket. - Hierarchical namespace buckets are in allowlist preview. .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields @@ -270,7 +266,6 @@ class DeleteFolderRequest(proto.Message): class ListFoldersRequest(proto.Message): r"""Request message for ListFolders. This operation is only applicable to a hierarchical namespace enabled bucket. - Hierarchical namespace buckets are in allowlist preview. Attributes: parent (str): @@ -379,7 +374,6 @@ def raw_page(self): class RenameFolderRequest(proto.Message): r"""Request message for RenameFolder. This operation is only applicable to a hierarchical namespace enabled bucket. - Hierarchical namespace buckets are in allowlist preview. .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields diff --git a/packages/google-cloud-storage-control/samples/generated_samples/snippet_metadata_google.storage.control.v2.json b/packages/google-cloud-storage-control/samples/generated_samples/snippet_metadata_google.storage.control.v2.json index 932c0e2b46a6..79674271bd69 100644 --- a/packages/google-cloud-storage-control/samples/generated_samples/snippet_metadata_google.storage.control.v2.json +++ b/packages/google-cloud-storage-control/samples/generated_samples/snippet_metadata_google.storage.control.v2.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-storage-control", - "version": "0.1.0" + "version": "1.0.1" }, "snippets": [ { diff --git a/packages/google-cloud-storage-control/tests/unit/gapic/storage_control_v2/test_storage_control.py b/packages/google-cloud-storage-control/tests/unit/gapic/storage_control_v2/test_storage_control.py index 0356b87557af..8d39e3bde21a 100644 --- a/packages/google-cloud-storage-control/tests/unit/gapic/storage_control_v2/test_storage_control.py +++ b/packages/google-cloud-storage-control/tests/unit/gapic/storage_control_v2/test_storage_control.py @@ -2599,10 +2599,10 @@ def test_list_folders_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () + expected_metadata = () pager = client.list_folders(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -5127,10 +5127,10 @@ def test_list_managed_folders_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () + expected_metadata = () pager = client.list_managed_folders(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 diff --git a/packages/google-cloud-storage-transfer/tests/unit/gapic/storage_transfer_v1/test_storage_transfer_service.py 
b/packages/google-cloud-storage-transfer/tests/unit/gapic/storage_transfer_v1/test_storage_transfer_service.py index fa13dc7bf49d..44a284ff6023 100644 --- a/packages/google-cloud-storage-transfer/tests/unit/gapic/storage_transfer_v1/test_storage_transfer_service.py +++ b/packages/google-cloud-storage-transfer/tests/unit/gapic/storage_transfer_v1/test_storage_transfer_service.py @@ -2691,10 +2691,10 @@ def test_list_transfer_jobs_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () + expected_metadata = () pager = client.list_transfer_jobs(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -5633,13 +5633,13 @@ def test_list_agent_pools_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("project_id", ""),)), ) pager = client.list_agent_pools(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 diff --git a/packages/google-cloud-storageinsights/tests/unit/gapic/storageinsights_v1/test_storage_insights.py b/packages/google-cloud-storageinsights/tests/unit/gapic/storageinsights_v1/test_storage_insights.py index 193591f8d9a0..adf1268172bc 100644 --- a/packages/google-cloud-storageinsights/tests/unit/gapic/storageinsights_v1/test_storage_insights.py +++ b/packages/google-cloud-storageinsights/tests/unit/gapic/storageinsights_v1/test_storage_insights.py @@ -1589,13 +1589,13 @@ def test_list_report_configs_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_report_configs(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -3778,13 +3778,13 @@ def test_list_report_details_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_report_details(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 diff --git a/packages/google-cloud-support/tests/unit/gapic/support_v2/test_case_attachment_service.py b/packages/google-cloud-support/tests/unit/gapic/support_v2/test_case_attachment_service.py index b106ee203ac4..22bdbec55a2b 100644 --- a/packages/google-cloud-support/tests/unit/gapic/support_v2/test_case_attachment_service.py +++ b/packages/google-cloud-support/tests/unit/gapic/support_v2/test_case_attachment_service.py @@ -1597,13 +1597,13 @@ def test_list_attachments_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_attachments(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 diff --git 
a/packages/google-cloud-support/tests/unit/gapic/support_v2/test_case_service.py b/packages/google-cloud-support/tests/unit/gapic/support_v2/test_case_service.py index 775ebadf1bb0..ccc5cc5d61dd 100644 --- a/packages/google-cloud-support/tests/unit/gapic/support_v2/test_case_service.py +++ b/packages/google-cloud-support/tests/unit/gapic/support_v2/test_case_service.py @@ -1905,13 +1905,13 @@ def test_list_cases_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_cases(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -2386,13 +2386,13 @@ def test_search_cases_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.search_cases(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -4336,10 +4336,10 @@ def test_search_case_classifications_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () + expected_metadata = () pager = client.search_case_classifications(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 diff --git a/packages/google-cloud-support/tests/unit/gapic/support_v2/test_comment_service.py b/packages/google-cloud-support/tests/unit/gapic/support_v2/test_comment_service.py index 0f892255e93a..41008cd952b3 100644 --- a/packages/google-cloud-support/tests/unit/gapic/support_v2/test_comment_service.py +++ b/packages/google-cloud-support/tests/unit/gapic/support_v2/test_comment_service.py @@ -1528,13 +1528,13 @@ def test_list_comments_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_comments(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 diff --git a/packages/google-cloud-talent/tests/unit/gapic/talent_v4/test_company_service.py b/packages/google-cloud-talent/tests/unit/gapic/talent_v4/test_company_service.py index c66e657179b0..03d384b73170 100644 --- a/packages/google-cloud-talent/tests/unit/gapic/talent_v4/test_company_service.py +++ b/packages/google-cloud-talent/tests/unit/gapic/talent_v4/test_company_service.py @@ -3193,13 +3193,13 @@ def test_list_companies_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_companies(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 diff --git a/packages/google-cloud-talent/tests/unit/gapic/talent_v4/test_job_service.py b/packages/google-cloud-talent/tests/unit/gapic/talent_v4/test_job_service.py index 67783112a212..3eea704ecfe9 100644 --- 
a/packages/google-cloud-talent/tests/unit/gapic/talent_v4/test_job_service.py +++ b/packages/google-cloud-talent/tests/unit/gapic/talent_v4/test_job_service.py @@ -4440,13 +4440,13 @@ def test_list_jobs_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_jobs(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 diff --git a/packages/google-cloud-talent/tests/unit/gapic/talent_v4/test_tenant_service.py b/packages/google-cloud-talent/tests/unit/gapic/talent_v4/test_tenant_service.py index 6d02249ffe1a..f5fc634c2d84 100644 --- a/packages/google-cloud-talent/tests/unit/gapic/talent_v4/test_tenant_service.py +++ b/packages/google-cloud-talent/tests/unit/gapic/talent_v4/test_tenant_service.py @@ -3004,13 +3004,13 @@ def test_list_tenants_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_tenants(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 diff --git a/packages/google-cloud-talent/tests/unit/gapic/talent_v4beta1/test_company_service.py b/packages/google-cloud-talent/tests/unit/gapic/talent_v4beta1/test_company_service.py index 0815252839f8..8919975d1d1e 100644 --- a/packages/google-cloud-talent/tests/unit/gapic/talent_v4beta1/test_company_service.py +++ b/packages/google-cloud-talent/tests/unit/gapic/talent_v4beta1/test_company_service.py @@ -3183,13 +3183,13 @@ def test_list_companies_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_companies(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 diff --git a/packages/google-cloud-talent/tests/unit/gapic/talent_v4beta1/test_job_service.py b/packages/google-cloud-talent/tests/unit/gapic/talent_v4beta1/test_job_service.py index 4a43509dc361..09bdd1a9bca5 100644 --- a/packages/google-cloud-talent/tests/unit/gapic/talent_v4beta1/test_job_service.py +++ b/packages/google-cloud-talent/tests/unit/gapic/talent_v4beta1/test_job_service.py @@ -4416,13 +4416,13 @@ def test_list_jobs_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_jobs(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -4912,13 +4912,13 @@ def test_search_jobs_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.search_jobs(request={}) - assert pager._metadata == metadata + assert pager._metadata == 
expected_metadata results = list(pager) assert len(results) == 6 @@ -5433,13 +5433,13 @@ def test_search_jobs_for_alert_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.search_jobs_for_alert(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 diff --git a/packages/google-cloud-talent/tests/unit/gapic/talent_v4beta1/test_tenant_service.py b/packages/google-cloud-talent/tests/unit/gapic/talent_v4beta1/test_tenant_service.py index b19e4cdd3773..deeea4fce794 100644 --- a/packages/google-cloud-talent/tests/unit/gapic/talent_v4beta1/test_tenant_service.py +++ b/packages/google-cloud-talent/tests/unit/gapic/talent_v4beta1/test_tenant_service.py @@ -3054,13 +3054,13 @@ def test_list_tenants_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_tenants(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 diff --git a/packages/google-cloud-tasks/tests/unit/gapic/tasks_v2/test_cloud_tasks.py b/packages/google-cloud-tasks/tests/unit/gapic/tasks_v2/test_cloud_tasks.py index 67e7c4766b92..1363b7028b54 100644 --- a/packages/google-cloud-tasks/tests/unit/gapic/tasks_v2/test_cloud_tasks.py +++ b/packages/google-cloud-tasks/tests/unit/gapic/tasks_v2/test_cloud_tasks.py @@ -1500,13 +1500,13 @@ def test_list_queues_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_queues(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -5842,13 +5842,13 @@ def test_list_tasks_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_tasks(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 diff --git a/packages/google-cloud-tasks/tests/unit/gapic/tasks_v2beta2/test_cloud_tasks.py b/packages/google-cloud-tasks/tests/unit/gapic/tasks_v2beta2/test_cloud_tasks.py index bb656fe604da..4dc63ed5ff9c 100644 --- a/packages/google-cloud-tasks/tests/unit/gapic/tasks_v2beta2/test_cloud_tasks.py +++ b/packages/google-cloud-tasks/tests/unit/gapic/tasks_v2beta2/test_cloud_tasks.py @@ -1501,13 +1501,13 @@ def test_list_queues_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_queues(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -6069,13 +6069,13 @@ def 
test_list_tasks_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_tasks(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 diff --git a/packages/google-cloud-tasks/tests/unit/gapic/tasks_v2beta3/test_cloud_tasks.py b/packages/google-cloud-tasks/tests/unit/gapic/tasks_v2beta3/test_cloud_tasks.py index 02a944a7d0d2..304f7ec6a8cd 100644 --- a/packages/google-cloud-tasks/tests/unit/gapic/tasks_v2beta3/test_cloud_tasks.py +++ b/packages/google-cloud-tasks/tests/unit/gapic/tasks_v2beta3/test_cloud_tasks.py @@ -1500,13 +1500,13 @@ def test_list_queues_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_queues(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -5872,13 +5872,13 @@ def test_list_tasks_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_tasks(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 diff --git a/packages/google-cloud-telcoautomation/tests/unit/gapic/telcoautomation_v1/test_telco_automation.py b/packages/google-cloud-telcoautomation/tests/unit/gapic/telcoautomation_v1/test_telco_automation.py index e558e030dd6e..4a3447d264db 100644 --- a/packages/google-cloud-telcoautomation/tests/unit/gapic/telcoautomation_v1/test_telco_automation.py +++ b/packages/google-cloud-telcoautomation/tests/unit/gapic/telcoautomation_v1/test_telco_automation.py @@ -1597,13 +1597,13 @@ def test_list_orchestration_clusters_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_orchestration_clusters(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -3398,13 +3398,13 @@ def test_list_edge_slms_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_edge_slms(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -6734,13 +6734,13 @@ def test_list_blueprints_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_blueprints(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = 
list(pager) assert len(results) == 6 @@ -8592,13 +8592,13 @@ def test_list_blueprint_revisions_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("name", ""),)), ) pager = client.list_blueprint_revisions(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -9197,13 +9197,13 @@ def test_search_blueprint_revisions_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.search_blueprint_revisions(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -9802,13 +9802,13 @@ def test_search_deployment_revisions_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.search_deployment_revisions(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -10780,13 +10780,13 @@ def test_list_public_blueprints_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_public_blueprints(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -13454,13 +13454,13 @@ def test_list_deployments_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_deployments(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -14041,13 +14041,13 @@ def test_list_deployment_revisions_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("name", ""),)), ) pager = client.list_deployment_revisions(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -16685,13 +16685,13 @@ def test_list_hydrated_deployments_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_hydrated_deployments(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 diff --git a/packages/google-cloud-telcoautomation/tests/unit/gapic/telcoautomation_v1alpha1/test_telco_automation.py 
b/packages/google-cloud-telcoautomation/tests/unit/gapic/telcoautomation_v1alpha1/test_telco_automation.py index b350bc6c6bd9..3fcf5d6e2af5 100644 --- a/packages/google-cloud-telcoautomation/tests/unit/gapic/telcoautomation_v1alpha1/test_telco_automation.py +++ b/packages/google-cloud-telcoautomation/tests/unit/gapic/telcoautomation_v1alpha1/test_telco_automation.py @@ -1597,13 +1597,13 @@ def test_list_orchestration_clusters_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_orchestration_clusters(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -3398,13 +3398,13 @@ def test_list_edge_slms_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_edge_slms(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -6734,13 +6734,13 @@ def test_list_blueprints_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_blueprints(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -8592,13 +8592,13 @@ def test_list_blueprint_revisions_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("name", ""),)), ) pager = client.list_blueprint_revisions(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -9197,13 +9197,13 @@ def test_search_blueprint_revisions_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.search_blueprint_revisions(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -9802,13 +9802,13 @@ def test_search_deployment_revisions_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.search_deployment_revisions(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -10780,13 +10780,13 @@ def test_list_public_blueprints_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = 
client.list_public_blueprints(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -13454,13 +13454,13 @@ def test_list_deployments_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_deployments(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -14041,13 +14041,13 @@ def test_list_deployment_revisions_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("name", ""),)), ) pager = client.list_deployment_revisions(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -16685,13 +16685,13 @@ def test_list_hydrated_deployments_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_hydrated_deployments(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 diff --git a/packages/google-cloud-texttospeech/texttospeech-v1-py.tar.gz b/packages/google-cloud-texttospeech/texttospeech-v1-py.tar.gz new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/packages/google-cloud-tpu/tests/unit/gapic/tpu_v1/test_tpu.py b/packages/google-cloud-tpu/tests/unit/gapic/tpu_v1/test_tpu.py index 970bb1431457..21ea15104f54 100644 --- a/packages/google-cloud-tpu/tests/unit/gapic/tpu_v1/test_tpu.py +++ b/packages/google-cloud-tpu/tests/unit/gapic/tpu_v1/test_tpu.py @@ -1419,13 +1419,13 @@ def test_list_nodes_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_nodes(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -4062,13 +4062,13 @@ def test_list_tensor_flow_versions_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_tensor_flow_versions(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -5060,13 +5060,13 @@ def test_list_accelerator_types_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_accelerator_types(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 diff --git 
a/packages/google-cloud-tpu/tests/unit/gapic/tpu_v2/test_tpu.py b/packages/google-cloud-tpu/tests/unit/gapic/tpu_v2/test_tpu.py index 45a140a360a2..9c77562f3725 100644 --- a/packages/google-cloud-tpu/tests/unit/gapic/tpu_v2/test_tpu.py +++ b/packages/google-cloud-tpu/tests/unit/gapic/tpu_v2/test_tpu.py @@ -1444,13 +1444,13 @@ def test_list_nodes_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_nodes(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -4466,13 +4466,13 @@ def test_list_accelerator_types_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_accelerator_types(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -5463,13 +5463,13 @@ def test_list_runtime_versions_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_runtime_versions(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 diff --git a/packages/google-cloud-tpu/tests/unit/gapic/tpu_v2alpha1/test_tpu.py b/packages/google-cloud-tpu/tests/unit/gapic/tpu_v2alpha1/test_tpu.py index 50b573dacf9d..e431e3370f8d 100644 --- a/packages/google-cloud-tpu/tests/unit/gapic/tpu_v2alpha1/test_tpu.py +++ b/packages/google-cloud-tpu/tests/unit/gapic/tpu_v2alpha1/test_tpu.py @@ -1425,13 +1425,13 @@ def test_list_nodes_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_nodes(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -4148,13 +4148,13 @@ def test_list_queued_resources_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_queued_resources(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -6646,13 +6646,13 @@ def test_list_accelerator_types_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_accelerator_types(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -7643,13 +7643,13 @@ def test_list_runtime_versions_pager(transport_name: str = "grpc"): 
RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_runtime_versions(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 diff --git a/packages/google-cloud-trace/tests/unit/gapic/trace_v1/test_trace_service.py b/packages/google-cloud-trace/tests/unit/gapic/trace_v1/test_trace_service.py index 5c148e6584e6..5c34ab184472 100644 --- a/packages/google-cloud-trace/tests/unit/gapic/trace_v1/test_trace_service.py +++ b/packages/google-cloud-trace/tests/unit/gapic/trace_v1/test_trace_service.py @@ -1510,13 +1510,13 @@ def test_list_traces_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("project_id", ""),)), ) pager = client.list_traces(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 diff --git a/packages/google-cloud-translate/tests/unit/gapic/translate_v3/test_translation_service.py b/packages/google-cloud-translate/tests/unit/gapic/translate_v3/test_translation_service.py index a3e211f5f741..e2335fde626b 100644 --- a/packages/google-cloud-translate/tests/unit/gapic/translate_v3/test_translation_service.py +++ b/packages/google-cloud-translate/tests/unit/gapic/translate_v3/test_translation_service.py @@ -4301,13 +4301,13 @@ def test_list_glossaries_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_glossaries(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -6852,13 +6852,13 @@ def test_list_adaptive_mt_datasets_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_adaptive_mt_datasets(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -9011,13 +9011,13 @@ def test_list_adaptive_mt_files_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_adaptive_mt_files(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -9604,13 +9604,13 @@ def test_list_adaptive_mt_sentences_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_adaptive_mt_sentences(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 diff 
--git a/packages/google-cloud-translate/tests/unit/gapic/translate_v3beta1/test_translation_service.py b/packages/google-cloud-translate/tests/unit/gapic/translate_v3beta1/test_translation_service.py index 4de0a5394140..c8e414db0039 100644 --- a/packages/google-cloud-translate/tests/unit/gapic/translate_v3beta1/test_translation_service.py +++ b/packages/google-cloud-translate/tests/unit/gapic/translate_v3beta1/test_translation_service.py @@ -4173,13 +4173,13 @@ def test_list_glossaries_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_glossaries(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 diff --git a/packages/google-cloud-video-live-stream/tests/unit/gapic/live_stream_v1/test_livestream_service.py b/packages/google-cloud-video-live-stream/tests/unit/gapic/live_stream_v1/test_livestream_service.py index af72a11e8612..5459f09d1e5b 100644 --- a/packages/google-cloud-video-live-stream/tests/unit/gapic/live_stream_v1/test_livestream_service.py +++ b/packages/google-cloud-video-live-stream/tests/unit/gapic/live_stream_v1/test_livestream_service.py @@ -1982,13 +1982,13 @@ def test_list_channels_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_channels(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -4812,13 +4812,13 @@ def test_list_inputs_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_inputs(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -6909,13 +6909,13 @@ def test_list_events_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_events(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -9344,13 +9344,13 @@ def test_list_assets_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_assets(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 diff --git a/packages/google-cloud-video-stitcher/CHANGELOG.md b/packages/google-cloud-video-stitcher/CHANGELOG.md index 7994fdf44764..ccfa80dcc058 100644 --- a/packages/google-cloud-video-stitcher/CHANGELOG.md +++ b/packages/google-cloud-video-stitcher/CHANGELOG.md @@ -1,5 +1,17 @@ # Changelog +## 
[0.7.10](https://github.com/googleapis/google-cloud-python/compare/google-cloud-video-stitcher-v0.7.9...google-cloud-video-stitcher-v0.7.10) (2024-05-27) + + +### Features + +* add apis for Create, Read, Update, Delete for VODConfigs ([d100eea](https://github.com/googleapis/google-cloud-python/commit/d100eea3c950a8ed40ec33230bd2af163616332b)) +* added adtracking to Livesession ([d100eea](https://github.com/googleapis/google-cloud-python/commit/d100eea3c950a8ed40ec33230bd2af163616332b)) +* added fetchoptions with custom headers for Live and VODConfigs ([d100eea](https://github.com/googleapis/google-cloud-python/commit/d100eea3c950a8ed40ec33230bd2af163616332b)) +* added targetting parameter support to Livesession ([d100eea](https://github.com/googleapis/google-cloud-python/commit/d100eea3c950a8ed40ec33230bd2af163616332b)) +* added token config for MediaCdnKey ([d100eea](https://github.com/googleapis/google-cloud-python/commit/d100eea3c950a8ed40ec33230bd2af163616332b)) +* allowed usage for VODConfigs in VODSession ([d100eea](https://github.com/googleapis/google-cloud-python/commit/d100eea3c950a8ed40ec33230bd2af163616332b)) + ## [0.7.9](https://github.com/googleapis/google-cloud-python/compare/google-cloud-video-stitcher-v0.7.8...google-cloud-video-stitcher-v0.7.9) (2024-03-05) diff --git a/packages/google-cloud-video-stitcher/google/cloud/video/stitcher/__init__.py b/packages/google-cloud-video-stitcher/google/cloud/video/stitcher/__init__.py index 87c8901c0272..6b287af96dba 100644 --- a/packages/google-cloud-video-stitcher/google/cloud/video/stitcher/__init__.py +++ b/packages/google-cloud-video-stitcher/google/cloud/video/stitcher/__init__.py @@ -45,6 +45,7 @@ StaticAdResource, ) from google.cloud.video.stitcher_v1.types.events import Event, ProgressEvent +from google.cloud.video.stitcher_v1.types.fetch_options import FetchOptions from google.cloud.video.stitcher_v1.types.live_configs import ( AdTracking, GamLiveConfig, @@ -71,16 +72,19 @@ CreateLiveConfigRequest, CreateLiveSessionRequest, CreateSlateRequest, + CreateVodConfigRequest, CreateVodSessionRequest, DeleteCdnKeyRequest, DeleteLiveConfigRequest, DeleteSlateRequest, + DeleteVodConfigRequest, GetCdnKeyRequest, GetLiveAdTagDetailRequest, GetLiveConfigRequest, GetLiveSessionRequest, GetSlateRequest, GetVodAdTagDetailRequest, + GetVodConfigRequest, GetVodSessionRequest, GetVodStitchDetailRequest, ListCdnKeysRequest, @@ -93,12 +97,17 @@ ListSlatesResponse, ListVodAdTagDetailsRequest, ListVodAdTagDetailsResponse, + ListVodConfigsRequest, + ListVodConfigsResponse, ListVodStitchDetailsRequest, ListVodStitchDetailsResponse, OperationMetadata, UpdateCdnKeyRequest, + UpdateLiveConfigRequest, UpdateSlateRequest, + UpdateVodConfigRequest, ) +from google.cloud.video.stitcher_v1.types.vod_configs import GamVodConfig, VodConfig __all__ = ( "VideoStitcherServiceClient", @@ -119,6 +128,7 @@ "StaticAdResource", "Event", "ProgressEvent", + "FetchOptions", "GamLiveConfig", "LiveConfig", "PrefetchConfig", @@ -138,16 +148,19 @@ "CreateLiveConfigRequest", "CreateLiveSessionRequest", "CreateSlateRequest", + "CreateVodConfigRequest", 
"CreateVodSessionRequest", "DeleteCdnKeyRequest", "DeleteLiveConfigRequest", "DeleteSlateRequest", + "DeleteVodConfigRequest", "GetCdnKeyRequest", "GetLiveAdTagDetailRequest", "GetLiveConfigRequest", "GetLiveSessionRequest", "GetSlateRequest", "GetVodAdTagDetailRequest", + "GetVodConfigRequest", "GetVodSessionRequest", "GetVodStitchDetailRequest", "ListCdnKeysRequest", @@ -160,9 +173,15 @@ "ListSlatesResponse", "ListVodAdTagDetailsRequest", "ListVodAdTagDetailsResponse", + "ListVodConfigsRequest", + "ListVodConfigsResponse", "ListVodStitchDetailsRequest", "ListVodStitchDetailsResponse", "OperationMetadata", "UpdateCdnKeyRequest", + "UpdateLiveConfigRequest", "UpdateSlateRequest", + "UpdateVodConfigRequest", + "GamVodConfig", + "VodConfig", ) diff --git a/packages/google-cloud-video-stitcher/google/cloud/video/stitcher_v1/__init__.py b/packages/google-cloud-video-stitcher/google/cloud/video/stitcher_v1/__init__.py index 851d7ddfe301..720df0f1ed5c 100644 --- a/packages/google-cloud-video-stitcher/google/cloud/video/stitcher_v1/__init__.py +++ b/packages/google-cloud-video-stitcher/google/cloud/video/stitcher_v1/__init__.py @@ -38,6 +38,7 @@ StaticAdResource, ) from .types.events import Event, ProgressEvent +from .types.fetch_options import FetchOptions from .types.live_configs import AdTracking, GamLiveConfig, LiveConfig, PrefetchConfig from .types.sessions import ( Interstitials, @@ -56,16 +57,19 @@ CreateLiveConfigRequest, CreateLiveSessionRequest, CreateSlateRequest, + CreateVodConfigRequest, CreateVodSessionRequest, DeleteCdnKeyRequest, DeleteLiveConfigRequest, DeleteSlateRequest, + DeleteVodConfigRequest, GetCdnKeyRequest, GetLiveAdTagDetailRequest, GetLiveConfigRequest, GetLiveSessionRequest, GetSlateRequest, GetVodAdTagDetailRequest, + GetVodConfigRequest, GetVodSessionRequest, GetVodStitchDetailRequest, ListCdnKeysRequest, @@ -78,12 +82,17 @@ ListSlatesResponse, ListVodAdTagDetailsRequest, ListVodAdTagDetailsResponse, + ListVodConfigsRequest, + ListVodConfigsResponse, ListVodStitchDetailsRequest, ListVodStitchDetailsResponse, OperationMetadata, UpdateCdnKeyRequest, + UpdateLiveConfigRequest, UpdateSlateRequest, + UpdateVodConfigRequest, ) +from .types.vod_configs import GamVodConfig, VodConfig __all__ = ( "VideoStitcherServiceAsyncClient", @@ -98,18 +107,23 @@ "CreateLiveConfigRequest", "CreateLiveSessionRequest", "CreateSlateRequest", + "CreateVodConfigRequest", "CreateVodSessionRequest", "DeleteCdnKeyRequest", "DeleteLiveConfigRequest", "DeleteSlateRequest", + "DeleteVodConfigRequest", "Event", + "FetchOptions", "GamLiveConfig", + "GamVodConfig", "GetCdnKeyRequest", "GetLiveAdTagDetailRequest", "GetLiveConfigRequest", "GetLiveSessionRequest", "GetSlateRequest", "GetVodAdTagDetailRequest", + "GetVodConfigRequest", "GetVodSessionRequest", "GetVodStitchDetailRequest", "GoogleCdnKey", @@ -126,6 +140,8 @@ "ListSlatesResponse", "ListVodAdTagDetailsRequest", "ListVodAdTagDetailsResponse", + "ListVodConfigsRequest", + "ListVodConfigsResponse", "ListVodStitchDetailsRequest", "ListVodStitchDetailsResponse", "LiveAdTagDetail", @@ -142,9 +158,12 @@ "Slate", "StaticAdResource", "UpdateCdnKeyRequest", + "UpdateLiveConfigRequest", "UpdateSlateRequest", + "UpdateVodConfigRequest", "VideoStitcherServiceClient", "VodAdTagDetail", + "VodConfig", "VodSession", "VodSessionAd", "VodSessionAdBreak", diff --git a/packages/google-cloud-video-stitcher/google/cloud/video/stitcher_v1/gapic_metadata.json b/packages/google-cloud-video-stitcher/google/cloud/video/stitcher_v1/gapic_metadata.json index 
647665de41ca..51d9ffec66d1 100644 --- a/packages/google-cloud-video-stitcher/google/cloud/video/stitcher_v1/gapic_metadata.json +++ b/packages/google-cloud-video-stitcher/google/cloud/video/stitcher_v1/gapic_metadata.json @@ -30,6 +30,11 @@ "create_slate" ] }, + "CreateVodConfig": { + "methods": [ + "create_vod_config" + ] + }, "CreateVodSession": { "methods": [ "create_vod_session" @@ -50,6 +55,11 @@ "delete_slate" ] }, + "DeleteVodConfig": { + "methods": [ + "delete_vod_config" + ] + }, "GetCdnKey": { "methods": [ "get_cdn_key" @@ -80,6 +90,11 @@ "get_vod_ad_tag_detail" ] }, + "GetVodConfig": { + "methods": [ + "get_vod_config" + ] + }, "GetVodSession": { "methods": [ "get_vod_session" @@ -115,6 +130,11 @@ "list_vod_ad_tag_details" ] }, + "ListVodConfigs": { + "methods": [ + "list_vod_configs" + ] + }, "ListVodStitchDetails": { "methods": [ "list_vod_stitch_details" @@ -125,10 +145,20 @@ "update_cdn_key" ] }, + "UpdateLiveConfig": { + "methods": [ + "update_live_config" + ] + }, "UpdateSlate": { "methods": [ "update_slate" ] + }, + "UpdateVodConfig": { + "methods": [ + "update_vod_config" + ] } } }, @@ -155,6 +185,11 @@ "create_slate" ] }, + "CreateVodConfig": { + "methods": [ + "create_vod_config" + ] + }, "CreateVodSession": { "methods": [ "create_vod_session" @@ -175,6 +210,11 @@ "delete_slate" ] }, + "DeleteVodConfig": { + "methods": [ + "delete_vod_config" + ] + }, "GetCdnKey": { "methods": [ "get_cdn_key" @@ -205,6 +245,11 @@ "get_vod_ad_tag_detail" ] }, + "GetVodConfig": { + "methods": [ + "get_vod_config" + ] + }, "GetVodSession": { "methods": [ "get_vod_session" @@ -240,6 +285,11 @@ "list_vod_ad_tag_details" ] }, + "ListVodConfigs": { + "methods": [ + "list_vod_configs" + ] + }, "ListVodStitchDetails": { "methods": [ "list_vod_stitch_details" @@ -250,10 +300,20 @@ "update_cdn_key" ] }, + "UpdateLiveConfig": { + "methods": [ + "update_live_config" + ] + }, "UpdateSlate": { "methods": [ "update_slate" ] + }, + "UpdateVodConfig": { + "methods": [ + "update_vod_config" + ] } } } diff --git a/packages/google-cloud-video-stitcher/google/cloud/video/stitcher_v1/services/video_stitcher_service/async_client.py b/packages/google-cloud-video-stitcher/google/cloud/video/stitcher_v1/services/video_stitcher_service/async_client.py index c88badcb30cb..84ddc9126f64 100644 --- a/packages/google-cloud-video-stitcher/google/cloud/video/stitcher_v1/services/video_stitcher_service/async_client.py +++ b/packages/google-cloud-video-stitcher/google/cloud/video/stitcher_v1/services/video_stitcher_service/async_client.py @@ -53,11 +53,13 @@ from google.cloud.video.stitcher_v1.types import ( ad_tag_details, cdn_keys, + fetch_options, live_configs, sessions, slates, stitch_details, video_stitcher_service, + vod_configs, ) from .client import VideoStitcherServiceClient @@ -106,6 +108,10 @@ class VideoStitcherServiceAsyncClient: parse_vod_ad_tag_detail_path = staticmethod( VideoStitcherServiceClient.parse_vod_ad_tag_detail_path ) + vod_config_path = staticmethod(VideoStitcherServiceClient.vod_config_path) + parse_vod_config_path = staticmethod( + VideoStitcherServiceClient.parse_vod_config_path + ) vod_session_path = staticmethod(VideoStitcherServiceClient.vod_session_path) parse_vod_session_path = staticmethod( VideoStitcherServiceClient.parse_vod_session_path @@ -998,8 +1004,6 @@ async def sample_create_vod_session(): # Initialize request argument(s) vod_session = stitcher_v1.VodSession() - vod_session.source_uri = "source_uri_value" - vod_session.ad_tag_uri = "ad_tag_uri_value" vod_session.ad_tracking = 
"SERVER" request = stitcher_v1.CreateVodSessionRequest( @@ -1387,7 +1391,7 @@ async def sample_get_vod_stitch_detail(): Returns: google.cloud.video.stitcher_v1.types.VodStitchDetail: - Detailed information related to the + Information related to the interstitial of a VOD session. This resource is only available for VOD sessions that do not implement Google Ad @@ -1857,8 +1861,11 @@ async def sample_get_live_ad_tag_detail(): Returns: google.cloud.video.stitcher_v1.types.LiveAdTagDetail: - Container for a live session's ad tag - detail. + Information related to the details + for one ad tag. This resource is only + available for live sessions that do not + implement Google Ad Manager ad + insertion. """ # Create or coerce a protobuf request object. @@ -3296,6 +3303,805 @@ async def sample_delete_live_config(): # Done; return the response. return response + async def update_live_config( + self, + request: Optional[ + Union[video_stitcher_service.UpdateLiveConfigRequest, dict] + ] = None, + *, + live_config: Optional[live_configs.LiveConfig] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Updates the specified LiveConfig. Only update fields + specified in the call method body. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud.video import stitcher_v1 + + async def sample_update_live_config(): + # Create a client + client = stitcher_v1.VideoStitcherServiceAsyncClient() + + # Initialize request argument(s) + live_config = stitcher_v1.LiveConfig() + live_config.source_uri = "source_uri_value" + live_config.ad_tracking = "SERVER" + + request = stitcher_v1.UpdateLiveConfigRequest( + live_config=live_config, + ) + + # Make the request + operation = client.update_live_config(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.video.stitcher_v1.types.UpdateLiveConfigRequest, dict]]): + The request object. Request message for + VideoStitcherService.updateLiveConfig. + live_config (:class:`google.cloud.video.stitcher_v1.types.LiveConfig`): + Required. The LiveConfig resource + which replaces the resource on the + server. + + This corresponds to the ``live_config`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): + Required. The update mask applies to the resource. For + the ``FieldMask`` definition, see + https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#fieldmask + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. 
+ timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.video.stitcher_v1.types.LiveConfig` + Metadata for used to register live configs. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([live_config, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, video_stitcher_service.UpdateLiveConfigRequest): + request = video_stitcher_service.UpdateLiveConfigRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if live_config is not None: + request.live_config = live_config + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.update_live_config + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("live_config.name", request.live_config.name),) + ), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + live_configs.LiveConfig, + metadata_type=video_stitcher_service.OperationMetadata, + ) + + # Done; return the response. + return response + + async def create_vod_config( + self, + request: Optional[ + Union[video_stitcher_service.CreateVodConfigRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + vod_config: Optional[vod_configs.VodConfig] = None, + vod_config_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Registers the VOD config with the provided unique ID + in the specified region. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud.video import stitcher_v1 + + async def sample_create_vod_config(): + # Create a client + client = stitcher_v1.VideoStitcherServiceAsyncClient() + + # Initialize request argument(s) + vod_config = stitcher_v1.VodConfig() + vod_config.source_uri = "source_uri_value" + vod_config.ad_tag_uri = "ad_tag_uri_value" + + request = stitcher_v1.CreateVodConfigRequest( + parent="parent_value", + vod_config_id="vod_config_id_value", + vod_config=vod_config, + ) + + # Make the request + operation = client.create_vod_config(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.video.stitcher_v1.types.CreateVodConfigRequest, dict]]): + The request object. Request message for + VideoStitcherService.createVodConfig + parent (:class:`str`): + Required. The project in which the VOD config should be + created, in the form of + ``projects/{project_number}/locations/{location}``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + vod_config (:class:`google.cloud.video.stitcher_v1.types.VodConfig`): + Required. The VOD config resource to + create. + + This corresponds to the ``vod_config`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + vod_config_id (:class:`str`): + Required. The unique identifier ID to + use for the VOD config. + + This corresponds to the ``vod_config_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.video.stitcher_v1.types.VodConfig` + Metadata used to register VOD configs. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, vod_config, vod_config_id]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, video_stitcher_service.CreateVodConfigRequest): + request = video_stitcher_service.CreateVodConfigRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if vod_config is not None: + request.vod_config = vod_config + if vod_config_id is not None: + request.vod_config_id = vod_config_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = self._client._transport._wrapped_methods[ + self._client._transport.create_vod_config + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + vod_configs.VodConfig, + metadata_type=video_stitcher_service.OperationMetadata, + ) + + # Done; return the response. + return response + + async def list_vod_configs( + self, + request: Optional[ + Union[video_stitcher_service.ListVodConfigsRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListVodConfigsAsyncPager: + r"""Lists all VOD configs managed by the Video Stitcher + API that belong to the specified project and region. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud.video import stitcher_v1 + + async def sample_list_vod_configs(): + # Create a client + client = stitcher_v1.VideoStitcherServiceAsyncClient() + + # Initialize request argument(s) + request = stitcher_v1.ListVodConfigsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_vod_configs(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.video.stitcher_v1.types.ListVodConfigsRequest, dict]]): + The request object. Request message for + VideoStitcherService.listVodConfig. + parent (:class:`str`): + Required. The project that contains the list of VOD + configs, in the form of + ``projects/{project_number}/locations/{location}``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.video.stitcher_v1.services.video_stitcher_service.pagers.ListVodConfigsAsyncPager: + Response message for + VideoStitcher.ListVodConfig. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, video_stitcher_service.ListVodConfigsRequest): + request = video_stitcher_service.ListVodConfigsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.list_vod_configs + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListVodConfigsAsyncPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_vod_config( + self, + request: Optional[ + Union[video_stitcher_service.GetVodConfigRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> vod_configs.VodConfig: + r"""Returns the specified VOD config managed by the Video + Stitcher API service. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud.video import stitcher_v1 + + async def sample_get_vod_config(): + # Create a client + client = stitcher_v1.VideoStitcherServiceAsyncClient() + + # Initialize request argument(s) + request = stitcher_v1.GetVodConfigRequest( + name="name_value", + ) + + # Make the request + response = await client.get_vod_config(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.video.stitcher_v1.types.GetVodConfigRequest, dict]]): + The request object. Request message for + VideoStitcherService.getVodConfig. + name (:class:`str`): + Required. The name of the VOD config to be retrieved, in + the form of + ``projects/{project_number}/locations/{location}/vodConfigs/{id}``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.video.stitcher_v1.types.VodConfig: + Metadata used to register VOD + configs. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, video_stitcher_service.GetVodConfigRequest): + request = video_stitcher_service.GetVodConfigRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.get_vod_config + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def delete_vod_config( + self, + request: Optional[ + Union[video_stitcher_service.DeleteVodConfigRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Deletes the specified VOD config. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud.video import stitcher_v1 + + async def sample_delete_vod_config(): + # Create a client + client = stitcher_v1.VideoStitcherServiceAsyncClient() + + # Initialize request argument(s) + request = stitcher_v1.DeleteVodConfigRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_vod_config(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.video.stitcher_v1.types.DeleteVodConfigRequest, dict]]): + The request object. Request message for + VideoStitcherService.deleteVodConfig. + name (:class:`str`): + Required. The name of the VOD config to be deleted, in + the form of + ``projects/{project_number}/locations/{location}/vodConfigs/{id}``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. A typical example is to + use it as the request or the response type of an API + method. For instance: + + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); + + } + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, video_stitcher_service.DeleteVodConfigRequest): + request = video_stitcher_service.DeleteVodConfigRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.delete_vod_config + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + empty_pb2.Empty, + metadata_type=video_stitcher_service.OperationMetadata, + ) + + # Done; return the response. + return response + + async def update_vod_config( + self, + request: Optional[ + Union[video_stitcher_service.UpdateVodConfigRequest, dict] + ] = None, + *, + vod_config: Optional[vod_configs.VodConfig] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Updates the specified VOD config. Only update fields + specified in the call method body. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud.video import stitcher_v1 + + async def sample_update_vod_config(): + # Create a client + client = stitcher_v1.VideoStitcherServiceAsyncClient() + + # Initialize request argument(s) + vod_config = stitcher_v1.VodConfig() + vod_config.source_uri = "source_uri_value" + vod_config.ad_tag_uri = "ad_tag_uri_value" + + request = stitcher_v1.UpdateVodConfigRequest( + vod_config=vod_config, + ) + + # Make the request + operation = client.update_vod_config(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.video.stitcher_v1.types.UpdateVodConfigRequest, dict]]): + The request object. Request message for + VideoStitcherService.updateVodConfig. + vod_config (:class:`google.cloud.video.stitcher_v1.types.VodConfig`): + Required. The VOD config resource + which replaces the resource on the + server. + + This corresponds to the ``vod_config`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): + Required. The update mask applies to the resource. For + the ``FieldMask`` definition, see + https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#fieldmask + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.video.stitcher_v1.types.VodConfig` + Metadata used to register VOD configs. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([vod_config, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, video_stitcher_service.UpdateVodConfigRequest): + request = video_stitcher_service.UpdateVodConfigRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if vod_config is not None: + request.vod_config = vod_config + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.update_vod_config + ] + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("vod_config.name", request.vod_config.name),) + ), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + vod_configs.VodConfig, + metadata_type=video_stitcher_service.OperationMetadata, + ) + + # Done; return the response. + return response + async def list_operations( self, request: Optional[operations_pb2.ListOperationsRequest] = None, diff --git a/packages/google-cloud-video-stitcher/google/cloud/video/stitcher_v1/services/video_stitcher_service/client.py b/packages/google-cloud-video-stitcher/google/cloud/video/stitcher_v1/services/video_stitcher_service/client.py index a92a903d7909..fb8777b4a272 100644 --- a/packages/google-cloud-video-stitcher/google/cloud/video/stitcher_v1/services/video_stitcher_service/client.py +++ b/packages/google-cloud-video-stitcher/google/cloud/video/stitcher_v1/services/video_stitcher_service/client.py @@ -58,11 +58,13 @@ from google.cloud.video.stitcher_v1.types import ( ad_tag_details, cdn_keys, + fetch_options, live_configs, sessions, slates, stitch_details, video_stitcher_service, + vod_configs, ) from .transports.base import DEFAULT_CLIENT_INFO, VideoStitcherServiceTransport @@ -338,6 +340,28 @@ def parse_vod_ad_tag_detail_path(path: str) -> Dict[str, str]: ) return m.groupdict() if m else {} + @staticmethod + def vod_config_path( + project: str, + location: str, + vod_config: str, + ) -> str: + """Returns a fully-qualified vod_config string.""" + return "projects/{project}/locations/{location}/vodConfigs/{vod_config}".format( + project=project, + location=location, + vod_config=vod_config, + ) + + @staticmethod + def parse_vod_config_path(path: str) -> Dict[str, str]: + """Parses a vod_config path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/vodConfigs/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + @staticmethod def vod_session_path( project: str, @@ -1533,8 +1557,6 @@ def sample_create_vod_session(): # Initialize request argument(s) vod_session = stitcher_v1.VodSession() - vod_session.source_uri = "source_uri_value" - vod_session.ad_tag_uri = "ad_tag_uri_value" vod_session.ad_tracking = "SERVER" request = stitcher_v1.CreateVodSessionRequest( @@ -1913,7 +1935,7 @@ def sample_get_vod_stitch_detail(): Returns: google.cloud.video.stitcher_v1.types.VodStitchDetail: - Detailed information related to the + Information related to the interstitial of a VOD session. This resource is only available for VOD sessions that do not implement Google Ad @@ -2371,8 +2393,11 @@ def sample_get_live_ad_tag_detail(): Returns: google.cloud.video.stitcher_v1.types.LiveAdTagDetail: - Container for a live session's ad tag - detail. + Information related to the details + for one ad tag. This resource is only + available for live sessions that do not + implement Google Ad Manager ad + insertion. """ # Create or coerce a protobuf request object. @@ -3774,6 +3799,787 @@ def sample_delete_live_config(): # Done; return the response. 
return response + def update_live_config( + self, + request: Optional[ + Union[video_stitcher_service.UpdateLiveConfigRequest, dict] + ] = None, + *, + live_config: Optional[live_configs.LiveConfig] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Updates the specified LiveConfig. Only update fields + specified in the call method body. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud.video import stitcher_v1 + + def sample_update_live_config(): + # Create a client + client = stitcher_v1.VideoStitcherServiceClient() + + # Initialize request argument(s) + live_config = stitcher_v1.LiveConfig() + live_config.source_uri = "source_uri_value" + live_config.ad_tracking = "SERVER" + + request = stitcher_v1.UpdateLiveConfigRequest( + live_config=live_config, + ) + + # Make the request + operation = client.update_live_config(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.video.stitcher_v1.types.UpdateLiveConfigRequest, dict]): + The request object. Request message for + VideoStitcherService.updateLiveConfig. + live_config (google.cloud.video.stitcher_v1.types.LiveConfig): + Required. The LiveConfig resource + which replaces the resource on the + server. + + This corresponds to the ``live_config`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Required. The update mask applies to the resource. For + the ``FieldMask`` definition, see + https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#fieldmask + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.video.stitcher_v1.types.LiveConfig` + Metadata for used to register live configs. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([live_config, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." 
+ ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, video_stitcher_service.UpdateLiveConfigRequest): + request = video_stitcher_service.UpdateLiveConfigRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if live_config is not None: + request.live_config = live_config + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update_live_config] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("live_config.name", request.live_config.name),) + ), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + live_configs.LiveConfig, + metadata_type=video_stitcher_service.OperationMetadata, + ) + + # Done; return the response. + return response + + def create_vod_config( + self, + request: Optional[ + Union[video_stitcher_service.CreateVodConfigRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + vod_config: Optional[vod_configs.VodConfig] = None, + vod_config_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Registers the VOD config with the provided unique ID + in the specified region. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud.video import stitcher_v1 + + def sample_create_vod_config(): + # Create a client + client = stitcher_v1.VideoStitcherServiceClient() + + # Initialize request argument(s) + vod_config = stitcher_v1.VodConfig() + vod_config.source_uri = "source_uri_value" + vod_config.ad_tag_uri = "ad_tag_uri_value" + + request = stitcher_v1.CreateVodConfigRequest( + parent="parent_value", + vod_config_id="vod_config_id_value", + vod_config=vod_config, + ) + + # Make the request + operation = client.create_vod_config(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.video.stitcher_v1.types.CreateVodConfigRequest, dict]): + The request object. Request message for + VideoStitcherService.createVodConfig + parent (str): + Required. The project in which the VOD config should be + created, in the form of + ``projects/{project_number}/locations/{location}``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ vod_config (google.cloud.video.stitcher_v1.types.VodConfig): + Required. The VOD config resource to + create. + + This corresponds to the ``vod_config`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + vod_config_id (str): + Required. The unique identifier ID to + use for the VOD config. + + This corresponds to the ``vod_config_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.video.stitcher_v1.types.VodConfig` + Metadata used to register VOD configs. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, vod_config, vod_config_id]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, video_stitcher_service.CreateVodConfigRequest): + request = video_stitcher_service.CreateVodConfigRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if vod_config is not None: + request.vod_config = vod_config + if vod_config_id is not None: + request.vod_config_id = vod_config_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_vod_config] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + vod_configs.VodConfig, + metadata_type=video_stitcher_service.OperationMetadata, + ) + + # Done; return the response. + return response + + def list_vod_configs( + self, + request: Optional[ + Union[video_stitcher_service.ListVodConfigsRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListVodConfigsPager: + r"""Lists all VOD configs managed by the Video Stitcher + API that belong to the specified project and region. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud.video import stitcher_v1 + + def sample_list_vod_configs(): + # Create a client + client = stitcher_v1.VideoStitcherServiceClient() + + # Initialize request argument(s) + request = stitcher_v1.ListVodConfigsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_vod_configs(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.video.stitcher_v1.types.ListVodConfigsRequest, dict]): + The request object. Request message for + VideoStitcherService.listVodConfig. + parent (str): + Required. The project that contains the list of VOD + configs, in the form of + ``projects/{project_number}/locations/{location}``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.video.stitcher_v1.services.video_stitcher_service.pagers.ListVodConfigsPager: + Response message for + VideoStitcher.ListVodConfig. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, video_stitcher_service.ListVodConfigsRequest): + request = video_stitcher_service.ListVodConfigsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_vod_configs] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListVodConfigsPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. 
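+        # Note (illustrative, not generated code): the pager fetches further
+        # pages lazily while it is iterated; a minimal usage sketch with
+        # hypothetical resource names:
+        #
+        #   parent = "projects/my-project/locations/us-central1"
+        #   for vod_config in client.list_vod_configs(parent=parent):
+        #       print(vod_config.name)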
+ return response + + def get_vod_config( + self, + request: Optional[ + Union[video_stitcher_service.GetVodConfigRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> vod_configs.VodConfig: + r"""Returns the specified VOD config managed by the Video + Stitcher API service. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud.video import stitcher_v1 + + def sample_get_vod_config(): + # Create a client + client = stitcher_v1.VideoStitcherServiceClient() + + # Initialize request argument(s) + request = stitcher_v1.GetVodConfigRequest( + name="name_value", + ) + + # Make the request + response = client.get_vod_config(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.video.stitcher_v1.types.GetVodConfigRequest, dict]): + The request object. Request message for + VideoStitcherService.getVodConfig. + name (str): + Required. The name of the VOD config to be retrieved, in + the form of + ``projects/{project_number}/locations/{location}/vodConfigs/{id}``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.video.stitcher_v1.types.VodConfig: + Metadata used to register VOD + configs. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, video_stitcher_service.GetVodConfigRequest): + request = video_stitcher_service.GetVodConfigRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_vod_config] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
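+        # Note (illustrative, not generated code): unlike the create, update and
+        # delete RPCs, this call is not a long-running operation; `response` is
+        # the VodConfig message itself, so fields such as response.source_uri and
+        # response.ad_tag_uri can be read directly.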
+ return response + + def delete_vod_config( + self, + request: Optional[ + Union[video_stitcher_service.DeleteVodConfigRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Deletes the specified VOD config. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud.video import stitcher_v1 + + def sample_delete_vod_config(): + # Create a client + client = stitcher_v1.VideoStitcherServiceClient() + + # Initialize request argument(s) + request = stitcher_v1.DeleteVodConfigRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_vod_config(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.video.stitcher_v1.types.DeleteVodConfigRequest, dict]): + The request object. Request message for + VideoStitcherService.deleteVodConfig. + name (str): + Required. The name of the VOD config to be deleted, in + the form of + ``projects/{project_number}/locations/{location}/vodConfigs/{id}``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. A typical example is to + use it as the request or the response type of an API + method. For instance: + + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); + + } + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, video_stitcher_service.DeleteVodConfigRequest): + request = video_stitcher_service.DeleteVodConfigRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
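+        # Note (illustrative, not generated code): the wrapped method returns a
+        # long-running operation whose result type is google.protobuf.empty_pb2.Empty,
+        # so callers typically just wait on operation.result() to confirm deletion.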
+ rpc = self._transport._wrapped_methods[self._transport.delete_vod_config] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + empty_pb2.Empty, + metadata_type=video_stitcher_service.OperationMetadata, + ) + + # Done; return the response. + return response + + def update_vod_config( + self, + request: Optional[ + Union[video_stitcher_service.UpdateVodConfigRequest, dict] + ] = None, + *, + vod_config: Optional[vod_configs.VodConfig] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Updates the specified VOD config. Only update fields + specified in the call method body. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud.video import stitcher_v1 + + def sample_update_vod_config(): + # Create a client + client = stitcher_v1.VideoStitcherServiceClient() + + # Initialize request argument(s) + vod_config = stitcher_v1.VodConfig() + vod_config.source_uri = "source_uri_value" + vod_config.ad_tag_uri = "ad_tag_uri_value" + + request = stitcher_v1.UpdateVodConfigRequest( + vod_config=vod_config, + ) + + # Make the request + operation = client.update_vod_config(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.video.stitcher_v1.types.UpdateVodConfigRequest, dict]): + The request object. Request message for + VideoStitcherService.updateVodConfig. + vod_config (google.cloud.video.stitcher_v1.types.VodConfig): + Required. The VOD config resource + which replaces the resource on the + server. + + This corresponds to the ``vod_config`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Required. The update mask applies to the resource. For + the ``FieldMask`` definition, see + https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#fieldmask + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.video.stitcher_v1.types.VodConfig` + Metadata used to register VOD configs. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([vod_config, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, video_stitcher_service.UpdateVodConfigRequest): + request = video_stitcher_service.UpdateVodConfigRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if vod_config is not None: + request.vod_config = vod_config + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update_vod_config] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("vod_config.name", request.vod_config.name),) + ), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + vod_configs.VodConfig, + metadata_type=video_stitcher_service.OperationMetadata, + ) + + # Done; return the response. + return response + def __enter__(self) -> "VideoStitcherServiceClient": return self diff --git a/packages/google-cloud-video-stitcher/google/cloud/video/stitcher_v1/services/video_stitcher_service/pagers.py b/packages/google-cloud-video-stitcher/google/cloud/video/stitcher_v1/services/video_stitcher_service/pagers.py index cbb55b7b4d6b..36589bc563f6 100644 --- a/packages/google-cloud-video-stitcher/google/cloud/video/stitcher_v1/services/video_stitcher_service/pagers.py +++ b/packages/google-cloud-video-stitcher/google/cloud/video/stitcher_v1/services/video_stitcher_service/pagers.py @@ -31,6 +31,7 @@ slates, stitch_details, video_stitcher_service, + vod_configs, ) @@ -816,3 +817,133 @@ async def async_generator(): def __repr__(self) -> str: return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListVodConfigsPager: + """A pager for iterating through ``list_vod_configs`` requests. + + This class thinly wraps an initial + :class:`google.cloud.video.stitcher_v1.types.ListVodConfigsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``vod_configs`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListVodConfigs`` requests and continue to iterate + through the ``vod_configs`` field on the + corresponding responses. + + All the usual :class:`google.cloud.video.stitcher_v1.types.ListVodConfigsResponse` + attributes are available on the pager. 
If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., video_stitcher_service.ListVodConfigsResponse], + request: video_stitcher_service.ListVodConfigsRequest, + response: video_stitcher_service.ListVodConfigsResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.video.stitcher_v1.types.ListVodConfigsRequest): + The initial request object. + response (google.cloud.video.stitcher_v1.types.ListVodConfigsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = video_stitcher_service.ListVodConfigsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[video_stitcher_service.ListVodConfigsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[vod_configs.VodConfig]: + for page in self.pages: + yield from page.vod_configs + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListVodConfigsAsyncPager: + """A pager for iterating through ``list_vod_configs`` requests. + + This class thinly wraps an initial + :class:`google.cloud.video.stitcher_v1.types.ListVodConfigsResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``vod_configs`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListVodConfigs`` requests and continue to iterate + through the ``vod_configs`` field on the + corresponding responses. + + All the usual :class:`google.cloud.video.stitcher_v1.types.ListVodConfigsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., Awaitable[video_stitcher_service.ListVodConfigsResponse]], + request: video_stitcher_service.ListVodConfigsRequest, + response: video_stitcher_service.ListVodConfigsResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.video.stitcher_v1.types.ListVodConfigsRequest): + The initial request object. + response (google.cloud.video.stitcher_v1.types.ListVodConfigsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = video_stitcher_service.ListVodConfigsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages( + self, + ) -> AsyncIterator[video_stitcher_service.ListVodConfigsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + + def __aiter__(self) -> AsyncIterator[vod_configs.VodConfig]: + async def async_generator(): + async for page in self.pages: + for response in page.vod_configs: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) diff --git a/packages/google-cloud-video-stitcher/google/cloud/video/stitcher_v1/services/video_stitcher_service/transports/base.py b/packages/google-cloud-video-stitcher/google/cloud/video/stitcher_v1/services/video_stitcher_service/transports/base.py index c0c005d434c8..e67b61a1a4cf 100644 --- a/packages/google-cloud-video-stitcher/google/cloud/video/stitcher_v1/services/video_stitcher_service/transports/base.py +++ b/packages/google-cloud-video-stitcher/google/cloud/video/stitcher_v1/services/video_stitcher_service/transports/base.py @@ -34,6 +34,7 @@ slates, stitch_details, video_stitcher_service, + vod_configs, ) DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( @@ -255,6 +256,36 @@ def _prep_wrapped_messages(self, client_info): default_timeout=60.0, client_info=client_info, ), + self.update_live_config: gapic_v1.method.wrap_method( + self.update_live_config, + default_timeout=None, + client_info=client_info, + ), + self.create_vod_config: gapic_v1.method.wrap_method( + self.create_vod_config, + default_timeout=None, + client_info=client_info, + ), + self.list_vod_configs: gapic_v1.method.wrap_method( + self.list_vod_configs, + default_timeout=None, + client_info=client_info, + ), + self.get_vod_config: gapic_v1.method.wrap_method( + self.get_vod_config, + default_timeout=None, + client_info=client_info, + ), + self.delete_vod_config: gapic_v1.method.wrap_method( + self.delete_vod_config, + default_timeout=None, + client_info=client_info, + ), + self.update_vod_config: gapic_v1.method.wrap_method( + self.update_vod_config, + default_timeout=None, + client_info=client_info, + ), } def close(self): @@ -509,6 +540,63 @@ def delete_live_config( ]: raise NotImplementedError() + @property + def update_live_config( + self, + ) -> Callable[ + [video_stitcher_service.UpdateLiveConfigRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def create_vod_config( + self, + ) -> Callable[ + [video_stitcher_service.CreateVodConfigRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def list_vod_configs( + self, + ) -> Callable[ + [video_stitcher_service.ListVodConfigsRequest], + Union[ + video_stitcher_service.ListVodConfigsResponse, + Awaitable[video_stitcher_service.ListVodConfigsResponse], + ], + ]: + raise NotImplementedError() + + @property + def get_vod_config( + self, + ) -> Callable[ + [video_stitcher_service.GetVodConfigRequest], + Union[vod_configs.VodConfig, Awaitable[vod_configs.VodConfig]], + ]: + raise NotImplementedError() + + @property + def 
delete_vod_config( + self, + ) -> Callable[ + [video_stitcher_service.DeleteVodConfigRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def update_vod_config( + self, + ) -> Callable[ + [video_stitcher_service.UpdateVodConfigRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + @property def list_operations( self, diff --git a/packages/google-cloud-video-stitcher/google/cloud/video/stitcher_v1/services/video_stitcher_service/transports/grpc.py b/packages/google-cloud-video-stitcher/google/cloud/video/stitcher_v1/services/video_stitcher_service/transports/grpc.py index aec7eee80f7b..994e52d18090 100644 --- a/packages/google-cloud-video-stitcher/google/cloud/video/stitcher_v1/services/video_stitcher_service/transports/grpc.py +++ b/packages/google-cloud-video-stitcher/google/cloud/video/stitcher_v1/services/video_stitcher_service/transports/grpc.py @@ -31,6 +31,7 @@ slates, stitch_details, video_stitcher_service, + vod_configs, ) from .base import DEFAULT_CLIENT_INFO, VideoStitcherServiceTransport @@ -948,6 +949,178 @@ def delete_live_config( ) return self._stubs["delete_live_config"] + @property + def update_live_config( + self, + ) -> Callable[ + [video_stitcher_service.UpdateLiveConfigRequest], operations_pb2.Operation + ]: + r"""Return a callable for the update live config method over gRPC. + + Updates the specified LiveConfig. Only update fields + specified in the call method body. + + Returns: + Callable[[~.UpdateLiveConfigRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_live_config" not in self._stubs: + self._stubs["update_live_config"] = self.grpc_channel.unary_unary( + "/google.cloud.video.stitcher.v1.VideoStitcherService/UpdateLiveConfig", + request_serializer=video_stitcher_service.UpdateLiveConfigRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["update_live_config"] + + @property + def create_vod_config( + self, + ) -> Callable[ + [video_stitcher_service.CreateVodConfigRequest], operations_pb2.Operation + ]: + r"""Return a callable for the create vod config method over gRPC. + + Registers the VOD config with the provided unique ID + in the specified region. + + Returns: + Callable[[~.CreateVodConfigRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_vod_config" not in self._stubs: + self._stubs["create_vod_config"] = self.grpc_channel.unary_unary( + "/google.cloud.video.stitcher.v1.VideoStitcherService/CreateVodConfig", + request_serializer=video_stitcher_service.CreateVodConfigRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["create_vod_config"] + + @property + def list_vod_configs( + self, + ) -> Callable[ + [video_stitcher_service.ListVodConfigsRequest], + video_stitcher_service.ListVodConfigsResponse, + ]: + r"""Return a callable for the list vod configs method over gRPC. 
+ + Lists all VOD configs managed by the Video Stitcher + API that belong to the specified project and region. + + Returns: + Callable[[~.ListVodConfigsRequest], + ~.ListVodConfigsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_vod_configs" not in self._stubs: + self._stubs["list_vod_configs"] = self.grpc_channel.unary_unary( + "/google.cloud.video.stitcher.v1.VideoStitcherService/ListVodConfigs", + request_serializer=video_stitcher_service.ListVodConfigsRequest.serialize, + response_deserializer=video_stitcher_service.ListVodConfigsResponse.deserialize, + ) + return self._stubs["list_vod_configs"] + + @property + def get_vod_config( + self, + ) -> Callable[[video_stitcher_service.GetVodConfigRequest], vod_configs.VodConfig]: + r"""Return a callable for the get vod config method over gRPC. + + Returns the specified VOD config managed by the Video + Stitcher API service. + + Returns: + Callable[[~.GetVodConfigRequest], + ~.VodConfig]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_vod_config" not in self._stubs: + self._stubs["get_vod_config"] = self.grpc_channel.unary_unary( + "/google.cloud.video.stitcher.v1.VideoStitcherService/GetVodConfig", + request_serializer=video_stitcher_service.GetVodConfigRequest.serialize, + response_deserializer=vod_configs.VodConfig.deserialize, + ) + return self._stubs["get_vod_config"] + + @property + def delete_vod_config( + self, + ) -> Callable[ + [video_stitcher_service.DeleteVodConfigRequest], operations_pb2.Operation + ]: + r"""Return a callable for the delete vod config method over gRPC. + + Deletes the specified VOD config. + + Returns: + Callable[[~.DeleteVodConfigRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_vod_config" not in self._stubs: + self._stubs["delete_vod_config"] = self.grpc_channel.unary_unary( + "/google.cloud.video.stitcher.v1.VideoStitcherService/DeleteVodConfig", + request_serializer=video_stitcher_service.DeleteVodConfigRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["delete_vod_config"] + + @property + def update_vod_config( + self, + ) -> Callable[ + [video_stitcher_service.UpdateVodConfigRequest], operations_pb2.Operation + ]: + r"""Return a callable for the update vod config method over gRPC. + + Updates the specified VOD config. Only update fields + specified in the call method body. + + Returns: + Callable[[~.UpdateVodConfigRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
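+        # Note (illustrative, not generated code): stubs are created lazily and
+        # cached in self._stubs, so the fully-qualified method
+        # "/google.cloud.video.stitcher.v1.VideoStitcherService/UpdateVodConfig"
+        # is registered on the gRPC channel at most once per transport.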
+ if "update_vod_config" not in self._stubs: + self._stubs["update_vod_config"] = self.grpc_channel.unary_unary( + "/google.cloud.video.stitcher.v1.VideoStitcherService/UpdateVodConfig", + request_serializer=video_stitcher_service.UpdateVodConfigRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["update_vod_config"] + def close(self): self.grpc_channel.close() diff --git a/packages/google-cloud-video-stitcher/google/cloud/video/stitcher_v1/services/video_stitcher_service/transports/grpc_asyncio.py b/packages/google-cloud-video-stitcher/google/cloud/video/stitcher_v1/services/video_stitcher_service/transports/grpc_asyncio.py index 8118e77a22ce..1fd3aea575eb 100644 --- a/packages/google-cloud-video-stitcher/google/cloud/video/stitcher_v1/services/video_stitcher_service/transports/grpc_asyncio.py +++ b/packages/google-cloud-video-stitcher/google/cloud/video/stitcher_v1/services/video_stitcher_service/transports/grpc_asyncio.py @@ -33,6 +33,7 @@ slates, stitch_details, video_stitcher_service, + vod_configs, ) from .base import DEFAULT_CLIENT_INFO, VideoStitcherServiceTransport @@ -968,6 +969,184 @@ def delete_live_config( ) return self._stubs["delete_live_config"] + @property + def update_live_config( + self, + ) -> Callable[ + [video_stitcher_service.UpdateLiveConfigRequest], + Awaitable[operations_pb2.Operation], + ]: + r"""Return a callable for the update live config method over gRPC. + + Updates the specified LiveConfig. Only update fields + specified in the call method body. + + Returns: + Callable[[~.UpdateLiveConfigRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_live_config" not in self._stubs: + self._stubs["update_live_config"] = self.grpc_channel.unary_unary( + "/google.cloud.video.stitcher.v1.VideoStitcherService/UpdateLiveConfig", + request_serializer=video_stitcher_service.UpdateLiveConfigRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["update_live_config"] + + @property + def create_vod_config( + self, + ) -> Callable[ + [video_stitcher_service.CreateVodConfigRequest], + Awaitable[operations_pb2.Operation], + ]: + r"""Return a callable for the create vod config method over gRPC. + + Registers the VOD config with the provided unique ID + in the specified region. + + Returns: + Callable[[~.CreateVodConfigRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "create_vod_config" not in self._stubs: + self._stubs["create_vod_config"] = self.grpc_channel.unary_unary( + "/google.cloud.video.stitcher.v1.VideoStitcherService/CreateVodConfig", + request_serializer=video_stitcher_service.CreateVodConfigRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["create_vod_config"] + + @property + def list_vod_configs( + self, + ) -> Callable[ + [video_stitcher_service.ListVodConfigsRequest], + Awaitable[video_stitcher_service.ListVodConfigsResponse], + ]: + r"""Return a callable for the list vod configs method over gRPC. + + Lists all VOD configs managed by the Video Stitcher + API that belong to the specified project and region. + + Returns: + Callable[[~.ListVodConfigsRequest], + Awaitable[~.ListVodConfigsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_vod_configs" not in self._stubs: + self._stubs["list_vod_configs"] = self.grpc_channel.unary_unary( + "/google.cloud.video.stitcher.v1.VideoStitcherService/ListVodConfigs", + request_serializer=video_stitcher_service.ListVodConfigsRequest.serialize, + response_deserializer=video_stitcher_service.ListVodConfigsResponse.deserialize, + ) + return self._stubs["list_vod_configs"] + + @property + def get_vod_config( + self, + ) -> Callable[ + [video_stitcher_service.GetVodConfigRequest], Awaitable[vod_configs.VodConfig] + ]: + r"""Return a callable for the get vod config method over gRPC. + + Returns the specified VOD config managed by the Video + Stitcher API service. + + Returns: + Callable[[~.GetVodConfigRequest], + Awaitable[~.VodConfig]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_vod_config" not in self._stubs: + self._stubs["get_vod_config"] = self.grpc_channel.unary_unary( + "/google.cloud.video.stitcher.v1.VideoStitcherService/GetVodConfig", + request_serializer=video_stitcher_service.GetVodConfigRequest.serialize, + response_deserializer=vod_configs.VodConfig.deserialize, + ) + return self._stubs["get_vod_config"] + + @property + def delete_vod_config( + self, + ) -> Callable[ + [video_stitcher_service.DeleteVodConfigRequest], + Awaitable[operations_pb2.Operation], + ]: + r"""Return a callable for the delete vod config method over gRPC. + + Deletes the specified VOD config. + + Returns: + Callable[[~.DeleteVodConfigRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "delete_vod_config" not in self._stubs: + self._stubs["delete_vod_config"] = self.grpc_channel.unary_unary( + "/google.cloud.video.stitcher.v1.VideoStitcherService/DeleteVodConfig", + request_serializer=video_stitcher_service.DeleteVodConfigRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["delete_vod_config"] + + @property + def update_vod_config( + self, + ) -> Callable[ + [video_stitcher_service.UpdateVodConfigRequest], + Awaitable[operations_pb2.Operation], + ]: + r"""Return a callable for the update vod config method over gRPC. + + Updates the specified VOD config. Only update fields + specified in the call method body. + + Returns: + Callable[[~.UpdateVodConfigRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_vod_config" not in self._stubs: + self._stubs["update_vod_config"] = self.grpc_channel.unary_unary( + "/google.cloud.video.stitcher.v1.VideoStitcherService/UpdateVodConfig", + request_serializer=video_stitcher_service.UpdateVodConfigRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["update_vod_config"] + def _prep_wrapped_messages(self, client_info): """Precompute the wrapped methods, overriding the base class method to use async wrappers.""" self._wrapped_methods = { @@ -1091,6 +1270,36 @@ def _prep_wrapped_messages(self, client_info): default_timeout=60.0, client_info=client_info, ), + self.update_live_config: gapic_v1.method_async.wrap_method( + self.update_live_config, + default_timeout=None, + client_info=client_info, + ), + self.create_vod_config: gapic_v1.method_async.wrap_method( + self.create_vod_config, + default_timeout=None, + client_info=client_info, + ), + self.list_vod_configs: gapic_v1.method_async.wrap_method( + self.list_vod_configs, + default_timeout=None, + client_info=client_info, + ), + self.get_vod_config: gapic_v1.method_async.wrap_method( + self.get_vod_config, + default_timeout=None, + client_info=client_info, + ), + self.delete_vod_config: gapic_v1.method_async.wrap_method( + self.delete_vod_config, + default_timeout=None, + client_info=client_info, + ), + self.update_vod_config: gapic_v1.method_async.wrap_method( + self.update_vod_config, + default_timeout=None, + client_info=client_info, + ), } def close(self): diff --git a/packages/google-cloud-video-stitcher/google/cloud/video/stitcher_v1/types/__init__.py b/packages/google-cloud-video-stitcher/google/cloud/video/stitcher_v1/types/__init__.py index 49dedc50887b..9e64d531137a 100644 --- a/packages/google-cloud-video-stitcher/google/cloud/video/stitcher_v1/types/__init__.py +++ b/packages/google-cloud-video-stitcher/google/cloud/video/stitcher_v1/types/__init__.py @@ -29,6 +29,7 @@ StaticAdResource, ) from .events import Event, ProgressEvent +from .fetch_options import FetchOptions from .live_configs import AdTracking, GamLiveConfig, LiveConfig, PrefetchConfig from .sessions import ( Interstitials, @@ -47,16 +48,19 @@ CreateLiveConfigRequest, CreateLiveSessionRequest, CreateSlateRequest, + CreateVodConfigRequest, CreateVodSessionRequest, DeleteCdnKeyRequest, DeleteLiveConfigRequest, DeleteSlateRequest, + DeleteVodConfigRequest, GetCdnKeyRequest, GetLiveAdTagDetailRequest, GetLiveConfigRequest, GetLiveSessionRequest, 
GetSlateRequest, GetVodAdTagDetailRequest, + GetVodConfigRequest, GetVodSessionRequest, GetVodStitchDetailRequest, ListCdnKeysRequest, @@ -69,12 +73,17 @@ ListSlatesResponse, ListVodAdTagDetailsRequest, ListVodAdTagDetailsResponse, + ListVodConfigsRequest, + ListVodConfigsResponse, ListVodStitchDetailsRequest, ListVodStitchDetailsResponse, OperationMetadata, UpdateCdnKeyRequest, + UpdateLiveConfigRequest, UpdateSlateRequest, + UpdateVodConfigRequest, ) +from .vod_configs import GamVodConfig, VodConfig __all__ = ( "AdRequest", @@ -93,6 +102,7 @@ "StaticAdResource", "Event", "ProgressEvent", + "FetchOptions", "GamLiveConfig", "LiveConfig", "PrefetchConfig", @@ -112,16 +122,19 @@ "CreateLiveConfigRequest", "CreateLiveSessionRequest", "CreateSlateRequest", + "CreateVodConfigRequest", "CreateVodSessionRequest", "DeleteCdnKeyRequest", "DeleteLiveConfigRequest", "DeleteSlateRequest", + "DeleteVodConfigRequest", "GetCdnKeyRequest", "GetLiveAdTagDetailRequest", "GetLiveConfigRequest", "GetLiveSessionRequest", "GetSlateRequest", "GetVodAdTagDetailRequest", + "GetVodConfigRequest", "GetVodSessionRequest", "GetVodStitchDetailRequest", "ListCdnKeysRequest", @@ -134,9 +147,15 @@ "ListSlatesResponse", "ListVodAdTagDetailsRequest", "ListVodAdTagDetailsResponse", + "ListVodConfigsRequest", + "ListVodConfigsResponse", "ListVodStitchDetailsRequest", "ListVodStitchDetailsResponse", "OperationMetadata", "UpdateCdnKeyRequest", + "UpdateLiveConfigRequest", "UpdateSlateRequest", + "UpdateVodConfigRequest", + "GamVodConfig", + "VodConfig", ) diff --git a/packages/google-cloud-video-stitcher/google/cloud/video/stitcher_v1/types/ad_tag_details.py b/packages/google-cloud-video-stitcher/google/cloud/video/stitcher_v1/types/ad_tag_details.py index d152bcdf4878..966a4898fc67 100644 --- a/packages/google-cloud-video-stitcher/google/cloud/video/stitcher_v1/types/ad_tag_details.py +++ b/packages/google-cloud-video-stitcher/google/cloud/video/stitcher_v1/types/ad_tag_details.py @@ -34,7 +34,9 @@ class LiveAdTagDetail(proto.Message): - r"""Container for a live session's ad tag detail. + r"""Information related to the details for one ad tag. This + resource is only available for live sessions that do not + implement Google Ad Manager ad insertion. Attributes: name (str): diff --git a/packages/google-cloud-video-stitcher/google/cloud/video/stitcher_v1/types/cdn_keys.py b/packages/google-cloud-video-stitcher/google/cloud/video/stitcher_v1/types/cdn_keys.py index 51c4b4b2d66c..a17180e60ac6 100644 --- a/packages/google-cloud-video-stitcher/google/cloud/video/stitcher_v1/types/cdn_keys.py +++ b/packages/google-cloud-video-stitcher/google/cloud/video/stitcher_v1/types/cdn_keys.py @@ -136,8 +136,33 @@ class MediaCdnKey(proto.Message): this Media CDN key. key_name (str): The keyset name of the Media CDN key. + token_config (google.cloud.video.stitcher_v1.types.MediaCdnKey.TokenConfig): + Optional. If set, the URL will be signed + using the Media CDN token. Otherwise, the URL + would be signed using the standard Media CDN + signature. """ + class TokenConfig(proto.Message): + r"""Configuration for a Media CDN token. + + Attributes: + query_parameter (str): + Optional. The query parameter in which to find the token. + + The name must be 1-64 characters long and match the regular + expression ``[a-zA-Z]([a-zA-Z0-9_-])*`` which means the + first character must be a letter, and all following + characters must be a dash, underscore, letter or digit. + + Defaults to ``edge-cache-token``. 
+ """ + + query_parameter: str = proto.Field( + proto.STRING, + number=1, + ) + private_key: bytes = proto.Field( proto.BYTES, number=1, @@ -146,6 +171,11 @@ class MediaCdnKey(proto.Message): proto.STRING, number=2, ) + token_config: TokenConfig = proto.Field( + proto.MESSAGE, + number=3, + message=TokenConfig, + ) __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-video-stitcher/google/cloud/video/stitcher_v1/types/fetch_options.py b/packages/google-cloud-video-stitcher/google/cloud/video/stitcher_v1/types/fetch_options.py new file mode 100644 index 000000000000..3fdac9107f90 --- /dev/null +++ b/packages/google-cloud-video-stitcher/google/cloud/video/stitcher_v1/types/fetch_options.py @@ -0,0 +1,49 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + +__protobuf__ = proto.module( + package="google.cloud.video.stitcher.v1", + manifest={ + "FetchOptions", + }, +) + + +class FetchOptions(proto.Message): + r"""Options on how fetches should be made. + + Attributes: + headers (MutableMapping[str, str]): + Custom headers to pass into fetch request. + Headers must have a maximum of 3 key value + pairs. Each key value pair must have a maximum + of 256 characters per key and 256 characters per + value. + """ + + headers: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=1, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-video-stitcher/google/cloud/video/stitcher_v1/types/live_configs.py b/packages/google-cloud-video-stitcher/google/cloud/video/stitcher_v1/types/live_configs.py index 842b21de778c..733bacb25b57 100644 --- a/packages/google-cloud-video-stitcher/google/cloud/video/stitcher_v1/types/live_configs.py +++ b/packages/google-cloud-video-stitcher/google/cloud/video/stitcher_v1/types/live_configs.py @@ -20,6 +20,8 @@ from google.protobuf import duration_pb2 # type: ignore import proto # type: ignore +from google.cloud.video.stitcher_v1.types import fetch_options + __protobuf__ = proto.module( package="google.cloud.video.stitcher.v1", manifest={ @@ -70,10 +72,7 @@ class LiveConfig(proto.Message): state (google.cloud.video.stitcher_v1.types.LiveConfig.State): Output only. State of the live config. ad_tracking (google.cloud.video.stitcher_v1.types.AdTracking): - Required. Determines how the ads are tracked. If - [gam_live_config][google.cloud.video.stitcher.v1.LiveConfig.gam_live_config] - is set, the value must be ``CLIENT`` because the IMA SDK - handles ad tracking. + Required. Determines how the ads are tracked. default_slate (str): This must refer to a slate in the same project. If Google Ad Manager (GAM) is used for ads, this string sets the value of @@ -85,6 +84,9 @@ class LiveConfig(proto.Message): default is ``CUT_CURRENT``. 
prefetch_config (google.cloud.video.stitcher_v1.types.PrefetchConfig): The configuration for prefetching ads. + source_fetch_options (google.cloud.video.stitcher_v1.types.FetchOptions): + Options for fetching source manifests and + segments. """ class State(proto.Enum): @@ -165,6 +167,11 @@ class StitchingPolicy(proto.Enum): number=10, message="PrefetchConfig", ) + source_fetch_options: fetch_options.FetchOptions = proto.Field( + proto.MESSAGE, + number=16, + message=fetch_options.FetchOptions, + ) class PrefetchConfig(proto.Message): diff --git a/packages/google-cloud-video-stitcher/google/cloud/video/stitcher_v1/types/sessions.py b/packages/google-cloud-video-stitcher/google/cloud/video/stitcher_v1/types/sessions.py index 35b8b3eaabce..db14873192a5 100644 --- a/packages/google-cloud-video-stitcher/google/cloud/video/stitcher_v1/types/sessions.py +++ b/packages/google-cloud-video-stitcher/google/cloud/video/stitcher_v1/types/sessions.py @@ -52,15 +52,22 @@ class VodSession(proto.Message): Output only. The playback URI of the stitched content. source_uri (str): - Required. URI of the media to stitch. + URI of the media to stitch. For most use cases, you should + create a + [VodConfig][google.cloud.video.stitcher.v1.VodConfig] with + this information rather than setting this field directly. ad_tag_uri (str): - Required. Ad tag URI. + Ad tag URI. For most use cases, you should create a + [VodConfig][google.cloud.video.stitcher.v1.VodConfig] with + this information rather than setting this field directly. ad_tag_macro_map (MutableMapping[str, str]): - Key value pairs for ad tag macro replacement. If the - specified ad tag URI has macros, this field provides the - mapping to the value that will replace the macro in the ad - tag URI. Macros are designated by square brackets. For - example: + Key value pairs for ad tag macro replacement, only available + for VOD sessions that do not implement Google Ad manager ad + insertion. If the specified ad tag URI has macros, this + field provides the mapping to the value that will replace + the macro in the ad tag URI. + + Macros are designated by square brackets, for example: Ad tag URI: ``"https://doubleclick.google.com/ad/1?geo_id=[geoId]"`` @@ -68,7 +75,7 @@ class VodSession(proto.Message): Ad tag macro map: ``{"geoId": "123"}`` Fully qualified ad tag: - ``"``\ https://doubleclick.google.com/ad/1?geo_id=123"\` + ``"https://doubleclick.google.com/ad/1?geo_id=123"`` manifest_options (google.cloud.video.stitcher_v1.types.ManifestOptions): Additional options that affect the output of the manifest. @@ -81,6 +88,10 @@ class VodSession(proto.Message): gam_settings (google.cloud.video.stitcher_v1.types.VodSession.GamSettings): This field should be set with appropriate values if GAM is being used for ads. + vod_config (str): + The resource name of the VOD config for this session, in the + form of + ``projects/{project}/locations/{location}/vodConfigs/{id}``. """ class GamSettings(proto.Message): @@ -149,6 +160,10 @@ class GamSettings(proto.Message): number=13, message=GamSettings, ) + vod_config: str = proto.Field( + proto.STRING, + number=14, + ) class Interstitials(proto.Message): @@ -277,12 +292,13 @@ class LiveSession(proto.Message): Output only. The URI to play the live session's ad-stitched stream. ad_tag_macros (MutableMapping[str, str]): - Key value pairs for ad tag macro replacement. 
If the - specified ad tag URI has macros, this field provides the - mapping to the value that will replace the macro in the ad - tag URI. Macros are designated by square brackets. + Key value pairs for ad tag macro replacement, only available + for live sessions that do not implement Google Ad manager ad + insertion. If the specified ad tag URI has macros, this + field provides the mapping to the value that will replace + the macro in the ad tag URI. - For example: + Macros are designated by square brackets, for example: Ad tag URI: "https://doubleclick.google.com/ad/1?geo_id=[geoId]" @@ -301,22 +317,37 @@ class LiveSession(proto.Message): Required. The resource name of the live config for this session, in the form of ``projects/{project}/locations/{location}/liveConfigs/{id}``. + ad_tracking (google.cloud.video.stitcher_v1.types.AdTracking): + Determines how the ad should be tracked. This + overrides the value set in the live config for + this session. """ class GamSettings(proto.Message): - r"""Defines fields related to Google Ad Manager (GAM). This - should be set if GAM is being used for ads. + r"""Defines fields related to Google Ad Manager (GAM). Attributes: stream_id (str): Required. The stream ID generated by Ad - Manager. + Manager. This must be set if GAM is being used + for ads and the session uses client-side ad + tracking. + targeting_parameters (MutableMapping[str, str]): + `Targeting + parameters `__ + to send to Ad Manager to generate a stream ID. This should + only be set if the session uses server-side ad tracking. """ stream_id: str = proto.Field( proto.STRING, number=1, ) + targeting_parameters: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=4, + ) name: str = proto.Field( proto.STRING, @@ -345,6 +376,11 @@ class GamSettings(proto.Message): proto.STRING, number=16, ) + ad_tracking: live_configs.AdTracking = proto.Field( + proto.ENUM, + number=17, + enum=live_configs.AdTracking, + ) class ManifestOptions(proto.Message): diff --git a/packages/google-cloud-video-stitcher/google/cloud/video/stitcher_v1/types/stitch_details.py b/packages/google-cloud-video-stitcher/google/cloud/video/stitcher_v1/types/stitch_details.py index c70ca5c734db..6959ff876778 100644 --- a/packages/google-cloud-video-stitcher/google/cloud/video/stitcher_v1/types/stitch_details.py +++ b/packages/google-cloud-video-stitcher/google/cloud/video/stitcher_v1/types/stitch_details.py @@ -31,9 +31,9 @@ class VodStitchDetail(proto.Message): - r"""Detailed information related to the interstitial of a VOD - session. This resource is only available for VOD sessions that - do not implement Google Ad Manager ad insertion. + r"""Information related to the interstitial of a VOD session. + This resource is only available for VOD sessions that do not + implement Google Ad Manager ad insertion. 
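The session changes above steer callers toward referencing a registered VOD config rather than passing `source_uri` and `ad_tag_uri` per session. A sketch of creating a session from an existing config, assuming placeholder resource names (the `ad_tracking` value is only an example and is not part of this diff):

```python
from google.cloud.video import stitcher_v1

client = stitcher_v1.VideoStitcherServiceClient()

# Placeholder parent and VOD config resource names.
PARENT = "projects/my-project/locations/us-central1"
VOD_CONFIG = f"{PARENT}/vodConfigs/my-vod-config"

session = client.create_vod_session(
    parent=PARENT,
    vod_session=stitcher_v1.VodSession(
        vod_config=VOD_CONFIG,                      # field 14 added in this change
        ad_tracking=stitcher_v1.AdTracking.SERVER,  # example tracking mode
    ),
)
print(session.play_uri)
```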
Attributes: name (str): diff --git a/packages/google-cloud-video-stitcher/google/cloud/video/stitcher_v1/types/video_stitcher_service.py b/packages/google-cloud-video-stitcher/google/cloud/video/stitcher_v1/types/video_stitcher_service.py index 4a7add275439..6c79dceca519 100644 --- a/packages/google-cloud-video-stitcher/google/cloud/video/stitcher_v1/types/video_stitcher_service.py +++ b/packages/google-cloud-video-stitcher/google/cloud/video/stitcher_v1/types/video_stitcher_service.py @@ -22,6 +22,7 @@ import proto # type: ignore from google.cloud.video.stitcher_v1.types import live_configs as gcvs_live_configs +from google.cloud.video.stitcher_v1.types import vod_configs as gcvs_vod_configs from google.cloud.video.stitcher_v1.types import ad_tag_details from google.cloud.video.stitcher_v1.types import cdn_keys as gcvs_cdn_keys from google.cloud.video.stitcher_v1.types import sessions @@ -61,6 +62,13 @@ "ListLiveConfigsResponse", "GetLiveConfigRequest", "DeleteLiveConfigRequest", + "UpdateLiveConfigRequest", + "CreateVodConfigRequest", + "ListVodConfigsRequest", + "ListVodConfigsResponse", + "GetVodConfigRequest", + "DeleteVodConfigRequest", + "UpdateVodConfigRequest", "OperationMetadata", }, ) @@ -874,6 +882,215 @@ class DeleteLiveConfigRequest(proto.Message): ) +class UpdateLiveConfigRequest(proto.Message): + r"""Request message for VideoStitcherService.updateLiveConfig. + + Attributes: + live_config (google.cloud.video.stitcher_v1.types.LiveConfig): + Required. The LiveConfig resource which + replaces the resource on the server. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Required. The update mask applies to the resource. For the + ``FieldMask`` definition, see + https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#fieldmask + """ + + live_config: gcvs_live_configs.LiveConfig = proto.Field( + proto.MESSAGE, + number=1, + message=gcvs_live_configs.LiveConfig, + ) + update_mask: field_mask_pb2.FieldMask = proto.Field( + proto.MESSAGE, + number=2, + message=field_mask_pb2.FieldMask, + ) + + +class CreateVodConfigRequest(proto.Message): + r"""Request message for VideoStitcherService.createVodConfig + + Attributes: + parent (str): + Required. The project in which the VOD config should be + created, in the form of + ``projects/{project_number}/locations/{location}``. + vod_config_id (str): + Required. The unique identifier ID to use for + the VOD config. + vod_config (google.cloud.video.stitcher_v1.types.VodConfig): + Required. The VOD config resource to create. + request_id (str): + Optional. A request ID to identify requests. Specify a + unique request ID so that if you must retry your request, + the server will know to ignore the request if it has already + been completed. The server will guarantee that for at least + 60 minutes since the first request. + + For example, consider a situation where you make an initial + request and the request times out. If you make the request + again with the same request ID, the server can check if + original operation with the same request ID was received, + and if so, will ignore the second request. This prevents + clients from accidentally creating duplicate commitments. + + The request ID must be a valid UUID with the exception that + zero UUID is not supported + ``(00000000-0000-0000-0000-000000000000)``. 
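`CreateVodConfigRequest` above is served by a long-running operation, with `request_id` providing retry idempotency. A minimal hedged sketch, where all identifiers are placeholders:

```python
import uuid

from google.cloud.video import stitcher_v1

client = stitcher_v1.VideoStitcherServiceClient()
PARENT = "projects/my-project/locations/us-central1"  # placeholder

request = stitcher_v1.CreateVodConfigRequest(
    parent=PARENT,
    vod_config_id="my-vod-config",
    vod_config=stitcher_v1.VodConfig(
        source_uri="https://example.com/vod/manifest.m3u8",
        ad_tag_uri="https://example.com/adtag",
    ),
    # Optional idempotency token: a retry carrying the same ID within
    # roughly 60 minutes is ignored by the server, per the field docs above.
    request_id=str(uuid.uuid4()),
)

operation = client.create_vod_config(request=request)
vod_config = operation.result()  # blocks until the LRO finishes
print(vod_config.name, vod_config.state)
```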
+ """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + vod_config_id: str = proto.Field( + proto.STRING, + number=2, + ) + vod_config: gcvs_vod_configs.VodConfig = proto.Field( + proto.MESSAGE, + number=3, + message=gcvs_vod_configs.VodConfig, + ) + request_id: str = proto.Field( + proto.STRING, + number=4, + ) + + +class ListVodConfigsRequest(proto.Message): + r"""Request message for VideoStitcherService.listVodConfig. + + Attributes: + parent (str): + Required. The project that contains the list of VOD configs, + in the form of + ``projects/{project_number}/locations/{location}``. + page_size (int): + Optional. The maximum number of items to + return. + page_token (str): + Optional. The next_page_token value returned from a previous + List request, if any. + filter (str): + Optional. The filter to apply to list results (see + `Filtering `__). + order_by (str): + Optional. Specifies the ordering of results following `Cloud + API + syntax `__. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + filter: str = proto.Field( + proto.STRING, + number=4, + ) + order_by: str = proto.Field( + proto.STRING, + number=5, + ) + + +class ListVodConfigsResponse(proto.Message): + r"""Response message for VideoStitcher.ListVodConfig. + + Attributes: + vod_configs (MutableSequence[google.cloud.video.stitcher_v1.types.VodConfig]): + List of VOD configs. + next_page_token (str): + The pagination token. + unreachable (MutableSequence[str]): + Locations that could not be reached. + """ + + @property + def raw_page(self): + return self + + vod_configs: MutableSequence[gcvs_vod_configs.VodConfig] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=gcvs_vod_configs.VodConfig, + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + unreachable: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=3, + ) + + +class GetVodConfigRequest(proto.Message): + r"""Request message for VideoStitcherService.getVodConfig. + + Attributes: + name (str): + Required. The name of the VOD config to be retrieved, in the + form of + ``projects/{project_number}/locations/{location}/vodConfigs/{id}``. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class DeleteVodConfigRequest(proto.Message): + r"""Request message for VideoStitcherService.deleteVodConfig. + + Attributes: + name (str): + Required. The name of the VOD config to be deleted, in the + form of + ``projects/{project_number}/locations/{location}/vodConfigs/{id}``. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class UpdateVodConfigRequest(proto.Message): + r"""Request message for VideoStitcherService.updateVodConfig. + + Attributes: + vod_config (google.cloud.video.stitcher_v1.types.VodConfig): + Required. The VOD config resource which + replaces the resource on the server. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Required. The update mask applies to the resource. 
For the + ``FieldMask`` definition, see + https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#fieldmask + """ + + vod_config: gcvs_vod_configs.VodConfig = proto.Field( + proto.MESSAGE, + number=1, + message=gcvs_vod_configs.VodConfig, + ) + update_mask: field_mask_pb2.FieldMask = proto.Field( + proto.MESSAGE, + number=2, + message=field_mask_pb2.FieldMask, + ) + + class OperationMetadata(proto.Message): r"""Represents the metadata of the long-running operation. diff --git a/packages/google-cloud-video-stitcher/google/cloud/video/stitcher_v1/types/vod_configs.py b/packages/google-cloud-video-stitcher/google/cloud/video/stitcher_v1/types/vod_configs.py new file mode 100644 index 000000000000..9ba8b12fdbcd --- /dev/null +++ b/packages/google-cloud-video-stitcher/google/cloud/video/stitcher_v1/types/vod_configs.py @@ -0,0 +1,118 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + +from google.cloud.video.stitcher_v1.types import fetch_options + +__protobuf__ = proto.module( + package="google.cloud.video.stitcher.v1", + manifest={ + "VodConfig", + "GamVodConfig", + }, +) + + +class VodConfig(proto.Message): + r"""Metadata used to register VOD configs. + + Attributes: + name (str): + Output only. The resource name of the VOD config, in the + form of + ``projects/{project}/locations/{location}/vodConfigs/{id}``. + source_uri (str): + Required. Source URI for the VOD stream + manifest. + ad_tag_uri (str): + Required. The default ad tag associated with + this VOD config. + gam_vod_config (google.cloud.video.stitcher_v1.types.GamVodConfig): + Optional. Google Ad Manager (GAM) metadata. + state (google.cloud.video.stitcher_v1.types.VodConfig.State): + Output only. State of the VOD config. + source_fetch_options (google.cloud.video.stitcher_v1.types.FetchOptions): + Options for fetching source manifests and + segments. + """ + + class State(proto.Enum): + r"""State of the VOD config. + + Values: + STATE_UNSPECIFIED (0): + State is not specified. + CREATING (1): + VOD config is being created. + READY (2): + VOD config is ready for use. + DELETING (3): + VOD config is queued up for deletion. 
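The list and update messages above back a paginated `list_vod_configs` and a long-running `update_vod_config` on the client. A hedged sketch of both, with placeholder names and an update mask restricted to a single field:

```python
from google.cloud.video import stitcher_v1
from google.protobuf import field_mask_pb2

client = stitcher_v1.VideoStitcherServiceClient()
PARENT = "projects/my-project/locations/us-central1"  # placeholder

# The pager follows next_page_token transparently.
for vod_config in client.list_vod_configs(parent=PARENT):
    print(vod_config.name, vod_config.state)

# Change only the ad tag; the field mask limits the update to listed paths.
updated = stitcher_v1.VodConfig(
    name=f"{PARENT}/vodConfigs/my-vod-config",
    ad_tag_uri="https://example.com/new-adtag",
)
operation = client.update_vod_config(
    vod_config=updated,
    update_mask=field_mask_pb2.FieldMask(paths=["ad_tag_uri"]),
)
print(operation.result().ad_tag_uri)
```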
+ """ + STATE_UNSPECIFIED = 0 + CREATING = 1 + READY = 2 + DELETING = 3 + + name: str = proto.Field( + proto.STRING, + number=1, + ) + source_uri: str = proto.Field( + proto.STRING, + number=2, + ) + ad_tag_uri: str = proto.Field( + proto.STRING, + number=3, + ) + gam_vod_config: "GamVodConfig" = proto.Field( + proto.MESSAGE, + number=4, + message="GamVodConfig", + ) + state: State = proto.Field( + proto.ENUM, + number=5, + enum=State, + ) + source_fetch_options: fetch_options.FetchOptions = proto.Field( + proto.MESSAGE, + number=8, + message=fetch_options.FetchOptions, + ) + + +class GamVodConfig(proto.Message): + r"""Metadata used for GAM ad decisioning. + + Attributes: + network_code (str): + Required. Ad Manager network code to + associate with the VOD config. + """ + + network_code: str = proto.Field( + proto.STRING, + number=1, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-video-stitcher/samples/generated_samples/snippet_metadata_google.cloud.video.stitcher.v1.json b/packages/google-cloud-video-stitcher/samples/generated_samples/snippet_metadata_google.cloud.video.stitcher.v1.json index 98e689015029..e2f9394ca881 100644 --- a/packages/google-cloud-video-stitcher/samples/generated_samples/snippet_metadata_google.cloud.video.stitcher.v1.json +++ b/packages/google-cloud-video-stitcher/samples/generated_samples/snippet_metadata_google.cloud.video.stitcher.v1.json @@ -711,6 +711,183 @@ ], "title": "videostitcher_v1_generated_video_stitcher_service_create_slate_sync.py" }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.video.stitcher_v1.VideoStitcherServiceAsyncClient", + "shortName": "VideoStitcherServiceAsyncClient" + }, + "fullName": "google.cloud.video.stitcher_v1.VideoStitcherServiceAsyncClient.create_vod_config", + "method": { + "fullName": "google.cloud.video.stitcher.v1.VideoStitcherService.CreateVodConfig", + "service": { + "fullName": "google.cloud.video.stitcher.v1.VideoStitcherService", + "shortName": "VideoStitcherService" + }, + "shortName": "CreateVodConfig" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.video.stitcher_v1.types.CreateVodConfigRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "vod_config", + "type": "google.cloud.video.stitcher_v1.types.VodConfig" + }, + { + "name": "vod_config_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "create_vod_config" + }, + "description": "Sample for CreateVodConfig", + "file": "videostitcher_v1_generated_video_stitcher_service_create_vod_config_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "videostitcher_v1_generated_VideoStitcherService_CreateVodConfig_async", + "segments": [ + { + "end": 61, + "start": 27, + "type": "FULL" + }, + { + "end": 61, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 51, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 58, + "start": 52, + "type": "REQUEST_EXECUTION" + }, + { + "end": 62, + "start": 59, + "type": "RESPONSE_HANDLING" + } + ], + "title": "videostitcher_v1_generated_video_stitcher_service_create_vod_config_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": 
{ + "fullName": "google.cloud.video.stitcher_v1.VideoStitcherServiceClient", + "shortName": "VideoStitcherServiceClient" + }, + "fullName": "google.cloud.video.stitcher_v1.VideoStitcherServiceClient.create_vod_config", + "method": { + "fullName": "google.cloud.video.stitcher.v1.VideoStitcherService.CreateVodConfig", + "service": { + "fullName": "google.cloud.video.stitcher.v1.VideoStitcherService", + "shortName": "VideoStitcherService" + }, + "shortName": "CreateVodConfig" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.video.stitcher_v1.types.CreateVodConfigRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "vod_config", + "type": "google.cloud.video.stitcher_v1.types.VodConfig" + }, + { + "name": "vod_config_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "create_vod_config" + }, + "description": "Sample for CreateVodConfig", + "file": "videostitcher_v1_generated_video_stitcher_service_create_vod_config_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "videostitcher_v1_generated_VideoStitcherService_CreateVodConfig_sync", + "segments": [ + { + "end": 61, + "start": 27, + "type": "FULL" + }, + { + "end": 61, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 51, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 58, + "start": 52, + "type": "REQUEST_EXECUTION" + }, + { + "end": 62, + "start": 59, + "type": "RESPONSE_HANDLING" + } + ], + "title": "videostitcher_v1_generated_video_stitcher_service_create_vod_config_sync.py" + }, { "canonical": true, "clientMethod": { @@ -764,12 +941,12 @@ "regionTag": "videostitcher_v1_generated_VideoStitcherService_CreateVodSession_async", "segments": [ { - "end": 57, + "end": 55, "start": 27, "type": "FULL" }, { - "end": 57, + "end": 55, "start": 27, "type": "SHORT" }, @@ -779,18 +956,18 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 51, + "end": 49, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 54, - "start": 52, + "end": 52, + "start": 50, "type": "REQUEST_EXECUTION" }, { - "end": 58, - "start": 55, + "end": 56, + "start": 53, "type": "RESPONSE_HANDLING" } ], @@ -848,12 +1025,12 @@ "regionTag": "videostitcher_v1_generated_VideoStitcherService_CreateVodSession_sync", "segments": [ { - "end": 57, + "end": 55, "start": 27, "type": "FULL" }, { - "end": 57, + "end": 55, "start": 27, "type": "SHORT" }, @@ -863,18 +1040,18 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 51, + "end": 49, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 54, - "start": 52, + "end": 52, + "start": 50, "type": "REQUEST_EXECUTION" }, { - "end": 58, - "start": 55, + "end": 56, + "start": 53, "type": "RESPONSE_HANDLING" } ], @@ -1371,19 +1548,19 @@ "fullName": "google.cloud.video.stitcher_v1.VideoStitcherServiceAsyncClient", "shortName": "VideoStitcherServiceAsyncClient" }, - "fullName": "google.cloud.video.stitcher_v1.VideoStitcherServiceAsyncClient.get_cdn_key", + "fullName": "google.cloud.video.stitcher_v1.VideoStitcherServiceAsyncClient.delete_vod_config", "method": { - "fullName": "google.cloud.video.stitcher.v1.VideoStitcherService.GetCdnKey", + "fullName": "google.cloud.video.stitcher.v1.VideoStitcherService.DeleteVodConfig", "service": { "fullName": 
"google.cloud.video.stitcher.v1.VideoStitcherService", "shortName": "VideoStitcherService" }, - "shortName": "GetCdnKey" + "shortName": "DeleteVodConfig" }, "parameters": [ { "name": "request", - "type": "google.cloud.video.stitcher_v1.types.GetCdnKeyRequest" + "type": "google.cloud.video.stitcher_v1.types.DeleteVodConfigRequest" }, { "name": "name", @@ -1402,22 +1579,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.video.stitcher_v1.types.CdnKey", - "shortName": "get_cdn_key" + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "delete_vod_config" }, - "description": "Sample for GetCdnKey", - "file": "videostitcher_v1_generated_video_stitcher_service_get_cdn_key_async.py", + "description": "Sample for DeleteVodConfig", + "file": "videostitcher_v1_generated_video_stitcher_service_delete_vod_config_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "videostitcher_v1_generated_VideoStitcherService_GetCdnKey_async", + "regionTag": "videostitcher_v1_generated_VideoStitcherService_DeleteVodConfig_async", "segments": [ { - "end": 51, + "end": 55, "start": 27, "type": "FULL" }, { - "end": 51, + "end": 55, "start": 27, "type": "SHORT" }, @@ -1432,17 +1609,17 @@ "type": "REQUEST_INITIALIZATION" }, { - "end": 48, + "end": 52, "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 52, - "start": 49, + "end": 56, + "start": 53, "type": "RESPONSE_HANDLING" } ], - "title": "videostitcher_v1_generated_video_stitcher_service_get_cdn_key_async.py" + "title": "videostitcher_v1_generated_video_stitcher_service_delete_vod_config_async.py" }, { "canonical": true, @@ -1451,19 +1628,19 @@ "fullName": "google.cloud.video.stitcher_v1.VideoStitcherServiceClient", "shortName": "VideoStitcherServiceClient" }, - "fullName": "google.cloud.video.stitcher_v1.VideoStitcherServiceClient.get_cdn_key", + "fullName": "google.cloud.video.stitcher_v1.VideoStitcherServiceClient.delete_vod_config", "method": { - "fullName": "google.cloud.video.stitcher.v1.VideoStitcherService.GetCdnKey", + "fullName": "google.cloud.video.stitcher.v1.VideoStitcherService.DeleteVodConfig", "service": { "fullName": "google.cloud.video.stitcher.v1.VideoStitcherService", "shortName": "VideoStitcherService" }, - "shortName": "GetCdnKey" + "shortName": "DeleteVodConfig" }, "parameters": [ { "name": "request", - "type": "google.cloud.video.stitcher_v1.types.GetCdnKeyRequest" + "type": "google.cloud.video.stitcher_v1.types.DeleteVodConfigRequest" }, { "name": "name", @@ -1482,22 +1659,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.video.stitcher_v1.types.CdnKey", - "shortName": "get_cdn_key" + "resultType": "google.api_core.operation.Operation", + "shortName": "delete_vod_config" }, - "description": "Sample for GetCdnKey", - "file": "videostitcher_v1_generated_video_stitcher_service_get_cdn_key_sync.py", + "description": "Sample for DeleteVodConfig", + "file": "videostitcher_v1_generated_video_stitcher_service_delete_vod_config_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "videostitcher_v1_generated_VideoStitcherService_GetCdnKey_sync", + "regionTag": "videostitcher_v1_generated_VideoStitcherService_DeleteVodConfig_sync", "segments": [ { - "end": 51, + "end": 55, "start": 27, "type": "FULL" }, { - "end": 51, + "end": 55, "start": 27, "type": "SHORT" }, @@ -1512,17 +1689,17 @@ "type": "REQUEST_INITIALIZATION" }, { - "end": 48, + "end": 52, "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 52, - "start": 49, + 
"end": 56, + "start": 53, "type": "RESPONSE_HANDLING" } ], - "title": "videostitcher_v1_generated_video_stitcher_service_get_cdn_key_sync.py" + "title": "videostitcher_v1_generated_video_stitcher_service_delete_vod_config_sync.py" }, { "canonical": true, @@ -1532,19 +1709,19 @@ "fullName": "google.cloud.video.stitcher_v1.VideoStitcherServiceAsyncClient", "shortName": "VideoStitcherServiceAsyncClient" }, - "fullName": "google.cloud.video.stitcher_v1.VideoStitcherServiceAsyncClient.get_live_ad_tag_detail", + "fullName": "google.cloud.video.stitcher_v1.VideoStitcherServiceAsyncClient.get_cdn_key", "method": { - "fullName": "google.cloud.video.stitcher.v1.VideoStitcherService.GetLiveAdTagDetail", + "fullName": "google.cloud.video.stitcher.v1.VideoStitcherService.GetCdnKey", "service": { "fullName": "google.cloud.video.stitcher.v1.VideoStitcherService", "shortName": "VideoStitcherService" }, - "shortName": "GetLiveAdTagDetail" + "shortName": "GetCdnKey" }, "parameters": [ { "name": "request", - "type": "google.cloud.video.stitcher_v1.types.GetLiveAdTagDetailRequest" + "type": "google.cloud.video.stitcher_v1.types.GetCdnKeyRequest" }, { "name": "name", @@ -1563,14 +1740,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.video.stitcher_v1.types.LiveAdTagDetail", - "shortName": "get_live_ad_tag_detail" + "resultType": "google.cloud.video.stitcher_v1.types.CdnKey", + "shortName": "get_cdn_key" }, - "description": "Sample for GetLiveAdTagDetail", - "file": "videostitcher_v1_generated_video_stitcher_service_get_live_ad_tag_detail_async.py", + "description": "Sample for GetCdnKey", + "file": "videostitcher_v1_generated_video_stitcher_service_get_cdn_key_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "videostitcher_v1_generated_VideoStitcherService_GetLiveAdTagDetail_async", + "regionTag": "videostitcher_v1_generated_VideoStitcherService_GetCdnKey_async", "segments": [ { "end": 51, @@ -1603,7 +1780,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "videostitcher_v1_generated_video_stitcher_service_get_live_ad_tag_detail_async.py" + "title": "videostitcher_v1_generated_video_stitcher_service_get_cdn_key_async.py" }, { "canonical": true, @@ -1612,19 +1789,19 @@ "fullName": "google.cloud.video.stitcher_v1.VideoStitcherServiceClient", "shortName": "VideoStitcherServiceClient" }, - "fullName": "google.cloud.video.stitcher_v1.VideoStitcherServiceClient.get_live_ad_tag_detail", + "fullName": "google.cloud.video.stitcher_v1.VideoStitcherServiceClient.get_cdn_key", "method": { - "fullName": "google.cloud.video.stitcher.v1.VideoStitcherService.GetLiveAdTagDetail", + "fullName": "google.cloud.video.stitcher.v1.VideoStitcherService.GetCdnKey", "service": { "fullName": "google.cloud.video.stitcher.v1.VideoStitcherService", "shortName": "VideoStitcherService" }, - "shortName": "GetLiveAdTagDetail" + "shortName": "GetCdnKey" }, "parameters": [ { "name": "request", - "type": "google.cloud.video.stitcher_v1.types.GetLiveAdTagDetailRequest" + "type": "google.cloud.video.stitcher_v1.types.GetCdnKeyRequest" }, { "name": "name", @@ -1643,12 +1820,173 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.video.stitcher_v1.types.LiveAdTagDetail", - "shortName": "get_live_ad_tag_detail" + "resultType": "google.cloud.video.stitcher_v1.types.CdnKey", + "shortName": "get_cdn_key" }, - "description": "Sample for GetLiveAdTagDetail", - "file": "videostitcher_v1_generated_video_stitcher_service_get_live_ad_tag_detail_sync.py", - "language": "PYTHON", + 
"description": "Sample for GetCdnKey", + "file": "videostitcher_v1_generated_video_stitcher_service_get_cdn_key_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "videostitcher_v1_generated_VideoStitcherService_GetCdnKey_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "videostitcher_v1_generated_video_stitcher_service_get_cdn_key_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.video.stitcher_v1.VideoStitcherServiceAsyncClient", + "shortName": "VideoStitcherServiceAsyncClient" + }, + "fullName": "google.cloud.video.stitcher_v1.VideoStitcherServiceAsyncClient.get_live_ad_tag_detail", + "method": { + "fullName": "google.cloud.video.stitcher.v1.VideoStitcherService.GetLiveAdTagDetail", + "service": { + "fullName": "google.cloud.video.stitcher.v1.VideoStitcherService", + "shortName": "VideoStitcherService" + }, + "shortName": "GetLiveAdTagDetail" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.video.stitcher_v1.types.GetLiveAdTagDetailRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.video.stitcher_v1.types.LiveAdTagDetail", + "shortName": "get_live_ad_tag_detail" + }, + "description": "Sample for GetLiveAdTagDetail", + "file": "videostitcher_v1_generated_video_stitcher_service_get_live_ad_tag_detail_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "videostitcher_v1_generated_VideoStitcherService_GetLiveAdTagDetail_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "videostitcher_v1_generated_video_stitcher_service_get_live_ad_tag_detail_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.video.stitcher_v1.VideoStitcherServiceClient", + "shortName": "VideoStitcherServiceClient" + }, + "fullName": "google.cloud.video.stitcher_v1.VideoStitcherServiceClient.get_live_ad_tag_detail", + "method": { + "fullName": "google.cloud.video.stitcher.v1.VideoStitcherService.GetLiveAdTagDetail", + "service": { + "fullName": "google.cloud.video.stitcher.v1.VideoStitcherService", + "shortName": "VideoStitcherService" + }, + "shortName": "GetLiveAdTagDetail" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.video.stitcher_v1.types.GetLiveAdTagDetailRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": 
"google.cloud.video.stitcher_v1.types.LiveAdTagDetail", + "shortName": "get_live_ad_tag_detail" + }, + "description": "Sample for GetLiveAdTagDetail", + "file": "videostitcher_v1_generated_video_stitcher_service_get_live_ad_tag_detail_sync.py", + "language": "PYTHON", "origin": "API_DEFINITION", "regionTag": "videostitcher_v1_generated_VideoStitcherService_GetLiveAdTagDetail_sync", "segments": [ @@ -2337,19 +2675,19 @@ "fullName": "google.cloud.video.stitcher_v1.VideoStitcherServiceAsyncClient", "shortName": "VideoStitcherServiceAsyncClient" }, - "fullName": "google.cloud.video.stitcher_v1.VideoStitcherServiceAsyncClient.get_vod_session", + "fullName": "google.cloud.video.stitcher_v1.VideoStitcherServiceAsyncClient.get_vod_config", "method": { - "fullName": "google.cloud.video.stitcher.v1.VideoStitcherService.GetVodSession", + "fullName": "google.cloud.video.stitcher.v1.VideoStitcherService.GetVodConfig", "service": { "fullName": "google.cloud.video.stitcher.v1.VideoStitcherService", "shortName": "VideoStitcherService" }, - "shortName": "GetVodSession" + "shortName": "GetVodConfig" }, "parameters": [ { "name": "request", - "type": "google.cloud.video.stitcher_v1.types.GetVodSessionRequest" + "type": "google.cloud.video.stitcher_v1.types.GetVodConfigRequest" }, { "name": "name", @@ -2368,14 +2706,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.video.stitcher_v1.types.VodSession", - "shortName": "get_vod_session" + "resultType": "google.cloud.video.stitcher_v1.types.VodConfig", + "shortName": "get_vod_config" }, - "description": "Sample for GetVodSession", - "file": "videostitcher_v1_generated_video_stitcher_service_get_vod_session_async.py", + "description": "Sample for GetVodConfig", + "file": "videostitcher_v1_generated_video_stitcher_service_get_vod_config_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "videostitcher_v1_generated_VideoStitcherService_GetVodSession_async", + "regionTag": "videostitcher_v1_generated_VideoStitcherService_GetVodConfig_async", "segments": [ { "end": 51, @@ -2408,7 +2746,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "videostitcher_v1_generated_video_stitcher_service_get_vod_session_async.py" + "title": "videostitcher_v1_generated_video_stitcher_service_get_vod_config_async.py" }, { "canonical": true, @@ -2417,19 +2755,19 @@ "fullName": "google.cloud.video.stitcher_v1.VideoStitcherServiceClient", "shortName": "VideoStitcherServiceClient" }, - "fullName": "google.cloud.video.stitcher_v1.VideoStitcherServiceClient.get_vod_session", + "fullName": "google.cloud.video.stitcher_v1.VideoStitcherServiceClient.get_vod_config", "method": { - "fullName": "google.cloud.video.stitcher.v1.VideoStitcherService.GetVodSession", + "fullName": "google.cloud.video.stitcher.v1.VideoStitcherService.GetVodConfig", "service": { "fullName": "google.cloud.video.stitcher.v1.VideoStitcherService", "shortName": "VideoStitcherService" }, - "shortName": "GetVodSession" + "shortName": "GetVodConfig" }, "parameters": [ { "name": "request", - "type": "google.cloud.video.stitcher_v1.types.GetVodSessionRequest" + "type": "google.cloud.video.stitcher_v1.types.GetVodConfigRequest" }, { "name": "name", @@ -2448,14 +2786,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.video.stitcher_v1.types.VodSession", - "shortName": "get_vod_session" + "resultType": "google.cloud.video.stitcher_v1.types.VodConfig", + "shortName": "get_vod_config" }, - "description": "Sample for GetVodSession", - "file": 
"videostitcher_v1_generated_video_stitcher_service_get_vod_session_sync.py", + "description": "Sample for GetVodConfig", + "file": "videostitcher_v1_generated_video_stitcher_service_get_vod_config_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "videostitcher_v1_generated_VideoStitcherService_GetVodSession_sync", + "regionTag": "videostitcher_v1_generated_VideoStitcherService_GetVodConfig_sync", "segments": [ { "end": 51, @@ -2488,7 +2826,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "videostitcher_v1_generated_video_stitcher_service_get_vod_session_sync.py" + "title": "videostitcher_v1_generated_video_stitcher_service_get_vod_config_sync.py" }, { "canonical": true, @@ -2498,19 +2836,19 @@ "fullName": "google.cloud.video.stitcher_v1.VideoStitcherServiceAsyncClient", "shortName": "VideoStitcherServiceAsyncClient" }, - "fullName": "google.cloud.video.stitcher_v1.VideoStitcherServiceAsyncClient.get_vod_stitch_detail", + "fullName": "google.cloud.video.stitcher_v1.VideoStitcherServiceAsyncClient.get_vod_session", "method": { - "fullName": "google.cloud.video.stitcher.v1.VideoStitcherService.GetVodStitchDetail", + "fullName": "google.cloud.video.stitcher.v1.VideoStitcherService.GetVodSession", "service": { "fullName": "google.cloud.video.stitcher.v1.VideoStitcherService", "shortName": "VideoStitcherService" }, - "shortName": "GetVodStitchDetail" + "shortName": "GetVodSession" }, "parameters": [ { "name": "request", - "type": "google.cloud.video.stitcher_v1.types.GetVodStitchDetailRequest" + "type": "google.cloud.video.stitcher_v1.types.GetVodSessionRequest" }, { "name": "name", @@ -2529,14 +2867,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.video.stitcher_v1.types.VodStitchDetail", - "shortName": "get_vod_stitch_detail" + "resultType": "google.cloud.video.stitcher_v1.types.VodSession", + "shortName": "get_vod_session" }, - "description": "Sample for GetVodStitchDetail", - "file": "videostitcher_v1_generated_video_stitcher_service_get_vod_stitch_detail_async.py", + "description": "Sample for GetVodSession", + "file": "videostitcher_v1_generated_video_stitcher_service_get_vod_session_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "videostitcher_v1_generated_VideoStitcherService_GetVodStitchDetail_async", + "regionTag": "videostitcher_v1_generated_VideoStitcherService_GetVodSession_async", "segments": [ { "end": 51, @@ -2569,7 +2907,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "videostitcher_v1_generated_video_stitcher_service_get_vod_stitch_detail_async.py" + "title": "videostitcher_v1_generated_video_stitcher_service_get_vod_session_async.py" }, { "canonical": true, @@ -2578,19 +2916,19 @@ "fullName": "google.cloud.video.stitcher_v1.VideoStitcherServiceClient", "shortName": "VideoStitcherServiceClient" }, - "fullName": "google.cloud.video.stitcher_v1.VideoStitcherServiceClient.get_vod_stitch_detail", + "fullName": "google.cloud.video.stitcher_v1.VideoStitcherServiceClient.get_vod_session", "method": { - "fullName": "google.cloud.video.stitcher.v1.VideoStitcherService.GetVodStitchDetail", + "fullName": "google.cloud.video.stitcher.v1.VideoStitcherService.GetVodSession", "service": { "fullName": "google.cloud.video.stitcher.v1.VideoStitcherService", "shortName": "VideoStitcherService" }, - "shortName": "GetVodStitchDetail" + "shortName": "GetVodSession" }, "parameters": [ { "name": "request", - "type": "google.cloud.video.stitcher_v1.types.GetVodStitchDetailRequest" + "type": 
"google.cloud.video.stitcher_v1.types.GetVodSessionRequest" }, { "name": "name", @@ -2609,14 +2947,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.video.stitcher_v1.types.VodStitchDetail", - "shortName": "get_vod_stitch_detail" + "resultType": "google.cloud.video.stitcher_v1.types.VodSession", + "shortName": "get_vod_session" }, - "description": "Sample for GetVodStitchDetail", - "file": "videostitcher_v1_generated_video_stitcher_service_get_vod_stitch_detail_sync.py", + "description": "Sample for GetVodSession", + "file": "videostitcher_v1_generated_video_stitcher_service_get_vod_session_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "videostitcher_v1_generated_VideoStitcherService_GetVodStitchDetail_sync", + "regionTag": "videostitcher_v1_generated_VideoStitcherService_GetVodSession_sync", "segments": [ { "end": 51, @@ -2649,7 +2987,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "videostitcher_v1_generated_video_stitcher_service_get_vod_stitch_detail_sync.py" + "title": "videostitcher_v1_generated_video_stitcher_service_get_vod_session_sync.py" }, { "canonical": true, @@ -2659,22 +2997,22 @@ "fullName": "google.cloud.video.stitcher_v1.VideoStitcherServiceAsyncClient", "shortName": "VideoStitcherServiceAsyncClient" }, - "fullName": "google.cloud.video.stitcher_v1.VideoStitcherServiceAsyncClient.list_cdn_keys", + "fullName": "google.cloud.video.stitcher_v1.VideoStitcherServiceAsyncClient.get_vod_stitch_detail", "method": { - "fullName": "google.cloud.video.stitcher.v1.VideoStitcherService.ListCdnKeys", + "fullName": "google.cloud.video.stitcher.v1.VideoStitcherService.GetVodStitchDetail", "service": { "fullName": "google.cloud.video.stitcher.v1.VideoStitcherService", "shortName": "VideoStitcherService" }, - "shortName": "ListCdnKeys" + "shortName": "GetVodStitchDetail" }, "parameters": [ { "name": "request", - "type": "google.cloud.video.stitcher_v1.types.ListCdnKeysRequest" + "type": "google.cloud.video.stitcher_v1.types.GetVodStitchDetailRequest" }, { - "name": "parent", + "name": "name", "type": "str" }, { @@ -2690,22 +3028,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.video.stitcher_v1.services.video_stitcher_service.pagers.ListCdnKeysAsyncPager", - "shortName": "list_cdn_keys" + "resultType": "google.cloud.video.stitcher_v1.types.VodStitchDetail", + "shortName": "get_vod_stitch_detail" }, - "description": "Sample for ListCdnKeys", - "file": "videostitcher_v1_generated_video_stitcher_service_list_cdn_keys_async.py", + "description": "Sample for GetVodStitchDetail", + "file": "videostitcher_v1_generated_video_stitcher_service_get_vod_stitch_detail_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "videostitcher_v1_generated_VideoStitcherService_ListCdnKeys_async", + "regionTag": "videostitcher_v1_generated_VideoStitcherService_GetVodStitchDetail_async", "segments": [ { - "end": 52, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 52, + "end": 51, "start": 27, "type": "SHORT" }, @@ -2725,12 +3063,12 @@ "type": "REQUEST_EXECUTION" }, { - "end": 53, + "end": 52, "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "videostitcher_v1_generated_video_stitcher_service_list_cdn_keys_async.py" + "title": "videostitcher_v1_generated_video_stitcher_service_get_vod_stitch_detail_async.py" }, { "canonical": true, @@ -2739,9 +3077,170 @@ "fullName": "google.cloud.video.stitcher_v1.VideoStitcherServiceClient", "shortName": "VideoStitcherServiceClient" }, - "fullName": 
"google.cloud.video.stitcher_v1.VideoStitcherServiceClient.list_cdn_keys", + "fullName": "google.cloud.video.stitcher_v1.VideoStitcherServiceClient.get_vod_stitch_detail", "method": { - "fullName": "google.cloud.video.stitcher.v1.VideoStitcherService.ListCdnKeys", + "fullName": "google.cloud.video.stitcher.v1.VideoStitcherService.GetVodStitchDetail", + "service": { + "fullName": "google.cloud.video.stitcher.v1.VideoStitcherService", + "shortName": "VideoStitcherService" + }, + "shortName": "GetVodStitchDetail" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.video.stitcher_v1.types.GetVodStitchDetailRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.video.stitcher_v1.types.VodStitchDetail", + "shortName": "get_vod_stitch_detail" + }, + "description": "Sample for GetVodStitchDetail", + "file": "videostitcher_v1_generated_video_stitcher_service_get_vod_stitch_detail_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "videostitcher_v1_generated_VideoStitcherService_GetVodStitchDetail_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "videostitcher_v1_generated_video_stitcher_service_get_vod_stitch_detail_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.video.stitcher_v1.VideoStitcherServiceAsyncClient", + "shortName": "VideoStitcherServiceAsyncClient" + }, + "fullName": "google.cloud.video.stitcher_v1.VideoStitcherServiceAsyncClient.list_cdn_keys", + "method": { + "fullName": "google.cloud.video.stitcher.v1.VideoStitcherService.ListCdnKeys", + "service": { + "fullName": "google.cloud.video.stitcher.v1.VideoStitcherService", + "shortName": "VideoStitcherService" + }, + "shortName": "ListCdnKeys" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.video.stitcher_v1.types.ListCdnKeysRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.video.stitcher_v1.services.video_stitcher_service.pagers.ListCdnKeysAsyncPager", + "shortName": "list_cdn_keys" + }, + "description": "Sample for ListCdnKeys", + "file": "videostitcher_v1_generated_video_stitcher_service_list_cdn_keys_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "videostitcher_v1_generated_VideoStitcherService_ListCdnKeys_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": 
"videostitcher_v1_generated_video_stitcher_service_list_cdn_keys_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.video.stitcher_v1.VideoStitcherServiceClient", + "shortName": "VideoStitcherServiceClient" + }, + "fullName": "google.cloud.video.stitcher_v1.VideoStitcherServiceClient.list_cdn_keys", + "method": { + "fullName": "google.cloud.video.stitcher.v1.VideoStitcherService.ListCdnKeys", "service": { "fullName": "google.cloud.video.stitcher.v1.VideoStitcherService", "shortName": "VideoStitcherService" @@ -3464,19 +3963,19 @@ "fullName": "google.cloud.video.stitcher_v1.VideoStitcherServiceAsyncClient", "shortName": "VideoStitcherServiceAsyncClient" }, - "fullName": "google.cloud.video.stitcher_v1.VideoStitcherServiceAsyncClient.list_vod_stitch_details", + "fullName": "google.cloud.video.stitcher_v1.VideoStitcherServiceAsyncClient.list_vod_configs", "method": { - "fullName": "google.cloud.video.stitcher.v1.VideoStitcherService.ListVodStitchDetails", + "fullName": "google.cloud.video.stitcher.v1.VideoStitcherService.ListVodConfigs", "service": { "fullName": "google.cloud.video.stitcher.v1.VideoStitcherService", "shortName": "VideoStitcherService" }, - "shortName": "ListVodStitchDetails" + "shortName": "ListVodConfigs" }, "parameters": [ { "name": "request", - "type": "google.cloud.video.stitcher_v1.types.ListVodStitchDetailsRequest" + "type": "google.cloud.video.stitcher_v1.types.ListVodConfigsRequest" }, { "name": "parent", @@ -3495,14 +3994,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.video.stitcher_v1.services.video_stitcher_service.pagers.ListVodStitchDetailsAsyncPager", - "shortName": "list_vod_stitch_details" + "resultType": "google.cloud.video.stitcher_v1.services.video_stitcher_service.pagers.ListVodConfigsAsyncPager", + "shortName": "list_vod_configs" }, - "description": "Sample for ListVodStitchDetails", - "file": "videostitcher_v1_generated_video_stitcher_service_list_vod_stitch_details_async.py", + "description": "Sample for ListVodConfigs", + "file": "videostitcher_v1_generated_video_stitcher_service_list_vod_configs_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "videostitcher_v1_generated_VideoStitcherService_ListVodStitchDetails_async", + "regionTag": "videostitcher_v1_generated_VideoStitcherService_ListVodConfigs_async", "segments": [ { "end": 52, @@ -3535,7 +4034,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "videostitcher_v1_generated_video_stitcher_service_list_vod_stitch_details_async.py" + "title": "videostitcher_v1_generated_video_stitcher_service_list_vod_configs_async.py" }, { "canonical": true, @@ -3544,19 +4043,19 @@ "fullName": "google.cloud.video.stitcher_v1.VideoStitcherServiceClient", "shortName": "VideoStitcherServiceClient" }, - "fullName": "google.cloud.video.stitcher_v1.VideoStitcherServiceClient.list_vod_stitch_details", + "fullName": "google.cloud.video.stitcher_v1.VideoStitcherServiceClient.list_vod_configs", "method": { - "fullName": "google.cloud.video.stitcher.v1.VideoStitcherService.ListVodStitchDetails", + "fullName": "google.cloud.video.stitcher.v1.VideoStitcherService.ListVodConfigs", "service": { "fullName": "google.cloud.video.stitcher.v1.VideoStitcherService", "shortName": "VideoStitcherService" }, - "shortName": "ListVodStitchDetails" + "shortName": "ListVodConfigs" }, "parameters": [ { "name": "request", - "type": "google.cloud.video.stitcher_v1.types.ListVodStitchDetailsRequest" + "type": 
"google.cloud.video.stitcher_v1.types.ListVodConfigsRequest" }, { "name": "parent", @@ -3575,14 +4074,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.video.stitcher_v1.services.video_stitcher_service.pagers.ListVodStitchDetailsPager", - "shortName": "list_vod_stitch_details" + "resultType": "google.cloud.video.stitcher_v1.services.video_stitcher_service.pagers.ListVodConfigsPager", + "shortName": "list_vod_configs" }, - "description": "Sample for ListVodStitchDetails", - "file": "videostitcher_v1_generated_video_stitcher_service_list_vod_stitch_details_sync.py", + "description": "Sample for ListVodConfigs", + "file": "videostitcher_v1_generated_video_stitcher_service_list_vod_configs_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "videostitcher_v1_generated_VideoStitcherService_ListVodStitchDetails_sync", + "regionTag": "videostitcher_v1_generated_VideoStitcherService_ListVodConfigs_sync", "segments": [ { "end": 52, @@ -3615,7 +4114,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "videostitcher_v1_generated_video_stitcher_service_list_vod_stitch_details_sync.py" + "title": "videostitcher_v1_generated_video_stitcher_service_list_vod_configs_sync.py" }, { "canonical": true, @@ -3625,27 +4124,23 @@ "fullName": "google.cloud.video.stitcher_v1.VideoStitcherServiceAsyncClient", "shortName": "VideoStitcherServiceAsyncClient" }, - "fullName": "google.cloud.video.stitcher_v1.VideoStitcherServiceAsyncClient.update_cdn_key", + "fullName": "google.cloud.video.stitcher_v1.VideoStitcherServiceAsyncClient.list_vod_stitch_details", "method": { - "fullName": "google.cloud.video.stitcher.v1.VideoStitcherService.UpdateCdnKey", + "fullName": "google.cloud.video.stitcher.v1.VideoStitcherService.ListVodStitchDetails", "service": { "fullName": "google.cloud.video.stitcher.v1.VideoStitcherService", "shortName": "VideoStitcherService" }, - "shortName": "UpdateCdnKey" + "shortName": "ListVodStitchDetails" }, "parameters": [ { "name": "request", - "type": "google.cloud.video.stitcher_v1.types.UpdateCdnKeyRequest" - }, - { - "name": "cdn_key", - "type": "google.cloud.video.stitcher_v1.types.CdnKey" + "type": "google.cloud.video.stitcher_v1.types.ListVodStitchDetailsRequest" }, { - "name": "update_mask", - "type": "google.protobuf.field_mask_pb2.FieldMask" + "name": "parent", + "type": "str" }, { "name": "retry", @@ -3660,22 +4155,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "update_cdn_key" + "resultType": "google.cloud.video.stitcher_v1.services.video_stitcher_service.pagers.ListVodStitchDetailsAsyncPager", + "shortName": "list_vod_stitch_details" }, - "description": "Sample for UpdateCdnKey", - "file": "videostitcher_v1_generated_video_stitcher_service_update_cdn_key_async.py", + "description": "Sample for ListVodStitchDetails", + "file": "videostitcher_v1_generated_video_stitcher_service_list_vod_stitch_details_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "videostitcher_v1_generated_VideoStitcherService_UpdateCdnKey_async", + "regionTag": "videostitcher_v1_generated_VideoStitcherService_ListVodStitchDetails_async", "segments": [ { - "end": 54, + "end": 52, "start": 27, "type": "FULL" }, { - "end": 54, + "end": 52, "start": 27, "type": "SHORT" }, @@ -3685,22 +4180,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 44, + "end": 45, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 51, - "start": 45, + "end": 48, + "start": 46, "type": 
"REQUEST_EXECUTION" }, { - "end": 55, - "start": 52, + "end": 53, + "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "videostitcher_v1_generated_video_stitcher_service_update_cdn_key_async.py" + "title": "videostitcher_v1_generated_video_stitcher_service_list_vod_stitch_details_async.py" }, { "canonical": true, @@ -3709,27 +4204,23 @@ "fullName": "google.cloud.video.stitcher_v1.VideoStitcherServiceClient", "shortName": "VideoStitcherServiceClient" }, - "fullName": "google.cloud.video.stitcher_v1.VideoStitcherServiceClient.update_cdn_key", + "fullName": "google.cloud.video.stitcher_v1.VideoStitcherServiceClient.list_vod_stitch_details", "method": { - "fullName": "google.cloud.video.stitcher.v1.VideoStitcherService.UpdateCdnKey", + "fullName": "google.cloud.video.stitcher.v1.VideoStitcherService.ListVodStitchDetails", "service": { "fullName": "google.cloud.video.stitcher.v1.VideoStitcherService", "shortName": "VideoStitcherService" }, - "shortName": "UpdateCdnKey" + "shortName": "ListVodStitchDetails" }, "parameters": [ { "name": "request", - "type": "google.cloud.video.stitcher_v1.types.UpdateCdnKeyRequest" - }, - { - "name": "cdn_key", - "type": "google.cloud.video.stitcher_v1.types.CdnKey" + "type": "google.cloud.video.stitcher_v1.types.ListVodStitchDetailsRequest" }, { - "name": "update_mask", - "type": "google.protobuf.field_mask_pb2.FieldMask" + "name": "parent", + "type": "str" }, { "name": "retry", @@ -3744,22 +4235,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.api_core.operation.Operation", - "shortName": "update_cdn_key" + "resultType": "google.cloud.video.stitcher_v1.services.video_stitcher_service.pagers.ListVodStitchDetailsPager", + "shortName": "list_vod_stitch_details" }, - "description": "Sample for UpdateCdnKey", - "file": "videostitcher_v1_generated_video_stitcher_service_update_cdn_key_sync.py", + "description": "Sample for ListVodStitchDetails", + "file": "videostitcher_v1_generated_video_stitcher_service_list_vod_stitch_details_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "videostitcher_v1_generated_VideoStitcherService_UpdateCdnKey_sync", + "regionTag": "videostitcher_v1_generated_VideoStitcherService_ListVodStitchDetails_sync", "segments": [ { - "end": 54, + "end": 52, "start": 27, "type": "FULL" }, { - "end": 54, + "end": 52, "start": 27, "type": "SHORT" }, @@ -3769,12 +4260,181 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 44, + "end": 45, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 51, + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "videostitcher_v1_generated_video_stitcher_service_list_vod_stitch_details_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.video.stitcher_v1.VideoStitcherServiceAsyncClient", + "shortName": "VideoStitcherServiceAsyncClient" + }, + "fullName": "google.cloud.video.stitcher_v1.VideoStitcherServiceAsyncClient.update_cdn_key", + "method": { + "fullName": "google.cloud.video.stitcher.v1.VideoStitcherService.UpdateCdnKey", + "service": { + "fullName": "google.cloud.video.stitcher.v1.VideoStitcherService", + "shortName": "VideoStitcherService" + }, + "shortName": "UpdateCdnKey" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.video.stitcher_v1.types.UpdateCdnKeyRequest" + }, + { + "name": "cdn_key", + "type": "google.cloud.video.stitcher_v1.types.CdnKey" + }, + { + "name": 
"update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "update_cdn_key" + }, + "description": "Sample for UpdateCdnKey", + "file": "videostitcher_v1_generated_video_stitcher_service_update_cdn_key_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "videostitcher_v1_generated_VideoStitcherService_UpdateCdnKey_async", + "segments": [ + { + "end": 54, + "start": 27, + "type": "FULL" + }, + { + "end": 54, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 51, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 55, + "start": 52, + "type": "RESPONSE_HANDLING" + } + ], + "title": "videostitcher_v1_generated_video_stitcher_service_update_cdn_key_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.video.stitcher_v1.VideoStitcherServiceClient", + "shortName": "VideoStitcherServiceClient" + }, + "fullName": "google.cloud.video.stitcher_v1.VideoStitcherServiceClient.update_cdn_key", + "method": { + "fullName": "google.cloud.video.stitcher.v1.VideoStitcherService.UpdateCdnKey", + "service": { + "fullName": "google.cloud.video.stitcher.v1.VideoStitcherService", + "shortName": "VideoStitcherService" + }, + "shortName": "UpdateCdnKey" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.video.stitcher_v1.types.UpdateCdnKeyRequest" + }, + { + "name": "cdn_key", + "type": "google.cloud.video.stitcher_v1.types.CdnKey" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "update_cdn_key" + }, + "description": "Sample for UpdateCdnKey", + "file": "videostitcher_v1_generated_video_stitcher_service_update_cdn_key_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "videostitcher_v1_generated_VideoStitcherService_UpdateCdnKey_sync", + "segments": [ + { + "end": 54, + "start": 27, + "type": "FULL" + }, + { + "end": 54, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 51, "start": 45, "type": "REQUEST_EXECUTION" }, @@ -3786,6 +4446,175 @@ ], "title": "videostitcher_v1_generated_video_stitcher_service_update_cdn_key_sync.py" }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.video.stitcher_v1.VideoStitcherServiceAsyncClient", + "shortName": "VideoStitcherServiceAsyncClient" + }, + "fullName": "google.cloud.video.stitcher_v1.VideoStitcherServiceAsyncClient.update_live_config", + "method": { + "fullName": "google.cloud.video.stitcher.v1.VideoStitcherService.UpdateLiveConfig", + "service": { + "fullName": "google.cloud.video.stitcher.v1.VideoStitcherService", + "shortName": "VideoStitcherService" + }, + "shortName": "UpdateLiveConfig" + }, + "parameters": [ + { + "name": 
"request", + "type": "google.cloud.video.stitcher_v1.types.UpdateLiveConfigRequest" + }, + { + "name": "live_config", + "type": "google.cloud.video.stitcher_v1.types.LiveConfig" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "update_live_config" + }, + "description": "Sample for UpdateLiveConfig", + "file": "videostitcher_v1_generated_video_stitcher_service_update_live_config_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "videostitcher_v1_generated_VideoStitcherService_UpdateLiveConfig_async", + "segments": [ + { + "end": 59, + "start": 27, + "type": "FULL" + }, + { + "end": 59, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 49, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 56, + "start": 50, + "type": "REQUEST_EXECUTION" + }, + { + "end": 60, + "start": 57, + "type": "RESPONSE_HANDLING" + } + ], + "title": "videostitcher_v1_generated_video_stitcher_service_update_live_config_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.video.stitcher_v1.VideoStitcherServiceClient", + "shortName": "VideoStitcherServiceClient" + }, + "fullName": "google.cloud.video.stitcher_v1.VideoStitcherServiceClient.update_live_config", + "method": { + "fullName": "google.cloud.video.stitcher.v1.VideoStitcherService.UpdateLiveConfig", + "service": { + "fullName": "google.cloud.video.stitcher.v1.VideoStitcherService", + "shortName": "VideoStitcherService" + }, + "shortName": "UpdateLiveConfig" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.video.stitcher_v1.types.UpdateLiveConfigRequest" + }, + { + "name": "live_config", + "type": "google.cloud.video.stitcher_v1.types.LiveConfig" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "update_live_config" + }, + "description": "Sample for UpdateLiveConfig", + "file": "videostitcher_v1_generated_video_stitcher_service_update_live_config_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "videostitcher_v1_generated_VideoStitcherService_UpdateLiveConfig_sync", + "segments": [ + { + "end": 59, + "start": 27, + "type": "FULL" + }, + { + "end": 59, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 49, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 56, + "start": 50, + "type": "REQUEST_EXECUTION" + }, + { + "end": 60, + "start": 57, + "type": "RESPONSE_HANDLING" + } + ], + "title": "videostitcher_v1_generated_video_stitcher_service_update_live_config_sync.py" + }, { "canonical": true, "clientMethod": { @@ -3954,6 +4783,175 @@ } ], "title": "videostitcher_v1_generated_video_stitcher_service_update_slate_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": 
"google.cloud.video.stitcher_v1.VideoStitcherServiceAsyncClient", + "shortName": "VideoStitcherServiceAsyncClient" + }, + "fullName": "google.cloud.video.stitcher_v1.VideoStitcherServiceAsyncClient.update_vod_config", + "method": { + "fullName": "google.cloud.video.stitcher.v1.VideoStitcherService.UpdateVodConfig", + "service": { + "fullName": "google.cloud.video.stitcher.v1.VideoStitcherService", + "shortName": "VideoStitcherService" + }, + "shortName": "UpdateVodConfig" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.video.stitcher_v1.types.UpdateVodConfigRequest" + }, + { + "name": "vod_config", + "type": "google.cloud.video.stitcher_v1.types.VodConfig" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "update_vod_config" + }, + "description": "Sample for UpdateVodConfig", + "file": "videostitcher_v1_generated_video_stitcher_service_update_vod_config_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "videostitcher_v1_generated_VideoStitcherService_UpdateVodConfig_async", + "segments": [ + { + "end": 59, + "start": 27, + "type": "FULL" + }, + { + "end": 59, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 49, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 56, + "start": 50, + "type": "REQUEST_EXECUTION" + }, + { + "end": 60, + "start": 57, + "type": "RESPONSE_HANDLING" + } + ], + "title": "videostitcher_v1_generated_video_stitcher_service_update_vod_config_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.video.stitcher_v1.VideoStitcherServiceClient", + "shortName": "VideoStitcherServiceClient" + }, + "fullName": "google.cloud.video.stitcher_v1.VideoStitcherServiceClient.update_vod_config", + "method": { + "fullName": "google.cloud.video.stitcher.v1.VideoStitcherService.UpdateVodConfig", + "service": { + "fullName": "google.cloud.video.stitcher.v1.VideoStitcherService", + "shortName": "VideoStitcherService" + }, + "shortName": "UpdateVodConfig" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.video.stitcher_v1.types.UpdateVodConfigRequest" + }, + { + "name": "vod_config", + "type": "google.cloud.video.stitcher_v1.types.VodConfig" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "update_vod_config" + }, + "description": "Sample for UpdateVodConfig", + "file": "videostitcher_v1_generated_video_stitcher_service_update_vod_config_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "videostitcher_v1_generated_VideoStitcherService_UpdateVodConfig_sync", + "segments": [ + { + "end": 59, + "start": 27, + "type": "FULL" + }, + { + "end": 59, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 49, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 56, + "start": 50, 
+ "type": "REQUEST_EXECUTION" + }, + { + "end": 60, + "start": 57, + "type": "RESPONSE_HANDLING" + } + ], + "title": "videostitcher_v1_generated_video_stitcher_service_update_vod_config_sync.py" } ] } diff --git a/packages/google-cloud-video-stitcher/samples/generated_samples/videostitcher_v1_generated_video_stitcher_service_create_vod_config_async.py b/packages/google-cloud-video-stitcher/samples/generated_samples/videostitcher_v1_generated_video_stitcher_service_create_vod_config_async.py new file mode 100644 index 000000000000..ba17649a88a9 --- /dev/null +++ b/packages/google-cloud-video-stitcher/samples/generated_samples/videostitcher_v1_generated_video_stitcher_service_create_vod_config_async.py @@ -0,0 +1,62 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateVodConfig +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-video-stitcher + + +# [START videostitcher_v1_generated_VideoStitcherService_CreateVodConfig_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud.video import stitcher_v1 + + +async def sample_create_vod_config(): + # Create a client + client = stitcher_v1.VideoStitcherServiceAsyncClient() + + # Initialize request argument(s) + vod_config = stitcher_v1.VodConfig() + vod_config.source_uri = "source_uri_value" + vod_config.ad_tag_uri = "ad_tag_uri_value" + + request = stitcher_v1.CreateVodConfigRequest( + parent="parent_value", + vod_config_id="vod_config_id_value", + vod_config=vod_config, + ) + + # Make the request + operation = client.create_vod_config(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END videostitcher_v1_generated_VideoStitcherService_CreateVodConfig_async] diff --git a/packages/google-cloud-video-stitcher/samples/generated_samples/videostitcher_v1_generated_video_stitcher_service_create_vod_config_sync.py b/packages/google-cloud-video-stitcher/samples/generated_samples/videostitcher_v1_generated_video_stitcher_service_create_vod_config_sync.py new file mode 100644 index 000000000000..10b61cd0453a --- /dev/null +++ b/packages/google-cloud-video-stitcher/samples/generated_samples/videostitcher_v1_generated_video_stitcher_service_create_vod_config_sync.py @@ -0,0 +1,62 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateVodConfig +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-video-stitcher + + +# [START videostitcher_v1_generated_VideoStitcherService_CreateVodConfig_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud.video import stitcher_v1 + + +def sample_create_vod_config(): + # Create a client + client = stitcher_v1.VideoStitcherServiceClient() + + # Initialize request argument(s) + vod_config = stitcher_v1.VodConfig() + vod_config.source_uri = "source_uri_value" + vod_config.ad_tag_uri = "ad_tag_uri_value" + + request = stitcher_v1.CreateVodConfigRequest( + parent="parent_value", + vod_config_id="vod_config_id_value", + vod_config=vod_config, + ) + + # Make the request + operation = client.create_vod_config(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END videostitcher_v1_generated_VideoStitcherService_CreateVodConfig_sync] diff --git a/packages/google-cloud-video-stitcher/samples/generated_samples/videostitcher_v1_generated_video_stitcher_service_create_vod_session_async.py b/packages/google-cloud-video-stitcher/samples/generated_samples/videostitcher_v1_generated_video_stitcher_service_create_vod_session_async.py index 50716c597dcb..da083ca98022 100644 --- a/packages/google-cloud-video-stitcher/samples/generated_samples/videostitcher_v1_generated_video_stitcher_service_create_vod_session_async.py +++ b/packages/google-cloud-video-stitcher/samples/generated_samples/videostitcher_v1_generated_video_stitcher_service_create_vod_session_async.py @@ -40,8 +40,6 @@ async def sample_create_vod_session(): # Initialize request argument(s) vod_session = stitcher_v1.VodSession() - vod_session.source_uri = "source_uri_value" - vod_session.ad_tag_uri = "ad_tag_uri_value" vod_session.ad_tracking = "SERVER" request = stitcher_v1.CreateVodSessionRequest( diff --git a/packages/google-cloud-video-stitcher/samples/generated_samples/videostitcher_v1_generated_video_stitcher_service_create_vod_session_sync.py b/packages/google-cloud-video-stitcher/samples/generated_samples/videostitcher_v1_generated_video_stitcher_service_create_vod_session_sync.py index 2084954ce9eb..00d1a7b202f3 100644 --- a/packages/google-cloud-video-stitcher/samples/generated_samples/videostitcher_v1_generated_video_stitcher_service_create_vod_session_sync.py +++ b/packages/google-cloud-video-stitcher/samples/generated_samples/videostitcher_v1_generated_video_stitcher_service_create_vod_session_sync.py @@ -40,8 +40,6 @@ def sample_create_vod_session(): # Initialize request argument(s) vod_session = stitcher_v1.VodSession() - vod_session.source_uri = "source_uri_value" - vod_session.ad_tag_uri = "ad_tag_uri_value" vod_session.ad_tracking = "SERVER" request = stitcher_v1.CreateVodSessionRequest( diff --git a/packages/google-cloud-video-stitcher/samples/generated_samples/videostitcher_v1_generated_video_stitcher_service_delete_vod_config_async.py b/packages/google-cloud-video-stitcher/samples/generated_samples/videostitcher_v1_generated_video_stitcher_service_delete_vod_config_async.py new file mode 100644 index 000000000000..e28cfc5ec82d --- /dev/null +++ b/packages/google-cloud-video-stitcher/samples/generated_samples/videostitcher_v1_generated_video_stitcher_service_delete_vod_config_async.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteVodConfig +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-video-stitcher + + +# [START videostitcher_v1_generated_VideoStitcherService_DeleteVodConfig_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud.video import stitcher_v1 + + +async def sample_delete_vod_config(): + # Create a client + client = stitcher_v1.VideoStitcherServiceAsyncClient() + + # Initialize request argument(s) + request = stitcher_v1.DeleteVodConfigRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_vod_config(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END videostitcher_v1_generated_VideoStitcherService_DeleteVodConfig_async] diff --git a/packages/google-cloud-video-stitcher/samples/generated_samples/videostitcher_v1_generated_video_stitcher_service_delete_vod_config_sync.py b/packages/google-cloud-video-stitcher/samples/generated_samples/videostitcher_v1_generated_video_stitcher_service_delete_vod_config_sync.py new file mode 100644 index 000000000000..f86a89b25190 --- /dev/null +++ b/packages/google-cloud-video-stitcher/samples/generated_samples/videostitcher_v1_generated_video_stitcher_service_delete_vod_config_sync.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteVodConfig +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-video-stitcher + + +# [START videostitcher_v1_generated_VideoStitcherService_DeleteVodConfig_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud.video import stitcher_v1 + + +def sample_delete_vod_config(): + # Create a client + client = stitcher_v1.VideoStitcherServiceClient() + + # Initialize request argument(s) + request = stitcher_v1.DeleteVodConfigRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_vod_config(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END videostitcher_v1_generated_VideoStitcherService_DeleteVodConfig_sync] diff --git a/packages/google-cloud-video-stitcher/samples/generated_samples/videostitcher_v1_generated_video_stitcher_service_get_vod_config_async.py b/packages/google-cloud-video-stitcher/samples/generated_samples/videostitcher_v1_generated_video_stitcher_service_get_vod_config_async.py new file mode 100644 index 000000000000..bb8a886f7809 --- /dev/null +++ b/packages/google-cloud-video-stitcher/samples/generated_samples/videostitcher_v1_generated_video_stitcher_service_get_vod_config_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetVodConfig +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-video-stitcher + + +# [START videostitcher_v1_generated_VideoStitcherService_GetVodConfig_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud.video import stitcher_v1 + + +async def sample_get_vod_config(): + # Create a client + client = stitcher_v1.VideoStitcherServiceAsyncClient() + + # Initialize request argument(s) + request = stitcher_v1.GetVodConfigRequest( + name="name_value", + ) + + # Make the request + response = await client.get_vod_config(request=request) + + # Handle the response + print(response) + +# [END videostitcher_v1_generated_VideoStitcherService_GetVodConfig_async] diff --git a/packages/google-cloud-video-stitcher/samples/generated_samples/videostitcher_v1_generated_video_stitcher_service_get_vod_config_sync.py b/packages/google-cloud-video-stitcher/samples/generated_samples/videostitcher_v1_generated_video_stitcher_service_get_vod_config_sync.py new file mode 100644 index 000000000000..df0c23bf65d7 --- /dev/null +++ b/packages/google-cloud-video-stitcher/samples/generated_samples/videostitcher_v1_generated_video_stitcher_service_get_vod_config_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetVodConfig +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-video-stitcher + + +# [START videostitcher_v1_generated_VideoStitcherService_GetVodConfig_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud.video import stitcher_v1 + + +def sample_get_vod_config(): + # Create a client + client = stitcher_v1.VideoStitcherServiceClient() + + # Initialize request argument(s) + request = stitcher_v1.GetVodConfigRequest( + name="name_value", + ) + + # Make the request + response = client.get_vod_config(request=request) + + # Handle the response + print(response) + +# [END videostitcher_v1_generated_VideoStitcherService_GetVodConfig_sync] diff --git a/packages/google-cloud-video-stitcher/samples/generated_samples/videostitcher_v1_generated_video_stitcher_service_list_vod_configs_async.py b/packages/google-cloud-video-stitcher/samples/generated_samples/videostitcher_v1_generated_video_stitcher_service_list_vod_configs_async.py new file mode 100644 index 000000000000..f2e09b660520 --- /dev/null +++ b/packages/google-cloud-video-stitcher/samples/generated_samples/videostitcher_v1_generated_video_stitcher_service_list_vod_configs_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListVodConfigs +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-video-stitcher + + +# [START videostitcher_v1_generated_VideoStitcherService_ListVodConfigs_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud.video import stitcher_v1 + + +async def sample_list_vod_configs(): + # Create a client + client = stitcher_v1.VideoStitcherServiceAsyncClient() + + # Initialize request argument(s) + request = stitcher_v1.ListVodConfigsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_vod_configs(request=request) + + # Handle the response + async for response in page_result: + print(response) + +# [END videostitcher_v1_generated_VideoStitcherService_ListVodConfigs_async] diff --git a/packages/google-cloud-video-stitcher/samples/generated_samples/videostitcher_v1_generated_video_stitcher_service_list_vod_configs_sync.py b/packages/google-cloud-video-stitcher/samples/generated_samples/videostitcher_v1_generated_video_stitcher_service_list_vod_configs_sync.py new file mode 100644 index 000000000000..69c34e2dd7e4 --- /dev/null +++ b/packages/google-cloud-video-stitcher/samples/generated_samples/videostitcher_v1_generated_video_stitcher_service_list_vod_configs_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListVodConfigs +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-video-stitcher + + +# [START videostitcher_v1_generated_VideoStitcherService_ListVodConfigs_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud.video import stitcher_v1 + + +def sample_list_vod_configs(): + # Create a client + client = stitcher_v1.VideoStitcherServiceClient() + + # Initialize request argument(s) + request = stitcher_v1.ListVodConfigsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_vod_configs(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END videostitcher_v1_generated_VideoStitcherService_ListVodConfigs_sync] diff --git a/packages/google-cloud-video-stitcher/samples/generated_samples/videostitcher_v1_generated_video_stitcher_service_update_live_config_async.py b/packages/google-cloud-video-stitcher/samples/generated_samples/videostitcher_v1_generated_video_stitcher_service_update_live_config_async.py new file mode 100644 index 000000000000..0fa5d6a31dac --- /dev/null +++ b/packages/google-cloud-video-stitcher/samples/generated_samples/videostitcher_v1_generated_video_stitcher_service_update_live_config_async.py @@ -0,0 +1,60 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateLiveConfig +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-video-stitcher + + +# [START videostitcher_v1_generated_VideoStitcherService_UpdateLiveConfig_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud.video import stitcher_v1 + + +async def sample_update_live_config(): + # Create a client + client = stitcher_v1.VideoStitcherServiceAsyncClient() + + # Initialize request argument(s) + live_config = stitcher_v1.LiveConfig() + live_config.source_uri = "source_uri_value" + live_config.ad_tracking = "SERVER" + + request = stitcher_v1.UpdateLiveConfigRequest( + live_config=live_config, + ) + + # Make the request + operation = client.update_live_config(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END videostitcher_v1_generated_VideoStitcherService_UpdateLiveConfig_async] diff --git a/packages/google-cloud-video-stitcher/samples/generated_samples/videostitcher_v1_generated_video_stitcher_service_update_live_config_sync.py b/packages/google-cloud-video-stitcher/samples/generated_samples/videostitcher_v1_generated_video_stitcher_service_update_live_config_sync.py new file mode 100644 index 000000000000..1998800a0466 --- /dev/null +++ b/packages/google-cloud-video-stitcher/samples/generated_samples/videostitcher_v1_generated_video_stitcher_service_update_live_config_sync.py @@ -0,0 +1,60 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateLiveConfig +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-video-stitcher + + +# [START videostitcher_v1_generated_VideoStitcherService_UpdateLiveConfig_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud.video import stitcher_v1 + + +def sample_update_live_config(): + # Create a client + client = stitcher_v1.VideoStitcherServiceClient() + + # Initialize request argument(s) + live_config = stitcher_v1.LiveConfig() + live_config.source_uri = "source_uri_value" + live_config.ad_tracking = "SERVER" + + request = stitcher_v1.UpdateLiveConfigRequest( + live_config=live_config, + ) + + # Make the request + operation = client.update_live_config(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END videostitcher_v1_generated_VideoStitcherService_UpdateLiveConfig_sync] diff --git a/packages/google-cloud-video-stitcher/samples/generated_samples/videostitcher_v1_generated_video_stitcher_service_update_vod_config_async.py b/packages/google-cloud-video-stitcher/samples/generated_samples/videostitcher_v1_generated_video_stitcher_service_update_vod_config_async.py new file mode 100644 index 000000000000..c58a48425731 --- /dev/null +++ b/packages/google-cloud-video-stitcher/samples/generated_samples/videostitcher_v1_generated_video_stitcher_service_update_vod_config_async.py @@ -0,0 +1,60 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateVodConfig +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-video-stitcher + + +# [START videostitcher_v1_generated_VideoStitcherService_UpdateVodConfig_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud.video import stitcher_v1 + + +async def sample_update_vod_config(): + # Create a client + client = stitcher_v1.VideoStitcherServiceAsyncClient() + + # Initialize request argument(s) + vod_config = stitcher_v1.VodConfig() + vod_config.source_uri = "source_uri_value" + vod_config.ad_tag_uri = "ad_tag_uri_value" + + request = stitcher_v1.UpdateVodConfigRequest( + vod_config=vod_config, + ) + + # Make the request + operation = client.update_vod_config(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END videostitcher_v1_generated_VideoStitcherService_UpdateVodConfig_async] diff --git a/packages/google-cloud-video-stitcher/samples/generated_samples/videostitcher_v1_generated_video_stitcher_service_update_vod_config_sync.py b/packages/google-cloud-video-stitcher/samples/generated_samples/videostitcher_v1_generated_video_stitcher_service_update_vod_config_sync.py new file mode 100644 index 000000000000..34e19e5e3e04 --- /dev/null +++ b/packages/google-cloud-video-stitcher/samples/generated_samples/videostitcher_v1_generated_video_stitcher_service_update_vod_config_sync.py @@ -0,0 +1,60 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateVodConfig +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-video-stitcher + + +# [START videostitcher_v1_generated_VideoStitcherService_UpdateVodConfig_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud.video import stitcher_v1 + + +def sample_update_vod_config(): + # Create a client + client = stitcher_v1.VideoStitcherServiceClient() + + # Initialize request argument(s) + vod_config = stitcher_v1.VodConfig() + vod_config.source_uri = "source_uri_value" + vod_config.ad_tag_uri = "ad_tag_uri_value" + + request = stitcher_v1.UpdateVodConfigRequest( + vod_config=vod_config, + ) + + # Make the request + operation = client.update_vod_config(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END videostitcher_v1_generated_VideoStitcherService_UpdateVodConfig_sync] diff --git a/packages/google-cloud-video-stitcher/scripts/fixup_stitcher_v1_keywords.py b/packages/google-cloud-video-stitcher/scripts/fixup_stitcher_v1_keywords.py index 506fe24de104..18b7983c3985 100644 --- a/packages/google-cloud-video-stitcher/scripts/fixup_stitcher_v1_keywords.py +++ b/packages/google-cloud-video-stitcher/scripts/fixup_stitcher_v1_keywords.py @@ -43,16 +43,19 @@ class stitcherCallTransformer(cst.CSTTransformer): 'create_live_config': ('parent', 'live_config_id', 'live_config', 'request_id', ), 'create_live_session': ('parent', 'live_session', ), 'create_slate': ('parent', 'slate_id', 'slate', 'request_id', ), + 'create_vod_config': ('parent', 'vod_config_id', 'vod_config', 'request_id', ), 'create_vod_session': ('parent', 'vod_session', ), 'delete_cdn_key': ('name', ), 'delete_live_config': ('name', ), 'delete_slate': ('name', ), + 'delete_vod_config': ('name', ), 'get_cdn_key': ('name', ), 'get_live_ad_tag_detail': ('name', ), 'get_live_config': ('name', ), 'get_live_session': ('name', ), 'get_slate': ('name', ), 'get_vod_ad_tag_detail': ('name', ), + 'get_vod_config': ('name', ), 'get_vod_session': ('name', ), 'get_vod_stitch_detail': ('name', ), 'list_cdn_keys': ('parent', 'page_size', 'page_token', 'filter', 'order_by', ), @@ -60,9 +63,12 @@ class stitcherCallTransformer(cst.CSTTransformer): 'list_live_configs': ('parent', 'page_size', 'page_token', 'filter', 'order_by', ), 'list_slates': ('parent', 'page_size', 'page_token', 'filter', 'order_by', ), 'list_vod_ad_tag_details': ('parent', 'page_size', 'page_token', ), + 'list_vod_configs': ('parent', 'page_size', 'page_token', 'filter', 'order_by', ), 'list_vod_stitch_details': ('parent', 'page_size', 'page_token', ), 'update_cdn_key': ('cdn_key', 'update_mask', ), + 'update_live_config': ('live_config', 'update_mask', ), 'update_slate': ('slate', 'update_mask', ), + 'update_vod_config': ('vod_config', 'update_mask', ), } def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode: diff --git a/packages/google-cloud-video-stitcher/tests/unit/gapic/stitcher_v1/test_video_stitcher_service.py b/packages/google-cloud-video-stitcher/tests/unit/gapic/stitcher_v1/test_video_stitcher_service.py index 479ee31d2025..54a93dba2e68 100644 --- a/packages/google-cloud-video-stitcher/tests/unit/gapic/stitcher_v1/test_video_stitcher_service.py +++ b/packages/google-cloud-video-stitcher/tests/unit/gapic/stitcher_v1/test_video_stitcher_service.py @@ -61,11 +61,13 @@ cdn_keys, companions, events, + fetch_options, live_configs, sessions, slates, stitch_details, video_stitcher_service, + vod_configs, ) @@ -1979,13 +1981,13 @@ 
def test_list_cdn_keys_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_cdn_keys(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -3290,6 +3292,7 @@ def test_create_vod_session(request_type, transport: str = "grpc"): ad_tag_uri="ad_tag_uri_value", asset_id="asset_id_value", ad_tracking=live_configs.AdTracking.CLIENT, + vod_config="vod_config_value", ) response = client.create_vod_session(request) @@ -3307,6 +3310,7 @@ def test_create_vod_session(request_type, transport: str = "grpc"): assert response.ad_tag_uri == "ad_tag_uri_value" assert response.asset_id == "asset_id_value" assert response.ad_tracking == live_configs.AdTracking.CLIENT + assert response.vod_config == "vod_config_value" def test_create_vod_session_empty_call(): @@ -3421,6 +3425,7 @@ async def test_create_vod_session_empty_call_async(): ad_tag_uri="ad_tag_uri_value", asset_id="asset_id_value", ad_tracking=live_configs.AdTracking.CLIENT, + vod_config="vod_config_value", ) ) response = await client.create_vod_session() @@ -3502,6 +3507,7 @@ async def test_create_vod_session_async( ad_tag_uri="ad_tag_uri_value", asset_id="asset_id_value", ad_tracking=live_configs.AdTracking.CLIENT, + vod_config="vod_config_value", ) ) response = await client.create_vod_session(request) @@ -3520,6 +3526,7 @@ async def test_create_vod_session_async( assert response.ad_tag_uri == "ad_tag_uri_value" assert response.asset_id == "asset_id_value" assert response.ad_tracking == live_configs.AdTracking.CLIENT + assert response.vod_config == "vod_config_value" @pytest.mark.asyncio @@ -3711,6 +3718,7 @@ def test_get_vod_session(request_type, transport: str = "grpc"): ad_tag_uri="ad_tag_uri_value", asset_id="asset_id_value", ad_tracking=live_configs.AdTracking.CLIENT, + vod_config="vod_config_value", ) response = client.get_vod_session(request) @@ -3728,6 +3736,7 @@ def test_get_vod_session(request_type, transport: str = "grpc"): assert response.ad_tag_uri == "ad_tag_uri_value" assert response.asset_id == "asset_id_value" assert response.ad_tracking == live_configs.AdTracking.CLIENT + assert response.vod_config == "vod_config_value" def test_get_vod_session_empty_call(): @@ -3832,6 +3841,7 @@ async def test_get_vod_session_empty_call_async(): ad_tag_uri="ad_tag_uri_value", asset_id="asset_id_value", ad_tracking=live_configs.AdTracking.CLIENT, + vod_config="vod_config_value", ) ) response = await client.get_vod_session() @@ -3911,6 +3921,7 @@ async def test_get_vod_session_async( ad_tag_uri="ad_tag_uri_value", asset_id="asset_id_value", ad_tracking=live_configs.AdTracking.CLIENT, + vod_config="vod_config_value", ) ) response = await client.get_vod_session(request) @@ -3929,6 +3940,7 @@ async def test_get_vod_session_async( assert response.ad_tag_uri == "ad_tag_uri_value" assert response.asset_id == "asset_id_value" assert response.ad_tracking == live_configs.AdTracking.CLIENT + assert response.vod_config == "vod_config_value" @pytest.mark.asyncio @@ -4509,13 +4521,13 @@ def test_list_vod_stitch_details_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = 
client.list_vod_stitch_details(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -5495,13 +5507,13 @@ def test_list_vod_ad_tag_details_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_vod_ad_tag_details(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -6481,13 +6493,13 @@ def test_list_live_ad_tag_details_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_live_ad_tag_details(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -7845,13 +7857,13 @@ def test_list_slates_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_slates(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -9138,6 +9150,7 @@ def test_create_live_session(request_type, transport: str = "grpc"): name="name_value", play_uri="play_uri_value", live_config="live_config_value", + ad_tracking=live_configs.AdTracking.CLIENT, ) response = client.create_live_session(request) @@ -9152,6 +9165,7 @@ def test_create_live_session(request_type, transport: str = "grpc"): assert response.name == "name_value" assert response.play_uri == "play_uri_value" assert response.live_config == "live_config_value" + assert response.ad_tracking == live_configs.AdTracking.CLIENT def test_create_live_session_empty_call(): @@ -9263,6 +9277,7 @@ async def test_create_live_session_empty_call_async(): name="name_value", play_uri="play_uri_value", live_config="live_config_value", + ad_tracking=live_configs.AdTracking.CLIENT, ) ) response = await client.create_live_session() @@ -9341,6 +9356,7 @@ async def test_create_live_session_async( name="name_value", play_uri="play_uri_value", live_config="live_config_value", + ad_tracking=live_configs.AdTracking.CLIENT, ) ) response = await client.create_live_session(request) @@ -9356,6 +9372,7 @@ async def test_create_live_session_async( assert response.name == "name_value" assert response.play_uri == "play_uri_value" assert response.live_config == "live_config_value" + assert response.ad_tracking == live_configs.AdTracking.CLIENT @pytest.mark.asyncio @@ -9548,6 +9565,7 @@ def test_get_live_session(request_type, transport: str = "grpc"): name="name_value", play_uri="play_uri_value", live_config="live_config_value", + ad_tracking=live_configs.AdTracking.CLIENT, ) response = client.get_live_session(request) @@ -9562,6 +9580,7 @@ def test_get_live_session(request_type, transport: str = "grpc"): assert response.name == "name_value" assert response.play_uri == "play_uri_value" assert response.live_config == "live_config_value" + assert response.ad_tracking == live_configs.AdTracking.CLIENT def test_get_live_session_empty_call(): @@ 
-9665,6 +9684,7 @@ async def test_get_live_session_empty_call_async(): name="name_value", play_uri="play_uri_value", live_config="live_config_value", + ad_tracking=live_configs.AdTracking.CLIENT, ) ) response = await client.get_live_session() @@ -9741,6 +9761,7 @@ async def test_get_live_session_async( name="name_value", play_uri="play_uri_value", live_config="live_config_value", + ad_tracking=live_configs.AdTracking.CLIENT, ) ) response = await client.get_live_session(request) @@ -9756,6 +9777,7 @@ async def test_get_live_session_async( assert response.name == "name_value" assert response.play_uri == "play_uri_value" assert response.live_config == "live_config_value" + assert response.ad_tracking == live_configs.AdTracking.CLIENT @pytest.mark.asyncio @@ -10762,13 +10784,13 @@ def test_list_live_configs_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_live_configs(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -11717,132 +11739,2688 @@ async def test_delete_live_config_flattened_error_async(): ) -def test_credentials_transport_error(): - # It is an error to provide credentials and a transport instance. - transport = transports.VideoStitcherServiceGrpcTransport( +@pytest.mark.parametrize( + "request_type", + [ + video_stitcher_service.UpdateLiveConfigRequest, + dict, + ], +) +def test_update_live_config(request_type, transport: str = "grpc"): + client = VideoStitcherServiceClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - with pytest.raises(ValueError): - client = VideoStitcherServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - # It is an error to provide a credentials file and a transport instance. - transport = transports.VideoStitcherServiceGrpcTransport( + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_live_config), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.update_live_config(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = video_stitcher_service.UpdateLiveConfigRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_update_live_config_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = VideoStitcherServiceClient( credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) - with pytest.raises(ValueError): - client = VideoStitcherServiceClient( - client_options={"credentials_file": "credentials.json"}, - transport=transport, + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.update_live_config), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. ) + client.update_live_config() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == video_stitcher_service.UpdateLiveConfigRequest() - # It is an error to provide an api_key and a transport instance. - transport = transports.VideoStitcherServiceGrpcTransport( + +def test_update_live_config_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = VideoStitcherServiceClient( credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) - options = client_options.ClientOptions() - options.api_key = "api_key" - with pytest.raises(ValueError): - client = VideoStitcherServiceClient( - client_options=options, - transport=transport, + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = video_stitcher_service.UpdateLiveConfigRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_live_config), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. ) + client.update_live_config(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == video_stitcher_service.UpdateLiveConfigRequest() - # It is an error to provide an api_key and a credential. - options = client_options.ClientOptions() - options.api_key = "api_key" - with pytest.raises(ValueError): + +def test_update_live_config_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: client = VideoStitcherServiceClient( - client_options=options, credentials=ga_credentials.AnonymousCredentials() + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) - # It is an error to provide scopes and a transport instance. - transport = transports.VideoStitcherServiceGrpcTransport( + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.update_live_config in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.update_live_config + ] = mock_rpc + request = {} + client.update_live_config(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.update_live_config(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_update_live_config_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = VideoStitcherServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", ) - with pytest.raises(ValueError): - client = VideoStitcherServiceClient( - client_options={"scopes": ["1", "2"]}, + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_live_config), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.update_live_config() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == video_stitcher_service.UpdateLiveConfigRequest() + + +@pytest.mark.asyncio +async def test_update_live_config_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = VideoStitcherServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() -def test_transport_instance(): - # A client may be instantiated with a custom transport instance. - transport = transports.VideoStitcherServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - client = VideoStitcherServiceClient(transport=transport) - assert client.transport is transport + # Ensure method has been cached + assert ( + client._client._transport.update_live_config + in client._client._transport._wrapped_methods + ) + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) -def test_transport_get_channel(): - # A client may be instantiated with a custom transport instance. - transport = transports.VideoStitcherServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - channel = transport.grpc_channel - assert channel + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[ + client._client._transport.update_live_config + ] = mock_object - transport = transports.VideoStitcherServiceGrpcAsyncIOTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - channel = transport.grpc_channel - assert channel + request = {} + await client.update_live_config(request) + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 -@pytest.mark.parametrize( - "transport_class", - [ - transports.VideoStitcherServiceGrpcTransport, - transports.VideoStitcherServiceGrpcAsyncIOTransport, - ], -) -def test_transport_adc(transport_class): - # Test default credentials are used if not provided. 
- with mock.patch.object(google.auth, "default") as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class() - adc.assert_called_once() + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + await client.update_live_config(request) -@pytest.mark.parametrize( - "transport_name", - [ - "grpc", - ], -) -def test_transport_kind(transport_name): - transport = VideoStitcherServiceClient.get_transport_class(transport_name)( - credentials=ga_credentials.AnonymousCredentials(), - ) - assert transport.kind == transport_name + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 -def test_transport_grpc_default(): - # A client should use the gRPC transport by default. - client = VideoStitcherServiceClient( +@pytest.mark.asyncio +async def test_update_live_config_async( + transport: str = "grpc_asyncio", + request_type=video_stitcher_service.UpdateLiveConfigRequest, +): + client = VideoStitcherServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), - ) - assert isinstance( - client.transport, - transports.VideoStitcherServiceGrpcTransport, + transport=transport, ) + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() -def test_video_stitcher_service_base_transport_error(): - # Passing both a credentials object and credentials_file should raise an error - with pytest.raises(core_exceptions.DuplicateCredentialArgs): - transport = transports.VideoStitcherServiceTransport( - credentials=ga_credentials.AnonymousCredentials(), - credentials_file="credentials.json", + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_live_config), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") ) + response = await client.update_live_config(request) - -def test_video_stitcher_service_base_transport(): - # Instantiate the base transport. + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = video_stitcher_service.UpdateLiveConfigRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_update_live_config_async_from_dict(): + await test_update_live_config_async(request_type=dict) + + +def test_update_live_config_field_headers(): + client = VideoStitcherServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = video_stitcher_service.UpdateLiveConfigRequest() + + request.live_config.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_live_config), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.update_live_config(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "live_config.name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_update_live_config_field_headers_async(): + client = VideoStitcherServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = video_stitcher_service.UpdateLiveConfigRequest() + + request.live_config.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_live_config), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.update_live_config(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "live_config.name=name_value", + ) in kw["metadata"] + + +def test_update_live_config_flattened(): + client = VideoStitcherServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_live_config), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.update_live_config( + live_config=live_configs.LiveConfig(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].live_config + mock_val = live_configs.LiveConfig(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + assert arg == mock_val + + +def test_update_live_config_flattened_error(): + client = VideoStitcherServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_live_config( + video_stitcher_service.UpdateLiveConfigRequest(), + live_config=live_configs.LiveConfig(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +@pytest.mark.asyncio +async def test_update_live_config_flattened_async(): + client = VideoStitcherServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_live_config), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = operations_pb2.Operation(name="operations/op") + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.update_live_config( + live_config=live_configs.LiveConfig(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].live_config + mock_val = live_configs.LiveConfig(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_update_live_config_flattened_error_async(): + client = VideoStitcherServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.update_live_config( + video_stitcher_service.UpdateLiveConfigRequest(), + live_config=live_configs.LiveConfig(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +@pytest.mark.parametrize( + "request_type", + [ + video_stitcher_service.CreateVodConfigRequest, + dict, + ], +) +def test_create_vod_config(request_type, transport: str = "grpc"): + client = VideoStitcherServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_vod_config), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.create_vod_config(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = video_stitcher_service.CreateVodConfigRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_create_vod_config_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = VideoStitcherServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_vod_config), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.create_vod_config() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == video_stitcher_service.CreateVodConfigRequest() + + +def test_create_vod_config_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. 
+ client = VideoStitcherServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = video_stitcher_service.CreateVodConfigRequest( + parent="parent_value", + vod_config_id="vod_config_id_value", + request_id="request_id_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_vod_config), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.create_vod_config(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == video_stitcher_service.CreateVodConfigRequest( + parent="parent_value", + vod_config_id="vod_config_id_value", + request_id="request_id_value", + ) + + +def test_create_vod_config_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = VideoStitcherServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.create_vod_config in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.create_vod_config + ] = mock_rpc + request = {} + client.create_vod_config(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.create_vod_config(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_create_vod_config_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = VideoStitcherServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_vod_config), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.create_vod_config() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == video_stitcher_service.CreateVodConfigRequest() + + +@pytest.mark.asyncio +async def test_create_vod_config_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = VideoStitcherServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.create_vod_config + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[ + client._client._transport.create_vod_config + ] = mock_object + + request = {} + await client.create_vod_config(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.create_vod_config(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + +@pytest.mark.asyncio +async def test_create_vod_config_async( + transport: str = "grpc_asyncio", + request_type=video_stitcher_service.CreateVodConfigRequest, +): + client = VideoStitcherServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_vod_config), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.create_vod_config(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = video_stitcher_service.CreateVodConfigRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_create_vod_config_async_from_dict(): + await test_create_vod_config_async(request_type=dict) + + +def test_create_vod_config_field_headers(): + client = VideoStitcherServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = video_stitcher_service.CreateVodConfigRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.create_vod_config), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.create_vod_config(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_create_vod_config_field_headers_async(): + client = VideoStitcherServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = video_stitcher_service.CreateVodConfigRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_vod_config), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.create_vod_config(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_create_vod_config_flattened(): + client = VideoStitcherServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_vod_config), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.create_vod_config( + parent="parent_value", + vod_config=vod_configs.VodConfig(name="name_value"), + vod_config_id="vod_config_id_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].vod_config + mock_val = vod_configs.VodConfig(name="name_value") + assert arg == mock_val + arg = args[0].vod_config_id + mock_val = "vod_config_id_value" + assert arg == mock_val + + +def test_create_vod_config_flattened_error(): + client = VideoStitcherServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_vod_config( + video_stitcher_service.CreateVodConfigRequest(), + parent="parent_value", + vod_config=vod_configs.VodConfig(name="name_value"), + vod_config_id="vod_config_id_value", + ) + + +@pytest.mark.asyncio +async def test_create_vod_config_flattened_async(): + client = VideoStitcherServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.create_vod_config), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.create_vod_config( + parent="parent_value", + vod_config=vod_configs.VodConfig(name="name_value"), + vod_config_id="vod_config_id_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].vod_config + mock_val = vod_configs.VodConfig(name="name_value") + assert arg == mock_val + arg = args[0].vod_config_id + mock_val = "vod_config_id_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_create_vod_config_flattened_error_async(): + client = VideoStitcherServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.create_vod_config( + video_stitcher_service.CreateVodConfigRequest(), + parent="parent_value", + vod_config=vod_configs.VodConfig(name="name_value"), + vod_config_id="vod_config_id_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + video_stitcher_service.ListVodConfigsRequest, + dict, + ], +) +def test_list_vod_configs(request_type, transport: str = "grpc"): + client = VideoStitcherServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_vod_configs), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = video_stitcher_service.ListVodConfigsResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + response = client.list_vod_configs(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = video_stitcher_service.ListVodConfigsRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListVodConfigsPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +def test_list_vod_configs_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = VideoStitcherServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_vod_configs), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.list_vod_configs() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == video_stitcher_service.ListVodConfigsRequest() + + +def test_list_vod_configs_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = VideoStitcherServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = video_stitcher_service.ListVodConfigsRequest( + parent="parent_value", + page_token="page_token_value", + filter="filter_value", + order_by="order_by_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_vod_configs), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.list_vod_configs(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == video_stitcher_service.ListVodConfigsRequest( + parent="parent_value", + page_token="page_token_value", + filter="filter_value", + order_by="order_by_value", + ) + + +def test_list_vod_configs_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = VideoStitcherServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_vod_configs in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.list_vod_configs + ] = mock_rpc + request = {} + client.list_vod_configs(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_vod_configs(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_list_vod_configs_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = VideoStitcherServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_vod_configs), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + video_stitcher_service.ListVodConfigsResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + ) + response = await client.list_vod_configs() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == video_stitcher_service.ListVodConfigsRequest() + + +@pytest.mark.asyncio +async def test_list_vod_configs_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = VideoStitcherServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.list_vod_configs + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[ + client._client._transport.list_vod_configs + ] = mock_object + + request = {} + await client.list_vod_configs(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + await client.list_vod_configs(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + +@pytest.mark.asyncio +async def test_list_vod_configs_async( + transport: str = "grpc_asyncio", + request_type=video_stitcher_service.ListVodConfigsRequest, +): + client = VideoStitcherServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_vod_configs), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + video_stitcher_service.ListVodConfigsResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + ) + response = await client.list_vod_configs(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = video_stitcher_service.ListVodConfigsRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListVodConfigsAsyncPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +@pytest.mark.asyncio +async def test_list_vod_configs_async_from_dict(): + await test_list_vod_configs_async(request_type=dict) + + +def test_list_vod_configs_field_headers(): + client = VideoStitcherServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = video_stitcher_service.ListVodConfigsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_vod_configs), "__call__") as call: + call.return_value = video_stitcher_service.ListVodConfigsResponse() + client.list_vod_configs(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_vod_configs_field_headers_async(): + client = VideoStitcherServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = video_stitcher_service.ListVodConfigsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_vod_configs), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + video_stitcher_service.ListVodConfigsResponse() + ) + await client.list_vod_configs(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_list_vod_configs_flattened(): + client = VideoStitcherServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_vod_configs), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = video_stitcher_service.ListVodConfigsResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_vod_configs( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +def test_list_vod_configs_flattened_error(): + client = VideoStitcherServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_vod_configs( + video_stitcher_service.ListVodConfigsRequest(), + parent="parent_value", + ) + + +@pytest.mark.asyncio +async def test_list_vod_configs_flattened_async(): + client = VideoStitcherServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_vod_configs), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = video_stitcher_service.ListVodConfigsResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + video_stitcher_service.ListVodConfigsResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_vod_configs( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_list_vod_configs_flattened_error_async(): + client = VideoStitcherServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.list_vod_configs( + video_stitcher_service.ListVodConfigsRequest(), + parent="parent_value", + ) + + +def test_list_vod_configs_pager(transport_name: str = "grpc"): + client = VideoStitcherServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_vod_configs), "__call__") as call: + # Set the response to a series of pages. + call.side_effect = ( + video_stitcher_service.ListVodConfigsResponse( + vod_configs=[ + vod_configs.VodConfig(), + vod_configs.VodConfig(), + vod_configs.VodConfig(), + ], + next_page_token="abc", + ), + video_stitcher_service.ListVodConfigsResponse( + vod_configs=[], + next_page_token="def", + ), + video_stitcher_service.ListVodConfigsResponse( + vod_configs=[ + vod_configs.VodConfig(), + ], + next_page_token="ghi", + ), + video_stitcher_service.ListVodConfigsResponse( + vod_configs=[ + vod_configs.VodConfig(), + vod_configs.VodConfig(), + ], + ), + RuntimeError, + ) + + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + ) + pager = client.list_vod_configs(request={}) + + assert pager._metadata == expected_metadata + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, vod_configs.VodConfig) for i in results) + + +def test_list_vod_configs_pages(transport_name: str = "grpc"): + client = VideoStitcherServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_vod_configs), "__call__") as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + video_stitcher_service.ListVodConfigsResponse( + vod_configs=[ + vod_configs.VodConfig(), + vod_configs.VodConfig(), + vod_configs.VodConfig(), + ], + next_page_token="abc", + ), + video_stitcher_service.ListVodConfigsResponse( + vod_configs=[], + next_page_token="def", + ), + video_stitcher_service.ListVodConfigsResponse( + vod_configs=[ + vod_configs.VodConfig(), + ], + next_page_token="ghi", + ), + video_stitcher_service.ListVodConfigsResponse( + vod_configs=[ + vod_configs.VodConfig(), + vod_configs.VodConfig(), + ], + ), + RuntimeError, + ) + pages = list(client.list_vod_configs(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_list_vod_configs_async_pager(): + client = VideoStitcherServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_vod_configs), "__call__", new_callable=mock.AsyncMock + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + video_stitcher_service.ListVodConfigsResponse( + vod_configs=[ + vod_configs.VodConfig(), + vod_configs.VodConfig(), + vod_configs.VodConfig(), + ], + next_page_token="abc", + ), + video_stitcher_service.ListVodConfigsResponse( + vod_configs=[], + next_page_token="def", + ), + video_stitcher_service.ListVodConfigsResponse( + vod_configs=[ + vod_configs.VodConfig(), + ], + next_page_token="ghi", + ), + video_stitcher_service.ListVodConfigsResponse( + vod_configs=[ + vod_configs.VodConfig(), + vod_configs.VodConfig(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_vod_configs( + request={}, + ) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, vod_configs.VodConfig) for i in responses) + + +@pytest.mark.asyncio +async def test_list_vod_configs_async_pages(): + client = VideoStitcherServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_vod_configs), "__call__", new_callable=mock.AsyncMock + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + video_stitcher_service.ListVodConfigsResponse( + vod_configs=[ + vod_configs.VodConfig(), + vod_configs.VodConfig(), + vod_configs.VodConfig(), + ], + next_page_token="abc", + ), + video_stitcher_service.ListVodConfigsResponse( + vod_configs=[], + next_page_token="def", + ), + video_stitcher_service.ListVodConfigsResponse( + vod_configs=[ + vod_configs.VodConfig(), + ], + next_page_token="ghi", + ), + video_stitcher_service.ListVodConfigsResponse( + vod_configs=[ + vod_configs.VodConfig(), + vod_configs.VodConfig(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_vod_configs(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + video_stitcher_service.GetVodConfigRequest, + dict, + ], +) +def test_get_vod_config(request_type, transport: str = "grpc"): + client = VideoStitcherServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_vod_config), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = vod_configs.VodConfig( + name="name_value", + source_uri="source_uri_value", + ad_tag_uri="ad_tag_uri_value", + state=vod_configs.VodConfig.State.CREATING, + ) + response = client.get_vod_config(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = video_stitcher_service.GetVodConfigRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, vod_configs.VodConfig) + assert response.name == "name_value" + assert response.source_uri == "source_uri_value" + assert response.ad_tag_uri == "ad_tag_uri_value" + assert response.state == vod_configs.VodConfig.State.CREATING + + +def test_get_vod_config_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = VideoStitcherServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_vod_config), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.get_vod_config() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == video_stitcher_service.GetVodConfigRequest() + + +def test_get_vod_config_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. 
+ client = VideoStitcherServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = video_stitcher_service.GetVodConfigRequest( + name="name_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_vod_config), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.get_vod_config(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == video_stitcher_service.GetVodConfigRequest( + name="name_value", + ) + + +def test_get_vod_config_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = VideoStitcherServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_vod_config in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get_vod_config] = mock_rpc + request = {} + client.get_vod_config(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.get_vod_config(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_get_vod_config_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = VideoStitcherServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_vod_config), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + vod_configs.VodConfig( + name="name_value", + source_uri="source_uri_value", + ad_tag_uri="ad_tag_uri_value", + state=vod_configs.VodConfig.State.CREATING, + ) + ) + response = await client.get_vod_config() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == video_stitcher_service.GetVodConfigRequest() + + +@pytest.mark.asyncio +async def test_get_vod_config_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = VideoStitcherServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.get_vod_config + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[ + client._client._transport.get_vod_config + ] = mock_object + + request = {} + await client.get_vod_config(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + await client.get_vod_config(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + +@pytest.mark.asyncio +async def test_get_vod_config_async( + transport: str = "grpc_asyncio", + request_type=video_stitcher_service.GetVodConfigRequest, +): + client = VideoStitcherServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_vod_config), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + vod_configs.VodConfig( + name="name_value", + source_uri="source_uri_value", + ad_tag_uri="ad_tag_uri_value", + state=vod_configs.VodConfig.State.CREATING, + ) + ) + response = await client.get_vod_config(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = video_stitcher_service.GetVodConfigRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, vod_configs.VodConfig) + assert response.name == "name_value" + assert response.source_uri == "source_uri_value" + assert response.ad_tag_uri == "ad_tag_uri_value" + assert response.state == vod_configs.VodConfig.State.CREATING + + +@pytest.mark.asyncio +async def test_get_vod_config_async_from_dict(): + await test_get_vod_config_async(request_type=dict) + + +def test_get_vod_config_field_headers(): + client = VideoStitcherServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = video_stitcher_service.GetVodConfigRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_vod_config), "__call__") as call: + call.return_value = vod_configs.VodConfig() + client.get_vod_config(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_vod_config_field_headers_async(): + client = VideoStitcherServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = video_stitcher_service.GetVodConfigRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_vod_config), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + vod_configs.VodConfig() + ) + await client.get_vod_config(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_get_vod_config_flattened(): + client = VideoStitcherServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_vod_config), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = vod_configs.VodConfig() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_vod_config( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_get_vod_config_flattened_error(): + client = VideoStitcherServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get_vod_config( + video_stitcher_service.GetVodConfigRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_get_vod_config_flattened_async(): + client = VideoStitcherServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_vod_config), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = vod_configs.VodConfig() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + vod_configs.VodConfig() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_vod_config( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_get_vod_config_flattened_error_async(): + client = VideoStitcherServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.get_vod_config( + video_stitcher_service.GetVodConfigRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + video_stitcher_service.DeleteVodConfigRequest, + dict, + ], +) +def test_delete_vod_config(request_type, transport: str = "grpc"): + client = VideoStitcherServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_vod_config), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.delete_vod_config(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = video_stitcher_service.DeleteVodConfigRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_delete_vod_config_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = VideoStitcherServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_vod_config), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.delete_vod_config() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == video_stitcher_service.DeleteVodConfigRequest() + + +def test_delete_vod_config_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = VideoStitcherServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = video_stitcher_service.DeleteVodConfigRequest( + name="name_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_vod_config), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.delete_vod_config(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == video_stitcher_service.DeleteVodConfigRequest( + name="name_value", + ) + + +def test_delete_vod_config_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = VideoStitcherServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete_vod_config in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.delete_vod_config + ] = mock_rpc + request = {} + client.delete_vod_config(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete_vod_config(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_delete_vod_config_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = VideoStitcherServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_vod_config), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.delete_vod_config() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == video_stitcher_service.DeleteVodConfigRequest() + + +@pytest.mark.asyncio +async def test_delete_vod_config_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = VideoStitcherServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.delete_vod_config + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[ + client._client._transport.delete_vod_config + ] = mock_object + + request = {} + await client.delete_vod_config(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.delete_vod_config(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + +@pytest.mark.asyncio +async def test_delete_vod_config_async( + transport: str = "grpc_asyncio", + request_type=video_stitcher_service.DeleteVodConfigRequest, +): + client = VideoStitcherServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_vod_config), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.delete_vod_config(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = video_stitcher_service.DeleteVodConfigRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_delete_vod_config_async_from_dict(): + await test_delete_vod_config_async(request_type=dict) + + +def test_delete_vod_config_field_headers(): + client = VideoStitcherServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = video_stitcher_service.DeleteVodConfigRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.delete_vod_config), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.delete_vod_config(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_delete_vod_config_field_headers_async(): + client = VideoStitcherServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = video_stitcher_service.DeleteVodConfigRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_vod_config), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.delete_vod_config(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_delete_vod_config_flattened(): + client = VideoStitcherServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_vod_config), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.delete_vod_config( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_delete_vod_config_flattened_error(): + client = VideoStitcherServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_vod_config( + video_stitcher_service.DeleteVodConfigRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_delete_vod_config_flattened_async(): + client = VideoStitcherServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_vod_config), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
+ response = await client.delete_vod_config( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_delete_vod_config_flattened_error_async(): + client = VideoStitcherServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.delete_vod_config( + video_stitcher_service.DeleteVodConfigRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + video_stitcher_service.UpdateVodConfigRequest, + dict, + ], +) +def test_update_vod_config(request_type, transport: str = "grpc"): + client = VideoStitcherServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_vod_config), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.update_vod_config(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = video_stitcher_service.UpdateVodConfigRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_update_vod_config_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = VideoStitcherServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_vod_config), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.update_vod_config() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == video_stitcher_service.UpdateVodConfigRequest() + + +def test_update_vod_config_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = VideoStitcherServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = video_stitcher_service.UpdateVodConfigRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_vod_config), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.update_vod_config(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == video_stitcher_service.UpdateVodConfigRequest() + + +def test_update_vod_config_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = VideoStitcherServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.update_vod_config in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.update_vod_config + ] = mock_rpc + request = {} + client.update_vod_config(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.update_vod_config(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_update_vod_config_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = VideoStitcherServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_vod_config), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.update_vod_config() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == video_stitcher_service.UpdateVodConfigRequest() + + +@pytest.mark.asyncio +async def test_update_vod_config_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = VideoStitcherServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.update_vod_config + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[ + client._client._transport.update_vod_config + ] = mock_object + + request = {} + await client.update_vod_config(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_object.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.update_vod_config(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + +@pytest.mark.asyncio +async def test_update_vod_config_async( + transport: str = "grpc_asyncio", + request_type=video_stitcher_service.UpdateVodConfigRequest, +): + client = VideoStitcherServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_vod_config), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.update_vod_config(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = video_stitcher_service.UpdateVodConfigRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_update_vod_config_async_from_dict(): + await test_update_vod_config_async(request_type=dict) + + +def test_update_vod_config_field_headers(): + client = VideoStitcherServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = video_stitcher_service.UpdateVodConfigRequest() + + request.vod_config.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_vod_config), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.update_vod_config(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "vod_config.name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_update_vod_config_field_headers_async(): + client = VideoStitcherServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = video_stitcher_service.UpdateVodConfigRequest() + + request.vod_config.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_vod_config), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.update_vod_config(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "vod_config.name=name_value", + ) in kw["metadata"] + + +def test_update_vod_config_flattened(): + client = VideoStitcherServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_vod_config), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.update_vod_config( + vod_config=vod_configs.VodConfig(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].vod_config + mock_val = vod_configs.VodConfig(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + assert arg == mock_val + + +def test_update_vod_config_flattened_error(): + client = VideoStitcherServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_vod_config( + video_stitcher_service.UpdateVodConfigRequest(), + vod_config=vod_configs.VodConfig(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +@pytest.mark.asyncio +async def test_update_vod_config_flattened_async(): + client = VideoStitcherServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_vod_config), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.update_vod_config( + vod_config=vod_configs.VodConfig(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].vod_config + mock_val = vod_configs.VodConfig(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_update_vod_config_flattened_error_async(): + client = VideoStitcherServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.update_vod_config( + video_stitcher_service.UpdateVodConfigRequest(), + vod_config=vod_configs.VodConfig(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.VideoStitcherServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = VideoStitcherServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.VideoStitcherServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = VideoStitcherServiceClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. + transport = transports.VideoStitcherServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = VideoStitcherServiceClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = VideoStitcherServiceClient( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.VideoStitcherServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = VideoStitcherServiceClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.VideoStitcherServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = VideoStitcherServiceClient(transport=transport) + assert client.transport is transport + + +def test_transport_get_channel(): + # A client may be instantiated with a custom transport instance. + transport = transports.VideoStitcherServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + transport = transports.VideoStitcherServiceGrpcAsyncIOTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.VideoStitcherServiceGrpcTransport, + transports.VideoStitcherServiceGrpcAsyncIOTransport, + ], +) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. 
+ with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + ], +) +def test_transport_kind(transport_name): + transport = VideoStitcherServiceClient.get_transport_class(transport_name)( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert transport.kind == transport_name + + +def test_transport_grpc_default(): + # A client should use the gRPC transport by default. + client = VideoStitcherServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert isinstance( + client.transport, + transports.VideoStitcherServiceGrpcTransport, + ) + + +def test_video_stitcher_service_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.VideoStitcherServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json", + ) + + +def test_video_stitcher_service_base_transport(): + # Instantiate the base transport. with mock.patch( "google.cloud.video.stitcher_v1.services.video_stitcher_service.transports.VideoStitcherServiceTransport.__init__" ) as Transport: @@ -11878,6 +14456,12 @@ def test_video_stitcher_service_base_transport(): "list_live_configs", "get_live_config", "delete_live_config", + "update_live_config", + "create_vod_config", + "list_vod_configs", + "get_vod_config", + "delete_vod_config", + "update_vod_config", "get_operation", "cancel_operation", "delete_operation", @@ -12440,10 +15024,36 @@ def test_parse_vod_ad_tag_detail_path(): assert expected == actual -def test_vod_session_path(): +def test_vod_config_path(): project = "oyster" location = "nudibranch" - vod_session = "cuttlefish" + vod_config = "cuttlefish" + expected = "projects/{project}/locations/{location}/vodConfigs/{vod_config}".format( + project=project, + location=location, + vod_config=vod_config, + ) + actual = VideoStitcherServiceClient.vod_config_path(project, location, vod_config) + assert expected == actual + + +def test_parse_vod_config_path(): + expected = { + "project": "mussel", + "location": "winkle", + "vod_config": "nautilus", + } + path = VideoStitcherServiceClient.vod_config_path(**expected) + + # Check that the path construction is reversible. 
+ actual = VideoStitcherServiceClient.parse_vod_config_path(path) + assert expected == actual + + +def test_vod_session_path(): + project = "scallop" + location = "abalone" + vod_session = "squid" expected = ( "projects/{project}/locations/{location}/vodSessions/{vod_session}".format( project=project, @@ -12457,9 +15067,9 @@ def test_vod_session_path(): def test_parse_vod_session_path(): expected = { - "project": "mussel", - "location": "winkle", - "vod_session": "nautilus", + "project": "clam", + "location": "whelk", + "vod_session": "octopus", } path = VideoStitcherServiceClient.vod_session_path(**expected) @@ -12469,10 +15079,10 @@ def test_parse_vod_session_path(): def test_vod_stitch_detail_path(): - project = "scallop" - location = "abalone" - vod_session = "squid" - vod_stitch_detail = "clam" + project = "oyster" + location = "nudibranch" + vod_session = "cuttlefish" + vod_stitch_detail = "mussel" expected = "projects/{project}/locations/{location}/vodSessions/{vod_session}/vodStitchDetails/{vod_stitch_detail}".format( project=project, location=location, @@ -12487,10 +15097,10 @@ def test_vod_stitch_detail_path(): def test_parse_vod_stitch_detail_path(): expected = { - "project": "whelk", - "location": "octopus", - "vod_session": "oyster", - "vod_stitch_detail": "nudibranch", + "project": "winkle", + "location": "nautilus", + "vod_session": "scallop", + "vod_stitch_detail": "abalone", } path = VideoStitcherServiceClient.vod_stitch_detail_path(**expected) @@ -12500,7 +15110,7 @@ def test_parse_vod_stitch_detail_path(): def test_common_billing_account_path(): - billing_account = "cuttlefish" + billing_account = "squid" expected = "billingAccounts/{billing_account}".format( billing_account=billing_account, ) @@ -12510,7 +15120,7 @@ def test_common_billing_account_path(): def test_parse_common_billing_account_path(): expected = { - "billing_account": "mussel", + "billing_account": "clam", } path = VideoStitcherServiceClient.common_billing_account_path(**expected) @@ -12520,7 +15130,7 @@ def test_parse_common_billing_account_path(): def test_common_folder_path(): - folder = "winkle" + folder = "whelk" expected = "folders/{folder}".format( folder=folder, ) @@ -12530,7 +15140,7 @@ def test_common_folder_path(): def test_parse_common_folder_path(): expected = { - "folder": "nautilus", + "folder": "octopus", } path = VideoStitcherServiceClient.common_folder_path(**expected) @@ -12540,7 +15150,7 @@ def test_parse_common_folder_path(): def test_common_organization_path(): - organization = "scallop" + organization = "oyster" expected = "organizations/{organization}".format( organization=organization, ) @@ -12550,7 +15160,7 @@ def test_common_organization_path(): def test_parse_common_organization_path(): expected = { - "organization": "abalone", + "organization": "nudibranch", } path = VideoStitcherServiceClient.common_organization_path(**expected) @@ -12560,7 +15170,7 @@ def test_parse_common_organization_path(): def test_common_project_path(): - project = "squid" + project = "cuttlefish" expected = "projects/{project}".format( project=project, ) @@ -12570,7 +15180,7 @@ def test_common_project_path(): def test_parse_common_project_path(): expected = { - "project": "clam", + "project": "mussel", } path = VideoStitcherServiceClient.common_project_path(**expected) @@ -12580,8 +15190,8 @@ def test_parse_common_project_path(): def test_common_location_path(): - project = "whelk" - location = "octopus" + project = "winkle" + location = "nautilus" expected = 
"projects/{project}/locations/{location}".format( project=project, location=location, @@ -12592,8 +15202,8 @@ def test_common_location_path(): def test_parse_common_location_path(): expected = { - "project": "oyster", - "location": "nudibranch", + "project": "scallop", + "location": "abalone", } path = VideoStitcherServiceClient.common_location_path(**expected) diff --git a/packages/google-cloud-video-transcoder/tests/unit/gapic/transcoder_v1/test_transcoder_service.py b/packages/google-cloud-video-transcoder/tests/unit/gapic/transcoder_v1/test_transcoder_service.py index 147731c3f75a..70cfa1a184fc 100644 --- a/packages/google-cloud-video-transcoder/tests/unit/gapic/transcoder_v1/test_transcoder_service.py +++ b/packages/google-cloud-video-transcoder/tests/unit/gapic/transcoder_v1/test_transcoder_service.py @@ -1982,13 +1982,13 @@ def test_list_jobs_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_jobs(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -3739,13 +3739,13 @@ def test_list_job_templates_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_job_templates(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 diff --git a/packages/google-cloud-vision/tests/unit/gapic/vision_v1/test_product_search.py b/packages/google-cloud-vision/tests/unit/gapic/vision_v1/test_product_search.py index af340ecd1c06..30717f5da62a 100644 --- a/packages/google-cloud-vision/tests/unit/gapic/vision_v1/test_product_search.py +++ b/packages/google-cloud-vision/tests/unit/gapic/vision_v1/test_product_search.py @@ -1980,13 +1980,13 @@ def test_list_product_sets_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_product_sets(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -4109,13 +4109,13 @@ def test_list_products_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_products(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -6628,13 +6628,13 @@ def test_list_reference_images_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_reference_images(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -8400,13 +8400,13 @@ def 
test_list_products_in_product_set_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("name", ""),)), ) pager = client.list_products_in_product_set(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 diff --git a/packages/google-cloud-vision/tests/unit/gapic/vision_v1p3beta1/test_product_search.py b/packages/google-cloud-vision/tests/unit/gapic/vision_v1p3beta1/test_product_search.py index 1660376f1dbd..b675ef935636 100644 --- a/packages/google-cloud-vision/tests/unit/gapic/vision_v1p3beta1/test_product_search.py +++ b/packages/google-cloud-vision/tests/unit/gapic/vision_v1p3beta1/test_product_search.py @@ -1979,13 +1979,13 @@ def test_list_product_sets_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_product_sets(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -4108,13 +4108,13 @@ def test_list_products_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_products(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -6627,13 +6627,13 @@ def test_list_reference_images_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_reference_images(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -8399,13 +8399,13 @@ def test_list_products_in_product_set_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("name", ""),)), ) pager = client.list_products_in_product_set(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 diff --git a/packages/google-cloud-vision/tests/unit/gapic/vision_v1p4beta1/test_product_search.py b/packages/google-cloud-vision/tests/unit/gapic/vision_v1p4beta1/test_product_search.py index c1d392f92a5d..12089bcc7042 100644 --- a/packages/google-cloud-vision/tests/unit/gapic/vision_v1p4beta1/test_product_search.py +++ b/packages/google-cloud-vision/tests/unit/gapic/vision_v1p4beta1/test_product_search.py @@ -1980,13 +1980,13 @@ def test_list_product_sets_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_product_sets(request={}) - assert pager._metadata == metadata + assert 
pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -4109,13 +4109,13 @@ def test_list_products_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_products(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -6628,13 +6628,13 @@ def test_list_reference_images_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_reference_images(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -8400,13 +8400,13 @@ def test_list_products_in_product_set_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("name", ""),)), ) pager = client.list_products_in_product_set(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 diff --git a/packages/google-cloud-visionai/README.rst b/packages/google-cloud-visionai/README.rst index 50d8bde3dd97..25f3967754a2 100644 --- a/packages/google-cloud-visionai/README.rst +++ b/packages/google-cloud-visionai/README.rst @@ -15,7 +15,7 @@ Python Client for Vision AI API .. |versions| image:: https://img.shields.io/pypi/pyversions/google-cloud-visionai.svg :target: https://pypi.org/project/google-cloud-visionai/ .. _Vision AI API: https://cloud.google.com/vision-ai/docs -.. _Client Library Documentation: https://cloud.google.com/python/docs/reference/google-cloud-visionai/latest/summary_overview +.. _Client Library Documentation: https://cloud.google.com/python/docs/reference/google-cloud-visionai/latest .. _Product Documentation: https://cloud.google.com/vision-ai/docs Quick Start diff --git a/packages/google-cloud-visionai/docs/index.rst b/packages/google-cloud-visionai/docs/index.rst index 0137f9db80c8..1d394a5a36e6 100644 --- a/packages/google-cloud-visionai/docs/index.rst +++ b/packages/google-cloud-visionai/docs/index.rst @@ -32,8 +32,3 @@ For a list of all ``google-cloud-visionai`` releases: :maxdepth: 2 CHANGELOG - -.. toctree:: - :hidden: - - summary_overview.md diff --git a/packages/google-cloud-visionai/google/cloud/visionai/gapic_version.py b/packages/google-cloud-visionai/google/cloud/visionai/gapic_version.py index 33d37a7b677b..558c8aab67c5 100644 --- a/packages/google-cloud-visionai/google/cloud/visionai/gapic_version.py +++ b/packages/google-cloud-visionai/google/cloud/visionai/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "0.1.0" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-visionai/google/cloud/visionai_v1/gapic_version.py b/packages/google-cloud-visionai/google/cloud/visionai_v1/gapic_version.py index 33d37a7b677b..558c8aab67c5 100644 --- a/packages/google-cloud-visionai/google/cloud/visionai_v1/gapic_version.py +++ b/packages/google-cloud-visionai/google/cloud/visionai_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.1.0" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-visionai/google/cloud/visionai_v1/types/platform.py b/packages/google-cloud-visionai/google/cloud/visionai_v1/types/platform.py index 92d0767e5f12..65e0c4ee6f2a 100644 --- a/packages/google-cloud-visionai/google/cloud/visionai_v1/types/platform.py +++ b/packages/google-cloud-visionai/google/cloud/visionai_v1/types/platform.py @@ -3434,7 +3434,6 @@ class VertexCustomConfig(proto.Message): stored in the annotations field. - To drop the prediction output, simply clear the payload field in the returned AppPlatformCloudFunctionResponse. - attach_application_metadata (bool): If true, the prediction request received by custom model will also contain metadata with the @@ -3442,13 +3441,11 @@ class VertexCustomConfig(proto.Message): 'appPlatformMetadata': { 'ingestionTime': DOUBLE; (UNIX timestamp) - 'application': STRING; + 'application': STRING; 'instanceId': STRING; 'node': STRING; 'processor': STRING; - - } - + } dynamic_config_input_topic (str): Optional. By setting the configuration_input_topic, processor will subscribe to given topic, only pub/sub topic @@ -3463,7 +3460,6 @@ class VertexCustomConfig(proto.Message): fps set inside the topic. int32 fps = 2; } This field is a member of `oneof`_ ``_dynamic_config_input_topic``. - """ max_prediction_fps: int = proto.Field( diff --git a/packages/google-cloud-visionai/google/cloud/visionai_v1alpha1/gapic_version.py b/packages/google-cloud-visionai/google/cloud/visionai_v1alpha1/gapic_version.py index 33d37a7b677b..558c8aab67c5 100644 --- a/packages/google-cloud-visionai/google/cloud/visionai_v1alpha1/gapic_version.py +++ b/packages/google-cloud-visionai/google/cloud/visionai_v1alpha1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.1.0" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-visionai/google/cloud/visionai_v1alpha1/types/platform.py b/packages/google-cloud-visionai/google/cloud/visionai_v1alpha1/types/platform.py index 45fc1588a825..2036dc8bf22a 100644 --- a/packages/google-cloud-visionai/google/cloud/visionai_v1alpha1/types/platform.py +++ b/packages/google-cloud-visionai/google/cloud/visionai_v1alpha1/types/platform.py @@ -3242,21 +3242,18 @@ class VertexCustomConfig(proto.Message): stored in the annotations field. - To drop the prediction output, simply clear the payload field in the returned AppPlatformCloudFunctionResponse. 
- attach_application_metadata (bool): If true, the prediction request received by custom model will also contain metadata with the following schema: 'appPlatformMetadata': { - 'ingestionTime': DOUBLE; (UNIX timestamp) - 'application': STRING; - 'instanceId': STRING; - 'node': STRING; - 'processor': STRING; - - } - + 'ingestionTime': DOUBLE; (UNIX timestamp) + 'application': STRING; + 'instanceId': STRING; + 'node': STRING; + 'processor': STRING; + } """ max_prediction_fps: int = proto.Field( diff --git a/packages/google-cloud-visionai/tests/unit/gapic/visionai_v1/test_app_platform.py b/packages/google-cloud-visionai/tests/unit/gapic/visionai_v1/test_app_platform.py index c2659a714c2c..c9e30a82069e 100644 --- a/packages/google-cloud-visionai/tests/unit/gapic/visionai_v1/test_app_platform.py +++ b/packages/google-cloud-visionai/tests/unit/gapic/visionai_v1/test_app_platform.py @@ -1537,13 +1537,13 @@ def test_list_applications_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_applications(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -5672,13 +5672,13 @@ def test_list_instances_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_instances(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -7846,13 +7846,13 @@ def test_list_drafts_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_drafts(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -9932,13 +9932,13 @@ def test_list_processors_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_processors(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 diff --git a/packages/google-cloud-visionai/tests/unit/gapic/visionai_v1/test_live_video_analytics.py b/packages/google-cloud-visionai/tests/unit/gapic/visionai_v1/test_live_video_analytics.py index 6998ff8389f9..2b3294b77bf6 100644 --- a/packages/google-cloud-visionai/tests/unit/gapic/visionai_v1/test_live_video_analytics.py +++ b/packages/google-cloud-visionai/tests/unit/gapic/visionai_v1/test_live_video_analytics.py @@ -1612,13 +1612,13 @@ def test_list_public_operators_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_public_operators(request={}) - assert pager._metadata == metadata + assert 
pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -2582,13 +2582,13 @@ def test_list_operators_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_operators(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -4669,13 +4669,13 @@ def test_list_analyses_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_analyses(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -6756,13 +6756,13 @@ def test_list_processes_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_processes(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 diff --git a/packages/google-cloud-visionai/tests/unit/gapic/visionai_v1/test_streams_service.py b/packages/google-cloud-visionai/tests/unit/gapic/visionai_v1/test_streams_service.py index 050062ead711..11998178e98b 100644 --- a/packages/google-cloud-visionai/tests/unit/gapic/visionai_v1/test_streams_service.py +++ b/packages/google-cloud-visionai/tests/unit/gapic/visionai_v1/test_streams_service.py @@ -1551,13 +1551,13 @@ def test_list_clusters_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_clusters(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -3644,13 +3644,13 @@ def test_list_streams_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_streams(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -6540,13 +6540,13 @@ def test_list_events_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_events(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -8625,13 +8625,13 @@ def test_list_series_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = 
client.list_series(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 diff --git a/packages/google-cloud-visionai/tests/unit/gapic/visionai_v1/test_warehouse.py b/packages/google-cloud-visionai/tests/unit/gapic/visionai_v1/test_warehouse.py index 6ee50a05e907..168284104667 100644 --- a/packages/google-cloud-visionai/tests/unit/gapic/visionai_v1/test_warehouse.py +++ b/packages/google-cloud-visionai/tests/unit/gapic/visionai_v1/test_warehouse.py @@ -2623,13 +2623,13 @@ def test_list_assets_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_assets(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -5055,13 +5055,13 @@ def test_view_indexed_assets_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("index", ""),)), ) pager = client.view_indexed_assets(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -6767,13 +6767,13 @@ def test_list_indexes_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_indexes(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -8842,13 +8842,13 @@ def test_list_corpora_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_corpora(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -11622,13 +11622,13 @@ def test_list_data_schemas_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_data_schemas(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -12973,13 +12973,13 @@ def test_list_annotations_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_annotations(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -16908,13 +16908,13 @@ def test_list_search_configs_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( 
gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_search_configs(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -19109,13 +19109,13 @@ def test_list_search_hypernyms_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_search_hypernyms(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -19596,13 +19596,13 @@ def test_search_assets_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("corpus", ""),)), ) pager = client.search_assets(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -20098,13 +20098,13 @@ def test_search_index_endpoint_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("index_endpoint", ""),)), ) pager = client.search_index_endpoint(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -21511,13 +21511,13 @@ def test_list_index_endpoints_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_index_endpoints(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -25028,13 +25028,13 @@ def test_list_collections_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_collections(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -26373,13 +26373,13 @@ def test_view_collection_items_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("collection", ""),)), ) pager = client.view_collection_items(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 diff --git a/packages/google-cloud-visionai/tests/unit/gapic/visionai_v1alpha1/test_app_platform.py b/packages/google-cloud-visionai/tests/unit/gapic/visionai_v1alpha1/test_app_platform.py index 184f356a1fec..2581a05dee54 100644 --- a/packages/google-cloud-visionai/tests/unit/gapic/visionai_v1alpha1/test_app_platform.py +++ b/packages/google-cloud-visionai/tests/unit/gapic/visionai_v1alpha1/test_app_platform.py @@ -1536,13 +1536,13 @@ def 
test_list_applications_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_applications(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -5666,13 +5666,13 @@ def test_list_instances_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_instances(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -7835,13 +7835,13 @@ def test_list_drafts_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_drafts(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -9921,13 +9921,13 @@ def test_list_processors_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_processors(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 diff --git a/packages/google-cloud-visionai/tests/unit/gapic/visionai_v1alpha1/test_live_video_analytics.py b/packages/google-cloud-visionai/tests/unit/gapic/visionai_v1alpha1/test_live_video_analytics.py index 0f10496992fa..e1459369c809 100644 --- a/packages/google-cloud-visionai/tests/unit/gapic/visionai_v1alpha1/test_live_video_analytics.py +++ b/packages/google-cloud-visionai/tests/unit/gapic/visionai_v1alpha1/test_live_video_analytics.py @@ -1592,13 +1592,13 @@ def test_list_analyses_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_analyses(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 diff --git a/packages/google-cloud-visionai/tests/unit/gapic/visionai_v1alpha1/test_streams_service.py b/packages/google-cloud-visionai/tests/unit/gapic/visionai_v1alpha1/test_streams_service.py index a7c795e31c06..c2099162b9a3 100644 --- a/packages/google-cloud-visionai/tests/unit/gapic/visionai_v1alpha1/test_streams_service.py +++ b/packages/google-cloud-visionai/tests/unit/gapic/visionai_v1alpha1/test_streams_service.py @@ -1555,13 +1555,13 @@ def test_list_clusters_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_clusters(request={}) - assert pager._metadata == metadata + assert pager._metadata == 
expected_metadata results = list(pager) assert len(results) == 6 @@ -3648,13 +3648,13 @@ def test_list_streams_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_streams(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -6136,13 +6136,13 @@ def test_list_events_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_events(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -8221,13 +8221,13 @@ def test_list_series_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_series(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 diff --git a/packages/google-cloud-visionai/tests/unit/gapic/visionai_v1alpha1/test_warehouse.py b/packages/google-cloud-visionai/tests/unit/gapic/visionai_v1alpha1/test_warehouse.py index 32f389f343bd..f3862db0c7cf 100644 --- a/packages/google-cloud-visionai/tests/unit/gapic/visionai_v1alpha1/test_warehouse.py +++ b/packages/google-cloud-visionai/tests/unit/gapic/visionai_v1alpha1/test_warehouse.py @@ -2621,13 +2621,13 @@ def test_list_assets_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_assets(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -4684,13 +4684,13 @@ def test_list_corpora_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_corpora(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -7177,13 +7177,13 @@ def test_list_data_schemas_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_data_schemas(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -8528,13 +8528,13 @@ def test_list_annotations_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_annotations(request={}) - 
assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -12174,13 +12174,13 @@ def test_list_search_configs_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_search_configs(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -12659,13 +12659,13 @@ def test_search_assets_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("corpus", ""),)), ) pager = client.search_assets(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 diff --git a/packages/google-cloud-vm-migration/tests/unit/gapic/vmmigration_v1/test_vm_migration.py b/packages/google-cloud-vm-migration/tests/unit/gapic/vmmigration_v1/test_vm_migration.py index 97a39b53110f..a6edea356921 100644 --- a/packages/google-cloud-vm-migration/tests/unit/gapic/vmmigration_v1/test_vm_migration.py +++ b/packages/google-cloud-vm-migration/tests/unit/gapic/vmmigration_v1/test_vm_migration.py @@ -1516,13 +1516,13 @@ def test_list_sources_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_sources(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -4008,13 +4008,13 @@ def test_list_utilization_reports_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_utilization_reports(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -5835,13 +5835,13 @@ def test_list_datacenter_connectors_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_datacenter_connectors(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -8437,13 +8437,13 @@ def test_list_migrating_vms_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_migrating_vms(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -12351,13 +12351,13 @@ def test_list_clone_jobs_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + 
expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_clone_jobs(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -14147,13 +14147,13 @@ def test_list_cutover_jobs_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_cutover_jobs(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -15107,13 +15107,13 @@ def test_list_groups_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_groups(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -18005,13 +18005,13 @@ def test_list_target_projects_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_target_projects(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -20220,13 +20220,13 @@ def test_list_replication_cycles_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_replication_cycles(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 diff --git a/packages/google-cloud-vmwareengine/tests/unit/gapic/vmwareengine_v1/test_vmware_engine.py b/packages/google-cloud-vmwareengine/tests/unit/gapic/vmwareengine_v1/test_vmware_engine.py index 7beaebb8966f..cbd3f68507a9 100644 --- a/packages/google-cloud-vmwareengine/tests/unit/gapic/vmwareengine_v1/test_vmware_engine.py +++ b/packages/google-cloud-vmwareengine/tests/unit/gapic/vmwareengine_v1/test_vmware_engine.py @@ -1551,13 +1551,13 @@ def test_list_private_clouds_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_private_clouds(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -4142,13 +4142,13 @@ def test_list_clusters_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_clusters(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -6228,13 +6228,13 @@ def 
test_list_nodes_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_nodes(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -7221,13 +7221,13 @@ def test_list_external_addresses_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_external_addresses(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -7818,13 +7818,13 @@ def test_fetch_network_policy_external_addresses_pager(transport_name: str = "gr RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("network_policy", ""),)), ) pager = client.fetch_network_policy_external_addresses(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -10025,13 +10025,13 @@ def test_list_subnets_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_subnets(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -11388,13 +11388,13 @@ def test_list_external_access_rules_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_external_access_rules(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -13656,13 +13656,13 @@ def test_list_logging_servers_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_logging_servers(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -15863,13 +15863,13 @@ def test_list_node_types_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_node_types(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -19732,13 +19732,13 @@ def test_list_network_peerings_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( 
gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_network_peerings(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -21544,13 +21544,13 @@ def test_list_peering_routes_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_peering_routes(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -22569,13 +22569,13 @@ def test_list_hcx_activation_keys_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_hcx_activation_keys(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -24005,13 +24005,13 @@ def test_list_network_policies_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_network_policies(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -25824,13 +25824,13 @@ def test_list_management_dns_zone_bindings_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_management_dns_zone_bindings(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -30122,13 +30122,13 @@ def test_list_vmware_engine_networks_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_vmware_engine_networks(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -31620,13 +31620,13 @@ def test_list_private_connections_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_private_connections(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -33023,13 +33023,13 @@ def test_list_private_connection_peering_routes_pager(transport_name: str = "grp RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_private_connection_peering_routes(request={}) - assert pager._metadata == 
metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 diff --git a/packages/google-cloud-vpc-access/tests/unit/gapic/vpcaccess_v1/test_vpc_access_service.py b/packages/google-cloud-vpc-access/tests/unit/gapic/vpcaccess_v1/test_vpc_access_service.py index 7afadf62371b..972cdeba683d 100644 --- a/packages/google-cloud-vpc-access/tests/unit/gapic/vpcaccess_v1/test_vpc_access_service.py +++ b/packages/google-cloud-vpc-access/tests/unit/gapic/vpcaccess_v1/test_vpc_access_service.py @@ -2373,13 +2373,13 @@ def test_list_connectors_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_connectors(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 diff --git a/packages/google-cloud-websecurityscanner/tests/unit/gapic/websecurityscanner_v1/test_web_security_scanner.py b/packages/google-cloud-websecurityscanner/tests/unit/gapic/websecurityscanner_v1/test_web_security_scanner.py index 9a921e41562c..73b2cb669dec 100644 --- a/packages/google-cloud-websecurityscanner/tests/unit/gapic/websecurityscanner_v1/test_web_security_scanner.py +++ b/packages/google-cloud-websecurityscanner/tests/unit/gapic/websecurityscanner_v1/test_web_security_scanner.py @@ -2513,13 +2513,13 @@ def test_list_scan_configs_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_scan_configs(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -3988,13 +3988,13 @@ def test_list_scan_runs_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_scan_runs(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -4802,13 +4802,13 @@ def test_list_crawled_urls_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_crawled_urls(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -5625,13 +5625,13 @@ def test_list_findings_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_findings(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 diff --git a/packages/google-cloud-websecurityscanner/tests/unit/gapic/websecurityscanner_v1alpha/test_web_security_scanner.py 
b/packages/google-cloud-websecurityscanner/tests/unit/gapic/websecurityscanner_v1alpha/test_web_security_scanner.py index 99642d0baf2b..28d878d0c578 100644 --- a/packages/google-cloud-websecurityscanner/tests/unit/gapic/websecurityscanner_v1alpha/test_web_security_scanner.py +++ b/packages/google-cloud-websecurityscanner/tests/unit/gapic/websecurityscanner_v1alpha/test_web_security_scanner.py @@ -2815,13 +2815,13 @@ def test_list_scan_configs_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_scan_configs(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -4606,13 +4606,13 @@ def test_list_scan_runs_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_scan_runs(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -5586,13 +5586,13 @@ def test_list_crawled_urls_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_crawled_urls(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -6576,13 +6576,13 @@ def test_list_findings_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_findings(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 diff --git a/packages/google-cloud-websecurityscanner/tests/unit/gapic/websecurityscanner_v1beta/test_web_security_scanner.py b/packages/google-cloud-websecurityscanner/tests/unit/gapic/websecurityscanner_v1beta/test_web_security_scanner.py index 1c124ae9e7ec..8eaea1f452ea 100644 --- a/packages/google-cloud-websecurityscanner/tests/unit/gapic/websecurityscanner_v1beta/test_web_security_scanner.py +++ b/packages/google-cloud-websecurityscanner/tests/unit/gapic/websecurityscanner_v1beta/test_web_security_scanner.py @@ -2853,13 +2853,13 @@ def test_list_scan_configs_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_scan_configs(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -4660,13 +4660,13 @@ def test_list_scan_runs_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = 
client.list_scan_runs(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -5640,13 +5640,13 @@ def test_list_crawled_urls_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_crawled_urls(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -6630,13 +6630,13 @@ def test_list_findings_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_findings(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 diff --git a/packages/google-cloud-websecurityscanner/websecurityscanner-v1-py.tar.gz b/packages/google-cloud-websecurityscanner/websecurityscanner-v1-py.tar.gz new file mode 100644 index 000000000000..634d96987545 Binary files /dev/null and b/packages/google-cloud-websecurityscanner/websecurityscanner-v1-py.tar.gz differ diff --git a/packages/google-cloud-workflows/tests/unit/gapic/executions_v1/test_executions.py b/packages/google-cloud-workflows/tests/unit/gapic/executions_v1/test_executions.py index 7db521eaa5ba..4837f9a3bfa2 100644 --- a/packages/google-cloud-workflows/tests/unit/gapic/executions_v1/test_executions.py +++ b/packages/google-cloud-workflows/tests/unit/gapic/executions_v1/test_executions.py @@ -1466,13 +1466,13 @@ def test_list_executions_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_executions(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 diff --git a/packages/google-cloud-workflows/tests/unit/gapic/executions_v1beta/test_executions.py b/packages/google-cloud-workflows/tests/unit/gapic/executions_v1beta/test_executions.py index 9022f5e8820e..c166252f81b4 100644 --- a/packages/google-cloud-workflows/tests/unit/gapic/executions_v1beta/test_executions.py +++ b/packages/google-cloud-workflows/tests/unit/gapic/executions_v1beta/test_executions.py @@ -1461,13 +1461,13 @@ def test_list_executions_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_executions(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 diff --git a/packages/google-cloud-workflows/tests/unit/gapic/workflows_v1/test_workflows.py b/packages/google-cloud-workflows/tests/unit/gapic/workflows_v1/test_workflows.py index 9a6beeea2eb2..4dd72ff2c365 100644 --- a/packages/google-cloud-workflows/tests/unit/gapic/workflows_v1/test_workflows.py +++ b/packages/google-cloud-workflows/tests/unit/gapic/workflows_v1/test_workflows.py @@ -1502,13 +1502,13 
@@ def test_list_workflows_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_workflows(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 diff --git a/packages/google-cloud-workflows/tests/unit/gapic/workflows_v1beta/test_workflows.py b/packages/google-cloud-workflows/tests/unit/gapic/workflows_v1beta/test_workflows.py index 6d3daa71f73c..6e33e7eb6cfb 100644 --- a/packages/google-cloud-workflows/tests/unit/gapic/workflows_v1beta/test_workflows.py +++ b/packages/google-cloud-workflows/tests/unit/gapic/workflows_v1beta/test_workflows.py @@ -1501,13 +1501,13 @@ def test_list_workflows_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_workflows(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 diff --git a/packages/google-cloud-workstations/tests/unit/gapic/workstations_v1/test_workstations.py b/packages/google-cloud-workstations/tests/unit/gapic/workstations_v1/test_workstations.py index 3bb9e292651f..fe50f8daf5d9 100644 --- a/packages/google-cloud-workstations/tests/unit/gapic/workstations_v1/test_workstations.py +++ b/packages/google-cloud-workstations/tests/unit/gapic/workstations_v1/test_workstations.py @@ -1984,13 +1984,13 @@ def test_list_workstation_clusters_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_workstation_clusters(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -4214,13 +4214,13 @@ def test_list_workstation_configs_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_workstation_configs(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -4812,13 +4812,13 @@ def test_list_usable_workstation_configs_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_usable_workstation_configs(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -7014,13 +7014,13 @@ def test_list_workstations_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_workstations(request={}) - assert pager._metadata == 
metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -7612,13 +7612,13 @@ def test_list_usable_workstations_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_usable_workstations(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 diff --git a/packages/google-cloud-workstations/tests/unit/gapic/workstations_v1beta/test_workstations.py b/packages/google-cloud-workstations/tests/unit/gapic/workstations_v1beta/test_workstations.py index fe17d0e9ca93..f11b2799804c 100644 --- a/packages/google-cloud-workstations/tests/unit/gapic/workstations_v1beta/test_workstations.py +++ b/packages/google-cloud-workstations/tests/unit/gapic/workstations_v1beta/test_workstations.py @@ -1984,13 +1984,13 @@ def test_list_workstation_clusters_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_workstation_clusters(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -4219,13 +4219,13 @@ def test_list_workstation_configs_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_workstation_configs(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -4817,13 +4817,13 @@ def test_list_usable_workstation_configs_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_usable_workstation_configs(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -7019,13 +7019,13 @@ def test_list_workstations_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_workstations(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -7617,13 +7617,13 @@ def test_list_usable_workstations_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_usable_workstations(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 diff --git a/packages/google-maps-fleetengine-delivery/google/maps/fleetengine_delivery/gapic_version.py 
b/packages/google-maps-fleetengine-delivery/google/maps/fleetengine_delivery/gapic_version.py index 364164ddb134..558c8aab67c5 100644 --- a/packages/google-maps-fleetengine-delivery/google/maps/fleetengine_delivery/gapic_version.py +++ b/packages/google-maps-fleetengine-delivery/google/maps/fleetengine_delivery/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.2.0" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-maps-fleetengine-delivery/google/maps/fleetengine_delivery_v1/gapic_version.py b/packages/google-maps-fleetengine-delivery/google/maps/fleetengine_delivery_v1/gapic_version.py index 364164ddb134..558c8aab67c5 100644 --- a/packages/google-maps-fleetengine-delivery/google/maps/fleetengine_delivery_v1/gapic_version.py +++ b/packages/google-maps-fleetengine-delivery/google/maps/fleetengine_delivery_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.2.0" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-maps-fleetengine-delivery/samples/generated_samples/snippet_metadata_maps.fleetengine.delivery.v1.json b/packages/google-maps-fleetengine-delivery/samples/generated_samples/snippet_metadata_maps.fleetengine.delivery.v1.json index 741d00c9dd61..884ac226d0e4 100644 --- a/packages/google-maps-fleetengine-delivery/samples/generated_samples/snippet_metadata_maps.fleetengine.delivery.v1.json +++ b/packages/google-maps-fleetengine-delivery/samples/generated_samples/snippet_metadata_maps.fleetengine.delivery.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-maps-fleetengine-delivery", - "version": "0.2.0" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-maps-fleetengine-delivery/tests/unit/gapic/fleetengine_delivery_v1/test_delivery_service.py b/packages/google-maps-fleetengine-delivery/tests/unit/gapic/fleetengine_delivery_v1/test_delivery_service.py index 497378bbdeb5..dc9787dfe3ce 100644 --- a/packages/google-maps-fleetengine-delivery/tests/unit/gapic/fleetengine_delivery_v1/test_delivery_service.py +++ b/packages/google-maps-fleetengine-delivery/tests/unit/gapic/fleetengine_delivery_v1/test_delivery_service.py @@ -4060,10 +4060,10 @@ def test_list_tasks_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () + expected_metadata = () pager = client.list_tasks(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -4983,10 +4983,10 @@ def test_list_delivery_vehicles_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () + expected_metadata = () pager = client.list_delivery_vehicles(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 diff --git a/packages/google-maps-fleetengine/google/maps/fleetengine/gapic_version.py b/packages/google-maps-fleetengine/google/maps/fleetengine/gapic_version.py index 364164ddb134..558c8aab67c5 100644 --- a/packages/google-maps-fleetengine/google/maps/fleetengine/gapic_version.py +++ b/packages/google-maps-fleetengine/google/maps/fleetengine/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "0.2.0" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-maps-fleetengine/google/maps/fleetengine_v1/gapic_version.py b/packages/google-maps-fleetengine/google/maps/fleetengine_v1/gapic_version.py index 364164ddb134..558c8aab67c5 100644 --- a/packages/google-maps-fleetengine/google/maps/fleetengine_v1/gapic_version.py +++ b/packages/google-maps-fleetengine/google/maps/fleetengine_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.2.0" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-maps-fleetengine/samples/generated_samples/snippet_metadata_maps.fleetengine.v1.json b/packages/google-maps-fleetengine/samples/generated_samples/snippet_metadata_maps.fleetengine.v1.json index 897d98cd72c2..b0139d358773 100644 --- a/packages/google-maps-fleetengine/samples/generated_samples/snippet_metadata_maps.fleetengine.v1.json +++ b/packages/google-maps-fleetengine/samples/generated_samples/snippet_metadata_maps.fleetengine.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-maps-fleetengine", - "version": "0.2.0" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-maps-fleetengine/tests/unit/gapic/fleetengine_v1/test_trip_service.py b/packages/google-maps-fleetengine/tests/unit/gapic/fleetengine_v1/test_trip_service.py index f3a3680e2629..8f2cc133fb21 100644 --- a/packages/google-maps-fleetengine/tests/unit/gapic/fleetengine_v1/test_trip_service.py +++ b/packages/google-maps-fleetengine/tests/unit/gapic/fleetengine_v1/test_trip_service.py @@ -2194,10 +2194,10 @@ def test_search_trips_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () + expected_metadata = () pager = client.search_trips(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 diff --git a/packages/google-maps-fleetengine/tests/unit/gapic/fleetengine_v1/test_vehicle_service.py b/packages/google-maps-fleetengine/tests/unit/gapic/fleetengine_v1/test_vehicle_service.py index aa6d3c877fa6..d736abed1753 100644 --- a/packages/google-maps-fleetengine/tests/unit/gapic/fleetengine_v1/test_vehicle_service.py +++ b/packages/google-maps-fleetengine/tests/unit/gapic/fleetengine_v1/test_vehicle_service.py @@ -2520,10 +2520,10 @@ def test_list_vehicles_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () + expected_metadata = () pager = client.list_vehicles(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 diff --git a/packages/google-maps-mapsplatformdatasets/.repo-metadata.json b/packages/google-maps-mapsplatformdatasets/.repo-metadata.json index 70cf6a75357b..6343b17f453a 100644 --- a/packages/google-maps-mapsplatformdatasets/.repo-metadata.json +++ b/packages/google-maps-mapsplatformdatasets/.repo-metadata.json @@ -11,7 +11,7 @@ "repo": "googleapis/google-cloud-python", "distribution_name": "google-maps-mapsplatformdatasets", "api_id": "mapsplatformdatasets.googleapis.com", - "default_version": "v1alpha", + "default_version": "v1", "codeowner_team": "", "api_shortname": "mapsplatformdatasets" } diff --git a/packages/google-maps-mapsplatformdatasets/CHANGELOG.md b/packages/google-maps-mapsplatformdatasets/CHANGELOG.md index 3e7d4e024ede..5af7e7004fc4 100644 --- 
a/packages/google-maps-mapsplatformdatasets/CHANGELOG.md +++ b/packages/google-maps-mapsplatformdatasets/CHANGELOG.md @@ -1,5 +1,24 @@ # Changelog +## [0.4.0](https://github.com/googleapis/google-cloud-python/compare/google-maps-mapsplatformdatasets-v0.3.7...google-maps-mapsplatformdatasets-v0.4.0) (2024-05-29) + + +### ⚠ BREAKING CHANGES + +* set google.maps.mapsplatformdatasets_v1 as the default import +* removed v1alpha libraries + +### Features + +* added support for getting/listing active version(s) ([338ef52](https://github.com/googleapis/google-cloud-python/commit/338ef523e37711f9739fca79dfdc0e63fbcaeb23)) +* removed v1alpha libraries ([338ef52](https://github.com/googleapis/google-cloud-python/commit/338ef523e37711f9739fca79dfdc0e63fbcaeb23)) +* set google.maps.mapsplatformdatasets_v1 as the default import ([338ef52](https://github.com/googleapis/google-cloud-python/commit/338ef523e37711f9739fca79dfdc0e63fbcaeb23)) + + +### Documentation + +* general improvements ([338ef52](https://github.com/googleapis/google-cloud-python/commit/338ef523e37711f9739fca79dfdc0e63fbcaeb23)) + ## [0.3.7](https://github.com/googleapis/google-cloud-python/compare/google-maps-mapsplatformdatasets-v0.3.6...google-maps-mapsplatformdatasets-v0.3.7) (2024-03-05) diff --git a/packages/google-maps-mapsplatformdatasets/docs/index.rst b/packages/google-maps-mapsplatformdatasets/docs/index.rst index ad6cf80d134c..3cb5a4fb82b7 100644 --- a/packages/google-maps-mapsplatformdatasets/docs/index.rst +++ b/packages/google-maps-mapsplatformdatasets/docs/index.rst @@ -2,17 +2,6 @@ .. include:: multiprocessing.rst -This package includes clients for multiple versions of Maps Platform Datasets API. -By default, you will get version ``mapsplatformdatasets_v1alpha``. - - -API Reference -------------- -.. toctree:: - :maxdepth: 2 - - mapsplatformdatasets_v1alpha/services_ - mapsplatformdatasets_v1alpha/types_ API Reference ------------- diff --git a/packages/google-maps-mapsplatformdatasets/docs/mapsplatformdatasets_v1alpha/maps_platform_datasets_v1_alpha.rst b/packages/google-maps-mapsplatformdatasets/docs/mapsplatformdatasets_v1alpha/maps_platform_datasets_v1_alpha.rst deleted file mode 100644 index c47c959a89c9..000000000000 --- a/packages/google-maps-mapsplatformdatasets/docs/mapsplatformdatasets_v1alpha/maps_platform_datasets_v1_alpha.rst +++ /dev/null @@ -1,10 +0,0 @@ -MapsPlatformDatasetsV1Alpha ---------------------------------------------- - -.. automodule:: google.maps.mapsplatformdatasets_v1alpha.services.maps_platform_datasets_v1_alpha - :members: - :inherited-members: - -.. 
automodule:: google.maps.mapsplatformdatasets_v1alpha.services.maps_platform_datasets_v1_alpha.pagers - :members: - :inherited-members: diff --git a/packages/google-maps-mapsplatformdatasets/docs/mapsplatformdatasets_v1alpha/services_.rst b/packages/google-maps-mapsplatformdatasets/docs/mapsplatformdatasets_v1alpha/services_.rst deleted file mode 100644 index ad5d13db62d9..000000000000 --- a/packages/google-maps-mapsplatformdatasets/docs/mapsplatformdatasets_v1alpha/services_.rst +++ /dev/null @@ -1,6 +0,0 @@ -Services for Google Maps Mapsplatformdatasets v1alpha API -========================================================= -.. toctree:: - :maxdepth: 2 - - maps_platform_datasets_v1_alpha diff --git a/packages/google-maps-mapsplatformdatasets/docs/mapsplatformdatasets_v1alpha/types_.rst b/packages/google-maps-mapsplatformdatasets/docs/mapsplatformdatasets_v1alpha/types_.rst deleted file mode 100644 index d938f9f9a8a6..000000000000 --- a/packages/google-maps-mapsplatformdatasets/docs/mapsplatformdatasets_v1alpha/types_.rst +++ /dev/null @@ -1,6 +0,0 @@ -Types for Google Maps Mapsplatformdatasets v1alpha API -====================================================== - -.. automodule:: google.maps.mapsplatformdatasets_v1alpha.types - :members: - :show-inheritance: diff --git a/packages/google-maps-mapsplatformdatasets/google/maps/mapsplatformdatasets/__init__.py b/packages/google-maps-mapsplatformdatasets/google/maps/mapsplatformdatasets/__init__.py index d94c443bc494..af2784ae70f5 100644 --- a/packages/google-maps-mapsplatformdatasets/google/maps/mapsplatformdatasets/__init__.py +++ b/packages/google-maps-mapsplatformdatasets/google/maps/mapsplatformdatasets/__init__.py @@ -18,46 +18,40 @@ __version__ = package_version.__version__ -from google.maps.mapsplatformdatasets_v1alpha.services.maps_platform_datasets_v1_alpha.async_client import ( - MapsPlatformDatasetsV1AlphaAsyncClient, +from google.maps.mapsplatformdatasets_v1.services.maps_platform_datasets.async_client import ( + MapsPlatformDatasetsAsyncClient, ) -from google.maps.mapsplatformdatasets_v1alpha.services.maps_platform_datasets_v1_alpha.client import ( - MapsPlatformDatasetsV1AlphaClient, +from google.maps.mapsplatformdatasets_v1.services.maps_platform_datasets.client import ( + MapsPlatformDatasetsClient, ) -from google.maps.mapsplatformdatasets_v1alpha.types.data_source import ( +from google.maps.mapsplatformdatasets_v1.types.data_source import ( FileFormat, GcsSource, LocalFileSource, ) -from google.maps.mapsplatformdatasets_v1alpha.types.dataset import Dataset, State, Usage -from google.maps.mapsplatformdatasets_v1alpha.types.maps_platform_datasets import ( +from google.maps.mapsplatformdatasets_v1.types.dataset import Dataset, Status, Usage +from google.maps.mapsplatformdatasets_v1.types.maps_platform_datasets import ( CreateDatasetRequest, DeleteDatasetRequest, - DeleteDatasetVersionRequest, GetDatasetRequest, ListDatasetsRequest, ListDatasetsResponse, - ListDatasetVersionsRequest, - ListDatasetVersionsResponse, UpdateDatasetMetadataRequest, ) __all__ = ( - "MapsPlatformDatasetsV1AlphaClient", - "MapsPlatformDatasetsV1AlphaAsyncClient", + "MapsPlatformDatasetsClient", + "MapsPlatformDatasetsAsyncClient", "GcsSource", "LocalFileSource", "FileFormat", "Dataset", - "State", + "Status", "Usage", "CreateDatasetRequest", "DeleteDatasetRequest", - "DeleteDatasetVersionRequest", "GetDatasetRequest", "ListDatasetsRequest", "ListDatasetsResponse", - "ListDatasetVersionsRequest", - "ListDatasetVersionsResponse", 
"UpdateDatasetMetadataRequest", ) diff --git a/packages/google-maps-mapsplatformdatasets/google/maps/mapsplatformdatasets_v1/services/maps_platform_datasets/async_client.py b/packages/google-maps-mapsplatformdatasets/google/maps/mapsplatformdatasets_v1/services/maps_platform_datasets/async_client.py index 5250300b0b2e..4ea4e5108b58 100644 --- a/packages/google-maps-mapsplatformdatasets/google/maps/mapsplatformdatasets_v1/services/maps_platform_datasets/async_client.py +++ b/packages/google-maps-mapsplatformdatasets/google/maps/mapsplatformdatasets_v1/services/maps_platform_datasets/async_client.py @@ -280,7 +280,7 @@ async def create_dataset( timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> gmm_dataset.Dataset: - r"""Create a new dataset for the specified project. + r"""Creates a new dataset for the specified project. .. code-block:: python @@ -314,7 +314,7 @@ async def sample_create_dataset(): parent (:class:`str`): Required. Parent project that will own the dataset. Format: - projects/{$project} + projects/{project} This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this @@ -334,7 +334,7 @@ async def sample_create_dataset(): Returns: google.maps.mapsplatformdatasets_v1.types.Dataset: - A representation of a Maps Dataset + A representation of a dataset resource. """ @@ -398,7 +398,7 @@ async def update_dataset_metadata( timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> gmm_dataset.Dataset: - r"""Update the metadata for the dataset. + r"""Updates the metadata for the dataset. .. code-block:: python @@ -430,16 +430,16 @@ async def sample_update_dataset_metadata(): The request object. Request to update the metadata fields of the dataset. dataset (:class:`google.maps.mapsplatformdatasets_v1.types.Dataset`): - Required. The dataset to update. The dataset's name is - used to identify the dataset to be updated. The name has - the format: projects/{project}/datasets/{dataset_id} + Required. Resource name of the dataset to update. + Format: projects/{project}/datasets/{dataset_id} This corresponds to the ``dataset`` field on the ``request`` instance; if ``request`` is provided, this should not be set. update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): - The list of fields to be updated. Support the value "*" - for full replacement. + The list of fields to be updated. + + The value "*" is used for full replacement (default). This corresponds to the ``update_mask`` field on the ``request`` instance; if ``request`` is provided, this @@ -452,7 +452,7 @@ async def sample_update_dataset_metadata(): Returns: google.maps.mapsplatformdatasets_v1.types.Dataset: - A representation of a Maps Dataset + A representation of a dataset resource. """ @@ -515,7 +515,7 @@ async def get_dataset( timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> dataset.Dataset: - r"""Get the dataset. + r"""Gets the dataset. .. code-block:: python @@ -547,9 +547,17 @@ async def sample_get_dataset(): request (Optional[Union[google.maps.mapsplatformdatasets_v1.types.GetDatasetRequest, dict]]): The request object. Request to get the specified dataset. name (:class:`str`): - Required. Resource name. + Required. Resource name. Format: projects/{project}/datasets/{dataset_id} + Can also fetch some special versions by appending "@" + and a tag. 
Format: + projects/{project}/datasets/{dataset_id}@{tag} + + Tag "active": The info of the latest completed version + will be included, and NOT_FOUND if the dataset does not + have one. + This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. @@ -561,7 +569,7 @@ async def sample_get_dataset(): Returns: google.maps.mapsplatformdatasets_v1.types.Dataset: - A representation of a Maps Dataset + A representation of a dataset resource. """ @@ -622,7 +630,7 @@ async def list_datasets( timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListDatasetsAsyncPager: - r"""List all the datasets for the specified project. + r"""Lists all the datasets for the specified project. .. code-block:: python @@ -657,7 +665,8 @@ async def sample_list_datasets(): project. parent (:class:`str`): Required. The name of the project to - list all the datasets for. + list all the datasets for. Format: + projects/{project} This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this @@ -670,8 +679,8 @@ async def sample_list_datasets(): Returns: google.maps.mapsplatformdatasets_v1.services.maps_platform_datasets.pagers.ListDatasetsAsyncPager: - Response to list datasets for the - project. + Response object of ListDatasets. + Iterating over this object will yield results and resolve additional pages automatically. @@ -743,7 +752,7 @@ async def delete_dataset( timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> None: - r"""Delete the specified dataset . + r"""Deletes the specified dataset. .. code-block:: python @@ -771,11 +780,9 @@ async def sample_delete_dataset(): Args: request (Optional[Union[google.maps.mapsplatformdatasets_v1.types.DeleteDatasetRequest, dict]]): The request object. Request to delete a dataset. - - The dataset to be deleted. name (:class:`str`): - Required. Format: - projects/${project}/datasets/{dataset_id} + Required. The name of the dataset to delete. Format: + projects/{project}/datasets/{dataset_id} This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this diff --git a/packages/google-maps-mapsplatformdatasets/google/maps/mapsplatformdatasets_v1/services/maps_platform_datasets/client.py b/packages/google-maps-mapsplatformdatasets/google/maps/mapsplatformdatasets_v1/services/maps_platform_datasets/client.py index 5ab7972ff1d4..239667e2706b 100644 --- a/packages/google-maps-mapsplatformdatasets/google/maps/mapsplatformdatasets_v1/services/maps_platform_datasets/client.py +++ b/packages/google-maps-mapsplatformdatasets/google/maps/mapsplatformdatasets_v1/services/maps_platform_datasets/client.py @@ -695,7 +695,7 @@ def create_dataset( timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> gmm_dataset.Dataset: - r"""Create a new dataset for the specified project. + r"""Creates a new dataset for the specified project. .. code-block:: python @@ -729,7 +729,7 @@ def sample_create_dataset(): parent (str): Required. Parent project that will own the dataset. Format: - projects/{$project} + projects/{project} This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this @@ -749,7 +749,7 @@ def sample_create_dataset(): Returns: google.maps.mapsplatformdatasets_v1.types.Dataset: - A representation of a Maps Dataset + A representation of a dataset resource. 
""" @@ -810,7 +810,7 @@ def update_dataset_metadata( timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> gmm_dataset.Dataset: - r"""Update the metadata for the dataset. + r"""Updates the metadata for the dataset. .. code-block:: python @@ -842,16 +842,16 @@ def sample_update_dataset_metadata(): The request object. Request to update the metadata fields of the dataset. dataset (google.maps.mapsplatformdatasets_v1.types.Dataset): - Required. The dataset to update. The dataset's name is - used to identify the dataset to be updated. The name has - the format: projects/{project}/datasets/{dataset_id} + Required. Resource name of the dataset to update. + Format: projects/{project}/datasets/{dataset_id} This corresponds to the ``dataset`` field on the ``request`` instance; if ``request`` is provided, this should not be set. update_mask (google.protobuf.field_mask_pb2.FieldMask): - The list of fields to be updated. Support the value "*" - for full replacement. + The list of fields to be updated. + + The value "*" is used for full replacement (default). This corresponds to the ``update_mask`` field on the ``request`` instance; if ``request`` is provided, this @@ -864,7 +864,7 @@ def sample_update_dataset_metadata(): Returns: google.maps.mapsplatformdatasets_v1.types.Dataset: - A representation of a Maps Dataset + A representation of a dataset resource. """ @@ -924,7 +924,7 @@ def get_dataset( timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> dataset.Dataset: - r"""Get the dataset. + r"""Gets the dataset. .. code-block:: python @@ -956,9 +956,17 @@ def sample_get_dataset(): request (Union[google.maps.mapsplatformdatasets_v1.types.GetDatasetRequest, dict]): The request object. Request to get the specified dataset. name (str): - Required. Resource name. + Required. Resource name. Format: projects/{project}/datasets/{dataset_id} + Can also fetch some special versions by appending "@" + and a tag. Format: + projects/{project}/datasets/{dataset_id}@{tag} + + Tag "active": The info of the latest completed version + will be included, and NOT_FOUND if the dataset does not + have one. + This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. @@ -970,7 +978,7 @@ def sample_get_dataset(): Returns: google.maps.mapsplatformdatasets_v1.types.Dataset: - A representation of a Maps Dataset + A representation of a dataset resource. """ @@ -1028,7 +1036,7 @@ def list_datasets( timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListDatasetsPager: - r"""List all the datasets for the specified project. + r"""Lists all the datasets for the specified project. .. code-block:: python @@ -1063,7 +1071,8 @@ def sample_list_datasets(): project. parent (str): Required. The name of the project to - list all the datasets for. + list all the datasets for. Format: + projects/{project} This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this @@ -1076,8 +1085,8 @@ def sample_list_datasets(): Returns: google.maps.mapsplatformdatasets_v1.services.maps_platform_datasets.pagers.ListDatasetsPager: - Response to list datasets for the - project. + Response object of ListDatasets. + Iterating over this object will yield results and resolve additional pages automatically. 
@@ -1146,7 +1155,7 @@ def delete_dataset( timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> None: - r"""Delete the specified dataset . + r"""Deletes the specified dataset. .. code-block:: python @@ -1174,11 +1183,9 @@ def sample_delete_dataset(): Args: request (Union[google.maps.mapsplatformdatasets_v1.types.DeleteDatasetRequest, dict]): The request object. Request to delete a dataset. - - The dataset to be deleted. name (str): - Required. Format: - projects/${project}/datasets/{dataset_id} + Required. The name of the dataset to delete. Format: + projects/{project}/datasets/{dataset_id} This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this diff --git a/packages/google-maps-mapsplatformdatasets/google/maps/mapsplatformdatasets_v1/services/maps_platform_datasets/transports/grpc.py b/packages/google-maps-mapsplatformdatasets/google/maps/mapsplatformdatasets_v1/services/maps_platform_datasets/transports/grpc.py index 5c65c669beb8..d9beffaeb8e2 100644 --- a/packages/google-maps-mapsplatformdatasets/google/maps/mapsplatformdatasets_v1/services/maps_platform_datasets/transports/grpc.py +++ b/packages/google-maps-mapsplatformdatasets/google/maps/mapsplatformdatasets_v1/services/maps_platform_datasets/transports/grpc.py @@ -242,7 +242,7 @@ def create_dataset( ) -> Callable[[maps_platform_datasets.CreateDatasetRequest], gmm_dataset.Dataset]: r"""Return a callable for the create dataset method over gRPC. - Create a new dataset for the specified project. + Creates a new dataset for the specified project. Returns: Callable[[~.CreateDatasetRequest], @@ -270,7 +270,7 @@ def update_dataset_metadata( ]: r"""Return a callable for the update dataset metadata method over gRPC. - Update the metadata for the dataset. + Updates the metadata for the dataset. Returns: Callable[[~.UpdateDatasetMetadataRequest], @@ -296,7 +296,7 @@ def get_dataset( ) -> Callable[[maps_platform_datasets.GetDatasetRequest], dataset.Dataset]: r"""Return a callable for the get dataset method over gRPC. - Get the dataset. + Gets the dataset. Returns: Callable[[~.GetDatasetRequest], @@ -325,7 +325,7 @@ def list_datasets( ]: r"""Return a callable for the list datasets method over gRPC. - List all the datasets for the specified project. + Lists all the datasets for the specified project. Returns: Callable[[~.ListDatasetsRequest], @@ -351,7 +351,7 @@ def delete_dataset( ) -> Callable[[maps_platform_datasets.DeleteDatasetRequest], empty_pb2.Empty]: r"""Return a callable for the delete dataset method over gRPC. - Delete the specified dataset . + Deletes the specified dataset. Returns: Callable[[~.DeleteDatasetRequest], diff --git a/packages/google-maps-mapsplatformdatasets/google/maps/mapsplatformdatasets_v1/services/maps_platform_datasets/transports/grpc_asyncio.py b/packages/google-maps-mapsplatformdatasets/google/maps/mapsplatformdatasets_v1/services/maps_platform_datasets/transports/grpc_asyncio.py index 6b43710a429f..5de0305027db 100644 --- a/packages/google-maps-mapsplatformdatasets/google/maps/mapsplatformdatasets_v1/services/maps_platform_datasets/transports/grpc_asyncio.py +++ b/packages/google-maps-mapsplatformdatasets/google/maps/mapsplatformdatasets_v1/services/maps_platform_datasets/transports/grpc_asyncio.py @@ -248,7 +248,7 @@ def create_dataset( ]: r"""Return a callable for the create dataset method over gRPC. - Create a new dataset for the specified project. + Creates a new dataset for the specified project. 
Returns: Callable[[~.CreateDatasetRequest], @@ -277,7 +277,7 @@ def update_dataset_metadata( ]: r"""Return a callable for the update dataset metadata method over gRPC. - Update the metadata for the dataset. + Updates the metadata for the dataset. Returns: Callable[[~.UpdateDatasetMetadataRequest], @@ -305,7 +305,7 @@ def get_dataset( ]: r"""Return a callable for the get dataset method over gRPC. - Get the dataset. + Gets the dataset. Returns: Callable[[~.GetDatasetRequest], @@ -334,7 +334,7 @@ def list_datasets( ]: r"""Return a callable for the list datasets method over gRPC. - List all the datasets for the specified project. + Lists all the datasets for the specified project. Returns: Callable[[~.ListDatasetsRequest], @@ -362,7 +362,7 @@ def delete_dataset( ]: r"""Return a callable for the delete dataset method over gRPC. - Delete the specified dataset . + Deletes the specified dataset. Returns: Callable[[~.DeleteDatasetRequest], diff --git a/packages/google-maps-mapsplatformdatasets/google/maps/mapsplatformdatasets_v1/services/maps_platform_datasets/transports/rest.py b/packages/google-maps-mapsplatformdatasets/google/maps/mapsplatformdatasets_v1/services/maps_platform_datasets/transports/rest.py index 79ddf7c8c1c8..756dfc68600f 100644 --- a/packages/google-maps-mapsplatformdatasets/google/maps/mapsplatformdatasets_v1/services/maps_platform_datasets/transports/rest.py +++ b/packages/google-maps-mapsplatformdatasets/google/maps/mapsplatformdatasets_v1/services/maps_platform_datasets/transports/rest.py @@ -343,7 +343,7 @@ def __call__( Returns: ~.gmm_dataset.Dataset: - A representation of a Maps Dataset + A representation of a dataset resource. """ @@ -429,8 +429,6 @@ def __call__( Args: request (~.maps_platform_datasets.DeleteDatasetRequest): The request object. Request to delete a dataset. - - The dataset to be deleted. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -512,7 +510,7 @@ def __call__( Returns: ~.dataset.Dataset: - A representation of a Maps Dataset + A representation of a dataset resource. """ @@ -600,9 +598,7 @@ def __call__( Returns: ~.maps_platform_datasets.ListDatasetsResponse: - Response to list datasets for the - project. - + Response object of ListDatasets. """ http_options: List[Dict[str, str]] = [ @@ -688,7 +684,7 @@ def __call__( Returns: ~.gmm_dataset.Dataset: - A representation of a Maps Dataset + A representation of a dataset resource. """ diff --git a/packages/google-maps-mapsplatformdatasets/google/maps/mapsplatformdatasets_v1/types/dataset.py b/packages/google-maps-mapsplatformdatasets/google/maps/mapsplatformdatasets_v1/types/dataset.py index b926719aac68..cf55e5348d5f 100644 --- a/packages/google-maps-mapsplatformdatasets/google/maps/mapsplatformdatasets_v1/types/dataset.py +++ b/packages/google-maps-mapsplatformdatasets/google/maps/mapsplatformdatasets_v1/types/dataset.py @@ -48,7 +48,7 @@ class Usage(proto.Enum): class Dataset(proto.Message): - r"""A representation of a Maps Dataset resource. + r"""A representation of a dataset resource. This message has `oneof`_ fields (mutually exclusive fields). For each oneof, at most one member field can be set at the same time. @@ -59,12 +59,14 @@ class Dataset(proto.Message): Attributes: name (str): - Resource name, projects/{project}/datasets/{dataset_id} + Resource name. Format: + projects/{project}/datasets/{dataset_id} display_name (str): - Human readable name, shown in the console UI - . 
+ Human readable name, shown in the console UI. + + Must be unique within a project. description (str): - A description of this dataset . + A description of this dataset. version_id (str): The version ID of the dataset. usage (MutableSequence[google.maps.mapsplatformdatasets_v1.types.Usage]): @@ -89,7 +91,8 @@ class Dataset(proto.Message): Output only. Time when the dataset metadata was last updated. version_create_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. Time when the data was uploaded. + Output only. Time when this version was + created. version_description (str): Output only. The description for this version of dataset. It is provided when importing data diff --git a/packages/google-maps-mapsplatformdatasets/google/maps/mapsplatformdatasets_v1/types/maps_platform_datasets.py b/packages/google-maps-mapsplatformdatasets/google/maps/mapsplatformdatasets_v1/types/maps_platform_datasets.py index 836dd6ff4e20..736813725e93 100644 --- a/packages/google-maps-mapsplatformdatasets/google/maps/mapsplatformdatasets_v1/types/maps_platform_datasets.py +++ b/packages/google-maps-mapsplatformdatasets/google/maps/mapsplatformdatasets_v1/types/maps_platform_datasets.py @@ -41,7 +41,7 @@ class CreateDatasetRequest(proto.Message): Attributes: parent (str): Required. Parent project that will own the - dataset. Format: projects/{$project} + dataset. Format: projects/{project} dataset (google.maps.mapsplatformdatasets_v1.types.Dataset): Required. The dataset version to create. """ @@ -62,12 +62,12 @@ class UpdateDatasetMetadataRequest(proto.Message): Attributes: dataset (google.maps.mapsplatformdatasets_v1.types.Dataset): - Required. The dataset to update. The dataset's name is used - to identify the dataset to be updated. The name has the - format: projects/{project}/datasets/{dataset_id} + Required. Resource name of the dataset to update. Format: + projects/{project}/datasets/{dataset_id} update_mask (google.protobuf.field_mask_pb2.FieldMask): - The list of fields to be updated. Support the value "*" for - full replacement. + The list of fields to be updated. + + The value "*" is used for full replacement (default). """ dataset: gmm_dataset.Dataset = proto.Field( @@ -87,8 +87,14 @@ class GetDatasetRequest(proto.Message): Attributes: name (str): - Required. Resource name. + Required. Resource name. Format: projects/{project}/datasets/{dataset_id} + + Can also fetch some special versions by appending "@" and a + tag. Format: projects/{project}/datasets/{dataset_id}@{tag} + + Tag "active": The info of the latest completed version will + be included, and NOT_FOUND if the dataset does not have one. """ name: str = proto.Field( @@ -103,15 +109,29 @@ class ListDatasetsRequest(proto.Message): Attributes: parent (str): Required. The name of the project to list all - the datasets for. + the datasets for. Format: projects/{project} page_size (int): - The maximum number of versions to return per - page. If unspecified (or zero), all datasets - will be returned. + The maximum number of datasets to return per + page. + If unspecified (or zero), all datasets will be + returned. page_token (str): The page token, received from a previous ListDatasets call. Provide this to retrieve the subsequent page. + tag (str): + The tag that specifies the desired version + for each dataset. + Note that when pagination is also specified, + some filtering can happen after pagination, + which may cause the response to contain fewer + datasets than the page size, even if it's not + the last page. 
+ + Tag "active": Each dataset in the response will + include the info of its latest completed + version, and the dataset will be skipped if it + does not have one. """ parent: str = proto.Field( @@ -126,18 +146,23 @@ class ListDatasetsRequest(proto.Message): proto.STRING, number=3, ) + tag: str = proto.Field( + proto.STRING, + number=4, + ) class ListDatasetsResponse(proto.Message): - r"""Response to list datasets for the project. + r"""Response object of ListDatasets. Attributes: datasets (MutableSequence[google.maps.mapsplatformdatasets_v1.types.Dataset]): All the datasets for the project. next_page_token (str): A token that can be sent as ``page_token`` to retrieve the - next page. If this field is omitted, there are no subsequent - pages. + next page. + + If this field is omitted, there are no subsequent pages. """ @property @@ -158,11 +183,10 @@ def raw_page(self): class DeleteDatasetRequest(proto.Message): r"""Request to delete a dataset. - The dataset to be deleted. - Attributes: name (str): - Required. Format: projects/${project}/datasets/{dataset_id} + Required. The name of the dataset to delete. Format: + projects/{project}/datasets/{dataset_id} """ name: str = proto.Field( diff --git a/packages/google-maps-mapsplatformdatasets/google/maps/mapsplatformdatasets_v1alpha/__init__.py b/packages/google-maps-mapsplatformdatasets/google/maps/mapsplatformdatasets_v1alpha/__init__.py deleted file mode 100644 index 2349e1c727d0..000000000000 --- a/packages/google-maps-mapsplatformdatasets/google/maps/mapsplatformdatasets_v1alpha/__init__.py +++ /dev/null @@ -1,57 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -from google.maps.mapsplatformdatasets_v1alpha import gapic_version as package_version - -__version__ = package_version.__version__ - - -from .services.maps_platform_datasets_v1_alpha import ( - MapsPlatformDatasetsV1AlphaAsyncClient, - MapsPlatformDatasetsV1AlphaClient, -) -from .types.data_source import FileFormat, GcsSource, LocalFileSource -from .types.dataset import Dataset, State, Usage -from .types.maps_platform_datasets import ( - CreateDatasetRequest, - DeleteDatasetRequest, - DeleteDatasetVersionRequest, - GetDatasetRequest, - ListDatasetsRequest, - ListDatasetsResponse, - ListDatasetVersionsRequest, - ListDatasetVersionsResponse, - UpdateDatasetMetadataRequest, -) - -__all__ = ( - "MapsPlatformDatasetsV1AlphaAsyncClient", - "CreateDatasetRequest", - "Dataset", - "DeleteDatasetRequest", - "DeleteDatasetVersionRequest", - "FileFormat", - "GcsSource", - "GetDatasetRequest", - "ListDatasetVersionsRequest", - "ListDatasetVersionsResponse", - "ListDatasetsRequest", - "ListDatasetsResponse", - "LocalFileSource", - "MapsPlatformDatasetsV1AlphaClient", - "State", - "UpdateDatasetMetadataRequest", - "Usage", -) diff --git a/packages/google-maps-mapsplatformdatasets/google/maps/mapsplatformdatasets_v1alpha/gapic_metadata.json b/packages/google-maps-mapsplatformdatasets/google/maps/mapsplatformdatasets_v1alpha/gapic_metadata.json deleted file mode 100644 index 63cd53b2c8d7..000000000000 --- a/packages/google-maps-mapsplatformdatasets/google/maps/mapsplatformdatasets_v1alpha/gapic_metadata.json +++ /dev/null @@ -1,133 +0,0 @@ - { - "comment": "This file maps proto services/RPCs to the corresponding library clients/methods", - "language": "python", - "libraryPackage": "google.maps.mapsplatformdatasets_v1alpha", - "protoPackage": "google.maps.mapsplatformdatasets.v1alpha", - "schema": "1.0", - "services": { - "MapsPlatformDatasetsV1Alpha": { - "clients": { - "grpc": { - "libraryClient": "MapsPlatformDatasetsV1AlphaClient", - "rpcs": { - "CreateDataset": { - "methods": [ - "create_dataset" - ] - }, - "DeleteDataset": { - "methods": [ - "delete_dataset" - ] - }, - "DeleteDatasetVersion": { - "methods": [ - "delete_dataset_version" - ] - }, - "GetDataset": { - "methods": [ - "get_dataset" - ] - }, - "ListDatasetVersions": { - "methods": [ - "list_dataset_versions" - ] - }, - "ListDatasets": { - "methods": [ - "list_datasets" - ] - }, - "UpdateDatasetMetadata": { - "methods": [ - "update_dataset_metadata" - ] - } - } - }, - "grpc-async": { - "libraryClient": "MapsPlatformDatasetsV1AlphaAsyncClient", - "rpcs": { - "CreateDataset": { - "methods": [ - "create_dataset" - ] - }, - "DeleteDataset": { - "methods": [ - "delete_dataset" - ] - }, - "DeleteDatasetVersion": { - "methods": [ - "delete_dataset_version" - ] - }, - "GetDataset": { - "methods": [ - "get_dataset" - ] - }, - "ListDatasetVersions": { - "methods": [ - "list_dataset_versions" - ] - }, - "ListDatasets": { - "methods": [ - "list_datasets" - ] - }, - "UpdateDatasetMetadata": { - "methods": [ - "update_dataset_metadata" - ] - } - } - }, - "rest": { - "libraryClient": "MapsPlatformDatasetsV1AlphaClient", - "rpcs": { - "CreateDataset": { - "methods": [ - "create_dataset" - ] - }, - "DeleteDataset": { - "methods": [ - "delete_dataset" - ] - }, - "DeleteDatasetVersion": { - "methods": [ - "delete_dataset_version" - ] - }, - "GetDataset": { - "methods": [ - "get_dataset" - ] - }, - "ListDatasetVersions": { - "methods": [ - "list_dataset_versions" - ] - }, - "ListDatasets": { - "methods": [ - "list_datasets" - ] - }, - 
"UpdateDatasetMetadata": { - "methods": [ - "update_dataset_metadata" - ] - } - } - } - } - } - } -} diff --git a/packages/google-maps-mapsplatformdatasets/google/maps/mapsplatformdatasets_v1alpha/py.typed b/packages/google-maps-mapsplatformdatasets/google/maps/mapsplatformdatasets_v1alpha/py.typed deleted file mode 100644 index b186faa8f14d..000000000000 --- a/packages/google-maps-mapsplatformdatasets/google/maps/mapsplatformdatasets_v1alpha/py.typed +++ /dev/null @@ -1,2 +0,0 @@ -# Marker file for PEP 561. -# The google-maps-mapsplatformdatasets package uses inline types. diff --git a/packages/google-maps-mapsplatformdatasets/google/maps/mapsplatformdatasets_v1alpha/services/maps_platform_datasets_v1_alpha/transports/__init__.py b/packages/google-maps-mapsplatformdatasets/google/maps/mapsplatformdatasets_v1alpha/services/maps_platform_datasets_v1_alpha/transports/__init__.py deleted file mode 100644 index 865096bda3ac..000000000000 --- a/packages/google-maps-mapsplatformdatasets/google/maps/mapsplatformdatasets_v1alpha/services/maps_platform_datasets_v1_alpha/transports/__init__.py +++ /dev/null @@ -1,41 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from collections import OrderedDict -from typing import Dict, Type - -from .base import MapsPlatformDatasetsV1AlphaTransport -from .grpc import MapsPlatformDatasetsV1AlphaGrpcTransport -from .grpc_asyncio import MapsPlatformDatasetsV1AlphaGrpcAsyncIOTransport -from .rest import ( - MapsPlatformDatasetsV1AlphaRestInterceptor, - MapsPlatformDatasetsV1AlphaRestTransport, -) - -# Compile a registry of transports. -_transport_registry = ( - OrderedDict() -) # type: Dict[str, Type[MapsPlatformDatasetsV1AlphaTransport]] -_transport_registry["grpc"] = MapsPlatformDatasetsV1AlphaGrpcTransport -_transport_registry["grpc_asyncio"] = MapsPlatformDatasetsV1AlphaGrpcAsyncIOTransport -_transport_registry["rest"] = MapsPlatformDatasetsV1AlphaRestTransport - -__all__ = ( - "MapsPlatformDatasetsV1AlphaTransport", - "MapsPlatformDatasetsV1AlphaGrpcTransport", - "MapsPlatformDatasetsV1AlphaGrpcAsyncIOTransport", - "MapsPlatformDatasetsV1AlphaRestTransport", - "MapsPlatformDatasetsV1AlphaRestInterceptor", -) diff --git a/packages/google-maps-mapsplatformdatasets/google/maps/mapsplatformdatasets_v1alpha/types/__init__.py b/packages/google-maps-mapsplatformdatasets/google/maps/mapsplatformdatasets_v1alpha/types/__init__.py deleted file mode 100644 index 27f87ef22eb6..000000000000 --- a/packages/google-maps-mapsplatformdatasets/google/maps/mapsplatformdatasets_v1alpha/types/__init__.py +++ /dev/null @@ -1,46 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from .data_source import FileFormat, GcsSource, LocalFileSource -from .dataset import Dataset, State, Usage -from .maps_platform_datasets import ( - CreateDatasetRequest, - DeleteDatasetRequest, - DeleteDatasetVersionRequest, - GetDatasetRequest, - ListDatasetsRequest, - ListDatasetsResponse, - ListDatasetVersionsRequest, - ListDatasetVersionsResponse, - UpdateDatasetMetadataRequest, -) - -__all__ = ( - "GcsSource", - "LocalFileSource", - "FileFormat", - "Dataset", - "State", - "Usage", - "CreateDatasetRequest", - "DeleteDatasetRequest", - "DeleteDatasetVersionRequest", - "GetDatasetRequest", - "ListDatasetsRequest", - "ListDatasetsResponse", - "ListDatasetVersionsRequest", - "ListDatasetVersionsResponse", - "UpdateDatasetMetadataRequest", -) diff --git a/packages/google-maps-mapsplatformdatasets/google/maps/mapsplatformdatasets_v1alpha/types/data_source.py b/packages/google-maps-mapsplatformdatasets/google/maps/mapsplatformdatasets_v1alpha/types/data_source.py deleted file mode 100644 index 31529cb531d3..000000000000 --- a/packages/google-maps-mapsplatformdatasets/google/maps/mapsplatformdatasets_v1alpha/types/data_source.py +++ /dev/null @@ -1,103 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from __future__ import annotations - -from typing import MutableMapping, MutableSequence - -import proto # type: ignore - -__protobuf__ = proto.module( - package="google.maps.mapsplatformdatasets.v1alpha", - manifest={ - "FileFormat", - "LocalFileSource", - "GcsSource", - }, -) - - -class FileFormat(proto.Enum): - r"""The format of the file being uploaded. - - Values: - FILE_FORMAT_UNSPECIFIED (0): - Unspecified file format. - FILE_FORMAT_GEOJSON (1): - GeoJson file. - FILE_FORMAT_KML (2): - KML file. - FILE_FORMAT_CSV (3): - CSV file. - FILE_FORMAT_PROTO (4): - Protobuf file. - FILE_FORMAT_KMZ (5): - KMZ file. - """ - FILE_FORMAT_UNSPECIFIED = 0 - FILE_FORMAT_GEOJSON = 1 - FILE_FORMAT_KML = 2 - FILE_FORMAT_CSV = 3 - FILE_FORMAT_PROTO = 4 - FILE_FORMAT_KMZ = 5 - - -class LocalFileSource(proto.Message): - r"""The details about the data source when it is a local file. - - Attributes: - filename (str): - The file name and extension of the uploaded - file. - file_format (google.maps.mapsplatformdatasets_v1alpha.types.FileFormat): - The format of the file that is being - uploaded. 
- """ - - filename: str = proto.Field( - proto.STRING, - number=1, - ) - file_format: "FileFormat" = proto.Field( - proto.ENUM, - number=2, - enum="FileFormat", - ) - - -class GcsSource(proto.Message): - r"""The details about the data source when it is in Google Cloud - Storage. - - Attributes: - input_uri (str): - Source data URI. For example, ``gs://my_bucket/my_object``. - file_format (google.maps.mapsplatformdatasets_v1alpha.types.FileFormat): - The file format of the Google Cloud Storage - object. This is used mainly for validation. - """ - - input_uri: str = proto.Field( - proto.STRING, - number=1, - ) - file_format: "FileFormat" = proto.Field( - proto.ENUM, - number=2, - enum="FileFormat", - ) - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-maps-mapsplatformdatasets/google/maps/mapsplatformdatasets_v1alpha/types/dataset.py b/packages/google-maps-mapsplatformdatasets/google/maps/mapsplatformdatasets_v1alpha/types/dataset.py deleted file mode 100644 index ae0423c9cef0..000000000000 --- a/packages/google-maps-mapsplatformdatasets/google/maps/mapsplatformdatasets_v1alpha/types/dataset.py +++ /dev/null @@ -1,190 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from __future__ import annotations - -from typing import MutableMapping, MutableSequence - -from google.protobuf import timestamp_pb2 # type: ignore -import proto # type: ignore - -from google.maps.mapsplatformdatasets_v1alpha.types import data_source - -__protobuf__ = proto.module( - package="google.maps.mapsplatformdatasets.v1alpha", - manifest={ - "Usage", - "State", - "Dataset", - }, -) - - -class Usage(proto.Enum): - r"""Usage specifies where the data is intended to be used to - inform how to process the data. - - Values: - USAGE_UNSPECIFIED (0): - The usage of this dataset is not set. - USAGE_DATA_DRIVEN_STYLING (1): - This dataset will be used for data driven - styling. - USAGE_AREA_AFFORDANCES (2): - This dataset will be used for area - affordances in routing. - USAGE_ASSISTED_DRIVING (3): - This dataset will be used for assisted - driving in routing. - """ - USAGE_UNSPECIFIED = 0 - USAGE_DATA_DRIVEN_STYLING = 1 - USAGE_AREA_AFFORDANCES = 2 - USAGE_ASSISTED_DRIVING = 3 - - -class State(proto.Enum): - r"""State specifies the status of the import of the latest - dataset version. - - Values: - STATE_UNSPECIFIED (0): - The state of this dataset is not set. - STATE_IMPORTING (1): - The dataset version is getting imported. - STATE_IMPORT_SUCCEEDED (2): - The dataset version succeeded in getting - imported. - STATE_IMPORT_FAILED (3): - The dataset version failed to get imported. - """ - STATE_UNSPECIFIED = 0 - STATE_IMPORTING = 1 - STATE_IMPORT_SUCCEEDED = 2 - STATE_IMPORT_FAILED = 3 - - -class Dataset(proto.Message): - r"""A representation of a maps platform dataset. - - This message has `oneof`_ fields (mutually exclusive fields). 
- For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - name (str): - Resource name, projects/{project}/datasets/{dataset_id} - display_name (str): - Human readable name, shown in the console UI. - Set by customer. - description (str): - A description of this dataset; set by the - customer. - version_id (str): - The version of the dataset. - usage (MutableSequence[google.maps.mapsplatformdatasets_v1alpha.types.Usage]): - Specified use case(s) for this dataset. - local_file_source (google.maps.mapsplatformdatasets_v1alpha.types.LocalFileSource): - A local file source for the dataset for a - single upload. - - This field is a member of `oneof`_ ``data_source``. - gcs_source (google.maps.mapsplatformdatasets_v1alpha.types.GcsSource): - A Google Cloud Storage file source for the - dataset for a single upload. - - This field is a member of `oneof`_ ``data_source``. - status (google.maps.mapsplatformdatasets_v1alpha.types.State): - The status of the import of the latest - dataset version. - create_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. Time when the dataset was first - created. - update_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. Time when the dataset metadata - was last updated. - version_create_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. Time when this version of - dataset was created. (It happened when importing - data to the dataset) - version_description (str): - Output only. The description for this version - of dataset. It is provided when importing data - to the dataset. 
- """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - display_name: str = proto.Field( - proto.STRING, - number=2, - ) - description: str = proto.Field( - proto.STRING, - number=3, - ) - version_id: str = proto.Field( - proto.STRING, - number=4, - ) - usage: MutableSequence["Usage"] = proto.RepeatedField( - proto.ENUM, - number=5, - enum="Usage", - ) - local_file_source: data_source.LocalFileSource = proto.Field( - proto.MESSAGE, - number=6, - oneof="data_source", - message=data_source.LocalFileSource, - ) - gcs_source: data_source.GcsSource = proto.Field( - proto.MESSAGE, - number=7, - oneof="data_source", - message=data_source.GcsSource, - ) - status: "State" = proto.Field( - proto.ENUM, - number=12, - enum="State", - ) - create_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=8, - message=timestamp_pb2.Timestamp, - ) - update_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=9, - message=timestamp_pb2.Timestamp, - ) - version_create_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=10, - message=timestamp_pb2.Timestamp, - ) - version_description: str = proto.Field( - proto.STRING, - number=11, - ) - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-maps-mapsplatformdatasets/google/maps/mapsplatformdatasets_v1alpha/types/maps_platform_datasets.py b/packages/google-maps-mapsplatformdatasets/google/maps/mapsplatformdatasets_v1alpha/types/maps_platform_datasets.py deleted file mode 100644 index ac1d5ccabae9..000000000000 --- a/packages/google-maps-mapsplatformdatasets/google/maps/mapsplatformdatasets_v1alpha/types/maps_platform_datasets.py +++ /dev/null @@ -1,278 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from __future__ import annotations - -from typing import MutableMapping, MutableSequence - -from google.protobuf import field_mask_pb2 # type: ignore -import proto # type: ignore - -from google.maps.mapsplatformdatasets_v1alpha.types import dataset as gmm_dataset - -__protobuf__ = proto.module( - package="google.maps.mapsplatformdatasets.v1alpha", - manifest={ - "CreateDatasetRequest", - "UpdateDatasetMetadataRequest", - "GetDatasetRequest", - "ListDatasetVersionsRequest", - "ListDatasetVersionsResponse", - "ListDatasetsRequest", - "ListDatasetsResponse", - "DeleteDatasetRequest", - "DeleteDatasetVersionRequest", - }, -) - - -class CreateDatasetRequest(proto.Message): - r"""Request to create a maps dataset. - - Attributes: - parent (str): - Required. Parent project that will own the dataset. Format: - projects/{$project_number} - dataset (google.maps.mapsplatformdatasets_v1alpha.types.Dataset): - Required. The dataset version to create. 
- """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - dataset: gmm_dataset.Dataset = proto.Field( - proto.MESSAGE, - number=2, - message=gmm_dataset.Dataset, - ) - - -class UpdateDatasetMetadataRequest(proto.Message): - r"""Request to update the metadata fields of the dataset. - - Attributes: - dataset (google.maps.mapsplatformdatasets_v1alpha.types.Dataset): - Required. The dataset to update. The dataset's name is used - to identify the dataset to be updated. The name has the - format: projects/{project}/datasets/{dataset_id} - update_mask (google.protobuf.field_mask_pb2.FieldMask): - The list of fields to be updated. Support the value "*" for - full replacement. - """ - - dataset: gmm_dataset.Dataset = proto.Field( - proto.MESSAGE, - number=1, - message=gmm_dataset.Dataset, - ) - update_mask: field_mask_pb2.FieldMask = proto.Field( - proto.MESSAGE, - number=2, - message=field_mask_pb2.FieldMask, - ) - - -class GetDatasetRequest(proto.Message): - r"""Request to get the specified dataset. - - Attributes: - name (str): - Required. Resource name. Can also fetch a specified version - projects/{project}/datasets/{dataset_id} - projects/{project}/datasets/{dataset_id}@{version-id} - - In order to retrieve a previous version of the dataset, also - provide the version ID. Example: - projects/123/datasets/assisted-driving-preferences@c7cfa2a8 - published_usage (google.maps.mapsplatformdatasets_v1alpha.types.Usage): - If specified, will fetch the dataset details - of the version published for the specified use - case rather than the latest, if one exists. If a - published version does not exist, will default - to getting the dataset details of the latest - version. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - published_usage: gmm_dataset.Usage = proto.Field( - proto.ENUM, - number=2, - enum=gmm_dataset.Usage, - ) - - -class ListDatasetVersionsRequest(proto.Message): - r"""Request to list of all versions of the dataset. - - Attributes: - name (str): - Required. The name of the dataset to list all - the versions for. - page_size (int): - The maximum number of versions to return per - page. If unspecified (or zero), at most 1000 - versions will be returned. The maximum value is - 1000; values above 1000 will be coerced to 1000. - page_token (str): - The page token, received from a previous - GetDatasetVersions call. Provide this to - retrieve the subsequent page. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - page_size: int = proto.Field( - proto.INT32, - number=2, - ) - page_token: str = proto.Field( - proto.STRING, - number=3, - ) - - -class ListDatasetVersionsResponse(proto.Message): - r"""Response with list of all versions of the dataset. - - Attributes: - datasets (MutableSequence[google.maps.mapsplatformdatasets_v1alpha.types.Dataset]): - All the versions of the dataset. - next_page_token (str): - A token that can be sent as ``page_token`` to retrieve the - next page. If this field is omitted, there are no subsequent - pages. - """ - - @property - def raw_page(self): - return self - - datasets: MutableSequence[gmm_dataset.Dataset] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message=gmm_dataset.Dataset, - ) - next_page_token: str = proto.Field( - proto.STRING, - number=2, - ) - - -class ListDatasetsRequest(proto.Message): - r"""Request to list datasets for the project. - - Attributes: - parent (str): - Required. The name of the project to list all - the datasets for. 
- page_size (int): - The maximum number of versions to return per - page. If unspecified (or zero), at most 1000 - datasets will be returned. The maximum value is - 1000; values above 1000 will be coerced to 1000. - page_token (str): - The page token, received from a previous - GetDatasetVersions call. Provide this to - retrieve the subsequent page. - """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - page_size: int = proto.Field( - proto.INT32, - number=2, - ) - page_token: str = proto.Field( - proto.STRING, - number=3, - ) - - -class ListDatasetsResponse(proto.Message): - r"""Response to list datasets for the project. - - Attributes: - datasets (MutableSequence[google.maps.mapsplatformdatasets_v1alpha.types.Dataset]): - All the datasets for the project. - next_page_token (str): - A token that can be sent as ``page_token`` to retrieve the - next page. If this field is omitted, there are no subsequent - pages. - """ - - @property - def raw_page(self): - return self - - datasets: MutableSequence[gmm_dataset.Dataset] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message=gmm_dataset.Dataset, - ) - next_page_token: str = proto.Field( - proto.STRING, - number=2, - ) - - -class DeleteDatasetRequest(proto.Message): - r"""Request to delete a dataset. - - The dataset to be deleted. - - Attributes: - name (str): - Required. Format: projects/${project}/datasets/{dataset_id} - force (bool): - If set to true, any dataset version for this - dataset will also be deleted. (Otherwise, the - request will only work if the dataset has no - versions.) - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - force: bool = proto.Field( - proto.BOOL, - number=2, - ) - - -class DeleteDatasetVersionRequest(proto.Message): - r"""Request to delete a version of a dataset. - - Attributes: - name (str): - Required. 
Format: - projects/${project}/datasets/{dataset_id}@{version-id} - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-maps-mapsplatformdatasets/samples/generated_samples/snippet_metadata_google.maps.mapsplatformdatasets.v1alpha.json b/packages/google-maps-mapsplatformdatasets/samples/generated_samples/snippet_metadata_google.maps.mapsplatformdatasets.v1alpha.json deleted file mode 100644 index 55c8275889ff..000000000000 --- a/packages/google-maps-mapsplatformdatasets/samples/generated_samples/snippet_metadata_google.maps.mapsplatformdatasets.v1alpha.json +++ /dev/null @@ -1,1146 +0,0 @@ -{ - "clientLibrary": { - "apis": [ - { - "id": "google.maps.mapsplatformdatasets.v1alpha", - "version": "v1alpha" - } - ], - "language": "PYTHON", - "name": "google-maps-mapsplatformdatasets", - "version": "0.1.0" - }, - "snippets": [ - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.maps.mapsplatformdatasets_v1alpha.MapsPlatformDatasetsV1AlphaAsyncClient", - "shortName": "MapsPlatformDatasetsV1AlphaAsyncClient" - }, - "fullName": "google.maps.mapsplatformdatasets_v1alpha.MapsPlatformDatasetsV1AlphaAsyncClient.create_dataset", - "method": { - "fullName": "google.maps.mapsplatformdatasets.v1alpha.MapsPlatformDatasetsV1Alpha.CreateDataset", - "service": { - "fullName": "google.maps.mapsplatformdatasets.v1alpha.MapsPlatformDatasetsV1Alpha", - "shortName": "MapsPlatformDatasetsV1Alpha" - }, - "shortName": "CreateDataset" - }, - "parameters": [ - { - "name": "request", - "type": "google.maps.mapsplatformdatasets_v1alpha.types.CreateDatasetRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "dataset", - "type": "google.maps.mapsplatformdatasets_v1alpha.types.Dataset" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.maps.mapsplatformdatasets_v1alpha.types.Dataset", - "shortName": "create_dataset" - }, - "description": "Sample for CreateDataset", - "file": "mapsplatformdatasets_v1alpha_generated_maps_platform_datasets_v1_alpha_create_dataset_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "mapsplatformdatasets_v1alpha_generated_MapsPlatformDatasetsV1Alpha_CreateDataset_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "mapsplatformdatasets_v1alpha_generated_maps_platform_datasets_v1_alpha_create_dataset_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.maps.mapsplatformdatasets_v1alpha.MapsPlatformDatasetsV1AlphaClient", - "shortName": "MapsPlatformDatasetsV1AlphaClient" - }, - "fullName": "google.maps.mapsplatformdatasets_v1alpha.MapsPlatformDatasetsV1AlphaClient.create_dataset", - "method": { - "fullName": "google.maps.mapsplatformdatasets.v1alpha.MapsPlatformDatasetsV1Alpha.CreateDataset", - "service": { - "fullName": "google.maps.mapsplatformdatasets.v1alpha.MapsPlatformDatasetsV1Alpha", - "shortName": "MapsPlatformDatasetsV1Alpha" - }, - 
"shortName": "CreateDataset" - }, - "parameters": [ - { - "name": "request", - "type": "google.maps.mapsplatformdatasets_v1alpha.types.CreateDatasetRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "dataset", - "type": "google.maps.mapsplatformdatasets_v1alpha.types.Dataset" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.maps.mapsplatformdatasets_v1alpha.types.Dataset", - "shortName": "create_dataset" - }, - "description": "Sample for CreateDataset", - "file": "mapsplatformdatasets_v1alpha_generated_maps_platform_datasets_v1_alpha_create_dataset_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "mapsplatformdatasets_v1alpha_generated_MapsPlatformDatasetsV1Alpha_CreateDataset_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "mapsplatformdatasets_v1alpha_generated_maps_platform_datasets_v1_alpha_create_dataset_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.maps.mapsplatformdatasets_v1alpha.MapsPlatformDatasetsV1AlphaAsyncClient", - "shortName": "MapsPlatformDatasetsV1AlphaAsyncClient" - }, - "fullName": "google.maps.mapsplatformdatasets_v1alpha.MapsPlatformDatasetsV1AlphaAsyncClient.delete_dataset_version", - "method": { - "fullName": "google.maps.mapsplatformdatasets.v1alpha.MapsPlatformDatasetsV1Alpha.DeleteDatasetVersion", - "service": { - "fullName": "google.maps.mapsplatformdatasets.v1alpha.MapsPlatformDatasetsV1Alpha", - "shortName": "MapsPlatformDatasetsV1Alpha" - }, - "shortName": "DeleteDatasetVersion" - }, - "parameters": [ - { - "name": "request", - "type": "google.maps.mapsplatformdatasets_v1alpha.types.DeleteDatasetVersionRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "shortName": "delete_dataset_version" - }, - "description": "Sample for DeleteDatasetVersion", - "file": "mapsplatformdatasets_v1alpha_generated_maps_platform_datasets_v1_alpha_delete_dataset_version_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "mapsplatformdatasets_v1alpha_generated_MapsPlatformDatasetsV1Alpha_DeleteDatasetVersion_async", - "segments": [ - { - "end": 49, - "start": 27, - "type": "FULL" - }, - { - "end": 49, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "mapsplatformdatasets_v1alpha_generated_maps_platform_datasets_v1_alpha_delete_dataset_version_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.maps.mapsplatformdatasets_v1alpha.MapsPlatformDatasetsV1AlphaClient", - "shortName": "MapsPlatformDatasetsV1AlphaClient" - }, - 
"fullName": "google.maps.mapsplatformdatasets_v1alpha.MapsPlatformDatasetsV1AlphaClient.delete_dataset_version", - "method": { - "fullName": "google.maps.mapsplatformdatasets.v1alpha.MapsPlatformDatasetsV1Alpha.DeleteDatasetVersion", - "service": { - "fullName": "google.maps.mapsplatformdatasets.v1alpha.MapsPlatformDatasetsV1Alpha", - "shortName": "MapsPlatformDatasetsV1Alpha" - }, - "shortName": "DeleteDatasetVersion" - }, - "parameters": [ - { - "name": "request", - "type": "google.maps.mapsplatformdatasets_v1alpha.types.DeleteDatasetVersionRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "shortName": "delete_dataset_version" - }, - "description": "Sample for DeleteDatasetVersion", - "file": "mapsplatformdatasets_v1alpha_generated_maps_platform_datasets_v1_alpha_delete_dataset_version_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "mapsplatformdatasets_v1alpha_generated_MapsPlatformDatasetsV1Alpha_DeleteDatasetVersion_sync", - "segments": [ - { - "end": 49, - "start": 27, - "type": "FULL" - }, - { - "end": 49, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "mapsplatformdatasets_v1alpha_generated_maps_platform_datasets_v1_alpha_delete_dataset_version_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.maps.mapsplatformdatasets_v1alpha.MapsPlatformDatasetsV1AlphaAsyncClient", - "shortName": "MapsPlatformDatasetsV1AlphaAsyncClient" - }, - "fullName": "google.maps.mapsplatformdatasets_v1alpha.MapsPlatformDatasetsV1AlphaAsyncClient.delete_dataset", - "method": { - "fullName": "google.maps.mapsplatformdatasets.v1alpha.MapsPlatformDatasetsV1Alpha.DeleteDataset", - "service": { - "fullName": "google.maps.mapsplatformdatasets.v1alpha.MapsPlatformDatasetsV1Alpha", - "shortName": "MapsPlatformDatasetsV1Alpha" - }, - "shortName": "DeleteDataset" - }, - "parameters": [ - { - "name": "request", - "type": "google.maps.mapsplatformdatasets_v1alpha.types.DeleteDatasetRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "shortName": "delete_dataset" - }, - "description": "Sample for DeleteDataset", - "file": "mapsplatformdatasets_v1alpha_generated_maps_platform_datasets_v1_alpha_delete_dataset_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "mapsplatformdatasets_v1alpha_generated_MapsPlatformDatasetsV1Alpha_DeleteDataset_async", - "segments": [ - { - "end": 49, - "start": 27, - "type": "FULL" - }, - { - "end": 49, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "mapsplatformdatasets_v1alpha_generated_maps_platform_datasets_v1_alpha_delete_dataset_async.py" - }, - { - "canonical": true, - 
"clientMethod": { - "client": { - "fullName": "google.maps.mapsplatformdatasets_v1alpha.MapsPlatformDatasetsV1AlphaClient", - "shortName": "MapsPlatformDatasetsV1AlphaClient" - }, - "fullName": "google.maps.mapsplatformdatasets_v1alpha.MapsPlatformDatasetsV1AlphaClient.delete_dataset", - "method": { - "fullName": "google.maps.mapsplatformdatasets.v1alpha.MapsPlatformDatasetsV1Alpha.DeleteDataset", - "service": { - "fullName": "google.maps.mapsplatformdatasets.v1alpha.MapsPlatformDatasetsV1Alpha", - "shortName": "MapsPlatformDatasetsV1Alpha" - }, - "shortName": "DeleteDataset" - }, - "parameters": [ - { - "name": "request", - "type": "google.maps.mapsplatformdatasets_v1alpha.types.DeleteDatasetRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "shortName": "delete_dataset" - }, - "description": "Sample for DeleteDataset", - "file": "mapsplatformdatasets_v1alpha_generated_maps_platform_datasets_v1_alpha_delete_dataset_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "mapsplatformdatasets_v1alpha_generated_MapsPlatformDatasetsV1Alpha_DeleteDataset_sync", - "segments": [ - { - "end": 49, - "start": 27, - "type": "FULL" - }, - { - "end": 49, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "mapsplatformdatasets_v1alpha_generated_maps_platform_datasets_v1_alpha_delete_dataset_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.maps.mapsplatformdatasets_v1alpha.MapsPlatformDatasetsV1AlphaAsyncClient", - "shortName": "MapsPlatformDatasetsV1AlphaAsyncClient" - }, - "fullName": "google.maps.mapsplatformdatasets_v1alpha.MapsPlatformDatasetsV1AlphaAsyncClient.get_dataset", - "method": { - "fullName": "google.maps.mapsplatformdatasets.v1alpha.MapsPlatformDatasetsV1Alpha.GetDataset", - "service": { - "fullName": "google.maps.mapsplatformdatasets.v1alpha.MapsPlatformDatasetsV1Alpha", - "shortName": "MapsPlatformDatasetsV1Alpha" - }, - "shortName": "GetDataset" - }, - "parameters": [ - { - "name": "request", - "type": "google.maps.mapsplatformdatasets_v1alpha.types.GetDatasetRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.maps.mapsplatformdatasets_v1alpha.types.Dataset", - "shortName": "get_dataset" - }, - "description": "Sample for GetDataset", - "file": "mapsplatformdatasets_v1alpha_generated_maps_platform_datasets_v1_alpha_get_dataset_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "mapsplatformdatasets_v1alpha_generated_MapsPlatformDatasetsV1Alpha_GetDataset_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 
49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "mapsplatformdatasets_v1alpha_generated_maps_platform_datasets_v1_alpha_get_dataset_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.maps.mapsplatformdatasets_v1alpha.MapsPlatformDatasetsV1AlphaClient", - "shortName": "MapsPlatformDatasetsV1AlphaClient" - }, - "fullName": "google.maps.mapsplatformdatasets_v1alpha.MapsPlatformDatasetsV1AlphaClient.get_dataset", - "method": { - "fullName": "google.maps.mapsplatformdatasets.v1alpha.MapsPlatformDatasetsV1Alpha.GetDataset", - "service": { - "fullName": "google.maps.mapsplatformdatasets.v1alpha.MapsPlatformDatasetsV1Alpha", - "shortName": "MapsPlatformDatasetsV1Alpha" - }, - "shortName": "GetDataset" - }, - "parameters": [ - { - "name": "request", - "type": "google.maps.mapsplatformdatasets_v1alpha.types.GetDatasetRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.maps.mapsplatformdatasets_v1alpha.types.Dataset", - "shortName": "get_dataset" - }, - "description": "Sample for GetDataset", - "file": "mapsplatformdatasets_v1alpha_generated_maps_platform_datasets_v1_alpha_get_dataset_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "mapsplatformdatasets_v1alpha_generated_MapsPlatformDatasetsV1Alpha_GetDataset_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "mapsplatformdatasets_v1alpha_generated_maps_platform_datasets_v1_alpha_get_dataset_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.maps.mapsplatformdatasets_v1alpha.MapsPlatformDatasetsV1AlphaAsyncClient", - "shortName": "MapsPlatformDatasetsV1AlphaAsyncClient" - }, - "fullName": "google.maps.mapsplatformdatasets_v1alpha.MapsPlatformDatasetsV1AlphaAsyncClient.list_dataset_versions", - "method": { - "fullName": "google.maps.mapsplatformdatasets.v1alpha.MapsPlatformDatasetsV1Alpha.ListDatasetVersions", - "service": { - "fullName": "google.maps.mapsplatformdatasets.v1alpha.MapsPlatformDatasetsV1Alpha", - "shortName": "MapsPlatformDatasetsV1Alpha" - }, - "shortName": "ListDatasetVersions" - }, - "parameters": [ - { - "name": "request", - "type": "google.maps.mapsplatformdatasets_v1alpha.types.ListDatasetVersionsRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.maps.mapsplatformdatasets_v1alpha.services.maps_platform_datasets_v1_alpha.pagers.ListDatasetVersionsAsyncPager", - "shortName": "list_dataset_versions" - }, - "description": "Sample for ListDatasetVersions", - "file": "mapsplatformdatasets_v1alpha_generated_maps_platform_datasets_v1_alpha_list_dataset_versions_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": 
"mapsplatformdatasets_v1alpha_generated_MapsPlatformDatasetsV1Alpha_ListDatasetVersions_async", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "mapsplatformdatasets_v1alpha_generated_maps_platform_datasets_v1_alpha_list_dataset_versions_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.maps.mapsplatformdatasets_v1alpha.MapsPlatformDatasetsV1AlphaClient", - "shortName": "MapsPlatformDatasetsV1AlphaClient" - }, - "fullName": "google.maps.mapsplatformdatasets_v1alpha.MapsPlatformDatasetsV1AlphaClient.list_dataset_versions", - "method": { - "fullName": "google.maps.mapsplatformdatasets.v1alpha.MapsPlatformDatasetsV1Alpha.ListDatasetVersions", - "service": { - "fullName": "google.maps.mapsplatformdatasets.v1alpha.MapsPlatformDatasetsV1Alpha", - "shortName": "MapsPlatformDatasetsV1Alpha" - }, - "shortName": "ListDatasetVersions" - }, - "parameters": [ - { - "name": "request", - "type": "google.maps.mapsplatformdatasets_v1alpha.types.ListDatasetVersionsRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.maps.mapsplatformdatasets_v1alpha.services.maps_platform_datasets_v1_alpha.pagers.ListDatasetVersionsPager", - "shortName": "list_dataset_versions" - }, - "description": "Sample for ListDatasetVersions", - "file": "mapsplatformdatasets_v1alpha_generated_maps_platform_datasets_v1_alpha_list_dataset_versions_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "mapsplatformdatasets_v1alpha_generated_MapsPlatformDatasetsV1Alpha_ListDatasetVersions_sync", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "mapsplatformdatasets_v1alpha_generated_maps_platform_datasets_v1_alpha_list_dataset_versions_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.maps.mapsplatformdatasets_v1alpha.MapsPlatformDatasetsV1AlphaAsyncClient", - "shortName": "MapsPlatformDatasetsV1AlphaAsyncClient" - }, - "fullName": "google.maps.mapsplatformdatasets_v1alpha.MapsPlatformDatasetsV1AlphaAsyncClient.list_datasets", - "method": { - "fullName": "google.maps.mapsplatformdatasets.v1alpha.MapsPlatformDatasetsV1Alpha.ListDatasets", - "service": { - "fullName": "google.maps.mapsplatformdatasets.v1alpha.MapsPlatformDatasetsV1Alpha", - "shortName": "MapsPlatformDatasetsV1Alpha" - }, - "shortName": "ListDatasets" - }, - "parameters": [ - { - "name": "request", - "type": "google.maps.mapsplatformdatasets_v1alpha.types.ListDatasetsRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": 
"timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.maps.mapsplatformdatasets_v1alpha.services.maps_platform_datasets_v1_alpha.pagers.ListDatasetsAsyncPager", - "shortName": "list_datasets" - }, - "description": "Sample for ListDatasets", - "file": "mapsplatformdatasets_v1alpha_generated_maps_platform_datasets_v1_alpha_list_datasets_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "mapsplatformdatasets_v1alpha_generated_MapsPlatformDatasetsV1Alpha_ListDatasets_async", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "mapsplatformdatasets_v1alpha_generated_maps_platform_datasets_v1_alpha_list_datasets_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.maps.mapsplatformdatasets_v1alpha.MapsPlatformDatasetsV1AlphaClient", - "shortName": "MapsPlatformDatasetsV1AlphaClient" - }, - "fullName": "google.maps.mapsplatformdatasets_v1alpha.MapsPlatformDatasetsV1AlphaClient.list_datasets", - "method": { - "fullName": "google.maps.mapsplatformdatasets.v1alpha.MapsPlatformDatasetsV1Alpha.ListDatasets", - "service": { - "fullName": "google.maps.mapsplatformdatasets.v1alpha.MapsPlatformDatasetsV1Alpha", - "shortName": "MapsPlatformDatasetsV1Alpha" - }, - "shortName": "ListDatasets" - }, - "parameters": [ - { - "name": "request", - "type": "google.maps.mapsplatformdatasets_v1alpha.types.ListDatasetsRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.maps.mapsplatformdatasets_v1alpha.services.maps_platform_datasets_v1_alpha.pagers.ListDatasetsPager", - "shortName": "list_datasets" - }, - "description": "Sample for ListDatasets", - "file": "mapsplatformdatasets_v1alpha_generated_maps_platform_datasets_v1_alpha_list_datasets_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "mapsplatformdatasets_v1alpha_generated_MapsPlatformDatasetsV1Alpha_ListDatasets_sync", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "mapsplatformdatasets_v1alpha_generated_maps_platform_datasets_v1_alpha_list_datasets_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.maps.mapsplatformdatasets_v1alpha.MapsPlatformDatasetsV1AlphaAsyncClient", - "shortName": "MapsPlatformDatasetsV1AlphaAsyncClient" - }, - "fullName": "google.maps.mapsplatformdatasets_v1alpha.MapsPlatformDatasetsV1AlphaAsyncClient.update_dataset_metadata", - "method": { - "fullName": "google.maps.mapsplatformdatasets.v1alpha.MapsPlatformDatasetsV1Alpha.UpdateDatasetMetadata", - "service": { - 
"fullName": "google.maps.mapsplatformdatasets.v1alpha.MapsPlatformDatasetsV1Alpha", - "shortName": "MapsPlatformDatasetsV1Alpha" - }, - "shortName": "UpdateDatasetMetadata" - }, - "parameters": [ - { - "name": "request", - "type": "google.maps.mapsplatformdatasets_v1alpha.types.UpdateDatasetMetadataRequest" - }, - { - "name": "dataset", - "type": "google.maps.mapsplatformdatasets_v1alpha.types.Dataset" - }, - { - "name": "update_mask", - "type": "google.protobuf.field_mask_pb2.FieldMask" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.maps.mapsplatformdatasets_v1alpha.types.Dataset", - "shortName": "update_dataset_metadata" - }, - "description": "Sample for UpdateDatasetMetadata", - "file": "mapsplatformdatasets_v1alpha_generated_maps_platform_datasets_v1_alpha_update_dataset_metadata_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "mapsplatformdatasets_v1alpha_generated_MapsPlatformDatasetsV1Alpha_UpdateDatasetMetadata_async", - "segments": [ - { - "end": 50, - "start": 27, - "type": "FULL" - }, - { - "end": 50, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 44, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 47, - "start": 45, - "type": "REQUEST_EXECUTION" - }, - { - "end": 51, - "start": 48, - "type": "RESPONSE_HANDLING" - } - ], - "title": "mapsplatformdatasets_v1alpha_generated_maps_platform_datasets_v1_alpha_update_dataset_metadata_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.maps.mapsplatformdatasets_v1alpha.MapsPlatformDatasetsV1AlphaClient", - "shortName": "MapsPlatformDatasetsV1AlphaClient" - }, - "fullName": "google.maps.mapsplatformdatasets_v1alpha.MapsPlatformDatasetsV1AlphaClient.update_dataset_metadata", - "method": { - "fullName": "google.maps.mapsplatformdatasets.v1alpha.MapsPlatformDatasetsV1Alpha.UpdateDatasetMetadata", - "service": { - "fullName": "google.maps.mapsplatformdatasets.v1alpha.MapsPlatformDatasetsV1Alpha", - "shortName": "MapsPlatformDatasetsV1Alpha" - }, - "shortName": "UpdateDatasetMetadata" - }, - "parameters": [ - { - "name": "request", - "type": "google.maps.mapsplatformdatasets_v1alpha.types.UpdateDatasetMetadataRequest" - }, - { - "name": "dataset", - "type": "google.maps.mapsplatformdatasets_v1alpha.types.Dataset" - }, - { - "name": "update_mask", - "type": "google.protobuf.field_mask_pb2.FieldMask" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.maps.mapsplatformdatasets_v1alpha.types.Dataset", - "shortName": "update_dataset_metadata" - }, - "description": "Sample for UpdateDatasetMetadata", - "file": "mapsplatformdatasets_v1alpha_generated_maps_platform_datasets_v1_alpha_update_dataset_metadata_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "mapsplatformdatasets_v1alpha_generated_MapsPlatformDatasetsV1Alpha_UpdateDatasetMetadata_sync", - "segments": [ - { - "end": 50, - "start": 27, - "type": "FULL" - }, - { - "end": 50, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 44, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 
47, - "start": 45, - "type": "REQUEST_EXECUTION" - }, - { - "end": 51, - "start": 48, - "type": "RESPONSE_HANDLING" - } - ], - "title": "mapsplatformdatasets_v1alpha_generated_maps_platform_datasets_v1_alpha_update_dataset_metadata_sync.py" - } - ] -} diff --git a/packages/google-maps-mapsplatformdatasets/scripts/fixup_mapsplatformdatasets_v1_keywords.py b/packages/google-maps-mapsplatformdatasets/scripts/fixup_mapsplatformdatasets_v1_keywords.py index 42e2c9ca0c40..3c386e4bd1e9 100644 --- a/packages/google-maps-mapsplatformdatasets/scripts/fixup_mapsplatformdatasets_v1_keywords.py +++ b/packages/google-maps-mapsplatformdatasets/scripts/fixup_mapsplatformdatasets_v1_keywords.py @@ -42,7 +42,7 @@ class mapsplatformdatasetsCallTransformer(cst.CSTTransformer): 'create_dataset': ('parent', 'dataset', ), 'delete_dataset': ('name', ), 'get_dataset': ('name', ), - 'list_datasets': ('parent', 'page_size', 'page_token', ), + 'list_datasets': ('parent', 'page_size', 'page_token', 'tag', ), 'update_dataset_metadata': ('dataset', 'update_mask', ), } diff --git a/packages/google-maps-mapsplatformdatasets/tests/unit/gapic/mapsplatformdatasets_v1/test_maps_platform_datasets.py b/packages/google-maps-mapsplatformdatasets/tests/unit/gapic/mapsplatformdatasets_v1/test_maps_platform_datasets.py index e6594ee80f0b..44315171cfdc 100644 --- a/packages/google-maps-mapsplatformdatasets/tests/unit/gapic/mapsplatformdatasets_v1/test_maps_platform_datasets.py +++ b/packages/google-maps-mapsplatformdatasets/tests/unit/gapic/mapsplatformdatasets_v1/test_maps_platform_datasets.py @@ -2470,6 +2470,7 @@ def test_list_datasets_non_empty_request_with_auto_populated_field(): request = maps_platform_datasets.ListDatasetsRequest( parent="parent_value", page_token="page_token_value", + tag="tag_value", ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -2483,6 +2484,7 @@ def test_list_datasets_non_empty_request_with_auto_populated_field(): assert args[0] == maps_platform_datasets.ListDatasetsRequest( parent="parent_value", page_token="page_token_value", + tag="tag_value", ) @@ -2810,13 +2812,13 @@ def test_list_datasets_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_datasets(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -4534,6 +4536,7 @@ def test_list_datasets_rest_required_fields( ( "page_size", "page_token", + "tag", ) ) jsonified_request.update(unset_fields) @@ -4594,6 +4597,7 @@ def test_list_datasets_rest_unset_required_fields(): ( "pageSize", "pageToken", + "tag", ) ) & set(("parent",)) diff --git a/packages/google-maps-places/CHANGELOG.md b/packages/google-maps-places/CHANGELOG.md index d850d99c2c07..181b36c863ab 100644 --- a/packages/google-maps-places/CHANGELOG.md +++ b/packages/google-maps-places/CHANGELOG.md @@ -1,5 +1,12 @@ # Changelog +## [0.1.15](https://github.com/googleapis/google-cloud-python/compare/google-maps-places-v0.1.14...google-maps-places-v0.1.15) (2024-05-27) + + +### Features + +* add `generative_summary` and `area_summary` for place summaries ([46d0d9f](https://github.com/googleapis/google-cloud-python/commit/46d0d9f863049c257b8bfa15cfce0ea0f3530c5a)) + ## [0.1.14](https://github.com/googleapis/google-cloud-python/compare/google-maps-places-v0.1.13...google-maps-places-v0.1.14) (2024-05-07) diff --git a/packages/google-maps-places/google/maps/places/__init__.py b/packages/google-maps-places/google/maps/places/__init__.py index 2446b06182f1..ba2545071c8b 100644 --- a/packages/google-maps-places/google/maps/places/__init__.py +++ b/packages/google-maps-places/google/maps/places/__init__.py @@ -21,6 +21,8 @@ from google.maps.places_v1.services.places.async_client import PlacesAsyncClient from google.maps.places_v1.services.places.client import PlacesClient from google.maps.places_v1.types.attribution import AuthorAttribution +from google.maps.places_v1.types.content_block import ContentBlock +from google.maps.places_v1.types.contextual_content import ContextualContent from google.maps.places_v1.types.ev_charging import EVChargeOptions, EVConnectorType from google.maps.places_v1.types.fuel_options import FuelOptions from google.maps.places_v1.types.geometry import Circle @@ -37,12 +39,15 @@ SearchTextRequest, SearchTextResponse, ) +from google.maps.places_v1.types.reference import References from google.maps.places_v1.types.review import Review __all__ = ( "PlacesClient", "PlacesAsyncClient", "AuthorAttribution", + "ContentBlock", + "ContextualContent", "EVChargeOptions", "EVConnectorType", "FuelOptions", @@ -59,5 +64,6 @@ "SearchNearbyResponse", "SearchTextRequest", "SearchTextResponse", + "References", "Review", ) diff --git a/packages/google-maps-places/google/maps/places_v1/__init__.py b/packages/google-maps-places/google/maps/places_v1/__init__.py index e58a341f7e44..bb2075cda07a 100644 --- a/packages/google-maps-places/google/maps/places_v1/__init__.py +++ b/packages/google-maps-places/google/maps/places_v1/__init__.py @@ -20,6 +20,8 @@ 
from .services.places import PlacesAsyncClient, PlacesClient from .types.attribution import AuthorAttribution +from .types.content_block import ContentBlock +from .types.contextual_content import ContextualContent from .types.ev_charging import EVChargeOptions, EVConnectorType from .types.fuel_options import FuelOptions from .types.geometry import Circle @@ -36,6 +38,7 @@ SearchTextRequest, SearchTextResponse, ) +from .types.reference import References from .types.review import Review __all__ = ( @@ -44,6 +47,8 @@ "AutocompletePlacesRequest", "AutocompletePlacesResponse", "Circle", + "ContentBlock", + "ContextualContent", "EVChargeOptions", "EVConnectorType", "FuelOptions", @@ -54,6 +59,7 @@ "Place", "PlacesClient", "PriceLevel", + "References", "Review", "SearchNearbyRequest", "SearchNearbyResponse", diff --git a/packages/google-maps-places/google/maps/places_v1/services/places/async_client.py b/packages/google-maps-places/google/maps/places_v1/services/places/async_client.py index d66027db7a4f..f0536ab4a75e 100644 --- a/packages/google-maps-places/google/maps/places_v1/services/places/async_client.py +++ b/packages/google-maps-places/google/maps/places_v1/services/places/async_client.py @@ -48,6 +48,7 @@ from google.type import localized_text_pb2 # type: ignore from google.maps.places_v1.types import ( + contextual_content, ev_charging, fuel_options, photo, diff --git a/packages/google-maps-places/google/maps/places_v1/services/places/client.py b/packages/google-maps-places/google/maps/places_v1/services/places/client.py index ac37bfd8e601..eb73f35475dc 100644 --- a/packages/google-maps-places/google/maps/places_v1/services/places/client.py +++ b/packages/google-maps-places/google/maps/places_v1/services/places/client.py @@ -53,6 +53,7 @@ from google.type import localized_text_pb2 # type: ignore from google.maps.places_v1.types import ( + contextual_content, ev_charging, fuel_options, photo, diff --git a/packages/google-maps-places/google/maps/places_v1/types/__init__.py b/packages/google-maps-places/google/maps/places_v1/types/__init__.py index df93c4ed91ba..99b294c1dc25 100644 --- a/packages/google-maps-places/google/maps/places_v1/types/__init__.py +++ b/packages/google-maps-places/google/maps/places_v1/types/__init__.py @@ -14,6 +14,8 @@ # limitations under the License. # from .attribution import AuthorAttribution +from .content_block import ContentBlock +from .contextual_content import ContextualContent from .ev_charging import EVChargeOptions, EVConnectorType from .fuel_options import FuelOptions from .geometry import Circle @@ -30,10 +32,13 @@ SearchTextRequest, SearchTextResponse, ) +from .reference import References from .review import Review __all__ = ( "AuthorAttribution", + "ContentBlock", + "ContextualContent", "EVChargeOptions", "EVConnectorType", "FuelOptions", @@ -50,5 +55,6 @@ "SearchNearbyResponse", "SearchTextRequest", "SearchTextResponse", + "References", "Review", ) diff --git a/packages/google-maps-places/google/maps/places_v1/types/content_block.py b/packages/google-maps-places/google/maps/places_v1/types/content_block.py new file mode 100644 index 000000000000..dbe73a829aad --- /dev/null +++ b/packages/google-maps-places/google/maps/places_v1/types/content_block.py @@ -0,0 +1,67 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +from google.type import localized_text_pb2 # type: ignore +import proto # type: ignore + +from google.maps.places_v1.types import reference + +__protobuf__ = proto.module( + package="google.maps.places.v1", + manifest={ + "ContentBlock", + }, +) + + +class ContentBlock(proto.Message): + r"""A block of content that can be served individually. + + Attributes: + topic (str): + The topic of the content, for example + "overview" or "restaurant". + content (google.type.localized_text_pb2.LocalizedText): + Content related to the topic. + references (google.maps.places_v1.types.References): + Experimental: See + https://developers.google.com/maps/documentation/places/web-service/experimental/places-generative + for more details. + + References that are related to this block of + content. + """ + + topic: str = proto.Field( + proto.STRING, + number=1, + ) + content: localized_text_pb2.LocalizedText = proto.Field( + proto.MESSAGE, + number=2, + message=localized_text_pb2.LocalizedText, + ) + references: reference.References = proto.Field( + proto.MESSAGE, + number=3, + message=reference.References, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-maps-places/google/maps/places_v1/types/contextual_content.py b/packages/google-maps-places/google/maps/places_v1/types/contextual_content.py new file mode 100644 index 000000000000..137bb5f571f3 --- /dev/null +++ b/packages/google-maps-places/google/maps/places_v1/types/contextual_content.py @@ -0,0 +1,221 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + +from google.maps.places_v1.types import photo +from google.maps.places_v1.types import review as gmp_review + +__protobuf__ = proto.module( + package="google.maps.places.v1", + manifest={ + "ContextualContent", + }, +) + + +class ContextualContent(proto.Message): + r"""Experimental: See + https://developers.google.com/maps/documentation/places/web-service/experimental/places-generative + for more details. + + Content that is contextual to the place query. 
+ + Attributes: + reviews (MutableSequence[google.maps.places_v1.types.Review]): + List of reviews about this place, contexual + to the place query. + photos (MutableSequence[google.maps.places_v1.types.Photo]): + Information (including references) about + photos of this place, contexual to the place + query. + justifications (MutableSequence[google.maps.places_v1.types.ContextualContent.Justification]): + Experimental: See + https://developers.google.com/maps/documentation/places/web-service/experimental/places-generative + for more details. + + Justifications for the place. + """ + + class Justification(proto.Message): + r"""Experimental: See + https://developers.google.com/maps/documentation/places/web-service/experimental/places-generative + for more details. + + Justifications for the place. Justifications answers the + question of why a place could interest an end user. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + review_justification (google.maps.places_v1.types.ContextualContent.Justification.ReviewJustification): + Experimental: See + https://developers.google.com/maps/documentation/places/web-service/experimental/places-generative + for more details. + + This field is a member of `oneof`_ ``justification``. + business_availability_attributes_justification (google.maps.places_v1.types.ContextualContent.Justification.BusinessAvailabilityAttributesJustification): + Experimental: See + https://developers.google.com/maps/documentation/places/web-service/experimental/places-generative + for more details. + + This field is a member of `oneof`_ ``justification``. + """ + + class ReviewJustification(proto.Message): + r"""Experimental: See + https://developers.google.com/maps/documentation/places/web-service/experimental/places-generative + for more details. + + User review justifications. This highlights a section of the + user review that would interest an end user. For instance, if + the search query is "firewood pizza", the review justification + highlights the text relevant to the search query. + + Attributes: + highlighted_text (google.maps.places_v1.types.ContextualContent.Justification.ReviewJustification.HighlightedText): + + review (google.maps.places_v1.types.Review): + The review that the highlighted text is + generated from. + """ + + class HighlightedText(proto.Message): + r"""The text highlighted by the justification. This is a subset + of the review itself. The exact word to highlight is marked by + the HighlightedTextRange. There could be several words in the + text being highlighted. + + Attributes: + text (str): + + highlighted_text_ranges (MutableSequence[google.maps.places_v1.types.ContextualContent.Justification.ReviewJustification.HighlightedText.HighlightedTextRange]): + The list of the ranges of the highlighted + text. + """ + + class HighlightedTextRange(proto.Message): + r"""The range of highlighted text. 
+ + Attributes: + start_index (int): + + end_index (int): + + """ + + start_index: int = proto.Field( + proto.INT32, + number=1, + ) + end_index: int = proto.Field( + proto.INT32, + number=2, + ) + + text: str = proto.Field( + proto.STRING, + number=1, + ) + highlighted_text_ranges: MutableSequence[ + "ContextualContent.Justification.ReviewJustification.HighlightedText.HighlightedTextRange" + ] = proto.RepeatedField( + proto.MESSAGE, + number=2, + message="ContextualContent.Justification.ReviewJustification.HighlightedText.HighlightedTextRange", + ) + + highlighted_text: "ContextualContent.Justification.ReviewJustification.HighlightedText" = proto.Field( + proto.MESSAGE, + number=1, + message="ContextualContent.Justification.ReviewJustification.HighlightedText", + ) + review: gmp_review.Review = proto.Field( + proto.MESSAGE, + number=2, + message=gmp_review.Review, + ) + + class BusinessAvailabilityAttributesJustification(proto.Message): + r"""Experimental: See + https://developers.google.com/maps/documentation/places/web-service/experimental/places-generative + for more details. + BusinessAvailabilityAttributes justifications. This shows some + attributes a business has that could interest an end user. + + Attributes: + takeout (bool): + If a place provides takeout. + delivery (bool): + If a place provides delivery. + dine_in (bool): + If a place provides dine-in. + """ + + takeout: bool = proto.Field( + proto.BOOL, + number=1, + ) + delivery: bool = proto.Field( + proto.BOOL, + number=2, + ) + dine_in: bool = proto.Field( + proto.BOOL, + number=3, + ) + + review_justification: "ContextualContent.Justification.ReviewJustification" = ( + proto.Field( + proto.MESSAGE, + number=1, + oneof="justification", + message="ContextualContent.Justification.ReviewJustification", + ) + ) + business_availability_attributes_justification: "ContextualContent.Justification.BusinessAvailabilityAttributesJustification" = proto.Field( + proto.MESSAGE, + number=2, + oneof="justification", + message="ContextualContent.Justification.BusinessAvailabilityAttributesJustification", + ) + + reviews: MutableSequence[gmp_review.Review] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=gmp_review.Review, + ) + photos: MutableSequence[photo.Photo] = proto.RepeatedField( + proto.MESSAGE, + number=2, + message=photo.Photo, + ) + justifications: MutableSequence[Justification] = proto.RepeatedField( + proto.MESSAGE, + number=3, + message=Justification, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-maps-places/google/maps/places_v1/types/place.py b/packages/google-maps-places/google/maps/places_v1/types/place.py index 6476164f3a0b..99c9e8f693e9 100644 --- a/packages/google-maps-places/google/maps/places_v1/types/place.py +++ b/packages/google-maps-places/google/maps/places_v1/types/place.py @@ -23,9 +23,9 @@ from google.type import localized_text_pb2 # type: ignore import proto # type: ignore -from google.maps.places_v1.types import ev_charging +from google.maps.places_v1.types import content_block, ev_charging from google.maps.places_v1.types import fuel_options as gmp_fuel_options -from google.maps.places_v1.types import photo, review +from google.maps.places_v1.types import photo, reference, review __protobuf__ = proto.module( package="google.maps.places.v1", @@ -333,6 +333,19 @@ class Place(proto.Message): updated regularly. 
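Because `ContextualContent.Justification` is a oneof whose review branch carries highlighted-text ranges, a short consumer-side sketch may help; it follows the message layout defined above, and the assumption that the range indices can be used directly as Python string offsets is called out in the comments.

```python
# Illustrative only: summarizing the justifications on one ContextualContent.
from __future__ import annotations

from google.maps import places_v1


def describe_justifications(content: places_v1.ContextualContent) -> list[str]:
    notes: list[str] = []
    for justification in content.justifications:
        # `justification` is a oneof; reading an unset member yields default
        # (empty/False) values, so both branches are safe to inspect.
        highlighted = justification.review_justification.highlighted_text
        for rng in highlighted.highlighted_text_ranges:
            # Assumption: start_index/end_index are usable as Python offsets
            # into the highlighted text.
            notes.append(highlighted.text[rng.start_index : rng.end_index])
        attrs = justification.business_availability_attributes_justification
        for label, flag in (
            ("takeout", attrs.takeout),
            ("delivery", attrs.delivery),
            ("dine-in", attrs.dine_in),
        ):
            if flag:
                notes.append(f"offers {label}")
    return notes
```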
ev_charge_options (google.maps.places_v1.types.EVChargeOptions): Information of ev charging options. + generative_summary (google.maps.places_v1.types.Place.GenerativeSummary): + Experimental: See + https://developers.google.com/maps/documentation/places/web-service/experimental/places-generative + for more details. + + AI-generated summary of the place. + area_summary (google.maps.places_v1.types.Place.AreaSummary): + Experimental: See + https://developers.google.com/maps/documentation/places/web-service/experimental/places-generative + for more details. + + AI-generated summary of the area that the place + is in. """ class BusinessStatus(proto.Enum): @@ -832,6 +845,60 @@ class AccessibilityOptions(proto.Message): optional=True, ) + class GenerativeSummary(proto.Message): + r"""Experimental: See + https://developers.google.com/maps/documentation/places/web-service/experimental/places-generative + for more details. + + AI-generated summary of the place. + + Attributes: + overview (google.type.localized_text_pb2.LocalizedText): + The overview of the place. + description (google.type.localized_text_pb2.LocalizedText): + The detailed description of the place. + references (google.maps.places_v1.types.References): + References that are used to generate the + summary description. + """ + + overview: localized_text_pb2.LocalizedText = proto.Field( + proto.MESSAGE, + number=1, + message=localized_text_pb2.LocalizedText, + ) + description: localized_text_pb2.LocalizedText = proto.Field( + proto.MESSAGE, + number=2, + message=localized_text_pb2.LocalizedText, + ) + references: reference.References = proto.Field( + proto.MESSAGE, + number=3, + message=reference.References, + ) + + class AreaSummary(proto.Message): + r"""Experimental: See + https://developers.google.com/maps/documentation/places/web-service/experimental/places-generative + for more details. + + AI-generated summary of the area that the place is in. + + Attributes: + content_blocks (MutableSequence[google.maps.places_v1.types.ContentBlock]): + Content blocks that compose the area summary. + Each block has a separate topic about the area. 
+ """ + + content_blocks: MutableSequence[ + content_block.ContentBlock + ] = proto.RepeatedField( + proto.MESSAGE, + number=4, + message=content_block.ContentBlock, + ) + name: str = proto.Field( proto.STRING, number=1, @@ -1128,6 +1195,16 @@ class AccessibilityOptions(proto.Message): number=79, message=ev_charging.EVChargeOptions, ) + generative_summary: GenerativeSummary = proto.Field( + proto.MESSAGE, + number=80, + message=GenerativeSummary, + ) + area_summary: AreaSummary = proto.Field( + proto.MESSAGE, + number=81, + message=AreaSummary, + ) __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-maps-places/google/maps/places_v1/types/places_service.py b/packages/google-maps-places/google/maps/places_v1/types/places_service.py index b77a49ec2c07..c02ec339b860 100644 --- a/packages/google-maps-places/google/maps/places_v1/types/places_service.py +++ b/packages/google-maps-places/google/maps/places_v1/types/places_service.py @@ -21,7 +21,7 @@ from google.type import latlng_pb2 # type: ignore import proto # type: ignore -from google.maps.places_v1.types import ev_charging, geometry +from google.maps.places_v1.types import contextual_content, ev_charging, geometry from google.maps.places_v1.types import place as gmp_place __protobuf__ = proto.module( @@ -511,6 +511,19 @@ class SearchTextResponse(proto.Message): places (MutableSequence[google.maps.places_v1.types.Place]): A list of places that meet the user's text search criteria. + contextual_contents (MutableSequence[google.maps.places_v1.types.ContextualContent]): + Experimental: See + https://developers.google.com/maps/documentation/places/web-service/experimental/places-generative + for more details. + + A list of contextual contents where each entry associates to + the corresponding place in the same index in the places + field. The contents that are relevant to the ``text_query`` + in the request are preferred. If the contextual content is + not available for one of the places, it will return + non-contextual content. It will be empty only when the + content is unavailable for this place. This list should have + as many entries as the list of places if requested. """ places: MutableSequence[gmp_place.Place] = proto.RepeatedField( @@ -518,6 +531,13 @@ class SearchTextResponse(proto.Message): number=1, message=gmp_place.Place, ) + contextual_contents: MutableSequence[ + contextual_content.ContextualContent + ] = proto.RepeatedField( + proto.MESSAGE, + number=3, + message=contextual_content.ContextualContent, + ) class GetPhotoMediaRequest(proto.Message): @@ -708,11 +728,12 @@ class AutocompletePlacesRequest(proto.Message): to an imprecise location and used as a biasing signal. included_primary_types (MutableSequence[str]): Optional. Included primary Place type (for example, - "restaurant" or "gas_station") from - https://developers.google.com/maps/documentation/places/web-service/place-types. - A Place is only returned if its primary type is included in - this list. Up to 5 values can be specified. If no types are - specified, all Place types are returned. + "restaurant" or "gas_station") in Place Types + (https://developers.google.com/maps/documentation/places/web-service/place-types), + or only ``(regions)``, or only ``(cities)``. A Place is only + returned if its primary type is included in this list. Up to + 5 values can be specified. 
If no types are specified, all + Place types are returned. included_region_codes (MutableSequence[str]): Optional. Only include results in the specified regions, specified as up to 15 CLDR two-character region codes. An diff --git a/packages/google-maps-places/google/maps/places_v1/types/reference.py b/packages/google-maps-places/google/maps/places_v1/types/reference.py new file mode 100644 index 000000000000..e540141a1e71 --- /dev/null +++ b/packages/google-maps-places/google/maps/places_v1/types/reference.py @@ -0,0 +1,59 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + +from google.maps.places_v1.types import review + +__protobuf__ = proto.module( + package="google.maps.places.v1", + manifest={ + "References", + }, +) + + +class References(proto.Message): + r"""Experimental: See + https://developers.google.com/maps/documentation/places/web-service/experimental/places-generative + for more details. + + Reference that the generative content is related to. + + Attributes: + reviews (MutableSequence[google.maps.places_v1.types.Review]): + Reviews that serve as references. + places (MutableSequence[str]): + The list of resource names of the referenced + places. This name can be used in other APIs that + accept Place resource names. + """ + + reviews: MutableSequence[review.Review] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=review.Review, + ) + places: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=2, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-maps-places/tests/unit/gapic/places_v1/test_places.py b/packages/google-maps-places/tests/unit/gapic/places_v1/test_places.py index d1798dda862b..23ea057174fe 100644 --- a/packages/google-maps-places/tests/unit/gapic/places_v1/test_places.py +++ b/packages/google-maps-places/tests/unit/gapic/places_v1/test_places.py @@ -51,6 +51,7 @@ transports, ) from google.maps.places_v1.types import ( + contextual_content, ev_charging, fuel_options, geometry, diff --git a/packages/google-maps-routeoptimization/google/maps/routeoptimization/gapic_version.py b/packages/google-maps-routeoptimization/google/maps/routeoptimization/gapic_version.py index 33d37a7b677b..558c8aab67c5 100644 --- a/packages/google-maps-routeoptimization/google/maps/routeoptimization/gapic_version.py +++ b/packages/google-maps-routeoptimization/google/maps/routeoptimization/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "0.1.0" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-maps-routeoptimization/google/maps/routeoptimization_v1/gapic_version.py b/packages/google-maps-routeoptimization/google/maps/routeoptimization_v1/gapic_version.py index 33d37a7b677b..558c8aab67c5 100644 --- a/packages/google-maps-routeoptimization/google/maps/routeoptimization_v1/gapic_version.py +++ b/packages/google-maps-routeoptimization/google/maps/routeoptimization_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.1.0" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-shopping-css/CHANGELOG.md b/packages/google-shopping-css/CHANGELOG.md index 18911e0be7b4..aa1b0ed5d72f 100644 --- a/packages/google-shopping-css/CHANGELOG.md +++ b/packages/google-shopping-css/CHANGELOG.md @@ -1,5 +1,12 @@ # Changelog +## [0.1.5](https://github.com/googleapis/google-cloud-python/compare/google-shopping-css-v0.1.4...google-shopping-css-v0.1.5) (2024-06-19) + + +### Documentation + +* Remove "in Google Shopping" from documentation comments ([3a0a439](https://github.com/googleapis/google-cloud-python/commit/3a0a439ce9e43f88959babfa267e14bae10f8538)) + ## [0.1.4](https://github.com/googleapis/google-cloud-python/compare/google-shopping-css-v0.1.3...google-shopping-css-v0.1.4) (2024-03-05) diff --git a/packages/google-shopping-css/google/shopping/css/gapic_version.py b/packages/google-shopping-css/google/shopping/css/gapic_version.py index 558c8aab67c5..e9c4bb5650f3 100644 --- a/packages/google-shopping-css/google/shopping/css/gapic_version.py +++ b/packages/google-shopping-css/google/shopping/css/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.0.0" # {x-release-please-version} +__version__ = "0.1.5" # {x-release-please-version} diff --git a/packages/google-shopping-css/google/shopping/css_v1/gapic_version.py b/packages/google-shopping-css/google/shopping/css_v1/gapic_version.py index 558c8aab67c5..e9c4bb5650f3 100644 --- a/packages/google-shopping-css/google/shopping/css_v1/gapic_version.py +++ b/packages/google-shopping-css/google/shopping/css_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.0.0" # {x-release-please-version} +__version__ = "0.1.5" # {x-release-please-version} diff --git a/packages/google-shopping-css/google/shopping/css_v1/types/css_product_common.py b/packages/google-shopping-css/google/shopping/css_v1/types/css_product_common.py index 7e52e100b18f..cec3dd7281db 100644 --- a/packages/google-shopping-css/google/shopping/css_v1/types/css_product_common.py +++ b/packages/google-shopping-css/google/shopping/css_v1/types/css_product_common.py @@ -211,8 +211,7 @@ class Attributes(proto.Message): Date on which the item should expire, as specified upon insertion, in `ISO 8601 `__ format. 
The - actual expiration date in Google Shopping is exposed in - ``productstatuses`` as + actual expiration date is exposed in ``productstatuses`` as `googleExpirationDate `__ and might be earlier if ``expirationDate`` is too far in the future. Note: It may take 2+ days from the expiration date @@ -616,7 +615,7 @@ class CssProductStatus(proto.Message): Date on which the item has been last updated, in `ISO 8601 `__ format. google_expiration_date (google.protobuf.timestamp_pb2.Timestamp): - Date on which the item expires in Google Shopping, in `ISO + Date on which the item expires, in `ISO 8601 `__ format. """ diff --git a/packages/google-shopping-css/samples/generated_samples/snippet_metadata_google.shopping.css.v1.json b/packages/google-shopping-css/samples/generated_samples/snippet_metadata_google.shopping.css.v1.json index 5b682921bee5..2341456fc043 100644 --- a/packages/google-shopping-css/samples/generated_samples/snippet_metadata_google.shopping.css.v1.json +++ b/packages/google-shopping-css/samples/generated_samples/snippet_metadata_google.shopping.css.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-shopping-css", - "version": "0.1.0" + "version": "0.1.5" }, "snippets": [ { diff --git a/packages/google-shopping-css/tests/unit/gapic/css_v1/test_account_labels_service.py b/packages/google-shopping-css/tests/unit/gapic/css_v1/test_account_labels_service.py index 541ac45b95f3..fd8bf51486da 100644 --- a/packages/google-shopping-css/tests/unit/gapic/css_v1/test_account_labels_service.py +++ b/packages/google-shopping-css/tests/unit/gapic/css_v1/test_account_labels_service.py @@ -1617,13 +1617,13 @@ def test_list_account_labels_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_account_labels(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 diff --git a/packages/google-shopping-css/tests/unit/gapic/css_v1/test_accounts_service.py b/packages/google-shopping-css/tests/unit/gapic/css_v1/test_accounts_service.py index d1b0fea57059..b9f9e74c1b14 100644 --- a/packages/google-shopping-css/tests/unit/gapic/css_v1/test_accounts_service.py +++ b/packages/google-shopping-css/tests/unit/gapic/css_v1/test_accounts_service.py @@ -1574,13 +1574,13 @@ def test_list_child_accounts_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_child_accounts(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 diff --git a/packages/google-shopping-css/tests/unit/gapic/css_v1/test_css_products_service.py b/packages/google-shopping-css/tests/unit/gapic/css_v1/test_css_products_service.py index d42a8c3e42de..d775db533894 100644 --- a/packages/google-shopping-css/tests/unit/gapic/css_v1/test_css_products_service.py +++ b/packages/google-shopping-css/tests/unit/gapic/css_v1/test_css_products_service.py @@ -1973,13 +1973,13 @@ def test_list_css_products_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = 
tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_css_products(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 diff --git a/packages/google-shopping-merchant-accounts/.OwlBot.yaml b/packages/google-shopping-merchant-accounts/.OwlBot.yaml new file mode 100644 index 000000000000..6f4a84bd73c1 --- /dev/null +++ b/packages/google-shopping-merchant-accounts/.OwlBot.yaml @@ -0,0 +1,18 @@ +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +deep-copy-regex: + - source: /google/shopping/merchant/accounts/(v.*)/.*-py + dest: /owl-bot-staging/google-shopping-merchant-accounts/$1 +api-name: google-shopping-merchant-accounts diff --git a/packages/google-shopping-merchant-accounts/.coveragerc b/packages/google-shopping-merchant-accounts/.coveragerc new file mode 100644 index 000000000000..8338bbe3403b --- /dev/null +++ b/packages/google-shopping-merchant-accounts/.coveragerc @@ -0,0 +1,13 @@ +[run] +branch = True + +[report] +show_missing = True +omit = + google/shopping/merchant_accounts/__init__.py + google/shopping/merchant_accounts/gapic_version.py +exclude_lines = + # Re-enable the standard pragma + pragma: NO COVER + # Ignore debug-only repr + def __repr__ diff --git a/packages/google-shopping-merchant-accounts/.flake8 b/packages/google-shopping-merchant-accounts/.flake8 new file mode 100644 index 000000000000..87f6e408c47d --- /dev/null +++ b/packages/google-shopping-merchant-accounts/.flake8 @@ -0,0 +1,33 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Generated by synthtool. DO NOT EDIT! +[flake8] +ignore = E203, E231, E266, E501, W503 +exclude = + # Exclude generated code. + **/proto/** + **/gapic/** + **/services/** + **/types/** + *_pb2.py + + # Standard linting exemptions. 
+ **/.nox/** + __pycache__, + .git, + *.pyc, + conf.py diff --git a/packages/google-shopping-merchant-accounts/.gitignore b/packages/google-shopping-merchant-accounts/.gitignore new file mode 100644 index 000000000000..b4243ced74e4 --- /dev/null +++ b/packages/google-shopping-merchant-accounts/.gitignore @@ -0,0 +1,63 @@ +*.py[cod] +*.sw[op] + +# C extensions +*.so + +# Packages +*.egg +*.egg-info +dist +build +eggs +.eggs +parts +bin +var +sdist +develop-eggs +.installed.cfg +lib +lib64 +__pycache__ + +# Installer logs +pip-log.txt + +# Unit test / coverage reports +.coverage +.nox +.cache +.pytest_cache + + +# Mac +.DS_Store + +# JetBrains +.idea + +# VS Code +.vscode + +# emacs +*~ + +# Built documentation +docs/_build +bigquery/docs/generated +docs.metadata + +# Virtual environment +env/ + +# Test logs +coverage.xml +*sponge_log.xml + +# System test environment variables. +system_tests/local_test_setup + +# Make sure a generated file isn't accidentally committed. +pylintrc +pylintrc.test diff --git a/packages/google-shopping-merchant-accounts/.repo-metadata.json b/packages/google-shopping-merchant-accounts/.repo-metadata.json new file mode 100644 index 000000000000..ec64b6be52ac --- /dev/null +++ b/packages/google-shopping-merchant-accounts/.repo-metadata.json @@ -0,0 +1,17 @@ +{ + "name": "google-shopping-merchant-accounts", + "name_pretty": "Merchant API", + "api_description": "Programmatically manage your Merchant Center accounts.", + "product_documentation": "https://developers.google.com/merchant/api", + "client_documentation": "https://googleapis.dev/python/google-shopping-merchant-accounts/latest", + "issue_tracker": "https://github.com/googleapis/google-cloud-python/issues", + "release_level": "preview", + "language": "python", + "library_type": "GAPIC_AUTO", + "repo": "googleapis/google-cloud-python", + "distribution_name": "google-shopping-merchant-accounts", + "api_id": "accounts.googleapis.com", + "default_version": "v1beta", + "codeowner_team": "", + "api_shortname": "accounts" +} diff --git a/packages/google-shopping-merchant-accounts/CHANGELOG.md b/packages/google-shopping-merchant-accounts/CHANGELOG.md new file mode 100644 index 000000000000..d3771d1a6031 --- /dev/null +++ b/packages/google-shopping-merchant-accounts/CHANGELOG.md @@ -0,0 +1,17 @@ +# Changelog + +## [0.1.1](https://github.com/googleapis/google-cloud-python/compare/google-shopping-merchant-accounts-v0.1.0...google-shopping-merchant-accounts-v0.1.1) (2024-06-10) + + +### Documentation + +* [google-shopping-merchant-accounts] Format comments in ListUsersRequest ([#12786](https://github.com/googleapis/google-cloud-python/issues/12786)) ([be7afbb](https://github.com/googleapis/google-cloud-python/commit/be7afbbffe243120fc616fd5d80a6d86197653cf)) + +## 0.1.0 (2024-06-05) + + +### Features + +* add initial files for google.shopping.merchant.accounts.v1beta ([#12773](https://github.com/googleapis/google-cloud-python/issues/12773)) ([108875d](https://github.com/googleapis/google-cloud-python/commit/108875d1a38f31013ed98feddbef61cfb09e1d16)) + +## Changelog diff --git 
a/packages/google-shopping-merchant-accounts/CODE_OF_CONDUCT.md b/packages/google-shopping-merchant-accounts/CODE_OF_CONDUCT.md new file mode 100644 index 000000000000..039f43681204 --- /dev/null +++ b/packages/google-shopping-merchant-accounts/CODE_OF_CONDUCT.md @@ -0,0 +1,95 @@ + +# Code of Conduct + +## Our Pledge + +In the interest of fostering an open and welcoming environment, we as +contributors and maintainers pledge to making participation in our project and +our community a harassment-free experience for everyone, regardless of age, body +size, disability, ethnicity, gender identity and expression, level of +experience, education, socio-economic status, nationality, personal appearance, +race, religion, or sexual identity and orientation. + +## Our Standards + +Examples of behavior that contributes to creating a positive environment +include: + +* Using welcoming and inclusive language +* Being respectful of differing viewpoints and experiences +* Gracefully accepting constructive criticism +* Focusing on what is best for the community +* Showing empathy towards other community members + +Examples of unacceptable behavior by participants include: + +* The use of sexualized language or imagery and unwelcome sexual attention or + advances +* Trolling, insulting/derogatory comments, and personal or political attacks +* Public or private harassment +* Publishing others' private information, such as a physical or electronic + address, without explicit permission +* Other conduct which could reasonably be considered inappropriate in a + professional setting + +## Our Responsibilities + +Project maintainers are responsible for clarifying the standards of acceptable +behavior and are expected to take appropriate and fair corrective action in +response to any instances of unacceptable behavior. + +Project maintainers have the right and responsibility to remove, edit, or reject +comments, commits, code, wiki edits, issues, and other contributions that are +not aligned to this Code of Conduct, or to ban temporarily or permanently any +contributor for other behaviors that they deem inappropriate, threatening, +offensive, or harmful. + +## Scope + +This Code of Conduct applies both within project spaces and in public spaces +when an individual is representing the project or its community. Examples of +representing a project or community include using an official project e-mail +address, posting via an official social media account, or acting as an appointed +representative at an online or offline event. Representation of a project may be +further defined and clarified by project maintainers. + +This Code of Conduct also applies outside the project spaces when the Project +Steward has a reasonable belief that an individual's behavior may have a +negative impact on the project or its community. + +## Conflict Resolution + +We do not believe that all conflict is bad; healthy debate and disagreement +often yield positive results. However, it is never okay to be disrespectful or +to engage in behavior that violates the project’s code of conduct. + +If you see someone violating the code of conduct, you are encouraged to address +the behavior directly with those involved. Many issues can be resolved quickly +and easily, and this gives people more control over the outcome of their +dispute. If you are unable to resolve the matter for any reason, or if the +behavior is threatening or harassing, report it. We are dedicated to providing +an environment where participants feel welcome and safe. 
+ + +Reports should be directed to *googleapis-stewards@google.com*, the +Project Steward(s) for *Google Cloud Client Libraries*. It is the Project Steward’s duty to +receive and address reported violations of the code of conduct. They will then +work with a committee consisting of representatives from the Open Source +Programs Office and the Google Open Source Strategy team. If for any reason you +are uncomfortable reaching out to the Project Steward, please email +opensource@google.com. + +We will investigate every complaint, but you may not receive a direct response. +We will use our discretion in determining when and how to follow up on reported +incidents, which may range from not taking action to permanent expulsion from +the project and project-sponsored spaces. We will notify the accused of the +report and provide them an opportunity to discuss it before any action is taken. +The identity of the reporter will be omitted from the details of the report +supplied to the accused. In potentially harmful situations, such as ongoing +harassment or threats to anyone's safety, we may take action without notice. + +## Attribution + +This Code of Conduct is adapted from the Contributor Covenant, version 1.4, +available at +https://www.contributor-covenant.org/version/1/4/code-of-conduct.html \ No newline at end of file diff --git a/packages/google-shopping-merchant-accounts/CONTRIBUTING.rst b/packages/google-shopping-merchant-accounts/CONTRIBUTING.rst new file mode 100644 index 000000000000..bd53f587099a --- /dev/null +++ b/packages/google-shopping-merchant-accounts/CONTRIBUTING.rst @@ -0,0 +1,271 @@ +.. Generated by synthtool. DO NOT EDIT! +############ +Contributing +############ + +#. **Please sign one of the contributor license agreements below.** +#. Fork the repo, develop and test your code changes, add docs. +#. Make sure that your commit messages clearly describe the changes. +#. Send a pull request. (Please Read: `Faster Pull Request Reviews`_) + +.. _Faster Pull Request Reviews: https://github.com/kubernetes/community/blob/master/contributors/guide/pull-requests.md#best-practices-for-faster-reviews + +.. contents:: Here are some guidelines for hacking on the Google Cloud Client libraries. + +*************** +Adding Features +*************** + +In order to add a feature: + +- The feature must be documented in both the API and narrative + documentation. + +- The feature must work fully on the following CPython versions: + 3.7, 3.8, 3.9, 3.10, 3.11 and 3.12 on both UNIX and Windows. + +- The feature must not add unnecessary dependencies (where + "unnecessary" is of course subjective, but new dependencies should + be discussed). + +**************************** +Using a Development Checkout +**************************** + +You'll have to create a development environment using a Git checkout: + +- While logged into your GitHub account, navigate to the + ``google-cloud-python`` `repo`_ on GitHub. + +- Fork and clone the ``google-cloud-python`` repository to your GitHub account by + clicking the "Fork" button. + +- Clone your fork of ``google-cloud-python`` from your GitHub account to your local + computer, substituting your account username and specifying the destination + as ``hack-on-google-cloud-python``. 
E.g.:: + + $ cd ${HOME} + $ git clone git@github.com:USERNAME/google-cloud-python.git hack-on-google-cloud-python + $ cd hack-on-google-cloud-python + # Configure remotes such that you can pull changes from the googleapis/google-cloud-python + # repository into your local repository. + $ git remote add upstream git@github.com:googleapis/google-cloud-python.git + # fetch and merge changes from upstream into main + $ git fetch upstream + $ git merge upstream/main + +Now your local repo is set up such that you will push changes to your GitHub +repo, from which you can submit a pull request. + +To work on the codebase and run the tests, we recommend using ``nox``, +but you can also use a ``virtualenv`` of your own creation. + +.. _repo: https://github.com/googleapis/google-cloud-python + +Using ``nox`` +============= + +We use `nox `__ to instrument our tests. + +- To test your changes, run unit tests with ``nox``:: + $ nox -s unit + +- To run a single unit test:: + + $ nox -s unit-3.12 -- -k + + + .. note:: + + The unit tests and system tests are described in the + ``noxfile.py`` files in each directory. + +.. nox: https://pypi.org/project/nox/ + +***************************************** +I'm getting weird errors... Can you help? +***************************************** + +If the error mentions ``Python.h`` not being found, +install ``python-dev`` and try again. +On Debian/Ubuntu:: + + $ sudo apt-get install python-dev + +************ +Coding Style +************ +- We use the automatic code formatter ``black``. You can run it using + the nox session ``blacken``. This will eliminate many lint errors. Run via:: + + $ nox -s blacken + +- PEP8 compliance is required, with exceptions defined in the linter configuration. + If you have ``nox`` installed, you can test that you have not introduced + any non-compliant code via:: + + $ nox -s lint + +- In order to make ``nox -s lint`` run faster, you can set some environment + variables:: + + export GOOGLE_CLOUD_TESTING_REMOTE="upstream" + export GOOGLE_CLOUD_TESTING_BRANCH="main" + + By doing this, you are specifying the location of the most up-to-date + version of ``google-cloud-python``. The + remote name ``upstream`` should point to the official ``googleapis`` + checkout and the branch should be the default branch on that remote (``main``). + +- This repository contains configuration for the + `pre-commit `__ tool, which automates checking + our linters during a commit. If you have it installed on your ``$PATH``, + you can enable enforcing those checks via: + +.. code-block:: bash + + $ pre-commit install + pre-commit installed at .git/hooks/pre-commit + +Exceptions to PEP8: + +- Many unit tests use a helper method, ``_call_fut`` ("FUT" is short for + "Function-Under-Test"), which is PEP8-incompliant, but more readable. + Some also use a local variable, ``MUT`` (short for "Module-Under-Test"). + +******************** +Running System Tests +******************** + +- To run system tests, you can execute:: + + # Run all system tests + $ nox -s system + + # Run a single system test + $ nox -s system-3.12 -- -k + + + .. note:: + + System tests are only configured to run under Python 3.8, 3.9, 3.10, 3.11 and 3.12. + For expediency, we do not run them in older versions of Python 3. + + This alone will not run the tests. You'll need to change some local + auth settings and change some configuration in your project to + run all the tests. 
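For readers new to ``nox``: the ``unit`` and ``system`` sessions mentioned above are plain Python functions registered in each package's ``noxfile.py``. The following is a minimal sketch of such a session, not the generated ``noxfile.py`` itself (which defines many more sessions and pins specific dependency versions); the file name and dependency list here are illustrative assumptions.

.. code-block:: python

    # Illustrative sketch only -- not the generated noxfile.py.
    import nox


    @nox.session(python=["3.8", "3.9", "3.10", "3.11", "3.12"])
    def unit(session):
        """Run the unit test suite under each configured interpreter."""
        # Install the test runner and the package under test.
        session.install("pytest", "pytest-cov")
        session.install("-e", ".")
        # Forward anything passed after `--` on the command line to pytest,
        # which is how `nox -s unit-3.12 -- -k ...` selects a single test.
        session.run("pytest", "tests/unit", *session.posargs)

Invoking ``nox -s unit-3.12`` runs only the 3.12 variant of such a session, and arguments after ``--`` reach pytest unchanged.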
+ +- System tests will be run against an actual project. You should use local credentials from gcloud when possible. See `Best practices for application authentication `__. Some tests require a service account. For those tests see `Authenticating as a service account `__. + +************* +Test Coverage +************* + +- The codebase *must* have 100% test statement coverage after each commit. + You can test coverage via ``nox -s cover``. + +****************************************************** +Documentation Coverage and Building HTML Documentation +****************************************************** + +If you fix a bug, and the bug requires an API or behavior modification, all +documentation in this package which references that API or behavior must be +changed to reflect the bug fix, ideally in the same commit that fixes the bug +or adds the feature. + +Build the docs via: + + $ nox -s docs + +************************* +Samples and code snippets +************************* + +Code samples and snippets live in the `samples/` catalogue. Feel free to +provide more examples, but make sure to write tests for those examples. +Each folder containing example code requires its own `noxfile.py` script +which automates testing. If you decide to create a new folder, you can +base it on the `samples/snippets` folder (providing `noxfile.py` and +the requirements files). + +The tests will run against a real Google Cloud Project, so you should +configure them just like the System Tests. + +- To run sample tests, you can execute:: + + # Run all tests in a folder + $ cd samples/snippets + $ nox -s py-3.8 + + # Run a single sample test + $ cd samples/snippets + $ nox -s py-3.8 -- -k + +******************************************** +Note About ``README`` as it pertains to PyPI +******************************************** + +The `description on PyPI`_ for the project comes directly from the +``README``. Due to the reStructuredText (``rst``) parser used by +PyPI, relative links which will work on GitHub (e.g. ``CONTRIBUTING.rst`` +instead of +``https://github.com/googleapis/google-cloud-python/blob/main/CONTRIBUTING.rst``) +may cause problems creating links or rendering the description. + +.. _description on PyPI: https://pypi.org/project/google-shopping-merchant-accounts + + +************************* +Supported Python Versions +************************* + +We support: + +- `Python 3.7`_ +- `Python 3.8`_ +- `Python 3.9`_ +- `Python 3.10`_ +- `Python 3.11`_ +- `Python 3.12`_ + +.. _Python 3.7: https://docs.python.org/3.7/ +.. _Python 3.8: https://docs.python.org/3.8/ +.. _Python 3.9: https://docs.python.org/3.9/ +.. _Python 3.10: https://docs.python.org/3.10/ +.. _Python 3.11: https://docs.python.org/3.11/ +.. _Python 3.12: https://docs.python.org/3.12/ + + +Supported versions can be found in our ``noxfile.py`` `config`_. + +.. 
_config: https://github.com/googleapis/google-cloud-python/blob/main/packages/google-shopping-merchant-accounts/noxfile.py + + +********** +Versioning +********** + +This library follows `Semantic Versioning`_. + +.. _Semantic Versioning: http://semver.org/ + +Some packages are currently in major version zero (``0.y.z``), which means that +anything may change at any time and the public API should not be considered +stable. + +****************************** +Contributor License Agreements +****************************** + +Before we can accept your pull requests you'll need to sign a Contributor +License Agreement (CLA): + +- **If you are an individual writing original source code** and **you own the + intellectual property**, then you'll need to sign an + `individual CLA `__. +- **If you work for a company that wants to allow you to contribute your work**, + then you'll need to sign a + `corporate CLA `__. + +You can sign these electronically (just scroll to the bottom). After that, +we'll be able to accept your pull requests. diff --git a/packages/google-shopping-merchant-accounts/LICENSE b/packages/google-shopping-merchant-accounts/LICENSE new file mode 100644 index 000000000000..d64569567334 --- /dev/null +++ b/packages/google-shopping-merchant-accounts/LICENSE @@ -0,0 +1,202 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. 
For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/packages/google-shopping-merchant-accounts/MANIFEST.in b/packages/google-shopping-merchant-accounts/MANIFEST.in new file mode 100644 index 000000000000..e0a66705318e --- /dev/null +++ b/packages/google-shopping-merchant-accounts/MANIFEST.in @@ -0,0 +1,25 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +# Generated by synthtool. DO NOT EDIT! +include README.rst LICENSE +recursive-include google *.json *.proto py.typed +recursive-include tests * +global-exclude *.py[co] +global-exclude __pycache__ + +# Exclude scripts for samples readmegen +prune scripts/readme-gen diff --git a/packages/google-shopping-merchant-accounts/README.rst b/packages/google-shopping-merchant-accounts/README.rst new file mode 100644 index 000000000000..0645f3a3b275 --- /dev/null +++ b/packages/google-shopping-merchant-accounts/README.rst @@ -0,0 +1,108 @@ +Python Client for Merchant API +============================== + +|preview| |pypi| |versions| + +`Merchant API`_: Programmatically manage your Merchant Center accounts. + +- `Client Library Documentation`_ +- `Product Documentation`_ + +.. |preview| image:: https://img.shields.io/badge/support-preview-orange.svg + :target: https://github.com/googleapis/google-cloud-python/blob/main/README.rst#stability-levels +.. |pypi| image:: https://img.shields.io/pypi/v/google-shopping-merchant-accounts.svg + :target: https://pypi.org/project/google-shopping-merchant-accounts/ +.. |versions| image:: https://img.shields.io/pypi/pyversions/google-shopping-merchant-accounts.svg + :target: https://pypi.org/project/google-shopping-merchant-accounts/ +.. _Merchant API: https://developers.google.com/merchant/api +.. _Client Library Documentation: https://googleapis.dev/python/google-shopping-merchant-accounts/latest +.. _Product Documentation: https://developers.google.com/merchant/api + +Quick Start +----------- + +In order to use this library, you first need to go through the following steps: + +1. `Select or create a Cloud Platform project.`_ +2. `Enable billing for your project.`_ +3. `Enable the Merchant API.`_ +4. `Setup Authentication.`_ + +.. _Select or create a Cloud Platform project.: https://console.cloud.google.com/project +.. _Enable billing for your project.: https://cloud.google.com/billing/docs/how-to/modify-project#enable_billing_for_a_project +.. _Enable the Merchant API.: https://developers.google.com/merchant/api +.. _Setup Authentication.: https://googleapis.dev/python/google-api-core/latest/auth.html + +Installation +~~~~~~~~~~~~ + +Install this library in a virtual environment using `venv`_. `venv`_ is a tool that +creates isolated Python environments. These isolated environments can have separate +versions of Python packages, which allows you to isolate one project's dependencies +from the dependencies of other projects. + +With `venv`_, it's possible to install this library without needing system +install permissions, and without clashing with the installed system +dependencies. + +.. 
_`venv`: https://docs.python.org/3/library/venv.html + + +Code samples and snippets +~~~~~~~~~~~~~~~~~~~~~~~~~ + +Code samples and snippets live in the `samples/`_ folder. + +.. _samples/: https://github.com/googleapis/google-cloud-python/tree/main/packages/google-shopping-merchant-accounts/samples + + +Supported Python Versions +^^^^^^^^^^^^^^^^^^^^^^^^^ +Our client libraries are compatible with all current `active`_ and `maintenance`_ versions of +Python. + +Python >= 3.7 + +.. _active: https://devguide.python.org/devcycle/#in-development-main-branch +.. _maintenance: https://devguide.python.org/devcycle/#maintenance-branches + +Unsupported Python Versions +^^^^^^^^^^^^^^^^^^^^^^^^^^^ +Python <= 3.6 + +If you are using an `end-of-life`_ +version of Python, we recommend that you update as soon as possible to an actively supported version. + +.. _end-of-life: https://devguide.python.org/devcycle/#end-of-life-branches + +Mac/Linux +^^^^^^^^^ + +.. code-block:: console + + python3 -m venv + source /bin/activate + pip install google-shopping-merchant-accounts + + +Windows +^^^^^^^ + +.. code-block:: console + + py -m venv + .\\Scripts\activate + pip install google-shopping-merchant-accounts + +Next Steps +~~~~~~~~~~ + +- Read the `Client Library Documentation`_ for Merchant API + to see other available methods on the client. +- Read the `Merchant API Product documentation`_ to learn + more about the product and see How-to Guides. +- View this `README`_ to see the full list of Cloud + APIs that we cover. + +.. _Merchant API Product documentation: https://developers.google.com/merchant/api +.. 
_README: https://github.com/googleapis/google-cloud-python/blob/main/README.rst diff --git a/packages/google-shopping-merchant-accounts/docs/CHANGELOG.md b/packages/google-shopping-merchant-accounts/docs/CHANGELOG.md new file mode 120000 index 000000000000..04c99a55caae --- /dev/null +++ b/packages/google-shopping-merchant-accounts/docs/CHANGELOG.md @@ -0,0 +1 @@ +../CHANGELOG.md \ No newline at end of file diff --git a/packages/google-shopping-merchant-accounts/docs/README.rst b/packages/google-shopping-merchant-accounts/docs/README.rst new file mode 120000 index 000000000000..89a0106941ff --- /dev/null +++ b/packages/google-shopping-merchant-accounts/docs/README.rst @@ -0,0 +1 @@ +../README.rst \ No newline at end of file diff --git a/packages/google-shopping-merchant-accounts/docs/_static/custom.css b/packages/google-shopping-merchant-accounts/docs/_static/custom.css new file mode 100644 index 000000000000..b0a295464b23 --- /dev/null +++ b/packages/google-shopping-merchant-accounts/docs/_static/custom.css @@ -0,0 +1,20 @@ +div#python2-eol { + border-color: red; + border-width: medium; +} + +/* Ensure minimum width for 'Parameters' / 'Returns' column */ +dl.field-list > dt { + min-width: 100px +} + +/* Insert space between methods for readability */ +dl.method { + padding-top: 10px; + padding-bottom: 10px +} + +/* Insert empty space between classes */ +dl.class { + padding-bottom: 50px +} diff --git a/packages/google-shopping-merchant-accounts/docs/_templates/layout.html b/packages/google-shopping-merchant-accounts/docs/_templates/layout.html new file mode 100644 index 000000000000..6316a537f72b --- /dev/null +++ b/packages/google-shopping-merchant-accounts/docs/_templates/layout.html @@ -0,0 +1,50 @@ + +{% extends "!layout.html" %} +{%- block content %} +{%- if theme_fixed_sidebar|lower == 'true' %} +
+ {{ sidebar() }} + {%- block document %} +
+ {%- if render_sidebar %} +
+ {%- endif %} + + {%- block relbar_top %} + {%- if theme_show_relbar_top|tobool %} + + {%- endif %} + {% endblock %} + +
+
+ As of January 1, 2020 this library no longer supports Python 2 on the latest released version. + Library versions released prior to that date will continue to be available. For more information please + visit Python 2 support on Google Cloud. +
+ {% block body %} {% endblock %} +
+ + {%- block relbar_bottom %} + {%- if theme_show_relbar_bottom|tobool %} + + {%- endif %} + {% endblock %} + + {%- if render_sidebar %} +
+ {%- endif %} +
+ {%- endblock %} +
+
+{%- else %} +{{ super() }} +{%- endif %} +{%- endblock %} diff --git a/packages/google-shopping-merchant-accounts/docs/conf.py b/packages/google-shopping-merchant-accounts/docs/conf.py new file mode 100644 index 000000000000..83da92ecec22 --- /dev/null +++ b/packages/google-shopping-merchant-accounts/docs/conf.py @@ -0,0 +1,384 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# google-shopping-merchant-accounts documentation build configuration file +# +# This file is execfile()d with the current directory set to its +# containing dir. +# +# Note that not all possible configuration values are present in this +# autogenerated file. +# +# All configuration values have a default; values that are commented out +# serve to show the default. + +import os +import shlex +import sys + +# If extensions (or modules to document with autodoc) are in another directory, +# add these directories to sys.path here. If the directory is relative to the +# documentation root, use os.path.abspath to make it absolute, like shown here. +sys.path.insert(0, os.path.abspath("..")) + +# For plugins that can not read conf.py. +# See also: https://github.com/docascode/sphinx-docfx-yaml/issues/85 +sys.path.insert(0, os.path.abspath(".")) + +__version__ = "" + +# -- General configuration ------------------------------------------------ + +# If your documentation needs a minimal Sphinx version, state it here. +needs_sphinx = "1.5.5" + +# Add any Sphinx extension module names here, as strings. They can be +# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom +# ones. +extensions = [ + "sphinx.ext.autodoc", + "sphinx.ext.autosummary", + "sphinx.ext.intersphinx", + "sphinx.ext.coverage", + "sphinx.ext.doctest", + "sphinx.ext.napoleon", + "sphinx.ext.todo", + "sphinx.ext.viewcode", + "recommonmark", +] + +# autodoc/autosummary flags +autoclass_content = "both" +autodoc_default_options = {"members": True} +autosummary_generate = True + + +# Add any paths that contain templates here, relative to this directory. +templates_path = ["_templates"] + +# The suffix(es) of source filenames. +# You can specify multiple suffix as a list of string: +# source_suffix = ['.rst', '.md'] +source_suffix = [".rst", ".md"] + +# The encoding of source files. +# source_encoding = 'utf-8-sig' + +# The root toctree document. +root_doc = "index" + +# General information about the project. +project = "google-shopping-merchant-accounts" +copyright = "2019, Google" +author = "Google APIs" + +# The version info for the project you're documenting, acts as replacement for +# |version| and |release|, also used in various other places throughout the +# built documents. +# +# The full version, including alpha/beta/rc tags. +release = __version__ +# The short X.Y version. +version = ".".join(release.split(".")[0:2]) + +# The language for content autogenerated by Sphinx. 
Refer to documentation +# for a list of supported languages. +# +# This is also used if you do content translation via gettext catalogs. +# Usually you set "language" from the command line for these cases. +language = None + +# There are two options for replacing |today|: either, you set today to some +# non-false value, then it is used: +# today = '' +# Else, today_fmt is used as the format for a strftime call. +# today_fmt = '%B %d, %Y' + +# List of patterns, relative to source directory, that match files and +# directories to ignore when looking for source files. +exclude_patterns = [ + "_build", + "**/.nox/**/*", + "samples/AUTHORING_GUIDE.md", + "samples/CONTRIBUTING.md", + "samples/snippets/README.rst", +] + +# The reST default role (used for this markup: `text`) to use for all +# documents. +# default_role = None + +# If true, '()' will be appended to :func: etc. cross-reference text. +# add_function_parentheses = True + +# If true, the current module name will be prepended to all description +# unit titles (such as .. function::). +# add_module_names = True + +# If true, sectionauthor and moduleauthor directives will be shown in the +# output. They are ignored by default. +# show_authors = False + +# The name of the Pygments (syntax highlighting) style to use. +pygments_style = "sphinx" + +# A list of ignored prefixes for module index sorting. +# modindex_common_prefix = [] + +# If true, keep warnings as "system message" paragraphs in the built documents. +# keep_warnings = False + +# If true, `todo` and `todoList` produce output, else they produce nothing. +todo_include_todos = True + + +# -- Options for HTML output ---------------------------------------------- + +# The theme to use for HTML and HTML Help pages. See the documentation for +# a list of builtin themes. +html_theme = "alabaster" + +# Theme options are theme-specific and customize the look and feel of a theme +# further. For a list of options available for each theme, see the +# documentation. +html_theme_options = { + "description": "Google Cloud Client Libraries for google-shopping-merchant-accounts", + "github_user": "googleapis", + "github_repo": "google-cloud-python", + "github_banner": True, + "font_family": "'Roboto', Georgia, sans", + "head_font_family": "'Roboto', Georgia, serif", + "code_font_family": "'Roboto Mono', 'Consolas', monospace", +} + +# Add any paths that contain custom themes here, relative to this directory. +# html_theme_path = [] + +# The name for this set of Sphinx documents. If None, it defaults to +# " v documentation". +# html_title = None + +# A shorter title for the navigation bar. Default is the same as html_title. +# html_short_title = None + +# The name of an image file (relative to this directory) to place at the top +# of the sidebar. +# html_logo = None + +# The name of an image file (within the static path) to use as favicon of the +# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 +# pixels large. +# html_favicon = None + +# Add any paths that contain custom static files (such as style sheets) here, +# relative to this directory. They are copied after the builtin static files, +# so a file named "default.css" will overwrite the builtin "default.css". +html_static_path = ["_static"] + +# Add any extra paths that contain custom files (such as robots.txt or +# .htaccess) here, relative to this directory. These files are copied +# directly to the root of the documentation. 
+# html_extra_path = [] + +# If not '', a 'Last updated on:' timestamp is inserted at every page bottom, +# using the given strftime format. +# html_last_updated_fmt = '%b %d, %Y' + +# If true, SmartyPants will be used to convert quotes and dashes to +# typographically correct entities. +# html_use_smartypants = True + +# Custom sidebar templates, maps document names to template names. +# html_sidebars = {} + +# Additional templates that should be rendered to pages, maps page names to +# template names. +# html_additional_pages = {} + +# If false, no module index is generated. +# html_domain_indices = True + +# If false, no index is generated. +# html_use_index = True + +# If true, the index is split into individual pages for each letter. +# html_split_index = False + +# If true, links to the reST sources are added to the pages. +# html_show_sourcelink = True + +# If true, "Created using Sphinx" is shown in the HTML footer. Default is True. +# html_show_sphinx = True + +# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. +# html_show_copyright = True + +# If true, an OpenSearch description file will be output, and all pages will +# contain a tag referring to it. The value of this option must be the +# base URL from which the finished HTML is served. +# html_use_opensearch = '' + +# This is the file name suffix for HTML files (e.g. ".xhtml"). +# html_file_suffix = None + +# Language to be used for generating the HTML full-text search index. +# Sphinx supports the following languages: +# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja' +# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr' +# html_search_language = 'en' + +# A dictionary with options for the search language support, empty by default. +# Now only 'ja' uses this config value +# html_search_options = {'type': 'default'} + +# The name of a javascript file (relative to the configuration directory) that +# implements a search results scorer. If empty, the default will be used. +# html_search_scorer = 'scorer.js' + +# Output file base name for HTML help builder. +htmlhelp_basename = "google-shopping-merchant-accounts-doc" + +# -- Options for warnings ------------------------------------------------------ + + +suppress_warnings = [ + # Temporarily suppress this to avoid "more than one target found for + # cross-reference" warning, which are intractable for us to avoid while in + # a mono-repo. + # See https://github.com/sphinx-doc/sphinx/blob + # /2a65ffeef5c107c19084fabdd706cdff3f52d93c/sphinx/domains/python.py#L843 + "ref.python" +] + +# -- Options for LaTeX output --------------------------------------------- + +latex_elements = { + # The paper size ('letterpaper' or 'a4paper'). + #'papersize': 'letterpaper', + # The font size ('10pt', '11pt' or '12pt'). + #'pointsize': '10pt', + # Additional stuff for the LaTeX preamble. + #'preamble': '', + # Latex figure (float) alignment + #'figure_align': 'htbp', +} + +# Grouping the document tree into LaTeX files. List of tuples +# (source start file, target name, title, +# author, documentclass [howto, manual, or own class]). +latex_documents = [ + ( + root_doc, + "google-shopping-merchant-accounts.tex", + "google-shopping-merchant-accounts Documentation", + author, + "manual", + ) +] + +# The name of an image file (relative to this directory) to place at the top of +# the title page. +# latex_logo = None + +# For "manual" documents, if this is true, then toplevel headings are parts, +# not chapters. 
+# latex_use_parts = False + +# If true, show page references after internal links. +# latex_show_pagerefs = False + +# If true, show URL addresses after external links. +# latex_show_urls = False + +# Documents to append as an appendix to all manuals. +# latex_appendices = [] + +# If false, no module index is generated. +# latex_domain_indices = True + + +# -- Options for manual page output --------------------------------------- + +# One entry per manual page. List of tuples +# (source start file, name, description, authors, manual section). +man_pages = [ + ( + root_doc, + "google-shopping-merchant-accounts", + "google-shopping-merchant-accounts Documentation", + [author], + 1, + ) +] + +# If true, show URL addresses after external links. +# man_show_urls = False + + +# -- Options for Texinfo output ------------------------------------------- + +# Grouping the document tree into Texinfo files. List of tuples +# (source start file, target name, title, author, +# dir menu entry, description, category) +texinfo_documents = [ + ( + root_doc, + "google-shopping-merchant-accounts", + "google-shopping-merchant-accounts Documentation", + author, + "google-shopping-merchant-accounts", + "google-shopping-merchant-accounts Library", + "APIs", + ) +] + +# Documents to append as an appendix to all manuals. +# texinfo_appendices = [] + +# If false, no module index is generated. +# texinfo_domain_indices = True + +# How to display URL addresses: 'footnote', 'no', or 'inline'. +# texinfo_show_urls = 'footnote' + +# If true, do not generate a @detailmenu in the "Top" node's menu. +# texinfo_no_detailmenu = False + + +# Example configuration for intersphinx: refer to the Python standard library. +intersphinx_mapping = { + "python": ("https://python.readthedocs.org/en/latest/", None), + "google-auth": ("https://googleapis.dev/python/google-auth/latest/", None), + "google.api_core": ( + "https://googleapis.dev/python/google-api-core/latest/", + None, + ), + "grpc": ("https://grpc.github.io/grpc/python/", None), + "proto-plus": ("https://proto-plus-python.readthedocs.io/en/latest/", None), + "protobuf": ("https://googleapis.dev/python/protobuf/latest/", None), +} + + +# Napoleon settings +napoleon_google_docstring = True +napoleon_numpy_docstring = True +napoleon_include_private_with_doc = False +napoleon_include_special_with_doc = True +napoleon_use_admonition_for_examples = False +napoleon_use_admonition_for_notes = False +napoleon_use_admonition_for_references = False +napoleon_use_ivar = False +napoleon_use_param = True +napoleon_use_rtype = True diff --git a/packages/google-shopping-merchant-accounts/docs/index.rst b/packages/google-shopping-merchant-accounts/docs/index.rst new file mode 100644 index 000000000000..12d31a16b828 --- /dev/null +++ b/packages/google-shopping-merchant-accounts/docs/index.rst @@ -0,0 +1,23 @@ +.. include:: README.rst + +.. include:: multiprocessing.rst + + +API Reference +------------- +.. toctree:: + :maxdepth: 2 + + merchant_accounts_v1beta/services_ + merchant_accounts_v1beta/types_ + + +Changelog +--------- + +For a list of all ``google-shopping-merchant-accounts`` releases: + +.. 
toctree:: + :maxdepth: 2 + + CHANGELOG diff --git a/packages/google-shopping-merchant-accounts/docs/merchant_accounts_v1beta/account_issue_service.rst b/packages/google-shopping-merchant-accounts/docs/merchant_accounts_v1beta/account_issue_service.rst new file mode 100644 index 000000000000..a5f41b61ad72 --- /dev/null +++ b/packages/google-shopping-merchant-accounts/docs/merchant_accounts_v1beta/account_issue_service.rst @@ -0,0 +1,10 @@ +AccountIssueService +------------------------------------- + +.. automodule:: google.shopping.merchant_accounts_v1beta.services.account_issue_service + :members: + :inherited-members: + +.. automodule:: google.shopping.merchant_accounts_v1beta.services.account_issue_service.pagers + :members: + :inherited-members: diff --git a/packages/google-shopping-merchant-accounts/docs/merchant_accounts_v1beta/account_tax_service.rst b/packages/google-shopping-merchant-accounts/docs/merchant_accounts_v1beta/account_tax_service.rst new file mode 100644 index 000000000000..1693b3a3f8bd --- /dev/null +++ b/packages/google-shopping-merchant-accounts/docs/merchant_accounts_v1beta/account_tax_service.rst @@ -0,0 +1,10 @@ +AccountTaxService +----------------------------------- + +.. automodule:: google.shopping.merchant_accounts_v1beta.services.account_tax_service + :members: + :inherited-members: + +.. automodule:: google.shopping.merchant_accounts_v1beta.services.account_tax_service.pagers + :members: + :inherited-members: diff --git a/packages/google-shopping-merchant-accounts/docs/merchant_accounts_v1beta/accounts_service.rst b/packages/google-shopping-merchant-accounts/docs/merchant_accounts_v1beta/accounts_service.rst new file mode 100644 index 000000000000..4b22494597e3 --- /dev/null +++ b/packages/google-shopping-merchant-accounts/docs/merchant_accounts_v1beta/accounts_service.rst @@ -0,0 +1,10 @@ +AccountsService +--------------------------------- + +.. automodule:: google.shopping.merchant_accounts_v1beta.services.accounts_service + :members: + :inherited-members: + +.. automodule:: google.shopping.merchant_accounts_v1beta.services.accounts_service.pagers + :members: + :inherited-members: diff --git a/packages/google-shopping-merchant-accounts/docs/merchant_accounts_v1beta/business_identity_service.rst b/packages/google-shopping-merchant-accounts/docs/merchant_accounts_v1beta/business_identity_service.rst new file mode 100644 index 000000000000..a80c213d4edf --- /dev/null +++ b/packages/google-shopping-merchant-accounts/docs/merchant_accounts_v1beta/business_identity_service.rst @@ -0,0 +1,6 @@ +BusinessIdentityService +----------------------------------------- + +.. automodule:: google.shopping.merchant_accounts_v1beta.services.business_identity_service + :members: + :inherited-members: diff --git a/packages/google-shopping-merchant-accounts/docs/merchant_accounts_v1beta/business_info_service.rst b/packages/google-shopping-merchant-accounts/docs/merchant_accounts_v1beta/business_info_service.rst new file mode 100644 index 000000000000..f5aa91c9d676 --- /dev/null +++ b/packages/google-shopping-merchant-accounts/docs/merchant_accounts_v1beta/business_info_service.rst @@ -0,0 +1,6 @@ +BusinessInfoService +------------------------------------- + +.. 
automodule:: google.shopping.merchant_accounts_v1beta.services.business_info_service + :members: + :inherited-members: diff --git a/packages/google-shopping-merchant-accounts/docs/merchant_accounts_v1beta/email_preferences_service.rst b/packages/google-shopping-merchant-accounts/docs/merchant_accounts_v1beta/email_preferences_service.rst new file mode 100644 index 000000000000..28be8558fb89 --- /dev/null +++ b/packages/google-shopping-merchant-accounts/docs/merchant_accounts_v1beta/email_preferences_service.rst @@ -0,0 +1,6 @@ +EmailPreferencesService +----------------------------------------- + +.. automodule:: google.shopping.merchant_accounts_v1beta.services.email_preferences_service + :members: + :inherited-members: diff --git a/packages/google-shopping-merchant-accounts/docs/merchant_accounts_v1beta/homepage_service.rst b/packages/google-shopping-merchant-accounts/docs/merchant_accounts_v1beta/homepage_service.rst new file mode 100644 index 000000000000..ce83ca532741 --- /dev/null +++ b/packages/google-shopping-merchant-accounts/docs/merchant_accounts_v1beta/homepage_service.rst @@ -0,0 +1,6 @@ +HomepageService +--------------------------------- + +.. automodule:: google.shopping.merchant_accounts_v1beta.services.homepage_service + :members: + :inherited-members: diff --git a/packages/google-shopping-merchant-accounts/docs/merchant_accounts_v1beta/online_return_policy_service.rst b/packages/google-shopping-merchant-accounts/docs/merchant_accounts_v1beta/online_return_policy_service.rst new file mode 100644 index 000000000000..4ebc9d8a8966 --- /dev/null +++ b/packages/google-shopping-merchant-accounts/docs/merchant_accounts_v1beta/online_return_policy_service.rst @@ -0,0 +1,10 @@ +OnlineReturnPolicyService +------------------------------------------- + +.. automodule:: google.shopping.merchant_accounts_v1beta.services.online_return_policy_service + :members: + :inherited-members: + +.. automodule:: google.shopping.merchant_accounts_v1beta.services.online_return_policy_service.pagers + :members: + :inherited-members: diff --git a/packages/google-shopping-merchant-accounts/docs/merchant_accounts_v1beta/programs_service.rst b/packages/google-shopping-merchant-accounts/docs/merchant_accounts_v1beta/programs_service.rst new file mode 100644 index 000000000000..b69123082e7d --- /dev/null +++ b/packages/google-shopping-merchant-accounts/docs/merchant_accounts_v1beta/programs_service.rst @@ -0,0 +1,10 @@ +ProgramsService +--------------------------------- + +.. automodule:: google.shopping.merchant_accounts_v1beta.services.programs_service + :members: + :inherited-members: + +.. automodule:: google.shopping.merchant_accounts_v1beta.services.programs_service.pagers + :members: + :inherited-members: diff --git a/packages/google-shopping-merchant-accounts/docs/merchant_accounts_v1beta/regions_service.rst b/packages/google-shopping-merchant-accounts/docs/merchant_accounts_v1beta/regions_service.rst new file mode 100644 index 000000000000..5516e7ccd805 --- /dev/null +++ b/packages/google-shopping-merchant-accounts/docs/merchant_accounts_v1beta/regions_service.rst @@ -0,0 +1,10 @@ +RegionsService +-------------------------------- + +.. automodule:: google.shopping.merchant_accounts_v1beta.services.regions_service + :members: + :inherited-members: + +.. 
automodule:: google.shopping.merchant_accounts_v1beta.services.regions_service.pagers + :members: + :inherited-members: diff --git a/packages/google-shopping-merchant-accounts/docs/merchant_accounts_v1beta/services_.rst b/packages/google-shopping-merchant-accounts/docs/merchant_accounts_v1beta/services_.rst new file mode 100644 index 000000000000..1c502de92ff3 --- /dev/null +++ b/packages/google-shopping-merchant-accounts/docs/merchant_accounts_v1beta/services_.rst @@ -0,0 +1,19 @@ +Services for Google Shopping Merchant Accounts v1beta API +========================================================= +.. toctree:: + :maxdepth: 2 + + account_issue_service + accounts_service + account_tax_service + business_identity_service + business_info_service + email_preferences_service + homepage_service + online_return_policy_service + programs_service + regions_service + shipping_settings_service + terms_of_service_agreement_state_service + terms_of_service_service + user_service diff --git a/packages/google-shopping-merchant-accounts/docs/merchant_accounts_v1beta/shipping_settings_service.rst b/packages/google-shopping-merchant-accounts/docs/merchant_accounts_v1beta/shipping_settings_service.rst new file mode 100644 index 000000000000..95f333afb030 --- /dev/null +++ b/packages/google-shopping-merchant-accounts/docs/merchant_accounts_v1beta/shipping_settings_service.rst @@ -0,0 +1,6 @@ +ShippingSettingsService +----------------------------------------- + +.. automodule:: google.shopping.merchant_accounts_v1beta.services.shipping_settings_service + :members: + :inherited-members: diff --git a/packages/google-shopping-merchant-accounts/docs/merchant_accounts_v1beta/terms_of_service_agreement_state_service.rst b/packages/google-shopping-merchant-accounts/docs/merchant_accounts_v1beta/terms_of_service_agreement_state_service.rst new file mode 100644 index 000000000000..ea377121c221 --- /dev/null +++ b/packages/google-shopping-merchant-accounts/docs/merchant_accounts_v1beta/terms_of_service_agreement_state_service.rst @@ -0,0 +1,6 @@ +TermsOfServiceAgreementStateService +----------------------------------------------------- + +.. automodule:: google.shopping.merchant_accounts_v1beta.services.terms_of_service_agreement_state_service + :members: + :inherited-members: diff --git a/packages/google-shopping-merchant-accounts/docs/merchant_accounts_v1beta/terms_of_service_service.rst b/packages/google-shopping-merchant-accounts/docs/merchant_accounts_v1beta/terms_of_service_service.rst new file mode 100644 index 000000000000..2aae813b00c9 --- /dev/null +++ b/packages/google-shopping-merchant-accounts/docs/merchant_accounts_v1beta/terms_of_service_service.rst @@ -0,0 +1,6 @@ +TermsOfServiceService +--------------------------------------- + +.. automodule:: google.shopping.merchant_accounts_v1beta.services.terms_of_service_service + :members: + :inherited-members: diff --git a/packages/google-shopping-merchant-accounts/docs/merchant_accounts_v1beta/types_.rst b/packages/google-shopping-merchant-accounts/docs/merchant_accounts_v1beta/types_.rst new file mode 100644 index 000000000000..3a0ebb568c5a --- /dev/null +++ b/packages/google-shopping-merchant-accounts/docs/merchant_accounts_v1beta/types_.rst @@ -0,0 +1,6 @@ +Types for Google Shopping Merchant Accounts v1beta API +====================================================== + +.. 
automodule:: google.shopping.merchant_accounts_v1beta.types + :members: + :show-inheritance: diff --git a/packages/google-shopping-merchant-accounts/docs/merchant_accounts_v1beta/user_service.rst b/packages/google-shopping-merchant-accounts/docs/merchant_accounts_v1beta/user_service.rst new file mode 100644 index 000000000000..4179b3c56145 --- /dev/null +++ b/packages/google-shopping-merchant-accounts/docs/merchant_accounts_v1beta/user_service.rst @@ -0,0 +1,10 @@ +UserService +----------------------------- + +.. automodule:: google.shopping.merchant_accounts_v1beta.services.user_service + :members: + :inherited-members: + +.. automodule:: google.shopping.merchant_accounts_v1beta.services.user_service.pagers + :members: + :inherited-members: diff --git a/packages/google-shopping-merchant-accounts/docs/multiprocessing.rst b/packages/google-shopping-merchant-accounts/docs/multiprocessing.rst new file mode 100644 index 000000000000..536d17b2ea65 --- /dev/null +++ b/packages/google-shopping-merchant-accounts/docs/multiprocessing.rst @@ -0,0 +1,7 @@ +.. note:: + + Because this client uses :mod:`grpc` library, it is safe to + share instances across threads. In multiprocessing scenarios, the best + practice is to create client instances *after* the invocation of + :func:`os.fork` by :class:`multiprocessing.pool.Pool` or + :class:`multiprocessing.Process`. diff --git a/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts/__init__.py b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts/__init__.py new file mode 100644 index 000000000000..97bb9de71cdf --- /dev/null +++ b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts/__init__.py @@ -0,0 +1,349 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
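The ``multiprocessing.rst`` note added above is worth a concrete illustration. Below is a minimal sketch (not part of the generated package) of the recommended pattern: each worker builds its own client *after* the fork performed by ``multiprocessing.Pool``, so no gRPC channel is shared across process boundaries. The account names are placeholders and Application Default Credentials are assumed.

```python
# Sketch: fork-safe use of a generated client (assumes ADC is configured;
# the account names below are placeholders).
import multiprocessing

from google.shopping import merchant_accounts_v1beta

_client = None  # one client per worker process


def _init_worker():
    # Runs inside each worker after the fork, so the gRPC channel is created
    # in the child process instead of being inherited from the parent.
    global _client
    _client = merchant_accounts_v1beta.UserServiceClient()


def _count_users(account: str) -> int:
    request = merchant_accounts_v1beta.ListUsersRequest(parent=account)
    # The pager transparently fetches additional pages while iterating.
    return sum(1 for _ in _client.list_users(request=request))


if __name__ == "__main__":
    with multiprocessing.Pool(processes=2, initializer=_init_worker) as pool:
        print(pool.map(_count_users, ["accounts/123", "accounts/456"]))
```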
+# +from google.shopping.merchant_accounts import gapic_version as package_version + +__version__ = package_version.__version__ + + +from google.shopping.merchant_accounts_v1beta.services.account_issue_service.async_client import ( + AccountIssueServiceAsyncClient, +) +from google.shopping.merchant_accounts_v1beta.services.account_issue_service.client import ( + AccountIssueServiceClient, +) +from google.shopping.merchant_accounts_v1beta.services.account_tax_service.async_client import ( + AccountTaxServiceAsyncClient, +) +from google.shopping.merchant_accounts_v1beta.services.account_tax_service.client import ( + AccountTaxServiceClient, +) +from google.shopping.merchant_accounts_v1beta.services.accounts_service.async_client import ( + AccountsServiceAsyncClient, +) +from google.shopping.merchant_accounts_v1beta.services.accounts_service.client import ( + AccountsServiceClient, +) +from google.shopping.merchant_accounts_v1beta.services.business_identity_service.async_client import ( + BusinessIdentityServiceAsyncClient, +) +from google.shopping.merchant_accounts_v1beta.services.business_identity_service.client import ( + BusinessIdentityServiceClient, +) +from google.shopping.merchant_accounts_v1beta.services.business_info_service.async_client import ( + BusinessInfoServiceAsyncClient, +) +from google.shopping.merchant_accounts_v1beta.services.business_info_service.client import ( + BusinessInfoServiceClient, +) +from google.shopping.merchant_accounts_v1beta.services.email_preferences_service.async_client import ( + EmailPreferencesServiceAsyncClient, +) +from google.shopping.merchant_accounts_v1beta.services.email_preferences_service.client import ( + EmailPreferencesServiceClient, +) +from google.shopping.merchant_accounts_v1beta.services.homepage_service.async_client import ( + HomepageServiceAsyncClient, +) +from google.shopping.merchant_accounts_v1beta.services.homepage_service.client import ( + HomepageServiceClient, +) +from google.shopping.merchant_accounts_v1beta.services.online_return_policy_service.async_client import ( + OnlineReturnPolicyServiceAsyncClient, +) +from google.shopping.merchant_accounts_v1beta.services.online_return_policy_service.client import ( + OnlineReturnPolicyServiceClient, +) +from google.shopping.merchant_accounts_v1beta.services.programs_service.async_client import ( + ProgramsServiceAsyncClient, +) +from google.shopping.merchant_accounts_v1beta.services.programs_service.client import ( + ProgramsServiceClient, +) +from google.shopping.merchant_accounts_v1beta.services.regions_service.async_client import ( + RegionsServiceAsyncClient, +) +from google.shopping.merchant_accounts_v1beta.services.regions_service.client import ( + RegionsServiceClient, +) +from google.shopping.merchant_accounts_v1beta.services.shipping_settings_service.async_client import ( + ShippingSettingsServiceAsyncClient, +) +from google.shopping.merchant_accounts_v1beta.services.shipping_settings_service.client import ( + ShippingSettingsServiceClient, +) +from google.shopping.merchant_accounts_v1beta.services.terms_of_service_agreement_state_service.async_client import ( + TermsOfServiceAgreementStateServiceAsyncClient, +) +from google.shopping.merchant_accounts_v1beta.services.terms_of_service_agreement_state_service.client import ( + TermsOfServiceAgreementStateServiceClient, +) +from google.shopping.merchant_accounts_v1beta.services.terms_of_service_service.async_client import ( + TermsOfServiceServiceAsyncClient, +) +from 
google.shopping.merchant_accounts_v1beta.services.terms_of_service_service.client import ( + TermsOfServiceServiceClient, +) +from google.shopping.merchant_accounts_v1beta.services.user_service.async_client import ( + UserServiceAsyncClient, +) +from google.shopping.merchant_accounts_v1beta.services.user_service.client import ( + UserServiceClient, +) +from google.shopping.merchant_accounts_v1beta.types.accessright import AccessRight +from google.shopping.merchant_accounts_v1beta.types.account_tax import ( + AccountTax, + GetAccountTaxRequest, + ListAccountTaxRequest, + ListAccountTaxResponse, + UpdateAccountTaxRequest, +) +from google.shopping.merchant_accounts_v1beta.types.accountissue import ( + AccountIssue, + ListAccountIssuesRequest, + ListAccountIssuesResponse, +) +from google.shopping.merchant_accounts_v1beta.types.accounts import ( + Account, + CreateAndConfigureAccountRequest, + DeleteAccountRequest, + GetAccountRequest, + ListAccountsRequest, + ListAccountsResponse, + ListSubAccountsRequest, + ListSubAccountsResponse, + UpdateAccountRequest, +) +from google.shopping.merchant_accounts_v1beta.types.businessidentity import ( + BusinessIdentity, + GetBusinessIdentityRequest, + UpdateBusinessIdentityRequest, +) +from google.shopping.merchant_accounts_v1beta.types.businessinfo import ( + BusinessInfo, + GetBusinessInfoRequest, + UpdateBusinessInfoRequest, +) +from google.shopping.merchant_accounts_v1beta.types.customerservice import ( + CustomerService, +) +from google.shopping.merchant_accounts_v1beta.types.emailpreferences import ( + EmailPreferences, + GetEmailPreferencesRequest, + UpdateEmailPreferencesRequest, +) +from google.shopping.merchant_accounts_v1beta.types.homepage import ( + ClaimHomepageRequest, + GetHomepageRequest, + Homepage, + UnclaimHomepageRequest, + UpdateHomepageRequest, +) +from google.shopping.merchant_accounts_v1beta.types.online_return_policy import ( + GetOnlineReturnPolicyRequest, + ListOnlineReturnPoliciesRequest, + ListOnlineReturnPoliciesResponse, + OnlineReturnPolicy, +) +from google.shopping.merchant_accounts_v1beta.types.phoneverificationstate import ( + PhoneVerificationState, +) +from google.shopping.merchant_accounts_v1beta.types.programs import ( + DisableProgramRequest, + EnableProgramRequest, + GetProgramRequest, + ListProgramsRequest, + ListProgramsResponse, + Program, +) +from google.shopping.merchant_accounts_v1beta.types.regions import ( + CreateRegionRequest, + DeleteRegionRequest, + GetRegionRequest, + ListRegionsRequest, + ListRegionsResponse, + Region, + UpdateRegionRequest, +) +from google.shopping.merchant_accounts_v1beta.types.shippingsettings import ( + Address, + BusinessDayConfig, + CarrierRate, + CutoffTime, + DeliveryTime, + Distance, + GetShippingSettingsRequest, + Headers, + InsertShippingSettingsRequest, + LocationIdSet, + MinimumOrderValueTable, + RateGroup, + Row, + Service, + ShippingSettings, + Table, + TransitTable, + Value, + Warehouse, + WarehouseBasedDeliveryTime, + WarehouseCutoffTime, +) +from google.shopping.merchant_accounts_v1beta.types.tax_rule import TaxRule +from google.shopping.merchant_accounts_v1beta.types.termsofservice import ( + AcceptTermsOfServiceRequest, + GetTermsOfServiceRequest, + RetrieveLatestTermsOfServiceRequest, + TermsOfService, +) +from google.shopping.merchant_accounts_v1beta.types.termsofserviceagreementstate import ( + Accepted, + GetTermsOfServiceAgreementStateRequest, + Required, + RetrieveForApplicationTermsOfServiceAgreementStateRequest, + TermsOfServiceAgreementState, +) +from 
google.shopping.merchant_accounts_v1beta.types.termsofservicekind import ( + TermsOfServiceKind, +) +from google.shopping.merchant_accounts_v1beta.types.user import ( + CreateUserRequest, + DeleteUserRequest, + GetUserRequest, + ListUsersRequest, + ListUsersResponse, + UpdateUserRequest, + User, +) + +__all__ = ( + "AccountIssueServiceClient", + "AccountIssueServiceAsyncClient", + "AccountsServiceClient", + "AccountsServiceAsyncClient", + "AccountTaxServiceClient", + "AccountTaxServiceAsyncClient", + "BusinessIdentityServiceClient", + "BusinessIdentityServiceAsyncClient", + "BusinessInfoServiceClient", + "BusinessInfoServiceAsyncClient", + "EmailPreferencesServiceClient", + "EmailPreferencesServiceAsyncClient", + "HomepageServiceClient", + "HomepageServiceAsyncClient", + "OnlineReturnPolicyServiceClient", + "OnlineReturnPolicyServiceAsyncClient", + "ProgramsServiceClient", + "ProgramsServiceAsyncClient", + "RegionsServiceClient", + "RegionsServiceAsyncClient", + "ShippingSettingsServiceClient", + "ShippingSettingsServiceAsyncClient", + "TermsOfServiceAgreementStateServiceClient", + "TermsOfServiceAgreementStateServiceAsyncClient", + "TermsOfServiceServiceClient", + "TermsOfServiceServiceAsyncClient", + "UserServiceClient", + "UserServiceAsyncClient", + "AccessRight", + "AccountTax", + "GetAccountTaxRequest", + "ListAccountTaxRequest", + "ListAccountTaxResponse", + "UpdateAccountTaxRequest", + "AccountIssue", + "ListAccountIssuesRequest", + "ListAccountIssuesResponse", + "Account", + "CreateAndConfigureAccountRequest", + "DeleteAccountRequest", + "GetAccountRequest", + "ListAccountsRequest", + "ListAccountsResponse", + "ListSubAccountsRequest", + "ListSubAccountsResponse", + "UpdateAccountRequest", + "BusinessIdentity", + "GetBusinessIdentityRequest", + "UpdateBusinessIdentityRequest", + "BusinessInfo", + "GetBusinessInfoRequest", + "UpdateBusinessInfoRequest", + "CustomerService", + "EmailPreferences", + "GetEmailPreferencesRequest", + "UpdateEmailPreferencesRequest", + "ClaimHomepageRequest", + "GetHomepageRequest", + "Homepage", + "UnclaimHomepageRequest", + "UpdateHomepageRequest", + "GetOnlineReturnPolicyRequest", + "ListOnlineReturnPoliciesRequest", + "ListOnlineReturnPoliciesResponse", + "OnlineReturnPolicy", + "PhoneVerificationState", + "DisableProgramRequest", + "EnableProgramRequest", + "GetProgramRequest", + "ListProgramsRequest", + "ListProgramsResponse", + "Program", + "CreateRegionRequest", + "DeleteRegionRequest", + "GetRegionRequest", + "ListRegionsRequest", + "ListRegionsResponse", + "Region", + "UpdateRegionRequest", + "Address", + "BusinessDayConfig", + "CarrierRate", + "CutoffTime", + "DeliveryTime", + "Distance", + "GetShippingSettingsRequest", + "Headers", + "InsertShippingSettingsRequest", + "LocationIdSet", + "MinimumOrderValueTable", + "RateGroup", + "Row", + "Service", + "ShippingSettings", + "Table", + "TransitTable", + "Value", + "Warehouse", + "WarehouseBasedDeliveryTime", + "WarehouseCutoffTime", + "TaxRule", + "AcceptTermsOfServiceRequest", + "GetTermsOfServiceRequest", + "RetrieveLatestTermsOfServiceRequest", + "TermsOfService", + "Accepted", + "GetTermsOfServiceAgreementStateRequest", + "Required", + "RetrieveForApplicationTermsOfServiceAgreementStateRequest", + "TermsOfServiceAgreementState", + "TermsOfServiceKind", + "CreateUserRequest", + "DeleteUserRequest", + "GetUserRequest", + "ListUsersRequest", + "ListUsersResponse", + "UpdateUserRequest", + "User", +) diff --git 
a/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts/gapic_version.py b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts/gapic_version.py new file mode 100644 index 000000000000..0c7cc68730c4 --- /dev/null +++ b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts/gapic_version.py @@ -0,0 +1,16 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +__version__ = "0.1.1" # {x-release-please-version} diff --git a/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts/py.typed b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts/py.typed new file mode 100644 index 000000000000..19aa2588b0f7 --- /dev/null +++ b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts/py.typed @@ -0,0 +1,2 @@ +# Marker file for PEP 561. +# The google-shopping-merchant-accounts package uses inline types. diff --git a/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/__init__.py b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/__init__.py new file mode 100644 index 000000000000..645cb37080cd --- /dev/null +++ b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/__init__.py @@ -0,0 +1,300 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
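A brief note on the package layout introduced above: the top-level ``google.shopping.merchant_accounts`` package simply re-exports the ``merchant_accounts_v1beta`` surface and carries the same ``gapic_version``. A small sketch (illustrative only) of what that means for callers:

```python
# Sketch: the top-level package and the versioned package expose the same
# client classes, so either import path can be used interchangeably.
from google.shopping import merchant_accounts, merchant_accounts_v1beta

assert (
    merchant_accounts.AccountsServiceClient
    is merchant_accounts_v1beta.AccountsServiceClient
)
print(merchant_accounts.__version__)  # "0.1.1" per gapic_version.py above
```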
+# +from google.shopping.merchant_accounts_v1beta import gapic_version as package_version + +__version__ = package_version.__version__ + + +from .services.account_issue_service import ( + AccountIssueServiceAsyncClient, + AccountIssueServiceClient, +) +from .services.account_tax_service import ( + AccountTaxServiceAsyncClient, + AccountTaxServiceClient, +) +from .services.accounts_service import AccountsServiceAsyncClient, AccountsServiceClient +from .services.business_identity_service import ( + BusinessIdentityServiceAsyncClient, + BusinessIdentityServiceClient, +) +from .services.business_info_service import ( + BusinessInfoServiceAsyncClient, + BusinessInfoServiceClient, +) +from .services.email_preferences_service import ( + EmailPreferencesServiceAsyncClient, + EmailPreferencesServiceClient, +) +from .services.homepage_service import HomepageServiceAsyncClient, HomepageServiceClient +from .services.online_return_policy_service import ( + OnlineReturnPolicyServiceAsyncClient, + OnlineReturnPolicyServiceClient, +) +from .services.programs_service import ProgramsServiceAsyncClient, ProgramsServiceClient +from .services.regions_service import RegionsServiceAsyncClient, RegionsServiceClient +from .services.shipping_settings_service import ( + ShippingSettingsServiceAsyncClient, + ShippingSettingsServiceClient, +) +from .services.terms_of_service_agreement_state_service import ( + TermsOfServiceAgreementStateServiceAsyncClient, + TermsOfServiceAgreementStateServiceClient, +) +from .services.terms_of_service_service import ( + TermsOfServiceServiceAsyncClient, + TermsOfServiceServiceClient, +) +from .services.user_service import UserServiceAsyncClient, UserServiceClient +from .types.accessright import AccessRight +from .types.account_tax import ( + AccountTax, + GetAccountTaxRequest, + ListAccountTaxRequest, + ListAccountTaxResponse, + UpdateAccountTaxRequest, +) +from .types.accountissue import ( + AccountIssue, + ListAccountIssuesRequest, + ListAccountIssuesResponse, +) +from .types.accounts import ( + Account, + CreateAndConfigureAccountRequest, + DeleteAccountRequest, + GetAccountRequest, + ListAccountsRequest, + ListAccountsResponse, + ListSubAccountsRequest, + ListSubAccountsResponse, + UpdateAccountRequest, +) +from .types.businessidentity import ( + BusinessIdentity, + GetBusinessIdentityRequest, + UpdateBusinessIdentityRequest, +) +from .types.businessinfo import ( + BusinessInfo, + GetBusinessInfoRequest, + UpdateBusinessInfoRequest, +) +from .types.customerservice import CustomerService +from .types.emailpreferences import ( + EmailPreferences, + GetEmailPreferencesRequest, + UpdateEmailPreferencesRequest, +) +from .types.homepage import ( + ClaimHomepageRequest, + GetHomepageRequest, + Homepage, + UnclaimHomepageRequest, + UpdateHomepageRequest, +) +from .types.online_return_policy import ( + GetOnlineReturnPolicyRequest, + ListOnlineReturnPoliciesRequest, + ListOnlineReturnPoliciesResponse, + OnlineReturnPolicy, +) +from .types.phoneverificationstate import PhoneVerificationState +from .types.programs import ( + DisableProgramRequest, + EnableProgramRequest, + GetProgramRequest, + ListProgramsRequest, + ListProgramsResponse, + Program, +) +from .types.regions import ( + CreateRegionRequest, + DeleteRegionRequest, + GetRegionRequest, + ListRegionsRequest, + ListRegionsResponse, + Region, + UpdateRegionRequest, +) +from .types.shippingsettings import ( + Address, + BusinessDayConfig, + CarrierRate, + CutoffTime, + DeliveryTime, + Distance, + GetShippingSettingsRequest, + Headers, 
+ InsertShippingSettingsRequest, + LocationIdSet, + MinimumOrderValueTable, + RateGroup, + Row, + Service, + ShippingSettings, + Table, + TransitTable, + Value, + Warehouse, + WarehouseBasedDeliveryTime, + WarehouseCutoffTime, +) +from .types.tax_rule import TaxRule +from .types.termsofservice import ( + AcceptTermsOfServiceRequest, + GetTermsOfServiceRequest, + RetrieveLatestTermsOfServiceRequest, + TermsOfService, +) +from .types.termsofserviceagreementstate import ( + Accepted, + GetTermsOfServiceAgreementStateRequest, + Required, + RetrieveForApplicationTermsOfServiceAgreementStateRequest, + TermsOfServiceAgreementState, +) +from .types.termsofservicekind import TermsOfServiceKind +from .types.user import ( + CreateUserRequest, + DeleteUserRequest, + GetUserRequest, + ListUsersRequest, + ListUsersResponse, + UpdateUserRequest, + User, +) + +__all__ = ( + "AccountIssueServiceAsyncClient", + "AccountTaxServiceAsyncClient", + "AccountsServiceAsyncClient", + "BusinessIdentityServiceAsyncClient", + "BusinessInfoServiceAsyncClient", + "EmailPreferencesServiceAsyncClient", + "HomepageServiceAsyncClient", + "OnlineReturnPolicyServiceAsyncClient", + "ProgramsServiceAsyncClient", + "RegionsServiceAsyncClient", + "ShippingSettingsServiceAsyncClient", + "TermsOfServiceAgreementStateServiceAsyncClient", + "TermsOfServiceServiceAsyncClient", + "UserServiceAsyncClient", + "AcceptTermsOfServiceRequest", + "Accepted", + "AccessRight", + "Account", + "AccountIssue", + "AccountIssueServiceClient", + "AccountTax", + "AccountTaxServiceClient", + "AccountsServiceClient", + "Address", + "BusinessDayConfig", + "BusinessIdentity", + "BusinessIdentityServiceClient", + "BusinessInfo", + "BusinessInfoServiceClient", + "CarrierRate", + "ClaimHomepageRequest", + "CreateAndConfigureAccountRequest", + "CreateRegionRequest", + "CreateUserRequest", + "CustomerService", + "CutoffTime", + "DeleteAccountRequest", + "DeleteRegionRequest", + "DeleteUserRequest", + "DeliveryTime", + "DisableProgramRequest", + "Distance", + "EmailPreferences", + "EmailPreferencesServiceClient", + "EnableProgramRequest", + "GetAccountRequest", + "GetAccountTaxRequest", + "GetBusinessIdentityRequest", + "GetBusinessInfoRequest", + "GetEmailPreferencesRequest", + "GetHomepageRequest", + "GetOnlineReturnPolicyRequest", + "GetProgramRequest", + "GetRegionRequest", + "GetShippingSettingsRequest", + "GetTermsOfServiceAgreementStateRequest", + "GetTermsOfServiceRequest", + "GetUserRequest", + "Headers", + "Homepage", + "HomepageServiceClient", + "InsertShippingSettingsRequest", + "ListAccountIssuesRequest", + "ListAccountIssuesResponse", + "ListAccountTaxRequest", + "ListAccountTaxResponse", + "ListAccountsRequest", + "ListAccountsResponse", + "ListOnlineReturnPoliciesRequest", + "ListOnlineReturnPoliciesResponse", + "ListProgramsRequest", + "ListProgramsResponse", + "ListRegionsRequest", + "ListRegionsResponse", + "ListSubAccountsRequest", + "ListSubAccountsResponse", + "ListUsersRequest", + "ListUsersResponse", + "LocationIdSet", + "MinimumOrderValueTable", + "OnlineReturnPolicy", + "OnlineReturnPolicyServiceClient", + "PhoneVerificationState", + "Program", + "ProgramsServiceClient", + "RateGroup", + "Region", + "RegionsServiceClient", + "Required", + "RetrieveForApplicationTermsOfServiceAgreementStateRequest", + "RetrieveLatestTermsOfServiceRequest", + "Row", + "Service", + "ShippingSettings", + "ShippingSettingsServiceClient", + "Table", + "TaxRule", + "TermsOfService", + "TermsOfServiceAgreementState", + 
"TermsOfServiceAgreementStateServiceClient", + "TermsOfServiceKind", + "TermsOfServiceServiceClient", + "TransitTable", + "UnclaimHomepageRequest", + "UpdateAccountRequest", + "UpdateAccountTaxRequest", + "UpdateBusinessIdentityRequest", + "UpdateBusinessInfoRequest", + "UpdateEmailPreferencesRequest", + "UpdateHomepageRequest", + "UpdateRegionRequest", + "UpdateUserRequest", + "User", + "UserServiceClient", + "Value", + "Warehouse", + "WarehouseBasedDeliveryTime", + "WarehouseCutoffTime", +) diff --git a/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/gapic_metadata.json b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/gapic_metadata.json new file mode 100644 index 000000000000..3823aac9f4fa --- /dev/null +++ b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/gapic_metadata.json @@ -0,0 +1,920 @@ + { + "comment": "This file maps proto services/RPCs to the corresponding library clients/methods", + "language": "python", + "libraryPackage": "google.shopping.merchant_accounts_v1beta", + "protoPackage": "google.shopping.merchant.accounts.v1beta", + "schema": "1.0", + "services": { + "AccountIssueService": { + "clients": { + "grpc": { + "libraryClient": "AccountIssueServiceClient", + "rpcs": { + "ListAccountIssues": { + "methods": [ + "list_account_issues" + ] + } + } + }, + "grpc-async": { + "libraryClient": "AccountIssueServiceAsyncClient", + "rpcs": { + "ListAccountIssues": { + "methods": [ + "list_account_issues" + ] + } + } + }, + "rest": { + "libraryClient": "AccountIssueServiceClient", + "rpcs": { + "ListAccountIssues": { + "methods": [ + "list_account_issues" + ] + } + } + } + } + }, + "AccountTaxService": { + "clients": { + "grpc": { + "libraryClient": "AccountTaxServiceClient", + "rpcs": { + "GetAccountTax": { + "methods": [ + "get_account_tax" + ] + }, + "ListAccountTax": { + "methods": [ + "list_account_tax" + ] + }, + "UpdateAccountTax": { + "methods": [ + "update_account_tax" + ] + } + } + }, + "grpc-async": { + "libraryClient": "AccountTaxServiceAsyncClient", + "rpcs": { + "GetAccountTax": { + "methods": [ + "get_account_tax" + ] + }, + "ListAccountTax": { + "methods": [ + "list_account_tax" + ] + }, + "UpdateAccountTax": { + "methods": [ + "update_account_tax" + ] + } + } + }, + "rest": { + "libraryClient": "AccountTaxServiceClient", + "rpcs": { + "GetAccountTax": { + "methods": [ + "get_account_tax" + ] + }, + "ListAccountTax": { + "methods": [ + "list_account_tax" + ] + }, + "UpdateAccountTax": { + "methods": [ + "update_account_tax" + ] + } + } + } + } + }, + "AccountsService": { + "clients": { + "grpc": { + "libraryClient": "AccountsServiceClient", + "rpcs": { + "CreateAndConfigureAccount": { + "methods": [ + "create_and_configure_account" + ] + }, + "DeleteAccount": { + "methods": [ + "delete_account" + ] + }, + "GetAccount": { + "methods": [ + "get_account" + ] + }, + "ListAccounts": { + "methods": [ + "list_accounts" + ] + }, + "ListSubAccounts": { + "methods": [ + "list_sub_accounts" + ] + }, + "UpdateAccount": { + "methods": [ + "update_account" + ] + } + } + }, + "grpc-async": { + "libraryClient": "AccountsServiceAsyncClient", + "rpcs": { + "CreateAndConfigureAccount": { + "methods": [ + "create_and_configure_account" + ] + }, + "DeleteAccount": { + "methods": [ + "delete_account" + ] + }, + "GetAccount": { + "methods": [ + "get_account" + ] + }, + "ListAccounts": { + "methods": [ + "list_accounts" + ] + }, + "ListSubAccounts": { + "methods": [ + 
"list_sub_accounts" + ] + }, + "UpdateAccount": { + "methods": [ + "update_account" + ] + } + } + }, + "rest": { + "libraryClient": "AccountsServiceClient", + "rpcs": { + "CreateAndConfigureAccount": { + "methods": [ + "create_and_configure_account" + ] + }, + "DeleteAccount": { + "methods": [ + "delete_account" + ] + }, + "GetAccount": { + "methods": [ + "get_account" + ] + }, + "ListAccounts": { + "methods": [ + "list_accounts" + ] + }, + "ListSubAccounts": { + "methods": [ + "list_sub_accounts" + ] + }, + "UpdateAccount": { + "methods": [ + "update_account" + ] + } + } + } + } + }, + "BusinessIdentityService": { + "clients": { + "grpc": { + "libraryClient": "BusinessIdentityServiceClient", + "rpcs": { + "GetBusinessIdentity": { + "methods": [ + "get_business_identity" + ] + }, + "UpdateBusinessIdentity": { + "methods": [ + "update_business_identity" + ] + } + } + }, + "grpc-async": { + "libraryClient": "BusinessIdentityServiceAsyncClient", + "rpcs": { + "GetBusinessIdentity": { + "methods": [ + "get_business_identity" + ] + }, + "UpdateBusinessIdentity": { + "methods": [ + "update_business_identity" + ] + } + } + }, + "rest": { + "libraryClient": "BusinessIdentityServiceClient", + "rpcs": { + "GetBusinessIdentity": { + "methods": [ + "get_business_identity" + ] + }, + "UpdateBusinessIdentity": { + "methods": [ + "update_business_identity" + ] + } + } + } + } + }, + "BusinessInfoService": { + "clients": { + "grpc": { + "libraryClient": "BusinessInfoServiceClient", + "rpcs": { + "GetBusinessInfo": { + "methods": [ + "get_business_info" + ] + }, + "UpdateBusinessInfo": { + "methods": [ + "update_business_info" + ] + } + } + }, + "grpc-async": { + "libraryClient": "BusinessInfoServiceAsyncClient", + "rpcs": { + "GetBusinessInfo": { + "methods": [ + "get_business_info" + ] + }, + "UpdateBusinessInfo": { + "methods": [ + "update_business_info" + ] + } + } + }, + "rest": { + "libraryClient": "BusinessInfoServiceClient", + "rpcs": { + "GetBusinessInfo": { + "methods": [ + "get_business_info" + ] + }, + "UpdateBusinessInfo": { + "methods": [ + "update_business_info" + ] + } + } + } + } + }, + "EmailPreferencesService": { + "clients": { + "grpc": { + "libraryClient": "EmailPreferencesServiceClient", + "rpcs": { + "GetEmailPreferences": { + "methods": [ + "get_email_preferences" + ] + }, + "UpdateEmailPreferences": { + "methods": [ + "update_email_preferences" + ] + } + } + }, + "grpc-async": { + "libraryClient": "EmailPreferencesServiceAsyncClient", + "rpcs": { + "GetEmailPreferences": { + "methods": [ + "get_email_preferences" + ] + }, + "UpdateEmailPreferences": { + "methods": [ + "update_email_preferences" + ] + } + } + }, + "rest": { + "libraryClient": "EmailPreferencesServiceClient", + "rpcs": { + "GetEmailPreferences": { + "methods": [ + "get_email_preferences" + ] + }, + "UpdateEmailPreferences": { + "methods": [ + "update_email_preferences" + ] + } + } + } + } + }, + "HomepageService": { + "clients": { + "grpc": { + "libraryClient": "HomepageServiceClient", + "rpcs": { + "ClaimHomepage": { + "methods": [ + "claim_homepage" + ] + }, + "GetHomepage": { + "methods": [ + "get_homepage" + ] + }, + "UnclaimHomepage": { + "methods": [ + "unclaim_homepage" + ] + }, + "UpdateHomepage": { + "methods": [ + "update_homepage" + ] + } + } + }, + "grpc-async": { + "libraryClient": "HomepageServiceAsyncClient", + "rpcs": { + "ClaimHomepage": { + "methods": [ + "claim_homepage" + ] + }, + "GetHomepage": { + "methods": [ + "get_homepage" + ] + }, + "UnclaimHomepage": { + "methods": [ + "unclaim_homepage" + 
] + }, + "UpdateHomepage": { + "methods": [ + "update_homepage" + ] + } + } + }, + "rest": { + "libraryClient": "HomepageServiceClient", + "rpcs": { + "ClaimHomepage": { + "methods": [ + "claim_homepage" + ] + }, + "GetHomepage": { + "methods": [ + "get_homepage" + ] + }, + "UnclaimHomepage": { + "methods": [ + "unclaim_homepage" + ] + }, + "UpdateHomepage": { + "methods": [ + "update_homepage" + ] + } + } + } + } + }, + "OnlineReturnPolicyService": { + "clients": { + "grpc": { + "libraryClient": "OnlineReturnPolicyServiceClient", + "rpcs": { + "GetOnlineReturnPolicy": { + "methods": [ + "get_online_return_policy" + ] + }, + "ListOnlineReturnPolicies": { + "methods": [ + "list_online_return_policies" + ] + } + } + }, + "grpc-async": { + "libraryClient": "OnlineReturnPolicyServiceAsyncClient", + "rpcs": { + "GetOnlineReturnPolicy": { + "methods": [ + "get_online_return_policy" + ] + }, + "ListOnlineReturnPolicies": { + "methods": [ + "list_online_return_policies" + ] + } + } + }, + "rest": { + "libraryClient": "OnlineReturnPolicyServiceClient", + "rpcs": { + "GetOnlineReturnPolicy": { + "methods": [ + "get_online_return_policy" + ] + }, + "ListOnlineReturnPolicies": { + "methods": [ + "list_online_return_policies" + ] + } + } + } + } + }, + "ProgramsService": { + "clients": { + "grpc": { + "libraryClient": "ProgramsServiceClient", + "rpcs": { + "DisableProgram": { + "methods": [ + "disable_program" + ] + }, + "EnableProgram": { + "methods": [ + "enable_program" + ] + }, + "GetProgram": { + "methods": [ + "get_program" + ] + }, + "ListPrograms": { + "methods": [ + "list_programs" + ] + } + } + }, + "grpc-async": { + "libraryClient": "ProgramsServiceAsyncClient", + "rpcs": { + "DisableProgram": { + "methods": [ + "disable_program" + ] + }, + "EnableProgram": { + "methods": [ + "enable_program" + ] + }, + "GetProgram": { + "methods": [ + "get_program" + ] + }, + "ListPrograms": { + "methods": [ + "list_programs" + ] + } + } + }, + "rest": { + "libraryClient": "ProgramsServiceClient", + "rpcs": { + "DisableProgram": { + "methods": [ + "disable_program" + ] + }, + "EnableProgram": { + "methods": [ + "enable_program" + ] + }, + "GetProgram": { + "methods": [ + "get_program" + ] + }, + "ListPrograms": { + "methods": [ + "list_programs" + ] + } + } + } + } + }, + "RegionsService": { + "clients": { + "grpc": { + "libraryClient": "RegionsServiceClient", + "rpcs": { + "CreateRegion": { + "methods": [ + "create_region" + ] + }, + "DeleteRegion": { + "methods": [ + "delete_region" + ] + }, + "GetRegion": { + "methods": [ + "get_region" + ] + }, + "ListRegions": { + "methods": [ + "list_regions" + ] + }, + "UpdateRegion": { + "methods": [ + "update_region" + ] + } + } + }, + "grpc-async": { + "libraryClient": "RegionsServiceAsyncClient", + "rpcs": { + "CreateRegion": { + "methods": [ + "create_region" + ] + }, + "DeleteRegion": { + "methods": [ + "delete_region" + ] + }, + "GetRegion": { + "methods": [ + "get_region" + ] + }, + "ListRegions": { + "methods": [ + "list_regions" + ] + }, + "UpdateRegion": { + "methods": [ + "update_region" + ] + } + } + }, + "rest": { + "libraryClient": "RegionsServiceClient", + "rpcs": { + "CreateRegion": { + "methods": [ + "create_region" + ] + }, + "DeleteRegion": { + "methods": [ + "delete_region" + ] + }, + "GetRegion": { + "methods": [ + "get_region" + ] + }, + "ListRegions": { + "methods": [ + "list_regions" + ] + }, + "UpdateRegion": { + "methods": [ + "update_region" + ] + } + } + } + } + }, + "ShippingSettingsService": { + "clients": { + "grpc": { + 
"libraryClient": "ShippingSettingsServiceClient", + "rpcs": { + "GetShippingSettings": { + "methods": [ + "get_shipping_settings" + ] + }, + "InsertShippingSettings": { + "methods": [ + "insert_shipping_settings" + ] + } + } + }, + "grpc-async": { + "libraryClient": "ShippingSettingsServiceAsyncClient", + "rpcs": { + "GetShippingSettings": { + "methods": [ + "get_shipping_settings" + ] + }, + "InsertShippingSettings": { + "methods": [ + "insert_shipping_settings" + ] + } + } + }, + "rest": { + "libraryClient": "ShippingSettingsServiceClient", + "rpcs": { + "GetShippingSettings": { + "methods": [ + "get_shipping_settings" + ] + }, + "InsertShippingSettings": { + "methods": [ + "insert_shipping_settings" + ] + } + } + } + } + }, + "TermsOfServiceAgreementStateService": { + "clients": { + "grpc": { + "libraryClient": "TermsOfServiceAgreementStateServiceClient", + "rpcs": { + "GetTermsOfServiceAgreementState": { + "methods": [ + "get_terms_of_service_agreement_state" + ] + }, + "RetrieveForApplicationTermsOfServiceAgreementState": { + "methods": [ + "retrieve_for_application_terms_of_service_agreement_state" + ] + } + } + }, + "grpc-async": { + "libraryClient": "TermsOfServiceAgreementStateServiceAsyncClient", + "rpcs": { + "GetTermsOfServiceAgreementState": { + "methods": [ + "get_terms_of_service_agreement_state" + ] + }, + "RetrieveForApplicationTermsOfServiceAgreementState": { + "methods": [ + "retrieve_for_application_terms_of_service_agreement_state" + ] + } + } + }, + "rest": { + "libraryClient": "TermsOfServiceAgreementStateServiceClient", + "rpcs": { + "GetTermsOfServiceAgreementState": { + "methods": [ + "get_terms_of_service_agreement_state" + ] + }, + "RetrieveForApplicationTermsOfServiceAgreementState": { + "methods": [ + "retrieve_for_application_terms_of_service_agreement_state" + ] + } + } + } + } + }, + "TermsOfServiceService": { + "clients": { + "grpc": { + "libraryClient": "TermsOfServiceServiceClient", + "rpcs": { + "AcceptTermsOfService": { + "methods": [ + "accept_terms_of_service" + ] + }, + "GetTermsOfService": { + "methods": [ + "get_terms_of_service" + ] + }, + "RetrieveLatestTermsOfService": { + "methods": [ + "retrieve_latest_terms_of_service" + ] + } + } + }, + "grpc-async": { + "libraryClient": "TermsOfServiceServiceAsyncClient", + "rpcs": { + "AcceptTermsOfService": { + "methods": [ + "accept_terms_of_service" + ] + }, + "GetTermsOfService": { + "methods": [ + "get_terms_of_service" + ] + }, + "RetrieveLatestTermsOfService": { + "methods": [ + "retrieve_latest_terms_of_service" + ] + } + } + }, + "rest": { + "libraryClient": "TermsOfServiceServiceClient", + "rpcs": { + "AcceptTermsOfService": { + "methods": [ + "accept_terms_of_service" + ] + }, + "GetTermsOfService": { + "methods": [ + "get_terms_of_service" + ] + }, + "RetrieveLatestTermsOfService": { + "methods": [ + "retrieve_latest_terms_of_service" + ] + } + } + } + } + }, + "UserService": { + "clients": { + "grpc": { + "libraryClient": "UserServiceClient", + "rpcs": { + "CreateUser": { + "methods": [ + "create_user" + ] + }, + "DeleteUser": { + "methods": [ + "delete_user" + ] + }, + "GetUser": { + "methods": [ + "get_user" + ] + }, + "ListUsers": { + "methods": [ + "list_users" + ] + }, + "UpdateUser": { + "methods": [ + "update_user" + ] + } + } + }, + "grpc-async": { + "libraryClient": "UserServiceAsyncClient", + "rpcs": { + "CreateUser": { + "methods": [ + "create_user" + ] + }, + "DeleteUser": { + "methods": [ + "delete_user" + ] + }, + "GetUser": { + "methods": [ + "get_user" + ] + }, + "ListUsers": 
{ + "methods": [ + "list_users" + ] + }, + "UpdateUser": { + "methods": [ + "update_user" + ] + } + } + }, + "rest": { + "libraryClient": "UserServiceClient", + "rpcs": { + "CreateUser": { + "methods": [ + "create_user" + ] + }, + "DeleteUser": { + "methods": [ + "delete_user" + ] + }, + "GetUser": { + "methods": [ + "get_user" + ] + }, + "ListUsers": { + "methods": [ + "list_users" + ] + }, + "UpdateUser": { + "methods": [ + "update_user" + ] + } + } + } + } + } + } +} diff --git a/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/gapic_version.py b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/gapic_version.py new file mode 100644 index 000000000000..0c7cc68730c4 --- /dev/null +++ b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/gapic_version.py @@ -0,0 +1,16 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +__version__ = "0.1.1" # {x-release-please-version} diff --git a/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/py.typed b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/py.typed new file mode 100644 index 000000000000..19aa2588b0f7 --- /dev/null +++ b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/py.typed @@ -0,0 +1,2 @@ +# Marker file for PEP 561. +# The google-shopping-merchant-accounts package uses inline types. diff --git a/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/__init__.py b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/__init__.py new file mode 100644 index 000000000000..8f6cf068242c --- /dev/null +++ b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# diff --git a/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/account_issue_service/__init__.py b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/account_issue_service/__init__.py new file mode 100644 index 000000000000..2142f10f0b3e --- /dev/null +++ b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/account_issue_service/__init__.py @@ -0,0 +1,22 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from .async_client import AccountIssueServiceAsyncClient +from .client import AccountIssueServiceClient + +__all__ = ( + "AccountIssueServiceClient", + "AccountIssueServiceAsyncClient", +) diff --git a/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/account_issue_service/async_client.py b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/account_issue_service/async_client.py new file mode 100644 index 000000000000..87623fe7d5c2 --- /dev/null +++ b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/account_issue_service/async_client.py @@ -0,0 +1,401 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +import functools +import re +from typing import ( + Callable, + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, +) + +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry_async as retries +from google.api_core.client_options import ClientOptions +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.shopping.merchant_accounts_v1beta import gapic_version as package_version + +try: + OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.AsyncRetry, object, None] # type: ignore + +from google.shopping.merchant_accounts_v1beta.services.account_issue_service import ( + pagers, +) +from google.shopping.merchant_accounts_v1beta.types import accountissue + +from .client import AccountIssueServiceClient +from .transports.base import DEFAULT_CLIENT_INFO, AccountIssueServiceTransport +from .transports.grpc_asyncio import AccountIssueServiceGrpcAsyncIOTransport + + +class AccountIssueServiceAsyncClient: + """Service to support ``AccountIssueService`` API.""" + + _client: AccountIssueServiceClient + + # Copy defaults from the synchronous client for use here. + # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. + DEFAULT_ENDPOINT = AccountIssueServiceClient.DEFAULT_ENDPOINT + DEFAULT_MTLS_ENDPOINT = AccountIssueServiceClient.DEFAULT_MTLS_ENDPOINT + _DEFAULT_ENDPOINT_TEMPLATE = AccountIssueServiceClient._DEFAULT_ENDPOINT_TEMPLATE + _DEFAULT_UNIVERSE = AccountIssueServiceClient._DEFAULT_UNIVERSE + + account_path = staticmethod(AccountIssueServiceClient.account_path) + parse_account_path = staticmethod(AccountIssueServiceClient.parse_account_path) + account_issue_path = staticmethod(AccountIssueServiceClient.account_issue_path) + parse_account_issue_path = staticmethod( + AccountIssueServiceClient.parse_account_issue_path + ) + common_billing_account_path = staticmethod( + AccountIssueServiceClient.common_billing_account_path + ) + parse_common_billing_account_path = staticmethod( + AccountIssueServiceClient.parse_common_billing_account_path + ) + common_folder_path = staticmethod(AccountIssueServiceClient.common_folder_path) + parse_common_folder_path = staticmethod( + AccountIssueServiceClient.parse_common_folder_path + ) + common_organization_path = staticmethod( + AccountIssueServiceClient.common_organization_path + ) + parse_common_organization_path = staticmethod( + AccountIssueServiceClient.parse_common_organization_path + ) + common_project_path = staticmethod(AccountIssueServiceClient.common_project_path) + parse_common_project_path = staticmethod( + AccountIssueServiceClient.parse_common_project_path + ) + common_location_path = staticmethod(AccountIssueServiceClient.common_location_path) + parse_common_location_path = staticmethod( + AccountIssueServiceClient.parse_common_location_path + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + AccountIssueServiceAsyncClient: The constructed client. 
+ """ + return AccountIssueServiceClient.from_service_account_info.__func__(AccountIssueServiceAsyncClient, info, *args, **kwargs) # type: ignore + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + AccountIssueServiceAsyncClient: The constructed client. + """ + return AccountIssueServiceClient.from_service_account_file.__func__(AccountIssueServiceAsyncClient, filename, *args, **kwargs) # type: ignore + + from_service_account_json = from_service_account_file + + @classmethod + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[ClientOptions] = None + ): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + return AccountIssueServiceClient.get_mtls_endpoint_and_cert_source(client_options) # type: ignore + + @property + def transport(self) -> AccountIssueServiceTransport: + """Returns the transport used by the client instance. + + Returns: + AccountIssueServiceTransport: The transport used by the client instance. + """ + return self._client.transport + + @property + def api_endpoint(self): + """Return the API endpoint used by the client instance. + + Returns: + str: The API endpoint used by the client instance. + """ + return self._client._api_endpoint + + @property + def universe_domain(self) -> str: + """Return the universe domain used by the client instance. + + Returns: + str: The universe domain used + by the client instance. 
+ """ + return self._client._universe_domain + + get_transport_class = functools.partial( + type(AccountIssueServiceClient).get_transport_class, + type(AccountIssueServiceClient), + ) + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[ + Union[ + str, + AccountIssueServiceTransport, + Callable[..., AccountIssueServiceTransport], + ] + ] = "grpc_asyncio", + client_options: Optional[ClientOptions] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the account issue service async client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Optional[Union[str,AccountIssueServiceTransport,Callable[..., AccountIssueServiceTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport to use. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the AccountIssueServiceTransport constructor. + If set to None, a transport is chosen automatically. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): + Custom options for the client. + + 1. The ``api_endpoint`` property can be used to override the + default endpoint provided by the client when ``transport`` is + not explicitly provided. Only if this property is not set and + ``transport`` was not explicitly provided, the endpoint is + determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment + variable, which have one of the following values: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto-switch to the + default mTLS endpoint if client certificate is present; this is + the default value). + + 2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide a client certificate for mTLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + 3. The ``universe_domain`` property can be used to override the + default "googleapis.com" universe. Note that ``api_endpoint`` + property still takes precedence; and ``universe_domain`` is + currently not supported for mTLS. + + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. 
+ """ + self._client = AccountIssueServiceClient( + credentials=credentials, + transport=transport, + client_options=client_options, + client_info=client_info, + ) + + async def list_account_issues( + self, + request: Optional[Union[accountissue.ListAccountIssuesRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListAccountIssuesAsyncPager: + r"""Lists all account issues of a Merchant Center + account. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.shopping import merchant_accounts_v1beta + + async def sample_list_account_issues(): + # Create a client + client = merchant_accounts_v1beta.AccountIssueServiceAsyncClient() + + # Initialize request argument(s) + request = merchant_accounts_v1beta.ListAccountIssuesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_account_issues(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.shopping.merchant_accounts_v1beta.types.ListAccountIssuesRequest, dict]]): + The request object. Request message for the ``ListAccountIssues`` method. + parent (:class:`str`): + Required. The parent, which owns this collection of + issues. Format: ``accounts/{account}`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.shopping.merchant_accounts_v1beta.services.account_issue_service.pagers.ListAccountIssuesAsyncPager: + Response message for the ListAccountIssues method. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, accountissue.ListAccountIssuesRequest): + request = accountissue.ListAccountIssuesRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = self._client._transport._wrapped_methods[ + self._client._transport.list_account_issues + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListAccountIssuesAsyncPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def __aenter__(self) -> "AccountIssueServiceAsyncClient": + return self + + async def __aexit__(self, exc_type, exc, tb): + await self.transport.close() + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("AccountIssueServiceAsyncClient",) diff --git a/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/account_issue_service/client.py b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/account_issue_service/client.py new file mode 100644 index 000000000000..85d8a3b4c4e3 --- /dev/null +++ b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/account_issue_service/client.py @@ -0,0 +1,831 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
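The async sample embedded in the docstring above has a synchronous counterpart using the ``AccountIssueServiceClient`` defined in the file that follows. A minimal sketch (retry and timeout values are illustrative; ``accounts/123`` is a placeholder):

```python
# Sketch: synchronous equivalent of the async snippet above, with explicit
# (illustrative) retry and timeout settings; "accounts/123" is a placeholder.
from google.api_core import retry as retries
from google.shopping import merchant_accounts_v1beta

client = merchant_accounts_v1beta.AccountIssueServiceClient()

page_result = client.list_account_issues(
    parent="accounts/123",
    retry=retries.Retry(initial=1.0, maximum=10.0, multiplier=2.0, timeout=60.0),
    timeout=60.0,
)
for account_issue in page_result:  # the pager resolves further pages lazily
    print(account_issue)
```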
+# +from collections import OrderedDict +import os +import re +from typing import ( + Callable, + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, + cast, +) +import warnings + +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.shopping.merchant_accounts_v1beta import gapic_version as package_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + +from google.shopping.merchant_accounts_v1beta.services.account_issue_service import ( + pagers, +) +from google.shopping.merchant_accounts_v1beta.types import accountissue + +from .transports.base import DEFAULT_CLIENT_INFO, AccountIssueServiceTransport +from .transports.grpc import AccountIssueServiceGrpcTransport +from .transports.grpc_asyncio import AccountIssueServiceGrpcAsyncIOTransport +from .transports.rest import AccountIssueServiceRestTransport + + +class AccountIssueServiceClientMeta(type): + """Metaclass for the AccountIssueService client. + + This provides class-level methods for building and retrieving + support objects (e.g. transport) without polluting the client instance + objects. + """ + + _transport_registry = ( + OrderedDict() + ) # type: Dict[str, Type[AccountIssueServiceTransport]] + _transport_registry["grpc"] = AccountIssueServiceGrpcTransport + _transport_registry["grpc_asyncio"] = AccountIssueServiceGrpcAsyncIOTransport + _transport_registry["rest"] = AccountIssueServiceRestTransport + + def get_transport_class( + cls, + label: Optional[str] = None, + ) -> Type[AccountIssueServiceTransport]: + """Returns an appropriate transport class. + + Args: + label: The name of the desired transport. If none is + provided, then the first transport in the registry is used. + + Returns: + The transport class to use. + """ + # If a specific transport is requested, return that one. + if label: + return cls._transport_registry[label] + + # No transport is requested; return the default (that is, the first one + # in the dictionary). + return next(iter(cls._transport_registry.values())) + + +class AccountIssueServiceClient(metaclass=AccountIssueServiceClientMeta): + """Service to support ``AccountIssueService`` API.""" + + @staticmethod + def _get_default_mtls_endpoint(api_endpoint): + """Converts api endpoint to mTLS endpoint. + + Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to + "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. + Args: + api_endpoint (Optional[str]): the api endpoint to convert. + Returns: + str: converted mTLS api endpoint. + """ + if not api_endpoint: + return api_endpoint + + mtls_endpoint_re = re.compile( + r"(?P<name>[^.]+)(?P<mtls>\.mtls)?(?P<sandbox>\.sandbox)?(?P<googledomain>\.googleapis\.com)?"
+ ) + + m = mtls_endpoint_re.match(api_endpoint) + name, mtls, sandbox, googledomain = m.groups() + if mtls or not googledomain: + return api_endpoint + + if sandbox: + return api_endpoint.replace( + "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" + ) + + return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") + + # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. + DEFAULT_ENDPOINT = "merchantapi.googleapis.com" + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore + DEFAULT_ENDPOINT + ) + + _DEFAULT_ENDPOINT_TEMPLATE = "merchantapi.{UNIVERSE_DOMAIN}" + _DEFAULT_UNIVERSE = "googleapis.com" + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + AccountIssueServiceClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + AccountIssueServiceClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_file(filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> AccountIssueServiceTransport: + """Returns the transport used by the client instance. + + Returns: + AccountIssueServiceTransport: The transport used by the client + instance. 
+ """ + return self._transport + + @staticmethod + def account_path( + account: str, + ) -> str: + """Returns a fully-qualified account string.""" + return "accounts/{account}".format( + account=account, + ) + + @staticmethod + def parse_account_path(path: str) -> Dict[str, str]: + """Parses a account path into its component segments.""" + m = re.match(r"^accounts/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def account_issue_path( + account: str, + issue: str, + ) -> str: + """Returns a fully-qualified account_issue string.""" + return "accounts/{account}/issues/{issue}".format( + account=account, + issue=issue, + ) + + @staticmethod + def parse_account_issue_path(path: str) -> Dict[str, str]: + """Parses a account_issue path into its component segments.""" + m = re.match(r"^accounts/(?P.+?)/issues/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_billing_account_path( + billing_account: str, + ) -> str: + """Returns a fully-qualified billing_account string.""" + return "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + + @staticmethod + def parse_common_billing_account_path(path: str) -> Dict[str, str]: + """Parse a billing_account path into its component segments.""" + m = re.match(r"^billingAccounts/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_folder_path( + folder: str, + ) -> str: + """Returns a fully-qualified folder string.""" + return "folders/{folder}".format( + folder=folder, + ) + + @staticmethod + def parse_common_folder_path(path: str) -> Dict[str, str]: + """Parse a folder path into its component segments.""" + m = re.match(r"^folders/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_organization_path( + organization: str, + ) -> str: + """Returns a fully-qualified organization string.""" + return "organizations/{organization}".format( + organization=organization, + ) + + @staticmethod + def parse_common_organization_path(path: str) -> Dict[str, str]: + """Parse a organization path into its component segments.""" + m = re.match(r"^organizations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_project_path( + project: str, + ) -> str: + """Returns a fully-qualified project string.""" + return "projects/{project}".format( + project=project, + ) + + @staticmethod + def parse_common_project_path(path: str) -> Dict[str, str]: + """Parse a project path into its component segments.""" + m = re.match(r"^projects/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_location_path( + project: str, + location: str, + ) -> str: + """Returns a fully-qualified location string.""" + return "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) + + @staticmethod + def parse_common_location_path(path: str) -> Dict[str, str]: + """Parse a location path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @classmethod + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[client_options_lib.ClientOptions] = None + ): + """Deprecated. Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. 
+ (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + + warnings.warn( + "get_mtls_endpoint_and_cert_source is deprecated. Use the api_endpoint property instead.", + DeprecationWarning, + ) + if client_options is None: + client_options = client_options_lib.ClientOptions() + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_client_cert not in ("true", "false"): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError( + "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Figure out the client cert source to use. + client_cert_source = None + if use_client_cert == "true": + if client_options.client_cert_source: + client_cert_source = client_options.client_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + + # Figure out which api endpoint to use. + if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + elif use_mtls_endpoint == "always" or ( + use_mtls_endpoint == "auto" and client_cert_source + ): + api_endpoint = cls.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = cls.DEFAULT_ENDPOINT + + return api_endpoint, client_cert_source + + @staticmethod + def _read_environment_variables(): + """Returns the environment variables used by the client. + + Returns: + Tuple[bool, str, str]: returns the GOOGLE_API_USE_CLIENT_CERTIFICATE, + GOOGLE_API_USE_MTLS_ENDPOINT, and GOOGLE_CLOUD_UNIVERSE_DOMAIN environment variables. + + Raises: + ValueError: If GOOGLE_API_USE_CLIENT_CERTIFICATE is not + any of ["true", "false"]. + google.auth.exceptions.MutualTLSChannelError: If GOOGLE_API_USE_MTLS_ENDPOINT + is not any of ["auto", "never", "always"]. 
+ """ + use_client_cert = os.getenv( + "GOOGLE_API_USE_CLIENT_CERTIFICATE", "false" + ).lower() + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto").lower() + universe_domain_env = os.getenv("GOOGLE_CLOUD_UNIVERSE_DOMAIN") + if use_client_cert not in ("true", "false"): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError( + "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + return use_client_cert == "true", use_mtls_endpoint, universe_domain_env + + @staticmethod + def _get_client_cert_source(provided_cert_source, use_cert_flag): + """Return the client cert source to be used by the client. + + Args: + provided_cert_source (bytes): The client certificate source provided. + use_cert_flag (bool): A flag indicating whether to use the client certificate. + + Returns: + bytes or None: The client cert source to be used by the client. + """ + client_cert_source = None + if use_cert_flag: + if provided_cert_source: + client_cert_source = provided_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + return client_cert_source + + @staticmethod + def _get_api_endpoint( + api_override, client_cert_source, universe_domain, use_mtls_endpoint + ): + """Return the API endpoint used by the client. + + Args: + api_override (str): The API endpoint override. If specified, this is always + the return value of this function and the other arguments are not used. + client_cert_source (bytes): The client certificate source used by the client. + universe_domain (str): The universe domain used by the client. + use_mtls_endpoint (str): How to use the mTLS endpoint, which depends also on the other parameters. + Possible values are "always", "auto", or "never". + + Returns: + str: The API endpoint to be used by the client. + """ + if api_override is not None: + api_endpoint = api_override + elif use_mtls_endpoint == "always" or ( + use_mtls_endpoint == "auto" and client_cert_source + ): + _default_universe = AccountIssueServiceClient._DEFAULT_UNIVERSE + if universe_domain != _default_universe: + raise MutualTLSChannelError( + f"mTLS is not supported in any universe other than {_default_universe}." + ) + api_endpoint = AccountIssueServiceClient.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = AccountIssueServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=universe_domain + ) + return api_endpoint + + @staticmethod + def _get_universe_domain( + client_universe_domain: Optional[str], universe_domain_env: Optional[str] + ) -> str: + """Return the universe domain used by the client. + + Args: + client_universe_domain (Optional[str]): The universe domain configured via the client options. + universe_domain_env (Optional[str]): The universe domain configured via the "GOOGLE_CLOUD_UNIVERSE_DOMAIN" environment variable. + + Returns: + str: The universe domain to be used by the client. + + Raises: + ValueError: If the universe domain is an empty string. 
+ """ + universe_domain = AccountIssueServiceClient._DEFAULT_UNIVERSE + if client_universe_domain is not None: + universe_domain = client_universe_domain + elif universe_domain_env is not None: + universe_domain = universe_domain_env + if len(universe_domain.strip()) == 0: + raise ValueError("Universe Domain cannot be an empty string.") + return universe_domain + + @staticmethod + def _compare_universes( + client_universe: str, credentials: ga_credentials.Credentials + ) -> bool: + """Returns True iff the universe domains used by the client and credentials match. + + Args: + client_universe (str): The universe domain configured via the client options. + credentials (ga_credentials.Credentials): The credentials being used in the client. + + Returns: + bool: True iff client_universe matches the universe in credentials. + + Raises: + ValueError: when client_universe does not match the universe in credentials. + """ + + default_universe = AccountIssueServiceClient._DEFAULT_UNIVERSE + credentials_universe = getattr(credentials, "universe_domain", default_universe) + + if client_universe != credentials_universe: + raise ValueError( + "The configured universe domain " + f"({client_universe}) does not match the universe domain " + f"found in the credentials ({credentials_universe}). " + "If you haven't configured the universe domain explicitly, " + f"`{default_universe}` is the default." + ) + return True + + def _validate_universe_domain(self): + """Validates client's and credentials' universe domains are consistent. + + Returns: + bool: True iff the configured universe domain is valid. + + Raises: + ValueError: If the configured universe domain is not valid. + """ + self._is_universe_domain_valid = ( + self._is_universe_domain_valid + or AccountIssueServiceClient._compare_universes( + self.universe_domain, self.transport._credentials + ) + ) + return self._is_universe_domain_valid + + @property + def api_endpoint(self): + """Return the API endpoint used by the client instance. + + Returns: + str: The API endpoint used by the client instance. + """ + return self._api_endpoint + + @property + def universe_domain(self) -> str: + """Return the universe domain used by the client instance. + + Returns: + str: The universe domain used by the client instance. + """ + return self._universe_domain + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[ + Union[ + str, + AccountIssueServiceTransport, + Callable[..., AccountIssueServiceTransport], + ] + ] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the account issue service client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Optional[Union[str,AccountIssueServiceTransport,Callable[..., AccountIssueServiceTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the AccountIssueServiceTransport constructor. + If set to None, a transport is chosen automatically. 
+ client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): + Custom options for the client. + + 1. The ``api_endpoint`` property can be used to override the + default endpoint provided by the client when ``transport`` is + not explicitly provided. Only if this property is not set and + ``transport`` was not explicitly provided, the endpoint is + determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment + variable, which have one of the following values: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto-switch to the + default mTLS endpoint if client certificate is present; this is + the default value). + + 2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide a client certificate for mTLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + 3. The ``universe_domain`` property can be used to override the + default "googleapis.com" universe. Note that the ``api_endpoint`` + property still takes precedence; and ``universe_domain`` is + currently not supported for mTLS. + + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + """ + self._client_options = client_options + if isinstance(self._client_options, dict): + self._client_options = client_options_lib.from_dict(self._client_options) + if self._client_options is None: + self._client_options = client_options_lib.ClientOptions() + self._client_options = cast( + client_options_lib.ClientOptions, self._client_options + ) + + universe_domain_opt = getattr(self._client_options, "universe_domain", None) + + ( + self._use_client_cert, + self._use_mtls_endpoint, + self._universe_domain_env, + ) = AccountIssueServiceClient._read_environment_variables() + self._client_cert_source = AccountIssueServiceClient._get_client_cert_source( + self._client_options.client_cert_source, self._use_client_cert + ) + self._universe_domain = AccountIssueServiceClient._get_universe_domain( + universe_domain_opt, self._universe_domain_env + ) + self._api_endpoint = None # updated below, depending on `transport` + + # Initialize the universe domain validation. + self._is_universe_domain_valid = False + + api_key_value = getattr(self._client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError( + "client_options.api_key and credentials are mutually exclusive" + ) + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + transport_provided = isinstance(transport, AccountIssueServiceTransport) + if transport_provided: + # transport is a AccountIssueServiceTransport instance. + if credentials or self._client_options.credentials_file or api_key_value: + raise ValueError( + "When providing a transport instance, " + "provide its credentials directly." 
+ ) + if self._client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." + ) + self._transport = cast(AccountIssueServiceTransport, transport) + self._api_endpoint = self._transport.host + + self._api_endpoint = ( + self._api_endpoint + or AccountIssueServiceClient._get_api_endpoint( + self._client_options.api_endpoint, + self._client_cert_source, + self._universe_domain, + self._use_mtls_endpoint, + ) + ) + + if not transport_provided: + import google.auth._default # type: ignore + + if api_key_value and hasattr( + google.auth._default, "get_api_key_credentials" + ): + credentials = google.auth._default.get_api_key_credentials( + api_key_value + ) + + transport_init: Union[ + Type[AccountIssueServiceTransport], + Callable[..., AccountIssueServiceTransport], + ] = ( + type(self).get_transport_class(transport) + if isinstance(transport, str) or transport is None + else cast(Callable[..., AccountIssueServiceTransport], transport) + ) + # initialize with the provided callable or the passed in class + self._transport = transport_init( + credentials=credentials, + credentials_file=self._client_options.credentials_file, + host=self._api_endpoint, + scopes=self._client_options.scopes, + client_cert_source_for_mtls=self._client_cert_source, + quota_project_id=self._client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + api_audience=self._client_options.api_audience, + ) + + def list_account_issues( + self, + request: Optional[Union[accountissue.ListAccountIssuesRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListAccountIssuesPager: + r"""Lists all account issues of a Merchant Center + account. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.shopping import merchant_accounts_v1beta + + def sample_list_account_issues(): + # Create a client + client = merchant_accounts_v1beta.AccountIssueServiceClient() + + # Initialize request argument(s) + request = merchant_accounts_v1beta.ListAccountIssuesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_account_issues(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.shopping.merchant_accounts_v1beta.types.ListAccountIssuesRequest, dict]): + The request object. Request message for the ``ListAccountIssues`` method. + parent (str): + Required. The parent, which owns this collection of + issues. Format: ``accounts/{account}`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.shopping.merchant_accounts_v1beta.services.account_issue_service.pagers.ListAccountIssuesPager: + Response message for the ListAccountIssues method. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, accountissue.ListAccountIssuesRequest): + request = accountissue.ListAccountIssuesRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_account_issues] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListAccountIssuesPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def __enter__(self) -> "AccountIssueServiceClient": + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! + """ + self.transport.close() + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("AccountIssueServiceClient",) diff --git a/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/account_issue_service/pagers.py b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/account_issue_service/pagers.py new file mode 100644 index 000000000000..4715485af090 --- /dev/null +++ b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/account_issue_service/pagers.py @@ -0,0 +1,155 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
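The synchronous client above resolves its transport and endpoint automatically, but the constructor also accepts a transport name from the registry (`"grpc"`, `"grpc_asyncio"`, `"rest"`) and a `ClientOptions` override. A minimal sketch (not part of the generated file; the endpoint shown is the default and is included only to illustrate the option):

```python
# Illustrative usage only; not part of the generated library code.
from google.api_core.client_options import ClientOptions

from google.shopping import merchant_accounts_v1beta

# Default: the first transport in the registry (gRPC) and the default endpoint.
client = merchant_accounts_v1beta.AccountIssueServiceClient()

# Select the REST transport by its registry name.
rest_client = merchant_accounts_v1beta.AccountIssueServiceClient(transport="rest")

# Override the endpoint via ClientOptions (the value shown is the default,
# included only to illustrate the option).
custom_client = merchant_accounts_v1beta.AccountIssueServiceClient(
    client_options=ClientOptions(api_endpoint="merchantapi.googleapis.com")
)
```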
+# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import ( + Any, + AsyncIterator, + Awaitable, + Callable, + Iterator, + Optional, + Sequence, + Tuple, +) + +from google.shopping.merchant_accounts_v1beta.types import accountissue + + +class ListAccountIssuesPager: + """A pager for iterating through ``list_account_issues`` requests. + + This class thinly wraps an initial + :class:`google.shopping.merchant_accounts_v1beta.types.ListAccountIssuesResponse` object, and + provides an ``__iter__`` method to iterate through its + ``account_issues`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListAccountIssues`` requests and continue to iterate + through the ``account_issues`` field on the + corresponding responses. + + All the usual :class:`google.shopping.merchant_accounts_v1beta.types.ListAccountIssuesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., accountissue.ListAccountIssuesResponse], + request: accountissue.ListAccountIssuesRequest, + response: accountissue.ListAccountIssuesResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.shopping.merchant_accounts_v1beta.types.ListAccountIssuesRequest): + The initial request object. + response (google.shopping.merchant_accounts_v1beta.types.ListAccountIssuesResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = accountissue.ListAccountIssuesRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[accountissue.ListAccountIssuesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[accountissue.AccountIssue]: + for page in self.pages: + yield from page.account_issues + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListAccountIssuesAsyncPager: + """A pager for iterating through ``list_account_issues`` requests. + + This class thinly wraps an initial + :class:`google.shopping.merchant_accounts_v1beta.types.ListAccountIssuesResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``account_issues`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListAccountIssues`` requests and continue to iterate + through the ``account_issues`` field on the + corresponding responses. + + All the usual :class:`google.shopping.merchant_accounts_v1beta.types.ListAccountIssuesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. 
+ """ + + def __init__( + self, + method: Callable[..., Awaitable[accountissue.ListAccountIssuesResponse]], + request: accountissue.ListAccountIssuesRequest, + response: accountissue.ListAccountIssuesResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.shopping.merchant_accounts_v1beta.types.ListAccountIssuesRequest): + The initial request object. + response (google.shopping.merchant_accounts_v1beta.types.ListAccountIssuesResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = accountissue.ListAccountIssuesRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[accountissue.ListAccountIssuesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + + def __aiter__(self) -> AsyncIterator[accountissue.AccountIssue]: + async def async_generator(): + async for page in self.pages: + for response in page.account_issues: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) diff --git a/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/account_issue_service/transports/__init__.py b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/account_issue_service/transports/__init__.py new file mode 100644 index 000000000000..06ec679263ee --- /dev/null +++ b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/account_issue_service/transports/__init__.py @@ -0,0 +1,38 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +from typing import Dict, Type + +from .base import AccountIssueServiceTransport +from .grpc import AccountIssueServiceGrpcTransport +from .grpc_asyncio import AccountIssueServiceGrpcAsyncIOTransport +from .rest import AccountIssueServiceRestInterceptor, AccountIssueServiceRestTransport + +# Compile a registry of transports. 
+_transport_registry = ( + OrderedDict() +) # type: Dict[str, Type[AccountIssueServiceTransport]] +_transport_registry["grpc"] = AccountIssueServiceGrpcTransport +_transport_registry["grpc_asyncio"] = AccountIssueServiceGrpcAsyncIOTransport +_transport_registry["rest"] = AccountIssueServiceRestTransport + +__all__ = ( + "AccountIssueServiceTransport", + "AccountIssueServiceGrpcTransport", + "AccountIssueServiceGrpcAsyncIOTransport", + "AccountIssueServiceRestTransport", + "AccountIssueServiceRestInterceptor", +) diff --git a/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/account_issue_service/transports/base.py b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/account_issue_service/transports/base.py new file mode 100644 index 000000000000..4c2fbce043a8 --- /dev/null +++ b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/account_issue_service/transports/base.py @@ -0,0 +1,162 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import abc +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union + +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.shopping.merchant_accounts_v1beta import gapic_version as package_version +from google.shopping.merchant_accounts_v1beta.types import accountissue + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +class AccountIssueServiceTransport(abc.ABC): + """Abstract transport class for AccountIssueService.""" + + AUTH_SCOPES = ("https://www.googleapis.com/auth/content",) + + DEFAULT_HOST: str = "merchantapi.googleapis.com" + + def __init__( + self, + *, + host: str = DEFAULT_HOST, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + **kwargs, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'merchantapi.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. 
+ credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A list of scopes. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + """ + + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} + + # Save the scopes. + self._scopes = scopes + + # If no credentials are provided, then determine the appropriate + # defaults. + if credentials and credentials_file: + raise core_exceptions.DuplicateCredentialArgs( + "'credentials_file' and 'credentials' are mutually exclusive" + ) + + if credentials_file is not None: + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, **scopes_kwargs, quota_project_id=quota_project_id + ) + elif credentials is None: + credentials, _ = google.auth.default( + **scopes_kwargs, quota_project_id=quota_project_id + ) + # Don't apply audience if the credentials file passed from user. + if hasattr(credentials, "with_gdch_audience"): + credentials = credentials.with_gdch_audience( + api_audience if api_audience else host + ) + + # If the credentials are service account credentials, then always try to use self signed JWT. + if ( + always_use_jwt_access + and isinstance(credentials, service_account.Credentials) + and hasattr(service_account.Credentials, "with_always_use_jwt_access") + ): + credentials = credentials.with_always_use_jwt_access(True) + + # Save the credentials. + self._credentials = credentials + + # Save the hostname. Default to port 443 (HTTPS) if none is specified. + if ":" not in host: + host += ":443" + self._host = host + + @property + def host(self): + return self._host + + def _prep_wrapped_messages(self, client_info): + # Precompute the wrapped methods. + self._wrapped_methods = { + self.list_account_issues: gapic_v1.method.wrap_method( + self.list_account_issues, + default_timeout=None, + client_info=client_info, + ), + } + + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! 
+ """ + raise NotImplementedError() + + @property + def list_account_issues( + self, + ) -> Callable[ + [accountissue.ListAccountIssuesRequest], + Union[ + accountissue.ListAccountIssuesResponse, + Awaitable[accountissue.ListAccountIssuesResponse], + ], + ]: + raise NotImplementedError() + + @property + def kind(self) -> str: + raise NotImplementedError() + + +__all__ = ("AccountIssueServiceTransport",) diff --git a/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/account_issue_service/transports/grpc.py b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/account_issue_service/transports/grpc.py new file mode 100644 index 000000000000..226b3a0cd5b4 --- /dev/null +++ b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/account_issue_service/transports/grpc.py @@ -0,0 +1,273 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, grpc_helpers +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +import grpc # type: ignore + +from google.shopping.merchant_accounts_v1beta.types import accountissue + +from .base import DEFAULT_CLIENT_INFO, AccountIssueServiceTransport + + +class AccountIssueServiceGrpcTransport(AccountIssueServiceTransport): + """gRPC backend transport for AccountIssueService. + + Service to support ``AccountIssueService`` API. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + + _stubs: Dict[str, Callable] + + def __init__( + self, + *, + host: str = "merchantapi.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'merchantapi.googleapis.com'). 
+ credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if a ``channel`` instance is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if a ``channel`` instance is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if a ``channel`` instance is provided. + channel (Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]]): + A ``Channel`` instance through which to make calls, or a Callable + that constructs and returns one. If set to None, ``self.create_channel`` + is used to create the channel. If a Callable is given, it will be called + with the same arguments as used in ``self.create_channel``. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if a ``channel`` instance is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if isinstance(channel, grpc.Channel): + # Ignore credentials if a channel was passed. + credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. 
+ if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + # initialize with the provided callable or the default channel + channel_init = channel or type(self).create_channel + self._grpc_channel = channel_init( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @classmethod + def create_channel( + cls, + host: str = "merchantapi.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> grpc.Channel: + """Create and return a gRPC channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + grpc.Channel: A gRPC channel object. + + Raises: + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. 
+ """ + + return grpc_helpers.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs, + ) + + @property + def grpc_channel(self) -> grpc.Channel: + """Return the channel designed to connect to this service.""" + return self._grpc_channel + + @property + def list_account_issues( + self, + ) -> Callable[ + [accountissue.ListAccountIssuesRequest], accountissue.ListAccountIssuesResponse + ]: + r"""Return a callable for the list account issues method over gRPC. + + Lists all account issues of a Merchant Center + account. + + Returns: + Callable[[~.ListAccountIssuesRequest], + ~.ListAccountIssuesResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_account_issues" not in self._stubs: + self._stubs["list_account_issues"] = self.grpc_channel.unary_unary( + "/google.shopping.merchant.accounts.v1beta.AccountIssueService/ListAccountIssues", + request_serializer=accountissue.ListAccountIssuesRequest.serialize, + response_deserializer=accountissue.ListAccountIssuesResponse.deserialize, + ) + return self._stubs["list_account_issues"] + + def close(self): + self.grpc_channel.close() + + @property + def kind(self) -> str: + return "grpc" + + +__all__ = ("AccountIssueServiceGrpcTransport",) diff --git a/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/account_issue_service/transports/grpc_asyncio.py b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/account_issue_service/transports/grpc_asyncio.py new file mode 100644 index 000000000000..9a6cd423f08e --- /dev/null +++ b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/account_issue_service/transports/grpc_asyncio.py @@ -0,0 +1,284 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1, grpc_helpers_async +from google.api_core import retry_async as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +import grpc # type: ignore +from grpc.experimental import aio # type: ignore + +from google.shopping.merchant_accounts_v1beta.types import accountissue + +from .base import DEFAULT_CLIENT_INFO, AccountIssueServiceTransport +from .grpc import AccountIssueServiceGrpcTransport + + +class AccountIssueServiceGrpcAsyncIOTransport(AccountIssueServiceTransport): + """gRPC AsyncIO backend transport for AccountIssueService. + + Service to support ``AccountIssueService`` API. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + + _grpc_channel: aio.Channel + _stubs: Dict[str, Callable] = {} + + @classmethod + def create_channel( + cls, + host: str = "merchantapi.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> aio.Channel: + """Create and return a gRPC AsyncIO channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + aio.Channel: A gRPC AsyncIO channel object. 
+ """ + + return grpc_helpers_async.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs, + ) + + def __init__( + self, + *, + host: str = "merchantapi.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[Union[aio.Channel, Callable[..., aio.Channel]]] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'merchantapi.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if a ``channel`` instance is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if a ``channel`` instance is provided. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + channel (Optional[Union[aio.Channel, Callable[..., aio.Channel]]]): + A ``Channel`` instance through which to make calls, or a Callable + that constructs and returns one. If set to None, ``self.create_channel`` + is used to create the channel. If a Callable is given, it will be called + with the same arguments as used in ``self.create_channel``. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if a ``channel`` instance is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. 
+ Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if isinstance(channel, aio.Channel): + # Ignore credentials if a channel was passed. + credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + # initialize with the provided callable or the default channel + channel_init = channel or type(self).create_channel + self._grpc_channel = channel_init( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @property + def grpc_channel(self) -> aio.Channel: + """Create the channel designed to connect to this service. + + This property caches on the instance; repeated calls return + the same channel. + """ + # Return the channel from cache. + return self._grpc_channel + + @property + def list_account_issues( + self, + ) -> Callable[ + [accountissue.ListAccountIssuesRequest], + Awaitable[accountissue.ListAccountIssuesResponse], + ]: + r"""Return a callable for the list account issues method over gRPC. + + Lists all account issues of a Merchant Center + account. + + Returns: + Callable[[~.ListAccountIssuesRequest], + Awaitable[~.ListAccountIssuesResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_account_issues" not in self._stubs: + self._stubs["list_account_issues"] = self.grpc_channel.unary_unary( + "/google.shopping.merchant.accounts.v1beta.AccountIssueService/ListAccountIssues", + request_serializer=accountissue.ListAccountIssuesRequest.serialize, + response_deserializer=accountissue.ListAccountIssuesResponse.deserialize, + ) + return self._stubs["list_account_issues"] + + def _prep_wrapped_messages(self, client_info): + """Precompute the wrapped methods, overriding the base class method to use async wrappers.""" + self._wrapped_methods = { + self.list_account_issues: gapic_v1.method_async.wrap_method( + self.list_account_issues, + default_timeout=None, + client_info=client_info, + ), + } + + def close(self): + return self.grpc_channel.close() + + +__all__ = ("AccountIssueServiceGrpcAsyncIOTransport",) diff --git a/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/account_issue_service/transports/rest.py b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/account_issue_service/transports/rest.py new file mode 100644 index 000000000000..c62775d22a36 --- /dev/null +++ b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/account_issue_service/transports/rest.py @@ -0,0 +1,305 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +import dataclasses +import json # type: ignore +import re +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, path_template, rest_helpers, rest_streaming +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.transport.requests import AuthorizedSession # type: ignore +from google.protobuf import json_format +import grpc # type: ignore +from requests import __version__ as requests_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + + +from google.shopping.merchant_accounts_v1beta.types import accountissue + +from .base import AccountIssueServiceTransport +from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) + + +class AccountIssueServiceRestInterceptor: + """Interceptor for AccountIssueService. 
+ + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. + Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the AccountIssueServiceRestTransport. + + .. code-block:: python + class MyCustomAccountIssueServiceInterceptor(AccountIssueServiceRestInterceptor): + def pre_list_account_issues(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_account_issues(self, response): + logging.log(f"Received response: {response}") + return response + + transport = AccountIssueServiceRestTransport(interceptor=MyCustomAccountIssueServiceInterceptor()) + client = AccountIssueServiceClient(transport=transport) + + + """ + + def pre_list_account_issues( + self, + request: accountissue.ListAccountIssuesRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[accountissue.ListAccountIssuesRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_account_issues + + Override in a subclass to manipulate the request or metadata + before they are sent to the AccountIssueService server. + """ + return request, metadata + + def post_list_account_issues( + self, response: accountissue.ListAccountIssuesResponse + ) -> accountissue.ListAccountIssuesResponse: + """Post-rpc interceptor for list_account_issues + + Override in a subclass to manipulate the response + after it is returned by the AccountIssueService server but before + it is returned to user code. + """ + return response + + +@dataclasses.dataclass +class AccountIssueServiceRestStub: + _session: AuthorizedSession + _host: str + _interceptor: AccountIssueServiceRestInterceptor + + +class AccountIssueServiceRestTransport(AccountIssueServiceTransport): + """REST backend transport for AccountIssueService. + + Service to support ``AccountIssueService`` API. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends JSON representations of protocol buffers over HTTP/1.1 + + """ + + def __init__( + self, + *, + host: str = "merchantapi.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", + interceptor: Optional[AccountIssueServiceRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'merchantapi.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. 
This argument is
+ ignored if ``channel`` is provided.
+ client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client
+ certificate to configure mutual TLS HTTP channel. It is ignored
+ if ``channel`` is provided.
+ quota_project_id (Optional[str]): An optional project to use for billing
+ and quota.
+ client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+ The client info used to send a user-agent string along with
+ API requests. If ``None``, then default info will be used.
+ Generally, you only need to set this if you are developing
+ your own client library.
+ always_use_jwt_access (Optional[bool]): Whether self signed JWT should
+ be used for service account credentials.
+ url_scheme: the protocol scheme for the API endpoint. Normally
+ "https", but for testing or local servers,
+ "http" can be specified.
+ """
+ # Run the base constructor
+ # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc.
+ # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the
+ # credentials object
+ maybe_url_match = re.match("^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host)
+ if maybe_url_match is None:
+ raise ValueError(
+ f"Unexpected hostname structure: {host}"
+ ) # pragma: NO COVER
+
+ url_match_items = maybe_url_match.groupdict()
+
+ host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host
+
+ super().__init__(
+ host=host,
+ credentials=credentials,
+ client_info=client_info,
+ always_use_jwt_access=always_use_jwt_access,
+ api_audience=api_audience,
+ )
+ self._session = AuthorizedSession(
+ self._credentials, default_host=self.DEFAULT_HOST
+ )
+ if client_cert_source_for_mtls:
+ self._session.configure_mtls_channel(client_cert_source_for_mtls)
+ self._interceptor = interceptor or AccountIssueServiceRestInterceptor()
+ self._prep_wrapped_messages(client_info)
+
+ class _ListAccountIssues(AccountIssueServiceRestStub):
+ def __hash__(self):
+ return hash("ListAccountIssues")
+
+ __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {}
+
+ @classmethod
+ def _get_unset_required_fields(cls, message_dict):
+ return {
+ k: v
+ for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items()
+ if k not in message_dict
+ }
+
+ def __call__(
+ self,
+ request: accountissue.ListAccountIssuesRequest,
+ *,
+ retry: OptionalRetry = gapic_v1.method.DEFAULT,
+ timeout: Optional[float] = None,
+ metadata: Sequence[Tuple[str, str]] = (),
+ ) -> accountissue.ListAccountIssuesResponse:
+ r"""Call the list account issues method over HTTP.
+
+ Args:
+ request (~.accountissue.ListAccountIssuesRequest):
+ The request object. Request message for the ``ListAccountIssues`` method.
+ retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ should be retried.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, str]]): Strings which should be
+ sent along with the request as metadata.
+
+ Returns:
+ ~.accountissue.ListAccountIssuesResponse:
+ Response message for the ``ListAccountIssues`` method.
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/accounts/v1beta/{parent=accounts/*}/issues", + }, + ] + request, metadata = self._interceptor.pre_list_account_issues( + request, metadata + ) + pb_request = accountissue.ListAccountIssuesRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = accountissue.ListAccountIssuesResponse() + pb_resp = accountissue.ListAccountIssuesResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_account_issues(resp) + return resp + + @property + def list_account_issues( + self, + ) -> Callable[ + [accountissue.ListAccountIssuesRequest], accountissue.ListAccountIssuesResponse + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListAccountIssues(self._session, self._host, self._interceptor) # type: ignore + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__ = ("AccountIssueServiceRestTransport",) diff --git a/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/account_tax_service/__init__.py b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/account_tax_service/__init__.py new file mode 100644 index 000000000000..a161df03ec9e --- /dev/null +++ b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/account_tax_service/__init__.py @@ -0,0 +1,22 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from .async_client import AccountTaxServiceAsyncClient +from .client import AccountTaxServiceClient + +__all__ = ( + "AccountTaxServiceClient", + "AccountTaxServiceAsyncClient", +) diff --git a/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/account_tax_service/async_client.py b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/account_tax_service/async_client.py new file mode 100644 index 000000000000..1a6213055f4a --- /dev/null +++ b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/account_tax_service/async_client.py @@ -0,0 +1,640 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +import functools +import re +from typing import ( + Callable, + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, +) + +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry_async as retries +from google.api_core.client_options import ClientOptions +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.shopping.merchant_accounts_v1beta import gapic_version as package_version + +try: + OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.AsyncRetry, object, None] # type: ignore + +from google.protobuf import field_mask_pb2 # type: ignore + +from google.shopping.merchant_accounts_v1beta.services.account_tax_service import pagers +from google.shopping.merchant_accounts_v1beta.types import ( + account_tax as gsma_account_tax, +) +from google.shopping.merchant_accounts_v1beta.types import account_tax +from google.shopping.merchant_accounts_v1beta.types import tax_rule + +from .client import AccountTaxServiceClient +from .transports.base import DEFAULT_CLIENT_INFO, AccountTaxServiceTransport +from .transports.grpc_asyncio import AccountTaxServiceGrpcAsyncIOTransport + + +class AccountTaxServiceAsyncClient: + """Manages account level tax setting data. + + This API defines the following resource model: + + - [AccountTax][google.shopping.merchant.accounts.v1main.AccountTax] + """ + + _client: AccountTaxServiceClient + + # Copy defaults from the synchronous client for use here. + # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. 
+ DEFAULT_ENDPOINT = AccountTaxServiceClient.DEFAULT_ENDPOINT + DEFAULT_MTLS_ENDPOINT = AccountTaxServiceClient.DEFAULT_MTLS_ENDPOINT + _DEFAULT_ENDPOINT_TEMPLATE = AccountTaxServiceClient._DEFAULT_ENDPOINT_TEMPLATE + _DEFAULT_UNIVERSE = AccountTaxServiceClient._DEFAULT_UNIVERSE + + account_tax_path = staticmethod(AccountTaxServiceClient.account_tax_path) + parse_account_tax_path = staticmethod( + AccountTaxServiceClient.parse_account_tax_path + ) + common_billing_account_path = staticmethod( + AccountTaxServiceClient.common_billing_account_path + ) + parse_common_billing_account_path = staticmethod( + AccountTaxServiceClient.parse_common_billing_account_path + ) + common_folder_path = staticmethod(AccountTaxServiceClient.common_folder_path) + parse_common_folder_path = staticmethod( + AccountTaxServiceClient.parse_common_folder_path + ) + common_organization_path = staticmethod( + AccountTaxServiceClient.common_organization_path + ) + parse_common_organization_path = staticmethod( + AccountTaxServiceClient.parse_common_organization_path + ) + common_project_path = staticmethod(AccountTaxServiceClient.common_project_path) + parse_common_project_path = staticmethod( + AccountTaxServiceClient.parse_common_project_path + ) + common_location_path = staticmethod(AccountTaxServiceClient.common_location_path) + parse_common_location_path = staticmethod( + AccountTaxServiceClient.parse_common_location_path + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + AccountTaxServiceAsyncClient: The constructed client. + """ + return AccountTaxServiceClient.from_service_account_info.__func__(AccountTaxServiceAsyncClient, info, *args, **kwargs) # type: ignore + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + AccountTaxServiceAsyncClient: The constructed client. + """ + return AccountTaxServiceClient.from_service_account_file.__func__(AccountTaxServiceAsyncClient, filename, *args, **kwargs) # type: ignore + + from_service_account_json = from_service_account_file + + @classmethod + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[ClientOptions] = None + ): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. 
+ (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + return AccountTaxServiceClient.get_mtls_endpoint_and_cert_source(client_options) # type: ignore + + @property + def transport(self) -> AccountTaxServiceTransport: + """Returns the transport used by the client instance. + + Returns: + AccountTaxServiceTransport: The transport used by the client instance. + """ + return self._client.transport + + @property + def api_endpoint(self): + """Return the API endpoint used by the client instance. + + Returns: + str: The API endpoint used by the client instance. + """ + return self._client._api_endpoint + + @property + def universe_domain(self) -> str: + """Return the universe domain used by the client instance. + + Returns: + str: The universe domain used + by the client instance. + """ + return self._client._universe_domain + + get_transport_class = functools.partial( + type(AccountTaxServiceClient).get_transport_class, type(AccountTaxServiceClient) + ) + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[ + Union[ + str, + AccountTaxServiceTransport, + Callable[..., AccountTaxServiceTransport], + ] + ] = "grpc_asyncio", + client_options: Optional[ClientOptions] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the account tax service async client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Optional[Union[str,AccountTaxServiceTransport,Callable[..., AccountTaxServiceTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport to use. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the AccountTaxServiceTransport constructor. + If set to None, a transport is chosen automatically. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): + Custom options for the client. + + 1. The ``api_endpoint`` property can be used to override the + default endpoint provided by the client when ``transport`` is + not explicitly provided. 
Only if this property is not set and + ``transport`` was not explicitly provided, the endpoint is + determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment + variable, which have one of the following values: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto-switch to the + default mTLS endpoint if client certificate is present; this is + the default value). + + 2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide a client certificate for mTLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + 3. The ``universe_domain`` property can be used to override the + default "googleapis.com" universe. Note that ``api_endpoint`` + property still takes precedence; and ``universe_domain`` is + currently not supported for mTLS. + + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + """ + self._client = AccountTaxServiceClient( + credentials=credentials, + transport=transport, + client_options=client_options, + client_info=client_info, + ) + + async def get_account_tax( + self, + request: Optional[Union[account_tax.GetAccountTaxRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> account_tax.AccountTax: + r"""Returns the tax rules that match the conditions of + GetAccountTaxRequest + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.shopping import merchant_accounts_v1beta + + async def sample_get_account_tax(): + # Create a client + client = merchant_accounts_v1beta.AccountTaxServiceAsyncClient() + + # Initialize request argument(s) + request = merchant_accounts_v1beta.GetAccountTaxRequest( + name="name_value", + ) + + # Make the request + response = await client.get_account_tax(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.shopping.merchant_accounts_v1beta.types.GetAccountTaxRequest, dict]]): + The request object. Request to get tax settings + name (:class:`str`): + Required. The name from which tax + settings will be retrieved + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.shopping.merchant_accounts_v1beta.types.AccountTax: + The tax settings of a merchant + account. All methods require the admin + role. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, account_tax.GetAccountTaxRequest): + request = account_tax.GetAccountTaxRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.get_account_tax + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_account_tax( + self, + request: Optional[Union[account_tax.ListAccountTaxRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListAccountTaxAsyncPager: + r"""Lists the tax settings of the sub-accounts only in + your Merchant Center account. + This method can only be called on a multi-client + account, otherwise it'll return an error. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.shopping import merchant_accounts_v1beta + + async def sample_list_account_tax(): + # Create a client + client = merchant_accounts_v1beta.AccountTaxServiceAsyncClient() + + # Initialize request argument(s) + request = merchant_accounts_v1beta.ListAccountTaxRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_account_tax(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.shopping.merchant_accounts_v1beta.types.ListAccountTaxRequest, dict]]): + The request object. Request to list all sub-account tax + settings only for the requesting + merchant This method can only be called + on a multi-client account, otherwise + it'll return an error. + parent (:class:`str`): + Required. The parent, which owns this + collection of account tax. 
Format: + accounts/{account} + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.shopping.merchant_accounts_v1beta.services.account_tax_service.pagers.ListAccountTaxAsyncPager: + Response to account tax list request + This method can only be called on a + multi-client account, otherwise it'll + return an error. + + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, account_tax.ListAccountTaxRequest): + request = account_tax.ListAccountTaxRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.list_account_tax + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListAccountTaxAsyncPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def update_account_tax( + self, + request: Optional[Union[gsma_account_tax.UpdateAccountTaxRequest, dict]] = None, + *, + account_tax: Optional[gsma_account_tax.AccountTax] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> gsma_account_tax.AccountTax: + r"""Updates the tax settings of the account. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.shopping import merchant_accounts_v1beta + + async def sample_update_account_tax(): + # Create a client + client = merchant_accounts_v1beta.AccountTaxServiceAsyncClient() + + # Initialize request argument(s) + request = merchant_accounts_v1beta.UpdateAccountTaxRequest( + ) + + # Make the request + response = await client.update_account_tax(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.shopping.merchant_accounts_v1beta.types.UpdateAccountTaxRequest, dict]]): + The request object. Request to update the tax settings + account_tax (:class:`google.shopping.merchant_accounts_v1beta.types.AccountTax`): + Required. The tax setting that will + be updated + + This corresponds to the ``account_tax`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): + The list of fields to be updated + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.shopping.merchant_accounts_v1beta.types.AccountTax: + The tax settings of a merchant + account. All methods require the admin + role. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([account_tax, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, gsma_account_tax.UpdateAccountTaxRequest): + request = gsma_account_tax.UpdateAccountTaxRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if account_tax is not None: + request.account_tax = account_tax + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.update_account_tax + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("account_tax.name", request.account_tax.name),) + ), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def __aenter__(self) -> "AccountTaxServiceAsyncClient": + return self + + async def __aexit__(self, exc_type, exc, tb): + await self.transport.close() + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("AccountTaxServiceAsyncClient",) diff --git a/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/account_tax_service/client.py b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/account_tax_service/client.py new file mode 100644 index 000000000000..81a94cf8c119 --- /dev/null +++ b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/account_tax_service/client.py @@ -0,0 +1,1052 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +import os +import re +from typing import ( + Callable, + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, + cast, +) +import warnings + +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.shopping.merchant_accounts_v1beta import gapic_version as package_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + +from google.protobuf import field_mask_pb2 # type: ignore + +from google.shopping.merchant_accounts_v1beta.services.account_tax_service import pagers +from google.shopping.merchant_accounts_v1beta.types import ( + account_tax as gsma_account_tax, +) +from google.shopping.merchant_accounts_v1beta.types import account_tax +from google.shopping.merchant_accounts_v1beta.types import tax_rule + +from .transports.base import DEFAULT_CLIENT_INFO, AccountTaxServiceTransport +from .transports.grpc import AccountTaxServiceGrpcTransport +from .transports.grpc_asyncio import AccountTaxServiceGrpcAsyncIOTransport +from .transports.rest import AccountTaxServiceRestTransport + + +class AccountTaxServiceClientMeta(type): + """Metaclass for the AccountTaxService client. + + This provides class-level methods for building and retrieving + support objects (e.g. transport) without polluting the client instance + objects. 
+ """ + + _transport_registry = ( + OrderedDict() + ) # type: Dict[str, Type[AccountTaxServiceTransport]] + _transport_registry["grpc"] = AccountTaxServiceGrpcTransport + _transport_registry["grpc_asyncio"] = AccountTaxServiceGrpcAsyncIOTransport + _transport_registry["rest"] = AccountTaxServiceRestTransport + + def get_transport_class( + cls, + label: Optional[str] = None, + ) -> Type[AccountTaxServiceTransport]: + """Returns an appropriate transport class. + + Args: + label: The name of the desired transport. If none is + provided, then the first transport in the registry is used. + + Returns: + The transport class to use. + """ + # If a specific transport is requested, return that one. + if label: + return cls._transport_registry[label] + + # No transport is requested; return the default (that is, the first one + # in the dictionary). + return next(iter(cls._transport_registry.values())) + + +class AccountTaxServiceClient(metaclass=AccountTaxServiceClientMeta): + """Manages account level tax setting data. + + This API defines the following resource model: + + - [AccountTax][google.shopping.merchant.accounts.v1main.AccountTax] + """ + + @staticmethod + def _get_default_mtls_endpoint(api_endpoint): + """Converts api endpoint to mTLS endpoint. + + Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to + "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. + Args: + api_endpoint (Optional[str]): the api endpoint to convert. + Returns: + str: converted mTLS api endpoint. + """ + if not api_endpoint: + return api_endpoint + + mtls_endpoint_re = re.compile( + r"(?P[^.]+)(?P\.mtls)?(?P\.sandbox)?(?P\.googleapis\.com)?" + ) + + m = mtls_endpoint_re.match(api_endpoint) + name, mtls, sandbox, googledomain = m.groups() + if mtls or not googledomain: + return api_endpoint + + if sandbox: + return api_endpoint.replace( + "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" + ) + + return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") + + # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. + DEFAULT_ENDPOINT = "merchantapi.googleapis.com" + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore + DEFAULT_ENDPOINT + ) + + _DEFAULT_ENDPOINT_TEMPLATE = "merchantapi.{UNIVERSE_DOMAIN}" + _DEFAULT_UNIVERSE = "googleapis.com" + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + AccountTaxServiceClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + AccountTaxServiceClient: The constructed client. 
+ """ + credentials = service_account.Credentials.from_service_account_file(filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> AccountTaxServiceTransport: + """Returns the transport used by the client instance. + + Returns: + AccountTaxServiceTransport: The transport used by the client + instance. + """ + return self._transport + + @staticmethod + def account_tax_path( + account: str, + tax: str, + ) -> str: + """Returns a fully-qualified account_tax string.""" + return "accounts/{account}/accounttax/{tax}".format( + account=account, + tax=tax, + ) + + @staticmethod + def parse_account_tax_path(path: str) -> Dict[str, str]: + """Parses a account_tax path into its component segments.""" + m = re.match(r"^accounts/(?P.+?)/accounttax/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_billing_account_path( + billing_account: str, + ) -> str: + """Returns a fully-qualified billing_account string.""" + return "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + + @staticmethod + def parse_common_billing_account_path(path: str) -> Dict[str, str]: + """Parse a billing_account path into its component segments.""" + m = re.match(r"^billingAccounts/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_folder_path( + folder: str, + ) -> str: + """Returns a fully-qualified folder string.""" + return "folders/{folder}".format( + folder=folder, + ) + + @staticmethod + def parse_common_folder_path(path: str) -> Dict[str, str]: + """Parse a folder path into its component segments.""" + m = re.match(r"^folders/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_organization_path( + organization: str, + ) -> str: + """Returns a fully-qualified organization string.""" + return "organizations/{organization}".format( + organization=organization, + ) + + @staticmethod + def parse_common_organization_path(path: str) -> Dict[str, str]: + """Parse a organization path into its component segments.""" + m = re.match(r"^organizations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_project_path( + project: str, + ) -> str: + """Returns a fully-qualified project string.""" + return "projects/{project}".format( + project=project, + ) + + @staticmethod + def parse_common_project_path(path: str) -> Dict[str, str]: + """Parse a project path into its component segments.""" + m = re.match(r"^projects/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_location_path( + project: str, + location: str, + ) -> str: + """Returns a fully-qualified location string.""" + return "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) + + @staticmethod + def parse_common_location_path(path: str) -> Dict[str, str]: + """Parse a location path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @classmethod + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[client_options_lib.ClientOptions] = None + ): + """Deprecated. Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. 
+ (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + + warnings.warn( + "get_mtls_endpoint_and_cert_source is deprecated. Use the api_endpoint property instead.", + DeprecationWarning, + ) + if client_options is None: + client_options = client_options_lib.ClientOptions() + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_client_cert not in ("true", "false"): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError( + "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Figure out the client cert source to use. + client_cert_source = None + if use_client_cert == "true": + if client_options.client_cert_source: + client_cert_source = client_options.client_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + + # Figure out which api endpoint to use. + if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + elif use_mtls_endpoint == "always" or ( + use_mtls_endpoint == "auto" and client_cert_source + ): + api_endpoint = cls.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = cls.DEFAULT_ENDPOINT + + return api_endpoint, client_cert_source + + @staticmethod + def _read_environment_variables(): + """Returns the environment variables used by the client. + + Returns: + Tuple[bool, str, str]: returns the GOOGLE_API_USE_CLIENT_CERTIFICATE, + GOOGLE_API_USE_MTLS_ENDPOINT, and GOOGLE_CLOUD_UNIVERSE_DOMAIN environment variables. + + Raises: + ValueError: If GOOGLE_API_USE_CLIENT_CERTIFICATE is not + any of ["true", "false"]. + google.auth.exceptions.MutualTLSChannelError: If GOOGLE_API_USE_MTLS_ENDPOINT + is not any of ["auto", "never", "always"]. 
+ """ + use_client_cert = os.getenv( + "GOOGLE_API_USE_CLIENT_CERTIFICATE", "false" + ).lower() + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto").lower() + universe_domain_env = os.getenv("GOOGLE_CLOUD_UNIVERSE_DOMAIN") + if use_client_cert not in ("true", "false"): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError( + "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + return use_client_cert == "true", use_mtls_endpoint, universe_domain_env + + @staticmethod + def _get_client_cert_source(provided_cert_source, use_cert_flag): + """Return the client cert source to be used by the client. + + Args: + provided_cert_source (bytes): The client certificate source provided. + use_cert_flag (bool): A flag indicating whether to use the client certificate. + + Returns: + bytes or None: The client cert source to be used by the client. + """ + client_cert_source = None + if use_cert_flag: + if provided_cert_source: + client_cert_source = provided_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + return client_cert_source + + @staticmethod + def _get_api_endpoint( + api_override, client_cert_source, universe_domain, use_mtls_endpoint + ): + """Return the API endpoint used by the client. + + Args: + api_override (str): The API endpoint override. If specified, this is always + the return value of this function and the other arguments are not used. + client_cert_source (bytes): The client certificate source used by the client. + universe_domain (str): The universe domain used by the client. + use_mtls_endpoint (str): How to use the mTLS endpoint, which depends also on the other parameters. + Possible values are "always", "auto", or "never". + + Returns: + str: The API endpoint to be used by the client. + """ + if api_override is not None: + api_endpoint = api_override + elif use_mtls_endpoint == "always" or ( + use_mtls_endpoint == "auto" and client_cert_source + ): + _default_universe = AccountTaxServiceClient._DEFAULT_UNIVERSE + if universe_domain != _default_universe: + raise MutualTLSChannelError( + f"mTLS is not supported in any universe other than {_default_universe}." + ) + api_endpoint = AccountTaxServiceClient.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = AccountTaxServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=universe_domain + ) + return api_endpoint + + @staticmethod + def _get_universe_domain( + client_universe_domain: Optional[str], universe_domain_env: Optional[str] + ) -> str: + """Return the universe domain used by the client. + + Args: + client_universe_domain (Optional[str]): The universe domain configured via the client options. + universe_domain_env (Optional[str]): The universe domain configured via the "GOOGLE_CLOUD_UNIVERSE_DOMAIN" environment variable. + + Returns: + str: The universe domain to be used by the client. + + Raises: + ValueError: If the universe domain is an empty string. 
+ """ + universe_domain = AccountTaxServiceClient._DEFAULT_UNIVERSE + if client_universe_domain is not None: + universe_domain = client_universe_domain + elif universe_domain_env is not None: + universe_domain = universe_domain_env + if len(universe_domain.strip()) == 0: + raise ValueError("Universe Domain cannot be an empty string.") + return universe_domain + + @staticmethod + def _compare_universes( + client_universe: str, credentials: ga_credentials.Credentials + ) -> bool: + """Returns True iff the universe domains used by the client and credentials match. + + Args: + client_universe (str): The universe domain configured via the client options. + credentials (ga_credentials.Credentials): The credentials being used in the client. + + Returns: + bool: True iff client_universe matches the universe in credentials. + + Raises: + ValueError: when client_universe does not match the universe in credentials. + """ + + default_universe = AccountTaxServiceClient._DEFAULT_UNIVERSE + credentials_universe = getattr(credentials, "universe_domain", default_universe) + + if client_universe != credentials_universe: + raise ValueError( + "The configured universe domain " + f"({client_universe}) does not match the universe domain " + f"found in the credentials ({credentials_universe}). " + "If you haven't configured the universe domain explicitly, " + f"`{default_universe}` is the default." + ) + return True + + def _validate_universe_domain(self): + """Validates client's and credentials' universe domains are consistent. + + Returns: + bool: True iff the configured universe domain is valid. + + Raises: + ValueError: If the configured universe domain is not valid. + """ + self._is_universe_domain_valid = ( + self._is_universe_domain_valid + or AccountTaxServiceClient._compare_universes( + self.universe_domain, self.transport._credentials + ) + ) + return self._is_universe_domain_valid + + @property + def api_endpoint(self): + """Return the API endpoint used by the client instance. + + Returns: + str: The API endpoint used by the client instance. + """ + return self._api_endpoint + + @property + def universe_domain(self) -> str: + """Return the universe domain used by the client instance. + + Returns: + str: The universe domain used by the client instance. + """ + return self._universe_domain + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[ + Union[ + str, + AccountTaxServiceTransport, + Callable[..., AccountTaxServiceTransport], + ] + ] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the account tax service client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Optional[Union[str,AccountTaxServiceTransport,Callable[..., AccountTaxServiceTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the AccountTaxServiceTransport constructor. + If set to None, a transport is chosen automatically. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): + Custom options for the client. 
+
+                1. The ``api_endpoint`` property can be used to override the
+                default endpoint provided by the client when ``transport`` is
+                not explicitly provided. Only if this property is not set and
+                ``transport`` was not explicitly provided, the endpoint is
+                determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment
+                variable, which can have one of the following values:
+                "always" (always use the default mTLS endpoint), "never" (always
+                use the default regular endpoint) and "auto" (auto-switch to the
+                default mTLS endpoint if client certificate is present; this is
+                the default value).
+
+                2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable
+                is "true", then the ``client_cert_source`` property can be used
+                to provide a client certificate for mTLS transport. If
+                not provided, the default SSL client certificate will be used if
+                present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not
+                set, no client certificate will be used.
+
+                3. The ``universe_domain`` property can be used to override the
+                default "googleapis.com" universe. Note that the ``api_endpoint``
+                property still takes precedence; and ``universe_domain`` is
+                currently not supported for mTLS.
+
+            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+                The client info used to send a user-agent string along with
+                API requests. If ``None``, then default info will be used.
+                Generally, you only need to set this if you're developing
+                your own client library.
+
+        Raises:
+            google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
+                creation failed for any reason.
+        """
+        self._client_options = client_options
+        if isinstance(self._client_options, dict):
+            self._client_options = client_options_lib.from_dict(self._client_options)
+        if self._client_options is None:
+            self._client_options = client_options_lib.ClientOptions()
+        self._client_options = cast(
+            client_options_lib.ClientOptions, self._client_options
+        )
+
+        universe_domain_opt = getattr(self._client_options, "universe_domain", None)
+
+        (
+            self._use_client_cert,
+            self._use_mtls_endpoint,
+            self._universe_domain_env,
+        ) = AccountTaxServiceClient._read_environment_variables()
+        self._client_cert_source = AccountTaxServiceClient._get_client_cert_source(
+            self._client_options.client_cert_source, self._use_client_cert
+        )
+        self._universe_domain = AccountTaxServiceClient._get_universe_domain(
+            universe_domain_opt, self._universe_domain_env
+        )
+        self._api_endpoint = None  # updated below, depending on `transport`
+
+        # Initialize the universe domain validation.
+        self._is_universe_domain_valid = False
+
+        api_key_value = getattr(self._client_options, "api_key", None)
+        if api_key_value and credentials:
+            raise ValueError(
+                "client_options.api_key and credentials are mutually exclusive"
+            )
+
+        # Save or instantiate the transport.
+        # Ordinarily, we provide the transport, but allowing a custom transport
+        # instance provides an extensibility point for unusual situations.
+        transport_provided = isinstance(transport, AccountTaxServiceTransport)
+        if transport_provided:
+            # transport is an AccountTaxServiceTransport instance.
+            if credentials or self._client_options.credentials_file or api_key_value:
+                raise ValueError(
+                    "When providing a transport instance, "
+                    "provide its credentials directly."
+                )
+            if self._client_options.scopes:
+                raise ValueError(
+                    "When providing a transport instance, provide its scopes "
+                    "directly."
+ ) + self._transport = cast(AccountTaxServiceTransport, transport) + self._api_endpoint = self._transport.host + + self._api_endpoint = ( + self._api_endpoint + or AccountTaxServiceClient._get_api_endpoint( + self._client_options.api_endpoint, + self._client_cert_source, + self._universe_domain, + self._use_mtls_endpoint, + ) + ) + + if not transport_provided: + import google.auth._default # type: ignore + + if api_key_value and hasattr( + google.auth._default, "get_api_key_credentials" + ): + credentials = google.auth._default.get_api_key_credentials( + api_key_value + ) + + transport_init: Union[ + Type[AccountTaxServiceTransport], + Callable[..., AccountTaxServiceTransport], + ] = ( + type(self).get_transport_class(transport) + if isinstance(transport, str) or transport is None + else cast(Callable[..., AccountTaxServiceTransport], transport) + ) + # initialize with the provided callable or the passed in class + self._transport = transport_init( + credentials=credentials, + credentials_file=self._client_options.credentials_file, + host=self._api_endpoint, + scopes=self._client_options.scopes, + client_cert_source_for_mtls=self._client_cert_source, + quota_project_id=self._client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + api_audience=self._client_options.api_audience, + ) + + def get_account_tax( + self, + request: Optional[Union[account_tax.GetAccountTaxRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> account_tax.AccountTax: + r"""Returns the tax rules that match the conditions of + GetAccountTaxRequest + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.shopping import merchant_accounts_v1beta + + def sample_get_account_tax(): + # Create a client + client = merchant_accounts_v1beta.AccountTaxServiceClient() + + # Initialize request argument(s) + request = merchant_accounts_v1beta.GetAccountTaxRequest( + name="name_value", + ) + + # Make the request + response = client.get_account_tax(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.shopping.merchant_accounts_v1beta.types.GetAccountTaxRequest, dict]): + The request object. Request to get tax settings + name (str): + Required. The name from which tax + settings will be retrieved + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.shopping.merchant_accounts_v1beta.types.AccountTax: + The tax settings of a merchant + account. All methods require the admin + role. + + """ + # Create or coerce a protobuf request object. 
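+        # Editorial note (illustrative, not generated code): the flattened ``name``
+        # argument and the ``request`` object are mutually exclusive ways to call this
+        # method. For example, using the placeholder value from the sample above:
+        #
+        #     client.get_account_tax(name="name_value")
+        #     client.get_account_tax(request={"name": "name_value"})
+        #
+        # Passing both at once triggers the ValueError raised just below.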
+ # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, account_tax.GetAccountTaxRequest): + request = account_tax.GetAccountTaxRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_account_tax] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def list_account_tax( + self, + request: Optional[Union[account_tax.ListAccountTaxRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListAccountTaxPager: + r"""Lists the tax settings of the sub-accounts only in + your Merchant Center account. + This method can only be called on a multi-client + account, otherwise it'll return an error. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.shopping import merchant_accounts_v1beta + + def sample_list_account_tax(): + # Create a client + client = merchant_accounts_v1beta.AccountTaxServiceClient() + + # Initialize request argument(s) + request = merchant_accounts_v1beta.ListAccountTaxRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_account_tax(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.shopping.merchant_accounts_v1beta.types.ListAccountTaxRequest, dict]): + The request object. Request to list all sub-account tax + settings only for the requesting + merchant This method can only be called + on a multi-client account, otherwise + it'll return an error. + parent (str): + Required. The parent, which owns this + collection of account tax. Format: + accounts/{account} + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.shopping.merchant_accounts_v1beta.services.account_tax_service.pagers.ListAccountTaxPager: + Response to account tax list request + This method can only be called on a + multi-client account, otherwise it'll + return an error. + + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, account_tax.ListAccountTaxRequest): + request = account_tax.ListAccountTaxRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_account_tax] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListAccountTaxPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def update_account_tax( + self, + request: Optional[Union[gsma_account_tax.UpdateAccountTaxRequest, dict]] = None, + *, + account_tax: Optional[gsma_account_tax.AccountTax] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> gsma_account_tax.AccountTax: + r"""Updates the tax settings of the account. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.shopping import merchant_accounts_v1beta + + def sample_update_account_tax(): + # Create a client + client = merchant_accounts_v1beta.AccountTaxServiceClient() + + # Initialize request argument(s) + request = merchant_accounts_v1beta.UpdateAccountTaxRequest( + ) + + # Make the request + response = client.update_account_tax(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.shopping.merchant_accounts_v1beta.types.UpdateAccountTaxRequest, dict]): + The request object. Request to update the tax settings + account_tax (google.shopping.merchant_accounts_v1beta.types.AccountTax): + Required. The tax setting that will + be updated + + This corresponds to the ``account_tax`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + The list of fields to be updated + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.shopping.merchant_accounts_v1beta.types.AccountTax: + The tax settings of a merchant + account. All methods require the admin + role. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([account_tax, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, gsma_account_tax.UpdateAccountTaxRequest): + request = gsma_account_tax.UpdateAccountTaxRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if account_tax is not None: + request.account_tax = account_tax + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update_account_tax] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("account_tax.name", request.account_tax.name),) + ), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def __enter__(self) -> "AccountTaxServiceClient": + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! 
Exiting the with block will CLOSE the transport + and may cause errors in other clients! + """ + self.transport.close() + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("AccountTaxServiceClient",) diff --git a/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/account_tax_service/pagers.py b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/account_tax_service/pagers.py new file mode 100644 index 000000000000..706cfeaaf369 --- /dev/null +++ b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/account_tax_service/pagers.py @@ -0,0 +1,155 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import ( + Any, + AsyncIterator, + Awaitable, + Callable, + Iterator, + Optional, + Sequence, + Tuple, +) + +from google.shopping.merchant_accounts_v1beta.types import account_tax + + +class ListAccountTaxPager: + """A pager for iterating through ``list_account_tax`` requests. + + This class thinly wraps an initial + :class:`google.shopping.merchant_accounts_v1beta.types.ListAccountTaxResponse` object, and + provides an ``__iter__`` method to iterate through its + ``account_taxes`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListAccountTax`` requests and continue to iterate + through the ``account_taxes`` field on the + corresponding responses. + + All the usual :class:`google.shopping.merchant_accounts_v1beta.types.ListAccountTaxResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., account_tax.ListAccountTaxResponse], + request: account_tax.ListAccountTaxRequest, + response: account_tax.ListAccountTaxResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.shopping.merchant_accounts_v1beta.types.ListAccountTaxRequest): + The initial request object. + response (google.shopping.merchant_accounts_v1beta.types.ListAccountTaxResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = account_tax.ListAccountTaxRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[account_tax.ListAccountTaxResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[account_tax.AccountTax]: + for page in self.pages: + yield from page.account_taxes + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListAccountTaxAsyncPager: + """A pager for iterating through ``list_account_tax`` requests. + + This class thinly wraps an initial + :class:`google.shopping.merchant_accounts_v1beta.types.ListAccountTaxResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``account_taxes`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListAccountTax`` requests and continue to iterate + through the ``account_taxes`` field on the + corresponding responses. + + All the usual :class:`google.shopping.merchant_accounts_v1beta.types.ListAccountTaxResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., Awaitable[account_tax.ListAccountTaxResponse]], + request: account_tax.ListAccountTaxRequest, + response: account_tax.ListAccountTaxResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.shopping.merchant_accounts_v1beta.types.ListAccountTaxRequest): + The initial request object. + response (google.shopping.merchant_accounts_v1beta.types.ListAccountTaxResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = account_tax.ListAccountTaxRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[account_tax.ListAccountTaxResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + + def __aiter__(self) -> AsyncIterator[account_tax.AccountTax]: + async def async_generator(): + async for page in self.pages: + for response in page.account_taxes: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) diff --git a/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/account_tax_service/transports/__init__.py b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/account_tax_service/transports/__init__.py new file mode 100644 index 000000000000..34836f558523 --- /dev/null +++ b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/account_tax_service/transports/__init__.py @@ -0,0 +1,36 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +from typing import Dict, Type + +from .base import AccountTaxServiceTransport +from .grpc import AccountTaxServiceGrpcTransport +from .grpc_asyncio import AccountTaxServiceGrpcAsyncIOTransport +from .rest import AccountTaxServiceRestInterceptor, AccountTaxServiceRestTransport + +# Compile a registry of transports. 
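+# Editorial note (illustrative): this registry maps the transport names accepted by the
+# client (e.g. ``AccountTaxServiceClient(transport="rest")``) to their concrete transport
+# classes, so a string such as "grpc_asyncio" resolves to
+# AccountTaxServiceGrpcAsyncIOTransport.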
+_transport_registry = OrderedDict() # type: Dict[str, Type[AccountTaxServiceTransport]] +_transport_registry["grpc"] = AccountTaxServiceGrpcTransport +_transport_registry["grpc_asyncio"] = AccountTaxServiceGrpcAsyncIOTransport +_transport_registry["rest"] = AccountTaxServiceRestTransport + +__all__ = ( + "AccountTaxServiceTransport", + "AccountTaxServiceGrpcTransport", + "AccountTaxServiceGrpcAsyncIOTransport", + "AccountTaxServiceRestTransport", + "AccountTaxServiceRestInterceptor", +) diff --git a/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/account_tax_service/transports/base.py b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/account_tax_service/transports/base.py new file mode 100644 index 000000000000..648bec494bd9 --- /dev/null +++ b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/account_tax_service/transports/base.py @@ -0,0 +1,193 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import abc +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union + +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.shopping.merchant_accounts_v1beta import gapic_version as package_version +from google.shopping.merchant_accounts_v1beta.types import ( + account_tax as gsma_account_tax, +) +from google.shopping.merchant_accounts_v1beta.types import account_tax + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +class AccountTaxServiceTransport(abc.ABC): + """Abstract transport class for AccountTaxService.""" + + AUTH_SCOPES = ("https://www.googleapis.com/auth/content",) + + DEFAULT_HOST: str = "merchantapi.googleapis.com" + + def __init__( + self, + *, + host: str = DEFAULT_HOST, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + **kwargs, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'merchantapi.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. 
These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A list of scopes. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + """ + + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} + + # Save the scopes. + self._scopes = scopes + + # If no credentials are provided, then determine the appropriate + # defaults. + if credentials and credentials_file: + raise core_exceptions.DuplicateCredentialArgs( + "'credentials_file' and 'credentials' are mutually exclusive" + ) + + if credentials_file is not None: + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, **scopes_kwargs, quota_project_id=quota_project_id + ) + elif credentials is None: + credentials, _ = google.auth.default( + **scopes_kwargs, quota_project_id=quota_project_id + ) + # Don't apply audience if the credentials file passed from user. + if hasattr(credentials, "with_gdch_audience"): + credentials = credentials.with_gdch_audience( + api_audience if api_audience else host + ) + + # If the credentials are service account credentials, then always try to use self signed JWT. + if ( + always_use_jwt_access + and isinstance(credentials, service_account.Credentials) + and hasattr(service_account.Credentials, "with_always_use_jwt_access") + ): + credentials = credentials.with_always_use_jwt_access(True) + + # Save the credentials. + self._credentials = credentials + + # Save the hostname. Default to port 443 (HTTPS) if none is specified. + if ":" not in host: + host += ":443" + self._host = host + + @property + def host(self): + return self._host + + def _prep_wrapped_messages(self, client_info): + # Precompute the wrapped methods. + self._wrapped_methods = { + self.get_account_tax: gapic_v1.method.wrap_method( + self.get_account_tax, + default_timeout=None, + client_info=client_info, + ), + self.list_account_tax: gapic_v1.method.wrap_method( + self.list_account_tax, + default_timeout=None, + client_info=client_info, + ), + self.update_account_tax: gapic_v1.method.wrap_method( + self.update_account_tax, + default_timeout=None, + client_info=client_info, + ), + } + + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! 
+ """ + raise NotImplementedError() + + @property + def get_account_tax( + self, + ) -> Callable[ + [account_tax.GetAccountTaxRequest], + Union[account_tax.AccountTax, Awaitable[account_tax.AccountTax]], + ]: + raise NotImplementedError() + + @property + def list_account_tax( + self, + ) -> Callable[ + [account_tax.ListAccountTaxRequest], + Union[ + account_tax.ListAccountTaxResponse, + Awaitable[account_tax.ListAccountTaxResponse], + ], + ]: + raise NotImplementedError() + + @property + def update_account_tax( + self, + ) -> Callable[ + [gsma_account_tax.UpdateAccountTaxRequest], + Union[gsma_account_tax.AccountTax, Awaitable[gsma_account_tax.AccountTax]], + ]: + raise NotImplementedError() + + @property + def kind(self) -> str: + raise NotImplementedError() + + +__all__ = ("AccountTaxServiceTransport",) diff --git a/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/account_tax_service/transports/grpc.py b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/account_tax_service/transports/grpc.py new file mode 100644 index 000000000000..fdbcc1b5c11f --- /dev/null +++ b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/account_tax_service/transports/grpc.py @@ -0,0 +1,337 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, grpc_helpers +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +import grpc # type: ignore + +from google.shopping.merchant_accounts_v1beta.types import ( + account_tax as gsma_account_tax, +) +from google.shopping.merchant_accounts_v1beta.types import account_tax + +from .base import DEFAULT_CLIENT_INFO, AccountTaxServiceTransport + + +class AccountTaxServiceGrpcTransport(AccountTaxServiceTransport): + """gRPC backend transport for AccountTaxService. + + Manages account level tax setting data. + + This API defines the following resource model: + + - [AccountTax][google.shopping.merchant.accounts.v1main.AccountTax] + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. 
+ """ + + _stubs: Dict[str, Callable] + + def __init__( + self, + *, + host: str = "merchantapi.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'merchantapi.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if a ``channel`` instance is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if a ``channel`` instance is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if a ``channel`` instance is provided. + channel (Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]]): + A ``Channel`` instance through which to make calls, or a Callable + that constructs and returns one. If set to None, ``self.create_channel`` + is used to create the channel. If a Callable is given, it will be called + with the same arguments as used in ``self.create_channel``. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if a ``channel`` instance is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. 
+ google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if isinstance(channel, grpc.Channel): + # Ignore credentials if a channel was passed. + credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + # initialize with the provided callable or the default channel + channel_init = channel or type(self).create_channel + self._grpc_channel = channel_init( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @classmethod + def create_channel( + cls, + host: str = "merchantapi.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> grpc.Channel: + """Create and return a gRPC channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. 
+ kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + grpc.Channel: A gRPC channel object. + + Raises: + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + + return grpc_helpers.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs, + ) + + @property + def grpc_channel(self) -> grpc.Channel: + """Return the channel designed to connect to this service.""" + return self._grpc_channel + + @property + def get_account_tax( + self, + ) -> Callable[[account_tax.GetAccountTaxRequest], account_tax.AccountTax]: + r"""Return a callable for the get account tax method over gRPC. + + Returns the tax rules that match the conditions of + GetAccountTaxRequest + + Returns: + Callable[[~.GetAccountTaxRequest], + ~.AccountTax]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_account_tax" not in self._stubs: + self._stubs["get_account_tax"] = self.grpc_channel.unary_unary( + "/google.shopping.merchant.accounts.v1beta.AccountTaxService/GetAccountTax", + request_serializer=account_tax.GetAccountTaxRequest.serialize, + response_deserializer=account_tax.AccountTax.deserialize, + ) + return self._stubs["get_account_tax"] + + @property + def list_account_tax( + self, + ) -> Callable[ + [account_tax.ListAccountTaxRequest], account_tax.ListAccountTaxResponse + ]: + r"""Return a callable for the list account tax method over gRPC. + + Lists the tax settings of the sub-accounts only in + your Merchant Center account. + This method can only be called on a multi-client + account, otherwise it'll return an error. + + Returns: + Callable[[~.ListAccountTaxRequest], + ~.ListAccountTaxResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_account_tax" not in self._stubs: + self._stubs["list_account_tax"] = self.grpc_channel.unary_unary( + "/google.shopping.merchant.accounts.v1beta.AccountTaxService/ListAccountTax", + request_serializer=account_tax.ListAccountTaxRequest.serialize, + response_deserializer=account_tax.ListAccountTaxResponse.deserialize, + ) + return self._stubs["list_account_tax"] + + @property + def update_account_tax( + self, + ) -> Callable[ + [gsma_account_tax.UpdateAccountTaxRequest], gsma_account_tax.AccountTax + ]: + r"""Return a callable for the update account tax method over gRPC. + + Updates the tax settings of the account. + + Returns: + Callable[[~.UpdateAccountTaxRequest], + ~.AccountTax]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
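+        # Editorial note (illustrative): like the other RPC properties above, the stub
+        # is created lazily on first access and cached in ``self._stubs``, so repeated
+        # property lookups reuse the same unary-unary callable on the channel.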
+ if "update_account_tax" not in self._stubs: + self._stubs["update_account_tax"] = self.grpc_channel.unary_unary( + "/google.shopping.merchant.accounts.v1beta.AccountTaxService/UpdateAccountTax", + request_serializer=gsma_account_tax.UpdateAccountTaxRequest.serialize, + response_deserializer=gsma_account_tax.AccountTax.deserialize, + ) + return self._stubs["update_account_tax"] + + def close(self): + self.grpc_channel.close() + + @property + def kind(self) -> str: + return "grpc" + + +__all__ = ("AccountTaxServiceGrpcTransport",) diff --git a/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/account_tax_service/transports/grpc_asyncio.py b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/account_tax_service/transports/grpc_asyncio.py new file mode 100644 index 000000000000..3b7d3e0085b2 --- /dev/null +++ b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/account_tax_service/transports/grpc_asyncio.py @@ -0,0 +1,361 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1, grpc_helpers_async +from google.api_core import retry_async as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +import grpc # type: ignore +from grpc.experimental import aio # type: ignore + +from google.shopping.merchant_accounts_v1beta.types import ( + account_tax as gsma_account_tax, +) +from google.shopping.merchant_accounts_v1beta.types import account_tax + +from .base import DEFAULT_CLIENT_INFO, AccountTaxServiceTransport +from .grpc import AccountTaxServiceGrpcTransport + + +class AccountTaxServiceGrpcAsyncIOTransport(AccountTaxServiceTransport): + """gRPC AsyncIO backend transport for AccountTaxService. + + Manages account level tax setting data. + + This API defines the following resource model: + + - [AccountTax][google.shopping.merchant.accounts.v1main.AccountTax] + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + + _grpc_channel: aio.Channel + _stubs: Dict[str, Callable] = {} + + @classmethod + def create_channel( + cls, + host: str = "merchantapi.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> aio.Channel: + """Create and return a gRPC AsyncIO channel object. 
+ Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + aio.Channel: A gRPC AsyncIO channel object. + """ + + return grpc_helpers_async.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs, + ) + + def __init__( + self, + *, + host: str = "merchantapi.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[Union[aio.Channel, Callable[..., aio.Channel]]] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'merchantapi.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if a ``channel`` instance is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if a ``channel`` instance is provided. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + channel (Optional[Union[aio.Channel, Callable[..., aio.Channel]]]): + A ``Channel`` instance through which to make calls, or a Callable + that constructs and returns one. If set to None, ``self.create_channel`` + is used to create the channel. If a Callable is given, it will be called + with the same arguments as used in ``self.create_channel``. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. 
A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if a ``channel`` instance is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if isinstance(channel, aio.Channel): + # Ignore credentials if a channel was passed. + credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + # initialize with the provided callable or the default channel + channel_init = channel or type(self).create_channel + self._grpc_channel = channel_init( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. 
This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @property + def grpc_channel(self) -> aio.Channel: + """Create the channel designed to connect to this service. + + This property caches on the instance; repeated calls return + the same channel. + """ + # Return the channel from cache. + return self._grpc_channel + + @property + def get_account_tax( + self, + ) -> Callable[ + [account_tax.GetAccountTaxRequest], Awaitable[account_tax.AccountTax] + ]: + r"""Return a callable for the get account tax method over gRPC. + + Returns the tax rules that match the conditions of + GetAccountTaxRequest + + Returns: + Callable[[~.GetAccountTaxRequest], + Awaitable[~.AccountTax]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_account_tax" not in self._stubs: + self._stubs["get_account_tax"] = self.grpc_channel.unary_unary( + "/google.shopping.merchant.accounts.v1beta.AccountTaxService/GetAccountTax", + request_serializer=account_tax.GetAccountTaxRequest.serialize, + response_deserializer=account_tax.AccountTax.deserialize, + ) + return self._stubs["get_account_tax"] + + @property + def list_account_tax( + self, + ) -> Callable[ + [account_tax.ListAccountTaxRequest], + Awaitable[account_tax.ListAccountTaxResponse], + ]: + r"""Return a callable for the list account tax method over gRPC. + + Lists the tax settings of the sub-accounts only in + your Merchant Center account. + This method can only be called on a multi-client + account, otherwise it'll return an error. + + Returns: + Callable[[~.ListAccountTaxRequest], + Awaitable[~.ListAccountTaxResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_account_tax" not in self._stubs: + self._stubs["list_account_tax"] = self.grpc_channel.unary_unary( + "/google.shopping.merchant.accounts.v1beta.AccountTaxService/ListAccountTax", + request_serializer=account_tax.ListAccountTaxRequest.serialize, + response_deserializer=account_tax.ListAccountTaxResponse.deserialize, + ) + return self._stubs["list_account_tax"] + + @property + def update_account_tax( + self, + ) -> Callable[ + [gsma_account_tax.UpdateAccountTaxRequest], + Awaitable[gsma_account_tax.AccountTax], + ]: + r"""Return a callable for the update account tax method over gRPC. + + Updates the tax settings of the account. + + Returns: + Callable[[~.UpdateAccountTaxRequest], + Awaitable[~.AccountTax]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "update_account_tax" not in self._stubs: + self._stubs["update_account_tax"] = self.grpc_channel.unary_unary( + "/google.shopping.merchant.accounts.v1beta.AccountTaxService/UpdateAccountTax", + request_serializer=gsma_account_tax.UpdateAccountTaxRequest.serialize, + response_deserializer=gsma_account_tax.AccountTax.deserialize, + ) + return self._stubs["update_account_tax"] + + def _prep_wrapped_messages(self, client_info): + """Precompute the wrapped methods, overriding the base class method to use async wrappers.""" + self._wrapped_methods = { + self.get_account_tax: gapic_v1.method_async.wrap_method( + self.get_account_tax, + default_timeout=None, + client_info=client_info, + ), + self.list_account_tax: gapic_v1.method_async.wrap_method( + self.list_account_tax, + default_timeout=None, + client_info=client_info, + ), + self.update_account_tax: gapic_v1.method_async.wrap_method( + self.update_account_tax, + default_timeout=None, + client_info=client_info, + ), + } + + def close(self): + return self.grpc_channel.close() + + +__all__ = ("AccountTaxServiceGrpcAsyncIOTransport",) diff --git a/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/account_tax_service/transports/rest.py b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/account_tax_service/transports/rest.py new file mode 100644 index 000000000000..3a254f06599d --- /dev/null +++ b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/account_tax_service/transports/rest.py @@ -0,0 +1,585 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
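(Editor's note: the three RPC properties of the async gRPC transport above — ``get_account_tax``, ``list_account_tax``, ``update_account_tax`` — all rely on the same lazy stub-caching pattern: build the ``unary_unary`` stub on first access, then serve it from ``self._stubs``. Before moving on to the REST transport added below, here is a minimal, dependency-free sketch of that pattern only; ``LazyStubCache`` and ``echo`` are hypothetical names and are not part of this package.)

```python
from typing import Callable, Dict


class LazyStubCache:
    """Illustrative only: lazy, per-RPC stub caching as used by the transport."""

    def __init__(self) -> None:
        # Maps RPC name -> the callable built for it on first access.
        self._stubs: Dict[str, Callable[[str], str]] = {}

    @property
    def echo(self) -> Callable[[str], str]:
        if "echo" not in self._stubs:
            # The real transport builds `self.grpc_channel.unary_unary(...)`
            # here, passing the request serializer and response deserializer.
            self._stubs["echo"] = lambda payload: f"echo: {payload}"
        return self._stubs["echo"]


cache = LazyStubCache()
assert cache.echo is cache.echo  # built once, then reused
print(cache.echo("hello"))
```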
+# + +import dataclasses +import json # type: ignore +import re +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, path_template, rest_helpers, rest_streaming +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.transport.requests import AuthorizedSession # type: ignore +from google.protobuf import json_format +import grpc # type: ignore +from requests import __version__ as requests_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + + +from google.shopping.merchant_accounts_v1beta.types import ( + account_tax as gsma_account_tax, +) +from google.shopping.merchant_accounts_v1beta.types import account_tax + +from .base import AccountTaxServiceTransport +from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) + + +class AccountTaxServiceRestInterceptor: + """Interceptor for AccountTaxService. + + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. + Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the AccountTaxServiceRestTransport. + + .. code-block:: python + class MyCustomAccountTaxServiceInterceptor(AccountTaxServiceRestInterceptor): + def pre_get_account_tax(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_account_tax(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_account_tax(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_account_tax(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_update_account_tax(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_update_account_tax(self, response): + logging.log(f"Received response: {response}") + return response + + transport = AccountTaxServiceRestTransport(interceptor=MyCustomAccountTaxServiceInterceptor()) + client = AccountTaxServiceClient(transport=transport) + + + """ + + def pre_get_account_tax( + self, + request: account_tax.GetAccountTaxRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[account_tax.GetAccountTaxRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_account_tax + + Override in a subclass to manipulate the request or metadata + before they are sent to the AccountTaxService server. + """ + return request, metadata + + def post_get_account_tax( + self, response: account_tax.AccountTax + ) -> account_tax.AccountTax: + """Post-rpc interceptor for get_account_tax + + Override in a subclass to manipulate the response + after it is returned by the AccountTaxService server but before + it is returned to user code. 
+ """ + return response + + def pre_list_account_tax( + self, + request: account_tax.ListAccountTaxRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[account_tax.ListAccountTaxRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_account_tax + + Override in a subclass to manipulate the request or metadata + before they are sent to the AccountTaxService server. + """ + return request, metadata + + def post_list_account_tax( + self, response: account_tax.ListAccountTaxResponse + ) -> account_tax.ListAccountTaxResponse: + """Post-rpc interceptor for list_account_tax + + Override in a subclass to manipulate the response + after it is returned by the AccountTaxService server but before + it is returned to user code. + """ + return response + + def pre_update_account_tax( + self, + request: gsma_account_tax.UpdateAccountTaxRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[gsma_account_tax.UpdateAccountTaxRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for update_account_tax + + Override in a subclass to manipulate the request or metadata + before they are sent to the AccountTaxService server. + """ + return request, metadata + + def post_update_account_tax( + self, response: gsma_account_tax.AccountTax + ) -> gsma_account_tax.AccountTax: + """Post-rpc interceptor for update_account_tax + + Override in a subclass to manipulate the response + after it is returned by the AccountTaxService server but before + it is returned to user code. + """ + return response + + +@dataclasses.dataclass +class AccountTaxServiceRestStub: + _session: AuthorizedSession + _host: str + _interceptor: AccountTaxServiceRestInterceptor + + +class AccountTaxServiceRestTransport(AccountTaxServiceTransport): + """REST backend transport for AccountTaxService. + + Manages account level tax setting data. + + This API defines the following resource model: + + - [AccountTax][google.shopping.merchant.accounts.v1main.AccountTax] + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends JSON representations of protocol buffers over HTTP/1.1 + + """ + + def __init__( + self, + *, + host: str = "merchantapi.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", + interceptor: Optional[AccountTaxServiceRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'merchantapi.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. 
+            client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client
+                certificate to configure mutual TLS HTTP channel. It is ignored
+                if ``channel`` is provided.
+            quota_project_id (Optional[str]): An optional project to use for billing
+                and quota.
+            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+                The client info used to send a user-agent string along with
+                API requests. If ``None``, then default info will be used.
+                Generally, you only need to set this if you are developing
+                your own client library.
+            always_use_jwt_access (Optional[bool]): Whether self signed JWT should
+                be used for service account credentials.
+            url_scheme: the protocol scheme for the API endpoint. Normally
+                "https", but for testing or local servers,
+                "http" can be specified.
+        """
+        # Run the base constructor
+        # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc.
+        # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the
+        # credentials object
+        maybe_url_match = re.match("^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host)
+        if maybe_url_match is None:
+            raise ValueError(
+                f"Unexpected hostname structure: {host}"
+            )  # pragma: NO COVER
+
+        url_match_items = maybe_url_match.groupdict()
+
+        host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host
+
+        super().__init__(
+            host=host,
+            credentials=credentials,
+            client_info=client_info,
+            always_use_jwt_access=always_use_jwt_access,
+            api_audience=api_audience,
+        )
+        self._session = AuthorizedSession(
+            self._credentials, default_host=self.DEFAULT_HOST
+        )
+        if client_cert_source_for_mtls:
+            self._session.configure_mtls_channel(client_cert_source_for_mtls)
+        self._interceptor = interceptor or AccountTaxServiceRestInterceptor()
+        self._prep_wrapped_messages(client_info)
+
+    class _GetAccountTax(AccountTaxServiceRestStub):
+        def __hash__(self):
+            return hash("GetAccountTax")
+
+        __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {}
+
+        @classmethod
+        def _get_unset_required_fields(cls, message_dict):
+            return {
+                k: v
+                for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items()
+                if k not in message_dict
+            }
+
+        def __call__(
+            self,
+            request: account_tax.GetAccountTaxRequest,
+            *,
+            retry: OptionalRetry = gapic_v1.method.DEFAULT,
+            timeout: Optional[float] = None,
+            metadata: Sequence[Tuple[str, str]] = (),
+        ) -> account_tax.AccountTax:
+            r"""Call the get account tax method over HTTP.
+
+            Args:
+                request (~.account_tax.GetAccountTaxRequest):
+                    The request object. Request to get tax settings
+                retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                    should be retried.
+                timeout (float): The timeout for this request.
+                metadata (Sequence[Tuple[str, str]]): Strings which should be
+                    sent along with the request as metadata.
+
+            Returns:
+                ~.account_tax.AccountTax:
+                    The tax settings of a merchant
+                account. All methods require the admin
+                role.
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/accounts/v1beta/{name=accounts/*/accounttax/*}", + }, + ] + request, metadata = self._interceptor.pre_get_account_tax(request, metadata) + pb_request = account_tax.GetAccountTaxRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = account_tax.AccountTax() + pb_resp = account_tax.AccountTax.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_account_tax(resp) + return resp + + class _ListAccountTax(AccountTaxServiceRestStub): + def __hash__(self): + return hash("ListAccountTax") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: account_tax.ListAccountTaxRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> account_tax.ListAccountTaxResponse: + r"""Call the list account tax method over HTTP. + + Args: + request (~.account_tax.ListAccountTaxRequest): + The request object. Request to list all sub-account tax + settings only for the requesting + merchant This method can only be called + on a multi-client account, otherwise + it'll return an error. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.account_tax.ListAccountTaxResponse: + Response to account tax list request + This method can only be called on a + multi-client account, otherwise it'll + return an error. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/accounts/v1beta/{parent=accounts/*}/accounttax", + }, + ] + request, metadata = self._interceptor.pre_list_account_tax( + request, metadata + ) + pb_request = account_tax.ListAccountTaxRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = account_tax.ListAccountTaxResponse() + pb_resp = account_tax.ListAccountTaxResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_account_tax(resp) + return resp + + class _UpdateAccountTax(AccountTaxServiceRestStub): + def __hash__(self): + return hash("UpdateAccountTax") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: gsma_account_tax.UpdateAccountTaxRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> gsma_account_tax.AccountTax: + r"""Call the update account tax method over HTTP. + + Args: + request (~.gsma_account_tax.UpdateAccountTaxRequest): + The request object. Request to update the tax settings + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.gsma_account_tax.AccountTax: + The tax settings of a merchant + account. All methods require the admin + role. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "patch", + "uri": "/accounts/v1beta/{account_tax.name=accounts/*/accounttax/*}", + "body": "account_tax", + }, + ] + request, metadata = self._interceptor.pre_update_account_tax( + request, metadata + ) + pb_request = gsma_account_tax.UpdateAccountTaxRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = gsma_account_tax.AccountTax() + pb_resp = gsma_account_tax.AccountTax.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_update_account_tax(resp) + return resp + + @property + def get_account_tax( + self, + ) -> Callable[[account_tax.GetAccountTaxRequest], account_tax.AccountTax]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetAccountTax(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_account_tax( + self, + ) -> Callable[ + [account_tax.ListAccountTaxRequest], account_tax.ListAccountTaxResponse + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListAccountTax(self._session, self._host, self._interceptor) # type: ignore + + @property + def update_account_tax( + self, + ) -> Callable[ + [gsma_account_tax.UpdateAccountTaxRequest], gsma_account_tax.AccountTax + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._UpdateAccountTax(self._session, self._host, self._interceptor) # type: ignore + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__ = ("AccountTaxServiceRestTransport",) diff --git a/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/accounts_service/__init__.py b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/accounts_service/__init__.py new file mode 100644 index 000000000000..66330cee3f6b --- /dev/null +++ b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/accounts_service/__init__.py @@ -0,0 +1,22 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from .async_client import AccountsServiceAsyncClient +from .client import AccountsServiceClient + +__all__ = ( + "AccountsServiceClient", + "AccountsServiceAsyncClient", +) diff --git a/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/accounts_service/async_client.py b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/accounts_service/async_client.py new file mode 100644 index 000000000000..874f80f12a8f --- /dev/null +++ b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/accounts_service/async_client.py @@ -0,0 +1,904 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
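(Editor's note: the ``AccountTaxServiceRestInterceptor`` defined in the REST transport above exposes ``pre_*``/``post_*`` hooks around each RPC. Below is a hedged sketch of wiring a custom interceptor into that transport. The client import path follows the usual GAPIC layout and, like the logging behaviour and the example resource name, is an assumption rather than something taken from this diff; constructing the transport also requires application default credentials.)

```python
import logging

from google.shopping.merchant_accounts_v1beta.services.account_tax_service import (
    AccountTaxServiceClient,  # assumed export, per the standard GAPIC layout
)
from google.shopping.merchant_accounts_v1beta.services.account_tax_service.transports.rest import (
    AccountTaxServiceRestInterceptor,
    AccountTaxServiceRestTransport,
)


class LoggingInterceptor(AccountTaxServiceRestInterceptor):
    def pre_get_account_tax(self, request, metadata):
        # Runs before the HTTP call; may return a modified (request, metadata) pair.
        logging.info("GetAccountTax request: %s", request)
        return request, metadata

    def post_get_account_tax(self, response):
        # Runs after a successful call, before the response reaches user code.
        logging.info("GetAccountTax response received")
        return response


# Requires application default credentials to construct.
transport = AccountTaxServiceRestTransport(interceptor=LoggingInterceptor())
client = AccountTaxServiceClient(transport=transport)
# client.get_account_tax(request={"name": "accounts/123/accounttax/456"})
# would now pass through both hooks.
```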
+# +from collections import OrderedDict +import functools +import re +from typing import ( + Callable, + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, +) + +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry_async as retries +from google.api_core.client_options import ClientOptions +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.shopping.merchant_accounts_v1beta import gapic_version as package_version + +try: + OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.AsyncRetry, object, None] # type: ignore + +from google.protobuf import field_mask_pb2 # type: ignore +from google.type import datetime_pb2 # type: ignore + +from google.shopping.merchant_accounts_v1beta.services.accounts_service import pagers +from google.shopping.merchant_accounts_v1beta.types import accounts + +from .client import AccountsServiceClient +from .transports.base import DEFAULT_CLIENT_INFO, AccountsServiceTransport +from .transports.grpc_asyncio import AccountsServiceGrpcAsyncIOTransport + + +class AccountsServiceAsyncClient: + """Service to support Accounts API.""" + + _client: AccountsServiceClient + + # Copy defaults from the synchronous client for use here. + # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. + DEFAULT_ENDPOINT = AccountsServiceClient.DEFAULT_ENDPOINT + DEFAULT_MTLS_ENDPOINT = AccountsServiceClient.DEFAULT_MTLS_ENDPOINT + _DEFAULT_ENDPOINT_TEMPLATE = AccountsServiceClient._DEFAULT_ENDPOINT_TEMPLATE + _DEFAULT_UNIVERSE = AccountsServiceClient._DEFAULT_UNIVERSE + + account_path = staticmethod(AccountsServiceClient.account_path) + parse_account_path = staticmethod(AccountsServiceClient.parse_account_path) + terms_of_service_path = staticmethod(AccountsServiceClient.terms_of_service_path) + parse_terms_of_service_path = staticmethod( + AccountsServiceClient.parse_terms_of_service_path + ) + user_path = staticmethod(AccountsServiceClient.user_path) + parse_user_path = staticmethod(AccountsServiceClient.parse_user_path) + common_billing_account_path = staticmethod( + AccountsServiceClient.common_billing_account_path + ) + parse_common_billing_account_path = staticmethod( + AccountsServiceClient.parse_common_billing_account_path + ) + common_folder_path = staticmethod(AccountsServiceClient.common_folder_path) + parse_common_folder_path = staticmethod( + AccountsServiceClient.parse_common_folder_path + ) + common_organization_path = staticmethod( + AccountsServiceClient.common_organization_path + ) + parse_common_organization_path = staticmethod( + AccountsServiceClient.parse_common_organization_path + ) + common_project_path = staticmethod(AccountsServiceClient.common_project_path) + parse_common_project_path = staticmethod( + AccountsServiceClient.parse_common_project_path + ) + common_location_path = staticmethod(AccountsServiceClient.common_location_path) + parse_common_location_path = staticmethod( + AccountsServiceClient.parse_common_location_path + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. 
+ kwargs: Additional arguments to pass to the constructor. + + Returns: + AccountsServiceAsyncClient: The constructed client. + """ + return AccountsServiceClient.from_service_account_info.__func__(AccountsServiceAsyncClient, info, *args, **kwargs) # type: ignore + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + AccountsServiceAsyncClient: The constructed client. + """ + return AccountsServiceClient.from_service_account_file.__func__(AccountsServiceAsyncClient, filename, *args, **kwargs) # type: ignore + + from_service_account_json = from_service_account_file + + @classmethod + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[ClientOptions] = None + ): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + return AccountsServiceClient.get_mtls_endpoint_and_cert_source(client_options) # type: ignore + + @property + def transport(self) -> AccountsServiceTransport: + """Returns the transport used by the client instance. + + Returns: + AccountsServiceTransport: The transport used by the client instance. + """ + return self._client.transport + + @property + def api_endpoint(self): + """Return the API endpoint used by the client instance. + + Returns: + str: The API endpoint used by the client instance. + """ + return self._client._api_endpoint + + @property + def universe_domain(self) -> str: + """Return the universe domain used by the client instance. + + Returns: + str: The universe domain used + by the client instance. 
+ """ + return self._client._universe_domain + + get_transport_class = functools.partial( + type(AccountsServiceClient).get_transport_class, type(AccountsServiceClient) + ) + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[ + Union[ + str, AccountsServiceTransport, Callable[..., AccountsServiceTransport] + ] + ] = "grpc_asyncio", + client_options: Optional[ClientOptions] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the accounts service async client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Optional[Union[str,AccountsServiceTransport,Callable[..., AccountsServiceTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport to use. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the AccountsServiceTransport constructor. + If set to None, a transport is chosen automatically. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): + Custom options for the client. + + 1. The ``api_endpoint`` property can be used to override the + default endpoint provided by the client when ``transport`` is + not explicitly provided. Only if this property is not set and + ``transport`` was not explicitly provided, the endpoint is + determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment + variable, which have one of the following values: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto-switch to the + default mTLS endpoint if client certificate is present; this is + the default value). + + 2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide a client certificate for mTLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + 3. The ``universe_domain`` property can be used to override the + default "googleapis.com" universe. Note that ``api_endpoint`` + property still takes precedence; and ``universe_domain`` is + currently not supported for mTLS. + + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + """ + self._client = AccountsServiceClient( + credentials=credentials, + transport=transport, + client_options=client_options, + client_info=client_info, + ) + + async def get_account( + self, + request: Optional[Union[accounts.GetAccountRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> accounts.Account: + r"""Retrieves an account from your Merchant Center + account. 
After inserting, updating, or deleting an + account, it may take several minutes before changes take + effect. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.shopping import merchant_accounts_v1beta + + async def sample_get_account(): + # Create a client + client = merchant_accounts_v1beta.AccountsServiceAsyncClient() + + # Initialize request argument(s) + request = merchant_accounts_v1beta.GetAccountRequest( + name="name_value", + ) + + # Make the request + response = await client.get_account(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.shopping.merchant_accounts_v1beta.types.GetAccountRequest, dict]]): + The request object. Request message for the ``GetAccount`` method. + name (:class:`str`): + Required. The name of the account to retrieve. Format: + ``accounts/{account}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.shopping.merchant_accounts_v1beta.types.Account: + An account. + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, accounts.GetAccountRequest): + request = accounts.GetAccountRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.get_account + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def create_and_configure_account( + self, + request: Optional[ + Union[accounts.CreateAndConfigureAccountRequest, dict] + ] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> accounts.Account: + r"""Creates a standalone Merchant Center account with + additional configuration. Adds the user that makes the + request as an admin for the new account. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.shopping import merchant_accounts_v1beta + + async def sample_create_and_configure_account(): + # Create a client + client = merchant_accounts_v1beta.AccountsServiceAsyncClient() + + # Initialize request argument(s) + account = merchant_accounts_v1beta.Account() + account.account_name = "account_name_value" + account.language_code = "language_code_value" + + request = merchant_accounts_v1beta.CreateAndConfigureAccountRequest( + account=account, + ) + + # Make the request + response = await client.create_and_configure_account(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.shopping.merchant_accounts_v1beta.types.CreateAndConfigureAccountRequest, dict]]): + The request object. Request message for the ``CreateAndConfigureAccount`` + method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.shopping.merchant_accounts_v1beta.types.Account: + An account. + """ + # Create or coerce a protobuf request object. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, accounts.CreateAndConfigureAccountRequest): + request = accounts.CreateAndConfigureAccountRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.create_and_configure_account + ] + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def delete_account( + self, + request: Optional[Union[accounts.DeleteAccountRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes the specified account regardless of its type: + standalone, MCA or sub-account. Deleting an MCA leads to + the deletion of all of its sub-accounts. Executing this + method requires admin access. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.shopping import merchant_accounts_v1beta + + async def sample_delete_account(): + # Create a client + client = merchant_accounts_v1beta.AccountsServiceAsyncClient() + + # Initialize request argument(s) + request = merchant_accounts_v1beta.DeleteAccountRequest( + name="name_value", + ) + + # Make the request + await client.delete_account(request=request) + + Args: + request (Optional[Union[google.shopping.merchant_accounts_v1beta.types.DeleteAccountRequest, dict]]): + The request object. Request message for the ``DeleteAccount`` method. + name (:class:`str`): + Required. The name of the account to delete. Format: + ``accounts/{account}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, accounts.DeleteAccountRequest): + request = accounts.DeleteAccountRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.delete_account + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def update_account( + self, + request: Optional[Union[accounts.UpdateAccountRequest, dict]] = None, + *, + account: Optional[accounts.Account] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> accounts.Account: + r"""Updates an account regardless of its type: + standalone, MCA or sub-account. Executing this method + requires admin access. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.shopping import merchant_accounts_v1beta + + async def sample_update_account(): + # Create a client + client = merchant_accounts_v1beta.AccountsServiceAsyncClient() + + # Initialize request argument(s) + account = merchant_accounts_v1beta.Account() + account.account_name = "account_name_value" + account.language_code = "language_code_value" + + request = merchant_accounts_v1beta.UpdateAccountRequest( + account=account, + ) + + # Make the request + response = await client.update_account(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.shopping.merchant_accounts_v1beta.types.UpdateAccountRequest, dict]]): + The request object. Request message for the ``UpdateAccount`` method. + account (:class:`google.shopping.merchant_accounts_v1beta.types.Account`): + Required. The new version of the + account. + + This corresponds to the ``account`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): + Required. List of fields being + updated. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.shopping.merchant_accounts_v1beta.types.Account: + An account. + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([account, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, accounts.UpdateAccountRequest): + request = accounts.UpdateAccountRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if account is not None: + request.account = account + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.update_account + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("account.name", request.account.name),) + ), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def list_accounts( + self, + request: Optional[Union[accounts.ListAccountsRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListAccountsAsyncPager: + r"""Lists accounts accessible to the calling user and + matching the constraints of the request such as page + size or filters. This is not just listing the + sub-accounts of an MCA, but all accounts the calling + user has access to including other MCAs, linked + accounts, standalone accounts and so on. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.shopping import merchant_accounts_v1beta + + async def sample_list_accounts(): + # Create a client + client = merchant_accounts_v1beta.AccountsServiceAsyncClient() + + # Initialize request argument(s) + request = merchant_accounts_v1beta.ListAccountsRequest( + ) + + # Make the request + page_result = client.list_accounts(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.shopping.merchant_accounts_v1beta.types.ListAccountsRequest, dict]]): + The request object. Request message for the ``ListAccounts`` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.shopping.merchant_accounts_v1beta.services.accounts_service.pagers.ListAccountsAsyncPager: + Response message for the ListAccounts method. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, accounts.ListAccountsRequest): + request = accounts.ListAccountsRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.list_accounts + ] + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListAccountsAsyncPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def list_sub_accounts( + self, + request: Optional[Union[accounts.ListSubAccountsRequest, dict]] = None, + *, + provider: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListSubAccountsAsyncPager: + r"""List all sub-accounts for a given multi client account. This is + a convenience wrapper for the more powerful ``ListAccounts`` + method. This method will produce the same results as calling + ``ListsAccounts`` with the following filter: + ``relationship(providerId={parent} AND service(type="ACCOUNT_AGGREGATION"))`` + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.shopping import merchant_accounts_v1beta + + async def sample_list_sub_accounts(): + # Create a client + client = merchant_accounts_v1beta.AccountsServiceAsyncClient() + + # Initialize request argument(s) + request = merchant_accounts_v1beta.ListSubAccountsRequest( + provider="provider_value", + ) + + # Make the request + page_result = client.list_sub_accounts(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.shopping.merchant_accounts_v1beta.types.ListSubAccountsRequest, dict]]): + The request object. Request message for the ``ListSubAccounts`` method. + provider (:class:`str`): + Required. The parent account. Format: + ``accounts/{account}`` + + This corresponds to the ``provider`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.shopping.merchant_accounts_v1beta.services.accounts_service.pagers.ListSubAccountsAsyncPager: + Response message for the ListSubAccounts method. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([provider]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, accounts.ListSubAccountsRequest): + request = accounts.ListSubAccountsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if provider is not None: + request.provider = provider + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = self._client._transport._wrapped_methods[ + self._client._transport.list_sub_accounts + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("provider", request.provider),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListSubAccountsAsyncPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def __aenter__(self) -> "AccountsServiceAsyncClient": + return self + + async def __aexit__(self, exc_type, exc, tb): + await self.transport.close() + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("AccountsServiceAsyncClient",) diff --git a/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/accounts_service/client.py b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/accounts_service/client.py new file mode 100644 index 000000000000..1cbdd3f570d3 --- /dev/null +++ b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/accounts_service/client.py @@ -0,0 +1,1336 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
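(Editor's note: before the synchronous client added below, here is a condensed, hedged usage sketch of the ``AccountsServiceAsyncClient`` defined above. It combines the flattened-argument form of ``get_account`` with the async pager returned by ``list_accounts``. The account name is a placeholder, and running the snippet requires valid application default credentials with access to the Merchant API.)

```python
import asyncio

from google.shopping import merchant_accounts_v1beta


async def main() -> None:
    # Uses application default credentials.
    client = merchant_accounts_v1beta.AccountsServiceAsyncClient()

    # Flattened form: pass `name=` directly instead of a GetAccountRequest.
    # Passing both a request object and flattened fields raises ValueError.
    account = await client.get_account(name="accounts/123")
    print(account.account_name)

    # Awaiting list_accounts yields an async pager; iterating it resolves
    # additional pages transparently.
    pager = await client.list_accounts(
        request=merchant_accounts_v1beta.ListAccountsRequest()
    )
    async for acct in pager:
        print(acct.name)


asyncio.run(main())
```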
+#
+from collections import OrderedDict
+import os
+import re
+from typing import (
+    Callable,
+    Dict,
+    Mapping,
+    MutableMapping,
+    MutableSequence,
+    Optional,
+    Sequence,
+    Tuple,
+    Type,
+    Union,
+    cast,
+)
+import warnings
+
+from google.api_core import client_options as client_options_lib
+from google.api_core import exceptions as core_exceptions
+from google.api_core import gapic_v1
+from google.api_core import retry as retries
+from google.auth import credentials as ga_credentials  # type: ignore
+from google.auth.exceptions import MutualTLSChannelError  # type: ignore
+from google.auth.transport import mtls  # type: ignore
+from google.auth.transport.grpc import SslCredentials  # type: ignore
+from google.oauth2 import service_account  # type: ignore
+
+from google.shopping.merchant_accounts_v1beta import gapic_version as package_version
+
+try:
+    OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None]
+except AttributeError:  # pragma: NO COVER
+    OptionalRetry = Union[retries.Retry, object, None]  # type: ignore
+
+from google.protobuf import field_mask_pb2  # type: ignore
+from google.type import datetime_pb2  # type: ignore
+
+from google.shopping.merchant_accounts_v1beta.services.accounts_service import pagers
+from google.shopping.merchant_accounts_v1beta.types import accounts
+
+from .transports.base import DEFAULT_CLIENT_INFO, AccountsServiceTransport
+from .transports.grpc import AccountsServiceGrpcTransport
+from .transports.grpc_asyncio import AccountsServiceGrpcAsyncIOTransport
+from .transports.rest import AccountsServiceRestTransport
+
+
+class AccountsServiceClientMeta(type):
+    """Metaclass for the AccountsService client.
+
+    This provides class-level methods for building and retrieving
+    support objects (e.g. transport) without polluting the client instance
+    objects.
+    """
+
+    _transport_registry = (
+        OrderedDict()
+    )  # type: Dict[str, Type[AccountsServiceTransport]]
+    _transport_registry["grpc"] = AccountsServiceGrpcTransport
+    _transport_registry["grpc_asyncio"] = AccountsServiceGrpcAsyncIOTransport
+    _transport_registry["rest"] = AccountsServiceRestTransport
+
+    def get_transport_class(
+        cls,
+        label: Optional[str] = None,
+    ) -> Type[AccountsServiceTransport]:
+        """Returns an appropriate transport class.
+
+        Args:
+            label: The name of the desired transport. If none is
+                provided, then the first transport in the registry is used.
+
+        Returns:
+            The transport class to use.
+        """
+        # If a specific transport is requested, return that one.
+        if label:
+            return cls._transport_registry[label]
+
+        # No transport is requested; return the default (that is, the first one
+        # in the dictionary).
+        return next(iter(cls._transport_registry.values()))
+
+
+class AccountsServiceClient(metaclass=AccountsServiceClientMeta):
+    """Service to support Accounts API."""
+
+    @staticmethod
+    def _get_default_mtls_endpoint(api_endpoint):
+        """Converts api endpoint to mTLS endpoint.
+
+        Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to
+        "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively.
+        Args:
+            api_endpoint (Optional[str]): the api endpoint to convert.
+        Returns:
+            str: converted mTLS api endpoint.
+        """
+        if not api_endpoint:
+            return api_endpoint
+
+        mtls_endpoint_re = re.compile(
+            r"(?P<name>[^.]+)(?P<mtls>\.mtls)?(?P<sandbox>\.sandbox)?(?P<googledomain>\.googleapis\.com)?"
+ ) + + m = mtls_endpoint_re.match(api_endpoint) + name, mtls, sandbox, googledomain = m.groups() + if mtls or not googledomain: + return api_endpoint + + if sandbox: + return api_endpoint.replace( + "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" + ) + + return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") + + # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. + DEFAULT_ENDPOINT = "merchantapi.googleapis.com" + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore + DEFAULT_ENDPOINT + ) + + _DEFAULT_ENDPOINT_TEMPLATE = "merchantapi.{UNIVERSE_DOMAIN}" + _DEFAULT_UNIVERSE = "googleapis.com" + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + AccountsServiceClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + AccountsServiceClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_file(filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> AccountsServiceTransport: + """Returns the transport used by the client instance. + + Returns: + AccountsServiceTransport: The transport used by the client + instance. 
+        """
+        return self._transport
+
+    @staticmethod
+    def account_path(
+        account: str,
+    ) -> str:
+        """Returns a fully-qualified account string."""
+        return "accounts/{account}".format(
+            account=account,
+        )
+
+    @staticmethod
+    def parse_account_path(path: str) -> Dict[str, str]:
+        """Parses an account path into its component segments."""
+        m = re.match(r"^accounts/(?P<account>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def terms_of_service_path(
+        version: str,
+    ) -> str:
+        """Returns a fully-qualified terms_of_service string."""
+        return "termsOfService/{version}".format(
+            version=version,
+        )
+
+    @staticmethod
+    def parse_terms_of_service_path(path: str) -> Dict[str, str]:
+        """Parses a terms_of_service path into its component segments."""
+        m = re.match(r"^termsOfService/(?P<version>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def user_path(
+        account: str,
+        email: str,
+    ) -> str:
+        """Returns a fully-qualified user string."""
+        return "accounts/{account}/users/{email}".format(
+            account=account,
+            email=email,
+        )
+
+    @staticmethod
+    def parse_user_path(path: str) -> Dict[str, str]:
+        """Parses a user path into its component segments."""
+        m = re.match(r"^accounts/(?P<account>.+?)/users/(?P<email>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_billing_account_path(
+        billing_account: str,
+    ) -> str:
+        """Returns a fully-qualified billing_account string."""
+        return "billingAccounts/{billing_account}".format(
+            billing_account=billing_account,
+        )
+
+    @staticmethod
+    def parse_common_billing_account_path(path: str) -> Dict[str, str]:
+        """Parse a billing_account path into its component segments."""
+        m = re.match(r"^billingAccounts/(?P<billing_account>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_folder_path(
+        folder: str,
+    ) -> str:
+        """Returns a fully-qualified folder string."""
+        return "folders/{folder}".format(
+            folder=folder,
+        )
+
+    @staticmethod
+    def parse_common_folder_path(path: str) -> Dict[str, str]:
+        """Parse a folder path into its component segments."""
+        m = re.match(r"^folders/(?P<folder>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_organization_path(
+        organization: str,
+    ) -> str:
+        """Returns a fully-qualified organization string."""
+        return "organizations/{organization}".format(
+            organization=organization,
+        )
+
+    @staticmethod
+    def parse_common_organization_path(path: str) -> Dict[str, str]:
+        """Parse an organization path into its component segments."""
+        m = re.match(r"^organizations/(?P<organization>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_project_path(
+        project: str,
+    ) -> str:
+        """Returns a fully-qualified project string."""
+        return "projects/{project}".format(
+            project=project,
+        )
+
+    @staticmethod
+    def parse_common_project_path(path: str) -> Dict[str, str]:
+        """Parse a project path into its component segments."""
+        m = re.match(r"^projects/(?P<project>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_location_path(
+        project: str,
+        location: str,
+    ) -> str:
+        """Returns a fully-qualified location string."""
+        return "projects/{project}/locations/{location}".format(
+            project=project,
+            location=location,
+        )
+
+    @staticmethod
+    def parse_common_location_path(path: str) -> Dict[str, str]:
+        """Parse a location path into its component segments."""
+        m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @classmethod
+    def 
get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[client_options_lib.ClientOptions] = None + ): + """Deprecated. Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + + warnings.warn( + "get_mtls_endpoint_and_cert_source is deprecated. Use the api_endpoint property instead.", + DeprecationWarning, + ) + if client_options is None: + client_options = client_options_lib.ClientOptions() + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_client_cert not in ("true", "false"): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError( + "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Figure out the client cert source to use. + client_cert_source = None + if use_client_cert == "true": + if client_options.client_cert_source: + client_cert_source = client_options.client_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + + # Figure out which api endpoint to use. + if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + elif use_mtls_endpoint == "always" or ( + use_mtls_endpoint == "auto" and client_cert_source + ): + api_endpoint = cls.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = cls.DEFAULT_ENDPOINT + + return api_endpoint, client_cert_source + + @staticmethod + def _read_environment_variables(): + """Returns the environment variables used by the client. + + Returns: + Tuple[bool, str, str]: returns the GOOGLE_API_USE_CLIENT_CERTIFICATE, + GOOGLE_API_USE_MTLS_ENDPOINT, and GOOGLE_CLOUD_UNIVERSE_DOMAIN environment variables. + + Raises: + ValueError: If GOOGLE_API_USE_CLIENT_CERTIFICATE is not + any of ["true", "false"]. + google.auth.exceptions.MutualTLSChannelError: If GOOGLE_API_USE_MTLS_ENDPOINT + is not any of ["auto", "never", "always"]. 
+ """ + use_client_cert = os.getenv( + "GOOGLE_API_USE_CLIENT_CERTIFICATE", "false" + ).lower() + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto").lower() + universe_domain_env = os.getenv("GOOGLE_CLOUD_UNIVERSE_DOMAIN") + if use_client_cert not in ("true", "false"): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError( + "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + return use_client_cert == "true", use_mtls_endpoint, universe_domain_env + + @staticmethod + def _get_client_cert_source(provided_cert_source, use_cert_flag): + """Return the client cert source to be used by the client. + + Args: + provided_cert_source (bytes): The client certificate source provided. + use_cert_flag (bool): A flag indicating whether to use the client certificate. + + Returns: + bytes or None: The client cert source to be used by the client. + """ + client_cert_source = None + if use_cert_flag: + if provided_cert_source: + client_cert_source = provided_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + return client_cert_source + + @staticmethod + def _get_api_endpoint( + api_override, client_cert_source, universe_domain, use_mtls_endpoint + ): + """Return the API endpoint used by the client. + + Args: + api_override (str): The API endpoint override. If specified, this is always + the return value of this function and the other arguments are not used. + client_cert_source (bytes): The client certificate source used by the client. + universe_domain (str): The universe domain used by the client. + use_mtls_endpoint (str): How to use the mTLS endpoint, which depends also on the other parameters. + Possible values are "always", "auto", or "never". + + Returns: + str: The API endpoint to be used by the client. + """ + if api_override is not None: + api_endpoint = api_override + elif use_mtls_endpoint == "always" or ( + use_mtls_endpoint == "auto" and client_cert_source + ): + _default_universe = AccountsServiceClient._DEFAULT_UNIVERSE + if universe_domain != _default_universe: + raise MutualTLSChannelError( + f"mTLS is not supported in any universe other than {_default_universe}." + ) + api_endpoint = AccountsServiceClient.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = AccountsServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=universe_domain + ) + return api_endpoint + + @staticmethod + def _get_universe_domain( + client_universe_domain: Optional[str], universe_domain_env: Optional[str] + ) -> str: + """Return the universe domain used by the client. + + Args: + client_universe_domain (Optional[str]): The universe domain configured via the client options. + universe_domain_env (Optional[str]): The universe domain configured via the "GOOGLE_CLOUD_UNIVERSE_DOMAIN" environment variable. + + Returns: + str: The universe domain to be used by the client. + + Raises: + ValueError: If the universe domain is an empty string. 
+ """ + universe_domain = AccountsServiceClient._DEFAULT_UNIVERSE + if client_universe_domain is not None: + universe_domain = client_universe_domain + elif universe_domain_env is not None: + universe_domain = universe_domain_env + if len(universe_domain.strip()) == 0: + raise ValueError("Universe Domain cannot be an empty string.") + return universe_domain + + @staticmethod + def _compare_universes( + client_universe: str, credentials: ga_credentials.Credentials + ) -> bool: + """Returns True iff the universe domains used by the client and credentials match. + + Args: + client_universe (str): The universe domain configured via the client options. + credentials (ga_credentials.Credentials): The credentials being used in the client. + + Returns: + bool: True iff client_universe matches the universe in credentials. + + Raises: + ValueError: when client_universe does not match the universe in credentials. + """ + + default_universe = AccountsServiceClient._DEFAULT_UNIVERSE + credentials_universe = getattr(credentials, "universe_domain", default_universe) + + if client_universe != credentials_universe: + raise ValueError( + "The configured universe domain " + f"({client_universe}) does not match the universe domain " + f"found in the credentials ({credentials_universe}). " + "If you haven't configured the universe domain explicitly, " + f"`{default_universe}` is the default." + ) + return True + + def _validate_universe_domain(self): + """Validates client's and credentials' universe domains are consistent. + + Returns: + bool: True iff the configured universe domain is valid. + + Raises: + ValueError: If the configured universe domain is not valid. + """ + self._is_universe_domain_valid = ( + self._is_universe_domain_valid + or AccountsServiceClient._compare_universes( + self.universe_domain, self.transport._credentials + ) + ) + return self._is_universe_domain_valid + + @property + def api_endpoint(self): + """Return the API endpoint used by the client instance. + + Returns: + str: The API endpoint used by the client instance. + """ + return self._api_endpoint + + @property + def universe_domain(self) -> str: + """Return the universe domain used by the client instance. + + Returns: + str: The universe domain used by the client instance. + """ + return self._universe_domain + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[ + Union[ + str, AccountsServiceTransport, Callable[..., AccountsServiceTransport] + ] + ] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the accounts service client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Optional[Union[str,AccountsServiceTransport,Callable[..., AccountsServiceTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the AccountsServiceTransport constructor. + If set to None, a transport is chosen automatically. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): + Custom options for the client. + + 1. 
The ``api_endpoint`` property can be used to override the + default endpoint provided by the client when ``transport`` is + not explicitly provided. Only if this property is not set and + ``transport`` was not explicitly provided, the endpoint is + determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment + variable, which have one of the following values: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto-switch to the + default mTLS endpoint if client certificate is present; this is + the default value). + + 2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide a client certificate for mTLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + 3. The ``universe_domain`` property can be used to override the + default "googleapis.com" universe. Note that the ``api_endpoint`` + property still takes precedence; and ``universe_domain`` is + currently not supported for mTLS. + + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + """ + self._client_options = client_options + if isinstance(self._client_options, dict): + self._client_options = client_options_lib.from_dict(self._client_options) + if self._client_options is None: + self._client_options = client_options_lib.ClientOptions() + self._client_options = cast( + client_options_lib.ClientOptions, self._client_options + ) + + universe_domain_opt = getattr(self._client_options, "universe_domain", None) + + ( + self._use_client_cert, + self._use_mtls_endpoint, + self._universe_domain_env, + ) = AccountsServiceClient._read_environment_variables() + self._client_cert_source = AccountsServiceClient._get_client_cert_source( + self._client_options.client_cert_source, self._use_client_cert + ) + self._universe_domain = AccountsServiceClient._get_universe_domain( + universe_domain_opt, self._universe_domain_env + ) + self._api_endpoint = None # updated below, depending on `transport` + + # Initialize the universe domain validation. + self._is_universe_domain_valid = False + + api_key_value = getattr(self._client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError( + "client_options.api_key and credentials are mutually exclusive" + ) + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + transport_provided = isinstance(transport, AccountsServiceTransport) + if transport_provided: + # transport is a AccountsServiceTransport instance. + if credentials or self._client_options.credentials_file or api_key_value: + raise ValueError( + "When providing a transport instance, " + "provide its credentials directly." + ) + if self._client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." 
+ ) + self._transport = cast(AccountsServiceTransport, transport) + self._api_endpoint = self._transport.host + + self._api_endpoint = ( + self._api_endpoint + or AccountsServiceClient._get_api_endpoint( + self._client_options.api_endpoint, + self._client_cert_source, + self._universe_domain, + self._use_mtls_endpoint, + ) + ) + + if not transport_provided: + import google.auth._default # type: ignore + + if api_key_value and hasattr( + google.auth._default, "get_api_key_credentials" + ): + credentials = google.auth._default.get_api_key_credentials( + api_key_value + ) + + transport_init: Union[ + Type[AccountsServiceTransport], Callable[..., AccountsServiceTransport] + ] = ( + type(self).get_transport_class(transport) + if isinstance(transport, str) or transport is None + else cast(Callable[..., AccountsServiceTransport], transport) + ) + # initialize with the provided callable or the passed in class + self._transport = transport_init( + credentials=credentials, + credentials_file=self._client_options.credentials_file, + host=self._api_endpoint, + scopes=self._client_options.scopes, + client_cert_source_for_mtls=self._client_cert_source, + quota_project_id=self._client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + api_audience=self._client_options.api_audience, + ) + + def get_account( + self, + request: Optional[Union[accounts.GetAccountRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> accounts.Account: + r"""Retrieves an account from your Merchant Center + account. After inserting, updating, or deleting an + account, it may take several minutes before changes take + effect. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.shopping import merchant_accounts_v1beta + + def sample_get_account(): + # Create a client + client = merchant_accounts_v1beta.AccountsServiceClient() + + # Initialize request argument(s) + request = merchant_accounts_v1beta.GetAccountRequest( + name="name_value", + ) + + # Make the request + response = client.get_account(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.shopping.merchant_accounts_v1beta.types.GetAccountRequest, dict]): + The request object. Request message for the ``GetAccount`` method. + name (str): + Required. The name of the account to retrieve. Format: + ``accounts/{account}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.shopping.merchant_accounts_v1beta.types.Account: + An account. + """ + # Create or coerce a protobuf request object. 
+ # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, accounts.GetAccountRequest): + request = accounts.GetAccountRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_account] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def create_and_configure_account( + self, + request: Optional[ + Union[accounts.CreateAndConfigureAccountRequest, dict] + ] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> accounts.Account: + r"""Creates a standalone Merchant Center account with + additional configuration. Adds the user that makes the + request as an admin for the new account. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.shopping import merchant_accounts_v1beta + + def sample_create_and_configure_account(): + # Create a client + client = merchant_accounts_v1beta.AccountsServiceClient() + + # Initialize request argument(s) + account = merchant_accounts_v1beta.Account() + account.account_name = "account_name_value" + account.language_code = "language_code_value" + + request = merchant_accounts_v1beta.CreateAndConfigureAccountRequest( + account=account, + ) + + # Make the request + response = client.create_and_configure_account(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.shopping.merchant_accounts_v1beta.types.CreateAndConfigureAccountRequest, dict]): + The request object. Request message for the ``CreateAndConfigureAccount`` + method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.shopping.merchant_accounts_v1beta.types.Account: + An account. + """ + # Create or coerce a protobuf request object. 
+ # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, accounts.CreateAndConfigureAccountRequest): + request = accounts.CreateAndConfigureAccountRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.create_and_configure_account + ] + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def delete_account( + self, + request: Optional[Union[accounts.DeleteAccountRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes the specified account regardless of its type: + standalone, MCA or sub-account. Deleting an MCA leads to + the deletion of all of its sub-accounts. Executing this + method requires admin access. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.shopping import merchant_accounts_v1beta + + def sample_delete_account(): + # Create a client + client = merchant_accounts_v1beta.AccountsServiceClient() + + # Initialize request argument(s) + request = merchant_accounts_v1beta.DeleteAccountRequest( + name="name_value", + ) + + # Make the request + client.delete_account(request=request) + + Args: + request (Union[google.shopping.merchant_accounts_v1beta.types.DeleteAccountRequest, dict]): + The request object. Request message for the ``DeleteAccount`` method. + name (str): + Required. The name of the account to delete. Format: + ``accounts/{account}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, accounts.DeleteAccountRequest): + request = accounts.DeleteAccountRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_account] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def update_account( + self, + request: Optional[Union[accounts.UpdateAccountRequest, dict]] = None, + *, + account: Optional[accounts.Account] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> accounts.Account: + r"""Updates an account regardless of its type: + standalone, MCA or sub-account. Executing this method + requires admin access. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.shopping import merchant_accounts_v1beta + + def sample_update_account(): + # Create a client + client = merchant_accounts_v1beta.AccountsServiceClient() + + # Initialize request argument(s) + account = merchant_accounts_v1beta.Account() + account.account_name = "account_name_value" + account.language_code = "language_code_value" + + request = merchant_accounts_v1beta.UpdateAccountRequest( + account=account, + ) + + # Make the request + response = client.update_account(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.shopping.merchant_accounts_v1beta.types.UpdateAccountRequest, dict]): + The request object. Request message for the ``UpdateAccount`` method. + account (google.shopping.merchant_accounts_v1beta.types.Account): + Required. The new version of the + account. + + This corresponds to the ``account`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Required. List of fields being + updated. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.shopping.merchant_accounts_v1beta.types.Account: + An account. + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([account, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." 
+ ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, accounts.UpdateAccountRequest): + request = accounts.UpdateAccountRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if account is not None: + request.account = account + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update_account] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("account.name", request.account.name),) + ), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def list_accounts( + self, + request: Optional[Union[accounts.ListAccountsRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListAccountsPager: + r"""Lists accounts accessible to the calling user and + matching the constraints of the request such as page + size or filters. This is not just listing the + sub-accounts of an MCA, but all accounts the calling + user has access to including other MCAs, linked + accounts, standalone accounts and so on. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.shopping import merchant_accounts_v1beta + + def sample_list_accounts(): + # Create a client + client = merchant_accounts_v1beta.AccountsServiceClient() + + # Initialize request argument(s) + request = merchant_accounts_v1beta.ListAccountsRequest( + ) + + # Make the request + page_result = client.list_accounts(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.shopping.merchant_accounts_v1beta.types.ListAccountsRequest, dict]): + The request object. Request message for the ``ListAccounts`` method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.shopping.merchant_accounts_v1beta.services.accounts_service.pagers.ListAccountsPager: + Response message for the ListAccounts method. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
+ if not isinstance(request, accounts.ListAccountsRequest): + request = accounts.ListAccountsRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_accounts] + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListAccountsPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def list_sub_accounts( + self, + request: Optional[Union[accounts.ListSubAccountsRequest, dict]] = None, + *, + provider: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListSubAccountsPager: + r"""List all sub-accounts for a given multi client account. This is + a convenience wrapper for the more powerful ``ListAccounts`` + method. This method will produce the same results as calling + ``ListsAccounts`` with the following filter: + ``relationship(providerId={parent} AND service(type="ACCOUNT_AGGREGATION"))`` + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.shopping import merchant_accounts_v1beta + + def sample_list_sub_accounts(): + # Create a client + client = merchant_accounts_v1beta.AccountsServiceClient() + + # Initialize request argument(s) + request = merchant_accounts_v1beta.ListSubAccountsRequest( + provider="provider_value", + ) + + # Make the request + page_result = client.list_sub_accounts(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.shopping.merchant_accounts_v1beta.types.ListSubAccountsRequest, dict]): + The request object. Request message for the ``ListSubAccounts`` method. + provider (str): + Required. The parent account. Format: + ``accounts/{account}`` + + This corresponds to the ``provider`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.shopping.merchant_accounts_v1beta.services.accounts_service.pagers.ListSubAccountsPager: + Response message for the ListSubAccounts method. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([provider]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, accounts.ListSubAccountsRequest): + request = accounts.ListSubAccountsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if provider is not None: + request.provider = provider + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_sub_accounts] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("provider", request.provider),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListSubAccountsPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def __enter__(self) -> "AccountsServiceClient": + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! + """ + self.transport.close() + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("AccountsServiceClient",) diff --git a/packages/google-maps-mapsplatformdatasets/google/maps/mapsplatformdatasets_v1alpha/services/maps_platform_datasets_v1_alpha/pagers.py b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/accounts_service/pagers.py similarity index 60% rename from packages/google-maps-mapsplatformdatasets/google/maps/mapsplatformdatasets_v1alpha/services/maps_platform_datasets_v1_alpha/pagers.py rename to packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/accounts_service/pagers.py index 738c2492398e..3878a5aac42f 100644 --- a/packages/google-maps-mapsplatformdatasets/google/maps/mapsplatformdatasets_v1alpha/services/maps_platform_datasets_v1_alpha/pagers.py +++ b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/accounts_service/pagers.py @@ -24,35 +24,32 @@ Tuple, ) -from google.maps.mapsplatformdatasets_v1alpha.types import ( - dataset, - maps_platform_datasets, -) +from google.shopping.merchant_accounts_v1beta.types import accounts -class ListDatasetVersionsPager: - """A pager for iterating through ``list_dataset_versions`` requests. +class ListAccountsPager: + """A pager for iterating through ``list_accounts`` requests. 
This class thinly wraps an initial - :class:`google.maps.mapsplatformdatasets_v1alpha.types.ListDatasetVersionsResponse` object, and + :class:`google.shopping.merchant_accounts_v1beta.types.ListAccountsResponse` object, and provides an ``__iter__`` method to iterate through its - ``datasets`` field. + ``accounts`` field. If there are more pages, the ``__iter__`` method will make additional - ``ListDatasetVersions`` requests and continue to iterate - through the ``datasets`` field on the + ``ListAccounts`` requests and continue to iterate + through the ``accounts`` field on the corresponding responses. - All the usual :class:`google.maps.mapsplatformdatasets_v1alpha.types.ListDatasetVersionsResponse` + All the usual :class:`google.shopping.merchant_accounts_v1beta.types.ListAccountsResponse` attributes are available on the pager. If multiple requests are made, only the most recent response is retained, and thus used for attribute lookup. """ def __init__( self, - method: Callable[..., maps_platform_datasets.ListDatasetVersionsResponse], - request: maps_platform_datasets.ListDatasetVersionsRequest, - response: maps_platform_datasets.ListDatasetVersionsResponse, + method: Callable[..., accounts.ListAccountsResponse], + request: accounts.ListAccountsRequest, + response: accounts.ListAccountsResponse, *, metadata: Sequence[Tuple[str, str]] = () ): @@ -61,15 +58,15 @@ def __init__( Args: method (Callable): The method that was originally called, and which instantiated this pager. - request (google.maps.mapsplatformdatasets_v1alpha.types.ListDatasetVersionsRequest): + request (google.shopping.merchant_accounts_v1beta.types.ListAccountsRequest): The initial request object. - response (google.maps.mapsplatformdatasets_v1alpha.types.ListDatasetVersionsResponse): + response (google.shopping.merchant_accounts_v1beta.types.ListAccountsResponse): The initial response object. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. """ self._method = method - self._request = maps_platform_datasets.ListDatasetVersionsRequest(request) + self._request = accounts.ListAccountsRequest(request) self._response = response self._metadata = metadata @@ -77,46 +74,44 @@ def __getattr__(self, name: str) -> Any: return getattr(self._response, name) @property - def pages(self) -> Iterator[maps_platform_datasets.ListDatasetVersionsResponse]: + def pages(self) -> Iterator[accounts.ListAccountsResponse]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token self._response = self._method(self._request, metadata=self._metadata) yield self._response - def __iter__(self) -> Iterator[dataset.Dataset]: + def __iter__(self) -> Iterator[accounts.Account]: for page in self.pages: - yield from page.datasets + yield from page.accounts def __repr__(self) -> str: return "{0}<{1!r}>".format(self.__class__.__name__, self._response) -class ListDatasetVersionsAsyncPager: - """A pager for iterating through ``list_dataset_versions`` requests. +class ListAccountsAsyncPager: + """A pager for iterating through ``list_accounts`` requests. This class thinly wraps an initial - :class:`google.maps.mapsplatformdatasets_v1alpha.types.ListDatasetVersionsResponse` object, and + :class:`google.shopping.merchant_accounts_v1beta.types.ListAccountsResponse` object, and provides an ``__aiter__`` method to iterate through its - ``datasets`` field. + ``accounts`` field. 
If there are more pages, the ``__aiter__`` method will make additional - ``ListDatasetVersions`` requests and continue to iterate - through the ``datasets`` field on the + ``ListAccounts`` requests and continue to iterate + through the ``accounts`` field on the corresponding responses. - All the usual :class:`google.maps.mapsplatformdatasets_v1alpha.types.ListDatasetVersionsResponse` + All the usual :class:`google.shopping.merchant_accounts_v1beta.types.ListAccountsResponse` attributes are available on the pager. If multiple requests are made, only the most recent response is retained, and thus used for attribute lookup. """ def __init__( self, - method: Callable[ - ..., Awaitable[maps_platform_datasets.ListDatasetVersionsResponse] - ], - request: maps_platform_datasets.ListDatasetVersionsRequest, - response: maps_platform_datasets.ListDatasetVersionsResponse, + method: Callable[..., Awaitable[accounts.ListAccountsResponse]], + request: accounts.ListAccountsRequest, + response: accounts.ListAccountsResponse, *, metadata: Sequence[Tuple[str, str]] = () ): @@ -125,15 +120,15 @@ def __init__( Args: method (Callable): The method that was originally called, and which instantiated this pager. - request (google.maps.mapsplatformdatasets_v1alpha.types.ListDatasetVersionsRequest): + request (google.shopping.merchant_accounts_v1beta.types.ListAccountsRequest): The initial request object. - response (google.maps.mapsplatformdatasets_v1alpha.types.ListDatasetVersionsResponse): + response (google.shopping.merchant_accounts_v1beta.types.ListAccountsResponse): The initial response object. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. """ self._method = method - self._request = maps_platform_datasets.ListDatasetVersionsRequest(request) + self._request = accounts.ListAccountsRequest(request) self._response = response self._metadata = metadata @@ -141,19 +136,17 @@ def __getattr__(self, name: str) -> Any: return getattr(self._response, name) @property - async def pages( - self, - ) -> AsyncIterator[maps_platform_datasets.ListDatasetVersionsResponse]: + async def pages(self) -> AsyncIterator[accounts.ListAccountsResponse]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token self._response = await self._method(self._request, metadata=self._metadata) yield self._response - def __aiter__(self) -> AsyncIterator[dataset.Dataset]: + def __aiter__(self) -> AsyncIterator[accounts.Account]: async def async_generator(): async for page in self.pages: - for response in page.datasets: + for response in page.accounts: yield response return async_generator() @@ -162,29 +155,29 @@ def __repr__(self) -> str: return "{0}<{1!r}>".format(self.__class__.__name__, self._response) -class ListDatasetsPager: - """A pager for iterating through ``list_datasets`` requests. +class ListSubAccountsPager: + """A pager for iterating through ``list_sub_accounts`` requests. This class thinly wraps an initial - :class:`google.maps.mapsplatformdatasets_v1alpha.types.ListDatasetsResponse` object, and + :class:`google.shopping.merchant_accounts_v1beta.types.ListSubAccountsResponse` object, and provides an ``__iter__`` method to iterate through its - ``datasets`` field. + ``accounts`` field. 
If there are more pages, the ``__iter__`` method will make additional - ``ListDatasets`` requests and continue to iterate - through the ``datasets`` field on the + ``ListSubAccounts`` requests and continue to iterate + through the ``accounts`` field on the corresponding responses. - All the usual :class:`google.maps.mapsplatformdatasets_v1alpha.types.ListDatasetsResponse` + All the usual :class:`google.shopping.merchant_accounts_v1beta.types.ListSubAccountsResponse` attributes are available on the pager. If multiple requests are made, only the most recent response is retained, and thus used for attribute lookup. """ def __init__( self, - method: Callable[..., maps_platform_datasets.ListDatasetsResponse], - request: maps_platform_datasets.ListDatasetsRequest, - response: maps_platform_datasets.ListDatasetsResponse, + method: Callable[..., accounts.ListSubAccountsResponse], + request: accounts.ListSubAccountsRequest, + response: accounts.ListSubAccountsResponse, *, metadata: Sequence[Tuple[str, str]] = () ): @@ -193,15 +186,15 @@ def __init__( Args: method (Callable): The method that was originally called, and which instantiated this pager. - request (google.maps.mapsplatformdatasets_v1alpha.types.ListDatasetsRequest): + request (google.shopping.merchant_accounts_v1beta.types.ListSubAccountsRequest): The initial request object. - response (google.maps.mapsplatformdatasets_v1alpha.types.ListDatasetsResponse): + response (google.shopping.merchant_accounts_v1beta.types.ListSubAccountsResponse): The initial response object. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. """ self._method = method - self._request = maps_platform_datasets.ListDatasetsRequest(request) + self._request = accounts.ListSubAccountsRequest(request) self._response = response self._metadata = metadata @@ -209,44 +202,44 @@ def __getattr__(self, name: str) -> Any: return getattr(self._response, name) @property - def pages(self) -> Iterator[maps_platform_datasets.ListDatasetsResponse]: + def pages(self) -> Iterator[accounts.ListSubAccountsResponse]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token self._response = self._method(self._request, metadata=self._metadata) yield self._response - def __iter__(self) -> Iterator[dataset.Dataset]: + def __iter__(self) -> Iterator[accounts.Account]: for page in self.pages: - yield from page.datasets + yield from page.accounts def __repr__(self) -> str: return "{0}<{1!r}>".format(self.__class__.__name__, self._response) -class ListDatasetsAsyncPager: - """A pager for iterating through ``list_datasets`` requests. +class ListSubAccountsAsyncPager: + """A pager for iterating through ``list_sub_accounts`` requests. This class thinly wraps an initial - :class:`google.maps.mapsplatformdatasets_v1alpha.types.ListDatasetsResponse` object, and + :class:`google.shopping.merchant_accounts_v1beta.types.ListSubAccountsResponse` object, and provides an ``__aiter__`` method to iterate through its - ``datasets`` field. + ``accounts`` field. If there are more pages, the ``__aiter__`` method will make additional - ``ListDatasets`` requests and continue to iterate - through the ``datasets`` field on the + ``ListSubAccounts`` requests and continue to iterate + through the ``accounts`` field on the corresponding responses. 
- All the usual :class:`google.maps.mapsplatformdatasets_v1alpha.types.ListDatasetsResponse` + All the usual :class:`google.shopping.merchant_accounts_v1beta.types.ListSubAccountsResponse` attributes are available on the pager. If multiple requests are made, only the most recent response is retained, and thus used for attribute lookup. """ def __init__( self, - method: Callable[..., Awaitable[maps_platform_datasets.ListDatasetsResponse]], - request: maps_platform_datasets.ListDatasetsRequest, - response: maps_platform_datasets.ListDatasetsResponse, + method: Callable[..., Awaitable[accounts.ListSubAccountsResponse]], + request: accounts.ListSubAccountsRequest, + response: accounts.ListSubAccountsResponse, *, metadata: Sequence[Tuple[str, str]] = () ): @@ -255,15 +248,15 @@ def __init__( Args: method (Callable): The method that was originally called, and which instantiated this pager. - request (google.maps.mapsplatformdatasets_v1alpha.types.ListDatasetsRequest): + request (google.shopping.merchant_accounts_v1beta.types.ListSubAccountsRequest): The initial request object. - response (google.maps.mapsplatformdatasets_v1alpha.types.ListDatasetsResponse): + response (google.shopping.merchant_accounts_v1beta.types.ListSubAccountsResponse): The initial response object. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. """ self._method = method - self._request = maps_platform_datasets.ListDatasetsRequest(request) + self._request = accounts.ListSubAccountsRequest(request) self._response = response self._metadata = metadata @@ -271,17 +264,17 @@ def __getattr__(self, name: str) -> Any: return getattr(self._response, name) @property - async def pages(self) -> AsyncIterator[maps_platform_datasets.ListDatasetsResponse]: + async def pages(self) -> AsyncIterator[accounts.ListSubAccountsResponse]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token self._response = await self._method(self._request, metadata=self._metadata) yield self._response - def __aiter__(self) -> AsyncIterator[dataset.Dataset]: + def __aiter__(self) -> AsyncIterator[accounts.Account]: async def async_generator(): async for page in self.pages: - for response in page.datasets: + for response in page.accounts: yield response return async_generator() diff --git a/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/accounts_service/transports/__init__.py b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/accounts_service/transports/__init__.py new file mode 100644 index 000000000000..b70e195444cf --- /dev/null +++ b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/accounts_service/transports/__init__.py @@ -0,0 +1,36 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
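As a usage note for the renamed pagers above (a sketch only; Application Default Credentials are assumed and the printed values are illustrative), both item-wise and page-wise iteration are supported:

```python
from google.shopping import merchant_accounts_v1beta

client = merchant_accounts_v1beta.AccountsServiceClient()
pager = client.list_accounts(request=merchant_accounts_v1beta.ListAccountsRequest())

# Item-wise iteration: __iter__ walks the `accounts` field and transparently
# requests further pages whenever next_page_token is set.
for account in pager:
    print(account)

# Page-wise iteration: the `pages` property yields each ListAccountsResponse,
# which is useful when response-level fields are needed as well.
for page in pager.pages:
    print(len(page.accounts), page.next_page_token)
```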
+# +from collections import OrderedDict +from typing import Dict, Type + +from .base import AccountsServiceTransport +from .grpc import AccountsServiceGrpcTransport +from .grpc_asyncio import AccountsServiceGrpcAsyncIOTransport +from .rest import AccountsServiceRestInterceptor, AccountsServiceRestTransport + +# Compile a registry of transports. +_transport_registry = OrderedDict() # type: Dict[str, Type[AccountsServiceTransport]] +_transport_registry["grpc"] = AccountsServiceGrpcTransport +_transport_registry["grpc_asyncio"] = AccountsServiceGrpcAsyncIOTransport +_transport_registry["rest"] = AccountsServiceRestTransport + +__all__ = ( + "AccountsServiceTransport", + "AccountsServiceGrpcTransport", + "AccountsServiceGrpcAsyncIOTransport", + "AccountsServiceRestTransport", + "AccountsServiceRestInterceptor", +) diff --git a/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/accounts_service/transports/base.py b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/accounts_service/transports/base.py new file mode 100644 index 000000000000..17cfb6d370f5 --- /dev/null +++ b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/accounts_service/transports/base.py @@ -0,0 +1,233 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import abc +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union + +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore +from google.protobuf import empty_pb2 # type: ignore + +from google.shopping.merchant_accounts_v1beta import gapic_version as package_version +from google.shopping.merchant_accounts_v1beta.types import accounts + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +class AccountsServiceTransport(abc.ABC): + """Abstract transport class for AccountsService.""" + + AUTH_SCOPES = ("https://www.googleapis.com/auth/content",) + + DEFAULT_HOST: str = "merchantapi.googleapis.com" + + def __init__( + self, + *, + host: str = DEFAULT_HOST, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + **kwargs, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'merchantapi.googleapis.com'). 
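The registry above is what lets callers pick a wire transport by name through the standard GAPIC ``transport`` argument. A minimal sketch, assuming the package is installed and Application Default Credentials are available:

```python
# Minimal sketch, assuming the package is installed and ADC is configured.
from google.shopping import merchant_accounts_v1beta

# Default construction resolves to the gRPC transport from the registry.
grpc_client = merchant_accounts_v1beta.AccountsServiceClient()

# Passing a registry key selects a different transport, e.g. REST.
rest_client = merchant_accounts_v1beta.AccountsServiceClient(transport="rest")
```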
+ credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A list of scopes. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + """ + + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} + + # Save the scopes. + self._scopes = scopes + + # If no credentials are provided, then determine the appropriate + # defaults. + if credentials and credentials_file: + raise core_exceptions.DuplicateCredentialArgs( + "'credentials_file' and 'credentials' are mutually exclusive" + ) + + if credentials_file is not None: + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, **scopes_kwargs, quota_project_id=quota_project_id + ) + elif credentials is None: + credentials, _ = google.auth.default( + **scopes_kwargs, quota_project_id=quota_project_id + ) + # Don't apply audience if the credentials file passed from user. + if hasattr(credentials, "with_gdch_audience"): + credentials = credentials.with_gdch_audience( + api_audience if api_audience else host + ) + + # If the credentials are service account credentials, then always try to use self signed JWT. + if ( + always_use_jwt_access + and isinstance(credentials, service_account.Credentials) + and hasattr(service_account.Credentials, "with_always_use_jwt_access") + ): + credentials = credentials.with_always_use_jwt_access(True) + + # Save the credentials. + self._credentials = credentials + + # Save the hostname. Default to port 443 (HTTPS) if none is specified. + if ":" not in host: + host += ":443" + self._host = host + + @property + def host(self): + return self._host + + def _prep_wrapped_messages(self, client_info): + # Precompute the wrapped methods. + self._wrapped_methods = { + self.get_account: gapic_v1.method.wrap_method( + self.get_account, + default_timeout=None, + client_info=client_info, + ), + self.create_and_configure_account: gapic_v1.method.wrap_method( + self.create_and_configure_account, + default_timeout=None, + client_info=client_info, + ), + self.delete_account: gapic_v1.method.wrap_method( + self.delete_account, + default_timeout=None, + client_info=client_info, + ), + self.update_account: gapic_v1.method.wrap_method( + self.update_account, + default_timeout=None, + client_info=client_info, + ), + self.list_accounts: gapic_v1.method.wrap_method( + self.list_accounts, + default_timeout=None, + client_info=client_info, + ), + self.list_sub_accounts: gapic_v1.method.wrap_method( + self.list_sub_accounts, + default_timeout=None, + client_info=client_info, + ), + } + + def close(self): + """Closes resources associated with the transport. + + .. 
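A minimal sketch of how those credential rules play out in practice, assuming the package is installed; the key-file path is a placeholder. Supplying both ``credentials`` and ``credentials_file`` raises ``DuplicateCredentialArgs``, and supplying neither falls back to Application Default Credentials.

```python
# Minimal sketch; "merchant-sa.json" is a hypothetical service-account key.
from google.oauth2 import service_account
from google.shopping import merchant_accounts_v1beta

creds = service_account.Credentials.from_service_account_file(
    "merchant-sa.json",
    scopes=["https://www.googleapis.com/auth/content"],  # matches AUTH_SCOPES above
)

# Explicit credentials: no ADC lookup, and the transport may upgrade them to
# self-signed JWTs as shown in the base class.
client = merchant_accounts_v1beta.AccountsServiceClient(credentials=creds)
```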
warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! + """ + raise NotImplementedError() + + @property + def get_account( + self, + ) -> Callable[ + [accounts.GetAccountRequest], + Union[accounts.Account, Awaitable[accounts.Account]], + ]: + raise NotImplementedError() + + @property + def create_and_configure_account( + self, + ) -> Callable[ + [accounts.CreateAndConfigureAccountRequest], + Union[accounts.Account, Awaitable[accounts.Account]], + ]: + raise NotImplementedError() + + @property + def delete_account( + self, + ) -> Callable[ + [accounts.DeleteAccountRequest], + Union[empty_pb2.Empty, Awaitable[empty_pb2.Empty]], + ]: + raise NotImplementedError() + + @property + def update_account( + self, + ) -> Callable[ + [accounts.UpdateAccountRequest], + Union[accounts.Account, Awaitable[accounts.Account]], + ]: + raise NotImplementedError() + + @property + def list_accounts( + self, + ) -> Callable[ + [accounts.ListAccountsRequest], + Union[accounts.ListAccountsResponse, Awaitable[accounts.ListAccountsResponse]], + ]: + raise NotImplementedError() + + @property + def list_sub_accounts( + self, + ) -> Callable[ + [accounts.ListSubAccountsRequest], + Union[ + accounts.ListSubAccountsResponse, + Awaitable[accounts.ListSubAccountsResponse], + ], + ]: + raise NotImplementedError() + + @property + def kind(self) -> str: + raise NotImplementedError() + + +__all__ = ("AccountsServiceTransport",) diff --git a/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/accounts_service/transports/grpc.py b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/accounts_service/transports/grpc.py new file mode 100644 index 000000000000..e8077679c9be --- /dev/null +++ b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/accounts_service/transports/grpc.py @@ -0,0 +1,418 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, grpc_helpers +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.protobuf import empty_pb2 # type: ignore +import grpc # type: ignore + +from google.shopping.merchant_accounts_v1beta.types import accounts + +from .base import DEFAULT_CLIENT_INFO, AccountsServiceTransport + + +class AccountsServiceGrpcTransport(AccountsServiceTransport): + """gRPC backend transport for AccountsService. + + Service to support Accounts API. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. 
+ + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + + _stubs: Dict[str, Callable] + + def __init__( + self, + *, + host: str = "merchantapi.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'merchantapi.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if a ``channel`` instance is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if a ``channel`` instance is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if a ``channel`` instance is provided. + channel (Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]]): + A ``Channel`` instance through which to make calls, or a Callable + that constructs and returns one. If set to None, ``self.create_channel`` + is used to create the channel. If a Callable is given, it will be called + with the same arguments as used in ``self.create_channel``. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if a ``channel`` instance is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. 
+ + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if isinstance(channel, grpc.Channel): + # Ignore credentials if a channel was passed. + credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + # initialize with the provided callable or the default channel + channel_init = channel or type(self).create_channel + self._grpc_channel = channel_init( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @classmethod + def create_channel( + cls, + host: str = "merchantapi.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> grpc.Channel: + """Create and return a gRPC channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. 
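When a channel is not supplied, the constructor above can still build a mutual-TLS channel from a ``client_cert_source_for_mtls`` callback. A minimal sketch, assuming the package is installed and Application Default Credentials are available; the PEM paths are placeholders for a real client identity.

```python
# Minimal sketch; the certificate and key paths are placeholders.
from typing import Tuple

from google.shopping import merchant_accounts_v1beta
from google.shopping.merchant_accounts_v1beta.services.accounts_service.transports import (
    AccountsServiceGrpcTransport,
)

def client_cert_source() -> Tuple[bytes, bytes]:
    # Return (certificate_chain, private_key), both PEM-encoded.
    with open("client-cert.pem", "rb") as cert, open("client-key.pem", "rb") as key:
        return cert.read(), key.read()

transport = AccountsServiceGrpcTransport(
    client_cert_source_for_mtls=client_cert_source,
)
client = merchant_accounts_v1beta.AccountsServiceClient(transport=transport)
```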
+ quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + grpc.Channel: A gRPC channel object. + + Raises: + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + + return grpc_helpers.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs, + ) + + @property + def grpc_channel(self) -> grpc.Channel: + """Return the channel designed to connect to this service.""" + return self._grpc_channel + + @property + def get_account(self) -> Callable[[accounts.GetAccountRequest], accounts.Account]: + r"""Return a callable for the get account method over gRPC. + + Retrieves an account from your Merchant Center + account. After inserting, updating, or deleting an + account, it may take several minutes before changes take + effect. + + Returns: + Callable[[~.GetAccountRequest], + ~.Account]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_account" not in self._stubs: + self._stubs["get_account"] = self.grpc_channel.unary_unary( + "/google.shopping.merchant.accounts.v1beta.AccountsService/GetAccount", + request_serializer=accounts.GetAccountRequest.serialize, + response_deserializer=accounts.Account.deserialize, + ) + return self._stubs["get_account"] + + @property + def create_and_configure_account( + self, + ) -> Callable[[accounts.CreateAndConfigureAccountRequest], accounts.Account]: + r"""Return a callable for the create and configure account method over gRPC. + + Creates a standalone Merchant Center account with + additional configuration. Adds the user that makes the + request as an admin for the new account. + + Returns: + Callable[[~.CreateAndConfigureAccountRequest], + ~.Account]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_and_configure_account" not in self._stubs: + self._stubs["create_and_configure_account"] = self.grpc_channel.unary_unary( + "/google.shopping.merchant.accounts.v1beta.AccountsService/CreateAndConfigureAccount", + request_serializer=accounts.CreateAndConfigureAccountRequest.serialize, + response_deserializer=accounts.Account.deserialize, + ) + return self._stubs["create_and_configure_account"] + + @property + def delete_account( + self, + ) -> Callable[[accounts.DeleteAccountRequest], empty_pb2.Empty]: + r"""Return a callable for the delete account method over gRPC. + + Deletes the specified account regardless of its type: + standalone, MCA or sub-account. Deleting an MCA leads to + the deletion of all of its sub-accounts. Executing this + method requires admin access. + + Returns: + Callable[[~.DeleteAccountRequest], + ~.Empty]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. 
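Because ``create_channel`` is a classmethod and the constructor accepts a ready-made ``channel``, the channel can also be built up front, for example to keep gRPC options in one place. A minimal sketch, assuming the package is installed and Application Default Credentials are available:

```python
# Minimal sketch, assuming the package is installed and ADC is configured.
from google.shopping import merchant_accounts_v1beta
from google.shopping.merchant_accounts_v1beta.services.accounts_service.transports import (
    AccountsServiceGrpcTransport,
)

# Build the channel explicitly.
channel = AccountsServiceGrpcTransport.create_channel(
    "merchantapi.googleapis.com",
    scopes=["https://www.googleapis.com/auth/content"],
)

# As noted above, credentials passed alongside an explicit channel are ignored.
transport = AccountsServiceGrpcTransport(channel=channel)
client = merchant_accounts_v1beta.AccountsServiceClient(transport=transport)
```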
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_account" not in self._stubs: + self._stubs["delete_account"] = self.grpc_channel.unary_unary( + "/google.shopping.merchant.accounts.v1beta.AccountsService/DeleteAccount", + request_serializer=accounts.DeleteAccountRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs["delete_account"] + + @property + def update_account( + self, + ) -> Callable[[accounts.UpdateAccountRequest], accounts.Account]: + r"""Return a callable for the update account method over gRPC. + + Updates an account regardless of its type: + standalone, MCA or sub-account. Executing this method + requires admin access. + + Returns: + Callable[[~.UpdateAccountRequest], + ~.Account]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_account" not in self._stubs: + self._stubs["update_account"] = self.grpc_channel.unary_unary( + "/google.shopping.merchant.accounts.v1beta.AccountsService/UpdateAccount", + request_serializer=accounts.UpdateAccountRequest.serialize, + response_deserializer=accounts.Account.deserialize, + ) + return self._stubs["update_account"] + + @property + def list_accounts( + self, + ) -> Callable[[accounts.ListAccountsRequest], accounts.ListAccountsResponse]: + r"""Return a callable for the list accounts method over gRPC. + + Lists accounts accessible to the calling user and + matching the constraints of the request such as page + size or filters. This is not just listing the + sub-accounts of an MCA, but all accounts the calling + user has access to including other MCAs, linked + accounts, standalone accounts and so on. + + Returns: + Callable[[~.ListAccountsRequest], + ~.ListAccountsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_accounts" not in self._stubs: + self._stubs["list_accounts"] = self.grpc_channel.unary_unary( + "/google.shopping.merchant.accounts.v1beta.AccountsService/ListAccounts", + request_serializer=accounts.ListAccountsRequest.serialize, + response_deserializer=accounts.ListAccountsResponse.deserialize, + ) + return self._stubs["list_accounts"] + + @property + def list_sub_accounts( + self, + ) -> Callable[[accounts.ListSubAccountsRequest], accounts.ListSubAccountsResponse]: + r"""Return a callable for the list sub accounts method over gRPC. + + List all sub-accounts for a given multi client account. This is + a convenience wrapper for the more powerful ``ListAccounts`` + method. This method will produce the same results as calling + ``ListsAccounts`` with the following filter: + ``relationship(providerId={parent} AND service(type="ACCOUNT_AGGREGATION"))`` + + Returns: + Callable[[~.ListSubAccountsRequest], + ~.ListSubAccountsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
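A sketch of driving the ``update_account`` stub above through the generated client. It assumes the package is installed, Application Default Credentials are available, and that the method exposes the usual GAPIC flattened ``account`` and ``update_mask`` parameters; the account ID and the ``account_name`` field are placeholders/assumptions.

```python
# Minimal sketch; IDs and field names below are placeholders/assumptions.
from google.protobuf import field_mask_pb2
from google.shopping import merchant_accounts_v1beta

client = merchant_accounts_v1beta.AccountsServiceClient()

account = merchant_accounts_v1beta.Account(
    name="accounts/123456",
    account_name="Renamed store",  # display-name field assumed
)

# Only the fields named in the mask are written; everything else is untouched.
updated = client.update_account(
    account=account,
    update_mask=field_mask_pb2.FieldMask(paths=["account_name"]),
)
print(updated.name)
```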
+ if "list_sub_accounts" not in self._stubs: + self._stubs["list_sub_accounts"] = self.grpc_channel.unary_unary( + "/google.shopping.merchant.accounts.v1beta.AccountsService/ListSubAccounts", + request_serializer=accounts.ListSubAccountsRequest.serialize, + response_deserializer=accounts.ListSubAccountsResponse.deserialize, + ) + return self._stubs["list_sub_accounts"] + + def close(self): + self.grpc_channel.close() + + @property + def kind(self) -> str: + return "grpc" + + +__all__ = ("AccountsServiceGrpcTransport",) diff --git a/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/accounts_service/transports/grpc_asyncio.py b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/accounts_service/transports/grpc_asyncio.py new file mode 100644 index 000000000000..99dbd8c9497b --- /dev/null +++ b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/accounts_service/transports/grpc_asyncio.py @@ -0,0 +1,461 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1, grpc_helpers_async +from google.api_core import retry_async as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.protobuf import empty_pb2 # type: ignore +import grpc # type: ignore +from grpc.experimental import aio # type: ignore + +from google.shopping.merchant_accounts_v1beta.types import accounts + +from .base import DEFAULT_CLIENT_INFO, AccountsServiceTransport +from .grpc import AccountsServiceGrpcTransport + + +class AccountsServiceGrpcAsyncIOTransport(AccountsServiceTransport): + """gRPC AsyncIO backend transport for AccountsService. + + Service to support Accounts API. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + + _grpc_channel: aio.Channel + _stubs: Dict[str, Callable] = {} + + @classmethod + def create_channel( + cls, + host: str = "merchantapi.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> aio.Channel: + """Create and return a gRPC AsyncIO channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. 
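The docstring above presents ``ListSubAccounts`` as a convenience wrapper over ``ListAccounts`` with an ``ACCOUNT_AGGREGATION`` relationship filter. The sketch below shows the two calls side by side; the ``provider`` field name, the account ID, and the exact filter literal are assumptions taken from that docstring, and the package plus Application Default Credentials are assumed to be available.

```python
# Minimal sketch; "accounts/123456" is a placeholder MCA, and the request
# field names / filter string are assumptions based on the docstring above.
from google.shopping import merchant_accounts_v1beta

client = merchant_accounts_v1beta.AccountsServiceClient()

# Convenience form: list the sub-accounts of one multi-client account.
sub_accounts = list(
    client.list_sub_accounts(
        merchant_accounts_v1beta.ListSubAccountsRequest(provider="accounts/123456")
    )
)

# Equivalent general form: ListAccounts constrained by a relationship filter.
filtered = list(
    client.list_accounts(
        merchant_accounts_v1beta.ListAccountsRequest(
            filter='relationship(providerId = 123456 AND service(type = "ACCOUNT_AGGREGATION"))'
        )
    )
)
```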
If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + aio.Channel: A gRPC AsyncIO channel object. + """ + + return grpc_helpers_async.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs, + ) + + def __init__( + self, + *, + host: str = "merchantapi.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[Union[aio.Channel, Callable[..., aio.Channel]]] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'merchantapi.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if a ``channel`` instance is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if a ``channel`` instance is provided. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + channel (Optional[Union[aio.Channel, Callable[..., aio.Channel]]]): + A ``Channel`` instance through which to make calls, or a Callable + that constructs and returns one. If set to None, ``self.create_channel`` + is used to create the channel. If a Callable is given, it will be called + with the same arguments as used in ``self.create_channel``. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. 
It is ignored if a ``channel`` instance is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if isinstance(channel, aio.Channel): + # Ignore credentials if a channel was passed. + credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + # initialize with the provided callable or the default channel + channel_init = channel or type(self).create_channel + self._grpc_channel = channel_init( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @property + def grpc_channel(self) -> aio.Channel: + """Create the channel designed to connect to this service. + + This property caches on the instance; repeated calls return + the same channel. + """ + # Return the channel from cache. 
+ return self._grpc_channel + + @property + def get_account( + self, + ) -> Callable[[accounts.GetAccountRequest], Awaitable[accounts.Account]]: + r"""Return a callable for the get account method over gRPC. + + Retrieves an account from your Merchant Center + account. After inserting, updating, or deleting an + account, it may take several minutes before changes take + effect. + + Returns: + Callable[[~.GetAccountRequest], + Awaitable[~.Account]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_account" not in self._stubs: + self._stubs["get_account"] = self.grpc_channel.unary_unary( + "/google.shopping.merchant.accounts.v1beta.AccountsService/GetAccount", + request_serializer=accounts.GetAccountRequest.serialize, + response_deserializer=accounts.Account.deserialize, + ) + return self._stubs["get_account"] + + @property + def create_and_configure_account( + self, + ) -> Callable[ + [accounts.CreateAndConfigureAccountRequest], Awaitable[accounts.Account] + ]: + r"""Return a callable for the create and configure account method over gRPC. + + Creates a standalone Merchant Center account with + additional configuration. Adds the user that makes the + request as an admin for the new account. + + Returns: + Callable[[~.CreateAndConfigureAccountRequest], + Awaitable[~.Account]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_and_configure_account" not in self._stubs: + self._stubs["create_and_configure_account"] = self.grpc_channel.unary_unary( + "/google.shopping.merchant.accounts.v1beta.AccountsService/CreateAndConfigureAccount", + request_serializer=accounts.CreateAndConfigureAccountRequest.serialize, + response_deserializer=accounts.Account.deserialize, + ) + return self._stubs["create_and_configure_account"] + + @property + def delete_account( + self, + ) -> Callable[[accounts.DeleteAccountRequest], Awaitable[empty_pb2.Empty]]: + r"""Return a callable for the delete account method over gRPC. + + Deletes the specified account regardless of its type: + standalone, MCA or sub-account. Deleting an MCA leads to + the deletion of all of its sub-accounts. Executing this + method requires admin access. + + Returns: + Callable[[~.DeleteAccountRequest], + Awaitable[~.Empty]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_account" not in self._stubs: + self._stubs["delete_account"] = self.grpc_channel.unary_unary( + "/google.shopping.merchant.accounts.v1beta.AccountsService/DeleteAccount", + request_serializer=accounts.DeleteAccountRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs["delete_account"] + + @property + def update_account( + self, + ) -> Callable[[accounts.UpdateAccountRequest], Awaitable[accounts.Account]]: + r"""Return a callable for the update account method over gRPC. 
+ + Updates an account regardless of its type: + standalone, MCA or sub-account. Executing this method + requires admin access. + + Returns: + Callable[[~.UpdateAccountRequest], + Awaitable[~.Account]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_account" not in self._stubs: + self._stubs["update_account"] = self.grpc_channel.unary_unary( + "/google.shopping.merchant.accounts.v1beta.AccountsService/UpdateAccount", + request_serializer=accounts.UpdateAccountRequest.serialize, + response_deserializer=accounts.Account.deserialize, + ) + return self._stubs["update_account"] + + @property + def list_accounts( + self, + ) -> Callable[ + [accounts.ListAccountsRequest], Awaitable[accounts.ListAccountsResponse] + ]: + r"""Return a callable for the list accounts method over gRPC. + + Lists accounts accessible to the calling user and + matching the constraints of the request such as page + size or filters. This is not just listing the + sub-accounts of an MCA, but all accounts the calling + user has access to including other MCAs, linked + accounts, standalone accounts and so on. + + Returns: + Callable[[~.ListAccountsRequest], + Awaitable[~.ListAccountsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_accounts" not in self._stubs: + self._stubs["list_accounts"] = self.grpc_channel.unary_unary( + "/google.shopping.merchant.accounts.v1beta.AccountsService/ListAccounts", + request_serializer=accounts.ListAccountsRequest.serialize, + response_deserializer=accounts.ListAccountsResponse.deserialize, + ) + return self._stubs["list_accounts"] + + @property + def list_sub_accounts( + self, + ) -> Callable[ + [accounts.ListSubAccountsRequest], Awaitable[accounts.ListSubAccountsResponse] + ]: + r"""Return a callable for the list sub accounts method over gRPC. + + List all sub-accounts for a given multi client account. This is + a convenience wrapper for the more powerful ``ListAccounts`` + method. This method will produce the same results as calling + ``ListsAccounts`` with the following filter: + ``relationship(providerId={parent} AND service(type="ACCOUNT_AGGREGATION"))`` + + Returns: + Callable[[~.ListSubAccountsRequest], + Awaitable[~.ListSubAccountsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "list_sub_accounts" not in self._stubs: + self._stubs["list_sub_accounts"] = self.grpc_channel.unary_unary( + "/google.shopping.merchant.accounts.v1beta.AccountsService/ListSubAccounts", + request_serializer=accounts.ListSubAccountsRequest.serialize, + response_deserializer=accounts.ListSubAccountsResponse.deserialize, + ) + return self._stubs["list_sub_accounts"] + + def _prep_wrapped_messages(self, client_info): + """Precompute the wrapped methods, overriding the base class method to use async wrappers.""" + self._wrapped_methods = { + self.get_account: gapic_v1.method_async.wrap_method( + self.get_account, + default_timeout=None, + client_info=client_info, + ), + self.create_and_configure_account: gapic_v1.method_async.wrap_method( + self.create_and_configure_account, + default_timeout=None, + client_info=client_info, + ), + self.delete_account: gapic_v1.method_async.wrap_method( + self.delete_account, + default_timeout=None, + client_info=client_info, + ), + self.update_account: gapic_v1.method_async.wrap_method( + self.update_account, + default_timeout=None, + client_info=client_info, + ), + self.list_accounts: gapic_v1.method_async.wrap_method( + self.list_accounts, + default_timeout=None, + client_info=client_info, + ), + self.list_sub_accounts: gapic_v1.method_async.wrap_method( + self.list_sub_accounts, + default_timeout=None, + client_info=client_info, + ), + } + + def close(self): + return self.grpc_channel.close() + + +__all__ = ("AccountsServiceGrpcAsyncIOTransport",) diff --git a/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/accounts_service/transports/rest.py b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/accounts_service/transports/rest.py new file mode 100644 index 000000000000..30b38720cf32 --- /dev/null +++ b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/accounts_service/transports/rest.py @@ -0,0 +1,897 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
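The asyncio transport above is what backs ``AccountsServiceAsyncClient``, whose pagers (defined earlier in this diff) are consumed with ``async for``. A minimal sketch, assuming the package is installed and Application Default Credentials are available:

```python
# Minimal sketch, assuming the package is installed and ADC is configured.
import asyncio

from google.shopping import merchant_accounts_v1beta

async def main() -> None:
    client = merchant_accounts_v1beta.AccountsServiceAsyncClient()
    # The coroutine returns an async pager; ``async for`` walks every page,
    # issuing further ListAccounts calls as needed.
    pager = await client.list_accounts()
    async for account in pager:
        print(account.name)

asyncio.run(main())
```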
+# + +import dataclasses +import json # type: ignore +import re +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, path_template, rest_helpers, rest_streaming +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.transport.requests import AuthorizedSession # type: ignore +from google.protobuf import json_format +import grpc # type: ignore +from requests import __version__ as requests_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + + +from google.protobuf import empty_pb2 # type: ignore + +from google.shopping.merchant_accounts_v1beta.types import accounts + +from .base import AccountsServiceTransport +from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) + + +class AccountsServiceRestInterceptor: + """Interceptor for AccountsService. + + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. + Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the AccountsServiceRestTransport. + + .. 
code-block:: python + class MyCustomAccountsServiceInterceptor(AccountsServiceRestInterceptor): + def pre_create_and_configure_account(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_create_and_configure_account(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_delete_account(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def pre_get_account(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_account(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_accounts(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_accounts(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_sub_accounts(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_sub_accounts(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_update_account(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_update_account(self, response): + logging.log(f"Received response: {response}") + return response + + transport = AccountsServiceRestTransport(interceptor=MyCustomAccountsServiceInterceptor()) + client = AccountsServiceClient(transport=transport) + + + """ + + def pre_create_and_configure_account( + self, + request: accounts.CreateAndConfigureAccountRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[accounts.CreateAndConfigureAccountRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for create_and_configure_account + + Override in a subclass to manipulate the request or metadata + before they are sent to the AccountsService server. + """ + return request, metadata + + def post_create_and_configure_account( + self, response: accounts.Account + ) -> accounts.Account: + """Post-rpc interceptor for create_and_configure_account + + Override in a subclass to manipulate the response + after it is returned by the AccountsService server but before + it is returned to user code. + """ + return response + + def pre_delete_account( + self, + request: accounts.DeleteAccountRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[accounts.DeleteAccountRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for delete_account + + Override in a subclass to manipulate the request or metadata + before they are sent to the AccountsService server. + """ + return request, metadata + + def pre_get_account( + self, request: accounts.GetAccountRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[accounts.GetAccountRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_account + + Override in a subclass to manipulate the request or metadata + before they are sent to the AccountsService server. + """ + return request, metadata + + def post_get_account(self, response: accounts.Account) -> accounts.Account: + """Post-rpc interceptor for get_account + + Override in a subclass to manipulate the response + after it is returned by the AccountsService server but before + it is returned to user code. 
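Building on the skeleton in the interceptor docstring above, the sketch below wires one concrete subclass into the REST transport: it tags outgoing ``GetAccount`` calls with an extra header and logs the responses. It assumes the package is installed and Application Default Credentials are available; the header name and value are arbitrary.

```python
# Minimal sketch; the header key/value are arbitrary illustration values.
import logging
from typing import Sequence, Tuple

from google.shopping import merchant_accounts_v1beta
from google.shopping.merchant_accounts_v1beta.services.accounts_service.transports import (
    AccountsServiceRestInterceptor,
    AccountsServiceRestTransport,
)

class TaggingInterceptor(AccountsServiceRestInterceptor):
    def pre_get_account(self, request, metadata: Sequence[Tuple[str, str]]):
        # Metadata is a sequence of (key, value) pairs sent as HTTP headers.
        return request, list(metadata) + [("x-client-tag", "accounts-demo")]

    def post_get_account(self, response):
        logging.info("GetAccount returned %s", response.name)
        return response

transport = AccountsServiceRestTransport(interceptor=TaggingInterceptor())
client = merchant_accounts_v1beta.AccountsServiceClient(transport=transport)
```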
+ """ + return response + + def pre_list_accounts( + self, request: accounts.ListAccountsRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[accounts.ListAccountsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_accounts + + Override in a subclass to manipulate the request or metadata + before they are sent to the AccountsService server. + """ + return request, metadata + + def post_list_accounts( + self, response: accounts.ListAccountsResponse + ) -> accounts.ListAccountsResponse: + """Post-rpc interceptor for list_accounts + + Override in a subclass to manipulate the response + after it is returned by the AccountsService server but before + it is returned to user code. + """ + return response + + def pre_list_sub_accounts( + self, + request: accounts.ListSubAccountsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[accounts.ListSubAccountsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_sub_accounts + + Override in a subclass to manipulate the request or metadata + before they are sent to the AccountsService server. + """ + return request, metadata + + def post_list_sub_accounts( + self, response: accounts.ListSubAccountsResponse + ) -> accounts.ListSubAccountsResponse: + """Post-rpc interceptor for list_sub_accounts + + Override in a subclass to manipulate the response + after it is returned by the AccountsService server but before + it is returned to user code. + """ + return response + + def pre_update_account( + self, + request: accounts.UpdateAccountRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[accounts.UpdateAccountRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for update_account + + Override in a subclass to manipulate the request or metadata + before they are sent to the AccountsService server. + """ + return request, metadata + + def post_update_account(self, response: accounts.Account) -> accounts.Account: + """Post-rpc interceptor for update_account + + Override in a subclass to manipulate the response + after it is returned by the AccountsService server but before + it is returned to user code. + """ + return response + + +@dataclasses.dataclass +class AccountsServiceRestStub: + _session: AuthorizedSession + _host: str + _interceptor: AccountsServiceRestInterceptor + + +class AccountsServiceRestTransport(AccountsServiceTransport): + """REST backend transport for AccountsService. + + Service to support Accounts API. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends JSON representations of protocol buffers over HTTP/1.1 + + """ + + def __init__( + self, + *, + host: str = "merchantapi.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", + interceptor: Optional[AccountsServiceRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'merchantapi.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. 
These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. It is ignored + if ``channel`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. + """ + # Run the base constructor + # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. + # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the + # credentials object + maybe_url_match = re.match("^(?Phttp(?:s)?://)?(?P.*)$", host) + if maybe_url_match is None: + raise ValueError( + f"Unexpected hostname structure: {host}" + ) # pragma: NO COVER + + url_match_items = maybe_url_match.groupdict() + + host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host + + super().__init__( + host=host, + credentials=credentials, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + self._session = AuthorizedSession( + self._credentials, default_host=self.DEFAULT_HOST + ) + if client_cert_source_for_mtls: + self._session.configure_mtls_channel(client_cert_source_for_mtls) + self._interceptor = interceptor or AccountsServiceRestInterceptor() + self._prep_wrapped_messages(client_info) + + class _CreateAndConfigureAccount(AccountsServiceRestStub): + def __hash__(self): + return hash("CreateAndConfigureAccount") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: accounts.CreateAndConfigureAccountRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> accounts.Account: + r"""Call the create and configure + account method over HTTP. + + Args: + request (~.accounts.CreateAndConfigureAccountRequest): + The request object. Request message for the ``CreateAndConfigureAccount`` + method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.accounts.Account: + An account. 
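Because the constructor above accepts a bare hostname plus a ``url_scheme``, the REST transport can also be pointed at a plain-HTTP test server. A minimal sketch, assuming the package is installed; ``localhost:8080`` is a placeholder for a local mock of the API, and anonymous credentials skip the ADC lookup.

```python
# Minimal sketch; localhost:8080 stands in for a local mock of the API.
from google.auth.credentials import AnonymousCredentials
from google.shopping import merchant_accounts_v1beta
from google.shopping.merchant_accounts_v1beta.services.accounts_service.transports import (
    AccountsServiceRestTransport,
)

transport = AccountsServiceRestTransport(
    host="localhost:8080",
    url_scheme="http",  # the constructor prefixes the scheme when none is given
    credentials=AnonymousCredentials(),
)
client = merchant_accounts_v1beta.AccountsServiceClient(transport=transport)
```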
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/accounts/v1beta/accounts:createAndConfigure", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_create_and_configure_account( + request, metadata + ) + pb_request = accounts.CreateAndConfigureAccountRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = accounts.Account() + pb_resp = accounts.Account.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_create_and_configure_account(resp) + return resp + + class _DeleteAccount(AccountsServiceRestStub): + def __hash__(self): + return hash("DeleteAccount") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: accounts.DeleteAccountRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ): + r"""Call the delete account method over HTTP. + + Args: + request (~.accounts.DeleteAccountRequest): + The request object. Request message for the ``DeleteAccount`` method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
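Since every REST stub above converts non-2xx responses with ``core_exceptions.from_http_response``, callers see the usual ``google.api_core`` exception hierarchy. A sketch, assuming the package is installed, Application Default Credentials are available, and that ``get_account`` exposes the standard flattened ``name`` parameter; the account ID is a placeholder.

```python
# Minimal sketch; the account ID is a placeholder that likely does not exist.
from google.api_core import exceptions as core_exceptions
from google.shopping import merchant_accounts_v1beta

client = merchant_accounts_v1beta.AccountsServiceClient(transport="rest")

try:
    client.get_account(name="accounts/0")
except core_exceptions.NotFound as exc:
    print(f"no such account: {exc}")
except core_exceptions.GoogleAPICallError as exc:
    print(f"call failed with {exc.code}: {exc}")
```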
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/accounts/v1beta/{name=accounts/*}", + }, + ] + request, metadata = self._interceptor.pre_delete_account(request, metadata) + pb_request = accounts.DeleteAccountRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + class _GetAccount(AccountsServiceRestStub): + def __hash__(self): + return hash("GetAccount") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: accounts.GetAccountRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> accounts.Account: + r"""Call the get account method over HTTP. + + Args: + request (~.accounts.GetAccountRequest): + The request object. Request message for the ``GetAccount`` method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.accounts.Account: + An account. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/accounts/v1beta/{name=accounts/*}", + }, + ] + request, metadata = self._interceptor.pre_get_account(request, metadata) + pb_request = accounts.GetAccountRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = accounts.Account() + pb_resp = accounts.Account.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_account(resp) + return resp + + class _ListAccounts(AccountsServiceRestStub): + def __hash__(self): + return hash("ListAccounts") + + def __call__( + self, + request: accounts.ListAccountsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> accounts.ListAccountsResponse: + r"""Call the list accounts method over HTTP. + + Args: + request (~.accounts.ListAccountsRequest): + The request object. Request message for the ``ListAccounts`` method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.accounts.ListAccountsResponse: + Response message for the ``ListAccounts`` method. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/accounts/v1beta/accounts", + }, + ] + request, metadata = self._interceptor.pre_list_accounts(request, metadata) + pb_request = accounts.ListAccountsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = accounts.ListAccountsResponse() + pb_resp = accounts.ListAccountsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_accounts(resp) + return resp + + class _ListSubAccounts(AccountsServiceRestStub): + def __hash__(self): + return hash("ListSubAccounts") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: accounts.ListSubAccountsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> accounts.ListSubAccountsResponse: + r"""Call the list sub accounts method over HTTP. + + Args: + request (~.accounts.ListSubAccountsRequest): + The request object. Request message for the ``ListSubAccounts`` method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.accounts.ListSubAccountsResponse: + Response message for the ``ListSubAccounts`` method. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/accounts/v1beta/{provider=accounts/*}:listSubaccounts", + }, + ] + request, metadata = self._interceptor.pre_list_sub_accounts( + request, metadata + ) + pb_request = accounts.ListSubAccountsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = accounts.ListSubAccountsResponse() + pb_resp = accounts.ListSubAccountsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_sub_accounts(resp) + return resp + + class _UpdateAccount(AccountsServiceRestStub): + def __hash__(self): + return hash("UpdateAccount") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "updateMask": {}, + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: accounts.UpdateAccountRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> accounts.Account: + r"""Call the update account method over HTTP. + + Args: + request (~.accounts.UpdateAccountRequest): + The request object. Request message for the ``UpdateAccount`` method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.accounts.Account: + An account. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "patch", + "uri": "/accounts/v1beta/{account.name=accounts/*}", + "body": "account", + }, + ] + request, metadata = self._interceptor.pre_update_account(request, metadata) + pb_request = accounts.UpdateAccountRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = accounts.Account() + pb_resp = accounts.Account.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_update_account(resp) + return resp + + @property + def create_and_configure_account( + self, + ) -> Callable[[accounts.CreateAndConfigureAccountRequest], accounts.Account]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._CreateAndConfigureAccount(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete_account( + self, + ) -> Callable[[accounts.DeleteAccountRequest], empty_pb2.Empty]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._DeleteAccount(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_account(self) -> Callable[[accounts.GetAccountRequest], accounts.Account]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetAccount(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_accounts( + self, + ) -> Callable[[accounts.ListAccountsRequest], accounts.ListAccountsResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListAccounts(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_sub_accounts( + self, + ) -> Callable[[accounts.ListSubAccountsRequest], accounts.ListSubAccountsResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._ListSubAccounts(self._session, self._host, self._interceptor) # type: ignore + + @property + def update_account( + self, + ) -> Callable[[accounts.UpdateAccountRequest], accounts.Account]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._UpdateAccount(self._session, self._host, self._interceptor) # type: ignore + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__ = ("AccountsServiceRestTransport",) diff --git a/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/business_identity_service/__init__.py b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/business_identity_service/__init__.py new file mode 100644 index 000000000000..54c7765ba3bc --- /dev/null +++ b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/business_identity_service/__init__.py @@ -0,0 +1,22 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from .async_client import BusinessIdentityServiceAsyncClient +from .client import BusinessIdentityServiceClient + +__all__ = ( + "BusinessIdentityServiceClient", + "BusinessIdentityServiceAsyncClient", +) diff --git a/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/business_identity_service/async_client.py b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/business_identity_service/async_client.py new file mode 100644 index 000000000000..686902735e1a --- /dev/null +++ b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/business_identity_service/async_client.py @@ -0,0 +1,518 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
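A minimal usage sketch for the REST transport wired up above. It is hedged: it assumes the package also exposes an `AccountsServiceClient` in `merchant_accounts_v1beta` (mirroring the `BusinessIdentityServiceClient` added later in this change) and that Application Default Credentials are configured in the environment; with `transport="rest"`, each call is routed through the per-method stubs such as `_ListAccounts` shown earlier.

```python
# Hedged sketch, assuming merchant_accounts_v1beta exposes AccountsServiceClient
# and that Application Default Credentials are available.
from google.shopping import merchant_accounts_v1beta


def list_accounts_over_rest() -> None:
    # Selecting the "rest" transport label routes calls through
    # AccountsServiceRestTransport instead of gRPC.
    client = merchant_accounts_v1beta.AccountsServiceClient(transport="rest")

    request = merchant_accounts_v1beta.ListAccountsRequest()

    # List methods return a pager; each page is fetched by the _ListAccounts
    # stub, which transcodes the request to GET /accounts/v1beta/accounts.
    for account in client.list_accounts(request=request):
        print(account.name)


if __name__ == "__main__":
    list_accounts_over_rest()
```

The same pattern applies to the other stubs (`GetAccount`, `UpdateAccount`, and so on), which differ only in HTTP method, URI template, and whether a request body is sent.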
+# +from collections import OrderedDict +import functools +import re +from typing import ( + Callable, + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, +) + +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry_async as retries +from google.api_core.client_options import ClientOptions +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.shopping.merchant_accounts_v1beta import gapic_version as package_version + +try: + OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.AsyncRetry, object, None] # type: ignore + +from google.protobuf import field_mask_pb2 # type: ignore + +from google.shopping.merchant_accounts_v1beta.types import businessidentity + +from .client import BusinessIdentityServiceClient +from .transports.base import DEFAULT_CLIENT_INFO, BusinessIdentityServiceTransport +from .transports.grpc_asyncio import BusinessIdentityServiceGrpcAsyncIOTransport + + +class BusinessIdentityServiceAsyncClient: + """Service to support `business + identity `__ + API. + """ + + _client: BusinessIdentityServiceClient + + # Copy defaults from the synchronous client for use here. + # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. + DEFAULT_ENDPOINT = BusinessIdentityServiceClient.DEFAULT_ENDPOINT + DEFAULT_MTLS_ENDPOINT = BusinessIdentityServiceClient.DEFAULT_MTLS_ENDPOINT + _DEFAULT_ENDPOINT_TEMPLATE = ( + BusinessIdentityServiceClient._DEFAULT_ENDPOINT_TEMPLATE + ) + _DEFAULT_UNIVERSE = BusinessIdentityServiceClient._DEFAULT_UNIVERSE + + business_identity_path = staticmethod( + BusinessIdentityServiceClient.business_identity_path + ) + parse_business_identity_path = staticmethod( + BusinessIdentityServiceClient.parse_business_identity_path + ) + common_billing_account_path = staticmethod( + BusinessIdentityServiceClient.common_billing_account_path + ) + parse_common_billing_account_path = staticmethod( + BusinessIdentityServiceClient.parse_common_billing_account_path + ) + common_folder_path = staticmethod(BusinessIdentityServiceClient.common_folder_path) + parse_common_folder_path = staticmethod( + BusinessIdentityServiceClient.parse_common_folder_path + ) + common_organization_path = staticmethod( + BusinessIdentityServiceClient.common_organization_path + ) + parse_common_organization_path = staticmethod( + BusinessIdentityServiceClient.parse_common_organization_path + ) + common_project_path = staticmethod( + BusinessIdentityServiceClient.common_project_path + ) + parse_common_project_path = staticmethod( + BusinessIdentityServiceClient.parse_common_project_path + ) + common_location_path = staticmethod( + BusinessIdentityServiceClient.common_location_path + ) + parse_common_location_path = staticmethod( + BusinessIdentityServiceClient.parse_common_location_path + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + BusinessIdentityServiceAsyncClient: The constructed client. 
+ """ + return BusinessIdentityServiceClient.from_service_account_info.__func__(BusinessIdentityServiceAsyncClient, info, *args, **kwargs) # type: ignore + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + BusinessIdentityServiceAsyncClient: The constructed client. + """ + return BusinessIdentityServiceClient.from_service_account_file.__func__(BusinessIdentityServiceAsyncClient, filename, *args, **kwargs) # type: ignore + + from_service_account_json = from_service_account_file + + @classmethod + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[ClientOptions] = None + ): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + return BusinessIdentityServiceClient.get_mtls_endpoint_and_cert_source(client_options) # type: ignore + + @property + def transport(self) -> BusinessIdentityServiceTransport: + """Returns the transport used by the client instance. + + Returns: + BusinessIdentityServiceTransport: The transport used by the client instance. + """ + return self._client.transport + + @property + def api_endpoint(self): + """Return the API endpoint used by the client instance. + + Returns: + str: The API endpoint used by the client instance. + """ + return self._client._api_endpoint + + @property + def universe_domain(self) -> str: + """Return the universe domain used by the client instance. + + Returns: + str: The universe domain used + by the client instance. 
+ """ + return self._client._universe_domain + + get_transport_class = functools.partial( + type(BusinessIdentityServiceClient).get_transport_class, + type(BusinessIdentityServiceClient), + ) + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[ + Union[ + str, + BusinessIdentityServiceTransport, + Callable[..., BusinessIdentityServiceTransport], + ] + ] = "grpc_asyncio", + client_options: Optional[ClientOptions] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the business identity service async client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Optional[Union[str,BusinessIdentityServiceTransport,Callable[..., BusinessIdentityServiceTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport to use. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the BusinessIdentityServiceTransport constructor. + If set to None, a transport is chosen automatically. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): + Custom options for the client. + + 1. The ``api_endpoint`` property can be used to override the + default endpoint provided by the client when ``transport`` is + not explicitly provided. Only if this property is not set and + ``transport`` was not explicitly provided, the endpoint is + determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment + variable, which have one of the following values: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto-switch to the + default mTLS endpoint if client certificate is present; this is + the default value). + + 2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide a client certificate for mTLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + 3. The ``universe_domain`` property can be used to override the + default "googleapis.com" universe. Note that ``api_endpoint`` + property still takes precedence; and ``universe_domain`` is + currently not supported for mTLS. + + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. 
+ """ + self._client = BusinessIdentityServiceClient( + credentials=credentials, + transport=transport, + client_options=client_options, + client_info=client_info, + ) + + async def get_business_identity( + self, + request: Optional[ + Union[businessidentity.GetBusinessIdentityRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> businessidentity.BusinessIdentity: + r"""Retrieves the business identity of an account. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.shopping import merchant_accounts_v1beta + + async def sample_get_business_identity(): + # Create a client + client = merchant_accounts_v1beta.BusinessIdentityServiceAsyncClient() + + # Initialize request argument(s) + request = merchant_accounts_v1beta.GetBusinessIdentityRequest( + name="name_value", + ) + + # Make the request + response = await client.get_business_identity(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.shopping.merchant_accounts_v1beta.types.GetBusinessIdentityRequest, dict]]): + The request object. Request message for the ``GetBusinessIdentity`` method. + name (:class:`str`): + Required. The resource name of the business identity. + Format: ``accounts/{account}/businessIdentity`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.shopping.merchant_accounts_v1beta.types.BusinessIdentity: + Collection of information related to the [identity of a + business](\ https://support.google.com/merchants/answer/12564247). + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, businessidentity.GetBusinessIdentityRequest): + request = businessidentity.GetBusinessIdentityRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = self._client._transport._wrapped_methods[ + self._client._transport.get_business_identity + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def update_business_identity( + self, + request: Optional[ + Union[businessidentity.UpdateBusinessIdentityRequest, dict] + ] = None, + *, + business_identity: Optional[businessidentity.BusinessIdentity] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> businessidentity.BusinessIdentity: + r"""Updates the business identity of an account. + Executing this method requires admin access. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.shopping import merchant_accounts_v1beta + + async def sample_update_business_identity(): + # Create a client + client = merchant_accounts_v1beta.BusinessIdentityServiceAsyncClient() + + # Initialize request argument(s) + request = merchant_accounts_v1beta.UpdateBusinessIdentityRequest( + ) + + # Make the request + response = await client.update_business_identity(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.shopping.merchant_accounts_v1beta.types.UpdateBusinessIdentityRequest, dict]]): + The request object. Request message for the ``UpdateBusinessIdentity`` + method. + business_identity (:class:`google.shopping.merchant_accounts_v1beta.types.BusinessIdentity`): + Required. The new version of the + business identity. + + This corresponds to the ``business_identity`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): + Required. List of fields being + updated. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.shopping.merchant_accounts_v1beta.types.BusinessIdentity: + Collection of information related to the [identity of a + business](\ https://support.google.com/merchants/answer/12564247). + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([business_identity, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, businessidentity.UpdateBusinessIdentityRequest): + request = businessidentity.UpdateBusinessIdentityRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if business_identity is not None: + request.business_identity = business_identity + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.update_business_identity + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("business_identity.name", request.business_identity.name),) + ), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def __aenter__(self) -> "BusinessIdentityServiceAsyncClient": + return self + + async def __aexit__(self, exc_type, exc, tb): + await self.transport.close() + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("BusinessIdentityServiceAsyncClient",) diff --git a/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/business_identity_service/client.py b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/business_identity_service/client.py new file mode 100644 index 000000000000..ec0111bfaf1e --- /dev/null +++ b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/business_identity_service/client.py @@ -0,0 +1,926 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
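The async client added above delegates to the synchronous `BusinessIdentityServiceClient` that follows. A condensed version of its generated `get_business_identity` sample is sketched below; the account ID is hypothetical, and Application Default Credentials are assumed to be available.

```python
# Hedged sketch based on the generated sample above; the account ID is hypothetical.
import asyncio

from google.shopping import merchant_accounts_v1beta


async def main() -> None:
    # Uses the grpc_asyncio transport by default.
    client = merchant_accounts_v1beta.BusinessIdentityServiceAsyncClient()

    # Resource name format documented above: accounts/{account}/businessIdentity.
    request = merchant_accounts_v1beta.GetBusinessIdentityRequest(
        name="accounts/123/businessIdentity",  # hypothetical account ID
    )

    response = await client.get_business_identity(request=request)
    print(response)


if __name__ == "__main__":
    asyncio.run(main())
```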
+#
+ from collections import OrderedDict
+ import os
+ import re
+ from typing import (
+ Callable,
+ Dict,
+ Mapping,
+ MutableMapping,
+ MutableSequence,
+ Optional,
+ Sequence,
+ Tuple,
+ Type,
+ Union,
+ cast,
+ )
+ import warnings
+
+ from google.api_core import client_options as client_options_lib
+ from google.api_core import exceptions as core_exceptions
+ from google.api_core import gapic_v1
+ from google.api_core import retry as retries
+ from google.auth import credentials as ga_credentials # type: ignore
+ from google.auth.exceptions import MutualTLSChannelError # type: ignore
+ from google.auth.transport import mtls # type: ignore
+ from google.auth.transport.grpc import SslCredentials # type: ignore
+ from google.oauth2 import service_account # type: ignore
+
+ from google.shopping.merchant_accounts_v1beta import gapic_version as package_version
+
+ try:
+ OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None]
+ except AttributeError: # pragma: NO COVER
+ OptionalRetry = Union[retries.Retry, object, None] # type: ignore
+
+ from google.protobuf import field_mask_pb2 # type: ignore
+
+ from google.shopping.merchant_accounts_v1beta.types import businessidentity
+
+ from .transports.base import DEFAULT_CLIENT_INFO, BusinessIdentityServiceTransport
+ from .transports.grpc import BusinessIdentityServiceGrpcTransport
+ from .transports.grpc_asyncio import BusinessIdentityServiceGrpcAsyncIOTransport
+ from .transports.rest import BusinessIdentityServiceRestTransport
+
+
+ class BusinessIdentityServiceClientMeta(type):
+ """Metaclass for the BusinessIdentityService client.
+
+ This provides class-level methods for building and retrieving
+ support objects (e.g. transport) without polluting the client instance
+ objects.
+ """
+
+ _transport_registry = (
+ OrderedDict()
+ ) # type: Dict[str, Type[BusinessIdentityServiceTransport]]
+ _transport_registry["grpc"] = BusinessIdentityServiceGrpcTransport
+ _transport_registry["grpc_asyncio"] = BusinessIdentityServiceGrpcAsyncIOTransport
+ _transport_registry["rest"] = BusinessIdentityServiceRestTransport
+
+ def get_transport_class(
+ cls,
+ label: Optional[str] = None,
+ ) -> Type[BusinessIdentityServiceTransport]:
+ """Returns an appropriate transport class.
+
+ Args:
+ label: The name of the desired transport. If none is
+ provided, then the first transport in the registry is used.
+
+ Returns:
+ The transport class to use.
+ """
+ # If a specific transport is requested, return that one.
+ if label:
+ return cls._transport_registry[label]
+
+ # No transport is requested; return the default (that is, the first one
+ # in the dictionary).
+ return next(iter(cls._transport_registry.values()))
+
+
+ class BusinessIdentityServiceClient(metaclass=BusinessIdentityServiceClientMeta):
+ """Service to support `business
+ identity <https://support.google.com/merchants/answer/12564247>`__
+ API.
+ """
+
+ @staticmethod
+ def _get_default_mtls_endpoint(api_endpoint):
+ """Converts api endpoint to mTLS endpoint.
+
+ Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to
+ "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively.
+ Args:
+ api_endpoint (Optional[str]): the api endpoint to convert.
+ Returns:
+ str: converted mTLS api endpoint.
+ """
+ if not api_endpoint:
+ return api_endpoint
+
+ mtls_endpoint_re = re.compile(
+ r"(?P<name>[^.]+)(?P<mtls>\.mtls)?(?P<sandbox>\.sandbox)?(?P<googledomain>\.googleapis\.com)?"
+ ) + + m = mtls_endpoint_re.match(api_endpoint) + name, mtls, sandbox, googledomain = m.groups() + if mtls or not googledomain: + return api_endpoint + + if sandbox: + return api_endpoint.replace( + "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" + ) + + return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") + + # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. + DEFAULT_ENDPOINT = "merchantapi.googleapis.com" + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore + DEFAULT_ENDPOINT + ) + + _DEFAULT_ENDPOINT_TEMPLATE = "merchantapi.{UNIVERSE_DOMAIN}" + _DEFAULT_UNIVERSE = "googleapis.com" + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + BusinessIdentityServiceClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + BusinessIdentityServiceClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_file(filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> BusinessIdentityServiceTransport: + """Returns the transport used by the client instance. + + Returns: + BusinessIdentityServiceTransport: The transport used by the client + instance. 
+ """ + return self._transport + + @staticmethod + def business_identity_path( + account: str, + ) -> str: + """Returns a fully-qualified business_identity string.""" + return "accounts/{account}/businessIdentity".format( + account=account, + ) + + @staticmethod + def parse_business_identity_path(path: str) -> Dict[str, str]: + """Parses a business_identity path into its component segments.""" + m = re.match(r"^accounts/(?P.+?)/businessIdentity$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_billing_account_path( + billing_account: str, + ) -> str: + """Returns a fully-qualified billing_account string.""" + return "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + + @staticmethod + def parse_common_billing_account_path(path: str) -> Dict[str, str]: + """Parse a billing_account path into its component segments.""" + m = re.match(r"^billingAccounts/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_folder_path( + folder: str, + ) -> str: + """Returns a fully-qualified folder string.""" + return "folders/{folder}".format( + folder=folder, + ) + + @staticmethod + def parse_common_folder_path(path: str) -> Dict[str, str]: + """Parse a folder path into its component segments.""" + m = re.match(r"^folders/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_organization_path( + organization: str, + ) -> str: + """Returns a fully-qualified organization string.""" + return "organizations/{organization}".format( + organization=organization, + ) + + @staticmethod + def parse_common_organization_path(path: str) -> Dict[str, str]: + """Parse a organization path into its component segments.""" + m = re.match(r"^organizations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_project_path( + project: str, + ) -> str: + """Returns a fully-qualified project string.""" + return "projects/{project}".format( + project=project, + ) + + @staticmethod + def parse_common_project_path(path: str) -> Dict[str, str]: + """Parse a project path into its component segments.""" + m = re.match(r"^projects/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_location_path( + project: str, + location: str, + ) -> str: + """Returns a fully-qualified location string.""" + return "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) + + @staticmethod + def parse_common_location_path(path: str) -> Dict[str, str]: + """Parse a location path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @classmethod + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[client_options_lib.ClientOptions] = None + ): + """Deprecated. Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. 
+ (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + + warnings.warn( + "get_mtls_endpoint_and_cert_source is deprecated. Use the api_endpoint property instead.", + DeprecationWarning, + ) + if client_options is None: + client_options = client_options_lib.ClientOptions() + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_client_cert not in ("true", "false"): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError( + "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Figure out the client cert source to use. + client_cert_source = None + if use_client_cert == "true": + if client_options.client_cert_source: + client_cert_source = client_options.client_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + + # Figure out which api endpoint to use. + if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + elif use_mtls_endpoint == "always" or ( + use_mtls_endpoint == "auto" and client_cert_source + ): + api_endpoint = cls.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = cls.DEFAULT_ENDPOINT + + return api_endpoint, client_cert_source + + @staticmethod + def _read_environment_variables(): + """Returns the environment variables used by the client. + + Returns: + Tuple[bool, str, str]: returns the GOOGLE_API_USE_CLIENT_CERTIFICATE, + GOOGLE_API_USE_MTLS_ENDPOINT, and GOOGLE_CLOUD_UNIVERSE_DOMAIN environment variables. + + Raises: + ValueError: If GOOGLE_API_USE_CLIENT_CERTIFICATE is not + any of ["true", "false"]. + google.auth.exceptions.MutualTLSChannelError: If GOOGLE_API_USE_MTLS_ENDPOINT + is not any of ["auto", "never", "always"]. + """ + use_client_cert = os.getenv( + "GOOGLE_API_USE_CLIENT_CERTIFICATE", "false" + ).lower() + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto").lower() + universe_domain_env = os.getenv("GOOGLE_CLOUD_UNIVERSE_DOMAIN") + if use_client_cert not in ("true", "false"): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError( + "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + return use_client_cert == "true", use_mtls_endpoint, universe_domain_env + + @staticmethod + def _get_client_cert_source(provided_cert_source, use_cert_flag): + """Return the client cert source to be used by the client. 
+ + Args: + provided_cert_source (bytes): The client certificate source provided. + use_cert_flag (bool): A flag indicating whether to use the client certificate. + + Returns: + bytes or None: The client cert source to be used by the client. + """ + client_cert_source = None + if use_cert_flag: + if provided_cert_source: + client_cert_source = provided_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + return client_cert_source + + @staticmethod + def _get_api_endpoint( + api_override, client_cert_source, universe_domain, use_mtls_endpoint + ): + """Return the API endpoint used by the client. + + Args: + api_override (str): The API endpoint override. If specified, this is always + the return value of this function and the other arguments are not used. + client_cert_source (bytes): The client certificate source used by the client. + universe_domain (str): The universe domain used by the client. + use_mtls_endpoint (str): How to use the mTLS endpoint, which depends also on the other parameters. + Possible values are "always", "auto", or "never". + + Returns: + str: The API endpoint to be used by the client. + """ + if api_override is not None: + api_endpoint = api_override + elif use_mtls_endpoint == "always" or ( + use_mtls_endpoint == "auto" and client_cert_source + ): + _default_universe = BusinessIdentityServiceClient._DEFAULT_UNIVERSE + if universe_domain != _default_universe: + raise MutualTLSChannelError( + f"mTLS is not supported in any universe other than {_default_universe}." + ) + api_endpoint = BusinessIdentityServiceClient.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = ( + BusinessIdentityServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=universe_domain + ) + ) + return api_endpoint + + @staticmethod + def _get_universe_domain( + client_universe_domain: Optional[str], universe_domain_env: Optional[str] + ) -> str: + """Return the universe domain used by the client. + + Args: + client_universe_domain (Optional[str]): The universe domain configured via the client options. + universe_domain_env (Optional[str]): The universe domain configured via the "GOOGLE_CLOUD_UNIVERSE_DOMAIN" environment variable. + + Returns: + str: The universe domain to be used by the client. + + Raises: + ValueError: If the universe domain is an empty string. + """ + universe_domain = BusinessIdentityServiceClient._DEFAULT_UNIVERSE + if client_universe_domain is not None: + universe_domain = client_universe_domain + elif universe_domain_env is not None: + universe_domain = universe_domain_env + if len(universe_domain.strip()) == 0: + raise ValueError("Universe Domain cannot be an empty string.") + return universe_domain + + @staticmethod + def _compare_universes( + client_universe: str, credentials: ga_credentials.Credentials + ) -> bool: + """Returns True iff the universe domains used by the client and credentials match. + + Args: + client_universe (str): The universe domain configured via the client options. + credentials (ga_credentials.Credentials): The credentials being used in the client. + + Returns: + bool: True iff client_universe matches the universe in credentials. + + Raises: + ValueError: when client_universe does not match the universe in credentials. 
+ """ + + default_universe = BusinessIdentityServiceClient._DEFAULT_UNIVERSE + credentials_universe = getattr(credentials, "universe_domain", default_universe) + + if client_universe != credentials_universe: + raise ValueError( + "The configured universe domain " + f"({client_universe}) does not match the universe domain " + f"found in the credentials ({credentials_universe}). " + "If you haven't configured the universe domain explicitly, " + f"`{default_universe}` is the default." + ) + return True + + def _validate_universe_domain(self): + """Validates client's and credentials' universe domains are consistent. + + Returns: + bool: True iff the configured universe domain is valid. + + Raises: + ValueError: If the configured universe domain is not valid. + """ + self._is_universe_domain_valid = ( + self._is_universe_domain_valid + or BusinessIdentityServiceClient._compare_universes( + self.universe_domain, self.transport._credentials + ) + ) + return self._is_universe_domain_valid + + @property + def api_endpoint(self): + """Return the API endpoint used by the client instance. + + Returns: + str: The API endpoint used by the client instance. + """ + return self._api_endpoint + + @property + def universe_domain(self) -> str: + """Return the universe domain used by the client instance. + + Returns: + str: The universe domain used by the client instance. + """ + return self._universe_domain + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[ + Union[ + str, + BusinessIdentityServiceTransport, + Callable[..., BusinessIdentityServiceTransport], + ] + ] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the business identity service client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Optional[Union[str,BusinessIdentityServiceTransport,Callable[..., BusinessIdentityServiceTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the BusinessIdentityServiceTransport constructor. + If set to None, a transport is chosen automatically. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): + Custom options for the client. + + 1. The ``api_endpoint`` property can be used to override the + default endpoint provided by the client when ``transport`` is + not explicitly provided. Only if this property is not set and + ``transport`` was not explicitly provided, the endpoint is + determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment + variable, which have one of the following values: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto-switch to the + default mTLS endpoint if client certificate is present; this is + the default value). + + 2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide a client certificate for mTLS transport. If + not provided, the default SSL client certificate will be used if + present. 
If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + 3. The ``universe_domain`` property can be used to override the + default "googleapis.com" universe. Note that the ``api_endpoint`` + property still takes precedence; and ``universe_domain`` is + currently not supported for mTLS. + + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + """ + self._client_options = client_options + if isinstance(self._client_options, dict): + self._client_options = client_options_lib.from_dict(self._client_options) + if self._client_options is None: + self._client_options = client_options_lib.ClientOptions() + self._client_options = cast( + client_options_lib.ClientOptions, self._client_options + ) + + universe_domain_opt = getattr(self._client_options, "universe_domain", None) + + ( + self._use_client_cert, + self._use_mtls_endpoint, + self._universe_domain_env, + ) = BusinessIdentityServiceClient._read_environment_variables() + self._client_cert_source = ( + BusinessIdentityServiceClient._get_client_cert_source( + self._client_options.client_cert_source, self._use_client_cert + ) + ) + self._universe_domain = BusinessIdentityServiceClient._get_universe_domain( + universe_domain_opt, self._universe_domain_env + ) + self._api_endpoint = None # updated below, depending on `transport` + + # Initialize the universe domain validation. + self._is_universe_domain_valid = False + + api_key_value = getattr(self._client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError( + "client_options.api_key and credentials are mutually exclusive" + ) + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + transport_provided = isinstance(transport, BusinessIdentityServiceTransport) + if transport_provided: + # transport is a BusinessIdentityServiceTransport instance. + if credentials or self._client_options.credentials_file or api_key_value: + raise ValueError( + "When providing a transport instance, " + "provide its credentials directly." + ) + if self._client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." 
+ ) + self._transport = cast(BusinessIdentityServiceTransport, transport) + self._api_endpoint = self._transport.host + + self._api_endpoint = ( + self._api_endpoint + or BusinessIdentityServiceClient._get_api_endpoint( + self._client_options.api_endpoint, + self._client_cert_source, + self._universe_domain, + self._use_mtls_endpoint, + ) + ) + + if not transport_provided: + import google.auth._default # type: ignore + + if api_key_value and hasattr( + google.auth._default, "get_api_key_credentials" + ): + credentials = google.auth._default.get_api_key_credentials( + api_key_value + ) + + transport_init: Union[ + Type[BusinessIdentityServiceTransport], + Callable[..., BusinessIdentityServiceTransport], + ] = ( + type(self).get_transport_class(transport) + if isinstance(transport, str) or transport is None + else cast(Callable[..., BusinessIdentityServiceTransport], transport) + ) + # initialize with the provided callable or the passed in class + self._transport = transport_init( + credentials=credentials, + credentials_file=self._client_options.credentials_file, + host=self._api_endpoint, + scopes=self._client_options.scopes, + client_cert_source_for_mtls=self._client_cert_source, + quota_project_id=self._client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + api_audience=self._client_options.api_audience, + ) + + def get_business_identity( + self, + request: Optional[ + Union[businessidentity.GetBusinessIdentityRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> businessidentity.BusinessIdentity: + r"""Retrieves the business identity of an account. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.shopping import merchant_accounts_v1beta + + def sample_get_business_identity(): + # Create a client + client = merchant_accounts_v1beta.BusinessIdentityServiceClient() + + # Initialize request argument(s) + request = merchant_accounts_v1beta.GetBusinessIdentityRequest( + name="name_value", + ) + + # Make the request + response = client.get_business_identity(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.shopping.merchant_accounts_v1beta.types.GetBusinessIdentityRequest, dict]): + The request object. Request message for the ``GetBusinessIdentity`` method. + name (str): + Required. The resource name of the business identity. + Format: ``accounts/{account}/businessIdentity`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.shopping.merchant_accounts_v1beta.types.BusinessIdentity: + Collection of information related to the [identity of a + business](\ https://support.google.com/merchants/answer/12564247). + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, businessidentity.GetBusinessIdentityRequest): + request = businessidentity.GetBusinessIdentityRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_business_identity] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def update_business_identity( + self, + request: Optional[ + Union[businessidentity.UpdateBusinessIdentityRequest, dict] + ] = None, + *, + business_identity: Optional[businessidentity.BusinessIdentity] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> businessidentity.BusinessIdentity: + r"""Updates the business identity of an account. + Executing this method requires admin access. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.shopping import merchant_accounts_v1beta + + def sample_update_business_identity(): + # Create a client + client = merchant_accounts_v1beta.BusinessIdentityServiceClient() + + # Initialize request argument(s) + request = merchant_accounts_v1beta.UpdateBusinessIdentityRequest( + ) + + # Make the request + response = client.update_business_identity(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.shopping.merchant_accounts_v1beta.types.UpdateBusinessIdentityRequest, dict]): + The request object. Request message for the ``UpdateBusinessIdentity`` + method. + business_identity (google.shopping.merchant_accounts_v1beta.types.BusinessIdentity): + Required. The new version of the + business identity. 
+ + This corresponds to the ``business_identity`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Required. List of fields being + updated. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.shopping.merchant_accounts_v1beta.types.BusinessIdentity: + Collection of information related to the [identity of a + business](\ https://support.google.com/merchants/answer/12564247). + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([business_identity, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, businessidentity.UpdateBusinessIdentityRequest): + request = businessidentity.UpdateBusinessIdentityRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if business_identity is not None: + request.business_identity = business_identity + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update_business_identity] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("business_identity.name", request.business_identity.name),) + ), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def __enter__(self) -> "BusinessIdentityServiceClient": + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! 
+ """ + self.transport.close() + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("BusinessIdentityServiceClient",) diff --git a/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/business_identity_service/transports/__init__.py b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/business_identity_service/transports/__init__.py new file mode 100644 index 000000000000..e9b443954772 --- /dev/null +++ b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/business_identity_service/transports/__init__.py @@ -0,0 +1,41 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +from typing import Dict, Type + +from .base import BusinessIdentityServiceTransport +from .grpc import BusinessIdentityServiceGrpcTransport +from .grpc_asyncio import BusinessIdentityServiceGrpcAsyncIOTransport +from .rest import ( + BusinessIdentityServiceRestInterceptor, + BusinessIdentityServiceRestTransport, +) + +# Compile a registry of transports. +_transport_registry = ( + OrderedDict() +) # type: Dict[str, Type[BusinessIdentityServiceTransport]] +_transport_registry["grpc"] = BusinessIdentityServiceGrpcTransport +_transport_registry["grpc_asyncio"] = BusinessIdentityServiceGrpcAsyncIOTransport +_transport_registry["rest"] = BusinessIdentityServiceRestTransport + +__all__ = ( + "BusinessIdentityServiceTransport", + "BusinessIdentityServiceGrpcTransport", + "BusinessIdentityServiceGrpcAsyncIOTransport", + "BusinessIdentityServiceRestTransport", + "BusinessIdentityServiceRestInterceptor", +) diff --git a/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/business_identity_service/transports/base.py b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/business_identity_service/transports/base.py new file mode 100644 index 000000000000..fe41952b850d --- /dev/null +++ b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/business_identity_service/transports/base.py @@ -0,0 +1,179 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import abc +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union + +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.shopping.merchant_accounts_v1beta import gapic_version as package_version +from google.shopping.merchant_accounts_v1beta.types import businessidentity + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +class BusinessIdentityServiceTransport(abc.ABC): + """Abstract transport class for BusinessIdentityService.""" + + AUTH_SCOPES = ("https://www.googleapis.com/auth/content",) + + DEFAULT_HOST: str = "merchantapi.googleapis.com" + + def __init__( + self, + *, + host: str = DEFAULT_HOST, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + **kwargs, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'merchantapi.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A list of scopes. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + """ + + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} + + # Save the scopes. + self._scopes = scopes + + # If no credentials are provided, then determine the appropriate + # defaults. + if credentials and credentials_file: + raise core_exceptions.DuplicateCredentialArgs( + "'credentials_file' and 'credentials' are mutually exclusive" + ) + + if credentials_file is not None: + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, **scopes_kwargs, quota_project_id=quota_project_id + ) + elif credentials is None: + credentials, _ = google.auth.default( + **scopes_kwargs, quota_project_id=quota_project_id + ) + # Don't apply audience if the credentials file passed from user. + if hasattr(credentials, "with_gdch_audience"): + credentials = credentials.with_gdch_audience( + api_audience if api_audience else host + ) + + # If the credentials are service account credentials, then always try to use self signed JWT. 
+ if ( + always_use_jwt_access + and isinstance(credentials, service_account.Credentials) + and hasattr(service_account.Credentials, "with_always_use_jwt_access") + ): + credentials = credentials.with_always_use_jwt_access(True) + + # Save the credentials. + self._credentials = credentials + + # Save the hostname. Default to port 443 (HTTPS) if none is specified. + if ":" not in host: + host += ":443" + self._host = host + + @property + def host(self): + return self._host + + def _prep_wrapped_messages(self, client_info): + # Precompute the wrapped methods. + self._wrapped_methods = { + self.get_business_identity: gapic_v1.method.wrap_method( + self.get_business_identity, + default_timeout=None, + client_info=client_info, + ), + self.update_business_identity: gapic_v1.method.wrap_method( + self.update_business_identity, + default_timeout=None, + client_info=client_info, + ), + } + + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! + """ + raise NotImplementedError() + + @property + def get_business_identity( + self, + ) -> Callable[ + [businessidentity.GetBusinessIdentityRequest], + Union[ + businessidentity.BusinessIdentity, + Awaitable[businessidentity.BusinessIdentity], + ], + ]: + raise NotImplementedError() + + @property + def update_business_identity( + self, + ) -> Callable[ + [businessidentity.UpdateBusinessIdentityRequest], + Union[ + businessidentity.BusinessIdentity, + Awaitable[businessidentity.BusinessIdentity], + ], + ]: + raise NotImplementedError() + + @property + def kind(self) -> str: + raise NotImplementedError() + + +__all__ = ("BusinessIdentityServiceTransport",) diff --git a/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/business_identity_service/transports/grpc.py b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/business_identity_service/transports/grpc.py new file mode 100644 index 000000000000..2b6badf182b9 --- /dev/null +++ b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/business_identity_service/transports/grpc.py @@ -0,0 +1,304 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
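Two small behaviors of the base transport constructor above are worth calling out: `credentials` and `credentials_file` are mutually exclusive, and a host without an explicit port defaults to `:443`. A dependency-free sketch of that argument handling follows; note the generated code raises `google.api_core.exceptions.DuplicateCredentialArgs` where this sketch uses a plain `ValueError`.

```python
from typing import Optional


def normalize_transport_args(
    host: str = "merchantapi.googleapis.com",
    credentials: Optional[object] = None,
    credentials_file: Optional[str] = None,
) -> str:
    # The real transport raises DuplicateCredentialArgs here.
    if credentials and credentials_file:
        raise ValueError("'credentials_file' and 'credentials' are mutually exclusive")
    # Default to port 443 (HTTPS) when no port is specified.
    if ":" not in host:
        host += ":443"
    return host


print(normalize_transport_args())                  # merchantapi.googleapis.com:443
print(normalize_transport_args("localhost:8080"))  # localhost:8080 (kept as-is)
```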
+# +from typing import Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, grpc_helpers +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +import grpc # type: ignore + +from google.shopping.merchant_accounts_v1beta.types import businessidentity + +from .base import DEFAULT_CLIENT_INFO, BusinessIdentityServiceTransport + + +class BusinessIdentityServiceGrpcTransport(BusinessIdentityServiceTransport): + """gRPC backend transport for BusinessIdentityService. + + Service to support `business + identity `__ + API. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + + _stubs: Dict[str, Callable] + + def __init__( + self, + *, + host: str = "merchantapi.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'merchantapi.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if a ``channel`` instance is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if a ``channel`` instance is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if a ``channel`` instance is provided. + channel (Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]]): + A ``Channel`` instance through which to make calls, or a Callable + that constructs and returns one. If set to None, ``self.create_channel`` + is used to create the channel. If a Callable is given, it will be called + with the same arguments as used in ``self.create_channel``. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. 
It is ignored if a ``channel`` instance is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if isinstance(channel, grpc.Channel): + # Ignore credentials if a channel was passed. + credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + # initialize with the provided callable or the default channel + channel_init = channel or type(self).create_channel + self._grpc_channel = channel_init( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. 
This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @classmethod + def create_channel( + cls, + host: str = "merchantapi.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> grpc.Channel: + """Create and return a gRPC channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + grpc.Channel: A gRPC channel object. + + Raises: + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + + return grpc_helpers.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs, + ) + + @property + def grpc_channel(self) -> grpc.Channel: + """Return the channel designed to connect to this service.""" + return self._grpc_channel + + @property + def get_business_identity( + self, + ) -> Callable[ + [businessidentity.GetBusinessIdentityRequest], businessidentity.BusinessIdentity + ]: + r"""Return a callable for the get business identity method over gRPC. + + Retrieves the business identity of an account. + + Returns: + Callable[[~.GetBusinessIdentityRequest], + ~.BusinessIdentity]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_business_identity" not in self._stubs: + self._stubs["get_business_identity"] = self.grpc_channel.unary_unary( + "/google.shopping.merchant.accounts.v1beta.BusinessIdentityService/GetBusinessIdentity", + request_serializer=businessidentity.GetBusinessIdentityRequest.serialize, + response_deserializer=businessidentity.BusinessIdentity.deserialize, + ) + return self._stubs["get_business_identity"] + + @property + def update_business_identity( + self, + ) -> Callable[ + [businessidentity.UpdateBusinessIdentityRequest], + businessidentity.BusinessIdentity, + ]: + r"""Return a callable for the update business identity method over gRPC. + + Updates the business identity of an account. + Executing this method requires admin access. + + Returns: + Callable[[~.UpdateBusinessIdentityRequest], + ~.BusinessIdentity]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_business_identity" not in self._stubs: + self._stubs["update_business_identity"] = self.grpc_channel.unary_unary( + "/google.shopping.merchant.accounts.v1beta.BusinessIdentityService/UpdateBusinessIdentity", + request_serializer=businessidentity.UpdateBusinessIdentityRequest.serialize, + response_deserializer=businessidentity.BusinessIdentity.deserialize, + ) + return self._stubs["update_business_identity"] + + def close(self): + self.grpc_channel.close() + + @property + def kind(self) -> str: + return "grpc" + + +__all__ = ("BusinessIdentityServiceGrpcTransport",) diff --git a/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/business_identity_service/transports/grpc_asyncio.py b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/business_identity_service/transports/grpc_asyncio.py new file mode 100644 index 000000000000..91cd145faa32 --- /dev/null +++ b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/business_identity_service/transports/grpc_asyncio.py @@ -0,0 +1,320 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1, grpc_helpers_async +from google.api_core import retry_async as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +import grpc # type: ignore +from grpc.experimental import aio # type: ignore + +from google.shopping.merchant_accounts_v1beta.types import businessidentity + +from .base import DEFAULT_CLIENT_INFO, BusinessIdentityServiceTransport +from .grpc import BusinessIdentityServiceGrpcTransport + + +class BusinessIdentityServiceGrpcAsyncIOTransport(BusinessIdentityServiceTransport): + """gRPC AsyncIO backend transport for BusinessIdentityService. + + Service to support `business + identity `__ + API. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + + _grpc_channel: aio.Channel + _stubs: Dict[str, Callable] = {} + + @classmethod + def create_channel( + cls, + host: str = "merchantapi.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> aio.Channel: + """Create and return a gRPC AsyncIO channel object. 
+ Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + aio.Channel: A gRPC AsyncIO channel object. + """ + + return grpc_helpers_async.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs, + ) + + def __init__( + self, + *, + host: str = "merchantapi.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[Union[aio.Channel, Callable[..., aio.Channel]]] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'merchantapi.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if a ``channel`` instance is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if a ``channel`` instance is provided. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + channel (Optional[Union[aio.Channel, Callable[..., aio.Channel]]]): + A ``Channel`` instance through which to make calls, or a Callable + that constructs and returns one. If set to None, ``self.create_channel`` + is used to create the channel. If a Callable is given, it will be called + with the same arguments as used in ``self.create_channel``. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. 
A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if a ``channel`` instance is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if isinstance(channel, aio.Channel): + # Ignore credentials if a channel was passed. + credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + # initialize with the provided callable or the default channel + channel_init = channel or type(self).create_channel + self._grpc_channel = channel_init( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. 
This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @property + def grpc_channel(self) -> aio.Channel: + """Create the channel designed to connect to this service. + + This property caches on the instance; repeated calls return + the same channel. + """ + # Return the channel from cache. + return self._grpc_channel + + @property + def get_business_identity( + self, + ) -> Callable[ + [businessidentity.GetBusinessIdentityRequest], + Awaitable[businessidentity.BusinessIdentity], + ]: + r"""Return a callable for the get business identity method over gRPC. + + Retrieves the business identity of an account. + + Returns: + Callable[[~.GetBusinessIdentityRequest], + Awaitable[~.BusinessIdentity]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_business_identity" not in self._stubs: + self._stubs["get_business_identity"] = self.grpc_channel.unary_unary( + "/google.shopping.merchant.accounts.v1beta.BusinessIdentityService/GetBusinessIdentity", + request_serializer=businessidentity.GetBusinessIdentityRequest.serialize, + response_deserializer=businessidentity.BusinessIdentity.deserialize, + ) + return self._stubs["get_business_identity"] + + @property + def update_business_identity( + self, + ) -> Callable[ + [businessidentity.UpdateBusinessIdentityRequest], + Awaitable[businessidentity.BusinessIdentity], + ]: + r"""Return a callable for the update business identity method over gRPC. + + Updates the business identity of an account. + Executing this method requires admin access. + + Returns: + Callable[[~.UpdateBusinessIdentityRequest], + Awaitable[~.BusinessIdentity]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "update_business_identity" not in self._stubs: + self._stubs["update_business_identity"] = self.grpc_channel.unary_unary( + "/google.shopping.merchant.accounts.v1beta.BusinessIdentityService/UpdateBusinessIdentity", + request_serializer=businessidentity.UpdateBusinessIdentityRequest.serialize, + response_deserializer=businessidentity.BusinessIdentity.deserialize, + ) + return self._stubs["update_business_identity"] + + def _prep_wrapped_messages(self, client_info): + """Precompute the wrapped methods, overriding the base class method to use async wrappers.""" + self._wrapped_methods = { + self.get_business_identity: gapic_v1.method_async.wrap_method( + self.get_business_identity, + default_timeout=None, + client_info=client_info, + ), + self.update_business_identity: gapic_v1.method_async.wrap_method( + self.update_business_identity, + default_timeout=None, + client_info=client_info, + ), + } + + def close(self): + return self.grpc_channel.close() + + +__all__ = ("BusinessIdentityServiceGrpcAsyncIOTransport",) diff --git a/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/business_identity_service/transports/rest.py b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/business_identity_service/transports/rest.py new file mode 100644 index 000000000000..09662fb5d730 --- /dev/null +++ b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/business_identity_service/transports/rest.py @@ -0,0 +1,452 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +import dataclasses +import json # type: ignore +import re +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, path_template, rest_helpers, rest_streaming +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.transport.requests import AuthorizedSession # type: ignore +from google.protobuf import json_format +import grpc # type: ignore +from requests import __version__ as requests_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + + +from google.shopping.merchant_accounts_v1beta.types import businessidentity + +from .base import BusinessIdentityServiceTransport +from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) + + +class BusinessIdentityServiceRestInterceptor: + """Interceptor for BusinessIdentityService. + + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. + Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the BusinessIdentityServiceRestTransport. + + .. code-block:: python + class MyCustomBusinessIdentityServiceInterceptor(BusinessIdentityServiceRestInterceptor): + def pre_get_business_identity(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_business_identity(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_update_business_identity(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_update_business_identity(self, response): + logging.log(f"Received response: {response}") + return response + + transport = BusinessIdentityServiceRestTransport(interceptor=MyCustomBusinessIdentityServiceInterceptor()) + client = BusinessIdentityServiceClient(transport=transport) + + + """ + + def pre_get_business_identity( + self, + request: businessidentity.GetBusinessIdentityRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[businessidentity.GetBusinessIdentityRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_business_identity + + Override in a subclass to manipulate the request or metadata + before they are sent to the BusinessIdentityService server. + """ + return request, metadata + + def post_get_business_identity( + self, response: businessidentity.BusinessIdentity + ) -> businessidentity.BusinessIdentity: + """Post-rpc interceptor for get_business_identity + + Override in a subclass to manipulate the response + after it is returned by the BusinessIdentityService server but before + it is returned to user code. 
+ """ + return response + + def pre_update_business_identity( + self, + request: businessidentity.UpdateBusinessIdentityRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + businessidentity.UpdateBusinessIdentityRequest, Sequence[Tuple[str, str]] + ]: + """Pre-rpc interceptor for update_business_identity + + Override in a subclass to manipulate the request or metadata + before they are sent to the BusinessIdentityService server. + """ + return request, metadata + + def post_update_business_identity( + self, response: businessidentity.BusinessIdentity + ) -> businessidentity.BusinessIdentity: + """Post-rpc interceptor for update_business_identity + + Override in a subclass to manipulate the response + after it is returned by the BusinessIdentityService server but before + it is returned to user code. + """ + return response + + +@dataclasses.dataclass +class BusinessIdentityServiceRestStub: + _session: AuthorizedSession + _host: str + _interceptor: BusinessIdentityServiceRestInterceptor + + +class BusinessIdentityServiceRestTransport(BusinessIdentityServiceTransport): + """REST backend transport for BusinessIdentityService. + + Service to support `business + identity `__ + API. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends JSON representations of protocol buffers over HTTP/1.1 + + """ + + def __init__( + self, + *, + host: str = "merchantapi.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", + interceptor: Optional[BusinessIdentityServiceRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'merchantapi.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. It is ignored + if ``channel`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. 
+ """ + # Run the base constructor + # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. + # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the + # credentials object + maybe_url_match = re.match("^(?Phttp(?:s)?://)?(?P.*)$", host) + if maybe_url_match is None: + raise ValueError( + f"Unexpected hostname structure: {host}" + ) # pragma: NO COVER + + url_match_items = maybe_url_match.groupdict() + + host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host + + super().__init__( + host=host, + credentials=credentials, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + self._session = AuthorizedSession( + self._credentials, default_host=self.DEFAULT_HOST + ) + if client_cert_source_for_mtls: + self._session.configure_mtls_channel(client_cert_source_for_mtls) + self._interceptor = interceptor or BusinessIdentityServiceRestInterceptor() + self._prep_wrapped_messages(client_info) + + class _GetBusinessIdentity(BusinessIdentityServiceRestStub): + def __hash__(self): + return hash("GetBusinessIdentity") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: businessidentity.GetBusinessIdentityRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> businessidentity.BusinessIdentity: + r"""Call the get business identity method over HTTP. + + Args: + request (~.businessidentity.GetBusinessIdentityRequest): + The request object. Request message for the ``GetBusinessIdentity`` method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.businessidentity.BusinessIdentity: + Collection of information related to the `identity of a + business `__. + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/accounts/v1beta/{name=accounts/*/businessIdentity}", + }, + ] + request, metadata = self._interceptor.pre_get_business_identity( + request, metadata + ) + pb_request = businessidentity.GetBusinessIdentityRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = businessidentity.BusinessIdentity() + pb_resp = businessidentity.BusinessIdentity.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_business_identity(resp) + return resp + + class _UpdateBusinessIdentity(BusinessIdentityServiceRestStub): + def __hash__(self): + return hash("UpdateBusinessIdentity") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "updateMask": {}, + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: businessidentity.UpdateBusinessIdentityRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> businessidentity.BusinessIdentity: + r"""Call the update business identity method over HTTP. + + Args: + request (~.businessidentity.UpdateBusinessIdentityRequest): + The request object. Request message for the ``UpdateBusinessIdentity`` + method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.businessidentity.BusinessIdentity: + Collection of information related to the `identity of a + business `__. + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "patch", + "uri": "/accounts/v1beta/{business_identity.name=accounts/*/businessIdentity}", + "body": "business_identity", + }, + ] + request, metadata = self._interceptor.pre_update_business_identity( + request, metadata + ) + pb_request = businessidentity.UpdateBusinessIdentityRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = businessidentity.BusinessIdentity() + pb_resp = businessidentity.BusinessIdentity.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_update_business_identity(resp) + return resp + + @property + def get_business_identity( + self, + ) -> Callable[ + [businessidentity.GetBusinessIdentityRequest], businessidentity.BusinessIdentity + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetBusinessIdentity(self._session, self._host, self._interceptor) # type: ignore + + @property + def update_business_identity( + self, + ) -> Callable[ + [businessidentity.UpdateBusinessIdentityRequest], + businessidentity.BusinessIdentity, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._UpdateBusinessIdentity(self._session, self._host, self._interceptor) # type: ignore + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__ = ("BusinessIdentityServiceRestTransport",) diff --git a/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/business_info_service/__init__.py b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/business_info_service/__init__.py new file mode 100644 index 000000000000..2960660ffb06 --- /dev/null +++ b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/business_info_service/__init__.py @@ -0,0 +1,22 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from .async_client import BusinessInfoServiceAsyncClient +from .client import BusinessInfoServiceClient + +__all__ = ( + "BusinessInfoServiceClient", + "BusinessInfoServiceAsyncClient", +) diff --git a/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/business_info_service/async_client.py b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/business_info_service/async_client.py new file mode 100644 index 000000000000..1616d19d661e --- /dev/null +++ b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/business_info_service/async_client.py @@ -0,0 +1,508 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +import functools +import re +from typing import ( + Callable, + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, +) + +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry_async as retries +from google.api_core.client_options import ClientOptions +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.shopping.merchant_accounts_v1beta import gapic_version as package_version + +try: + OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.AsyncRetry, object, None] # type: ignore + +from google.protobuf import field_mask_pb2 # type: ignore +from google.type import phone_number_pb2 # type: ignore +from google.type import postal_address_pb2 # type: ignore + +from google.shopping.merchant_accounts_v1beta.types import ( + businessinfo, + customerservice, + phoneverificationstate, +) + +from .client import BusinessInfoServiceClient +from .transports.base import DEFAULT_CLIENT_INFO, BusinessInfoServiceTransport +from .transports.grpc_asyncio import BusinessInfoServiceGrpcAsyncIOTransport + + +class BusinessInfoServiceAsyncClient: + """Service to support business info API.""" + + _client: BusinessInfoServiceClient + + # Copy defaults from the synchronous client for use here. + # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. 
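+    # The async client wraps the synchronous BusinessInfoServiceClient and
+    # re-exports its endpoint defaults and path helpers. A minimal usage sketch
+    # (the account id below is illustrative only):
+    #
+    #     client = BusinessInfoServiceAsyncClient()
+    #     info = await client.get_business_info(
+    #         name="accounts/123/businessInfo"
+    #     )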
+ DEFAULT_ENDPOINT = BusinessInfoServiceClient.DEFAULT_ENDPOINT + DEFAULT_MTLS_ENDPOINT = BusinessInfoServiceClient.DEFAULT_MTLS_ENDPOINT + _DEFAULT_ENDPOINT_TEMPLATE = BusinessInfoServiceClient._DEFAULT_ENDPOINT_TEMPLATE + _DEFAULT_UNIVERSE = BusinessInfoServiceClient._DEFAULT_UNIVERSE + + business_info_path = staticmethod(BusinessInfoServiceClient.business_info_path) + parse_business_info_path = staticmethod( + BusinessInfoServiceClient.parse_business_info_path + ) + common_billing_account_path = staticmethod( + BusinessInfoServiceClient.common_billing_account_path + ) + parse_common_billing_account_path = staticmethod( + BusinessInfoServiceClient.parse_common_billing_account_path + ) + common_folder_path = staticmethod(BusinessInfoServiceClient.common_folder_path) + parse_common_folder_path = staticmethod( + BusinessInfoServiceClient.parse_common_folder_path + ) + common_organization_path = staticmethod( + BusinessInfoServiceClient.common_organization_path + ) + parse_common_organization_path = staticmethod( + BusinessInfoServiceClient.parse_common_organization_path + ) + common_project_path = staticmethod(BusinessInfoServiceClient.common_project_path) + parse_common_project_path = staticmethod( + BusinessInfoServiceClient.parse_common_project_path + ) + common_location_path = staticmethod(BusinessInfoServiceClient.common_location_path) + parse_common_location_path = staticmethod( + BusinessInfoServiceClient.parse_common_location_path + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + BusinessInfoServiceAsyncClient: The constructed client. + """ + return BusinessInfoServiceClient.from_service_account_info.__func__(BusinessInfoServiceAsyncClient, info, *args, **kwargs) # type: ignore + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + BusinessInfoServiceAsyncClient: The constructed client. + """ + return BusinessInfoServiceClient.from_service_account_file.__func__(BusinessInfoServiceAsyncClient, filename, *args, **kwargs) # type: ignore + + from_service_account_json = from_service_account_file + + @classmethod + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[ClientOptions] = None + ): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. 
+ (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + return BusinessInfoServiceClient.get_mtls_endpoint_and_cert_source(client_options) # type: ignore + + @property + def transport(self) -> BusinessInfoServiceTransport: + """Returns the transport used by the client instance. + + Returns: + BusinessInfoServiceTransport: The transport used by the client instance. + """ + return self._client.transport + + @property + def api_endpoint(self): + """Return the API endpoint used by the client instance. + + Returns: + str: The API endpoint used by the client instance. + """ + return self._client._api_endpoint + + @property + def universe_domain(self) -> str: + """Return the universe domain used by the client instance. + + Returns: + str: The universe domain used + by the client instance. + """ + return self._client._universe_domain + + get_transport_class = functools.partial( + type(BusinessInfoServiceClient).get_transport_class, + type(BusinessInfoServiceClient), + ) + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[ + Union[ + str, + BusinessInfoServiceTransport, + Callable[..., BusinessInfoServiceTransport], + ] + ] = "grpc_asyncio", + client_options: Optional[ClientOptions] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the business info service async client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Optional[Union[str,BusinessInfoServiceTransport,Callable[..., BusinessInfoServiceTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport to use. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the BusinessInfoServiceTransport constructor. + If set to None, a transport is chosen automatically. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): + Custom options for the client. + + 1. The ``api_endpoint`` property can be used to override the + default endpoint provided by the client when ``transport`` is + not explicitly provided. 
Only if this property is not set and + ``transport`` was not explicitly provided, the endpoint is + determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment + variable, which have one of the following values: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto-switch to the + default mTLS endpoint if client certificate is present; this is + the default value). + + 2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide a client certificate for mTLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + 3. The ``universe_domain`` property can be used to override the + default "googleapis.com" universe. Note that ``api_endpoint`` + property still takes precedence; and ``universe_domain`` is + currently not supported for mTLS. + + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + """ + self._client = BusinessInfoServiceClient( + credentials=credentials, + transport=transport, + client_options=client_options, + client_info=client_info, + ) + + async def get_business_info( + self, + request: Optional[Union[businessinfo.GetBusinessInfoRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> businessinfo.BusinessInfo: + r"""Retrieves the business info of an account. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.shopping import merchant_accounts_v1beta + + async def sample_get_business_info(): + # Create a client + client = merchant_accounts_v1beta.BusinessInfoServiceAsyncClient() + + # Initialize request argument(s) + request = merchant_accounts_v1beta.GetBusinessInfoRequest( + name="name_value", + ) + + # Make the request + response = await client.get_business_info(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.shopping.merchant_accounts_v1beta.types.GetBusinessInfoRequest, dict]]): + The request object. Request message for the ``GetBusinessInfo`` method. + name (:class:`str`): + Required. The resource name of the business info. + Format: ``accounts/{account}/businessInfo`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.shopping.merchant_accounts_v1beta.types.BusinessInfo: + Collection of information related to + a business. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, businessinfo.GetBusinessInfoRequest): + request = businessinfo.GetBusinessInfoRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.get_business_info + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def update_business_info( + self, + request: Optional[Union[businessinfo.UpdateBusinessInfoRequest, dict]] = None, + *, + business_info: Optional[businessinfo.BusinessInfo] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> businessinfo.BusinessInfo: + r"""Updates the business info of an account. Executing + this method requires admin access. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.shopping import merchant_accounts_v1beta + + async def sample_update_business_info(): + # Create a client + client = merchant_accounts_v1beta.BusinessInfoServiceAsyncClient() + + # Initialize request argument(s) + request = merchant_accounts_v1beta.UpdateBusinessInfoRequest( + ) + + # Make the request + response = await client.update_business_info(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.shopping.merchant_accounts_v1beta.types.UpdateBusinessInfoRequest, dict]]): + The request object. Request message for the ``UpdateBusinessInfo`` method. + business_info (:class:`google.shopping.merchant_accounts_v1beta.types.BusinessInfo`): + Required. The new version of the + business info. 
+ + This corresponds to the ``business_info`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): + Required. List of fields being + updated. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.shopping.merchant_accounts_v1beta.types.BusinessInfo: + Collection of information related to + a business. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([business_info, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, businessinfo.UpdateBusinessInfoRequest): + request = businessinfo.UpdateBusinessInfoRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if business_info is not None: + request.business_info = business_info + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.update_business_info + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("business_info.name", request.business_info.name),) + ), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def __aenter__(self) -> "BusinessInfoServiceAsyncClient": + return self + + async def __aexit__(self, exc_type, exc, tb): + await self.transport.close() + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("BusinessInfoServiceAsyncClient",) diff --git a/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/business_info_service/client.py b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/business_info_service/client.py new file mode 100644 index 000000000000..e298316f4707 --- /dev/null +++ b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/business_info_service/client.py @@ -0,0 +1,920 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +import os +import re +from typing import ( + Callable, + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, + cast, +) +import warnings + +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.shopping.merchant_accounts_v1beta import gapic_version as package_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + +from google.protobuf import field_mask_pb2 # type: ignore +from google.type import phone_number_pb2 # type: ignore +from google.type import postal_address_pb2 # type: ignore + +from google.shopping.merchant_accounts_v1beta.types import ( + businessinfo, + customerservice, + phoneverificationstate, +) + +from .transports.base import DEFAULT_CLIENT_INFO, BusinessInfoServiceTransport +from .transports.grpc import BusinessInfoServiceGrpcTransport +from .transports.grpc_asyncio import BusinessInfoServiceGrpcAsyncIOTransport +from .transports.rest import BusinessInfoServiceRestTransport + + +class BusinessInfoServiceClientMeta(type): + """Metaclass for the BusinessInfoService client. + + This provides class-level methods for building and retrieving + support objects (e.g. transport) without polluting the client instance + objects. + """ + + _transport_registry = ( + OrderedDict() + ) # type: Dict[str, Type[BusinessInfoServiceTransport]] + _transport_registry["grpc"] = BusinessInfoServiceGrpcTransport + _transport_registry["grpc_asyncio"] = BusinessInfoServiceGrpcAsyncIOTransport + _transport_registry["rest"] = BusinessInfoServiceRestTransport + + def get_transport_class( + cls, + label: Optional[str] = None, + ) -> Type[BusinessInfoServiceTransport]: + """Returns an appropriate transport class. + + Args: + label: The name of the desired transport. If none is + provided, then the first transport in the registry is used. + + Returns: + The transport class to use. + """ + # If a specific transport is requested, return that one. + if label: + return cls._transport_registry[label] + + # No transport is requested; return the default (that is, the first one + # in the dictionary). + return next(iter(cls._transport_registry.values())) + + +class BusinessInfoServiceClient(metaclass=BusinessInfoServiceClientMeta): + """Service to support business info API.""" + + @staticmethod + def _get_default_mtls_endpoint(api_endpoint): + """Converts api endpoint to mTLS endpoint. 
+ + Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to + "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. + Args: + api_endpoint (Optional[str]): the api endpoint to convert. + Returns: + str: converted mTLS api endpoint. + """ + if not api_endpoint: + return api_endpoint + + mtls_endpoint_re = re.compile( + r"(?P[^.]+)(?P\.mtls)?(?P\.sandbox)?(?P\.googleapis\.com)?" + ) + + m = mtls_endpoint_re.match(api_endpoint) + name, mtls, sandbox, googledomain = m.groups() + if mtls or not googledomain: + return api_endpoint + + if sandbox: + return api_endpoint.replace( + "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" + ) + + return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") + + # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. + DEFAULT_ENDPOINT = "merchantapi.googleapis.com" + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore + DEFAULT_ENDPOINT + ) + + _DEFAULT_ENDPOINT_TEMPLATE = "merchantapi.{UNIVERSE_DOMAIN}" + _DEFAULT_UNIVERSE = "googleapis.com" + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + BusinessInfoServiceClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + BusinessInfoServiceClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_file(filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> BusinessInfoServiceTransport: + """Returns the transport used by the client instance. + + Returns: + BusinessInfoServiceTransport: The transport used by the client + instance. 
+ """ + return self._transport + + @staticmethod + def business_info_path( + account: str, + ) -> str: + """Returns a fully-qualified business_info string.""" + return "accounts/{account}/businessInfo".format( + account=account, + ) + + @staticmethod + def parse_business_info_path(path: str) -> Dict[str, str]: + """Parses a business_info path into its component segments.""" + m = re.match(r"^accounts/(?P.+?)/businessInfo$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_billing_account_path( + billing_account: str, + ) -> str: + """Returns a fully-qualified billing_account string.""" + return "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + + @staticmethod + def parse_common_billing_account_path(path: str) -> Dict[str, str]: + """Parse a billing_account path into its component segments.""" + m = re.match(r"^billingAccounts/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_folder_path( + folder: str, + ) -> str: + """Returns a fully-qualified folder string.""" + return "folders/{folder}".format( + folder=folder, + ) + + @staticmethod + def parse_common_folder_path(path: str) -> Dict[str, str]: + """Parse a folder path into its component segments.""" + m = re.match(r"^folders/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_organization_path( + organization: str, + ) -> str: + """Returns a fully-qualified organization string.""" + return "organizations/{organization}".format( + organization=organization, + ) + + @staticmethod + def parse_common_organization_path(path: str) -> Dict[str, str]: + """Parse a organization path into its component segments.""" + m = re.match(r"^organizations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_project_path( + project: str, + ) -> str: + """Returns a fully-qualified project string.""" + return "projects/{project}".format( + project=project, + ) + + @staticmethod + def parse_common_project_path(path: str) -> Dict[str, str]: + """Parse a project path into its component segments.""" + m = re.match(r"^projects/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_location_path( + project: str, + location: str, + ) -> str: + """Returns a fully-qualified location string.""" + return "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) + + @staticmethod + def parse_common_location_path(path: str) -> Dict[str, str]: + """Parse a location path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @classmethod + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[client_options_lib.ClientOptions] = None + ): + """Deprecated. Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. 
+ (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + + warnings.warn( + "get_mtls_endpoint_and_cert_source is deprecated. Use the api_endpoint property instead.", + DeprecationWarning, + ) + if client_options is None: + client_options = client_options_lib.ClientOptions() + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_client_cert not in ("true", "false"): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError( + "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Figure out the client cert source to use. + client_cert_source = None + if use_client_cert == "true": + if client_options.client_cert_source: + client_cert_source = client_options.client_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + + # Figure out which api endpoint to use. + if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + elif use_mtls_endpoint == "always" or ( + use_mtls_endpoint == "auto" and client_cert_source + ): + api_endpoint = cls.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = cls.DEFAULT_ENDPOINT + + return api_endpoint, client_cert_source + + @staticmethod + def _read_environment_variables(): + """Returns the environment variables used by the client. + + Returns: + Tuple[bool, str, str]: returns the GOOGLE_API_USE_CLIENT_CERTIFICATE, + GOOGLE_API_USE_MTLS_ENDPOINT, and GOOGLE_CLOUD_UNIVERSE_DOMAIN environment variables. + + Raises: + ValueError: If GOOGLE_API_USE_CLIENT_CERTIFICATE is not + any of ["true", "false"]. + google.auth.exceptions.MutualTLSChannelError: If GOOGLE_API_USE_MTLS_ENDPOINT + is not any of ["auto", "never", "always"]. + """ + use_client_cert = os.getenv( + "GOOGLE_API_USE_CLIENT_CERTIFICATE", "false" + ).lower() + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto").lower() + universe_domain_env = os.getenv("GOOGLE_CLOUD_UNIVERSE_DOMAIN") + if use_client_cert not in ("true", "false"): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError( + "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + return use_client_cert == "true", use_mtls_endpoint, universe_domain_env + + @staticmethod + def _get_client_cert_source(provided_cert_source, use_cert_flag): + """Return the client cert source to be used by the client. 
+ + Args: + provided_cert_source (bytes): The client certificate source provided. + use_cert_flag (bool): A flag indicating whether to use the client certificate. + + Returns: + bytes or None: The client cert source to be used by the client. + """ + client_cert_source = None + if use_cert_flag: + if provided_cert_source: + client_cert_source = provided_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + return client_cert_source + + @staticmethod + def _get_api_endpoint( + api_override, client_cert_source, universe_domain, use_mtls_endpoint + ): + """Return the API endpoint used by the client. + + Args: + api_override (str): The API endpoint override. If specified, this is always + the return value of this function and the other arguments are not used. + client_cert_source (bytes): The client certificate source used by the client. + universe_domain (str): The universe domain used by the client. + use_mtls_endpoint (str): How to use the mTLS endpoint, which depends also on the other parameters. + Possible values are "always", "auto", or "never". + + Returns: + str: The API endpoint to be used by the client. + """ + if api_override is not None: + api_endpoint = api_override + elif use_mtls_endpoint == "always" or ( + use_mtls_endpoint == "auto" and client_cert_source + ): + _default_universe = BusinessInfoServiceClient._DEFAULT_UNIVERSE + if universe_domain != _default_universe: + raise MutualTLSChannelError( + f"mTLS is not supported in any universe other than {_default_universe}." + ) + api_endpoint = BusinessInfoServiceClient.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = BusinessInfoServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=universe_domain + ) + return api_endpoint + + @staticmethod + def _get_universe_domain( + client_universe_domain: Optional[str], universe_domain_env: Optional[str] + ) -> str: + """Return the universe domain used by the client. + + Args: + client_universe_domain (Optional[str]): The universe domain configured via the client options. + universe_domain_env (Optional[str]): The universe domain configured via the "GOOGLE_CLOUD_UNIVERSE_DOMAIN" environment variable. + + Returns: + str: The universe domain to be used by the client. + + Raises: + ValueError: If the universe domain is an empty string. + """ + universe_domain = BusinessInfoServiceClient._DEFAULT_UNIVERSE + if client_universe_domain is not None: + universe_domain = client_universe_domain + elif universe_domain_env is not None: + universe_domain = universe_domain_env + if len(universe_domain.strip()) == 0: + raise ValueError("Universe Domain cannot be an empty string.") + return universe_domain + + @staticmethod + def _compare_universes( + client_universe: str, credentials: ga_credentials.Credentials + ) -> bool: + """Returns True iff the universe domains used by the client and credentials match. + + Args: + client_universe (str): The universe domain configured via the client options. + credentials (ga_credentials.Credentials): The credentials being used in the client. + + Returns: + bool: True iff client_universe matches the universe in credentials. + + Raises: + ValueError: when client_universe does not match the universe in credentials. 
+ """ + + default_universe = BusinessInfoServiceClient._DEFAULT_UNIVERSE + credentials_universe = getattr(credentials, "universe_domain", default_universe) + + if client_universe != credentials_universe: + raise ValueError( + "The configured universe domain " + f"({client_universe}) does not match the universe domain " + f"found in the credentials ({credentials_universe}). " + "If you haven't configured the universe domain explicitly, " + f"`{default_universe}` is the default." + ) + return True + + def _validate_universe_domain(self): + """Validates client's and credentials' universe domains are consistent. + + Returns: + bool: True iff the configured universe domain is valid. + + Raises: + ValueError: If the configured universe domain is not valid. + """ + self._is_universe_domain_valid = ( + self._is_universe_domain_valid + or BusinessInfoServiceClient._compare_universes( + self.universe_domain, self.transport._credentials + ) + ) + return self._is_universe_domain_valid + + @property + def api_endpoint(self): + """Return the API endpoint used by the client instance. + + Returns: + str: The API endpoint used by the client instance. + """ + return self._api_endpoint + + @property + def universe_domain(self) -> str: + """Return the universe domain used by the client instance. + + Returns: + str: The universe domain used by the client instance. + """ + return self._universe_domain + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[ + Union[ + str, + BusinessInfoServiceTransport, + Callable[..., BusinessInfoServiceTransport], + ] + ] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the business info service client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Optional[Union[str,BusinessInfoServiceTransport,Callable[..., BusinessInfoServiceTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the BusinessInfoServiceTransport constructor. + If set to None, a transport is chosen automatically. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): + Custom options for the client. + + 1. The ``api_endpoint`` property can be used to override the + default endpoint provided by the client when ``transport`` is + not explicitly provided. Only if this property is not set and + ``transport`` was not explicitly provided, the endpoint is + determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment + variable, which have one of the following values: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto-switch to the + default mTLS endpoint if client certificate is present; this is + the default value). + + 2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide a client certificate for mTLS transport. If + not provided, the default SSL client certificate will be used if + present. 
If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + 3. The ``universe_domain`` property can be used to override the + default "googleapis.com" universe. Note that the ``api_endpoint`` + property still takes precedence; and ``universe_domain`` is + currently not supported for mTLS. + + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + """ + self._client_options = client_options + if isinstance(self._client_options, dict): + self._client_options = client_options_lib.from_dict(self._client_options) + if self._client_options is None: + self._client_options = client_options_lib.ClientOptions() + self._client_options = cast( + client_options_lib.ClientOptions, self._client_options + ) + + universe_domain_opt = getattr(self._client_options, "universe_domain", None) + + ( + self._use_client_cert, + self._use_mtls_endpoint, + self._universe_domain_env, + ) = BusinessInfoServiceClient._read_environment_variables() + self._client_cert_source = BusinessInfoServiceClient._get_client_cert_source( + self._client_options.client_cert_source, self._use_client_cert + ) + self._universe_domain = BusinessInfoServiceClient._get_universe_domain( + universe_domain_opt, self._universe_domain_env + ) + self._api_endpoint = None # updated below, depending on `transport` + + # Initialize the universe domain validation. + self._is_universe_domain_valid = False + + api_key_value = getattr(self._client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError( + "client_options.api_key and credentials are mutually exclusive" + ) + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + transport_provided = isinstance(transport, BusinessInfoServiceTransport) + if transport_provided: + # transport is a BusinessInfoServiceTransport instance. + if credentials or self._client_options.credentials_file or api_key_value: + raise ValueError( + "When providing a transport instance, " + "provide its credentials directly." + ) + if self._client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." 
+ ) + self._transport = cast(BusinessInfoServiceTransport, transport) + self._api_endpoint = self._transport.host + + self._api_endpoint = ( + self._api_endpoint + or BusinessInfoServiceClient._get_api_endpoint( + self._client_options.api_endpoint, + self._client_cert_source, + self._universe_domain, + self._use_mtls_endpoint, + ) + ) + + if not transport_provided: + import google.auth._default # type: ignore + + if api_key_value and hasattr( + google.auth._default, "get_api_key_credentials" + ): + credentials = google.auth._default.get_api_key_credentials( + api_key_value + ) + + transport_init: Union[ + Type[BusinessInfoServiceTransport], + Callable[..., BusinessInfoServiceTransport], + ] = ( + type(self).get_transport_class(transport) + if isinstance(transport, str) or transport is None + else cast(Callable[..., BusinessInfoServiceTransport], transport) + ) + # initialize with the provided callable or the passed in class + self._transport = transport_init( + credentials=credentials, + credentials_file=self._client_options.credentials_file, + host=self._api_endpoint, + scopes=self._client_options.scopes, + client_cert_source_for_mtls=self._client_cert_source, + quota_project_id=self._client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + api_audience=self._client_options.api_audience, + ) + + def get_business_info( + self, + request: Optional[Union[businessinfo.GetBusinessInfoRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> businessinfo.BusinessInfo: + r"""Retrieves the business info of an account. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.shopping import merchant_accounts_v1beta + + def sample_get_business_info(): + # Create a client + client = merchant_accounts_v1beta.BusinessInfoServiceClient() + + # Initialize request argument(s) + request = merchant_accounts_v1beta.GetBusinessInfoRequest( + name="name_value", + ) + + # Make the request + response = client.get_business_info(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.shopping.merchant_accounts_v1beta.types.GetBusinessInfoRequest, dict]): + The request object. Request message for the ``GetBusinessInfo`` method. + name (str): + Required. The resource name of the business info. + Format: ``accounts/{account}/businessInfo`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.shopping.merchant_accounts_v1beta.types.BusinessInfo: + Collection of information related to + a business. + + """ + # Create or coerce a protobuf request object. 
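+        # The flattened `name` argument and the `request` object are mutually
+        # exclusive: callers pass either a full GetBusinessInfoRequest or the
+        # individual field, never both (enforced by the check below).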
+ # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, businessinfo.GetBusinessInfoRequest): + request = businessinfo.GetBusinessInfoRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_business_info] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def update_business_info( + self, + request: Optional[Union[businessinfo.UpdateBusinessInfoRequest, dict]] = None, + *, + business_info: Optional[businessinfo.BusinessInfo] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> businessinfo.BusinessInfo: + r"""Updates the business info of an account. Executing + this method requires admin access. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.shopping import merchant_accounts_v1beta + + def sample_update_business_info(): + # Create a client + client = merchant_accounts_v1beta.BusinessInfoServiceClient() + + # Initialize request argument(s) + request = merchant_accounts_v1beta.UpdateBusinessInfoRequest( + ) + + # Make the request + response = client.update_business_info(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.shopping.merchant_accounts_v1beta.types.UpdateBusinessInfoRequest, dict]): + The request object. Request message for the ``UpdateBusinessInfo`` method. + business_info (google.shopping.merchant_accounts_v1beta.types.BusinessInfo): + Required. The new version of the + business info. + + This corresponds to the ``business_info`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Required. List of fields being + updated. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. 
+ timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.shopping.merchant_accounts_v1beta.types.BusinessInfo: + Collection of information related to + a business. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([business_info, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, businessinfo.UpdateBusinessInfoRequest): + request = businessinfo.UpdateBusinessInfoRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if business_info is not None: + request.business_info = business_info + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update_business_info] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("business_info.name", request.business_info.name),) + ), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def __enter__(self) -> "BusinessInfoServiceClient": + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! + """ + self.transport.close() + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("BusinessInfoServiceClient",) diff --git a/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/business_info_service/transports/__init__.py b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/business_info_service/transports/__init__.py new file mode 100644 index 000000000000..919acf2fd733 --- /dev/null +++ b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/business_info_service/transports/__init__.py @@ -0,0 +1,38 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +from typing import Dict, Type + +from .base import BusinessInfoServiceTransport +from .grpc import BusinessInfoServiceGrpcTransport +from .grpc_asyncio import BusinessInfoServiceGrpcAsyncIOTransport +from .rest import BusinessInfoServiceRestInterceptor, BusinessInfoServiceRestTransport + +# Compile a registry of transports. +_transport_registry = ( + OrderedDict() +) # type: Dict[str, Type[BusinessInfoServiceTransport]] +_transport_registry["grpc"] = BusinessInfoServiceGrpcTransport +_transport_registry["grpc_asyncio"] = BusinessInfoServiceGrpcAsyncIOTransport +_transport_registry["rest"] = BusinessInfoServiceRestTransport + +__all__ = ( + "BusinessInfoServiceTransport", + "BusinessInfoServiceGrpcTransport", + "BusinessInfoServiceGrpcAsyncIOTransport", + "BusinessInfoServiceRestTransport", + "BusinessInfoServiceRestInterceptor", +) diff --git a/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/business_info_service/transports/base.py b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/business_info_service/transports/base.py new file mode 100644 index 000000000000..702ece3e5be5 --- /dev/null +++ b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/business_info_service/transports/base.py @@ -0,0 +1,173 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import abc +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union + +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.shopping.merchant_accounts_v1beta import gapic_version as package_version +from google.shopping.merchant_accounts_v1beta.types import businessinfo + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +class BusinessInfoServiceTransport(abc.ABC): + """Abstract transport class for BusinessInfoService.""" + + AUTH_SCOPES = ("https://www.googleapis.com/auth/content",) + + DEFAULT_HOST: str = "merchantapi.googleapis.com" + + def __init__( + self, + *, + host: str = DEFAULT_HOST, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + **kwargs, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'merchantapi.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A list of scopes. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + """ + + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} + + # Save the scopes. + self._scopes = scopes + + # If no credentials are provided, then determine the appropriate + # defaults. + if credentials and credentials_file: + raise core_exceptions.DuplicateCredentialArgs( + "'credentials_file' and 'credentials' are mutually exclusive" + ) + + if credentials_file is not None: + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, **scopes_kwargs, quota_project_id=quota_project_id + ) + elif credentials is None: + credentials, _ = google.auth.default( + **scopes_kwargs, quota_project_id=quota_project_id + ) + # Don't apply audience if the credentials file passed from user. + if hasattr(credentials, "with_gdch_audience"): + credentials = credentials.with_gdch_audience( + api_audience if api_audience else host + ) + + # If the credentials are service account credentials, then always try to use self signed JWT. 
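+        # With a self-signed JWT the service account signs its own bearer token
+        # locally instead of exchanging the private key for an OAuth2 access
+        # token, which skips a round trip to the token endpoint.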
+ if ( + always_use_jwt_access + and isinstance(credentials, service_account.Credentials) + and hasattr(service_account.Credentials, "with_always_use_jwt_access") + ): + credentials = credentials.with_always_use_jwt_access(True) + + # Save the credentials. + self._credentials = credentials + + # Save the hostname. Default to port 443 (HTTPS) if none is specified. + if ":" not in host: + host += ":443" + self._host = host + + @property + def host(self): + return self._host + + def _prep_wrapped_messages(self, client_info): + # Precompute the wrapped methods. + self._wrapped_methods = { + self.get_business_info: gapic_v1.method.wrap_method( + self.get_business_info, + default_timeout=None, + client_info=client_info, + ), + self.update_business_info: gapic_v1.method.wrap_method( + self.update_business_info, + default_timeout=None, + client_info=client_info, + ), + } + + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! + """ + raise NotImplementedError() + + @property + def get_business_info( + self, + ) -> Callable[ + [businessinfo.GetBusinessInfoRequest], + Union[businessinfo.BusinessInfo, Awaitable[businessinfo.BusinessInfo]], + ]: + raise NotImplementedError() + + @property + def update_business_info( + self, + ) -> Callable[ + [businessinfo.UpdateBusinessInfoRequest], + Union[businessinfo.BusinessInfo, Awaitable[businessinfo.BusinessInfo]], + ]: + raise NotImplementedError() + + @property + def kind(self) -> str: + raise NotImplementedError() + + +__all__ = ("BusinessInfoServiceTransport",) diff --git a/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/business_info_service/transports/grpc.py b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/business_info_service/transports/grpc.py new file mode 100644 index 000000000000..f916a76ed81a --- /dev/null +++ b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/business_info_service/transports/grpc.py @@ -0,0 +1,297 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, grpc_helpers +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +import grpc # type: ignore + +from google.shopping.merchant_accounts_v1beta.types import businessinfo + +from .base import DEFAULT_CLIENT_INFO, BusinessInfoServiceTransport + + +class BusinessInfoServiceGrpcTransport(BusinessInfoServiceTransport): + """gRPC backend transport for BusinessInfoService. + + Service to support business info API. 
+ + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + + _stubs: Dict[str, Callable] + + def __init__( + self, + *, + host: str = "merchantapi.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'merchantapi.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if a ``channel`` instance is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if a ``channel`` instance is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if a ``channel`` instance is provided. + channel (Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]]): + A ``Channel`` instance through which to make calls, or a Callable + that constructs and returns one. If set to None, ``self.create_channel`` + is used to create the channel. If a Callable is given, it will be called + with the same arguments as used in ``self.create_channel``. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if a ``channel`` instance is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. 
+ Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if isinstance(channel, grpc.Channel): + # Ignore credentials if a channel was passed. + credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + # initialize with the provided callable or the default channel + channel_init = channel or type(self).create_channel + self._grpc_channel = channel_init( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @classmethod + def create_channel( + cls, + host: str = "merchantapi.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> grpc.Channel: + """Create and return a gRPC channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. 
+ scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + grpc.Channel: A gRPC channel object. + + Raises: + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + + return grpc_helpers.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs, + ) + + @property + def grpc_channel(self) -> grpc.Channel: + """Return the channel designed to connect to this service.""" + return self._grpc_channel + + @property + def get_business_info( + self, + ) -> Callable[[businessinfo.GetBusinessInfoRequest], businessinfo.BusinessInfo]: + r"""Return a callable for the get business info method over gRPC. + + Retrieves the business info of an account. + + Returns: + Callable[[~.GetBusinessInfoRequest], + ~.BusinessInfo]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_business_info" not in self._stubs: + self._stubs["get_business_info"] = self.grpc_channel.unary_unary( + "/google.shopping.merchant.accounts.v1beta.BusinessInfoService/GetBusinessInfo", + request_serializer=businessinfo.GetBusinessInfoRequest.serialize, + response_deserializer=businessinfo.BusinessInfo.deserialize, + ) + return self._stubs["get_business_info"] + + @property + def update_business_info( + self, + ) -> Callable[[businessinfo.UpdateBusinessInfoRequest], businessinfo.BusinessInfo]: + r"""Return a callable for the update business info method over gRPC. + + Updates the business info of an account. Executing + this method requires admin access. + + Returns: + Callable[[~.UpdateBusinessInfoRequest], + ~.BusinessInfo]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
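+        # The stub is created once and cached in ``self._stubs``; later accesses
+        # of this property reuse the same callable on the channel.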
+ if "update_business_info" not in self._stubs: + self._stubs["update_business_info"] = self.grpc_channel.unary_unary( + "/google.shopping.merchant.accounts.v1beta.BusinessInfoService/UpdateBusinessInfo", + request_serializer=businessinfo.UpdateBusinessInfoRequest.serialize, + response_deserializer=businessinfo.BusinessInfo.deserialize, + ) + return self._stubs["update_business_info"] + + def close(self): + self.grpc_channel.close() + + @property + def kind(self) -> str: + return "grpc" + + +__all__ = ("BusinessInfoServiceGrpcTransport",) diff --git a/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/business_info_service/transports/grpc_asyncio.py b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/business_info_service/transports/grpc_asyncio.py new file mode 100644 index 000000000000..a774e98c2383 --- /dev/null +++ b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/business_info_service/transports/grpc_asyncio.py @@ -0,0 +1,316 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1, grpc_helpers_async +from google.api_core import retry_async as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +import grpc # type: ignore +from grpc.experimental import aio # type: ignore + +from google.shopping.merchant_accounts_v1beta.types import businessinfo + +from .base import DEFAULT_CLIENT_INFO, BusinessInfoServiceTransport +from .grpc import BusinessInfoServiceGrpcTransport + + +class BusinessInfoServiceGrpcAsyncIOTransport(BusinessInfoServiceTransport): + """gRPC AsyncIO backend transport for BusinessInfoService. + + Service to support business info API. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + + _grpc_channel: aio.Channel + _stubs: Dict[str, Callable] = {} + + @classmethod + def create_channel( + cls, + host: str = "merchantapi.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> aio.Channel: + """Create and return a gRPC AsyncIO channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. 
If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + aio.Channel: A gRPC AsyncIO channel object. + """ + + return grpc_helpers_async.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs, + ) + + def __init__( + self, + *, + host: str = "merchantapi.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[Union[aio.Channel, Callable[..., aio.Channel]]] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'merchantapi.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if a ``channel`` instance is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if a ``channel`` instance is provided. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + channel (Optional[Union[aio.Channel, Callable[..., aio.Channel]]]): + A ``Channel`` instance through which to make calls, or a Callable + that constructs and returns one. If set to None, ``self.create_channel`` + is used to create the channel. If a Callable is given, it will be called + with the same arguments as used in ``self.create_channel``. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. 
It is ignored if a ``channel`` instance is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if isinstance(channel, aio.Channel): + # Ignore credentials if a channel was passed. + credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + # initialize with the provided callable or the default channel + channel_init = channel or type(self).create_channel + self._grpc_channel = channel_init( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @property + def grpc_channel(self) -> aio.Channel: + """Create the channel designed to connect to this service. + + This property caches on the instance; repeated calls return + the same channel. + """ + # Return the channel from cache. 
+ return self._grpc_channel + + @property + def get_business_info( + self, + ) -> Callable[ + [businessinfo.GetBusinessInfoRequest], Awaitable[businessinfo.BusinessInfo] + ]: + r"""Return a callable for the get business info method over gRPC. + + Retrieves the business info of an account. + + Returns: + Callable[[~.GetBusinessInfoRequest], + Awaitable[~.BusinessInfo]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_business_info" not in self._stubs: + self._stubs["get_business_info"] = self.grpc_channel.unary_unary( + "/google.shopping.merchant.accounts.v1beta.BusinessInfoService/GetBusinessInfo", + request_serializer=businessinfo.GetBusinessInfoRequest.serialize, + response_deserializer=businessinfo.BusinessInfo.deserialize, + ) + return self._stubs["get_business_info"] + + @property + def update_business_info( + self, + ) -> Callable[ + [businessinfo.UpdateBusinessInfoRequest], Awaitable[businessinfo.BusinessInfo] + ]: + r"""Return a callable for the update business info method over gRPC. + + Updates the business info of an account. Executing + this method requires admin access. + + Returns: + Callable[[~.UpdateBusinessInfoRequest], + Awaitable[~.BusinessInfo]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_business_info" not in self._stubs: + self._stubs["update_business_info"] = self.grpc_channel.unary_unary( + "/google.shopping.merchant.accounts.v1beta.BusinessInfoService/UpdateBusinessInfo", + request_serializer=businessinfo.UpdateBusinessInfoRequest.serialize, + response_deserializer=businessinfo.BusinessInfo.deserialize, + ) + return self._stubs["update_business_info"] + + def _prep_wrapped_messages(self, client_info): + """Precompute the wrapped methods, overriding the base class method to use async wrappers.""" + self._wrapped_methods = { + self.get_business_info: gapic_v1.method_async.wrap_method( + self.get_business_info, + default_timeout=None, + client_info=client_info, + ), + self.update_business_info: gapic_v1.method_async.wrap_method( + self.update_business_info, + default_timeout=None, + client_info=client_info, + ), + } + + def close(self): + return self.grpc_channel.close() + + +__all__ = ("BusinessInfoServiceGrpcAsyncIOTransport",) diff --git a/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/business_info_service/transports/rest.py b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/business_info_service/transports/rest.py new file mode 100644 index 000000000000..9b95f0f8188a --- /dev/null +++ b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/business_info_service/transports/rest.py @@ -0,0 +1,442 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +import dataclasses +import json # type: ignore +import re +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, path_template, rest_helpers, rest_streaming +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.transport.requests import AuthorizedSession # type: ignore +from google.protobuf import json_format +import grpc # type: ignore +from requests import __version__ as requests_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + + +from google.shopping.merchant_accounts_v1beta.types import businessinfo + +from .base import BusinessInfoServiceTransport +from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) + + +class BusinessInfoServiceRestInterceptor: + """Interceptor for BusinessInfoService. + + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. + Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the BusinessInfoServiceRestTransport. + + .. code-block:: python + class MyCustomBusinessInfoServiceInterceptor(BusinessInfoServiceRestInterceptor): + def pre_get_business_info(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_business_info(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_update_business_info(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_update_business_info(self, response): + logging.log(f"Received response: {response}") + return response + + transport = BusinessInfoServiceRestTransport(interceptor=MyCustomBusinessInfoServiceInterceptor()) + client = BusinessInfoServiceClient(transport=transport) + + + """ + + def pre_get_business_info( + self, + request: businessinfo.GetBusinessInfoRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[businessinfo.GetBusinessInfoRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_business_info + + Override in a subclass to manipulate the request or metadata + before they are sent to the BusinessInfoService server. 
+ """ + return request, metadata + + def post_get_business_info( + self, response: businessinfo.BusinessInfo + ) -> businessinfo.BusinessInfo: + """Post-rpc interceptor for get_business_info + + Override in a subclass to manipulate the response + after it is returned by the BusinessInfoService server but before + it is returned to user code. + """ + return response + + def pre_update_business_info( + self, + request: businessinfo.UpdateBusinessInfoRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[businessinfo.UpdateBusinessInfoRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for update_business_info + + Override in a subclass to manipulate the request or metadata + before they are sent to the BusinessInfoService server. + """ + return request, metadata + + def post_update_business_info( + self, response: businessinfo.BusinessInfo + ) -> businessinfo.BusinessInfo: + """Post-rpc interceptor for update_business_info + + Override in a subclass to manipulate the response + after it is returned by the BusinessInfoService server but before + it is returned to user code. + """ + return response + + +@dataclasses.dataclass +class BusinessInfoServiceRestStub: + _session: AuthorizedSession + _host: str + _interceptor: BusinessInfoServiceRestInterceptor + + +class BusinessInfoServiceRestTransport(BusinessInfoServiceTransport): + """REST backend transport for BusinessInfoService. + + Service to support business info API. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends JSON representations of protocol buffers over HTTP/1.1 + + """ + + def __init__( + self, + *, + host: str = "merchantapi.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", + interceptor: Optional[BusinessInfoServiceRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'merchantapi.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. It is ignored + if ``channel`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. 
+ always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. + """ + # Run the base constructor + # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. + # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the + # credentials object + maybe_url_match = re.match("^(?Phttp(?:s)?://)?(?P.*)$", host) + if maybe_url_match is None: + raise ValueError( + f"Unexpected hostname structure: {host}" + ) # pragma: NO COVER + + url_match_items = maybe_url_match.groupdict() + + host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host + + super().__init__( + host=host, + credentials=credentials, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + self._session = AuthorizedSession( + self._credentials, default_host=self.DEFAULT_HOST + ) + if client_cert_source_for_mtls: + self._session.configure_mtls_channel(client_cert_source_for_mtls) + self._interceptor = interceptor or BusinessInfoServiceRestInterceptor() + self._prep_wrapped_messages(client_info) + + class _GetBusinessInfo(BusinessInfoServiceRestStub): + def __hash__(self): + return hash("GetBusinessInfo") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: businessinfo.GetBusinessInfoRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> businessinfo.BusinessInfo: + r"""Call the get business info method over HTTP. + + Args: + request (~.businessinfo.GetBusinessInfoRequest): + The request object. Request message for the ``GetBusinessInfo`` method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.businessinfo.BusinessInfo: + Collection of information related to + a business. + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/accounts/v1beta/{name=accounts/*/businessInfo}", + }, + ] + request, metadata = self._interceptor.pre_get_business_info( + request, metadata + ) + pb_request = businessinfo.GetBusinessInfoRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
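+            # (``from_http_response`` maps the status code to the matching
+            # exception class, for example ``NotFound`` for a 404 response.)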
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = businessinfo.BusinessInfo() + pb_resp = businessinfo.BusinessInfo.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_business_info(resp) + return resp + + class _UpdateBusinessInfo(BusinessInfoServiceRestStub): + def __hash__(self): + return hash("UpdateBusinessInfo") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "updateMask": {}, + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: businessinfo.UpdateBusinessInfoRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> businessinfo.BusinessInfo: + r"""Call the update business info method over HTTP. + + Args: + request (~.businessinfo.UpdateBusinessInfoRequest): + The request object. Request message for the ``UpdateBusinessInfo`` method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.businessinfo.BusinessInfo: + Collection of information related to + a business. + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "patch", + "uri": "/accounts/v1beta/{business_info.name=accounts/*/businessInfo}", + "body": "business_info", + }, + ] + request, metadata = self._interceptor.pre_update_business_info( + request, metadata + ) + pb_request = businessinfo.UpdateBusinessInfoRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = businessinfo.BusinessInfo() + pb_resp = businessinfo.BusinessInfo.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_update_business_info(resp) + return resp + + @property + def get_business_info( + self, + ) -> Callable[[businessinfo.GetBusinessInfoRequest], businessinfo.BusinessInfo]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._GetBusinessInfo(self._session, self._host, self._interceptor) # type: ignore + + @property + def update_business_info( + self, + ) -> Callable[[businessinfo.UpdateBusinessInfoRequest], businessinfo.BusinessInfo]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._UpdateBusinessInfo(self._session, self._host, self._interceptor) # type: ignore + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__ = ("BusinessInfoServiceRestTransport",) diff --git a/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/email_preferences_service/__init__.py b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/email_preferences_service/__init__.py new file mode 100644 index 000000000000..2f1f9846b88f --- /dev/null +++ b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/email_preferences_service/__init__.py @@ -0,0 +1,22 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from .async_client import EmailPreferencesServiceAsyncClient +from .client import EmailPreferencesServiceClient + +__all__ = ( + "EmailPreferencesServiceClient", + "EmailPreferencesServiceAsyncClient", +) diff --git a/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/email_preferences_service/async_client.py b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/email_preferences_service/async_client.py new file mode 100644 index 000000000000..03ef568cbddb --- /dev/null +++ b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/email_preferences_service/async_client.py @@ -0,0 +1,541 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +import functools +import re +from typing import ( + Callable, + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, +) + +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry_async as retries +from google.api_core.client_options import ClientOptions +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.shopping.merchant_accounts_v1beta import gapic_version as package_version + +try: + OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.AsyncRetry, object, None] # type: ignore + +from google.protobuf import field_mask_pb2 # type: ignore + +from google.shopping.merchant_accounts_v1beta.types import emailpreferences + +from .client import EmailPreferencesServiceClient +from .transports.base import DEFAULT_CLIENT_INFO, EmailPreferencesServiceTransport +from .transports.grpc_asyncio import EmailPreferencesServiceGrpcAsyncIOTransport + + +class EmailPreferencesServiceAsyncClient: + """Service to support the ``EmailPreferences`` API. + + This service only permits retrieving and updating email preferences + for the authenticated user. + """ + + _client: EmailPreferencesServiceClient + + # Copy defaults from the synchronous client for use here. + # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. + DEFAULT_ENDPOINT = EmailPreferencesServiceClient.DEFAULT_ENDPOINT + DEFAULT_MTLS_ENDPOINT = EmailPreferencesServiceClient.DEFAULT_MTLS_ENDPOINT + _DEFAULT_ENDPOINT_TEMPLATE = ( + EmailPreferencesServiceClient._DEFAULT_ENDPOINT_TEMPLATE + ) + _DEFAULT_UNIVERSE = EmailPreferencesServiceClient._DEFAULT_UNIVERSE + + email_preferences_path = staticmethod( + EmailPreferencesServiceClient.email_preferences_path + ) + parse_email_preferences_path = staticmethod( + EmailPreferencesServiceClient.parse_email_preferences_path + ) + common_billing_account_path = staticmethod( + EmailPreferencesServiceClient.common_billing_account_path + ) + parse_common_billing_account_path = staticmethod( + EmailPreferencesServiceClient.parse_common_billing_account_path + ) + common_folder_path = staticmethod(EmailPreferencesServiceClient.common_folder_path) + parse_common_folder_path = staticmethod( + EmailPreferencesServiceClient.parse_common_folder_path + ) + common_organization_path = staticmethod( + EmailPreferencesServiceClient.common_organization_path + ) + parse_common_organization_path = staticmethod( + EmailPreferencesServiceClient.parse_common_organization_path + ) + common_project_path = staticmethod( + EmailPreferencesServiceClient.common_project_path + ) + parse_common_project_path = staticmethod( + EmailPreferencesServiceClient.parse_common_project_path + ) + common_location_path = staticmethod( + EmailPreferencesServiceClient.common_location_path + ) + parse_common_location_path = staticmethod( + EmailPreferencesServiceClient.parse_common_location_path + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. 
+ + Returns: + EmailPreferencesServiceAsyncClient: The constructed client. + """ + return EmailPreferencesServiceClient.from_service_account_info.__func__(EmailPreferencesServiceAsyncClient, info, *args, **kwargs) # type: ignore + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + EmailPreferencesServiceAsyncClient: The constructed client. + """ + return EmailPreferencesServiceClient.from_service_account_file.__func__(EmailPreferencesServiceAsyncClient, filename, *args, **kwargs) # type: ignore + + from_service_account_json = from_service_account_file + + @classmethod + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[ClientOptions] = None + ): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + return EmailPreferencesServiceClient.get_mtls_endpoint_and_cert_source(client_options) # type: ignore + + @property + def transport(self) -> EmailPreferencesServiceTransport: + """Returns the transport used by the client instance. + + Returns: + EmailPreferencesServiceTransport: The transport used by the client instance. + """ + return self._client.transport + + @property + def api_endpoint(self): + """Return the API endpoint used by the client instance. + + Returns: + str: The API endpoint used by the client instance. + """ + return self._client._api_endpoint + + @property + def universe_domain(self) -> str: + """Return the universe domain used by the client instance. + + Returns: + str: The universe domain used + by the client instance. 
+ """ + return self._client._universe_domain + + get_transport_class = functools.partial( + type(EmailPreferencesServiceClient).get_transport_class, + type(EmailPreferencesServiceClient), + ) + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[ + Union[ + str, + EmailPreferencesServiceTransport, + Callable[..., EmailPreferencesServiceTransport], + ] + ] = "grpc_asyncio", + client_options: Optional[ClientOptions] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the email preferences service async client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Optional[Union[str,EmailPreferencesServiceTransport,Callable[..., EmailPreferencesServiceTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport to use. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the EmailPreferencesServiceTransport constructor. + If set to None, a transport is chosen automatically. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): + Custom options for the client. + + 1. The ``api_endpoint`` property can be used to override the + default endpoint provided by the client when ``transport`` is + not explicitly provided. Only if this property is not set and + ``transport`` was not explicitly provided, the endpoint is + determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment + variable, which have one of the following values: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto-switch to the + default mTLS endpoint if client certificate is present; this is + the default value). + + 2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide a client certificate for mTLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + 3. The ``universe_domain`` property can be used to override the + default "googleapis.com" universe. Note that ``api_endpoint`` + property still takes precedence; and ``universe_domain`` is + currently not supported for mTLS. + + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. 
+ """ + self._client = EmailPreferencesServiceClient( + credentials=credentials, + transport=transport, + client_options=client_options, + client_info=client_info, + ) + + async def get_email_preferences( + self, + request: Optional[ + Union[emailpreferences.GetEmailPreferencesRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> emailpreferences.EmailPreferences: + r"""Returns the email preferences for a Merchant Center account + user. + + Use the `name=accounts/*/users/me/emailPreferences` alias to get + preferences for the authenticated user. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.shopping import merchant_accounts_v1beta + + async def sample_get_email_preferences(): + # Create a client + client = merchant_accounts_v1beta.EmailPreferencesServiceAsyncClient() + + # Initialize request argument(s) + request = merchant_accounts_v1beta.GetEmailPreferencesRequest( + name="name_value", + ) + + # Make the request + response = await client.get_email_preferences(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.shopping.merchant_accounts_v1beta.types.GetEmailPreferencesRequest, dict]]): + The request object. Request message for + GetEmailPreferences method. + name (:class:`str`): + Required. The name of the ``EmailPreferences`` resource. + Format: + ``accounts/{account}/users/{email}/emailPreferences`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.shopping.merchant_accounts_v1beta.types.EmailPreferences: + The categories of notifications the + user opted into / opted out of. The + email preferences do not include + mandatory announcements as users can't + opt out of them. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, emailpreferences.GetEmailPreferencesRequest): + request = emailpreferences.GetEmailPreferencesRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = self._client._transport._wrapped_methods[ + self._client._transport.get_email_preferences + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def update_email_preferences( + self, + request: Optional[ + Union[emailpreferences.UpdateEmailPreferencesRequest, dict] + ] = None, + *, + email_preferences: Optional[emailpreferences.EmailPreferences] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> emailpreferences.EmailPreferences: + r"""Updates the email preferences for a Merchant Center account + user. MCA users should specify the MCA account rather than a + sub-account of the MCA. + + Preferences which are not explicitly selected in the update mask + will not be updated. + + It is invalid for updates to specify an UNCONFIRMED opt-in + status value. + + Use the `name=accounts/*/users/me/emailPreferences` alias to + update preferences for the authenticated user. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.shopping import merchant_accounts_v1beta + + async def sample_update_email_preferences(): + # Create a client + client = merchant_accounts_v1beta.EmailPreferencesServiceAsyncClient() + + # Initialize request argument(s) + request = merchant_accounts_v1beta.UpdateEmailPreferencesRequest( + ) + + # Make the request + response = await client.update_email_preferences(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.shopping.merchant_accounts_v1beta.types.UpdateEmailPreferencesRequest, dict]]): + The request object. Request message for + UpdateEmailPreferences method. + email_preferences (:class:`google.shopping.merchant_accounts_v1beta.types.EmailPreferences`): + Required. Email Preferences to be + updated. + + This corresponds to the ``email_preferences`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): + Required. List of fields being + updated. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.shopping.merchant_accounts_v1beta.types.EmailPreferences: + The categories of notifications the + user opted into / opted out of. 
The + email preferences do not include + mandatory announcements as users can't + opt out of them. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([email_preferences, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, emailpreferences.UpdateEmailPreferencesRequest): + request = emailpreferences.UpdateEmailPreferencesRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if email_preferences is not None: + request.email_preferences = email_preferences + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.update_email_preferences + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("email_preferences.name", request.email_preferences.name),) + ), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def __aenter__(self) -> "EmailPreferencesServiceAsyncClient": + return self + + async def __aexit__(self, exc_type, exc, tb): + await self.transport.close() + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("EmailPreferencesServiceAsyncClient",) diff --git a/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/email_preferences_service/client.py b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/email_preferences_service/client.py new file mode 100644 index 000000000000..5cfba0208653 --- /dev/null +++ b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/email_preferences_service/client.py @@ -0,0 +1,953 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
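Putting the two async RPCs above together, a hedged end-to-end sketch. The account id, the `news_and_tips` field, and its `OptInState` enum value are assumptions made for illustration and are not confirmed by this diff.

```python
# Hedged sketch combining get_email_preferences and update_email_preferences.
import asyncio

from google.protobuf import field_mask_pb2
from google.shopping import merchant_accounts_v1beta


async def main():
    async with merchant_accounts_v1beta.EmailPreferencesServiceAsyncClient() as client:
        # Read the authenticated user's preferences via the documented alias.
        prefs = await client.get_email_preferences(
            name="accounts/123/users/me/emailPreferences"  # hypothetical account id
        )
        print(prefs)

        # Only fields named in the update mask are changed; the field name and
        # enum below are assumed, not taken from this diff.
        prefs.news_and_tips = merchant_accounts_v1beta.EmailPreferences.OptInState.OPTED_IN
        updated = await client.update_email_preferences(
            email_preferences=prefs,
            update_mask=field_mask_pb2.FieldMask(paths=["news_and_tips"]),
        )
        print(updated)


asyncio.run(main())
```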
+#
+from collections import OrderedDict
+import os
+import re
+from typing import (
+    Callable,
+    Dict,
+    Mapping,
+    MutableMapping,
+    MutableSequence,
+    Optional,
+    Sequence,
+    Tuple,
+    Type,
+    Union,
+    cast,
+)
+import warnings
+
+from google.api_core import client_options as client_options_lib
+from google.api_core import exceptions as core_exceptions
+from google.api_core import gapic_v1
+from google.api_core import retry as retries
+from google.auth import credentials as ga_credentials  # type: ignore
+from google.auth.exceptions import MutualTLSChannelError  # type: ignore
+from google.auth.transport import mtls  # type: ignore
+from google.auth.transport.grpc import SslCredentials  # type: ignore
+from google.oauth2 import service_account  # type: ignore
+
+from google.shopping.merchant_accounts_v1beta import gapic_version as package_version
+
+try:
+    OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None]
+except AttributeError:  # pragma: NO COVER
+    OptionalRetry = Union[retries.Retry, object, None]  # type: ignore
+
+from google.protobuf import field_mask_pb2  # type: ignore
+
+from google.shopping.merchant_accounts_v1beta.types import emailpreferences
+
+from .transports.base import DEFAULT_CLIENT_INFO, EmailPreferencesServiceTransport
+from .transports.grpc import EmailPreferencesServiceGrpcTransport
+from .transports.grpc_asyncio import EmailPreferencesServiceGrpcAsyncIOTransport
+from .transports.rest import EmailPreferencesServiceRestTransport
+
+
+class EmailPreferencesServiceClientMeta(type):
+    """Metaclass for the EmailPreferencesService client.
+
+    This provides class-level methods for building and retrieving
+    support objects (e.g. transport) without polluting the client instance
+    objects.
+    """
+
+    _transport_registry = (
+        OrderedDict()
+    )  # type: Dict[str, Type[EmailPreferencesServiceTransport]]
+    _transport_registry["grpc"] = EmailPreferencesServiceGrpcTransport
+    _transport_registry["grpc_asyncio"] = EmailPreferencesServiceGrpcAsyncIOTransport
+    _transport_registry["rest"] = EmailPreferencesServiceRestTransport
+
+    def get_transport_class(
+        cls,
+        label: Optional[str] = None,
+    ) -> Type[EmailPreferencesServiceTransport]:
+        """Returns an appropriate transport class.
+
+        Args:
+            label: The name of the desired transport. If none is
+                provided, then the first transport in the registry is used.
+
+        Returns:
+            The transport class to use.
+        """
+        # If a specific transport is requested, return that one.
+        if label:
+            return cls._transport_registry[label]
+
+        # No transport is requested; return the default (that is, the first one
+        # in the dictionary).
+        return next(iter(cls._transport_registry.values()))
+
+
+class EmailPreferencesServiceClient(metaclass=EmailPreferencesServiceClientMeta):
+    """Service to support the ``EmailPreferences`` API.
+
+    This service only permits retrieving and updating email preferences
+    for the authenticated user.
+    """
+
+    @staticmethod
+    def _get_default_mtls_endpoint(api_endpoint):
+        """Converts api endpoint to mTLS endpoint.
+
+        Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to
+        "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively.
+        Args:
+            api_endpoint (Optional[str]): the api endpoint to convert.
+        Returns:
+            str: converted mTLS api endpoint.
+        """
+        if not api_endpoint:
+            return api_endpoint
+
+        mtls_endpoint_re = re.compile(
+            r"(?P<name>[^.]+)(?P<mtls>\.mtls)?(?P<sandbox>\.sandbox)?(?P<googledomain>\.googleapis\.com)?"
+ ) + + m = mtls_endpoint_re.match(api_endpoint) + name, mtls, sandbox, googledomain = m.groups() + if mtls or not googledomain: + return api_endpoint + + if sandbox: + return api_endpoint.replace( + "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" + ) + + return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") + + # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. + DEFAULT_ENDPOINT = "merchantapi.googleapis.com" + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore + DEFAULT_ENDPOINT + ) + + _DEFAULT_ENDPOINT_TEMPLATE = "merchantapi.{UNIVERSE_DOMAIN}" + _DEFAULT_UNIVERSE = "googleapis.com" + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + EmailPreferencesServiceClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + EmailPreferencesServiceClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_file(filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> EmailPreferencesServiceTransport: + """Returns the transport used by the client instance. + + Returns: + EmailPreferencesServiceTransport: The transport used by the client + instance. 
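The endpoint constants above follow a small set of string shapes; the standalone sketch below reproduces them without touching the private helpers. The `example.com` universe domain is an arbitrary assumption used only for illustration.

```python
# Standalone sketch of the endpoint forms used by this client.
DEFAULT_ENDPOINT = "merchantapi.googleapis.com"
ENDPOINT_TEMPLATE = "merchantapi.{UNIVERSE_DOMAIN}"

# The mTLS variant inserts ".mtls" before ".googleapis.com".
mtls_endpoint = DEFAULT_ENDPOINT.replace(".googleapis.com", ".mtls.googleapis.com")
assert mtls_endpoint == "merchantapi.mtls.googleapis.com"

# A non-default universe fills the template with the configured domain.
assert ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN="example.com") == "merchantapi.example.com"
```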
+ """ + return self._transport + + @staticmethod + def email_preferences_path( + account: str, + email: str, + ) -> str: + """Returns a fully-qualified email_preferences string.""" + return "accounts/{account}/users/{email}/emailPreferences".format( + account=account, + email=email, + ) + + @staticmethod + def parse_email_preferences_path(path: str) -> Dict[str, str]: + """Parses a email_preferences path into its component segments.""" + m = re.match( + r"^accounts/(?P.+?)/users/(?P.+?)/emailPreferences$", path + ) + return m.groupdict() if m else {} + + @staticmethod + def common_billing_account_path( + billing_account: str, + ) -> str: + """Returns a fully-qualified billing_account string.""" + return "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + + @staticmethod + def parse_common_billing_account_path(path: str) -> Dict[str, str]: + """Parse a billing_account path into its component segments.""" + m = re.match(r"^billingAccounts/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_folder_path( + folder: str, + ) -> str: + """Returns a fully-qualified folder string.""" + return "folders/{folder}".format( + folder=folder, + ) + + @staticmethod + def parse_common_folder_path(path: str) -> Dict[str, str]: + """Parse a folder path into its component segments.""" + m = re.match(r"^folders/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_organization_path( + organization: str, + ) -> str: + """Returns a fully-qualified organization string.""" + return "organizations/{organization}".format( + organization=organization, + ) + + @staticmethod + def parse_common_organization_path(path: str) -> Dict[str, str]: + """Parse a organization path into its component segments.""" + m = re.match(r"^organizations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_project_path( + project: str, + ) -> str: + """Returns a fully-qualified project string.""" + return "projects/{project}".format( + project=project, + ) + + @staticmethod + def parse_common_project_path(path: str) -> Dict[str, str]: + """Parse a project path into its component segments.""" + m = re.match(r"^projects/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_location_path( + project: str, + location: str, + ) -> str: + """Returns a fully-qualified location string.""" + return "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) + + @staticmethod + def parse_common_location_path(path: str) -> Dict[str, str]: + """Parse a location path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @classmethod + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[client_options_lib.ClientOptions] = None + ): + """Deprecated. Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. 
+ (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + + warnings.warn( + "get_mtls_endpoint_and_cert_source is deprecated. Use the api_endpoint property instead.", + DeprecationWarning, + ) + if client_options is None: + client_options = client_options_lib.ClientOptions() + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_client_cert not in ("true", "false"): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError( + "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Figure out the client cert source to use. + client_cert_source = None + if use_client_cert == "true": + if client_options.client_cert_source: + client_cert_source = client_options.client_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + + # Figure out which api endpoint to use. + if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + elif use_mtls_endpoint == "always" or ( + use_mtls_endpoint == "auto" and client_cert_source + ): + api_endpoint = cls.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = cls.DEFAULT_ENDPOINT + + return api_endpoint, client_cert_source + + @staticmethod + def _read_environment_variables(): + """Returns the environment variables used by the client. + + Returns: + Tuple[bool, str, str]: returns the GOOGLE_API_USE_CLIENT_CERTIFICATE, + GOOGLE_API_USE_MTLS_ENDPOINT, and GOOGLE_CLOUD_UNIVERSE_DOMAIN environment variables. + + Raises: + ValueError: If GOOGLE_API_USE_CLIENT_CERTIFICATE is not + any of ["true", "false"]. + google.auth.exceptions.MutualTLSChannelError: If GOOGLE_API_USE_MTLS_ENDPOINT + is not any of ["auto", "never", "always"]. + """ + use_client_cert = os.getenv( + "GOOGLE_API_USE_CLIENT_CERTIFICATE", "false" + ).lower() + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto").lower() + universe_domain_env = os.getenv("GOOGLE_CLOUD_UNIVERSE_DOMAIN") + if use_client_cert not in ("true", "false"): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError( + "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + return use_client_cert == "true", use_mtls_endpoint, universe_domain_env + + @staticmethod + def _get_client_cert_source(provided_cert_source, use_cert_flag): + """Return the client cert source to be used by the client. 
+ + Args: + provided_cert_source (bytes): The client certificate source provided. + use_cert_flag (bool): A flag indicating whether to use the client certificate. + + Returns: + bytes or None: The client cert source to be used by the client. + """ + client_cert_source = None + if use_cert_flag: + if provided_cert_source: + client_cert_source = provided_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + return client_cert_source + + @staticmethod + def _get_api_endpoint( + api_override, client_cert_source, universe_domain, use_mtls_endpoint + ): + """Return the API endpoint used by the client. + + Args: + api_override (str): The API endpoint override. If specified, this is always + the return value of this function and the other arguments are not used. + client_cert_source (bytes): The client certificate source used by the client. + universe_domain (str): The universe domain used by the client. + use_mtls_endpoint (str): How to use the mTLS endpoint, which depends also on the other parameters. + Possible values are "always", "auto", or "never". + + Returns: + str: The API endpoint to be used by the client. + """ + if api_override is not None: + api_endpoint = api_override + elif use_mtls_endpoint == "always" or ( + use_mtls_endpoint == "auto" and client_cert_source + ): + _default_universe = EmailPreferencesServiceClient._DEFAULT_UNIVERSE + if universe_domain != _default_universe: + raise MutualTLSChannelError( + f"mTLS is not supported in any universe other than {_default_universe}." + ) + api_endpoint = EmailPreferencesServiceClient.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = ( + EmailPreferencesServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=universe_domain + ) + ) + return api_endpoint + + @staticmethod + def _get_universe_domain( + client_universe_domain: Optional[str], universe_domain_env: Optional[str] + ) -> str: + """Return the universe domain used by the client. + + Args: + client_universe_domain (Optional[str]): The universe domain configured via the client options. + universe_domain_env (Optional[str]): The universe domain configured via the "GOOGLE_CLOUD_UNIVERSE_DOMAIN" environment variable. + + Returns: + str: The universe domain to be used by the client. + + Raises: + ValueError: If the universe domain is an empty string. + """ + universe_domain = EmailPreferencesServiceClient._DEFAULT_UNIVERSE + if client_universe_domain is not None: + universe_domain = client_universe_domain + elif universe_domain_env is not None: + universe_domain = universe_domain_env + if len(universe_domain.strip()) == 0: + raise ValueError("Universe Domain cannot be an empty string.") + return universe_domain + + @staticmethod + def _compare_universes( + client_universe: str, credentials: ga_credentials.Credentials + ) -> bool: + """Returns True iff the universe domains used by the client and credentials match. + + Args: + client_universe (str): The universe domain configured via the client options. + credentials (ga_credentials.Credentials): The credentials being used in the client. + + Returns: + bool: True iff client_universe matches the universe in credentials. + + Raises: + ValueError: when client_universe does not match the universe in credentials. 
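The helpers above resolve the universe domain from three sources in a fixed order; the small standalone sketch below mirrors that precedence. It is a sketch of the documented behaviour, not the library's own code.

```python
# Sketch of the precedence used for the universe domain:
# explicit client option, then the environment value, then the default.
from typing import Optional

_DEFAULT_UNIVERSE = "googleapis.com"


def resolve_universe_domain(
    client_option: Optional[str], env_value: Optional[str]
) -> str:
    universe = _DEFAULT_UNIVERSE
    if client_option is not None:
        universe = client_option
    elif env_value is not None:
        universe = env_value
    if len(universe.strip()) == 0:
        raise ValueError("Universe Domain cannot be an empty string.")
    return universe


print(resolve_universe_domain(None, None))           # googleapis.com
print(resolve_universe_domain("example.com", None))  # example.com
```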
+ """ + + default_universe = EmailPreferencesServiceClient._DEFAULT_UNIVERSE + credentials_universe = getattr(credentials, "universe_domain", default_universe) + + if client_universe != credentials_universe: + raise ValueError( + "The configured universe domain " + f"({client_universe}) does not match the universe domain " + f"found in the credentials ({credentials_universe}). " + "If you haven't configured the universe domain explicitly, " + f"`{default_universe}` is the default." + ) + return True + + def _validate_universe_domain(self): + """Validates client's and credentials' universe domains are consistent. + + Returns: + bool: True iff the configured universe domain is valid. + + Raises: + ValueError: If the configured universe domain is not valid. + """ + self._is_universe_domain_valid = ( + self._is_universe_domain_valid + or EmailPreferencesServiceClient._compare_universes( + self.universe_domain, self.transport._credentials + ) + ) + return self._is_universe_domain_valid + + @property + def api_endpoint(self): + """Return the API endpoint used by the client instance. + + Returns: + str: The API endpoint used by the client instance. + """ + return self._api_endpoint + + @property + def universe_domain(self) -> str: + """Return the universe domain used by the client instance. + + Returns: + str: The universe domain used by the client instance. + """ + return self._universe_domain + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[ + Union[ + str, + EmailPreferencesServiceTransport, + Callable[..., EmailPreferencesServiceTransport], + ] + ] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the email preferences service client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Optional[Union[str,EmailPreferencesServiceTransport,Callable[..., EmailPreferencesServiceTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the EmailPreferencesServiceTransport constructor. + If set to None, a transport is chosen automatically. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): + Custom options for the client. + + 1. The ``api_endpoint`` property can be used to override the + default endpoint provided by the client when ``transport`` is + not explicitly provided. Only if this property is not set and + ``transport`` was not explicitly provided, the endpoint is + determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment + variable, which have one of the following values: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto-switch to the + default mTLS endpoint if client certificate is present; this is + the default value). + + 2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide a client certificate for mTLS transport. If + not provided, the default SSL client certificate will be used if + present. 
If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + 3. The ``universe_domain`` property can be used to override the + default "googleapis.com" universe. Note that the ``api_endpoint`` + property still takes precedence; and ``universe_domain`` is + currently not supported for mTLS. + + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + """ + self._client_options = client_options + if isinstance(self._client_options, dict): + self._client_options = client_options_lib.from_dict(self._client_options) + if self._client_options is None: + self._client_options = client_options_lib.ClientOptions() + self._client_options = cast( + client_options_lib.ClientOptions, self._client_options + ) + + universe_domain_opt = getattr(self._client_options, "universe_domain", None) + + ( + self._use_client_cert, + self._use_mtls_endpoint, + self._universe_domain_env, + ) = EmailPreferencesServiceClient._read_environment_variables() + self._client_cert_source = ( + EmailPreferencesServiceClient._get_client_cert_source( + self._client_options.client_cert_source, self._use_client_cert + ) + ) + self._universe_domain = EmailPreferencesServiceClient._get_universe_domain( + universe_domain_opt, self._universe_domain_env + ) + self._api_endpoint = None # updated below, depending on `transport` + + # Initialize the universe domain validation. + self._is_universe_domain_valid = False + + api_key_value = getattr(self._client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError( + "client_options.api_key and credentials are mutually exclusive" + ) + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + transport_provided = isinstance(transport, EmailPreferencesServiceTransport) + if transport_provided: + # transport is a EmailPreferencesServiceTransport instance. + if credentials or self._client_options.credentials_file or api_key_value: + raise ValueError( + "When providing a transport instance, " + "provide its credentials directly." + ) + if self._client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." 
+ ) + self._transport = cast(EmailPreferencesServiceTransport, transport) + self._api_endpoint = self._transport.host + + self._api_endpoint = ( + self._api_endpoint + or EmailPreferencesServiceClient._get_api_endpoint( + self._client_options.api_endpoint, + self._client_cert_source, + self._universe_domain, + self._use_mtls_endpoint, + ) + ) + + if not transport_provided: + import google.auth._default # type: ignore + + if api_key_value and hasattr( + google.auth._default, "get_api_key_credentials" + ): + credentials = google.auth._default.get_api_key_credentials( + api_key_value + ) + + transport_init: Union[ + Type[EmailPreferencesServiceTransport], + Callable[..., EmailPreferencesServiceTransport], + ] = ( + type(self).get_transport_class(transport) + if isinstance(transport, str) or transport is None + else cast(Callable[..., EmailPreferencesServiceTransport], transport) + ) + # initialize with the provided callable or the passed in class + self._transport = transport_init( + credentials=credentials, + credentials_file=self._client_options.credentials_file, + host=self._api_endpoint, + scopes=self._client_options.scopes, + client_cert_source_for_mtls=self._client_cert_source, + quota_project_id=self._client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + api_audience=self._client_options.api_audience, + ) + + def get_email_preferences( + self, + request: Optional[ + Union[emailpreferences.GetEmailPreferencesRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> emailpreferences.EmailPreferences: + r"""Returns the email preferences for a Merchant Center account + user. + + Use the `name=accounts/*/users/me/emailPreferences` alias to get + preferences for the authenticated user. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.shopping import merchant_accounts_v1beta + + def sample_get_email_preferences(): + # Create a client + client = merchant_accounts_v1beta.EmailPreferencesServiceClient() + + # Initialize request argument(s) + request = merchant_accounts_v1beta.GetEmailPreferencesRequest( + name="name_value", + ) + + # Make the request + response = client.get_email_preferences(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.shopping.merchant_accounts_v1beta.types.GetEmailPreferencesRequest, dict]): + The request object. Request message for + GetEmailPreferences method. + name (str): + Required. The name of the ``EmailPreferences`` resource. + Format: + ``accounts/{account}/users/{email}/emailPreferences`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.shopping.merchant_accounts_v1beta.types.EmailPreferences: + The categories of notifications the + user opted into / opted out of. The + email preferences do not include + mandatory announcements as users can't + opt out of them. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, emailpreferences.GetEmailPreferencesRequest): + request = emailpreferences.GetEmailPreferencesRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_email_preferences] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def update_email_preferences( + self, + request: Optional[ + Union[emailpreferences.UpdateEmailPreferencesRequest, dict] + ] = None, + *, + email_preferences: Optional[emailpreferences.EmailPreferences] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> emailpreferences.EmailPreferences: + r"""Updates the email preferences for a Merchant Center account + user. MCA users should specify the MCA account rather than a + sub-account of the MCA. + + Preferences which are not explicitly selected in the update mask + will not be updated. + + It is invalid for updates to specify an UNCONFIRMED opt-in + status value. + + Use the `name=accounts/*/users/me/emailPreferences` alias to + update preferences for the authenticated user. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.shopping import merchant_accounts_v1beta + + def sample_update_email_preferences(): + # Create a client + client = merchant_accounts_v1beta.EmailPreferencesServiceClient() + + # Initialize request argument(s) + request = merchant_accounts_v1beta.UpdateEmailPreferencesRequest( + ) + + # Make the request + response = client.update_email_preferences(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.shopping.merchant_accounts_v1beta.types.UpdateEmailPreferencesRequest, dict]): + The request object. Request message for + UpdateEmailPreferences method. + email_preferences (google.shopping.merchant_accounts_v1beta.types.EmailPreferences): + Required. Email Preferences to be + updated. + + This corresponds to the ``email_preferences`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Required. List of fields being + updated. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.shopping.merchant_accounts_v1beta.types.EmailPreferences: + The categories of notifications the + user opted into / opted out of. The + email preferences do not include + mandatory announcements as users can't + opt out of them. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([email_preferences, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, emailpreferences.UpdateEmailPreferencesRequest): + request = emailpreferences.UpdateEmailPreferencesRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if email_preferences is not None: + request.email_preferences = email_preferences + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update_email_preferences] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("email_preferences.name", request.email_preferences.name),) + ), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def __enter__(self) -> "EmailPreferencesServiceClient": + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! + """ + self.transport.close() + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("EmailPreferencesServiceClient",) diff --git a/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/email_preferences_service/transports/__init__.py b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/email_preferences_service/transports/__init__.py new file mode 100644 index 000000000000..5f23adfbb6d9 --- /dev/null +++ b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/email_preferences_service/transports/__init__.py @@ -0,0 +1,41 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +from typing import Dict, Type + +from .base import EmailPreferencesServiceTransport +from .grpc import EmailPreferencesServiceGrpcTransport +from .grpc_asyncio import EmailPreferencesServiceGrpcAsyncIOTransport +from .rest import ( + EmailPreferencesServiceRestInterceptor, + EmailPreferencesServiceRestTransport, +) + +# Compile a registry of transports. +_transport_registry = ( + OrderedDict() +) # type: Dict[str, Type[EmailPreferencesServiceTransport]] +_transport_registry["grpc"] = EmailPreferencesServiceGrpcTransport +_transport_registry["grpc_asyncio"] = EmailPreferencesServiceGrpcAsyncIOTransport +_transport_registry["rest"] = EmailPreferencesServiceRestTransport + +__all__ = ( + "EmailPreferencesServiceTransport", + "EmailPreferencesServiceGrpcTransport", + "EmailPreferencesServiceGrpcAsyncIOTransport", + "EmailPreferencesServiceRestTransport", + "EmailPreferencesServiceRestInterceptor", +) diff --git a/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/email_preferences_service/transports/base.py b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/email_preferences_service/transports/base.py new file mode 100644 index 000000000000..869b2cafd430 --- /dev/null +++ b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/email_preferences_service/transports/base.py @@ -0,0 +1,179 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
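Because the transport registry above maps the names "grpc", "grpc_asyncio" and "rest" to concrete classes, the same strings can be passed to the client constructor. A hedged sketch, assuming application default credentials are available:

```python
# Sketch: select the REST transport by name; the registry resolves the class.
from google.shopping import merchant_accounts_v1beta

client = merchant_accounts_v1beta.EmailPreferencesServiceClient(transport="rest")
print(type(client.transport).__name__)  # EmailPreferencesServiceRestTransport
```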
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import abc +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union + +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.shopping.merchant_accounts_v1beta import gapic_version as package_version +from google.shopping.merchant_accounts_v1beta.types import emailpreferences + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +class EmailPreferencesServiceTransport(abc.ABC): + """Abstract transport class for EmailPreferencesService.""" + + AUTH_SCOPES = ("https://www.googleapis.com/auth/content",) + + DEFAULT_HOST: str = "merchantapi.googleapis.com" + + def __init__( + self, + *, + host: str = DEFAULT_HOST, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + **kwargs, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'merchantapi.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A list of scopes. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + """ + + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} + + # Save the scopes. + self._scopes = scopes + + # If no credentials are provided, then determine the appropriate + # defaults. 
+ if credentials and credentials_file: + raise core_exceptions.DuplicateCredentialArgs( + "'credentials_file' and 'credentials' are mutually exclusive" + ) + + if credentials_file is not None: + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, **scopes_kwargs, quota_project_id=quota_project_id + ) + elif credentials is None: + credentials, _ = google.auth.default( + **scopes_kwargs, quota_project_id=quota_project_id + ) + # Don't apply audience if the credentials file passed from user. + if hasattr(credentials, "with_gdch_audience"): + credentials = credentials.with_gdch_audience( + api_audience if api_audience else host + ) + + # If the credentials are service account credentials, then always try to use self signed JWT. + if ( + always_use_jwt_access + and isinstance(credentials, service_account.Credentials) + and hasattr(service_account.Credentials, "with_always_use_jwt_access") + ): + credentials = credentials.with_always_use_jwt_access(True) + + # Save the credentials. + self._credentials = credentials + + # Save the hostname. Default to port 443 (HTTPS) if none is specified. + if ":" not in host: + host += ":443" + self._host = host + + @property + def host(self): + return self._host + + def _prep_wrapped_messages(self, client_info): + # Precompute the wrapped methods. + self._wrapped_methods = { + self.get_email_preferences: gapic_v1.method.wrap_method( + self.get_email_preferences, + default_timeout=None, + client_info=client_info, + ), + self.update_email_preferences: gapic_v1.method.wrap_method( + self.update_email_preferences, + default_timeout=None, + client_info=client_info, + ), + } + + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! + """ + raise NotImplementedError() + + @property + def get_email_preferences( + self, + ) -> Callable[ + [emailpreferences.GetEmailPreferencesRequest], + Union[ + emailpreferences.EmailPreferences, + Awaitable[emailpreferences.EmailPreferences], + ], + ]: + raise NotImplementedError() + + @property + def update_email_preferences( + self, + ) -> Callable[ + [emailpreferences.UpdateEmailPreferencesRequest], + Union[ + emailpreferences.EmailPreferences, + Awaitable[emailpreferences.EmailPreferences], + ], + ]: + raise NotImplementedError() + + @property + def kind(self) -> str: + raise NotImplementedError() + + +__all__ = ("EmailPreferencesServiceTransport",) diff --git a/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/email_preferences_service/transports/grpc.py b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/email_preferences_service/transports/grpc.py new file mode 100644 index 000000000000..88fe499a7e14 --- /dev/null +++ b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/email_preferences_service/transports/grpc.py @@ -0,0 +1,319 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, grpc_helpers +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +import grpc # type: ignore + +from google.shopping.merchant_accounts_v1beta.types import emailpreferences + +from .base import DEFAULT_CLIENT_INFO, EmailPreferencesServiceTransport + + +class EmailPreferencesServiceGrpcTransport(EmailPreferencesServiceTransport): + """gRPC backend transport for EmailPreferencesService. + + Service to support the ``EmailPreferences`` API. + + This service only permits retrieving and updating email preferences + for the authenticated user. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + + _stubs: Dict[str, Callable] + + def __init__( + self, + *, + host: str = "merchantapi.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'merchantapi.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if a ``channel`` instance is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if a ``channel`` instance is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if a ``channel`` instance is provided. + channel (Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]]): + A ``Channel`` instance through which to make calls, or a Callable + that constructs and returns one. If set to None, ``self.create_channel`` + is used to create the channel. If a Callable is given, it will be called + with the same arguments as used in ``self.create_channel``. 
+ api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if a ``channel`` instance is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if isinstance(channel, grpc.Channel): + # Ignore credentials if a channel was passed. + credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + # initialize with the provided callable or the default channel + channel_init = channel or type(self).create_channel + self._grpc_channel = channel_init( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. 
+ credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @classmethod + def create_channel( + cls, + host: str = "merchantapi.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> grpc.Channel: + """Create and return a gRPC channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + grpc.Channel: A gRPC channel object. + + Raises: + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + + return grpc_helpers.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs, + ) + + @property + def grpc_channel(self) -> grpc.Channel: + """Return the channel designed to connect to this service.""" + return self._grpc_channel + + @property + def get_email_preferences( + self, + ) -> Callable[ + [emailpreferences.GetEmailPreferencesRequest], emailpreferences.EmailPreferences + ]: + r"""Return a callable for the get email preferences method over gRPC. + + Returns the email preferences for a Merchant Center account + user. + + Use the `name=accounts/*/users/me/emailPreferences` alias to get + preferences for the authenticated user. + + Returns: + Callable[[~.GetEmailPreferencesRequest], + ~.EmailPreferences]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
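+        # The stub is created lazily on first property access and cached in
+        # ``self._stubs``; subsequent accesses reuse the same callable, e.g.
+        # ``transport.get_email_preferences(request)`` (request construction elided).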
+ if "get_email_preferences" not in self._stubs: + self._stubs["get_email_preferences"] = self.grpc_channel.unary_unary( + "/google.shopping.merchant.accounts.v1beta.EmailPreferencesService/GetEmailPreferences", + request_serializer=emailpreferences.GetEmailPreferencesRequest.serialize, + response_deserializer=emailpreferences.EmailPreferences.deserialize, + ) + return self._stubs["get_email_preferences"] + + @property + def update_email_preferences( + self, + ) -> Callable[ + [emailpreferences.UpdateEmailPreferencesRequest], + emailpreferences.EmailPreferences, + ]: + r"""Return a callable for the update email preferences method over gRPC. + + Updates the email preferences for a Merchant Center account + user. MCA users should specify the MCA account rather than a + sub-account of the MCA. + + Preferences which are not explicitly selected in the update mask + will not be updated. + + It is invalid for updates to specify an UNCONFIRMED opt-in + status value. + + Use the `name=accounts/*/users/me/emailPreferences` alias to + update preferences for the authenticated user. + + Returns: + Callable[[~.UpdateEmailPreferencesRequest], + ~.EmailPreferences]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_email_preferences" not in self._stubs: + self._stubs["update_email_preferences"] = self.grpc_channel.unary_unary( + "/google.shopping.merchant.accounts.v1beta.EmailPreferencesService/UpdateEmailPreferences", + request_serializer=emailpreferences.UpdateEmailPreferencesRequest.serialize, + response_deserializer=emailpreferences.EmailPreferences.deserialize, + ) + return self._stubs["update_email_preferences"] + + def close(self): + self.grpc_channel.close() + + @property + def kind(self) -> str: + return "grpc" + + +__all__ = ("EmailPreferencesServiceGrpcTransport",) diff --git a/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/email_preferences_service/transports/grpc_asyncio.py b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/email_preferences_service/transports/grpc_asyncio.py new file mode 100644 index 000000000000..2237ed6d1abf --- /dev/null +++ b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/email_preferences_service/transports/grpc_asyncio.py @@ -0,0 +1,335 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1, grpc_helpers_async +from google.api_core import retry_async as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +import grpc # type: ignore +from grpc.experimental import aio # type: ignore + +from google.shopping.merchant_accounts_v1beta.types import emailpreferences + +from .base import DEFAULT_CLIENT_INFO, EmailPreferencesServiceTransport +from .grpc import EmailPreferencesServiceGrpcTransport + + +class EmailPreferencesServiceGrpcAsyncIOTransport(EmailPreferencesServiceTransport): + """gRPC AsyncIO backend transport for EmailPreferencesService. + + Service to support the ``EmailPreferences`` API. + + This service only permits retrieving and updating email preferences + for the authenticated user. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + + _grpc_channel: aio.Channel + _stubs: Dict[str, Callable] = {} + + @classmethod + def create_channel( + cls, + host: str = "merchantapi.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> aio.Channel: + """Create and return a gRPC AsyncIO channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + aio.Channel: A gRPC AsyncIO channel object. 
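+
+        Raises:
+            google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
+                and ``credentials_file`` are passed.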
+ """ + + return grpc_helpers_async.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs, + ) + + def __init__( + self, + *, + host: str = "merchantapi.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[Union[aio.Channel, Callable[..., aio.Channel]]] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'merchantapi.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if a ``channel`` instance is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if a ``channel`` instance is provided. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + channel (Optional[Union[aio.Channel, Callable[..., aio.Channel]]]): + A ``Channel`` instance through which to make calls, or a Callable + that constructs and returns one. If set to None, ``self.create_channel`` + is used to create the channel. If a Callable is given, it will be called + with the same arguments as used in ``self.create_channel``. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if a ``channel`` instance is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. 
+ Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if isinstance(channel, aio.Channel): + # Ignore credentials if a channel was passed. + credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + # initialize with the provided callable or the default channel + channel_init = channel or type(self).create_channel + self._grpc_channel = channel_init( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @property + def grpc_channel(self) -> aio.Channel: + """Create the channel designed to connect to this service. + + This property caches on the instance; repeated calls return + the same channel. + """ + # Return the channel from cache. + return self._grpc_channel + + @property + def get_email_preferences( + self, + ) -> Callable[ + [emailpreferences.GetEmailPreferencesRequest], + Awaitable[emailpreferences.EmailPreferences], + ]: + r"""Return a callable for the get email preferences method over gRPC. + + Returns the email preferences for a Merchant Center account + user. + + Use the `name=accounts/*/users/me/emailPreferences` alias to get + preferences for the authenticated user. + + Returns: + Callable[[~.GetEmailPreferencesRequest], + Awaitable[~.EmailPreferences]]: + A function that, when called, will call the underlying RPC + on the server. 
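+
+        The returned callable is awaitable; illustratively (request construction
+        elided), ``preferences = await transport.get_email_preferences(request)``.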
+ """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_email_preferences" not in self._stubs: + self._stubs["get_email_preferences"] = self.grpc_channel.unary_unary( + "/google.shopping.merchant.accounts.v1beta.EmailPreferencesService/GetEmailPreferences", + request_serializer=emailpreferences.GetEmailPreferencesRequest.serialize, + response_deserializer=emailpreferences.EmailPreferences.deserialize, + ) + return self._stubs["get_email_preferences"] + + @property + def update_email_preferences( + self, + ) -> Callable[ + [emailpreferences.UpdateEmailPreferencesRequest], + Awaitable[emailpreferences.EmailPreferences], + ]: + r"""Return a callable for the update email preferences method over gRPC. + + Updates the email preferences for a Merchant Center account + user. MCA users should specify the MCA account rather than a + sub-account of the MCA. + + Preferences which are not explicitly selected in the update mask + will not be updated. + + It is invalid for updates to specify an UNCONFIRMED opt-in + status value. + + Use the `name=accounts/*/users/me/emailPreferences` alias to + update preferences for the authenticated user. + + Returns: + Callable[[~.UpdateEmailPreferencesRequest], + Awaitable[~.EmailPreferences]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_email_preferences" not in self._stubs: + self._stubs["update_email_preferences"] = self.grpc_channel.unary_unary( + "/google.shopping.merchant.accounts.v1beta.EmailPreferencesService/UpdateEmailPreferences", + request_serializer=emailpreferences.UpdateEmailPreferencesRequest.serialize, + response_deserializer=emailpreferences.EmailPreferences.deserialize, + ) + return self._stubs["update_email_preferences"] + + def _prep_wrapped_messages(self, client_info): + """Precompute the wrapped methods, overriding the base class method to use async wrappers.""" + self._wrapped_methods = { + self.get_email_preferences: gapic_v1.method_async.wrap_method( + self.get_email_preferences, + default_timeout=None, + client_info=client_info, + ), + self.update_email_preferences: gapic_v1.method_async.wrap_method( + self.update_email_preferences, + default_timeout=None, + client_info=client_info, + ), + } + + def close(self): + return self.grpc_channel.close() + + +__all__ = ("EmailPreferencesServiceGrpcAsyncIOTransport",) diff --git a/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/email_preferences_service/transports/rest.py b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/email_preferences_service/transports/rest.py new file mode 100644 index 000000000000..fddae29de426 --- /dev/null +++ b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/email_preferences_service/transports/rest.py @@ -0,0 +1,460 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +import dataclasses +import json # type: ignore +import re +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, path_template, rest_helpers, rest_streaming +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.transport.requests import AuthorizedSession # type: ignore +from google.protobuf import json_format +import grpc # type: ignore +from requests import __version__ as requests_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + + +from google.shopping.merchant_accounts_v1beta.types import emailpreferences + +from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO +from .base import EmailPreferencesServiceTransport + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) + + +class EmailPreferencesServiceRestInterceptor: + """Interceptor for EmailPreferencesService. + + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. + Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the EmailPreferencesServiceRestTransport. + + .. code-block:: python + class MyCustomEmailPreferencesServiceInterceptor(EmailPreferencesServiceRestInterceptor): + def pre_get_email_preferences(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_email_preferences(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_update_email_preferences(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_update_email_preferences(self, response): + logging.log(f"Received response: {response}") + return response + + transport = EmailPreferencesServiceRestTransport(interceptor=MyCustomEmailPreferencesServiceInterceptor()) + client = EmailPreferencesServiceClient(transport=transport) + + + """ + + def pre_get_email_preferences( + self, + request: emailpreferences.GetEmailPreferencesRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[emailpreferences.GetEmailPreferencesRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_email_preferences + + Override in a subclass to manipulate the request or metadata + before they are sent to the EmailPreferencesService server. 
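+
+        The returned ``(request, metadata)`` tuple is what will actually be sent;
+        this default implementation returns both unchanged.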
+ """ + return request, metadata + + def post_get_email_preferences( + self, response: emailpreferences.EmailPreferences + ) -> emailpreferences.EmailPreferences: + """Post-rpc interceptor for get_email_preferences + + Override in a subclass to manipulate the response + after it is returned by the EmailPreferencesService server but before + it is returned to user code. + """ + return response + + def pre_update_email_preferences( + self, + request: emailpreferences.UpdateEmailPreferencesRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + emailpreferences.UpdateEmailPreferencesRequest, Sequence[Tuple[str, str]] + ]: + """Pre-rpc interceptor for update_email_preferences + + Override in a subclass to manipulate the request or metadata + before they are sent to the EmailPreferencesService server. + """ + return request, metadata + + def post_update_email_preferences( + self, response: emailpreferences.EmailPreferences + ) -> emailpreferences.EmailPreferences: + """Post-rpc interceptor for update_email_preferences + + Override in a subclass to manipulate the response + after it is returned by the EmailPreferencesService server but before + it is returned to user code. + """ + return response + + +@dataclasses.dataclass +class EmailPreferencesServiceRestStub: + _session: AuthorizedSession + _host: str + _interceptor: EmailPreferencesServiceRestInterceptor + + +class EmailPreferencesServiceRestTransport(EmailPreferencesServiceTransport): + """REST backend transport for EmailPreferencesService. + + Service to support the ``EmailPreferences`` API. + + This service only permits retrieving and updating email preferences + for the authenticated user. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends JSON representations of protocol buffers over HTTP/1.1 + + """ + + def __init__( + self, + *, + host: str = "merchantapi.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", + interceptor: Optional[EmailPreferencesServiceRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'merchantapi.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. It is ignored + if ``channel`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. 
+ client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. + """ + # Run the base constructor + # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. + # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the + # credentials object + maybe_url_match = re.match("^(?Phttp(?:s)?://)?(?P.*)$", host) + if maybe_url_match is None: + raise ValueError( + f"Unexpected hostname structure: {host}" + ) # pragma: NO COVER + + url_match_items = maybe_url_match.groupdict() + + host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host + + super().__init__( + host=host, + credentials=credentials, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + self._session = AuthorizedSession( + self._credentials, default_host=self.DEFAULT_HOST + ) + if client_cert_source_for_mtls: + self._session.configure_mtls_channel(client_cert_source_for_mtls) + self._interceptor = interceptor or EmailPreferencesServiceRestInterceptor() + self._prep_wrapped_messages(client_info) + + class _GetEmailPreferences(EmailPreferencesServiceRestStub): + def __hash__(self): + return hash("GetEmailPreferences") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: emailpreferences.GetEmailPreferencesRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> emailpreferences.EmailPreferences: + r"""Call the get email preferences method over HTTP. + + Args: + request (~.emailpreferences.GetEmailPreferencesRequest): + The request object. Request message for + GetEmailPreferences method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.emailpreferences.EmailPreferences: + The categories of notifications the + user opted into / opted out of. The + email preferences do not include + mandatory announcements as users can't + opt out of them. 
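+
+                This call is made as
+                ``GET /accounts/v1beta/{name=accounts/*/users/*/emailPreferences}``.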
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/accounts/v1beta/{name=accounts/*/users/*/emailPreferences}", + }, + ] + request, metadata = self._interceptor.pre_get_email_preferences( + request, metadata + ) + pb_request = emailpreferences.GetEmailPreferencesRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = emailpreferences.EmailPreferences() + pb_resp = emailpreferences.EmailPreferences.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_email_preferences(resp) + return resp + + class _UpdateEmailPreferences(EmailPreferencesServiceRestStub): + def __hash__(self): + return hash("UpdateEmailPreferences") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "updateMask": {}, + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: emailpreferences.UpdateEmailPreferencesRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> emailpreferences.EmailPreferences: + r"""Call the update email preferences method over HTTP. + + Args: + request (~.emailpreferences.UpdateEmailPreferencesRequest): + The request object. Request message for + UpdateEmailPreferences method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.emailpreferences.EmailPreferences: + The categories of notifications the + user opted into / opted out of. The + email preferences do not include + mandatory announcements as users can't + opt out of them. 
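+
+                This call is made as
+                ``PATCH /accounts/v1beta/{email_preferences.name=accounts/*/users/*/emailPreferences}``
+                with the ``email_preferences`` message as the JSON request body.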
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "patch", + "uri": "/accounts/v1beta/{email_preferences.name=accounts/*/users/*/emailPreferences}", + "body": "email_preferences", + }, + ] + request, metadata = self._interceptor.pre_update_email_preferences( + request, metadata + ) + pb_request = emailpreferences.UpdateEmailPreferencesRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = emailpreferences.EmailPreferences() + pb_resp = emailpreferences.EmailPreferences.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_update_email_preferences(resp) + return resp + + @property + def get_email_preferences( + self, + ) -> Callable[ + [emailpreferences.GetEmailPreferencesRequest], emailpreferences.EmailPreferences + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetEmailPreferences(self._session, self._host, self._interceptor) # type: ignore + + @property + def update_email_preferences( + self, + ) -> Callable[ + [emailpreferences.UpdateEmailPreferencesRequest], + emailpreferences.EmailPreferences, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._UpdateEmailPreferences(self._session, self._host, self._interceptor) # type: ignore + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__ = ("EmailPreferencesServiceRestTransport",) diff --git a/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/homepage_service/__init__.py b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/homepage_service/__init__.py new file mode 100644 index 000000000000..cfabb52141c7 --- /dev/null +++ b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/homepage_service/__init__.py @@ -0,0 +1,22 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from .async_client import HomepageServiceAsyncClient +from .client import HomepageServiceClient + +__all__ = ( + "HomepageServiceClient", + "HomepageServiceAsyncClient", +) diff --git a/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/homepage_service/async_client.py b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/homepage_service/async_client.py new file mode 100644 index 000000000000..433149cf930f --- /dev/null +++ b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/homepage_service/async_client.py @@ -0,0 +1,669 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +import functools +import re +from typing import ( + Callable, + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, +) + +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry_async as retries +from google.api_core.client_options import ClientOptions +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.shopping.merchant_accounts_v1beta import gapic_version as package_version + +try: + OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.AsyncRetry, object, None] # type: ignore + +from google.protobuf import field_mask_pb2 # type: ignore + +from google.shopping.merchant_accounts_v1beta.types import homepage as gsma_homepage +from google.shopping.merchant_accounts_v1beta.types import homepage + +from .client import HomepageServiceClient +from .transports.base import DEFAULT_CLIENT_INFO, HomepageServiceTransport +from .transports.grpc_asyncio import HomepageServiceGrpcAsyncIOTransport + + +class HomepageServiceAsyncClient: + """Service to support an API for a store's homepage.""" + + _client: HomepageServiceClient + + # Copy defaults from the synchronous client for use here. + # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. 
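+    # The attributes and path helpers below simply delegate to the synchronous
+    # HomepageServiceClient, so both clients resolve endpoints and resource
+    # names identically.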
+ DEFAULT_ENDPOINT = HomepageServiceClient.DEFAULT_ENDPOINT + DEFAULT_MTLS_ENDPOINT = HomepageServiceClient.DEFAULT_MTLS_ENDPOINT + _DEFAULT_ENDPOINT_TEMPLATE = HomepageServiceClient._DEFAULT_ENDPOINT_TEMPLATE + _DEFAULT_UNIVERSE = HomepageServiceClient._DEFAULT_UNIVERSE + + homepage_path = staticmethod(HomepageServiceClient.homepage_path) + parse_homepage_path = staticmethod(HomepageServiceClient.parse_homepage_path) + common_billing_account_path = staticmethod( + HomepageServiceClient.common_billing_account_path + ) + parse_common_billing_account_path = staticmethod( + HomepageServiceClient.parse_common_billing_account_path + ) + common_folder_path = staticmethod(HomepageServiceClient.common_folder_path) + parse_common_folder_path = staticmethod( + HomepageServiceClient.parse_common_folder_path + ) + common_organization_path = staticmethod( + HomepageServiceClient.common_organization_path + ) + parse_common_organization_path = staticmethod( + HomepageServiceClient.parse_common_organization_path + ) + common_project_path = staticmethod(HomepageServiceClient.common_project_path) + parse_common_project_path = staticmethod( + HomepageServiceClient.parse_common_project_path + ) + common_location_path = staticmethod(HomepageServiceClient.common_location_path) + parse_common_location_path = staticmethod( + HomepageServiceClient.parse_common_location_path + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + HomepageServiceAsyncClient: The constructed client. + """ + return HomepageServiceClient.from_service_account_info.__func__(HomepageServiceAsyncClient, info, *args, **kwargs) # type: ignore + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + HomepageServiceAsyncClient: The constructed client. + """ + return HomepageServiceClient.from_service_account_file.__func__(HomepageServiceAsyncClient, filename, *args, **kwargs) # type: ignore + + from_service_account_json = from_service_account_file + + @classmethod + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[ClientOptions] = None + ): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. 
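+
+        For example, when `GOOGLE_API_USE_CLIENT_CERTIFICATE` is unset and no client
+        certificate source is available, this returns the default API endpoint and
+        ``None`` for the client cert source.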
+ + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + return HomepageServiceClient.get_mtls_endpoint_and_cert_source(client_options) # type: ignore + + @property + def transport(self) -> HomepageServiceTransport: + """Returns the transport used by the client instance. + + Returns: + HomepageServiceTransport: The transport used by the client instance. + """ + return self._client.transport + + @property + def api_endpoint(self): + """Return the API endpoint used by the client instance. + + Returns: + str: The API endpoint used by the client instance. + """ + return self._client._api_endpoint + + @property + def universe_domain(self) -> str: + """Return the universe domain used by the client instance. + + Returns: + str: The universe domain used + by the client instance. + """ + return self._client._universe_domain + + get_transport_class = functools.partial( + type(HomepageServiceClient).get_transport_class, type(HomepageServiceClient) + ) + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[ + Union[ + str, HomepageServiceTransport, Callable[..., HomepageServiceTransport] + ] + ] = "grpc_asyncio", + client_options: Optional[ClientOptions] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the homepage service async client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Optional[Union[str,HomepageServiceTransport,Callable[..., HomepageServiceTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport to use. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the HomepageServiceTransport constructor. + If set to None, a transport is chosen automatically. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): + Custom options for the client. + + 1. The ``api_endpoint`` property can be used to override the + default endpoint provided by the client when ``transport`` is + not explicitly provided. Only if this property is not set and + ``transport`` was not explicitly provided, the endpoint is + determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment + variable, which have one of the following values: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto-switch to the + default mTLS endpoint if client certificate is present; this is + the default value). + + 2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide a client certificate for mTLS transport. If + not provided, the default SSL client certificate will be used if + present. 
If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + 3. The ``universe_domain`` property can be used to override the + default "googleapis.com" universe. Note that ``api_endpoint`` + property still takes precedence; and ``universe_domain`` is + currently not supported for mTLS. + + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + """ + self._client = HomepageServiceClient( + credentials=credentials, + transport=transport, + client_options=client_options, + client_info=client_info, + ) + + async def get_homepage( + self, + request: Optional[Union[homepage.GetHomepageRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> homepage.Homepage: + r"""Retrieves a store's homepage. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.shopping import merchant_accounts_v1beta + + async def sample_get_homepage(): + # Create a client + client = merchant_accounts_v1beta.HomepageServiceAsyncClient() + + # Initialize request argument(s) + request = merchant_accounts_v1beta.GetHomepageRequest( + name="name_value", + ) + + # Make the request + response = await client.get_homepage(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.shopping.merchant_accounts_v1beta.types.GetHomepageRequest, dict]]): + The request object. Request message for the ``GetHomepage`` method. + name (:class:`str`): + Required. The name of the homepage to retrieve. Format: + ``accounts/{account}/homepage`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.shopping.merchant_accounts_v1beta.types.Homepage: + A store's homepage. + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
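+        # An illustrative flattened call (the account ID is a placeholder):
+        #   homepage = await client.get_homepage(name="accounts/123/homepage")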
+ if not isinstance(request, homepage.GetHomepageRequest): + request = homepage.GetHomepageRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.get_homepage + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def update_homepage( + self, + request: Optional[Union[gsma_homepage.UpdateHomepageRequest, dict]] = None, + *, + homepage: Optional[gsma_homepage.Homepage] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> gsma_homepage.Homepage: + r"""Updates a store's homepage. Executing this method + requires admin access. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.shopping import merchant_accounts_v1beta + + async def sample_update_homepage(): + # Create a client + client = merchant_accounts_v1beta.HomepageServiceAsyncClient() + + # Initialize request argument(s) + request = merchant_accounts_v1beta.UpdateHomepageRequest( + ) + + # Make the request + response = await client.update_homepage(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.shopping.merchant_accounts_v1beta.types.UpdateHomepageRequest, dict]]): + The request object. Request message for the ``UpdateHomepage`` method. + homepage (:class:`google.shopping.merchant_accounts_v1beta.types.Homepage`): + Required. The new version of the + homepage. + + This corresponds to the ``homepage`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): + Required. List of fields being + updated. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.shopping.merchant_accounts_v1beta.types.Homepage: + A store's homepage. + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
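+        # An illustrative flattened call (field values are placeholders; ``uri`` is
+        # assumed to be a settable Homepage field):
+        #   response = await client.update_homepage(
+        #       homepage=gsma_homepage.Homepage(uri="https://example.com"),
+        #       update_mask=field_mask_pb2.FieldMask(paths=["uri"]),
+        #   )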
+ has_flattened_params = any([homepage, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, gsma_homepage.UpdateHomepageRequest): + request = gsma_homepage.UpdateHomepageRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if homepage is not None: + request.homepage = homepage + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.update_homepage + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("homepage.name", request.homepage.name),) + ), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def claim_homepage( + self, + request: Optional[Union[homepage.ClaimHomepageRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> homepage.Homepage: + r"""Claims a store's homepage. Executing this method requires admin + access. + + If the homepage is already claimed, this will recheck the + verification (unless the merchant is exempted from claiming, + which also exempts from verification) and return a successful + response. If ownership can no longer be verified, it will return + an error, but it won't clear the claim. In case of failure, a + canonical error message will be returned: \* PERMISSION_DENIED: + user doesn't have the necessary permissions on this MC account; + \* FAILED_PRECONDITION: - The account is not a Merchant Center + account; - MC account doesn't have a homepage; - claiming failed + (in this case the error message will contain more details). + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.shopping import merchant_accounts_v1beta + + async def sample_claim_homepage(): + # Create a client + client = merchant_accounts_v1beta.HomepageServiceAsyncClient() + + # Initialize request argument(s) + request = merchant_accounts_v1beta.ClaimHomepageRequest( + name="name_value", + ) + + # Make the request + response = await client.claim_homepage(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.shopping.merchant_accounts_v1beta.types.ClaimHomepageRequest, dict]]): + The request object. Request message for the ``ClaimHomepage`` method. 
+ retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.shopping.merchant_accounts_v1beta.types.Homepage: + A store's homepage. + """ + # Create or coerce a protobuf request object. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, homepage.ClaimHomepageRequest): + request = homepage.ClaimHomepageRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.claim_homepage + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def unclaim_homepage( + self, + request: Optional[Union[homepage.UnclaimHomepageRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> homepage.Homepage: + r"""Unclaims a store's homepage. Executing this method + requires admin access. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.shopping import merchant_accounts_v1beta + + async def sample_unclaim_homepage(): + # Create a client + client = merchant_accounts_v1beta.HomepageServiceAsyncClient() + + # Initialize request argument(s) + request = merchant_accounts_v1beta.UnclaimHomepageRequest( + name="name_value", + ) + + # Make the request + response = await client.unclaim_homepage(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.shopping.merchant_accounts_v1beta.types.UnclaimHomepageRequest, dict]]): + The request object. Request message for the ``UnclaimHomepage`` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.shopping.merchant_accounts_v1beta.types.Homepage: + A store's homepage. + """ + # Create or coerce a protobuf request object. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, homepage.UnclaimHomepageRequest): + request = homepage.UnclaimHomepageRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
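+        # ``_wrapped_methods`` is populated by ``_prep_wrapped_messages`` when the
+        # transport is constructed, so this is a dictionary lookup rather than
+        # re-wrapping the RPC on every call.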
+ rpc = self._client._transport._wrapped_methods[ + self._client._transport.unclaim_homepage + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def __aenter__(self) -> "HomepageServiceAsyncClient": + return self + + async def __aexit__(self, exc_type, exc, tb): + await self.transport.close() + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("HomepageServiceAsyncClient",) diff --git a/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/homepage_service/client.py b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/homepage_service/client.py new file mode 100644 index 000000000000..4af54a563701 --- /dev/null +++ b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/homepage_service/client.py @@ -0,0 +1,1079 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+#
+from collections import OrderedDict
+import os
+import re
+from typing import (
+    Callable,
+    Dict,
+    Mapping,
+    MutableMapping,
+    MutableSequence,
+    Optional,
+    Sequence,
+    Tuple,
+    Type,
+    Union,
+    cast,
+)
+import warnings
+
+from google.api_core import client_options as client_options_lib
+from google.api_core import exceptions as core_exceptions
+from google.api_core import gapic_v1
+from google.api_core import retry as retries
+from google.auth import credentials as ga_credentials  # type: ignore
+from google.auth.exceptions import MutualTLSChannelError  # type: ignore
+from google.auth.transport import mtls  # type: ignore
+from google.auth.transport.grpc import SslCredentials  # type: ignore
+from google.oauth2 import service_account  # type: ignore
+
+from google.shopping.merchant_accounts_v1beta import gapic_version as package_version
+
+try:
+    OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None]
+except AttributeError:  # pragma: NO COVER
+    OptionalRetry = Union[retries.Retry, object, None]  # type: ignore
+
+from google.protobuf import field_mask_pb2  # type: ignore
+
+from google.shopping.merchant_accounts_v1beta.types import homepage as gsma_homepage
+from google.shopping.merchant_accounts_v1beta.types import homepage
+
+from .transports.base import DEFAULT_CLIENT_INFO, HomepageServiceTransport
+from .transports.grpc import HomepageServiceGrpcTransport
+from .transports.grpc_asyncio import HomepageServiceGrpcAsyncIOTransport
+from .transports.rest import HomepageServiceRestTransport
+
+
+class HomepageServiceClientMeta(type):
+    """Metaclass for the HomepageService client.
+
+    This provides class-level methods for building and retrieving
+    support objects (e.g. transport) without polluting the client instance
+    objects.
+    """
+
+    _transport_registry = (
+        OrderedDict()
+    )  # type: Dict[str, Type[HomepageServiceTransport]]
+    _transport_registry["grpc"] = HomepageServiceGrpcTransport
+    _transport_registry["grpc_asyncio"] = HomepageServiceGrpcAsyncIOTransport
+    _transport_registry["rest"] = HomepageServiceRestTransport
+
+    def get_transport_class(
+        cls,
+        label: Optional[str] = None,
+    ) -> Type[HomepageServiceTransport]:
+        """Returns an appropriate transport class.
+
+        Args:
+            label: The name of the desired transport. If none is
+                provided, then the first transport in the registry is used.
+
+        Returns:
+            The transport class to use.
+        """
+        # If a specific transport is requested, return that one.
+        if label:
+            return cls._transport_registry[label]
+
+        # No transport is requested; return the default (that is, the first one
+        # in the dictionary).
+        return next(iter(cls._transport_registry.values()))
+
+
+class HomepageServiceClient(metaclass=HomepageServiceClientMeta):
+    """Service to support an API for a store's homepage."""
+
+    @staticmethod
+    def _get_default_mtls_endpoint(api_endpoint):
+        """Converts api endpoint to mTLS endpoint.
+
+        Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to
+        "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively.
+        Args:
+            api_endpoint (Optional[str]): the api endpoint to convert.
+        Returns:
+            str: converted mTLS api endpoint.
+        """
+        if not api_endpoint:
+            return api_endpoint
+
+        mtls_endpoint_re = re.compile(
+            r"(?P<name>[^.]+)(?P<mtls>\.mtls)?(?P<sandbox>\.sandbox)?(?P<googledomain>\.googleapis\.com)?"
+ ) + + m = mtls_endpoint_re.match(api_endpoint) + name, mtls, sandbox, googledomain = m.groups() + if mtls or not googledomain: + return api_endpoint + + if sandbox: + return api_endpoint.replace( + "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" + ) + + return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") + + # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. + DEFAULT_ENDPOINT = "merchantapi.googleapis.com" + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore + DEFAULT_ENDPOINT + ) + + _DEFAULT_ENDPOINT_TEMPLATE = "merchantapi.{UNIVERSE_DOMAIN}" + _DEFAULT_UNIVERSE = "googleapis.com" + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + HomepageServiceClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + HomepageServiceClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_file(filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> HomepageServiceTransport: + """Returns the transport used by the client instance. + + Returns: + HomepageServiceTransport: The transport used by the client + instance. 
+        """
+        return self._transport
+
+    @staticmethod
+    def homepage_path(
+        account: str,
+    ) -> str:
+        """Returns a fully-qualified homepage string."""
+        return "accounts/{account}/homepage".format(
+            account=account,
+        )
+
+    @staticmethod
+    def parse_homepage_path(path: str) -> Dict[str, str]:
+        """Parses a homepage path into its component segments."""
+        m = re.match(r"^accounts/(?P<account>.+?)/homepage$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_billing_account_path(
+        billing_account: str,
+    ) -> str:
+        """Returns a fully-qualified billing_account string."""
+        return "billingAccounts/{billing_account}".format(
+            billing_account=billing_account,
+        )
+
+    @staticmethod
+    def parse_common_billing_account_path(path: str) -> Dict[str, str]:
+        """Parse a billing_account path into its component segments."""
+        m = re.match(r"^billingAccounts/(?P<billing_account>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_folder_path(
+        folder: str,
+    ) -> str:
+        """Returns a fully-qualified folder string."""
+        return "folders/{folder}".format(
+            folder=folder,
+        )
+
+    @staticmethod
+    def parse_common_folder_path(path: str) -> Dict[str, str]:
+        """Parse a folder path into its component segments."""
+        m = re.match(r"^folders/(?P<folder>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_organization_path(
+        organization: str,
+    ) -> str:
+        """Returns a fully-qualified organization string."""
+        return "organizations/{organization}".format(
+            organization=organization,
+        )
+
+    @staticmethod
+    def parse_common_organization_path(path: str) -> Dict[str, str]:
+        """Parse an organization path into its component segments."""
+        m = re.match(r"^organizations/(?P<organization>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_project_path(
+        project: str,
+    ) -> str:
+        """Returns a fully-qualified project string."""
+        return "projects/{project}".format(
+            project=project,
+        )
+
+    @staticmethod
+    def parse_common_project_path(path: str) -> Dict[str, str]:
+        """Parse a project path into its component segments."""
+        m = re.match(r"^projects/(?P<project>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_location_path(
+        project: str,
+        location: str,
+    ) -> str:
+        """Returns a fully-qualified location string."""
+        return "projects/{project}/locations/{location}".format(
+            project=project,
+            location=location,
+        )
+
+    @staticmethod
+    def parse_common_location_path(path: str) -> Dict[str, str]:
+        """Parse a location path into its component segments."""
+        m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @classmethod
+    def get_mtls_endpoint_and_cert_source(
+        cls, client_options: Optional[client_options_lib.ClientOptions] = None
+    ):
+        """Deprecated. Return the API endpoint and client cert source for mutual TLS.
+
+        The client cert source is determined in the following order:
+        (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the
+        client cert source is None.
+        (2) if `client_options.client_cert_source` is provided, use the provided one; if the
+        default client cert source exists, use the default one; otherwise the client cert
+        source is None.
+
+        The API endpoint is determined in the following order:
+        (1) if `client_options.api_endpoint` is provided, use the provided one.
+ (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + + warnings.warn( + "get_mtls_endpoint_and_cert_source is deprecated. Use the api_endpoint property instead.", + DeprecationWarning, + ) + if client_options is None: + client_options = client_options_lib.ClientOptions() + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_client_cert not in ("true", "false"): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError( + "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Figure out the client cert source to use. + client_cert_source = None + if use_client_cert == "true": + if client_options.client_cert_source: + client_cert_source = client_options.client_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + + # Figure out which api endpoint to use. + if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + elif use_mtls_endpoint == "always" or ( + use_mtls_endpoint == "auto" and client_cert_source + ): + api_endpoint = cls.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = cls.DEFAULT_ENDPOINT + + return api_endpoint, client_cert_source + + @staticmethod + def _read_environment_variables(): + """Returns the environment variables used by the client. + + Returns: + Tuple[bool, str, str]: returns the GOOGLE_API_USE_CLIENT_CERTIFICATE, + GOOGLE_API_USE_MTLS_ENDPOINT, and GOOGLE_CLOUD_UNIVERSE_DOMAIN environment variables. + + Raises: + ValueError: If GOOGLE_API_USE_CLIENT_CERTIFICATE is not + any of ["true", "false"]. + google.auth.exceptions.MutualTLSChannelError: If GOOGLE_API_USE_MTLS_ENDPOINT + is not any of ["auto", "never", "always"]. + """ + use_client_cert = os.getenv( + "GOOGLE_API_USE_CLIENT_CERTIFICATE", "false" + ).lower() + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto").lower() + universe_domain_env = os.getenv("GOOGLE_CLOUD_UNIVERSE_DOMAIN") + if use_client_cert not in ("true", "false"): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError( + "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + return use_client_cert == "true", use_mtls_endpoint, universe_domain_env + + @staticmethod + def _get_client_cert_source(provided_cert_source, use_cert_flag): + """Return the client cert source to be used by the client. 
+ + Args: + provided_cert_source (bytes): The client certificate source provided. + use_cert_flag (bool): A flag indicating whether to use the client certificate. + + Returns: + bytes or None: The client cert source to be used by the client. + """ + client_cert_source = None + if use_cert_flag: + if provided_cert_source: + client_cert_source = provided_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + return client_cert_source + + @staticmethod + def _get_api_endpoint( + api_override, client_cert_source, universe_domain, use_mtls_endpoint + ): + """Return the API endpoint used by the client. + + Args: + api_override (str): The API endpoint override. If specified, this is always + the return value of this function and the other arguments are not used. + client_cert_source (bytes): The client certificate source used by the client. + universe_domain (str): The universe domain used by the client. + use_mtls_endpoint (str): How to use the mTLS endpoint, which depends also on the other parameters. + Possible values are "always", "auto", or "never". + + Returns: + str: The API endpoint to be used by the client. + """ + if api_override is not None: + api_endpoint = api_override + elif use_mtls_endpoint == "always" or ( + use_mtls_endpoint == "auto" and client_cert_source + ): + _default_universe = HomepageServiceClient._DEFAULT_UNIVERSE + if universe_domain != _default_universe: + raise MutualTLSChannelError( + f"mTLS is not supported in any universe other than {_default_universe}." + ) + api_endpoint = HomepageServiceClient.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = HomepageServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=universe_domain + ) + return api_endpoint + + @staticmethod + def _get_universe_domain( + client_universe_domain: Optional[str], universe_domain_env: Optional[str] + ) -> str: + """Return the universe domain used by the client. + + Args: + client_universe_domain (Optional[str]): The universe domain configured via the client options. + universe_domain_env (Optional[str]): The universe domain configured via the "GOOGLE_CLOUD_UNIVERSE_DOMAIN" environment variable. + + Returns: + str: The universe domain to be used by the client. + + Raises: + ValueError: If the universe domain is an empty string. + """ + universe_domain = HomepageServiceClient._DEFAULT_UNIVERSE + if client_universe_domain is not None: + universe_domain = client_universe_domain + elif universe_domain_env is not None: + universe_domain = universe_domain_env + if len(universe_domain.strip()) == 0: + raise ValueError("Universe Domain cannot be an empty string.") + return universe_domain + + @staticmethod + def _compare_universes( + client_universe: str, credentials: ga_credentials.Credentials + ) -> bool: + """Returns True iff the universe domains used by the client and credentials match. + + Args: + client_universe (str): The universe domain configured via the client options. + credentials (ga_credentials.Credentials): The credentials being used in the client. + + Returns: + bool: True iff client_universe matches the universe in credentials. + + Raises: + ValueError: when client_universe does not match the universe in credentials. 
+ """ + + default_universe = HomepageServiceClient._DEFAULT_UNIVERSE + credentials_universe = getattr(credentials, "universe_domain", default_universe) + + if client_universe != credentials_universe: + raise ValueError( + "The configured universe domain " + f"({client_universe}) does not match the universe domain " + f"found in the credentials ({credentials_universe}). " + "If you haven't configured the universe domain explicitly, " + f"`{default_universe}` is the default." + ) + return True + + def _validate_universe_domain(self): + """Validates client's and credentials' universe domains are consistent. + + Returns: + bool: True iff the configured universe domain is valid. + + Raises: + ValueError: If the configured universe domain is not valid. + """ + self._is_universe_domain_valid = ( + self._is_universe_domain_valid + or HomepageServiceClient._compare_universes( + self.universe_domain, self.transport._credentials + ) + ) + return self._is_universe_domain_valid + + @property + def api_endpoint(self): + """Return the API endpoint used by the client instance. + + Returns: + str: The API endpoint used by the client instance. + """ + return self._api_endpoint + + @property + def universe_domain(self) -> str: + """Return the universe domain used by the client instance. + + Returns: + str: The universe domain used by the client instance. + """ + return self._universe_domain + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[ + Union[ + str, HomepageServiceTransport, Callable[..., HomepageServiceTransport] + ] + ] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the homepage service client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Optional[Union[str,HomepageServiceTransport,Callable[..., HomepageServiceTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the HomepageServiceTransport constructor. + If set to None, a transport is chosen automatically. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): + Custom options for the client. + + 1. The ``api_endpoint`` property can be used to override the + default endpoint provided by the client when ``transport`` is + not explicitly provided. Only if this property is not set and + ``transport`` was not explicitly provided, the endpoint is + determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment + variable, which have one of the following values: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto-switch to the + default mTLS endpoint if client certificate is present; this is + the default value). + + 2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide a client certificate for mTLS transport. If + not provided, the default SSL client certificate will be used if + present. 
If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + 3. The ``universe_domain`` property can be used to override the + default "googleapis.com" universe. Note that the ``api_endpoint`` + property still takes precedence; and ``universe_domain`` is + currently not supported for mTLS. + + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + """ + self._client_options = client_options + if isinstance(self._client_options, dict): + self._client_options = client_options_lib.from_dict(self._client_options) + if self._client_options is None: + self._client_options = client_options_lib.ClientOptions() + self._client_options = cast( + client_options_lib.ClientOptions, self._client_options + ) + + universe_domain_opt = getattr(self._client_options, "universe_domain", None) + + ( + self._use_client_cert, + self._use_mtls_endpoint, + self._universe_domain_env, + ) = HomepageServiceClient._read_environment_variables() + self._client_cert_source = HomepageServiceClient._get_client_cert_source( + self._client_options.client_cert_source, self._use_client_cert + ) + self._universe_domain = HomepageServiceClient._get_universe_domain( + universe_domain_opt, self._universe_domain_env + ) + self._api_endpoint = None # updated below, depending on `transport` + + # Initialize the universe domain validation. + self._is_universe_domain_valid = False + + api_key_value = getattr(self._client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError( + "client_options.api_key and credentials are mutually exclusive" + ) + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + transport_provided = isinstance(transport, HomepageServiceTransport) + if transport_provided: + # transport is a HomepageServiceTransport instance. + if credentials or self._client_options.credentials_file or api_key_value: + raise ValueError( + "When providing a transport instance, " + "provide its credentials directly." + ) + if self._client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." 
+ ) + self._transport = cast(HomepageServiceTransport, transport) + self._api_endpoint = self._transport.host + + self._api_endpoint = ( + self._api_endpoint + or HomepageServiceClient._get_api_endpoint( + self._client_options.api_endpoint, + self._client_cert_source, + self._universe_domain, + self._use_mtls_endpoint, + ) + ) + + if not transport_provided: + import google.auth._default # type: ignore + + if api_key_value and hasattr( + google.auth._default, "get_api_key_credentials" + ): + credentials = google.auth._default.get_api_key_credentials( + api_key_value + ) + + transport_init: Union[ + Type[HomepageServiceTransport], Callable[..., HomepageServiceTransport] + ] = ( + type(self).get_transport_class(transport) + if isinstance(transport, str) or transport is None + else cast(Callable[..., HomepageServiceTransport], transport) + ) + # initialize with the provided callable or the passed in class + self._transport = transport_init( + credentials=credentials, + credentials_file=self._client_options.credentials_file, + host=self._api_endpoint, + scopes=self._client_options.scopes, + client_cert_source_for_mtls=self._client_cert_source, + quota_project_id=self._client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + api_audience=self._client_options.api_audience, + ) + + def get_homepage( + self, + request: Optional[Union[homepage.GetHomepageRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> homepage.Homepage: + r"""Retrieves a store's homepage. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.shopping import merchant_accounts_v1beta + + def sample_get_homepage(): + # Create a client + client = merchant_accounts_v1beta.HomepageServiceClient() + + # Initialize request argument(s) + request = merchant_accounts_v1beta.GetHomepageRequest( + name="name_value", + ) + + # Make the request + response = client.get_homepage(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.shopping.merchant_accounts_v1beta.types.GetHomepageRequest, dict]): + The request object. Request message for the ``GetHomepage`` method. + name (str): + Required. The name of the homepage to retrieve. Format: + ``accounts/{account}/homepage`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.shopping.merchant_accounts_v1beta.types.Homepage: + A store's homepage. + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
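+        # (Flattened arguments such as ``name`` are a convenience for simple
+        # calls; pass a full request object for anything more elaborate.)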
+ has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, homepage.GetHomepageRequest): + request = homepage.GetHomepageRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_homepage] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def update_homepage( + self, + request: Optional[Union[gsma_homepage.UpdateHomepageRequest, dict]] = None, + *, + homepage: Optional[gsma_homepage.Homepage] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> gsma_homepage.Homepage: + r"""Updates a store's homepage. Executing this method + requires admin access. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.shopping import merchant_accounts_v1beta + + def sample_update_homepage(): + # Create a client + client = merchant_accounts_v1beta.HomepageServiceClient() + + # Initialize request argument(s) + request = merchant_accounts_v1beta.UpdateHomepageRequest( + ) + + # Make the request + response = client.update_homepage(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.shopping.merchant_accounts_v1beta.types.UpdateHomepageRequest, dict]): + The request object. Request message for the ``UpdateHomepage`` method. + homepage (google.shopping.merchant_accounts_v1beta.types.Homepage): + Required. The new version of the + homepage. + + This corresponds to the ``homepage`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Required. List of fields being + updated. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.shopping.merchant_accounts_v1beta.types.Homepage: + A store's homepage. + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([homepage, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, gsma_homepage.UpdateHomepageRequest): + request = gsma_homepage.UpdateHomepageRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if homepage is not None: + request.homepage = homepage + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update_homepage] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("homepage.name", request.homepage.name),) + ), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def claim_homepage( + self, + request: Optional[Union[homepage.ClaimHomepageRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> homepage.Homepage: + r"""Claims a store's homepage. Executing this method requires admin + access. + + If the homepage is already claimed, this will recheck the + verification (unless the merchant is exempted from claiming, + which also exempts from verification) and return a successful + response. If ownership can no longer be verified, it will return + an error, but it won't clear the claim. In case of failure, a + canonical error message will be returned: \* PERMISSION_DENIED: + user doesn't have the necessary permissions on this MC account; + \* FAILED_PRECONDITION: - The account is not a Merchant Center + account; - MC account doesn't have a homepage; - claiming failed + (in this case the error message will contain more details). + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.shopping import merchant_accounts_v1beta + + def sample_claim_homepage(): + # Create a client + client = merchant_accounts_v1beta.HomepageServiceClient() + + # Initialize request argument(s) + request = merchant_accounts_v1beta.ClaimHomepageRequest( + name="name_value", + ) + + # Make the request + response = client.claim_homepage(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.shopping.merchant_accounts_v1beta.types.ClaimHomepageRequest, dict]): + The request object. Request message for the ``ClaimHomepage`` method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.shopping.merchant_accounts_v1beta.types.Homepage: + A store's homepage. + """ + # Create or coerce a protobuf request object. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, homepage.ClaimHomepageRequest): + request = homepage.ClaimHomepageRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.claim_homepage] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def unclaim_homepage( + self, + request: Optional[Union[homepage.UnclaimHomepageRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> homepage.Homepage: + r"""Unclaims a store's homepage. Executing this method + requires admin access. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.shopping import merchant_accounts_v1beta + + def sample_unclaim_homepage(): + # Create a client + client = merchant_accounts_v1beta.HomepageServiceClient() + + # Initialize request argument(s) + request = merchant_accounts_v1beta.UnclaimHomepageRequest( + name="name_value", + ) + + # Make the request + response = client.unclaim_homepage(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.shopping.merchant_accounts_v1beta.types.UnclaimHomepageRequest, dict]): + The request object. Request message for the ``UnclaimHomepage`` method. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.shopping.merchant_accounts_v1beta.types.Homepage: + A store's homepage. + """ + # Create or coerce a protobuf request object. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, homepage.UnclaimHomepageRequest): + request = homepage.UnclaimHomepageRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.unclaim_homepage] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def __enter__(self) -> "HomepageServiceClient": + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! + """ + self.transport.close() + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("HomepageServiceClient",) diff --git a/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/homepage_service/transports/__init__.py b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/homepage_service/transports/__init__.py new file mode 100644 index 000000000000..ad9d4ca2c7ca --- /dev/null +++ b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/homepage_service/transports/__init__.py @@ -0,0 +1,36 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +from typing import Dict, Type + +from .base import HomepageServiceTransport +from .grpc import HomepageServiceGrpcTransport +from .grpc_asyncio import HomepageServiceGrpcAsyncIOTransport +from .rest import HomepageServiceRestInterceptor, HomepageServiceRestTransport + +# Compile a registry of transports. 
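+# The keys ("grpc", "grpc_asyncio", "rest") are the strings accepted by the
+# client's ``transport`` argument; the values are the transport classes that
+# back them.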
+_transport_registry = OrderedDict() # type: Dict[str, Type[HomepageServiceTransport]] +_transport_registry["grpc"] = HomepageServiceGrpcTransport +_transport_registry["grpc_asyncio"] = HomepageServiceGrpcAsyncIOTransport +_transport_registry["rest"] = HomepageServiceRestTransport + +__all__ = ( + "HomepageServiceTransport", + "HomepageServiceGrpcTransport", + "HomepageServiceGrpcAsyncIOTransport", + "HomepageServiceRestTransport", + "HomepageServiceRestInterceptor", +) diff --git a/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/homepage_service/transports/base.py b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/homepage_service/transports/base.py new file mode 100644 index 000000000000..d71584de3021 --- /dev/null +++ b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/homepage_service/transports/base.py @@ -0,0 +1,202 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import abc +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union + +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.shopping.merchant_accounts_v1beta import gapic_version as package_version +from google.shopping.merchant_accounts_v1beta.types import homepage as gsma_homepage +from google.shopping.merchant_accounts_v1beta.types import homepage + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +class HomepageServiceTransport(abc.ABC): + """Abstract transport class for HomepageService.""" + + AUTH_SCOPES = ("https://www.googleapis.com/auth/content",) + + DEFAULT_HOST: str = "merchantapi.googleapis.com" + + def __init__( + self, + *, + host: str = DEFAULT_HOST, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + **kwargs, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'merchantapi.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. 
+ credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A list of scopes. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + """ + + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} + + # Save the scopes. + self._scopes = scopes + + # If no credentials are provided, then determine the appropriate + # defaults. + if credentials and credentials_file: + raise core_exceptions.DuplicateCredentialArgs( + "'credentials_file' and 'credentials' are mutually exclusive" + ) + + if credentials_file is not None: + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, **scopes_kwargs, quota_project_id=quota_project_id + ) + elif credentials is None: + credentials, _ = google.auth.default( + **scopes_kwargs, quota_project_id=quota_project_id + ) + # Don't apply audience if the credentials file passed from user. + if hasattr(credentials, "with_gdch_audience"): + credentials = credentials.with_gdch_audience( + api_audience if api_audience else host + ) + + # If the credentials are service account credentials, then always try to use self signed JWT. + if ( + always_use_jwt_access + and isinstance(credentials, service_account.Credentials) + and hasattr(service_account.Credentials, "with_always_use_jwt_access") + ): + credentials = credentials.with_always_use_jwt_access(True) + + # Save the credentials. + self._credentials = credentials + + # Save the hostname. Default to port 443 (HTTPS) if none is specified. + if ":" not in host: + host += ":443" + self._host = host + + @property + def host(self): + return self._host + + def _prep_wrapped_messages(self, client_info): + # Precompute the wrapped methods. + self._wrapped_methods = { + self.get_homepage: gapic_v1.method.wrap_method( + self.get_homepage, + default_timeout=None, + client_info=client_info, + ), + self.update_homepage: gapic_v1.method.wrap_method( + self.update_homepage, + default_timeout=None, + client_info=client_info, + ), + self.claim_homepage: gapic_v1.method.wrap_method( + self.claim_homepage, + default_timeout=None, + client_info=client_info, + ), + self.unclaim_homepage: gapic_v1.method.wrap_method( + self.unclaim_homepage, + default_timeout=None, + client_info=client_info, + ), + } + + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! 
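+
+        A hedged illustration of the usual alternative (not part of the
+        generated surface): let the client's context manager close the
+        transport for you, as the ``__exit__``/``__aexit__`` hooks in this
+        change do. It assumes application default credentials are available.
+
+        .. code-block:: python
+
+            from google.shopping import merchant_accounts_v1beta
+
+            with merchant_accounts_v1beta.HomepageServiceClient() as client:
+                homepage = client.get_homepage(
+                    request=merchant_accounts_v1beta.GetHomepageRequest(
+                        name="accounts/123/homepage",  # placeholder account id
+                    )
+                )
+            # The transport is closed automatically when the block exits.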
+ """ + raise NotImplementedError() + + @property + def get_homepage( + self, + ) -> Callable[ + [homepage.GetHomepageRequest], + Union[homepage.Homepage, Awaitable[homepage.Homepage]], + ]: + raise NotImplementedError() + + @property + def update_homepage( + self, + ) -> Callable[ + [gsma_homepage.UpdateHomepageRequest], + Union[gsma_homepage.Homepage, Awaitable[gsma_homepage.Homepage]], + ]: + raise NotImplementedError() + + @property + def claim_homepage( + self, + ) -> Callable[ + [homepage.ClaimHomepageRequest], + Union[homepage.Homepage, Awaitable[homepage.Homepage]], + ]: + raise NotImplementedError() + + @property + def unclaim_homepage( + self, + ) -> Callable[ + [homepage.UnclaimHomepageRequest], + Union[homepage.Homepage, Awaitable[homepage.Homepage]], + ]: + raise NotImplementedError() + + @property + def kind(self) -> str: + raise NotImplementedError() + + +__all__ = ("HomepageServiceTransport",) diff --git a/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/homepage_service/transports/grpc.py b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/homepage_service/transports/grpc.py new file mode 100644 index 000000000000..cafabba8b38b --- /dev/null +++ b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/homepage_service/transports/grpc.py @@ -0,0 +1,363 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, grpc_helpers +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +import grpc # type: ignore + +from google.shopping.merchant_accounts_v1beta.types import homepage as gsma_homepage +from google.shopping.merchant_accounts_v1beta.types import homepage + +from .base import DEFAULT_CLIENT_INFO, HomepageServiceTransport + + +class HomepageServiceGrpcTransport(HomepageServiceTransport): + """gRPC backend transport for HomepageService. + + Service to support an API for a store's homepage. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. 
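+
+    A minimal usage sketch (illustrative only, not part of the generated
+    surface; it assumes application default credentials are available).
+    Transports are normally selected through the client rather than
+    instantiated directly:
+
+    .. code-block:: python
+
+        from google.shopping import merchant_accounts_v1beta
+
+        # "grpc" is the registry key for this transport; it is also the
+        # default when no transport is requested.
+        client = merchant_accounts_v1beta.HomepageServiceClient(transport="grpc")
+
+        homepage = client.get_homepage(
+            request=merchant_accounts_v1beta.GetHomepageRequest(
+                name="accounts/123/homepage",  # placeholder account id
+            )
+        )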
+ """ + + _stubs: Dict[str, Callable] + + def __init__( + self, + *, + host: str = "merchantapi.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'merchantapi.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if a ``channel`` instance is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if a ``channel`` instance is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if a ``channel`` instance is provided. + channel (Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]]): + A ``Channel`` instance through which to make calls, or a Callable + that constructs and returns one. If set to None, ``self.create_channel`` + is used to create the channel. If a Callable is given, it will be called + with the same arguments as used in ``self.create_channel``. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if a ``channel`` instance is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. 
+ google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if isinstance(channel, grpc.Channel): + # Ignore credentials if a channel was passed. + credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + # initialize with the provided callable or the default channel + channel_init = channel or type(self).create_channel + self._grpc_channel = channel_init( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @classmethod + def create_channel( + cls, + host: str = "merchantapi.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> grpc.Channel: + """Create and return a gRPC channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. 
+ kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + grpc.Channel: A gRPC channel object. + + Raises: + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + + return grpc_helpers.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs, + ) + + @property + def grpc_channel(self) -> grpc.Channel: + """Return the channel designed to connect to this service.""" + return self._grpc_channel + + @property + def get_homepage( + self, + ) -> Callable[[homepage.GetHomepageRequest], homepage.Homepage]: + r"""Return a callable for the get homepage method over gRPC. + + Retrieves a store's homepage. + + Returns: + Callable[[~.GetHomepageRequest], + ~.Homepage]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_homepage" not in self._stubs: + self._stubs["get_homepage"] = self.grpc_channel.unary_unary( + "/google.shopping.merchant.accounts.v1beta.HomepageService/GetHomepage", + request_serializer=homepage.GetHomepageRequest.serialize, + response_deserializer=homepage.Homepage.deserialize, + ) + return self._stubs["get_homepage"] + + @property + def update_homepage( + self, + ) -> Callable[[gsma_homepage.UpdateHomepageRequest], gsma_homepage.Homepage]: + r"""Return a callable for the update homepage method over gRPC. + + Updates a store's homepage. Executing this method + requires admin access. + + Returns: + Callable[[~.UpdateHomepageRequest], + ~.Homepage]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_homepage" not in self._stubs: + self._stubs["update_homepage"] = self.grpc_channel.unary_unary( + "/google.shopping.merchant.accounts.v1beta.HomepageService/UpdateHomepage", + request_serializer=gsma_homepage.UpdateHomepageRequest.serialize, + response_deserializer=gsma_homepage.Homepage.deserialize, + ) + return self._stubs["update_homepage"] + + @property + def claim_homepage( + self, + ) -> Callable[[homepage.ClaimHomepageRequest], homepage.Homepage]: + r"""Return a callable for the claim homepage method over gRPC. + + Claims a store's homepage. Executing this method requires admin + access. + + If the homepage is already claimed, this will recheck the + verification (unless the merchant is exempted from claiming, + which also exempts from verification) and return a successful + response. If ownership can no longer be verified, it will return + an error, but it won't clear the claim. In case of failure, a + canonical error message will be returned: \* PERMISSION_DENIED: + user doesn't have the necessary permissions on this MC account; + \* FAILED_PRECONDITION: - The account is not a Merchant Center + account; - MC account doesn't have a homepage; - claiming failed + (in this case the error message will contain more details). 
+ + Returns: + Callable[[~.ClaimHomepageRequest], + ~.Homepage]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "claim_homepage" not in self._stubs: + self._stubs["claim_homepage"] = self.grpc_channel.unary_unary( + "/google.shopping.merchant.accounts.v1beta.HomepageService/ClaimHomepage", + request_serializer=homepage.ClaimHomepageRequest.serialize, + response_deserializer=homepage.Homepage.deserialize, + ) + return self._stubs["claim_homepage"] + + @property + def unclaim_homepage( + self, + ) -> Callable[[homepage.UnclaimHomepageRequest], homepage.Homepage]: + r"""Return a callable for the unclaim homepage method over gRPC. + + Unclaims a store's homepage. Executing this method + requires admin access. + + Returns: + Callable[[~.UnclaimHomepageRequest], + ~.Homepage]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "unclaim_homepage" not in self._stubs: + self._stubs["unclaim_homepage"] = self.grpc_channel.unary_unary( + "/google.shopping.merchant.accounts.v1beta.HomepageService/UnclaimHomepage", + request_serializer=homepage.UnclaimHomepageRequest.serialize, + response_deserializer=homepage.Homepage.deserialize, + ) + return self._stubs["unclaim_homepage"] + + def close(self): + self.grpc_channel.close() + + @property + def kind(self) -> str: + return "grpc" + + +__all__ = ("HomepageServiceGrpcTransport",) diff --git a/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/homepage_service/transports/grpc_asyncio.py b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/homepage_service/transports/grpc_asyncio.py new file mode 100644 index 000000000000..c019eebb7b1c --- /dev/null +++ b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/homepage_service/transports/grpc_asyncio.py @@ -0,0 +1,390 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
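+#
+# Illustrative usage sketch (a minimal example, not part of the generated API
+# surface): the AsyncIO transport defined in this module is normally selected
+# by passing ``transport="grpc_asyncio"`` to ``HomepageServiceAsyncClient``
+# (it is also the async client's default). Application default credentials are
+# assumed, and the account ID below is a hypothetical placeholder.
+#
+#     import asyncio
+#
+#     from google.shopping import merchant_accounts_v1beta
+#
+#     async def main():
+#         client = merchant_accounts_v1beta.HomepageServiceAsyncClient(
+#             transport="grpc_asyncio",
+#         )
+#         request = merchant_accounts_v1beta.GetHomepageRequest(
+#             name="accounts/123/homepage",  # placeholder resource name
+#         )
+#         homepage = await client.get_homepage(request=request)
+#         print(homepage)
+#
+#     asyncio.run(main())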
+# +from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1, grpc_helpers_async +from google.api_core import retry_async as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +import grpc # type: ignore +from grpc.experimental import aio # type: ignore + +from google.shopping.merchant_accounts_v1beta.types import homepage as gsma_homepage +from google.shopping.merchant_accounts_v1beta.types import homepage + +from .base import DEFAULT_CLIENT_INFO, HomepageServiceTransport +from .grpc import HomepageServiceGrpcTransport + + +class HomepageServiceGrpcAsyncIOTransport(HomepageServiceTransport): + """gRPC AsyncIO backend transport for HomepageService. + + Service to support an API for a store's homepage. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + + _grpc_channel: aio.Channel + _stubs: Dict[str, Callable] = {} + + @classmethod + def create_channel( + cls, + host: str = "merchantapi.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> aio.Channel: + """Create and return a gRPC AsyncIO channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + aio.Channel: A gRPC AsyncIO channel object. 
+ """ + + return grpc_helpers_async.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs, + ) + + def __init__( + self, + *, + host: str = "merchantapi.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[Union[aio.Channel, Callable[..., aio.Channel]]] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'merchantapi.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if a ``channel`` instance is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if a ``channel`` instance is provided. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + channel (Optional[Union[aio.Channel, Callable[..., aio.Channel]]]): + A ``Channel`` instance through which to make calls, or a Callable + that constructs and returns one. If set to None, ``self.create_channel`` + is used to create the channel. If a Callable is given, it will be called + with the same arguments as used in ``self.create_channel``. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if a ``channel`` instance is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. 
+ Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if isinstance(channel, aio.Channel): + # Ignore credentials if a channel was passed. + credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + # initialize with the provided callable or the default channel + channel_init = channel or type(self).create_channel + self._grpc_channel = channel_init( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @property + def grpc_channel(self) -> aio.Channel: + """Create the channel designed to connect to this service. + + This property caches on the instance; repeated calls return + the same channel. + """ + # Return the channel from cache. + return self._grpc_channel + + @property + def get_homepage( + self, + ) -> Callable[[homepage.GetHomepageRequest], Awaitable[homepage.Homepage]]: + r"""Return a callable for the get homepage method over gRPC. + + Retrieves a store's homepage. + + Returns: + Callable[[~.GetHomepageRequest], + Awaitable[~.Homepage]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "get_homepage" not in self._stubs: + self._stubs["get_homepage"] = self.grpc_channel.unary_unary( + "/google.shopping.merchant.accounts.v1beta.HomepageService/GetHomepage", + request_serializer=homepage.GetHomepageRequest.serialize, + response_deserializer=homepage.Homepage.deserialize, + ) + return self._stubs["get_homepage"] + + @property + def update_homepage( + self, + ) -> Callable[ + [gsma_homepage.UpdateHomepageRequest], Awaitable[gsma_homepage.Homepage] + ]: + r"""Return a callable for the update homepage method over gRPC. + + Updates a store's homepage. Executing this method + requires admin access. + + Returns: + Callable[[~.UpdateHomepageRequest], + Awaitable[~.Homepage]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_homepage" not in self._stubs: + self._stubs["update_homepage"] = self.grpc_channel.unary_unary( + "/google.shopping.merchant.accounts.v1beta.HomepageService/UpdateHomepage", + request_serializer=gsma_homepage.UpdateHomepageRequest.serialize, + response_deserializer=gsma_homepage.Homepage.deserialize, + ) + return self._stubs["update_homepage"] + + @property + def claim_homepage( + self, + ) -> Callable[[homepage.ClaimHomepageRequest], Awaitable[homepage.Homepage]]: + r"""Return a callable for the claim homepage method over gRPC. + + Claims a store's homepage. Executing this method requires admin + access. + + If the homepage is already claimed, this will recheck the + verification (unless the merchant is exempted from claiming, + which also exempts from verification) and return a successful + response. If ownership can no longer be verified, it will return + an error, but it won't clear the claim. In case of failure, a + canonical error message will be returned: \* PERMISSION_DENIED: + user doesn't have the necessary permissions on this MC account; + \* FAILED_PRECONDITION: - The account is not a Merchant Center + account; - MC account doesn't have a homepage; - claiming failed + (in this case the error message will contain more details). + + Returns: + Callable[[~.ClaimHomepageRequest], + Awaitable[~.Homepage]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "claim_homepage" not in self._stubs: + self._stubs["claim_homepage"] = self.grpc_channel.unary_unary( + "/google.shopping.merchant.accounts.v1beta.HomepageService/ClaimHomepage", + request_serializer=homepage.ClaimHomepageRequest.serialize, + response_deserializer=homepage.Homepage.deserialize, + ) + return self._stubs["claim_homepage"] + + @property + def unclaim_homepage( + self, + ) -> Callable[[homepage.UnclaimHomepageRequest], Awaitable[homepage.Homepage]]: + r"""Return a callable for the unclaim homepage method over gRPC. + + Unclaims a store's homepage. Executing this method + requires admin access. + + Returns: + Callable[[~.UnclaimHomepageRequest], + Awaitable[~.Homepage]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "unclaim_homepage" not in self._stubs: + self._stubs["unclaim_homepage"] = self.grpc_channel.unary_unary( + "/google.shopping.merchant.accounts.v1beta.HomepageService/UnclaimHomepage", + request_serializer=homepage.UnclaimHomepageRequest.serialize, + response_deserializer=homepage.Homepage.deserialize, + ) + return self._stubs["unclaim_homepage"] + + def _prep_wrapped_messages(self, client_info): + """Precompute the wrapped methods, overriding the base class method to use async wrappers.""" + self._wrapped_methods = { + self.get_homepage: gapic_v1.method_async.wrap_method( + self.get_homepage, + default_timeout=None, + client_info=client_info, + ), + self.update_homepage: gapic_v1.method_async.wrap_method( + self.update_homepage, + default_timeout=None, + client_info=client_info, + ), + self.claim_homepage: gapic_v1.method_async.wrap_method( + self.claim_homepage, + default_timeout=None, + client_info=client_info, + ), + self.unclaim_homepage: gapic_v1.method_async.wrap_method( + self.unclaim_homepage, + default_timeout=None, + client_info=client_info, + ), + } + + def close(self): + return self.grpc_channel.close() + + +__all__ = ("HomepageServiceGrpcAsyncIOTransport",) diff --git a/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/homepage_service/transports/rest.py b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/homepage_service/transports/rest.py new file mode 100644 index 000000000000..2aba51fd6733 --- /dev/null +++ b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/homepage_service/transports/rest.py @@ -0,0 +1,691 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
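+#
+# Illustrative usage sketch (a minimal example, assuming application default
+# credentials; the account ID is a placeholder): the REST transport defined in
+# this module is normally selected by passing ``transport="rest"`` to the
+# synchronous ``HomepageServiceClient``.
+#
+#     from google.shopping import merchant_accounts_v1beta
+#
+#     client = merchant_accounts_v1beta.HomepageServiceClient(transport="rest")
+#     request = merchant_accounts_v1beta.ClaimHomepageRequest(
+#         name="accounts/123/homepage",  # placeholder resource name
+#     )
+#     homepage = client.claim_homepage(request=request)
+#     print(homepage)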
+# + +import dataclasses +import json # type: ignore +import re +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, path_template, rest_helpers, rest_streaming +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.transport.requests import AuthorizedSession # type: ignore +from google.protobuf import json_format +import grpc # type: ignore +from requests import __version__ as requests_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + + +from google.shopping.merchant_accounts_v1beta.types import homepage as gsma_homepage +from google.shopping.merchant_accounts_v1beta.types import homepage + +from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO +from .base import HomepageServiceTransport + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) + + +class HomepageServiceRestInterceptor: + """Interceptor for HomepageService. + + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. + Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the HomepageServiceRestTransport. + + .. code-block:: python + class MyCustomHomepageServiceInterceptor(HomepageServiceRestInterceptor): + def pre_claim_homepage(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_claim_homepage(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_homepage(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_homepage(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_unclaim_homepage(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_unclaim_homepage(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_update_homepage(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_update_homepage(self, response): + logging.log(f"Received response: {response}") + return response + + transport = HomepageServiceRestTransport(interceptor=MyCustomHomepageServiceInterceptor()) + client = HomepageServiceClient(transport=transport) + + + """ + + def pre_claim_homepage( + self, + request: homepage.ClaimHomepageRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[homepage.ClaimHomepageRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for claim_homepage + + Override in a subclass to manipulate the request or metadata + before they are sent to the HomepageService server. 
+ """ + return request, metadata + + def post_claim_homepage(self, response: homepage.Homepage) -> homepage.Homepage: + """Post-rpc interceptor for claim_homepage + + Override in a subclass to manipulate the response + after it is returned by the HomepageService server but before + it is returned to user code. + """ + return response + + def pre_get_homepage( + self, request: homepage.GetHomepageRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[homepage.GetHomepageRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_homepage + + Override in a subclass to manipulate the request or metadata + before they are sent to the HomepageService server. + """ + return request, metadata + + def post_get_homepage(self, response: homepage.Homepage) -> homepage.Homepage: + """Post-rpc interceptor for get_homepage + + Override in a subclass to manipulate the response + after it is returned by the HomepageService server but before + it is returned to user code. + """ + return response + + def pre_unclaim_homepage( + self, + request: homepage.UnclaimHomepageRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[homepage.UnclaimHomepageRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for unclaim_homepage + + Override in a subclass to manipulate the request or metadata + before they are sent to the HomepageService server. + """ + return request, metadata + + def post_unclaim_homepage(self, response: homepage.Homepage) -> homepage.Homepage: + """Post-rpc interceptor for unclaim_homepage + + Override in a subclass to manipulate the response + after it is returned by the HomepageService server but before + it is returned to user code. + """ + return response + + def pre_update_homepage( + self, + request: gsma_homepage.UpdateHomepageRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[gsma_homepage.UpdateHomepageRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for update_homepage + + Override in a subclass to manipulate the request or metadata + before they are sent to the HomepageService server. + """ + return request, metadata + + def post_update_homepage( + self, response: gsma_homepage.Homepage + ) -> gsma_homepage.Homepage: + """Post-rpc interceptor for update_homepage + + Override in a subclass to manipulate the response + after it is returned by the HomepageService server but before + it is returned to user code. + """ + return response + + +@dataclasses.dataclass +class HomepageServiceRestStub: + _session: AuthorizedSession + _host: str + _interceptor: HomepageServiceRestInterceptor + + +class HomepageServiceRestTransport(HomepageServiceTransport): + """REST backend transport for HomepageService. + + Service to support an API for a store's homepage. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. 
+ + It sends JSON representations of protocol buffers over HTTP/1.1 + + """ + + def __init__( + self, + *, + host: str = "merchantapi.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", + interceptor: Optional[HomepageServiceRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'merchantapi.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. It is ignored + if ``channel`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. + """ + # Run the base constructor + # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. 
+        # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the
+        # credentials object
+        maybe_url_match = re.match("^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host)
+        if maybe_url_match is None:
+            raise ValueError(
+                f"Unexpected hostname structure: {host}"
+            )  # pragma: NO COVER
+
+        url_match_items = maybe_url_match.groupdict()
+
+        host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host
+
+        super().__init__(
+            host=host,
+            credentials=credentials,
+            client_info=client_info,
+            always_use_jwt_access=always_use_jwt_access,
+            api_audience=api_audience,
+        )
+        self._session = AuthorizedSession(
+            self._credentials, default_host=self.DEFAULT_HOST
+        )
+        if client_cert_source_for_mtls:
+            self._session.configure_mtls_channel(client_cert_source_for_mtls)
+        self._interceptor = interceptor or HomepageServiceRestInterceptor()
+        self._prep_wrapped_messages(client_info)
+
+    class _ClaimHomepage(HomepageServiceRestStub):
+        def __hash__(self):
+            return hash("ClaimHomepage")
+
+        __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {}
+
+        @classmethod
+        def _get_unset_required_fields(cls, message_dict):
+            return {
+                k: v
+                for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items()
+                if k not in message_dict
+            }
+
+        def __call__(
+            self,
+            request: homepage.ClaimHomepageRequest,
+            *,
+            retry: OptionalRetry = gapic_v1.method.DEFAULT,
+            timeout: Optional[float] = None,
+            metadata: Sequence[Tuple[str, str]] = (),
+        ) -> homepage.Homepage:
+            r"""Call the claim homepage method over HTTP.
+
+            Args:
+                request (~.homepage.ClaimHomepageRequest):
+                    The request object. Request message for the ``ClaimHomepage`` method.
+                retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                    should be retried.
+                timeout (float): The timeout for this request.
+                metadata (Sequence[Tuple[str, str]]): Strings which should be
+                    sent along with the request as metadata.
+
+            Returns:
+                ~.homepage.Homepage:
+                    A store's homepage.
+            """
+
+            http_options: List[Dict[str, str]] = [
+                {
+                    "method": "post",
+                    "uri": "/accounts/v1beta/{name=accounts/*/homepage}:claim",
+                    "body": "*",
+                },
+            ]
+            request, metadata = self._interceptor.pre_claim_homepage(request, metadata)
+            pb_request = homepage.ClaimHomepageRequest.pb(request)
+            transcoded_request = path_template.transcode(http_options, pb_request)
+
+            # Jsonify the request body
+
+            body = json_format.MessageToJson(
+                transcoded_request["body"], use_integers_for_enums=True
+            )
+            uri = transcoded_request["uri"]
+            method = transcoded_request["method"]
+
+            # Jsonify the query params
+            query_params = json.loads(
+                json_format.MessageToJson(
+                    transcoded_request["query_params"],
+                    use_integers_for_enums=True,
+                )
+            )
+            query_params.update(self._get_unset_required_fields(query_params))
+
+            query_params["$alt"] = "json;enum-encoding=int"
+
+            # Send the request
+            headers = dict(metadata)
+            headers["Content-Type"] = "application/json"
+            response = getattr(self._session, method)(
+                "{host}{uri}".format(host=self._host, uri=uri),
+                timeout=timeout,
+                headers=headers,
+                params=rest_helpers.flatten_query_params(query_params, strict=True),
+                data=body,
+            )
+
+            # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception
+            # subclass.
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = homepage.Homepage() + pb_resp = homepage.Homepage.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_claim_homepage(resp) + return resp + + class _GetHomepage(HomepageServiceRestStub): + def __hash__(self): + return hash("GetHomepage") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: homepage.GetHomepageRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> homepage.Homepage: + r"""Call the get homepage method over HTTP. + + Args: + request (~.homepage.GetHomepageRequest): + The request object. Request message for the ``GetHomepage`` method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.homepage.Homepage: + A store's homepage. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/accounts/v1beta/{name=accounts/*/homepage}", + }, + ] + request, metadata = self._interceptor.pre_get_homepage(request, metadata) + pb_request = homepage.GetHomepageRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = homepage.Homepage() + pb_resp = homepage.Homepage.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_homepage(resp) + return resp + + class _UnclaimHomepage(HomepageServiceRestStub): + def __hash__(self): + return hash("UnclaimHomepage") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: homepage.UnclaimHomepageRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> homepage.Homepage: + r"""Call the unclaim homepage method over HTTP. + + Args: + request (~.homepage.UnclaimHomepageRequest): + The request object. 
Request message for the ``UnclaimHomepage`` method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.homepage.Homepage: + A store's homepage. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/accounts/v1beta/{name=accounts/*/homepage}:unclaim", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_unclaim_homepage( + request, metadata + ) + pb_request = homepage.UnclaimHomepageRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = homepage.Homepage() + pb_resp = homepage.Homepage.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_unclaim_homepage(resp) + return resp + + class _UpdateHomepage(HomepageServiceRestStub): + def __hash__(self): + return hash("UpdateHomepage") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "updateMask": {}, + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: gsma_homepage.UpdateHomepageRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> gsma_homepage.Homepage: + r"""Call the update homepage method over HTTP. + + Args: + request (~.gsma_homepage.UpdateHomepageRequest): + The request object. Request message for the ``UpdateHomepage`` method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.gsma_homepage.Homepage: + A store's homepage. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "patch", + "uri": "/accounts/v1beta/{homepage.name=accounts/*/homepage}", + "body": "homepage", + }, + ] + request, metadata = self._interceptor.pre_update_homepage(request, metadata) + pb_request = gsma_homepage.UpdateHomepageRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = gsma_homepage.Homepage() + pb_resp = gsma_homepage.Homepage.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_update_homepage(resp) + return resp + + @property + def claim_homepage( + self, + ) -> Callable[[homepage.ClaimHomepageRequest], homepage.Homepage]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ClaimHomepage(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_homepage( + self, + ) -> Callable[[homepage.GetHomepageRequest], homepage.Homepage]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetHomepage(self._session, self._host, self._interceptor) # type: ignore + + @property + def unclaim_homepage( + self, + ) -> Callable[[homepage.UnclaimHomepageRequest], homepage.Homepage]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._UnclaimHomepage(self._session, self._host, self._interceptor) # type: ignore + + @property + def update_homepage( + self, + ) -> Callable[[gsma_homepage.UpdateHomepageRequest], gsma_homepage.Homepage]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._UpdateHomepage(self._session, self._host, self._interceptor) # type: ignore + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__ = ("HomepageServiceRestTransport",) diff --git a/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/online_return_policy_service/__init__.py b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/online_return_policy_service/__init__.py new file mode 100644 index 000000000000..012dbcaa041c --- /dev/null +++ b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/online_return_policy_service/__init__.py @@ -0,0 +1,22 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from .async_client import OnlineReturnPolicyServiceAsyncClient +from .client import OnlineReturnPolicyServiceClient + +__all__ = ( + "OnlineReturnPolicyServiceClient", + "OnlineReturnPolicyServiceAsyncClient", +) diff --git a/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/online_return_policy_service/async_client.py b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/online_return_policy_service/async_client.py new file mode 100644 index 000000000000..2a2442bb3d5c --- /dev/null +++ b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/online_return_policy_service/async_client.py @@ -0,0 +1,530 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
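+#
+# Illustrative usage sketch (a minimal example, assuming application default
+# credentials; the account ID is a placeholder): listing return policies with
+# the async client returns an async pager that resolves additional pages as it
+# is iterated.
+#
+#     import asyncio
+#
+#     from google.shopping import merchant_accounts_v1beta
+#
+#     async def main():
+#         client = merchant_accounts_v1beta.OnlineReturnPolicyServiceAsyncClient()
+#         request = merchant_accounts_v1beta.ListOnlineReturnPoliciesRequest(
+#             parent="accounts/123",  # placeholder account
+#         )
+#         pager = await client.list_online_return_policies(request=request)
+#         async for policy in pager:
+#             print(policy)
+#
+#     asyncio.run(main())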
+# +from collections import OrderedDict +import functools +import re +from typing import ( + Callable, + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, +) + +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry_async as retries +from google.api_core.client_options import ClientOptions +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.shopping.merchant_accounts_v1beta import gapic_version as package_version + +try: + OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.AsyncRetry, object, None] # type: ignore + +from google.shopping.merchant_accounts_v1beta.services.online_return_policy_service import ( + pagers, +) +from google.shopping.merchant_accounts_v1beta.types import online_return_policy + +from .client import OnlineReturnPolicyServiceClient +from .transports.base import DEFAULT_CLIENT_INFO, OnlineReturnPolicyServiceTransport +from .transports.grpc_asyncio import OnlineReturnPolicyServiceGrpcAsyncIOTransport + + +class OnlineReturnPolicyServiceAsyncClient: + """The service facilitates the management of a merchant's remorse + return policy configuration, encompassing return policies for both + ads and free listings + + programs. This API defines the following resource model: + - [OnlineReturnPolicy][google.shopping.merchant.accounts.v1.OnlineReturnPolicy] + """ + + _client: OnlineReturnPolicyServiceClient + + # Copy defaults from the synchronous client for use here. + # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. 
+ DEFAULT_ENDPOINT = OnlineReturnPolicyServiceClient.DEFAULT_ENDPOINT + DEFAULT_MTLS_ENDPOINT = OnlineReturnPolicyServiceClient.DEFAULT_MTLS_ENDPOINT + _DEFAULT_ENDPOINT_TEMPLATE = ( + OnlineReturnPolicyServiceClient._DEFAULT_ENDPOINT_TEMPLATE + ) + _DEFAULT_UNIVERSE = OnlineReturnPolicyServiceClient._DEFAULT_UNIVERSE + + online_return_policy_path = staticmethod( + OnlineReturnPolicyServiceClient.online_return_policy_path + ) + parse_online_return_policy_path = staticmethod( + OnlineReturnPolicyServiceClient.parse_online_return_policy_path + ) + common_billing_account_path = staticmethod( + OnlineReturnPolicyServiceClient.common_billing_account_path + ) + parse_common_billing_account_path = staticmethod( + OnlineReturnPolicyServiceClient.parse_common_billing_account_path + ) + common_folder_path = staticmethod( + OnlineReturnPolicyServiceClient.common_folder_path + ) + parse_common_folder_path = staticmethod( + OnlineReturnPolicyServiceClient.parse_common_folder_path + ) + common_organization_path = staticmethod( + OnlineReturnPolicyServiceClient.common_organization_path + ) + parse_common_organization_path = staticmethod( + OnlineReturnPolicyServiceClient.parse_common_organization_path + ) + common_project_path = staticmethod( + OnlineReturnPolicyServiceClient.common_project_path + ) + parse_common_project_path = staticmethod( + OnlineReturnPolicyServiceClient.parse_common_project_path + ) + common_location_path = staticmethod( + OnlineReturnPolicyServiceClient.common_location_path + ) + parse_common_location_path = staticmethod( + OnlineReturnPolicyServiceClient.parse_common_location_path + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + OnlineReturnPolicyServiceAsyncClient: The constructed client. + """ + return OnlineReturnPolicyServiceClient.from_service_account_info.__func__(OnlineReturnPolicyServiceAsyncClient, info, *args, **kwargs) # type: ignore + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + OnlineReturnPolicyServiceAsyncClient: The constructed client. + """ + return OnlineReturnPolicyServiceClient.from_service_account_file.__func__(OnlineReturnPolicyServiceAsyncClient, filename, *args, **kwargs) # type: ignore + + from_service_account_json = from_service_account_file + + @classmethod + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[ClientOptions] = None + ): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. 
+ (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + return OnlineReturnPolicyServiceClient.get_mtls_endpoint_and_cert_source(client_options) # type: ignore + + @property + def transport(self) -> OnlineReturnPolicyServiceTransport: + """Returns the transport used by the client instance. + + Returns: + OnlineReturnPolicyServiceTransport: The transport used by the client instance. + """ + return self._client.transport + + @property + def api_endpoint(self): + """Return the API endpoint used by the client instance. + + Returns: + str: The API endpoint used by the client instance. + """ + return self._client._api_endpoint + + @property + def universe_domain(self) -> str: + """Return the universe domain used by the client instance. + + Returns: + str: The universe domain used + by the client instance. + """ + return self._client._universe_domain + + get_transport_class = functools.partial( + type(OnlineReturnPolicyServiceClient).get_transport_class, + type(OnlineReturnPolicyServiceClient), + ) + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[ + Union[ + str, + OnlineReturnPolicyServiceTransport, + Callable[..., OnlineReturnPolicyServiceTransport], + ] + ] = "grpc_asyncio", + client_options: Optional[ClientOptions] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the online return policy service async client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Optional[Union[str,OnlineReturnPolicyServiceTransport,Callable[..., OnlineReturnPolicyServiceTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport to use. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the OnlineReturnPolicyServiceTransport constructor. + If set to None, a transport is chosen automatically. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): + Custom options for the client. + + 1. The ``api_endpoint`` property can be used to override the + default endpoint provided by the client when ``transport`` is + not explicitly provided. 
Only if this property is not set and + ``transport`` was not explicitly provided, the endpoint is + determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment + variable, which have one of the following values: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto-switch to the + default mTLS endpoint if client certificate is present; this is + the default value). + + 2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide a client certificate for mTLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + 3. The ``universe_domain`` property can be used to override the + default "googleapis.com" universe. Note that ``api_endpoint`` + property still takes precedence; and ``universe_domain`` is + currently not supported for mTLS. + + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + """ + self._client = OnlineReturnPolicyServiceClient( + credentials=credentials, + transport=transport, + client_options=client_options, + client_info=client_info, + ) + + async def get_online_return_policy( + self, + request: Optional[ + Union[online_return_policy.GetOnlineReturnPolicyRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> online_return_policy.OnlineReturnPolicy: + r"""Gets an existing return policy. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.shopping import merchant_accounts_v1beta + + async def sample_get_online_return_policy(): + # Create a client + client = merchant_accounts_v1beta.OnlineReturnPolicyServiceAsyncClient() + + # Initialize request argument(s) + request = merchant_accounts_v1beta.GetOnlineReturnPolicyRequest( + name="name_value", + ) + + # Make the request + response = await client.get_online_return_policy(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.shopping.merchant_accounts_v1beta.types.GetOnlineReturnPolicyRequest, dict]]): + The request object. Request message for the ``GetOnlineReturnPolicy`` + method. + name (:class:`str`): + Required. The name of the return policy to retrieve. + Format: + ``accounts/{account}/onlineReturnPolicies/{return_policy}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. 
+ timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.shopping.merchant_accounts_v1beta.types.OnlineReturnPolicy: + [Online return policy](\ https://support.google.com/merchants/answer/10220642) + object. This is currently used to represent return + policies for ads and free listings programs. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, online_return_policy.GetOnlineReturnPolicyRequest): + request = online_return_policy.GetOnlineReturnPolicyRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.get_online_return_policy + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_online_return_policies( + self, + request: Optional[ + Union[online_return_policy.ListOnlineReturnPoliciesRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListOnlineReturnPoliciesAsyncPager: + r"""Lists all existing return policies. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.shopping import merchant_accounts_v1beta + + async def sample_list_online_return_policies(): + # Create a client + client = merchant_accounts_v1beta.OnlineReturnPolicyServiceAsyncClient() + + # Initialize request argument(s) + request = merchant_accounts_v1beta.ListOnlineReturnPoliciesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_online_return_policies(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.shopping.merchant_accounts_v1beta.types.ListOnlineReturnPoliciesRequest, dict]]): + The request object. 
Request message for the ``ListOnlineReturnPolicies`` + method. + parent (:class:`str`): + Required. The merchant account for which to list return + policies. Format: ``accounts/{account}`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.shopping.merchant_accounts_v1beta.services.online_return_policy_service.pagers.ListOnlineReturnPoliciesAsyncPager: + Response message for the ListOnlineReturnPolicies + method. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance( + request, online_return_policy.ListOnlineReturnPoliciesRequest + ): + request = online_return_policy.ListOnlineReturnPoliciesRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.list_online_return_policies + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListOnlineReturnPoliciesAsyncPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def __aenter__(self) -> "OnlineReturnPolicyServiceAsyncClient": + return self + + async def __aexit__(self, exc_type, exc, tb): + await self.transport.close() + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("OnlineReturnPolicyServiceAsyncClient",) diff --git a/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/online_return_policy_service/client.py b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/online_return_policy_service/client.py new file mode 100644 index 000000000000..a3eee5b0cefa --- /dev/null +++ b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/online_return_policy_service/client.py @@ -0,0 +1,943 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +import os +import re +from typing import ( + Callable, + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, + cast, +) +import warnings + +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.shopping.merchant_accounts_v1beta import gapic_version as package_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + +from google.shopping.merchant_accounts_v1beta.services.online_return_policy_service import ( + pagers, +) +from google.shopping.merchant_accounts_v1beta.types import online_return_policy + +from .transports.base import DEFAULT_CLIENT_INFO, OnlineReturnPolicyServiceTransport +from .transports.grpc import OnlineReturnPolicyServiceGrpcTransport +from .transports.grpc_asyncio import OnlineReturnPolicyServiceGrpcAsyncIOTransport +from .transports.rest import OnlineReturnPolicyServiceRestTransport + + +class OnlineReturnPolicyServiceClientMeta(type): + """Metaclass for the OnlineReturnPolicyService client. + + This provides class-level methods for building and retrieving + support objects (e.g. transport) without polluting the client instance + objects. 
+    """
+
+    _transport_registry = (
+        OrderedDict()
+    )  # type: Dict[str, Type[OnlineReturnPolicyServiceTransport]]
+    _transport_registry["grpc"] = OnlineReturnPolicyServiceGrpcTransport
+    _transport_registry["grpc_asyncio"] = OnlineReturnPolicyServiceGrpcAsyncIOTransport
+    _transport_registry["rest"] = OnlineReturnPolicyServiceRestTransport
+
+    def get_transport_class(
+        cls,
+        label: Optional[str] = None,
+    ) -> Type[OnlineReturnPolicyServiceTransport]:
+        """Returns an appropriate transport class.
+
+        Args:
+            label: The name of the desired transport. If none is
+                provided, then the first transport in the registry is used.
+
+        Returns:
+            The transport class to use.
+        """
+        # If a specific transport is requested, return that one.
+        if label:
+            return cls._transport_registry[label]
+
+        # No transport is requested; return the default (that is, the first one
+        # in the dictionary).
+        return next(iter(cls._transport_registry.values()))
+
+
+class OnlineReturnPolicyServiceClient(metaclass=OnlineReturnPolicyServiceClientMeta):
+    """The service facilitates the management of a merchant's remorse
+    return policy configuration, encompassing return policies for both
+    ads and free listings
+
+    programs. This API defines the following resource model:
+    - [OnlineReturnPolicy][google.shopping.merchant.accounts.v1.OnlineReturnPolicy]
+    """
+
+    @staticmethod
+    def _get_default_mtls_endpoint(api_endpoint):
+        """Converts api endpoint to mTLS endpoint.
+
+        Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to
+        "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively.
+        Args:
+            api_endpoint (Optional[str]): the api endpoint to convert.
+        Returns:
+            str: converted mTLS api endpoint.
+        """
+        if not api_endpoint:
+            return api_endpoint
+
+        mtls_endpoint_re = re.compile(
+            r"(?P<name>[^.]+)(?P<mtls>\.mtls)?(?P<sandbox>\.sandbox)?(?P<googledomain>\.googleapis\.com)?"
+        )
+
+        m = mtls_endpoint_re.match(api_endpoint)
+        name, mtls, sandbox, googledomain = m.groups()
+        if mtls or not googledomain:
+            return api_endpoint
+
+        if sandbox:
+            return api_endpoint.replace(
+                "sandbox.googleapis.com", "mtls.sandbox.googleapis.com"
+            )
+
+        return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com")
+
+    # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead.
+    DEFAULT_ENDPOINT = "merchantapi.googleapis.com"
+    DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__(  # type: ignore
+        DEFAULT_ENDPOINT
+    )
+
+    _DEFAULT_ENDPOINT_TEMPLATE = "merchantapi.{UNIVERSE_DOMAIN}"
+    _DEFAULT_UNIVERSE = "googleapis.com"
+
+    @classmethod
+    def from_service_account_info(cls, info: dict, *args, **kwargs):
+        """Creates an instance of this client using the provided credentials
+            info.
+
+        Args:
+            info (dict): The service account private key info.
+            args: Additional arguments to pass to the constructor.
+            kwargs: Additional arguments to pass to the constructor.
+
+        Returns:
+            OnlineReturnPolicyServiceClient: The constructed client.
+        """
+        credentials = service_account.Credentials.from_service_account_info(info)
+        kwargs["credentials"] = credentials
+        return cls(*args, **kwargs)
+
+    @classmethod
+    def from_service_account_file(cls, filename: str, *args, **kwargs):
+        """Creates an instance of this client using the provided credentials
+            file.
+
+        Args:
+            filename (str): The path to the service account private key json
+                file.
+            args: Additional arguments to pass to the constructor.
+            kwargs: Additional arguments to pass to the constructor.
+
+        Returns:
+            OnlineReturnPolicyServiceClient: The constructed client.
+        """
+        credentials = service_account.Credentials.from_service_account_file(filename)
+        kwargs["credentials"] = credentials
+        return cls(*args, **kwargs)
+
+    from_service_account_json = from_service_account_file
+
+    @property
+    def transport(self) -> OnlineReturnPolicyServiceTransport:
+        """Returns the transport used by the client instance.
+
+        Returns:
+            OnlineReturnPolicyServiceTransport: The transport used by the client
+                instance.
+        """
+        return self._transport
+
+    @staticmethod
+    def online_return_policy_path(
+        account: str,
+        return_policy: str,
+    ) -> str:
+        """Returns a fully-qualified online_return_policy string."""
+        return "accounts/{account}/onlineReturnPolicies/{return_policy}".format(
+            account=account,
+            return_policy=return_policy,
+        )
+
+    @staticmethod
+    def parse_online_return_policy_path(path: str) -> Dict[str, str]:
+        """Parses an online_return_policy path into its component segments."""
+        m = re.match(
+            r"^accounts/(?P<account>.+?)/onlineReturnPolicies/(?P<return_policy>.+?)$",
+            path,
+        )
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_billing_account_path(
+        billing_account: str,
+    ) -> str:
+        """Returns a fully-qualified billing_account string."""
+        return "billingAccounts/{billing_account}".format(
+            billing_account=billing_account,
+        )
+
+    @staticmethod
+    def parse_common_billing_account_path(path: str) -> Dict[str, str]:
+        """Parse a billing_account path into its component segments."""
+        m = re.match(r"^billingAccounts/(?P<billing_account>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_folder_path(
+        folder: str,
+    ) -> str:
+        """Returns a fully-qualified folder string."""
+        return "folders/{folder}".format(
+            folder=folder,
+        )
+
+    @staticmethod
+    def parse_common_folder_path(path: str) -> Dict[str, str]:
+        """Parse a folder path into its component segments."""
+        m = re.match(r"^folders/(?P<folder>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_organization_path(
+        organization: str,
+    ) -> str:
+        """Returns a fully-qualified organization string."""
+        return "organizations/{organization}".format(
+            organization=organization,
+        )
+
+    @staticmethod
+    def parse_common_organization_path(path: str) -> Dict[str, str]:
+        """Parse an organization path into its component segments."""
+        m = re.match(r"^organizations/(?P<organization>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_project_path(
+        project: str,
+    ) -> str:
+        """Returns a fully-qualified project string."""
+        return "projects/{project}".format(
+            project=project,
+        )
+
+    @staticmethod
+    def parse_common_project_path(path: str) -> Dict[str, str]:
+        """Parse a project path into its component segments."""
+        m = re.match(r"^projects/(?P<project>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_location_path(
+        project: str,
+        location: str,
+    ) -> str:
+        """Returns a fully-qualified location string."""
+        return "projects/{project}/locations/{location}".format(
+            project=project,
+            location=location,
+        )
+
+    @staticmethod
+    def parse_common_location_path(path: str) -> Dict[str, str]:
+        """Parse a location path into its component segments."""
+        m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @classmethod
+    def get_mtls_endpoint_and_cert_source(
+        cls, client_options: Optional[client_options_lib.ClientOptions] = None
+    ):
+        """Deprecated. Return the API endpoint and client cert source for mutual TLS.
+
+        The client cert source is determined in the following order:
+        (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the
+        client cert source is None.
+        (2) if `client_options.client_cert_source` is provided, use the provided one; if the
+        default client cert source exists, use the default one; otherwise the client cert
+        source is None.
+
+        The API endpoint is determined in the following order:
+        (1) if `client_options.api_endpoint` is provided, use the provided one.
+        (2) if `GOOGLE_API_USE_MTLS_ENDPOINT` environment variable is "always", use the
+        default mTLS endpoint; if the environment variable is "never", use the default API
+        endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise
+        use the default API endpoint.
+
+        More details can be found at https://google.aip.dev/auth/4114.
+
+        Args:
+            client_options (google.api_core.client_options.ClientOptions): Custom options for the
+                client. Only the `api_endpoint` and `client_cert_source` properties may be used
+                in this method.
+
+        Returns:
+            Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the
+                client cert source to use.
+
+        Raises:
+            google.auth.exceptions.MutualTLSChannelError: If any errors happen.
+        """
+
+        warnings.warn(
+            "get_mtls_endpoint_and_cert_source is deprecated. Use the api_endpoint property instead.",
+            DeprecationWarning,
+        )
+        if client_options is None:
+            client_options = client_options_lib.ClientOptions()
+        use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false")
+        use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto")
+        if use_client_cert not in ("true", "false"):
+            raise ValueError(
+                "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`"
+            )
+        if use_mtls_endpoint not in ("auto", "never", "always"):
+            raise MutualTLSChannelError(
+                "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`"
+            )
+
+        # Figure out the client cert source to use.
+        client_cert_source = None
+        if use_client_cert == "true":
+            if client_options.client_cert_source:
+                client_cert_source = client_options.client_cert_source
+            elif mtls.has_default_client_cert_source():
+                client_cert_source = mtls.default_client_cert_source()
+
+        # Figure out which api endpoint to use.
+        if client_options.api_endpoint is not None:
+            api_endpoint = client_options.api_endpoint
+        elif use_mtls_endpoint == "always" or (
+            use_mtls_endpoint == "auto" and client_cert_source
+        ):
+            api_endpoint = cls.DEFAULT_MTLS_ENDPOINT
+        else:
+            api_endpoint = cls.DEFAULT_ENDPOINT
+
+        return api_endpoint, client_cert_source
+
+    @staticmethod
+    def _read_environment_variables():
+        """Returns the environment variables used by the client.
+
+        Returns:
+            Tuple[bool, str, str]: returns the GOOGLE_API_USE_CLIENT_CERTIFICATE,
+            GOOGLE_API_USE_MTLS_ENDPOINT, and GOOGLE_CLOUD_UNIVERSE_DOMAIN environment variables.
+
+        Raises:
+            ValueError: If GOOGLE_API_USE_CLIENT_CERTIFICATE is not
+                any of ["true", "false"].
+            google.auth.exceptions.MutualTLSChannelError: If GOOGLE_API_USE_MTLS_ENDPOINT
+                is not any of ["auto", "never", "always"].
+ """ + use_client_cert = os.getenv( + "GOOGLE_API_USE_CLIENT_CERTIFICATE", "false" + ).lower() + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto").lower() + universe_domain_env = os.getenv("GOOGLE_CLOUD_UNIVERSE_DOMAIN") + if use_client_cert not in ("true", "false"): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError( + "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + return use_client_cert == "true", use_mtls_endpoint, universe_domain_env + + @staticmethod + def _get_client_cert_source(provided_cert_source, use_cert_flag): + """Return the client cert source to be used by the client. + + Args: + provided_cert_source (bytes): The client certificate source provided. + use_cert_flag (bool): A flag indicating whether to use the client certificate. + + Returns: + bytes or None: The client cert source to be used by the client. + """ + client_cert_source = None + if use_cert_flag: + if provided_cert_source: + client_cert_source = provided_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + return client_cert_source + + @staticmethod + def _get_api_endpoint( + api_override, client_cert_source, universe_domain, use_mtls_endpoint + ): + """Return the API endpoint used by the client. + + Args: + api_override (str): The API endpoint override. If specified, this is always + the return value of this function and the other arguments are not used. + client_cert_source (bytes): The client certificate source used by the client. + universe_domain (str): The universe domain used by the client. + use_mtls_endpoint (str): How to use the mTLS endpoint, which depends also on the other parameters. + Possible values are "always", "auto", or "never". + + Returns: + str: The API endpoint to be used by the client. + """ + if api_override is not None: + api_endpoint = api_override + elif use_mtls_endpoint == "always" or ( + use_mtls_endpoint == "auto" and client_cert_source + ): + _default_universe = OnlineReturnPolicyServiceClient._DEFAULT_UNIVERSE + if universe_domain != _default_universe: + raise MutualTLSChannelError( + f"mTLS is not supported in any universe other than {_default_universe}." + ) + api_endpoint = OnlineReturnPolicyServiceClient.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = ( + OnlineReturnPolicyServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=universe_domain + ) + ) + return api_endpoint + + @staticmethod + def _get_universe_domain( + client_universe_domain: Optional[str], universe_domain_env: Optional[str] + ) -> str: + """Return the universe domain used by the client. + + Args: + client_universe_domain (Optional[str]): The universe domain configured via the client options. + universe_domain_env (Optional[str]): The universe domain configured via the "GOOGLE_CLOUD_UNIVERSE_DOMAIN" environment variable. + + Returns: + str: The universe domain to be used by the client. + + Raises: + ValueError: If the universe domain is an empty string. 
+ """ + universe_domain = OnlineReturnPolicyServiceClient._DEFAULT_UNIVERSE + if client_universe_domain is not None: + universe_domain = client_universe_domain + elif universe_domain_env is not None: + universe_domain = universe_domain_env + if len(universe_domain.strip()) == 0: + raise ValueError("Universe Domain cannot be an empty string.") + return universe_domain + + @staticmethod + def _compare_universes( + client_universe: str, credentials: ga_credentials.Credentials + ) -> bool: + """Returns True iff the universe domains used by the client and credentials match. + + Args: + client_universe (str): The universe domain configured via the client options. + credentials (ga_credentials.Credentials): The credentials being used in the client. + + Returns: + bool: True iff client_universe matches the universe in credentials. + + Raises: + ValueError: when client_universe does not match the universe in credentials. + """ + + default_universe = OnlineReturnPolicyServiceClient._DEFAULT_UNIVERSE + credentials_universe = getattr(credentials, "universe_domain", default_universe) + + if client_universe != credentials_universe: + raise ValueError( + "The configured universe domain " + f"({client_universe}) does not match the universe domain " + f"found in the credentials ({credentials_universe}). " + "If you haven't configured the universe domain explicitly, " + f"`{default_universe}` is the default." + ) + return True + + def _validate_universe_domain(self): + """Validates client's and credentials' universe domains are consistent. + + Returns: + bool: True iff the configured universe domain is valid. + + Raises: + ValueError: If the configured universe domain is not valid. + """ + self._is_universe_domain_valid = ( + self._is_universe_domain_valid + or OnlineReturnPolicyServiceClient._compare_universes( + self.universe_domain, self.transport._credentials + ) + ) + return self._is_universe_domain_valid + + @property + def api_endpoint(self): + """Return the API endpoint used by the client instance. + + Returns: + str: The API endpoint used by the client instance. + """ + return self._api_endpoint + + @property + def universe_domain(self) -> str: + """Return the universe domain used by the client instance. + + Returns: + str: The universe domain used by the client instance. + """ + return self._universe_domain + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[ + Union[ + str, + OnlineReturnPolicyServiceTransport, + Callable[..., OnlineReturnPolicyServiceTransport], + ] + ] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the online return policy service client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Optional[Union[str,OnlineReturnPolicyServiceTransport,Callable[..., OnlineReturnPolicyServiceTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the OnlineReturnPolicyServiceTransport constructor. + If set to None, a transport is chosen automatically. 
+ client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): + Custom options for the client. + + 1. The ``api_endpoint`` property can be used to override the + default endpoint provided by the client when ``transport`` is + not explicitly provided. Only if this property is not set and + ``transport`` was not explicitly provided, the endpoint is + determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment + variable, which have one of the following values: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto-switch to the + default mTLS endpoint if client certificate is present; this is + the default value). + + 2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide a client certificate for mTLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + 3. The ``universe_domain`` property can be used to override the + default "googleapis.com" universe. Note that the ``api_endpoint`` + property still takes precedence; and ``universe_domain`` is + currently not supported for mTLS. + + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + """ + self._client_options = client_options + if isinstance(self._client_options, dict): + self._client_options = client_options_lib.from_dict(self._client_options) + if self._client_options is None: + self._client_options = client_options_lib.ClientOptions() + self._client_options = cast( + client_options_lib.ClientOptions, self._client_options + ) + + universe_domain_opt = getattr(self._client_options, "universe_domain", None) + + ( + self._use_client_cert, + self._use_mtls_endpoint, + self._universe_domain_env, + ) = OnlineReturnPolicyServiceClient._read_environment_variables() + self._client_cert_source = ( + OnlineReturnPolicyServiceClient._get_client_cert_source( + self._client_options.client_cert_source, self._use_client_cert + ) + ) + self._universe_domain = OnlineReturnPolicyServiceClient._get_universe_domain( + universe_domain_opt, self._universe_domain_env + ) + self._api_endpoint = None # updated below, depending on `transport` + + # Initialize the universe domain validation. + self._is_universe_domain_valid = False + + api_key_value = getattr(self._client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError( + "client_options.api_key and credentials are mutually exclusive" + ) + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + transport_provided = isinstance(transport, OnlineReturnPolicyServiceTransport) + if transport_provided: + # transport is a OnlineReturnPolicyServiceTransport instance. + if credentials or self._client_options.credentials_file or api_key_value: + raise ValueError( + "When providing a transport instance, " + "provide its credentials directly." 
+ ) + if self._client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." + ) + self._transport = cast(OnlineReturnPolicyServiceTransport, transport) + self._api_endpoint = self._transport.host + + self._api_endpoint = ( + self._api_endpoint + or OnlineReturnPolicyServiceClient._get_api_endpoint( + self._client_options.api_endpoint, + self._client_cert_source, + self._universe_domain, + self._use_mtls_endpoint, + ) + ) + + if not transport_provided: + import google.auth._default # type: ignore + + if api_key_value and hasattr( + google.auth._default, "get_api_key_credentials" + ): + credentials = google.auth._default.get_api_key_credentials( + api_key_value + ) + + transport_init: Union[ + Type[OnlineReturnPolicyServiceTransport], + Callable[..., OnlineReturnPolicyServiceTransport], + ] = ( + type(self).get_transport_class(transport) + if isinstance(transport, str) or transport is None + else cast(Callable[..., OnlineReturnPolicyServiceTransport], transport) + ) + # initialize with the provided callable or the passed in class + self._transport = transport_init( + credentials=credentials, + credentials_file=self._client_options.credentials_file, + host=self._api_endpoint, + scopes=self._client_options.scopes, + client_cert_source_for_mtls=self._client_cert_source, + quota_project_id=self._client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + api_audience=self._client_options.api_audience, + ) + + def get_online_return_policy( + self, + request: Optional[ + Union[online_return_policy.GetOnlineReturnPolicyRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> online_return_policy.OnlineReturnPolicy: + r"""Gets an existing return policy. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.shopping import merchant_accounts_v1beta + + def sample_get_online_return_policy(): + # Create a client + client = merchant_accounts_v1beta.OnlineReturnPolicyServiceClient() + + # Initialize request argument(s) + request = merchant_accounts_v1beta.GetOnlineReturnPolicyRequest( + name="name_value", + ) + + # Make the request + response = client.get_online_return_policy(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.shopping.merchant_accounts_v1beta.types.GetOnlineReturnPolicyRequest, dict]): + The request object. Request message for the ``GetOnlineReturnPolicy`` + method. + name (str): + Required. The name of the return policy to retrieve. + Format: + ``accounts/{account}/onlineReturnPolicies/{return_policy}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.shopping.merchant_accounts_v1beta.types.OnlineReturnPolicy: + [Online return policy](\ https://support.google.com/merchants/answer/10220642) + object. This is currently used to represent return + policies for ads and free listings programs. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, online_return_policy.GetOnlineReturnPolicyRequest): + request = online_return_policy.GetOnlineReturnPolicyRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_online_return_policy] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def list_online_return_policies( + self, + request: Optional[ + Union[online_return_policy.ListOnlineReturnPoliciesRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListOnlineReturnPoliciesPager: + r"""Lists all existing return policies. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.shopping import merchant_accounts_v1beta + + def sample_list_online_return_policies(): + # Create a client + client = merchant_accounts_v1beta.OnlineReturnPolicyServiceClient() + + # Initialize request argument(s) + request = merchant_accounts_v1beta.ListOnlineReturnPoliciesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_online_return_policies(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.shopping.merchant_accounts_v1beta.types.ListOnlineReturnPoliciesRequest, dict]): + The request object. Request message for the ``ListOnlineReturnPolicies`` + method. + parent (str): + Required. The merchant account for which to list return + policies. 
Format: ``accounts/{account}`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.shopping.merchant_accounts_v1beta.services.online_return_policy_service.pagers.ListOnlineReturnPoliciesPager: + Response message for the ListOnlineReturnPolicies + method. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance( + request, online_return_policy.ListOnlineReturnPoliciesRequest + ): + request = online_return_policy.ListOnlineReturnPoliciesRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.list_online_return_policies + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListOnlineReturnPoliciesPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def __enter__(self) -> "OnlineReturnPolicyServiceClient": + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! 
+ """ + self.transport.close() + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("OnlineReturnPolicyServiceClient",) diff --git a/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/online_return_policy_service/pagers.py b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/online_return_policy_service/pagers.py new file mode 100644 index 000000000000..6e6687506c2f --- /dev/null +++ b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/online_return_policy_service/pagers.py @@ -0,0 +1,159 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import ( + Any, + AsyncIterator, + Awaitable, + Callable, + Iterator, + Optional, + Sequence, + Tuple, +) + +from google.shopping.merchant_accounts_v1beta.types import online_return_policy + + +class ListOnlineReturnPoliciesPager: + """A pager for iterating through ``list_online_return_policies`` requests. + + This class thinly wraps an initial + :class:`google.shopping.merchant_accounts_v1beta.types.ListOnlineReturnPoliciesResponse` object, and + provides an ``__iter__`` method to iterate through its + ``online_return_policies`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListOnlineReturnPolicies`` requests and continue to iterate + through the ``online_return_policies`` field on the + corresponding responses. + + All the usual :class:`google.shopping.merchant_accounts_v1beta.types.ListOnlineReturnPoliciesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., online_return_policy.ListOnlineReturnPoliciesResponse], + request: online_return_policy.ListOnlineReturnPoliciesRequest, + response: online_return_policy.ListOnlineReturnPoliciesResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.shopping.merchant_accounts_v1beta.types.ListOnlineReturnPoliciesRequest): + The initial request object. + response (google.shopping.merchant_accounts_v1beta.types.ListOnlineReturnPoliciesResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = online_return_policy.ListOnlineReturnPoliciesRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[online_return_policy.ListOnlineReturnPoliciesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[online_return_policy.OnlineReturnPolicy]: + for page in self.pages: + yield from page.online_return_policies + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListOnlineReturnPoliciesAsyncPager: + """A pager for iterating through ``list_online_return_policies`` requests. + + This class thinly wraps an initial + :class:`google.shopping.merchant_accounts_v1beta.types.ListOnlineReturnPoliciesResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``online_return_policies`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListOnlineReturnPolicies`` requests and continue to iterate + through the ``online_return_policies`` field on the + corresponding responses. + + All the usual :class:`google.shopping.merchant_accounts_v1beta.types.ListOnlineReturnPoliciesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[ + ..., Awaitable[online_return_policy.ListOnlineReturnPoliciesResponse] + ], + request: online_return_policy.ListOnlineReturnPoliciesRequest, + response: online_return_policy.ListOnlineReturnPoliciesResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.shopping.merchant_accounts_v1beta.types.ListOnlineReturnPoliciesRequest): + The initial request object. + response (google.shopping.merchant_accounts_v1beta.types.ListOnlineReturnPoliciesResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = online_return_policy.ListOnlineReturnPoliciesRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages( + self, + ) -> AsyncIterator[online_return_policy.ListOnlineReturnPoliciesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + + def __aiter__(self) -> AsyncIterator[online_return_policy.OnlineReturnPolicy]: + async def async_generator(): + async for page in self.pages: + for response in page.online_return_policies: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) diff --git a/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/online_return_policy_service/transports/__init__.py b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/online_return_policy_service/transports/__init__.py new file mode 100644 index 000000000000..4f9ee1664fc0 --- /dev/null +++ b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/online_return_policy_service/transports/__init__.py @@ -0,0 +1,41 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +from typing import Dict, Type + +from .base import OnlineReturnPolicyServiceTransport +from .grpc import OnlineReturnPolicyServiceGrpcTransport +from .grpc_asyncio import OnlineReturnPolicyServiceGrpcAsyncIOTransport +from .rest import ( + OnlineReturnPolicyServiceRestInterceptor, + OnlineReturnPolicyServiceRestTransport, +) + +# Compile a registry of transports. 
+_transport_registry = ( + OrderedDict() +) # type: Dict[str, Type[OnlineReturnPolicyServiceTransport]] +_transport_registry["grpc"] = OnlineReturnPolicyServiceGrpcTransport +_transport_registry["grpc_asyncio"] = OnlineReturnPolicyServiceGrpcAsyncIOTransport +_transport_registry["rest"] = OnlineReturnPolicyServiceRestTransport + +__all__ = ( + "OnlineReturnPolicyServiceTransport", + "OnlineReturnPolicyServiceGrpcTransport", + "OnlineReturnPolicyServiceGrpcAsyncIOTransport", + "OnlineReturnPolicyServiceRestTransport", + "OnlineReturnPolicyServiceRestInterceptor", +) diff --git a/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/online_return_policy_service/transports/base.py b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/online_return_policy_service/transports/base.py new file mode 100644 index 000000000000..8ba007e58c71 --- /dev/null +++ b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/online_return_policy_service/transports/base.py @@ -0,0 +1,179 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import abc +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union + +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.shopping.merchant_accounts_v1beta import gapic_version as package_version +from google.shopping.merchant_accounts_v1beta.types import online_return_policy + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +class OnlineReturnPolicyServiceTransport(abc.ABC): + """Abstract transport class for OnlineReturnPolicyService.""" + + AUTH_SCOPES = ("https://www.googleapis.com/auth/content",) + + DEFAULT_HOST: str = "merchantapi.googleapis.com" + + def __init__( + self, + *, + host: str = DEFAULT_HOST, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + **kwargs, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'merchantapi.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. 
These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A list of scopes. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + """ + + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} + + # Save the scopes. + self._scopes = scopes + + # If no credentials are provided, then determine the appropriate + # defaults. + if credentials and credentials_file: + raise core_exceptions.DuplicateCredentialArgs( + "'credentials_file' and 'credentials' are mutually exclusive" + ) + + if credentials_file is not None: + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, **scopes_kwargs, quota_project_id=quota_project_id + ) + elif credentials is None: + credentials, _ = google.auth.default( + **scopes_kwargs, quota_project_id=quota_project_id + ) + # Don't apply audience if the credentials file passed from user. + if hasattr(credentials, "with_gdch_audience"): + credentials = credentials.with_gdch_audience( + api_audience if api_audience else host + ) + + # If the credentials are service account credentials, then always try to use self signed JWT. + if ( + always_use_jwt_access + and isinstance(credentials, service_account.Credentials) + and hasattr(service_account.Credentials, "with_always_use_jwt_access") + ): + credentials = credentials.with_always_use_jwt_access(True) + + # Save the credentials. + self._credentials = credentials + + # Save the hostname. Default to port 443 (HTTPS) if none is specified. + if ":" not in host: + host += ":443" + self._host = host + + @property + def host(self): + return self._host + + def _prep_wrapped_messages(self, client_info): + # Precompute the wrapped methods. + self._wrapped_methods = { + self.get_online_return_policy: gapic_v1.method.wrap_method( + self.get_online_return_policy, + default_timeout=None, + client_info=client_info, + ), + self.list_online_return_policies: gapic_v1.method.wrap_method( + self.list_online_return_policies, + default_timeout=None, + client_info=client_info, + ), + } + + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! 
+ """ + raise NotImplementedError() + + @property + def get_online_return_policy( + self, + ) -> Callable[ + [online_return_policy.GetOnlineReturnPolicyRequest], + Union[ + online_return_policy.OnlineReturnPolicy, + Awaitable[online_return_policy.OnlineReturnPolicy], + ], + ]: + raise NotImplementedError() + + @property + def list_online_return_policies( + self, + ) -> Callable[ + [online_return_policy.ListOnlineReturnPoliciesRequest], + Union[ + online_return_policy.ListOnlineReturnPoliciesResponse, + Awaitable[online_return_policy.ListOnlineReturnPoliciesResponse], + ], + ]: + raise NotImplementedError() + + @property + def kind(self) -> str: + raise NotImplementedError() + + +__all__ = ("OnlineReturnPolicyServiceTransport",) diff --git a/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/online_return_policy_service/transports/grpc.py b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/online_return_policy_service/transports/grpc.py new file mode 100644 index 000000000000..f6fd47a83d20 --- /dev/null +++ b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/online_return_policy_service/transports/grpc.py @@ -0,0 +1,307 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, grpc_helpers +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +import grpc # type: ignore + +from google.shopping.merchant_accounts_v1beta.types import online_return_policy + +from .base import DEFAULT_CLIENT_INFO, OnlineReturnPolicyServiceTransport + + +class OnlineReturnPolicyServiceGrpcTransport(OnlineReturnPolicyServiceTransport): + """gRPC backend transport for OnlineReturnPolicyService. + + The service facilitates the management of a merchant's remorse + return policy configuration, encompassing return policies for both + ads and free listings + + programs. This API defines the following resource model: + - [OnlineReturnPolicy][google.shopping.merchant.accounts.v1.OnlineReturnPolicy] + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. 
+ """ + + _stubs: Dict[str, Callable] + + def __init__( + self, + *, + host: str = "merchantapi.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'merchantapi.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if a ``channel`` instance is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if a ``channel`` instance is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if a ``channel`` instance is provided. + channel (Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]]): + A ``Channel`` instance through which to make calls, or a Callable + that constructs and returns one. If set to None, ``self.create_channel`` + is used to create the channel. If a Callable is given, it will be called + with the same arguments as used in ``self.create_channel``. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if a ``channel`` instance is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. 
+ google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if isinstance(channel, grpc.Channel): + # Ignore credentials if a channel was passed. + credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + # initialize with the provided callable or the default channel + channel_init = channel or type(self).create_channel + self._grpc_channel = channel_init( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @classmethod + def create_channel( + cls, + host: str = "merchantapi.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> grpc.Channel: + """Create and return a gRPC channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. 
+ kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + grpc.Channel: A gRPC channel object. + + Raises: + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + + return grpc_helpers.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs, + ) + + @property + def grpc_channel(self) -> grpc.Channel: + """Return the channel designed to connect to this service.""" + return self._grpc_channel + + @property + def get_online_return_policy( + self, + ) -> Callable[ + [online_return_policy.GetOnlineReturnPolicyRequest], + online_return_policy.OnlineReturnPolicy, + ]: + r"""Return a callable for the get online return policy method over gRPC. + + Gets an existing return policy. + + Returns: + Callable[[~.GetOnlineReturnPolicyRequest], + ~.OnlineReturnPolicy]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_online_return_policy" not in self._stubs: + self._stubs["get_online_return_policy"] = self.grpc_channel.unary_unary( + "/google.shopping.merchant.accounts.v1beta.OnlineReturnPolicyService/GetOnlineReturnPolicy", + request_serializer=online_return_policy.GetOnlineReturnPolicyRequest.serialize, + response_deserializer=online_return_policy.OnlineReturnPolicy.deserialize, + ) + return self._stubs["get_online_return_policy"] + + @property + def list_online_return_policies( + self, + ) -> Callable[ + [online_return_policy.ListOnlineReturnPoliciesRequest], + online_return_policy.ListOnlineReturnPoliciesResponse, + ]: + r"""Return a callable for the list online return policies method over gRPC. + + Lists all existing return policies. + + Returns: + Callable[[~.ListOnlineReturnPoliciesRequest], + ~.ListOnlineReturnPoliciesResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
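+        # The callable below is created lazily on first access and cached
+        # in ``self._stubs`` so repeated property reads reuse the same
+        # ``unary_unary`` handle on the underlying channel.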
+ if "list_online_return_policies" not in self._stubs: + self._stubs["list_online_return_policies"] = self.grpc_channel.unary_unary( + "/google.shopping.merchant.accounts.v1beta.OnlineReturnPolicyService/ListOnlineReturnPolicies", + request_serializer=online_return_policy.ListOnlineReturnPoliciesRequest.serialize, + response_deserializer=online_return_policy.ListOnlineReturnPoliciesResponse.deserialize, + ) + return self._stubs["list_online_return_policies"] + + def close(self): + self.grpc_channel.close() + + @property + def kind(self) -> str: + return "grpc" + + +__all__ = ("OnlineReturnPolicyServiceGrpcTransport",) diff --git a/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/online_return_policy_service/transports/grpc_asyncio.py b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/online_return_policy_service/transports/grpc_asyncio.py new file mode 100644 index 000000000000..abc5cd13bc99 --- /dev/null +++ b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/online_return_policy_service/transports/grpc_asyncio.py @@ -0,0 +1,322 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1, grpc_helpers_async +from google.api_core import retry_async as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +import grpc # type: ignore +from grpc.experimental import aio # type: ignore + +from google.shopping.merchant_accounts_v1beta.types import online_return_policy + +from .base import DEFAULT_CLIENT_INFO, OnlineReturnPolicyServiceTransport +from .grpc import OnlineReturnPolicyServiceGrpcTransport + + +class OnlineReturnPolicyServiceGrpcAsyncIOTransport(OnlineReturnPolicyServiceTransport): + """gRPC AsyncIO backend transport for OnlineReturnPolicyService. + + The service facilitates the management of a merchant's remorse + return policy configuration, encompassing return policies for both + ads and free listings + + programs. This API defines the following resource model: + - [OnlineReturnPolicy][google.shopping.merchant.accounts.v1.OnlineReturnPolicy] + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. 
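+
+    A minimal usage sketch (illustrative only; the resource name below is
+    a placeholder):
+
+    .. code-block:: python
+
+        from google.shopping import merchant_accounts_v1beta
+        from google.shopping.merchant_accounts_v1beta.services.online_return_policy_service.transports import (
+            OnlineReturnPolicyServiceGrpcAsyncIOTransport,
+        )
+
+        async def main():
+            transport = OnlineReturnPolicyServiceGrpcAsyncIOTransport()
+            client = merchant_accounts_v1beta.OnlineReturnPolicyServiceAsyncClient(
+                transport=transport,
+            )
+            policy = await client.get_online_return_policy(
+                name="accounts/123/onlineReturnPolicies/456",  # hypothetical IDs
+            )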
+ """ + + _grpc_channel: aio.Channel + _stubs: Dict[str, Callable] = {} + + @classmethod + def create_channel( + cls, + host: str = "merchantapi.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> aio.Channel: + """Create and return a gRPC AsyncIO channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + aio.Channel: A gRPC AsyncIO channel object. + """ + + return grpc_helpers_async.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs, + ) + + def __init__( + self, + *, + host: str = "merchantapi.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[Union[aio.Channel, Callable[..., aio.Channel]]] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'merchantapi.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if a ``channel`` instance is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if a ``channel`` instance is provided. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + channel (Optional[Union[aio.Channel, Callable[..., aio.Channel]]]): + A ``Channel`` instance through which to make calls, or a Callable + that constructs and returns one. If set to None, ``self.create_channel`` + is used to create the channel. 
If a Callable is given, it will be called + with the same arguments as used in ``self.create_channel``. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if a ``channel`` instance is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if isinstance(channel, aio.Channel): + # Ignore credentials if a channel was passed. + credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. 
+ if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + # initialize with the provided callable or the default channel + channel_init = channel or type(self).create_channel + self._grpc_channel = channel_init( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @property + def grpc_channel(self) -> aio.Channel: + """Create the channel designed to connect to this service. + + This property caches on the instance; repeated calls return + the same channel. + """ + # Return the channel from cache. + return self._grpc_channel + + @property + def get_online_return_policy( + self, + ) -> Callable[ + [online_return_policy.GetOnlineReturnPolicyRequest], + Awaitable[online_return_policy.OnlineReturnPolicy], + ]: + r"""Return a callable for the get online return policy method over gRPC. + + Gets an existing return policy. + + Returns: + Callable[[~.GetOnlineReturnPolicyRequest], + Awaitable[~.OnlineReturnPolicy]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_online_return_policy" not in self._stubs: + self._stubs["get_online_return_policy"] = self.grpc_channel.unary_unary( + "/google.shopping.merchant.accounts.v1beta.OnlineReturnPolicyService/GetOnlineReturnPolicy", + request_serializer=online_return_policy.GetOnlineReturnPolicyRequest.serialize, + response_deserializer=online_return_policy.OnlineReturnPolicy.deserialize, + ) + return self._stubs["get_online_return_policy"] + + @property + def list_online_return_policies( + self, + ) -> Callable[ + [online_return_policy.ListOnlineReturnPoliciesRequest], + Awaitable[online_return_policy.ListOnlineReturnPoliciesResponse], + ]: + r"""Return a callable for the list online return policies method over gRPC. + + Lists all existing return policies. + + Returns: + Callable[[~.ListOnlineReturnPoliciesRequest], + Awaitable[~.ListOnlineReturnPoliciesResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_online_return_policies" not in self._stubs: + self._stubs["list_online_return_policies"] = self.grpc_channel.unary_unary( + "/google.shopping.merchant.accounts.v1beta.OnlineReturnPolicyService/ListOnlineReturnPolicies", + request_serializer=online_return_policy.ListOnlineReturnPoliciesRequest.serialize, + response_deserializer=online_return_policy.ListOnlineReturnPoliciesResponse.deserialize, + ) + return self._stubs["list_online_return_policies"] + + def _prep_wrapped_messages(self, client_info): + """Precompute the wrapped methods, overriding the base class method to use async wrappers.""" + self._wrapped_methods = { + self.get_online_return_policy: gapic_v1.method_async.wrap_method( + self.get_online_return_policy, + default_timeout=None, + client_info=client_info, + ), + self.list_online_return_policies: gapic_v1.method_async.wrap_method( + self.list_online_return_policies, + default_timeout=None, + client_info=client_info, + ), + } + + def close(self): + return self.grpc_channel.close() + + +__all__ = ("OnlineReturnPolicyServiceGrpcAsyncIOTransport",) diff --git a/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/online_return_policy_service/transports/rest.py b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/online_return_policy_service/transports/rest.py new file mode 100644 index 000000000000..220765f2ddd8 --- /dev/null +++ b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/online_return_policy_service/transports/rest.py @@ -0,0 +1,455 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +import dataclasses +import json # type: ignore +import re +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, path_template, rest_helpers, rest_streaming +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.transport.requests import AuthorizedSession # type: ignore +from google.protobuf import json_format +import grpc # type: ignore +from requests import __version__ as requests_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + + +from google.shopping.merchant_accounts_v1beta.types import online_return_policy + +from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO +from .base import OnlineReturnPolicyServiceTransport + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) + + +class OnlineReturnPolicyServiceRestInterceptor: + """Interceptor for OnlineReturnPolicyService. + + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. + Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the OnlineReturnPolicyServiceRestTransport. + + .. code-block:: python + class MyCustomOnlineReturnPolicyServiceInterceptor(OnlineReturnPolicyServiceRestInterceptor): + def pre_get_online_return_policy(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_online_return_policy(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_online_return_policies(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_online_return_policies(self, response): + logging.log(f"Received response: {response}") + return response + + transport = OnlineReturnPolicyServiceRestTransport(interceptor=MyCustomOnlineReturnPolicyServiceInterceptor()) + client = OnlineReturnPolicyServiceClient(transport=transport) + + + """ + + def pre_get_online_return_policy( + self, + request: online_return_policy.GetOnlineReturnPolicyRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + online_return_policy.GetOnlineReturnPolicyRequest, Sequence[Tuple[str, str]] + ]: + """Pre-rpc interceptor for get_online_return_policy + + Override in a subclass to manipulate the request or metadata + before they are sent to the OnlineReturnPolicyService server. + """ + return request, metadata + + def post_get_online_return_policy( + self, response: online_return_policy.OnlineReturnPolicy + ) -> online_return_policy.OnlineReturnPolicy: + """Post-rpc interceptor for get_online_return_policy + + Override in a subclass to manipulate the response + after it is returned by the OnlineReturnPolicyService server but before + it is returned to user code. 
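+
+        A minimal override sketch (illustrative only):
+
+        .. code-block:: python
+
+            class LoggingInterceptor(OnlineReturnPolicyServiceRestInterceptor):
+                def post_get_online_return_policy(self, response):
+                    # Inspect (or replace) the response here before
+                    # returning it to the caller.
+                    return response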
+ """ + return response + + def pre_list_online_return_policies( + self, + request: online_return_policy.ListOnlineReturnPoliciesRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + online_return_policy.ListOnlineReturnPoliciesRequest, Sequence[Tuple[str, str]] + ]: + """Pre-rpc interceptor for list_online_return_policies + + Override in a subclass to manipulate the request or metadata + before they are sent to the OnlineReturnPolicyService server. + """ + return request, metadata + + def post_list_online_return_policies( + self, response: online_return_policy.ListOnlineReturnPoliciesResponse + ) -> online_return_policy.ListOnlineReturnPoliciesResponse: + """Post-rpc interceptor for list_online_return_policies + + Override in a subclass to manipulate the response + after it is returned by the OnlineReturnPolicyService server but before + it is returned to user code. + """ + return response + + +@dataclasses.dataclass +class OnlineReturnPolicyServiceRestStub: + _session: AuthorizedSession + _host: str + _interceptor: OnlineReturnPolicyServiceRestInterceptor + + +class OnlineReturnPolicyServiceRestTransport(OnlineReturnPolicyServiceTransport): + """REST backend transport for OnlineReturnPolicyService. + + The service facilitates the management of a merchant's remorse + return policy configuration, encompassing return policies for both + ads and free listings + + programs. This API defines the following resource model: + - [OnlineReturnPolicy][google.shopping.merchant.accounts.v1.OnlineReturnPolicy] + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends JSON representations of protocol buffers over HTTP/1.1 + + """ + + def __init__( + self, + *, + host: str = "merchantapi.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", + interceptor: Optional[OnlineReturnPolicyServiceRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'merchantapi.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. It is ignored + if ``channel`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. 
+ Generally, you only need to set this if you are developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. + """ + # Run the base constructor + # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. + # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the + # credentials object + maybe_url_match = re.match("^(?Phttp(?:s)?://)?(?P.*)$", host) + if maybe_url_match is None: + raise ValueError( + f"Unexpected hostname structure: {host}" + ) # pragma: NO COVER + + url_match_items = maybe_url_match.groupdict() + + host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host + + super().__init__( + host=host, + credentials=credentials, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + self._session = AuthorizedSession( + self._credentials, default_host=self.DEFAULT_HOST + ) + if client_cert_source_for_mtls: + self._session.configure_mtls_channel(client_cert_source_for_mtls) + self._interceptor = interceptor or OnlineReturnPolicyServiceRestInterceptor() + self._prep_wrapped_messages(client_info) + + class _GetOnlineReturnPolicy(OnlineReturnPolicyServiceRestStub): + def __hash__(self): + return hash("GetOnlineReturnPolicy") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: online_return_policy.GetOnlineReturnPolicyRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> online_return_policy.OnlineReturnPolicy: + r"""Call the get online return policy method over HTTP. + + Args: + request (~.online_return_policy.GetOnlineReturnPolicyRequest): + The request object. Request message for the ``GetOnlineReturnPolicy`` + method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.online_return_policy.OnlineReturnPolicy: + `Online return + policy `__ + object. This is currently used to represent return + policies for ads and free listings programs. 
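+
+            For illustration only (hypothetical IDs): a request whose
+            ``name`` is ``accounts/123/onlineReturnPolicies/456`` is
+            expected to transcode to
+            ``GET /accounts/v1beta/accounts/123/onlineReturnPolicies/456``
+            on the configured host.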
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/accounts/v1beta/{name=accounts/*/onlineReturnPolicies/*}", + }, + ] + request, metadata = self._interceptor.pre_get_online_return_policy( + request, metadata + ) + pb_request = online_return_policy.GetOnlineReturnPolicyRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = online_return_policy.OnlineReturnPolicy() + pb_resp = online_return_policy.OnlineReturnPolicy.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_online_return_policy(resp) + return resp + + class _ListOnlineReturnPolicies(OnlineReturnPolicyServiceRestStub): + def __hash__(self): + return hash("ListOnlineReturnPolicies") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: online_return_policy.ListOnlineReturnPoliciesRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> online_return_policy.ListOnlineReturnPoliciesResponse: + r"""Call the list online return + policies method over HTTP. + + Args: + request (~.online_return_policy.ListOnlineReturnPoliciesRequest): + The request object. Request message for the ``ListOnlineReturnPolicies`` + method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.online_return_policy.ListOnlineReturnPoliciesResponse: + Response message for the ``ListOnlineReturnPolicies`` + method. 
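+
+            For illustration only (hypothetical ID): a request whose
+            ``parent`` is ``accounts/123`` is expected to transcode to
+            ``GET /accounts/v1beta/accounts/123/onlineReturnPolicies``
+            on the configured host.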
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/accounts/v1beta/{parent=accounts/*}/onlineReturnPolicies", + }, + ] + request, metadata = self._interceptor.pre_list_online_return_policies( + request, metadata + ) + pb_request = online_return_policy.ListOnlineReturnPoliciesRequest.pb( + request + ) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = online_return_policy.ListOnlineReturnPoliciesResponse() + pb_resp = online_return_policy.ListOnlineReturnPoliciesResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_online_return_policies(resp) + return resp + + @property + def get_online_return_policy( + self, + ) -> Callable[ + [online_return_policy.GetOnlineReturnPolicyRequest], + online_return_policy.OnlineReturnPolicy, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetOnlineReturnPolicy(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_online_return_policies( + self, + ) -> Callable[ + [online_return_policy.ListOnlineReturnPoliciesRequest], + online_return_policy.ListOnlineReturnPoliciesResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListOnlineReturnPolicies(self._session, self._host, self._interceptor) # type: ignore + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__ = ("OnlineReturnPolicyServiceRestTransport",) diff --git a/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/programs_service/__init__.py b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/programs_service/__init__.py new file mode 100644 index 000000000000..490e8cf3c204 --- /dev/null +++ b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/programs_service/__init__.py @@ -0,0 +1,22 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from .async_client import ProgramsServiceAsyncClient +from .client import ProgramsServiceClient + +__all__ = ( + "ProgramsServiceClient", + "ProgramsServiceAsyncClient", +) diff --git a/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/programs_service/async_client.py b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/programs_service/async_client.py new file mode 100644 index 000000000000..161f52260e21 --- /dev/null +++ b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/programs_service/async_client.py @@ -0,0 +1,748 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +import functools +import re +from typing import ( + Callable, + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, +) + +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry_async as retries +from google.api_core.client_options import ClientOptions +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.shopping.merchant_accounts_v1beta import gapic_version as package_version + +try: + OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.AsyncRetry, object, None] # type: ignore + +from google.shopping.merchant_accounts_v1beta.services.programs_service import pagers +from google.shopping.merchant_accounts_v1beta.types import programs + +from .client import ProgramsServiceClient +from .transports.base import DEFAULT_CLIENT_INFO, ProgramsServiceTransport +from .transports.grpc_asyncio import ProgramsServiceGrpcAsyncIOTransport + + +class ProgramsServiceAsyncClient: + """Service for program management. + + Programs provide a mechanism for adding functionality to merchant + accounts. A typical example of this is the `Free product + listings `__ + program, which enables products from a merchant's store to be shown + across Google for free. + + This service exposes methods to retrieve a merchant's participation + in all available programs, in addition to methods for explicitly + enabling or disabling participation in each program. 
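+
+    A minimal usage sketch (illustrative only; the account and program
+    IDs are placeholders):
+
+    .. code-block:: python
+
+        import asyncio
+
+        from google.shopping import merchant_accounts_v1beta
+
+        async def main():
+            client = merchant_accounts_v1beta.ProgramsServiceAsyncClient()
+            program = await client.get_program(
+                name="accounts/123/programs/free-listings",
+            )
+            print(program)
+
+        asyncio.run(main())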
+ """ + + _client: ProgramsServiceClient + + # Copy defaults from the synchronous client for use here. + # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. + DEFAULT_ENDPOINT = ProgramsServiceClient.DEFAULT_ENDPOINT + DEFAULT_MTLS_ENDPOINT = ProgramsServiceClient.DEFAULT_MTLS_ENDPOINT + _DEFAULT_ENDPOINT_TEMPLATE = ProgramsServiceClient._DEFAULT_ENDPOINT_TEMPLATE + _DEFAULT_UNIVERSE = ProgramsServiceClient._DEFAULT_UNIVERSE + + program_path = staticmethod(ProgramsServiceClient.program_path) + parse_program_path = staticmethod(ProgramsServiceClient.parse_program_path) + common_billing_account_path = staticmethod( + ProgramsServiceClient.common_billing_account_path + ) + parse_common_billing_account_path = staticmethod( + ProgramsServiceClient.parse_common_billing_account_path + ) + common_folder_path = staticmethod(ProgramsServiceClient.common_folder_path) + parse_common_folder_path = staticmethod( + ProgramsServiceClient.parse_common_folder_path + ) + common_organization_path = staticmethod( + ProgramsServiceClient.common_organization_path + ) + parse_common_organization_path = staticmethod( + ProgramsServiceClient.parse_common_organization_path + ) + common_project_path = staticmethod(ProgramsServiceClient.common_project_path) + parse_common_project_path = staticmethod( + ProgramsServiceClient.parse_common_project_path + ) + common_location_path = staticmethod(ProgramsServiceClient.common_location_path) + parse_common_location_path = staticmethod( + ProgramsServiceClient.parse_common_location_path + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + ProgramsServiceAsyncClient: The constructed client. + """ + return ProgramsServiceClient.from_service_account_info.__func__(ProgramsServiceAsyncClient, info, *args, **kwargs) # type: ignore + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + ProgramsServiceAsyncClient: The constructed client. + """ + return ProgramsServiceClient.from_service_account_file.__func__(ProgramsServiceAsyncClient, filename, *args, **kwargs) # type: ignore + + from_service_account_json = from_service_account_file + + @classmethod + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[ClientOptions] = None + ): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. 
+ (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + return ProgramsServiceClient.get_mtls_endpoint_and_cert_source(client_options) # type: ignore + + @property + def transport(self) -> ProgramsServiceTransport: + """Returns the transport used by the client instance. + + Returns: + ProgramsServiceTransport: The transport used by the client instance. + """ + return self._client.transport + + @property + def api_endpoint(self): + """Return the API endpoint used by the client instance. + + Returns: + str: The API endpoint used by the client instance. + """ + return self._client._api_endpoint + + @property + def universe_domain(self) -> str: + """Return the universe domain used by the client instance. + + Returns: + str: The universe domain used + by the client instance. + """ + return self._client._universe_domain + + get_transport_class = functools.partial( + type(ProgramsServiceClient).get_transport_class, type(ProgramsServiceClient) + ) + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[ + Union[ + str, ProgramsServiceTransport, Callable[..., ProgramsServiceTransport] + ] + ] = "grpc_asyncio", + client_options: Optional[ClientOptions] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the programs service async client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Optional[Union[str,ProgramsServiceTransport,Callable[..., ProgramsServiceTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport to use. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the ProgramsServiceTransport constructor. + If set to None, a transport is chosen automatically. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): + Custom options for the client. + + 1. The ``api_endpoint`` property can be used to override the + default endpoint provided by the client when ``transport`` is + not explicitly provided. Only if this property is not set and + ``transport`` was not explicitly provided, the endpoint is + determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment + variable, which have one of the following values: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto-switch to the + default mTLS endpoint if client certificate is present; this is + the default value). + + 2. 
If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide a client certificate for mTLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + 3. The ``universe_domain`` property can be used to override the + default "googleapis.com" universe. Note that ``api_endpoint`` + property still takes precedence; and ``universe_domain`` is + currently not supported for mTLS. + + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + """ + self._client = ProgramsServiceClient( + credentials=credentials, + transport=transport, + client_options=client_options, + client_info=client_info, + ) + + async def get_program( + self, + request: Optional[Union[programs.GetProgramRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> programs.Program: + r"""Retrieves the specified program for the account. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.shopping import merchant_accounts_v1beta + + async def sample_get_program(): + # Create a client + client = merchant_accounts_v1beta.ProgramsServiceAsyncClient() + + # Initialize request argument(s) + request = merchant_accounts_v1beta.GetProgramRequest( + name="name_value", + ) + + # Make the request + response = await client.get_program(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.shopping.merchant_accounts_v1beta.types.GetProgramRequest, dict]]): + The request object. Request message for the GetProgram + method. + name (:class:`str`): + Required. The name of the program to retrieve. Format: + ``accounts/{account}/programs/{program}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.shopping.merchant_accounts_v1beta.types.Program: + Defines participation in a given program for the + specified account. + + Programs provide a mechanism for adding functionality + to merchant accounts. 
A typical example of this is + the [Free product + listings](\ https://support.google.com/merchants/topic/9240261?ref_topic=7257954,7259405,&sjid=796648681813264022-EU) + program, which enables products from a merchant's + store to be shown across Google for free. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, programs.GetProgramRequest): + request = programs.GetProgramRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.get_program + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_programs( + self, + request: Optional[Union[programs.ListProgramsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListProgramsAsyncPager: + r"""Retrieves all programs for the account. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.shopping import merchant_accounts_v1beta + + async def sample_list_programs(): + # Create a client + client = merchant_accounts_v1beta.ProgramsServiceAsyncClient() + + # Initialize request argument(s) + request = merchant_accounts_v1beta.ListProgramsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_programs(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.shopping.merchant_accounts_v1beta.types.ListProgramsRequest, dict]]): + The request object. Request message for the ListPrograms + method. + parent (:class:`str`): + Required. The name of the account for which to retrieve + all programs. Format: ``accounts/{account}`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.shopping.merchant_accounts_v1beta.services.programs_service.pagers.ListProgramsAsyncPager: + Response message for the ListPrograms + method. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, programs.ListProgramsRequest): + request = programs.ListProgramsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.list_programs + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListProgramsAsyncPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def enable_program( + self, + request: Optional[Union[programs.EnableProgramRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> programs.Program: + r"""Enable participation in the specified program for the + account. Executing this method requires admin access. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.shopping import merchant_accounts_v1beta + + async def sample_enable_program(): + # Create a client + client = merchant_accounts_v1beta.ProgramsServiceAsyncClient() + + # Initialize request argument(s) + request = merchant_accounts_v1beta.EnableProgramRequest( + name="name_value", + ) + + # Make the request + response = await client.enable_program(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.shopping.merchant_accounts_v1beta.types.EnableProgramRequest, dict]]): + The request object. Request message for the EnableProgram + method. + name (:class:`str`): + Required. The name of the program for which to enable + participation for the given account. Format: + ``accounts/{account}/programs/{program}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.shopping.merchant_accounts_v1beta.types.Program: + Defines participation in a given program for the + specified account. + + Programs provide a mechanism for adding functionality + to merchant accounts. A typical example of this is + the [Free product + listings](\ https://support.google.com/merchants/topic/9240261?ref_topic=7257954,7259405,&sjid=796648681813264022-EU) + program, which enables products from a merchant's + store to be shown across Google for free. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, programs.EnableProgramRequest): + request = programs.EnableProgramRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.enable_program + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def disable_program( + self, + request: Optional[Union[programs.DisableProgramRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> programs.Program: + r"""Disable participation in the specified program for + the account. Executing this method requires admin + access. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.shopping import merchant_accounts_v1beta + + async def sample_disable_program(): + # Create a client + client = merchant_accounts_v1beta.ProgramsServiceAsyncClient() + + # Initialize request argument(s) + request = merchant_accounts_v1beta.DisableProgramRequest( + name="name_value", + ) + + # Make the request + response = await client.disable_program(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.shopping.merchant_accounts_v1beta.types.DisableProgramRequest, dict]]): + The request object. Request message for the + DisableProgram method. + name (:class:`str`): + Required. The name of the program for which to disable + participation for the given account. Format: + ``accounts/{account}/programs/{program}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.shopping.merchant_accounts_v1beta.types.Program: + Defines participation in a given program for the + specified account. + + Programs provide a mechanism for adding functionality + to merchant accounts. A typical example of this is + the [Free product + listings](\ https://support.google.com/merchants/topic/9240261?ref_topic=7257954,7259405,&sjid=796648681813264022-EU) + program, which enables products from a merchant's + store to be shown across Google for free. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, programs.DisableProgramRequest): + request = programs.DisableProgramRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = self._client._transport._wrapped_methods[ + self._client._transport.disable_program + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def __aenter__(self) -> "ProgramsServiceAsyncClient": + return self + + async def __aexit__(self, exc_type, exc, tb): + await self.transport.close() + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("ProgramsServiceAsyncClient",) diff --git a/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/programs_service/client.py b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/programs_service/client.py new file mode 100644 index 000000000000..907672d1d5e5 --- /dev/null +++ b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/programs_service/client.py @@ -0,0 +1,1158 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +import os +import re +from typing import ( + Callable, + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, + cast, +) +import warnings + +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.shopping.merchant_accounts_v1beta import gapic_version as package_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + +from google.shopping.merchant_accounts_v1beta.services.programs_service import pagers +from google.shopping.merchant_accounts_v1beta.types import programs + +from .transports.base import DEFAULT_CLIENT_INFO, ProgramsServiceTransport +from .transports.grpc import ProgramsServiceGrpcTransport +from .transports.grpc_asyncio import ProgramsServiceGrpcAsyncIOTransport +from .transports.rest import ProgramsServiceRestTransport + + +class ProgramsServiceClientMeta(type): + """Metaclass for the ProgramsService client. 
+ + This provides class-level methods for building and retrieving + support objects (e.g. transport) without polluting the client instance + objects. + """ + + _transport_registry = ( + OrderedDict() + ) # type: Dict[str, Type[ProgramsServiceTransport]] + _transport_registry["grpc"] = ProgramsServiceGrpcTransport + _transport_registry["grpc_asyncio"] = ProgramsServiceGrpcAsyncIOTransport + _transport_registry["rest"] = ProgramsServiceRestTransport + + def get_transport_class( + cls, + label: Optional[str] = None, + ) -> Type[ProgramsServiceTransport]: + """Returns an appropriate transport class. + + Args: + label: The name of the desired transport. If none is + provided, then the first transport in the registry is used. + + Returns: + The transport class to use. + """ + # If a specific transport is requested, return that one. + if label: + return cls._transport_registry[label] + + # No transport is requested; return the default (that is, the first one + # in the dictionary). + return next(iter(cls._transport_registry.values())) + + +class ProgramsServiceClient(metaclass=ProgramsServiceClientMeta): + """Service for program management. + + Programs provide a mechanism for adding functionality to merchant + accounts. A typical example of this is the `Free product + listings `__ + program, which enables products from a merchant's store to be shown + across Google for free. + + This service exposes methods to retrieve a merchant's participation + in all available programs, in addition to methods for explicitly + enabling or disabling participation in each program. + """ + + @staticmethod + def _get_default_mtls_endpoint(api_endpoint): + """Converts api endpoint to mTLS endpoint. + + Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to + "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. + Args: + api_endpoint (Optional[str]): the api endpoint to convert. + Returns: + str: converted mTLS api endpoint. + """ + if not api_endpoint: + return api_endpoint + + mtls_endpoint_re = re.compile( + r"(?P[^.]+)(?P\.mtls)?(?P\.sandbox)?(?P\.googleapis\.com)?" + ) + + m = mtls_endpoint_re.match(api_endpoint) + name, mtls, sandbox, googledomain = m.groups() + if mtls or not googledomain: + return api_endpoint + + if sandbox: + return api_endpoint.replace( + "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" + ) + + return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") + + # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. + DEFAULT_ENDPOINT = "merchantapi.googleapis.com" + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore + DEFAULT_ENDPOINT + ) + + _DEFAULT_ENDPOINT_TEMPLATE = "merchantapi.{UNIVERSE_DOMAIN}" + _DEFAULT_UNIVERSE = "googleapis.com" + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + ProgramsServiceClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. 
+ + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + ProgramsServiceClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_file(filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> ProgramsServiceTransport: + """Returns the transport used by the client instance. + + Returns: + ProgramsServiceTransport: The transport used by the client + instance. + """ + return self._transport + + @staticmethod + def program_path( + account: str, + program: str, + ) -> str: + """Returns a fully-qualified program string.""" + return "accounts/{account}/programs/{program}".format( + account=account, + program=program, + ) + + @staticmethod + def parse_program_path(path: str) -> Dict[str, str]: + """Parses a program path into its component segments.""" + m = re.match(r"^accounts/(?P.+?)/programs/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_billing_account_path( + billing_account: str, + ) -> str: + """Returns a fully-qualified billing_account string.""" + return "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + + @staticmethod + def parse_common_billing_account_path(path: str) -> Dict[str, str]: + """Parse a billing_account path into its component segments.""" + m = re.match(r"^billingAccounts/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_folder_path( + folder: str, + ) -> str: + """Returns a fully-qualified folder string.""" + return "folders/{folder}".format( + folder=folder, + ) + + @staticmethod + def parse_common_folder_path(path: str) -> Dict[str, str]: + """Parse a folder path into its component segments.""" + m = re.match(r"^folders/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_organization_path( + organization: str, + ) -> str: + """Returns a fully-qualified organization string.""" + return "organizations/{organization}".format( + organization=organization, + ) + + @staticmethod + def parse_common_organization_path(path: str) -> Dict[str, str]: + """Parse a organization path into its component segments.""" + m = re.match(r"^organizations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_project_path( + project: str, + ) -> str: + """Returns a fully-qualified project string.""" + return "projects/{project}".format( + project=project, + ) + + @staticmethod + def parse_common_project_path(path: str) -> Dict[str, str]: + """Parse a project path into its component segments.""" + m = re.match(r"^projects/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_location_path( + project: str, + location: str, + ) -> str: + """Returns a fully-qualified location string.""" + return "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) + + @staticmethod + def parse_common_location_path(path: str) -> Dict[str, str]: + """Parse a location path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @classmethod + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[client_options_lib.ClientOptions] = None + ): + """Deprecated. 
Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + + warnings.warn( + "get_mtls_endpoint_and_cert_source is deprecated. Use the api_endpoint property instead.", + DeprecationWarning, + ) + if client_options is None: + client_options = client_options_lib.ClientOptions() + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_client_cert not in ("true", "false"): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError( + "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Figure out the client cert source to use. + client_cert_source = None + if use_client_cert == "true": + if client_options.client_cert_source: + client_cert_source = client_options.client_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + + # Figure out which api endpoint to use. + if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + elif use_mtls_endpoint == "always" or ( + use_mtls_endpoint == "auto" and client_cert_source + ): + api_endpoint = cls.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = cls.DEFAULT_ENDPOINT + + return api_endpoint, client_cert_source + + @staticmethod + def _read_environment_variables(): + """Returns the environment variables used by the client. + + Returns: + Tuple[bool, str, str]: returns the GOOGLE_API_USE_CLIENT_CERTIFICATE, + GOOGLE_API_USE_MTLS_ENDPOINT, and GOOGLE_CLOUD_UNIVERSE_DOMAIN environment variables. + + Raises: + ValueError: If GOOGLE_API_USE_CLIENT_CERTIFICATE is not + any of ["true", "false"]. + google.auth.exceptions.MutualTLSChannelError: If GOOGLE_API_USE_MTLS_ENDPOINT + is not any of ["auto", "never", "always"]. 
+ """ + use_client_cert = os.getenv( + "GOOGLE_API_USE_CLIENT_CERTIFICATE", "false" + ).lower() + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto").lower() + universe_domain_env = os.getenv("GOOGLE_CLOUD_UNIVERSE_DOMAIN") + if use_client_cert not in ("true", "false"): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError( + "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + return use_client_cert == "true", use_mtls_endpoint, universe_domain_env + + @staticmethod + def _get_client_cert_source(provided_cert_source, use_cert_flag): + """Return the client cert source to be used by the client. + + Args: + provided_cert_source (bytes): The client certificate source provided. + use_cert_flag (bool): A flag indicating whether to use the client certificate. + + Returns: + bytes or None: The client cert source to be used by the client. + """ + client_cert_source = None + if use_cert_flag: + if provided_cert_source: + client_cert_source = provided_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + return client_cert_source + + @staticmethod + def _get_api_endpoint( + api_override, client_cert_source, universe_domain, use_mtls_endpoint + ): + """Return the API endpoint used by the client. + + Args: + api_override (str): The API endpoint override. If specified, this is always + the return value of this function and the other arguments are not used. + client_cert_source (bytes): The client certificate source used by the client. + universe_domain (str): The universe domain used by the client. + use_mtls_endpoint (str): How to use the mTLS endpoint, which depends also on the other parameters. + Possible values are "always", "auto", or "never". + + Returns: + str: The API endpoint to be used by the client. + """ + if api_override is not None: + api_endpoint = api_override + elif use_mtls_endpoint == "always" or ( + use_mtls_endpoint == "auto" and client_cert_source + ): + _default_universe = ProgramsServiceClient._DEFAULT_UNIVERSE + if universe_domain != _default_universe: + raise MutualTLSChannelError( + f"mTLS is not supported in any universe other than {_default_universe}." + ) + api_endpoint = ProgramsServiceClient.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = ProgramsServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=universe_domain + ) + return api_endpoint + + @staticmethod + def _get_universe_domain( + client_universe_domain: Optional[str], universe_domain_env: Optional[str] + ) -> str: + """Return the universe domain used by the client. + + Args: + client_universe_domain (Optional[str]): The universe domain configured via the client options. + universe_domain_env (Optional[str]): The universe domain configured via the "GOOGLE_CLOUD_UNIVERSE_DOMAIN" environment variable. + + Returns: + str: The universe domain to be used by the client. + + Raises: + ValueError: If the universe domain is an empty string. 
+ """ + universe_domain = ProgramsServiceClient._DEFAULT_UNIVERSE + if client_universe_domain is not None: + universe_domain = client_universe_domain + elif universe_domain_env is not None: + universe_domain = universe_domain_env + if len(universe_domain.strip()) == 0: + raise ValueError("Universe Domain cannot be an empty string.") + return universe_domain + + @staticmethod + def _compare_universes( + client_universe: str, credentials: ga_credentials.Credentials + ) -> bool: + """Returns True iff the universe domains used by the client and credentials match. + + Args: + client_universe (str): The universe domain configured via the client options. + credentials (ga_credentials.Credentials): The credentials being used in the client. + + Returns: + bool: True iff client_universe matches the universe in credentials. + + Raises: + ValueError: when client_universe does not match the universe in credentials. + """ + + default_universe = ProgramsServiceClient._DEFAULT_UNIVERSE + credentials_universe = getattr(credentials, "universe_domain", default_universe) + + if client_universe != credentials_universe: + raise ValueError( + "The configured universe domain " + f"({client_universe}) does not match the universe domain " + f"found in the credentials ({credentials_universe}). " + "If you haven't configured the universe domain explicitly, " + f"`{default_universe}` is the default." + ) + return True + + def _validate_universe_domain(self): + """Validates client's and credentials' universe domains are consistent. + + Returns: + bool: True iff the configured universe domain is valid. + + Raises: + ValueError: If the configured universe domain is not valid. + """ + self._is_universe_domain_valid = ( + self._is_universe_domain_valid + or ProgramsServiceClient._compare_universes( + self.universe_domain, self.transport._credentials + ) + ) + return self._is_universe_domain_valid + + @property + def api_endpoint(self): + """Return the API endpoint used by the client instance. + + Returns: + str: The API endpoint used by the client instance. + """ + return self._api_endpoint + + @property + def universe_domain(self) -> str: + """Return the universe domain used by the client instance. + + Returns: + str: The universe domain used by the client instance. + """ + return self._universe_domain + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[ + Union[ + str, ProgramsServiceTransport, Callable[..., ProgramsServiceTransport] + ] + ] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the programs service client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Optional[Union[str,ProgramsServiceTransport,Callable[..., ProgramsServiceTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the ProgramsServiceTransport constructor. + If set to None, a transport is chosen automatically. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): + Custom options for the client. + + 1. 
The ``api_endpoint`` property can be used to override the + default endpoint provided by the client when ``transport`` is + not explicitly provided. Only if this property is not set and + ``transport`` was not explicitly provided, the endpoint is + determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment + variable, which have one of the following values: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto-switch to the + default mTLS endpoint if client certificate is present; this is + the default value). + + 2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide a client certificate for mTLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + 3. The ``universe_domain`` property can be used to override the + default "googleapis.com" universe. Note that the ``api_endpoint`` + property still takes precedence; and ``universe_domain`` is + currently not supported for mTLS. + + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + """ + self._client_options = client_options + if isinstance(self._client_options, dict): + self._client_options = client_options_lib.from_dict(self._client_options) + if self._client_options is None: + self._client_options = client_options_lib.ClientOptions() + self._client_options = cast( + client_options_lib.ClientOptions, self._client_options + ) + + universe_domain_opt = getattr(self._client_options, "universe_domain", None) + + ( + self._use_client_cert, + self._use_mtls_endpoint, + self._universe_domain_env, + ) = ProgramsServiceClient._read_environment_variables() + self._client_cert_source = ProgramsServiceClient._get_client_cert_source( + self._client_options.client_cert_source, self._use_client_cert + ) + self._universe_domain = ProgramsServiceClient._get_universe_domain( + universe_domain_opt, self._universe_domain_env + ) + self._api_endpoint = None # updated below, depending on `transport` + + # Initialize the universe domain validation. + self._is_universe_domain_valid = False + + api_key_value = getattr(self._client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError( + "client_options.api_key and credentials are mutually exclusive" + ) + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + transport_provided = isinstance(transport, ProgramsServiceTransport) + if transport_provided: + # transport is a ProgramsServiceTransport instance. + if credentials or self._client_options.credentials_file or api_key_value: + raise ValueError( + "When providing a transport instance, " + "provide its credentials directly." + ) + if self._client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." 
+ ) + self._transport = cast(ProgramsServiceTransport, transport) + self._api_endpoint = self._transport.host + + self._api_endpoint = ( + self._api_endpoint + or ProgramsServiceClient._get_api_endpoint( + self._client_options.api_endpoint, + self._client_cert_source, + self._universe_domain, + self._use_mtls_endpoint, + ) + ) + + if not transport_provided: + import google.auth._default # type: ignore + + if api_key_value and hasattr( + google.auth._default, "get_api_key_credentials" + ): + credentials = google.auth._default.get_api_key_credentials( + api_key_value + ) + + transport_init: Union[ + Type[ProgramsServiceTransport], Callable[..., ProgramsServiceTransport] + ] = ( + type(self).get_transport_class(transport) + if isinstance(transport, str) or transport is None + else cast(Callable[..., ProgramsServiceTransport], transport) + ) + # initialize with the provided callable or the passed in class + self._transport = transport_init( + credentials=credentials, + credentials_file=self._client_options.credentials_file, + host=self._api_endpoint, + scopes=self._client_options.scopes, + client_cert_source_for_mtls=self._client_cert_source, + quota_project_id=self._client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + api_audience=self._client_options.api_audience, + ) + + def get_program( + self, + request: Optional[Union[programs.GetProgramRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> programs.Program: + r"""Retrieves the specified program for the account. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.shopping import merchant_accounts_v1beta + + def sample_get_program(): + # Create a client + client = merchant_accounts_v1beta.ProgramsServiceClient() + + # Initialize request argument(s) + request = merchant_accounts_v1beta.GetProgramRequest( + name="name_value", + ) + + # Make the request + response = client.get_program(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.shopping.merchant_accounts_v1beta.types.GetProgramRequest, dict]): + The request object. Request message for the GetProgram + method. + name (str): + Required. The name of the program to retrieve. Format: + ``accounts/{account}/programs/{program}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.shopping.merchant_accounts_v1beta.types.Program: + Defines participation in a given program for the + specified account. + + Programs provide a mechanism for adding functionality + to merchant accounts. 
A typical example of this is + the [Free product + listings](\ https://support.google.com/merchants/topic/9240261?ref_topic=7257954,7259405,&sjid=796648681813264022-EU) + program, which enables products from a merchant's + store to be shown across Google for free. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, programs.GetProgramRequest): + request = programs.GetProgramRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_program] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def list_programs( + self, + request: Optional[Union[programs.ListProgramsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListProgramsPager: + r"""Retrieves all programs for the account. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.shopping import merchant_accounts_v1beta + + def sample_list_programs(): + # Create a client + client = merchant_accounts_v1beta.ProgramsServiceClient() + + # Initialize request argument(s) + request = merchant_accounts_v1beta.ListProgramsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_programs(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.shopping.merchant_accounts_v1beta.types.ListProgramsRequest, dict]): + The request object. Request message for the ListPrograms + method. + parent (str): + Required. The name of the account for which to retrieve + all programs. Format: ``accounts/{account}`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.shopping.merchant_accounts_v1beta.services.programs_service.pagers.ListProgramsPager: + Response message for the ListPrograms + method. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, programs.ListProgramsRequest): + request = programs.ListProgramsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_programs] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListProgramsPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def enable_program( + self, + request: Optional[Union[programs.EnableProgramRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> programs.Program: + r"""Enable participation in the specified program for the + account. Executing this method requires admin access. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.shopping import merchant_accounts_v1beta + + def sample_enable_program(): + # Create a client + client = merchant_accounts_v1beta.ProgramsServiceClient() + + # Initialize request argument(s) + request = merchant_accounts_v1beta.EnableProgramRequest( + name="name_value", + ) + + # Make the request + response = client.enable_program(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.shopping.merchant_accounts_v1beta.types.EnableProgramRequest, dict]): + The request object. Request message for the EnableProgram + method. + name (str): + Required. 
The name of the program for which to enable + participation for the given account. Format: + ``accounts/{account}/programs/{program}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.shopping.merchant_accounts_v1beta.types.Program: + Defines participation in a given program for the + specified account. + + Programs provide a mechanism for adding functionality + to merchant accounts. A typical example of this is + the [Free product + listings](\ https://support.google.com/merchants/topic/9240261?ref_topic=7257954,7259405,&sjid=796648681813264022-EU) + program, which enables products from a merchant's + store to be shown across Google for free. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, programs.EnableProgramRequest): + request = programs.EnableProgramRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.enable_program] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def disable_program( + self, + request: Optional[Union[programs.DisableProgramRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> programs.Program: + r"""Disable participation in the specified program for + the account. Executing this method requires admin + access. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.shopping import merchant_accounts_v1beta + + def sample_disable_program(): + # Create a client + client = merchant_accounts_v1beta.ProgramsServiceClient() + + # Initialize request argument(s) + request = merchant_accounts_v1beta.DisableProgramRequest( + name="name_value", + ) + + # Make the request + response = client.disable_program(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.shopping.merchant_accounts_v1beta.types.DisableProgramRequest, dict]): + The request object. Request message for the + DisableProgram method. + name (str): + Required. The name of the program for which to disable + participation for the given account. Format: + ``accounts/{account}/programs/{program}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.shopping.merchant_accounts_v1beta.types.Program: + Defines participation in a given program for the + specified account. + + Programs provide a mechanism for adding functionality + to merchant accounts. A typical example of this is + the [Free product + listings](\ https://support.google.com/merchants/topic/9240261?ref_topic=7257954,7259405,&sjid=796648681813264022-EU) + program, which enables products from a merchant's + store to be shown across Google for free. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, programs.DisableProgramRequest): + request = programs.DisableProgramRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.disable_program] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def __enter__(self) -> "ProgramsServiceClient": + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. 
warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! + """ + self.transport.close() + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("ProgramsServiceClient",) diff --git a/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/programs_service/pagers.py b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/programs_service/pagers.py new file mode 100644 index 000000000000..9763dcb06f49 --- /dev/null +++ b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/programs_service/pagers.py @@ -0,0 +1,155 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import ( + Any, + AsyncIterator, + Awaitable, + Callable, + Iterator, + Optional, + Sequence, + Tuple, +) + +from google.shopping.merchant_accounts_v1beta.types import programs + + +class ListProgramsPager: + """A pager for iterating through ``list_programs`` requests. + + This class thinly wraps an initial + :class:`google.shopping.merchant_accounts_v1beta.types.ListProgramsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``programs`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListPrograms`` requests and continue to iterate + through the ``programs`` field on the + corresponding responses. + + All the usual :class:`google.shopping.merchant_accounts_v1beta.types.ListProgramsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., programs.ListProgramsResponse], + request: programs.ListProgramsRequest, + response: programs.ListProgramsResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.shopping.merchant_accounts_v1beta.types.ListProgramsRequest): + The initial request object. + response (google.shopping.merchant_accounts_v1beta.types.ListProgramsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = programs.ListProgramsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[programs.ListProgramsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[programs.Program]: + for page in self.pages: + yield from page.programs + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListProgramsAsyncPager: + """A pager for iterating through ``list_programs`` requests. + + This class thinly wraps an initial + :class:`google.shopping.merchant_accounts_v1beta.types.ListProgramsResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``programs`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListPrograms`` requests and continue to iterate + through the ``programs`` field on the + corresponding responses. + + All the usual :class:`google.shopping.merchant_accounts_v1beta.types.ListProgramsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., Awaitable[programs.ListProgramsResponse]], + request: programs.ListProgramsRequest, + response: programs.ListProgramsResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.shopping.merchant_accounts_v1beta.types.ListProgramsRequest): + The initial request object. + response (google.shopping.merchant_accounts_v1beta.types.ListProgramsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = programs.ListProgramsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[programs.ListProgramsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + + def __aiter__(self) -> AsyncIterator[programs.Program]: + async def async_generator(): + async for page in self.pages: + for response in page.programs: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) diff --git a/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/programs_service/transports/__init__.py b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/programs_service/transports/__init__.py new file mode 100644 index 000000000000..51407aa87f60 --- /dev/null +++ b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/programs_service/transports/__init__.py @@ -0,0 +1,36 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +from typing import Dict, Type + +from .base import ProgramsServiceTransport +from .grpc import ProgramsServiceGrpcTransport +from .grpc_asyncio import ProgramsServiceGrpcAsyncIOTransport +from .rest import ProgramsServiceRestInterceptor, ProgramsServiceRestTransport + +# Compile a registry of transports. +_transport_registry = OrderedDict() # type: Dict[str, Type[ProgramsServiceTransport]] +_transport_registry["grpc"] = ProgramsServiceGrpcTransport +_transport_registry["grpc_asyncio"] = ProgramsServiceGrpcAsyncIOTransport +_transport_registry["rest"] = ProgramsServiceRestTransport + +__all__ = ( + "ProgramsServiceTransport", + "ProgramsServiceGrpcTransport", + "ProgramsServiceGrpcAsyncIOTransport", + "ProgramsServiceRestTransport", + "ProgramsServiceRestInterceptor", +) diff --git a/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/programs_service/transports/base.py b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/programs_service/transports/base.py new file mode 100644 index 000000000000..77385c5506cd --- /dev/null +++ b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/programs_service/transports/base.py @@ -0,0 +1,201 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import abc +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union + +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.shopping.merchant_accounts_v1beta import gapic_version as package_version +from google.shopping.merchant_accounts_v1beta.types import programs + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +class ProgramsServiceTransport(abc.ABC): + """Abstract transport class for ProgramsService.""" + + AUTH_SCOPES = ("https://www.googleapis.com/auth/content",) + + DEFAULT_HOST: str = "merchantapi.googleapis.com" + + def __init__( + self, + *, + host: str = DEFAULT_HOST, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + **kwargs, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'merchantapi.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A list of scopes. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + """ + + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} + + # Save the scopes. + self._scopes = scopes + + # If no credentials are provided, then determine the appropriate + # defaults. 
+ if credentials and credentials_file: + raise core_exceptions.DuplicateCredentialArgs( + "'credentials_file' and 'credentials' are mutually exclusive" + ) + + if credentials_file is not None: + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, **scopes_kwargs, quota_project_id=quota_project_id + ) + elif credentials is None: + credentials, _ = google.auth.default( + **scopes_kwargs, quota_project_id=quota_project_id + ) + # Don't apply audience if the credentials file passed from user. + if hasattr(credentials, "with_gdch_audience"): + credentials = credentials.with_gdch_audience( + api_audience if api_audience else host + ) + + # If the credentials are service account credentials, then always try to use self signed JWT. + if ( + always_use_jwt_access + and isinstance(credentials, service_account.Credentials) + and hasattr(service_account.Credentials, "with_always_use_jwt_access") + ): + credentials = credentials.with_always_use_jwt_access(True) + + # Save the credentials. + self._credentials = credentials + + # Save the hostname. Default to port 443 (HTTPS) if none is specified. + if ":" not in host: + host += ":443" + self._host = host + + @property + def host(self): + return self._host + + def _prep_wrapped_messages(self, client_info): + # Precompute the wrapped methods. + self._wrapped_methods = { + self.get_program: gapic_v1.method.wrap_method( + self.get_program, + default_timeout=None, + client_info=client_info, + ), + self.list_programs: gapic_v1.method.wrap_method( + self.list_programs, + default_timeout=None, + client_info=client_info, + ), + self.enable_program: gapic_v1.method.wrap_method( + self.enable_program, + default_timeout=None, + client_info=client_info, + ), + self.disable_program: gapic_v1.method.wrap_method( + self.disable_program, + default_timeout=None, + client_info=client_info, + ), + } + + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! 
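To make the credential plumbing above concrete, here is a construction-only sketch against the gRPC transport registered earlier in this change. It illustrates just the precedence rules shown here (an explicit credentials object wins, then `credentials_file`, then Application Default Credentials) and the `:443` host defaulting; the key file path is a placeholder, not part of this PR.

```python
from google.shopping.merchant_accounts_v1beta.services.programs_service.transports import (
    ProgramsServiceGrpcTransport,
)

transport = ProgramsServiceGrpcTransport(
    credentials_file="service-account.json",  # placeholder; loaded with the default "content" scope
    host="merchantapi.googleapis.com",        # no port given, so ":443" is appended
)
print(transport.host)  # merchantapi.googleapis.com:443
```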
+ """ + raise NotImplementedError() + + @property + def get_program( + self, + ) -> Callable[ + [programs.GetProgramRequest], + Union[programs.Program, Awaitable[programs.Program]], + ]: + raise NotImplementedError() + + @property + def list_programs( + self, + ) -> Callable[ + [programs.ListProgramsRequest], + Union[programs.ListProgramsResponse, Awaitable[programs.ListProgramsResponse]], + ]: + raise NotImplementedError() + + @property + def enable_program( + self, + ) -> Callable[ + [programs.EnableProgramRequest], + Union[programs.Program, Awaitable[programs.Program]], + ]: + raise NotImplementedError() + + @property + def disable_program( + self, + ) -> Callable[ + [programs.DisableProgramRequest], + Union[programs.Program, Awaitable[programs.Program]], + ]: + raise NotImplementedError() + + @property + def kind(self) -> str: + raise NotImplementedError() + + +__all__ = ("ProgramsServiceTransport",) diff --git a/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/programs_service/transports/grpc.py b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/programs_service/transports/grpc.py new file mode 100644 index 000000000000..b2bbd2586afd --- /dev/null +++ b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/programs_service/transports/grpc.py @@ -0,0 +1,359 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, grpc_helpers +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +import grpc # type: ignore + +from google.shopping.merchant_accounts_v1beta.types import programs + +from .base import DEFAULT_CLIENT_INFO, ProgramsServiceTransport + + +class ProgramsServiceGrpcTransport(ProgramsServiceTransport): + """gRPC backend transport for ProgramsService. + + Service for program management. + + Programs provide a mechanism for adding functionality to merchant + accounts. A typical example of this is the `Free product + listings `__ + program, which enables products from a merchant's store to be shown + across Google for free. + + This service exposes methods to retrieve a merchant's participation + in all available programs, in addition to methods for explicitly + enabling or disabling participation in each program. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. 
+ """ + + _stubs: Dict[str, Callable] + + def __init__( + self, + *, + host: str = "merchantapi.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'merchantapi.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if a ``channel`` instance is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if a ``channel`` instance is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if a ``channel`` instance is provided. + channel (Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]]): + A ``Channel`` instance through which to make calls, or a Callable + that constructs and returns one. If set to None, ``self.create_channel`` + is used to create the channel. If a Callable is given, it will be called + with the same arguments as used in ``self.create_channel``. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if a ``channel`` instance is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. 
+ google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if isinstance(channel, grpc.Channel): + # Ignore credentials if a channel was passed. + credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + # initialize with the provided callable or the default channel + channel_init = channel or type(self).create_channel + self._grpc_channel = channel_init( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @classmethod + def create_channel( + cls, + host: str = "merchantapi.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> grpc.Channel: + """Create and return a gRPC channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. 
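Because `channel` accepts either a ready-made channel or a callable, callers can build the channel themselves, for example with the `create_channel` classmethod described above. Note the branch earlier in `__init__`: when a `grpc.Channel` instance is supplied, the transport reuses it as-is and skips credential resolution entirely. A hedged sketch (Application Default Credentials assumed):

```python
from google.shopping.merchant_accounts_v1beta.services.programs_service.transports import (
    ProgramsServiceGrpcTransport,
)

# create_channel resolves credentials itself (ADC here) and applies the
# default "content" scope before opening the channel.
channel = ProgramsServiceGrpcTransport.create_channel()

# Passing the channel instance means the transport ignores any credentials
# arguments and simply reuses it.
transport = ProgramsServiceGrpcTransport(channel=channel)
```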
+ kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + grpc.Channel: A gRPC channel object. + + Raises: + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + + return grpc_helpers.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs, + ) + + @property + def grpc_channel(self) -> grpc.Channel: + """Return the channel designed to connect to this service.""" + return self._grpc_channel + + @property + def get_program(self) -> Callable[[programs.GetProgramRequest], programs.Program]: + r"""Return a callable for the get program method over gRPC. + + Retrieves the specified program for the account. + + Returns: + Callable[[~.GetProgramRequest], + ~.Program]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_program" not in self._stubs: + self._stubs["get_program"] = self.grpc_channel.unary_unary( + "/google.shopping.merchant.accounts.v1beta.ProgramsService/GetProgram", + request_serializer=programs.GetProgramRequest.serialize, + response_deserializer=programs.Program.deserialize, + ) + return self._stubs["get_program"] + + @property + def list_programs( + self, + ) -> Callable[[programs.ListProgramsRequest], programs.ListProgramsResponse]: + r"""Return a callable for the list programs method over gRPC. + + Retrieves all programs for the account. + + Returns: + Callable[[~.ListProgramsRequest], + ~.ListProgramsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_programs" not in self._stubs: + self._stubs["list_programs"] = self.grpc_channel.unary_unary( + "/google.shopping.merchant.accounts.v1beta.ProgramsService/ListPrograms", + request_serializer=programs.ListProgramsRequest.serialize, + response_deserializer=programs.ListProgramsResponse.deserialize, + ) + return self._stubs["list_programs"] + + @property + def enable_program( + self, + ) -> Callable[[programs.EnableProgramRequest], programs.Program]: + r"""Return a callable for the enable program method over gRPC. + + Enable participation in the specified program for the + account. Executing this method requires admin access. + + Returns: + Callable[[~.EnableProgramRequest], + ~.Program]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
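Each RPC property above builds its gRPC stub on first access and caches it in `_stubs`, so what it returns is a plain callable. Invoking it directly looks like the sketch below; this bypasses the `wrap_method` layer the generated client normally adds (retries, timeouts, routing metadata), so treat it as an illustration of the lazy-stub mechanics rather than a recommended calling pattern. The account and program IDs are placeholders.

```python
from google.shopping.merchant_accounts_v1beta.services.programs_service.transports import (
    ProgramsServiceGrpcTransport,
)
from google.shopping.merchant_accounts_v1beta.types import programs

transport = ProgramsServiceGrpcTransport()  # channel built from ADC
get_program = transport.get_program         # stub created and cached on first access
program = get_program(
    programs.GetProgramRequest(name="accounts/123/programs/free-listings")
)
print(program)
```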
+ if "enable_program" not in self._stubs: + self._stubs["enable_program"] = self.grpc_channel.unary_unary( + "/google.shopping.merchant.accounts.v1beta.ProgramsService/EnableProgram", + request_serializer=programs.EnableProgramRequest.serialize, + response_deserializer=programs.Program.deserialize, + ) + return self._stubs["enable_program"] + + @property + def disable_program( + self, + ) -> Callable[[programs.DisableProgramRequest], programs.Program]: + r"""Return a callable for the disable program method over gRPC. + + Disable participation in the specified program for + the account. Executing this method requires admin + access. + + Returns: + Callable[[~.DisableProgramRequest], + ~.Program]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "disable_program" not in self._stubs: + self._stubs["disable_program"] = self.grpc_channel.unary_unary( + "/google.shopping.merchant.accounts.v1beta.ProgramsService/DisableProgram", + request_serializer=programs.DisableProgramRequest.serialize, + response_deserializer=programs.Program.deserialize, + ) + return self._stubs["disable_program"] + + def close(self): + self.grpc_channel.close() + + @property + def kind(self) -> str: + return "grpc" + + +__all__ = ("ProgramsServiceGrpcTransport",) diff --git a/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/programs_service/transports/grpc_asyncio.py b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/programs_service/transports/grpc_asyncio.py new file mode 100644 index 000000000000..79c7b0407527 --- /dev/null +++ b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/programs_service/transports/grpc_asyncio.py @@ -0,0 +1,388 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1, grpc_helpers_async +from google.api_core import retry_async as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +import grpc # type: ignore +from grpc.experimental import aio # type: ignore + +from google.shopping.merchant_accounts_v1beta.types import programs + +from .base import DEFAULT_CLIENT_INFO, ProgramsServiceTransport +from .grpc import ProgramsServiceGrpcTransport + + +class ProgramsServiceGrpcAsyncIOTransport(ProgramsServiceTransport): + """gRPC AsyncIO backend transport for ProgramsService. + + Service for program management. 
+ + Programs provide a mechanism for adding functionality to merchant + accounts. A typical example of this is the `Free product + listings `__ + program, which enables products from a merchant's store to be shown + across Google for free. + + This service exposes methods to retrieve a merchant's participation + in all available programs, in addition to methods for explicitly + enabling or disabling participation in each program. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + + _grpc_channel: aio.Channel + _stubs: Dict[str, Callable] = {} + + @classmethod + def create_channel( + cls, + host: str = "merchantapi.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> aio.Channel: + """Create and return a gRPC AsyncIO channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + aio.Channel: A gRPC AsyncIO channel object. + """ + + return grpc_helpers_async.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs, + ) + + def __init__( + self, + *, + host: str = "merchantapi.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[Union[aio.Channel, Callable[..., aio.Channel]]] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'merchantapi.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. 
+ This argument is ignored if a ``channel`` instance is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if a ``channel`` instance is provided. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + channel (Optional[Union[aio.Channel, Callable[..., aio.Channel]]]): + A ``Channel`` instance through which to make calls, or a Callable + that constructs and returns one. If set to None, ``self.create_channel`` + is used to create the channel. If a Callable is given, it will be called + with the same arguments as used in ``self.create_channel``. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if a ``channel`` instance is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if isinstance(channel, aio.Channel): + # Ignore credentials if a channel was passed. + credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. 
+ if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + # initialize with the provided callable or the default channel + channel_init = channel or type(self).create_channel + self._grpc_channel = channel_init( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @property + def grpc_channel(self) -> aio.Channel: + """Create the channel designed to connect to this service. + + This property caches on the instance; repeated calls return + the same channel. + """ + # Return the channel from cache. + return self._grpc_channel + + @property + def get_program( + self, + ) -> Callable[[programs.GetProgramRequest], Awaitable[programs.Program]]: + r"""Return a callable for the get program method over gRPC. + + Retrieves the specified program for the account. + + Returns: + Callable[[~.GetProgramRequest], + Awaitable[~.Program]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_program" not in self._stubs: + self._stubs["get_program"] = self.grpc_channel.unary_unary( + "/google.shopping.merchant.accounts.v1beta.ProgramsService/GetProgram", + request_serializer=programs.GetProgramRequest.serialize, + response_deserializer=programs.Program.deserialize, + ) + return self._stubs["get_program"] + + @property + def list_programs( + self, + ) -> Callable[ + [programs.ListProgramsRequest], Awaitable[programs.ListProgramsResponse] + ]: + r"""Return a callable for the list programs method over gRPC. + + Retrieves all programs for the account. + + Returns: + Callable[[~.ListProgramsRequest], + Awaitable[~.ListProgramsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "list_programs" not in self._stubs: + self._stubs["list_programs"] = self.grpc_channel.unary_unary( + "/google.shopping.merchant.accounts.v1beta.ProgramsService/ListPrograms", + request_serializer=programs.ListProgramsRequest.serialize, + response_deserializer=programs.ListProgramsResponse.deserialize, + ) + return self._stubs["list_programs"] + + @property + def enable_program( + self, + ) -> Callable[[programs.EnableProgramRequest], Awaitable[programs.Program]]: + r"""Return a callable for the enable program method over gRPC. + + Enable participation in the specified program for the + account. Executing this method requires admin access. + + Returns: + Callable[[~.EnableProgramRequest], + Awaitable[~.Program]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "enable_program" not in self._stubs: + self._stubs["enable_program"] = self.grpc_channel.unary_unary( + "/google.shopping.merchant.accounts.v1beta.ProgramsService/EnableProgram", + request_serializer=programs.EnableProgramRequest.serialize, + response_deserializer=programs.Program.deserialize, + ) + return self._stubs["enable_program"] + + @property + def disable_program( + self, + ) -> Callable[[programs.DisableProgramRequest], Awaitable[programs.Program]]: + r"""Return a callable for the disable program method over gRPC. + + Disable participation in the specified program for + the account. Executing this method requires admin + access. + + Returns: + Callable[[~.DisableProgramRequest], + Awaitable[~.Program]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "disable_program" not in self._stubs: + self._stubs["disable_program"] = self.grpc_channel.unary_unary( + "/google.shopping.merchant.accounts.v1beta.ProgramsService/DisableProgram", + request_serializer=programs.DisableProgramRequest.serialize, + response_deserializer=programs.Program.deserialize, + ) + return self._stubs["disable_program"] + + def _prep_wrapped_messages(self, client_info): + """Precompute the wrapped methods, overriding the base class method to use async wrappers.""" + self._wrapped_methods = { + self.get_program: gapic_v1.method_async.wrap_method( + self.get_program, + default_timeout=None, + client_info=client_info, + ), + self.list_programs: gapic_v1.method_async.wrap_method( + self.list_programs, + default_timeout=None, + client_info=client_info, + ), + self.enable_program: gapic_v1.method_async.wrap_method( + self.enable_program, + default_timeout=None, + client_info=client_info, + ), + self.disable_program: gapic_v1.method_async.wrap_method( + self.disable_program, + default_timeout=None, + client_info=client_info, + ), + } + + def close(self): + return self.grpc_channel.close() + + +__all__ = ("ProgramsServiceGrpcAsyncIOTransport",) diff --git a/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/programs_service/transports/rest.py b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/programs_service/transports/rest.py new file mode 100644 index 000000000000..717744840799 --- /dev/null +++ b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/programs_service/transports/rest.py @@ -0,0 +1,718 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +import dataclasses +import json # type: ignore +import re +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, path_template, rest_helpers, rest_streaming +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.transport.requests import AuthorizedSession # type: ignore +from google.protobuf import json_format +import grpc # type: ignore +from requests import __version__ as requests_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + + +from google.shopping.merchant_accounts_v1beta.types import programs + +from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO +from .base import ProgramsServiceTransport + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) + + +class ProgramsServiceRestInterceptor: + """Interceptor for ProgramsService. + + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. + Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the ProgramsServiceRestTransport. + + .. code-block:: python + class MyCustomProgramsServiceInterceptor(ProgramsServiceRestInterceptor): + def pre_disable_program(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_disable_program(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_enable_program(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_enable_program(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_program(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_program(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_programs(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_programs(self, response): + logging.log(f"Received response: {response}") + return response + + transport = ProgramsServiceRestTransport(interceptor=MyCustomProgramsServiceInterceptor()) + client = ProgramsServiceClient(transport=transport) + + + """ + + def pre_disable_program( + self, + request: programs.DisableProgramRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[programs.DisableProgramRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for disable_program + + Override in a subclass to manipulate the request or metadata + before they are sent to the ProgramsService server. 
+ """ + return request, metadata + + def post_disable_program(self, response: programs.Program) -> programs.Program: + """Post-rpc interceptor for disable_program + + Override in a subclass to manipulate the response + after it is returned by the ProgramsService server but before + it is returned to user code. + """ + return response + + def pre_enable_program( + self, + request: programs.EnableProgramRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[programs.EnableProgramRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for enable_program + + Override in a subclass to manipulate the request or metadata + before they are sent to the ProgramsService server. + """ + return request, metadata + + def post_enable_program(self, response: programs.Program) -> programs.Program: + """Post-rpc interceptor for enable_program + + Override in a subclass to manipulate the response + after it is returned by the ProgramsService server but before + it is returned to user code. + """ + return response + + def pre_get_program( + self, request: programs.GetProgramRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[programs.GetProgramRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_program + + Override in a subclass to manipulate the request or metadata + before they are sent to the ProgramsService server. + """ + return request, metadata + + def post_get_program(self, response: programs.Program) -> programs.Program: + """Post-rpc interceptor for get_program + + Override in a subclass to manipulate the response + after it is returned by the ProgramsService server but before + it is returned to user code. + """ + return response + + def pre_list_programs( + self, request: programs.ListProgramsRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[programs.ListProgramsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_programs + + Override in a subclass to manipulate the request or metadata + before they are sent to the ProgramsService server. + """ + return request, metadata + + def post_list_programs( + self, response: programs.ListProgramsResponse + ) -> programs.ListProgramsResponse: + """Post-rpc interceptor for list_programs + + Override in a subclass to manipulate the response + after it is returned by the ProgramsService server but before + it is returned to user code. + """ + return response + + +@dataclasses.dataclass +class ProgramsServiceRestStub: + _session: AuthorizedSession + _host: str + _interceptor: ProgramsServiceRestInterceptor + + +class ProgramsServiceRestTransport(ProgramsServiceTransport): + """REST backend transport for ProgramsService. + + Service for program management. + + Programs provide a mechanism for adding functionality to merchant + accounts. A typical example of this is the `Free product + listings `__ + program, which enables products from a merchant's store to be shown + across Google for free. + + This service exposes methods to retrieve a merchant's participation + in all available programs, in addition to methods for explicitly + enabling or disabling participation in each program. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. 
+ + It sends JSON representations of protocol buffers over HTTP/1.1 + + """ + + def __init__( + self, + *, + host: str = "merchantapi.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", + interceptor: Optional[ProgramsServiceRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'merchantapi.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. It is ignored + if ``channel`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. + """ + # Run the base constructor + # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. 
+ # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the + # credentials object + maybe_url_match = re.match("^(?Phttp(?:s)?://)?(?P.*)$", host) + if maybe_url_match is None: + raise ValueError( + f"Unexpected hostname structure: {host}" + ) # pragma: NO COVER + + url_match_items = maybe_url_match.groupdict() + + host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host + + super().__init__( + host=host, + credentials=credentials, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + self._session = AuthorizedSession( + self._credentials, default_host=self.DEFAULT_HOST + ) + if client_cert_source_for_mtls: + self._session.configure_mtls_channel(client_cert_source_for_mtls) + self._interceptor = interceptor or ProgramsServiceRestInterceptor() + self._prep_wrapped_messages(client_info) + + class _DisableProgram(ProgramsServiceRestStub): + def __hash__(self): + return hash("DisableProgram") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: programs.DisableProgramRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> programs.Program: + r"""Call the disable program method over HTTP. + + Args: + request (~.programs.DisableProgramRequest): + The request object. Request message for the + DisableProgram method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.programs.Program: + Defines participation in a given program for the + specified account. + + Programs provide a mechanism for adding functionality to + merchant accounts. A typical example of this is the + `Free product + listings `__ + program, which enables products from a merchant's store + to be shown across Google for free. + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/accounts/v1beta/{name=accounts/*/programs/*}:disable", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_disable_program(request, metadata) + pb_request = programs.DisableProgramRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
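The transcoding step above is what turns a request message into a concrete HTTP call. The small illustration below feeds the same `http_options` to `google.api_core.path_template.transcode` using plain keyword arguments instead of a protobuf message; the resource name is hypothetical and the printed values show roughly what the generated code receives back (URI, verb, body, and query parameters).

```python
from google.api_core import path_template

http_options = [
    {
        "method": "post",
        "uri": "/accounts/v1beta/{name=accounts/*/programs/*}:disable",
        "body": "*",
    },
]

transcoded = path_template.transcode(
    http_options, name="accounts/123/programs/free-listings"
)
print(transcoded["method"])  # post
print(transcoded["uri"])     # /accounts/v1beta/accounts/123/programs/free-listings:disable
```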
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = programs.Program() + pb_resp = programs.Program.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_disable_program(resp) + return resp + + class _EnableProgram(ProgramsServiceRestStub): + def __hash__(self): + return hash("EnableProgram") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: programs.EnableProgramRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> programs.Program: + r"""Call the enable program method over HTTP. + + Args: + request (~.programs.EnableProgramRequest): + The request object. Request message for the EnableProgram + method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.programs.Program: + Defines participation in a given program for the + specified account. + + Programs provide a mechanism for adding functionality to + merchant accounts. A typical example of this is the + `Free product + listings `__ + program, which enables products from a merchant's store + to be shown across Google for free. + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/accounts/v1beta/{name=accounts/*/programs/*}:enable", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_enable_program(request, metadata) + pb_request = programs.EnableProgramRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = programs.Program() + pb_resp = programs.Program.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_enable_program(resp) + return resp + + class _GetProgram(ProgramsServiceRestStub): + def __hash__(self): + return hash("GetProgram") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: programs.GetProgramRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> programs.Program: + r"""Call the get program method over HTTP. + + Args: + request (~.programs.GetProgramRequest): + The request object. Request message for the GetProgram + method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.programs.Program: + Defines participation in a given program for the + specified account. + + Programs provide a mechanism for adding functionality to + merchant accounts. A typical example of this is the + `Free product + listings `__ + program, which enables products from a merchant's store + to be shown across Google for free. + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/accounts/v1beta/{name=accounts/*/programs/*}", + }, + ] + request, metadata = self._interceptor.pre_get_program(request, metadata) + pb_request = programs.GetProgramRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = programs.Program() + pb_resp = programs.Program.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_program(resp) + return resp + + class _ListPrograms(ProgramsServiceRestStub): + def __hash__(self): + return hash("ListPrograms") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: programs.ListProgramsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> programs.ListProgramsResponse: + r"""Call the list programs method over HTTP. + + Args: + request (~.programs.ListProgramsRequest): + The request object. Request message for the ListPrograms + method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.programs.ListProgramsResponse: + Response message for the ListPrograms + method. + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/accounts/v1beta/{parent=accounts/*}/programs", + }, + ] + request, metadata = self._interceptor.pre_list_programs(request, metadata) + pb_request = programs.ListProgramsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = programs.ListProgramsResponse() + pb_resp = programs.ListProgramsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_programs(resp) + return resp + + @property + def disable_program( + self, + ) -> Callable[[programs.DisableProgramRequest], programs.Program]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._DisableProgram(self._session, self._host, self._interceptor) # type: ignore + + @property + def enable_program( + self, + ) -> Callable[[programs.EnableProgramRequest], programs.Program]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
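Selecting this REST transport from user code is a one-line change on the generated client: `transport="rest"` resolves through the registry added in `transports/__init__.py`, while the default remains gRPC. A hedged sketch with a placeholder resource name and Application Default Credentials:

```python
from google.shopping import merchant_accounts_v1beta

client = merchant_accounts_v1beta.ProgramsServiceClient(transport="rest")
program = client.get_program(
    request=merchant_accounts_v1beta.GetProgramRequest(
        name="accounts/123/programs/free-listings"
    )
)
print(program)
```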
+ # In C++ this would require a dynamic_cast + return self._EnableProgram(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_program(self) -> Callable[[programs.GetProgramRequest], programs.Program]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetProgram(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_programs( + self, + ) -> Callable[[programs.ListProgramsRequest], programs.ListProgramsResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListPrograms(self._session, self._host, self._interceptor) # type: ignore + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__ = ("ProgramsServiceRestTransport",) diff --git a/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/regions_service/__init__.py b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/regions_service/__init__.py new file mode 100644 index 000000000000..c2cfc9da92ec --- /dev/null +++ b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/regions_service/__init__.py @@ -0,0 +1,22 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from .async_client import RegionsServiceAsyncClient +from .client import RegionsServiceClient + +__all__ = ( + "RegionsServiceClient", + "RegionsServiceAsyncClient", +) diff --git a/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/regions_service/async_client.py b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/regions_service/async_client.py new file mode 100644 index 000000000000..866508f75a66 --- /dev/null +++ b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/regions_service/async_client.py @@ -0,0 +1,856 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
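Editor's note: the REST stubs completed above (`_DisableProgram`, `_EnableProgram`, `_GetProgram`, `_ListPrograms`) are normally reached through the public `ProgramsServiceClient` rather than called directly. A rough usage sketch follows; it is illustrative only and not part of this patch, the account and program IDs are placeholders, and Application Default Credentials are assumed.

```python
# Illustrative only; assumes Application Default Credentials are configured.
from google.shopping import merchant_accounts_v1beta

# Selecting transport="rest" routes calls through ProgramsServiceRestTransport.
client = merchant_accounts_v1beta.ProgramsServiceClient(transport="rest")

# GET /accounts/v1beta/{name=accounts/*/programs/*}
program = client.get_program(
    request=merchant_accounts_v1beta.GetProgramRequest(
        name="accounts/123/programs/free-listings",  # placeholder resource name
    )
)
print(program)

# GET /accounts/v1beta/{parent=accounts/*}/programs, with automatic pagination
for p in client.list_programs(
    request=merchant_accounts_v1beta.ListProgramsRequest(parent="accounts/123")
):
    print(p.name)
```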
+# +from collections import OrderedDict +import functools +import re +from typing import ( + Callable, + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, +) + +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry_async as retries +from google.api_core.client_options import ClientOptions +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.shopping.merchant_accounts_v1beta import gapic_version as package_version + +try: + OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.AsyncRetry, object, None] # type: ignore + +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import wrappers_pb2 # type: ignore + +from google.shopping.merchant_accounts_v1beta.services.regions_service import pagers +from google.shopping.merchant_accounts_v1beta.types import regions + +from .client import RegionsServiceClient +from .transports.base import DEFAULT_CLIENT_INFO, RegionsServiceTransport +from .transports.grpc_asyncio import RegionsServiceGrpcAsyncIOTransport + + +class RegionsServiceAsyncClient: + """Manages regions configuration. + + This API defines the following resource model: + + - [Region][google.shopping.merchant.accounts.v1main.Region] + """ + + _client: RegionsServiceClient + + # Copy defaults from the synchronous client for use here. + # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. + DEFAULT_ENDPOINT = RegionsServiceClient.DEFAULT_ENDPOINT + DEFAULT_MTLS_ENDPOINT = RegionsServiceClient.DEFAULT_MTLS_ENDPOINT + _DEFAULT_ENDPOINT_TEMPLATE = RegionsServiceClient._DEFAULT_ENDPOINT_TEMPLATE + _DEFAULT_UNIVERSE = RegionsServiceClient._DEFAULT_UNIVERSE + + account_path = staticmethod(RegionsServiceClient.account_path) + parse_account_path = staticmethod(RegionsServiceClient.parse_account_path) + region_path = staticmethod(RegionsServiceClient.region_path) + parse_region_path = staticmethod(RegionsServiceClient.parse_region_path) + common_billing_account_path = staticmethod( + RegionsServiceClient.common_billing_account_path + ) + parse_common_billing_account_path = staticmethod( + RegionsServiceClient.parse_common_billing_account_path + ) + common_folder_path = staticmethod(RegionsServiceClient.common_folder_path) + parse_common_folder_path = staticmethod( + RegionsServiceClient.parse_common_folder_path + ) + common_organization_path = staticmethod( + RegionsServiceClient.common_organization_path + ) + parse_common_organization_path = staticmethod( + RegionsServiceClient.parse_common_organization_path + ) + common_project_path = staticmethod(RegionsServiceClient.common_project_path) + parse_common_project_path = staticmethod( + RegionsServiceClient.parse_common_project_path + ) + common_location_path = staticmethod(RegionsServiceClient.common_location_path) + parse_common_location_path = staticmethod( + RegionsServiceClient.parse_common_location_path + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. 
+ + Returns: + RegionsServiceAsyncClient: The constructed client. + """ + return RegionsServiceClient.from_service_account_info.__func__(RegionsServiceAsyncClient, info, *args, **kwargs) # type: ignore + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + RegionsServiceAsyncClient: The constructed client. + """ + return RegionsServiceClient.from_service_account_file.__func__(RegionsServiceAsyncClient, filename, *args, **kwargs) # type: ignore + + from_service_account_json = from_service_account_file + + @classmethod + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[ClientOptions] = None + ): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + return RegionsServiceClient.get_mtls_endpoint_and_cert_source(client_options) # type: ignore + + @property + def transport(self) -> RegionsServiceTransport: + """Returns the transport used by the client instance. + + Returns: + RegionsServiceTransport: The transport used by the client instance. + """ + return self._client.transport + + @property + def api_endpoint(self): + """Return the API endpoint used by the client instance. + + Returns: + str: The API endpoint used by the client instance. + """ + return self._client._api_endpoint + + @property + def universe_domain(self) -> str: + """Return the universe domain used by the client instance. + + Returns: + str: The universe domain used + by the client instance. 
+ """ + return self._client._universe_domain + + get_transport_class = functools.partial( + type(RegionsServiceClient).get_transport_class, type(RegionsServiceClient) + ) + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[ + Union[str, RegionsServiceTransport, Callable[..., RegionsServiceTransport]] + ] = "grpc_asyncio", + client_options: Optional[ClientOptions] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the regions service async client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Optional[Union[str,RegionsServiceTransport,Callable[..., RegionsServiceTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport to use. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the RegionsServiceTransport constructor. + If set to None, a transport is chosen automatically. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): + Custom options for the client. + + 1. The ``api_endpoint`` property can be used to override the + default endpoint provided by the client when ``transport`` is + not explicitly provided. Only if this property is not set and + ``transport`` was not explicitly provided, the endpoint is + determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment + variable, which have one of the following values: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto-switch to the + default mTLS endpoint if client certificate is present; this is + the default value). + + 2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide a client certificate for mTLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + 3. The ``universe_domain`` property can be used to override the + default "googleapis.com" universe. Note that ``api_endpoint`` + property still takes precedence; and ``universe_domain`` is + currently not supported for mTLS. + + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + """ + self._client = RegionsServiceClient( + credentials=credentials, + transport=transport, + client_options=client_options, + client_info=client_info, + ) + + async def get_region( + self, + request: Optional[Union[regions.GetRegionRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> regions.Region: + r"""Retrieves a region defined in your Merchant Center + account. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.shopping import merchant_accounts_v1beta + + async def sample_get_region(): + # Create a client + client = merchant_accounts_v1beta.RegionsServiceAsyncClient() + + # Initialize request argument(s) + request = merchant_accounts_v1beta.GetRegionRequest( + name="name_value", + ) + + # Make the request + response = await client.get_region(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.shopping.merchant_accounts_v1beta.types.GetRegionRequest, dict]]): + The request object. Request message for the ``GetRegion`` method. + name (:class:`str`): + Required. The name of the region to retrieve. Format: + ``accounts/{account}/regions/{region}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.shopping.merchant_accounts_v1beta.types.Region: + Represents a geographic region that you can use as a target with both the + RegionalInventory and ShippingSettings services. You + can define regions as collections of either postal + codes or, in some countries, using predefined + geotargets. For more information, see [Set up regions + ](\ https://support.google.com/merchants/answer/7410946#zippy=%2Ccreate-a-new-region) + for more information. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, regions.GetRegionRequest): + request = regions.GetRegionRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.get_region + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def create_region( + self, + request: Optional[Union[regions.CreateRegionRequest, dict]] = None, + *, + parent: Optional[str] = None, + region: Optional[regions.Region] = None, + region_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> regions.Region: + r"""Creates a region definition in your Merchant Center + account. Executing this method requires admin access. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.shopping import merchant_accounts_v1beta + + async def sample_create_region(): + # Create a client + client = merchant_accounts_v1beta.RegionsServiceAsyncClient() + + # Initialize request argument(s) + request = merchant_accounts_v1beta.CreateRegionRequest( + parent="parent_value", + region_id="region_id_value", + ) + + # Make the request + response = await client.create_region(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.shopping.merchant_accounts_v1beta.types.CreateRegionRequest, dict]]): + The request object. Request message for the ``CreateRegion`` method. + parent (:class:`str`): + Required. The account to create a region for. Format: + ``accounts/{account}`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (:class:`google.shopping.merchant_accounts_v1beta.types.Region`): + Required. The region to create. + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region_id (:class:`str`): + Required. The identifier for the + region, unique over all regions of the + same account. + + This corresponds to the ``region_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.shopping.merchant_accounts_v1beta.types.Region: + Represents a geographic region that you can use as a target with both the + RegionalInventory and ShippingSettings services. You + can define regions as collections of either postal + codes or, in some countries, using predefined + geotargets. For more information, see [Set up regions + ](\ https://support.google.com/merchants/answer/7410946#zippy=%2Ccreate-a-new-region) + for more information. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([parent, region, region_id]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, regions.CreateRegionRequest): + request = regions.CreateRegionRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if region is not None: + request.region = region + if region_id is not None: + request.region_id = region_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.create_region + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def update_region( + self, + request: Optional[Union[regions.UpdateRegionRequest, dict]] = None, + *, + region: Optional[regions.Region] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> regions.Region: + r"""Updates a region definition in your Merchant Center + account. Executing this method requires admin access. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.shopping import merchant_accounts_v1beta + + async def sample_update_region(): + # Create a client + client = merchant_accounts_v1beta.RegionsServiceAsyncClient() + + # Initialize request argument(s) + request = merchant_accounts_v1beta.UpdateRegionRequest( + ) + + # Make the request + response = await client.update_region(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.shopping.merchant_accounts_v1beta.types.UpdateRegionRequest, dict]]): + The request object. Request message for the ``UpdateRegion`` method. + region (:class:`google.shopping.merchant_accounts_v1beta.types.Region`): + Required. The updated region. + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): + Optional. The comma-separated field mask indicating the + fields to update. Example: + ``"displayName,postalCodeArea.regionCode"``. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.shopping.merchant_accounts_v1beta.types.Region: + Represents a geographic region that you can use as a target with both the + RegionalInventory and ShippingSettings services. You + can define regions as collections of either postal + codes or, in some countries, using predefined + geotargets. For more information, see [Set up regions + ](\ https://support.google.com/merchants/answer/7410946#zippy=%2Ccreate-a-new-region) + for more information. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([region, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, regions.UpdateRegionRequest): + request = regions.UpdateRegionRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if region is not None: + request.region = region + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.update_region + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("region.name", request.region.name),) + ), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def delete_region( + self, + request: Optional[Union[regions.DeleteRegionRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a region definition from your Merchant Center + account. Executing this method requires admin access. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.shopping import merchant_accounts_v1beta + + async def sample_delete_region(): + # Create a client + client = merchant_accounts_v1beta.RegionsServiceAsyncClient() + + # Initialize request argument(s) + request = merchant_accounts_v1beta.DeleteRegionRequest( + name="name_value", + ) + + # Make the request + await client.delete_region(request=request) + + Args: + request (Optional[Union[google.shopping.merchant_accounts_v1beta.types.DeleteRegionRequest, dict]]): + The request object. Request message for the ``DeleteRegion`` method. + name (:class:`str`): + Required. The name of the region to delete. Format: + ``accounts/{account}/regions/{region}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, regions.DeleteRegionRequest): + request = regions.DeleteRegionRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.delete_region + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def list_regions( + self, + request: Optional[Union[regions.ListRegionsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListRegionsAsyncPager: + r"""Lists the regions in your Merchant Center account. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.shopping import merchant_accounts_v1beta + + async def sample_list_regions(): + # Create a client + client = merchant_accounts_v1beta.RegionsServiceAsyncClient() + + # Initialize request argument(s) + request = merchant_accounts_v1beta.ListRegionsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_regions(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.shopping.merchant_accounts_v1beta.types.ListRegionsRequest, dict]]): + The request object. Request message for the ``ListRegions`` method. + parent (:class:`str`): + Required. The account to list regions for. Format: + ``accounts/{account}`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.shopping.merchant_accounts_v1beta.services.regions_service.pagers.ListRegionsAsyncPager: + Response message for the ListRegions method. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, regions.ListRegionsRequest): + request = regions.ListRegionsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.list_regions + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListRegionsAsyncPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def __aenter__(self) -> "RegionsServiceAsyncClient": + return self + + async def __aexit__(self, exc_type, exc, tb): + await self.transport.close() + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("RegionsServiceAsyncClient",) diff --git a/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/regions_service/client.py b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/regions_service/client.py new file mode 100644 index 000000000000..fa2d80737be2 --- /dev/null +++ b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/regions_service/client.py @@ -0,0 +1,1276 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +import os +import re +from typing import ( + Callable, + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, + cast, +) +import warnings + +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.shopping.merchant_accounts_v1beta import gapic_version as package_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import wrappers_pb2 # type: ignore + +from google.shopping.merchant_accounts_v1beta.services.regions_service import pagers +from google.shopping.merchant_accounts_v1beta.types import regions + +from .transports.base import DEFAULT_CLIENT_INFO, RegionsServiceTransport +from .transports.grpc import RegionsServiceGrpcTransport +from .transports.grpc_asyncio import RegionsServiceGrpcAsyncIOTransport +from .transports.rest import RegionsServiceRestTransport + + +class RegionsServiceClientMeta(type): + """Metaclass for the RegionsService client. + + This provides class-level methods for building and retrieving + support objects (e.g. transport) without polluting the client instance + objects. 
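Editor's note: with `RegionsServiceAsyncClient` defined above, a minimal end-to-end sketch of its paginated `list_regions` call looks like the following. This is illustrative only and not part of this patch; the account ID is a placeholder and Application Default Credentials are assumed.

```python
import asyncio

from google.shopping import merchant_accounts_v1beta


async def main():
    client = merchant_accounts_v1beta.RegionsServiceAsyncClient()

    # list_regions returns a ListRegionsAsyncPager; async iteration resolves
    # additional pages automatically.
    pager = await client.list_regions(
        request=merchant_accounts_v1beta.ListRegionsRequest(parent="accounts/123")
    )
    async for region in pager:
        print(region.name)


asyncio.run(main())
```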
+ """ + + _transport_registry = ( + OrderedDict() + ) # type: Dict[str, Type[RegionsServiceTransport]] + _transport_registry["grpc"] = RegionsServiceGrpcTransport + _transport_registry["grpc_asyncio"] = RegionsServiceGrpcAsyncIOTransport + _transport_registry["rest"] = RegionsServiceRestTransport + + def get_transport_class( + cls, + label: Optional[str] = None, + ) -> Type[RegionsServiceTransport]: + """Returns an appropriate transport class. + + Args: + label: The name of the desired transport. If none is + provided, then the first transport in the registry is used. + + Returns: + The transport class to use. + """ + # If a specific transport is requested, return that one. + if label: + return cls._transport_registry[label] + + # No transport is requested; return the default (that is, the first one + # in the dictionary). + return next(iter(cls._transport_registry.values())) + + +class RegionsServiceClient(metaclass=RegionsServiceClientMeta): + """Manages regions configuration. + + This API defines the following resource model: + + - [Region][google.shopping.merchant.accounts.v1main.Region] + """ + + @staticmethod + def _get_default_mtls_endpoint(api_endpoint): + """Converts api endpoint to mTLS endpoint. + + Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to + "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. + Args: + api_endpoint (Optional[str]): the api endpoint to convert. + Returns: + str: converted mTLS api endpoint. + """ + if not api_endpoint: + return api_endpoint + + mtls_endpoint_re = re.compile( + r"(?P[^.]+)(?P\.mtls)?(?P\.sandbox)?(?P\.googleapis\.com)?" + ) + + m = mtls_endpoint_re.match(api_endpoint) + name, mtls, sandbox, googledomain = m.groups() + if mtls or not googledomain: + return api_endpoint + + if sandbox: + return api_endpoint.replace( + "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" + ) + + return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") + + # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. + DEFAULT_ENDPOINT = "merchantapi.googleapis.com" + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore + DEFAULT_ENDPOINT + ) + + _DEFAULT_ENDPOINT_TEMPLATE = "merchantapi.{UNIVERSE_DOMAIN}" + _DEFAULT_UNIVERSE = "googleapis.com" + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + RegionsServiceClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + RegionsServiceClient: The constructed client. 
+ """ + credentials = service_account.Credentials.from_service_account_file(filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> RegionsServiceTransport: + """Returns the transport used by the client instance. + + Returns: + RegionsServiceTransport: The transport used by the client + instance. + """ + return self._transport + + @staticmethod + def account_path( + account: str, + ) -> str: + """Returns a fully-qualified account string.""" + return "accounts/{account}".format( + account=account, + ) + + @staticmethod + def parse_account_path(path: str) -> Dict[str, str]: + """Parses a account path into its component segments.""" + m = re.match(r"^accounts/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def region_path( + account: str, + region: str, + ) -> str: + """Returns a fully-qualified region string.""" + return "accounts/{account}/regions/{region}".format( + account=account, + region=region, + ) + + @staticmethod + def parse_region_path(path: str) -> Dict[str, str]: + """Parses a region path into its component segments.""" + m = re.match(r"^accounts/(?P.+?)/regions/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_billing_account_path( + billing_account: str, + ) -> str: + """Returns a fully-qualified billing_account string.""" + return "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + + @staticmethod + def parse_common_billing_account_path(path: str) -> Dict[str, str]: + """Parse a billing_account path into its component segments.""" + m = re.match(r"^billingAccounts/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_folder_path( + folder: str, + ) -> str: + """Returns a fully-qualified folder string.""" + return "folders/{folder}".format( + folder=folder, + ) + + @staticmethod + def parse_common_folder_path(path: str) -> Dict[str, str]: + """Parse a folder path into its component segments.""" + m = re.match(r"^folders/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_organization_path( + organization: str, + ) -> str: + """Returns a fully-qualified organization string.""" + return "organizations/{organization}".format( + organization=organization, + ) + + @staticmethod + def parse_common_organization_path(path: str) -> Dict[str, str]: + """Parse a organization path into its component segments.""" + m = re.match(r"^organizations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_project_path( + project: str, + ) -> str: + """Returns a fully-qualified project string.""" + return "projects/{project}".format( + project=project, + ) + + @staticmethod + def parse_common_project_path(path: str) -> Dict[str, str]: + """Parse a project path into its component segments.""" + m = re.match(r"^projects/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_location_path( + project: str, + location: str, + ) -> str: + """Returns a fully-qualified location string.""" + return "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) + + @staticmethod + def parse_common_location_path(path: str) -> Dict[str, str]: + """Parse a location path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @classmethod + def get_mtls_endpoint_and_cert_source( 
+ cls, client_options: Optional[client_options_lib.ClientOptions] = None + ): + """Deprecated. Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + + warnings.warn( + "get_mtls_endpoint_and_cert_source is deprecated. Use the api_endpoint property instead.", + DeprecationWarning, + ) + if client_options is None: + client_options = client_options_lib.ClientOptions() + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_client_cert not in ("true", "false"): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError( + "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Figure out the client cert source to use. + client_cert_source = None + if use_client_cert == "true": + if client_options.client_cert_source: + client_cert_source = client_options.client_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + + # Figure out which api endpoint to use. + if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + elif use_mtls_endpoint == "always" or ( + use_mtls_endpoint == "auto" and client_cert_source + ): + api_endpoint = cls.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = cls.DEFAULT_ENDPOINT + + return api_endpoint, client_cert_source + + @staticmethod + def _read_environment_variables(): + """Returns the environment variables used by the client. + + Returns: + Tuple[bool, str, str]: returns the GOOGLE_API_USE_CLIENT_CERTIFICATE, + GOOGLE_API_USE_MTLS_ENDPOINT, and GOOGLE_CLOUD_UNIVERSE_DOMAIN environment variables. + + Raises: + ValueError: If GOOGLE_API_USE_CLIENT_CERTIFICATE is not + any of ["true", "false"]. + google.auth.exceptions.MutualTLSChannelError: If GOOGLE_API_USE_MTLS_ENDPOINT + is not any of ["auto", "never", "always"]. 
+ """ + use_client_cert = os.getenv( + "GOOGLE_API_USE_CLIENT_CERTIFICATE", "false" + ).lower() + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto").lower() + universe_domain_env = os.getenv("GOOGLE_CLOUD_UNIVERSE_DOMAIN") + if use_client_cert not in ("true", "false"): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError( + "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + return use_client_cert == "true", use_mtls_endpoint, universe_domain_env + + @staticmethod + def _get_client_cert_source(provided_cert_source, use_cert_flag): + """Return the client cert source to be used by the client. + + Args: + provided_cert_source (bytes): The client certificate source provided. + use_cert_flag (bool): A flag indicating whether to use the client certificate. + + Returns: + bytes or None: The client cert source to be used by the client. + """ + client_cert_source = None + if use_cert_flag: + if provided_cert_source: + client_cert_source = provided_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + return client_cert_source + + @staticmethod + def _get_api_endpoint( + api_override, client_cert_source, universe_domain, use_mtls_endpoint + ): + """Return the API endpoint used by the client. + + Args: + api_override (str): The API endpoint override. If specified, this is always + the return value of this function and the other arguments are not used. + client_cert_source (bytes): The client certificate source used by the client. + universe_domain (str): The universe domain used by the client. + use_mtls_endpoint (str): How to use the mTLS endpoint, which depends also on the other parameters. + Possible values are "always", "auto", or "never". + + Returns: + str: The API endpoint to be used by the client. + """ + if api_override is not None: + api_endpoint = api_override + elif use_mtls_endpoint == "always" or ( + use_mtls_endpoint == "auto" and client_cert_source + ): + _default_universe = RegionsServiceClient._DEFAULT_UNIVERSE + if universe_domain != _default_universe: + raise MutualTLSChannelError( + f"mTLS is not supported in any universe other than {_default_universe}." + ) + api_endpoint = RegionsServiceClient.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = RegionsServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=universe_domain + ) + return api_endpoint + + @staticmethod + def _get_universe_domain( + client_universe_domain: Optional[str], universe_domain_env: Optional[str] + ) -> str: + """Return the universe domain used by the client. + + Args: + client_universe_domain (Optional[str]): The universe domain configured via the client options. + universe_domain_env (Optional[str]): The universe domain configured via the "GOOGLE_CLOUD_UNIVERSE_DOMAIN" environment variable. + + Returns: + str: The universe domain to be used by the client. + + Raises: + ValueError: If the universe domain is an empty string. 
+ """ + universe_domain = RegionsServiceClient._DEFAULT_UNIVERSE + if client_universe_domain is not None: + universe_domain = client_universe_domain + elif universe_domain_env is not None: + universe_domain = universe_domain_env + if len(universe_domain.strip()) == 0: + raise ValueError("Universe Domain cannot be an empty string.") + return universe_domain + + @staticmethod + def _compare_universes( + client_universe: str, credentials: ga_credentials.Credentials + ) -> bool: + """Returns True iff the universe domains used by the client and credentials match. + + Args: + client_universe (str): The universe domain configured via the client options. + credentials (ga_credentials.Credentials): The credentials being used in the client. + + Returns: + bool: True iff client_universe matches the universe in credentials. + + Raises: + ValueError: when client_universe does not match the universe in credentials. + """ + + default_universe = RegionsServiceClient._DEFAULT_UNIVERSE + credentials_universe = getattr(credentials, "universe_domain", default_universe) + + if client_universe != credentials_universe: + raise ValueError( + "The configured universe domain " + f"({client_universe}) does not match the universe domain " + f"found in the credentials ({credentials_universe}). " + "If you haven't configured the universe domain explicitly, " + f"`{default_universe}` is the default." + ) + return True + + def _validate_universe_domain(self): + """Validates client's and credentials' universe domains are consistent. + + Returns: + bool: True iff the configured universe domain is valid. + + Raises: + ValueError: If the configured universe domain is not valid. + """ + self._is_universe_domain_valid = ( + self._is_universe_domain_valid + or RegionsServiceClient._compare_universes( + self.universe_domain, self.transport._credentials + ) + ) + return self._is_universe_domain_valid + + @property + def api_endpoint(self): + """Return the API endpoint used by the client instance. + + Returns: + str: The API endpoint used by the client instance. + """ + return self._api_endpoint + + @property + def universe_domain(self) -> str: + """Return the universe domain used by the client instance. + + Returns: + str: The universe domain used by the client instance. + """ + return self._universe_domain + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[ + Union[str, RegionsServiceTransport, Callable[..., RegionsServiceTransport]] + ] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the regions service client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Optional[Union[str,RegionsServiceTransport,Callable[..., RegionsServiceTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the RegionsServiceTransport constructor. + If set to None, a transport is chosen automatically. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): + Custom options for the client. + + 1. 
The ``api_endpoint`` property can be used to override the + default endpoint provided by the client when ``transport`` is + not explicitly provided. Only if this property is not set and + ``transport`` was not explicitly provided, the endpoint is + determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment + variable, which have one of the following values: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto-switch to the + default mTLS endpoint if client certificate is present; this is + the default value). + + 2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide a client certificate for mTLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + 3. The ``universe_domain`` property can be used to override the + default "googleapis.com" universe. Note that the ``api_endpoint`` + property still takes precedence; and ``universe_domain`` is + currently not supported for mTLS. + + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + """ + self._client_options = client_options + if isinstance(self._client_options, dict): + self._client_options = client_options_lib.from_dict(self._client_options) + if self._client_options is None: + self._client_options = client_options_lib.ClientOptions() + self._client_options = cast( + client_options_lib.ClientOptions, self._client_options + ) + + universe_domain_opt = getattr(self._client_options, "universe_domain", None) + + ( + self._use_client_cert, + self._use_mtls_endpoint, + self._universe_domain_env, + ) = RegionsServiceClient._read_environment_variables() + self._client_cert_source = RegionsServiceClient._get_client_cert_source( + self._client_options.client_cert_source, self._use_client_cert + ) + self._universe_domain = RegionsServiceClient._get_universe_domain( + universe_domain_opt, self._universe_domain_env + ) + self._api_endpoint = None # updated below, depending on `transport` + + # Initialize the universe domain validation. + self._is_universe_domain_valid = False + + api_key_value = getattr(self._client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError( + "client_options.api_key and credentials are mutually exclusive" + ) + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + transport_provided = isinstance(transport, RegionsServiceTransport) + if transport_provided: + # transport is a RegionsServiceTransport instance. + if credentials or self._client_options.credentials_file or api_key_value: + raise ValueError( + "When providing a transport instance, " + "provide its credentials directly." + ) + if self._client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." 
+ ) + self._transport = cast(RegionsServiceTransport, transport) + self._api_endpoint = self._transport.host + + self._api_endpoint = ( + self._api_endpoint + or RegionsServiceClient._get_api_endpoint( + self._client_options.api_endpoint, + self._client_cert_source, + self._universe_domain, + self._use_mtls_endpoint, + ) + ) + + if not transport_provided: + import google.auth._default # type: ignore + + if api_key_value and hasattr( + google.auth._default, "get_api_key_credentials" + ): + credentials = google.auth._default.get_api_key_credentials( + api_key_value + ) + + transport_init: Union[ + Type[RegionsServiceTransport], Callable[..., RegionsServiceTransport] + ] = ( + type(self).get_transport_class(transport) + if isinstance(transport, str) or transport is None + else cast(Callable[..., RegionsServiceTransport], transport) + ) + # initialize with the provided callable or the passed in class + self._transport = transport_init( + credentials=credentials, + credentials_file=self._client_options.credentials_file, + host=self._api_endpoint, + scopes=self._client_options.scopes, + client_cert_source_for_mtls=self._client_cert_source, + quota_project_id=self._client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + api_audience=self._client_options.api_audience, + ) + + def get_region( + self, + request: Optional[Union[regions.GetRegionRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> regions.Region: + r"""Retrieves a region defined in your Merchant Center + account. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.shopping import merchant_accounts_v1beta + + def sample_get_region(): + # Create a client + client = merchant_accounts_v1beta.RegionsServiceClient() + + # Initialize request argument(s) + request = merchant_accounts_v1beta.GetRegionRequest( + name="name_value", + ) + + # Make the request + response = client.get_region(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.shopping.merchant_accounts_v1beta.types.GetRegionRequest, dict]): + The request object. Request message for the ``GetRegion`` method. + name (str): + Required. The name of the region to retrieve. Format: + ``accounts/{account}/regions/{region}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.shopping.merchant_accounts_v1beta.types.Region: + Represents a geographic region that you can use as a target with both the + RegionalInventory and ShippingSettings services. You + can define regions as collections of either postal + codes or, in some countries, using predefined + geotargets. 
For more information, see [Set up regions + ](\ https://support.google.com/merchants/answer/7410946#zippy=%2Ccreate-a-new-region) + for more information. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, regions.GetRegionRequest): + request = regions.GetRegionRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_region] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def create_region( + self, + request: Optional[Union[regions.CreateRegionRequest, dict]] = None, + *, + parent: Optional[str] = None, + region: Optional[regions.Region] = None, + region_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> regions.Region: + r"""Creates a region definition in your Merchant Center + account. Executing this method requires admin access. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.shopping import merchant_accounts_v1beta + + def sample_create_region(): + # Create a client + client = merchant_accounts_v1beta.RegionsServiceClient() + + # Initialize request argument(s) + request = merchant_accounts_v1beta.CreateRegionRequest( + parent="parent_value", + region_id="region_id_value", + ) + + # Make the request + response = client.create_region(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.shopping.merchant_accounts_v1beta.types.CreateRegionRequest, dict]): + The request object. Request message for the ``CreateRegion`` method. + parent (str): + Required. The account to create a region for. Format: + ``accounts/{account}`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (google.shopping.merchant_accounts_v1beta.types.Region): + Required. The region to create. 
+ This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region_id (str): + Required. The identifier for the + region, unique over all regions of the + same account. + + This corresponds to the ``region_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.shopping.merchant_accounts_v1beta.types.Region: + Represents a geographic region that you can use as a target with both the + RegionalInventory and ShippingSettings services. You + can define regions as collections of either postal + codes or, in some countries, using predefined + geotargets. For more information, see [Set up regions + ](\ https://support.google.com/merchants/answer/7410946#zippy=%2Ccreate-a-new-region) + for more information. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, region, region_id]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, regions.CreateRegionRequest): + request = regions.CreateRegionRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if region is not None: + request.region = region + if region_id is not None: + request.region_id = region_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_region] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def update_region( + self, + request: Optional[Union[regions.UpdateRegionRequest, dict]] = None, + *, + region: Optional[regions.Region] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> regions.Region: + r"""Updates a region definition in your Merchant Center + account. Executing this method requires admin access. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.shopping import merchant_accounts_v1beta + + def sample_update_region(): + # Create a client + client = merchant_accounts_v1beta.RegionsServiceClient() + + # Initialize request argument(s) + request = merchant_accounts_v1beta.UpdateRegionRequest( + ) + + # Make the request + response = client.update_region(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.shopping.merchant_accounts_v1beta.types.UpdateRegionRequest, dict]): + The request object. Request message for the ``UpdateRegion`` method. + region (google.shopping.merchant_accounts_v1beta.types.Region): + Required. The updated region. + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Optional. The comma-separated field mask indicating the + fields to update. Example: + ``"displayName,postalCodeArea.regionCode"``. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.shopping.merchant_accounts_v1beta.types.Region: + Represents a geographic region that you can use as a target with both the + RegionalInventory and ShippingSettings services. You + can define regions as collections of either postal + codes or, in some countries, using predefined + geotargets. For more information, see [Set up regions + ](\ https://support.google.com/merchants/answer/7410946#zippy=%2Ccreate-a-new-region) + for more information. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([region, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, regions.UpdateRegionRequest): + request = regions.UpdateRegionRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if region is not None: + request.region = region + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update_region] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("region.name", request.region.name),) + ), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. 
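+        # Note: `rpc` is the wrapped transport method from _prep_wrapped_methods;
+        # the per-call `retry` and `timeout` arguments passed below take effect
+        # only when set to something other than gapic_v1.method.DEFAULT.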
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def delete_region( + self, + request: Optional[Union[regions.DeleteRegionRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a region definition from your Merchant Center + account. Executing this method requires admin access. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.shopping import merchant_accounts_v1beta + + def sample_delete_region(): + # Create a client + client = merchant_accounts_v1beta.RegionsServiceClient() + + # Initialize request argument(s) + request = merchant_accounts_v1beta.DeleteRegionRequest( + name="name_value", + ) + + # Make the request + client.delete_region(request=request) + + Args: + request (Union[google.shopping.merchant_accounts_v1beta.types.DeleteRegionRequest, dict]): + The request object. Request message for the ``DeleteRegion`` method. + name (str): + Required. The name of the region to delete. Format: + ``accounts/{account}/regions/{region}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, regions.DeleteRegionRequest): + request = regions.DeleteRegionRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_region] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. 
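+        # Note: DeleteRegion responds with google.protobuf.Empty, so the call
+        # result is discarded and this method returns None to the caller.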
+ rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def list_regions( + self, + request: Optional[Union[regions.ListRegionsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListRegionsPager: + r"""Lists the regions in your Merchant Center account. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.shopping import merchant_accounts_v1beta + + def sample_list_regions(): + # Create a client + client = merchant_accounts_v1beta.RegionsServiceClient() + + # Initialize request argument(s) + request = merchant_accounts_v1beta.ListRegionsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_regions(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.shopping.merchant_accounts_v1beta.types.ListRegionsRequest, dict]): + The request object. Request message for the ``ListRegions`` method. + parent (str): + Required. The account to list regions for. Format: + ``accounts/{account}`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.shopping.merchant_accounts_v1beta.services.regions_service.pagers.ListRegionsPager: + Response message for the ListRegions method. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, regions.ListRegionsRequest): + request = regions.ListRegionsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_regions] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. 
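+        # Note: the RPC returns a single ListRegionsResponse page; it is wrapped
+        # in a ListRegionsPager below so that iteration fetches further pages via
+        # next_page_token automatically.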
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListRegionsPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def __enter__(self) -> "RegionsServiceClient": + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! + """ + self.transport.close() + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("RegionsServiceClient",) diff --git a/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/regions_service/pagers.py b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/regions_service/pagers.py new file mode 100644 index 000000000000..3f9e97f403ee --- /dev/null +++ b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/regions_service/pagers.py @@ -0,0 +1,155 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import ( + Any, + AsyncIterator, + Awaitable, + Callable, + Iterator, + Optional, + Sequence, + Tuple, +) + +from google.shopping.merchant_accounts_v1beta.types import regions + + +class ListRegionsPager: + """A pager for iterating through ``list_regions`` requests. + + This class thinly wraps an initial + :class:`google.shopping.merchant_accounts_v1beta.types.ListRegionsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``regions`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListRegions`` requests and continue to iterate + through the ``regions`` field on the + corresponding responses. + + All the usual :class:`google.shopping.merchant_accounts_v1beta.types.ListRegionsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., regions.ListRegionsResponse], + request: regions.ListRegionsRequest, + response: regions.ListRegionsResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.shopping.merchant_accounts_v1beta.types.ListRegionsRequest): + The initial request object. + response (google.shopping.merchant_accounts_v1beta.types.ListRegionsResponse): + The initial response object. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = regions.ListRegionsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[regions.ListRegionsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[regions.Region]: + for page in self.pages: + yield from page.regions + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListRegionsAsyncPager: + """A pager for iterating through ``list_regions`` requests. + + This class thinly wraps an initial + :class:`google.shopping.merchant_accounts_v1beta.types.ListRegionsResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``regions`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListRegions`` requests and continue to iterate + through the ``regions`` field on the + corresponding responses. + + All the usual :class:`google.shopping.merchant_accounts_v1beta.types.ListRegionsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., Awaitable[regions.ListRegionsResponse]], + request: regions.ListRegionsRequest, + response: regions.ListRegionsResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.shopping.merchant_accounts_v1beta.types.ListRegionsRequest): + The initial request object. + response (google.shopping.merchant_accounts_v1beta.types.ListRegionsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
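Neither pager is normally constructed by hand; they come back from ``list_regions`` on the generated clients. As an illustrative sketch (not part of the generated code; the account name is a placeholder), the sync pager supports both item-level and page-level iteration:

```python
# Illustrative sketch; not part of the generated module.
from google.shopping import merchant_accounts_v1beta

client = merchant_accounts_v1beta.RegionsServiceClient()
request = merchant_accounts_v1beta.ListRegionsRequest(parent="accounts/123")

# Item-level iteration: the pager lazily resolves follow-up pages.
for region in client.list_regions(request=request):
    print(region.name)

# Page-level iteration, e.g. to inspect next_page_token on each response.
for page in client.list_regions(request=request).pages:
    print(len(page.regions), page.next_page_token)
```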
+ """ + self._method = method + self._request = regions.ListRegionsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[regions.ListRegionsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + + def __aiter__(self) -> AsyncIterator[regions.Region]: + async def async_generator(): + async for page in self.pages: + for response in page.regions: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) diff --git a/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/regions_service/transports/__init__.py b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/regions_service/transports/__init__.py new file mode 100644 index 000000000000..93cbc582bccc --- /dev/null +++ b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/regions_service/transports/__init__.py @@ -0,0 +1,36 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +from typing import Dict, Type + +from .base import RegionsServiceTransport +from .grpc import RegionsServiceGrpcTransport +from .grpc_asyncio import RegionsServiceGrpcAsyncIOTransport +from .rest import RegionsServiceRestInterceptor, RegionsServiceRestTransport + +# Compile a registry of transports. +_transport_registry = OrderedDict() # type: Dict[str, Type[RegionsServiceTransport]] +_transport_registry["grpc"] = RegionsServiceGrpcTransport +_transport_registry["grpc_asyncio"] = RegionsServiceGrpcAsyncIOTransport +_transport_registry["rest"] = RegionsServiceRestTransport + +__all__ = ( + "RegionsServiceTransport", + "RegionsServiceGrpcTransport", + "RegionsServiceGrpcAsyncIOTransport", + "RegionsServiceRestTransport", + "RegionsServiceRestInterceptor", +) diff --git a/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/regions_service/transports/base.py b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/regions_service/transports/base.py new file mode 100644 index 000000000000..f1c36aec1469 --- /dev/null +++ b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/regions_service/transports/base.py @@ -0,0 +1,213 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import abc +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union + +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore +from google.protobuf import empty_pb2 # type: ignore + +from google.shopping.merchant_accounts_v1beta import gapic_version as package_version +from google.shopping.merchant_accounts_v1beta.types import regions + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +class RegionsServiceTransport(abc.ABC): + """Abstract transport class for RegionsService.""" + + AUTH_SCOPES = ("https://www.googleapis.com/auth/content",) + + DEFAULT_HOST: str = "merchantapi.googleapis.com" + + def __init__( + self, + *, + host: str = DEFAULT_HOST, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + **kwargs, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'merchantapi.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A list of scopes. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + """ + + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} + + # Save the scopes. + self._scopes = scopes + + # If no credentials are provided, then determine the appropriate + # defaults. 
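+        # Resolution order below: an explicit credentials_file is loaded via
+        # google.auth.load_credentials_from_file; otherwise, if no credentials
+        # object was supplied, google.auth.default() is used. Service account
+        # credentials may then be switched to self-signed JWTs when
+        # always_use_jwt_access is set.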
+ if credentials and credentials_file: + raise core_exceptions.DuplicateCredentialArgs( + "'credentials_file' and 'credentials' are mutually exclusive" + ) + + if credentials_file is not None: + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, **scopes_kwargs, quota_project_id=quota_project_id + ) + elif credentials is None: + credentials, _ = google.auth.default( + **scopes_kwargs, quota_project_id=quota_project_id + ) + # Don't apply audience if the credentials file passed from user. + if hasattr(credentials, "with_gdch_audience"): + credentials = credentials.with_gdch_audience( + api_audience if api_audience else host + ) + + # If the credentials are service account credentials, then always try to use self signed JWT. + if ( + always_use_jwt_access + and isinstance(credentials, service_account.Credentials) + and hasattr(service_account.Credentials, "with_always_use_jwt_access") + ): + credentials = credentials.with_always_use_jwt_access(True) + + # Save the credentials. + self._credentials = credentials + + # Save the hostname. Default to port 443 (HTTPS) if none is specified. + if ":" not in host: + host += ":443" + self._host = host + + @property + def host(self): + return self._host + + def _prep_wrapped_messages(self, client_info): + # Precompute the wrapped methods. + self._wrapped_methods = { + self.get_region: gapic_v1.method.wrap_method( + self.get_region, + default_timeout=None, + client_info=client_info, + ), + self.create_region: gapic_v1.method.wrap_method( + self.create_region, + default_timeout=None, + client_info=client_info, + ), + self.update_region: gapic_v1.method.wrap_method( + self.update_region, + default_timeout=None, + client_info=client_info, + ), + self.delete_region: gapic_v1.method.wrap_method( + self.delete_region, + default_timeout=None, + client_info=client_info, + ), + self.list_regions: gapic_v1.method.wrap_method( + self.list_regions, + default_timeout=None, + client_info=client_info, + ), + } + + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! 
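As the warning above notes, ``close()`` should only run on transports that are not shared. The generated client surfaces this through its context manager (``__enter__``/``__exit__`` earlier in this diff), so a ``with`` block is the usual way to guarantee cleanup; an illustrative sketch with a placeholder resource name:

```python
# Illustrative sketch; not part of the generated module.
from google.shopping import merchant_accounts_v1beta

with merchant_accounts_v1beta.RegionsServiceClient() as client:
    region = client.get_region(
        request=merchant_accounts_v1beta.GetRegionRequest(
            name="accounts/123/regions/us-east"
        )
    )
    print(region)
# Exiting the block calls client.transport.close(); avoid this pattern when
# the transport instance is shared with other clients.
```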
+ """ + raise NotImplementedError() + + @property + def get_region( + self, + ) -> Callable[ + [regions.GetRegionRequest], Union[regions.Region, Awaitable[regions.Region]] + ]: + raise NotImplementedError() + + @property + def create_region( + self, + ) -> Callable[ + [regions.CreateRegionRequest], Union[regions.Region, Awaitable[regions.Region]] + ]: + raise NotImplementedError() + + @property + def update_region( + self, + ) -> Callable[ + [regions.UpdateRegionRequest], Union[regions.Region, Awaitable[regions.Region]] + ]: + raise NotImplementedError() + + @property + def delete_region( + self, + ) -> Callable[ + [regions.DeleteRegionRequest], + Union[empty_pb2.Empty, Awaitable[empty_pb2.Empty]], + ]: + raise NotImplementedError() + + @property + def list_regions( + self, + ) -> Callable[ + [regions.ListRegionsRequest], + Union[regions.ListRegionsResponse, Awaitable[regions.ListRegionsResponse]], + ]: + raise NotImplementedError() + + @property + def kind(self) -> str: + raise NotImplementedError() + + +__all__ = ("RegionsServiceTransport",) diff --git a/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/regions_service/transports/grpc.py b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/regions_service/transports/grpc.py new file mode 100644 index 000000000000..b43307019ac1 --- /dev/null +++ b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/regions_service/transports/grpc.py @@ -0,0 +1,375 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, grpc_helpers +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.protobuf import empty_pb2 # type: ignore +import grpc # type: ignore + +from google.shopping.merchant_accounts_v1beta.types import regions + +from .base import DEFAULT_CLIENT_INFO, RegionsServiceTransport + + +class RegionsServiceGrpcTransport(RegionsServiceTransport): + """gRPC backend transport for RegionsService. + + Manages regions configuration. + + This API defines the following resource model: + + - [Region][google.shopping.merchant.accounts.v1main.Region] + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. 
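Callers usually let ``RegionsServiceClient`` choose a transport, but the registry keys in ``transports/__init__.py`` ("grpc", "grpc_asyncio", "rest") can be passed as the ``transport`` argument, or a pre-built transport instance can be supplied. An illustrative sketch, assuming application default credentials are available in the environment:

```python
# Illustrative sketch; not part of the generated module.
from google.shopping import merchant_accounts_v1beta
from google.shopping.merchant_accounts_v1beta.services.regions_service import (
    transports,
)

# Select the synchronous gRPC transport by its registry key.
client = merchant_accounts_v1beta.RegionsServiceClient(transport="grpc")

# Or hand over a pre-configured transport instance; per the client constructor,
# credentials and scopes must then be set on the transport itself.
grpc_transport = transports.RegionsServiceGrpcTransport()
client = merchant_accounts_v1beta.RegionsServiceClient(transport=grpc_transport)
```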
+ """ + + _stubs: Dict[str, Callable] + + def __init__( + self, + *, + host: str = "merchantapi.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'merchantapi.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if a ``channel`` instance is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if a ``channel`` instance is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if a ``channel`` instance is provided. + channel (Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]]): + A ``Channel`` instance through which to make calls, or a Callable + that constructs and returns one. If set to None, ``self.create_channel`` + is used to create the channel. If a Callable is given, it will be called + with the same arguments as used in ``self.create_channel``. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if a ``channel`` instance is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. 
+ google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if isinstance(channel, grpc.Channel): + # Ignore credentials if a channel was passed. + credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + # initialize with the provided callable or the default channel + channel_init = channel or type(self).create_channel + self._grpc_channel = channel_init( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @classmethod + def create_channel( + cls, + host: str = "merchantapi.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> grpc.Channel: + """Create and return a gRPC channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. 
+ kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + grpc.Channel: A gRPC channel object. + + Raises: + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + + return grpc_helpers.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs, + ) + + @property + def grpc_channel(self) -> grpc.Channel: + """Return the channel designed to connect to this service.""" + return self._grpc_channel + + @property + def get_region(self) -> Callable[[regions.GetRegionRequest], regions.Region]: + r"""Return a callable for the get region method over gRPC. + + Retrieves a region defined in your Merchant Center + account. + + Returns: + Callable[[~.GetRegionRequest], + ~.Region]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_region" not in self._stubs: + self._stubs["get_region"] = self.grpc_channel.unary_unary( + "/google.shopping.merchant.accounts.v1beta.RegionsService/GetRegion", + request_serializer=regions.GetRegionRequest.serialize, + response_deserializer=regions.Region.deserialize, + ) + return self._stubs["get_region"] + + @property + def create_region(self) -> Callable[[regions.CreateRegionRequest], regions.Region]: + r"""Return a callable for the create region method over gRPC. + + Creates a region definition in your Merchant Center + account. Executing this method requires admin access. + + Returns: + Callable[[~.CreateRegionRequest], + ~.Region]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_region" not in self._stubs: + self._stubs["create_region"] = self.grpc_channel.unary_unary( + "/google.shopping.merchant.accounts.v1beta.RegionsService/CreateRegion", + request_serializer=regions.CreateRegionRequest.serialize, + response_deserializer=regions.Region.deserialize, + ) + return self._stubs["create_region"] + + @property + def update_region(self) -> Callable[[regions.UpdateRegionRequest], regions.Region]: + r"""Return a callable for the update region method over gRPC. + + Updates a region definition in your Merchant Center + account. Executing this method requires admin access. + + Returns: + Callable[[~.UpdateRegionRequest], + ~.Region]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
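+        # The stub is created lazily on first access and cached in self._stubs,
+        # so subsequent lookups of this property reuse the same unary-unary
+        # callable on the channel.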
+ if "update_region" not in self._stubs: + self._stubs["update_region"] = self.grpc_channel.unary_unary( + "/google.shopping.merchant.accounts.v1beta.RegionsService/UpdateRegion", + request_serializer=regions.UpdateRegionRequest.serialize, + response_deserializer=regions.Region.deserialize, + ) + return self._stubs["update_region"] + + @property + def delete_region(self) -> Callable[[regions.DeleteRegionRequest], empty_pb2.Empty]: + r"""Return a callable for the delete region method over gRPC. + + Deletes a region definition from your Merchant Center + account. Executing this method requires admin access. + + Returns: + Callable[[~.DeleteRegionRequest], + ~.Empty]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_region" not in self._stubs: + self._stubs["delete_region"] = self.grpc_channel.unary_unary( + "/google.shopping.merchant.accounts.v1beta.RegionsService/DeleteRegion", + request_serializer=regions.DeleteRegionRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs["delete_region"] + + @property + def list_regions( + self, + ) -> Callable[[regions.ListRegionsRequest], regions.ListRegionsResponse]: + r"""Return a callable for the list regions method over gRPC. + + Lists the regions in your Merchant Center account. + + Returns: + Callable[[~.ListRegionsRequest], + ~.ListRegionsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_regions" not in self._stubs: + self._stubs["list_regions"] = self.grpc_channel.unary_unary( + "/google.shopping.merchant.accounts.v1beta.RegionsService/ListRegions", + request_serializer=regions.ListRegionsRequest.serialize, + response_deserializer=regions.ListRegionsResponse.deserialize, + ) + return self._stubs["list_regions"] + + def close(self): + self.grpc_channel.close() + + @property + def kind(self) -> str: + return "grpc" + + +__all__ = ("RegionsServiceGrpcTransport",) diff --git a/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/regions_service/transports/grpc_asyncio.py b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/regions_service/transports/grpc_asyncio.py new file mode 100644 index 000000000000..9f0cfe2d450a --- /dev/null +++ b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/regions_service/transports/grpc_asyncio.py @@ -0,0 +1,413 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1, grpc_helpers_async +from google.api_core import retry_async as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.protobuf import empty_pb2 # type: ignore +import grpc # type: ignore +from grpc.experimental import aio # type: ignore + +from google.shopping.merchant_accounts_v1beta.types import regions + +from .base import DEFAULT_CLIENT_INFO, RegionsServiceTransport +from .grpc import RegionsServiceGrpcTransport + + +class RegionsServiceGrpcAsyncIOTransport(RegionsServiceTransport): + """gRPC AsyncIO backend transport for RegionsService. + + Manages regions configuration. + + This API defines the following resource model: + + - [Region][google.shopping.merchant.accounts.v1main.Region] + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + + _grpc_channel: aio.Channel + _stubs: Dict[str, Callable] = {} + + @classmethod + def create_channel( + cls, + host: str = "merchantapi.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> aio.Channel: + """Create and return a gRPC AsyncIO channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + aio.Channel: A gRPC AsyncIO channel object. 
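Besides building a channel directly, ``create_channel`` can be handed to the transport constructor as the ``channel`` callable, in which case it is invoked with the host, credentials, and scopes the transport resolves in ``__init__``. An illustrative sketch, assuming application default credentials:

```python
# Illustrative sketch; not part of the generated module.
from google.shopping.merchant_accounts_v1beta.services.regions_service.transports import (
    RegionsServiceGrpcAsyncIOTransport,
)

# Pass the classmethod itself (not a channel instance); the transport calls it
# with the credentials, scopes, and host it resolves during initialization.
transport = RegionsServiceGrpcAsyncIOTransport(
    channel=RegionsServiceGrpcAsyncIOTransport.create_channel,
)
```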
+ """ + + return grpc_helpers_async.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs, + ) + + def __init__( + self, + *, + host: str = "merchantapi.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[Union[aio.Channel, Callable[..., aio.Channel]]] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'merchantapi.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if a ``channel`` instance is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if a ``channel`` instance is provided. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + channel (Optional[Union[aio.Channel, Callable[..., aio.Channel]]]): + A ``Channel`` instance through which to make calls, or a Callable + that constructs and returns one. If set to None, ``self.create_channel`` + is used to create the channel. If a Callable is given, it will be called + with the same arguments as used in ``self.create_channel``. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if a ``channel`` instance is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. 
+ Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if isinstance(channel, aio.Channel): + # Ignore credentials if a channel was passed. + credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + # initialize with the provided callable or the default channel + channel_init = channel or type(self).create_channel + self._grpc_channel = channel_init( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @property + def grpc_channel(self) -> aio.Channel: + """Create the channel designed to connect to this service. + + This property caches on the instance; repeated calls return + the same channel. + """ + # Return the channel from cache. + return self._grpc_channel + + @property + def get_region( + self, + ) -> Callable[[regions.GetRegionRequest], Awaitable[regions.Region]]: + r"""Return a callable for the get region method over gRPC. + + Retrieves a region defined in your Merchant Center + account. + + Returns: + Callable[[~.GetRegionRequest], + Awaitable[~.Region]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
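The AsyncIO transport above is normally reached through the generated async client rather than used directly. A minimal sketch, assuming the versioned package exports `RegionsServiceAsyncClient` (as the other samples in this package suggest) and that Application Default Credentials are available; the account and region IDs are placeholders:

```python
import asyncio

from google.shopping import merchant_accounts_v1beta


async def main() -> None:
    # "grpc_asyncio" selects the RegionsServiceGrpcAsyncIOTransport defined above;
    # credentials are resolved from the environment.
    client = merchant_accounts_v1beta.RegionsServiceAsyncClient(transport="grpc_asyncio")
    # Placeholder resource name: accounts/{account}/regions/{region}.
    region = await client.get_region(request={"name": "accounts/123/regions/456"})
    print(region)


asyncio.run(main())
```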
+ if "get_region" not in self._stubs: + self._stubs["get_region"] = self.grpc_channel.unary_unary( + "/google.shopping.merchant.accounts.v1beta.RegionsService/GetRegion", + request_serializer=regions.GetRegionRequest.serialize, + response_deserializer=regions.Region.deserialize, + ) + return self._stubs["get_region"] + + @property + def create_region( + self, + ) -> Callable[[regions.CreateRegionRequest], Awaitable[regions.Region]]: + r"""Return a callable for the create region method over gRPC. + + Creates a region definition in your Merchant Center + account. Executing this method requires admin access. + + Returns: + Callable[[~.CreateRegionRequest], + Awaitable[~.Region]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_region" not in self._stubs: + self._stubs["create_region"] = self.grpc_channel.unary_unary( + "/google.shopping.merchant.accounts.v1beta.RegionsService/CreateRegion", + request_serializer=regions.CreateRegionRequest.serialize, + response_deserializer=regions.Region.deserialize, + ) + return self._stubs["create_region"] + + @property + def update_region( + self, + ) -> Callable[[regions.UpdateRegionRequest], Awaitable[regions.Region]]: + r"""Return a callable for the update region method over gRPC. + + Updates a region definition in your Merchant Center + account. Executing this method requires admin access. + + Returns: + Callable[[~.UpdateRegionRequest], + Awaitable[~.Region]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_region" not in self._stubs: + self._stubs["update_region"] = self.grpc_channel.unary_unary( + "/google.shopping.merchant.accounts.v1beta.RegionsService/UpdateRegion", + request_serializer=regions.UpdateRegionRequest.serialize, + response_deserializer=regions.Region.deserialize, + ) + return self._stubs["update_region"] + + @property + def delete_region( + self, + ) -> Callable[[regions.DeleteRegionRequest], Awaitable[empty_pb2.Empty]]: + r"""Return a callable for the delete region method over gRPC. + + Deletes a region definition from your Merchant Center + account. Executing this method requires admin access. + + Returns: + Callable[[~.DeleteRegionRequest], + Awaitable[~.Empty]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_region" not in self._stubs: + self._stubs["delete_region"] = self.grpc_channel.unary_unary( + "/google.shopping.merchant.accounts.v1beta.RegionsService/DeleteRegion", + request_serializer=regions.DeleteRegionRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs["delete_region"] + + @property + def list_regions( + self, + ) -> Callable[[regions.ListRegionsRequest], Awaitable[regions.ListRegionsResponse]]: + r"""Return a callable for the list regions method over gRPC. + + Lists the regions in your Merchant Center account. 
+ + Returns: + Callable[[~.ListRegionsRequest], + Awaitable[~.ListRegionsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_regions" not in self._stubs: + self._stubs["list_regions"] = self.grpc_channel.unary_unary( + "/google.shopping.merchant.accounts.v1beta.RegionsService/ListRegions", + request_serializer=regions.ListRegionsRequest.serialize, + response_deserializer=regions.ListRegionsResponse.deserialize, + ) + return self._stubs["list_regions"] + + def _prep_wrapped_messages(self, client_info): + """Precompute the wrapped methods, overriding the base class method to use async wrappers.""" + self._wrapped_methods = { + self.get_region: gapic_v1.method_async.wrap_method( + self.get_region, + default_timeout=None, + client_info=client_info, + ), + self.create_region: gapic_v1.method_async.wrap_method( + self.create_region, + default_timeout=None, + client_info=client_info, + ), + self.update_region: gapic_v1.method_async.wrap_method( + self.update_region, + default_timeout=None, + client_info=client_info, + ), + self.delete_region: gapic_v1.method_async.wrap_method( + self.delete_region, + default_timeout=None, + client_info=client_info, + ), + self.list_regions: gapic_v1.method_async.wrap_method( + self.list_regions, + default_timeout=None, + client_info=client_info, + ), + } + + def close(self): + return self.grpc_channel.close() + + +__all__ = ("RegionsServiceGrpcAsyncIOTransport",) diff --git a/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/regions_service/transports/rest.py b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/regions_service/transports/rest.py new file mode 100644 index 000000000000..4302f83d671a --- /dev/null +++ b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/regions_service/transports/rest.py @@ -0,0 +1,792 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +import dataclasses +import json # type: ignore +import re +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, path_template, rest_helpers, rest_streaming +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.transport.requests import AuthorizedSession # type: ignore +from google.protobuf import json_format +import grpc # type: ignore +from requests import __version__ as requests_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + + +from google.protobuf import empty_pb2 # type: ignore + +from google.shopping.merchant_accounts_v1beta.types import regions + +from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO +from .base import RegionsServiceTransport + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) + + +class RegionsServiceRestInterceptor: + """Interceptor for RegionsService. + + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. + Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the RegionsServiceRestTransport. + + .. code-block:: python + class MyCustomRegionsServiceInterceptor(RegionsServiceRestInterceptor): + def pre_create_region(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_create_region(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_delete_region(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def pre_get_region(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_region(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_regions(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_regions(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_update_region(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_update_region(self, response): + logging.log(f"Received response: {response}") + return response + + transport = RegionsServiceRestTransport(interceptor=MyCustomRegionsServiceInterceptor()) + client = RegionsServiceClient(transport=transport) + + + """ + + def pre_create_region( + self, request: regions.CreateRegionRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[regions.CreateRegionRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for create_region + + Override in a subclass to manipulate the request or metadata + before they are sent to the RegionsService server. 
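As a concrete illustration of the interceptor hooks described here, the sketch below (not part of the generated sources) appends an extra metadata entry in `pre_create_region` before handing the transport to the client; the header name is purely illustrative:

```python
from google.shopping import merchant_accounts_v1beta
from google.shopping.merchant_accounts_v1beta.services.regions_service.transports.rest import (
    RegionsServiceRestInterceptor,
    RegionsServiceRestTransport,
)


class MetadataAddingInterceptor(RegionsServiceRestInterceptor):
    def pre_create_region(self, request, metadata):
        # Append one extra metadata entry before the request is sent.
        return request, tuple(metadata) + (("x-example-header", "demo"),)


transport = RegionsServiceRestTransport(interceptor=MetadataAddingInterceptor())
client = merchant_accounts_v1beta.RegionsServiceClient(transport=transport)
```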
+ """ + return request, metadata + + def post_create_region(self, response: regions.Region) -> regions.Region: + """Post-rpc interceptor for create_region + + Override in a subclass to manipulate the response + after it is returned by the RegionsService server but before + it is returned to user code. + """ + return response + + def pre_delete_region( + self, request: regions.DeleteRegionRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[regions.DeleteRegionRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for delete_region + + Override in a subclass to manipulate the request or metadata + before they are sent to the RegionsService server. + """ + return request, metadata + + def pre_get_region( + self, request: regions.GetRegionRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[regions.GetRegionRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_region + + Override in a subclass to manipulate the request or metadata + before they are sent to the RegionsService server. + """ + return request, metadata + + def post_get_region(self, response: regions.Region) -> regions.Region: + """Post-rpc interceptor for get_region + + Override in a subclass to manipulate the response + after it is returned by the RegionsService server but before + it is returned to user code. + """ + return response + + def pre_list_regions( + self, request: regions.ListRegionsRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[regions.ListRegionsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_regions + + Override in a subclass to manipulate the request or metadata + before they are sent to the RegionsService server. + """ + return request, metadata + + def post_list_regions( + self, response: regions.ListRegionsResponse + ) -> regions.ListRegionsResponse: + """Post-rpc interceptor for list_regions + + Override in a subclass to manipulate the response + after it is returned by the RegionsService server but before + it is returned to user code. + """ + return response + + def pre_update_region( + self, request: regions.UpdateRegionRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[regions.UpdateRegionRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for update_region + + Override in a subclass to manipulate the request or metadata + before they are sent to the RegionsService server. + """ + return request, metadata + + def post_update_region(self, response: regions.Region) -> regions.Region: + """Post-rpc interceptor for update_region + + Override in a subclass to manipulate the response + after it is returned by the RegionsService server but before + it is returned to user code. + """ + return response + + +@dataclasses.dataclass +class RegionsServiceRestStub: + _session: AuthorizedSession + _host: str + _interceptor: RegionsServiceRestInterceptor + + +class RegionsServiceRestTransport(RegionsServiceTransport): + """REST backend transport for RegionsService. + + Manages regions configuration. + + This API defines the following resource model: + + - [Region][google.shopping.merchant.accounts.v1main.Region] + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. 
+ + It sends JSON representations of protocol buffers over HTTP/1.1 + + """ + + def __init__( + self, + *, + host: str = "merchantapi.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", + interceptor: Optional[RegionsServiceRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'merchantapi.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. It is ignored + if ``channel`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. + """ + # Run the base constructor + # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. 
+        # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the
+        # credentials object
+        maybe_url_match = re.match("^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host)
+        if maybe_url_match is None:
+            raise ValueError(
+                f"Unexpected hostname structure: {host}"
+            )  # pragma: NO COVER
+
+        url_match_items = maybe_url_match.groupdict()
+
+        host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host
+
+        super().__init__(
+            host=host,
+            credentials=credentials,
+            client_info=client_info,
+            always_use_jwt_access=always_use_jwt_access,
+            api_audience=api_audience,
+        )
+        self._session = AuthorizedSession(
+            self._credentials, default_host=self.DEFAULT_HOST
+        )
+        if client_cert_source_for_mtls:
+            self._session.configure_mtls_channel(client_cert_source_for_mtls)
+        self._interceptor = interceptor or RegionsServiceRestInterceptor()
+        self._prep_wrapped_messages(client_info)
+
+    class _CreateRegion(RegionsServiceRestStub):
+        def __hash__(self):
+            return hash("CreateRegion")
+
+        __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {
+            "regionId": "",
+        }
+
+        @classmethod
+        def _get_unset_required_fields(cls, message_dict):
+            return {
+                k: v
+                for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items()
+                if k not in message_dict
+            }
+
+        def __call__(
+            self,
+            request: regions.CreateRegionRequest,
+            *,
+            retry: OptionalRetry = gapic_v1.method.DEFAULT,
+            timeout: Optional[float] = None,
+            metadata: Sequence[Tuple[str, str]] = (),
+        ) -> regions.Region:
+            r"""Call the create region method over HTTP.
+
+            Args:
+                request (~.regions.CreateRegionRequest):
+                    The request object. Request message for the ``CreateRegion`` method.
+                retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                    should be retried.
+                timeout (float): The timeout for this request.
+                metadata (Sequence[Tuple[str, str]]): Strings which should be
+                    sent along with the request as metadata.
+
+            Returns:
+                ~.regions.Region:
+                    Represents a geographic region that you can use as a
+                    target with both the ``RegionalInventory`` and
+                    ``ShippingSettings`` services. You can define regions as
+                    collections of either postal codes or, in some
+                    countries, using predefined geotargets. For more
+                    information, see `Set up
+                    regions `__
+                    for more information.
+
+            """
+
+            http_options: List[Dict[str, str]] = [
+                {
+                    "method": "post",
+                    "uri": "/accounts/v1beta/{parent=accounts/*}/regions",
+                    "body": "region",
+                },
+            ]
+            request, metadata = self._interceptor.pre_create_region(request, metadata)
+            pb_request = regions.CreateRegionRequest.pb(request)
+            transcoded_request = path_template.transcode(http_options, pb_request)
+
+            # Jsonify the request body
+
+            body = json_format.MessageToJson(
+                transcoded_request["body"], use_integers_for_enums=True
+            )
+            uri = transcoded_request["uri"]
+            method = transcoded_request["method"]
+
+            # Jsonify the query params
+            query_params = json.loads(
+                json_format.MessageToJson(
+                    transcoded_request["query_params"],
+                    use_integers_for_enums=True,
+                )
+            )
+            query_params.update(self._get_unset_required_fields(query_params))
+
+            query_params["$alt"] = "json;enum-encoding=int"
+
+            # Send the request
+            headers = dict(metadata)
+            headers["Content-Type"] = "application/json"
+            response = getattr(self._session, method)(
+                "{host}{uri}".format(host=self._host, uri=uri),
+                timeout=timeout,
+                headers=headers,
+                params=rest_helpers.flatten_query_params(query_params, strict=True),
+                data=body,
+            )
+
+            # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception
+            # subclass.
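Because failed REST calls are surfaced through `core_exceptions.from_http_response` as `GoogleAPICallError` subclasses, callers can catch the typed errors. A hedged sketch with a placeholder resource name:

```python
from google.api_core import exceptions as core_exceptions
from google.shopping import merchant_accounts_v1beta

client = merchant_accounts_v1beta.RegionsServiceClient(transport="rest")
try:
    client.get_region(request={"name": "accounts/123/regions/999"})
except core_exceptions.NotFound as exc:
    print(f"Region not found: {exc}")
except core_exceptions.GoogleAPICallError as exc:
    print(f"API call failed: {exc}")
```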
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = regions.Region() + pb_resp = regions.Region.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_create_region(resp) + return resp + + class _DeleteRegion(RegionsServiceRestStub): + def __hash__(self): + return hash("DeleteRegion") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: regions.DeleteRegionRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ): + r"""Call the delete region method over HTTP. + + Args: + request (~.regions.DeleteRegionRequest): + The request object. Request message for the ``DeleteRegion`` method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/accounts/v1beta/{name=accounts/*/regions/*}", + }, + ] + request, metadata = self._interceptor.pre_delete_region(request, metadata) + pb_request = regions.DeleteRegionRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + class _GetRegion(RegionsServiceRestStub): + def __hash__(self): + return hash("GetRegion") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: regions.GetRegionRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> regions.Region: + r"""Call the get region method over HTTP. + + Args: + request (~.regions.GetRegionRequest): + The request object. Request message for the ``GetRegion`` method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + ~.regions.Region: + Represents a geographic region that you can use as a + target with both the ``RegionalInventory`` and + ``ShippingSettings`` services. You can define regions as + collections of either postal codes or, in some + countries, using predefined geotargets. For more + information, see `Set up + regions `__ + for more information. + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/accounts/v1beta/{name=accounts/*/regions/*}", + }, + ] + request, metadata = self._interceptor.pre_get_region(request, metadata) + pb_request = regions.GetRegionRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = regions.Region() + pb_resp = regions.Region.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_region(resp) + return resp + + class _ListRegions(RegionsServiceRestStub): + def __hash__(self): + return hash("ListRegions") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: regions.ListRegionsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> regions.ListRegionsResponse: + r"""Call the list regions method over HTTP. + + Args: + request (~.regions.ListRegionsRequest): + The request object. Request message for the ``ListRegions`` method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.regions.ListRegionsResponse: + Response message for the ``ListRegions`` method. 
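At the transport level this callable returns the raw `ListRegionsResponse`; through the generated client, results are typically consumed via the iterable pager. A minimal sketch, assuming Application Default Credentials and a placeholder parent:

```python
from google.shopping import merchant_accounts_v1beta

client = merchant_accounts_v1beta.RegionsServiceClient()
# The pager transparently fetches additional ListRegionsResponse pages as needed.
for region in client.list_regions(request={"parent": "accounts/123"}):
    print(region.name)
```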
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/accounts/v1beta/{parent=accounts/*}/regions", + }, + ] + request, metadata = self._interceptor.pre_list_regions(request, metadata) + pb_request = regions.ListRegionsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = regions.ListRegionsResponse() + pb_resp = regions.ListRegionsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_regions(resp) + return resp + + class _UpdateRegion(RegionsServiceRestStub): + def __hash__(self): + return hash("UpdateRegion") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: regions.UpdateRegionRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> regions.Region: + r"""Call the update region method over HTTP. + + Args: + request (~.regions.UpdateRegionRequest): + The request object. Request message for the ``UpdateRegion`` method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.regions.Region: + Represents a geographic region that you can use as a + target with both the ``RegionalInventory`` and + ``ShippingSettings`` services. You can define regions as + collections of either postal codes or, in some + countries, using predefined geotargets. For more + information, see `Set up + regions `__ + for more information. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "patch", + "uri": "/accounts/v1beta/{region.name=accounts/*/regions/*}", + "body": "region", + }, + ] + request, metadata = self._interceptor.pre_update_region(request, metadata) + pb_request = regions.UpdateRegionRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = regions.Region() + pb_resp = regions.Region.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_update_region(resp) + return resp + + @property + def create_region(self) -> Callable[[regions.CreateRegionRequest], regions.Region]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._CreateRegion(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete_region(self) -> Callable[[regions.DeleteRegionRequest], empty_pb2.Empty]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._DeleteRegion(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_region(self) -> Callable[[regions.GetRegionRequest], regions.Region]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetRegion(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_regions( + self, + ) -> Callable[[regions.ListRegionsRequest], regions.ListRegionsResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListRegions(self._session, self._host, self._interceptor) # type: ignore + + @property + def update_region(self) -> Callable[[regions.UpdateRegionRequest], regions.Region]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._UpdateRegion(self._session, self._host, self._interceptor) # type: ignore + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__ = ("RegionsServiceRestTransport",) diff --git a/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/shipping_settings_service/__init__.py b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/shipping_settings_service/__init__.py new file mode 100644 index 000000000000..dfd9d0faa08b --- /dev/null +++ b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/shipping_settings_service/__init__.py @@ -0,0 +1,22 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from .async_client import ShippingSettingsServiceAsyncClient +from .client import ShippingSettingsServiceClient + +__all__ = ( + "ShippingSettingsServiceClient", + "ShippingSettingsServiceAsyncClient", +) diff --git a/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/shipping_settings_service/async_client.py b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/shipping_settings_service/async_client.py new file mode 100644 index 000000000000..a12e387affcd --- /dev/null +++ b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/shipping_settings_service/async_client.py @@ -0,0 +1,487 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +import functools +import re +from typing import ( + Callable, + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, +) + +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry_async as retries +from google.api_core.client_options import ClientOptions +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.shopping.merchant_accounts_v1beta import gapic_version as package_version + +try: + OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.AsyncRetry, object, None] # type: ignore + +from google.shopping.merchant_accounts_v1beta.types import shippingsettings + +from .client import ShippingSettingsServiceClient +from .transports.base import DEFAULT_CLIENT_INFO, ShippingSettingsServiceTransport +from .transports.grpc_asyncio import ShippingSettingsServiceGrpcAsyncIOTransport + + +class ShippingSettingsServiceAsyncClient: + """Service to get method call shipping setting information per + Merchant API method. + """ + + _client: ShippingSettingsServiceClient + + # Copy defaults from the synchronous client for use here. + # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. + DEFAULT_ENDPOINT = ShippingSettingsServiceClient.DEFAULT_ENDPOINT + DEFAULT_MTLS_ENDPOINT = ShippingSettingsServiceClient.DEFAULT_MTLS_ENDPOINT + _DEFAULT_ENDPOINT_TEMPLATE = ( + ShippingSettingsServiceClient._DEFAULT_ENDPOINT_TEMPLATE + ) + _DEFAULT_UNIVERSE = ShippingSettingsServiceClient._DEFAULT_UNIVERSE + + shipping_settings_path = staticmethod( + ShippingSettingsServiceClient.shipping_settings_path + ) + parse_shipping_settings_path = staticmethod( + ShippingSettingsServiceClient.parse_shipping_settings_path + ) + common_billing_account_path = staticmethod( + ShippingSettingsServiceClient.common_billing_account_path + ) + parse_common_billing_account_path = staticmethod( + ShippingSettingsServiceClient.parse_common_billing_account_path + ) + common_folder_path = staticmethod(ShippingSettingsServiceClient.common_folder_path) + parse_common_folder_path = staticmethod( + ShippingSettingsServiceClient.parse_common_folder_path + ) + common_organization_path = staticmethod( + ShippingSettingsServiceClient.common_organization_path + ) + parse_common_organization_path = staticmethod( + ShippingSettingsServiceClient.parse_common_organization_path + ) + common_project_path = staticmethod( + ShippingSettingsServiceClient.common_project_path + ) + parse_common_project_path = staticmethod( + ShippingSettingsServiceClient.parse_common_project_path + ) + common_location_path = staticmethod( + ShippingSettingsServiceClient.common_location_path + ) + parse_common_location_path = staticmethod( + ShippingSettingsServiceClient.parse_common_location_path + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + ShippingSettingsServiceAsyncClient: The constructed client. 
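A minimal sketch of the service-account factory methods described here; the key-file path is a placeholder:

```python
from google.shopping import merchant_accounts_v1beta

client = merchant_accounts_v1beta.ShippingSettingsServiceAsyncClient.from_service_account_file(
    "service-account.json"  # placeholder path to a service account key file
)
```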
+ """ + return ShippingSettingsServiceClient.from_service_account_info.__func__(ShippingSettingsServiceAsyncClient, info, *args, **kwargs) # type: ignore + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + ShippingSettingsServiceAsyncClient: The constructed client. + """ + return ShippingSettingsServiceClient.from_service_account_file.__func__(ShippingSettingsServiceAsyncClient, filename, *args, **kwargs) # type: ignore + + from_service_account_json = from_service_account_file + + @classmethod + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[ClientOptions] = None + ): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + return ShippingSettingsServiceClient.get_mtls_endpoint_and_cert_source(client_options) # type: ignore + + @property + def transport(self) -> ShippingSettingsServiceTransport: + """Returns the transport used by the client instance. + + Returns: + ShippingSettingsServiceTransport: The transport used by the client instance. + """ + return self._client.transport + + @property + def api_endpoint(self): + """Return the API endpoint used by the client instance. + + Returns: + str: The API endpoint used by the client instance. + """ + return self._client._api_endpoint + + @property + def universe_domain(self) -> str: + """Return the universe domain used by the client instance. + + Returns: + str: The universe domain used + by the client instance. 
+ """ + return self._client._universe_domain + + get_transport_class = functools.partial( + type(ShippingSettingsServiceClient).get_transport_class, + type(ShippingSettingsServiceClient), + ) + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[ + Union[ + str, + ShippingSettingsServiceTransport, + Callable[..., ShippingSettingsServiceTransport], + ] + ] = "grpc_asyncio", + client_options: Optional[ClientOptions] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the shipping settings service async client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Optional[Union[str,ShippingSettingsServiceTransport,Callable[..., ShippingSettingsServiceTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport to use. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the ShippingSettingsServiceTransport constructor. + If set to None, a transport is chosen automatically. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): + Custom options for the client. + + 1. The ``api_endpoint`` property can be used to override the + default endpoint provided by the client when ``transport`` is + not explicitly provided. Only if this property is not set and + ``transport`` was not explicitly provided, the endpoint is + determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment + variable, which have one of the following values: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto-switch to the + default mTLS endpoint if client certificate is present; this is + the default value). + + 2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide a client certificate for mTLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + 3. The ``universe_domain`` property can be used to override the + default "googleapis.com" universe. Note that ``api_endpoint`` + property still takes precedence; and ``universe_domain`` is + currently not supported for mTLS. + + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. 
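A short sketch of the constructor options described above, overriding the endpoint via `ClientOptions`; the endpoint shown is simply the default value:

```python
from google.api_core.client_options import ClientOptions

from google.shopping import merchant_accounts_v1beta

client = merchant_accounts_v1beta.ShippingSettingsServiceAsyncClient(
    client_options=ClientOptions(api_endpoint="merchantapi.googleapis.com"),
)
```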
+ """ + self._client = ShippingSettingsServiceClient( + credentials=credentials, + transport=transport, + client_options=client_options, + client_info=client_info, + ) + + async def get_shipping_settings( + self, + request: Optional[ + Union[shippingsettings.GetShippingSettingsRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> shippingsettings.ShippingSettings: + r"""Retrieve shipping setting information. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.shopping import merchant_accounts_v1beta + + async def sample_get_shipping_settings(): + # Create a client + client = merchant_accounts_v1beta.ShippingSettingsServiceAsyncClient() + + # Initialize request argument(s) + request = merchant_accounts_v1beta.GetShippingSettingsRequest( + name="name_value", + ) + + # Make the request + response = await client.get_shipping_settings(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.shopping.merchant_accounts_v1beta.types.GetShippingSettingsRequest, dict]]): + The request object. Request message for the ``GetShippingSetting`` method. + name (:class:`str`): + Required. The name of the shipping setting to retrieve. + Format: ``accounts/{account}/shippingsetting`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.shopping.merchant_accounts_v1beta.types.ShippingSettings: + The merchant account's [shipping + setting]((\ https://support.google.com/merchants/answer/6069284). + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, shippingsettings.GetShippingSettingsRequest): + request = shippingsettings.GetShippingSettingsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
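The wrapped method also accepts per-call `retry` and `timeout` overrides. A hedged sketch with illustrative values and a placeholder resource name:

```python
import asyncio

from google.api_core import retry_async as retries
from google.shopping import merchant_accounts_v1beta


async def main() -> None:
    client = merchant_accounts_v1beta.ShippingSettingsServiceAsyncClient()
    settings = await client.get_shipping_settings(
        name="accounts/123/shippingSettings",  # placeholder account ID
        retry=retries.AsyncRetry(initial=1.0, maximum=10.0, timeout=60.0),
        timeout=30.0,
    )
    print(settings)


asyncio.run(main())
```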
+ rpc = self._client._transport._wrapped_methods[ + self._client._transport.get_shipping_settings + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def insert_shipping_settings( + self, + request: Optional[ + Union[shippingsettings.InsertShippingSettingsRequest, dict] + ] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> shippingsettings.ShippingSettings: + r"""Replace the shipping setting of a merchant with the + request shipping setting. Executing this method requires + admin access. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.shopping import merchant_accounts_v1beta + + async def sample_insert_shipping_settings(): + # Create a client + client = merchant_accounts_v1beta.ShippingSettingsServiceAsyncClient() + + # Initialize request argument(s) + shipping_setting = merchant_accounts_v1beta.ShippingSettings() + shipping_setting.etag = "etag_value" + + request = merchant_accounts_v1beta.InsertShippingSettingsRequest( + parent="parent_value", + shipping_setting=shipping_setting, + ) + + # Make the request + response = await client.insert_shipping_settings(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.shopping.merchant_accounts_v1beta.types.InsertShippingSettingsRequest, dict]]): + The request object. Request message for the ``InsertShippingSetting`` + method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.shopping.merchant_accounts_v1beta.types.ShippingSettings: + The merchant account's [shipping + setting]((\ https://support.google.com/merchants/answer/6069284). + + """ + # Create or coerce a protobuf request object. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, shippingsettings.InsertShippingSettingsRequest): + request = shippingsettings.InsertShippingSettingsRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.insert_shipping_settings + ] + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def __aenter__(self) -> "ShippingSettingsServiceAsyncClient": + return self + + async def __aexit__(self, exc_type, exc, tb): + await self.transport.close() + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("ShippingSettingsServiceAsyncClient",) diff --git a/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/shipping_settings_service/client.py b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/shipping_settings_service/client.py new file mode 100644 index 000000000000..1252e98263f2 --- /dev/null +++ b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/shipping_settings_service/client.py @@ -0,0 +1,896 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +import os +import re +from typing import ( + Callable, + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, + cast, +) +import warnings + +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.shopping.merchant_accounts_v1beta import gapic_version as package_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + +from google.shopping.merchant_accounts_v1beta.types import shippingsettings + +from .transports.base import DEFAULT_CLIENT_INFO, ShippingSettingsServiceTransport +from .transports.grpc import ShippingSettingsServiceGrpcTransport +from .transports.grpc_asyncio import ShippingSettingsServiceGrpcAsyncIOTransport +from .transports.rest import ShippingSettingsServiceRestTransport + + +class ShippingSettingsServiceClientMeta(type): + """Metaclass for the ShippingSettingsService client. + + This provides class-level methods for building and retrieving + support objects (e.g. transport) without polluting the client instance + objects. 
+ """ + + _transport_registry = ( + OrderedDict() + ) # type: Dict[str, Type[ShippingSettingsServiceTransport]] + _transport_registry["grpc"] = ShippingSettingsServiceGrpcTransport + _transport_registry["grpc_asyncio"] = ShippingSettingsServiceGrpcAsyncIOTransport + _transport_registry["rest"] = ShippingSettingsServiceRestTransport + + def get_transport_class( + cls, + label: Optional[str] = None, + ) -> Type[ShippingSettingsServiceTransport]: + """Returns an appropriate transport class. + + Args: + label: The name of the desired transport. If none is + provided, then the first transport in the registry is used. + + Returns: + The transport class to use. + """ + # If a specific transport is requested, return that one. + if label: + return cls._transport_registry[label] + + # No transport is requested; return the default (that is, the first one + # in the dictionary). + return next(iter(cls._transport_registry.values())) + + +class ShippingSettingsServiceClient(metaclass=ShippingSettingsServiceClientMeta): + """Service to get method call shipping setting information per + Merchant API method. + """ + + @staticmethod + def _get_default_mtls_endpoint(api_endpoint): + """Converts api endpoint to mTLS endpoint. + + Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to + "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. + Args: + api_endpoint (Optional[str]): the api endpoint to convert. + Returns: + str: converted mTLS api endpoint. + """ + if not api_endpoint: + return api_endpoint + + mtls_endpoint_re = re.compile( + r"(?P[^.]+)(?P\.mtls)?(?P\.sandbox)?(?P\.googleapis\.com)?" + ) + + m = mtls_endpoint_re.match(api_endpoint) + name, mtls, sandbox, googledomain = m.groups() + if mtls or not googledomain: + return api_endpoint + + if sandbox: + return api_endpoint.replace( + "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" + ) + + return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") + + # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. + DEFAULT_ENDPOINT = "merchantapi.googleapis.com" + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore + DEFAULT_ENDPOINT + ) + + _DEFAULT_ENDPOINT_TEMPLATE = "merchantapi.{UNIVERSE_DOMAIN}" + _DEFAULT_UNIVERSE = "googleapis.com" + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + ShippingSettingsServiceClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + ShippingSettingsServiceClient: The constructed client. 
+ """ + credentials = service_account.Credentials.from_service_account_file(filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> ShippingSettingsServiceTransport: + """Returns the transport used by the client instance. + + Returns: + ShippingSettingsServiceTransport: The transport used by the client + instance. + """ + return self._transport + + @staticmethod + def shipping_settings_path( + account: str, + ) -> str: + """Returns a fully-qualified shipping_settings string.""" + return "accounts/{account}/shippingSettings".format( + account=account, + ) + + @staticmethod + def parse_shipping_settings_path(path: str) -> Dict[str, str]: + """Parses a shipping_settings path into its component segments.""" + m = re.match(r"^accounts/(?P.+?)/shippingSettings$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_billing_account_path( + billing_account: str, + ) -> str: + """Returns a fully-qualified billing_account string.""" + return "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + + @staticmethod + def parse_common_billing_account_path(path: str) -> Dict[str, str]: + """Parse a billing_account path into its component segments.""" + m = re.match(r"^billingAccounts/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_folder_path( + folder: str, + ) -> str: + """Returns a fully-qualified folder string.""" + return "folders/{folder}".format( + folder=folder, + ) + + @staticmethod + def parse_common_folder_path(path: str) -> Dict[str, str]: + """Parse a folder path into its component segments.""" + m = re.match(r"^folders/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_organization_path( + organization: str, + ) -> str: + """Returns a fully-qualified organization string.""" + return "organizations/{organization}".format( + organization=organization, + ) + + @staticmethod + def parse_common_organization_path(path: str) -> Dict[str, str]: + """Parse a organization path into its component segments.""" + m = re.match(r"^organizations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_project_path( + project: str, + ) -> str: + """Returns a fully-qualified project string.""" + return "projects/{project}".format( + project=project, + ) + + @staticmethod + def parse_common_project_path(path: str) -> Dict[str, str]: + """Parse a project path into its component segments.""" + m = re.match(r"^projects/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_location_path( + project: str, + location: str, + ) -> str: + """Returns a fully-qualified location string.""" + return "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) + + @staticmethod + def parse_common_location_path(path: str) -> Dict[str, str]: + """Parse a location path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @classmethod + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[client_options_lib.ClientOptions] = None + ): + """Deprecated. Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. 
+ (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + + warnings.warn( + "get_mtls_endpoint_and_cert_source is deprecated. Use the api_endpoint property instead.", + DeprecationWarning, + ) + if client_options is None: + client_options = client_options_lib.ClientOptions() + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_client_cert not in ("true", "false"): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError( + "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Figure out the client cert source to use. + client_cert_source = None + if use_client_cert == "true": + if client_options.client_cert_source: + client_cert_source = client_options.client_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + + # Figure out which api endpoint to use. + if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + elif use_mtls_endpoint == "always" or ( + use_mtls_endpoint == "auto" and client_cert_source + ): + api_endpoint = cls.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = cls.DEFAULT_ENDPOINT + + return api_endpoint, client_cert_source + + @staticmethod + def _read_environment_variables(): + """Returns the environment variables used by the client. + + Returns: + Tuple[bool, str, str]: returns the GOOGLE_API_USE_CLIENT_CERTIFICATE, + GOOGLE_API_USE_MTLS_ENDPOINT, and GOOGLE_CLOUD_UNIVERSE_DOMAIN environment variables. + + Raises: + ValueError: If GOOGLE_API_USE_CLIENT_CERTIFICATE is not + any of ["true", "false"]. + google.auth.exceptions.MutualTLSChannelError: If GOOGLE_API_USE_MTLS_ENDPOINT + is not any of ["auto", "never", "always"]. 
+ """ + use_client_cert = os.getenv( + "GOOGLE_API_USE_CLIENT_CERTIFICATE", "false" + ).lower() + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto").lower() + universe_domain_env = os.getenv("GOOGLE_CLOUD_UNIVERSE_DOMAIN") + if use_client_cert not in ("true", "false"): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError( + "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + return use_client_cert == "true", use_mtls_endpoint, universe_domain_env + + @staticmethod + def _get_client_cert_source(provided_cert_source, use_cert_flag): + """Return the client cert source to be used by the client. + + Args: + provided_cert_source (bytes): The client certificate source provided. + use_cert_flag (bool): A flag indicating whether to use the client certificate. + + Returns: + bytes or None: The client cert source to be used by the client. + """ + client_cert_source = None + if use_cert_flag: + if provided_cert_source: + client_cert_source = provided_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + return client_cert_source + + @staticmethod + def _get_api_endpoint( + api_override, client_cert_source, universe_domain, use_mtls_endpoint + ): + """Return the API endpoint used by the client. + + Args: + api_override (str): The API endpoint override. If specified, this is always + the return value of this function and the other arguments are not used. + client_cert_source (bytes): The client certificate source used by the client. + universe_domain (str): The universe domain used by the client. + use_mtls_endpoint (str): How to use the mTLS endpoint, which depends also on the other parameters. + Possible values are "always", "auto", or "never". + + Returns: + str: The API endpoint to be used by the client. + """ + if api_override is not None: + api_endpoint = api_override + elif use_mtls_endpoint == "always" or ( + use_mtls_endpoint == "auto" and client_cert_source + ): + _default_universe = ShippingSettingsServiceClient._DEFAULT_UNIVERSE + if universe_domain != _default_universe: + raise MutualTLSChannelError( + f"mTLS is not supported in any universe other than {_default_universe}." + ) + api_endpoint = ShippingSettingsServiceClient.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = ( + ShippingSettingsServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=universe_domain + ) + ) + return api_endpoint + + @staticmethod + def _get_universe_domain( + client_universe_domain: Optional[str], universe_domain_env: Optional[str] + ) -> str: + """Return the universe domain used by the client. + + Args: + client_universe_domain (Optional[str]): The universe domain configured via the client options. + universe_domain_env (Optional[str]): The universe domain configured via the "GOOGLE_CLOUD_UNIVERSE_DOMAIN" environment variable. + + Returns: + str: The universe domain to be used by the client. + + Raises: + ValueError: If the universe domain is an empty string. 
+ """ + universe_domain = ShippingSettingsServiceClient._DEFAULT_UNIVERSE + if client_universe_domain is not None: + universe_domain = client_universe_domain + elif universe_domain_env is not None: + universe_domain = universe_domain_env + if len(universe_domain.strip()) == 0: + raise ValueError("Universe Domain cannot be an empty string.") + return universe_domain + + @staticmethod + def _compare_universes( + client_universe: str, credentials: ga_credentials.Credentials + ) -> bool: + """Returns True iff the universe domains used by the client and credentials match. + + Args: + client_universe (str): The universe domain configured via the client options. + credentials (ga_credentials.Credentials): The credentials being used in the client. + + Returns: + bool: True iff client_universe matches the universe in credentials. + + Raises: + ValueError: when client_universe does not match the universe in credentials. + """ + + default_universe = ShippingSettingsServiceClient._DEFAULT_UNIVERSE + credentials_universe = getattr(credentials, "universe_domain", default_universe) + + if client_universe != credentials_universe: + raise ValueError( + "The configured universe domain " + f"({client_universe}) does not match the universe domain " + f"found in the credentials ({credentials_universe}). " + "If you haven't configured the universe domain explicitly, " + f"`{default_universe}` is the default." + ) + return True + + def _validate_universe_domain(self): + """Validates client's and credentials' universe domains are consistent. + + Returns: + bool: True iff the configured universe domain is valid. + + Raises: + ValueError: If the configured universe domain is not valid. + """ + self._is_universe_domain_valid = ( + self._is_universe_domain_valid + or ShippingSettingsServiceClient._compare_universes( + self.universe_domain, self.transport._credentials + ) + ) + return self._is_universe_domain_valid + + @property + def api_endpoint(self): + """Return the API endpoint used by the client instance. + + Returns: + str: The API endpoint used by the client instance. + """ + return self._api_endpoint + + @property + def universe_domain(self) -> str: + """Return the universe domain used by the client instance. + + Returns: + str: The universe domain used by the client instance. + """ + return self._universe_domain + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[ + Union[ + str, + ShippingSettingsServiceTransport, + Callable[..., ShippingSettingsServiceTransport], + ] + ] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the shipping settings service client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Optional[Union[str,ShippingSettingsServiceTransport,Callable[..., ShippingSettingsServiceTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the ShippingSettingsServiceTransport constructor. + If set to None, a transport is chosen automatically. 
+ client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): + Custom options for the client. + + 1. The ``api_endpoint`` property can be used to override the + default endpoint provided by the client when ``transport`` is + not explicitly provided. Only if this property is not set and + ``transport`` was not explicitly provided, the endpoint is + determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment + variable, which have one of the following values: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto-switch to the + default mTLS endpoint if client certificate is present; this is + the default value). + + 2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide a client certificate for mTLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + 3. The ``universe_domain`` property can be used to override the + default "googleapis.com" universe. Note that the ``api_endpoint`` + property still takes precedence; and ``universe_domain`` is + currently not supported for mTLS. + + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + """ + self._client_options = client_options + if isinstance(self._client_options, dict): + self._client_options = client_options_lib.from_dict(self._client_options) + if self._client_options is None: + self._client_options = client_options_lib.ClientOptions() + self._client_options = cast( + client_options_lib.ClientOptions, self._client_options + ) + + universe_domain_opt = getattr(self._client_options, "universe_domain", None) + + ( + self._use_client_cert, + self._use_mtls_endpoint, + self._universe_domain_env, + ) = ShippingSettingsServiceClient._read_environment_variables() + self._client_cert_source = ( + ShippingSettingsServiceClient._get_client_cert_source( + self._client_options.client_cert_source, self._use_client_cert + ) + ) + self._universe_domain = ShippingSettingsServiceClient._get_universe_domain( + universe_domain_opt, self._universe_domain_env + ) + self._api_endpoint = None # updated below, depending on `transport` + + # Initialize the universe domain validation. + self._is_universe_domain_valid = False + + api_key_value = getattr(self._client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError( + "client_options.api_key and credentials are mutually exclusive" + ) + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + transport_provided = isinstance(transport, ShippingSettingsServiceTransport) + if transport_provided: + # transport is a ShippingSettingsServiceTransport instance. + if credentials or self._client_options.credentials_file or api_key_value: + raise ValueError( + "When providing a transport instance, " + "provide its credentials directly." 
+ ) + if self._client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." + ) + self._transport = cast(ShippingSettingsServiceTransport, transport) + self._api_endpoint = self._transport.host + + self._api_endpoint = ( + self._api_endpoint + or ShippingSettingsServiceClient._get_api_endpoint( + self._client_options.api_endpoint, + self._client_cert_source, + self._universe_domain, + self._use_mtls_endpoint, + ) + ) + + if not transport_provided: + import google.auth._default # type: ignore + + if api_key_value and hasattr( + google.auth._default, "get_api_key_credentials" + ): + credentials = google.auth._default.get_api_key_credentials( + api_key_value + ) + + transport_init: Union[ + Type[ShippingSettingsServiceTransport], + Callable[..., ShippingSettingsServiceTransport], + ] = ( + type(self).get_transport_class(transport) + if isinstance(transport, str) or transport is None + else cast(Callable[..., ShippingSettingsServiceTransport], transport) + ) + # initialize with the provided callable or the passed in class + self._transport = transport_init( + credentials=credentials, + credentials_file=self._client_options.credentials_file, + host=self._api_endpoint, + scopes=self._client_options.scopes, + client_cert_source_for_mtls=self._client_cert_source, + quota_project_id=self._client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + api_audience=self._client_options.api_audience, + ) + + def get_shipping_settings( + self, + request: Optional[ + Union[shippingsettings.GetShippingSettingsRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> shippingsettings.ShippingSettings: + r"""Retrieve shipping setting information. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.shopping import merchant_accounts_v1beta + + def sample_get_shipping_settings(): + # Create a client + client = merchant_accounts_v1beta.ShippingSettingsServiceClient() + + # Initialize request argument(s) + request = merchant_accounts_v1beta.GetShippingSettingsRequest( + name="name_value", + ) + + # Make the request + response = client.get_shipping_settings(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.shopping.merchant_accounts_v1beta.types.GetShippingSettingsRequest, dict]): + The request object. Request message for the ``GetShippingSetting`` method. + name (str): + Required. The name of the shipping setting to retrieve. + Format: ``accounts/{account}/shippingsetting`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.shopping.merchant_accounts_v1beta.types.ShippingSettings: + The merchant account's [shipping + setting]((\ https://support.google.com/merchants/answer/6069284). + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, shippingsettings.GetShippingSettingsRequest): + request = shippingsettings.GetShippingSettingsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_shipping_settings] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def insert_shipping_settings( + self, + request: Optional[ + Union[shippingsettings.InsertShippingSettingsRequest, dict] + ] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> shippingsettings.ShippingSettings: + r"""Replace the shipping setting of a merchant with the + request shipping setting. Executing this method requires + admin access. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.shopping import merchant_accounts_v1beta + + def sample_insert_shipping_settings(): + # Create a client + client = merchant_accounts_v1beta.ShippingSettingsServiceClient() + + # Initialize request argument(s) + shipping_setting = merchant_accounts_v1beta.ShippingSettings() + shipping_setting.etag = "etag_value" + + request = merchant_accounts_v1beta.InsertShippingSettingsRequest( + parent="parent_value", + shipping_setting=shipping_setting, + ) + + # Make the request + response = client.insert_shipping_settings(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.shopping.merchant_accounts_v1beta.types.InsertShippingSettingsRequest, dict]): + The request object. Request message for the ``InsertShippingSetting`` + method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.shopping.merchant_accounts_v1beta.types.ShippingSettings: + The merchant account's [shipping + setting]((\ https://support.google.com/merchants/answer/6069284). + + """ + # Create or coerce a protobuf request object. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, shippingsettings.InsertShippingSettingsRequest): + request = shippingsettings.InsertShippingSettingsRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.insert_shipping_settings] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def __enter__(self) -> "ShippingSettingsServiceClient": + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! + """ + self.transport.close() + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("ShippingSettingsServiceClient",) diff --git a/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/shipping_settings_service/transports/__init__.py b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/shipping_settings_service/transports/__init__.py new file mode 100644 index 000000000000..672c15285a2c --- /dev/null +++ b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/shipping_settings_service/transports/__init__.py @@ -0,0 +1,41 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +from typing import Dict, Type + +from .base import ShippingSettingsServiceTransport +from .grpc import ShippingSettingsServiceGrpcTransport +from .grpc_asyncio import ShippingSettingsServiceGrpcAsyncIOTransport +from .rest import ( + ShippingSettingsServiceRestInterceptor, + ShippingSettingsServiceRestTransport, +) + +# Compile a registry of transports. 
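+# The registry keys ("grpc", "grpc_asyncio", "rest") are the values accepted
+# for the client's ``transport`` argument; they mirror the registry kept on
+# ShippingSettingsServiceClientMeta in client.py.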
+_transport_registry = ( + OrderedDict() +) # type: Dict[str, Type[ShippingSettingsServiceTransport]] +_transport_registry["grpc"] = ShippingSettingsServiceGrpcTransport +_transport_registry["grpc_asyncio"] = ShippingSettingsServiceGrpcAsyncIOTransport +_transport_registry["rest"] = ShippingSettingsServiceRestTransport + +__all__ = ( + "ShippingSettingsServiceTransport", + "ShippingSettingsServiceGrpcTransport", + "ShippingSettingsServiceGrpcAsyncIOTransport", + "ShippingSettingsServiceRestTransport", + "ShippingSettingsServiceRestInterceptor", +) diff --git a/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/shipping_settings_service/transports/base.py b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/shipping_settings_service/transports/base.py new file mode 100644 index 000000000000..a5d4ac3b74f7 --- /dev/null +++ b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/shipping_settings_service/transports/base.py @@ -0,0 +1,179 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import abc +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union + +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.shopping.merchant_accounts_v1beta import gapic_version as package_version +from google.shopping.merchant_accounts_v1beta.types import shippingsettings + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +class ShippingSettingsServiceTransport(abc.ABC): + """Abstract transport class for ShippingSettingsService.""" + + AUTH_SCOPES = ("https://www.googleapis.com/auth/content",) + + DEFAULT_HOST: str = "merchantapi.googleapis.com" + + def __init__( + self, + *, + host: str = DEFAULT_HOST, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + **kwargs, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'merchantapi.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. 
These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A list of scopes. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + """ + + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} + + # Save the scopes. + self._scopes = scopes + + # If no credentials are provided, then determine the appropriate + # defaults. + if credentials and credentials_file: + raise core_exceptions.DuplicateCredentialArgs( + "'credentials_file' and 'credentials' are mutually exclusive" + ) + + if credentials_file is not None: + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, **scopes_kwargs, quota_project_id=quota_project_id + ) + elif credentials is None: + credentials, _ = google.auth.default( + **scopes_kwargs, quota_project_id=quota_project_id + ) + # Don't apply audience if the credentials file passed from user. + if hasattr(credentials, "with_gdch_audience"): + credentials = credentials.with_gdch_audience( + api_audience if api_audience else host + ) + + # If the credentials are service account credentials, then always try to use self signed JWT. + if ( + always_use_jwt_access + and isinstance(credentials, service_account.Credentials) + and hasattr(service_account.Credentials, "with_always_use_jwt_access") + ): + credentials = credentials.with_always_use_jwt_access(True) + + # Save the credentials. + self._credentials = credentials + + # Save the hostname. Default to port 443 (HTTPS) if none is specified. + if ":" not in host: + host += ":443" + self._host = host + + @property + def host(self): + return self._host + + def _prep_wrapped_messages(self, client_info): + # Precompute the wrapped methods. + self._wrapped_methods = { + self.get_shipping_settings: gapic_v1.method.wrap_method( + self.get_shipping_settings, + default_timeout=None, + client_info=client_info, + ), + self.insert_shipping_settings: gapic_v1.method.wrap_method( + self.insert_shipping_settings, + default_timeout=None, + client_info=client_info, + ), + } + + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! 
+ """ + raise NotImplementedError() + + @property + def get_shipping_settings( + self, + ) -> Callable[ + [shippingsettings.GetShippingSettingsRequest], + Union[ + shippingsettings.ShippingSettings, + Awaitable[shippingsettings.ShippingSettings], + ], + ]: + raise NotImplementedError() + + @property + def insert_shipping_settings( + self, + ) -> Callable[ + [shippingsettings.InsertShippingSettingsRequest], + Union[ + shippingsettings.ShippingSettings, + Awaitable[shippingsettings.ShippingSettings], + ], + ]: + raise NotImplementedError() + + @property + def kind(self) -> str: + raise NotImplementedError() + + +__all__ = ("ShippingSettingsServiceTransport",) diff --git a/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/shipping_settings_service/transports/grpc.py b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/shipping_settings_service/transports/grpc.py new file mode 100644 index 000000000000..403d1cb5116d --- /dev/null +++ b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/shipping_settings_service/transports/grpc.py @@ -0,0 +1,304 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, grpc_helpers +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +import grpc # type: ignore + +from google.shopping.merchant_accounts_v1beta.types import shippingsettings + +from .base import DEFAULT_CLIENT_INFO, ShippingSettingsServiceTransport + + +class ShippingSettingsServiceGrpcTransport(ShippingSettingsServiceTransport): + """gRPC backend transport for ShippingSettingsService. + + Service to get method call shipping setting information per + Merchant API method. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. 
+ """ + + _stubs: Dict[str, Callable] + + def __init__( + self, + *, + host: str = "merchantapi.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'merchantapi.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if a ``channel`` instance is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if a ``channel`` instance is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if a ``channel`` instance is provided. + channel (Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]]): + A ``Channel`` instance through which to make calls, or a Callable + that constructs and returns one. If set to None, ``self.create_channel`` + is used to create the channel. If a Callable is given, it will be called + with the same arguments as used in ``self.create_channel``. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if a ``channel`` instance is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. 
+ google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if isinstance(channel, grpc.Channel): + # Ignore credentials if a channel was passed. + credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + # initialize with the provided callable or the default channel + channel_init = channel or type(self).create_channel + self._grpc_channel = channel_init( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @classmethod + def create_channel( + cls, + host: str = "merchantapi.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> grpc.Channel: + """Create and return a gRPC channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. 
+ kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + grpc.Channel: A gRPC channel object. + + Raises: + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + + return grpc_helpers.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs, + ) + + @property + def grpc_channel(self) -> grpc.Channel: + """Return the channel designed to connect to this service.""" + return self._grpc_channel + + @property + def get_shipping_settings( + self, + ) -> Callable[ + [shippingsettings.GetShippingSettingsRequest], shippingsettings.ShippingSettings + ]: + r"""Return a callable for the get shipping settings method over gRPC. + + Retrieve shipping setting information. + + Returns: + Callable[[~.GetShippingSettingsRequest], + ~.ShippingSettings]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_shipping_settings" not in self._stubs: + self._stubs["get_shipping_settings"] = self.grpc_channel.unary_unary( + "/google.shopping.merchant.accounts.v1beta.ShippingSettingsService/GetShippingSettings", + request_serializer=shippingsettings.GetShippingSettingsRequest.serialize, + response_deserializer=shippingsettings.ShippingSettings.deserialize, + ) + return self._stubs["get_shipping_settings"] + + @property + def insert_shipping_settings( + self, + ) -> Callable[ + [shippingsettings.InsertShippingSettingsRequest], + shippingsettings.ShippingSettings, + ]: + r"""Return a callable for the insert shipping settings method over gRPC. + + Replace the shipping setting of a merchant with the + request shipping setting. Executing this method requires + admin access. + + Returns: + Callable[[~.InsertShippingSettingsRequest], + ~.ShippingSettings]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
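+        # The stub is created lazily and cached in self._stubs, so repeated
+        # property access reuses the same callable rather than building a new
+        # stub for every RPC.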
+ if "insert_shipping_settings" not in self._stubs: + self._stubs["insert_shipping_settings"] = self.grpc_channel.unary_unary( + "/google.shopping.merchant.accounts.v1beta.ShippingSettingsService/InsertShippingSettings", + request_serializer=shippingsettings.InsertShippingSettingsRequest.serialize, + response_deserializer=shippingsettings.ShippingSettings.deserialize, + ) + return self._stubs["insert_shipping_settings"] + + def close(self): + self.grpc_channel.close() + + @property + def kind(self) -> str: + return "grpc" + + +__all__ = ("ShippingSettingsServiceGrpcTransport",) diff --git a/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/shipping_settings_service/transports/grpc_asyncio.py b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/shipping_settings_service/transports/grpc_asyncio.py new file mode 100644 index 000000000000..6a415d1d4e00 --- /dev/null +++ b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/shipping_settings_service/transports/grpc_asyncio.py @@ -0,0 +1,320 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1, grpc_helpers_async +from google.api_core import retry_async as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +import grpc # type: ignore +from grpc.experimental import aio # type: ignore + +from google.shopping.merchant_accounts_v1beta.types import shippingsettings + +from .base import DEFAULT_CLIENT_INFO, ShippingSettingsServiceTransport +from .grpc import ShippingSettingsServiceGrpcTransport + + +class ShippingSettingsServiceGrpcAsyncIOTransport(ShippingSettingsServiceTransport): + """gRPC AsyncIO backend transport for ShippingSettingsService. + + Service to get method call shipping setting information per + Merchant API method. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + + _grpc_channel: aio.Channel + _stubs: Dict[str, Callable] = {} + + @classmethod + def create_channel( + cls, + host: str = "merchantapi.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> aio.Channel: + """Create and return a gRPC AsyncIO channel object. + Args: + host (Optional[str]): The host for the channel to use. 
+ credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + aio.Channel: A gRPC AsyncIO channel object. + """ + + return grpc_helpers_async.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs, + ) + + def __init__( + self, + *, + host: str = "merchantapi.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[Union[aio.Channel, Callable[..., aio.Channel]]] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'merchantapi.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if a ``channel`` instance is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if a ``channel`` instance is provided. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + channel (Optional[Union[aio.Channel, Callable[..., aio.Channel]]]): + A ``Channel`` instance through which to make calls, or a Callable + that constructs and returns one. If set to None, ``self.create_channel`` + is used to create the channel. If a Callable is given, it will be called + with the same arguments as used in ``self.create_channel``. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. 
It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if a ``channel`` instance is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if isinstance(channel, aio.Channel): + # Ignore credentials if a channel was passed. + credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + # initialize with the provided callable or the default channel + channel_init = channel or type(self).create_channel + self._grpc_channel = channel_init( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. 
This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @property + def grpc_channel(self) -> aio.Channel: + """Create the channel designed to connect to this service. + + This property caches on the instance; repeated calls return + the same channel. + """ + # Return the channel from cache. + return self._grpc_channel + + @property + def get_shipping_settings( + self, + ) -> Callable[ + [shippingsettings.GetShippingSettingsRequest], + Awaitable[shippingsettings.ShippingSettings], + ]: + r"""Return a callable for the get shipping settings method over gRPC. + + Retrieve shipping setting information. + + Returns: + Callable[[~.GetShippingSettingsRequest], + Awaitable[~.ShippingSettings]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_shipping_settings" not in self._stubs: + self._stubs["get_shipping_settings"] = self.grpc_channel.unary_unary( + "/google.shopping.merchant.accounts.v1beta.ShippingSettingsService/GetShippingSettings", + request_serializer=shippingsettings.GetShippingSettingsRequest.serialize, + response_deserializer=shippingsettings.ShippingSettings.deserialize, + ) + return self._stubs["get_shipping_settings"] + + @property + def insert_shipping_settings( + self, + ) -> Callable[ + [shippingsettings.InsertShippingSettingsRequest], + Awaitable[shippingsettings.ShippingSettings], + ]: + r"""Return a callable for the insert shipping settings method over gRPC. + + Replace the shipping setting of a merchant with the + request shipping setting. Executing this method requires + admin access. + + Returns: + Callable[[~.InsertShippingSettingsRequest], + Awaitable[~.ShippingSettings]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "insert_shipping_settings" not in self._stubs: + self._stubs["insert_shipping_settings"] = self.grpc_channel.unary_unary( + "/google.shopping.merchant.accounts.v1beta.ShippingSettingsService/InsertShippingSettings", + request_serializer=shippingsettings.InsertShippingSettingsRequest.serialize, + response_deserializer=shippingsettings.ShippingSettings.deserialize, + ) + return self._stubs["insert_shipping_settings"] + + def _prep_wrapped_messages(self, client_info): + """Precompute the wrapped methods, overriding the base class method to use async wrappers.""" + self._wrapped_methods = { + self.get_shipping_settings: gapic_v1.method_async.wrap_method( + self.get_shipping_settings, + default_timeout=None, + client_info=client_info, + ), + self.insert_shipping_settings: gapic_v1.method_async.wrap_method( + self.insert_shipping_settings, + default_timeout=None, + client_info=client_info, + ), + } + + def close(self): + return self.grpc_channel.close() + + +__all__ = ("ShippingSettingsServiceGrpcAsyncIOTransport",) diff --git a/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/shipping_settings_service/transports/rest.py b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/shipping_settings_service/transports/rest.py new file mode 100644 index 000000000000..f93d7be50bf0 --- /dev/null +++ b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/shipping_settings_service/transports/rest.py @@ -0,0 +1,449 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
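For orientation before the REST transport below: the gRPC AsyncIO transport defined above creates each RPC stub lazily on first property access, caches it in ``self._stubs``, and ``_prep_wrapped_messages`` then wraps those properties with ``gapic_v1.method_async.wrap_method`` so default timeouts and client info are applied uniformly. A minimal, dependency-free sketch of the caching part (the class and method names here are illustrative only, not part of the generated module):

```python
# Dependency-free sketch of the lazy stub-caching pattern used by the
# ShippingSettingsService gRPC AsyncIO transport above. The names here are
# illustrative; the real transport creates grpc.aio unary-unary stubs and
# wraps them with gapic_v1.method_async.wrap_method.
from typing import Callable, Dict


class SketchTransport:
    def __init__(self) -> None:
        # Stubs are created on first property access and reused afterwards.
        self._stubs: Dict[str, Callable[[str], str]] = {}

    def _make_stub(self, rpc_name: str) -> Callable[[str], str]:
        # Stand-in for self.grpc_channel.unary_unary(...) with the request
        # serializer and response deserializer for the RPC.
        return lambda request: f"{rpc_name}({request!r})"

    @property
    def get_shipping_settings(self) -> Callable[[str], str]:
        if "get_shipping_settings" not in self._stubs:
            self._stubs["get_shipping_settings"] = self._make_stub(
                "GetShippingSettings"
            )
        return self._stubs["get_shipping_settings"]


transport = SketchTransport()
# Repeated property access returns the same cached callable.
assert transport.get_shipping_settings is transport.get_shipping_settings
print(transport.get_shipping_settings("accounts/123/shippingSettings"))
```

In the generated transport the cached value is the ``grpc.aio`` callable for the fully-qualified method path, e.g. ``/google.shopping.merchant.accounts.v1beta.ShippingSettingsService/GetShippingSettings``.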
+# + +import dataclasses +import json # type: ignore +import re +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, path_template, rest_helpers, rest_streaming +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.transport.requests import AuthorizedSession # type: ignore +from google.protobuf import json_format +import grpc # type: ignore +from requests import __version__ as requests_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + + +from google.shopping.merchant_accounts_v1beta.types import shippingsettings + +from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO +from .base import ShippingSettingsServiceTransport + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) + + +class ShippingSettingsServiceRestInterceptor: + """Interceptor for ShippingSettingsService. + + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. + Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the ShippingSettingsServiceRestTransport. + + .. code-block:: python + class MyCustomShippingSettingsServiceInterceptor(ShippingSettingsServiceRestInterceptor): + def pre_get_shipping_settings(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_shipping_settings(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_insert_shipping_settings(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_insert_shipping_settings(self, response): + logging.log(f"Received response: {response}") + return response + + transport = ShippingSettingsServiceRestTransport(interceptor=MyCustomShippingSettingsServiceInterceptor()) + client = ShippingSettingsServiceClient(transport=transport) + + + """ + + def pre_get_shipping_settings( + self, + request: shippingsettings.GetShippingSettingsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[shippingsettings.GetShippingSettingsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_shipping_settings + + Override in a subclass to manipulate the request or metadata + before they are sent to the ShippingSettingsService server. + """ + return request, metadata + + def post_get_shipping_settings( + self, response: shippingsettings.ShippingSettings + ) -> shippingsettings.ShippingSettings: + """Post-rpc interceptor for get_shipping_settings + + Override in a subclass to manipulate the response + after it is returned by the ShippingSettingsService server but before + it is returned to user code. 
+ """ + return response + + def pre_insert_shipping_settings( + self, + request: shippingsettings.InsertShippingSettingsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + shippingsettings.InsertShippingSettingsRequest, Sequence[Tuple[str, str]] + ]: + """Pre-rpc interceptor for insert_shipping_settings + + Override in a subclass to manipulate the request or metadata + before they are sent to the ShippingSettingsService server. + """ + return request, metadata + + def post_insert_shipping_settings( + self, response: shippingsettings.ShippingSettings + ) -> shippingsettings.ShippingSettings: + """Post-rpc interceptor for insert_shipping_settings + + Override in a subclass to manipulate the response + after it is returned by the ShippingSettingsService server but before + it is returned to user code. + """ + return response + + +@dataclasses.dataclass +class ShippingSettingsServiceRestStub: + _session: AuthorizedSession + _host: str + _interceptor: ShippingSettingsServiceRestInterceptor + + +class ShippingSettingsServiceRestTransport(ShippingSettingsServiceTransport): + """REST backend transport for ShippingSettingsService. + + Service to get method call shipping setting information per + Merchant API method. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends JSON representations of protocol buffers over HTTP/1.1 + + """ + + def __init__( + self, + *, + host: str = "merchantapi.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", + interceptor: Optional[ShippingSettingsServiceRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'merchantapi.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. It is ignored + if ``channel`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. 
Normally + "https", but for testing or local servers, + "http" can be specified. + """ + # Run the base constructor + # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. + # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the + # credentials object + maybe_url_match = re.match("^(?Phttp(?:s)?://)?(?P.*)$", host) + if maybe_url_match is None: + raise ValueError( + f"Unexpected hostname structure: {host}" + ) # pragma: NO COVER + + url_match_items = maybe_url_match.groupdict() + + host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host + + super().__init__( + host=host, + credentials=credentials, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + self._session = AuthorizedSession( + self._credentials, default_host=self.DEFAULT_HOST + ) + if client_cert_source_for_mtls: + self._session.configure_mtls_channel(client_cert_source_for_mtls) + self._interceptor = interceptor or ShippingSettingsServiceRestInterceptor() + self._prep_wrapped_messages(client_info) + + class _GetShippingSettings(ShippingSettingsServiceRestStub): + def __hash__(self): + return hash("GetShippingSettings") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: shippingsettings.GetShippingSettingsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> shippingsettings.ShippingSettings: + r"""Call the get shipping settings method over HTTP. + + Args: + request (~.shippingsettings.GetShippingSettingsRequest): + The request object. Request message for the ``GetShippingSetting`` method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.shippingsettings.ShippingSettings: + The merchant account's [shipping + setting]((https://support.google.com/merchants/answer/6069284). + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/accounts/v1beta/{name=accounts/*/shippingSettings}", + }, + ] + request, metadata = self._interceptor.pre_get_shipping_settings( + request, metadata + ) + pb_request = shippingsettings.GetShippingSettingsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = shippingsettings.ShippingSettings() + pb_resp = shippingsettings.ShippingSettings.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_shipping_settings(resp) + return resp + + class _InsertShippingSettings(ShippingSettingsServiceRestStub): + def __hash__(self): + return hash("InsertShippingSettings") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: shippingsettings.InsertShippingSettingsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> shippingsettings.ShippingSettings: + r"""Call the insert shipping settings method over HTTP. + + Args: + request (~.shippingsettings.InsertShippingSettingsRequest): + The request object. Request message for the ``InsertShippingSetting`` + method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.shippingsettings.ShippingSettings: + The merchant account's [shipping + setting]((https://support.google.com/merchants/answer/6069284). + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/accounts/v1beta/{parent=accounts/*}/shippingSettings:insert", + "body": "shipping_setting", + }, + ] + request, metadata = self._interceptor.pre_insert_shipping_settings( + request, metadata + ) + pb_request = shippingsettings.InsertShippingSettingsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
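Both REST handlers above lean on ``google.api_core.path_template.transcode`` to turn request fields into the HTTP method, URI, body, and query parameters defined by the rule. A small illustration using the ``GetShippingSettings`` rule shown earlier (the account ID is a made-up example):

```python
# Sketch of the URI transcoding step used by the REST handlers above.
# The account ID below is a made-up example.
from google.api_core import path_template

http_options = [
    {
        "method": "get",
        "uri": "/accounts/v1beta/{name=accounts/*/shippingSettings}",
    },
]

transcoded = path_template.transcode(
    http_options, name="accounts/123/shippingSettings"
)

print(transcoded["method"])  # "get"
print(transcoded["uri"])  # "/accounts/v1beta/accounts/123/shippingSettings"
# Any request fields not consumed by the URI template (or a "body" mapping)
# are left in transcoded["query_params"].
```

On the error path just below, ``core_exceptions.from_http_response`` converts any 4xx/5xx response into the matching ``GoogleAPICallError`` subclass (for example ``NotFound`` or ``PermissionDenied``), so callers see typed exceptions rather than raw HTTP errors.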
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = shippingsettings.ShippingSettings() + pb_resp = shippingsettings.ShippingSettings.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_insert_shipping_settings(resp) + return resp + + @property + def get_shipping_settings( + self, + ) -> Callable[ + [shippingsettings.GetShippingSettingsRequest], shippingsettings.ShippingSettings + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetShippingSettings(self._session, self._host, self._interceptor) # type: ignore + + @property + def insert_shipping_settings( + self, + ) -> Callable[ + [shippingsettings.InsertShippingSettingsRequest], + shippingsettings.ShippingSettings, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._InsertShippingSettings(self._session, self._host, self._interceptor) # type: ignore + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__ = ("ShippingSettingsServiceRestTransport",) diff --git a/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/terms_of_service_agreement_state_service/__init__.py b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/terms_of_service_agreement_state_service/__init__.py new file mode 100644 index 000000000000..de879d7880b7 --- /dev/null +++ b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/terms_of_service_agreement_state_service/__init__.py @@ -0,0 +1,22 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from .async_client import TermsOfServiceAgreementStateServiceAsyncClient +from .client import TermsOfServiceAgreementStateServiceClient + +__all__ = ( + "TermsOfServiceAgreementStateServiceClient", + "TermsOfServiceAgreementStateServiceAsyncClient", +) diff --git a/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/terms_of_service_agreement_state_service/async_client.py b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/terms_of_service_agreement_state_service/async_client.py new file mode 100644 index 000000000000..a1383137817a --- /dev/null +++ b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/terms_of_service_agreement_state_service/async_client.py @@ -0,0 +1,595 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +import functools +import re +from typing import ( + Callable, + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, +) + +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry_async as retries +from google.api_core.client_options import ClientOptions +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.shopping.merchant_accounts_v1beta import gapic_version as package_version + +try: + OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.AsyncRetry, object, None] # type: ignore + +from google.shopping.merchant_accounts_v1beta.types import ( + termsofserviceagreementstate, + termsofservicekind, +) + +from .client import TermsOfServiceAgreementStateServiceClient +from .transports.base import ( + DEFAULT_CLIENT_INFO, + TermsOfServiceAgreementStateServiceTransport, +) +from .transports.grpc_asyncio import ( + TermsOfServiceAgreementStateServiceGrpcAsyncIOTransport, +) + + +class TermsOfServiceAgreementStateServiceAsyncClient: + """Service to support ``TermsOfServiceAgreementState`` API.""" + + _client: TermsOfServiceAgreementStateServiceClient + + # Copy defaults from the synchronous client for use here. + # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. 
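+    # With the default universe domain, the endpoint template
+    # "merchantapi.{UNIVERSE_DOMAIN}" resolves to "merchantapi.googleapis.com".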
+ DEFAULT_ENDPOINT = TermsOfServiceAgreementStateServiceClient.DEFAULT_ENDPOINT + DEFAULT_MTLS_ENDPOINT = ( + TermsOfServiceAgreementStateServiceClient.DEFAULT_MTLS_ENDPOINT + ) + _DEFAULT_ENDPOINT_TEMPLATE = ( + TermsOfServiceAgreementStateServiceClient._DEFAULT_ENDPOINT_TEMPLATE + ) + _DEFAULT_UNIVERSE = TermsOfServiceAgreementStateServiceClient._DEFAULT_UNIVERSE + + account_path = staticmethod(TermsOfServiceAgreementStateServiceClient.account_path) + parse_account_path = staticmethod( + TermsOfServiceAgreementStateServiceClient.parse_account_path + ) + terms_of_service_path = staticmethod( + TermsOfServiceAgreementStateServiceClient.terms_of_service_path + ) + parse_terms_of_service_path = staticmethod( + TermsOfServiceAgreementStateServiceClient.parse_terms_of_service_path + ) + terms_of_service_agreement_state_path = staticmethod( + TermsOfServiceAgreementStateServiceClient.terms_of_service_agreement_state_path + ) + parse_terms_of_service_agreement_state_path = staticmethod( + TermsOfServiceAgreementStateServiceClient.parse_terms_of_service_agreement_state_path + ) + common_billing_account_path = staticmethod( + TermsOfServiceAgreementStateServiceClient.common_billing_account_path + ) + parse_common_billing_account_path = staticmethod( + TermsOfServiceAgreementStateServiceClient.parse_common_billing_account_path + ) + common_folder_path = staticmethod( + TermsOfServiceAgreementStateServiceClient.common_folder_path + ) + parse_common_folder_path = staticmethod( + TermsOfServiceAgreementStateServiceClient.parse_common_folder_path + ) + common_organization_path = staticmethod( + TermsOfServiceAgreementStateServiceClient.common_organization_path + ) + parse_common_organization_path = staticmethod( + TermsOfServiceAgreementStateServiceClient.parse_common_organization_path + ) + common_project_path = staticmethod( + TermsOfServiceAgreementStateServiceClient.common_project_path + ) + parse_common_project_path = staticmethod( + TermsOfServiceAgreementStateServiceClient.parse_common_project_path + ) + common_location_path = staticmethod( + TermsOfServiceAgreementStateServiceClient.common_location_path + ) + parse_common_location_path = staticmethod( + TermsOfServiceAgreementStateServiceClient.parse_common_location_path + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + TermsOfServiceAgreementStateServiceAsyncClient: The constructed client. + """ + return TermsOfServiceAgreementStateServiceClient.from_service_account_info.__func__(TermsOfServiceAgreementStateServiceAsyncClient, info, *args, **kwargs) # type: ignore + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + TermsOfServiceAgreementStateServiceAsyncClient: The constructed client. 
+ """ + return TermsOfServiceAgreementStateServiceClient.from_service_account_file.__func__(TermsOfServiceAgreementStateServiceAsyncClient, filename, *args, **kwargs) # type: ignore + + from_service_account_json = from_service_account_file + + @classmethod + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[ClientOptions] = None + ): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + return TermsOfServiceAgreementStateServiceClient.get_mtls_endpoint_and_cert_source(client_options) # type: ignore + + @property + def transport(self) -> TermsOfServiceAgreementStateServiceTransport: + """Returns the transport used by the client instance. + + Returns: + TermsOfServiceAgreementStateServiceTransport: The transport used by the client instance. + """ + return self._client.transport + + @property + def api_endpoint(self): + """Return the API endpoint used by the client instance. + + Returns: + str: The API endpoint used by the client instance. + """ + return self._client._api_endpoint + + @property + def universe_domain(self) -> str: + """Return the universe domain used by the client instance. + + Returns: + str: The universe domain used + by the client instance. + """ + return self._client._universe_domain + + get_transport_class = functools.partial( + type(TermsOfServiceAgreementStateServiceClient).get_transport_class, + type(TermsOfServiceAgreementStateServiceClient), + ) + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[ + Union[ + str, + TermsOfServiceAgreementStateServiceTransport, + Callable[..., TermsOfServiceAgreementStateServiceTransport], + ] + ] = "grpc_asyncio", + client_options: Optional[ClientOptions] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the terms of service agreement state service async client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. 
+ transport (Optional[Union[str,TermsOfServiceAgreementStateServiceTransport,Callable[..., TermsOfServiceAgreementStateServiceTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport to use. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the TermsOfServiceAgreementStateServiceTransport constructor. + If set to None, a transport is chosen automatically. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): + Custom options for the client. + + 1. The ``api_endpoint`` property can be used to override the + default endpoint provided by the client when ``transport`` is + not explicitly provided. Only if this property is not set and + ``transport`` was not explicitly provided, the endpoint is + determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment + variable, which have one of the following values: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto-switch to the + default mTLS endpoint if client certificate is present; this is + the default value). + + 2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide a client certificate for mTLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + 3. The ``universe_domain`` property can be used to override the + default "googleapis.com" universe. Note that ``api_endpoint`` + property still takes precedence; and ``universe_domain`` is + currently not supported for mTLS. + + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + """ + self._client = TermsOfServiceAgreementStateServiceClient( + credentials=credentials, + transport=transport, + client_options=client_options, + client_info=client_info, + ) + + async def get_terms_of_service_agreement_state( + self, + request: Optional[ + Union[ + termsofserviceagreementstate.GetTermsOfServiceAgreementStateRequest, + dict, + ] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> termsofserviceagreementstate.TermsOfServiceAgreementState: + r"""Returns the state of a terms of service agreement. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.shopping import merchant_accounts_v1beta + + async def sample_get_terms_of_service_agreement_state(): + # Create a client + client = merchant_accounts_v1beta.TermsOfServiceAgreementStateServiceAsyncClient() + + # Initialize request argument(s) + request = merchant_accounts_v1beta.GetTermsOfServiceAgreementStateRequest( + name="name_value", + ) + + # Make the request + response = await client.get_terms_of_service_agreement_state(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.shopping.merchant_accounts_v1beta.types.GetTermsOfServiceAgreementStateRequest, dict]]): + The request object. Request message for the + ``GetTermsOfServiceAgreementState`` method. + name (:class:`str`): + Required. The resource name of the terms of service + version. Format: + ``accounts/{account}/termsOfServiceAgreementState/{identifier}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.shopping.merchant_accounts_v1beta.types.TermsOfServiceAgreementState: + This resource represents the agreement state for a given account and terms of + service kind. The state is as follows: + + \* If the merchant has accepted a terms of service: + [accepted](TermsOfServiceAggrementState.accepted) + will be populated, otherwise it will be empty \* If + the merchant must sign a terms of service: + [required](TermsOfServiceAggrementState.required) + will be populated, otherwise it will be empty. + + Note that both + [required](TermsOfServiceAggrementState.required) and + [accepted](TermsOfServiceAggrementState.accepted) can + be present. In this case the accepted terms of + services will have an expiration date set in the + [valid_until](Accepted.valid_until) field. The + required terms of services need to be accepted before + valid_until in order for the account to continue + having a valid agreement. When accepting new terms of + services we expect 3Ps to display the text associated + with the given terms of service agreement (the url to + the file containing the text is added in the Required + message below as + [tos_file_uri](Accepted.tos_file_uri). The actual + acceptance of the terms of service is done by calling + accept on the [TermsOfService](TermsOfService) + resource. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
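+        # Note that a plain dict is accepted for ``request`` as well; it is
+        # converted into a GetTermsOfServiceAgreementStateRequest just below.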
+ if not isinstance( + request, termsofserviceagreementstate.GetTermsOfServiceAgreementStateRequest + ): + request = ( + termsofserviceagreementstate.GetTermsOfServiceAgreementStateRequest( + request + ) + ) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.get_terms_of_service_agreement_state + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def retrieve_for_application_terms_of_service_agreement_state( + self, + request: Optional[ + Union[ + termsofserviceagreementstate.RetrieveForApplicationTermsOfServiceAgreementStateRequest, + dict, + ] + ] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> termsofserviceagreementstate.TermsOfServiceAgreementState: + r"""Retrieves the state of the agreement for the + application terms of service. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.shopping import merchant_accounts_v1beta + + async def sample_retrieve_for_application_terms_of_service_agreement_state(): + # Create a client + client = merchant_accounts_v1beta.TermsOfServiceAgreementStateServiceAsyncClient() + + # Initialize request argument(s) + request = merchant_accounts_v1beta.RetrieveForApplicationTermsOfServiceAgreementStateRequest( + parent="parent_value", + ) + + # Make the request + response = await client.retrieve_for_application_terms_of_service_agreement_state(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.shopping.merchant_accounts_v1beta.types.RetrieveForApplicationTermsOfServiceAgreementStateRequest, dict]]): + The request object. Request message for the + ``RetrieveForApplicationTermsOfServiceAgreementState`` + method. + parent (:class:`str`): + Required. The account for which to get a + TermsOfServiceAgreementState Format: + ``accounts/{account}`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.shopping.merchant_accounts_v1beta.types.TermsOfServiceAgreementState: + This resource represents the agreement state for a given account and terms of + service kind. The state is as follows: + + \* If the merchant has accepted a terms of service: + [accepted](TermsOfServiceAggrementState.accepted) + will be populated, otherwise it will be empty \* If + the merchant must sign a terms of service: + [required](TermsOfServiceAggrementState.required) + will be populated, otherwise it will be empty. + + Note that both + [required](TermsOfServiceAggrementState.required) and + [accepted](TermsOfServiceAggrementState.accepted) can + be present. In this case the accepted terms of + services will have an expiration date set in the + [valid_until](Accepted.valid_until) field. The + required terms of services need to be accepted before + valid_until in order for the account to continue + having a valid agreement. When accepting new terms of + services we expect 3Ps to display the text associated + with the given terms of service agreement (the url to + the file containing the text is added in the Required + message below as + [tos_file_uri](Accepted.tos_file_uri). The actual + acceptance of the terms of service is done by calling + accept on the [TermsOfService](TermsOfService) + resource. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance( + request, + termsofserviceagreementstate.RetrieveForApplicationTermsOfServiceAgreementStateRequest, + ): + request = termsofserviceagreementstate.RetrieveForApplicationTermsOfServiceAgreementStateRequest( + request + ) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.retrieve_for_application_terms_of_service_agreement_state + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def __aenter__(self) -> "TermsOfServiceAgreementStateServiceAsyncClient": + return self + + async def __aexit__(self, exc_type, exc, tb): + await self.transport.close() + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("TermsOfServiceAgreementStateServiceAsyncClient",) diff --git a/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/terms_of_service_agreement_state_service/client.py b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/terms_of_service_agreement_state_service/client.py new file mode 100644 index 000000000000..8be4e9679afe --- /dev/null +++ b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/terms_of_service_agreement_state_service/client.py @@ -0,0 +1,1043 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +import os +import re +from typing import ( + Callable, + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, + cast, +) +import warnings + +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.shopping.merchant_accounts_v1beta import gapic_version as package_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + +from google.shopping.merchant_accounts_v1beta.types import ( + termsofserviceagreementstate, + termsofservicekind, +) + +from .transports.base import ( + DEFAULT_CLIENT_INFO, + TermsOfServiceAgreementStateServiceTransport, +) +from .transports.grpc import TermsOfServiceAgreementStateServiceGrpcTransport +from .transports.grpc_asyncio import ( + TermsOfServiceAgreementStateServiceGrpcAsyncIOTransport, +) +from .transports.rest import TermsOfServiceAgreementStateServiceRestTransport + + +class TermsOfServiceAgreementStateServiceClientMeta(type): + """Metaclass for the TermsOfServiceAgreementStateService client. + + This provides class-level methods for building and retrieving + support objects (e.g. transport) without polluting the client instance + objects. 
+    """
+
+    _transport_registry = (
+        OrderedDict()
+    )  # type: Dict[str, Type[TermsOfServiceAgreementStateServiceTransport]]
+    _transport_registry["grpc"] = TermsOfServiceAgreementStateServiceGrpcTransport
+    _transport_registry[
+        "grpc_asyncio"
+    ] = TermsOfServiceAgreementStateServiceGrpcAsyncIOTransport
+    _transport_registry["rest"] = TermsOfServiceAgreementStateServiceRestTransport
+
+    def get_transport_class(
+        cls,
+        label: Optional[str] = None,
+    ) -> Type[TermsOfServiceAgreementStateServiceTransport]:
+        """Returns an appropriate transport class.
+
+        Args:
+            label: The name of the desired transport. If none is
+                provided, then the first transport in the registry is used.
+
+        Returns:
+            The transport class to use.
+        """
+        # If a specific transport is requested, return that one.
+        if label:
+            return cls._transport_registry[label]
+
+        # No transport is requested; return the default (that is, the first one
+        # in the dictionary).
+        return next(iter(cls._transport_registry.values()))
+
+
+class TermsOfServiceAgreementStateServiceClient(
+    metaclass=TermsOfServiceAgreementStateServiceClientMeta
+):
+    """Service to support ``TermsOfServiceAgreementState`` API."""
+
+    @staticmethod
+    def _get_default_mtls_endpoint(api_endpoint):
+        """Converts api endpoint to mTLS endpoint.
+
+        Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to
+        "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively.
+        Args:
+            api_endpoint (Optional[str]): the api endpoint to convert.
+        Returns:
+            str: converted mTLS api endpoint.
+        """
+        if not api_endpoint:
+            return api_endpoint
+
+        mtls_endpoint_re = re.compile(
+            r"(?P<name>[^.]+)(?P<mtls>\.mtls)?(?P<sandbox>\.sandbox)?(?P<googledomain>\.googleapis\.com)?"
+        )
+
+        m = mtls_endpoint_re.match(api_endpoint)
+        name, mtls, sandbox, googledomain = m.groups()
+        if mtls or not googledomain:
+            return api_endpoint
+
+        if sandbox:
+            return api_endpoint.replace(
+                "sandbox.googleapis.com", "mtls.sandbox.googleapis.com"
+            )
+
+        return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com")
+
+    # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead.
+    DEFAULT_ENDPOINT = "merchantapi.googleapis.com"
+    DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__(  # type: ignore
+        DEFAULT_ENDPOINT
+    )
+
+    _DEFAULT_ENDPOINT_TEMPLATE = "merchantapi.{UNIVERSE_DOMAIN}"
+    _DEFAULT_UNIVERSE = "googleapis.com"
+
+    @classmethod
+    def from_service_account_info(cls, info: dict, *args, **kwargs):
+        """Creates an instance of this client using the provided credentials
+            info.
+
+        Args:
+            info (dict): The service account private key info.
+            args: Additional arguments to pass to the constructor.
+            kwargs: Additional arguments to pass to the constructor.
+
+        Returns:
+            TermsOfServiceAgreementStateServiceClient: The constructed client.
+        """
+        credentials = service_account.Credentials.from_service_account_info(info)
+        kwargs["credentials"] = credentials
+        return cls(*args, **kwargs)
+
+    @classmethod
+    def from_service_account_file(cls, filename: str, *args, **kwargs):
+        """Creates an instance of this client using the provided credentials
+            file.
+
+        Args:
+            filename (str): The path to the service account private key json
+                file.
+            args: Additional arguments to pass to the constructor.
+            kwargs: Additional arguments to pass to the constructor.
+
+        Returns:
+            TermsOfServiceAgreementStateServiceClient: The constructed client.
+        """
+        credentials = service_account.Credentials.from_service_account_file(filename)
+        kwargs["credentials"] = credentials
+        return cls(*args, **kwargs)
+
+    from_service_account_json = from_service_account_file
+
+    @property
+    def transport(self) -> TermsOfServiceAgreementStateServiceTransport:
+        """Returns the transport used by the client instance.
+
+        Returns:
+            TermsOfServiceAgreementStateServiceTransport: The transport used by the client
+                instance.
+        """
+        return self._transport
+
+    @staticmethod
+    def account_path(
+        account: str,
+    ) -> str:
+        """Returns a fully-qualified account string."""
+        return "accounts/{account}".format(
+            account=account,
+        )
+
+    @staticmethod
+    def parse_account_path(path: str) -> Dict[str, str]:
+        """Parses an account path into its component segments."""
+        m = re.match(r"^accounts/(?P<account>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def terms_of_service_path(
+        version: str,
+    ) -> str:
+        """Returns a fully-qualified terms_of_service string."""
+        return "termsOfService/{version}".format(
+            version=version,
+        )
+
+    @staticmethod
+    def parse_terms_of_service_path(path: str) -> Dict[str, str]:
+        """Parses a terms_of_service path into its component segments."""
+        m = re.match(r"^termsOfService/(?P<version>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def terms_of_service_agreement_state_path(
+        account: str,
+        identifier: str,
+    ) -> str:
+        """Returns a fully-qualified terms_of_service_agreement_state string."""
+        return "accounts/{account}/termsOfServiceAgreementStates/{identifier}".format(
+            account=account,
+            identifier=identifier,
+        )
+
+    @staticmethod
+    def parse_terms_of_service_agreement_state_path(path: str) -> Dict[str, str]:
+        """Parses a terms_of_service_agreement_state path into its component segments."""
+        m = re.match(
+            r"^accounts/(?P<account>.+?)/termsOfServiceAgreementStates/(?P<identifier>.+?)$",
+            path,
+        )
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_billing_account_path(
+        billing_account: str,
+    ) -> str:
+        """Returns a fully-qualified billing_account string."""
+        return "billingAccounts/{billing_account}".format(
+            billing_account=billing_account,
+        )
+
+    @staticmethod
+    def parse_common_billing_account_path(path: str) -> Dict[str, str]:
+        """Parse a billing_account path into its component segments."""
+        m = re.match(r"^billingAccounts/(?P<billing_account>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_folder_path(
+        folder: str,
+    ) -> str:
+        """Returns a fully-qualified folder string."""
+        return "folders/{folder}".format(
+            folder=folder,
+        )
+
+    @staticmethod
+    def parse_common_folder_path(path: str) -> Dict[str, str]:
+        """Parse a folder path into its component segments."""
+        m = re.match(r"^folders/(?P<folder>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_organization_path(
+        organization: str,
+    ) -> str:
+        """Returns a fully-qualified organization string."""
+        return "organizations/{organization}".format(
+            organization=organization,
+        )
+
+    @staticmethod
+    def parse_common_organization_path(path: str) -> Dict[str, str]:
+        """Parse an organization path into its component segments."""
+        m = re.match(r"^organizations/(?P<organization>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_project_path(
+        project: str,
+    ) -> str:
+        """Returns a fully-qualified project string."""
+        return "projects/{project}".format(
+            project=project,
+        )
+
+    @staticmethod
+    def parse_common_project_path(path: str) -> Dict[str, str]:
+        """Parse a project path into its component segments."""
+        m = re.match(r"^projects/(?P<project>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_location_path(
+        project: str,
+        location: str,
+    ) -> str:
+        """Returns a fully-qualified location string."""
+        return "projects/{project}/locations/{location}".format(
+            project=project,
+            location=location,
+        )
+
+    @staticmethod
+    def parse_common_location_path(path: str) -> Dict[str, str]:
+        """Parse a location path into its component segments."""
+        m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @classmethod
+    def get_mtls_endpoint_and_cert_source(
+        cls, client_options: Optional[client_options_lib.ClientOptions] = None
+    ):
+        """Deprecated. Return the API endpoint and client cert source for mutual TLS.
+
+        The client cert source is determined in the following order:
+        (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the
+        client cert source is None.
+        (2) if `client_options.client_cert_source` is provided, use the provided one; if the
+        default client cert source exists, use the default one; otherwise the client cert
+        source is None.
+
+        The API endpoint is determined in the following order:
+        (1) if `client_options.api_endpoint` is provided, use the provided one.
+        (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the
+        default mTLS endpoint; if the environment variable is "never", use the default API
+        endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise
+        use the default API endpoint.
+
+        More details can be found at https://google.aip.dev/auth/4114.
+
+        Args:
+            client_options (google.api_core.client_options.ClientOptions): Custom options for the
+                client. Only the `api_endpoint` and `client_cert_source` properties may be used
+                in this method.
+
+        Returns:
+            Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the
+                client cert source to use.
+
+        Raises:
+            google.auth.exceptions.MutualTLSChannelError: If any errors happen.
+        """
+
+        warnings.warn(
+            "get_mtls_endpoint_and_cert_source is deprecated. Use the api_endpoint property instead.",
+            DeprecationWarning,
+        )
+        if client_options is None:
+            client_options = client_options_lib.ClientOptions()
+        use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false")
+        use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto")
+        if use_client_cert not in ("true", "false"):
+            raise ValueError(
+                "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`"
+            )
+        if use_mtls_endpoint not in ("auto", "never", "always"):
+            raise MutualTLSChannelError(
+                "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`"
+            )
+
+        # Figure out the client cert source to use.
+        client_cert_source = None
+        if use_client_cert == "true":
+            if client_options.client_cert_source:
+                client_cert_source = client_options.client_cert_source
+            elif mtls.has_default_client_cert_source():
+                client_cert_source = mtls.default_client_cert_source()
+
+        # Figure out which api endpoint to use.
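+        # Precedence below: an explicit ``client_options.api_endpoint`` wins,
+        # then the default mTLS endpoint when required or auto-selected,
+        # otherwise the default endpoint.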
+ if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + elif use_mtls_endpoint == "always" or ( + use_mtls_endpoint == "auto" and client_cert_source + ): + api_endpoint = cls.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = cls.DEFAULT_ENDPOINT + + return api_endpoint, client_cert_source + + @staticmethod + def _read_environment_variables(): + """Returns the environment variables used by the client. + + Returns: + Tuple[bool, str, str]: returns the GOOGLE_API_USE_CLIENT_CERTIFICATE, + GOOGLE_API_USE_MTLS_ENDPOINT, and GOOGLE_CLOUD_UNIVERSE_DOMAIN environment variables. + + Raises: + ValueError: If GOOGLE_API_USE_CLIENT_CERTIFICATE is not + any of ["true", "false"]. + google.auth.exceptions.MutualTLSChannelError: If GOOGLE_API_USE_MTLS_ENDPOINT + is not any of ["auto", "never", "always"]. + """ + use_client_cert = os.getenv( + "GOOGLE_API_USE_CLIENT_CERTIFICATE", "false" + ).lower() + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto").lower() + universe_domain_env = os.getenv("GOOGLE_CLOUD_UNIVERSE_DOMAIN") + if use_client_cert not in ("true", "false"): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError( + "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + return use_client_cert == "true", use_mtls_endpoint, universe_domain_env + + @staticmethod + def _get_client_cert_source(provided_cert_source, use_cert_flag): + """Return the client cert source to be used by the client. + + Args: + provided_cert_source (bytes): The client certificate source provided. + use_cert_flag (bool): A flag indicating whether to use the client certificate. + + Returns: + bytes or None: The client cert source to be used by the client. + """ + client_cert_source = None + if use_cert_flag: + if provided_cert_source: + client_cert_source = provided_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + return client_cert_source + + @staticmethod + def _get_api_endpoint( + api_override, client_cert_source, universe_domain, use_mtls_endpoint + ): + """Return the API endpoint used by the client. + + Args: + api_override (str): The API endpoint override. If specified, this is always + the return value of this function and the other arguments are not used. + client_cert_source (bytes): The client certificate source used by the client. + universe_domain (str): The universe domain used by the client. + use_mtls_endpoint (str): How to use the mTLS endpoint, which depends also on the other parameters. + Possible values are "always", "auto", or "never". + + Returns: + str: The API endpoint to be used by the client. + """ + if api_override is not None: + api_endpoint = api_override + elif use_mtls_endpoint == "always" or ( + use_mtls_endpoint == "auto" and client_cert_source + ): + _default_universe = ( + TermsOfServiceAgreementStateServiceClient._DEFAULT_UNIVERSE + ) + if universe_domain != _default_universe: + raise MutualTLSChannelError( + f"mTLS is not supported in any universe other than {_default_universe}." 
+ ) + api_endpoint = ( + TermsOfServiceAgreementStateServiceClient.DEFAULT_MTLS_ENDPOINT + ) + else: + api_endpoint = TermsOfServiceAgreementStateServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=universe_domain + ) + return api_endpoint + + @staticmethod + def _get_universe_domain( + client_universe_domain: Optional[str], universe_domain_env: Optional[str] + ) -> str: + """Return the universe domain used by the client. + + Args: + client_universe_domain (Optional[str]): The universe domain configured via the client options. + universe_domain_env (Optional[str]): The universe domain configured via the "GOOGLE_CLOUD_UNIVERSE_DOMAIN" environment variable. + + Returns: + str: The universe domain to be used by the client. + + Raises: + ValueError: If the universe domain is an empty string. + """ + universe_domain = TermsOfServiceAgreementStateServiceClient._DEFAULT_UNIVERSE + if client_universe_domain is not None: + universe_domain = client_universe_domain + elif universe_domain_env is not None: + universe_domain = universe_domain_env + if len(universe_domain.strip()) == 0: + raise ValueError("Universe Domain cannot be an empty string.") + return universe_domain + + @staticmethod + def _compare_universes( + client_universe: str, credentials: ga_credentials.Credentials + ) -> bool: + """Returns True iff the universe domains used by the client and credentials match. + + Args: + client_universe (str): The universe domain configured via the client options. + credentials (ga_credentials.Credentials): The credentials being used in the client. + + Returns: + bool: True iff client_universe matches the universe in credentials. + + Raises: + ValueError: when client_universe does not match the universe in credentials. + """ + + default_universe = TermsOfServiceAgreementStateServiceClient._DEFAULT_UNIVERSE + credentials_universe = getattr(credentials, "universe_domain", default_universe) + + if client_universe != credentials_universe: + raise ValueError( + "The configured universe domain " + f"({client_universe}) does not match the universe domain " + f"found in the credentials ({credentials_universe}). " + "If you haven't configured the universe domain explicitly, " + f"`{default_universe}` is the default." + ) + return True + + def _validate_universe_domain(self): + """Validates client's and credentials' universe domains are consistent. + + Returns: + bool: True iff the configured universe domain is valid. + + Raises: + ValueError: If the configured universe domain is not valid. + """ + self._is_universe_domain_valid = ( + self._is_universe_domain_valid + or TermsOfServiceAgreementStateServiceClient._compare_universes( + self.universe_domain, self.transport._credentials + ) + ) + return self._is_universe_domain_valid + + @property + def api_endpoint(self): + """Return the API endpoint used by the client instance. + + Returns: + str: The API endpoint used by the client instance. + """ + return self._api_endpoint + + @property + def universe_domain(self) -> str: + """Return the universe domain used by the client instance. + + Returns: + str: The universe domain used by the client instance. 
+ """ + return self._universe_domain + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[ + Union[ + str, + TermsOfServiceAgreementStateServiceTransport, + Callable[..., TermsOfServiceAgreementStateServiceTransport], + ] + ] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the terms of service agreement state service client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Optional[Union[str,TermsOfServiceAgreementStateServiceTransport,Callable[..., TermsOfServiceAgreementStateServiceTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the TermsOfServiceAgreementStateServiceTransport constructor. + If set to None, a transport is chosen automatically. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): + Custom options for the client. + + 1. The ``api_endpoint`` property can be used to override the + default endpoint provided by the client when ``transport`` is + not explicitly provided. Only if this property is not set and + ``transport`` was not explicitly provided, the endpoint is + determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment + variable, which have one of the following values: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto-switch to the + default mTLS endpoint if client certificate is present; this is + the default value). + + 2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide a client certificate for mTLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + 3. The ``universe_domain`` property can be used to override the + default "googleapis.com" universe. Note that the ``api_endpoint`` + property still takes precedence; and ``universe_domain`` is + currently not supported for mTLS. + + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. 
+ """ + self._client_options = client_options + if isinstance(self._client_options, dict): + self._client_options = client_options_lib.from_dict(self._client_options) + if self._client_options is None: + self._client_options = client_options_lib.ClientOptions() + self._client_options = cast( + client_options_lib.ClientOptions, self._client_options + ) + + universe_domain_opt = getattr(self._client_options, "universe_domain", None) + + ( + self._use_client_cert, + self._use_mtls_endpoint, + self._universe_domain_env, + ) = TermsOfServiceAgreementStateServiceClient._read_environment_variables() + self._client_cert_source = ( + TermsOfServiceAgreementStateServiceClient._get_client_cert_source( + self._client_options.client_cert_source, self._use_client_cert + ) + ) + self._universe_domain = ( + TermsOfServiceAgreementStateServiceClient._get_universe_domain( + universe_domain_opt, self._universe_domain_env + ) + ) + self._api_endpoint = None # updated below, depending on `transport` + + # Initialize the universe domain validation. + self._is_universe_domain_valid = False + + api_key_value = getattr(self._client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError( + "client_options.api_key and credentials are mutually exclusive" + ) + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + transport_provided = isinstance( + transport, TermsOfServiceAgreementStateServiceTransport + ) + if transport_provided: + # transport is a TermsOfServiceAgreementStateServiceTransport instance. + if credentials or self._client_options.credentials_file or api_key_value: + raise ValueError( + "When providing a transport instance, " + "provide its credentials directly." + ) + if self._client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." 
+ ) + self._transport = cast( + TermsOfServiceAgreementStateServiceTransport, transport + ) + self._api_endpoint = self._transport.host + + self._api_endpoint = ( + self._api_endpoint + or TermsOfServiceAgreementStateServiceClient._get_api_endpoint( + self._client_options.api_endpoint, + self._client_cert_source, + self._universe_domain, + self._use_mtls_endpoint, + ) + ) + + if not transport_provided: + import google.auth._default # type: ignore + + if api_key_value and hasattr( + google.auth._default, "get_api_key_credentials" + ): + credentials = google.auth._default.get_api_key_credentials( + api_key_value + ) + + transport_init: Union[ + Type[TermsOfServiceAgreementStateServiceTransport], + Callable[..., TermsOfServiceAgreementStateServiceTransport], + ] = ( + type(self).get_transport_class(transport) + if isinstance(transport, str) or transport is None + else cast( + Callable[..., TermsOfServiceAgreementStateServiceTransport], + transport, + ) + ) + # initialize with the provided callable or the passed in class + self._transport = transport_init( + credentials=credentials, + credentials_file=self._client_options.credentials_file, + host=self._api_endpoint, + scopes=self._client_options.scopes, + client_cert_source_for_mtls=self._client_cert_source, + quota_project_id=self._client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + api_audience=self._client_options.api_audience, + ) + + def get_terms_of_service_agreement_state( + self, + request: Optional[ + Union[ + termsofserviceagreementstate.GetTermsOfServiceAgreementStateRequest, + dict, + ] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> termsofserviceagreementstate.TermsOfServiceAgreementState: + r"""Returns the state of a terms of service agreement. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.shopping import merchant_accounts_v1beta + + def sample_get_terms_of_service_agreement_state(): + # Create a client + client = merchant_accounts_v1beta.TermsOfServiceAgreementStateServiceClient() + + # Initialize request argument(s) + request = merchant_accounts_v1beta.GetTermsOfServiceAgreementStateRequest( + name="name_value", + ) + + # Make the request + response = client.get_terms_of_service_agreement_state(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.shopping.merchant_accounts_v1beta.types.GetTermsOfServiceAgreementStateRequest, dict]): + The request object. Request message for the + ``GetTermsOfServiceAgreementState`` method. + name (str): + Required. The resource name of the terms of service + version. Format: + ``accounts/{account}/termsOfServiceAgreementState/{identifier}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.shopping.merchant_accounts_v1beta.types.TermsOfServiceAgreementState: + This resource represents the agreement state for a given account and terms of + service kind. The state is as follows: + + \* If the merchant has accepted a terms of service: + [accepted](TermsOfServiceAggrementState.accepted) + will be populated, otherwise it will be empty \* If + the merchant must sign a terms of service: + [required](TermsOfServiceAggrementState.required) + will be populated, otherwise it will be empty. + + Note that both + [required](TermsOfServiceAggrementState.required) and + [accepted](TermsOfServiceAggrementState.accepted) can + be present. In this case the accepted terms of + services will have an expiration date set in the + [valid_until](Accepted.valid_until) field. The + required terms of services need to be accepted before + valid_until in order for the account to continue + having a valid agreement. When accepting new terms of + services we expect 3Ps to display the text associated + with the given terms of service agreement (the url to + the file containing the text is added in the Required + message below as + [tos_file_uri](Accepted.tos_file_uri). The actual + acceptance of the terms of service is done by calling + accept on the [TermsOfService](TermsOfService) + resource. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance( + request, termsofserviceagreementstate.GetTermsOfServiceAgreementStateRequest + ): + request = ( + termsofserviceagreementstate.GetTermsOfServiceAgreementStateRequest( + request + ) + ) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.get_terms_of_service_agreement_state + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def retrieve_for_application_terms_of_service_agreement_state( + self, + request: Optional[ + Union[ + termsofserviceagreementstate.RetrieveForApplicationTermsOfServiceAgreementStateRequest, + dict, + ] + ] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> termsofserviceagreementstate.TermsOfServiceAgreementState: + r"""Retrieves the state of the agreement for the + application terms of service. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.shopping import merchant_accounts_v1beta + + def sample_retrieve_for_application_terms_of_service_agreement_state(): + # Create a client + client = merchant_accounts_v1beta.TermsOfServiceAgreementStateServiceClient() + + # Initialize request argument(s) + request = merchant_accounts_v1beta.RetrieveForApplicationTermsOfServiceAgreementStateRequest( + parent="parent_value", + ) + + # Make the request + response = client.retrieve_for_application_terms_of_service_agreement_state(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.shopping.merchant_accounts_v1beta.types.RetrieveForApplicationTermsOfServiceAgreementStateRequest, dict]): + The request object. Request message for the + ``RetrieveForApplicationTermsOfServiceAgreementState`` + method. + parent (str): + Required. The account for which to get a + TermsOfServiceAgreementState Format: + ``accounts/{account}`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.shopping.merchant_accounts_v1beta.types.TermsOfServiceAgreementState: + This resource represents the agreement state for a given account and terms of + service kind. The state is as follows: + + \* If the merchant has accepted a terms of service: + [accepted](TermsOfServiceAggrementState.accepted) + will be populated, otherwise it will be empty \* If + the merchant must sign a terms of service: + [required](TermsOfServiceAggrementState.required) + will be populated, otherwise it will be empty. + + Note that both + [required](TermsOfServiceAggrementState.required) and + [accepted](TermsOfServiceAggrementState.accepted) can + be present. In this case the accepted terms of + services will have an expiration date set in the + [valid_until](Accepted.valid_until) field. The + required terms of services need to be accepted before + valid_until in order for the account to continue + having a valid agreement. When accepting new terms of + services we expect 3Ps to display the text associated + with the given terms of service agreement (the url to + the file containing the text is added in the Required + message below as + [tos_file_uri](Accepted.tos_file_uri). The actual + acceptance of the terms of service is done by calling + accept on the [TermsOfService](TermsOfService) + resource. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." 
+ ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance( + request, + termsofserviceagreementstate.RetrieveForApplicationTermsOfServiceAgreementStateRequest, + ): + request = termsofserviceagreementstate.RetrieveForApplicationTermsOfServiceAgreementStateRequest( + request + ) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.retrieve_for_application_terms_of_service_agreement_state + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def __enter__(self) -> "TermsOfServiceAgreementStateServiceClient": + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! + """ + self.transport.close() + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("TermsOfServiceAgreementStateServiceClient",) diff --git a/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/terms_of_service_agreement_state_service/transports/__init__.py b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/terms_of_service_agreement_state_service/transports/__init__.py new file mode 100644 index 000000000000..8c6676ece078 --- /dev/null +++ b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/terms_of_service_agreement_state_service/transports/__init__.py @@ -0,0 +1,43 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +from typing import Dict, Type + +from .base import TermsOfServiceAgreementStateServiceTransport +from .grpc import TermsOfServiceAgreementStateServiceGrpcTransport +from .grpc_asyncio import TermsOfServiceAgreementStateServiceGrpcAsyncIOTransport +from .rest import ( + TermsOfServiceAgreementStateServiceRestInterceptor, + TermsOfServiceAgreementStateServiceRestTransport, +) + +# Compile a registry of transports. 
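The registry compiled just below maps the transport names accepted by the client constructor ("grpc", "grpc_asyncio", "rest") to their classes. A minimal usage sketch, assuming Application Default Credentials are configured and using a placeholder resource name:

```python
from google.shopping import merchant_accounts_v1beta

# "rest" selects TermsOfServiceAgreementStateServiceRestTransport from this registry;
# "grpc" would select the gRPC transport instead. The context manager closes the
# transport on exit, so only use it when the transport is not shared with other clients.
with merchant_accounts_v1beta.TermsOfServiceAgreementStateServiceClient(
    transport="rest"
) as client:
    request = merchant_accounts_v1beta.GetTermsOfServiceAgreementStateRequest(
        # Placeholder; the expected format is
        # accounts/{account}/termsOfServiceAgreementState/{identifier}
        name="accounts/123/termsOfServiceAgreementState/identifier",
    )
    state = client.get_terms_of_service_agreement_state(request=request)
    print(state)
```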
+_transport_registry = ( + OrderedDict() +) # type: Dict[str, Type[TermsOfServiceAgreementStateServiceTransport]] +_transport_registry["grpc"] = TermsOfServiceAgreementStateServiceGrpcTransport +_transport_registry[ + "grpc_asyncio" +] = TermsOfServiceAgreementStateServiceGrpcAsyncIOTransport +_transport_registry["rest"] = TermsOfServiceAgreementStateServiceRestTransport + +__all__ = ( + "TermsOfServiceAgreementStateServiceTransport", + "TermsOfServiceAgreementStateServiceGrpcTransport", + "TermsOfServiceAgreementStateServiceGrpcAsyncIOTransport", + "TermsOfServiceAgreementStateServiceRestTransport", + "TermsOfServiceAgreementStateServiceRestInterceptor", +) diff --git a/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/terms_of_service_agreement_state_service/transports/base.py b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/terms_of_service_agreement_state_service/transports/base.py new file mode 100644 index 000000000000..f99340358531 --- /dev/null +++ b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/terms_of_service_agreement_state_service/transports/base.py @@ -0,0 +1,181 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import abc +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union + +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.shopping.merchant_accounts_v1beta import gapic_version as package_version +from google.shopping.merchant_accounts_v1beta.types import termsofserviceagreementstate + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +class TermsOfServiceAgreementStateServiceTransport(abc.ABC): + """Abstract transport class for TermsOfServiceAgreementStateService.""" + + AUTH_SCOPES = ("https://www.googleapis.com/auth/content",) + + DEFAULT_HOST: str = "merchantapi.googleapis.com" + + def __init__( + self, + *, + host: str = DEFAULT_HOST, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + **kwargs, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'merchantapi.googleapis.com'). 
+ credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A list of scopes. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + """ + + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} + + # Save the scopes. + self._scopes = scopes + + # If no credentials are provided, then determine the appropriate + # defaults. + if credentials and credentials_file: + raise core_exceptions.DuplicateCredentialArgs( + "'credentials_file' and 'credentials' are mutually exclusive" + ) + + if credentials_file is not None: + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, **scopes_kwargs, quota_project_id=quota_project_id + ) + elif credentials is None: + credentials, _ = google.auth.default( + **scopes_kwargs, quota_project_id=quota_project_id + ) + # Don't apply audience if the credentials file passed from user. + if hasattr(credentials, "with_gdch_audience"): + credentials = credentials.with_gdch_audience( + api_audience if api_audience else host + ) + + # If the credentials are service account credentials, then always try to use self signed JWT. + if ( + always_use_jwt_access + and isinstance(credentials, service_account.Credentials) + and hasattr(service_account.Credentials, "with_always_use_jwt_access") + ): + credentials = credentials.with_always_use_jwt_access(True) + + # Save the credentials. + self._credentials = credentials + + # Save the hostname. Default to port 443 (HTTPS) if none is specified. + if ":" not in host: + host += ":443" + self._host = host + + @property + def host(self): + return self._host + + def _prep_wrapped_messages(self, client_info): + # Precompute the wrapped methods. + self._wrapped_methods = { + self.get_terms_of_service_agreement_state: gapic_v1.method.wrap_method( + self.get_terms_of_service_agreement_state, + default_timeout=None, + client_info=client_info, + ), + self.retrieve_for_application_terms_of_service_agreement_state: gapic_v1.method.wrap_method( + self.retrieve_for_application_terms_of_service_agreement_state, + default_timeout=None, + client_info=client_info, + ), + } + + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! 
+ """ + raise NotImplementedError() + + @property + def get_terms_of_service_agreement_state( + self, + ) -> Callable[ + [termsofserviceagreementstate.GetTermsOfServiceAgreementStateRequest], + Union[ + termsofserviceagreementstate.TermsOfServiceAgreementState, + Awaitable[termsofserviceagreementstate.TermsOfServiceAgreementState], + ], + ]: + raise NotImplementedError() + + @property + def retrieve_for_application_terms_of_service_agreement_state( + self, + ) -> Callable[ + [ + termsofserviceagreementstate.RetrieveForApplicationTermsOfServiceAgreementStateRequest + ], + Union[ + termsofserviceagreementstate.TermsOfServiceAgreementState, + Awaitable[termsofserviceagreementstate.TermsOfServiceAgreementState], + ], + ]: + raise NotImplementedError() + + @property + def kind(self) -> str: + raise NotImplementedError() + + +__all__ = ("TermsOfServiceAgreementStateServiceTransport",) diff --git a/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/terms_of_service_agreement_state_service/transports/grpc.py b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/terms_of_service_agreement_state_service/transports/grpc.py new file mode 100644 index 000000000000..149b690d88e0 --- /dev/null +++ b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/terms_of_service_agreement_state_service/transports/grpc.py @@ -0,0 +1,316 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, grpc_helpers +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +import grpc # type: ignore + +from google.shopping.merchant_accounts_v1beta.types import termsofserviceagreementstate + +from .base import DEFAULT_CLIENT_INFO, TermsOfServiceAgreementStateServiceTransport + + +class TermsOfServiceAgreementStateServiceGrpcTransport( + TermsOfServiceAgreementStateServiceTransport +): + """gRPC backend transport for TermsOfServiceAgreementStateService. + + Service to support ``TermsOfServiceAgreementState`` API. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. 
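The gRPC transport described above can also be constructed explicitly and handed to the client, which then skips its own transport setup. A sketch, assuming Application Default Credentials are available for `create_channel`:

```python
from google.shopping import merchant_accounts_v1beta
from google.shopping.merchant_accounts_v1beta.services.terms_of_service_agreement_state_service.transports import (
    TermsOfServiceAgreementStateServiceGrpcTransport,
)

# create_channel falls back to Application Default Credentials when none are given.
channel = TermsOfServiceAgreementStateServiceGrpcTransport.create_channel()
transport = TermsOfServiceAgreementStateServiceGrpcTransport(channel=channel)

# When a transport instance is supplied, the client must not also be given
# credentials, scopes, or an API key (see the checks in the client __init__ above).
client = merchant_accounts_v1beta.TermsOfServiceAgreementStateServiceClient(
    transport=transport
)
```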
+ """ + + _stubs: Dict[str, Callable] + + def __init__( + self, + *, + host: str = "merchantapi.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'merchantapi.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if a ``channel`` instance is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if a ``channel`` instance is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if a ``channel`` instance is provided. + channel (Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]]): + A ``Channel`` instance through which to make calls, or a Callable + that constructs and returns one. If set to None, ``self.create_channel`` + is used to create the channel. If a Callable is given, it will be called + with the same arguments as used in ``self.create_channel``. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if a ``channel`` instance is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. 
+ google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if isinstance(channel, grpc.Channel): + # Ignore credentials if a channel was passed. + credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + # initialize with the provided callable or the default channel + channel_init = channel or type(self).create_channel + self._grpc_channel = channel_init( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @classmethod + def create_channel( + cls, + host: str = "merchantapi.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> grpc.Channel: + """Create and return a gRPC channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. 
+ kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + grpc.Channel: A gRPC channel object. + + Raises: + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + + return grpc_helpers.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs, + ) + + @property + def grpc_channel(self) -> grpc.Channel: + """Return the channel designed to connect to this service.""" + return self._grpc_channel + + @property + def get_terms_of_service_agreement_state( + self, + ) -> Callable[ + [termsofserviceagreementstate.GetTermsOfServiceAgreementStateRequest], + termsofserviceagreementstate.TermsOfServiceAgreementState, + ]: + r"""Return a callable for the get terms of service agreement + state method over gRPC. + + Returns the state of a terms of service agreement. + + Returns: + Callable[[~.GetTermsOfServiceAgreementStateRequest], + ~.TermsOfServiceAgreementState]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_terms_of_service_agreement_state" not in self._stubs: + self._stubs[ + "get_terms_of_service_agreement_state" + ] = self.grpc_channel.unary_unary( + "/google.shopping.merchant.accounts.v1beta.TermsOfServiceAgreementStateService/GetTermsOfServiceAgreementState", + request_serializer=termsofserviceagreementstate.GetTermsOfServiceAgreementStateRequest.serialize, + response_deserializer=termsofserviceagreementstate.TermsOfServiceAgreementState.deserialize, + ) + return self._stubs["get_terms_of_service_agreement_state"] + + @property + def retrieve_for_application_terms_of_service_agreement_state( + self, + ) -> Callable[ + [ + termsofserviceagreementstate.RetrieveForApplicationTermsOfServiceAgreementStateRequest + ], + termsofserviceagreementstate.TermsOfServiceAgreementState, + ]: + r"""Return a callable for the retrieve for application terms + of service agreement state method over gRPC. + + Retrieves the state of the agreement for the + application terms of service. + + Returns: + Callable[[~.RetrieveForApplicationTermsOfServiceAgreementStateRequest], + ~.TermsOfServiceAgreementState]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if ( + "retrieve_for_application_terms_of_service_agreement_state" + not in self._stubs + ): + self._stubs[ + "retrieve_for_application_terms_of_service_agreement_state" + ] = self.grpc_channel.unary_unary( + "/google.shopping.merchant.accounts.v1beta.TermsOfServiceAgreementStateService/RetrieveForApplicationTermsOfServiceAgreementState", + request_serializer=termsofserviceagreementstate.RetrieveForApplicationTermsOfServiceAgreementStateRequest.serialize, + response_deserializer=termsofserviceagreementstate.TermsOfServiceAgreementState.deserialize, + ) + return self._stubs["retrieve_for_application_terms_of_service_agreement_state"] + + def close(self): + self.grpc_channel.close() + + @property + def kind(self) -> str: + return "grpc" + + +__all__ = ("TermsOfServiceAgreementStateServiceGrpcTransport",) diff --git a/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/terms_of_service_agreement_state_service/transports/grpc_asyncio.py b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/terms_of_service_agreement_state_service/transports/grpc_asyncio.py new file mode 100644 index 000000000000..e88aa30cd0be --- /dev/null +++ b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/terms_of_service_agreement_state_service/transports/grpc_asyncio.py @@ -0,0 +1,331 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1, grpc_helpers_async +from google.api_core import retry_async as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +import grpc # type: ignore +from grpc.experimental import aio # type: ignore + +from google.shopping.merchant_accounts_v1beta.types import termsofserviceagreementstate + +from .base import DEFAULT_CLIENT_INFO, TermsOfServiceAgreementStateServiceTransport +from .grpc import TermsOfServiceAgreementStateServiceGrpcTransport + + +class TermsOfServiceAgreementStateServiceGrpcAsyncIOTransport( + TermsOfServiceAgreementStateServiceTransport +): + """gRPC AsyncIO backend transport for TermsOfServiceAgreementStateService. + + Service to support ``TermsOfServiceAgreementState`` API. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. 
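This AsyncIO transport is normally driven through the generated async client rather than used directly. The sketch below assumes the standard GAPIC layout, in which a companion `TermsOfServiceAgreementStateServiceAsyncClient` (not shown in this excerpt) defaults to the "grpc_asyncio" transport; it also assumes Application Default Credentials and uses a placeholder account ID:

```python
import asyncio

from google.shopping import merchant_accounts_v1beta


async def main() -> None:
    # Assumed companion async client; it selects this gRPC AsyncIO transport by default.
    client = merchant_accounts_v1beta.TermsOfServiceAgreementStateServiceAsyncClient()
    request = merchant_accounts_v1beta.RetrieveForApplicationTermsOfServiceAgreementStateRequest(
        parent="accounts/123",  # placeholder; the expected format is accounts/{account}
    )
    state = await client.retrieve_for_application_terms_of_service_agreement_state(
        request=request
    )
    print(state)


asyncio.run(main())
```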
+ """ + + _grpc_channel: aio.Channel + _stubs: Dict[str, Callable] = {} + + @classmethod + def create_channel( + cls, + host: str = "merchantapi.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> aio.Channel: + """Create and return a gRPC AsyncIO channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + aio.Channel: A gRPC AsyncIO channel object. + """ + + return grpc_helpers_async.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs, + ) + + def __init__( + self, + *, + host: str = "merchantapi.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[Union[aio.Channel, Callable[..., aio.Channel]]] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'merchantapi.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if a ``channel`` instance is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if a ``channel`` instance is provided. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + channel (Optional[Union[aio.Channel, Callable[..., aio.Channel]]]): + A ``Channel`` instance through which to make calls, or a Callable + that constructs and returns one. If set to None, ``self.create_channel`` + is used to create the channel. 
If a Callable is given, it will be called + with the same arguments as used in ``self.create_channel``. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if a ``channel`` instance is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if isinstance(channel, aio.Channel): + # Ignore credentials if a channel was passed. + credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. 
+ if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + # initialize with the provided callable or the default channel + channel_init = channel or type(self).create_channel + self._grpc_channel = channel_init( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @property + def grpc_channel(self) -> aio.Channel: + """Create the channel designed to connect to this service. + + This property caches on the instance; repeated calls return + the same channel. + """ + # Return the channel from cache. + return self._grpc_channel + + @property + def get_terms_of_service_agreement_state( + self, + ) -> Callable[ + [termsofserviceagreementstate.GetTermsOfServiceAgreementStateRequest], + Awaitable[termsofserviceagreementstate.TermsOfServiceAgreementState], + ]: + r"""Return a callable for the get terms of service agreement + state method over gRPC. + + Returns the state of a terms of service agreement. + + Returns: + Callable[[~.GetTermsOfServiceAgreementStateRequest], + Awaitable[~.TermsOfServiceAgreementState]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_terms_of_service_agreement_state" not in self._stubs: + self._stubs[ + "get_terms_of_service_agreement_state" + ] = self.grpc_channel.unary_unary( + "/google.shopping.merchant.accounts.v1beta.TermsOfServiceAgreementStateService/GetTermsOfServiceAgreementState", + request_serializer=termsofserviceagreementstate.GetTermsOfServiceAgreementStateRequest.serialize, + response_deserializer=termsofserviceagreementstate.TermsOfServiceAgreementState.deserialize, + ) + return self._stubs["get_terms_of_service_agreement_state"] + + @property + def retrieve_for_application_terms_of_service_agreement_state( + self, + ) -> Callable[ + [ + termsofserviceagreementstate.RetrieveForApplicationTermsOfServiceAgreementStateRequest + ], + Awaitable[termsofserviceagreementstate.TermsOfServiceAgreementState], + ]: + r"""Return a callable for the retrieve for application terms + of service agreement state method over gRPC. 
+ + Retrieves the state of the agreement for the + application terms of service. + + Returns: + Callable[[~.RetrieveForApplicationTermsOfServiceAgreementStateRequest], + Awaitable[~.TermsOfServiceAgreementState]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if ( + "retrieve_for_application_terms_of_service_agreement_state" + not in self._stubs + ): + self._stubs[ + "retrieve_for_application_terms_of_service_agreement_state" + ] = self.grpc_channel.unary_unary( + "/google.shopping.merchant.accounts.v1beta.TermsOfServiceAgreementStateService/RetrieveForApplicationTermsOfServiceAgreementState", + request_serializer=termsofserviceagreementstate.RetrieveForApplicationTermsOfServiceAgreementStateRequest.serialize, + response_deserializer=termsofserviceagreementstate.TermsOfServiceAgreementState.deserialize, + ) + return self._stubs["retrieve_for_application_terms_of_service_agreement_state"] + + def _prep_wrapped_messages(self, client_info): + """Precompute the wrapped methods, overriding the base class method to use async wrappers.""" + self._wrapped_methods = { + self.get_terms_of_service_agreement_state: gapic_v1.method_async.wrap_method( + self.get_terms_of_service_agreement_state, + default_timeout=None, + client_info=client_info, + ), + self.retrieve_for_application_terms_of_service_agreement_state: gapic_v1.method_async.wrap_method( + self.retrieve_for_application_terms_of_service_agreement_state, + default_timeout=None, + client_info=client_info, + ), + } + + def close(self): + return self.grpc_channel.close() + + +__all__ = ("TermsOfServiceAgreementStateServiceGrpcAsyncIOTransport",) diff --git a/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/terms_of_service_agreement_state_service/transports/rest.py b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/terms_of_service_agreement_state_service/transports/rest.py new file mode 100644 index 000000000000..9249faa9888d --- /dev/null +++ b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/terms_of_service_agreement_state_service/transports/rest.py @@ -0,0 +1,526 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +import dataclasses +import json # type: ignore +import re +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, path_template, rest_helpers, rest_streaming +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.transport.requests import AuthorizedSession # type: ignore +from google.protobuf import json_format +import grpc # type: ignore +from requests import __version__ as requests_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + + +from google.shopping.merchant_accounts_v1beta.types import termsofserviceagreementstate + +from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO +from .base import TermsOfServiceAgreementStateServiceTransport + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) + + +class TermsOfServiceAgreementStateServiceRestInterceptor: + """Interceptor for TermsOfServiceAgreementStateService. + + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. + Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the TermsOfServiceAgreementStateServiceRestTransport. + + .. code-block:: python + class MyCustomTermsOfServiceAgreementStateServiceInterceptor(TermsOfServiceAgreementStateServiceRestInterceptor): + def pre_get_terms_of_service_agreement_state(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_terms_of_service_agreement_state(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_retrieve_for_application_terms_of_service_agreement_state(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_retrieve_for_application_terms_of_service_agreement_state(self, response): + logging.log(f"Received response: {response}") + return response + + transport = TermsOfServiceAgreementStateServiceRestTransport(interceptor=MyCustomTermsOfServiceAgreementStateServiceInterceptor()) + client = TermsOfServiceAgreementStateServiceClient(transport=transport) + + + """ + + def pre_get_terms_of_service_agreement_state( + self, + request: termsofserviceagreementstate.GetTermsOfServiceAgreementStateRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + termsofserviceagreementstate.GetTermsOfServiceAgreementStateRequest, + Sequence[Tuple[str, str]], + ]: + """Pre-rpc interceptor for get_terms_of_service_agreement_state + + Override in a subclass to manipulate the request or metadata + before they are sent to the TermsOfServiceAgreementStateService server. 
+ """ + return request, metadata + + def post_get_terms_of_service_agreement_state( + self, response: termsofserviceagreementstate.TermsOfServiceAgreementState + ) -> termsofserviceagreementstate.TermsOfServiceAgreementState: + """Post-rpc interceptor for get_terms_of_service_agreement_state + + Override in a subclass to manipulate the response + after it is returned by the TermsOfServiceAgreementStateService server but before + it is returned to user code. + """ + return response + + def pre_retrieve_for_application_terms_of_service_agreement_state( + self, + request: termsofserviceagreementstate.RetrieveForApplicationTermsOfServiceAgreementStateRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + termsofserviceagreementstate.RetrieveForApplicationTermsOfServiceAgreementStateRequest, + Sequence[Tuple[str, str]], + ]: + """Pre-rpc interceptor for retrieve_for_application_terms_of_service_agreement_state + + Override in a subclass to manipulate the request or metadata + before they are sent to the TermsOfServiceAgreementStateService server. + """ + return request, metadata + + def post_retrieve_for_application_terms_of_service_agreement_state( + self, response: termsofserviceagreementstate.TermsOfServiceAgreementState + ) -> termsofserviceagreementstate.TermsOfServiceAgreementState: + """Post-rpc interceptor for retrieve_for_application_terms_of_service_agreement_state + + Override in a subclass to manipulate the response + after it is returned by the TermsOfServiceAgreementStateService server but before + it is returned to user code. + """ + return response + + +@dataclasses.dataclass +class TermsOfServiceAgreementStateServiceRestStub: + _session: AuthorizedSession + _host: str + _interceptor: TermsOfServiceAgreementStateServiceRestInterceptor + + +class TermsOfServiceAgreementStateServiceRestTransport( + TermsOfServiceAgreementStateServiceTransport +): + """REST backend transport for TermsOfServiceAgreementStateService. + + Service to support ``TermsOfServiceAgreementState`` API. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends JSON representations of protocol buffers over HTTP/1.1 + + """ + + def __init__( + self, + *, + host: str = "merchantapi.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", + interceptor: Optional[ + TermsOfServiceAgreementStateServiceRestInterceptor + ] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'merchantapi.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. 
This argument is
+                ignored if ``channel`` is provided.
+            client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client
+                certificate to configure mutual TLS HTTP channel. It is ignored
+                if ``channel`` is provided.
+            quota_project_id (Optional[str]): An optional project to use for billing
+                and quota.
+            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+                The client info used to send a user-agent string along with
+                API requests. If ``None``, then default info will be used.
+                Generally, you only need to set this if you are developing
+                your own client library.
+            always_use_jwt_access (Optional[bool]): Whether self signed JWT should
+                be used for service account credentials.
+            url_scheme: the protocol scheme for the API endpoint. Normally
+                "https", but for testing or local servers,
+                "http" can be specified.
+        """
+        # Run the base constructor
+        # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc.
+        # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the
+        # credentials object
+        maybe_url_match = re.match("^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host)
+        if maybe_url_match is None:
+            raise ValueError(
+                f"Unexpected hostname structure: {host}"
+            )  # pragma: NO COVER
+
+        url_match_items = maybe_url_match.groupdict()
+
+        host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host
+
+        super().__init__(
+            host=host,
+            credentials=credentials,
+            client_info=client_info,
+            always_use_jwt_access=always_use_jwt_access,
+            api_audience=api_audience,
+        )
+        self._session = AuthorizedSession(
+            self._credentials, default_host=self.DEFAULT_HOST
+        )
+        if client_cert_source_for_mtls:
+            self._session.configure_mtls_channel(client_cert_source_for_mtls)
+        self._interceptor = (
+            interceptor or TermsOfServiceAgreementStateServiceRestInterceptor()
+        )
+        self._prep_wrapped_messages(client_info)
+
+    class _GetTermsOfServiceAgreementState(TermsOfServiceAgreementStateServiceRestStub):
+        def __hash__(self):
+            return hash("GetTermsOfServiceAgreementState")
+
+        __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {}
+
+        @classmethod
+        def _get_unset_required_fields(cls, message_dict):
+            return {
+                k: v
+                for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items()
+                if k not in message_dict
+            }
+
+        def __call__(
+            self,
+            request: termsofserviceagreementstate.GetTermsOfServiceAgreementStateRequest,
+            *,
+            retry: OptionalRetry = gapic_v1.method.DEFAULT,
+            timeout: Optional[float] = None,
+            metadata: Sequence[Tuple[str, str]] = (),
+        ) -> termsofserviceagreementstate.TermsOfServiceAgreementState:
+            r"""Call the get terms of service
+            agreement state method over HTTP.
+
+            Args:
+                request (~.termsofserviceagreementstate.GetTermsOfServiceAgreementStateRequest):
+                    The request object. Request message for the
+                    ``GetTermsOfServiceAgreementState`` method.
+                retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                    should be retried.
+                timeout (float): The timeout for this request.
+                metadata (Sequence[Tuple[str, str]]): Strings which should be
+                    sent along with the request as metadata.
+
+            Returns:
+                ~.termsofserviceagreementstate.TermsOfServiceAgreementState:
+                    This resource represents the agreement state for a given
+                    account and terms of service kind. The state is as
+                    follows:
+
+                    - If the merchant has accepted a terms of service:
+                      `accepted `__
+                      will be populated, otherwise it will be empty
+                    - If the merchant must sign a terms of service:
+                      `required `__
+                      will be populated, otherwise it will be empty.
+ + Note that both + `required `__ and + `accepted `__ can + be present. In this case the ``accepted`` terms of + services will have an expiration date set in the + `valid_until `__ field. The + ``required`` terms of services need to be accepted + before ``valid_until`` in order for the account to + continue having a valid agreement. When accepting new + terms of services we expect 3Ps to display the text + associated with the given terms of service agreement + (the url to the file containing the text is added in the + Required message below as + `tos_file_uri `__. The actual + acceptance of the terms of service is done by calling + accept on the `TermsOfService `__ + resource. + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/accounts/v1beta/{name=accounts/*/termsOfServiceAgreementStates/*}", + }, + ] + ( + request, + metadata, + ) = self._interceptor.pre_get_terms_of_service_agreement_state( + request, metadata + ) + pb_request = ( + termsofserviceagreementstate.GetTermsOfServiceAgreementStateRequest.pb( + request + ) + ) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = termsofserviceagreementstate.TermsOfServiceAgreementState() + pb_resp = termsofserviceagreementstate.TermsOfServiceAgreementState.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_terms_of_service_agreement_state(resp) + return resp + + class _RetrieveForApplicationTermsOfServiceAgreementState( + TermsOfServiceAgreementStateServiceRestStub + ): + def __hash__(self): + return hash("RetrieveForApplicationTermsOfServiceAgreementState") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: termsofserviceagreementstate.RetrieveForApplicationTermsOfServiceAgreementStateRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> termsofserviceagreementstate.TermsOfServiceAgreementState: + r"""Call the retrieve for application + terms of service agreement state method over HTTP. + + Args: + request (~.termsofserviceagreementstate.RetrieveForApplicationTermsOfServiceAgreementStateRequest): + The request object. Request message for the + ``RetrieveForApplicationTermsOfServiceAgreementState`` + method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. 
+ timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.termsofserviceagreementstate.TermsOfServiceAgreementState: + This resource represents the agreement state for a given + account and terms of service kind. The state is as + follows: + + - If the merchant has accepted a terms of service: + `accepted `__ + will be populated, otherwise it will be empty + - If the merchant must sign a terms of service: + `required `__ + will be populated, otherwise it will be empty. + + Note that both + `required `__ and + `accepted `__ can + be present. In this case the ``accepted`` terms of + services will have an expiration date set in the + `valid_until `__ field. The + ``required`` terms of services need to be accepted + before ``valid_until`` in order for the account to + continue having a valid agreement. When accepting new + terms of services we expect 3Ps to display the text + associated with the given terms of service agreement + (the url to the file containing the text is added in the + Required message below as + `tos_file_uri `__. The actual + acceptance of the terms of service is done by calling + accept on the `TermsOfService `__ + resource. + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/accounts/v1beta/{parent=accounts/*}/termsOfServiceAgreementStates:retrieveForApplication", + }, + ] + ( + request, + metadata, + ) = self._interceptor.pre_retrieve_for_application_terms_of_service_agreement_state( + request, metadata + ) + pb_request = termsofserviceagreementstate.RetrieveForApplicationTermsOfServiceAgreementStateRequest.pb( + request + ) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = termsofserviceagreementstate.TermsOfServiceAgreementState() + pb_resp = termsofserviceagreementstate.TermsOfServiceAgreementState.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_retrieve_for_application_terms_of_service_agreement_state( + resp + ) + return resp + + @property + def get_terms_of_service_agreement_state( + self, + ) -> Callable[ + [termsofserviceagreementstate.GetTermsOfServiceAgreementStateRequest], + termsofserviceagreementstate.TermsOfServiceAgreementState, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._GetTermsOfServiceAgreementState(self._session, self._host, self._interceptor) # type: ignore + + @property + def retrieve_for_application_terms_of_service_agreement_state( + self, + ) -> Callable[ + [ + termsofserviceagreementstate.RetrieveForApplicationTermsOfServiceAgreementStateRequest + ], + termsofserviceagreementstate.TermsOfServiceAgreementState, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._RetrieveForApplicationTermsOfServiceAgreementState(self._session, self._host, self._interceptor) # type: ignore + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__ = ("TermsOfServiceAgreementStateServiceRestTransport",) diff --git a/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/terms_of_service_service/__init__.py b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/terms_of_service_service/__init__.py new file mode 100644 index 000000000000..1bd930a18bb1 --- /dev/null +++ b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/terms_of_service_service/__init__.py @@ -0,0 +1,22 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from .async_client import TermsOfServiceServiceAsyncClient +from .client import TermsOfServiceServiceClient + +__all__ = ( + "TermsOfServiceServiceClient", + "TermsOfServiceServiceAsyncClient", +) diff --git a/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/terms_of_service_service/async_client.py b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/terms_of_service_service/async_client.py new file mode 100644 index 000000000000..723eb05fceaf --- /dev/null +++ b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/terms_of_service_service/async_client.py @@ -0,0 +1,567 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +import functools +import re +from typing import ( + Callable, + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, +) + +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry_async as retries +from google.api_core.client_options import ClientOptions +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.shopping.merchant_accounts_v1beta import gapic_version as package_version + +try: + OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.AsyncRetry, object, None] # type: ignore + +from google.shopping.merchant_accounts_v1beta.types import ( + termsofservice, + termsofservicekind, +) + +from .client import TermsOfServiceServiceClient +from .transports.base import DEFAULT_CLIENT_INFO, TermsOfServiceServiceTransport +from .transports.grpc_asyncio import TermsOfServiceServiceGrpcAsyncIOTransport + + +class TermsOfServiceServiceAsyncClient: + """Service to support ``TermsOfService`` API.""" + + _client: TermsOfServiceServiceClient + + # Copy defaults from the synchronous client for use here. + # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. + DEFAULT_ENDPOINT = TermsOfServiceServiceClient.DEFAULT_ENDPOINT + DEFAULT_MTLS_ENDPOINT = TermsOfServiceServiceClient.DEFAULT_MTLS_ENDPOINT + _DEFAULT_ENDPOINT_TEMPLATE = TermsOfServiceServiceClient._DEFAULT_ENDPOINT_TEMPLATE + _DEFAULT_UNIVERSE = TermsOfServiceServiceClient._DEFAULT_UNIVERSE + + account_path = staticmethod(TermsOfServiceServiceClient.account_path) + parse_account_path = staticmethod(TermsOfServiceServiceClient.parse_account_path) + terms_of_service_path = staticmethod( + TermsOfServiceServiceClient.terms_of_service_path + ) + parse_terms_of_service_path = staticmethod( + TermsOfServiceServiceClient.parse_terms_of_service_path + ) + common_billing_account_path = staticmethod( + TermsOfServiceServiceClient.common_billing_account_path + ) + parse_common_billing_account_path = staticmethod( + TermsOfServiceServiceClient.parse_common_billing_account_path + ) + common_folder_path = staticmethod(TermsOfServiceServiceClient.common_folder_path) + parse_common_folder_path = staticmethod( + TermsOfServiceServiceClient.parse_common_folder_path + ) + common_organization_path = staticmethod( + TermsOfServiceServiceClient.common_organization_path + ) + parse_common_organization_path = staticmethod( + TermsOfServiceServiceClient.parse_common_organization_path + ) + common_project_path = staticmethod(TermsOfServiceServiceClient.common_project_path) + parse_common_project_path = staticmethod( + TermsOfServiceServiceClient.parse_common_project_path + ) + common_location_path = staticmethod( + TermsOfServiceServiceClient.common_location_path + ) + parse_common_location_path = staticmethod( + TermsOfServiceServiceClient.parse_common_location_path + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + TermsOfServiceServiceAsyncClient: The constructed client. 
+ """ + return TermsOfServiceServiceClient.from_service_account_info.__func__(TermsOfServiceServiceAsyncClient, info, *args, **kwargs) # type: ignore + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + TermsOfServiceServiceAsyncClient: The constructed client. + """ + return TermsOfServiceServiceClient.from_service_account_file.__func__(TermsOfServiceServiceAsyncClient, filename, *args, **kwargs) # type: ignore + + from_service_account_json = from_service_account_file + + @classmethod + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[ClientOptions] = None + ): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + return TermsOfServiceServiceClient.get_mtls_endpoint_and_cert_source(client_options) # type: ignore + + @property + def transport(self) -> TermsOfServiceServiceTransport: + """Returns the transport used by the client instance. + + Returns: + TermsOfServiceServiceTransport: The transport used by the client instance. + """ + return self._client.transport + + @property + def api_endpoint(self): + """Return the API endpoint used by the client instance. + + Returns: + str: The API endpoint used by the client instance. + """ + return self._client._api_endpoint + + @property + def universe_domain(self) -> str: + """Return the universe domain used by the client instance. + + Returns: + str: The universe domain used + by the client instance. 
+ """ + return self._client._universe_domain + + get_transport_class = functools.partial( + type(TermsOfServiceServiceClient).get_transport_class, + type(TermsOfServiceServiceClient), + ) + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[ + Union[ + str, + TermsOfServiceServiceTransport, + Callable[..., TermsOfServiceServiceTransport], + ] + ] = "grpc_asyncio", + client_options: Optional[ClientOptions] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the terms of service service async client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Optional[Union[str,TermsOfServiceServiceTransport,Callable[..., TermsOfServiceServiceTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport to use. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the TermsOfServiceServiceTransport constructor. + If set to None, a transport is chosen automatically. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): + Custom options for the client. + + 1. The ``api_endpoint`` property can be used to override the + default endpoint provided by the client when ``transport`` is + not explicitly provided. Only if this property is not set and + ``transport`` was not explicitly provided, the endpoint is + determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment + variable, which have one of the following values: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto-switch to the + default mTLS endpoint if client certificate is present; this is + the default value). + + 2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide a client certificate for mTLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + 3. The ``universe_domain`` property can be used to override the + default "googleapis.com" universe. Note that ``api_endpoint`` + property still takes precedence; and ``universe_domain`` is + currently not supported for mTLS. + + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. 
+ """ + self._client = TermsOfServiceServiceClient( + credentials=credentials, + transport=transport, + client_options=client_options, + client_info=client_info, + ) + + async def get_terms_of_service( + self, + request: Optional[Union[termsofservice.GetTermsOfServiceRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> termsofservice.TermsOfService: + r"""Retrieves the ``TermsOfService`` associated with the provided + version. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.shopping import merchant_accounts_v1beta + + async def sample_get_terms_of_service(): + # Create a client + client = merchant_accounts_v1beta.TermsOfServiceServiceAsyncClient() + + # Initialize request argument(s) + request = merchant_accounts_v1beta.GetTermsOfServiceRequest( + name="name_value", + ) + + # Make the request + response = await client.get_terms_of_service(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.shopping.merchant_accounts_v1beta.types.GetTermsOfServiceRequest, dict]]): + The request object. Request message for the ``GetTermsOfService`` method. + name (:class:`str`): + Required. The resource name of the terms of service + version. Format: ``termsOfService/{version}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.shopping.merchant_accounts_v1beta.types.TermsOfService: + A TermsOfService. + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, termsofservice.GetTermsOfServiceRequest): + request = termsofservice.GetTermsOfServiceRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.get_terms_of_service + ] + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def retrieve_latest_terms_of_service( + self, + request: Optional[ + Union[termsofservice.RetrieveLatestTermsOfServiceRequest, dict] + ] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> termsofservice.TermsOfService: + r"""Retrieves the latest version of the ``TermsOfService`` for a + given ``kind`` and ``region_code``. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.shopping import merchant_accounts_v1beta + + async def sample_retrieve_latest_terms_of_service(): + # Create a client + client = merchant_accounts_v1beta.TermsOfServiceServiceAsyncClient() + + # Initialize request argument(s) + request = merchant_accounts_v1beta.RetrieveLatestTermsOfServiceRequest( + ) + + # Make the request + response = await client.retrieve_latest_terms_of_service(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.shopping.merchant_accounts_v1beta.types.RetrieveLatestTermsOfServiceRequest, dict]]): + The request object. Request message for the ``RetrieveLatestTermsOfService`` + method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.shopping.merchant_accounts_v1beta.types.TermsOfService: + A TermsOfService. + """ + # Create or coerce a protobuf request object. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, termsofservice.RetrieveLatestTermsOfServiceRequest): + request = termsofservice.RetrieveLatestTermsOfServiceRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.retrieve_latest_terms_of_service + ] + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def accept_terms_of_service( + self, + request: Optional[ + Union[termsofservice.AcceptTermsOfServiceRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Accepts a ``TermsOfService``. 
Executing this method requires + admin access. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.shopping import merchant_accounts_v1beta + + async def sample_accept_terms_of_service(): + # Create a client + client = merchant_accounts_v1beta.TermsOfServiceServiceAsyncClient() + + # Initialize request argument(s) + request = merchant_accounts_v1beta.AcceptTermsOfServiceRequest( + name="name_value", + account="account_value", + region_code="region_code_value", + ) + + # Make the request + await client.accept_terms_of_service(request=request) + + Args: + request (Optional[Union[google.shopping.merchant_accounts_v1beta.types.AcceptTermsOfServiceRequest, dict]]): + The request object. Request message for the ``AcceptTermsOfService`` method. + name (:class:`str`): + Required. The resource name of the terms of service + version. Format: ``termsOfService/{version}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, termsofservice.AcceptTermsOfServiceRequest): + request = termsofservice.AcceptTermsOfServiceRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.accept_terms_of_service + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. 
+ await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def __aenter__(self) -> "TermsOfServiceServiceAsyncClient": + return self + + async def __aexit__(self, exc_type, exc, tb): + await self.transport.close() + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("TermsOfServiceServiceAsyncClient",) diff --git a/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/terms_of_service_service/client.py b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/terms_of_service_service/client.py new file mode 100644 index 000000000000..6c1b4c48e2ed --- /dev/null +++ b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/terms_of_service_service/client.py @@ -0,0 +1,990 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +import os +import re +from typing import ( + Callable, + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, + cast, +) +import warnings + +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.shopping.merchant_accounts_v1beta import gapic_version as package_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + +from google.shopping.merchant_accounts_v1beta.types import ( + termsofservice, + termsofservicekind, +) + +from .transports.base import DEFAULT_CLIENT_INFO, TermsOfServiceServiceTransport +from .transports.grpc import TermsOfServiceServiceGrpcTransport +from .transports.grpc_asyncio import TermsOfServiceServiceGrpcAsyncIOTransport +from .transports.rest import TermsOfServiceServiceRestTransport + + +class TermsOfServiceServiceClientMeta(type): + """Metaclass for the TermsOfServiceService client. + + This provides class-level methods for building and retrieving + support objects (e.g. transport) without polluting the client instance + objects. 
+ """ + + _transport_registry = ( + OrderedDict() + ) # type: Dict[str, Type[TermsOfServiceServiceTransport]] + _transport_registry["grpc"] = TermsOfServiceServiceGrpcTransport + _transport_registry["grpc_asyncio"] = TermsOfServiceServiceGrpcAsyncIOTransport + _transport_registry["rest"] = TermsOfServiceServiceRestTransport + + def get_transport_class( + cls, + label: Optional[str] = None, + ) -> Type[TermsOfServiceServiceTransport]: + """Returns an appropriate transport class. + + Args: + label: The name of the desired transport. If none is + provided, then the first transport in the registry is used. + + Returns: + The transport class to use. + """ + # If a specific transport is requested, return that one. + if label: + return cls._transport_registry[label] + + # No transport is requested; return the default (that is, the first one + # in the dictionary). + return next(iter(cls._transport_registry.values())) + + +class TermsOfServiceServiceClient(metaclass=TermsOfServiceServiceClientMeta): + """Service to support ``TermsOfService`` API.""" + + @staticmethod + def _get_default_mtls_endpoint(api_endpoint): + """Converts api endpoint to mTLS endpoint. + + Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to + "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. + Args: + api_endpoint (Optional[str]): the api endpoint to convert. + Returns: + str: converted mTLS api endpoint. + """ + if not api_endpoint: + return api_endpoint + + mtls_endpoint_re = re.compile( + r"(?P[^.]+)(?P\.mtls)?(?P\.sandbox)?(?P\.googleapis\.com)?" + ) + + m = mtls_endpoint_re.match(api_endpoint) + name, mtls, sandbox, googledomain = m.groups() + if mtls or not googledomain: + return api_endpoint + + if sandbox: + return api_endpoint.replace( + "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" + ) + + return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") + + # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. + DEFAULT_ENDPOINT = "merchantapi.googleapis.com" + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore + DEFAULT_ENDPOINT + ) + + _DEFAULT_ENDPOINT_TEMPLATE = "merchantapi.{UNIVERSE_DOMAIN}" + _DEFAULT_UNIVERSE = "googleapis.com" + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + TermsOfServiceServiceClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + TermsOfServiceServiceClient: The constructed client. 
+ """ + credentials = service_account.Credentials.from_service_account_file(filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> TermsOfServiceServiceTransport: + """Returns the transport used by the client instance. + + Returns: + TermsOfServiceServiceTransport: The transport used by the client + instance. + """ + return self._transport + + @staticmethod + def account_path( + account: str, + ) -> str: + """Returns a fully-qualified account string.""" + return "accounts/{account}".format( + account=account, + ) + + @staticmethod + def parse_account_path(path: str) -> Dict[str, str]: + """Parses a account path into its component segments.""" + m = re.match(r"^accounts/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def terms_of_service_path( + version: str, + ) -> str: + """Returns a fully-qualified terms_of_service string.""" + return "termsOfService/{version}".format( + version=version, + ) + + @staticmethod + def parse_terms_of_service_path(path: str) -> Dict[str, str]: + """Parses a terms_of_service path into its component segments.""" + m = re.match(r"^termsOfService/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_billing_account_path( + billing_account: str, + ) -> str: + """Returns a fully-qualified billing_account string.""" + return "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + + @staticmethod + def parse_common_billing_account_path(path: str) -> Dict[str, str]: + """Parse a billing_account path into its component segments.""" + m = re.match(r"^billingAccounts/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_folder_path( + folder: str, + ) -> str: + """Returns a fully-qualified folder string.""" + return "folders/{folder}".format( + folder=folder, + ) + + @staticmethod + def parse_common_folder_path(path: str) -> Dict[str, str]: + """Parse a folder path into its component segments.""" + m = re.match(r"^folders/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_organization_path( + organization: str, + ) -> str: + """Returns a fully-qualified organization string.""" + return "organizations/{organization}".format( + organization=organization, + ) + + @staticmethod + def parse_common_organization_path(path: str) -> Dict[str, str]: + """Parse a organization path into its component segments.""" + m = re.match(r"^organizations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_project_path( + project: str, + ) -> str: + """Returns a fully-qualified project string.""" + return "projects/{project}".format( + project=project, + ) + + @staticmethod + def parse_common_project_path(path: str) -> Dict[str, str]: + """Parse a project path into its component segments.""" + m = re.match(r"^projects/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_location_path( + project: str, + location: str, + ) -> str: + """Returns a fully-qualified location string.""" + return "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) + + @staticmethod + def parse_common_location_path(path: str) -> Dict[str, str]: + """Parse a location path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @classmethod + def get_mtls_endpoint_and_cert_source( 
+ cls, client_options: Optional[client_options_lib.ClientOptions] = None + ): + """Deprecated. Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + + warnings.warn( + "get_mtls_endpoint_and_cert_source is deprecated. Use the api_endpoint property instead.", + DeprecationWarning, + ) + if client_options is None: + client_options = client_options_lib.ClientOptions() + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_client_cert not in ("true", "false"): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError( + "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Figure out the client cert source to use. + client_cert_source = None + if use_client_cert == "true": + if client_options.client_cert_source: + client_cert_source = client_options.client_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + + # Figure out which api endpoint to use. + if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + elif use_mtls_endpoint == "always" or ( + use_mtls_endpoint == "auto" and client_cert_source + ): + api_endpoint = cls.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = cls.DEFAULT_ENDPOINT + + return api_endpoint, client_cert_source + + @staticmethod + def _read_environment_variables(): + """Returns the environment variables used by the client. + + Returns: + Tuple[bool, str, str]: returns the GOOGLE_API_USE_CLIENT_CERTIFICATE, + GOOGLE_API_USE_MTLS_ENDPOINT, and GOOGLE_CLOUD_UNIVERSE_DOMAIN environment variables. + + Raises: + ValueError: If GOOGLE_API_USE_CLIENT_CERTIFICATE is not + any of ["true", "false"]. + google.auth.exceptions.MutualTLSChannelError: If GOOGLE_API_USE_MTLS_ENDPOINT + is not any of ["auto", "never", "always"]. 
+ """ + use_client_cert = os.getenv( + "GOOGLE_API_USE_CLIENT_CERTIFICATE", "false" + ).lower() + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto").lower() + universe_domain_env = os.getenv("GOOGLE_CLOUD_UNIVERSE_DOMAIN") + if use_client_cert not in ("true", "false"): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError( + "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + return use_client_cert == "true", use_mtls_endpoint, universe_domain_env + + @staticmethod + def _get_client_cert_source(provided_cert_source, use_cert_flag): + """Return the client cert source to be used by the client. + + Args: + provided_cert_source (bytes): The client certificate source provided. + use_cert_flag (bool): A flag indicating whether to use the client certificate. + + Returns: + bytes or None: The client cert source to be used by the client. + """ + client_cert_source = None + if use_cert_flag: + if provided_cert_source: + client_cert_source = provided_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + return client_cert_source + + @staticmethod + def _get_api_endpoint( + api_override, client_cert_source, universe_domain, use_mtls_endpoint + ): + """Return the API endpoint used by the client. + + Args: + api_override (str): The API endpoint override. If specified, this is always + the return value of this function and the other arguments are not used. + client_cert_source (bytes): The client certificate source used by the client. + universe_domain (str): The universe domain used by the client. + use_mtls_endpoint (str): How to use the mTLS endpoint, which depends also on the other parameters. + Possible values are "always", "auto", or "never". + + Returns: + str: The API endpoint to be used by the client. + """ + if api_override is not None: + api_endpoint = api_override + elif use_mtls_endpoint == "always" or ( + use_mtls_endpoint == "auto" and client_cert_source + ): + _default_universe = TermsOfServiceServiceClient._DEFAULT_UNIVERSE + if universe_domain != _default_universe: + raise MutualTLSChannelError( + f"mTLS is not supported in any universe other than {_default_universe}." + ) + api_endpoint = TermsOfServiceServiceClient.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = ( + TermsOfServiceServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=universe_domain + ) + ) + return api_endpoint + + @staticmethod + def _get_universe_domain( + client_universe_domain: Optional[str], universe_domain_env: Optional[str] + ) -> str: + """Return the universe domain used by the client. + + Args: + client_universe_domain (Optional[str]): The universe domain configured via the client options. + universe_domain_env (Optional[str]): The universe domain configured via the "GOOGLE_CLOUD_UNIVERSE_DOMAIN" environment variable. + + Returns: + str: The universe domain to be used by the client. + + Raises: + ValueError: If the universe domain is an empty string. 
+ """ + universe_domain = TermsOfServiceServiceClient._DEFAULT_UNIVERSE + if client_universe_domain is not None: + universe_domain = client_universe_domain + elif universe_domain_env is not None: + universe_domain = universe_domain_env + if len(universe_domain.strip()) == 0: + raise ValueError("Universe Domain cannot be an empty string.") + return universe_domain + + @staticmethod + def _compare_universes( + client_universe: str, credentials: ga_credentials.Credentials + ) -> bool: + """Returns True iff the universe domains used by the client and credentials match. + + Args: + client_universe (str): The universe domain configured via the client options. + credentials (ga_credentials.Credentials): The credentials being used in the client. + + Returns: + bool: True iff client_universe matches the universe in credentials. + + Raises: + ValueError: when client_universe does not match the universe in credentials. + """ + + default_universe = TermsOfServiceServiceClient._DEFAULT_UNIVERSE + credentials_universe = getattr(credentials, "universe_domain", default_universe) + + if client_universe != credentials_universe: + raise ValueError( + "The configured universe domain " + f"({client_universe}) does not match the universe domain " + f"found in the credentials ({credentials_universe}). " + "If you haven't configured the universe domain explicitly, " + f"`{default_universe}` is the default." + ) + return True + + def _validate_universe_domain(self): + """Validates client's and credentials' universe domains are consistent. + + Returns: + bool: True iff the configured universe domain is valid. + + Raises: + ValueError: If the configured universe domain is not valid. + """ + self._is_universe_domain_valid = ( + self._is_universe_domain_valid + or TermsOfServiceServiceClient._compare_universes( + self.universe_domain, self.transport._credentials + ) + ) + return self._is_universe_domain_valid + + @property + def api_endpoint(self): + """Return the API endpoint used by the client instance. + + Returns: + str: The API endpoint used by the client instance. + """ + return self._api_endpoint + + @property + def universe_domain(self) -> str: + """Return the universe domain used by the client instance. + + Returns: + str: The universe domain used by the client instance. + """ + return self._universe_domain + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[ + Union[ + str, + TermsOfServiceServiceTransport, + Callable[..., TermsOfServiceServiceTransport], + ] + ] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the terms of service service client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Optional[Union[str,TermsOfServiceServiceTransport,Callable[..., TermsOfServiceServiceTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the TermsOfServiceServiceTransport constructor. + If set to None, a transport is chosen automatically. 
+ client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): + Custom options for the client. + + 1. The ``api_endpoint`` property can be used to override the + default endpoint provided by the client when ``transport`` is + not explicitly provided. Only if this property is not set and + ``transport`` was not explicitly provided, the endpoint is + determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment + variable, which have one of the following values: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto-switch to the + default mTLS endpoint if client certificate is present; this is + the default value). + + 2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide a client certificate for mTLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + 3. The ``universe_domain`` property can be used to override the + default "googleapis.com" universe. Note that the ``api_endpoint`` + property still takes precedence; and ``universe_domain`` is + currently not supported for mTLS. + + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + """ + self._client_options = client_options + if isinstance(self._client_options, dict): + self._client_options = client_options_lib.from_dict(self._client_options) + if self._client_options is None: + self._client_options = client_options_lib.ClientOptions() + self._client_options = cast( + client_options_lib.ClientOptions, self._client_options + ) + + universe_domain_opt = getattr(self._client_options, "universe_domain", None) + + ( + self._use_client_cert, + self._use_mtls_endpoint, + self._universe_domain_env, + ) = TermsOfServiceServiceClient._read_environment_variables() + self._client_cert_source = TermsOfServiceServiceClient._get_client_cert_source( + self._client_options.client_cert_source, self._use_client_cert + ) + self._universe_domain = TermsOfServiceServiceClient._get_universe_domain( + universe_domain_opt, self._universe_domain_env + ) + self._api_endpoint = None # updated below, depending on `transport` + + # Initialize the universe domain validation. + self._is_universe_domain_valid = False + + api_key_value = getattr(self._client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError( + "client_options.api_key and credentials are mutually exclusive" + ) + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + transport_provided = isinstance(transport, TermsOfServiceServiceTransport) + if transport_provided: + # transport is a TermsOfServiceServiceTransport instance. + if credentials or self._client_options.credentials_file or api_key_value: + raise ValueError( + "When providing a transport instance, " + "provide its credentials directly." 
+ ) + if self._client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." + ) + self._transport = cast(TermsOfServiceServiceTransport, transport) + self._api_endpoint = self._transport.host + + self._api_endpoint = ( + self._api_endpoint + or TermsOfServiceServiceClient._get_api_endpoint( + self._client_options.api_endpoint, + self._client_cert_source, + self._universe_domain, + self._use_mtls_endpoint, + ) + ) + + if not transport_provided: + import google.auth._default # type: ignore + + if api_key_value and hasattr( + google.auth._default, "get_api_key_credentials" + ): + credentials = google.auth._default.get_api_key_credentials( + api_key_value + ) + + transport_init: Union[ + Type[TermsOfServiceServiceTransport], + Callable[..., TermsOfServiceServiceTransport], + ] = ( + type(self).get_transport_class(transport) + if isinstance(transport, str) or transport is None + else cast(Callable[..., TermsOfServiceServiceTransport], transport) + ) + # initialize with the provided callable or the passed in class + self._transport = transport_init( + credentials=credentials, + credentials_file=self._client_options.credentials_file, + host=self._api_endpoint, + scopes=self._client_options.scopes, + client_cert_source_for_mtls=self._client_cert_source, + quota_project_id=self._client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + api_audience=self._client_options.api_audience, + ) + + def get_terms_of_service( + self, + request: Optional[Union[termsofservice.GetTermsOfServiceRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> termsofservice.TermsOfService: + r"""Retrieves the ``TermsOfService`` associated with the provided + version. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.shopping import merchant_accounts_v1beta + + def sample_get_terms_of_service(): + # Create a client + client = merchant_accounts_v1beta.TermsOfServiceServiceClient() + + # Initialize request argument(s) + request = merchant_accounts_v1beta.GetTermsOfServiceRequest( + name="name_value", + ) + + # Make the request + response = client.get_terms_of_service(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.shopping.merchant_accounts_v1beta.types.GetTermsOfServiceRequest, dict]): + The request object. Request message for the ``GetTermsOfService`` method. + name (str): + Required. The resource name of the terms of service + version. Format: ``termsOfService/{version}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
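As a usage note on the flattened ``name`` argument described above (illustrative only; the version number ``132`` is a placeholder), the method accepts either a request object or the flattened field, but not both at once:

```python
from google.shopping import merchant_accounts_v1beta

client = merchant_accounts_v1beta.TermsOfServiceServiceClient()

# Flattened form: the client builds the GetTermsOfServiceRequest internally.
tos = client.get_terms_of_service(name="termsOfService/132")

# Equivalent request-object form.
request = merchant_accounts_v1beta.GetTermsOfServiceRequest(name="termsOfService/132")
tos = client.get_terms_of_service(request=request)

# Supplying both `request` and `name` raises ValueError, as enforced below.
```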
+ + Returns: + google.shopping.merchant_accounts_v1beta.types.TermsOfService: + A TermsOfService. + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, termsofservice.GetTermsOfServiceRequest): + request = termsofservice.GetTermsOfServiceRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_terms_of_service] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def retrieve_latest_terms_of_service( + self, + request: Optional[ + Union[termsofservice.RetrieveLatestTermsOfServiceRequest, dict] + ] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> termsofservice.TermsOfService: + r"""Retrieves the latest version of the ``TermsOfService`` for a + given ``kind`` and ``region_code``. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.shopping import merchant_accounts_v1beta + + def sample_retrieve_latest_terms_of_service(): + # Create a client + client = merchant_accounts_v1beta.TermsOfServiceServiceClient() + + # Initialize request argument(s) + request = merchant_accounts_v1beta.RetrieveLatestTermsOfServiceRequest( + ) + + # Make the request + response = client.retrieve_latest_terms_of_service(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.shopping.merchant_accounts_v1beta.types.RetrieveLatestTermsOfServiceRequest, dict]): + The request object. Request message for the ``RetrieveLatestTermsOfService`` + method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.shopping.merchant_accounts_v1beta.types.TermsOfService: + A TermsOfService. + """ + # Create or coerce a protobuf request object. 
+ # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, termsofservice.RetrieveLatestTermsOfServiceRequest): + request = termsofservice.RetrieveLatestTermsOfServiceRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.retrieve_latest_terms_of_service + ] + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def accept_terms_of_service( + self, + request: Optional[ + Union[termsofservice.AcceptTermsOfServiceRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Accepts a ``TermsOfService``. Executing this method requires + admin access. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.shopping import merchant_accounts_v1beta + + def sample_accept_terms_of_service(): + # Create a client + client = merchant_accounts_v1beta.TermsOfServiceServiceClient() + + # Initialize request argument(s) + request = merchant_accounts_v1beta.AcceptTermsOfServiceRequest( + name="name_value", + account="account_value", + region_code="region_code_value", + ) + + # Make the request + client.accept_terms_of_service(request=request) + + Args: + request (Union[google.shopping.merchant_accounts_v1beta.types.AcceptTermsOfServiceRequest, dict]): + The request object. Request message for the ``AcceptTermsOfService`` method. + name (str): + Required. The resource name of the terms of service + version. Format: ``termsOfService/{version}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, termsofservice.AcceptTermsOfServiceRequest): + request = termsofservice.AcceptTermsOfServiceRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.accept_terms_of_service] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def __enter__(self) -> "TermsOfServiceServiceClient": + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! + """ + self.transport.close() + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("TermsOfServiceServiceClient",) diff --git a/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/terms_of_service_service/transports/__init__.py b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/terms_of_service_service/transports/__init__.py new file mode 100644 index 000000000000..9eda608b7ced --- /dev/null +++ b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/terms_of_service_service/transports/__init__.py @@ -0,0 +1,41 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +from typing import Dict, Type + +from .base import TermsOfServiceServiceTransport +from .grpc import TermsOfServiceServiceGrpcTransport +from .grpc_asyncio import TermsOfServiceServiceGrpcAsyncIOTransport +from .rest import ( + TermsOfServiceServiceRestInterceptor, + TermsOfServiceServiceRestTransport, +) + +# Compile a registry of transports. 
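Because the client defines ``__enter__``/``__exit__`` (see above), it can be used as a context manager so its transport is closed deterministically, and the transport itself can be selected by name from the registry compiled just below. A brief sketch, assuming Application Default Credentials:

```python
from google.shopping import merchant_accounts_v1beta

# "rest" selects the HTTP/JSON transport; "grpc" is the default for this client.
with merchant_accounts_v1beta.TermsOfServiceServiceClient(transport="rest") as client:
    request = merchant_accounts_v1beta.RetrieveLatestTermsOfServiceRequest()
    print(client.retrieve_latest_terms_of_service(request=request))
# Leaving the block closes the underlying transport, so a transport shared with
# other clients should not be managed this way.
```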
+_transport_registry = ( + OrderedDict() +) # type: Dict[str, Type[TermsOfServiceServiceTransport]] +_transport_registry["grpc"] = TermsOfServiceServiceGrpcTransport +_transport_registry["grpc_asyncio"] = TermsOfServiceServiceGrpcAsyncIOTransport +_transport_registry["rest"] = TermsOfServiceServiceRestTransport + +__all__ = ( + "TermsOfServiceServiceTransport", + "TermsOfServiceServiceGrpcTransport", + "TermsOfServiceServiceGrpcAsyncIOTransport", + "TermsOfServiceServiceRestTransport", + "TermsOfServiceServiceRestInterceptor", +) diff --git a/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/terms_of_service_service/transports/base.py b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/terms_of_service_service/transports/base.py new file mode 100644 index 000000000000..643e86a6ac96 --- /dev/null +++ b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/terms_of_service_service/transports/base.py @@ -0,0 +1,188 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import abc +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union + +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore +from google.protobuf import empty_pb2 # type: ignore + +from google.shopping.merchant_accounts_v1beta import gapic_version as package_version +from google.shopping.merchant_accounts_v1beta.types import termsofservice + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +class TermsOfServiceServiceTransport(abc.ABC): + """Abstract transport class for TermsOfServiceService.""" + + AUTH_SCOPES = ("https://www.googleapis.com/auth/content",) + + DEFAULT_HOST: str = "merchantapi.googleapis.com" + + def __init__( + self, + *, + host: str = DEFAULT_HOST, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + **kwargs, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'merchantapi.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. 
These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A list of scopes. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + """ + + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} + + # Save the scopes. + self._scopes = scopes + + # If no credentials are provided, then determine the appropriate + # defaults. + if credentials and credentials_file: + raise core_exceptions.DuplicateCredentialArgs( + "'credentials_file' and 'credentials' are mutually exclusive" + ) + + if credentials_file is not None: + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, **scopes_kwargs, quota_project_id=quota_project_id + ) + elif credentials is None: + credentials, _ = google.auth.default( + **scopes_kwargs, quota_project_id=quota_project_id + ) + # Don't apply audience if the credentials file passed from user. + if hasattr(credentials, "with_gdch_audience"): + credentials = credentials.with_gdch_audience( + api_audience if api_audience else host + ) + + # If the credentials are service account credentials, then always try to use self signed JWT. + if ( + always_use_jwt_access + and isinstance(credentials, service_account.Credentials) + and hasattr(service_account.Credentials, "with_always_use_jwt_access") + ): + credentials = credentials.with_always_use_jwt_access(True) + + # Save the credentials. + self._credentials = credentials + + # Save the hostname. Default to port 443 (HTTPS) if none is specified. + if ":" not in host: + host += ":443" + self._host = host + + @property + def host(self): + return self._host + + def _prep_wrapped_messages(self, client_info): + # Precompute the wrapped methods. + self._wrapped_methods = { + self.get_terms_of_service: gapic_v1.method.wrap_method( + self.get_terms_of_service, + default_timeout=None, + client_info=client_info, + ), + self.retrieve_latest_terms_of_service: gapic_v1.method.wrap_method( + self.retrieve_latest_terms_of_service, + default_timeout=None, + client_info=client_info, + ), + self.accept_terms_of_service: gapic_v1.method.wrap_method( + self.accept_terms_of_service, + default_timeout=None, + client_info=client_info, + ), + } + + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! 
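The constructor above resolves credentials in a fixed order: explicit ``credentials``, then ``credentials_file``, then ``google.auth.default()``; passing the first two together raises ``DuplicateCredentialArgs``. A hedged sketch of the explicit path (the key file name is a placeholder):

```python
from google.oauth2 import service_account  # type: ignore

from google.shopping import merchant_accounts_v1beta

# Explicit service-account credentials, scoped to the Content API scope that
# this transport also uses as its default AUTH_SCOPES.
credentials = service_account.Credentials.from_service_account_file(
    "merchant-service-account.json",  # placeholder path
    scopes=["https://www.googleapis.com/auth/content"],
)
client = merchant_accounts_v1beta.TermsOfServiceServiceClient(credentials=credentials)
print(client.transport.host)  # merchantapi.googleapis.com:443 (port added by the transport)
```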
+ """ + raise NotImplementedError() + + @property + def get_terms_of_service( + self, + ) -> Callable[ + [termsofservice.GetTermsOfServiceRequest], + Union[termsofservice.TermsOfService, Awaitable[termsofservice.TermsOfService]], + ]: + raise NotImplementedError() + + @property + def retrieve_latest_terms_of_service( + self, + ) -> Callable[ + [termsofservice.RetrieveLatestTermsOfServiceRequest], + Union[termsofservice.TermsOfService, Awaitable[termsofservice.TermsOfService]], + ]: + raise NotImplementedError() + + @property + def accept_terms_of_service( + self, + ) -> Callable[ + [termsofservice.AcceptTermsOfServiceRequest], + Union[empty_pb2.Empty, Awaitable[empty_pb2.Empty]], + ]: + raise NotImplementedError() + + @property + def kind(self) -> str: + raise NotImplementedError() + + +__all__ = ("TermsOfServiceServiceTransport",) diff --git a/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/terms_of_service_service/transports/grpc.py b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/terms_of_service_service/transports/grpc.py new file mode 100644 index 000000000000..3a0fa7a82fb2 --- /dev/null +++ b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/terms_of_service_service/transports/grpc.py @@ -0,0 +1,334 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, grpc_helpers +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.protobuf import empty_pb2 # type: ignore +import grpc # type: ignore + +from google.shopping.merchant_accounts_v1beta.types import termsofservice + +from .base import DEFAULT_CLIENT_INFO, TermsOfServiceServiceTransport + + +class TermsOfServiceServiceGrpcTransport(TermsOfServiceServiceTransport): + """gRPC backend transport for TermsOfServiceService. + + Service to support ``TermsOfService`` API. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. 
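As the constructor documented below notes, this transport also accepts a ready-made ``grpc.Channel``, in which case any credentials passed alongside it are ignored. A rough sketch using the transport's own ``create_channel`` helper, assuming Application Default Credentials:

```python
from google.shopping import merchant_accounts_v1beta
from google.shopping.merchant_accounts_v1beta.services.terms_of_service_service.transports import (
    TermsOfServiceServiceGrpcTransport,
)

# create_channel applies the default host, scopes, and credentials resolution;
# the resulting channel is then handed to the transport and the client.
channel = TermsOfServiceServiceGrpcTransport.create_channel("merchantapi.googleapis.com")
transport = TermsOfServiceServiceGrpcTransport(channel=channel)
client = merchant_accounts_v1beta.TermsOfServiceServiceClient(transport=transport)
```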
+ """ + + _stubs: Dict[str, Callable] + + def __init__( + self, + *, + host: str = "merchantapi.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'merchantapi.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if a ``channel`` instance is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if a ``channel`` instance is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if a ``channel`` instance is provided. + channel (Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]]): + A ``Channel`` instance through which to make calls, or a Callable + that constructs and returns one. If set to None, ``self.create_channel`` + is used to create the channel. If a Callable is given, it will be called + with the same arguments as used in ``self.create_channel``. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if a ``channel`` instance is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. 
+ google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if isinstance(channel, grpc.Channel): + # Ignore credentials if a channel was passed. + credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + # initialize with the provided callable or the default channel + channel_init = channel or type(self).create_channel + self._grpc_channel = channel_init( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @classmethod + def create_channel( + cls, + host: str = "merchantapi.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> grpc.Channel: + """Create and return a gRPC channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. 
+ kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + grpc.Channel: A gRPC channel object. + + Raises: + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + + return grpc_helpers.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs, + ) + + @property + def grpc_channel(self) -> grpc.Channel: + """Return the channel designed to connect to this service.""" + return self._grpc_channel + + @property + def get_terms_of_service( + self, + ) -> Callable[ + [termsofservice.GetTermsOfServiceRequest], termsofservice.TermsOfService + ]: + r"""Return a callable for the get terms of service method over gRPC. + + Retrieves the ``TermsOfService`` associated with the provided + version. + + Returns: + Callable[[~.GetTermsOfServiceRequest], + ~.TermsOfService]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_terms_of_service" not in self._stubs: + self._stubs["get_terms_of_service"] = self.grpc_channel.unary_unary( + "/google.shopping.merchant.accounts.v1beta.TermsOfServiceService/GetTermsOfService", + request_serializer=termsofservice.GetTermsOfServiceRequest.serialize, + response_deserializer=termsofservice.TermsOfService.deserialize, + ) + return self._stubs["get_terms_of_service"] + + @property + def retrieve_latest_terms_of_service( + self, + ) -> Callable[ + [termsofservice.RetrieveLatestTermsOfServiceRequest], + termsofservice.TermsOfService, + ]: + r"""Return a callable for the retrieve latest terms of + service method over gRPC. + + Retrieves the latest version of the ``TermsOfService`` for a + given ``kind`` and ``region_code``. + + Returns: + Callable[[~.RetrieveLatestTermsOfServiceRequest], + ~.TermsOfService]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "retrieve_latest_terms_of_service" not in self._stubs: + self._stubs[ + "retrieve_latest_terms_of_service" + ] = self.grpc_channel.unary_unary( + "/google.shopping.merchant.accounts.v1beta.TermsOfServiceService/RetrieveLatestTermsOfService", + request_serializer=termsofservice.RetrieveLatestTermsOfServiceRequest.serialize, + response_deserializer=termsofservice.TermsOfService.deserialize, + ) + return self._stubs["retrieve_latest_terms_of_service"] + + @property + def accept_terms_of_service( + self, + ) -> Callable[[termsofservice.AcceptTermsOfServiceRequest], empty_pb2.Empty]: + r"""Return a callable for the accept terms of service method over gRPC. + + Accepts a ``TermsOfService``. Executing this method requires + admin access. + + Returns: + Callable[[~.AcceptTermsOfServiceRequest], + ~.Empty]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "accept_terms_of_service" not in self._stubs: + self._stubs["accept_terms_of_service"] = self.grpc_channel.unary_unary( + "/google.shopping.merchant.accounts.v1beta.TermsOfServiceService/AcceptTermsOfService", + request_serializer=termsofservice.AcceptTermsOfServiceRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs["accept_terms_of_service"] + + def close(self): + self.grpc_channel.close() + + @property + def kind(self) -> str: + return "grpc" + + +__all__ = ("TermsOfServiceServiceGrpcTransport",) diff --git a/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/terms_of_service_service/transports/grpc_asyncio.py b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/terms_of_service_service/transports/grpc_asyncio.py new file mode 100644 index 000000000000..99017970d87c --- /dev/null +++ b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/terms_of_service_service/transports/grpc_asyncio.py @@ -0,0 +1,357 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1, grpc_helpers_async +from google.api_core import retry_async as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.protobuf import empty_pb2 # type: ignore +import grpc # type: ignore +from grpc.experimental import aio # type: ignore + +from google.shopping.merchant_accounts_v1beta.types import termsofservice + +from .base import DEFAULT_CLIENT_INFO, TermsOfServiceServiceTransport +from .grpc import TermsOfServiceServiceGrpcTransport + + +class TermsOfServiceServiceGrpcAsyncIOTransport(TermsOfServiceServiceTransport): + """gRPC AsyncIO backend transport for TermsOfServiceService. + + Service to support ``TermsOfService`` API. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + + _grpc_channel: aio.Channel + _stubs: Dict[str, Callable] = {} + + @classmethod + def create_channel( + cls, + host: str = "merchantapi.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> aio.Channel: + """Create and return a gRPC AsyncIO channel object. + Args: + host (Optional[str]): The host for the channel to use. 
+ credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + aio.Channel: A gRPC AsyncIO channel object. + """ + + return grpc_helpers_async.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs, + ) + + def __init__( + self, + *, + host: str = "merchantapi.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[Union[aio.Channel, Callable[..., aio.Channel]]] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'merchantapi.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if a ``channel`` instance is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if a ``channel`` instance is provided. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + channel (Optional[Union[aio.Channel, Callable[..., aio.Channel]]]): + A ``Channel`` instance through which to make calls, or a Callable + that constructs and returns one. If set to None, ``self.create_channel`` + is used to create the channel. If a Callable is given, it will be called + with the same arguments as used in ``self.create_channel``. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. 
It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if a ``channel`` instance is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if isinstance(channel, aio.Channel): + # Ignore credentials if a channel was passed. + credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + # initialize with the provided callable or the default channel + channel_init = channel or type(self).create_channel + self._grpc_channel = channel_init( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. 
This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @property + def grpc_channel(self) -> aio.Channel: + """Create the channel designed to connect to this service. + + This property caches on the instance; repeated calls return + the same channel. + """ + # Return the channel from cache. + return self._grpc_channel + + @property + def get_terms_of_service( + self, + ) -> Callable[ + [termsofservice.GetTermsOfServiceRequest], + Awaitable[termsofservice.TermsOfService], + ]: + r"""Return a callable for the get terms of service method over gRPC. + + Retrieves the ``TermsOfService`` associated with the provided + version. + + Returns: + Callable[[~.GetTermsOfServiceRequest], + Awaitable[~.TermsOfService]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_terms_of_service" not in self._stubs: + self._stubs["get_terms_of_service"] = self.grpc_channel.unary_unary( + "/google.shopping.merchant.accounts.v1beta.TermsOfServiceService/GetTermsOfService", + request_serializer=termsofservice.GetTermsOfServiceRequest.serialize, + response_deserializer=termsofservice.TermsOfService.deserialize, + ) + return self._stubs["get_terms_of_service"] + + @property + def retrieve_latest_terms_of_service( + self, + ) -> Callable[ + [termsofservice.RetrieveLatestTermsOfServiceRequest], + Awaitable[termsofservice.TermsOfService], + ]: + r"""Return a callable for the retrieve latest terms of + service method over gRPC. + + Retrieves the latest version of the ``TermsOfService`` for a + given ``kind`` and ``region_code``. + + Returns: + Callable[[~.RetrieveLatestTermsOfServiceRequest], + Awaitable[~.TermsOfService]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "retrieve_latest_terms_of_service" not in self._stubs: + self._stubs[ + "retrieve_latest_terms_of_service" + ] = self.grpc_channel.unary_unary( + "/google.shopping.merchant.accounts.v1beta.TermsOfServiceService/RetrieveLatestTermsOfService", + request_serializer=termsofservice.RetrieveLatestTermsOfServiceRequest.serialize, + response_deserializer=termsofservice.TermsOfService.deserialize, + ) + return self._stubs["retrieve_latest_terms_of_service"] + + @property + def accept_terms_of_service( + self, + ) -> Callable[ + [termsofservice.AcceptTermsOfServiceRequest], Awaitable[empty_pb2.Empty] + ]: + r"""Return a callable for the accept terms of service method over gRPC. + + Accepts a ``TermsOfService``. Executing this method requires + admin access. + + Returns: + Callable[[~.AcceptTermsOfServiceRequest], + Awaitable[~.Empty]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "accept_terms_of_service" not in self._stubs: + self._stubs["accept_terms_of_service"] = self.grpc_channel.unary_unary( + "/google.shopping.merchant.accounts.v1beta.TermsOfServiceService/AcceptTermsOfService", + request_serializer=termsofservice.AcceptTermsOfServiceRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs["accept_terms_of_service"] + + def _prep_wrapped_messages(self, client_info): + """Precompute the wrapped methods, overriding the base class method to use async wrappers.""" + self._wrapped_methods = { + self.get_terms_of_service: gapic_v1.method_async.wrap_method( + self.get_terms_of_service, + default_timeout=None, + client_info=client_info, + ), + self.retrieve_latest_terms_of_service: gapic_v1.method_async.wrap_method( + self.retrieve_latest_terms_of_service, + default_timeout=None, + client_info=client_info, + ), + self.accept_terms_of_service: gapic_v1.method_async.wrap_method( + self.accept_terms_of_service, + default_timeout=None, + client_info=client_info, + ), + } + + def close(self): + return self.grpc_channel.close() + + +__all__ = ("TermsOfServiceServiceGrpcAsyncIOTransport",) diff --git a/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/terms_of_service_service/transports/rest.py b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/terms_of_service_service/transports/rest.py new file mode 100644 index 000000000000..c2a44909985b --- /dev/null +++ b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/terms_of_service_service/transports/rest.py @@ -0,0 +1,531 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
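The AsyncIO transport above backs the asynchronous client generated alongside the synchronous one (assumed here to be exported as ``TermsOfServiceServiceAsyncClient``; the version string in the resource name is a placeholder). A minimal awaitable sketch:

```python
import asyncio

from google.shopping import merchant_accounts_v1beta


async def main() -> None:
    # The async client uses the gRPC AsyncIO transport by default.
    client = merchant_accounts_v1beta.TermsOfServiceServiceAsyncClient()
    tos = await client.get_terms_of_service(name="termsOfService/132")
    print(tos)


asyncio.run(main())
```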
+# + +import dataclasses +import json # type: ignore +import re +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, path_template, rest_helpers, rest_streaming +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.transport.requests import AuthorizedSession # type: ignore +from google.protobuf import json_format +import grpc # type: ignore +from requests import __version__ as requests_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + + +from google.protobuf import empty_pb2 # type: ignore + +from google.shopping.merchant_accounts_v1beta.types import termsofservice + +from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO +from .base import TermsOfServiceServiceTransport + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) + + +class TermsOfServiceServiceRestInterceptor: + """Interceptor for TermsOfServiceService. + + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. + Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the TermsOfServiceServiceRestTransport. + + .. code-block:: python + class MyCustomTermsOfServiceServiceInterceptor(TermsOfServiceServiceRestInterceptor): + def pre_accept_terms_of_service(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def pre_get_terms_of_service(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_terms_of_service(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_retrieve_latest_terms_of_service(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_retrieve_latest_terms_of_service(self, response): + logging.log(f"Received response: {response}") + return response + + transport = TermsOfServiceServiceRestTransport(interceptor=MyCustomTermsOfServiceServiceInterceptor()) + client = TermsOfServiceServiceClient(transport=transport) + + + """ + + def pre_accept_terms_of_service( + self, + request: termsofservice.AcceptTermsOfServiceRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[termsofservice.AcceptTermsOfServiceRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for accept_terms_of_service + + Override in a subclass to manipulate the request or metadata + before they are sent to the TermsOfServiceService server. 
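Building on the interceptor example in the class docstring above (illustrative only; the header name is made up), a subclass can also attach extra request metadata before a call is sent:

```python
from google.shopping import merchant_accounts_v1beta
from google.shopping.merchant_accounts_v1beta.services.terms_of_service_service.transports import (
    TermsOfServiceServiceRestInterceptor,
    TermsOfServiceServiceRestTransport,
)


class MetadataAddingInterceptor(TermsOfServiceServiceRestInterceptor):
    def pre_get_terms_of_service(self, request, metadata):
        # Append a custom header to every GetTermsOfService call.
        return request, tuple(metadata) + (("x-example-trace", "tos-demo"),)


transport = TermsOfServiceServiceRestTransport(interceptor=MetadataAddingInterceptor())
client = merchant_accounts_v1beta.TermsOfServiceServiceClient(transport=transport)
```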
+ """ + return request, metadata + + def pre_get_terms_of_service( + self, + request: termsofservice.GetTermsOfServiceRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[termsofservice.GetTermsOfServiceRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_terms_of_service + + Override in a subclass to manipulate the request or metadata + before they are sent to the TermsOfServiceService server. + """ + return request, metadata + + def post_get_terms_of_service( + self, response: termsofservice.TermsOfService + ) -> termsofservice.TermsOfService: + """Post-rpc interceptor for get_terms_of_service + + Override in a subclass to manipulate the response + after it is returned by the TermsOfServiceService server but before + it is returned to user code. + """ + return response + + def pre_retrieve_latest_terms_of_service( + self, + request: termsofservice.RetrieveLatestTermsOfServiceRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + termsofservice.RetrieveLatestTermsOfServiceRequest, Sequence[Tuple[str, str]] + ]: + """Pre-rpc interceptor for retrieve_latest_terms_of_service + + Override in a subclass to manipulate the request or metadata + before they are sent to the TermsOfServiceService server. + """ + return request, metadata + + def post_retrieve_latest_terms_of_service( + self, response: termsofservice.TermsOfService + ) -> termsofservice.TermsOfService: + """Post-rpc interceptor for retrieve_latest_terms_of_service + + Override in a subclass to manipulate the response + after it is returned by the TermsOfServiceService server but before + it is returned to user code. + """ + return response + + +@dataclasses.dataclass +class TermsOfServiceServiceRestStub: + _session: AuthorizedSession + _host: str + _interceptor: TermsOfServiceServiceRestInterceptor + + +class TermsOfServiceServiceRestTransport(TermsOfServiceServiceTransport): + """REST backend transport for TermsOfServiceService. + + Service to support ``TermsOfService`` API. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends JSON representations of protocol buffers over HTTP/1.1 + + """ + + def __init__( + self, + *, + host: str = "merchantapi.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", + interceptor: Optional[TermsOfServiceServiceRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'merchantapi.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. 
This argument is
+ ignored if ``channel`` is provided.
+ client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client
+ certificate to configure mutual TLS HTTP channel. It is ignored
+ if ``channel`` is provided.
+ quota_project_id (Optional[str]): An optional project to use for billing
+ and quota.
+ client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+ The client info used to send a user-agent string along with
+ API requests. If ``None``, then default info will be used.
+ Generally, you only need to set this if you are developing
+ your own client library.
+ always_use_jwt_access (Optional[bool]): Whether self signed JWT should
+ be used for service account credentials.
+ url_scheme: the protocol scheme for the API endpoint. Normally
+ "https", but for testing or local servers,
+ "http" can be specified.
+ """
+ # Run the base constructor
+ # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc.
+ # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the
+ # credentials object
+ maybe_url_match = re.match("^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host)
+ if maybe_url_match is None:
+ raise ValueError(
+ f"Unexpected hostname structure: {host}"
+ ) # pragma: NO COVER
+
+ url_match_items = maybe_url_match.groupdict()
+
+ host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host
+
+ super().__init__(
+ host=host,
+ credentials=credentials,
+ client_info=client_info,
+ always_use_jwt_access=always_use_jwt_access,
+ api_audience=api_audience,
+ )
+ self._session = AuthorizedSession(
+ self._credentials, default_host=self.DEFAULT_HOST
+ )
+ if client_cert_source_for_mtls:
+ self._session.configure_mtls_channel(client_cert_source_for_mtls)
+ self._interceptor = interceptor or TermsOfServiceServiceRestInterceptor()
+ self._prep_wrapped_messages(client_info)
+
+ class _AcceptTermsOfService(TermsOfServiceServiceRestStub):
+ def __hash__(self):
+ return hash("AcceptTermsOfService")
+
+ __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {
+ "account": "",
+ "regionCode": "",
+ }
+
+ @classmethod
+ def _get_unset_required_fields(cls, message_dict):
+ return {
+ k: v
+ for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items()
+ if k not in message_dict
+ }
+
+ def __call__(
+ self,
+ request: termsofservice.AcceptTermsOfServiceRequest,
+ *,
+ retry: OptionalRetry = gapic_v1.method.DEFAULT,
+ timeout: Optional[float] = None,
+ metadata: Sequence[Tuple[str, str]] = (),
+ ):
+ r"""Call the accept terms of service method over HTTP.
+
+ Args:
+ request (~.termsofservice.AcceptTermsOfServiceRequest):
+ The request object. Request message for the ``AcceptTermsOfService`` method.
+ retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ should be retried.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, str]]): Strings which should be
+ sent along with the request as metadata.
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/accounts/v1beta/{name=termsOfService/*}:accept", + }, + ] + request, metadata = self._interceptor.pre_accept_terms_of_service( + request, metadata + ) + pb_request = termsofservice.AcceptTermsOfServiceRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + class _GetTermsOfService(TermsOfServiceServiceRestStub): + def __hash__(self): + return hash("GetTermsOfService") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: termsofservice.GetTermsOfServiceRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> termsofservice.TermsOfService: + r"""Call the get terms of service method over HTTP. + + Args: + request (~.termsofservice.GetTermsOfServiceRequest): + The request object. Request message for the ``GetTermsOfService`` method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.termsofservice.TermsOfService: + A ``TermsOfService``. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/accounts/v1beta/{name=termsOfService/*}", + }, + ] + request, metadata = self._interceptor.pre_get_terms_of_service( + request, metadata + ) + pb_request = termsofservice.GetTermsOfServiceRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = termsofservice.TermsOfService() + pb_resp = termsofservice.TermsOfService.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_terms_of_service(resp) + return resp + + class _RetrieveLatestTermsOfService(TermsOfServiceServiceRestStub): + def __hash__(self): + return hash("RetrieveLatestTermsOfService") + + def __call__( + self, + request: termsofservice.RetrieveLatestTermsOfServiceRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> termsofservice.TermsOfService: + r"""Call the retrieve latest terms of + service method over HTTP. + + Args: + request (~.termsofservice.RetrieveLatestTermsOfServiceRequest): + The request object. Request message for the ``RetrieveLatestTermsOfService`` + method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.termsofservice.TermsOfService: + A ``TermsOfService``. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/accounts/v1beta/termsOfService:retrieveLatest", + }, + ] + request, metadata = self._interceptor.pre_retrieve_latest_terms_of_service( + request, metadata + ) + pb_request = termsofservice.RetrieveLatestTermsOfServiceRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = termsofservice.TermsOfService() + pb_resp = termsofservice.TermsOfService.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_retrieve_latest_terms_of_service(resp) + return resp + + @property + def accept_terms_of_service( + self, + ) -> Callable[[termsofservice.AcceptTermsOfServiceRequest], empty_pb2.Empty]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._AcceptTermsOfService(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_terms_of_service( + self, + ) -> Callable[ + [termsofservice.GetTermsOfServiceRequest], termsofservice.TermsOfService + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._GetTermsOfService(self._session, self._host, self._interceptor) # type: ignore + + @property + def retrieve_latest_terms_of_service( + self, + ) -> Callable[ + [termsofservice.RetrieveLatestTermsOfServiceRequest], + termsofservice.TermsOfService, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._RetrieveLatestTermsOfService(self._session, self._host, self._interceptor) # type: ignore + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__ = ("TermsOfServiceServiceRestTransport",) diff --git a/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/user_service/__init__.py b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/user_service/__init__.py new file mode 100644 index 000000000000..6f8084df1f12 --- /dev/null +++ b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/user_service/__init__.py @@ -0,0 +1,22 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from .async_client import UserServiceAsyncClient +from .client import UserServiceClient + +__all__ = ( + "UserServiceClient", + "UserServiceAsyncClient", +) diff --git a/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/user_service/async_client.py b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/user_service/async_client.py new file mode 100644 index 000000000000..92f1f68df541 --- /dev/null +++ b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/user_service/async_client.py @@ -0,0 +1,830 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +import functools +import re +from typing import ( + Callable, + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, +) + +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry_async as retries +from google.api_core.client_options import ClientOptions +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.shopping.merchant_accounts_v1beta import gapic_version as package_version + +try: + OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.AsyncRetry, object, None] # type: ignore + +from google.protobuf import field_mask_pb2 # type: ignore + +from google.shopping.merchant_accounts_v1beta.services.user_service import pagers +from google.shopping.merchant_accounts_v1beta.types import accessright +from google.shopping.merchant_accounts_v1beta.types import user +from google.shopping.merchant_accounts_v1beta.types import user as gsma_user + +from .client import UserServiceClient +from .transports.base import DEFAULT_CLIENT_INFO, UserServiceTransport +from .transports.grpc_asyncio import UserServiceGrpcAsyncIOTransport + + +class UserServiceAsyncClient: + """Service to support user API.""" + + _client: UserServiceClient + + # Copy defaults from the synchronous client for use here. + # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. + DEFAULT_ENDPOINT = UserServiceClient.DEFAULT_ENDPOINT + DEFAULT_MTLS_ENDPOINT = UserServiceClient.DEFAULT_MTLS_ENDPOINT + _DEFAULT_ENDPOINT_TEMPLATE = UserServiceClient._DEFAULT_ENDPOINT_TEMPLATE + _DEFAULT_UNIVERSE = UserServiceClient._DEFAULT_UNIVERSE + + account_path = staticmethod(UserServiceClient.account_path) + parse_account_path = staticmethod(UserServiceClient.parse_account_path) + user_path = staticmethod(UserServiceClient.user_path) + parse_user_path = staticmethod(UserServiceClient.parse_user_path) + common_billing_account_path = staticmethod( + UserServiceClient.common_billing_account_path + ) + parse_common_billing_account_path = staticmethod( + UserServiceClient.parse_common_billing_account_path + ) + common_folder_path = staticmethod(UserServiceClient.common_folder_path) + parse_common_folder_path = staticmethod(UserServiceClient.parse_common_folder_path) + common_organization_path = staticmethod(UserServiceClient.common_organization_path) + parse_common_organization_path = staticmethod( + UserServiceClient.parse_common_organization_path + ) + common_project_path = staticmethod(UserServiceClient.common_project_path) + parse_common_project_path = staticmethod( + UserServiceClient.parse_common_project_path + ) + common_location_path = staticmethod(UserServiceClient.common_location_path) + parse_common_location_path = staticmethod( + UserServiceClient.parse_common_location_path + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + UserServiceAsyncClient: The constructed client. 
+ """ + return UserServiceClient.from_service_account_info.__func__(UserServiceAsyncClient, info, *args, **kwargs) # type: ignore + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + UserServiceAsyncClient: The constructed client. + """ + return UserServiceClient.from_service_account_file.__func__(UserServiceAsyncClient, filename, *args, **kwargs) # type: ignore + + from_service_account_json = from_service_account_file + + @classmethod + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[ClientOptions] = None + ): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + return UserServiceClient.get_mtls_endpoint_and_cert_source(client_options) # type: ignore + + @property + def transport(self) -> UserServiceTransport: + """Returns the transport used by the client instance. + + Returns: + UserServiceTransport: The transport used by the client instance. + """ + return self._client.transport + + @property + def api_endpoint(self): + """Return the API endpoint used by the client instance. + + Returns: + str: The API endpoint used by the client instance. + """ + return self._client._api_endpoint + + @property + def universe_domain(self) -> str: + """Return the universe domain used by the client instance. + + Returns: + str: The universe domain used + by the client instance. + """ + return self._client._universe_domain + + get_transport_class = functools.partial( + type(UserServiceClient).get_transport_class, type(UserServiceClient) + ) + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[ + Union[str, UserServiceTransport, Callable[..., UserServiceTransport]] + ] = "grpc_asyncio", + client_options: Optional[ClientOptions] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the user service async client. 
+ + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Optional[Union[str,UserServiceTransport,Callable[..., UserServiceTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport to use. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the UserServiceTransport constructor. + If set to None, a transport is chosen automatically. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): + Custom options for the client. + + 1. The ``api_endpoint`` property can be used to override the + default endpoint provided by the client when ``transport`` is + not explicitly provided. Only if this property is not set and + ``transport`` was not explicitly provided, the endpoint is + determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment + variable, which have one of the following values: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto-switch to the + default mTLS endpoint if client certificate is present; this is + the default value). + + 2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide a client certificate for mTLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + 3. The ``universe_domain`` property can be used to override the + default "googleapis.com" universe. Note that ``api_endpoint`` + property still takes precedence; and ``universe_domain`` is + currently not supported for mTLS. + + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + """ + self._client = UserServiceClient( + credentials=credentials, + transport=transport, + client_options=client_options, + client_info=client_info, + ) + + async def get_user( + self, + request: Optional[Union[user.GetUserRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> user.User: + r"""Retrieves a Merchant Center account user. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.shopping import merchant_accounts_v1beta + + async def sample_get_user(): + # Create a client + client = merchant_accounts_v1beta.UserServiceAsyncClient() + + # Initialize request argument(s) + request = merchant_accounts_v1beta.GetUserRequest( + name="name_value", + ) + + # Make the request + response = await client.get_user(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.shopping.merchant_accounts_v1beta.types.GetUserRequest, dict]]): + The request object. Request message for the ``GetUser`` method. + name (:class:`str`): + Required. The name of the user to retrieve. Format: + ``accounts/{account}/users/{email}`` + + It is also possible to retrieve the user corresponding + to the caller by using ``me`` rather than an email + address as in ``accounts/{account}/users/me``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.shopping.merchant_accounts_v1beta.types.User: + A + [user](https://support.google.com/merchants/answer/12160472). + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, user.GetUserRequest): + request = user.GetUserRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.get_user] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def create_user( + self, + request: Optional[Union[gsma_user.CreateUserRequest, dict]] = None, + *, + parent: Optional[str] = None, + user: Optional[gsma_user.User] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> gsma_user.User: + r"""Creates a Merchant Center account user. Executing + this method requires admin access. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.shopping import merchant_accounts_v1beta + + async def sample_create_user(): + # Create a client + client = merchant_accounts_v1beta.UserServiceAsyncClient() + + # Initialize request argument(s) + request = merchant_accounts_v1beta.CreateUserRequest( + parent="parent_value", + user_id="user_id_value", + ) + + # Make the request + response = await client.create_user(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.shopping.merchant_accounts_v1beta.types.CreateUserRequest, dict]]): + The request object. Request message for the ``CreateUser`` method. + parent (:class:`str`): + Required. The resource name of the account for which a + user will be created. Format: ``accounts/{account}`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + user (:class:`google.shopping.merchant_accounts_v1beta.types.User`): + Required. The user to create. + This corresponds to the ``user`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.shopping.merchant_accounts_v1beta.types.User: + A + [user](https://support.google.com/merchants/answer/12160472). + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, user]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, gsma_user.CreateUserRequest): + request = gsma_user.CreateUserRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if user is not None: + request.user = user + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.create_user + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def delete_user( + self, + request: Optional[Union[user.DeleteUserRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a Merchant Center account user. Executing + this method requires admin access. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.shopping import merchant_accounts_v1beta + + async def sample_delete_user(): + # Create a client + client = merchant_accounts_v1beta.UserServiceAsyncClient() + + # Initialize request argument(s) + request = merchant_accounts_v1beta.DeleteUserRequest( + name="name_value", + ) + + # Make the request + await client.delete_user(request=request) + + Args: + request (Optional[Union[google.shopping.merchant_accounts_v1beta.types.DeleteUserRequest, dict]]): + The request object. Request message for the ``DeleteUser`` method. + name (:class:`str`): + Required. The name of the user to delete. Format: + ``accounts/{account}/users/{email}`` + + It is also possible to delete the user corresponding to + the caller by using ``me`` rather than an email address + as in ``accounts/{account}/users/me``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, user.DeleteUserRequest): + request = user.DeleteUserRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.delete_user + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. 
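+ # (Illustrative note, added for clarity rather than emitted by the generator:
+ # DeleteUser maps to google.protobuf.Empty on the wire, so the awaited result
+ # is discarded and this coroutine returns None.)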
+ await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def update_user( + self, + request: Optional[Union[gsma_user.UpdateUserRequest, dict]] = None, + *, + user: Optional[gsma_user.User] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> gsma_user.User: + r"""Updates a Merchant Center account user. Executing + this method requires admin access. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.shopping import merchant_accounts_v1beta + + async def sample_update_user(): + # Create a client + client = merchant_accounts_v1beta.UserServiceAsyncClient() + + # Initialize request argument(s) + request = merchant_accounts_v1beta.UpdateUserRequest( + ) + + # Make the request + response = await client.update_user(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.shopping.merchant_accounts_v1beta.types.UpdateUserRequest, dict]]): + The request object. Request message for the ``UpdateUser`` method. + user (:class:`google.shopping.merchant_accounts_v1beta.types.User`): + Required. The new version of the user. + + Use ``me`` to refer to your own email address, for + example ``accounts/{account}/users/me``. + + This corresponds to the ``user`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): + Required. List of fields being + updated. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.shopping.merchant_accounts_v1beta.types.User: + A + [user](https://support.google.com/merchants/answer/12160472). + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([user, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, gsma_user.UpdateUserRequest): + request = gsma_user.UpdateUserRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
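+ # (Illustrative note, added for clarity rather than emitted by the generator:
+ # the x-goog-request-params routing header assembled below is built from
+ # request.user.name, so the ``user`` resource passed here is expected to
+ # carry its ``name`` field.)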
+ if user is not None: + request.user = user + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.update_user + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("user.name", request.user.name),) + ), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_users( + self, + request: Optional[Union[user.ListUsersRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListUsersAsyncPager: + r"""Lists all users of a Merchant Center account. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.shopping import merchant_accounts_v1beta + + async def sample_list_users(): + # Create a client + client = merchant_accounts_v1beta.UserServiceAsyncClient() + + # Initialize request argument(s) + request = merchant_accounts_v1beta.ListUsersRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_users(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.shopping.merchant_accounts_v1beta.types.ListUsersRequest, dict]]): + The request object. Request message for the ``ListUsers`` method. + parent (:class:`str`): + Required. The parent, which owns this collection of + users. Format: ``accounts/{account}`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.shopping.merchant_accounts_v1beta.services.user_service.pagers.ListUsersAsyncPager: + Response message for the ListUsers method. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
+ if not isinstance(request, user.ListUsersRequest): + request = user.ListUsersRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.list_users + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListUsersAsyncPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def __aenter__(self) -> "UserServiceAsyncClient": + return self + + async def __aexit__(self, exc_type, exc, tb): + await self.transport.close() + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("UserServiceAsyncClient",) diff --git a/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/user_service/client.py b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/user_service/client.py new file mode 100644 index 000000000000..f227484e6a86 --- /dev/null +++ b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/user_service/client.py @@ -0,0 +1,1251 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+#
+from collections import OrderedDict
+import os
+import re
+from typing import (
+ Callable,
+ Dict,
+ Mapping,
+ MutableMapping,
+ MutableSequence,
+ Optional,
+ Sequence,
+ Tuple,
+ Type,
+ Union,
+ cast,
+)
+import warnings
+
+from google.api_core import client_options as client_options_lib
+from google.api_core import exceptions as core_exceptions
+from google.api_core import gapic_v1
+from google.api_core import retry as retries
+from google.auth import credentials as ga_credentials # type: ignore
+from google.auth.exceptions import MutualTLSChannelError # type: ignore
+from google.auth.transport import mtls # type: ignore
+from google.auth.transport.grpc import SslCredentials # type: ignore
+from google.oauth2 import service_account # type: ignore
+
+from google.shopping.merchant_accounts_v1beta import gapic_version as package_version
+
+try:
+ OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None]
+except AttributeError: # pragma: NO COVER
+ OptionalRetry = Union[retries.Retry, object, None] # type: ignore
+
+from google.protobuf import field_mask_pb2 # type: ignore
+
+from google.shopping.merchant_accounts_v1beta.services.user_service import pagers
+from google.shopping.merchant_accounts_v1beta.types import accessright
+from google.shopping.merchant_accounts_v1beta.types import user
+from google.shopping.merchant_accounts_v1beta.types import user as gsma_user
+
+from .transports.base import DEFAULT_CLIENT_INFO, UserServiceTransport
+from .transports.grpc import UserServiceGrpcTransport
+from .transports.grpc_asyncio import UserServiceGrpcAsyncIOTransport
+from .transports.rest import UserServiceRestTransport
+
+
+class UserServiceClientMeta(type):
+ """Metaclass for the UserService client.
+
+ This provides class-level methods for building and retrieving
+ support objects (e.g. transport) without polluting the client instance
+ objects.
+ """
+
+ _transport_registry = OrderedDict() # type: Dict[str, Type[UserServiceTransport]]
+ _transport_registry["grpc"] = UserServiceGrpcTransport
+ _transport_registry["grpc_asyncio"] = UserServiceGrpcAsyncIOTransport
+ _transport_registry["rest"] = UserServiceRestTransport
+
+ def get_transport_class(
+ cls,
+ label: Optional[str] = None,
+ ) -> Type[UserServiceTransport]:
+ """Returns an appropriate transport class.
+
+ Args:
+ label: The name of the desired transport. If none is
+ provided, then the first transport in the registry is used.
+
+ Returns:
+ The transport class to use.
+ """
+ # If a specific transport is requested, return that one.
+ if label:
+ return cls._transport_registry[label]
+
+ # No transport is requested; return the default (that is, the first one
+ # in the dictionary).
+ return next(iter(cls._transport_registry.values()))
+
+
+class UserServiceClient(metaclass=UserServiceClientMeta):
+ """Service to support user API."""
+
+ @staticmethod
+ def _get_default_mtls_endpoint(api_endpoint):
+ """Converts api endpoint to mTLS endpoint.
+
+ Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to
+ "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively.
+ Args:
+ api_endpoint (Optional[str]): the api endpoint to convert.
+ Returns:
+ str: converted mTLS api endpoint.
+ """
+ if not api_endpoint:
+ return api_endpoint
+
+ mtls_endpoint_re = re.compile(
+ r"(?P<name>[^.]+)(?P<mtls>\.mtls)?(?P<sandbox>\.sandbox)?(?P<googledomain>\.googleapis\.com)?"
+ ) + + m = mtls_endpoint_re.match(api_endpoint) + name, mtls, sandbox, googledomain = m.groups() + if mtls or not googledomain: + return api_endpoint + + if sandbox: + return api_endpoint.replace( + "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" + ) + + return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") + + # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. + DEFAULT_ENDPOINT = "merchantapi.googleapis.com" + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore + DEFAULT_ENDPOINT + ) + + _DEFAULT_ENDPOINT_TEMPLATE = "merchantapi.{UNIVERSE_DOMAIN}" + _DEFAULT_UNIVERSE = "googleapis.com" + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + UserServiceClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + UserServiceClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_file(filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> UserServiceTransport: + """Returns the transport used by the client instance. + + Returns: + UserServiceTransport: The transport used by the client + instance. 
+ """ + return self._transport + + @staticmethod + def account_path( + account: str, + ) -> str: + """Returns a fully-qualified account string.""" + return "accounts/{account}".format( + account=account, + ) + + @staticmethod + def parse_account_path(path: str) -> Dict[str, str]: + """Parses a account path into its component segments.""" + m = re.match(r"^accounts/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def user_path( + account: str, + email: str, + ) -> str: + """Returns a fully-qualified user string.""" + return "accounts/{account}/users/{email}".format( + account=account, + email=email, + ) + + @staticmethod + def parse_user_path(path: str) -> Dict[str, str]: + """Parses a user path into its component segments.""" + m = re.match(r"^accounts/(?P.+?)/users/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_billing_account_path( + billing_account: str, + ) -> str: + """Returns a fully-qualified billing_account string.""" + return "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + + @staticmethod + def parse_common_billing_account_path(path: str) -> Dict[str, str]: + """Parse a billing_account path into its component segments.""" + m = re.match(r"^billingAccounts/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_folder_path( + folder: str, + ) -> str: + """Returns a fully-qualified folder string.""" + return "folders/{folder}".format( + folder=folder, + ) + + @staticmethod + def parse_common_folder_path(path: str) -> Dict[str, str]: + """Parse a folder path into its component segments.""" + m = re.match(r"^folders/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_organization_path( + organization: str, + ) -> str: + """Returns a fully-qualified organization string.""" + return "organizations/{organization}".format( + organization=organization, + ) + + @staticmethod + def parse_common_organization_path(path: str) -> Dict[str, str]: + """Parse a organization path into its component segments.""" + m = re.match(r"^organizations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_project_path( + project: str, + ) -> str: + """Returns a fully-qualified project string.""" + return "projects/{project}".format( + project=project, + ) + + @staticmethod + def parse_common_project_path(path: str) -> Dict[str, str]: + """Parse a project path into its component segments.""" + m = re.match(r"^projects/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_location_path( + project: str, + location: str, + ) -> str: + """Returns a fully-qualified location string.""" + return "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) + + @staticmethod + def parse_common_location_path(path: str) -> Dict[str, str]: + """Parse a location path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @classmethod + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[client_options_lib.ClientOptions] = None + ): + """Deprecated. Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. 
+ (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + + warnings.warn( + "get_mtls_endpoint_and_cert_source is deprecated. Use the api_endpoint property instead.", + DeprecationWarning, + ) + if client_options is None: + client_options = client_options_lib.ClientOptions() + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_client_cert not in ("true", "false"): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError( + "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Figure out the client cert source to use. + client_cert_source = None + if use_client_cert == "true": + if client_options.client_cert_source: + client_cert_source = client_options.client_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + + # Figure out which api endpoint to use. + if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + elif use_mtls_endpoint == "always" or ( + use_mtls_endpoint == "auto" and client_cert_source + ): + api_endpoint = cls.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = cls.DEFAULT_ENDPOINT + + return api_endpoint, client_cert_source + + @staticmethod + def _read_environment_variables(): + """Returns the environment variables used by the client. + + Returns: + Tuple[bool, str, str]: returns the GOOGLE_API_USE_CLIENT_CERTIFICATE, + GOOGLE_API_USE_MTLS_ENDPOINT, and GOOGLE_CLOUD_UNIVERSE_DOMAIN environment variables. + + Raises: + ValueError: If GOOGLE_API_USE_CLIENT_CERTIFICATE is not + any of ["true", "false"]. + google.auth.exceptions.MutualTLSChannelError: If GOOGLE_API_USE_MTLS_ENDPOINT + is not any of ["auto", "never", "always"]. 
+ """ + use_client_cert = os.getenv( + "GOOGLE_API_USE_CLIENT_CERTIFICATE", "false" + ).lower() + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto").lower() + universe_domain_env = os.getenv("GOOGLE_CLOUD_UNIVERSE_DOMAIN") + if use_client_cert not in ("true", "false"): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError( + "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + return use_client_cert == "true", use_mtls_endpoint, universe_domain_env + + @staticmethod + def _get_client_cert_source(provided_cert_source, use_cert_flag): + """Return the client cert source to be used by the client. + + Args: + provided_cert_source (bytes): The client certificate source provided. + use_cert_flag (bool): A flag indicating whether to use the client certificate. + + Returns: + bytes or None: The client cert source to be used by the client. + """ + client_cert_source = None + if use_cert_flag: + if provided_cert_source: + client_cert_source = provided_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + return client_cert_source + + @staticmethod + def _get_api_endpoint( + api_override, client_cert_source, universe_domain, use_mtls_endpoint + ): + """Return the API endpoint used by the client. + + Args: + api_override (str): The API endpoint override. If specified, this is always + the return value of this function and the other arguments are not used. + client_cert_source (bytes): The client certificate source used by the client. + universe_domain (str): The universe domain used by the client. + use_mtls_endpoint (str): How to use the mTLS endpoint, which depends also on the other parameters. + Possible values are "always", "auto", or "never". + + Returns: + str: The API endpoint to be used by the client. + """ + if api_override is not None: + api_endpoint = api_override + elif use_mtls_endpoint == "always" or ( + use_mtls_endpoint == "auto" and client_cert_source + ): + _default_universe = UserServiceClient._DEFAULT_UNIVERSE + if universe_domain != _default_universe: + raise MutualTLSChannelError( + f"mTLS is not supported in any universe other than {_default_universe}." + ) + api_endpoint = UserServiceClient.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = UserServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=universe_domain + ) + return api_endpoint + + @staticmethod + def _get_universe_domain( + client_universe_domain: Optional[str], universe_domain_env: Optional[str] + ) -> str: + """Return the universe domain used by the client. + + Args: + client_universe_domain (Optional[str]): The universe domain configured via the client options. + universe_domain_env (Optional[str]): The universe domain configured via the "GOOGLE_CLOUD_UNIVERSE_DOMAIN" environment variable. + + Returns: + str: The universe domain to be used by the client. + + Raises: + ValueError: If the universe domain is an empty string. 
+ """ + universe_domain = UserServiceClient._DEFAULT_UNIVERSE + if client_universe_domain is not None: + universe_domain = client_universe_domain + elif universe_domain_env is not None: + universe_domain = universe_domain_env + if len(universe_domain.strip()) == 0: + raise ValueError("Universe Domain cannot be an empty string.") + return universe_domain + + @staticmethod + def _compare_universes( + client_universe: str, credentials: ga_credentials.Credentials + ) -> bool: + """Returns True iff the universe domains used by the client and credentials match. + + Args: + client_universe (str): The universe domain configured via the client options. + credentials (ga_credentials.Credentials): The credentials being used in the client. + + Returns: + bool: True iff client_universe matches the universe in credentials. + + Raises: + ValueError: when client_universe does not match the universe in credentials. + """ + + default_universe = UserServiceClient._DEFAULT_UNIVERSE + credentials_universe = getattr(credentials, "universe_domain", default_universe) + + if client_universe != credentials_universe: + raise ValueError( + "The configured universe domain " + f"({client_universe}) does not match the universe domain " + f"found in the credentials ({credentials_universe}). " + "If you haven't configured the universe domain explicitly, " + f"`{default_universe}` is the default." + ) + return True + + def _validate_universe_domain(self): + """Validates client's and credentials' universe domains are consistent. + + Returns: + bool: True iff the configured universe domain is valid. + + Raises: + ValueError: If the configured universe domain is not valid. + """ + self._is_universe_domain_valid = ( + self._is_universe_domain_valid + or UserServiceClient._compare_universes( + self.universe_domain, self.transport._credentials + ) + ) + return self._is_universe_domain_valid + + @property + def api_endpoint(self): + """Return the API endpoint used by the client instance. + + Returns: + str: The API endpoint used by the client instance. + """ + return self._api_endpoint + + @property + def universe_domain(self) -> str: + """Return the universe domain used by the client instance. + + Returns: + str: The universe domain used by the client instance. + """ + return self._universe_domain + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[ + Union[str, UserServiceTransport, Callable[..., UserServiceTransport]] + ] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the user service client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Optional[Union[str,UserServiceTransport,Callable[..., UserServiceTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the UserServiceTransport constructor. + If set to None, a transport is chosen automatically. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): + Custom options for the client. + + 1. 
The ``api_endpoint`` property can be used to override the + default endpoint provided by the client when ``transport`` is + not explicitly provided. Only if this property is not set and + ``transport`` was not explicitly provided, the endpoint is + determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment + variable, which have one of the following values: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto-switch to the + default mTLS endpoint if client certificate is present; this is + the default value). + + 2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide a client certificate for mTLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + 3. The ``universe_domain`` property can be used to override the + default "googleapis.com" universe. Note that the ``api_endpoint`` + property still takes precedence; and ``universe_domain`` is + currently not supported for mTLS. + + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + """ + self._client_options = client_options + if isinstance(self._client_options, dict): + self._client_options = client_options_lib.from_dict(self._client_options) + if self._client_options is None: + self._client_options = client_options_lib.ClientOptions() + self._client_options = cast( + client_options_lib.ClientOptions, self._client_options + ) + + universe_domain_opt = getattr(self._client_options, "universe_domain", None) + + ( + self._use_client_cert, + self._use_mtls_endpoint, + self._universe_domain_env, + ) = UserServiceClient._read_environment_variables() + self._client_cert_source = UserServiceClient._get_client_cert_source( + self._client_options.client_cert_source, self._use_client_cert + ) + self._universe_domain = UserServiceClient._get_universe_domain( + universe_domain_opt, self._universe_domain_env + ) + self._api_endpoint = None # updated below, depending on `transport` + + # Initialize the universe domain validation. + self._is_universe_domain_valid = False + + api_key_value = getattr(self._client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError( + "client_options.api_key and credentials are mutually exclusive" + ) + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + transport_provided = isinstance(transport, UserServiceTransport) + if transport_provided: + # transport is a UserServiceTransport instance. + if credentials or self._client_options.credentials_file or api_key_value: + raise ValueError( + "When providing a transport instance, " + "provide its credentials directly." + ) + if self._client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." 
+ ) + self._transport = cast(UserServiceTransport, transport) + self._api_endpoint = self._transport.host + + self._api_endpoint = self._api_endpoint or UserServiceClient._get_api_endpoint( + self._client_options.api_endpoint, + self._client_cert_source, + self._universe_domain, + self._use_mtls_endpoint, + ) + + if not transport_provided: + import google.auth._default # type: ignore + + if api_key_value and hasattr( + google.auth._default, "get_api_key_credentials" + ): + credentials = google.auth._default.get_api_key_credentials( + api_key_value + ) + + transport_init: Union[ + Type[UserServiceTransport], Callable[..., UserServiceTransport] + ] = ( + type(self).get_transport_class(transport) + if isinstance(transport, str) or transport is None + else cast(Callable[..., UserServiceTransport], transport) + ) + # initialize with the provided callable or the passed in class + self._transport = transport_init( + credentials=credentials, + credentials_file=self._client_options.credentials_file, + host=self._api_endpoint, + scopes=self._client_options.scopes, + client_cert_source_for_mtls=self._client_cert_source, + quota_project_id=self._client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + api_audience=self._client_options.api_audience, + ) + + def get_user( + self, + request: Optional[Union[user.GetUserRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> user.User: + r"""Retrieves a Merchant Center account user. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.shopping import merchant_accounts_v1beta + + def sample_get_user(): + # Create a client + client = merchant_accounts_v1beta.UserServiceClient() + + # Initialize request argument(s) + request = merchant_accounts_v1beta.GetUserRequest( + name="name_value", + ) + + # Make the request + response = client.get_user(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.shopping.merchant_accounts_v1beta.types.GetUserRequest, dict]): + The request object. Request message for the ``GetUser`` method. + name (str): + Required. The name of the user to retrieve. Format: + ``accounts/{account}/users/{email}`` + + It is also possible to retrieve the user corresponding + to the caller by using ``me`` rather than an email + address as in ``accounts/{account}/users/me``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.shopping.merchant_accounts_v1beta.types.User: + A + [user](https://support.google.com/merchants/answer/12160472). 
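The generated snippet above builds a `GetUserRequest` explicitly. As a sketch, the flattened form described in the Args section passes `name` directly (the account ID is a placeholder); combining `request` with a flattened argument raises `ValueError`, per the check in the method body.

```python
from google.shopping import merchant_accounts_v1beta

client = merchant_accounts_v1beta.UserServiceClient()

# Flattened form: `name` maps onto GetUserRequest.name. "me" refers to the
# calling user, per the format documented above.
user = client.get_user(name="accounts/123/users/me")
print(user)
```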
+ + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, user.GetUserRequest): + request = user.GetUserRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_user] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def create_user( + self, + request: Optional[Union[gsma_user.CreateUserRequest, dict]] = None, + *, + parent: Optional[str] = None, + user: Optional[gsma_user.User] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> gsma_user.User: + r"""Creates a Merchant Center account user. Executing + this method requires admin access. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.shopping import merchant_accounts_v1beta + + def sample_create_user(): + # Create a client + client = merchant_accounts_v1beta.UserServiceClient() + + # Initialize request argument(s) + request = merchant_accounts_v1beta.CreateUserRequest( + parent="parent_value", + user_id="user_id_value", + ) + + # Make the request + response = client.create_user(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.shopping.merchant_accounts_v1beta.types.CreateUserRequest, dict]): + The request object. Request message for the ``CreateUser`` method. + parent (str): + Required. The resource name of the account for which a + user will be created. Format: ``accounts/{account}`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + user (google.shopping.merchant_accounts_v1beta.types.User): + Required. The user to create. + This corresponds to the ``user`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.shopping.merchant_accounts_v1beta.types.User: + A + [user](https://support.google.com/merchants/answer/12160472). + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, user]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, gsma_user.CreateUserRequest): + request = gsma_user.CreateUserRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if user is not None: + request.user = user + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_user] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def delete_user( + self, + request: Optional[Union[user.DeleteUserRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a Merchant Center account user. Executing + this method requires admin access. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.shopping import merchant_accounts_v1beta + + def sample_delete_user(): + # Create a client + client = merchant_accounts_v1beta.UserServiceClient() + + # Initialize request argument(s) + request = merchant_accounts_v1beta.DeleteUserRequest( + name="name_value", + ) + + # Make the request + client.delete_user(request=request) + + Args: + request (Union[google.shopping.merchant_accounts_v1beta.types.DeleteUserRequest, dict]): + The request object. Request message for the ``DeleteUser`` method. + name (str): + Required. The name of the user to delete. Format: + ``accounts/{account}/users/{email}`` + + It is also possible to delete the user corresponding to + the caller by using ``me`` rather than an email address + as in ``accounts/{account}/users/me``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
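The `retry`, `timeout`, and `metadata` parameters documented above accept standard `google.api_core` values. A hedged sketch follows; the retry policy and extra header are illustrative, and the routing header itself is appended automatically by the method.

```python
from google.api_core import exceptions as core_exceptions
from google.api_core import retry as retries
from google.shopping import merchant_accounts_v1beta

client = merchant_accounts_v1beta.UserServiceClient()

user = client.get_user(
    name="accounts/123/users/me",
    # Retry only transient errors, with exponential backoff (values illustrative).
    retry=retries.Retry(
        initial=0.1,
        maximum=10.0,
        multiplier=1.3,
        predicate=retries.if_exception_type(core_exceptions.ServiceUnavailable),
    ),
    timeout=30.0,
    # Extra metadata is sent alongside the routing header the method adds itself.
    metadata=[("x-goog-user-project", "my-quota-project")],
)
```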
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, user.DeleteUserRequest): + request = user.DeleteUserRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_user] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def update_user( + self, + request: Optional[Union[gsma_user.UpdateUserRequest, dict]] = None, + *, + user: Optional[gsma_user.User] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> gsma_user.User: + r"""Updates a Merchant Center account user. Executing + this method requires admin access. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.shopping import merchant_accounts_v1beta + + def sample_update_user(): + # Create a client + client = merchant_accounts_v1beta.UserServiceClient() + + # Initialize request argument(s) + request = merchant_accounts_v1beta.UpdateUserRequest( + ) + + # Make the request + response = client.update_user(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.shopping.merchant_accounts_v1beta.types.UpdateUserRequest, dict]): + The request object. Request message for the ``UpdateUser`` method. + user (google.shopping.merchant_accounts_v1beta.types.User): + Required. The new version of the user. + + Use ``me`` to refer to your own email address, for + example ``accounts/{account}/users/me``. + + This corresponds to the ``user`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Required. List of fields being + updated. 
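A sketch of supplying the required `FieldMask` in the flattened form of `update_user`. The `access_rights` path is an assumption about the `User` schema made for illustration only and may differ from the real message.

```python
from google.protobuf import field_mask_pb2
from google.shopping import merchant_accounts_v1beta

client = merchant_accounts_v1beta.UserServiceClient()

user = merchant_accounts_v1beta.User(name="accounts/123/users/me")
# Only fields named in the mask are updated; "access_rights" is assumed here
# purely for illustration.
mask = field_mask_pb2.FieldMask(paths=["access_rights"])

updated = client.update_user(user=user, update_mask=mask)
print(updated)
```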
+ + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.shopping.merchant_accounts_v1beta.types.User: + A + [user](https://support.google.com/merchants/answer/12160472). + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([user, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, gsma_user.UpdateUserRequest): + request = gsma_user.UpdateUserRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if user is not None: + request.user = user + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update_user] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("user.name", request.user.name),) + ), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def list_users( + self, + request: Optional[Union[user.ListUsersRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListUsersPager: + r"""Lists all users of a Merchant Center account. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.shopping import merchant_accounts_v1beta + + def sample_list_users(): + # Create a client + client = merchant_accounts_v1beta.UserServiceClient() + + # Initialize request argument(s) + request = merchant_accounts_v1beta.ListUsersRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_users(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.shopping.merchant_accounts_v1beta.types.ListUsersRequest, dict]): + The request object. Request message for the ``ListUsers`` method. + parent (str): + Required. 
The parent, which owns this collection of + users. Format: ``accounts/{account}`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.shopping.merchant_accounts_v1beta.services.user_service.pagers.ListUsersPager: + Response message for the ListUsers method. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, user.ListUsersRequest): + request = user.ListUsersRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_users] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListUsersPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def __enter__(self) -> "UserServiceClient": + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! + """ + self.transport.close() + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("UserServiceClient",) diff --git a/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/user_service/pagers.py b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/user_service/pagers.py new file mode 100644 index 000000000000..0270a2b4887c --- /dev/null +++ b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/user_service/pagers.py @@ -0,0 +1,155 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import ( + Any, + AsyncIterator, + Awaitable, + Callable, + Iterator, + Optional, + Sequence, + Tuple, +) + +from google.shopping.merchant_accounts_v1beta.types import user + + +class ListUsersPager: + """A pager for iterating through ``list_users`` requests. + + This class thinly wraps an initial + :class:`google.shopping.merchant_accounts_v1beta.types.ListUsersResponse` object, and + provides an ``__iter__`` method to iterate through its + ``users`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListUsers`` requests and continue to iterate + through the ``users`` field on the + corresponding responses. + + All the usual :class:`google.shopping.merchant_accounts_v1beta.types.ListUsersResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., user.ListUsersResponse], + request: user.ListUsersRequest, + response: user.ListUsersResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.shopping.merchant_accounts_v1beta.types.ListUsersRequest): + The initial request object. + response (google.shopping.merchant_accounts_v1beta.types.ListUsersResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = user.ListUsersRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[user.ListUsersResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[user.User]: + for page in self.pages: + yield from page.users + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListUsersAsyncPager: + """A pager for iterating through ``list_users`` requests. + + This class thinly wraps an initial + :class:`google.shopping.merchant_accounts_v1beta.types.ListUsersResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``users`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListUsers`` requests and continue to iterate + through the ``users`` field on the + corresponding responses. + + All the usual :class:`google.shopping.merchant_accounts_v1beta.types.ListUsersResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. 
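As an aside on the synchronous `ListUsersPager` defined above, a minimal usage sketch (the account ID is a placeholder): iterating the pager itself yields users across pages, while the `pages` property exposes each raw response.

```python
from google.shopping import merchant_accounts_v1beta

client = merchant_accounts_v1beta.UserServiceClient()

# Item-by-item iteration; the pager fetches additional pages transparently.
for user in client.list_users(parent="accounts/123"):
    print(user.name)

# Page-by-page iteration, when per-response fields such as
# next_page_token are needed.
for page in client.list_users(parent="accounts/123").pages:
    print(len(page.users), page.next_page_token)
```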
+ """ + + def __init__( + self, + method: Callable[..., Awaitable[user.ListUsersResponse]], + request: user.ListUsersRequest, + response: user.ListUsersResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.shopping.merchant_accounts_v1beta.types.ListUsersRequest): + The initial request object. + response (google.shopping.merchant_accounts_v1beta.types.ListUsersResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = user.ListUsersRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[user.ListUsersResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + + def __aiter__(self) -> AsyncIterator[user.User]: + async def async_generator(): + async for page in self.pages: + for response in page.users: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) diff --git a/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/user_service/transports/__init__.py b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/user_service/transports/__init__.py new file mode 100644 index 000000000000..d1b4573ff9d7 --- /dev/null +++ b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/user_service/transports/__init__.py @@ -0,0 +1,36 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +from typing import Dict, Type + +from .base import UserServiceTransport +from .grpc import UserServiceGrpcTransport +from .grpc_asyncio import UserServiceGrpcAsyncIOTransport +from .rest import UserServiceRestInterceptor, UserServiceRestTransport + +# Compile a registry of transports. 
+_transport_registry = OrderedDict() # type: Dict[str, Type[UserServiceTransport]] +_transport_registry["grpc"] = UserServiceGrpcTransport +_transport_registry["grpc_asyncio"] = UserServiceGrpcAsyncIOTransport +_transport_registry["rest"] = UserServiceRestTransport + +__all__ = ( + "UserServiceTransport", + "UserServiceGrpcTransport", + "UserServiceGrpcAsyncIOTransport", + "UserServiceRestTransport", + "UserServiceRestInterceptor", +) diff --git a/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/user_service/transports/base.py b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/user_service/transports/base.py new file mode 100644 index 000000000000..8260a4238551 --- /dev/null +++ b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/user_service/transports/base.py @@ -0,0 +1,211 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import abc +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union + +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore +from google.protobuf import empty_pb2 # type: ignore + +from google.shopping.merchant_accounts_v1beta import gapic_version as package_version +from google.shopping.merchant_accounts_v1beta.types import user +from google.shopping.merchant_accounts_v1beta.types import user as gsma_user + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +class UserServiceTransport(abc.ABC): + """Abstract transport class for UserService.""" + + AUTH_SCOPES = ("https://www.googleapis.com/auth/content",) + + DEFAULT_HOST: str = "merchantapi.googleapis.com" + + def __init__( + self, + *, + host: str = DEFAULT_HOST, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + **kwargs, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'merchantapi.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. 
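The registry above keys the available transports by name. A minimal sketch of selecting one explicitly at client construction; gRPC is used when no transport is given.

```python
from google.shopping import merchant_accounts_v1beta

# "grpc", "grpc_asyncio", and "rest" correspond to the registry entries above;
# the synchronous client shown earlier is typically constructed with
# "grpc" (the default) or "rest".
rest_client = merchant_accounts_v1beta.UserServiceClient(transport="rest")
grpc_client = merchant_accounts_v1beta.UserServiceClient(transport="grpc")
```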
+ credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A list of scopes. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + """ + + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} + + # Save the scopes. + self._scopes = scopes + + # If no credentials are provided, then determine the appropriate + # defaults. + if credentials and credentials_file: + raise core_exceptions.DuplicateCredentialArgs( + "'credentials_file' and 'credentials' are mutually exclusive" + ) + + if credentials_file is not None: + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, **scopes_kwargs, quota_project_id=quota_project_id + ) + elif credentials is None: + credentials, _ = google.auth.default( + **scopes_kwargs, quota_project_id=quota_project_id + ) + # Don't apply audience if the credentials file passed from user. + if hasattr(credentials, "with_gdch_audience"): + credentials = credentials.with_gdch_audience( + api_audience if api_audience else host + ) + + # If the credentials are service account credentials, then always try to use self signed JWT. + if ( + always_use_jwt_access + and isinstance(credentials, service_account.Credentials) + and hasattr(service_account.Credentials, "with_always_use_jwt_access") + ): + credentials = credentials.with_always_use_jwt_access(True) + + # Save the credentials. + self._credentials = credentials + + # Save the hostname. Default to port 443 (HTTPS) if none is specified. + if ":" not in host: + host += ":443" + self._host = host + + @property + def host(self): + return self._host + + def _prep_wrapped_messages(self, client_info): + # Precompute the wrapped methods. + self._wrapped_methods = { + self.get_user: gapic_v1.method.wrap_method( + self.get_user, + default_timeout=None, + client_info=client_info, + ), + self.create_user: gapic_v1.method.wrap_method( + self.create_user, + default_timeout=None, + client_info=client_info, + ), + self.delete_user: gapic_v1.method.wrap_method( + self.delete_user, + default_timeout=None, + client_info=client_info, + ), + self.update_user: gapic_v1.method.wrap_method( + self.update_user, + default_timeout=None, + client_info=client_info, + ), + self.list_users: gapic_v1.method.wrap_method( + self.list_users, + default_timeout=None, + client_info=client_info, + ), + } + + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! 
+ """ + raise NotImplementedError() + + @property + def get_user( + self, + ) -> Callable[[user.GetUserRequest], Union[user.User, Awaitable[user.User]]]: + raise NotImplementedError() + + @property + def create_user( + self, + ) -> Callable[ + [gsma_user.CreateUserRequest], Union[gsma_user.User, Awaitable[gsma_user.User]] + ]: + raise NotImplementedError() + + @property + def delete_user( + self, + ) -> Callable[ + [user.DeleteUserRequest], Union[empty_pb2.Empty, Awaitable[empty_pb2.Empty]] + ]: + raise NotImplementedError() + + @property + def update_user( + self, + ) -> Callable[ + [gsma_user.UpdateUserRequest], Union[gsma_user.User, Awaitable[gsma_user.User]] + ]: + raise NotImplementedError() + + @property + def list_users( + self, + ) -> Callable[ + [user.ListUsersRequest], + Union[user.ListUsersResponse, Awaitable[user.ListUsersResponse]], + ]: + raise NotImplementedError() + + @property + def kind(self) -> str: + raise NotImplementedError() + + +__all__ = ("UserServiceTransport",) diff --git a/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/user_service/transports/grpc.py b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/user_service/transports/grpc.py new file mode 100644 index 000000000000..c6b7834503d6 --- /dev/null +++ b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/user_service/transports/grpc.py @@ -0,0 +1,369 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, grpc_helpers +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.protobuf import empty_pb2 # type: ignore +import grpc # type: ignore + +from google.shopping.merchant_accounts_v1beta.types import user +from google.shopping.merchant_accounts_v1beta.types import user as gsma_user + +from .base import DEFAULT_CLIENT_INFO, UserServiceTransport + + +class UserServiceGrpcTransport(UserServiceTransport): + """gRPC backend transport for UserService. + + Service to support user API. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. 
+ """ + + _stubs: Dict[str, Callable] + + def __init__( + self, + *, + host: str = "merchantapi.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'merchantapi.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if a ``channel`` instance is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if a ``channel`` instance is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if a ``channel`` instance is provided. + channel (Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]]): + A ``Channel`` instance through which to make calls, or a Callable + that constructs and returns one. If set to None, ``self.create_channel`` + is used to create the channel. If a Callable is given, it will be called + with the same arguments as used in ``self.create_channel``. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if a ``channel`` instance is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. 
+ google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if isinstance(channel, grpc.Channel): + # Ignore credentials if a channel was passed. + credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + # initialize with the provided callable or the default channel + channel_init = channel or type(self).create_channel + self._grpc_channel = channel_init( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @classmethod + def create_channel( + cls, + host: str = "merchantapi.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> grpc.Channel: + """Create and return a gRPC channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. 
+ kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + grpc.Channel: A gRPC channel object. + + Raises: + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + + return grpc_helpers.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs, + ) + + @property + def grpc_channel(self) -> grpc.Channel: + """Return the channel designed to connect to this service.""" + return self._grpc_channel + + @property + def get_user(self) -> Callable[[user.GetUserRequest], user.User]: + r"""Return a callable for the get user method over gRPC. + + Retrieves a Merchant Center account user. + + Returns: + Callable[[~.GetUserRequest], + ~.User]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_user" not in self._stubs: + self._stubs["get_user"] = self.grpc_channel.unary_unary( + "/google.shopping.merchant.accounts.v1beta.UserService/GetUser", + request_serializer=user.GetUserRequest.serialize, + response_deserializer=user.User.deserialize, + ) + return self._stubs["get_user"] + + @property + def create_user(self) -> Callable[[gsma_user.CreateUserRequest], gsma_user.User]: + r"""Return a callable for the create user method over gRPC. + + Creates a Merchant Center account user. Executing + this method requires admin access. + + Returns: + Callable[[~.CreateUserRequest], + ~.User]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_user" not in self._stubs: + self._stubs["create_user"] = self.grpc_channel.unary_unary( + "/google.shopping.merchant.accounts.v1beta.UserService/CreateUser", + request_serializer=gsma_user.CreateUserRequest.serialize, + response_deserializer=gsma_user.User.deserialize, + ) + return self._stubs["create_user"] + + @property + def delete_user(self) -> Callable[[user.DeleteUserRequest], empty_pb2.Empty]: + r"""Return a callable for the delete user method over gRPC. + + Deletes a Merchant Center account user. Executing + this method requires admin access. + + Returns: + Callable[[~.DeleteUserRequest], + ~.Empty]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_user" not in self._stubs: + self._stubs["delete_user"] = self.grpc_channel.unary_unary( + "/google.shopping.merchant.accounts.v1beta.UserService/DeleteUser", + request_serializer=user.DeleteUserRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs["delete_user"] + + @property + def update_user(self) -> Callable[[gsma_user.UpdateUserRequest], gsma_user.User]: + r"""Return a callable for the update user method over gRPC. + + Updates a Merchant Center account user. Executing + this method requires admin access. 
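A hedged sketch of configuring the gRPC transport above for mutual TLS and handing it to the client. The mTLS hostname is assumed from the usual `<service>.mtls.googleapis.com` convention, and the certificate files are placeholders; when a transport instance is passed in, credentials and scopes must be configured on the transport itself.

```python
from google.shopping import merchant_accounts_v1beta
from google.shopping.merchant_accounts_v1beta.services.user_service.transports import (
    UserServiceGrpcTransport,
)


def load_client_cert():
    # Placeholder callback returning (cert_bytes, key_bytes) in PEM format.
    with open("client_cert.pem", "rb") as cert, open("client_key.pem", "rb") as key:
        return cert.read(), key.read()


transport = UserServiceGrpcTransport(
    host="merchantapi.mtls.googleapis.com",  # assumed mTLS host, adjust as needed
    client_cert_source_for_mtls=load_client_cert,
)
client = merchant_accounts_v1beta.UserServiceClient(transport=transport)
```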
+ + Returns: + Callable[[~.UpdateUserRequest], + ~.User]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_user" not in self._stubs: + self._stubs["update_user"] = self.grpc_channel.unary_unary( + "/google.shopping.merchant.accounts.v1beta.UserService/UpdateUser", + request_serializer=gsma_user.UpdateUserRequest.serialize, + response_deserializer=gsma_user.User.deserialize, + ) + return self._stubs["update_user"] + + @property + def list_users(self) -> Callable[[user.ListUsersRequest], user.ListUsersResponse]: + r"""Return a callable for the list users method over gRPC. + + Lists all users of a Merchant Center account. + + Returns: + Callable[[~.ListUsersRequest], + ~.ListUsersResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_users" not in self._stubs: + self._stubs["list_users"] = self.grpc_channel.unary_unary( + "/google.shopping.merchant.accounts.v1beta.UserService/ListUsers", + request_serializer=user.ListUsersRequest.serialize, + response_deserializer=user.ListUsersResponse.deserialize, + ) + return self._stubs["list_users"] + + def close(self): + self.grpc_channel.close() + + @property + def kind(self) -> str: + return "grpc" + + +__all__ = ("UserServiceGrpcTransport",) diff --git a/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/user_service/transports/grpc_asyncio.py b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/user_service/transports/grpc_asyncio.py new file mode 100644 index 000000000000..925e09aad1a6 --- /dev/null +++ b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/user_service/transports/grpc_asyncio.py @@ -0,0 +1,407 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1, grpc_helpers_async +from google.api_core import retry_async as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.protobuf import empty_pb2 # type: ignore +import grpc # type: ignore +from grpc.experimental import aio # type: ignore + +from google.shopping.merchant_accounts_v1beta.types import user +from google.shopping.merchant_accounts_v1beta.types import user as gsma_user + +from .base import DEFAULT_CLIENT_INFO, UserServiceTransport +from .grpc import UserServiceGrpcTransport + + +class UserServiceGrpcAsyncIOTransport(UserServiceTransport): + """gRPC AsyncIO backend transport for UserService. + + Service to support user API. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + + _grpc_channel: aio.Channel + _stubs: Dict[str, Callable] = {} + + @classmethod + def create_channel( + cls, + host: str = "merchantapi.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> aio.Channel: + """Create and return a gRPC AsyncIO channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + aio.Channel: A gRPC AsyncIO channel object. 
+ """ + + return grpc_helpers_async.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs, + ) + + def __init__( + self, + *, + host: str = "merchantapi.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[Union[aio.Channel, Callable[..., aio.Channel]]] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'merchantapi.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if a ``channel`` instance is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if a ``channel`` instance is provided. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + channel (Optional[Union[aio.Channel, Callable[..., aio.Channel]]]): + A ``Channel`` instance through which to make calls, or a Callable + that constructs and returns one. If set to None, ``self.create_channel`` + is used to create the channel. If a Callable is given, it will be called + with the same arguments as used in ``self.create_channel``. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if a ``channel`` instance is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. 
+ Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if isinstance(channel, aio.Channel): + # Ignore credentials if a channel was passed. + credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + # initialize with the provided callable or the default channel + channel_init = channel or type(self).create_channel + self._grpc_channel = channel_init( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @property + def grpc_channel(self) -> aio.Channel: + """Create the channel designed to connect to this service. + + This property caches on the instance; repeated calls return + the same channel. + """ + # Return the channel from cache. + return self._grpc_channel + + @property + def get_user(self) -> Callable[[user.GetUserRequest], Awaitable[user.User]]: + r"""Return a callable for the get user method over gRPC. + + Retrieves a Merchant Center account user. + + Returns: + Callable[[~.GetUserRequest], + Awaitable[~.User]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "get_user" not in self._stubs: + self._stubs["get_user"] = self.grpc_channel.unary_unary( + "/google.shopping.merchant.accounts.v1beta.UserService/GetUser", + request_serializer=user.GetUserRequest.serialize, + response_deserializer=user.User.deserialize, + ) + return self._stubs["get_user"] + + @property + def create_user( + self, + ) -> Callable[[gsma_user.CreateUserRequest], Awaitable[gsma_user.User]]: + r"""Return a callable for the create user method over gRPC. + + Creates a Merchant Center account user. Executing + this method requires admin access. + + Returns: + Callable[[~.CreateUserRequest], + Awaitable[~.User]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_user" not in self._stubs: + self._stubs["create_user"] = self.grpc_channel.unary_unary( + "/google.shopping.merchant.accounts.v1beta.UserService/CreateUser", + request_serializer=gsma_user.CreateUserRequest.serialize, + response_deserializer=gsma_user.User.deserialize, + ) + return self._stubs["create_user"] + + @property + def delete_user( + self, + ) -> Callable[[user.DeleteUserRequest], Awaitable[empty_pb2.Empty]]: + r"""Return a callable for the delete user method over gRPC. + + Deletes a Merchant Center account user. Executing + this method requires admin access. + + Returns: + Callable[[~.DeleteUserRequest], + Awaitable[~.Empty]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_user" not in self._stubs: + self._stubs["delete_user"] = self.grpc_channel.unary_unary( + "/google.shopping.merchant.accounts.v1beta.UserService/DeleteUser", + request_serializer=user.DeleteUserRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs["delete_user"] + + @property + def update_user( + self, + ) -> Callable[[gsma_user.UpdateUserRequest], Awaitable[gsma_user.User]]: + r"""Return a callable for the update user method over gRPC. + + Updates a Merchant Center account user. Executing + this method requires admin access. + + Returns: + Callable[[~.UpdateUserRequest], + Awaitable[~.User]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_user" not in self._stubs: + self._stubs["update_user"] = self.grpc_channel.unary_unary( + "/google.shopping.merchant.accounts.v1beta.UserService/UpdateUser", + request_serializer=gsma_user.UpdateUserRequest.serialize, + response_deserializer=gsma_user.User.deserialize, + ) + return self._stubs["update_user"] + + @property + def list_users( + self, + ) -> Callable[[user.ListUsersRequest], Awaitable[user.ListUsersResponse]]: + r"""Return a callable for the list users method over gRPC. + + Lists all users of a Merchant Center account. + + Returns: + Callable[[~.ListUsersRequest], + Awaitable[~.ListUsersResponse]]: + A function that, when called, will call the underlying RPC + on the server. 
+ """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_users" not in self._stubs: + self._stubs["list_users"] = self.grpc_channel.unary_unary( + "/google.shopping.merchant.accounts.v1beta.UserService/ListUsers", + request_serializer=user.ListUsersRequest.serialize, + response_deserializer=user.ListUsersResponse.deserialize, + ) + return self._stubs["list_users"] + + def _prep_wrapped_messages(self, client_info): + """Precompute the wrapped methods, overriding the base class method to use async wrappers.""" + self._wrapped_methods = { + self.get_user: gapic_v1.method_async.wrap_method( + self.get_user, + default_timeout=None, + client_info=client_info, + ), + self.create_user: gapic_v1.method_async.wrap_method( + self.create_user, + default_timeout=None, + client_info=client_info, + ), + self.delete_user: gapic_v1.method_async.wrap_method( + self.delete_user, + default_timeout=None, + client_info=client_info, + ), + self.update_user: gapic_v1.method_async.wrap_method( + self.update_user, + default_timeout=None, + client_info=client_info, + ), + self.list_users: gapic_v1.method_async.wrap_method( + self.list_users, + default_timeout=None, + client_info=client_info, + ), + } + + def close(self): + return self.grpc_channel.close() + + +__all__ = ("UserServiceGrpcAsyncIOTransport",) diff --git a/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/user_service/transports/rest.py b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/user_service/transports/rest.py new file mode 100644 index 000000000000..058cc95891ca --- /dev/null +++ b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/user_service/transports/rest.py @@ -0,0 +1,771 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +import dataclasses +import json # type: ignore +import re +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, path_template, rest_helpers, rest_streaming +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.transport.requests import AuthorizedSession # type: ignore +from google.protobuf import json_format +import grpc # type: ignore +from requests import __version__ as requests_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + + +from google.protobuf import empty_pb2 # type: ignore + +from google.shopping.merchant_accounts_v1beta.types import user +from google.shopping.merchant_accounts_v1beta.types import user as gsma_user + +from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO +from .base import UserServiceTransport + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) + + +class UserServiceRestInterceptor: + """Interceptor for UserService. + + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. + Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the UserServiceRestTransport. + + .. code-block:: python + class MyCustomUserServiceInterceptor(UserServiceRestInterceptor): + def pre_create_user(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_create_user(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_delete_user(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def pre_get_user(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_user(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_users(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_users(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_update_user(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_update_user(self, response): + logging.log(f"Received response: {response}") + return response + + transport = UserServiceRestTransport(interceptor=MyCustomUserServiceInterceptor()) + client = UserServiceClient(transport=transport) + + + """ + + def pre_create_user( + self, request: gsma_user.CreateUserRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[gsma_user.CreateUserRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for create_user + + Override in a subclass to manipulate the request or metadata + before they are sent to the UserService server. 
+ """ + return request, metadata + + def post_create_user(self, response: gsma_user.User) -> gsma_user.User: + """Post-rpc interceptor for create_user + + Override in a subclass to manipulate the response + after it is returned by the UserService server but before + it is returned to user code. + """ + return response + + def pre_delete_user( + self, request: user.DeleteUserRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[user.DeleteUserRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for delete_user + + Override in a subclass to manipulate the request or metadata + before they are sent to the UserService server. + """ + return request, metadata + + def pre_get_user( + self, request: user.GetUserRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[user.GetUserRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_user + + Override in a subclass to manipulate the request or metadata + before they are sent to the UserService server. + """ + return request, metadata + + def post_get_user(self, response: user.User) -> user.User: + """Post-rpc interceptor for get_user + + Override in a subclass to manipulate the response + after it is returned by the UserService server but before + it is returned to user code. + """ + return response + + def pre_list_users( + self, request: user.ListUsersRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[user.ListUsersRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_users + + Override in a subclass to manipulate the request or metadata + before they are sent to the UserService server. + """ + return request, metadata + + def post_list_users( + self, response: user.ListUsersResponse + ) -> user.ListUsersResponse: + """Post-rpc interceptor for list_users + + Override in a subclass to manipulate the response + after it is returned by the UserService server but before + it is returned to user code. + """ + return response + + def pre_update_user( + self, request: gsma_user.UpdateUserRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[gsma_user.UpdateUserRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for update_user + + Override in a subclass to manipulate the request or metadata + before they are sent to the UserService server. + """ + return request, metadata + + def post_update_user(self, response: gsma_user.User) -> gsma_user.User: + """Post-rpc interceptor for update_user + + Override in a subclass to manipulate the response + after it is returned by the UserService server but before + it is returned to user code. + """ + return response + + +@dataclasses.dataclass +class UserServiceRestStub: + _session: AuthorizedSession + _host: str + _interceptor: UserServiceRestInterceptor + + +class UserServiceRestTransport(UserServiceTransport): + """REST backend transport for UserService. + + Service to support user API. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. 
+ + It sends JSON representations of protocol buffers over HTTP/1.1 + + """ + + def __init__( + self, + *, + host: str = "merchantapi.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", + interceptor: Optional[UserServiceRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'merchantapi.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. It is ignored + if ``channel`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. + """ + # Run the base constructor + # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. 
+ # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the + # credentials object + maybe_url_match = re.match("^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host) + if maybe_url_match is None: + raise ValueError( + f"Unexpected hostname structure: {host}" + ) # pragma: NO COVER + + url_match_items = maybe_url_match.groupdict() + + host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host + + super().__init__( + host=host, + credentials=credentials, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + self._session = AuthorizedSession( + self._credentials, default_host=self.DEFAULT_HOST + ) + if client_cert_source_for_mtls: + self._session.configure_mtls_channel(client_cert_source_for_mtls) + self._interceptor = interceptor or UserServiceRestInterceptor() + self._prep_wrapped_messages(client_info) + + class _CreateUser(UserServiceRestStub): + def __hash__(self): + return hash("CreateUser") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "userId": "", + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: gsma_user.CreateUserRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> gsma_user.User: + r"""Call the create user method over HTTP. + + Args: + request (~.gsma_user.CreateUserRequest): + The request object. Request message for the ``CreateUser`` method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.gsma_user.User: + A + `user `__. + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/accounts/v1beta/{parent=accounts/*}/users", + "body": "user", + }, + ] + request, metadata = self._interceptor.pre_create_user(request, metadata) + pb_request = gsma_user.CreateUserRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass.
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = gsma_user.User() + pb_resp = gsma_user.User.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_create_user(resp) + return resp + + class _DeleteUser(UserServiceRestStub): + def __hash__(self): + return hash("DeleteUser") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: user.DeleteUserRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ): + r"""Call the delete user method over HTTP. + + Args: + request (~.user.DeleteUserRequest): + The request object. Request message for the ``DeleteUser`` method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/accounts/v1beta/{name=accounts/*/users/*}", + }, + ] + request, metadata = self._interceptor.pre_delete_user(request, metadata) + pb_request = user.DeleteUserRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + class _GetUser(UserServiceRestStub): + def __hash__(self): + return hash("GetUser") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: user.GetUserRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> user.User: + r"""Call the get user method over HTTP. + + Args: + request (~.user.GetUserRequest): + The request object. Request message for the ``GetUser`` method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.user.User: + A + `user `__. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/accounts/v1beta/{name=accounts/*/users/*}", + }, + ] + request, metadata = self._interceptor.pre_get_user(request, metadata) + pb_request = user.GetUserRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = user.User() + pb_resp = user.User.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_user(resp) + return resp + + class _ListUsers(UserServiceRestStub): + def __hash__(self): + return hash("ListUsers") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: user.ListUsersRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> user.ListUsersResponse: + r"""Call the list users method over HTTP. + + Args: + request (~.user.ListUsersRequest): + The request object. Request message for the ``ListUsers`` method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.user.ListUsersResponse: + Response message for the ``ListUsers`` method. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/accounts/v1beta/{parent=accounts/*}/users", + }, + ] + request, metadata = self._interceptor.pre_list_users(request, metadata) + pb_request = user.ListUsersRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = user.ListUsersResponse() + pb_resp = user.ListUsersResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_users(resp) + return resp + + class _UpdateUser(UserServiceRestStub): + def __hash__(self): + return hash("UpdateUser") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "updateMask": {}, + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: gsma_user.UpdateUserRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> gsma_user.User: + r"""Call the update user method over HTTP. + + Args: + request (~.gsma_user.UpdateUserRequest): + The request object. Request message for the ``UpdateUser`` method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.gsma_user.User: + A + `user `__. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "patch", + "uri": "/accounts/v1beta/{user.name=accounts/*/users/*}", + "body": "user", + }, + ] + request, metadata = self._interceptor.pre_update_user(request, metadata) + pb_request = gsma_user.UpdateUserRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = gsma_user.User() + pb_resp = gsma_user.User.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_update_user(resp) + return resp + + @property + def create_user(self) -> Callable[[gsma_user.CreateUserRequest], gsma_user.User]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._CreateUser(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete_user(self) -> Callable[[user.DeleteUserRequest], empty_pb2.Empty]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._DeleteUser(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_user(self) -> Callable[[user.GetUserRequest], user.User]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetUser(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_users(self) -> Callable[[user.ListUsersRequest], user.ListUsersResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListUsers(self._session, self._host, self._interceptor) # type: ignore + + @property + def update_user(self) -> Callable[[gsma_user.UpdateUserRequest], gsma_user.User]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._UpdateUser(self._session, self._host, self._interceptor) # type: ignore + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__ = ("UserServiceRestTransport",) diff --git a/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/types/__init__.py b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/types/__init__.py new file mode 100644 index 000000000000..fdd477be2f6a --- /dev/null +++ b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/types/__init__.py @@ -0,0 +1,226 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from .accessright import AccessRight +from .account_tax import ( + AccountTax, + GetAccountTaxRequest, + ListAccountTaxRequest, + ListAccountTaxResponse, + UpdateAccountTaxRequest, +) +from .accountissue import ( + AccountIssue, + ListAccountIssuesRequest, + ListAccountIssuesResponse, +) +from .accounts import ( + Account, + CreateAndConfigureAccountRequest, + DeleteAccountRequest, + GetAccountRequest, + ListAccountsRequest, + ListAccountsResponse, + ListSubAccountsRequest, + ListSubAccountsResponse, + UpdateAccountRequest, +) +from .businessidentity import ( + BusinessIdentity, + GetBusinessIdentityRequest, + UpdateBusinessIdentityRequest, +) +from .businessinfo import ( + BusinessInfo, + GetBusinessInfoRequest, + UpdateBusinessInfoRequest, +) +from .customerservice import CustomerService +from .emailpreferences import ( + EmailPreferences, + GetEmailPreferencesRequest, + UpdateEmailPreferencesRequest, +) +from .homepage import ( + ClaimHomepageRequest, + GetHomepageRequest, + Homepage, + UnclaimHomepageRequest, + UpdateHomepageRequest, +) +from .online_return_policy import ( + GetOnlineReturnPolicyRequest, + ListOnlineReturnPoliciesRequest, + ListOnlineReturnPoliciesResponse, + OnlineReturnPolicy, +) +from .phoneverificationstate import PhoneVerificationState +from .programs import ( + DisableProgramRequest, + EnableProgramRequest, + GetProgramRequest, + ListProgramsRequest, + ListProgramsResponse, + Program, +) +from .regions import ( + CreateRegionRequest, + DeleteRegionRequest, + GetRegionRequest, + ListRegionsRequest, + ListRegionsResponse, + Region, + UpdateRegionRequest, +) +from .shippingsettings import ( + Address, + BusinessDayConfig, + CarrierRate, + CutoffTime, + DeliveryTime, + Distance, + GetShippingSettingsRequest, + Headers, + InsertShippingSettingsRequest, + LocationIdSet, + MinimumOrderValueTable, + RateGroup, + Row, + Service, + ShippingSettings, + Table, + TransitTable, + Value, + Warehouse, + WarehouseBasedDeliveryTime, + WarehouseCutoffTime, +) +from .tax_rule import TaxRule +from .termsofservice import ( + AcceptTermsOfServiceRequest, + GetTermsOfServiceRequest, + RetrieveLatestTermsOfServiceRequest, + TermsOfService, +) +from 
.termsofserviceagreementstate import ( + Accepted, + GetTermsOfServiceAgreementStateRequest, + Required, + RetrieveForApplicationTermsOfServiceAgreementStateRequest, + TermsOfServiceAgreementState, +) +from .termsofservicekind import TermsOfServiceKind +from .user import ( + CreateUserRequest, + DeleteUserRequest, + GetUserRequest, + ListUsersRequest, + ListUsersResponse, + UpdateUserRequest, + User, +) + +__all__ = ( + "AccessRight", + "AccountTax", + "GetAccountTaxRequest", + "ListAccountTaxRequest", + "ListAccountTaxResponse", + "UpdateAccountTaxRequest", + "AccountIssue", + "ListAccountIssuesRequest", + "ListAccountIssuesResponse", + "Account", + "CreateAndConfigureAccountRequest", + "DeleteAccountRequest", + "GetAccountRequest", + "ListAccountsRequest", + "ListAccountsResponse", + "ListSubAccountsRequest", + "ListSubAccountsResponse", + "UpdateAccountRequest", + "BusinessIdentity", + "GetBusinessIdentityRequest", + "UpdateBusinessIdentityRequest", + "BusinessInfo", + "GetBusinessInfoRequest", + "UpdateBusinessInfoRequest", + "CustomerService", + "EmailPreferences", + "GetEmailPreferencesRequest", + "UpdateEmailPreferencesRequest", + "ClaimHomepageRequest", + "GetHomepageRequest", + "Homepage", + "UnclaimHomepageRequest", + "UpdateHomepageRequest", + "GetOnlineReturnPolicyRequest", + "ListOnlineReturnPoliciesRequest", + "ListOnlineReturnPoliciesResponse", + "OnlineReturnPolicy", + "PhoneVerificationState", + "DisableProgramRequest", + "EnableProgramRequest", + "GetProgramRequest", + "ListProgramsRequest", + "ListProgramsResponse", + "Program", + "CreateRegionRequest", + "DeleteRegionRequest", + "GetRegionRequest", + "ListRegionsRequest", + "ListRegionsResponse", + "Region", + "UpdateRegionRequest", + "Address", + "BusinessDayConfig", + "CarrierRate", + "CutoffTime", + "DeliveryTime", + "Distance", + "GetShippingSettingsRequest", + "Headers", + "InsertShippingSettingsRequest", + "LocationIdSet", + "MinimumOrderValueTable", + "RateGroup", + "Row", + "Service", + "ShippingSettings", + "Table", + "TransitTable", + "Value", + "Warehouse", + "WarehouseBasedDeliveryTime", + "WarehouseCutoffTime", + "TaxRule", + "AcceptTermsOfServiceRequest", + "GetTermsOfServiceRequest", + "RetrieveLatestTermsOfServiceRequest", + "TermsOfService", + "Accepted", + "GetTermsOfServiceAgreementStateRequest", + "Required", + "RetrieveForApplicationTermsOfServiceAgreementStateRequest", + "TermsOfServiceAgreementState", + "TermsOfServiceKind", + "CreateUserRequest", + "DeleteUserRequest", + "GetUserRequest", + "ListUsersRequest", + "ListUsersResponse", + "UpdateUserRequest", + "User", +) diff --git a/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/types/accessright.py b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/types/accessright.py new file mode 100644 index 000000000000..b5247ed735a9 --- /dev/null +++ b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/types/accessright.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + +__protobuf__ = proto.module( + package="google.shopping.merchant.accounts.v1beta", + manifest={ + "AccessRight", + }, +) + + +class AccessRight(proto.Enum): + r"""The access right. + + Values: + ACCESS_RIGHT_UNSPECIFIED (0): + Default value. This value is unused. + STANDARD (1): + Standard access rights. + ADMIN (2): + Admin access rights. + PERFORMANCE_REPORTING (3): + Users with this right have access to + performance and insights. + """ + ACCESS_RIGHT_UNSPECIFIED = 0 + STANDARD = 1 + ADMIN = 2 + PERFORMANCE_REPORTING = 3 + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/types/account_tax.py b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/types/account_tax.py new file mode 100644 index 000000000000..46d195c876ad --- /dev/null +++ b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/types/account_tax.py @@ -0,0 +1,166 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +from google.protobuf import field_mask_pb2 # type: ignore +import proto # type: ignore + +from google.shopping.merchant_accounts_v1beta.types import tax_rule + +__protobuf__ = proto.module( + package="google.shopping.merchant.accounts.v1beta", + manifest={ + "AccountTax", + "GetAccountTaxRequest", + "UpdateAccountTaxRequest", + "ListAccountTaxRequest", + "ListAccountTaxResponse", + }, +) + + +class AccountTax(proto.Message): + r"""The tax settings of a merchant account. All methods require + the admin role. + + Attributes: + name (str): + Identifier. The name of the tax setting. Format: + "{account_tax.name=accounts/{account}}". + account (int): + Output only. The ID of the account to which + these account tax settings belong. + tax_rules (MutableSequence[google.shopping.merchant_accounts_v1beta.types.TaxRule]): + Tax rules. "Define the tax rules in each + region. No tax will be presented if a region has + no rule.". 
+ """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + account: int = proto.Field( + proto.INT64, + number=2, + ) + tax_rules: MutableSequence[tax_rule.TaxRule] = proto.RepeatedField( + proto.MESSAGE, + number=3, + message=tax_rule.TaxRule, + ) + + +class GetAccountTaxRequest(proto.Message): + r"""Request to get tax settings + + Attributes: + name (str): + Required. The name from which tax settings + will be retrieved + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class UpdateAccountTaxRequest(proto.Message): + r"""Request to update the tax settings + + Attributes: + account_tax (google.shopping.merchant_accounts_v1beta.types.AccountTax): + Required. The tax setting that will be + updated + update_mask (google.protobuf.field_mask_pb2.FieldMask): + The list of fields to be updated + """ + + account_tax: "AccountTax" = proto.Field( + proto.MESSAGE, + number=1, + message="AccountTax", + ) + update_mask: field_mask_pb2.FieldMask = proto.Field( + proto.MESSAGE, + number=2, + message=field_mask_pb2.FieldMask, + ) + + +class ListAccountTaxRequest(proto.Message): + r"""Request to list all sub-account tax settings only for the + requesting merchant This method can only be called on a + multi-client account, otherwise it'll return an error. + + Attributes: + parent (str): + Required. The parent, which owns this + collection of account tax. Format: + accounts/{account} + page_size (int): + The maximum number of tax settings to return + in the response, used for paging. + page_token (str): + The token returned by the previous request. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + + +class ListAccountTaxResponse(proto.Message): + r"""Response to account tax list request + This method can only be called on a multi-client account, + otherwise it'll return an error. + + Attributes: + account_taxes (MutableSequence[google.shopping.merchant_accounts_v1beta.types.AccountTax]): + Page of accounttax settings + next_page_token (str): + The token for the retrieval of the next page + of account tax settings. + """ + + @property + def raw_page(self): + return self + + account_taxes: MutableSequence["AccountTax"] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="AccountTax", + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/types/accountissue.py b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/types/accountissue.py new file mode 100644 index 000000000000..91e837cf849b --- /dev/null +++ b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/types/accountissue.py @@ -0,0 +1,239 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +from google.shopping.type.types import types +from google.type import datetime_pb2 # type: ignore +import proto # type: ignore + +__protobuf__ = proto.module( + package="google.shopping.merchant.accounts.v1beta", + manifest={ + "AccountIssue", + "ListAccountIssuesRequest", + "ListAccountIssuesResponse", + }, +) + + +class AccountIssue(proto.Message): + r"""An + ```AccountIssue`` `__. + + Attributes: + name (str): + Identifier. The resource name of the account issue. Format: + ``accounts/{account}/issues/{id}`` + title (str): + The localized title of the issue. + severity (google.shopping.merchant_accounts_v1beta.types.AccountIssue.Severity): + The overall severity of the issue. + impacted_destinations (MutableSequence[google.shopping.merchant_accounts_v1beta.types.AccountIssue.ImpactedDestination]): + The impact this issue has on various + destinations. + detail (str): + Further localized details about the issue. + documentation_uri (str): + Link to Merchant Center Help Center providing + further information about the issue and how to + fix it. + """ + + class Severity(proto.Enum): + r"""All possible issue severities. + + Values: + SEVERITY_UNSPECIFIED (0): + The severity is unknown. + CRITICAL (1): + The issue causes offers to not serve. + ERROR (2): + The issue might affect offers (in the future) + or might be an indicator of issues with offers. + SUGGESTION (3): + The issue is a suggestion for improvement. + """ + SEVERITY_UNSPECIFIED = 0 + CRITICAL = 1 + ERROR = 2 + SUGGESTION = 3 + + class ImpactedDestination(proto.Message): + r"""The impact of the issue on a destination. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + reporting_context (google.shopping.type.types.ReportingContext.ReportingContextEnum): + The impacted reporting context. + + This field is a member of `oneof`_ ``_reporting_context``. + impacts (MutableSequence[google.shopping.merchant_accounts_v1beta.types.AccountIssue.ImpactedDestination.Impact]): + The (negative) impact for various regions on + the given destination. + """ + + class Impact(proto.Message): + r"""The impact of the issue on a region. + + Attributes: + region_code (str): + The `CLDR region code `__ where + this issue applies. + severity (google.shopping.merchant_accounts_v1beta.types.AccountIssue.Severity): + The severity of the issue on the destination + and region. 
+ """ + + region_code: str = proto.Field( + proto.STRING, + number=1, + ) + severity: "AccountIssue.Severity" = proto.Field( + proto.ENUM, + number=2, + enum="AccountIssue.Severity", + ) + + reporting_context: types.ReportingContext.ReportingContextEnum = proto.Field( + proto.ENUM, + number=1, + optional=True, + enum=types.ReportingContext.ReportingContextEnum, + ) + impacts: MutableSequence[ + "AccountIssue.ImpactedDestination.Impact" + ] = proto.RepeatedField( + proto.MESSAGE, + number=2, + message="AccountIssue.ImpactedDestination.Impact", + ) + + name: str = proto.Field( + proto.STRING, + number=1, + ) + title: str = proto.Field( + proto.STRING, + number=2, + ) + severity: Severity = proto.Field( + proto.ENUM, + number=3, + enum=Severity, + ) + impacted_destinations: MutableSequence[ImpactedDestination] = proto.RepeatedField( + proto.MESSAGE, + number=4, + message=ImpactedDestination, + ) + detail: str = proto.Field( + proto.STRING, + number=5, + ) + documentation_uri: str = proto.Field( + proto.STRING, + number=6, + ) + + +class ListAccountIssuesRequest(proto.Message): + r"""Request message for the ``ListAccountIssues`` method. + + Attributes: + parent (str): + Required. The parent, which owns this collection of issues. + Format: ``accounts/{account}`` + page_size (int): + Optional. The maximum number of issues to + return. The service may return fewer than this + value. If unspecified, at most 50 users will be + returned. The maximum value is 100; values above + 100 will be coerced to 100 + page_token (str): + Optional. A page token, received from a previous + ``ListAccountIssues`` call. Provide this to retrieve the + subsequent page. + + When paginating, all other parameters provided to + ``ListAccountIssues`` must match the call that provided the + page token. + language_code (str): + Optional. The issues in the response will have + human-readable fields in the given language. The format is + `BCP-47 `__, such as + ``en-US`` or ``sr-Latn``. If not value is provided, + ``en-US`` will be used. + time_zone (google.type.datetime_pb2.TimeZone): + Optional. The `IANA `__ + timezone used to localize times in human-readable fields. + For example 'America/Los_Angeles'. If not set, + 'America/Los_Angeles' will be used. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + language_code: str = proto.Field( + proto.STRING, + number=4, + ) + time_zone: datetime_pb2.TimeZone = proto.Field( + proto.MESSAGE, + number=5, + message=datetime_pb2.TimeZone, + ) + + +class ListAccountIssuesResponse(proto.Message): + r"""Response message for the ``ListAccountIssues`` method. + + Attributes: + account_issues (MutableSequence[google.shopping.merchant_accounts_v1beta.types.AccountIssue]): + The issues from the specified account. + next_page_token (str): + A token, which can be sent as ``page_token`` to retrieve the + next page. If this field is omitted, there are no subsequent + pages. 
+ """ + + @property + def raw_page(self): + return self + + account_issues: MutableSequence["AccountIssue"] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="AccountIssue", + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/types/accounts.py b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/types/accounts.py new file mode 100644 index 000000000000..a7b454e96fa1 --- /dev/null +++ b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/types/accounts.py @@ -0,0 +1,390 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +from google.protobuf import empty_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore +from google.type import datetime_pb2 # type: ignore +import proto # type: ignore + +from google.shopping.merchant_accounts_v1beta.types import user + +__protobuf__ = proto.module( + package="google.shopping.merchant.accounts.v1beta", + manifest={ + "Account", + "GetAccountRequest", + "CreateAndConfigureAccountRequest", + "DeleteAccountRequest", + "UpdateAccountRequest", + "ListAccountsRequest", + "ListAccountsResponse", + "ListSubAccountsRequest", + "ListSubAccountsResponse", + }, +) + + +class Account(proto.Message): + r"""An account. + + Attributes: + name (str): + Identifier. The resource name of the account. Format: + ``accounts/{account}`` + account_id (int): + Output only. The ID of the account. + account_name (str): + Required. A human-readable name of the account. See `store + name `__ + and `business + name `__ + for more information. + adult_content (bool): + Whether this account contains adult content. + test_account (bool): + Output only. Whether this is a test account. + time_zone (google.type.datetime_pb2.TimeZone): + Required. The time zone of the account. + + On writes, ``time_zone`` sets both the + ``reporting_time_zone`` and the ``display_time_zone``. + + For reads, ``time_zone`` always returns the + ``display_time_zone``. If ``display_time_zone`` doesn't + exist for your account, ``time_zone`` is empty. + language_code (str): + Required. The account's `BCP-47 language + code `__, such as + ``en-US`` or ``sr-Latn``. 
+ """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + account_id: int = proto.Field( + proto.INT64, + number=2, + ) + account_name: str = proto.Field( + proto.STRING, + number=3, + ) + adult_content: bool = proto.Field( + proto.BOOL, + number=4, + ) + test_account: bool = proto.Field( + proto.BOOL, + number=5, + ) + time_zone: datetime_pb2.TimeZone = proto.Field( + proto.MESSAGE, + number=6, + message=datetime_pb2.TimeZone, + ) + language_code: str = proto.Field( + proto.STRING, + number=7, + ) + + +class GetAccountRequest(proto.Message): + r"""Request message for the ``GetAccount`` method. + + Attributes: + name (str): + Required. The name of the account to retrieve. Format: + ``accounts/{account}`` + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class CreateAndConfigureAccountRequest(proto.Message): + r"""Request message for the ``CreateAndConfigureAccount`` method. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + account (google.shopping.merchant_accounts_v1beta.types.Account): + Required. The account to be created. + users (MutableSequence[google.shopping.merchant_accounts_v1beta.types.CreateUserRequest]): + Optional. Users to be added to the account. + accept_terms_of_service (google.shopping.merchant_accounts_v1beta.types.CreateAndConfigureAccountRequest.AcceptTermsOfService): + Optional. The Terms of Service (ToS) to be + accepted immediately upon account creation. + + This field is a member of `oneof`_ ``_accept_terms_of_service``. + service (MutableSequence[google.shopping.merchant_accounts_v1beta.types.CreateAndConfigureAccountRequest.AddAccountService]): + Optional. If specified, an account service + between the account to be created and the + provider account is initialized as part of the + creation. + """ + + class AcceptTermsOfService(proto.Message): + r"""Reference to a Terms of Service resource. + + Attributes: + name (str): + Required. The resource name of the terms of + service version. + region_code (str): + Required. Region code as defined by + `CLDR `__. This is either a + country when the ToS applies specifically to that country or + ``001`` when it applies globally. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + region_code: str = proto.Field( + proto.STRING, + number=3, + ) + + class AddAccountService(proto.Message): + r"""Additional instructions to add account services during + creation of the account. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + account_aggregation (google.protobuf.empty_pb2.Empty): + The provider is an aggregator for the + account. + + This field is a member of `oneof`_ ``service_type``. + provider (str): + Optional. The provider of the service. Format: + ``accounts/{account}`` + + This field is a member of `oneof`_ ``_provider``. 
+ """ + + account_aggregation: empty_pb2.Empty = proto.Field( + proto.MESSAGE, + number=2, + oneof="service_type", + message=empty_pb2.Empty, + ) + provider: str = proto.Field( + proto.STRING, + number=1, + optional=True, + ) + + account: "Account" = proto.Field( + proto.MESSAGE, + number=1, + message="Account", + ) + users: MutableSequence[user.CreateUserRequest] = proto.RepeatedField( + proto.MESSAGE, + number=2, + message=user.CreateUserRequest, + ) + accept_terms_of_service: AcceptTermsOfService = proto.Field( + proto.MESSAGE, + number=3, + optional=True, + message=AcceptTermsOfService, + ) + service: MutableSequence[AddAccountService] = proto.RepeatedField( + proto.MESSAGE, + number=4, + message=AddAccountService, + ) + + +class DeleteAccountRequest(proto.Message): + r"""Request message for the ``DeleteAccount`` method. + + Attributes: + name (str): + Required. The name of the account to delete. Format: + ``accounts/{account}`` + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class UpdateAccountRequest(proto.Message): + r"""Request message for the ``UpdateAccount`` method. + + Attributes: + account (google.shopping.merchant_accounts_v1beta.types.Account): + Required. The new version of the account. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Required. List of fields being updated. + """ + + account: "Account" = proto.Field( + proto.MESSAGE, + number=1, + message="Account", + ) + update_mask: field_mask_pb2.FieldMask = proto.Field( + proto.MESSAGE, + number=2, + message=field_mask_pb2.FieldMask, + ) + + +class ListAccountsRequest(proto.Message): + r"""Request message for the ``ListAccounts`` method. + + Attributes: + page_size (int): + Optional. The maximum number of accounts to + return. The service may return fewer than this + value. If unspecified, at most 250 accounts are + returned. The maximum value is 500; values above + 500 are coerced to 500. + page_token (str): + Optional. A page token, received from a previous + ``ListAccounts`` call. Provide this to retrieve the + subsequent page. + + When paginating, all other parameters provided to + ``ListAccounts`` must match the call that provided the page + token. + filter (str): + Optional. Returns only accounts that match the + `filter `__. For more + details, see the `filter syntax + reference `__. + """ + + page_size: int = proto.Field( + proto.INT32, + number=1, + ) + page_token: str = proto.Field( + proto.STRING, + number=2, + ) + filter: str = proto.Field( + proto.STRING, + number=3, + ) + + +class ListAccountsResponse(proto.Message): + r"""Response message for the ``ListAccounts`` method. + + Attributes: + accounts (MutableSequence[google.shopping.merchant_accounts_v1beta.types.Account]): + The accounts matching the ``ListAccountsRequest``. + next_page_token (str): + A token, which can be sent as ``page_token`` to retrieve the + next page. If this field is omitted, there are no subsequent + pages. + """ + + @property + def raw_page(self): + return self + + accounts: MutableSequence["Account"] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="Account", + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + + +class ListSubAccountsRequest(proto.Message): + r"""Request message for the ``ListSubAccounts`` method. + + Attributes: + provider (str): + Required. The parent account. Format: ``accounts/{account}`` + page_size (int): + Optional. The maximum number of accounts to + return. The service may return fewer than this + value. 
If unspecified, at most 250 accounts are + returned. The maximum value is 500; values above + 500 are coerced to 500. + page_token (str): + Optional. A page token, received from a previous + ``ListAccounts`` call. Provide this to retrieve the + subsequent page. + + When paginating, all other parameters provided to + ``ListAccounts`` must match the call that provided the page + token. + """ + + provider: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + + +class ListSubAccountsResponse(proto.Message): + r"""Response message for the ``ListSubAccounts`` method. + + Attributes: + accounts (MutableSequence[google.shopping.merchant_accounts_v1beta.types.Account]): + The accounts for which the given parent + account is an aggregator. + next_page_token (str): + A token, which can be sent as ``page_token`` to retrieve the + next page. If this field is omitted, there are no subsequent + pages. + """ + + @property + def raw_page(self): + return self + + accounts: MutableSequence["Account"] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="Account", + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/types/businessidentity.py b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/types/businessidentity.py new file mode 100644 index 000000000000..9428fb627531 --- /dev/null +++ b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/types/businessidentity.py @@ -0,0 +1,204 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +from google.protobuf import field_mask_pb2 # type: ignore +import proto # type: ignore + +__protobuf__ = proto.module( + package="google.shopping.merchant.accounts.v1beta", + manifest={ + "BusinessIdentity", + "GetBusinessIdentityRequest", + "UpdateBusinessIdentityRequest", + }, +) + + +class BusinessIdentity(proto.Message): + r"""Collection of information related to the `identity of a + business `__. + + Attributes: + name (str): + Identifier. The resource name of the business identity. + Format: ``accounts/{account}/businessIdentity`` + promotions_consent (google.shopping.merchant_accounts_v1beta.types.BusinessIdentity.PromotionsConsent): + Optional. Whether the identity attributes may + be used for promotions. + black_owned (google.shopping.merchant_accounts_v1beta.types.BusinessIdentity.IdentityAttribute): + Optional. Specifies whether the business identifies itself + as being black-owned. This optional field will only be + available for merchants with a business country set to + ``US``. 
It is also not applicable for marketplaces or + marketplace sellers. + women_owned (google.shopping.merchant_accounts_v1beta.types.BusinessIdentity.IdentityAttribute): + Optional. Specifies whether the business identifies itself + as being women-owned. This optional field will only be + available for merchants with a business country set to + ``US``. It is also not applicable for marketplaces or + marketplace sellers. + veteran_owned (google.shopping.merchant_accounts_v1beta.types.BusinessIdentity.IdentityAttribute): + Optional. Specifies whether the business identifies itself + as being veteran-owned. This optional field will only be + available for merchants with a business country set to + ``US``. It is also not applicable for marketplaces or + marketplace sellers. + latino_owned (google.shopping.merchant_accounts_v1beta.types.BusinessIdentity.IdentityAttribute): + Optional. Specifies whether the business identifies itself + as being latino-owned. This optional field will only be + available for merchants with a business country set to + ``US``. It is also not applicable for marketplaces or + marketplace sellers. + small_business (google.shopping.merchant_accounts_v1beta.types.BusinessIdentity.IdentityAttribute): + Optional. Specifies whether the business identifies itself + as a small business. This optional field will only be + available for merchants with a business country set to + ``US``. It is also not applicable for marketplaces. + """ + + class PromotionsConsent(proto.Enum): + r"""All possible settings regarding promotions related to the + business identity. + + Values: + PROMOTIONS_CONSENT_UNSPECIFIED (0): + Default value indicating that no selection + was made. + PROMOTIONS_CONSENT_GIVEN (1): + Indicates that the account consented to + having their business identity used for + promotions. + PROMOTIONS_CONSENT_DENIED (2): + Indicates that the account did not consent to + having their business identity used for + promotions. + """ + PROMOTIONS_CONSENT_UNSPECIFIED = 0 + PROMOTIONS_CONSENT_GIVEN = 1 + PROMOTIONS_CONSENT_DENIED = 2 + + class IdentityAttribute(proto.Message): + r"""All information related to an identity attribute. + + Attributes: + identity_declaration (google.shopping.merchant_accounts_v1beta.types.BusinessIdentity.IdentityAttribute.IdentityDeclaration): + Required. The declaration of identity for + this attribute. + """ + + class IdentityDeclaration(proto.Enum): + r"""All possible settings regarding the declaration of an + identity. + + Values: + IDENTITY_DECLARATION_UNSPECIFIED (0): + Default value indicating that no selection + was made. + SELF_IDENTIFIES_AS (1): + Indicates that the account identifies with + the attribute. + DOES_NOT_SELF_IDENTIFY_AS (2): + Indicates that the account does not identify + with the attribute. 
+ """ + IDENTITY_DECLARATION_UNSPECIFIED = 0 + SELF_IDENTIFIES_AS = 1 + DOES_NOT_SELF_IDENTIFY_AS = 2 + + identity_declaration: "BusinessIdentity.IdentityAttribute.IdentityDeclaration" = proto.Field( + proto.ENUM, + number=1, + enum="BusinessIdentity.IdentityAttribute.IdentityDeclaration", + ) + + name: str = proto.Field( + proto.STRING, + number=1, + ) + promotions_consent: PromotionsConsent = proto.Field( + proto.ENUM, + number=2, + enum=PromotionsConsent, + ) + black_owned: IdentityAttribute = proto.Field( + proto.MESSAGE, + number=3, + message=IdentityAttribute, + ) + women_owned: IdentityAttribute = proto.Field( + proto.MESSAGE, + number=4, + message=IdentityAttribute, + ) + veteran_owned: IdentityAttribute = proto.Field( + proto.MESSAGE, + number=5, + message=IdentityAttribute, + ) + latino_owned: IdentityAttribute = proto.Field( + proto.MESSAGE, + number=6, + message=IdentityAttribute, + ) + small_business: IdentityAttribute = proto.Field( + proto.MESSAGE, + number=7, + message=IdentityAttribute, + ) + + +class GetBusinessIdentityRequest(proto.Message): + r"""Request message for the ``GetBusinessIdentity`` method. + + Attributes: + name (str): + Required. The resource name of the business identity. + Format: ``accounts/{account}/businessIdentity`` + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class UpdateBusinessIdentityRequest(proto.Message): + r"""Request message for the ``UpdateBusinessIdentity`` method. + + Attributes: + business_identity (google.shopping.merchant_accounts_v1beta.types.BusinessIdentity): + Required. The new version of the business + identity. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Required. List of fields being updated. + """ + + business_identity: "BusinessIdentity" = proto.Field( + proto.MESSAGE, + number=1, + message="BusinessIdentity", + ) + update_mask: field_mask_pb2.FieldMask = proto.Field( + proto.MESSAGE, + number=2, + message=field_mask_pb2.FieldMask, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/types/businessinfo.py b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/types/businessinfo.py new file mode 100644 index 000000000000..ab8e68e599c3 --- /dev/null +++ b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/types/businessinfo.py @@ -0,0 +1,140 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +from google.protobuf import field_mask_pb2 # type: ignore +from google.type import phone_number_pb2 # type: ignore +from google.type import postal_address_pb2 # type: ignore +import proto # type: ignore + +from google.shopping.merchant_accounts_v1beta.types import ( + customerservice, + phoneverificationstate, +) + +__protobuf__ = proto.module( + package="google.shopping.merchant.accounts.v1beta", + manifest={ + "BusinessInfo", + "GetBusinessInfoRequest", + "UpdateBusinessInfoRequest", + }, +) + + +class BusinessInfo(proto.Message): + r"""Collection of information related to a business. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + name (str): + Identifier. The resource name of the business info. Format: + ``accounts/{account}/businessInfo`` + address (google.type.postal_address_pb2.PostalAddress): + Optional. The address of the business. + + This field is a member of `oneof`_ ``_address``. + phone (google.type.phone_number_pb2.PhoneNumber): + Output only. The phone number of the + business. + + This field is a member of `oneof`_ ``_phone``. + phone_verification_state (google.shopping.merchant_accounts_v1beta.types.PhoneVerificationState): + Output only. The phone verification state of + the business. + + This field is a member of `oneof`_ ``_phone_verification_state``. + customer_service (google.shopping.merchant_accounts_v1beta.types.CustomerService): + Optional. The customer service of the + business. + + This field is a member of `oneof`_ ``_customer_service``. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + address: postal_address_pb2.PostalAddress = proto.Field( + proto.MESSAGE, + number=2, + optional=True, + message=postal_address_pb2.PostalAddress, + ) + phone: phone_number_pb2.PhoneNumber = proto.Field( + proto.MESSAGE, + number=3, + optional=True, + message=phone_number_pb2.PhoneNumber, + ) + phone_verification_state: phoneverificationstate.PhoneVerificationState = ( + proto.Field( + proto.ENUM, + number=4, + optional=True, + enum=phoneverificationstate.PhoneVerificationState, + ) + ) + customer_service: customerservice.CustomerService = proto.Field( + proto.MESSAGE, + number=5, + optional=True, + message=customerservice.CustomerService, + ) + + +class GetBusinessInfoRequest(proto.Message): + r"""Request message for the ``GetBusinessInfo`` method. + + Attributes: + name (str): + Required. The resource name of the business info. Format: + ``accounts/{account}/businessInfo`` + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class UpdateBusinessInfoRequest(proto.Message): + r"""Request message for the ``UpdateBusinessInfo`` method. + + Attributes: + business_info (google.shopping.merchant_accounts_v1beta.types.BusinessInfo): + Required. The new version of the business + info. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Required. List of fields being updated. 
+ """ + + business_info: "BusinessInfo" = proto.Field( + proto.MESSAGE, + number=1, + message="BusinessInfo", + ) + update_mask: field_mask_pb2.FieldMask = proto.Field( + proto.MESSAGE, + number=2, + message=field_mask_pb2.FieldMask, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/types/customerservice.py b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/types/customerservice.py new file mode 100644 index 000000000000..210951bf3c67 --- /dev/null +++ b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/types/customerservice.py @@ -0,0 +1,72 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +from google.type import phone_number_pb2 # type: ignore +import proto # type: ignore + +__protobuf__ = proto.module( + package="google.shopping.merchant.accounts.v1beta", + manifest={ + "CustomerService", + }, +) + + +class CustomerService(proto.Message): + r"""Customer service information. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + uri (str): + Optional. The URI where customer service may + be found. + + This field is a member of `oneof`_ ``_uri``. + email (str): + Optional. The email address where customer + service may be reached. + + This field is a member of `oneof`_ ``_email``. + phone (google.type.phone_number_pb2.PhoneNumber): + Optional. The phone number where customer + service may be called. + + This field is a member of `oneof`_ ``_phone``. + """ + + uri: str = proto.Field( + proto.STRING, + number=1, + optional=True, + ) + email: str = proto.Field( + proto.STRING, + number=2, + optional=True, + ) + phone: phone_number_pb2.PhoneNumber = proto.Field( + proto.MESSAGE, + number=3, + optional=True, + message=phone_number_pb2.PhoneNumber, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/types/emailpreferences.py b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/types/emailpreferences.py new file mode 100644 index 000000000000..225927fdfc6b --- /dev/null +++ b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/types/emailpreferences.py @@ -0,0 +1,120 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +from google.protobuf import field_mask_pb2 # type: ignore +import proto # type: ignore + +__protobuf__ = proto.module( + package="google.shopping.merchant.accounts.v1beta", + manifest={ + "EmailPreferences", + "GetEmailPreferencesRequest", + "UpdateEmailPreferencesRequest", + }, +) + + +class EmailPreferences(proto.Message): + r"""The categories of notifications the user opted into / opted + out of. The email preferences do not include mandatory + announcements as users can't opt out of them. + + Attributes: + name (str): + Identifier. The name of the EmailPreferences. + The endpoint is only supported for the + authenticated user. + news_and_tips (google.shopping.merchant_accounts_v1beta.types.EmailPreferences.OptInState): + Optional. Updates on new features, tips and + best practices. + """ + + class OptInState(proto.Enum): + r"""Opt in state of the email preference. + + Values: + OPT_IN_STATE_UNSPECIFIED (0): + Opt-in status is not specified. + OPTED_OUT (1): + User has opted out of receiving this type of + email. + OPTED_IN (2): + User has opted in to receiving this type of + email. + UNCONFIRMED (3): + User has opted in to receiving this type of + email and the confirmation email has been sent, + but user has not yet confirmed the opt in + (applies only to certain countries). + """ + OPT_IN_STATE_UNSPECIFIED = 0 + OPTED_OUT = 1 + OPTED_IN = 2 + UNCONFIRMED = 3 + + name: str = proto.Field( + proto.STRING, + number=1, + ) + news_and_tips: OptInState = proto.Field( + proto.ENUM, + number=2, + enum=OptInState, + ) + + +class GetEmailPreferencesRequest(proto.Message): + r"""Request message for GetEmailPreferences method. + + Attributes: + name (str): + Required. The name of the ``EmailPreferences`` resource. + Format: + ``accounts/{account}/users/{email}/emailPreferences`` + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class UpdateEmailPreferencesRequest(proto.Message): + r"""Request message for UpdateEmailPreferences method. + + Attributes: + email_preferences (google.shopping.merchant_accounts_v1beta.types.EmailPreferences): + Required. Email Preferences to be updated. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Required. List of fields being updated. 
+ """ + + email_preferences: "EmailPreferences" = proto.Field( + proto.MESSAGE, + number=1, + message="EmailPreferences", + ) + update_mask: field_mask_pb2.FieldMask = proto.Field( + proto.MESSAGE, + number=2, + message=field_mask_pb2.FieldMask, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/types/homepage.py b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/types/homepage.py new file mode 100644 index 000000000000..a582b2f4063d --- /dev/null +++ b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/types/homepage.py @@ -0,0 +1,137 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +from google.protobuf import field_mask_pb2 # type: ignore +import proto # type: ignore + +__protobuf__ = proto.module( + package="google.shopping.merchant.accounts.v1beta", + manifest={ + "Homepage", + "GetHomepageRequest", + "UpdateHomepageRequest", + "ClaimHomepageRequest", + "UnclaimHomepageRequest", + }, +) + + +class Homepage(proto.Message): + r"""A store's homepage. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + name (str): + Identifier. The resource name of the store's homepage. + Format: ``accounts/{account}/homepage`` + uri (str): + Required. The URI (typically a URL) of the + store's homepage. + + This field is a member of `oneof`_ ``_uri``. + claimed (bool): + Output only. Whether the homepage is claimed. + See + https://support.google.com/merchants/answer/176793. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + uri: str = proto.Field( + proto.STRING, + number=2, + optional=True, + ) + claimed: bool = proto.Field( + proto.BOOL, + number=3, + ) + + +class GetHomepageRequest(proto.Message): + r"""Request message for the ``GetHomepage`` method. + + Attributes: + name (str): + Required. The name of the homepage to retrieve. Format: + ``accounts/{account}/homepage`` + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class UpdateHomepageRequest(proto.Message): + r"""Request message for the ``UpdateHomepage`` method. + + Attributes: + homepage (google.shopping.merchant_accounts_v1beta.types.Homepage): + Required. The new version of the homepage. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Required. List of fields being updated. 
+ """ + + homepage: "Homepage" = proto.Field( + proto.MESSAGE, + number=1, + message="Homepage", + ) + update_mask: field_mask_pb2.FieldMask = proto.Field( + proto.MESSAGE, + number=2, + message=field_mask_pb2.FieldMask, + ) + + +class ClaimHomepageRequest(proto.Message): + r"""Request message for the ``ClaimHomepage`` method. + + Attributes: + name (str): + Required. The name of the homepage to claim. Format: + ``accounts/{account}/homepage`` + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class UnclaimHomepageRequest(proto.Message): + r"""Request message for the ``UnclaimHomepage`` method. + + Attributes: + name (str): + Required. The name of the homepage to unclaim. Format: + ``accounts/{account}/homepage`` + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/types/online_return_policy.py b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/types/online_return_policy.py new file mode 100644 index 000000000000..8df71b3fbbe8 --- /dev/null +++ b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/types/online_return_policy.py @@ -0,0 +1,406 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +from google.shopping.type.types import types +import proto # type: ignore + +__protobuf__ = proto.module( + package="google.shopping.merchant.accounts.v1beta", + manifest={ + "GetOnlineReturnPolicyRequest", + "ListOnlineReturnPoliciesRequest", + "ListOnlineReturnPoliciesResponse", + "OnlineReturnPolicy", + }, +) + + +class GetOnlineReturnPolicyRequest(proto.Message): + r"""Request message for the ``GetOnlineReturnPolicy`` method. + + Attributes: + name (str): + Required. The name of the return policy to retrieve. Format: + ``accounts/{account}/onlineReturnPolicies/{return_policy}`` + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class ListOnlineReturnPoliciesRequest(proto.Message): + r"""Request message for the ``ListOnlineReturnPolicies`` method. + + Attributes: + parent (str): + Required. The merchant account for which to list return + policies. Format: ``accounts/{account}`` + page_size (int): + Optional. The maximum number of ``OnlineReturnPolicy`` + resources to return. The service returns fewer than this + value if the number of return policies for the given + merchant is less that than the ``pageSize``. The default + value is 10. The maximum value is 100; If a value higher + than the maximum is specified, then the ``pageSize`` will + default to the maximum + page_token (str): + Optional. A page token, received from a previous + ``ListOnlineReturnPolicies`` call. Provide the page token to + retrieve the subsequent page. 
+ + When paginating, all other parameters provided to + ``ListOnlineReturnPolicies`` must match the call that + provided the page token. The token returned as + [nextPageToken][google.shopping.merchant.accounts.v1beta.ListOnlineReturnPoliciesResponse.next_page_token] + in the response to the previous request. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + + +class ListOnlineReturnPoliciesResponse(proto.Message): + r"""Response message for the ``ListOnlineReturnPolicies`` method. + + Attributes: + online_return_policies (MutableSequence[google.shopping.merchant_accounts_v1beta.types.OnlineReturnPolicy]): + The retrieved return policies. + next_page_token (str): + A token, which can be sent as ``pageToken`` to retrieve the + next page. If this field is omitted, there are no subsequent + pages. + """ + + @property + def raw_page(self): + return self + + online_return_policies: MutableSequence["OnlineReturnPolicy"] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="OnlineReturnPolicy", + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + + +class OnlineReturnPolicy(proto.Message): + r"""`Online return + policy `__ + object. This is currently used to represent return policies for ads + and free listings programs. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + name (str): + Identifier. The name of the ``OnlineReturnPolicy`` resource. + Format: + ``accounts/{account}/onlineReturnPolicies/{return_policy}`` + return_policy_id (str): + Output only. Return policy ID generated by + Google. + label (str): + This field represents the unique user-defined label of the + return policy. It is important to note that the same label + cannot be used in different return policies for the same + country. Unless a product specifies a specific label + attribute, policies will be automatically labeled as + 'default'. To assign a custom return policy to certain + product groups, follow the instructions provided in the + [Return policy label] + (https://support.google.com/merchants/answer/9445425). The + label can contain up to 50 characters. + countries (MutableSequence[str]): + The countries of sale where the return policy + applies. The values must be a valid 2 letter ISO + 3166 code. + policy (google.shopping.merchant_accounts_v1beta.types.OnlineReturnPolicy.Policy): + The return policy. + restocking_fee (google.shopping.merchant_accounts_v1beta.types.OnlineReturnPolicy.RestockingFee): + The restocking fee that applies to all return + reason categories. This would be treated as a + free restocking fee if the value is not set. + return_methods (MutableSequence[google.shopping.merchant_accounts_v1beta.types.OnlineReturnPolicy.ReturnMethod]): + The return methods of how customers can + return an item. This value is required to not be + empty unless the type of return policy is + noReturns. + item_conditions (MutableSequence[google.shopping.merchant_accounts_v1beta.types.OnlineReturnPolicy.ItemCondition]): + The item conditions accepted for returns must + not be empty unless the type of return policy is + 'noReturns'. 
+ return_shipping_fee (google.shopping.merchant_accounts_v1beta.types.OnlineReturnPolicy.ReturnShippingFee): + The return shipping fee. Should be set only + when the customer needs to download and print the + return label. + return_policy_uri (str): + The return policy URI. This can be used by + Google to do a sanity check for the policy. It + must be a valid URL. + accept_defective_only (bool): + This field specifies if the merchant only accepts + defective products for returns, and this field + is required. + + This field is a member of `oneof`_ ``_accept_defective_only``. + process_refund_days (int): + This field specifies the number of days it + takes for merchants to process refunds; this + field is optional. + + This field is a member of `oneof`_ ``_process_refund_days``. + accept_exchange (bool): + This field specifies if the merchant allows + customers to exchange products; this field is + required. + + This field is a member of `oneof`_ ``_accept_exchange``. + """ + + class ReturnMethod(proto.Enum): + r"""The available return methods. + + Values: + RETURN_METHOD_UNSPECIFIED (0): + Default value. This value is unused. + BY_MAIL (1): + Return by mail. + IN_STORE (2): + Return in store. + AT_A_KIOSK (3): + Return at a kiosk. + """ + RETURN_METHOD_UNSPECIFIED = 0 + BY_MAIL = 1 + IN_STORE = 2 + AT_A_KIOSK = 3 + + class ItemCondition(proto.Enum): + r"""The available item conditions. + + Values: + ITEM_CONDITION_UNSPECIFIED (0): + Default value. This value is unused. + NEW (1): + New. + USED (2): + Used. + """ + ITEM_CONDITION_UNSPECIFIED = 0 + NEW = 1 + USED = 2 + + class ReturnShippingFee(proto.Message): + r"""The return shipping fee. This can either be a fixed fee or a + boolean to indicate that the customer pays the actual shipping + cost. + + Attributes: + type_ (google.shopping.merchant_accounts_v1beta.types.OnlineReturnPolicy.ReturnShippingFee.Type): + Type of return shipping fee. + fixed_fee (google.shopping.type.types.Price): + Fixed return shipping fee amount. This value is only + applicable when type is ``FIXED``. We will treat the return + shipping fee as free if type is ``FIXED`` and this value is + not set. + """ + + class Type(proto.Enum): + r"""Return shipping fee types. + + Values: + TYPE_UNSPECIFIED (0): + Default value. This value is unused. + FIXED (1): + The return shipping fee is a fixed value. + CUSTOMER_PAYING_ACTUAL_FEE (2): + Customers will pay the actual return shipping + fee. + """ + TYPE_UNSPECIFIED = 0 + FIXED = 1 + CUSTOMER_PAYING_ACTUAL_FEE = 2 + + type_: "OnlineReturnPolicy.ReturnShippingFee.Type" = proto.Field( + proto.ENUM, + number=1, + enum="OnlineReturnPolicy.ReturnShippingFee.Type", + ) + fixed_fee: types.Price = proto.Field( + proto.MESSAGE, + number=2, + message=types.Price, + ) + + class RestockingFee(proto.Message): + r"""The restocking fee. This can be a flat fee or a micro + percent. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + fixed_fee (google.shopping.type.types.Price): + Fixed restocking fee. + + This field is a member of `oneof`_ ``type``. + micro_percent (int): + Percent of total price in micros. 15,000,000 + means 15% of the total price would be charged. + + This field is a member of `oneof`_ ``type``.
+ """ + + fixed_fee: types.Price = proto.Field( + proto.MESSAGE, + number=1, + oneof="type", + message=types.Price, + ) + micro_percent: int = proto.Field( + proto.INT32, + number=2, + oneof="type", + ) + + class Policy(proto.Message): + r"""The available policies. + + Attributes: + type_ (google.shopping.merchant_accounts_v1beta.types.OnlineReturnPolicy.Policy.Type): + Policy type. + days (int): + The number of days items can be returned after delivery, + where one day is defined as 24 hours after the delivery + timestamp. Required for ``NUMBER_OF_DAYS_AFTER_DELIVERY`` + returns. + """ + + class Type(proto.Enum): + r"""Return policy types. + + Values: + TYPE_UNSPECIFIED (0): + Default value. This value is unused. + NUMBER_OF_DAYS_AFTER_DELIVERY (1): + The number of days within which a return is + valid after delivery. + NO_RETURNS (2): + No returns. + LIFETIME_RETURNS (3): + Life time returns. + """ + TYPE_UNSPECIFIED = 0 + NUMBER_OF_DAYS_AFTER_DELIVERY = 1 + NO_RETURNS = 2 + LIFETIME_RETURNS = 3 + + type_: "OnlineReturnPolicy.Policy.Type" = proto.Field( + proto.ENUM, + number=1, + enum="OnlineReturnPolicy.Policy.Type", + ) + days: int = proto.Field( + proto.INT64, + number=2, + ) + + name: str = proto.Field( + proto.STRING, + number=1, + ) + return_policy_id: str = proto.Field( + proto.STRING, + number=2, + ) + label: str = proto.Field( + proto.STRING, + number=3, + ) + countries: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=4, + ) + policy: Policy = proto.Field( + proto.MESSAGE, + number=5, + message=Policy, + ) + restocking_fee: RestockingFee = proto.Field( + proto.MESSAGE, + number=6, + message=RestockingFee, + ) + return_methods: MutableSequence[ReturnMethod] = proto.RepeatedField( + proto.ENUM, + number=7, + enum=ReturnMethod, + ) + item_conditions: MutableSequence[ItemCondition] = proto.RepeatedField( + proto.ENUM, + number=8, + enum=ItemCondition, + ) + return_shipping_fee: ReturnShippingFee = proto.Field( + proto.MESSAGE, + number=9, + message=ReturnShippingFee, + ) + return_policy_uri: str = proto.Field( + proto.STRING, + number=10, + ) + accept_defective_only: bool = proto.Field( + proto.BOOL, + number=11, + optional=True, + ) + process_refund_days: int = proto.Field( + proto.INT32, + number=12, + optional=True, + ) + accept_exchange: bool = proto.Field( + proto.BOOL, + number=13, + optional=True, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/types/phoneverificationstate.py b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/types/phoneverificationstate.py new file mode 100644 index 000000000000..7700cf9c2317 --- /dev/null +++ b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/types/phoneverificationstate.py @@ -0,0 +1,46 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + +__protobuf__ = proto.module( + package="google.shopping.merchant.accounts.v1beta", + manifest={ + "PhoneVerificationState", + }, +) + + +class PhoneVerificationState(proto.Enum): + r"""The phone verification state. + + Values: + PHONE_VERIFICATION_STATE_UNSPECIFIED (0): + Default value. This value is unused. + PHONE_VERIFICATION_STATE_VERIFIED (1): + The phone is verified. + PHONE_VERIFICATION_STATE_UNVERIFIED (2): + The phone is unverified + """ + PHONE_VERIFICATION_STATE_UNSPECIFIED = 0 + PHONE_VERIFICATION_STATE_VERIFIED = 1 + PHONE_VERIFICATION_STATE_UNVERIFIED = 2 + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/types/programs.py b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/types/programs.py new file mode 100644 index 000000000000..da1f6aa71f26 --- /dev/null +++ b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/types/programs.py @@ -0,0 +1,254 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + +__protobuf__ = proto.module( + package="google.shopping.merchant.accounts.v1beta", + manifest={ + "Program", + "GetProgramRequest", + "ListProgramsRequest", + "ListProgramsResponse", + "EnableProgramRequest", + "DisableProgramRequest", + }, +) + + +class Program(proto.Message): + r"""Defines participation in a given program for the specified account. + + Programs provide a mechanism for adding functionality to merchant + accounts. A typical example of this is the `Free product + listings `__ + program, which enables products from a merchant's store to be shown + across Google for free. + + Attributes: + name (str): + Identifier. The resource name of the program. Format: + ``accounts/{account}/programs/{program}`` + documentation_uri (str): + Output only. The URL of a Merchant Center + help page describing the program. + state (google.shopping.merchant_accounts_v1beta.types.Program.State): + Output only. The participation state of the + account in the program. + active_region_codes (MutableSequence[str]): + Output only. The regions in which the account is actively + participating in the program. Active regions are defined as + those where all program requirements affecting the regions + have been met. + + Region codes are defined by + `CLDR `__. This is either a + country where the program applies specifically to that + country or ``001`` when the program applies globally. + unmet_requirements (MutableSequence[google.shopping.merchant_accounts_v1beta.types.Program.Requirement]): + Output only. 
The requirements that the + account has not yet satisfied that are affecting + participation in the program. + """ + + class State(proto.Enum): + r"""Possible program participation states for the account. + + Values: + STATE_UNSPECIFIED (0): + Default value. This value is unused. + NOT_ELIGIBLE (1): + The account is not eligible to participate in + the program. + ELIGIBLE (2): + The account is eligible to participate in the + program. + ENABLED (3): + The program is enabled for the account. + """ + STATE_UNSPECIFIED = 0 + NOT_ELIGIBLE = 1 + ELIGIBLE = 2 + ENABLED = 3 + + class Requirement(proto.Message): + r"""Defines a requirement specified for participation in the + program. + + Attributes: + title (str): + Output only. Name of the requirement. + documentation_uri (str): + Output only. The URL of a help page + describing the requirement. + affected_region_codes (MutableSequence[str]): + Output only. The regions that are currently affected by this + requirement not being met. + + Region codes are defined by + `CLDR `__. This is either a + country where the program applies specifically to that + country or ``001`` when the program applies globally. + """ + + title: str = proto.Field( + proto.STRING, + number=1, + ) + documentation_uri: str = proto.Field( + proto.STRING, + number=2, + ) + affected_region_codes: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=3, + ) + + name: str = proto.Field( + proto.STRING, + number=1, + ) + documentation_uri: str = proto.Field( + proto.STRING, + number=2, + ) + state: State = proto.Field( + proto.ENUM, + number=3, + enum=State, + ) + active_region_codes: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=4, + ) + unmet_requirements: MutableSequence[Requirement] = proto.RepeatedField( + proto.MESSAGE, + number=5, + message=Requirement, + ) + + +class GetProgramRequest(proto.Message): + r"""Request message for the GetProgram method. + + Attributes: + name (str): + Required. The name of the program to retrieve. Format: + ``accounts/{account}/programs/{program}`` + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class ListProgramsRequest(proto.Message): + r"""Request message for the ListPrograms method. + + Attributes: + parent (str): + Required. The name of the account for which to retrieve all + programs. Format: ``accounts/{account}`` + page_size (int): + Optional. The maximum number of programs to + return in a single response. If unspecified (or + 0), a default size of 1000 is used. The maximum + value is 1000; values above 1000 will be coerced + to 1000. + page_token (str): + Optional. A continuation token, received from a previous + ``ListPrograms`` call. Provide this to retrieve the next + page. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + + +class ListProgramsResponse(proto.Message): + r"""Response message for the ListPrograms method. + + Attributes: + programs (MutableSequence[google.shopping.merchant_accounts_v1beta.types.Program]): + The programs for the given account. + next_page_token (str): + A token that can be sent as ``page_token`` to retrieve the + next page. If this field is omitted, there are no subsequent + pages. 
+ """ + + @property + def raw_page(self): + return self + + programs: MutableSequence["Program"] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="Program", + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + + +class EnableProgramRequest(proto.Message): + r"""Request message for the EnableProgram method. + + Attributes: + name (str): + Required. The name of the program for which to enable + participation for the given account. Format: + ``accounts/{account}/programs/{program}`` + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class DisableProgramRequest(proto.Message): + r"""Request message for the DisableProgram method. + + Attributes: + name (str): + Required. The name of the program for which to disable + participation for the given account. Format: + ``accounts/{account}/programs/{program}`` + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/types/regions.py b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/types/regions.py new file mode 100644 index 000000000000..2902938df755 --- /dev/null +++ b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/types/regions.py @@ -0,0 +1,323 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import wrappers_pb2 # type: ignore +import proto # type: ignore + +__protobuf__ = proto.module( + package="google.shopping.merchant.accounts.v1beta", + manifest={ + "GetRegionRequest", + "CreateRegionRequest", + "UpdateRegionRequest", + "DeleteRegionRequest", + "ListRegionsRequest", + "ListRegionsResponse", + "Region", + }, +) + + +class GetRegionRequest(proto.Message): + r"""Request message for the ``GetRegion`` method. + + Attributes: + name (str): + Required. The name of the region to retrieve. Format: + ``accounts/{account}/regions/{region}`` + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class CreateRegionRequest(proto.Message): + r"""Request message for the ``CreateRegion`` method. + + Attributes: + parent (str): + Required. The account to create a region for. Format: + ``accounts/{account}`` + region_id (str): + Required. The identifier for the region, + unique over all regions of the same account. + region (google.shopping.merchant_accounts_v1beta.types.Region): + Required. The region to create. 
+ """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + region_id: str = proto.Field( + proto.STRING, + number=2, + ) + region: "Region" = proto.Field( + proto.MESSAGE, + number=3, + message="Region", + ) + + +class UpdateRegionRequest(proto.Message): + r"""Request message for the ``UpdateRegion`` method. + + Attributes: + region (google.shopping.merchant_accounts_v1beta.types.Region): + Required. The updated region. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Optional. The comma-separated field mask indicating the + fields to update. Example: + ``"displayName,postalCodeArea.regionCode"``. + """ + + region: "Region" = proto.Field( + proto.MESSAGE, + number=1, + message="Region", + ) + update_mask: field_mask_pb2.FieldMask = proto.Field( + proto.MESSAGE, + number=2, + message=field_mask_pb2.FieldMask, + ) + + +class DeleteRegionRequest(proto.Message): + r"""Request message for the ``DeleteRegion`` method. + + Attributes: + name (str): + Required. The name of the region to delete. Format: + ``accounts/{account}/regions/{region}`` + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class ListRegionsRequest(proto.Message): + r"""Request message for the ``ListRegions`` method. + + Attributes: + parent (str): + Required. The account to list regions for. Format: + ``accounts/{account}`` + page_size (int): + Optional. The maximum number of regions to + return. The service may return fewer than this + value. If unspecified, at most 50 regions will + be returned. The maximum value is 1000; values + above 1000 will be coerced to 1000. + page_token (str): + Optional. A page token, received from a previous + ``ListRegions`` call. Provide this to retrieve the + subsequent page. + + When paginating, all other parameters provided to + ``ListRegions`` must match the call that provided the page + token. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + + +class ListRegionsResponse(proto.Message): + r"""Response message for the ``ListRegions`` method. + + Attributes: + regions (MutableSequence[google.shopping.merchant_accounts_v1beta.types.Region]): + The regions from the specified merchant. + next_page_token (str): + A token, which can be sent as ``page_token`` to retrieve the + next page. If this field is omitted, there are no subsequent + pages. + """ + + @property + def raw_page(self): + return self + + regions: MutableSequence["Region"] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="Region", + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + + +class Region(proto.Message): + r"""Represents a geographic region that you can use as a target with + both the ``RegionalInventory`` and ``ShippingSettings`` services. + You can define regions as collections of either postal codes or, in + some countries, using predefined geotargets. For more information, + see `Set up + regions `__ + for more information. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + name (str): + Identifier. The resource name of the region. Format: + ``accounts/{account}/regions/{region}`` + display_name (str): + Optional. The display name of the region. + + This field is a member of `oneof`_ ``_display_name``. 
+ postal_code_area (google.shopping.merchant_accounts_v1beta.types.Region.PostalCodeArea): + Optional. A list of postal codes that defines + the region area. + geotarget_area (google.shopping.merchant_accounts_v1beta.types.Region.GeoTargetArea): + Optional. A list of geotargets that defines + the region area. + regional_inventory_eligible (google.protobuf.wrappers_pb2.BoolValue): + Output only. Indicates if the region is + eligible for use in the Regional Inventory + configuration. + shipping_eligible (google.protobuf.wrappers_pb2.BoolValue): + Output only. Indicates if the region is + eligible for use in the Shipping Services + configuration. + """ + + class PostalCodeArea(proto.Message): + r"""A list of postal codes that defines the region area. Note: All + regions defined using postal codes are accessible through the + account's ``ShippingSettings.postalCodeGroups`` resource. + + Attributes: + region_code (str): + Required. `CLDR territory + code `__ + or the country the postal code group applies to. + postal_codes (MutableSequence[google.shopping.merchant_accounts_v1beta.types.Region.PostalCodeArea.PostalCodeRange]): + Required. A range of postal codes. + """ + + class PostalCodeRange(proto.Message): + r"""A range of postal codes that defines the region area. + + Attributes: + begin (str): + Required. A postal code or a pattern of the form prefix\* + denoting the inclusive lower bound of the range defining the + area. Examples values: ``94108``, ``9410*``, ``9*``. + end (str): + Optional. A postal code or a pattern of the form ``prefix*`` + denoting the inclusive upper bound of the range defining the + area. It must have the same length as postalCodeRangeBegin: + if postalCodeRangeBegin is a postal code then + postalCodeRangeEnd must be a postal code too; if + postalCodeRangeBegin is a pattern then postalCodeRangeEnd + must be a pattern with the same prefix length. Optional: if + not set, then the area is defined as being all the postal + codes matching postalCodeRangeBegin. + """ + + begin: str = proto.Field( + proto.STRING, + number=1, + ) + end: str = proto.Field( + proto.STRING, + number=2, + ) + + region_code: str = proto.Field( + proto.STRING, + number=1, + ) + postal_codes: MutableSequence[ + "Region.PostalCodeArea.PostalCodeRange" + ] = proto.RepeatedField( + proto.MESSAGE, + number=2, + message="Region.PostalCodeArea.PostalCodeRange", + ) + + class GeoTargetArea(proto.Message): + r"""A list of geotargets that defines the region area. + + Attributes: + geotarget_criteria_ids (MutableSequence[int]): + Required. A non-empty list of `location + IDs `__. + They must all be of the same location type (for example, + state). 
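A region can equally be defined by geotarget criteria IDs instead of postal codes. A small sketch of that variant, under the same assumption that the types are re-exported from ``merchant_accounts_v1beta``; the criteria ID below is a placeholder, not a verified location ID:

```python
# Hypothetical sketch, not part of the generated file.
from google.shopping import merchant_accounts_v1beta

state_region = merchant_accounts_v1beta.Region(
    display_name="Example state",  # placeholder
    geotarget_area=merchant_accounts_v1beta.Region.GeoTargetArea(
        # Placeholder location ID; all IDs in the list must share one
        # location type (for example, state), per the docstring above.
        geotarget_criteria_ids=[21167],
    ),
)
```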
+ """ + + geotarget_criteria_ids: MutableSequence[int] = proto.RepeatedField( + proto.INT64, + number=1, + ) + + name: str = proto.Field( + proto.STRING, + number=1, + ) + display_name: str = proto.Field( + proto.STRING, + number=2, + optional=True, + ) + postal_code_area: PostalCodeArea = proto.Field( + proto.MESSAGE, + number=3, + message=PostalCodeArea, + ) + geotarget_area: GeoTargetArea = proto.Field( + proto.MESSAGE, + number=4, + message=GeoTargetArea, + ) + regional_inventory_eligible: wrappers_pb2.BoolValue = proto.Field( + proto.MESSAGE, + number=5, + message=wrappers_pb2.BoolValue, + ) + shipping_eligible: wrappers_pb2.BoolValue = proto.Field( + proto.MESSAGE, + number=6, + message=wrappers_pb2.BoolValue, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/types/shippingsettings.py b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/types/shippingsettings.py new file mode 100644 index 000000000000..93ccf59230a9 --- /dev/null +++ b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/types/shippingsettings.py @@ -0,0 +1,1498 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +from google.shopping.type.types import types +import proto # type: ignore + +__protobuf__ = proto.module( + package="google.shopping.merchant.accounts.v1beta", + manifest={ + "ShippingSettings", + "Service", + "Distance", + "Warehouse", + "WarehouseCutoffTime", + "Address", + "DeliveryTime", + "CutoffTime", + "BusinessDayConfig", + "WarehouseBasedDeliveryTime", + "RateGroup", + "Table", + "TransitTable", + "MinimumOrderValueTable", + "Headers", + "LocationIdSet", + "Row", + "Value", + "CarrierRate", + "GetShippingSettingsRequest", + "InsertShippingSettingsRequest", + }, +) + + +class ShippingSettings(proto.Message): + r"""The merchant account's [shipping + setting]((https://support.google.com/merchants/answer/6069284). + + Attributes: + name (str): + Identifier. The resource name of the shipping setting. + Format: ``accounts/{account}/shippingSetting`` + services (MutableSequence[google.shopping.merchant_accounts_v1beta.types.Service]): + Optional. The target account's list of + services. + warehouses (MutableSequence[google.shopping.merchant_accounts_v1beta.types.Warehouse]): + Optional. A list of warehouses which can be referred to in + ``services``. + etag (str): + Required. This field is used for avoid async + issue. Make sure shipping setting data + didn't change between get call and insert call. + The user should do following steps: + + 1. Set etag field as empty string for initial + shipping setting creation. + + 2. 
After initial creation, call the get method to + obtain an etag and the current shipping setting + data before calling insert. + + 3. Modify the shipping setting information as + wanted. + + 4. Call the insert method with the wanted shipping + setting information and the etag obtained + in step 2. + + 5. If the shipping setting data changed between step + 2 and step 4, the insert request will fail + because the etag changes every time the + shipping setting data changes. The user should + repeat steps 2-4 with the new etag. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + services: MutableSequence["Service"] = proto.RepeatedField( + proto.MESSAGE, + number=2, + message="Service", + ) + warehouses: MutableSequence["Warehouse"] = proto.RepeatedField( + proto.MESSAGE, + number=3, + message="Warehouse", + ) + etag: str = proto.Field( + proto.STRING, + number=4, + ) + + +class Service(proto.Message): + r"""Shipping service. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + service_name (str): + Required. Free-form name of the service. Must + be unique within target account. + + This field is a member of `oneof`_ ``_service_name``. + active (bool): + Required. A boolean exposing the active + status of the shipping service. + + This field is a member of `oneof`_ ``_active``. + delivery_countries (MutableSequence[str]): + Required. The CLDR territory code of the + countries to which the service applies. + currency_code (str): + The CLDR code of the currency to which this + service applies. Must match that of the prices + in rate groups. + + This field is a member of `oneof`_ ``_currency_code``. + delivery_time (google.shopping.merchant_accounts_v1beta.types.DeliveryTime): + Required. Time spent in various aspects from + order to the delivery of the product. + + This field is a member of `oneof`_ ``_delivery_time``. + rate_groups (MutableSequence[google.shopping.merchant_accounts_v1beta.types.RateGroup]): + Optional. Shipping rate group definitions. Only the last one + is allowed to have an empty ``applicable_shipping_labels``, + which means "everything else". The other + ``applicable_shipping_labels`` must not overlap. + shipment_type (google.shopping.merchant_accounts_v1beta.types.Service.ShipmentType): + Type of locations this service ships orders + to. + + This field is a member of `oneof`_ ``_shipment_type``. + minimum_order_value (google.shopping.type.types.Price): + Minimum order value for this service. If set, indicates that + customers will have to spend at least this amount. All + prices within a service must have the same currency. Cannot + be set together with minimum_order_value_table. + + This field is a member of `oneof`_ ``_minimum_order_value``. + minimum_order_value_table (google.shopping.merchant_accounts_v1beta.types.MinimumOrderValueTable): + Table of per store minimum order values for the pickup + fulfillment type. Cannot be set together with + minimum_order_value. + + This field is a member of `oneof`_ ``_minimum_order_value_table``. + store_config (google.shopping.merchant_accounts_v1beta.types.Service.StoreConfig): + A list of stores your products are delivered + from. This is only valid for the local delivery + shipment type. + + This field is a member of `oneof`_ ``_store_config``. + loyalty_programs (MutableSequence[google.shopping.merchant_accounts_v1beta.types.Service.LoyaltyProgram]): + Optional.
Loyalty programs that this shipping + service is limited to. + """ + + class ShipmentType(proto.Enum): + r"""Shipment type of shipping service. + + Values: + SHIPMENT_TYPE_UNSPECIFIED (0): + This service did not specify shipment type. + DELIVERY (1): + This service ships orders to an address + chosen by the customer. + LOCAL_DELIVERY (2): + This service ships orders to an address + chosen by the customer. The order is shipped + from a local store near by. + COLLECTION_POINT (3): + This service ships orders to an address + chosen by the customer. The order is shipped + from a collection point. + """ + SHIPMENT_TYPE_UNSPECIFIED = 0 + DELIVERY = 1 + LOCAL_DELIVERY = 2 + COLLECTION_POINT = 3 + + class StoreConfig(proto.Message): + r"""A list of stores your products are delivered from. + This is only valid for the local delivery shipment type. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + store_service_type (google.shopping.merchant_accounts_v1beta.types.Service.StoreConfig.StoreServiceType): + Indicates whether all stores, or selected + stores, listed by this merchant provide local + delivery. + + This field is a member of `oneof`_ ``_store_service_type``. + store_codes (MutableSequence[str]): + Optional. A list of store codes that provide local delivery. + If empty, then ``all_stores`` must be true. + cutoff_config (google.shopping.merchant_accounts_v1beta.types.Service.StoreConfig.CutoffConfig): + Configs related to local delivery ends for + the day. + + This field is a member of `oneof`_ ``_cutoff_config``. + service_radius (google.shopping.merchant_accounts_v1beta.types.Distance): + Maximum delivery radius. + This is only required for the local delivery + shipment type. + + This field is a member of `oneof`_ ``_service_radius``. + """ + + class StoreServiceType(proto.Enum): + r"""Indicates whether all stores, or selected stores, listed by + the merchant provide local delivery. + + Values: + STORE_SERVICE_TYPE_UNSPECIFIED (0): + Did not specify store service type. + ALL_STORES (1): + Indicates whether all stores, current and + future, listed by this merchant provide local + delivery. + SELECTED_STORES (2): + Indicates that only the stores listed in ``store_codes`` are + eligible for local delivery. + """ + STORE_SERVICE_TYPE_UNSPECIFIED = 0 + ALL_STORES = 1 + SELECTED_STORES = 2 + + class CutoffConfig(proto.Message): + r"""Configs related to local delivery ends for the day. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + local_cutoff_time (google.shopping.merchant_accounts_v1beta.types.Service.StoreConfig.CutoffConfig.LocalCutoffTime): + Time that local delivery ends for the day. + + This field is a member of `oneof`_ ``_local_cutoff_time``. + store_close_offset_hours (int): + Only valid with local delivery fulfillment. Represents + cutoff time as the number of hours before store closing. + Mutually exclusive with ``local_cutoff_time``. + + This field is a member of `oneof`_ ``_store_close_offset_hours``. + no_delivery_post_cutoff (bool): + Merchants can opt-out of showing n+1 day local delivery when + they have a shipping service configured to n day local + delivery. 
For example, if the shipping service defines + same-day delivery, and it's past the cut-off, setting this + field to ``true`` results in the calculated shipping service + rate returning ``NO_DELIVERY_POST_CUTOFF``. In the same + example, setting this field to ``false`` results in the + calculated shipping time being one day. This is only for + local delivery. + + This field is a member of `oneof`_ ``_no_delivery_post_cutoff``. + """ + + class LocalCutoffTime(proto.Message): + r"""Time that local delivery ends for the day. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + hour (int): + Hour local delivery orders must be placed by + to process the same day. + + This field is a member of `oneof`_ ``_hour``. + minute (int): + Minute local delivery orders must be placed + by to process the same day. + + This field is a member of `oneof`_ ``_minute``. + """ + + hour: int = proto.Field( + proto.INT64, + number=1, + optional=True, + ) + minute: int = proto.Field( + proto.INT64, + number=2, + optional=True, + ) + + local_cutoff_time: "Service.StoreConfig.CutoffConfig.LocalCutoffTime" = ( + proto.Field( + proto.MESSAGE, + number=1, + optional=True, + message="Service.StoreConfig.CutoffConfig.LocalCutoffTime", + ) + ) + store_close_offset_hours: int = proto.Field( + proto.INT64, + number=2, + optional=True, + ) + no_delivery_post_cutoff: bool = proto.Field( + proto.BOOL, + number=3, + optional=True, + ) + + store_service_type: "Service.StoreConfig.StoreServiceType" = proto.Field( + proto.ENUM, + number=1, + optional=True, + enum="Service.StoreConfig.StoreServiceType", + ) + store_codes: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=2, + ) + cutoff_config: "Service.StoreConfig.CutoffConfig" = proto.Field( + proto.MESSAGE, + number=3, + optional=True, + message="Service.StoreConfig.CutoffConfig", + ) + service_radius: "Distance" = proto.Field( + proto.MESSAGE, + number=4, + optional=True, + message="Distance", + ) + + class LoyaltyProgram(proto.Message): + r"""`Loyalty + program `__ + provided by a merchant. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + program_label (str): + This is the loyalty program label set in your + loyalty program settings in Merchant Center. + This sub-attribute allows Google to map your + loyalty program to eligible offers. + + This field is a member of `oneof`_ ``_program_label``. + loyalty_program_tiers (MutableSequence[google.shopping.merchant_accounts_v1beta.types.Service.LoyaltyProgram.LoyaltyProgramTiers]): + Optional. Loyalty program tier of this + shipping service. + """ + + class LoyaltyProgramTiers(proto.Message): + r"""Subset of a merchants loyalty program. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + tier_label (str): + The tier label [tier_label] sub-attribute differentiates + offer level benefits between each tier. This value is also + set in your program settings in Merchant Center, and is + required for data source changes even if your loyalty + program only has 1 tier. + + This field is a member of `oneof`_ ``_tier_label``. 
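Putting these pieces together, a hedged sketch of a ``Service`` restricted to one loyalty-program tier. The program and tier labels are placeholders that would have to match the Merchant Center configuration described above, and required fields such as ``delivery_time`` and ``rate_groups`` are omitted because this only illustrates message construction:

```python
# Hypothetical sketch, not part of the generated file.
from google.shopping import merchant_accounts_v1beta

loyalty_restriction = merchant_accounts_v1beta.Service.LoyaltyProgram(
    program_label="my-loyalty-program",  # placeholder program label
    loyalty_program_tiers=[
        merchant_accounts_v1beta.Service.LoyaltyProgram.LoyaltyProgramTiers(
            tier_label="gold",  # placeholder tier label
        )
    ],
)

service = merchant_accounts_v1beta.Service(
    service_name="Gold-tier free shipping",  # placeholder, unique per account
    active=True,
    delivery_countries=["US"],
    currency_code="USD",
    loyalty_programs=[loyalty_restriction],
    # delivery_time, rate_groups, etc. omitted for brevity in this sketch.
)
```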
+ """ + + tier_label: str = proto.Field( + proto.STRING, + number=1, + optional=True, + ) + + program_label: str = proto.Field( + proto.STRING, + number=1, + optional=True, + ) + loyalty_program_tiers: MutableSequence[ + "Service.LoyaltyProgram.LoyaltyProgramTiers" + ] = proto.RepeatedField( + proto.MESSAGE, + number=2, + message="Service.LoyaltyProgram.LoyaltyProgramTiers", + ) + + service_name: str = proto.Field( + proto.STRING, + number=1, + optional=True, + ) + active: bool = proto.Field( + proto.BOOL, + number=2, + optional=True, + ) + delivery_countries: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=3, + ) + currency_code: str = proto.Field( + proto.STRING, + number=4, + optional=True, + ) + delivery_time: "DeliveryTime" = proto.Field( + proto.MESSAGE, + number=5, + optional=True, + message="DeliveryTime", + ) + rate_groups: MutableSequence["RateGroup"] = proto.RepeatedField( + proto.MESSAGE, + number=6, + message="RateGroup", + ) + shipment_type: ShipmentType = proto.Field( + proto.ENUM, + number=7, + optional=True, + enum=ShipmentType, + ) + minimum_order_value: types.Price = proto.Field( + proto.MESSAGE, + number=8, + optional=True, + message=types.Price, + ) + minimum_order_value_table: "MinimumOrderValueTable" = proto.Field( + proto.MESSAGE, + number=9, + optional=True, + message="MinimumOrderValueTable", + ) + store_config: StoreConfig = proto.Field( + proto.MESSAGE, + number=10, + optional=True, + message=StoreConfig, + ) + loyalty_programs: MutableSequence[LoyaltyProgram] = proto.RepeatedField( + proto.MESSAGE, + number=11, + message=LoyaltyProgram, + ) + + +class Distance(proto.Message): + r"""Maximum delivery radius. + This is only required for the local delivery shipment type. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + value (int): + Integer value of distance. + + This field is a member of `oneof`_ ``_value``. + unit (google.shopping.merchant_accounts_v1beta.types.Distance.Unit): + Unit can differ based on country, it is + parameterized to include miles and kilometers. + + This field is a member of `oneof`_ ``_unit``. + """ + + class Unit(proto.Enum): + r"""Unit can differ based on country, it is parameterized to + include miles and kilometers. + + Values: + UNIT_UNSPECIFIED (0): + Unit unspecified + MILES (1): + Unit in miles + KILOMETERS (2): + Unit in kilometers + """ + UNIT_UNSPECIFIED = 0 + MILES = 1 + KILOMETERS = 2 + + value: int = proto.Field( + proto.INT64, + number=1, + optional=True, + ) + unit: Unit = proto.Field( + proto.ENUM, + number=2, + optional=True, + enum=Unit, + ) + + +class Warehouse(proto.Message): + r"""A fulfillment warehouse, which stores and handles inventory. + Next tag: 7 + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + name (str): + Required. The name of the warehouse. Must be + unique within account. + + This field is a member of `oneof`_ ``_name``. + shipping_address (google.shopping.merchant_accounts_v1beta.types.Address): + Required. Shipping address of the warehouse. + + This field is a member of `oneof`_ ``_shipping_address``. + cutoff_time (google.shopping.merchant_accounts_v1beta.types.WarehouseCutoffTime): + Required. The latest time of day that an + order can be accepted and begin processing. 
+ Later orders will be processed in the next day. + The time is based on the warehouse postal code. + + This field is a member of `oneof`_ ``_cutoff_time``. + handling_days (int): + Required. The number of days it takes for + this warehouse to pack up and ship an item. This + is on the warehouse level, but can be overridden + on the offer level based on the attributes of an + item. + + This field is a member of `oneof`_ ``_handling_days``. + business_day_config (google.shopping.merchant_accounts_v1beta.types.BusinessDayConfig): + Business days of the warehouse. + If not set, will be Monday to Friday by default. + + This field is a member of `oneof`_ ``_business_day_config``. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + optional=True, + ) + shipping_address: "Address" = proto.Field( + proto.MESSAGE, + number=2, + optional=True, + message="Address", + ) + cutoff_time: "WarehouseCutoffTime" = proto.Field( + proto.MESSAGE, + number=3, + optional=True, + message="WarehouseCutoffTime", + ) + handling_days: int = proto.Field( + proto.INT64, + number=4, + optional=True, + ) + business_day_config: "BusinessDayConfig" = proto.Field( + proto.MESSAGE, + number=5, + optional=True, + message="BusinessDayConfig", + ) + + +class WarehouseCutoffTime(proto.Message): + r"""The latest time of day that an order can be accepted and + begin processing. Later orders will be processed in the next + day. The time is based on the warehouse postal code. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + hour (int): + Required. Hour of the cutoff time until which + an order has to be placed to be processed in the + same day by the warehouse. Hour is based on the + timezone of warehouse. + + This field is a member of `oneof`_ ``_hour``. + minute (int): + Required. Minute of the cutoff time until + which an order has to be placed to be processed + in the same day by the warehouse. Minute is + based on the timezone of warehouse. + + This field is a member of `oneof`_ ``_minute``. + """ + + hour: int = proto.Field( + proto.INT32, + number=1, + optional=True, + ) + minute: int = proto.Field( + proto.INT32, + number=2, + optional=True, + ) + + +class Address(proto.Message): + r"""Shipping address of the warehouse. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + street_address (str): + Street-level part of the address. For example: + ``111w 31st Street``. + + This field is a member of `oneof`_ ``_street_address``. + city (str): + Required. City, town or commune. May also + include dependent localities or sublocalities + (For example neighborhoods or suburbs). + + This field is a member of `oneof`_ ``_city``. + administrative_area (str): + Required. Top-level administrative + subdivision of the country. For example, a state + like California ("CA") or a province like Quebec + ("QC"). + + This field is a member of `oneof`_ ``_administrative_area``. + postal_code (str): + Required. Postal code or ZIP (For example + "94043"). + + This field is a member of `oneof`_ ``_postal_code``. + region_code (str): + Required. `CLDR country + code `__ + (For example "US"). + + This field is a member of `oneof`_ ``_region_code``. 
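A short sketch of a ``Warehouse`` assembled from the ``Address`` and ``WarehouseCutoffTime`` messages above; all field values are placeholders, and the types are assumed to be re-exported from ``merchant_accounts_v1beta``:

```python
# Hypothetical sketch, not part of the generated file.
from google.shopping import merchant_accounts_v1beta

warehouse = merchant_accounts_v1beta.Warehouse(
    name="east-coast-warehouse",  # placeholder, unique within the account
    shipping_address=merchant_accounts_v1beta.Address(
        street_address="111w 31st Street",
        city="New York",
        administrative_area="NY",
        postal_code="10001",
        region_code="US",
    ),
    # Orders placed after 14:30 (warehouse local time) ship the next business day.
    cutoff_time=merchant_accounts_v1beta.WarehouseCutoffTime(hour=14, minute=30),
    handling_days=1,
)
```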
+ """ + + street_address: str = proto.Field( + proto.STRING, + number=1, + optional=True, + ) + city: str = proto.Field( + proto.STRING, + number=2, + optional=True, + ) + administrative_area: str = proto.Field( + proto.STRING, + number=3, + optional=True, + ) + postal_code: str = proto.Field( + proto.STRING, + number=4, + optional=True, + ) + region_code: str = proto.Field( + proto.STRING, + number=5, + optional=True, + ) + + +class DeliveryTime(proto.Message): + r"""Time spent in various aspects from order to the delivery of + the product. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + min_transit_days (int): + Minimum number of business days that is spent in transit. 0 + means same day delivery, 1 means next day delivery. Either + ``min_transit_days``, ``max_transit_days`` or + ``transit_time_table`` must be set, but not both. + + This field is a member of `oneof`_ ``_min_transit_days``. + max_transit_days (int): + Maximum number of business days that is spent in transit. 0 + means same day delivery, 1 means next day delivery. Must be + greater than or equal to ``min_transit_days``. + + This field is a member of `oneof`_ ``_max_transit_days``. + cutoff_time (google.shopping.merchant_accounts_v1beta.types.CutoffTime): + Business days cutoff time definition. + If not configured the cutoff time will be + defaulted to 8AM PST. + + This field is a member of `oneof`_ ``_cutoff_time``. + min_handling_days (int): + Minimum number of business days spent before + an order is shipped. 0 means same day shipped, 1 + means next day shipped. + + This field is a member of `oneof`_ ``_min_handling_days``. + max_handling_days (int): + Maximum number of business days spent before an order is + shipped. 0 means same day shipped, 1 means next day shipped. + Must be greater than or equal to ``min_handling_days``. + + This field is a member of `oneof`_ ``_max_handling_days``. + transit_time_table (google.shopping.merchant_accounts_v1beta.types.TransitTable): + Transit time table, number of business days spent in transit + based on row and column dimensions. Either + ``min_transit_days``, ``max_transit_days`` or + ``transit_time_table`` can be set, but not both. + + This field is a member of `oneof`_ ``_transit_time_table``. + handling_business_day_config (google.shopping.merchant_accounts_v1beta.types.BusinessDayConfig): + The business days during which orders can be + handled. If not provided, Monday to Friday + business days will be assumed. + + This field is a member of `oneof`_ ``_handling_business_day_config``. + transit_business_day_config (google.shopping.merchant_accounts_v1beta.types.BusinessDayConfig): + The business days during which orders can be + in-transit. If not provided, Monday to Friday + business days will be assumed. + + This field is a member of `oneof`_ ``_transit_business_day_config``. + warehouse_based_delivery_times (MutableSequence[google.shopping.merchant_accounts_v1beta.types.WarehouseBasedDeliveryTime]): + Optional. Indicates that the delivery time should be + calculated per warehouse (shipping origin location) based on + the settings of the selected carrier. When set, no other + transit time related field in [delivery + time][[google.shopping.content.bundles.ShippingSetting.DeliveryTime] + should be set. 
+ """ + + min_transit_days: int = proto.Field( + proto.INT32, + number=1, + optional=True, + ) + max_transit_days: int = proto.Field( + proto.INT32, + number=2, + optional=True, + ) + cutoff_time: "CutoffTime" = proto.Field( + proto.MESSAGE, + number=3, + optional=True, + message="CutoffTime", + ) + min_handling_days: int = proto.Field( + proto.INT32, + number=4, + optional=True, + ) + max_handling_days: int = proto.Field( + proto.INT32, + number=5, + optional=True, + ) + transit_time_table: "TransitTable" = proto.Field( + proto.MESSAGE, + number=6, + optional=True, + message="TransitTable", + ) + handling_business_day_config: "BusinessDayConfig" = proto.Field( + proto.MESSAGE, + number=7, + optional=True, + message="BusinessDayConfig", + ) + transit_business_day_config: "BusinessDayConfig" = proto.Field( + proto.MESSAGE, + number=8, + optional=True, + message="BusinessDayConfig", + ) + warehouse_based_delivery_times: MutableSequence[ + "WarehouseBasedDeliveryTime" + ] = proto.RepeatedField( + proto.MESSAGE, + number=9, + message="WarehouseBasedDeliveryTime", + ) + + +class CutoffTime(proto.Message): + r"""Business days cutoff time definition. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + hour (int): + Required. Hour of the cutoff time until which + an order has to be placed to be processed in the + same day. + + This field is a member of `oneof`_ ``_hour``. + minute (int): + Required. Minute of the cutoff time until + which an order has to be placed to be processed + in the same day. + + This field is a member of `oneof`_ ``_minute``. + time_zone (str): + Required. `Timezone + identifier `__ + For example "Europe/Zurich". + + This field is a member of `oneof`_ ``_time_zone``. + """ + + hour: int = proto.Field( + proto.INT32, + number=1, + optional=True, + ) + minute: int = proto.Field( + proto.INT32, + number=2, + optional=True, + ) + time_zone: str = proto.Field( + proto.STRING, + number=3, + optional=True, + ) + + +class BusinessDayConfig(proto.Message): + r"""Business days of the warehouse. + + Attributes: + business_days (MutableSequence[google.shopping.merchant_accounts_v1beta.types.BusinessDayConfig.Weekday]): + Required. Regular business days. + May not be empty. + """ + + class Weekday(proto.Enum): + r""" + + Values: + WEEKDAY_UNSPECIFIED (0): + No description available. + MONDAY (1): + No description available. + TUESDAY (2): + No description available. + WEDNESDAY (3): + No description available. + THURSDAY (4): + No description available. + FRIDAY (5): + No description available. + SATURDAY (6): + No description available. + SUNDAY (7): + No description available. + """ + WEEKDAY_UNSPECIFIED = 0 + MONDAY = 1 + TUESDAY = 2 + WEDNESDAY = 3 + THURSDAY = 4 + FRIDAY = 5 + SATURDAY = 6 + SUNDAY = 7 + + business_days: MutableSequence[Weekday] = proto.RepeatedField( + proto.ENUM, + number=1, + enum=Weekday, + ) + + +class WarehouseBasedDeliveryTime(proto.Message): + r"""Indicates that the delivery time should be calculated per warehouse + (shipping origin location) based on the settings of the selected + carrier. When set, no other transit time related field in + ``delivery_time`` should be set. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + carrier (str): + Required. Carrier, such as ``"UPS"`` or ``"Fedex"``. 
+ + This field is a member of `oneof`_ ``_carrier``. + carrier_service (str): + Required. Carrier service, such as ``"ground"`` or + ``"2 days"``. The name of the service must be in the + eddSupportedServices list. + + This field is a member of `oneof`_ ``_carrier_service``. + warehouse (str): + Required. Warehouse name. This should match + [warehouse][ShippingSetting.warehouses.name] + + This field is a member of `oneof`_ ``_warehouse``. + """ + + carrier: str = proto.Field( + proto.STRING, + number=1, + optional=True, + ) + carrier_service: str = proto.Field( + proto.STRING, + number=2, + optional=True, + ) + warehouse: str = proto.Field( + proto.STRING, + number=3, + optional=True, + ) + + +class RateGroup(proto.Message): + r"""Shipping rate group definitions. Only the last one is allowed to + have an empty ``applicable_shipping_labels``, which means + "everything else". The other ``applicable_shipping_labels`` must not + overlap. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + applicable_shipping_labels (MutableSequence[str]): + Required. A list of `shipping + labels `__ + defining the products to which this rate group applies to. + This is a disjunction: only one of the labels has to match + for the rate group to apply. May only be empty for the last + rate group of a service. + single_value (google.shopping.merchant_accounts_v1beta.types.Value): + The value of the rate group (For example flat rate $10). Can + only be set if ``main_table`` and ``subtables`` are not set. + + This field is a member of `oneof`_ ``_single_value``. + main_table (google.shopping.merchant_accounts_v1beta.types.Table): + A table defining the rate group, when ``single_value`` is + not expressive enough. Can only be set if ``single_value`` + is not set. + + This field is a member of `oneof`_ ``_main_table``. + subtables (MutableSequence[google.shopping.merchant_accounts_v1beta.types.Table]): + Optional. A list of subtables referred to by ``main_table``. + Can only be set if ``main_table`` is set. + carrier_rates (MutableSequence[google.shopping.merchant_accounts_v1beta.types.CarrierRate]): + Optional. A list of carrier rates that can be referred to by + ``main_table`` or ``single_value``. + name (str): + Optional. Name of the rate group. + If set has to be unique within shipping service. + + This field is a member of `oneof`_ ``_name``. + """ + + applicable_shipping_labels: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=1, + ) + single_value: "Value" = proto.Field( + proto.MESSAGE, + number=2, + optional=True, + message="Value", + ) + main_table: "Table" = proto.Field( + proto.MESSAGE, + number=3, + optional=True, + message="Table", + ) + subtables: MutableSequence["Table"] = proto.RepeatedField( + proto.MESSAGE, + number=4, + message="Table", + ) + carrier_rates: MutableSequence["CarrierRate"] = proto.RepeatedField( + proto.MESSAGE, + number=5, + message="CarrierRate", + ) + name: str = proto.Field( + proto.STRING, + number=6, + optional=True, + ) + + +class Table(proto.Message): + r"""A table defining the rate group, when ``single_value`` is not + expressive enough. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + name (str): + Name of the table. Required for subtables, + ignored for the main table. 
+ + This field is a member of `oneof`_ ``_name``. + row_headers (google.shopping.merchant_accounts_v1beta.types.Headers): + Required. Headers of the table's rows. + + This field is a member of `oneof`_ ``_row_headers``. + column_headers (google.shopping.merchant_accounts_v1beta.types.Headers): + Headers of the table's columns. Optional: if + not set then the table has only one dimension. + + This field is a member of `oneof`_ ``_column_headers``. + rows (MutableSequence[google.shopping.merchant_accounts_v1beta.types.Row]): + Required. The list of rows that constitute the table. Must + have the same length as ``row_headers``. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + optional=True, + ) + row_headers: "Headers" = proto.Field( + proto.MESSAGE, + number=2, + optional=True, + message="Headers", + ) + column_headers: "Headers" = proto.Field( + proto.MESSAGE, + number=3, + optional=True, + message="Headers", + ) + rows: MutableSequence["Row"] = proto.RepeatedField( + proto.MESSAGE, + number=4, + message="Row", + ) + + +class TransitTable(proto.Message): + r"""Transit time table, number of business days spent in transit based + on row and column dimensions. Either ``min_transit_days``, + ``max_transit_days`` or ``transit_time_table`` can be set, but not + both. + + Attributes: + postal_code_group_names (MutableSequence[str]): + Required. A list of region names + [Region.name][google.shopping.merchant.accounts.v1beta.Region.name] + . The last value can be ``"all other locations"``. Example: + ``["zone 1", "zone 2", "all other locations"]``. The + referred postal code groups must match the delivery country + of the service. + transit_time_labels (MutableSequence[str]): + Required. A list of transit time labels. The last value can + be ``"all other labels"``. Example: + ``["food", "electronics", "all other labels"]``. + rows (MutableSequence[google.shopping.merchant_accounts_v1beta.types.TransitTable.TransitTimeRow]): + Required. If there's only one dimension set of + ``postal_code_group_names`` or ``transit_time_labels``, + there are multiple rows each with one value for that + dimension. If there are two dimensions, each row corresponds + to a ``postal_code_group_names``, and columns (values) to a + ``transit_time_labels``. + """ + + class TransitTimeRow(proto.Message): + r"""If there's only one dimension set of ``postal_code_group_names`` or + ``transit_time_labels``, there are multiple rows each with one value + for that dimension. If there are two dimensions, each row + corresponds to a ``postal_code_group_names``, and columns (values) + to a ``transit_time_labels``. + + Attributes: + values (MutableSequence[google.shopping.merchant_accounts_v1beta.types.TransitTable.TransitTimeRow.TransitTimeValue]): + Required. Transit time range (min-max) in + business days. + """ + + class TransitTimeValue(proto.Message): + r"""Transit time range (min-max) in business days. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + min_transit_days (int): + Minimum transit time range in business days. + 0 means same day delivery, 1 means next day + delivery. + + This field is a member of `oneof`_ ``_min_transit_days``. + max_transit_days (int): + Must be greater than or equal to ``min_transit_days``. + + This field is a member of `oneof`_ ``_max_transit_days``. 
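A sketch of a two-dimensional ``TransitTable`` as described above, with rows following ``postal_code_group_names`` and the values within each row following ``transit_time_labels``; all group names, labels, and day counts are placeholders:

```python
# Hypothetical sketch, not part of the generated file.
from google.shopping import merchant_accounts_v1beta

TransitTable = merchant_accounts_v1beta.TransitTable
TransitValue = TransitTable.TransitTimeRow.TransitTimeValue

transit_table = TransitTable(
    postal_code_group_names=["zone 1", "all other locations"],
    transit_time_labels=["food", "all other labels"],
    rows=[
        # Row for "zone 1": one value per transit time label.
        TransitTable.TransitTimeRow(
            values=[
                TransitValue(min_transit_days=1, max_transit_days=2),
                TransitValue(min_transit_days=2, max_transit_days=4),
            ]
        ),
        # Row for "all other locations".
        TransitTable.TransitTimeRow(
            values=[
                TransitValue(min_transit_days=3, max_transit_days=5),
                TransitValue(min_transit_days=4, max_transit_days=7),
            ]
        ),
    ],
)
```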
+ """ + + min_transit_days: int = proto.Field( + proto.INT32, + number=1, + optional=True, + ) + max_transit_days: int = proto.Field( + proto.INT32, + number=2, + optional=True, + ) + + values: MutableSequence[ + "TransitTable.TransitTimeRow.TransitTimeValue" + ] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="TransitTable.TransitTimeRow.TransitTimeValue", + ) + + postal_code_group_names: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=1, + ) + transit_time_labels: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=2, + ) + rows: MutableSequence[TransitTimeRow] = proto.RepeatedField( + proto.MESSAGE, + number=3, + message=TransitTimeRow, + ) + + +class MinimumOrderValueTable(proto.Message): + r"""Table of per store minimum order values for the pickup + fulfillment type. + + Attributes: + store_code_set_with_movs (MutableSequence[google.shopping.merchant_accounts_v1beta.types.MinimumOrderValueTable.StoreCodeSetWithMov]): + Required. A list of store code sets sharing + the same minimum order value (MOV). At least two + sets are required and the last one must be + empty, which signifies 'MOV for all other + stores'. Each store code can only appear once + across all the sets. All prices within a service + must have the same currency. + """ + + class StoreCodeSetWithMov(proto.Message): + r"""A list of store code sets sharing the same minimum order + value. At least two sets are required and the last one must be + empty, which signifies 'MOV for all other stores'. + Each store code can only appear once across all the sets. All + prices within a service must have the same currency. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + store_codes (MutableSequence[str]): + Optional. A list of unique store codes or + empty for the catch all. + value (google.shopping.type.types.Price): + The minimum order value for the given stores. + + This field is a member of `oneof`_ ``_value``. + """ + + store_codes: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=1, + ) + value: types.Price = proto.Field( + proto.MESSAGE, + number=2, + optional=True, + message=types.Price, + ) + + store_code_set_with_movs: MutableSequence[ + StoreCodeSetWithMov + ] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=StoreCodeSetWithMov, + ) + + +class Headers(proto.Message): + r"""A non-empty list of row or column headers for a table. Exactly one + of ``prices``, ``weights``, ``num_items``, + ``postal_code_group_names``, or ``location`` must be set. + + Attributes: + prices (MutableSequence[google.shopping.type.types.Price]): + Required. A list of inclusive order price upper bounds. The + last price's value can be infinity by setting price + amount_micros = -1. For example + ``[{"amount_micros": 10000000, "currency_code": "USD"}, {"amount_micros": 500000000, "currency_code": "USD"}, {"amount_micros": -1, "currency_code": "USD"}]`` + represents the headers "<= $10", "<= $500", and "> $500". + All prices within a service must have the same currency. + Must be non-empty. Must be positive except -1. Can only be + set if all other fields are not set. + weights (MutableSequence[google.shopping.type.types.Weight]): + Required. A list of inclusive order weight upper bounds. The + last weight's value can be infinity by setting price + amount_micros = -1. 
For example + ``[{"amount_micros": 10000000, "unit": "kg"}, {"amount_micros": 50000000, "unit": "kg"}, {"amount_micros": -1, "unit": "kg"}]`` + represents the headers "<= 10kg", "<= 50kg", and "> 50kg". + All weights within a service must have the same unit. Must + be non-empty. Must be positive except -1. Can only be set if + all other fields are not set. + number_of_items (MutableSequence[str]): + Required. A list of inclusive number of items upper bounds. + The last value can be ``"infinity"``. For example + ``["10", "50", "infinity"]`` represents the headers "<= 10 + items", "<= 50 items", and "> 50 items". Must be non-empty. + Can only be set if all other fields are not set. + postal_code_group_names (MutableSequence[str]): + Required. A list of postal group names. The last value can + be ``"all other locations"``. Example: + ``["zone 1", "zone 2", "all other locations"]``. The + referred postal code groups must match the delivery country + of the service. Must be non-empty. Can only be set if all + other fields are not set. + locations (MutableSequence[google.shopping.merchant_accounts_v1beta.types.LocationIdSet]): + Required. A list of location ID sets. Must be + non-empty. Can only be set if all other fields + are not set. + """ + + prices: MutableSequence[types.Price] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=types.Price, + ) + weights: MutableSequence[types.Weight] = proto.RepeatedField( + proto.MESSAGE, + number=2, + message=types.Weight, + ) + number_of_items: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=3, + ) + postal_code_group_names: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=4, + ) + locations: MutableSequence["LocationIdSet"] = proto.RepeatedField( + proto.MESSAGE, + number=5, + message="LocationIdSet", + ) + + +class LocationIdSet(proto.Message): + r"""A list of location ID sets. Must be non-empty. Can only be + set if all other fields are not set. + + Attributes: + location_ids (MutableSequence[str]): + Required. A non-empty list of `location + IDs `__. + They must all be of the same location type (For example, + state). + """ + + location_ids: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=1, + ) + + +class Row(proto.Message): + r"""Include a list of cells. + + Attributes: + cells (MutableSequence[google.shopping.merchant_accounts_v1beta.types.Value]): + Required. The list of cells that constitute the row. Must + have the same length as ``columnHeaders`` for + two-dimensional tables, a length of 1 for one-dimensional + tables. + """ + + cells: MutableSequence["Value"] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="Value", + ) + + +class Value(proto.Message): + r"""The single value of a rate group or the value of a rate group + table's cell. Exactly one of ``no_shipping``, ``flat_rate``, + ``price_percentage``, ``carrier_rateName``, ``subtable_name`` must + be set. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + no_shipping (bool): + If true, then the product can't be shipped. + Must be true when set, can only be set if all + other fields are not set. + + This field is a member of `oneof`_ ``_no_shipping``. + flat_rate (google.shopping.type.types.Price): + A flat rate. Can only be set if all other + fields are not set. + + This field is a member of `oneof`_ ``_flat_rate``. 
+ price_percentage (str): + A percentage of the price represented as a number in decimal + notation (For example, ``"5.4"``). Can only be set if all + other fields are not set. + + This field is a member of `oneof`_ ``_price_percentage``. + carrier_rate (str): + The name of a carrier rate referring to a + carrier rate defined in the same rate group. Can + only be set if all other fields are not set. + + This field is a member of `oneof`_ ``_carrier_rate``. + subtable (str): + The name of a subtable. Can only be set in + table cells (For example, not for single + values), and only if all other fields are not + set. + + This field is a member of `oneof`_ ``_subtable``. + """ + + no_shipping: bool = proto.Field( + proto.BOOL, + number=1, + optional=True, + ) + flat_rate: types.Price = proto.Field( + proto.MESSAGE, + number=2, + optional=True, + message=types.Price, + ) + price_percentage: str = proto.Field( + proto.STRING, + number=3, + optional=True, + ) + carrier_rate: str = proto.Field( + proto.STRING, + number=4, + optional=True, + ) + subtable: str = proto.Field( + proto.STRING, + number=5, + optional=True, + ) + + +class CarrierRate(proto.Message): + r"""A list of carrier rates that can be referred to by ``main_table`` or + ``single_value``. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + name (str): + Required. Name of the carrier rate. Must be + unique per rate group. + + This field is a member of `oneof`_ ``_name``. + carrier (str): + Required. Carrier service, such as ``"UPS"`` or ``"Fedex"``. + + This field is a member of `oneof`_ ``_carrier``. + carrier_service (str): + Required. Carrier service, such as ``"ground"`` or + ``"2 days"``. + + This field is a member of `oneof`_ ``_carrier_service``. + origin_postal_code (str): + Required. Shipping origin for this carrier + rate. + + This field is a member of `oneof`_ ``_origin_postal_code``. + percentage_adjustment (str): + Optional. Multiplicative shipping rate modifier as a number + in decimal notation. Can be negative. For example ``"5.4"`` + increases the rate by 5.4%, ``"-3"`` decreases the rate by + 3%. + + This field is a member of `oneof`_ ``_percentage_adjustment``. + flat_adjustment (google.shopping.type.types.Price): + Optional. Additive shipping rate modifier. Can be negative. + For example + ``{ "amount_micros": 1, "currency_code" : "USD" }`` adds $1 + to the rate, + ``{ "amount_micros": -3, "currency_code" : "USD" }`` removes + $3 from the rate. + + This field is a member of `oneof`_ ``_flat_adjustment``. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + optional=True, + ) + carrier: str = proto.Field( + proto.STRING, + number=2, + optional=True, + ) + carrier_service: str = proto.Field( + proto.STRING, + number=3, + optional=True, + ) + origin_postal_code: str = proto.Field( + proto.STRING, + number=4, + optional=True, + ) + percentage_adjustment: str = proto.Field( + proto.STRING, + number=5, + optional=True, + ) + flat_adjustment: types.Price = proto.Field( + proto.MESSAGE, + number=6, + optional=True, + message=types.Price, + ) + + +class GetShippingSettingsRequest(proto.Message): + r"""Request message for the ``GetShippingSetting`` method. + + Attributes: + name (str): + Required. The name of the shipping setting to retrieve. 
+ Format: ``accounts/{account}/shippingsetting`` + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class InsertShippingSettingsRequest(proto.Message): + r"""Request message for the ``InsertShippingSetting`` method. + + Attributes: + parent (str): + Required. The account where this shipping setting will + be inserted. Format: accounts/{account} + shipping_setting (google.shopping.merchant_accounts_v1beta.types.ShippingSettings): + Required. The new version of the account's shipping setting. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + shipping_setting: "ShippingSettings" = proto.Field( + proto.MESSAGE, + number=2, + message="ShippingSettings", + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/types/tax_rule.py b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/types/tax_rule.py new file mode 100644 index 000000000000..c6e8c3f8c1ad --- /dev/null +++ b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/types/tax_rule.py @@ -0,0 +1,147 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +from google.type import interval_pb2 # type: ignore +import proto # type: ignore + +__protobuf__ = proto.module( + package="google.shopping.merchant.accounts.v1beta", + manifest={ + "TaxRule", + }, +) + + +class TaxRule(proto.Message): + r"""Primary type convention: + + percent micro: 100% = 1 000 000 and 1% = 10 000; the value + cannot be negative. + + Information about tax nexus and related parameters applicable to + orders delivered to the area covered by a single tax admin. + Nexus is created when a merchant is doing business in an area + administered by a tax admin (only US states are supported for + nexus configuration). If a merchant has nexus in a US state, the + merchant needs to pay tax to all tax authorities associated with + the shipping destination. + Next Id : 8 + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + location_id (int): + The admin_id or criteria_id of the region in which this rule + is applicable. + + This field is a member of `oneof`_ ``location``. + post_code_range (google.shopping.merchant_accounts_v1beta.types.TaxRule.TaxPostalCodeRange): + The range of postal codes in which this rule + is applicable. + + This field is a member of `oneof`_ ``location``.
+ use_google_rate (bool): + Rate that depends on delivery location: if + merchant has a nexus in corresponding US state, + rates from authorities with jurisdiction over + delivery area are added up. + + This field is a member of `oneof`_ ``rate_calculation``. + self_specified_rate_micros (int): + A fixed rate specified in micros, where 100% = 1_000_000. + Suitable for origin-based states. + + This field is a member of `oneof`_ ``rate_calculation``. + region_code (str): + Region code in which this rule is applicable + shipping_taxed (bool): + If set, shipping charge is taxed (at the same + rate as product) when delivering to this admin's + area. Can only be set on US states without + category. + effective_time_period (google.type.interval_pb2.Interval): + Required. Time period when this rule is effective. If the + duration is missing from effective_time listed, then it is + open ended to the future. The start of this time period is + inclusive, and the end is exclusive. + """ + + class TaxPostalCodeRange(proto.Message): + r"""A range of postal codes that defines the area. + + Attributes: + start (str): + Required. The start of the postal code range, + which is also the smallest in the range. + end (str): + The end of the postal code range. Will be the + same as start if not specified. + """ + + start: str = proto.Field( + proto.STRING, + number=1, + ) + end: str = proto.Field( + proto.STRING, + number=2, + ) + + location_id: int = proto.Field( + proto.INT64, + number=2, + oneof="location", + ) + post_code_range: TaxPostalCodeRange = proto.Field( + proto.MESSAGE, + number=3, + oneof="location", + message=TaxPostalCodeRange, + ) + use_google_rate: bool = proto.Field( + proto.BOOL, + number=4, + oneof="rate_calculation", + ) + self_specified_rate_micros: int = proto.Field( + proto.INT64, + number=5, + oneof="rate_calculation", + ) + region_code: str = proto.Field( + proto.STRING, + number=1, + ) + shipping_taxed: bool = proto.Field( + proto.BOOL, + number=6, + ) + effective_time_period: interval_pb2.Interval = proto.Field( + proto.MESSAGE, + number=7, + message=interval_pb2.Interval, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/types/termsofservice.py b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/types/termsofservice.py new file mode 100644 index 000000000000..b2e6d71a77e1 --- /dev/null +++ b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/types/termsofservice.py @@ -0,0 +1,163 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + +from google.shopping.merchant_accounts_v1beta.types import termsofservicekind + +__protobuf__ = proto.module( + package="google.shopping.merchant.accounts.v1beta", + manifest={ + "TermsOfService", + "GetTermsOfServiceRequest", + "RetrieveLatestTermsOfServiceRequest", + "AcceptTermsOfServiceRequest", + }, +) + + +class TermsOfService(proto.Message): + r"""A ``TermsOfService``. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + name (str): + Identifier. The resource name of the terms of service + version. Format: ``termsOfService/{version}`` + region_code (str): + Region code as defined by + `CLDR `__. This is either a + country where the ToS applies specifically to that country + or ``001`` when the same ``TermsOfService`` can be signed in + any country. However note that when signing a ToS that + applies globally we still expect that a specific country is + provided (this should be merchant business country or + program country of participation). + kind (google.shopping.merchant_accounts_v1beta.types.TermsOfServiceKind): + The Kind this terms of service version + applies to. + file_uri (str): + URI for terms of service file that needs to + be displayed to signing users. + + This field is a member of `oneof`_ ``_file_uri``. + external (bool): + Whether this terms of service version is + external. External terms of service versions can + only be agreed through external processes and + not directly by the merchant through UI or API. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + region_code: str = proto.Field( + proto.STRING, + number=2, + ) + kind: termsofservicekind.TermsOfServiceKind = proto.Field( + proto.ENUM, + number=3, + enum=termsofservicekind.TermsOfServiceKind, + ) + file_uri: str = proto.Field( + proto.STRING, + number=4, + optional=True, + ) + external: bool = proto.Field( + proto.BOOL, + number=5, + ) + + +class GetTermsOfServiceRequest(proto.Message): + r"""Request message for the ``GetTermsOfService`` method. + + Attributes: + name (str): + Required. The resource name of the terms of service version. + Format: ``termsOfService/{version}`` + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class RetrieveLatestTermsOfServiceRequest(proto.Message): + r"""Request message for the ``RetrieveLatestTermsOfService`` method. + + Attributes: + region_code (str): + Region code as defined by + `CLDR `__. This is either a + country when the ToS applies specifically to that country or + 001 when it applies globally. + kind (google.shopping.merchant_accounts_v1beta.types.TermsOfServiceKind): + The Kind this terms of service version + applies to. + """ + + region_code: str = proto.Field( + proto.STRING, + number=1, + ) + kind: termsofservicekind.TermsOfServiceKind = proto.Field( + proto.ENUM, + number=2, + enum=termsofservicekind.TermsOfServiceKind, + ) + + +class AcceptTermsOfServiceRequest(proto.Message): + r"""Request message for the ``AcceptTermsOfService`` method. + + Attributes: + name (str): + Required. The resource name of the terms of service version. + Format: ``termsOfService/{version}`` + account (str): + Required. The account for which to accept the + ToS. + region_code (str): + Required. Region code as defined by + `CLDR `__. 
This is either a + country when the ToS applies specifically to that country or + 001 when it applies globally. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + account: str = proto.Field( + proto.STRING, + number=2, + ) + region_code: str = proto.Field( + proto.STRING, + number=3, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/types/termsofserviceagreementstate.py b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/types/termsofserviceagreementstate.py new file mode 100644 index 000000000000..15b09f8d665b --- /dev/null +++ b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/types/termsofserviceagreementstate.py @@ -0,0 +1,213 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +from google.type import date_pb2 # type: ignore +import proto # type: ignore + +from google.shopping.merchant_accounts_v1beta.types import termsofservicekind + +__protobuf__ = proto.module( + package="google.shopping.merchant.accounts.v1beta", + manifest={ + "TermsOfServiceAgreementState", + "Accepted", + "Required", + "GetTermsOfServiceAgreementStateRequest", + "RetrieveForApplicationTermsOfServiceAgreementStateRequest", + }, +) + + +class TermsOfServiceAgreementState(proto.Message): + r"""This resource represents the agreement state for a given account and + terms of service kind. The state is as follows: + + - If the merchant has accepted a terms of service: + `accepted `__ will be + populated, otherwise it will be empty + - If the merchant must sign a terms of service: + `required `__ will be + populated, otherwise it will be empty. + + Note that both `required `__ + and `accepted `__ can be + present. In this case the ``accepted`` terms of services will have + an expiration date set in the `valid_until `__ + field. The ``required`` terms of services need to be accepted before + ``valid_until`` in order for the account to continue having a valid + agreement. When accepting new terms of services we expect 3Ps to + display the text associated with the given terms of service + agreement (the url to the file containing the text is added in the + Required message below as `tos_file_uri `__. + The actual acceptance of the terms of service is done by calling + accept on the `TermsOfService `__ resource. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + name (str): + Identifier. The resource name of the terms of service + version. 
Format: + ``accounts/{account}/termsOfServiceAgreementState/{identifier}`` + region_code (str): + Region code as defined by + https://cldr.unicode.org/. This is the country + the current state applies to. + terms_of_service_kind (google.shopping.merchant_accounts_v1beta.types.TermsOfServiceKind): + Terms of Service kind associated with the + particular version. + accepted (google.shopping.merchant_accounts_v1beta.types.Accepted): + The accepted terms of service of this kind and for the + associated region_code + + This field is a member of `oneof`_ ``_accepted``. + required (google.shopping.merchant_accounts_v1beta.types.Required): + The required terms of service + + This field is a member of `oneof`_ ``_required``. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + region_code: str = proto.Field( + proto.STRING, + number=2, + ) + terms_of_service_kind: termsofservicekind.TermsOfServiceKind = proto.Field( + proto.ENUM, + number=3, + enum=termsofservicekind.TermsOfServiceKind, + ) + accepted: "Accepted" = proto.Field( + proto.MESSAGE, + number=4, + optional=True, + message="Accepted", + ) + required: "Required" = proto.Field( + proto.MESSAGE, + number=5, + optional=True, + message="Required", + ) + + +class Accepted(proto.Message): + r"""Describes the accepted terms of service. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + terms_of_service (str): + The accepted + `termsOfService `__. + accepted_by (str): + The account where the acceptance was + recorded. This can be the account itself or, in + the case of subaccounts, the MCA account. + valid_until (google.type.date_pb2.Date): + When set, it states that the accepted + `TermsOfService `__ + is only valid until the end of this date (in UTC). A new one + must be accepted before then. The information of the + required + `TermsOfService `__ + is found in the `Required `__ message. + + This field is a member of `oneof`_ ``_valid_until``. + """ + + terms_of_service: str = proto.Field( + proto.STRING, + number=1, + ) + accepted_by: str = proto.Field( + proto.STRING, + number=2, + ) + valid_until: date_pb2.Date = proto.Field( + proto.MESSAGE, + number=3, + optional=True, + message=date_pb2.Date, + ) + + +class Required(proto.Message): + r"""Describes the terms of service which are required to be + accepted. + + Attributes: + terms_of_service (str): + The + `termsOfService `__ + that need to be accepted. + tos_file_uri (str): + Full URL to the terms of service file. This field is the + same as + `TermsOfService.file_uri `__, it is + added here for convenience only. + """ + + terms_of_service: str = proto.Field( + proto.STRING, + number=1, + ) + tos_file_uri: str = proto.Field( + proto.STRING, + number=2, + ) + + +class GetTermsOfServiceAgreementStateRequest(proto.Message): + r"""Request message for the ``GetTermsOfServiceAgreementState`` method. + + Attributes: + name (str): + Required. The resource name of the terms of service version. + Format: + ``accounts/{account}/termsOfServiceAgreementState/{identifier}`` + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class RetrieveForApplicationTermsOfServiceAgreementStateRequest(proto.Message): + r"""Request message for the + ``RetrieveForApplicationTermsOfServiceAgreementState`` method. + + Attributes: + parent (str): + Required. 
The account for which to get a + TermsOfServiceAgreementState Format: ``accounts/{account}`` + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/types/termsofservicekind.py b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/types/termsofservicekind.py new file mode 100644 index 000000000000..301cd3c85648 --- /dev/null +++ b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/types/termsofservicekind.py @@ -0,0 +1,43 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + +__protobuf__ = proto.module( + package="google.shopping.merchant.accounts.v1beta", + manifest={ + "TermsOfServiceKind", + }, +) + + +class TermsOfServiceKind(proto.Enum): + r"""The TermsOfService Kind. + + Values: + TERMS_OF_SERVICE_KIND_UNSPECIFIED (0): + Default value. This value is unused. + MERCHANT_CENTER (1): + Merchant Center application. + """ + TERMS_OF_SERVICE_KIND_UNSPECIFIED = 0 + MERCHANT_CENTER = 1 + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/types/user.py b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/types/user.py new file mode 100644 index 000000000000..3df78a10bf40 --- /dev/null +++ b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/types/user.py @@ -0,0 +1,246 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +from google.protobuf import field_mask_pb2 # type: ignore +import proto # type: ignore + +from google.shopping.merchant_accounts_v1beta.types import accessright + +__protobuf__ = proto.module( + package="google.shopping.merchant.accounts.v1beta", + manifest={ + "User", + "GetUserRequest", + "CreateUserRequest", + "DeleteUserRequest", + "UpdateUserRequest", + "ListUsersRequest", + "ListUsersResponse", + }, +) + + +class User(proto.Message): + r"""A `user `__. 
+ + Attributes: + name (str): + Identifier. The resource name of the user. Format: + ``accounts/{account}/user/{email}`` + + Use ``me`` to refer to your own email address, for example + ``accounts/{account}/users/me``. + state (google.shopping.merchant_accounts_v1beta.types.User.State): + Output only. The state of the user. + access_rights (MutableSequence[google.shopping.merchant_accounts_v1beta.types.AccessRight]): + Optional. The `access + rights `__ + the user has. + """ + + class State(proto.Enum): + r"""The possible states of a user. + + Values: + STATE_UNSPECIFIED (0): + Default value. This value is unused. + PENDING (1): + The user is pending confirmation. In this + state, the user first needs to accept the + invitation before performing other actions. + VERIFIED (2): + The user is verified. + """ + STATE_UNSPECIFIED = 0 + PENDING = 1 + VERIFIED = 2 + + name: str = proto.Field( + proto.STRING, + number=1, + ) + state: State = proto.Field( + proto.ENUM, + number=2, + enum=State, + ) + access_rights: MutableSequence[accessright.AccessRight] = proto.RepeatedField( + proto.ENUM, + number=4, + enum=accessright.AccessRight, + ) + + +class GetUserRequest(proto.Message): + r"""Request message for the ``GetUser`` method. + + Attributes: + name (str): + Required. The name of the user to retrieve. Format: + ``accounts/{account}/users/{email}`` + + It is also possible to retrieve the user corresponding to + the caller by using ``me`` rather than an email address as + in ``accounts/{account}/users/me``. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class CreateUserRequest(proto.Message): + r"""Request message for the ``CreateUser`` method. + + Attributes: + parent (str): + Required. The resource name of the account for which a user + will be created. Format: ``accounts/{account}`` + user_id (str): + Required. The email address of the user (for example, + ``john.doe@gmail.com``). + user (google.shopping.merchant_accounts_v1beta.types.User): + Required. The user to create. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + user_id: str = proto.Field( + proto.STRING, + number=2, + ) + user: "User" = proto.Field( + proto.MESSAGE, + number=3, + message="User", + ) + + +class DeleteUserRequest(proto.Message): + r"""Request message for the ``DeleteUser`` method. + + Attributes: + name (str): + Required. The name of the user to delete. Format: + ``accounts/{account}/users/{email}`` + + It is also possible to delete the user corresponding to the + caller by using ``me`` rather than an email address as in + ``accounts/{account}/users/me``. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class UpdateUserRequest(proto.Message): + r"""Request message for the ``UpdateUser`` method. + + Attributes: + user (google.shopping.merchant_accounts_v1beta.types.User): + Required. The new version of the user. + + Use ``me`` to refer to your own email address, for example + ``accounts/{account}/users/me``. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Required. List of fields being updated. + """ + + user: "User" = proto.Field( + proto.MESSAGE, + number=1, + message="User", + ) + update_mask: field_mask_pb2.FieldMask = proto.Field( + proto.MESSAGE, + number=2, + message=field_mask_pb2.FieldMask, + ) + + +class ListUsersRequest(proto.Message): + r"""Request message for the ``ListUsers`` method. + + Attributes: + parent (str): + Required. The parent, which owns this collection of users. 
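A short sketch of creating a user with the request message above; the ``UserServiceClient`` name, its ``create_user`` method, and the ``AccessRight.STANDARD`` value are assumptions (the ``AccessRight`` enum is defined elsewhere in this package), so treat them as placeholders.

```python
from google.shopping import merchant_accounts_v1beta


def create_standard_user(account_id: str, email: str):
    # Client name assumed from the package's *ServiceClient convention.
    client = merchant_accounts_v1beta.UserServiceClient()

    request = merchant_accounts_v1beta.CreateUserRequest(
        parent=f"accounts/{account_id}",
        user_id=email,  # the user's email address, e.g. "john.doe@gmail.com"
        user=merchant_accounts_v1beta.User(
            # Assumed enum value; see the AccessRight enum in this package.
            access_rights=[merchant_accounts_v1beta.AccessRight.STANDARD],
        ),
    )
    return client.create_user(request=request)
```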
+ Format: ``accounts/{account}`` + page_size (int): + Optional. The maximum number of users to + return. The service may return fewer than this + value. If unspecified, at most 50 users will be + returned. The maximum value is 100; values above + 100 will be coerced to 100 + page_token (str): + Optional. A page token, received from a previous + ``ListUsers`` call. Provide this to retrieve the subsequent + page. + + When paginating, all other parameters provided to + ``ListUsers`` must match the call that provided the page + token. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + + +class ListUsersResponse(proto.Message): + r"""Response message for the ``ListUsers`` method. + + Attributes: + users (MutableSequence[google.shopping.merchant_accounts_v1beta.types.User]): + The users from the specified account. + next_page_token (str): + A token, which can be sent as ``page_token`` to retrieve the + next page. If this field is omitted, there are no subsequent + pages. + """ + + @property + def raw_page(self): + return self + + users: MutableSequence["User"] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="User", + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-shopping-merchant-accounts/mypy.ini b/packages/google-shopping-merchant-accounts/mypy.ini new file mode 100644 index 000000000000..574c5aed394b --- /dev/null +++ b/packages/google-shopping-merchant-accounts/mypy.ini @@ -0,0 +1,3 @@ +[mypy] +python_version = 3.7 +namespace_packages = True diff --git a/packages/google-shopping-merchant-accounts/noxfile.py b/packages/google-shopping-merchant-accounts/noxfile.py new file mode 100644 index 000000000000..1e6cd48d0529 --- /dev/null +++ b/packages/google-shopping-merchant-accounts/noxfile.py @@ -0,0 +1,428 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Generated by synthtool. DO NOT EDIT! 
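``ListUsers`` above is paginated: ``page_size`` is capped at 100 and further pages are fetched with ``next_page_token``. Generated clients normally wrap this in a pager that follows the token automatically, mirroring the ``ListAccountIssues`` samples later in this patch; the ``UserServiceClient`` and ``list_users`` names in the sketch below are assumed, not shown in this section.

```python
from google.shopping import merchant_accounts_v1beta


def print_all_users(account_id: str):
    client = merchant_accounts_v1beta.UserServiceClient()  # name assumed

    request = merchant_accounts_v1beta.ListUsersRequest(
        parent=f"accounts/{account_id}",
        page_size=50,  # values above 100 are coerced to 100
    )

    # The pager iterates across pages, requesting the next page via
    # next_page_token automatically; page_token is only needed when resuming.
    for user in client.list_users(request=request):
        print(user.name, user.state)
```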
+ +from __future__ import absolute_import + +import os +import pathlib +import re +import shutil +from typing import Dict, List +import warnings + +import nox + +BLACK_VERSION = "black[jupyter]==23.7.0" +ISORT_VERSION = "isort==5.11.0" + +LINT_PATHS = ["docs", "google", "tests", "noxfile.py", "setup.py"] + + +DEFAULT_PYTHON_VERSION = "3.10" + +UNIT_TEST_PYTHON_VERSIONS: List[str] = ["3.7", "3.8", "3.9", "3.10", "3.11", "3.12"] +UNIT_TEST_STANDARD_DEPENDENCIES = [ + "mock", + "asyncmock", + "pytest", + "pytest-cov", + "pytest-asyncio", +] +UNIT_TEST_EXTERNAL_DEPENDENCIES: List[str] = [] +UNIT_TEST_LOCAL_DEPENDENCIES: List[str] = [] +UNIT_TEST_DEPENDENCIES: List[str] = [] +UNIT_TEST_EXTRAS: List[str] = [] +UNIT_TEST_EXTRAS_BY_PYTHON: Dict[str, List[str]] = {} + +SYSTEM_TEST_PYTHON_VERSIONS: List[str] = ["3.8", "3.9", "3.10", "3.11", "3.12"] +SYSTEM_TEST_STANDARD_DEPENDENCIES = [ + "mock", + "pytest", + "google-cloud-testutils", +] +SYSTEM_TEST_EXTERNAL_DEPENDENCIES: List[str] = [] +SYSTEM_TEST_LOCAL_DEPENDENCIES: List[str] = [] +SYSTEM_TEST_DEPENDENCIES: List[str] = [] +SYSTEM_TEST_EXTRAS: List[str] = [] +SYSTEM_TEST_EXTRAS_BY_PYTHON: Dict[str, List[str]] = {} + +CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute() + +# 'docfx' is excluded since it only needs to run in 'docs-presubmit' +nox.options.sessions = [ + "unit", + "system", + "cover", + "lint", + "lint_setup_py", + "blacken", + "docs", +] + +# Error if a python version is missing +nox.options.error_on_missing_interpreters = True + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def lint(session): + """Run linters. + + Returns a failure if the linters find linting errors or sufficiently + serious code quality issues. + """ + session.install("flake8", BLACK_VERSION) + session.run( + "black", + "--check", + *LINT_PATHS, + ) + + session.run("flake8", "google", "tests") + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def blacken(session): + """Run black. Format code to uniform standard.""" + session.install(BLACK_VERSION) + session.run( + "black", + *LINT_PATHS, + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def format(session): + """ + Run isort to sort imports. Then run black + to format code to uniform standard. + """ + session.install(BLACK_VERSION, ISORT_VERSION) + # Use the --fss option to sort imports using strict alphabetical order. + # See https://pycqa.github.io/isort/docs/configuration/options.html#force-sort-within-sections + session.run( + "isort", + "--fss", + *LINT_PATHS, + ) + session.run( + "black", + *LINT_PATHS, + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def lint_setup_py(session): + """Verify that setup.py is valid (including RST check).""" + session.install("docutils", "pygments") + session.run("python", "setup.py", "check", "--restructuredtext", "--strict") + + +def install_unittest_dependencies(session, *constraints): + standard_deps = UNIT_TEST_STANDARD_DEPENDENCIES + UNIT_TEST_DEPENDENCIES + session.install(*standard_deps, *constraints) + + if UNIT_TEST_EXTERNAL_DEPENDENCIES: + warnings.warn( + "'unit_test_external_dependencies' is deprecated. 
Instead, please " + "use 'unit_test_dependencies' or 'unit_test_local_dependencies'.", + DeprecationWarning, + ) + session.install(*UNIT_TEST_EXTERNAL_DEPENDENCIES, *constraints) + + if UNIT_TEST_LOCAL_DEPENDENCIES: + session.install(*UNIT_TEST_LOCAL_DEPENDENCIES, *constraints) + + if UNIT_TEST_EXTRAS_BY_PYTHON: + extras = UNIT_TEST_EXTRAS_BY_PYTHON.get(session.python, []) + elif UNIT_TEST_EXTRAS: + extras = UNIT_TEST_EXTRAS + else: + extras = [] + + if extras: + session.install("-e", f".[{','.join(extras)}]", *constraints) + else: + session.install("-e", ".", *constraints) + + +def default(session): + # Install all test dependencies, then install this package in-place. + + constraints_path = str( + CURRENT_DIRECTORY / "testing" / f"constraints-{session.python}.txt" + ) + install_unittest_dependencies(session, "-c", constraints_path) + + # Run py.test against the unit tests. + session.run( + "py.test", + "--quiet", + f"--junitxml=unit_{session.python}_sponge_log.xml", + "--cov=google", + "--cov=tests/unit", + "--cov-append", + "--cov-config=.coveragerc", + "--cov-report=", + "--cov-fail-under=0", + os.path.join("tests", "unit"), + *session.posargs, + ) + + +@nox.session(python=UNIT_TEST_PYTHON_VERSIONS) +def unit(session): + """Run the unit test suite.""" + default(session) + + +def install_systemtest_dependencies(session, *constraints): + # Use pre-release gRPC for system tests. + # Exclude version 1.52.0rc1 which has a known issue. + # See https://github.com/grpc/grpc/issues/32163 + session.install("--pre", "grpcio!=1.52.0rc1") + + session.install(*SYSTEM_TEST_STANDARD_DEPENDENCIES, *constraints) + + if SYSTEM_TEST_EXTERNAL_DEPENDENCIES: + session.install(*SYSTEM_TEST_EXTERNAL_DEPENDENCIES, *constraints) + + if SYSTEM_TEST_LOCAL_DEPENDENCIES: + session.install("-e", *SYSTEM_TEST_LOCAL_DEPENDENCIES, *constraints) + + if SYSTEM_TEST_DEPENDENCIES: + session.install("-e", *SYSTEM_TEST_DEPENDENCIES, *constraints) + + if SYSTEM_TEST_EXTRAS_BY_PYTHON: + extras = SYSTEM_TEST_EXTRAS_BY_PYTHON.get(session.python, []) + elif SYSTEM_TEST_EXTRAS: + extras = SYSTEM_TEST_EXTRAS + else: + extras = [] + + if extras: + session.install("-e", f".[{','.join(extras)}]", *constraints) + else: + session.install("-e", ".", *constraints) + + +@nox.session(python=SYSTEM_TEST_PYTHON_VERSIONS) +def system(session): + """Run the system test suite.""" + constraints_path = str( + CURRENT_DIRECTORY / "testing" / f"constraints-{session.python}.txt" + ) + system_test_path = os.path.join("tests", "system.py") + system_test_folder_path = os.path.join("tests", "system") + + # Check the value of `RUN_SYSTEM_TESTS` env var. It defaults to true. + if os.environ.get("RUN_SYSTEM_TESTS", "true") == "false": + session.skip("RUN_SYSTEM_TESTS is set to false, skipping") + # Install pyopenssl for mTLS testing. + if os.environ.get("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") == "true": + session.install("pyopenssl") + + system_test_exists = os.path.exists(system_test_path) + system_test_folder_exists = os.path.exists(system_test_folder_path) + # Sanity check: only run tests if found. + if not system_test_exists and not system_test_folder_exists: + session.skip("System tests were not found") + + install_systemtest_dependencies(session, "-c", constraints_path) + + # Run py.test against the system tests. 
+ if system_test_exists: + session.run( + "py.test", + "--quiet", + f"--junitxml=system_{session.python}_sponge_log.xml", + system_test_path, + *session.posargs, + ) + if system_test_folder_exists: + session.run( + "py.test", + "--quiet", + f"--junitxml=system_{session.python}_sponge_log.xml", + system_test_folder_path, + *session.posargs, + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def cover(session): + """Run the final coverage report. + + This outputs the coverage report aggregating coverage from the unit + test runs (not system test runs), and then erases coverage data. + """ + session.install("coverage", "pytest-cov") + session.run("coverage", "report", "--show-missing", "--fail-under=100") + + session.run("coverage", "erase") + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def docs(session): + """Build the docs for this library.""" + + session.install("-e", ".") + session.install( + # We need to pin to specific versions of the `sphinxcontrib-*` packages + # which still support sphinx 4.x. + # See https://github.com/googleapis/sphinx-docfx-yaml/issues/344 + # and https://github.com/googleapis/sphinx-docfx-yaml/issues/345. + "sphinxcontrib-applehelp==1.0.4", + "sphinxcontrib-devhelp==1.0.2", + "sphinxcontrib-htmlhelp==2.0.1", + "sphinxcontrib-qthelp==1.0.3", + "sphinxcontrib-serializinghtml==1.1.5", + "sphinx==4.5.0", + "alabaster", + "recommonmark", + ) + + shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) + session.run( + "sphinx-build", + "-W", # warnings as errors + "-T", # show full traceback on exception + "-N", # no colors + "-b", + "html", + "-d", + os.path.join("docs", "_build", "doctrees", ""), + os.path.join("docs", ""), + os.path.join("docs", "_build", "html", ""), + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def docfx(session): + """Build the docfx yaml files for this library.""" + + session.install("-e", ".") + session.install( + # We need to pin to specific versions of the `sphinxcontrib-*` packages + # which still support sphinx 4.x. + # See https://github.com/googleapis/sphinx-docfx-yaml/issues/344 + # and https://github.com/googleapis/sphinx-docfx-yaml/issues/345. 
+ "sphinxcontrib-applehelp==1.0.4", + "sphinxcontrib-devhelp==1.0.2", + "sphinxcontrib-htmlhelp==2.0.1", + "sphinxcontrib-qthelp==1.0.3", + "sphinxcontrib-serializinghtml==1.1.5", + "gcp-sphinx-docfx-yaml", + "alabaster", + "recommonmark", + ) + + shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) + session.run( + "sphinx-build", + "-T", # show full traceback on exception + "-N", # no colors + "-D", + ( + "extensions=sphinx.ext.autodoc," + "sphinx.ext.autosummary," + "docfx_yaml.extension," + "sphinx.ext.intersphinx," + "sphinx.ext.coverage," + "sphinx.ext.napoleon," + "sphinx.ext.todo," + "sphinx.ext.viewcode," + "recommonmark" + ), + "-b", + "html", + "-d", + os.path.join("docs", "_build", "doctrees", ""), + os.path.join("docs", ""), + os.path.join("docs", "_build", "html", ""), + ) + + +@nox.session(python="3.12") +def prerelease_deps(session): + """Run all tests with prerelease versions of dependencies installed.""" + + # Install all dependencies + session.install("-e", ".[all, tests, tracing]") + unit_deps_all = UNIT_TEST_STANDARD_DEPENDENCIES + UNIT_TEST_EXTERNAL_DEPENDENCIES + session.install(*unit_deps_all) + system_deps_all = ( + SYSTEM_TEST_STANDARD_DEPENDENCIES + + SYSTEM_TEST_EXTERNAL_DEPENDENCIES + + SYSTEM_TEST_EXTRAS + ) + session.install(*system_deps_all) + + # Because we test minimum dependency versions on the minimum Python + # version, the first version we test with in the unit tests sessions has a + # constraints file containing all dependencies and extras. + with open( + CURRENT_DIRECTORY + / "testing" + / f"constraints-{UNIT_TEST_PYTHON_VERSIONS[0]}.txt", + encoding="utf-8", + ) as constraints_file: + constraints_text = constraints_file.read() + + # Ignore leading whitespace and comment lines. + constraints_deps = [ + match.group(1) + for match in re.finditer( + r"^\s*(\S+)(?===\S+)", constraints_text, flags=re.MULTILINE + ) + ] + + session.install(*constraints_deps) + + prerel_deps = [ + "protobuf", + # dependency of grpc + "six", + "googleapis-common-protos", + # Exclude version 1.52.0rc1 which has a known issue. 
See https://github.com/grpc/grpc/issues/32163 + "grpcio!=1.52.0rc1", + "grpcio-status", + "google-api-core", + "google-auth", + "proto-plus", + "google-cloud-testutils", + # dependencies of google-cloud-testutils" + "click", + ] + + for dep in prerel_deps: + session.install("--pre", "--no-deps", "--upgrade", dep) + + # Remaining dependencies + other_deps = [ + "requests", + ] + session.install(*other_deps) + + # Print out prerelease package versions + session.run( + "python", "-c", "import google.protobuf; print(google.protobuf.__version__)" + ) + session.run("python", "-c", "import grpc; print(grpc.__version__)") + session.run("python", "-c", "import google.auth; print(google.auth.__version__)") + + session.run("py.test", "tests/unit") diff --git a/packages/google-shopping-merchant-accounts/samples/generated_samples/merchantapi_v1beta_generated_account_issue_service_list_account_issues_async.py b/packages/google-shopping-merchant-accounts/samples/generated_samples/merchantapi_v1beta_generated_account_issue_service_list_account_issues_async.py new file mode 100644 index 000000000000..5722ba346d36 --- /dev/null +++ b/packages/google-shopping-merchant-accounts/samples/generated_samples/merchantapi_v1beta_generated_account_issue_service_list_account_issues_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListAccountIssues +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-shopping-merchant-accounts + + +# [START merchantapi_v1beta_generated_AccountIssueService_ListAccountIssues_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.shopping import merchant_accounts_v1beta + + +async def sample_list_account_issues(): + # Create a client + client = merchant_accounts_v1beta.AccountIssueServiceAsyncClient() + + # Initialize request argument(s) + request = merchant_accounts_v1beta.ListAccountIssuesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_account_issues(request=request) + + # Handle the response + async for response in page_result: + print(response) + +# [END merchantapi_v1beta_generated_AccountIssueService_ListAccountIssues_async] diff --git a/packages/google-shopping-merchant-accounts/samples/generated_samples/merchantapi_v1beta_generated_account_issue_service_list_account_issues_sync.py b/packages/google-shopping-merchant-accounts/samples/generated_samples/merchantapi_v1beta_generated_account_issue_service_list_account_issues_sync.py new file mode 100644 index 000000000000..65df4005dfdf --- /dev/null +++ b/packages/google-shopping-merchant-accounts/samples/generated_samples/merchantapi_v1beta_generated_account_issue_service_list_account_issues_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListAccountIssues +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-shopping-merchant-accounts + + +# [START merchantapi_v1beta_generated_AccountIssueService_ListAccountIssues_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.shopping import merchant_accounts_v1beta + + +def sample_list_account_issues(): + # Create a client + client = merchant_accounts_v1beta.AccountIssueServiceClient() + + # Initialize request argument(s) + request = merchant_accounts_v1beta.ListAccountIssuesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_account_issues(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END merchantapi_v1beta_generated_AccountIssueService_ListAccountIssues_sync] diff --git a/packages/google-shopping-merchant-accounts/samples/generated_samples/merchantapi_v1beta_generated_account_tax_service_get_account_tax_async.py b/packages/google-shopping-merchant-accounts/samples/generated_samples/merchantapi_v1beta_generated_account_tax_service_get_account_tax_async.py new file mode 100644 index 000000000000..72f642dd82c3 --- /dev/null +++ b/packages/google-shopping-merchant-accounts/samples/generated_samples/merchantapi_v1beta_generated_account_tax_service_get_account_tax_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetAccountTax +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-shopping-merchant-accounts + + +# [START merchantapi_v1beta_generated_AccountTaxService_GetAccountTax_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.shopping import merchant_accounts_v1beta + + +async def sample_get_account_tax(): + # Create a client + client = merchant_accounts_v1beta.AccountTaxServiceAsyncClient() + + # Initialize request argument(s) + request = merchant_accounts_v1beta.GetAccountTaxRequest( + name="name_value", + ) + + # Make the request + response = await client.get_account_tax(request=request) + + # Handle the response + print(response) + +# [END merchantapi_v1beta_generated_AccountTaxService_GetAccountTax_async] diff --git a/packages/google-shopping-merchant-accounts/samples/generated_samples/merchantapi_v1beta_generated_account_tax_service_get_account_tax_sync.py b/packages/google-shopping-merchant-accounts/samples/generated_samples/merchantapi_v1beta_generated_account_tax_service_get_account_tax_sync.py new file mode 100644 index 000000000000..06126d7f847f --- /dev/null +++ b/packages/google-shopping-merchant-accounts/samples/generated_samples/merchantapi_v1beta_generated_account_tax_service_get_account_tax_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetAccountTax +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-shopping-merchant-accounts + + +# [START merchantapi_v1beta_generated_AccountTaxService_GetAccountTax_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.shopping import merchant_accounts_v1beta + + +def sample_get_account_tax(): + # Create a client + client = merchant_accounts_v1beta.AccountTaxServiceClient() + + # Initialize request argument(s) + request = merchant_accounts_v1beta.GetAccountTaxRequest( + name="name_value", + ) + + # Make the request + response = client.get_account_tax(request=request) + + # Handle the response + print(response) + +# [END merchantapi_v1beta_generated_AccountTaxService_GetAccountTax_sync] diff --git a/packages/google-shopping-merchant-accounts/samples/generated_samples/merchantapi_v1beta_generated_account_tax_service_list_account_tax_async.py b/packages/google-shopping-merchant-accounts/samples/generated_samples/merchantapi_v1beta_generated_account_tax_service_list_account_tax_async.py new file mode 100644 index 000000000000..a33b7617b4d6 --- /dev/null +++ b/packages/google-shopping-merchant-accounts/samples/generated_samples/merchantapi_v1beta_generated_account_tax_service_list_account_tax_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListAccountTax +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-shopping-merchant-accounts + + +# [START merchantapi_v1beta_generated_AccountTaxService_ListAccountTax_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.shopping import merchant_accounts_v1beta + + +async def sample_list_account_tax(): + # Create a client + client = merchant_accounts_v1beta.AccountTaxServiceAsyncClient() + + # Initialize request argument(s) + request = merchant_accounts_v1beta.ListAccountTaxRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_account_tax(request=request) + + # Handle the response + async for response in page_result: + print(response) + +# [END merchantapi_v1beta_generated_AccountTaxService_ListAccountTax_async] diff --git a/packages/google-shopping-merchant-accounts/samples/generated_samples/merchantapi_v1beta_generated_account_tax_service_list_account_tax_sync.py b/packages/google-shopping-merchant-accounts/samples/generated_samples/merchantapi_v1beta_generated_account_tax_service_list_account_tax_sync.py new file mode 100644 index 000000000000..c03510d4a8d0 --- /dev/null +++ b/packages/google-shopping-merchant-accounts/samples/generated_samples/merchantapi_v1beta_generated_account_tax_service_list_account_tax_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListAccountTax +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-shopping-merchant-accounts + + +# [START merchantapi_v1beta_generated_AccountTaxService_ListAccountTax_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.shopping import merchant_accounts_v1beta + + +def sample_list_account_tax(): + # Create a client + client = merchant_accounts_v1beta.AccountTaxServiceClient() + + # Initialize request argument(s) + request = merchant_accounts_v1beta.ListAccountTaxRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_account_tax(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END merchantapi_v1beta_generated_AccountTaxService_ListAccountTax_sync] diff --git a/packages/google-shopping-merchant-accounts/samples/generated_samples/merchantapi_v1beta_generated_account_tax_service_update_account_tax_async.py b/packages/google-shopping-merchant-accounts/samples/generated_samples/merchantapi_v1beta_generated_account_tax_service_update_account_tax_async.py new file mode 100644 index 000000000000..9e0fa6b733f1 --- /dev/null +++ b/packages/google-shopping-merchant-accounts/samples/generated_samples/merchantapi_v1beta_generated_account_tax_service_update_account_tax_async.py @@ -0,0 +1,51 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateAccountTax +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-shopping-merchant-accounts + + +# [START merchantapi_v1beta_generated_AccountTaxService_UpdateAccountTax_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.shopping import merchant_accounts_v1beta + + +async def sample_update_account_tax(): + # Create a client + client = merchant_accounts_v1beta.AccountTaxServiceAsyncClient() + + # Initialize request argument(s) + request = merchant_accounts_v1beta.UpdateAccountTaxRequest( + ) + + # Make the request + response = await client.update_account_tax(request=request) + + # Handle the response + print(response) + +# [END merchantapi_v1beta_generated_AccountTaxService_UpdateAccountTax_async] diff --git a/packages/google-shopping-merchant-accounts/samples/generated_samples/merchantapi_v1beta_generated_account_tax_service_update_account_tax_sync.py b/packages/google-shopping-merchant-accounts/samples/generated_samples/merchantapi_v1beta_generated_account_tax_service_update_account_tax_sync.py new file mode 100644 index 000000000000..d0fc79fce477 --- /dev/null +++ b/packages/google-shopping-merchant-accounts/samples/generated_samples/merchantapi_v1beta_generated_account_tax_service_update_account_tax_sync.py @@ -0,0 +1,51 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateAccountTax +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-shopping-merchant-accounts + + +# [START merchantapi_v1beta_generated_AccountTaxService_UpdateAccountTax_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.shopping import merchant_accounts_v1beta + + +def sample_update_account_tax(): + # Create a client + client = merchant_accounts_v1beta.AccountTaxServiceClient() + + # Initialize request argument(s) + request = merchant_accounts_v1beta.UpdateAccountTaxRequest( + ) + + # Make the request + response = client.update_account_tax(request=request) + + # Handle the response + print(response) + +# [END merchantapi_v1beta_generated_AccountTaxService_UpdateAccountTax_sync] diff --git a/packages/google-maps-mapsplatformdatasets/samples/generated_samples/mapsplatformdatasets_v1alpha_generated_maps_platform_datasets_v1_alpha_update_dataset_metadata_sync.py b/packages/google-shopping-merchant-accounts/samples/generated_samples/merchantapi_v1beta_generated_accounts_service_create_and_configure_account_async.py similarity index 64% rename from packages/google-maps-mapsplatformdatasets/samples/generated_samples/mapsplatformdatasets_v1alpha_generated_maps_platform_datasets_v1_alpha_update_dataset_metadata_sync.py rename to packages/google-shopping-merchant-accounts/samples/generated_samples/merchantapi_v1beta_generated_accounts_service_create_and_configure_account_async.py index 0325bd5acfdd..0f317ab185f3 100644 --- a/packages/google-maps-mapsplatformdatasets/samples/generated_samples/mapsplatformdatasets_v1alpha_generated_maps_platform_datasets_v1_alpha_update_dataset_metadata_sync.py +++ b/packages/google-shopping-merchant-accounts/samples/generated_samples/merchantapi_v1beta_generated_accounts_service_create_and_configure_account_async.py @@ -15,15 +15,15 @@ # # Generated code. DO NOT EDIT! # -# Snippet for UpdateDatasetMetadata +# Snippet for CreateAndConfigureAccount # NOTE: This snippet has been automatically generated for illustrative purposes only. # It may require modifications to work in your environment. # To install the latest published package dependency, execute the following: -# python3 -m pip install google-maps-mapsplatformdatasets +# python3 -m pip install google-shopping-merchant-accounts -# [START mapsplatformdatasets_v1alpha_generated_MapsPlatformDatasetsV1Alpha_UpdateDatasetMetadata_sync] +# [START merchantapi_v1beta_generated_AccountsService_CreateAndConfigureAccount_async] # This snippet has been automatically generated and should be regarded as a # code template only. 
# It will require modifications to work: @@ -31,21 +31,26 @@ # - It may require specifying regional endpoints when creating the service # client as shown in: # https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.maps import mapsplatformdatasets_v1alpha +from google.shopping import merchant_accounts_v1beta -def sample_update_dataset_metadata(): +async def sample_create_and_configure_account(): # Create a client - client = mapsplatformdatasets_v1alpha.MapsPlatformDatasetsV1AlphaClient() + client = merchant_accounts_v1beta.AccountsServiceAsyncClient() # Initialize request argument(s) - request = mapsplatformdatasets_v1alpha.UpdateDatasetMetadataRequest( + account = merchant_accounts_v1beta.Account() + account.account_name = "account_name_value" + account.language_code = "language_code_value" + + request = merchant_accounts_v1beta.CreateAndConfigureAccountRequest( + account=account, ) # Make the request - response = client.update_dataset_metadata(request=request) + response = await client.create_and_configure_account(request=request) # Handle the response print(response) -# [END mapsplatformdatasets_v1alpha_generated_MapsPlatformDatasetsV1Alpha_UpdateDatasetMetadata_sync] +# [END merchantapi_v1beta_generated_AccountsService_CreateAndConfigureAccount_async] diff --git a/packages/google-maps-mapsplatformdatasets/samples/generated_samples/mapsplatformdatasets_v1alpha_generated_maps_platform_datasets_v1_alpha_update_dataset_metadata_async.py b/packages/google-shopping-merchant-accounts/samples/generated_samples/merchantapi_v1beta_generated_accounts_service_create_and_configure_account_sync.py similarity index 65% rename from packages/google-maps-mapsplatformdatasets/samples/generated_samples/mapsplatformdatasets_v1alpha_generated_maps_platform_datasets_v1_alpha_update_dataset_metadata_async.py rename to packages/google-shopping-merchant-accounts/samples/generated_samples/merchantapi_v1beta_generated_accounts_service_create_and_configure_account_sync.py index f49434d41e70..7fc130048baa 100644 --- a/packages/google-maps-mapsplatformdatasets/samples/generated_samples/mapsplatformdatasets_v1alpha_generated_maps_platform_datasets_v1_alpha_update_dataset_metadata_async.py +++ b/packages/google-shopping-merchant-accounts/samples/generated_samples/merchantapi_v1beta_generated_accounts_service_create_and_configure_account_sync.py @@ -15,15 +15,15 @@ # # Generated code. DO NOT EDIT! # -# Snippet for UpdateDatasetMetadata +# Snippet for CreateAndConfigureAccount # NOTE: This snippet has been automatically generated for illustrative purposes only. # It may require modifications to work in your environment. # To install the latest published package dependency, execute the following: -# python3 -m pip install google-maps-mapsplatformdatasets +# python3 -m pip install google-shopping-merchant-accounts -# [START mapsplatformdatasets_v1alpha_generated_MapsPlatformDatasetsV1Alpha_UpdateDatasetMetadata_async] +# [START merchantapi_v1beta_generated_AccountsService_CreateAndConfigureAccount_sync] # This snippet has been automatically generated and should be regarded as a # code template only. 
# It will require modifications to work: @@ -31,21 +31,26 @@ # - It may require specifying regional endpoints when creating the service # client as shown in: # https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.maps import mapsplatformdatasets_v1alpha +from google.shopping import merchant_accounts_v1beta -async def sample_update_dataset_metadata(): +def sample_create_and_configure_account(): # Create a client - client = mapsplatformdatasets_v1alpha.MapsPlatformDatasetsV1AlphaAsyncClient() + client = merchant_accounts_v1beta.AccountsServiceClient() # Initialize request argument(s) - request = mapsplatformdatasets_v1alpha.UpdateDatasetMetadataRequest( + account = merchant_accounts_v1beta.Account() + account.account_name = "account_name_value" + account.language_code = "language_code_value" + + request = merchant_accounts_v1beta.CreateAndConfigureAccountRequest( + account=account, ) # Make the request - response = await client.update_dataset_metadata(request=request) + response = client.create_and_configure_account(request=request) # Handle the response print(response) -# [END mapsplatformdatasets_v1alpha_generated_MapsPlatformDatasetsV1Alpha_UpdateDatasetMetadata_async] +# [END merchantapi_v1beta_generated_AccountsService_CreateAndConfigureAccount_sync] diff --git a/packages/google-shopping-merchant-accounts/samples/generated_samples/merchantapi_v1beta_generated_accounts_service_delete_account_async.py b/packages/google-shopping-merchant-accounts/samples/generated_samples/merchantapi_v1beta_generated_accounts_service_delete_account_async.py new file mode 100644 index 000000000000..5325080df363 --- /dev/null +++ b/packages/google-shopping-merchant-accounts/samples/generated_samples/merchantapi_v1beta_generated_accounts_service_delete_account_async.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteAccount +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-shopping-merchant-accounts + + +# [START merchantapi_v1beta_generated_AccountsService_DeleteAccount_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.shopping import merchant_accounts_v1beta + + +async def sample_delete_account(): + # Create a client + client = merchant_accounts_v1beta.AccountsServiceAsyncClient() + + # Initialize request argument(s) + request = merchant_accounts_v1beta.DeleteAccountRequest( + name="name_value", + ) + + # Make the request + await client.delete_account(request=request) + + +# [END merchantapi_v1beta_generated_AccountsService_DeleteAccount_async] diff --git a/packages/google-shopping-merchant-accounts/samples/generated_samples/merchantapi_v1beta_generated_accounts_service_delete_account_sync.py b/packages/google-shopping-merchant-accounts/samples/generated_samples/merchantapi_v1beta_generated_accounts_service_delete_account_sync.py new file mode 100644 index 000000000000..e22bf5b78853 --- /dev/null +++ b/packages/google-shopping-merchant-accounts/samples/generated_samples/merchantapi_v1beta_generated_accounts_service_delete_account_sync.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteAccount +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-shopping-merchant-accounts + + +# [START merchantapi_v1beta_generated_AccountsService_DeleteAccount_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.shopping import merchant_accounts_v1beta + + +def sample_delete_account(): + # Create a client + client = merchant_accounts_v1beta.AccountsServiceClient() + + # Initialize request argument(s) + request = merchant_accounts_v1beta.DeleteAccountRequest( + name="name_value", + ) + + # Make the request + client.delete_account(request=request) + + +# [END merchantapi_v1beta_generated_AccountsService_DeleteAccount_sync] diff --git a/packages/google-shopping-merchant-accounts/samples/generated_samples/merchantapi_v1beta_generated_accounts_service_get_account_async.py b/packages/google-shopping-merchant-accounts/samples/generated_samples/merchantapi_v1beta_generated_accounts_service_get_account_async.py new file mode 100644 index 000000000000..7df8d70104f2 --- /dev/null +++ b/packages/google-shopping-merchant-accounts/samples/generated_samples/merchantapi_v1beta_generated_accounts_service_get_account_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetAccount +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-shopping-merchant-accounts + + +# [START merchantapi_v1beta_generated_AccountsService_GetAccount_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
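In both DeleteAccount samples, `name="name_value"` stands in for the account's resource name. Assuming the usual `accounts/{account}` format for this API, a call might look like the sketch below; the format is an assumption based on common resource-naming conventions, not something stated in the diff.

```python
from google.shopping import merchant_accounts_v1beta


def delete_account_sketch(account_id: str) -> None:
    client = merchant_accounts_v1beta.AccountsServiceClient()

    # Resource name format assumed to be "accounts/{account}".
    request = merchant_accounts_v1beta.DeleteAccountRequest(
        name=f"accounts/{account_id}",
    )

    # As in the generated sample, DeleteAccount returns no payload;
    # an exception signals failure.
    client.delete_account(request=request)
```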
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.shopping import merchant_accounts_v1beta + + +async def sample_get_account(): + # Create a client + client = merchant_accounts_v1beta.AccountsServiceAsyncClient() + + # Initialize request argument(s) + request = merchant_accounts_v1beta.GetAccountRequest( + name="name_value", + ) + + # Make the request + response = await client.get_account(request=request) + + # Handle the response + print(response) + +# [END merchantapi_v1beta_generated_AccountsService_GetAccount_async] diff --git a/packages/google-shopping-merchant-accounts/samples/generated_samples/merchantapi_v1beta_generated_accounts_service_get_account_sync.py b/packages/google-shopping-merchant-accounts/samples/generated_samples/merchantapi_v1beta_generated_accounts_service_get_account_sync.py new file mode 100644 index 000000000000..bb4d4006ad51 --- /dev/null +++ b/packages/google-shopping-merchant-accounts/samples/generated_samples/merchantapi_v1beta_generated_accounts_service_get_account_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetAccount +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-shopping-merchant-accounts + + +# [START merchantapi_v1beta_generated_AccountsService_GetAccount_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.shopping import merchant_accounts_v1beta + + +def sample_get_account(): + # Create a client + client = merchant_accounts_v1beta.AccountsServiceClient() + + # Initialize request argument(s) + request = merchant_accounts_v1beta.GetAccountRequest( + name="name_value", + ) + + # Make the request + response = client.get_account(request=request) + + # Handle the response + print(response) + +# [END merchantapi_v1beta_generated_AccountsService_GetAccount_sync] diff --git a/packages/google-shopping-merchant-accounts/samples/generated_samples/merchantapi_v1beta_generated_accounts_service_list_accounts_async.py b/packages/google-shopping-merchant-accounts/samples/generated_samples/merchantapi_v1beta_generated_accounts_service_list_accounts_async.py new file mode 100644 index 000000000000..490f8a95ab51 --- /dev/null +++ b/packages/google-shopping-merchant-accounts/samples/generated_samples/merchantapi_v1beta_generated_accounts_service_list_accounts_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListAccounts +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-shopping-merchant-accounts + + +# [START merchantapi_v1beta_generated_AccountsService_ListAccounts_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.shopping import merchant_accounts_v1beta + + +async def sample_list_accounts(): + # Create a client + client = merchant_accounts_v1beta.AccountsServiceAsyncClient() + + # Initialize request argument(s) + request = merchant_accounts_v1beta.ListAccountsRequest( + ) + + # Make the request + page_result = client.list_accounts(request=request) + + # Handle the response + async for response in page_result: + print(response) + +# [END merchantapi_v1beta_generated_AccountsService_ListAccounts_async] diff --git a/packages/google-shopping-merchant-accounts/samples/generated_samples/merchantapi_v1beta_generated_accounts_service_list_accounts_sync.py b/packages/google-shopping-merchant-accounts/samples/generated_samples/merchantapi_v1beta_generated_accounts_service_list_accounts_sync.py new file mode 100644 index 000000000000..61e728a1ec1d --- /dev/null +++ b/packages/google-shopping-merchant-accounts/samples/generated_samples/merchantapi_v1beta_generated_accounts_service_list_accounts_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListAccounts +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-shopping-merchant-accounts + + +# [START merchantapi_v1beta_generated_AccountsService_ListAccounts_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.shopping import merchant_accounts_v1beta + + +def sample_list_accounts(): + # Create a client + client = merchant_accounts_v1beta.AccountsServiceClient() + + # Initialize request argument(s) + request = merchant_accounts_v1beta.ListAccountsRequest( + ) + + # Make the request + page_result = client.list_accounts(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END merchantapi_v1beta_generated_AccountsService_ListAccounts_sync] diff --git a/packages/google-shopping-merchant-accounts/samples/generated_samples/merchantapi_v1beta_generated_accounts_service_list_sub_accounts_async.py b/packages/google-shopping-merchant-accounts/samples/generated_samples/merchantapi_v1beta_generated_accounts_service_list_sub_accounts_async.py new file mode 100644 index 000000000000..64c1620e14b1 --- /dev/null +++ b/packages/google-shopping-merchant-accounts/samples/generated_samples/merchantapi_v1beta_generated_accounts_service_list_sub_accounts_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListSubAccounts +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-shopping-merchant-accounts + + +# [START merchantapi_v1beta_generated_AccountsService_ListSubAccounts_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
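The ListAccounts samples return a pager rather than a plain list: the sync pager fetches further pages lazily as you iterate, and the async variant is consumed with `async for`. One caveat worth flagging: in typical GAPIC async clients the list method itself is a coroutine, so the async sample's `page_result = client.list_accounts(request=request)` would normally need an `await` before iterating. The sketch below shows that shape; treat the need for `await` as an assumption about this generated client rather than a confirmed fix.

```python
import asyncio

from google.shopping import merchant_accounts_v1beta


async def list_all_accounts_sketch() -> list:
    client = merchant_accounts_v1beta.AccountsServiceAsyncClient()
    request = merchant_accounts_v1beta.ListAccountsRequest()

    # Awaiting the call yields the async pager (assuming the method is a
    # coroutine, as in typical GAPIC async clients).
    pager = await client.list_accounts(request=request)

    # The pager transparently fetches additional pages while iterating.
    return [account async for account in pager]


if __name__ == "__main__":
    accounts = asyncio.run(list_all_accounts_sketch())
    print(f"Fetched {len(accounts)} accounts")
```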
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.shopping import merchant_accounts_v1beta + + +async def sample_list_sub_accounts(): + # Create a client + client = merchant_accounts_v1beta.AccountsServiceAsyncClient() + + # Initialize request argument(s) + request = merchant_accounts_v1beta.ListSubAccountsRequest( + provider="provider_value", + ) + + # Make the request + page_result = client.list_sub_accounts(request=request) + + # Handle the response + async for response in page_result: + print(response) + +# [END merchantapi_v1beta_generated_AccountsService_ListSubAccounts_async] diff --git a/packages/google-shopping-merchant-accounts/samples/generated_samples/merchantapi_v1beta_generated_accounts_service_list_sub_accounts_sync.py b/packages/google-shopping-merchant-accounts/samples/generated_samples/merchantapi_v1beta_generated_accounts_service_list_sub_accounts_sync.py new file mode 100644 index 000000000000..f0e31a111510 --- /dev/null +++ b/packages/google-shopping-merchant-accounts/samples/generated_samples/merchantapi_v1beta_generated_accounts_service_list_sub_accounts_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListSubAccounts +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-shopping-merchant-accounts + + +# [START merchantapi_v1beta_generated_AccountsService_ListSubAccounts_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.shopping import merchant_accounts_v1beta + + +def sample_list_sub_accounts(): + # Create a client + client = merchant_accounts_v1beta.AccountsServiceClient() + + # Initialize request argument(s) + request = merchant_accounts_v1beta.ListSubAccountsRequest( + provider="provider_value", + ) + + # Make the request + page_result = client.list_sub_accounts(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END merchantapi_v1beta_generated_AccountsService_ListSubAccounts_sync] diff --git a/packages/google-shopping-merchant-accounts/samples/generated_samples/merchantapi_v1beta_generated_accounts_service_update_account_async.py b/packages/google-shopping-merchant-accounts/samples/generated_samples/merchantapi_v1beta_generated_accounts_service_update_account_async.py new file mode 100644 index 000000000000..ad622e5013b6 --- /dev/null +++ b/packages/google-shopping-merchant-accounts/samples/generated_samples/merchantapi_v1beta_generated_accounts_service_update_account_async.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateAccount +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-shopping-merchant-accounts + + +# [START merchantapi_v1beta_generated_AccountsService_UpdateAccount_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
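In the ListSubAccounts samples, `provider="provider_value"` is a placeholder for the aggregator (parent) account whose sub-accounts are listed; it appears to take the same `accounts/{account}` resource-name form used elsewhere in this API, though that format is an assumption. A small sync sketch under that assumption:

```python
from google.shopping import merchant_accounts_v1beta


def list_sub_accounts_sketch(provider_account_id: str) -> list:
    client = merchant_accounts_v1beta.AccountsServiceClient()

    request = merchant_accounts_v1beta.ListSubAccountsRequest(
        provider=f"accounts/{provider_account_id}",  # assumed format
    )

    # Materialize the pager; it pulls additional pages as needed.
    return list(client.list_sub_accounts(request=request))
```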
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.shopping import merchant_accounts_v1beta + + +async def sample_update_account(): + # Create a client + client = merchant_accounts_v1beta.AccountsServiceAsyncClient() + + # Initialize request argument(s) + account = merchant_accounts_v1beta.Account() + account.account_name = "account_name_value" + account.language_code = "language_code_value" + + request = merchant_accounts_v1beta.UpdateAccountRequest( + account=account, + ) + + # Make the request + response = await client.update_account(request=request) + + # Handle the response + print(response) + +# [END merchantapi_v1beta_generated_AccountsService_UpdateAccount_async] diff --git a/packages/google-shopping-merchant-accounts/samples/generated_samples/merchantapi_v1beta_generated_accounts_service_update_account_sync.py b/packages/google-shopping-merchant-accounts/samples/generated_samples/merchantapi_v1beta_generated_accounts_service_update_account_sync.py new file mode 100644 index 000000000000..9d9c51fc483b --- /dev/null +++ b/packages/google-shopping-merchant-accounts/samples/generated_samples/merchantapi_v1beta_generated_accounts_service_update_account_sync.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateAccount +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-shopping-merchant-accounts + + +# [START merchantapi_v1beta_generated_AccountsService_UpdateAccount_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.shopping import merchant_accounts_v1beta + + +def sample_update_account(): + # Create a client + client = merchant_accounts_v1beta.AccountsServiceClient() + + # Initialize request argument(s) + account = merchant_accounts_v1beta.Account() + account.account_name = "account_name_value" + account.language_code = "language_code_value" + + request = merchant_accounts_v1beta.UpdateAccountRequest( + account=account, + ) + + # Make the request + response = client.update_account(request=request) + + # Handle the response + print(response) + +# [END merchantapi_v1beta_generated_AccountsService_UpdateAccount_sync] diff --git a/packages/google-shopping-merchant-accounts/samples/generated_samples/merchantapi_v1beta_generated_business_identity_service_get_business_identity_async.py b/packages/google-shopping-merchant-accounts/samples/generated_samples/merchantapi_v1beta_generated_business_identity_service_get_business_identity_async.py new file mode 100644 index 000000000000..8637e25620b7 --- /dev/null +++ b/packages/google-shopping-merchant-accounts/samples/generated_samples/merchantapi_v1beta_generated_business_identity_service_get_business_identity_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetBusinessIdentity +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-shopping-merchant-accounts + + +# [START merchantapi_v1beta_generated_BusinessIdentityService_GetBusinessIdentity_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
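Both UpdateAccount samples send an `Account` with placeholder fields and no field mask. Update methods in Google APIs generally follow AIP-134, where an `update_mask` names the fields being changed; assuming this request exposes the customary `update_mask` field and that the account's `name` identifies the resource, a more realistic call might look like the following sketch (field names are assumptions, not taken from the diff):

```python
from google.protobuf import field_mask_pb2

from google.shopping import merchant_accounts_v1beta


def rename_account_sketch(account_id: str, new_display_name: str):
    client = merchant_accounts_v1beta.AccountsServiceClient()

    account = merchant_accounts_v1beta.Account()
    account.name = f"accounts/{account_id}"    # resource being updated (assumed format)
    account.account_name = new_display_name    # field being changed

    request = merchant_accounts_v1beta.UpdateAccountRequest(
        account=account,
        update_mask=field_mask_pb2.FieldMask(paths=["account_name"]),
    )
    return client.update_account(request=request)
```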
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.shopping import merchant_accounts_v1beta + + +async def sample_get_business_identity(): + # Create a client + client = merchant_accounts_v1beta.BusinessIdentityServiceAsyncClient() + + # Initialize request argument(s) + request = merchant_accounts_v1beta.GetBusinessIdentityRequest( + name="name_value", + ) + + # Make the request + response = await client.get_business_identity(request=request) + + # Handle the response + print(response) + +# [END merchantapi_v1beta_generated_BusinessIdentityService_GetBusinessIdentity_async] diff --git a/packages/google-shopping-merchant-accounts/samples/generated_samples/merchantapi_v1beta_generated_business_identity_service_get_business_identity_sync.py b/packages/google-shopping-merchant-accounts/samples/generated_samples/merchantapi_v1beta_generated_business_identity_service_get_business_identity_sync.py new file mode 100644 index 000000000000..235ce1598afa --- /dev/null +++ b/packages/google-shopping-merchant-accounts/samples/generated_samples/merchantapi_v1beta_generated_business_identity_service_get_business_identity_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetBusinessIdentity +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-shopping-merchant-accounts + + +# [START merchantapi_v1beta_generated_BusinessIdentityService_GetBusinessIdentity_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.shopping import merchant_accounts_v1beta + + +def sample_get_business_identity(): + # Create a client + client = merchant_accounts_v1beta.BusinessIdentityServiceClient() + + # Initialize request argument(s) + request = merchant_accounts_v1beta.GetBusinessIdentityRequest( + name="name_value", + ) + + # Make the request + response = client.get_business_identity(request=request) + + # Handle the response + print(response) + +# [END merchantapi_v1beta_generated_BusinessIdentityService_GetBusinessIdentity_sync] diff --git a/packages/google-shopping-merchant-accounts/samples/generated_samples/merchantapi_v1beta_generated_business_identity_service_update_business_identity_async.py b/packages/google-shopping-merchant-accounts/samples/generated_samples/merchantapi_v1beta_generated_business_identity_service_update_business_identity_async.py new file mode 100644 index 000000000000..089129c55615 --- /dev/null +++ b/packages/google-shopping-merchant-accounts/samples/generated_samples/merchantapi_v1beta_generated_business_identity_service_update_business_identity_async.py @@ -0,0 +1,51 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateBusinessIdentity +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-shopping-merchant-accounts + + +# [START merchantapi_v1beta_generated_BusinessIdentityService_UpdateBusinessIdentity_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.shopping import merchant_accounts_v1beta + + +async def sample_update_business_identity(): + # Create a client + client = merchant_accounts_v1beta.BusinessIdentityServiceAsyncClient() + + # Initialize request argument(s) + request = merchant_accounts_v1beta.UpdateBusinessIdentityRequest( + ) + + # Make the request + response = await client.update_business_identity(request=request) + + # Handle the response + print(response) + +# [END merchantapi_v1beta_generated_BusinessIdentityService_UpdateBusinessIdentity_async] diff --git a/packages/google-shopping-merchant-accounts/samples/generated_samples/merchantapi_v1beta_generated_business_identity_service_update_business_identity_sync.py b/packages/google-shopping-merchant-accounts/samples/generated_samples/merchantapi_v1beta_generated_business_identity_service_update_business_identity_sync.py new file mode 100644 index 000000000000..b44f9ec5d2bf --- /dev/null +++ b/packages/google-shopping-merchant-accounts/samples/generated_samples/merchantapi_v1beta_generated_business_identity_service_update_business_identity_sync.py @@ -0,0 +1,51 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateBusinessIdentity +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-shopping-merchant-accounts + + +# [START merchantapi_v1beta_generated_BusinessIdentityService_UpdateBusinessIdentity_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.shopping import merchant_accounts_v1beta + + +def sample_update_business_identity(): + # Create a client + client = merchant_accounts_v1beta.BusinessIdentityServiceClient() + + # Initialize request argument(s) + request = merchant_accounts_v1beta.UpdateBusinessIdentityRequest( + ) + + # Make the request + response = client.update_business_identity(request=request) + + # Handle the response + print(response) + +# [END merchantapi_v1beta_generated_BusinessIdentityService_UpdateBusinessIdentity_sync] diff --git a/packages/google-shopping-merchant-accounts/samples/generated_samples/merchantapi_v1beta_generated_business_info_service_get_business_info_async.py b/packages/google-shopping-merchant-accounts/samples/generated_samples/merchantapi_v1beta_generated_business_info_service_get_business_info_async.py new file mode 100644 index 000000000000..15a59aa09700 --- /dev/null +++ b/packages/google-shopping-merchant-accounts/samples/generated_samples/merchantapi_v1beta_generated_business_info_service_get_business_info_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetBusinessInfo +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-shopping-merchant-accounts + + +# [START merchantapi_v1beta_generated_BusinessInfoService_GetBusinessInfo_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.shopping import merchant_accounts_v1beta + + +async def sample_get_business_info(): + # Create a client + client = merchant_accounts_v1beta.BusinessInfoServiceAsyncClient() + + # Initialize request argument(s) + request = merchant_accounts_v1beta.GetBusinessInfoRequest( + name="name_value", + ) + + # Make the request + response = await client.get_business_info(request=request) + + # Handle the response + print(response) + +# [END merchantapi_v1beta_generated_BusinessInfoService_GetBusinessInfo_async] diff --git a/packages/google-shopping-merchant-accounts/samples/generated_samples/merchantapi_v1beta_generated_business_info_service_get_business_info_sync.py b/packages/google-shopping-merchant-accounts/samples/generated_samples/merchantapi_v1beta_generated_business_info_service_get_business_info_sync.py new file mode 100644 index 000000000000..0e27da028a6e --- /dev/null +++ b/packages/google-shopping-merchant-accounts/samples/generated_samples/merchantapi_v1beta_generated_business_info_service_get_business_info_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetBusinessInfo +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-shopping-merchant-accounts + + +# [START merchantapi_v1beta_generated_BusinessInfoService_GetBusinessInfo_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.shopping import merchant_accounts_v1beta + + +def sample_get_business_info(): + # Create a client + client = merchant_accounts_v1beta.BusinessInfoServiceClient() + + # Initialize request argument(s) + request = merchant_accounts_v1beta.GetBusinessInfoRequest( + name="name_value", + ) + + # Make the request + response = client.get_business_info(request=request) + + # Handle the response + print(response) + +# [END merchantapi_v1beta_generated_BusinessInfoService_GetBusinessInfo_sync] diff --git a/packages/google-shopping-merchant-accounts/samples/generated_samples/merchantapi_v1beta_generated_business_info_service_update_business_info_async.py b/packages/google-shopping-merchant-accounts/samples/generated_samples/merchantapi_v1beta_generated_business_info_service_update_business_info_async.py new file mode 100644 index 000000000000..fdefa869c879 --- /dev/null +++ b/packages/google-shopping-merchant-accounts/samples/generated_samples/merchantapi_v1beta_generated_business_info_service_update_business_info_async.py @@ -0,0 +1,51 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateBusinessInfo +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-shopping-merchant-accounts + + +# [START merchantapi_v1beta_generated_BusinessInfoService_UpdateBusinessInfo_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.shopping import merchant_accounts_v1beta + + +async def sample_update_business_info(): + # Create a client + client = merchant_accounts_v1beta.BusinessInfoServiceAsyncClient() + + # Initialize request argument(s) + request = merchant_accounts_v1beta.UpdateBusinessInfoRequest( + ) + + # Make the request + response = await client.update_business_info(request=request) + + # Handle the response + print(response) + +# [END merchantapi_v1beta_generated_BusinessInfoService_UpdateBusinessInfo_async] diff --git a/packages/google-shopping-merchant-accounts/samples/generated_samples/merchantapi_v1beta_generated_business_info_service_update_business_info_sync.py b/packages/google-shopping-merchant-accounts/samples/generated_samples/merchantapi_v1beta_generated_business_info_service_update_business_info_sync.py new file mode 100644 index 000000000000..27702757d482 --- /dev/null +++ b/packages/google-shopping-merchant-accounts/samples/generated_samples/merchantapi_v1beta_generated_business_info_service_update_business_info_sync.py @@ -0,0 +1,51 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateBusinessInfo +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-shopping-merchant-accounts + + +# [START merchantapi_v1beta_generated_BusinessInfoService_UpdateBusinessInfo_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.shopping import merchant_accounts_v1beta + + +def sample_update_business_info(): + # Create a client + client = merchant_accounts_v1beta.BusinessInfoServiceClient() + + # Initialize request argument(s) + request = merchant_accounts_v1beta.UpdateBusinessInfoRequest( + ) + + # Make the request + response = client.update_business_info(request=request) + + # Handle the response + print(response) + +# [END merchantapi_v1beta_generated_BusinessInfoService_UpdateBusinessInfo_sync] diff --git a/packages/google-shopping-merchant-accounts/samples/generated_samples/merchantapi_v1beta_generated_email_preferences_service_get_email_preferences_async.py b/packages/google-shopping-merchant-accounts/samples/generated_samples/merchantapi_v1beta_generated_email_preferences_service_get_email_preferences_async.py new file mode 100644 index 000000000000..9f26a449a0c3 --- /dev/null +++ b/packages/google-shopping-merchant-accounts/samples/generated_samples/merchantapi_v1beta_generated_email_preferences_service_get_email_preferences_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetEmailPreferences +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-shopping-merchant-accounts + + +# [START merchantapi_v1beta_generated_EmailPreferencesService_GetEmailPreferences_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
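The UpdateBusinessIdentity and UpdateBusinessInfo samples above construct completely empty requests, which is only a scaffold: a real update normally carries the resource (with its `name` set) plus an `update_mask` listing the changed fields, in the AIP-134 style. The sketch below illustrates that shape for BusinessInfo; the field names (`business_info`), the `accounts/{account}/businessInfo` name format, and the mask path are assumptions based on common API conventions rather than details taken from the diff.

```python
from google.protobuf import field_mask_pb2

from google.shopping import merchant_accounts_v1beta


def update_business_info_sketch(account_id: str):
    client = merchant_accounts_v1beta.BusinessInfoServiceClient()

    business_info = merchant_accounts_v1beta.BusinessInfo()
    business_info.name = f"accounts/{account_id}/businessInfo"  # assumed format

    # Populate whichever BusinessInfo fields you intend to change, then list
    # their paths in the mask (the path below is illustrative only).
    request = merchant_accounts_v1beta.UpdateBusinessInfoRequest(
        business_info=business_info,
        update_mask=field_mask_pb2.FieldMask(paths=["address"]),
    )
    return client.update_business_info(request=request)
```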
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.shopping import merchant_accounts_v1beta + + +async def sample_get_email_preferences(): + # Create a client + client = merchant_accounts_v1beta.EmailPreferencesServiceAsyncClient() + + # Initialize request argument(s) + request = merchant_accounts_v1beta.GetEmailPreferencesRequest( + name="name_value", + ) + + # Make the request + response = await client.get_email_preferences(request=request) + + # Handle the response + print(response) + +# [END merchantapi_v1beta_generated_EmailPreferencesService_GetEmailPreferences_async] diff --git a/packages/google-shopping-merchant-accounts/samples/generated_samples/merchantapi_v1beta_generated_email_preferences_service_get_email_preferences_sync.py b/packages/google-shopping-merchant-accounts/samples/generated_samples/merchantapi_v1beta_generated_email_preferences_service_get_email_preferences_sync.py new file mode 100644 index 000000000000..30fb5c58c714 --- /dev/null +++ b/packages/google-shopping-merchant-accounts/samples/generated_samples/merchantapi_v1beta_generated_email_preferences_service_get_email_preferences_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetEmailPreferences +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-shopping-merchant-accounts + + +# [START merchantapi_v1beta_generated_EmailPreferencesService_GetEmailPreferences_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.shopping import merchant_accounts_v1beta + + +def sample_get_email_preferences(): + # Create a client + client = merchant_accounts_v1beta.EmailPreferencesServiceClient() + + # Initialize request argument(s) + request = merchant_accounts_v1beta.GetEmailPreferencesRequest( + name="name_value", + ) + + # Make the request + response = client.get_email_preferences(request=request) + + # Handle the response + print(response) + +# [END merchantapi_v1beta_generated_EmailPreferencesService_GetEmailPreferences_sync] diff --git a/packages/google-shopping-merchant-accounts/samples/generated_samples/merchantapi_v1beta_generated_email_preferences_service_update_email_preferences_async.py b/packages/google-shopping-merchant-accounts/samples/generated_samples/merchantapi_v1beta_generated_email_preferences_service_update_email_preferences_async.py new file mode 100644 index 000000000000..24812e4eca3a --- /dev/null +++ b/packages/google-shopping-merchant-accounts/samples/generated_samples/merchantapi_v1beta_generated_email_preferences_service_update_email_preferences_async.py @@ -0,0 +1,51 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateEmailPreferences +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-shopping-merchant-accounts + + +# [START merchantapi_v1beta_generated_EmailPreferencesService_UpdateEmailPreferences_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.shopping import merchant_accounts_v1beta + + +async def sample_update_email_preferences(): + # Create a client + client = merchant_accounts_v1beta.EmailPreferencesServiceAsyncClient() + + # Initialize request argument(s) + request = merchant_accounts_v1beta.UpdateEmailPreferencesRequest( + ) + + # Make the request + response = await client.update_email_preferences(request=request) + + # Handle the response + print(response) + +# [END merchantapi_v1beta_generated_EmailPreferencesService_UpdateEmailPreferences_async] diff --git a/packages/google-shopping-merchant-accounts/samples/generated_samples/merchantapi_v1beta_generated_email_preferences_service_update_email_preferences_sync.py b/packages/google-shopping-merchant-accounts/samples/generated_samples/merchantapi_v1beta_generated_email_preferences_service_update_email_preferences_sync.py new file mode 100644 index 000000000000..12030b2c4e83 --- /dev/null +++ b/packages/google-shopping-merchant-accounts/samples/generated_samples/merchantapi_v1beta_generated_email_preferences_service_update_email_preferences_sync.py @@ -0,0 +1,51 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateEmailPreferences +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-shopping-merchant-accounts + + +# [START merchantapi_v1beta_generated_EmailPreferencesService_UpdateEmailPreferences_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.shopping import merchant_accounts_v1beta + + +def sample_update_email_preferences(): + # Create a client + client = merchant_accounts_v1beta.EmailPreferencesServiceClient() + + # Initialize request argument(s) + request = merchant_accounts_v1beta.UpdateEmailPreferencesRequest( + ) + + # Make the request + response = client.update_email_preferences(request=request) + + # Handle the response + print(response) + +# [END merchantapi_v1beta_generated_EmailPreferencesService_UpdateEmailPreferences_sync] diff --git a/packages/google-shopping-merchant-accounts/samples/generated_samples/merchantapi_v1beta_generated_homepage_service_claim_homepage_async.py b/packages/google-shopping-merchant-accounts/samples/generated_samples/merchantapi_v1beta_generated_homepage_service_claim_homepage_async.py new file mode 100644 index 000000000000..17dc74c7d647 --- /dev/null +++ b/packages/google-shopping-merchant-accounts/samples/generated_samples/merchantapi_v1beta_generated_homepage_service_claim_homepage_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ClaimHomepage +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-shopping-merchant-accounts + + +# [START merchantapi_v1beta_generated_HomepageService_ClaimHomepage_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.shopping import merchant_accounts_v1beta + + +async def sample_claim_homepage(): + # Create a client + client = merchant_accounts_v1beta.HomepageServiceAsyncClient() + + # Initialize request argument(s) + request = merchant_accounts_v1beta.ClaimHomepageRequest( + name="name_value", + ) + + # Make the request + response = await client.claim_homepage(request=request) + + # Handle the response + print(response) + +# [END merchantapi_v1beta_generated_HomepageService_ClaimHomepage_async] diff --git a/packages/google-shopping-merchant-accounts/samples/generated_samples/merchantapi_v1beta_generated_homepage_service_claim_homepage_sync.py b/packages/google-shopping-merchant-accounts/samples/generated_samples/merchantapi_v1beta_generated_homepage_service_claim_homepage_sync.py new file mode 100644 index 000000000000..1ba1650aaccc --- /dev/null +++ b/packages/google-shopping-merchant-accounts/samples/generated_samples/merchantapi_v1beta_generated_homepage_service_claim_homepage_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ClaimHomepage +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-shopping-merchant-accounts + + +# [START merchantapi_v1beta_generated_HomepageService_ClaimHomepage_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.shopping import merchant_accounts_v1beta + + +def sample_claim_homepage(): + # Create a client + client = merchant_accounts_v1beta.HomepageServiceClient() + + # Initialize request argument(s) + request = merchant_accounts_v1beta.ClaimHomepageRequest( + name="name_value", + ) + + # Make the request + response = client.claim_homepage(request=request) + + # Handle the response + print(response) + +# [END merchantapi_v1beta_generated_HomepageService_ClaimHomepage_sync] diff --git a/packages/google-shopping-merchant-accounts/samples/generated_samples/merchantapi_v1beta_generated_homepage_service_get_homepage_async.py b/packages/google-shopping-merchant-accounts/samples/generated_samples/merchantapi_v1beta_generated_homepage_service_get_homepage_async.py new file mode 100644 index 000000000000..b098778ce5f9 --- /dev/null +++ b/packages/google-shopping-merchant-accounts/samples/generated_samples/merchantapi_v1beta_generated_homepage_service_get_homepage_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetHomepage +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-shopping-merchant-accounts + + +# [START merchantapi_v1beta_generated_HomepageService_GetHomepage_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.shopping import merchant_accounts_v1beta + + +async def sample_get_homepage(): + # Create a client + client = merchant_accounts_v1beta.HomepageServiceAsyncClient() + + # Initialize request argument(s) + request = merchant_accounts_v1beta.GetHomepageRequest( + name="name_value", + ) + + # Make the request + response = await client.get_homepage(request=request) + + # Handle the response + print(response) + +# [END merchantapi_v1beta_generated_HomepageService_GetHomepage_async] diff --git a/packages/google-shopping-merchant-accounts/samples/generated_samples/merchantapi_v1beta_generated_homepage_service_get_homepage_sync.py b/packages/google-shopping-merchant-accounts/samples/generated_samples/merchantapi_v1beta_generated_homepage_service_get_homepage_sync.py new file mode 100644 index 000000000000..f61ef0d4a6ec --- /dev/null +++ b/packages/google-shopping-merchant-accounts/samples/generated_samples/merchantapi_v1beta_generated_homepage_service_get_homepage_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetHomepage +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-shopping-merchant-accounts + + +# [START merchantapi_v1beta_generated_HomepageService_GetHomepage_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.shopping import merchant_accounts_v1beta + + +def sample_get_homepage(): + # Create a client + client = merchant_accounts_v1beta.HomepageServiceClient() + + # Initialize request argument(s) + request = merchant_accounts_v1beta.GetHomepageRequest( + name="name_value", + ) + + # Make the request + response = client.get_homepage(request=request) + + # Handle the response + print(response) + +# [END merchantapi_v1beta_generated_HomepageService_GetHomepage_sync] diff --git a/packages/google-shopping-merchant-accounts/samples/generated_samples/merchantapi_v1beta_generated_homepage_service_unclaim_homepage_async.py b/packages/google-shopping-merchant-accounts/samples/generated_samples/merchantapi_v1beta_generated_homepage_service_unclaim_homepage_async.py new file mode 100644 index 000000000000..e8a1f01c1c48 --- /dev/null +++ b/packages/google-shopping-merchant-accounts/samples/generated_samples/merchantapi_v1beta_generated_homepage_service_unclaim_homepage_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UnclaimHomepage +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-shopping-merchant-accounts + + +# [START merchantapi_v1beta_generated_HomepageService_UnclaimHomepage_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.shopping import merchant_accounts_v1beta + + +async def sample_unclaim_homepage(): + # Create a client + client = merchant_accounts_v1beta.HomepageServiceAsyncClient() + + # Initialize request argument(s) + request = merchant_accounts_v1beta.UnclaimHomepageRequest( + name="name_value", + ) + + # Make the request + response = await client.unclaim_homepage(request=request) + + # Handle the response + print(response) + +# [END merchantapi_v1beta_generated_HomepageService_UnclaimHomepage_async] diff --git a/packages/google-shopping-merchant-accounts/samples/generated_samples/merchantapi_v1beta_generated_homepage_service_unclaim_homepage_sync.py b/packages/google-shopping-merchant-accounts/samples/generated_samples/merchantapi_v1beta_generated_homepage_service_unclaim_homepage_sync.py new file mode 100644 index 000000000000..3f1b53762486 --- /dev/null +++ b/packages/google-shopping-merchant-accounts/samples/generated_samples/merchantapi_v1beta_generated_homepage_service_unclaim_homepage_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UnclaimHomepage +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-shopping-merchant-accounts + + +# [START merchantapi_v1beta_generated_HomepageService_UnclaimHomepage_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.shopping import merchant_accounts_v1beta + + +def sample_unclaim_homepage(): + # Create a client + client = merchant_accounts_v1beta.HomepageServiceClient() + + # Initialize request argument(s) + request = merchant_accounts_v1beta.UnclaimHomepageRequest( + name="name_value", + ) + + # Make the request + response = client.unclaim_homepage(request=request) + + # Handle the response + print(response) + +# [END merchantapi_v1beta_generated_HomepageService_UnclaimHomepage_sync] diff --git a/packages/google-shopping-merchant-accounts/samples/generated_samples/merchantapi_v1beta_generated_homepage_service_update_homepage_async.py b/packages/google-shopping-merchant-accounts/samples/generated_samples/merchantapi_v1beta_generated_homepage_service_update_homepage_async.py new file mode 100644 index 000000000000..46218f36725d --- /dev/null +++ b/packages/google-shopping-merchant-accounts/samples/generated_samples/merchantapi_v1beta_generated_homepage_service_update_homepage_async.py @@ -0,0 +1,51 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateHomepage +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-shopping-merchant-accounts + + +# [START merchantapi_v1beta_generated_HomepageService_UpdateHomepage_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.shopping import merchant_accounts_v1beta + + +async def sample_update_homepage(): + # Create a client + client = merchant_accounts_v1beta.HomepageServiceAsyncClient() + + # Initialize request argument(s) + request = merchant_accounts_v1beta.UpdateHomepageRequest( + ) + + # Make the request + response = await client.update_homepage(request=request) + + # Handle the response + print(response) + +# [END merchantapi_v1beta_generated_HomepageService_UpdateHomepage_async] diff --git a/packages/google-shopping-merchant-accounts/samples/generated_samples/merchantapi_v1beta_generated_homepage_service_update_homepage_sync.py b/packages/google-shopping-merchant-accounts/samples/generated_samples/merchantapi_v1beta_generated_homepage_service_update_homepage_sync.py new file mode 100644 index 000000000000..4e43a500a08e --- /dev/null +++ b/packages/google-shopping-merchant-accounts/samples/generated_samples/merchantapi_v1beta_generated_homepage_service_update_homepage_sync.py @@ -0,0 +1,51 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateHomepage +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-shopping-merchant-accounts + + +# [START merchantapi_v1beta_generated_HomepageService_UpdateHomepage_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.shopping import merchant_accounts_v1beta + + +def sample_update_homepage(): + # Create a client + client = merchant_accounts_v1beta.HomepageServiceClient() + + # Initialize request argument(s) + request = merchant_accounts_v1beta.UpdateHomepageRequest( + ) + + # Make the request + response = client.update_homepage(request=request) + + # Handle the response + print(response) + +# [END merchantapi_v1beta_generated_HomepageService_UpdateHomepage_sync] diff --git a/packages/google-shopping-merchant-accounts/samples/generated_samples/merchantapi_v1beta_generated_online_return_policy_service_get_online_return_policy_async.py b/packages/google-shopping-merchant-accounts/samples/generated_samples/merchantapi_v1beta_generated_online_return_policy_service_get_online_return_policy_async.py new file mode 100644 index 000000000000..cb00645bdc37 --- /dev/null +++ b/packages/google-shopping-merchant-accounts/samples/generated_samples/merchantapi_v1beta_generated_online_return_policy_service_get_online_return_policy_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetOnlineReturnPolicy +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-shopping-merchant-accounts + + +# [START merchantapi_v1beta_generated_OnlineReturnPolicyService_GetOnlineReturnPolicy_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.shopping import merchant_accounts_v1beta + + +async def sample_get_online_return_policy(): + # Create a client + client = merchant_accounts_v1beta.OnlineReturnPolicyServiceAsyncClient() + + # Initialize request argument(s) + request = merchant_accounts_v1beta.GetOnlineReturnPolicyRequest( + name="name_value", + ) + + # Make the request + response = await client.get_online_return_policy(request=request) + + # Handle the response + print(response) + +# [END merchantapi_v1beta_generated_OnlineReturnPolicyService_GetOnlineReturnPolicy_async] diff --git a/packages/google-shopping-merchant-accounts/samples/generated_samples/merchantapi_v1beta_generated_online_return_policy_service_get_online_return_policy_sync.py b/packages/google-shopping-merchant-accounts/samples/generated_samples/merchantapi_v1beta_generated_online_return_policy_service_get_online_return_policy_sync.py new file mode 100644 index 000000000000..c9390dec8f0f --- /dev/null +++ b/packages/google-shopping-merchant-accounts/samples/generated_samples/merchantapi_v1beta_generated_online_return_policy_service_get_online_return_policy_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetOnlineReturnPolicy +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-shopping-merchant-accounts + + +# [START merchantapi_v1beta_generated_OnlineReturnPolicyService_GetOnlineReturnPolicy_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.shopping import merchant_accounts_v1beta + + +def sample_get_online_return_policy(): + # Create a client + client = merchant_accounts_v1beta.OnlineReturnPolicyServiceClient() + + # Initialize request argument(s) + request = merchant_accounts_v1beta.GetOnlineReturnPolicyRequest( + name="name_value", + ) + + # Make the request + response = client.get_online_return_policy(request=request) + + # Handle the response + print(response) + +# [END merchantapi_v1beta_generated_OnlineReturnPolicyService_GetOnlineReturnPolicy_sync] diff --git a/packages/google-shopping-merchant-accounts/samples/generated_samples/merchantapi_v1beta_generated_online_return_policy_service_list_online_return_policies_async.py b/packages/google-shopping-merchant-accounts/samples/generated_samples/merchantapi_v1beta_generated_online_return_policy_service_list_online_return_policies_async.py new file mode 100644 index 000000000000..0e74c48947e7 --- /dev/null +++ b/packages/google-shopping-merchant-accounts/samples/generated_samples/merchantapi_v1beta_generated_online_return_policy_service_list_online_return_policies_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListOnlineReturnPolicies +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-shopping-merchant-accounts + + +# [START merchantapi_v1beta_generated_OnlineReturnPolicyService_ListOnlineReturnPolicies_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.shopping import merchant_accounts_v1beta + + +async def sample_list_online_return_policies(): + # Create a client + client = merchant_accounts_v1beta.OnlineReturnPolicyServiceAsyncClient() + + # Initialize request argument(s) + request = merchant_accounts_v1beta.ListOnlineReturnPoliciesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_online_return_policies(request=request) + + # Handle the response + async for response in page_result: + print(response) + +# [END merchantapi_v1beta_generated_OnlineReturnPolicyService_ListOnlineReturnPolicies_async] diff --git a/packages/google-shopping-merchant-accounts/samples/generated_samples/merchantapi_v1beta_generated_online_return_policy_service_list_online_return_policies_sync.py b/packages/google-shopping-merchant-accounts/samples/generated_samples/merchantapi_v1beta_generated_online_return_policy_service_list_online_return_policies_sync.py new file mode 100644 index 000000000000..26e715bdb847 --- /dev/null +++ b/packages/google-shopping-merchant-accounts/samples/generated_samples/merchantapi_v1beta_generated_online_return_policy_service_list_online_return_policies_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListOnlineReturnPolicies +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-shopping-merchant-accounts + + +# [START merchantapi_v1beta_generated_OnlineReturnPolicyService_ListOnlineReturnPolicies_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.shopping import merchant_accounts_v1beta + + +def sample_list_online_return_policies(): + # Create a client + client = merchant_accounts_v1beta.OnlineReturnPolicyServiceClient() + + # Initialize request argument(s) + request = merchant_accounts_v1beta.ListOnlineReturnPoliciesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_online_return_policies(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END merchantapi_v1beta_generated_OnlineReturnPolicyService_ListOnlineReturnPolicies_sync] diff --git a/packages/google-shopping-merchant-accounts/samples/generated_samples/merchantapi_v1beta_generated_programs_service_disable_program_async.py b/packages/google-shopping-merchant-accounts/samples/generated_samples/merchantapi_v1beta_generated_programs_service_disable_program_async.py new file mode 100644 index 000000000000..8a9d70bedb0e --- /dev/null +++ b/packages/google-shopping-merchant-accounts/samples/generated_samples/merchantapi_v1beta_generated_programs_service_disable_program_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DisableProgram +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-shopping-merchant-accounts + + +# [START merchantapi_v1beta_generated_ProgramsService_DisableProgram_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.shopping import merchant_accounts_v1beta + + +async def sample_disable_program(): + # Create a client + client = merchant_accounts_v1beta.ProgramsServiceAsyncClient() + + # Initialize request argument(s) + request = merchant_accounts_v1beta.DisableProgramRequest( + name="name_value", + ) + + # Make the request + response = await client.disable_program(request=request) + + # Handle the response + print(response) + +# [END merchantapi_v1beta_generated_ProgramsService_DisableProgram_async] diff --git a/packages/google-shopping-merchant-accounts/samples/generated_samples/merchantapi_v1beta_generated_programs_service_disable_program_sync.py b/packages/google-shopping-merchant-accounts/samples/generated_samples/merchantapi_v1beta_generated_programs_service_disable_program_sync.py new file mode 100644 index 000000000000..6c2c98163a78 --- /dev/null +++ b/packages/google-shopping-merchant-accounts/samples/generated_samples/merchantapi_v1beta_generated_programs_service_disable_program_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DisableProgram +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-shopping-merchant-accounts + + +# [START merchantapi_v1beta_generated_ProgramsService_DisableProgram_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.shopping import merchant_accounts_v1beta + + +def sample_disable_program(): + # Create a client + client = merchant_accounts_v1beta.ProgramsServiceClient() + + # Initialize request argument(s) + request = merchant_accounts_v1beta.DisableProgramRequest( + name="name_value", + ) + + # Make the request + response = client.disable_program(request=request) + + # Handle the response + print(response) + +# [END merchantapi_v1beta_generated_ProgramsService_DisableProgram_sync] diff --git a/packages/google-shopping-merchant-accounts/samples/generated_samples/merchantapi_v1beta_generated_programs_service_enable_program_async.py b/packages/google-shopping-merchant-accounts/samples/generated_samples/merchantapi_v1beta_generated_programs_service_enable_program_async.py new file mode 100644 index 000000000000..819600faa170 --- /dev/null +++ b/packages/google-shopping-merchant-accounts/samples/generated_samples/merchantapi_v1beta_generated_programs_service_enable_program_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for EnableProgram +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-shopping-merchant-accounts + + +# [START merchantapi_v1beta_generated_ProgramsService_EnableProgram_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.shopping import merchant_accounts_v1beta + + +async def sample_enable_program(): + # Create a client + client = merchant_accounts_v1beta.ProgramsServiceAsyncClient() + + # Initialize request argument(s) + request = merchant_accounts_v1beta.EnableProgramRequest( + name="name_value", + ) + + # Make the request + response = await client.enable_program(request=request) + + # Handle the response + print(response) + +# [END merchantapi_v1beta_generated_ProgramsService_EnableProgram_async] diff --git a/packages/google-shopping-merchant-accounts/samples/generated_samples/merchantapi_v1beta_generated_programs_service_enable_program_sync.py b/packages/google-shopping-merchant-accounts/samples/generated_samples/merchantapi_v1beta_generated_programs_service_enable_program_sync.py new file mode 100644 index 000000000000..99292b26bd59 --- /dev/null +++ b/packages/google-shopping-merchant-accounts/samples/generated_samples/merchantapi_v1beta_generated_programs_service_enable_program_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for EnableProgram +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-shopping-merchant-accounts + + +# [START merchantapi_v1beta_generated_ProgramsService_EnableProgram_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.shopping import merchant_accounts_v1beta + + +def sample_enable_program(): + # Create a client + client = merchant_accounts_v1beta.ProgramsServiceClient() + + # Initialize request argument(s) + request = merchant_accounts_v1beta.EnableProgramRequest( + name="name_value", + ) + + # Make the request + response = client.enable_program(request=request) + + # Handle the response + print(response) + +# [END merchantapi_v1beta_generated_ProgramsService_EnableProgram_sync] diff --git a/packages/google-shopping-merchant-accounts/samples/generated_samples/merchantapi_v1beta_generated_programs_service_get_program_async.py b/packages/google-shopping-merchant-accounts/samples/generated_samples/merchantapi_v1beta_generated_programs_service_get_program_async.py new file mode 100644 index 000000000000..970c08050862 --- /dev/null +++ b/packages/google-shopping-merchant-accounts/samples/generated_samples/merchantapi_v1beta_generated_programs_service_get_program_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetProgram +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-shopping-merchant-accounts + + +# [START merchantapi_v1beta_generated_ProgramsService_GetProgram_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.shopping import merchant_accounts_v1beta + + +async def sample_get_program(): + # Create a client + client = merchant_accounts_v1beta.ProgramsServiceAsyncClient() + + # Initialize request argument(s) + request = merchant_accounts_v1beta.GetProgramRequest( + name="name_value", + ) + + # Make the request + response = await client.get_program(request=request) + + # Handle the response + print(response) + +# [END merchantapi_v1beta_generated_ProgramsService_GetProgram_async] diff --git a/packages/google-shopping-merchant-accounts/samples/generated_samples/merchantapi_v1beta_generated_programs_service_get_program_sync.py b/packages/google-shopping-merchant-accounts/samples/generated_samples/merchantapi_v1beta_generated_programs_service_get_program_sync.py new file mode 100644 index 000000000000..3e9f77650ab4 --- /dev/null +++ b/packages/google-shopping-merchant-accounts/samples/generated_samples/merchantapi_v1beta_generated_programs_service_get_program_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetProgram +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-shopping-merchant-accounts + + +# [START merchantapi_v1beta_generated_ProgramsService_GetProgram_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.shopping import merchant_accounts_v1beta + + +def sample_get_program(): + # Create a client + client = merchant_accounts_v1beta.ProgramsServiceClient() + + # Initialize request argument(s) + request = merchant_accounts_v1beta.GetProgramRequest( + name="name_value", + ) + + # Make the request + response = client.get_program(request=request) + + # Handle the response + print(response) + +# [END merchantapi_v1beta_generated_ProgramsService_GetProgram_sync] diff --git a/packages/google-shopping-merchant-accounts/samples/generated_samples/merchantapi_v1beta_generated_programs_service_list_programs_async.py b/packages/google-shopping-merchant-accounts/samples/generated_samples/merchantapi_v1beta_generated_programs_service_list_programs_async.py new file mode 100644 index 000000000000..d0db1b835c94 --- /dev/null +++ b/packages/google-shopping-merchant-accounts/samples/generated_samples/merchantapi_v1beta_generated_programs_service_list_programs_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListPrograms +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-shopping-merchant-accounts + + +# [START merchantapi_v1beta_generated_ProgramsService_ListPrograms_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.shopping import merchant_accounts_v1beta + + +async def sample_list_programs(): + # Create a client + client = merchant_accounts_v1beta.ProgramsServiceAsyncClient() + + # Initialize request argument(s) + request = merchant_accounts_v1beta.ListProgramsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_programs(request=request) + + # Handle the response + async for response in page_result: + print(response) + +# [END merchantapi_v1beta_generated_ProgramsService_ListPrograms_async] diff --git a/packages/google-shopping-merchant-accounts/samples/generated_samples/merchantapi_v1beta_generated_programs_service_list_programs_sync.py b/packages/google-shopping-merchant-accounts/samples/generated_samples/merchantapi_v1beta_generated_programs_service_list_programs_sync.py new file mode 100644 index 000000000000..9adaa3cdd367 --- /dev/null +++ b/packages/google-shopping-merchant-accounts/samples/generated_samples/merchantapi_v1beta_generated_programs_service_list_programs_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListPrograms +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-shopping-merchant-accounts + + +# [START merchantapi_v1beta_generated_ProgramsService_ListPrograms_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.shopping import merchant_accounts_v1beta + + +def sample_list_programs(): + # Create a client + client = merchant_accounts_v1beta.ProgramsServiceClient() + + # Initialize request argument(s) + request = merchant_accounts_v1beta.ListProgramsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_programs(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END merchantapi_v1beta_generated_ProgramsService_ListPrograms_sync] diff --git a/packages/google-shopping-merchant-accounts/samples/generated_samples/merchantapi_v1beta_generated_regions_service_create_region_async.py b/packages/google-shopping-merchant-accounts/samples/generated_samples/merchantapi_v1beta_generated_regions_service_create_region_async.py new file mode 100644 index 000000000000..7780f650a625 --- /dev/null +++ b/packages/google-shopping-merchant-accounts/samples/generated_samples/merchantapi_v1beta_generated_regions_service_create_region_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateRegion +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-shopping-merchant-accounts + + +# [START merchantapi_v1beta_generated_RegionsService_CreateRegion_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.shopping import merchant_accounts_v1beta + + +async def sample_create_region(): + # Create a client + client = merchant_accounts_v1beta.RegionsServiceAsyncClient() + + # Initialize request argument(s) + request = merchant_accounts_v1beta.CreateRegionRequest( + parent="parent_value", + region_id="region_id_value", + ) + + # Make the request + response = await client.create_region(request=request) + + # Handle the response + print(response) + +# [END merchantapi_v1beta_generated_RegionsService_CreateRegion_async] diff --git a/packages/google-shopping-merchant-accounts/samples/generated_samples/merchantapi_v1beta_generated_regions_service_create_region_sync.py b/packages/google-shopping-merchant-accounts/samples/generated_samples/merchantapi_v1beta_generated_regions_service_create_region_sync.py new file mode 100644 index 000000000000..ab3f7a11f344 --- /dev/null +++ b/packages/google-shopping-merchant-accounts/samples/generated_samples/merchantapi_v1beta_generated_regions_service_create_region_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateRegion +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-shopping-merchant-accounts + + +# [START merchantapi_v1beta_generated_RegionsService_CreateRegion_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.shopping import merchant_accounts_v1beta + + +def sample_create_region(): + # Create a client + client = merchant_accounts_v1beta.RegionsServiceClient() + + # Initialize request argument(s) + request = merchant_accounts_v1beta.CreateRegionRequest( + parent="parent_value", + region_id="region_id_value", + ) + + # Make the request + response = client.create_region(request=request) + + # Handle the response + print(response) + +# [END merchantapi_v1beta_generated_RegionsService_CreateRegion_sync] diff --git a/packages/google-shopping-merchant-accounts/samples/generated_samples/merchantapi_v1beta_generated_regions_service_delete_region_async.py b/packages/google-shopping-merchant-accounts/samples/generated_samples/merchantapi_v1beta_generated_regions_service_delete_region_async.py new file mode 100644 index 000000000000..56093b2280b2 --- /dev/null +++ b/packages/google-shopping-merchant-accounts/samples/generated_samples/merchantapi_v1beta_generated_regions_service_delete_region_async.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteRegion +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-shopping-merchant-accounts + + +# [START merchantapi_v1beta_generated_RegionsService_DeleteRegion_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.shopping import merchant_accounts_v1beta + + +async def sample_delete_region(): + # Create a client + client = merchant_accounts_v1beta.RegionsServiceAsyncClient() + + # Initialize request argument(s) + request = merchant_accounts_v1beta.DeleteRegionRequest( + name="name_value", + ) + + # Make the request + await client.delete_region(request=request) + + +# [END merchantapi_v1beta_generated_RegionsService_DeleteRegion_async] diff --git a/packages/google-shopping-merchant-accounts/samples/generated_samples/merchantapi_v1beta_generated_regions_service_delete_region_sync.py b/packages/google-shopping-merchant-accounts/samples/generated_samples/merchantapi_v1beta_generated_regions_service_delete_region_sync.py new file mode 100644 index 000000000000..551e26a180d2 --- /dev/null +++ b/packages/google-shopping-merchant-accounts/samples/generated_samples/merchantapi_v1beta_generated_regions_service_delete_region_sync.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteRegion +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-shopping-merchant-accounts + + +# [START merchantapi_v1beta_generated_RegionsService_DeleteRegion_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.shopping import merchant_accounts_v1beta + + +def sample_delete_region(): + # Create a client + client = merchant_accounts_v1beta.RegionsServiceClient() + + # Initialize request argument(s) + request = merchant_accounts_v1beta.DeleteRegionRequest( + name="name_value", + ) + + # Make the request + client.delete_region(request=request) + + +# [END merchantapi_v1beta_generated_RegionsService_DeleteRegion_sync] diff --git a/packages/google-shopping-merchant-accounts/samples/generated_samples/merchantapi_v1beta_generated_regions_service_get_region_async.py b/packages/google-shopping-merchant-accounts/samples/generated_samples/merchantapi_v1beta_generated_regions_service_get_region_async.py new file mode 100644 index 000000000000..d62409ec8d9c --- /dev/null +++ b/packages/google-shopping-merchant-accounts/samples/generated_samples/merchantapi_v1beta_generated_regions_service_get_region_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetRegion +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-shopping-merchant-accounts + + +# [START merchantapi_v1beta_generated_RegionsService_GetRegion_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
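DeleteRegion returns no payload, so the generated samples above simply issue the call. Where a caller needs to tolerate an already-deleted region, the standard `google-api-core` exception classes can be caught around the same call; this is a sketch under that assumption, and the resource name format shown is a placeholder:

```python
from google.api_core import exceptions

from google.shopping import merchant_accounts_v1beta


def delete_region_if_present(name: str) -> bool:
    """Delete a region, returning False if the service reports it missing."""
    client = merchant_accounts_v1beta.RegionsServiceClient()

    try:
        client.delete_region(
            request=merchant_accounts_v1beta.DeleteRegionRequest(name=name)
        )
        return True
    except exceptions.NotFound:
        # A missing resource surfaces as NOT_FOUND rather than an empty response.
        return False


if __name__ == "__main__":
    # Assumed resource name format: accounts/{account}/regions/{region}.
    print(delete_region_if_present("accounts/123/regions/us-east-coast"))
```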
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.shopping import merchant_accounts_v1beta + + +async def sample_get_region(): + # Create a client + client = merchant_accounts_v1beta.RegionsServiceAsyncClient() + + # Initialize request argument(s) + request = merchant_accounts_v1beta.GetRegionRequest( + name="name_value", + ) + + # Make the request + response = await client.get_region(request=request) + + # Handle the response + print(response) + +# [END merchantapi_v1beta_generated_RegionsService_GetRegion_async] diff --git a/packages/google-shopping-merchant-accounts/samples/generated_samples/merchantapi_v1beta_generated_regions_service_get_region_sync.py b/packages/google-shopping-merchant-accounts/samples/generated_samples/merchantapi_v1beta_generated_regions_service_get_region_sync.py new file mode 100644 index 000000000000..55d6dd0fc8d4 --- /dev/null +++ b/packages/google-shopping-merchant-accounts/samples/generated_samples/merchantapi_v1beta_generated_regions_service_get_region_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetRegion +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-shopping-merchant-accounts + + +# [START merchantapi_v1beta_generated_RegionsService_GetRegion_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.shopping import merchant_accounts_v1beta + + +def sample_get_region(): + # Create a client + client = merchant_accounts_v1beta.RegionsServiceClient() + + # Initialize request argument(s) + request = merchant_accounts_v1beta.GetRegionRequest( + name="name_value", + ) + + # Make the request + response = client.get_region(request=request) + + # Handle the response + print(response) + +# [END merchantapi_v1beta_generated_RegionsService_GetRegion_sync] diff --git a/packages/google-shopping-merchant-accounts/samples/generated_samples/merchantapi_v1beta_generated_regions_service_list_regions_async.py b/packages/google-shopping-merchant-accounts/samples/generated_samples/merchantapi_v1beta_generated_regions_service_list_regions_async.py new file mode 100644 index 000000000000..a3e8a1dfdb63 --- /dev/null +++ b/packages/google-shopping-merchant-accounts/samples/generated_samples/merchantapi_v1beta_generated_regions_service_list_regions_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListRegions +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-shopping-merchant-accounts + + +# [START merchantapi_v1beta_generated_RegionsService_ListRegions_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.shopping import merchant_accounts_v1beta + + +async def sample_list_regions(): + # Create a client + client = merchant_accounts_v1beta.RegionsServiceAsyncClient() + + # Initialize request argument(s) + request = merchant_accounts_v1beta.ListRegionsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_regions(request=request) + + # Handle the response + async for response in page_result: + print(response) + +# [END merchantapi_v1beta_generated_RegionsService_ListRegions_async] diff --git a/packages/google-shopping-merchant-accounts/samples/generated_samples/merchantapi_v1beta_generated_regions_service_list_regions_sync.py b/packages/google-shopping-merchant-accounts/samples/generated_samples/merchantapi_v1beta_generated_regions_service_list_regions_sync.py new file mode 100644 index 000000000000..bbe022c4a0d4 --- /dev/null +++ b/packages/google-shopping-merchant-accounts/samples/generated_samples/merchantapi_v1beta_generated_regions_service_list_regions_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListRegions +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-shopping-merchant-accounts + + +# [START merchantapi_v1beta_generated_RegionsService_ListRegions_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.shopping import merchant_accounts_v1beta + + +def sample_list_regions(): + # Create a client + client = merchant_accounts_v1beta.RegionsServiceClient() + + # Initialize request argument(s) + request = merchant_accounts_v1beta.ListRegionsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_regions(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END merchantapi_v1beta_generated_RegionsService_ListRegions_sync] diff --git a/packages/google-shopping-merchant-accounts/samples/generated_samples/merchantapi_v1beta_generated_regions_service_update_region_async.py b/packages/google-shopping-merchant-accounts/samples/generated_samples/merchantapi_v1beta_generated_regions_service_update_region_async.py new file mode 100644 index 000000000000..f81c160ad52a --- /dev/null +++ b/packages/google-shopping-merchant-accounts/samples/generated_samples/merchantapi_v1beta_generated_regions_service_update_region_async.py @@ -0,0 +1,51 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateRegion +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-shopping-merchant-accounts + + +# [START merchantapi_v1beta_generated_RegionsService_UpdateRegion_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
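The ListRegions samples above consume the pager one Region at a time. GAPIC pagers also expose the underlying responses, so page-by-page iteration is possible as well; the sketch below assumes the standard pager interface (`pages` property, `regions` repeated field) and uses placeholder values for the parent and page size:

```python
from google.shopping import merchant_accounts_v1beta


def list_regions_by_page() -> None:
    client = merchant_accounts_v1beta.RegionsServiceClient()

    request = merchant_accounts_v1beta.ListRegionsRequest(
        parent="accounts/123",  # assumed parent format: accounts/{account}
        page_size=50,           # standard List page_size knob
    )

    pager = client.list_regions(request=request)

    # Iterating the pager itself yields Region messages; .pages yields whole
    # ListRegionsResponse objects, one per underlying RPC.
    for page in pager.pages:
        print(f"fetched a page with {len(page.regions)} regions")
        for region in page.regions:
            print(region.name)


if __name__ == "__main__":
    list_regions_by_page()
```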
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.shopping import merchant_accounts_v1beta + + +async def sample_update_region(): + # Create a client + client = merchant_accounts_v1beta.RegionsServiceAsyncClient() + + # Initialize request argument(s) + request = merchant_accounts_v1beta.UpdateRegionRequest( + ) + + # Make the request + response = await client.update_region(request=request) + + # Handle the response + print(response) + +# [END merchantapi_v1beta_generated_RegionsService_UpdateRegion_async] diff --git a/packages/google-shopping-merchant-accounts/samples/generated_samples/merchantapi_v1beta_generated_regions_service_update_region_sync.py b/packages/google-shopping-merchant-accounts/samples/generated_samples/merchantapi_v1beta_generated_regions_service_update_region_sync.py new file mode 100644 index 000000000000..d280034963fd --- /dev/null +++ b/packages/google-shopping-merchant-accounts/samples/generated_samples/merchantapi_v1beta_generated_regions_service_update_region_sync.py @@ -0,0 +1,51 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateRegion +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-shopping-merchant-accounts + + +# [START merchantapi_v1beta_generated_RegionsService_UpdateRegion_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.shopping import merchant_accounts_v1beta + + +def sample_update_region(): + # Create a client + client = merchant_accounts_v1beta.RegionsServiceClient() + + # Initialize request argument(s) + request = merchant_accounts_v1beta.UpdateRegionRequest( + ) + + # Make the request + response = client.update_region(request=request) + + # Handle the response + print(response) + +# [END merchantapi_v1beta_generated_RegionsService_UpdateRegion_sync] diff --git a/packages/google-shopping-merchant-accounts/samples/generated_samples/merchantapi_v1beta_generated_shipping_settings_service_get_shipping_settings_async.py b/packages/google-shopping-merchant-accounts/samples/generated_samples/merchantapi_v1beta_generated_shipping_settings_service_get_shipping_settings_async.py new file mode 100644 index 000000000000..46fc3d734b0b --- /dev/null +++ b/packages/google-shopping-merchant-accounts/samples/generated_samples/merchantapi_v1beta_generated_shipping_settings_service_get_shipping_settings_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetShippingSettings +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-shopping-merchant-accounts + + +# [START merchantapi_v1beta_generated_ShippingSettingsService_GetShippingSettings_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
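The generated UpdateRegion samples leave `UpdateRegionRequest()` empty because its required fields are message-typed. Under the usual AIP-134 update shape, the request would carry the modified `Region` plus an `update_mask`; the sketch below assumes that shape, and both the resource name format and the `display_name` path are illustrative assumptions:

```python
from google.protobuf import field_mask_pb2

from google.shopping import merchant_accounts_v1beta


def update_region_display_name() -> None:
    client = merchant_accounts_v1beta.RegionsServiceClient()

    # The region to update, identified by its resource name (assumed format:
    # accounts/{account}/regions/{region}); display_name is an assumed field.
    region = merchant_accounts_v1beta.Region(
        name="accounts/123/regions/us-east-coast",
        display_name="US East Coast",
    )

    request = merchant_accounts_v1beta.UpdateRegionRequest(
        region=region,
        # The field mask limits the write to the listed paths.
        update_mask=field_mask_pb2.FieldMask(paths=["display_name"]),
    )

    response = client.update_region(request=request)
    print(response)


if __name__ == "__main__":
    update_region_display_name()
```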
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.shopping import merchant_accounts_v1beta + + +async def sample_get_shipping_settings(): + # Create a client + client = merchant_accounts_v1beta.ShippingSettingsServiceAsyncClient() + + # Initialize request argument(s) + request = merchant_accounts_v1beta.GetShippingSettingsRequest( + name="name_value", + ) + + # Make the request + response = await client.get_shipping_settings(request=request) + + # Handle the response + print(response) + +# [END merchantapi_v1beta_generated_ShippingSettingsService_GetShippingSettings_async] diff --git a/packages/google-shopping-merchant-accounts/samples/generated_samples/merchantapi_v1beta_generated_shipping_settings_service_get_shipping_settings_sync.py b/packages/google-shopping-merchant-accounts/samples/generated_samples/merchantapi_v1beta_generated_shipping_settings_service_get_shipping_settings_sync.py new file mode 100644 index 000000000000..2d5a2e5a10ab --- /dev/null +++ b/packages/google-shopping-merchant-accounts/samples/generated_samples/merchantapi_v1beta_generated_shipping_settings_service_get_shipping_settings_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetShippingSettings +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-shopping-merchant-accounts + + +# [START merchantapi_v1beta_generated_ShippingSettingsService_GetShippingSettings_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.shopping import merchant_accounts_v1beta + + +def sample_get_shipping_settings(): + # Create a client + client = merchant_accounts_v1beta.ShippingSettingsServiceClient() + + # Initialize request argument(s) + request = merchant_accounts_v1beta.GetShippingSettingsRequest( + name="name_value", + ) + + # Make the request + response = client.get_shipping_settings(request=request) + + # Handle the response + print(response) + +# [END merchantapi_v1beta_generated_ShippingSettingsService_GetShippingSettings_sync] diff --git a/packages/google-shopping-merchant-accounts/samples/generated_samples/merchantapi_v1beta_generated_shipping_settings_service_insert_shipping_settings_async.py b/packages/google-shopping-merchant-accounts/samples/generated_samples/merchantapi_v1beta_generated_shipping_settings_service_insert_shipping_settings_async.py new file mode 100644 index 000000000000..a2c94110d1ab --- /dev/null +++ b/packages/google-shopping-merchant-accounts/samples/generated_samples/merchantapi_v1beta_generated_shipping_settings_service_insert_shipping_settings_async.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for InsertShippingSettings +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-shopping-merchant-accounts + + +# [START merchantapi_v1beta_generated_ShippingSettingsService_InsertShippingSettings_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.shopping import merchant_accounts_v1beta + + +async def sample_insert_shipping_settings(): + # Create a client + client = merchant_accounts_v1beta.ShippingSettingsServiceAsyncClient() + + # Initialize request argument(s) + shipping_setting = merchant_accounts_v1beta.ShippingSettings() + shipping_setting.etag = "etag_value" + + request = merchant_accounts_v1beta.InsertShippingSettingsRequest( + parent="parent_value", + shipping_setting=shipping_setting, + ) + + # Make the request + response = await client.insert_shipping_settings(request=request) + + # Handle the response + print(response) + +# [END merchantapi_v1beta_generated_ShippingSettingsService_InsertShippingSettings_async] diff --git a/packages/google-shopping-merchant-accounts/samples/generated_samples/merchantapi_v1beta_generated_shipping_settings_service_insert_shipping_settings_sync.py b/packages/google-shopping-merchant-accounts/samples/generated_samples/merchantapi_v1beta_generated_shipping_settings_service_insert_shipping_settings_sync.py new file mode 100644 index 000000000000..59a1ff9da8d8 --- /dev/null +++ b/packages/google-shopping-merchant-accounts/samples/generated_samples/merchantapi_v1beta_generated_shipping_settings_service_insert_shipping_settings_sync.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for InsertShippingSettings +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-shopping-merchant-accounts + + +# [START merchantapi_v1beta_generated_ShippingSettingsService_InsertShippingSettings_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.shopping import merchant_accounts_v1beta + + +def sample_insert_shipping_settings(): + # Create a client + client = merchant_accounts_v1beta.ShippingSettingsServiceClient() + + # Initialize request argument(s) + shipping_setting = merchant_accounts_v1beta.ShippingSettings() + shipping_setting.etag = "etag_value" + + request = merchant_accounts_v1beta.InsertShippingSettingsRequest( + parent="parent_value", + shipping_setting=shipping_setting, + ) + + # Make the request + response = client.insert_shipping_settings(request=request) + + # Handle the response + print(response) + +# [END merchantapi_v1beta_generated_ShippingSettingsService_InsertShippingSettings_sync] diff --git a/packages/google-shopping-merchant-accounts/samples/generated_samples/merchantapi_v1beta_generated_terms_of_service_agreement_state_service_get_terms_of_service_agreement_state_async.py b/packages/google-shopping-merchant-accounts/samples/generated_samples/merchantapi_v1beta_generated_terms_of_service_agreement_state_service_get_terms_of_service_agreement_state_async.py new file mode 100644 index 000000000000..1dc53b7d9cca --- /dev/null +++ b/packages/google-shopping-merchant-accounts/samples/generated_samples/merchantapi_v1beta_generated_terms_of_service_agreement_state_service_get_terms_of_service_agreement_state_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetTermsOfServiceAgreementState +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-shopping-merchant-accounts + + +# [START merchantapi_v1beta_generated_TermsOfServiceAgreementStateService_GetTermsOfServiceAgreementState_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
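The InsertShippingSettings samples above hard-code a placeholder etag. In practice the etag typically comes from a prior GetShippingSettings read, giving a read-modify-write flow; the sketch below assumes that flow and uses assumed resource name formats throughout:

```python
from google.shopping import merchant_accounts_v1beta


def replace_shipping_settings(account: str) -> merchant_accounts_v1beta.ShippingSettings:
    client = merchant_accounts_v1beta.ShippingSettingsServiceClient()

    # Read the current settings to pick up a fresh etag (assumed name format:
    # accounts/{account}/shippingSettings).
    current = client.get_shipping_settings(
        request=merchant_accounts_v1beta.GetShippingSettingsRequest(
            name=f"{account}/shippingSettings",
        )
    )

    # Carry the server-provided etag so the write is based on the latest state;
    # real code would also populate the remaining shipping fields here.
    new_settings = merchant_accounts_v1beta.ShippingSettings(etag=current.etag)

    return client.insert_shipping_settings(
        request=merchant_accounts_v1beta.InsertShippingSettingsRequest(
            parent=account,  # assumed parent format: accounts/{account}
            shipping_setting=new_settings,
        )
    )


if __name__ == "__main__":
    print(replace_shipping_settings("accounts/123"))
```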
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.shopping import merchant_accounts_v1beta + + +async def sample_get_terms_of_service_agreement_state(): + # Create a client + client = merchant_accounts_v1beta.TermsOfServiceAgreementStateServiceAsyncClient() + + # Initialize request argument(s) + request = merchant_accounts_v1beta.GetTermsOfServiceAgreementStateRequest( + name="name_value", + ) + + # Make the request + response = await client.get_terms_of_service_agreement_state(request=request) + + # Handle the response + print(response) + +# [END merchantapi_v1beta_generated_TermsOfServiceAgreementStateService_GetTermsOfServiceAgreementState_async] diff --git a/packages/google-shopping-merchant-accounts/samples/generated_samples/merchantapi_v1beta_generated_terms_of_service_agreement_state_service_get_terms_of_service_agreement_state_sync.py b/packages/google-shopping-merchant-accounts/samples/generated_samples/merchantapi_v1beta_generated_terms_of_service_agreement_state_service_get_terms_of_service_agreement_state_sync.py new file mode 100644 index 000000000000..b9f49f6bf873 --- /dev/null +++ b/packages/google-shopping-merchant-accounts/samples/generated_samples/merchantapi_v1beta_generated_terms_of_service_agreement_state_service_get_terms_of_service_agreement_state_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetTermsOfServiceAgreementState +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-shopping-merchant-accounts + + +# [START merchantapi_v1beta_generated_TermsOfServiceAgreementStateService_GetTermsOfServiceAgreementState_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.shopping import merchant_accounts_v1beta + + +def sample_get_terms_of_service_agreement_state(): + # Create a client + client = merchant_accounts_v1beta.TermsOfServiceAgreementStateServiceClient() + + # Initialize request argument(s) + request = merchant_accounts_v1beta.GetTermsOfServiceAgreementStateRequest( + name="name_value", + ) + + # Make the request + response = client.get_terms_of_service_agreement_state(request=request) + + # Handle the response + print(response) + +# [END merchantapi_v1beta_generated_TermsOfServiceAgreementStateService_GetTermsOfServiceAgreementState_sync] diff --git a/packages/google-shopping-merchant-accounts/samples/generated_samples/merchantapi_v1beta_generated_terms_of_service_agreement_state_service_retrieve_for_application_terms_of_service_agreement_state_async.py b/packages/google-shopping-merchant-accounts/samples/generated_samples/merchantapi_v1beta_generated_terms_of_service_agreement_state_service_retrieve_for_application_terms_of_service_agreement_state_async.py new file mode 100644 index 000000000000..6920a1e22519 --- /dev/null +++ b/packages/google-shopping-merchant-accounts/samples/generated_samples/merchantapi_v1beta_generated_terms_of_service_agreement_state_service_retrieve_for_application_terms_of_service_agreement_state_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for RetrieveForApplicationTermsOfServiceAgreementState +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-shopping-merchant-accounts + + +# [START merchantapi_v1beta_generated_TermsOfServiceAgreementStateService_RetrieveForApplicationTermsOfServiceAgreementState_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.shopping import merchant_accounts_v1beta + + +async def sample_retrieve_for_application_terms_of_service_agreement_state(): + # Create a client + client = merchant_accounts_v1beta.TermsOfServiceAgreementStateServiceAsyncClient() + + # Initialize request argument(s) + request = merchant_accounts_v1beta.RetrieveForApplicationTermsOfServiceAgreementStateRequest( + parent="parent_value", + ) + + # Make the request + response = await client.retrieve_for_application_terms_of_service_agreement_state(request=request) + + # Handle the response + print(response) + +# [END merchantapi_v1beta_generated_TermsOfServiceAgreementStateService_RetrieveForApplicationTermsOfServiceAgreementState_async] diff --git a/packages/google-shopping-merchant-accounts/samples/generated_samples/merchantapi_v1beta_generated_terms_of_service_agreement_state_service_retrieve_for_application_terms_of_service_agreement_state_sync.py b/packages/google-shopping-merchant-accounts/samples/generated_samples/merchantapi_v1beta_generated_terms_of_service_agreement_state_service_retrieve_for_application_terms_of_service_agreement_state_sync.py new file mode 100644 index 000000000000..acd3fb9382ba --- /dev/null +++ b/packages/google-shopping-merchant-accounts/samples/generated_samples/merchantapi_v1beta_generated_terms_of_service_agreement_state_service_retrieve_for_application_terms_of_service_agreement_state_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for RetrieveForApplicationTermsOfServiceAgreementState +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-shopping-merchant-accounts + + +# [START merchantapi_v1beta_generated_TermsOfServiceAgreementStateService_RetrieveForApplicationTermsOfServiceAgreementState_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.shopping import merchant_accounts_v1beta + + +def sample_retrieve_for_application_terms_of_service_agreement_state(): + # Create a client + client = merchant_accounts_v1beta.TermsOfServiceAgreementStateServiceClient() + + # Initialize request argument(s) + request = merchant_accounts_v1beta.RetrieveForApplicationTermsOfServiceAgreementStateRequest( + parent="parent_value", + ) + + # Make the request + response = client.retrieve_for_application_terms_of_service_agreement_state(request=request) + + # Handle the response + print(response) + +# [END merchantapi_v1beta_generated_TermsOfServiceAgreementStateService_RetrieveForApplicationTermsOfServiceAgreementState_sync] diff --git a/packages/google-shopping-merchant-accounts/samples/generated_samples/merchantapi_v1beta_generated_terms_of_service_service_accept_terms_of_service_async.py b/packages/google-shopping-merchant-accounts/samples/generated_samples/merchantapi_v1beta_generated_terms_of_service_service_accept_terms_of_service_async.py new file mode 100644 index 000000000000..f5cb63fabba4 --- /dev/null +++ b/packages/google-shopping-merchant-accounts/samples/generated_samples/merchantapi_v1beta_generated_terms_of_service_service_accept_terms_of_service_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for AcceptTermsOfService +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-shopping-merchant-accounts + + +# [START merchantapi_v1beta_generated_TermsOfServiceService_AcceptTermsOfService_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.shopping import merchant_accounts_v1beta + + +async def sample_accept_terms_of_service(): + # Create a client + client = merchant_accounts_v1beta.TermsOfServiceServiceAsyncClient() + + # Initialize request argument(s) + request = merchant_accounts_v1beta.AcceptTermsOfServiceRequest( + name="name_value", + account="account_value", + region_code="region_code_value", + ) + + # Make the request + await client.accept_terms_of_service(request=request) + + +# [END merchantapi_v1beta_generated_TermsOfServiceService_AcceptTermsOfService_async] diff --git a/packages/google-maps-mapsplatformdatasets/samples/generated_samples/mapsplatformdatasets_v1alpha_generated_maps_platform_datasets_v1_alpha_delete_dataset_version_async.py b/packages/google-shopping-merchant-accounts/samples/generated_samples/merchantapi_v1beta_generated_terms_of_service_service_accept_terms_of_service_sync.py similarity index 68% rename from packages/google-maps-mapsplatformdatasets/samples/generated_samples/mapsplatformdatasets_v1alpha_generated_maps_platform_datasets_v1_alpha_delete_dataset_version_async.py rename to packages/google-shopping-merchant-accounts/samples/generated_samples/merchantapi_v1beta_generated_terms_of_service_service_accept_terms_of_service_sync.py index dc94e1d7b72b..12311a6c102d 100644 --- a/packages/google-maps-mapsplatformdatasets/samples/generated_samples/mapsplatformdatasets_v1alpha_generated_maps_platform_datasets_v1_alpha_delete_dataset_version_async.py +++ b/packages/google-shopping-merchant-accounts/samples/generated_samples/merchantapi_v1beta_generated_terms_of_service_service_accept_terms_of_service_sync.py @@ -15,15 +15,15 @@ # # Generated code. DO NOT EDIT! # -# Snippet for DeleteDatasetVersion +# Snippet for AcceptTermsOfService # NOTE: This snippet has been automatically generated for illustrative purposes only. # It may require modifications to work in your environment. # To install the latest published package dependency, execute the following: -# python3 -m pip install google-maps-mapsplatformdatasets +# python3 -m pip install google-shopping-merchant-accounts -# [START mapsplatformdatasets_v1alpha_generated_MapsPlatformDatasetsV1Alpha_DeleteDatasetVersion_async] +# [START merchantapi_v1beta_generated_TermsOfServiceService_AcceptTermsOfService_sync] # This snippet has been automatically generated and should be regarded as a # code template only. 
# It will require modifications to work: @@ -31,20 +31,22 @@ # - It may require specifying regional endpoints when creating the service # client as shown in: # https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.maps import mapsplatformdatasets_v1alpha +from google.shopping import merchant_accounts_v1beta -async def sample_delete_dataset_version(): +def sample_accept_terms_of_service(): # Create a client - client = mapsplatformdatasets_v1alpha.MapsPlatformDatasetsV1AlphaAsyncClient() + client = merchant_accounts_v1beta.TermsOfServiceServiceClient() # Initialize request argument(s) - request = mapsplatformdatasets_v1alpha.DeleteDatasetVersionRequest( + request = merchant_accounts_v1beta.AcceptTermsOfServiceRequest( name="name_value", + account="account_value", + region_code="region_code_value", ) # Make the request - await client.delete_dataset_version(request=request) + client.accept_terms_of_service(request=request) -# [END mapsplatformdatasets_v1alpha_generated_MapsPlatformDatasetsV1Alpha_DeleteDatasetVersion_async] +# [END merchantapi_v1beta_generated_TermsOfServiceService_AcceptTermsOfService_sync] diff --git a/packages/google-shopping-merchant-accounts/samples/generated_samples/merchantapi_v1beta_generated_terms_of_service_service_get_terms_of_service_async.py b/packages/google-shopping-merchant-accounts/samples/generated_samples/merchantapi_v1beta_generated_terms_of_service_service_get_terms_of_service_async.py new file mode 100644 index 000000000000..e6038b8b6dbf --- /dev/null +++ b/packages/google-shopping-merchant-accounts/samples/generated_samples/merchantapi_v1beta_generated_terms_of_service_service_get_terms_of_service_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetTermsOfService +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-shopping-merchant-accounts + + +# [START merchantapi_v1beta_generated_TermsOfServiceService_GetTermsOfService_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.shopping import merchant_accounts_v1beta + + +async def sample_get_terms_of_service(): + # Create a client + client = merchant_accounts_v1beta.TermsOfServiceServiceAsyncClient() + + # Initialize request argument(s) + request = merchant_accounts_v1beta.GetTermsOfServiceRequest( + name="name_value", + ) + + # Make the request + response = await client.get_terms_of_service(request=request) + + # Handle the response + print(response) + +# [END merchantapi_v1beta_generated_TermsOfServiceService_GetTermsOfService_async] diff --git a/packages/google-shopping-merchant-accounts/samples/generated_samples/merchantapi_v1beta_generated_terms_of_service_service_get_terms_of_service_sync.py b/packages/google-shopping-merchant-accounts/samples/generated_samples/merchantapi_v1beta_generated_terms_of_service_service_get_terms_of_service_sync.py new file mode 100644 index 000000000000..9bcf47546578 --- /dev/null +++ b/packages/google-shopping-merchant-accounts/samples/generated_samples/merchantapi_v1beta_generated_terms_of_service_service_get_terms_of_service_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetTermsOfService +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-shopping-merchant-accounts + + +# [START merchantapi_v1beta_generated_TermsOfServiceService_GetTermsOfService_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.shopping import merchant_accounts_v1beta + + +def sample_get_terms_of_service(): + # Create a client + client = merchant_accounts_v1beta.TermsOfServiceServiceClient() + + # Initialize request argument(s) + request = merchant_accounts_v1beta.GetTermsOfServiceRequest( + name="name_value", + ) + + # Make the request + response = client.get_terms_of_service(request=request) + + # Handle the response + print(response) + +# [END merchantapi_v1beta_generated_TermsOfServiceService_GetTermsOfService_sync] diff --git a/packages/google-shopping-merchant-accounts/samples/generated_samples/merchantapi_v1beta_generated_terms_of_service_service_retrieve_latest_terms_of_service_async.py b/packages/google-shopping-merchant-accounts/samples/generated_samples/merchantapi_v1beta_generated_terms_of_service_service_retrieve_latest_terms_of_service_async.py new file mode 100644 index 000000000000..f41dc0f89b50 --- /dev/null +++ b/packages/google-shopping-merchant-accounts/samples/generated_samples/merchantapi_v1beta_generated_terms_of_service_service_retrieve_latest_terms_of_service_async.py @@ -0,0 +1,51 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for RetrieveLatestTermsOfService +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-shopping-merchant-accounts + + +# [START merchantapi_v1beta_generated_TermsOfServiceService_RetrieveLatestTermsOfService_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.shopping import merchant_accounts_v1beta + + +async def sample_retrieve_latest_terms_of_service(): + # Create a client + client = merchant_accounts_v1beta.TermsOfServiceServiceAsyncClient() + + # Initialize request argument(s) + request = merchant_accounts_v1beta.RetrieveLatestTermsOfServiceRequest( + ) + + # Make the request + response = await client.retrieve_latest_terms_of_service(request=request) + + # Handle the response + print(response) + +# [END merchantapi_v1beta_generated_TermsOfServiceService_RetrieveLatestTermsOfService_async] diff --git a/packages/google-shopping-merchant-accounts/samples/generated_samples/merchantapi_v1beta_generated_terms_of_service_service_retrieve_latest_terms_of_service_sync.py b/packages/google-shopping-merchant-accounts/samples/generated_samples/merchantapi_v1beta_generated_terms_of_service_service_retrieve_latest_terms_of_service_sync.py new file mode 100644 index 000000000000..c9cc12dbc4eb --- /dev/null +++ b/packages/google-shopping-merchant-accounts/samples/generated_samples/merchantapi_v1beta_generated_terms_of_service_service_retrieve_latest_terms_of_service_sync.py @@ -0,0 +1,51 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for RetrieveLatestTermsOfService +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-shopping-merchant-accounts + + +# [START merchantapi_v1beta_generated_TermsOfServiceService_RetrieveLatestTermsOfService_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.shopping import merchant_accounts_v1beta + + +def sample_retrieve_latest_terms_of_service(): + # Create a client + client = merchant_accounts_v1beta.TermsOfServiceServiceClient() + + # Initialize request argument(s) + request = merchant_accounts_v1beta.RetrieveLatestTermsOfServiceRequest( + ) + + # Make the request + response = client.retrieve_latest_terms_of_service(request=request) + + # Handle the response + print(response) + +# [END merchantapi_v1beta_generated_TermsOfServiceService_RetrieveLatestTermsOfService_sync] diff --git a/packages/google-shopping-merchant-accounts/samples/generated_samples/merchantapi_v1beta_generated_user_service_create_user_async.py b/packages/google-shopping-merchant-accounts/samples/generated_samples/merchantapi_v1beta_generated_user_service_create_user_async.py new file mode 100644 index 000000000000..5d4bf482dfc6 --- /dev/null +++ b/packages/google-shopping-merchant-accounts/samples/generated_samples/merchantapi_v1beta_generated_user_service_create_user_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateUser +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-shopping-merchant-accounts + + +# [START merchantapi_v1beta_generated_UserService_CreateUser_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.shopping import merchant_accounts_v1beta + + +async def sample_create_user(): + # Create a client + client = merchant_accounts_v1beta.UserServiceAsyncClient() + + # Initialize request argument(s) + request = merchant_accounts_v1beta.CreateUserRequest( + parent="parent_value", + user_id="user_id_value", + ) + + # Make the request + response = await client.create_user(request=request) + + # Handle the response + print(response) + +# [END merchantapi_v1beta_generated_UserService_CreateUser_async] diff --git a/packages/google-shopping-merchant-accounts/samples/generated_samples/merchantapi_v1beta_generated_user_service_create_user_sync.py b/packages/google-shopping-merchant-accounts/samples/generated_samples/merchantapi_v1beta_generated_user_service_create_user_sync.py new file mode 100644 index 000000000000..7c17505acc95 --- /dev/null +++ b/packages/google-shopping-merchant-accounts/samples/generated_samples/merchantapi_v1beta_generated_user_service_create_user_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateUser +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-shopping-merchant-accounts + + +# [START merchantapi_v1beta_generated_UserService_CreateUser_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.shopping import merchant_accounts_v1beta + + +def sample_create_user(): + # Create a client + client = merchant_accounts_v1beta.UserServiceClient() + + # Initialize request argument(s) + request = merchant_accounts_v1beta.CreateUserRequest( + parent="parent_value", + user_id="user_id_value", + ) + + # Make the request + response = client.create_user(request=request) + + # Handle the response + print(response) + +# [END merchantapi_v1beta_generated_UserService_CreateUser_sync] diff --git a/packages/google-shopping-merchant-accounts/samples/generated_samples/merchantapi_v1beta_generated_user_service_delete_user_async.py b/packages/google-shopping-merchant-accounts/samples/generated_samples/merchantapi_v1beta_generated_user_service_delete_user_async.py new file mode 100644 index 000000000000..34e267d75fb7 --- /dev/null +++ b/packages/google-shopping-merchant-accounts/samples/generated_samples/merchantapi_v1beta_generated_user_service_delete_user_async.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteUser +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-shopping-merchant-accounts + + +# [START merchantapi_v1beta_generated_UserService_DeleteUser_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.shopping import merchant_accounts_v1beta + + +async def sample_delete_user(): + # Create a client + client = merchant_accounts_v1beta.UserServiceAsyncClient() + + # Initialize request argument(s) + request = merchant_accounts_v1beta.DeleteUserRequest( + name="name_value", + ) + + # Make the request + await client.delete_user(request=request) + + +# [END merchantapi_v1beta_generated_UserService_DeleteUser_async] diff --git a/packages/google-shopping-merchant-accounts/samples/generated_samples/merchantapi_v1beta_generated_user_service_delete_user_sync.py b/packages/google-shopping-merchant-accounts/samples/generated_samples/merchantapi_v1beta_generated_user_service_delete_user_sync.py new file mode 100644 index 000000000000..6cae572837e2 --- /dev/null +++ b/packages/google-shopping-merchant-accounts/samples/generated_samples/merchantapi_v1beta_generated_user_service_delete_user_sync.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteUser +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-shopping-merchant-accounts + + +# [START merchantapi_v1beta_generated_UserService_DeleteUser_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.shopping import merchant_accounts_v1beta + + +def sample_delete_user(): + # Create a client + client = merchant_accounts_v1beta.UserServiceClient() + + # Initialize request argument(s) + request = merchant_accounts_v1beta.DeleteUserRequest( + name="name_value", + ) + + # Make the request + client.delete_user(request=request) + + +# [END merchantapi_v1beta_generated_UserService_DeleteUser_sync] diff --git a/packages/google-shopping-merchant-accounts/samples/generated_samples/merchantapi_v1beta_generated_user_service_get_user_async.py b/packages/google-shopping-merchant-accounts/samples/generated_samples/merchantapi_v1beta_generated_user_service_get_user_async.py new file mode 100644 index 000000000000..c41cdd33e146 --- /dev/null +++ b/packages/google-shopping-merchant-accounts/samples/generated_samples/merchantapi_v1beta_generated_user_service_get_user_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetUser +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-shopping-merchant-accounts + + +# [START merchantapi_v1beta_generated_UserService_GetUser_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.shopping import merchant_accounts_v1beta + + +async def sample_get_user(): + # Create a client + client = merchant_accounts_v1beta.UserServiceAsyncClient() + + # Initialize request argument(s) + request = merchant_accounts_v1beta.GetUserRequest( + name="name_value", + ) + + # Make the request + response = await client.get_user(request=request) + + # Handle the response + print(response) + +# [END merchantapi_v1beta_generated_UserService_GetUser_async] diff --git a/packages/google-shopping-merchant-accounts/samples/generated_samples/merchantapi_v1beta_generated_user_service_get_user_sync.py b/packages/google-shopping-merchant-accounts/samples/generated_samples/merchantapi_v1beta_generated_user_service_get_user_sync.py new file mode 100644 index 000000000000..6075da0fd471 --- /dev/null +++ b/packages/google-shopping-merchant-accounts/samples/generated_samples/merchantapi_v1beta_generated_user_service_get_user_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetUser +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-shopping-merchant-accounts + + +# [START merchantapi_v1beta_generated_UserService_GetUser_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.shopping import merchant_accounts_v1beta + + +def sample_get_user(): + # Create a client + client = merchant_accounts_v1beta.UserServiceClient() + + # Initialize request argument(s) + request = merchant_accounts_v1beta.GetUserRequest( + name="name_value", + ) + + # Make the request + response = client.get_user(request=request) + + # Handle the response + print(response) + +# [END merchantapi_v1beta_generated_UserService_GetUser_sync] diff --git a/packages/google-shopping-merchant-accounts/samples/generated_samples/merchantapi_v1beta_generated_user_service_list_users_async.py b/packages/google-shopping-merchant-accounts/samples/generated_samples/merchantapi_v1beta_generated_user_service_list_users_async.py new file mode 100644 index 000000000000..cf6db7d5238e --- /dev/null +++ b/packages/google-shopping-merchant-accounts/samples/generated_samples/merchantapi_v1beta_generated_user_service_list_users_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListUsers +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-shopping-merchant-accounts + + +# [START merchantapi_v1beta_generated_UserService_ListUsers_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.shopping import merchant_accounts_v1beta + + +async def sample_list_users(): + # Create a client + client = merchant_accounts_v1beta.UserServiceAsyncClient() + + # Initialize request argument(s) + request = merchant_accounts_v1beta.ListUsersRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_users(request=request) + + # Handle the response + async for response in page_result: + print(response) + +# [END merchantapi_v1beta_generated_UserService_ListUsers_async] diff --git a/packages/google-shopping-merchant-accounts/samples/generated_samples/merchantapi_v1beta_generated_user_service_list_users_sync.py b/packages/google-shopping-merchant-accounts/samples/generated_samples/merchantapi_v1beta_generated_user_service_list_users_sync.py new file mode 100644 index 000000000000..b9a963bd4995 --- /dev/null +++ b/packages/google-shopping-merchant-accounts/samples/generated_samples/merchantapi_v1beta_generated_user_service_list_users_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListUsers +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-shopping-merchant-accounts + + +# [START merchantapi_v1beta_generated_UserService_ListUsers_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.shopping import merchant_accounts_v1beta + + +def sample_list_users(): + # Create a client + client = merchant_accounts_v1beta.UserServiceClient() + + # Initialize request argument(s) + request = merchant_accounts_v1beta.ListUsersRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_users(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END merchantapi_v1beta_generated_UserService_ListUsers_sync] diff --git a/packages/google-shopping-merchant-accounts/samples/generated_samples/merchantapi_v1beta_generated_user_service_update_user_async.py b/packages/google-shopping-merchant-accounts/samples/generated_samples/merchantapi_v1beta_generated_user_service_update_user_async.py new file mode 100644 index 000000000000..e8d801d3ffcb --- /dev/null +++ b/packages/google-shopping-merchant-accounts/samples/generated_samples/merchantapi_v1beta_generated_user_service_update_user_async.py @@ -0,0 +1,51 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateUser +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-shopping-merchant-accounts + + +# [START merchantapi_v1beta_generated_UserService_UpdateUser_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.shopping import merchant_accounts_v1beta + + +async def sample_update_user(): + # Create a client + client = merchant_accounts_v1beta.UserServiceAsyncClient() + + # Initialize request argument(s) + request = merchant_accounts_v1beta.UpdateUserRequest( + ) + + # Make the request + response = await client.update_user(request=request) + + # Handle the response + print(response) + +# [END merchantapi_v1beta_generated_UserService_UpdateUser_async] diff --git a/packages/google-shopping-merchant-accounts/samples/generated_samples/merchantapi_v1beta_generated_user_service_update_user_sync.py b/packages/google-shopping-merchant-accounts/samples/generated_samples/merchantapi_v1beta_generated_user_service_update_user_sync.py new file mode 100644 index 000000000000..e66e4bcdc87e --- /dev/null +++ b/packages/google-shopping-merchant-accounts/samples/generated_samples/merchantapi_v1beta_generated_user_service_update_user_sync.py @@ -0,0 +1,51 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateUser +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-shopping-merchant-accounts + + +# [START merchantapi_v1beta_generated_UserService_UpdateUser_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.shopping import merchant_accounts_v1beta + + +def sample_update_user(): + # Create a client + client = merchant_accounts_v1beta.UserServiceClient() + + # Initialize request argument(s) + request = merchant_accounts_v1beta.UpdateUserRequest( + ) + + # Make the request + response = client.update_user(request=request) + + # Handle the response + print(response) + +# [END merchantapi_v1beta_generated_UserService_UpdateUser_sync] diff --git a/packages/google-shopping-merchant-accounts/samples/generated_samples/snippet_metadata_google.shopping.merchant.accounts.v1beta.json b/packages/google-shopping-merchant-accounts/samples/generated_samples/snippet_metadata_google.shopping.merchant.accounts.v1beta.json new file mode 100644 index 000000000000..88443af2a082 --- /dev/null +++ b/packages/google-shopping-merchant-accounts/samples/generated_samples/snippet_metadata_google.shopping.merchant.accounts.v1beta.json @@ -0,0 +1,6954 @@ +{ + "clientLibrary": { + "apis": [ + { + "id": "google.shopping.merchant.accounts.v1beta", + "version": "v1beta" + } + ], + "language": "PYTHON", + "name": "google-shopping-merchant-accounts", + "version": "0.1.1" + }, + "snippets": [ + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.shopping.merchant_accounts_v1beta.AccountIssueServiceAsyncClient", + "shortName": "AccountIssueServiceAsyncClient" + }, + "fullName": "google.shopping.merchant_accounts_v1beta.AccountIssueServiceAsyncClient.list_account_issues", + "method": { + "fullName": "google.shopping.merchant.accounts.v1beta.AccountIssueService.ListAccountIssues", + "service": { + "fullName": "google.shopping.merchant.accounts.v1beta.AccountIssueService", + "shortName": "AccountIssueService" + }, + "shortName": "ListAccountIssues" + }, + "parameters": [ + { + "name": "request", + "type": "google.shopping.merchant_accounts_v1beta.types.ListAccountIssuesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.shopping.merchant_accounts_v1beta.services.account_issue_service.pagers.ListAccountIssuesAsyncPager", + "shortName": "list_account_issues" + }, + "description": "Sample for ListAccountIssues", + "file": "merchantapi_v1beta_generated_account_issue_service_list_account_issues_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "merchantapi_v1beta_generated_AccountIssueService_ListAccountIssues_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "merchantapi_v1beta_generated_account_issue_service_list_account_issues_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.shopping.merchant_accounts_v1beta.AccountIssueServiceClient", + "shortName": "AccountIssueServiceClient" + }, + "fullName": 
"google.shopping.merchant_accounts_v1beta.AccountIssueServiceClient.list_account_issues", + "method": { + "fullName": "google.shopping.merchant.accounts.v1beta.AccountIssueService.ListAccountIssues", + "service": { + "fullName": "google.shopping.merchant.accounts.v1beta.AccountIssueService", + "shortName": "AccountIssueService" + }, + "shortName": "ListAccountIssues" + }, + "parameters": [ + { + "name": "request", + "type": "google.shopping.merchant_accounts_v1beta.types.ListAccountIssuesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.shopping.merchant_accounts_v1beta.services.account_issue_service.pagers.ListAccountIssuesPager", + "shortName": "list_account_issues" + }, + "description": "Sample for ListAccountIssues", + "file": "merchantapi_v1beta_generated_account_issue_service_list_account_issues_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "merchantapi_v1beta_generated_AccountIssueService_ListAccountIssues_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "merchantapi_v1beta_generated_account_issue_service_list_account_issues_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.shopping.merchant_accounts_v1beta.AccountTaxServiceAsyncClient", + "shortName": "AccountTaxServiceAsyncClient" + }, + "fullName": "google.shopping.merchant_accounts_v1beta.AccountTaxServiceAsyncClient.get_account_tax", + "method": { + "fullName": "google.shopping.merchant.accounts.v1beta.AccountTaxService.GetAccountTax", + "service": { + "fullName": "google.shopping.merchant.accounts.v1beta.AccountTaxService", + "shortName": "AccountTaxService" + }, + "shortName": "GetAccountTax" + }, + "parameters": [ + { + "name": "request", + "type": "google.shopping.merchant_accounts_v1beta.types.GetAccountTaxRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.shopping.merchant_accounts_v1beta.types.AccountTax", + "shortName": "get_account_tax" + }, + "description": "Sample for GetAccountTax", + "file": "merchantapi_v1beta_generated_account_tax_service_get_account_tax_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "merchantapi_v1beta_generated_AccountTaxService_GetAccountTax_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "merchantapi_v1beta_generated_account_tax_service_get_account_tax_async.py" + }, + { + "canonical": true, + 
"clientMethod": { + "client": { + "fullName": "google.shopping.merchant_accounts_v1beta.AccountTaxServiceClient", + "shortName": "AccountTaxServiceClient" + }, + "fullName": "google.shopping.merchant_accounts_v1beta.AccountTaxServiceClient.get_account_tax", + "method": { + "fullName": "google.shopping.merchant.accounts.v1beta.AccountTaxService.GetAccountTax", + "service": { + "fullName": "google.shopping.merchant.accounts.v1beta.AccountTaxService", + "shortName": "AccountTaxService" + }, + "shortName": "GetAccountTax" + }, + "parameters": [ + { + "name": "request", + "type": "google.shopping.merchant_accounts_v1beta.types.GetAccountTaxRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.shopping.merchant_accounts_v1beta.types.AccountTax", + "shortName": "get_account_tax" + }, + "description": "Sample for GetAccountTax", + "file": "merchantapi_v1beta_generated_account_tax_service_get_account_tax_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "merchantapi_v1beta_generated_AccountTaxService_GetAccountTax_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "merchantapi_v1beta_generated_account_tax_service_get_account_tax_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.shopping.merchant_accounts_v1beta.AccountTaxServiceAsyncClient", + "shortName": "AccountTaxServiceAsyncClient" + }, + "fullName": "google.shopping.merchant_accounts_v1beta.AccountTaxServiceAsyncClient.list_account_tax", + "method": { + "fullName": "google.shopping.merchant.accounts.v1beta.AccountTaxService.ListAccountTax", + "service": { + "fullName": "google.shopping.merchant.accounts.v1beta.AccountTaxService", + "shortName": "AccountTaxService" + }, + "shortName": "ListAccountTax" + }, + "parameters": [ + { + "name": "request", + "type": "google.shopping.merchant_accounts_v1beta.types.ListAccountTaxRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.shopping.merchant_accounts_v1beta.services.account_tax_service.pagers.ListAccountTaxAsyncPager", + "shortName": "list_account_tax" + }, + "description": "Sample for ListAccountTax", + "file": "merchantapi_v1beta_generated_account_tax_service_list_account_tax_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "merchantapi_v1beta_generated_AccountTaxService_ListAccountTax_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + 
} + ], + "title": "merchantapi_v1beta_generated_account_tax_service_list_account_tax_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.shopping.merchant_accounts_v1beta.AccountTaxServiceClient", + "shortName": "AccountTaxServiceClient" + }, + "fullName": "google.shopping.merchant_accounts_v1beta.AccountTaxServiceClient.list_account_tax", + "method": { + "fullName": "google.shopping.merchant.accounts.v1beta.AccountTaxService.ListAccountTax", + "service": { + "fullName": "google.shopping.merchant.accounts.v1beta.AccountTaxService", + "shortName": "AccountTaxService" + }, + "shortName": "ListAccountTax" + }, + "parameters": [ + { + "name": "request", + "type": "google.shopping.merchant_accounts_v1beta.types.ListAccountTaxRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.shopping.merchant_accounts_v1beta.services.account_tax_service.pagers.ListAccountTaxPager", + "shortName": "list_account_tax" + }, + "description": "Sample for ListAccountTax", + "file": "merchantapi_v1beta_generated_account_tax_service_list_account_tax_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "merchantapi_v1beta_generated_AccountTaxService_ListAccountTax_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "merchantapi_v1beta_generated_account_tax_service_list_account_tax_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.shopping.merchant_accounts_v1beta.AccountTaxServiceAsyncClient", + "shortName": "AccountTaxServiceAsyncClient" + }, + "fullName": "google.shopping.merchant_accounts_v1beta.AccountTaxServiceAsyncClient.update_account_tax", + "method": { + "fullName": "google.shopping.merchant.accounts.v1beta.AccountTaxService.UpdateAccountTax", + "service": { + "fullName": "google.shopping.merchant.accounts.v1beta.AccountTaxService", + "shortName": "AccountTaxService" + }, + "shortName": "UpdateAccountTax" + }, + "parameters": [ + { + "name": "request", + "type": "google.shopping.merchant_accounts_v1beta.types.UpdateAccountTaxRequest" + }, + { + "name": "account_tax", + "type": "google.shopping.merchant_accounts_v1beta.types.AccountTax" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.shopping.merchant_accounts_v1beta.types.AccountTax", + "shortName": "update_account_tax" + }, + "description": "Sample for UpdateAccountTax", + "file": "merchantapi_v1beta_generated_account_tax_service_update_account_tax_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "merchantapi_v1beta_generated_AccountTaxService_UpdateAccountTax_async", + "segments": [ + { + "end": 50, + "start": 27, + "type": "FULL" + }, + { + "end": 50, + "start": 27, + 
"type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 51, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "merchantapi_v1beta_generated_account_tax_service_update_account_tax_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.shopping.merchant_accounts_v1beta.AccountTaxServiceClient", + "shortName": "AccountTaxServiceClient" + }, + "fullName": "google.shopping.merchant_accounts_v1beta.AccountTaxServiceClient.update_account_tax", + "method": { + "fullName": "google.shopping.merchant.accounts.v1beta.AccountTaxService.UpdateAccountTax", + "service": { + "fullName": "google.shopping.merchant.accounts.v1beta.AccountTaxService", + "shortName": "AccountTaxService" + }, + "shortName": "UpdateAccountTax" + }, + "parameters": [ + { + "name": "request", + "type": "google.shopping.merchant_accounts_v1beta.types.UpdateAccountTaxRequest" + }, + { + "name": "account_tax", + "type": "google.shopping.merchant_accounts_v1beta.types.AccountTax" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.shopping.merchant_accounts_v1beta.types.AccountTax", + "shortName": "update_account_tax" + }, + "description": "Sample for UpdateAccountTax", + "file": "merchantapi_v1beta_generated_account_tax_service_update_account_tax_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "merchantapi_v1beta_generated_AccountTaxService_UpdateAccountTax_sync", + "segments": [ + { + "end": 50, + "start": 27, + "type": "FULL" + }, + { + "end": 50, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 51, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "merchantapi_v1beta_generated_account_tax_service_update_account_tax_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.shopping.merchant_accounts_v1beta.AccountsServiceAsyncClient", + "shortName": "AccountsServiceAsyncClient" + }, + "fullName": "google.shopping.merchant_accounts_v1beta.AccountsServiceAsyncClient.create_and_configure_account", + "method": { + "fullName": "google.shopping.merchant.accounts.v1beta.AccountsService.CreateAndConfigureAccount", + "service": { + "fullName": "google.shopping.merchant.accounts.v1beta.AccountsService", + "shortName": "AccountsService" + }, + "shortName": "CreateAndConfigureAccount" + }, + "parameters": [ + { + "name": "request", + "type": "google.shopping.merchant_accounts_v1beta.types.CreateAndConfigureAccountRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.shopping.merchant_accounts_v1beta.types.Account", + "shortName": "create_and_configure_account" + }, + "description": "Sample for CreateAndConfigureAccount", + "file": 
"merchantapi_v1beta_generated_accounts_service_create_and_configure_account_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "merchantapi_v1beta_generated_AccountsService_CreateAndConfigureAccount_async", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 49, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 50, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "merchantapi_v1beta_generated_accounts_service_create_and_configure_account_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.shopping.merchant_accounts_v1beta.AccountsServiceClient", + "shortName": "AccountsServiceClient" + }, + "fullName": "google.shopping.merchant_accounts_v1beta.AccountsServiceClient.create_and_configure_account", + "method": { + "fullName": "google.shopping.merchant.accounts.v1beta.AccountsService.CreateAndConfigureAccount", + "service": { + "fullName": "google.shopping.merchant.accounts.v1beta.AccountsService", + "shortName": "AccountsService" + }, + "shortName": "CreateAndConfigureAccount" + }, + "parameters": [ + { + "name": "request", + "type": "google.shopping.merchant_accounts_v1beta.types.CreateAndConfigureAccountRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.shopping.merchant_accounts_v1beta.types.Account", + "shortName": "create_and_configure_account" + }, + "description": "Sample for CreateAndConfigureAccount", + "file": "merchantapi_v1beta_generated_accounts_service_create_and_configure_account_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "merchantapi_v1beta_generated_AccountsService_CreateAndConfigureAccount_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 49, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 50, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "merchantapi_v1beta_generated_accounts_service_create_and_configure_account_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.shopping.merchant_accounts_v1beta.AccountsServiceAsyncClient", + "shortName": "AccountsServiceAsyncClient" + }, + "fullName": "google.shopping.merchant_accounts_v1beta.AccountsServiceAsyncClient.delete_account", + "method": { + "fullName": "google.shopping.merchant.accounts.v1beta.AccountsService.DeleteAccount", + "service": { + "fullName": "google.shopping.merchant.accounts.v1beta.AccountsService", + "shortName": "AccountsService" + }, + "shortName": "DeleteAccount" + }, + "parameters": [ + { + "name": "request", + "type": "google.shopping.merchant_accounts_v1beta.types.DeleteAccountRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "shortName": "delete_account" + 
}, + "description": "Sample for DeleteAccount", + "file": "merchantapi_v1beta_generated_accounts_service_delete_account_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "merchantapi_v1beta_generated_AccountsService_DeleteAccount_async", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "merchantapi_v1beta_generated_accounts_service_delete_account_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.shopping.merchant_accounts_v1beta.AccountsServiceClient", + "shortName": "AccountsServiceClient" + }, + "fullName": "google.shopping.merchant_accounts_v1beta.AccountsServiceClient.delete_account", + "method": { + "fullName": "google.shopping.merchant.accounts.v1beta.AccountsService.DeleteAccount", + "service": { + "fullName": "google.shopping.merchant.accounts.v1beta.AccountsService", + "shortName": "AccountsService" + }, + "shortName": "DeleteAccount" + }, + "parameters": [ + { + "name": "request", + "type": "google.shopping.merchant_accounts_v1beta.types.DeleteAccountRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "shortName": "delete_account" + }, + "description": "Sample for DeleteAccount", + "file": "merchantapi_v1beta_generated_accounts_service_delete_account_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "merchantapi_v1beta_generated_AccountsService_DeleteAccount_sync", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "merchantapi_v1beta_generated_accounts_service_delete_account_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.shopping.merchant_accounts_v1beta.AccountsServiceAsyncClient", + "shortName": "AccountsServiceAsyncClient" + }, + "fullName": "google.shopping.merchant_accounts_v1beta.AccountsServiceAsyncClient.get_account", + "method": { + "fullName": "google.shopping.merchant.accounts.v1beta.AccountsService.GetAccount", + "service": { + "fullName": "google.shopping.merchant.accounts.v1beta.AccountsService", + "shortName": "AccountsService" + }, + "shortName": "GetAccount" + }, + "parameters": [ + { + "name": "request", + "type": "google.shopping.merchant_accounts_v1beta.types.GetAccountRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.shopping.merchant_accounts_v1beta.types.Account", + "shortName": "get_account" + }, + "description": "Sample for GetAccount", + "file": "merchantapi_v1beta_generated_accounts_service_get_account_async.py", + 
"language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "merchantapi_v1beta_generated_AccountsService_GetAccount_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "merchantapi_v1beta_generated_accounts_service_get_account_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.shopping.merchant_accounts_v1beta.AccountsServiceClient", + "shortName": "AccountsServiceClient" + }, + "fullName": "google.shopping.merchant_accounts_v1beta.AccountsServiceClient.get_account", + "method": { + "fullName": "google.shopping.merchant.accounts.v1beta.AccountsService.GetAccount", + "service": { + "fullName": "google.shopping.merchant.accounts.v1beta.AccountsService", + "shortName": "AccountsService" + }, + "shortName": "GetAccount" + }, + "parameters": [ + { + "name": "request", + "type": "google.shopping.merchant_accounts_v1beta.types.GetAccountRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.shopping.merchant_accounts_v1beta.types.Account", + "shortName": "get_account" + }, + "description": "Sample for GetAccount", + "file": "merchantapi_v1beta_generated_accounts_service_get_account_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "merchantapi_v1beta_generated_AccountsService_GetAccount_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "merchantapi_v1beta_generated_accounts_service_get_account_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.shopping.merchant_accounts_v1beta.AccountsServiceAsyncClient", + "shortName": "AccountsServiceAsyncClient" + }, + "fullName": "google.shopping.merchant_accounts_v1beta.AccountsServiceAsyncClient.list_accounts", + "method": { + "fullName": "google.shopping.merchant.accounts.v1beta.AccountsService.ListAccounts", + "service": { + "fullName": "google.shopping.merchant.accounts.v1beta.AccountsService", + "shortName": "AccountsService" + }, + "shortName": "ListAccounts" + }, + "parameters": [ + { + "name": "request", + "type": "google.shopping.merchant_accounts_v1beta.types.ListAccountsRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.shopping.merchant_accounts_v1beta.services.accounts_service.pagers.ListAccountsAsyncPager", + "shortName": "list_accounts" + }, + "description": "Sample for ListAccounts", + "file": "merchantapi_v1beta_generated_accounts_service_list_accounts_async.py", + "language": "PYTHON", 
+ "origin": "API_DEFINITION", + "regionTag": "merchantapi_v1beta_generated_AccountsService_ListAccounts_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "merchantapi_v1beta_generated_accounts_service_list_accounts_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.shopping.merchant_accounts_v1beta.AccountsServiceClient", + "shortName": "AccountsServiceClient" + }, + "fullName": "google.shopping.merchant_accounts_v1beta.AccountsServiceClient.list_accounts", + "method": { + "fullName": "google.shopping.merchant.accounts.v1beta.AccountsService.ListAccounts", + "service": { + "fullName": "google.shopping.merchant.accounts.v1beta.AccountsService", + "shortName": "AccountsService" + }, + "shortName": "ListAccounts" + }, + "parameters": [ + { + "name": "request", + "type": "google.shopping.merchant_accounts_v1beta.types.ListAccountsRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.shopping.merchant_accounts_v1beta.services.accounts_service.pagers.ListAccountsPager", + "shortName": "list_accounts" + }, + "description": "Sample for ListAccounts", + "file": "merchantapi_v1beta_generated_accounts_service_list_accounts_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "merchantapi_v1beta_generated_AccountsService_ListAccounts_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "merchantapi_v1beta_generated_accounts_service_list_accounts_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.shopping.merchant_accounts_v1beta.AccountsServiceAsyncClient", + "shortName": "AccountsServiceAsyncClient" + }, + "fullName": "google.shopping.merchant_accounts_v1beta.AccountsServiceAsyncClient.list_sub_accounts", + "method": { + "fullName": "google.shopping.merchant.accounts.v1beta.AccountsService.ListSubAccounts", + "service": { + "fullName": "google.shopping.merchant.accounts.v1beta.AccountsService", + "shortName": "AccountsService" + }, + "shortName": "ListSubAccounts" + }, + "parameters": [ + { + "name": "request", + "type": "google.shopping.merchant_accounts_v1beta.types.ListSubAccountsRequest" + }, + { + "name": "provider", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.shopping.merchant_accounts_v1beta.services.accounts_service.pagers.ListSubAccountsAsyncPager", + "shortName": "list_sub_accounts" + }, + "description": "Sample for ListSubAccounts", + "file": 
"merchantapi_v1beta_generated_accounts_service_list_sub_accounts_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "merchantapi_v1beta_generated_AccountsService_ListSubAccounts_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "merchantapi_v1beta_generated_accounts_service_list_sub_accounts_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.shopping.merchant_accounts_v1beta.AccountsServiceClient", + "shortName": "AccountsServiceClient" + }, + "fullName": "google.shopping.merchant_accounts_v1beta.AccountsServiceClient.list_sub_accounts", + "method": { + "fullName": "google.shopping.merchant.accounts.v1beta.AccountsService.ListSubAccounts", + "service": { + "fullName": "google.shopping.merchant.accounts.v1beta.AccountsService", + "shortName": "AccountsService" + }, + "shortName": "ListSubAccounts" + }, + "parameters": [ + { + "name": "request", + "type": "google.shopping.merchant_accounts_v1beta.types.ListSubAccountsRequest" + }, + { + "name": "provider", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.shopping.merchant_accounts_v1beta.services.accounts_service.pagers.ListSubAccountsPager", + "shortName": "list_sub_accounts" + }, + "description": "Sample for ListSubAccounts", + "file": "merchantapi_v1beta_generated_accounts_service_list_sub_accounts_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "merchantapi_v1beta_generated_AccountsService_ListSubAccounts_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "merchantapi_v1beta_generated_accounts_service_list_sub_accounts_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.shopping.merchant_accounts_v1beta.AccountsServiceAsyncClient", + "shortName": "AccountsServiceAsyncClient" + }, + "fullName": "google.shopping.merchant_accounts_v1beta.AccountsServiceAsyncClient.update_account", + "method": { + "fullName": "google.shopping.merchant.accounts.v1beta.AccountsService.UpdateAccount", + "service": { + "fullName": "google.shopping.merchant.accounts.v1beta.AccountsService", + "shortName": "AccountsService" + }, + "shortName": "UpdateAccount" + }, + "parameters": [ + { + "name": "request", + "type": "google.shopping.merchant_accounts_v1beta.types.UpdateAccountRequest" + }, + { + "name": "account", + "type": "google.shopping.merchant_accounts_v1beta.types.Account" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { 
+ "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.shopping.merchant_accounts_v1beta.types.Account", + "shortName": "update_account" + }, + "description": "Sample for UpdateAccount", + "file": "merchantapi_v1beta_generated_accounts_service_update_account_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "merchantapi_v1beta_generated_AccountsService_UpdateAccount_async", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 49, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 50, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "merchantapi_v1beta_generated_accounts_service_update_account_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.shopping.merchant_accounts_v1beta.AccountsServiceClient", + "shortName": "AccountsServiceClient" + }, + "fullName": "google.shopping.merchant_accounts_v1beta.AccountsServiceClient.update_account", + "method": { + "fullName": "google.shopping.merchant.accounts.v1beta.AccountsService.UpdateAccount", + "service": { + "fullName": "google.shopping.merchant.accounts.v1beta.AccountsService", + "shortName": "AccountsService" + }, + "shortName": "UpdateAccount" + }, + "parameters": [ + { + "name": "request", + "type": "google.shopping.merchant_accounts_v1beta.types.UpdateAccountRequest" + }, + { + "name": "account", + "type": "google.shopping.merchant_accounts_v1beta.types.Account" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.shopping.merchant_accounts_v1beta.types.Account", + "shortName": "update_account" + }, + "description": "Sample for UpdateAccount", + "file": "merchantapi_v1beta_generated_accounts_service_update_account_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "merchantapi_v1beta_generated_AccountsService_UpdateAccount_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 49, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 50, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "merchantapi_v1beta_generated_accounts_service_update_account_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.shopping.merchant_accounts_v1beta.BusinessIdentityServiceAsyncClient", + "shortName": "BusinessIdentityServiceAsyncClient" + }, + "fullName": "google.shopping.merchant_accounts_v1beta.BusinessIdentityServiceAsyncClient.get_business_identity", + "method": { + "fullName": "google.shopping.merchant.accounts.v1beta.BusinessIdentityService.GetBusinessIdentity", + "service": { + "fullName": "google.shopping.merchant.accounts.v1beta.BusinessIdentityService", + "shortName": "BusinessIdentityService" + }, + "shortName": "GetBusinessIdentity" + }, + "parameters": [ + { + "name": "request", + "type": 
"google.shopping.merchant_accounts_v1beta.types.GetBusinessIdentityRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.shopping.merchant_accounts_v1beta.types.BusinessIdentity", + "shortName": "get_business_identity" + }, + "description": "Sample for GetBusinessIdentity", + "file": "merchantapi_v1beta_generated_business_identity_service_get_business_identity_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "merchantapi_v1beta_generated_BusinessIdentityService_GetBusinessIdentity_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "merchantapi_v1beta_generated_business_identity_service_get_business_identity_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.shopping.merchant_accounts_v1beta.BusinessIdentityServiceClient", + "shortName": "BusinessIdentityServiceClient" + }, + "fullName": "google.shopping.merchant_accounts_v1beta.BusinessIdentityServiceClient.get_business_identity", + "method": { + "fullName": "google.shopping.merchant.accounts.v1beta.BusinessIdentityService.GetBusinessIdentity", + "service": { + "fullName": "google.shopping.merchant.accounts.v1beta.BusinessIdentityService", + "shortName": "BusinessIdentityService" + }, + "shortName": "GetBusinessIdentity" + }, + "parameters": [ + { + "name": "request", + "type": "google.shopping.merchant_accounts_v1beta.types.GetBusinessIdentityRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.shopping.merchant_accounts_v1beta.types.BusinessIdentity", + "shortName": "get_business_identity" + }, + "description": "Sample for GetBusinessIdentity", + "file": "merchantapi_v1beta_generated_business_identity_service_get_business_identity_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "merchantapi_v1beta_generated_BusinessIdentityService_GetBusinessIdentity_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "merchantapi_v1beta_generated_business_identity_service_get_business_identity_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.shopping.merchant_accounts_v1beta.BusinessIdentityServiceAsyncClient", + "shortName": "BusinessIdentityServiceAsyncClient" + }, + "fullName": "google.shopping.merchant_accounts_v1beta.BusinessIdentityServiceAsyncClient.update_business_identity", + "method": { + "fullName": 
"google.shopping.merchant.accounts.v1beta.BusinessIdentityService.UpdateBusinessIdentity", + "service": { + "fullName": "google.shopping.merchant.accounts.v1beta.BusinessIdentityService", + "shortName": "BusinessIdentityService" + }, + "shortName": "UpdateBusinessIdentity" + }, + "parameters": [ + { + "name": "request", + "type": "google.shopping.merchant_accounts_v1beta.types.UpdateBusinessIdentityRequest" + }, + { + "name": "business_identity", + "type": "google.shopping.merchant_accounts_v1beta.types.BusinessIdentity" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.shopping.merchant_accounts_v1beta.types.BusinessIdentity", + "shortName": "update_business_identity" + }, + "description": "Sample for UpdateBusinessIdentity", + "file": "merchantapi_v1beta_generated_business_identity_service_update_business_identity_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "merchantapi_v1beta_generated_BusinessIdentityService_UpdateBusinessIdentity_async", + "segments": [ + { + "end": 50, + "start": 27, + "type": "FULL" + }, + { + "end": 50, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 51, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "merchantapi_v1beta_generated_business_identity_service_update_business_identity_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.shopping.merchant_accounts_v1beta.BusinessIdentityServiceClient", + "shortName": "BusinessIdentityServiceClient" + }, + "fullName": "google.shopping.merchant_accounts_v1beta.BusinessIdentityServiceClient.update_business_identity", + "method": { + "fullName": "google.shopping.merchant.accounts.v1beta.BusinessIdentityService.UpdateBusinessIdentity", + "service": { + "fullName": "google.shopping.merchant.accounts.v1beta.BusinessIdentityService", + "shortName": "BusinessIdentityService" + }, + "shortName": "UpdateBusinessIdentity" + }, + "parameters": [ + { + "name": "request", + "type": "google.shopping.merchant_accounts_v1beta.types.UpdateBusinessIdentityRequest" + }, + { + "name": "business_identity", + "type": "google.shopping.merchant_accounts_v1beta.types.BusinessIdentity" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.shopping.merchant_accounts_v1beta.types.BusinessIdentity", + "shortName": "update_business_identity" + }, + "description": "Sample for UpdateBusinessIdentity", + "file": "merchantapi_v1beta_generated_business_identity_service_update_business_identity_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "merchantapi_v1beta_generated_BusinessIdentityService_UpdateBusinessIdentity_sync", + "segments": [ + { + "end": 50, + "start": 27, + "type": "FULL" + }, + { + "end": 50, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 
44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 51, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "merchantapi_v1beta_generated_business_identity_service_update_business_identity_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.shopping.merchant_accounts_v1beta.BusinessInfoServiceAsyncClient", + "shortName": "BusinessInfoServiceAsyncClient" + }, + "fullName": "google.shopping.merchant_accounts_v1beta.BusinessInfoServiceAsyncClient.get_business_info", + "method": { + "fullName": "google.shopping.merchant.accounts.v1beta.BusinessInfoService.GetBusinessInfo", + "service": { + "fullName": "google.shopping.merchant.accounts.v1beta.BusinessInfoService", + "shortName": "BusinessInfoService" + }, + "shortName": "GetBusinessInfo" + }, + "parameters": [ + { + "name": "request", + "type": "google.shopping.merchant_accounts_v1beta.types.GetBusinessInfoRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.shopping.merchant_accounts_v1beta.types.BusinessInfo", + "shortName": "get_business_info" + }, + "description": "Sample for GetBusinessInfo", + "file": "merchantapi_v1beta_generated_business_info_service_get_business_info_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "merchantapi_v1beta_generated_BusinessInfoService_GetBusinessInfo_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "merchantapi_v1beta_generated_business_info_service_get_business_info_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.shopping.merchant_accounts_v1beta.BusinessInfoServiceClient", + "shortName": "BusinessInfoServiceClient" + }, + "fullName": "google.shopping.merchant_accounts_v1beta.BusinessInfoServiceClient.get_business_info", + "method": { + "fullName": "google.shopping.merchant.accounts.v1beta.BusinessInfoService.GetBusinessInfo", + "service": { + "fullName": "google.shopping.merchant.accounts.v1beta.BusinessInfoService", + "shortName": "BusinessInfoService" + }, + "shortName": "GetBusinessInfo" + }, + "parameters": [ + { + "name": "request", + "type": "google.shopping.merchant_accounts_v1beta.types.GetBusinessInfoRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.shopping.merchant_accounts_v1beta.types.BusinessInfo", + "shortName": "get_business_info" + }, + "description": "Sample for GetBusinessInfo", + "file": "merchantapi_v1beta_generated_business_info_service_get_business_info_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "merchantapi_v1beta_generated_BusinessInfoService_GetBusinessInfo_sync", + "segments": [ + { + "end": 51, + "start": 27, + 
"type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "merchantapi_v1beta_generated_business_info_service_get_business_info_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.shopping.merchant_accounts_v1beta.BusinessInfoServiceAsyncClient", + "shortName": "BusinessInfoServiceAsyncClient" + }, + "fullName": "google.shopping.merchant_accounts_v1beta.BusinessInfoServiceAsyncClient.update_business_info", + "method": { + "fullName": "google.shopping.merchant.accounts.v1beta.BusinessInfoService.UpdateBusinessInfo", + "service": { + "fullName": "google.shopping.merchant.accounts.v1beta.BusinessInfoService", + "shortName": "BusinessInfoService" + }, + "shortName": "UpdateBusinessInfo" + }, + "parameters": [ + { + "name": "request", + "type": "google.shopping.merchant_accounts_v1beta.types.UpdateBusinessInfoRequest" + }, + { + "name": "business_info", + "type": "google.shopping.merchant_accounts_v1beta.types.BusinessInfo" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.shopping.merchant_accounts_v1beta.types.BusinessInfo", + "shortName": "update_business_info" + }, + "description": "Sample for UpdateBusinessInfo", + "file": "merchantapi_v1beta_generated_business_info_service_update_business_info_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "merchantapi_v1beta_generated_BusinessInfoService_UpdateBusinessInfo_async", + "segments": [ + { + "end": 50, + "start": 27, + "type": "FULL" + }, + { + "end": 50, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 51, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "merchantapi_v1beta_generated_business_info_service_update_business_info_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.shopping.merchant_accounts_v1beta.BusinessInfoServiceClient", + "shortName": "BusinessInfoServiceClient" + }, + "fullName": "google.shopping.merchant_accounts_v1beta.BusinessInfoServiceClient.update_business_info", + "method": { + "fullName": "google.shopping.merchant.accounts.v1beta.BusinessInfoService.UpdateBusinessInfo", + "service": { + "fullName": "google.shopping.merchant.accounts.v1beta.BusinessInfoService", + "shortName": "BusinessInfoService" + }, + "shortName": "UpdateBusinessInfo" + }, + "parameters": [ + { + "name": "request", + "type": "google.shopping.merchant_accounts_v1beta.types.UpdateBusinessInfoRequest" + }, + { + "name": "business_info", + "type": "google.shopping.merchant_accounts_v1beta.types.BusinessInfo" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + 
"type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.shopping.merchant_accounts_v1beta.types.BusinessInfo", + "shortName": "update_business_info" + }, + "description": "Sample for UpdateBusinessInfo", + "file": "merchantapi_v1beta_generated_business_info_service_update_business_info_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "merchantapi_v1beta_generated_BusinessInfoService_UpdateBusinessInfo_sync", + "segments": [ + { + "end": 50, + "start": 27, + "type": "FULL" + }, + { + "end": 50, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 51, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "merchantapi_v1beta_generated_business_info_service_update_business_info_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.shopping.merchant_accounts_v1beta.EmailPreferencesServiceAsyncClient", + "shortName": "EmailPreferencesServiceAsyncClient" + }, + "fullName": "google.shopping.merchant_accounts_v1beta.EmailPreferencesServiceAsyncClient.get_email_preferences", + "method": { + "fullName": "google.shopping.merchant.accounts.v1beta.EmailPreferencesService.GetEmailPreferences", + "service": { + "fullName": "google.shopping.merchant.accounts.v1beta.EmailPreferencesService", + "shortName": "EmailPreferencesService" + }, + "shortName": "GetEmailPreferences" + }, + "parameters": [ + { + "name": "request", + "type": "google.shopping.merchant_accounts_v1beta.types.GetEmailPreferencesRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.shopping.merchant_accounts_v1beta.types.EmailPreferences", + "shortName": "get_email_preferences" + }, + "description": "Sample for GetEmailPreferences", + "file": "merchantapi_v1beta_generated_email_preferences_service_get_email_preferences_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "merchantapi_v1beta_generated_EmailPreferencesService_GetEmailPreferences_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "merchantapi_v1beta_generated_email_preferences_service_get_email_preferences_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.shopping.merchant_accounts_v1beta.EmailPreferencesServiceClient", + "shortName": "EmailPreferencesServiceClient" + }, + "fullName": "google.shopping.merchant_accounts_v1beta.EmailPreferencesServiceClient.get_email_preferences", + "method": { + "fullName": "google.shopping.merchant.accounts.v1beta.EmailPreferencesService.GetEmailPreferences", + "service": { + "fullName": "google.shopping.merchant.accounts.v1beta.EmailPreferencesService", + "shortName": "EmailPreferencesService" + }, + "shortName": "GetEmailPreferences" + }, + "parameters": [ + { + "name": 
"request", + "type": "google.shopping.merchant_accounts_v1beta.types.GetEmailPreferencesRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.shopping.merchant_accounts_v1beta.types.EmailPreferences", + "shortName": "get_email_preferences" + }, + "description": "Sample for GetEmailPreferences", + "file": "merchantapi_v1beta_generated_email_preferences_service_get_email_preferences_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "merchantapi_v1beta_generated_EmailPreferencesService_GetEmailPreferences_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "merchantapi_v1beta_generated_email_preferences_service_get_email_preferences_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.shopping.merchant_accounts_v1beta.EmailPreferencesServiceAsyncClient", + "shortName": "EmailPreferencesServiceAsyncClient" + }, + "fullName": "google.shopping.merchant_accounts_v1beta.EmailPreferencesServiceAsyncClient.update_email_preferences", + "method": { + "fullName": "google.shopping.merchant.accounts.v1beta.EmailPreferencesService.UpdateEmailPreferences", + "service": { + "fullName": "google.shopping.merchant.accounts.v1beta.EmailPreferencesService", + "shortName": "EmailPreferencesService" + }, + "shortName": "UpdateEmailPreferences" + }, + "parameters": [ + { + "name": "request", + "type": "google.shopping.merchant_accounts_v1beta.types.UpdateEmailPreferencesRequest" + }, + { + "name": "email_preferences", + "type": "google.shopping.merchant_accounts_v1beta.types.EmailPreferences" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.shopping.merchant_accounts_v1beta.types.EmailPreferences", + "shortName": "update_email_preferences" + }, + "description": "Sample for UpdateEmailPreferences", + "file": "merchantapi_v1beta_generated_email_preferences_service_update_email_preferences_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "merchantapi_v1beta_generated_EmailPreferencesService_UpdateEmailPreferences_async", + "segments": [ + { + "end": 50, + "start": 27, + "type": "FULL" + }, + { + "end": 50, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 51, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "merchantapi_v1beta_generated_email_preferences_service_update_email_preferences_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.shopping.merchant_accounts_v1beta.EmailPreferencesServiceClient", + 
"shortName": "EmailPreferencesServiceClient" + }, + "fullName": "google.shopping.merchant_accounts_v1beta.EmailPreferencesServiceClient.update_email_preferences", + "method": { + "fullName": "google.shopping.merchant.accounts.v1beta.EmailPreferencesService.UpdateEmailPreferences", + "service": { + "fullName": "google.shopping.merchant.accounts.v1beta.EmailPreferencesService", + "shortName": "EmailPreferencesService" + }, + "shortName": "UpdateEmailPreferences" + }, + "parameters": [ + { + "name": "request", + "type": "google.shopping.merchant_accounts_v1beta.types.UpdateEmailPreferencesRequest" + }, + { + "name": "email_preferences", + "type": "google.shopping.merchant_accounts_v1beta.types.EmailPreferences" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.shopping.merchant_accounts_v1beta.types.EmailPreferences", + "shortName": "update_email_preferences" + }, + "description": "Sample for UpdateEmailPreferences", + "file": "merchantapi_v1beta_generated_email_preferences_service_update_email_preferences_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "merchantapi_v1beta_generated_EmailPreferencesService_UpdateEmailPreferences_sync", + "segments": [ + { + "end": 50, + "start": 27, + "type": "FULL" + }, + { + "end": 50, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 51, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "merchantapi_v1beta_generated_email_preferences_service_update_email_preferences_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.shopping.merchant_accounts_v1beta.HomepageServiceAsyncClient", + "shortName": "HomepageServiceAsyncClient" + }, + "fullName": "google.shopping.merchant_accounts_v1beta.HomepageServiceAsyncClient.claim_homepage", + "method": { + "fullName": "google.shopping.merchant.accounts.v1beta.HomepageService.ClaimHomepage", + "service": { + "fullName": "google.shopping.merchant.accounts.v1beta.HomepageService", + "shortName": "HomepageService" + }, + "shortName": "ClaimHomepage" + }, + "parameters": [ + { + "name": "request", + "type": "google.shopping.merchant_accounts_v1beta.types.ClaimHomepageRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.shopping.merchant_accounts_v1beta.types.Homepage", + "shortName": "claim_homepage" + }, + "description": "Sample for ClaimHomepage", + "file": "merchantapi_v1beta_generated_homepage_service_claim_homepage_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "merchantapi_v1beta_generated_HomepageService_ClaimHomepage_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { 
+ "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "merchantapi_v1beta_generated_homepage_service_claim_homepage_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.shopping.merchant_accounts_v1beta.HomepageServiceClient", + "shortName": "HomepageServiceClient" + }, + "fullName": "google.shopping.merchant_accounts_v1beta.HomepageServiceClient.claim_homepage", + "method": { + "fullName": "google.shopping.merchant.accounts.v1beta.HomepageService.ClaimHomepage", + "service": { + "fullName": "google.shopping.merchant.accounts.v1beta.HomepageService", + "shortName": "HomepageService" + }, + "shortName": "ClaimHomepage" + }, + "parameters": [ + { + "name": "request", + "type": "google.shopping.merchant_accounts_v1beta.types.ClaimHomepageRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.shopping.merchant_accounts_v1beta.types.Homepage", + "shortName": "claim_homepage" + }, + "description": "Sample for ClaimHomepage", + "file": "merchantapi_v1beta_generated_homepage_service_claim_homepage_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "merchantapi_v1beta_generated_HomepageService_ClaimHomepage_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "merchantapi_v1beta_generated_homepage_service_claim_homepage_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.shopping.merchant_accounts_v1beta.HomepageServiceAsyncClient", + "shortName": "HomepageServiceAsyncClient" + }, + "fullName": "google.shopping.merchant_accounts_v1beta.HomepageServiceAsyncClient.get_homepage", + "method": { + "fullName": "google.shopping.merchant.accounts.v1beta.HomepageService.GetHomepage", + "service": { + "fullName": "google.shopping.merchant.accounts.v1beta.HomepageService", + "shortName": "HomepageService" + }, + "shortName": "GetHomepage" + }, + "parameters": [ + { + "name": "request", + "type": "google.shopping.merchant_accounts_v1beta.types.GetHomepageRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.shopping.merchant_accounts_v1beta.types.Homepage", + "shortName": "get_homepage" + }, + "description": "Sample for GetHomepage", + "file": "merchantapi_v1beta_generated_homepage_service_get_homepage_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "merchantapi_v1beta_generated_HomepageService_GetHomepage_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": 
"RESPONSE_HANDLING" + } + ], + "title": "merchantapi_v1beta_generated_homepage_service_get_homepage_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.shopping.merchant_accounts_v1beta.HomepageServiceClient", + "shortName": "HomepageServiceClient" + }, + "fullName": "google.shopping.merchant_accounts_v1beta.HomepageServiceClient.get_homepage", + "method": { + "fullName": "google.shopping.merchant.accounts.v1beta.HomepageService.GetHomepage", + "service": { + "fullName": "google.shopping.merchant.accounts.v1beta.HomepageService", + "shortName": "HomepageService" + }, + "shortName": "GetHomepage" + }, + "parameters": [ + { + "name": "request", + "type": "google.shopping.merchant_accounts_v1beta.types.GetHomepageRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.shopping.merchant_accounts_v1beta.types.Homepage", + "shortName": "get_homepage" + }, + "description": "Sample for GetHomepage", + "file": "merchantapi_v1beta_generated_homepage_service_get_homepage_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "merchantapi_v1beta_generated_HomepageService_GetHomepage_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "merchantapi_v1beta_generated_homepage_service_get_homepage_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.shopping.merchant_accounts_v1beta.HomepageServiceAsyncClient", + "shortName": "HomepageServiceAsyncClient" + }, + "fullName": "google.shopping.merchant_accounts_v1beta.HomepageServiceAsyncClient.unclaim_homepage", + "method": { + "fullName": "google.shopping.merchant.accounts.v1beta.HomepageService.UnclaimHomepage", + "service": { + "fullName": "google.shopping.merchant.accounts.v1beta.HomepageService", + "shortName": "HomepageService" + }, + "shortName": "UnclaimHomepage" + }, + "parameters": [ + { + "name": "request", + "type": "google.shopping.merchant_accounts_v1beta.types.UnclaimHomepageRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.shopping.merchant_accounts_v1beta.types.Homepage", + "shortName": "unclaim_homepage" + }, + "description": "Sample for UnclaimHomepage", + "file": "merchantapi_v1beta_generated_homepage_service_unclaim_homepage_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "merchantapi_v1beta_generated_HomepageService_UnclaimHomepage_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + 
], + "title": "merchantapi_v1beta_generated_homepage_service_unclaim_homepage_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.shopping.merchant_accounts_v1beta.HomepageServiceClient", + "shortName": "HomepageServiceClient" + }, + "fullName": "google.shopping.merchant_accounts_v1beta.HomepageServiceClient.unclaim_homepage", + "method": { + "fullName": "google.shopping.merchant.accounts.v1beta.HomepageService.UnclaimHomepage", + "service": { + "fullName": "google.shopping.merchant.accounts.v1beta.HomepageService", + "shortName": "HomepageService" + }, + "shortName": "UnclaimHomepage" + }, + "parameters": [ + { + "name": "request", + "type": "google.shopping.merchant_accounts_v1beta.types.UnclaimHomepageRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.shopping.merchant_accounts_v1beta.types.Homepage", + "shortName": "unclaim_homepage" + }, + "description": "Sample for UnclaimHomepage", + "file": "merchantapi_v1beta_generated_homepage_service_unclaim_homepage_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "merchantapi_v1beta_generated_HomepageService_UnclaimHomepage_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "merchantapi_v1beta_generated_homepage_service_unclaim_homepage_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.shopping.merchant_accounts_v1beta.HomepageServiceAsyncClient", + "shortName": "HomepageServiceAsyncClient" + }, + "fullName": "google.shopping.merchant_accounts_v1beta.HomepageServiceAsyncClient.update_homepage", + "method": { + "fullName": "google.shopping.merchant.accounts.v1beta.HomepageService.UpdateHomepage", + "service": { + "fullName": "google.shopping.merchant.accounts.v1beta.HomepageService", + "shortName": "HomepageService" + }, + "shortName": "UpdateHomepage" + }, + "parameters": [ + { + "name": "request", + "type": "google.shopping.merchant_accounts_v1beta.types.UpdateHomepageRequest" + }, + { + "name": "homepage", + "type": "google.shopping.merchant_accounts_v1beta.types.Homepage" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.shopping.merchant_accounts_v1beta.types.Homepage", + "shortName": "update_homepage" + }, + "description": "Sample for UpdateHomepage", + "file": "merchantapi_v1beta_generated_homepage_service_update_homepage_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "merchantapi_v1beta_generated_HomepageService_UpdateHomepage_async", + "segments": [ + { + "end": 50, + "start": 27, + "type": "FULL" + }, + { + "end": 50, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": 
"REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 51, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "merchantapi_v1beta_generated_homepage_service_update_homepage_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.shopping.merchant_accounts_v1beta.HomepageServiceClient", + "shortName": "HomepageServiceClient" + }, + "fullName": "google.shopping.merchant_accounts_v1beta.HomepageServiceClient.update_homepage", + "method": { + "fullName": "google.shopping.merchant.accounts.v1beta.HomepageService.UpdateHomepage", + "service": { + "fullName": "google.shopping.merchant.accounts.v1beta.HomepageService", + "shortName": "HomepageService" + }, + "shortName": "UpdateHomepage" + }, + "parameters": [ + { + "name": "request", + "type": "google.shopping.merchant_accounts_v1beta.types.UpdateHomepageRequest" + }, + { + "name": "homepage", + "type": "google.shopping.merchant_accounts_v1beta.types.Homepage" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.shopping.merchant_accounts_v1beta.types.Homepage", + "shortName": "update_homepage" + }, + "description": "Sample for UpdateHomepage", + "file": "merchantapi_v1beta_generated_homepage_service_update_homepage_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "merchantapi_v1beta_generated_HomepageService_UpdateHomepage_sync", + "segments": [ + { + "end": 50, + "start": 27, + "type": "FULL" + }, + { + "end": 50, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 51, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "merchantapi_v1beta_generated_homepage_service_update_homepage_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.shopping.merchant_accounts_v1beta.OnlineReturnPolicyServiceAsyncClient", + "shortName": "OnlineReturnPolicyServiceAsyncClient" + }, + "fullName": "google.shopping.merchant_accounts_v1beta.OnlineReturnPolicyServiceAsyncClient.get_online_return_policy", + "method": { + "fullName": "google.shopping.merchant.accounts.v1beta.OnlineReturnPolicyService.GetOnlineReturnPolicy", + "service": { + "fullName": "google.shopping.merchant.accounts.v1beta.OnlineReturnPolicyService", + "shortName": "OnlineReturnPolicyService" + }, + "shortName": "GetOnlineReturnPolicy" + }, + "parameters": [ + { + "name": "request", + "type": "google.shopping.merchant_accounts_v1beta.types.GetOnlineReturnPolicyRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.shopping.merchant_accounts_v1beta.types.OnlineReturnPolicy", + "shortName": "get_online_return_policy" + }, + "description": "Sample for GetOnlineReturnPolicy", + "file": "merchantapi_v1beta_generated_online_return_policy_service_get_online_return_policy_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + 
"regionTag": "merchantapi_v1beta_generated_OnlineReturnPolicyService_GetOnlineReturnPolicy_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "merchantapi_v1beta_generated_online_return_policy_service_get_online_return_policy_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.shopping.merchant_accounts_v1beta.OnlineReturnPolicyServiceClient", + "shortName": "OnlineReturnPolicyServiceClient" + }, + "fullName": "google.shopping.merchant_accounts_v1beta.OnlineReturnPolicyServiceClient.get_online_return_policy", + "method": { + "fullName": "google.shopping.merchant.accounts.v1beta.OnlineReturnPolicyService.GetOnlineReturnPolicy", + "service": { + "fullName": "google.shopping.merchant.accounts.v1beta.OnlineReturnPolicyService", + "shortName": "OnlineReturnPolicyService" + }, + "shortName": "GetOnlineReturnPolicy" + }, + "parameters": [ + { + "name": "request", + "type": "google.shopping.merchant_accounts_v1beta.types.GetOnlineReturnPolicyRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.shopping.merchant_accounts_v1beta.types.OnlineReturnPolicy", + "shortName": "get_online_return_policy" + }, + "description": "Sample for GetOnlineReturnPolicy", + "file": "merchantapi_v1beta_generated_online_return_policy_service_get_online_return_policy_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "merchantapi_v1beta_generated_OnlineReturnPolicyService_GetOnlineReturnPolicy_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "merchantapi_v1beta_generated_online_return_policy_service_get_online_return_policy_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.shopping.merchant_accounts_v1beta.OnlineReturnPolicyServiceAsyncClient", + "shortName": "OnlineReturnPolicyServiceAsyncClient" + }, + "fullName": "google.shopping.merchant_accounts_v1beta.OnlineReturnPolicyServiceAsyncClient.list_online_return_policies", + "method": { + "fullName": "google.shopping.merchant.accounts.v1beta.OnlineReturnPolicyService.ListOnlineReturnPolicies", + "service": { + "fullName": "google.shopping.merchant.accounts.v1beta.OnlineReturnPolicyService", + "shortName": "OnlineReturnPolicyService" + }, + "shortName": "ListOnlineReturnPolicies" + }, + "parameters": [ + { + "name": "request", + "type": "google.shopping.merchant_accounts_v1beta.types.ListOnlineReturnPoliciesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": 
"metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.shopping.merchant_accounts_v1beta.services.online_return_policy_service.pagers.ListOnlineReturnPoliciesAsyncPager", + "shortName": "list_online_return_policies" + }, + "description": "Sample for ListOnlineReturnPolicies", + "file": "merchantapi_v1beta_generated_online_return_policy_service_list_online_return_policies_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "merchantapi_v1beta_generated_OnlineReturnPolicyService_ListOnlineReturnPolicies_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "merchantapi_v1beta_generated_online_return_policy_service_list_online_return_policies_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.shopping.merchant_accounts_v1beta.OnlineReturnPolicyServiceClient", + "shortName": "OnlineReturnPolicyServiceClient" + }, + "fullName": "google.shopping.merchant_accounts_v1beta.OnlineReturnPolicyServiceClient.list_online_return_policies", + "method": { + "fullName": "google.shopping.merchant.accounts.v1beta.OnlineReturnPolicyService.ListOnlineReturnPolicies", + "service": { + "fullName": "google.shopping.merchant.accounts.v1beta.OnlineReturnPolicyService", + "shortName": "OnlineReturnPolicyService" + }, + "shortName": "ListOnlineReturnPolicies" + }, + "parameters": [ + { + "name": "request", + "type": "google.shopping.merchant_accounts_v1beta.types.ListOnlineReturnPoliciesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.shopping.merchant_accounts_v1beta.services.online_return_policy_service.pagers.ListOnlineReturnPoliciesPager", + "shortName": "list_online_return_policies" + }, + "description": "Sample for ListOnlineReturnPolicies", + "file": "merchantapi_v1beta_generated_online_return_policy_service_list_online_return_policies_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "merchantapi_v1beta_generated_OnlineReturnPolicyService_ListOnlineReturnPolicies_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "merchantapi_v1beta_generated_online_return_policy_service_list_online_return_policies_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.shopping.merchant_accounts_v1beta.ProgramsServiceAsyncClient", + "shortName": "ProgramsServiceAsyncClient" + }, + "fullName": "google.shopping.merchant_accounts_v1beta.ProgramsServiceAsyncClient.disable_program", + "method": { + "fullName": "google.shopping.merchant.accounts.v1beta.ProgramsService.DisableProgram", + "service": { + 
"fullName": "google.shopping.merchant.accounts.v1beta.ProgramsService", + "shortName": "ProgramsService" + }, + "shortName": "DisableProgram" + }, + "parameters": [ + { + "name": "request", + "type": "google.shopping.merchant_accounts_v1beta.types.DisableProgramRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.shopping.merchant_accounts_v1beta.types.Program", + "shortName": "disable_program" + }, + "description": "Sample for DisableProgram", + "file": "merchantapi_v1beta_generated_programs_service_disable_program_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "merchantapi_v1beta_generated_ProgramsService_DisableProgram_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "merchantapi_v1beta_generated_programs_service_disable_program_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.shopping.merchant_accounts_v1beta.ProgramsServiceClient", + "shortName": "ProgramsServiceClient" + }, + "fullName": "google.shopping.merchant_accounts_v1beta.ProgramsServiceClient.disable_program", + "method": { + "fullName": "google.shopping.merchant.accounts.v1beta.ProgramsService.DisableProgram", + "service": { + "fullName": "google.shopping.merchant.accounts.v1beta.ProgramsService", + "shortName": "ProgramsService" + }, + "shortName": "DisableProgram" + }, + "parameters": [ + { + "name": "request", + "type": "google.shopping.merchant_accounts_v1beta.types.DisableProgramRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.shopping.merchant_accounts_v1beta.types.Program", + "shortName": "disable_program" + }, + "description": "Sample for DisableProgram", + "file": "merchantapi_v1beta_generated_programs_service_disable_program_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "merchantapi_v1beta_generated_ProgramsService_DisableProgram_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "merchantapi_v1beta_generated_programs_service_disable_program_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.shopping.merchant_accounts_v1beta.ProgramsServiceAsyncClient", + "shortName": "ProgramsServiceAsyncClient" + }, + "fullName": "google.shopping.merchant_accounts_v1beta.ProgramsServiceAsyncClient.enable_program", + "method": { + "fullName": 
"google.shopping.merchant.accounts.v1beta.ProgramsService.EnableProgram", + "service": { + "fullName": "google.shopping.merchant.accounts.v1beta.ProgramsService", + "shortName": "ProgramsService" + }, + "shortName": "EnableProgram" + }, + "parameters": [ + { + "name": "request", + "type": "google.shopping.merchant_accounts_v1beta.types.EnableProgramRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.shopping.merchant_accounts_v1beta.types.Program", + "shortName": "enable_program" + }, + "description": "Sample for EnableProgram", + "file": "merchantapi_v1beta_generated_programs_service_enable_program_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "merchantapi_v1beta_generated_ProgramsService_EnableProgram_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "merchantapi_v1beta_generated_programs_service_enable_program_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.shopping.merchant_accounts_v1beta.ProgramsServiceClient", + "shortName": "ProgramsServiceClient" + }, + "fullName": "google.shopping.merchant_accounts_v1beta.ProgramsServiceClient.enable_program", + "method": { + "fullName": "google.shopping.merchant.accounts.v1beta.ProgramsService.EnableProgram", + "service": { + "fullName": "google.shopping.merchant.accounts.v1beta.ProgramsService", + "shortName": "ProgramsService" + }, + "shortName": "EnableProgram" + }, + "parameters": [ + { + "name": "request", + "type": "google.shopping.merchant_accounts_v1beta.types.EnableProgramRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.shopping.merchant_accounts_v1beta.types.Program", + "shortName": "enable_program" + }, + "description": "Sample for EnableProgram", + "file": "merchantapi_v1beta_generated_programs_service_enable_program_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "merchantapi_v1beta_generated_ProgramsService_EnableProgram_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "merchantapi_v1beta_generated_programs_service_enable_program_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.shopping.merchant_accounts_v1beta.ProgramsServiceAsyncClient", + "shortName": "ProgramsServiceAsyncClient" + }, + "fullName": "google.shopping.merchant_accounts_v1beta.ProgramsServiceAsyncClient.get_program", + "method": 
{ + "fullName": "google.shopping.merchant.accounts.v1beta.ProgramsService.GetProgram", + "service": { + "fullName": "google.shopping.merchant.accounts.v1beta.ProgramsService", + "shortName": "ProgramsService" + }, + "shortName": "GetProgram" + }, + "parameters": [ + { + "name": "request", + "type": "google.shopping.merchant_accounts_v1beta.types.GetProgramRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.shopping.merchant_accounts_v1beta.types.Program", + "shortName": "get_program" + }, + "description": "Sample for GetProgram", + "file": "merchantapi_v1beta_generated_programs_service_get_program_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "merchantapi_v1beta_generated_ProgramsService_GetProgram_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "merchantapi_v1beta_generated_programs_service_get_program_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.shopping.merchant_accounts_v1beta.ProgramsServiceClient", + "shortName": "ProgramsServiceClient" + }, + "fullName": "google.shopping.merchant_accounts_v1beta.ProgramsServiceClient.get_program", + "method": { + "fullName": "google.shopping.merchant.accounts.v1beta.ProgramsService.GetProgram", + "service": { + "fullName": "google.shopping.merchant.accounts.v1beta.ProgramsService", + "shortName": "ProgramsService" + }, + "shortName": "GetProgram" + }, + "parameters": [ + { + "name": "request", + "type": "google.shopping.merchant_accounts_v1beta.types.GetProgramRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.shopping.merchant_accounts_v1beta.types.Program", + "shortName": "get_program" + }, + "description": "Sample for GetProgram", + "file": "merchantapi_v1beta_generated_programs_service_get_program_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "merchantapi_v1beta_generated_ProgramsService_GetProgram_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "merchantapi_v1beta_generated_programs_service_get_program_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.shopping.merchant_accounts_v1beta.ProgramsServiceAsyncClient", + "shortName": "ProgramsServiceAsyncClient" + }, + "fullName": "google.shopping.merchant_accounts_v1beta.ProgramsServiceAsyncClient.list_programs", + "method": { + "fullName": 
"google.shopping.merchant.accounts.v1beta.ProgramsService.ListPrograms", + "service": { + "fullName": "google.shopping.merchant.accounts.v1beta.ProgramsService", + "shortName": "ProgramsService" + }, + "shortName": "ListPrograms" + }, + "parameters": [ + { + "name": "request", + "type": "google.shopping.merchant_accounts_v1beta.types.ListProgramsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.shopping.merchant_accounts_v1beta.services.programs_service.pagers.ListProgramsAsyncPager", + "shortName": "list_programs" + }, + "description": "Sample for ListPrograms", + "file": "merchantapi_v1beta_generated_programs_service_list_programs_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "merchantapi_v1beta_generated_ProgramsService_ListPrograms_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "merchantapi_v1beta_generated_programs_service_list_programs_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.shopping.merchant_accounts_v1beta.ProgramsServiceClient", + "shortName": "ProgramsServiceClient" + }, + "fullName": "google.shopping.merchant_accounts_v1beta.ProgramsServiceClient.list_programs", + "method": { + "fullName": "google.shopping.merchant.accounts.v1beta.ProgramsService.ListPrograms", + "service": { + "fullName": "google.shopping.merchant.accounts.v1beta.ProgramsService", + "shortName": "ProgramsService" + }, + "shortName": "ListPrograms" + }, + "parameters": [ + { + "name": "request", + "type": "google.shopping.merchant_accounts_v1beta.types.ListProgramsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.shopping.merchant_accounts_v1beta.services.programs_service.pagers.ListProgramsPager", + "shortName": "list_programs" + }, + "description": "Sample for ListPrograms", + "file": "merchantapi_v1beta_generated_programs_service_list_programs_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "merchantapi_v1beta_generated_ProgramsService_ListPrograms_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "merchantapi_v1beta_generated_programs_service_list_programs_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.shopping.merchant_accounts_v1beta.RegionsServiceAsyncClient", + "shortName": "RegionsServiceAsyncClient" + }, + "fullName": 
"google.shopping.merchant_accounts_v1beta.RegionsServiceAsyncClient.create_region", + "method": { + "fullName": "google.shopping.merchant.accounts.v1beta.RegionsService.CreateRegion", + "service": { + "fullName": "google.shopping.merchant.accounts.v1beta.RegionsService", + "shortName": "RegionsService" + }, + "shortName": "CreateRegion" + }, + "parameters": [ + { + "name": "request", + "type": "google.shopping.merchant_accounts_v1beta.types.CreateRegionRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "region", + "type": "google.shopping.merchant_accounts_v1beta.types.Region" + }, + { + "name": "region_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.shopping.merchant_accounts_v1beta.types.Region", + "shortName": "create_region" + }, + "description": "Sample for CreateRegion", + "file": "merchantapi_v1beta_generated_regions_service_create_region_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "merchantapi_v1beta_generated_RegionsService_CreateRegion_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "merchantapi_v1beta_generated_regions_service_create_region_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.shopping.merchant_accounts_v1beta.RegionsServiceClient", + "shortName": "RegionsServiceClient" + }, + "fullName": "google.shopping.merchant_accounts_v1beta.RegionsServiceClient.create_region", + "method": { + "fullName": "google.shopping.merchant.accounts.v1beta.RegionsService.CreateRegion", + "service": { + "fullName": "google.shopping.merchant.accounts.v1beta.RegionsService", + "shortName": "RegionsService" + }, + "shortName": "CreateRegion" + }, + "parameters": [ + { + "name": "request", + "type": "google.shopping.merchant_accounts_v1beta.types.CreateRegionRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "region", + "type": "google.shopping.merchant_accounts_v1beta.types.Region" + }, + { + "name": "region_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.shopping.merchant_accounts_v1beta.types.Region", + "shortName": "create_region" + }, + "description": "Sample for CreateRegion", + "file": "merchantapi_v1beta_generated_regions_service_create_region_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "merchantapi_v1beta_generated_RegionsService_CreateRegion_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": 
"merchantapi_v1beta_generated_regions_service_create_region_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.shopping.merchant_accounts_v1beta.RegionsServiceAsyncClient", + "shortName": "RegionsServiceAsyncClient" + }, + "fullName": "google.shopping.merchant_accounts_v1beta.RegionsServiceAsyncClient.delete_region", + "method": { + "fullName": "google.shopping.merchant.accounts.v1beta.RegionsService.DeleteRegion", + "service": { + "fullName": "google.shopping.merchant.accounts.v1beta.RegionsService", + "shortName": "RegionsService" + }, + "shortName": "DeleteRegion" + }, + "parameters": [ + { + "name": "request", + "type": "google.shopping.merchant_accounts_v1beta.types.DeleteRegionRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "shortName": "delete_region" + }, + "description": "Sample for DeleteRegion", + "file": "merchantapi_v1beta_generated_regions_service_delete_region_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "merchantapi_v1beta_generated_RegionsService_DeleteRegion_async", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "merchantapi_v1beta_generated_regions_service_delete_region_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.shopping.merchant_accounts_v1beta.RegionsServiceClient", + "shortName": "RegionsServiceClient" + }, + "fullName": "google.shopping.merchant_accounts_v1beta.RegionsServiceClient.delete_region", + "method": { + "fullName": "google.shopping.merchant.accounts.v1beta.RegionsService.DeleteRegion", + "service": { + "fullName": "google.shopping.merchant.accounts.v1beta.RegionsService", + "shortName": "RegionsService" + }, + "shortName": "DeleteRegion" + }, + "parameters": [ + { + "name": "request", + "type": "google.shopping.merchant_accounts_v1beta.types.DeleteRegionRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "shortName": "delete_region" + }, + "description": "Sample for DeleteRegion", + "file": "merchantapi_v1beta_generated_regions_service_delete_region_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "merchantapi_v1beta_generated_RegionsService_DeleteRegion_sync", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "merchantapi_v1beta_generated_regions_service_delete_region_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": 
"google.shopping.merchant_accounts_v1beta.RegionsServiceAsyncClient", + "shortName": "RegionsServiceAsyncClient" + }, + "fullName": "google.shopping.merchant_accounts_v1beta.RegionsServiceAsyncClient.get_region", + "method": { + "fullName": "google.shopping.merchant.accounts.v1beta.RegionsService.GetRegion", + "service": { + "fullName": "google.shopping.merchant.accounts.v1beta.RegionsService", + "shortName": "RegionsService" + }, + "shortName": "GetRegion" + }, + "parameters": [ + { + "name": "request", + "type": "google.shopping.merchant_accounts_v1beta.types.GetRegionRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.shopping.merchant_accounts_v1beta.types.Region", + "shortName": "get_region" + }, + "description": "Sample for GetRegion", + "file": "merchantapi_v1beta_generated_regions_service_get_region_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "merchantapi_v1beta_generated_RegionsService_GetRegion_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "merchantapi_v1beta_generated_regions_service_get_region_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.shopping.merchant_accounts_v1beta.RegionsServiceClient", + "shortName": "RegionsServiceClient" + }, + "fullName": "google.shopping.merchant_accounts_v1beta.RegionsServiceClient.get_region", + "method": { + "fullName": "google.shopping.merchant.accounts.v1beta.RegionsService.GetRegion", + "service": { + "fullName": "google.shopping.merchant.accounts.v1beta.RegionsService", + "shortName": "RegionsService" + }, + "shortName": "GetRegion" + }, + "parameters": [ + { + "name": "request", + "type": "google.shopping.merchant_accounts_v1beta.types.GetRegionRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.shopping.merchant_accounts_v1beta.types.Region", + "shortName": "get_region" + }, + "description": "Sample for GetRegion", + "file": "merchantapi_v1beta_generated_regions_service_get_region_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "merchantapi_v1beta_generated_RegionsService_GetRegion_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "merchantapi_v1beta_generated_regions_service_get_region_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.shopping.merchant_accounts_v1beta.RegionsServiceAsyncClient", + 
"shortName": "RegionsServiceAsyncClient" + }, + "fullName": "google.shopping.merchant_accounts_v1beta.RegionsServiceAsyncClient.list_regions", + "method": { + "fullName": "google.shopping.merchant.accounts.v1beta.RegionsService.ListRegions", + "service": { + "fullName": "google.shopping.merchant.accounts.v1beta.RegionsService", + "shortName": "RegionsService" + }, + "shortName": "ListRegions" + }, + "parameters": [ + { + "name": "request", + "type": "google.shopping.merchant_accounts_v1beta.types.ListRegionsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.shopping.merchant_accounts_v1beta.services.regions_service.pagers.ListRegionsAsyncPager", + "shortName": "list_regions" + }, + "description": "Sample for ListRegions", + "file": "merchantapi_v1beta_generated_regions_service_list_regions_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "merchantapi_v1beta_generated_RegionsService_ListRegions_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "merchantapi_v1beta_generated_regions_service_list_regions_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.shopping.merchant_accounts_v1beta.RegionsServiceClient", + "shortName": "RegionsServiceClient" + }, + "fullName": "google.shopping.merchant_accounts_v1beta.RegionsServiceClient.list_regions", + "method": { + "fullName": "google.shopping.merchant.accounts.v1beta.RegionsService.ListRegions", + "service": { + "fullName": "google.shopping.merchant.accounts.v1beta.RegionsService", + "shortName": "RegionsService" + }, + "shortName": "ListRegions" + }, + "parameters": [ + { + "name": "request", + "type": "google.shopping.merchant_accounts_v1beta.types.ListRegionsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.shopping.merchant_accounts_v1beta.services.regions_service.pagers.ListRegionsPager", + "shortName": "list_regions" + }, + "description": "Sample for ListRegions", + "file": "merchantapi_v1beta_generated_regions_service_list_regions_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "merchantapi_v1beta_generated_RegionsService_ListRegions_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "merchantapi_v1beta_generated_regions_service_list_regions_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": 
"google.shopping.merchant_accounts_v1beta.RegionsServiceAsyncClient", + "shortName": "RegionsServiceAsyncClient" + }, + "fullName": "google.shopping.merchant_accounts_v1beta.RegionsServiceAsyncClient.update_region", + "method": { + "fullName": "google.shopping.merchant.accounts.v1beta.RegionsService.UpdateRegion", + "service": { + "fullName": "google.shopping.merchant.accounts.v1beta.RegionsService", + "shortName": "RegionsService" + }, + "shortName": "UpdateRegion" + }, + "parameters": [ + { + "name": "request", + "type": "google.shopping.merchant_accounts_v1beta.types.UpdateRegionRequest" + }, + { + "name": "region", + "type": "google.shopping.merchant_accounts_v1beta.types.Region" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.shopping.merchant_accounts_v1beta.types.Region", + "shortName": "update_region" + }, + "description": "Sample for UpdateRegion", + "file": "merchantapi_v1beta_generated_regions_service_update_region_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "merchantapi_v1beta_generated_RegionsService_UpdateRegion_async", + "segments": [ + { + "end": 50, + "start": 27, + "type": "FULL" + }, + { + "end": 50, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 51, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "merchantapi_v1beta_generated_regions_service_update_region_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.shopping.merchant_accounts_v1beta.RegionsServiceClient", + "shortName": "RegionsServiceClient" + }, + "fullName": "google.shopping.merchant_accounts_v1beta.RegionsServiceClient.update_region", + "method": { + "fullName": "google.shopping.merchant.accounts.v1beta.RegionsService.UpdateRegion", + "service": { + "fullName": "google.shopping.merchant.accounts.v1beta.RegionsService", + "shortName": "RegionsService" + }, + "shortName": "UpdateRegion" + }, + "parameters": [ + { + "name": "request", + "type": "google.shopping.merchant_accounts_v1beta.types.UpdateRegionRequest" + }, + { + "name": "region", + "type": "google.shopping.merchant_accounts_v1beta.types.Region" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.shopping.merchant_accounts_v1beta.types.Region", + "shortName": "update_region" + }, + "description": "Sample for UpdateRegion", + "file": "merchantapi_v1beta_generated_regions_service_update_region_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "merchantapi_v1beta_generated_RegionsService_UpdateRegion_sync", + "segments": [ + { + "end": 50, + "start": 27, + "type": "FULL" + }, + { + "end": 50, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": 
"REQUEST_EXECUTION" + }, + { + "end": 51, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "merchantapi_v1beta_generated_regions_service_update_region_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.shopping.merchant_accounts_v1beta.ShippingSettingsServiceAsyncClient", + "shortName": "ShippingSettingsServiceAsyncClient" + }, + "fullName": "google.shopping.merchant_accounts_v1beta.ShippingSettingsServiceAsyncClient.get_shipping_settings", + "method": { + "fullName": "google.shopping.merchant.accounts.v1beta.ShippingSettingsService.GetShippingSettings", + "service": { + "fullName": "google.shopping.merchant.accounts.v1beta.ShippingSettingsService", + "shortName": "ShippingSettingsService" + }, + "shortName": "GetShippingSettings" + }, + "parameters": [ + { + "name": "request", + "type": "google.shopping.merchant_accounts_v1beta.types.GetShippingSettingsRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.shopping.merchant_accounts_v1beta.types.ShippingSettings", + "shortName": "get_shipping_settings" + }, + "description": "Sample for GetShippingSettings", + "file": "merchantapi_v1beta_generated_shipping_settings_service_get_shipping_settings_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "merchantapi_v1beta_generated_ShippingSettingsService_GetShippingSettings_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "merchantapi_v1beta_generated_shipping_settings_service_get_shipping_settings_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.shopping.merchant_accounts_v1beta.ShippingSettingsServiceClient", + "shortName": "ShippingSettingsServiceClient" + }, + "fullName": "google.shopping.merchant_accounts_v1beta.ShippingSettingsServiceClient.get_shipping_settings", + "method": { + "fullName": "google.shopping.merchant.accounts.v1beta.ShippingSettingsService.GetShippingSettings", + "service": { + "fullName": "google.shopping.merchant.accounts.v1beta.ShippingSettingsService", + "shortName": "ShippingSettingsService" + }, + "shortName": "GetShippingSettings" + }, + "parameters": [ + { + "name": "request", + "type": "google.shopping.merchant_accounts_v1beta.types.GetShippingSettingsRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.shopping.merchant_accounts_v1beta.types.ShippingSettings", + "shortName": "get_shipping_settings" + }, + "description": "Sample for GetShippingSettings", + "file": "merchantapi_v1beta_generated_shipping_settings_service_get_shipping_settings_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "merchantapi_v1beta_generated_ShippingSettingsService_GetShippingSettings_sync", + "segments": [ + { + 
"end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "merchantapi_v1beta_generated_shipping_settings_service_get_shipping_settings_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.shopping.merchant_accounts_v1beta.ShippingSettingsServiceAsyncClient", + "shortName": "ShippingSettingsServiceAsyncClient" + }, + "fullName": "google.shopping.merchant_accounts_v1beta.ShippingSettingsServiceAsyncClient.insert_shipping_settings", + "method": { + "fullName": "google.shopping.merchant.accounts.v1beta.ShippingSettingsService.InsertShippingSettings", + "service": { + "fullName": "google.shopping.merchant.accounts.v1beta.ShippingSettingsService", + "shortName": "ShippingSettingsService" + }, + "shortName": "InsertShippingSettings" + }, + "parameters": [ + { + "name": "request", + "type": "google.shopping.merchant_accounts_v1beta.types.InsertShippingSettingsRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.shopping.merchant_accounts_v1beta.types.ShippingSettings", + "shortName": "insert_shipping_settings" + }, + "description": "Sample for InsertShippingSettings", + "file": "merchantapi_v1beta_generated_shipping_settings_service_insert_shipping_settings_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "merchantapi_v1beta_generated_ShippingSettingsService_InsertShippingSettings_async", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 49, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 50, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "merchantapi_v1beta_generated_shipping_settings_service_insert_shipping_settings_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.shopping.merchant_accounts_v1beta.ShippingSettingsServiceClient", + "shortName": "ShippingSettingsServiceClient" + }, + "fullName": "google.shopping.merchant_accounts_v1beta.ShippingSettingsServiceClient.insert_shipping_settings", + "method": { + "fullName": "google.shopping.merchant.accounts.v1beta.ShippingSettingsService.InsertShippingSettings", + "service": { + "fullName": "google.shopping.merchant.accounts.v1beta.ShippingSettingsService", + "shortName": "ShippingSettingsService" + }, + "shortName": "InsertShippingSettings" + }, + "parameters": [ + { + "name": "request", + "type": "google.shopping.merchant_accounts_v1beta.types.InsertShippingSettingsRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.shopping.merchant_accounts_v1beta.types.ShippingSettings", + "shortName": "insert_shipping_settings" + }, + "description": "Sample for InsertShippingSettings", + "file": 
"merchantapi_v1beta_generated_shipping_settings_service_insert_shipping_settings_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "merchantapi_v1beta_generated_ShippingSettingsService_InsertShippingSettings_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 49, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 50, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "merchantapi_v1beta_generated_shipping_settings_service_insert_shipping_settings_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.shopping.merchant_accounts_v1beta.TermsOfServiceAgreementStateServiceAsyncClient", + "shortName": "TermsOfServiceAgreementStateServiceAsyncClient" + }, + "fullName": "google.shopping.merchant_accounts_v1beta.TermsOfServiceAgreementStateServiceAsyncClient.get_terms_of_service_agreement_state", + "method": { + "fullName": "google.shopping.merchant.accounts.v1beta.TermsOfServiceAgreementStateService.GetTermsOfServiceAgreementState", + "service": { + "fullName": "google.shopping.merchant.accounts.v1beta.TermsOfServiceAgreementStateService", + "shortName": "TermsOfServiceAgreementStateService" + }, + "shortName": "GetTermsOfServiceAgreementState" + }, + "parameters": [ + { + "name": "request", + "type": "google.shopping.merchant_accounts_v1beta.types.GetTermsOfServiceAgreementStateRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.shopping.merchant_accounts_v1beta.types.TermsOfServiceAgreementState", + "shortName": "get_terms_of_service_agreement_state" + }, + "description": "Sample for GetTermsOfServiceAgreementState", + "file": "merchantapi_v1beta_generated_terms_of_service_agreement_state_service_get_terms_of_service_agreement_state_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "merchantapi_v1beta_generated_TermsOfServiceAgreementStateService_GetTermsOfServiceAgreementState_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "merchantapi_v1beta_generated_terms_of_service_agreement_state_service_get_terms_of_service_agreement_state_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.shopping.merchant_accounts_v1beta.TermsOfServiceAgreementStateServiceClient", + "shortName": "TermsOfServiceAgreementStateServiceClient" + }, + "fullName": "google.shopping.merchant_accounts_v1beta.TermsOfServiceAgreementStateServiceClient.get_terms_of_service_agreement_state", + "method": { + "fullName": "google.shopping.merchant.accounts.v1beta.TermsOfServiceAgreementStateService.GetTermsOfServiceAgreementState", + "service": { + "fullName": 
"google.shopping.merchant.accounts.v1beta.TermsOfServiceAgreementStateService", + "shortName": "TermsOfServiceAgreementStateService" + }, + "shortName": "GetTermsOfServiceAgreementState" + }, + "parameters": [ + { + "name": "request", + "type": "google.shopping.merchant_accounts_v1beta.types.GetTermsOfServiceAgreementStateRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.shopping.merchant_accounts_v1beta.types.TermsOfServiceAgreementState", + "shortName": "get_terms_of_service_agreement_state" + }, + "description": "Sample for GetTermsOfServiceAgreementState", + "file": "merchantapi_v1beta_generated_terms_of_service_agreement_state_service_get_terms_of_service_agreement_state_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "merchantapi_v1beta_generated_TermsOfServiceAgreementStateService_GetTermsOfServiceAgreementState_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "merchantapi_v1beta_generated_terms_of_service_agreement_state_service_get_terms_of_service_agreement_state_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.shopping.merchant_accounts_v1beta.TermsOfServiceAgreementStateServiceAsyncClient", + "shortName": "TermsOfServiceAgreementStateServiceAsyncClient" + }, + "fullName": "google.shopping.merchant_accounts_v1beta.TermsOfServiceAgreementStateServiceAsyncClient.retrieve_for_application_terms_of_service_agreement_state", + "method": { + "fullName": "google.shopping.merchant.accounts.v1beta.TermsOfServiceAgreementStateService.RetrieveForApplicationTermsOfServiceAgreementState", + "service": { + "fullName": "google.shopping.merchant.accounts.v1beta.TermsOfServiceAgreementStateService", + "shortName": "TermsOfServiceAgreementStateService" + }, + "shortName": "RetrieveForApplicationTermsOfServiceAgreementState" + }, + "parameters": [ + { + "name": "request", + "type": "google.shopping.merchant_accounts_v1beta.types.RetrieveForApplicationTermsOfServiceAgreementStateRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.shopping.merchant_accounts_v1beta.types.TermsOfServiceAgreementState", + "shortName": "retrieve_for_application_terms_of_service_agreement_state" + }, + "description": "Sample for RetrieveForApplicationTermsOfServiceAgreementState", + "file": "merchantapi_v1beta_generated_terms_of_service_agreement_state_service_retrieve_for_application_terms_of_service_agreement_state_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "merchantapi_v1beta_generated_TermsOfServiceAgreementStateService_RetrieveForApplicationTermsOfServiceAgreementState_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": 
"SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "merchantapi_v1beta_generated_terms_of_service_agreement_state_service_retrieve_for_application_terms_of_service_agreement_state_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.shopping.merchant_accounts_v1beta.TermsOfServiceAgreementStateServiceClient", + "shortName": "TermsOfServiceAgreementStateServiceClient" + }, + "fullName": "google.shopping.merchant_accounts_v1beta.TermsOfServiceAgreementStateServiceClient.retrieve_for_application_terms_of_service_agreement_state", + "method": { + "fullName": "google.shopping.merchant.accounts.v1beta.TermsOfServiceAgreementStateService.RetrieveForApplicationTermsOfServiceAgreementState", + "service": { + "fullName": "google.shopping.merchant.accounts.v1beta.TermsOfServiceAgreementStateService", + "shortName": "TermsOfServiceAgreementStateService" + }, + "shortName": "RetrieveForApplicationTermsOfServiceAgreementState" + }, + "parameters": [ + { + "name": "request", + "type": "google.shopping.merchant_accounts_v1beta.types.RetrieveForApplicationTermsOfServiceAgreementStateRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.shopping.merchant_accounts_v1beta.types.TermsOfServiceAgreementState", + "shortName": "retrieve_for_application_terms_of_service_agreement_state" + }, + "description": "Sample for RetrieveForApplicationTermsOfServiceAgreementState", + "file": "merchantapi_v1beta_generated_terms_of_service_agreement_state_service_retrieve_for_application_terms_of_service_agreement_state_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "merchantapi_v1beta_generated_TermsOfServiceAgreementStateService_RetrieveForApplicationTermsOfServiceAgreementState_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "merchantapi_v1beta_generated_terms_of_service_agreement_state_service_retrieve_for_application_terms_of_service_agreement_state_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.shopping.merchant_accounts_v1beta.TermsOfServiceServiceAsyncClient", + "shortName": "TermsOfServiceServiceAsyncClient" + }, + "fullName": "google.shopping.merchant_accounts_v1beta.TermsOfServiceServiceAsyncClient.accept_terms_of_service", + "method": { + "fullName": "google.shopping.merchant.accounts.v1beta.TermsOfServiceService.AcceptTermsOfService", + "service": { + "fullName": "google.shopping.merchant.accounts.v1beta.TermsOfServiceService", + "shortName": "TermsOfServiceService" + }, + "shortName": "AcceptTermsOfService" + }, + "parameters": [ + { + "name": "request", + "type": "google.shopping.merchant_accounts_v1beta.types.AcceptTermsOfServiceRequest" + }, 
+ { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "shortName": "accept_terms_of_service" + }, + "description": "Sample for AcceptTermsOfService", + "file": "merchantapi_v1beta_generated_terms_of_service_service_accept_terms_of_service_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "merchantapi_v1beta_generated_TermsOfServiceService_AcceptTermsOfService_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "type": "RESPONSE_HANDLING" + } + ], + "title": "merchantapi_v1beta_generated_terms_of_service_service_accept_terms_of_service_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.shopping.merchant_accounts_v1beta.TermsOfServiceServiceClient", + "shortName": "TermsOfServiceServiceClient" + }, + "fullName": "google.shopping.merchant_accounts_v1beta.TermsOfServiceServiceClient.accept_terms_of_service", + "method": { + "fullName": "google.shopping.merchant.accounts.v1beta.TermsOfServiceService.AcceptTermsOfService", + "service": { + "fullName": "google.shopping.merchant.accounts.v1beta.TermsOfServiceService", + "shortName": "TermsOfServiceService" + }, + "shortName": "AcceptTermsOfService" + }, + "parameters": [ + { + "name": "request", + "type": "google.shopping.merchant_accounts_v1beta.types.AcceptTermsOfServiceRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "shortName": "accept_terms_of_service" + }, + "description": "Sample for AcceptTermsOfService", + "file": "merchantapi_v1beta_generated_terms_of_service_service_accept_terms_of_service_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "merchantapi_v1beta_generated_TermsOfServiceService_AcceptTermsOfService_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "type": "RESPONSE_HANDLING" + } + ], + "title": "merchantapi_v1beta_generated_terms_of_service_service_accept_terms_of_service_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.shopping.merchant_accounts_v1beta.TermsOfServiceServiceAsyncClient", + "shortName": "TermsOfServiceServiceAsyncClient" + }, + "fullName": "google.shopping.merchant_accounts_v1beta.TermsOfServiceServiceAsyncClient.get_terms_of_service", + "method": { + "fullName": "google.shopping.merchant.accounts.v1beta.TermsOfServiceService.GetTermsOfService", + "service": { + "fullName": "google.shopping.merchant.accounts.v1beta.TermsOfServiceService", + "shortName": "TermsOfServiceService" + }, + "shortName": "GetTermsOfService" + }, + "parameters": [ + { + "name": "request", + "type": 
"google.shopping.merchant_accounts_v1beta.types.GetTermsOfServiceRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.shopping.merchant_accounts_v1beta.types.TermsOfService", + "shortName": "get_terms_of_service" + }, + "description": "Sample for GetTermsOfService", + "file": "merchantapi_v1beta_generated_terms_of_service_service_get_terms_of_service_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "merchantapi_v1beta_generated_TermsOfServiceService_GetTermsOfService_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "merchantapi_v1beta_generated_terms_of_service_service_get_terms_of_service_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.shopping.merchant_accounts_v1beta.TermsOfServiceServiceClient", + "shortName": "TermsOfServiceServiceClient" + }, + "fullName": "google.shopping.merchant_accounts_v1beta.TermsOfServiceServiceClient.get_terms_of_service", + "method": { + "fullName": "google.shopping.merchant.accounts.v1beta.TermsOfServiceService.GetTermsOfService", + "service": { + "fullName": "google.shopping.merchant.accounts.v1beta.TermsOfServiceService", + "shortName": "TermsOfServiceService" + }, + "shortName": "GetTermsOfService" + }, + "parameters": [ + { + "name": "request", + "type": "google.shopping.merchant_accounts_v1beta.types.GetTermsOfServiceRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.shopping.merchant_accounts_v1beta.types.TermsOfService", + "shortName": "get_terms_of_service" + }, + "description": "Sample for GetTermsOfService", + "file": "merchantapi_v1beta_generated_terms_of_service_service_get_terms_of_service_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "merchantapi_v1beta_generated_TermsOfServiceService_GetTermsOfService_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "merchantapi_v1beta_generated_terms_of_service_service_get_terms_of_service_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.shopping.merchant_accounts_v1beta.TermsOfServiceServiceAsyncClient", + "shortName": "TermsOfServiceServiceAsyncClient" + }, + "fullName": "google.shopping.merchant_accounts_v1beta.TermsOfServiceServiceAsyncClient.retrieve_latest_terms_of_service", + "method": { + "fullName": 
"google.shopping.merchant.accounts.v1beta.TermsOfServiceService.RetrieveLatestTermsOfService", + "service": { + "fullName": "google.shopping.merchant.accounts.v1beta.TermsOfServiceService", + "shortName": "TermsOfServiceService" + }, + "shortName": "RetrieveLatestTermsOfService" + }, + "parameters": [ + { + "name": "request", + "type": "google.shopping.merchant_accounts_v1beta.types.RetrieveLatestTermsOfServiceRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.shopping.merchant_accounts_v1beta.types.TermsOfService", + "shortName": "retrieve_latest_terms_of_service" + }, + "description": "Sample for RetrieveLatestTermsOfService", + "file": "merchantapi_v1beta_generated_terms_of_service_service_retrieve_latest_terms_of_service_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "merchantapi_v1beta_generated_TermsOfServiceService_RetrieveLatestTermsOfService_async", + "segments": [ + { + "end": 50, + "start": 27, + "type": "FULL" + }, + { + "end": 50, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 51, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "merchantapi_v1beta_generated_terms_of_service_service_retrieve_latest_terms_of_service_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.shopping.merchant_accounts_v1beta.TermsOfServiceServiceClient", + "shortName": "TermsOfServiceServiceClient" + }, + "fullName": "google.shopping.merchant_accounts_v1beta.TermsOfServiceServiceClient.retrieve_latest_terms_of_service", + "method": { + "fullName": "google.shopping.merchant.accounts.v1beta.TermsOfServiceService.RetrieveLatestTermsOfService", + "service": { + "fullName": "google.shopping.merchant.accounts.v1beta.TermsOfServiceService", + "shortName": "TermsOfServiceService" + }, + "shortName": "RetrieveLatestTermsOfService" + }, + "parameters": [ + { + "name": "request", + "type": "google.shopping.merchant_accounts_v1beta.types.RetrieveLatestTermsOfServiceRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.shopping.merchant_accounts_v1beta.types.TermsOfService", + "shortName": "retrieve_latest_terms_of_service" + }, + "description": "Sample for RetrieveLatestTermsOfService", + "file": "merchantapi_v1beta_generated_terms_of_service_service_retrieve_latest_terms_of_service_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "merchantapi_v1beta_generated_TermsOfServiceService_RetrieveLatestTermsOfService_sync", + "segments": [ + { + "end": 50, + "start": 27, + "type": "FULL" + }, + { + "end": 50, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 51, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "merchantapi_v1beta_generated_terms_of_service_service_retrieve_latest_terms_of_service_sync.py" + }, + { + 
"canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.shopping.merchant_accounts_v1beta.UserServiceAsyncClient", + "shortName": "UserServiceAsyncClient" + }, + "fullName": "google.shopping.merchant_accounts_v1beta.UserServiceAsyncClient.create_user", + "method": { + "fullName": "google.shopping.merchant.accounts.v1beta.UserService.CreateUser", + "service": { + "fullName": "google.shopping.merchant.accounts.v1beta.UserService", + "shortName": "UserService" + }, + "shortName": "CreateUser" + }, + "parameters": [ + { + "name": "request", + "type": "google.shopping.merchant_accounts_v1beta.types.CreateUserRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "user", + "type": "google.shopping.merchant_accounts_v1beta.types.User" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.shopping.merchant_accounts_v1beta.types.User", + "shortName": "create_user" + }, + "description": "Sample for CreateUser", + "file": "merchantapi_v1beta_generated_user_service_create_user_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "merchantapi_v1beta_generated_UserService_CreateUser_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "merchantapi_v1beta_generated_user_service_create_user_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.shopping.merchant_accounts_v1beta.UserServiceClient", + "shortName": "UserServiceClient" + }, + "fullName": "google.shopping.merchant_accounts_v1beta.UserServiceClient.create_user", + "method": { + "fullName": "google.shopping.merchant.accounts.v1beta.UserService.CreateUser", + "service": { + "fullName": "google.shopping.merchant.accounts.v1beta.UserService", + "shortName": "UserService" + }, + "shortName": "CreateUser" + }, + "parameters": [ + { + "name": "request", + "type": "google.shopping.merchant_accounts_v1beta.types.CreateUserRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "user", + "type": "google.shopping.merchant_accounts_v1beta.types.User" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.shopping.merchant_accounts_v1beta.types.User", + "shortName": "create_user" + }, + "description": "Sample for CreateUser", + "file": "merchantapi_v1beta_generated_user_service_create_user_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "merchantapi_v1beta_generated_UserService_CreateUser_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": 
"merchantapi_v1beta_generated_user_service_create_user_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.shopping.merchant_accounts_v1beta.UserServiceAsyncClient", + "shortName": "UserServiceAsyncClient" + }, + "fullName": "google.shopping.merchant_accounts_v1beta.UserServiceAsyncClient.delete_user", + "method": { + "fullName": "google.shopping.merchant.accounts.v1beta.UserService.DeleteUser", + "service": { + "fullName": "google.shopping.merchant.accounts.v1beta.UserService", + "shortName": "UserService" + }, + "shortName": "DeleteUser" + }, + "parameters": [ + { + "name": "request", + "type": "google.shopping.merchant_accounts_v1beta.types.DeleteUserRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "shortName": "delete_user" + }, + "description": "Sample for DeleteUser", + "file": "merchantapi_v1beta_generated_user_service_delete_user_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "merchantapi_v1beta_generated_UserService_DeleteUser_async", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "merchantapi_v1beta_generated_user_service_delete_user_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.shopping.merchant_accounts_v1beta.UserServiceClient", + "shortName": "UserServiceClient" + }, + "fullName": "google.shopping.merchant_accounts_v1beta.UserServiceClient.delete_user", + "method": { + "fullName": "google.shopping.merchant.accounts.v1beta.UserService.DeleteUser", + "service": { + "fullName": "google.shopping.merchant.accounts.v1beta.UserService", + "shortName": "UserService" + }, + "shortName": "DeleteUser" + }, + "parameters": [ + { + "name": "request", + "type": "google.shopping.merchant_accounts_v1beta.types.DeleteUserRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "shortName": "delete_user" + }, + "description": "Sample for DeleteUser", + "file": "merchantapi_v1beta_generated_user_service_delete_user_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "merchantapi_v1beta_generated_UserService_DeleteUser_sync", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "merchantapi_v1beta_generated_user_service_delete_user_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.shopping.merchant_accounts_v1beta.UserServiceAsyncClient", + "shortName": "UserServiceAsyncClient" + }, + "fullName": 
"google.shopping.merchant_accounts_v1beta.UserServiceAsyncClient.get_user", + "method": { + "fullName": "google.shopping.merchant.accounts.v1beta.UserService.GetUser", + "service": { + "fullName": "google.shopping.merchant.accounts.v1beta.UserService", + "shortName": "UserService" + }, + "shortName": "GetUser" + }, + "parameters": [ + { + "name": "request", + "type": "google.shopping.merchant_accounts_v1beta.types.GetUserRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.shopping.merchant_accounts_v1beta.types.User", + "shortName": "get_user" + }, + "description": "Sample for GetUser", + "file": "merchantapi_v1beta_generated_user_service_get_user_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "merchantapi_v1beta_generated_UserService_GetUser_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "merchantapi_v1beta_generated_user_service_get_user_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.shopping.merchant_accounts_v1beta.UserServiceClient", + "shortName": "UserServiceClient" + }, + "fullName": "google.shopping.merchant_accounts_v1beta.UserServiceClient.get_user", + "method": { + "fullName": "google.shopping.merchant.accounts.v1beta.UserService.GetUser", + "service": { + "fullName": "google.shopping.merchant.accounts.v1beta.UserService", + "shortName": "UserService" + }, + "shortName": "GetUser" + }, + "parameters": [ + { + "name": "request", + "type": "google.shopping.merchant_accounts_v1beta.types.GetUserRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.shopping.merchant_accounts_v1beta.types.User", + "shortName": "get_user" + }, + "description": "Sample for GetUser", + "file": "merchantapi_v1beta_generated_user_service_get_user_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "merchantapi_v1beta_generated_UserService_GetUser_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "merchantapi_v1beta_generated_user_service_get_user_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.shopping.merchant_accounts_v1beta.UserServiceAsyncClient", + "shortName": "UserServiceAsyncClient" + }, + "fullName": "google.shopping.merchant_accounts_v1beta.UserServiceAsyncClient.list_users", + "method": { + "fullName": 
"google.shopping.merchant.accounts.v1beta.UserService.ListUsers", + "service": { + "fullName": "google.shopping.merchant.accounts.v1beta.UserService", + "shortName": "UserService" + }, + "shortName": "ListUsers" + }, + "parameters": [ + { + "name": "request", + "type": "google.shopping.merchant_accounts_v1beta.types.ListUsersRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.shopping.merchant_accounts_v1beta.services.user_service.pagers.ListUsersAsyncPager", + "shortName": "list_users" + }, + "description": "Sample for ListUsers", + "file": "merchantapi_v1beta_generated_user_service_list_users_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "merchantapi_v1beta_generated_UserService_ListUsers_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "merchantapi_v1beta_generated_user_service_list_users_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.shopping.merchant_accounts_v1beta.UserServiceClient", + "shortName": "UserServiceClient" + }, + "fullName": "google.shopping.merchant_accounts_v1beta.UserServiceClient.list_users", + "method": { + "fullName": "google.shopping.merchant.accounts.v1beta.UserService.ListUsers", + "service": { + "fullName": "google.shopping.merchant.accounts.v1beta.UserService", + "shortName": "UserService" + }, + "shortName": "ListUsers" + }, + "parameters": [ + { + "name": "request", + "type": "google.shopping.merchant_accounts_v1beta.types.ListUsersRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.shopping.merchant_accounts_v1beta.services.user_service.pagers.ListUsersPager", + "shortName": "list_users" + }, + "description": "Sample for ListUsers", + "file": "merchantapi_v1beta_generated_user_service_list_users_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "merchantapi_v1beta_generated_UserService_ListUsers_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "merchantapi_v1beta_generated_user_service_list_users_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.shopping.merchant_accounts_v1beta.UserServiceAsyncClient", + "shortName": "UserServiceAsyncClient" + }, + "fullName": "google.shopping.merchant_accounts_v1beta.UserServiceAsyncClient.update_user", + "method": { + "fullName": 
"google.shopping.merchant.accounts.v1beta.UserService.UpdateUser", + "service": { + "fullName": "google.shopping.merchant.accounts.v1beta.UserService", + "shortName": "UserService" + }, + "shortName": "UpdateUser" + }, + "parameters": [ + { + "name": "request", + "type": "google.shopping.merchant_accounts_v1beta.types.UpdateUserRequest" + }, + { + "name": "user", + "type": "google.shopping.merchant_accounts_v1beta.types.User" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.shopping.merchant_accounts_v1beta.types.User", + "shortName": "update_user" + }, + "description": "Sample for UpdateUser", + "file": "merchantapi_v1beta_generated_user_service_update_user_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "merchantapi_v1beta_generated_UserService_UpdateUser_async", + "segments": [ + { + "end": 50, + "start": 27, + "type": "FULL" + }, + { + "end": 50, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 51, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "merchantapi_v1beta_generated_user_service_update_user_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.shopping.merchant_accounts_v1beta.UserServiceClient", + "shortName": "UserServiceClient" + }, + "fullName": "google.shopping.merchant_accounts_v1beta.UserServiceClient.update_user", + "method": { + "fullName": "google.shopping.merchant.accounts.v1beta.UserService.UpdateUser", + "service": { + "fullName": "google.shopping.merchant.accounts.v1beta.UserService", + "shortName": "UserService" + }, + "shortName": "UpdateUser" + }, + "parameters": [ + { + "name": "request", + "type": "google.shopping.merchant_accounts_v1beta.types.UpdateUserRequest" + }, + { + "name": "user", + "type": "google.shopping.merchant_accounts_v1beta.types.User" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.shopping.merchant_accounts_v1beta.types.User", + "shortName": "update_user" + }, + "description": "Sample for UpdateUser", + "file": "merchantapi_v1beta_generated_user_service_update_user_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "merchantapi_v1beta_generated_UserService_UpdateUser_sync", + "segments": [ + { + "end": 50, + "start": 27, + "type": "FULL" + }, + { + "end": 50, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 51, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "merchantapi_v1beta_generated_user_service_update_user_sync.py" + } + ] +} diff --git a/packages/google-shopping-merchant-accounts/scripts/decrypt-secrets.sh b/packages/google-shopping-merchant-accounts/scripts/decrypt-secrets.sh new 
file mode 100755 index 000000000000..0018b421ddf8 --- /dev/null +++ b/packages/google-shopping-merchant-accounts/scripts/decrypt-secrets.sh @@ -0,0 +1,46 @@ +#!/bin/bash + +# Copyright 2023 Google LLC All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )" +ROOT=$( dirname "$DIR" ) + +# Work from the project root. +cd $ROOT + +# Prevent it from overriding files. +# We recommend that sample authors use their own service account files and cloud project. +# In that case, they are supposed to prepare these files by themselves. +if [[ -f "testing/test-env.sh" ]] || \ + [[ -f "testing/service-account.json" ]] || \ + [[ -f "testing/client-secrets.json" ]]; then + echo "One or more target files exist, aborting." + exit 1 +fi + +# Use SECRET_MANAGER_PROJECT if set, fallback to cloud-devrel-kokoro-resources. +PROJECT_ID="${SECRET_MANAGER_PROJECT:-cloud-devrel-kokoro-resources}" + +gcloud secrets versions access latest --secret="python-docs-samples-test-env" \ + --project="${PROJECT_ID}" \ + > testing/test-env.sh +gcloud secrets versions access latest \ + --secret="python-docs-samples-service-account" \ + --project="${PROJECT_ID}" \ + > testing/service-account.json +gcloud secrets versions access latest \ + --secret="python-docs-samples-client-secrets" \ + --project="${PROJECT_ID}" \ + > testing/client-secrets.json diff --git a/packages/google-shopping-merchant-accounts/scripts/fixup_merchant_accounts_v1beta_keywords.py b/packages/google-shopping-merchant-accounts/scripts/fixup_merchant_accounts_v1beta_keywords.py new file mode 100644 index 000000000000..fb344faa3a22 --- /dev/null +++ b/packages/google-shopping-merchant-accounts/scripts/fixup_merchant_accounts_v1beta_keywords.py @@ -0,0 +1,218 @@ +#! /usr/bin/env python3 +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
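The decrypt-secrets.sh script above pulls three test fixtures out of Secret Manager with gcloud. For contributors who prefer to stay in Python, a hedged equivalent for one of those fetches is sketched below using the google-cloud-secret-manager client; that library is not a dependency of this package, and configured ADC plus access to the secret are assumed.

```python
# Illustrative only: fetch the test-env secret the shell script above downloads.
# Assumes `pip install google-cloud-secret-manager` and configured ADC.
from google.cloud import secretmanager

project_id = "cloud-devrel-kokoro-resources"  # same default project as the script
name = f"projects/{project_id}/secrets/python-docs-samples-test-env/versions/latest"

client = secretmanager.SecretManagerServiceClient()
response = client.access_secret_version(request={"name": name})

# Write the secret payload to the same location the shell script uses.
with open("testing/test-env.sh", "wb") as fh:
    fh.write(response.payload.data)
```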
+# +import argparse +import os +import libcst as cst +import pathlib +import sys +from typing import (Any, Callable, Dict, List, Sequence, Tuple) + + +def partition( + predicate: Callable[[Any], bool], + iterator: Sequence[Any] +) -> Tuple[List[Any], List[Any]]: + """A stable, out-of-place partition.""" + results = ([], []) + + for i in iterator: + results[int(predicate(i))].append(i) + + # Returns trueList, falseList + return results[1], results[0] + + +class merchant_accountsCallTransformer(cst.CSTTransformer): + CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata') + METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { + 'accept_terms_of_service': ('name', 'account', 'region_code', ), + 'claim_homepage': ('name', ), + 'create_and_configure_account': ('account', 'users', 'accept_terms_of_service', 'service', ), + 'create_region': ('parent', 'region_id', 'region', ), + 'create_user': ('parent', 'user_id', 'user', ), + 'delete_account': ('name', ), + 'delete_region': ('name', ), + 'delete_user': ('name', ), + 'disable_program': ('name', ), + 'enable_program': ('name', ), + 'get_account': ('name', ), + 'get_account_tax': ('name', ), + 'get_business_identity': ('name', ), + 'get_business_info': ('name', ), + 'get_email_preferences': ('name', ), + 'get_homepage': ('name', ), + 'get_online_return_policy': ('name', ), + 'get_program': ('name', ), + 'get_region': ('name', ), + 'get_shipping_settings': ('name', ), + 'get_terms_of_service': ('name', ), + 'get_terms_of_service_agreement_state': ('name', ), + 'get_user': ('name', ), + 'insert_shipping_settings': ('parent', 'shipping_setting', ), + 'list_account_issues': ('parent', 'page_size', 'page_token', 'language_code', 'time_zone', ), + 'list_accounts': ('page_size', 'page_token', 'filter', ), + 'list_account_tax': ('parent', 'page_size', 'page_token', ), + 'list_online_return_policies': ('parent', 'page_size', 'page_token', ), + 'list_programs': ('parent', 'page_size', 'page_token', ), + 'list_regions': ('parent', 'page_size', 'page_token', ), + 'list_sub_accounts': ('provider', 'page_size', 'page_token', ), + 'list_users': ('parent', 'page_size', 'page_token', ), + 'retrieve_for_application_terms_of_service_agreement_state': ('parent', ), + 'retrieve_latest_terms_of_service': ('region_code', 'kind', ), + 'unclaim_homepage': ('name', ), + 'update_account': ('account', 'update_mask', ), + 'update_account_tax': ('account_tax', 'update_mask', ), + 'update_business_identity': ('business_identity', 'update_mask', ), + 'update_business_info': ('business_info', 'update_mask', ), + 'update_email_preferences': ('email_preferences', 'update_mask', ), + 'update_homepage': ('homepage', 'update_mask', ), + 'update_region': ('region', 'update_mask', ), + 'update_user': ('user', 'update_mask', ), + } + + def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode: + try: + key = original.func.attr.value + kword_params = self.METHOD_TO_PARAMS[key] + except (AttributeError, KeyError): + # Either not a method from the API or too convoluted to be sure. + return updated + + # If the existing code is valid, keyword args come after positional args. + # Therefore, all positional args must map to the first parameters. + args, kwargs = partition(lambda a: not bool(a.keyword), updated.args) + if any(k.keyword.value == "request" for k in kwargs): + # We've already fixed this file, don't fix it again. 
+ return updated + + kwargs, ctrl_kwargs = partition( + lambda a: a.keyword.value not in self.CTRL_PARAMS, + kwargs + ) + + args, ctrl_args = args[:len(kword_params)], args[len(kword_params):] + ctrl_kwargs.extend(cst.Arg(value=a.value, keyword=cst.Name(value=ctrl)) + for a, ctrl in zip(ctrl_args, self.CTRL_PARAMS)) + + request_arg = cst.Arg( + value=cst.Dict([ + cst.DictElement( + cst.SimpleString("'{}'".format(name)), +cst.Element(value=arg.value) + ) + # Note: the args + kwargs looks silly, but keep in mind that + # the control parameters had to be stripped out, and that + # those could have been passed positionally or by keyword. + for name, arg in zip(kword_params, args + kwargs)]), + keyword=cst.Name("request") + ) + + return updated.with_changes( + args=[request_arg] + ctrl_kwargs + ) + + +def fix_files( + in_dir: pathlib.Path, + out_dir: pathlib.Path, + *, + transformer=merchant_accountsCallTransformer(), +): + """Duplicate the input dir to the output dir, fixing file method calls. + + Preconditions: + * in_dir is a real directory + * out_dir is a real, empty directory + """ + pyfile_gen = ( + pathlib.Path(os.path.join(root, f)) + for root, _, files in os.walk(in_dir) + for f in files if os.path.splitext(f)[1] == ".py" + ) + + for fpath in pyfile_gen: + with open(fpath, 'r') as f: + src = f.read() + + # Parse the code and insert method call fixes. + tree = cst.parse_module(src) + updated = tree.visit(transformer) + + # Create the path and directory structure for the new file. + updated_path = out_dir.joinpath(fpath.relative_to(in_dir)) + updated_path.parent.mkdir(parents=True, exist_ok=True) + + # Generate the updated source file at the corresponding path. + with open(updated_path, 'w') as f: + f.write(updated.code) + + +if __name__ == '__main__': + parser = argparse.ArgumentParser( + description="""Fix up source that uses the merchant_accounts client library. + +The existing sources are NOT overwritten but are copied to output_dir with changes made. + +Note: This tool operates at a best-effort level at converting positional + parameters in client method calls to keyword based parameters. + Cases where it WILL FAIL include + A) * or ** expansion in a method call. + B) Calls via function or method alias (includes free function calls) + C) Indirect or dispatched calls (e.g. the method is looked up dynamically) + + These all constitute false negatives. The tool will also detect false + positives when an API method shares a name with another method. 
+""") + parser.add_argument( + '-d', + '--input-directory', + required=True, + dest='input_dir', + help='the input directory to walk for python files to fix up', + ) + parser.add_argument( + '-o', + '--output-directory', + required=True, + dest='output_dir', + help='the directory to output files fixed via un-flattening', + ) + args = parser.parse_args() + input_dir = pathlib.Path(args.input_dir) + output_dir = pathlib.Path(args.output_dir) + if not input_dir.is_dir(): + print( + f"input directory '{input_dir}' does not exist or is not a directory", + file=sys.stderr, + ) + sys.exit(-1) + + if not output_dir.is_dir(): + print( + f"output directory '{output_dir}' does not exist or is not a directory", + file=sys.stderr, + ) + sys.exit(-1) + + if os.listdir(output_dir): + print( + f"output directory '{output_dir}' is not empty", + file=sys.stderr, + ) + sys.exit(-1) + + fix_files(input_dir, output_dir) diff --git a/packages/google-shopping-merchant-accounts/setup.py b/packages/google-shopping-merchant-accounts/setup.py new file mode 100644 index 000000000000..d3d39677f880 --- /dev/null +++ b/packages/google-shopping-merchant-accounts/setup.py @@ -0,0 +1,96 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import io +import os +import re + +import setuptools # type: ignore + +package_root = os.path.abspath(os.path.dirname(__file__)) + +name = "google-shopping-merchant-accounts" + + +description = "Google Shopping Merchant Accounts API client library" + +version = None + +with open( + os.path.join(package_root, "google/shopping/merchant_accounts/gapic_version.py") +) as fp: + version_candidates = re.findall(r"(?<=\")\d+.\d+.\d+(?=\")", fp.read()) + assert len(version_candidates) == 1 + version = version_candidates[0] + +if version[0] == "0": + release_status = "Development Status :: 4 - Beta" +else: + release_status = "Development Status :: 5 - Production/Stable" + +dependencies = [ + "google-api-core[grpc] >= 1.34.1, <3.0.0dev,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*,!=2.8.*,!=2.9.*,!=2.10.*", + # Exclude incompatible versions of `google-auth` + # See https://github.com/googleapis/google-cloud-python/issues/12364 + "google-auth >= 2.14.1, <3.0.0dev,!=2.24.0,!=2.25.0", + "proto-plus >= 1.22.3, <2.0.0dev", + "protobuf>=3.19.5,<5.0.0dev,!=3.20.0,!=3.20.1,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", + "google-shopping-type >= 0.1.6, <1.0.0dev", +] +url = "https://github.com/googleapis/google-cloud-python/tree/main/packages/google-shopping-merchant-accounts" + +package_root = os.path.abspath(os.path.dirname(__file__)) + +readme_filename = os.path.join(package_root, "README.rst") +with io.open(readme_filename, encoding="utf-8") as readme_file: + readme = readme_file.read() + +packages = [ + package + for package in setuptools.find_namespace_packages() + if package.startswith("google") +] + +setuptools.setup( + name=name, + version=version, + description=description, + long_description=readme, + author="Google LLC", + author_email="googleapis-packages@google.com", + license="Apache 2.0", + url=url, + classifiers=[ + release_status, + "Intended Audience :: Developers", + "License :: OSI Approved :: Apache Software License", + "Programming Language :: Python", + "Programming Language :: Python :: 3", + "Programming Language :: Python :: 3.7", + "Programming Language :: Python :: 3.8", + "Programming Language :: Python :: 3.9", + "Programming Language :: Python :: 3.10", + "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", + "Operating System :: OS Independent", + "Topic :: Internet", + ], + platforms="Posix; MacOS X; Windows", + packages=packages, + python_requires=">=3.7", + install_requires=dependencies, + include_package_data=True, + zip_safe=False, +) diff --git a/packages/google-shopping-merchant-accounts/testing/.gitignore b/packages/google-shopping-merchant-accounts/testing/.gitignore new file mode 100644 index 000000000000..b05fbd630881 --- /dev/null +++ b/packages/google-shopping-merchant-accounts/testing/.gitignore @@ -0,0 +1,3 @@ +test-env.sh +service-account.json +client-secrets.json \ No newline at end of file diff --git a/packages/google-shopping-merchant-accounts/testing/constraints-3.10.txt b/packages/google-shopping-merchant-accounts/testing/constraints-3.10.txt new file mode 100644 index 000000000000..4cae520d02b2 --- /dev/null +++ b/packages/google-shopping-merchant-accounts/testing/constraints-3.10.txt @@ -0,0 +1,7 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. 
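setup.py above derives the release version by scanning gapic_version.py for a quoted MAJOR.MINOR.PATCH string and asserting that exactly one candidate is found. A small self-contained check of that regex, run against a fabricated file body rather than the real gapic_version.py, is sketched here:

```python
# Illustrative only: the same version regex applied to a fabricated file body.
import re

fake_gapic_version = '__version__ = "0.1.0"\n'  # stand-in for gapic_version.py
candidates = re.findall(r"(?<=\")\d+.\d+.\d+(?=\")", fake_gapic_version)
assert candidates == ["0.1.0"]
```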
+google-api-core +proto-plus +protobuf +google-shopping-type diff --git a/packages/google-shopping-merchant-accounts/testing/constraints-3.11.txt b/packages/google-shopping-merchant-accounts/testing/constraints-3.11.txt new file mode 100644 index 000000000000..4cae520d02b2 --- /dev/null +++ b/packages/google-shopping-merchant-accounts/testing/constraints-3.11.txt @@ -0,0 +1,7 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf +google-shopping-type diff --git a/packages/google-shopping-merchant-accounts/testing/constraints-3.12.txt b/packages/google-shopping-merchant-accounts/testing/constraints-3.12.txt new file mode 100644 index 000000000000..4cae520d02b2 --- /dev/null +++ b/packages/google-shopping-merchant-accounts/testing/constraints-3.12.txt @@ -0,0 +1,7 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf +google-shopping-type diff --git a/packages/google-shopping-merchant-accounts/testing/constraints-3.7.txt b/packages/google-shopping-merchant-accounts/testing/constraints-3.7.txt new file mode 100644 index 000000000000..80917938b4b4 --- /dev/null +++ b/packages/google-shopping-merchant-accounts/testing/constraints-3.7.txt @@ -0,0 +1,11 @@ +# This constraints file is used to check that lower bounds +# are correct in setup.py +# List all library dependencies and extras in this file. +# Pin the version to the lower bound. +# e.g., if setup.py has "google-cloud-foo >= 1.14.0, < 2.0.0dev", +# Then this file should have google-cloud-foo==1.14.0 +google-api-core==1.34.1 +google-auth==2.14.1 +proto-plus==1.22.3 +protobuf==3.19.5 +google-shopping-type==0.1.6 diff --git a/packages/google-shopping-merchant-accounts/testing/constraints-3.8.txt b/packages/google-shopping-merchant-accounts/testing/constraints-3.8.txt new file mode 100644 index 000000000000..4cae520d02b2 --- /dev/null +++ b/packages/google-shopping-merchant-accounts/testing/constraints-3.8.txt @@ -0,0 +1,7 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf +google-shopping-type diff --git a/packages/google-shopping-merchant-accounts/testing/constraints-3.9.txt b/packages/google-shopping-merchant-accounts/testing/constraints-3.9.txt new file mode 100644 index 000000000000..4cae520d02b2 --- /dev/null +++ b/packages/google-shopping-merchant-accounts/testing/constraints-3.9.txt @@ -0,0 +1,7 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf +google-shopping-type diff --git a/packages/google-shopping-merchant-accounts/tests/__init__.py b/packages/google-shopping-merchant-accounts/tests/__init__.py new file mode 100644 index 000000000000..8f6cf068242c --- /dev/null +++ b/packages/google-shopping-merchant-accounts/tests/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/packages/google-shopping-merchant-accounts/tests/unit/__init__.py b/packages/google-shopping-merchant-accounts/tests/unit/__init__.py new file mode 100644 index 000000000000..8f6cf068242c --- /dev/null +++ b/packages/google-shopping-merchant-accounts/tests/unit/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/packages/google-shopping-merchant-accounts/tests/unit/gapic/__init__.py b/packages/google-shopping-merchant-accounts/tests/unit/gapic/__init__.py new file mode 100644 index 000000000000..8f6cf068242c --- /dev/null +++ b/packages/google-shopping-merchant-accounts/tests/unit/gapic/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/packages/google-shopping-merchant-accounts/tests/unit/gapic/merchant_accounts_v1beta/__init__.py b/packages/google-shopping-merchant-accounts/tests/unit/gapic/merchant_accounts_v1beta/__init__.py new file mode 100644 index 000000000000..8f6cf068242c --- /dev/null +++ b/packages/google-shopping-merchant-accounts/tests/unit/gapic/merchant_accounts_v1beta/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# diff --git a/packages/google-shopping-merchant-accounts/tests/unit/gapic/merchant_accounts_v1beta/test_account_issue_service.py b/packages/google-shopping-merchant-accounts/tests/unit/gapic/merchant_accounts_v1beta/test_account_issue_service.py new file mode 100644 index 000000000000..d50859d947e3 --- /dev/null +++ b/packages/google-shopping-merchant-accounts/tests/unit/gapic/merchant_accounts_v1beta/test_account_issue_service.py @@ -0,0 +1,2943 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import os + +# try/except added for compatibility with python < 3.8 +try: + from unittest import mock + from unittest.mock import AsyncMock # pragma: NO COVER +except ImportError: # pragma: NO COVER + import mock + +from collections.abc import Iterable +import json +import math + +from google.api_core import gapic_v1, grpc_helpers, grpc_helpers_async, path_template +from google.api_core import api_core_version, client_options +from google.api_core import exceptions as core_exceptions +import google.auth +from google.auth import credentials as ga_credentials +from google.auth.exceptions import MutualTLSChannelError +from google.oauth2 import service_account +from google.protobuf import json_format +from google.type import datetime_pb2 # type: ignore +import grpc +from grpc.experimental import aio +from proto.marshal.rules import wrappers +from proto.marshal.rules.dates import DurationRule, TimestampRule +import pytest +from requests import PreparedRequest, Request, Response +from requests.sessions import Session + +from google.shopping.merchant_accounts_v1beta.services.account_issue_service import ( + AccountIssueServiceAsyncClient, + AccountIssueServiceClient, + pagers, + transports, +) +from google.shopping.merchant_accounts_v1beta.types import accountissue + + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + + +# If default endpoint is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. +def modify_default_endpoint(client): + return ( + "foo.googleapis.com" + if ("localhost" in client.DEFAULT_ENDPOINT) + else client.DEFAULT_ENDPOINT + ) + + +# If default endpoint template is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint template so the client can produce a different +# mtls endpoint for endpoint testing purposes. 
+def modify_default_endpoint_template(client): + return ( + "test.{UNIVERSE_DOMAIN}" + if ("localhost" in client._DEFAULT_ENDPOINT_TEMPLATE) + else client._DEFAULT_ENDPOINT_TEMPLATE + ) + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert AccountIssueServiceClient._get_default_mtls_endpoint(None) is None + assert ( + AccountIssueServiceClient._get_default_mtls_endpoint(api_endpoint) + == api_mtls_endpoint + ) + assert ( + AccountIssueServiceClient._get_default_mtls_endpoint(api_mtls_endpoint) + == api_mtls_endpoint + ) + assert ( + AccountIssueServiceClient._get_default_mtls_endpoint(sandbox_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + AccountIssueServiceClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + AccountIssueServiceClient._get_default_mtls_endpoint(non_googleapi) + == non_googleapi + ) + + +def test__read_environment_variables(): + assert AccountIssueServiceClient._read_environment_variables() == ( + False, + "auto", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + assert AccountIssueServiceClient._read_environment_variables() == ( + True, + "auto", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + assert AccountIssueServiceClient._read_environment_variables() == ( + False, + "auto", + None, + ) + + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError) as excinfo: + AccountIssueServiceClient._read_environment_variables() + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + assert AccountIssueServiceClient._read_environment_variables() == ( + False, + "never", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + assert AccountIssueServiceClient._read_environment_variables() == ( + False, + "always", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}): + assert AccountIssueServiceClient._read_environment_variables() == ( + False, + "auto", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + AccountIssueServiceClient._read_environment_variables() + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + with mock.patch.dict(os.environ, {"GOOGLE_CLOUD_UNIVERSE_DOMAIN": "foo.com"}): + assert AccountIssueServiceClient._read_environment_variables() == ( + False, + "auto", + "foo.com", + ) + + +def test__get_client_cert_source(): + mock_provided_cert_source = mock.Mock() + mock_default_cert_source = mock.Mock() + + assert AccountIssueServiceClient._get_client_cert_source(None, False) is None + assert ( + AccountIssueServiceClient._get_client_cert_source( + mock_provided_cert_source, False + ) + is None + ) + assert ( + AccountIssueServiceClient._get_client_cert_source( + mock_provided_cert_source, True + ) + == mock_provided_cert_source + ) + + with mock.patch( + 
"google.auth.transport.mtls.has_default_client_cert_source", return_value=True + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=mock_default_cert_source, + ): + assert ( + AccountIssueServiceClient._get_client_cert_source(None, True) + is mock_default_cert_source + ) + assert ( + AccountIssueServiceClient._get_client_cert_source( + mock_provided_cert_source, "true" + ) + is mock_provided_cert_source + ) + + +@mock.patch.object( + AccountIssueServiceClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(AccountIssueServiceClient), +) +@mock.patch.object( + AccountIssueServiceAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(AccountIssueServiceAsyncClient), +) +def test__get_api_endpoint(): + api_override = "foo.com" + mock_client_cert_source = mock.Mock() + default_universe = AccountIssueServiceClient._DEFAULT_UNIVERSE + default_endpoint = AccountIssueServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=default_universe + ) + mock_universe = "bar.com" + mock_endpoint = AccountIssueServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=mock_universe + ) + + assert ( + AccountIssueServiceClient._get_api_endpoint( + api_override, mock_client_cert_source, default_universe, "always" + ) + == api_override + ) + assert ( + AccountIssueServiceClient._get_api_endpoint( + None, mock_client_cert_source, default_universe, "auto" + ) + == AccountIssueServiceClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + AccountIssueServiceClient._get_api_endpoint( + None, None, default_universe, "auto" + ) + == default_endpoint + ) + assert ( + AccountIssueServiceClient._get_api_endpoint( + None, None, default_universe, "always" + ) + == AccountIssueServiceClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + AccountIssueServiceClient._get_api_endpoint( + None, mock_client_cert_source, default_universe, "always" + ) + == AccountIssueServiceClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + AccountIssueServiceClient._get_api_endpoint(None, None, mock_universe, "never") + == mock_endpoint + ) + assert ( + AccountIssueServiceClient._get_api_endpoint( + None, None, default_universe, "never" + ) + == default_endpoint + ) + + with pytest.raises(MutualTLSChannelError) as excinfo: + AccountIssueServiceClient._get_api_endpoint( + None, mock_client_cert_source, mock_universe, "auto" + ) + assert ( + str(excinfo.value) + == "mTLS is not supported in any universe other than googleapis.com." + ) + + +def test__get_universe_domain(): + client_universe_domain = "foo.com" + universe_domain_env = "bar.com" + + assert ( + AccountIssueServiceClient._get_universe_domain( + client_universe_domain, universe_domain_env + ) + == client_universe_domain + ) + assert ( + AccountIssueServiceClient._get_universe_domain(None, universe_domain_env) + == universe_domain_env + ) + assert ( + AccountIssueServiceClient._get_universe_domain(None, None) + == AccountIssueServiceClient._DEFAULT_UNIVERSE + ) + + with pytest.raises(ValueError) as excinfo: + AccountIssueServiceClient._get_universe_domain("", None) + assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
+ + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + ( + AccountIssueServiceClient, + transports.AccountIssueServiceGrpcTransport, + "grpc", + ), + ( + AccountIssueServiceClient, + transports.AccountIssueServiceRestTransport, + "rest", + ), + ], +) +def test__validate_universe_domain(client_class, transport_class, transport_name): + client = client_class( + transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) + ) + assert client._validate_universe_domain() == True + + # Test the case when universe is already validated. + assert client._validate_universe_domain() == True + + if transport_name == "grpc": + # Test the case where credentials are provided by the + # `local_channel_credentials`. The default universes in both match. + channel = grpc.secure_channel( + "http://localhost/", grpc.local_channel_credentials() + ) + client = client_class(transport=transport_class(channel=channel)) + assert client._validate_universe_domain() == True + + # Test the case where credentials do not exist: e.g. a transport is provided + # with no credentials. Validation should still succeed because there is no + # mismatch with non-existent credentials. + channel = grpc.secure_channel( + "http://localhost/", grpc.local_channel_credentials() + ) + transport = transport_class(channel=channel) + transport._credentials = None + client = client_class(transport=transport) + assert client._validate_universe_domain() == True + + # TODO: This is needed to cater for older versions of google-auth + # Make this test unconditional once the minimum supported version of + # google-auth becomes 2.23.0 or higher. + google_auth_major, google_auth_minor = [ + int(part) for part in google.auth.__version__.split(".")[0:2] + ] + if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): + credentials = ga_credentials.AnonymousCredentials() + credentials._universe_domain = "foo.com" + # Test the case when there is a universe mismatch from the credentials. + client = client_class(transport=transport_class(credentials=credentials)) + with pytest.raises(ValueError) as excinfo: + client._validate_universe_domain() + assert ( + str(excinfo.value) + == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." + ) + + # Test the case when there is a universe mismatch from the client. + # + # TODO: Make this test unconditional once the minimum supported version of + # google-api-core becomes 2.15.0 or higher. + api_core_major, api_core_minor = [ + int(part) for part in api_core_version.__version__.split(".")[0:2] + ] + if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): + client = client_class( + client_options={"universe_domain": "bar.com"}, + transport=transport_class( + credentials=ga_credentials.AnonymousCredentials(), + ), + ) + with pytest.raises(ValueError) as excinfo: + client._validate_universe_domain() + assert ( + str(excinfo.value) + == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." 
+ ) + + # Test that ValueError is raised if universe_domain is provided via client options and credentials is None + with pytest.raises(ValueError): + client._compare_universes("foo.bar", None) + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (AccountIssueServiceClient, "grpc"), + (AccountIssueServiceAsyncClient, "grpc_asyncio"), + (AccountIssueServiceClient, "rest"), + ], +) +def test_account_issue_service_client_from_service_account_info( + client_class, transport_name +): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_info" + ) as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info, transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + "merchantapi.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://merchantapi.googleapis.com" + ) + + +@pytest.mark.parametrize( + "transport_class,transport_name", + [ + (transports.AccountIssueServiceGrpcTransport, "grpc"), + (transports.AccountIssueServiceGrpcAsyncIOTransport, "grpc_asyncio"), + (transports.AccountIssueServiceRestTransport, "rest"), + ], +) +def test_account_issue_service_client_service_account_always_use_jwt( + transport_class, transport_name +): + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) + + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=False) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (AccountIssueServiceClient, "grpc"), + (AccountIssueServiceAsyncClient, "grpc_asyncio"), + (AccountIssueServiceClient, "rest"), + ], +) +def test_account_issue_service_client_from_service_account_file( + client_class, transport_name +): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_file" + ) as factory: + factory.return_value = creds + client = client_class.from_service_account_file( + "dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + client = client_class.from_service_account_json( + "dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + "merchantapi.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://merchantapi.googleapis.com" + ) + + +def test_account_issue_service_client_get_transport_class(): + transport = AccountIssueServiceClient.get_transport_class() + available_transports = [ + transports.AccountIssueServiceGrpcTransport, + transports.AccountIssueServiceRestTransport, + ] + assert transport in available_transports + + transport = AccountIssueServiceClient.get_transport_class("grpc") + assert transport == 
transports.AccountIssueServiceGrpcTransport + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + ( + AccountIssueServiceClient, + transports.AccountIssueServiceGrpcTransport, + "grpc", + ), + ( + AccountIssueServiceAsyncClient, + transports.AccountIssueServiceGrpcAsyncIOTransport, + "grpc_asyncio", + ), + ( + AccountIssueServiceClient, + transports.AccountIssueServiceRestTransport, + "rest", + ), + ], +) +@mock.patch.object( + AccountIssueServiceClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(AccountIssueServiceClient), +) +@mock.patch.object( + AccountIssueServiceAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(AccountIssueServiceAsyncClient), +) +def test_account_issue_service_client_client_options( + client_class, transport_class, transport_name +): + # Check that if channel is provided we won't create a new one. + with mock.patch.object(AccountIssueServiceClient, "get_transport_class") as gtc: + transport = transport_class(credentials=ga_credentials.AnonymousCredentials()) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object(AccountIssueServiceClient, "get_transport_class") as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. + options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name, client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + client = client_class(transport=transport_name) + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError) as excinfo: + client = client_class(transport=transport_name) + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + # Check the case api_endpoint is provided + options = client_options.ClientOptions( + api_audience="https://language.googleapis.com" + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience="https://language.googleapis.com", + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,use_client_cert_env", + [ + ( + AccountIssueServiceClient, + transports.AccountIssueServiceGrpcTransport, + "grpc", + "true", + ), + ( + AccountIssueServiceAsyncClient, + transports.AccountIssueServiceGrpcAsyncIOTransport, + "grpc_asyncio", + "true", + ), + ( + AccountIssueServiceClient, + transports.AccountIssueServiceGrpcTransport, + "grpc", + "false", + ), + ( + AccountIssueServiceAsyncClient, + transports.AccountIssueServiceGrpcAsyncIOTransport, + "grpc_asyncio", + "false", + ), + ( + AccountIssueServiceClient, + transports.AccountIssueServiceRestTransport, + "rest", + "true", + ), + ( + AccountIssueServiceClient, + transports.AccountIssueServiceRestTransport, + "rest", + "false", + ), + ], +) +@mock.patch.object( + AccountIssueServiceClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(AccountIssueServiceClient), +) +@mock.patch.object( + AccountIssueServiceAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(AccountIssueServiceAsyncClient), +) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_account_issue_service_client_mtls_env_auto( + client_class, transport_class, transport_name, use_client_cert_env +): + # This tests the endpoint autoswitch behavior. 
Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. + + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + options = client_options.ClientOptions( + client_cert_source=client_cert_source_callback + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ) + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=client_cert_source_callback, + ): + if use_client_cert_env == "false": + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ) + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case client_cert_source and ADC client cert are not provided. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class", [AccountIssueServiceClient, AccountIssueServiceAsyncClient] +) +@mock.patch.object( + AccountIssueServiceClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(AccountIssueServiceClient), +) +@mock.patch.object( + AccountIssueServiceAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(AccountIssueServiceAsyncClient), +) +def test_account_issue_service_client_get_mtls_endpoint_and_cert_source(client_class): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=mock_client_cert_source, + ): + ( + api_endpoint, + cert_source, + ) = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + client_class.get_mtls_endpoint_and_cert_source() + + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError) as excinfo: + client_class.get_mtls_endpoint_and_cert_source() + + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + + +@pytest.mark.parametrize( + "client_class", [AccountIssueServiceClient, AccountIssueServiceAsyncClient] +) +@mock.patch.object( + AccountIssueServiceClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(AccountIssueServiceClient), +) +@mock.patch.object( + AccountIssueServiceAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(AccountIssueServiceAsyncClient), +) +def test_account_issue_service_client_client_api_endpoint(client_class): + mock_client_cert_source = client_cert_source_callback + api_override = "foo.com" + default_universe = AccountIssueServiceClient._DEFAULT_UNIVERSE + default_endpoint = AccountIssueServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=default_universe + ) + mock_universe = "bar.com" + mock_endpoint = AccountIssueServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=mock_universe + ) + + # If ClientOptions.api_endpoint is set and GOOGLE_API_USE_CLIENT_CERTIFICATE="true", + # use ClientOptions.api_endpoint as the api endpoint regardless. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ): + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=api_override + ) + client = client_class( + client_options=options, + credentials=ga_credentials.AnonymousCredentials(), + ) + assert client.api_endpoint == api_override + + # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="never", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + client = client_class(credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == default_endpoint + + # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="always", + # use the DEFAULT_MTLS_ENDPOINT as the api endpoint. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + client = client_class(credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + + # If ClientOptions.api_endpoint is not set, GOOGLE_API_USE_MTLS_ENDPOINT="auto" (default), + # GOOGLE_API_USE_CLIENT_CERTIFICATE="false" (default), default cert source doesn't exist, + # and ClientOptions.universe_domain="bar.com", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with universe domain as the api endpoint. + options = client_options.ClientOptions() + universe_exists = hasattr(options, "universe_domain") + if universe_exists: + options = client_options.ClientOptions(universe_domain=mock_universe) + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + else: + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + assert client.api_endpoint == ( + mock_endpoint if universe_exists else default_endpoint + ) + assert client.universe_domain == ( + mock_universe if universe_exists else default_universe + ) + + # If ClientOptions does not have a universe domain attribute and GOOGLE_API_USE_MTLS_ENDPOINT="never", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. + options = client_options.ClientOptions() + if hasattr(options, "universe_domain"): + delattr(options, "universe_domain") + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + assert client.api_endpoint == default_endpoint + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + ( + AccountIssueServiceClient, + transports.AccountIssueServiceGrpcTransport, + "grpc", + ), + ( + AccountIssueServiceAsyncClient, + transports.AccountIssueServiceGrpcAsyncIOTransport, + "grpc_asyncio", + ), + ( + AccountIssueServiceClient, + transports.AccountIssueServiceRestTransport, + "rest", + ), + ], +) +def test_account_issue_service_client_client_options_scopes( + client_class, transport_class, transport_name +): + # Check the case scopes are provided. + options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + AccountIssueServiceClient, + transports.AccountIssueServiceGrpcTransport, + "grpc", + grpc_helpers, + ), + ( + AccountIssueServiceAsyncClient, + transports.AccountIssueServiceGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + ( + AccountIssueServiceClient, + transports.AccountIssueServiceRestTransport, + "rest", + None, + ), + ], +) +def test_account_issue_service_client_client_options_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. 
+ options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +def test_account_issue_service_client_client_options_from_dict(): + with mock.patch( + "google.shopping.merchant_accounts_v1beta.services.account_issue_service.transports.AccountIssueServiceGrpcTransport.__init__" + ) as grpc_transport: + grpc_transport.return_value = None + client = AccountIssueServiceClient( + client_options={"api_endpoint": "squid.clam.whelk"} + ) + grpc_transport.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + AccountIssueServiceClient, + transports.AccountIssueServiceGrpcTransport, + "grpc", + grpc_helpers, + ), + ( + AccountIssueServiceAsyncClient, + transports.AccountIssueServiceGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + ], +) +def test_account_issue_service_client_create_channel_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. + options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # test that the credentials from file are saved and used as the credentials. 
+ with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel" + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + file_creds = ga_credentials.AnonymousCredentials() + load_creds.return_value = (file_creds, None) + adc.return_value = (creds, None) + client = client_class(client_options=options, transport=transport_name) + create_channel.assert_called_with( + "merchantapi.googleapis.com:443", + credentials=file_creds, + credentials_file=None, + quota_project_id=None, + default_scopes=("https://www.googleapis.com/auth/content",), + scopes=None, + default_host="merchantapi.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "request_type", + [ + accountissue.ListAccountIssuesRequest, + dict, + ], +) +def test_list_account_issues(request_type, transport: str = "grpc"): + client = AccountIssueServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_account_issues), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = accountissue.ListAccountIssuesResponse( + next_page_token="next_page_token_value", + ) + response = client.list_account_issues(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = accountissue.ListAccountIssuesRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListAccountIssuesPager) + assert response.next_page_token == "next_page_token_value" + + +def test_list_account_issues_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = AccountIssueServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_account_issues), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.list_account_issues() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == accountissue.ListAccountIssuesRequest() + + +def test_list_account_issues_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = AccountIssueServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. 
+ request = accountissue.ListAccountIssuesRequest( + parent="parent_value", + page_token="page_token_value", + language_code="language_code_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_account_issues), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.list_account_issues(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == accountissue.ListAccountIssuesRequest( + parent="parent_value", + page_token="page_token_value", + language_code="language_code_value", + ) + + +def test_list_account_issues_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = AccountIssueServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.list_account_issues in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.list_account_issues + ] = mock_rpc + request = {} + client.list_account_issues(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_account_issues(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_list_account_issues_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = AccountIssueServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_account_issues), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + accountissue.ListAccountIssuesResponse( + next_page_token="next_page_token_value", + ) + ) + response = await client.list_account_issues() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == accountissue.ListAccountIssuesRequest() + + +@pytest.mark.asyncio +async def test_list_account_issues_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = AccountIssueServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.list_account_issues + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[ + client._client._transport.list_account_issues + ] = mock_object + + request = {} + await client.list_account_issues(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + await client.list_account_issues(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + +@pytest.mark.asyncio +async def test_list_account_issues_async( + transport: str = "grpc_asyncio", request_type=accountissue.ListAccountIssuesRequest +): + client = AccountIssueServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_account_issues), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + accountissue.ListAccountIssuesResponse( + next_page_token="next_page_token_value", + ) + ) + response = await client.list_account_issues(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = accountissue.ListAccountIssuesRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListAccountIssuesAsyncPager) + assert response.next_page_token == "next_page_token_value" + + +@pytest.mark.asyncio +async def test_list_account_issues_async_from_dict(): + await test_list_account_issues_async(request_type=dict) + + +def test_list_account_issues_field_headers(): + client = AccountIssueServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = accountissue.ListAccountIssuesRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_account_issues), "__call__" + ) as call: + call.return_value = accountissue.ListAccountIssuesResponse() + client.list_account_issues(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_account_issues_field_headers_async(): + client = AccountIssueServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = accountissue.ListAccountIssuesRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_account_issues), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + accountissue.ListAccountIssuesResponse() + ) + await client.list_account_issues(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_list_account_issues_flattened(): + client = AccountIssueServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_account_issues), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = accountissue.ListAccountIssuesResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_account_issues( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +def test_list_account_issues_flattened_error(): + client = AccountIssueServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_account_issues( + accountissue.ListAccountIssuesRequest(), + parent="parent_value", + ) + + +@pytest.mark.asyncio +async def test_list_account_issues_flattened_async(): + client = AccountIssueServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_account_issues), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = accountissue.ListAccountIssuesResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + accountissue.ListAccountIssuesResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
+ response = await client.list_account_issues( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_list_account_issues_flattened_error_async(): + client = AccountIssueServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.list_account_issues( + accountissue.ListAccountIssuesRequest(), + parent="parent_value", + ) + + +def test_list_account_issues_pager(transport_name: str = "grpc"): + client = AccountIssueServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_account_issues), "__call__" + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + accountissue.ListAccountIssuesResponse( + account_issues=[ + accountissue.AccountIssue(), + accountissue.AccountIssue(), + accountissue.AccountIssue(), + ], + next_page_token="abc", + ), + accountissue.ListAccountIssuesResponse( + account_issues=[], + next_page_token="def", + ), + accountissue.ListAccountIssuesResponse( + account_issues=[ + accountissue.AccountIssue(), + ], + next_page_token="ghi", + ), + accountissue.ListAccountIssuesResponse( + account_issues=[ + accountissue.AccountIssue(), + accountissue.AccountIssue(), + ], + ), + RuntimeError, + ) + + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + ) + pager = client.list_account_issues(request={}) + + assert pager._metadata == expected_metadata + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, accountissue.AccountIssue) for i in results) + + +def test_list_account_issues_pages(transport_name: str = "grpc"): + client = AccountIssueServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_account_issues), "__call__" + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + accountissue.ListAccountIssuesResponse( + account_issues=[ + accountissue.AccountIssue(), + accountissue.AccountIssue(), + accountissue.AccountIssue(), + ], + next_page_token="abc", + ), + accountissue.ListAccountIssuesResponse( + account_issues=[], + next_page_token="def", + ), + accountissue.ListAccountIssuesResponse( + account_issues=[ + accountissue.AccountIssue(), + ], + next_page_token="ghi", + ), + accountissue.ListAccountIssuesResponse( + account_issues=[ + accountissue.AccountIssue(), + accountissue.AccountIssue(), + ], + ), + RuntimeError, + ) + pages = list(client.list_account_issues(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_list_account_issues_async_pager(): + client = AccountIssueServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_account_issues), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + accountissue.ListAccountIssuesResponse( + account_issues=[ + accountissue.AccountIssue(), + accountissue.AccountIssue(), + accountissue.AccountIssue(), + ], + next_page_token="abc", + ), + accountissue.ListAccountIssuesResponse( + account_issues=[], + next_page_token="def", + ), + accountissue.ListAccountIssuesResponse( + account_issues=[ + accountissue.AccountIssue(), + ], + next_page_token="ghi", + ), + accountissue.ListAccountIssuesResponse( + account_issues=[ + accountissue.AccountIssue(), + accountissue.AccountIssue(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_account_issues( + request={}, + ) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, accountissue.AccountIssue) for i in responses) + + +@pytest.mark.asyncio +async def test_list_account_issues_async_pages(): + client = AccountIssueServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_account_issues), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + accountissue.ListAccountIssuesResponse( + account_issues=[ + accountissue.AccountIssue(), + accountissue.AccountIssue(), + accountissue.AccountIssue(), + ], + next_page_token="abc", + ), + accountissue.ListAccountIssuesResponse( + account_issues=[], + next_page_token="def", + ), + accountissue.ListAccountIssuesResponse( + account_issues=[ + accountissue.AccountIssue(), + ], + next_page_token="ghi", + ), + accountissue.ListAccountIssuesResponse( + account_issues=[ + accountissue.AccountIssue(), + accountissue.AccountIssue(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_account_issues(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + accountissue.ListAccountIssuesRequest, + dict, + ], +) +def test_list_account_issues_rest(request_type): + client = AccountIssueServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "accounts/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = accountissue.ListAccountIssuesResponse( + next_page_token="next_page_token_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = accountissue.ListAccountIssuesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_account_issues(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListAccountIssuesPager) + assert response.next_page_token == "next_page_token_value" + + +def test_list_account_issues_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = AccountIssueServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.list_account_issues in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.list_account_issues + ] = mock_rpc + + request = {} + client.list_account_issues(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_account_issues(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_list_account_issues_rest_required_fields( + request_type=accountissue.ListAccountIssuesRequest, +): + transport_class = transports.AccountIssueServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_account_issues._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_account_issues._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "language_code", + "page_size", + "page_token", + "time_zone", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = AccountIssueServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. 
+ return_value = accountissue.ListAccountIssuesResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = accountissue.ListAccountIssuesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_account_issues(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_account_issues_rest_unset_required_fields(): + transport = transports.AccountIssueServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.list_account_issues._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "languageCode", + "pageSize", + "pageToken", + "timeZone", + ) + ) + & set(("parent",)) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_account_issues_rest_interceptors(null_interceptor): + transport = transports.AccountIssueServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.AccountIssueServiceRestInterceptor(), + ) + client = AccountIssueServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.AccountIssueServiceRestInterceptor, "post_list_account_issues" + ) as post, mock.patch.object( + transports.AccountIssueServiceRestInterceptor, "pre_list_account_issues" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = accountissue.ListAccountIssuesRequest.pb( + accountissue.ListAccountIssuesRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = accountissue.ListAccountIssuesResponse.to_json( + accountissue.ListAccountIssuesResponse() + ) + + request = accountissue.ListAccountIssuesRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = accountissue.ListAccountIssuesResponse() + + client.list_account_issues( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_account_issues_rest_bad_request( + transport: str = "rest", request_type=accountissue.ListAccountIssuesRequest +): + client = AccountIssueServiceClient( + 
credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "accounts/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_account_issues(request) + + +def test_list_account_issues_rest_flattened(): + client = AccountIssueServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = accountissue.ListAccountIssuesResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "accounts/sample1"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = accountissue.ListAccountIssuesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.list_account_issues(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/accounts/v1beta/{parent=accounts/*}/issues" % client.transport._host, + args[1], + ) + + +def test_list_account_issues_rest_flattened_error(transport: str = "rest"): + client = AccountIssueServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_account_issues( + accountissue.ListAccountIssuesRequest(), + parent="parent_value", + ) + + +def test_list_account_issues_rest_pager(transport: str = "rest"): + client = AccountIssueServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + accountissue.ListAccountIssuesResponse( + account_issues=[ + accountissue.AccountIssue(), + accountissue.AccountIssue(), + accountissue.AccountIssue(), + ], + next_page_token="abc", + ), + accountissue.ListAccountIssuesResponse( + account_issues=[], + next_page_token="def", + ), + accountissue.ListAccountIssuesResponse( + account_issues=[ + accountissue.AccountIssue(), + ], + next_page_token="ghi", + ), + accountissue.ListAccountIssuesResponse( + account_issues=[ + accountissue.AccountIssue(), + accountissue.AccountIssue(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + accountissue.ListAccountIssuesResponse.to_json(x) for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "accounts/sample1"} + + pager = client.list_account_issues(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, accountissue.AccountIssue) for i in results) + + pages = list(client.list_account_issues(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.AccountIssueServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = AccountIssueServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.AccountIssueServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = AccountIssueServiceClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. + transport = transports.AccountIssueServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = AccountIssueServiceClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = AccountIssueServiceClient( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.AccountIssueServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = AccountIssueServiceClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. 
+ transport = transports.AccountIssueServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = AccountIssueServiceClient(transport=transport) + assert client.transport is transport + + +def test_transport_get_channel(): + # A client may be instantiated with a custom transport instance. + transport = transports.AccountIssueServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + transport = transports.AccountIssueServiceGrpcAsyncIOTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.AccountIssueServiceGrpcTransport, + transports.AccountIssueServiceGrpcAsyncIOTransport, + transports.AccountIssueServiceRestTransport, + ], +) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "rest", + ], +) +def test_transport_kind(transport_name): + transport = AccountIssueServiceClient.get_transport_class(transport_name)( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert transport.kind == transport_name + + +def test_transport_grpc_default(): + # A client should use the gRPC transport by default. + client = AccountIssueServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert isinstance( + client.transport, + transports.AccountIssueServiceGrpcTransport, + ) + + +def test_account_issue_service_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.AccountIssueServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json", + ) + + +def test_account_issue_service_base_transport(): + # Instantiate the base transport. + with mock.patch( + "google.shopping.merchant_accounts_v1beta.services.account_issue_service.transports.AccountIssueServiceTransport.__init__" + ) as Transport: + Transport.return_value = None + transport = transports.AccountIssueServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. 
+ methods = ("list_account_issues",) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + with pytest.raises(NotImplementedError): + transport.close() + + # Catch all for all remaining methods and properties + remainder = [ + "kind", + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + + +def test_account_issue_service_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch( + "google.shopping.merchant_accounts_v1beta.services.account_issue_service.transports.AccountIssueServiceTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.AccountIssueServiceTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with( + "credentials.json", + scopes=None, + default_scopes=("https://www.googleapis.com/auth/content",), + quota_project_id="octopus", + ) + + +def test_account_issue_service_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. + with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( + "google.shopping.merchant_accounts_v1beta.services.account_issue_service.transports.AccountIssueServiceTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.AccountIssueServiceTransport() + adc.assert_called_once() + + +def test_account_issue_service_auth_adc(): + # If no credentials are provided, we should use ADC credentials. + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + AccountIssueServiceClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=("https://www.googleapis.com/auth/content",), + quota_project_id=None, + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.AccountIssueServiceGrpcTransport, + transports.AccountIssueServiceGrpcAsyncIOTransport, + ], +) +def test_account_issue_service_transport_auth_adc(transport_class): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
+ with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + adc.assert_called_once_with( + scopes=["1", "2"], + default_scopes=("https://www.googleapis.com/auth/content",), + quota_project_id="octopus", + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.AccountIssueServiceGrpcTransport, + transports.AccountIssueServiceGrpcAsyncIOTransport, + transports.AccountIssueServiceRestTransport, + ], +) +def test_account_issue_service_transport_auth_gdch_credentials(transport_class): + host = "https://language.com" + api_audience_tests = [None, "https://language2.com"] + api_audience_expect = [host, "https://language2.com"] + for t, e in zip(api_audience_tests, api_audience_expect): + with mock.patch.object(google.auth, "default", autospec=True) as adc: + gdch_mock = mock.MagicMock() + type(gdch_mock).with_gdch_audience = mock.PropertyMock( + return_value=gdch_mock + ) + adc.return_value = (gdch_mock, None) + transport_class(host=host, api_audience=t) + gdch_mock.with_gdch_audience.assert_called_once_with(e) + + +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.AccountIssueServiceGrpcTransport, grpc_helpers), + (transports.AccountIssueServiceGrpcAsyncIOTransport, grpc_helpers_async), + ], +) +def test_account_issue_service_transport_create_channel(transport_class, grpc_helpers): + # If credentials and host are not provided, the transport class should use + # ADC credentials. + with mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + + create_channel.assert_called_with( + "merchantapi.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + default_scopes=("https://www.googleapis.com/auth/content",), + scopes=["1", "2"], + default_host="merchantapi.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.AccountIssueServiceGrpcTransport, + transports.AccountIssueServiceGrpcAsyncIOTransport, + ], +) +def test_account_issue_service_grpc_transport_client_cert_source_for_mtls( + transport_class, +): + cred = ga_credentials.AnonymousCredentials() + + # Check ssl_channel_credentials is used if provided. 
+ with mock.patch.object(transport_class, "create_channel") as mock_create_channel: + mock_ssl_channel_creds = mock.Mock() + transport_class( + host="squid.clam.whelk", + credentials=cred, + ssl_channel_credentials=mock_ssl_channel_creds, + ) + mock_create_channel.assert_called_once_with( + "squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_channel_creds, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls + # is used. + with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): + with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: + transport_class( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback, + ) + expected_cert, expected_key = client_cert_source_callback() + mock_ssl_cred.assert_called_once_with( + certificate_chain=expected_cert, private_key=expected_key + ) + + +def test_account_issue_service_http_transport_client_cert_source_for_mtls(): + cred = ga_credentials.AnonymousCredentials() + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ) as mock_configure_mtls_channel: + transports.AccountIssueServiceRestTransport( + credentials=cred, client_cert_source_for_mtls=client_cert_source_callback + ) + mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + "rest", + ], +) +def test_account_issue_service_host_no_port(transport_name): + client = AccountIssueServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="merchantapi.googleapis.com" + ), + transport=transport_name, + ) + assert client.transport._host == ( + "merchantapi.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://merchantapi.googleapis.com" + ) + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + "rest", + ], +) +def test_account_issue_service_host_with_port(transport_name): + client = AccountIssueServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="merchantapi.googleapis.com:8000" + ), + transport=transport_name, + ) + assert client.transport._host == ( + "merchantapi.googleapis.com:8000" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://merchantapi.googleapis.com:8000" + ) + + +@pytest.mark.parametrize( + "transport_name", + [ + "rest", + ], +) +def test_account_issue_service_client_transport_session_collision(transport_name): + creds1 = ga_credentials.AnonymousCredentials() + creds2 = ga_credentials.AnonymousCredentials() + client1 = AccountIssueServiceClient( + credentials=creds1, + transport=transport_name, + ) + client2 = AccountIssueServiceClient( + credentials=creds2, + transport=transport_name, + ) + session1 = client1.transport.list_account_issues._session + session2 = client2.transport.list_account_issues._session + assert session1 != session2 + + +def test_account_issue_service_grpc_transport_channel(): + channel = grpc.secure_channel("http://localhost/", 
grpc.local_channel_credentials()) + + # Check that channel is used if provided. + transport = transports.AccountIssueServiceGrpcTransport( + host="squid.clam.whelk", + channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None + + +def test_account_issue_service_grpc_asyncio_transport_channel(): + channel = aio.secure_channel("http://localhost/", grpc.local_channel_credentials()) + + # Check that channel is used if provided. + transport = transports.AccountIssueServiceGrpcAsyncIOTransport( + host="squid.clam.whelk", + channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. +@pytest.mark.parametrize( + "transport_class", + [ + transports.AccountIssueServiceGrpcTransport, + transports.AccountIssueServiceGrpcAsyncIOTransport, + ], +) +def test_account_issue_service_transport_channel_mtls_with_client_cert_source( + transport_class, +): + with mock.patch( + "grpc.ssl_channel_credentials", autospec=True + ) as grpc_ssl_channel_cred: + with mock.patch.object( + transport_class, "create_channel" + ) as grpc_create_channel: + mock_ssl_cred = mock.Mock() + grpc_ssl_channel_cred.return_value = mock_ssl_cred + + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + + cred = ga_credentials.AnonymousCredentials() + with pytest.warns(DeprecationWarning): + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (cred, None) + transport = transport_class( + host="squid.clam.whelk", + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=client_cert_source_callback, + ) + adc.assert_called_once() + + grpc_ssl_channel_cred.assert_called_once_with( + certificate_chain=b"cert bytes", private_key=b"key bytes" + ) + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + assert transport._ssl_channel_credentials == mock_ssl_cred + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. 
+@pytest.mark.parametrize( + "transport_class", + [ + transports.AccountIssueServiceGrpcTransport, + transports.AccountIssueServiceGrpcAsyncIOTransport, + ], +) +def test_account_issue_service_transport_channel_mtls_with_adc(transport_class): + mock_ssl_cred = mock.Mock() + with mock.patch.multiple( + "google.auth.transport.grpc.SslCredentials", + __init__=mock.Mock(return_value=None), + ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), + ): + with mock.patch.object( + transport_class, "create_channel" + ) as grpc_create_channel: + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + mock_cred = mock.Mock() + + with pytest.warns(DeprecationWarning): + transport = transport_class( + host="squid.clam.whelk", + credentials=mock_cred, + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=None, + ) + + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=mock_cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + + +def test_account_path(): + account = "squid" + expected = "accounts/{account}".format( + account=account, + ) + actual = AccountIssueServiceClient.account_path(account) + assert expected == actual + + +def test_parse_account_path(): + expected = { + "account": "clam", + } + path = AccountIssueServiceClient.account_path(**expected) + + # Check that the path construction is reversible. + actual = AccountIssueServiceClient.parse_account_path(path) + assert expected == actual + + +def test_account_issue_path(): + account = "whelk" + issue = "octopus" + expected = "accounts/{account}/issues/{issue}".format( + account=account, + issue=issue, + ) + actual = AccountIssueServiceClient.account_issue_path(account, issue) + assert expected == actual + + +def test_parse_account_issue_path(): + expected = { + "account": "oyster", + "issue": "nudibranch", + } + path = AccountIssueServiceClient.account_issue_path(**expected) + + # Check that the path construction is reversible. + actual = AccountIssueServiceClient.parse_account_issue_path(path) + assert expected == actual + + +def test_common_billing_account_path(): + billing_account = "cuttlefish" + expected = "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + actual = AccountIssueServiceClient.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "mussel", + } + path = AccountIssueServiceClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. + actual = AccountIssueServiceClient.parse_common_billing_account_path(path) + assert expected == actual + + +def test_common_folder_path(): + folder = "winkle" + expected = "folders/{folder}".format( + folder=folder, + ) + actual = AccountIssueServiceClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "nautilus", + } + path = AccountIssueServiceClient.common_folder_path(**expected) + + # Check that the path construction is reversible. 
+ actual = AccountIssueServiceClient.parse_common_folder_path(path) + assert expected == actual + + +def test_common_organization_path(): + organization = "scallop" + expected = "organizations/{organization}".format( + organization=organization, + ) + actual = AccountIssueServiceClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "abalone", + } + path = AccountIssueServiceClient.common_organization_path(**expected) + + # Check that the path construction is reversible. + actual = AccountIssueServiceClient.parse_common_organization_path(path) + assert expected == actual + + +def test_common_project_path(): + project = "squid" + expected = "projects/{project}".format( + project=project, + ) + actual = AccountIssueServiceClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "clam", + } + path = AccountIssueServiceClient.common_project_path(**expected) + + # Check that the path construction is reversible. + actual = AccountIssueServiceClient.parse_common_project_path(path) + assert expected == actual + + +def test_common_location_path(): + project = "whelk" + location = "octopus" + expected = "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) + actual = AccountIssueServiceClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "oyster", + "location": "nudibranch", + } + path = AccountIssueServiceClient.common_location_path(**expected) + + # Check that the path construction is reversible. + actual = AccountIssueServiceClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object( + transports.AccountIssueServiceTransport, "_prep_wrapped_messages" + ) as prep: + client = AccountIssueServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object( + transports.AccountIssueServiceTransport, "_prep_wrapped_messages" + ) as prep: + transport_class = AccountIssueServiceClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + +@pytest.mark.asyncio +async def test_transport_close_async(): + client = AccountIssueServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + with mock.patch.object( + type(getattr(client.transport, "grpc_channel")), "close" + ) as close: + async with client: + close.assert_not_called() + close.assert_called_once() + + +def test_transport_close(): + transports = { + "rest": "_session", + "grpc": "_grpc_channel", + } + + for transport, close_name in transports.items(): + client = AccountIssueServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + with mock.patch.object( + type(getattr(client.transport, close_name)), "close" + ) as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +def test_client_ctx(): + transports = [ + "rest", + "grpc", + ] + for transport in transports: + client = AccountIssueServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + 
) + # Test client calls underlying transport. + with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() + + +@pytest.mark.parametrize( + "client_class,transport_class", + [ + (AccountIssueServiceClient, transports.AccountIssueServiceGrpcTransport), + ( + AccountIssueServiceAsyncClient, + transports.AccountIssueServiceGrpcAsyncIOTransport, + ), + ], +) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) diff --git a/packages/google-shopping-merchant-accounts/tests/unit/gapic/merchant_accounts_v1beta/test_account_tax_service.py b/packages/google-shopping-merchant-accounts/tests/unit/gapic/merchant_accounts_v1beta/test_account_tax_service.py new file mode 100644 index 000000000000..244333ef0f89 --- /dev/null +++ b/packages/google-shopping-merchant-accounts/tests/unit/gapic/merchant_accounts_v1beta/test_account_tax_service.py @@ -0,0 +1,4352 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import os + +# try/except added for compatibility with python < 3.8 +try: + from unittest import mock + from unittest.mock import AsyncMock # pragma: NO COVER +except ImportError: # pragma: NO COVER + import mock + +from collections.abc import Iterable +import json +import math + +from google.api_core import gapic_v1, grpc_helpers, grpc_helpers_async, path_template +from google.api_core import api_core_version, client_options +from google.api_core import exceptions as core_exceptions +import google.auth +from google.auth import credentials as ga_credentials +from google.auth.exceptions import MutualTLSChannelError +from google.oauth2 import service_account +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import json_format +from google.protobuf import timestamp_pb2 # type: ignore +from google.type import interval_pb2 # type: ignore +import grpc +from grpc.experimental import aio +from proto.marshal.rules import wrappers +from proto.marshal.rules.dates import DurationRule, TimestampRule +import pytest +from requests import PreparedRequest, Request, Response +from requests.sessions import Session + +from google.shopping.merchant_accounts_v1beta.services.account_tax_service import ( + AccountTaxServiceAsyncClient, + AccountTaxServiceClient, + pagers, + transports, +) +from google.shopping.merchant_accounts_v1beta.types import ( + account_tax as gsma_account_tax, +) +from google.shopping.merchant_accounts_v1beta.types import account_tax +from google.shopping.merchant_accounts_v1beta.types import tax_rule + + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + + +# If default endpoint is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. +def modify_default_endpoint(client): + return ( + "foo.googleapis.com" + if ("localhost" in client.DEFAULT_ENDPOINT) + else client.DEFAULT_ENDPOINT + ) + + +# If default endpoint template is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint template so the client can produce a different +# mtls endpoint for endpoint testing purposes. 
+def modify_default_endpoint_template(client): + return ( + "test.{UNIVERSE_DOMAIN}" + if ("localhost" in client._DEFAULT_ENDPOINT_TEMPLATE) + else client._DEFAULT_ENDPOINT_TEMPLATE + ) + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert AccountTaxServiceClient._get_default_mtls_endpoint(None) is None + assert ( + AccountTaxServiceClient._get_default_mtls_endpoint(api_endpoint) + == api_mtls_endpoint + ) + assert ( + AccountTaxServiceClient._get_default_mtls_endpoint(api_mtls_endpoint) + == api_mtls_endpoint + ) + assert ( + AccountTaxServiceClient._get_default_mtls_endpoint(sandbox_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + AccountTaxServiceClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + AccountTaxServiceClient._get_default_mtls_endpoint(non_googleapi) + == non_googleapi + ) + + +def test__read_environment_variables(): + assert AccountTaxServiceClient._read_environment_variables() == ( + False, + "auto", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + assert AccountTaxServiceClient._read_environment_variables() == ( + True, + "auto", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + assert AccountTaxServiceClient._read_environment_variables() == ( + False, + "auto", + None, + ) + + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError) as excinfo: + AccountTaxServiceClient._read_environment_variables() + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + assert AccountTaxServiceClient._read_environment_variables() == ( + False, + "never", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + assert AccountTaxServiceClient._read_environment_variables() == ( + False, + "always", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}): + assert AccountTaxServiceClient._read_environment_variables() == ( + False, + "auto", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + AccountTaxServiceClient._read_environment_variables() + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + with mock.patch.dict(os.environ, {"GOOGLE_CLOUD_UNIVERSE_DOMAIN": "foo.com"}): + assert AccountTaxServiceClient._read_environment_variables() == ( + False, + "auto", + "foo.com", + ) + + +def test__get_client_cert_source(): + mock_provided_cert_source = mock.Mock() + mock_default_cert_source = mock.Mock() + + assert AccountTaxServiceClient._get_client_cert_source(None, False) is None + assert ( + AccountTaxServiceClient._get_client_cert_source( + mock_provided_cert_source, False + ) + is None + ) + assert ( + AccountTaxServiceClient._get_client_cert_source(mock_provided_cert_source, True) + == mock_provided_cert_source + ) + + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", 
return_value=True + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=mock_default_cert_source, + ): + assert ( + AccountTaxServiceClient._get_client_cert_source(None, True) + is mock_default_cert_source + ) + assert ( + AccountTaxServiceClient._get_client_cert_source( + mock_provided_cert_source, "true" + ) + is mock_provided_cert_source + ) + + +@mock.patch.object( + AccountTaxServiceClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(AccountTaxServiceClient), +) +@mock.patch.object( + AccountTaxServiceAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(AccountTaxServiceAsyncClient), +) +def test__get_api_endpoint(): + api_override = "foo.com" + mock_client_cert_source = mock.Mock() + default_universe = AccountTaxServiceClient._DEFAULT_UNIVERSE + default_endpoint = AccountTaxServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=default_universe + ) + mock_universe = "bar.com" + mock_endpoint = AccountTaxServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=mock_universe + ) + + assert ( + AccountTaxServiceClient._get_api_endpoint( + api_override, mock_client_cert_source, default_universe, "always" + ) + == api_override + ) + assert ( + AccountTaxServiceClient._get_api_endpoint( + None, mock_client_cert_source, default_universe, "auto" + ) + == AccountTaxServiceClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + AccountTaxServiceClient._get_api_endpoint(None, None, default_universe, "auto") + == default_endpoint + ) + assert ( + AccountTaxServiceClient._get_api_endpoint( + None, None, default_universe, "always" + ) + == AccountTaxServiceClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + AccountTaxServiceClient._get_api_endpoint( + None, mock_client_cert_source, default_universe, "always" + ) + == AccountTaxServiceClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + AccountTaxServiceClient._get_api_endpoint(None, None, mock_universe, "never") + == mock_endpoint + ) + assert ( + AccountTaxServiceClient._get_api_endpoint(None, None, default_universe, "never") + == default_endpoint + ) + + with pytest.raises(MutualTLSChannelError) as excinfo: + AccountTaxServiceClient._get_api_endpoint( + None, mock_client_cert_source, mock_universe, "auto" + ) + assert ( + str(excinfo.value) + == "mTLS is not supported in any universe other than googleapis.com." + ) + + +def test__get_universe_domain(): + client_universe_domain = "foo.com" + universe_domain_env = "bar.com" + + assert ( + AccountTaxServiceClient._get_universe_domain( + client_universe_domain, universe_domain_env + ) + == client_universe_domain + ) + assert ( + AccountTaxServiceClient._get_universe_domain(None, universe_domain_env) + == universe_domain_env + ) + assert ( + AccountTaxServiceClient._get_universe_domain(None, None) + == AccountTaxServiceClient._DEFAULT_UNIVERSE + ) + + with pytest.raises(ValueError) as excinfo: + AccountTaxServiceClient._get_universe_domain("", None) + assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
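The assertions above encode a simple precedence for resolving the universe domain: an explicit client value wins, then the environment value, then the `googleapis.com` default, with an empty client value rejected. A minimal sketch of that rule follows; `resolve_universe_domain` is a hypothetical stand-in used only for illustration, not part of the generated client surface.

```python
# Minimal sketch of the precedence asserted by test__get_universe_domain.
# `resolve_universe_domain` is a hypothetical helper, not generated API surface.
def resolve_universe_domain(client_value, env_value, default="googleapis.com"):
    if client_value == "":
        raise ValueError("Universe Domain cannot be an empty string.")
    return client_value or env_value or default


assert resolve_universe_domain("foo.com", "bar.com") == "foo.com"  # client option wins
assert resolve_universe_domain(None, "bar.com") == "bar.com"       # falls back to env value
assert resolve_universe_domain(None, None) == "googleapis.com"     # library default
```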
+ + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (AccountTaxServiceClient, transports.AccountTaxServiceGrpcTransport, "grpc"), + (AccountTaxServiceClient, transports.AccountTaxServiceRestTransport, "rest"), + ], +) +def test__validate_universe_domain(client_class, transport_class, transport_name): + client = client_class( + transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) + ) + assert client._validate_universe_domain() == True + + # Test the case when universe is already validated. + assert client._validate_universe_domain() == True + + if transport_name == "grpc": + # Test the case where credentials are provided by the + # `local_channel_credentials`. The default universes in both match. + channel = grpc.secure_channel( + "http://localhost/", grpc.local_channel_credentials() + ) + client = client_class(transport=transport_class(channel=channel)) + assert client._validate_universe_domain() == True + + # Test the case where credentials do not exist: e.g. a transport is provided + # with no credentials. Validation should still succeed because there is no + # mismatch with non-existent credentials. + channel = grpc.secure_channel( + "http://localhost/", grpc.local_channel_credentials() + ) + transport = transport_class(channel=channel) + transport._credentials = None + client = client_class(transport=transport) + assert client._validate_universe_domain() == True + + # TODO: This is needed to cater for older versions of google-auth + # Make this test unconditional once the minimum supported version of + # google-auth becomes 2.23.0 or higher. + google_auth_major, google_auth_minor = [ + int(part) for part in google.auth.__version__.split(".")[0:2] + ] + if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): + credentials = ga_credentials.AnonymousCredentials() + credentials._universe_domain = "foo.com" + # Test the case when there is a universe mismatch from the credentials. + client = client_class(transport=transport_class(credentials=credentials)) + with pytest.raises(ValueError) as excinfo: + client._validate_universe_domain() + assert ( + str(excinfo.value) + == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." + ) + + # Test the case when there is a universe mismatch from the client. + # + # TODO: Make this test unconditional once the minimum supported version of + # google-api-core becomes 2.15.0 or higher. + api_core_major, api_core_minor = [ + int(part) for part in api_core_version.__version__.split(".")[0:2] + ] + if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): + client = client_class( + client_options={"universe_domain": "bar.com"}, + transport=transport_class( + credentials=ga_credentials.AnonymousCredentials(), + ), + ) + with pytest.raises(ValueError) as excinfo: + client._validate_universe_domain() + assert ( + str(excinfo.value) + == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." 
+ ) + + # Test that ValueError is raised if universe_domain is provided via client options and credentials is None + with pytest.raises(ValueError): + client._compare_universes("foo.bar", None) + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (AccountTaxServiceClient, "grpc"), + (AccountTaxServiceAsyncClient, "grpc_asyncio"), + (AccountTaxServiceClient, "rest"), + ], +) +def test_account_tax_service_client_from_service_account_info( + client_class, transport_name +): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_info" + ) as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info, transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + "merchantapi.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://merchantapi.googleapis.com" + ) + + +@pytest.mark.parametrize( + "transport_class,transport_name", + [ + (transports.AccountTaxServiceGrpcTransport, "grpc"), + (transports.AccountTaxServiceGrpcAsyncIOTransport, "grpc_asyncio"), + (transports.AccountTaxServiceRestTransport, "rest"), + ], +) +def test_account_tax_service_client_service_account_always_use_jwt( + transport_class, transport_name +): + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) + + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=False) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (AccountTaxServiceClient, "grpc"), + (AccountTaxServiceAsyncClient, "grpc_asyncio"), + (AccountTaxServiceClient, "rest"), + ], +) +def test_account_tax_service_client_from_service_account_file( + client_class, transport_name +): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_file" + ) as factory: + factory.return_value = creds + client = client_class.from_service_account_file( + "dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + client = client_class.from_service_account_json( + "dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + "merchantapi.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://merchantapi.googleapis.com" + ) + + +def test_account_tax_service_client_get_transport_class(): + transport = AccountTaxServiceClient.get_transport_class() + available_transports = [ + transports.AccountTaxServiceGrpcTransport, + transports.AccountTaxServiceRestTransport, + ] + assert transport in available_transports + + transport = AccountTaxServiceClient.get_transport_class("grpc") + assert transport == transports.AccountTaxServiceGrpcTransport + + 
+@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (AccountTaxServiceClient, transports.AccountTaxServiceGrpcTransport, "grpc"), + ( + AccountTaxServiceAsyncClient, + transports.AccountTaxServiceGrpcAsyncIOTransport, + "grpc_asyncio", + ), + (AccountTaxServiceClient, transports.AccountTaxServiceRestTransport, "rest"), + ], +) +@mock.patch.object( + AccountTaxServiceClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(AccountTaxServiceClient), +) +@mock.patch.object( + AccountTaxServiceAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(AccountTaxServiceAsyncClient), +) +def test_account_tax_service_client_client_options( + client_class, transport_class, transport_name +): + # Check that if channel is provided we won't create a new one. + with mock.patch.object(AccountTaxServiceClient, "get_transport_class") as gtc: + transport = transport_class(credentials=ga_credentials.AnonymousCredentials()) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object(AccountTaxServiceClient, "get_transport_class") as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. + options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name, client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + client = client_class(transport=transport_name) + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError) as excinfo: + client = client_class(transport=transport_name) + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + # Check the case api_endpoint is provided + options = client_options.ClientOptions( + api_audience="https://language.googleapis.com" + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience="https://language.googleapis.com", + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,use_client_cert_env", + [ + ( + AccountTaxServiceClient, + transports.AccountTaxServiceGrpcTransport, + "grpc", + "true", + ), + ( + AccountTaxServiceAsyncClient, + transports.AccountTaxServiceGrpcAsyncIOTransport, + "grpc_asyncio", + "true", + ), + ( + AccountTaxServiceClient, + transports.AccountTaxServiceGrpcTransport, + "grpc", + "false", + ), + ( + AccountTaxServiceAsyncClient, + transports.AccountTaxServiceGrpcAsyncIOTransport, + "grpc_asyncio", + "false", + ), + ( + AccountTaxServiceClient, + transports.AccountTaxServiceRestTransport, + "rest", + "true", + ), + ( + AccountTaxServiceClient, + transports.AccountTaxServiceRestTransport, + "rest", + "false", + ), + ], +) +@mock.patch.object( + AccountTaxServiceClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(AccountTaxServiceClient), +) +@mock.patch.object( + AccountTaxServiceAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(AccountTaxServiceAsyncClient), +) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_account_tax_service_client_mtls_env_auto( + client_class, transport_class, transport_name, use_client_cert_env +): + # This tests the endpoint autoswitch behavior. 
Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. + + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + options = client_options.ClientOptions( + client_cert_source=client_cert_source_callback + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ) + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=client_cert_source_callback, + ): + if use_client_cert_env == "false": + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ) + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case client_cert_source and ADC client cert are not provided. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class", [AccountTaxServiceClient, AccountTaxServiceAsyncClient] +) +@mock.patch.object( + AccountTaxServiceClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(AccountTaxServiceClient), +) +@mock.patch.object( + AccountTaxServiceAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(AccountTaxServiceAsyncClient), +) +def test_account_tax_service_client_get_mtls_endpoint_and_cert_source(client_class): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=mock_client_cert_source, + ): + ( + api_endpoint, + cert_source, + ) = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + client_class.get_mtls_endpoint_and_cert_source() + + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError) as excinfo: + client_class.get_mtls_endpoint_and_cert_source() + + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + + +@pytest.mark.parametrize( + "client_class", [AccountTaxServiceClient, AccountTaxServiceAsyncClient] +) +@mock.patch.object( + AccountTaxServiceClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(AccountTaxServiceClient), +) +@mock.patch.object( + AccountTaxServiceAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(AccountTaxServiceAsyncClient), +) +def test_account_tax_service_client_client_api_endpoint(client_class): + mock_client_cert_source = client_cert_source_callback + api_override = "foo.com" + default_universe = AccountTaxServiceClient._DEFAULT_UNIVERSE + default_endpoint = AccountTaxServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=default_universe + ) + mock_universe = "bar.com" + mock_endpoint = AccountTaxServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=mock_universe + ) + + # If ClientOptions.api_endpoint is set and GOOGLE_API_USE_CLIENT_CERTIFICATE="true", + # use ClientOptions.api_endpoint as the api endpoint regardless. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ): + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=api_override + ) + client = client_class( + client_options=options, + credentials=ga_credentials.AnonymousCredentials(), + ) + assert client.api_endpoint == api_override + + # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="never", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + client = client_class(credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == default_endpoint + + # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="always", + # use the DEFAULT_MTLS_ENDPOINT as the api endpoint. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + client = client_class(credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + + # If ClientOptions.api_endpoint is not set, GOOGLE_API_USE_MTLS_ENDPOINT="auto" (default), + # GOOGLE_API_USE_CLIENT_CERTIFICATE="false" (default), default cert source doesn't exist, + # and ClientOptions.universe_domain="bar.com", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with universe domain as the api endpoint. + options = client_options.ClientOptions() + universe_exists = hasattr(options, "universe_domain") + if universe_exists: + options = client_options.ClientOptions(universe_domain=mock_universe) + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + else: + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + assert client.api_endpoint == ( + mock_endpoint if universe_exists else default_endpoint + ) + assert client.universe_domain == ( + mock_universe if universe_exists else default_universe + ) + + # If ClientOptions does not have a universe domain attribute and GOOGLE_API_USE_MTLS_ENDPOINT="never", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. + options = client_options.ClientOptions() + if hasattr(options, "universe_domain"): + delattr(options, "universe_domain") + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + assert client.api_endpoint == default_endpoint + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (AccountTaxServiceClient, transports.AccountTaxServiceGrpcTransport, "grpc"), + ( + AccountTaxServiceAsyncClient, + transports.AccountTaxServiceGrpcAsyncIOTransport, + "grpc_asyncio", + ), + (AccountTaxServiceClient, transports.AccountTaxServiceRestTransport, "rest"), + ], +) +def test_account_tax_service_client_client_options_scopes( + client_class, transport_class, transport_name +): + # Check the case scopes are provided. + options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + AccountTaxServiceClient, + transports.AccountTaxServiceGrpcTransport, + "grpc", + grpc_helpers, + ), + ( + AccountTaxServiceAsyncClient, + transports.AccountTaxServiceGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + ( + AccountTaxServiceClient, + transports.AccountTaxServiceRestTransport, + "rest", + None, + ), + ], +) +def test_account_tax_service_client_client_options_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. 
+ options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +def test_account_tax_service_client_client_options_from_dict(): + with mock.patch( + "google.shopping.merchant_accounts_v1beta.services.account_tax_service.transports.AccountTaxServiceGrpcTransport.__init__" + ) as grpc_transport: + grpc_transport.return_value = None + client = AccountTaxServiceClient( + client_options={"api_endpoint": "squid.clam.whelk"} + ) + grpc_transport.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + AccountTaxServiceClient, + transports.AccountTaxServiceGrpcTransport, + "grpc", + grpc_helpers, + ), + ( + AccountTaxServiceAsyncClient, + transports.AccountTaxServiceGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + ], +) +def test_account_tax_service_client_create_channel_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. + options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # test that the credentials from file are saved and used as the credentials. 
+ with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel" + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + file_creds = ga_credentials.AnonymousCredentials() + load_creds.return_value = (file_creds, None) + adc.return_value = (creds, None) + client = client_class(client_options=options, transport=transport_name) + create_channel.assert_called_with( + "merchantapi.googleapis.com:443", + credentials=file_creds, + credentials_file=None, + quota_project_id=None, + default_scopes=("https://www.googleapis.com/auth/content",), + scopes=None, + default_host="merchantapi.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "request_type", + [ + account_tax.GetAccountTaxRequest, + dict, + ], +) +def test_get_account_tax(request_type, transport: str = "grpc"): + client = AccountTaxServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_account_tax), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = account_tax.AccountTax( + name="name_value", + account=749, + ) + response = client.get_account_tax(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = account_tax.GetAccountTaxRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, account_tax.AccountTax) + assert response.name == "name_value" + assert response.account == 749 + + +def test_get_account_tax_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = AccountTaxServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_account_tax), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.get_account_tax() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == account_tax.GetAccountTaxRequest() + + +def test_get_account_tax_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = AccountTaxServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = account_tax.GetAccountTaxRequest( + name="name_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.get_account_tax), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.get_account_tax(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == account_tax.GetAccountTaxRequest( + name="name_value", + ) + + +def test_get_account_tax_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = AccountTaxServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_account_tax in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get_account_tax] = mock_rpc + request = {} + client.get_account_tax(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.get_account_tax(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_get_account_tax_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = AccountTaxServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_account_tax), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + account_tax.AccountTax( + name="name_value", + account=749, + ) + ) + response = await client.get_account_tax() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == account_tax.GetAccountTaxRequest() + + +@pytest.mark.asyncio +async def test_get_account_tax_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = AccountTaxServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.get_account_tax + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[ + client._client._transport.get_account_tax + ] = mock_object + + request = {} + await client.get_account_tax(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_object.call_count == 1 + + await client.get_account_tax(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + +@pytest.mark.asyncio +async def test_get_account_tax_async( + transport: str = "grpc_asyncio", request_type=account_tax.GetAccountTaxRequest +): + client = AccountTaxServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_account_tax), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + account_tax.AccountTax( + name="name_value", + account=749, + ) + ) + response = await client.get_account_tax(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = account_tax.GetAccountTaxRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, account_tax.AccountTax) + assert response.name == "name_value" + assert response.account == 749 + + +@pytest.mark.asyncio +async def test_get_account_tax_async_from_dict(): + await test_get_account_tax_async(request_type=dict) + + +def test_get_account_tax_field_headers(): + client = AccountTaxServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = account_tax.GetAccountTaxRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_account_tax), "__call__") as call: + call.return_value = account_tax.AccountTax() + client.get_account_tax(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_account_tax_field_headers_async(): + client = AccountTaxServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = account_tax.GetAccountTaxRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_account_tax), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + account_tax.AccountTax() + ) + await client.get_account_tax(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_get_account_tax_flattened(): + client = AccountTaxServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_account_tax), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = account_tax.AccountTax() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_account_tax( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_get_account_tax_flattened_error(): + client = AccountTaxServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_account_tax( + account_tax.GetAccountTaxRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_get_account_tax_flattened_async(): + client = AccountTaxServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_account_tax), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = account_tax.AccountTax() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + account_tax.AccountTax() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_account_tax( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_get_account_tax_flattened_error_async(): + client = AccountTaxServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.get_account_tax( + account_tax.GetAccountTaxRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + account_tax.ListAccountTaxRequest, + dict, + ], +) +def test_list_account_tax(request_type, transport: str = "grpc"): + client = AccountTaxServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_account_tax), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = account_tax.ListAccountTaxResponse( + next_page_token="next_page_token_value", + ) + response = client.list_account_tax(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = account_tax.ListAccountTaxRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListAccountTaxPager) + assert response.next_page_token == "next_page_token_value" + + +def test_list_account_tax_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = AccountTaxServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_account_tax), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.list_account_tax() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == account_tax.ListAccountTaxRequest() + + +def test_list_account_tax_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = AccountTaxServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = account_tax.ListAccountTaxRequest( + parent="parent_value", + page_token="page_token_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_account_tax), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.list_account_tax(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == account_tax.ListAccountTaxRequest( + parent="parent_value", + page_token="page_token_value", + ) + + +def test_list_account_tax_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = AccountTaxServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_account_tax in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.list_account_tax + ] = mock_rpc + request = {} + client.list_account_tax(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_account_tax(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_list_account_tax_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. 
request == None and no flattened fields passed, work. + client = AccountTaxServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_account_tax), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + account_tax.ListAccountTaxResponse( + next_page_token="next_page_token_value", + ) + ) + response = await client.list_account_tax() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == account_tax.ListAccountTaxRequest() + + +@pytest.mark.asyncio +async def test_list_account_tax_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = AccountTaxServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.list_account_tax + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[ + client._client._transport.list_account_tax + ] = mock_object + + request = {} + await client.list_account_tax(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + await client.list_account_tax(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + +@pytest.mark.asyncio +async def test_list_account_tax_async( + transport: str = "grpc_asyncio", request_type=account_tax.ListAccountTaxRequest +): + client = AccountTaxServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_account_tax), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + account_tax.ListAccountTaxResponse( + next_page_token="next_page_token_value", + ) + ) + response = await client.list_account_tax(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = account_tax.ListAccountTaxRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListAccountTaxAsyncPager) + assert response.next_page_token == "next_page_token_value" + + +@pytest.mark.asyncio +async def test_list_account_tax_async_from_dict(): + await test_list_account_tax_async(request_type=dict) + + +def test_list_account_tax_field_headers(): + client = AccountTaxServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = account_tax.ListAccountTaxRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_account_tax), "__call__") as call: + call.return_value = account_tax.ListAccountTaxResponse() + client.list_account_tax(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_account_tax_field_headers_async(): + client = AccountTaxServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = account_tax.ListAccountTaxRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_account_tax), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + account_tax.ListAccountTaxResponse() + ) + await client.list_account_tax(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_list_account_tax_flattened(): + client = AccountTaxServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_account_tax), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = account_tax.ListAccountTaxResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_account_tax( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +def test_list_account_tax_flattened_error(): + client = AccountTaxServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.list_account_tax( + account_tax.ListAccountTaxRequest(), + parent="parent_value", + ) + + +@pytest.mark.asyncio +async def test_list_account_tax_flattened_async(): + client = AccountTaxServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_account_tax), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = account_tax.ListAccountTaxResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + account_tax.ListAccountTaxResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_account_tax( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_list_account_tax_flattened_error_async(): + client = AccountTaxServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.list_account_tax( + account_tax.ListAccountTaxRequest(), + parent="parent_value", + ) + + +def test_list_account_tax_pager(transport_name: str = "grpc"): + client = AccountTaxServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_account_tax), "__call__") as call: + # Set the response to a series of pages. + call.side_effect = ( + account_tax.ListAccountTaxResponse( + account_taxes=[ + account_tax.AccountTax(), + account_tax.AccountTax(), + account_tax.AccountTax(), + ], + next_page_token="abc", + ), + account_tax.ListAccountTaxResponse( + account_taxes=[], + next_page_token="def", + ), + account_tax.ListAccountTaxResponse( + account_taxes=[ + account_tax.AccountTax(), + ], + next_page_token="ghi", + ), + account_tax.ListAccountTaxResponse( + account_taxes=[ + account_tax.AccountTax(), + account_tax.AccountTax(), + ], + ), + RuntimeError, + ) + + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + ) + pager = client.list_account_tax(request={}) + + assert pager._metadata == expected_metadata + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, account_tax.AccountTax) for i in results) + + +def test_list_account_tax_pages(transport_name: str = "grpc"): + client = AccountTaxServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_account_tax), "__call__") as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + account_tax.ListAccountTaxResponse( + account_taxes=[ + account_tax.AccountTax(), + account_tax.AccountTax(), + account_tax.AccountTax(), + ], + next_page_token="abc", + ), + account_tax.ListAccountTaxResponse( + account_taxes=[], + next_page_token="def", + ), + account_tax.ListAccountTaxResponse( + account_taxes=[ + account_tax.AccountTax(), + ], + next_page_token="ghi", + ), + account_tax.ListAccountTaxResponse( + account_taxes=[ + account_tax.AccountTax(), + account_tax.AccountTax(), + ], + ), + RuntimeError, + ) + pages = list(client.list_account_tax(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_list_account_tax_async_pager(): + client = AccountTaxServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_account_tax), "__call__", new_callable=mock.AsyncMock + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + account_tax.ListAccountTaxResponse( + account_taxes=[ + account_tax.AccountTax(), + account_tax.AccountTax(), + account_tax.AccountTax(), + ], + next_page_token="abc", + ), + account_tax.ListAccountTaxResponse( + account_taxes=[], + next_page_token="def", + ), + account_tax.ListAccountTaxResponse( + account_taxes=[ + account_tax.AccountTax(), + ], + next_page_token="ghi", + ), + account_tax.ListAccountTaxResponse( + account_taxes=[ + account_tax.AccountTax(), + account_tax.AccountTax(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_account_tax( + request={}, + ) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, account_tax.AccountTax) for i in responses) + + +@pytest.mark.asyncio +async def test_list_account_tax_async_pages(): + client = AccountTaxServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_account_tax), "__call__", new_callable=mock.AsyncMock + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + account_tax.ListAccountTaxResponse( + account_taxes=[ + account_tax.AccountTax(), + account_tax.AccountTax(), + account_tax.AccountTax(), + ], + next_page_token="abc", + ), + account_tax.ListAccountTaxResponse( + account_taxes=[], + next_page_token="def", + ), + account_tax.ListAccountTaxResponse( + account_taxes=[ + account_tax.AccountTax(), + ], + next_page_token="ghi", + ), + account_tax.ListAccountTaxResponse( + account_taxes=[ + account_tax.AccountTax(), + account_tax.AccountTax(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_account_tax(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + gsma_account_tax.UpdateAccountTaxRequest, + dict, + ], +) +def test_update_account_tax(request_type, transport: str = "grpc"): + client = AccountTaxServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_account_tax), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = gsma_account_tax.AccountTax( + name="name_value", + account=749, + ) + response = client.update_account_tax(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = gsma_account_tax.UpdateAccountTaxRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, gsma_account_tax.AccountTax) + assert response.name == "name_value" + assert response.account == 749 + + +def test_update_account_tax_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = AccountTaxServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_account_tax), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.update_account_tax() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == gsma_account_tax.UpdateAccountTaxRequest() + + +def test_update_account_tax_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = AccountTaxServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. 
+ request = gsma_account_tax.UpdateAccountTaxRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_account_tax), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.update_account_tax(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == gsma_account_tax.UpdateAccountTaxRequest() + + +def test_update_account_tax_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = AccountTaxServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.update_account_tax in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.update_account_tax + ] = mock_rpc + request = {} + client.update_account_tax(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.update_account_tax(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_update_account_tax_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = AccountTaxServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_account_tax), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + gsma_account_tax.AccountTax( + name="name_value", + account=749, + ) + ) + response = await client.update_account_tax() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == gsma_account_tax.UpdateAccountTaxRequest() + + +@pytest.mark.asyncio +async def test_update_account_tax_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = AccountTaxServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.update_account_tax + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[ + client._client._transport.update_account_tax + ] = mock_object + + request = {} + await client.update_account_tax(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + await client.update_account_tax(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + +@pytest.mark.asyncio +async def test_update_account_tax_async( + transport: str = "grpc_asyncio", + request_type=gsma_account_tax.UpdateAccountTaxRequest, +): + client = AccountTaxServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_account_tax), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + gsma_account_tax.AccountTax( + name="name_value", + account=749, + ) + ) + response = await client.update_account_tax(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = gsma_account_tax.UpdateAccountTaxRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, gsma_account_tax.AccountTax) + assert response.name == "name_value" + assert response.account == 749 + + +@pytest.mark.asyncio +async def test_update_account_tax_async_from_dict(): + await test_update_account_tax_async(request_type=dict) + + +def test_update_account_tax_field_headers(): + client = AccountTaxServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = gsma_account_tax.UpdateAccountTaxRequest() + + request.account_tax.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.update_account_tax), "__call__" + ) as call: + call.return_value = gsma_account_tax.AccountTax() + client.update_account_tax(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "account_tax.name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_update_account_tax_field_headers_async(): + client = AccountTaxServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = gsma_account_tax.UpdateAccountTaxRequest() + + request.account_tax.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_account_tax), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + gsma_account_tax.AccountTax() + ) + await client.update_account_tax(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "account_tax.name=name_value", + ) in kw["metadata"] + + +def test_update_account_tax_flattened(): + client = AccountTaxServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_account_tax), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = gsma_account_tax.AccountTax() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.update_account_tax( + account_tax=gsma_account_tax.AccountTax(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].account_tax + mock_val = gsma_account_tax.AccountTax(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + assert arg == mock_val + + +def test_update_account_tax_flattened_error(): + client = AccountTaxServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_account_tax( + gsma_account_tax.UpdateAccountTaxRequest(), + account_tax=gsma_account_tax.AccountTax(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +@pytest.mark.asyncio +async def test_update_account_tax_flattened_async(): + client = AccountTaxServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_account_tax), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = gsma_account_tax.AccountTax() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + gsma_account_tax.AccountTax() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.update_account_tax( + account_tax=gsma_account_tax.AccountTax(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].account_tax + mock_val = gsma_account_tax.AccountTax(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_update_account_tax_flattened_error_async(): + client = AccountTaxServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.update_account_tax( + gsma_account_tax.UpdateAccountTaxRequest(), + account_tax=gsma_account_tax.AccountTax(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +@pytest.mark.parametrize( + "request_type", + [ + account_tax.GetAccountTaxRequest, + dict, + ], +) +def test_get_account_tax_rest(request_type): + client = AccountTaxServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "accounts/sample1/accounttax/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = account_tax.AccountTax( + name="name_value", + account=749, + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = account_tax.AccountTax.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_account_tax(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, account_tax.AccountTax) + assert response.name == "name_value" + assert response.account == 749 + + +def test_get_account_tax_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = AccountTaxServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_account_tax in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client._transport._wrapped_methods[client._transport.get_account_tax] = mock_rpc + + request = {} + client.get_account_tax(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.get_account_tax(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_get_account_tax_rest_required_fields( + request_type=account_tax.GetAccountTaxRequest, +): + transport_class = transports.AccountTaxServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_account_tax._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_account_tax._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = AccountTaxServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = account_tax.AccountTax() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = account_tax.AccountTax.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_account_tax(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_account_tax_rest_unset_required_fields(): + transport = transports.AccountTaxServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_account_tax._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_account_tax_rest_interceptors(null_interceptor): + transport = transports.AccountTaxServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.AccountTaxServiceRestInterceptor(), + ) + client = AccountTaxServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.AccountTaxServiceRestInterceptor, "post_get_account_tax" + ) as post, mock.patch.object( + transports.AccountTaxServiceRestInterceptor, "pre_get_account_tax" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = account_tax.GetAccountTaxRequest.pb( + account_tax.GetAccountTaxRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = account_tax.AccountTax.to_json( + account_tax.AccountTax() + ) + + request = account_tax.GetAccountTaxRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = account_tax.AccountTax() + + client.get_account_tax( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_account_tax_rest_bad_request( + transport: str = "rest", request_type=account_tax.GetAccountTaxRequest +): + client = AccountTaxServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "accounts/sample1/accounttax/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_account_tax(request) + + +def test_get_account_tax_rest_flattened(): + client = AccountTaxServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = account_tax.AccountTax() + + # get arguments that satisfy an http rule for this method + sample_request = {"name": "accounts/sample1/accounttax/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = account_tax.AccountTax.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_account_tax(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/accounts/v1beta/{name=accounts/*/accounttax/*}" + % client.transport._host, + args[1], + ) + + +def test_get_account_tax_rest_flattened_error(transport: str = "rest"): + client = AccountTaxServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_account_tax( + account_tax.GetAccountTaxRequest(), + name="name_value", + ) + + +def test_get_account_tax_rest_error(): + client = AccountTaxServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + account_tax.ListAccountTaxRequest, + dict, + ], +) +def test_list_account_tax_rest(request_type): + client = AccountTaxServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "accounts/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = account_tax.ListAccountTaxResponse( + next_page_token="next_page_token_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = account_tax.ListAccountTaxResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_account_tax(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListAccountTaxPager) + assert response.next_page_token == "next_page_token_value" + + +def test_list_account_tax_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = AccountTaxServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_account_tax in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.list_account_tax + ] = mock_rpc + + request = {} + client.list_account_tax(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_account_tax(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_list_account_tax_rest_required_fields( + request_type=account_tax.ListAccountTaxRequest, +): + transport_class = transports.AccountTaxServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_account_tax._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_account_tax._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "page_size", + "page_token", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = AccountTaxServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = account_tax.ListAccountTaxResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = account_tax.ListAccountTaxResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_account_tax(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_account_tax_rest_unset_required_fields(): + transport = transports.AccountTaxServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.list_account_tax._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_account_tax_rest_interceptors(null_interceptor): + transport = transports.AccountTaxServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.AccountTaxServiceRestInterceptor(), + ) + client = AccountTaxServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.AccountTaxServiceRestInterceptor, "post_list_account_tax" + ) as post, mock.patch.object( + transports.AccountTaxServiceRestInterceptor, "pre_list_account_tax" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = account_tax.ListAccountTaxRequest.pb( + account_tax.ListAccountTaxRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = account_tax.ListAccountTaxResponse.to_json( + account_tax.ListAccountTaxResponse() + ) + + request = account_tax.ListAccountTaxRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = account_tax.ListAccountTaxResponse() + + client.list_account_tax( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_account_tax_rest_bad_request( + transport: str = "rest", request_type=account_tax.ListAccountTaxRequest +): + client = AccountTaxServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "accounts/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_account_tax(request) + + +def test_list_account_tax_rest_flattened(): + client = AccountTaxServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = account_tax.ListAccountTaxResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "accounts/sample1"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = account_tax.ListAccountTaxResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.list_account_tax(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/accounts/v1beta/{parent=accounts/*}/accounttax" + % client.transport._host, + args[1], + ) + + +def test_list_account_tax_rest_flattened_error(transport: str = "rest"): + client = AccountTaxServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_account_tax( + account_tax.ListAccountTaxRequest(), + parent="parent_value", + ) + + +def test_list_account_tax_rest_pager(transport: str = "rest"): + client = AccountTaxServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + account_tax.ListAccountTaxResponse( + account_taxes=[ + account_tax.AccountTax(), + account_tax.AccountTax(), + account_tax.AccountTax(), + ], + next_page_token="abc", + ), + account_tax.ListAccountTaxResponse( + account_taxes=[], + next_page_token="def", + ), + account_tax.ListAccountTaxResponse( + account_taxes=[ + account_tax.AccountTax(), + ], + next_page_token="ghi", + ), + account_tax.ListAccountTaxResponse( + account_taxes=[ + account_tax.AccountTax(), + account_tax.AccountTax(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + account_tax.ListAccountTaxResponse.to_json(x) for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "accounts/sample1"} + + pager = client.list_account_tax(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, account_tax.AccountTax) for i in results) + + pages = list(client.list_account_tax(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + gsma_account_tax.UpdateAccountTaxRequest, + dict, + ], +) +def test_update_account_tax_rest(request_type): + client = AccountTaxServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"account_tax": {"name": "accounts/sample1/accounttax/sample2"}} + request_init["account_tax"] = { + "name": "accounts/sample1/accounttax/sample2", + "account": 749, + "tax_rules": [ + { + "location_id": 1157, + "post_code_range": {"start": "start_value", "end": "end_value"}, + "use_google_rate": True, + "self_specified_rate_micros": 2732, + "region_code": "region_code_value", + "shipping_taxed": True, + "effective_time_period": { + "start_time": {"seconds": 751, "nanos": 543}, + "end_time": {}, + }, + } + ], + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = gsma_account_tax.UpdateAccountTaxRequest.meta.fields["account_tax"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["account_tax"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["account_tax"][field])): + del request_init["account_tax"][field][i][subfield] + else: + del request_init["account_tax"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = gsma_account_tax.AccountTax( + name="name_value", + account=749, + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = gsma_account_tax.AccountTax.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.update_account_tax(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, gsma_account_tax.AccountTax) + assert response.name == "name_value" + assert response.account == 749 + + +def test_update_account_tax_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = AccountTaxServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.update_account_tax in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.update_account_tax + ] = mock_rpc + + request = {} + client.update_account_tax(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.update_account_tax(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_update_account_tax_rest_required_fields( + request_type=gsma_account_tax.UpdateAccountTaxRequest, +): + transport_class = transports.AccountTaxServiceRestTransport + + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_account_tax._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_account_tax._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("update_mask",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + + client = AccountTaxServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = gsma_account_tax.AccountTax() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
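+            # The canned transcode result below routes the call to a dummy URI and
+            # echoes the request as both body and query params, so the test can
+            # verify the parameters the transport actually sends.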
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "patch", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = gsma_account_tax.AccountTax.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.update_account_tax(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_update_account_tax_rest_unset_required_fields(): + transport = transports.AccountTaxServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.update_account_tax._get_unset_required_fields({}) + assert set(unset_fields) == (set(("updateMask",)) & set(("accountTax",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_account_tax_rest_interceptors(null_interceptor): + transport = transports.AccountTaxServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.AccountTaxServiceRestInterceptor(), + ) + client = AccountTaxServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.AccountTaxServiceRestInterceptor, "post_update_account_tax" + ) as post, mock.patch.object( + transports.AccountTaxServiceRestInterceptor, "pre_update_account_tax" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = gsma_account_tax.UpdateAccountTaxRequest.pb( + gsma_account_tax.UpdateAccountTaxRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = gsma_account_tax.AccountTax.to_json( + gsma_account_tax.AccountTax() + ) + + request = gsma_account_tax.UpdateAccountTaxRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = gsma_account_tax.AccountTax() + + client.update_account_tax( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_update_account_tax_rest_bad_request( + transport: str = "rest", request_type=gsma_account_tax.UpdateAccountTaxRequest +): + client = AccountTaxServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"account_tax": {"name": "accounts/sample1/accounttax/sample2"}} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
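+    # A 400 response from the mocked session should surface as
+    # core_exceptions.BadRequest rather than a raw HTTP error.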
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.update_account_tax(request) + + +def test_update_account_tax_rest_flattened(): + client = AccountTaxServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = gsma_account_tax.AccountTax() + + # get arguments that satisfy an http rule for this method + sample_request = { + "account_tax": {"name": "accounts/sample1/accounttax/sample2"} + } + + # get truthy value for each flattened field + mock_args = dict( + account_tax=gsma_account_tax.AccountTax(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = gsma_account_tax.AccountTax.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.update_account_tax(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/accounts/v1beta/{account_tax.name=accounts/*/accounttax/*}" + % client.transport._host, + args[1], + ) + + +def test_update_account_tax_rest_flattened_error(transport: str = "rest"): + client = AccountTaxServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_account_tax( + gsma_account_tax.UpdateAccountTaxRequest(), + account_tax=gsma_account_tax.AccountTax(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +def test_update_account_tax_rest_error(): + client = AccountTaxServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.AccountTaxServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = AccountTaxServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.AccountTaxServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = AccountTaxServiceClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. 
+ transport = transports.AccountTaxServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = AccountTaxServiceClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = AccountTaxServiceClient( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.AccountTaxServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = AccountTaxServiceClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.AccountTaxServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = AccountTaxServiceClient(transport=transport) + assert client.transport is transport + + +def test_transport_get_channel(): + # A client may be instantiated with a custom transport instance. + transport = transports.AccountTaxServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + transport = transports.AccountTaxServiceGrpcAsyncIOTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.AccountTaxServiceGrpcTransport, + transports.AccountTaxServiceGrpcAsyncIOTransport, + transports.AccountTaxServiceRestTransport, + ], +) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "rest", + ], +) +def test_transport_kind(transport_name): + transport = AccountTaxServiceClient.get_transport_class(transport_name)( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert transport.kind == transport_name + + +def test_transport_grpc_default(): + # A client should use the gRPC transport by default. + client = AccountTaxServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert isinstance( + client.transport, + transports.AccountTaxServiceGrpcTransport, + ) + + +def test_account_tax_service_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.AccountTaxServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json", + ) + + +def test_account_tax_service_base_transport(): + # Instantiate the base transport. + with mock.patch( + "google.shopping.merchant_accounts_v1beta.services.account_tax_service.transports.AccountTaxServiceTransport.__init__" + ) as Transport: + Transport.return_value = None + transport = transports.AccountTaxServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. 
+ methods = ( + "get_account_tax", + "list_account_tax", + "update_account_tax", + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + with pytest.raises(NotImplementedError): + transport.close() + + # Catch all for all remaining methods and properties + remainder = [ + "kind", + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + + +def test_account_tax_service_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch( + "google.shopping.merchant_accounts_v1beta.services.account_tax_service.transports.AccountTaxServiceTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.AccountTaxServiceTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with( + "credentials.json", + scopes=None, + default_scopes=("https://www.googleapis.com/auth/content",), + quota_project_id="octopus", + ) + + +def test_account_tax_service_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. + with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( + "google.shopping.merchant_accounts_v1beta.services.account_tax_service.transports.AccountTaxServiceTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.AccountTaxServiceTransport() + adc.assert_called_once() + + +def test_account_tax_service_auth_adc(): + # If no credentials are provided, we should use ADC credentials. + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + AccountTaxServiceClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=("https://www.googleapis.com/auth/content",), + quota_project_id=None, + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.AccountTaxServiceGrpcTransport, + transports.AccountTaxServiceGrpcAsyncIOTransport, + ], +) +def test_account_tax_service_transport_auth_adc(transport_class): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
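+    # ADC here refers to Application Default Credentials, resolved through
+    # google.auth.default().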
+ with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + adc.assert_called_once_with( + scopes=["1", "2"], + default_scopes=("https://www.googleapis.com/auth/content",), + quota_project_id="octopus", + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.AccountTaxServiceGrpcTransport, + transports.AccountTaxServiceGrpcAsyncIOTransport, + transports.AccountTaxServiceRestTransport, + ], +) +def test_account_tax_service_transport_auth_gdch_credentials(transport_class): + host = "https://language.com" + api_audience_tests = [None, "https://language2.com"] + api_audience_expect = [host, "https://language2.com"] + for t, e in zip(api_audience_tests, api_audience_expect): + with mock.patch.object(google.auth, "default", autospec=True) as adc: + gdch_mock = mock.MagicMock() + type(gdch_mock).with_gdch_audience = mock.PropertyMock( + return_value=gdch_mock + ) + adc.return_value = (gdch_mock, None) + transport_class(host=host, api_audience=t) + gdch_mock.with_gdch_audience.assert_called_once_with(e) + + +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.AccountTaxServiceGrpcTransport, grpc_helpers), + (transports.AccountTaxServiceGrpcAsyncIOTransport, grpc_helpers_async), + ], +) +def test_account_tax_service_transport_create_channel(transport_class, grpc_helpers): + # If credentials and host are not provided, the transport class should use + # ADC credentials. + with mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + + create_channel.assert_called_with( + "merchantapi.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + default_scopes=("https://www.googleapis.com/auth/content",), + scopes=["1", "2"], + default_host="merchantapi.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.AccountTaxServiceGrpcTransport, + transports.AccountTaxServiceGrpcAsyncIOTransport, + ], +) +def test_account_tax_service_grpc_transport_client_cert_source_for_mtls( + transport_class, +): + cred = ga_credentials.AnonymousCredentials() + + # Check ssl_channel_credentials is used if provided. 
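+    # An explicitly supplied ssl_channel_credentials object should be passed straight
+    # to the channel; the client_cert_source_for_mtls callback is only consulted when
+    # it is absent (second block below).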
+ with mock.patch.object(transport_class, "create_channel") as mock_create_channel: + mock_ssl_channel_creds = mock.Mock() + transport_class( + host="squid.clam.whelk", + credentials=cred, + ssl_channel_credentials=mock_ssl_channel_creds, + ) + mock_create_channel.assert_called_once_with( + "squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_channel_creds, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls + # is used. + with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): + with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: + transport_class( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback, + ) + expected_cert, expected_key = client_cert_source_callback() + mock_ssl_cred.assert_called_once_with( + certificate_chain=expected_cert, private_key=expected_key + ) + + +def test_account_tax_service_http_transport_client_cert_source_for_mtls(): + cred = ga_credentials.AnonymousCredentials() + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ) as mock_configure_mtls_channel: + transports.AccountTaxServiceRestTransport( + credentials=cred, client_cert_source_for_mtls=client_cert_source_callback + ) + mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + "rest", + ], +) +def test_account_tax_service_host_no_port(transport_name): + client = AccountTaxServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="merchantapi.googleapis.com" + ), + transport=transport_name, + ) + assert client.transport._host == ( + "merchantapi.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://merchantapi.googleapis.com" + ) + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + "rest", + ], +) +def test_account_tax_service_host_with_port(transport_name): + client = AccountTaxServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="merchantapi.googleapis.com:8000" + ), + transport=transport_name, + ) + assert client.transport._host == ( + "merchantapi.googleapis.com:8000" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://merchantapi.googleapis.com:8000" + ) + + +@pytest.mark.parametrize( + "transport_name", + [ + "rest", + ], +) +def test_account_tax_service_client_transport_session_collision(transport_name): + creds1 = ga_credentials.AnonymousCredentials() + creds2 = ga_credentials.AnonymousCredentials() + client1 = AccountTaxServiceClient( + credentials=creds1, + transport=transport_name, + ) + client2 = AccountTaxServiceClient( + credentials=creds2, + transport=transport_name, + ) + session1 = client1.transport.get_account_tax._session + session2 = client2.transport.get_account_tax._session + assert session1 != session2 + session1 = client1.transport.list_account_tax._session + session2 = client2.transport.list_account_tax._session + assert session1 != session2 + session1 = client1.transport.update_account_tax._session + session2 = 
client2.transport.update_account_tax._session + assert session1 != session2 + + +def test_account_tax_service_grpc_transport_channel(): + channel = grpc.secure_channel("http://localhost/", grpc.local_channel_credentials()) + + # Check that channel is used if provided. + transport = transports.AccountTaxServiceGrpcTransport( + host="squid.clam.whelk", + channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None + + +def test_account_tax_service_grpc_asyncio_transport_channel(): + channel = aio.secure_channel("http://localhost/", grpc.local_channel_credentials()) + + # Check that channel is used if provided. + transport = transports.AccountTaxServiceGrpcAsyncIOTransport( + host="squid.clam.whelk", + channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. +@pytest.mark.parametrize( + "transport_class", + [ + transports.AccountTaxServiceGrpcTransport, + transports.AccountTaxServiceGrpcAsyncIOTransport, + ], +) +def test_account_tax_service_transport_channel_mtls_with_client_cert_source( + transport_class, +): + with mock.patch( + "grpc.ssl_channel_credentials", autospec=True + ) as grpc_ssl_channel_cred: + with mock.patch.object( + transport_class, "create_channel" + ) as grpc_create_channel: + mock_ssl_cred = mock.Mock() + grpc_ssl_channel_cred.return_value = mock_ssl_cred + + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + + cred = ga_credentials.AnonymousCredentials() + with pytest.warns(DeprecationWarning): + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (cred, None) + transport = transport_class( + host="squid.clam.whelk", + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=client_cert_source_callback, + ) + adc.assert_called_once() + + grpc_ssl_channel_cred.assert_called_once_with( + certificate_chain=b"cert bytes", private_key=b"key bytes" + ) + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + assert transport._ssl_channel_credentials == mock_ssl_cred + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. 
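+# The variant below exercises the same deprecated mTLS arguments, but sources the SSL
+# credentials from google.auth.transport.grpc.SslCredentials instead of an explicit
+# client_cert_source callback.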
+@pytest.mark.parametrize( + "transport_class", + [ + transports.AccountTaxServiceGrpcTransport, + transports.AccountTaxServiceGrpcAsyncIOTransport, + ], +) +def test_account_tax_service_transport_channel_mtls_with_adc(transport_class): + mock_ssl_cred = mock.Mock() + with mock.patch.multiple( + "google.auth.transport.grpc.SslCredentials", + __init__=mock.Mock(return_value=None), + ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), + ): + with mock.patch.object( + transport_class, "create_channel" + ) as grpc_create_channel: + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + mock_cred = mock.Mock() + + with pytest.warns(DeprecationWarning): + transport = transport_class( + host="squid.clam.whelk", + credentials=mock_cred, + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=None, + ) + + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=mock_cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + + +def test_account_tax_path(): + account = "squid" + tax = "clam" + expected = "accounts/{account}/accounttax/{tax}".format( + account=account, + tax=tax, + ) + actual = AccountTaxServiceClient.account_tax_path(account, tax) + assert expected == actual + + +def test_parse_account_tax_path(): + expected = { + "account": "whelk", + "tax": "octopus", + } + path = AccountTaxServiceClient.account_tax_path(**expected) + + # Check that the path construction is reversible. + actual = AccountTaxServiceClient.parse_account_tax_path(path) + assert expected == actual + + +def test_common_billing_account_path(): + billing_account = "oyster" + expected = "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + actual = AccountTaxServiceClient.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "nudibranch", + } + path = AccountTaxServiceClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. + actual = AccountTaxServiceClient.parse_common_billing_account_path(path) + assert expected == actual + + +def test_common_folder_path(): + folder = "cuttlefish" + expected = "folders/{folder}".format( + folder=folder, + ) + actual = AccountTaxServiceClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "mussel", + } + path = AccountTaxServiceClient.common_folder_path(**expected) + + # Check that the path construction is reversible. + actual = AccountTaxServiceClient.parse_common_folder_path(path) + assert expected == actual + + +def test_common_organization_path(): + organization = "winkle" + expected = "organizations/{organization}".format( + organization=organization, + ) + actual = AccountTaxServiceClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "nautilus", + } + path = AccountTaxServiceClient.common_organization_path(**expected) + + # Check that the path construction is reversible. 
+ actual = AccountTaxServiceClient.parse_common_organization_path(path) + assert expected == actual + + +def test_common_project_path(): + project = "scallop" + expected = "projects/{project}".format( + project=project, + ) + actual = AccountTaxServiceClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "abalone", + } + path = AccountTaxServiceClient.common_project_path(**expected) + + # Check that the path construction is reversible. + actual = AccountTaxServiceClient.parse_common_project_path(path) + assert expected == actual + + +def test_common_location_path(): + project = "squid" + location = "clam" + expected = "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) + actual = AccountTaxServiceClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "whelk", + "location": "octopus", + } + path = AccountTaxServiceClient.common_location_path(**expected) + + # Check that the path construction is reversible. + actual = AccountTaxServiceClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object( + transports.AccountTaxServiceTransport, "_prep_wrapped_messages" + ) as prep: + client = AccountTaxServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object( + transports.AccountTaxServiceTransport, "_prep_wrapped_messages" + ) as prep: + transport_class = AccountTaxServiceClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + +@pytest.mark.asyncio +async def test_transport_close_async(): + client = AccountTaxServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + with mock.patch.object( + type(getattr(client.transport, "grpc_channel")), "close" + ) as close: + async with client: + close.assert_not_called() + close.assert_called_once() + + +def test_transport_close(): + transports = { + "rest": "_session", + "grpc": "_grpc_channel", + } + + for transport, close_name in transports.items(): + client = AccountTaxServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + with mock.patch.object( + type(getattr(client.transport, close_name)), "close" + ) as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +def test_client_ctx(): + transports = [ + "rest", + "grpc", + ] + for transport in transports: + client = AccountTaxServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + # Test client calls underlying transport. 
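+        # The transport's close() must have been invoked by the time the client
+        # context exits.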
+ with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() + + +@pytest.mark.parametrize( + "client_class,transport_class", + [ + (AccountTaxServiceClient, transports.AccountTaxServiceGrpcTransport), + ( + AccountTaxServiceAsyncClient, + transports.AccountTaxServiceGrpcAsyncIOTransport, + ), + ], +) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) diff --git a/packages/google-shopping-merchant-accounts/tests/unit/gapic/merchant_accounts_v1beta/test_accounts_service.py b/packages/google-shopping-merchant-accounts/tests/unit/gapic/merchant_accounts_v1beta/test_accounts_service.py new file mode 100644 index 000000000000..b81c03e87874 --- /dev/null +++ b/packages/google-shopping-merchant-accounts/tests/unit/gapic/merchant_accounts_v1beta/test_accounts_service.py @@ -0,0 +1,6193 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import os + +# try/except added for compatibility with python < 3.8 +try: + from unittest import mock + from unittest.mock import AsyncMock # pragma: NO COVER +except ImportError: # pragma: NO COVER + import mock + +from collections.abc import Iterable +import json +import math + +from google.api_core import gapic_v1, grpc_helpers, grpc_helpers_async, path_template +from google.api_core import api_core_version, client_options +from google.api_core import exceptions as core_exceptions +import google.auth +from google.auth import credentials as ga_credentials +from google.auth.exceptions import MutualTLSChannelError +from google.oauth2 import service_account +from google.protobuf import empty_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import json_format +from google.type import datetime_pb2 # type: ignore +import grpc +from grpc.experimental import aio +from proto.marshal.rules import wrappers +from proto.marshal.rules.dates import DurationRule, TimestampRule +import pytest +from requests import PreparedRequest, Request, Response +from requests.sessions import Session + +from google.shopping.merchant_accounts_v1beta.services.accounts_service import ( + AccountsServiceAsyncClient, + AccountsServiceClient, + pagers, + transports, +) +from google.shopping.merchant_accounts_v1beta.types import accessright, accounts, user + + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + + +# If default endpoint is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. +def modify_default_endpoint(client): + return ( + "foo.googleapis.com" + if ("localhost" in client.DEFAULT_ENDPOINT) + else client.DEFAULT_ENDPOINT + ) + + +# If default endpoint template is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint template so the client can produce a different +# mtls endpoint for endpoint testing purposes. 
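+# The override below only applies when the default template points at localhost;
+# otherwise the client's real endpoint template is returned unchanged.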
+def modify_default_endpoint_template(client): + return ( + "test.{UNIVERSE_DOMAIN}" + if ("localhost" in client._DEFAULT_ENDPOINT_TEMPLATE) + else client._DEFAULT_ENDPOINT_TEMPLATE + ) + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert AccountsServiceClient._get_default_mtls_endpoint(None) is None + assert ( + AccountsServiceClient._get_default_mtls_endpoint(api_endpoint) + == api_mtls_endpoint + ) + assert ( + AccountsServiceClient._get_default_mtls_endpoint(api_mtls_endpoint) + == api_mtls_endpoint + ) + assert ( + AccountsServiceClient._get_default_mtls_endpoint(sandbox_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + AccountsServiceClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + AccountsServiceClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi + ) + + +def test__read_environment_variables(): + assert AccountsServiceClient._read_environment_variables() == (False, "auto", None) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + assert AccountsServiceClient._read_environment_variables() == ( + True, + "auto", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + assert AccountsServiceClient._read_environment_variables() == ( + False, + "auto", + None, + ) + + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError) as excinfo: + AccountsServiceClient._read_environment_variables() + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + assert AccountsServiceClient._read_environment_variables() == ( + False, + "never", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + assert AccountsServiceClient._read_environment_variables() == ( + False, + "always", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}): + assert AccountsServiceClient._read_environment_variables() == ( + False, + "auto", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + AccountsServiceClient._read_environment_variables() + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + with mock.patch.dict(os.environ, {"GOOGLE_CLOUD_UNIVERSE_DOMAIN": "foo.com"}): + assert AccountsServiceClient._read_environment_variables() == ( + False, + "auto", + "foo.com", + ) + + +def test__get_client_cert_source(): + mock_provided_cert_source = mock.Mock() + mock_default_cert_source = mock.Mock() + + assert AccountsServiceClient._get_client_cert_source(None, False) is None + assert ( + AccountsServiceClient._get_client_cert_source(mock_provided_cert_source, False) + is None + ) + assert ( + AccountsServiceClient._get_client_cert_source(mock_provided_cert_source, True) + == mock_provided_cert_source + ) + + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", return_value=True + ): + with mock.patch( + 
"google.auth.transport.mtls.default_client_cert_source", + return_value=mock_default_cert_source, + ): + assert ( + AccountsServiceClient._get_client_cert_source(None, True) + is mock_default_cert_source + ) + assert ( + AccountsServiceClient._get_client_cert_source( + mock_provided_cert_source, "true" + ) + is mock_provided_cert_source + ) + + +@mock.patch.object( + AccountsServiceClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(AccountsServiceClient), +) +@mock.patch.object( + AccountsServiceAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(AccountsServiceAsyncClient), +) +def test__get_api_endpoint(): + api_override = "foo.com" + mock_client_cert_source = mock.Mock() + default_universe = AccountsServiceClient._DEFAULT_UNIVERSE + default_endpoint = AccountsServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=default_universe + ) + mock_universe = "bar.com" + mock_endpoint = AccountsServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=mock_universe + ) + + assert ( + AccountsServiceClient._get_api_endpoint( + api_override, mock_client_cert_source, default_universe, "always" + ) + == api_override + ) + assert ( + AccountsServiceClient._get_api_endpoint( + None, mock_client_cert_source, default_universe, "auto" + ) + == AccountsServiceClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + AccountsServiceClient._get_api_endpoint(None, None, default_universe, "auto") + == default_endpoint + ) + assert ( + AccountsServiceClient._get_api_endpoint(None, None, default_universe, "always") + == AccountsServiceClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + AccountsServiceClient._get_api_endpoint( + None, mock_client_cert_source, default_universe, "always" + ) + == AccountsServiceClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + AccountsServiceClient._get_api_endpoint(None, None, mock_universe, "never") + == mock_endpoint + ) + assert ( + AccountsServiceClient._get_api_endpoint(None, None, default_universe, "never") + == default_endpoint + ) + + with pytest.raises(MutualTLSChannelError) as excinfo: + AccountsServiceClient._get_api_endpoint( + None, mock_client_cert_source, mock_universe, "auto" + ) + assert ( + str(excinfo.value) + == "mTLS is not supported in any universe other than googleapis.com." + ) + + +def test__get_universe_domain(): + client_universe_domain = "foo.com" + universe_domain_env = "bar.com" + + assert ( + AccountsServiceClient._get_universe_domain( + client_universe_domain, universe_domain_env + ) + == client_universe_domain + ) + assert ( + AccountsServiceClient._get_universe_domain(None, universe_domain_env) + == universe_domain_env + ) + assert ( + AccountsServiceClient._get_universe_domain(None, None) + == AccountsServiceClient._DEFAULT_UNIVERSE + ) + + with pytest.raises(ValueError) as excinfo: + AccountsServiceClient._get_universe_domain("", None) + assert str(excinfo.value) == "Universe Domain cannot be an empty string." + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (AccountsServiceClient, transports.AccountsServiceGrpcTransport, "grpc"), + (AccountsServiceClient, transports.AccountsServiceRestTransport, "rest"), + ], +) +def test__validate_universe_domain(client_class, transport_class, transport_name): + client = client_class( + transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) + ) + assert client._validate_universe_domain() == True + + # Test the case when universe is already validated. 
+ assert client._validate_universe_domain() == True + + if transport_name == "grpc": + # Test the case where credentials are provided by the + # `local_channel_credentials`. The default universes in both match. + channel = grpc.secure_channel( + "http://localhost/", grpc.local_channel_credentials() + ) + client = client_class(transport=transport_class(channel=channel)) + assert client._validate_universe_domain() == True + + # Test the case where credentials do not exist: e.g. a transport is provided + # with no credentials. Validation should still succeed because there is no + # mismatch with non-existent credentials. + channel = grpc.secure_channel( + "http://localhost/", grpc.local_channel_credentials() + ) + transport = transport_class(channel=channel) + transport._credentials = None + client = client_class(transport=transport) + assert client._validate_universe_domain() == True + + # TODO: This is needed to cater for older versions of google-auth + # Make this test unconditional once the minimum supported version of + # google-auth becomes 2.23.0 or higher. + google_auth_major, google_auth_minor = [ + int(part) for part in google.auth.__version__.split(".")[0:2] + ] + if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): + credentials = ga_credentials.AnonymousCredentials() + credentials._universe_domain = "foo.com" + # Test the case when there is a universe mismatch from the credentials. + client = client_class(transport=transport_class(credentials=credentials)) + with pytest.raises(ValueError) as excinfo: + client._validate_universe_domain() + assert ( + str(excinfo.value) + == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." + ) + + # Test the case when there is a universe mismatch from the client. + # + # TODO: Make this test unconditional once the minimum supported version of + # google-api-core becomes 2.15.0 or higher. + api_core_major, api_core_minor = [ + int(part) for part in api_core_version.__version__.split(".")[0:2] + ] + if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): + client = client_class( + client_options={"universe_domain": "bar.com"}, + transport=transport_class( + credentials=ga_credentials.AnonymousCredentials(), + ), + ) + with pytest.raises(ValueError) as excinfo: + client._validate_universe_domain() + assert ( + str(excinfo.value) + == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." 
+ ) + + # Test that ValueError is raised if universe_domain is provided via client options and credentials is None + with pytest.raises(ValueError): + client._compare_universes("foo.bar", None) + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (AccountsServiceClient, "grpc"), + (AccountsServiceAsyncClient, "grpc_asyncio"), + (AccountsServiceClient, "rest"), + ], +) +def test_accounts_service_client_from_service_account_info( + client_class, transport_name +): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_info" + ) as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info, transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + "merchantapi.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://merchantapi.googleapis.com" + ) + + +@pytest.mark.parametrize( + "transport_class,transport_name", + [ + (transports.AccountsServiceGrpcTransport, "grpc"), + (transports.AccountsServiceGrpcAsyncIOTransport, "grpc_asyncio"), + (transports.AccountsServiceRestTransport, "rest"), + ], +) +def test_accounts_service_client_service_account_always_use_jwt( + transport_class, transport_name +): + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) + + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=False) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (AccountsServiceClient, "grpc"), + (AccountsServiceAsyncClient, "grpc_asyncio"), + (AccountsServiceClient, "rest"), + ], +) +def test_accounts_service_client_from_service_account_file( + client_class, transport_name +): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_file" + ) as factory: + factory.return_value = creds + client = client_class.from_service_account_file( + "dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + client = client_class.from_service_account_json( + "dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + "merchantapi.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://merchantapi.googleapis.com" + ) + + +def test_accounts_service_client_get_transport_class(): + transport = AccountsServiceClient.get_transport_class() + available_transports = [ + transports.AccountsServiceGrpcTransport, + transports.AccountsServiceRestTransport, + ] + assert transport in available_transports + + transport = AccountsServiceClient.get_transport_class("grpc") + assert transport == transports.AccountsServiceGrpcTransport + + +@pytest.mark.parametrize( + 
"client_class,transport_class,transport_name", + [ + (AccountsServiceClient, transports.AccountsServiceGrpcTransport, "grpc"), + ( + AccountsServiceAsyncClient, + transports.AccountsServiceGrpcAsyncIOTransport, + "grpc_asyncio", + ), + (AccountsServiceClient, transports.AccountsServiceRestTransport, "rest"), + ], +) +@mock.patch.object( + AccountsServiceClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(AccountsServiceClient), +) +@mock.patch.object( + AccountsServiceAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(AccountsServiceAsyncClient), +) +def test_accounts_service_client_client_options( + client_class, transport_class, transport_name +): + # Check that if channel is provided we won't create a new one. + with mock.patch.object(AccountsServiceClient, "get_transport_class") as gtc: + transport = transport_class(credentials=ga_credentials.AnonymousCredentials()) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object(AccountsServiceClient, "get_transport_class") as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. + options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name, client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + client = client_class(transport=transport_name) + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError) as excinfo: + client = client_class(transport=transport_name) + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + # Check the case api_endpoint is provided + options = client_options.ClientOptions( + api_audience="https://language.googleapis.com" + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience="https://language.googleapis.com", + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,use_client_cert_env", + [ + ( + AccountsServiceClient, + transports.AccountsServiceGrpcTransport, + "grpc", + "true", + ), + ( + AccountsServiceAsyncClient, + transports.AccountsServiceGrpcAsyncIOTransport, + "grpc_asyncio", + "true", + ), + ( + AccountsServiceClient, + transports.AccountsServiceGrpcTransport, + "grpc", + "false", + ), + ( + AccountsServiceAsyncClient, + transports.AccountsServiceGrpcAsyncIOTransport, + "grpc_asyncio", + "false", + ), + ( + AccountsServiceClient, + transports.AccountsServiceRestTransport, + "rest", + "true", + ), + ( + AccountsServiceClient, + transports.AccountsServiceRestTransport, + "rest", + "false", + ), + ], +) +@mock.patch.object( + AccountsServiceClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(AccountsServiceClient), +) +@mock.patch.object( + AccountsServiceAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(AccountsServiceAsyncClient), +) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_accounts_service_client_mtls_env_auto( + client_class, transport_class, transport_name, use_client_cert_env +): + # This tests the endpoint autoswitch behavior. 
Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. + + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + options = client_options.ClientOptions( + client_cert_source=client_cert_source_callback + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ) + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=client_cert_source_callback, + ): + if use_client_cert_env == "false": + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ) + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case client_cert_source and ADC client cert are not provided. 
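+    # With no client certificate available from any source, the client should fall back
+    # to the regular endpoint even when GOOGLE_API_USE_CLIENT_CERTIFICATE is "true".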
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class", [AccountsServiceClient, AccountsServiceAsyncClient] +) +@mock.patch.object( + AccountsServiceClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(AccountsServiceClient), +) +@mock.patch.object( + AccountsServiceAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(AccountsServiceAsyncClient), +) +def test_accounts_service_client_get_mtls_endpoint_and_cert_source(client_class): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. 
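+    # Both google.auth.transport.mtls helpers are patched below, so the method is
+    # expected to return the mTLS endpoint together with the ADC-provided cert source.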
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=mock_client_cert_source, + ): + ( + api_endpoint, + cert_source, + ) = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + client_class.get_mtls_endpoint_and_cert_source() + + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError) as excinfo: + client_class.get_mtls_endpoint_and_cert_source() + + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + + +@pytest.mark.parametrize( + "client_class", [AccountsServiceClient, AccountsServiceAsyncClient] +) +@mock.patch.object( + AccountsServiceClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(AccountsServiceClient), +) +@mock.patch.object( + AccountsServiceAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(AccountsServiceAsyncClient), +) +def test_accounts_service_client_client_api_endpoint(client_class): + mock_client_cert_source = client_cert_source_callback + api_override = "foo.com" + default_universe = AccountsServiceClient._DEFAULT_UNIVERSE + default_endpoint = AccountsServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=default_universe + ) + mock_universe = "bar.com" + mock_endpoint = AccountsServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=mock_universe + ) + + # If ClientOptions.api_endpoint is set and GOOGLE_API_USE_CLIENT_CERTIFICATE="true", + # use ClientOptions.api_endpoint as the api endpoint regardless. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ): + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=api_override + ) + client = client_class( + client_options=options, + credentials=ga_credentials.AnonymousCredentials(), + ) + assert client.api_endpoint == api_override + + # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="never", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + client = client_class(credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == default_endpoint + + # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="always", + # use the DEFAULT_MTLS_ENDPOINT as the api endpoint. 
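+    # "always" forces the mTLS endpoint even though no client certificate is configured here.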
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + client = client_class(credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + + # If ClientOptions.api_endpoint is not set, GOOGLE_API_USE_MTLS_ENDPOINT="auto" (default), + # GOOGLE_API_USE_CLIENT_CERTIFICATE="false" (default), default cert source doesn't exist, + # and ClientOptions.universe_domain="bar.com", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with universe domain as the api endpoint. + options = client_options.ClientOptions() + universe_exists = hasattr(options, "universe_domain") + if universe_exists: + options = client_options.ClientOptions(universe_domain=mock_universe) + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + else: + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + assert client.api_endpoint == ( + mock_endpoint if universe_exists else default_endpoint + ) + assert client.universe_domain == ( + mock_universe if universe_exists else default_universe + ) + + # If ClientOptions does not have a universe domain attribute and GOOGLE_API_USE_MTLS_ENDPOINT="never", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. + options = client_options.ClientOptions() + if hasattr(options, "universe_domain"): + delattr(options, "universe_domain") + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + assert client.api_endpoint == default_endpoint + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (AccountsServiceClient, transports.AccountsServiceGrpcTransport, "grpc"), + ( + AccountsServiceAsyncClient, + transports.AccountsServiceGrpcAsyncIOTransport, + "grpc_asyncio", + ), + (AccountsServiceClient, transports.AccountsServiceRestTransport, "rest"), + ], +) +def test_accounts_service_client_client_options_scopes( + client_class, transport_class, transport_name +): + # Check the case scopes are provided. + options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + AccountsServiceClient, + transports.AccountsServiceGrpcTransport, + "grpc", + grpc_helpers, + ), + ( + AccountsServiceAsyncClient, + transports.AccountsServiceGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + (AccountsServiceClient, transports.AccountsServiceRestTransport, "rest", None), + ], +) +def test_accounts_service_client_client_options_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. 
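+    # The file path is forwarded to the transport unchanged while `credentials` stays None;
+    # loading credentials from the file is deferred to the transport.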
+ options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +def test_accounts_service_client_client_options_from_dict(): + with mock.patch( + "google.shopping.merchant_accounts_v1beta.services.accounts_service.transports.AccountsServiceGrpcTransport.__init__" + ) as grpc_transport: + grpc_transport.return_value = None + client = AccountsServiceClient( + client_options={"api_endpoint": "squid.clam.whelk"} + ) + grpc_transport.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + AccountsServiceClient, + transports.AccountsServiceGrpcTransport, + "grpc", + grpc_helpers, + ), + ( + AccountsServiceAsyncClient, + transports.AccountsServiceGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + ], +) +def test_accounts_service_client_create_channel_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. + options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # test that the credentials from file are saved and used as the credentials. 
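+    # load_credentials_from_file is mocked to return file_creds, so the channel must be
+    # created with those credentials rather than the ADC credentials from google.auth.default.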
+ with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel" + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + file_creds = ga_credentials.AnonymousCredentials() + load_creds.return_value = (file_creds, None) + adc.return_value = (creds, None) + client = client_class(client_options=options, transport=transport_name) + create_channel.assert_called_with( + "merchantapi.googleapis.com:443", + credentials=file_creds, + credentials_file=None, + quota_project_id=None, + default_scopes=("https://www.googleapis.com/auth/content",), + scopes=None, + default_host="merchantapi.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "request_type", + [ + accounts.GetAccountRequest, + dict, + ], +) +def test_get_account(request_type, transport: str = "grpc"): + client = AccountsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_account), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = accounts.Account( + name="name_value", + account_id=1049, + account_name="account_name_value", + adult_content=True, + test_account=True, + language_code="language_code_value", + ) + response = client.get_account(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = accounts.GetAccountRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, accounts.Account) + assert response.name == "name_value" + assert response.account_id == 1049 + assert response.account_name == "account_name_value" + assert response.adult_content is True + assert response.test_account is True + assert response.language_code == "language_code_value" + + +def test_get_account_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = AccountsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_account), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.get_account() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == accounts.GetAccountRequest() + + +def test_get_account_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. 
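+    # GetAccountRequest declares no auto-populated fields, so the request should reach
+    # the stub exactly as it is built below (only `name` set).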
+ client = AccountsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = accounts.GetAccountRequest( + name="name_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_account), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.get_account(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == accounts.GetAccountRequest( + name="name_value", + ) + + +def test_get_account_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = AccountsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_account in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get_account] = mock_rpc + request = {} + client.get_account(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.get_account(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_get_account_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = AccountsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_account), "__call__") as call: + # Designate an appropriate return value for the call. 
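+        # FakeUnaryUnaryCall wraps the response in an awaitable, so the mocked async
+        # stub can be awaited like a real unary-unary call.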
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + accounts.Account( + name="name_value", + account_id=1049, + account_name="account_name_value", + adult_content=True, + test_account=True, + language_code="language_code_value", + ) + ) + response = await client.get_account() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == accounts.GetAccountRequest() + + +@pytest.mark.asyncio +async def test_get_account_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = AccountsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.get_account + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[ + client._client._transport.get_account + ] = mock_object + + request = {} + await client.get_account(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + await client.get_account(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + +@pytest.mark.asyncio +async def test_get_account_async( + transport: str = "grpc_asyncio", request_type=accounts.GetAccountRequest +): + client = AccountsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_account), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + accounts.Account( + name="name_value", + account_id=1049, + account_name="account_name_value", + adult_content=True, + test_account=True, + language_code="language_code_value", + ) + ) + response = await client.get_account(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = accounts.GetAccountRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, accounts.Account) + assert response.name == "name_value" + assert response.account_id == 1049 + assert response.account_name == "account_name_value" + assert response.adult_content is True + assert response.test_account is True + assert response.language_code == "language_code_value" + + +@pytest.mark.asyncio +async def test_get_account_async_from_dict(): + await test_get_account_async(request_type=dict) + + +def test_get_account_field_headers(): + client = AccountsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = accounts.GetAccountRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_account), "__call__") as call: + call.return_value = accounts.Account() + client.get_account(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_account_field_headers_async(): + client = AccountsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = accounts.GetAccountRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_account), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(accounts.Account()) + await client.get_account(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_get_account_flattened(): + client = AccountsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_account), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = accounts.Account() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_account( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_get_account_flattened_error(): + client = AccountsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
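+    # No transport mock is needed: the ValueError is raised client-side before any RPC is attempted.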
+ with pytest.raises(ValueError): + client.get_account( + accounts.GetAccountRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_get_account_flattened_async(): + client = AccountsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_account), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = accounts.Account() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(accounts.Account()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_account( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_get_account_flattened_error_async(): + client = AccountsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.get_account( + accounts.GetAccountRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + accounts.CreateAndConfigureAccountRequest, + dict, + ], +) +def test_create_and_configure_account(request_type, transport: str = "grpc"): + client = AccountsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_and_configure_account), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = accounts.Account( + name="name_value", + account_id=1049, + account_name="account_name_value", + adult_content=True, + test_account=True, + language_code="language_code_value", + ) + response = client.create_and_configure_account(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = accounts.CreateAndConfigureAccountRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, accounts.Account) + assert response.name == "name_value" + assert response.account_id == 1049 + assert response.account_name == "account_name_value" + assert response.adult_content is True + assert response.test_account is True + assert response.language_code == "language_code_value" + + +def test_create_and_configure_account_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = AccountsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.create_and_configure_account), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.create_and_configure_account() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == accounts.CreateAndConfigureAccountRequest() + + +def test_create_and_configure_account_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = AccountsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = accounts.CreateAndConfigureAccountRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_and_configure_account), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.create_and_configure_account(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == accounts.CreateAndConfigureAccountRequest() + + +def test_create_and_configure_account_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = AccountsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.create_and_configure_account + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.create_and_configure_account + ] = mock_rpc + request = {} + client.create_and_configure_account(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.create_and_configure_account(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_create_and_configure_account_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = AccountsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_and_configure_account), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + accounts.Account( + name="name_value", + account_id=1049, + account_name="account_name_value", + adult_content=True, + test_account=True, + language_code="language_code_value", + ) + ) + response = await client.create_and_configure_account() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == accounts.CreateAndConfigureAccountRequest() + + +@pytest.mark.asyncio +async def test_create_and_configure_account_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = AccountsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.create_and_configure_account + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[ + client._client._transport.create_and_configure_account + ] = mock_object + + request = {} + await client.create_and_configure_account(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + await client.create_and_configure_account(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + +@pytest.mark.asyncio +async def test_create_and_configure_account_async( + transport: str = "grpc_asyncio", + request_type=accounts.CreateAndConfigureAccountRequest, +): + client = AccountsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_and_configure_account), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + accounts.Account( + name="name_value", + account_id=1049, + account_name="account_name_value", + adult_content=True, + test_account=True, + language_code="language_code_value", + ) + ) + response = await client.create_and_configure_account(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = accounts.CreateAndConfigureAccountRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, accounts.Account) + assert response.name == "name_value" + assert response.account_id == 1049 + assert response.account_name == "account_name_value" + assert response.adult_content is True + assert response.test_account is True + assert response.language_code == "language_code_value" + + +@pytest.mark.asyncio +async def test_create_and_configure_account_async_from_dict(): + await test_create_and_configure_account_async(request_type=dict) + + +@pytest.mark.parametrize( + "request_type", + [ + accounts.DeleteAccountRequest, + dict, + ], +) +def test_delete_account(request_type, transport: str = "grpc"): + client = AccountsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_account), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.delete_account(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = accounts.DeleteAccountRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_account_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = AccountsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_account), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.delete_account() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == accounts.DeleteAccountRequest() + + +def test_delete_account_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = AccountsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = accounts.DeleteAccountRequest( + name="name_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_account), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.delete_account(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == accounts.DeleteAccountRequest( + name="name_value", + ) + + +def test_delete_account_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = AccountsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete_account in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.delete_account] = mock_rpc + request = {} + client.delete_account(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.delete_account(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_delete_account_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = AccountsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_account), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_account() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == accounts.DeleteAccountRequest() + + +@pytest.mark.asyncio +async def test_delete_account_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = AccountsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.delete_account + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[ + client._client._transport.delete_account + ] = mock_object + + request = {} + await client.delete_account(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_object.call_count == 1 + + await client.delete_account(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + +@pytest.mark.asyncio +async def test_delete_account_async( + transport: str = "grpc_asyncio", request_type=accounts.DeleteAccountRequest +): + client = AccountsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_account), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_account(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = accounts.DeleteAccountRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_delete_account_async_from_dict(): + await test_delete_account_async(request_type=dict) + + +def test_delete_account_field_headers(): + client = AccountsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = accounts.DeleteAccountRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_account), "__call__") as call: + call.return_value = None + client.delete_account(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_delete_account_field_headers_async(): + client = AccountsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = accounts.DeleteAccountRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_account), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_account(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_delete_account_flattened(): + client = AccountsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
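+    # delete_account has no response payload, so None stands in for a successful reply below.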
+ with mock.patch.object(type(client.transport.delete_account), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.delete_account( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_delete_account_flattened_error(): + client = AccountsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_account( + accounts.DeleteAccountRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_delete_account_flattened_async(): + client = AccountsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_account), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.delete_account( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_delete_account_flattened_error_async(): + client = AccountsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.delete_account( + accounts.DeleteAccountRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + accounts.UpdateAccountRequest, + dict, + ], +) +def test_update_account(request_type, transport: str = "grpc"): + client = AccountsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_account), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = accounts.Account( + name="name_value", + account_id=1049, + account_name="account_name_value", + adult_content=True, + test_account=True, + language_code="language_code_value", + ) + response = client.update_account(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = accounts.UpdateAccountRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, accounts.Account) + assert response.name == "name_value" + assert response.account_id == 1049 + assert response.account_name == "account_name_value" + assert response.adult_content is True + assert response.test_account is True + assert response.language_code == "language_code_value" + + +def test_update_account_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = AccountsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_account), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.update_account() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == accounts.UpdateAccountRequest() + + +def test_update_account_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = AccountsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = accounts.UpdateAccountRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_account), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.update_account(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == accounts.UpdateAccountRequest() + + +def test_update_account_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = AccountsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.update_account in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.update_account] = mock_rpc + request = {} + client.update_account(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.update_account(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_update_account_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. 
+ client = AccountsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_account), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + accounts.Account( + name="name_value", + account_id=1049, + account_name="account_name_value", + adult_content=True, + test_account=True, + language_code="language_code_value", + ) + ) + response = await client.update_account() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == accounts.UpdateAccountRequest() + + +@pytest.mark.asyncio +async def test_update_account_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = AccountsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.update_account + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[ + client._client._transport.update_account + ] = mock_object + + request = {} + await client.update_account(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + await client.update_account(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + +@pytest.mark.asyncio +async def test_update_account_async( + transport: str = "grpc_asyncio", request_type=accounts.UpdateAccountRequest +): + client = AccountsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_account), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + accounts.Account( + name="name_value", + account_id=1049, + account_name="account_name_value", + adult_content=True, + test_account=True, + language_code="language_code_value", + ) + ) + response = await client.update_account(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = accounts.UpdateAccountRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, accounts.Account) + assert response.name == "name_value" + assert response.account_id == 1049 + assert response.account_name == "account_name_value" + assert response.adult_content is True + assert response.test_account is True + assert response.language_code == "language_code_value" + + +@pytest.mark.asyncio +async def test_update_account_async_from_dict(): + await test_update_account_async(request_type=dict) + + +def test_update_account_field_headers(): + client = AccountsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = accounts.UpdateAccountRequest() + + request.account.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_account), "__call__") as call: + call.return_value = accounts.Account() + client.update_account(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "account.name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_update_account_field_headers_async(): + client = AccountsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = accounts.UpdateAccountRequest() + + request.account.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_account), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(accounts.Account()) + await client.update_account(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "account.name=name_value", + ) in kw["metadata"] + + +def test_update_account_flattened(): + client = AccountsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_account), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = accounts.Account() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.update_account( + account=accounts.Account(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].account + mock_val = accounts.Account(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + assert arg == mock_val + + +def test_update_account_flattened_error(): + client = AccountsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_account( + accounts.UpdateAccountRequest(), + account=accounts.Account(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +@pytest.mark.asyncio +async def test_update_account_flattened_async(): + client = AccountsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_account), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = accounts.Account() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(accounts.Account()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.update_account( + account=accounts.Account(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].account + mock_val = accounts.Account(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_update_account_flattened_error_async(): + client = AccountsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.update_account( + accounts.UpdateAccountRequest(), + account=accounts.Account(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +@pytest.mark.parametrize( + "request_type", + [ + accounts.ListAccountsRequest, + dict, + ], +) +def test_list_accounts(request_type, transport: str = "grpc"): + client = AccountsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_accounts), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = accounts.ListAccountsResponse( + next_page_token="next_page_token_value", + ) + response = client.list_accounts(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = accounts.ListAccountsRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. 
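+    # list_accounts wraps the raw ListAccountsResponse in a ListAccountsPager,
+    # which can fetch further pages on demand.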
+ assert isinstance(response, pagers.ListAccountsPager) + assert response.next_page_token == "next_page_token_value" + + +def test_list_accounts_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = AccountsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_accounts), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.list_accounts() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == accounts.ListAccountsRequest() + + +def test_list_accounts_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = AccountsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = accounts.ListAccountsRequest( + page_token="page_token_value", + filter="filter_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_accounts), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.list_accounts(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == accounts.ListAccountsRequest( + page_token="page_token_value", + filter="filter_value", + ) + + +def test_list_accounts_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = AccountsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_accounts in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.list_accounts] = mock_rpc + request = {} + client.list_accounts(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_accounts(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_list_accounts_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = AccountsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
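+    # The async client needs an awaitable result, so the faked response below is
+    # wrapped in grpc_helpers_async.FakeUnaryUnaryCall.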
+ with mock.patch.object(type(client.transport.list_accounts), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + accounts.ListAccountsResponse( + next_page_token="next_page_token_value", + ) + ) + response = await client.list_accounts() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == accounts.ListAccountsRequest() + + +@pytest.mark.asyncio +async def test_list_accounts_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = AccountsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.list_accounts + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[ + client._client._transport.list_accounts + ] = mock_object + + request = {} + await client.list_accounts(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + await client.list_accounts(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + +@pytest.mark.asyncio +async def test_list_accounts_async( + transport: str = "grpc_asyncio", request_type=accounts.ListAccountsRequest +): + client = AccountsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_accounts), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + accounts.ListAccountsResponse( + next_page_token="next_page_token_value", + ) + ) + response = await client.list_accounts(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = accounts.ListAccountsRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListAccountsAsyncPager) + assert response.next_page_token == "next_page_token_value" + + +@pytest.mark.asyncio +async def test_list_accounts_async_from_dict(): + await test_list_accounts_async(request_type=dict) + + +def test_list_accounts_pager(transport_name: str = "grpc"): + client = AccountsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_accounts), "__call__") as call: + # Set the response to a series of pages. 
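+        # Each response in the tuple below serves one page request; the trailing
+        # RuntimeError would be raised if the pager requested more pages than were faked.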
+ call.side_effect = ( + accounts.ListAccountsResponse( + accounts=[ + accounts.Account(), + accounts.Account(), + accounts.Account(), + ], + next_page_token="abc", + ), + accounts.ListAccountsResponse( + accounts=[], + next_page_token="def", + ), + accounts.ListAccountsResponse( + accounts=[ + accounts.Account(), + ], + next_page_token="ghi", + ), + accounts.ListAccountsResponse( + accounts=[ + accounts.Account(), + accounts.Account(), + ], + ), + RuntimeError, + ) + + expected_metadata = () + pager = client.list_accounts(request={}) + + assert pager._metadata == expected_metadata + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, accounts.Account) for i in results) + + +def test_list_accounts_pages(transport_name: str = "grpc"): + client = AccountsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_accounts), "__call__") as call: + # Set the response to a series of pages. + call.side_effect = ( + accounts.ListAccountsResponse( + accounts=[ + accounts.Account(), + accounts.Account(), + accounts.Account(), + ], + next_page_token="abc", + ), + accounts.ListAccountsResponse( + accounts=[], + next_page_token="def", + ), + accounts.ListAccountsResponse( + accounts=[ + accounts.Account(), + ], + next_page_token="ghi", + ), + accounts.ListAccountsResponse( + accounts=[ + accounts.Account(), + accounts.Account(), + ], + ), + RuntimeError, + ) + pages = list(client.list_accounts(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_list_accounts_async_pager(): + client = AccountsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_accounts), "__call__", new_callable=mock.AsyncMock + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + accounts.ListAccountsResponse( + accounts=[ + accounts.Account(), + accounts.Account(), + accounts.Account(), + ], + next_page_token="abc", + ), + accounts.ListAccountsResponse( + accounts=[], + next_page_token="def", + ), + accounts.ListAccountsResponse( + accounts=[ + accounts.Account(), + ], + next_page_token="ghi", + ), + accounts.ListAccountsResponse( + accounts=[ + accounts.Account(), + accounts.Account(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_accounts( + request={}, + ) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, accounts.Account) for i in responses) + + +@pytest.mark.asyncio +async def test_list_accounts_async_pages(): + client = AccountsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_accounts), "__call__", new_callable=mock.AsyncMock + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + accounts.ListAccountsResponse( + accounts=[ + accounts.Account(), + accounts.Account(), + accounts.Account(), + ], + next_page_token="abc", + ), + accounts.ListAccountsResponse( + accounts=[], + next_page_token="def", + ), + accounts.ListAccountsResponse( + accounts=[ + accounts.Account(), + ], + next_page_token="ghi", + ), + accounts.ListAccountsResponse( + accounts=[ + accounts.Account(), + accounts.Account(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_accounts(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + accounts.ListSubAccountsRequest, + dict, + ], +) +def test_list_sub_accounts(request_type, transport: str = "grpc"): + client = AccountsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_sub_accounts), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = accounts.ListSubAccountsResponse( + next_page_token="next_page_token_value", + ) + response = client.list_sub_accounts(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = accounts.ListSubAccountsRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListSubAccountsPager) + assert response.next_page_token == "next_page_token_value" + + +def test_list_sub_accounts_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = AccountsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_sub_accounts), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.list_sub_accounts() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == accounts.ListSubAccountsRequest() + + +def test_list_sub_accounts_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = AccountsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. 
+ request = accounts.ListSubAccountsRequest( + provider="provider_value", + page_token="page_token_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_sub_accounts), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.list_sub_accounts(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == accounts.ListSubAccountsRequest( + provider="provider_value", + page_token="page_token_value", + ) + + +def test_list_sub_accounts_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = AccountsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_sub_accounts in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.list_sub_accounts + ] = mock_rpc + request = {} + client.list_sub_accounts(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_sub_accounts(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_list_sub_accounts_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = AccountsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_sub_accounts), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + accounts.ListSubAccountsResponse( + next_page_token="next_page_token_value", + ) + ) + response = await client.list_sub_accounts() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == accounts.ListSubAccountsRequest() + + +@pytest.mark.asyncio +async def test_list_sub_accounts_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = AccountsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.list_sub_accounts + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[ + client._client._transport.list_sub_accounts + ] = mock_object + + request = {} + await client.list_sub_accounts(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + await client.list_sub_accounts(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + +@pytest.mark.asyncio +async def test_list_sub_accounts_async( + transport: str = "grpc_asyncio", request_type=accounts.ListSubAccountsRequest +): + client = AccountsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_sub_accounts), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + accounts.ListSubAccountsResponse( + next_page_token="next_page_token_value", + ) + ) + response = await client.list_sub_accounts(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = accounts.ListSubAccountsRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListSubAccountsAsyncPager) + assert response.next_page_token == "next_page_token_value" + + +@pytest.mark.asyncio +async def test_list_sub_accounts_async_from_dict(): + await test_list_sub_accounts_async(request_type=dict) + + +def test_list_sub_accounts_field_headers(): + client = AccountsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = accounts.ListSubAccountsRequest() + + request.provider = "provider_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
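+    # The `provider` value set on the request should be echoed back as an
+    # `x-goog-request-params` routing header, which is asserted below.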
+ with mock.patch.object( + type(client.transport.list_sub_accounts), "__call__" + ) as call: + call.return_value = accounts.ListSubAccountsResponse() + client.list_sub_accounts(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "provider=provider_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_sub_accounts_field_headers_async(): + client = AccountsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = accounts.ListSubAccountsRequest() + + request.provider = "provider_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_sub_accounts), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + accounts.ListSubAccountsResponse() + ) + await client.list_sub_accounts(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "provider=provider_value", + ) in kw["metadata"] + + +def test_list_sub_accounts_flattened(): + client = AccountsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_sub_accounts), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = accounts.ListSubAccountsResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_sub_accounts( + provider="provider_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].provider + mock_val = "provider_value" + assert arg == mock_val + + +def test_list_sub_accounts_flattened_error(): + client = AccountsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_sub_accounts( + accounts.ListSubAccountsRequest(), + provider="provider_value", + ) + + +@pytest.mark.asyncio +async def test_list_sub_accounts_flattened_async(): + client = AccountsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_sub_accounts), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = accounts.ListSubAccountsResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + accounts.ListSubAccountsResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
+ response = await client.list_sub_accounts( + provider="provider_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].provider + mock_val = "provider_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_list_sub_accounts_flattened_error_async(): + client = AccountsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.list_sub_accounts( + accounts.ListSubAccountsRequest(), + provider="provider_value", + ) + + +def test_list_sub_accounts_pager(transport_name: str = "grpc"): + client = AccountsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_sub_accounts), "__call__" + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + accounts.ListSubAccountsResponse( + accounts=[ + accounts.Account(), + accounts.Account(), + accounts.Account(), + ], + next_page_token="abc", + ), + accounts.ListSubAccountsResponse( + accounts=[], + next_page_token="def", + ), + accounts.ListSubAccountsResponse( + accounts=[ + accounts.Account(), + ], + next_page_token="ghi", + ), + accounts.ListSubAccountsResponse( + accounts=[ + accounts.Account(), + accounts.Account(), + ], + ), + RuntimeError, + ) + + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("provider", ""),)), + ) + pager = client.list_sub_accounts(request={}) + + assert pager._metadata == expected_metadata + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, accounts.Account) for i in results) + + +def test_list_sub_accounts_pages(transport_name: str = "grpc"): + client = AccountsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_sub_accounts), "__call__" + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + accounts.ListSubAccountsResponse( + accounts=[ + accounts.Account(), + accounts.Account(), + accounts.Account(), + ], + next_page_token="abc", + ), + accounts.ListSubAccountsResponse( + accounts=[], + next_page_token="def", + ), + accounts.ListSubAccountsResponse( + accounts=[ + accounts.Account(), + ], + next_page_token="ghi", + ), + accounts.ListSubAccountsResponse( + accounts=[ + accounts.Account(), + accounts.Account(), + ], + ), + RuntimeError, + ) + pages = list(client.list_sub_accounts(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_list_sub_accounts_async_pager(): + client = AccountsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_sub_accounts), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + accounts.ListSubAccountsResponse( + accounts=[ + accounts.Account(), + accounts.Account(), + accounts.Account(), + ], + next_page_token="abc", + ), + accounts.ListSubAccountsResponse( + accounts=[], + next_page_token="def", + ), + accounts.ListSubAccountsResponse( + accounts=[ + accounts.Account(), + ], + next_page_token="ghi", + ), + accounts.ListSubAccountsResponse( + accounts=[ + accounts.Account(), + accounts.Account(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_sub_accounts( + request={}, + ) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, accounts.Account) for i in responses) + + +@pytest.mark.asyncio +async def test_list_sub_accounts_async_pages(): + client = AccountsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_sub_accounts), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + accounts.ListSubAccountsResponse( + accounts=[ + accounts.Account(), + accounts.Account(), + accounts.Account(), + ], + next_page_token="abc", + ), + accounts.ListSubAccountsResponse( + accounts=[], + next_page_token="def", + ), + accounts.ListSubAccountsResponse( + accounts=[ + accounts.Account(), + ], + next_page_token="ghi", + ), + accounts.ListSubAccountsResponse( + accounts=[ + accounts.Account(), + accounts.Account(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_sub_accounts(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + accounts.GetAccountRequest, + dict, + ], +) +def test_get_account_rest(request_type): + client = AccountsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "accounts/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = accounts.Account( + name="name_value", + account_id=1049, + account_name="account_name_value", + adult_content=True, + test_account=True, + language_code="language_code_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = accounts.Account.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_account(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, accounts.Account) + assert response.name == "name_value" + assert response.account_id == 1049 + assert response.account_name == "account_name_value" + assert response.adult_content is True + assert response.test_account is True + assert response.language_code == "language_code_value" + + +def test_get_account_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = AccountsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_account in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get_account] = mock_rpc + + request = {} + client.get_account(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.get_account(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_get_account_rest_required_fields(request_type=accounts.GetAccountRequest): + transport_class = transports.AccountsServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_account._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_account._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = AccountsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = accounts.Account() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
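+            # "v1/sample_method" below is a placeholder URI returned by the mocked
+            # transcode(), not the real accounts/v1beta route; only the query
+            # parameters sent to the session are checked.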
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = accounts.Account.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_account(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_account_rest_unset_required_fields(): + transport = transports.AccountsServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_account._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_account_rest_interceptors(null_interceptor): + transport = transports.AccountsServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.AccountsServiceRestInterceptor(), + ) + client = AccountsServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.AccountsServiceRestInterceptor, "post_get_account" + ) as post, mock.patch.object( + transports.AccountsServiceRestInterceptor, "pre_get_account" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = accounts.GetAccountRequest.pb(accounts.GetAccountRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = accounts.Account.to_json(accounts.Account()) + + request = accounts.GetAccountRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = accounts.Account() + + client.get_account( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_account_rest_bad_request( + transport: str = "rest", request_type=accounts.GetAccountRequest +): + client = AccountsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "accounts/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_account(request) + + +def test_get_account_rest_flattened(): + client = AccountsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. 
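+    # The assertions below verify that the flattened `name` argument is expanded
+    # into the accounts/v1beta/{name=accounts/*} URI.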
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = accounts.Account() + + # get arguments that satisfy an http rule for this method + sample_request = {"name": "accounts/sample1"} + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = accounts.Account.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_account(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/accounts/v1beta/{name=accounts/*}" % client.transport._host, args[1] + ) + + +def test_get_account_rest_flattened_error(transport: str = "rest"): + client = AccountsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_account( + accounts.GetAccountRequest(), + name="name_value", + ) + + +def test_get_account_rest_error(): + client = AccountsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + accounts.CreateAndConfigureAccountRequest, + dict, + ], +) +def test_create_and_configure_account_rest(request_type): + client = AccountsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = accounts.Account( + name="name_value", + account_id=1049, + account_name="account_name_value", + adult_content=True, + test_account=True, + language_code="language_code_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = accounts.Account.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.create_and_configure_account(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, accounts.Account) + assert response.name == "name_value" + assert response.account_id == 1049 + assert response.account_name == "account_name_value" + assert response.adult_content is True + assert response.test_account is True + assert response.language_code == "language_code_value" + + +def test_create_and_configure_account_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = AccountsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.create_and_configure_account + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.create_and_configure_account + ] = mock_rpc + + request = {} + client.create_and_configure_account(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.create_and_configure_account(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_create_and_configure_account_rest_required_fields( + request_type=accounts.CreateAndConfigureAccountRequest, +): + transport_class = transports.AccountsServiceRestTransport + + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_and_configure_account._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_and_configure_account._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + + client = AccountsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = accounts.Account() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
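+            # This method sends a request body, so the mocked transcode result
+            # below uses "post" and sets the body to the request message.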
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = accounts.Account.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.create_and_configure_account(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_create_and_configure_account_rest_unset_required_fields(): + transport = transports.AccountsServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.create_and_configure_account._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("account",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_and_configure_account_rest_interceptors(null_interceptor): + transport = transports.AccountsServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.AccountsServiceRestInterceptor(), + ) + client = AccountsServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.AccountsServiceRestInterceptor, "post_create_and_configure_account" + ) as post, mock.patch.object( + transports.AccountsServiceRestInterceptor, "pre_create_and_configure_account" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = accounts.CreateAndConfigureAccountRequest.pb( + accounts.CreateAndConfigureAccountRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = accounts.Account.to_json(accounts.Account()) + + request = accounts.CreateAndConfigureAccountRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = accounts.Account() + + client.create_and_configure_account( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_create_and_configure_account_rest_bad_request( + transport: str = "rest", request_type=accounts.CreateAndConfigureAccountRequest +): + client = AccountsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
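+    # The mocked 400 response is surfaced by the REST transport as
+    # core_exceptions.BadRequest, which pytest.raises captures.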
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.create_and_configure_account(request) + + +def test_create_and_configure_account_rest_error(): + client = AccountsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + accounts.DeleteAccountRequest, + dict, + ], +) +def test_delete_account_rest(request_type): + client = AccountsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "accounts/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.delete_account(request) + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_account_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = AccountsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete_account in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.delete_account] = mock_rpc + + request = {} + client.delete_account(request) + + # Establish that the underlying gRPC stub method was called. 
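+        # Calling the method a second time further below, without wrap_method being
+        # invoked again, shows the wrapped RPC is cached rather than rebuilt per call.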
+ assert mock_rpc.call_count == 1 + + client.delete_account(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_delete_account_rest_required_fields( + request_type=accounts.DeleteAccountRequest, +): + transport_class = transports.AccountsServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_account._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_account._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = AccountsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = None + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "delete", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.delete_account(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_delete_account_rest_unset_required_fields(): + transport = transports.AccountsServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.delete_account._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_account_rest_interceptors(null_interceptor): + transport = transports.AccountsServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.AccountsServiceRestInterceptor(), + ) + client = AccountsServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.AccountsServiceRestInterceptor, "pre_delete_account" + ) as pre: + pre.assert_not_called() + pb_message = accounts.DeleteAccountRequest.pb(accounts.DeleteAccountRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + + request = accounts.DeleteAccountRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + + client.delete_account( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + + +def test_delete_account_rest_bad_request( + transport: str = "rest", request_type=accounts.DeleteAccountRequest +): + client = AccountsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "accounts/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_account(request) + + +def test_delete_account_rest_flattened(): + client = AccountsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
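+        # delete_account has no response payload, so the faked body is an empty
+        # string and the client returns None.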
+ return_value = None + + # get arguments that satisfy an http rule for this method + sample_request = {"name": "accounts/sample1"} + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.delete_account(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/accounts/v1beta/{name=accounts/*}" % client.transport._host, args[1] + ) + + +def test_delete_account_rest_flattened_error(transport: str = "rest"): + client = AccountsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_account( + accounts.DeleteAccountRequest(), + name="name_value", + ) + + +def test_delete_account_rest_error(): + client = AccountsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + accounts.UpdateAccountRequest, + dict, + ], +) +def test_update_account_rest(request_type): + client = AccountsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"account": {"name": "accounts/sample1"}} + request_init["account"] = { + "name": "accounts/sample1", + "account_id": 1049, + "account_name": "account_name_value", + "adult_content": True, + "test_account": True, + "time_zone": {"id": "id_value", "version": "version_value"}, + "language_code": "language_code_value", + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = accounts.UpdateAccountRequest.meta.fields["account"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
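+ # proto-plus message classes do not expose a protobuf DESCRIPTOR attribute, which is how the check below distinguishes them from raw *_pb2 types.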
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["account"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["account"][field])): + del request_init["account"][field][i][subfield] + else: + del request_init["account"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = accounts.Account( + name="name_value", + account_id=1049, + account_name="account_name_value", + adult_content=True, + test_account=True, + language_code="language_code_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = accounts.Account.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.update_account(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, accounts.Account) + assert response.name == "name_value" + assert response.account_id == 1049 + assert response.account_name == "account_name_value" + assert response.adult_content is True + assert response.test_account is True + assert response.language_code == "language_code_value" + + +def test_update_account_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = AccountsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.update_account in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.update_account] = mock_rpc + + request = {} + client.update_account(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.update_account(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_update_account_rest_required_fields( + request_type=accounts.UpdateAccountRequest, +): + transport_class = transports.AccountsServiceRestTransport + + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_account._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_account._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("update_mask",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + + client = AccountsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = accounts.Account() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
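+ # UpdateAccount transcodes to an HTTP PATCH, so the stubbed result below also carries the request message as the HTTP body.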
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "patch", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = accounts.Account.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.update_account(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_update_account_rest_unset_required_fields(): + transport = transports.AccountsServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.update_account._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(("updateMask",)) + & set( + ( + "account", + "updateMask", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_account_rest_interceptors(null_interceptor): + transport = transports.AccountsServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.AccountsServiceRestInterceptor(), + ) + client = AccountsServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.AccountsServiceRestInterceptor, "post_update_account" + ) as post, mock.patch.object( + transports.AccountsServiceRestInterceptor, "pre_update_account" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = accounts.UpdateAccountRequest.pb(accounts.UpdateAccountRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = accounts.Account.to_json(accounts.Account()) + + request = accounts.UpdateAccountRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = accounts.Account() + + client.update_account( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_update_account_rest_bad_request( + transport: str = "rest", request_type=accounts.UpdateAccountRequest +): + client = AccountsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"account": {"name": "accounts/sample1"}} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.update_account(request) + + +def test_update_account_rest_flattened(): + client = AccountsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = accounts.Account() + + # get arguments that satisfy an http rule for this method + sample_request = {"account": {"name": "accounts/sample1"}} + + # get truthy value for each flattened field + mock_args = dict( + account=accounts.Account(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = accounts.Account.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.update_account(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/accounts/v1beta/{account.name=accounts/*}" % client.transport._host, + args[1], + ) + + +def test_update_account_rest_flattened_error(transport: str = "rest"): + client = AccountsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_account( + accounts.UpdateAccountRequest(), + account=accounts.Account(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +def test_update_account_rest_error(): + client = AccountsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + accounts.ListAccountsRequest, + dict, + ], +) +def test_list_accounts_rest(request_type): + client = AccountsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
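+ # ListAccounts is a paged RPC; the client wraps the raw response in a ListAccountsPager, as asserted below.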
+ return_value = accounts.ListAccountsResponse( + next_page_token="next_page_token_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = accounts.ListAccountsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_accounts(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListAccountsPager) + assert response.next_page_token == "next_page_token_value" + + +def test_list_accounts_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = AccountsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_accounts in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.list_accounts] = mock_rpc + + request = {} + client.list_accounts(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_accounts(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_accounts_rest_interceptors(null_interceptor): + transport = transports.AccountsServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.AccountsServiceRestInterceptor(), + ) + client = AccountsServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.AccountsServiceRestInterceptor, "post_list_accounts" + ) as post, mock.patch.object( + transports.AccountsServiceRestInterceptor, "pre_list_accounts" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = accounts.ListAccountsRequest.pb(accounts.ListAccountsRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = accounts.ListAccountsResponse.to_json( + accounts.ListAccountsResponse() + ) + + request = accounts.ListAccountsRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = accounts.ListAccountsResponse() + + client.list_accounts( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_accounts_rest_bad_request( + transport: str = "rest", 
request_type=accounts.ListAccountsRequest +): + client = AccountsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_accounts(request) + + +def test_list_accounts_rest_pager(transport: str = "rest"): + client = AccountsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + accounts.ListAccountsResponse( + accounts=[ + accounts.Account(), + accounts.Account(), + accounts.Account(), + ], + next_page_token="abc", + ), + accounts.ListAccountsResponse( + accounts=[], + next_page_token="def", + ), + accounts.ListAccountsResponse( + accounts=[ + accounts.Account(), + ], + next_page_token="ghi", + ), + accounts.ListAccountsResponse( + accounts=[ + accounts.Account(), + accounts.Account(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(accounts.ListAccountsResponse.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {} + + pager = client.list_accounts(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, accounts.Account) for i in results) + + pages = list(client.list_accounts(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + accounts.ListSubAccountsRequest, + dict, + ], +) +def test_list_sub_accounts_rest(request_type): + client = AccountsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"provider": "accounts/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = accounts.ListSubAccountsResponse( + next_page_token="next_page_token_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = accounts.ListSubAccountsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_sub_accounts(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListSubAccountsPager) + assert response.next_page_token == "next_page_token_value" + + +def test_list_sub_accounts_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = AccountsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_sub_accounts in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.list_sub_accounts + ] = mock_rpc + + request = {} + client.list_sub_accounts(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_sub_accounts(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_list_sub_accounts_rest_required_fields( + request_type=accounts.ListSubAccountsRequest, +): + transport_class = transports.AccountsServiceRestTransport + + request_init = {} + request_init["provider"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_sub_accounts._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["provider"] = "provider_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_sub_accounts._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "page_size", + "page_token", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "provider" in jsonified_request + assert jsonified_request["provider"] == "provider_value" + + client = AccountsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = accounts.ListSubAccountsResponse() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = accounts.ListSubAccountsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_sub_accounts(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_sub_accounts_rest_unset_required_fields(): + transport = transports.AccountsServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.list_sub_accounts._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "pageSize", + "pageToken", + ) + ) + & set(("provider",)) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_sub_accounts_rest_interceptors(null_interceptor): + transport = transports.AccountsServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.AccountsServiceRestInterceptor(), + ) + client = AccountsServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.AccountsServiceRestInterceptor, "post_list_sub_accounts" + ) as post, mock.patch.object( + transports.AccountsServiceRestInterceptor, "pre_list_sub_accounts" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = accounts.ListSubAccountsRequest.pb( + accounts.ListSubAccountsRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = accounts.ListSubAccountsResponse.to_json( + accounts.ListSubAccountsResponse() + ) + + request = accounts.ListSubAccountsRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = accounts.ListSubAccountsResponse() + + client.list_sub_accounts( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_sub_accounts_rest_bad_request( + transport: str = "rest", request_type=accounts.ListSubAccountsRequest +): + client = AccountsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"provider": "accounts/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a 
BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_sub_accounts(request) + + +def test_list_sub_accounts_rest_flattened(): + client = AccountsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = accounts.ListSubAccountsResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {"provider": "accounts/sample1"} + + # get truthy value for each flattened field + mock_args = dict( + provider="provider_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = accounts.ListSubAccountsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.list_sub_accounts(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/accounts/v1beta/{provider=accounts/*}:listSubaccounts" + % client.transport._host, + args[1], + ) + + +def test_list_sub_accounts_rest_flattened_error(transport: str = "rest"): + client = AccountsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_sub_accounts( + accounts.ListSubAccountsRequest(), + provider="provider_value", + ) + + +def test_list_sub_accounts_rest_pager(transport: str = "rest"): + client = AccountsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
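+ # Fake four pages holding 3, 0, 1 and 2 accounts; the pager should surface all six items and stop once the page token is empty.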
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + accounts.ListSubAccountsResponse( + accounts=[ + accounts.Account(), + accounts.Account(), + accounts.Account(), + ], + next_page_token="abc", + ), + accounts.ListSubAccountsResponse( + accounts=[], + next_page_token="def", + ), + accounts.ListSubAccountsResponse( + accounts=[ + accounts.Account(), + ], + next_page_token="ghi", + ), + accounts.ListSubAccountsResponse( + accounts=[ + accounts.Account(), + accounts.Account(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(accounts.ListSubAccountsResponse.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"provider": "accounts/sample1"} + + pager = client.list_sub_accounts(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, accounts.Account) for i in results) + + pages = list(client.list_sub_accounts(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.AccountsServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = AccountsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.AccountsServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = AccountsServiceClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. + transport = transports.AccountsServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = AccountsServiceClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = AccountsServiceClient( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.AccountsServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = AccountsServiceClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.AccountsServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = AccountsServiceClient(transport=transport) + assert client.transport is transport + + +def test_transport_get_channel(): + # A client may be instantiated with a custom transport instance. 
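+ # Both the sync and async gRPC transports expose their underlying channel via the grpc_channel property.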
+ transport = transports.AccountsServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + transport = transports.AccountsServiceGrpcAsyncIOTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.AccountsServiceGrpcTransport, + transports.AccountsServiceGrpcAsyncIOTransport, + transports.AccountsServiceRestTransport, + ], +) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "rest", + ], +) +def test_transport_kind(transport_name): + transport = AccountsServiceClient.get_transport_class(transport_name)( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert transport.kind == transport_name + + +def test_transport_grpc_default(): + # A client should use the gRPC transport by default. + client = AccountsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert isinstance( + client.transport, + transports.AccountsServiceGrpcTransport, + ) + + +def test_accounts_service_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.AccountsServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json", + ) + + +def test_accounts_service_base_transport(): + # Instantiate the base transport. + with mock.patch( + "google.shopping.merchant_accounts_v1beta.services.accounts_service.transports.AccountsServiceTransport.__init__" + ) as Transport: + Transport.return_value = None + transport = transports.AccountsServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. 
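+ # The abstract base transport only declares the RPC surface; every concrete transport is expected to override these methods.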
+ methods = ( + "get_account", + "create_and_configure_account", + "delete_account", + "update_account", + "list_accounts", + "list_sub_accounts", + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + with pytest.raises(NotImplementedError): + transport.close() + + # Catch all for all remaining methods and properties + remainder = [ + "kind", + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + + +def test_accounts_service_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch( + "google.shopping.merchant_accounts_v1beta.services.accounts_service.transports.AccountsServiceTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.AccountsServiceTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with( + "credentials.json", + scopes=None, + default_scopes=("https://www.googleapis.com/auth/content",), + quota_project_id="octopus", + ) + + +def test_accounts_service_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. + with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( + "google.shopping.merchant_accounts_v1beta.services.accounts_service.transports.AccountsServiceTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.AccountsServiceTransport() + adc.assert_called_once() + + +def test_accounts_service_auth_adc(): + # If no credentials are provided, we should use ADC credentials. + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + AccountsServiceClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=("https://www.googleapis.com/auth/content",), + quota_project_id=None, + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.AccountsServiceGrpcTransport, + transports.AccountsServiceGrpcAsyncIOTransport, + ], +) +def test_accounts_service_transport_auth_adc(transport_class): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
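+ # Explicit scopes passed to the transport are forwarded to ADC alongside the library's default content scope.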
+ with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + adc.assert_called_once_with( + scopes=["1", "2"], + default_scopes=("https://www.googleapis.com/auth/content",), + quota_project_id="octopus", + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.AccountsServiceGrpcTransport, + transports.AccountsServiceGrpcAsyncIOTransport, + transports.AccountsServiceRestTransport, + ], +) +def test_accounts_service_transport_auth_gdch_credentials(transport_class): + host = "https://language.com" + api_audience_tests = [None, "https://language2.com"] + api_audience_expect = [host, "https://language2.com"] + for t, e in zip(api_audience_tests, api_audience_expect): + with mock.patch.object(google.auth, "default", autospec=True) as adc: + gdch_mock = mock.MagicMock() + type(gdch_mock).with_gdch_audience = mock.PropertyMock( + return_value=gdch_mock + ) + adc.return_value = (gdch_mock, None) + transport_class(host=host, api_audience=t) + gdch_mock.with_gdch_audience.assert_called_once_with(e) + + +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.AccountsServiceGrpcTransport, grpc_helpers), + (transports.AccountsServiceGrpcAsyncIOTransport, grpc_helpers_async), + ], +) +def test_accounts_service_transport_create_channel(transport_class, grpc_helpers): + # If credentials and host are not provided, the transport class should use + # ADC credentials. + with mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + + create_channel.assert_called_with( + "merchantapi.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + default_scopes=("https://www.googleapis.com/auth/content",), + scopes=["1", "2"], + default_host="merchantapi.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.AccountsServiceGrpcTransport, + transports.AccountsServiceGrpcAsyncIOTransport, + ], +) +def test_accounts_service_grpc_transport_client_cert_source_for_mtls(transport_class): + cred = ga_credentials.AnonymousCredentials() + + # Check ssl_channel_credentials is used if provided. + with mock.patch.object(transport_class, "create_channel") as mock_create_channel: + mock_ssl_channel_creds = mock.Mock() + transport_class( + host="squid.clam.whelk", + credentials=cred, + ssl_channel_credentials=mock_ssl_channel_creds, + ) + mock_create_channel.assert_called_once_with( + "squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_channel_creds, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls + # is used. 
+ with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): + with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: + transport_class( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback, + ) + expected_cert, expected_key = client_cert_source_callback() + mock_ssl_cred.assert_called_once_with( + certificate_chain=expected_cert, private_key=expected_key + ) + + +def test_accounts_service_http_transport_client_cert_source_for_mtls(): + cred = ga_credentials.AnonymousCredentials() + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ) as mock_configure_mtls_channel: + transports.AccountsServiceRestTransport( + credentials=cred, client_cert_source_for_mtls=client_cert_source_callback + ) + mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + "rest", + ], +) +def test_accounts_service_host_no_port(transport_name): + client = AccountsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="merchantapi.googleapis.com" + ), + transport=transport_name, + ) + assert client.transport._host == ( + "merchantapi.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://merchantapi.googleapis.com" + ) + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + "rest", + ], +) +def test_accounts_service_host_with_port(transport_name): + client = AccountsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="merchantapi.googleapis.com:8000" + ), + transport=transport_name, + ) + assert client.transport._host == ( + "merchantapi.googleapis.com:8000" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://merchantapi.googleapis.com:8000" + ) + + +@pytest.mark.parametrize( + "transport_name", + [ + "rest", + ], +) +def test_accounts_service_client_transport_session_collision(transport_name): + creds1 = ga_credentials.AnonymousCredentials() + creds2 = ga_credentials.AnonymousCredentials() + client1 = AccountsServiceClient( + credentials=creds1, + transport=transport_name, + ) + client2 = AccountsServiceClient( + credentials=creds2, + transport=transport_name, + ) + session1 = client1.transport.get_account._session + session2 = client2.transport.get_account._session + assert session1 != session2 + session1 = client1.transport.create_and_configure_account._session + session2 = client2.transport.create_and_configure_account._session + assert session1 != session2 + session1 = client1.transport.delete_account._session + session2 = client2.transport.delete_account._session + assert session1 != session2 + session1 = client1.transport.update_account._session + session2 = client2.transport.update_account._session + assert session1 != session2 + session1 = client1.transport.list_accounts._session + session2 = client2.transport.list_accounts._session + assert session1 != session2 + session1 = client1.transport.list_sub_accounts._session + session2 = client2.transport.list_sub_accounts._session + assert session1 != session2 + + +def test_accounts_service_grpc_transport_channel(): + channel = grpc.secure_channel("http://localhost/", 
grpc.local_channel_credentials()) + + # Check that channel is used if provided. + transport = transports.AccountsServiceGrpcTransport( + host="squid.clam.whelk", + channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None + + +def test_accounts_service_grpc_asyncio_transport_channel(): + channel = aio.secure_channel("http://localhost/", grpc.local_channel_credentials()) + + # Check that channel is used if provided. + transport = transports.AccountsServiceGrpcAsyncIOTransport( + host="squid.clam.whelk", + channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. +@pytest.mark.parametrize( + "transport_class", + [ + transports.AccountsServiceGrpcTransport, + transports.AccountsServiceGrpcAsyncIOTransport, + ], +) +def test_accounts_service_transport_channel_mtls_with_client_cert_source( + transport_class, +): + with mock.patch( + "grpc.ssl_channel_credentials", autospec=True + ) as grpc_ssl_channel_cred: + with mock.patch.object( + transport_class, "create_channel" + ) as grpc_create_channel: + mock_ssl_cred = mock.Mock() + grpc_ssl_channel_cred.return_value = mock_ssl_cred + + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + + cred = ga_credentials.AnonymousCredentials() + with pytest.warns(DeprecationWarning): + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (cred, None) + transport = transport_class( + host="squid.clam.whelk", + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=client_cert_source_callback, + ) + adc.assert_called_once() + + grpc_ssl_channel_cred.assert_called_once_with( + certificate_chain=b"cert bytes", private_key=b"key bytes" + ) + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + assert transport._ssl_channel_credentials == mock_ssl_cred + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. 
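+ # With api_mtls_endpoint set and no explicit client cert source, mTLS channel credentials are derived from ADC via SslCredentials.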
+@pytest.mark.parametrize( + "transport_class", + [ + transports.AccountsServiceGrpcTransport, + transports.AccountsServiceGrpcAsyncIOTransport, + ], +) +def test_accounts_service_transport_channel_mtls_with_adc(transport_class): + mock_ssl_cred = mock.Mock() + with mock.patch.multiple( + "google.auth.transport.grpc.SslCredentials", + __init__=mock.Mock(return_value=None), + ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), + ): + with mock.patch.object( + transport_class, "create_channel" + ) as grpc_create_channel: + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + mock_cred = mock.Mock() + + with pytest.warns(DeprecationWarning): + transport = transport_class( + host="squid.clam.whelk", + credentials=mock_cred, + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=None, + ) + + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=mock_cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + + +def test_account_path(): + account = "squid" + expected = "accounts/{account}".format( + account=account, + ) + actual = AccountsServiceClient.account_path(account) + assert expected == actual + + +def test_parse_account_path(): + expected = { + "account": "clam", + } + path = AccountsServiceClient.account_path(**expected) + + # Check that the path construction is reversible. + actual = AccountsServiceClient.parse_account_path(path) + assert expected == actual + + +def test_terms_of_service_path(): + version = "whelk" + expected = "termsOfService/{version}".format( + version=version, + ) + actual = AccountsServiceClient.terms_of_service_path(version) + assert expected == actual + + +def test_parse_terms_of_service_path(): + expected = { + "version": "octopus", + } + path = AccountsServiceClient.terms_of_service_path(**expected) + + # Check that the path construction is reversible. + actual = AccountsServiceClient.parse_terms_of_service_path(path) + assert expected == actual + + +def test_user_path(): + account = "oyster" + email = "nudibranch" + expected = "accounts/{account}/users/{email}".format( + account=account, + email=email, + ) + actual = AccountsServiceClient.user_path(account, email) + assert expected == actual + + +def test_parse_user_path(): + expected = { + "account": "cuttlefish", + "email": "mussel", + } + path = AccountsServiceClient.user_path(**expected) + + # Check that the path construction is reversible. + actual = AccountsServiceClient.parse_user_path(path) + assert expected == actual + + +def test_common_billing_account_path(): + billing_account = "winkle" + expected = "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + actual = AccountsServiceClient.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "nautilus", + } + path = AccountsServiceClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. 
+ actual = AccountsServiceClient.parse_common_billing_account_path(path) + assert expected == actual + + +def test_common_folder_path(): + folder = "scallop" + expected = "folders/{folder}".format( + folder=folder, + ) + actual = AccountsServiceClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "abalone", + } + path = AccountsServiceClient.common_folder_path(**expected) + + # Check that the path construction is reversible. + actual = AccountsServiceClient.parse_common_folder_path(path) + assert expected == actual + + +def test_common_organization_path(): + organization = "squid" + expected = "organizations/{organization}".format( + organization=organization, + ) + actual = AccountsServiceClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "clam", + } + path = AccountsServiceClient.common_organization_path(**expected) + + # Check that the path construction is reversible. + actual = AccountsServiceClient.parse_common_organization_path(path) + assert expected == actual + + +def test_common_project_path(): + project = "whelk" + expected = "projects/{project}".format( + project=project, + ) + actual = AccountsServiceClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "octopus", + } + path = AccountsServiceClient.common_project_path(**expected) + + # Check that the path construction is reversible. + actual = AccountsServiceClient.parse_common_project_path(path) + assert expected == actual + + +def test_common_location_path(): + project = "oyster" + location = "nudibranch" + expected = "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) + actual = AccountsServiceClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "cuttlefish", + "location": "mussel", + } + path = AccountsServiceClient.common_location_path(**expected) + + # Check that the path construction is reversible. 
+ actual = AccountsServiceClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object( + transports.AccountsServiceTransport, "_prep_wrapped_messages" + ) as prep: + client = AccountsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object( + transports.AccountsServiceTransport, "_prep_wrapped_messages" + ) as prep: + transport_class = AccountsServiceClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + +@pytest.mark.asyncio +async def test_transport_close_async(): + client = AccountsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + with mock.patch.object( + type(getattr(client.transport, "grpc_channel")), "close" + ) as close: + async with client: + close.assert_not_called() + close.assert_called_once() + + +def test_transport_close(): + transports = { + "rest": "_session", + "grpc": "_grpc_channel", + } + + for transport, close_name in transports.items(): + client = AccountsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + with mock.patch.object( + type(getattr(client.transport, close_name)), "close" + ) as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +def test_client_ctx(): + transports = [ + "rest", + "grpc", + ] + for transport in transports: + client = AccountsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + # Test client calls underlying transport. 
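+ # Using the client as a context manager should close its transport on exit.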
+ with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() + + +@pytest.mark.parametrize( + "client_class,transport_class", + [ + (AccountsServiceClient, transports.AccountsServiceGrpcTransport), + (AccountsServiceAsyncClient, transports.AccountsServiceGrpcAsyncIOTransport), + ], +) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) diff --git a/packages/google-shopping-merchant-accounts/tests/unit/gapic/merchant_accounts_v1beta/test_business_identity_service.py b/packages/google-shopping-merchant-accounts/tests/unit/gapic/merchant_accounts_v1beta/test_business_identity_service.py new file mode 100644 index 000000000000..a29165a9fe50 --- /dev/null +++ b/packages/google-shopping-merchant-accounts/tests/unit/gapic/merchant_accounts_v1beta/test_business_identity_service.py @@ -0,0 +1,3489 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import os + +# try/except added for compatibility with python < 3.8 +try: + from unittest import mock + from unittest.mock import AsyncMock # pragma: NO COVER +except ImportError: # pragma: NO COVER + import mock + +from collections.abc import Iterable +import json +import math + +from google.api_core import gapic_v1, grpc_helpers, grpc_helpers_async, path_template +from google.api_core import api_core_version, client_options +from google.api_core import exceptions as core_exceptions +import google.auth +from google.auth import credentials as ga_credentials +from google.auth.exceptions import MutualTLSChannelError +from google.oauth2 import service_account +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import json_format +import grpc +from grpc.experimental import aio +from proto.marshal.rules import wrappers +from proto.marshal.rules.dates import DurationRule, TimestampRule +import pytest +from requests import PreparedRequest, Request, Response +from requests.sessions import Session + +from google.shopping.merchant_accounts_v1beta.services.business_identity_service import ( + BusinessIdentityServiceAsyncClient, + BusinessIdentityServiceClient, + transports, +) +from google.shopping.merchant_accounts_v1beta.types import businessidentity + + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + + +# If default endpoint is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. +def modify_default_endpoint(client): + return ( + "foo.googleapis.com" + if ("localhost" in client.DEFAULT_ENDPOINT) + else client.DEFAULT_ENDPOINT + ) + + +# If default endpoint template is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint template so the client can produce a different +# mtls endpoint for endpoint testing purposes. 
+def modify_default_endpoint_template(client): + return ( + "test.{UNIVERSE_DOMAIN}" + if ("localhost" in client._DEFAULT_ENDPOINT_TEMPLATE) + else client._DEFAULT_ENDPOINT_TEMPLATE + ) + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert BusinessIdentityServiceClient._get_default_mtls_endpoint(None) is None + assert ( + BusinessIdentityServiceClient._get_default_mtls_endpoint(api_endpoint) + == api_mtls_endpoint + ) + assert ( + BusinessIdentityServiceClient._get_default_mtls_endpoint(api_mtls_endpoint) + == api_mtls_endpoint + ) + assert ( + BusinessIdentityServiceClient._get_default_mtls_endpoint(sandbox_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + BusinessIdentityServiceClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + BusinessIdentityServiceClient._get_default_mtls_endpoint(non_googleapi) + == non_googleapi + ) + + +def test__read_environment_variables(): + assert BusinessIdentityServiceClient._read_environment_variables() == ( + False, + "auto", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + assert BusinessIdentityServiceClient._read_environment_variables() == ( + True, + "auto", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + assert BusinessIdentityServiceClient._read_environment_variables() == ( + False, + "auto", + None, + ) + + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError) as excinfo: + BusinessIdentityServiceClient._read_environment_variables() + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + assert BusinessIdentityServiceClient._read_environment_variables() == ( + False, + "never", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + assert BusinessIdentityServiceClient._read_environment_variables() == ( + False, + "always", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}): + assert BusinessIdentityServiceClient._read_environment_variables() == ( + False, + "auto", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + BusinessIdentityServiceClient._read_environment_variables() + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + with mock.patch.dict(os.environ, {"GOOGLE_CLOUD_UNIVERSE_DOMAIN": "foo.com"}): + assert BusinessIdentityServiceClient._read_environment_variables() == ( + False, + "auto", + "foo.com", + ) + + +def test__get_client_cert_source(): + mock_provided_cert_source = mock.Mock() + mock_default_cert_source = mock.Mock() + + assert BusinessIdentityServiceClient._get_client_cert_source(None, False) is None + assert ( + BusinessIdentityServiceClient._get_client_cert_source( + mock_provided_cert_source, False + ) + is None + ) + assert ( + BusinessIdentityServiceClient._get_client_cert_source( + mock_provided_cert_source, True + ) + == 
mock_provided_cert_source + ) + + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", return_value=True + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=mock_default_cert_source, + ): + assert ( + BusinessIdentityServiceClient._get_client_cert_source(None, True) + is mock_default_cert_source + ) + assert ( + BusinessIdentityServiceClient._get_client_cert_source( + mock_provided_cert_source, "true" + ) + is mock_provided_cert_source + ) + + +@mock.patch.object( + BusinessIdentityServiceClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(BusinessIdentityServiceClient), +) +@mock.patch.object( + BusinessIdentityServiceAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(BusinessIdentityServiceAsyncClient), +) +def test__get_api_endpoint(): + api_override = "foo.com" + mock_client_cert_source = mock.Mock() + default_universe = BusinessIdentityServiceClient._DEFAULT_UNIVERSE + default_endpoint = BusinessIdentityServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=default_universe + ) + mock_universe = "bar.com" + mock_endpoint = BusinessIdentityServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=mock_universe + ) + + assert ( + BusinessIdentityServiceClient._get_api_endpoint( + api_override, mock_client_cert_source, default_universe, "always" + ) + == api_override + ) + assert ( + BusinessIdentityServiceClient._get_api_endpoint( + None, mock_client_cert_source, default_universe, "auto" + ) + == BusinessIdentityServiceClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + BusinessIdentityServiceClient._get_api_endpoint( + None, None, default_universe, "auto" + ) + == default_endpoint + ) + assert ( + BusinessIdentityServiceClient._get_api_endpoint( + None, None, default_universe, "always" + ) + == BusinessIdentityServiceClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + BusinessIdentityServiceClient._get_api_endpoint( + None, mock_client_cert_source, default_universe, "always" + ) + == BusinessIdentityServiceClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + BusinessIdentityServiceClient._get_api_endpoint( + None, None, mock_universe, "never" + ) + == mock_endpoint + ) + assert ( + BusinessIdentityServiceClient._get_api_endpoint( + None, None, default_universe, "never" + ) + == default_endpoint + ) + + with pytest.raises(MutualTLSChannelError) as excinfo: + BusinessIdentityServiceClient._get_api_endpoint( + None, mock_client_cert_source, mock_universe, "auto" + ) + assert ( + str(excinfo.value) + == "mTLS is not supported in any universe other than googleapis.com." + ) + + +def test__get_universe_domain(): + client_universe_domain = "foo.com" + universe_domain_env = "bar.com" + + assert ( + BusinessIdentityServiceClient._get_universe_domain( + client_universe_domain, universe_domain_env + ) + == client_universe_domain + ) + assert ( + BusinessIdentityServiceClient._get_universe_domain(None, universe_domain_env) + == universe_domain_env + ) + assert ( + BusinessIdentityServiceClient._get_universe_domain(None, None) + == BusinessIdentityServiceClient._DEFAULT_UNIVERSE + ) + + with pytest.raises(ValueError) as excinfo: + BusinessIdentityServiceClient._get_universe_domain("", None) + assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
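+
+
+# Illustrative sketch, not generated code: the private helpers exercised above feed
+# into endpoint resolution roughly like this, assuming the stock endpoint template
+# "merchantapi.{UNIVERSE_DOMAIN}" and a hypothetical universe "example.goog":
+#
+#     universe = BusinessIdentityServiceClient._get_universe_domain(
+#         None,            # client_options.universe_domain not set
+#         "example.goog",  # value read from GOOGLE_CLOUD_UNIVERSE_DOMAIN
+#     )
+#     endpoint = BusinessIdentityServiceClient._get_api_endpoint(
+#         None, None, universe, "never"  # no override, no client cert, mTLS "never"
+#     )
+#     # universe == "example.goog"; endpoint == "merchantapi.example.goog"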
+ + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + ( + BusinessIdentityServiceClient, + transports.BusinessIdentityServiceGrpcTransport, + "grpc", + ), + ( + BusinessIdentityServiceClient, + transports.BusinessIdentityServiceRestTransport, + "rest", + ), + ], +) +def test__validate_universe_domain(client_class, transport_class, transport_name): + client = client_class( + transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) + ) + assert client._validate_universe_domain() == True + + # Test the case when universe is already validated. + assert client._validate_universe_domain() == True + + if transport_name == "grpc": + # Test the case where credentials are provided by the + # `local_channel_credentials`. The default universes in both match. + channel = grpc.secure_channel( + "http://localhost/", grpc.local_channel_credentials() + ) + client = client_class(transport=transport_class(channel=channel)) + assert client._validate_universe_domain() == True + + # Test the case where credentials do not exist: e.g. a transport is provided + # with no credentials. Validation should still succeed because there is no + # mismatch with non-existent credentials. + channel = grpc.secure_channel( + "http://localhost/", grpc.local_channel_credentials() + ) + transport = transport_class(channel=channel) + transport._credentials = None + client = client_class(transport=transport) + assert client._validate_universe_domain() == True + + # TODO: This is needed to cater for older versions of google-auth + # Make this test unconditional once the minimum supported version of + # google-auth becomes 2.23.0 or higher. + google_auth_major, google_auth_minor = [ + int(part) for part in google.auth.__version__.split(".")[0:2] + ] + if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): + credentials = ga_credentials.AnonymousCredentials() + credentials._universe_domain = "foo.com" + # Test the case when there is a universe mismatch from the credentials. + client = client_class(transport=transport_class(credentials=credentials)) + with pytest.raises(ValueError) as excinfo: + client._validate_universe_domain() + assert ( + str(excinfo.value) + == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." + ) + + # Test the case when there is a universe mismatch from the client. + # + # TODO: Make this test unconditional once the minimum supported version of + # google-api-core becomes 2.15.0 or higher. + api_core_major, api_core_minor = [ + int(part) for part in api_core_version.__version__.split(".")[0:2] + ] + if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): + client = client_class( + client_options={"universe_domain": "bar.com"}, + transport=transport_class( + credentials=ga_credentials.AnonymousCredentials(), + ), + ) + with pytest.raises(ValueError) as excinfo: + client._validate_universe_domain() + assert ( + str(excinfo.value) + == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." 
+ ) + + # Test that ValueError is raised if universe_domain is provided via client options and credentials is None + with pytest.raises(ValueError): + client._compare_universes("foo.bar", None) + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (BusinessIdentityServiceClient, "grpc"), + (BusinessIdentityServiceAsyncClient, "grpc_asyncio"), + (BusinessIdentityServiceClient, "rest"), + ], +) +def test_business_identity_service_client_from_service_account_info( + client_class, transport_name +): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_info" + ) as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info, transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + "merchantapi.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://merchantapi.googleapis.com" + ) + + +@pytest.mark.parametrize( + "transport_class,transport_name", + [ + (transports.BusinessIdentityServiceGrpcTransport, "grpc"), + (transports.BusinessIdentityServiceGrpcAsyncIOTransport, "grpc_asyncio"), + (transports.BusinessIdentityServiceRestTransport, "rest"), + ], +) +def test_business_identity_service_client_service_account_always_use_jwt( + transport_class, transport_name +): + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) + + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=False) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (BusinessIdentityServiceClient, "grpc"), + (BusinessIdentityServiceAsyncClient, "grpc_asyncio"), + (BusinessIdentityServiceClient, "rest"), + ], +) +def test_business_identity_service_client_from_service_account_file( + client_class, transport_name +): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_file" + ) as factory: + factory.return_value = creds + client = client_class.from_service_account_file( + "dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + client = client_class.from_service_account_json( + "dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + "merchantapi.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://merchantapi.googleapis.com" + ) + + +def test_business_identity_service_client_get_transport_class(): + transport = BusinessIdentityServiceClient.get_transport_class() + available_transports = [ + transports.BusinessIdentityServiceGrpcTransport, + transports.BusinessIdentityServiceRestTransport, + ] + assert transport in available_transports + + transport = 
BusinessIdentityServiceClient.get_transport_class("grpc") + assert transport == transports.BusinessIdentityServiceGrpcTransport + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + ( + BusinessIdentityServiceClient, + transports.BusinessIdentityServiceGrpcTransport, + "grpc", + ), + ( + BusinessIdentityServiceAsyncClient, + transports.BusinessIdentityServiceGrpcAsyncIOTransport, + "grpc_asyncio", + ), + ( + BusinessIdentityServiceClient, + transports.BusinessIdentityServiceRestTransport, + "rest", + ), + ], +) +@mock.patch.object( + BusinessIdentityServiceClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(BusinessIdentityServiceClient), +) +@mock.patch.object( + BusinessIdentityServiceAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(BusinessIdentityServiceAsyncClient), +) +def test_business_identity_service_client_client_options( + client_class, transport_class, transport_name +): + # Check that if channel is provided we won't create a new one. + with mock.patch.object(BusinessIdentityServiceClient, "get_transport_class") as gtc: + transport = transport_class(credentials=ga_credentials.AnonymousCredentials()) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object(BusinessIdentityServiceClient, "get_transport_class") as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. + options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name, client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. 
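+    # (Client construction is expected to surface this as MutualTLSChannelError.)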
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + client = client_class(transport=transport_name) + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError) as excinfo: + client = client_class(transport=transport_name) + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + # Check the case api_endpoint is provided + options = client_options.ClientOptions( + api_audience="https://language.googleapis.com" + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience="https://language.googleapis.com", + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,use_client_cert_env", + [ + ( + BusinessIdentityServiceClient, + transports.BusinessIdentityServiceGrpcTransport, + "grpc", + "true", + ), + ( + BusinessIdentityServiceAsyncClient, + transports.BusinessIdentityServiceGrpcAsyncIOTransport, + "grpc_asyncio", + "true", + ), + ( + BusinessIdentityServiceClient, + transports.BusinessIdentityServiceGrpcTransport, + "grpc", + "false", + ), + ( + BusinessIdentityServiceAsyncClient, + transports.BusinessIdentityServiceGrpcAsyncIOTransport, + "grpc_asyncio", + "false", + ), + ( + BusinessIdentityServiceClient, + transports.BusinessIdentityServiceRestTransport, + "rest", + "true", + ), + ( + BusinessIdentityServiceClient, + transports.BusinessIdentityServiceRestTransport, + "rest", + "false", + ), + ], +) +@mock.patch.object( + BusinessIdentityServiceClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(BusinessIdentityServiceClient), +) +@mock.patch.object( + BusinessIdentityServiceAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(BusinessIdentityServiceAsyncClient), +) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_business_identity_service_client_mtls_env_auto( + client_class, transport_class, transport_name, use_client_cert_env +): + # This tests 
the endpoint autoswitch behavior. Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. + + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + options = client_options.ClientOptions( + client_cert_source=client_cert_source_callback + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ) + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=client_cert_source_callback, + ): + if use_client_cert_env == "false": + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ) + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case client_cert_source and ADC client cert are not provided. 
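+    # (With no certificate available from either source, the client should fall
+    #  back to the plain default endpoint regardless of use_client_cert_env.)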
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class", [BusinessIdentityServiceClient, BusinessIdentityServiceAsyncClient] +) +@mock.patch.object( + BusinessIdentityServiceClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(BusinessIdentityServiceClient), +) +@mock.patch.object( + BusinessIdentityServiceAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(BusinessIdentityServiceAsyncClient), +) +def test_business_identity_service_client_get_mtls_endpoint_and_cert_source( + client_class, +): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. 
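+    # (When a default client certificate is discoverable, "auto" should resolve to
+    #  the mTLS endpoint and return that certificate source.)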
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=mock_client_cert_source, + ): + ( + api_endpoint, + cert_source, + ) = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + client_class.get_mtls_endpoint_and_cert_source() + + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError) as excinfo: + client_class.get_mtls_endpoint_and_cert_source() + + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + + +@pytest.mark.parametrize( + "client_class", [BusinessIdentityServiceClient, BusinessIdentityServiceAsyncClient] +) +@mock.patch.object( + BusinessIdentityServiceClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(BusinessIdentityServiceClient), +) +@mock.patch.object( + BusinessIdentityServiceAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(BusinessIdentityServiceAsyncClient), +) +def test_business_identity_service_client_client_api_endpoint(client_class): + mock_client_cert_source = client_cert_source_callback + api_override = "foo.com" + default_universe = BusinessIdentityServiceClient._DEFAULT_UNIVERSE + default_endpoint = BusinessIdentityServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=default_universe + ) + mock_universe = "bar.com" + mock_endpoint = BusinessIdentityServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=mock_universe + ) + + # If ClientOptions.api_endpoint is set and GOOGLE_API_USE_CLIENT_CERTIFICATE="true", + # use ClientOptions.api_endpoint as the api endpoint regardless. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ): + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=api_override + ) + client = client_class( + client_options=options, + credentials=ga_credentials.AnonymousCredentials(), + ) + assert client.api_endpoint == api_override + + # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="never", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + client = client_class(credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == default_endpoint + + # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="always", + # use the DEFAULT_MTLS_ENDPOINT as the api endpoint. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + client = client_class(credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + + # If ClientOptions.api_endpoint is not set, GOOGLE_API_USE_MTLS_ENDPOINT="auto" (default), + # GOOGLE_API_USE_CLIENT_CERTIFICATE="false" (default), default cert source doesn't exist, + # and ClientOptions.universe_domain="bar.com", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with universe domain as the api endpoint. + options = client_options.ClientOptions() + universe_exists = hasattr(options, "universe_domain") + if universe_exists: + options = client_options.ClientOptions(universe_domain=mock_universe) + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + else: + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + assert client.api_endpoint == ( + mock_endpoint if universe_exists else default_endpoint + ) + assert client.universe_domain == ( + mock_universe if universe_exists else default_universe + ) + + # If ClientOptions does not have a universe domain attribute and GOOGLE_API_USE_MTLS_ENDPOINT="never", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. + options = client_options.ClientOptions() + if hasattr(options, "universe_domain"): + delattr(options, "universe_domain") + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + assert client.api_endpoint == default_endpoint + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + ( + BusinessIdentityServiceClient, + transports.BusinessIdentityServiceGrpcTransport, + "grpc", + ), + ( + BusinessIdentityServiceAsyncClient, + transports.BusinessIdentityServiceGrpcAsyncIOTransport, + "grpc_asyncio", + ), + ( + BusinessIdentityServiceClient, + transports.BusinessIdentityServiceRestTransport, + "rest", + ), + ], +) +def test_business_identity_service_client_client_options_scopes( + client_class, transport_class, transport_name +): + # Check the case scopes are provided. + options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + BusinessIdentityServiceClient, + transports.BusinessIdentityServiceGrpcTransport, + "grpc", + grpc_helpers, + ), + ( + BusinessIdentityServiceAsyncClient, + transports.BusinessIdentityServiceGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + ( + BusinessIdentityServiceClient, + transports.BusinessIdentityServiceRestTransport, + "rest", + None, + ), + ], +) +def test_business_identity_service_client_client_options_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. 
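+    # (The path is forwarded to the transport as `credentials_file`; it is never
+    #  opened here because the transport __init__ is mocked out.)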
+ options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +def test_business_identity_service_client_client_options_from_dict(): + with mock.patch( + "google.shopping.merchant_accounts_v1beta.services.business_identity_service.transports.BusinessIdentityServiceGrpcTransport.__init__" + ) as grpc_transport: + grpc_transport.return_value = None + client = BusinessIdentityServiceClient( + client_options={"api_endpoint": "squid.clam.whelk"} + ) + grpc_transport.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + BusinessIdentityServiceClient, + transports.BusinessIdentityServiceGrpcTransport, + "grpc", + grpc_helpers, + ), + ( + BusinessIdentityServiceAsyncClient, + transports.BusinessIdentityServiceGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + ], +) +def test_business_identity_service_client_create_channel_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. + options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # test that the credentials from file are saved and used as the credentials. 
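+    # (create_channel should receive the credentials loaded from the file rather
+    #  than the ADC credentials returned by google.auth.default.)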
+ with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel" + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + file_creds = ga_credentials.AnonymousCredentials() + load_creds.return_value = (file_creds, None) + adc.return_value = (creds, None) + client = client_class(client_options=options, transport=transport_name) + create_channel.assert_called_with( + "merchantapi.googleapis.com:443", + credentials=file_creds, + credentials_file=None, + quota_project_id=None, + default_scopes=("https://www.googleapis.com/auth/content",), + scopes=None, + default_host="merchantapi.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "request_type", + [ + businessidentity.GetBusinessIdentityRequest, + dict, + ], +) +def test_get_business_identity(request_type, transport: str = "grpc"): + client = BusinessIdentityServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_business_identity), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = businessidentity.BusinessIdentity( + name="name_value", + promotions_consent=businessidentity.BusinessIdentity.PromotionsConsent.PROMOTIONS_CONSENT_GIVEN, + ) + response = client.get_business_identity(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = businessidentity.GetBusinessIdentityRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, businessidentity.BusinessIdentity) + assert response.name == "name_value" + assert ( + response.promotions_consent + == businessidentity.BusinessIdentity.PromotionsConsent.PROMOTIONS_CONSENT_GIVEN + ) + + +def test_get_business_identity_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = BusinessIdentityServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_business_identity), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.get_business_identity() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == businessidentity.GetBusinessIdentityRequest() + + +def test_get_business_identity_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. 
+ client = BusinessIdentityServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = businessidentity.GetBusinessIdentityRequest( + name="name_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_business_identity), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.get_business_identity(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == businessidentity.GetBusinessIdentityRequest( + name="name_value", + ) + + +def test_get_business_identity_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BusinessIdentityServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.get_business_identity + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.get_business_identity + ] = mock_rpc + request = {} + client.get_business_identity(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.get_business_identity(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_get_business_identity_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = BusinessIdentityServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_business_identity), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
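+        # (FakeUnaryUnaryCall wraps the response so the mocked stub is awaitable,
+        #  mimicking a real grpc.aio unary-unary call.)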
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + businessidentity.BusinessIdentity( + name="name_value", + promotions_consent=businessidentity.BusinessIdentity.PromotionsConsent.PROMOTIONS_CONSENT_GIVEN, + ) + ) + response = await client.get_business_identity() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == businessidentity.GetBusinessIdentityRequest() + + +@pytest.mark.asyncio +async def test_get_business_identity_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = BusinessIdentityServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.get_business_identity + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[ + client._client._transport.get_business_identity + ] = mock_object + + request = {} + await client.get_business_identity(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + await client.get_business_identity(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + +@pytest.mark.asyncio +async def test_get_business_identity_async( + transport: str = "grpc_asyncio", + request_type=businessidentity.GetBusinessIdentityRequest, +): + client = BusinessIdentityServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_business_identity), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + businessidentity.BusinessIdentity( + name="name_value", + promotions_consent=businessidentity.BusinessIdentity.PromotionsConsent.PROMOTIONS_CONSENT_GIVEN, + ) + ) + response = await client.get_business_identity(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = businessidentity.GetBusinessIdentityRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, businessidentity.BusinessIdentity) + assert response.name == "name_value" + assert ( + response.promotions_consent + == businessidentity.BusinessIdentity.PromotionsConsent.PROMOTIONS_CONSENT_GIVEN + ) + + +@pytest.mark.asyncio +async def test_get_business_identity_async_from_dict(): + await test_get_business_identity_async(request_type=dict) + + +def test_get_business_identity_field_headers(): + client = BusinessIdentityServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = businessidentity.GetBusinessIdentityRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_business_identity), "__call__" + ) as call: + call.return_value = businessidentity.BusinessIdentity() + client.get_business_identity(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_business_identity_field_headers_async(): + client = BusinessIdentityServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = businessidentity.GetBusinessIdentityRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_business_identity), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + businessidentity.BusinessIdentity() + ) + await client.get_business_identity(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_get_business_identity_flattened(): + client = BusinessIdentityServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_business_identity), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = businessidentity.BusinessIdentity() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_business_identity( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_get_business_identity_flattened_error(): + client = BusinessIdentityServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
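+    # (The ValueError is raised client-side before any RPC is attempted, so no
+    #  transport mock is needed here.)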
+ with pytest.raises(ValueError): + client.get_business_identity( + businessidentity.GetBusinessIdentityRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_get_business_identity_flattened_async(): + client = BusinessIdentityServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_business_identity), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = businessidentity.BusinessIdentity() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + businessidentity.BusinessIdentity() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_business_identity( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_get_business_identity_flattened_error_async(): + client = BusinessIdentityServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.get_business_identity( + businessidentity.GetBusinessIdentityRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + businessidentity.UpdateBusinessIdentityRequest, + dict, + ], +) +def test_update_business_identity(request_type, transport: str = "grpc"): + client = BusinessIdentityServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_business_identity), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = businessidentity.BusinessIdentity( + name="name_value", + promotions_consent=businessidentity.BusinessIdentity.PromotionsConsent.PROMOTIONS_CONSENT_GIVEN, + ) + response = client.update_business_identity(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = businessidentity.UpdateBusinessIdentityRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, businessidentity.BusinessIdentity) + assert response.name == "name_value" + assert ( + response.promotions_consent + == businessidentity.BusinessIdentity.PromotionsConsent.PROMOTIONS_CONSENT_GIVEN + ) + + +def test_update_business_identity_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = BusinessIdentityServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.update_business_identity), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.update_business_identity() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == businessidentity.UpdateBusinessIdentityRequest() + + +def test_update_business_identity_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = BusinessIdentityServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = businessidentity.UpdateBusinessIdentityRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_business_identity), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.update_business_identity(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == businessidentity.UpdateBusinessIdentityRequest() + + +def test_update_business_identity_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BusinessIdentityServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.update_business_identity + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.update_business_identity + ] = mock_rpc + request = {} + client.update_business_identity(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.update_business_identity(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_update_business_identity_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = BusinessIdentityServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_business_identity), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + businessidentity.BusinessIdentity( + name="name_value", + promotions_consent=businessidentity.BusinessIdentity.PromotionsConsent.PROMOTIONS_CONSENT_GIVEN, + ) + ) + response = await client.update_business_identity() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == businessidentity.UpdateBusinessIdentityRequest() + + +@pytest.mark.asyncio +async def test_update_business_identity_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = BusinessIdentityServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.update_business_identity + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[ + client._client._transport.update_business_identity + ] = mock_object + + request = {} + await client.update_business_identity(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + await client.update_business_identity(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + +@pytest.mark.asyncio +async def test_update_business_identity_async( + transport: str = "grpc_asyncio", + request_type=businessidentity.UpdateBusinessIdentityRequest, +): + client = BusinessIdentityServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_business_identity), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + businessidentity.BusinessIdentity( + name="name_value", + promotions_consent=businessidentity.BusinessIdentity.PromotionsConsent.PROMOTIONS_CONSENT_GIVEN, + ) + ) + response = await client.update_business_identity(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = businessidentity.UpdateBusinessIdentityRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, businessidentity.BusinessIdentity) + assert response.name == "name_value" + assert ( + response.promotions_consent + == businessidentity.BusinessIdentity.PromotionsConsent.PROMOTIONS_CONSENT_GIVEN + ) + + +@pytest.mark.asyncio +async def test_update_business_identity_async_from_dict(): + await test_update_business_identity_async(request_type=dict) + + +def test_update_business_identity_field_headers(): + client = BusinessIdentityServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = businessidentity.UpdateBusinessIdentityRequest() + + request.business_identity.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_business_identity), "__call__" + ) as call: + call.return_value = businessidentity.BusinessIdentity() + client.update_business_identity(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "business_identity.name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_update_business_identity_field_headers_async(): + client = BusinessIdentityServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = businessidentity.UpdateBusinessIdentityRequest() + + request.business_identity.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_business_identity), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + businessidentity.BusinessIdentity() + ) + await client.update_business_identity(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "business_identity.name=name_value", + ) in kw["metadata"] + + +def test_update_business_identity_flattened(): + client = BusinessIdentityServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_business_identity), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = businessidentity.BusinessIdentity() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.update_business_identity( + business_identity=businessidentity.BusinessIdentity(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. 
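+ # The flattened keyword arguments should have been packed into a single + # UpdateBusinessIdentityRequest; the assertions below read that request back out of the mock call.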
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].business_identity + mock_val = businessidentity.BusinessIdentity(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + assert arg == mock_val + + +def test_update_business_identity_flattened_error(): + client = BusinessIdentityServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_business_identity( + businessidentity.UpdateBusinessIdentityRequest(), + business_identity=businessidentity.BusinessIdentity(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +@pytest.mark.asyncio +async def test_update_business_identity_flattened_async(): + client = BusinessIdentityServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_business_identity), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = businessidentity.BusinessIdentity() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + businessidentity.BusinessIdentity() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.update_business_identity( + business_identity=businessidentity.BusinessIdentity(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].business_identity + mock_val = businessidentity.BusinessIdentity(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_update_business_identity_flattened_error_async(): + client = BusinessIdentityServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.update_business_identity( + businessidentity.UpdateBusinessIdentityRequest(), + business_identity=businessidentity.BusinessIdentity(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +@pytest.mark.parametrize( + "request_type", + [ + businessidentity.GetBusinessIdentityRequest, + dict, + ], +) +def test_get_business_identity_rest(request_type): + client = BusinessIdentityServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "accounts/sample1/businessIdentity"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
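+ # The REST transport is exercised by faking requests.Session.request, so the + # JSON-serialized proto below stands in for the server's HTTP response body.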
+ return_value = businessidentity.BusinessIdentity( + name="name_value", + promotions_consent=businessidentity.BusinessIdentity.PromotionsConsent.PROMOTIONS_CONSENT_GIVEN, + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = businessidentity.BusinessIdentity.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_business_identity(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, businessidentity.BusinessIdentity) + assert response.name == "name_value" + assert ( + response.promotions_consent + == businessidentity.BusinessIdentity.PromotionsConsent.PROMOTIONS_CONSENT_GIVEN + ) + + +def test_get_business_identity_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BusinessIdentityServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.get_business_identity + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.get_business_identity + ] = mock_rpc + + request = {} + client.get_business_identity(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.get_business_identity(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_get_business_identity_rest_required_fields( + request_type=businessidentity.GetBusinessIdentityRequest, +): + transport_class = transports.BusinessIdentityServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_business_identity._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_business_identity._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = BusinessIdentityServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. 
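+ # With the required fields verified on the JSON form of the request, the call below goes + # through a mocked Session and a mocked transcode() so the generated query params can be checked.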
+ return_value = businessidentity.BusinessIdentity() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = businessidentity.BusinessIdentity.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_business_identity(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_business_identity_rest_unset_required_fields(): + transport = transports.BusinessIdentityServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_business_identity._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_business_identity_rest_interceptors(null_interceptor): + transport = transports.BusinessIdentityServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.BusinessIdentityServiceRestInterceptor(), + ) + client = BusinessIdentityServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.BusinessIdentityServiceRestInterceptor, "post_get_business_identity" + ) as post, mock.patch.object( + transports.BusinessIdentityServiceRestInterceptor, "pre_get_business_identity" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = businessidentity.GetBusinessIdentityRequest.pb( + businessidentity.GetBusinessIdentityRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = businessidentity.BusinessIdentity.to_json( + businessidentity.BusinessIdentity() + ) + + request = businessidentity.GetBusinessIdentityRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = businessidentity.BusinessIdentity() + + client.get_business_identity( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_business_identity_rest_bad_request( + transport: str = "rest", request_type=businessidentity.GetBusinessIdentityRequest +): + client = BusinessIdentityServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + 
transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "accounts/sample1/businessIdentity"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_business_identity(request) + + +def test_get_business_identity_rest_flattened(): + client = BusinessIdentityServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = businessidentity.BusinessIdentity() + + # get arguments that satisfy an http rule for this method + sample_request = {"name": "accounts/sample1/businessIdentity"} + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = businessidentity.BusinessIdentity.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_business_identity(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/accounts/v1beta/{name=accounts/*/businessIdentity}" + % client.transport._host, + args[1], + ) + + +def test_get_business_identity_rest_flattened_error(transport: str = "rest"): + client = BusinessIdentityServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_business_identity( + businessidentity.GetBusinessIdentityRequest(), + name="name_value", + ) + + +def test_get_business_identity_rest_error(): + client = BusinessIdentityServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + businessidentity.UpdateBusinessIdentityRequest, + dict, + ], +) +def test_update_business_identity_rest(request_type): + client = BusinessIdentityServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"business_identity": {"name": "accounts/sample1/businessIdentity"}} + request_init["business_identity"] = { + "name": "accounts/sample1/businessIdentity", + "promotions_consent": 1, + "black_owned": {"identity_declaration": 1}, + "women_owned": {}, + "veteran_owned": {}, + "latino_owned": {}, + "small_business": {}, + } + # The version of a generated dependency at test runtime may differ from the version used during generation. 
+ # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = businessidentity.UpdateBusinessIdentityRequest.meta.fields[ + "business_identity" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["business_identity"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["business_identity"][field])): + del request_init["business_identity"][field][i][subfield] + else: + del request_init["business_identity"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = businessidentity.BusinessIdentity( + name="name_value", + promotions_consent=businessidentity.BusinessIdentity.PromotionsConsent.PROMOTIONS_CONSENT_GIVEN, + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = businessidentity.BusinessIdentity.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.update_business_identity(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, businessidentity.BusinessIdentity) + assert response.name == "name_value" + assert ( + response.promotions_consent + == businessidentity.BusinessIdentity.PromotionsConsent.PROMOTIONS_CONSENT_GIVEN + ) + + +def test_update_business_identity_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BusinessIdentityServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.update_business_identity + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.update_business_identity + ] = mock_rpc + + request = {} + client.update_business_identity(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.update_business_identity(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_update_business_identity_rest_required_fields( + request_type=businessidentity.UpdateBusinessIdentityRequest, +): + transport_class = transports.BusinessIdentityServiceRestTransport + + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_business_identity._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_business_identity._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("update_mask",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + + client = BusinessIdentityServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. 
+ return_value = businessidentity.BusinessIdentity() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "patch", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = businessidentity.BusinessIdentity.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.update_business_identity(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_update_business_identity_rest_unset_required_fields(): + transport = transports.BusinessIdentityServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.update_business_identity._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(("updateMask",)) + & set( + ( + "businessIdentity", + "updateMask", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_business_identity_rest_interceptors(null_interceptor): + transport = transports.BusinessIdentityServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.BusinessIdentityServiceRestInterceptor(), + ) + client = BusinessIdentityServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.BusinessIdentityServiceRestInterceptor, + "post_update_business_identity", + ) as post, mock.patch.object( + transports.BusinessIdentityServiceRestInterceptor, + "pre_update_business_identity", + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = businessidentity.UpdateBusinessIdentityRequest.pb( + businessidentity.UpdateBusinessIdentityRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = businessidentity.BusinessIdentity.to_json( + businessidentity.BusinessIdentity() + ) + + request = businessidentity.UpdateBusinessIdentityRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = businessidentity.BusinessIdentity() + + client.update_business_identity( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_update_business_identity_rest_bad_request( + transport: str = "rest", 
request_type=businessidentity.UpdateBusinessIdentityRequest +): + client = BusinessIdentityServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"business_identity": {"name": "accounts/sample1/businessIdentity"}} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.update_business_identity(request) + + +def test_update_business_identity_rest_flattened(): + client = BusinessIdentityServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = businessidentity.BusinessIdentity() + + # get arguments that satisfy an http rule for this method + sample_request = { + "business_identity": {"name": "accounts/sample1/businessIdentity"} + } + + # get truthy value for each flattened field + mock_args = dict( + business_identity=businessidentity.BusinessIdentity(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = businessidentity.BusinessIdentity.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.update_business_identity(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/accounts/v1beta/{business_identity.name=accounts/*/businessIdentity}" + % client.transport._host, + args[1], + ) + + +def test_update_business_identity_rest_flattened_error(transport: str = "rest"): + client = BusinessIdentityServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_business_identity( + businessidentity.UpdateBusinessIdentityRequest(), + business_identity=businessidentity.BusinessIdentity(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +def test_update_business_identity_rest_error(): + client = BusinessIdentityServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. 
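+ # A pre-built transport instance already carries its own credentials, so combining it with + # explicit credentials, a credentials file, an API key, or scopes must raise ValueError; an API + # key combined with explicit credentials is rejected as well.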
+ transport = transports.BusinessIdentityServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = BusinessIdentityServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.BusinessIdentityServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = BusinessIdentityServiceClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. + transport = transports.BusinessIdentityServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = BusinessIdentityServiceClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = BusinessIdentityServiceClient( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.BusinessIdentityServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = BusinessIdentityServiceClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.BusinessIdentityServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = BusinessIdentityServiceClient(transport=transport) + assert client.transport is transport + + +def test_transport_get_channel(): + # A client may be instantiated with a custom transport instance. + transport = transports.BusinessIdentityServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + transport = transports.BusinessIdentityServiceGrpcAsyncIOTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.BusinessIdentityServiceGrpcTransport, + transports.BusinessIdentityServiceGrpcAsyncIOTransport, + transports.BusinessIdentityServiceRestTransport, + ], +) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "rest", + ], +) +def test_transport_kind(transport_name): + transport = BusinessIdentityServiceClient.get_transport_class(transport_name)( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert transport.kind == transport_name + + +def test_transport_grpc_default(): + # A client should use the gRPC transport by default. 
+ client = BusinessIdentityServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert isinstance( + client.transport, + transports.BusinessIdentityServiceGrpcTransport, + ) + + +def test_business_identity_service_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.BusinessIdentityServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json", + ) + + +def test_business_identity_service_base_transport(): + # Instantiate the base transport. + with mock.patch( + "google.shopping.merchant_accounts_v1beta.services.business_identity_service.transports.BusinessIdentityServiceTransport.__init__" + ) as Transport: + Transport.return_value = None + transport = transports.BusinessIdentityServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. + methods = ( + "get_business_identity", + "update_business_identity", + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + with pytest.raises(NotImplementedError): + transport.close() + + # Catch all for all remaining methods and properties + remainder = [ + "kind", + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + + +def test_business_identity_service_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch( + "google.shopping.merchant_accounts_v1beta.services.business_identity_service.transports.BusinessIdentityServiceTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.BusinessIdentityServiceTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with( + "credentials.json", + scopes=None, + default_scopes=("https://www.googleapis.com/auth/content",), + quota_project_id="octopus", + ) + + +def test_business_identity_service_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. + with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( + "google.shopping.merchant_accounts_v1beta.services.business_identity_service.transports.BusinessIdentityServiceTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.BusinessIdentityServiceTransport() + adc.assert_called_once() + + +def test_business_identity_service_auth_adc(): + # If no credentials are provided, we should use ADC credentials. 
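+ # google.auth.default() is patched so the test can assert the scopes, default scopes, and + # quota project handed to ADC without touching a real credential environment.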
+ with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + BusinessIdentityServiceClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=("https://www.googleapis.com/auth/content",), + quota_project_id=None, + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.BusinessIdentityServiceGrpcTransport, + transports.BusinessIdentityServiceGrpcAsyncIOTransport, + ], +) +def test_business_identity_service_transport_auth_adc(transport_class): + # If credentials and host are not provided, the transport class should use + # ADC credentials. + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + adc.assert_called_once_with( + scopes=["1", "2"], + default_scopes=("https://www.googleapis.com/auth/content",), + quota_project_id="octopus", + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.BusinessIdentityServiceGrpcTransport, + transports.BusinessIdentityServiceGrpcAsyncIOTransport, + transports.BusinessIdentityServiceRestTransport, + ], +) +def test_business_identity_service_transport_auth_gdch_credentials(transport_class): + host = "https://language.com" + api_audience_tests = [None, "https://language2.com"] + api_audience_expect = [host, "https://language2.com"] + for t, e in zip(api_audience_tests, api_audience_expect): + with mock.patch.object(google.auth, "default", autospec=True) as adc: + gdch_mock = mock.MagicMock() + type(gdch_mock).with_gdch_audience = mock.PropertyMock( + return_value=gdch_mock + ) + adc.return_value = (gdch_mock, None) + transport_class(host=host, api_audience=t) + gdch_mock.with_gdch_audience.assert_called_once_with(e) + + +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.BusinessIdentityServiceGrpcTransport, grpc_helpers), + (transports.BusinessIdentityServiceGrpcAsyncIOTransport, grpc_helpers_async), + ], +) +def test_business_identity_service_transport_create_channel( + transport_class, grpc_helpers +): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
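+ # Both google.auth.default() and the helper's create_channel() are patched so the test can + # verify the channel is requested against the service's default host with the expected scopes + # and gRPC options.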
+ with mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + + create_channel.assert_called_with( + "merchantapi.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + default_scopes=("https://www.googleapis.com/auth/content",), + scopes=["1", "2"], + default_host="merchantapi.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.BusinessIdentityServiceGrpcTransport, + transports.BusinessIdentityServiceGrpcAsyncIOTransport, + ], +) +def test_business_identity_service_grpc_transport_client_cert_source_for_mtls( + transport_class, +): + cred = ga_credentials.AnonymousCredentials() + + # Check ssl_channel_credentials is used if provided. + with mock.patch.object(transport_class, "create_channel") as mock_create_channel: + mock_ssl_channel_creds = mock.Mock() + transport_class( + host="squid.clam.whelk", + credentials=cred, + ssl_channel_credentials=mock_ssl_channel_creds, + ) + mock_create_channel.assert_called_once_with( + "squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_channel_creds, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls + # is used. 
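+ # In that case grpc.ssl_channel_credentials should be constructed from the certificate/key + # pair produced by the client_cert_source callback.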
+ with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): + with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: + transport_class( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback, + ) + expected_cert, expected_key = client_cert_source_callback() + mock_ssl_cred.assert_called_once_with( + certificate_chain=expected_cert, private_key=expected_key + ) + + +def test_business_identity_service_http_transport_client_cert_source_for_mtls(): + cred = ga_credentials.AnonymousCredentials() + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ) as mock_configure_mtls_channel: + transports.BusinessIdentityServiceRestTransport( + credentials=cred, client_cert_source_for_mtls=client_cert_source_callback + ) + mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + "rest", + ], +) +def test_business_identity_service_host_no_port(transport_name): + client = BusinessIdentityServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="merchantapi.googleapis.com" + ), + transport=transport_name, + ) + assert client.transport._host == ( + "merchantapi.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://merchantapi.googleapis.com" + ) + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + "rest", + ], +) +def test_business_identity_service_host_with_port(transport_name): + client = BusinessIdentityServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="merchantapi.googleapis.com:8000" + ), + transport=transport_name, + ) + assert client.transport._host == ( + "merchantapi.googleapis.com:8000" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://merchantapi.googleapis.com:8000" + ) + + +@pytest.mark.parametrize( + "transport_name", + [ + "rest", + ], +) +def test_business_identity_service_client_transport_session_collision(transport_name): + creds1 = ga_credentials.AnonymousCredentials() + creds2 = ga_credentials.AnonymousCredentials() + client1 = BusinessIdentityServiceClient( + credentials=creds1, + transport=transport_name, + ) + client2 = BusinessIdentityServiceClient( + credentials=creds2, + transport=transport_name, + ) + session1 = client1.transport.get_business_identity._session + session2 = client2.transport.get_business_identity._session + assert session1 != session2 + session1 = client1.transport.update_business_identity._session + session2 = client2.transport.update_business_identity._session + assert session1 != session2 + + +def test_business_identity_service_grpc_transport_channel(): + channel = grpc.secure_channel("http://localhost/", grpc.local_channel_credentials()) + + # Check that channel is used if provided. 
+ transport = transports.BusinessIdentityServiceGrpcTransport( + host="squid.clam.whelk", + channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None + + +def test_business_identity_service_grpc_asyncio_transport_channel(): + channel = aio.secure_channel("http://localhost/", grpc.local_channel_credentials()) + + # Check that channel is used if provided. + transport = transports.BusinessIdentityServiceGrpcAsyncIOTransport( + host="squid.clam.whelk", + channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. +@pytest.mark.parametrize( + "transport_class", + [ + transports.BusinessIdentityServiceGrpcTransport, + transports.BusinessIdentityServiceGrpcAsyncIOTransport, + ], +) +def test_business_identity_service_transport_channel_mtls_with_client_cert_source( + transport_class, +): + with mock.patch( + "grpc.ssl_channel_credentials", autospec=True + ) as grpc_ssl_channel_cred: + with mock.patch.object( + transport_class, "create_channel" + ) as grpc_create_channel: + mock_ssl_cred = mock.Mock() + grpc_ssl_channel_cred.return_value = mock_ssl_cred + + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + + cred = ga_credentials.AnonymousCredentials() + with pytest.warns(DeprecationWarning): + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (cred, None) + transport = transport_class( + host="squid.clam.whelk", + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=client_cert_source_callback, + ) + adc.assert_called_once() + + grpc_ssl_channel_cred.assert_called_once_with( + certificate_chain=b"cert bytes", private_key=b"key bytes" + ) + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + assert transport._ssl_channel_credentials == mock_ssl_cred + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. 
+@pytest.mark.parametrize( + "transport_class", + [ + transports.BusinessIdentityServiceGrpcTransport, + transports.BusinessIdentityServiceGrpcAsyncIOTransport, + ], +) +def test_business_identity_service_transport_channel_mtls_with_adc(transport_class): + mock_ssl_cred = mock.Mock() + with mock.patch.multiple( + "google.auth.transport.grpc.SslCredentials", + __init__=mock.Mock(return_value=None), + ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), + ): + with mock.patch.object( + transport_class, "create_channel" + ) as grpc_create_channel: + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + mock_cred = mock.Mock() + + with pytest.warns(DeprecationWarning): + transport = transport_class( + host="squid.clam.whelk", + credentials=mock_cred, + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=None, + ) + + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=mock_cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + + +def test_business_identity_path(): + account = "squid" + expected = "accounts/{account}/businessIdentity".format( + account=account, + ) + actual = BusinessIdentityServiceClient.business_identity_path(account) + assert expected == actual + + +def test_parse_business_identity_path(): + expected = { + "account": "clam", + } + path = BusinessIdentityServiceClient.business_identity_path(**expected) + + # Check that the path construction is reversible. + actual = BusinessIdentityServiceClient.parse_business_identity_path(path) + assert expected == actual + + +def test_common_billing_account_path(): + billing_account = "whelk" + expected = "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + actual = BusinessIdentityServiceClient.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "octopus", + } + path = BusinessIdentityServiceClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. + actual = BusinessIdentityServiceClient.parse_common_billing_account_path(path) + assert expected == actual + + +def test_common_folder_path(): + folder = "oyster" + expected = "folders/{folder}".format( + folder=folder, + ) + actual = BusinessIdentityServiceClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "nudibranch", + } + path = BusinessIdentityServiceClient.common_folder_path(**expected) + + # Check that the path construction is reversible. + actual = BusinessIdentityServiceClient.parse_common_folder_path(path) + assert expected == actual + + +def test_common_organization_path(): + organization = "cuttlefish" + expected = "organizations/{organization}".format( + organization=organization, + ) + actual = BusinessIdentityServiceClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "mussel", + } + path = BusinessIdentityServiceClient.common_organization_path(**expected) + + # Check that the path construction is reversible. 
+ actual = BusinessIdentityServiceClient.parse_common_organization_path(path) + assert expected == actual + + +def test_common_project_path(): + project = "winkle" + expected = "projects/{project}".format( + project=project, + ) + actual = BusinessIdentityServiceClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "nautilus", + } + path = BusinessIdentityServiceClient.common_project_path(**expected) + + # Check that the path construction is reversible. + actual = BusinessIdentityServiceClient.parse_common_project_path(path) + assert expected == actual + + +def test_common_location_path(): + project = "scallop" + location = "abalone" + expected = "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) + actual = BusinessIdentityServiceClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "squid", + "location": "clam", + } + path = BusinessIdentityServiceClient.common_location_path(**expected) + + # Check that the path construction is reversible. + actual = BusinessIdentityServiceClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object( + transports.BusinessIdentityServiceTransport, "_prep_wrapped_messages" + ) as prep: + client = BusinessIdentityServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object( + transports.BusinessIdentityServiceTransport, "_prep_wrapped_messages" + ) as prep: + transport_class = BusinessIdentityServiceClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + +@pytest.mark.asyncio +async def test_transport_close_async(): + client = BusinessIdentityServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + with mock.patch.object( + type(getattr(client.transport, "grpc_channel")), "close" + ) as close: + async with client: + close.assert_not_called() + close.assert_called_once() + + +def test_transport_close(): + transports = { + "rest": "_session", + "grpc": "_grpc_channel", + } + + for transport, close_name in transports.items(): + client = BusinessIdentityServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + with mock.patch.object( + type(getattr(client.transport, close_name)), "close" + ) as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +def test_client_ctx(): + transports = [ + "rest", + "grpc", + ] + for transport in transports: + client = BusinessIdentityServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + # Test client calls underlying transport. 
+ with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() + + +@pytest.mark.parametrize( + "client_class,transport_class", + [ + ( + BusinessIdentityServiceClient, + transports.BusinessIdentityServiceGrpcTransport, + ), + ( + BusinessIdentityServiceAsyncClient, + transports.BusinessIdentityServiceGrpcAsyncIOTransport, + ), + ], +) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) diff --git a/packages/google-shopping-merchant-accounts/tests/unit/gapic/merchant_accounts_v1beta/test_business_info_service.py b/packages/google-shopping-merchant-accounts/tests/unit/gapic/merchant_accounts_v1beta/test_business_info_service.py new file mode 100644 index 000000000000..e0d3e8aee83b --- /dev/null +++ b/packages/google-shopping-merchant-accounts/tests/unit/gapic/merchant_accounts_v1beta/test_business_info_service.py @@ -0,0 +1,3487 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import os + +# try/except added for compatibility with python < 3.8 +try: + from unittest import mock + from unittest.mock import AsyncMock # pragma: NO COVER +except ImportError: # pragma: NO COVER + import mock + +from collections.abc import Iterable +import json +import math + +from google.api_core import gapic_v1, grpc_helpers, grpc_helpers_async, path_template +from google.api_core import api_core_version, client_options +from google.api_core import exceptions as core_exceptions +import google.auth +from google.auth import credentials as ga_credentials +from google.auth.exceptions import MutualTLSChannelError +from google.oauth2 import service_account +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import json_format +from google.type import phone_number_pb2 # type: ignore +from google.type import postal_address_pb2 # type: ignore +import grpc +from grpc.experimental import aio +from proto.marshal.rules import wrappers +from proto.marshal.rules.dates import DurationRule, TimestampRule +import pytest +from requests import PreparedRequest, Request, Response +from requests.sessions import Session + +from google.shopping.merchant_accounts_v1beta.services.business_info_service import ( + BusinessInfoServiceAsyncClient, + BusinessInfoServiceClient, + transports, +) +from google.shopping.merchant_accounts_v1beta.types import ( + businessinfo, + customerservice, + phoneverificationstate, +) + + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + + +# If default endpoint is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. +def modify_default_endpoint(client): + return ( + "foo.googleapis.com" + if ("localhost" in client.DEFAULT_ENDPOINT) + else client.DEFAULT_ENDPOINT + ) + + +# If default endpoint template is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint template so the client can produce a different +# mtls endpoint for endpoint testing purposes. 
+def modify_default_endpoint_template(client): + return ( + "test.{UNIVERSE_DOMAIN}" + if ("localhost" in client._DEFAULT_ENDPOINT_TEMPLATE) + else client._DEFAULT_ENDPOINT_TEMPLATE + ) + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert BusinessInfoServiceClient._get_default_mtls_endpoint(None) is None + assert ( + BusinessInfoServiceClient._get_default_mtls_endpoint(api_endpoint) + == api_mtls_endpoint + ) + assert ( + BusinessInfoServiceClient._get_default_mtls_endpoint(api_mtls_endpoint) + == api_mtls_endpoint + ) + assert ( + BusinessInfoServiceClient._get_default_mtls_endpoint(sandbox_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + BusinessInfoServiceClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + BusinessInfoServiceClient._get_default_mtls_endpoint(non_googleapi) + == non_googleapi + ) + + +def test__read_environment_variables(): + assert BusinessInfoServiceClient._read_environment_variables() == ( + False, + "auto", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + assert BusinessInfoServiceClient._read_environment_variables() == ( + True, + "auto", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + assert BusinessInfoServiceClient._read_environment_variables() == ( + False, + "auto", + None, + ) + + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError) as excinfo: + BusinessInfoServiceClient._read_environment_variables() + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + assert BusinessInfoServiceClient._read_environment_variables() == ( + False, + "never", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + assert BusinessInfoServiceClient._read_environment_variables() == ( + False, + "always", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}): + assert BusinessInfoServiceClient._read_environment_variables() == ( + False, + "auto", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + BusinessInfoServiceClient._read_environment_variables() + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + with mock.patch.dict(os.environ, {"GOOGLE_CLOUD_UNIVERSE_DOMAIN": "foo.com"}): + assert BusinessInfoServiceClient._read_environment_variables() == ( + False, + "auto", + "foo.com", + ) + + +def test__get_client_cert_source(): + mock_provided_cert_source = mock.Mock() + mock_default_cert_source = mock.Mock() + + assert BusinessInfoServiceClient._get_client_cert_source(None, False) is None + assert ( + BusinessInfoServiceClient._get_client_cert_source( + mock_provided_cert_source, False + ) + is None + ) + assert ( + BusinessInfoServiceClient._get_client_cert_source( + mock_provided_cert_source, True + ) + == mock_provided_cert_source + ) + + with mock.patch( + 
"google.auth.transport.mtls.has_default_client_cert_source", return_value=True + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=mock_default_cert_source, + ): + assert ( + BusinessInfoServiceClient._get_client_cert_source(None, True) + is mock_default_cert_source + ) + assert ( + BusinessInfoServiceClient._get_client_cert_source( + mock_provided_cert_source, "true" + ) + is mock_provided_cert_source + ) + + +@mock.patch.object( + BusinessInfoServiceClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(BusinessInfoServiceClient), +) +@mock.patch.object( + BusinessInfoServiceAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(BusinessInfoServiceAsyncClient), +) +def test__get_api_endpoint(): + api_override = "foo.com" + mock_client_cert_source = mock.Mock() + default_universe = BusinessInfoServiceClient._DEFAULT_UNIVERSE + default_endpoint = BusinessInfoServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=default_universe + ) + mock_universe = "bar.com" + mock_endpoint = BusinessInfoServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=mock_universe + ) + + assert ( + BusinessInfoServiceClient._get_api_endpoint( + api_override, mock_client_cert_source, default_universe, "always" + ) + == api_override + ) + assert ( + BusinessInfoServiceClient._get_api_endpoint( + None, mock_client_cert_source, default_universe, "auto" + ) + == BusinessInfoServiceClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + BusinessInfoServiceClient._get_api_endpoint( + None, None, default_universe, "auto" + ) + == default_endpoint + ) + assert ( + BusinessInfoServiceClient._get_api_endpoint( + None, None, default_universe, "always" + ) + == BusinessInfoServiceClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + BusinessInfoServiceClient._get_api_endpoint( + None, mock_client_cert_source, default_universe, "always" + ) + == BusinessInfoServiceClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + BusinessInfoServiceClient._get_api_endpoint(None, None, mock_universe, "never") + == mock_endpoint + ) + assert ( + BusinessInfoServiceClient._get_api_endpoint( + None, None, default_universe, "never" + ) + == default_endpoint + ) + + with pytest.raises(MutualTLSChannelError) as excinfo: + BusinessInfoServiceClient._get_api_endpoint( + None, mock_client_cert_source, mock_universe, "auto" + ) + assert ( + str(excinfo.value) + == "mTLS is not supported in any universe other than googleapis.com." + ) + + +def test__get_universe_domain(): + client_universe_domain = "foo.com" + universe_domain_env = "bar.com" + + assert ( + BusinessInfoServiceClient._get_universe_domain( + client_universe_domain, universe_domain_env + ) + == client_universe_domain + ) + assert ( + BusinessInfoServiceClient._get_universe_domain(None, universe_domain_env) + == universe_domain_env + ) + assert ( + BusinessInfoServiceClient._get_universe_domain(None, None) + == BusinessInfoServiceClient._DEFAULT_UNIVERSE + ) + + with pytest.raises(ValueError) as excinfo: + BusinessInfoServiceClient._get_universe_domain("", None) + assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
+ + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + ( + BusinessInfoServiceClient, + transports.BusinessInfoServiceGrpcTransport, + "grpc", + ), + ( + BusinessInfoServiceClient, + transports.BusinessInfoServiceRestTransport, + "rest", + ), + ], +) +def test__validate_universe_domain(client_class, transport_class, transport_name): + client = client_class( + transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) + ) + assert client._validate_universe_domain() == True + + # Test the case when universe is already validated. + assert client._validate_universe_domain() == True + + if transport_name == "grpc": + # Test the case where credentials are provided by the + # `local_channel_credentials`. The default universes in both match. + channel = grpc.secure_channel( + "http://localhost/", grpc.local_channel_credentials() + ) + client = client_class(transport=transport_class(channel=channel)) + assert client._validate_universe_domain() == True + + # Test the case where credentials do not exist: e.g. a transport is provided + # with no credentials. Validation should still succeed because there is no + # mismatch with non-existent credentials. + channel = grpc.secure_channel( + "http://localhost/", grpc.local_channel_credentials() + ) + transport = transport_class(channel=channel) + transport._credentials = None + client = client_class(transport=transport) + assert client._validate_universe_domain() == True + + # TODO: This is needed to cater for older versions of google-auth + # Make this test unconditional once the minimum supported version of + # google-auth becomes 2.23.0 or higher. + google_auth_major, google_auth_minor = [ + int(part) for part in google.auth.__version__.split(".")[0:2] + ] + if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): + credentials = ga_credentials.AnonymousCredentials() + credentials._universe_domain = "foo.com" + # Test the case when there is a universe mismatch from the credentials. + client = client_class(transport=transport_class(credentials=credentials)) + with pytest.raises(ValueError) as excinfo: + client._validate_universe_domain() + assert ( + str(excinfo.value) + == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." + ) + + # Test the case when there is a universe mismatch from the client. + # + # TODO: Make this test unconditional once the minimum supported version of + # google-api-core becomes 2.15.0 or higher. + api_core_major, api_core_minor = [ + int(part) for part in api_core_version.__version__.split(".")[0:2] + ] + if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): + client = client_class( + client_options={"universe_domain": "bar.com"}, + transport=transport_class( + credentials=ga_credentials.AnonymousCredentials(), + ), + ) + with pytest.raises(ValueError) as excinfo: + client._validate_universe_domain() + assert ( + str(excinfo.value) + == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." 
+ ) + + # Test that ValueError is raised if universe_domain is provided via client options and credentials is None + with pytest.raises(ValueError): + client._compare_universes("foo.bar", None) + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (BusinessInfoServiceClient, "grpc"), + (BusinessInfoServiceAsyncClient, "grpc_asyncio"), + (BusinessInfoServiceClient, "rest"), + ], +) +def test_business_info_service_client_from_service_account_info( + client_class, transport_name +): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_info" + ) as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info, transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + "merchantapi.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://merchantapi.googleapis.com" + ) + + +@pytest.mark.parametrize( + "transport_class,transport_name", + [ + (transports.BusinessInfoServiceGrpcTransport, "grpc"), + (transports.BusinessInfoServiceGrpcAsyncIOTransport, "grpc_asyncio"), + (transports.BusinessInfoServiceRestTransport, "rest"), + ], +) +def test_business_info_service_client_service_account_always_use_jwt( + transport_class, transport_name +): + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) + + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=False) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (BusinessInfoServiceClient, "grpc"), + (BusinessInfoServiceAsyncClient, "grpc_asyncio"), + (BusinessInfoServiceClient, "rest"), + ], +) +def test_business_info_service_client_from_service_account_file( + client_class, transport_name +): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_file" + ) as factory: + factory.return_value = creds + client = client_class.from_service_account_file( + "dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + client = client_class.from_service_account_json( + "dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + "merchantapi.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://merchantapi.googleapis.com" + ) + + +def test_business_info_service_client_get_transport_class(): + transport = BusinessInfoServiceClient.get_transport_class() + available_transports = [ + transports.BusinessInfoServiceGrpcTransport, + transports.BusinessInfoServiceRestTransport, + ] + assert transport in available_transports + + transport = BusinessInfoServiceClient.get_transport_class("grpc") + assert transport == 
transports.BusinessInfoServiceGrpcTransport + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + ( + BusinessInfoServiceClient, + transports.BusinessInfoServiceGrpcTransport, + "grpc", + ), + ( + BusinessInfoServiceAsyncClient, + transports.BusinessInfoServiceGrpcAsyncIOTransport, + "grpc_asyncio", + ), + ( + BusinessInfoServiceClient, + transports.BusinessInfoServiceRestTransport, + "rest", + ), + ], +) +@mock.patch.object( + BusinessInfoServiceClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(BusinessInfoServiceClient), +) +@mock.patch.object( + BusinessInfoServiceAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(BusinessInfoServiceAsyncClient), +) +def test_business_info_service_client_client_options( + client_class, transport_class, transport_name +): + # Check that if channel is provided we won't create a new one. + with mock.patch.object(BusinessInfoServiceClient, "get_transport_class") as gtc: + transport = transport_class(credentials=ga_credentials.AnonymousCredentials()) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object(BusinessInfoServiceClient, "get_transport_class") as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. + options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name, client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + client = client_class(transport=transport_name) + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError) as excinfo: + client = client_class(transport=transport_name) + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + # Check the case api_endpoint is provided + options = client_options.ClientOptions( + api_audience="https://language.googleapis.com" + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience="https://language.googleapis.com", + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,use_client_cert_env", + [ + ( + BusinessInfoServiceClient, + transports.BusinessInfoServiceGrpcTransport, + "grpc", + "true", + ), + ( + BusinessInfoServiceAsyncClient, + transports.BusinessInfoServiceGrpcAsyncIOTransport, + "grpc_asyncio", + "true", + ), + ( + BusinessInfoServiceClient, + transports.BusinessInfoServiceGrpcTransport, + "grpc", + "false", + ), + ( + BusinessInfoServiceAsyncClient, + transports.BusinessInfoServiceGrpcAsyncIOTransport, + "grpc_asyncio", + "false", + ), + ( + BusinessInfoServiceClient, + transports.BusinessInfoServiceRestTransport, + "rest", + "true", + ), + ( + BusinessInfoServiceClient, + transports.BusinessInfoServiceRestTransport, + "rest", + "false", + ), + ], +) +@mock.patch.object( + BusinessInfoServiceClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(BusinessInfoServiceClient), +) +@mock.patch.object( + BusinessInfoServiceAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(BusinessInfoServiceAsyncClient), +) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_business_info_service_client_mtls_env_auto( + client_class, transport_class, transport_name, use_client_cert_env +): + # This tests the endpoint autoswitch behavior. 
Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. + + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + options = client_options.ClientOptions( + client_cert_source=client_cert_source_callback + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ) + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=client_cert_source_callback, + ): + if use_client_cert_env == "false": + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ) + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case client_cert_source and ADC client cert are not provided. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class", [BusinessInfoServiceClient, BusinessInfoServiceAsyncClient] +) +@mock.patch.object( + BusinessInfoServiceClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(BusinessInfoServiceClient), +) +@mock.patch.object( + BusinessInfoServiceAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(BusinessInfoServiceAsyncClient), +) +def test_business_info_service_client_get_mtls_endpoint_and_cert_source(client_class): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=mock_client_cert_source, + ): + ( + api_endpoint, + cert_source, + ) = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + client_class.get_mtls_endpoint_and_cert_source() + + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError) as excinfo: + client_class.get_mtls_endpoint_and_cert_source() + + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + + +@pytest.mark.parametrize( + "client_class", [BusinessInfoServiceClient, BusinessInfoServiceAsyncClient] +) +@mock.patch.object( + BusinessInfoServiceClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(BusinessInfoServiceClient), +) +@mock.patch.object( + BusinessInfoServiceAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(BusinessInfoServiceAsyncClient), +) +def test_business_info_service_client_client_api_endpoint(client_class): + mock_client_cert_source = client_cert_source_callback + api_override = "foo.com" + default_universe = BusinessInfoServiceClient._DEFAULT_UNIVERSE + default_endpoint = BusinessInfoServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=default_universe + ) + mock_universe = "bar.com" + mock_endpoint = BusinessInfoServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=mock_universe + ) + + # If ClientOptions.api_endpoint is set and GOOGLE_API_USE_CLIENT_CERTIFICATE="true", + # use ClientOptions.api_endpoint as the api endpoint regardless. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ): + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=api_override + ) + client = client_class( + client_options=options, + credentials=ga_credentials.AnonymousCredentials(), + ) + assert client.api_endpoint == api_override + + # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="never", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + client = client_class(credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == default_endpoint + + # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="always", + # use the DEFAULT_MTLS_ENDPOINT as the api endpoint. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + client = client_class(credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + + # If ClientOptions.api_endpoint is not set, GOOGLE_API_USE_MTLS_ENDPOINT="auto" (default), + # GOOGLE_API_USE_CLIENT_CERTIFICATE="false" (default), default cert source doesn't exist, + # and ClientOptions.universe_domain="bar.com", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with universe domain as the api endpoint. + options = client_options.ClientOptions() + universe_exists = hasattr(options, "universe_domain") + if universe_exists: + options = client_options.ClientOptions(universe_domain=mock_universe) + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + else: + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + assert client.api_endpoint == ( + mock_endpoint if universe_exists else default_endpoint + ) + assert client.universe_domain == ( + mock_universe if universe_exists else default_universe + ) + + # If ClientOptions does not have a universe domain attribute and GOOGLE_API_USE_MTLS_ENDPOINT="never", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. + options = client_options.ClientOptions() + if hasattr(options, "universe_domain"): + delattr(options, "universe_domain") + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + assert client.api_endpoint == default_endpoint + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + ( + BusinessInfoServiceClient, + transports.BusinessInfoServiceGrpcTransport, + "grpc", + ), + ( + BusinessInfoServiceAsyncClient, + transports.BusinessInfoServiceGrpcAsyncIOTransport, + "grpc_asyncio", + ), + ( + BusinessInfoServiceClient, + transports.BusinessInfoServiceRestTransport, + "rest", + ), + ], +) +def test_business_info_service_client_client_options_scopes( + client_class, transport_class, transport_name +): + # Check the case scopes are provided. + options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + BusinessInfoServiceClient, + transports.BusinessInfoServiceGrpcTransport, + "grpc", + grpc_helpers, + ), + ( + BusinessInfoServiceAsyncClient, + transports.BusinessInfoServiceGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + ( + BusinessInfoServiceClient, + transports.BusinessInfoServiceRestTransport, + "rest", + None, + ), + ], +) +def test_business_info_service_client_client_options_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. 
+ options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +def test_business_info_service_client_client_options_from_dict(): + with mock.patch( + "google.shopping.merchant_accounts_v1beta.services.business_info_service.transports.BusinessInfoServiceGrpcTransport.__init__" + ) as grpc_transport: + grpc_transport.return_value = None + client = BusinessInfoServiceClient( + client_options={"api_endpoint": "squid.clam.whelk"} + ) + grpc_transport.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + BusinessInfoServiceClient, + transports.BusinessInfoServiceGrpcTransport, + "grpc", + grpc_helpers, + ), + ( + BusinessInfoServiceAsyncClient, + transports.BusinessInfoServiceGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + ], +) +def test_business_info_service_client_create_channel_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. + options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # test that the credentials from file are saved and used as the credentials. 
+ with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel" + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + file_creds = ga_credentials.AnonymousCredentials() + load_creds.return_value = (file_creds, None) + adc.return_value = (creds, None) + client = client_class(client_options=options, transport=transport_name) + create_channel.assert_called_with( + "merchantapi.googleapis.com:443", + credentials=file_creds, + credentials_file=None, + quota_project_id=None, + default_scopes=("https://www.googleapis.com/auth/content",), + scopes=None, + default_host="merchantapi.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "request_type", + [ + businessinfo.GetBusinessInfoRequest, + dict, + ], +) +def test_get_business_info(request_type, transport: str = "grpc"): + client = BusinessInfoServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_business_info), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = businessinfo.BusinessInfo( + name="name_value", + phone_verification_state=phoneverificationstate.PhoneVerificationState.PHONE_VERIFICATION_STATE_VERIFIED, + ) + response = client.get_business_info(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = businessinfo.GetBusinessInfoRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, businessinfo.BusinessInfo) + assert response.name == "name_value" + assert ( + response.phone_verification_state + == phoneverificationstate.PhoneVerificationState.PHONE_VERIFICATION_STATE_VERIFIED + ) + + +def test_get_business_info_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = BusinessInfoServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_business_info), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.get_business_info() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == businessinfo.GetBusinessInfoRequest() + + +def test_get_business_info_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. 
+ client = BusinessInfoServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = businessinfo.GetBusinessInfoRequest( + name="name_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_business_info), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.get_business_info(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == businessinfo.GetBusinessInfoRequest( + name="name_value", + ) + + +def test_get_business_info_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BusinessInfoServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_business_info in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.get_business_info + ] = mock_rpc + request = {} + client.get_business_info(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.get_business_info(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_get_business_info_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = BusinessInfoServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_business_info), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + businessinfo.BusinessInfo( + name="name_value", + phone_verification_state=phoneverificationstate.PhoneVerificationState.PHONE_VERIFICATION_STATE_VERIFIED, + ) + ) + response = await client.get_business_info() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == businessinfo.GetBusinessInfoRequest() + + +@pytest.mark.asyncio +async def test_get_business_info_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = BusinessInfoServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.get_business_info + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[ + client._client._transport.get_business_info + ] = mock_object + + request = {} + await client.get_business_info(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + await client.get_business_info(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + +@pytest.mark.asyncio +async def test_get_business_info_async( + transport: str = "grpc_asyncio", request_type=businessinfo.GetBusinessInfoRequest +): + client = BusinessInfoServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_business_info), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + businessinfo.BusinessInfo( + name="name_value", + phone_verification_state=phoneverificationstate.PhoneVerificationState.PHONE_VERIFICATION_STATE_VERIFIED, + ) + ) + response = await client.get_business_info(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = businessinfo.GetBusinessInfoRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, businessinfo.BusinessInfo) + assert response.name == "name_value" + assert ( + response.phone_verification_state + == phoneverificationstate.PhoneVerificationState.PHONE_VERIFICATION_STATE_VERIFIED + ) + + +@pytest.mark.asyncio +async def test_get_business_info_async_from_dict(): + await test_get_business_info_async(request_type=dict) + + +def test_get_business_info_field_headers(): + client = BusinessInfoServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = businessinfo.GetBusinessInfoRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_business_info), "__call__" + ) as call: + call.return_value = businessinfo.BusinessInfo() + client.get_business_info(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_business_info_field_headers_async(): + client = BusinessInfoServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = businessinfo.GetBusinessInfoRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_business_info), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + businessinfo.BusinessInfo() + ) + await client.get_business_info(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_get_business_info_flattened(): + client = BusinessInfoServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_business_info), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = businessinfo.BusinessInfo() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_business_info( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_get_business_info_flattened_error(): + client = BusinessInfoServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get_business_info( + businessinfo.GetBusinessInfoRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_get_business_info_flattened_async(): + client = BusinessInfoServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_business_info), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = businessinfo.BusinessInfo() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + businessinfo.BusinessInfo() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_business_info( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_get_business_info_flattened_error_async(): + client = BusinessInfoServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.get_business_info( + businessinfo.GetBusinessInfoRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + businessinfo.UpdateBusinessInfoRequest, + dict, + ], +) +def test_update_business_info(request_type, transport: str = "grpc"): + client = BusinessInfoServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_business_info), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = businessinfo.BusinessInfo( + name="name_value", + phone_verification_state=phoneverificationstate.PhoneVerificationState.PHONE_VERIFICATION_STATE_VERIFIED, + ) + response = client.update_business_info(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = businessinfo.UpdateBusinessInfoRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, businessinfo.BusinessInfo) + assert response.name == "name_value" + assert ( + response.phone_verification_state + == phoneverificationstate.PhoneVerificationState.PHONE_VERIFICATION_STATE_VERIFIED + ) + + +def test_update_business_info_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = BusinessInfoServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_business_info), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.update_business_info() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == businessinfo.UpdateBusinessInfoRequest() + + +def test_update_business_info_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = BusinessInfoServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = businessinfo.UpdateBusinessInfoRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_business_info), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.update_business_info(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == businessinfo.UpdateBusinessInfoRequest() + + +def test_update_business_info_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BusinessInfoServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.update_business_info in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.update_business_info + ] = mock_rpc + request = {} + client.update_business_info(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.update_business_info(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_update_business_info_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = BusinessInfoServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_business_info), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + businessinfo.BusinessInfo( + name="name_value", + phone_verification_state=phoneverificationstate.PhoneVerificationState.PHONE_VERIFICATION_STATE_VERIFIED, + ) + ) + response = await client.update_business_info() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == businessinfo.UpdateBusinessInfoRequest() + + +@pytest.mark.asyncio +async def test_update_business_info_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = BusinessInfoServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.update_business_info + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[ + client._client._transport.update_business_info + ] = mock_object + + request = {} + await client.update_business_info(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + await client.update_business_info(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + +@pytest.mark.asyncio +async def test_update_business_info_async( + transport: str = "grpc_asyncio", request_type=businessinfo.UpdateBusinessInfoRequest +): + client = BusinessInfoServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_business_info), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + businessinfo.BusinessInfo( + name="name_value", + phone_verification_state=phoneverificationstate.PhoneVerificationState.PHONE_VERIFICATION_STATE_VERIFIED, + ) + ) + response = await client.update_business_info(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = businessinfo.UpdateBusinessInfoRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, businessinfo.BusinessInfo) + assert response.name == "name_value" + assert ( + response.phone_verification_state + == phoneverificationstate.PhoneVerificationState.PHONE_VERIFICATION_STATE_VERIFIED + ) + + +@pytest.mark.asyncio +async def test_update_business_info_async_from_dict(): + await test_update_business_info_async(request_type=dict) + + +def test_update_business_info_field_headers(): + client = BusinessInfoServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = businessinfo.UpdateBusinessInfoRequest() + + request.business_info.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_business_info), "__call__" + ) as call: + call.return_value = businessinfo.BusinessInfo() + client.update_business_info(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "business_info.name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_update_business_info_field_headers_async(): + client = BusinessInfoServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = businessinfo.UpdateBusinessInfoRequest() + + request.business_info.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_business_info), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + businessinfo.BusinessInfo() + ) + await client.update_business_info(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "business_info.name=name_value", + ) in kw["metadata"] + + +def test_update_business_info_flattened(): + client = BusinessInfoServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_business_info), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = businessinfo.BusinessInfo() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.update_business_info( + business_info=businessinfo.BusinessInfo(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].business_info + mock_val = businessinfo.BusinessInfo(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + assert arg == mock_val + + +def test_update_business_info_flattened_error(): + client = BusinessInfoServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_business_info( + businessinfo.UpdateBusinessInfoRequest(), + business_info=businessinfo.BusinessInfo(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +@pytest.mark.asyncio +async def test_update_business_info_flattened_async(): + client = BusinessInfoServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_business_info), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = businessinfo.BusinessInfo() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + businessinfo.BusinessInfo() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.update_business_info( + business_info=businessinfo.BusinessInfo(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].business_info + mock_val = businessinfo.BusinessInfo(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_update_business_info_flattened_error_async(): + client = BusinessInfoServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.update_business_info( + businessinfo.UpdateBusinessInfoRequest(), + business_info=businessinfo.BusinessInfo(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +@pytest.mark.parametrize( + "request_type", + [ + businessinfo.GetBusinessInfoRequest, + dict, + ], +) +def test_get_business_info_rest(request_type): + client = BusinessInfoServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "accounts/sample1/businessInfo"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
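+        # Patching the transport session's request() keeps the test offline; the
+        # JSON-encoded Response built below is what the client deserializes.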
+ return_value = businessinfo.BusinessInfo( + name="name_value", + phone_verification_state=phoneverificationstate.PhoneVerificationState.PHONE_VERIFICATION_STATE_VERIFIED, + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = businessinfo.BusinessInfo.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_business_info(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, businessinfo.BusinessInfo) + assert response.name == "name_value" + assert ( + response.phone_verification_state + == phoneverificationstate.PhoneVerificationState.PHONE_VERIFICATION_STATE_VERIFIED + ) + + +def test_get_business_info_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BusinessInfoServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_business_info in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.get_business_info + ] = mock_rpc + + request = {} + client.get_business_info(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.get_business_info(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_get_business_info_rest_required_fields( + request_type=businessinfo.GetBusinessInfoRequest, +): + transport_class = transports.BusinessInfoServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_business_info._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_business_info._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = BusinessInfoServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. 
+ return_value = businessinfo.BusinessInfo() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = businessinfo.BusinessInfo.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_business_info(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_business_info_rest_unset_required_fields(): + transport = transports.BusinessInfoServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_business_info._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_business_info_rest_interceptors(null_interceptor): + transport = transports.BusinessInfoServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.BusinessInfoServiceRestInterceptor(), + ) + client = BusinessInfoServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.BusinessInfoServiceRestInterceptor, "post_get_business_info" + ) as post, mock.patch.object( + transports.BusinessInfoServiceRestInterceptor, "pre_get_business_info" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = businessinfo.GetBusinessInfoRequest.pb( + businessinfo.GetBusinessInfoRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = businessinfo.BusinessInfo.to_json( + businessinfo.BusinessInfo() + ) + + request = businessinfo.GetBusinessInfoRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = businessinfo.BusinessInfo() + + client.get_business_info( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_business_info_rest_bad_request( + transport: str = "rest", request_type=businessinfo.GetBusinessInfoRequest +): + client = BusinessInfoServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "accounts/sample1/businessInfo"} + request = 
request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_business_info(request) + + +def test_get_business_info_rest_flattened(): + client = BusinessInfoServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = businessinfo.BusinessInfo() + + # get arguments that satisfy an http rule for this method + sample_request = {"name": "accounts/sample1/businessInfo"} + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = businessinfo.BusinessInfo.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_business_info(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/accounts/v1beta/{name=accounts/*/businessInfo}" + % client.transport._host, + args[1], + ) + + +def test_get_business_info_rest_flattened_error(transport: str = "rest"): + client = BusinessInfoServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get_business_info( + businessinfo.GetBusinessInfoRequest(), + name="name_value", + ) + + +def test_get_business_info_rest_error(): + client = BusinessInfoServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + businessinfo.UpdateBusinessInfoRequest, + dict, + ], +) +def test_update_business_info_rest(request_type): + client = BusinessInfoServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"business_info": {"name": "accounts/sample1/businessInfo"}} + request_init["business_info"] = { + "name": "accounts/sample1/businessInfo", + "address": { + "revision": 879, + "region_code": "region_code_value", + "language_code": "language_code_value", + "postal_code": "postal_code_value", + "sorting_code": "sorting_code_value", + "administrative_area": "administrative_area_value", + "locality": "locality_value", + "sublocality": "sublocality_value", + "address_lines": ["address_lines_value1", "address_lines_value2"], + "recipients": ["recipients_value1", "recipients_value2"], + "organization": "organization_value", + }, + "phone": { + "e164_number": "e164_number_value", + "short_code": { + "region_code": "region_code_value", + "number": "number_value", + }, + "extension": "extension_value", + }, + "phone_verification_state": 1, + "customer_service": {"uri": "uri_value", "email": "email_value", "phone": {}}, + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = businessinfo.UpdateBusinessInfoRequest.meta.fields["business_info"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["business_info"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["business_info"][field])): + del request_init["business_info"][field][i][subfield] + else: + del request_init["business_info"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = businessinfo.BusinessInfo( + name="name_value", + phone_verification_state=phoneverificationstate.PhoneVerificationState.PHONE_VERIFICATION_STATE_VERIFIED, + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = businessinfo.BusinessInfo.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.update_business_info(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, businessinfo.BusinessInfo) + assert response.name == "name_value" + assert ( + response.phone_verification_state + == phoneverificationstate.PhoneVerificationState.PHONE_VERIFICATION_STATE_VERIFIED + ) + + +def test_update_business_info_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BusinessInfoServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.update_business_info in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.update_business_info + ] = mock_rpc + + request = {} + client.update_business_info(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.update_business_info(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_update_business_info_rest_required_fields( + request_type=businessinfo.UpdateBusinessInfoRequest, +): + transport_class = transports.BusinessInfoServiceRestTransport + + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_business_info._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_business_info._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("update_mask",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + + client = BusinessInfoServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = businessinfo.BusinessInfo() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "patch", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = businessinfo.BusinessInfo.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.update_business_info(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_update_business_info_rest_unset_required_fields(): + transport = transports.BusinessInfoServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.update_business_info._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(("updateMask",)) + & set( + ( + "businessInfo", + "updateMask", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_business_info_rest_interceptors(null_interceptor): + transport = transports.BusinessInfoServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.BusinessInfoServiceRestInterceptor(), + ) + client = BusinessInfoServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.BusinessInfoServiceRestInterceptor, "post_update_business_info" + ) as post, mock.patch.object( + transports.BusinessInfoServiceRestInterceptor, "pre_update_business_info" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = businessinfo.UpdateBusinessInfoRequest.pb( + businessinfo.UpdateBusinessInfoRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = businessinfo.BusinessInfo.to_json( + businessinfo.BusinessInfo() + ) + + request = businessinfo.UpdateBusinessInfoRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = businessinfo.BusinessInfo() + + client.update_business_info( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_update_business_info_rest_bad_request( + transport: str = "rest", request_type=businessinfo.UpdateBusinessInfoRequest +): + client = BusinessInfoServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"business_info": {"name": "accounts/sample1/businessInfo"}} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.update_business_info(request) + + +def test_update_business_info_rest_flattened(): + client = BusinessInfoServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = businessinfo.BusinessInfo() + + # get arguments that satisfy an http rule for this method + sample_request = {"business_info": {"name": "accounts/sample1/businessInfo"}} + + # get truthy value for each flattened field + mock_args = dict( + business_info=businessinfo.BusinessInfo(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = businessinfo.BusinessInfo.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.update_business_info(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/accounts/v1beta/{business_info.name=accounts/*/businessInfo}" + % client.transport._host, + args[1], + ) + + +def test_update_business_info_rest_flattened_error(transport: str = "rest"): + client = BusinessInfoServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_business_info( + businessinfo.UpdateBusinessInfoRequest(), + business_info=businessinfo.BusinessInfo(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +def test_update_business_info_rest_error(): + client = BusinessInfoServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.BusinessInfoServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = BusinessInfoServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.BusinessInfoServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = BusinessInfoServiceClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. 
+ transport = transports.BusinessInfoServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = BusinessInfoServiceClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = BusinessInfoServiceClient( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.BusinessInfoServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = BusinessInfoServiceClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.BusinessInfoServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = BusinessInfoServiceClient(transport=transport) + assert client.transport is transport + + +def test_transport_get_channel(): + # A client may be instantiated with a custom transport instance. + transport = transports.BusinessInfoServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + transport = transports.BusinessInfoServiceGrpcAsyncIOTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.BusinessInfoServiceGrpcTransport, + transports.BusinessInfoServiceGrpcAsyncIOTransport, + transports.BusinessInfoServiceRestTransport, + ], +) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "rest", + ], +) +def test_transport_kind(transport_name): + transport = BusinessInfoServiceClient.get_transport_class(transport_name)( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert transport.kind == transport_name + + +def test_transport_grpc_default(): + # A client should use the gRPC transport by default. + client = BusinessInfoServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert isinstance( + client.transport, + transports.BusinessInfoServiceGrpcTransport, + ) + + +def test_business_info_service_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.BusinessInfoServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json", + ) + + +def test_business_info_service_base_transport(): + # Instantiate the base transport. 
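+    # __init__ is patched out so that no credentials or scopes are resolved; only
+    # the abstract method surface of the base transport is exercised here.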
+ with mock.patch( + "google.shopping.merchant_accounts_v1beta.services.business_info_service.transports.BusinessInfoServiceTransport.__init__" + ) as Transport: + Transport.return_value = None + transport = transports.BusinessInfoServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. + methods = ( + "get_business_info", + "update_business_info", + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + with pytest.raises(NotImplementedError): + transport.close() + + # Catch all for all remaining methods and properties + remainder = [ + "kind", + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + + +def test_business_info_service_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch( + "google.shopping.merchant_accounts_v1beta.services.business_info_service.transports.BusinessInfoServiceTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.BusinessInfoServiceTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with( + "credentials.json", + scopes=None, + default_scopes=("https://www.googleapis.com/auth/content",), + quota_project_id="octopus", + ) + + +def test_business_info_service_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. + with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( + "google.shopping.merchant_accounts_v1beta.services.business_info_service.transports.BusinessInfoServiceTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.BusinessInfoServiceTransport() + adc.assert_called_once() + + +def test_business_info_service_auth_adc(): + # If no credentials are provided, we should use ADC credentials. + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + BusinessInfoServiceClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=("https://www.googleapis.com/auth/content",), + quota_project_id=None, + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.BusinessInfoServiceGrpcTransport, + transports.BusinessInfoServiceGrpcAsyncIOTransport, + ], +) +def test_business_info_service_transport_auth_adc(transport_class): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
+ with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + adc.assert_called_once_with( + scopes=["1", "2"], + default_scopes=("https://www.googleapis.com/auth/content",), + quota_project_id="octopus", + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.BusinessInfoServiceGrpcTransport, + transports.BusinessInfoServiceGrpcAsyncIOTransport, + transports.BusinessInfoServiceRestTransport, + ], +) +def test_business_info_service_transport_auth_gdch_credentials(transport_class): + host = "https://language.com" + api_audience_tests = [None, "https://language2.com"] + api_audience_expect = [host, "https://language2.com"] + for t, e in zip(api_audience_tests, api_audience_expect): + with mock.patch.object(google.auth, "default", autospec=True) as adc: + gdch_mock = mock.MagicMock() + type(gdch_mock).with_gdch_audience = mock.PropertyMock( + return_value=gdch_mock + ) + adc.return_value = (gdch_mock, None) + transport_class(host=host, api_audience=t) + gdch_mock.with_gdch_audience.assert_called_once_with(e) + + +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.BusinessInfoServiceGrpcTransport, grpc_helpers), + (transports.BusinessInfoServiceGrpcAsyncIOTransport, grpc_helpers_async), + ], +) +def test_business_info_service_transport_create_channel(transport_class, grpc_helpers): + # If credentials and host are not provided, the transport class should use + # ADC credentials. + with mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + + create_channel.assert_called_with( + "merchantapi.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + default_scopes=("https://www.googleapis.com/auth/content",), + scopes=["1", "2"], + default_host="merchantapi.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.BusinessInfoServiceGrpcTransport, + transports.BusinessInfoServiceGrpcAsyncIOTransport, + ], +) +def test_business_info_service_grpc_transport_client_cert_source_for_mtls( + transport_class, +): + cred = ga_credentials.AnonymousCredentials() + + # Check ssl_channel_credentials is used if provided. 
+ with mock.patch.object(transport_class, "create_channel") as mock_create_channel: + mock_ssl_channel_creds = mock.Mock() + transport_class( + host="squid.clam.whelk", + credentials=cred, + ssl_channel_credentials=mock_ssl_channel_creds, + ) + mock_create_channel.assert_called_once_with( + "squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_channel_creds, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls + # is used. + with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): + with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: + transport_class( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback, + ) + expected_cert, expected_key = client_cert_source_callback() + mock_ssl_cred.assert_called_once_with( + certificate_chain=expected_cert, private_key=expected_key + ) + + +def test_business_info_service_http_transport_client_cert_source_for_mtls(): + cred = ga_credentials.AnonymousCredentials() + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ) as mock_configure_mtls_channel: + transports.BusinessInfoServiceRestTransport( + credentials=cred, client_cert_source_for_mtls=client_cert_source_callback + ) + mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + "rest", + ], +) +def test_business_info_service_host_no_port(transport_name): + client = BusinessInfoServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="merchantapi.googleapis.com" + ), + transport=transport_name, + ) + assert client.transport._host == ( + "merchantapi.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://merchantapi.googleapis.com" + ) + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + "rest", + ], +) +def test_business_info_service_host_with_port(transport_name): + client = BusinessInfoServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="merchantapi.googleapis.com:8000" + ), + transport=transport_name, + ) + assert client.transport._host == ( + "merchantapi.googleapis.com:8000" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://merchantapi.googleapis.com:8000" + ) + + +@pytest.mark.parametrize( + "transport_name", + [ + "rest", + ], +) +def test_business_info_service_client_transport_session_collision(transport_name): + creds1 = ga_credentials.AnonymousCredentials() + creds2 = ga_credentials.AnonymousCredentials() + client1 = BusinessInfoServiceClient( + credentials=creds1, + transport=transport_name, + ) + client2 = BusinessInfoServiceClient( + credentials=creds2, + transport=transport_name, + ) + session1 = client1.transport.get_business_info._session + session2 = client2.transport.get_business_info._session + assert session1 != session2 + session1 = client1.transport.update_business_info._session + session2 = client2.transport.update_business_info._session + assert session1 != session2 + + +def 
test_business_info_service_grpc_transport_channel(): + channel = grpc.secure_channel("http://localhost/", grpc.local_channel_credentials()) + + # Check that channel is used if provided. + transport = transports.BusinessInfoServiceGrpcTransport( + host="squid.clam.whelk", + channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None + + +def test_business_info_service_grpc_asyncio_transport_channel(): + channel = aio.secure_channel("http://localhost/", grpc.local_channel_credentials()) + + # Check that channel is used if provided. + transport = transports.BusinessInfoServiceGrpcAsyncIOTransport( + host="squid.clam.whelk", + channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. +@pytest.mark.parametrize( + "transport_class", + [ + transports.BusinessInfoServiceGrpcTransport, + transports.BusinessInfoServiceGrpcAsyncIOTransport, + ], +) +def test_business_info_service_transport_channel_mtls_with_client_cert_source( + transport_class, +): + with mock.patch( + "grpc.ssl_channel_credentials", autospec=True + ) as grpc_ssl_channel_cred: + with mock.patch.object( + transport_class, "create_channel" + ) as grpc_create_channel: + mock_ssl_cred = mock.Mock() + grpc_ssl_channel_cred.return_value = mock_ssl_cred + + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + + cred = ga_credentials.AnonymousCredentials() + with pytest.warns(DeprecationWarning): + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (cred, None) + transport = transport_class( + host="squid.clam.whelk", + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=client_cert_source_callback, + ) + adc.assert_called_once() + + grpc_ssl_channel_cred.assert_called_once_with( + certificate_chain=b"cert bytes", private_key=b"key bytes" + ) + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + assert transport._ssl_channel_credentials == mock_ssl_cred + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. 
+@pytest.mark.parametrize( + "transport_class", + [ + transports.BusinessInfoServiceGrpcTransport, + transports.BusinessInfoServiceGrpcAsyncIOTransport, + ], +) +def test_business_info_service_transport_channel_mtls_with_adc(transport_class): + mock_ssl_cred = mock.Mock() + with mock.patch.multiple( + "google.auth.transport.grpc.SslCredentials", + __init__=mock.Mock(return_value=None), + ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), + ): + with mock.patch.object( + transport_class, "create_channel" + ) as grpc_create_channel: + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + mock_cred = mock.Mock() + + with pytest.warns(DeprecationWarning): + transport = transport_class( + host="squid.clam.whelk", + credentials=mock_cred, + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=None, + ) + + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=mock_cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + + +def test_business_info_path(): + account = "squid" + expected = "accounts/{account}/businessInfo".format( + account=account, + ) + actual = BusinessInfoServiceClient.business_info_path(account) + assert expected == actual + + +def test_parse_business_info_path(): + expected = { + "account": "clam", + } + path = BusinessInfoServiceClient.business_info_path(**expected) + + # Check that the path construction is reversible. + actual = BusinessInfoServiceClient.parse_business_info_path(path) + assert expected == actual + + +def test_common_billing_account_path(): + billing_account = "whelk" + expected = "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + actual = BusinessInfoServiceClient.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "octopus", + } + path = BusinessInfoServiceClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. + actual = BusinessInfoServiceClient.parse_common_billing_account_path(path) + assert expected == actual + + +def test_common_folder_path(): + folder = "oyster" + expected = "folders/{folder}".format( + folder=folder, + ) + actual = BusinessInfoServiceClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "nudibranch", + } + path = BusinessInfoServiceClient.common_folder_path(**expected) + + # Check that the path construction is reversible. + actual = BusinessInfoServiceClient.parse_common_folder_path(path) + assert expected == actual + + +def test_common_organization_path(): + organization = "cuttlefish" + expected = "organizations/{organization}".format( + organization=organization, + ) + actual = BusinessInfoServiceClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "mussel", + } + path = BusinessInfoServiceClient.common_organization_path(**expected) + + # Check that the path construction is reversible. 
+ actual = BusinessInfoServiceClient.parse_common_organization_path(path) + assert expected == actual + + +def test_common_project_path(): + project = "winkle" + expected = "projects/{project}".format( + project=project, + ) + actual = BusinessInfoServiceClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "nautilus", + } + path = BusinessInfoServiceClient.common_project_path(**expected) + + # Check that the path construction is reversible. + actual = BusinessInfoServiceClient.parse_common_project_path(path) + assert expected == actual + + +def test_common_location_path(): + project = "scallop" + location = "abalone" + expected = "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) + actual = BusinessInfoServiceClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "squid", + "location": "clam", + } + path = BusinessInfoServiceClient.common_location_path(**expected) + + # Check that the path construction is reversible. + actual = BusinessInfoServiceClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object( + transports.BusinessInfoServiceTransport, "_prep_wrapped_messages" + ) as prep: + client = BusinessInfoServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object( + transports.BusinessInfoServiceTransport, "_prep_wrapped_messages" + ) as prep: + transport_class = BusinessInfoServiceClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + +@pytest.mark.asyncio +async def test_transport_close_async(): + client = BusinessInfoServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + with mock.patch.object( + type(getattr(client.transport, "grpc_channel")), "close" + ) as close: + async with client: + close.assert_not_called() + close.assert_called_once() + + +def test_transport_close(): + transports = { + "rest": "_session", + "grpc": "_grpc_channel", + } + + for transport, close_name in transports.items(): + client = BusinessInfoServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + with mock.patch.object( + type(getattr(client.transport, close_name)), "close" + ) as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +def test_client_ctx(): + transports = [ + "rest", + "grpc", + ] + for transport in transports: + client = BusinessInfoServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + # Test client calls underlying transport. 
+ with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() + + +@pytest.mark.parametrize( + "client_class,transport_class", + [ + (BusinessInfoServiceClient, transports.BusinessInfoServiceGrpcTransport), + ( + BusinessInfoServiceAsyncClient, + transports.BusinessInfoServiceGrpcAsyncIOTransport, + ), + ], +) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) diff --git a/packages/google-shopping-merchant-accounts/tests/unit/gapic/merchant_accounts_v1beta/test_email_preferences_service.py b/packages/google-shopping-merchant-accounts/tests/unit/gapic/merchant_accounts_v1beta/test_email_preferences_service.py new file mode 100644 index 000000000000..df0d11d94445 --- /dev/null +++ b/packages/google-shopping-merchant-accounts/tests/unit/gapic/merchant_accounts_v1beta/test_email_preferences_service.py @@ -0,0 +1,3487 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import os + +# try/except added for compatibility with python < 3.8 +try: + from unittest import mock + from unittest.mock import AsyncMock # pragma: NO COVER +except ImportError: # pragma: NO COVER + import mock + +from collections.abc import Iterable +import json +import math + +from google.api_core import gapic_v1, grpc_helpers, grpc_helpers_async, path_template +from google.api_core import api_core_version, client_options +from google.api_core import exceptions as core_exceptions +import google.auth +from google.auth import credentials as ga_credentials +from google.auth.exceptions import MutualTLSChannelError +from google.oauth2 import service_account +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import json_format +import grpc +from grpc.experimental import aio +from proto.marshal.rules import wrappers +from proto.marshal.rules.dates import DurationRule, TimestampRule +import pytest +from requests import PreparedRequest, Request, Response +from requests.sessions import Session + +from google.shopping.merchant_accounts_v1beta.services.email_preferences_service import ( + EmailPreferencesServiceAsyncClient, + EmailPreferencesServiceClient, + transports, +) +from google.shopping.merchant_accounts_v1beta.types import emailpreferences + + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + + +# If default endpoint is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. +def modify_default_endpoint(client): + return ( + "foo.googleapis.com" + if ("localhost" in client.DEFAULT_ENDPOINT) + else client.DEFAULT_ENDPOINT + ) + + +# If default endpoint template is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint template so the client can produce a different +# mtls endpoint for endpoint testing purposes. 
+def modify_default_endpoint_template(client): + return ( + "test.{UNIVERSE_DOMAIN}" + if ("localhost" in client._DEFAULT_ENDPOINT_TEMPLATE) + else client._DEFAULT_ENDPOINT_TEMPLATE + ) + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert EmailPreferencesServiceClient._get_default_mtls_endpoint(None) is None + assert ( + EmailPreferencesServiceClient._get_default_mtls_endpoint(api_endpoint) + == api_mtls_endpoint + ) + assert ( + EmailPreferencesServiceClient._get_default_mtls_endpoint(api_mtls_endpoint) + == api_mtls_endpoint + ) + assert ( + EmailPreferencesServiceClient._get_default_mtls_endpoint(sandbox_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + EmailPreferencesServiceClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + EmailPreferencesServiceClient._get_default_mtls_endpoint(non_googleapi) + == non_googleapi + ) + + +def test__read_environment_variables(): + assert EmailPreferencesServiceClient._read_environment_variables() == ( + False, + "auto", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + assert EmailPreferencesServiceClient._read_environment_variables() == ( + True, + "auto", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + assert EmailPreferencesServiceClient._read_environment_variables() == ( + False, + "auto", + None, + ) + + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError) as excinfo: + EmailPreferencesServiceClient._read_environment_variables() + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + assert EmailPreferencesServiceClient._read_environment_variables() == ( + False, + "never", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + assert EmailPreferencesServiceClient._read_environment_variables() == ( + False, + "always", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}): + assert EmailPreferencesServiceClient._read_environment_variables() == ( + False, + "auto", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + EmailPreferencesServiceClient._read_environment_variables() + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + with mock.patch.dict(os.environ, {"GOOGLE_CLOUD_UNIVERSE_DOMAIN": "foo.com"}): + assert EmailPreferencesServiceClient._read_environment_variables() == ( + False, + "auto", + "foo.com", + ) + + +def test__get_client_cert_source(): + mock_provided_cert_source = mock.Mock() + mock_default_cert_source = mock.Mock() + + assert EmailPreferencesServiceClient._get_client_cert_source(None, False) is None + assert ( + EmailPreferencesServiceClient._get_client_cert_source( + mock_provided_cert_source, False + ) + is None + ) + assert ( + EmailPreferencesServiceClient._get_client_cert_source( + mock_provided_cert_source, True + ) + == 
mock_provided_cert_source + ) + + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", return_value=True + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=mock_default_cert_source, + ): + assert ( + EmailPreferencesServiceClient._get_client_cert_source(None, True) + is mock_default_cert_source + ) + assert ( + EmailPreferencesServiceClient._get_client_cert_source( + mock_provided_cert_source, "true" + ) + is mock_provided_cert_source + ) + + +@mock.patch.object( + EmailPreferencesServiceClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(EmailPreferencesServiceClient), +) +@mock.patch.object( + EmailPreferencesServiceAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(EmailPreferencesServiceAsyncClient), +) +def test__get_api_endpoint(): + api_override = "foo.com" + mock_client_cert_source = mock.Mock() + default_universe = EmailPreferencesServiceClient._DEFAULT_UNIVERSE + default_endpoint = EmailPreferencesServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=default_universe + ) + mock_universe = "bar.com" + mock_endpoint = EmailPreferencesServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=mock_universe + ) + + assert ( + EmailPreferencesServiceClient._get_api_endpoint( + api_override, mock_client_cert_source, default_universe, "always" + ) + == api_override + ) + assert ( + EmailPreferencesServiceClient._get_api_endpoint( + None, mock_client_cert_source, default_universe, "auto" + ) + == EmailPreferencesServiceClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + EmailPreferencesServiceClient._get_api_endpoint( + None, None, default_universe, "auto" + ) + == default_endpoint + ) + assert ( + EmailPreferencesServiceClient._get_api_endpoint( + None, None, default_universe, "always" + ) + == EmailPreferencesServiceClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + EmailPreferencesServiceClient._get_api_endpoint( + None, mock_client_cert_source, default_universe, "always" + ) + == EmailPreferencesServiceClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + EmailPreferencesServiceClient._get_api_endpoint( + None, None, mock_universe, "never" + ) + == mock_endpoint + ) + assert ( + EmailPreferencesServiceClient._get_api_endpoint( + None, None, default_universe, "never" + ) + == default_endpoint + ) + + with pytest.raises(MutualTLSChannelError) as excinfo: + EmailPreferencesServiceClient._get_api_endpoint( + None, mock_client_cert_source, mock_universe, "auto" + ) + assert ( + str(excinfo.value) + == "mTLS is not supported in any universe other than googleapis.com." + ) + + +def test__get_universe_domain(): + client_universe_domain = "foo.com" + universe_domain_env = "bar.com" + + assert ( + EmailPreferencesServiceClient._get_universe_domain( + client_universe_domain, universe_domain_env + ) + == client_universe_domain + ) + assert ( + EmailPreferencesServiceClient._get_universe_domain(None, universe_domain_env) + == universe_domain_env + ) + assert ( + EmailPreferencesServiceClient._get_universe_domain(None, None) + == EmailPreferencesServiceClient._DEFAULT_UNIVERSE + ) + + with pytest.raises(ValueError) as excinfo: + EmailPreferencesServiceClient._get_universe_domain("", None) + assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
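+
+
+# Note on the checks above: the universe domain is resolved in order of
+# precedence -- an explicitly configured client universe domain wins over the
+# GOOGLE_CLOUD_UNIVERSE_DOMAIN environment variable, which in turn wins over
+# the library default (googleapis.com); an empty string is rejected. On
+# versions of google-api-core that support it, a caller could pin the universe
+# domain via client options, for example (illustrative):
+#
+#   options = client_options.ClientOptions(universe_domain="example.com")
+#   client = EmailPreferencesServiceClient(client_options=options)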
+ + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + ( + EmailPreferencesServiceClient, + transports.EmailPreferencesServiceGrpcTransport, + "grpc", + ), + ( + EmailPreferencesServiceClient, + transports.EmailPreferencesServiceRestTransport, + "rest", + ), + ], +) +def test__validate_universe_domain(client_class, transport_class, transport_name): + client = client_class( + transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) + ) + assert client._validate_universe_domain() == True + + # Test the case when universe is already validated. + assert client._validate_universe_domain() == True + + if transport_name == "grpc": + # Test the case where credentials are provided by the + # `local_channel_credentials`. The default universes in both match. + channel = grpc.secure_channel( + "http://localhost/", grpc.local_channel_credentials() + ) + client = client_class(transport=transport_class(channel=channel)) + assert client._validate_universe_domain() == True + + # Test the case where credentials do not exist: e.g. a transport is provided + # with no credentials. Validation should still succeed because there is no + # mismatch with non-existent credentials. + channel = grpc.secure_channel( + "http://localhost/", grpc.local_channel_credentials() + ) + transport = transport_class(channel=channel) + transport._credentials = None + client = client_class(transport=transport) + assert client._validate_universe_domain() == True + + # TODO: This is needed to cater for older versions of google-auth + # Make this test unconditional once the minimum supported version of + # google-auth becomes 2.23.0 or higher. + google_auth_major, google_auth_minor = [ + int(part) for part in google.auth.__version__.split(".")[0:2] + ] + if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): + credentials = ga_credentials.AnonymousCredentials() + credentials._universe_domain = "foo.com" + # Test the case when there is a universe mismatch from the credentials. + client = client_class(transport=transport_class(credentials=credentials)) + with pytest.raises(ValueError) as excinfo: + client._validate_universe_domain() + assert ( + str(excinfo.value) + == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." + ) + + # Test the case when there is a universe mismatch from the client. + # + # TODO: Make this test unconditional once the minimum supported version of + # google-api-core becomes 2.15.0 or higher. + api_core_major, api_core_minor = [ + int(part) for part in api_core_version.__version__.split(".")[0:2] + ] + if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): + client = client_class( + client_options={"universe_domain": "bar.com"}, + transport=transport_class( + credentials=ga_credentials.AnonymousCredentials(), + ), + ) + with pytest.raises(ValueError) as excinfo: + client._validate_universe_domain() + assert ( + str(excinfo.value) + == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." 
+ ) + + # Test that ValueError is raised if universe_domain is provided via client options and credentials is None + with pytest.raises(ValueError): + client._compare_universes("foo.bar", None) + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (EmailPreferencesServiceClient, "grpc"), + (EmailPreferencesServiceAsyncClient, "grpc_asyncio"), + (EmailPreferencesServiceClient, "rest"), + ], +) +def test_email_preferences_service_client_from_service_account_info( + client_class, transport_name +): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_info" + ) as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info, transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + "merchantapi.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://merchantapi.googleapis.com" + ) + + +@pytest.mark.parametrize( + "transport_class,transport_name", + [ + (transports.EmailPreferencesServiceGrpcTransport, "grpc"), + (transports.EmailPreferencesServiceGrpcAsyncIOTransport, "grpc_asyncio"), + (transports.EmailPreferencesServiceRestTransport, "rest"), + ], +) +def test_email_preferences_service_client_service_account_always_use_jwt( + transport_class, transport_name +): + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) + + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=False) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (EmailPreferencesServiceClient, "grpc"), + (EmailPreferencesServiceAsyncClient, "grpc_asyncio"), + (EmailPreferencesServiceClient, "rest"), + ], +) +def test_email_preferences_service_client_from_service_account_file( + client_class, transport_name +): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_file" + ) as factory: + factory.return_value = creds + client = client_class.from_service_account_file( + "dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + client = client_class.from_service_account_json( + "dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + "merchantapi.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://merchantapi.googleapis.com" + ) + + +def test_email_preferences_service_client_get_transport_class(): + transport = EmailPreferencesServiceClient.get_transport_class() + available_transports = [ + transports.EmailPreferencesServiceGrpcTransport, + transports.EmailPreferencesServiceRestTransport, + ] + assert transport in available_transports + + transport = 
EmailPreferencesServiceClient.get_transport_class("grpc") + assert transport == transports.EmailPreferencesServiceGrpcTransport + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + ( + EmailPreferencesServiceClient, + transports.EmailPreferencesServiceGrpcTransport, + "grpc", + ), + ( + EmailPreferencesServiceAsyncClient, + transports.EmailPreferencesServiceGrpcAsyncIOTransport, + "grpc_asyncio", + ), + ( + EmailPreferencesServiceClient, + transports.EmailPreferencesServiceRestTransport, + "rest", + ), + ], +) +@mock.patch.object( + EmailPreferencesServiceClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(EmailPreferencesServiceClient), +) +@mock.patch.object( + EmailPreferencesServiceAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(EmailPreferencesServiceAsyncClient), +) +def test_email_preferences_service_client_client_options( + client_class, transport_class, transport_name +): + # Check that if channel is provided we won't create a new one. + with mock.patch.object(EmailPreferencesServiceClient, "get_transport_class") as gtc: + transport = transport_class(credentials=ga_credentials.AnonymousCredentials()) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object(EmailPreferencesServiceClient, "get_transport_class") as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. + options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name, client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. 
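+    # Only "never", "auto" and "always" are accepted; any other value should
+    # surface as a MutualTLSChannelError when the client is constructed.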
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + client = client_class(transport=transport_name) + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError) as excinfo: + client = client_class(transport=transport_name) + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + # Check the case api_endpoint is provided + options = client_options.ClientOptions( + api_audience="https://language.googleapis.com" + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience="https://language.googleapis.com", + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,use_client_cert_env", + [ + ( + EmailPreferencesServiceClient, + transports.EmailPreferencesServiceGrpcTransport, + "grpc", + "true", + ), + ( + EmailPreferencesServiceAsyncClient, + transports.EmailPreferencesServiceGrpcAsyncIOTransport, + "grpc_asyncio", + "true", + ), + ( + EmailPreferencesServiceClient, + transports.EmailPreferencesServiceGrpcTransport, + "grpc", + "false", + ), + ( + EmailPreferencesServiceAsyncClient, + transports.EmailPreferencesServiceGrpcAsyncIOTransport, + "grpc_asyncio", + "false", + ), + ( + EmailPreferencesServiceClient, + transports.EmailPreferencesServiceRestTransport, + "rest", + "true", + ), + ( + EmailPreferencesServiceClient, + transports.EmailPreferencesServiceRestTransport, + "rest", + "false", + ), + ], +) +@mock.patch.object( + EmailPreferencesServiceClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(EmailPreferencesServiceClient), +) +@mock.patch.object( + EmailPreferencesServiceAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(EmailPreferencesServiceAsyncClient), +) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_email_preferences_service_client_mtls_env_auto( + client_class, transport_class, transport_name, use_client_cert_env +): + # This tests 
the endpoint autoswitch behavior. Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. + + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + options = client_options.ClientOptions( + client_cert_source=client_cert_source_callback + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ) + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=client_cert_source_callback, + ): + if use_client_cert_env == "false": + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ) + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case client_cert_source and ADC client cert are not provided. 
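+    # With no certificate available from either source, the client is expected
+    # to fall back to the regular endpoint and to pass no mTLS certificate
+    # source to the transport, whatever GOOGLE_API_USE_CLIENT_CERTIFICATE says.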
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class", [EmailPreferencesServiceClient, EmailPreferencesServiceAsyncClient] +) +@mock.patch.object( + EmailPreferencesServiceClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(EmailPreferencesServiceClient), +) +@mock.patch.object( + EmailPreferencesServiceAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(EmailPreferencesServiceAsyncClient), +) +def test_email_preferences_service_client_get_mtls_endpoint_and_cert_source( + client_class, +): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=mock_client_cert_source, + ): + ( + api_endpoint, + cert_source, + ) = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + client_class.get_mtls_endpoint_and_cert_source() + + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError) as excinfo: + client_class.get_mtls_endpoint_and_cert_source() + + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + + +@pytest.mark.parametrize( + "client_class", [EmailPreferencesServiceClient, EmailPreferencesServiceAsyncClient] +) +@mock.patch.object( + EmailPreferencesServiceClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(EmailPreferencesServiceClient), +) +@mock.patch.object( + EmailPreferencesServiceAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(EmailPreferencesServiceAsyncClient), +) +def test_email_preferences_service_client_client_api_endpoint(client_class): + mock_client_cert_source = client_cert_source_callback + api_override = "foo.com" + default_universe = EmailPreferencesServiceClient._DEFAULT_UNIVERSE + default_endpoint = EmailPreferencesServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=default_universe + ) + mock_universe = "bar.com" + mock_endpoint = EmailPreferencesServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=mock_universe + ) + + # If ClientOptions.api_endpoint is set and GOOGLE_API_USE_CLIENT_CERTIFICATE="true", + # use ClientOptions.api_endpoint as the api endpoint regardless. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ): + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=api_override + ) + client = client_class( + client_options=options, + credentials=ga_credentials.AnonymousCredentials(), + ) + assert client.api_endpoint == api_override + + # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="never", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + client = client_class(credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == default_endpoint + + # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="always", + # use the DEFAULT_MTLS_ENDPOINT as the api endpoint. 
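+    # "always" forces the mTLS endpoint even though no client certificate is
+    # configured here; the endpoint choice does not depend on whether a
+    # certificate is actually available.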
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + client = client_class(credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + + # If ClientOptions.api_endpoint is not set, GOOGLE_API_USE_MTLS_ENDPOINT="auto" (default), + # GOOGLE_API_USE_CLIENT_CERTIFICATE="false" (default), default cert source doesn't exist, + # and ClientOptions.universe_domain="bar.com", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with universe domain as the api endpoint. + options = client_options.ClientOptions() + universe_exists = hasattr(options, "universe_domain") + if universe_exists: + options = client_options.ClientOptions(universe_domain=mock_universe) + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + else: + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + assert client.api_endpoint == ( + mock_endpoint if universe_exists else default_endpoint + ) + assert client.universe_domain == ( + mock_universe if universe_exists else default_universe + ) + + # If ClientOptions does not have a universe domain attribute and GOOGLE_API_USE_MTLS_ENDPOINT="never", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. + options = client_options.ClientOptions() + if hasattr(options, "universe_domain"): + delattr(options, "universe_domain") + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + assert client.api_endpoint == default_endpoint + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + ( + EmailPreferencesServiceClient, + transports.EmailPreferencesServiceGrpcTransport, + "grpc", + ), + ( + EmailPreferencesServiceAsyncClient, + transports.EmailPreferencesServiceGrpcAsyncIOTransport, + "grpc_asyncio", + ), + ( + EmailPreferencesServiceClient, + transports.EmailPreferencesServiceRestTransport, + "rest", + ), + ], +) +def test_email_preferences_service_client_client_options_scopes( + client_class, transport_class, transport_name +): + # Check the case scopes are provided. + options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + EmailPreferencesServiceClient, + transports.EmailPreferencesServiceGrpcTransport, + "grpc", + grpc_helpers, + ), + ( + EmailPreferencesServiceAsyncClient, + transports.EmailPreferencesServiceGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + ( + EmailPreferencesServiceClient, + transports.EmailPreferencesServiceRestTransport, + "rest", + None, + ), + ], +) +def test_email_preferences_service_client_client_options_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. 
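+    # The file path set via ClientOptions is expected to be forwarded to the
+    # transport untouched as `credentials_file` (with `credentials=None`)
+    # rather than being loaded by the client itself.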
+ options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +def test_email_preferences_service_client_client_options_from_dict(): + with mock.patch( + "google.shopping.merchant_accounts_v1beta.services.email_preferences_service.transports.EmailPreferencesServiceGrpcTransport.__init__" + ) as grpc_transport: + grpc_transport.return_value = None + client = EmailPreferencesServiceClient( + client_options={"api_endpoint": "squid.clam.whelk"} + ) + grpc_transport.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + EmailPreferencesServiceClient, + transports.EmailPreferencesServiceGrpcTransport, + "grpc", + grpc_helpers, + ), + ( + EmailPreferencesServiceAsyncClient, + transports.EmailPreferencesServiceGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + ], +) +def test_email_preferences_service_client_create_channel_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. + options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # test that the credentials from file are saved and used as the credentials. 
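+    # The channel should be built with the credentials loaded from that file,
+    # the default "https://www.googleapis.com/auth/content" scope and the
+    # default "merchantapi.googleapis.com" host.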
+ with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel" + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + file_creds = ga_credentials.AnonymousCredentials() + load_creds.return_value = (file_creds, None) + adc.return_value = (creds, None) + client = client_class(client_options=options, transport=transport_name) + create_channel.assert_called_with( + "merchantapi.googleapis.com:443", + credentials=file_creds, + credentials_file=None, + quota_project_id=None, + default_scopes=("https://www.googleapis.com/auth/content",), + scopes=None, + default_host="merchantapi.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "request_type", + [ + emailpreferences.GetEmailPreferencesRequest, + dict, + ], +) +def test_get_email_preferences(request_type, transport: str = "grpc"): + client = EmailPreferencesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_email_preferences), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = emailpreferences.EmailPreferences( + name="name_value", + news_and_tips=emailpreferences.EmailPreferences.OptInState.OPTED_OUT, + ) + response = client.get_email_preferences(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = emailpreferences.GetEmailPreferencesRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, emailpreferences.EmailPreferences) + assert response.name == "name_value" + assert ( + response.news_and_tips == emailpreferences.EmailPreferences.OptInState.OPTED_OUT + ) + + +def test_get_email_preferences_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = EmailPreferencesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_email_preferences), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.get_email_preferences() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == emailpreferences.GetEmailPreferencesRequest() + + +def test_get_email_preferences_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. 
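+    # Here the request's `name` field is set explicitly, and the test verifies
+    # that it reaches the transport unchanged.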
+ client = EmailPreferencesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = emailpreferences.GetEmailPreferencesRequest( + name="name_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_email_preferences), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.get_email_preferences(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == emailpreferences.GetEmailPreferencesRequest( + name="name_value", + ) + + +def test_get_email_preferences_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = EmailPreferencesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.get_email_preferences + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.get_email_preferences + ] = mock_rpc + request = {} + client.get_email_preferences(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.get_email_preferences(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_get_email_preferences_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = EmailPreferencesServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_email_preferences), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
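+        # The async transport returns call objects, so the fake response is
+        # wrapped in a FakeUnaryUnaryCall that can be awaited like a real
+        # unary-unary gRPC call.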
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + emailpreferences.EmailPreferences( + name="name_value", + news_and_tips=emailpreferences.EmailPreferences.OptInState.OPTED_OUT, + ) + ) + response = await client.get_email_preferences() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == emailpreferences.GetEmailPreferencesRequest() + + +@pytest.mark.asyncio +async def test_get_email_preferences_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = EmailPreferencesServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.get_email_preferences + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[ + client._client._transport.get_email_preferences + ] = mock_object + + request = {} + await client.get_email_preferences(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + await client.get_email_preferences(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + +@pytest.mark.asyncio +async def test_get_email_preferences_async( + transport: str = "grpc_asyncio", + request_type=emailpreferences.GetEmailPreferencesRequest, +): + client = EmailPreferencesServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_email_preferences), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + emailpreferences.EmailPreferences( + name="name_value", + news_and_tips=emailpreferences.EmailPreferences.OptInState.OPTED_OUT, + ) + ) + response = await client.get_email_preferences(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = emailpreferences.GetEmailPreferencesRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, emailpreferences.EmailPreferences) + assert response.name == "name_value" + assert ( + response.news_and_tips == emailpreferences.EmailPreferences.OptInState.OPTED_OUT + ) + + +@pytest.mark.asyncio +async def test_get_email_preferences_async_from_dict(): + await test_get_email_preferences_async(request_type=dict) + + +def test_get_email_preferences_field_headers(): + client = EmailPreferencesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = emailpreferences.GetEmailPreferencesRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_email_preferences), "__call__" + ) as call: + call.return_value = emailpreferences.EmailPreferences() + client.get_email_preferences(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_email_preferences_field_headers_async(): + client = EmailPreferencesServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = emailpreferences.GetEmailPreferencesRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_email_preferences), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + emailpreferences.EmailPreferences() + ) + await client.get_email_preferences(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_get_email_preferences_flattened(): + client = EmailPreferencesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_email_preferences), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = emailpreferences.EmailPreferences() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_email_preferences( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_get_email_preferences_flattened_error(): + client = EmailPreferencesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
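+    # Callers must pick one style or the other:
+    #
+    #   client.get_email_preferences(name="name_value")          # OK
+    #   client.get_email_preferences(request)                    # OK
+    #   client.get_email_preferences(request, name="name_value") # ValueError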
+ with pytest.raises(ValueError): + client.get_email_preferences( + emailpreferences.GetEmailPreferencesRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_get_email_preferences_flattened_async(): + client = EmailPreferencesServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_email_preferences), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = emailpreferences.EmailPreferences() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + emailpreferences.EmailPreferences() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_email_preferences( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_get_email_preferences_flattened_error_async(): + client = EmailPreferencesServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.get_email_preferences( + emailpreferences.GetEmailPreferencesRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + emailpreferences.UpdateEmailPreferencesRequest, + dict, + ], +) +def test_update_email_preferences(request_type, transport: str = "grpc"): + client = EmailPreferencesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_email_preferences), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = emailpreferences.EmailPreferences( + name="name_value", + news_and_tips=emailpreferences.EmailPreferences.OptInState.OPTED_OUT, + ) + response = client.update_email_preferences(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = emailpreferences.UpdateEmailPreferencesRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, emailpreferences.EmailPreferences) + assert response.name == "name_value" + assert ( + response.news_and_tips == emailpreferences.EmailPreferences.OptInState.OPTED_OUT + ) + + +def test_update_email_preferences_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = EmailPreferencesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
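+    # Even with no request object and no flattened fields, the call should
+    # still reach the stub with a default UpdateEmailPreferencesRequest.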
+ with mock.patch.object( + type(client.transport.update_email_preferences), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.update_email_preferences() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == emailpreferences.UpdateEmailPreferencesRequest() + + +def test_update_email_preferences_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = EmailPreferencesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = emailpreferences.UpdateEmailPreferencesRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_email_preferences), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.update_email_preferences(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == emailpreferences.UpdateEmailPreferencesRequest() + + +def test_update_email_preferences_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = EmailPreferencesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.update_email_preferences + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.update_email_preferences + ] = mock_rpc + request = {} + client.update_email_preferences(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.update_email_preferences(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_update_email_preferences_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = EmailPreferencesServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_email_preferences), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + emailpreferences.EmailPreferences( + name="name_value", + news_and_tips=emailpreferences.EmailPreferences.OptInState.OPTED_OUT, + ) + ) + response = await client.update_email_preferences() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == emailpreferences.UpdateEmailPreferencesRequest() + + +@pytest.mark.asyncio +async def test_update_email_preferences_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = EmailPreferencesServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.update_email_preferences + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[ + client._client._transport.update_email_preferences + ] = mock_object + + request = {} + await client.update_email_preferences(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + await client.update_email_preferences(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + +@pytest.mark.asyncio +async def test_update_email_preferences_async( + transport: str = "grpc_asyncio", + request_type=emailpreferences.UpdateEmailPreferencesRequest, +): + client = EmailPreferencesServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_email_preferences), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + emailpreferences.EmailPreferences( + name="name_value", + news_and_tips=emailpreferences.EmailPreferences.OptInState.OPTED_OUT, + ) + ) + response = await client.update_email_preferences(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = emailpreferences.UpdateEmailPreferencesRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, emailpreferences.EmailPreferences) + assert response.name == "name_value" + assert ( + response.news_and_tips == emailpreferences.EmailPreferences.OptInState.OPTED_OUT + ) + + +@pytest.mark.asyncio +async def test_update_email_preferences_async_from_dict(): + await test_update_email_preferences_async(request_type=dict) + + +def test_update_email_preferences_field_headers(): + client = EmailPreferencesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = emailpreferences.UpdateEmailPreferencesRequest() + + request.email_preferences.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_email_preferences), "__call__" + ) as call: + call.return_value = emailpreferences.EmailPreferences() + client.update_email_preferences(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "email_preferences.name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_update_email_preferences_field_headers_async(): + client = EmailPreferencesServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = emailpreferences.UpdateEmailPreferencesRequest() + + request.email_preferences.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_email_preferences), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + emailpreferences.EmailPreferences() + ) + await client.update_email_preferences(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "email_preferences.name=name_value", + ) in kw["metadata"] + + +def test_update_email_preferences_flattened(): + client = EmailPreferencesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_email_preferences), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = emailpreferences.EmailPreferences() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.update_email_preferences( + email_preferences=emailpreferences.EmailPreferences(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. 
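+        # The flattened `email_preferences` and `update_mask` keyword arguments
+        # should have been copied onto the matching fields of the request that
+        # reached the transport.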
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].email_preferences + mock_val = emailpreferences.EmailPreferences(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + assert arg == mock_val + + +def test_update_email_preferences_flattened_error(): + client = EmailPreferencesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_email_preferences( + emailpreferences.UpdateEmailPreferencesRequest(), + email_preferences=emailpreferences.EmailPreferences(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +@pytest.mark.asyncio +async def test_update_email_preferences_flattened_async(): + client = EmailPreferencesServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_email_preferences), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = emailpreferences.EmailPreferences() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + emailpreferences.EmailPreferences() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.update_email_preferences( + email_preferences=emailpreferences.EmailPreferences(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].email_preferences + mock_val = emailpreferences.EmailPreferences(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_update_email_preferences_flattened_error_async(): + client = EmailPreferencesServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.update_email_preferences( + emailpreferences.UpdateEmailPreferencesRequest(), + email_preferences=emailpreferences.EmailPreferences(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +@pytest.mark.parametrize( + "request_type", + [ + emailpreferences.GetEmailPreferencesRequest, + dict, + ], +) +def test_get_email_preferences_rest(request_type): + client = EmailPreferencesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "accounts/sample1/users/sample2/emailPreferences"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = emailpreferences.EmailPreferences( + name="name_value", + news_and_tips=emailpreferences.EmailPreferences.OptInState.OPTED_OUT, + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = emailpreferences.EmailPreferences.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_email_preferences(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, emailpreferences.EmailPreferences) + assert response.name == "name_value" + assert ( + response.news_and_tips == emailpreferences.EmailPreferences.OptInState.OPTED_OUT + ) + + +def test_get_email_preferences_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = EmailPreferencesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.get_email_preferences + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.get_email_preferences + ] = mock_rpc + + request = {} + client.get_email_preferences(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.get_email_preferences(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_get_email_preferences_rest_required_fields( + request_type=emailpreferences.GetEmailPreferencesRequest, +): + transport_class = transports.EmailPreferencesServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_email_preferences._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_email_preferences._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = EmailPreferencesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. 
+ return_value = emailpreferences.EmailPreferences() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = emailpreferences.EmailPreferences.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_email_preferences(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_email_preferences_rest_unset_required_fields(): + transport = transports.EmailPreferencesServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_email_preferences._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_email_preferences_rest_interceptors(null_interceptor): + transport = transports.EmailPreferencesServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.EmailPreferencesServiceRestInterceptor(), + ) + client = EmailPreferencesServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.EmailPreferencesServiceRestInterceptor, "post_get_email_preferences" + ) as post, mock.patch.object( + transports.EmailPreferencesServiceRestInterceptor, "pre_get_email_preferences" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = emailpreferences.GetEmailPreferencesRequest.pb( + emailpreferences.GetEmailPreferencesRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = emailpreferences.EmailPreferences.to_json( + emailpreferences.EmailPreferences() + ) + + request = emailpreferences.GetEmailPreferencesRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = emailpreferences.EmailPreferences() + + client.get_email_preferences( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_email_preferences_rest_bad_request( + transport: str = "rest", request_type=emailpreferences.GetEmailPreferencesRequest +): + client = EmailPreferencesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + 
transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "accounts/sample1/users/sample2/emailPreferences"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_email_preferences(request) + + +def test_get_email_preferences_rest_flattened(): + client = EmailPreferencesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = emailpreferences.EmailPreferences() + + # get arguments that satisfy an http rule for this method + sample_request = {"name": "accounts/sample1/users/sample2/emailPreferences"} + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = emailpreferences.EmailPreferences.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_email_preferences(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/accounts/v1beta/{name=accounts/*/users/*/emailPreferences}" + % client.transport._host, + args[1], + ) + + +def test_get_email_preferences_rest_flattened_error(transport: str = "rest"): + client = EmailPreferencesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_email_preferences( + emailpreferences.GetEmailPreferencesRequest(), + name="name_value", + ) + + +def test_get_email_preferences_rest_error(): + client = EmailPreferencesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + emailpreferences.UpdateEmailPreferencesRequest, + dict, + ], +) +def test_update_email_preferences_rest(request_type): + client = EmailPreferencesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "email_preferences": {"name": "accounts/sample1/users/sample2/emailPreferences"} + } + request_init["email_preferences"] = { + "name": "accounts/sample1/users/sample2/emailPreferences", + "news_and_tips": 1, + } + # The version of a generated dependency at test runtime may differ from the version used during generation. 
+ # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = emailpreferences.UpdateEmailPreferencesRequest.meta.fields[ + "email_preferences" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["email_preferences"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["email_preferences"][field])): + del request_init["email_preferences"][field][i][subfield] + else: + del request_init["email_preferences"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = emailpreferences.EmailPreferences( + name="name_value", + news_and_tips=emailpreferences.EmailPreferences.OptInState.OPTED_OUT, + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = emailpreferences.EmailPreferences.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.update_email_preferences(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, emailpreferences.EmailPreferences) + assert response.name == "name_value" + assert ( + response.news_and_tips == emailpreferences.EmailPreferences.OptInState.OPTED_OUT + ) + + +def test_update_email_preferences_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = EmailPreferencesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.update_email_preferences + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.update_email_preferences + ] = mock_rpc + + request = {} + client.update_email_preferences(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.update_email_preferences(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_update_email_preferences_rest_required_fields( + request_type=emailpreferences.UpdateEmailPreferencesRequest, +): + transport_class = transports.EmailPreferencesServiceRestTransport + + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_email_preferences._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_email_preferences._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("update_mask",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + + client = EmailPreferencesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = emailpreferences.EmailPreferences() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "patch", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = emailpreferences.EmailPreferences.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.update_email_preferences(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_update_email_preferences_rest_unset_required_fields(): + transport = transports.EmailPreferencesServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.update_email_preferences._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(("updateMask",)) + & set( + ( + "emailPreferences", + "updateMask", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_email_preferences_rest_interceptors(null_interceptor): + transport = transports.EmailPreferencesServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.EmailPreferencesServiceRestInterceptor(), + ) + client = EmailPreferencesServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.EmailPreferencesServiceRestInterceptor, + "post_update_email_preferences", + ) as post, mock.patch.object( + transports.EmailPreferencesServiceRestInterceptor, + "pre_update_email_preferences", + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = emailpreferences.UpdateEmailPreferencesRequest.pb( + emailpreferences.UpdateEmailPreferencesRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = emailpreferences.EmailPreferences.to_json( + emailpreferences.EmailPreferences() + ) + + request = emailpreferences.UpdateEmailPreferencesRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = emailpreferences.EmailPreferences() + + client.update_email_preferences( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_update_email_preferences_rest_bad_request( + transport: str = "rest", request_type=emailpreferences.UpdateEmailPreferencesRequest +): + client = EmailPreferencesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "email_preferences": {"name": "accounts/sample1/users/sample2/emailPreferences"} + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.update_email_preferences(request) + + +def test_update_email_preferences_rest_flattened(): + client = EmailPreferencesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = emailpreferences.EmailPreferences() + + # get arguments that satisfy an http rule for this method + sample_request = { + "email_preferences": { + "name": "accounts/sample1/users/sample2/emailPreferences" + } + } + + # get truthy value for each flattened field + mock_args = dict( + email_preferences=emailpreferences.EmailPreferences(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = emailpreferences.EmailPreferences.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.update_email_preferences(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/accounts/v1beta/{email_preferences.name=accounts/*/users/*/emailPreferences}" + % client.transport._host, + args[1], + ) + + +def test_update_email_preferences_rest_flattened_error(transport: str = "rest"): + client = EmailPreferencesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_email_preferences( + emailpreferences.UpdateEmailPreferencesRequest(), + email_preferences=emailpreferences.EmailPreferences(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +def test_update_email_preferences_rest_error(): + client = EmailPreferencesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.EmailPreferencesServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = EmailPreferencesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.EmailPreferencesServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = EmailPreferencesServiceClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. 
+ transport = transports.EmailPreferencesServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = EmailPreferencesServiceClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = EmailPreferencesServiceClient( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.EmailPreferencesServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = EmailPreferencesServiceClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.EmailPreferencesServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = EmailPreferencesServiceClient(transport=transport) + assert client.transport is transport + + +def test_transport_get_channel(): + # A client may be instantiated with a custom transport instance. + transport = transports.EmailPreferencesServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + transport = transports.EmailPreferencesServiceGrpcAsyncIOTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.EmailPreferencesServiceGrpcTransport, + transports.EmailPreferencesServiceGrpcAsyncIOTransport, + transports.EmailPreferencesServiceRestTransport, + ], +) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "rest", + ], +) +def test_transport_kind(transport_name): + transport = EmailPreferencesServiceClient.get_transport_class(transport_name)( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert transport.kind == transport_name + + +def test_transport_grpc_default(): + # A client should use the gRPC transport by default. + client = EmailPreferencesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert isinstance( + client.transport, + transports.EmailPreferencesServiceGrpcTransport, + ) + + +def test_email_preferences_service_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.EmailPreferencesServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json", + ) + + +def test_email_preferences_service_base_transport(): + # Instantiate the base transport. 
+ with mock.patch( + "google.shopping.merchant_accounts_v1beta.services.email_preferences_service.transports.EmailPreferencesServiceTransport.__init__" + ) as Transport: + Transport.return_value = None + transport = transports.EmailPreferencesServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. + methods = ( + "get_email_preferences", + "update_email_preferences", + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + with pytest.raises(NotImplementedError): + transport.close() + + # Catch all for all remaining methods and properties + remainder = [ + "kind", + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + + +def test_email_preferences_service_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch( + "google.shopping.merchant_accounts_v1beta.services.email_preferences_service.transports.EmailPreferencesServiceTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.EmailPreferencesServiceTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with( + "credentials.json", + scopes=None, + default_scopes=("https://www.googleapis.com/auth/content",), + quota_project_id="octopus", + ) + + +def test_email_preferences_service_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. + with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( + "google.shopping.merchant_accounts_v1beta.services.email_preferences_service.transports.EmailPreferencesServiceTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.EmailPreferencesServiceTransport() + adc.assert_called_once() + + +def test_email_preferences_service_auth_adc(): + # If no credentials are provided, we should use ADC credentials. + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + EmailPreferencesServiceClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=("https://www.googleapis.com/auth/content",), + quota_project_id=None, + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.EmailPreferencesServiceGrpcTransport, + transports.EmailPreferencesServiceGrpcAsyncIOTransport, + ], +) +def test_email_preferences_service_transport_auth_adc(transport_class): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
+ with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + adc.assert_called_once_with( + scopes=["1", "2"], + default_scopes=("https://www.googleapis.com/auth/content",), + quota_project_id="octopus", + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.EmailPreferencesServiceGrpcTransport, + transports.EmailPreferencesServiceGrpcAsyncIOTransport, + transports.EmailPreferencesServiceRestTransport, + ], +) +def test_email_preferences_service_transport_auth_gdch_credentials(transport_class): + host = "https://language.com" + api_audience_tests = [None, "https://language2.com"] + api_audience_expect = [host, "https://language2.com"] + for t, e in zip(api_audience_tests, api_audience_expect): + with mock.patch.object(google.auth, "default", autospec=True) as adc: + gdch_mock = mock.MagicMock() + type(gdch_mock).with_gdch_audience = mock.PropertyMock( + return_value=gdch_mock + ) + adc.return_value = (gdch_mock, None) + transport_class(host=host, api_audience=t) + gdch_mock.with_gdch_audience.assert_called_once_with(e) + + +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.EmailPreferencesServiceGrpcTransport, grpc_helpers), + (transports.EmailPreferencesServiceGrpcAsyncIOTransport, grpc_helpers_async), + ], +) +def test_email_preferences_service_transport_create_channel( + transport_class, grpc_helpers +): + # If credentials and host are not provided, the transport class should use + # ADC credentials. + with mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + + create_channel.assert_called_with( + "merchantapi.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + default_scopes=("https://www.googleapis.com/auth/content",), + scopes=["1", "2"], + default_host="merchantapi.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.EmailPreferencesServiceGrpcTransport, + transports.EmailPreferencesServiceGrpcAsyncIOTransport, + ], +) +def test_email_preferences_service_grpc_transport_client_cert_source_for_mtls( + transport_class, +): + cred = ga_credentials.AnonymousCredentials() + + # Check ssl_channel_credentials is used if provided. 
+ with mock.patch.object(transport_class, "create_channel") as mock_create_channel: + mock_ssl_channel_creds = mock.Mock() + transport_class( + host="squid.clam.whelk", + credentials=cred, + ssl_channel_credentials=mock_ssl_channel_creds, + ) + mock_create_channel.assert_called_once_with( + "squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_channel_creds, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls + # is used. + with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): + with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: + transport_class( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback, + ) + expected_cert, expected_key = client_cert_source_callback() + mock_ssl_cred.assert_called_once_with( + certificate_chain=expected_cert, private_key=expected_key + ) + + +def test_email_preferences_service_http_transport_client_cert_source_for_mtls(): + cred = ga_credentials.AnonymousCredentials() + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ) as mock_configure_mtls_channel: + transports.EmailPreferencesServiceRestTransport( + credentials=cred, client_cert_source_for_mtls=client_cert_source_callback + ) + mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + "rest", + ], +) +def test_email_preferences_service_host_no_port(transport_name): + client = EmailPreferencesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="merchantapi.googleapis.com" + ), + transport=transport_name, + ) + assert client.transport._host == ( + "merchantapi.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://merchantapi.googleapis.com" + ) + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + "rest", + ], +) +def test_email_preferences_service_host_with_port(transport_name): + client = EmailPreferencesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="merchantapi.googleapis.com:8000" + ), + transport=transport_name, + ) + assert client.transport._host == ( + "merchantapi.googleapis.com:8000" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://merchantapi.googleapis.com:8000" + ) + + +@pytest.mark.parametrize( + "transport_name", + [ + "rest", + ], +) +def test_email_preferences_service_client_transport_session_collision(transport_name): + creds1 = ga_credentials.AnonymousCredentials() + creds2 = ga_credentials.AnonymousCredentials() + client1 = EmailPreferencesServiceClient( + credentials=creds1, + transport=transport_name, + ) + client2 = EmailPreferencesServiceClient( + credentials=creds2, + transport=transport_name, + ) + session1 = client1.transport.get_email_preferences._session + session2 = client2.transport.get_email_preferences._session + assert session1 != session2 + session1 = client1.transport.update_email_preferences._session + session2 = client2.transport.update_email_preferences._session + assert session1 != session2 
+ + +def test_email_preferences_service_grpc_transport_channel(): + channel = grpc.secure_channel("http://localhost/", grpc.local_channel_credentials()) + + # Check that channel is used if provided. + transport = transports.EmailPreferencesServiceGrpcTransport( + host="squid.clam.whelk", + channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None + + +def test_email_preferences_service_grpc_asyncio_transport_channel(): + channel = aio.secure_channel("http://localhost/", grpc.local_channel_credentials()) + + # Check that channel is used if provided. + transport = transports.EmailPreferencesServiceGrpcAsyncIOTransport( + host="squid.clam.whelk", + channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. +@pytest.mark.parametrize( + "transport_class", + [ + transports.EmailPreferencesServiceGrpcTransport, + transports.EmailPreferencesServiceGrpcAsyncIOTransport, + ], +) +def test_email_preferences_service_transport_channel_mtls_with_client_cert_source( + transport_class, +): + with mock.patch( + "grpc.ssl_channel_credentials", autospec=True + ) as grpc_ssl_channel_cred: + with mock.patch.object( + transport_class, "create_channel" + ) as grpc_create_channel: + mock_ssl_cred = mock.Mock() + grpc_ssl_channel_cred.return_value = mock_ssl_cred + + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + + cred = ga_credentials.AnonymousCredentials() + with pytest.warns(DeprecationWarning): + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (cred, None) + transport = transport_class( + host="squid.clam.whelk", + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=client_cert_source_callback, + ) + adc.assert_called_once() + + grpc_ssl_channel_cred.assert_called_once_with( + certificate_chain=b"cert bytes", private_key=b"key bytes" + ) + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + assert transport._ssl_channel_credentials == mock_ssl_cred + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. 
+@pytest.mark.parametrize( + "transport_class", + [ + transports.EmailPreferencesServiceGrpcTransport, + transports.EmailPreferencesServiceGrpcAsyncIOTransport, + ], +) +def test_email_preferences_service_transport_channel_mtls_with_adc(transport_class): + mock_ssl_cred = mock.Mock() + with mock.patch.multiple( + "google.auth.transport.grpc.SslCredentials", + __init__=mock.Mock(return_value=None), + ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), + ): + with mock.patch.object( + transport_class, "create_channel" + ) as grpc_create_channel: + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + mock_cred = mock.Mock() + + with pytest.warns(DeprecationWarning): + transport = transport_class( + host="squid.clam.whelk", + credentials=mock_cred, + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=None, + ) + + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=mock_cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + + +def test_email_preferences_path(): + account = "squid" + email = "clam" + expected = "accounts/{account}/users/{email}/emailPreferences".format( + account=account, + email=email, + ) + actual = EmailPreferencesServiceClient.email_preferences_path(account, email) + assert expected == actual + + +def test_parse_email_preferences_path(): + expected = { + "account": "whelk", + "email": "octopus", + } + path = EmailPreferencesServiceClient.email_preferences_path(**expected) + + # Check that the path construction is reversible. + actual = EmailPreferencesServiceClient.parse_email_preferences_path(path) + assert expected == actual + + +def test_common_billing_account_path(): + billing_account = "oyster" + expected = "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + actual = EmailPreferencesServiceClient.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "nudibranch", + } + path = EmailPreferencesServiceClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. + actual = EmailPreferencesServiceClient.parse_common_billing_account_path(path) + assert expected == actual + + +def test_common_folder_path(): + folder = "cuttlefish" + expected = "folders/{folder}".format( + folder=folder, + ) + actual = EmailPreferencesServiceClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "mussel", + } + path = EmailPreferencesServiceClient.common_folder_path(**expected) + + # Check that the path construction is reversible. + actual = EmailPreferencesServiceClient.parse_common_folder_path(path) + assert expected == actual + + +def test_common_organization_path(): + organization = "winkle" + expected = "organizations/{organization}".format( + organization=organization, + ) + actual = EmailPreferencesServiceClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "nautilus", + } + path = EmailPreferencesServiceClient.common_organization_path(**expected) + + # Check that the path construction is reversible. 
+ actual = EmailPreferencesServiceClient.parse_common_organization_path(path) + assert expected == actual + + +def test_common_project_path(): + project = "scallop" + expected = "projects/{project}".format( + project=project, + ) + actual = EmailPreferencesServiceClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "abalone", + } + path = EmailPreferencesServiceClient.common_project_path(**expected) + + # Check that the path construction is reversible. + actual = EmailPreferencesServiceClient.parse_common_project_path(path) + assert expected == actual + + +def test_common_location_path(): + project = "squid" + location = "clam" + expected = "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) + actual = EmailPreferencesServiceClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "whelk", + "location": "octopus", + } + path = EmailPreferencesServiceClient.common_location_path(**expected) + + # Check that the path construction is reversible. + actual = EmailPreferencesServiceClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object( + transports.EmailPreferencesServiceTransport, "_prep_wrapped_messages" + ) as prep: + client = EmailPreferencesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object( + transports.EmailPreferencesServiceTransport, "_prep_wrapped_messages" + ) as prep: + transport_class = EmailPreferencesServiceClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + +@pytest.mark.asyncio +async def test_transport_close_async(): + client = EmailPreferencesServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + with mock.patch.object( + type(getattr(client.transport, "grpc_channel")), "close" + ) as close: + async with client: + close.assert_not_called() + close.assert_called_once() + + +def test_transport_close(): + transports = { + "rest": "_session", + "grpc": "_grpc_channel", + } + + for transport, close_name in transports.items(): + client = EmailPreferencesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + with mock.patch.object( + type(getattr(client.transport, close_name)), "close" + ) as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +def test_client_ctx(): + transports = [ + "rest", + "grpc", + ] + for transport in transports: + client = EmailPreferencesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + # Test client calls underlying transport. 
+ with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() + + +@pytest.mark.parametrize( + "client_class,transport_class", + [ + ( + EmailPreferencesServiceClient, + transports.EmailPreferencesServiceGrpcTransport, + ), + ( + EmailPreferencesServiceAsyncClient, + transports.EmailPreferencesServiceGrpcAsyncIOTransport, + ), + ], +) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) diff --git a/packages/google-shopping-merchant-accounts/tests/unit/gapic/merchant_accounts_v1beta/test_homepage_service.py b/packages/google-shopping-merchant-accounts/tests/unit/gapic/merchant_accounts_v1beta/test_homepage_service.py new file mode 100644 index 000000000000..8a5b6ed4e591 --- /dev/null +++ b/packages/google-shopping-merchant-accounts/tests/unit/gapic/merchant_accounts_v1beta/test_homepage_service.py @@ -0,0 +1,4447 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import os + +# try/except added for compatibility with python < 3.8 +try: + from unittest import mock + from unittest.mock import AsyncMock # pragma: NO COVER +except ImportError: # pragma: NO COVER + import mock + +from collections.abc import Iterable +import json +import math + +from google.api_core import gapic_v1, grpc_helpers, grpc_helpers_async, path_template +from google.api_core import api_core_version, client_options +from google.api_core import exceptions as core_exceptions +import google.auth +from google.auth import credentials as ga_credentials +from google.auth.exceptions import MutualTLSChannelError +from google.oauth2 import service_account +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import json_format +import grpc +from grpc.experimental import aio +from proto.marshal.rules import wrappers +from proto.marshal.rules.dates import DurationRule, TimestampRule +import pytest +from requests import PreparedRequest, Request, Response +from requests.sessions import Session + +from google.shopping.merchant_accounts_v1beta.services.homepage_service import ( + HomepageServiceAsyncClient, + HomepageServiceClient, + transports, +) +from google.shopping.merchant_accounts_v1beta.types import homepage as gsma_homepage +from google.shopping.merchant_accounts_v1beta.types import homepage + + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + + +# If default endpoint is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. +def modify_default_endpoint(client): + return ( + "foo.googleapis.com" + if ("localhost" in client.DEFAULT_ENDPOINT) + else client.DEFAULT_ENDPOINT + ) + + +# If default endpoint template is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint template so the client can produce a different +# mtls endpoint for endpoint testing purposes. 
+def modify_default_endpoint_template(client): + return ( + "test.{UNIVERSE_DOMAIN}" + if ("localhost" in client._DEFAULT_ENDPOINT_TEMPLATE) + else client._DEFAULT_ENDPOINT_TEMPLATE + ) + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert HomepageServiceClient._get_default_mtls_endpoint(None) is None + assert ( + HomepageServiceClient._get_default_mtls_endpoint(api_endpoint) + == api_mtls_endpoint + ) + assert ( + HomepageServiceClient._get_default_mtls_endpoint(api_mtls_endpoint) + == api_mtls_endpoint + ) + assert ( + HomepageServiceClient._get_default_mtls_endpoint(sandbox_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + HomepageServiceClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + HomepageServiceClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi + ) + + +def test__read_environment_variables(): + assert HomepageServiceClient._read_environment_variables() == (False, "auto", None) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + assert HomepageServiceClient._read_environment_variables() == ( + True, + "auto", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + assert HomepageServiceClient._read_environment_variables() == ( + False, + "auto", + None, + ) + + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError) as excinfo: + HomepageServiceClient._read_environment_variables() + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + assert HomepageServiceClient._read_environment_variables() == ( + False, + "never", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + assert HomepageServiceClient._read_environment_variables() == ( + False, + "always", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}): + assert HomepageServiceClient._read_environment_variables() == ( + False, + "auto", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + HomepageServiceClient._read_environment_variables() + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + with mock.patch.dict(os.environ, {"GOOGLE_CLOUD_UNIVERSE_DOMAIN": "foo.com"}): + assert HomepageServiceClient._read_environment_variables() == ( + False, + "auto", + "foo.com", + ) + + +def test__get_client_cert_source(): + mock_provided_cert_source = mock.Mock() + mock_default_cert_source = mock.Mock() + + assert HomepageServiceClient._get_client_cert_source(None, False) is None + assert ( + HomepageServiceClient._get_client_cert_source(mock_provided_cert_source, False) + is None + ) + assert ( + HomepageServiceClient._get_client_cert_source(mock_provided_cert_source, True) + == mock_provided_cert_source + ) + + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", return_value=True + ): + with mock.patch( + 
"google.auth.transport.mtls.default_client_cert_source", + return_value=mock_default_cert_source, + ): + assert ( + HomepageServiceClient._get_client_cert_source(None, True) + is mock_default_cert_source + ) + assert ( + HomepageServiceClient._get_client_cert_source( + mock_provided_cert_source, "true" + ) + is mock_provided_cert_source + ) + + +@mock.patch.object( + HomepageServiceClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(HomepageServiceClient), +) +@mock.patch.object( + HomepageServiceAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(HomepageServiceAsyncClient), +) +def test__get_api_endpoint(): + api_override = "foo.com" + mock_client_cert_source = mock.Mock() + default_universe = HomepageServiceClient._DEFAULT_UNIVERSE + default_endpoint = HomepageServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=default_universe + ) + mock_universe = "bar.com" + mock_endpoint = HomepageServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=mock_universe + ) + + assert ( + HomepageServiceClient._get_api_endpoint( + api_override, mock_client_cert_source, default_universe, "always" + ) + == api_override + ) + assert ( + HomepageServiceClient._get_api_endpoint( + None, mock_client_cert_source, default_universe, "auto" + ) + == HomepageServiceClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + HomepageServiceClient._get_api_endpoint(None, None, default_universe, "auto") + == default_endpoint + ) + assert ( + HomepageServiceClient._get_api_endpoint(None, None, default_universe, "always") + == HomepageServiceClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + HomepageServiceClient._get_api_endpoint( + None, mock_client_cert_source, default_universe, "always" + ) + == HomepageServiceClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + HomepageServiceClient._get_api_endpoint(None, None, mock_universe, "never") + == mock_endpoint + ) + assert ( + HomepageServiceClient._get_api_endpoint(None, None, default_universe, "never") + == default_endpoint + ) + + with pytest.raises(MutualTLSChannelError) as excinfo: + HomepageServiceClient._get_api_endpoint( + None, mock_client_cert_source, mock_universe, "auto" + ) + assert ( + str(excinfo.value) + == "mTLS is not supported in any universe other than googleapis.com." + ) + + +def test__get_universe_domain(): + client_universe_domain = "foo.com" + universe_domain_env = "bar.com" + + assert ( + HomepageServiceClient._get_universe_domain( + client_universe_domain, universe_domain_env + ) + == client_universe_domain + ) + assert ( + HomepageServiceClient._get_universe_domain(None, universe_domain_env) + == universe_domain_env + ) + assert ( + HomepageServiceClient._get_universe_domain(None, None) + == HomepageServiceClient._DEFAULT_UNIVERSE + ) + + with pytest.raises(ValueError) as excinfo: + HomepageServiceClient._get_universe_domain("", None) + assert str(excinfo.value) == "Universe Domain cannot be an empty string." + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (HomepageServiceClient, transports.HomepageServiceGrpcTransport, "grpc"), + (HomepageServiceClient, transports.HomepageServiceRestTransport, "rest"), + ], +) +def test__validate_universe_domain(client_class, transport_class, transport_name): + client = client_class( + transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) + ) + assert client._validate_universe_domain() == True + + # Test the case when universe is already validated. 
+ assert client._validate_universe_domain() == True + + if transport_name == "grpc": + # Test the case where credentials are provided by the + # `local_channel_credentials`. The default universes in both match. + channel = grpc.secure_channel( + "http://localhost/", grpc.local_channel_credentials() + ) + client = client_class(transport=transport_class(channel=channel)) + assert client._validate_universe_domain() == True + + # Test the case where credentials do not exist: e.g. a transport is provided + # with no credentials. Validation should still succeed because there is no + # mismatch with non-existent credentials. + channel = grpc.secure_channel( + "http://localhost/", grpc.local_channel_credentials() + ) + transport = transport_class(channel=channel) + transport._credentials = None + client = client_class(transport=transport) + assert client._validate_universe_domain() == True + + # TODO: This is needed to cater for older versions of google-auth + # Make this test unconditional once the minimum supported version of + # google-auth becomes 2.23.0 or higher. + google_auth_major, google_auth_minor = [ + int(part) for part in google.auth.__version__.split(".")[0:2] + ] + if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): + credentials = ga_credentials.AnonymousCredentials() + credentials._universe_domain = "foo.com" + # Test the case when there is a universe mismatch from the credentials. + client = client_class(transport=transport_class(credentials=credentials)) + with pytest.raises(ValueError) as excinfo: + client._validate_universe_domain() + assert ( + str(excinfo.value) + == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." + ) + + # Test the case when there is a universe mismatch from the client. + # + # TODO: Make this test unconditional once the minimum supported version of + # google-api-core becomes 2.15.0 or higher. + api_core_major, api_core_minor = [ + int(part) for part in api_core_version.__version__.split(".")[0:2] + ] + if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): + client = client_class( + client_options={"universe_domain": "bar.com"}, + transport=transport_class( + credentials=ga_credentials.AnonymousCredentials(), + ), + ) + with pytest.raises(ValueError) as excinfo: + client._validate_universe_domain() + assert ( + str(excinfo.value) + == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." 
+ ) + + # Test that ValueError is raised if universe_domain is provided via client options and credentials is None + with pytest.raises(ValueError): + client._compare_universes("foo.bar", None) + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (HomepageServiceClient, "grpc"), + (HomepageServiceAsyncClient, "grpc_asyncio"), + (HomepageServiceClient, "rest"), + ], +) +def test_homepage_service_client_from_service_account_info( + client_class, transport_name +): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_info" + ) as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info, transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + "merchantapi.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://merchantapi.googleapis.com" + ) + + +@pytest.mark.parametrize( + "transport_class,transport_name", + [ + (transports.HomepageServiceGrpcTransport, "grpc"), + (transports.HomepageServiceGrpcAsyncIOTransport, "grpc_asyncio"), + (transports.HomepageServiceRestTransport, "rest"), + ], +) +def test_homepage_service_client_service_account_always_use_jwt( + transport_class, transport_name +): + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) + + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=False) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (HomepageServiceClient, "grpc"), + (HomepageServiceAsyncClient, "grpc_asyncio"), + (HomepageServiceClient, "rest"), + ], +) +def test_homepage_service_client_from_service_account_file( + client_class, transport_name +): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_file" + ) as factory: + factory.return_value = creds + client = client_class.from_service_account_file( + "dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + client = client_class.from_service_account_json( + "dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + "merchantapi.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://merchantapi.googleapis.com" + ) + + +def test_homepage_service_client_get_transport_class(): + transport = HomepageServiceClient.get_transport_class() + available_transports = [ + transports.HomepageServiceGrpcTransport, + transports.HomepageServiceRestTransport, + ] + assert transport in available_transports + + transport = HomepageServiceClient.get_transport_class("grpc") + assert transport == transports.HomepageServiceGrpcTransport + + +@pytest.mark.parametrize( + 
"client_class,transport_class,transport_name", + [ + (HomepageServiceClient, transports.HomepageServiceGrpcTransport, "grpc"), + ( + HomepageServiceAsyncClient, + transports.HomepageServiceGrpcAsyncIOTransport, + "grpc_asyncio", + ), + (HomepageServiceClient, transports.HomepageServiceRestTransport, "rest"), + ], +) +@mock.patch.object( + HomepageServiceClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(HomepageServiceClient), +) +@mock.patch.object( + HomepageServiceAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(HomepageServiceAsyncClient), +) +def test_homepage_service_client_client_options( + client_class, transport_class, transport_name +): + # Check that if channel is provided we won't create a new one. + with mock.patch.object(HomepageServiceClient, "get_transport_class") as gtc: + transport = transport_class(credentials=ga_credentials.AnonymousCredentials()) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object(HomepageServiceClient, "get_transport_class") as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. + options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name, client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + client = client_class(transport=transport_name) + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError) as excinfo: + client = client_class(transport=transport_name) + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + # Check the case api_endpoint is provided + options = client_options.ClientOptions( + api_audience="https://language.googleapis.com" + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience="https://language.googleapis.com", + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,use_client_cert_env", + [ + ( + HomepageServiceClient, + transports.HomepageServiceGrpcTransport, + "grpc", + "true", + ), + ( + HomepageServiceAsyncClient, + transports.HomepageServiceGrpcAsyncIOTransport, + "grpc_asyncio", + "true", + ), + ( + HomepageServiceClient, + transports.HomepageServiceGrpcTransport, + "grpc", + "false", + ), + ( + HomepageServiceAsyncClient, + transports.HomepageServiceGrpcAsyncIOTransport, + "grpc_asyncio", + "false", + ), + ( + HomepageServiceClient, + transports.HomepageServiceRestTransport, + "rest", + "true", + ), + ( + HomepageServiceClient, + transports.HomepageServiceRestTransport, + "rest", + "false", + ), + ], +) +@mock.patch.object( + HomepageServiceClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(HomepageServiceClient), +) +@mock.patch.object( + HomepageServiceAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(HomepageServiceAsyncClient), +) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_homepage_service_client_mtls_env_auto( + client_class, transport_class, transport_name, use_client_cert_env +): + # This tests the endpoint autoswitch behavior. 
Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. + + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + options = client_options.ClientOptions( + client_cert_source=client_cert_source_callback + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ) + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=client_cert_source_callback, + ): + if use_client_cert_env == "false": + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ) + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case client_cert_source and ADC client cert are not provided. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class", [HomepageServiceClient, HomepageServiceAsyncClient] +) +@mock.patch.object( + HomepageServiceClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(HomepageServiceClient), +) +@mock.patch.object( + HomepageServiceAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(HomepageServiceAsyncClient), +) +def test_homepage_service_client_get_mtls_endpoint_and_cert_source(client_class): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=mock_client_cert_source, + ): + ( + api_endpoint, + cert_source, + ) = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + client_class.get_mtls_endpoint_and_cert_source() + + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError) as excinfo: + client_class.get_mtls_endpoint_and_cert_source() + + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + + +@pytest.mark.parametrize( + "client_class", [HomepageServiceClient, HomepageServiceAsyncClient] +) +@mock.patch.object( + HomepageServiceClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(HomepageServiceClient), +) +@mock.patch.object( + HomepageServiceAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(HomepageServiceAsyncClient), +) +def test_homepage_service_client_client_api_endpoint(client_class): + mock_client_cert_source = client_cert_source_callback + api_override = "foo.com" + default_universe = HomepageServiceClient._DEFAULT_UNIVERSE + default_endpoint = HomepageServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=default_universe + ) + mock_universe = "bar.com" + mock_endpoint = HomepageServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=mock_universe + ) + + # If ClientOptions.api_endpoint is set and GOOGLE_API_USE_CLIENT_CERTIFICATE="true", + # use ClientOptions.api_endpoint as the api endpoint regardless. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ): + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=api_override + ) + client = client_class( + client_options=options, + credentials=ga_credentials.AnonymousCredentials(), + ) + assert client.api_endpoint == api_override + + # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="never", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + client = client_class(credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == default_endpoint + + # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="always", + # use the DEFAULT_MTLS_ENDPOINT as the api endpoint. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + client = client_class(credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + + # If ClientOptions.api_endpoint is not set, GOOGLE_API_USE_MTLS_ENDPOINT="auto" (default), + # GOOGLE_API_USE_CLIENT_CERTIFICATE="false" (default), default cert source doesn't exist, + # and ClientOptions.universe_domain="bar.com", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with universe domain as the api endpoint. + options = client_options.ClientOptions() + universe_exists = hasattr(options, "universe_domain") + if universe_exists: + options = client_options.ClientOptions(universe_domain=mock_universe) + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + else: + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + assert client.api_endpoint == ( + mock_endpoint if universe_exists else default_endpoint + ) + assert client.universe_domain == ( + mock_universe if universe_exists else default_universe + ) + + # If ClientOptions does not have a universe domain attribute and GOOGLE_API_USE_MTLS_ENDPOINT="never", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. + options = client_options.ClientOptions() + if hasattr(options, "universe_domain"): + delattr(options, "universe_domain") + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + assert client.api_endpoint == default_endpoint + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (HomepageServiceClient, transports.HomepageServiceGrpcTransport, "grpc"), + ( + HomepageServiceAsyncClient, + transports.HomepageServiceGrpcAsyncIOTransport, + "grpc_asyncio", + ), + (HomepageServiceClient, transports.HomepageServiceRestTransport, "rest"), + ], +) +def test_homepage_service_client_client_options_scopes( + client_class, transport_class, transport_name +): + # Check the case scopes are provided. + options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + HomepageServiceClient, + transports.HomepageServiceGrpcTransport, + "grpc", + grpc_helpers, + ), + ( + HomepageServiceAsyncClient, + transports.HomepageServiceGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + (HomepageServiceClient, transports.HomepageServiceRestTransport, "rest", None), + ], +) +def test_homepage_service_client_client_options_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. 
+ options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +def test_homepage_service_client_client_options_from_dict(): + with mock.patch( + "google.shopping.merchant_accounts_v1beta.services.homepage_service.transports.HomepageServiceGrpcTransport.__init__" + ) as grpc_transport: + grpc_transport.return_value = None + client = HomepageServiceClient( + client_options={"api_endpoint": "squid.clam.whelk"} + ) + grpc_transport.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + HomepageServiceClient, + transports.HomepageServiceGrpcTransport, + "grpc", + grpc_helpers, + ), + ( + HomepageServiceAsyncClient, + transports.HomepageServiceGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + ], +) +def test_homepage_service_client_create_channel_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. + options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # test that the credentials from file are saved and used as the credentials. 
+ with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel" + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + file_creds = ga_credentials.AnonymousCredentials() + load_creds.return_value = (file_creds, None) + adc.return_value = (creds, None) + client = client_class(client_options=options, transport=transport_name) + create_channel.assert_called_with( + "merchantapi.googleapis.com:443", + credentials=file_creds, + credentials_file=None, + quota_project_id=None, + default_scopes=("https://www.googleapis.com/auth/content",), + scopes=None, + default_host="merchantapi.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "request_type", + [ + homepage.GetHomepageRequest, + dict, + ], +) +def test_get_homepage(request_type, transport: str = "grpc"): + client = HomepageServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_homepage), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = homepage.Homepage( + name="name_value", + uri="uri_value", + claimed=True, + ) + response = client.get_homepage(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = homepage.GetHomepageRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, homepage.Homepage) + assert response.name == "name_value" + assert response.uri == "uri_value" + assert response.claimed is True + + +def test_get_homepage_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = HomepageServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_homepage), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.get_homepage() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == homepage.GetHomepageRequest() + + +def test_get_homepage_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = HomepageServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = homepage.GetHomepageRequest( + name="name_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.get_homepage), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.get_homepage(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == homepage.GetHomepageRequest( + name="name_value", + ) + + +def test_get_homepage_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = HomepageServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_homepage in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get_homepage] = mock_rpc + request = {} + client.get_homepage(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.get_homepage(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_get_homepage_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = HomepageServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_homepage), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + homepage.Homepage( + name="name_value", + uri="uri_value", + claimed=True, + ) + ) + response = await client.get_homepage() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == homepage.GetHomepageRequest() + + +@pytest.mark.asyncio +async def test_get_homepage_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = HomepageServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.get_homepage + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[ + client._client._transport.get_homepage + ] = mock_object + + request = {} + await client.get_homepage(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_object.call_count == 1 + + await client.get_homepage(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + +@pytest.mark.asyncio +async def test_get_homepage_async( + transport: str = "grpc_asyncio", request_type=homepage.GetHomepageRequest +): + client = HomepageServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_homepage), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + homepage.Homepage( + name="name_value", + uri="uri_value", + claimed=True, + ) + ) + response = await client.get_homepage(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = homepage.GetHomepageRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, homepage.Homepage) + assert response.name == "name_value" + assert response.uri == "uri_value" + assert response.claimed is True + + +@pytest.mark.asyncio +async def test_get_homepage_async_from_dict(): + await test_get_homepage_async(request_type=dict) + + +def test_get_homepage_field_headers(): + client = HomepageServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = homepage.GetHomepageRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_homepage), "__call__") as call: + call.return_value = homepage.Homepage() + client.get_homepage(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_homepage_field_headers_async(): + client = HomepageServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = homepage.GetHomepageRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_homepage), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(homepage.Homepage()) + await client.get_homepage(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_get_homepage_flattened(): + client = HomepageServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_homepage), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = homepage.Homepage() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_homepage( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_get_homepage_flattened_error(): + client = HomepageServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_homepage( + homepage.GetHomepageRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_get_homepage_flattened_async(): + client = HomepageServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_homepage), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = homepage.Homepage() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(homepage.Homepage()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_homepage( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_get_homepage_flattened_error_async(): + client = HomepageServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.get_homepage( + homepage.GetHomepageRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + gsma_homepage.UpdateHomepageRequest, + dict, + ], +) +def test_update_homepage(request_type, transport: str = "grpc"): + client = HomepageServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_homepage), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = gsma_homepage.Homepage( + name="name_value", + uri="uri_value", + claimed=True, + ) + response = client.update_homepage(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = gsma_homepage.UpdateHomepageRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, gsma_homepage.Homepage) + assert response.name == "name_value" + assert response.uri == "uri_value" + assert response.claimed is True + + +def test_update_homepage_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = HomepageServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_homepage), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.update_homepage() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == gsma_homepage.UpdateHomepageRequest() + + +def test_update_homepage_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = HomepageServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = gsma_homepage.UpdateHomepageRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_homepage), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.update_homepage(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == gsma_homepage.UpdateHomepageRequest() + + +def test_update_homepage_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = HomepageServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.update_homepage in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.update_homepage] = mock_rpc + request = {} + client.update_homepage(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.update_homepage(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_update_homepage_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. 
+ client = HomepageServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_homepage), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + gsma_homepage.Homepage( + name="name_value", + uri="uri_value", + claimed=True, + ) + ) + response = await client.update_homepage() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == gsma_homepage.UpdateHomepageRequest() + + +@pytest.mark.asyncio +async def test_update_homepage_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = HomepageServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.update_homepage + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[ + client._client._transport.update_homepage + ] = mock_object + + request = {} + await client.update_homepage(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + await client.update_homepage(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + +@pytest.mark.asyncio +async def test_update_homepage_async( + transport: str = "grpc_asyncio", request_type=gsma_homepage.UpdateHomepageRequest +): + client = HomepageServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_homepage), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + gsma_homepage.Homepage( + name="name_value", + uri="uri_value", + claimed=True, + ) + ) + response = await client.update_homepage(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = gsma_homepage.UpdateHomepageRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, gsma_homepage.Homepage) + assert response.name == "name_value" + assert response.uri == "uri_value" + assert response.claimed is True + + +@pytest.mark.asyncio +async def test_update_homepage_async_from_dict(): + await test_update_homepage_async(request_type=dict) + + +def test_update_homepage_field_headers(): + client = HomepageServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = gsma_homepage.UpdateHomepageRequest() + + request.homepage.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_homepage), "__call__") as call: + call.return_value = gsma_homepage.Homepage() + client.update_homepage(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "homepage.name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_update_homepage_field_headers_async(): + client = HomepageServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = gsma_homepage.UpdateHomepageRequest() + + request.homepage.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_homepage), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + gsma_homepage.Homepage() + ) + await client.update_homepage(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "homepage.name=name_value", + ) in kw["metadata"] + + +def test_update_homepage_flattened(): + client = HomepageServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_homepage), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = gsma_homepage.Homepage() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.update_homepage( + homepage=gsma_homepage.Homepage(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].homepage + mock_val = gsma_homepage.Homepage(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + assert arg == mock_val + + +def test_update_homepage_flattened_error(): + client = HomepageServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.update_homepage( + gsma_homepage.UpdateHomepageRequest(), + homepage=gsma_homepage.Homepage(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +@pytest.mark.asyncio +async def test_update_homepage_flattened_async(): + client = HomepageServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_homepage), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = gsma_homepage.Homepage() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + gsma_homepage.Homepage() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.update_homepage( + homepage=gsma_homepage.Homepage(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].homepage + mock_val = gsma_homepage.Homepage(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_update_homepage_flattened_error_async(): + client = HomepageServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.update_homepage( + gsma_homepage.UpdateHomepageRequest(), + homepage=gsma_homepage.Homepage(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +@pytest.mark.parametrize( + "request_type", + [ + homepage.ClaimHomepageRequest, + dict, + ], +) +def test_claim_homepage(request_type, transport: str = "grpc"): + client = HomepageServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.claim_homepage), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = homepage.Homepage( + name="name_value", + uri="uri_value", + claimed=True, + ) + response = client.claim_homepage(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = homepage.ClaimHomepageRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, homepage.Homepage) + assert response.name == "name_value" + assert response.uri == "uri_value" + assert response.claimed is True + + +def test_claim_homepage_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = HomepageServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.claim_homepage), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.claim_homepage() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == homepage.ClaimHomepageRequest() + + +def test_claim_homepage_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = HomepageServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = homepage.ClaimHomepageRequest( + name="name_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.claim_homepage), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.claim_homepage(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == homepage.ClaimHomepageRequest( + name="name_value", + ) + + +def test_claim_homepage_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = HomepageServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.claim_homepage in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.claim_homepage] = mock_rpc + request = {} + client.claim_homepage(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.claim_homepage(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_claim_homepage_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = HomepageServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.claim_homepage), "__call__") as call: + # Designate an appropriate return value for the call. 
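+ # Descriptive note (not part of the generated test): FakeUnaryUnaryCall wraps the fake response so the mocked async stub can be awaited like a real unary-unary gRPC call.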
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + homepage.Homepage( + name="name_value", + uri="uri_value", + claimed=True, + ) + ) + response = await client.claim_homepage() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == homepage.ClaimHomepageRequest() + + +@pytest.mark.asyncio +async def test_claim_homepage_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = HomepageServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.claim_homepage + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[ + client._client._transport.claim_homepage + ] = mock_object + + request = {} + await client.claim_homepage(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + await client.claim_homepage(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + +@pytest.mark.asyncio +async def test_claim_homepage_async( + transport: str = "grpc_asyncio", request_type=homepage.ClaimHomepageRequest +): + client = HomepageServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.claim_homepage), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + homepage.Homepage( + name="name_value", + uri="uri_value", + claimed=True, + ) + ) + response = await client.claim_homepage(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = homepage.ClaimHomepageRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, homepage.Homepage) + assert response.name == "name_value" + assert response.uri == "uri_value" + assert response.claimed is True + + +@pytest.mark.asyncio +async def test_claim_homepage_async_from_dict(): + await test_claim_homepage_async(request_type=dict) + + +def test_claim_homepage_field_headers(): + client = HomepageServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = homepage.ClaimHomepageRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.claim_homepage), "__call__") as call: + call.return_value = homepage.Homepage() + client.claim_homepage(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_claim_homepage_field_headers_async(): + client = HomepageServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = homepage.ClaimHomepageRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.claim_homepage), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(homepage.Homepage()) + await client.claim_homepage(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.parametrize( + "request_type", + [ + homepage.UnclaimHomepageRequest, + dict, + ], +) +def test_unclaim_homepage(request_type, transport: str = "grpc"): + client = HomepageServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.unclaim_homepage), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = homepage.Homepage( + name="name_value", + uri="uri_value", + claimed=True, + ) + response = client.unclaim_homepage(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = homepage.UnclaimHomepageRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, homepage.Homepage) + assert response.name == "name_value" + assert response.uri == "uri_value" + assert response.claimed is True + + +def test_unclaim_homepage_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = HomepageServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.unclaim_homepage), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.unclaim_homepage() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == homepage.UnclaimHomepageRequest() + + +def test_unclaim_homepage_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = HomepageServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = homepage.UnclaimHomepageRequest( + name="name_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.unclaim_homepage), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.unclaim_homepage(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == homepage.UnclaimHomepageRequest( + name="name_value", + ) + + +def test_unclaim_homepage_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = HomepageServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.unclaim_homepage in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.unclaim_homepage + ] = mock_rpc + request = {} + client.unclaim_homepage(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.unclaim_homepage(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_unclaim_homepage_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = HomepageServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.unclaim_homepage), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + homepage.Homepage( + name="name_value", + uri="uri_value", + claimed=True, + ) + ) + response = await client.unclaim_homepage() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == homepage.UnclaimHomepageRequest() + + +@pytest.mark.asyncio +async def test_unclaim_homepage_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = HomepageServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.unclaim_homepage + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[ + client._client._transport.unclaim_homepage + ] = mock_object + + request = {} + await client.unclaim_homepage(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + await client.unclaim_homepage(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + +@pytest.mark.asyncio +async def test_unclaim_homepage_async( + transport: str = "grpc_asyncio", request_type=homepage.UnclaimHomepageRequest +): + client = HomepageServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.unclaim_homepage), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + homepage.Homepage( + name="name_value", + uri="uri_value", + claimed=True, + ) + ) + response = await client.unclaim_homepage(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = homepage.UnclaimHomepageRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, homepage.Homepage) + assert response.name == "name_value" + assert response.uri == "uri_value" + assert response.claimed is True + + +@pytest.mark.asyncio +async def test_unclaim_homepage_async_from_dict(): + await test_unclaim_homepage_async(request_type=dict) + + +def test_unclaim_homepage_field_headers(): + client = HomepageServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = homepage.UnclaimHomepageRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.unclaim_homepage), "__call__") as call: + call.return_value = homepage.Homepage() + client.unclaim_homepage(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_unclaim_homepage_field_headers_async(): + client = HomepageServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = homepage.UnclaimHomepageRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.unclaim_homepage), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(homepage.Homepage()) + await client.unclaim_homepage(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.parametrize( + "request_type", + [ + homepage.GetHomepageRequest, + dict, + ], +) +def test_get_homepage_rest(request_type): + client = HomepageServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "accounts/sample1/homepage"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = homepage.Homepage( + name="name_value", + uri="uri_value", + claimed=True, + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = homepage.Homepage.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_homepage(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, homepage.Homepage) + assert response.name == "name_value" + assert response.uri == "uri_value" + assert response.claimed is True + + +def test_get_homepage_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = HomepageServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_homepage in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get_homepage] = mock_rpc + + request = {} + client.get_homepage(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.get_homepage(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_get_homepage_rest_required_fields(request_type=homepage.GetHomepageRequest): + transport_class = transports.HomepageServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_homepage._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_homepage._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = HomepageServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = homepage.Homepage() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = homepage.Homepage.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_homepage(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_homepage_rest_unset_required_fields(): + transport = transports.HomepageServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_homepage._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_homepage_rest_interceptors(null_interceptor): + transport = transports.HomepageServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.HomepageServiceRestInterceptor(), + ) + client = HomepageServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.HomepageServiceRestInterceptor, "post_get_homepage" + ) as post, mock.patch.object( + transports.HomepageServiceRestInterceptor, "pre_get_homepage" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = homepage.GetHomepageRequest.pb(homepage.GetHomepageRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = homepage.Homepage.to_json(homepage.Homepage()) + + request = homepage.GetHomepageRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = homepage.Homepage() + + client.get_homepage( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_homepage_rest_bad_request( + transport: str = "rest", request_type=homepage.GetHomepageRequest +): + client = HomepageServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "accounts/sample1/homepage"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_homepage(request) + + +def test_get_homepage_rest_flattened(): + client = HomepageServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = homepage.Homepage() + + # get arguments that satisfy an http rule for this method + sample_request = {"name": "accounts/sample1/homepage"} + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = homepage.Homepage.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_homepage(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/accounts/v1beta/{name=accounts/*/homepage}" % client.transport._host, + args[1], + ) + + +def test_get_homepage_rest_flattened_error(transport: str = "rest"): + client = HomepageServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_homepage( + homepage.GetHomepageRequest(), + name="name_value", + ) + + +def test_get_homepage_rest_error(): + client = HomepageServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + gsma_homepage.UpdateHomepageRequest, + dict, + ], +) +def test_update_homepage_rest(request_type): + client = HomepageServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"homepage": {"name": "accounts/sample1/homepage"}} + request_init["homepage"] = { + "name": "accounts/sample1/homepage", + "uri": "uri_value", + "claimed": True, + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = gsma_homepage.UpdateHomepageRequest.meta.fields["homepage"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["homepage"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["homepage"][field])): + del request_init["homepage"][field][i][subfield] + else: + del request_init["homepage"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = gsma_homepage.Homepage( + name="name_value", + uri="uri_value", + claimed=True, + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = gsma_homepage.Homepage.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.update_homepage(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, gsma_homepage.Homepage) + assert response.name == "name_value" + assert response.uri == "uri_value" + assert response.claimed is True + + +def test_update_homepage_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = HomepageServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.update_homepage in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.update_homepage] = mock_rpc + + request = {} + client.update_homepage(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.update_homepage(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_update_homepage_rest_required_fields( + request_type=gsma_homepage.UpdateHomepageRequest, +): + transport_class = transports.HomepageServiceRestTransport + + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_homepage._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_homepage._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("update_mask",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + + client = HomepageServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = gsma_homepage.Homepage() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "patch", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = gsma_homepage.Homepage.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.update_homepage(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_update_homepage_rest_unset_required_fields(): + transport = transports.HomepageServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.update_homepage._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(("updateMask",)) + & set( + ( + "homepage", + "updateMask", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_homepage_rest_interceptors(null_interceptor): + transport = transports.HomepageServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.HomepageServiceRestInterceptor(), + ) + client = HomepageServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.HomepageServiceRestInterceptor, "post_update_homepage" + ) as post, mock.patch.object( + transports.HomepageServiceRestInterceptor, "pre_update_homepage" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = gsma_homepage.UpdateHomepageRequest.pb( + gsma_homepage.UpdateHomepageRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = gsma_homepage.Homepage.to_json( + gsma_homepage.Homepage() + ) + + request = gsma_homepage.UpdateHomepageRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = gsma_homepage.Homepage() + + client.update_homepage( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_update_homepage_rest_bad_request( + transport: str = "rest", request_type=gsma_homepage.UpdateHomepageRequest +): + client = HomepageServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"homepage": {"name": "accounts/sample1/homepage"}} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.update_homepage(request) + + +def test_update_homepage_rest_flattened(): + client = HomepageServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = gsma_homepage.Homepage() + + # get arguments that satisfy an http rule for this method + sample_request = {"homepage": {"name": "accounts/sample1/homepage"}} + + # get truthy value for each flattened field + mock_args = dict( + homepage=gsma_homepage.Homepage(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = gsma_homepage.Homepage.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.update_homepage(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/accounts/v1beta/{homepage.name=accounts/*/homepage}" + % client.transport._host, + args[1], + ) + + +def test_update_homepage_rest_flattened_error(transport: str = "rest"): + client = HomepageServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_homepage( + gsma_homepage.UpdateHomepageRequest(), + homepage=gsma_homepage.Homepage(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +def test_update_homepage_rest_error(): + client = HomepageServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + homepage.ClaimHomepageRequest, + dict, + ], +) +def test_claim_homepage_rest(request_type): + client = HomepageServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "accounts/sample1/homepage"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = homepage.Homepage( + name="name_value", + uri="uri_value", + claimed=True, + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = homepage.Homepage.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.claim_homepage(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, homepage.Homepage) + assert response.name == "name_value" + assert response.uri == "uri_value" + assert response.claimed is True + + +def test_claim_homepage_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = HomepageServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.claim_homepage in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.claim_homepage] = mock_rpc + + request = {} + client.claim_homepage(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.claim_homepage(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_claim_homepage_rest_required_fields( + request_type=homepage.ClaimHomepageRequest, +): + transport_class = transports.HomepageServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).claim_homepage._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).claim_homepage._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = HomepageServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = homepage.Homepage() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = homepage.Homepage.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.claim_homepage(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_claim_homepage_rest_unset_required_fields(): + transport = transports.HomepageServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.claim_homepage._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_claim_homepage_rest_interceptors(null_interceptor): + transport = transports.HomepageServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.HomepageServiceRestInterceptor(), + ) + client = HomepageServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.HomepageServiceRestInterceptor, "post_claim_homepage" + ) as post, mock.patch.object( + transports.HomepageServiceRestInterceptor, "pre_claim_homepage" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = homepage.ClaimHomepageRequest.pb(homepage.ClaimHomepageRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = homepage.Homepage.to_json(homepage.Homepage()) + + request = homepage.ClaimHomepageRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = homepage.Homepage() + + client.claim_homepage( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_claim_homepage_rest_bad_request( + transport: str = "rest", request_type=homepage.ClaimHomepageRequest +): + client = HomepageServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "accounts/sample1/homepage"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.claim_homepage(request) + + +def test_claim_homepage_rest_error(): + client = HomepageServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + homepage.UnclaimHomepageRequest, + dict, + ], +) +def test_unclaim_homepage_rest(request_type): + client = HomepageServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "accounts/sample1/homepage"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = homepage.Homepage( + name="name_value", + uri="uri_value", + claimed=True, + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = homepage.Homepage.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.unclaim_homepage(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, homepage.Homepage) + assert response.name == "name_value" + assert response.uri == "uri_value" + assert response.claimed is True + + +def test_unclaim_homepage_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = HomepageServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.unclaim_homepage in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.unclaim_homepage + ] = mock_rpc + + request = {} + client.unclaim_homepage(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.unclaim_homepage(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_unclaim_homepage_rest_required_fields( + request_type=homepage.UnclaimHomepageRequest, +): + transport_class = transports.HomepageServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).unclaim_homepage._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).unclaim_homepage._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = HomepageServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = homepage.Homepage() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = homepage.Homepage.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.unclaim_homepage(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_unclaim_homepage_rest_unset_required_fields(): + transport = transports.HomepageServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.unclaim_homepage._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_unclaim_homepage_rest_interceptors(null_interceptor): + transport = transports.HomepageServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.HomepageServiceRestInterceptor(), + ) + client = HomepageServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.HomepageServiceRestInterceptor, "post_unclaim_homepage" + ) as post, mock.patch.object( + transports.HomepageServiceRestInterceptor, "pre_unclaim_homepage" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = homepage.UnclaimHomepageRequest.pb( + homepage.UnclaimHomepageRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = homepage.Homepage.to_json(homepage.Homepage()) + + request = homepage.UnclaimHomepageRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = homepage.Homepage() + + client.unclaim_homepage( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_unclaim_homepage_rest_bad_request( + transport: str = "rest", request_type=homepage.UnclaimHomepageRequest +): + client = HomepageServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "accounts/sample1/homepage"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.unclaim_homepage(request) + + +def test_unclaim_homepage_rest_error(): + client = HomepageServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.HomepageServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = HomepageServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.HomepageServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = HomepageServiceClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. + transport = transports.HomepageServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = HomepageServiceClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = HomepageServiceClient( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.HomepageServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = HomepageServiceClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.HomepageServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = HomepageServiceClient(transport=transport) + assert client.transport is transport + + +def test_transport_get_channel(): + # A client may be instantiated with a custom transport instance. + transport = transports.HomepageServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + transport = transports.HomepageServiceGrpcAsyncIOTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.HomepageServiceGrpcTransport, + transports.HomepageServiceGrpcAsyncIOTransport, + transports.HomepageServiceRestTransport, + ], +) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. 
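+ # Descriptive note (not part of the generated test): patching google.auth.default simulates Application Default Credentials being discoverable in the test environment.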
+ with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "rest", + ], +) +def test_transport_kind(transport_name): + transport = HomepageServiceClient.get_transport_class(transport_name)( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert transport.kind == transport_name + + +def test_transport_grpc_default(): + # A client should use the gRPC transport by default. + client = HomepageServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert isinstance( + client.transport, + transports.HomepageServiceGrpcTransport, + ) + + +def test_homepage_service_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.HomepageServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json", + ) + + +def test_homepage_service_base_transport(): + # Instantiate the base transport. + with mock.patch( + "google.shopping.merchant_accounts_v1beta.services.homepage_service.transports.HomepageServiceTransport.__init__" + ) as Transport: + Transport.return_value = None + transport = transports.HomepageServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. + methods = ( + "get_homepage", + "update_homepage", + "claim_homepage", + "unclaim_homepage", + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + with pytest.raises(NotImplementedError): + transport.close() + + # Catch all for all remaining methods and properties + remainder = [ + "kind", + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + + +def test_homepage_service_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch( + "google.shopping.merchant_accounts_v1beta.services.homepage_service.transports.HomepageServiceTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.HomepageServiceTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with( + "credentials.json", + scopes=None, + default_scopes=("https://www.googleapis.com/auth/content",), + quota_project_id="octopus", + ) + + +def test_homepage_service_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. + with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( + "google.shopping.merchant_accounts_v1beta.services.homepage_service.transports.HomepageServiceTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.HomepageServiceTransport() + adc.assert_called_once() + + +def test_homepage_service_auth_adc(): + # If no credentials are provided, we should use ADC credentials. 
+ with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + HomepageServiceClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=("https://www.googleapis.com/auth/content",), + quota_project_id=None, + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.HomepageServiceGrpcTransport, + transports.HomepageServiceGrpcAsyncIOTransport, + ], +) +def test_homepage_service_transport_auth_adc(transport_class): + # If credentials and host are not provided, the transport class should use + # ADC credentials. + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + adc.assert_called_once_with( + scopes=["1", "2"], + default_scopes=("https://www.googleapis.com/auth/content",), + quota_project_id="octopus", + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.HomepageServiceGrpcTransport, + transports.HomepageServiceGrpcAsyncIOTransport, + transports.HomepageServiceRestTransport, + ], +) +def test_homepage_service_transport_auth_gdch_credentials(transport_class): + host = "https://language.com" + api_audience_tests = [None, "https://language2.com"] + api_audience_expect = [host, "https://language2.com"] + for t, e in zip(api_audience_tests, api_audience_expect): + with mock.patch.object(google.auth, "default", autospec=True) as adc: + gdch_mock = mock.MagicMock() + type(gdch_mock).with_gdch_audience = mock.PropertyMock( + return_value=gdch_mock + ) + adc.return_value = (gdch_mock, None) + transport_class(host=host, api_audience=t) + gdch_mock.with_gdch_audience.assert_called_once_with(e) + + +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.HomepageServiceGrpcTransport, grpc_helpers), + (transports.HomepageServiceGrpcAsyncIOTransport, grpc_helpers_async), + ], +) +def test_homepage_service_transport_create_channel(transport_class, grpc_helpers): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
+ with mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + + create_channel.assert_called_with( + "merchantapi.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + default_scopes=("https://www.googleapis.com/auth/content",), + scopes=["1", "2"], + default_host="merchantapi.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.HomepageServiceGrpcTransport, + transports.HomepageServiceGrpcAsyncIOTransport, + ], +) +def test_homepage_service_grpc_transport_client_cert_source_for_mtls(transport_class): + cred = ga_credentials.AnonymousCredentials() + + # Check ssl_channel_credentials is used if provided. + with mock.patch.object(transport_class, "create_channel") as mock_create_channel: + mock_ssl_channel_creds = mock.Mock() + transport_class( + host="squid.clam.whelk", + credentials=cred, + ssl_channel_credentials=mock_ssl_channel_creds, + ) + mock_create_channel.assert_called_once_with( + "squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_channel_creds, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls + # is used. + with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): + with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: + transport_class( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback, + ) + expected_cert, expected_key = client_cert_source_callback() + mock_ssl_cred.assert_called_once_with( + certificate_chain=expected_cert, private_key=expected_key + ) + + +def test_homepage_service_http_transport_client_cert_source_for_mtls(): + cred = ga_credentials.AnonymousCredentials() + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ) as mock_configure_mtls_channel: + transports.HomepageServiceRestTransport( + credentials=cred, client_cert_source_for_mtls=client_cert_source_callback + ) + mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + "rest", + ], +) +def test_homepage_service_host_no_port(transport_name): + client = HomepageServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="merchantapi.googleapis.com" + ), + transport=transport_name, + ) + assert client.transport._host == ( + "merchantapi.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://merchantapi.googleapis.com" + ) + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + "rest", + ], +) +def test_homepage_service_host_with_port(transport_name): + client = HomepageServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + 
api_endpoint="merchantapi.googleapis.com:8000" + ), + transport=transport_name, + ) + assert client.transport._host == ( + "merchantapi.googleapis.com:8000" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://merchantapi.googleapis.com:8000" + ) + + +@pytest.mark.parametrize( + "transport_name", + [ + "rest", + ], +) +def test_homepage_service_client_transport_session_collision(transport_name): + creds1 = ga_credentials.AnonymousCredentials() + creds2 = ga_credentials.AnonymousCredentials() + client1 = HomepageServiceClient( + credentials=creds1, + transport=transport_name, + ) + client2 = HomepageServiceClient( + credentials=creds2, + transport=transport_name, + ) + session1 = client1.transport.get_homepage._session + session2 = client2.transport.get_homepage._session + assert session1 != session2 + session1 = client1.transport.update_homepage._session + session2 = client2.transport.update_homepage._session + assert session1 != session2 + session1 = client1.transport.claim_homepage._session + session2 = client2.transport.claim_homepage._session + assert session1 != session2 + session1 = client1.transport.unclaim_homepage._session + session2 = client2.transport.unclaim_homepage._session + assert session1 != session2 + + +def test_homepage_service_grpc_transport_channel(): + channel = grpc.secure_channel("http://localhost/", grpc.local_channel_credentials()) + + # Check that channel is used if provided. + transport = transports.HomepageServiceGrpcTransport( + host="squid.clam.whelk", + channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None + + +def test_homepage_service_grpc_asyncio_transport_channel(): + channel = aio.secure_channel("http://localhost/", grpc.local_channel_credentials()) + + # Check that channel is used if provided. + transport = transports.HomepageServiceGrpcAsyncIOTransport( + host="squid.clam.whelk", + channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. 
+@pytest.mark.parametrize( + "transport_class", + [ + transports.HomepageServiceGrpcTransport, + transports.HomepageServiceGrpcAsyncIOTransport, + ], +) +def test_homepage_service_transport_channel_mtls_with_client_cert_source( + transport_class, +): + with mock.patch( + "grpc.ssl_channel_credentials", autospec=True + ) as grpc_ssl_channel_cred: + with mock.patch.object( + transport_class, "create_channel" + ) as grpc_create_channel: + mock_ssl_cred = mock.Mock() + grpc_ssl_channel_cred.return_value = mock_ssl_cred + + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + + cred = ga_credentials.AnonymousCredentials() + with pytest.warns(DeprecationWarning): + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (cred, None) + transport = transport_class( + host="squid.clam.whelk", + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=client_cert_source_callback, + ) + adc.assert_called_once() + + grpc_ssl_channel_cred.assert_called_once_with( + certificate_chain=b"cert bytes", private_key=b"key bytes" + ) + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + assert transport._ssl_channel_credentials == mock_ssl_cred + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. +@pytest.mark.parametrize( + "transport_class", + [ + transports.HomepageServiceGrpcTransport, + transports.HomepageServiceGrpcAsyncIOTransport, + ], +) +def test_homepage_service_transport_channel_mtls_with_adc(transport_class): + mock_ssl_cred = mock.Mock() + with mock.patch.multiple( + "google.auth.transport.grpc.SslCredentials", + __init__=mock.Mock(return_value=None), + ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), + ): + with mock.patch.object( + transport_class, "create_channel" + ) as grpc_create_channel: + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + mock_cred = mock.Mock() + + with pytest.warns(DeprecationWarning): + transport = transport_class( + host="squid.clam.whelk", + credentials=mock_cred, + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=None, + ) + + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=mock_cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + + +def test_homepage_path(): + account = "squid" + expected = "accounts/{account}/homepage".format( + account=account, + ) + actual = HomepageServiceClient.homepage_path(account) + assert expected == actual + + +def test_parse_homepage_path(): + expected = { + "account": "clam", + } + path = HomepageServiceClient.homepage_path(**expected) + + # Check that the path construction is reversible. 
+ actual = HomepageServiceClient.parse_homepage_path(path) + assert expected == actual + + +def test_common_billing_account_path(): + billing_account = "whelk" + expected = "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + actual = HomepageServiceClient.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "octopus", + } + path = HomepageServiceClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. + actual = HomepageServiceClient.parse_common_billing_account_path(path) + assert expected == actual + + +def test_common_folder_path(): + folder = "oyster" + expected = "folders/{folder}".format( + folder=folder, + ) + actual = HomepageServiceClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "nudibranch", + } + path = HomepageServiceClient.common_folder_path(**expected) + + # Check that the path construction is reversible. + actual = HomepageServiceClient.parse_common_folder_path(path) + assert expected == actual + + +def test_common_organization_path(): + organization = "cuttlefish" + expected = "organizations/{organization}".format( + organization=organization, + ) + actual = HomepageServiceClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "mussel", + } + path = HomepageServiceClient.common_organization_path(**expected) + + # Check that the path construction is reversible. + actual = HomepageServiceClient.parse_common_organization_path(path) + assert expected == actual + + +def test_common_project_path(): + project = "winkle" + expected = "projects/{project}".format( + project=project, + ) + actual = HomepageServiceClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "nautilus", + } + path = HomepageServiceClient.common_project_path(**expected) + + # Check that the path construction is reversible. + actual = HomepageServiceClient.parse_common_project_path(path) + assert expected == actual + + +def test_common_location_path(): + project = "scallop" + location = "abalone" + expected = "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) + actual = HomepageServiceClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "squid", + "location": "clam", + } + path = HomepageServiceClient.common_location_path(**expected) + + # Check that the path construction is reversible. 
+ actual = HomepageServiceClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object( + transports.HomepageServiceTransport, "_prep_wrapped_messages" + ) as prep: + client = HomepageServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object( + transports.HomepageServiceTransport, "_prep_wrapped_messages" + ) as prep: + transport_class = HomepageServiceClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + +@pytest.mark.asyncio +async def test_transport_close_async(): + client = HomepageServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + with mock.patch.object( + type(getattr(client.transport, "grpc_channel")), "close" + ) as close: + async with client: + close.assert_not_called() + close.assert_called_once() + + +def test_transport_close(): + transports = { + "rest": "_session", + "grpc": "_grpc_channel", + } + + for transport, close_name in transports.items(): + client = HomepageServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + with mock.patch.object( + type(getattr(client.transport, close_name)), "close" + ) as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +def test_client_ctx(): + transports = [ + "rest", + "grpc", + ] + for transport in transports: + client = HomepageServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + # Test client calls underlying transport. 
+ with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() + + +@pytest.mark.parametrize( + "client_class,transport_class", + [ + (HomepageServiceClient, transports.HomepageServiceGrpcTransport), + (HomepageServiceAsyncClient, transports.HomepageServiceGrpcAsyncIOTransport), + ], +) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) diff --git a/packages/google-shopping-merchant-accounts/tests/unit/gapic/merchant_accounts_v1beta/test_online_return_policy_service.py b/packages/google-shopping-merchant-accounts/tests/unit/gapic/merchant_accounts_v1beta/test_online_return_policy_service.py new file mode 100644 index 000000000000..e08f3de701bb --- /dev/null +++ b/packages/google-shopping-merchant-accounts/tests/unit/gapic/merchant_accounts_v1beta/test_online_return_policy_service.py @@ -0,0 +1,3762 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import os + +# try/except added for compatibility with python < 3.8 +try: + from unittest import mock + from unittest.mock import AsyncMock # pragma: NO COVER +except ImportError: # pragma: NO COVER + import mock + +from collections.abc import Iterable +import json +import math + +from google.api_core import gapic_v1, grpc_helpers, grpc_helpers_async, path_template +from google.api_core import api_core_version, client_options +from google.api_core import exceptions as core_exceptions +import google.auth +from google.auth import credentials as ga_credentials +from google.auth.exceptions import MutualTLSChannelError +from google.oauth2 import service_account +from google.protobuf import json_format +import grpc +from grpc.experimental import aio +from proto.marshal.rules import wrappers +from proto.marshal.rules.dates import DurationRule, TimestampRule +import pytest +from requests import PreparedRequest, Request, Response +from requests.sessions import Session + +from google.shopping.merchant_accounts_v1beta.services.online_return_policy_service import ( + OnlineReturnPolicyServiceAsyncClient, + OnlineReturnPolicyServiceClient, + pagers, + transports, +) +from google.shopping.merchant_accounts_v1beta.types import online_return_policy + + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + + +# If default endpoint is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. +def modify_default_endpoint(client): + return ( + "foo.googleapis.com" + if ("localhost" in client.DEFAULT_ENDPOINT) + else client.DEFAULT_ENDPOINT + ) + + +# If default endpoint template is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint template so the client can produce a different +# mtls endpoint for endpoint testing purposes. 
+def modify_default_endpoint_template(client): + return ( + "test.{UNIVERSE_DOMAIN}" + if ("localhost" in client._DEFAULT_ENDPOINT_TEMPLATE) + else client._DEFAULT_ENDPOINT_TEMPLATE + ) + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert OnlineReturnPolicyServiceClient._get_default_mtls_endpoint(None) is None + assert ( + OnlineReturnPolicyServiceClient._get_default_mtls_endpoint(api_endpoint) + == api_mtls_endpoint + ) + assert ( + OnlineReturnPolicyServiceClient._get_default_mtls_endpoint(api_mtls_endpoint) + == api_mtls_endpoint + ) + assert ( + OnlineReturnPolicyServiceClient._get_default_mtls_endpoint(sandbox_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + OnlineReturnPolicyServiceClient._get_default_mtls_endpoint( + sandbox_mtls_endpoint + ) + == sandbox_mtls_endpoint + ) + assert ( + OnlineReturnPolicyServiceClient._get_default_mtls_endpoint(non_googleapi) + == non_googleapi + ) + + +def test__read_environment_variables(): + assert OnlineReturnPolicyServiceClient._read_environment_variables() == ( + False, + "auto", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + assert OnlineReturnPolicyServiceClient._read_environment_variables() == ( + True, + "auto", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + assert OnlineReturnPolicyServiceClient._read_environment_variables() == ( + False, + "auto", + None, + ) + + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError) as excinfo: + OnlineReturnPolicyServiceClient._read_environment_variables() + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + assert OnlineReturnPolicyServiceClient._read_environment_variables() == ( + False, + "never", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + assert OnlineReturnPolicyServiceClient._read_environment_variables() == ( + False, + "always", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}): + assert OnlineReturnPolicyServiceClient._read_environment_variables() == ( + False, + "auto", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + OnlineReturnPolicyServiceClient._read_environment_variables() + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + with mock.patch.dict(os.environ, {"GOOGLE_CLOUD_UNIVERSE_DOMAIN": "foo.com"}): + assert OnlineReturnPolicyServiceClient._read_environment_variables() == ( + False, + "auto", + "foo.com", + ) + + +def test__get_client_cert_source(): + mock_provided_cert_source = mock.Mock() + mock_default_cert_source = mock.Mock() + + assert OnlineReturnPolicyServiceClient._get_client_cert_source(None, False) is None + assert ( + OnlineReturnPolicyServiceClient._get_client_cert_source( + mock_provided_cert_source, False + ) + is None + ) + assert ( + OnlineReturnPolicyServiceClient._get_client_cert_source( + 
mock_provided_cert_source, True + ) + == mock_provided_cert_source + ) + + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", return_value=True + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=mock_default_cert_source, + ): + assert ( + OnlineReturnPolicyServiceClient._get_client_cert_source(None, True) + is mock_default_cert_source + ) + assert ( + OnlineReturnPolicyServiceClient._get_client_cert_source( + mock_provided_cert_source, "true" + ) + is mock_provided_cert_source + ) + + +@mock.patch.object( + OnlineReturnPolicyServiceClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(OnlineReturnPolicyServiceClient), +) +@mock.patch.object( + OnlineReturnPolicyServiceAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(OnlineReturnPolicyServiceAsyncClient), +) +def test__get_api_endpoint(): + api_override = "foo.com" + mock_client_cert_source = mock.Mock() + default_universe = OnlineReturnPolicyServiceClient._DEFAULT_UNIVERSE + default_endpoint = ( + OnlineReturnPolicyServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=default_universe + ) + ) + mock_universe = "bar.com" + mock_endpoint = OnlineReturnPolicyServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=mock_universe + ) + + assert ( + OnlineReturnPolicyServiceClient._get_api_endpoint( + api_override, mock_client_cert_source, default_universe, "always" + ) + == api_override + ) + assert ( + OnlineReturnPolicyServiceClient._get_api_endpoint( + None, mock_client_cert_source, default_universe, "auto" + ) + == OnlineReturnPolicyServiceClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + OnlineReturnPolicyServiceClient._get_api_endpoint( + None, None, default_universe, "auto" + ) + == default_endpoint + ) + assert ( + OnlineReturnPolicyServiceClient._get_api_endpoint( + None, None, default_universe, "always" + ) + == OnlineReturnPolicyServiceClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + OnlineReturnPolicyServiceClient._get_api_endpoint( + None, mock_client_cert_source, default_universe, "always" + ) + == OnlineReturnPolicyServiceClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + OnlineReturnPolicyServiceClient._get_api_endpoint( + None, None, mock_universe, "never" + ) + == mock_endpoint + ) + assert ( + OnlineReturnPolicyServiceClient._get_api_endpoint( + None, None, default_universe, "never" + ) + == default_endpoint + ) + + with pytest.raises(MutualTLSChannelError) as excinfo: + OnlineReturnPolicyServiceClient._get_api_endpoint( + None, mock_client_cert_source, mock_universe, "auto" + ) + assert ( + str(excinfo.value) + == "mTLS is not supported in any universe other than googleapis.com." + ) + + +def test__get_universe_domain(): + client_universe_domain = "foo.com" + universe_domain_env = "bar.com" + + assert ( + OnlineReturnPolicyServiceClient._get_universe_domain( + client_universe_domain, universe_domain_env + ) + == client_universe_domain + ) + assert ( + OnlineReturnPolicyServiceClient._get_universe_domain(None, universe_domain_env) + == universe_domain_env + ) + assert ( + OnlineReturnPolicyServiceClient._get_universe_domain(None, None) + == OnlineReturnPolicyServiceClient._DEFAULT_UNIVERSE + ) + + with pytest.raises(ValueError) as excinfo: + OnlineReturnPolicyServiceClient._get_universe_domain("", None) + assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
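
The `test__get_universe_domain` case above pins down the precedence the client helper is expected to follow: an explicitly configured client universe domain wins over the `GOOGLE_CLOUD_UNIVERSE_DOMAIN` environment value, which in turn wins over the library default (`googleapis.com`), and an empty string is rejected. A minimal sketch of that resolution order, using a hypothetical standalone `resolve_universe_domain` helper rather than the generated `_get_universe_domain` classmethod:

```python
# Illustrative sketch only: mirrors the precedence asserted in
# test__get_universe_domain; resolve_universe_domain is a hypothetical
# helper, not part of the generated client surface.
_DEFAULT_UNIVERSE = "googleapis.com"


def resolve_universe_domain(client_universe_domain, universe_domain_env):
    """Resolve the universe domain: client option > env var > default."""
    if client_universe_domain is not None:
        # An explicit (non-empty) client setting always wins.
        if client_universe_domain == "":
            raise ValueError("Universe Domain cannot be an empty string.")
        return client_universe_domain
    if universe_domain_env is not None:
        # Fall back to GOOGLE_CLOUD_UNIVERSE_DOMAIN when no client option is set.
        return universe_domain_env
    return _DEFAULT_UNIVERSE


assert resolve_universe_domain("foo.com", "bar.com") == "foo.com"
assert resolve_universe_domain(None, "bar.com") == "bar.com"
assert resolve_universe_domain(None, None) == "googleapis.com"
```
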
+ + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + ( + OnlineReturnPolicyServiceClient, + transports.OnlineReturnPolicyServiceGrpcTransport, + "grpc", + ), + ( + OnlineReturnPolicyServiceClient, + transports.OnlineReturnPolicyServiceRestTransport, + "rest", + ), + ], +) +def test__validate_universe_domain(client_class, transport_class, transport_name): + client = client_class( + transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) + ) + assert client._validate_universe_domain() == True + + # Test the case when universe is already validated. + assert client._validate_universe_domain() == True + + if transport_name == "grpc": + # Test the case where credentials are provided by the + # `local_channel_credentials`. The default universes in both match. + channel = grpc.secure_channel( + "http://localhost/", grpc.local_channel_credentials() + ) + client = client_class(transport=transport_class(channel=channel)) + assert client._validate_universe_domain() == True + + # Test the case where credentials do not exist: e.g. a transport is provided + # with no credentials. Validation should still succeed because there is no + # mismatch with non-existent credentials. + channel = grpc.secure_channel( + "http://localhost/", grpc.local_channel_credentials() + ) + transport = transport_class(channel=channel) + transport._credentials = None + client = client_class(transport=transport) + assert client._validate_universe_domain() == True + + # TODO: This is needed to cater for older versions of google-auth + # Make this test unconditional once the minimum supported version of + # google-auth becomes 2.23.0 or higher. + google_auth_major, google_auth_minor = [ + int(part) for part in google.auth.__version__.split(".")[0:2] + ] + if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): + credentials = ga_credentials.AnonymousCredentials() + credentials._universe_domain = "foo.com" + # Test the case when there is a universe mismatch from the credentials. + client = client_class(transport=transport_class(credentials=credentials)) + with pytest.raises(ValueError) as excinfo: + client._validate_universe_domain() + assert ( + str(excinfo.value) + == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." + ) + + # Test the case when there is a universe mismatch from the client. + # + # TODO: Make this test unconditional once the minimum supported version of + # google-api-core becomes 2.15.0 or higher. + api_core_major, api_core_minor = [ + int(part) for part in api_core_version.__version__.split(".")[0:2] + ] + if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): + client = client_class( + client_options={"universe_domain": "bar.com"}, + transport=transport_class( + credentials=ga_credentials.AnonymousCredentials(), + ), + ) + with pytest.raises(ValueError) as excinfo: + client._validate_universe_domain() + assert ( + str(excinfo.value) + == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." 
+ ) + + # Test that ValueError is raised if universe_domain is provided via client options and credentials is None + with pytest.raises(ValueError): + client._compare_universes("foo.bar", None) + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (OnlineReturnPolicyServiceClient, "grpc"), + (OnlineReturnPolicyServiceAsyncClient, "grpc_asyncio"), + (OnlineReturnPolicyServiceClient, "rest"), + ], +) +def test_online_return_policy_service_client_from_service_account_info( + client_class, transport_name +): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_info" + ) as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info, transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + "merchantapi.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://merchantapi.googleapis.com" + ) + + +@pytest.mark.parametrize( + "transport_class,transport_name", + [ + (transports.OnlineReturnPolicyServiceGrpcTransport, "grpc"), + (transports.OnlineReturnPolicyServiceGrpcAsyncIOTransport, "grpc_asyncio"), + (transports.OnlineReturnPolicyServiceRestTransport, "rest"), + ], +) +def test_online_return_policy_service_client_service_account_always_use_jwt( + transport_class, transport_name +): + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) + + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=False) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (OnlineReturnPolicyServiceClient, "grpc"), + (OnlineReturnPolicyServiceAsyncClient, "grpc_asyncio"), + (OnlineReturnPolicyServiceClient, "rest"), + ], +) +def test_online_return_policy_service_client_from_service_account_file( + client_class, transport_name +): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_file" + ) as factory: + factory.return_value = creds + client = client_class.from_service_account_file( + "dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + client = client_class.from_service_account_json( + "dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + "merchantapi.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://merchantapi.googleapis.com" + ) + + +def test_online_return_policy_service_client_get_transport_class(): + transport = OnlineReturnPolicyServiceClient.get_transport_class() + available_transports = [ + transports.OnlineReturnPolicyServiceGrpcTransport, + transports.OnlineReturnPolicyServiceRestTransport, + ] + assert transport in available_transports + + 
transport = OnlineReturnPolicyServiceClient.get_transport_class("grpc") + assert transport == transports.OnlineReturnPolicyServiceGrpcTransport + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + ( + OnlineReturnPolicyServiceClient, + transports.OnlineReturnPolicyServiceGrpcTransport, + "grpc", + ), + ( + OnlineReturnPolicyServiceAsyncClient, + transports.OnlineReturnPolicyServiceGrpcAsyncIOTransport, + "grpc_asyncio", + ), + ( + OnlineReturnPolicyServiceClient, + transports.OnlineReturnPolicyServiceRestTransport, + "rest", + ), + ], +) +@mock.patch.object( + OnlineReturnPolicyServiceClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(OnlineReturnPolicyServiceClient), +) +@mock.patch.object( + OnlineReturnPolicyServiceAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(OnlineReturnPolicyServiceAsyncClient), +) +def test_online_return_policy_service_client_client_options( + client_class, transport_class, transport_name +): + # Check that if channel is provided we won't create a new one. + with mock.patch.object( + OnlineReturnPolicyServiceClient, "get_transport_class" + ) as gtc: + transport = transport_class(credentials=ga_credentials.AnonymousCredentials()) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object( + OnlineReturnPolicyServiceClient, "get_transport_class" + ) as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. + options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name, client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + client = client_class(transport=transport_name) + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError) as excinfo: + client = client_class(transport=transport_name) + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + # Check the case api_endpoint is provided + options = client_options.ClientOptions( + api_audience="https://language.googleapis.com" + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience="https://language.googleapis.com", + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,use_client_cert_env", + [ + ( + OnlineReturnPolicyServiceClient, + transports.OnlineReturnPolicyServiceGrpcTransport, + "grpc", + "true", + ), + ( + OnlineReturnPolicyServiceAsyncClient, + transports.OnlineReturnPolicyServiceGrpcAsyncIOTransport, + "grpc_asyncio", + "true", + ), + ( + OnlineReturnPolicyServiceClient, + transports.OnlineReturnPolicyServiceGrpcTransport, + "grpc", + "false", + ), + ( + OnlineReturnPolicyServiceAsyncClient, + transports.OnlineReturnPolicyServiceGrpcAsyncIOTransport, + "grpc_asyncio", + "false", + ), + ( + OnlineReturnPolicyServiceClient, + transports.OnlineReturnPolicyServiceRestTransport, + "rest", + "true", + ), + ( + 
OnlineReturnPolicyServiceClient, + transports.OnlineReturnPolicyServiceRestTransport, + "rest", + "false", + ), + ], +) +@mock.patch.object( + OnlineReturnPolicyServiceClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(OnlineReturnPolicyServiceClient), +) +@mock.patch.object( + OnlineReturnPolicyServiceAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(OnlineReturnPolicyServiceAsyncClient), +) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_online_return_policy_service_client_mtls_env_auto( + client_class, transport_class, transport_name, use_client_cert_env +): + # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. + + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + options = client_options.ClientOptions( + client_cert_source=client_cert_source_callback + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ) + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=client_cert_source_callback, + ): + if use_client_cert_env == "false": + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ) + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case client_cert_source and ADC client cert are not provided. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class", + [OnlineReturnPolicyServiceClient, OnlineReturnPolicyServiceAsyncClient], +) +@mock.patch.object( + OnlineReturnPolicyServiceClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(OnlineReturnPolicyServiceClient), +) +@mock.patch.object( + OnlineReturnPolicyServiceAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(OnlineReturnPolicyServiceAsyncClient), +) +def test_online_return_policy_service_client_get_mtls_endpoint_and_cert_source( + client_class, +): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=mock_client_cert_source, + ): + ( + api_endpoint, + cert_source, + ) = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + client_class.get_mtls_endpoint_and_cert_source() + + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError) as excinfo: + client_class.get_mtls_endpoint_and_cert_source() + + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + + +@pytest.mark.parametrize( + "client_class", + [OnlineReturnPolicyServiceClient, OnlineReturnPolicyServiceAsyncClient], +) +@mock.patch.object( + OnlineReturnPolicyServiceClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(OnlineReturnPolicyServiceClient), +) +@mock.patch.object( + OnlineReturnPolicyServiceAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(OnlineReturnPolicyServiceAsyncClient), +) +def test_online_return_policy_service_client_client_api_endpoint(client_class): + mock_client_cert_source = client_cert_source_callback + api_override = "foo.com" + default_universe = OnlineReturnPolicyServiceClient._DEFAULT_UNIVERSE + default_endpoint = ( + OnlineReturnPolicyServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=default_universe + ) + ) + mock_universe = "bar.com" + mock_endpoint = OnlineReturnPolicyServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=mock_universe + ) + + # If ClientOptions.api_endpoint is set and GOOGLE_API_USE_CLIENT_CERTIFICATE="true", + # use ClientOptions.api_endpoint as the api endpoint regardless. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ): + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=api_override + ) + client = client_class( + client_options=options, + credentials=ga_credentials.AnonymousCredentials(), + ) + assert client.api_endpoint == api_override + + # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="never", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + client = client_class(credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == default_endpoint + + # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="always", + # use the DEFAULT_MTLS_ENDPOINT as the api endpoint. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + client = client_class(credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + + # If ClientOptions.api_endpoint is not set, GOOGLE_API_USE_MTLS_ENDPOINT="auto" (default), + # GOOGLE_API_USE_CLIENT_CERTIFICATE="false" (default), default cert source doesn't exist, + # and ClientOptions.universe_domain="bar.com", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with universe domain as the api endpoint. + options = client_options.ClientOptions() + universe_exists = hasattr(options, "universe_domain") + if universe_exists: + options = client_options.ClientOptions(universe_domain=mock_universe) + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + else: + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + assert client.api_endpoint == ( + mock_endpoint if universe_exists else default_endpoint + ) + assert client.universe_domain == ( + mock_universe if universe_exists else default_universe + ) + + # If ClientOptions does not have a universe domain attribute and GOOGLE_API_USE_MTLS_ENDPOINT="never", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. + options = client_options.ClientOptions() + if hasattr(options, "universe_domain"): + delattr(options, "universe_domain") + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + assert client.api_endpoint == default_endpoint + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + ( + OnlineReturnPolicyServiceClient, + transports.OnlineReturnPolicyServiceGrpcTransport, + "grpc", + ), + ( + OnlineReturnPolicyServiceAsyncClient, + transports.OnlineReturnPolicyServiceGrpcAsyncIOTransport, + "grpc_asyncio", + ), + ( + OnlineReturnPolicyServiceClient, + transports.OnlineReturnPolicyServiceRestTransport, + "rest", + ), + ], +) +def test_online_return_policy_service_client_client_options_scopes( + client_class, transport_class, transport_name +): + # Check the case scopes are provided. 
+ options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + OnlineReturnPolicyServiceClient, + transports.OnlineReturnPolicyServiceGrpcTransport, + "grpc", + grpc_helpers, + ), + ( + OnlineReturnPolicyServiceAsyncClient, + transports.OnlineReturnPolicyServiceGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + ( + OnlineReturnPolicyServiceClient, + transports.OnlineReturnPolicyServiceRestTransport, + "rest", + None, + ), + ], +) +def test_online_return_policy_service_client_client_options_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. + options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +def test_online_return_policy_service_client_client_options_from_dict(): + with mock.patch( + "google.shopping.merchant_accounts_v1beta.services.online_return_policy_service.transports.OnlineReturnPolicyServiceGrpcTransport.__init__" + ) as grpc_transport: + grpc_transport.return_value = None + client = OnlineReturnPolicyServiceClient( + client_options={"api_endpoint": "squid.clam.whelk"} + ) + grpc_transport.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + OnlineReturnPolicyServiceClient, + transports.OnlineReturnPolicyServiceGrpcTransport, + "grpc", + grpc_helpers, + ), + ( + OnlineReturnPolicyServiceAsyncClient, + transports.OnlineReturnPolicyServiceGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + ], +) +def test_online_return_policy_service_client_create_channel_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. 
+ options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # test that the credentials from file are saved and used as the credentials. + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel" + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + file_creds = ga_credentials.AnonymousCredentials() + load_creds.return_value = (file_creds, None) + adc.return_value = (creds, None) + client = client_class(client_options=options, transport=transport_name) + create_channel.assert_called_with( + "merchantapi.googleapis.com:443", + credentials=file_creds, + credentials_file=None, + quota_project_id=None, + default_scopes=("https://www.googleapis.com/auth/content",), + scopes=None, + default_host="merchantapi.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "request_type", + [ + online_return_policy.GetOnlineReturnPolicyRequest, + dict, + ], +) +def test_get_online_return_policy(request_type, transport: str = "grpc"): + client = OnlineReturnPolicyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_online_return_policy), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = online_return_policy.OnlineReturnPolicy( + name="name_value", + return_policy_id="return_policy_id_value", + label="label_value", + countries=["countries_value"], + return_methods=[ + online_return_policy.OnlineReturnPolicy.ReturnMethod.BY_MAIL + ], + item_conditions=[online_return_policy.OnlineReturnPolicy.ItemCondition.NEW], + return_policy_uri="return_policy_uri_value", + accept_defective_only=True, + process_refund_days=2034, + accept_exchange=True, + ) + response = client.get_online_return_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = online_return_policy.GetOnlineReturnPolicyRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, online_return_policy.OnlineReturnPolicy) + assert response.name == "name_value" + assert response.return_policy_id == "return_policy_id_value" + assert response.label == "label_value" + assert response.countries == ["countries_value"] + assert response.return_methods == [ + online_return_policy.OnlineReturnPolicy.ReturnMethod.BY_MAIL + ] + assert response.item_conditions == [ + online_return_policy.OnlineReturnPolicy.ItemCondition.NEW + ] + assert response.return_policy_uri == "return_policy_uri_value" + assert response.accept_defective_only is True + assert response.process_refund_days == 2034 + assert response.accept_exchange is True + + +def test_get_online_return_policy_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = OnlineReturnPolicyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_online_return_policy), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.get_online_return_policy() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == online_return_policy.GetOnlineReturnPolicyRequest() + + +def test_get_online_return_policy_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = OnlineReturnPolicyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = online_return_policy.GetOnlineReturnPolicyRequest( + name="name_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_online_return_policy), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.get_online_return_policy(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == online_return_policy.GetOnlineReturnPolicyRequest( + name="name_value", + ) + + +def test_get_online_return_policy_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = OnlineReturnPolicyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.get_online_return_policy + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client._transport._wrapped_methods[ + client._transport.get_online_return_policy + ] = mock_rpc + request = {} + client.get_online_return_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.get_online_return_policy(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_get_online_return_policy_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = OnlineReturnPolicyServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_online_return_policy), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + online_return_policy.OnlineReturnPolicy( + name="name_value", + return_policy_id="return_policy_id_value", + label="label_value", + countries=["countries_value"], + return_methods=[ + online_return_policy.OnlineReturnPolicy.ReturnMethod.BY_MAIL + ], + item_conditions=[ + online_return_policy.OnlineReturnPolicy.ItemCondition.NEW + ], + return_policy_uri="return_policy_uri_value", + accept_defective_only=True, + process_refund_days=2034, + accept_exchange=True, + ) + ) + response = await client.get_online_return_policy() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == online_return_policy.GetOnlineReturnPolicyRequest() + + +@pytest.mark.asyncio +async def test_get_online_return_policy_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = OnlineReturnPolicyServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.get_online_return_policy + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[ + client._client._transport.get_online_return_policy + ] = mock_object + + request = {} + await client.get_online_return_policy(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_object.call_count == 1 + + await client.get_online_return_policy(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + +@pytest.mark.asyncio +async def test_get_online_return_policy_async( + transport: str = "grpc_asyncio", + request_type=online_return_policy.GetOnlineReturnPolicyRequest, +): + client = OnlineReturnPolicyServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_online_return_policy), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + online_return_policy.OnlineReturnPolicy( + name="name_value", + return_policy_id="return_policy_id_value", + label="label_value", + countries=["countries_value"], + return_methods=[ + online_return_policy.OnlineReturnPolicy.ReturnMethod.BY_MAIL + ], + item_conditions=[ + online_return_policy.OnlineReturnPolicy.ItemCondition.NEW + ], + return_policy_uri="return_policy_uri_value", + accept_defective_only=True, + process_refund_days=2034, + accept_exchange=True, + ) + ) + response = await client.get_online_return_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = online_return_policy.GetOnlineReturnPolicyRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, online_return_policy.OnlineReturnPolicy) + assert response.name == "name_value" + assert response.return_policy_id == "return_policy_id_value" + assert response.label == "label_value" + assert response.countries == ["countries_value"] + assert response.return_methods == [ + online_return_policy.OnlineReturnPolicy.ReturnMethod.BY_MAIL + ] + assert response.item_conditions == [ + online_return_policy.OnlineReturnPolicy.ItemCondition.NEW + ] + assert response.return_policy_uri == "return_policy_uri_value" + assert response.accept_defective_only is True + assert response.process_refund_days == 2034 + assert response.accept_exchange is True + + +@pytest.mark.asyncio +async def test_get_online_return_policy_async_from_dict(): + await test_get_online_return_policy_async(request_type=dict) + + +def test_get_online_return_policy_field_headers(): + client = OnlineReturnPolicyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = online_return_policy.GetOnlineReturnPolicyRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_online_return_policy), "__call__" + ) as call: + call.return_value = online_return_policy.OnlineReturnPolicy() + client.get_online_return_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_online_return_policy_field_headers_async(): + client = OnlineReturnPolicyServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = online_return_policy.GetOnlineReturnPolicyRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_online_return_policy), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + online_return_policy.OnlineReturnPolicy() + ) + await client.get_online_return_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_get_online_return_policy_flattened(): + client = OnlineReturnPolicyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_online_return_policy), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = online_return_policy.OnlineReturnPolicy() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_online_return_policy( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_get_online_return_policy_flattened_error(): + client = OnlineReturnPolicyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_online_return_policy( + online_return_policy.GetOnlineReturnPolicyRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_get_online_return_policy_flattened_async(): + client = OnlineReturnPolicyServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_online_return_policy), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = online_return_policy.OnlineReturnPolicy() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + online_return_policy.OnlineReturnPolicy() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_online_return_policy( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_get_online_return_policy_flattened_error_async(): + client = OnlineReturnPolicyServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.get_online_return_policy( + online_return_policy.GetOnlineReturnPolicyRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + online_return_policy.ListOnlineReturnPoliciesRequest, + dict, + ], +) +def test_list_online_return_policies(request_type, transport: str = "grpc"): + client = OnlineReturnPolicyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_online_return_policies), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = online_return_policy.ListOnlineReturnPoliciesResponse( + next_page_token="next_page_token_value", + ) + response = client.list_online_return_policies(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = online_return_policy.ListOnlineReturnPoliciesRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListOnlineReturnPoliciesPager) + assert response.next_page_token == "next_page_token_value" + + +def test_list_online_return_policies_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = OnlineReturnPolicyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_online_return_policies), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.list_online_return_policies() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == online_return_policy.ListOnlineReturnPoliciesRequest() + + +def test_list_online_return_policies_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = OnlineReturnPolicyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = online_return_policy.ListOnlineReturnPoliciesRequest( + parent="parent_value", + page_token="page_token_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_online_return_policies), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.list_online_return_policies(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == online_return_policy.ListOnlineReturnPoliciesRequest( + parent="parent_value", + page_token="page_token_value", + ) + + +def test_list_online_return_policies_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = OnlineReturnPolicyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.list_online_return_policies + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.list_online_return_policies + ] = mock_rpc + request = {} + client.list_online_return_policies(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_online_return_policies(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_list_online_return_policies_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = OnlineReturnPolicyServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_online_return_policies), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + online_return_policy.ListOnlineReturnPoliciesResponse( + next_page_token="next_page_token_value", + ) + ) + response = await client.list_online_return_policies() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == online_return_policy.ListOnlineReturnPoliciesRequest() + + +@pytest.mark.asyncio +async def test_list_online_return_policies_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = OnlineReturnPolicyServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.list_online_return_policies + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[ + client._client._transport.list_online_return_policies + ] = mock_object + + request = {} + await client.list_online_return_policies(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + await client.list_online_return_policies(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + +@pytest.mark.asyncio +async def test_list_online_return_policies_async( + transport: str = "grpc_asyncio", + request_type=online_return_policy.ListOnlineReturnPoliciesRequest, +): + client = OnlineReturnPolicyServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_online_return_policies), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + online_return_policy.ListOnlineReturnPoliciesResponse( + next_page_token="next_page_token_value", + ) + ) + response = await client.list_online_return_policies(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = online_return_policy.ListOnlineReturnPoliciesRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListOnlineReturnPoliciesAsyncPager) + assert response.next_page_token == "next_page_token_value" + + +@pytest.mark.asyncio +async def test_list_online_return_policies_async_from_dict(): + await test_list_online_return_policies_async(request_type=dict) + + +def test_list_online_return_policies_field_headers(): + client = OnlineReturnPolicyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. 
Set these to a non-empty value. + request = online_return_policy.ListOnlineReturnPoliciesRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_online_return_policies), "__call__" + ) as call: + call.return_value = online_return_policy.ListOnlineReturnPoliciesResponse() + client.list_online_return_policies(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_online_return_policies_field_headers_async(): + client = OnlineReturnPolicyServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = online_return_policy.ListOnlineReturnPoliciesRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_online_return_policies), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + online_return_policy.ListOnlineReturnPoliciesResponse() + ) + await client.list_online_return_policies(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_list_online_return_policies_flattened(): + client = OnlineReturnPolicyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_online_return_policies), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = online_return_policy.ListOnlineReturnPoliciesResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_online_return_policies( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +def test_list_online_return_policies_flattened_error(): + client = OnlineReturnPolicyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_online_return_policies( + online_return_policy.ListOnlineReturnPoliciesRequest(), + parent="parent_value", + ) + + +@pytest.mark.asyncio +async def test_list_online_return_policies_flattened_async(): + client = OnlineReturnPolicyServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_online_return_policies), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = online_return_policy.ListOnlineReturnPoliciesResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + online_return_policy.ListOnlineReturnPoliciesResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_online_return_policies( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_list_online_return_policies_flattened_error_async(): + client = OnlineReturnPolicyServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.list_online_return_policies( + online_return_policy.ListOnlineReturnPoliciesRequest(), + parent="parent_value", + ) + + +def test_list_online_return_policies_pager(transport_name: str = "grpc"): + client = OnlineReturnPolicyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_online_return_policies), "__call__" + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + online_return_policy.ListOnlineReturnPoliciesResponse( + online_return_policies=[ + online_return_policy.OnlineReturnPolicy(), + online_return_policy.OnlineReturnPolicy(), + online_return_policy.OnlineReturnPolicy(), + ], + next_page_token="abc", + ), + online_return_policy.ListOnlineReturnPoliciesResponse( + online_return_policies=[], + next_page_token="def", + ), + online_return_policy.ListOnlineReturnPoliciesResponse( + online_return_policies=[ + online_return_policy.OnlineReturnPolicy(), + ], + next_page_token="ghi", + ), + online_return_policy.ListOnlineReturnPoliciesResponse( + online_return_policies=[ + online_return_policy.OnlineReturnPolicy(), + online_return_policy.OnlineReturnPolicy(), + ], + ), + RuntimeError, + ) + + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + ) + pager = client.list_online_return_policies(request={}) + + assert pager._metadata == expected_metadata + + results = list(pager) + assert len(results) == 6 + assert all( + isinstance(i, online_return_policy.OnlineReturnPolicy) for i in results + ) + + +def test_list_online_return_policies_pages(transport_name: str = "grpc"): + client = OnlineReturnPolicyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_online_return_policies), "__call__" + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + online_return_policy.ListOnlineReturnPoliciesResponse( + online_return_policies=[ + online_return_policy.OnlineReturnPolicy(), + online_return_policy.OnlineReturnPolicy(), + online_return_policy.OnlineReturnPolicy(), + ], + next_page_token="abc", + ), + online_return_policy.ListOnlineReturnPoliciesResponse( + online_return_policies=[], + next_page_token="def", + ), + online_return_policy.ListOnlineReturnPoliciesResponse( + online_return_policies=[ + online_return_policy.OnlineReturnPolicy(), + ], + next_page_token="ghi", + ), + online_return_policy.ListOnlineReturnPoliciesResponse( + online_return_policies=[ + online_return_policy.OnlineReturnPolicy(), + online_return_policy.OnlineReturnPolicy(), + ], + ), + RuntimeError, + ) + pages = list(client.list_online_return_policies(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_list_online_return_policies_async_pager(): + client = OnlineReturnPolicyServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_online_return_policies), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + online_return_policy.ListOnlineReturnPoliciesResponse( + online_return_policies=[ + online_return_policy.OnlineReturnPolicy(), + online_return_policy.OnlineReturnPolicy(), + online_return_policy.OnlineReturnPolicy(), + ], + next_page_token="abc", + ), + online_return_policy.ListOnlineReturnPoliciesResponse( + online_return_policies=[], + next_page_token="def", + ), + online_return_policy.ListOnlineReturnPoliciesResponse( + online_return_policies=[ + online_return_policy.OnlineReturnPolicy(), + ], + next_page_token="ghi", + ), + online_return_policy.ListOnlineReturnPoliciesResponse( + online_return_policies=[ + online_return_policy.OnlineReturnPolicy(), + online_return_policy.OnlineReturnPolicy(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_online_return_policies( + request={}, + ) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all( + isinstance(i, online_return_policy.OnlineReturnPolicy) for i in responses + ) + + +@pytest.mark.asyncio +async def test_list_online_return_policies_async_pages(): + client = OnlineReturnPolicyServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_online_return_policies), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + online_return_policy.ListOnlineReturnPoliciesResponse( + online_return_policies=[ + online_return_policy.OnlineReturnPolicy(), + online_return_policy.OnlineReturnPolicy(), + online_return_policy.OnlineReturnPolicy(), + ], + next_page_token="abc", + ), + online_return_policy.ListOnlineReturnPoliciesResponse( + online_return_policies=[], + next_page_token="def", + ), + online_return_policy.ListOnlineReturnPoliciesResponse( + online_return_policies=[ + online_return_policy.OnlineReturnPolicy(), + ], + next_page_token="ghi", + ), + online_return_policy.ListOnlineReturnPoliciesResponse( + online_return_policies=[ + online_return_policy.OnlineReturnPolicy(), + online_return_policy.OnlineReturnPolicy(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_online_return_policies(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + online_return_policy.GetOnlineReturnPolicyRequest, + dict, + ], +) +def test_get_online_return_policy_rest(request_type): + client = OnlineReturnPolicyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "accounts/sample1/onlineReturnPolicies/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = online_return_policy.OnlineReturnPolicy( + name="name_value", + return_policy_id="return_policy_id_value", + label="label_value", + countries=["countries_value"], + return_methods=[ + online_return_policy.OnlineReturnPolicy.ReturnMethod.BY_MAIL + ], + item_conditions=[online_return_policy.OnlineReturnPolicy.ItemCondition.NEW], + return_policy_uri="return_policy_uri_value", + accept_defective_only=True, + process_refund_days=2034, + accept_exchange=True, + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = online_return_policy.OnlineReturnPolicy.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_online_return_policy(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, online_return_policy.OnlineReturnPolicy) + assert response.name == "name_value" + assert response.return_policy_id == "return_policy_id_value" + assert response.label == "label_value" + assert response.countries == ["countries_value"] + assert response.return_methods == [ + online_return_policy.OnlineReturnPolicy.ReturnMethod.BY_MAIL + ] + assert response.item_conditions == [ + online_return_policy.OnlineReturnPolicy.ItemCondition.NEW + ] + assert response.return_policy_uri == "return_policy_uri_value" + assert response.accept_defective_only is True + assert response.process_refund_days == 2034 + assert response.accept_exchange is True + + +def test_get_online_return_policy_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = OnlineReturnPolicyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.get_online_return_policy + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.get_online_return_policy + ] = mock_rpc + + request = {} + client.get_online_return_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.get_online_return_policy(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_get_online_return_policy_rest_required_fields( + request_type=online_return_policy.GetOnlineReturnPolicyRequest, +): + transport_class = transports.OnlineReturnPolicyServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_online_return_policy._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_online_return_policy._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = OnlineReturnPolicyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = online_return_policy.OnlineReturnPolicy() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = online_return_policy.OnlineReturnPolicy.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_online_return_policy(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_online_return_policy_rest_unset_required_fields(): + transport = transports.OnlineReturnPolicyServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_online_return_policy._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_online_return_policy_rest_interceptors(null_interceptor): + transport = transports.OnlineReturnPolicyServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.OnlineReturnPolicyServiceRestInterceptor(), + ) + client = OnlineReturnPolicyServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.OnlineReturnPolicyServiceRestInterceptor, + "post_get_online_return_policy", + ) as post, mock.patch.object( + transports.OnlineReturnPolicyServiceRestInterceptor, + "pre_get_online_return_policy", + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = online_return_policy.GetOnlineReturnPolicyRequest.pb( + online_return_policy.GetOnlineReturnPolicyRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = online_return_policy.OnlineReturnPolicy.to_json( + online_return_policy.OnlineReturnPolicy() + ) + + request = online_return_policy.GetOnlineReturnPolicyRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = online_return_policy.OnlineReturnPolicy() + + client.get_online_return_policy( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_online_return_policy_rest_bad_request( + transport: str = "rest", + request_type=online_return_policy.GetOnlineReturnPolicyRequest, +): + client = OnlineReturnPolicyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request 
that will satisfy transcoding + request_init = {"name": "accounts/sample1/onlineReturnPolicies/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_online_return_policy(request) + + +def test_get_online_return_policy_rest_flattened(): + client = OnlineReturnPolicyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = online_return_policy.OnlineReturnPolicy() + + # get arguments that satisfy an http rule for this method + sample_request = {"name": "accounts/sample1/onlineReturnPolicies/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = online_return_policy.OnlineReturnPolicy.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_online_return_policy(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/accounts/v1beta/{name=accounts/*/onlineReturnPolicies/*}" + % client.transport._host, + args[1], + ) + + +def test_get_online_return_policy_rest_flattened_error(transport: str = "rest"): + client = OnlineReturnPolicyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_online_return_policy( + online_return_policy.GetOnlineReturnPolicyRequest(), + name="name_value", + ) + + +def test_get_online_return_policy_rest_error(): + client = OnlineReturnPolicyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + online_return_policy.ListOnlineReturnPoliciesRequest, + dict, + ], +) +def test_list_online_return_policies_rest(request_type): + client = OnlineReturnPolicyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "accounts/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = online_return_policy.ListOnlineReturnPoliciesResponse( + next_page_token="next_page_token_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = online_return_policy.ListOnlineReturnPoliciesResponse.pb( + return_value + ) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_online_return_policies(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListOnlineReturnPoliciesPager) + assert response.next_page_token == "next_page_token_value" + + +def test_list_online_return_policies_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = OnlineReturnPolicyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.list_online_return_policies + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.list_online_return_policies + ] = mock_rpc + + request = {} + client.list_online_return_policies(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_online_return_policies(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_list_online_return_policies_rest_required_fields( + request_type=online_return_policy.ListOnlineReturnPoliciesRequest, +): + transport_class = transports.OnlineReturnPolicyServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_online_return_policies._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_online_return_policies._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set( + ( + "page_size", + "page_token", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = OnlineReturnPolicyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = online_return_policy.ListOnlineReturnPoliciesResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = online_return_policy.ListOnlineReturnPoliciesResponse.pb( + return_value + ) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_online_return_policies(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_online_return_policies_rest_unset_required_fields(): + transport = transports.OnlineReturnPolicyServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.list_online_return_policies._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_online_return_policies_rest_interceptors(null_interceptor): + transport = transports.OnlineReturnPolicyServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.OnlineReturnPolicyServiceRestInterceptor(), + ) + client = OnlineReturnPolicyServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.OnlineReturnPolicyServiceRestInterceptor, + "post_list_online_return_policies", + ) as post, mock.patch.object( + transports.OnlineReturnPolicyServiceRestInterceptor, + "pre_list_online_return_policies", + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = online_return_policy.ListOnlineReturnPoliciesRequest.pb( + online_return_policy.ListOnlineReturnPoliciesRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = ( + 
online_return_policy.ListOnlineReturnPoliciesResponse.to_json( + online_return_policy.ListOnlineReturnPoliciesResponse() + ) + ) + + request = online_return_policy.ListOnlineReturnPoliciesRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = online_return_policy.ListOnlineReturnPoliciesResponse() + + client.list_online_return_policies( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_online_return_policies_rest_bad_request( + transport: str = "rest", + request_type=online_return_policy.ListOnlineReturnPoliciesRequest, +): + client = OnlineReturnPolicyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "accounts/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_online_return_policies(request) + + +def test_list_online_return_policies_rest_flattened(): + client = OnlineReturnPolicyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = online_return_policy.ListOnlineReturnPoliciesResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "accounts/sample1"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = online_return_policy.ListOnlineReturnPoliciesResponse.pb( + return_value + ) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.list_online_return_policies(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/accounts/v1beta/{parent=accounts/*}/onlineReturnPolicies" + % client.transport._host, + args[1], + ) + + +def test_list_online_return_policies_rest_flattened_error(transport: str = "rest"): + client = OnlineReturnPolicyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.list_online_return_policies( + online_return_policy.ListOnlineReturnPoliciesRequest(), + parent="parent_value", + ) + + +def test_list_online_return_policies_rest_pager(transport: str = "rest"): + client = OnlineReturnPolicyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + online_return_policy.ListOnlineReturnPoliciesResponse( + online_return_policies=[ + online_return_policy.OnlineReturnPolicy(), + online_return_policy.OnlineReturnPolicy(), + online_return_policy.OnlineReturnPolicy(), + ], + next_page_token="abc", + ), + online_return_policy.ListOnlineReturnPoliciesResponse( + online_return_policies=[], + next_page_token="def", + ), + online_return_policy.ListOnlineReturnPoliciesResponse( + online_return_policies=[ + online_return_policy.OnlineReturnPolicy(), + ], + next_page_token="ghi", + ), + online_return_policy.ListOnlineReturnPoliciesResponse( + online_return_policies=[ + online_return_policy.OnlineReturnPolicy(), + online_return_policy.OnlineReturnPolicy(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + online_return_policy.ListOnlineReturnPoliciesResponse.to_json(x) + for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "accounts/sample1"} + + pager = client.list_online_return_policies(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all( + isinstance(i, online_return_policy.OnlineReturnPolicy) for i in results + ) + + pages = list(client.list_online_return_policies(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.OnlineReturnPolicyServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = OnlineReturnPolicyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.OnlineReturnPolicyServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = OnlineReturnPolicyServiceClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. + transport = transports.OnlineReturnPolicyServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = OnlineReturnPolicyServiceClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. 
+ options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = OnlineReturnPolicyServiceClient( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.OnlineReturnPolicyServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = OnlineReturnPolicyServiceClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.OnlineReturnPolicyServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = OnlineReturnPolicyServiceClient(transport=transport) + assert client.transport is transport + + +def test_transport_get_channel(): + # A client may be instantiated with a custom transport instance. + transport = transports.OnlineReturnPolicyServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + transport = transports.OnlineReturnPolicyServiceGrpcAsyncIOTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.OnlineReturnPolicyServiceGrpcTransport, + transports.OnlineReturnPolicyServiceGrpcAsyncIOTransport, + transports.OnlineReturnPolicyServiceRestTransport, + ], +) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "rest", + ], +) +def test_transport_kind(transport_name): + transport = OnlineReturnPolicyServiceClient.get_transport_class(transport_name)( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert transport.kind == transport_name + + +def test_transport_grpc_default(): + # A client should use the gRPC transport by default. + client = OnlineReturnPolicyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert isinstance( + client.transport, + transports.OnlineReturnPolicyServiceGrpcTransport, + ) + + +def test_online_return_policy_service_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.OnlineReturnPolicyServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json", + ) + + +def test_online_return_policy_service_base_transport(): + # Instantiate the base transport. + with mock.patch( + "google.shopping.merchant_accounts_v1beta.services.online_return_policy_service.transports.OnlineReturnPolicyServiceTransport.__init__" + ) as Transport: + Transport.return_value = None + transport = transports.OnlineReturnPolicyServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. 
+ methods = ( + "get_online_return_policy", + "list_online_return_policies", + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + with pytest.raises(NotImplementedError): + transport.close() + + # Catch all for all remaining methods and properties + remainder = [ + "kind", + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + + +def test_online_return_policy_service_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch( + "google.shopping.merchant_accounts_v1beta.services.online_return_policy_service.transports.OnlineReturnPolicyServiceTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.OnlineReturnPolicyServiceTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with( + "credentials.json", + scopes=None, + default_scopes=("https://www.googleapis.com/auth/content",), + quota_project_id="octopus", + ) + + +def test_online_return_policy_service_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. + with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( + "google.shopping.merchant_accounts_v1beta.services.online_return_policy_service.transports.OnlineReturnPolicyServiceTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.OnlineReturnPolicyServiceTransport() + adc.assert_called_once() + + +def test_online_return_policy_service_auth_adc(): + # If no credentials are provided, we should use ADC credentials. + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + OnlineReturnPolicyServiceClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=("https://www.googleapis.com/auth/content",), + quota_project_id=None, + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.OnlineReturnPolicyServiceGrpcTransport, + transports.OnlineReturnPolicyServiceGrpcAsyncIOTransport, + ], +) +def test_online_return_policy_service_transport_auth_adc(transport_class): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
+ with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + adc.assert_called_once_with( + scopes=["1", "2"], + default_scopes=("https://www.googleapis.com/auth/content",), + quota_project_id="octopus", + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.OnlineReturnPolicyServiceGrpcTransport, + transports.OnlineReturnPolicyServiceGrpcAsyncIOTransport, + transports.OnlineReturnPolicyServiceRestTransport, + ], +) +def test_online_return_policy_service_transport_auth_gdch_credentials(transport_class): + host = "https://language.com" + api_audience_tests = [None, "https://language2.com"] + api_audience_expect = [host, "https://language2.com"] + for t, e in zip(api_audience_tests, api_audience_expect): + with mock.patch.object(google.auth, "default", autospec=True) as adc: + gdch_mock = mock.MagicMock() + type(gdch_mock).with_gdch_audience = mock.PropertyMock( + return_value=gdch_mock + ) + adc.return_value = (gdch_mock, None) + transport_class(host=host, api_audience=t) + gdch_mock.with_gdch_audience.assert_called_once_with(e) + + +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.OnlineReturnPolicyServiceGrpcTransport, grpc_helpers), + (transports.OnlineReturnPolicyServiceGrpcAsyncIOTransport, grpc_helpers_async), + ], +) +def test_online_return_policy_service_transport_create_channel( + transport_class, grpc_helpers +): + # If credentials and host are not provided, the transport class should use + # ADC credentials. + with mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + + create_channel.assert_called_with( + "merchantapi.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + default_scopes=("https://www.googleapis.com/auth/content",), + scopes=["1", "2"], + default_host="merchantapi.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.OnlineReturnPolicyServiceGrpcTransport, + transports.OnlineReturnPolicyServiceGrpcAsyncIOTransport, + ], +) +def test_online_return_policy_service_grpc_transport_client_cert_source_for_mtls( + transport_class, +): + cred = ga_credentials.AnonymousCredentials() + + # Check ssl_channel_credentials is used if provided. 
+ with mock.patch.object(transport_class, "create_channel") as mock_create_channel: + mock_ssl_channel_creds = mock.Mock() + transport_class( + host="squid.clam.whelk", + credentials=cred, + ssl_channel_credentials=mock_ssl_channel_creds, + ) + mock_create_channel.assert_called_once_with( + "squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_channel_creds, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls + # is used. + with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): + with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: + transport_class( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback, + ) + expected_cert, expected_key = client_cert_source_callback() + mock_ssl_cred.assert_called_once_with( + certificate_chain=expected_cert, private_key=expected_key + ) + + +def test_online_return_policy_service_http_transport_client_cert_source_for_mtls(): + cred = ga_credentials.AnonymousCredentials() + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ) as mock_configure_mtls_channel: + transports.OnlineReturnPolicyServiceRestTransport( + credentials=cred, client_cert_source_for_mtls=client_cert_source_callback + ) + mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + "rest", + ], +) +def test_online_return_policy_service_host_no_port(transport_name): + client = OnlineReturnPolicyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="merchantapi.googleapis.com" + ), + transport=transport_name, + ) + assert client.transport._host == ( + "merchantapi.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://merchantapi.googleapis.com" + ) + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + "rest", + ], +) +def test_online_return_policy_service_host_with_port(transport_name): + client = OnlineReturnPolicyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="merchantapi.googleapis.com:8000" + ), + transport=transport_name, + ) + assert client.transport._host == ( + "merchantapi.googleapis.com:8000" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://merchantapi.googleapis.com:8000" + ) + + +@pytest.mark.parametrize( + "transport_name", + [ + "rest", + ], +) +def test_online_return_policy_service_client_transport_session_collision( + transport_name, +): + creds1 = ga_credentials.AnonymousCredentials() + creds2 = ga_credentials.AnonymousCredentials() + client1 = OnlineReturnPolicyServiceClient( + credentials=creds1, + transport=transport_name, + ) + client2 = OnlineReturnPolicyServiceClient( + credentials=creds2, + transport=transport_name, + ) + session1 = client1.transport.get_online_return_policy._session + session2 = client2.transport.get_online_return_policy._session + assert session1 != session2 + session1 = client1.transport.list_online_return_policies._session + session2 = 
client2.transport.list_online_return_policies._session + assert session1 != session2 + + +def test_online_return_policy_service_grpc_transport_channel(): + channel = grpc.secure_channel("http://localhost/", grpc.local_channel_credentials()) + + # Check that channel is used if provided. + transport = transports.OnlineReturnPolicyServiceGrpcTransport( + host="squid.clam.whelk", + channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None + + +def test_online_return_policy_service_grpc_asyncio_transport_channel(): + channel = aio.secure_channel("http://localhost/", grpc.local_channel_credentials()) + + # Check that channel is used if provided. + transport = transports.OnlineReturnPolicyServiceGrpcAsyncIOTransport( + host="squid.clam.whelk", + channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. +@pytest.mark.parametrize( + "transport_class", + [ + transports.OnlineReturnPolicyServiceGrpcTransport, + transports.OnlineReturnPolicyServiceGrpcAsyncIOTransport, + ], +) +def test_online_return_policy_service_transport_channel_mtls_with_client_cert_source( + transport_class, +): + with mock.patch( + "grpc.ssl_channel_credentials", autospec=True + ) as grpc_ssl_channel_cred: + with mock.patch.object( + transport_class, "create_channel" + ) as grpc_create_channel: + mock_ssl_cred = mock.Mock() + grpc_ssl_channel_cred.return_value = mock_ssl_cred + + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + + cred = ga_credentials.AnonymousCredentials() + with pytest.warns(DeprecationWarning): + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (cred, None) + transport = transport_class( + host="squid.clam.whelk", + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=client_cert_source_callback, + ) + adc.assert_called_once() + + grpc_ssl_channel_cred.assert_called_once_with( + certificate_chain=b"cert bytes", private_key=b"key bytes" + ) + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + assert transport._ssl_channel_credentials == mock_ssl_cred + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. 
+@pytest.mark.parametrize( + "transport_class", + [ + transports.OnlineReturnPolicyServiceGrpcTransport, + transports.OnlineReturnPolicyServiceGrpcAsyncIOTransport, + ], +) +def test_online_return_policy_service_transport_channel_mtls_with_adc(transport_class): + mock_ssl_cred = mock.Mock() + with mock.patch.multiple( + "google.auth.transport.grpc.SslCredentials", + __init__=mock.Mock(return_value=None), + ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), + ): + with mock.patch.object( + transport_class, "create_channel" + ) as grpc_create_channel: + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + mock_cred = mock.Mock() + + with pytest.warns(DeprecationWarning): + transport = transport_class( + host="squid.clam.whelk", + credentials=mock_cred, + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=None, + ) + + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=mock_cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + + +def test_online_return_policy_path(): + account = "squid" + return_policy = "clam" + expected = "accounts/{account}/onlineReturnPolicies/{return_policy}".format( + account=account, + return_policy=return_policy, + ) + actual = OnlineReturnPolicyServiceClient.online_return_policy_path( + account, return_policy + ) + assert expected == actual + + +def test_parse_online_return_policy_path(): + expected = { + "account": "whelk", + "return_policy": "octopus", + } + path = OnlineReturnPolicyServiceClient.online_return_policy_path(**expected) + + # Check that the path construction is reversible. + actual = OnlineReturnPolicyServiceClient.parse_online_return_policy_path(path) + assert expected == actual + + +def test_common_billing_account_path(): + billing_account = "oyster" + expected = "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + actual = OnlineReturnPolicyServiceClient.common_billing_account_path( + billing_account + ) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "nudibranch", + } + path = OnlineReturnPolicyServiceClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. + actual = OnlineReturnPolicyServiceClient.parse_common_billing_account_path(path) + assert expected == actual + + +def test_common_folder_path(): + folder = "cuttlefish" + expected = "folders/{folder}".format( + folder=folder, + ) + actual = OnlineReturnPolicyServiceClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "mussel", + } + path = OnlineReturnPolicyServiceClient.common_folder_path(**expected) + + # Check that the path construction is reversible. 
+ actual = OnlineReturnPolicyServiceClient.parse_common_folder_path(path) + assert expected == actual + + +def test_common_organization_path(): + organization = "winkle" + expected = "organizations/{organization}".format( + organization=organization, + ) + actual = OnlineReturnPolicyServiceClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "nautilus", + } + path = OnlineReturnPolicyServiceClient.common_organization_path(**expected) + + # Check that the path construction is reversible. + actual = OnlineReturnPolicyServiceClient.parse_common_organization_path(path) + assert expected == actual + + +def test_common_project_path(): + project = "scallop" + expected = "projects/{project}".format( + project=project, + ) + actual = OnlineReturnPolicyServiceClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "abalone", + } + path = OnlineReturnPolicyServiceClient.common_project_path(**expected) + + # Check that the path construction is reversible. + actual = OnlineReturnPolicyServiceClient.parse_common_project_path(path) + assert expected == actual + + +def test_common_location_path(): + project = "squid" + location = "clam" + expected = "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) + actual = OnlineReturnPolicyServiceClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "whelk", + "location": "octopus", + } + path = OnlineReturnPolicyServiceClient.common_location_path(**expected) + + # Check that the path construction is reversible. + actual = OnlineReturnPolicyServiceClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object( + transports.OnlineReturnPolicyServiceTransport, "_prep_wrapped_messages" + ) as prep: + client = OnlineReturnPolicyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object( + transports.OnlineReturnPolicyServiceTransport, "_prep_wrapped_messages" + ) as prep: + transport_class = OnlineReturnPolicyServiceClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + +@pytest.mark.asyncio +async def test_transport_close_async(): + client = OnlineReturnPolicyServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + with mock.patch.object( + type(getattr(client.transport, "grpc_channel")), "close" + ) as close: + async with client: + close.assert_not_called() + close.assert_called_once() + + +def test_transport_close(): + transports = { + "rest": "_session", + "grpc": "_grpc_channel", + } + + for transport, close_name in transports.items(): + client = OnlineReturnPolicyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + with mock.patch.object( + type(getattr(client.transport, close_name)), "close" + ) as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +def test_client_ctx(): + transports = [ + "rest", + "grpc", + ] + for transport in transports: + client = 
OnlineReturnPolicyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + # Test client calls underlying transport. + with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() + + +@pytest.mark.parametrize( + "client_class,transport_class", + [ + ( + OnlineReturnPolicyServiceClient, + transports.OnlineReturnPolicyServiceGrpcTransport, + ), + ( + OnlineReturnPolicyServiceAsyncClient, + transports.OnlineReturnPolicyServiceGrpcAsyncIOTransport, + ), + ], +) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) diff --git a/packages/google-shopping-merchant-accounts/tests/unit/gapic/merchant_accounts_v1beta/test_programs_service.py b/packages/google-shopping-merchant-accounts/tests/unit/gapic/merchant_accounts_v1beta/test_programs_service.py new file mode 100644 index 000000000000..06144275fd4e --- /dev/null +++ b/packages/google-shopping-merchant-accounts/tests/unit/gapic/merchant_accounts_v1beta/test_programs_service.py @@ -0,0 +1,4895 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import os + +# try/except added for compatibility with python < 3.8 +try: + from unittest import mock + from unittest.mock import AsyncMock # pragma: NO COVER +except ImportError: # pragma: NO COVER + import mock + +from collections.abc import Iterable +import json +import math + +from google.api_core import gapic_v1, grpc_helpers, grpc_helpers_async, path_template +from google.api_core import api_core_version, client_options +from google.api_core import exceptions as core_exceptions +import google.auth +from google.auth import credentials as ga_credentials +from google.auth.exceptions import MutualTLSChannelError +from google.oauth2 import service_account +from google.protobuf import json_format +import grpc +from grpc.experimental import aio +from proto.marshal.rules import wrappers +from proto.marshal.rules.dates import DurationRule, TimestampRule +import pytest +from requests import PreparedRequest, Request, Response +from requests.sessions import Session + +from google.shopping.merchant_accounts_v1beta.services.programs_service import ( + ProgramsServiceAsyncClient, + ProgramsServiceClient, + pagers, + transports, +) +from google.shopping.merchant_accounts_v1beta.types import programs + + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + + +# If default endpoint is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. +def modify_default_endpoint(client): + return ( + "foo.googleapis.com" + if ("localhost" in client.DEFAULT_ENDPOINT) + else client.DEFAULT_ENDPOINT + ) + + +# If default endpoint template is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint template so the client can produce a different +# mtls endpoint for endpoint testing purposes. 
+def modify_default_endpoint_template(client): + return ( + "test.{UNIVERSE_DOMAIN}" + if ("localhost" in client._DEFAULT_ENDPOINT_TEMPLATE) + else client._DEFAULT_ENDPOINT_TEMPLATE + ) + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert ProgramsServiceClient._get_default_mtls_endpoint(None) is None + assert ( + ProgramsServiceClient._get_default_mtls_endpoint(api_endpoint) + == api_mtls_endpoint + ) + assert ( + ProgramsServiceClient._get_default_mtls_endpoint(api_mtls_endpoint) + == api_mtls_endpoint + ) + assert ( + ProgramsServiceClient._get_default_mtls_endpoint(sandbox_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + ProgramsServiceClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + ProgramsServiceClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi + ) + + +def test__read_environment_variables(): + assert ProgramsServiceClient._read_environment_variables() == (False, "auto", None) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + assert ProgramsServiceClient._read_environment_variables() == ( + True, + "auto", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + assert ProgramsServiceClient._read_environment_variables() == ( + False, + "auto", + None, + ) + + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError) as excinfo: + ProgramsServiceClient._read_environment_variables() + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + assert ProgramsServiceClient._read_environment_variables() == ( + False, + "never", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + assert ProgramsServiceClient._read_environment_variables() == ( + False, + "always", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}): + assert ProgramsServiceClient._read_environment_variables() == ( + False, + "auto", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + ProgramsServiceClient._read_environment_variables() + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + with mock.patch.dict(os.environ, {"GOOGLE_CLOUD_UNIVERSE_DOMAIN": "foo.com"}): + assert ProgramsServiceClient._read_environment_variables() == ( + False, + "auto", + "foo.com", + ) + + +def test__get_client_cert_source(): + mock_provided_cert_source = mock.Mock() + mock_default_cert_source = mock.Mock() + + assert ProgramsServiceClient._get_client_cert_source(None, False) is None + assert ( + ProgramsServiceClient._get_client_cert_source(mock_provided_cert_source, False) + is None + ) + assert ( + ProgramsServiceClient._get_client_cert_source(mock_provided_cert_source, True) + == mock_provided_cert_source + ) + + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", return_value=True + ): + with mock.patch( + 
"google.auth.transport.mtls.default_client_cert_source", + return_value=mock_default_cert_source, + ): + assert ( + ProgramsServiceClient._get_client_cert_source(None, True) + is mock_default_cert_source + ) + assert ( + ProgramsServiceClient._get_client_cert_source( + mock_provided_cert_source, "true" + ) + is mock_provided_cert_source + ) + + +@mock.patch.object( + ProgramsServiceClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(ProgramsServiceClient), +) +@mock.patch.object( + ProgramsServiceAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(ProgramsServiceAsyncClient), +) +def test__get_api_endpoint(): + api_override = "foo.com" + mock_client_cert_source = mock.Mock() + default_universe = ProgramsServiceClient._DEFAULT_UNIVERSE + default_endpoint = ProgramsServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=default_universe + ) + mock_universe = "bar.com" + mock_endpoint = ProgramsServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=mock_universe + ) + + assert ( + ProgramsServiceClient._get_api_endpoint( + api_override, mock_client_cert_source, default_universe, "always" + ) + == api_override + ) + assert ( + ProgramsServiceClient._get_api_endpoint( + None, mock_client_cert_source, default_universe, "auto" + ) + == ProgramsServiceClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + ProgramsServiceClient._get_api_endpoint(None, None, default_universe, "auto") + == default_endpoint + ) + assert ( + ProgramsServiceClient._get_api_endpoint(None, None, default_universe, "always") + == ProgramsServiceClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + ProgramsServiceClient._get_api_endpoint( + None, mock_client_cert_source, default_universe, "always" + ) + == ProgramsServiceClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + ProgramsServiceClient._get_api_endpoint(None, None, mock_universe, "never") + == mock_endpoint + ) + assert ( + ProgramsServiceClient._get_api_endpoint(None, None, default_universe, "never") + == default_endpoint + ) + + with pytest.raises(MutualTLSChannelError) as excinfo: + ProgramsServiceClient._get_api_endpoint( + None, mock_client_cert_source, mock_universe, "auto" + ) + assert ( + str(excinfo.value) + == "mTLS is not supported in any universe other than googleapis.com." + ) + + +def test__get_universe_domain(): + client_universe_domain = "foo.com" + universe_domain_env = "bar.com" + + assert ( + ProgramsServiceClient._get_universe_domain( + client_universe_domain, universe_domain_env + ) + == client_universe_domain + ) + assert ( + ProgramsServiceClient._get_universe_domain(None, universe_domain_env) + == universe_domain_env + ) + assert ( + ProgramsServiceClient._get_universe_domain(None, None) + == ProgramsServiceClient._DEFAULT_UNIVERSE + ) + + with pytest.raises(ValueError) as excinfo: + ProgramsServiceClient._get_universe_domain("", None) + assert str(excinfo.value) == "Universe Domain cannot be an empty string." + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (ProgramsServiceClient, transports.ProgramsServiceGrpcTransport, "grpc"), + (ProgramsServiceClient, transports.ProgramsServiceRestTransport, "rest"), + ], +) +def test__validate_universe_domain(client_class, transport_class, transport_name): + client = client_class( + transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) + ) + assert client._validate_universe_domain() == True + + # Test the case when universe is already validated. 
+ assert client._validate_universe_domain() == True + + if transport_name == "grpc": + # Test the case where credentials are provided by the + # `local_channel_credentials`. The default universes in both match. + channel = grpc.secure_channel( + "http://localhost/", grpc.local_channel_credentials() + ) + client = client_class(transport=transport_class(channel=channel)) + assert client._validate_universe_domain() == True + + # Test the case where credentials do not exist: e.g. a transport is provided + # with no credentials. Validation should still succeed because there is no + # mismatch with non-existent credentials. + channel = grpc.secure_channel( + "http://localhost/", grpc.local_channel_credentials() + ) + transport = transport_class(channel=channel) + transport._credentials = None + client = client_class(transport=transport) + assert client._validate_universe_domain() == True + + # TODO: This is needed to cater for older versions of google-auth + # Make this test unconditional once the minimum supported version of + # google-auth becomes 2.23.0 or higher. + google_auth_major, google_auth_minor = [ + int(part) for part in google.auth.__version__.split(".")[0:2] + ] + if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): + credentials = ga_credentials.AnonymousCredentials() + credentials._universe_domain = "foo.com" + # Test the case when there is a universe mismatch from the credentials. + client = client_class(transport=transport_class(credentials=credentials)) + with pytest.raises(ValueError) as excinfo: + client._validate_universe_domain() + assert ( + str(excinfo.value) + == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." + ) + + # Test the case when there is a universe mismatch from the client. + # + # TODO: Make this test unconditional once the minimum supported version of + # google-api-core becomes 2.15.0 or higher. + api_core_major, api_core_minor = [ + int(part) for part in api_core_version.__version__.split(".")[0:2] + ] + if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): + client = client_class( + client_options={"universe_domain": "bar.com"}, + transport=transport_class( + credentials=ga_credentials.AnonymousCredentials(), + ), + ) + with pytest.raises(ValueError) as excinfo: + client._validate_universe_domain() + assert ( + str(excinfo.value) + == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." 
+ ) + + # Test that ValueError is raised if universe_domain is provided via client options and credentials is None + with pytest.raises(ValueError): + client._compare_universes("foo.bar", None) + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (ProgramsServiceClient, "grpc"), + (ProgramsServiceAsyncClient, "grpc_asyncio"), + (ProgramsServiceClient, "rest"), + ], +) +def test_programs_service_client_from_service_account_info( + client_class, transport_name +): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_info" + ) as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info, transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + "merchantapi.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://merchantapi.googleapis.com" + ) + + +@pytest.mark.parametrize( + "transport_class,transport_name", + [ + (transports.ProgramsServiceGrpcTransport, "grpc"), + (transports.ProgramsServiceGrpcAsyncIOTransport, "grpc_asyncio"), + (transports.ProgramsServiceRestTransport, "rest"), + ], +) +def test_programs_service_client_service_account_always_use_jwt( + transport_class, transport_name +): + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) + + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=False) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (ProgramsServiceClient, "grpc"), + (ProgramsServiceAsyncClient, "grpc_asyncio"), + (ProgramsServiceClient, "rest"), + ], +) +def test_programs_service_client_from_service_account_file( + client_class, transport_name +): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_file" + ) as factory: + factory.return_value = creds + client = client_class.from_service_account_file( + "dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + client = client_class.from_service_account_json( + "dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + "merchantapi.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://merchantapi.googleapis.com" + ) + + +def test_programs_service_client_get_transport_class(): + transport = ProgramsServiceClient.get_transport_class() + available_transports = [ + transports.ProgramsServiceGrpcTransport, + transports.ProgramsServiceRestTransport, + ] + assert transport in available_transports + + transport = ProgramsServiceClient.get_transport_class("grpc") + assert transport == transports.ProgramsServiceGrpcTransport + + +@pytest.mark.parametrize( + 
"client_class,transport_class,transport_name", + [ + (ProgramsServiceClient, transports.ProgramsServiceGrpcTransport, "grpc"), + ( + ProgramsServiceAsyncClient, + transports.ProgramsServiceGrpcAsyncIOTransport, + "grpc_asyncio", + ), + (ProgramsServiceClient, transports.ProgramsServiceRestTransport, "rest"), + ], +) +@mock.patch.object( + ProgramsServiceClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(ProgramsServiceClient), +) +@mock.patch.object( + ProgramsServiceAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(ProgramsServiceAsyncClient), +) +def test_programs_service_client_client_options( + client_class, transport_class, transport_name +): + # Check that if channel is provided we won't create a new one. + with mock.patch.object(ProgramsServiceClient, "get_transport_class") as gtc: + transport = transport_class(credentials=ga_credentials.AnonymousCredentials()) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object(ProgramsServiceClient, "get_transport_class") as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. + options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name, client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + client = client_class(transport=transport_name) + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError) as excinfo: + client = client_class(transport=transport_name) + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + # Check the case api_endpoint is provided + options = client_options.ClientOptions( + api_audience="https://language.googleapis.com" + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience="https://language.googleapis.com", + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,use_client_cert_env", + [ + ( + ProgramsServiceClient, + transports.ProgramsServiceGrpcTransport, + "grpc", + "true", + ), + ( + ProgramsServiceAsyncClient, + transports.ProgramsServiceGrpcAsyncIOTransport, + "grpc_asyncio", + "true", + ), + ( + ProgramsServiceClient, + transports.ProgramsServiceGrpcTransport, + "grpc", + "false", + ), + ( + ProgramsServiceAsyncClient, + transports.ProgramsServiceGrpcAsyncIOTransport, + "grpc_asyncio", + "false", + ), + ( + ProgramsServiceClient, + transports.ProgramsServiceRestTransport, + "rest", + "true", + ), + ( + ProgramsServiceClient, + transports.ProgramsServiceRestTransport, + "rest", + "false", + ), + ], +) +@mock.patch.object( + ProgramsServiceClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(ProgramsServiceClient), +) +@mock.patch.object( + ProgramsServiceAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(ProgramsServiceAsyncClient), +) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_programs_service_client_mtls_env_auto( + client_class, transport_class, transport_name, use_client_cert_env +): + # This tests the endpoint autoswitch behavior. 
Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. + + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + options = client_options.ClientOptions( + client_cert_source=client_cert_source_callback + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ) + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=client_cert_source_callback, + ): + if use_client_cert_env == "false": + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ) + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case client_cert_source and ADC client cert are not provided. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class", [ProgramsServiceClient, ProgramsServiceAsyncClient] +) +@mock.patch.object( + ProgramsServiceClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(ProgramsServiceClient), +) +@mock.patch.object( + ProgramsServiceAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(ProgramsServiceAsyncClient), +) +def test_programs_service_client_get_mtls_endpoint_and_cert_source(client_class): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=mock_client_cert_source, + ): + ( + api_endpoint, + cert_source, + ) = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + client_class.get_mtls_endpoint_and_cert_source() + + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError) as excinfo: + client_class.get_mtls_endpoint_and_cert_source() + + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + + +@pytest.mark.parametrize( + "client_class", [ProgramsServiceClient, ProgramsServiceAsyncClient] +) +@mock.patch.object( + ProgramsServiceClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(ProgramsServiceClient), +) +@mock.patch.object( + ProgramsServiceAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(ProgramsServiceAsyncClient), +) +def test_programs_service_client_client_api_endpoint(client_class): + mock_client_cert_source = client_cert_source_callback + api_override = "foo.com" + default_universe = ProgramsServiceClient._DEFAULT_UNIVERSE + default_endpoint = ProgramsServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=default_universe + ) + mock_universe = "bar.com" + mock_endpoint = ProgramsServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=mock_universe + ) + + # If ClientOptions.api_endpoint is set and GOOGLE_API_USE_CLIENT_CERTIFICATE="true", + # use ClientOptions.api_endpoint as the api endpoint regardless. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ): + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=api_override + ) + client = client_class( + client_options=options, + credentials=ga_credentials.AnonymousCredentials(), + ) + assert client.api_endpoint == api_override + + # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="never", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + client = client_class(credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == default_endpoint + + # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="always", + # use the DEFAULT_MTLS_ENDPOINT as the api endpoint. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + client = client_class(credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + + # If ClientOptions.api_endpoint is not set, GOOGLE_API_USE_MTLS_ENDPOINT="auto" (default), + # GOOGLE_API_USE_CLIENT_CERTIFICATE="false" (default), default cert source doesn't exist, + # and ClientOptions.universe_domain="bar.com", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with universe domain as the api endpoint. + options = client_options.ClientOptions() + universe_exists = hasattr(options, "universe_domain") + if universe_exists: + options = client_options.ClientOptions(universe_domain=mock_universe) + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + else: + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + assert client.api_endpoint == ( + mock_endpoint if universe_exists else default_endpoint + ) + assert client.universe_domain == ( + mock_universe if universe_exists else default_universe + ) + + # If ClientOptions does not have a universe domain attribute and GOOGLE_API_USE_MTLS_ENDPOINT="never", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. + options = client_options.ClientOptions() + if hasattr(options, "universe_domain"): + delattr(options, "universe_domain") + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + assert client.api_endpoint == default_endpoint + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (ProgramsServiceClient, transports.ProgramsServiceGrpcTransport, "grpc"), + ( + ProgramsServiceAsyncClient, + transports.ProgramsServiceGrpcAsyncIOTransport, + "grpc_asyncio", + ), + (ProgramsServiceClient, transports.ProgramsServiceRestTransport, "rest"), + ], +) +def test_programs_service_client_client_options_scopes( + client_class, transport_class, transport_name +): + # Check the case scopes are provided. + options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + ProgramsServiceClient, + transports.ProgramsServiceGrpcTransport, + "grpc", + grpc_helpers, + ), + ( + ProgramsServiceAsyncClient, + transports.ProgramsServiceGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + (ProgramsServiceClient, transports.ProgramsServiceRestTransport, "rest", None), + ], +) +def test_programs_service_client_client_options_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. 
+ options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +def test_programs_service_client_client_options_from_dict(): + with mock.patch( + "google.shopping.merchant_accounts_v1beta.services.programs_service.transports.ProgramsServiceGrpcTransport.__init__" + ) as grpc_transport: + grpc_transport.return_value = None + client = ProgramsServiceClient( + client_options={"api_endpoint": "squid.clam.whelk"} + ) + grpc_transport.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + ProgramsServiceClient, + transports.ProgramsServiceGrpcTransport, + "grpc", + grpc_helpers, + ), + ( + ProgramsServiceAsyncClient, + transports.ProgramsServiceGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + ], +) +def test_programs_service_client_create_channel_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. + options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # test that the credentials from file are saved and used as the credentials. 
+ with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel" + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + file_creds = ga_credentials.AnonymousCredentials() + load_creds.return_value = (file_creds, None) + adc.return_value = (creds, None) + client = client_class(client_options=options, transport=transport_name) + create_channel.assert_called_with( + "merchantapi.googleapis.com:443", + credentials=file_creds, + credentials_file=None, + quota_project_id=None, + default_scopes=("https://www.googleapis.com/auth/content",), + scopes=None, + default_host="merchantapi.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "request_type", + [ + programs.GetProgramRequest, + dict, + ], +) +def test_get_program(request_type, transport: str = "grpc"): + client = ProgramsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_program), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = programs.Program( + name="name_value", + documentation_uri="documentation_uri_value", + state=programs.Program.State.NOT_ELIGIBLE, + active_region_codes=["active_region_codes_value"], + ) + response = client.get_program(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = programs.GetProgramRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, programs.Program) + assert response.name == "name_value" + assert response.documentation_uri == "documentation_uri_value" + assert response.state == programs.Program.State.NOT_ELIGIBLE + assert response.active_region_codes == ["active_region_codes_value"] + + +def test_get_program_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ProgramsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_program), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.get_program() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == programs.GetProgramRequest() + + +def test_get_program_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. 
+ client = ProgramsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = programs.GetProgramRequest( + name="name_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_program), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.get_program(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == programs.GetProgramRequest( + name="name_value", + ) + + +def test_get_program_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ProgramsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_program in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get_program] = mock_rpc + request = {} + client.get_program(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.get_program(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_get_program_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ProgramsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_program), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + programs.Program( + name="name_value", + documentation_uri="documentation_uri_value", + state=programs.Program.State.NOT_ELIGIBLE, + active_region_codes=["active_region_codes_value"], + ) + ) + response = await client.get_program() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == programs.GetProgramRequest() + + +@pytest.mark.asyncio +async def test_get_program_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = ProgramsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.get_program + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[ + client._client._transport.get_program + ] = mock_object + + request = {} + await client.get_program(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + await client.get_program(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + +@pytest.mark.asyncio +async def test_get_program_async( + transport: str = "grpc_asyncio", request_type=programs.GetProgramRequest +): + client = ProgramsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_program), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + programs.Program( + name="name_value", + documentation_uri="documentation_uri_value", + state=programs.Program.State.NOT_ELIGIBLE, + active_region_codes=["active_region_codes_value"], + ) + ) + response = await client.get_program(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = programs.GetProgramRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, programs.Program) + assert response.name == "name_value" + assert response.documentation_uri == "documentation_uri_value" + assert response.state == programs.Program.State.NOT_ELIGIBLE + assert response.active_region_codes == ["active_region_codes_value"] + + +@pytest.mark.asyncio +async def test_get_program_async_from_dict(): + await test_get_program_async(request_type=dict) + + +def test_get_program_field_headers(): + client = ProgramsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = programs.GetProgramRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_program), "__call__") as call: + call.return_value = programs.Program() + client.get_program(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_program_field_headers_async(): + client = ProgramsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = programs.GetProgramRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_program), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(programs.Program()) + await client.get_program(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_get_program_flattened(): + client = ProgramsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_program), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = programs.Program() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_program( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_get_program_flattened_error(): + client = ProgramsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get_program( + programs.GetProgramRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_get_program_flattened_async(): + client = ProgramsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_program), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = programs.Program() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(programs.Program()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_program( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_get_program_flattened_error_async(): + client = ProgramsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.get_program( + programs.GetProgramRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + programs.ListProgramsRequest, + dict, + ], +) +def test_list_programs(request_type, transport: str = "grpc"): + client = ProgramsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_programs), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = programs.ListProgramsResponse( + next_page_token="next_page_token_value", + ) + response = client.list_programs(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = programs.ListProgramsRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListProgramsPager) + assert response.next_page_token == "next_page_token_value" + + +def test_list_programs_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ProgramsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_programs), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.list_programs() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == programs.ListProgramsRequest() + + +def test_list_programs_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. 
+ client = ProgramsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = programs.ListProgramsRequest( + parent="parent_value", + page_token="page_token_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_programs), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.list_programs(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == programs.ListProgramsRequest( + parent="parent_value", + page_token="page_token_value", + ) + + +def test_list_programs_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ProgramsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_programs in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.list_programs] = mock_rpc + request = {} + client.list_programs(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_programs(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_list_programs_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ProgramsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_programs), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + programs.ListProgramsResponse( + next_page_token="next_page_token_value", + ) + ) + response = await client.list_programs() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == programs.ListProgramsRequest() + + +@pytest.mark.asyncio +async def test_list_programs_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = ProgramsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.list_programs + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[ + client._client._transport.list_programs + ] = mock_object + + request = {} + await client.list_programs(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + await client.list_programs(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + +@pytest.mark.asyncio +async def test_list_programs_async( + transport: str = "grpc_asyncio", request_type=programs.ListProgramsRequest +): + client = ProgramsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_programs), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + programs.ListProgramsResponse( + next_page_token="next_page_token_value", + ) + ) + response = await client.list_programs(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = programs.ListProgramsRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListProgramsAsyncPager) + assert response.next_page_token == "next_page_token_value" + + +@pytest.mark.asyncio +async def test_list_programs_async_from_dict(): + await test_list_programs_async(request_type=dict) + + +def test_list_programs_field_headers(): + client = ProgramsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = programs.ListProgramsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.list_programs), "__call__") as call: + call.return_value = programs.ListProgramsResponse() + client.list_programs(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_programs_field_headers_async(): + client = ProgramsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = programs.ListProgramsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_programs), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + programs.ListProgramsResponse() + ) + await client.list_programs(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_list_programs_flattened(): + client = ProgramsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_programs), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = programs.ListProgramsResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_programs( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +def test_list_programs_flattened_error(): + client = ProgramsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_programs( + programs.ListProgramsRequest(), + parent="parent_value", + ) + + +@pytest.mark.asyncio +async def test_list_programs_flattened_async(): + client = ProgramsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_programs), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = programs.ListProgramsResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + programs.ListProgramsResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_programs( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_list_programs_flattened_error_async(): + client = ProgramsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.list_programs( + programs.ListProgramsRequest(), + parent="parent_value", + ) + + +def test_list_programs_pager(transport_name: str = "grpc"): + client = ProgramsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_programs), "__call__") as call: + # Set the response to a series of pages. + call.side_effect = ( + programs.ListProgramsResponse( + programs=[ + programs.Program(), + programs.Program(), + programs.Program(), + ], + next_page_token="abc", + ), + programs.ListProgramsResponse( + programs=[], + next_page_token="def", + ), + programs.ListProgramsResponse( + programs=[ + programs.Program(), + ], + next_page_token="ghi", + ), + programs.ListProgramsResponse( + programs=[ + programs.Program(), + programs.Program(), + ], + ), + RuntimeError, + ) + + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + ) + pager = client.list_programs(request={}) + + assert pager._metadata == expected_metadata + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, programs.Program) for i in results) + + +def test_list_programs_pages(transport_name: str = "grpc"): + client = ProgramsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_programs), "__call__") as call: + # Set the response to a series of pages. + call.side_effect = ( + programs.ListProgramsResponse( + programs=[ + programs.Program(), + programs.Program(), + programs.Program(), + ], + next_page_token="abc", + ), + programs.ListProgramsResponse( + programs=[], + next_page_token="def", + ), + programs.ListProgramsResponse( + programs=[ + programs.Program(), + ], + next_page_token="ghi", + ), + programs.ListProgramsResponse( + programs=[ + programs.Program(), + programs.Program(), + ], + ), + RuntimeError, + ) + pages = list(client.list_programs(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_list_programs_async_pager(): + client = ProgramsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_programs), "__call__", new_callable=mock.AsyncMock + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + programs.ListProgramsResponse( + programs=[ + programs.Program(), + programs.Program(), + programs.Program(), + ], + next_page_token="abc", + ), + programs.ListProgramsResponse( + programs=[], + next_page_token="def", + ), + programs.ListProgramsResponse( + programs=[ + programs.Program(), + ], + next_page_token="ghi", + ), + programs.ListProgramsResponse( + programs=[ + programs.Program(), + programs.Program(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_programs( + request={}, + ) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, programs.Program) for i in responses) + + +@pytest.mark.asyncio +async def test_list_programs_async_pages(): + client = ProgramsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_programs), "__call__", new_callable=mock.AsyncMock + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + programs.ListProgramsResponse( + programs=[ + programs.Program(), + programs.Program(), + programs.Program(), + ], + next_page_token="abc", + ), + programs.ListProgramsResponse( + programs=[], + next_page_token="def", + ), + programs.ListProgramsResponse( + programs=[ + programs.Program(), + ], + next_page_token="ghi", + ), + programs.ListProgramsResponse( + programs=[ + programs.Program(), + programs.Program(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_programs(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + programs.EnableProgramRequest, + dict, + ], +) +def test_enable_program(request_type, transport: str = "grpc"): + client = ProgramsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.enable_program), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = programs.Program( + name="name_value", + documentation_uri="documentation_uri_value", + state=programs.Program.State.NOT_ELIGIBLE, + active_region_codes=["active_region_codes_value"], + ) + response = client.enable_program(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = programs.EnableProgramRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, programs.Program) + assert response.name == "name_value" + assert response.documentation_uri == "documentation_uri_value" + assert response.state == programs.Program.State.NOT_ELIGIBLE + assert response.active_region_codes == ["active_region_codes_value"] + + +def test_enable_program_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ProgramsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.enable_program), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.enable_program() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == programs.EnableProgramRequest() + + +def test_enable_program_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = ProgramsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = programs.EnableProgramRequest( + name="name_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.enable_program), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.enable_program(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == programs.EnableProgramRequest( + name="name_value", + ) + + +def test_enable_program_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ProgramsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.enable_program in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.enable_program] = mock_rpc + request = {} + client.enable_program(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.enable_program(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_enable_program_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. 
+ client = ProgramsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.enable_program), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + programs.Program( + name="name_value", + documentation_uri="documentation_uri_value", + state=programs.Program.State.NOT_ELIGIBLE, + active_region_codes=["active_region_codes_value"], + ) + ) + response = await client.enable_program() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == programs.EnableProgramRequest() + + +@pytest.mark.asyncio +async def test_enable_program_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = ProgramsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.enable_program + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[ + client._client._transport.enable_program + ] = mock_object + + request = {} + await client.enable_program(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + await client.enable_program(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + +@pytest.mark.asyncio +async def test_enable_program_async( + transport: str = "grpc_asyncio", request_type=programs.EnableProgramRequest +): + client = ProgramsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.enable_program), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + programs.Program( + name="name_value", + documentation_uri="documentation_uri_value", + state=programs.Program.State.NOT_ELIGIBLE, + active_region_codes=["active_region_codes_value"], + ) + ) + response = await client.enable_program(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = programs.EnableProgramRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, programs.Program) + assert response.name == "name_value" + assert response.documentation_uri == "documentation_uri_value" + assert response.state == programs.Program.State.NOT_ELIGIBLE + assert response.active_region_codes == ["active_region_codes_value"] + + +@pytest.mark.asyncio +async def test_enable_program_async_from_dict(): + await test_enable_program_async(request_type=dict) + + +def test_enable_program_field_headers(): + client = ProgramsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = programs.EnableProgramRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.enable_program), "__call__") as call: + call.return_value = programs.Program() + client.enable_program(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_enable_program_field_headers_async(): + client = ProgramsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = programs.EnableProgramRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.enable_program), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(programs.Program()) + await client.enable_program(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_enable_program_flattened(): + client = ProgramsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.enable_program), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = programs.Program() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.enable_program( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_enable_program_flattened_error(): + client = ProgramsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.enable_program( + programs.EnableProgramRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_enable_program_flattened_async(): + client = ProgramsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.enable_program), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = programs.Program() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(programs.Program()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.enable_program( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_enable_program_flattened_error_async(): + client = ProgramsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.enable_program( + programs.EnableProgramRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + programs.DisableProgramRequest, + dict, + ], +) +def test_disable_program(request_type, transport: str = "grpc"): + client = ProgramsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.disable_program), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = programs.Program( + name="name_value", + documentation_uri="documentation_uri_value", + state=programs.Program.State.NOT_ELIGIBLE, + active_region_codes=["active_region_codes_value"], + ) + response = client.disable_program(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = programs.DisableProgramRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, programs.Program) + assert response.name == "name_value" + assert response.documentation_uri == "documentation_uri_value" + assert response.state == programs.Program.State.NOT_ELIGIBLE + assert response.active_region_codes == ["active_region_codes_value"] + + +def test_disable_program_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ProgramsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.disable_program), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.disable_program() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == programs.DisableProgramRequest() + + +def test_disable_program_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = ProgramsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = programs.DisableProgramRequest( + name="name_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.disable_program), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.disable_program(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == programs.DisableProgramRequest( + name="name_value", + ) + + +def test_disable_program_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ProgramsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.disable_program in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.disable_program] = mock_rpc + request = {} + client.disable_program(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.disable_program(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_disable_program_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ProgramsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.disable_program), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + programs.Program( + name="name_value", + documentation_uri="documentation_uri_value", + state=programs.Program.State.NOT_ELIGIBLE, + active_region_codes=["active_region_codes_value"], + ) + ) + response = await client.disable_program() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == programs.DisableProgramRequest() + + +@pytest.mark.asyncio +async def test_disable_program_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = ProgramsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.disable_program + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[ + client._client._transport.disable_program + ] = mock_object + + request = {} + await client.disable_program(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + await client.disable_program(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + +@pytest.mark.asyncio +async def test_disable_program_async( + transport: str = "grpc_asyncio", request_type=programs.DisableProgramRequest +): + client = ProgramsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.disable_program), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + programs.Program( + name="name_value", + documentation_uri="documentation_uri_value", + state=programs.Program.State.NOT_ELIGIBLE, + active_region_codes=["active_region_codes_value"], + ) + ) + response = await client.disable_program(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = programs.DisableProgramRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, programs.Program) + assert response.name == "name_value" + assert response.documentation_uri == "documentation_uri_value" + assert response.state == programs.Program.State.NOT_ELIGIBLE + assert response.active_region_codes == ["active_region_codes_value"] + + +@pytest.mark.asyncio +async def test_disable_program_async_from_dict(): + await test_disable_program_async(request_type=dict) + + +def test_disable_program_field_headers(): + client = ProgramsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = programs.DisableProgramRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.disable_program), "__call__") as call: + call.return_value = programs.Program() + client.disable_program(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_disable_program_field_headers_async(): + client = ProgramsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = programs.DisableProgramRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.disable_program), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(programs.Program()) + await client.disable_program(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_disable_program_flattened(): + client = ProgramsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.disable_program), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = programs.Program() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.disable_program( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_disable_program_flattened_error(): + client = ProgramsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.disable_program( + programs.DisableProgramRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_disable_program_flattened_async(): + client = ProgramsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.disable_program), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = programs.Program() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(programs.Program()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.disable_program( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_disable_program_flattened_error_async(): + client = ProgramsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.disable_program( + programs.DisableProgramRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + programs.GetProgramRequest, + dict, + ], +) +def test_get_program_rest(request_type): + client = ProgramsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "accounts/sample1/programs/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = programs.Program( + name="name_value", + documentation_uri="documentation_uri_value", + state=programs.Program.State.NOT_ELIGIBLE, + active_region_codes=["active_region_codes_value"], + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = programs.Program.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_program(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, programs.Program) + assert response.name == "name_value" + assert response.documentation_uri == "documentation_uri_value" + assert response.state == programs.Program.State.NOT_ELIGIBLE + assert response.active_region_codes == ["active_region_codes_value"] + + +def test_get_program_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ProgramsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_program in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get_program] = mock_rpc + + request = {} + client.get_program(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.get_program(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_get_program_rest_required_fields(request_type=programs.GetProgramRequest): + transport_class = transports.ProgramsServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_program._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_program._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = ProgramsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = programs.Program() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = programs.Program.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_program(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_program_rest_unset_required_fields(): + transport = transports.ProgramsServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_program._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_program_rest_interceptors(null_interceptor): + transport = transports.ProgramsServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.ProgramsServiceRestInterceptor(), + ) + client = ProgramsServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ProgramsServiceRestInterceptor, "post_get_program" + ) as post, mock.patch.object( + transports.ProgramsServiceRestInterceptor, "pre_get_program" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = programs.GetProgramRequest.pb(programs.GetProgramRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = programs.Program.to_json(programs.Program()) + + request = programs.GetProgramRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = programs.Program() + + client.get_program( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_program_rest_bad_request( + transport: str = "rest", request_type=programs.GetProgramRequest +): + client = ProgramsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "accounts/sample1/programs/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_program(request) + + +def test_get_program_rest_flattened(): + client = ProgramsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = programs.Program() + + # get arguments that satisfy an http rule for this method + sample_request = {"name": "accounts/sample1/programs/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = programs.Program.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_program(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/accounts/v1beta/{name=accounts/*/programs/*}" % client.transport._host, + args[1], + ) + + +def test_get_program_rest_flattened_error(transport: str = "rest"): + client = ProgramsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_program( + programs.GetProgramRequest(), + name="name_value", + ) + + +def test_get_program_rest_error(): + client = ProgramsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + programs.ListProgramsRequest, + dict, + ], +) +def test_list_programs_rest(request_type): + client = ProgramsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "accounts/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = programs.ListProgramsResponse( + next_page_token="next_page_token_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = programs.ListProgramsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_programs(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListProgramsPager) + assert response.next_page_token == "next_page_token_value" + + +def test_list_programs_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ProgramsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_programs in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.list_programs] = mock_rpc + + request = {} + client.list_programs(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_programs(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_list_programs_rest_required_fields(request_type=programs.ListProgramsRequest): + transport_class = transports.ProgramsServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_programs._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_programs._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "page_size", + "page_token", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = ProgramsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = programs.ListProgramsResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = programs.ListProgramsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_programs(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_programs_rest_unset_required_fields(): + transport = transports.ProgramsServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.list_programs._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_programs_rest_interceptors(null_interceptor): + transport = transports.ProgramsServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.ProgramsServiceRestInterceptor(), + ) + client = ProgramsServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ProgramsServiceRestInterceptor, "post_list_programs" + ) as post, mock.patch.object( + transports.ProgramsServiceRestInterceptor, "pre_list_programs" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = programs.ListProgramsRequest.pb(programs.ListProgramsRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = programs.ListProgramsResponse.to_json( + programs.ListProgramsResponse() + ) + + request = programs.ListProgramsRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = programs.ListProgramsResponse() + + client.list_programs( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_programs_rest_bad_request( + transport: str = "rest", request_type=programs.ListProgramsRequest +): + client = ProgramsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "accounts/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_programs(request) + + +def test_list_programs_rest_flattened(): + client = ProgramsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = programs.ListProgramsResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "accounts/sample1"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = programs.ListProgramsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.list_programs(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/accounts/v1beta/{parent=accounts/*}/programs" % client.transport._host, + args[1], + ) + + +def test_list_programs_rest_flattened_error(transport: str = "rest"): + client = ProgramsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_programs( + programs.ListProgramsRequest(), + parent="parent_value", + ) + + +def test_list_programs_rest_pager(transport: str = "rest"): + client = ProgramsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + programs.ListProgramsResponse( + programs=[ + programs.Program(), + programs.Program(), + programs.Program(), + ], + next_page_token="abc", + ), + programs.ListProgramsResponse( + programs=[], + next_page_token="def", + ), + programs.ListProgramsResponse( + programs=[ + programs.Program(), + ], + next_page_token="ghi", + ), + programs.ListProgramsResponse( + programs=[ + programs.Program(), + programs.Program(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(programs.ListProgramsResponse.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "accounts/sample1"} + + pager = client.list_programs(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, programs.Program) for i in results) + + pages = list(client.list_programs(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + programs.EnableProgramRequest, + dict, + ], +) +def test_enable_program_rest(request_type): + client = ProgramsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "accounts/sample1/programs/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = programs.Program( + name="name_value", + documentation_uri="documentation_uri_value", + state=programs.Program.State.NOT_ELIGIBLE, + active_region_codes=["active_region_codes_value"], + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = programs.Program.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.enable_program(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, programs.Program) + assert response.name == "name_value" + assert response.documentation_uri == "documentation_uri_value" + assert response.state == programs.Program.State.NOT_ELIGIBLE + assert response.active_region_codes == ["active_region_codes_value"] + + +def test_enable_program_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ProgramsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.enable_program in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.enable_program] = mock_rpc + + request = {} + client.enable_program(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.enable_program(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_enable_program_rest_required_fields( + request_type=programs.EnableProgramRequest, +): + transport_class = transports.ProgramsServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).enable_program._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).enable_program._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = ProgramsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = programs.Program() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = programs.Program.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.enable_program(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_enable_program_rest_unset_required_fields(): + transport = transports.ProgramsServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.enable_program._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_enable_program_rest_interceptors(null_interceptor): + transport = transports.ProgramsServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.ProgramsServiceRestInterceptor(), + ) + client = ProgramsServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ProgramsServiceRestInterceptor, "post_enable_program" + ) as post, mock.patch.object( + transports.ProgramsServiceRestInterceptor, "pre_enable_program" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = programs.EnableProgramRequest.pb(programs.EnableProgramRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = programs.Program.to_json(programs.Program()) + + request = programs.EnableProgramRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = programs.Program() + + client.enable_program( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_enable_program_rest_bad_request( + transport: str = "rest", request_type=programs.EnableProgramRequest +): + client = ProgramsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "accounts/sample1/programs/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.enable_program(request) + + +def test_enable_program_rest_flattened(): + client = ProgramsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = programs.Program() + + # get arguments that satisfy an http rule for this method + sample_request = {"name": "accounts/sample1/programs/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = programs.Program.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.enable_program(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/accounts/v1beta/{name=accounts/*/programs/*}:enable" + % client.transport._host, + args[1], + ) + + +def test_enable_program_rest_flattened_error(transport: str = "rest"): + client = ProgramsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.enable_program( + programs.EnableProgramRequest(), + name="name_value", + ) + + +def test_enable_program_rest_error(): + client = ProgramsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + programs.DisableProgramRequest, + dict, + ], +) +def test_disable_program_rest(request_type): + client = ProgramsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "accounts/sample1/programs/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = programs.Program( + name="name_value", + documentation_uri="documentation_uri_value", + state=programs.Program.State.NOT_ELIGIBLE, + active_region_codes=["active_region_codes_value"], + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = programs.Program.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.disable_program(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, programs.Program) + assert response.name == "name_value" + assert response.documentation_uri == "documentation_uri_value" + assert response.state == programs.Program.State.NOT_ELIGIBLE + assert response.active_region_codes == ["active_region_codes_value"] + + +def test_disable_program_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ProgramsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.disable_program in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.disable_program] = mock_rpc + + request = {} + client.disable_program(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.disable_program(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_disable_program_rest_required_fields( + request_type=programs.DisableProgramRequest, +): + transport_class = transports.ProgramsServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).disable_program._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).disable_program._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = ProgramsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = programs.Program() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = programs.Program.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.disable_program(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_disable_program_rest_unset_required_fields(): + transport = transports.ProgramsServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.disable_program._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_disable_program_rest_interceptors(null_interceptor): + transport = transports.ProgramsServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.ProgramsServiceRestInterceptor(), + ) + client = ProgramsServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ProgramsServiceRestInterceptor, "post_disable_program" + ) as post, mock.patch.object( + transports.ProgramsServiceRestInterceptor, "pre_disable_program" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = programs.DisableProgramRequest.pb(programs.DisableProgramRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = programs.Program.to_json(programs.Program()) + + request = programs.DisableProgramRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = programs.Program() + + client.disable_program( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_disable_program_rest_bad_request( + transport: str = "rest", request_type=programs.DisableProgramRequest +): + client = ProgramsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "accounts/sample1/programs/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.disable_program(request) + + +def test_disable_program_rest_flattened(): + client = ProgramsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = programs.Program() + + # get arguments that satisfy an http rule for this method + sample_request = {"name": "accounts/sample1/programs/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = programs.Program.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.disable_program(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/accounts/v1beta/{name=accounts/*/programs/*}:disable" + % client.transport._host, + args[1], + ) + + +def test_disable_program_rest_flattened_error(transport: str = "rest"): + client = ProgramsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.disable_program( + programs.DisableProgramRequest(), + name="name_value", + ) + + +def test_disable_program_rest_error(): + client = ProgramsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.ProgramsServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = ProgramsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.ProgramsServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = ProgramsServiceClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. + transport = transports.ProgramsServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = ProgramsServiceClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. 
+ options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = ProgramsServiceClient( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.ProgramsServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = ProgramsServiceClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.ProgramsServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = ProgramsServiceClient(transport=transport) + assert client.transport is transport + + +def test_transport_get_channel(): + # A client may be instantiated with a custom transport instance. + transport = transports.ProgramsServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + transport = transports.ProgramsServiceGrpcAsyncIOTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.ProgramsServiceGrpcTransport, + transports.ProgramsServiceGrpcAsyncIOTransport, + transports.ProgramsServiceRestTransport, + ], +) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "rest", + ], +) +def test_transport_kind(transport_name): + transport = ProgramsServiceClient.get_transport_class(transport_name)( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert transport.kind == transport_name + + +def test_transport_grpc_default(): + # A client should use the gRPC transport by default. + client = ProgramsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert isinstance( + client.transport, + transports.ProgramsServiceGrpcTransport, + ) + + +def test_programs_service_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.ProgramsServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json", + ) + + +def test_programs_service_base_transport(): + # Instantiate the base transport. + with mock.patch( + "google.shopping.merchant_accounts_v1beta.services.programs_service.transports.ProgramsServiceTransport.__init__" + ) as Transport: + Transport.return_value = None + transport = transports.ProgramsServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. 
+ methods = ( + "get_program", + "list_programs", + "enable_program", + "disable_program", + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + with pytest.raises(NotImplementedError): + transport.close() + + # Catch all for all remaining methods and properties + remainder = [ + "kind", + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + + +def test_programs_service_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch( + "google.shopping.merchant_accounts_v1beta.services.programs_service.transports.ProgramsServiceTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.ProgramsServiceTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with( + "credentials.json", + scopes=None, + default_scopes=("https://www.googleapis.com/auth/content",), + quota_project_id="octopus", + ) + + +def test_programs_service_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. + with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( + "google.shopping.merchant_accounts_v1beta.services.programs_service.transports.ProgramsServiceTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.ProgramsServiceTransport() + adc.assert_called_once() + + +def test_programs_service_auth_adc(): + # If no credentials are provided, we should use ADC credentials. + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + ProgramsServiceClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=("https://www.googleapis.com/auth/content",), + quota_project_id=None, + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.ProgramsServiceGrpcTransport, + transports.ProgramsServiceGrpcAsyncIOTransport, + ], +) +def test_programs_service_transport_auth_adc(transport_class): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
+ with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + adc.assert_called_once_with( + scopes=["1", "2"], + default_scopes=("https://www.googleapis.com/auth/content",), + quota_project_id="octopus", + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.ProgramsServiceGrpcTransport, + transports.ProgramsServiceGrpcAsyncIOTransport, + transports.ProgramsServiceRestTransport, + ], +) +def test_programs_service_transport_auth_gdch_credentials(transport_class): + host = "https://language.com" + api_audience_tests = [None, "https://language2.com"] + api_audience_expect = [host, "https://language2.com"] + for t, e in zip(api_audience_tests, api_audience_expect): + with mock.patch.object(google.auth, "default", autospec=True) as adc: + gdch_mock = mock.MagicMock() + type(gdch_mock).with_gdch_audience = mock.PropertyMock( + return_value=gdch_mock + ) + adc.return_value = (gdch_mock, None) + transport_class(host=host, api_audience=t) + gdch_mock.with_gdch_audience.assert_called_once_with(e) + + +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.ProgramsServiceGrpcTransport, grpc_helpers), + (transports.ProgramsServiceGrpcAsyncIOTransport, grpc_helpers_async), + ], +) +def test_programs_service_transport_create_channel(transport_class, grpc_helpers): + # If credentials and host are not provided, the transport class should use + # ADC credentials. + with mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + + create_channel.assert_called_with( + "merchantapi.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + default_scopes=("https://www.googleapis.com/auth/content",), + scopes=["1", "2"], + default_host="merchantapi.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.ProgramsServiceGrpcTransport, + transports.ProgramsServiceGrpcAsyncIOTransport, + ], +) +def test_programs_service_grpc_transport_client_cert_source_for_mtls(transport_class): + cred = ga_credentials.AnonymousCredentials() + + # Check ssl_channel_credentials is used if provided. + with mock.patch.object(transport_class, "create_channel") as mock_create_channel: + mock_ssl_channel_creds = mock.Mock() + transport_class( + host="squid.clam.whelk", + credentials=cred, + ssl_channel_credentials=mock_ssl_channel_creds, + ) + mock_create_channel.assert_called_once_with( + "squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_channel_creds, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls + # is used. 
+ with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): + with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: + transport_class( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback, + ) + expected_cert, expected_key = client_cert_source_callback() + mock_ssl_cred.assert_called_once_with( + certificate_chain=expected_cert, private_key=expected_key + ) + + +def test_programs_service_http_transport_client_cert_source_for_mtls(): + cred = ga_credentials.AnonymousCredentials() + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ) as mock_configure_mtls_channel: + transports.ProgramsServiceRestTransport( + credentials=cred, client_cert_source_for_mtls=client_cert_source_callback + ) + mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + "rest", + ], +) +def test_programs_service_host_no_port(transport_name): + client = ProgramsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="merchantapi.googleapis.com" + ), + transport=transport_name, + ) + assert client.transport._host == ( + "merchantapi.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://merchantapi.googleapis.com" + ) + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + "rest", + ], +) +def test_programs_service_host_with_port(transport_name): + client = ProgramsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="merchantapi.googleapis.com:8000" + ), + transport=transport_name, + ) + assert client.transport._host == ( + "merchantapi.googleapis.com:8000" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://merchantapi.googleapis.com:8000" + ) + + +@pytest.mark.parametrize( + "transport_name", + [ + "rest", + ], +) +def test_programs_service_client_transport_session_collision(transport_name): + creds1 = ga_credentials.AnonymousCredentials() + creds2 = ga_credentials.AnonymousCredentials() + client1 = ProgramsServiceClient( + credentials=creds1, + transport=transport_name, + ) + client2 = ProgramsServiceClient( + credentials=creds2, + transport=transport_name, + ) + session1 = client1.transport.get_program._session + session2 = client2.transport.get_program._session + assert session1 != session2 + session1 = client1.transport.list_programs._session + session2 = client2.transport.list_programs._session + assert session1 != session2 + session1 = client1.transport.enable_program._session + session2 = client2.transport.enable_program._session + assert session1 != session2 + session1 = client1.transport.disable_program._session + session2 = client2.transport.disable_program._session + assert session1 != session2 + + +def test_programs_service_grpc_transport_channel(): + channel = grpc.secure_channel("http://localhost/", grpc.local_channel_credentials()) + + # Check that channel is used if provided. 
+    transport = transports.ProgramsServiceGrpcTransport(
+        host="squid.clam.whelk",
+        channel=channel,
+    )
+    assert transport.grpc_channel == channel
+    assert transport._host == "squid.clam.whelk:443"
+    assert transport._ssl_channel_credentials is None
+
+
+def test_programs_service_grpc_asyncio_transport_channel():
+    channel = aio.secure_channel("http://localhost/", grpc.local_channel_credentials())
+
+    # Check that channel is used if provided.
+    transport = transports.ProgramsServiceGrpcAsyncIOTransport(
+        host="squid.clam.whelk",
+        channel=channel,
+    )
+    assert transport.grpc_channel == channel
+    assert transport._host == "squid.clam.whelk:443"
+    assert transport._ssl_channel_credentials is None
+
+
+# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are
+# removed from grpc/grpc_asyncio transport constructor.
+@pytest.mark.parametrize(
+    "transport_class",
+    [
+        transports.ProgramsServiceGrpcTransport,
+        transports.ProgramsServiceGrpcAsyncIOTransport,
+    ],
+)
+def test_programs_service_transport_channel_mtls_with_client_cert_source(
+    transport_class,
+):
+    with mock.patch(
+        "grpc.ssl_channel_credentials", autospec=True
+    ) as grpc_ssl_channel_cred:
+        with mock.patch.object(
+            transport_class, "create_channel"
+        ) as grpc_create_channel:
+            mock_ssl_cred = mock.Mock()
+            grpc_ssl_channel_cred.return_value = mock_ssl_cred
+
+            mock_grpc_channel = mock.Mock()
+            grpc_create_channel.return_value = mock_grpc_channel
+
+            cred = ga_credentials.AnonymousCredentials()
+            with pytest.warns(DeprecationWarning):
+                with mock.patch.object(google.auth, "default") as adc:
+                    adc.return_value = (cred, None)
+                    transport = transport_class(
+                        host="squid.clam.whelk",
+                        api_mtls_endpoint="mtls.squid.clam.whelk",
+                        client_cert_source=client_cert_source_callback,
+                    )
+                    adc.assert_called_once()
+
+            grpc_ssl_channel_cred.assert_called_once_with(
+                certificate_chain=b"cert bytes", private_key=b"key bytes"
+            )
+            grpc_create_channel.assert_called_once_with(
+                "mtls.squid.clam.whelk:443",
+                credentials=cred,
+                credentials_file=None,
+                scopes=None,
+                ssl_credentials=mock_ssl_cred,
+                quota_project_id=None,
+                options=[
+                    ("grpc.max_send_message_length", -1),
+                    ("grpc.max_receive_message_length", -1),
+                ],
+            )
+            assert transport.grpc_channel == mock_grpc_channel
+            assert transport._ssl_channel_credentials == mock_ssl_cred
+
+
+# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are
+# removed from grpc/grpc_asyncio transport constructor.
+@pytest.mark.parametrize( + "transport_class", + [ + transports.ProgramsServiceGrpcTransport, + transports.ProgramsServiceGrpcAsyncIOTransport, + ], +) +def test_programs_service_transport_channel_mtls_with_adc(transport_class): + mock_ssl_cred = mock.Mock() + with mock.patch.multiple( + "google.auth.transport.grpc.SslCredentials", + __init__=mock.Mock(return_value=None), + ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), + ): + with mock.patch.object( + transport_class, "create_channel" + ) as grpc_create_channel: + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + mock_cred = mock.Mock() + + with pytest.warns(DeprecationWarning): + transport = transport_class( + host="squid.clam.whelk", + credentials=mock_cred, + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=None, + ) + + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=mock_cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + + +def test_program_path(): + account = "squid" + program = "clam" + expected = "accounts/{account}/programs/{program}".format( + account=account, + program=program, + ) + actual = ProgramsServiceClient.program_path(account, program) + assert expected == actual + + +def test_parse_program_path(): + expected = { + "account": "whelk", + "program": "octopus", + } + path = ProgramsServiceClient.program_path(**expected) + + # Check that the path construction is reversible. + actual = ProgramsServiceClient.parse_program_path(path) + assert expected == actual + + +def test_common_billing_account_path(): + billing_account = "oyster" + expected = "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + actual = ProgramsServiceClient.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "nudibranch", + } + path = ProgramsServiceClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. + actual = ProgramsServiceClient.parse_common_billing_account_path(path) + assert expected == actual + + +def test_common_folder_path(): + folder = "cuttlefish" + expected = "folders/{folder}".format( + folder=folder, + ) + actual = ProgramsServiceClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "mussel", + } + path = ProgramsServiceClient.common_folder_path(**expected) + + # Check that the path construction is reversible. + actual = ProgramsServiceClient.parse_common_folder_path(path) + assert expected == actual + + +def test_common_organization_path(): + organization = "winkle" + expected = "organizations/{organization}".format( + organization=organization, + ) + actual = ProgramsServiceClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "nautilus", + } + path = ProgramsServiceClient.common_organization_path(**expected) + + # Check that the path construction is reversible. 
+ actual = ProgramsServiceClient.parse_common_organization_path(path) + assert expected == actual + + +def test_common_project_path(): + project = "scallop" + expected = "projects/{project}".format( + project=project, + ) + actual = ProgramsServiceClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "abalone", + } + path = ProgramsServiceClient.common_project_path(**expected) + + # Check that the path construction is reversible. + actual = ProgramsServiceClient.parse_common_project_path(path) + assert expected == actual + + +def test_common_location_path(): + project = "squid" + location = "clam" + expected = "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) + actual = ProgramsServiceClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "whelk", + "location": "octopus", + } + path = ProgramsServiceClient.common_location_path(**expected) + + # Check that the path construction is reversible. + actual = ProgramsServiceClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object( + transports.ProgramsServiceTransport, "_prep_wrapped_messages" + ) as prep: + client = ProgramsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object( + transports.ProgramsServiceTransport, "_prep_wrapped_messages" + ) as prep: + transport_class = ProgramsServiceClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + +@pytest.mark.asyncio +async def test_transport_close_async(): + client = ProgramsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + with mock.patch.object( + type(getattr(client.transport, "grpc_channel")), "close" + ) as close: + async with client: + close.assert_not_called() + close.assert_called_once() + + +def test_transport_close(): + transports = { + "rest": "_session", + "grpc": "_grpc_channel", + } + + for transport, close_name in transports.items(): + client = ProgramsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + with mock.patch.object( + type(getattr(client.transport, close_name)), "close" + ) as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +def test_client_ctx(): + transports = [ + "rest", + "grpc", + ] + for transport in transports: + client = ProgramsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + # Test client calls underlying transport. 
+ with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() + + +@pytest.mark.parametrize( + "client_class,transport_class", + [ + (ProgramsServiceClient, transports.ProgramsServiceGrpcTransport), + (ProgramsServiceAsyncClient, transports.ProgramsServiceGrpcAsyncIOTransport), + ], +) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) diff --git a/packages/google-shopping-merchant-accounts/tests/unit/gapic/merchant_accounts_v1beta/test_regions_service.py b/packages/google-shopping-merchant-accounts/tests/unit/gapic/merchant_accounts_v1beta/test_regions_service.py new file mode 100644 index 000000000000..15d80238b28a --- /dev/null +++ b/packages/google-shopping-merchant-accounts/tests/unit/gapic/merchant_accounts_v1beta/test_regions_service.py @@ -0,0 +1,5692 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import os + +# try/except added for compatibility with python < 3.8 +try: + from unittest import mock + from unittest.mock import AsyncMock # pragma: NO COVER +except ImportError: # pragma: NO COVER + import mock + +from collections.abc import Iterable +import json +import math + +from google.api_core import gapic_v1, grpc_helpers, grpc_helpers_async, path_template +from google.api_core import api_core_version, client_options +from google.api_core import exceptions as core_exceptions +import google.auth +from google.auth import credentials as ga_credentials +from google.auth.exceptions import MutualTLSChannelError +from google.oauth2 import service_account +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import json_format +from google.protobuf import wrappers_pb2 # type: ignore +import grpc +from grpc.experimental import aio +from proto.marshal.rules import wrappers +from proto.marshal.rules.dates import DurationRule, TimestampRule +import pytest +from requests import PreparedRequest, Request, Response +from requests.sessions import Session + +from google.shopping.merchant_accounts_v1beta.services.regions_service import ( + RegionsServiceAsyncClient, + RegionsServiceClient, + pagers, + transports, +) +from google.shopping.merchant_accounts_v1beta.types import regions + + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + + +# If default endpoint is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. +def modify_default_endpoint(client): + return ( + "foo.googleapis.com" + if ("localhost" in client.DEFAULT_ENDPOINT) + else client.DEFAULT_ENDPOINT + ) + + +# If default endpoint template is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint template so the client can produce a different +# mtls endpoint for endpoint testing purposes. 
+def modify_default_endpoint_template(client): + return ( + "test.{UNIVERSE_DOMAIN}" + if ("localhost" in client._DEFAULT_ENDPOINT_TEMPLATE) + else client._DEFAULT_ENDPOINT_TEMPLATE + ) + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert RegionsServiceClient._get_default_mtls_endpoint(None) is None + assert ( + RegionsServiceClient._get_default_mtls_endpoint(api_endpoint) + == api_mtls_endpoint + ) + assert ( + RegionsServiceClient._get_default_mtls_endpoint(api_mtls_endpoint) + == api_mtls_endpoint + ) + assert ( + RegionsServiceClient._get_default_mtls_endpoint(sandbox_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + RegionsServiceClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + RegionsServiceClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi + ) + + +def test__read_environment_variables(): + assert RegionsServiceClient._read_environment_variables() == (False, "auto", None) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + assert RegionsServiceClient._read_environment_variables() == ( + True, + "auto", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + assert RegionsServiceClient._read_environment_variables() == ( + False, + "auto", + None, + ) + + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError) as excinfo: + RegionsServiceClient._read_environment_variables() + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + assert RegionsServiceClient._read_environment_variables() == ( + False, + "never", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + assert RegionsServiceClient._read_environment_variables() == ( + False, + "always", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}): + assert RegionsServiceClient._read_environment_variables() == ( + False, + "auto", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + RegionsServiceClient._read_environment_variables() + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + with mock.patch.dict(os.environ, {"GOOGLE_CLOUD_UNIVERSE_DOMAIN": "foo.com"}): + assert RegionsServiceClient._read_environment_variables() == ( + False, + "auto", + "foo.com", + ) + + +def test__get_client_cert_source(): + mock_provided_cert_source = mock.Mock() + mock_default_cert_source = mock.Mock() + + assert RegionsServiceClient._get_client_cert_source(None, False) is None + assert ( + RegionsServiceClient._get_client_cert_source(mock_provided_cert_source, False) + is None + ) + assert ( + RegionsServiceClient._get_client_cert_source(mock_provided_cert_source, True) + == mock_provided_cert_source + ) + + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", return_value=True + ): + with mock.patch( + 
"google.auth.transport.mtls.default_client_cert_source", + return_value=mock_default_cert_source, + ): + assert ( + RegionsServiceClient._get_client_cert_source(None, True) + is mock_default_cert_source + ) + assert ( + RegionsServiceClient._get_client_cert_source( + mock_provided_cert_source, "true" + ) + is mock_provided_cert_source + ) + + +@mock.patch.object( + RegionsServiceClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(RegionsServiceClient), +) +@mock.patch.object( + RegionsServiceAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(RegionsServiceAsyncClient), +) +def test__get_api_endpoint(): + api_override = "foo.com" + mock_client_cert_source = mock.Mock() + default_universe = RegionsServiceClient._DEFAULT_UNIVERSE + default_endpoint = RegionsServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=default_universe + ) + mock_universe = "bar.com" + mock_endpoint = RegionsServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=mock_universe + ) + + assert ( + RegionsServiceClient._get_api_endpoint( + api_override, mock_client_cert_source, default_universe, "always" + ) + == api_override + ) + assert ( + RegionsServiceClient._get_api_endpoint( + None, mock_client_cert_source, default_universe, "auto" + ) + == RegionsServiceClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + RegionsServiceClient._get_api_endpoint(None, None, default_universe, "auto") + == default_endpoint + ) + assert ( + RegionsServiceClient._get_api_endpoint(None, None, default_universe, "always") + == RegionsServiceClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + RegionsServiceClient._get_api_endpoint( + None, mock_client_cert_source, default_universe, "always" + ) + == RegionsServiceClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + RegionsServiceClient._get_api_endpoint(None, None, mock_universe, "never") + == mock_endpoint + ) + assert ( + RegionsServiceClient._get_api_endpoint(None, None, default_universe, "never") + == default_endpoint + ) + + with pytest.raises(MutualTLSChannelError) as excinfo: + RegionsServiceClient._get_api_endpoint( + None, mock_client_cert_source, mock_universe, "auto" + ) + assert ( + str(excinfo.value) + == "mTLS is not supported in any universe other than googleapis.com." + ) + + +def test__get_universe_domain(): + client_universe_domain = "foo.com" + universe_domain_env = "bar.com" + + assert ( + RegionsServiceClient._get_universe_domain( + client_universe_domain, universe_domain_env + ) + == client_universe_domain + ) + assert ( + RegionsServiceClient._get_universe_domain(None, universe_domain_env) + == universe_domain_env + ) + assert ( + RegionsServiceClient._get_universe_domain(None, None) + == RegionsServiceClient._DEFAULT_UNIVERSE + ) + + with pytest.raises(ValueError) as excinfo: + RegionsServiceClient._get_universe_domain("", None) + assert str(excinfo.value) == "Universe Domain cannot be an empty string." + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (RegionsServiceClient, transports.RegionsServiceGrpcTransport, "grpc"), + (RegionsServiceClient, transports.RegionsServiceRestTransport, "rest"), + ], +) +def test__validate_universe_domain(client_class, transport_class, transport_name): + client = client_class( + transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) + ) + assert client._validate_universe_domain() == True + + # Test the case when universe is already validated. 
+ assert client._validate_universe_domain() == True + + if transport_name == "grpc": + # Test the case where credentials are provided by the + # `local_channel_credentials`. The default universes in both match. + channel = grpc.secure_channel( + "http://localhost/", grpc.local_channel_credentials() + ) + client = client_class(transport=transport_class(channel=channel)) + assert client._validate_universe_domain() == True + + # Test the case where credentials do not exist: e.g. a transport is provided + # with no credentials. Validation should still succeed because there is no + # mismatch with non-existent credentials. + channel = grpc.secure_channel( + "http://localhost/", grpc.local_channel_credentials() + ) + transport = transport_class(channel=channel) + transport._credentials = None + client = client_class(transport=transport) + assert client._validate_universe_domain() == True + + # TODO: This is needed to cater for older versions of google-auth + # Make this test unconditional once the minimum supported version of + # google-auth becomes 2.23.0 or higher. + google_auth_major, google_auth_minor = [ + int(part) for part in google.auth.__version__.split(".")[0:2] + ] + if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): + credentials = ga_credentials.AnonymousCredentials() + credentials._universe_domain = "foo.com" + # Test the case when there is a universe mismatch from the credentials. + client = client_class(transport=transport_class(credentials=credentials)) + with pytest.raises(ValueError) as excinfo: + client._validate_universe_domain() + assert ( + str(excinfo.value) + == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." + ) + + # Test the case when there is a universe mismatch from the client. + # + # TODO: Make this test unconditional once the minimum supported version of + # google-api-core becomes 2.15.0 or higher. + api_core_major, api_core_minor = [ + int(part) for part in api_core_version.__version__.split(".")[0:2] + ] + if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): + client = client_class( + client_options={"universe_domain": "bar.com"}, + transport=transport_class( + credentials=ga_credentials.AnonymousCredentials(), + ), + ) + with pytest.raises(ValueError) as excinfo: + client._validate_universe_domain() + assert ( + str(excinfo.value) + == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." 
+ ) + + # Test that ValueError is raised if universe_domain is provided via client options and credentials is None + with pytest.raises(ValueError): + client._compare_universes("foo.bar", None) + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (RegionsServiceClient, "grpc"), + (RegionsServiceAsyncClient, "grpc_asyncio"), + (RegionsServiceClient, "rest"), + ], +) +def test_regions_service_client_from_service_account_info(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_info" + ) as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info, transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + "merchantapi.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://merchantapi.googleapis.com" + ) + + +@pytest.mark.parametrize( + "transport_class,transport_name", + [ + (transports.RegionsServiceGrpcTransport, "grpc"), + (transports.RegionsServiceGrpcAsyncIOTransport, "grpc_asyncio"), + (transports.RegionsServiceRestTransport, "rest"), + ], +) +def test_regions_service_client_service_account_always_use_jwt( + transport_class, transport_name +): + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) + + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=False) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (RegionsServiceClient, "grpc"), + (RegionsServiceAsyncClient, "grpc_asyncio"), + (RegionsServiceClient, "rest"), + ], +) +def test_regions_service_client_from_service_account_file(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_file" + ) as factory: + factory.return_value = creds + client = client_class.from_service_account_file( + "dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + client = client_class.from_service_account_json( + "dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + "merchantapi.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://merchantapi.googleapis.com" + ) + + +def test_regions_service_client_get_transport_class(): + transport = RegionsServiceClient.get_transport_class() + available_transports = [ + transports.RegionsServiceGrpcTransport, + transports.RegionsServiceRestTransport, + ] + assert transport in available_transports + + transport = RegionsServiceClient.get_transport_class("grpc") + assert transport == transports.RegionsServiceGrpcTransport + + +@pytest.mark.parametrize( + 
"client_class,transport_class,transport_name", + [ + (RegionsServiceClient, transports.RegionsServiceGrpcTransport, "grpc"), + ( + RegionsServiceAsyncClient, + transports.RegionsServiceGrpcAsyncIOTransport, + "grpc_asyncio", + ), + (RegionsServiceClient, transports.RegionsServiceRestTransport, "rest"), + ], +) +@mock.patch.object( + RegionsServiceClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(RegionsServiceClient), +) +@mock.patch.object( + RegionsServiceAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(RegionsServiceAsyncClient), +) +def test_regions_service_client_client_options( + client_class, transport_class, transport_name +): + # Check that if channel is provided we won't create a new one. + with mock.patch.object(RegionsServiceClient, "get_transport_class") as gtc: + transport = transport_class(credentials=ga_credentials.AnonymousCredentials()) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object(RegionsServiceClient, "get_transport_class") as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. + options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name, client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + client = client_class(transport=transport_name) + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError) as excinfo: + client = client_class(transport=transport_name) + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + # Check the case api_endpoint is provided + options = client_options.ClientOptions( + api_audience="https://language.googleapis.com" + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience="https://language.googleapis.com", + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,use_client_cert_env", + [ + (RegionsServiceClient, transports.RegionsServiceGrpcTransport, "grpc", "true"), + ( + RegionsServiceAsyncClient, + transports.RegionsServiceGrpcAsyncIOTransport, + "grpc_asyncio", + "true", + ), + (RegionsServiceClient, transports.RegionsServiceGrpcTransport, "grpc", "false"), + ( + RegionsServiceAsyncClient, + transports.RegionsServiceGrpcAsyncIOTransport, + "grpc_asyncio", + "false", + ), + (RegionsServiceClient, transports.RegionsServiceRestTransport, "rest", "true"), + (RegionsServiceClient, transports.RegionsServiceRestTransport, "rest", "false"), + ], +) +@mock.patch.object( + RegionsServiceClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(RegionsServiceClient), +) +@mock.patch.object( + RegionsServiceAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(RegionsServiceAsyncClient), +) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_regions_service_client_mtls_env_auto( + client_class, transport_class, transport_name, use_client_cert_env +): + # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. 
+ + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + options = client_options.ClientOptions( + client_cert_source=client_cert_source_callback + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ) + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=client_cert_source_callback, + ): + if use_client_cert_env == "false": + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ) + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case client_cert_source and ADC client cert are not provided. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class", [RegionsServiceClient, RegionsServiceAsyncClient] +) +@mock.patch.object( + RegionsServiceClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(RegionsServiceClient), +) +@mock.patch.object( + RegionsServiceAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(RegionsServiceAsyncClient), +) +def test_regions_service_client_get_mtls_endpoint_and_cert_source(client_class): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=mock_client_cert_source, + ): + ( + api_endpoint, + cert_source, + ) = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + client_class.get_mtls_endpoint_and_cert_source() + + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError) as excinfo: + client_class.get_mtls_endpoint_and_cert_source() + + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + + +@pytest.mark.parametrize( + "client_class", [RegionsServiceClient, RegionsServiceAsyncClient] +) +@mock.patch.object( + RegionsServiceClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(RegionsServiceClient), +) +@mock.patch.object( + RegionsServiceAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(RegionsServiceAsyncClient), +) +def test_regions_service_client_client_api_endpoint(client_class): + mock_client_cert_source = client_cert_source_callback + api_override = "foo.com" + default_universe = RegionsServiceClient._DEFAULT_UNIVERSE + default_endpoint = RegionsServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=default_universe + ) + mock_universe = "bar.com" + mock_endpoint = RegionsServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=mock_universe + ) + + # If ClientOptions.api_endpoint is set and GOOGLE_API_USE_CLIENT_CERTIFICATE="true", + # use ClientOptions.api_endpoint as the api endpoint regardless. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ): + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=api_override + ) + client = client_class( + client_options=options, + credentials=ga_credentials.AnonymousCredentials(), + ) + assert client.api_endpoint == api_override + + # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="never", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + client = client_class(credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == default_endpoint + + # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="always", + # use the DEFAULT_MTLS_ENDPOINT as the api endpoint. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + client = client_class(credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + + # If ClientOptions.api_endpoint is not set, GOOGLE_API_USE_MTLS_ENDPOINT="auto" (default), + # GOOGLE_API_USE_CLIENT_CERTIFICATE="false" (default), default cert source doesn't exist, + # and ClientOptions.universe_domain="bar.com", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with universe domain as the api endpoint. + options = client_options.ClientOptions() + universe_exists = hasattr(options, "universe_domain") + if universe_exists: + options = client_options.ClientOptions(universe_domain=mock_universe) + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + else: + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + assert client.api_endpoint == ( + mock_endpoint if universe_exists else default_endpoint + ) + assert client.universe_domain == ( + mock_universe if universe_exists else default_universe + ) + + # If ClientOptions does not have a universe domain attribute and GOOGLE_API_USE_MTLS_ENDPOINT="never", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. + options = client_options.ClientOptions() + if hasattr(options, "universe_domain"): + delattr(options, "universe_domain") + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + assert client.api_endpoint == default_endpoint + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (RegionsServiceClient, transports.RegionsServiceGrpcTransport, "grpc"), + ( + RegionsServiceAsyncClient, + transports.RegionsServiceGrpcAsyncIOTransport, + "grpc_asyncio", + ), + (RegionsServiceClient, transports.RegionsServiceRestTransport, "rest"), + ], +) +def test_regions_service_client_client_options_scopes( + client_class, transport_class, transport_name +): + # Check the case scopes are provided. + options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + RegionsServiceClient, + transports.RegionsServiceGrpcTransport, + "grpc", + grpc_helpers, + ), + ( + RegionsServiceAsyncClient, + transports.RegionsServiceGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + (RegionsServiceClient, transports.RegionsServiceRestTransport, "rest", None), + ], +) +def test_regions_service_client_client_options_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. 
+ options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +def test_regions_service_client_client_options_from_dict(): + with mock.patch( + "google.shopping.merchant_accounts_v1beta.services.regions_service.transports.RegionsServiceGrpcTransport.__init__" + ) as grpc_transport: + grpc_transport.return_value = None + client = RegionsServiceClient( + client_options={"api_endpoint": "squid.clam.whelk"} + ) + grpc_transport.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + RegionsServiceClient, + transports.RegionsServiceGrpcTransport, + "grpc", + grpc_helpers, + ), + ( + RegionsServiceAsyncClient, + transports.RegionsServiceGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + ], +) +def test_regions_service_client_create_channel_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. + options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # test that the credentials from file are saved and used as the credentials. 
+ with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel" + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + file_creds = ga_credentials.AnonymousCredentials() + load_creds.return_value = (file_creds, None) + adc.return_value = (creds, None) + client = client_class(client_options=options, transport=transport_name) + create_channel.assert_called_with( + "merchantapi.googleapis.com:443", + credentials=file_creds, + credentials_file=None, + quota_project_id=None, + default_scopes=("https://www.googleapis.com/auth/content",), + scopes=None, + default_host="merchantapi.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "request_type", + [ + regions.GetRegionRequest, + dict, + ], +) +def test_get_region(request_type, transport: str = "grpc"): + client = RegionsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_region), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = regions.Region( + name="name_value", + display_name="display_name_value", + ) + response = client.get_region(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = regions.GetRegionRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, regions.Region) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + + +def test_get_region_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = RegionsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_region), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.get_region() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == regions.GetRegionRequest() + + +def test_get_region_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = RegionsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = regions.GetRegionRequest( + name="name_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.get_region), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.get_region(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == regions.GetRegionRequest( + name="name_value", + ) + + +def test_get_region_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RegionsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_region in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get_region] = mock_rpc + request = {} + client.get_region(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.get_region(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_get_region_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = RegionsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_region), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + regions.Region( + name="name_value", + display_name="display_name_value", + ) + ) + response = await client.get_region() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == regions.GetRegionRequest() + + +@pytest.mark.asyncio +async def test_get_region_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = RegionsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.get_region + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[ + client._client._transport.get_region + ] = mock_object + + request = {} + await client.get_region(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_object.call_count == 1 + + await client.get_region(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + +@pytest.mark.asyncio +async def test_get_region_async( + transport: str = "grpc_asyncio", request_type=regions.GetRegionRequest +): + client = RegionsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_region), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + regions.Region( + name="name_value", + display_name="display_name_value", + ) + ) + response = await client.get_region(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = regions.GetRegionRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, regions.Region) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + + +@pytest.mark.asyncio +async def test_get_region_async_from_dict(): + await test_get_region_async(request_type=dict) + + +def test_get_region_field_headers(): + client = RegionsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = regions.GetRegionRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_region), "__call__") as call: + call.return_value = regions.Region() + client.get_region(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_region_field_headers_async(): + client = RegionsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = regions.GetRegionRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_region), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(regions.Region()) + await client.get_region(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_get_region_flattened(): + client = RegionsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_region), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = regions.Region() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_region( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_get_region_flattened_error(): + client = RegionsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_region( + regions.GetRegionRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_get_region_flattened_async(): + client = RegionsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_region), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = regions.Region() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(regions.Region()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_region( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_get_region_flattened_error_async(): + client = RegionsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.get_region( + regions.GetRegionRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + regions.CreateRegionRequest, + dict, + ], +) +def test_create_region(request_type, transport: str = "grpc"): + client = RegionsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_region), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = regions.Region( + name="name_value", + display_name="display_name_value", + ) + response = client.create_region(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = regions.CreateRegionRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, regions.Region) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + + +def test_create_region_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = RegionsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_region), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.create_region() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == regions.CreateRegionRequest() + + +def test_create_region_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = RegionsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = regions.CreateRegionRequest( + parent="parent_value", + region_id="region_id_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_region), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.create_region(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == regions.CreateRegionRequest( + parent="parent_value", + region_id="region_id_value", + ) + + +def test_create_region_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RegionsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.create_region in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.create_region] = mock_rpc + request = {} + client.create_region(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.create_region(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_create_region_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. 
+ client = RegionsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_region), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + regions.Region( + name="name_value", + display_name="display_name_value", + ) + ) + response = await client.create_region() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == regions.CreateRegionRequest() + + +@pytest.mark.asyncio +async def test_create_region_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = RegionsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.create_region + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[ + client._client._transport.create_region + ] = mock_object + + request = {} + await client.create_region(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + await client.create_region(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + +@pytest.mark.asyncio +async def test_create_region_async( + transport: str = "grpc_asyncio", request_type=regions.CreateRegionRequest +): + client = RegionsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_region), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + regions.Region( + name="name_value", + display_name="display_name_value", + ) + ) + response = await client.create_region(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = regions.CreateRegionRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, regions.Region) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + + +@pytest.mark.asyncio +async def test_create_region_async_from_dict(): + await test_create_region_async(request_type=dict) + + +def test_create_region_field_headers(): + client = RegionsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. 
Set these to a non-empty value. + request = regions.CreateRegionRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_region), "__call__") as call: + call.return_value = regions.Region() + client.create_region(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_create_region_field_headers_async(): + client = RegionsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = regions.CreateRegionRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_region), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(regions.Region()) + await client.create_region(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_create_region_flattened(): + client = RegionsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_region), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = regions.Region() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.create_region( + parent="parent_value", + region=regions.Region(name="name_value"), + region_id="region_id_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].region + mock_val = regions.Region(name="name_value") + assert arg == mock_val + arg = args[0].region_id + mock_val = "region_id_value" + assert arg == mock_val + + +def test_create_region_flattened_error(): + client = RegionsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_region( + regions.CreateRegionRequest(), + parent="parent_value", + region=regions.Region(name="name_value"), + region_id="region_id_value", + ) + + +@pytest.mark.asyncio +async def test_create_region_flattened_async(): + client = RegionsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_region), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = regions.Region() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(regions.Region()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.create_region( + parent="parent_value", + region=regions.Region(name="name_value"), + region_id="region_id_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].region + mock_val = regions.Region(name="name_value") + assert arg == mock_val + arg = args[0].region_id + mock_val = "region_id_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_create_region_flattened_error_async(): + client = RegionsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.create_region( + regions.CreateRegionRequest(), + parent="parent_value", + region=regions.Region(name="name_value"), + region_id="region_id_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + regions.UpdateRegionRequest, + dict, + ], +) +def test_update_region(request_type, transport: str = "grpc"): + client = RegionsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_region), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = regions.Region( + name="name_value", + display_name="display_name_value", + ) + response = client.update_region(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = regions.UpdateRegionRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, regions.Region) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + + +def test_update_region_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = RegionsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_region), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.update_region() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == regions.UpdateRegionRequest() + + +def test_update_region_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. 
+ client = RegionsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = regions.UpdateRegionRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_region), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.update_region(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == regions.UpdateRegionRequest() + + +def test_update_region_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RegionsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.update_region in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.update_region] = mock_rpc + request = {} + client.update_region(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.update_region(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_update_region_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = RegionsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_region), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + regions.Region( + name="name_value", + display_name="display_name_value", + ) + ) + response = await client.update_region() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == regions.UpdateRegionRequest() + + +@pytest.mark.asyncio +async def test_update_region_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = RegionsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.update_region + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[ + client._client._transport.update_region + ] = mock_object + + request = {} + await client.update_region(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + await client.update_region(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + +@pytest.mark.asyncio +async def test_update_region_async( + transport: str = "grpc_asyncio", request_type=regions.UpdateRegionRequest +): + client = RegionsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_region), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + regions.Region( + name="name_value", + display_name="display_name_value", + ) + ) + response = await client.update_region(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = regions.UpdateRegionRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, regions.Region) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + + +@pytest.mark.asyncio +async def test_update_region_async_from_dict(): + await test_update_region_async(request_type=dict) + + +def test_update_region_field_headers(): + client = RegionsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = regions.UpdateRegionRequest() + + request.region.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.update_region), "__call__") as call: + call.return_value = regions.Region() + client.update_region(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "region.name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_update_region_field_headers_async(): + client = RegionsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = regions.UpdateRegionRequest() + + request.region.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_region), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(regions.Region()) + await client.update_region(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "region.name=name_value", + ) in kw["metadata"] + + +def test_update_region_flattened(): + client = RegionsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_region), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = regions.Region() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.update_region( + region=regions.Region(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].region + mock_val = regions.Region(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + assert arg == mock_val + + +def test_update_region_flattened_error(): + client = RegionsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_region( + regions.UpdateRegionRequest(), + region=regions.Region(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +@pytest.mark.asyncio +async def test_update_region_flattened_async(): + client = RegionsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_region), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = regions.Region() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(regions.Region()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
+ response = await client.update_region( + region=regions.Region(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].region + mock_val = regions.Region(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_update_region_flattened_error_async(): + client = RegionsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.update_region( + regions.UpdateRegionRequest(), + region=regions.Region(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +@pytest.mark.parametrize( + "request_type", + [ + regions.DeleteRegionRequest, + dict, + ], +) +def test_delete_region(request_type, transport: str = "grpc"): + client = RegionsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_region), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.delete_region(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = regions.DeleteRegionRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_region_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = RegionsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_region), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.delete_region() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == regions.DeleteRegionRequest() + + +def test_delete_region_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = RegionsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = regions.DeleteRegionRequest( + name="name_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.delete_region), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.delete_region(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == regions.DeleteRegionRequest( + name="name_value", + ) + + +def test_delete_region_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RegionsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete_region in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.delete_region] = mock_rpc + request = {} + client.delete_region(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.delete_region(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_delete_region_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = RegionsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_region), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_region() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == regions.DeleteRegionRequest() + + +@pytest.mark.asyncio +async def test_delete_region_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = RegionsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.delete_region + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[ + client._client._transport.delete_region + ] = mock_object + + request = {} + await client.delete_region(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_object.call_count == 1 + + await client.delete_region(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + +@pytest.mark.asyncio +async def test_delete_region_async( + transport: str = "grpc_asyncio", request_type=regions.DeleteRegionRequest +): + client = RegionsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_region), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_region(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = regions.DeleteRegionRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_delete_region_async_from_dict(): + await test_delete_region_async(request_type=dict) + + +def test_delete_region_field_headers(): + client = RegionsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = regions.DeleteRegionRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_region), "__call__") as call: + call.return_value = None + client.delete_region(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_delete_region_field_headers_async(): + client = RegionsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = regions.DeleteRegionRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_region), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_region(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_delete_region_flattened(): + client = RegionsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_region), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = None + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.delete_region( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_delete_region_flattened_error(): + client = RegionsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_region( + regions.DeleteRegionRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_delete_region_flattened_async(): + client = RegionsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_region), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.delete_region( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_delete_region_flattened_error_async(): + client = RegionsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.delete_region( + regions.DeleteRegionRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + regions.ListRegionsRequest, + dict, + ], +) +def test_list_regions(request_type, transport: str = "grpc"): + client = RegionsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_regions), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = regions.ListRegionsResponse( + next_page_token="next_page_token_value", + ) + response = client.list_regions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = regions.ListRegionsRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListRegionsPager) + assert response.next_page_token == "next_page_token_value" + + +def test_list_regions_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. 
+ client = RegionsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_regions), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.list_regions() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == regions.ListRegionsRequest() + + +def test_list_regions_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = RegionsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = regions.ListRegionsRequest( + parent="parent_value", + page_token="page_token_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_regions), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.list_regions(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == regions.ListRegionsRequest( + parent="parent_value", + page_token="page_token_value", + ) + + +def test_list_regions_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RegionsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_regions in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.list_regions] = mock_rpc + request = {} + client.list_regions(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_regions(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_list_regions_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = RegionsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_regions), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + regions.ListRegionsResponse( + next_page_token="next_page_token_value", + ) + ) + response = await client.list_regions() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == regions.ListRegionsRequest() + + +@pytest.mark.asyncio +async def test_list_regions_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = RegionsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.list_regions + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[ + client._client._transport.list_regions + ] = mock_object + + request = {} + await client.list_regions(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + await client.list_regions(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + +@pytest.mark.asyncio +async def test_list_regions_async( + transport: str = "grpc_asyncio", request_type=regions.ListRegionsRequest +): + client = RegionsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_regions), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + regions.ListRegionsResponse( + next_page_token="next_page_token_value", + ) + ) + response = await client.list_regions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = regions.ListRegionsRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListRegionsAsyncPager) + assert response.next_page_token == "next_page_token_value" + + +@pytest.mark.asyncio +async def test_list_regions_async_from_dict(): + await test_list_regions_async(request_type=dict) + + +def test_list_regions_field_headers(): + client = RegionsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = regions.ListRegionsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.list_regions), "__call__") as call: + call.return_value = regions.ListRegionsResponse() + client.list_regions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_regions_field_headers_async(): + client = RegionsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = regions.ListRegionsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_regions), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + regions.ListRegionsResponse() + ) + await client.list_regions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_list_regions_flattened(): + client = RegionsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_regions), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = regions.ListRegionsResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_regions( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +def test_list_regions_flattened_error(): + client = RegionsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_regions( + regions.ListRegionsRequest(), + parent="parent_value", + ) + + +@pytest.mark.asyncio +async def test_list_regions_flattened_async(): + client = RegionsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_regions), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = regions.ListRegionsResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + regions.ListRegionsResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_regions( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_list_regions_flattened_error_async(): + client = RegionsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.list_regions( + regions.ListRegionsRequest(), + parent="parent_value", + ) + + +def test_list_regions_pager(transport_name: str = "grpc"): + client = RegionsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_regions), "__call__") as call: + # Set the response to a series of pages. + call.side_effect = ( + regions.ListRegionsResponse( + regions=[ + regions.Region(), + regions.Region(), + regions.Region(), + ], + next_page_token="abc", + ), + regions.ListRegionsResponse( + regions=[], + next_page_token="def", + ), + regions.ListRegionsResponse( + regions=[ + regions.Region(), + ], + next_page_token="ghi", + ), + regions.ListRegionsResponse( + regions=[ + regions.Region(), + regions.Region(), + ], + ), + RuntimeError, + ) + + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + ) + pager = client.list_regions(request={}) + + assert pager._metadata == expected_metadata + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, regions.Region) for i in results) + + +def test_list_regions_pages(transport_name: str = "grpc"): + client = RegionsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_regions), "__call__") as call: + # Set the response to a series of pages. + call.side_effect = ( + regions.ListRegionsResponse( + regions=[ + regions.Region(), + regions.Region(), + regions.Region(), + ], + next_page_token="abc", + ), + regions.ListRegionsResponse( + regions=[], + next_page_token="def", + ), + regions.ListRegionsResponse( + regions=[ + regions.Region(), + ], + next_page_token="ghi", + ), + regions.ListRegionsResponse( + regions=[ + regions.Region(), + regions.Region(), + ], + ), + RuntimeError, + ) + pages = list(client.list_regions(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_list_regions_async_pager(): + client = RegionsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_regions), "__call__", new_callable=mock.AsyncMock + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + regions.ListRegionsResponse( + regions=[ + regions.Region(), + regions.Region(), + regions.Region(), + ], + next_page_token="abc", + ), + regions.ListRegionsResponse( + regions=[], + next_page_token="def", + ), + regions.ListRegionsResponse( + regions=[ + regions.Region(), + ], + next_page_token="ghi", + ), + regions.ListRegionsResponse( + regions=[ + regions.Region(), + regions.Region(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_regions( + request={}, + ) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, regions.Region) for i in responses) + + +@pytest.mark.asyncio +async def test_list_regions_async_pages(): + client = RegionsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_regions), "__call__", new_callable=mock.AsyncMock + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + regions.ListRegionsResponse( + regions=[ + regions.Region(), + regions.Region(), + regions.Region(), + ], + next_page_token="abc", + ), + regions.ListRegionsResponse( + regions=[], + next_page_token="def", + ), + regions.ListRegionsResponse( + regions=[ + regions.Region(), + ], + next_page_token="ghi", + ), + regions.ListRegionsResponse( + regions=[ + regions.Region(), + regions.Region(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_regions(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + regions.GetRegionRequest, + dict, + ], +) +def test_get_region_rest(request_type): + client = RegionsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "accounts/sample1/regions/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = regions.Region( + name="name_value", + display_name="display_name_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = regions.Region.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_region(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, regions.Region) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + + +def test_get_region_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RegionsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_region in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get_region] = mock_rpc + + request = {} + client.get_region(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.get_region(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_get_region_rest_required_fields(request_type=regions.GetRegionRequest): + transport_class = transports.RegionsServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_region._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_region._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = RegionsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = regions.Region() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
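+            # Stubbing transcode() lets the test control exactly what the
+            # transport treats as the URI, HTTP method and query parameters,
+            # without depending on the real http_options for this RPC.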
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = regions.Region.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_region(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_region_rest_unset_required_fields(): + transport = transports.RegionsServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_region._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_region_rest_interceptors(null_interceptor): + transport = transports.RegionsServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.RegionsServiceRestInterceptor(), + ) + client = RegionsServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.RegionsServiceRestInterceptor, "post_get_region" + ) as post, mock.patch.object( + transports.RegionsServiceRestInterceptor, "pre_get_region" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = regions.GetRegionRequest.pb(regions.GetRegionRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = regions.Region.to_json(regions.Region()) + + request = regions.GetRegionRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = regions.Region() + + client.get_region( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_region_rest_bad_request( + transport: str = "rest", request_type=regions.GetRegionRequest +): + client = RegionsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "accounts/sample1/regions/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_region(request) + + +def test_get_region_rest_flattened(): + client = RegionsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. 
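+    # The flattened `name` argument should be folded into a GetRegionRequest
+    # and expanded into the URI checked by path_template.validate below.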
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = regions.Region() + + # get arguments that satisfy an http rule for this method + sample_request = {"name": "accounts/sample1/regions/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = regions.Region.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_region(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/accounts/v1beta/{name=accounts/*/regions/*}" % client.transport._host, + args[1], + ) + + +def test_get_region_rest_flattened_error(transport: str = "rest"): + client = RegionsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_region( + regions.GetRegionRequest(), + name="name_value", + ) + + +def test_get_region_rest_error(): + client = RegionsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + regions.CreateRegionRequest, + dict, + ], +) +def test_create_region_rest(request_type): + client = RegionsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "accounts/sample1"} + request_init["region"] = { + "name": "name_value", + "display_name": "display_name_value", + "postal_code_area": { + "region_code": "region_code_value", + "postal_codes": [{"begin": "begin_value", "end": "end_value"}], + }, + "geotarget_area": {"geotarget_criteria_ids": [2324, 2325]}, + "regional_inventory_eligible": {"value": True}, + "shipping_eligible": {}, + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = regions.CreateRegionRequest.meta.fields["region"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
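+        # proto-plus message classes expose their fields via `meta.fields`,
+        # while plain protobuf classes expose them via `DESCRIPTOR.fields`;
+        # the hasattr check below distinguishes the two cases.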
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["region"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["region"][field])): + del request_init["region"][field][i][subfield] + else: + del request_init["region"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = regions.Region( + name="name_value", + display_name="display_name_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = regions.Region.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.create_region(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, regions.Region) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + + +def test_create_region_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RegionsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.create_region in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.create_region] = mock_rpc + + request = {} + client.create_region(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.create_region(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_create_region_rest_required_fields(request_type=regions.CreateRegionRequest): + transport_class = transports.RegionsServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request_init["region_id"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + assert "regionId" not in jsonified_request + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_region._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + assert "regionId" in jsonified_request + assert jsonified_request["regionId"] == request_init["region_id"] + + jsonified_request["parent"] = "parent_value" + jsonified_request["regionId"] = "region_id_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_region._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("region_id",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + assert "regionId" in jsonified_request + assert jsonified_request["regionId"] == "region_id_value" + + client = RegionsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = regions.Region() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = regions.Region.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.create_region(request) + + expected_params = [ + ( + "regionId", + "", + ), + ("$alt", "json;enum-encoding=int"), + ] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_create_region_rest_unset_required_fields(): + transport = transports.RegionsServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.create_region._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(("regionId",)) + & set( + ( + "parent", + "regionId", + "region", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_region_rest_interceptors(null_interceptor): + transport = transports.RegionsServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.RegionsServiceRestInterceptor(), + ) + client = RegionsServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.RegionsServiceRestInterceptor, "post_create_region" + ) as post, mock.patch.object( + transports.RegionsServiceRestInterceptor, "pre_create_region" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = regions.CreateRegionRequest.pb(regions.CreateRegionRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = regions.Region.to_json(regions.Region()) + + request = regions.CreateRegionRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = regions.Region() + + client.create_region( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_create_region_rest_bad_request( + transport: str = "rest", request_type=regions.CreateRegionRequest +): + client = RegionsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "accounts/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
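+    # A 400 status on the mocked response should surface from the REST
+    # transport as core_exceptions.BadRequest rather than a return value.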
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.create_region(request) + + +def test_create_region_rest_flattened(): + client = RegionsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = regions.Region() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "accounts/sample1"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + region=regions.Region(name="name_value"), + region_id="region_id_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = regions.Region.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.create_region(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/accounts/v1beta/{parent=accounts/*}/regions" % client.transport._host, + args[1], + ) + + +def test_create_region_rest_flattened_error(transport: str = "rest"): + client = RegionsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_region( + regions.CreateRegionRequest(), + parent="parent_value", + region=regions.Region(name="name_value"), + region_id="region_id_value", + ) + + +def test_create_region_rest_error(): + client = RegionsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + regions.UpdateRegionRequest, + dict, + ], +) +def test_update_region_rest(request_type): + client = RegionsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"region": {"name": "accounts/sample1/regions/sample2"}} + request_init["region"] = { + "name": "accounts/sample1/regions/sample2", + "display_name": "display_name_value", + "postal_code_area": { + "region_code": "region_code_value", + "postal_codes": [{"begin": "begin_value", "end": "end_value"}], + }, + "geotarget_area": {"geotarget_criteria_ids": [2324, 2325]}, + "regional_inventory_eligible": {"value": True}, + "shipping_eligible": {}, + } + # The version of a generated dependency at test runtime may differ from the version used during generation. 
+ # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = regions.UpdateRegionRequest.meta.fields["region"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["region"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["region"][field])): + del request_init["region"][field][i][subfield] + else: + del request_init["region"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = regions.Region( + name="name_value", + display_name="display_name_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = regions.Region.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.update_region(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, regions.Region) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + + +def test_update_region_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RegionsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.update_region in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.update_region] = mock_rpc + + request = {} + client.update_region(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.update_region(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_update_region_rest_required_fields(request_type=regions.UpdateRegionRequest): + transport_class = transports.RegionsServiceRestTransport + + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_region._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_region._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("update_mask",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + + client = RegionsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = regions.Region() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "patch", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = regions.Region.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.update_region(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_update_region_rest_unset_required_fields(): + transport = transports.RegionsServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.update_region._get_unset_required_fields({}) + assert set(unset_fields) == (set(("updateMask",)) & set(("region",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_region_rest_interceptors(null_interceptor): + transport = transports.RegionsServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.RegionsServiceRestInterceptor(), + ) + client = RegionsServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.RegionsServiceRestInterceptor, "post_update_region" + ) as post, mock.patch.object( + transports.RegionsServiceRestInterceptor, "pre_update_region" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = regions.UpdateRegionRequest.pb(regions.UpdateRegionRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = regions.Region.to_json(regions.Region()) + + request = regions.UpdateRegionRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = regions.Region() + + client.update_region( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_update_region_rest_bad_request( + transport: str = "rest", request_type=regions.UpdateRegionRequest +): + client = RegionsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"region": {"name": "accounts/sample1/regions/sample2"}} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.update_region(request) + + +def test_update_region_rest_flattened(): + client = RegionsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = regions.Region() + + # get arguments that satisfy an http rule for this method + sample_request = {"region": {"name": "accounts/sample1/regions/sample2"}} + + # get truthy value for each flattened field + mock_args = dict( + region=regions.Region(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = regions.Region.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.update_region(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/accounts/v1beta/{region.name=accounts/*/regions/*}" + % client.transport._host, + args[1], + ) + + +def test_update_region_rest_flattened_error(transport: str = "rest"): + client = RegionsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_region( + regions.UpdateRegionRequest(), + region=regions.Region(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +def test_update_region_rest_error(): + client = RegionsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + regions.DeleteRegionRequest, + dict, + ], +) +def test_delete_region_rest(request_type): + client = RegionsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "accounts/sample1/regions/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.delete_region(request) + + # Establish that the response is the type that we expect. 
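+    # DeleteRegion returns google.protobuf.Empty, so the client method is
+    # expected to return None here.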
+ assert response is None + + +def test_delete_region_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RegionsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete_region in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.delete_region] = mock_rpc + + request = {} + client.delete_region(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.delete_region(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_delete_region_rest_required_fields(request_type=regions.DeleteRegionRequest): + transport_class = transports.RegionsServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_region._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_region._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = RegionsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = None + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "delete", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.delete_region(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_delete_region_rest_unset_required_fields(): + transport = transports.RegionsServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.delete_region._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_region_rest_interceptors(null_interceptor): + transport = transports.RegionsServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.RegionsServiceRestInterceptor(), + ) + client = RegionsServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.RegionsServiceRestInterceptor, "pre_delete_region" + ) as pre: + pre.assert_not_called() + pb_message = regions.DeleteRegionRequest.pb(regions.DeleteRegionRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + + request = regions.DeleteRegionRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + + client.delete_region( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + + +def test_delete_region_rest_bad_request( + transport: str = "rest", request_type=regions.DeleteRegionRequest +): + client = RegionsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "accounts/sample1/regions/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_region(request) + + +def test_delete_region_rest_flattened(): + client = RegionsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = None + + # get arguments that satisfy an http rule for this method + sample_request = {"name": "accounts/sample1/regions/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.delete_region(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/accounts/v1beta/{name=accounts/*/regions/*}" % client.transport._host, + args[1], + ) + + +def test_delete_region_rest_flattened_error(transport: str = "rest"): + client = RegionsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_region( + regions.DeleteRegionRequest(), + name="name_value", + ) + + +def test_delete_region_rest_error(): + client = RegionsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + regions.ListRegionsRequest, + dict, + ], +) +def test_list_regions_rest(request_type): + client = RegionsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "accounts/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = regions.ListRegionsResponse( + next_page_token="next_page_token_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = regions.ListRegionsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_regions(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListRegionsPager) + assert response.next_page_token == "next_page_token_value" + + +def test_list_regions_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RegionsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_regions in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client._transport._wrapped_methods[client._transport.list_regions] = mock_rpc + + request = {} + client.list_regions(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_regions(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_list_regions_rest_required_fields(request_type=regions.ListRegionsRequest): + transport_class = transports.RegionsServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_regions._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_regions._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "page_size", + "page_token", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = RegionsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = regions.ListRegionsResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = regions.ListRegionsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_regions(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_regions_rest_unset_required_fields(): + transport = transports.RegionsServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.list_regions._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_regions_rest_interceptors(null_interceptor): + transport = transports.RegionsServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.RegionsServiceRestInterceptor(), + ) + client = RegionsServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.RegionsServiceRestInterceptor, "post_list_regions" + ) as post, mock.patch.object( + transports.RegionsServiceRestInterceptor, "pre_list_regions" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = regions.ListRegionsRequest.pb(regions.ListRegionsRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = regions.ListRegionsResponse.to_json( + regions.ListRegionsResponse() + ) + + request = regions.ListRegionsRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = regions.ListRegionsResponse() + + client.list_regions( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_regions_rest_bad_request( + transport: str = "rest", request_type=regions.ListRegionsRequest +): + client = RegionsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "accounts/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_regions(request) + + +def test_list_regions_rest_flattened(): + client = RegionsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = regions.ListRegionsResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "accounts/sample1"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = regions.ListRegionsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.list_regions(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/accounts/v1beta/{parent=accounts/*}/regions" % client.transport._host, + args[1], + ) + + +def test_list_regions_rest_flattened_error(transport: str = "rest"): + client = RegionsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_regions( + regions.ListRegionsRequest(), + parent="parent_value", + ) + + +def test_list_regions_rest_pager(transport: str = "rest"): + client = RegionsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
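+        # Unlike the gRPC pager tests above, each page here arrives as a
+        # separate JSON HTTP response; the pager issues a follow-up request
+        # whenever the previous page carried a next_page_token.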
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + regions.ListRegionsResponse( + regions=[ + regions.Region(), + regions.Region(), + regions.Region(), + ], + next_page_token="abc", + ), + regions.ListRegionsResponse( + regions=[], + next_page_token="def", + ), + regions.ListRegionsResponse( + regions=[ + regions.Region(), + ], + next_page_token="ghi", + ), + regions.ListRegionsResponse( + regions=[ + regions.Region(), + regions.Region(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(regions.ListRegionsResponse.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "accounts/sample1"} + + pager = client.list_regions(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, regions.Region) for i in results) + + pages = list(client.list_regions(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.RegionsServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = RegionsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.RegionsServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = RegionsServiceClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. + transport = transports.RegionsServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = RegionsServiceClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = RegionsServiceClient( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.RegionsServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = RegionsServiceClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.RegionsServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = RegionsServiceClient(transport=transport) + assert client.transport is transport + + +def test_transport_get_channel(): + # A client may be instantiated with a custom transport instance. 
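+    # Both the sync and asyncio gRPC transports should expose the channel
+    # they manage through their grpc_channel property.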
+ transport = transports.RegionsServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + transport = transports.RegionsServiceGrpcAsyncIOTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.RegionsServiceGrpcTransport, + transports.RegionsServiceGrpcAsyncIOTransport, + transports.RegionsServiceRestTransport, + ], +) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "rest", + ], +) +def test_transport_kind(transport_name): + transport = RegionsServiceClient.get_transport_class(transport_name)( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert transport.kind == transport_name + + +def test_transport_grpc_default(): + # A client should use the gRPC transport by default. + client = RegionsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert isinstance( + client.transport, + transports.RegionsServiceGrpcTransport, + ) + + +def test_regions_service_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.RegionsServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json", + ) + + +def test_regions_service_base_transport(): + # Instantiate the base transport. + with mock.patch( + "google.shopping.merchant_accounts_v1beta.services.regions_service.transports.RegionsServiceTransport.__init__" + ) as Transport: + Transport.return_value = None + transport = transports.RegionsServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. 
+ methods = ( + "get_region", + "create_region", + "update_region", + "delete_region", + "list_regions", + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + with pytest.raises(NotImplementedError): + transport.close() + + # Catch all for all remaining methods and properties + remainder = [ + "kind", + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + + +def test_regions_service_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch( + "google.shopping.merchant_accounts_v1beta.services.regions_service.transports.RegionsServiceTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.RegionsServiceTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with( + "credentials.json", + scopes=None, + default_scopes=("https://www.googleapis.com/auth/content",), + quota_project_id="octopus", + ) + + +def test_regions_service_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. + with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( + "google.shopping.merchant_accounts_v1beta.services.regions_service.transports.RegionsServiceTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.RegionsServiceTransport() + adc.assert_called_once() + + +def test_regions_service_auth_adc(): + # If no credentials are provided, we should use ADC credentials. + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + RegionsServiceClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=("https://www.googleapis.com/auth/content",), + quota_project_id=None, + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.RegionsServiceGrpcTransport, + transports.RegionsServiceGrpcAsyncIOTransport, + ], +) +def test_regions_service_transport_auth_adc(transport_class): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
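+    # Explicit scopes and quota project should still be forwarded to
+    # google.auth.default() together with the library's default scopes.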
+ with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + adc.assert_called_once_with( + scopes=["1", "2"], + default_scopes=("https://www.googleapis.com/auth/content",), + quota_project_id="octopus", + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.RegionsServiceGrpcTransport, + transports.RegionsServiceGrpcAsyncIOTransport, + transports.RegionsServiceRestTransport, + ], +) +def test_regions_service_transport_auth_gdch_credentials(transport_class): + host = "https://language.com" + api_audience_tests = [None, "https://language2.com"] + api_audience_expect = [host, "https://language2.com"] + for t, e in zip(api_audience_tests, api_audience_expect): + with mock.patch.object(google.auth, "default", autospec=True) as adc: + gdch_mock = mock.MagicMock() + type(gdch_mock).with_gdch_audience = mock.PropertyMock( + return_value=gdch_mock + ) + adc.return_value = (gdch_mock, None) + transport_class(host=host, api_audience=t) + gdch_mock.with_gdch_audience.assert_called_once_with(e) + + +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.RegionsServiceGrpcTransport, grpc_helpers), + (transports.RegionsServiceGrpcAsyncIOTransport, grpc_helpers_async), + ], +) +def test_regions_service_transport_create_channel(transport_class, grpc_helpers): + # If credentials and host are not provided, the transport class should use + # ADC credentials. + with mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + + create_channel.assert_called_with( + "merchantapi.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + default_scopes=("https://www.googleapis.com/auth/content",), + scopes=["1", "2"], + default_host="merchantapi.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.RegionsServiceGrpcTransport, + transports.RegionsServiceGrpcAsyncIOTransport, + ], +) +def test_regions_service_grpc_transport_client_cert_source_for_mtls(transport_class): + cred = ga_credentials.AnonymousCredentials() + + # Check ssl_channel_credentials is used if provided. + with mock.patch.object(transport_class, "create_channel") as mock_create_channel: + mock_ssl_channel_creds = mock.Mock() + transport_class( + host="squid.clam.whelk", + credentials=cred, + ssl_channel_credentials=mock_ssl_channel_creds, + ) + mock_create_channel.assert_called_once_with( + "squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_channel_creds, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls + # is used. 
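+    # The cert/key pair returned by the callback should be wrapped into SSL
+    # channel credentials via grpc.ssl_channel_credentials().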
+ with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): + with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: + transport_class( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback, + ) + expected_cert, expected_key = client_cert_source_callback() + mock_ssl_cred.assert_called_once_with( + certificate_chain=expected_cert, private_key=expected_key + ) + + +def test_regions_service_http_transport_client_cert_source_for_mtls(): + cred = ga_credentials.AnonymousCredentials() + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ) as mock_configure_mtls_channel: + transports.RegionsServiceRestTransport( + credentials=cred, client_cert_source_for_mtls=client_cert_source_callback + ) + mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + "rest", + ], +) +def test_regions_service_host_no_port(transport_name): + client = RegionsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="merchantapi.googleapis.com" + ), + transport=transport_name, + ) + assert client.transport._host == ( + "merchantapi.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://merchantapi.googleapis.com" + ) + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + "rest", + ], +) +def test_regions_service_host_with_port(transport_name): + client = RegionsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="merchantapi.googleapis.com:8000" + ), + transport=transport_name, + ) + assert client.transport._host == ( + "merchantapi.googleapis.com:8000" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://merchantapi.googleapis.com:8000" + ) + + +@pytest.mark.parametrize( + "transport_name", + [ + "rest", + ], +) +def test_regions_service_client_transport_session_collision(transport_name): + creds1 = ga_credentials.AnonymousCredentials() + creds2 = ga_credentials.AnonymousCredentials() + client1 = RegionsServiceClient( + credentials=creds1, + transport=transport_name, + ) + client2 = RegionsServiceClient( + credentials=creds2, + transport=transport_name, + ) + session1 = client1.transport.get_region._session + session2 = client2.transport.get_region._session + assert session1 != session2 + session1 = client1.transport.create_region._session + session2 = client2.transport.create_region._session + assert session1 != session2 + session1 = client1.transport.update_region._session + session2 = client2.transport.update_region._session + assert session1 != session2 + session1 = client1.transport.delete_region._session + session2 = client2.transport.delete_region._session + assert session1 != session2 + session1 = client1.transport.list_regions._session + session2 = client2.transport.list_regions._session + assert session1 != session2 + + +def test_regions_service_grpc_transport_channel(): + channel = grpc.secure_channel("http://localhost/", grpc.local_channel_credentials()) + + # Check that channel is used if provided. 
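+    # A caller-supplied channel is adopted as-is, so no SSL channel
+    # credentials are recorded on the transport.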
+ transport = transports.RegionsServiceGrpcTransport( + host="squid.clam.whelk", + channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None + + +def test_regions_service_grpc_asyncio_transport_channel(): + channel = aio.secure_channel("http://localhost/", grpc.local_channel_credentials()) + + # Check that channel is used if provided. + transport = transports.RegionsServiceGrpcAsyncIOTransport( + host="squid.clam.whelk", + channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. +@pytest.mark.parametrize( + "transport_class", + [ + transports.RegionsServiceGrpcTransport, + transports.RegionsServiceGrpcAsyncIOTransport, + ], +) +def test_regions_service_transport_channel_mtls_with_client_cert_source( + transport_class, +): + with mock.patch( + "grpc.ssl_channel_credentials", autospec=True + ) as grpc_ssl_channel_cred: + with mock.patch.object( + transport_class, "create_channel" + ) as grpc_create_channel: + mock_ssl_cred = mock.Mock() + grpc_ssl_channel_cred.return_value = mock_ssl_cred + + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + + cred = ga_credentials.AnonymousCredentials() + with pytest.warns(DeprecationWarning): + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (cred, None) + transport = transport_class( + host="squid.clam.whelk", + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=client_cert_source_callback, + ) + adc.assert_called_once() + + grpc_ssl_channel_cred.assert_called_once_with( + certificate_chain=b"cert bytes", private_key=b"key bytes" + ) + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + assert transport._ssl_channel_credentials == mock_ssl_cred + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. 
+@pytest.mark.parametrize( + "transport_class", + [ + transports.RegionsServiceGrpcTransport, + transports.RegionsServiceGrpcAsyncIOTransport, + ], +) +def test_regions_service_transport_channel_mtls_with_adc(transport_class): + mock_ssl_cred = mock.Mock() + with mock.patch.multiple( + "google.auth.transport.grpc.SslCredentials", + __init__=mock.Mock(return_value=None), + ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), + ): + with mock.patch.object( + transport_class, "create_channel" + ) as grpc_create_channel: + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + mock_cred = mock.Mock() + + with pytest.warns(DeprecationWarning): + transport = transport_class( + host="squid.clam.whelk", + credentials=mock_cred, + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=None, + ) + + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=mock_cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + + +def test_account_path(): + account = "squid" + expected = "accounts/{account}".format( + account=account, + ) + actual = RegionsServiceClient.account_path(account) + assert expected == actual + + +def test_parse_account_path(): + expected = { + "account": "clam", + } + path = RegionsServiceClient.account_path(**expected) + + # Check that the path construction is reversible. + actual = RegionsServiceClient.parse_account_path(path) + assert expected == actual + + +def test_region_path(): + account = "whelk" + region = "octopus" + expected = "accounts/{account}/regions/{region}".format( + account=account, + region=region, + ) + actual = RegionsServiceClient.region_path(account, region) + assert expected == actual + + +def test_parse_region_path(): + expected = { + "account": "oyster", + "region": "nudibranch", + } + path = RegionsServiceClient.region_path(**expected) + + # Check that the path construction is reversible. + actual = RegionsServiceClient.parse_region_path(path) + assert expected == actual + + +def test_common_billing_account_path(): + billing_account = "cuttlefish" + expected = "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + actual = RegionsServiceClient.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "mussel", + } + path = RegionsServiceClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. + actual = RegionsServiceClient.parse_common_billing_account_path(path) + assert expected == actual + + +def test_common_folder_path(): + folder = "winkle" + expected = "folders/{folder}".format( + folder=folder, + ) + actual = RegionsServiceClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "nautilus", + } + path = RegionsServiceClient.common_folder_path(**expected) + + # Check that the path construction is reversible. 
+ actual = RegionsServiceClient.parse_common_folder_path(path) + assert expected == actual + + +def test_common_organization_path(): + organization = "scallop" + expected = "organizations/{organization}".format( + organization=organization, + ) + actual = RegionsServiceClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "abalone", + } + path = RegionsServiceClient.common_organization_path(**expected) + + # Check that the path construction is reversible. + actual = RegionsServiceClient.parse_common_organization_path(path) + assert expected == actual + + +def test_common_project_path(): + project = "squid" + expected = "projects/{project}".format( + project=project, + ) + actual = RegionsServiceClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "clam", + } + path = RegionsServiceClient.common_project_path(**expected) + + # Check that the path construction is reversible. + actual = RegionsServiceClient.parse_common_project_path(path) + assert expected == actual + + +def test_common_location_path(): + project = "whelk" + location = "octopus" + expected = "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) + actual = RegionsServiceClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "oyster", + "location": "nudibranch", + } + path = RegionsServiceClient.common_location_path(**expected) + + # Check that the path construction is reversible. + actual = RegionsServiceClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object( + transports.RegionsServiceTransport, "_prep_wrapped_messages" + ) as prep: + client = RegionsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object( + transports.RegionsServiceTransport, "_prep_wrapped_messages" + ) as prep: + transport_class = RegionsServiceClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + +@pytest.mark.asyncio +async def test_transport_close_async(): + client = RegionsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + with mock.patch.object( + type(getattr(client.transport, "grpc_channel")), "close" + ) as close: + async with client: + close.assert_not_called() + close.assert_called_once() + + +def test_transport_close(): + transports = { + "rest": "_session", + "grpc": "_grpc_channel", + } + + for transport, close_name in transports.items(): + client = RegionsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + with mock.patch.object( + type(getattr(client.transport, close_name)), "close" + ) as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +def test_client_ctx(): + transports = [ + "rest", + "grpc", + ] + for transport in transports: + client = RegionsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + # Test client calls underlying transport. 
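+        # Leaving the `with client:` block should close the underlying transport.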
+ with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() + + +@pytest.mark.parametrize( + "client_class,transport_class", + [ + (RegionsServiceClient, transports.RegionsServiceGrpcTransport), + (RegionsServiceAsyncClient, transports.RegionsServiceGrpcAsyncIOTransport), + ], +) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) diff --git a/packages/google-shopping-merchant-accounts/tests/unit/gapic/merchant_accounts_v1beta/test_shipping_settings_service.py b/packages/google-shopping-merchant-accounts/tests/unit/gapic/merchant_accounts_v1beta/test_shipping_settings_service.py new file mode 100644 index 000000000000..9b2b92fcb739 --- /dev/null +++ b/packages/google-shopping-merchant-accounts/tests/unit/gapic/merchant_accounts_v1beta/test_shipping_settings_service.py @@ -0,0 +1,3460 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import os + +# try/except added for compatibility with python < 3.8 +try: + from unittest import mock + from unittest.mock import AsyncMock # pragma: NO COVER +except ImportError: # pragma: NO COVER + import mock + +from collections.abc import Iterable +import json +import math + +from google.api_core import gapic_v1, grpc_helpers, grpc_helpers_async, path_template +from google.api_core import api_core_version, client_options +from google.api_core import exceptions as core_exceptions +import google.auth +from google.auth import credentials as ga_credentials +from google.auth.exceptions import MutualTLSChannelError +from google.oauth2 import service_account +from google.protobuf import json_format +from google.shopping.type.types import types +import grpc +from grpc.experimental import aio +from proto.marshal.rules import wrappers +from proto.marshal.rules.dates import DurationRule, TimestampRule +import pytest +from requests import PreparedRequest, Request, Response +from requests.sessions import Session + +from google.shopping.merchant_accounts_v1beta.services.shipping_settings_service import ( + ShippingSettingsServiceAsyncClient, + ShippingSettingsServiceClient, + transports, +) +from google.shopping.merchant_accounts_v1beta.types import shippingsettings + + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + + +# If default endpoint is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. +def modify_default_endpoint(client): + return ( + "foo.googleapis.com" + if ("localhost" in client.DEFAULT_ENDPOINT) + else client.DEFAULT_ENDPOINT + ) + + +# If default endpoint template is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint template so the client can produce a different +# mtls endpoint for endpoint testing purposes. 
+def modify_default_endpoint_template(client): + return ( + "test.{UNIVERSE_DOMAIN}" + if ("localhost" in client._DEFAULT_ENDPOINT_TEMPLATE) + else client._DEFAULT_ENDPOINT_TEMPLATE + ) + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert ShippingSettingsServiceClient._get_default_mtls_endpoint(None) is None + assert ( + ShippingSettingsServiceClient._get_default_mtls_endpoint(api_endpoint) + == api_mtls_endpoint + ) + assert ( + ShippingSettingsServiceClient._get_default_mtls_endpoint(api_mtls_endpoint) + == api_mtls_endpoint + ) + assert ( + ShippingSettingsServiceClient._get_default_mtls_endpoint(sandbox_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + ShippingSettingsServiceClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + ShippingSettingsServiceClient._get_default_mtls_endpoint(non_googleapi) + == non_googleapi + ) + + +def test__read_environment_variables(): + assert ShippingSettingsServiceClient._read_environment_variables() == ( + False, + "auto", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + assert ShippingSettingsServiceClient._read_environment_variables() == ( + True, + "auto", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + assert ShippingSettingsServiceClient._read_environment_variables() == ( + False, + "auto", + None, + ) + + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError) as excinfo: + ShippingSettingsServiceClient._read_environment_variables() + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + assert ShippingSettingsServiceClient._read_environment_variables() == ( + False, + "never", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + assert ShippingSettingsServiceClient._read_environment_variables() == ( + False, + "always", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}): + assert ShippingSettingsServiceClient._read_environment_variables() == ( + False, + "auto", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + ShippingSettingsServiceClient._read_environment_variables() + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + with mock.patch.dict(os.environ, {"GOOGLE_CLOUD_UNIVERSE_DOMAIN": "foo.com"}): + assert ShippingSettingsServiceClient._read_environment_variables() == ( + False, + "auto", + "foo.com", + ) + + +def test__get_client_cert_source(): + mock_provided_cert_source = mock.Mock() + mock_default_cert_source = mock.Mock() + + assert ShippingSettingsServiceClient._get_client_cert_source(None, False) is None + assert ( + ShippingSettingsServiceClient._get_client_cert_source( + mock_provided_cert_source, False + ) + is None + ) + assert ( + ShippingSettingsServiceClient._get_client_cert_source( + mock_provided_cert_source, True + ) + == 
mock_provided_cert_source + ) + + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", return_value=True + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=mock_default_cert_source, + ): + assert ( + ShippingSettingsServiceClient._get_client_cert_source(None, True) + is mock_default_cert_source + ) + assert ( + ShippingSettingsServiceClient._get_client_cert_source( + mock_provided_cert_source, "true" + ) + is mock_provided_cert_source + ) + + +@mock.patch.object( + ShippingSettingsServiceClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(ShippingSettingsServiceClient), +) +@mock.patch.object( + ShippingSettingsServiceAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(ShippingSettingsServiceAsyncClient), +) +def test__get_api_endpoint(): + api_override = "foo.com" + mock_client_cert_source = mock.Mock() + default_universe = ShippingSettingsServiceClient._DEFAULT_UNIVERSE + default_endpoint = ShippingSettingsServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=default_universe + ) + mock_universe = "bar.com" + mock_endpoint = ShippingSettingsServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=mock_universe + ) + + assert ( + ShippingSettingsServiceClient._get_api_endpoint( + api_override, mock_client_cert_source, default_universe, "always" + ) + == api_override + ) + assert ( + ShippingSettingsServiceClient._get_api_endpoint( + None, mock_client_cert_source, default_universe, "auto" + ) + == ShippingSettingsServiceClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + ShippingSettingsServiceClient._get_api_endpoint( + None, None, default_universe, "auto" + ) + == default_endpoint + ) + assert ( + ShippingSettingsServiceClient._get_api_endpoint( + None, None, default_universe, "always" + ) + == ShippingSettingsServiceClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + ShippingSettingsServiceClient._get_api_endpoint( + None, mock_client_cert_source, default_universe, "always" + ) + == ShippingSettingsServiceClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + ShippingSettingsServiceClient._get_api_endpoint( + None, None, mock_universe, "never" + ) + == mock_endpoint + ) + assert ( + ShippingSettingsServiceClient._get_api_endpoint( + None, None, default_universe, "never" + ) + == default_endpoint + ) + + with pytest.raises(MutualTLSChannelError) as excinfo: + ShippingSettingsServiceClient._get_api_endpoint( + None, mock_client_cert_source, mock_universe, "auto" + ) + assert ( + str(excinfo.value) + == "mTLS is not supported in any universe other than googleapis.com." + ) + + +def test__get_universe_domain(): + client_universe_domain = "foo.com" + universe_domain_env = "bar.com" + + assert ( + ShippingSettingsServiceClient._get_universe_domain( + client_universe_domain, universe_domain_env + ) + == client_universe_domain + ) + assert ( + ShippingSettingsServiceClient._get_universe_domain(None, universe_domain_env) + == universe_domain_env + ) + assert ( + ShippingSettingsServiceClient._get_universe_domain(None, None) + == ShippingSettingsServiceClient._DEFAULT_UNIVERSE + ) + + with pytest.raises(ValueError) as excinfo: + ShippingSettingsServiceClient._get_universe_domain("", None) + assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
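+
+
+# Illustrative sketch added for clarity (not generator output): the helpers
+# exercised above resolve the universe domain with a fixed precedence. An
+# explicit client value wins over the GOOGLE_CLOUD_UNIVERSE_DOMAIN environment
+# variable, which in turn wins over the library default ("googleapis.com").
+# The example domain names below are placeholders.
+def test__get_universe_domain_precedence_sketch():
+    with mock.patch.dict(
+        os.environ, {"GOOGLE_CLOUD_UNIVERSE_DOMAIN": "env.example.com"}
+    ):
+        _, _, env_universe = (
+            ShippingSettingsServiceClient._read_environment_variables()
+        )
+        # An explicit client-level value takes precedence over the env var.
+        assert (
+            ShippingSettingsServiceClient._get_universe_domain(
+                "client.example.com", env_universe
+            )
+            == "client.example.com"
+        )
+        # Without a client-level value, the env var is used.
+        assert (
+            ShippingSettingsServiceClient._get_universe_domain(None, env_universe)
+            == "env.example.com"
+        )
+    # With neither, the default universe applies.
+    assert (
+        ShippingSettingsServiceClient._get_universe_domain(None, None)
+        == ShippingSettingsServiceClient._DEFAULT_UNIVERSE
+    )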
+ + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + ( + ShippingSettingsServiceClient, + transports.ShippingSettingsServiceGrpcTransport, + "grpc", + ), + ( + ShippingSettingsServiceClient, + transports.ShippingSettingsServiceRestTransport, + "rest", + ), + ], +) +def test__validate_universe_domain(client_class, transport_class, transport_name): + client = client_class( + transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) + ) + assert client._validate_universe_domain() == True + + # Test the case when universe is already validated. + assert client._validate_universe_domain() == True + + if transport_name == "grpc": + # Test the case where credentials are provided by the + # `local_channel_credentials`. The default universes in both match. + channel = grpc.secure_channel( + "http://localhost/", grpc.local_channel_credentials() + ) + client = client_class(transport=transport_class(channel=channel)) + assert client._validate_universe_domain() == True + + # Test the case where credentials do not exist: e.g. a transport is provided + # with no credentials. Validation should still succeed because there is no + # mismatch with non-existent credentials. + channel = grpc.secure_channel( + "http://localhost/", grpc.local_channel_credentials() + ) + transport = transport_class(channel=channel) + transport._credentials = None + client = client_class(transport=transport) + assert client._validate_universe_domain() == True + + # TODO: This is needed to cater for older versions of google-auth + # Make this test unconditional once the minimum supported version of + # google-auth becomes 2.23.0 or higher. + google_auth_major, google_auth_minor = [ + int(part) for part in google.auth.__version__.split(".")[0:2] + ] + if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): + credentials = ga_credentials.AnonymousCredentials() + credentials._universe_domain = "foo.com" + # Test the case when there is a universe mismatch from the credentials. + client = client_class(transport=transport_class(credentials=credentials)) + with pytest.raises(ValueError) as excinfo: + client._validate_universe_domain() + assert ( + str(excinfo.value) + == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." + ) + + # Test the case when there is a universe mismatch from the client. + # + # TODO: Make this test unconditional once the minimum supported version of + # google-api-core becomes 2.15.0 or higher. + api_core_major, api_core_minor = [ + int(part) for part in api_core_version.__version__.split(".")[0:2] + ] + if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): + client = client_class( + client_options={"universe_domain": "bar.com"}, + transport=transport_class( + credentials=ga_credentials.AnonymousCredentials(), + ), + ) + with pytest.raises(ValueError) as excinfo: + client._validate_universe_domain() + assert ( + str(excinfo.value) + == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." 
+ ) + + # Test that ValueError is raised if universe_domain is provided via client options and credentials is None + with pytest.raises(ValueError): + client._compare_universes("foo.bar", None) + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (ShippingSettingsServiceClient, "grpc"), + (ShippingSettingsServiceAsyncClient, "grpc_asyncio"), + (ShippingSettingsServiceClient, "rest"), + ], +) +def test_shipping_settings_service_client_from_service_account_info( + client_class, transport_name +): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_info" + ) as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info, transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + "merchantapi.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://merchantapi.googleapis.com" + ) + + +@pytest.mark.parametrize( + "transport_class,transport_name", + [ + (transports.ShippingSettingsServiceGrpcTransport, "grpc"), + (transports.ShippingSettingsServiceGrpcAsyncIOTransport, "grpc_asyncio"), + (transports.ShippingSettingsServiceRestTransport, "rest"), + ], +) +def test_shipping_settings_service_client_service_account_always_use_jwt( + transport_class, transport_name +): + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) + + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=False) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (ShippingSettingsServiceClient, "grpc"), + (ShippingSettingsServiceAsyncClient, "grpc_asyncio"), + (ShippingSettingsServiceClient, "rest"), + ], +) +def test_shipping_settings_service_client_from_service_account_file( + client_class, transport_name +): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_file" + ) as factory: + factory.return_value = creds + client = client_class.from_service_account_file( + "dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + client = client_class.from_service_account_json( + "dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + "merchantapi.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://merchantapi.googleapis.com" + ) + + +def test_shipping_settings_service_client_get_transport_class(): + transport = ShippingSettingsServiceClient.get_transport_class() + available_transports = [ + transports.ShippingSettingsServiceGrpcTransport, + transports.ShippingSettingsServiceRestTransport, + ] + assert transport in available_transports + + transport = 
ShippingSettingsServiceClient.get_transport_class("grpc") + assert transport == transports.ShippingSettingsServiceGrpcTransport + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + ( + ShippingSettingsServiceClient, + transports.ShippingSettingsServiceGrpcTransport, + "grpc", + ), + ( + ShippingSettingsServiceAsyncClient, + transports.ShippingSettingsServiceGrpcAsyncIOTransport, + "grpc_asyncio", + ), + ( + ShippingSettingsServiceClient, + transports.ShippingSettingsServiceRestTransport, + "rest", + ), + ], +) +@mock.patch.object( + ShippingSettingsServiceClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(ShippingSettingsServiceClient), +) +@mock.patch.object( + ShippingSettingsServiceAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(ShippingSettingsServiceAsyncClient), +) +def test_shipping_settings_service_client_client_options( + client_class, transport_class, transport_name +): + # Check that if channel is provided we won't create a new one. + with mock.patch.object(ShippingSettingsServiceClient, "get_transport_class") as gtc: + transport = transport_class(credentials=ga_credentials.AnonymousCredentials()) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object(ShippingSettingsServiceClient, "get_transport_class") as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. + options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name, client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. 
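+    # An unrecognized value should surface as MutualTLSChannelError when the
+    # client is constructed.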
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + client = client_class(transport=transport_name) + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError) as excinfo: + client = client_class(transport=transport_name) + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + # Check the case api_endpoint is provided + options = client_options.ClientOptions( + api_audience="https://language.googleapis.com" + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience="https://language.googleapis.com", + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,use_client_cert_env", + [ + ( + ShippingSettingsServiceClient, + transports.ShippingSettingsServiceGrpcTransport, + "grpc", + "true", + ), + ( + ShippingSettingsServiceAsyncClient, + transports.ShippingSettingsServiceGrpcAsyncIOTransport, + "grpc_asyncio", + "true", + ), + ( + ShippingSettingsServiceClient, + transports.ShippingSettingsServiceGrpcTransport, + "grpc", + "false", + ), + ( + ShippingSettingsServiceAsyncClient, + transports.ShippingSettingsServiceGrpcAsyncIOTransport, + "grpc_asyncio", + "false", + ), + ( + ShippingSettingsServiceClient, + transports.ShippingSettingsServiceRestTransport, + "rest", + "true", + ), + ( + ShippingSettingsServiceClient, + transports.ShippingSettingsServiceRestTransport, + "rest", + "false", + ), + ], +) +@mock.patch.object( + ShippingSettingsServiceClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(ShippingSettingsServiceClient), +) +@mock.patch.object( + ShippingSettingsServiceAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(ShippingSettingsServiceAsyncClient), +) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_shipping_settings_service_client_mtls_env_auto( + client_class, transport_class, transport_name, use_client_cert_env +): + # This tests 
the endpoint autoswitch behavior. Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. + + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + options = client_options.ClientOptions( + client_cert_source=client_cert_source_callback + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ) + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=client_cert_source_callback, + ): + if use_client_cert_env == "false": + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ) + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case client_cert_source and ADC client cert are not provided. 
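+    # With no certificate available from either source, the client should fall
+    # back to the regular (non-mTLS) endpoint.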
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class", [ShippingSettingsServiceClient, ShippingSettingsServiceAsyncClient] +) +@mock.patch.object( + ShippingSettingsServiceClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(ShippingSettingsServiceClient), +) +@mock.patch.object( + ShippingSettingsServiceAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(ShippingSettingsServiceAsyncClient), +) +def test_shipping_settings_service_client_get_mtls_endpoint_and_cert_source( + client_class, +): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. 
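+    # "auto" combined with a discoverable default cert should select the mTLS
+    # endpoint and that cert source.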
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=mock_client_cert_source, + ): + ( + api_endpoint, + cert_source, + ) = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + client_class.get_mtls_endpoint_and_cert_source() + + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError) as excinfo: + client_class.get_mtls_endpoint_and_cert_source() + + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + + +@pytest.mark.parametrize( + "client_class", [ShippingSettingsServiceClient, ShippingSettingsServiceAsyncClient] +) +@mock.patch.object( + ShippingSettingsServiceClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(ShippingSettingsServiceClient), +) +@mock.patch.object( + ShippingSettingsServiceAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(ShippingSettingsServiceAsyncClient), +) +def test_shipping_settings_service_client_client_api_endpoint(client_class): + mock_client_cert_source = client_cert_source_callback + api_override = "foo.com" + default_universe = ShippingSettingsServiceClient._DEFAULT_UNIVERSE + default_endpoint = ShippingSettingsServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=default_universe + ) + mock_universe = "bar.com" + mock_endpoint = ShippingSettingsServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=mock_universe + ) + + # If ClientOptions.api_endpoint is set and GOOGLE_API_USE_CLIENT_CERTIFICATE="true", + # use ClientOptions.api_endpoint as the api endpoint regardless. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ): + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=api_override + ) + client = client_class( + client_options=options, + credentials=ga_credentials.AnonymousCredentials(), + ) + assert client.api_endpoint == api_override + + # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="never", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + client = client_class(credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == default_endpoint + + # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="always", + # use the DEFAULT_MTLS_ENDPOINT as the api endpoint. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + client = client_class(credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + + # If ClientOptions.api_endpoint is not set, GOOGLE_API_USE_MTLS_ENDPOINT="auto" (default), + # GOOGLE_API_USE_CLIENT_CERTIFICATE="false" (default), default cert source doesn't exist, + # and ClientOptions.universe_domain="bar.com", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with universe domain as the api endpoint. + options = client_options.ClientOptions() + universe_exists = hasattr(options, "universe_domain") + if universe_exists: + options = client_options.ClientOptions(universe_domain=mock_universe) + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + else: + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + assert client.api_endpoint == ( + mock_endpoint if universe_exists else default_endpoint + ) + assert client.universe_domain == ( + mock_universe if universe_exists else default_universe + ) + + # If ClientOptions does not have a universe domain attribute and GOOGLE_API_USE_MTLS_ENDPOINT="never", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. + options = client_options.ClientOptions() + if hasattr(options, "universe_domain"): + delattr(options, "universe_domain") + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + assert client.api_endpoint == default_endpoint + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + ( + ShippingSettingsServiceClient, + transports.ShippingSettingsServiceGrpcTransport, + "grpc", + ), + ( + ShippingSettingsServiceAsyncClient, + transports.ShippingSettingsServiceGrpcAsyncIOTransport, + "grpc_asyncio", + ), + ( + ShippingSettingsServiceClient, + transports.ShippingSettingsServiceRestTransport, + "rest", + ), + ], +) +def test_shipping_settings_service_client_client_options_scopes( + client_class, transport_class, transport_name +): + # Check the case scopes are provided. + options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + ShippingSettingsServiceClient, + transports.ShippingSettingsServiceGrpcTransport, + "grpc", + grpc_helpers, + ), + ( + ShippingSettingsServiceAsyncClient, + transports.ShippingSettingsServiceGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + ( + ShippingSettingsServiceClient, + transports.ShippingSettingsServiceRestTransport, + "rest", + None, + ), + ], +) +def test_shipping_settings_service_client_client_options_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. 
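+    # The file path should be passed through to the transport unchanged; no
+    # credentials object is loaded at this stage.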
+ options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +def test_shipping_settings_service_client_client_options_from_dict(): + with mock.patch( + "google.shopping.merchant_accounts_v1beta.services.shipping_settings_service.transports.ShippingSettingsServiceGrpcTransport.__init__" + ) as grpc_transport: + grpc_transport.return_value = None + client = ShippingSettingsServiceClient( + client_options={"api_endpoint": "squid.clam.whelk"} + ) + grpc_transport.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + ShippingSettingsServiceClient, + transports.ShippingSettingsServiceGrpcTransport, + "grpc", + grpc_helpers, + ), + ( + ShippingSettingsServiceAsyncClient, + transports.ShippingSettingsServiceGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + ], +) +def test_shipping_settings_service_client_create_channel_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. + options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # test that the credentials from file are saved and used as the credentials. 
+ with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel" + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + file_creds = ga_credentials.AnonymousCredentials() + load_creds.return_value = (file_creds, None) + adc.return_value = (creds, None) + client = client_class(client_options=options, transport=transport_name) + create_channel.assert_called_with( + "merchantapi.googleapis.com:443", + credentials=file_creds, + credentials_file=None, + quota_project_id=None, + default_scopes=("https://www.googleapis.com/auth/content",), + scopes=None, + default_host="merchantapi.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "request_type", + [ + shippingsettings.GetShippingSettingsRequest, + dict, + ], +) +def test_get_shipping_settings(request_type, transport: str = "grpc"): + client = ShippingSettingsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_shipping_settings), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = shippingsettings.ShippingSettings( + name="name_value", + etag="etag_value", + ) + response = client.get_shipping_settings(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = shippingsettings.GetShippingSettingsRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, shippingsettings.ShippingSettings) + assert response.name == "name_value" + assert response.etag == "etag_value" + + +def test_get_shipping_settings_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ShippingSettingsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_shipping_settings), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.get_shipping_settings() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == shippingsettings.GetShippingSettingsRequest() + + +def test_get_shipping_settings_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = ShippingSettingsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. 
+ request = shippingsettings.GetShippingSettingsRequest( + name="name_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_shipping_settings), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.get_shipping_settings(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == shippingsettings.GetShippingSettingsRequest( + name="name_value", + ) + + +def test_get_shipping_settings_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ShippingSettingsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.get_shipping_settings + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.get_shipping_settings + ] = mock_rpc + request = {} + client.get_shipping_settings(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.get_shipping_settings(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_get_shipping_settings_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ShippingSettingsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_shipping_settings), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + shippingsettings.ShippingSettings( + name="name_value", + etag="etag_value", + ) + ) + response = await client.get_shipping_settings() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == shippingsettings.GetShippingSettingsRequest() + + +@pytest.mark.asyncio +async def test_get_shipping_settings_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = ShippingSettingsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.get_shipping_settings + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[ + client._client._transport.get_shipping_settings + ] = mock_object + + request = {} + await client.get_shipping_settings(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + await client.get_shipping_settings(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + +@pytest.mark.asyncio +async def test_get_shipping_settings_async( + transport: str = "grpc_asyncio", + request_type=shippingsettings.GetShippingSettingsRequest, +): + client = ShippingSettingsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_shipping_settings), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + shippingsettings.ShippingSettings( + name="name_value", + etag="etag_value", + ) + ) + response = await client.get_shipping_settings(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = shippingsettings.GetShippingSettingsRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, shippingsettings.ShippingSettings) + assert response.name == "name_value" + assert response.etag == "etag_value" + + +@pytest.mark.asyncio +async def test_get_shipping_settings_async_from_dict(): + await test_get_shipping_settings_async(request_type=dict) + + +def test_get_shipping_settings_field_headers(): + client = ShippingSettingsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = shippingsettings.GetShippingSettingsRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_shipping_settings), "__call__" + ) as call: + call.return_value = shippingsettings.ShippingSettings() + client.get_shipping_settings(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_shipping_settings_field_headers_async(): + client = ShippingSettingsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = shippingsettings.GetShippingSettingsRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_shipping_settings), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + shippingsettings.ShippingSettings() + ) + await client.get_shipping_settings(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_get_shipping_settings_flattened(): + client = ShippingSettingsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_shipping_settings), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = shippingsettings.ShippingSettings() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_shipping_settings( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_get_shipping_settings_flattened_error(): + client = ShippingSettingsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_shipping_settings( + shippingsettings.GetShippingSettingsRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_get_shipping_settings_flattened_async(): + client = ShippingSettingsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_shipping_settings), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = shippingsettings.ShippingSettings() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + shippingsettings.ShippingSettings() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_shipping_settings( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_get_shipping_settings_flattened_error_async(): + client = ShippingSettingsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.get_shipping_settings( + shippingsettings.GetShippingSettingsRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + shippingsettings.InsertShippingSettingsRequest, + dict, + ], +) +def test_insert_shipping_settings(request_type, transport: str = "grpc"): + client = ShippingSettingsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.insert_shipping_settings), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = shippingsettings.ShippingSettings( + name="name_value", + etag="etag_value", + ) + response = client.insert_shipping_settings(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = shippingsettings.InsertShippingSettingsRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, shippingsettings.ShippingSettings) + assert response.name == "name_value" + assert response.etag == "etag_value" + + +def test_insert_shipping_settings_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ShippingSettingsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.insert_shipping_settings), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.insert_shipping_settings() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == shippingsettings.InsertShippingSettingsRequest() + + +def test_insert_shipping_settings_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. 
+ client = ShippingSettingsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = shippingsettings.InsertShippingSettingsRequest( + parent="parent_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.insert_shipping_settings), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.insert_shipping_settings(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == shippingsettings.InsertShippingSettingsRequest( + parent="parent_value", + ) + + +def test_insert_shipping_settings_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ShippingSettingsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.insert_shipping_settings + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.insert_shipping_settings + ] = mock_rpc + request = {} + client.insert_shipping_settings(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.insert_shipping_settings(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_insert_shipping_settings_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ShippingSettingsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.insert_shipping_settings), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + shippingsettings.ShippingSettings( + name="name_value", + etag="etag_value", + ) + ) + response = await client.insert_shipping_settings() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == shippingsettings.InsertShippingSettingsRequest() + + +@pytest.mark.asyncio +async def test_insert_shipping_settings_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = ShippingSettingsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.insert_shipping_settings + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[ + client._client._transport.insert_shipping_settings + ] = mock_object + + request = {} + await client.insert_shipping_settings(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + await client.insert_shipping_settings(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + +@pytest.mark.asyncio +async def test_insert_shipping_settings_async( + transport: str = "grpc_asyncio", + request_type=shippingsettings.InsertShippingSettingsRequest, +): + client = ShippingSettingsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.insert_shipping_settings), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + shippingsettings.ShippingSettings( + name="name_value", + etag="etag_value", + ) + ) + response = await client.insert_shipping_settings(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = shippingsettings.InsertShippingSettingsRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, shippingsettings.ShippingSettings) + assert response.name == "name_value" + assert response.etag == "etag_value" + + +@pytest.mark.asyncio +async def test_insert_shipping_settings_async_from_dict(): + await test_insert_shipping_settings_async(request_type=dict) + + +def test_insert_shipping_settings_field_headers(): + client = ShippingSettingsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = shippingsettings.InsertShippingSettingsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.insert_shipping_settings), "__call__" + ) as call: + call.return_value = shippingsettings.ShippingSettings() + client.insert_shipping_settings(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_insert_shipping_settings_field_headers_async(): + client = ShippingSettingsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = shippingsettings.InsertShippingSettingsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.insert_shipping_settings), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + shippingsettings.ShippingSettings() + ) + await client.insert_shipping_settings(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.parametrize( + "request_type", + [ + shippingsettings.GetShippingSettingsRequest, + dict, + ], +) +def test_get_shipping_settings_rest(request_type): + client = ShippingSettingsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "accounts/sample1/shippingSettings"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = shippingsettings.ShippingSettings( + name="name_value", + etag="etag_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = shippingsettings.ShippingSettings.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_shipping_settings(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, shippingsettings.ShippingSettings) + assert response.name == "name_value" + assert response.etag == "etag_value" + + +def test_get_shipping_settings_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ShippingSettingsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.get_shipping_settings + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.get_shipping_settings + ] = mock_rpc + + request = {} + client.get_shipping_settings(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.get_shipping_settings(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_get_shipping_settings_rest_required_fields( + request_type=shippingsettings.GetShippingSettingsRequest, +): + transport_class = transports.ShippingSettingsServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_shipping_settings._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_shipping_settings._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = ShippingSettingsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = shippingsettings.ShippingSettings() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = shippingsettings.ShippingSettings.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_shipping_settings(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_shipping_settings_rest_unset_required_fields(): + transport = transports.ShippingSettingsServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_shipping_settings._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_shipping_settings_rest_interceptors(null_interceptor): + transport = transports.ShippingSettingsServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.ShippingSettingsServiceRestInterceptor(), + ) + client = ShippingSettingsServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ShippingSettingsServiceRestInterceptor, "post_get_shipping_settings" + ) as post, mock.patch.object( + transports.ShippingSettingsServiceRestInterceptor, "pre_get_shipping_settings" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = shippingsettings.GetShippingSettingsRequest.pb( + shippingsettings.GetShippingSettingsRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = shippingsettings.ShippingSettings.to_json( + shippingsettings.ShippingSettings() + ) + + request = shippingsettings.GetShippingSettingsRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = shippingsettings.ShippingSettings() + + client.get_shipping_settings( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_shipping_settings_rest_bad_request( + transport: str = "rest", request_type=shippingsettings.GetShippingSettingsRequest +): + client = ShippingSettingsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "accounts/sample1/shippingSettings"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_shipping_settings(request) + + +def test_get_shipping_settings_rest_flattened(): + client = ShippingSettingsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = shippingsettings.ShippingSettings() + + # get arguments that satisfy an http rule for this method + sample_request = {"name": "accounts/sample1/shippingSettings"} + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = shippingsettings.ShippingSettings.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_shipping_settings(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/accounts/v1beta/{name=accounts/*/shippingSettings}" + % client.transport._host, + args[1], + ) + + +def test_get_shipping_settings_rest_flattened_error(transport: str = "rest"): + client = ShippingSettingsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get_shipping_settings( + shippingsettings.GetShippingSettingsRequest(), + name="name_value", + ) + + +def test_get_shipping_settings_rest_error(): + client = ShippingSettingsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + shippingsettings.InsertShippingSettingsRequest, + dict, + ], +) +def test_insert_shipping_settings_rest(request_type): + client = ShippingSettingsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "accounts/sample1"} + request_init["shipping_setting"] = { + "name": "name_value", + "services": [ + { + "service_name": "service_name_value", + "active": True, + "delivery_countries": [ + "delivery_countries_value1", + "delivery_countries_value2", + ], + "currency_code": "currency_code_value", + "delivery_time": { + "min_transit_days": 1720, + "max_transit_days": 1722, + "cutoff_time": { + "hour": 446, + "minute": 658, + "time_zone": "time_zone_value", + }, + "min_handling_days": 1784, + "max_handling_days": 1786, + "transit_time_table": { + "postal_code_group_names": [ + "postal_code_group_names_value1", + "postal_code_group_names_value2", + ], + "transit_time_labels": [ + "transit_time_labels_value1", + "transit_time_labels_value2", + ], + "rows": [ + { + "values": [ + {"min_transit_days": 1720, "max_transit_days": 1722} + ] + } + ], + }, + "handling_business_day_config": {"business_days": [1]}, + "transit_business_day_config": {}, + "warehouse_based_delivery_times": [ + { + "carrier": "carrier_value", + "carrier_service": "carrier_service_value", + "warehouse": "warehouse_value", + } + ], + }, + "rate_groups": [ + { + "applicable_shipping_labels": [ + "applicable_shipping_labels_value1", + "applicable_shipping_labels_value2", + ], + "single_value": { + "no_shipping": True, + "flat_rate": { + "amount_micros": 1408, + "currency_code": "currency_code_value", + }, + "price_percentage": "price_percentage_value", + "carrier_rate": "carrier_rate_value", + "subtable": "subtable_value", + }, + "main_table": { + "name": "name_value", + "row_headers": { + "prices": {}, + "weights": [{"amount_micros": 1408, "unit": 1}], + "number_of_items": [ + "number_of_items_value1", + "number_of_items_value2", + ], + "postal_code_group_names": [ + "postal_code_group_names_value1", + "postal_code_group_names_value2", + ], + "locations": [ + { + "location_ids": [ + "location_ids_value1", + "location_ids_value2", + ] + } + ], + }, + "column_headers": {}, + "rows": [{"cells": {}}], + }, + "subtables": {}, + "carrier_rates": [ + { + "name": "name_value", + "carrier": "carrier_value", + "carrier_service": "carrier_service_value", + "origin_postal_code": "origin_postal_code_value", + "percentage_adjustment": "percentage_adjustment_value", + "flat_adjustment": {}, + } + ], + "name": "name_value", + } + ], + "shipment_type": 1, + "minimum_order_value": {}, + "minimum_order_value_table": { + "store_code_set_with_movs": [ + { + "store_codes": ["store_codes_value1", "store_codes_value2"], + "value": {}, + } + ] + }, + "store_config": { + "store_service_type": 1, + "store_codes": ["store_codes_value1", "store_codes_value2"], + "cutoff_config": { + "local_cutoff_time": {"hour": 446, "minute": 658}, + "store_close_offset_hours": 2584, + "no_delivery_post_cutoff": True, + }, + "service_radius": {"value": 541, "unit": 1}, + }, + "loyalty_programs": [ + { + 
"program_label": "program_label_value", + "loyalty_program_tiers": [{"tier_label": "tier_label_value"}], + } + ], + } + ], + "warehouses": [ + { + "name": "name_value", + "shipping_address": { + "street_address": "street_address_value", + "city": "city_value", + "administrative_area": "administrative_area_value", + "postal_code": "postal_code_value", + "region_code": "region_code_value", + }, + "cutoff_time": {"hour": 446, "minute": 658}, + "handling_days": 1365, + "business_day_config": {}, + } + ], + "etag": "etag_value", + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = shippingsettings.InsertShippingSettingsRequest.meta.fields[ + "shipping_setting" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["shipping_setting"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["shipping_setting"][field])): + del request_init["shipping_setting"][field][i][subfield] + else: + del request_init["shipping_setting"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = shippingsettings.ShippingSettings( + name="name_value", + etag="etag_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = shippingsettings.ShippingSettings.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.insert_shipping_settings(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, shippingsettings.ShippingSettings) + assert response.name == "name_value" + assert response.etag == "etag_value" + + +def test_insert_shipping_settings_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ShippingSettingsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.insert_shipping_settings + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.insert_shipping_settings + ] = mock_rpc + + request = {} + client.insert_shipping_settings(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.insert_shipping_settings(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_insert_shipping_settings_rest_required_fields( + request_type=shippingsettings.InsertShippingSettingsRequest, +): + transport_class = transports.ShippingSettingsServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).insert_shipping_settings._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).insert_shipping_settings._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = ShippingSettingsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = shippingsettings.ShippingSettings() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = shippingsettings.ShippingSettings.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.insert_shipping_settings(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_insert_shipping_settings_rest_unset_required_fields(): + transport = transports.ShippingSettingsServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.insert_shipping_settings._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(()) + & set( + ( + "parent", + "shippingSetting", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_insert_shipping_settings_rest_interceptors(null_interceptor): + transport = transports.ShippingSettingsServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.ShippingSettingsServiceRestInterceptor(), + ) + client = ShippingSettingsServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ShippingSettingsServiceRestInterceptor, + "post_insert_shipping_settings", + ) as post, mock.patch.object( + transports.ShippingSettingsServiceRestInterceptor, + "pre_insert_shipping_settings", + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = shippingsettings.InsertShippingSettingsRequest.pb( + shippingsettings.InsertShippingSettingsRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = shippingsettings.ShippingSettings.to_json( + shippingsettings.ShippingSettings() + ) + + request = shippingsettings.InsertShippingSettingsRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = shippingsettings.ShippingSettings() + + client.insert_shipping_settings( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_insert_shipping_settings_rest_bad_request( + transport: str = "rest", request_type=shippingsettings.InsertShippingSettingsRequest +): + client = ShippingSettingsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + 
transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "accounts/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.insert_shipping_settings(request) + + +def test_insert_shipping_settings_rest_error(): + client = ShippingSettingsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.ShippingSettingsServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = ShippingSettingsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.ShippingSettingsServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = ShippingSettingsServiceClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. + transport = transports.ShippingSettingsServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = ShippingSettingsServiceClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = ShippingSettingsServiceClient( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.ShippingSettingsServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = ShippingSettingsServiceClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.ShippingSettingsServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = ShippingSettingsServiceClient(transport=transport) + assert client.transport is transport + + +def test_transport_get_channel(): + # A client may be instantiated with a custom transport instance. 
+ transport = transports.ShippingSettingsServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + transport = transports.ShippingSettingsServiceGrpcAsyncIOTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.ShippingSettingsServiceGrpcTransport, + transports.ShippingSettingsServiceGrpcAsyncIOTransport, + transports.ShippingSettingsServiceRestTransport, + ], +) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "rest", + ], +) +def test_transport_kind(transport_name): + transport = ShippingSettingsServiceClient.get_transport_class(transport_name)( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert transport.kind == transport_name + + +def test_transport_grpc_default(): + # A client should use the gRPC transport by default. + client = ShippingSettingsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert isinstance( + client.transport, + transports.ShippingSettingsServiceGrpcTransport, + ) + + +def test_shipping_settings_service_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.ShippingSettingsServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json", + ) + + +def test_shipping_settings_service_base_transport(): + # Instantiate the base transport. + with mock.patch( + "google.shopping.merchant_accounts_v1beta.services.shipping_settings_service.transports.ShippingSettingsServiceTransport.__init__" + ) as Transport: + Transport.return_value = None + transport = transports.ShippingSettingsServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. 
+ methods = ( + "get_shipping_settings", + "insert_shipping_settings", + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + with pytest.raises(NotImplementedError): + transport.close() + + # Catch all for all remaining methods and properties + remainder = [ + "kind", + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + + +def test_shipping_settings_service_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch( + "google.shopping.merchant_accounts_v1beta.services.shipping_settings_service.transports.ShippingSettingsServiceTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.ShippingSettingsServiceTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with( + "credentials.json", + scopes=None, + default_scopes=("https://www.googleapis.com/auth/content",), + quota_project_id="octopus", + ) + + +def test_shipping_settings_service_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. + with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( + "google.shopping.merchant_accounts_v1beta.services.shipping_settings_service.transports.ShippingSettingsServiceTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.ShippingSettingsServiceTransport() + adc.assert_called_once() + + +def test_shipping_settings_service_auth_adc(): + # If no credentials are provided, we should use ADC credentials. + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + ShippingSettingsServiceClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=("https://www.googleapis.com/auth/content",), + quota_project_id=None, + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.ShippingSettingsServiceGrpcTransport, + transports.ShippingSettingsServiceGrpcAsyncIOTransport, + ], +) +def test_shipping_settings_service_transport_auth_adc(transport_class): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
+ with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + adc.assert_called_once_with( + scopes=["1", "2"], + default_scopes=("https://www.googleapis.com/auth/content",), + quota_project_id="octopus", + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.ShippingSettingsServiceGrpcTransport, + transports.ShippingSettingsServiceGrpcAsyncIOTransport, + transports.ShippingSettingsServiceRestTransport, + ], +) +def test_shipping_settings_service_transport_auth_gdch_credentials(transport_class): + host = "https://language.com" + api_audience_tests = [None, "https://language2.com"] + api_audience_expect = [host, "https://language2.com"] + for t, e in zip(api_audience_tests, api_audience_expect): + with mock.patch.object(google.auth, "default", autospec=True) as adc: + gdch_mock = mock.MagicMock() + type(gdch_mock).with_gdch_audience = mock.PropertyMock( + return_value=gdch_mock + ) + adc.return_value = (gdch_mock, None) + transport_class(host=host, api_audience=t) + gdch_mock.with_gdch_audience.assert_called_once_with(e) + + +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.ShippingSettingsServiceGrpcTransport, grpc_helpers), + (transports.ShippingSettingsServiceGrpcAsyncIOTransport, grpc_helpers_async), + ], +) +def test_shipping_settings_service_transport_create_channel( + transport_class, grpc_helpers +): + # If credentials and host are not provided, the transport class should use + # ADC credentials. + with mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + + create_channel.assert_called_with( + "merchantapi.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + default_scopes=("https://www.googleapis.com/auth/content",), + scopes=["1", "2"], + default_host="merchantapi.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.ShippingSettingsServiceGrpcTransport, + transports.ShippingSettingsServiceGrpcAsyncIOTransport, + ], +) +def test_shipping_settings_service_grpc_transport_client_cert_source_for_mtls( + transport_class, +): + cred = ga_credentials.AnonymousCredentials() + + # Check ssl_channel_credentials is used if provided. 
+ with mock.patch.object(transport_class, "create_channel") as mock_create_channel: + mock_ssl_channel_creds = mock.Mock() + transport_class( + host="squid.clam.whelk", + credentials=cred, + ssl_channel_credentials=mock_ssl_channel_creds, + ) + mock_create_channel.assert_called_once_with( + "squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_channel_creds, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls + # is used. + with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): + with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: + transport_class( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback, + ) + expected_cert, expected_key = client_cert_source_callback() + mock_ssl_cred.assert_called_once_with( + certificate_chain=expected_cert, private_key=expected_key + ) + + +def test_shipping_settings_service_http_transport_client_cert_source_for_mtls(): + cred = ga_credentials.AnonymousCredentials() + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ) as mock_configure_mtls_channel: + transports.ShippingSettingsServiceRestTransport( + credentials=cred, client_cert_source_for_mtls=client_cert_source_callback + ) + mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + "rest", + ], +) +def test_shipping_settings_service_host_no_port(transport_name): + client = ShippingSettingsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="merchantapi.googleapis.com" + ), + transport=transport_name, + ) + assert client.transport._host == ( + "merchantapi.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://merchantapi.googleapis.com" + ) + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + "rest", + ], +) +def test_shipping_settings_service_host_with_port(transport_name): + client = ShippingSettingsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="merchantapi.googleapis.com:8000" + ), + transport=transport_name, + ) + assert client.transport._host == ( + "merchantapi.googleapis.com:8000" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://merchantapi.googleapis.com:8000" + ) + + +@pytest.mark.parametrize( + "transport_name", + [ + "rest", + ], +) +def test_shipping_settings_service_client_transport_session_collision(transport_name): + creds1 = ga_credentials.AnonymousCredentials() + creds2 = ga_credentials.AnonymousCredentials() + client1 = ShippingSettingsServiceClient( + credentials=creds1, + transport=transport_name, + ) + client2 = ShippingSettingsServiceClient( + credentials=creds2, + transport=transport_name, + ) + session1 = client1.transport.get_shipping_settings._session + session2 = client2.transport.get_shipping_settings._session + assert session1 != session2 + session1 = client1.transport.insert_shipping_settings._session + session2 = client2.transport.insert_shipping_settings._session + assert session1 != session2 
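[Editor's note, not part of the generated diff] The host tests above check how `ClientOptions(api_endpoint=...)` is resolved per transport: gRPC transports keep the bare `host:port`, while the REST transport resolves to an `https://` URL. As a minimal usage sketch of that behavior, reusing only constructor arguments and attributes already exercised in these tests (anonymous credentials, the `"rest"` transport name, and the private `transport._host` check mirror the tests and are illustrative, not recommended production usage; the import path follows the one used by these generated test files):

```python
from google.api_core import client_options
from google.auth import credentials as ga_credentials

from google.shopping.merchant_accounts_v1beta.services.shipping_settings_service import (
    ShippingSettingsServiceClient,
)

# Point the client at an explicit endpoint with a port, as the host/port tests do.
client = ShippingSettingsServiceClient(
    credentials=ga_credentials.AnonymousCredentials(),
    client_options=client_options.ClientOptions(
        api_endpoint="merchantapi.googleapis.com:8000"
    ),
    transport="rest",
)

# With the REST transport the resolved host keeps the explicit port and gains
# an https:// scheme; the gRPC transports would instead resolve to "host:8000".
assert client.transport._host == "https://merchantapi.googleapis.com:8000"
```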
+ + +def test_shipping_settings_service_grpc_transport_channel(): + channel = grpc.secure_channel("http://localhost/", grpc.local_channel_credentials()) + + # Check that channel is used if provided. + transport = transports.ShippingSettingsServiceGrpcTransport( + host="squid.clam.whelk", + channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None + + +def test_shipping_settings_service_grpc_asyncio_transport_channel(): + channel = aio.secure_channel("http://localhost/", grpc.local_channel_credentials()) + + # Check that channel is used if provided. + transport = transports.ShippingSettingsServiceGrpcAsyncIOTransport( + host="squid.clam.whelk", + channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. +@pytest.mark.parametrize( + "transport_class", + [ + transports.ShippingSettingsServiceGrpcTransport, + transports.ShippingSettingsServiceGrpcAsyncIOTransport, + ], +) +def test_shipping_settings_service_transport_channel_mtls_with_client_cert_source( + transport_class, +): + with mock.patch( + "grpc.ssl_channel_credentials", autospec=True + ) as grpc_ssl_channel_cred: + with mock.patch.object( + transport_class, "create_channel" + ) as grpc_create_channel: + mock_ssl_cred = mock.Mock() + grpc_ssl_channel_cred.return_value = mock_ssl_cred + + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + + cred = ga_credentials.AnonymousCredentials() + with pytest.warns(DeprecationWarning): + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (cred, None) + transport = transport_class( + host="squid.clam.whelk", + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=client_cert_source_callback, + ) + adc.assert_called_once() + + grpc_ssl_channel_cred.assert_called_once_with( + certificate_chain=b"cert bytes", private_key=b"key bytes" + ) + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + assert transport._ssl_channel_credentials == mock_ssl_cred + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. 
+@pytest.mark.parametrize( + "transport_class", + [ + transports.ShippingSettingsServiceGrpcTransport, + transports.ShippingSettingsServiceGrpcAsyncIOTransport, + ], +) +def test_shipping_settings_service_transport_channel_mtls_with_adc(transport_class): + mock_ssl_cred = mock.Mock() + with mock.patch.multiple( + "google.auth.transport.grpc.SslCredentials", + __init__=mock.Mock(return_value=None), + ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), + ): + with mock.patch.object( + transport_class, "create_channel" + ) as grpc_create_channel: + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + mock_cred = mock.Mock() + + with pytest.warns(DeprecationWarning): + transport = transport_class( + host="squid.clam.whelk", + credentials=mock_cred, + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=None, + ) + + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=mock_cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + + +def test_shipping_settings_path(): + account = "squid" + expected = "accounts/{account}/shippingSettings".format( + account=account, + ) + actual = ShippingSettingsServiceClient.shipping_settings_path(account) + assert expected == actual + + +def test_parse_shipping_settings_path(): + expected = { + "account": "clam", + } + path = ShippingSettingsServiceClient.shipping_settings_path(**expected) + + # Check that the path construction is reversible. + actual = ShippingSettingsServiceClient.parse_shipping_settings_path(path) + assert expected == actual + + +def test_common_billing_account_path(): + billing_account = "whelk" + expected = "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + actual = ShippingSettingsServiceClient.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "octopus", + } + path = ShippingSettingsServiceClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. + actual = ShippingSettingsServiceClient.parse_common_billing_account_path(path) + assert expected == actual + + +def test_common_folder_path(): + folder = "oyster" + expected = "folders/{folder}".format( + folder=folder, + ) + actual = ShippingSettingsServiceClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "nudibranch", + } + path = ShippingSettingsServiceClient.common_folder_path(**expected) + + # Check that the path construction is reversible. + actual = ShippingSettingsServiceClient.parse_common_folder_path(path) + assert expected == actual + + +def test_common_organization_path(): + organization = "cuttlefish" + expected = "organizations/{organization}".format( + organization=organization, + ) + actual = ShippingSettingsServiceClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "mussel", + } + path = ShippingSettingsServiceClient.common_organization_path(**expected) + + # Check that the path construction is reversible. 
+ actual = ShippingSettingsServiceClient.parse_common_organization_path(path) + assert expected == actual + + +def test_common_project_path(): + project = "winkle" + expected = "projects/{project}".format( + project=project, + ) + actual = ShippingSettingsServiceClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "nautilus", + } + path = ShippingSettingsServiceClient.common_project_path(**expected) + + # Check that the path construction is reversible. + actual = ShippingSettingsServiceClient.parse_common_project_path(path) + assert expected == actual + + +def test_common_location_path(): + project = "scallop" + location = "abalone" + expected = "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) + actual = ShippingSettingsServiceClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "squid", + "location": "clam", + } + path = ShippingSettingsServiceClient.common_location_path(**expected) + + # Check that the path construction is reversible. + actual = ShippingSettingsServiceClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object( + transports.ShippingSettingsServiceTransport, "_prep_wrapped_messages" + ) as prep: + client = ShippingSettingsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object( + transports.ShippingSettingsServiceTransport, "_prep_wrapped_messages" + ) as prep: + transport_class = ShippingSettingsServiceClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + +@pytest.mark.asyncio +async def test_transport_close_async(): + client = ShippingSettingsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + with mock.patch.object( + type(getattr(client.transport, "grpc_channel")), "close" + ) as close: + async with client: + close.assert_not_called() + close.assert_called_once() + + +def test_transport_close(): + transports = { + "rest": "_session", + "grpc": "_grpc_channel", + } + + for transport, close_name in transports.items(): + client = ShippingSettingsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + with mock.patch.object( + type(getattr(client.transport, close_name)), "close" + ) as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +def test_client_ctx(): + transports = [ + "rest", + "grpc", + ] + for transport in transports: + client = ShippingSettingsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + # Test client calls underlying transport. 
+ with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() + + +@pytest.mark.parametrize( + "client_class,transport_class", + [ + ( + ShippingSettingsServiceClient, + transports.ShippingSettingsServiceGrpcTransport, + ), + ( + ShippingSettingsServiceAsyncClient, + transports.ShippingSettingsServiceGrpcAsyncIOTransport, + ), + ], +) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) diff --git a/packages/google-shopping-merchant-accounts/tests/unit/gapic/merchant_accounts_v1beta/test_terms_of_service_agreement_state_service.py b/packages/google-shopping-merchant-accounts/tests/unit/gapic/merchant_accounts_v1beta/test_terms_of_service_agreement_state_service.py new file mode 100644 index 000000000000..0645ded3118a --- /dev/null +++ b/packages/google-shopping-merchant-accounts/tests/unit/gapic/merchant_accounts_v1beta/test_terms_of_service_agreement_state_service.py @@ -0,0 +1,3661 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import os + +# try/except added for compatibility with python < 3.8 +try: + from unittest import mock + from unittest.mock import AsyncMock # pragma: NO COVER +except ImportError: # pragma: NO COVER + import mock + +from collections.abc import Iterable +import json +import math + +from google.api_core import gapic_v1, grpc_helpers, grpc_helpers_async, path_template +from google.api_core import api_core_version, client_options +from google.api_core import exceptions as core_exceptions +import google.auth +from google.auth import credentials as ga_credentials +from google.auth.exceptions import MutualTLSChannelError +from google.oauth2 import service_account +from google.protobuf import json_format +import grpc +from grpc.experimental import aio +from proto.marshal.rules import wrappers +from proto.marshal.rules.dates import DurationRule, TimestampRule +import pytest +from requests import PreparedRequest, Request, Response +from requests.sessions import Session + +from google.shopping.merchant_accounts_v1beta.services.terms_of_service_agreement_state_service import ( + TermsOfServiceAgreementStateServiceAsyncClient, + TermsOfServiceAgreementStateServiceClient, + transports, +) +from google.shopping.merchant_accounts_v1beta.types import ( + termsofserviceagreementstate, + termsofservicekind, +) + + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + + +# If default endpoint is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. +def modify_default_endpoint(client): + return ( + "foo.googleapis.com" + if ("localhost" in client.DEFAULT_ENDPOINT) + else client.DEFAULT_ENDPOINT + ) + + +# If default endpoint template is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint template so the client can produce a different +# mtls endpoint for endpoint testing purposes. 
+def modify_default_endpoint_template(client): + return ( + "test.{UNIVERSE_DOMAIN}" + if ("localhost" in client._DEFAULT_ENDPOINT_TEMPLATE) + else client._DEFAULT_ENDPOINT_TEMPLATE + ) + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert ( + TermsOfServiceAgreementStateServiceClient._get_default_mtls_endpoint(None) + is None + ) + assert ( + TermsOfServiceAgreementStateServiceClient._get_default_mtls_endpoint( + api_endpoint + ) + == api_mtls_endpoint + ) + assert ( + TermsOfServiceAgreementStateServiceClient._get_default_mtls_endpoint( + api_mtls_endpoint + ) + == api_mtls_endpoint + ) + assert ( + TermsOfServiceAgreementStateServiceClient._get_default_mtls_endpoint( + sandbox_endpoint + ) + == sandbox_mtls_endpoint + ) + assert ( + TermsOfServiceAgreementStateServiceClient._get_default_mtls_endpoint( + sandbox_mtls_endpoint + ) + == sandbox_mtls_endpoint + ) + assert ( + TermsOfServiceAgreementStateServiceClient._get_default_mtls_endpoint( + non_googleapi + ) + == non_googleapi + ) + + +def test__read_environment_variables(): + assert TermsOfServiceAgreementStateServiceClient._read_environment_variables() == ( + False, + "auto", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + assert ( + TermsOfServiceAgreementStateServiceClient._read_environment_variables() + == (True, "auto", None) + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + assert ( + TermsOfServiceAgreementStateServiceClient._read_environment_variables() + == (False, "auto", None) + ) + + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError) as excinfo: + TermsOfServiceAgreementStateServiceClient._read_environment_variables() + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + assert ( + TermsOfServiceAgreementStateServiceClient._read_environment_variables() + == (False, "never", None) + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + assert ( + TermsOfServiceAgreementStateServiceClient._read_environment_variables() + == (False, "always", None) + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}): + assert ( + TermsOfServiceAgreementStateServiceClient._read_environment_variables() + == (False, "auto", None) + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + TermsOfServiceAgreementStateServiceClient._read_environment_variables() + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + with mock.patch.dict(os.environ, {"GOOGLE_CLOUD_UNIVERSE_DOMAIN": "foo.com"}): + assert ( + TermsOfServiceAgreementStateServiceClient._read_environment_variables() + == (False, "auto", "foo.com") + ) + + +def test__get_client_cert_source(): + mock_provided_cert_source = mock.Mock() + mock_default_cert_source = mock.Mock() + + assert ( + TermsOfServiceAgreementStateServiceClient._get_client_cert_source(None, False) + is None + ) + assert ( + 
TermsOfServiceAgreementStateServiceClient._get_client_cert_source( + mock_provided_cert_source, False + ) + is None + ) + assert ( + TermsOfServiceAgreementStateServiceClient._get_client_cert_source( + mock_provided_cert_source, True + ) + == mock_provided_cert_source + ) + + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", return_value=True + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=mock_default_cert_source, + ): + assert ( + TermsOfServiceAgreementStateServiceClient._get_client_cert_source( + None, True + ) + is mock_default_cert_source + ) + assert ( + TermsOfServiceAgreementStateServiceClient._get_client_cert_source( + mock_provided_cert_source, "true" + ) + is mock_provided_cert_source + ) + + +@mock.patch.object( + TermsOfServiceAgreementStateServiceClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(TermsOfServiceAgreementStateServiceClient), +) +@mock.patch.object( + TermsOfServiceAgreementStateServiceAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(TermsOfServiceAgreementStateServiceAsyncClient), +) +def test__get_api_endpoint(): + api_override = "foo.com" + mock_client_cert_source = mock.Mock() + default_universe = TermsOfServiceAgreementStateServiceClient._DEFAULT_UNIVERSE + default_endpoint = ( + TermsOfServiceAgreementStateServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=default_universe + ) + ) + mock_universe = "bar.com" + mock_endpoint = ( + TermsOfServiceAgreementStateServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=mock_universe + ) + ) + + assert ( + TermsOfServiceAgreementStateServiceClient._get_api_endpoint( + api_override, mock_client_cert_source, default_universe, "always" + ) + == api_override + ) + assert ( + TermsOfServiceAgreementStateServiceClient._get_api_endpoint( + None, mock_client_cert_source, default_universe, "auto" + ) + == TermsOfServiceAgreementStateServiceClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + TermsOfServiceAgreementStateServiceClient._get_api_endpoint( + None, None, default_universe, "auto" + ) + == default_endpoint + ) + assert ( + TermsOfServiceAgreementStateServiceClient._get_api_endpoint( + None, None, default_universe, "always" + ) + == TermsOfServiceAgreementStateServiceClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + TermsOfServiceAgreementStateServiceClient._get_api_endpoint( + None, mock_client_cert_source, default_universe, "always" + ) + == TermsOfServiceAgreementStateServiceClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + TermsOfServiceAgreementStateServiceClient._get_api_endpoint( + None, None, mock_universe, "never" + ) + == mock_endpoint + ) + assert ( + TermsOfServiceAgreementStateServiceClient._get_api_endpoint( + None, None, default_universe, "never" + ) + == default_endpoint + ) + + with pytest.raises(MutualTLSChannelError) as excinfo: + TermsOfServiceAgreementStateServiceClient._get_api_endpoint( + None, mock_client_cert_source, mock_universe, "auto" + ) + assert ( + str(excinfo.value) + == "mTLS is not supported in any universe other than googleapis.com." 
+ ) + + +def test__get_universe_domain(): + client_universe_domain = "foo.com" + universe_domain_env = "bar.com" + + assert ( + TermsOfServiceAgreementStateServiceClient._get_universe_domain( + client_universe_domain, universe_domain_env + ) + == client_universe_domain + ) + assert ( + TermsOfServiceAgreementStateServiceClient._get_universe_domain( + None, universe_domain_env + ) + == universe_domain_env + ) + assert ( + TermsOfServiceAgreementStateServiceClient._get_universe_domain(None, None) + == TermsOfServiceAgreementStateServiceClient._DEFAULT_UNIVERSE + ) + + with pytest.raises(ValueError) as excinfo: + TermsOfServiceAgreementStateServiceClient._get_universe_domain("", None) + assert str(excinfo.value) == "Universe Domain cannot be an empty string." + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + ( + TermsOfServiceAgreementStateServiceClient, + transports.TermsOfServiceAgreementStateServiceGrpcTransport, + "grpc", + ), + ( + TermsOfServiceAgreementStateServiceClient, + transports.TermsOfServiceAgreementStateServiceRestTransport, + "rest", + ), + ], +) +def test__validate_universe_domain(client_class, transport_class, transport_name): + client = client_class( + transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) + ) + assert client._validate_universe_domain() == True + + # Test the case when universe is already validated. + assert client._validate_universe_domain() == True + + if transport_name == "grpc": + # Test the case where credentials are provided by the + # `local_channel_credentials`. The default universes in both match. + channel = grpc.secure_channel( + "http://localhost/", grpc.local_channel_credentials() + ) + client = client_class(transport=transport_class(channel=channel)) + assert client._validate_universe_domain() == True + + # Test the case where credentials do not exist: e.g. a transport is provided + # with no credentials. Validation should still succeed because there is no + # mismatch with non-existent credentials. + channel = grpc.secure_channel( + "http://localhost/", grpc.local_channel_credentials() + ) + transport = transport_class(channel=channel) + transport._credentials = None + client = client_class(transport=transport) + assert client._validate_universe_domain() == True + + # TODO: This is needed to cater for older versions of google-auth + # Make this test unconditional once the minimum supported version of + # google-auth becomes 2.23.0 or higher. + google_auth_major, google_auth_minor = [ + int(part) for part in google.auth.__version__.split(".")[0:2] + ] + if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): + credentials = ga_credentials.AnonymousCredentials() + credentials._universe_domain = "foo.com" + # Test the case when there is a universe mismatch from the credentials. + client = client_class(transport=transport_class(credentials=credentials)) + with pytest.raises(ValueError) as excinfo: + client._validate_universe_domain() + assert ( + str(excinfo.value) + == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." + ) + + # Test the case when there is a universe mismatch from the client. 
+ # + # TODO: Make this test unconditional once the minimum supported version of + # google-api-core becomes 2.15.0 or higher. + api_core_major, api_core_minor = [ + int(part) for part in api_core_version.__version__.split(".")[0:2] + ] + if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): + client = client_class( + client_options={"universe_domain": "bar.com"}, + transport=transport_class( + credentials=ga_credentials.AnonymousCredentials(), + ), + ) + with pytest.raises(ValueError) as excinfo: + client._validate_universe_domain() + assert ( + str(excinfo.value) + == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." + ) + + # Test that ValueError is raised if universe_domain is provided via client options and credentials is None + with pytest.raises(ValueError): + client._compare_universes("foo.bar", None) + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (TermsOfServiceAgreementStateServiceClient, "grpc"), + (TermsOfServiceAgreementStateServiceAsyncClient, "grpc_asyncio"), + (TermsOfServiceAgreementStateServiceClient, "rest"), + ], +) +def test_terms_of_service_agreement_state_service_client_from_service_account_info( + client_class, transport_name +): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_info" + ) as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info, transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + "merchantapi.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://merchantapi.googleapis.com" + ) + + +@pytest.mark.parametrize( + "transport_class,transport_name", + [ + (transports.TermsOfServiceAgreementStateServiceGrpcTransport, "grpc"), + ( + transports.TermsOfServiceAgreementStateServiceGrpcAsyncIOTransport, + "grpc_asyncio", + ), + (transports.TermsOfServiceAgreementStateServiceRestTransport, "rest"), + ], +) +def test_terms_of_service_agreement_state_service_client_service_account_always_use_jwt( + transport_class, transport_name +): + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) + + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=False) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (TermsOfServiceAgreementStateServiceClient, "grpc"), + (TermsOfServiceAgreementStateServiceAsyncClient, "grpc_asyncio"), + (TermsOfServiceAgreementStateServiceClient, "rest"), + ], +) +def test_terms_of_service_agreement_state_service_client_from_service_account_file( + client_class, transport_name +): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_file" + ) as factory: + factory.return_value = creds + 
client = client_class.from_service_account_file( + "dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + client = client_class.from_service_account_json( + "dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + "merchantapi.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://merchantapi.googleapis.com" + ) + + +def test_terms_of_service_agreement_state_service_client_get_transport_class(): + transport = TermsOfServiceAgreementStateServiceClient.get_transport_class() + available_transports = [ + transports.TermsOfServiceAgreementStateServiceGrpcTransport, + transports.TermsOfServiceAgreementStateServiceRestTransport, + ] + assert transport in available_transports + + transport = TermsOfServiceAgreementStateServiceClient.get_transport_class("grpc") + assert transport == transports.TermsOfServiceAgreementStateServiceGrpcTransport + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + ( + TermsOfServiceAgreementStateServiceClient, + transports.TermsOfServiceAgreementStateServiceGrpcTransport, + "grpc", + ), + ( + TermsOfServiceAgreementStateServiceAsyncClient, + transports.TermsOfServiceAgreementStateServiceGrpcAsyncIOTransport, + "grpc_asyncio", + ), + ( + TermsOfServiceAgreementStateServiceClient, + transports.TermsOfServiceAgreementStateServiceRestTransport, + "rest", + ), + ], +) +@mock.patch.object( + TermsOfServiceAgreementStateServiceClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(TermsOfServiceAgreementStateServiceClient), +) +@mock.patch.object( + TermsOfServiceAgreementStateServiceAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(TermsOfServiceAgreementStateServiceAsyncClient), +) +def test_terms_of_service_agreement_state_service_client_client_options( + client_class, transport_class, transport_name +): + # Check that if channel is provided we won't create a new one. + with mock.patch.object( + TermsOfServiceAgreementStateServiceClient, "get_transport_class" + ) as gtc: + transport = transport_class(credentials=ga_credentials.AnonymousCredentials()) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object( + TermsOfServiceAgreementStateServiceClient, "get_transport_class" + ) as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. + options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name, client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + client = client_class(transport=transport_name) + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError) as excinfo: + client = client_class(transport=transport_name) + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + # Check the case api_endpoint is provided + options = client_options.ClientOptions( + api_audience="https://language.googleapis.com" + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience="https://language.googleapis.com", + ) + + +@pytest.mark.parametrize( + 
"client_class,transport_class,transport_name,use_client_cert_env", + [ + ( + TermsOfServiceAgreementStateServiceClient, + transports.TermsOfServiceAgreementStateServiceGrpcTransport, + "grpc", + "true", + ), + ( + TermsOfServiceAgreementStateServiceAsyncClient, + transports.TermsOfServiceAgreementStateServiceGrpcAsyncIOTransport, + "grpc_asyncio", + "true", + ), + ( + TermsOfServiceAgreementStateServiceClient, + transports.TermsOfServiceAgreementStateServiceGrpcTransport, + "grpc", + "false", + ), + ( + TermsOfServiceAgreementStateServiceAsyncClient, + transports.TermsOfServiceAgreementStateServiceGrpcAsyncIOTransport, + "grpc_asyncio", + "false", + ), + ( + TermsOfServiceAgreementStateServiceClient, + transports.TermsOfServiceAgreementStateServiceRestTransport, + "rest", + "true", + ), + ( + TermsOfServiceAgreementStateServiceClient, + transports.TermsOfServiceAgreementStateServiceRestTransport, + "rest", + "false", + ), + ], +) +@mock.patch.object( + TermsOfServiceAgreementStateServiceClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(TermsOfServiceAgreementStateServiceClient), +) +@mock.patch.object( + TermsOfServiceAgreementStateServiceAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(TermsOfServiceAgreementStateServiceAsyncClient), +) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_terms_of_service_agreement_state_service_client_mtls_env_auto( + client_class, transport_class, transport_name, use_client_cert_env +): + # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. + + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + options = client_options.ClientOptions( + client_cert_source=client_cert_source_callback + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ) + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=client_cert_source_callback, + ): + if use_client_cert_env == "false": + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ) + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case client_cert_source and ADC client cert are not provided. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class", + [ + TermsOfServiceAgreementStateServiceClient, + TermsOfServiceAgreementStateServiceAsyncClient, + ], +) +@mock.patch.object( + TermsOfServiceAgreementStateServiceClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(TermsOfServiceAgreementStateServiceClient), +) +@mock.patch.object( + TermsOfServiceAgreementStateServiceAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(TermsOfServiceAgreementStateServiceAsyncClient), +) +def test_terms_of_service_agreement_state_service_client_get_mtls_endpoint_and_cert_source( + client_class, +): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=mock_client_cert_source, + ): + ( + api_endpoint, + cert_source, + ) = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + client_class.get_mtls_endpoint_and_cert_source() + + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError) as excinfo: + client_class.get_mtls_endpoint_and_cert_source() + + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + + +@pytest.mark.parametrize( + "client_class", + [ + TermsOfServiceAgreementStateServiceClient, + TermsOfServiceAgreementStateServiceAsyncClient, + ], +) +@mock.patch.object( + TermsOfServiceAgreementStateServiceClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(TermsOfServiceAgreementStateServiceClient), +) +@mock.patch.object( + TermsOfServiceAgreementStateServiceAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(TermsOfServiceAgreementStateServiceAsyncClient), +) +def test_terms_of_service_agreement_state_service_client_client_api_endpoint( + client_class, +): + mock_client_cert_source = client_cert_source_callback + api_override = "foo.com" + default_universe = TermsOfServiceAgreementStateServiceClient._DEFAULT_UNIVERSE + default_endpoint = ( + TermsOfServiceAgreementStateServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=default_universe + ) + ) + mock_universe = "bar.com" + mock_endpoint = ( + TermsOfServiceAgreementStateServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=mock_universe + ) + ) + + # If ClientOptions.api_endpoint is set and GOOGLE_API_USE_CLIENT_CERTIFICATE="true", + # use ClientOptions.api_endpoint as the api endpoint regardless. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ): + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=api_override + ) + client = client_class( + client_options=options, + credentials=ga_credentials.AnonymousCredentials(), + ) + assert client.api_endpoint == api_override + + # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="never", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + client = client_class(credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == default_endpoint + + # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="always", + # use the DEFAULT_MTLS_ENDPOINT as the api endpoint. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + client = client_class(credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + + # If ClientOptions.api_endpoint is not set, GOOGLE_API_USE_MTLS_ENDPOINT="auto" (default), + # GOOGLE_API_USE_CLIENT_CERTIFICATE="false" (default), default cert source doesn't exist, + # and ClientOptions.universe_domain="bar.com", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with universe domain as the api endpoint. 
+ options = client_options.ClientOptions() + universe_exists = hasattr(options, "universe_domain") + if universe_exists: + options = client_options.ClientOptions(universe_domain=mock_universe) + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + else: + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + assert client.api_endpoint == ( + mock_endpoint if universe_exists else default_endpoint + ) + assert client.universe_domain == ( + mock_universe if universe_exists else default_universe + ) + + # If ClientOptions does not have a universe domain attribute and GOOGLE_API_USE_MTLS_ENDPOINT="never", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. + options = client_options.ClientOptions() + if hasattr(options, "universe_domain"): + delattr(options, "universe_domain") + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + assert client.api_endpoint == default_endpoint + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + ( + TermsOfServiceAgreementStateServiceClient, + transports.TermsOfServiceAgreementStateServiceGrpcTransport, + "grpc", + ), + ( + TermsOfServiceAgreementStateServiceAsyncClient, + transports.TermsOfServiceAgreementStateServiceGrpcAsyncIOTransport, + "grpc_asyncio", + ), + ( + TermsOfServiceAgreementStateServiceClient, + transports.TermsOfServiceAgreementStateServiceRestTransport, + "rest", + ), + ], +) +def test_terms_of_service_agreement_state_service_client_client_options_scopes( + client_class, transport_class, transport_name +): + # Check the case scopes are provided. + options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + TermsOfServiceAgreementStateServiceClient, + transports.TermsOfServiceAgreementStateServiceGrpcTransport, + "grpc", + grpc_helpers, + ), + ( + TermsOfServiceAgreementStateServiceAsyncClient, + transports.TermsOfServiceAgreementStateServiceGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + ( + TermsOfServiceAgreementStateServiceClient, + transports.TermsOfServiceAgreementStateServiceRestTransport, + "rest", + None, + ), + ], +) +def test_terms_of_service_agreement_state_service_client_client_options_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. 
+ options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +def test_terms_of_service_agreement_state_service_client_client_options_from_dict(): + with mock.patch( + "google.shopping.merchant_accounts_v1beta.services.terms_of_service_agreement_state_service.transports.TermsOfServiceAgreementStateServiceGrpcTransport.__init__" + ) as grpc_transport: + grpc_transport.return_value = None + client = TermsOfServiceAgreementStateServiceClient( + client_options={"api_endpoint": "squid.clam.whelk"} + ) + grpc_transport.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + TermsOfServiceAgreementStateServiceClient, + transports.TermsOfServiceAgreementStateServiceGrpcTransport, + "grpc", + grpc_helpers, + ), + ( + TermsOfServiceAgreementStateServiceAsyncClient, + transports.TermsOfServiceAgreementStateServiceGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + ], +) +def test_terms_of_service_agreement_state_service_client_create_channel_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. + options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # test that the credentials from file are saved and used as the credentials. 
+ with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel" + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + file_creds = ga_credentials.AnonymousCredentials() + load_creds.return_value = (file_creds, None) + adc.return_value = (creds, None) + client = client_class(client_options=options, transport=transport_name) + create_channel.assert_called_with( + "merchantapi.googleapis.com:443", + credentials=file_creds, + credentials_file=None, + quota_project_id=None, + default_scopes=("https://www.googleapis.com/auth/content",), + scopes=None, + default_host="merchantapi.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "request_type", + [ + termsofserviceagreementstate.GetTermsOfServiceAgreementStateRequest, + dict, + ], +) +def test_get_terms_of_service_agreement_state(request_type, transport: str = "grpc"): + client = TermsOfServiceAgreementStateServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_terms_of_service_agreement_state), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = termsofserviceagreementstate.TermsOfServiceAgreementState( + name="name_value", + region_code="region_code_value", + terms_of_service_kind=termsofservicekind.TermsOfServiceKind.MERCHANT_CENTER, + ) + response = client.get_terms_of_service_agreement_state(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = termsofserviceagreementstate.GetTermsOfServiceAgreementStateRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance( + response, termsofserviceagreementstate.TermsOfServiceAgreementState + ) + assert response.name == "name_value" + assert response.region_code == "region_code_value" + assert ( + response.terms_of_service_kind + == termsofservicekind.TermsOfServiceKind.MERCHANT_CENTER + ) + + +def test_get_terms_of_service_agreement_state_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = TermsOfServiceAgreementStateServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_terms_of_service_agreement_state), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.get_terms_of_service_agreement_state() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert ( + args[0] + == termsofserviceagreementstate.GetTermsOfServiceAgreementStateRequest() + ) + + +def test_get_terms_of_service_agreement_state_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = TermsOfServiceAgreementStateServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = termsofserviceagreementstate.GetTermsOfServiceAgreementStateRequest( + name="name_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_terms_of_service_agreement_state), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.get_terms_of_service_agreement_state(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[ + 0 + ] == termsofserviceagreementstate.GetTermsOfServiceAgreementStateRequest( + name="name_value", + ) + + +def test_get_terms_of_service_agreement_state_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = TermsOfServiceAgreementStateServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.get_terms_of_service_agreement_state + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.get_terms_of_service_agreement_state + ] = mock_rpc + request = {} + client.get_terms_of_service_agreement_state(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.get_terms_of_service_agreement_state(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_get_terms_of_service_agreement_state_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = TermsOfServiceAgreementStateServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_terms_of_service_agreement_state), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
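+        # FakeUnaryUnaryCall wraps the message so the async client can await it
+        # as if it were a real unary-unary gRPC call.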
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + termsofserviceagreementstate.TermsOfServiceAgreementState( + name="name_value", + region_code="region_code_value", + terms_of_service_kind=termsofservicekind.TermsOfServiceKind.MERCHANT_CENTER, + ) + ) + response = await client.get_terms_of_service_agreement_state() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert ( + args[0] + == termsofserviceagreementstate.GetTermsOfServiceAgreementStateRequest() + ) + + +@pytest.mark.asyncio +async def test_get_terms_of_service_agreement_state_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = TermsOfServiceAgreementStateServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.get_terms_of_service_agreement_state + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[ + client._client._transport.get_terms_of_service_agreement_state + ] = mock_object + + request = {} + await client.get_terms_of_service_agreement_state(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + await client.get_terms_of_service_agreement_state(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + +@pytest.mark.asyncio +async def test_get_terms_of_service_agreement_state_async( + transport: str = "grpc_asyncio", + request_type=termsofserviceagreementstate.GetTermsOfServiceAgreementStateRequest, +): + client = TermsOfServiceAgreementStateServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_terms_of_service_agreement_state), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + termsofserviceagreementstate.TermsOfServiceAgreementState( + name="name_value", + region_code="region_code_value", + terms_of_service_kind=termsofservicekind.TermsOfServiceKind.MERCHANT_CENTER, + ) + ) + response = await client.get_terms_of_service_agreement_state(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = termsofserviceagreementstate.GetTermsOfServiceAgreementStateRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance( + response, termsofserviceagreementstate.TermsOfServiceAgreementState + ) + assert response.name == "name_value" + assert response.region_code == "region_code_value" + assert ( + response.terms_of_service_kind + == termsofservicekind.TermsOfServiceKind.MERCHANT_CENTER + ) + + +@pytest.mark.asyncio +async def test_get_terms_of_service_agreement_state_async_from_dict(): + await test_get_terms_of_service_agreement_state_async(request_type=dict) + + +def test_get_terms_of_service_agreement_state_field_headers(): + client = TermsOfServiceAgreementStateServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = termsofserviceagreementstate.GetTermsOfServiceAgreementStateRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_terms_of_service_agreement_state), "__call__" + ) as call: + call.return_value = termsofserviceagreementstate.TermsOfServiceAgreementState() + client.get_terms_of_service_agreement_state(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_terms_of_service_agreement_state_field_headers_async(): + client = TermsOfServiceAgreementStateServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = termsofserviceagreementstate.GetTermsOfServiceAgreementStateRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_terms_of_service_agreement_state), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + termsofserviceagreementstate.TermsOfServiceAgreementState() + ) + await client.get_terms_of_service_agreement_state(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_get_terms_of_service_agreement_state_flattened(): + client = TermsOfServiceAgreementStateServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_terms_of_service_agreement_state), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = termsofserviceagreementstate.TermsOfServiceAgreementState() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_terms_of_service_agreement_state( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
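+        # The flattened "name" keyword should have been copied into the request
+        # message that was passed to the transport method.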
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_get_terms_of_service_agreement_state_flattened_error(): + client = TermsOfServiceAgreementStateServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_terms_of_service_agreement_state( + termsofserviceagreementstate.GetTermsOfServiceAgreementStateRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_get_terms_of_service_agreement_state_flattened_async(): + client = TermsOfServiceAgreementStateServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_terms_of_service_agreement_state), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = termsofserviceagreementstate.TermsOfServiceAgreementState() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + termsofserviceagreementstate.TermsOfServiceAgreementState() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_terms_of_service_agreement_state( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_get_terms_of_service_agreement_state_flattened_error_async(): + client = TermsOfServiceAgreementStateServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.get_terms_of_service_agreement_state( + termsofserviceagreementstate.GetTermsOfServiceAgreementStateRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + termsofserviceagreementstate.RetrieveForApplicationTermsOfServiceAgreementStateRequest, + dict, + ], +) +def test_retrieve_for_application_terms_of_service_agreement_state( + request_type, transport: str = "grpc" +): + client = TermsOfServiceAgreementStateServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type( + client.transport.retrieve_for_application_terms_of_service_agreement_state + ), + "__call__", + ) as call: + # Designate an appropriate return value for the call. + call.return_value = termsofserviceagreementstate.TermsOfServiceAgreementState( + name="name_value", + region_code="region_code_value", + terms_of_service_kind=termsofservicekind.TermsOfServiceKind.MERCHANT_CENTER, + ) + response = client.retrieve_for_application_terms_of_service_agreement_state( + request + ) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = ( + termsofserviceagreementstate.RetrieveForApplicationTermsOfServiceAgreementStateRequest() + ) + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance( + response, termsofserviceagreementstate.TermsOfServiceAgreementState + ) + assert response.name == "name_value" + assert response.region_code == "region_code_value" + assert ( + response.terms_of_service_kind + == termsofservicekind.TermsOfServiceKind.MERCHANT_CENTER + ) + + +def test_retrieve_for_application_terms_of_service_agreement_state_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = TermsOfServiceAgreementStateServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type( + client.transport.retrieve_for_application_terms_of_service_agreement_state + ), + "__call__", + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.retrieve_for_application_terms_of_service_agreement_state() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert ( + args[0] + == termsofserviceagreementstate.RetrieveForApplicationTermsOfServiceAgreementStateRequest() + ) + + +def test_retrieve_for_application_terms_of_service_agreement_state_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = TermsOfServiceAgreementStateServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = termsofserviceagreementstate.RetrieveForApplicationTermsOfServiceAgreementStateRequest( + parent="parent_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type( + client.transport.retrieve_for_application_terms_of_service_agreement_state + ), + "__call__", + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.retrieve_for_application_terms_of_service_agreement_state( + request=request + ) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[ + 0 + ] == termsofserviceagreementstate.RetrieveForApplicationTermsOfServiceAgreementStateRequest( + parent="parent_value", + ) + + +def test_retrieve_for_application_terms_of_service_agreement_state_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = TermsOfServiceAgreementStateServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.retrieve_for_application_terms_of_service_agreement_state + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.retrieve_for_application_terms_of_service_agreement_state + ] = mock_rpc + request = {} + client.retrieve_for_application_terms_of_service_agreement_state(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.retrieve_for_application_terms_of_service_agreement_state(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_retrieve_for_application_terms_of_service_agreement_state_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = TermsOfServiceAgreementStateServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type( + client.transport.retrieve_for_application_terms_of_service_agreement_state + ), + "__call__", + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + termsofserviceagreementstate.TermsOfServiceAgreementState( + name="name_value", + region_code="region_code_value", + terms_of_service_kind=termsofservicekind.TermsOfServiceKind.MERCHANT_CENTER, + ) + ) + response = ( + await client.retrieve_for_application_terms_of_service_agreement_state() + ) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert ( + args[0] + == termsofserviceagreementstate.RetrieveForApplicationTermsOfServiceAgreementStateRequest() + ) + + +@pytest.mark.asyncio +async def test_retrieve_for_application_terms_of_service_agreement_state_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = TermsOfServiceAgreementStateServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.retrieve_for_application_terms_of_service_agreement_state + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[ + client._client._transport.retrieve_for_application_terms_of_service_agreement_state + ] = mock_object + + request = {} + await client.retrieve_for_application_terms_of_service_agreement_state(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + await client.retrieve_for_application_terms_of_service_agreement_state(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + +@pytest.mark.asyncio +async def test_retrieve_for_application_terms_of_service_agreement_state_async( + transport: str = "grpc_asyncio", + request_type=termsofserviceagreementstate.RetrieveForApplicationTermsOfServiceAgreementStateRequest, +): + client = TermsOfServiceAgreementStateServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type( + client.transport.retrieve_for_application_terms_of_service_agreement_state + ), + "__call__", + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + termsofserviceagreementstate.TermsOfServiceAgreementState( + name="name_value", + region_code="region_code_value", + terms_of_service_kind=termsofservicekind.TermsOfServiceKind.MERCHANT_CENTER, + ) + ) + response = ( + await client.retrieve_for_application_terms_of_service_agreement_state( + request + ) + ) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = ( + termsofserviceagreementstate.RetrieveForApplicationTermsOfServiceAgreementStateRequest() + ) + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance( + response, termsofserviceagreementstate.TermsOfServiceAgreementState + ) + assert response.name == "name_value" + assert response.region_code == "region_code_value" + assert ( + response.terms_of_service_kind + == termsofservicekind.TermsOfServiceKind.MERCHANT_CENTER + ) + + +@pytest.mark.asyncio +async def test_retrieve_for_application_terms_of_service_agreement_state_async_from_dict(): + await test_retrieve_for_application_terms_of_service_agreement_state_async( + request_type=dict + ) + + +def test_retrieve_for_application_terms_of_service_agreement_state_field_headers(): + client = TermsOfServiceAgreementStateServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = ( + termsofserviceagreementstate.RetrieveForApplicationTermsOfServiceAgreementStateRequest() + ) + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type( + client.transport.retrieve_for_application_terms_of_service_agreement_state + ), + "__call__", + ) as call: + call.return_value = termsofserviceagreementstate.TermsOfServiceAgreementState() + client.retrieve_for_application_terms_of_service_agreement_state(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_retrieve_for_application_terms_of_service_agreement_state_field_headers_async(): + client = TermsOfServiceAgreementStateServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = ( + termsofserviceagreementstate.RetrieveForApplicationTermsOfServiceAgreementStateRequest() + ) + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type( + client.transport.retrieve_for_application_terms_of_service_agreement_state + ), + "__call__", + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + termsofserviceagreementstate.TermsOfServiceAgreementState() + ) + await client.retrieve_for_application_terms_of_service_agreement_state(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_retrieve_for_application_terms_of_service_agreement_state_flattened(): + client = TermsOfServiceAgreementStateServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type( + client.transport.retrieve_for_application_terms_of_service_agreement_state + ), + "__call__", + ) as call: + # Designate an appropriate return value for the call. + call.return_value = termsofserviceagreementstate.TermsOfServiceAgreementState() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.retrieve_for_application_terms_of_service_agreement_state( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +def test_retrieve_for_application_terms_of_service_agreement_state_flattened_error(): + client = TermsOfServiceAgreementStateServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.retrieve_for_application_terms_of_service_agreement_state( + termsofserviceagreementstate.RetrieveForApplicationTermsOfServiceAgreementStateRequest(), + parent="parent_value", + ) + + +@pytest.mark.asyncio +async def test_retrieve_for_application_terms_of_service_agreement_state_flattened_async(): + client = TermsOfServiceAgreementStateServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type( + client.transport.retrieve_for_application_terms_of_service_agreement_state + ), + "__call__", + ) as call: + # Designate an appropriate return value for the call. + call.return_value = termsofserviceagreementstate.TermsOfServiceAgreementState() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + termsofserviceagreementstate.TermsOfServiceAgreementState() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = ( + await client.retrieve_for_application_terms_of_service_agreement_state( + parent="parent_value", + ) + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_retrieve_for_application_terms_of_service_agreement_state_flattened_error_async(): + client = TermsOfServiceAgreementStateServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
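+    # The client rejects this combination with ValueError before any RPC is made.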
+ with pytest.raises(ValueError): + await client.retrieve_for_application_terms_of_service_agreement_state( + termsofserviceagreementstate.RetrieveForApplicationTermsOfServiceAgreementStateRequest(), + parent="parent_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + termsofserviceagreementstate.GetTermsOfServiceAgreementStateRequest, + dict, + ], +) +def test_get_terms_of_service_agreement_state_rest(request_type): + client = TermsOfServiceAgreementStateServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "accounts/sample1/termsOfServiceAgreementStates/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = termsofserviceagreementstate.TermsOfServiceAgreementState( + name="name_value", + region_code="region_code_value", + terms_of_service_kind=termsofservicekind.TermsOfServiceKind.MERCHANT_CENTER, + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = termsofserviceagreementstate.TermsOfServiceAgreementState.pb( + return_value + ) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_terms_of_service_agreement_state(request) + + # Establish that the response is the type that we expect. + assert isinstance( + response, termsofserviceagreementstate.TermsOfServiceAgreementState + ) + assert response.name == "name_value" + assert response.region_code == "region_code_value" + assert ( + response.terms_of_service_kind + == termsofservicekind.TermsOfServiceKind.MERCHANT_CENTER + ) + + +def test_get_terms_of_service_agreement_state_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = TermsOfServiceAgreementStateServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.get_terms_of_service_agreement_state + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.get_terms_of_service_agreement_state + ] = mock_rpc + + request = {} + client.get_terms_of_service_agreement_state(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get_terms_of_service_agreement_state(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_get_terms_of_service_agreement_state_rest_required_fields( + request_type=termsofserviceagreementstate.GetTermsOfServiceAgreementStateRequest, +): + transport_class = transports.TermsOfServiceAgreementStateServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_terms_of_service_agreement_state._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_terms_of_service_agreement_state._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = TermsOfServiceAgreementStateServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = termsofserviceagreementstate.TermsOfServiceAgreementState() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
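+            # transcode() would normally match the request against the method's
+            # http_options; returning a canned result keeps this test focused on
+            # the required-field handling rather than the real URI template.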
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = termsofserviceagreementstate.TermsOfServiceAgreementState.pb( + return_value + ) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_terms_of_service_agreement_state(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_terms_of_service_agreement_state_rest_unset_required_fields(): + transport = transports.TermsOfServiceAgreementStateServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = ( + transport.get_terms_of_service_agreement_state._get_unset_required_fields({}) + ) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_terms_of_service_agreement_state_rest_interceptors(null_interceptor): + transport = transports.TermsOfServiceAgreementStateServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.TermsOfServiceAgreementStateServiceRestInterceptor(), + ) + client = TermsOfServiceAgreementStateServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.TermsOfServiceAgreementStateServiceRestInterceptor, + "post_get_terms_of_service_agreement_state", + ) as post, mock.patch.object( + transports.TermsOfServiceAgreementStateServiceRestInterceptor, + "pre_get_terms_of_service_agreement_state", + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = ( + termsofserviceagreementstate.GetTermsOfServiceAgreementStateRequest.pb( + termsofserviceagreementstate.GetTermsOfServiceAgreementStateRequest() + ) + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = ( + termsofserviceagreementstate.TermsOfServiceAgreementState.to_json( + termsofserviceagreementstate.TermsOfServiceAgreementState() + ) + ) + + request = termsofserviceagreementstate.GetTermsOfServiceAgreementStateRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = termsofserviceagreementstate.TermsOfServiceAgreementState() + + client.get_terms_of_service_agreement_state( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_terms_of_service_agreement_state_rest_bad_request( + transport: str = "rest", + request_type=termsofserviceagreementstate.GetTermsOfServiceAgreementStateRequest, +): + client = TermsOfServiceAgreementStateServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": 
"accounts/sample1/termsOfServiceAgreementStates/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_terms_of_service_agreement_state(request) + + +def test_get_terms_of_service_agreement_state_rest_flattened(): + client = TermsOfServiceAgreementStateServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = termsofserviceagreementstate.TermsOfServiceAgreementState() + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "accounts/sample1/termsOfServiceAgreementStates/sample2" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = termsofserviceagreementstate.TermsOfServiceAgreementState.pb( + return_value + ) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_terms_of_service_agreement_state(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/accounts/v1beta/{name=accounts/*/termsOfServiceAgreementStates/*}" + % client.transport._host, + args[1], + ) + + +def test_get_terms_of_service_agreement_state_rest_flattened_error( + transport: str = "rest", +): + client = TermsOfServiceAgreementStateServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_terms_of_service_agreement_state( + termsofserviceagreementstate.GetTermsOfServiceAgreementStateRequest(), + name="name_value", + ) + + +def test_get_terms_of_service_agreement_state_rest_error(): + client = TermsOfServiceAgreementStateServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + termsofserviceagreementstate.RetrieveForApplicationTermsOfServiceAgreementStateRequest, + dict, + ], +) +def test_retrieve_for_application_terms_of_service_agreement_state_rest(request_type): + client = TermsOfServiceAgreementStateServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "accounts/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = termsofserviceagreementstate.TermsOfServiceAgreementState( + name="name_value", + region_code="region_code_value", + terms_of_service_kind=termsofservicekind.TermsOfServiceKind.MERCHANT_CENTER, + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = termsofserviceagreementstate.TermsOfServiceAgreementState.pb( + return_value + ) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.retrieve_for_application_terms_of_service_agreement_state( + request + ) + + # Establish that the response is the type that we expect. + assert isinstance( + response, termsofserviceagreementstate.TermsOfServiceAgreementState + ) + assert response.name == "name_value" + assert response.region_code == "region_code_value" + assert ( + response.terms_of_service_kind + == termsofservicekind.TermsOfServiceKind.MERCHANT_CENTER + ) + + +def test_retrieve_for_application_terms_of_service_agreement_state_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = TermsOfServiceAgreementStateServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.retrieve_for_application_terms_of_service_agreement_state + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.retrieve_for_application_terms_of_service_agreement_state + ] = mock_rpc + + request = {} + client.retrieve_for_application_terms_of_service_agreement_state(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.retrieve_for_application_terms_of_service_agreement_state(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_retrieve_for_application_terms_of_service_agreement_state_rest_required_fields( + request_type=termsofserviceagreementstate.RetrieveForApplicationTermsOfServiceAgreementStateRequest, +): + transport_class = transports.TermsOfServiceAgreementStateServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).retrieve_for_application_terms_of_service_agreement_state._get_unset_required_fields( + jsonified_request + ) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).retrieve_for_application_terms_of_service_agreement_state._get_unset_required_fields( + jsonified_request + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = TermsOfServiceAgreementStateServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = termsofserviceagreementstate.TermsOfServiceAgreementState() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = termsofserviceagreementstate.TermsOfServiceAgreementState.pb( + return_value + ) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.retrieve_for_application_terms_of_service_agreement_state( + request + ) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_retrieve_for_application_terms_of_service_agreement_state_rest_unset_required_fields(): + transport = transports.TermsOfServiceAgreementStateServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.retrieve_for_application_terms_of_service_agreement_state._get_unset_required_fields( + {} + ) + assert set(unset_fields) == (set(()) & set(("parent",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_retrieve_for_application_terms_of_service_agreement_state_rest_interceptors( + null_interceptor, +): + transport = transports.TermsOfServiceAgreementStateServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.TermsOfServiceAgreementStateServiceRestInterceptor(), + ) + client = TermsOfServiceAgreementStateServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.TermsOfServiceAgreementStateServiceRestInterceptor, + "post_retrieve_for_application_terms_of_service_agreement_state", + ) as post, mock.patch.object( + transports.TermsOfServiceAgreementStateServiceRestInterceptor, + "pre_retrieve_for_application_terms_of_service_agreement_state", + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = termsofserviceagreementstate.RetrieveForApplicationTermsOfServiceAgreementStateRequest.pb( + termsofserviceagreementstate.RetrieveForApplicationTermsOfServiceAgreementStateRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = ( + termsofserviceagreementstate.TermsOfServiceAgreementState.to_json( + termsofserviceagreementstate.TermsOfServiceAgreementState() + ) + ) + + request = ( + termsofserviceagreementstate.RetrieveForApplicationTermsOfServiceAgreementStateRequest() + ) + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = termsofserviceagreementstate.TermsOfServiceAgreementState() + + client.retrieve_for_application_terms_of_service_agreement_state( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_retrieve_for_application_terms_of_service_agreement_state_rest_bad_request( + transport: str = "rest", + 
request_type=termsofserviceagreementstate.RetrieveForApplicationTermsOfServiceAgreementStateRequest, +): + client = TermsOfServiceAgreementStateServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "accounts/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.retrieve_for_application_terms_of_service_agreement_state(request) + + +def test_retrieve_for_application_terms_of_service_agreement_state_rest_flattened(): + client = TermsOfServiceAgreementStateServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = termsofserviceagreementstate.TermsOfServiceAgreementState() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "accounts/sample1"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = termsofserviceagreementstate.TermsOfServiceAgreementState.pb( + return_value + ) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.retrieve_for_application_terms_of_service_agreement_state(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/accounts/v1beta/{parent=accounts/*}/termsOfServiceAgreementStates:retrieveForApplication" + % client.transport._host, + args[1], + ) + + +def test_retrieve_for_application_terms_of_service_agreement_state_rest_flattened_error( + transport: str = "rest", +): + client = TermsOfServiceAgreementStateServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.retrieve_for_application_terms_of_service_agreement_state( + termsofserviceagreementstate.RetrieveForApplicationTermsOfServiceAgreementStateRequest(), + parent="parent_value", + ) + + +def test_retrieve_for_application_terms_of_service_agreement_state_rest_error(): + client = TermsOfServiceAgreementStateServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. 
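+    # A transport instance already carries its own credentials, so passing extra
+    # credentials, a credentials file, an API key, or scopes alongside it is
+    # ambiguous and must raise ValueError.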
+ transport = transports.TermsOfServiceAgreementStateServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = TermsOfServiceAgreementStateServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.TermsOfServiceAgreementStateServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = TermsOfServiceAgreementStateServiceClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. + transport = transports.TermsOfServiceAgreementStateServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = TermsOfServiceAgreementStateServiceClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = TermsOfServiceAgreementStateServiceClient( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.TermsOfServiceAgreementStateServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = TermsOfServiceAgreementStateServiceClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.TermsOfServiceAgreementStateServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = TermsOfServiceAgreementStateServiceClient(transport=transport) + assert client.transport is transport + + +def test_transport_get_channel(): + # A client may be instantiated with a custom transport instance. + transport = transports.TermsOfServiceAgreementStateServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + transport = transports.TermsOfServiceAgreementStateServiceGrpcAsyncIOTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.TermsOfServiceAgreementStateServiceGrpcTransport, + transports.TermsOfServiceAgreementStateServiceGrpcAsyncIOTransport, + transports.TermsOfServiceAgreementStateServiceRestTransport, + ], +) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "rest", + ], +) +def test_transport_kind(transport_name): + transport = TermsOfServiceAgreementStateServiceClient.get_transport_class( + transport_name + )( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert transport.kind == transport_name + + +def test_transport_grpc_default(): + # A client should use the gRPC transport by default. 
+ client = TermsOfServiceAgreementStateServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert isinstance( + client.transport, + transports.TermsOfServiceAgreementStateServiceGrpcTransport, + ) + + +def test_terms_of_service_agreement_state_service_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.TermsOfServiceAgreementStateServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json", + ) + + +def test_terms_of_service_agreement_state_service_base_transport(): + # Instantiate the base transport. + with mock.patch( + "google.shopping.merchant_accounts_v1beta.services.terms_of_service_agreement_state_service.transports.TermsOfServiceAgreementStateServiceTransport.__init__" + ) as Transport: + Transport.return_value = None + transport = transports.TermsOfServiceAgreementStateServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. + methods = ( + "get_terms_of_service_agreement_state", + "retrieve_for_application_terms_of_service_agreement_state", + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + with pytest.raises(NotImplementedError): + transport.close() + + # Catch all for all remaining methods and properties + remainder = [ + "kind", + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + + +def test_terms_of_service_agreement_state_service_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch( + "google.shopping.merchant_accounts_v1beta.services.terms_of_service_agreement_state_service.transports.TermsOfServiceAgreementStateServiceTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.TermsOfServiceAgreementStateServiceTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with( + "credentials.json", + scopes=None, + default_scopes=("https://www.googleapis.com/auth/content",), + quota_project_id="octopus", + ) + + +def test_terms_of_service_agreement_state_service_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. + with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( + "google.shopping.merchant_accounts_v1beta.services.terms_of_service_agreement_state_service.transports.TermsOfServiceAgreementStateServiceTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.TermsOfServiceAgreementStateServiceTransport() + adc.assert_called_once() + + +def test_terms_of_service_agreement_state_service_auth_adc(): + # If no credentials are provided, we should use ADC credentials. 
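+    # `google.auth.default` is patched so the test can confirm it is invoked
+    # with the content default scope and no explicit scopes or quota project.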
+ with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + TermsOfServiceAgreementStateServiceClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=("https://www.googleapis.com/auth/content",), + quota_project_id=None, + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.TermsOfServiceAgreementStateServiceGrpcTransport, + transports.TermsOfServiceAgreementStateServiceGrpcAsyncIOTransport, + ], +) +def test_terms_of_service_agreement_state_service_transport_auth_adc(transport_class): + # If credentials and host are not provided, the transport class should use + # ADC credentials. + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + adc.assert_called_once_with( + scopes=["1", "2"], + default_scopes=("https://www.googleapis.com/auth/content",), + quota_project_id="octopus", + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.TermsOfServiceAgreementStateServiceGrpcTransport, + transports.TermsOfServiceAgreementStateServiceGrpcAsyncIOTransport, + transports.TermsOfServiceAgreementStateServiceRestTransport, + ], +) +def test_terms_of_service_agreement_state_service_transport_auth_gdch_credentials( + transport_class, +): + host = "https://language.com" + api_audience_tests = [None, "https://language2.com"] + api_audience_expect = [host, "https://language2.com"] + for t, e in zip(api_audience_tests, api_audience_expect): + with mock.patch.object(google.auth, "default", autospec=True) as adc: + gdch_mock = mock.MagicMock() + type(gdch_mock).with_gdch_audience = mock.PropertyMock( + return_value=gdch_mock + ) + adc.return_value = (gdch_mock, None) + transport_class(host=host, api_audience=t) + gdch_mock.with_gdch_audience.assert_called_once_with(e) + + +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.TermsOfServiceAgreementStateServiceGrpcTransport, grpc_helpers), + ( + transports.TermsOfServiceAgreementStateServiceGrpcAsyncIOTransport, + grpc_helpers_async, + ), + ], +) +def test_terms_of_service_agreement_state_service_transport_create_channel( + transport_class, grpc_helpers +): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
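+    # Both `google.auth.default` and the gRPC channel factory are patched below,
+    # so the test can assert that the channel is opened against
+    # merchantapi.googleapis.com:443 with the ADC credentials and the requested
+    # scopes and quota project.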
+ with mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + + create_channel.assert_called_with( + "merchantapi.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + default_scopes=("https://www.googleapis.com/auth/content",), + scopes=["1", "2"], + default_host="merchantapi.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.TermsOfServiceAgreementStateServiceGrpcTransport, + transports.TermsOfServiceAgreementStateServiceGrpcAsyncIOTransport, + ], +) +def test_terms_of_service_agreement_state_service_grpc_transport_client_cert_source_for_mtls( + transport_class, +): + cred = ga_credentials.AnonymousCredentials() + + # Check ssl_channel_credentials is used if provided. + with mock.patch.object(transport_class, "create_channel") as mock_create_channel: + mock_ssl_channel_creds = mock.Mock() + transport_class( + host="squid.clam.whelk", + credentials=cred, + ssl_channel_credentials=mock_ssl_channel_creds, + ) + mock_create_channel.assert_called_once_with( + "squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_channel_creds, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls + # is used. 
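+    # In that case the transport is expected to call the cert source itself and
+    # feed the returned certificate/key pair into `grpc.ssl_channel_credentials`.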
+ with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): + with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: + transport_class( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback, + ) + expected_cert, expected_key = client_cert_source_callback() + mock_ssl_cred.assert_called_once_with( + certificate_chain=expected_cert, private_key=expected_key + ) + + +def test_terms_of_service_agreement_state_service_http_transport_client_cert_source_for_mtls(): + cred = ga_credentials.AnonymousCredentials() + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ) as mock_configure_mtls_channel: + transports.TermsOfServiceAgreementStateServiceRestTransport( + credentials=cred, client_cert_source_for_mtls=client_cert_source_callback + ) + mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + "rest", + ], +) +def test_terms_of_service_agreement_state_service_host_no_port(transport_name): + client = TermsOfServiceAgreementStateServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="merchantapi.googleapis.com" + ), + transport=transport_name, + ) + assert client.transport._host == ( + "merchantapi.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://merchantapi.googleapis.com" + ) + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + "rest", + ], +) +def test_terms_of_service_agreement_state_service_host_with_port(transport_name): + client = TermsOfServiceAgreementStateServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="merchantapi.googleapis.com:8000" + ), + transport=transport_name, + ) + assert client.transport._host == ( + "merchantapi.googleapis.com:8000" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://merchantapi.googleapis.com:8000" + ) + + +@pytest.mark.parametrize( + "transport_name", + [ + "rest", + ], +) +def test_terms_of_service_agreement_state_service_client_transport_session_collision( + transport_name, +): + creds1 = ga_credentials.AnonymousCredentials() + creds2 = ga_credentials.AnonymousCredentials() + client1 = TermsOfServiceAgreementStateServiceClient( + credentials=creds1, + transport=transport_name, + ) + client2 = TermsOfServiceAgreementStateServiceClient( + credentials=creds2, + transport=transport_name, + ) + session1 = client1.transport.get_terms_of_service_agreement_state._session + session2 = client2.transport.get_terms_of_service_agreement_state._session + assert session1 != session2 + session1 = ( + client1.transport.retrieve_for_application_terms_of_service_agreement_state._session + ) + session2 = ( + client2.transport.retrieve_for_application_terms_of_service_agreement_state._session + ) + assert session1 != session2 + + +def test_terms_of_service_agreement_state_service_grpc_transport_channel(): + channel = grpc.secure_channel("http://localhost/", grpc.local_channel_credentials()) + + # Check that channel is used if provided. 
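+    # A caller-supplied channel should be adopted as-is, leaving
+    # `_ssl_channel_credentials` unset.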
+ transport = transports.TermsOfServiceAgreementStateServiceGrpcTransport( + host="squid.clam.whelk", + channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None + + +def test_terms_of_service_agreement_state_service_grpc_asyncio_transport_channel(): + channel = aio.secure_channel("http://localhost/", grpc.local_channel_credentials()) + + # Check that channel is used if provided. + transport = transports.TermsOfServiceAgreementStateServiceGrpcAsyncIOTransport( + host="squid.clam.whelk", + channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. +@pytest.mark.parametrize( + "transport_class", + [ + transports.TermsOfServiceAgreementStateServiceGrpcTransport, + transports.TermsOfServiceAgreementStateServiceGrpcAsyncIOTransport, + ], +) +def test_terms_of_service_agreement_state_service_transport_channel_mtls_with_client_cert_source( + transport_class, +): + with mock.patch( + "grpc.ssl_channel_credentials", autospec=True + ) as grpc_ssl_channel_cred: + with mock.patch.object( + transport_class, "create_channel" + ) as grpc_create_channel: + mock_ssl_cred = mock.Mock() + grpc_ssl_channel_cred.return_value = mock_ssl_cred + + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + + cred = ga_credentials.AnonymousCredentials() + with pytest.warns(DeprecationWarning): + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (cred, None) + transport = transport_class( + host="squid.clam.whelk", + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=client_cert_source_callback, + ) + adc.assert_called_once() + + grpc_ssl_channel_cred.assert_called_once_with( + certificate_chain=b"cert bytes", private_key=b"key bytes" + ) + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + assert transport._ssl_channel_credentials == mock_ssl_cred + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. 
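+# Until then, the test below covers the mTLS path when no client cert source is
+# given: SSL credentials are taken from
+# `google.auth.transport.grpc.SslCredentials` instead.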
+@pytest.mark.parametrize( + "transport_class", + [ + transports.TermsOfServiceAgreementStateServiceGrpcTransport, + transports.TermsOfServiceAgreementStateServiceGrpcAsyncIOTransport, + ], +) +def test_terms_of_service_agreement_state_service_transport_channel_mtls_with_adc( + transport_class, +): + mock_ssl_cred = mock.Mock() + with mock.patch.multiple( + "google.auth.transport.grpc.SslCredentials", + __init__=mock.Mock(return_value=None), + ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), + ): + with mock.patch.object( + transport_class, "create_channel" + ) as grpc_create_channel: + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + mock_cred = mock.Mock() + + with pytest.warns(DeprecationWarning): + transport = transport_class( + host="squid.clam.whelk", + credentials=mock_cred, + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=None, + ) + + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=mock_cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + + +def test_account_path(): + account = "squid" + expected = "accounts/{account}".format( + account=account, + ) + actual = TermsOfServiceAgreementStateServiceClient.account_path(account) + assert expected == actual + + +def test_parse_account_path(): + expected = { + "account": "clam", + } + path = TermsOfServiceAgreementStateServiceClient.account_path(**expected) + + # Check that the path construction is reversible. + actual = TermsOfServiceAgreementStateServiceClient.parse_account_path(path) + assert expected == actual + + +def test_terms_of_service_path(): + version = "whelk" + expected = "termsOfService/{version}".format( + version=version, + ) + actual = TermsOfServiceAgreementStateServiceClient.terms_of_service_path(version) + assert expected == actual + + +def test_parse_terms_of_service_path(): + expected = { + "version": "octopus", + } + path = TermsOfServiceAgreementStateServiceClient.terms_of_service_path(**expected) + + # Check that the path construction is reversible. + actual = TermsOfServiceAgreementStateServiceClient.parse_terms_of_service_path(path) + assert expected == actual + + +def test_terms_of_service_agreement_state_path(): + account = "oyster" + identifier = "nudibranch" + expected = "accounts/{account}/termsOfServiceAgreementStates/{identifier}".format( + account=account, + identifier=identifier, + ) + actual = ( + TermsOfServiceAgreementStateServiceClient.terms_of_service_agreement_state_path( + account, identifier + ) + ) + assert expected == actual + + +def test_parse_terms_of_service_agreement_state_path(): + expected = { + "account": "cuttlefish", + "identifier": "mussel", + } + path = ( + TermsOfServiceAgreementStateServiceClient.terms_of_service_agreement_state_path( + **expected + ) + ) + + # Check that the path construction is reversible. 
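+    # e.g. parsing "accounts/cuttlefish/termsOfServiceAgreementStates/mussel"
+    # should return {"account": "cuttlefish", "identifier": "mussel"}.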
+ actual = TermsOfServiceAgreementStateServiceClient.parse_terms_of_service_agreement_state_path( + path + ) + assert expected == actual + + +def test_common_billing_account_path(): + billing_account = "winkle" + expected = "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + actual = TermsOfServiceAgreementStateServiceClient.common_billing_account_path( + billing_account + ) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "nautilus", + } + path = TermsOfServiceAgreementStateServiceClient.common_billing_account_path( + **expected + ) + + # Check that the path construction is reversible. + actual = ( + TermsOfServiceAgreementStateServiceClient.parse_common_billing_account_path( + path + ) + ) + assert expected == actual + + +def test_common_folder_path(): + folder = "scallop" + expected = "folders/{folder}".format( + folder=folder, + ) + actual = TermsOfServiceAgreementStateServiceClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "abalone", + } + path = TermsOfServiceAgreementStateServiceClient.common_folder_path(**expected) + + # Check that the path construction is reversible. + actual = TermsOfServiceAgreementStateServiceClient.parse_common_folder_path(path) + assert expected == actual + + +def test_common_organization_path(): + organization = "squid" + expected = "organizations/{organization}".format( + organization=organization, + ) + actual = TermsOfServiceAgreementStateServiceClient.common_organization_path( + organization + ) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "clam", + } + path = TermsOfServiceAgreementStateServiceClient.common_organization_path( + **expected + ) + + # Check that the path construction is reversible. + actual = TermsOfServiceAgreementStateServiceClient.parse_common_organization_path( + path + ) + assert expected == actual + + +def test_common_project_path(): + project = "whelk" + expected = "projects/{project}".format( + project=project, + ) + actual = TermsOfServiceAgreementStateServiceClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "octopus", + } + path = TermsOfServiceAgreementStateServiceClient.common_project_path(**expected) + + # Check that the path construction is reversible. + actual = TermsOfServiceAgreementStateServiceClient.parse_common_project_path(path) + assert expected == actual + + +def test_common_location_path(): + project = "oyster" + location = "nudibranch" + expected = "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) + actual = TermsOfServiceAgreementStateServiceClient.common_location_path( + project, location + ) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "cuttlefish", + "location": "mussel", + } + path = TermsOfServiceAgreementStateServiceClient.common_location_path(**expected) + + # Check that the path construction is reversible. 
+ actual = TermsOfServiceAgreementStateServiceClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object( + transports.TermsOfServiceAgreementStateServiceTransport, + "_prep_wrapped_messages", + ) as prep: + client = TermsOfServiceAgreementStateServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object( + transports.TermsOfServiceAgreementStateServiceTransport, + "_prep_wrapped_messages", + ) as prep: + transport_class = ( + TermsOfServiceAgreementStateServiceClient.get_transport_class() + ) + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + +@pytest.mark.asyncio +async def test_transport_close_async(): + client = TermsOfServiceAgreementStateServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + with mock.patch.object( + type(getattr(client.transport, "grpc_channel")), "close" + ) as close: + async with client: + close.assert_not_called() + close.assert_called_once() + + +def test_transport_close(): + transports = { + "rest": "_session", + "grpc": "_grpc_channel", + } + + for transport, close_name in transports.items(): + client = TermsOfServiceAgreementStateServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + with mock.patch.object( + type(getattr(client.transport, close_name)), "close" + ) as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +def test_client_ctx(): + transports = [ + "rest", + "grpc", + ] + for transport in transports: + client = TermsOfServiceAgreementStateServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + # Test client calls underlying transport. 
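+        # The transport's close() must not run until the `with` block exits.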
+ with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() + + +@pytest.mark.parametrize( + "client_class,transport_class", + [ + ( + TermsOfServiceAgreementStateServiceClient, + transports.TermsOfServiceAgreementStateServiceGrpcTransport, + ), + ( + TermsOfServiceAgreementStateServiceAsyncClient, + transports.TermsOfServiceAgreementStateServiceGrpcAsyncIOTransport, + ), + ], +) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) diff --git a/packages/google-shopping-merchant-accounts/tests/unit/gapic/merchant_accounts_v1beta/test_terms_of_service_service.py b/packages/google-shopping-merchant-accounts/tests/unit/gapic/merchant_accounts_v1beta/test_terms_of_service_service.py new file mode 100644 index 000000000000..61ce616ed7c1 --- /dev/null +++ b/packages/google-shopping-merchant-accounts/tests/unit/gapic/merchant_accounts_v1beta/test_terms_of_service_service.py @@ -0,0 +1,3865 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import os + +# try/except added for compatibility with python < 3.8 +try: + from unittest import mock + from unittest.mock import AsyncMock # pragma: NO COVER +except ImportError: # pragma: NO COVER + import mock + +from collections.abc import Iterable +import json +import math + +from google.api_core import gapic_v1, grpc_helpers, grpc_helpers_async, path_template +from google.api_core import api_core_version, client_options +from google.api_core import exceptions as core_exceptions +import google.auth +from google.auth import credentials as ga_credentials +from google.auth.exceptions import MutualTLSChannelError +from google.oauth2 import service_account +from google.protobuf import json_format +import grpc +from grpc.experimental import aio +from proto.marshal.rules import wrappers +from proto.marshal.rules.dates import DurationRule, TimestampRule +import pytest +from requests import PreparedRequest, Request, Response +from requests.sessions import Session + +from google.shopping.merchant_accounts_v1beta.services.terms_of_service_service import ( + TermsOfServiceServiceAsyncClient, + TermsOfServiceServiceClient, + transports, +) +from google.shopping.merchant_accounts_v1beta.types import ( + termsofservice, + termsofservicekind, +) + + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + + +# If default endpoint is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. +def modify_default_endpoint(client): + return ( + "foo.googleapis.com" + if ("localhost" in client.DEFAULT_ENDPOINT) + else client.DEFAULT_ENDPOINT + ) + + +# If default endpoint template is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint template so the client can produce a different +# mtls endpoint for endpoint testing purposes. 
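+# Only templates that point at localhost are rewritten (to "test.{UNIVERSE_DOMAIN}");
+# production templates pass through unchanged.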
+def modify_default_endpoint_template(client): + return ( + "test.{UNIVERSE_DOMAIN}" + if ("localhost" in client._DEFAULT_ENDPOINT_TEMPLATE) + else client._DEFAULT_ENDPOINT_TEMPLATE + ) + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert TermsOfServiceServiceClient._get_default_mtls_endpoint(None) is None + assert ( + TermsOfServiceServiceClient._get_default_mtls_endpoint(api_endpoint) + == api_mtls_endpoint + ) + assert ( + TermsOfServiceServiceClient._get_default_mtls_endpoint(api_mtls_endpoint) + == api_mtls_endpoint + ) + assert ( + TermsOfServiceServiceClient._get_default_mtls_endpoint(sandbox_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + TermsOfServiceServiceClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + TermsOfServiceServiceClient._get_default_mtls_endpoint(non_googleapi) + == non_googleapi + ) + + +def test__read_environment_variables(): + assert TermsOfServiceServiceClient._read_environment_variables() == ( + False, + "auto", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + assert TermsOfServiceServiceClient._read_environment_variables() == ( + True, + "auto", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + assert TermsOfServiceServiceClient._read_environment_variables() == ( + False, + "auto", + None, + ) + + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError) as excinfo: + TermsOfServiceServiceClient._read_environment_variables() + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + assert TermsOfServiceServiceClient._read_environment_variables() == ( + False, + "never", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + assert TermsOfServiceServiceClient._read_environment_variables() == ( + False, + "always", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}): + assert TermsOfServiceServiceClient._read_environment_variables() == ( + False, + "auto", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + TermsOfServiceServiceClient._read_environment_variables() + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + with mock.patch.dict(os.environ, {"GOOGLE_CLOUD_UNIVERSE_DOMAIN": "foo.com"}): + assert TermsOfServiceServiceClient._read_environment_variables() == ( + False, + "auto", + "foo.com", + ) + + +def test__get_client_cert_source(): + mock_provided_cert_source = mock.Mock() + mock_default_cert_source = mock.Mock() + + assert TermsOfServiceServiceClient._get_client_cert_source(None, False) is None + assert ( + TermsOfServiceServiceClient._get_client_cert_source( + mock_provided_cert_source, False + ) + is None + ) + assert ( + TermsOfServiceServiceClient._get_client_cert_source( + mock_provided_cert_source, True + ) + == mock_provided_cert_source + ) + + with mock.patch( + 
"google.auth.transport.mtls.has_default_client_cert_source", return_value=True + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=mock_default_cert_source, + ): + assert ( + TermsOfServiceServiceClient._get_client_cert_source(None, True) + is mock_default_cert_source + ) + assert ( + TermsOfServiceServiceClient._get_client_cert_source( + mock_provided_cert_source, "true" + ) + is mock_provided_cert_source + ) + + +@mock.patch.object( + TermsOfServiceServiceClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(TermsOfServiceServiceClient), +) +@mock.patch.object( + TermsOfServiceServiceAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(TermsOfServiceServiceAsyncClient), +) +def test__get_api_endpoint(): + api_override = "foo.com" + mock_client_cert_source = mock.Mock() + default_universe = TermsOfServiceServiceClient._DEFAULT_UNIVERSE + default_endpoint = TermsOfServiceServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=default_universe + ) + mock_universe = "bar.com" + mock_endpoint = TermsOfServiceServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=mock_universe + ) + + assert ( + TermsOfServiceServiceClient._get_api_endpoint( + api_override, mock_client_cert_source, default_universe, "always" + ) + == api_override + ) + assert ( + TermsOfServiceServiceClient._get_api_endpoint( + None, mock_client_cert_source, default_universe, "auto" + ) + == TermsOfServiceServiceClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + TermsOfServiceServiceClient._get_api_endpoint( + None, None, default_universe, "auto" + ) + == default_endpoint + ) + assert ( + TermsOfServiceServiceClient._get_api_endpoint( + None, None, default_universe, "always" + ) + == TermsOfServiceServiceClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + TermsOfServiceServiceClient._get_api_endpoint( + None, mock_client_cert_source, default_universe, "always" + ) + == TermsOfServiceServiceClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + TermsOfServiceServiceClient._get_api_endpoint( + None, None, mock_universe, "never" + ) + == mock_endpoint + ) + assert ( + TermsOfServiceServiceClient._get_api_endpoint( + None, None, default_universe, "never" + ) + == default_endpoint + ) + + with pytest.raises(MutualTLSChannelError) as excinfo: + TermsOfServiceServiceClient._get_api_endpoint( + None, mock_client_cert_source, mock_universe, "auto" + ) + assert ( + str(excinfo.value) + == "mTLS is not supported in any universe other than googleapis.com." + ) + + +def test__get_universe_domain(): + client_universe_domain = "foo.com" + universe_domain_env = "bar.com" + + assert ( + TermsOfServiceServiceClient._get_universe_domain( + client_universe_domain, universe_domain_env + ) + == client_universe_domain + ) + assert ( + TermsOfServiceServiceClient._get_universe_domain(None, universe_domain_env) + == universe_domain_env + ) + assert ( + TermsOfServiceServiceClient._get_universe_domain(None, None) + == TermsOfServiceServiceClient._DEFAULT_UNIVERSE + ) + + with pytest.raises(ValueError) as excinfo: + TermsOfServiceServiceClient._get_universe_domain("", None) + assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
+ + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + ( + TermsOfServiceServiceClient, + transports.TermsOfServiceServiceGrpcTransport, + "grpc", + ), + ( + TermsOfServiceServiceClient, + transports.TermsOfServiceServiceRestTransport, + "rest", + ), + ], +) +def test__validate_universe_domain(client_class, transport_class, transport_name): + client = client_class( + transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) + ) + assert client._validate_universe_domain() == True + + # Test the case when universe is already validated. + assert client._validate_universe_domain() == True + + if transport_name == "grpc": + # Test the case where credentials are provided by the + # `local_channel_credentials`. The default universes in both match. + channel = grpc.secure_channel( + "http://localhost/", grpc.local_channel_credentials() + ) + client = client_class(transport=transport_class(channel=channel)) + assert client._validate_universe_domain() == True + + # Test the case where credentials do not exist: e.g. a transport is provided + # with no credentials. Validation should still succeed because there is no + # mismatch with non-existent credentials. + channel = grpc.secure_channel( + "http://localhost/", grpc.local_channel_credentials() + ) + transport = transport_class(channel=channel) + transport._credentials = None + client = client_class(transport=transport) + assert client._validate_universe_domain() == True + + # TODO: This is needed to cater for older versions of google-auth + # Make this test unconditional once the minimum supported version of + # google-auth becomes 2.23.0 or higher. + google_auth_major, google_auth_minor = [ + int(part) for part in google.auth.__version__.split(".")[0:2] + ] + if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): + credentials = ga_credentials.AnonymousCredentials() + credentials._universe_domain = "foo.com" + # Test the case when there is a universe mismatch from the credentials. + client = client_class(transport=transport_class(credentials=credentials)) + with pytest.raises(ValueError) as excinfo: + client._validate_universe_domain() + assert ( + str(excinfo.value) + == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." + ) + + # Test the case when there is a universe mismatch from the client. + # + # TODO: Make this test unconditional once the minimum supported version of + # google-api-core becomes 2.15.0 or higher. + api_core_major, api_core_minor = [ + int(part) for part in api_core_version.__version__.split(".")[0:2] + ] + if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): + client = client_class( + client_options={"universe_domain": "bar.com"}, + transport=transport_class( + credentials=ga_credentials.AnonymousCredentials(), + ), + ) + with pytest.raises(ValueError) as excinfo: + client._validate_universe_domain() + assert ( + str(excinfo.value) + == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." 
+ ) + + # Test that ValueError is raised if universe_domain is provided via client options and credentials is None + with pytest.raises(ValueError): + client._compare_universes("foo.bar", None) + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (TermsOfServiceServiceClient, "grpc"), + (TermsOfServiceServiceAsyncClient, "grpc_asyncio"), + (TermsOfServiceServiceClient, "rest"), + ], +) +def test_terms_of_service_service_client_from_service_account_info( + client_class, transport_name +): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_info" + ) as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info, transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + "merchantapi.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://merchantapi.googleapis.com" + ) + + +@pytest.mark.parametrize( + "transport_class,transport_name", + [ + (transports.TermsOfServiceServiceGrpcTransport, "grpc"), + (transports.TermsOfServiceServiceGrpcAsyncIOTransport, "grpc_asyncio"), + (transports.TermsOfServiceServiceRestTransport, "rest"), + ], +) +def test_terms_of_service_service_client_service_account_always_use_jwt( + transport_class, transport_name +): + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) + + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=False) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (TermsOfServiceServiceClient, "grpc"), + (TermsOfServiceServiceAsyncClient, "grpc_asyncio"), + (TermsOfServiceServiceClient, "rest"), + ], +) +def test_terms_of_service_service_client_from_service_account_file( + client_class, transport_name +): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_file" + ) as factory: + factory.return_value = creds + client = client_class.from_service_account_file( + "dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + client = client_class.from_service_account_json( + "dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + "merchantapi.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://merchantapi.googleapis.com" + ) + + +def test_terms_of_service_service_client_get_transport_class(): + transport = TermsOfServiceServiceClient.get_transport_class() + available_transports = [ + transports.TermsOfServiceServiceGrpcTransport, + transports.TermsOfServiceServiceRestTransport, + ] + assert transport in available_transports + + transport = TermsOfServiceServiceClient.get_transport_class("grpc") 
+ assert transport == transports.TermsOfServiceServiceGrpcTransport + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + ( + TermsOfServiceServiceClient, + transports.TermsOfServiceServiceGrpcTransport, + "grpc", + ), + ( + TermsOfServiceServiceAsyncClient, + transports.TermsOfServiceServiceGrpcAsyncIOTransport, + "grpc_asyncio", + ), + ( + TermsOfServiceServiceClient, + transports.TermsOfServiceServiceRestTransport, + "rest", + ), + ], +) +@mock.patch.object( + TermsOfServiceServiceClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(TermsOfServiceServiceClient), +) +@mock.patch.object( + TermsOfServiceServiceAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(TermsOfServiceServiceAsyncClient), +) +def test_terms_of_service_service_client_client_options( + client_class, transport_class, transport_name +): + # Check that if channel is provided we won't create a new one. + with mock.patch.object(TermsOfServiceServiceClient, "get_transport_class") as gtc: + transport = transport_class(credentials=ga_credentials.AnonymousCredentials()) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object(TermsOfServiceServiceClient, "get_transport_class") as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. + options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name, client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. 
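+    # An unsupported value should surface as MutualTLSChannelError with a
+    # message naming the accepted values (`never`, `auto`, `always`).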
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + client = client_class(transport=transport_name) + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError) as excinfo: + client = client_class(transport=transport_name) + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + # Check the case api_endpoint is provided + options = client_options.ClientOptions( + api_audience="https://language.googleapis.com" + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience="https://language.googleapis.com", + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,use_client_cert_env", + [ + ( + TermsOfServiceServiceClient, + transports.TermsOfServiceServiceGrpcTransport, + "grpc", + "true", + ), + ( + TermsOfServiceServiceAsyncClient, + transports.TermsOfServiceServiceGrpcAsyncIOTransport, + "grpc_asyncio", + "true", + ), + ( + TermsOfServiceServiceClient, + transports.TermsOfServiceServiceGrpcTransport, + "grpc", + "false", + ), + ( + TermsOfServiceServiceAsyncClient, + transports.TermsOfServiceServiceGrpcAsyncIOTransport, + "grpc_asyncio", + "false", + ), + ( + TermsOfServiceServiceClient, + transports.TermsOfServiceServiceRestTransport, + "rest", + "true", + ), + ( + TermsOfServiceServiceClient, + transports.TermsOfServiceServiceRestTransport, + "rest", + "false", + ), + ], +) +@mock.patch.object( + TermsOfServiceServiceClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(TermsOfServiceServiceClient), +) +@mock.patch.object( + TermsOfServiceServiceAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(TermsOfServiceServiceAsyncClient), +) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_terms_of_service_service_client_mtls_env_auto( + client_class, transport_class, transport_name, use_client_cert_env +): + # This tests the endpoint autoswitch 
behavior. Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. + + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + options = client_options.ClientOptions( + client_cert_source=client_cert_source_callback + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ) + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=client_cert_source_callback, + ): + if use_client_cert_env == "false": + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ) + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case client_cert_source and ADC client cert are not provided. 
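+    # With no certificate from either source, the client should stay on the
+    # regular endpoint and pass client_cert_source_for_mtls=None to the transport.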
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class", [TermsOfServiceServiceClient, TermsOfServiceServiceAsyncClient] +) +@mock.patch.object( + TermsOfServiceServiceClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(TermsOfServiceServiceClient), +) +@mock.patch.object( + TermsOfServiceServiceAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(TermsOfServiceServiceAsyncClient), +) +def test_terms_of_service_service_client_get_mtls_endpoint_and_cert_source( + client_class, +): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. 
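+    # With a discoverable default client certificate, "auto" should resolve to
+    # the mTLS endpoint and return that certificate source.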
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=mock_client_cert_source, + ): + ( + api_endpoint, + cert_source, + ) = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + client_class.get_mtls_endpoint_and_cert_source() + + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError) as excinfo: + client_class.get_mtls_endpoint_and_cert_source() + + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + + +@pytest.mark.parametrize( + "client_class", [TermsOfServiceServiceClient, TermsOfServiceServiceAsyncClient] +) +@mock.patch.object( + TermsOfServiceServiceClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(TermsOfServiceServiceClient), +) +@mock.patch.object( + TermsOfServiceServiceAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(TermsOfServiceServiceAsyncClient), +) +def test_terms_of_service_service_client_client_api_endpoint(client_class): + mock_client_cert_source = client_cert_source_callback + api_override = "foo.com" + default_universe = TermsOfServiceServiceClient._DEFAULT_UNIVERSE + default_endpoint = TermsOfServiceServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=default_universe + ) + mock_universe = "bar.com" + mock_endpoint = TermsOfServiceServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=mock_universe + ) + + # If ClientOptions.api_endpoint is set and GOOGLE_API_USE_CLIENT_CERTIFICATE="true", + # use ClientOptions.api_endpoint as the api endpoint regardless. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ): + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=api_override + ) + client = client_class( + client_options=options, + credentials=ga_credentials.AnonymousCredentials(), + ) + assert client.api_endpoint == api_override + + # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="never", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + client = client_class(credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == default_endpoint + + # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="always", + # use the DEFAULT_MTLS_ENDPOINT as the api endpoint. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + client = client_class(credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + + # If ClientOptions.api_endpoint is not set, GOOGLE_API_USE_MTLS_ENDPOINT="auto" (default), + # GOOGLE_API_USE_CLIENT_CERTIFICATE="false" (default), default cert source doesn't exist, + # and ClientOptions.universe_domain="bar.com", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with universe domain as the api endpoint. + options = client_options.ClientOptions() + universe_exists = hasattr(options, "universe_domain") + if universe_exists: + options = client_options.ClientOptions(universe_domain=mock_universe) + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + else: + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + assert client.api_endpoint == ( + mock_endpoint if universe_exists else default_endpoint + ) + assert client.universe_domain == ( + mock_universe if universe_exists else default_universe + ) + + # If ClientOptions does not have a universe domain attribute and GOOGLE_API_USE_MTLS_ENDPOINT="never", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. + options = client_options.ClientOptions() + if hasattr(options, "universe_domain"): + delattr(options, "universe_domain") + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + assert client.api_endpoint == default_endpoint + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + ( + TermsOfServiceServiceClient, + transports.TermsOfServiceServiceGrpcTransport, + "grpc", + ), + ( + TermsOfServiceServiceAsyncClient, + transports.TermsOfServiceServiceGrpcAsyncIOTransport, + "grpc_asyncio", + ), + ( + TermsOfServiceServiceClient, + transports.TermsOfServiceServiceRestTransport, + "rest", + ), + ], +) +def test_terms_of_service_service_client_client_options_scopes( + client_class, transport_class, transport_name +): + # Check the case scopes are provided. + options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + TermsOfServiceServiceClient, + transports.TermsOfServiceServiceGrpcTransport, + "grpc", + grpc_helpers, + ), + ( + TermsOfServiceServiceAsyncClient, + transports.TermsOfServiceServiceGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + ( + TermsOfServiceServiceClient, + transports.TermsOfServiceServiceRestTransport, + "rest", + None, + ), + ], +) +def test_terms_of_service_service_client_client_options_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. 
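+    # The path is expected to be forwarded to the transport as
+    # `credentials_file`, with `credentials` itself left as None.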
+ options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +def test_terms_of_service_service_client_client_options_from_dict(): + with mock.patch( + "google.shopping.merchant_accounts_v1beta.services.terms_of_service_service.transports.TermsOfServiceServiceGrpcTransport.__init__" + ) as grpc_transport: + grpc_transport.return_value = None + client = TermsOfServiceServiceClient( + client_options={"api_endpoint": "squid.clam.whelk"} + ) + grpc_transport.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + TermsOfServiceServiceClient, + transports.TermsOfServiceServiceGrpcTransport, + "grpc", + grpc_helpers, + ), + ( + TermsOfServiceServiceAsyncClient, + transports.TermsOfServiceServiceGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + ], +) +def test_terms_of_service_service_client_create_channel_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. + options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # test that the credentials from file are saved and used as the credentials. 
+ with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel" + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + file_creds = ga_credentials.AnonymousCredentials() + load_creds.return_value = (file_creds, None) + adc.return_value = (creds, None) + client = client_class(client_options=options, transport=transport_name) + create_channel.assert_called_with( + "merchantapi.googleapis.com:443", + credentials=file_creds, + credentials_file=None, + quota_project_id=None, + default_scopes=("https://www.googleapis.com/auth/content",), + scopes=None, + default_host="merchantapi.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "request_type", + [ + termsofservice.GetTermsOfServiceRequest, + dict, + ], +) +def test_get_terms_of_service(request_type, transport: str = "grpc"): + client = TermsOfServiceServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_terms_of_service), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = termsofservice.TermsOfService( + name="name_value", + region_code="region_code_value", + kind=termsofservicekind.TermsOfServiceKind.MERCHANT_CENTER, + file_uri="file_uri_value", + external=True, + ) + response = client.get_terms_of_service(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = termsofservice.GetTermsOfServiceRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, termsofservice.TermsOfService) + assert response.name == "name_value" + assert response.region_code == "region_code_value" + assert response.kind == termsofservicekind.TermsOfServiceKind.MERCHANT_CENTER + assert response.file_uri == "file_uri_value" + assert response.external is True + + +def test_get_terms_of_service_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = TermsOfServiceServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_terms_of_service), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.get_terms_of_service() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == termsofservice.GetTermsOfServiceRequest() + + +def test_get_terms_of_service_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. 
+ client = TermsOfServiceServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = termsofservice.GetTermsOfServiceRequest( + name="name_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_terms_of_service), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.get_terms_of_service(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == termsofservice.GetTermsOfServiceRequest( + name="name_value", + ) + + +def test_get_terms_of_service_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = TermsOfServiceServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.get_terms_of_service in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.get_terms_of_service + ] = mock_rpc + request = {} + client.get_terms_of_service(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.get_terms_of_service(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_get_terms_of_service_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = TermsOfServiceServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_terms_of_service), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + termsofservice.TermsOfService( + name="name_value", + region_code="region_code_value", + kind=termsofservicekind.TermsOfServiceKind.MERCHANT_CENTER, + file_uri="file_uri_value", + external=True, + ) + ) + response = await client.get_terms_of_service() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == termsofservice.GetTermsOfServiceRequest() + + +@pytest.mark.asyncio +async def test_get_terms_of_service_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = TermsOfServiceServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.get_terms_of_service + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[ + client._client._transport.get_terms_of_service + ] = mock_object + + request = {} + await client.get_terms_of_service(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + await client.get_terms_of_service(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + +@pytest.mark.asyncio +async def test_get_terms_of_service_async( + transport: str = "grpc_asyncio", + request_type=termsofservice.GetTermsOfServiceRequest, +): + client = TermsOfServiceServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_terms_of_service), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + termsofservice.TermsOfService( + name="name_value", + region_code="region_code_value", + kind=termsofservicekind.TermsOfServiceKind.MERCHANT_CENTER, + file_uri="file_uri_value", + external=True, + ) + ) + response = await client.get_terms_of_service(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = termsofservice.GetTermsOfServiceRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, termsofservice.TermsOfService) + assert response.name == "name_value" + assert response.region_code == "region_code_value" + assert response.kind == termsofservicekind.TermsOfServiceKind.MERCHANT_CENTER + assert response.file_uri == "file_uri_value" + assert response.external is True + + +@pytest.mark.asyncio +async def test_get_terms_of_service_async_from_dict(): + await test_get_terms_of_service_async(request_type=dict) + + +def test_get_terms_of_service_field_headers(): + client = TermsOfServiceServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = termsofservice.GetTermsOfServiceRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_terms_of_service), "__call__" + ) as call: + call.return_value = termsofservice.TermsOfService() + client.get_terms_of_service(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_terms_of_service_field_headers_async(): + client = TermsOfServiceServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = termsofservice.GetTermsOfServiceRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_terms_of_service), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + termsofservice.TermsOfService() + ) + await client.get_terms_of_service(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_get_terms_of_service_flattened(): + client = TermsOfServiceServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_terms_of_service), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = termsofservice.TermsOfService() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_terms_of_service( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_get_terms_of_service_flattened_error(): + client = TermsOfServiceServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get_terms_of_service( + termsofservice.GetTermsOfServiceRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_get_terms_of_service_flattened_async(): + client = TermsOfServiceServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_terms_of_service), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = termsofservice.TermsOfService() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + termsofservice.TermsOfService() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_terms_of_service( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_get_terms_of_service_flattened_error_async(): + client = TermsOfServiceServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.get_terms_of_service( + termsofservice.GetTermsOfServiceRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + termsofservice.RetrieveLatestTermsOfServiceRequest, + dict, + ], +) +def test_retrieve_latest_terms_of_service(request_type, transport: str = "grpc"): + client = TermsOfServiceServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.retrieve_latest_terms_of_service), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = termsofservice.TermsOfService( + name="name_value", + region_code="region_code_value", + kind=termsofservicekind.TermsOfServiceKind.MERCHANT_CENTER, + file_uri="file_uri_value", + external=True, + ) + response = client.retrieve_latest_terms_of_service(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = termsofservice.RetrieveLatestTermsOfServiceRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, termsofservice.TermsOfService) + assert response.name == "name_value" + assert response.region_code == "region_code_value" + assert response.kind == termsofservicekind.TermsOfServiceKind.MERCHANT_CENTER + assert response.file_uri == "file_uri_value" + assert response.external is True + + +def test_retrieve_latest_terms_of_service_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. 
+ client = TermsOfServiceServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.retrieve_latest_terms_of_service), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.retrieve_latest_terms_of_service() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == termsofservice.RetrieveLatestTermsOfServiceRequest() + + +def test_retrieve_latest_terms_of_service_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = TermsOfServiceServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = termsofservice.RetrieveLatestTermsOfServiceRequest( + region_code="region_code_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.retrieve_latest_terms_of_service), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.retrieve_latest_terms_of_service(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == termsofservice.RetrieveLatestTermsOfServiceRequest( + region_code="region_code_value", + ) + + +def test_retrieve_latest_terms_of_service_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = TermsOfServiceServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.retrieve_latest_terms_of_service + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.retrieve_latest_terms_of_service + ] = mock_rpc + request = {} + client.retrieve_latest_terms_of_service(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.retrieve_latest_terms_of_service(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_retrieve_latest_terms_of_service_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = TermsOfServiceServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.retrieve_latest_terms_of_service), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + termsofservice.TermsOfService( + name="name_value", + region_code="region_code_value", + kind=termsofservicekind.TermsOfServiceKind.MERCHANT_CENTER, + file_uri="file_uri_value", + external=True, + ) + ) + response = await client.retrieve_latest_terms_of_service() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == termsofservice.RetrieveLatestTermsOfServiceRequest() + + +@pytest.mark.asyncio +async def test_retrieve_latest_terms_of_service_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = TermsOfServiceServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.retrieve_latest_terms_of_service + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[ + client._client._transport.retrieve_latest_terms_of_service + ] = mock_object + + request = {} + await client.retrieve_latest_terms_of_service(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + await client.retrieve_latest_terms_of_service(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + +@pytest.mark.asyncio +async def test_retrieve_latest_terms_of_service_async( + transport: str = "grpc_asyncio", + request_type=termsofservice.RetrieveLatestTermsOfServiceRequest, +): + client = TermsOfServiceServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.retrieve_latest_terms_of_service), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + termsofservice.TermsOfService( + name="name_value", + region_code="region_code_value", + kind=termsofservicekind.TermsOfServiceKind.MERCHANT_CENTER, + file_uri="file_uri_value", + external=True, + ) + ) + response = await client.retrieve_latest_terms_of_service(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = termsofservice.RetrieveLatestTermsOfServiceRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, termsofservice.TermsOfService) + assert response.name == "name_value" + assert response.region_code == "region_code_value" + assert response.kind == termsofservicekind.TermsOfServiceKind.MERCHANT_CENTER + assert response.file_uri == "file_uri_value" + assert response.external is True + + +@pytest.mark.asyncio +async def test_retrieve_latest_terms_of_service_async_from_dict(): + await test_retrieve_latest_terms_of_service_async(request_type=dict) + + +@pytest.mark.parametrize( + "request_type", + [ + termsofservice.AcceptTermsOfServiceRequest, + dict, + ], +) +def test_accept_terms_of_service(request_type, transport: str = "grpc"): + client = TermsOfServiceServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.accept_terms_of_service), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.accept_terms_of_service(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = termsofservice.AcceptTermsOfServiceRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +def test_accept_terms_of_service_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = TermsOfServiceServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.accept_terms_of_service), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.accept_terms_of_service() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == termsofservice.AcceptTermsOfServiceRequest() + + +def test_accept_terms_of_service_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = TermsOfServiceServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = termsofservice.AcceptTermsOfServiceRequest( + name="name_value", + account="account_value", + region_code="region_code_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.accept_terms_of_service), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.accept_terms_of_service(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == termsofservice.AcceptTermsOfServiceRequest( + name="name_value", + account="account_value", + region_code="region_code_value", + ) + + +def test_accept_terms_of_service_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = TermsOfServiceServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.accept_terms_of_service + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.accept_terms_of_service + ] = mock_rpc + request = {} + client.accept_terms_of_service(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.accept_terms_of_service(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_accept_terms_of_service_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = TermsOfServiceServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.accept_terms_of_service), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.accept_terms_of_service() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == termsofservice.AcceptTermsOfServiceRequest() + + +@pytest.mark.asyncio +async def test_accept_terms_of_service_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = TermsOfServiceServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.accept_terms_of_service + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[ + client._client._transport.accept_terms_of_service + ] = mock_object + + request = {} + await client.accept_terms_of_service(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_object.call_count == 1 + + await client.accept_terms_of_service(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + +@pytest.mark.asyncio +async def test_accept_terms_of_service_async( + transport: str = "grpc_asyncio", + request_type=termsofservice.AcceptTermsOfServiceRequest, +): + client = TermsOfServiceServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.accept_terms_of_service), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.accept_terms_of_service(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = termsofservice.AcceptTermsOfServiceRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_accept_terms_of_service_async_from_dict(): + await test_accept_terms_of_service_async(request_type=dict) + + +def test_accept_terms_of_service_field_headers(): + client = TermsOfServiceServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = termsofservice.AcceptTermsOfServiceRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.accept_terms_of_service), "__call__" + ) as call: + call.return_value = None + client.accept_terms_of_service(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_accept_terms_of_service_field_headers_async(): + client = TermsOfServiceServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = termsofservice.AcceptTermsOfServiceRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.accept_terms_of_service), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.accept_terms_of_service(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_accept_terms_of_service_flattened(): + client = TermsOfServiceServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.accept_terms_of_service), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = None + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.accept_terms_of_service( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_accept_terms_of_service_flattened_error(): + client = TermsOfServiceServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.accept_terms_of_service( + termsofservice.AcceptTermsOfServiceRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_accept_terms_of_service_flattened_async(): + client = TermsOfServiceServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.accept_terms_of_service), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = None + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.accept_terms_of_service( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_accept_terms_of_service_flattened_error_async(): + client = TermsOfServiceServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.accept_terms_of_service( + termsofservice.AcceptTermsOfServiceRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + termsofservice.GetTermsOfServiceRequest, + dict, + ], +) +def test_get_terms_of_service_rest(request_type): + client = TermsOfServiceServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "termsOfService/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = termsofservice.TermsOfService( + name="name_value", + region_code="region_code_value", + kind=termsofservicekind.TermsOfServiceKind.MERCHANT_CENTER, + file_uri="file_uri_value", + external=True, + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = termsofservice.TermsOfService.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_terms_of_service(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, termsofservice.TermsOfService) + assert response.name == "name_value" + assert response.region_code == "region_code_value" + assert response.kind == termsofservicekind.TermsOfServiceKind.MERCHANT_CENTER + assert response.file_uri == "file_uri_value" + assert response.external is True + + +def test_get_terms_of_service_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = TermsOfServiceServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.get_terms_of_service in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.get_terms_of_service + ] = mock_rpc + + request = {} + client.get_terms_of_service(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get_terms_of_service(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_get_terms_of_service_rest_required_fields( + request_type=termsofservice.GetTermsOfServiceRequest, +): + transport_class = transports.TermsOfServiceServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_terms_of_service._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_terms_of_service._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = TermsOfServiceServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = termsofservice.TermsOfService() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = termsofservice.TermsOfService.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_terms_of_service(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_terms_of_service_rest_unset_required_fields(): + transport = transports.TermsOfServiceServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_terms_of_service._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_terms_of_service_rest_interceptors(null_interceptor): + transport = transports.TermsOfServiceServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.TermsOfServiceServiceRestInterceptor(), + ) + client = TermsOfServiceServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.TermsOfServiceServiceRestInterceptor, "post_get_terms_of_service" + ) as post, mock.patch.object( + transports.TermsOfServiceServiceRestInterceptor, "pre_get_terms_of_service" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = termsofservice.GetTermsOfServiceRequest.pb( + termsofservice.GetTermsOfServiceRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = termsofservice.TermsOfService.to_json( + termsofservice.TermsOfService() + ) + + request = termsofservice.GetTermsOfServiceRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = termsofservice.TermsOfService() + + client.get_terms_of_service( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_terms_of_service_rest_bad_request( + transport: str = "rest", request_type=termsofservice.GetTermsOfServiceRequest +): + client = TermsOfServiceServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "termsOfService/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_terms_of_service(request) + + +def test_get_terms_of_service_rest_flattened(): + client = TermsOfServiceServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = termsofservice.TermsOfService() + + # get arguments that satisfy an http rule for this method + sample_request = {"name": "termsOfService/sample1"} + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = termsofservice.TermsOfService.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_terms_of_service(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/accounts/v1beta/{name=termsOfService/*}" % client.transport._host, + args[1], + ) + + +def test_get_terms_of_service_rest_flattened_error(transport: str = "rest"): + client = TermsOfServiceServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_terms_of_service( + termsofservice.GetTermsOfServiceRequest(), + name="name_value", + ) + + +def test_get_terms_of_service_rest_error(): + client = TermsOfServiceServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + termsofservice.RetrieveLatestTermsOfServiceRequest, + dict, + ], +) +def test_retrieve_latest_terms_of_service_rest(request_type): + client = TermsOfServiceServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = termsofservice.TermsOfService( + name="name_value", + region_code="region_code_value", + kind=termsofservicekind.TermsOfServiceKind.MERCHANT_CENTER, + file_uri="file_uri_value", + external=True, + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = termsofservice.TermsOfService.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.retrieve_latest_terms_of_service(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, termsofservice.TermsOfService) + assert response.name == "name_value" + assert response.region_code == "region_code_value" + assert response.kind == termsofservicekind.TermsOfServiceKind.MERCHANT_CENTER + assert response.file_uri == "file_uri_value" + assert response.external is True + + +def test_retrieve_latest_terms_of_service_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = TermsOfServiceServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.retrieve_latest_terms_of_service + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.retrieve_latest_terms_of_service + ] = mock_rpc + + request = {} + client.retrieve_latest_terms_of_service(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.retrieve_latest_terms_of_service(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_retrieve_latest_terms_of_service_rest_interceptors(null_interceptor): + transport = transports.TermsOfServiceServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.TermsOfServiceServiceRestInterceptor(), + ) + client = TermsOfServiceServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.TermsOfServiceServiceRestInterceptor, + "post_retrieve_latest_terms_of_service", + ) as post, mock.patch.object( + transports.TermsOfServiceServiceRestInterceptor, + "pre_retrieve_latest_terms_of_service", + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = termsofservice.RetrieveLatestTermsOfServiceRequest.pb( + termsofservice.RetrieveLatestTermsOfServiceRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = termsofservice.TermsOfService.to_json( + termsofservice.TermsOfService() + ) + + request = termsofservice.RetrieveLatestTermsOfServiceRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = termsofservice.TermsOfService() + + client.retrieve_latest_terms_of_service( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_retrieve_latest_terms_of_service_rest_bad_request( + transport: str = "rest", + request_type=termsofservice.RetrieveLatestTermsOfServiceRequest, +): + client = TermsOfServiceServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.retrieve_latest_terms_of_service(request) + + +def test_retrieve_latest_terms_of_service_rest_error(): + client = TermsOfServiceServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + termsofservice.AcceptTermsOfServiceRequest, + dict, + ], +) +def test_accept_terms_of_service_rest(request_type): + client = TermsOfServiceServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "termsOfService/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.accept_terms_of_service(request) + + # Establish that the response is the type that we expect. + assert response is None + + +def test_accept_terms_of_service_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = TermsOfServiceServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.accept_terms_of_service + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.accept_terms_of_service + ] = mock_rpc + + request = {} + client.accept_terms_of_service(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.accept_terms_of_service(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_accept_terms_of_service_rest_required_fields( + request_type=termsofservice.AcceptTermsOfServiceRequest, +): + transport_class = transports.TermsOfServiceServiceRestTransport + + request_init = {} + request_init["name"] = "" + request_init["account"] = "" + request_init["region_code"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + assert "account" not in jsonified_request + assert "regionCode" not in jsonified_request + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).accept_terms_of_service._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + assert "account" in jsonified_request + assert jsonified_request["account"] == request_init["account"] + assert "regionCode" in jsonified_request + assert jsonified_request["regionCode"] == request_init["region_code"] + + jsonified_request["name"] = "name_value" + jsonified_request["account"] = "account_value" + jsonified_request["regionCode"] = "region_code_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).accept_terms_of_service._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set( + ( + "account", + "region_code", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + assert "account" in jsonified_request + assert jsonified_request["account"] == "account_value" + assert "regionCode" in jsonified_request + assert jsonified_request["regionCode"] == "region_code_value" + + client = TermsOfServiceServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = None + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.accept_terms_of_service(request) + + expected_params = [ + ( + "account", + "", + ), + ( + "regionCode", + "", + ), + ("$alt", "json;enum-encoding=int"), + ] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_accept_terms_of_service_rest_unset_required_fields(): + transport = transports.TermsOfServiceServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.accept_terms_of_service._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "account", + "regionCode", + ) + ) + & set( + ( + "name", + "account", + "regionCode", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_accept_terms_of_service_rest_interceptors(null_interceptor): + transport = transports.TermsOfServiceServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.TermsOfServiceServiceRestInterceptor(), + ) + client = TermsOfServiceServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.TermsOfServiceServiceRestInterceptor, "pre_accept_terms_of_service" + ) as pre: + pre.assert_not_called() + pb_message = termsofservice.AcceptTermsOfServiceRequest.pb( + termsofservice.AcceptTermsOfServiceRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + + request = termsofservice.AcceptTermsOfServiceRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + + client.accept_terms_of_service( + request, + metadata=[ + 
("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + + +def test_accept_terms_of_service_rest_bad_request( + transport: str = "rest", request_type=termsofservice.AcceptTermsOfServiceRequest +): + client = TermsOfServiceServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "termsOfService/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.accept_terms_of_service(request) + + +def test_accept_terms_of_service_rest_flattened(): + client = TermsOfServiceServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = None + + # get arguments that satisfy an http rule for this method + sample_request = {"name": "termsOfService/sample1"} + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.accept_terms_of_service(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/accounts/v1beta/{name=termsOfService/*}:accept" + % client.transport._host, + args[1], + ) + + +def test_accept_terms_of_service_rest_flattened_error(transport: str = "rest"): + client = TermsOfServiceServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.accept_terms_of_service( + termsofservice.AcceptTermsOfServiceRequest(), + name="name_value", + ) + + +def test_accept_terms_of_service_rest_error(): + client = TermsOfServiceServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.TermsOfServiceServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = TermsOfServiceServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.TermsOfServiceServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = TermsOfServiceServiceClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. 
+ transport = transports.TermsOfServiceServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = TermsOfServiceServiceClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = TermsOfServiceServiceClient( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.TermsOfServiceServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = TermsOfServiceServiceClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.TermsOfServiceServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = TermsOfServiceServiceClient(transport=transport) + assert client.transport is transport + + +def test_transport_get_channel(): + # A client may be instantiated with a custom transport instance. + transport = transports.TermsOfServiceServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + transport = transports.TermsOfServiceServiceGrpcAsyncIOTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.TermsOfServiceServiceGrpcTransport, + transports.TermsOfServiceServiceGrpcAsyncIOTransport, + transports.TermsOfServiceServiceRestTransport, + ], +) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "rest", + ], +) +def test_transport_kind(transport_name): + transport = TermsOfServiceServiceClient.get_transport_class(transport_name)( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert transport.kind == transport_name + + +def test_transport_grpc_default(): + # A client should use the gRPC transport by default. + client = TermsOfServiceServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert isinstance( + client.transport, + transports.TermsOfServiceServiceGrpcTransport, + ) + + +def test_terms_of_service_service_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.TermsOfServiceServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json", + ) + + +def test_terms_of_service_service_base_transport(): + # Instantiate the base transport. 
+ with mock.patch( + "google.shopping.merchant_accounts_v1beta.services.terms_of_service_service.transports.TermsOfServiceServiceTransport.__init__" + ) as Transport: + Transport.return_value = None + transport = transports.TermsOfServiceServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. + methods = ( + "get_terms_of_service", + "retrieve_latest_terms_of_service", + "accept_terms_of_service", + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + with pytest.raises(NotImplementedError): + transport.close() + + # Catch all for all remaining methods and properties + remainder = [ + "kind", + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + + +def test_terms_of_service_service_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch( + "google.shopping.merchant_accounts_v1beta.services.terms_of_service_service.transports.TermsOfServiceServiceTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.TermsOfServiceServiceTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with( + "credentials.json", + scopes=None, + default_scopes=("https://www.googleapis.com/auth/content",), + quota_project_id="octopus", + ) + + +def test_terms_of_service_service_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. + with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( + "google.shopping.merchant_accounts_v1beta.services.terms_of_service_service.transports.TermsOfServiceServiceTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.TermsOfServiceServiceTransport() + adc.assert_called_once() + + +def test_terms_of_service_service_auth_adc(): + # If no credentials are provided, we should use ADC credentials. + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + TermsOfServiceServiceClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=("https://www.googleapis.com/auth/content",), + quota_project_id=None, + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.TermsOfServiceServiceGrpcTransport, + transports.TermsOfServiceServiceGrpcAsyncIOTransport, + ], +) +def test_terms_of_service_service_transport_auth_adc(transport_class): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
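+    # The scopes and quota_project_id given to the transport should be forwarded to google.auth.default().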
+ with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + adc.assert_called_once_with( + scopes=["1", "2"], + default_scopes=("https://www.googleapis.com/auth/content",), + quota_project_id="octopus", + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.TermsOfServiceServiceGrpcTransport, + transports.TermsOfServiceServiceGrpcAsyncIOTransport, + transports.TermsOfServiceServiceRestTransport, + ], +) +def test_terms_of_service_service_transport_auth_gdch_credentials(transport_class): + host = "https://language.com" + api_audience_tests = [None, "https://language2.com"] + api_audience_expect = [host, "https://language2.com"] + for t, e in zip(api_audience_tests, api_audience_expect): + with mock.patch.object(google.auth, "default", autospec=True) as adc: + gdch_mock = mock.MagicMock() + type(gdch_mock).with_gdch_audience = mock.PropertyMock( + return_value=gdch_mock + ) + adc.return_value = (gdch_mock, None) + transport_class(host=host, api_audience=t) + gdch_mock.with_gdch_audience.assert_called_once_with(e) + + +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.TermsOfServiceServiceGrpcTransport, grpc_helpers), + (transports.TermsOfServiceServiceGrpcAsyncIOTransport, grpc_helpers_async), + ], +) +def test_terms_of_service_service_transport_create_channel( + transport_class, grpc_helpers +): + # If credentials and host are not provided, the transport class should use + # ADC credentials. + with mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + + create_channel.assert_called_with( + "merchantapi.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + default_scopes=("https://www.googleapis.com/auth/content",), + scopes=["1", "2"], + default_host="merchantapi.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.TermsOfServiceServiceGrpcTransport, + transports.TermsOfServiceServiceGrpcAsyncIOTransport, + ], +) +def test_terms_of_service_service_grpc_transport_client_cert_source_for_mtls( + transport_class, +): + cred = ga_credentials.AnonymousCredentials() + + # Check ssl_channel_credentials is used if provided. 
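+    # An explicitly provided ssl_channel_credentials object should be passed through to create_channel unchanged.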
+ with mock.patch.object(transport_class, "create_channel") as mock_create_channel: + mock_ssl_channel_creds = mock.Mock() + transport_class( + host="squid.clam.whelk", + credentials=cred, + ssl_channel_credentials=mock_ssl_channel_creds, + ) + mock_create_channel.assert_called_once_with( + "squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_channel_creds, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls + # is used. + with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): + with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: + transport_class( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback, + ) + expected_cert, expected_key = client_cert_source_callback() + mock_ssl_cred.assert_called_once_with( + certificate_chain=expected_cert, private_key=expected_key + ) + + +def test_terms_of_service_service_http_transport_client_cert_source_for_mtls(): + cred = ga_credentials.AnonymousCredentials() + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ) as mock_configure_mtls_channel: + transports.TermsOfServiceServiceRestTransport( + credentials=cred, client_cert_source_for_mtls=client_cert_source_callback + ) + mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + "rest", + ], +) +def test_terms_of_service_service_host_no_port(transport_name): + client = TermsOfServiceServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="merchantapi.googleapis.com" + ), + transport=transport_name, + ) + assert client.transport._host == ( + "merchantapi.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://merchantapi.googleapis.com" + ) + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + "rest", + ], +) +def test_terms_of_service_service_host_with_port(transport_name): + client = TermsOfServiceServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="merchantapi.googleapis.com:8000" + ), + transport=transport_name, + ) + assert client.transport._host == ( + "merchantapi.googleapis.com:8000" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://merchantapi.googleapis.com:8000" + ) + + +@pytest.mark.parametrize( + "transport_name", + [ + "rest", + ], +) +def test_terms_of_service_service_client_transport_session_collision(transport_name): + creds1 = ga_credentials.AnonymousCredentials() + creds2 = ga_credentials.AnonymousCredentials() + client1 = TermsOfServiceServiceClient( + credentials=creds1, + transport=transport_name, + ) + client2 = TermsOfServiceServiceClient( + credentials=creds2, + transport=transport_name, + ) + session1 = client1.transport.get_terms_of_service._session + session2 = client2.transport.get_terms_of_service._session + assert session1 != session2 + session1 = client1.transport.retrieve_latest_terms_of_service._session + session2 = client2.transport.retrieve_latest_terms_of_service._session + assert session1 != session2 
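+    # The remaining wrapped REST method should likewise get a distinct session per client.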
+ session1 = client1.transport.accept_terms_of_service._session + session2 = client2.transport.accept_terms_of_service._session + assert session1 != session2 + + +def test_terms_of_service_service_grpc_transport_channel(): + channel = grpc.secure_channel("http://localhost/", grpc.local_channel_credentials()) + + # Check that channel is used if provided. + transport = transports.TermsOfServiceServiceGrpcTransport( + host="squid.clam.whelk", + channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None + + +def test_terms_of_service_service_grpc_asyncio_transport_channel(): + channel = aio.secure_channel("http://localhost/", grpc.local_channel_credentials()) + + # Check that channel is used if provided. + transport = transports.TermsOfServiceServiceGrpcAsyncIOTransport( + host="squid.clam.whelk", + channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. +@pytest.mark.parametrize( + "transport_class", + [ + transports.TermsOfServiceServiceGrpcTransport, + transports.TermsOfServiceServiceGrpcAsyncIOTransport, + ], +) +def test_terms_of_service_service_transport_channel_mtls_with_client_cert_source( + transport_class, +): + with mock.patch( + "grpc.ssl_channel_credentials", autospec=True + ) as grpc_ssl_channel_cred: + with mock.patch.object( + transport_class, "create_channel" + ) as grpc_create_channel: + mock_ssl_cred = mock.Mock() + grpc_ssl_channel_cred.return_value = mock_ssl_cred + + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + + cred = ga_credentials.AnonymousCredentials() + with pytest.warns(DeprecationWarning): + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (cred, None) + transport = transport_class( + host="squid.clam.whelk", + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=client_cert_source_callback, + ) + adc.assert_called_once() + + grpc_ssl_channel_cred.assert_called_once_with( + certificate_chain=b"cert bytes", private_key=b"key bytes" + ) + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + assert transport._ssl_channel_credentials == mock_ssl_cred + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. 
+@pytest.mark.parametrize( + "transport_class", + [ + transports.TermsOfServiceServiceGrpcTransport, + transports.TermsOfServiceServiceGrpcAsyncIOTransport, + ], +) +def test_terms_of_service_service_transport_channel_mtls_with_adc(transport_class): + mock_ssl_cred = mock.Mock() + with mock.patch.multiple( + "google.auth.transport.grpc.SslCredentials", + __init__=mock.Mock(return_value=None), + ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), + ): + with mock.patch.object( + transport_class, "create_channel" + ) as grpc_create_channel: + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + mock_cred = mock.Mock() + + with pytest.warns(DeprecationWarning): + transport = transport_class( + host="squid.clam.whelk", + credentials=mock_cred, + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=None, + ) + + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=mock_cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + + +def test_account_path(): + account = "squid" + expected = "accounts/{account}".format( + account=account, + ) + actual = TermsOfServiceServiceClient.account_path(account) + assert expected == actual + + +def test_parse_account_path(): + expected = { + "account": "clam", + } + path = TermsOfServiceServiceClient.account_path(**expected) + + # Check that the path construction is reversible. + actual = TermsOfServiceServiceClient.parse_account_path(path) + assert expected == actual + + +def test_terms_of_service_path(): + version = "whelk" + expected = "termsOfService/{version}".format( + version=version, + ) + actual = TermsOfServiceServiceClient.terms_of_service_path(version) + assert expected == actual + + +def test_parse_terms_of_service_path(): + expected = { + "version": "octopus", + } + path = TermsOfServiceServiceClient.terms_of_service_path(**expected) + + # Check that the path construction is reversible. + actual = TermsOfServiceServiceClient.parse_terms_of_service_path(path) + assert expected == actual + + +def test_common_billing_account_path(): + billing_account = "oyster" + expected = "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + actual = TermsOfServiceServiceClient.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "nudibranch", + } + path = TermsOfServiceServiceClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. + actual = TermsOfServiceServiceClient.parse_common_billing_account_path(path) + assert expected == actual + + +def test_common_folder_path(): + folder = "cuttlefish" + expected = "folders/{folder}".format( + folder=folder, + ) + actual = TermsOfServiceServiceClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "mussel", + } + path = TermsOfServiceServiceClient.common_folder_path(**expected) + + # Check that the path construction is reversible. 
+ actual = TermsOfServiceServiceClient.parse_common_folder_path(path) + assert expected == actual + + +def test_common_organization_path(): + organization = "winkle" + expected = "organizations/{organization}".format( + organization=organization, + ) + actual = TermsOfServiceServiceClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "nautilus", + } + path = TermsOfServiceServiceClient.common_organization_path(**expected) + + # Check that the path construction is reversible. + actual = TermsOfServiceServiceClient.parse_common_organization_path(path) + assert expected == actual + + +def test_common_project_path(): + project = "scallop" + expected = "projects/{project}".format( + project=project, + ) + actual = TermsOfServiceServiceClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "abalone", + } + path = TermsOfServiceServiceClient.common_project_path(**expected) + + # Check that the path construction is reversible. + actual = TermsOfServiceServiceClient.parse_common_project_path(path) + assert expected == actual + + +def test_common_location_path(): + project = "squid" + location = "clam" + expected = "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) + actual = TermsOfServiceServiceClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "whelk", + "location": "octopus", + } + path = TermsOfServiceServiceClient.common_location_path(**expected) + + # Check that the path construction is reversible. + actual = TermsOfServiceServiceClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object( + transports.TermsOfServiceServiceTransport, "_prep_wrapped_messages" + ) as prep: + client = TermsOfServiceServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object( + transports.TermsOfServiceServiceTransport, "_prep_wrapped_messages" + ) as prep: + transport_class = TermsOfServiceServiceClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + +@pytest.mark.asyncio +async def test_transport_close_async(): + client = TermsOfServiceServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + with mock.patch.object( + type(getattr(client.transport, "grpc_channel")), "close" + ) as close: + async with client: + close.assert_not_called() + close.assert_called_once() + + +def test_transport_close(): + transports = { + "rest": "_session", + "grpc": "_grpc_channel", + } + + for transport, close_name in transports.items(): + client = TermsOfServiceServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + with mock.patch.object( + type(getattr(client.transport, close_name)), "close" + ) as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +def test_client_ctx(): + transports = [ + "rest", + "grpc", + ] + for transport in transports: + client = TermsOfServiceServiceClient( + 
credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + # Test client calls underlying transport. + with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() + + +@pytest.mark.parametrize( + "client_class,transport_class", + [ + (TermsOfServiceServiceClient, transports.TermsOfServiceServiceGrpcTransport), + ( + TermsOfServiceServiceAsyncClient, + transports.TermsOfServiceServiceGrpcAsyncIOTransport, + ), + ], +) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) diff --git a/packages/google-shopping-merchant-accounts/tests/unit/gapic/merchant_accounts_v1beta/test_user_service.py b/packages/google-shopping-merchant-accounts/tests/unit/gapic/merchant_accounts_v1beta/test_user_service.py new file mode 100644 index 000000000000..7ab2425ba0eb --- /dev/null +++ b/packages/google-shopping-merchant-accounts/tests/unit/gapic/merchant_accounts_v1beta/test_user_service.py @@ -0,0 +1,5644 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import os + +# try/except added for compatibility with python < 3.8 +try: + from unittest import mock + from unittest.mock import AsyncMock # pragma: NO COVER +except ImportError: # pragma: NO COVER + import mock + +from collections.abc import Iterable +import json +import math + +from google.api_core import gapic_v1, grpc_helpers, grpc_helpers_async, path_template +from google.api_core import api_core_version, client_options +from google.api_core import exceptions as core_exceptions +import google.auth +from google.auth import credentials as ga_credentials +from google.auth.exceptions import MutualTLSChannelError +from google.oauth2 import service_account +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import json_format +import grpc +from grpc.experimental import aio +from proto.marshal.rules import wrappers +from proto.marshal.rules.dates import DurationRule, TimestampRule +import pytest +from requests import PreparedRequest, Request, Response +from requests.sessions import Session + +from google.shopping.merchant_accounts_v1beta.services.user_service import ( + UserServiceAsyncClient, + UserServiceClient, + pagers, + transports, +) +from google.shopping.merchant_accounts_v1beta.types import accessright +from google.shopping.merchant_accounts_v1beta.types import user +from google.shopping.merchant_accounts_v1beta.types import user as gsma_user + + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + + +# If default endpoint is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. +def modify_default_endpoint(client): + return ( + "foo.googleapis.com" + if ("localhost" in client.DEFAULT_ENDPOINT) + else client.DEFAULT_ENDPOINT + ) + + +# If default endpoint template is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint template so the client can produce a different +# mtls endpoint for endpoint testing purposes. 
+def modify_default_endpoint_template(client): + return ( + "test.{UNIVERSE_DOMAIN}" + if ("localhost" in client._DEFAULT_ENDPOINT_TEMPLATE) + else client._DEFAULT_ENDPOINT_TEMPLATE + ) + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert UserServiceClient._get_default_mtls_endpoint(None) is None + assert ( + UserServiceClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint + ) + assert ( + UserServiceClient._get_default_mtls_endpoint(api_mtls_endpoint) + == api_mtls_endpoint + ) + assert ( + UserServiceClient._get_default_mtls_endpoint(sandbox_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + UserServiceClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) + == sandbox_mtls_endpoint + ) + assert UserServiceClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi + + +def test__read_environment_variables(): + assert UserServiceClient._read_environment_variables() == (False, "auto", None) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + assert UserServiceClient._read_environment_variables() == (True, "auto", None) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + assert UserServiceClient._read_environment_variables() == (False, "auto", None) + + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError) as excinfo: + UserServiceClient._read_environment_variables() + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + assert UserServiceClient._read_environment_variables() == (False, "never", None) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + assert UserServiceClient._read_environment_variables() == ( + False, + "always", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}): + assert UserServiceClient._read_environment_variables() == (False, "auto", None) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + UserServiceClient._read_environment_variables() + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + with mock.patch.dict(os.environ, {"GOOGLE_CLOUD_UNIVERSE_DOMAIN": "foo.com"}): + assert UserServiceClient._read_environment_variables() == ( + False, + "auto", + "foo.com", + ) + + +def test__get_client_cert_source(): + mock_provided_cert_source = mock.Mock() + mock_default_cert_source = mock.Mock() + + assert UserServiceClient._get_client_cert_source(None, False) is None + assert ( + UserServiceClient._get_client_cert_source(mock_provided_cert_source, False) + is None + ) + assert ( + UserServiceClient._get_client_cert_source(mock_provided_cert_source, True) + == mock_provided_cert_source + ) + + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", return_value=True + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=mock_default_cert_source, + ): + assert ( + 
UserServiceClient._get_client_cert_source(None, True) + is mock_default_cert_source + ) + assert ( + UserServiceClient._get_client_cert_source( + mock_provided_cert_source, "true" + ) + is mock_provided_cert_source + ) + + +@mock.patch.object( + UserServiceClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(UserServiceClient), +) +@mock.patch.object( + UserServiceAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(UserServiceAsyncClient), +) +def test__get_api_endpoint(): + api_override = "foo.com" + mock_client_cert_source = mock.Mock() + default_universe = UserServiceClient._DEFAULT_UNIVERSE + default_endpoint = UserServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=default_universe + ) + mock_universe = "bar.com" + mock_endpoint = UserServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=mock_universe + ) + + assert ( + UserServiceClient._get_api_endpoint( + api_override, mock_client_cert_source, default_universe, "always" + ) + == api_override + ) + assert ( + UserServiceClient._get_api_endpoint( + None, mock_client_cert_source, default_universe, "auto" + ) + == UserServiceClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + UserServiceClient._get_api_endpoint(None, None, default_universe, "auto") + == default_endpoint + ) + assert ( + UserServiceClient._get_api_endpoint(None, None, default_universe, "always") + == UserServiceClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + UserServiceClient._get_api_endpoint( + None, mock_client_cert_source, default_universe, "always" + ) + == UserServiceClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + UserServiceClient._get_api_endpoint(None, None, mock_universe, "never") + == mock_endpoint + ) + assert ( + UserServiceClient._get_api_endpoint(None, None, default_universe, "never") + == default_endpoint + ) + + with pytest.raises(MutualTLSChannelError) as excinfo: + UserServiceClient._get_api_endpoint( + None, mock_client_cert_source, mock_universe, "auto" + ) + assert ( + str(excinfo.value) + == "mTLS is not supported in any universe other than googleapis.com." + ) + + +def test__get_universe_domain(): + client_universe_domain = "foo.com" + universe_domain_env = "bar.com" + + assert ( + UserServiceClient._get_universe_domain( + client_universe_domain, universe_domain_env + ) + == client_universe_domain + ) + assert ( + UserServiceClient._get_universe_domain(None, universe_domain_env) + == universe_domain_env + ) + assert ( + UserServiceClient._get_universe_domain(None, None) + == UserServiceClient._DEFAULT_UNIVERSE + ) + + with pytest.raises(ValueError) as excinfo: + UserServiceClient._get_universe_domain("", None) + assert str(excinfo.value) == "Universe Domain cannot be an empty string." + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (UserServiceClient, transports.UserServiceGrpcTransport, "grpc"), + (UserServiceClient, transports.UserServiceRestTransport, "rest"), + ], +) +def test__validate_universe_domain(client_class, transport_class, transport_name): + client = client_class( + transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) + ) + assert client._validate_universe_domain() == True + + # Test the case when universe is already validated. + assert client._validate_universe_domain() == True + + if transport_name == "grpc": + # Test the case where credentials are provided by the + # `local_channel_credentials`. The default universes in both match. 
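+        # (Validation passes because both sides fall back to the default googleapis.com universe.)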
+ channel = grpc.secure_channel( + "http://localhost/", grpc.local_channel_credentials() + ) + client = client_class(transport=transport_class(channel=channel)) + assert client._validate_universe_domain() == True + + # Test the case where credentials do not exist: e.g. a transport is provided + # with no credentials. Validation should still succeed because there is no + # mismatch with non-existent credentials. + channel = grpc.secure_channel( + "http://localhost/", grpc.local_channel_credentials() + ) + transport = transport_class(channel=channel) + transport._credentials = None + client = client_class(transport=transport) + assert client._validate_universe_domain() == True + + # TODO: This is needed to cater for older versions of google-auth + # Make this test unconditional once the minimum supported version of + # google-auth becomes 2.23.0 or higher. + google_auth_major, google_auth_minor = [ + int(part) for part in google.auth.__version__.split(".")[0:2] + ] + if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): + credentials = ga_credentials.AnonymousCredentials() + credentials._universe_domain = "foo.com" + # Test the case when there is a universe mismatch from the credentials. + client = client_class(transport=transport_class(credentials=credentials)) + with pytest.raises(ValueError) as excinfo: + client._validate_universe_domain() + assert ( + str(excinfo.value) + == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." + ) + + # Test the case when there is a universe mismatch from the client. + # + # TODO: Make this test unconditional once the minimum supported version of + # google-api-core becomes 2.15.0 or higher. + api_core_major, api_core_minor = [ + int(part) for part in api_core_version.__version__.split(".")[0:2] + ] + if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): + client = client_class( + client_options={"universe_domain": "bar.com"}, + transport=transport_class( + credentials=ga_credentials.AnonymousCredentials(), + ), + ) + with pytest.raises(ValueError) as excinfo: + client._validate_universe_domain() + assert ( + str(excinfo.value) + == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." 
+ ) + + # Test that ValueError is raised if universe_domain is provided via client options and credentials is None + with pytest.raises(ValueError): + client._compare_universes("foo.bar", None) + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (UserServiceClient, "grpc"), + (UserServiceAsyncClient, "grpc_asyncio"), + (UserServiceClient, "rest"), + ], +) +def test_user_service_client_from_service_account_info(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_info" + ) as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info, transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + "merchantapi.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://merchantapi.googleapis.com" + ) + + +@pytest.mark.parametrize( + "transport_class,transport_name", + [ + (transports.UserServiceGrpcTransport, "grpc"), + (transports.UserServiceGrpcAsyncIOTransport, "grpc_asyncio"), + (transports.UserServiceRestTransport, "rest"), + ], +) +def test_user_service_client_service_account_always_use_jwt( + transport_class, transport_name +): + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) + + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=False) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (UserServiceClient, "grpc"), + (UserServiceAsyncClient, "grpc_asyncio"), + (UserServiceClient, "rest"), + ], +) +def test_user_service_client_from_service_account_file(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_file" + ) as factory: + factory.return_value = creds + client = client_class.from_service_account_file( + "dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + client = client_class.from_service_account_json( + "dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + "merchantapi.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://merchantapi.googleapis.com" + ) + + +def test_user_service_client_get_transport_class(): + transport = UserServiceClient.get_transport_class() + available_transports = [ + transports.UserServiceGrpcTransport, + transports.UserServiceRestTransport, + ] + assert transport in available_transports + + transport = UserServiceClient.get_transport_class("grpc") + assert transport == transports.UserServiceGrpcTransport + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (UserServiceClient, 
transports.UserServiceGrpcTransport, "grpc"), + ( + UserServiceAsyncClient, + transports.UserServiceGrpcAsyncIOTransport, + "grpc_asyncio", + ), + (UserServiceClient, transports.UserServiceRestTransport, "rest"), + ], +) +@mock.patch.object( + UserServiceClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(UserServiceClient), +) +@mock.patch.object( + UserServiceAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(UserServiceAsyncClient), +) +def test_user_service_client_client_options( + client_class, transport_class, transport_name +): + # Check that if channel is provided we won't create a new one. + with mock.patch.object(UserServiceClient, "get_transport_class") as gtc: + transport = transport_class(credentials=ga_credentials.AnonymousCredentials()) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object(UserServiceClient, "get_transport_class") as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. + options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name, client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + client = client_class(transport=transport_name) + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError) as excinfo: + client = client_class(transport=transport_name) + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + # Check the case api_endpoint is provided + options = client_options.ClientOptions( + api_audience="https://language.googleapis.com" + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience="https://language.googleapis.com", + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,use_client_cert_env", + [ + (UserServiceClient, transports.UserServiceGrpcTransport, "grpc", "true"), + ( + UserServiceAsyncClient, + transports.UserServiceGrpcAsyncIOTransport, + "grpc_asyncio", + "true", + ), + (UserServiceClient, transports.UserServiceGrpcTransport, "grpc", "false"), + ( + UserServiceAsyncClient, + transports.UserServiceGrpcAsyncIOTransport, + "grpc_asyncio", + "false", + ), + (UserServiceClient, transports.UserServiceRestTransport, "rest", "true"), + (UserServiceClient, transports.UserServiceRestTransport, "rest", "false"), + ], +) +@mock.patch.object( + UserServiceClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(UserServiceClient), +) +@mock.patch.object( + UserServiceAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(UserServiceAsyncClient), +) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_user_service_client_mtls_env_auto( + client_class, transport_class, transport_name, use_client_cert_env +): + # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. + + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + options = client_options.ClientOptions( + client_cert_source=client_cert_source_callback + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ) + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=client_cert_source_callback, + ): + if use_client_cert_env == "false": + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ) + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case client_cert_source and ADC client cert are not provided. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize("client_class", [UserServiceClient, UserServiceAsyncClient]) +@mock.patch.object( + UserServiceClient, "DEFAULT_ENDPOINT", modify_default_endpoint(UserServiceClient) +) +@mock.patch.object( + UserServiceAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(UserServiceAsyncClient), +) +def test_user_service_client_get_mtls_endpoint_and_cert_source(client_class): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". 
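+    # A user-supplied api_endpoint and client_cert_source should be returned unchanged in this case.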
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=mock_client_cert_source, + ): + ( + api_endpoint, + cert_source, + ) = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + client_class.get_mtls_endpoint_and_cert_source() + + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError) as excinfo: + client_class.get_mtls_endpoint_and_cert_source() + + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + + +@pytest.mark.parametrize("client_class", [UserServiceClient, UserServiceAsyncClient]) +@mock.patch.object( + UserServiceClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(UserServiceClient), +) +@mock.patch.object( + UserServiceAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(UserServiceAsyncClient), +) +def test_user_service_client_client_api_endpoint(client_class): + mock_client_cert_source = client_cert_source_callback + api_override = "foo.com" + default_universe = UserServiceClient._DEFAULT_UNIVERSE + default_endpoint = UserServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=default_universe + ) + mock_universe = "bar.com" + mock_endpoint = UserServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=mock_universe + ) + + # If ClientOptions.api_endpoint is set and GOOGLE_API_USE_CLIENT_CERTIFICATE="true", + # use ClientOptions.api_endpoint as the api endpoint regardless. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ): + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=api_override + ) + client = client_class( + client_options=options, + credentials=ga_credentials.AnonymousCredentials(), + ) + assert client.api_endpoint == api_override + + # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="never", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + client = client_class(credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == default_endpoint + + # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="always", + # use the DEFAULT_MTLS_ENDPOINT as the api endpoint. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + client = client_class(credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + + # If ClientOptions.api_endpoint is not set, GOOGLE_API_USE_MTLS_ENDPOINT="auto" (default), + # GOOGLE_API_USE_CLIENT_CERTIFICATE="false" (default), default cert source doesn't exist, + # and ClientOptions.universe_domain="bar.com", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with universe domain as the api endpoint. 
+ options = client_options.ClientOptions() + universe_exists = hasattr(options, "universe_domain") + if universe_exists: + options = client_options.ClientOptions(universe_domain=mock_universe) + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + else: + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + assert client.api_endpoint == ( + mock_endpoint if universe_exists else default_endpoint + ) + assert client.universe_domain == ( + mock_universe if universe_exists else default_universe + ) + + # If ClientOptions does not have a universe domain attribute and GOOGLE_API_USE_MTLS_ENDPOINT="never", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. + options = client_options.ClientOptions() + if hasattr(options, "universe_domain"): + delattr(options, "universe_domain") + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + assert client.api_endpoint == default_endpoint + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (UserServiceClient, transports.UserServiceGrpcTransport, "grpc"), + ( + UserServiceAsyncClient, + transports.UserServiceGrpcAsyncIOTransport, + "grpc_asyncio", + ), + (UserServiceClient, transports.UserServiceRestTransport, "rest"), + ], +) +def test_user_service_client_client_options_scopes( + client_class, transport_class, transport_name +): + # Check the case scopes are provided. + options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + (UserServiceClient, transports.UserServiceGrpcTransport, "grpc", grpc_helpers), + ( + UserServiceAsyncClient, + transports.UserServiceGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + (UserServiceClient, transports.UserServiceRestTransport, "rest", None), + ], +) +def test_user_service_client_client_options_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. 
+ options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +def test_user_service_client_client_options_from_dict(): + with mock.patch( + "google.shopping.merchant_accounts_v1beta.services.user_service.transports.UserServiceGrpcTransport.__init__" + ) as grpc_transport: + grpc_transport.return_value = None + client = UserServiceClient(client_options={"api_endpoint": "squid.clam.whelk"}) + grpc_transport.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + (UserServiceClient, transports.UserServiceGrpcTransport, "grpc", grpc_helpers), + ( + UserServiceAsyncClient, + transports.UserServiceGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + ], +) +def test_user_service_client_create_channel_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. + options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # test that the credentials from file are saved and used as the credentials. 
+ with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel" + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + file_creds = ga_credentials.AnonymousCredentials() + load_creds.return_value = (file_creds, None) + adc.return_value = (creds, None) + client = client_class(client_options=options, transport=transport_name) + create_channel.assert_called_with( + "merchantapi.googleapis.com:443", + credentials=file_creds, + credentials_file=None, + quota_project_id=None, + default_scopes=("https://www.googleapis.com/auth/content",), + scopes=None, + default_host="merchantapi.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "request_type", + [ + user.GetUserRequest, + dict, + ], +) +def test_get_user(request_type, transport: str = "grpc"): + client = UserServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_user), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = user.User( + name="name_value", + state=user.User.State.PENDING, + access_rights=[accessright.AccessRight.STANDARD], + ) + response = client.get_user(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = user.GetUserRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, user.User) + assert response.name == "name_value" + assert response.state == user.User.State.PENDING + assert response.access_rights == [accessright.AccessRight.STANDARD] + + +def test_get_user_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = UserServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_user), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.get_user() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == user.GetUserRequest() + + +def test_get_user_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = UserServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = user.GetUserRequest( + name="name_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.get_user), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.get_user(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == user.GetUserRequest( + name="name_value", + ) + + +def test_get_user_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = UserServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_user in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get_user] = mock_rpc + request = {} + client.get_user(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.get_user(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_get_user_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = UserServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_user), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + user.User( + name="name_value", + state=user.User.State.PENDING, + access_rights=[accessright.AccessRight.STANDARD], + ) + ) + response = await client.get_user() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == user.GetUserRequest() + + +@pytest.mark.asyncio +async def test_get_user_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = UserServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.get_user + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[ + client._client._transport.get_user + ] = mock_object + + request = {} + await client.get_user(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_object.call_count == 1 + + await client.get_user(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + +@pytest.mark.asyncio +async def test_get_user_async( + transport: str = "grpc_asyncio", request_type=user.GetUserRequest +): + client = UserServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_user), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + user.User( + name="name_value", + state=user.User.State.PENDING, + access_rights=[accessright.AccessRight.STANDARD], + ) + ) + response = await client.get_user(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = user.GetUserRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, user.User) + assert response.name == "name_value" + assert response.state == user.User.State.PENDING + assert response.access_rights == [accessright.AccessRight.STANDARD] + + +@pytest.mark.asyncio +async def test_get_user_async_from_dict(): + await test_get_user_async(request_type=dict) + + +def test_get_user_field_headers(): + client = UserServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = user.GetUserRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_user), "__call__") as call: + call.return_value = user.User() + client.get_user(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_user_field_headers_async(): + client = UserServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = user.GetUserRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_user), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(user.User()) + await client.get_user(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_get_user_flattened(): + client = UserServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_user), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = user.User() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_user( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_get_user_flattened_error(): + client = UserServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_user( + user.GetUserRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_get_user_flattened_async(): + client = UserServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_user), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = user.User() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(user.User()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_user( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_get_user_flattened_error_async(): + client = UserServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.get_user( + user.GetUserRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + gsma_user.CreateUserRequest, + dict, + ], +) +def test_create_user(request_type, transport: str = "grpc"): + client = UserServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_user), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = gsma_user.User( + name="name_value", + state=gsma_user.User.State.PENDING, + access_rights=[accessright.AccessRight.STANDARD], + ) + response = client.create_user(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = gsma_user.CreateUserRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, gsma_user.User) + assert response.name == "name_value" + assert response.state == gsma_user.User.State.PENDING + assert response.access_rights == [accessright.AccessRight.STANDARD] + + +def test_create_user_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = UserServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_user), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.create_user() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == gsma_user.CreateUserRequest() + + +def test_create_user_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = UserServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = gsma_user.CreateUserRequest( + parent="parent_value", + user_id="user_id_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_user), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.create_user(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == gsma_user.CreateUserRequest( + parent="parent_value", + user_id="user_id_value", + ) + + +def test_create_user_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = UserServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.create_user in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.create_user] = mock_rpc + request = {} + client.create_user(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.create_user(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_create_user_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. 
+ client = UserServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_user), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + gsma_user.User( + name="name_value", + state=gsma_user.User.State.PENDING, + access_rights=[accessright.AccessRight.STANDARD], + ) + ) + response = await client.create_user() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == gsma_user.CreateUserRequest() + + +@pytest.mark.asyncio +async def test_create_user_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = UserServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.create_user + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[ + client._client._transport.create_user + ] = mock_object + + request = {} + await client.create_user(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + await client.create_user(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + +@pytest.mark.asyncio +async def test_create_user_async( + transport: str = "grpc_asyncio", request_type=gsma_user.CreateUserRequest +): + client = UserServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_user), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + gsma_user.User( + name="name_value", + state=gsma_user.User.State.PENDING, + access_rights=[accessright.AccessRight.STANDARD], + ) + ) + response = await client.create_user(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = gsma_user.CreateUserRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, gsma_user.User) + assert response.name == "name_value" + assert response.state == gsma_user.User.State.PENDING + assert response.access_rights == [accessright.AccessRight.STANDARD] + + +@pytest.mark.asyncio +async def test_create_user_async_from_dict(): + await test_create_user_async(request_type=dict) + + +def test_create_user_field_headers(): + client = UserServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = gsma_user.CreateUserRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_user), "__call__") as call: + call.return_value = gsma_user.User() + client.create_user(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_create_user_field_headers_async(): + client = UserServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = gsma_user.CreateUserRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_user), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gsma_user.User()) + await client.create_user(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_create_user_flattened(): + client = UserServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_user), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = gsma_user.User() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.create_user( + parent="parent_value", + user=gsma_user.User(name="name_value"), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].user + mock_val = gsma_user.User(name="name_value") + assert arg == mock_val + + +def test_create_user_flattened_error(): + client = UserServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.create_user( + gsma_user.CreateUserRequest(), + parent="parent_value", + user=gsma_user.User(name="name_value"), + ) + + +@pytest.mark.asyncio +async def test_create_user_flattened_async(): + client = UserServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_user), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = gsma_user.User() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gsma_user.User()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.create_user( + parent="parent_value", + user=gsma_user.User(name="name_value"), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].user + mock_val = gsma_user.User(name="name_value") + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_create_user_flattened_error_async(): + client = UserServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.create_user( + gsma_user.CreateUserRequest(), + parent="parent_value", + user=gsma_user.User(name="name_value"), + ) + + +@pytest.mark.parametrize( + "request_type", + [ + user.DeleteUserRequest, + dict, + ], +) +def test_delete_user(request_type, transport: str = "grpc"): + client = UserServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_user), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.delete_user(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = user.DeleteUserRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_user_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = UserServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_user), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.delete_user() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == user.DeleteUserRequest() + + +def test_delete_user_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. 
+ client = UserServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = user.DeleteUserRequest( + name="name_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_user), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.delete_user(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == user.DeleteUserRequest( + name="name_value", + ) + + +def test_delete_user_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = UserServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete_user in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.delete_user] = mock_rpc + request = {} + client.delete_user(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.delete_user(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_delete_user_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = UserServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_user), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_user() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == user.DeleteUserRequest() + + +@pytest.mark.asyncio +async def test_delete_user_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = UserServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.delete_user + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[ + client._client._transport.delete_user + ] = mock_object + + request = {} + await client.delete_user(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + await client.delete_user(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + +@pytest.mark.asyncio +async def test_delete_user_async( + transport: str = "grpc_asyncio", request_type=user.DeleteUserRequest +): + client = UserServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_user), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_user(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = user.DeleteUserRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_delete_user_async_from_dict(): + await test_delete_user_async(request_type=dict) + + +def test_delete_user_field_headers(): + client = UserServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = user.DeleteUserRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_user), "__call__") as call: + call.return_value = None + client.delete_user(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_delete_user_field_headers_async(): + client = UserServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = user.DeleteUserRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_user), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_user(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_delete_user_flattened(): + client = UserServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_user), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.delete_user( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_delete_user_flattened_error(): + client = UserServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_user( + user.DeleteUserRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_delete_user_flattened_async(): + client = UserServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_user), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.delete_user( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_delete_user_flattened_error_async(): + client = UserServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.delete_user( + user.DeleteUserRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + gsma_user.UpdateUserRequest, + dict, + ], +) +def test_update_user(request_type, transport: str = "grpc"): + client = UserServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_user), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = gsma_user.User( + name="name_value", + state=gsma_user.User.State.PENDING, + access_rights=[accessright.AccessRight.STANDARD], + ) + response = client.update_user(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = gsma_user.UpdateUserRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, gsma_user.User) + assert response.name == "name_value" + assert response.state == gsma_user.User.State.PENDING + assert response.access_rights == [accessright.AccessRight.STANDARD] + + +def test_update_user_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = UserServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_user), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.update_user() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == gsma_user.UpdateUserRequest() + + +def test_update_user_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = UserServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = gsma_user.UpdateUserRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_user), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.update_user(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == gsma_user.UpdateUserRequest() + + +def test_update_user_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = UserServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.update_user in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.update_user] = mock_rpc + request = {} + client.update_user(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.update_user(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_update_user_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = UserServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_user), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + gsma_user.User( + name="name_value", + state=gsma_user.User.State.PENDING, + access_rights=[accessright.AccessRight.STANDARD], + ) + ) + response = await client.update_user() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == gsma_user.UpdateUserRequest() + + +@pytest.mark.asyncio +async def test_update_user_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = UserServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.update_user + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[ + client._client._transport.update_user + ] = mock_object + + request = {} + await client.update_user(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_object.call_count == 1 + + await client.update_user(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + +@pytest.mark.asyncio +async def test_update_user_async( + transport: str = "grpc_asyncio", request_type=gsma_user.UpdateUserRequest +): + client = UserServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_user), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + gsma_user.User( + name="name_value", + state=gsma_user.User.State.PENDING, + access_rights=[accessright.AccessRight.STANDARD], + ) + ) + response = await client.update_user(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = gsma_user.UpdateUserRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, gsma_user.User) + assert response.name == "name_value" + assert response.state == gsma_user.User.State.PENDING + assert response.access_rights == [accessright.AccessRight.STANDARD] + + +@pytest.mark.asyncio +async def test_update_user_async_from_dict(): + await test_update_user_async(request_type=dict) + + +def test_update_user_field_headers(): + client = UserServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = gsma_user.UpdateUserRequest() + + request.user.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_user), "__call__") as call: + call.return_value = gsma_user.User() + client.update_user(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "user.name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_update_user_field_headers_async(): + client = UserServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = gsma_user.UpdateUserRequest() + + request.user.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_user), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gsma_user.User()) + await client.update_user(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "user.name=name_value", + ) in kw["metadata"] + + +def test_update_user_flattened(): + client = UserServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_user), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = gsma_user.User() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.update_user( + user=gsma_user.User(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].user + mock_val = gsma_user.User(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + assert arg == mock_val + + +def test_update_user_flattened_error(): + client = UserServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_user( + gsma_user.UpdateUserRequest(), + user=gsma_user.User(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +@pytest.mark.asyncio +async def test_update_user_flattened_async(): + client = UserServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_user), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = gsma_user.User() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gsma_user.User()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.update_user( + user=gsma_user.User(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].user + mock_val = gsma_user.User(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_update_user_flattened_error_async(): + client = UserServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.update_user( + gsma_user.UpdateUserRequest(), + user=gsma_user.User(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +@pytest.mark.parametrize( + "request_type", + [ + user.ListUsersRequest, + dict, + ], +) +def test_list_users(request_type, transport: str = "grpc"): + client = UserServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
+ request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_users), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = user.ListUsersResponse( + next_page_token="next_page_token_value", + ) + response = client.list_users(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = user.ListUsersRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListUsersPager) + assert response.next_page_token == "next_page_token_value" + + +def test_list_users_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = UserServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_users), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.list_users() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == user.ListUsersRequest() + + +def test_list_users_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = UserServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = user.ListUsersRequest( + parent="parent_value", + page_token="page_token_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_users), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.list_users(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == user.ListUsersRequest( + parent="parent_value", + page_token="page_token_value", + ) + + +def test_list_users_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = UserServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_users in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.list_users] = mock_rpc + request = {} + client.list_users(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.list_users(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_list_users_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = UserServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_users), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + user.ListUsersResponse( + next_page_token="next_page_token_value", + ) + ) + response = await client.list_users() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == user.ListUsersRequest() + + +@pytest.mark.asyncio +async def test_list_users_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = UserServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.list_users + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[ + client._client._transport.list_users + ] = mock_object + + request = {} + await client.list_users(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + await client.list_users(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + +@pytest.mark.asyncio +async def test_list_users_async( + transport: str = "grpc_asyncio", request_type=user.ListUsersRequest +): + client = UserServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_users), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + user.ListUsersResponse( + next_page_token="next_page_token_value", + ) + ) + response = await client.list_users(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = user.ListUsersRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. 
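+    # List methods wrap the raw ListUsersResponse in a pager; iterating the
+    # pager is what triggers follow-up requests via next_page_token, so only
+    # the pager type and the first page's token are checked here.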
+ assert isinstance(response, pagers.ListUsersAsyncPager) + assert response.next_page_token == "next_page_token_value" + + +@pytest.mark.asyncio +async def test_list_users_async_from_dict(): + await test_list_users_async(request_type=dict) + + +def test_list_users_field_headers(): + client = UserServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = user.ListUsersRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_users), "__call__") as call: + call.return_value = user.ListUsersResponse() + client.list_users(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_users_field_headers_async(): + client = UserServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = user.ListUsersRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_users), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + user.ListUsersResponse() + ) + await client.list_users(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_list_users_flattened(): + client = UserServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_users), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = user.ListUsersResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_users( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +def test_list_users_flattened_error(): + client = UserServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_users( + user.ListUsersRequest(), + parent="parent_value", + ) + + +@pytest.mark.asyncio +async def test_list_users_flattened_async(): + client = UserServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.list_users), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = user.ListUsersResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + user.ListUsersResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_users( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_list_users_flattened_error_async(): + client = UserServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.list_users( + user.ListUsersRequest(), + parent="parent_value", + ) + + +def test_list_users_pager(transport_name: str = "grpc"): + client = UserServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_users), "__call__") as call: + # Set the response to a series of pages. + call.side_effect = ( + user.ListUsersResponse( + users=[ + user.User(), + user.User(), + user.User(), + ], + next_page_token="abc", + ), + user.ListUsersResponse( + users=[], + next_page_token="def", + ), + user.ListUsersResponse( + users=[ + user.User(), + ], + next_page_token="ghi", + ), + user.ListUsersResponse( + users=[ + user.User(), + user.User(), + ], + ), + RuntimeError, + ) + + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + ) + pager = client.list_users(request={}) + + assert pager._metadata == expected_metadata + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, user.User) for i in results) + + +def test_list_users_pages(transport_name: str = "grpc"): + client = UserServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_users), "__call__") as call: + # Set the response to a series of pages. + call.side_effect = ( + user.ListUsersResponse( + users=[ + user.User(), + user.User(), + user.User(), + ], + next_page_token="abc", + ), + user.ListUsersResponse( + users=[], + next_page_token="def", + ), + user.ListUsersResponse( + users=[ + user.User(), + ], + next_page_token="ghi", + ), + user.ListUsersResponse( + users=[ + user.User(), + user.User(), + ], + ), + RuntimeError, + ) + pages = list(client.list_users(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_list_users_async_pager(): + client = UserServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_users), "__call__", new_callable=mock.AsyncMock + ) as call: + # Set the response to a series of pages. 
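+        # side_effect hands out one response per page fetch; the trailing
+        # RuntimeError guards against the pager requesting more pages than
+        # the test provides.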
+ call.side_effect = ( + user.ListUsersResponse( + users=[ + user.User(), + user.User(), + user.User(), + ], + next_page_token="abc", + ), + user.ListUsersResponse( + users=[], + next_page_token="def", + ), + user.ListUsersResponse( + users=[ + user.User(), + ], + next_page_token="ghi", + ), + user.ListUsersResponse( + users=[ + user.User(), + user.User(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_users( + request={}, + ) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, user.User) for i in responses) + + +@pytest.mark.asyncio +async def test_list_users_async_pages(): + client = UserServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_users), "__call__", new_callable=mock.AsyncMock + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + user.ListUsersResponse( + users=[ + user.User(), + user.User(), + user.User(), + ], + next_page_token="abc", + ), + user.ListUsersResponse( + users=[], + next_page_token="def", + ), + user.ListUsersResponse( + users=[ + user.User(), + ], + next_page_token="ghi", + ), + user.ListUsersResponse( + users=[ + user.User(), + user.User(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_users(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + user.GetUserRequest, + dict, + ], +) +def test_get_user_rest(request_type): + client = UserServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "accounts/sample1/users/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = user.User( + name="name_value", + state=user.User.State.PENDING, + access_rights=[accessright.AccessRight.STANDARD], + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = user.User.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_user(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, user.User) + assert response.name == "name_value" + assert response.state == user.User.State.PENDING + assert response.access_rights == [accessright.AccessRight.STANDARD] + + +def test_get_user_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = UserServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_user in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get_user] = mock_rpc + + request = {} + client.get_user(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.get_user(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_get_user_rest_required_fields(request_type=user.GetUserRequest): + transport_class = transports.UserServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_user._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_user._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = UserServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = user.User() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
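+            # Because transcode() is stubbed to return a URI with no path
+            # fields and no body, every populated request field is sent as a
+            # query parameter, which is what the expected_params check at the
+            # end of this test relies on.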
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = user.User.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_user(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_user_rest_unset_required_fields(): + transport = transports.UserServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_user._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_user_rest_interceptors(null_interceptor): + transport = transports.UserServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.UserServiceRestInterceptor(), + ) + client = UserServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.UserServiceRestInterceptor, "post_get_user" + ) as post, mock.patch.object( + transports.UserServiceRestInterceptor, "pre_get_user" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = user.GetUserRequest.pb(user.GetUserRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = user.User.to_json(user.User()) + + request = user.GetUserRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = user.User() + + client.get_user( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_user_rest_bad_request( + transport: str = "rest", request_type=user.GetUserRequest +): + client = UserServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "accounts/sample1/users/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_user(request) + + +def test_get_user_rest_flattened(): + client = UserServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = user.User() + + # get arguments that satisfy an http rule for this method + sample_request = {"name": "accounts/sample1/users/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = user.User.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_user(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/accounts/v1beta/{name=accounts/*/users/*}" % client.transport._host, + args[1], + ) + + +def test_get_user_rest_flattened_error(transport: str = "rest"): + client = UserServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_user( + user.GetUserRequest(), + name="name_value", + ) + + +def test_get_user_rest_error(): + client = UserServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + gsma_user.CreateUserRequest, + dict, + ], +) +def test_create_user_rest(request_type): + client = UserServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "accounts/sample1"} + request_init["user"] = {"name": "name_value", "state": 1, "access_rights": [1]} + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = gsma_user.CreateUserRequest.meta.fields["user"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
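+        # proto-plus messages expose their fields via .meta.fields, while raw
+        # protobuf messages expose them via .DESCRIPTOR.fields; the hasattr
+        # check below distinguishes the two cases.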
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["user"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["user"][field])): + del request_init["user"][field][i][subfield] + else: + del request_init["user"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = gsma_user.User( + name="name_value", + state=gsma_user.User.State.PENDING, + access_rights=[accessright.AccessRight.STANDARD], + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = gsma_user.User.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.create_user(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, gsma_user.User) + assert response.name == "name_value" + assert response.state == gsma_user.User.State.PENDING + assert response.access_rights == [accessright.AccessRight.STANDARD] + + +def test_create_user_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = UserServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.create_user in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.create_user] = mock_rpc + + request = {} + client.create_user(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.create_user(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_create_user_rest_required_fields(request_type=gsma_user.CreateUserRequest): + transport_class = transports.UserServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request_init["user_id"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + assert "userId" not in jsonified_request + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_user._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + assert "userId" in jsonified_request + assert jsonified_request["userId"] == request_init["user_id"] + + jsonified_request["parent"] = "parent_value" + jsonified_request["userId"] = "user_id_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_user._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("user_id",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + assert "userId" in jsonified_request + assert jsonified_request["userId"] == "user_id_value" + + client = UserServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = gsma_user.User() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
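+        # user_id is required but travels as a query parameter rather than a
+        # path field, which is why it appears in expected_params below as
+        # "userId" alongside the standard "$alt" parameter.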
+ with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = gsma_user.User.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.create_user(request) + + expected_params = [ + ( + "userId", + "", + ), + ("$alt", "json;enum-encoding=int"), + ] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_create_user_rest_unset_required_fields(): + transport = transports.UserServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.create_user._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(("userId",)) + & set( + ( + "parent", + "userId", + "user", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_user_rest_interceptors(null_interceptor): + transport = transports.UserServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.UserServiceRestInterceptor(), + ) + client = UserServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.UserServiceRestInterceptor, "post_create_user" + ) as post, mock.patch.object( + transports.UserServiceRestInterceptor, "pre_create_user" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = gsma_user.CreateUserRequest.pb(gsma_user.CreateUserRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = gsma_user.User.to_json(gsma_user.User()) + + request = gsma_user.CreateUserRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = gsma_user.User() + + client.create_user( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_create_user_rest_bad_request( + transport: str = "rest", request_type=gsma_user.CreateUserRequest +): + client = UserServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "accounts/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
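+    # A 400 status from the mocked session is surfaced by the REST transport
+    # as core_exceptions.BadRequest, which pytest.raises below expects.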
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.create_user(request) + + +def test_create_user_rest_flattened(): + client = UserServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = gsma_user.User() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "accounts/sample1"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + user=gsma_user.User(name="name_value"), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = gsma_user.User.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.create_user(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/accounts/v1beta/{parent=accounts/*}/users" % client.transport._host, + args[1], + ) + + +def test_create_user_rest_flattened_error(transport: str = "rest"): + client = UserServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_user( + gsma_user.CreateUserRequest(), + parent="parent_value", + user=gsma_user.User(name="name_value"), + ) + + +def test_create_user_rest_error(): + client = UserServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + user.DeleteUserRequest, + dict, + ], +) +def test_delete_user_rest(request_type): + client = UserServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "accounts/sample1/users/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.delete_user(request) + + # Establish that the response is the type that we expect. 
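+    # DeleteUser has no response payload (note the empty JSON body above), so
+    # the generated client returns None.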
+ assert response is None + + +def test_delete_user_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = UserServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete_user in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.delete_user] = mock_rpc + + request = {} + client.delete_user(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.delete_user(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_delete_user_rest_required_fields(request_type=user.DeleteUserRequest): + transport_class = transports.UserServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_user._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_user._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = UserServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = None + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "delete", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.delete_user(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_delete_user_rest_unset_required_fields(): + transport = transports.UserServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.delete_user._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_user_rest_interceptors(null_interceptor): + transport = transports.UserServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.UserServiceRestInterceptor(), + ) + client = UserServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.UserServiceRestInterceptor, "pre_delete_user" + ) as pre: + pre.assert_not_called() + pb_message = user.DeleteUserRequest.pb(user.DeleteUserRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + + request = user.DeleteUserRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + + client.delete_user( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + + +def test_delete_user_rest_bad_request( + transport: str = "rest", request_type=user.DeleteUserRequest +): + client = UserServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "accounts/sample1/users/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_user(request) + + +def test_delete_user_rest_flattened(): + client = UserServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = None + + # get arguments that satisfy an http rule for this method + sample_request = {"name": "accounts/sample1/users/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.delete_user(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/accounts/v1beta/{name=accounts/*/users/*}" % client.transport._host, + args[1], + ) + + +def test_delete_user_rest_flattened_error(transport: str = "rest"): + client = UserServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_user( + user.DeleteUserRequest(), + name="name_value", + ) + + +def test_delete_user_rest_error(): + client = UserServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + gsma_user.UpdateUserRequest, + dict, + ], +) +def test_update_user_rest(request_type): + client = UserServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"user": {"name": "accounts/sample1/users/sample2"}} + request_init["user"] = { + "name": "accounts/sample1/users/sample2", + "state": 1, + "access_rights": [1], + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = gsma_user.UpdateUserRequest.meta.fields["user"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["user"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["user"][field])): + del request_init["user"][field][i][subfield] + else: + del request_init["user"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = gsma_user.User( + name="name_value", + state=gsma_user.User.State.PENDING, + access_rights=[accessright.AccessRight.STANDARD], + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = gsma_user.User.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.update_user(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, gsma_user.User) + assert response.name == "name_value" + assert response.state == gsma_user.User.State.PENDING + assert response.access_rights == [accessright.AccessRight.STANDARD] + + +def test_update_user_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = UserServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.update_user in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.update_user] = mock_rpc + + request = {} + client.update_user(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.update_user(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_update_user_rest_required_fields(request_type=gsma_user.UpdateUserRequest): + transport_class = transports.UserServiceRestTransport + + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_user._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_user._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("update_mask",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + + client = UserServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = gsma_user.User() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "patch", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = gsma_user.User.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.update_user(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_update_user_rest_unset_required_fields(): + transport = transports.UserServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.update_user._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(("updateMask",)) + & set( + ( + "user", + "updateMask", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_user_rest_interceptors(null_interceptor): + transport = transports.UserServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.UserServiceRestInterceptor(), + ) + client = UserServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.UserServiceRestInterceptor, "post_update_user" + ) as post, mock.patch.object( + transports.UserServiceRestInterceptor, "pre_update_user" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = gsma_user.UpdateUserRequest.pb(gsma_user.UpdateUserRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = gsma_user.User.to_json(gsma_user.User()) + + request = gsma_user.UpdateUserRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = gsma_user.User() + + client.update_user( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_update_user_rest_bad_request( + transport: str = "rest", request_type=gsma_user.UpdateUserRequest +): + client = UserServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"user": {"name": "accounts/sample1/users/sample2"}} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.update_user(request) + + +def test_update_user_rest_flattened(): + client = UserServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = gsma_user.User() + + # get arguments that satisfy an http rule for this method + sample_request = {"user": {"name": "accounts/sample1/users/sample2"}} + + # get truthy value for each flattened field + mock_args = dict( + user=gsma_user.User(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = gsma_user.User.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.update_user(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/accounts/v1beta/{user.name=accounts/*/users/*}" + % client.transport._host, + args[1], + ) + + +def test_update_user_rest_flattened_error(transport: str = "rest"): + client = UserServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_user( + gsma_user.UpdateUserRequest(), + user=gsma_user.User(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +def test_update_user_rest_error(): + client = UserServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + user.ListUsersRequest, + dict, + ], +) +def test_list_users_rest(request_type): + client = UserServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "accounts/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = user.ListUsersResponse( + next_page_token="next_page_token_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = user.ListUsersResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_users(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListUsersPager) + assert response.next_page_token == "next_page_token_value" + + +def test_list_users_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = UserServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_users in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.list_users] = mock_rpc + + request = {} + client.list_users(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_users(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_list_users_rest_required_fields(request_type=user.ListUsersRequest): + transport_class = transports.UserServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_users._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_users._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "page_size", + "page_token", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = UserServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = user.ListUsersResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = user.ListUsersResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_users(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_users_rest_unset_required_fields(): + transport = transports.UserServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.list_users._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_users_rest_interceptors(null_interceptor): + transport = transports.UserServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.UserServiceRestInterceptor(), + ) + client = UserServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.UserServiceRestInterceptor, "post_list_users" + ) as post, mock.patch.object( + transports.UserServiceRestInterceptor, "pre_list_users" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = user.ListUsersRequest.pb(user.ListUsersRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = user.ListUsersResponse.to_json( + user.ListUsersResponse() + ) + + request = user.ListUsersRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = user.ListUsersResponse() + + client.list_users( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_users_rest_bad_request( + transport: str = "rest", request_type=user.ListUsersRequest +): + client = UserServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "accounts/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_users(request) + + +def test_list_users_rest_flattened(): + client = UserServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = user.ListUsersResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "accounts/sample1"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = user.ListUsersResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.list_users(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/accounts/v1beta/{parent=accounts/*}/users" % client.transport._host, + args[1], + ) + + +def test_list_users_rest_flattened_error(transport: str = "rest"): + client = UserServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_users( + user.ListUsersRequest(), + parent="parent_value", + ) + + +def test_list_users_rest_pager(transport: str = "rest"): + client = UserServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + user.ListUsersResponse( + users=[ + user.User(), + user.User(), + user.User(), + ], + next_page_token="abc", + ), + user.ListUsersResponse( + users=[], + next_page_token="def", + ), + user.ListUsersResponse( + users=[ + user.User(), + ], + next_page_token="ghi", + ), + user.ListUsersResponse( + users=[ + user.User(), + user.User(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(user.ListUsersResponse.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "accounts/sample1"} + + pager = client.list_users(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, user.User) for i in results) + + pages = list(client.list_users(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. 
+ transport = transports.UserServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = UserServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.UserServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = UserServiceClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. + transport = transports.UserServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = UserServiceClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = UserServiceClient( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.UserServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = UserServiceClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.UserServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = UserServiceClient(transport=transport) + assert client.transport is transport + + +def test_transport_get_channel(): + # A client may be instantiated with a custom transport instance. + transport = transports.UserServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + transport = transports.UserServiceGrpcAsyncIOTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.UserServiceGrpcTransport, + transports.UserServiceGrpcAsyncIOTransport, + transports.UserServiceRestTransport, + ], +) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "rest", + ], +) +def test_transport_kind(transport_name): + transport = UserServiceClient.get_transport_class(transport_name)( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert transport.kind == transport_name + + +def test_transport_grpc_default(): + # A client should use the gRPC transport by default. 
+ client = UserServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert isinstance( + client.transport, + transports.UserServiceGrpcTransport, + ) + + +def test_user_service_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.UserServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json", + ) + + +def test_user_service_base_transport(): + # Instantiate the base transport. + with mock.patch( + "google.shopping.merchant_accounts_v1beta.services.user_service.transports.UserServiceTransport.__init__" + ) as Transport: + Transport.return_value = None + transport = transports.UserServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. + methods = ( + "get_user", + "create_user", + "delete_user", + "update_user", + "list_users", + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + with pytest.raises(NotImplementedError): + transport.close() + + # Catch all for all remaining methods and properties + remainder = [ + "kind", + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + + +def test_user_service_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch( + "google.shopping.merchant_accounts_v1beta.services.user_service.transports.UserServiceTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.UserServiceTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with( + "credentials.json", + scopes=None, + default_scopes=("https://www.googleapis.com/auth/content",), + quota_project_id="octopus", + ) + + +def test_user_service_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. + with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( + "google.shopping.merchant_accounts_v1beta.services.user_service.transports.UserServiceTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.UserServiceTransport() + adc.assert_called_once() + + +def test_user_service_auth_adc(): + # If no credentials are provided, we should use ADC credentials. + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + UserServiceClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=("https://www.googleapis.com/auth/content",), + quota_project_id=None, + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.UserServiceGrpcTransport, + transports.UserServiceGrpcAsyncIOTransport, + ], +) +def test_user_service_transport_auth_adc(transport_class): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
+ with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + adc.assert_called_once_with( + scopes=["1", "2"], + default_scopes=("https://www.googleapis.com/auth/content",), + quota_project_id="octopus", + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.UserServiceGrpcTransport, + transports.UserServiceGrpcAsyncIOTransport, + transports.UserServiceRestTransport, + ], +) +def test_user_service_transport_auth_gdch_credentials(transport_class): + host = "https://language.com" + api_audience_tests = [None, "https://language2.com"] + api_audience_expect = [host, "https://language2.com"] + for t, e in zip(api_audience_tests, api_audience_expect): + with mock.patch.object(google.auth, "default", autospec=True) as adc: + gdch_mock = mock.MagicMock() + type(gdch_mock).with_gdch_audience = mock.PropertyMock( + return_value=gdch_mock + ) + adc.return_value = (gdch_mock, None) + transport_class(host=host, api_audience=t) + gdch_mock.with_gdch_audience.assert_called_once_with(e) + + +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.UserServiceGrpcTransport, grpc_helpers), + (transports.UserServiceGrpcAsyncIOTransport, grpc_helpers_async), + ], +) +def test_user_service_transport_create_channel(transport_class, grpc_helpers): + # If credentials and host are not provided, the transport class should use + # ADC credentials. + with mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + + create_channel.assert_called_with( + "merchantapi.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + default_scopes=("https://www.googleapis.com/auth/content",), + scopes=["1", "2"], + default_host="merchantapi.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "transport_class", + [transports.UserServiceGrpcTransport, transports.UserServiceGrpcAsyncIOTransport], +) +def test_user_service_grpc_transport_client_cert_source_for_mtls(transport_class): + cred = ga_credentials.AnonymousCredentials() + + # Check ssl_channel_credentials is used if provided. + with mock.patch.object(transport_class, "create_channel") as mock_create_channel: + mock_ssl_channel_creds = mock.Mock() + transport_class( + host="squid.clam.whelk", + credentials=cred, + ssl_channel_credentials=mock_ssl_channel_creds, + ) + mock_create_channel.assert_called_once_with( + "squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_channel_creds, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls + # is used. 
+ with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): + with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: + transport_class( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback, + ) + expected_cert, expected_key = client_cert_source_callback() + mock_ssl_cred.assert_called_once_with( + certificate_chain=expected_cert, private_key=expected_key + ) + + +def test_user_service_http_transport_client_cert_source_for_mtls(): + cred = ga_credentials.AnonymousCredentials() + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ) as mock_configure_mtls_channel: + transports.UserServiceRestTransport( + credentials=cred, client_cert_source_for_mtls=client_cert_source_callback + ) + mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + "rest", + ], +) +def test_user_service_host_no_port(transport_name): + client = UserServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="merchantapi.googleapis.com" + ), + transport=transport_name, + ) + assert client.transport._host == ( + "merchantapi.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://merchantapi.googleapis.com" + ) + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + "rest", + ], +) +def test_user_service_host_with_port(transport_name): + client = UserServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="merchantapi.googleapis.com:8000" + ), + transport=transport_name, + ) + assert client.transport._host == ( + "merchantapi.googleapis.com:8000" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://merchantapi.googleapis.com:8000" + ) + + +@pytest.mark.parametrize( + "transport_name", + [ + "rest", + ], +) +def test_user_service_client_transport_session_collision(transport_name): + creds1 = ga_credentials.AnonymousCredentials() + creds2 = ga_credentials.AnonymousCredentials() + client1 = UserServiceClient( + credentials=creds1, + transport=transport_name, + ) + client2 = UserServiceClient( + credentials=creds2, + transport=transport_name, + ) + session1 = client1.transport.get_user._session + session2 = client2.transport.get_user._session + assert session1 != session2 + session1 = client1.transport.create_user._session + session2 = client2.transport.create_user._session + assert session1 != session2 + session1 = client1.transport.delete_user._session + session2 = client2.transport.delete_user._session + assert session1 != session2 + session1 = client1.transport.update_user._session + session2 = client2.transport.update_user._session + assert session1 != session2 + session1 = client1.transport.list_users._session + session2 = client2.transport.list_users._session + assert session1 != session2 + + +def test_user_service_grpc_transport_channel(): + channel = grpc.secure_channel("http://localhost/", grpc.local_channel_credentials()) + + # Check that channel is used if provided. 
+ transport = transports.UserServiceGrpcTransport( + host="squid.clam.whelk", + channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None + + +def test_user_service_grpc_asyncio_transport_channel(): + channel = aio.secure_channel("http://localhost/", grpc.local_channel_credentials()) + + # Check that channel is used if provided. + transport = transports.UserServiceGrpcAsyncIOTransport( + host="squid.clam.whelk", + channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. +@pytest.mark.parametrize( + "transport_class", + [transports.UserServiceGrpcTransport, transports.UserServiceGrpcAsyncIOTransport], +) +def test_user_service_transport_channel_mtls_with_client_cert_source(transport_class): + with mock.patch( + "grpc.ssl_channel_credentials", autospec=True + ) as grpc_ssl_channel_cred: + with mock.patch.object( + transport_class, "create_channel" + ) as grpc_create_channel: + mock_ssl_cred = mock.Mock() + grpc_ssl_channel_cred.return_value = mock_ssl_cred + + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + + cred = ga_credentials.AnonymousCredentials() + with pytest.warns(DeprecationWarning): + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (cred, None) + transport = transport_class( + host="squid.clam.whelk", + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=client_cert_source_callback, + ) + adc.assert_called_once() + + grpc_ssl_channel_cred.assert_called_once_with( + certificate_chain=b"cert bytes", private_key=b"key bytes" + ) + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + assert transport._ssl_channel_credentials == mock_ssl_cred + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. 
+@pytest.mark.parametrize( + "transport_class", + [transports.UserServiceGrpcTransport, transports.UserServiceGrpcAsyncIOTransport], +) +def test_user_service_transport_channel_mtls_with_adc(transport_class): + mock_ssl_cred = mock.Mock() + with mock.patch.multiple( + "google.auth.transport.grpc.SslCredentials", + __init__=mock.Mock(return_value=None), + ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), + ): + with mock.patch.object( + transport_class, "create_channel" + ) as grpc_create_channel: + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + mock_cred = mock.Mock() + + with pytest.warns(DeprecationWarning): + transport = transport_class( + host="squid.clam.whelk", + credentials=mock_cred, + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=None, + ) + + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=mock_cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + + +def test_account_path(): + account = "squid" + expected = "accounts/{account}".format( + account=account, + ) + actual = UserServiceClient.account_path(account) + assert expected == actual + + +def test_parse_account_path(): + expected = { + "account": "clam", + } + path = UserServiceClient.account_path(**expected) + + # Check that the path construction is reversible. + actual = UserServiceClient.parse_account_path(path) + assert expected == actual + + +def test_user_path(): + account = "whelk" + email = "octopus" + expected = "accounts/{account}/users/{email}".format( + account=account, + email=email, + ) + actual = UserServiceClient.user_path(account, email) + assert expected == actual + + +def test_parse_user_path(): + expected = { + "account": "oyster", + "email": "nudibranch", + } + path = UserServiceClient.user_path(**expected) + + # Check that the path construction is reversible. + actual = UserServiceClient.parse_user_path(path) + assert expected == actual + + +def test_common_billing_account_path(): + billing_account = "cuttlefish" + expected = "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + actual = UserServiceClient.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "mussel", + } + path = UserServiceClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. + actual = UserServiceClient.parse_common_billing_account_path(path) + assert expected == actual + + +def test_common_folder_path(): + folder = "winkle" + expected = "folders/{folder}".format( + folder=folder, + ) + actual = UserServiceClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "nautilus", + } + path = UserServiceClient.common_folder_path(**expected) + + # Check that the path construction is reversible. 
+ actual = UserServiceClient.parse_common_folder_path(path) + assert expected == actual + + +def test_common_organization_path(): + organization = "scallop" + expected = "organizations/{organization}".format( + organization=organization, + ) + actual = UserServiceClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "abalone", + } + path = UserServiceClient.common_organization_path(**expected) + + # Check that the path construction is reversible. + actual = UserServiceClient.parse_common_organization_path(path) + assert expected == actual + + +def test_common_project_path(): + project = "squid" + expected = "projects/{project}".format( + project=project, + ) + actual = UserServiceClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "clam", + } + path = UserServiceClient.common_project_path(**expected) + + # Check that the path construction is reversible. + actual = UserServiceClient.parse_common_project_path(path) + assert expected == actual + + +def test_common_location_path(): + project = "whelk" + location = "octopus" + expected = "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) + actual = UserServiceClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "oyster", + "location": "nudibranch", + } + path = UserServiceClient.common_location_path(**expected) + + # Check that the path construction is reversible. + actual = UserServiceClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object( + transports.UserServiceTransport, "_prep_wrapped_messages" + ) as prep: + client = UserServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object( + transports.UserServiceTransport, "_prep_wrapped_messages" + ) as prep: + transport_class = UserServiceClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + +@pytest.mark.asyncio +async def test_transport_close_async(): + client = UserServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + with mock.patch.object( + type(getattr(client.transport, "grpc_channel")), "close" + ) as close: + async with client: + close.assert_not_called() + close.assert_called_once() + + +def test_transport_close(): + transports = { + "rest": "_session", + "grpc": "_grpc_channel", + } + + for transport, close_name in transports.items(): + client = UserServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + with mock.patch.object( + type(getattr(client.transport, close_name)), "close" + ) as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +def test_client_ctx(): + transports = [ + "rest", + "grpc", + ] + for transport in transports: + client = UserServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + # Test client calls underlying transport. 
+ with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() + + +@pytest.mark.parametrize( + "client_class,transport_class", + [ + (UserServiceClient, transports.UserServiceGrpcTransport), + (UserServiceAsyncClient, transports.UserServiceGrpcAsyncIOTransport), + ], +) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) diff --git a/packages/google-shopping-merchant-conversions/CHANGELOG.md b/packages/google-shopping-merchant-conversions/CHANGELOG.md index ee863e84d56c..ec7873c5040e 100644 --- a/packages/google-shopping-merchant-conversions/CHANGELOG.md +++ b/packages/google-shopping-merchant-conversions/CHANGELOG.md @@ -1,5 +1,12 @@ # Changelog +## [0.1.1](https://github.com/googleapis/google-cloud-python/compare/google-shopping-merchant-conversions-v0.1.0...google-shopping-merchant-conversions-v0.1.1) (2024-05-27) + + +### Documentation + +* [google-shopping-merchant-conversions] change in wording : website -> online store ([#12719](https://github.com/googleapis/google-cloud-python/issues/12719)) ([410469b](https://github.com/googleapis/google-cloud-python/commit/410469be3e48e40fe1c9b92b46eaebf3f3a30e9d)) + ## 0.1.0 (2024-04-18) diff --git a/packages/google-shopping-merchant-conversions/google/shopping/merchant_conversions_v1beta/types/conversionsources.py b/packages/google-shopping-merchant-conversions/google/shopping/merchant_conversions_v1beta/types/conversionsources.py index d3b15b3faadc..f00002a26530 100644 --- a/packages/google-shopping-merchant-conversions/google/shopping/merchant_conversions_v1beta/types/conversionsources.py +++ b/packages/google-shopping-merchant-conversions/google/shopping/merchant_conversions_v1beta/types/conversionsources.py @@ -278,8 +278,9 @@ class GoogleAnalyticsLink(proto.Message): class MerchantCenterDestination(proto.Message): r""" "Merchant Center Destination" sources can be used to send - conversion events from a website using a Google tag directly to - a Merchant Center account where the source is created. + conversion events from an online store using a Google tag + directly to a Merchant Center account where the source is + created. 
Attributes: destination (str): diff --git a/packages/google-shopping-merchant-conversions/tests/unit/gapic/merchant_conversions_v1beta/test_conversion_sources_service.py b/packages/google-shopping-merchant-conversions/tests/unit/gapic/merchant_conversions_v1beta/test_conversion_sources_service.py index 80abb1b336f4..5e44c2ada0ad 100644 --- a/packages/google-shopping-merchant-conversions/tests/unit/gapic/merchant_conversions_v1beta/test_conversion_sources_service.py +++ b/packages/google-shopping-merchant-conversions/tests/unit/gapic/merchant_conversions_v1beta/test_conversion_sources_service.py @@ -3597,13 +3597,13 @@ def test_list_conversion_sources_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_conversion_sources(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 diff --git a/packages/google-shopping-merchant-datasources/.OwlBot.yaml b/packages/google-shopping-merchant-datasources/.OwlBot.yaml new file mode 100644 index 000000000000..a54db635da16 --- /dev/null +++ b/packages/google-shopping-merchant-datasources/.OwlBot.yaml @@ -0,0 +1,18 @@ +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +deep-copy-regex: + - source: /google/shopping/merchant/datasources/(v.*)/.*-py + dest: /owl-bot-staging/google-shopping-merchant-datasources/$1 +api-name: google-shopping-merchant-datasources diff --git a/packages/google-shopping-merchant-datasources/.coveragerc b/packages/google-shopping-merchant-datasources/.coveragerc new file mode 100644 index 000000000000..67aeac1093c8 --- /dev/null +++ b/packages/google-shopping-merchant-datasources/.coveragerc @@ -0,0 +1,13 @@ +[run] +branch = True + +[report] +show_missing = True +omit = + google/shopping/merchant_datasources/__init__.py + google/shopping/merchant_datasources/gapic_version.py +exclude_lines = + # Re-enable the standard pragma + pragma: NO COVER + # Ignore debug-only repr + def __repr__ diff --git a/packages/google-shopping-merchant-datasources/.flake8 b/packages/google-shopping-merchant-datasources/.flake8 new file mode 100644 index 000000000000..87f6e408c47d --- /dev/null +++ b/packages/google-shopping-merchant-datasources/.flake8 @@ -0,0 +1,33 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Generated by synthtool. DO NOT EDIT! +[flake8] +ignore = E203, E231, E266, E501, W503 +exclude = + # Exclude generated code. + **/proto/** + **/gapic/** + **/services/** + **/types/** + *_pb2.py + + # Standard linting exemptions. + **/.nox/** + __pycache__, + .git, + *.pyc, + conf.py diff --git a/packages/google-shopping-merchant-datasources/.gitignore b/packages/google-shopping-merchant-datasources/.gitignore new file mode 100644 index 000000000000..b4243ced74e4 --- /dev/null +++ b/packages/google-shopping-merchant-datasources/.gitignore @@ -0,0 +1,63 @@ +*.py[cod] +*.sw[op] + +# C extensions +*.so + +# Packages +*.egg +*.egg-info +dist +build +eggs +.eggs +parts +bin +var +sdist +develop-eggs +.installed.cfg +lib +lib64 +__pycache__ + +# Installer logs +pip-log.txt + +# Unit test / coverage reports +.coverage +.nox +.cache +.pytest_cache + + +# Mac +.DS_Store + +# JetBrains +.idea + +# VS Code +.vscode + +# emacs +*~ + +# Built documentation +docs/_build +bigquery/docs/generated +docs.metadata + +# Virtual environment +env/ + +# Test logs +coverage.xml +*sponge_log.xml + +# System test environment variables. +system_tests/local_test_setup + +# Make sure a generated file isn't accidentally committed. +pylintrc +pylintrc.test diff --git a/packages/google-shopping-merchant-datasources/.repo-metadata.json b/packages/google-shopping-merchant-datasources/.repo-metadata.json new file mode 100644 index 000000000000..21da84d0d056 --- /dev/null +++ b/packages/google-shopping-merchant-datasources/.repo-metadata.json @@ -0,0 +1,17 @@ +{ + "name": "google-shopping-merchant-datasources", + "name_pretty": "Merchant API", + "api_description": "Programmatically manage your Merchant Center accounts.", + "product_documentation": "https://developers.google.com/merchant/api", + "client_documentation": "https://googleapis.dev/python/google-shopping-merchant-datasources/latest", + "issue_tracker": "https://github.com/googleapis/google-cloud-python/issues", + "release_level": "preview", + "language": "python", + "library_type": "GAPIC_AUTO", + "repo": "googleapis/google-cloud-python", + "distribution_name": "google-shopping-merchant-datasources", + "api_id": "datasources.googleapis.com", + "default_version": "v1beta", + "codeowner_team": "", + "api_shortname": "datasources" +} diff --git a/packages/google-shopping-merchant-datasources/CHANGELOG.md b/packages/google-shopping-merchant-datasources/CHANGELOG.md new file mode 100644 index 000000000000..fb08a6d3b6b5 --- /dev/null +++ b/packages/google-shopping-merchant-datasources/CHANGELOG.md @@ -0,0 +1,10 @@ +# Changelog + +## 0.1.0 (2024-06-05) + + +### Features + +* add initial files for google.shopping.merchant.datasources.v1beta ([#12772](https://github.com/googleapis/google-cloud-python/issues/12772)) 
([8aedd28](https://github.com/googleapis/google-cloud-python/commit/8aedd289e38b549d84fd7a2e19b3685fc377cc2a)) + +## Changelog diff --git a/packages/google-shopping-merchant-datasources/CODE_OF_CONDUCT.md b/packages/google-shopping-merchant-datasources/CODE_OF_CONDUCT.md new file mode 100644 index 000000000000..039f43681204 --- /dev/null +++ b/packages/google-shopping-merchant-datasources/CODE_OF_CONDUCT.md @@ -0,0 +1,95 @@ + +# Code of Conduct + +## Our Pledge + +In the interest of fostering an open and welcoming environment, we as +contributors and maintainers pledge to making participation in our project and +our community a harassment-free experience for everyone, regardless of age, body +size, disability, ethnicity, gender identity and expression, level of +experience, education, socio-economic status, nationality, personal appearance, +race, religion, or sexual identity and orientation. + +## Our Standards + +Examples of behavior that contributes to creating a positive environment +include: + +* Using welcoming and inclusive language +* Being respectful of differing viewpoints and experiences +* Gracefully accepting constructive criticism +* Focusing on what is best for the community +* Showing empathy towards other community members + +Examples of unacceptable behavior by participants include: + +* The use of sexualized language or imagery and unwelcome sexual attention or + advances +* Trolling, insulting/derogatory comments, and personal or political attacks +* Public or private harassment +* Publishing others' private information, such as a physical or electronic + address, without explicit permission +* Other conduct which could reasonably be considered inappropriate in a + professional setting + +## Our Responsibilities + +Project maintainers are responsible for clarifying the standards of acceptable +behavior and are expected to take appropriate and fair corrective action in +response to any instances of unacceptable behavior. + +Project maintainers have the right and responsibility to remove, edit, or reject +comments, commits, code, wiki edits, issues, and other contributions that are +not aligned to this Code of Conduct, or to ban temporarily or permanently any +contributor for other behaviors that they deem inappropriate, threatening, +offensive, or harmful. + +## Scope + +This Code of Conduct applies both within project spaces and in public spaces +when an individual is representing the project or its community. Examples of +representing a project or community include using an official project e-mail +address, posting via an official social media account, or acting as an appointed +representative at an online or offline event. Representation of a project may be +further defined and clarified by project maintainers. + +This Code of Conduct also applies outside the project spaces when the Project +Steward has a reasonable belief that an individual's behavior may have a +negative impact on the project or its community. + +## Conflict Resolution + +We do not believe that all conflict is bad; healthy debate and disagreement +often yield positive results. However, it is never okay to be disrespectful or +to engage in behavior that violates the project’s code of conduct. + +If you see someone violating the code of conduct, you are encouraged to address +the behavior directly with those involved. Many issues can be resolved quickly +and easily, and this gives people more control over the outcome of their +dispute. 
If you are unable to resolve the matter for any reason, or if the +behavior is threatening or harassing, report it. We are dedicated to providing +an environment where participants feel welcome and safe. + + +Reports should be directed to *googleapis-stewards@google.com*, the +Project Steward(s) for *Google Cloud Client Libraries*. It is the Project Steward’s duty to +receive and address reported violations of the code of conduct. They will then +work with a committee consisting of representatives from the Open Source +Programs Office and the Google Open Source Strategy team. If for any reason you +are uncomfortable reaching out to the Project Steward, please email +opensource@google.com. + +We will investigate every complaint, but you may not receive a direct response. +We will use our discretion in determining when and how to follow up on reported +incidents, which may range from not taking action to permanent expulsion from +the project and project-sponsored spaces. We will notify the accused of the +report and provide them an opportunity to discuss it before any action is taken. +The identity of the reporter will be omitted from the details of the report +supplied to the accused. In potentially harmful situations, such as ongoing +harassment or threats to anyone's safety, we may take action without notice. + +## Attribution + +This Code of Conduct is adapted from the Contributor Covenant, version 1.4, +available at +https://www.contributor-covenant.org/version/1/4/code-of-conduct.html \ No newline at end of file diff --git a/packages/google-shopping-merchant-datasources/CONTRIBUTING.rst b/packages/google-shopping-merchant-datasources/CONTRIBUTING.rst new file mode 100644 index 000000000000..d0d396fddf2d --- /dev/null +++ b/packages/google-shopping-merchant-datasources/CONTRIBUTING.rst @@ -0,0 +1,271 @@ +.. Generated by synthtool. DO NOT EDIT! +############ +Contributing +############ + +#. **Please sign one of the contributor license agreements below.** +#. Fork the repo, develop and test your code changes, add docs. +#. Make sure that your commit messages clearly describe the changes. +#. Send a pull request. (Please Read: `Faster Pull Request Reviews`_) + +.. _Faster Pull Request Reviews: https://github.com/kubernetes/community/blob/master/contributors/guide/pull-requests.md#best-practices-for-faster-reviews + +.. contents:: Here are some guidelines for hacking on the Google Cloud Client libraries. + +*************** +Adding Features +*************** + +In order to add a feature: + +- The feature must be documented in both the API and narrative + documentation. + +- The feature must work fully on the following CPython versions: + 3.7, 3.8, 3.9, 3.10, 3.11 and 3.12 on both UNIX and Windows. + +- The feature must not add unnecessary dependencies (where + "unnecessary" is of course subjective, but new dependencies should + be discussed). + +**************************** +Using a Development Checkout +**************************** + +You'll have to create a development environment using a Git checkout: + +- While logged into your GitHub account, navigate to the + ``google-cloud-python`` `repo`_ on GitHub. + +- Fork and clone the ``google-cloud-python`` repository to your GitHub account by + clicking the "Fork" button. 
+ +- Clone your fork of ``google-cloud-python`` from your GitHub account to your local + computer, substituting your account username and specifying the destination + as ``hack-on-google-cloud-python``. E.g.:: + + $ cd ${HOME} + $ git clone git@github.com:USERNAME/google-cloud-python.git hack-on-google-cloud-python + $ cd hack-on-google-cloud-python + # Configure remotes such that you can pull changes from the googleapis/google-cloud-python + # repository into your local repository. + $ git remote add upstream git@github.com:googleapis/google-cloud-python.git + # fetch and merge changes from upstream into main + $ git fetch upstream + $ git merge upstream/main + +Now your local repo is set up such that you will push changes to your GitHub +repo, from which you can submit a pull request. + +To work on the codebase and run the tests, we recommend using ``nox``, +but you can also use a ``virtualenv`` of your own creation. + +.. _repo: https://github.com/googleapis/google-cloud-python + +Using ``nox`` +============= + +We use `nox `__ to instrument our tests. + +- To test your changes, run unit tests with ``nox``:: + $ nox -s unit + +- To run a single unit test:: + + $ nox -s unit-3.12 -- -k + + + .. note:: + + The unit tests and system tests are described in the + ``noxfile.py`` files in each directory. + +.. nox: https://pypi.org/project/nox/ + +***************************************** +I'm getting weird errors... Can you help? +***************************************** + +If the error mentions ``Python.h`` not being found, +install ``python-dev`` and try again. +On Debian/Ubuntu:: + + $ sudo apt-get install python-dev + +************ +Coding Style +************ +- We use the automatic code formatter ``black``. You can run it using + the nox session ``blacken``. This will eliminate many lint errors. Run via:: + + $ nox -s blacken + +- PEP8 compliance is required, with exceptions defined in the linter configuration. + If you have ``nox`` installed, you can test that you have not introduced + any non-compliant code via:: + + $ nox -s lint + +- In order to make ``nox -s lint`` run faster, you can set some environment + variables:: + + export GOOGLE_CLOUD_TESTING_REMOTE="upstream" + export GOOGLE_CLOUD_TESTING_BRANCH="main" + + By doing this, you are specifying the location of the most up-to-date + version of ``google-cloud-python``. The + remote name ``upstream`` should point to the official ``googleapis`` + checkout and the branch should be the default branch on that remote (``main``). + +- This repository contains configuration for the + `pre-commit `__ tool, which automates checking + our linters during a commit. If you have it installed on your ``$PATH``, + you can enable enforcing those checks via: + +.. code-block:: bash + + $ pre-commit install + pre-commit installed at .git/hooks/pre-commit + +Exceptions to PEP8: + +- Many unit tests use a helper method, ``_call_fut`` ("FUT" is short for + "Function-Under-Test"), which is PEP8-incompliant, but more readable. + Some also use a local variable, ``MUT`` (short for "Module-Under-Test"). + +******************** +Running System Tests +******************** + +- To run system tests, you can execute:: + + # Run all system tests + $ nox -s system + + # Run a single system test + $ nox -s system-3.12 -- -k + + + .. note:: + + System tests are only configured to run under Python 3.8, 3.9, 3.10, 3.11 and 3.12. 
+ For expediency, we do not run them in older versions of Python 3. + + This alone will not run the tests. You'll need to change some local + auth settings and change some configuration in your project to + run all the tests. + +- System tests will be run against an actual project. You should use local credentials from gcloud when possible. See `Best practices for application authentication `__. Some tests require a service account. For those tests see `Authenticating as a service account `__. + +************* +Test Coverage +************* + +- The codebase *must* have 100% test statement coverage after each commit. + You can test coverage via ``nox -s cover``. + +****************************************************** +Documentation Coverage and Building HTML Documentation +****************************************************** + +If you fix a bug, and the bug requires an API or behavior modification, all +documentation in this package which references that API or behavior must be +changed to reflect the bug fix, ideally in the same commit that fixes the bug +or adds the feature. + +Build the docs via: + + $ nox -s docs + +************************* +Samples and code snippets +************************* + +Code samples and snippets live in the `samples/` catalogue. Feel free to +provide more examples, but make sure to write tests for those examples. +Each folder containing example code requires its own `noxfile.py` script +which automates testing. If you decide to create a new folder, you can +base it on the `samples/snippets` folder (providing `noxfile.py` and +the requirements files). + +The tests will run against a real Google Cloud Project, so you should +configure them just like the System Tests. + +- To run sample tests, you can execute:: + + # Run all tests in a folder + $ cd samples/snippets + $ nox -s py-3.8 + + # Run a single sample test + $ cd samples/snippets + $ nox -s py-3.8 -- -k + +******************************************** +Note About ``README`` as it pertains to PyPI +******************************************** + +The `description on PyPI`_ for the project comes directly from the +``README``. Due to the reStructuredText (``rst``) parser used by +PyPI, relative links which will work on GitHub (e.g. ``CONTRIBUTING.rst`` +instead of +``https://github.com/googleapis/google-cloud-python/blob/main/CONTRIBUTING.rst``) +may cause problems creating links or rendering the description. + +.. _description on PyPI: https://pypi.org/project/google-shopping-merchant-datasources + + +************************* +Supported Python Versions +************************* + +We support: + +- `Python 3.7`_ +- `Python 3.8`_ +- `Python 3.9`_ +- `Python 3.10`_ +- `Python 3.11`_ +- `Python 3.12`_ + +.. _Python 3.7: https://docs.python.org/3.7/ +.. _Python 3.8: https://docs.python.org/3.8/ +.. _Python 3.9: https://docs.python.org/3.9/ +.. _Python 3.10: https://docs.python.org/3.10/ +.. _Python 3.11: https://docs.python.org/3.11/ +.. _Python 3.12: https://docs.python.org/3.12/ + + +Supported versions can be found in our ``noxfile.py`` `config`_. + +.. 
_config: https://github.com/googleapis/google-cloud-python/blob/main/packages/google-shopping-merchant-datasources/noxfile.py + + +********** +Versioning +********** + +This library follows `Semantic Versioning`_. + +.. _Semantic Versioning: http://semver.org/ + +Some packages are currently in major version zero (``0.y.z``), which means that +anything may change at any time and the public API should not be considered +stable. + +****************************** +Contributor License Agreements +****************************** + +Before we can accept your pull requests you'll need to sign a Contributor +License Agreement (CLA): + +- **If you are an individual writing original source code** and **you own the + intellectual property**, then you'll need to sign an + `individual CLA `__. +- **If you work for a company that wants to allow you to contribute your work**, + then you'll need to sign a + `corporate CLA `__. + +You can sign these electronically (just scroll to the bottom). After that, +we'll be able to accept your pull requests. diff --git a/packages/google-shopping-merchant-datasources/LICENSE b/packages/google-shopping-merchant-datasources/LICENSE new file mode 100644 index 000000000000..d64569567334 --- /dev/null +++ b/packages/google-shopping-merchant-datasources/LICENSE @@ -0,0 +1,202 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. 
For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/packages/google-shopping-merchant-datasources/MANIFEST.in b/packages/google-shopping-merchant-datasources/MANIFEST.in new file mode 100644 index 000000000000..e0a66705318e --- /dev/null +++ b/packages/google-shopping-merchant-datasources/MANIFEST.in @@ -0,0 +1,25 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +# Generated by synthtool. DO NOT EDIT! +include README.rst LICENSE +recursive-include google *.json *.proto py.typed +recursive-include tests * +global-exclude *.py[co] +global-exclude __pycache__ + +# Exclude scripts for samples readmegen +prune scripts/readme-gen diff --git a/packages/google-shopping-merchant-datasources/README.rst b/packages/google-shopping-merchant-datasources/README.rst new file mode 100644 index 000000000000..7340d1d0c7f8 --- /dev/null +++ b/packages/google-shopping-merchant-datasources/README.rst @@ -0,0 +1,108 @@ +Python Client for Merchant API +============================== + +|preview| |pypi| |versions| + +`Merchant API`_: Programmatically manage your Merchant Center accounts. + +- `Client Library Documentation`_ +- `Product Documentation`_ + +.. |preview| image:: https://img.shields.io/badge/support-preview-orange.svg + :target: https://github.com/googleapis/google-cloud-python/blob/main/README.rst#stability-levels +.. |pypi| image:: https://img.shields.io/pypi/v/google-shopping-merchant-datasources.svg + :target: https://pypi.org/project/google-shopping-merchant-datasources/ +.. |versions| image:: https://img.shields.io/pypi/pyversions/google-shopping-merchant-datasources.svg + :target: https://pypi.org/project/google-shopping-merchant-datasources/ +.. _Merchant API: https://developers.google.com/merchant/api +.. _Client Library Documentation: https://googleapis.dev/python/google-shopping-merchant-datasources/latest +.. _Product Documentation: https://developers.google.com/merchant/api + +Quick Start +----------- + +In order to use this library, you first need to go through the following steps: + +1. `Select or create a Cloud Platform project.`_ +2. `Enable billing for your project.`_ +3. `Enable the Merchant API.`_ +4. `Setup Authentication.`_ + +.. _Select or create a Cloud Platform project.: https://console.cloud.google.com/project +.. _Enable billing for your project.: https://cloud.google.com/billing/docs/how-to/modify-project#enable_billing_for_a_project +.. _Enable the Merchant API.: https://developers.google.com/merchant/api +.. _Setup Authentication.: https://googleapis.dev/python/google-api-core/latest/auth.html + +Installation +~~~~~~~~~~~~ + +Install this library in a virtual environment using `venv`_. `venv`_ is a tool that +creates isolated Python environments. These isolated environments can have separate +versions of Python packages, which allows you to isolate one project's dependencies +from the dependencies of other projects. + +With `venv`_, it's possible to install this library without needing system +install permissions, and without clashing with the installed system +dependencies. + +.. 
_`venv`: https://docs.python.org/3/library/venv.html + + +Code samples and snippets +~~~~~~~~~~~~~~~~~~~~~~~~~ + +Code samples and snippets live in the `samples/`_ folder. + +.. _samples/: https://github.com/googleapis/google-cloud-python/tree/main/packages/google-shopping-merchant-datasources/samples + + +Supported Python Versions +^^^^^^^^^^^^^^^^^^^^^^^^^ +Our client libraries are compatible with all current `active`_ and `maintenance`_ versions of +Python. + +Python >= 3.7 + +.. _active: https://devguide.python.org/devcycle/#in-development-main-branch +.. _maintenance: https://devguide.python.org/devcycle/#maintenance-branches + +Unsupported Python Versions +^^^^^^^^^^^^^^^^^^^^^^^^^^^ +Python <= 3.6 + +If you are using an `end-of-life`_ +version of Python, we recommend that you update as soon as possible to an actively supported version. + +.. _end-of-life: https://devguide.python.org/devcycle/#end-of-life-branches + +Mac/Linux +^^^^^^^^^ + +.. code-block:: console + + python3 -m venv + source /bin/activate + pip install google-shopping-merchant-datasources + + +Windows +^^^^^^^ + +.. code-block:: console + + py -m venv + .\\Scripts\activate + pip install google-shopping-merchant-datasources + +Next Steps +~~~~~~~~~~ + +- Read the `Client Library Documentation`_ for Merchant API + to see other available methods on the client. +- Read the `Merchant API Product documentation`_ to learn + more about the product and see How-to Guides. +- View this `README`_ to see the full list of Cloud + APIs that we cover. + +.. _Merchant API Product documentation: https://developers.google.com/merchant/api +.. 
_README: https://github.com/googleapis/google-cloud-python/blob/main/README.rst diff --git a/packages/google-shopping-merchant-datasources/docs/CHANGELOG.md b/packages/google-shopping-merchant-datasources/docs/CHANGELOG.md new file mode 120000 index 000000000000..04c99a55caae --- /dev/null +++ b/packages/google-shopping-merchant-datasources/docs/CHANGELOG.md @@ -0,0 +1 @@ +../CHANGELOG.md \ No newline at end of file diff --git a/packages/google-shopping-merchant-datasources/docs/README.rst b/packages/google-shopping-merchant-datasources/docs/README.rst new file mode 120000 index 000000000000..89a0106941ff --- /dev/null +++ b/packages/google-shopping-merchant-datasources/docs/README.rst @@ -0,0 +1 @@ +../README.rst \ No newline at end of file diff --git a/packages/google-shopping-merchant-datasources/docs/_static/custom.css b/packages/google-shopping-merchant-datasources/docs/_static/custom.css new file mode 100644 index 000000000000..b0a295464b23 --- /dev/null +++ b/packages/google-shopping-merchant-datasources/docs/_static/custom.css @@ -0,0 +1,20 @@ +div#python2-eol { + border-color: red; + border-width: medium; +} + +/* Ensure minimum width for 'Parameters' / 'Returns' column */ +dl.field-list > dt { + min-width: 100px +} + +/* Insert space between methods for readability */ +dl.method { + padding-top: 10px; + padding-bottom: 10px +} + +/* Insert empty space between classes */ +dl.class { + padding-bottom: 50px +} diff --git a/packages/google-shopping-merchant-datasources/docs/_templates/layout.html b/packages/google-shopping-merchant-datasources/docs/_templates/layout.html new file mode 100644 index 000000000000..6316a537f72b --- /dev/null +++ b/packages/google-shopping-merchant-datasources/docs/_templates/layout.html @@ -0,0 +1,50 @@ + +{% extends "!layout.html" %} +{%- block content %} +{%- if theme_fixed_sidebar|lower == 'true' %} +
+ {{ sidebar() }} + {%- block document %} +
+ {%- if render_sidebar %} +
+ {%- endif %} + + {%- block relbar_top %} + {%- if theme_show_relbar_top|tobool %} + + {%- endif %} + {% endblock %} + +
+
+ As of January 1, 2020 this library no longer supports Python 2 on the latest released version. + Library versions released prior to that date will continue to be available. For more information please + visit Python 2 support on Google Cloud. +
+ {% block body %} {% endblock %} +
+ + {%- block relbar_bottom %} + {%- if theme_show_relbar_bottom|tobool %} + + {%- endif %} + {% endblock %} + + {%- if render_sidebar %} +
+ {%- endif %} +
+ {%- endblock %} +
+
+{%- else %} +{{ super() }} +{%- endif %} +{%- endblock %} diff --git a/packages/google-shopping-merchant-datasources/docs/conf.py b/packages/google-shopping-merchant-datasources/docs/conf.py new file mode 100644 index 000000000000..98c379590d16 --- /dev/null +++ b/packages/google-shopping-merchant-datasources/docs/conf.py @@ -0,0 +1,384 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# google-shopping-merchant-datasources documentation build configuration file +# +# This file is execfile()d with the current directory set to its +# containing dir. +# +# Note that not all possible configuration values are present in this +# autogenerated file. +# +# All configuration values have a default; values that are commented out +# serve to show the default. + +import os +import shlex +import sys + +# If extensions (or modules to document with autodoc) are in another directory, +# add these directories to sys.path here. If the directory is relative to the +# documentation root, use os.path.abspath to make it absolute, like shown here. +sys.path.insert(0, os.path.abspath("..")) + +# For plugins that can not read conf.py. +# See also: https://github.com/docascode/sphinx-docfx-yaml/issues/85 +sys.path.insert(0, os.path.abspath(".")) + +__version__ = "" + +# -- General configuration ------------------------------------------------ + +# If your documentation needs a minimal Sphinx version, state it here. +needs_sphinx = "1.5.5" + +# Add any Sphinx extension module names here, as strings. They can be +# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom +# ones. +extensions = [ + "sphinx.ext.autodoc", + "sphinx.ext.autosummary", + "sphinx.ext.intersphinx", + "sphinx.ext.coverage", + "sphinx.ext.doctest", + "sphinx.ext.napoleon", + "sphinx.ext.todo", + "sphinx.ext.viewcode", + "recommonmark", +] + +# autodoc/autosummary flags +autoclass_content = "both" +autodoc_default_options = {"members": True} +autosummary_generate = True + + +# Add any paths that contain templates here, relative to this directory. +templates_path = ["_templates"] + +# The suffix(es) of source filenames. +# You can specify multiple suffix as a list of string: +# source_suffix = ['.rst', '.md'] +source_suffix = [".rst", ".md"] + +# The encoding of source files. +# source_encoding = 'utf-8-sig' + +# The root toctree document. +root_doc = "index" + +# General information about the project. +project = "google-shopping-merchant-datasources" +copyright = "2019, Google" +author = "Google APIs" + +# The version info for the project you're documenting, acts as replacement for +# |version| and |release|, also used in various other places throughout the +# built documents. +# +# The full version, including alpha/beta/rc tags. +release = __version__ +# The short X.Y version. 
+version = ".".join(release.split(".")[0:2]) + +# The language for content autogenerated by Sphinx. Refer to documentation +# for a list of supported languages. +# +# This is also used if you do content translation via gettext catalogs. +# Usually you set "language" from the command line for these cases. +language = None + +# There are two options for replacing |today|: either, you set today to some +# non-false value, then it is used: +# today = '' +# Else, today_fmt is used as the format for a strftime call. +# today_fmt = '%B %d, %Y' + +# List of patterns, relative to source directory, that match files and +# directories to ignore when looking for source files. +exclude_patterns = [ + "_build", + "**/.nox/**/*", + "samples/AUTHORING_GUIDE.md", + "samples/CONTRIBUTING.md", + "samples/snippets/README.rst", +] + +# The reST default role (used for this markup: `text`) to use for all +# documents. +# default_role = None + +# If true, '()' will be appended to :func: etc. cross-reference text. +# add_function_parentheses = True + +# If true, the current module name will be prepended to all description +# unit titles (such as .. function::). +# add_module_names = True + +# If true, sectionauthor and moduleauthor directives will be shown in the +# output. They are ignored by default. +# show_authors = False + +# The name of the Pygments (syntax highlighting) style to use. +pygments_style = "sphinx" + +# A list of ignored prefixes for module index sorting. +# modindex_common_prefix = [] + +# If true, keep warnings as "system message" paragraphs in the built documents. +# keep_warnings = False + +# If true, `todo` and `todoList` produce output, else they produce nothing. +todo_include_todos = True + + +# -- Options for HTML output ---------------------------------------------- + +# The theme to use for HTML and HTML Help pages. See the documentation for +# a list of builtin themes. +html_theme = "alabaster" + +# Theme options are theme-specific and customize the look and feel of a theme +# further. For a list of options available for each theme, see the +# documentation. +html_theme_options = { + "description": "Google Cloud Client Libraries for google-shopping-merchant-datasources", + "github_user": "googleapis", + "github_repo": "google-cloud-python", + "github_banner": True, + "font_family": "'Roboto', Georgia, sans", + "head_font_family": "'Roboto', Georgia, serif", + "code_font_family": "'Roboto Mono', 'Consolas', monospace", +} + +# Add any paths that contain custom themes here, relative to this directory. +# html_theme_path = [] + +# The name for this set of Sphinx documents. If None, it defaults to +# " v documentation". +# html_title = None + +# A shorter title for the navigation bar. Default is the same as html_title. +# html_short_title = None + +# The name of an image file (relative to this directory) to place at the top +# of the sidebar. +# html_logo = None + +# The name of an image file (within the static path) to use as favicon of the +# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 +# pixels large. +# html_favicon = None + +# Add any paths that contain custom static files (such as style sheets) here, +# relative to this directory. They are copied after the builtin static files, +# so a file named "default.css" will overwrite the builtin "default.css". +html_static_path = ["_static"] + +# Add any extra paths that contain custom files (such as robots.txt or +# .htaccess) here, relative to this directory. 
These files are copied +# directly to the root of the documentation. +# html_extra_path = [] + +# If not '', a 'Last updated on:' timestamp is inserted at every page bottom, +# using the given strftime format. +# html_last_updated_fmt = '%b %d, %Y' + +# If true, SmartyPants will be used to convert quotes and dashes to +# typographically correct entities. +# html_use_smartypants = True + +# Custom sidebar templates, maps document names to template names. +# html_sidebars = {} + +# Additional templates that should be rendered to pages, maps page names to +# template names. +# html_additional_pages = {} + +# If false, no module index is generated. +# html_domain_indices = True + +# If false, no index is generated. +# html_use_index = True + +# If true, the index is split into individual pages for each letter. +# html_split_index = False + +# If true, links to the reST sources are added to the pages. +# html_show_sourcelink = True + +# If true, "Created using Sphinx" is shown in the HTML footer. Default is True. +# html_show_sphinx = True + +# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. +# html_show_copyright = True + +# If true, an OpenSearch description file will be output, and all pages will +# contain a tag referring to it. The value of this option must be the +# base URL from which the finished HTML is served. +# html_use_opensearch = '' + +# This is the file name suffix for HTML files (e.g. ".xhtml"). +# html_file_suffix = None + +# Language to be used for generating the HTML full-text search index. +# Sphinx supports the following languages: +# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja' +# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr' +# html_search_language = 'en' + +# A dictionary with options for the search language support, empty by default. +# Now only 'ja' uses this config value +# html_search_options = {'type': 'default'} + +# The name of a javascript file (relative to the configuration directory) that +# implements a search results scorer. If empty, the default will be used. +# html_search_scorer = 'scorer.js' + +# Output file base name for HTML help builder. +htmlhelp_basename = "google-shopping-merchant-datasources-doc" + +# -- Options for warnings ------------------------------------------------------ + + +suppress_warnings = [ + # Temporarily suppress this to avoid "more than one target found for + # cross-reference" warning, which are intractable for us to avoid while in + # a mono-repo. + # See https://github.com/sphinx-doc/sphinx/blob + # /2a65ffeef5c107c19084fabdd706cdff3f52d93c/sphinx/domains/python.py#L843 + "ref.python" +] + +# -- Options for LaTeX output --------------------------------------------- + +latex_elements = { + # The paper size ('letterpaper' or 'a4paper'). + #'papersize': 'letterpaper', + # The font size ('10pt', '11pt' or '12pt'). + #'pointsize': '10pt', + # Additional stuff for the LaTeX preamble. + #'preamble': '', + # Latex figure (float) alignment + #'figure_align': 'htbp', +} + +# Grouping the document tree into LaTeX files. List of tuples +# (source start file, target name, title, +# author, documentclass [howto, manual, or own class]). +latex_documents = [ + ( + root_doc, + "google-shopping-merchant-datasources.tex", + "google-shopping-merchant-datasources Documentation", + author, + "manual", + ) +] + +# The name of an image file (relative to this directory) to place at the top of +# the title page. 
+# latex_logo = None + +# For "manual" documents, if this is true, then toplevel headings are parts, +# not chapters. +# latex_use_parts = False + +# If true, show page references after internal links. +# latex_show_pagerefs = False + +# If true, show URL addresses after external links. +# latex_show_urls = False + +# Documents to append as an appendix to all manuals. +# latex_appendices = [] + +# If false, no module index is generated. +# latex_domain_indices = True + + +# -- Options for manual page output --------------------------------------- + +# One entry per manual page. List of tuples +# (source start file, name, description, authors, manual section). +man_pages = [ + ( + root_doc, + "google-shopping-merchant-datasources", + "google-shopping-merchant-datasources Documentation", + [author], + 1, + ) +] + +# If true, show URL addresses after external links. +# man_show_urls = False + + +# -- Options for Texinfo output ------------------------------------------- + +# Grouping the document tree into Texinfo files. List of tuples +# (source start file, target name, title, author, +# dir menu entry, description, category) +texinfo_documents = [ + ( + root_doc, + "google-shopping-merchant-datasources", + "google-shopping-merchant-datasources Documentation", + author, + "google-shopping-merchant-datasources", + "google-shopping-merchant-datasources Library", + "APIs", + ) +] + +# Documents to append as an appendix to all manuals. +# texinfo_appendices = [] + +# If false, no module index is generated. +# texinfo_domain_indices = True + +# How to display URL addresses: 'footnote', 'no', or 'inline'. +# texinfo_show_urls = 'footnote' + +# If true, do not generate a @detailmenu in the "Top" node's menu. +# texinfo_no_detailmenu = False + + +# Example configuration for intersphinx: refer to the Python standard library. +intersphinx_mapping = { + "python": ("https://python.readthedocs.org/en/latest/", None), + "google-auth": ("https://googleapis.dev/python/google-auth/latest/", None), + "google.api_core": ( + "https://googleapis.dev/python/google-api-core/latest/", + None, + ), + "grpc": ("https://grpc.github.io/grpc/python/", None), + "proto-plus": ("https://proto-plus-python.readthedocs.io/en/latest/", None), + "protobuf": ("https://googleapis.dev/python/protobuf/latest/", None), +} + + +# Napoleon settings +napoleon_google_docstring = True +napoleon_numpy_docstring = True +napoleon_include_private_with_doc = False +napoleon_include_special_with_doc = True +napoleon_use_admonition_for_examples = False +napoleon_use_admonition_for_notes = False +napoleon_use_admonition_for_references = False +napoleon_use_ivar = False +napoleon_use_param = True +napoleon_use_rtype = True diff --git a/packages/google-shopping-merchant-datasources/docs/index.rst b/packages/google-shopping-merchant-datasources/docs/index.rst new file mode 100644 index 000000000000..759111d4bb55 --- /dev/null +++ b/packages/google-shopping-merchant-datasources/docs/index.rst @@ -0,0 +1,23 @@ +.. include:: README.rst + +.. include:: multiprocessing.rst + + +API Reference +------------- +.. 
toctree:: + :maxdepth: 2 + + merchant_datasources_v1beta/services_ + merchant_datasources_v1beta/types_ + + +Changelog +--------- + +For a list of all ``google-shopping-merchant-datasources`` releases: + +.. toctree:: + :maxdepth: 2 + + CHANGELOG diff --git a/packages/google-shopping-merchant-datasources/docs/merchant_datasources_v1beta/data_sources_service.rst b/packages/google-shopping-merchant-datasources/docs/merchant_datasources_v1beta/data_sources_service.rst new file mode 100644 index 000000000000..0adda9607d6f --- /dev/null +++ b/packages/google-shopping-merchant-datasources/docs/merchant_datasources_v1beta/data_sources_service.rst @@ -0,0 +1,10 @@ +DataSourcesService +------------------------------------ + +.. automodule:: google.shopping.merchant_datasources_v1beta.services.data_sources_service + :members: + :inherited-members: + +.. automodule:: google.shopping.merchant_datasources_v1beta.services.data_sources_service.pagers + :members: + :inherited-members: diff --git a/packages/google-shopping-merchant-datasources/docs/merchant_datasources_v1beta/services_.rst b/packages/google-shopping-merchant-datasources/docs/merchant_datasources_v1beta/services_.rst new file mode 100644 index 000000000000..e81d9b15d547 --- /dev/null +++ b/packages/google-shopping-merchant-datasources/docs/merchant_datasources_v1beta/services_.rst @@ -0,0 +1,6 @@ +Services for Google Shopping Merchant Datasources v1beta API +============================================================ +.. toctree:: + :maxdepth: 2 + + data_sources_service diff --git a/packages/google-shopping-merchant-datasources/docs/merchant_datasources_v1beta/types_.rst b/packages/google-shopping-merchant-datasources/docs/merchant_datasources_v1beta/types_.rst new file mode 100644 index 000000000000..567ff11ee41e --- /dev/null +++ b/packages/google-shopping-merchant-datasources/docs/merchant_datasources_v1beta/types_.rst @@ -0,0 +1,6 @@ +Types for Google Shopping Merchant Datasources v1beta API +========================================================= + +.. automodule:: google.shopping.merchant_datasources_v1beta.types + :members: + :show-inheritance: diff --git a/packages/google-shopping-merchant-datasources/docs/multiprocessing.rst b/packages/google-shopping-merchant-datasources/docs/multiprocessing.rst new file mode 100644 index 000000000000..536d17b2ea65 --- /dev/null +++ b/packages/google-shopping-merchant-datasources/docs/multiprocessing.rst @@ -0,0 +1,7 @@ +.. note:: + + Because this client uses :mod:`grpc` library, it is safe to + share instances across threads. In multiprocessing scenarios, the best + practice is to create client instances *after* the invocation of + :func:`os.fork` by :class:`multiprocessing.pool.Pool` or + :class:`multiprocessing.Process`. diff --git a/packages/google-shopping-merchant-datasources/google/shopping/merchant_datasources/__init__.py b/packages/google-shopping-merchant-datasources/google/shopping/merchant_datasources/__init__.py new file mode 100644 index 000000000000..5c29874728d6 --- /dev/null +++ b/packages/google-shopping-merchant-datasources/google/shopping/merchant_datasources/__init__.py @@ -0,0 +1,63 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from google.shopping.merchant_datasources import gapic_version as package_version + +__version__ = package_version.__version__ + + +from google.shopping.merchant_datasources_v1beta.services.data_sources_service.async_client import ( + DataSourcesServiceAsyncClient, +) +from google.shopping.merchant_datasources_v1beta.services.data_sources_service.client import ( + DataSourcesServiceClient, +) +from google.shopping.merchant_datasources_v1beta.types.datasources import ( + CreateDataSourceRequest, + DataSource, + DeleteDataSourceRequest, + FetchDataSourceRequest, + GetDataSourceRequest, + ListDataSourcesRequest, + ListDataSourcesResponse, + UpdateDataSourceRequest, +) +from google.shopping.merchant_datasources_v1beta.types.datasourcetypes import ( + LocalInventoryDataSource, + PrimaryProductDataSource, + PromotionDataSource, + RegionalInventoryDataSource, + SupplementalProductDataSource, +) +from google.shopping.merchant_datasources_v1beta.types.fileinputs import FileInput + +__all__ = ( + "DataSourcesServiceClient", + "DataSourcesServiceAsyncClient", + "CreateDataSourceRequest", + "DataSource", + "DeleteDataSourceRequest", + "FetchDataSourceRequest", + "GetDataSourceRequest", + "ListDataSourcesRequest", + "ListDataSourcesResponse", + "UpdateDataSourceRequest", + "LocalInventoryDataSource", + "PrimaryProductDataSource", + "PromotionDataSource", + "RegionalInventoryDataSource", + "SupplementalProductDataSource", + "FileInput", +) diff --git a/packages/google-shopping-merchant-datasources/google/shopping/merchant_datasources/gapic_version.py b/packages/google-shopping-merchant-datasources/google/shopping/merchant_datasources/gapic_version.py new file mode 100644 index 000000000000..33d37a7b677b --- /dev/null +++ b/packages/google-shopping-merchant-datasources/google/shopping/merchant_datasources/gapic_version.py @@ -0,0 +1,16 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +__version__ = "0.1.0" # {x-release-please-version} diff --git a/packages/google-shopping-merchant-datasources/google/shopping/merchant_datasources/py.typed b/packages/google-shopping-merchant-datasources/google/shopping/merchant_datasources/py.typed new file mode 100644 index 000000000000..035cdd22912c --- /dev/null +++ b/packages/google-shopping-merchant-datasources/google/shopping/merchant_datasources/py.typed @@ -0,0 +1,2 @@ +# Marker file for PEP 561. +# The google-shopping-merchant-datasources package uses inline types. 
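The ``__init__.py`` above defines the package's public surface: a synchronous ``DataSourcesServiceClient``, its async counterpart, and the request and data-source types. As a hedged orientation only (it is not part of the generated package), the sketch below shows how those exports fit together; the account name ``accounts/1234`` is a placeholder and Application Default Credentials are assumed.

.. code-block:: python

    # Illustrative sketch, not generated code: list the data sources of one
    # Merchant Center account. Assumes Application Default Credentials and a
    # placeholder account resource name.
    from google.shopping import merchant_datasources


    def list_sources(account: str = "accounts/1234") -> None:
        # Credentials are picked up from the environment by default.
        client = merchant_datasources.DataSourcesServiceClient()

        # ListDataSources is a paginated List RPC; the returned pager hides
        # page tokens and yields DataSource messages.
        request = merchant_datasources.ListDataSourcesRequest(parent=account)
        for data_source in client.list_data_sources(request=request):
            print(data_source.name)


    if __name__ == "__main__":
        list_sources()

The same names are re-exported from ``google.shopping.merchant_datasources_v1beta`` (see its ``__init__.py`` above), so either import path works.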
diff --git a/packages/google-shopping-merchant-datasources/google/shopping/merchant_datasources_v1beta/__init__.py b/packages/google-shopping-merchant-datasources/google/shopping/merchant_datasources_v1beta/__init__.py new file mode 100644 index 000000000000..7819b598ee4a --- /dev/null +++ b/packages/google-shopping-merchant-datasources/google/shopping/merchant_datasources_v1beta/__init__.py @@ -0,0 +1,61 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from google.shopping.merchant_datasources_v1beta import gapic_version as package_version + +__version__ = package_version.__version__ + + +from .services.data_sources_service import ( + DataSourcesServiceAsyncClient, + DataSourcesServiceClient, +) +from .types.datasources import ( + CreateDataSourceRequest, + DataSource, + DeleteDataSourceRequest, + FetchDataSourceRequest, + GetDataSourceRequest, + ListDataSourcesRequest, + ListDataSourcesResponse, + UpdateDataSourceRequest, +) +from .types.datasourcetypes import ( + LocalInventoryDataSource, + PrimaryProductDataSource, + PromotionDataSource, + RegionalInventoryDataSource, + SupplementalProductDataSource, +) +from .types.fileinputs import FileInput + +__all__ = ( + "DataSourcesServiceAsyncClient", + "CreateDataSourceRequest", + "DataSource", + "DataSourcesServiceClient", + "DeleteDataSourceRequest", + "FetchDataSourceRequest", + "FileInput", + "GetDataSourceRequest", + "ListDataSourcesRequest", + "ListDataSourcesResponse", + "LocalInventoryDataSource", + "PrimaryProductDataSource", + "PromotionDataSource", + "RegionalInventoryDataSource", + "SupplementalProductDataSource", + "UpdateDataSourceRequest", +) diff --git a/packages/google-shopping-merchant-datasources/google/shopping/merchant_datasources_v1beta/gapic_metadata.json b/packages/google-shopping-merchant-datasources/google/shopping/merchant_datasources_v1beta/gapic_metadata.json new file mode 100644 index 000000000000..22bf4c71ef65 --- /dev/null +++ b/packages/google-shopping-merchant-datasources/google/shopping/merchant_datasources_v1beta/gapic_metadata.json @@ -0,0 +1,118 @@ + { + "comment": "This file maps proto services/RPCs to the corresponding library clients/methods", + "language": "python", + "libraryPackage": "google.shopping.merchant_datasources_v1beta", + "protoPackage": "google.shopping.merchant.datasources.v1beta", + "schema": "1.0", + "services": { + "DataSourcesService": { + "clients": { + "grpc": { + "libraryClient": "DataSourcesServiceClient", + "rpcs": { + "CreateDataSource": { + "methods": [ + "create_data_source" + ] + }, + "DeleteDataSource": { + "methods": [ + "delete_data_source" + ] + }, + "FetchDataSource": { + "methods": [ + "fetch_data_source" + ] + }, + "GetDataSource": { + "methods": [ + "get_data_source" + ] + }, + "ListDataSources": { + "methods": [ + "list_data_sources" + ] + }, + "UpdateDataSource": { + "methods": [ + "update_data_source" + ] + } + } + }, + "grpc-async": { + "libraryClient": 
"DataSourcesServiceAsyncClient", + "rpcs": { + "CreateDataSource": { + "methods": [ + "create_data_source" + ] + }, + "DeleteDataSource": { + "methods": [ + "delete_data_source" + ] + }, + "FetchDataSource": { + "methods": [ + "fetch_data_source" + ] + }, + "GetDataSource": { + "methods": [ + "get_data_source" + ] + }, + "ListDataSources": { + "methods": [ + "list_data_sources" + ] + }, + "UpdateDataSource": { + "methods": [ + "update_data_source" + ] + } + } + }, + "rest": { + "libraryClient": "DataSourcesServiceClient", + "rpcs": { + "CreateDataSource": { + "methods": [ + "create_data_source" + ] + }, + "DeleteDataSource": { + "methods": [ + "delete_data_source" + ] + }, + "FetchDataSource": { + "methods": [ + "fetch_data_source" + ] + }, + "GetDataSource": { + "methods": [ + "get_data_source" + ] + }, + "ListDataSources": { + "methods": [ + "list_data_sources" + ] + }, + "UpdateDataSource": { + "methods": [ + "update_data_source" + ] + } + } + } + } + } + } +} diff --git a/packages/google-shopping-merchant-datasources/google/shopping/merchant_datasources_v1beta/gapic_version.py b/packages/google-shopping-merchant-datasources/google/shopping/merchant_datasources_v1beta/gapic_version.py new file mode 100644 index 000000000000..33d37a7b677b --- /dev/null +++ b/packages/google-shopping-merchant-datasources/google/shopping/merchant_datasources_v1beta/gapic_version.py @@ -0,0 +1,16 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +__version__ = "0.1.0" # {x-release-please-version} diff --git a/packages/google-shopping-merchant-datasources/google/shopping/merchant_datasources_v1beta/py.typed b/packages/google-shopping-merchant-datasources/google/shopping/merchant_datasources_v1beta/py.typed new file mode 100644 index 000000000000..035cdd22912c --- /dev/null +++ b/packages/google-shopping-merchant-datasources/google/shopping/merchant_datasources_v1beta/py.typed @@ -0,0 +1,2 @@ +# Marker file for PEP 561. +# The google-shopping-merchant-datasources package uses inline types. diff --git a/packages/google-shopping-merchant-datasources/google/shopping/merchant_datasources_v1beta/services/__init__.py b/packages/google-shopping-merchant-datasources/google/shopping/merchant_datasources_v1beta/services/__init__.py new file mode 100644 index 000000000000..8f6cf068242c --- /dev/null +++ b/packages/google-shopping-merchant-datasources/google/shopping/merchant_datasources_v1beta/services/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/packages/google-shopping-merchant-datasources/google/shopping/merchant_datasources_v1beta/services/data_sources_service/__init__.py b/packages/google-shopping-merchant-datasources/google/shopping/merchant_datasources_v1beta/services/data_sources_service/__init__.py new file mode 100644 index 000000000000..e2dad0a1a7e5 --- /dev/null +++ b/packages/google-shopping-merchant-datasources/google/shopping/merchant_datasources_v1beta/services/data_sources_service/__init__.py @@ -0,0 +1,22 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from .async_client import DataSourcesServiceAsyncClient +from .client import DataSourcesServiceClient + +__all__ = ( + "DataSourcesServiceClient", + "DataSourcesServiceAsyncClient", +) diff --git a/packages/google-shopping-merchant-datasources/google/shopping/merchant_datasources_v1beta/services/data_sources_service/async_client.py b/packages/google-shopping-merchant-datasources/google/shopping/merchant_datasources_v1beta/services/data_sources_service/async_client.py new file mode 100644 index 000000000000..bfa3d4f326a6 --- /dev/null +++ b/packages/google-shopping-merchant-datasources/google/shopping/merchant_datasources_v1beta/services/data_sources_service/async_client.py @@ -0,0 +1,941 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
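The GAPIC metadata above maps every RPC (``CreateDataSource``, ``GetDataSource``, ``ListDataSources``, and so on) to the same method names on the gRPC, gRPC-asyncio and REST transports, and the service package exports both ``DataSourcesServiceClient`` and ``DataSourcesServiceAsyncClient``. Before the generated ``async_client.py`` continues below, here is a hedged sketch (not part of the generated sources) of driving the async client from ``asyncio``; the account name is a placeholder, and the default ``grpc_asyncio`` transport plus ambient credentials are assumed.

.. code-block:: python

    # Illustrative sketch, not generated code: list data sources with the
    # async client. Assumes ambient credentials and a placeholder account.
    import asyncio

    from google.shopping import merchant_datasources_v1beta


    async def show_sources(account: str = "accounts/1234") -> None:
        # Defaults to the grpc_asyncio transport.
        client = merchant_datasources_v1beta.DataSourcesServiceAsyncClient()

        request = merchant_datasources_v1beta.ListDataSourcesRequest(parent=account)
        pager = await client.list_data_sources(request=request)

        # The async pager supports ``async for`` iteration across result pages.
        async for data_source in pager:
            print(data_source.name)


    if __name__ == "__main__":
        asyncio.run(show_sources())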
+# +from collections import OrderedDict +import functools +import re +from typing import ( + Callable, + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, +) + +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry_async as retries +from google.api_core.client_options import ClientOptions +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.shopping.merchant_datasources_v1beta import gapic_version as package_version + +try: + OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.AsyncRetry, object, None] # type: ignore + +from google.protobuf import field_mask_pb2 # type: ignore + +from google.shopping.merchant_datasources_v1beta.services.data_sources_service import ( + pagers, +) +from google.shopping.merchant_datasources_v1beta.types import ( + datasources, + datasourcetypes, + fileinputs, +) + +from .client import DataSourcesServiceClient +from .transports.base import DEFAULT_CLIENT_INFO, DataSourcesServiceTransport +from .transports.grpc_asyncio import DataSourcesServiceGrpcAsyncIOTransport + + +class DataSourcesServiceAsyncClient: + """Service to manage primary, supplemental, inventory and other data + sources. See more in the `Merchant + Center `__ help + article. + """ + + _client: DataSourcesServiceClient + + # Copy defaults from the synchronous client for use here. + # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. + DEFAULT_ENDPOINT = DataSourcesServiceClient.DEFAULT_ENDPOINT + DEFAULT_MTLS_ENDPOINT = DataSourcesServiceClient.DEFAULT_MTLS_ENDPOINT + _DEFAULT_ENDPOINT_TEMPLATE = DataSourcesServiceClient._DEFAULT_ENDPOINT_TEMPLATE + _DEFAULT_UNIVERSE = DataSourcesServiceClient._DEFAULT_UNIVERSE + + data_source_path = staticmethod(DataSourcesServiceClient.data_source_path) + parse_data_source_path = staticmethod( + DataSourcesServiceClient.parse_data_source_path + ) + common_billing_account_path = staticmethod( + DataSourcesServiceClient.common_billing_account_path + ) + parse_common_billing_account_path = staticmethod( + DataSourcesServiceClient.parse_common_billing_account_path + ) + common_folder_path = staticmethod(DataSourcesServiceClient.common_folder_path) + parse_common_folder_path = staticmethod( + DataSourcesServiceClient.parse_common_folder_path + ) + common_organization_path = staticmethod( + DataSourcesServiceClient.common_organization_path + ) + parse_common_organization_path = staticmethod( + DataSourcesServiceClient.parse_common_organization_path + ) + common_project_path = staticmethod(DataSourcesServiceClient.common_project_path) + parse_common_project_path = staticmethod( + DataSourcesServiceClient.parse_common_project_path + ) + common_location_path = staticmethod(DataSourcesServiceClient.common_location_path) + parse_common_location_path = staticmethod( + DataSourcesServiceClient.parse_common_location_path + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + DataSourcesServiceAsyncClient: The constructed client. 
+ """ + return DataSourcesServiceClient.from_service_account_info.__func__(DataSourcesServiceAsyncClient, info, *args, **kwargs) # type: ignore + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + DataSourcesServiceAsyncClient: The constructed client. + """ + return DataSourcesServiceClient.from_service_account_file.__func__(DataSourcesServiceAsyncClient, filename, *args, **kwargs) # type: ignore + + from_service_account_json = from_service_account_file + + @classmethod + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[ClientOptions] = None + ): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + return DataSourcesServiceClient.get_mtls_endpoint_and_cert_source(client_options) # type: ignore + + @property + def transport(self) -> DataSourcesServiceTransport: + """Returns the transport used by the client instance. + + Returns: + DataSourcesServiceTransport: The transport used by the client instance. + """ + return self._client.transport + + @property + def api_endpoint(self): + """Return the API endpoint used by the client instance. + + Returns: + str: The API endpoint used by the client instance. + """ + return self._client._api_endpoint + + @property + def universe_domain(self) -> str: + """Return the universe domain used by the client instance. + + Returns: + str: The universe domain used + by the client instance. 
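A hedged sketch of the mTLS resolution helper described above; the result depends on the `GOOGLE_API_USE_CLIENT_CERTIFICATE` and `GOOGLE_API_USE_MTLS_ENDPOINT` environment variables, and the printed values are only the expected defaults:

```python
from google.api_core.client_options import ClientOptions
from google.shopping import merchant_datasources_v1beta

# With no explicit overrides, the result depends on the two environment
# variables described in the docstring above.
endpoint, cert_source = (
    merchant_datasources_v1beta.DataSourcesServiceAsyncClient.get_mtls_endpoint_and_cert_source(
        ClientOptions()
    )
)
print(endpoint)     # expected default: merchantapi.googleapis.com
print(cert_source)  # None unless a client certificate is configured
```

Note that the synchronous client marks this helper as deprecated in favor of reading the `api_endpoint` property after construction.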
+ """ + return self._client._universe_domain + + get_transport_class = functools.partial( + type(DataSourcesServiceClient).get_transport_class, + type(DataSourcesServiceClient), + ) + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[ + Union[ + str, + DataSourcesServiceTransport, + Callable[..., DataSourcesServiceTransport], + ] + ] = "grpc_asyncio", + client_options: Optional[ClientOptions] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the data sources service async client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Optional[Union[str,DataSourcesServiceTransport,Callable[..., DataSourcesServiceTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport to use. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the DataSourcesServiceTransport constructor. + If set to None, a transport is chosen automatically. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): + Custom options for the client. + + 1. The ``api_endpoint`` property can be used to override the + default endpoint provided by the client when ``transport`` is + not explicitly provided. Only if this property is not set and + ``transport`` was not explicitly provided, the endpoint is + determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment + variable, which have one of the following values: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto-switch to the + default mTLS endpoint if client certificate is present; this is + the default value). + + 2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide a client certificate for mTLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + 3. The ``universe_domain`` property can be used to override the + default "googleapis.com" universe. Note that ``api_endpoint`` + property still takes precedence; and ``universe_domain`` is + currently not supported for mTLS. + + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. 
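A minimal construction sketch for the options described above, assuming Application Default Credentials are available in the environment; the endpoint value used is simply the documented default, not a required override:

```python
from google.api_core.client_options import ClientOptions
from google.shopping import merchant_datasources_v1beta

# api_endpoint takes precedence over the mTLS/universe-domain logic described
# above; the value here is just the documented default endpoint.
client = merchant_datasources_v1beta.DataSourcesServiceAsyncClient(
    client_options=ClientOptions(api_endpoint="merchantapi.googleapis.com"),
)
print(client.api_endpoint)
```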
+ """ + self._client = DataSourcesServiceClient( + credentials=credentials, + transport=transport, + client_options=client_options, + client_info=client_info, + ) + + async def get_data_source( + self, + request: Optional[Union[datasources.GetDataSourceRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> datasources.DataSource: + r"""Retrieves the data source configuration for the given + account. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.shopping import merchant_datasources_v1beta + + async def sample_get_data_source(): + # Create a client + client = merchant_datasources_v1beta.DataSourcesServiceAsyncClient() + + # Initialize request argument(s) + request = merchant_datasources_v1beta.GetDataSourceRequest( + name="name_value", + ) + + # Make the request + response = await client.get_data_source(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.shopping.merchant_datasources_v1beta.types.GetDataSourceRequest, dict]]): + The request object. Request message for the GetDataSource + method. + name (:class:`str`): + Required. The name of the data source to retrieve. + Format: ``accounts/{account}/dataSources/{datasource}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.shopping.merchant_datasources_v1beta.types.DataSource: + The [data source](\ https://support.google.com/merchants/answer/7439058) for + the Merchant Center account. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, datasources.GetDataSourceRequest): + request = datasources.GetDataSourceRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.get_data_source + ] + + # Certain fields should be provided within the metadata header; + # add these here. 
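For reference, `get_data_source` accepts either a prebuilt request object or the flattened `name` argument, but not both (the check above raises `ValueError`). A sketch with a placeholder resource name and an illustrative per-call timeout:

```python
import asyncio

from google.shopping import merchant_datasources_v1beta


async def main():
    client = merchant_datasources_v1beta.DataSourcesServiceAsyncClient()
    name = "accounts/123/dataSources/456"  # placeholder resource name

    # Flattened argument, with an illustrative per-call timeout.
    data_source = await client.get_data_source(name=name, timeout=30.0)

    # Equivalent call with an explicit request object (do not combine the
    # two styles; that raises ValueError).
    request = merchant_datasources_v1beta.GetDataSourceRequest(name=name)
    data_source = await client.get_data_source(request=request)
    print(data_source.display_name)


asyncio.run(main())
```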
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_data_sources( + self, + request: Optional[Union[datasources.ListDataSourcesRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListDataSourcesAsyncPager: + r"""Lists the configurations for data sources for the + given account. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.shopping import merchant_datasources_v1beta + + async def sample_list_data_sources(): + # Create a client + client = merchant_datasources_v1beta.DataSourcesServiceAsyncClient() + + # Initialize request argument(s) + request = merchant_datasources_v1beta.ListDataSourcesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_data_sources(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.shopping.merchant_datasources_v1beta.types.ListDataSourcesRequest, dict]]): + The request object. Request message for the + ListDataSources method. + parent (:class:`str`): + Required. The account to list data sources for. Format: + ``accounts/{account}`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.shopping.merchant_datasources_v1beta.services.data_sources_service.pagers.ListDataSourcesAsyncPager: + Response message for the + ListDataSources method. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, datasources.ListDataSourcesRequest): + request = datasources.ListDataSourcesRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
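A sketch of consuming the async pager returned by `list_data_sources`, either item by item or page by page; the parent is a placeholder, and the `data_sources` and `next_page_token` fields are assumed to be the response fields the pager wraps:

```python
import asyncio

from google.shopping import merchant_datasources_v1beta


async def main():
    client = merchant_datasources_v1beta.DataSourcesServiceAsyncClient()

    # Item by item: additional pages are fetched transparently.
    pager = await client.list_data_sources(parent="accounts/123")  # placeholder parent
    async for data_source in pager:
        print(data_source.name)

    # Page by page, e.g. to inspect the raw responses.
    pager = await client.list_data_sources(parent="accounts/123")
    async for page in pager.pages:
        print(len(page.data_sources), page.next_page_token)


asyncio.run(main())
```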
+ if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.list_data_sources + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListDataSourcesAsyncPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def create_data_source( + self, + request: Optional[Union[datasources.CreateDataSourceRequest, dict]] = None, + *, + parent: Optional[str] = None, + data_source: Optional[datasources.DataSource] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> datasources.DataSource: + r"""Creates the new data source configuration for the + given account. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.shopping import merchant_datasources_v1beta + + async def sample_create_data_source(): + # Create a client + client = merchant_datasources_v1beta.DataSourcesServiceAsyncClient() + + # Initialize request argument(s) + data_source = merchant_datasources_v1beta.DataSource() + data_source.primary_product_data_source.channel = "PRODUCTS" + data_source.display_name = "display_name_value" + + request = merchant_datasources_v1beta.CreateDataSourceRequest( + parent="parent_value", + data_source=data_source, + ) + + # Make the request + response = await client.create_data_source(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.shopping.merchant_datasources_v1beta.types.CreateDataSourceRequest, dict]]): + The request object. Request message for the + CreateDataSource method. + parent (:class:`str`): + Required. The account where this data source will be + created. Format: ``accounts/{account}`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + data_source (:class:`google.shopping.merchant_datasources_v1beta.types.DataSource`): + Required. The data source to create. + This corresponds to the ``data_source`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.shopping.merchant_datasources_v1beta.types.DataSource: + The [data source](\ https://support.google.com/merchants/answer/7439058) for + the Merchant Center account. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, data_source]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, datasources.CreateDataSourceRequest): + request = datasources.CreateDataSourceRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if data_source is not None: + request.data_source = data_source + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.create_data_source + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def update_data_source( + self, + request: Optional[Union[datasources.UpdateDataSourceRequest, dict]] = None, + *, + data_source: Optional[datasources.DataSource] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> datasources.DataSource: + r"""Updates the existing data source configuration. The + fields that are set in the update mask but not provided + in the resource will be deleted. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.shopping import merchant_datasources_v1beta + + async def sample_update_data_source(): + # Create a client + client = merchant_datasources_v1beta.DataSourcesServiceAsyncClient() + + # Initialize request argument(s) + data_source = merchant_datasources_v1beta.DataSource() + data_source.primary_product_data_source.channel = "PRODUCTS" + data_source.display_name = "display_name_value" + + request = merchant_datasources_v1beta.UpdateDataSourceRequest( + data_source=data_source, + ) + + # Make the request + response = await client.update_data_source(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.shopping.merchant_datasources_v1beta.types.UpdateDataSourceRequest, dict]]): + The request object. Request message for the + UpdateDataSource method. + data_source (:class:`google.shopping.merchant_datasources_v1beta.types.DataSource`): + Required. The data source resource to + update. + + This corresponds to the ``data_source`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): + Required. The list of data source fields to be updated. + + Fields specified in the update mask without a value + specified in the body will be deleted from the data + source. + + Providing special "*" value for full data source + replacement is not supported. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.shopping.merchant_datasources_v1beta.types.DataSource: + The [data source](\ https://support.google.com/merchants/answer/7439058) for + the Merchant Center account. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([data_source, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, datasources.UpdateDataSourceRequest): + request = datasources.UpdateDataSourceRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if data_source is not None: + request.data_source = data_source + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.update_data_source + ] + + # Certain fields should be provided within the metadata header; + # add these here. 
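Because fields named in the update mask but left unset on the resource are cleared, the mask should list exactly the fields being changed. A hedged sketch using a `FieldMask` limited to `display_name` (the client and resource name are assumed to come from the caller):

```python
from google.protobuf import field_mask_pb2

from google.shopping import merchant_datasources_v1beta


async def rename_data_source(client, name):
    # Only display_name is listed in the mask, so only that field changes;
    # a field listed in the mask but left unset here would be cleared.
    data_source = merchant_datasources_v1beta.DataSource(
        name=name,  # e.g. "accounts/123/dataSources/456" (placeholder)
        display_name="New display name",
    )
    update_mask = field_mask_pb2.FieldMask(paths=["display_name"])
    return await client.update_data_source(
        data_source=data_source, update_mask=update_mask
    )
```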
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("data_source.name", request.data_source.name),) + ), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def delete_data_source( + self, + request: Optional[Union[datasources.DeleteDataSourceRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a data source from your Merchant Center + account. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.shopping import merchant_datasources_v1beta + + async def sample_delete_data_source(): + # Create a client + client = merchant_datasources_v1beta.DataSourcesServiceAsyncClient() + + # Initialize request argument(s) + request = merchant_datasources_v1beta.DeleteDataSourceRequest( + name="name_value", + ) + + # Make the request + await client.delete_data_source(request=request) + + Args: + request (Optional[Union[google.shopping.merchant_datasources_v1beta.types.DeleteDataSourceRequest, dict]]): + The request object. Request message for the + DeleteDataSource method. + name (:class:`str`): + Required. The name of the data source to delete. Format: + ``accounts/{account}/dataSources/{datasource}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, datasources.DeleteDataSourceRequest): + request = datasources.DeleteDataSourceRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.delete_data_source + ] + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def fetch_data_source( + self, + request: Optional[Union[datasources.FetchDataSourceRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Performs the data fetch immediately (even outside + fetch schedule) on a data source from your Merchant + Center Account. If you need to call this method more + than once per day, you should use the Products service + to update your product data instead. + This method only works on data sources with a file input + set. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.shopping import merchant_datasources_v1beta + + async def sample_fetch_data_source(): + # Create a client + client = merchant_datasources_v1beta.DataSourcesServiceAsyncClient() + + # Initialize request argument(s) + request = merchant_datasources_v1beta.FetchDataSourceRequest( + name="name_value", + ) + + # Make the request + await client.fetch_data_source(request=request) + + Args: + request (Optional[Union[google.shopping.merchant_datasources_v1beta.types.FetchDataSourceRequest, dict]]): + The request object. Request message for the + FetchDataSource method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, datasources.FetchDataSourceRequest): + request = datasources.FetchDataSourceRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.fetch_data_source + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. 
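`fetch_data_source` triggers an immediate fetch and only applies to data sources with a file input. A sketch that also uses the async context manager defined further below, so the transport is closed on exit; the resource name is a placeholder:

```python
import asyncio

from google.shopping import merchant_datasources_v1beta


async def main():
    # The async client is also an async context manager, so the underlying
    # transport is closed when the block exits.
    async with merchant_datasources_v1beta.DataSourcesServiceAsyncClient() as client:
        request = merchant_datasources_v1beta.FetchDataSourceRequest(
            name="accounts/123/dataSources/456",  # placeholder
        )
        await client.fetch_data_source(request=request)


asyncio.run(main())
```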
+ await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def __aenter__(self) -> "DataSourcesServiceAsyncClient": + return self + + async def __aexit__(self, exc_type, exc, tb): + await self.transport.close() + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("DataSourcesServiceAsyncClient",) diff --git a/packages/google-shopping-merchant-datasources/google/shopping/merchant_datasources_v1beta/services/data_sources_service/client.py b/packages/google-shopping-merchant-datasources/google/shopping/merchant_datasources_v1beta/services/data_sources_service/client.py new file mode 100644 index 000000000000..ed60ec01a9a6 --- /dev/null +++ b/packages/google-shopping-merchant-datasources/google/shopping/merchant_datasources_v1beta/services/data_sources_service/client.py @@ -0,0 +1,1346 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +import os +import re +from typing import ( + Callable, + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, + cast, +) +import warnings + +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.shopping.merchant_datasources_v1beta import gapic_version as package_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + +from google.protobuf import field_mask_pb2 # type: ignore + +from google.shopping.merchant_datasources_v1beta.services.data_sources_service import ( + pagers, +) +from google.shopping.merchant_datasources_v1beta.types import ( + datasources, + datasourcetypes, + fileinputs, +) + +from .transports.base import DEFAULT_CLIENT_INFO, DataSourcesServiceTransport +from .transports.grpc import DataSourcesServiceGrpcTransport +from .transports.grpc_asyncio import DataSourcesServiceGrpcAsyncIOTransport +from .transports.rest import DataSourcesServiceRestTransport + + +class DataSourcesServiceClientMeta(type): + """Metaclass for the DataSourcesService client. + + This provides class-level methods for building and retrieving + support objects (e.g. transport) without polluting the client instance + objects. 
+    """
+
+    _transport_registry = (
+        OrderedDict()
+    )  # type: Dict[str, Type[DataSourcesServiceTransport]]
+    _transport_registry["grpc"] = DataSourcesServiceGrpcTransport
+    _transport_registry["grpc_asyncio"] = DataSourcesServiceGrpcAsyncIOTransport
+    _transport_registry["rest"] = DataSourcesServiceRestTransport
+
+    def get_transport_class(
+        cls,
+        label: Optional[str] = None,
+    ) -> Type[DataSourcesServiceTransport]:
+        """Returns an appropriate transport class.
+
+        Args:
+            label: The name of the desired transport. If none is
+                provided, then the first transport in the registry is used.
+
+        Returns:
+            The transport class to use.
+        """
+        # If a specific transport is requested, return that one.
+        if label:
+            return cls._transport_registry[label]
+
+        # No transport is requested; return the default (that is, the first one
+        # in the dictionary).
+        return next(iter(cls._transport_registry.values()))
+
+
+class DataSourcesServiceClient(metaclass=DataSourcesServiceClientMeta):
+    """Service to manage primary, supplemental, inventory and other data
+    sources. See more in the `Merchant
+    Center `__ help
+    article.
+    """
+
+    @staticmethod
+    def _get_default_mtls_endpoint(api_endpoint):
+        """Converts api endpoint to mTLS endpoint.
+
+        Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to
+        "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively.
+        Args:
+            api_endpoint (Optional[str]): the api endpoint to convert.
+        Returns:
+            str: converted mTLS api endpoint.
+        """
+        if not api_endpoint:
+            return api_endpoint
+
+        mtls_endpoint_re = re.compile(
+            r"(?P<name>[^.]+)(?P<mtls>\.mtls)?(?P<sandbox>\.sandbox)?(?P<googledomain>\.googleapis\.com)?"
+        )
+
+        m = mtls_endpoint_re.match(api_endpoint)
+        name, mtls, sandbox, googledomain = m.groups()
+        if mtls or not googledomain:
+            return api_endpoint
+
+        if sandbox:
+            return api_endpoint.replace(
+                "sandbox.googleapis.com", "mtls.sandbox.googleapis.com"
+            )
+
+        return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com")
+
+    # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead.
+    DEFAULT_ENDPOINT = "merchantapi.googleapis.com"
+    DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__(  # type: ignore
+        DEFAULT_ENDPOINT
+    )
+
+    _DEFAULT_ENDPOINT_TEMPLATE = "merchantapi.{UNIVERSE_DOMAIN}"
+    _DEFAULT_UNIVERSE = "googleapis.com"
+
+    @classmethod
+    def from_service_account_info(cls, info: dict, *args, **kwargs):
+        """Creates an instance of this client using the provided credentials
+            info.
+
+        Args:
+            info (dict): The service account private key info.
+            args: Additional arguments to pass to the constructor.
+            kwargs: Additional arguments to pass to the constructor.
+
+        Returns:
+            DataSourcesServiceClient: The constructed client.
+        """
+        credentials = service_account.Credentials.from_service_account_info(info)
+        kwargs["credentials"] = credentials
+        return cls(*args, **kwargs)
+
+    @classmethod
+    def from_service_account_file(cls, filename: str, *args, **kwargs):
+        """Creates an instance of this client using the provided credentials
+            file.
+
+        Args:
+            filename (str): The path to the service account private key json
+                file.
+            args: Additional arguments to pass to the constructor.
+            kwargs: Additional arguments to pass to the constructor.
+
+        Returns:
+            DataSourcesServiceClient: The constructed client.
+        """
+        credentials = service_account.Credentials.from_service_account_file(filename)
+        kwargs["credentials"] = credentials
+        return cls(*args, **kwargs)
+
+    from_service_account_json = from_service_account_file
+
+    @property
+    def transport(self) -> DataSourcesServiceTransport:
+        """Returns the transport used by the client instance.
+
+        Returns:
+            DataSourcesServiceTransport: The transport used by the client
+                instance.
+        """
+        return self._transport
+
+    @staticmethod
+    def data_source_path(
+        account: str,
+        datasource: str,
+    ) -> str:
+        """Returns a fully-qualified data_source string."""
+        return "accounts/{account}/dataSources/{datasource}".format(
+            account=account,
+            datasource=datasource,
+        )
+
+    @staticmethod
+    def parse_data_source_path(path: str) -> Dict[str, str]:
+        """Parses a data_source path into its component segments."""
+        m = re.match(
+            r"^accounts/(?P<account>.+?)/dataSources/(?P<datasource>.+?)$", path
+        )
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_billing_account_path(
+        billing_account: str,
+    ) -> str:
+        """Returns a fully-qualified billing_account string."""
+        return "billingAccounts/{billing_account}".format(
+            billing_account=billing_account,
+        )
+
+    @staticmethod
+    def parse_common_billing_account_path(path: str) -> Dict[str, str]:
+        """Parse a billing_account path into its component segments."""
+        m = re.match(r"^billingAccounts/(?P<billing_account>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_folder_path(
+        folder: str,
+    ) -> str:
+        """Returns a fully-qualified folder string."""
+        return "folders/{folder}".format(
+            folder=folder,
+        )
+
+    @staticmethod
+    def parse_common_folder_path(path: str) -> Dict[str, str]:
+        """Parse a folder path into its component segments."""
+        m = re.match(r"^folders/(?P<folder>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_organization_path(
+        organization: str,
+    ) -> str:
+        """Returns a fully-qualified organization string."""
+        return "organizations/{organization}".format(
+            organization=organization,
+        )
+
+    @staticmethod
+    def parse_common_organization_path(path: str) -> Dict[str, str]:
+        """Parse an organization path into its component segments."""
+        m = re.match(r"^organizations/(?P<organization>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_project_path(
+        project: str,
+    ) -> str:
+        """Returns a fully-qualified project string."""
+        return "projects/{project}".format(
+            project=project,
+        )
+
+    @staticmethod
+    def parse_common_project_path(path: str) -> Dict[str, str]:
+        """Parse a project path into its component segments."""
+        m = re.match(r"^projects/(?P<project>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_location_path(
+        project: str,
+        location: str,
+    ) -> str:
+        """Returns a fully-qualified location string."""
+        return "projects/{project}/locations/{location}".format(
+            project=project,
+            location=location,
+        )
+
+    @staticmethod
+    def parse_common_location_path(path: str) -> Dict[str, str]:
+        """Parse a location path into its component segments."""
+        m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @classmethod
+    def get_mtls_endpoint_and_cert_source(
+        cls, client_options: Optional[client_options_lib.ClientOptions] = None
+    ):
+        """Deprecated. Return the API endpoint and client cert source for mutual TLS.
+ + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + + warnings.warn( + "get_mtls_endpoint_and_cert_source is deprecated. Use the api_endpoint property instead.", + DeprecationWarning, + ) + if client_options is None: + client_options = client_options_lib.ClientOptions() + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_client_cert not in ("true", "false"): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError( + "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Figure out the client cert source to use. + client_cert_source = None + if use_client_cert == "true": + if client_options.client_cert_source: + client_cert_source = client_options.client_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + + # Figure out which api endpoint to use. + if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + elif use_mtls_endpoint == "always" or ( + use_mtls_endpoint == "auto" and client_cert_source + ): + api_endpoint = cls.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = cls.DEFAULT_ENDPOINT + + return api_endpoint, client_cert_source + + @staticmethod + def _read_environment_variables(): + """Returns the environment variables used by the client. + + Returns: + Tuple[bool, str, str]: returns the GOOGLE_API_USE_CLIENT_CERTIFICATE, + GOOGLE_API_USE_MTLS_ENDPOINT, and GOOGLE_CLOUD_UNIVERSE_DOMAIN environment variables. + + Raises: + ValueError: If GOOGLE_API_USE_CLIENT_CERTIFICATE is not + any of ["true", "false"]. + google.auth.exceptions.MutualTLSChannelError: If GOOGLE_API_USE_MTLS_ENDPOINT + is not any of ["auto", "never", "always"]. 
+ """ + use_client_cert = os.getenv( + "GOOGLE_API_USE_CLIENT_CERTIFICATE", "false" + ).lower() + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto").lower() + universe_domain_env = os.getenv("GOOGLE_CLOUD_UNIVERSE_DOMAIN") + if use_client_cert not in ("true", "false"): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError( + "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + return use_client_cert == "true", use_mtls_endpoint, universe_domain_env + + @staticmethod + def _get_client_cert_source(provided_cert_source, use_cert_flag): + """Return the client cert source to be used by the client. + + Args: + provided_cert_source (bytes): The client certificate source provided. + use_cert_flag (bool): A flag indicating whether to use the client certificate. + + Returns: + bytes or None: The client cert source to be used by the client. + """ + client_cert_source = None + if use_cert_flag: + if provided_cert_source: + client_cert_source = provided_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + return client_cert_source + + @staticmethod + def _get_api_endpoint( + api_override, client_cert_source, universe_domain, use_mtls_endpoint + ): + """Return the API endpoint used by the client. + + Args: + api_override (str): The API endpoint override. If specified, this is always + the return value of this function and the other arguments are not used. + client_cert_source (bytes): The client certificate source used by the client. + universe_domain (str): The universe domain used by the client. + use_mtls_endpoint (str): How to use the mTLS endpoint, which depends also on the other parameters. + Possible values are "always", "auto", or "never". + + Returns: + str: The API endpoint to be used by the client. + """ + if api_override is not None: + api_endpoint = api_override + elif use_mtls_endpoint == "always" or ( + use_mtls_endpoint == "auto" and client_cert_source + ): + _default_universe = DataSourcesServiceClient._DEFAULT_UNIVERSE + if universe_domain != _default_universe: + raise MutualTLSChannelError( + f"mTLS is not supported in any universe other than {_default_universe}." + ) + api_endpoint = DataSourcesServiceClient.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = DataSourcesServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=universe_domain + ) + return api_endpoint + + @staticmethod + def _get_universe_domain( + client_universe_domain: Optional[str], universe_domain_env: Optional[str] + ) -> str: + """Return the universe domain used by the client. + + Args: + client_universe_domain (Optional[str]): The universe domain configured via the client options. + universe_domain_env (Optional[str]): The universe domain configured via the "GOOGLE_CLOUD_UNIVERSE_DOMAIN" environment variable. + + Returns: + str: The universe domain to be used by the client. + + Raises: + ValueError: If the universe domain is an empty string. 
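A sketch of how the endpoint and universe-domain resolution above plays out, assuming Application Default Credentials are available and a google-api-core version that accepts the `universe_domain` client option; the printed values are the expected defaults, not guaranteed output:

```python
import os

from google.api_core.client_options import ClientOptions
from google.shopping import merchant_datasources_v1beta

# Environment-level default, read by _read_environment_variables above.
os.environ["GOOGLE_CLOUD_UNIVERSE_DOMAIN"] = "googleapis.com"

# An explicit client option takes precedence over the environment variable;
# without api_endpoint or mTLS overrides the endpoint becomes
# "merchantapi.<universe domain>".
client = merchant_datasources_v1beta.DataSourcesServiceClient(
    client_options=ClientOptions(universe_domain="googleapis.com"),
)
print(client.universe_domain)  # googleapis.com
print(client.api_endpoint)     # merchantapi.googleapis.com
```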
+ """ + universe_domain = DataSourcesServiceClient._DEFAULT_UNIVERSE + if client_universe_domain is not None: + universe_domain = client_universe_domain + elif universe_domain_env is not None: + universe_domain = universe_domain_env + if len(universe_domain.strip()) == 0: + raise ValueError("Universe Domain cannot be an empty string.") + return universe_domain + + @staticmethod + def _compare_universes( + client_universe: str, credentials: ga_credentials.Credentials + ) -> bool: + """Returns True iff the universe domains used by the client and credentials match. + + Args: + client_universe (str): The universe domain configured via the client options. + credentials (ga_credentials.Credentials): The credentials being used in the client. + + Returns: + bool: True iff client_universe matches the universe in credentials. + + Raises: + ValueError: when client_universe does not match the universe in credentials. + """ + + default_universe = DataSourcesServiceClient._DEFAULT_UNIVERSE + credentials_universe = getattr(credentials, "universe_domain", default_universe) + + if client_universe != credentials_universe: + raise ValueError( + "The configured universe domain " + f"({client_universe}) does not match the universe domain " + f"found in the credentials ({credentials_universe}). " + "If you haven't configured the universe domain explicitly, " + f"`{default_universe}` is the default." + ) + return True + + def _validate_universe_domain(self): + """Validates client's and credentials' universe domains are consistent. + + Returns: + bool: True iff the configured universe domain is valid. + + Raises: + ValueError: If the configured universe domain is not valid. + """ + self._is_universe_domain_valid = ( + self._is_universe_domain_valid + or DataSourcesServiceClient._compare_universes( + self.universe_domain, self.transport._credentials + ) + ) + return self._is_universe_domain_valid + + @property + def api_endpoint(self): + """Return the API endpoint used by the client instance. + + Returns: + str: The API endpoint used by the client instance. + """ + return self._api_endpoint + + @property + def universe_domain(self) -> str: + """Return the universe domain used by the client instance. + + Returns: + str: The universe domain used by the client instance. + """ + return self._universe_domain + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[ + Union[ + str, + DataSourcesServiceTransport, + Callable[..., DataSourcesServiceTransport], + ] + ] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the data sources service client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Optional[Union[str,DataSourcesServiceTransport,Callable[..., DataSourcesServiceTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the DataSourcesServiceTransport constructor. + If set to None, a transport is chosen automatically. 
+ client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): + Custom options for the client. + + 1. The ``api_endpoint`` property can be used to override the + default endpoint provided by the client when ``transport`` is + not explicitly provided. Only if this property is not set and + ``transport`` was not explicitly provided, the endpoint is + determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment + variable, which have one of the following values: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto-switch to the + default mTLS endpoint if client certificate is present; this is + the default value). + + 2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide a client certificate for mTLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + 3. The ``universe_domain`` property can be used to override the + default "googleapis.com" universe. Note that the ``api_endpoint`` + property still takes precedence; and ``universe_domain`` is + currently not supported for mTLS. + + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + """ + self._client_options = client_options + if isinstance(self._client_options, dict): + self._client_options = client_options_lib.from_dict(self._client_options) + if self._client_options is None: + self._client_options = client_options_lib.ClientOptions() + self._client_options = cast( + client_options_lib.ClientOptions, self._client_options + ) + + universe_domain_opt = getattr(self._client_options, "universe_domain", None) + + ( + self._use_client_cert, + self._use_mtls_endpoint, + self._universe_domain_env, + ) = DataSourcesServiceClient._read_environment_variables() + self._client_cert_source = DataSourcesServiceClient._get_client_cert_source( + self._client_options.client_cert_source, self._use_client_cert + ) + self._universe_domain = DataSourcesServiceClient._get_universe_domain( + universe_domain_opt, self._universe_domain_env + ) + self._api_endpoint = None # updated below, depending on `transport` + + # Initialize the universe domain validation. + self._is_universe_domain_valid = False + + api_key_value = getattr(self._client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError( + "client_options.api_key and credentials are mutually exclusive" + ) + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + transport_provided = isinstance(transport, DataSourcesServiceTransport) + if transport_provided: + # transport is a DataSourcesServiceTransport instance. + if credentials or self._client_options.credentials_file or api_key_value: + raise ValueError( + "When providing a transport instance, " + "provide its credentials directly." 
+ ) + if self._client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." + ) + self._transport = cast(DataSourcesServiceTransport, transport) + self._api_endpoint = self._transport.host + + self._api_endpoint = ( + self._api_endpoint + or DataSourcesServiceClient._get_api_endpoint( + self._client_options.api_endpoint, + self._client_cert_source, + self._universe_domain, + self._use_mtls_endpoint, + ) + ) + + if not transport_provided: + import google.auth._default # type: ignore + + if api_key_value and hasattr( + google.auth._default, "get_api_key_credentials" + ): + credentials = google.auth._default.get_api_key_credentials( + api_key_value + ) + + transport_init: Union[ + Type[DataSourcesServiceTransport], + Callable[..., DataSourcesServiceTransport], + ] = ( + type(self).get_transport_class(transport) + if isinstance(transport, str) or transport is None + else cast(Callable[..., DataSourcesServiceTransport], transport) + ) + # initialize with the provided callable or the passed in class + self._transport = transport_init( + credentials=credentials, + credentials_file=self._client_options.credentials_file, + host=self._api_endpoint, + scopes=self._client_options.scopes, + client_cert_source_for_mtls=self._client_cert_source, + quota_project_id=self._client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + api_audience=self._client_options.api_audience, + ) + + def get_data_source( + self, + request: Optional[Union[datasources.GetDataSourceRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> datasources.DataSource: + r"""Retrieves the data source configuration for the given + account. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.shopping import merchant_datasources_v1beta + + def sample_get_data_source(): + # Create a client + client = merchant_datasources_v1beta.DataSourcesServiceClient() + + # Initialize request argument(s) + request = merchant_datasources_v1beta.GetDataSourceRequest( + name="name_value", + ) + + # Make the request + response = client.get_data_source(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.shopping.merchant_datasources_v1beta.types.GetDataSourceRequest, dict]): + The request object. Request message for the GetDataSource + method. + name (str): + Required. The name of the data source to retrieve. + Format: ``accounts/{account}/dataSources/{datasource}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.shopping.merchant_datasources_v1beta.types.DataSource: + The [data source](\ https://support.google.com/merchants/answer/7439058) for + the Merchant Center account. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, datasources.GetDataSourceRequest): + request = datasources.GetDataSourceRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_data_source] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def list_data_sources( + self, + request: Optional[Union[datasources.ListDataSourcesRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListDataSourcesPager: + r"""Lists the configurations for data sources for the + given account. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.shopping import merchant_datasources_v1beta + + def sample_list_data_sources(): + # Create a client + client = merchant_datasources_v1beta.DataSourcesServiceClient() + + # Initialize request argument(s) + request = merchant_datasources_v1beta.ListDataSourcesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_data_sources(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.shopping.merchant_datasources_v1beta.types.ListDataSourcesRequest, dict]): + The request object. Request message for the + ListDataSources method. + parent (str): + Required. The account to list data sources for. Format: + ``accounts/{account}`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.shopping.merchant_datasources_v1beta.services.data_sources_service.pagers.ListDataSourcesPager: + Response message for the + ListDataSources method. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, datasources.ListDataSourcesRequest): + request = datasources.ListDataSourcesRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_data_sources] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListDataSourcesPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def create_data_source( + self, + request: Optional[Union[datasources.CreateDataSourceRequest, dict]] = None, + *, + parent: Optional[str] = None, + data_source: Optional[datasources.DataSource] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> datasources.DataSource: + r"""Creates the new data source configuration for the + given account. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
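Because ``list_data_sources`` above returns a ``ListDataSourcesPager``, callers rarely need to handle page tokens themselves. A minimal sketch, assuming a placeholder account name:

```python
from google.shopping import merchant_datasources_v1beta

client = merchant_datasources_v1beta.DataSourcesServiceClient()

# Item-wise iteration; the pager fetches further pages lazily.
for data_source in client.list_data_sources(parent="accounts/123"):
    print(data_source.name)

# Page-wise iteration via the ``pages`` property, when whole responses are needed.
for page in client.list_data_sources(parent="accounts/123").pages:
    print(len(page.data_sources))
```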
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.shopping import merchant_datasources_v1beta + + def sample_create_data_source(): + # Create a client + client = merchant_datasources_v1beta.DataSourcesServiceClient() + + # Initialize request argument(s) + data_source = merchant_datasources_v1beta.DataSource() + data_source.primary_product_data_source.channel = "PRODUCTS" + data_source.display_name = "display_name_value" + + request = merchant_datasources_v1beta.CreateDataSourceRequest( + parent="parent_value", + data_source=data_source, + ) + + # Make the request + response = client.create_data_source(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.shopping.merchant_datasources_v1beta.types.CreateDataSourceRequest, dict]): + The request object. Request message for the + CreateDataSource method. + parent (str): + Required. The account where this data source will be + created. Format: ``accounts/{account}`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + data_source (google.shopping.merchant_datasources_v1beta.types.DataSource): + Required. The data source to create. + This corresponds to the ``data_source`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.shopping.merchant_datasources_v1beta.types.DataSource: + The [data source](\ https://support.google.com/merchants/answer/7439058) for + the Merchant Center account. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, data_source]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, datasources.CreateDataSourceRequest): + request = datasources.CreateDataSourceRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if data_source is not None: + request.data_source = data_source + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_data_source] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def update_data_source( + self, + request: Optional[Union[datasources.UpdateDataSourceRequest, dict]] = None, + *, + data_source: Optional[datasources.DataSource] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> datasources.DataSource: + r"""Updates the existing data source configuration. The + fields that are set in the update mask but not provided + in the resource will be deleted. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.shopping import merchant_datasources_v1beta + + def sample_update_data_source(): + # Create a client + client = merchant_datasources_v1beta.DataSourcesServiceClient() + + # Initialize request argument(s) + data_source = merchant_datasources_v1beta.DataSource() + data_source.primary_product_data_source.channel = "PRODUCTS" + data_source.display_name = "display_name_value" + + request = merchant_datasources_v1beta.UpdateDataSourceRequest( + data_source=data_source, + ) + + # Make the request + response = client.update_data_source(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.shopping.merchant_datasources_v1beta.types.UpdateDataSourceRequest, dict]): + The request object. Request message for the + UpdateDataSource method. + data_source (google.shopping.merchant_datasources_v1beta.types.DataSource): + Required. The data source resource to + update. + + This corresponds to the ``data_source`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Required. The list of data source fields to be updated. + + Fields specified in the update mask without a value + specified in the body will be deleted from the data + source. + + Providing special "*" value for full data source + replacement is not supported. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.shopping.merchant_datasources_v1beta.types.DataSource: + The [data source](\ https://support.google.com/merchants/answer/7439058) for + the Merchant Center account. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([data_source, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." 
+ ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, datasources.UpdateDataSourceRequest): + request = datasources.UpdateDataSourceRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if data_source is not None: + request.data_source = data_source + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update_data_source] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("data_source.name", request.data_source.name),) + ), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def delete_data_source( + self, + request: Optional[Union[datasources.DeleteDataSourceRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a data source from your Merchant Center + account. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.shopping import merchant_datasources_v1beta + + def sample_delete_data_source(): + # Create a client + client = merchant_datasources_v1beta.DataSourcesServiceClient() + + # Initialize request argument(s) + request = merchant_datasources_v1beta.DeleteDataSourceRequest( + name="name_value", + ) + + # Make the request + client.delete_data_source(request=request) + + Args: + request (Union[google.shopping.merchant_datasources_v1beta.types.DeleteDataSourceRequest, dict]): + The request object. Request message for the + DeleteDataSource method. + name (str): + Required. The name of the data source to delete. Format: + ``accounts/{account}/dataSources/{datasource}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." 
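``update_data_source`` above takes a required ``FieldMask``; fields named in the mask but left unset on the resource are cleared, and the special ``"*"`` value is rejected. A minimal sketch that updates only ``display_name``, with placeholder identifiers:

```python
from google.protobuf import field_mask_pb2

from google.shopping import merchant_datasources_v1beta

client = merchant_datasources_v1beta.DataSourcesServiceClient()

data_source = merchant_datasources_v1beta.DataSource(
    name="accounts/123/dataSources/456",  # placeholder resource name
    display_name="Primary feed (renamed)",
)

# Only ``display_name`` is touched; adding other paths to the mask without
# setting them on ``data_source`` would clear those fields server-side.
response = client.update_data_source(
    data_source=data_source,
    update_mask=field_mask_pb2.FieldMask(paths=["display_name"]),
)
print(response.display_name)
```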
+ ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, datasources.DeleteDataSourceRequest): + request = datasources.DeleteDataSourceRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_data_source] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def fetch_data_source( + self, + request: Optional[Union[datasources.FetchDataSourceRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Performs the data fetch immediately (even outside + fetch schedule) on a data source from your Merchant + Center Account. If you need to call this method more + than once per day, you should use the Products service + to update your product data instead. + This method only works on data sources with a file input + set. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.shopping import merchant_datasources_v1beta + + def sample_fetch_data_source(): + # Create a client + client = merchant_datasources_v1beta.DataSourcesServiceClient() + + # Initialize request argument(s) + request = merchant_datasources_v1beta.FetchDataSourceRequest( + name="name_value", + ) + + # Make the request + client.fetch_data_source(request=request) + + Args: + request (Union[google.shopping.merchant_datasources_v1beta.types.FetchDataSourceRequest, dict]): + The request object. Request message for the + FetchDataSource method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, datasources.FetchDataSourceRequest): + request = datasources.FetchDataSourceRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.fetch_data_source] + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def __enter__(self) -> "DataSourcesServiceClient": + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! + """ + self.transport.close() + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("DataSourcesServiceClient",) diff --git a/packages/google-shopping-merchant-datasources/google/shopping/merchant_datasources_v1beta/services/data_sources_service/pagers.py b/packages/google-shopping-merchant-datasources/google/shopping/merchant_datasources_v1beta/services/data_sources_service/pagers.py new file mode 100644 index 000000000000..ce367e247b23 --- /dev/null +++ b/packages/google-shopping-merchant-datasources/google/shopping/merchant_datasources_v1beta/services/data_sources_service/pagers.py @@ -0,0 +1,155 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import ( + Any, + AsyncIterator, + Awaitable, + Callable, + Iterator, + Optional, + Sequence, + Tuple, +) + +from google.shopping.merchant_datasources_v1beta.types import datasources + + +class ListDataSourcesPager: + """A pager for iterating through ``list_data_sources`` requests. + + This class thinly wraps an initial + :class:`google.shopping.merchant_datasources_v1beta.types.ListDataSourcesResponse` object, and + provides an ``__iter__`` method to iterate through its + ``data_sources`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListDataSources`` requests and continue to iterate + through the ``data_sources`` field on the + corresponding responses. + + All the usual :class:`google.shopping.merchant_datasources_v1beta.types.ListDataSourcesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., datasources.ListDataSourcesResponse], + request: datasources.ListDataSourcesRequest, + response: datasources.ListDataSourcesResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.shopping.merchant_datasources_v1beta.types.ListDataSourcesRequest): + The initial request object. 
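Since ``__exit__`` above closes the transport, the client can be used as a context manager when its transport is not shared. A minimal sketch, with a placeholder resource name:

```python
from google.shopping import merchant_datasources_v1beta

# Exiting the ``with`` block closes the underlying transport; avoid this
# pattern if the transport is shared with other clients.
with merchant_datasources_v1beta.DataSourcesServiceClient() as client:
    client.fetch_data_source(
        request=merchant_datasources_v1beta.FetchDataSourceRequest(
            name="accounts/123/dataSources/456"  # placeholder resource name
        )
    )
```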
+ response (google.shopping.merchant_datasources_v1beta.types.ListDataSourcesResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = datasources.ListDataSourcesRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[datasources.ListDataSourcesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[datasources.DataSource]: + for page in self.pages: + yield from page.data_sources + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListDataSourcesAsyncPager: + """A pager for iterating through ``list_data_sources`` requests. + + This class thinly wraps an initial + :class:`google.shopping.merchant_datasources_v1beta.types.ListDataSourcesResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``data_sources`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListDataSources`` requests and continue to iterate + through the ``data_sources`` field on the + corresponding responses. + + All the usual :class:`google.shopping.merchant_datasources_v1beta.types.ListDataSourcesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., Awaitable[datasources.ListDataSourcesResponse]], + request: datasources.ListDataSourcesRequest, + response: datasources.ListDataSourcesResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.shopping.merchant_datasources_v1beta.types.ListDataSourcesRequest): + The initial request object. + response (google.shopping.merchant_datasources_v1beta.types.ListDataSourcesResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = datasources.ListDataSourcesRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[datasources.ListDataSourcesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + + def __aiter__(self) -> AsyncIterator[datasources.DataSource]: + async def async_generator(): + async for page in self.pages: + for response in page.data_sources: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) diff --git a/packages/google-shopping-merchant-datasources/google/shopping/merchant_datasources_v1beta/services/data_sources_service/transports/__init__.py b/packages/google-shopping-merchant-datasources/google/shopping/merchant_datasources_v1beta/services/data_sources_service/transports/__init__.py new file mode 100644 index 000000000000..45d896be22b3 --- /dev/null +++ b/packages/google-shopping-merchant-datasources/google/shopping/merchant_datasources_v1beta/services/data_sources_service/transports/__init__.py @@ -0,0 +1,38 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +from typing import Dict, Type + +from .base import DataSourcesServiceTransport +from .grpc import DataSourcesServiceGrpcTransport +from .grpc_asyncio import DataSourcesServiceGrpcAsyncIOTransport +from .rest import DataSourcesServiceRestInterceptor, DataSourcesServiceRestTransport + +# Compile a registry of transports. 
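``ListDataSourcesAsyncPager`` above mirrors the synchronous pager for the asyncio surface. A minimal consumption sketch, assuming the package's ``DataSourcesServiceAsyncClient`` (defined outside this section, as is conventional for generated GAPIC clients) and a placeholder account:

```python
import asyncio

from google.shopping import merchant_datasources_v1beta


async def list_all_data_sources() -> None:
    # Assumes DataSourcesServiceAsyncClient is exported by the package.
    client = merchant_datasources_v1beta.DataSourcesServiceAsyncClient()
    pager = await client.list_data_sources(parent="accounts/123")  # placeholder
    async for data_source in pager:  # additional pages are fetched lazily
        print(data_source.name)


asyncio.run(list_all_data_sources())
```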
+_transport_registry = ( + OrderedDict() +) # type: Dict[str, Type[DataSourcesServiceTransport]] +_transport_registry["grpc"] = DataSourcesServiceGrpcTransport +_transport_registry["grpc_asyncio"] = DataSourcesServiceGrpcAsyncIOTransport +_transport_registry["rest"] = DataSourcesServiceRestTransport + +__all__ = ( + "DataSourcesServiceTransport", + "DataSourcesServiceGrpcTransport", + "DataSourcesServiceGrpcAsyncIOTransport", + "DataSourcesServiceRestTransport", + "DataSourcesServiceRestInterceptor", +) diff --git a/packages/google-shopping-merchant-datasources/google/shopping/merchant_datasources_v1beta/services/data_sources_service/transports/base.py b/packages/google-shopping-merchant-datasources/google/shopping/merchant_datasources_v1beta/services/data_sources_service/transports/base.py new file mode 100644 index 000000000000..0e0ad864077e --- /dev/null +++ b/packages/google-shopping-merchant-datasources/google/shopping/merchant_datasources_v1beta/services/data_sources_service/transports/base.py @@ -0,0 +1,233 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import abc +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union + +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore +from google.protobuf import empty_pb2 # type: ignore + +from google.shopping.merchant_datasources_v1beta import gapic_version as package_version +from google.shopping.merchant_datasources_v1beta.types import datasources + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +class DataSourcesServiceTransport(abc.ABC): + """Abstract transport class for DataSourcesService.""" + + AUTH_SCOPES = ("https://www.googleapis.com/auth/content",) + + DEFAULT_HOST: str = "merchantapi.googleapis.com" + + def __init__( + self, + *, + host: str = DEFAULT_HOST, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + **kwargs, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'merchantapi.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. 
These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A list of scopes. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + """ + + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} + + # Save the scopes. + self._scopes = scopes + + # If no credentials are provided, then determine the appropriate + # defaults. + if credentials and credentials_file: + raise core_exceptions.DuplicateCredentialArgs( + "'credentials_file' and 'credentials' are mutually exclusive" + ) + + if credentials_file is not None: + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, **scopes_kwargs, quota_project_id=quota_project_id + ) + elif credentials is None: + credentials, _ = google.auth.default( + **scopes_kwargs, quota_project_id=quota_project_id + ) + # Don't apply audience if the credentials file passed from user. + if hasattr(credentials, "with_gdch_audience"): + credentials = credentials.with_gdch_audience( + api_audience if api_audience else host + ) + + # If the credentials are service account credentials, then always try to use self signed JWT. + if ( + always_use_jwt_access + and isinstance(credentials, service_account.Credentials) + and hasattr(service_account.Credentials, "with_always_use_jwt_access") + ): + credentials = credentials.with_always_use_jwt_access(True) + + # Save the credentials. + self._credentials = credentials + + # Save the hostname. Default to port 443 (HTTPS) if none is specified. + if ":" not in host: + host += ":443" + self._host = host + + @property + def host(self): + return self._host + + def _prep_wrapped_messages(self, client_info): + # Precompute the wrapped methods. + self._wrapped_methods = { + self.get_data_source: gapic_v1.method.wrap_method( + self.get_data_source, + default_timeout=None, + client_info=client_info, + ), + self.list_data_sources: gapic_v1.method.wrap_method( + self.list_data_sources, + default_timeout=None, + client_info=client_info, + ), + self.create_data_source: gapic_v1.method.wrap_method( + self.create_data_source, + default_timeout=None, + client_info=client_info, + ), + self.update_data_source: gapic_v1.method.wrap_method( + self.update_data_source, + default_timeout=None, + client_info=client_info, + ), + self.delete_data_source: gapic_v1.method.wrap_method( + self.delete_data_source, + default_timeout=None, + client_info=client_info, + ), + self.fetch_data_source: gapic_v1.method.wrap_method( + self.fetch_data_source, + default_timeout=None, + client_info=client_info, + ), + } + + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! 
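``_prep_wrapped_messages`` above wraps every RPC with ``gapic_v1.method.wrap_method`` and no default timeout, so retry and timeout are most usefully supplied per call. A minimal sketch with a placeholder resource name:

```python
from google.api_core import retry as retries

from google.shopping import merchant_datasources_v1beta

client = merchant_datasources_v1beta.DataSourcesServiceClient()

# Per-call overrides flow through the wrapped method on the transport.
data_source = client.get_data_source(
    name="accounts/123/dataSources/456",  # placeholder resource name
    retry=retries.Retry(initial=0.5, maximum=10.0),
    timeout=30.0,
)
```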
+ """ + raise NotImplementedError() + + @property + def get_data_source( + self, + ) -> Callable[ + [datasources.GetDataSourceRequest], + Union[datasources.DataSource, Awaitable[datasources.DataSource]], + ]: + raise NotImplementedError() + + @property + def list_data_sources( + self, + ) -> Callable[ + [datasources.ListDataSourcesRequest], + Union[ + datasources.ListDataSourcesResponse, + Awaitable[datasources.ListDataSourcesResponse], + ], + ]: + raise NotImplementedError() + + @property + def create_data_source( + self, + ) -> Callable[ + [datasources.CreateDataSourceRequest], + Union[datasources.DataSource, Awaitable[datasources.DataSource]], + ]: + raise NotImplementedError() + + @property + def update_data_source( + self, + ) -> Callable[ + [datasources.UpdateDataSourceRequest], + Union[datasources.DataSource, Awaitable[datasources.DataSource]], + ]: + raise NotImplementedError() + + @property + def delete_data_source( + self, + ) -> Callable[ + [datasources.DeleteDataSourceRequest], + Union[empty_pb2.Empty, Awaitable[empty_pb2.Empty]], + ]: + raise NotImplementedError() + + @property + def fetch_data_source( + self, + ) -> Callable[ + [datasources.FetchDataSourceRequest], + Union[empty_pb2.Empty, Awaitable[empty_pb2.Empty]], + ]: + raise NotImplementedError() + + @property + def kind(self) -> str: + raise NotImplementedError() + + +__all__ = ("DataSourcesServiceTransport",) diff --git a/packages/google-maps-mapsplatformdatasets/google/maps/mapsplatformdatasets_v1alpha/services/maps_platform_datasets_v1_alpha/transports/grpc.py b/packages/google-shopping-merchant-datasources/google/shopping/merchant_datasources_v1beta/services/data_sources_service/transports/grpc.py similarity index 66% rename from packages/google-maps-mapsplatformdatasets/google/maps/mapsplatformdatasets_v1alpha/services/maps_platform_datasets_v1_alpha/transports/grpc.py rename to packages/google-shopping-merchant-datasources/google/shopping/merchant_datasources_v1beta/services/data_sources_service/transports/grpc.py index a933d25876ff..e3bdf05277d3 100644 --- a/packages/google-maps-mapsplatformdatasets/google/maps/mapsplatformdatasets_v1alpha/services/maps_platform_datasets_v1_alpha/transports/grpc.py +++ b/packages/google-shopping-merchant-datasources/google/shopping/merchant_datasources_v1beta/services/data_sources_service/transports/grpc.py @@ -23,17 +23,18 @@ from google.protobuf import empty_pb2 # type: ignore import grpc # type: ignore -from google.maps.mapsplatformdatasets_v1alpha.types import dataset as gmm_dataset -from google.maps.mapsplatformdatasets_v1alpha.types import maps_platform_datasets -from google.maps.mapsplatformdatasets_v1alpha.types import dataset +from google.shopping.merchant_datasources_v1beta.types import datasources -from .base import DEFAULT_CLIENT_INFO, MapsPlatformDatasetsV1AlphaTransport +from .base import DEFAULT_CLIENT_INFO, DataSourcesServiceTransport -class MapsPlatformDatasetsV1AlphaGrpcTransport(MapsPlatformDatasetsV1AlphaTransport): - """gRPC backend transport for MapsPlatformDatasetsV1Alpha. +class DataSourcesServiceGrpcTransport(DataSourcesServiceTransport): + """gRPC backend transport for DataSourcesService. - Service definition for the Maps Platform Datasets API. + Service to manage primary, supplemental, inventory and other data + sources. See more in the `Merchant + Center `__ help + article. 
This class defines the same methods as the primary client, so the primary client can load the underlying transport implementation @@ -48,7 +49,7 @@ class MapsPlatformDatasetsV1AlphaGrpcTransport(MapsPlatformDatasetsV1AlphaTransp def __init__( self, *, - host: str = "mapsplatformdatasets.googleapis.com", + host: str = "merchantapi.googleapis.com", credentials: Optional[ga_credentials.Credentials] = None, credentials_file: Optional[str] = None, scopes: Optional[Sequence[str]] = None, @@ -66,7 +67,7 @@ def __init__( Args: host (Optional[str]): - The hostname to connect to (default: 'mapsplatformdatasets.googleapis.com'). + The hostname to connect to (default: 'merchantapi.googleapis.com'). credentials (Optional[google.auth.credentials.Credentials]): The authorization credentials to attach to requests. These credentials identify the application to the service; if none @@ -187,7 +188,7 @@ def __init__( @classmethod def create_channel( cls, - host: str = "mapsplatformdatasets.googleapis.com", + host: str = "merchantapi.googleapis.com", credentials: Optional[ga_credentials.Credentials] = None, credentials_file: Optional[str] = None, scopes: Optional[Sequence[str]] = None, @@ -237,16 +238,17 @@ def grpc_channel(self) -> grpc.Channel: return self._grpc_channel @property - def create_dataset( + def get_data_source( self, - ) -> Callable[[maps_platform_datasets.CreateDatasetRequest], gmm_dataset.Dataset]: - r"""Return a callable for the create dataset method over gRPC. + ) -> Callable[[datasources.GetDataSourceRequest], datasources.DataSource]: + r"""Return a callable for the get data source method over gRPC. - Create a new dataset for the specified project. + Retrieves the data source configuration for the given + account. Returns: - Callable[[~.CreateDatasetRequest], - ~.Dataset]: + Callable[[~.GetDataSourceRequest], + ~.DataSource]: A function that, when called, will call the underlying RPC on the server. """ @@ -254,28 +256,28 @@ def create_dataset( # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if "create_dataset" not in self._stubs: - self._stubs["create_dataset"] = self.grpc_channel.unary_unary( - "/google.maps.mapsplatformdatasets.v1alpha.MapsPlatformDatasetsV1Alpha/CreateDataset", - request_serializer=maps_platform_datasets.CreateDatasetRequest.serialize, - response_deserializer=gmm_dataset.Dataset.deserialize, + if "get_data_source" not in self._stubs: + self._stubs["get_data_source"] = self.grpc_channel.unary_unary( + "/google.shopping.merchant.datasources.v1beta.DataSourcesService/GetDataSource", + request_serializer=datasources.GetDataSourceRequest.serialize, + response_deserializer=datasources.DataSource.deserialize, ) - return self._stubs["create_dataset"] + return self._stubs["get_data_source"] @property - def update_dataset_metadata( + def list_data_sources( self, ) -> Callable[ - [maps_platform_datasets.UpdateDatasetMetadataRequest], gmm_dataset.Dataset + [datasources.ListDataSourcesRequest], datasources.ListDataSourcesResponse ]: - r"""Return a callable for the update dataset metadata method over gRPC. + r"""Return a callable for the list data sources method over gRPC. - Update the metadata for the dataset. To update the - data use: UploadDataset. + Lists the configurations for data sources for the + given account. 
Returns: - Callable[[~.UpdateDatasetMetadataRequest], - ~.Dataset]: + Callable[[~.ListDataSourcesRequest], + ~.ListDataSourcesResponse]: A function that, when called, will call the underlying RPC on the server. """ @@ -283,25 +285,26 @@ def update_dataset_metadata( # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if "update_dataset_metadata" not in self._stubs: - self._stubs["update_dataset_metadata"] = self.grpc_channel.unary_unary( - "/google.maps.mapsplatformdatasets.v1alpha.MapsPlatformDatasetsV1Alpha/UpdateDatasetMetadata", - request_serializer=maps_platform_datasets.UpdateDatasetMetadataRequest.serialize, - response_deserializer=gmm_dataset.Dataset.deserialize, + if "list_data_sources" not in self._stubs: + self._stubs["list_data_sources"] = self.grpc_channel.unary_unary( + "/google.shopping.merchant.datasources.v1beta.DataSourcesService/ListDataSources", + request_serializer=datasources.ListDataSourcesRequest.serialize, + response_deserializer=datasources.ListDataSourcesResponse.deserialize, ) - return self._stubs["update_dataset_metadata"] + return self._stubs["list_data_sources"] @property - def get_dataset( + def create_data_source( self, - ) -> Callable[[maps_platform_datasets.GetDatasetRequest], dataset.Dataset]: - r"""Return a callable for the get dataset method over gRPC. + ) -> Callable[[datasources.CreateDataSourceRequest], datasources.DataSource]: + r"""Return a callable for the create data source method over gRPC. - Get the published or latest version of the dataset. + Creates the new data source configuration for the + given account. Returns: - Callable[[~.GetDatasetRequest], - ~.Dataset]: + Callable[[~.CreateDataSourceRequest], + ~.DataSource]: A function that, when called, will call the underlying RPC on the server. """ @@ -309,57 +312,27 @@ def get_dataset( # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if "get_dataset" not in self._stubs: - self._stubs["get_dataset"] = self.grpc_channel.unary_unary( - "/google.maps.mapsplatformdatasets.v1alpha.MapsPlatformDatasetsV1Alpha/GetDataset", - request_serializer=maps_platform_datasets.GetDatasetRequest.serialize, - response_deserializer=dataset.Dataset.deserialize, + if "create_data_source" not in self._stubs: + self._stubs["create_data_source"] = self.grpc_channel.unary_unary( + "/google.shopping.merchant.datasources.v1beta.DataSourcesService/CreateDataSource", + request_serializer=datasources.CreateDataSourceRequest.serialize, + response_deserializer=datasources.DataSource.deserialize, ) - return self._stubs["get_dataset"] + return self._stubs["create_data_source"] @property - def list_dataset_versions( + def update_data_source( self, - ) -> Callable[ - [maps_platform_datasets.ListDatasetVersionsRequest], - maps_platform_datasets.ListDatasetVersionsResponse, - ]: - r"""Return a callable for the list dataset versions method over gRPC. - - List all the versions of a dataset. - - Returns: - Callable[[~.ListDatasetVersionsRequest], - ~.ListDatasetVersionsResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if "list_dataset_versions" not in self._stubs: - self._stubs["list_dataset_versions"] = self.grpc_channel.unary_unary( - "/google.maps.mapsplatformdatasets.v1alpha.MapsPlatformDatasetsV1Alpha/ListDatasetVersions", - request_serializer=maps_platform_datasets.ListDatasetVersionsRequest.serialize, - response_deserializer=maps_platform_datasets.ListDatasetVersionsResponse.deserialize, - ) - return self._stubs["list_dataset_versions"] - - @property - def list_datasets( - self, - ) -> Callable[ - [maps_platform_datasets.ListDatasetsRequest], - maps_platform_datasets.ListDatasetsResponse, - ]: - r"""Return a callable for the list datasets method over gRPC. + ) -> Callable[[datasources.UpdateDataSourceRequest], datasources.DataSource]: + r"""Return a callable for the update data source method over gRPC. - List all the datasets for the specified project. + Updates the existing data source configuration. The + fields that are set in the update mask but not provided + in the resource will be deleted. Returns: - Callable[[~.ListDatasetsRequest], - ~.ListDatasetsResponse]: + Callable[[~.UpdateDataSourceRequest], + ~.DataSource]: A function that, when called, will call the underlying RPC on the server. """ @@ -367,25 +340,25 @@ def list_datasets( # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if "list_datasets" not in self._stubs: - self._stubs["list_datasets"] = self.grpc_channel.unary_unary( - "/google.maps.mapsplatformdatasets.v1alpha.MapsPlatformDatasetsV1Alpha/ListDatasets", - request_serializer=maps_platform_datasets.ListDatasetsRequest.serialize, - response_deserializer=maps_platform_datasets.ListDatasetsResponse.deserialize, + if "update_data_source" not in self._stubs: + self._stubs["update_data_source"] = self.grpc_channel.unary_unary( + "/google.shopping.merchant.datasources.v1beta.DataSourcesService/UpdateDataSource", + request_serializer=datasources.UpdateDataSourceRequest.serialize, + response_deserializer=datasources.DataSource.deserialize, ) - return self._stubs["list_datasets"] + return self._stubs["update_data_source"] @property - def delete_dataset( + def delete_data_source( self, - ) -> Callable[[maps_platform_datasets.DeleteDatasetRequest], empty_pb2.Empty]: - r"""Return a callable for the delete dataset method over gRPC. + ) -> Callable[[datasources.DeleteDataSourceRequest], empty_pb2.Empty]: + r"""Return a callable for the delete data source method over gRPC. - Delete the specified dataset and optionally all its - corresponding versions. + Deletes a data source from your Merchant Center + account. Returns: - Callable[[~.DeleteDatasetRequest], + Callable[[~.DeleteDataSourceRequest], ~.Empty]: A function that, when called, will call the underlying RPC on the server. @@ -394,26 +367,30 @@ def delete_dataset( # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
- if "delete_dataset" not in self._stubs: - self._stubs["delete_dataset"] = self.grpc_channel.unary_unary( - "/google.maps.mapsplatformdatasets.v1alpha.MapsPlatformDatasetsV1Alpha/DeleteDataset", - request_serializer=maps_platform_datasets.DeleteDatasetRequest.serialize, + if "delete_data_source" not in self._stubs: + self._stubs["delete_data_source"] = self.grpc_channel.unary_unary( + "/google.shopping.merchant.datasources.v1beta.DataSourcesService/DeleteDataSource", + request_serializer=datasources.DeleteDataSourceRequest.serialize, response_deserializer=empty_pb2.Empty.FromString, ) - return self._stubs["delete_dataset"] + return self._stubs["delete_data_source"] @property - def delete_dataset_version( + def fetch_data_source( self, - ) -> Callable[ - [maps_platform_datasets.DeleteDatasetVersionRequest], empty_pb2.Empty - ]: - r"""Return a callable for the delete dataset version method over gRPC. + ) -> Callable[[datasources.FetchDataSourceRequest], empty_pb2.Empty]: + r"""Return a callable for the fetch data source method over gRPC. - Delete a specific version of the dataset. + Performs the data fetch immediately (even outside + fetch schedule) on a data source from your Merchant + Center Account. If you need to call this method more + than once per day, you should use the Products service + to update your product data instead. + This method only works on data sources with a file input + set. Returns: - Callable[[~.DeleteDatasetVersionRequest], + Callable[[~.FetchDataSourceRequest], ~.Empty]: A function that, when called, will call the underlying RPC on the server. @@ -422,13 +399,13 @@ def delete_dataset_version( # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if "delete_dataset_version" not in self._stubs: - self._stubs["delete_dataset_version"] = self.grpc_channel.unary_unary( - "/google.maps.mapsplatformdatasets.v1alpha.MapsPlatformDatasetsV1Alpha/DeleteDatasetVersion", - request_serializer=maps_platform_datasets.DeleteDatasetVersionRequest.serialize, + if "fetch_data_source" not in self._stubs: + self._stubs["fetch_data_source"] = self.grpc_channel.unary_unary( + "/google.shopping.merchant.datasources.v1beta.DataSourcesService/FetchDataSource", + request_serializer=datasources.FetchDataSourceRequest.serialize, response_deserializer=empty_pb2.Empty.FromString, ) - return self._stubs["delete_dataset_version"] + return self._stubs["fetch_data_source"] def close(self): self.grpc_channel.close() @@ -438,4 +415,4 @@ def kind(self) -> str: return "grpc" -__all__ = ("MapsPlatformDatasetsV1AlphaGrpcTransport",) +__all__ = ("DataSourcesServiceGrpcTransport",) diff --git a/packages/google-shopping-merchant-datasources/google/shopping/merchant_datasources_v1beta/services/data_sources_service/transports/grpc_asyncio.py b/packages/google-shopping-merchant-datasources/google/shopping/merchant_datasources_v1beta/services/data_sources_service/transports/grpc_asyncio.py new file mode 100644 index 000000000000..4171050b69a8 --- /dev/null +++ b/packages/google-shopping-merchant-datasources/google/shopping/merchant_datasources_v1beta/services/data_sources_service/transports/grpc_asyncio.py @@ -0,0 +1,460 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1, grpc_helpers_async +from google.api_core import retry_async as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.protobuf import empty_pb2 # type: ignore +import grpc # type: ignore +from grpc.experimental import aio # type: ignore + +from google.shopping.merchant_datasources_v1beta.types import datasources + +from .base import DEFAULT_CLIENT_INFO, DataSourcesServiceTransport +from .grpc import DataSourcesServiceGrpcTransport + + +class DataSourcesServiceGrpcAsyncIOTransport(DataSourcesServiceTransport): + """gRPC AsyncIO backend transport for DataSourcesService. + + Service to manage primary, supplemental, inventory and other data + sources. See more in the `Merchant + Center `__ help + article. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + + _grpc_channel: aio.Channel + _stubs: Dict[str, Callable] = {} + + @classmethod + def create_channel( + cls, + host: str = "merchantapi.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> aio.Channel: + """Create and return a gRPC AsyncIO channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + aio.Channel: A gRPC AsyncIO channel object. 
+ """ + + return grpc_helpers_async.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs, + ) + + def __init__( + self, + *, + host: str = "merchantapi.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[Union[aio.Channel, Callable[..., aio.Channel]]] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'merchantapi.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if a ``channel`` instance is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if a ``channel`` instance is provided. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + channel (Optional[Union[aio.Channel, Callable[..., aio.Channel]]]): + A ``Channel`` instance through which to make calls, or a Callable + that constructs and returns one. If set to None, ``self.create_channel`` + is used to create the channel. If a Callable is given, it will be called + with the same arguments as used in ``self.create_channel``. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if a ``channel`` instance is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. 
+ Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if isinstance(channel, aio.Channel): + # Ignore credentials if a channel was passed. + credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + # initialize with the provided callable or the default channel + channel_init = channel or type(self).create_channel + self._grpc_channel = channel_init( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @property + def grpc_channel(self) -> aio.Channel: + """Create the channel designed to connect to this service. + + This property caches on the instance; repeated calls return + the same channel. + """ + # Return the channel from cache. + return self._grpc_channel + + @property + def get_data_source( + self, + ) -> Callable[ + [datasources.GetDataSourceRequest], Awaitable[datasources.DataSource] + ]: + r"""Return a callable for the get data source method over gRPC. + + Retrieves the data source configuration for the given + account. + + Returns: + Callable[[~.GetDataSourceRequest], + Awaitable[~.DataSource]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_data_source" not in self._stubs: + self._stubs["get_data_source"] = self.grpc_channel.unary_unary( + "/google.shopping.merchant.datasources.v1beta.DataSourcesService/GetDataSource", + request_serializer=datasources.GetDataSourceRequest.serialize, + response_deserializer=datasources.DataSource.deserialize, + ) + return self._stubs["get_data_source"] + + @property + def list_data_sources( + self, + ) -> Callable[ + [datasources.ListDataSourcesRequest], + Awaitable[datasources.ListDataSourcesResponse], + ]: + r"""Return a callable for the list data sources method over gRPC. + + Lists the configurations for data sources for the + given account. + + Returns: + Callable[[~.ListDataSourcesRequest], + Awaitable[~.ListDataSourcesResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_data_sources" not in self._stubs: + self._stubs["list_data_sources"] = self.grpc_channel.unary_unary( + "/google.shopping.merchant.datasources.v1beta.DataSourcesService/ListDataSources", + request_serializer=datasources.ListDataSourcesRequest.serialize, + response_deserializer=datasources.ListDataSourcesResponse.deserialize, + ) + return self._stubs["list_data_sources"] + + @property + def create_data_source( + self, + ) -> Callable[ + [datasources.CreateDataSourceRequest], Awaitable[datasources.DataSource] + ]: + r"""Return a callable for the create data source method over gRPC. + + Creates the new data source configuration for the + given account. + + Returns: + Callable[[~.CreateDataSourceRequest], + Awaitable[~.DataSource]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_data_source" not in self._stubs: + self._stubs["create_data_source"] = self.grpc_channel.unary_unary( + "/google.shopping.merchant.datasources.v1beta.DataSourcesService/CreateDataSource", + request_serializer=datasources.CreateDataSourceRequest.serialize, + response_deserializer=datasources.DataSource.deserialize, + ) + return self._stubs["create_data_source"] + + @property + def update_data_source( + self, + ) -> Callable[ + [datasources.UpdateDataSourceRequest], Awaitable[datasources.DataSource] + ]: + r"""Return a callable for the update data source method over gRPC. + + Updates the existing data source configuration. The + fields that are set in the update mask but not provided + in the resource will be deleted. + + Returns: + Callable[[~.UpdateDataSourceRequest], + Awaitable[~.DataSource]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "update_data_source" not in self._stubs: + self._stubs["update_data_source"] = self.grpc_channel.unary_unary( + "/google.shopping.merchant.datasources.v1beta.DataSourcesService/UpdateDataSource", + request_serializer=datasources.UpdateDataSourceRequest.serialize, + response_deserializer=datasources.DataSource.deserialize, + ) + return self._stubs["update_data_source"] + + @property + def delete_data_source( + self, + ) -> Callable[[datasources.DeleteDataSourceRequest], Awaitable[empty_pb2.Empty]]: + r"""Return a callable for the delete data source method over gRPC. + + Deletes a data source from your Merchant Center + account. + + Returns: + Callable[[~.DeleteDataSourceRequest], + Awaitable[~.Empty]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_data_source" not in self._stubs: + self._stubs["delete_data_source"] = self.grpc_channel.unary_unary( + "/google.shopping.merchant.datasources.v1beta.DataSourcesService/DeleteDataSource", + request_serializer=datasources.DeleteDataSourceRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs["delete_data_source"] + + @property + def fetch_data_source( + self, + ) -> Callable[[datasources.FetchDataSourceRequest], Awaitable[empty_pb2.Empty]]: + r"""Return a callable for the fetch data source method over gRPC. + + Performs the data fetch immediately (even outside + fetch schedule) on a data source from your Merchant + Center Account. If you need to call this method more + than once per day, you should use the Products service + to update your product data instead. + This method only works on data sources with a file input + set. + + Returns: + Callable[[~.FetchDataSourceRequest], + Awaitable[~.Empty]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "fetch_data_source" not in self._stubs: + self._stubs["fetch_data_source"] = self.grpc_channel.unary_unary( + "/google.shopping.merchant.datasources.v1beta.DataSourcesService/FetchDataSource", + request_serializer=datasources.FetchDataSourceRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs["fetch_data_source"] + + def _prep_wrapped_messages(self, client_info): + """Precompute the wrapped methods, overriding the base class method to use async wrappers.""" + self._wrapped_methods = { + self.get_data_source: gapic_v1.method_async.wrap_method( + self.get_data_source, + default_timeout=None, + client_info=client_info, + ), + self.list_data_sources: gapic_v1.method_async.wrap_method( + self.list_data_sources, + default_timeout=None, + client_info=client_info, + ), + self.create_data_source: gapic_v1.method_async.wrap_method( + self.create_data_source, + default_timeout=None, + client_info=client_info, + ), + self.update_data_source: gapic_v1.method_async.wrap_method( + self.update_data_source, + default_timeout=None, + client_info=client_info, + ), + self.delete_data_source: gapic_v1.method_async.wrap_method( + self.delete_data_source, + default_timeout=None, + client_info=client_info, + ), + self.fetch_data_source: gapic_v1.method_async.wrap_method( + self.fetch_data_source, + default_timeout=None, + client_info=client_info, + ), + } + + def close(self): + return self.grpc_channel.close() + + +__all__ = ("DataSourcesServiceGrpcAsyncIOTransport",) diff --git a/packages/google-maps-mapsplatformdatasets/google/maps/mapsplatformdatasets_v1alpha/services/maps_platform_datasets_v1_alpha/transports/rest.py b/packages/google-shopping-merchant-datasources/google/shopping/merchant_datasources_v1beta/services/data_sources_service/transports/rest.py similarity index 60% rename from packages/google-maps-mapsplatformdatasets/google/maps/mapsplatformdatasets_v1alpha/services/maps_platform_datasets_v1_alpha/transports/rest.py rename to packages/google-shopping-merchant-datasources/google/shopping/merchant_datasources_v1beta/services/data_sources_service/transports/rest.py index 5945b1e64a4c..c20f208bb222 100644 --- a/packages/google-maps-mapsplatformdatasets/google/maps/mapsplatformdatasets_v1alpha/services/maps_platform_datasets_v1_alpha/transports/rest.py +++ b/packages/google-shopping-merchant-datasources/google/shopping/merchant_datasources_v1beta/services/data_sources_service/transports/rest.py @@ -38,12 +38,10 @@ from google.protobuf import empty_pb2 # type: ignore -from google.maps.mapsplatformdatasets_v1alpha.types import dataset as gmm_dataset -from google.maps.mapsplatformdatasets_v1alpha.types import maps_platform_datasets -from google.maps.mapsplatformdatasets_v1alpha.types import dataset +from google.shopping.merchant_datasources_v1beta.types import datasources from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO -from .base import MapsPlatformDatasetsV1AlphaTransport +from .base import DataSourcesServiceTransport DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, @@ -52,8 +50,8 @@ ) -class MapsPlatformDatasetsV1AlphaRestInterceptor: - """Interceptor for MapsPlatformDatasetsV1Alpha. +class DataSourcesServiceRestInterceptor: + """Interceptor for DataSourcesService. Interceptors are used to manipulate requests, request metadata, and responses in arbitrary ways. 
@@ -63,217 +61,187 @@ class MapsPlatformDatasetsV1AlphaRestInterceptor: * Stripping extraneous information from responses These use cases and more can be enabled by injecting an - instance of a custom subclass when constructing the MapsPlatformDatasetsV1AlphaRestTransport. + instance of a custom subclass when constructing the DataSourcesServiceRestTransport. .. code-block:: python - class MyCustomMapsPlatformDatasetsV1AlphaInterceptor(MapsPlatformDatasetsV1AlphaRestInterceptor): - def pre_create_dataset(self, request, metadata): + class MyCustomDataSourcesServiceInterceptor(DataSourcesServiceRestInterceptor): + def pre_create_data_source(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata - def post_create_dataset(self, response): + def post_create_data_source(self, response): logging.log(f"Received response: {response}") return response - def pre_delete_dataset(self, request, metadata): + def pre_delete_data_source(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata - def pre_delete_dataset_version(self, request, metadata): + def pre_fetch_data_source(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata - def pre_get_dataset(self, request, metadata): + def pre_get_data_source(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata - def post_get_dataset(self, response): + def post_get_data_source(self, response): logging.log(f"Received response: {response}") return response - def pre_list_datasets(self, request, metadata): + def pre_list_data_sources(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata - def post_list_datasets(self, response): + def post_list_data_sources(self, response): logging.log(f"Received response: {response}") return response - def pre_list_dataset_versions(self, request, metadata): + def pre_update_data_source(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata - def post_list_dataset_versions(self, response): + def post_update_data_source(self, response): logging.log(f"Received response: {response}") return response - def pre_update_dataset_metadata(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_update_dataset_metadata(self, response): - logging.log(f"Received response: {response}") - return response - - transport = MapsPlatformDatasetsV1AlphaRestTransport(interceptor=MyCustomMapsPlatformDatasetsV1AlphaInterceptor()) - client = MapsPlatformDatasetsV1AlphaClient(transport=transport) + transport = DataSourcesServiceRestTransport(interceptor=MyCustomDataSourcesServiceInterceptor()) + client = DataSourcesServiceClient(transport=transport) """ - def pre_create_dataset( + def pre_create_data_source( self, - request: maps_platform_datasets.CreateDatasetRequest, + request: datasources.CreateDataSourceRequest, metadata: Sequence[Tuple[str, str]], - ) -> Tuple[maps_platform_datasets.CreateDatasetRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for create_dataset + ) -> Tuple[datasources.CreateDataSourceRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for create_data_source Override in a subclass to manipulate the request or metadata - before they are sent to the MapsPlatformDatasetsV1Alpha server. + before they are sent to the DataSourcesService server. 
""" return request, metadata - def post_create_dataset(self, response: gmm_dataset.Dataset) -> gmm_dataset.Dataset: - """Post-rpc interceptor for create_dataset + def post_create_data_source( + self, response: datasources.DataSource + ) -> datasources.DataSource: + """Post-rpc interceptor for create_data_source Override in a subclass to manipulate the response - after it is returned by the MapsPlatformDatasetsV1Alpha server but before + after it is returned by the DataSourcesService server but before it is returned to user code. """ return response - def pre_delete_dataset( + def pre_delete_data_source( self, - request: maps_platform_datasets.DeleteDatasetRequest, + request: datasources.DeleteDataSourceRequest, metadata: Sequence[Tuple[str, str]], - ) -> Tuple[maps_platform_datasets.DeleteDatasetRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for delete_dataset + ) -> Tuple[datasources.DeleteDataSourceRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for delete_data_source Override in a subclass to manipulate the request or metadata - before they are sent to the MapsPlatformDatasetsV1Alpha server. + before they are sent to the DataSourcesService server. """ return request, metadata - def pre_delete_dataset_version( + def pre_fetch_data_source( self, - request: maps_platform_datasets.DeleteDatasetVersionRequest, + request: datasources.FetchDataSourceRequest, metadata: Sequence[Tuple[str, str]], - ) -> Tuple[ - maps_platform_datasets.DeleteDatasetVersionRequest, Sequence[Tuple[str, str]] - ]: - """Pre-rpc interceptor for delete_dataset_version + ) -> Tuple[datasources.FetchDataSourceRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for fetch_data_source Override in a subclass to manipulate the request or metadata - before they are sent to the MapsPlatformDatasetsV1Alpha server. + before they are sent to the DataSourcesService server. """ return request, metadata - def pre_get_dataset( + def pre_get_data_source( self, - request: maps_platform_datasets.GetDatasetRequest, + request: datasources.GetDataSourceRequest, metadata: Sequence[Tuple[str, str]], - ) -> Tuple[maps_platform_datasets.GetDatasetRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for get_dataset + ) -> Tuple[datasources.GetDataSourceRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_data_source Override in a subclass to manipulate the request or metadata - before they are sent to the MapsPlatformDatasetsV1Alpha server. + before they are sent to the DataSourcesService server. """ return request, metadata - def post_get_dataset(self, response: dataset.Dataset) -> dataset.Dataset: - """Post-rpc interceptor for get_dataset + def post_get_data_source( + self, response: datasources.DataSource + ) -> datasources.DataSource: + """Post-rpc interceptor for get_data_source Override in a subclass to manipulate the response - after it is returned by the MapsPlatformDatasetsV1Alpha server but before + after it is returned by the DataSourcesService server but before it is returned to user code. 
""" return response - def pre_list_datasets( + def pre_list_data_sources( self, - request: maps_platform_datasets.ListDatasetsRequest, + request: datasources.ListDataSourcesRequest, metadata: Sequence[Tuple[str, str]], - ) -> Tuple[maps_platform_datasets.ListDatasetsRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for list_datasets + ) -> Tuple[datasources.ListDataSourcesRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_data_sources Override in a subclass to manipulate the request or metadata - before they are sent to the MapsPlatformDatasetsV1Alpha server. + before they are sent to the DataSourcesService server. """ return request, metadata - def post_list_datasets( - self, response: maps_platform_datasets.ListDatasetsResponse - ) -> maps_platform_datasets.ListDatasetsResponse: - """Post-rpc interceptor for list_datasets + def post_list_data_sources( + self, response: datasources.ListDataSourcesResponse + ) -> datasources.ListDataSourcesResponse: + """Post-rpc interceptor for list_data_sources Override in a subclass to manipulate the response - after it is returned by the MapsPlatformDatasetsV1Alpha server but before + after it is returned by the DataSourcesService server but before it is returned to user code. """ return response - def pre_list_dataset_versions( + def pre_update_data_source( self, - request: maps_platform_datasets.ListDatasetVersionsRequest, + request: datasources.UpdateDataSourceRequest, metadata: Sequence[Tuple[str, str]], - ) -> Tuple[ - maps_platform_datasets.ListDatasetVersionsRequest, Sequence[Tuple[str, str]] - ]: - """Pre-rpc interceptor for list_dataset_versions + ) -> Tuple[datasources.UpdateDataSourceRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for update_data_source Override in a subclass to manipulate the request or metadata - before they are sent to the MapsPlatformDatasetsV1Alpha server. + before they are sent to the DataSourcesService server. """ return request, metadata - def post_list_dataset_versions( - self, response: maps_platform_datasets.ListDatasetVersionsResponse - ) -> maps_platform_datasets.ListDatasetVersionsResponse: - """Post-rpc interceptor for list_dataset_versions + def post_update_data_source( + self, response: datasources.DataSource + ) -> datasources.DataSource: + """Post-rpc interceptor for update_data_source Override in a subclass to manipulate the response - after it is returned by the MapsPlatformDatasetsV1Alpha server but before - it is returned to user code. - """ - return response - - def pre_update_dataset_metadata( - self, - request: maps_platform_datasets.UpdateDatasetMetadataRequest, - metadata: Sequence[Tuple[str, str]], - ) -> Tuple[ - maps_platform_datasets.UpdateDatasetMetadataRequest, Sequence[Tuple[str, str]] - ]: - """Pre-rpc interceptor for update_dataset_metadata - - Override in a subclass to manipulate the request or metadata - before they are sent to the MapsPlatformDatasetsV1Alpha server. - """ - return request, metadata - - def post_update_dataset_metadata( - self, response: gmm_dataset.Dataset - ) -> gmm_dataset.Dataset: - """Post-rpc interceptor for update_dataset_metadata - - Override in a subclass to manipulate the response - after it is returned by the MapsPlatformDatasetsV1Alpha server but before + after it is returned by the DataSourcesService server but before it is returned to user code. 
""" return response @dataclasses.dataclass -class MapsPlatformDatasetsV1AlphaRestStub: +class DataSourcesServiceRestStub: _session: AuthorizedSession _host: str - _interceptor: MapsPlatformDatasetsV1AlphaRestInterceptor + _interceptor: DataSourcesServiceRestInterceptor -class MapsPlatformDatasetsV1AlphaRestTransport(MapsPlatformDatasetsV1AlphaTransport): - """REST backend transport for MapsPlatformDatasetsV1Alpha. +class DataSourcesServiceRestTransport(DataSourcesServiceTransport): + """REST backend transport for DataSourcesService. - Service definition for the Maps Platform Datasets API. + Service to manage primary, supplemental, inventory and other data + sources. See more in the `Merchant + Center `__ help + article. This class defines the same methods as the primary client, so the primary client can load the underlying transport implementation @@ -286,7 +254,7 @@ class MapsPlatformDatasetsV1AlphaRestTransport(MapsPlatformDatasetsV1AlphaTransp def __init__( self, *, - host: str = "mapsplatformdatasets.googleapis.com", + host: str = "merchantapi.googleapis.com", credentials: Optional[ga_credentials.Credentials] = None, credentials_file: Optional[str] = None, scopes: Optional[Sequence[str]] = None, @@ -295,14 +263,14 @@ def __init__( client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, always_use_jwt_access: Optional[bool] = False, url_scheme: str = "https", - interceptor: Optional[MapsPlatformDatasetsV1AlphaRestInterceptor] = None, + interceptor: Optional[DataSourcesServiceRestInterceptor] = None, api_audience: Optional[str] = None, ) -> None: """Instantiate the transport. Args: host (Optional[str]): - The hostname to connect to (default: 'mapsplatformdatasets.googleapis.com'). + The hostname to connect to (default: 'merchantapi.googleapis.com'). credentials (Optional[google.auth.credentials.Credentials]): The authorization credentials to attach to requests. These credentials identify the application to the service; if none @@ -356,12 +324,12 @@ def __init__( ) if client_cert_source_for_mtls: self._session.configure_mtls_channel(client_cert_source_for_mtls) - self._interceptor = interceptor or MapsPlatformDatasetsV1AlphaRestInterceptor() + self._interceptor = interceptor or DataSourcesServiceRestInterceptor() self._prep_wrapped_messages(client_info) - class _CreateDataset(MapsPlatformDatasetsV1AlphaRestStub): + class _CreateDataSource(DataSourcesServiceRestStub): def __hash__(self): - return hash("CreateDataset") + return hash("CreateDataSource") __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} @@ -375,17 +343,18 @@ def _get_unset_required_fields(cls, message_dict): def __call__( self, - request: maps_platform_datasets.CreateDatasetRequest, + request: datasources.CreateDataSourceRequest, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), - ) -> gmm_dataset.Dataset: - r"""Call the create dataset method over HTTP. + ) -> datasources.DataSource: + r"""Call the create data source method over HTTP. Args: - request (~.maps_platform_datasets.CreateDatasetRequest): - The request object. Request to create a maps dataset. + request (~.datasources.CreateDataSourceRequest): + The request object. Request message for the + CreateDataSource method. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -393,21 +362,24 @@ def __call__( sent along with the request as metadata. 
Returns: - ~.gmm_dataset.Dataset: - A representation of a maps platform - dataset. + ~.datasources.DataSource: + The `data + source `__ + for the Merchant Center account. """ http_options: List[Dict[str, str]] = [ { "method": "post", - "uri": "/v1alpha/{parent=projects/*}/datasets", - "body": "dataset", + "uri": "/datasources/v1beta/{parent=accounts/*}/dataSources", + "body": "data_source", }, ] - request, metadata = self._interceptor.pre_create_dataset(request, metadata) - pb_request = maps_platform_datasets.CreateDatasetRequest.pb(request) + request, metadata = self._interceptor.pre_create_data_source( + request, metadata + ) + pb_request = datasources.CreateDataSourceRequest.pb(request) transcoded_request = path_template.transcode(http_options, pb_request) # Jsonify the request body @@ -446,16 +418,16 @@ def __call__( raise core_exceptions.from_http_response(response) # Return the response - resp = gmm_dataset.Dataset() - pb_resp = gmm_dataset.Dataset.pb(resp) + resp = datasources.DataSource() + pb_resp = datasources.DataSource.pb(resp) json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_create_dataset(resp) + resp = self._interceptor.post_create_data_source(resp) return resp - class _DeleteDataset(MapsPlatformDatasetsV1AlphaRestStub): + class _DeleteDataSource(DataSourcesServiceRestStub): def __hash__(self): - return hash("DeleteDataset") + return hash("DeleteDataSource") __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} @@ -469,19 +441,18 @@ def _get_unset_required_fields(cls, message_dict): def __call__( self, - request: maps_platform_datasets.DeleteDatasetRequest, + request: datasources.DeleteDataSourceRequest, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ): - r"""Call the delete dataset method over HTTP. + r"""Call the delete data source method over HTTP. Args: - request (~.maps_platform_datasets.DeleteDatasetRequest): - The request object. Request to delete a dataset. - - The dataset to be deleted. + request (~.datasources.DeleteDataSourceRequest): + The request object. Request message for the + DeleteDataSource method. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. 
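
As a concrete companion to the interceptor docstring above (a hedged sketch, not part of this diff): the `pre_*`/`post_*` hooks invoked by these REST stub classes can be overridden to adjust request metadata or inspect responses before they reach user code. The sketch assumes `DataSourcesServiceClient` is re-exported from the versioned package, as generated GAPIC packages typically are, and that Application Default Credentials are available; the header name is purely illustrative.

```python
# Hedged sketch of a custom REST interceptor; the x-example-trace header and
# the client import location are assumptions for illustration only.
import logging
from typing import Sequence, Tuple

from google.shopping import merchant_datasources_v1beta
from google.shopping.merchant_datasources_v1beta.services.data_sources_service.transports.rest import (
    DataSourcesServiceRestInterceptor,
    DataSourcesServiceRestTransport,
)

logger = logging.getLogger(__name__)


class LoggingInterceptor(DataSourcesServiceRestInterceptor):
    def pre_create_data_source(
        self,
        request,
        metadata: Sequence[Tuple[str, str]],
    ):
        # Append a custom metadata entry; the stub converts this sequence
        # into HTTP headers before sending the request.
        return request, list(metadata) + [("x-example-trace", "create")]

    def post_create_data_source(self, response):
        # Inspect the parsed DataSource before it is returned to user code.
        logger.info("Created data source: %s", response.name)
        return response


# Constructing the transport with no explicit credentials falls back to ADC.
transport = DataSourcesServiceRestTransport(interceptor=LoggingInterceptor())
client = merchant_datasources_v1beta.DataSourcesServiceClient(transport=transport)
```

Returning the `(request, metadata)` pair unchanged is always valid; only the hooks you override change behavior.
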
@@ -492,11 +463,13 @@ def __call__( http_options: List[Dict[str, str]] = [ { "method": "delete", - "uri": "/v1alpha/{name=projects/*/datasets/*}", + "uri": "/datasources/v1beta/{name=accounts/*/dataSources/*}", }, ] - request, metadata = self._interceptor.pre_delete_dataset(request, metadata) - pb_request = maps_platform_datasets.DeleteDatasetRequest.pb(request) + request, metadata = self._interceptor.pre_delete_data_source( + request, metadata + ) + pb_request = datasources.DeleteDataSourceRequest.pb(request) transcoded_request = path_template.transcode(http_options, pb_request) uri = transcoded_request["uri"] @@ -528,9 +501,9 @@ def __call__( if response.status_code >= 400: raise core_exceptions.from_http_response(response) - class _DeleteDatasetVersion(MapsPlatformDatasetsV1AlphaRestStub): + class _FetchDataSource(DataSourcesServiceRestStub): def __hash__(self): - return hash("DeleteDatasetVersion") + return hash("FetchDataSource") __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} @@ -544,18 +517,18 @@ def _get_unset_required_fields(cls, message_dict): def __call__( self, - request: maps_platform_datasets.DeleteDatasetVersionRequest, + request: datasources.FetchDataSourceRequest, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ): - r"""Call the delete dataset version method over HTTP. + r"""Call the fetch data source method over HTTP. Args: - request (~.maps_platform_datasets.DeleteDatasetVersionRequest): - The request object. Request to delete a version of a - dataset. + request (~.datasources.FetchDataSourceRequest): + The request object. Request message for the + FetchDataSource method. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -565,95 +538,22 @@ def __call__( http_options: List[Dict[str, str]] = [ { - "method": "delete", - "uri": "/v1alpha/{name=projects/*/datasets/*}:deleteVersion", + "method": "post", + "uri": "/datasources/v1beta/{name=accounts/*/dataSources/*}:fetch", + "body": "*", }, ] - request, metadata = self._interceptor.pre_delete_dataset_version( + request, metadata = self._interceptor.pre_fetch_data_source( request, metadata ) - pb_request = maps_platform_datasets.DeleteDatasetVersionRequest.pb(request) + pb_request = datasources.FetchDataSourceRequest.pb(request) transcoded_request = path_template.transcode(http_options, pb_request) - uri = transcoded_request["uri"] - method = transcoded_request["method"] - - # Jsonify the query params - query_params = json.loads( - json_format.MessageToJson( - transcoded_request["query_params"], - use_integers_for_enums=True, - ) - ) - query_params.update(self._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" + # Jsonify the request body - # Send the request - headers = dict(metadata) - headers["Content-Type"] = "application/json" - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - class _GetDataset(MapsPlatformDatasetsV1AlphaRestStub): - def __hash__(self): - return hash("GetDataset") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return { - k: v - for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() - if k not in message_dict - } - - def __call__( - self, - request: maps_platform_datasets.GetDatasetRequest, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> dataset.Dataset: - r"""Call the get dataset method over HTTP. - - Args: - request (~.maps_platform_datasets.GetDatasetRequest): - The request object. Request to get the specified dataset. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - ~.dataset.Dataset: - A representation of a maps platform - dataset. - - """ - - http_options: List[Dict[str, str]] = [ - { - "method": "get", - "uri": "/v1alpha/{name=projects/*/datasets/*}", - }, - ] - request, metadata = self._interceptor.pre_get_dataset(request, metadata) - pb_request = maps_platform_datasets.GetDatasetRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - uri = transcoded_request["uri"] method = transcoded_request["method"] @@ -676,6 +576,7 @@ def __call__( timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -683,17 +584,9 @@ def __call__( if response.status_code >= 400: raise core_exceptions.from_http_response(response) - # Return the response - resp = dataset.Dataset() - pb_resp = dataset.Dataset.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_get_dataset(resp) - return resp - - class _ListDatasets(MapsPlatformDatasetsV1AlphaRestStub): + class _GetDataSource(DataSourcesServiceRestStub): def __hash__(self): - return hash("ListDatasets") + return hash("GetDataSource") __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} @@ -707,18 +600,18 @@ def _get_unset_required_fields(cls, message_dict): def __call__( self, - request: maps_platform_datasets.ListDatasetsRequest, + request: datasources.GetDataSourceRequest, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), - ) -> maps_platform_datasets.ListDatasetsResponse: - r"""Call the list datasets method over HTTP. + ) -> datasources.DataSource: + r"""Call the get data source method over HTTP. Args: - request (~.maps_platform_datasets.ListDatasetsRequest): - The request object. Request to list datasets for the - project. + request (~.datasources.GetDataSourceRequest): + The request object. Request message for the GetDataSource + method. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -726,20 +619,21 @@ def __call__( sent along with the request as metadata. Returns: - ~.maps_platform_datasets.ListDatasetsResponse: - Response to list datasets for the - project. 
+ ~.datasources.DataSource: + The `data + source `__ + for the Merchant Center account. """ http_options: List[Dict[str, str]] = [ { "method": "get", - "uri": "/v1alpha/{parent=projects/*}/datasets", + "uri": "/datasources/v1beta/{name=accounts/*/dataSources/*}", }, ] - request, metadata = self._interceptor.pre_list_datasets(request, metadata) - pb_request = maps_platform_datasets.ListDatasetsRequest.pb(request) + request, metadata = self._interceptor.pre_get_data_source(request, metadata) + pb_request = datasources.GetDataSourceRequest.pb(request) transcoded_request = path_template.transcode(http_options, pb_request) uri = transcoded_request["uri"] @@ -772,16 +666,16 @@ def __call__( raise core_exceptions.from_http_response(response) # Return the response - resp = maps_platform_datasets.ListDatasetsResponse() - pb_resp = maps_platform_datasets.ListDatasetsResponse.pb(resp) + resp = datasources.DataSource() + pb_resp = datasources.DataSource.pb(resp) json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_list_datasets(resp) + resp = self._interceptor.post_get_data_source(resp) return resp - class _ListDatasetVersions(MapsPlatformDatasetsV1AlphaRestStub): + class _ListDataSources(DataSourcesServiceRestStub): def __hash__(self): - return hash("ListDatasetVersions") + return hash("ListDataSources") __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} @@ -795,18 +689,18 @@ def _get_unset_required_fields(cls, message_dict): def __call__( self, - request: maps_platform_datasets.ListDatasetVersionsRequest, + request: datasources.ListDataSourcesRequest, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), - ) -> maps_platform_datasets.ListDatasetVersionsResponse: - r"""Call the list dataset versions method over HTTP. + ) -> datasources.ListDataSourcesResponse: + r"""Call the list data sources method over HTTP. Args: - request (~.maps_platform_datasets.ListDatasetVersionsRequest): - The request object. Request to list of all versions of - the dataset. + request (~.datasources.ListDataSourcesRequest): + The request object. Request message for the + ListDataSources method. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -814,22 +708,22 @@ def __call__( sent along with the request as metadata. Returns: - ~.maps_platform_datasets.ListDatasetVersionsResponse: - Response with list of all versions of - the dataset. + ~.datasources.ListDataSourcesResponse: + Response message for the + ListDataSources method. 
""" http_options: List[Dict[str, str]] = [ { "method": "get", - "uri": "/v1alpha/{name=projects/*/datasets/*}:listVersions", + "uri": "/datasources/v1beta/{parent=accounts/*}/dataSources", }, ] - request, metadata = self._interceptor.pre_list_dataset_versions( + request, metadata = self._interceptor.pre_list_data_sources( request, metadata ) - pb_request = maps_platform_datasets.ListDatasetVersionsRequest.pb(request) + pb_request = datasources.ListDataSourcesRequest.pb(request) transcoded_request = path_template.transcode(http_options, pb_request) uri = transcoded_request["uri"] @@ -862,18 +756,20 @@ def __call__( raise core_exceptions.from_http_response(response) # Return the response - resp = maps_platform_datasets.ListDatasetVersionsResponse() - pb_resp = maps_platform_datasets.ListDatasetVersionsResponse.pb(resp) + resp = datasources.ListDataSourcesResponse() + pb_resp = datasources.ListDataSourcesResponse.pb(resp) json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_list_dataset_versions(resp) + resp = self._interceptor.post_list_data_sources(resp) return resp - class _UpdateDatasetMetadata(MapsPlatformDatasetsV1AlphaRestStub): + class _UpdateDataSource(DataSourcesServiceRestStub): def __hash__(self): - return hash("UpdateDatasetMetadata") + return hash("UpdateDataSource") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "updateMask": {}, + } @classmethod def _get_unset_required_fields(cls, message_dict): @@ -885,18 +781,18 @@ def _get_unset_required_fields(cls, message_dict): def __call__( self, - request: maps_platform_datasets.UpdateDatasetMetadataRequest, + request: datasources.UpdateDataSourceRequest, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), - ) -> gmm_dataset.Dataset: - r"""Call the update dataset metadata method over HTTP. + ) -> datasources.DataSource: + r"""Call the update data source method over HTTP. Args: - request (~.maps_platform_datasets.UpdateDatasetMetadataRequest): - The request object. Request to update the metadata fields - of the dataset. + request (~.datasources.UpdateDataSourceRequest): + The request object. Request message for the + UpdateDataSource method. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -904,23 +800,24 @@ def __call__( sent along with the request as metadata. Returns: - ~.gmm_dataset.Dataset: - A representation of a maps platform - dataset. + ~.datasources.DataSource: + The `data + source `__ + for the Merchant Center account. 
""" http_options: List[Dict[str, str]] = [ { "method": "patch", - "uri": "/v1alpha/{dataset.name=projects/*/datasets/*}", - "body": "dataset", + "uri": "/datasources/v1beta/{data_source.name=accounts/*/dataSources/*}", + "body": "data_source", }, ] - request, metadata = self._interceptor.pre_update_dataset_metadata( + request, metadata = self._interceptor.pre_update_data_source( request, metadata ) - pb_request = maps_platform_datasets.UpdateDatasetMetadataRequest.pb(request) + pb_request = datasources.UpdateDataSourceRequest.pb(request) transcoded_request = path_template.transcode(http_options, pb_request) # Jsonify the request body @@ -959,78 +856,62 @@ def __call__( raise core_exceptions.from_http_response(response) # Return the response - resp = gmm_dataset.Dataset() - pb_resp = gmm_dataset.Dataset.pb(resp) + resp = datasources.DataSource() + pb_resp = datasources.DataSource.pb(resp) json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_update_dataset_metadata(resp) + resp = self._interceptor.post_update_data_source(resp) return resp @property - def create_dataset( - self, - ) -> Callable[[maps_platform_datasets.CreateDatasetRequest], gmm_dataset.Dataset]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._CreateDataset(self._session, self._host, self._interceptor) # type: ignore - - @property - def delete_dataset( + def create_data_source( self, - ) -> Callable[[maps_platform_datasets.DeleteDatasetRequest], empty_pb2.Empty]: + ) -> Callable[[datasources.CreateDataSourceRequest], datasources.DataSource]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. # In C++ this would require a dynamic_cast - return self._DeleteDataset(self._session, self._host, self._interceptor) # type: ignore + return self._CreateDataSource(self._session, self._host, self._interceptor) # type: ignore @property - def delete_dataset_version( + def delete_data_source( self, - ) -> Callable[ - [maps_platform_datasets.DeleteDatasetVersionRequest], empty_pb2.Empty - ]: + ) -> Callable[[datasources.DeleteDataSourceRequest], empty_pb2.Empty]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. # In C++ this would require a dynamic_cast - return self._DeleteDatasetVersion(self._session, self._host, self._interceptor) # type: ignore + return self._DeleteDataSource(self._session, self._host, self._interceptor) # type: ignore @property - def get_dataset( + def fetch_data_source( self, - ) -> Callable[[maps_platform_datasets.GetDatasetRequest], dataset.Dataset]: + ) -> Callable[[datasources.FetchDataSourceRequest], empty_pb2.Empty]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. # In C++ this would require a dynamic_cast - return self._GetDataset(self._session, self._host, self._interceptor) # type: ignore + return self._FetchDataSource(self._session, self._host, self._interceptor) # type: ignore @property - def list_datasets( + def get_data_source( self, - ) -> Callable[ - [maps_platform_datasets.ListDatasetsRequest], - maps_platform_datasets.ListDatasetsResponse, - ]: + ) -> Callable[[datasources.GetDataSourceRequest], datasources.DataSource]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
# In C++ this would require a dynamic_cast - return self._ListDatasets(self._session, self._host, self._interceptor) # type: ignore + return self._GetDataSource(self._session, self._host, self._interceptor) # type: ignore @property - def list_dataset_versions( + def list_data_sources( self, ) -> Callable[ - [maps_platform_datasets.ListDatasetVersionsRequest], - maps_platform_datasets.ListDatasetVersionsResponse, + [datasources.ListDataSourcesRequest], datasources.ListDataSourcesResponse ]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. # In C++ this would require a dynamic_cast - return self._ListDatasetVersions(self._session, self._host, self._interceptor) # type: ignore + return self._ListDataSources(self._session, self._host, self._interceptor) # type: ignore @property - def update_dataset_metadata( + def update_data_source( self, - ) -> Callable[ - [maps_platform_datasets.UpdateDatasetMetadataRequest], gmm_dataset.Dataset - ]: + ) -> Callable[[datasources.UpdateDataSourceRequest], datasources.DataSource]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. # In C++ this would require a dynamic_cast - return self._UpdateDatasetMetadata(self._session, self._host, self._interceptor) # type: ignore + return self._UpdateDataSource(self._session, self._host, self._interceptor) # type: ignore @property def kind(self) -> str: @@ -1040,4 +921,4 @@ def close(self): self._session.close() -__all__ = ("MapsPlatformDatasetsV1AlphaRestTransport",) +__all__ = ("DataSourcesServiceRestTransport",) diff --git a/packages/google-shopping-merchant-datasources/google/shopping/merchant_datasources_v1beta/types/__init__.py b/packages/google-shopping-merchant-datasources/google/shopping/merchant_datasources_v1beta/types/__init__.py new file mode 100644 index 000000000000..495c5a32635f --- /dev/null +++ b/packages/google-shopping-merchant-datasources/google/shopping/merchant_datasources_v1beta/types/__init__.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from .datasources import ( + CreateDataSourceRequest, + DataSource, + DeleteDataSourceRequest, + FetchDataSourceRequest, + GetDataSourceRequest, + ListDataSourcesRequest, + ListDataSourcesResponse, + UpdateDataSourceRequest, +) +from .datasourcetypes import ( + LocalInventoryDataSource, + PrimaryProductDataSource, + PromotionDataSource, + RegionalInventoryDataSource, + SupplementalProductDataSource, +) +from .fileinputs import FileInput + +__all__ = ( + "CreateDataSourceRequest", + "DataSource", + "DeleteDataSourceRequest", + "FetchDataSourceRequest", + "GetDataSourceRequest", + "ListDataSourcesRequest", + "ListDataSourcesResponse", + "UpdateDataSourceRequest", + "LocalInventoryDataSource", + "PrimaryProductDataSource", + "PromotionDataSource", + "RegionalInventoryDataSource", + "SupplementalProductDataSource", + "FileInput", +) diff --git a/packages/google-shopping-merchant-datasources/google/shopping/merchant_datasources_v1beta/types/datasources.py b/packages/google-shopping-merchant-datasources/google/shopping/merchant_datasources_v1beta/types/datasources.py new file mode 100644 index 000000000000..2681387c16d0 --- /dev/null +++ b/packages/google-shopping-merchant-datasources/google/shopping/merchant_datasources_v1beta/types/datasources.py @@ -0,0 +1,357 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +from google.protobuf import field_mask_pb2 # type: ignore +import proto # type: ignore + +from google.shopping.merchant_datasources_v1beta.types import ( + datasourcetypes, + fileinputs, +) + +__protobuf__ = proto.module( + package="google.shopping.merchant.datasources.v1beta", + manifest={ + "DataSource", + "GetDataSourceRequest", + "ListDataSourcesRequest", + "ListDataSourcesResponse", + "CreateDataSourceRequest", + "UpdateDataSourceRequest", + "FetchDataSourceRequest", + "DeleteDataSourceRequest", + }, +) + + +class DataSource(proto.Message): + r"""The `data + source `__ for + the Merchant Center account. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + primary_product_data_source (google.shopping.merchant_datasources_v1beta.types.PrimaryProductDataSource): + Required. The `primary data + source `__ + for local and online products. + + This field is a member of `oneof`_ ``Type``. + supplemental_product_data_source (google.shopping.merchant_datasources_v1beta.types.SupplementalProductDataSource): + Required. The `supplemental data + source `__ + for local and online products. + + This field is a member of `oneof`_ ``Type``. 
+ local_inventory_data_source (google.shopping.merchant_datasources_v1beta.types.LocalInventoryDataSource): + Required. The `local + inventory `__ + data source. + + This field is a member of `oneof`_ ``Type``. + regional_inventory_data_source (google.shopping.merchant_datasources_v1beta.types.RegionalInventoryDataSource): + Required. The `regional + inventory `__ + data source. + + This field is a member of `oneof`_ ``Type``. + promotion_data_source (google.shopping.merchant_datasources_v1beta.types.PromotionDataSource): + Required. The + `promotion `__ + data source. + + This field is a member of `oneof`_ ``Type``. + name (str): + Identifier. The name of the data source. Format: + ``{datasource.name=accounts/{account}/dataSources/{datasource}}`` + data_source_id (int): + Output only. The data source id. + display_name (str): + Required. The displayed data source name in + the Merchant Center UI. + input (google.shopping.merchant_datasources_v1beta.types.DataSource.Input): + Output only. Determines the type of input to + the data source. Based on the input some + settings might not work. Only generic data + sources can be created through the API. + file_input (google.shopping.merchant_datasources_v1beta.types.FileInput): + Optional. The field is used only when data is + managed through a file. + """ + + class Input(proto.Enum): + r"""Determines the type of input to the data source. Based on the + input some settings might not be supported. + + Values: + INPUT_UNSPECIFIED (0): + Input unspecified. + API (1): + Represents data sources for which the data is + primarily provided through the API. + FILE (2): + Represents data sources for which the data is + primarily provided through file input. Data can + still be provided through the API. + UI (3): + The data source for products added directly + in Merchant Center. + This type of data source can not be created or + updated through this API, only by Merchant + Center UI. + + This type of data source is read only. + AUTOFEED (4): + This is also known as `Automated + feeds `__ + used to automatically build your product data. This type of + data source can be enabled or disabled through the Accounts + bundle. 
+ """ + INPUT_UNSPECIFIED = 0 + API = 1 + FILE = 2 + UI = 3 + AUTOFEED = 4 + + primary_product_data_source: datasourcetypes.PrimaryProductDataSource = proto.Field( + proto.MESSAGE, + number=4, + oneof="Type", + message=datasourcetypes.PrimaryProductDataSource, + ) + supplemental_product_data_source: datasourcetypes.SupplementalProductDataSource = ( + proto.Field( + proto.MESSAGE, + number=5, + oneof="Type", + message=datasourcetypes.SupplementalProductDataSource, + ) + ) + local_inventory_data_source: datasourcetypes.LocalInventoryDataSource = proto.Field( + proto.MESSAGE, + number=6, + oneof="Type", + message=datasourcetypes.LocalInventoryDataSource, + ) + regional_inventory_data_source: datasourcetypes.RegionalInventoryDataSource = ( + proto.Field( + proto.MESSAGE, + number=7, + oneof="Type", + message=datasourcetypes.RegionalInventoryDataSource, + ) + ) + promotion_data_source: datasourcetypes.PromotionDataSource = proto.Field( + proto.MESSAGE, + number=8, + oneof="Type", + message=datasourcetypes.PromotionDataSource, + ) + name: str = proto.Field( + proto.STRING, + number=1, + ) + data_source_id: int = proto.Field( + proto.INT64, + number=2, + ) + display_name: str = proto.Field( + proto.STRING, + number=3, + ) + input: Input = proto.Field( + proto.ENUM, + number=10, + enum=Input, + ) + file_input: fileinputs.FileInput = proto.Field( + proto.MESSAGE, + number=11, + message=fileinputs.FileInput, + ) + + +class GetDataSourceRequest(proto.Message): + r"""Request message for the GetDataSource method. + + Attributes: + name (str): + Required. The name of the data source to retrieve. Format: + ``accounts/{account}/dataSources/{datasource}`` + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class ListDataSourcesRequest(proto.Message): + r"""Request message for the ListDataSources method. + + Attributes: + parent (str): + Required. The account to list data sources for. Format: + ``accounts/{account}`` + page_size (int): + Optional. The maximum number of data sources + to return. The service may return fewer than + this value. The maximum value is 1000; values + above 1000 will be coerced to 1000. If + unspecified, the maximum number of data sources + will be returned. + page_token (str): + Optional. A page token, received from a previous + ``ListDataSources`` call. Provide this to retrieve the + subsequent page. + + When paginating, all other parameters provided to + ``ListDataSources`` must match the call that provided the + page token. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + + +class ListDataSourcesResponse(proto.Message): + r"""Response message for the ListDataSources method. + + Attributes: + data_sources (MutableSequence[google.shopping.merchant_datasources_v1beta.types.DataSource]): + The data sources from the specified account. + next_page_token (str): + A token, which can be sent as ``page_token`` to retrieve the + next page. If this field is omitted, there are no subsequent + pages. + """ + + @property + def raw_page(self): + return self + + data_sources: MutableSequence["DataSource"] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="DataSource", + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + + +class CreateDataSourceRequest(proto.Message): + r"""Request message for the CreateDataSource method. + + Attributes: + parent (str): + Required. 
The account where this data source will be + created. Format: ``accounts/{account}`` + data_source (google.shopping.merchant_datasources_v1beta.types.DataSource): + Required. The data source to create. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + data_source: "DataSource" = proto.Field( + proto.MESSAGE, + number=2, + message="DataSource", + ) + + +class UpdateDataSourceRequest(proto.Message): + r"""Request message for the UpdateDataSource method. + + Attributes: + data_source (google.shopping.merchant_datasources_v1beta.types.DataSource): + Required. The data source resource to update. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Required. The list of data source fields to be updated. + + Fields specified in the update mask without a value + specified in the body will be deleted from the data source. + + Providing special "*" value for full data source replacement + is not supported. + """ + + data_source: "DataSource" = proto.Field( + proto.MESSAGE, + number=1, + message="DataSource", + ) + update_mask: field_mask_pb2.FieldMask = proto.Field( + proto.MESSAGE, + number=2, + message=field_mask_pb2.FieldMask, + ) + + +class FetchDataSourceRequest(proto.Message): + r"""Request message for the FetchDataSource method. + + Attributes: + name (str): + Required. The name of the data source resource to fetch. + Format: ``accounts/{account}/dataSources/{datasource}`` + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class DeleteDataSourceRequest(proto.Message): + r"""Request message for the DeleteDataSource method. + + Attributes: + name (str): + Required. The name of the data source to delete. Format: + ``accounts/{account}/dataSources/{datasource}`` + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-shopping-merchant-datasources/google/shopping/merchant_datasources_v1beta/types/datasourcetypes.py b/packages/google-shopping-merchant-datasources/google/shopping/merchant_datasources_v1beta/types/datasourcetypes.py new file mode 100644 index 000000000000..61ec51caa2ca --- /dev/null +++ b/packages/google-shopping-merchant-datasources/google/shopping/merchant_datasources_v1beta/types/datasourcetypes.py @@ -0,0 +1,260 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + +__protobuf__ = proto.module( + package="google.shopping.merchant.datasources.v1beta", + manifest={ + "PrimaryProductDataSource", + "SupplementalProductDataSource", + "LocalInventoryDataSource", + "RegionalInventoryDataSource", + "PromotionDataSource", + }, +) + + +class PrimaryProductDataSource(proto.Message): + r"""The primary data source for local and online products. + + .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + channel (google.shopping.merchant_datasources_v1beta.types.PrimaryProductDataSource.Channel): + Required. Immutable. Specifies the type of + data source channel. + feed_label (str): + Optional. Immutable. The feed label that is specified on the + data source level. + + Must be less than or equal to 20 uppercase letters (A-Z), + numbers (0-9), and dashes (-). + + See also `migration to feed + labels `__. + + ``feedLabel`` and ``contentLanguage`` must be either both + set or unset for data sources with product content type. + They must be set for data sources with a file input. + + If set, the data source will only accept products matching + this combination. If unset, the data source will accept + products without that restriction. + + This field is a member of `oneof`_ ``_feed_label``. + content_language (str): + Optional. Immutable. The two-letter ISO 639-1 language of + the items in the data source. + + ``feedLabel`` and ``contentLanguage`` must be either both + set or unset. The fields can only be unset for data sources + without file input. + + If set, the data source will only accept products matching + this combination. If unset, the data source will accept + products without that restriction. + + This field is a member of `oneof`_ ``_content_language``. + countries (MutableSequence[str]): + Optional. The countries where the items may be displayed. + Represented as a `CLDR territory + code `__. + """ + + class Channel(proto.Enum): + r"""Data Source Channel. + + Channel is used to distinguish between data sources for + different product verticals. + + Values: + CHANNEL_UNSPECIFIED (0): + Not specified. + ONLINE_PRODUCTS (1): + Online product. + LOCAL_PRODUCTS (2): + Local product. + PRODUCTS (3): + Unified data source for both local and online + products. + """ + CHANNEL_UNSPECIFIED = 0 + ONLINE_PRODUCTS = 1 + LOCAL_PRODUCTS = 2 + PRODUCTS = 3 + + channel: Channel = proto.Field( + proto.ENUM, + number=3, + enum=Channel, + ) + feed_label: str = proto.Field( + proto.STRING, + number=4, + optional=True, + ) + content_language: str = proto.Field( + proto.STRING, + number=5, + optional=True, + ) + countries: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=6, + ) + + +class SupplementalProductDataSource(proto.Message): + r"""The supplemental data source for local and online products. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + feed_label (str): + Optional. Immutable. The feed label that is specified on the + data source level. + + Must be less than or equal to 20 uppercase letters (A-Z), + numbers (0-9), and dashes (-). + + See also `migration to feed + labels `__. + + ``feedLabel`` and ``contentLanguage`` must be either both + set or unset for data sources with product content type. + They must be set for data sources with a file input. + + If set, the data source will only accept products matching + this combination. If unset, the data source will accept + produts without that restriction. + + This field is a member of `oneof`_ ``_feed_label``. + content_language (str): + Optional. Immutable. The two-letter ISO 639-1 language of + the items in the data source. + + ``feedLabel`` and ``contentLanguage`` must be either both + set or unset. 
The fields can only be unset for data sources + without file input. + + If set, the data source will only accept products matching + this combination. If unset, the data source will accept + produts without that restriction. + + This field is a member of `oneof`_ ``_content_language``. + """ + + feed_label: str = proto.Field( + proto.STRING, + number=4, + optional=True, + ) + content_language: str = proto.Field( + proto.STRING, + number=5, + optional=True, + ) + + +class LocalInventoryDataSource(proto.Message): + r"""The local inventory data source. + + Attributes: + feed_label (str): + Required. Immutable. The feed label of the offers to which + the local inventory is provided. + + Must be less than or equal to 20 uppercase letters (A-Z), + numbers (0-9), and dashes (-). + + See also `migration to feed + labels `__. + content_language (str): + Required. Immutable. The two-letter ISO 639-1 + language of the items to which the local + inventory is provided. + """ + + feed_label: str = proto.Field( + proto.STRING, + number=4, + ) + content_language: str = proto.Field( + proto.STRING, + number=5, + ) + + +class RegionalInventoryDataSource(proto.Message): + r"""The regional inventory data source. + + Attributes: + feed_label (str): + Required. Immutable. The feed label of the offers to which + the regional inventory is provided. + + Must be less than or equal to 20 uppercase letters (A-Z), + numbers (0-9), and dashes (-). + + See also `migration to feed + labels `__. + content_language (str): + Required. Immutable. The two-letter ISO 639-1 + language of the items to which the regional + inventory is provided. + """ + + feed_label: str = proto.Field( + proto.STRING, + number=4, + ) + content_language: str = proto.Field( + proto.STRING, + number=5, + ) + + +class PromotionDataSource(proto.Message): + r"""The promotion data source. + + Attributes: + target_country (str): + Required. Immutable. The target country used as part of the + unique identifier. Represented as a `CLDR territory + code `__. + + Promotions are only available in selected + `countries `__. + content_language (str): + Required. Immutable. The two-letter ISO 639-1 + language of the items in the data source. + """ + + target_country: str = proto.Field( + proto.STRING, + number=1, + ) + content_language: str = proto.Field( + proto.STRING, + number=2, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-shopping-merchant-datasources/google/shopping/merchant_datasources_v1beta/types/fileinputs.py b/packages/google-shopping-merchant-datasources/google/shopping/merchant_datasources_v1beta/types/fileinputs.py new file mode 100644 index 000000000000..1c76ffc1ba85 --- /dev/null +++ b/packages/google-shopping-merchant-datasources/google/shopping/merchant_datasources_v1beta/types/fileinputs.py @@ -0,0 +1,201 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
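Illustrative aside (editor's sketch, not part of the generated patch): the UpdateDataSourceRequest docstring above says that only the paths listed in ``update_mask`` are modified, that a masked field left unset in the request body is cleared on the server, and that the special "*" value for full replacement is not accepted. A minimal sketch of building such a request with the types from this diff; the resource name and display name are placeholder values:

```python
from google.protobuf import field_mask_pb2

from google.shopping import merchant_datasources_v1beta


def build_update_request() -> merchant_datasources_v1beta.UpdateDataSourceRequest:
    # Identify the data source by resource name and set only the fields
    # that should change.
    data_source = merchant_datasources_v1beta.DataSource(
        name="accounts/123/dataSources/456",  # placeholder resource name
        display_name="Updated primary feed",  # placeholder value
    )
    # List every field to update explicitly; "*" (full replacement) is not
    # supported. A path listed here but left unset above would be cleared.
    update_mask = field_mask_pb2.FieldMask(paths=["display_name"])
    return merchant_datasources_v1beta.UpdateDataSourceRequest(
        data_source=data_source,
        update_mask=update_mask,
    )
```

The resulting request is what DataSourcesServiceClient.update_data_source(request=...) expects, as shown in the generated update samples later in this patch.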
+# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +from google.type import dayofweek_pb2 # type: ignore +from google.type import timeofday_pb2 # type: ignore +import proto # type: ignore + +__protobuf__ = proto.module( + package="google.shopping.merchant.datasources.v1beta", + manifest={ + "FileInput", + }, +) + + +class FileInput(proto.Message): + r"""The data specific for file data sources. This field is empty + for other data source inputs. + + Attributes: + fetch_settings (google.shopping.merchant_datasources_v1beta.types.FileInput.FetchSettings): + Optional. Fetch details to deliver the data source. It + contains settings for ``FETCH`` and ``GOOGLE_SHEETS`` file + input types. The required fields vary based on the frequency + of fetching. + file_name (str): + Optional. The file name of the data source. Required for + ``UPLOAD`` file input type. + file_input_type (google.shopping.merchant_datasources_v1beta.types.FileInput.FileInputType): + Output only. The type of file input. + """ + + class FileInputType(proto.Enum): + r"""The method of file delivery. + + Values: + FILE_INPUT_TYPE_UNSPECIFIED (0): + File input type unspecified. + UPLOAD (1): + The file is uploaded through SFTP, Google + Cloud Storage or manually in the Merchant + Center. + FETCH (2): + The file is fetched from the configured + [fetch_uri][google.shopping.content.bundles.DataSources.FileInput.FetchSettings.fetch_uri]. + GOOGLE_SHEETS (3): + The file is fetched from Google Sheets specified in the + [fetch_uri][google.shopping.content.bundles.DataSources.FileInput.FetchSettings.fetch_uri]. + """ + FILE_INPUT_TYPE_UNSPECIFIED = 0 + UPLOAD = 1 + FETCH = 2 + GOOGLE_SHEETS = 3 + + class FetchSettings(proto.Message): + r"""Fetch details to deliver the data source. + + Attributes: + enabled (bool): + Optional. Enables or pauses the fetch + schedule. + day_of_month (int): + Optional. The day of the month when the data + source file should be fetched (1-31). This field + can only be set for monthly frequency. + time_of_day (google.type.timeofday_pb2.TimeOfDay): + Optional. The hour of the day when the data + source file should be fetched. Minutes and + seconds are not supported and will be ignored. + day_of_week (google.type.dayofweek_pb2.DayOfWeek): + Optional. The day of the week when the data + source file should be fetched. This field can + only be set for weekly frequency. + time_zone (str): + Optional. `Time zone `__ used for + schedule. UTC by default. For example, + "America/Los_Angeles". + frequency (google.shopping.merchant_datasources_v1beta.types.FileInput.FetchSettings.Frequency): + Required. The frequency describing fetch + schedule. + fetch_uri (str): + Optional. The URL where the data source file + can be fetched. Google Merchant Center supports + automatic scheduled uploads using the HTTP, + HTTPS or SFTP protocols, so the value will need + to be a valid link using one of those three + protocols. Immutable for Google Sheets files. + username (str): + Optional. An optional user name for [fetch + url][google.shopping.content.bundles.DataSources.FileInput.fetch_url]. + Used for `submitting data sources through + SFTP `__. + password (str): + Optional. An optional password for [fetch + url][google.shopping.content.bundles.DataSources.FileInput.fetch_url]. + Used for `submitting data sources through + SFTP `__. + """ + + class Frequency(proto.Enum): + r"""The required fields vary based on the frequency of fetching. 
For a + monthly fetch schedule, [day of + month][google.shopping.content.bundles.DataSources.FileInput.FetchSchedule.day_of_month] + and [hour of + day][google.shopping.content.bundles.DataSources.FileInput.FetchSchedule.time_of_day] + are required. For a weekly fetch schedule, [day of + week][google.shopping.content.bundles.DataSources.FileInput.FetchSchedule.day_of_week] + and [hour of + day][google.shopping.content.bundles.DataSources.FileInput.FetchSchedule.time_of_day] + are required. For a daily fetch schedule, only an [hour of + day][google.shopping.content.bundles.DataSources.FileInput.FetchSchedule.time_of_day] + is required. + + Values: + FREQUENCY_UNSPECIFIED (0): + Frequency unspecified. + FREQUENCY_DAILY (1): + The fetch happens every day. + FREQUENCY_WEEKLY (2): + The fetch happens every week. + FREQUENCY_MONTHLY (3): + The fetch happens every month. + """ + FREQUENCY_UNSPECIFIED = 0 + FREQUENCY_DAILY = 1 + FREQUENCY_WEEKLY = 2 + FREQUENCY_MONTHLY = 3 + + enabled: bool = proto.Field( + proto.BOOL, + number=1, + ) + day_of_month: int = proto.Field( + proto.INT32, + number=2, + ) + time_of_day: timeofday_pb2.TimeOfDay = proto.Field( + proto.MESSAGE, + number=3, + message=timeofday_pb2.TimeOfDay, + ) + day_of_week: dayofweek_pb2.DayOfWeek = proto.Field( + proto.ENUM, + number=4, + enum=dayofweek_pb2.DayOfWeek, + ) + time_zone: str = proto.Field( + proto.STRING, + number=5, + ) + frequency: "FileInput.FetchSettings.Frequency" = proto.Field( + proto.ENUM, + number=6, + enum="FileInput.FetchSettings.Frequency", + ) + fetch_uri: str = proto.Field( + proto.STRING, + number=7, + ) + username: str = proto.Field( + proto.STRING, + number=8, + ) + password: str = proto.Field( + proto.STRING, + number=9, + ) + + fetch_settings: FetchSettings = proto.Field( + proto.MESSAGE, + number=1, + message=FetchSettings, + ) + file_name: str = proto.Field( + proto.STRING, + number=2, + ) + file_input_type: FileInputType = proto.Field( + proto.ENUM, + number=3, + enum=FileInputType, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-shopping-merchant-datasources/mypy.ini b/packages/google-shopping-merchant-datasources/mypy.ini new file mode 100644 index 000000000000..574c5aed394b --- /dev/null +++ b/packages/google-shopping-merchant-datasources/mypy.ini @@ -0,0 +1,3 @@ +[mypy] +python_version = 3.7 +namespace_packages = True diff --git a/packages/google-shopping-merchant-datasources/noxfile.py b/packages/google-shopping-merchant-datasources/noxfile.py new file mode 100644 index 000000000000..1e6cd48d0529 --- /dev/null +++ b/packages/google-shopping-merchant-datasources/noxfile.py @@ -0,0 +1,428 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Generated by synthtool. DO NOT EDIT! 
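Illustrative aside (editor's sketch, not part of the generated patch): the FileInput.FetchSettings.Frequency docstring above spells out which fields each schedule needs: day_of_month plus time_of_day for monthly, day_of_week plus time_of_day for weekly, and time_of_day alone for daily. Below is a sketch of a weekly schedule; it assumes FileInput is re-exported at the versioned package level like the other generated types, and the fetch URI, day, hour, and time zone are placeholder choices:

```python
from google.type import dayofweek_pb2, timeofday_pb2

from google.shopping import merchant_datasources_v1beta

FetchSettings = merchant_datasources_v1beta.FileInput.FetchSettings

file_input = merchant_datasources_v1beta.FileInput(
    fetch_settings=FetchSettings(
        enabled=True,
        frequency=FetchSettings.Frequency.FREQUENCY_WEEKLY,
        # A weekly schedule requires the day of week and the hour of day;
        # minutes and seconds on TimeOfDay are ignored by the service.
        day_of_week=dayofweek_pb2.DayOfWeek.MONDAY,
        time_of_day=timeofday_pb2.TimeOfDay(hours=4),
        time_zone="America/Los_Angeles",
        # Placeholder URI; the docstring requires HTTP, HTTPS, or SFTP.
        fetch_uri="https://example.com/products.tsv",
    ),
)
```

A monthly schedule would swap day_of_week for day_of_month, and a daily schedule would set only frequency and time_of_day.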
+ +from __future__ import absolute_import + +import os +import pathlib +import re +import shutil +from typing import Dict, List +import warnings + +import nox + +BLACK_VERSION = "black[jupyter]==23.7.0" +ISORT_VERSION = "isort==5.11.0" + +LINT_PATHS = ["docs", "google", "tests", "noxfile.py", "setup.py"] + + +DEFAULT_PYTHON_VERSION = "3.10" + +UNIT_TEST_PYTHON_VERSIONS: List[str] = ["3.7", "3.8", "3.9", "3.10", "3.11", "3.12"] +UNIT_TEST_STANDARD_DEPENDENCIES = [ + "mock", + "asyncmock", + "pytest", + "pytest-cov", + "pytest-asyncio", +] +UNIT_TEST_EXTERNAL_DEPENDENCIES: List[str] = [] +UNIT_TEST_LOCAL_DEPENDENCIES: List[str] = [] +UNIT_TEST_DEPENDENCIES: List[str] = [] +UNIT_TEST_EXTRAS: List[str] = [] +UNIT_TEST_EXTRAS_BY_PYTHON: Dict[str, List[str]] = {} + +SYSTEM_TEST_PYTHON_VERSIONS: List[str] = ["3.8", "3.9", "3.10", "3.11", "3.12"] +SYSTEM_TEST_STANDARD_DEPENDENCIES = [ + "mock", + "pytest", + "google-cloud-testutils", +] +SYSTEM_TEST_EXTERNAL_DEPENDENCIES: List[str] = [] +SYSTEM_TEST_LOCAL_DEPENDENCIES: List[str] = [] +SYSTEM_TEST_DEPENDENCIES: List[str] = [] +SYSTEM_TEST_EXTRAS: List[str] = [] +SYSTEM_TEST_EXTRAS_BY_PYTHON: Dict[str, List[str]] = {} + +CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute() + +# 'docfx' is excluded since it only needs to run in 'docs-presubmit' +nox.options.sessions = [ + "unit", + "system", + "cover", + "lint", + "lint_setup_py", + "blacken", + "docs", +] + +# Error if a python version is missing +nox.options.error_on_missing_interpreters = True + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def lint(session): + """Run linters. + + Returns a failure if the linters find linting errors or sufficiently + serious code quality issues. + """ + session.install("flake8", BLACK_VERSION) + session.run( + "black", + "--check", + *LINT_PATHS, + ) + + session.run("flake8", "google", "tests") + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def blacken(session): + """Run black. Format code to uniform standard.""" + session.install(BLACK_VERSION) + session.run( + "black", + *LINT_PATHS, + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def format(session): + """ + Run isort to sort imports. Then run black + to format code to uniform standard. + """ + session.install(BLACK_VERSION, ISORT_VERSION) + # Use the --fss option to sort imports using strict alphabetical order. + # See https://pycqa.github.io/isort/docs/configuration/options.html#force-sort-within-sections + session.run( + "isort", + "--fss", + *LINT_PATHS, + ) + session.run( + "black", + *LINT_PATHS, + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def lint_setup_py(session): + """Verify that setup.py is valid (including RST check).""" + session.install("docutils", "pygments") + session.run("python", "setup.py", "check", "--restructuredtext", "--strict") + + +def install_unittest_dependencies(session, *constraints): + standard_deps = UNIT_TEST_STANDARD_DEPENDENCIES + UNIT_TEST_DEPENDENCIES + session.install(*standard_deps, *constraints) + + if UNIT_TEST_EXTERNAL_DEPENDENCIES: + warnings.warn( + "'unit_test_external_dependencies' is deprecated. 
Instead, please " + "use 'unit_test_dependencies' or 'unit_test_local_dependencies'.", + DeprecationWarning, + ) + session.install(*UNIT_TEST_EXTERNAL_DEPENDENCIES, *constraints) + + if UNIT_TEST_LOCAL_DEPENDENCIES: + session.install(*UNIT_TEST_LOCAL_DEPENDENCIES, *constraints) + + if UNIT_TEST_EXTRAS_BY_PYTHON: + extras = UNIT_TEST_EXTRAS_BY_PYTHON.get(session.python, []) + elif UNIT_TEST_EXTRAS: + extras = UNIT_TEST_EXTRAS + else: + extras = [] + + if extras: + session.install("-e", f".[{','.join(extras)}]", *constraints) + else: + session.install("-e", ".", *constraints) + + +def default(session): + # Install all test dependencies, then install this package in-place. + + constraints_path = str( + CURRENT_DIRECTORY / "testing" / f"constraints-{session.python}.txt" + ) + install_unittest_dependencies(session, "-c", constraints_path) + + # Run py.test against the unit tests. + session.run( + "py.test", + "--quiet", + f"--junitxml=unit_{session.python}_sponge_log.xml", + "--cov=google", + "--cov=tests/unit", + "--cov-append", + "--cov-config=.coveragerc", + "--cov-report=", + "--cov-fail-under=0", + os.path.join("tests", "unit"), + *session.posargs, + ) + + +@nox.session(python=UNIT_TEST_PYTHON_VERSIONS) +def unit(session): + """Run the unit test suite.""" + default(session) + + +def install_systemtest_dependencies(session, *constraints): + # Use pre-release gRPC for system tests. + # Exclude version 1.52.0rc1 which has a known issue. + # See https://github.com/grpc/grpc/issues/32163 + session.install("--pre", "grpcio!=1.52.0rc1") + + session.install(*SYSTEM_TEST_STANDARD_DEPENDENCIES, *constraints) + + if SYSTEM_TEST_EXTERNAL_DEPENDENCIES: + session.install(*SYSTEM_TEST_EXTERNAL_DEPENDENCIES, *constraints) + + if SYSTEM_TEST_LOCAL_DEPENDENCIES: + session.install("-e", *SYSTEM_TEST_LOCAL_DEPENDENCIES, *constraints) + + if SYSTEM_TEST_DEPENDENCIES: + session.install("-e", *SYSTEM_TEST_DEPENDENCIES, *constraints) + + if SYSTEM_TEST_EXTRAS_BY_PYTHON: + extras = SYSTEM_TEST_EXTRAS_BY_PYTHON.get(session.python, []) + elif SYSTEM_TEST_EXTRAS: + extras = SYSTEM_TEST_EXTRAS + else: + extras = [] + + if extras: + session.install("-e", f".[{','.join(extras)}]", *constraints) + else: + session.install("-e", ".", *constraints) + + +@nox.session(python=SYSTEM_TEST_PYTHON_VERSIONS) +def system(session): + """Run the system test suite.""" + constraints_path = str( + CURRENT_DIRECTORY / "testing" / f"constraints-{session.python}.txt" + ) + system_test_path = os.path.join("tests", "system.py") + system_test_folder_path = os.path.join("tests", "system") + + # Check the value of `RUN_SYSTEM_TESTS` env var. It defaults to true. + if os.environ.get("RUN_SYSTEM_TESTS", "true") == "false": + session.skip("RUN_SYSTEM_TESTS is set to false, skipping") + # Install pyopenssl for mTLS testing. + if os.environ.get("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") == "true": + session.install("pyopenssl") + + system_test_exists = os.path.exists(system_test_path) + system_test_folder_exists = os.path.exists(system_test_folder_path) + # Sanity check: only run tests if found. + if not system_test_exists and not system_test_folder_exists: + session.skip("System tests were not found") + + install_systemtest_dependencies(session, "-c", constraints_path) + + # Run py.test against the system tests. 
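Aside (editor's note, not part of the patch): when both tests/system.py and a tests/system/ directory exist, the two py.test invocations below write JUnit output to the same system_{session.python}_sponge_log.xml path, so the second run's report overwrites the first.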
+ if system_test_exists: + session.run( + "py.test", + "--quiet", + f"--junitxml=system_{session.python}_sponge_log.xml", + system_test_path, + *session.posargs, + ) + if system_test_folder_exists: + session.run( + "py.test", + "--quiet", + f"--junitxml=system_{session.python}_sponge_log.xml", + system_test_folder_path, + *session.posargs, + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def cover(session): + """Run the final coverage report. + + This outputs the coverage report aggregating coverage from the unit + test runs (not system test runs), and then erases coverage data. + """ + session.install("coverage", "pytest-cov") + session.run("coverage", "report", "--show-missing", "--fail-under=100") + + session.run("coverage", "erase") + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def docs(session): + """Build the docs for this library.""" + + session.install("-e", ".") + session.install( + # We need to pin to specific versions of the `sphinxcontrib-*` packages + # which still support sphinx 4.x. + # See https://github.com/googleapis/sphinx-docfx-yaml/issues/344 + # and https://github.com/googleapis/sphinx-docfx-yaml/issues/345. + "sphinxcontrib-applehelp==1.0.4", + "sphinxcontrib-devhelp==1.0.2", + "sphinxcontrib-htmlhelp==2.0.1", + "sphinxcontrib-qthelp==1.0.3", + "sphinxcontrib-serializinghtml==1.1.5", + "sphinx==4.5.0", + "alabaster", + "recommonmark", + ) + + shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) + session.run( + "sphinx-build", + "-W", # warnings as errors + "-T", # show full traceback on exception + "-N", # no colors + "-b", + "html", + "-d", + os.path.join("docs", "_build", "doctrees", ""), + os.path.join("docs", ""), + os.path.join("docs", "_build", "html", ""), + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def docfx(session): + """Build the docfx yaml files for this library.""" + + session.install("-e", ".") + session.install( + # We need to pin to specific versions of the `sphinxcontrib-*` packages + # which still support sphinx 4.x. + # See https://github.com/googleapis/sphinx-docfx-yaml/issues/344 + # and https://github.com/googleapis/sphinx-docfx-yaml/issues/345. 
+ "sphinxcontrib-applehelp==1.0.4", + "sphinxcontrib-devhelp==1.0.2", + "sphinxcontrib-htmlhelp==2.0.1", + "sphinxcontrib-qthelp==1.0.3", + "sphinxcontrib-serializinghtml==1.1.5", + "gcp-sphinx-docfx-yaml", + "alabaster", + "recommonmark", + ) + + shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) + session.run( + "sphinx-build", + "-T", # show full traceback on exception + "-N", # no colors + "-D", + ( + "extensions=sphinx.ext.autodoc," + "sphinx.ext.autosummary," + "docfx_yaml.extension," + "sphinx.ext.intersphinx," + "sphinx.ext.coverage," + "sphinx.ext.napoleon," + "sphinx.ext.todo," + "sphinx.ext.viewcode," + "recommonmark" + ), + "-b", + "html", + "-d", + os.path.join("docs", "_build", "doctrees", ""), + os.path.join("docs", ""), + os.path.join("docs", "_build", "html", ""), + ) + + +@nox.session(python="3.12") +def prerelease_deps(session): + """Run all tests with prerelease versions of dependencies installed.""" + + # Install all dependencies + session.install("-e", ".[all, tests, tracing]") + unit_deps_all = UNIT_TEST_STANDARD_DEPENDENCIES + UNIT_TEST_EXTERNAL_DEPENDENCIES + session.install(*unit_deps_all) + system_deps_all = ( + SYSTEM_TEST_STANDARD_DEPENDENCIES + + SYSTEM_TEST_EXTERNAL_DEPENDENCIES + + SYSTEM_TEST_EXTRAS + ) + session.install(*system_deps_all) + + # Because we test minimum dependency versions on the minimum Python + # version, the first version we test with in the unit tests sessions has a + # constraints file containing all dependencies and extras. + with open( + CURRENT_DIRECTORY + / "testing" + / f"constraints-{UNIT_TEST_PYTHON_VERSIONS[0]}.txt", + encoding="utf-8", + ) as constraints_file: + constraints_text = constraints_file.read() + + # Ignore leading whitespace and comment lines. + constraints_deps = [ + match.group(1) + for match in re.finditer( + r"^\s*(\S+)(?===\S+)", constraints_text, flags=re.MULTILINE + ) + ] + + session.install(*constraints_deps) + + prerel_deps = [ + "protobuf", + # dependency of grpc + "six", + "googleapis-common-protos", + # Exclude version 1.52.0rc1 which has a known issue. 
See https://github.com/grpc/grpc/issues/32163 + "grpcio!=1.52.0rc1", + "grpcio-status", + "google-api-core", + "google-auth", + "proto-plus", + "google-cloud-testutils", + # dependencies of google-cloud-testutils" + "click", + ] + + for dep in prerel_deps: + session.install("--pre", "--no-deps", "--upgrade", dep) + + # Remaining dependencies + other_deps = [ + "requests", + ] + session.install(*other_deps) + + # Print out prerelease package versions + session.run( + "python", "-c", "import google.protobuf; print(google.protobuf.__version__)" + ) + session.run("python", "-c", "import grpc; print(grpc.__version__)") + session.run("python", "-c", "import google.auth; print(google.auth.__version__)") + + session.run("py.test", "tests/unit") diff --git a/packages/google-shopping-merchant-datasources/samples/generated_samples/merchantapi_v1beta_generated_data_sources_service_create_data_source_async.py b/packages/google-shopping-merchant-datasources/samples/generated_samples/merchantapi_v1beta_generated_data_sources_service_create_data_source_async.py new file mode 100644 index 000000000000..3a58c16a898f --- /dev/null +++ b/packages/google-shopping-merchant-datasources/samples/generated_samples/merchantapi_v1beta_generated_data_sources_service_create_data_source_async.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateDataSource +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-shopping-merchant-datasources + + +# [START merchantapi_v1beta_generated_DataSourcesService_CreateDataSource_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.shopping import merchant_datasources_v1beta + + +async def sample_create_data_source(): + # Create a client + client = merchant_datasources_v1beta.DataSourcesServiceAsyncClient() + + # Initialize request argument(s) + data_source = merchant_datasources_v1beta.DataSource() + data_source.primary_product_data_source.channel = "PRODUCTS" + data_source.display_name = "display_name_value" + + request = merchant_datasources_v1beta.CreateDataSourceRequest( + parent="parent_value", + data_source=data_source, + ) + + # Make the request + response = await client.create_data_source(request=request) + + # Handle the response + print(response) + +# [END merchantapi_v1beta_generated_DataSourcesService_CreateDataSource_async] diff --git a/packages/google-shopping-merchant-datasources/samples/generated_samples/merchantapi_v1beta_generated_data_sources_service_create_data_source_sync.py b/packages/google-shopping-merchant-datasources/samples/generated_samples/merchantapi_v1beta_generated_data_sources_service_create_data_source_sync.py new file mode 100644 index 000000000000..e83b38fa6921 --- /dev/null +++ b/packages/google-shopping-merchant-datasources/samples/generated_samples/merchantapi_v1beta_generated_data_sources_service_create_data_source_sync.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateDataSource +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-shopping-merchant-datasources + + +# [START merchantapi_v1beta_generated_DataSourcesService_CreateDataSource_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.shopping import merchant_datasources_v1beta + + +def sample_create_data_source(): + # Create a client + client = merchant_datasources_v1beta.DataSourcesServiceClient() + + # Initialize request argument(s) + data_source = merchant_datasources_v1beta.DataSource() + data_source.primary_product_data_source.channel = "PRODUCTS" + data_source.display_name = "display_name_value" + + request = merchant_datasources_v1beta.CreateDataSourceRequest( + parent="parent_value", + data_source=data_source, + ) + + # Make the request + response = client.create_data_source(request=request) + + # Handle the response + print(response) + +# [END merchantapi_v1beta_generated_DataSourcesService_CreateDataSource_sync] diff --git a/packages/google-shopping-merchant-datasources/samples/generated_samples/merchantapi_v1beta_generated_data_sources_service_delete_data_source_async.py b/packages/google-shopping-merchant-datasources/samples/generated_samples/merchantapi_v1beta_generated_data_sources_service_delete_data_source_async.py new file mode 100644 index 000000000000..3acdea1ade71 --- /dev/null +++ b/packages/google-shopping-merchant-datasources/samples/generated_samples/merchantapi_v1beta_generated_data_sources_service_delete_data_source_async.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteDataSource +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-shopping-merchant-datasources + + +# [START merchantapi_v1beta_generated_DataSourcesService_DeleteDataSource_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.shopping import merchant_datasources_v1beta + + +async def sample_delete_data_source(): + # Create a client + client = merchant_datasources_v1beta.DataSourcesServiceAsyncClient() + + # Initialize request argument(s) + request = merchant_datasources_v1beta.DeleteDataSourceRequest( + name="name_value", + ) + + # Make the request + await client.delete_data_source(request=request) + + +# [END merchantapi_v1beta_generated_DataSourcesService_DeleteDataSource_async] diff --git a/packages/google-shopping-merchant-datasources/samples/generated_samples/merchantapi_v1beta_generated_data_sources_service_delete_data_source_sync.py b/packages/google-shopping-merchant-datasources/samples/generated_samples/merchantapi_v1beta_generated_data_sources_service_delete_data_source_sync.py new file mode 100644 index 000000000000..443f82260fcd --- /dev/null +++ b/packages/google-shopping-merchant-datasources/samples/generated_samples/merchantapi_v1beta_generated_data_sources_service_delete_data_source_sync.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteDataSource +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-shopping-merchant-datasources + + +# [START merchantapi_v1beta_generated_DataSourcesService_DeleteDataSource_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.shopping import merchant_datasources_v1beta + + +def sample_delete_data_source(): + # Create a client + client = merchant_datasources_v1beta.DataSourcesServiceClient() + + # Initialize request argument(s) + request = merchant_datasources_v1beta.DeleteDataSourceRequest( + name="name_value", + ) + + # Make the request + client.delete_data_source(request=request) + + +# [END merchantapi_v1beta_generated_DataSourcesService_DeleteDataSource_sync] diff --git a/packages/google-shopping-merchant-datasources/samples/generated_samples/merchantapi_v1beta_generated_data_sources_service_fetch_data_source_async.py b/packages/google-shopping-merchant-datasources/samples/generated_samples/merchantapi_v1beta_generated_data_sources_service_fetch_data_source_async.py new file mode 100644 index 000000000000..14df43d8bd47 --- /dev/null +++ b/packages/google-shopping-merchant-datasources/samples/generated_samples/merchantapi_v1beta_generated_data_sources_service_fetch_data_source_async.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for FetchDataSource +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-shopping-merchant-datasources + + +# [START merchantapi_v1beta_generated_DataSourcesService_FetchDataSource_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.shopping import merchant_datasources_v1beta + + +async def sample_fetch_data_source(): + # Create a client + client = merchant_datasources_v1beta.DataSourcesServiceAsyncClient() + + # Initialize request argument(s) + request = merchant_datasources_v1beta.FetchDataSourceRequest( + name="name_value", + ) + + # Make the request + await client.fetch_data_source(request=request) + + +# [END merchantapi_v1beta_generated_DataSourcesService_FetchDataSource_async] diff --git a/packages/google-shopping-merchant-datasources/samples/generated_samples/merchantapi_v1beta_generated_data_sources_service_fetch_data_source_sync.py b/packages/google-shopping-merchant-datasources/samples/generated_samples/merchantapi_v1beta_generated_data_sources_service_fetch_data_source_sync.py new file mode 100644 index 000000000000..644b35470074 --- /dev/null +++ b/packages/google-shopping-merchant-datasources/samples/generated_samples/merchantapi_v1beta_generated_data_sources_service_fetch_data_source_sync.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for FetchDataSource +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-shopping-merchant-datasources + + +# [START merchantapi_v1beta_generated_DataSourcesService_FetchDataSource_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.shopping import merchant_datasources_v1beta + + +def sample_fetch_data_source(): + # Create a client + client = merchant_datasources_v1beta.DataSourcesServiceClient() + + # Initialize request argument(s) + request = merchant_datasources_v1beta.FetchDataSourceRequest( + name="name_value", + ) + + # Make the request + client.fetch_data_source(request=request) + + +# [END merchantapi_v1beta_generated_DataSourcesService_FetchDataSource_sync] diff --git a/packages/google-shopping-merchant-datasources/samples/generated_samples/merchantapi_v1beta_generated_data_sources_service_get_data_source_async.py b/packages/google-shopping-merchant-datasources/samples/generated_samples/merchantapi_v1beta_generated_data_sources_service_get_data_source_async.py new file mode 100644 index 000000000000..091d08e5fbd5 --- /dev/null +++ b/packages/google-shopping-merchant-datasources/samples/generated_samples/merchantapi_v1beta_generated_data_sources_service_get_data_source_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetDataSource +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-shopping-merchant-datasources + + +# [START merchantapi_v1beta_generated_DataSourcesService_GetDataSource_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.shopping import merchant_datasources_v1beta + + +async def sample_get_data_source(): + # Create a client + client = merchant_datasources_v1beta.DataSourcesServiceAsyncClient() + + # Initialize request argument(s) + request = merchant_datasources_v1beta.GetDataSourceRequest( + name="name_value", + ) + + # Make the request + response = await client.get_data_source(request=request) + + # Handle the response + print(response) + +# [END merchantapi_v1beta_generated_DataSourcesService_GetDataSource_async] diff --git a/packages/google-shopping-merchant-datasources/samples/generated_samples/merchantapi_v1beta_generated_data_sources_service_get_data_source_sync.py b/packages/google-shopping-merchant-datasources/samples/generated_samples/merchantapi_v1beta_generated_data_sources_service_get_data_source_sync.py new file mode 100644 index 000000000000..fa9c6c87bc15 --- /dev/null +++ b/packages/google-shopping-merchant-datasources/samples/generated_samples/merchantapi_v1beta_generated_data_sources_service_get_data_source_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetDataSource +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-shopping-merchant-datasources + + +# [START merchantapi_v1beta_generated_DataSourcesService_GetDataSource_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.shopping import merchant_datasources_v1beta + + +def sample_get_data_source(): + # Create a client + client = merchant_datasources_v1beta.DataSourcesServiceClient() + + # Initialize request argument(s) + request = merchant_datasources_v1beta.GetDataSourceRequest( + name="name_value", + ) + + # Make the request + response = client.get_data_source(request=request) + + # Handle the response + print(response) + +# [END merchantapi_v1beta_generated_DataSourcesService_GetDataSource_sync] diff --git a/packages/google-shopping-merchant-datasources/samples/generated_samples/merchantapi_v1beta_generated_data_sources_service_list_data_sources_async.py b/packages/google-shopping-merchant-datasources/samples/generated_samples/merchantapi_v1beta_generated_data_sources_service_list_data_sources_async.py new file mode 100644 index 000000000000..78339cc39b35 --- /dev/null +++ b/packages/google-shopping-merchant-datasources/samples/generated_samples/merchantapi_v1beta_generated_data_sources_service_list_data_sources_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListDataSources +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-shopping-merchant-datasources + + +# [START merchantapi_v1beta_generated_DataSourcesService_ListDataSources_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.shopping import merchant_datasources_v1beta + + +async def sample_list_data_sources(): + # Create a client + client = merchant_datasources_v1beta.DataSourcesServiceAsyncClient() + + # Initialize request argument(s) + request = merchant_datasources_v1beta.ListDataSourcesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_data_sources(request=request) + + # Handle the response + async for response in page_result: + print(response) + +# [END merchantapi_v1beta_generated_DataSourcesService_ListDataSources_async] diff --git a/packages/google-shopping-merchant-datasources/samples/generated_samples/merchantapi_v1beta_generated_data_sources_service_list_data_sources_sync.py b/packages/google-shopping-merchant-datasources/samples/generated_samples/merchantapi_v1beta_generated_data_sources_service_list_data_sources_sync.py new file mode 100644 index 000000000000..46febb24d2d8 --- /dev/null +++ b/packages/google-shopping-merchant-datasources/samples/generated_samples/merchantapi_v1beta_generated_data_sources_service_list_data_sources_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListDataSources +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-shopping-merchant-datasources + + +# [START merchantapi_v1beta_generated_DataSourcesService_ListDataSources_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.shopping import merchant_datasources_v1beta + + +def sample_list_data_sources(): + # Create a client + client = merchant_datasources_v1beta.DataSourcesServiceClient() + + # Initialize request argument(s) + request = merchant_datasources_v1beta.ListDataSourcesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_data_sources(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END merchantapi_v1beta_generated_DataSourcesService_ListDataSources_sync] diff --git a/packages/google-shopping-merchant-datasources/samples/generated_samples/merchantapi_v1beta_generated_data_sources_service_update_data_source_async.py b/packages/google-shopping-merchant-datasources/samples/generated_samples/merchantapi_v1beta_generated_data_sources_service_update_data_source_async.py new file mode 100644 index 000000000000..a727ef72c51e --- /dev/null +++ b/packages/google-shopping-merchant-datasources/samples/generated_samples/merchantapi_v1beta_generated_data_sources_service_update_data_source_async.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateDataSource +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-shopping-merchant-datasources + + +# [START merchantapi_v1beta_generated_DataSourcesService_UpdateDataSource_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.shopping import merchant_datasources_v1beta + + +async def sample_update_data_source(): + # Create a client + client = merchant_datasources_v1beta.DataSourcesServiceAsyncClient() + + # Initialize request argument(s) + data_source = merchant_datasources_v1beta.DataSource() + data_source.primary_product_data_source.channel = "PRODUCTS" + data_source.display_name = "display_name_value" + + request = merchant_datasources_v1beta.UpdateDataSourceRequest( + data_source=data_source, + ) + + # Make the request + response = await client.update_data_source(request=request) + + # Handle the response + print(response) + +# [END merchantapi_v1beta_generated_DataSourcesService_UpdateDataSource_async] diff --git a/packages/google-shopping-merchant-datasources/samples/generated_samples/merchantapi_v1beta_generated_data_sources_service_update_data_source_sync.py b/packages/google-shopping-merchant-datasources/samples/generated_samples/merchantapi_v1beta_generated_data_sources_service_update_data_source_sync.py new file mode 100644 index 000000000000..72d7760192c6 --- /dev/null +++ b/packages/google-shopping-merchant-datasources/samples/generated_samples/merchantapi_v1beta_generated_data_sources_service_update_data_source_sync.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateDataSource +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-shopping-merchant-datasources + + +# [START merchantapi_v1beta_generated_DataSourcesService_UpdateDataSource_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.shopping import merchant_datasources_v1beta + + +def sample_update_data_source(): + # Create a client + client = merchant_datasources_v1beta.DataSourcesServiceClient() + + # Initialize request argument(s) + data_source = merchant_datasources_v1beta.DataSource() + data_source.primary_product_data_source.channel = "PRODUCTS" + data_source.display_name = "display_name_value" + + request = merchant_datasources_v1beta.UpdateDataSourceRequest( + data_source=data_source, + ) + + # Make the request + response = client.update_data_source(request=request) + + # Handle the response + print(response) + +# [END merchantapi_v1beta_generated_DataSourcesService_UpdateDataSource_sync] diff --git a/packages/google-shopping-merchant-datasources/samples/generated_samples/snippet_metadata_google.shopping.merchant.datasources.v1beta.json b/packages/google-shopping-merchant-datasources/samples/generated_samples/snippet_metadata_google.shopping.merchant.datasources.v1beta.json new file mode 100644 index 000000000000..ee381d03839d --- /dev/null +++ b/packages/google-shopping-merchant-datasources/samples/generated_samples/snippet_metadata_google.shopping.merchant.datasources.v1beta.json @@ -0,0 +1,977 @@ +{ + "clientLibrary": { + "apis": [ + { + "id": "google.shopping.merchant.datasources.v1beta", + "version": "v1beta" + } + ], + "language": "PYTHON", + "name": "google-shopping-merchant-datasources", + "version": "0.1.0" + }, + "snippets": [ + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.shopping.merchant_datasources_v1beta.DataSourcesServiceAsyncClient", + "shortName": "DataSourcesServiceAsyncClient" + }, + "fullName": "google.shopping.merchant_datasources_v1beta.DataSourcesServiceAsyncClient.create_data_source", + "method": { + "fullName": "google.shopping.merchant.datasources.v1beta.DataSourcesService.CreateDataSource", + "service": { + "fullName": "google.shopping.merchant.datasources.v1beta.DataSourcesService", + "shortName": "DataSourcesService" + }, + "shortName": "CreateDataSource" + }, + "parameters": [ + { + "name": "request", + "type": "google.shopping.merchant_datasources_v1beta.types.CreateDataSourceRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "data_source", + "type": "google.shopping.merchant_datasources_v1beta.types.DataSource" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.shopping.merchant_datasources_v1beta.types.DataSource", + "shortName": "create_data_source" + }, + "description": "Sample for CreateDataSource", + "file": "merchantapi_v1beta_generated_data_sources_service_create_data_source_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "merchantapi_v1beta_generated_DataSourcesService_CreateDataSource_async", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 50, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 51, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": 
"RESPONSE_HANDLING" + } + ], + "title": "merchantapi_v1beta_generated_data_sources_service_create_data_source_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.shopping.merchant_datasources_v1beta.DataSourcesServiceClient", + "shortName": "DataSourcesServiceClient" + }, + "fullName": "google.shopping.merchant_datasources_v1beta.DataSourcesServiceClient.create_data_source", + "method": { + "fullName": "google.shopping.merchant.datasources.v1beta.DataSourcesService.CreateDataSource", + "service": { + "fullName": "google.shopping.merchant.datasources.v1beta.DataSourcesService", + "shortName": "DataSourcesService" + }, + "shortName": "CreateDataSource" + }, + "parameters": [ + { + "name": "request", + "type": "google.shopping.merchant_datasources_v1beta.types.CreateDataSourceRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "data_source", + "type": "google.shopping.merchant_datasources_v1beta.types.DataSource" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.shopping.merchant_datasources_v1beta.types.DataSource", + "shortName": "create_data_source" + }, + "description": "Sample for CreateDataSource", + "file": "merchantapi_v1beta_generated_data_sources_service_create_data_source_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "merchantapi_v1beta_generated_DataSourcesService_CreateDataSource_sync", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 50, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 51, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" + } + ], + "title": "merchantapi_v1beta_generated_data_sources_service_create_data_source_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.shopping.merchant_datasources_v1beta.DataSourcesServiceAsyncClient", + "shortName": "DataSourcesServiceAsyncClient" + }, + "fullName": "google.shopping.merchant_datasources_v1beta.DataSourcesServiceAsyncClient.delete_data_source", + "method": { + "fullName": "google.shopping.merchant.datasources.v1beta.DataSourcesService.DeleteDataSource", + "service": { + "fullName": "google.shopping.merchant.datasources.v1beta.DataSourcesService", + "shortName": "DataSourcesService" + }, + "shortName": "DeleteDataSource" + }, + "parameters": [ + { + "name": "request", + "type": "google.shopping.merchant_datasources_v1beta.types.DeleteDataSourceRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "shortName": "delete_data_source" + }, + "description": "Sample for DeleteDataSource", + "file": "merchantapi_v1beta_generated_data_sources_service_delete_data_source_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "merchantapi_v1beta_generated_DataSourcesService_DeleteDataSource_async", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": 
"CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "merchantapi_v1beta_generated_data_sources_service_delete_data_source_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.shopping.merchant_datasources_v1beta.DataSourcesServiceClient", + "shortName": "DataSourcesServiceClient" + }, + "fullName": "google.shopping.merchant_datasources_v1beta.DataSourcesServiceClient.delete_data_source", + "method": { + "fullName": "google.shopping.merchant.datasources.v1beta.DataSourcesService.DeleteDataSource", + "service": { + "fullName": "google.shopping.merchant.datasources.v1beta.DataSourcesService", + "shortName": "DataSourcesService" + }, + "shortName": "DeleteDataSource" + }, + "parameters": [ + { + "name": "request", + "type": "google.shopping.merchant_datasources_v1beta.types.DeleteDataSourceRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "shortName": "delete_data_source" + }, + "description": "Sample for DeleteDataSource", + "file": "merchantapi_v1beta_generated_data_sources_service_delete_data_source_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "merchantapi_v1beta_generated_DataSourcesService_DeleteDataSource_sync", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "merchantapi_v1beta_generated_data_sources_service_delete_data_source_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.shopping.merchant_datasources_v1beta.DataSourcesServiceAsyncClient", + "shortName": "DataSourcesServiceAsyncClient" + }, + "fullName": "google.shopping.merchant_datasources_v1beta.DataSourcesServiceAsyncClient.fetch_data_source", + "method": { + "fullName": "google.shopping.merchant.datasources.v1beta.DataSourcesService.FetchDataSource", + "service": { + "fullName": "google.shopping.merchant.datasources.v1beta.DataSourcesService", + "shortName": "DataSourcesService" + }, + "shortName": "FetchDataSource" + }, + "parameters": [ + { + "name": "request", + "type": "google.shopping.merchant_datasources_v1beta.types.FetchDataSourceRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "shortName": "fetch_data_source" + }, + "description": "Sample for FetchDataSource", + "file": "merchantapi_v1beta_generated_data_sources_service_fetch_data_source_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "merchantapi_v1beta_generated_DataSourcesService_FetchDataSource_async", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": 
"REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "merchantapi_v1beta_generated_data_sources_service_fetch_data_source_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.shopping.merchant_datasources_v1beta.DataSourcesServiceClient", + "shortName": "DataSourcesServiceClient" + }, + "fullName": "google.shopping.merchant_datasources_v1beta.DataSourcesServiceClient.fetch_data_source", + "method": { + "fullName": "google.shopping.merchant.datasources.v1beta.DataSourcesService.FetchDataSource", + "service": { + "fullName": "google.shopping.merchant.datasources.v1beta.DataSourcesService", + "shortName": "DataSourcesService" + }, + "shortName": "FetchDataSource" + }, + "parameters": [ + { + "name": "request", + "type": "google.shopping.merchant_datasources_v1beta.types.FetchDataSourceRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "shortName": "fetch_data_source" + }, + "description": "Sample for FetchDataSource", + "file": "merchantapi_v1beta_generated_data_sources_service_fetch_data_source_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "merchantapi_v1beta_generated_DataSourcesService_FetchDataSource_sync", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "merchantapi_v1beta_generated_data_sources_service_fetch_data_source_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.shopping.merchant_datasources_v1beta.DataSourcesServiceAsyncClient", + "shortName": "DataSourcesServiceAsyncClient" + }, + "fullName": "google.shopping.merchant_datasources_v1beta.DataSourcesServiceAsyncClient.get_data_source", + "method": { + "fullName": "google.shopping.merchant.datasources.v1beta.DataSourcesService.GetDataSource", + "service": { + "fullName": "google.shopping.merchant.datasources.v1beta.DataSourcesService", + "shortName": "DataSourcesService" + }, + "shortName": "GetDataSource" + }, + "parameters": [ + { + "name": "request", + "type": "google.shopping.merchant_datasources_v1beta.types.GetDataSourceRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.shopping.merchant_datasources_v1beta.types.DataSource", + "shortName": "get_data_source" + }, + "description": "Sample for GetDataSource", + "file": "merchantapi_v1beta_generated_data_sources_service_get_data_source_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "merchantapi_v1beta_generated_DataSourcesService_GetDataSource_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + 
{ + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "merchantapi_v1beta_generated_data_sources_service_get_data_source_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.shopping.merchant_datasources_v1beta.DataSourcesServiceClient", + "shortName": "DataSourcesServiceClient" + }, + "fullName": "google.shopping.merchant_datasources_v1beta.DataSourcesServiceClient.get_data_source", + "method": { + "fullName": "google.shopping.merchant.datasources.v1beta.DataSourcesService.GetDataSource", + "service": { + "fullName": "google.shopping.merchant.datasources.v1beta.DataSourcesService", + "shortName": "DataSourcesService" + }, + "shortName": "GetDataSource" + }, + "parameters": [ + { + "name": "request", + "type": "google.shopping.merchant_datasources_v1beta.types.GetDataSourceRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.shopping.merchant_datasources_v1beta.types.DataSource", + "shortName": "get_data_source" + }, + "description": "Sample for GetDataSource", + "file": "merchantapi_v1beta_generated_data_sources_service_get_data_source_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "merchantapi_v1beta_generated_DataSourcesService_GetDataSource_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "merchantapi_v1beta_generated_data_sources_service_get_data_source_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.shopping.merchant_datasources_v1beta.DataSourcesServiceAsyncClient", + "shortName": "DataSourcesServiceAsyncClient" + }, + "fullName": "google.shopping.merchant_datasources_v1beta.DataSourcesServiceAsyncClient.list_data_sources", + "method": { + "fullName": "google.shopping.merchant.datasources.v1beta.DataSourcesService.ListDataSources", + "service": { + "fullName": "google.shopping.merchant.datasources.v1beta.DataSourcesService", + "shortName": "DataSourcesService" + }, + "shortName": "ListDataSources" + }, + "parameters": [ + { + "name": "request", + "type": "google.shopping.merchant_datasources_v1beta.types.ListDataSourcesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.shopping.merchant_datasources_v1beta.services.data_sources_service.pagers.ListDataSourcesAsyncPager", + "shortName": "list_data_sources" + }, + "description": "Sample for ListDataSources", + "file": "merchantapi_v1beta_generated_data_sources_service_list_data_sources_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "merchantapi_v1beta_generated_DataSourcesService_ListDataSources_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + 
"end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "merchantapi_v1beta_generated_data_sources_service_list_data_sources_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.shopping.merchant_datasources_v1beta.DataSourcesServiceClient", + "shortName": "DataSourcesServiceClient" + }, + "fullName": "google.shopping.merchant_datasources_v1beta.DataSourcesServiceClient.list_data_sources", + "method": { + "fullName": "google.shopping.merchant.datasources.v1beta.DataSourcesService.ListDataSources", + "service": { + "fullName": "google.shopping.merchant.datasources.v1beta.DataSourcesService", + "shortName": "DataSourcesService" + }, + "shortName": "ListDataSources" + }, + "parameters": [ + { + "name": "request", + "type": "google.shopping.merchant_datasources_v1beta.types.ListDataSourcesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.shopping.merchant_datasources_v1beta.services.data_sources_service.pagers.ListDataSourcesPager", + "shortName": "list_data_sources" + }, + "description": "Sample for ListDataSources", + "file": "merchantapi_v1beta_generated_data_sources_service_list_data_sources_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "merchantapi_v1beta_generated_DataSourcesService_ListDataSources_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "merchantapi_v1beta_generated_data_sources_service_list_data_sources_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.shopping.merchant_datasources_v1beta.DataSourcesServiceAsyncClient", + "shortName": "DataSourcesServiceAsyncClient" + }, + "fullName": "google.shopping.merchant_datasources_v1beta.DataSourcesServiceAsyncClient.update_data_source", + "method": { + "fullName": "google.shopping.merchant.datasources.v1beta.DataSourcesService.UpdateDataSource", + "service": { + "fullName": "google.shopping.merchant.datasources.v1beta.DataSourcesService", + "shortName": "DataSourcesService" + }, + "shortName": "UpdateDataSource" + }, + "parameters": [ + { + "name": "request", + "type": "google.shopping.merchant_datasources_v1beta.types.UpdateDataSourceRequest" + }, + { + "name": "data_source", + "type": "google.shopping.merchant_datasources_v1beta.types.DataSource" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.shopping.merchant_datasources_v1beta.types.DataSource", + "shortName": 
"update_data_source" + }, + "description": "Sample for UpdateDataSource", + "file": "merchantapi_v1beta_generated_data_sources_service_update_data_source_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "merchantapi_v1beta_generated_DataSourcesService_UpdateDataSource_async", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 49, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 50, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "merchantapi_v1beta_generated_data_sources_service_update_data_source_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.shopping.merchant_datasources_v1beta.DataSourcesServiceClient", + "shortName": "DataSourcesServiceClient" + }, + "fullName": "google.shopping.merchant_datasources_v1beta.DataSourcesServiceClient.update_data_source", + "method": { + "fullName": "google.shopping.merchant.datasources.v1beta.DataSourcesService.UpdateDataSource", + "service": { + "fullName": "google.shopping.merchant.datasources.v1beta.DataSourcesService", + "shortName": "DataSourcesService" + }, + "shortName": "UpdateDataSource" + }, + "parameters": [ + { + "name": "request", + "type": "google.shopping.merchant_datasources_v1beta.types.UpdateDataSourceRequest" + }, + { + "name": "data_source", + "type": "google.shopping.merchant_datasources_v1beta.types.DataSource" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.shopping.merchant_datasources_v1beta.types.DataSource", + "shortName": "update_data_source" + }, + "description": "Sample for UpdateDataSource", + "file": "merchantapi_v1beta_generated_data_sources_service_update_data_source_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "merchantapi_v1beta_generated_DataSourcesService_UpdateDataSource_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 49, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 50, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "merchantapi_v1beta_generated_data_sources_service_update_data_source_sync.py" + } + ] +} diff --git a/packages/google-shopping-merchant-datasources/scripts/decrypt-secrets.sh b/packages/google-shopping-merchant-datasources/scripts/decrypt-secrets.sh new file mode 100755 index 000000000000..0018b421ddf8 --- /dev/null +++ b/packages/google-shopping-merchant-datasources/scripts/decrypt-secrets.sh @@ -0,0 +1,46 @@ +#!/bin/bash + +# Copyright 2023 Google LLC All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )" +ROOT=$( dirname "$DIR" ) + +# Work from the project root. +cd $ROOT + +# Prevent it from overriding files. +# We recommend that sample authors use their own service account files and cloud project. +# In that case, they are supposed to prepare these files by themselves. +if [[ -f "testing/test-env.sh" ]] || \ + [[ -f "testing/service-account.json" ]] || \ + [[ -f "testing/client-secrets.json" ]]; then + echo "One or more target files exist, aborting." + exit 1 +fi + +# Use SECRET_MANAGER_PROJECT if set, fallback to cloud-devrel-kokoro-resources. +PROJECT_ID="${SECRET_MANAGER_PROJECT:-cloud-devrel-kokoro-resources}" + +gcloud secrets versions access latest --secret="python-docs-samples-test-env" \ + --project="${PROJECT_ID}" \ + > testing/test-env.sh +gcloud secrets versions access latest \ + --secret="python-docs-samples-service-account" \ + --project="${PROJECT_ID}" \ + > testing/service-account.json +gcloud secrets versions access latest \ + --secret="python-docs-samples-client-secrets" \ + --project="${PROJECT_ID}" \ + > testing/client-secrets.json diff --git a/packages/google-shopping-merchant-datasources/scripts/fixup_merchant_datasources_v1beta_keywords.py b/packages/google-shopping-merchant-datasources/scripts/fixup_merchant_datasources_v1beta_keywords.py new file mode 100644 index 000000000000..77f6b0db701f --- /dev/null +++ b/packages/google-shopping-merchant-datasources/scripts/fixup_merchant_datasources_v1beta_keywords.py @@ -0,0 +1,181 @@ +#! /usr/bin/env python3 +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
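For orientation before the body of this fixup script: it walks a directory of Python sources and, using libcst, folds flattened positional arguments to the generated client methods into a single `request` dict, while control parameters (`retry`, `timeout`, `metadata`) stay as keywords. A minimal hedged sketch of the intended before/after follows; `client`, `parent`, and `data_source` are assumed names and the stub class exists only so the snippet runs on its own — none of this is part of the script itself.

```python
# Illustrative stub only -- the real DataSourcesServiceClient is not needed
# to show the shape of the rewrite this fixup script performs.
class _StubClient:
    def create_data_source(self, request=None, retry=None, timeout=None, metadata=()):
        return request


client = _StubClient()
parent = "accounts/123"                     # placeholder resource name (assumed)
data_source = {"display_name": "example"}   # placeholder payload (assumed)

# Before the fixup (older flattened call style):
#     client.create_data_source(parent, data_source, timeout=60.0)
# After the fixup (what the transformer emits; retry/timeout/metadata stay keywords):
created = client.create_data_source(
    request={"parent": parent, "data_source": data_source},
    timeout=60.0,
)
assert created == {"parent": parent, "data_source": data_source}
```

The METHOD_TO_PARAMS table in the script below is what tells the transformer that `create_data_source` flattens to `('parent', 'data_source')`, and CTRL_PARAMS is what keeps `retry`, `timeout`, and `metadata` out of the request dict.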
+# +import argparse +import os +import libcst as cst +import pathlib +import sys +from typing import (Any, Callable, Dict, List, Sequence, Tuple) + + +def partition( + predicate: Callable[[Any], bool], + iterator: Sequence[Any] +) -> Tuple[List[Any], List[Any]]: + """A stable, out-of-place partition.""" + results = ([], []) + + for i in iterator: + results[int(predicate(i))].append(i) + + # Returns trueList, falseList + return results[1], results[0] + + +class merchant_datasourcesCallTransformer(cst.CSTTransformer): + CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata') + METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { + 'create_data_source': ('parent', 'data_source', ), + 'delete_data_source': ('name', ), + 'fetch_data_source': ('name', ), + 'get_data_source': ('name', ), + 'list_data_sources': ('parent', 'page_size', 'page_token', ), + 'update_data_source': ('data_source', 'update_mask', ), + } + + def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode: + try: + key = original.func.attr.value + kword_params = self.METHOD_TO_PARAMS[key] + except (AttributeError, KeyError): + # Either not a method from the API or too convoluted to be sure. + return updated + + # If the existing code is valid, keyword args come after positional args. + # Therefore, all positional args must map to the first parameters. + args, kwargs = partition(lambda a: not bool(a.keyword), updated.args) + if any(k.keyword.value == "request" for k in kwargs): + # We've already fixed this file, don't fix it again. + return updated + + kwargs, ctrl_kwargs = partition( + lambda a: a.keyword.value not in self.CTRL_PARAMS, + kwargs + ) + + args, ctrl_args = args[:len(kword_params)], args[len(kword_params):] + ctrl_kwargs.extend(cst.Arg(value=a.value, keyword=cst.Name(value=ctrl)) + for a, ctrl in zip(ctrl_args, self.CTRL_PARAMS)) + + request_arg = cst.Arg( + value=cst.Dict([ + cst.DictElement( + cst.SimpleString("'{}'".format(name)), +cst.Element(value=arg.value) + ) + # Note: the args + kwargs looks silly, but keep in mind that + # the control parameters had to be stripped out, and that + # those could have been passed positionally or by keyword. + for name, arg in zip(kword_params, args + kwargs)]), + keyword=cst.Name("request") + ) + + return updated.with_changes( + args=[request_arg] + ctrl_kwargs + ) + + +def fix_files( + in_dir: pathlib.Path, + out_dir: pathlib.Path, + *, + transformer=merchant_datasourcesCallTransformer(), +): + """Duplicate the input dir to the output dir, fixing file method calls. + + Preconditions: + * in_dir is a real directory + * out_dir is a real, empty directory + """ + pyfile_gen = ( + pathlib.Path(os.path.join(root, f)) + for root, _, files in os.walk(in_dir) + for f in files if os.path.splitext(f)[1] == ".py" + ) + + for fpath in pyfile_gen: + with open(fpath, 'r') as f: + src = f.read() + + # Parse the code and insert method call fixes. + tree = cst.parse_module(src) + updated = tree.visit(transformer) + + # Create the path and directory structure for the new file. + updated_path = out_dir.joinpath(fpath.relative_to(in_dir)) + updated_path.parent.mkdir(parents=True, exist_ok=True) + + # Generate the updated source file at the corresponding path. + with open(updated_path, 'w') as f: + f.write(updated.code) + + +if __name__ == '__main__': + parser = argparse.ArgumentParser( + description="""Fix up source that uses the merchant_datasources client library. + +The existing sources are NOT overwritten but are copied to output_dir with changes made. 
+ +Note: This tool operates at a best-effort level at converting positional + parameters in client method calls to keyword based parameters. + Cases where it WILL FAIL include + A) * or ** expansion in a method call. + B) Calls via function or method alias (includes free function calls) + C) Indirect or dispatched calls (e.g. the method is looked up dynamically) + + These all constitute false negatives. The tool will also detect false + positives when an API method shares a name with another method. +""") + parser.add_argument( + '-d', + '--input-directory', + required=True, + dest='input_dir', + help='the input directory to walk for python files to fix up', + ) + parser.add_argument( + '-o', + '--output-directory', + required=True, + dest='output_dir', + help='the directory to output files fixed via un-flattening', + ) + args = parser.parse_args() + input_dir = pathlib.Path(args.input_dir) + output_dir = pathlib.Path(args.output_dir) + if not input_dir.is_dir(): + print( + f"input directory '{input_dir}' does not exist or is not a directory", + file=sys.stderr, + ) + sys.exit(-1) + + if not output_dir.is_dir(): + print( + f"output directory '{output_dir}' does not exist or is not a directory", + file=sys.stderr, + ) + sys.exit(-1) + + if os.listdir(output_dir): + print( + f"output directory '{output_dir}' is not empty", + file=sys.stderr, + ) + sys.exit(-1) + + fix_files(input_dir, output_dir) diff --git a/packages/google-shopping-merchant-datasources/setup.py b/packages/google-shopping-merchant-datasources/setup.py new file mode 100644 index 000000000000..65222b808bf9 --- /dev/null +++ b/packages/google-shopping-merchant-datasources/setup.py @@ -0,0 +1,95 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
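The setup.py continuing below derives the package version by regex-scanning `google/shopping/merchant_datasources/gapic_version.py` and maps a leading `0.` to the Beta trove classifier. A small self-contained illustration of that extraction is shown here; the file contents in the snippet are assumed for the sketch, not read from the repository.

```python
import re

# Assumed contents of gapic_version.py for this sketch.
sample = '__version__ = "0.1.0"\n'

# Same pattern as setup.py: digits.digits.digits between double quotes.
candidates = re.findall(r"(?<=\")\d+.\d+.\d+(?=\")", sample)
assert candidates == ["0.1.0"]

version = candidates[0]
release_status = (
    "Development Status :: 4 - Beta"
    if version.startswith("0")
    else "Development Status :: 5 - Production/Stable"
)
print(version, release_status)
```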
+# +import io +import os +import re + +import setuptools # type: ignore + +package_root = os.path.abspath(os.path.dirname(__file__)) + +name = "google-shopping-merchant-datasources" + + +description = "Google Shopping Merchant Datasources API client library" + +version = None + +with open( + os.path.join(package_root, "google/shopping/merchant_datasources/gapic_version.py") +) as fp: + version_candidates = re.findall(r"(?<=\")\d+.\d+.\d+(?=\")", fp.read()) + assert len(version_candidates) == 1 + version = version_candidates[0] + +if version[0] == "0": + release_status = "Development Status :: 4 - Beta" +else: + release_status = "Development Status :: 5 - Production/Stable" + +dependencies = [ + "google-api-core[grpc] >= 1.34.1, <3.0.0dev,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*,!=2.8.*,!=2.9.*,!=2.10.*", + # Exclude incompatible versions of `google-auth` + # See https://github.com/googleapis/google-cloud-python/issues/12364 + "google-auth >= 2.14.1, <3.0.0dev,!=2.24.0,!=2.25.0", + "proto-plus >= 1.22.3, <2.0.0dev", + "protobuf>=3.19.5,<5.0.0dev,!=3.20.0,!=3.20.1,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", +] +url = "https://github.com/googleapis/google-cloud-python/tree/main/packages/google-shopping-merchant-datasources" + +package_root = os.path.abspath(os.path.dirname(__file__)) + +readme_filename = os.path.join(package_root, "README.rst") +with io.open(readme_filename, encoding="utf-8") as readme_file: + readme = readme_file.read() + +packages = [ + package + for package in setuptools.find_namespace_packages() + if package.startswith("google") +] + +setuptools.setup( + name=name, + version=version, + description=description, + long_description=readme, + author="Google LLC", + author_email="googleapis-packages@google.com", + license="Apache 2.0", + url=url, + classifiers=[ + release_status, + "Intended Audience :: Developers", + "License :: OSI Approved :: Apache Software License", + "Programming Language :: Python", + "Programming Language :: Python :: 3", + "Programming Language :: Python :: 3.7", + "Programming Language :: Python :: 3.8", + "Programming Language :: Python :: 3.9", + "Programming Language :: Python :: 3.10", + "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", + "Operating System :: OS Independent", + "Topic :: Internet", + ], + platforms="Posix; MacOS X; Windows", + packages=packages, + python_requires=">=3.7", + install_requires=dependencies, + include_package_data=True, + zip_safe=False, +) diff --git a/packages/google-shopping-merchant-datasources/testing/.gitignore b/packages/google-shopping-merchant-datasources/testing/.gitignore new file mode 100644 index 000000000000..b05fbd630881 --- /dev/null +++ b/packages/google-shopping-merchant-datasources/testing/.gitignore @@ -0,0 +1,3 @@ +test-env.sh +service-account.json +client-secrets.json \ No newline at end of file diff --git a/packages/google-shopping-merchant-datasources/testing/constraints-3.10.txt b/packages/google-shopping-merchant-datasources/testing/constraints-3.10.txt new file mode 100644 index 000000000000..ed7f9aed2559 --- /dev/null +++ b/packages/google-shopping-merchant-datasources/testing/constraints-3.10.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. 
+google-api-core +proto-plus +protobuf diff --git a/packages/google-shopping-merchant-datasources/testing/constraints-3.11.txt b/packages/google-shopping-merchant-datasources/testing/constraints-3.11.txt new file mode 100644 index 000000000000..ed7f9aed2559 --- /dev/null +++ b/packages/google-shopping-merchant-datasources/testing/constraints-3.11.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf diff --git a/packages/google-shopping-merchant-datasources/testing/constraints-3.12.txt b/packages/google-shopping-merchant-datasources/testing/constraints-3.12.txt new file mode 100644 index 000000000000..ed7f9aed2559 --- /dev/null +++ b/packages/google-shopping-merchant-datasources/testing/constraints-3.12.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf diff --git a/packages/google-shopping-merchant-datasources/testing/constraints-3.7.txt b/packages/google-shopping-merchant-datasources/testing/constraints-3.7.txt new file mode 100644 index 000000000000..b8a550c73855 --- /dev/null +++ b/packages/google-shopping-merchant-datasources/testing/constraints-3.7.txt @@ -0,0 +1,10 @@ +# This constraints file is used to check that lower bounds +# are correct in setup.py +# List all library dependencies and extras in this file. +# Pin the version to the lower bound. +# e.g., if setup.py has "google-cloud-foo >= 1.14.0, < 2.0.0dev", +# Then this file should have google-cloud-foo==1.14.0 +google-api-core==1.34.1 +google-auth==2.14.1 +proto-plus==1.22.3 +protobuf==3.19.5 diff --git a/packages/google-shopping-merchant-datasources/testing/constraints-3.8.txt b/packages/google-shopping-merchant-datasources/testing/constraints-3.8.txt new file mode 100644 index 000000000000..ed7f9aed2559 --- /dev/null +++ b/packages/google-shopping-merchant-datasources/testing/constraints-3.8.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf diff --git a/packages/google-shopping-merchant-datasources/testing/constraints-3.9.txt b/packages/google-shopping-merchant-datasources/testing/constraints-3.9.txt new file mode 100644 index 000000000000..ed7f9aed2559 --- /dev/null +++ b/packages/google-shopping-merchant-datasources/testing/constraints-3.9.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf diff --git a/packages/google-shopping-merchant-datasources/tests/__init__.py b/packages/google-shopping-merchant-datasources/tests/__init__.py new file mode 100644 index 000000000000..8f6cf068242c --- /dev/null +++ b/packages/google-shopping-merchant-datasources/tests/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/packages/google-shopping-merchant-datasources/tests/unit/__init__.py b/packages/google-shopping-merchant-datasources/tests/unit/__init__.py new file mode 100644 index 000000000000..8f6cf068242c --- /dev/null +++ b/packages/google-shopping-merchant-datasources/tests/unit/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/packages/google-shopping-merchant-datasources/tests/unit/gapic/__init__.py b/packages/google-shopping-merchant-datasources/tests/unit/gapic/__init__.py new file mode 100644 index 000000000000..8f6cf068242c --- /dev/null +++ b/packages/google-shopping-merchant-datasources/tests/unit/gapic/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/packages/google-shopping-merchant-datasources/tests/unit/gapic/merchant_datasources_v1beta/__init__.py b/packages/google-shopping-merchant-datasources/tests/unit/gapic/merchant_datasources_v1beta/__init__.py new file mode 100644 index 000000000000..8f6cf068242c --- /dev/null +++ b/packages/google-shopping-merchant-datasources/tests/unit/gapic/merchant_datasources_v1beta/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
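Before the unit tests that close out this change, a quick orientation sketch of calling the new client whose samples are cataloged in the snippet metadata earlier in the diff. This is editorial and not one of the generated sample files: the parent account id is a placeholder and Application Default Credentials are assumed to be configured.

```python
from google.shopping import merchant_datasources_v1beta


def list_data_sources_sketch() -> None:
    # Uses Application Default Credentials (an assumption for this sketch).
    client = merchant_datasources_v1beta.DataSourcesServiceClient()

    # "accounts/123" is a placeholder Merchant Center account resource name.
    request = merchant_datasources_v1beta.ListDataSourcesRequest(parent="accounts/123")

    # list_data_sources returns a pager; iterating it fetches pages lazily.
    for data_source in client.list_data_sources(request=request):
        print(data_source.name)
```

The async variant, `DataSourcesServiceAsyncClient`, exposes the same surface with awaitable methods, as the paired `_async.py` samples in the metadata indicate.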
+# diff --git a/packages/google-shopping-merchant-datasources/tests/unit/gapic/merchant_datasources_v1beta/test_data_sources_service.py b/packages/google-shopping-merchant-datasources/tests/unit/gapic/merchant_datasources_v1beta/test_data_sources_service.py new file mode 100644 index 000000000000..c26479ad2b75 --- /dev/null +++ b/packages/google-shopping-merchant-datasources/tests/unit/gapic/merchant_datasources_v1beta/test_data_sources_service.py @@ -0,0 +1,6564 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import os + +# try/except added for compatibility with python < 3.8 +try: + from unittest import mock + from unittest.mock import AsyncMock # pragma: NO COVER +except ImportError: # pragma: NO COVER + import mock + +from collections.abc import Iterable +import json +import math + +from google.api_core import gapic_v1, grpc_helpers, grpc_helpers_async, path_template +from google.api_core import api_core_version, client_options +from google.api_core import exceptions as core_exceptions +import google.auth +from google.auth import credentials as ga_credentials +from google.auth.exceptions import MutualTLSChannelError +from google.oauth2 import service_account +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import json_format +from google.type import dayofweek_pb2 # type: ignore +from google.type import timeofday_pb2 # type: ignore +import grpc +from grpc.experimental import aio +from proto.marshal.rules import wrappers +from proto.marshal.rules.dates import DurationRule, TimestampRule +import pytest +from requests import PreparedRequest, Request, Response +from requests.sessions import Session + +from google.shopping.merchant_datasources_v1beta.services.data_sources_service import ( + DataSourcesServiceAsyncClient, + DataSourcesServiceClient, + pagers, + transports, +) +from google.shopping.merchant_datasources_v1beta.types import ( + datasources, + datasourcetypes, + fileinputs, +) + + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + + +# If default endpoint is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. +def modify_default_endpoint(client): + return ( + "foo.googleapis.com" + if ("localhost" in client.DEFAULT_ENDPOINT) + else client.DEFAULT_ENDPOINT + ) + + +# If default endpoint template is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint template so the client can produce a different +# mtls endpoint for endpoint testing purposes. 
+def modify_default_endpoint_template(client): + return ( + "test.{UNIVERSE_DOMAIN}" + if ("localhost" in client._DEFAULT_ENDPOINT_TEMPLATE) + else client._DEFAULT_ENDPOINT_TEMPLATE + ) + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert DataSourcesServiceClient._get_default_mtls_endpoint(None) is None + assert ( + DataSourcesServiceClient._get_default_mtls_endpoint(api_endpoint) + == api_mtls_endpoint + ) + assert ( + DataSourcesServiceClient._get_default_mtls_endpoint(api_mtls_endpoint) + == api_mtls_endpoint + ) + assert ( + DataSourcesServiceClient._get_default_mtls_endpoint(sandbox_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + DataSourcesServiceClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + DataSourcesServiceClient._get_default_mtls_endpoint(non_googleapi) + == non_googleapi + ) + + +def test__read_environment_variables(): + assert DataSourcesServiceClient._read_environment_variables() == ( + False, + "auto", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + assert DataSourcesServiceClient._read_environment_variables() == ( + True, + "auto", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + assert DataSourcesServiceClient._read_environment_variables() == ( + False, + "auto", + None, + ) + + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError) as excinfo: + DataSourcesServiceClient._read_environment_variables() + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + assert DataSourcesServiceClient._read_environment_variables() == ( + False, + "never", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + assert DataSourcesServiceClient._read_environment_variables() == ( + False, + "always", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}): + assert DataSourcesServiceClient._read_environment_variables() == ( + False, + "auto", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + DataSourcesServiceClient._read_environment_variables() + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + with mock.patch.dict(os.environ, {"GOOGLE_CLOUD_UNIVERSE_DOMAIN": "foo.com"}): + assert DataSourcesServiceClient._read_environment_variables() == ( + False, + "auto", + "foo.com", + ) + + +def test__get_client_cert_source(): + mock_provided_cert_source = mock.Mock() + mock_default_cert_source = mock.Mock() + + assert DataSourcesServiceClient._get_client_cert_source(None, False) is None + assert ( + DataSourcesServiceClient._get_client_cert_source( + mock_provided_cert_source, False + ) + is None + ) + assert ( + DataSourcesServiceClient._get_client_cert_source( + mock_provided_cert_source, True + ) + == mock_provided_cert_source + ) + + with mock.patch( + 
"google.auth.transport.mtls.has_default_client_cert_source", return_value=True + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=mock_default_cert_source, + ): + assert ( + DataSourcesServiceClient._get_client_cert_source(None, True) + is mock_default_cert_source + ) + assert ( + DataSourcesServiceClient._get_client_cert_source( + mock_provided_cert_source, "true" + ) + is mock_provided_cert_source + ) + + +@mock.patch.object( + DataSourcesServiceClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(DataSourcesServiceClient), +) +@mock.patch.object( + DataSourcesServiceAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(DataSourcesServiceAsyncClient), +) +def test__get_api_endpoint(): + api_override = "foo.com" + mock_client_cert_source = mock.Mock() + default_universe = DataSourcesServiceClient._DEFAULT_UNIVERSE + default_endpoint = DataSourcesServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=default_universe + ) + mock_universe = "bar.com" + mock_endpoint = DataSourcesServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=mock_universe + ) + + assert ( + DataSourcesServiceClient._get_api_endpoint( + api_override, mock_client_cert_source, default_universe, "always" + ) + == api_override + ) + assert ( + DataSourcesServiceClient._get_api_endpoint( + None, mock_client_cert_source, default_universe, "auto" + ) + == DataSourcesServiceClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + DataSourcesServiceClient._get_api_endpoint(None, None, default_universe, "auto") + == default_endpoint + ) + assert ( + DataSourcesServiceClient._get_api_endpoint( + None, None, default_universe, "always" + ) + == DataSourcesServiceClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + DataSourcesServiceClient._get_api_endpoint( + None, mock_client_cert_source, default_universe, "always" + ) + == DataSourcesServiceClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + DataSourcesServiceClient._get_api_endpoint(None, None, mock_universe, "never") + == mock_endpoint + ) + assert ( + DataSourcesServiceClient._get_api_endpoint( + None, None, default_universe, "never" + ) + == default_endpoint + ) + + with pytest.raises(MutualTLSChannelError) as excinfo: + DataSourcesServiceClient._get_api_endpoint( + None, mock_client_cert_source, mock_universe, "auto" + ) + assert ( + str(excinfo.value) + == "mTLS is not supported in any universe other than googleapis.com." + ) + + +def test__get_universe_domain(): + client_universe_domain = "foo.com" + universe_domain_env = "bar.com" + + assert ( + DataSourcesServiceClient._get_universe_domain( + client_universe_domain, universe_domain_env + ) + == client_universe_domain + ) + assert ( + DataSourcesServiceClient._get_universe_domain(None, universe_domain_env) + == universe_domain_env + ) + assert ( + DataSourcesServiceClient._get_universe_domain(None, None) + == DataSourcesServiceClient._DEFAULT_UNIVERSE + ) + + with pytest.raises(ValueError) as excinfo: + DataSourcesServiceClient._get_universe_domain("", None) + assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
+ + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (DataSourcesServiceClient, transports.DataSourcesServiceGrpcTransport, "grpc"), + (DataSourcesServiceClient, transports.DataSourcesServiceRestTransport, "rest"), + ], +) +def test__validate_universe_domain(client_class, transport_class, transport_name): + client = client_class( + transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) + ) + assert client._validate_universe_domain() == True + + # Test the case when universe is already validated. + assert client._validate_universe_domain() == True + + if transport_name == "grpc": + # Test the case where credentials are provided by the + # `local_channel_credentials`. The default universes in both match. + channel = grpc.secure_channel( + "http://localhost/", grpc.local_channel_credentials() + ) + client = client_class(transport=transport_class(channel=channel)) + assert client._validate_universe_domain() == True + + # Test the case where credentials do not exist: e.g. a transport is provided + # with no credentials. Validation should still succeed because there is no + # mismatch with non-existent credentials. + channel = grpc.secure_channel( + "http://localhost/", grpc.local_channel_credentials() + ) + transport = transport_class(channel=channel) + transport._credentials = None + client = client_class(transport=transport) + assert client._validate_universe_domain() == True + + # TODO: This is needed to cater for older versions of google-auth + # Make this test unconditional once the minimum supported version of + # google-auth becomes 2.23.0 or higher. + google_auth_major, google_auth_minor = [ + int(part) for part in google.auth.__version__.split(".")[0:2] + ] + if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): + credentials = ga_credentials.AnonymousCredentials() + credentials._universe_domain = "foo.com" + # Test the case when there is a universe mismatch from the credentials. + client = client_class(transport=transport_class(credentials=credentials)) + with pytest.raises(ValueError) as excinfo: + client._validate_universe_domain() + assert ( + str(excinfo.value) + == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." + ) + + # Test the case when there is a universe mismatch from the client. + # + # TODO: Make this test unconditional once the minimum supported version of + # google-api-core becomes 2.15.0 or higher. + api_core_major, api_core_minor = [ + int(part) for part in api_core_version.__version__.split(".")[0:2] + ] + if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): + client = client_class( + client_options={"universe_domain": "bar.com"}, + transport=transport_class( + credentials=ga_credentials.AnonymousCredentials(), + ), + ) + with pytest.raises(ValueError) as excinfo: + client._validate_universe_domain() + assert ( + str(excinfo.value) + == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." 
+ ) + + # Test that ValueError is raised if universe_domain is provided via client options and credentials is None + with pytest.raises(ValueError): + client._compare_universes("foo.bar", None) + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (DataSourcesServiceClient, "grpc"), + (DataSourcesServiceAsyncClient, "grpc_asyncio"), + (DataSourcesServiceClient, "rest"), + ], +) +def test_data_sources_service_client_from_service_account_info( + client_class, transport_name +): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_info" + ) as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info, transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + "merchantapi.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://merchantapi.googleapis.com" + ) + + +@pytest.mark.parametrize( + "transport_class,transport_name", + [ + (transports.DataSourcesServiceGrpcTransport, "grpc"), + (transports.DataSourcesServiceGrpcAsyncIOTransport, "grpc_asyncio"), + (transports.DataSourcesServiceRestTransport, "rest"), + ], +) +def test_data_sources_service_client_service_account_always_use_jwt( + transport_class, transport_name +): + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) + + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=False) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (DataSourcesServiceClient, "grpc"), + (DataSourcesServiceAsyncClient, "grpc_asyncio"), + (DataSourcesServiceClient, "rest"), + ], +) +def test_data_sources_service_client_from_service_account_file( + client_class, transport_name +): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_file" + ) as factory: + factory.return_value = creds + client = client_class.from_service_account_file( + "dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + client = client_class.from_service_account_json( + "dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + "merchantapi.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://merchantapi.googleapis.com" + ) + + +def test_data_sources_service_client_get_transport_class(): + transport = DataSourcesServiceClient.get_transport_class() + available_transports = [ + transports.DataSourcesServiceGrpcTransport, + transports.DataSourcesServiceRestTransport, + ] + assert transport in available_transports + + transport = DataSourcesServiceClient.get_transport_class("grpc") + assert transport == 
transports.DataSourcesServiceGrpcTransport + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (DataSourcesServiceClient, transports.DataSourcesServiceGrpcTransport, "grpc"), + ( + DataSourcesServiceAsyncClient, + transports.DataSourcesServiceGrpcAsyncIOTransport, + "grpc_asyncio", + ), + (DataSourcesServiceClient, transports.DataSourcesServiceRestTransport, "rest"), + ], +) +@mock.patch.object( + DataSourcesServiceClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(DataSourcesServiceClient), +) +@mock.patch.object( + DataSourcesServiceAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(DataSourcesServiceAsyncClient), +) +def test_data_sources_service_client_client_options( + client_class, transport_class, transport_name +): + # Check that if channel is provided we won't create a new one. + with mock.patch.object(DataSourcesServiceClient, "get_transport_class") as gtc: + transport = transport_class(credentials=ga_credentials.AnonymousCredentials()) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object(DataSourcesServiceClient, "get_transport_class") as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. + options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name, client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + client = client_class(transport=transport_name) + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError) as excinfo: + client = client_class(transport=transport_name) + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + # Check the case api_endpoint is provided + options = client_options.ClientOptions( + api_audience="https://language.googleapis.com" + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience="https://language.googleapis.com", + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,use_client_cert_env", + [ + ( + DataSourcesServiceClient, + transports.DataSourcesServiceGrpcTransport, + "grpc", + "true", + ), + ( + DataSourcesServiceAsyncClient, + transports.DataSourcesServiceGrpcAsyncIOTransport, + "grpc_asyncio", + "true", + ), + ( + DataSourcesServiceClient, + transports.DataSourcesServiceGrpcTransport, + "grpc", + "false", + ), + ( + DataSourcesServiceAsyncClient, + transports.DataSourcesServiceGrpcAsyncIOTransport, + "grpc_asyncio", + "false", + ), + ( + DataSourcesServiceClient, + transports.DataSourcesServiceRestTransport, + "rest", + "true", + ), + ( + DataSourcesServiceClient, + transports.DataSourcesServiceRestTransport, + "rest", + "false", + ), + ], +) +@mock.patch.object( + DataSourcesServiceClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(DataSourcesServiceClient), +) +@mock.patch.object( + DataSourcesServiceAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(DataSourcesServiceAsyncClient), +) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_data_sources_service_client_mtls_env_auto( + client_class, transport_class, transport_name, use_client_cert_env +): + # This tests the endpoint autoswitch behavior. 
Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. + + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + options = client_options.ClientOptions( + client_cert_source=client_cert_source_callback + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ) + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=client_cert_source_callback, + ): + if use_client_cert_env == "false": + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ) + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case client_cert_source and ADC client cert are not provided. 
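+ # With no certificate available, the client should stay on the regular endpoint and pass no cert source to the transport.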
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class", [DataSourcesServiceClient, DataSourcesServiceAsyncClient] +) +@mock.patch.object( + DataSourcesServiceClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(DataSourcesServiceClient), +) +@mock.patch.object( + DataSourcesServiceAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(DataSourcesServiceAsyncClient), +) +def test_data_sources_service_client_get_mtls_endpoint_and_cert_source(client_class): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. 
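+ # In this case both the mTLS endpoint and the default cert source should be returned.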
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=mock_client_cert_source, + ): + ( + api_endpoint, + cert_source, + ) = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + client_class.get_mtls_endpoint_and_cert_source() + + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError) as excinfo: + client_class.get_mtls_endpoint_and_cert_source() + + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + + +@pytest.mark.parametrize( + "client_class", [DataSourcesServiceClient, DataSourcesServiceAsyncClient] +) +@mock.patch.object( + DataSourcesServiceClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(DataSourcesServiceClient), +) +@mock.patch.object( + DataSourcesServiceAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(DataSourcesServiceAsyncClient), +) +def test_data_sources_service_client_client_api_endpoint(client_class): + mock_client_cert_source = client_cert_source_callback + api_override = "foo.com" + default_universe = DataSourcesServiceClient._DEFAULT_UNIVERSE + default_endpoint = DataSourcesServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=default_universe + ) + mock_universe = "bar.com" + mock_endpoint = DataSourcesServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=mock_universe + ) + + # If ClientOptions.api_endpoint is set and GOOGLE_API_USE_CLIENT_CERTIFICATE="true", + # use ClientOptions.api_endpoint as the api endpoint regardless. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ): + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=api_override + ) + client = client_class( + client_options=options, + credentials=ga_credentials.AnonymousCredentials(), + ) + assert client.api_endpoint == api_override + + # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="never", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + client = client_class(credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == default_endpoint + + # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="always", + # use the DEFAULT_MTLS_ENDPOINT as the api endpoint. 
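+ # `always` forces the mTLS endpoint even when no client certificate is configured.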
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + client = client_class(credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + + # If ClientOptions.api_endpoint is not set, GOOGLE_API_USE_MTLS_ENDPOINT="auto" (default), + # GOOGLE_API_USE_CLIENT_CERTIFICATE="false" (default), default cert source doesn't exist, + # and ClientOptions.universe_domain="bar.com", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with universe domain as the api endpoint. + options = client_options.ClientOptions() + universe_exists = hasattr(options, "universe_domain") + if universe_exists: + options = client_options.ClientOptions(universe_domain=mock_universe) + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + else: + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + assert client.api_endpoint == ( + mock_endpoint if universe_exists else default_endpoint + ) + assert client.universe_domain == ( + mock_universe if universe_exists else default_universe + ) + + # If ClientOptions does not have a universe domain attribute and GOOGLE_API_USE_MTLS_ENDPOINT="never", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. + options = client_options.ClientOptions() + if hasattr(options, "universe_domain"): + delattr(options, "universe_domain") + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + assert client.api_endpoint == default_endpoint + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (DataSourcesServiceClient, transports.DataSourcesServiceGrpcTransport, "grpc"), + ( + DataSourcesServiceAsyncClient, + transports.DataSourcesServiceGrpcAsyncIOTransport, + "grpc_asyncio", + ), + (DataSourcesServiceClient, transports.DataSourcesServiceRestTransport, "rest"), + ], +) +def test_data_sources_service_client_client_options_scopes( + client_class, transport_class, transport_name +): + # Check the case scopes are provided. + options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + DataSourcesServiceClient, + transports.DataSourcesServiceGrpcTransport, + "grpc", + grpc_helpers, + ), + ( + DataSourcesServiceAsyncClient, + transports.DataSourcesServiceGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + ( + DataSourcesServiceClient, + transports.DataSourcesServiceRestTransport, + "rest", + None, + ), + ], +) +def test_data_sources_service_client_client_options_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. 
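+ # The transport is mocked, so the path is only forwarded; no file named `credentials.json` has to exist.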
+ options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +def test_data_sources_service_client_client_options_from_dict(): + with mock.patch( + "google.shopping.merchant_datasources_v1beta.services.data_sources_service.transports.DataSourcesServiceGrpcTransport.__init__" + ) as grpc_transport: + grpc_transport.return_value = None + client = DataSourcesServiceClient( + client_options={"api_endpoint": "squid.clam.whelk"} + ) + grpc_transport.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + DataSourcesServiceClient, + transports.DataSourcesServiceGrpcTransport, + "grpc", + grpc_helpers, + ), + ( + DataSourcesServiceAsyncClient, + transports.DataSourcesServiceGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + ], +) +def test_data_sources_service_client_create_channel_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. + options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # test that the credentials from file are saved and used as the credentials. 
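+ # `load_credentials_from_file` is patched below, so the credentials are faked rather than read from disk.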
+ with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel" + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + file_creds = ga_credentials.AnonymousCredentials() + load_creds.return_value = (file_creds, None) + adc.return_value = (creds, None) + client = client_class(client_options=options, transport=transport_name) + create_channel.assert_called_with( + "merchantapi.googleapis.com:443", + credentials=file_creds, + credentials_file=None, + quota_project_id=None, + default_scopes=("https://www.googleapis.com/auth/content",), + scopes=None, + default_host="merchantapi.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "request_type", + [ + datasources.GetDataSourceRequest, + dict, + ], +) +def test_get_data_source(request_type, transport: str = "grpc"): + client = DataSourcesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_data_source), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = datasources.DataSource( + name="name_value", + data_source_id=1462, + display_name="display_name_value", + input=datasources.DataSource.Input.API, + ) + response = client.get_data_source(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = datasources.GetDataSourceRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, datasources.DataSource) + assert response.name == "name_value" + assert response.data_source_id == 1462 + assert response.display_name == "display_name_value" + assert response.input == datasources.DataSource.Input.API + + +def test_get_data_source_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DataSourcesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_data_source), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.get_data_source() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == datasources.GetDataSourceRequest() + + +def test_get_data_source_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. 
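+ # (AIP-4235 describes automatic population of request fields with UUID4 values.)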
+ client = DataSourcesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = datasources.GetDataSourceRequest( + name="name_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_data_source), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.get_data_source(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == datasources.GetDataSourceRequest( + name="name_value", + ) + + +def test_get_data_source_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DataSourcesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_data_source in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get_data_source] = mock_rpc + request = {} + client.get_data_source(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.get_data_source(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_get_data_source_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DataSourcesServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_data_source), "__call__") as call: + # Designate an appropriate return value for the call. 
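+ # FakeUnaryUnaryCall wraps the response so the mocked stub can be awaited like a real gRPC call.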
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + datasources.DataSource( + name="name_value", + data_source_id=1462, + display_name="display_name_value", + input=datasources.DataSource.Input.API, + ) + ) + response = await client.get_data_source() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == datasources.GetDataSourceRequest() + + +@pytest.mark.asyncio +async def test_get_data_source_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DataSourcesServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.get_data_source + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[ + client._client._transport.get_data_source + ] = mock_object + + request = {} + await client.get_data_source(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + await client.get_data_source(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + +@pytest.mark.asyncio +async def test_get_data_source_async( + transport: str = "grpc_asyncio", request_type=datasources.GetDataSourceRequest +): + client = DataSourcesServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_data_source), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + datasources.DataSource( + name="name_value", + data_source_id=1462, + display_name="display_name_value", + input=datasources.DataSource.Input.API, + ) + ) + response = await client.get_data_source(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = datasources.GetDataSourceRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, datasources.DataSource) + assert response.name == "name_value" + assert response.data_source_id == 1462 + assert response.display_name == "display_name_value" + assert response.input == datasources.DataSource.Input.API + + +@pytest.mark.asyncio +async def test_get_data_source_async_from_dict(): + await test_get_data_source_async(request_type=dict) + + +def test_get_data_source_field_headers(): + client = DataSourcesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
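+ # The routing header is asserted below as ("x-goog-request-params", "name=name_value").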
+ request = datasources.GetDataSourceRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_data_source), "__call__") as call: + call.return_value = datasources.DataSource() + client.get_data_source(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_data_source_field_headers_async(): + client = DataSourcesServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = datasources.GetDataSourceRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_data_source), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + datasources.DataSource() + ) + await client.get_data_source(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_get_data_source_flattened(): + client = DataSourcesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_data_source), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = datasources.DataSource() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_data_source( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_get_data_source_flattened_error(): + client = DataSourcesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_data_source( + datasources.GetDataSourceRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_get_data_source_flattened_async(): + client = DataSourcesServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_data_source), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = datasources.DataSource() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + datasources.DataSource() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
+ response = await client.get_data_source( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_get_data_source_flattened_error_async(): + client = DataSourcesServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.get_data_source( + datasources.GetDataSourceRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + datasources.ListDataSourcesRequest, + dict, + ], +) +def test_list_data_sources(request_type, transport: str = "grpc"): + client = DataSourcesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_data_sources), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = datasources.ListDataSourcesResponse( + next_page_token="next_page_token_value", + ) + response = client.list_data_sources(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = datasources.ListDataSourcesRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListDataSourcesPager) + assert response.next_page_token == "next_page_token_value" + + +def test_list_data_sources_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DataSourcesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_data_sources), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.list_data_sources() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == datasources.ListDataSourcesRequest() + + +def test_list_data_sources_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = DataSourcesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = datasources.ListDataSourcesRequest( + parent="parent_value", + page_token="page_token_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_data_sources), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.list_data_sources(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == datasources.ListDataSourcesRequest( + parent="parent_value", + page_token="page_token_value", + ) + + +def test_list_data_sources_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DataSourcesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_data_sources in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.list_data_sources + ] = mock_rpc + request = {} + client.list_data_sources(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_data_sources(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_list_data_sources_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DataSourcesServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_data_sources), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + datasources.ListDataSourcesResponse( + next_page_token="next_page_token_value", + ) + ) + response = await client.list_data_sources() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == datasources.ListDataSourcesRequest() + + +@pytest.mark.asyncio +async def test_list_data_sources_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DataSourcesServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.list_data_sources + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[ + client._client._transport.list_data_sources + ] = mock_object + + request = {} + await client.list_data_sources(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + await client.list_data_sources(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + +@pytest.mark.asyncio +async def test_list_data_sources_async( + transport: str = "grpc_asyncio", request_type=datasources.ListDataSourcesRequest +): + client = DataSourcesServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_data_sources), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + datasources.ListDataSourcesResponse( + next_page_token="next_page_token_value", + ) + ) + response = await client.list_data_sources(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = datasources.ListDataSourcesRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListDataSourcesAsyncPager) + assert response.next_page_token == "next_page_token_value" + + +@pytest.mark.asyncio +async def test_list_data_sources_async_from_dict(): + await test_list_data_sources_async(request_type=dict) + + +def test_list_data_sources_field_headers(): + client = DataSourcesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = datasources.ListDataSourcesRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_data_sources), "__call__" + ) as call: + call.return_value = datasources.ListDataSourcesResponse() + client.list_data_sources(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_data_sources_field_headers_async(): + client = DataSourcesServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = datasources.ListDataSourcesRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_data_sources), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + datasources.ListDataSourcesResponse() + ) + await client.list_data_sources(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_list_data_sources_flattened(): + client = DataSourcesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_data_sources), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = datasources.ListDataSourcesResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_data_sources( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +def test_list_data_sources_flattened_error(): + client = DataSourcesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_data_sources( + datasources.ListDataSourcesRequest(), + parent="parent_value", + ) + + +@pytest.mark.asyncio +async def test_list_data_sources_flattened_async(): + client = DataSourcesServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_data_sources), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = datasources.ListDataSourcesResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + datasources.ListDataSourcesResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
+ response = await client.list_data_sources( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_list_data_sources_flattened_error_async(): + client = DataSourcesServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.list_data_sources( + datasources.ListDataSourcesRequest(), + parent="parent_value", + ) + + +def test_list_data_sources_pager(transport_name: str = "grpc"): + client = DataSourcesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_data_sources), "__call__" + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + datasources.ListDataSourcesResponse( + data_sources=[ + datasources.DataSource(), + datasources.DataSource(), + datasources.DataSource(), + ], + next_page_token="abc", + ), + datasources.ListDataSourcesResponse( + data_sources=[], + next_page_token="def", + ), + datasources.ListDataSourcesResponse( + data_sources=[ + datasources.DataSource(), + ], + next_page_token="ghi", + ), + datasources.ListDataSourcesResponse( + data_sources=[ + datasources.DataSource(), + datasources.DataSource(), + ], + ), + RuntimeError, + ) + + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + ) + pager = client.list_data_sources(request={}) + + assert pager._metadata == expected_metadata + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, datasources.DataSource) for i in results) + + +def test_list_data_sources_pages(transport_name: str = "grpc"): + client = DataSourcesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_data_sources), "__call__" + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + datasources.ListDataSourcesResponse( + data_sources=[ + datasources.DataSource(), + datasources.DataSource(), + datasources.DataSource(), + ], + next_page_token="abc", + ), + datasources.ListDataSourcesResponse( + data_sources=[], + next_page_token="def", + ), + datasources.ListDataSourcesResponse( + data_sources=[ + datasources.DataSource(), + ], + next_page_token="ghi", + ), + datasources.ListDataSourcesResponse( + data_sources=[ + datasources.DataSource(), + datasources.DataSource(), + ], + ), + RuntimeError, + ) + pages = list(client.list_data_sources(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_list_data_sources_async_pager(): + client = DataSourcesServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
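+ # new_callable=mock.AsyncMock makes the mocked call awaitable, which the async pager needs for each page.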
+ with mock.patch.object( + type(client.transport.list_data_sources), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + datasources.ListDataSourcesResponse( + data_sources=[ + datasources.DataSource(), + datasources.DataSource(), + datasources.DataSource(), + ], + next_page_token="abc", + ), + datasources.ListDataSourcesResponse( + data_sources=[], + next_page_token="def", + ), + datasources.ListDataSourcesResponse( + data_sources=[ + datasources.DataSource(), + ], + next_page_token="ghi", + ), + datasources.ListDataSourcesResponse( + data_sources=[ + datasources.DataSource(), + datasources.DataSource(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_data_sources( + request={}, + ) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, datasources.DataSource) for i in responses) + + +@pytest.mark.asyncio +async def test_list_data_sources_async_pages(): + client = DataSourcesServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_data_sources), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + datasources.ListDataSourcesResponse( + data_sources=[ + datasources.DataSource(), + datasources.DataSource(), + datasources.DataSource(), + ], + next_page_token="abc", + ), + datasources.ListDataSourcesResponse( + data_sources=[], + next_page_token="def", + ), + datasources.ListDataSourcesResponse( + data_sources=[ + datasources.DataSource(), + ], + next_page_token="ghi", + ), + datasources.ListDataSourcesResponse( + data_sources=[ + datasources.DataSource(), + datasources.DataSource(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_data_sources(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + datasources.CreateDataSourceRequest, + dict, + ], +) +def test_create_data_source(request_type, transport: str = "grpc"): + client = DataSourcesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_data_source), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = datasources.DataSource( + name="name_value", + data_source_id=1462, + display_name="display_name_value", + input=datasources.DataSource.Input.API, + ) + response = client.create_data_source(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = datasources.CreateDataSourceRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, datasources.DataSource) + assert response.name == "name_value" + assert response.data_source_id == 1462 + assert response.display_name == "display_name_value" + assert response.input == datasources.DataSource.Input.API + + +def test_create_data_source_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DataSourcesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_data_source), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.create_data_source() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == datasources.CreateDataSourceRequest() + + +def test_create_data_source_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = DataSourcesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = datasources.CreateDataSourceRequest( + parent="parent_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_data_source), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.create_data_source(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == datasources.CreateDataSourceRequest( + parent="parent_value", + ) + + +def test_create_data_source_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DataSourcesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.create_data_source in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.create_data_source + ] = mock_rpc + request = {} + client.create_data_source(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.create_data_source(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_create_data_source_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DataSourcesServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_data_source), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + datasources.DataSource( + name="name_value", + data_source_id=1462, + display_name="display_name_value", + input=datasources.DataSource.Input.API, + ) + ) + response = await client.create_data_source() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == datasources.CreateDataSourceRequest() + + +@pytest.mark.asyncio +async def test_create_data_source_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DataSourcesServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.create_data_source + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[ + client._client._transport.create_data_source + ] = mock_object + + request = {} + await client.create_data_source(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + await client.create_data_source(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + +@pytest.mark.asyncio +async def test_create_data_source_async( + transport: str = "grpc_asyncio", request_type=datasources.CreateDataSourceRequest +): + client = DataSourcesServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_data_source), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + datasources.DataSource( + name="name_value", + data_source_id=1462, + display_name="display_name_value", + input=datasources.DataSource.Input.API, + ) + ) + response = await client.create_data_source(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = datasources.CreateDataSourceRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, datasources.DataSource) + assert response.name == "name_value" + assert response.data_source_id == 1462 + assert response.display_name == "display_name_value" + assert response.input == datasources.DataSource.Input.API + + +@pytest.mark.asyncio +async def test_create_data_source_async_from_dict(): + await test_create_data_source_async(request_type=dict) + + +def test_create_data_source_field_headers(): + client = DataSourcesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = datasources.CreateDataSourceRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_data_source), "__call__" + ) as call: + call.return_value = datasources.DataSource() + client.create_data_source(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_create_data_source_field_headers_async(): + client = DataSourcesServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = datasources.CreateDataSourceRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_data_source), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + datasources.DataSource() + ) + await client.create_data_source(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_create_data_source_flattened(): + client = DataSourcesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_data_source), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = datasources.DataSource() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.create_data_source( + parent="parent_value", + data_source=datasources.DataSource( + primary_product_data_source=datasourcetypes.PrimaryProductDataSource( + channel=datasourcetypes.PrimaryProductDataSource.Channel.ONLINE_PRODUCTS + ) + ), + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].data_source + mock_val = datasources.DataSource( + primary_product_data_source=datasourcetypes.PrimaryProductDataSource( + channel=datasourcetypes.PrimaryProductDataSource.Channel.ONLINE_PRODUCTS + ) + ) + assert arg == mock_val + + +def test_create_data_source_flattened_error(): + client = DataSourcesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_data_source( + datasources.CreateDataSourceRequest(), + parent="parent_value", + data_source=datasources.DataSource( + primary_product_data_source=datasourcetypes.PrimaryProductDataSource( + channel=datasourcetypes.PrimaryProductDataSource.Channel.ONLINE_PRODUCTS + ) + ), + ) + + +@pytest.mark.asyncio +async def test_create_data_source_flattened_async(): + client = DataSourcesServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_data_source), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = datasources.DataSource() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + datasources.DataSource() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.create_data_source( + parent="parent_value", + data_source=datasources.DataSource( + primary_product_data_source=datasourcetypes.PrimaryProductDataSource( + channel=datasourcetypes.PrimaryProductDataSource.Channel.ONLINE_PRODUCTS + ) + ), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].data_source + mock_val = datasources.DataSource( + primary_product_data_source=datasourcetypes.PrimaryProductDataSource( + channel=datasourcetypes.PrimaryProductDataSource.Channel.ONLINE_PRODUCTS + ) + ) + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_create_data_source_flattened_error_async(): + client = DataSourcesServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.create_data_source( + datasources.CreateDataSourceRequest(), + parent="parent_value", + data_source=datasources.DataSource( + primary_product_data_source=datasourcetypes.PrimaryProductDataSource( + channel=datasourcetypes.PrimaryProductDataSource.Channel.ONLINE_PRODUCTS + ) + ), + ) + + +@pytest.mark.parametrize( + "request_type", + [ + datasources.UpdateDataSourceRequest, + dict, + ], +) +def test_update_data_source(request_type, transport: str = "grpc"): + client = DataSourcesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
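+    # Patching __call__ on the stub's multicallable keeps the test hermetic:
+    # no real RPC is issued and nothing goes over the network.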
+ with mock.patch.object( + type(client.transport.update_data_source), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = datasources.DataSource( + name="name_value", + data_source_id=1462, + display_name="display_name_value", + input=datasources.DataSource.Input.API, + ) + response = client.update_data_source(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = datasources.UpdateDataSourceRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, datasources.DataSource) + assert response.name == "name_value" + assert response.data_source_id == 1462 + assert response.display_name == "display_name_value" + assert response.input == datasources.DataSource.Input.API + + +def test_update_data_source_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DataSourcesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_data_source), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.update_data_source() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == datasources.UpdateDataSourceRequest() + + +def test_update_data_source_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = DataSourcesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = datasources.UpdateDataSourceRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_data_source), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.update_data_source(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == datasources.UpdateDataSourceRequest() + + +def test_update_data_source_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DataSourcesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.update_data_source in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client._transport._wrapped_methods[ + client._transport.update_data_source + ] = mock_rpc + request = {} + client.update_data_source(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.update_data_source(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_update_data_source_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DataSourcesServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_data_source), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + datasources.DataSource( + name="name_value", + data_source_id=1462, + display_name="display_name_value", + input=datasources.DataSource.Input.API, + ) + ) + response = await client.update_data_source() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == datasources.UpdateDataSourceRequest() + + +@pytest.mark.asyncio +async def test_update_data_source_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DataSourcesServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.update_data_source + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[ + client._client._transport.update_data_source + ] = mock_object + + request = {} + await client.update_data_source(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + await client.update_data_source(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + +@pytest.mark.asyncio +async def test_update_data_source_async( + transport: str = "grpc_asyncio", request_type=datasources.UpdateDataSourceRequest +): + client = DataSourcesServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_data_source), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
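+        # FakeUnaryUnaryCall wraps the message in an awaitable fake gRPC call,
+        # so awaiting the async client method resolves to this DataSource.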
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + datasources.DataSource( + name="name_value", + data_source_id=1462, + display_name="display_name_value", + input=datasources.DataSource.Input.API, + ) + ) + response = await client.update_data_source(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = datasources.UpdateDataSourceRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, datasources.DataSource) + assert response.name == "name_value" + assert response.data_source_id == 1462 + assert response.display_name == "display_name_value" + assert response.input == datasources.DataSource.Input.API + + +@pytest.mark.asyncio +async def test_update_data_source_async_from_dict(): + await test_update_data_source_async(request_type=dict) + + +def test_update_data_source_field_headers(): + client = DataSourcesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = datasources.UpdateDataSourceRequest() + + request.data_source.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_data_source), "__call__" + ) as call: + call.return_value = datasources.DataSource() + client.update_data_source(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "data_source.name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_update_data_source_field_headers_async(): + client = DataSourcesServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = datasources.UpdateDataSourceRequest() + + request.data_source.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_data_source), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + datasources.DataSource() + ) + await client.update_data_source(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "data_source.name=name_value", + ) in kw["metadata"] + + +def test_update_data_source_flattened(): + client = DataSourcesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_data_source), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = datasources.DataSource() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
+ client.update_data_source( + data_source=datasources.DataSource( + primary_product_data_source=datasourcetypes.PrimaryProductDataSource( + channel=datasourcetypes.PrimaryProductDataSource.Channel.ONLINE_PRODUCTS + ) + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].data_source + mock_val = datasources.DataSource( + primary_product_data_source=datasourcetypes.PrimaryProductDataSource( + channel=datasourcetypes.PrimaryProductDataSource.Channel.ONLINE_PRODUCTS + ) + ) + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + assert arg == mock_val + + +def test_update_data_source_flattened_error(): + client = DataSourcesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_data_source( + datasources.UpdateDataSourceRequest(), + data_source=datasources.DataSource( + primary_product_data_source=datasourcetypes.PrimaryProductDataSource( + channel=datasourcetypes.PrimaryProductDataSource.Channel.ONLINE_PRODUCTS + ) + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +@pytest.mark.asyncio +async def test_update_data_source_flattened_async(): + client = DataSourcesServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_data_source), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = datasources.DataSource() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + datasources.DataSource() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.update_data_source( + data_source=datasources.DataSource( + primary_product_data_source=datasourcetypes.PrimaryProductDataSource( + channel=datasourcetypes.PrimaryProductDataSource.Channel.ONLINE_PRODUCTS + ) + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].data_source + mock_val = datasources.DataSource( + primary_product_data_source=datasourcetypes.PrimaryProductDataSource( + channel=datasourcetypes.PrimaryProductDataSource.Channel.ONLINE_PRODUCTS + ) + ) + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_update_data_source_flattened_error_async(): + client = DataSourcesServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
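+    # Mixing the two calling conventions is ambiguous, so the client raises
+    # ValueError before any RPC would be attempted.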
+ with pytest.raises(ValueError): + await client.update_data_source( + datasources.UpdateDataSourceRequest(), + data_source=datasources.DataSource( + primary_product_data_source=datasourcetypes.PrimaryProductDataSource( + channel=datasourcetypes.PrimaryProductDataSource.Channel.ONLINE_PRODUCTS + ) + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +@pytest.mark.parametrize( + "request_type", + [ + datasources.DeleteDataSourceRequest, + dict, + ], +) +def test_delete_data_source(request_type, transport: str = "grpc"): + client = DataSourcesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_data_source), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.delete_data_source(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = datasources.DeleteDataSourceRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_data_source_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DataSourcesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_data_source), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.delete_data_source() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == datasources.DeleteDataSourceRequest() + + +def test_delete_data_source_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = DataSourcesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = datasources.DeleteDataSourceRequest( + name="name_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_data_source), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.delete_data_source(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == datasources.DeleteDataSourceRequest( + name="name_value", + ) + + +def test_delete_data_source_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DataSourcesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.delete_data_source in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.delete_data_source + ] = mock_rpc + request = {} + client.delete_data_source(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.delete_data_source(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_delete_data_source_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DataSourcesServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_data_source), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_data_source() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == datasources.DeleteDataSourceRequest() + + +@pytest.mark.asyncio +async def test_delete_data_source_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DataSourcesServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.delete_data_source + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[ + client._client._transport.delete_data_source + ] = mock_object + + request = {} + await client.delete_data_source(request) + + # Establish that the underlying gRPC stub method was called. 
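+        # The call should have gone through the cached wrapper (replaced by
+        # mock_object above) without wrapper_fn being invoked again.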
+ assert mock_object.call_count == 1 + + await client.delete_data_source(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + +@pytest.mark.asyncio +async def test_delete_data_source_async( + transport: str = "grpc_asyncio", request_type=datasources.DeleteDataSourceRequest +): + client = DataSourcesServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_data_source), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_data_source(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = datasources.DeleteDataSourceRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_delete_data_source_async_from_dict(): + await test_delete_data_source_async(request_type=dict) + + +def test_delete_data_source_field_headers(): + client = DataSourcesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = datasources.DeleteDataSourceRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_data_source), "__call__" + ) as call: + call.return_value = None + client.delete_data_source(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_delete_data_source_field_headers_async(): + client = DataSourcesServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = datasources.DeleteDataSourceRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_data_source), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_data_source(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_delete_data_source_flattened(): + client = DataSourcesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
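+    # DeleteDataSource has no meaningful response payload, so the mocked stub
+    # simply returns None below.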
+ with mock.patch.object( + type(client.transport.delete_data_source), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = None + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.delete_data_source( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_delete_data_source_flattened_error(): + client = DataSourcesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_data_source( + datasources.DeleteDataSourceRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_delete_data_source_flattened_async(): + client = DataSourcesServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_data_source), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = None + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.delete_data_source( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_delete_data_source_flattened_error_async(): + client = DataSourcesServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.delete_data_source( + datasources.DeleteDataSourceRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + datasources.FetchDataSourceRequest, + dict, + ], +) +def test_fetch_data_source(request_type, transport: str = "grpc"): + client = DataSourcesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.fetch_data_source), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.fetch_data_source(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = datasources.FetchDataSourceRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +def test_fetch_data_source_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. 
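+    # Even with no arguments, the client is still expected to send a default
+    # FetchDataSourceRequest to the stub.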
+ client = DataSourcesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.fetch_data_source), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.fetch_data_source() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == datasources.FetchDataSourceRequest() + + +def test_fetch_data_source_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = DataSourcesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = datasources.FetchDataSourceRequest( + name="name_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.fetch_data_source), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.fetch_data_source(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == datasources.FetchDataSourceRequest( + name="name_value", + ) + + +def test_fetch_data_source_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DataSourcesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.fetch_data_source in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.fetch_data_source + ] = mock_rpc + request = {} + client.fetch_data_source(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.fetch_data_source(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_fetch_data_source_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DataSourcesServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.fetch_data_source), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
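+        # The fake call resolves to None, mirroring an RPC whose response
+        # message is empty.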
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.fetch_data_source() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == datasources.FetchDataSourceRequest() + + +@pytest.mark.asyncio +async def test_fetch_data_source_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DataSourcesServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.fetch_data_source + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[ + client._client._transport.fetch_data_source + ] = mock_object + + request = {} + await client.fetch_data_source(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + await client.fetch_data_source(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + +@pytest.mark.asyncio +async def test_fetch_data_source_async( + transport: str = "grpc_asyncio", request_type=datasources.FetchDataSourceRequest +): + client = DataSourcesServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.fetch_data_source), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.fetch_data_source(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = datasources.FetchDataSourceRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_fetch_data_source_async_from_dict(): + await test_fetch_data_source_async(request_type=dict) + + +def test_fetch_data_source_field_headers(): + client = DataSourcesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = datasources.FetchDataSourceRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.fetch_data_source), "__call__" + ) as call: + call.return_value = None + client.fetch_data_source(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_fetch_data_source_field_headers_async(): + client = DataSourcesServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = datasources.FetchDataSourceRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.fetch_data_source), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.fetch_data_source(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.parametrize( + "request_type", + [ + datasources.GetDataSourceRequest, + dict, + ], +) +def test_get_data_source_rest(request_type): + client = DataSourcesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "accounts/sample1/dataSources/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = datasources.DataSource( + name="name_value", + data_source_id=1462, + display_name="display_name_value", + input=datasources.DataSource.Input.API, + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = datasources.DataSource.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_data_source(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, datasources.DataSource) + assert response.name == "name_value" + assert response.data_source_id == 1462 + assert response.display_name == "display_name_value" + assert response.input == datasources.DataSource.Input.API + + +def test_get_data_source_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DataSourcesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_data_source in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client._transport._wrapped_methods[client._transport.get_data_source] = mock_rpc + + request = {} + client.get_data_source(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.get_data_source(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_get_data_source_rest_required_fields( + request_type=datasources.GetDataSourceRequest, +): + transport_class = transports.DataSourcesServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_data_source._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_data_source._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = DataSourcesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = datasources.DataSource() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
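+            # Build a synthetic transcode result so the call is routed to a
+            # stub uri with everything carried in query_params.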
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = datasources.DataSource.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_data_source(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_data_source_rest_unset_required_fields(): + transport = transports.DataSourcesServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_data_source._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_data_source_rest_interceptors(null_interceptor): + transport = transports.DataSourcesServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.DataSourcesServiceRestInterceptor(), + ) + client = DataSourcesServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.DataSourcesServiceRestInterceptor, "post_get_data_source" + ) as post, mock.patch.object( + transports.DataSourcesServiceRestInterceptor, "pre_get_data_source" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = datasources.GetDataSourceRequest.pb( + datasources.GetDataSourceRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = datasources.DataSource.to_json( + datasources.DataSource() + ) + + request = datasources.GetDataSourceRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = datasources.DataSource() + + client.get_data_source( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_data_source_rest_bad_request( + transport: str = "rest", request_type=datasources.GetDataSourceRequest +): + client = DataSourcesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "accounts/sample1/dataSources/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
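+    # A 400 status from the mocked session should surface to the caller as
+    # core_exceptions.BadRequest.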
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_data_source(request) + + +def test_get_data_source_rest_flattened(): + client = DataSourcesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = datasources.DataSource() + + # get arguments that satisfy an http rule for this method + sample_request = {"name": "accounts/sample1/dataSources/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = datasources.DataSource.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_data_source(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/datasources/v1beta/{name=accounts/*/dataSources/*}" + % client.transport._host, + args[1], + ) + + +def test_get_data_source_rest_flattened_error(transport: str = "rest"): + client = DataSourcesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_data_source( + datasources.GetDataSourceRequest(), + name="name_value", + ) + + +def test_get_data_source_rest_error(): + client = DataSourcesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + datasources.ListDataSourcesRequest, + dict, + ], +) +def test_list_data_sources_rest(request_type): + client = DataSourcesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "accounts/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = datasources.ListDataSourcesResponse( + next_page_token="next_page_token_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = datasources.ListDataSourcesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_data_sources(request) + + # Establish that the response is the type that we expect. 
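+    # The list method returns a pager; attributes such as next_page_token are
+    # proxied from the underlying ListDataSourcesResponse.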
+ assert isinstance(response, pagers.ListDataSourcesPager) + assert response.next_page_token == "next_page_token_value" + + +def test_list_data_sources_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DataSourcesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_data_sources in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.list_data_sources + ] = mock_rpc + + request = {} + client.list_data_sources(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_data_sources(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_list_data_sources_rest_required_fields( + request_type=datasources.ListDataSourcesRequest, +): + transport_class = transports.DataSourcesServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_data_sources._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_data_sources._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "page_size", + "page_token", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = DataSourcesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = datasources.ListDataSourcesResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = datasources.ListDataSourcesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_data_sources(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_data_sources_rest_unset_required_fields(): + transport = transports.DataSourcesServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.list_data_sources._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_data_sources_rest_interceptors(null_interceptor): + transport = transports.DataSourcesServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.DataSourcesServiceRestInterceptor(), + ) + client = DataSourcesServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.DataSourcesServiceRestInterceptor, "post_list_data_sources" + ) as post, mock.patch.object( + transports.DataSourcesServiceRestInterceptor, "pre_list_data_sources" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = datasources.ListDataSourcesRequest.pb( + datasources.ListDataSourcesRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = datasources.ListDataSourcesResponse.to_json( + datasources.ListDataSourcesResponse() + ) + + request = datasources.ListDataSourcesRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = datasources.ListDataSourcesResponse() + + client.list_data_sources( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_data_sources_rest_bad_request( + transport: str = "rest", request_type=datasources.ListDataSourcesRequest +): + client = DataSourcesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "accounts/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_data_sources(request) + + +def test_list_data_sources_rest_flattened(): + client = DataSourcesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = datasources.ListDataSourcesResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "accounts/sample1"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = datasources.ListDataSourcesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.list_data_sources(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/datasources/v1beta/{parent=accounts/*}/dataSources" + % client.transport._host, + args[1], + ) + + +def test_list_data_sources_rest_flattened_error(transport: str = "rest"): + client = DataSourcesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_data_sources( + datasources.ListDataSourcesRequest(), + parent="parent_value", + ) + + +def test_list_data_sources_rest_pager(transport: str = "rest"): + client = DataSourcesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + datasources.ListDataSourcesResponse( + data_sources=[ + datasources.DataSource(), + datasources.DataSource(), + datasources.DataSource(), + ], + next_page_token="abc", + ), + datasources.ListDataSourcesResponse( + data_sources=[], + next_page_token="def", + ), + datasources.ListDataSourcesResponse( + data_sources=[ + datasources.DataSource(), + ], + next_page_token="ghi", + ), + datasources.ListDataSourcesResponse( + data_sources=[ + datasources.DataSource(), + datasources.DataSource(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + datasources.ListDataSourcesResponse.to_json(x) for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "accounts/sample1"} + + pager = client.list_data_sources(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, datasources.DataSource) for i in results) + + pages = list(client.list_data_sources(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + datasources.CreateDataSourceRequest, + dict, + ], +) +def test_create_data_source_rest(request_type): + client = DataSourcesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "accounts/sample1"} + request_init["data_source"] = { + "primary_product_data_source": { + "channel": 1, + "feed_label": "feed_label_value", + "content_language": "content_language_value", + "countries": ["countries_value1", "countries_value2"], + }, + "supplemental_product_data_source": { + "feed_label": "feed_label_value", + "content_language": "content_language_value", + }, + "local_inventory_data_source": { + "feed_label": "feed_label_value", + "content_language": "content_language_value", + }, + "regional_inventory_data_source": { + "feed_label": "feed_label_value", + "content_language": "content_language_value", + }, + "promotion_data_source": { + "target_country": "target_country_value", + "content_language": "content_language_value", + }, + "name": "name_value", + "data_source_id": 1462, + "display_name": "display_name_value", + "input": 1, + "file_input": { + "fetch_settings": { + "enabled": True, + "day_of_month": 1271, + "time_of_day": { + "hours": 561, + "minutes": 773, + "seconds": 751, + "nanos": 543, + }, + "day_of_week": 1, + "time_zone": "time_zone_value", + "frequency": 1, + "fetch_uri": "fetch_uri_value", + "username": "username_value", + "password": "password_value", + }, + "file_name": "file_name_value", + "file_input_type": 1, + }, + } + # The version of a generated dependency at test runtime may differ from the version used during generation. 
+ # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = datasources.CreateDataSourceRequest.meta.fields["data_source"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["data_source"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["data_source"][field])): + del request_init["data_source"][field][i][subfield] + else: + del request_init["data_source"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = datasources.DataSource( + name="name_value", + data_source_id=1462, + display_name="display_name_value", + input=datasources.DataSource.Input.API, + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = datasources.DataSource.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.create_data_source(request) + + # Establish that the response is the type that we expect. 
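+    # The mocked Response carried MessageToJson(return_value) as its body, so
+    # the client is expected to deserialize it back into a proto-plus
+    # DataSource whose scalar fields match the values designated above.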
+ assert isinstance(response, datasources.DataSource) + assert response.name == "name_value" + assert response.data_source_id == 1462 + assert response.display_name == "display_name_value" + assert response.input == datasources.DataSource.Input.API + + +def test_create_data_source_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DataSourcesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.create_data_source in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.create_data_source + ] = mock_rpc + + request = {} + client.create_data_source(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.create_data_source(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_create_data_source_rest_required_fields( + request_type=datasources.CreateDataSourceRequest, +): + transport_class = transports.DataSourcesServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_data_source._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_data_source._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = DataSourcesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = datasources.DataSource() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
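+            # transcode() would normally derive these values from the method's
+            # http_options, producing something shaped like (illustrative
+            # sketch only, not the exact rule for this RPC):
+            #
+            #   {"uri": "/datasources/v1beta/accounts/123/dataSources",
+            #    "method": "post", "body": <request pb>, "query_params": <pb>}
+            #
+            # A canned result keeps the test independent of real field values.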
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = datasources.DataSource.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.create_data_source(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_create_data_source_rest_unset_required_fields(): + transport = transports.DataSourcesServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.create_data_source._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(()) + & set( + ( + "parent", + "dataSource", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_data_source_rest_interceptors(null_interceptor): + transport = transports.DataSourcesServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.DataSourcesServiceRestInterceptor(), + ) + client = DataSourcesServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.DataSourcesServiceRestInterceptor, "post_create_data_source" + ) as post, mock.patch.object( + transports.DataSourcesServiceRestInterceptor, "pre_create_data_source" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = datasources.CreateDataSourceRequest.pb( + datasources.CreateDataSourceRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = datasources.DataSource.to_json( + datasources.DataSource() + ) + + request = datasources.CreateDataSourceRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = datasources.DataSource() + + client.create_data_source( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_create_data_source_rest_bad_request( + transport: str = "rest", request_type=datasources.CreateDataSourceRequest +): + client = DataSourcesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "accounts/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.create_data_source(request) + + +def test_create_data_source_rest_flattened(): + client = DataSourcesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = datasources.DataSource() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "accounts/sample1"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + data_source=datasources.DataSource( + primary_product_data_source=datasourcetypes.PrimaryProductDataSource( + channel=datasourcetypes.PrimaryProductDataSource.Channel.ONLINE_PRODUCTS + ) + ), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = datasources.DataSource.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.create_data_source(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/datasources/v1beta/{parent=accounts/*}/dataSources" + % client.transport._host, + args[1], + ) + + +def test_create_data_source_rest_flattened_error(transport: str = "rest"): + client = DataSourcesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
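+    # Flattened arguments are only a convenience for building the request
+    # message, so combining them with an explicit request object is ambiguous;
+    # the generated client rejects the call with ValueError before any HTTP
+    # traffic happens.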
+ with pytest.raises(ValueError): + client.create_data_source( + datasources.CreateDataSourceRequest(), + parent="parent_value", + data_source=datasources.DataSource( + primary_product_data_source=datasourcetypes.PrimaryProductDataSource( + channel=datasourcetypes.PrimaryProductDataSource.Channel.ONLINE_PRODUCTS + ) + ), + ) + + +def test_create_data_source_rest_error(): + client = DataSourcesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + datasources.UpdateDataSourceRequest, + dict, + ], +) +def test_update_data_source_rest(request_type): + client = DataSourcesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"data_source": {"name": "accounts/sample1/dataSources/sample2"}} + request_init["data_source"] = { + "primary_product_data_source": { + "channel": 1, + "feed_label": "feed_label_value", + "content_language": "content_language_value", + "countries": ["countries_value1", "countries_value2"], + }, + "supplemental_product_data_source": { + "feed_label": "feed_label_value", + "content_language": "content_language_value", + }, + "local_inventory_data_source": { + "feed_label": "feed_label_value", + "content_language": "content_language_value", + }, + "regional_inventory_data_source": { + "feed_label": "feed_label_value", + "content_language": "content_language_value", + }, + "promotion_data_source": { + "target_country": "target_country_value", + "content_language": "content_language_value", + }, + "name": "accounts/sample1/dataSources/sample2", + "data_source_id": 1462, + "display_name": "display_name_value", + "input": 1, + "file_input": { + "fetch_settings": { + "enabled": True, + "day_of_month": 1271, + "time_of_day": { + "hours": 561, + "minutes": 773, + "seconds": 751, + "nanos": 543, + }, + "day_of_week": 1, + "time_zone": "time_zone_value", + "frequency": 1, + "fetch_uri": "fetch_uri_value", + "username": "username_value", + "password": "password_value", + }, + "file_name": "file_name_value", + "file_input_type": 1, + }, + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = datasources.UpdateDataSourceRequest.meta.fields["data_source"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["data_source"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["data_source"][field])): + del request_init["data_source"][field][i][subfield] + else: + del request_init["data_source"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = datasources.DataSource( + name="name_value", + data_source_id=1462, + display_name="display_name_value", + input=datasources.DataSource.Input.API, + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = datasources.DataSource.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.update_data_source(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, datasources.DataSource) + assert response.name == "name_value" + assert response.data_source_id == 1462 + assert response.display_name == "display_name_value" + assert response.input == datasources.DataSource.Input.API + + +def test_update_data_source_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DataSourcesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.update_data_source in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.update_data_source + ] = mock_rpc + + request = {} + client.update_data_source(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.update_data_source(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_update_data_source_rest_required_fields( + request_type=datasources.UpdateDataSourceRequest, +): + transport_class = transports.DataSourcesServiceRestTransport + + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_data_source._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_data_source._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("update_mask",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + + client = DataSourcesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = datasources.DataSource() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "patch", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = datasources.DataSource.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.update_data_source(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_update_data_source_rest_unset_required_fields(): + transport = transports.DataSourcesServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.update_data_source._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(("updateMask",)) + & set( + ( + "dataSource", + "updateMask", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_data_source_rest_interceptors(null_interceptor): + transport = transports.DataSourcesServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.DataSourcesServiceRestInterceptor(), + ) + client = DataSourcesServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.DataSourcesServiceRestInterceptor, "post_update_data_source" + ) as post, mock.patch.object( + transports.DataSourcesServiceRestInterceptor, "pre_update_data_source" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = datasources.UpdateDataSourceRequest.pb( + datasources.UpdateDataSourceRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = datasources.DataSource.to_json( + datasources.DataSource() + ) + + request = datasources.UpdateDataSourceRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = datasources.DataSource() + + client.update_data_source( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_update_data_source_rest_bad_request( + transport: str = "rest", request_type=datasources.UpdateDataSourceRequest +): + client = DataSourcesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"data_source": {"name": "accounts/sample1/dataSources/sample2"}} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.update_data_source(request) + + +def test_update_data_source_rest_flattened(): + client = DataSourcesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = datasources.DataSource() + + # get arguments that satisfy an http rule for this method + sample_request = { + "data_source": {"name": "accounts/sample1/dataSources/sample2"} + } + + # get truthy value for each flattened field + mock_args = dict( + data_source=datasources.DataSource( + primary_product_data_source=datasourcetypes.PrimaryProductDataSource( + channel=datasourcetypes.PrimaryProductDataSource.Channel.ONLINE_PRODUCTS + ) + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = datasources.DataSource.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.update_data_source(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/datasources/v1beta/{data_source.name=accounts/*/dataSources/*}" + % client.transport._host, + args[1], + ) + + +def test_update_data_source_rest_flattened_error(transport: str = "rest"): + client = DataSourcesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_data_source( + datasources.UpdateDataSourceRequest(), + data_source=datasources.DataSource( + primary_product_data_source=datasourcetypes.PrimaryProductDataSource( + channel=datasourcetypes.PrimaryProductDataSource.Channel.ONLINE_PRODUCTS + ) + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +def test_update_data_source_rest_error(): + client = DataSourcesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + datasources.DeleteDataSourceRequest, + dict, + ], +) +def test_delete_data_source_rest(request_type): + client = DataSourcesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "accounts/sample1/dataSources/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
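+        # DeleteDataSource carries no response payload (google.protobuf.Empty),
+        # so the client method returns None and the faked HTTP body below is
+        # just an empty string.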
+ return_value = None + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.delete_data_source(request) + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_data_source_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DataSourcesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.delete_data_source in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.delete_data_source + ] = mock_rpc + + request = {} + client.delete_data_source(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.delete_data_source(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_delete_data_source_rest_required_fields( + request_type=datasources.DeleteDataSourceRequest, +): + transport_class = transports.DataSourcesServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_data_source._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_data_source._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = DataSourcesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = None + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "delete", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.delete_data_source(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_delete_data_source_rest_unset_required_fields(): + transport = transports.DataSourcesServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.delete_data_source._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_data_source_rest_interceptors(null_interceptor): + transport = transports.DataSourcesServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.DataSourcesServiceRestInterceptor(), + ) + client = DataSourcesServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.DataSourcesServiceRestInterceptor, "pre_delete_data_source" + ) as pre: + pre.assert_not_called() + pb_message = datasources.DeleteDataSourceRequest.pb( + datasources.DeleteDataSourceRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + + request = datasources.DeleteDataSourceRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + + client.delete_data_source( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + + +def test_delete_data_source_rest_bad_request( + transport: str = "rest", request_type=datasources.DeleteDataSourceRequest +): + client = DataSourcesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "accounts/sample1/dataSources/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_data_source(request) + + +def test_delete_data_source_rest_flattened(): + client = DataSourcesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = None + + # get arguments that satisfy an http rule for this method + sample_request = {"name": "accounts/sample1/dataSources/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.delete_data_source(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/datasources/v1beta/{name=accounts/*/dataSources/*}" + % client.transport._host, + args[1], + ) + + +def test_delete_data_source_rest_flattened_error(transport: str = "rest"): + client = DataSourcesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_data_source( + datasources.DeleteDataSourceRequest(), + name="name_value", + ) + + +def test_delete_data_source_rest_error(): + client = DataSourcesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + datasources.FetchDataSourceRequest, + dict, + ], +) +def test_fetch_data_source_rest(request_type): + client = DataSourcesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "accounts/sample1/dataSources/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.fetch_data_source(request) + + # Establish that the response is the type that we expect. + assert response is None + + +def test_fetch_data_source_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DataSourcesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.fetch_data_source in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.fetch_data_source + ] = mock_rpc + + request = {} + client.fetch_data_source(request) + + # Establish that the underlying gRPC stub method was called. 
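+        # (Despite the "gRPC stub" wording above, this test runs against the
+        # REST transport; the point is simply that the cached entry in
+        # _wrapped_methods is the callable that gets invoked.)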
+ assert mock_rpc.call_count == 1 + + client.fetch_data_source(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_fetch_data_source_rest_required_fields( + request_type=datasources.FetchDataSourceRequest, +): + transport_class = transports.DataSourcesServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).fetch_data_source._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).fetch_data_source._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = DataSourcesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = None + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.fetch_data_source(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_fetch_data_source_rest_unset_required_fields(): + transport = transports.DataSourcesServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.fetch_data_source._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_fetch_data_source_rest_interceptors(null_interceptor): + transport = transports.DataSourcesServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.DataSourcesServiceRestInterceptor(), + ) + client = DataSourcesServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.DataSourcesServiceRestInterceptor, "pre_fetch_data_source" + ) as pre: + pre.assert_not_called() + pb_message = datasources.FetchDataSourceRequest.pb( + datasources.FetchDataSourceRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + + request = datasources.FetchDataSourceRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + + client.fetch_data_source( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + + +def test_fetch_data_source_rest_bad_request( + transport: str = "rest", request_type=datasources.FetchDataSourceRequest +): + client = DataSourcesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "accounts/sample1/dataSources/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.fetch_data_source(request) + + +def test_fetch_data_source_rest_error(): + client = DataSourcesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. 
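+    # A transport instance is already bound to its own credentials, so passing
+    # another credential source (credentials, credentials_file, api_key or
+    # scopes) alongside it is contradictory; each combination below should
+    # raise ValueError at client construction time.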
+ transport = transports.DataSourcesServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = DataSourcesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.DataSourcesServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = DataSourcesServiceClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. + transport = transports.DataSourcesServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = DataSourcesServiceClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = DataSourcesServiceClient( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.DataSourcesServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = DataSourcesServiceClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.DataSourcesServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = DataSourcesServiceClient(transport=transport) + assert client.transport is transport + + +def test_transport_get_channel(): + # A client may be instantiated with a custom transport instance. + transport = transports.DataSourcesServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + transport = transports.DataSourcesServiceGrpcAsyncIOTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.DataSourcesServiceGrpcTransport, + transports.DataSourcesServiceGrpcAsyncIOTransport, + transports.DataSourcesServiceRestTransport, + ], +) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "rest", + ], +) +def test_transport_kind(transport_name): + transport = DataSourcesServiceClient.get_transport_class(transport_name)( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert transport.kind == transport_name + + +def test_transport_grpc_default(): + # A client should use the gRPC transport by default. 
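+    # With no transport argument the client falls back to gRPC; REST has to be
+    # requested explicitly with transport="rest", as the earlier tests do.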
+ client = DataSourcesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert isinstance( + client.transport, + transports.DataSourcesServiceGrpcTransport, + ) + + +def test_data_sources_service_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.DataSourcesServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json", + ) + + +def test_data_sources_service_base_transport(): + # Instantiate the base transport. + with mock.patch( + "google.shopping.merchant_datasources_v1beta.services.data_sources_service.transports.DataSourcesServiceTransport.__init__" + ) as Transport: + Transport.return_value = None + transport = transports.DataSourcesServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. + methods = ( + "get_data_source", + "list_data_sources", + "create_data_source", + "update_data_source", + "delete_data_source", + "fetch_data_source", + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + with pytest.raises(NotImplementedError): + transport.close() + + # Catch all for all remaining methods and properties + remainder = [ + "kind", + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + + +def test_data_sources_service_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch( + "google.shopping.merchant_datasources_v1beta.services.data_sources_service.transports.DataSourcesServiceTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.DataSourcesServiceTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with( + "credentials.json", + scopes=None, + default_scopes=("https://www.googleapis.com/auth/content",), + quota_project_id="octopus", + ) + + +def test_data_sources_service_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. + with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( + "google.shopping.merchant_datasources_v1beta.services.data_sources_service.transports.DataSourcesServiceTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.DataSourcesServiceTransport() + adc.assert_called_once() + + +def test_data_sources_service_auth_adc(): + # If no credentials are provided, we should use ADC credentials. 
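+    # "ADC" is Application Default Credentials: google.auth.default() is
+    # patched so the test can assert it is consulted with the Content API
+    # scope as the default scope, e.g.
+    #
+    #   credentials, _ = google.auth.default(
+    #       scopes=None,
+    #       default_scopes=("https://www.googleapis.com/auth/content",),
+    #       quota_project_id=None,
+    #   )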
+ with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + DataSourcesServiceClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=("https://www.googleapis.com/auth/content",), + quota_project_id=None, + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.DataSourcesServiceGrpcTransport, + transports.DataSourcesServiceGrpcAsyncIOTransport, + ], +) +def test_data_sources_service_transport_auth_adc(transport_class): + # If credentials and host are not provided, the transport class should use + # ADC credentials. + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + adc.assert_called_once_with( + scopes=["1", "2"], + default_scopes=("https://www.googleapis.com/auth/content",), + quota_project_id="octopus", + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.DataSourcesServiceGrpcTransport, + transports.DataSourcesServiceGrpcAsyncIOTransport, + transports.DataSourcesServiceRestTransport, + ], +) +def test_data_sources_service_transport_auth_gdch_credentials(transport_class): + host = "https://language.com" + api_audience_tests = [None, "https://language2.com"] + api_audience_expect = [host, "https://language2.com"] + for t, e in zip(api_audience_tests, api_audience_expect): + with mock.patch.object(google.auth, "default", autospec=True) as adc: + gdch_mock = mock.MagicMock() + type(gdch_mock).with_gdch_audience = mock.PropertyMock( + return_value=gdch_mock + ) + adc.return_value = (gdch_mock, None) + transport_class(host=host, api_audience=t) + gdch_mock.with_gdch_audience.assert_called_once_with(e) + + +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.DataSourcesServiceGrpcTransport, grpc_helpers), + (transports.DataSourcesServiceGrpcAsyncIOTransport, grpc_helpers_async), + ], +) +def test_data_sources_service_transport_create_channel(transport_class, grpc_helpers): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
+ with mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + + create_channel.assert_called_with( + "merchantapi.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + default_scopes=("https://www.googleapis.com/auth/content",), + scopes=["1", "2"], + default_host="merchantapi.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.DataSourcesServiceGrpcTransport, + transports.DataSourcesServiceGrpcAsyncIOTransport, + ], +) +def test_data_sources_service_grpc_transport_client_cert_source_for_mtls( + transport_class, +): + cred = ga_credentials.AnonymousCredentials() + + # Check ssl_channel_credentials is used if provided. + with mock.patch.object(transport_class, "create_channel") as mock_create_channel: + mock_ssl_channel_creds = mock.Mock() + transport_class( + host="squid.clam.whelk", + credentials=cred, + ssl_channel_credentials=mock_ssl_channel_creds, + ) + mock_create_channel.assert_called_once_with( + "squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_channel_creds, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls + # is used. 
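+    # client_cert_source_for_mtls is a callback returning a (cert, key) pair;
+    # the transport is expected to feed that pair into
+    # grpc.ssl_channel_credentials(certificate_chain=..., private_key=...),
+    # which is exactly what the mock assertion below checks.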
+ with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): + with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: + transport_class( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback, + ) + expected_cert, expected_key = client_cert_source_callback() + mock_ssl_cred.assert_called_once_with( + certificate_chain=expected_cert, private_key=expected_key + ) + + +def test_data_sources_service_http_transport_client_cert_source_for_mtls(): + cred = ga_credentials.AnonymousCredentials() + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ) as mock_configure_mtls_channel: + transports.DataSourcesServiceRestTransport( + credentials=cred, client_cert_source_for_mtls=client_cert_source_callback + ) + mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + "rest", + ], +) +def test_data_sources_service_host_no_port(transport_name): + client = DataSourcesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="merchantapi.googleapis.com" + ), + transport=transport_name, + ) + assert client.transport._host == ( + "merchantapi.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://merchantapi.googleapis.com" + ) + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + "rest", + ], +) +def test_data_sources_service_host_with_port(transport_name): + client = DataSourcesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="merchantapi.googleapis.com:8000" + ), + transport=transport_name, + ) + assert client.transport._host == ( + "merchantapi.googleapis.com:8000" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://merchantapi.googleapis.com:8000" + ) + + +@pytest.mark.parametrize( + "transport_name", + [ + "rest", + ], +) +def test_data_sources_service_client_transport_session_collision(transport_name): + creds1 = ga_credentials.AnonymousCredentials() + creds2 = ga_credentials.AnonymousCredentials() + client1 = DataSourcesServiceClient( + credentials=creds1, + transport=transport_name, + ) + client2 = DataSourcesServiceClient( + credentials=creds2, + transport=transport_name, + ) + session1 = client1.transport.get_data_source._session + session2 = client2.transport.get_data_source._session + assert session1 != session2 + session1 = client1.transport.list_data_sources._session + session2 = client2.transport.list_data_sources._session + assert session1 != session2 + session1 = client1.transport.create_data_source._session + session2 = client2.transport.create_data_source._session + assert session1 != session2 + session1 = client1.transport.update_data_source._session + session2 = client2.transport.update_data_source._session + assert session1 != session2 + session1 = client1.transport.delete_data_source._session + session2 = client2.transport.delete_data_source._session + assert session1 != session2 + session1 = client1.transport.fetch_data_source._session + session2 = client2.transport.fetch_data_source._session + assert session1 != session2 + + +def test_data_sources_service_grpc_transport_channel(): + channel = 
grpc.secure_channel("http://localhost/", grpc.local_channel_credentials())
+
+    # Check that channel is used if provided.
+    transport = transports.DataSourcesServiceGrpcTransport(
+        host="squid.clam.whelk",
+        channel=channel,
+    )
+    assert transport.grpc_channel == channel
+    assert transport._host == "squid.clam.whelk:443"
+    assert transport._ssl_channel_credentials is None
+
+
+def test_data_sources_service_grpc_asyncio_transport_channel():
+    channel = aio.secure_channel("http://localhost/", grpc.local_channel_credentials())
+
+    # Check that channel is used if provided.
+    transport = transports.DataSourcesServiceGrpcAsyncIOTransport(
+        host="squid.clam.whelk",
+        channel=channel,
+    )
+    assert transport.grpc_channel == channel
+    assert transport._host == "squid.clam.whelk:443"
+    assert transport._ssl_channel_credentials is None
+
+
+# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are
+# removed from grpc/grpc_asyncio transport constructor.
+@pytest.mark.parametrize(
+    "transport_class",
+    [
+        transports.DataSourcesServiceGrpcTransport,
+        transports.DataSourcesServiceGrpcAsyncIOTransport,
+    ],
+)
+def test_data_sources_service_transport_channel_mtls_with_client_cert_source(
+    transport_class,
+):
+    with mock.patch(
+        "grpc.ssl_channel_credentials", autospec=True
+    ) as grpc_ssl_channel_cred:
+        with mock.patch.object(
+            transport_class, "create_channel"
+        ) as grpc_create_channel:
+            mock_ssl_cred = mock.Mock()
+            grpc_ssl_channel_cred.return_value = mock_ssl_cred
+
+            mock_grpc_channel = mock.Mock()
+            grpc_create_channel.return_value = mock_grpc_channel
+
+            cred = ga_credentials.AnonymousCredentials()
+            with pytest.warns(DeprecationWarning):
+                with mock.patch.object(google.auth, "default") as adc:
+                    adc.return_value = (cred, None)
+                    transport = transport_class(
+                        host="squid.clam.whelk",
+                        api_mtls_endpoint="mtls.squid.clam.whelk",
+                        client_cert_source=client_cert_source_callback,
+                    )
+                    adc.assert_called_once()
+
+            grpc_ssl_channel_cred.assert_called_once_with(
+                certificate_chain=b"cert bytes", private_key=b"key bytes"
+            )
+            grpc_create_channel.assert_called_once_with(
+                "mtls.squid.clam.whelk:443",
+                credentials=cred,
+                credentials_file=None,
+                scopes=None,
+                ssl_credentials=mock_ssl_cred,
+                quota_project_id=None,
+                options=[
+                    ("grpc.max_send_message_length", -1),
+                    ("grpc.max_receive_message_length", -1),
+                ],
+            )
+            assert transport.grpc_channel == mock_grpc_channel
+            assert transport._ssl_channel_credentials == mock_ssl_cred
+
+
+# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are
+# removed from grpc/grpc_asyncio transport constructor.
+@pytest.mark.parametrize( + "transport_class", + [ + transports.DataSourcesServiceGrpcTransport, + transports.DataSourcesServiceGrpcAsyncIOTransport, + ], +) +def test_data_sources_service_transport_channel_mtls_with_adc(transport_class): + mock_ssl_cred = mock.Mock() + with mock.patch.multiple( + "google.auth.transport.grpc.SslCredentials", + __init__=mock.Mock(return_value=None), + ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), + ): + with mock.patch.object( + transport_class, "create_channel" + ) as grpc_create_channel: + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + mock_cred = mock.Mock() + + with pytest.warns(DeprecationWarning): + transport = transport_class( + host="squid.clam.whelk", + credentials=mock_cred, + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=None, + ) + + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=mock_cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + + +def test_data_source_path(): + account = "squid" + datasource = "clam" + expected = "accounts/{account}/dataSources/{datasource}".format( + account=account, + datasource=datasource, + ) + actual = DataSourcesServiceClient.data_source_path(account, datasource) + assert expected == actual + + +def test_parse_data_source_path(): + expected = { + "account": "whelk", + "datasource": "octopus", + } + path = DataSourcesServiceClient.data_source_path(**expected) + + # Check that the path construction is reversible. + actual = DataSourcesServiceClient.parse_data_source_path(path) + assert expected == actual + + +def test_common_billing_account_path(): + billing_account = "oyster" + expected = "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + actual = DataSourcesServiceClient.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "nudibranch", + } + path = DataSourcesServiceClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. + actual = DataSourcesServiceClient.parse_common_billing_account_path(path) + assert expected == actual + + +def test_common_folder_path(): + folder = "cuttlefish" + expected = "folders/{folder}".format( + folder=folder, + ) + actual = DataSourcesServiceClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "mussel", + } + path = DataSourcesServiceClient.common_folder_path(**expected) + + # Check that the path construction is reversible. + actual = DataSourcesServiceClient.parse_common_folder_path(path) + assert expected == actual + + +def test_common_organization_path(): + organization = "winkle" + expected = "organizations/{organization}".format( + organization=organization, + ) + actual = DataSourcesServiceClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "nautilus", + } + path = DataSourcesServiceClient.common_organization_path(**expected) + + # Check that the path construction is reversible. 
+ actual = DataSourcesServiceClient.parse_common_organization_path(path) + assert expected == actual + + +def test_common_project_path(): + project = "scallop" + expected = "projects/{project}".format( + project=project, + ) + actual = DataSourcesServiceClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "abalone", + } + path = DataSourcesServiceClient.common_project_path(**expected) + + # Check that the path construction is reversible. + actual = DataSourcesServiceClient.parse_common_project_path(path) + assert expected == actual + + +def test_common_location_path(): + project = "squid" + location = "clam" + expected = "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) + actual = DataSourcesServiceClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "whelk", + "location": "octopus", + } + path = DataSourcesServiceClient.common_location_path(**expected) + + # Check that the path construction is reversible. + actual = DataSourcesServiceClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object( + transports.DataSourcesServiceTransport, "_prep_wrapped_messages" + ) as prep: + client = DataSourcesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object( + transports.DataSourcesServiceTransport, "_prep_wrapped_messages" + ) as prep: + transport_class = DataSourcesServiceClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + +@pytest.mark.asyncio +async def test_transport_close_async(): + client = DataSourcesServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + with mock.patch.object( + type(getattr(client.transport, "grpc_channel")), "close" + ) as close: + async with client: + close.assert_not_called() + close.assert_called_once() + + +def test_transport_close(): + transports = { + "rest": "_session", + "grpc": "_grpc_channel", + } + + for transport, close_name in transports.items(): + client = DataSourcesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + with mock.patch.object( + type(getattr(client.transport, close_name)), "close" + ) as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +def test_client_ctx(): + transports = [ + "rest", + "grpc", + ] + for transport in transports: + client = DataSourcesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + # Test client calls underlying transport. 
+ with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() + + +@pytest.mark.parametrize( + "client_class,transport_class", + [ + (DataSourcesServiceClient, transports.DataSourcesServiceGrpcTransport), + ( + DataSourcesServiceAsyncClient, + transports.DataSourcesServiceGrpcAsyncIOTransport, + ), + ], +) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) diff --git a/packages/google-shopping-merchant-inventories/CHANGELOG.md b/packages/google-shopping-merchant-inventories/CHANGELOG.md index 00ae7242a473..431060790df4 100644 --- a/packages/google-shopping-merchant-inventories/CHANGELOG.md +++ b/packages/google-shopping-merchant-inventories/CHANGELOG.md @@ -1,5 +1,12 @@ # Changelog +## [0.1.7](https://github.com/googleapis/google-cloud-python/compare/google-shopping-merchant-inventories-v0.1.6...google-shopping-merchant-inventories-v0.1.7) (2024-05-27) + + +### Documentation + +* change in wording : feed specification -> data specification ([995bdaf](https://github.com/googleapis/google-cloud-python/commit/995bdaf5d95fcbfae7ee63393fb394cc2dba687a)) + ## [0.1.6](https://github.com/googleapis/google-cloud-python/compare/google-shopping-merchant-inventories-v0.1.5...google-shopping-merchant-inventories-v0.1.6) (2024-04-15) diff --git a/packages/google-shopping-merchant-inventories/google/shopping/merchant_inventories_v1beta/services/local_inventory_service/async_client.py b/packages/google-shopping-merchant-inventories/google/shopping/merchant_inventories_v1beta/services/local_inventory_service/async_client.py index 1397346f1604..77a15999bbdc 100644 --- a/packages/google-shopping-merchant-inventories/google/shopping/merchant_inventories_v1beta/services/local_inventory_service/async_client.py +++ b/packages/google-shopping-merchant-inventories/google/shopping/merchant_inventories_v1beta/services/local_inventory_service/async_client.py @@ -465,7 +465,7 @@ async def sample_insert_local_inventory(): for a specific product at the store specified by [storeCode][google.shopping.merchant.inventories.v1beta.LocalInventory.store_code]. For a list of all accepted attribute values, see the - [local product inventory feed + [local product inventory data specification](\ https://support.google.com/merchants/answer/3061342). 
""" diff --git a/packages/google-shopping-merchant-inventories/google/shopping/merchant_inventories_v1beta/services/local_inventory_service/client.py b/packages/google-shopping-merchant-inventories/google/shopping/merchant_inventories_v1beta/services/local_inventory_service/client.py index c91194887078..7f750427ea14 100644 --- a/packages/google-shopping-merchant-inventories/google/shopping/merchant_inventories_v1beta/services/local_inventory_service/client.py +++ b/packages/google-shopping-merchant-inventories/google/shopping/merchant_inventories_v1beta/services/local_inventory_service/client.py @@ -878,7 +878,7 @@ def sample_insert_local_inventory(): for a specific product at the store specified by [storeCode][google.shopping.merchant.inventories.v1beta.LocalInventory.store_code]. For a list of all accepted attribute values, see the - [local product inventory feed + [local product inventory data specification](\ https://support.google.com/merchants/answer/3061342). """ diff --git a/packages/google-shopping-merchant-inventories/google/shopping/merchant_inventories_v1beta/services/local_inventory_service/transports/rest.py b/packages/google-shopping-merchant-inventories/google/shopping/merchant_inventories_v1beta/services/local_inventory_service/transports/rest.py index c91fcab0b720..78f8207ceef3 100644 --- a/packages/google-shopping-merchant-inventories/google/shopping/merchant_inventories_v1beta/services/local_inventory_service/transports/rest.py +++ b/packages/google-shopping-merchant-inventories/google/shopping/merchant_inventories_v1beta/services/local_inventory_service/transports/rest.py @@ -361,7 +361,7 @@ def __call__( specified by [``storeCode``][google.shopping.merchant.inventories.v1beta.LocalInventory.store_code]. For a list of all accepted attribute values, see the - `local product inventory feed + `local product inventory data specification `__. """ diff --git a/packages/google-shopping-merchant-inventories/google/shopping/merchant_inventories_v1beta/services/regional_inventory_service/async_client.py b/packages/google-shopping-merchant-inventories/google/shopping/merchant_inventories_v1beta/services/regional_inventory_service/async_client.py index ebb51c5aefa8..83d34de175df 100644 --- a/packages/google-shopping-merchant-inventories/google/shopping/merchant_inventories_v1beta/services/regional_inventory_service/async_client.py +++ b/packages/google-shopping-merchant-inventories/google/shopping/merchant_inventories_v1beta/services/regional_inventory_service/async_client.py @@ -474,7 +474,7 @@ async def sample_insert_regional_inventory(): product in a specific [region][google.shopping.merchant.inventories.v1beta.RegionalInventory.region]. For a list of all accepted attribute values, see the - [regional product inventory feed + [regional product inventory data specification](\ https://support.google.com/merchants/answer/9698880). 
""" diff --git a/packages/google-shopping-merchant-inventories/google/shopping/merchant_inventories_v1beta/services/regional_inventory_service/client.py b/packages/google-shopping-merchant-inventories/google/shopping/merchant_inventories_v1beta/services/regional_inventory_service/client.py index 1a16d061bc1f..3ec0e066511f 100644 --- a/packages/google-shopping-merchant-inventories/google/shopping/merchant_inventories_v1beta/services/regional_inventory_service/client.py +++ b/packages/google-shopping-merchant-inventories/google/shopping/merchant_inventories_v1beta/services/regional_inventory_service/client.py @@ -889,7 +889,7 @@ def sample_insert_regional_inventory(): product in a specific [region][google.shopping.merchant.inventories.v1beta.RegionalInventory.region]. For a list of all accepted attribute values, see the - [regional product inventory feed + [regional product inventory data specification](\ https://support.google.com/merchants/answer/9698880). """ diff --git a/packages/google-shopping-merchant-inventories/google/shopping/merchant_inventories_v1beta/services/regional_inventory_service/transports/rest.py b/packages/google-shopping-merchant-inventories/google/shopping/merchant_inventories_v1beta/services/regional_inventory_service/transports/rest.py index 36879ce22bb8..3ecbd6993bae 100644 --- a/packages/google-shopping-merchant-inventories/google/shopping/merchant_inventories_v1beta/services/regional_inventory_service/transports/rest.py +++ b/packages/google-shopping-merchant-inventories/google/shopping/merchant_inventories_v1beta/services/regional_inventory_service/transports/rest.py @@ -370,7 +370,7 @@ def __call__( availability for a given product in a specific [``region``][google.shopping.merchant.inventories.v1beta.RegionalInventory.region]. For a list of all accepted attribute values, see the - `regional product inventory feed + `regional product inventory data specification `__. """ diff --git a/packages/google-shopping-merchant-inventories/google/shopping/merchant_inventories_v1beta/types/localinventory.py b/packages/google-shopping-merchant-inventories/google/shopping/merchant_inventories_v1beta/types/localinventory.py index 86c260854d5d..febf2073360c 100644 --- a/packages/google-shopping-merchant-inventories/google/shopping/merchant_inventories_v1beta/types/localinventory.py +++ b/packages/google-shopping-merchant-inventories/google/shopping/merchant_inventories_v1beta/types/localinventory.py @@ -38,7 +38,7 @@ class LocalInventory(proto.Message): information for a specific product at the store specified by [``storeCode``][google.shopping.merchant.inventories.v1beta.LocalInventory.store_code]. For a list of all accepted attribute values, see the `local product - inventory feed + inventory data specification `__. @@ -56,7 +56,7 @@ class LocalInventory(proto.Message): store_code (str): Required. Immutable. Store code (the store ID from your Business Profile) of the physical store the product is sold - in. See the `Local product inventory feed + in. See the `Local product inventory data specification `__ for more information. price (google.shopping.type.types.Price): @@ -69,7 +69,7 @@ class LocalInventory(proto.Message): The ``TimePeriod`` of the sale at this store. availability (str): Availability of the product at this store. 
For accepted - attribute values, see the `local product inventory feed + attribute values, see the `local product inventory data specification `__ This field is a member of `oneof`_ ``_availability``. @@ -82,7 +82,7 @@ class LocalInventory(proto.Message): Supported pickup method for this product. Unless the value is ``"not supported"``, this field must be submitted together with ``pickupSla``. For accepted attribute values, - see the `local product inventory feed + see the `local product inventory data specification `__ This field is a member of `oneof`_ ``_pickup_method``. @@ -90,7 +90,7 @@ class LocalInventory(proto.Message): Relative time period from the order date for an order for this product, from this store, to be ready for pickup. Must be submitted with ``pickupMethod``. For accepted attribute - values, see the `local product inventory feed + values, see the `local product inventory data specification `__ This field is a member of `oneof`_ ``_pickup_sla``. @@ -102,7 +102,7 @@ class LocalInventory(proto.Message): custom_attributes (MutableSequence[google.shopping.type.types.CustomAttribute]): A list of custom (merchant-provided) attributes. You can also use ``CustomAttribute`` to submit any attribute of the - feed specification in its generic form. + data specification in its generic form. """ name: str = proto.Field( diff --git a/packages/google-shopping-merchant-inventories/google/shopping/merchant_inventories_v1beta/types/regionalinventory.py b/packages/google-shopping-merchant-inventories/google/shopping/merchant_inventories_v1beta/types/regionalinventory.py index 7c3229cdd46f..6afeef2986ba 100644 --- a/packages/google-shopping-merchant-inventories/google/shopping/merchant_inventories_v1beta/types/regionalinventory.py +++ b/packages/google-shopping-merchant-inventories/google/shopping/merchant_inventories_v1beta/types/regionalinventory.py @@ -39,7 +39,7 @@ class RegionalInventory(proto.Message): specific [``region``][google.shopping.merchant.inventories.v1beta.RegionalInventory.region]. For a list of all accepted attribute values, see the `regional - product inventory feed + product inventory data specification `__. @@ -70,14 +70,14 @@ class RegionalInventory(proto.Message): The ``TimePeriod`` of the sale price in this region. availability (str): Availability of the product in this region. For accepted - attribute values, see the `regional product inventory feed + attribute values, see the `regional product inventory data specification `__ This field is a member of `oneof`_ ``_availability``. custom_attributes (MutableSequence[google.shopping.type.types.CustomAttribute]): A list of custom (merchant-provided) attributes. You can also use ``CustomAttribute`` to submit any attribute of the - feed specification in its generic form. + data specification in its generic form. 
""" name: str = proto.Field( diff --git a/packages/google-shopping-merchant-inventories/tests/unit/gapic/merchant_inventories_v1beta/test_local_inventory_service.py b/packages/google-shopping-merchant-inventories/tests/unit/gapic/merchant_inventories_v1beta/test_local_inventory_service.py index 0c1be814efbe..15d5086d656f 100644 --- a/packages/google-shopping-merchant-inventories/tests/unit/gapic/merchant_inventories_v1beta/test_local_inventory_service.py +++ b/packages/google-shopping-merchant-inventories/tests/unit/gapic/merchant_inventories_v1beta/test_local_inventory_service.py @@ -1623,13 +1623,13 @@ def test_list_local_inventories_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_local_inventories(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 diff --git a/packages/google-shopping-merchant-inventories/tests/unit/gapic/merchant_inventories_v1beta/test_regional_inventory_service.py b/packages/google-shopping-merchant-inventories/tests/unit/gapic/merchant_inventories_v1beta/test_regional_inventory_service.py index 1b62492aad64..8815b39c6b78 100644 --- a/packages/google-shopping-merchant-inventories/tests/unit/gapic/merchant_inventories_v1beta/test_regional_inventory_service.py +++ b/packages/google-shopping-merchant-inventories/tests/unit/gapic/merchant_inventories_v1beta/test_regional_inventory_service.py @@ -1631,13 +1631,13 @@ def test_list_regional_inventories_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_regional_inventories(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 diff --git a/packages/google-shopping-merchant-lfp/CHANGELOG.md b/packages/google-shopping-merchant-lfp/CHANGELOG.md index e0d89db5fb10..9138d9d749ac 100644 --- a/packages/google-shopping-merchant-lfp/CHANGELOG.md +++ b/packages/google-shopping-merchant-lfp/CHANGELOG.md @@ -1,5 +1,12 @@ # Changelog +## [0.1.1](https://github.com/googleapis/google-cloud-python/compare/google-shopping-merchant-lfp-v0.1.0...google-shopping-merchant-lfp-v0.1.1) (2024-05-27) + + +### Documentation + +* change in wording : feed specification -> data specification ([57704b4](https://github.com/googleapis/google-cloud-python/commit/57704b4dec3397ad91e76f338c03e7e955ffcfa9)) + ## 0.1.0 (2024-04-17) diff --git a/packages/google-shopping-merchant-lfp/google/shopping/merchant_lfp_v1beta/services/lfp_store_service/async_client.py b/packages/google-shopping-merchant-lfp/google/shopping/merchant_lfp_v1beta/services/lfp_store_service/async_client.py index b88af7f561ca..51df1732e35e 100644 --- a/packages/google-shopping-merchant-lfp/google/shopping/merchant_lfp_v1beta/services/lfp_store_service/async_client.py +++ b/packages/google-shopping-merchant-lfp/google/shopping/merchant_lfp_v1beta/services/lfp_store_service/async_client.py @@ -318,11 +318,12 @@ async def sample_get_lfp_store(): Returns: 
google.shopping.merchant_lfp_v1beta.types.LfpStore: A store for the merchant. This will - be used to match to a Google Business - Profile listing for the target merchant. - If a matching listing can't be found, - the inventories or sales submitted with - the store code will not be used. + be used to match to a store under the + Google Business Profile of the target + merchant. If a matching store can't be + found, the inventories or sales + submitted with the store code will not + be used. """ # Create or coerce a protobuf request object. @@ -442,11 +443,12 @@ async def sample_insert_lfp_store(): Returns: google.shopping.merchant_lfp_v1beta.types.LfpStore: A store for the merchant. This will - be used to match to a Google Business - Profile listing for the target merchant. - If a matching listing can't be found, - the inventories or sales submitted with - the store code will not be used. + be used to match to a store under the + Google Business Profile of the target + merchant. If a matching store can't be + found, the inventories or sales + submitted with the store code will not + be used. """ # Create or coerce a protobuf request object. diff --git a/packages/google-shopping-merchant-lfp/google/shopping/merchant_lfp_v1beta/services/lfp_store_service/client.py b/packages/google-shopping-merchant-lfp/google/shopping/merchant_lfp_v1beta/services/lfp_store_service/client.py index 050769ffbc82..a2b73bd2d210 100644 --- a/packages/google-shopping-merchant-lfp/google/shopping/merchant_lfp_v1beta/services/lfp_store_service/client.py +++ b/packages/google-shopping-merchant-lfp/google/shopping/merchant_lfp_v1beta/services/lfp_store_service/client.py @@ -738,11 +738,12 @@ def sample_get_lfp_store(): Returns: google.shopping.merchant_lfp_v1beta.types.LfpStore: A store for the merchant. This will - be used to match to a Google Business - Profile listing for the target merchant. - If a matching listing can't be found, - the inventories or sales submitted with - the store code will not be used. + be used to match to a store under the + Google Business Profile of the target + merchant. If a matching store can't be + found, the inventories or sales + submitted with the store code will not + be used. """ # Create or coerce a protobuf request object. @@ -859,11 +860,12 @@ def sample_insert_lfp_store(): Returns: google.shopping.merchant_lfp_v1beta.types.LfpStore: A store for the merchant. This will - be used to match to a Google Business - Profile listing for the target merchant. - If a matching listing can't be found, - the inventories or sales submitted with - the store code will not be used. + be used to match to a store under the + Google Business Profile of the target + merchant. If a matching store can't be + found, the inventories or sales + submitted with the store code will not + be used. """ # Create or coerce a protobuf request object. diff --git a/packages/google-shopping-merchant-lfp/google/shopping/merchant_lfp_v1beta/services/lfp_store_service/transports/rest.py b/packages/google-shopping-merchant-lfp/google/shopping/merchant_lfp_v1beta/services/lfp_store_service/transports/rest.py index e5bf1838600d..cbd0643bc6ea 100644 --- a/packages/google-shopping-merchant-lfp/google/shopping/merchant_lfp_v1beta/services/lfp_store_service/transports/rest.py +++ b/packages/google-shopping-merchant-lfp/google/shopping/merchant_lfp_v1beta/services/lfp_store_service/transports/rest.py @@ -385,11 +385,12 @@ def __call__( Returns: ~.lfpstore.LfpStore: A store for the merchant. 
This will - be used to match to a Google Business - Profile listing for the target merchant. - If a matching listing can't be found, - the inventories or sales submitted with - the store code will not be used. + be used to match to a store under the + Google Business Profile of the target + merchant. If a matching store can't be + found, the inventories or sales + submitted with the store code will not + be used. """ @@ -477,11 +478,12 @@ def __call__( Returns: ~.lfpstore.LfpStore: A store for the merchant. This will - be used to match to a Google Business - Profile listing for the target merchant. - If a matching listing can't be found, - the inventories or sales submitted with - the store code will not be used. + be used to match to a store under the + Google Business Profile of the target + merchant. If a matching store can't be + found, the inventories or sales + submitted with the store code will not + be used. """ diff --git a/packages/google-shopping-merchant-lfp/google/shopping/merchant_lfp_v1beta/types/lfpinventory.py b/packages/google-shopping-merchant-lfp/google/shopping/merchant_lfp_v1beta/types/lfpinventory.py index 5369105a2be5..a846c0b22975 100644 --- a/packages/google-shopping-merchant-lfp/google/shopping/merchant_lfp_v1beta/types/lfpinventory.py +++ b/packages/google-shopping-merchant-lfp/google/shopping/merchant_lfp_v1beta/types/lfpinventory.py @@ -71,7 +71,7 @@ class LfpInventory(proto.Message): availability (str): Required. Availability of the product at this store. For accepted attribute values, see the `local product inventory - feed + data specification `__ quantity (int): Optional. Quantity of the product available @@ -87,7 +87,7 @@ class LfpInventory(proto.Message): Optional. Supported pickup method for this offer. Unless the value is "not supported", this field must be submitted together with ``pickupSla``. For accepted attribute values, - see the `local product inventory feed + see the `local product inventory data specification `__. This field is a member of `oneof`_ ``_pickup_method``. @@ -95,7 +95,7 @@ class LfpInventory(proto.Message): Optional. Expected date that an order will be ready for pickup relative to the order date. Must be submitted together with ``pickupMethod``. For accepted attribute - values, see the `local product inventory feed + values, see the `local product inventory data specification `__. This field is a member of `oneof`_ ``_pickup_sla``. diff --git a/packages/google-shopping-merchant-lfp/google/shopping/merchant_lfp_v1beta/types/lfpstore.py b/packages/google-shopping-merchant-lfp/google/shopping/merchant_lfp_v1beta/types/lfpstore.py index 728b93509e20..9ccfec796041 100644 --- a/packages/google-shopping-merchant-lfp/google/shopping/merchant_lfp_v1beta/types/lfpstore.py +++ b/packages/google-shopping-merchant-lfp/google/shopping/merchant_lfp_v1beta/types/lfpstore.py @@ -34,8 +34,8 @@ class LfpStore(proto.Message): r"""A store for the merchant. This will be used to match to a - Google Business Profile listing for the target merchant. If a - matching listing can't be found, the inventories or sales + store under the Google Business Profile of the target merchant. + If a matching store can't be found, the inventories or sales submitted with the store code will not be used. @@ -82,7 +82,7 @@ class LfpStore(proto.Message): This field is a member of `oneof`_ ``_place_id``. matching_state (google.shopping.merchant_lfp_v1beta.types.LfpStore.StoreMatchingState): Optional. Output only. The state of matching to a Google - Business Profile listing. 
See + Business Profile. See [matchingStateHint][google.shopping.merchant.lfp.v1beta.LfpStore.matching_state_hint] for further details if no match is found. matching_state_hint (str): @@ -112,8 +112,7 @@ class LfpStore(proto.Message): """ class StoreMatchingState(proto.Enum): - r"""The state of matching ``LfpStore`` to a Google Business Profile - listing. + r"""The state of matching ``LfpStore`` to a Google Business Profile. Values: STORE_MATCHING_STATE_UNSPECIFIED (0): diff --git a/packages/google-shopping-merchant-lfp/tests/unit/gapic/merchant_lfp_v1beta/test_lfp_store_service.py b/packages/google-shopping-merchant-lfp/tests/unit/gapic/merchant_lfp_v1beta/test_lfp_store_service.py index d2e0351e37db..8fd5b4c71576 100644 --- a/packages/google-shopping-merchant-lfp/tests/unit/gapic/merchant_lfp_v1beta/test_lfp_store_service.py +++ b/packages/google-shopping-merchant-lfp/tests/unit/gapic/merchant_lfp_v1beta/test_lfp_store_service.py @@ -2757,13 +2757,13 @@ def test_list_lfp_stores_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_lfp_stores(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 diff --git a/packages/google-shopping-merchant-notifications/tests/unit/gapic/merchant_notifications_v1beta/test_notifications_api_service.py b/packages/google-shopping-merchant-notifications/tests/unit/gapic/merchant_notifications_v1beta/test_notifications_api_service.py index ec47dfa7630c..a797d89a33c5 100644 --- a/packages/google-shopping-merchant-notifications/tests/unit/gapic/merchant_notifications_v1beta/test_notifications_api_service.py +++ b/packages/google-shopping-merchant-notifications/tests/unit/gapic/merchant_notifications_v1beta/test_notifications_api_service.py @@ -3262,13 +3262,13 @@ def test_list_notification_subscriptions_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_notification_subscriptions(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 diff --git a/packages/google-shopping-merchant-products/.OwlBot.yaml b/packages/google-shopping-merchant-products/.OwlBot.yaml new file mode 100644 index 000000000000..eb2a6cbdd482 --- /dev/null +++ b/packages/google-shopping-merchant-products/.OwlBot.yaml @@ -0,0 +1,18 @@ +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +deep-copy-regex: + - source: /google/shopping/merchant/products/(v.*)/.*-py + dest: /owl-bot-staging/google-shopping-merchant-products/$1 +api-name: google-shopping-merchant-products diff --git a/packages/google-shopping-merchant-products/.coveragerc b/packages/google-shopping-merchant-products/.coveragerc new file mode 100644 index 000000000000..0d94c2da5109 --- /dev/null +++ b/packages/google-shopping-merchant-products/.coveragerc @@ -0,0 +1,13 @@ +[run] +branch = True + +[report] +show_missing = True +omit = + google/shopping/merchant_products/__init__.py + google/shopping/merchant_products/gapic_version.py +exclude_lines = + # Re-enable the standard pragma + pragma: NO COVER + # Ignore debug-only repr + def __repr__ diff --git a/packages/google-shopping-merchant-products/.flake8 b/packages/google-shopping-merchant-products/.flake8 new file mode 100644 index 000000000000..87f6e408c47d --- /dev/null +++ b/packages/google-shopping-merchant-products/.flake8 @@ -0,0 +1,33 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Generated by synthtool. DO NOT EDIT! +[flake8] +ignore = E203, E231, E266, E501, W503 +exclude = + # Exclude generated code. + **/proto/** + **/gapic/** + **/services/** + **/types/** + *_pb2.py + + # Standard linting exemptions. + **/.nox/** + __pycache__, + .git, + *.pyc, + conf.py diff --git a/packages/google-shopping-merchant-products/.gitignore b/packages/google-shopping-merchant-products/.gitignore new file mode 100644 index 000000000000..b4243ced74e4 --- /dev/null +++ b/packages/google-shopping-merchant-products/.gitignore @@ -0,0 +1,63 @@ +*.py[cod] +*.sw[op] + +# C extensions +*.so + +# Packages +*.egg +*.egg-info +dist +build +eggs +.eggs +parts +bin +var +sdist +develop-eggs +.installed.cfg +lib +lib64 +__pycache__ + +# Installer logs +pip-log.txt + +# Unit test / coverage reports +.coverage +.nox +.cache +.pytest_cache + + +# Mac +.DS_Store + +# JetBrains +.idea + +# VS Code +.vscode + +# emacs +*~ + +# Built documentation +docs/_build +bigquery/docs/generated +docs.metadata + +# Virtual environment +env/ + +# Test logs +coverage.xml +*sponge_log.xml + +# System test environment variables. +system_tests/local_test_setup + +# Make sure a generated file isn't accidentally committed. 
+pylintrc +pylintrc.test diff --git a/packages/google-shopping-merchant-products/.repo-metadata.json b/packages/google-shopping-merchant-products/.repo-metadata.json new file mode 100644 index 000000000000..31bda2b88952 --- /dev/null +++ b/packages/google-shopping-merchant-products/.repo-metadata.json @@ -0,0 +1,17 @@ +{ + "name": "google-shopping-merchant-products", + "name_pretty": "Merchant API", + "api_description": "Programmatically manage your Merchant Center accounts.", + "product_documentation": "https://developers.google.com/merchant/api", + "client_documentation": "https://googleapis.dev/python/google-shopping-merchant-products/latest", + "issue_tracker": "https://github.com/googleapis/google-cloud-python/issues", + "release_level": "preview", + "language": "python", + "library_type": "GAPIC_AUTO", + "repo": "googleapis/google-cloud-python", + "distribution_name": "google-shopping-merchant-products", + "api_id": "products.googleapis.com", + "default_version": "v1beta", + "codeowner_team": "", + "api_shortname": "products" +} diff --git a/packages/google-shopping-merchant-products/CHANGELOG.md b/packages/google-shopping-merchant-products/CHANGELOG.md new file mode 100644 index 000000000000..4bd93e34d88d --- /dev/null +++ b/packages/google-shopping-merchant-products/CHANGELOG.md @@ -0,0 +1,10 @@ +# Changelog + +## 0.1.0 (2024-06-05) + + +### Features + +* add initial files for google.shopping.merchant.products.v1beta ([#12776](https://github.com/googleapis/google-cloud-python/issues/12776)) ([e1e7dbb](https://github.com/googleapis/google-cloud-python/commit/e1e7dbb1e65883436fdc520f96caabfcf9ab7b46)) + +## Changelog diff --git a/packages/google-shopping-merchant-products/CODE_OF_CONDUCT.md b/packages/google-shopping-merchant-products/CODE_OF_CONDUCT.md new file mode 100644 index 000000000000..039f43681204 --- /dev/null +++ b/packages/google-shopping-merchant-products/CODE_OF_CONDUCT.md @@ -0,0 +1,95 @@ + +# Code of Conduct + +## Our Pledge + +In the interest of fostering an open and welcoming environment, we as +contributors and maintainers pledge to making participation in our project and +our community a harassment-free experience for everyone, regardless of age, body +size, disability, ethnicity, gender identity and expression, level of +experience, education, socio-economic status, nationality, personal appearance, +race, religion, or sexual identity and orientation. 
+ +## Our Standards + +Examples of behavior that contributes to creating a positive environment +include: + +* Using welcoming and inclusive language +* Being respectful of differing viewpoints and experiences +* Gracefully accepting constructive criticism +* Focusing on what is best for the community +* Showing empathy towards other community members + +Examples of unacceptable behavior by participants include: + +* The use of sexualized language or imagery and unwelcome sexual attention or + advances +* Trolling, insulting/derogatory comments, and personal or political attacks +* Public or private harassment +* Publishing others' private information, such as a physical or electronic + address, without explicit permission +* Other conduct which could reasonably be considered inappropriate in a + professional setting + +## Our Responsibilities + +Project maintainers are responsible for clarifying the standards of acceptable +behavior and are expected to take appropriate and fair corrective action in +response to any instances of unacceptable behavior. + +Project maintainers have the right and responsibility to remove, edit, or reject +comments, commits, code, wiki edits, issues, and other contributions that are +not aligned to this Code of Conduct, or to ban temporarily or permanently any +contributor for other behaviors that they deem inappropriate, threatening, +offensive, or harmful. + +## Scope + +This Code of Conduct applies both within project spaces and in public spaces +when an individual is representing the project or its community. Examples of +representing a project or community include using an official project e-mail +address, posting via an official social media account, or acting as an appointed +representative at an online or offline event. Representation of a project may be +further defined and clarified by project maintainers. + +This Code of Conduct also applies outside the project spaces when the Project +Steward has a reasonable belief that an individual's behavior may have a +negative impact on the project or its community. + +## Conflict Resolution + +We do not believe that all conflict is bad; healthy debate and disagreement +often yield positive results. However, it is never okay to be disrespectful or +to engage in behavior that violates the project’s code of conduct. + +If you see someone violating the code of conduct, you are encouraged to address +the behavior directly with those involved. Many issues can be resolved quickly +and easily, and this gives people more control over the outcome of their +dispute. If you are unable to resolve the matter for any reason, or if the +behavior is threatening or harassing, report it. We are dedicated to providing +an environment where participants feel welcome and safe. + + +Reports should be directed to *googleapis-stewards@google.com*, the +Project Steward(s) for *Google Cloud Client Libraries*. It is the Project Steward’s duty to +receive and address reported violations of the code of conduct. They will then +work with a committee consisting of representatives from the Open Source +Programs Office and the Google Open Source Strategy team. If for any reason you +are uncomfortable reaching out to the Project Steward, please email +opensource@google.com. + +We will investigate every complaint, but you may not receive a direct response. 
+We will use our discretion in determining when and how to follow up on reported +incidents, which may range from not taking action to permanent expulsion from +the project and project-sponsored spaces. We will notify the accused of the +report and provide them an opportunity to discuss it before any action is taken. +The identity of the reporter will be omitted from the details of the report +supplied to the accused. In potentially harmful situations, such as ongoing +harassment or threats to anyone's safety, we may take action without notice. + +## Attribution + +This Code of Conduct is adapted from the Contributor Covenant, version 1.4, +available at +https://www.contributor-covenant.org/version/1/4/code-of-conduct.html \ No newline at end of file diff --git a/packages/google-shopping-merchant-products/CONTRIBUTING.rst b/packages/google-shopping-merchant-products/CONTRIBUTING.rst new file mode 100644 index 000000000000..9017c23d46ad --- /dev/null +++ b/packages/google-shopping-merchant-products/CONTRIBUTING.rst @@ -0,0 +1,271 @@ +.. Generated by synthtool. DO NOT EDIT! +############ +Contributing +############ + +#. **Please sign one of the contributor license agreements below.** +#. Fork the repo, develop and test your code changes, add docs. +#. Make sure that your commit messages clearly describe the changes. +#. Send a pull request. (Please Read: `Faster Pull Request Reviews`_) + +.. _Faster Pull Request Reviews: https://github.com/kubernetes/community/blob/master/contributors/guide/pull-requests.md#best-practices-for-faster-reviews + +.. contents:: Here are some guidelines for hacking on the Google Cloud Client libraries. + +*************** +Adding Features +*************** + +In order to add a feature: + +- The feature must be documented in both the API and narrative + documentation. + +- The feature must work fully on the following CPython versions: + 3.7, 3.8, 3.9, 3.10, 3.11 and 3.12 on both UNIX and Windows. + +- The feature must not add unnecessary dependencies (where + "unnecessary" is of course subjective, but new dependencies should + be discussed). + +**************************** +Using a Development Checkout +**************************** + +You'll have to create a development environment using a Git checkout: + +- While logged into your GitHub account, navigate to the + ``google-cloud-python`` `repo`_ on GitHub. + +- Fork and clone the ``google-cloud-python`` repository to your GitHub account by + clicking the "Fork" button. + +- Clone your fork of ``google-cloud-python`` from your GitHub account to your local + computer, substituting your account username and specifying the destination + as ``hack-on-google-cloud-python``. E.g.:: + + $ cd ${HOME} + $ git clone git@github.com:USERNAME/google-cloud-python.git hack-on-google-cloud-python + $ cd hack-on-google-cloud-python + # Configure remotes such that you can pull changes from the googleapis/google-cloud-python + # repository into your local repository. + $ git remote add upstream git@github.com:googleapis/google-cloud-python.git + # fetch and merge changes from upstream into main + $ git fetch upstream + $ git merge upstream/main + +Now your local repo is set up such that you will push changes to your GitHub +repo, from which you can submit a pull request. + +To work on the codebase and run the tests, we recommend using ``nox``, +but you can also use a ``virtualenv`` of your own creation. + +.. 
_repo: https://github.com/googleapis/google-cloud-python + +Using ``nox`` +============= + +We use `nox `__ to instrument our tests. + +- To test your changes, run unit tests with ``nox``:: + $ nox -s unit + +- To run a single unit test:: + + $ nox -s unit-3.12 -- -k + + + .. note:: + + The unit tests and system tests are described in the + ``noxfile.py`` files in each directory. + +.. nox: https://pypi.org/project/nox/ + +***************************************** +I'm getting weird errors... Can you help? +***************************************** + +If the error mentions ``Python.h`` not being found, +install ``python-dev`` and try again. +On Debian/Ubuntu:: + + $ sudo apt-get install python-dev + +************ +Coding Style +************ +- We use the automatic code formatter ``black``. You can run it using + the nox session ``blacken``. This will eliminate many lint errors. Run via:: + + $ nox -s blacken + +- PEP8 compliance is required, with exceptions defined in the linter configuration. + If you have ``nox`` installed, you can test that you have not introduced + any non-compliant code via:: + + $ nox -s lint + +- In order to make ``nox -s lint`` run faster, you can set some environment + variables:: + + export GOOGLE_CLOUD_TESTING_REMOTE="upstream" + export GOOGLE_CLOUD_TESTING_BRANCH="main" + + By doing this, you are specifying the location of the most up-to-date + version of ``google-cloud-python``. The + remote name ``upstream`` should point to the official ``googleapis`` + checkout and the branch should be the default branch on that remote (``main``). + +- This repository contains configuration for the + `pre-commit `__ tool, which automates checking + our linters during a commit. If you have it installed on your ``$PATH``, + you can enable enforcing those checks via: + +.. code-block:: bash + + $ pre-commit install + pre-commit installed at .git/hooks/pre-commit + +Exceptions to PEP8: + +- Many unit tests use a helper method, ``_call_fut`` ("FUT" is short for + "Function-Under-Test"), which is PEP8-incompliant, but more readable. + Some also use a local variable, ``MUT`` (short for "Module-Under-Test"). + +******************** +Running System Tests +******************** + +- To run system tests, you can execute:: + + # Run all system tests + $ nox -s system + + # Run a single system test + $ nox -s system-3.12 -- -k + + + .. note:: + + System tests are only configured to run under Python 3.8, 3.9, 3.10, 3.11 and 3.12. + For expediency, we do not run them in older versions of Python 3. + + This alone will not run the tests. You'll need to change some local + auth settings and change some configuration in your project to + run all the tests. + +- System tests will be run against an actual project. You should use local credentials from gcloud when possible. See `Best practices for application authentication `__. Some tests require a service account. For those tests see `Authenticating as a service account `__. + +************* +Test Coverage +************* + +- The codebase *must* have 100% test statement coverage after each commit. + You can test coverage via ``nox -s cover``. 
+ +****************************************************** +Documentation Coverage and Building HTML Documentation +****************************************************** + +If you fix a bug, and the bug requires an API or behavior modification, all +documentation in this package which references that API or behavior must be +changed to reflect the bug fix, ideally in the same commit that fixes the bug +or adds the feature. + +Build the docs via: + + $ nox -s docs + +************************* +Samples and code snippets +************************* + +Code samples and snippets live in the `samples/` catalogue. Feel free to +provide more examples, but make sure to write tests for those examples. +Each folder containing example code requires its own `noxfile.py` script +which automates testing. If you decide to create a new folder, you can +base it on the `samples/snippets` folder (providing `noxfile.py` and +the requirements files). + +The tests will run against a real Google Cloud Project, so you should +configure them just like the System Tests. + +- To run sample tests, you can execute:: + + # Run all tests in a folder + $ cd samples/snippets + $ nox -s py-3.8 + + # Run a single sample test + $ cd samples/snippets + $ nox -s py-3.8 -- -k + +******************************************** +Note About ``README`` as it pertains to PyPI +******************************************** + +The `description on PyPI`_ for the project comes directly from the +``README``. Due to the reStructuredText (``rst``) parser used by +PyPI, relative links which will work on GitHub (e.g. ``CONTRIBUTING.rst`` +instead of +``https://github.com/googleapis/google-cloud-python/blob/main/CONTRIBUTING.rst``) +may cause problems creating links or rendering the description. + +.. _description on PyPI: https://pypi.org/project/google-shopping-merchant-products + + +************************* +Supported Python Versions +************************* + +We support: + +- `Python 3.7`_ +- `Python 3.8`_ +- `Python 3.9`_ +- `Python 3.10`_ +- `Python 3.11`_ +- `Python 3.12`_ + +.. _Python 3.7: https://docs.python.org/3.7/ +.. _Python 3.8: https://docs.python.org/3.8/ +.. _Python 3.9: https://docs.python.org/3.9/ +.. _Python 3.10: https://docs.python.org/3.10/ +.. _Python 3.11: https://docs.python.org/3.11/ +.. _Python 3.12: https://docs.python.org/3.12/ + + +Supported versions can be found in our ``noxfile.py`` `config`_. + +.. _config: https://github.com/googleapis/google-cloud-python/blob/main/packages/google-shopping-merchant-products/noxfile.py + + +********** +Versioning +********** + +This library follows `Semantic Versioning`_. + +.. _Semantic Versioning: http://semver.org/ + +Some packages are currently in major version zero (``0.y.z``), which means that +anything may change at any time and the public API should not be considered +stable. 
+ +****************************** +Contributor License Agreements +****************************** + +Before we can accept your pull requests you'll need to sign a Contributor +License Agreement (CLA): + +- **If you are an individual writing original source code** and **you own the + intellectual property**, then you'll need to sign an + `individual CLA `__. +- **If you work for a company that wants to allow you to contribute your work**, + then you'll need to sign a + `corporate CLA `__. + +You can sign these electronically (just scroll to the bottom). After that, +we'll be able to accept your pull requests. diff --git a/packages/google-shopping-merchant-products/LICENSE b/packages/google-shopping-merchant-products/LICENSE new file mode 100644 index 000000000000..d64569567334 --- /dev/null +++ b/packages/google-shopping-merchant-products/LICENSE @@ -0,0 +1,202 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. 
For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. 
The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+ + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/packages/google-shopping-merchant-products/MANIFEST.in b/packages/google-shopping-merchant-products/MANIFEST.in new file mode 100644 index 000000000000..e0a66705318e --- /dev/null +++ b/packages/google-shopping-merchant-products/MANIFEST.in @@ -0,0 +1,25 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Generated by synthtool. DO NOT EDIT! +include README.rst LICENSE +recursive-include google *.json *.proto py.typed +recursive-include tests * +global-exclude *.py[co] +global-exclude __pycache__ + +# Exclude scripts for samples readmegen +prune scripts/readme-gen diff --git a/packages/google-shopping-merchant-products/README.rst b/packages/google-shopping-merchant-products/README.rst new file mode 100644 index 000000000000..f8455a2d9403 --- /dev/null +++ b/packages/google-shopping-merchant-products/README.rst @@ -0,0 +1,108 @@ +Python Client for Merchant API +============================== + +|preview| |pypi| |versions| + +`Merchant API`_: Programmatically manage your Merchant Center accounts. + +- `Client Library Documentation`_ +- `Product Documentation`_ + +.. |preview| image:: https://img.shields.io/badge/support-preview-orange.svg + :target: https://github.com/googleapis/google-cloud-python/blob/main/README.rst#stability-levels +.. |pypi| image:: https://img.shields.io/pypi/v/google-shopping-merchant-products.svg + :target: https://pypi.org/project/google-shopping-merchant-products/ +.. 
|versions| image:: https://img.shields.io/pypi/pyversions/google-shopping-merchant-products.svg + :target: https://pypi.org/project/google-shopping-merchant-products/ +.. _Merchant API: https://developers.google.com/merchant/api +.. _Client Library Documentation: https://googleapis.dev/python/google-shopping-merchant-products/latest +.. _Product Documentation: https://developers.google.com/merchant/api + +Quick Start +----------- + +In order to use this library, you first need to go through the following steps: + +1. `Select or create a Cloud Platform project.`_ +2. `Enable billing for your project.`_ +3. `Enable the Merchant API.`_ +4. `Setup Authentication.`_ + +.. _Select or create a Cloud Platform project.: https://console.cloud.google.com/project +.. _Enable billing for your project.: https://cloud.google.com/billing/docs/how-to/modify-project#enable_billing_for_a_project +.. _Enable the Merchant API.: https://developers.google.com/merchant/api +.. _Setup Authentication.: https://googleapis.dev/python/google-api-core/latest/auth.html + +Installation +~~~~~~~~~~~~ + +Install this library in a virtual environment using `venv`_. `venv`_ is a tool that +creates isolated Python environments. These isolated environments can have separate +versions of Python packages, which allows you to isolate one project's dependencies +from the dependencies of other projects. + +With `venv`_, it's possible to install this library without needing system +install permissions, and without clashing with the installed system +dependencies. + +.. _`venv`: https://docs.python.org/3/library/venv.html + + +Code samples and snippets +~~~~~~~~~~~~~~~~~~~~~~~~~ + +Code samples and snippets live in the `samples/`_ folder. + +.. _samples/: https://github.com/googleapis/google-cloud-python/tree/main/packages/google-shopping-merchant-products/samples + + +Supported Python Versions +^^^^^^^^^^^^^^^^^^^^^^^^^ +Our client libraries are compatible with all current `active`_ and `maintenance`_ versions of +Python. + +Python >= 3.7 + +.. _active: https://devguide.python.org/devcycle/#in-development-main-branch +.. _maintenance: https://devguide.python.org/devcycle/#maintenance-branches + +Unsupported Python Versions +^^^^^^^^^^^^^^^^^^^^^^^^^^^ +Python <= 3.6 + +If you are using an `end-of-life`_ +version of Python, we recommend that you update as soon as possible to an actively supported version. + +.. _end-of-life: https://devguide.python.org/devcycle/#end-of-life-branches + +Mac/Linux +^^^^^^^^^ + +.. code-block:: console + + python3 -m venv + source /bin/activate + pip install google-shopping-merchant-products + + +Windows +^^^^^^^ + +.. 
code-block:: console + + py -m venv + .\\Scripts\activate + pip install google-shopping-merchant-products + +Next Steps +~~~~~~~~~~ + +- Read the `Client Library Documentation`_ for Merchant API + to see other available methods on the client. +- Read the `Merchant API Product documentation`_ to learn + more about the product and see How-to Guides. +- View this `README`_ to see the full list of Cloud + APIs that we cover. + +.. _Merchant API Product documentation: https://developers.google.com/merchant/api +.. _README: https://github.com/googleapis/google-cloud-python/blob/main/README.rst diff --git a/packages/google-shopping-merchant-products/docs/CHANGELOG.md b/packages/google-shopping-merchant-products/docs/CHANGELOG.md new file mode 120000 index 000000000000..04c99a55caae --- /dev/null +++ b/packages/google-shopping-merchant-products/docs/CHANGELOG.md @@ -0,0 +1 @@ +../CHANGELOG.md \ No newline at end of file diff --git a/packages/google-shopping-merchant-products/docs/README.rst b/packages/google-shopping-merchant-products/docs/README.rst new file mode 120000 index 000000000000..89a0106941ff --- /dev/null +++ b/packages/google-shopping-merchant-products/docs/README.rst @@ -0,0 +1 @@ +../README.rst \ No newline at end of file diff --git a/packages/google-shopping-merchant-products/docs/_static/custom.css b/packages/google-shopping-merchant-products/docs/_static/custom.css new file mode 100644 index 000000000000..b0a295464b23 --- /dev/null +++ b/packages/google-shopping-merchant-products/docs/_static/custom.css @@ -0,0 +1,20 @@ +div#python2-eol { + border-color: red; + border-width: medium; +} + +/* Ensure minimum width for 'Parameters' / 'Returns' column */ +dl.field-list > dt { + min-width: 100px +} + +/* Insert space between methods for readability */ +dl.method { + padding-top: 10px; + padding-bottom: 10px +} + +/* Insert empty space between classes */ +dl.class { + padding-bottom: 50px +} diff --git a/packages/google-shopping-merchant-products/docs/_templates/layout.html b/packages/google-shopping-merchant-products/docs/_templates/layout.html new file mode 100644 index 000000000000..6316a537f72b --- /dev/null +++ b/packages/google-shopping-merchant-products/docs/_templates/layout.html @@ -0,0 +1,50 @@ + +{% extends "!layout.html" %} +{%- block content %} +{%- if theme_fixed_sidebar|lower == 'true' %} +
+ {{ sidebar() }} + {%- block document %} +
+ {%- if render_sidebar %} +
+ {%- endif %} + + {%- block relbar_top %} + {%- if theme_show_relbar_top|tobool %} + + {%- endif %} + {% endblock %} + +
+
+ As of January 1, 2020 this library no longer supports Python 2 on the latest released version. + Library versions released prior to that date will continue to be available. For more information please + visit Python 2 support on Google Cloud. +
+ {% block body %} {% endblock %} +
+ + {%- block relbar_bottom %} + {%- if theme_show_relbar_bottom|tobool %} + + {%- endif %} + {% endblock %} + + {%- if render_sidebar %} +
+ {%- endif %} +
+ {%- endblock %} +
+
+{%- else %} +{{ super() }} +{%- endif %} +{%- endblock %} diff --git a/packages/google-shopping-merchant-products/docs/conf.py b/packages/google-shopping-merchant-products/docs/conf.py new file mode 100644 index 000000000000..54f7734b648e --- /dev/null +++ b/packages/google-shopping-merchant-products/docs/conf.py @@ -0,0 +1,384 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# google-shopping-merchant-products documentation build configuration file +# +# This file is execfile()d with the current directory set to its +# containing dir. +# +# Note that not all possible configuration values are present in this +# autogenerated file. +# +# All configuration values have a default; values that are commented out +# serve to show the default. + +import os +import shlex +import sys + +# If extensions (or modules to document with autodoc) are in another directory, +# add these directories to sys.path here. If the directory is relative to the +# documentation root, use os.path.abspath to make it absolute, like shown here. +sys.path.insert(0, os.path.abspath("..")) + +# For plugins that can not read conf.py. +# See also: https://github.com/docascode/sphinx-docfx-yaml/issues/85 +sys.path.insert(0, os.path.abspath(".")) + +__version__ = "" + +# -- General configuration ------------------------------------------------ + +# If your documentation needs a minimal Sphinx version, state it here. +needs_sphinx = "1.5.5" + +# Add any Sphinx extension module names here, as strings. They can be +# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom +# ones. +extensions = [ + "sphinx.ext.autodoc", + "sphinx.ext.autosummary", + "sphinx.ext.intersphinx", + "sphinx.ext.coverage", + "sphinx.ext.doctest", + "sphinx.ext.napoleon", + "sphinx.ext.todo", + "sphinx.ext.viewcode", + "recommonmark", +] + +# autodoc/autosummary flags +autoclass_content = "both" +autodoc_default_options = {"members": True} +autosummary_generate = True + + +# Add any paths that contain templates here, relative to this directory. +templates_path = ["_templates"] + +# The suffix(es) of source filenames. +# You can specify multiple suffix as a list of string: +# source_suffix = ['.rst', '.md'] +source_suffix = [".rst", ".md"] + +# The encoding of source files. +# source_encoding = 'utf-8-sig' + +# The root toctree document. +root_doc = "index" + +# General information about the project. +project = "google-shopping-merchant-products" +copyright = "2019, Google" +author = "Google APIs" + +# The version info for the project you're documenting, acts as replacement for +# |version| and |release|, also used in various other places throughout the +# built documents. +# +# The full version, including alpha/beta/rc tags. +release = __version__ +# The short X.Y version. +version = ".".join(release.split(".")[0:2]) + +# The language for content autogenerated by Sphinx. 
Refer to documentation +# for a list of supported languages. +# +# This is also used if you do content translation via gettext catalogs. +# Usually you set "language" from the command line for these cases. +language = None + +# There are two options for replacing |today|: either, you set today to some +# non-false value, then it is used: +# today = '' +# Else, today_fmt is used as the format for a strftime call. +# today_fmt = '%B %d, %Y' + +# List of patterns, relative to source directory, that match files and +# directories to ignore when looking for source files. +exclude_patterns = [ + "_build", + "**/.nox/**/*", + "samples/AUTHORING_GUIDE.md", + "samples/CONTRIBUTING.md", + "samples/snippets/README.rst", +] + +# The reST default role (used for this markup: `text`) to use for all +# documents. +# default_role = None + +# If true, '()' will be appended to :func: etc. cross-reference text. +# add_function_parentheses = True + +# If true, the current module name will be prepended to all description +# unit titles (such as .. function::). +# add_module_names = True + +# If true, sectionauthor and moduleauthor directives will be shown in the +# output. They are ignored by default. +# show_authors = False + +# The name of the Pygments (syntax highlighting) style to use. +pygments_style = "sphinx" + +# A list of ignored prefixes for module index sorting. +# modindex_common_prefix = [] + +# If true, keep warnings as "system message" paragraphs in the built documents. +# keep_warnings = False + +# If true, `todo` and `todoList` produce output, else they produce nothing. +todo_include_todos = True + + +# -- Options for HTML output ---------------------------------------------- + +# The theme to use for HTML and HTML Help pages. See the documentation for +# a list of builtin themes. +html_theme = "alabaster" + +# Theme options are theme-specific and customize the look and feel of a theme +# further. For a list of options available for each theme, see the +# documentation. +html_theme_options = { + "description": "Google Cloud Client Libraries for google-shopping-merchant-products", + "github_user": "googleapis", + "github_repo": "google-cloud-python", + "github_banner": True, + "font_family": "'Roboto', Georgia, sans", + "head_font_family": "'Roboto', Georgia, serif", + "code_font_family": "'Roboto Mono', 'Consolas', monospace", +} + +# Add any paths that contain custom themes here, relative to this directory. +# html_theme_path = [] + +# The name for this set of Sphinx documents. If None, it defaults to +# " v documentation". +# html_title = None + +# A shorter title for the navigation bar. Default is the same as html_title. +# html_short_title = None + +# The name of an image file (relative to this directory) to place at the top +# of the sidebar. +# html_logo = None + +# The name of an image file (within the static path) to use as favicon of the +# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 +# pixels large. +# html_favicon = None + +# Add any paths that contain custom static files (such as style sheets) here, +# relative to this directory. They are copied after the builtin static files, +# so a file named "default.css" will overwrite the builtin "default.css". +html_static_path = ["_static"] + +# Add any extra paths that contain custom files (such as robots.txt or +# .htaccess) here, relative to this directory. These files are copied +# directly to the root of the documentation. 
+# html_extra_path = [] + +# If not '', a 'Last updated on:' timestamp is inserted at every page bottom, +# using the given strftime format. +# html_last_updated_fmt = '%b %d, %Y' + +# If true, SmartyPants will be used to convert quotes and dashes to +# typographically correct entities. +# html_use_smartypants = True + +# Custom sidebar templates, maps document names to template names. +# html_sidebars = {} + +# Additional templates that should be rendered to pages, maps page names to +# template names. +# html_additional_pages = {} + +# If false, no module index is generated. +# html_domain_indices = True + +# If false, no index is generated. +# html_use_index = True + +# If true, the index is split into individual pages for each letter. +# html_split_index = False + +# If true, links to the reST sources are added to the pages. +# html_show_sourcelink = True + +# If true, "Created using Sphinx" is shown in the HTML footer. Default is True. +# html_show_sphinx = True + +# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. +# html_show_copyright = True + +# If true, an OpenSearch description file will be output, and all pages will +# contain a tag referring to it. The value of this option must be the +# base URL from which the finished HTML is served. +# html_use_opensearch = '' + +# This is the file name suffix for HTML files (e.g. ".xhtml"). +# html_file_suffix = None + +# Language to be used for generating the HTML full-text search index. +# Sphinx supports the following languages: +# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja' +# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr' +# html_search_language = 'en' + +# A dictionary with options for the search language support, empty by default. +# Now only 'ja' uses this config value +# html_search_options = {'type': 'default'} + +# The name of a javascript file (relative to the configuration directory) that +# implements a search results scorer. If empty, the default will be used. +# html_search_scorer = 'scorer.js' + +# Output file base name for HTML help builder. +htmlhelp_basename = "google-shopping-merchant-products-doc" + +# -- Options for warnings ------------------------------------------------------ + + +suppress_warnings = [ + # Temporarily suppress this to avoid "more than one target found for + # cross-reference" warning, which are intractable for us to avoid while in + # a mono-repo. + # See https://github.com/sphinx-doc/sphinx/blob + # /2a65ffeef5c107c19084fabdd706cdff3f52d93c/sphinx/domains/python.py#L843 + "ref.python" +] + +# -- Options for LaTeX output --------------------------------------------- + +latex_elements = { + # The paper size ('letterpaper' or 'a4paper'). + #'papersize': 'letterpaper', + # The font size ('10pt', '11pt' or '12pt'). + #'pointsize': '10pt', + # Additional stuff for the LaTeX preamble. + #'preamble': '', + # Latex figure (float) alignment + #'figure_align': 'htbp', +} + +# Grouping the document tree into LaTeX files. List of tuples +# (source start file, target name, title, +# author, documentclass [howto, manual, or own class]). +latex_documents = [ + ( + root_doc, + "google-shopping-merchant-products.tex", + "google-shopping-merchant-products Documentation", + author, + "manual", + ) +] + +# The name of an image file (relative to this directory) to place at the top of +# the title page. +# latex_logo = None + +# For "manual" documents, if this is true, then toplevel headings are parts, +# not chapters. 
+# latex_use_parts = False + +# If true, show page references after internal links. +# latex_show_pagerefs = False + +# If true, show URL addresses after external links. +# latex_show_urls = False + +# Documents to append as an appendix to all manuals. +# latex_appendices = [] + +# If false, no module index is generated. +# latex_domain_indices = True + + +# -- Options for manual page output --------------------------------------- + +# One entry per manual page. List of tuples +# (source start file, name, description, authors, manual section). +man_pages = [ + ( + root_doc, + "google-shopping-merchant-products", + "google-shopping-merchant-products Documentation", + [author], + 1, + ) +] + +# If true, show URL addresses after external links. +# man_show_urls = False + + +# -- Options for Texinfo output ------------------------------------------- + +# Grouping the document tree into Texinfo files. List of tuples +# (source start file, target name, title, author, +# dir menu entry, description, category) +texinfo_documents = [ + ( + root_doc, + "google-shopping-merchant-products", + "google-shopping-merchant-products Documentation", + author, + "google-shopping-merchant-products", + "google-shopping-merchant-products Library", + "APIs", + ) +] + +# Documents to append as an appendix to all manuals. +# texinfo_appendices = [] + +# If false, no module index is generated. +# texinfo_domain_indices = True + +# How to display URL addresses: 'footnote', 'no', or 'inline'. +# texinfo_show_urls = 'footnote' + +# If true, do not generate a @detailmenu in the "Top" node's menu. +# texinfo_no_detailmenu = False + + +# Example configuration for intersphinx: refer to the Python standard library. +intersphinx_mapping = { + "python": ("https://python.readthedocs.org/en/latest/", None), + "google-auth": ("https://googleapis.dev/python/google-auth/latest/", None), + "google.api_core": ( + "https://googleapis.dev/python/google-api-core/latest/", + None, + ), + "grpc": ("https://grpc.github.io/grpc/python/", None), + "proto-plus": ("https://proto-plus-python.readthedocs.io/en/latest/", None), + "protobuf": ("https://googleapis.dev/python/protobuf/latest/", None), +} + + +# Napoleon settings +napoleon_google_docstring = True +napoleon_numpy_docstring = True +napoleon_include_private_with_doc = False +napoleon_include_special_with_doc = True +napoleon_use_admonition_for_examples = False +napoleon_use_admonition_for_notes = False +napoleon_use_admonition_for_references = False +napoleon_use_ivar = False +napoleon_use_param = True +napoleon_use_rtype = True diff --git a/packages/google-shopping-merchant-products/docs/index.rst b/packages/google-shopping-merchant-products/docs/index.rst new file mode 100644 index 000000000000..b420f11ffe56 --- /dev/null +++ b/packages/google-shopping-merchant-products/docs/index.rst @@ -0,0 +1,23 @@ +.. include:: README.rst + +.. include:: multiprocessing.rst + + +API Reference +------------- +.. toctree:: + :maxdepth: 2 + + merchant_products_v1beta/services_ + merchant_products_v1beta/types_ + + +Changelog +--------- + +For a list of all ``google-shopping-merchant-products`` releases: + +.. 
toctree:: + :maxdepth: 2 + + CHANGELOG diff --git a/packages/google-shopping-merchant-products/docs/merchant_products_v1beta/product_inputs_service.rst b/packages/google-shopping-merchant-products/docs/merchant_products_v1beta/product_inputs_service.rst new file mode 100644 index 000000000000..99e2c59bfb43 --- /dev/null +++ b/packages/google-shopping-merchant-products/docs/merchant_products_v1beta/product_inputs_service.rst @@ -0,0 +1,6 @@ +ProductInputsService +-------------------------------------- + +.. automodule:: google.shopping.merchant_products_v1beta.services.product_inputs_service + :members: + :inherited-members: diff --git a/packages/google-shopping-merchant-products/docs/merchant_products_v1beta/products_service.rst b/packages/google-shopping-merchant-products/docs/merchant_products_v1beta/products_service.rst new file mode 100644 index 000000000000..f20ce0b9e362 --- /dev/null +++ b/packages/google-shopping-merchant-products/docs/merchant_products_v1beta/products_service.rst @@ -0,0 +1,10 @@ +ProductsService +--------------------------------- + +.. automodule:: google.shopping.merchant_products_v1beta.services.products_service + :members: + :inherited-members: + +.. automodule:: google.shopping.merchant_products_v1beta.services.products_service.pagers + :members: + :inherited-members: diff --git a/packages/google-shopping-merchant-products/docs/merchant_products_v1beta/services_.rst b/packages/google-shopping-merchant-products/docs/merchant_products_v1beta/services_.rst new file mode 100644 index 000000000000..ce641539544e --- /dev/null +++ b/packages/google-shopping-merchant-products/docs/merchant_products_v1beta/services_.rst @@ -0,0 +1,7 @@ +Services for Google Shopping Merchant Products v1beta API +========================================================= +.. toctree:: + :maxdepth: 2 + + product_inputs_service + products_service diff --git a/packages/google-shopping-merchant-products/docs/merchant_products_v1beta/types_.rst b/packages/google-shopping-merchant-products/docs/merchant_products_v1beta/types_.rst new file mode 100644 index 000000000000..af3a6c59c20f --- /dev/null +++ b/packages/google-shopping-merchant-products/docs/merchant_products_v1beta/types_.rst @@ -0,0 +1,6 @@ +Types for Google Shopping Merchant Products v1beta API +====================================================== + +.. automodule:: google.shopping.merchant_products_v1beta.types + :members: + :show-inheritance: diff --git a/packages/google-shopping-merchant-products/docs/multiprocessing.rst b/packages/google-shopping-merchant-products/docs/multiprocessing.rst new file mode 100644 index 000000000000..536d17b2ea65 --- /dev/null +++ b/packages/google-shopping-merchant-products/docs/multiprocessing.rst @@ -0,0 +1,7 @@ +.. note:: + + Because this client uses :mod:`grpc` library, it is safe to + share instances across threads. In multiprocessing scenarios, the best + practice is to create client instances *after* the invocation of + :func:`os.fork` by :class:`multiprocessing.pool.Pool` or + :class:`multiprocessing.Process`. 
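A minimal sketch of that pattern, assuming Application Default Credentials are available in the environment; the worker helpers and the resource name below are illustrative and not part of the library:

.. code-block:: python

    import multiprocessing

    from google.shopping import merchant_products_v1beta

    _client = None  # one client per worker process


    def _init_worker():
        # Construct the gRPC-backed client *after* the fork, inside each worker.
        global _client
        _client = merchant_products_v1beta.ProductsServiceClient()


    def _fetch(name):
        product = _client.get_product(request={"name": name})
        return product.name  # return a plain string so the result pickles cleanly


    if __name__ == "__main__":
        names = ["accounts/123/products/en~US~sku123"]  # illustrative resource names
        with multiprocessing.Pool(processes=2, initializer=_init_worker) as pool:
            for name in pool.map(_fetch, names):
                print(name)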
diff --git a/packages/google-shopping-merchant-products/google/shopping/merchant_products/__init__.py b/packages/google-shopping-merchant-products/google/shopping/merchant_products/__init__.py new file mode 100644 index 000000000000..edbffe989471 --- /dev/null +++ b/packages/google-shopping-merchant-products/google/shopping/merchant_products/__init__.py @@ -0,0 +1,101 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from google.shopping.merchant_products import gapic_version as package_version + +__version__ = package_version.__version__ + + +from google.shopping.merchant_products_v1beta.services.product_inputs_service.async_client import ( + ProductInputsServiceAsyncClient, +) +from google.shopping.merchant_products_v1beta.services.product_inputs_service.client import ( + ProductInputsServiceClient, +) +from google.shopping.merchant_products_v1beta.services.products_service.async_client import ( + ProductsServiceAsyncClient, +) +from google.shopping.merchant_products_v1beta.services.products_service.client import ( + ProductsServiceClient, +) +from google.shopping.merchant_products_v1beta.types.productinputs import ( + DeleteProductInputRequest, + InsertProductInputRequest, + ProductInput, +) +from google.shopping.merchant_products_v1beta.types.products import ( + GetProductRequest, + ListProductsRequest, + ListProductsResponse, + Product, +) +from google.shopping.merchant_products_v1beta.types.products_common import ( + Attributes, + Certification, + CloudExportAdditionalProperties, + FreeShippingThreshold, + Installment, + LoyaltyPoints, + LoyaltyProgram, + ProductDetail, + ProductDimension, + ProductStatus, + ProductStructuredDescription, + ProductStructuredTitle, + ProductWeight, + Shipping, + ShippingDimension, + ShippingWeight, + SubscriptionCost, + SubscriptionPeriod, + Tax, + UnitPricingBaseMeasure, + UnitPricingMeasure, +) + +__all__ = ( + "ProductInputsServiceClient", + "ProductInputsServiceAsyncClient", + "ProductsServiceClient", + "ProductsServiceAsyncClient", + "DeleteProductInputRequest", + "InsertProductInputRequest", + "ProductInput", + "GetProductRequest", + "ListProductsRequest", + "ListProductsResponse", + "Product", + "Attributes", + "Certification", + "CloudExportAdditionalProperties", + "FreeShippingThreshold", + "Installment", + "LoyaltyPoints", + "LoyaltyProgram", + "ProductDetail", + "ProductDimension", + "ProductStatus", + "ProductStructuredDescription", + "ProductStructuredTitle", + "ProductWeight", + "Shipping", + "ShippingDimension", + "ShippingWeight", + "SubscriptionCost", + "Tax", + "UnitPricingBaseMeasure", + "UnitPricingMeasure", + "SubscriptionPeriod", +) diff --git a/packages/google-shopping-merchant-products/google/shopping/merchant_products/gapic_version.py b/packages/google-shopping-merchant-products/google/shopping/merchant_products/gapic_version.py new file mode 100644 index 000000000000..33d37a7b677b --- /dev/null +++ 
b/packages/google-shopping-merchant-products/google/shopping/merchant_products/gapic_version.py @@ -0,0 +1,16 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +__version__ = "0.1.0" # {x-release-please-version} diff --git a/packages/google-shopping-merchant-products/google/shopping/merchant_products/py.typed b/packages/google-shopping-merchant-products/google/shopping/merchant_products/py.typed new file mode 100644 index 000000000000..962817aecdcd --- /dev/null +++ b/packages/google-shopping-merchant-products/google/shopping/merchant_products/py.typed @@ -0,0 +1,2 @@ +# Marker file for PEP 561. +# The google-shopping-merchant-products package uses inline types. diff --git a/packages/google-shopping-merchant-products/google/shopping/merchant_products_v1beta/__init__.py b/packages/google-shopping-merchant-products/google/shopping/merchant_products_v1beta/__init__.py new file mode 100644 index 000000000000..7b38ce3fa213 --- /dev/null +++ b/packages/google-shopping-merchant-products/google/shopping/merchant_products_v1beta/__init__.py @@ -0,0 +1,94 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from google.shopping.merchant_products_v1beta import gapic_version as package_version + +__version__ = package_version.__version__ + + +from .services.product_inputs_service import ( + ProductInputsServiceAsyncClient, + ProductInputsServiceClient, +) +from .services.products_service import ProductsServiceAsyncClient, ProductsServiceClient +from .types.productinputs import ( + DeleteProductInputRequest, + InsertProductInputRequest, + ProductInput, +) +from .types.products import ( + GetProductRequest, + ListProductsRequest, + ListProductsResponse, + Product, +) +from .types.products_common import ( + Attributes, + Certification, + CloudExportAdditionalProperties, + FreeShippingThreshold, + Installment, + LoyaltyPoints, + LoyaltyProgram, + ProductDetail, + ProductDimension, + ProductStatus, + ProductStructuredDescription, + ProductStructuredTitle, + ProductWeight, + Shipping, + ShippingDimension, + ShippingWeight, + SubscriptionCost, + SubscriptionPeriod, + Tax, + UnitPricingBaseMeasure, + UnitPricingMeasure, +) + +__all__ = ( + "ProductInputsServiceAsyncClient", + "ProductsServiceAsyncClient", + "Attributes", + "Certification", + "CloudExportAdditionalProperties", + "DeleteProductInputRequest", + "FreeShippingThreshold", + "GetProductRequest", + "InsertProductInputRequest", + "Installment", + "ListProductsRequest", + "ListProductsResponse", + "LoyaltyPoints", + "LoyaltyProgram", + "Product", + "ProductDetail", + "ProductDimension", + "ProductInput", + "ProductInputsServiceClient", + "ProductStatus", + "ProductStructuredDescription", + "ProductStructuredTitle", + "ProductWeight", + "ProductsServiceClient", + "Shipping", + "ShippingDimension", + "ShippingWeight", + "SubscriptionCost", + "SubscriptionPeriod", + "Tax", + "UnitPricingBaseMeasure", + "UnitPricingMeasure", +) diff --git a/packages/google-shopping-merchant-products/google/shopping/merchant_products_v1beta/gapic_metadata.json b/packages/google-shopping-merchant-products/google/shopping/merchant_products_v1beta/gapic_metadata.json new file mode 100644 index 000000000000..ac0f69541032 --- /dev/null +++ b/packages/google-shopping-merchant-products/google/shopping/merchant_products_v1beta/gapic_metadata.json @@ -0,0 +1,107 @@ + { + "comment": "This file maps proto services/RPCs to the corresponding library clients/methods", + "language": "python", + "libraryPackage": "google.shopping.merchant_products_v1beta", + "protoPackage": "google.shopping.merchant.products.v1beta", + "schema": "1.0", + "services": { + "ProductInputsService": { + "clients": { + "grpc": { + "libraryClient": "ProductInputsServiceClient", + "rpcs": { + "DeleteProductInput": { + "methods": [ + "delete_product_input" + ] + }, + "InsertProductInput": { + "methods": [ + "insert_product_input" + ] + } + } + }, + "grpc-async": { + "libraryClient": "ProductInputsServiceAsyncClient", + "rpcs": { + "DeleteProductInput": { + "methods": [ + "delete_product_input" + ] + }, + "InsertProductInput": { + "methods": [ + "insert_product_input" + ] + } + } + }, + "rest": { + "libraryClient": "ProductInputsServiceClient", + "rpcs": { + "DeleteProductInput": { + "methods": [ + "delete_product_input" + ] + }, + "InsertProductInput": { + "methods": [ + "insert_product_input" + ] + } + } + } + } + }, + "ProductsService": { + "clients": { + "grpc": { + "libraryClient": "ProductsServiceClient", + "rpcs": { + "GetProduct": { + "methods": [ + "get_product" + ] + }, + "ListProducts": { + "methods": [ + "list_products" + ] + } + } + }, + "grpc-async": { + "libraryClient": 
"ProductsServiceAsyncClient", + "rpcs": { + "GetProduct": { + "methods": [ + "get_product" + ] + }, + "ListProducts": { + "methods": [ + "list_products" + ] + } + } + }, + "rest": { + "libraryClient": "ProductsServiceClient", + "rpcs": { + "GetProduct": { + "methods": [ + "get_product" + ] + }, + "ListProducts": { + "methods": [ + "list_products" + ] + } + } + } + } + } + } +} diff --git a/packages/google-shopping-merchant-products/google/shopping/merchant_products_v1beta/gapic_version.py b/packages/google-shopping-merchant-products/google/shopping/merchant_products_v1beta/gapic_version.py new file mode 100644 index 000000000000..33d37a7b677b --- /dev/null +++ b/packages/google-shopping-merchant-products/google/shopping/merchant_products_v1beta/gapic_version.py @@ -0,0 +1,16 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +__version__ = "0.1.0" # {x-release-please-version} diff --git a/packages/google-shopping-merchant-products/google/shopping/merchant_products_v1beta/py.typed b/packages/google-shopping-merchant-products/google/shopping/merchant_products_v1beta/py.typed new file mode 100644 index 000000000000..962817aecdcd --- /dev/null +++ b/packages/google-shopping-merchant-products/google/shopping/merchant_products_v1beta/py.typed @@ -0,0 +1,2 @@ +# Marker file for PEP 561. +# The google-shopping-merchant-products package uses inline types. diff --git a/packages/google-shopping-merchant-products/google/shopping/merchant_products_v1beta/services/__init__.py b/packages/google-shopping-merchant-products/google/shopping/merchant_products_v1beta/services/__init__.py new file mode 100644 index 000000000000..8f6cf068242c --- /dev/null +++ b/packages/google-shopping-merchant-products/google/shopping/merchant_products_v1beta/services/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# diff --git a/packages/google-shopping-merchant-products/google/shopping/merchant_products_v1beta/services/product_inputs_service/__init__.py b/packages/google-shopping-merchant-products/google/shopping/merchant_products_v1beta/services/product_inputs_service/__init__.py new file mode 100644 index 000000000000..0f4a8295baa1 --- /dev/null +++ b/packages/google-shopping-merchant-products/google/shopping/merchant_products_v1beta/services/product_inputs_service/__init__.py @@ -0,0 +1,22 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from .async_client import ProductInputsServiceAsyncClient +from .client import ProductInputsServiceClient + +__all__ = ( + "ProductInputsServiceClient", + "ProductInputsServiceAsyncClient", +) diff --git a/packages/google-shopping-merchant-products/google/shopping/merchant_products_v1beta/services/product_inputs_service/async_client.py b/packages/google-shopping-merchant-products/google/shopping/merchant_products_v1beta/services/product_inputs_service/async_client.py new file mode 100644 index 000000000000..f64af9e701a5 --- /dev/null +++ b/packages/google-shopping-merchant-products/google/shopping/merchant_products_v1beta/services/product_inputs_service/async_client.py @@ -0,0 +1,510 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +import functools +import re +from typing import ( + Callable, + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, +) + +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry_async as retries +from google.api_core.client_options import ClientOptions +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.shopping.merchant_products_v1beta import gapic_version as package_version + +try: + OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.AsyncRetry, object, None] # type: ignore + +from google.shopping.type.types import types + +from google.shopping.merchant_products_v1beta.types import ( + productinputs, + products_common, +) + +from .client import ProductInputsServiceClient +from .transports.base import DEFAULT_CLIENT_INFO, ProductInputsServiceTransport +from .transports.grpc_asyncio import ProductInputsServiceGrpcAsyncIOTransport + + +class ProductInputsServiceAsyncClient: + """Service to use ProductInput resource. + This service works for products with online channel only. + """ + + _client: ProductInputsServiceClient + + # Copy defaults from the synchronous client for use here. + # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. + DEFAULT_ENDPOINT = ProductInputsServiceClient.DEFAULT_ENDPOINT + DEFAULT_MTLS_ENDPOINT = ProductInputsServiceClient.DEFAULT_MTLS_ENDPOINT + _DEFAULT_ENDPOINT_TEMPLATE = ProductInputsServiceClient._DEFAULT_ENDPOINT_TEMPLATE + _DEFAULT_UNIVERSE = ProductInputsServiceClient._DEFAULT_UNIVERSE + + product_path = staticmethod(ProductInputsServiceClient.product_path) + parse_product_path = staticmethod(ProductInputsServiceClient.parse_product_path) + product_input_path = staticmethod(ProductInputsServiceClient.product_input_path) + parse_product_input_path = staticmethod( + ProductInputsServiceClient.parse_product_input_path + ) + common_billing_account_path = staticmethod( + ProductInputsServiceClient.common_billing_account_path + ) + parse_common_billing_account_path = staticmethod( + ProductInputsServiceClient.parse_common_billing_account_path + ) + common_folder_path = staticmethod(ProductInputsServiceClient.common_folder_path) + parse_common_folder_path = staticmethod( + ProductInputsServiceClient.parse_common_folder_path + ) + common_organization_path = staticmethod( + ProductInputsServiceClient.common_organization_path + ) + parse_common_organization_path = staticmethod( + ProductInputsServiceClient.parse_common_organization_path + ) + common_project_path = staticmethod(ProductInputsServiceClient.common_project_path) + parse_common_project_path = staticmethod( + ProductInputsServiceClient.parse_common_project_path + ) + common_location_path = staticmethod(ProductInputsServiceClient.common_location_path) + parse_common_location_path = staticmethod( + ProductInputsServiceClient.parse_common_location_path + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. 
+ + Returns: + ProductInputsServiceAsyncClient: The constructed client. + """ + return ProductInputsServiceClient.from_service_account_info.__func__(ProductInputsServiceAsyncClient, info, *args, **kwargs) # type: ignore + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + ProductInputsServiceAsyncClient: The constructed client. + """ + return ProductInputsServiceClient.from_service_account_file.__func__(ProductInputsServiceAsyncClient, filename, *args, **kwargs) # type: ignore + + from_service_account_json = from_service_account_file + + @classmethod + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[ClientOptions] = None + ): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + return ProductInputsServiceClient.get_mtls_endpoint_and_cert_source(client_options) # type: ignore + + @property + def transport(self) -> ProductInputsServiceTransport: + """Returns the transport used by the client instance. + + Returns: + ProductInputsServiceTransport: The transport used by the client instance. + """ + return self._client.transport + + @property + def api_endpoint(self): + """Return the API endpoint used by the client instance. + + Returns: + str: The API endpoint used by the client instance. + """ + return self._client._api_endpoint + + @property + def universe_domain(self) -> str: + """Return the universe domain used by the client instance. + + Returns: + str: The universe domain used + by the client instance. 
+ """ + return self._client._universe_domain + + get_transport_class = functools.partial( + type(ProductInputsServiceClient).get_transport_class, + type(ProductInputsServiceClient), + ) + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[ + Union[ + str, + ProductInputsServiceTransport, + Callable[..., ProductInputsServiceTransport], + ] + ] = "grpc_asyncio", + client_options: Optional[ClientOptions] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the product inputs service async client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Optional[Union[str,ProductInputsServiceTransport,Callable[..., ProductInputsServiceTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport to use. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the ProductInputsServiceTransport constructor. + If set to None, a transport is chosen automatically. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): + Custom options for the client. + + 1. The ``api_endpoint`` property can be used to override the + default endpoint provided by the client when ``transport`` is + not explicitly provided. Only if this property is not set and + ``transport`` was not explicitly provided, the endpoint is + determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment + variable, which have one of the following values: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto-switch to the + default mTLS endpoint if client certificate is present; this is + the default value). + + 2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide a client certificate for mTLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + 3. The ``universe_domain`` property can be used to override the + default "googleapis.com" universe. Note that ``api_endpoint`` + property still takes precedence; and ``universe_domain`` is + currently not supported for mTLS. + + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. 
+ """ + self._client = ProductInputsServiceClient( + credentials=credentials, + transport=transport, + client_options=client_options, + client_info=client_info, + ) + + async def insert_product_input( + self, + request: Optional[Union[productinputs.InsertProductInputRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> productinputs.ProductInput: + r"""Uploads a product input to your Merchant Center + account. If an input with the same contentLanguage, + offerId, and dataSource already exists, this method + replaces that entry. + + After inserting, updating, or deleting a product input, + it may take several minutes before the processed product + can be retrieved. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.shopping import merchant_products_v1beta + + async def sample_insert_product_input(): + # Create a client + client = merchant_products_v1beta.ProductInputsServiceAsyncClient() + + # Initialize request argument(s) + product_input = merchant_products_v1beta.ProductInput() + product_input.channel = "LOCAL" + product_input.offer_id = "offer_id_value" + product_input.content_language = "content_language_value" + product_input.feed_label = "feed_label_value" + + request = merchant_products_v1beta.InsertProductInputRequest( + parent="parent_value", + product_input=product_input, + data_source="data_source_value", + ) + + # Make the request + response = await client.insert_product_input(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.shopping.merchant_products_v1beta.types.InsertProductInputRequest, dict]]): + The request object. Request message for the + InsertProductInput method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.shopping.merchant_products_v1beta.types.ProductInput: + This resource represents input data you submit for a product, not the + processed product that you see in Merchant Center, in + Shopping ads, or across Google surfaces. Product + inputs, rules and supplemental data source data are + combined to create the processed + [product][google.shopping.content.bundles.Products.Product]. + + Required product input attributes to pass data + validation checks are primarily defined in the + [Products Data + Specification](\ https://support.google.com/merchants/answer/188494). + + The following attributes are required: + [feedLabel][google.shopping.content.bundles.Products.feed_label], + [contentLanguage][google.shopping.content.bundles.Products.content_language] + and + [offerId][google.shopping.content.bundles.Products.offer_id]. 
+ + After inserting, updating, or deleting a product + input, it may take several minutes before the + processed product can be retrieved. + + All fields in the product input and its sub-messages + match the English name of their corresponding + attribute in the vertical spec with [some + exceptions](\ https://support.google.com/merchants/answer/7052112). + + """ + # Create or coerce a protobuf request object. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, productinputs.InsertProductInputRequest): + request = productinputs.InsertProductInputRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.insert_product_input + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def delete_product_input( + self, + request: Optional[Union[productinputs.DeleteProductInputRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a product input from your Merchant Center + account. + After inserting, updating, or deleting a product input, + it may take several minutes before the processed product + can be retrieved. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.shopping import merchant_products_v1beta + + async def sample_delete_product_input(): + # Create a client + client = merchant_products_v1beta.ProductInputsServiceAsyncClient() + + # Initialize request argument(s) + request = merchant_products_v1beta.DeleteProductInputRequest( + name="name_value", + data_source="data_source_value", + ) + + # Make the request + await client.delete_product_input(request=request) + + Args: + request (Optional[Union[google.shopping.merchant_products_v1beta.types.DeleteProductInputRequest, dict]]): + The request object. Request message for the + DeleteProductInput method. + name (:class:`str`): + Required. The name of the product + input resource to delete. Format: + accounts/{account}/productInputs/{product} + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, productinputs.DeleteProductInputRequest): + request = productinputs.DeleteProductInputRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.delete_product_input + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def __aenter__(self) -> "ProductInputsServiceAsyncClient": + return self + + async def __aexit__(self, exc_type, exc, tb): + await self.transport.close() + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("ProductInputsServiceAsyncClient",) diff --git a/packages/google-shopping-merchant-products/google/shopping/merchant_products_v1beta/services/product_inputs_service/client.py b/packages/google-shopping-merchant-products/google/shopping/merchant_products_v1beta/services/product_inputs_service/client.py new file mode 100644 index 000000000000..69f62a38bf37 --- /dev/null +++ b/packages/google-shopping-merchant-products/google/shopping/merchant_products_v1beta/services/product_inputs_service/client.py @@ -0,0 +1,942 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
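For orientation alongside the generated client below, here is a minimal usage sketch pieced together from the generated samples above; the account, offer, and data-source names are placeholders, and running it would require real Merchant Center resources plus credentials available in the environment.

```python
# Hedged usage sketch: insert a product input, then delete it again.
# Resource names below are placeholders, not real Merchant Center resources.
from google.shopping import merchant_products_v1beta


def upsert_then_delete_product_input() -> None:
    # Credentials are resolved from the environment (Application Default Credentials).
    client = merchant_products_v1beta.ProductInputsServiceClient()

    product_input = merchant_products_v1beta.ProductInput()
    product_input.channel = "LOCAL"
    product_input.offer_id = "sku-123"
    product_input.content_language = "en"
    product_input.feed_label = "US"

    inserted = client.insert_product_input(
        request=merchant_products_v1beta.InsertProductInputRequest(
            parent="accounts/123",
            product_input=product_input,
            data_source="accounts/123/dataSources/456",
        )
    )
    print(inserted.name)

    # Deleting requires the product input name and the same data source.
    client.delete_product_input(
        request=merchant_products_v1beta.DeleteProductInputRequest(
            name=inserted.name,
            data_source="accounts/123/dataSources/456",
        )
    )
```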
+# +from collections import OrderedDict +import os +import re +from typing import ( + Callable, + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, + cast, +) +import warnings + +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.shopping.merchant_products_v1beta import gapic_version as package_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + +from google.shopping.type.types import types + +from google.shopping.merchant_products_v1beta.types import ( + productinputs, + products_common, +) + +from .transports.base import DEFAULT_CLIENT_INFO, ProductInputsServiceTransport +from .transports.grpc import ProductInputsServiceGrpcTransport +from .transports.grpc_asyncio import ProductInputsServiceGrpcAsyncIOTransport +from .transports.rest import ProductInputsServiceRestTransport + + +class ProductInputsServiceClientMeta(type): + """Metaclass for the ProductInputsService client. + + This provides class-level methods for building and retrieving + support objects (e.g. transport) without polluting the client instance + objects. + """ + + _transport_registry = ( + OrderedDict() + ) # type: Dict[str, Type[ProductInputsServiceTransport]] + _transport_registry["grpc"] = ProductInputsServiceGrpcTransport + _transport_registry["grpc_asyncio"] = ProductInputsServiceGrpcAsyncIOTransport + _transport_registry["rest"] = ProductInputsServiceRestTransport + + def get_transport_class( + cls, + label: Optional[str] = None, + ) -> Type[ProductInputsServiceTransport]: + """Returns an appropriate transport class. + + Args: + label: The name of the desired transport. If none is + provided, then the first transport in the registry is used. + + Returns: + The transport class to use. + """ + # If a specific transport is requested, return that one. + if label: + return cls._transport_registry[label] + + # No transport is requested; return the default (that is, the first one + # in the dictionary). + return next(iter(cls._transport_registry.values())) + + +class ProductInputsServiceClient(metaclass=ProductInputsServiceClientMeta): + """Service to use ProductInput resource. + This service works for products with online channel only. + """ + + @staticmethod + def _get_default_mtls_endpoint(api_endpoint): + """Converts api endpoint to mTLS endpoint. + + Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to + "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. + Args: + api_endpoint (Optional[str]): the api endpoint to convert. + Returns: + str: converted mTLS api endpoint. + """ + if not api_endpoint: + return api_endpoint + + mtls_endpoint_re = re.compile( + r"(?P[^.]+)(?P\.mtls)?(?P\.sandbox)?(?P\.googleapis\.com)?" 
+ ) + + m = mtls_endpoint_re.match(api_endpoint) + name, mtls, sandbox, googledomain = m.groups() + if mtls or not googledomain: + return api_endpoint + + if sandbox: + return api_endpoint.replace( + "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" + ) + + return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") + + # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. + DEFAULT_ENDPOINT = "merchantapi.googleapis.com" + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore + DEFAULT_ENDPOINT + ) + + _DEFAULT_ENDPOINT_TEMPLATE = "merchantapi.{UNIVERSE_DOMAIN}" + _DEFAULT_UNIVERSE = "googleapis.com" + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + ProductInputsServiceClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + ProductInputsServiceClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_file(filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> ProductInputsServiceTransport: + """Returns the transport used by the client instance. + + Returns: + ProductInputsServiceTransport: The transport used by the client + instance. 
+ """ + return self._transport + + @staticmethod + def product_path( + account: str, + product: str, + ) -> str: + """Returns a fully-qualified product string.""" + return "accounts/{account}/products/{product}".format( + account=account, + product=product, + ) + + @staticmethod + def parse_product_path(path: str) -> Dict[str, str]: + """Parses a product path into its component segments.""" + m = re.match(r"^accounts/(?P.+?)/products/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def product_input_path( + account: str, + productinput: str, + ) -> str: + """Returns a fully-qualified product_input string.""" + return "accounts/{account}/productInputs/{productinput}".format( + account=account, + productinput=productinput, + ) + + @staticmethod + def parse_product_input_path(path: str) -> Dict[str, str]: + """Parses a product_input path into its component segments.""" + m = re.match( + r"^accounts/(?P.+?)/productInputs/(?P.+?)$", path + ) + return m.groupdict() if m else {} + + @staticmethod + def common_billing_account_path( + billing_account: str, + ) -> str: + """Returns a fully-qualified billing_account string.""" + return "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + + @staticmethod + def parse_common_billing_account_path(path: str) -> Dict[str, str]: + """Parse a billing_account path into its component segments.""" + m = re.match(r"^billingAccounts/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_folder_path( + folder: str, + ) -> str: + """Returns a fully-qualified folder string.""" + return "folders/{folder}".format( + folder=folder, + ) + + @staticmethod + def parse_common_folder_path(path: str) -> Dict[str, str]: + """Parse a folder path into its component segments.""" + m = re.match(r"^folders/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_organization_path( + organization: str, + ) -> str: + """Returns a fully-qualified organization string.""" + return "organizations/{organization}".format( + organization=organization, + ) + + @staticmethod + def parse_common_organization_path(path: str) -> Dict[str, str]: + """Parse a organization path into its component segments.""" + m = re.match(r"^organizations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_project_path( + project: str, + ) -> str: + """Returns a fully-qualified project string.""" + return "projects/{project}".format( + project=project, + ) + + @staticmethod + def parse_common_project_path(path: str) -> Dict[str, str]: + """Parse a project path into its component segments.""" + m = re.match(r"^projects/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_location_path( + project: str, + location: str, + ) -> str: + """Returns a fully-qualified location string.""" + return "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) + + @staticmethod + def parse_common_location_path(path: str) -> Dict[str, str]: + """Parse a location path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @classmethod + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[client_options_lib.ClientOptions] = None + ): + """Deprecated. Return the API endpoint and client cert source for mutual TLS. 
+ + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + + warnings.warn( + "get_mtls_endpoint_and_cert_source is deprecated. Use the api_endpoint property instead.", + DeprecationWarning, + ) + if client_options is None: + client_options = client_options_lib.ClientOptions() + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_client_cert not in ("true", "false"): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError( + "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Figure out the client cert source to use. + client_cert_source = None + if use_client_cert == "true": + if client_options.client_cert_source: + client_cert_source = client_options.client_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + + # Figure out which api endpoint to use. + if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + elif use_mtls_endpoint == "always" or ( + use_mtls_endpoint == "auto" and client_cert_source + ): + api_endpoint = cls.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = cls.DEFAULT_ENDPOINT + + return api_endpoint, client_cert_source + + @staticmethod + def _read_environment_variables(): + """Returns the environment variables used by the client. + + Returns: + Tuple[bool, str, str]: returns the GOOGLE_API_USE_CLIENT_CERTIFICATE, + GOOGLE_API_USE_MTLS_ENDPOINT, and GOOGLE_CLOUD_UNIVERSE_DOMAIN environment variables. + + Raises: + ValueError: If GOOGLE_API_USE_CLIENT_CERTIFICATE is not + any of ["true", "false"]. + google.auth.exceptions.MutualTLSChannelError: If GOOGLE_API_USE_MTLS_ENDPOINT + is not any of ["auto", "never", "always"]. 
+ """ + use_client_cert = os.getenv( + "GOOGLE_API_USE_CLIENT_CERTIFICATE", "false" + ).lower() + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto").lower() + universe_domain_env = os.getenv("GOOGLE_CLOUD_UNIVERSE_DOMAIN") + if use_client_cert not in ("true", "false"): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError( + "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + return use_client_cert == "true", use_mtls_endpoint, universe_domain_env + + @staticmethod + def _get_client_cert_source(provided_cert_source, use_cert_flag): + """Return the client cert source to be used by the client. + + Args: + provided_cert_source (bytes): The client certificate source provided. + use_cert_flag (bool): A flag indicating whether to use the client certificate. + + Returns: + bytes or None: The client cert source to be used by the client. + """ + client_cert_source = None + if use_cert_flag: + if provided_cert_source: + client_cert_source = provided_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + return client_cert_source + + @staticmethod + def _get_api_endpoint( + api_override, client_cert_source, universe_domain, use_mtls_endpoint + ): + """Return the API endpoint used by the client. + + Args: + api_override (str): The API endpoint override. If specified, this is always + the return value of this function and the other arguments are not used. + client_cert_source (bytes): The client certificate source used by the client. + universe_domain (str): The universe domain used by the client. + use_mtls_endpoint (str): How to use the mTLS endpoint, which depends also on the other parameters. + Possible values are "always", "auto", or "never". + + Returns: + str: The API endpoint to be used by the client. + """ + if api_override is not None: + api_endpoint = api_override + elif use_mtls_endpoint == "always" or ( + use_mtls_endpoint == "auto" and client_cert_source + ): + _default_universe = ProductInputsServiceClient._DEFAULT_UNIVERSE + if universe_domain != _default_universe: + raise MutualTLSChannelError( + f"mTLS is not supported in any universe other than {_default_universe}." + ) + api_endpoint = ProductInputsServiceClient.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = ProductInputsServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=universe_domain + ) + return api_endpoint + + @staticmethod + def _get_universe_domain( + client_universe_domain: Optional[str], universe_domain_env: Optional[str] + ) -> str: + """Return the universe domain used by the client. + + Args: + client_universe_domain (Optional[str]): The universe domain configured via the client options. + universe_domain_env (Optional[str]): The universe domain configured via the "GOOGLE_CLOUD_UNIVERSE_DOMAIN" environment variable. + + Returns: + str: The universe domain to be used by the client. + + Raises: + ValueError: If the universe domain is an empty string. 
+ """ + universe_domain = ProductInputsServiceClient._DEFAULT_UNIVERSE + if client_universe_domain is not None: + universe_domain = client_universe_domain + elif universe_domain_env is not None: + universe_domain = universe_domain_env + if len(universe_domain.strip()) == 0: + raise ValueError("Universe Domain cannot be an empty string.") + return universe_domain + + @staticmethod + def _compare_universes( + client_universe: str, credentials: ga_credentials.Credentials + ) -> bool: + """Returns True iff the universe domains used by the client and credentials match. + + Args: + client_universe (str): The universe domain configured via the client options. + credentials (ga_credentials.Credentials): The credentials being used in the client. + + Returns: + bool: True iff client_universe matches the universe in credentials. + + Raises: + ValueError: when client_universe does not match the universe in credentials. + """ + + default_universe = ProductInputsServiceClient._DEFAULT_UNIVERSE + credentials_universe = getattr(credentials, "universe_domain", default_universe) + + if client_universe != credentials_universe: + raise ValueError( + "The configured universe domain " + f"({client_universe}) does not match the universe domain " + f"found in the credentials ({credentials_universe}). " + "If you haven't configured the universe domain explicitly, " + f"`{default_universe}` is the default." + ) + return True + + def _validate_universe_domain(self): + """Validates client's and credentials' universe domains are consistent. + + Returns: + bool: True iff the configured universe domain is valid. + + Raises: + ValueError: If the configured universe domain is not valid. + """ + self._is_universe_domain_valid = ( + self._is_universe_domain_valid + or ProductInputsServiceClient._compare_universes( + self.universe_domain, self.transport._credentials + ) + ) + return self._is_universe_domain_valid + + @property + def api_endpoint(self): + """Return the API endpoint used by the client instance. + + Returns: + str: The API endpoint used by the client instance. + """ + return self._api_endpoint + + @property + def universe_domain(self) -> str: + """Return the universe domain used by the client instance. + + Returns: + str: The universe domain used by the client instance. + """ + return self._universe_domain + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[ + Union[ + str, + ProductInputsServiceTransport, + Callable[..., ProductInputsServiceTransport], + ] + ] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the product inputs service client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Optional[Union[str,ProductInputsServiceTransport,Callable[..., ProductInputsServiceTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the ProductInputsServiceTransport constructor. + If set to None, a transport is chosen automatically. 
+ client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): + Custom options for the client. + + 1. The ``api_endpoint`` property can be used to override the + default endpoint provided by the client when ``transport`` is + not explicitly provided. Only if this property is not set and + ``transport`` was not explicitly provided, the endpoint is + determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment + variable, which have one of the following values: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto-switch to the + default mTLS endpoint if client certificate is present; this is + the default value). + + 2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide a client certificate for mTLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + 3. The ``universe_domain`` property can be used to override the + default "googleapis.com" universe. Note that the ``api_endpoint`` + property still takes precedence; and ``universe_domain`` is + currently not supported for mTLS. + + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + """ + self._client_options = client_options + if isinstance(self._client_options, dict): + self._client_options = client_options_lib.from_dict(self._client_options) + if self._client_options is None: + self._client_options = client_options_lib.ClientOptions() + self._client_options = cast( + client_options_lib.ClientOptions, self._client_options + ) + + universe_domain_opt = getattr(self._client_options, "universe_domain", None) + + ( + self._use_client_cert, + self._use_mtls_endpoint, + self._universe_domain_env, + ) = ProductInputsServiceClient._read_environment_variables() + self._client_cert_source = ProductInputsServiceClient._get_client_cert_source( + self._client_options.client_cert_source, self._use_client_cert + ) + self._universe_domain = ProductInputsServiceClient._get_universe_domain( + universe_domain_opt, self._universe_domain_env + ) + self._api_endpoint = None # updated below, depending on `transport` + + # Initialize the universe domain validation. + self._is_universe_domain_valid = False + + api_key_value = getattr(self._client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError( + "client_options.api_key and credentials are mutually exclusive" + ) + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + transport_provided = isinstance(transport, ProductInputsServiceTransport) + if transport_provided: + # transport is a ProductInputsServiceTransport instance. + if credentials or self._client_options.credentials_file or api_key_value: + raise ValueError( + "When providing a transport instance, " + "provide its credentials directly." 
+ ) + if self._client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." + ) + self._transport = cast(ProductInputsServiceTransport, transport) + self._api_endpoint = self._transport.host + + self._api_endpoint = ( + self._api_endpoint + or ProductInputsServiceClient._get_api_endpoint( + self._client_options.api_endpoint, + self._client_cert_source, + self._universe_domain, + self._use_mtls_endpoint, + ) + ) + + if not transport_provided: + import google.auth._default # type: ignore + + if api_key_value and hasattr( + google.auth._default, "get_api_key_credentials" + ): + credentials = google.auth._default.get_api_key_credentials( + api_key_value + ) + + transport_init: Union[ + Type[ProductInputsServiceTransport], + Callable[..., ProductInputsServiceTransport], + ] = ( + type(self).get_transport_class(transport) + if isinstance(transport, str) or transport is None + else cast(Callable[..., ProductInputsServiceTransport], transport) + ) + # initialize with the provided callable or the passed in class + self._transport = transport_init( + credentials=credentials, + credentials_file=self._client_options.credentials_file, + host=self._api_endpoint, + scopes=self._client_options.scopes, + client_cert_source_for_mtls=self._client_cert_source, + quota_project_id=self._client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + api_audience=self._client_options.api_audience, + ) + + def insert_product_input( + self, + request: Optional[Union[productinputs.InsertProductInputRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> productinputs.ProductInput: + r"""Uploads a product input to your Merchant Center + account. If an input with the same contentLanguage, + offerId, and dataSource already exists, this method + replaces that entry. + + After inserting, updating, or deleting a product input, + it may take several minutes before the processed product + can be retrieved. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.shopping import merchant_products_v1beta + + def sample_insert_product_input(): + # Create a client + client = merchant_products_v1beta.ProductInputsServiceClient() + + # Initialize request argument(s) + product_input = merchant_products_v1beta.ProductInput() + product_input.channel = "LOCAL" + product_input.offer_id = "offer_id_value" + product_input.content_language = "content_language_value" + product_input.feed_label = "feed_label_value" + + request = merchant_products_v1beta.InsertProductInputRequest( + parent="parent_value", + product_input=product_input, + data_source="data_source_value", + ) + + # Make the request + response = client.insert_product_input(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.shopping.merchant_products_v1beta.types.InsertProductInputRequest, dict]): + The request object. Request message for the + InsertProductInput method. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.shopping.merchant_products_v1beta.types.ProductInput: + This resource represents input data you submit for a product, not the + processed product that you see in Merchant Center, in + Shopping ads, or across Google surfaces. Product + inputs, rules and supplemental data source data are + combined to create the processed + [product][google.shopping.content.bundles.Products.Product]. + + Required product input attributes to pass data + validation checks are primarily defined in the + [Products Data + Specification](\ https://support.google.com/merchants/answer/188494). + + The following attributes are required: + [feedLabel][google.shopping.content.bundles.Products.feed_label], + [contentLanguage][google.shopping.content.bundles.Products.content_language] + and + [offerId][google.shopping.content.bundles.Products.offer_id]. + + After inserting, updating, or deleting a product + input, it may take several minutes before the + processed product can be retrieved. + + All fields in the product input and its sub-messages + match the English name of their corresponding + attribute in the vertical spec with [some + exceptions](\ https://support.google.com/merchants/answer/7052112). + + """ + # Create or coerce a protobuf request object. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, productinputs.InsertProductInputRequest): + request = productinputs.InsertProductInputRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.insert_product_input] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def delete_product_input( + self, + request: Optional[Union[productinputs.DeleteProductInputRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a product input from your Merchant Center + account. + After inserting, updating, or deleting a product input, + it may take several minutes before the processed product + can be retrieved. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.shopping import merchant_products_v1beta + + def sample_delete_product_input(): + # Create a client + client = merchant_products_v1beta.ProductInputsServiceClient() + + # Initialize request argument(s) + request = merchant_products_v1beta.DeleteProductInputRequest( + name="name_value", + data_source="data_source_value", + ) + + # Make the request + client.delete_product_input(request=request) + + Args: + request (Union[google.shopping.merchant_products_v1beta.types.DeleteProductInputRequest, dict]): + The request object. Request message for the + DeleteProductInput method. + name (str): + Required. The name of the product + input resource to delete. Format: + accounts/{account}/productInputs/{product} + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, productinputs.DeleteProductInputRequest): + request = productinputs.DeleteProductInputRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_product_input] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def __enter__(self) -> "ProductInputsServiceClient": + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! 
+ """ + self.transport.close() + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("ProductInputsServiceClient",) diff --git a/packages/google-shopping-merchant-products/google/shopping/merchant_products_v1beta/services/product_inputs_service/transports/__init__.py b/packages/google-shopping-merchant-products/google/shopping/merchant_products_v1beta/services/product_inputs_service/transports/__init__.py new file mode 100644 index 000000000000..4157decbd9e2 --- /dev/null +++ b/packages/google-shopping-merchant-products/google/shopping/merchant_products_v1beta/services/product_inputs_service/transports/__init__.py @@ -0,0 +1,38 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +from typing import Dict, Type + +from .base import ProductInputsServiceTransport +from .grpc import ProductInputsServiceGrpcTransport +from .grpc_asyncio import ProductInputsServiceGrpcAsyncIOTransport +from .rest import ProductInputsServiceRestInterceptor, ProductInputsServiceRestTransport + +# Compile a registry of transports. +_transport_registry = ( + OrderedDict() +) # type: Dict[str, Type[ProductInputsServiceTransport]] +_transport_registry["grpc"] = ProductInputsServiceGrpcTransport +_transport_registry["grpc_asyncio"] = ProductInputsServiceGrpcAsyncIOTransport +_transport_registry["rest"] = ProductInputsServiceRestTransport + +__all__ = ( + "ProductInputsServiceTransport", + "ProductInputsServiceGrpcTransport", + "ProductInputsServiceGrpcAsyncIOTransport", + "ProductInputsServiceRestTransport", + "ProductInputsServiceRestInterceptor", +) diff --git a/packages/google-shopping-merchant-products/google/shopping/merchant_products_v1beta/services/product_inputs_service/transports/base.py b/packages/google-shopping-merchant-products/google/shopping/merchant_products_v1beta/services/product_inputs_service/transports/base.py new file mode 100644 index 000000000000..b4de8e9f999a --- /dev/null +++ b/packages/google-shopping-merchant-products/google/shopping/merchant_products_v1beta/services/product_inputs_service/transports/base.py @@ -0,0 +1,174 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import abc +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union + +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore +from google.protobuf import empty_pb2 # type: ignore + +from google.shopping.merchant_products_v1beta import gapic_version as package_version +from google.shopping.merchant_products_v1beta.types import productinputs + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +class ProductInputsServiceTransport(abc.ABC): + """Abstract transport class for ProductInputsService.""" + + AUTH_SCOPES = ("https://www.googleapis.com/auth/content",) + + DEFAULT_HOST: str = "merchantapi.googleapis.com" + + def __init__( + self, + *, + host: str = DEFAULT_HOST, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + **kwargs, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'merchantapi.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A list of scopes. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + """ + + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} + + # Save the scopes. + self._scopes = scopes + + # If no credentials are provided, then determine the appropriate + # defaults. + if credentials and credentials_file: + raise core_exceptions.DuplicateCredentialArgs( + "'credentials_file' and 'credentials' are mutually exclusive" + ) + + if credentials_file is not None: + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, **scopes_kwargs, quota_project_id=quota_project_id + ) + elif credentials is None: + credentials, _ = google.auth.default( + **scopes_kwargs, quota_project_id=quota_project_id + ) + # Don't apply audience if the credentials file passed from user. 
+ if hasattr(credentials, "with_gdch_audience"): + credentials = credentials.with_gdch_audience( + api_audience if api_audience else host + ) + + # If the credentials are service account credentials, then always try to use self signed JWT. + if ( + always_use_jwt_access + and isinstance(credentials, service_account.Credentials) + and hasattr(service_account.Credentials, "with_always_use_jwt_access") + ): + credentials = credentials.with_always_use_jwt_access(True) + + # Save the credentials. + self._credentials = credentials + + # Save the hostname. Default to port 443 (HTTPS) if none is specified. + if ":" not in host: + host += ":443" + self._host = host + + @property + def host(self): + return self._host + + def _prep_wrapped_messages(self, client_info): + # Precompute the wrapped methods. + self._wrapped_methods = { + self.insert_product_input: gapic_v1.method.wrap_method( + self.insert_product_input, + default_timeout=None, + client_info=client_info, + ), + self.delete_product_input: gapic_v1.method.wrap_method( + self.delete_product_input, + default_timeout=None, + client_info=client_info, + ), + } + + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! + """ + raise NotImplementedError() + + @property + def insert_product_input( + self, + ) -> Callable[ + [productinputs.InsertProductInputRequest], + Union[productinputs.ProductInput, Awaitable[productinputs.ProductInput]], + ]: + raise NotImplementedError() + + @property + def delete_product_input( + self, + ) -> Callable[ + [productinputs.DeleteProductInputRequest], + Union[empty_pb2.Empty, Awaitable[empty_pb2.Empty]], + ]: + raise NotImplementedError() + + @property + def kind(self) -> str: + raise NotImplementedError() + + +__all__ = ("ProductInputsServiceTransport",) diff --git a/packages/google-shopping-merchant-products/google/shopping/merchant_products_v1beta/services/product_inputs_service/transports/grpc.py b/packages/google-shopping-merchant-products/google/shopping/merchant_products_v1beta/services/product_inputs_service/transports/grpc.py new file mode 100644 index 000000000000..236b33d28b25 --- /dev/null +++ b/packages/google-shopping-merchant-products/google/shopping/merchant_products_v1beta/services/product_inputs_service/transports/grpc.py @@ -0,0 +1,311 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
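To illustrate the credential and endpoint plumbing described above, here is a hedged sketch of the documented entry points; the key file path and endpoint value are placeholders.

```python
# Credential / endpoint sketch; "service-account.json" is a placeholder path.
from google.api_core.client_options import ClientOptions

from google.shopping import merchant_products_v1beta

# Service-account key file (from_service_account_json is an alias of this).
sa_client = merchant_products_v1beta.ProductInputsServiceClient.from_service_account_file(
    "service-account.json"
)

# Explicit endpoint override via client_options; without it the endpoint is
# derived from the "merchantapi.{UNIVERSE_DOMAIN}" template together with the
# GOOGLE_API_USE_MTLS_ENDPOINT and GOOGLE_API_USE_CLIENT_CERTIFICATE variables.
endpoint_client = merchant_products_v1beta.ProductInputsServiceClient(
    client_options=ClientOptions(api_endpoint="merchantapi.googleapis.com")
)
```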
+# +from typing import Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, grpc_helpers +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.protobuf import empty_pb2 # type: ignore +import grpc # type: ignore + +from google.shopping.merchant_products_v1beta.types import productinputs + +from .base import DEFAULT_CLIENT_INFO, ProductInputsServiceTransport + + +class ProductInputsServiceGrpcTransport(ProductInputsServiceTransport): + """gRPC backend transport for ProductInputsService. + + Service to use ProductInput resource. + This service works for products with online channel only. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + + _stubs: Dict[str, Callable] + + def __init__( + self, + *, + host: str = "merchantapi.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'merchantapi.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if a ``channel`` instance is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if a ``channel`` instance is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if a ``channel`` instance is provided. + channel (Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]]): + A ``Channel`` instance through which to make calls, or a Callable + that constructs and returns one. If set to None, ``self.create_channel`` + is used to create the channel. If a Callable is given, it will be called + with the same arguments as used in ``self.create_channel``. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. 
+ ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if a ``channel`` instance is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if isinstance(channel, grpc.Channel): + # Ignore credentials if a channel was passed. + credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + # initialize with the provided callable or the default channel + channel_init = channel or type(self).create_channel + self._grpc_channel = channel_init( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. 
This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @classmethod + def create_channel( + cls, + host: str = "merchantapi.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> grpc.Channel: + """Create and return a gRPC channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + grpc.Channel: A gRPC channel object. + + Raises: + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + + return grpc_helpers.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs, + ) + + @property + def grpc_channel(self) -> grpc.Channel: + """Return the channel designed to connect to this service.""" + return self._grpc_channel + + @property + def insert_product_input( + self, + ) -> Callable[ + [productinputs.InsertProductInputRequest], productinputs.ProductInput + ]: + r"""Return a callable for the insert product input method over gRPC. + + Uploads a product input to your Merchant Center + account. If an input with the same contentLanguage, + offerId, and dataSource already exists, this method + replaces that entry. + + After inserting, updating, or deleting a product input, + it may take several minutes before the processed product + can be retrieved. + + Returns: + Callable[[~.InsertProductInputRequest], + ~.ProductInput]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "insert_product_input" not in self._stubs: + self._stubs["insert_product_input"] = self.grpc_channel.unary_unary( + "/google.shopping.merchant.products.v1beta.ProductInputsService/InsertProductInput", + request_serializer=productinputs.InsertProductInputRequest.serialize, + response_deserializer=productinputs.ProductInput.deserialize, + ) + return self._stubs["insert_product_input"] + + @property + def delete_product_input( + self, + ) -> Callable[[productinputs.DeleteProductInputRequest], empty_pb2.Empty]: + r"""Return a callable for the delete product input method over gRPC. + + Deletes a product input from your Merchant Center + account. 
+ After inserting, updating, or deleting a product input, + it may take several minutes before the processed product + can be retrieved. + + Returns: + Callable[[~.DeleteProductInputRequest], + ~.Empty]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_product_input" not in self._stubs: + self._stubs["delete_product_input"] = self.grpc_channel.unary_unary( + "/google.shopping.merchant.products.v1beta.ProductInputsService/DeleteProductInput", + request_serializer=productinputs.DeleteProductInputRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs["delete_product_input"] + + def close(self): + self.grpc_channel.close() + + @property + def kind(self) -> str: + return "grpc" + + +__all__ = ("ProductInputsServiceGrpcTransport",) diff --git a/packages/google-shopping-merchant-products/google/shopping/merchant_products_v1beta/services/product_inputs_service/transports/grpc_asyncio.py b/packages/google-shopping-merchant-products/google/shopping/merchant_products_v1beta/services/product_inputs_service/transports/grpc_asyncio.py new file mode 100644 index 000000000000..3554c75c29b8 --- /dev/null +++ b/packages/google-shopping-merchant-products/google/shopping/merchant_products_v1beta/services/product_inputs_service/transports/grpc_asyncio.py @@ -0,0 +1,328 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1, grpc_helpers_async +from google.api_core import retry_async as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.protobuf import empty_pb2 # type: ignore +import grpc # type: ignore +from grpc.experimental import aio # type: ignore + +from google.shopping.merchant_products_v1beta.types import productinputs + +from .base import DEFAULT_CLIENT_INFO, ProductInputsServiceTransport +from .grpc import ProductInputsServiceGrpcTransport + + +class ProductInputsServiceGrpcAsyncIOTransport(ProductInputsServiceTransport): + """gRPC AsyncIO backend transport for ProductInputsService. + + Service to use ProductInput resource. + This service works for products with online channel only. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. 
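The gRPC transports also accept a ready-made channel in place of credentials; as the constructor comments above note, credentials are then ignored and the channel is used verbatim, while stubs are created lazily on first property access and cached. A hedged sketch with the synchronous transport (the local endpoint is a placeholder for a test server, not a real Merchant API endpoint):

```python
# Hedged sketch: the endpoint below is a placeholder for a local test server,
# and no RPC is actually issued.
import grpc

from google.shopping.merchant_products_v1beta.services.product_inputs_service.transports.grpc import (
    ProductInputsServiceGrpcTransport,
)

channel = grpc.insecure_channel("localhost:8080")
transport = ProductInputsServiceGrpcTransport(channel=channel)

# The provided channel is used verbatim and credential handling is skipped.
assert transport.grpc_channel is channel

# Stubs are created once and cached, so repeated property access returns the
# same callable object.
assert transport.insert_product_input is transport.insert_product_input
```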
+ """ + + _grpc_channel: aio.Channel + _stubs: Dict[str, Callable] = {} + + @classmethod + def create_channel( + cls, + host: str = "merchantapi.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> aio.Channel: + """Create and return a gRPC AsyncIO channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + aio.Channel: A gRPC AsyncIO channel object. + """ + + return grpc_helpers_async.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs, + ) + + def __init__( + self, + *, + host: str = "merchantapi.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[Union[aio.Channel, Callable[..., aio.Channel]]] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'merchantapi.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if a ``channel`` instance is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if a ``channel`` instance is provided. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + channel (Optional[Union[aio.Channel, Callable[..., aio.Channel]]]): + A ``Channel`` instance through which to make calls, or a Callable + that constructs and returns one. If set to None, ``self.create_channel`` + is used to create the channel. 
If a Callable is given, it will be called + with the same arguments as used in ``self.create_channel``. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if a ``channel`` instance is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if isinstance(channel, aio.Channel): + # Ignore credentials if a channel was passed. + credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. 
+ if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + # initialize with the provided callable or the default channel + channel_init = channel or type(self).create_channel + self._grpc_channel = channel_init( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @property + def grpc_channel(self) -> aio.Channel: + """Create the channel designed to connect to this service. + + This property caches on the instance; repeated calls return + the same channel. + """ + # Return the channel from cache. + return self._grpc_channel + + @property + def insert_product_input( + self, + ) -> Callable[ + [productinputs.InsertProductInputRequest], Awaitable[productinputs.ProductInput] + ]: + r"""Return a callable for the insert product input method over gRPC. + + Uploads a product input to your Merchant Center + account. If an input with the same contentLanguage, + offerId, and dataSource already exists, this method + replaces that entry. + + After inserting, updating, or deleting a product input, + it may take several minutes before the processed product + can be retrieved. + + Returns: + Callable[[~.InsertProductInputRequest], + Awaitable[~.ProductInput]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "insert_product_input" not in self._stubs: + self._stubs["insert_product_input"] = self.grpc_channel.unary_unary( + "/google.shopping.merchant.products.v1beta.ProductInputsService/InsertProductInput", + request_serializer=productinputs.InsertProductInputRequest.serialize, + response_deserializer=productinputs.ProductInput.deserialize, + ) + return self._stubs["insert_product_input"] + + @property + def delete_product_input( + self, + ) -> Callable[ + [productinputs.DeleteProductInputRequest], Awaitable[empty_pb2.Empty] + ]: + r"""Return a callable for the delete product input method over gRPC. + + Deletes a product input from your Merchant Center + account. + After inserting, updating, or deleting a product input, + it may take several minutes before the processed product + can be retrieved. 
+ + Returns: + Callable[[~.DeleteProductInputRequest], + Awaitable[~.Empty]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_product_input" not in self._stubs: + self._stubs["delete_product_input"] = self.grpc_channel.unary_unary( + "/google.shopping.merchant.products.v1beta.ProductInputsService/DeleteProductInput", + request_serializer=productinputs.DeleteProductInputRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs["delete_product_input"] + + def _prep_wrapped_messages(self, client_info): + """Precompute the wrapped methods, overriding the base class method to use async wrappers.""" + self._wrapped_methods = { + self.insert_product_input: gapic_v1.method_async.wrap_method( + self.insert_product_input, + default_timeout=None, + client_info=client_info, + ), + self.delete_product_input: gapic_v1.method_async.wrap_method( + self.delete_product_input, + default_timeout=None, + client_info=client_info, + ), + } + + def close(self): + return self.grpc_channel.close() + + +__all__ = ("ProductInputsServiceGrpcAsyncIOTransport",) diff --git a/packages/google-shopping-merchant-products/google/shopping/merchant_products_v1beta/services/product_inputs_service/transports/rest.py b/packages/google-shopping-merchant-products/google/shopping/merchant_products_v1beta/services/product_inputs_service/transports/rest.py new file mode 100644 index 000000000000..f5d9521f55b8 --- /dev/null +++ b/packages/google-shopping-merchant-products/google/shopping/merchant_products_v1beta/services/product_inputs_service/transports/rest.py @@ -0,0 +1,446 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
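The asyncio variant above differs from the synchronous transport mainly in that its RPC callables are awaitable, its methods are wrapped with `gapic_v1.method_async.wrap_method`, and `close()` hands back the aio channel's `close()` coroutine. A minimal sketch, again using anonymous credentials as an offline-only assumption:

```python
# A minimal sketch (not part of the generated sources); anonymous credentials
# keep it offline, so no RPC ever leaves the process.
import asyncio

from google.auth import credentials as ga_credentials
from grpc.experimental import aio

from google.shopping.merchant_products_v1beta.services.product_inputs_service.transports.grpc_asyncio import (
    ProductInputsServiceGrpcAsyncIOTransport,
)


async def main() -> None:
    transport = ProductInputsServiceGrpcAsyncIOTransport(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # The channel is an asyncio-native grpc.aio channel, and every RPC callable
    # returned by the transport properties is awaitable.
    assert isinstance(transport.grpc_channel, aio.Channel)
    # close() returns the aio channel's close() coroutine, so it is awaited here.
    await transport.close()


asyncio.run(main())
```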
+# + +import dataclasses +import json # type: ignore +import re +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, path_template, rest_helpers, rest_streaming +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.transport.requests import AuthorizedSession # type: ignore +from google.protobuf import json_format +import grpc # type: ignore +from requests import __version__ as requests_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + + +from google.protobuf import empty_pb2 # type: ignore + +from google.shopping.merchant_products_v1beta.types import productinputs + +from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO +from .base import ProductInputsServiceTransport + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) + + +class ProductInputsServiceRestInterceptor: + """Interceptor for ProductInputsService. + + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. + Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the ProductInputsServiceRestTransport. + + .. code-block:: python + class MyCustomProductInputsServiceInterceptor(ProductInputsServiceRestInterceptor): + def pre_delete_product_input(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def pre_insert_product_input(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_insert_product_input(self, response): + logging.log(f"Received response: {response}") + return response + + transport = ProductInputsServiceRestTransport(interceptor=MyCustomProductInputsServiceInterceptor()) + client = ProductInputsServiceClient(transport=transport) + + + """ + + def pre_delete_product_input( + self, + request: productinputs.DeleteProductInputRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[productinputs.DeleteProductInputRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for delete_product_input + + Override in a subclass to manipulate the request or metadata + before they are sent to the ProductInputsService server. + """ + return request, metadata + + def pre_insert_product_input( + self, + request: productinputs.InsertProductInputRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[productinputs.InsertProductInputRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for insert_product_input + + Override in a subclass to manipulate the request or metadata + before they are sent to the ProductInputsService server. 
+ """ + return request, metadata + + def post_insert_product_input( + self, response: productinputs.ProductInput + ) -> productinputs.ProductInput: + """Post-rpc interceptor for insert_product_input + + Override in a subclass to manipulate the response + after it is returned by the ProductInputsService server but before + it is returned to user code. + """ + return response + + +@dataclasses.dataclass +class ProductInputsServiceRestStub: + _session: AuthorizedSession + _host: str + _interceptor: ProductInputsServiceRestInterceptor + + +class ProductInputsServiceRestTransport(ProductInputsServiceTransport): + """REST backend transport for ProductInputsService. + + Service to use ProductInput resource. + This service works for products with online channel only. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends JSON representations of protocol buffers over HTTP/1.1 + + """ + + def __init__( + self, + *, + host: str = "merchantapi.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", + interceptor: Optional[ProductInputsServiceRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'merchantapi.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. It is ignored + if ``channel`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. + """ + # Run the base constructor + # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. 
+ # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the + # credentials object + maybe_url_match = re.match("^(?Phttp(?:s)?://)?(?P.*)$", host) + if maybe_url_match is None: + raise ValueError( + f"Unexpected hostname structure: {host}" + ) # pragma: NO COVER + + url_match_items = maybe_url_match.groupdict() + + host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host + + super().__init__( + host=host, + credentials=credentials, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + self._session = AuthorizedSession( + self._credentials, default_host=self.DEFAULT_HOST + ) + if client_cert_source_for_mtls: + self._session.configure_mtls_channel(client_cert_source_for_mtls) + self._interceptor = interceptor or ProductInputsServiceRestInterceptor() + self._prep_wrapped_messages(client_info) + + class _DeleteProductInput(ProductInputsServiceRestStub): + def __hash__(self): + return hash("DeleteProductInput") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "dataSource": "", + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: productinputs.DeleteProductInputRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ): + r"""Call the delete product input method over HTTP. + + Args: + request (~.productinputs.DeleteProductInputRequest): + The request object. Request message for the + DeleteProductInput method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/products/v1beta/{name=accounts/*/productInputs/*}", + }, + ] + request, metadata = self._interceptor.pre_delete_product_input( + request, metadata + ) + pb_request = productinputs.DeleteProductInputRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + class _InsertProductInput(ProductInputsServiceRestStub): + def __hash__(self): + return hash("InsertProductInput") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "dataSource": "", + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: productinputs.InsertProductInputRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> productinputs.ProductInput: + r"""Call the insert product input method over HTTP. + + Args: + request (~.productinputs.InsertProductInputRequest): + The request object. Request message for the + InsertProductInput method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.productinputs.ProductInput: + This resource represents input data you submit for a + product, not the processed product that you see in + Merchant Center, in Shopping ads, or across Google + surfaces. Product inputs, rules and supplemental data + source data are combined to create the processed + [product][google.shopping.content.bundles.Products.Product]. + + Required product input attributes to pass data + validation checks are primarily defined in the `Products + Data + Specification `__. + + The following attributes are required: + [feedLabel][google.shopping.content.bundles.Products.feed_label], + [contentLanguage][google.shopping.content.bundles.Products.content_language] + and + [offerId][google.shopping.content.bundles.Products.offer_id]. + + After inserting, updating, or deleting a product input, + it may take several minutes before the processed product + can be retrieved. + + All fields in the product input and its sub-messages + match the English name of their corresponding attribute + in the vertical spec with `some + exceptions `__. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/products/v1beta/{parent=accounts/*}/productInputs:insert", + "body": "product_input", + }, + ] + request, metadata = self._interceptor.pre_insert_product_input( + request, metadata + ) + pb_request = productinputs.InsertProductInputRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = productinputs.ProductInput() + pb_resp = productinputs.ProductInput.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_insert_product_input(resp) + return resp + + @property + def delete_product_input( + self, + ) -> Callable[[productinputs.DeleteProductInputRequest], empty_pb2.Empty]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._DeleteProductInput(self._session, self._host, self._interceptor) # type: ignore + + @property + def insert_product_input( + self, + ) -> Callable[ + [productinputs.InsertProductInputRequest], productinputs.ProductInput + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._InsertProductInput(self._session, self._host, self._interceptor) # type: ignore + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__ = ("ProductInputsServiceRestTransport",) diff --git a/packages/google-shopping-merchant-products/google/shopping/merchant_products_v1beta/services/products_service/__init__.py b/packages/google-shopping-merchant-products/google/shopping/merchant_products_v1beta/services/products_service/__init__.py new file mode 100644 index 000000000000..45cc600b1170 --- /dev/null +++ b/packages/google-shopping-merchant-products/google/shopping/merchant_products_v1beta/services/products_service/__init__.py @@ -0,0 +1,22 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
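The REST interceptor hooks shown above (`pre_delete_product_input`, `pre_insert_product_input`, `post_insert_product_input`) are the intended extension point for logging, validation, or response scrubbing without modifying the transport itself. Below is a slightly fuller version of the interceptor docstring's own sketch; the logging setup is an illustrative assumption, the top-level client re-export follows the pattern of the generated samples, and constructing the transport without explicit credentials relies on Application Default Credentials being configured.

```python
# Illustrative expansion of the interceptor docstring above; assumptions noted
# in the lead-in apply (logging config, ADC availability).
import logging

from google.shopping import merchant_products_v1beta
from google.shopping.merchant_products_v1beta.services.product_inputs_service.transports.rest import (
    ProductInputsServiceRestInterceptor,
    ProductInputsServiceRestTransport,
)

logging.basicConfig(level=logging.INFO)


class LoggingInterceptor(ProductInputsServiceRestInterceptor):
    """Logs every ProductInputsService call made over REST."""

    def pre_insert_product_input(self, request, metadata):
        logging.info("InsertProductInput request: %s", request)
        return request, metadata

    def post_insert_product_input(self, response):
        logging.info("InsertProductInput response: %s", response)
        return response

    def pre_delete_product_input(self, request, metadata):
        logging.info("DeleteProductInput request: %s", request)
        return request, metadata


# Wire the interceptor into the REST transport and hand the transport to the
# client, mirroring the pattern in the interceptor docstring.
transport = ProductInputsServiceRestTransport(interceptor=LoggingInterceptor())
client = merchant_products_v1beta.ProductInputsServiceClient(transport=transport)
```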
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from .async_client import ProductsServiceAsyncClient +from .client import ProductsServiceClient + +__all__ = ( + "ProductsServiceClient", + "ProductsServiceAsyncClient", +) diff --git a/packages/google-shopping-merchant-products/google/shopping/merchant_products_v1beta/services/products_service/async_client.py b/packages/google-shopping-merchant-products/google/shopping/merchant_products_v1beta/services/products_service/async_client.py new file mode 100644 index 000000000000..3d71669aa157 --- /dev/null +++ b/packages/google-shopping-merchant-products/google/shopping/merchant_products_v1beta/services/products_service/async_client.py @@ -0,0 +1,530 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +import functools +import re +from typing import ( + Callable, + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, +) + +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry_async as retries +from google.api_core.client_options import ClientOptions +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.shopping.merchant_products_v1beta import gapic_version as package_version + +try: + OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.AsyncRetry, object, None] # type: ignore + +from google.shopping.type.types import types + +from google.shopping.merchant_products_v1beta.services.products_service import pagers +from google.shopping.merchant_products_v1beta.types import products, products_common + +from .client import ProductsServiceClient +from .transports.base import DEFAULT_CLIENT_INFO, ProductsServiceTransport +from .transports.grpc_asyncio import ProductsServiceGrpcAsyncIOTransport + + +class ProductsServiceAsyncClient: + """Service to use Product resource. + This service works for products with online channel only. + """ + + _client: ProductsServiceClient + + # Copy defaults from the synchronous client for use here. + # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. 
+ DEFAULT_ENDPOINT = ProductsServiceClient.DEFAULT_ENDPOINT + DEFAULT_MTLS_ENDPOINT = ProductsServiceClient.DEFAULT_MTLS_ENDPOINT + _DEFAULT_ENDPOINT_TEMPLATE = ProductsServiceClient._DEFAULT_ENDPOINT_TEMPLATE + _DEFAULT_UNIVERSE = ProductsServiceClient._DEFAULT_UNIVERSE + + product_path = staticmethod(ProductsServiceClient.product_path) + parse_product_path = staticmethod(ProductsServiceClient.parse_product_path) + common_billing_account_path = staticmethod( + ProductsServiceClient.common_billing_account_path + ) + parse_common_billing_account_path = staticmethod( + ProductsServiceClient.parse_common_billing_account_path + ) + common_folder_path = staticmethod(ProductsServiceClient.common_folder_path) + parse_common_folder_path = staticmethod( + ProductsServiceClient.parse_common_folder_path + ) + common_organization_path = staticmethod( + ProductsServiceClient.common_organization_path + ) + parse_common_organization_path = staticmethod( + ProductsServiceClient.parse_common_organization_path + ) + common_project_path = staticmethod(ProductsServiceClient.common_project_path) + parse_common_project_path = staticmethod( + ProductsServiceClient.parse_common_project_path + ) + common_location_path = staticmethod(ProductsServiceClient.common_location_path) + parse_common_location_path = staticmethod( + ProductsServiceClient.parse_common_location_path + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + ProductsServiceAsyncClient: The constructed client. + """ + return ProductsServiceClient.from_service_account_info.__func__(ProductsServiceAsyncClient, info, *args, **kwargs) # type: ignore + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + ProductsServiceAsyncClient: The constructed client. + """ + return ProductsServiceClient.from_service_account_file.__func__(ProductsServiceAsyncClient, filename, *args, **kwargs) # type: ignore + + from_service_account_json = from_service_account_file + + @classmethod + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[ClientOptions] = None + ): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. 
+ + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + return ProductsServiceClient.get_mtls_endpoint_and_cert_source(client_options) # type: ignore + + @property + def transport(self) -> ProductsServiceTransport: + """Returns the transport used by the client instance. + + Returns: + ProductsServiceTransport: The transport used by the client instance. + """ + return self._client.transport + + @property + def api_endpoint(self): + """Return the API endpoint used by the client instance. + + Returns: + str: The API endpoint used by the client instance. + """ + return self._client._api_endpoint + + @property + def universe_domain(self) -> str: + """Return the universe domain used by the client instance. + + Returns: + str: The universe domain used + by the client instance. + """ + return self._client._universe_domain + + get_transport_class = functools.partial( + type(ProductsServiceClient).get_transport_class, type(ProductsServiceClient) + ) + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[ + Union[ + str, ProductsServiceTransport, Callable[..., ProductsServiceTransport] + ] + ] = "grpc_asyncio", + client_options: Optional[ClientOptions] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the products service async client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Optional[Union[str,ProductsServiceTransport,Callable[..., ProductsServiceTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport to use. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the ProductsServiceTransport constructor. + If set to None, a transport is chosen automatically. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): + Custom options for the client. + + 1. The ``api_endpoint`` property can be used to override the + default endpoint provided by the client when ``transport`` is + not explicitly provided. Only if this property is not set and + ``transport`` was not explicitly provided, the endpoint is + determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment + variable, which have one of the following values: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto-switch to the + default mTLS endpoint if client certificate is present; this is + the default value). + + 2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide a client certificate for mTLS transport. If + not provided, the default SSL client certificate will be used if + present. 
If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + 3. The ``universe_domain`` property can be used to override the + default "googleapis.com" universe. Note that ``api_endpoint`` + property still takes precedence; and ``universe_domain`` is + currently not supported for mTLS. + + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + """ + self._client = ProductsServiceClient( + credentials=credentials, + transport=transport, + client_options=client_options, + client_info=client_info, + ) + + async def get_product( + self, + request: Optional[Union[products.GetProductRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> products.Product: + r"""Retrieves the processed product from your Merchant + Center account. + After inserting, updating, or deleting a product input, + it may take several minutes before the updated final + product can be retrieved. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.shopping import merchant_products_v1beta + + async def sample_get_product(): + # Create a client + client = merchant_products_v1beta.ProductsServiceAsyncClient() + + # Initialize request argument(s) + request = merchant_products_v1beta.GetProductRequest( + name="name_value", + ) + + # Make the request + response = await client.get_product(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.shopping.merchant_products_v1beta.types.GetProductRequest, dict]]): + The request object. Request message for the GetProduct + method. + name (:class:`str`): + Required. The name of the product to retrieve. Format: + ``accounts/{account}/products/{product}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.shopping.merchant_products_v1beta.types.Product: + The processed product, built from multiple [product + inputs][[google.shopping.content.bundles.Products.ProductInput] + after applying rules and supplemental data sources. + This processed product matches what is shown in your + Merchant Center account and in Shopping ads and other + surfaces across Google. Each product is built from + exactly one primary data source product input, and + multiple supplemental data source inputs. 
After + inserting, updating, or deleting a product input, it + may take several minutes before the updated processed + product can be retrieved. + + All fields in the processed product and its + sub-messages match the name of their corresponding + attribute in the [Product data + specification](\ https://support.google.com/merchants/answer/7052112) + with some exceptions. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, products.GetProductRequest): + request = products.GetProductRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.get_product + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_products( + self, + request: Optional[Union[products.ListProductsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListProductsAsyncPager: + r"""Lists the processed products in your Merchant Center + account. The response might contain fewer items than + specified by pageSize. Rely on pageToken to determine if + there are more items to be requested. + + After inserting, updating, or deleting a product input, + it may take several minutes before the updated processed + product can be retrieved. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.shopping import merchant_products_v1beta + + async def sample_list_products(): + # Create a client + client = merchant_products_v1beta.ProductsServiceAsyncClient() + + # Initialize request argument(s) + request = merchant_products_v1beta.ListProductsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_products(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.shopping.merchant_products_v1beta.types.ListProductsRequest, dict]]): + The request object. Request message for the ListProducts + method. + parent (:class:`str`): + Required. The account to list + processed products for. Format: + accounts/{account} + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.shopping.merchant_products_v1beta.services.products_service.pagers.ListProductsAsyncPager: + Response message for the ListProducts + method. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, products.ListProductsRequest): + request = products.ListProductsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.list_products + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListProductsAsyncPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def __aenter__(self) -> "ProductsServiceAsyncClient": + return self + + async def __aexit__(self, exc_type, exc, tb): + await self.transport.close() + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("ProductsServiceAsyncClient",) diff --git a/packages/google-shopping-merchant-products/google/shopping/merchant_products_v1beta/services/products_service/client.py b/packages/google-shopping-merchant-products/google/shopping/merchant_products_v1beta/services/products_service/client.py new file mode 100644 index 000000000000..0b51963b8dcf --- /dev/null +++ b/packages/google-shopping-merchant-products/google/shopping/merchant_products_v1beta/services/products_service/client.py @@ -0,0 +1,946 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +import os +import re +from typing import ( + Callable, + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, + cast, +) +import warnings + +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.shopping.merchant_products_v1beta import gapic_version as package_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + +from google.shopping.type.types import types + +from google.shopping.merchant_products_v1beta.services.products_service import pagers +from google.shopping.merchant_products_v1beta.types import products, products_common + +from .transports.base import DEFAULT_CLIENT_INFO, ProductsServiceTransport +from .transports.grpc import ProductsServiceGrpcTransport +from .transports.grpc_asyncio import ProductsServiceGrpcAsyncIOTransport +from .transports.rest import ProductsServiceRestTransport + + +class ProductsServiceClientMeta(type): + """Metaclass for the ProductsService client. + + This provides class-level methods for building and retrieving + support objects (e.g. transport) without polluting the client instance + objects. 
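Editor's note: the async client above implements ``__aenter__``/``__aexit__``, so the transport can be closed deterministically with ``async with``. A minimal usage sketch (not part of the generated patch), assuming Application Default Credentials are configured; the ``accounts/123`` parent is a placeholder:

```python
import asyncio

from google.shopping import merchant_products_v1beta


async def main() -> None:
    # "async with" closes the underlying transport on exit, mirroring the
    # __aenter__/__aexit__ pair defined on the async client.
    async with merchant_products_v1beta.ProductsServiceAsyncClient() as client:
        request = merchant_products_v1beta.ListProductsRequest(
            parent="accounts/123",  # placeholder account ID
        )
        # Awaiting the call yields the async pager; iterate it with async for.
        pager = await client.list_products(request=request)
        async for product in pager:
            print(product.name)


if __name__ == "__main__":
    asyncio.run(main())
```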
+ """ + + _transport_registry = ( + OrderedDict() + ) # type: Dict[str, Type[ProductsServiceTransport]] + _transport_registry["grpc"] = ProductsServiceGrpcTransport + _transport_registry["grpc_asyncio"] = ProductsServiceGrpcAsyncIOTransport + _transport_registry["rest"] = ProductsServiceRestTransport + + def get_transport_class( + cls, + label: Optional[str] = None, + ) -> Type[ProductsServiceTransport]: + """Returns an appropriate transport class. + + Args: + label: The name of the desired transport. If none is + provided, then the first transport in the registry is used. + + Returns: + The transport class to use. + """ + # If a specific transport is requested, return that one. + if label: + return cls._transport_registry[label] + + # No transport is requested; return the default (that is, the first one + # in the dictionary). + return next(iter(cls._transport_registry.values())) + + +class ProductsServiceClient(metaclass=ProductsServiceClientMeta): + """Service to use Product resource. + This service works for products with online channel only. + """ + + @staticmethod + def _get_default_mtls_endpoint(api_endpoint): + """Converts api endpoint to mTLS endpoint. + + Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to + "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. + Args: + api_endpoint (Optional[str]): the api endpoint to convert. + Returns: + str: converted mTLS api endpoint. + """ + if not api_endpoint: + return api_endpoint + + mtls_endpoint_re = re.compile( + r"(?P[^.]+)(?P\.mtls)?(?P\.sandbox)?(?P\.googleapis\.com)?" + ) + + m = mtls_endpoint_re.match(api_endpoint) + name, mtls, sandbox, googledomain = m.groups() + if mtls or not googledomain: + return api_endpoint + + if sandbox: + return api_endpoint.replace( + "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" + ) + + return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") + + # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. + DEFAULT_ENDPOINT = "merchantapi.googleapis.com" + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore + DEFAULT_ENDPOINT + ) + + _DEFAULT_ENDPOINT_TEMPLATE = "merchantapi.{UNIVERSE_DOMAIN}" + _DEFAULT_UNIVERSE = "googleapis.com" + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + ProductsServiceClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + ProductsServiceClient: The constructed client. 
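Editor's note: the ``from_service_account_info``/``from_service_account_file`` factories above accept a key dict or key file path and forward everything else to the constructor. A short usage sketch (not part of the patch); the key file name is a placeholder and the call assumes a valid key:

```python
from google.shopping import merchant_products_v1beta

# From a key file on disk; from_service_account_json is an alias for this.
client = merchant_products_v1beta.ProductsServiceClient.from_service_account_file(
    "service-account-key.json"  # placeholder path
)

# Or from an already-parsed dict, e.g. loaded from a secret manager:
#   info = json.loads(secret_payload)
#   client = merchant_products_v1beta.ProductsServiceClient.from_service_account_info(info)

# The default host picks up port 443 in the base transport.
print(client.transport.host)  # merchantapi.googleapis.com:443
```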
+ """ + credentials = service_account.Credentials.from_service_account_file(filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> ProductsServiceTransport: + """Returns the transport used by the client instance. + + Returns: + ProductsServiceTransport: The transport used by the client + instance. + """ + return self._transport + + @staticmethod + def product_path( + account: str, + product: str, + ) -> str: + """Returns a fully-qualified product string.""" + return "accounts/{account}/products/{product}".format( + account=account, + product=product, + ) + + @staticmethod + def parse_product_path(path: str) -> Dict[str, str]: + """Parses a product path into its component segments.""" + m = re.match(r"^accounts/(?P.+?)/products/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_billing_account_path( + billing_account: str, + ) -> str: + """Returns a fully-qualified billing_account string.""" + return "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + + @staticmethod + def parse_common_billing_account_path(path: str) -> Dict[str, str]: + """Parse a billing_account path into its component segments.""" + m = re.match(r"^billingAccounts/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_folder_path( + folder: str, + ) -> str: + """Returns a fully-qualified folder string.""" + return "folders/{folder}".format( + folder=folder, + ) + + @staticmethod + def parse_common_folder_path(path: str) -> Dict[str, str]: + """Parse a folder path into its component segments.""" + m = re.match(r"^folders/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_organization_path( + organization: str, + ) -> str: + """Returns a fully-qualified organization string.""" + return "organizations/{organization}".format( + organization=organization, + ) + + @staticmethod + def parse_common_organization_path(path: str) -> Dict[str, str]: + """Parse a organization path into its component segments.""" + m = re.match(r"^organizations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_project_path( + project: str, + ) -> str: + """Returns a fully-qualified project string.""" + return "projects/{project}".format( + project=project, + ) + + @staticmethod + def parse_common_project_path(path: str) -> Dict[str, str]: + """Parse a project path into its component segments.""" + m = re.match(r"^projects/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_location_path( + project: str, + location: str, + ) -> str: + """Returns a fully-qualified location string.""" + return "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) + + @staticmethod + def parse_common_location_path(path: str) -> Dict[str, str]: + """Parse a location path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @classmethod + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[client_options_lib.ClientOptions] = None + ): + """Deprecated. Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. 
+ (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + + warnings.warn( + "get_mtls_endpoint_and_cert_source is deprecated. Use the api_endpoint property instead.", + DeprecationWarning, + ) + if client_options is None: + client_options = client_options_lib.ClientOptions() + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_client_cert not in ("true", "false"): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError( + "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Figure out the client cert source to use. + client_cert_source = None + if use_client_cert == "true": + if client_options.client_cert_source: + client_cert_source = client_options.client_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + + # Figure out which api endpoint to use. + if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + elif use_mtls_endpoint == "always" or ( + use_mtls_endpoint == "auto" and client_cert_source + ): + api_endpoint = cls.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = cls.DEFAULT_ENDPOINT + + return api_endpoint, client_cert_source + + @staticmethod + def _read_environment_variables(): + """Returns the environment variables used by the client. + + Returns: + Tuple[bool, str, str]: returns the GOOGLE_API_USE_CLIENT_CERTIFICATE, + GOOGLE_API_USE_MTLS_ENDPOINT, and GOOGLE_CLOUD_UNIVERSE_DOMAIN environment variables. + + Raises: + ValueError: If GOOGLE_API_USE_CLIENT_CERTIFICATE is not + any of ["true", "false"]. + google.auth.exceptions.MutualTLSChannelError: If GOOGLE_API_USE_MTLS_ENDPOINT + is not any of ["auto", "never", "always"]. 
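Editor's note: the resource-path helpers shown a little earlier (``product_path``, ``parse_product_path`` and the ``common_*`` variants) are plain string formatters and regex parsers, so they can be exercised without any credentials. A small round-trip sketch with placeholder IDs:

```python
from google.shopping import merchant_products_v1beta

ProductsServiceClient = merchant_products_v1beta.ProductsServiceClient

# Build a fully-qualified resource name from its segments.
path = ProductsServiceClient.product_path(account="123", product="en~US~sku123")
print(path)  # accounts/123/products/en~US~sku123

# Parse it back into its components.
print(ProductsServiceClient.parse_product_path(path))
# {'account': '123', 'product': 'en~US~sku123'}

# Paths that do not match the expected pattern parse to an empty dict.
print(ProductsServiceClient.parse_product_path("not/a/product/path"))  # {}
```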
+ """ + use_client_cert = os.getenv( + "GOOGLE_API_USE_CLIENT_CERTIFICATE", "false" + ).lower() + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto").lower() + universe_domain_env = os.getenv("GOOGLE_CLOUD_UNIVERSE_DOMAIN") + if use_client_cert not in ("true", "false"): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError( + "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + return use_client_cert == "true", use_mtls_endpoint, universe_domain_env + + @staticmethod + def _get_client_cert_source(provided_cert_source, use_cert_flag): + """Return the client cert source to be used by the client. + + Args: + provided_cert_source (bytes): The client certificate source provided. + use_cert_flag (bool): A flag indicating whether to use the client certificate. + + Returns: + bytes or None: The client cert source to be used by the client. + """ + client_cert_source = None + if use_cert_flag: + if provided_cert_source: + client_cert_source = provided_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + return client_cert_source + + @staticmethod + def _get_api_endpoint( + api_override, client_cert_source, universe_domain, use_mtls_endpoint + ): + """Return the API endpoint used by the client. + + Args: + api_override (str): The API endpoint override. If specified, this is always + the return value of this function and the other arguments are not used. + client_cert_source (bytes): The client certificate source used by the client. + universe_domain (str): The universe domain used by the client. + use_mtls_endpoint (str): How to use the mTLS endpoint, which depends also on the other parameters. + Possible values are "always", "auto", or "never". + + Returns: + str: The API endpoint to be used by the client. + """ + if api_override is not None: + api_endpoint = api_override + elif use_mtls_endpoint == "always" or ( + use_mtls_endpoint == "auto" and client_cert_source + ): + _default_universe = ProductsServiceClient._DEFAULT_UNIVERSE + if universe_domain != _default_universe: + raise MutualTLSChannelError( + f"mTLS is not supported in any universe other than {_default_universe}." + ) + api_endpoint = ProductsServiceClient.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = ProductsServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=universe_domain + ) + return api_endpoint + + @staticmethod + def _get_universe_domain( + client_universe_domain: Optional[str], universe_domain_env: Optional[str] + ) -> str: + """Return the universe domain used by the client. + + Args: + client_universe_domain (Optional[str]): The universe domain configured via the client options. + universe_domain_env (Optional[str]): The universe domain configured via the "GOOGLE_CLOUD_UNIVERSE_DOMAIN" environment variable. + + Returns: + str: The universe domain to be used by the client. + + Raises: + ValueError: If the universe domain is an empty string. 
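Editor's note: ``_get_api_endpoint`` above resolves the endpoint with a fixed precedence: explicit override first, then the mTLS endpoint (default universe only), then the endpoint template formatted with the universe domain. A condensed standalone sketch of that precedence; it raises ``ValueError`` instead of ``MutualTLSChannelError`` to stay dependency-free:

```python
from typing import Optional

_DEFAULT_UNIVERSE = "googleapis.com"
_ENDPOINT_TEMPLATE = "merchantapi.{UNIVERSE_DOMAIN}"
_MTLS_ENDPOINT = "merchantapi.mtls.googleapis.com"


def pick_endpoint(
    api_override: Optional[str],
    have_client_cert: bool,
    universe_domain: str,
    use_mtls_endpoint: str,  # "always", "auto", or "never"
) -> str:
    # An explicit override always wins.
    if api_override is not None:
        return api_override
    # mTLS is chosen when forced, or when "auto" and a client cert exists.
    if use_mtls_endpoint == "always" or (
        use_mtls_endpoint == "auto" and have_client_cert
    ):
        if universe_domain != _DEFAULT_UNIVERSE:
            raise ValueError("mTLS is only supported in the default universe.")
        return _MTLS_ENDPOINT
    # Otherwise the template is formatted with the universe domain.
    return _ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=universe_domain)


print(pick_endpoint(None, False, "googleapis.com", "auto"))  # merchantapi.googleapis.com
print(pick_endpoint(None, True, "googleapis.com", "auto"))   # merchantapi.mtls.googleapis.com
print(pick_endpoint(None, False, "example.edu", "never"))    # merchantapi.example.edu
```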
+ """ + universe_domain = ProductsServiceClient._DEFAULT_UNIVERSE + if client_universe_domain is not None: + universe_domain = client_universe_domain + elif universe_domain_env is not None: + universe_domain = universe_domain_env + if len(universe_domain.strip()) == 0: + raise ValueError("Universe Domain cannot be an empty string.") + return universe_domain + + @staticmethod + def _compare_universes( + client_universe: str, credentials: ga_credentials.Credentials + ) -> bool: + """Returns True iff the universe domains used by the client and credentials match. + + Args: + client_universe (str): The universe domain configured via the client options. + credentials (ga_credentials.Credentials): The credentials being used in the client. + + Returns: + bool: True iff client_universe matches the universe in credentials. + + Raises: + ValueError: when client_universe does not match the universe in credentials. + """ + + default_universe = ProductsServiceClient._DEFAULT_UNIVERSE + credentials_universe = getattr(credentials, "universe_domain", default_universe) + + if client_universe != credentials_universe: + raise ValueError( + "The configured universe domain " + f"({client_universe}) does not match the universe domain " + f"found in the credentials ({credentials_universe}). " + "If you haven't configured the universe domain explicitly, " + f"`{default_universe}` is the default." + ) + return True + + def _validate_universe_domain(self): + """Validates client's and credentials' universe domains are consistent. + + Returns: + bool: True iff the configured universe domain is valid. + + Raises: + ValueError: If the configured universe domain is not valid. + """ + self._is_universe_domain_valid = ( + self._is_universe_domain_valid + or ProductsServiceClient._compare_universes( + self.universe_domain, self.transport._credentials + ) + ) + return self._is_universe_domain_valid + + @property + def api_endpoint(self): + """Return the API endpoint used by the client instance. + + Returns: + str: The API endpoint used by the client instance. + """ + return self._api_endpoint + + @property + def universe_domain(self) -> str: + """Return the universe domain used by the client instance. + + Returns: + str: The universe domain used by the client instance. + """ + return self._universe_domain + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[ + Union[ + str, ProductsServiceTransport, Callable[..., ProductsServiceTransport] + ] + ] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the products service client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Optional[Union[str,ProductsServiceTransport,Callable[..., ProductsServiceTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the ProductsServiceTransport constructor. + If set to None, a transport is chosen automatically. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): + Custom options for the client. + + 1. 
The ``api_endpoint`` property can be used to override the + default endpoint provided by the client when ``transport`` is + not explicitly provided. Only if this property is not set and + ``transport`` was not explicitly provided, the endpoint is + determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment + variable, which have one of the following values: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto-switch to the + default mTLS endpoint if client certificate is present; this is + the default value). + + 2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide a client certificate for mTLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + 3. The ``universe_domain`` property can be used to override the + default "googleapis.com" universe. Note that the ``api_endpoint`` + property still takes precedence; and ``universe_domain`` is + currently not supported for mTLS. + + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + """ + self._client_options = client_options + if isinstance(self._client_options, dict): + self._client_options = client_options_lib.from_dict(self._client_options) + if self._client_options is None: + self._client_options = client_options_lib.ClientOptions() + self._client_options = cast( + client_options_lib.ClientOptions, self._client_options + ) + + universe_domain_opt = getattr(self._client_options, "universe_domain", None) + + ( + self._use_client_cert, + self._use_mtls_endpoint, + self._universe_domain_env, + ) = ProductsServiceClient._read_environment_variables() + self._client_cert_source = ProductsServiceClient._get_client_cert_source( + self._client_options.client_cert_source, self._use_client_cert + ) + self._universe_domain = ProductsServiceClient._get_universe_domain( + universe_domain_opt, self._universe_domain_env + ) + self._api_endpoint = None # updated below, depending on `transport` + + # Initialize the universe domain validation. + self._is_universe_domain_valid = False + + api_key_value = getattr(self._client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError( + "client_options.api_key and credentials are mutually exclusive" + ) + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + transport_provided = isinstance(transport, ProductsServiceTransport) + if transport_provided: + # transport is a ProductsServiceTransport instance. + if credentials or self._client_options.credentials_file or api_key_value: + raise ValueError( + "When providing a transport instance, " + "provide its credentials directly." + ) + if self._client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." 
+ ) + self._transport = cast(ProductsServiceTransport, transport) + self._api_endpoint = self._transport.host + + self._api_endpoint = ( + self._api_endpoint + or ProductsServiceClient._get_api_endpoint( + self._client_options.api_endpoint, + self._client_cert_source, + self._universe_domain, + self._use_mtls_endpoint, + ) + ) + + if not transport_provided: + import google.auth._default # type: ignore + + if api_key_value and hasattr( + google.auth._default, "get_api_key_credentials" + ): + credentials = google.auth._default.get_api_key_credentials( + api_key_value + ) + + transport_init: Union[ + Type[ProductsServiceTransport], Callable[..., ProductsServiceTransport] + ] = ( + type(self).get_transport_class(transport) + if isinstance(transport, str) or transport is None + else cast(Callable[..., ProductsServiceTransport], transport) + ) + # initialize with the provided callable or the passed in class + self._transport = transport_init( + credentials=credentials, + credentials_file=self._client_options.credentials_file, + host=self._api_endpoint, + scopes=self._client_options.scopes, + client_cert_source_for_mtls=self._client_cert_source, + quota_project_id=self._client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + api_audience=self._client_options.api_audience, + ) + + def get_product( + self, + request: Optional[Union[products.GetProductRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> products.Product: + r"""Retrieves the processed product from your Merchant + Center account. + After inserting, updating, or deleting a product input, + it may take several minutes before the updated final + product can be retrieved. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.shopping import merchant_products_v1beta + + def sample_get_product(): + # Create a client + client = merchant_products_v1beta.ProductsServiceClient() + + # Initialize request argument(s) + request = merchant_products_v1beta.GetProductRequest( + name="name_value", + ) + + # Make the request + response = client.get_product(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.shopping.merchant_products_v1beta.types.GetProductRequest, dict]): + The request object. Request message for the GetProduct + method. + name (str): + Required. The name of the product to retrieve. Format: + ``accounts/{account}/products/{product}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
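Editor's note: as the constructor above documents, ``transport`` may be a registry name (``"grpc"``, ``"grpc_asyncio"``, ``"rest"``), a transport instance, or a callable, and ``client_options`` may be a plain dict. A brief sketch picking the REST transport, assuming Application Default Credentials are available; the endpoint value only illustrates the dict form and matches the default:

```python
from google.shopping import merchant_products_v1beta

client = merchant_products_v1beta.ProductsServiceClient(
    transport="rest",
    client_options={"api_endpoint": "merchantapi.googleapis.com"},
)
print(type(client.transport).__name__)  # ProductsServiceRestTransport
```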
+ + Returns: + google.shopping.merchant_products_v1beta.types.Product: + The processed product, built from multiple [product + inputs][[google.shopping.content.bundles.Products.ProductInput] + after applying rules and supplemental data sources. + This processed product matches what is shown in your + Merchant Center account and in Shopping ads and other + surfaces across Google. Each product is built from + exactly one primary data source product input, and + multiple supplemental data source inputs. After + inserting, updating, or deleting a product input, it + may take several minutes before the updated processed + product can be retrieved. + + All fields in the processed product and its + sub-messages match the name of their corresponding + attribute in the [Product data + specification](\ https://support.google.com/merchants/answer/7052112) + with some exceptions. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, products.GetProductRequest): + request = products.GetProductRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_product] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def list_products( + self, + request: Optional[Union[products.ListProductsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListProductsPager: + r"""Lists the processed products in your Merchant Center + account. The response might contain fewer items than + specified by pageSize. Rely on pageToken to determine if + there are more items to be requested. + + After inserting, updating, or deleting a product input, + it may take several minutes before the updated processed + product can be retrieved. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.shopping import merchant_products_v1beta + + def sample_list_products(): + # Create a client + client = merchant_products_v1beta.ProductsServiceClient() + + # Initialize request argument(s) + request = merchant_products_v1beta.ListProductsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_products(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.shopping.merchant_products_v1beta.types.ListProductsRequest, dict]): + The request object. Request message for the ListProducts + method. + parent (str): + Required. The account to list + processed products for. Format: + accounts/{account} + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.shopping.merchant_products_v1beta.services.products_service.pagers.ListProductsPager: + Response message for the ListProducts + method. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, products.ListProductsRequest): + request = products.ListProductsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_products] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListProductsPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def __enter__(self) -> "ProductsServiceClient": + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! 
Exiting the with block will CLOSE the transport + and may cause errors in other clients! + """ + self.transport.close() + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("ProductsServiceClient",) diff --git a/packages/google-shopping-merchant-products/google/shopping/merchant_products_v1beta/services/products_service/pagers.py b/packages/google-shopping-merchant-products/google/shopping/merchant_products_v1beta/services/products_service/pagers.py new file mode 100644 index 000000000000..487976309634 --- /dev/null +++ b/packages/google-shopping-merchant-products/google/shopping/merchant_products_v1beta/services/products_service/pagers.py @@ -0,0 +1,155 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import ( + Any, + AsyncIterator, + Awaitable, + Callable, + Iterator, + Optional, + Sequence, + Tuple, +) + +from google.shopping.merchant_products_v1beta.types import products + + +class ListProductsPager: + """A pager for iterating through ``list_products`` requests. + + This class thinly wraps an initial + :class:`google.shopping.merchant_products_v1beta.types.ListProductsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``products`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListProducts`` requests and continue to iterate + through the ``products`` field on the + corresponding responses. + + All the usual :class:`google.shopping.merchant_products_v1beta.types.ListProductsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., products.ListProductsResponse], + request: products.ListProductsRequest, + response: products.ListProductsResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.shopping.merchant_products_v1beta.types.ListProductsRequest): + The initial request object. + response (google.shopping.merchant_products_v1beta.types.ListProductsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
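Editor's note: the sync ``list_products`` call returns the ``ListProductsPager`` defined in this module, which fetches follow-up pages lazily via ``next_page_token``. A usage sketch (not part of the patch), assuming Application Default Credentials and a placeholder parent:

```python
from google.shopping import merchant_products_v1beta

client = merchant_products_v1beta.ProductsServiceClient()
pager = client.list_products(parent="accounts/123")  # placeholder parent

# Item-level iteration: additional pages are requested transparently.
for product in pager:
    print(product.name)

# Alternatively, walk page by page; each page is a full ListProductsResponse:
# for page in pager.pages:
#     print(page.next_page_token, len(page.products))
```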
+ """ + self._method = method + self._request = products.ListProductsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[products.ListProductsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[products.Product]: + for page in self.pages: + yield from page.products + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListProductsAsyncPager: + """A pager for iterating through ``list_products`` requests. + + This class thinly wraps an initial + :class:`google.shopping.merchant_products_v1beta.types.ListProductsResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``products`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListProducts`` requests and continue to iterate + through the ``products`` field on the + corresponding responses. + + All the usual :class:`google.shopping.merchant_products_v1beta.types.ListProductsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., Awaitable[products.ListProductsResponse]], + request: products.ListProductsRequest, + response: products.ListProductsResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.shopping.merchant_products_v1beta.types.ListProductsRequest): + The initial request object. + response (google.shopping.merchant_products_v1beta.types.ListProductsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = products.ListProductsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[products.ListProductsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + + def __aiter__(self) -> AsyncIterator[products.Product]: + async def async_generator(): + async for page in self.pages: + for response in page.products: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) diff --git a/packages/google-shopping-merchant-products/google/shopping/merchant_products_v1beta/services/products_service/transports/__init__.py b/packages/google-shopping-merchant-products/google/shopping/merchant_products_v1beta/services/products_service/transports/__init__.py new file mode 100644 index 000000000000..f8057ff0cf87 --- /dev/null +++ b/packages/google-shopping-merchant-products/google/shopping/merchant_products_v1beta/services/products_service/transports/__init__.py @@ -0,0 +1,36 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +from typing import Dict, Type + +from .base import ProductsServiceTransport +from .grpc import ProductsServiceGrpcTransport +from .grpc_asyncio import ProductsServiceGrpcAsyncIOTransport +from .rest import ProductsServiceRestInterceptor, ProductsServiceRestTransport + +# Compile a registry of transports. +_transport_registry = OrderedDict() # type: Dict[str, Type[ProductsServiceTransport]] +_transport_registry["grpc"] = ProductsServiceGrpcTransport +_transport_registry["grpc_asyncio"] = ProductsServiceGrpcAsyncIOTransport +_transport_registry["rest"] = ProductsServiceRestTransport + +__all__ = ( + "ProductsServiceTransport", + "ProductsServiceGrpcTransport", + "ProductsServiceGrpcAsyncIOTransport", + "ProductsServiceRestTransport", + "ProductsServiceRestInterceptor", +) diff --git a/packages/google-shopping-merchant-products/google/shopping/merchant_products_v1beta/services/products_service/transports/base.py b/packages/google-shopping-merchant-products/google/shopping/merchant_products_v1beta/services/products_service/transports/base.py new file mode 100644 index 000000000000..2663e1366a8f --- /dev/null +++ b/packages/google-shopping-merchant-products/google/shopping/merchant_products_v1beta/services/products_service/transports/base.py @@ -0,0 +1,173 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import abc +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union + +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.shopping.merchant_products_v1beta import gapic_version as package_version +from google.shopping.merchant_products_v1beta.types import products + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +class ProductsServiceTransport(abc.ABC): + """Abstract transport class for ProductsService.""" + + AUTH_SCOPES = ("https://www.googleapis.com/auth/content",) + + DEFAULT_HOST: str = "merchantapi.googleapis.com" + + def __init__( + self, + *, + host: str = DEFAULT_HOST, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + **kwargs, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'merchantapi.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A list of scopes. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + """ + + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} + + # Save the scopes. + self._scopes = scopes + + # If no credentials are provided, then determine the appropriate + # defaults. 
+ if credentials and credentials_file: + raise core_exceptions.DuplicateCredentialArgs( + "'credentials_file' and 'credentials' are mutually exclusive" + ) + + if credentials_file is not None: + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, **scopes_kwargs, quota_project_id=quota_project_id + ) + elif credentials is None: + credentials, _ = google.auth.default( + **scopes_kwargs, quota_project_id=quota_project_id + ) + # Don't apply audience if the credentials file passed from user. + if hasattr(credentials, "with_gdch_audience"): + credentials = credentials.with_gdch_audience( + api_audience if api_audience else host + ) + + # If the credentials are service account credentials, then always try to use self signed JWT. + if ( + always_use_jwt_access + and isinstance(credentials, service_account.Credentials) + and hasattr(service_account.Credentials, "with_always_use_jwt_access") + ): + credentials = credentials.with_always_use_jwt_access(True) + + # Save the credentials. + self._credentials = credentials + + # Save the hostname. Default to port 443 (HTTPS) if none is specified. + if ":" not in host: + host += ":443" + self._host = host + + @property + def host(self): + return self._host + + def _prep_wrapped_messages(self, client_info): + # Precompute the wrapped methods. + self._wrapped_methods = { + self.get_product: gapic_v1.method.wrap_method( + self.get_product, + default_timeout=None, + client_info=client_info, + ), + self.list_products: gapic_v1.method.wrap_method( + self.list_products, + default_timeout=None, + client_info=client_info, + ), + } + + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! + """ + raise NotImplementedError() + + @property + def get_product( + self, + ) -> Callable[ + [products.GetProductRequest], + Union[products.Product, Awaitable[products.Product]], + ]: + raise NotImplementedError() + + @property + def list_products( + self, + ) -> Callable[ + [products.ListProductsRequest], + Union[products.ListProductsResponse, Awaitable[products.ListProductsResponse]], + ]: + raise NotImplementedError() + + @property + def kind(self) -> str: + raise NotImplementedError() + + +__all__ = ("ProductsServiceTransport",) diff --git a/packages/google-shopping-merchant-products/google/shopping/merchant_products_v1beta/services/products_service/transports/grpc.py b/packages/google-shopping-merchant-products/google/shopping/merchant_products_v1beta/services/products_service/transports/grpc.py new file mode 100644 index 000000000000..ba1db8627a0e --- /dev/null +++ b/packages/google-shopping-merchant-products/google/shopping/merchant_products_v1beta/services/products_service/transports/grpc.py @@ -0,0 +1,306 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
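Editor's note: ``_prep_wrapped_messages`` above relies on ``gapic_v1.method.wrap_method`` to bake default timeout/retry and ``x-goog-api-client`` metadata into each transport callable, so callers can simply pass ``retry=gapic_v1.method.DEFAULT``. A tiny illustration with a stand-in function instead of a real stub:

```python
from google.api_core import gapic_v1


def fake_rpc(request, timeout=None, metadata=()):
    # Stand-in for a transport stub; a real stub would go over the wire.
    return {"echoed": request, "timeout": timeout}


# Wrap the callable with a default timeout; client-info metadata is attached too.
wrapped = gapic_v1.method.wrap_method(fake_rpc, default_timeout=30.0)

# The caller no longer has to pass a timeout explicitly.
print(wrapped("GetProduct"))  # {'echoed': 'GetProduct', 'timeout': 30.0}
```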
+# +from typing import Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, grpc_helpers +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +import grpc # type: ignore + +from google.shopping.merchant_products_v1beta.types import products + +from .base import DEFAULT_CLIENT_INFO, ProductsServiceTransport + + +class ProductsServiceGrpcTransport(ProductsServiceTransport): + """gRPC backend transport for ProductsService. + + Service to use Product resource. + This service works for products with online channel only. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + + _stubs: Dict[str, Callable] + + def __init__( + self, + *, + host: str = "merchantapi.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'merchantapi.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if a ``channel`` instance is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if a ``channel`` instance is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if a ``channel`` instance is provided. + channel (Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]]): + A ``Channel`` instance through which to make calls, or a Callable + that constructs and returns one. If set to None, ``self.create_channel`` + is used to create the channel. If a Callable is given, it will be called + with the same arguments as used in ``self.create_channel``. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. 
It is ignored if a ``channel`` instance is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if isinstance(channel, grpc.Channel): + # Ignore credentials if a channel was passed. + credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + # initialize with the provided callable or the default channel + channel_init = channel or type(self).create_channel + self._grpc_channel = channel_init( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. 
This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @classmethod + def create_channel( + cls, + host: str = "merchantapi.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> grpc.Channel: + """Create and return a gRPC channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + grpc.Channel: A gRPC channel object. + + Raises: + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + + return grpc_helpers.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs, + ) + + @property + def grpc_channel(self) -> grpc.Channel: + """Return the channel designed to connect to this service.""" + return self._grpc_channel + + @property + def get_product(self) -> Callable[[products.GetProductRequest], products.Product]: + r"""Return a callable for the get product method over gRPC. + + Retrieves the processed product from your Merchant + Center account. + After inserting, updating, or deleting a product input, + it may take several minutes before the updated final + product can be retrieved. + + Returns: + Callable[[~.GetProductRequest], + ~.Product]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_product" not in self._stubs: + self._stubs["get_product"] = self.grpc_channel.unary_unary( + "/google.shopping.merchant.products.v1beta.ProductsService/GetProduct", + request_serializer=products.GetProductRequest.serialize, + response_deserializer=products.Product.deserialize, + ) + return self._stubs["get_product"] + + @property + def list_products( + self, + ) -> Callable[[products.ListProductsRequest], products.ListProductsResponse]: + r"""Return a callable for the list products method over gRPC. + + Lists the processed products in your Merchant Center + account. The response might contain fewer items than + specified by pageSize. Rely on pageToken to determine if + there are more items to be requested. + + After inserting, updating, or deleting a product input, + it may take several minutes before the updated processed + product can be retrieved. 
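Editor's note: the transport's stub properties above create each gRPC callable on first access and cache it in ``self._stubs``, so repeated accesses reuse one stub per method. A stripped-down sketch of that lazy-caching pattern; the dict-based "channel" is a stand-in for a real ``grpc.Channel``:

```python
from typing import Callable, Dict


class LazyStubs:
    def __init__(self, channel):
        self._channel = channel
        self._stubs: Dict[str, Callable] = {}

    @property
    def get_product(self) -> Callable:
        if "get_product" not in self._stubs:
            # In the real transport this is self.grpc_channel.unary_unary(...).
            self._stubs["get_product"] = self._channel["GetProduct"]
        return self._stubs["get_product"]


stubs = LazyStubs({"GetProduct": lambda request: f"called with {request!r}"})
print(stubs.get_product("accounts/123/products/p1"))
print(stubs.get_product is stubs.get_product)  # True: cached after first access
```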
+ + Returns: + Callable[[~.ListProductsRequest], + ~.ListProductsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_products" not in self._stubs: + self._stubs["list_products"] = self.grpc_channel.unary_unary( + "/google.shopping.merchant.products.v1beta.ProductsService/ListProducts", + request_serializer=products.ListProductsRequest.serialize, + response_deserializer=products.ListProductsResponse.deserialize, + ) + return self._stubs["list_products"] + + def close(self): + self.grpc_channel.close() + + @property + def kind(self) -> str: + return "grpc" + + +__all__ = ("ProductsServiceGrpcTransport",) diff --git a/packages/google-shopping-merchant-products/google/shopping/merchant_products_v1beta/services/products_service/transports/grpc_asyncio.py b/packages/google-shopping-merchant-products/google/shopping/merchant_products_v1beta/services/products_service/transports/grpc_asyncio.py new file mode 100644 index 000000000000..52d489f4ad98 --- /dev/null +++ b/packages/google-shopping-merchant-products/google/shopping/merchant_products_v1beta/services/products_service/transports/grpc_asyncio.py @@ -0,0 +1,325 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1, grpc_helpers_async +from google.api_core import retry_async as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +import grpc # type: ignore +from grpc.experimental import aio # type: ignore + +from google.shopping.merchant_products_v1beta.types import products + +from .base import DEFAULT_CLIENT_INFO, ProductsServiceTransport +from .grpc import ProductsServiceGrpcTransport + + +class ProductsServiceGrpcAsyncIOTransport(ProductsServiceTransport): + """gRPC AsyncIO backend transport for ProductsService. + + Service to use Product resource. + This service works for products with online channel only. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. 
+ """ + + _grpc_channel: aio.Channel + _stubs: Dict[str, Callable] = {} + + @classmethod + def create_channel( + cls, + host: str = "merchantapi.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> aio.Channel: + """Create and return a gRPC AsyncIO channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + aio.Channel: A gRPC AsyncIO channel object. + """ + + return grpc_helpers_async.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs, + ) + + def __init__( + self, + *, + host: str = "merchantapi.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[Union[aio.Channel, Callable[..., aio.Channel]]] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'merchantapi.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if a ``channel`` instance is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if a ``channel`` instance is provided. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + channel (Optional[Union[aio.Channel, Callable[..., aio.Channel]]]): + A ``Channel`` instance through which to make calls, or a Callable + that constructs and returns one. If set to None, ``self.create_channel`` + is used to create the channel. 
If a Callable is given, it will be called + with the same arguments as used in ``self.create_channel``. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if a ``channel`` instance is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if isinstance(channel, aio.Channel): + # Ignore credentials if a channel was passed. + credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. 
+ if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + # initialize with the provided callable or the default channel + channel_init = channel or type(self).create_channel + self._grpc_channel = channel_init( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @property + def grpc_channel(self) -> aio.Channel: + """Create the channel designed to connect to this service. + + This property caches on the instance; repeated calls return + the same channel. + """ + # Return the channel from cache. + return self._grpc_channel + + @property + def get_product( + self, + ) -> Callable[[products.GetProductRequest], Awaitable[products.Product]]: + r"""Return a callable for the get product method over gRPC. + + Retrieves the processed product from your Merchant + Center account. + After inserting, updating, or deleting a product input, + it may take several minutes before the updated final + product can be retrieved. + + Returns: + Callable[[~.GetProductRequest], + Awaitable[~.Product]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_product" not in self._stubs: + self._stubs["get_product"] = self.grpc_channel.unary_unary( + "/google.shopping.merchant.products.v1beta.ProductsService/GetProduct", + request_serializer=products.GetProductRequest.serialize, + response_deserializer=products.Product.deserialize, + ) + return self._stubs["get_product"] + + @property + def list_products( + self, + ) -> Callable[ + [products.ListProductsRequest], Awaitable[products.ListProductsResponse] + ]: + r"""Return a callable for the list products method over gRPC. + + Lists the processed products in your Merchant Center + account. The response might contain fewer items than + specified by pageSize. Rely on pageToken to determine if + there are more items to be requested. + + After inserting, updating, or deleting a product input, + it may take several minutes before the updated processed + product can be retrieved. 
+ + Returns: + Callable[[~.ListProductsRequest], + Awaitable[~.ListProductsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_products" not in self._stubs: + self._stubs["list_products"] = self.grpc_channel.unary_unary( + "/google.shopping.merchant.products.v1beta.ProductsService/ListProducts", + request_serializer=products.ListProductsRequest.serialize, + response_deserializer=products.ListProductsResponse.deserialize, + ) + return self._stubs["list_products"] + + def _prep_wrapped_messages(self, client_info): + """Precompute the wrapped methods, overriding the base class method to use async wrappers.""" + self._wrapped_methods = { + self.get_product: gapic_v1.method_async.wrap_method( + self.get_product, + default_timeout=None, + client_info=client_info, + ), + self.list_products: gapic_v1.method_async.wrap_method( + self.list_products, + default_timeout=None, + client_info=client_info, + ), + } + + def close(self): + return self.grpc_channel.close() + + +__all__ = ("ProductsServiceGrpcAsyncIOTransport",) diff --git a/packages/google-shopping-merchant-products/google/shopping/merchant_products_v1beta/services/products_service/transports/rest.py b/packages/google-shopping-merchant-products/google/shopping/merchant_products_v1beta/services/products_service/transports/rest.py new file mode 100644 index 000000000000..c8996eaf4e1c --- /dev/null +++ b/packages/google-shopping-merchant-products/google/shopping/merchant_products_v1beta/services/products_service/transports/rest.py @@ -0,0 +1,439 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
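The synchronous and asyncio gRPC transports above are normally constructed for you by the generated client. The following is only a rough wiring sketch, assuming the package is installed, application-default credentials are configured, and the `ProductsServiceClient` class (defined in this package's client module, outside this excerpt) is importable from `google.shopping.merchant_products_v1beta`; the resource name is a placeholder.

```python
# Hypothetical wiring sketch, not part of the generated code in this diff.
# Assumes application-default credentials and placeholder resource names.
from google.shopping import merchant_products_v1beta
from google.shopping.merchant_products_v1beta.services.products_service.transports.grpc import (
    ProductsServiceGrpcTransport,
)
from google.shopping.merchant_products_v1beta.types import products

# create_channel() resolves credentials and scopes; handing the resulting channel to the
# transport makes the transport reuse it instead of creating its own.
channel = ProductsServiceGrpcTransport.create_channel("merchantapi.googleapis.com")
transport = ProductsServiceGrpcTransport(channel=channel)

# The client loads the transport and dispatches to the unary stubs defined above.
client = merchant_products_v1beta.ProductsServiceClient(transport=transport)
product = client.get_product(
    request=products.GetProductRequest(name="accounts/123/products/456")
)
```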
+# + +import dataclasses +import json # type: ignore +import re +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, path_template, rest_helpers, rest_streaming +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.transport.requests import AuthorizedSession # type: ignore +from google.protobuf import json_format +import grpc # type: ignore +from requests import __version__ as requests_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + + +from google.shopping.merchant_products_v1beta.types import products + +from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO +from .base import ProductsServiceTransport + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) + + +class ProductsServiceRestInterceptor: + """Interceptor for ProductsService. + + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. + Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the ProductsServiceRestTransport. + + .. code-block:: python + class MyCustomProductsServiceInterceptor(ProductsServiceRestInterceptor): + def pre_get_product(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_product(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_products(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_products(self, response): + logging.log(f"Received response: {response}") + return response + + transport = ProductsServiceRestTransport(interceptor=MyCustomProductsServiceInterceptor()) + client = ProductsServiceClient(transport=transport) + + + """ + + def pre_get_product( + self, request: products.GetProductRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[products.GetProductRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_product + + Override in a subclass to manipulate the request or metadata + before they are sent to the ProductsService server. + """ + return request, metadata + + def post_get_product(self, response: products.Product) -> products.Product: + """Post-rpc interceptor for get_product + + Override in a subclass to manipulate the response + after it is returned by the ProductsService server but before + it is returned to user code. + """ + return response + + def pre_list_products( + self, request: products.ListProductsRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[products.ListProductsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_products + + Override in a subclass to manipulate the request or metadata + before they are sent to the ProductsService server. 
+ """ + return request, metadata + + def post_list_products( + self, response: products.ListProductsResponse + ) -> products.ListProductsResponse: + """Post-rpc interceptor for list_products + + Override in a subclass to manipulate the response + after it is returned by the ProductsService server but before + it is returned to user code. + """ + return response + + +@dataclasses.dataclass +class ProductsServiceRestStub: + _session: AuthorizedSession + _host: str + _interceptor: ProductsServiceRestInterceptor + + +class ProductsServiceRestTransport(ProductsServiceTransport): + """REST backend transport for ProductsService. + + Service to use Product resource. + This service works for products with online channel only. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends JSON representations of protocol buffers over HTTP/1.1 + + """ + + def __init__( + self, + *, + host: str = "merchantapi.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", + interceptor: Optional[ProductsServiceRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'merchantapi.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. It is ignored + if ``channel`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. + """ + # Run the base constructor + # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. 
+ # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the + # credentials object + maybe_url_match = re.match("^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host) + if maybe_url_match is None: + raise ValueError( + f"Unexpected hostname structure: {host}" + ) # pragma: NO COVER + + url_match_items = maybe_url_match.groupdict() + + host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host + + super().__init__( + host=host, + credentials=credentials, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + self._session = AuthorizedSession( + self._credentials, default_host=self.DEFAULT_HOST + ) + if client_cert_source_for_mtls: + self._session.configure_mtls_channel(client_cert_source_for_mtls) + self._interceptor = interceptor or ProductsServiceRestInterceptor() + self._prep_wrapped_messages(client_info) + + class _GetProduct(ProductsServiceRestStub): + def __hash__(self): + return hash("GetProduct") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: products.GetProductRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> products.Product: + r"""Call the get product method over HTTP. + + Args: + request (~.products.GetProductRequest): + The request object. Request message for the GetProduct + method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.products.Product: + The processed product, built from multiple [product + inputs][[google.shopping.content.bundles.Products.ProductInput] + after applying rules and supplemental data sources. This + processed product matches what is shown in your Merchant + Center account and in Shopping ads and other surfaces + across Google. Each product is built from exactly one + primary data source product input, and multiple + supplemental data source inputs. After inserting, + updating, or deleting a product input, it may take + several minutes before the updated processed product can + be retrieved. + + All fields in the processed product and its sub-messages + match the name of their corresponding attribute in the + `Product data + specification `__ + with some exceptions.
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/products/v1beta/{name=accounts/*/products/*}", + }, + ] + request, metadata = self._interceptor.pre_get_product(request, metadata) + pb_request = products.GetProductRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = products.Product() + pb_resp = products.Product.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_product(resp) + return resp + + class _ListProducts(ProductsServiceRestStub): + def __hash__(self): + return hash("ListProducts") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: products.ListProductsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> products.ListProductsResponse: + r"""Call the list products method over HTTP. + + Args: + request (~.products.ListProductsRequest): + The request object. Request message for the ListProducts + method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.products.ListProductsResponse: + Response message for the ListProducts + method. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/products/v1beta/{parent=accounts/*}/products", + }, + ] + request, metadata = self._interceptor.pre_list_products(request, metadata) + pb_request = products.ListProductsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = products.ListProductsResponse() + pb_resp = products.ListProductsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_products(resp) + return resp + + @property + def get_product(self) -> Callable[[products.GetProductRequest], products.Product]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetProduct(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_products( + self, + ) -> Callable[[products.ListProductsRequest], products.ListProductsResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListProducts(self._session, self._host, self._interceptor) # type: ignore + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__ = ("ProductsServiceRestTransport",) diff --git a/packages/google-shopping-merchant-products/google/shopping/merchant_products_v1beta/types/__init__.py b/packages/google-shopping-merchant-products/google/shopping/merchant_products_v1beta/types/__init__.py new file mode 100644 index 000000000000..87b39c31ab38 --- /dev/null +++ b/packages/google-shopping-merchant-products/google/shopping/merchant_products_v1beta/types/__init__.py @@ -0,0 +1,80 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from .productinputs import ( + DeleteProductInputRequest, + InsertProductInputRequest, + ProductInput, +) +from .products import ( + GetProductRequest, + ListProductsRequest, + ListProductsResponse, + Product, +) +from .products_common import ( + Attributes, + Certification, + CloudExportAdditionalProperties, + FreeShippingThreshold, + Installment, + LoyaltyPoints, + LoyaltyProgram, + ProductDetail, + ProductDimension, + ProductStatus, + ProductStructuredDescription, + ProductStructuredTitle, + ProductWeight, + Shipping, + ShippingDimension, + ShippingWeight, + SubscriptionCost, + SubscriptionPeriod, + Tax, + UnitPricingBaseMeasure, + UnitPricingMeasure, +) + +__all__ = ( + "DeleteProductInputRequest", + "InsertProductInputRequest", + "ProductInput", + "GetProductRequest", + "ListProductsRequest", + "ListProductsResponse", + "Product", + "Attributes", + "Certification", + "CloudExportAdditionalProperties", + "FreeShippingThreshold", + "Installment", + "LoyaltyPoints", + "LoyaltyProgram", + "ProductDetail", + "ProductDimension", + "ProductStatus", + "ProductStructuredDescription", + "ProductStructuredTitle", + "ProductWeight", + "Shipping", + "ShippingDimension", + "ShippingWeight", + "SubscriptionCost", + "Tax", + "UnitPricingBaseMeasure", + "UnitPricingMeasure", + "SubscriptionPeriod", +) diff --git a/packages/google-shopping-merchant-products/google/shopping/merchant_products_v1beta/types/productinputs.py b/packages/google-shopping-merchant-products/google/shopping/merchant_products_v1beta/types/productinputs.py new file mode 100644 index 000000000000..15947bd03bfb --- /dev/null +++ b/packages/google-shopping-merchant-products/google/shopping/merchant_products_v1beta/types/productinputs.py @@ -0,0 +1,219 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +from google.shopping.type.types import types +import proto # type: ignore + +from google.shopping.merchant_products_v1beta.types import products_common + +__protobuf__ = proto.module( + package="google.shopping.merchant.products.v1beta", + manifest={ + "ProductInput", + "InsertProductInputRequest", + "DeleteProductInputRequest", + }, +) + + +class ProductInput(proto.Message): + r"""This resource represents input data you submit for a product, not + the processed product that you see in Merchant Center, in Shopping + ads, or across Google surfaces. Product inputs, rules and + supplemental data source data are combined to create the processed + [product][google.shopping.content.bundles.Products.Product]. + + Required product input attributes to pass data validation checks are + primarily defined in the `Products Data + Specification `__. 
+ + The following attributes are required: + [feedLabel][google.shopping.content.bundles.Products.feed_label], + [contentLanguage][google.shopping.content.bundles.Products.content_language] + and [offerId][google.shopping.content.bundles.Products.offer_id]. + + After inserting, updating, or deleting a product input, it may take + several minutes before the processed product can be retrieved. + + All fields in the product input and its sub-messages match the + English name of their corresponding attribute in the vertical spec + with `some + exceptions `__. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + name (str): + Identifier. The name of the product input. Format: + ``"{productinput.name=accounts/{account}/productInputs/{productinput}}"`` + product (str): + Output only. The name of the processed product. Format: + ``"{product.name=accounts/{account}/products/{product}}"`` + channel (google.shopping.type.types.Channel.ChannelEnum): + Required. Immutable. The + `channel `__ + of the product. + offer_id (str): + Required. Immutable. Your unique identifier for the product. + This is the same for the product input and processed + product. Leading and trailing whitespaces are stripped and + multiple whitespaces are replaced by a single whitespace + upon submission. See the `products data + specification `__ + for details. + content_language (str): + Required. Immutable. The two-letter `ISO + 639-1 `__ language + code for the product. + feed_label (str): + Required. Immutable. The `feed + label `__ + for the product. + version_number (int): + Optional. Represents the existing version (freshness) of the + product, which can be used to preserve the right order when + multiple updates are done at the same time. + + If set, the insertion is prevented when version number is + lower than the current version number of the existing + product. Re-insertion (for example, product refresh after 30 + days) can be performed with the current ``version_number``. + + Only supported for insertions into primary data sources. + + If the operation is prevented, the aborted exception will be + thrown. + + This field is a member of `oneof`_ ``_version_number``. + attributes (google.shopping.merchant_products_v1beta.types.Attributes): + Optional. A list of product attributes. + custom_attributes (MutableSequence[google.shopping.type.types.CustomAttribute]): + Optional. A list of custom (merchant-provided) attributes. + It can also be used for submitting any attribute of the data + specification in its generic form (for example, + ``{ "name": "size type", "value": "regular" }``). This is + useful for submitting attributes not explicitly exposed by + the API, such as additional attributes used for Buy on + Google. Maximum allowed number of characters for each custom + attribute is 10240 (represents sum of characters for name + and value). Maximum 2500 custom attributes can be set per + product, with total size of 102.4kB. Underscores in custom + attribute names are replaced by spaces upon insertion. 
+ """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + product: str = proto.Field( + proto.STRING, + number=2, + ) + channel: types.Channel.ChannelEnum = proto.Field( + proto.ENUM, + number=3, + enum=types.Channel.ChannelEnum, + ) + offer_id: str = proto.Field( + proto.STRING, + number=4, + ) + content_language: str = proto.Field( + proto.STRING, + number=5, + ) + feed_label: str = proto.Field( + proto.STRING, + number=6, + ) + version_number: int = proto.Field( + proto.INT64, + number=7, + optional=True, + ) + attributes: products_common.Attributes = proto.Field( + proto.MESSAGE, + number=8, + message=products_common.Attributes, + ) + custom_attributes: MutableSequence[types.CustomAttribute] = proto.RepeatedField( + proto.MESSAGE, + number=9, + message=types.CustomAttribute, + ) + + +class InsertProductInputRequest(proto.Message): + r"""Request message for the InsertProductInput method. + + Attributes: + parent (str): + Required. The account where this product will + be inserted. Format: accounts/{account} + product_input (google.shopping.merchant_products_v1beta.types.ProductInput): + Required. The product input to insert. + data_source (str): + Required. The primary or supplemental product data source + name. If the product already exists and data source provided + is different, then the product will be moved to a new data + source. Format: + ``accounts/{account}/dataSources/{datasource}``. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + product_input: "ProductInput" = proto.Field( + proto.MESSAGE, + number=2, + message="ProductInput", + ) + data_source: str = proto.Field( + proto.STRING, + number=3, + ) + + +class DeleteProductInputRequest(proto.Message): + r"""Request message for the DeleteProductInput method. + + Attributes: + name (str): + Required. The name of the product input + resource to delete. Format: + accounts/{account}/productInputs/{product} + data_source (str): + Required. The primary or supplemental data source from which + the product input should be deleted. Format: + ``accounts/{account}/dataSources/{datasource}``. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + data_source: str = proto.Field( + proto.STRING, + number=2, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-shopping-merchant-products/google/shopping/merchant_products_v1beta/types/products.py b/packages/google-shopping-merchant-products/google/shopping/merchant_products_v1beta/types/products.py new file mode 100644 index 000000000000..ba579989dfe0 --- /dev/null +++ b/packages/google-shopping-merchant-products/google/shopping/merchant_products_v1beta/types/products.py @@ -0,0 +1,239 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +from google.shopping.type.types import types +import proto # type: ignore + +from google.shopping.merchant_products_v1beta.types import products_common + +__protobuf__ = proto.module( + package="google.shopping.merchant.products.v1beta", + manifest={ + "Product", + "GetProductRequest", + "ListProductsRequest", + "ListProductsResponse", + }, +) + + +class Product(proto.Message): + r"""The processed product, built from multiple [product + inputs][[google.shopping.content.bundles.Products.ProductInput] + after applying rules and supplemental data sources. This processed + product matches what is shown in your Merchant Center account and in + Shopping ads and other surfaces across Google. Each product is built + from exactly one primary data source product input, and multiple + supplemental data source inputs. After inserting, updating, or + deleting a product input, it may take several minutes before the + updated processed product can be retrieved. + + All fields in the processed product and its sub-messages match the + name of their corresponding attribute in the `Product data + specification `__ + with some exceptions. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + name (str): + The name of the product. Format: + ``"{product.name=accounts/{account}/products/{product}}"`` + channel (google.shopping.type.types.Channel.ChannelEnum): + Output only. The + `channel `__ + of the product. + offer_id (str): + Output only. Your unique identifier for the product. This is + the same for the product input and processed product. + Leading and trailing whitespaces are stripped and multiple + whitespaces are replaced by a single whitespace upon + submission. See the `product data + specification `__ + for details. + content_language (str): + Output only. The two-letter `ISO + 639-1 `__ language + code for the product. + feed_label (str): + Output only. The feed label for the product. + data_source (str): + Output only. The primary data source of the + product. + version_number (int): + Output only. Represents the existing version (freshness) of + the product, which can be used to preserve the right order + when multiple updates are done at the same time. + + If set, the insertion is prevented when version number is + lower than the current version number of the existing + product. Re-insertion (for example, product refresh after 30 + days) can be performed with the current ``version_number``. + + Only supported for insertions into primary data sources. + + If the operation is prevented, the aborted exception will be + thrown. + + This field is a member of `oneof`_ ``_version_number``. + attributes (google.shopping.merchant_products_v1beta.types.Attributes): + Output only. A list of product attributes. + custom_attributes (MutableSequence[google.shopping.type.types.CustomAttribute]): + Output only. A list of custom (merchant-provided) + attributes. It can also be used to submit any attribute of + the data specification in its generic form (for example, + ``{ "name": "size type", "value": "regular" }``). This is + useful for submitting attributes not explicitly exposed by + the API, such as additional attributes used for Buy on + Google. + product_status (google.shopping.merchant_products_v1beta.types.ProductStatus): + Output only. 
The status of a product, data + validation issues, that is, information about a + product computed asynchronously. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + channel: types.Channel.ChannelEnum = proto.Field( + proto.ENUM, + number=2, + enum=types.Channel.ChannelEnum, + ) + offer_id: str = proto.Field( + proto.STRING, + number=3, + ) + content_language: str = proto.Field( + proto.STRING, + number=4, + ) + feed_label: str = proto.Field( + proto.STRING, + number=5, + ) + data_source: str = proto.Field( + proto.STRING, + number=6, + ) + version_number: int = proto.Field( + proto.INT64, + number=7, + optional=True, + ) + attributes: products_common.Attributes = proto.Field( + proto.MESSAGE, + number=8, + message=products_common.Attributes, + ) + custom_attributes: MutableSequence[types.CustomAttribute] = proto.RepeatedField( + proto.MESSAGE, + number=9, + message=types.CustomAttribute, + ) + product_status: products_common.ProductStatus = proto.Field( + proto.MESSAGE, + number=10, + message=products_common.ProductStatus, + ) + + +class GetProductRequest(proto.Message): + r"""Request message for the GetProduct method. + + Attributes: + name (str): + Required. The name of the product to retrieve. Format: + ``accounts/{account}/products/{product}`` + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class ListProductsRequest(proto.Message): + r"""Request message for the ListProducts method. + + Attributes: + parent (str): + Required. The account to list processed + products for. Format: accounts/{account} + page_size (int): + The maximum number of products to return. The + service may return fewer than this value. + The maximum value is 1000; values above 1000 + will be coerced to 1000. If unspecified, the + maximum number of products will be returned. + page_token (str): + A page token, received from a previous ``ListProducts`` + call. Provide this to retrieve the subsequent page. + + When paginating, all other parameters provided to + ``ListProducts`` must match the call that provided the page + token. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + + +class ListProductsResponse(proto.Message): + r"""Response message for the ListProducts method. + + Attributes: + products (MutableSequence[google.shopping.merchant_products_v1beta.types.Product]): + The processed products from the specified + account. These are your processed products after + applying rules and supplemental data sources. + next_page_token (str): + A token, which can be sent as ``page_token`` to retrieve the + next page. If this field is omitted, there are no subsequent + pages. 
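As the request and response docstrings above describe, listing is driven by ``page_size``, ``page_token``, and ``next_page_token``. The sketch below drains all pages through any callable that maps a ``ListProductsRequest`` to a ``ListProductsResponse`` (for example, the transport-level ``list_products`` callables shown earlier); the page size of 250 is an illustrative choice, not a requirement.

```python
# Hypothetical pagination helper; `list_products_fn` stands for any callable that
# accepts a ListProductsRequest and returns a ListProductsResponse.
from typing import Callable, Iterator

from google.shopping.merchant_products_v1beta.types import products


def iter_all_products(
    list_products_fn: Callable[[products.ListProductsRequest], products.ListProductsResponse],
    parent: str,
) -> Iterator[products.Product]:
    """Follow next_page_token until the service reports no further pages."""
    request = products.ListProductsRequest(parent=parent, page_size=250)
    while True:
        response = list_products_fn(request)
        yield from response.products
        if not response.next_page_token:
            return
        request.page_token = response.next_page_token
```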
+ """ + + @property + def raw_page(self): + return self + + products: MutableSequence["Product"] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="Product", + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-shopping-merchant-products/google/shopping/merchant_products_v1beta/types/products_common.py b/packages/google-shopping-merchant-products/google/shopping/merchant_products_v1beta/types/products_common.py new file mode 100644 index 000000000000..258eb79ef48a --- /dev/null +++ b/packages/google-shopping-merchant-products/google/shopping/merchant_products_v1beta/types/products_common.py @@ -0,0 +1,1909 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +from google.protobuf import timestamp_pb2 # type: ignore +from google.shopping.type.types import types +from google.type import interval_pb2 # type: ignore +import proto # type: ignore + +__protobuf__ = proto.module( + package="google.shopping.merchant.products.v1beta", + manifest={ + "SubscriptionPeriod", + "Attributes", + "Tax", + "ShippingWeight", + "ShippingDimension", + "UnitPricingBaseMeasure", + "UnitPricingMeasure", + "SubscriptionCost", + "Installment", + "LoyaltyPoints", + "LoyaltyProgram", + "Shipping", + "FreeShippingThreshold", + "ProductDetail", + "Certification", + "ProductStructuredTitle", + "ProductStructuredDescription", + "ProductDimension", + "ProductWeight", + "ProductStatus", + "CloudExportAdditionalProperties", + }, +) + + +class SubscriptionPeriod(proto.Enum): + r"""The subscription period of the product. + + Values: + SUBSCRIPTION_PERIOD_UNSPECIFIED (0): + Indicates that the subscription period is + unspecified. + MONTH (1): + Indicates that the subscription period is + month. + YEAR (2): + Indicates that the subscription period is + year. + """ + SUBSCRIPTION_PERIOD_UNSPECIFIED = 0 + MONTH = 1 + YEAR = 2 + + +class Attributes(proto.Message): + r"""Attributes. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + identifier_exists (bool): + Set this value to false when the item does + not have unique product identifiers appropriate + to its category, such as GTIN, MPN, and brand. + Defaults to true, if not provided. + + This field is a member of `oneof`_ ``_identifier_exists``. + is_bundle (bool): + Whether the item is a merchant-defined + bundle. A bundle is a custom grouping of + different products sold by a merchant for a + single price. + + This field is a member of `oneof`_ ``_is_bundle``. + title (str): + Title of the item. + + This field is a member of `oneof`_ ``_title``. + description (str): + Description of the item. 
+ + This field is a member of `oneof`_ ``_description``. + link (str): + URL directly linking to your item's page on + your online store. + + This field is a member of `oneof`_ ``_link``. + mobile_link (str): + URL for the mobile-optimized version of your + item's landing page. + + This field is a member of `oneof`_ ``_mobile_link``. + canonical_link (str): + URL for the canonical version of your item's + landing page. + + This field is a member of `oneof`_ ``_canonical_link``. + image_link (str): + URL of an image of the item. + + This field is a member of `oneof`_ ``_image_link``. + additional_image_links (MutableSequence[str]): + Additional URLs of images of the item. + expiration_date (google.protobuf.timestamp_pb2.Timestamp): + Date on which the item should expire, as specified upon + insertion, in `ISO + 8601 `__ format. The + actual expiration date is exposed in ``productstatuses`` as + `googleExpirationDate `__ + and might be earlier if ``expirationDate`` is too far in the + future. + disclosure_date (google.protobuf.timestamp_pb2.Timestamp): + The date time when an offer becomes visible in search + results across Google’s YouTube surfaces, in `ISO + 8601 `__ format. See + `Disclosure + date `__ + for more information. + adult (bool): + Set to true if the item is targeted towards + adults. + + This field is a member of `oneof`_ ``_adult``. + age_group (str): + Target `age + group `__ + of the item. + + This field is a member of `oneof`_ ``_age_group``. + availability (str): + Availability status of the item. + + This field is a member of `oneof`_ ``_availability``. + availability_date (google.protobuf.timestamp_pb2.Timestamp): + The day a pre-ordered product becomes available for + delivery, in `ISO + 8601 `__ format. + brand (str): + Brand of the item. + + This field is a member of `oneof`_ ``_brand``. + color (str): + Color of the item. + + This field is a member of `oneof`_ ``_color``. + condition (str): + Condition or state of the item. + + This field is a member of `oneof`_ ``_condition``. + gender (str): + Target gender of the item. + + This field is a member of `oneof`_ ``_gender``. + google_product_category (str): + Google's category of the item (see `Google product + taxonomy `__). + When querying products, this field will contain the user + provided value. There is currently no way to get back the + auto assigned google product categories through the API. + + This field is a member of `oneof`_ ``_google_product_category``. + gtin (str): + Global Trade Item Number + (`GTIN `__) + of the item. + + This field is a member of `oneof`_ ``_gtin``. + item_group_id (str): + Shared identifier for all variants of the + same product. + + This field is a member of `oneof`_ ``_item_group_id``. + material (str): + The material of which the item is made. + + This field is a member of `oneof`_ ``_material``. + mpn (str): + Manufacturer Part Number + (`MPN `__) + of the item. + + This field is a member of `oneof`_ ``_mpn``. + pattern (str): + The item's pattern (for example, polka dots). + + This field is a member of `oneof`_ ``_pattern``. + price (google.shopping.type.types.Price): + Price of the item. + installment (google.shopping.merchant_products_v1beta.types.Installment): + Number and amount of installments to pay for + an item. + subscription_cost (google.shopping.merchant_products_v1beta.types.SubscriptionCost): + Number of periods (months or years) and + amount of payment per period for an item with an + associated subscription contract. 
+ loyalty_points (google.shopping.merchant_products_v1beta.types.LoyaltyPoints): + Loyalty points that users receive after + purchasing the item. Japan only. + loyalty_programs (MutableSequence[google.shopping.merchant_products_v1beta.types.LoyaltyProgram]): + A list of loyalty program information that is + used to surface loyalty benefits (for example, + better pricing, points, etc) to the user of this + item. + product_types (MutableSequence[str]): + Categories of the item (formatted as in `product data + specification `__). + sale_price (google.shopping.type.types.Price): + Advertised sale price of the item. + sale_price_effective_date (google.type.interval_pb2.Interval): + Date range during which the item is on sale (see `product + data + specification `__). + sell_on_google_quantity (int): + The quantity of the product that is available + for selling on Google. Supported only for online + products. + + This field is a member of `oneof`_ ``_sell_on_google_quantity``. + product_height (google.shopping.merchant_products_v1beta.types.ProductDimension): + The height of the product in the units + provided. The value must be between + 0 (exclusive) and 3000 (inclusive). + product_length (google.shopping.merchant_products_v1beta.types.ProductDimension): + The length of the product in the units + provided. The value must be between 0 + (exclusive) and 3000 (inclusive). + product_width (google.shopping.merchant_products_v1beta.types.ProductDimension): + The width of the product in the units + provided. The value must be between 0 + (exclusive) and 3000 (inclusive). + product_weight (google.shopping.merchant_products_v1beta.types.ProductWeight): + The weight of the product in the units + provided. The value must be between 0 + (exclusive) and 2000 (inclusive). + shipping (MutableSequence[google.shopping.merchant_products_v1beta.types.Shipping]): + Shipping rules. + free_shipping_threshold (MutableSequence[google.shopping.merchant_products_v1beta.types.FreeShippingThreshold]): + Conditions to be met for a product to have + free shipping. + shipping_weight (google.shopping.merchant_products_v1beta.types.ShippingWeight): + Weight of the item for shipping. + shipping_length (google.shopping.merchant_products_v1beta.types.ShippingDimension): + Length of the item for shipping. + shipping_width (google.shopping.merchant_products_v1beta.types.ShippingDimension): + Width of the item for shipping. + shipping_height (google.shopping.merchant_products_v1beta.types.ShippingDimension): + Height of the item for shipping. + max_handling_time (int): + Maximal product handling time (in business + days). + + This field is a member of `oneof`_ ``_max_handling_time``. + min_handling_time (int): + Minimal product handling time (in business + days). + + This field is a member of `oneof`_ ``_min_handling_time``. + shipping_label (str): + The shipping label of the product, used to + group product in account-level shipping rules. + + This field is a member of `oneof`_ ``_shipping_label``. + transit_time_label (str): + The transit time label of the product, used + to group product in account-level transit time + tables. + + This field is a member of `oneof`_ ``_transit_time_label``. + size (str): + Size of the item. Only one value is allowed. For variants + with different sizes, insert a separate product for each + size with the same ``itemGroupId`` value (see + [https://support.google.com/merchants/answer/6324492](size + definition)). 
+ + This field is a member of `oneof`_ ``_size``. + size_system (str): + System in which the size is specified. + Recommended for apparel items. + + This field is a member of `oneof`_ ``_size_system``. + size_types (MutableSequence[str]): + The cut of the item. It can be used to represent combined + size types for apparel items. Maximum two of size types can + be provided (see + [https://support.google.com/merchants/answer/6324497](size + type)). + taxes (MutableSequence[google.shopping.merchant_products_v1beta.types.Tax]): + Tax information. + tax_category (str): + The tax category of the product, used to + configure detailed tax nexus in account-level + tax settings. + + This field is a member of `oneof`_ ``_tax_category``. + energy_efficiency_class (str): + The energy efficiency class as defined in EU + directive 2010/30/EU. + + This field is a member of `oneof`_ ``_energy_efficiency_class``. + min_energy_efficiency_class (str): + The energy efficiency class as defined in EU + directive 2010/30/EU. + + This field is a member of `oneof`_ ``_min_energy_efficiency_class``. + max_energy_efficiency_class (str): + The energy efficiency class as defined in EU + directive 2010/30/EU. + + This field is a member of `oneof`_ ``_max_energy_efficiency_class``. + unit_pricing_measure (google.shopping.merchant_products_v1beta.types.UnitPricingMeasure): + The measure and dimension of an item. + unit_pricing_base_measure (google.shopping.merchant_products_v1beta.types.UnitPricingBaseMeasure): + The preference of the denominator of the unit + price. + multipack (int): + The number of identical products in a + merchant-defined multipack. + + This field is a member of `oneof`_ ``_multipack``. + ads_grouping (str): + Used to group items in an arbitrary way. Only + for CPA%, discouraged otherwise. + + This field is a member of `oneof`_ ``_ads_grouping``. + ads_labels (MutableSequence[str]): + Similar to ads_grouping, but only works on CPC. + ads_redirect (str): + Allows advertisers to override the item URL + when the product is shown within the context of + Product ads. + + This field is a member of `oneof`_ ``_ads_redirect``. + cost_of_goods_sold (google.shopping.type.types.Price): + Cost of goods sold. Used for gross profit + reporting. + product_details (MutableSequence[google.shopping.merchant_products_v1beta.types.ProductDetail]): + Technical specification or additional product + details. + product_highlights (MutableSequence[str]): + Bullet points describing the most relevant + highlights of a product. + display_ads_id (str): + An identifier for an item for dynamic + remarketing campaigns. + + This field is a member of `oneof`_ ``_display_ads_id``. + display_ads_similar_ids (MutableSequence[str]): + Advertiser-specified recommendations. + display_ads_title (str): + Title of an item for dynamic remarketing + campaigns. + + This field is a member of `oneof`_ ``_display_ads_title``. + display_ads_link (str): + URL directly to your item's landing page for + dynamic remarketing campaigns. + + This field is a member of `oneof`_ ``_display_ads_link``. + display_ads_value (float): + Offer margin for dynamic remarketing + campaigns. + + This field is a member of `oneof`_ ``_display_ads_value``. + promotion_ids (MutableSequence[str]): + The unique ID of a promotion. + pickup_method (str): + The pick up option for the item. + + This field is a member of `oneof`_ ``_pickup_method``. + pickup_sla (str): + Item store pickup timeline. 
+ + This field is a member of `oneof`_ ``_pickup_sla``. + link_template (str): + Link template for merchant hosted local + storefront. + + This field is a member of `oneof`_ ``_link_template``. + mobile_link_template (str): + Link template for merchant hosted local + storefront optimized for mobile devices. + + This field is a member of `oneof`_ ``_mobile_link_template``. + custom_label_0 (str): + Custom label 0 for custom grouping of items + in a Shopping campaign. + + This field is a member of `oneof`_ ``_custom_label_0``. + custom_label_1 (str): + Custom label 1 for custom grouping of items + in a Shopping campaign. + + This field is a member of `oneof`_ ``_custom_label_1``. + custom_label_2 (str): + Custom label 2 for custom grouping of items + in a Shopping campaign. + + This field is a member of `oneof`_ ``_custom_label_2``. + custom_label_3 (str): + Custom label 3 for custom grouping of items + in a Shopping campaign. + + This field is a member of `oneof`_ ``_custom_label_3``. + custom_label_4 (str): + Custom label 4 for custom grouping of items + in a Shopping campaign. + + This field is a member of `oneof`_ ``_custom_label_4``. + included_destinations (MutableSequence[str]): + The list of destinations to include for this target + (corresponds to checked check boxes in Merchant Center). + Default destinations are always included unless provided in + ``excludedDestinations``. + excluded_destinations (MutableSequence[str]): + The list of destinations to exclude for this + target (corresponds to unchecked check boxes in + Merchant Center). + shopping_ads_excluded_countries (MutableSequence[str]): + List of country codes (ISO 3166-1 alpha-2) to + exclude the offer from Shopping Ads destination. + Countries from this list are removed from + countries configured in data source settings. + external_seller_id (str): + Required for multi-seller accounts. Use this + attribute if you're a marketplace uploading + products for various sellers to your + multi-seller account. + + This field is a member of `oneof`_ ``_external_seller_id``. + pause (str): + Publication of this item will be temporarily + `paused `__. + + This field is a member of `oneof`_ ``_pause``. + lifestyle_image_links (MutableSequence[str]): + Additional URLs of lifestyle images of the item, used to + explicitly identify images that showcase your item in a + real-world context. See the `Help Center + article `__ + for more information. + cloud_export_additional_properties (MutableSequence[google.shopping.merchant_products_v1beta.types.CloudExportAdditionalProperties]): + Extra fields to export to the Cloud Retail + program. + virtual_model_link (str): + URL of the 3D image of the item. See the `Help Center + article `__ + for more information. + + This field is a member of `oneof`_ ``_virtual_model_link``. + certifications (MutableSequence[google.shopping.merchant_products_v1beta.types.Certification]): + Product Certifications, for example for energy efficiency + labeling of products recorded in the `EU + EPREL `__ database. + See the `Help + Center `__ + article for more information. + structured_title (google.shopping.merchant_products_v1beta.types.ProductStructuredTitle): + Structured title, for algorithmically + (AI)-generated titles. + + This field is a member of `oneof`_ ``_structured_title``. + structured_description (google.shopping.merchant_products_v1beta.types.ProductStructuredDescription): + Structured description, for algorithmically + (AI)-generated descriptions. 
+ + This field is a member of `oneof`_ ``_structured_description``. + auto_pricing_min_price (google.shopping.type.types.Price): + A safeguard in the "Automated Discounts" + (https://support.google.com/merchants/answer/10295759) + and "Dynamic Promotions" + (https://support.google.com/merchants/answer/13949249) + projects, ensuring that discounts on merchants' + offers do not fall below this value, thereby + preserving the offer's value and profitability. + """ + + identifier_exists: bool = proto.Field( + proto.BOOL, + number=4, + optional=True, + ) + is_bundle: bool = proto.Field( + proto.BOOL, + number=5, + optional=True, + ) + title: str = proto.Field( + proto.STRING, + number=6, + optional=True, + ) + description: str = proto.Field( + proto.STRING, + number=7, + optional=True, + ) + link: str = proto.Field( + proto.STRING, + number=8, + optional=True, + ) + mobile_link: str = proto.Field( + proto.STRING, + number=9, + optional=True, + ) + canonical_link: str = proto.Field( + proto.STRING, + number=10, + optional=True, + ) + image_link: str = proto.Field( + proto.STRING, + number=11, + optional=True, + ) + additional_image_links: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=12, + ) + expiration_date: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=16, + message=timestamp_pb2.Timestamp, + ) + disclosure_date: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=79, + message=timestamp_pb2.Timestamp, + ) + adult: bool = proto.Field( + proto.BOOL, + number=17, + optional=True, + ) + age_group: str = proto.Field( + proto.STRING, + number=18, + optional=True, + ) + availability: str = proto.Field( + proto.STRING, + number=19, + optional=True, + ) + availability_date: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=20, + message=timestamp_pb2.Timestamp, + ) + brand: str = proto.Field( + proto.STRING, + number=21, + optional=True, + ) + color: str = proto.Field( + proto.STRING, + number=22, + optional=True, + ) + condition: str = proto.Field( + proto.STRING, + number=23, + optional=True, + ) + gender: str = proto.Field( + proto.STRING, + number=24, + optional=True, + ) + google_product_category: str = proto.Field( + proto.STRING, + number=25, + optional=True, + ) + gtin: str = proto.Field( + proto.STRING, + number=26, + optional=True, + ) + item_group_id: str = proto.Field( + proto.STRING, + number=27, + optional=True, + ) + material: str = proto.Field( + proto.STRING, + number=28, + optional=True, + ) + mpn: str = proto.Field( + proto.STRING, + number=29, + optional=True, + ) + pattern: str = proto.Field( + proto.STRING, + number=30, + optional=True, + ) + price: types.Price = proto.Field( + proto.MESSAGE, + number=31, + message=types.Price, + ) + installment: "Installment" = proto.Field( + proto.MESSAGE, + number=32, + message="Installment", + ) + subscription_cost: "SubscriptionCost" = proto.Field( + proto.MESSAGE, + number=33, + message="SubscriptionCost", + ) + loyalty_points: "LoyaltyPoints" = proto.Field( + proto.MESSAGE, + number=34, + message="LoyaltyPoints", + ) + loyalty_programs: MutableSequence["LoyaltyProgram"] = proto.RepeatedField( + proto.MESSAGE, + number=136, + message="LoyaltyProgram", + ) + product_types: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=35, + ) + sale_price: types.Price = proto.Field( + proto.MESSAGE, + number=36, + message=types.Price, + ) + 
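    # Illustrative sketch (not part of the generated source): one way a caller
    # might populate a few of the attribute fields defined in this message.
    # It assumes the message is exposed as `merchant_products_v1beta.Attributes`
    # and that `Price` is importable from `google.shopping.type`; the values
    # shown are hypothetical.
    #
    #   from google.shopping import merchant_products_v1beta
    #   from google.shopping.type import Price
    #
    #   attributes = merchant_products_v1beta.Attributes(
    #       title="Classic cotton polo shirt",
    #       brand="ExampleBrand",
    #       gtin="3234567890126",
    #       price=Price(amount_micros=19_990_000, currency_code="USD"),
    #   )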
sale_price_effective_date: interval_pb2.Interval = proto.Field( + proto.MESSAGE, + number=37, + message=interval_pb2.Interval, + ) + sell_on_google_quantity: int = proto.Field( + proto.INT64, + number=38, + optional=True, + ) + product_height: "ProductDimension" = proto.Field( + proto.MESSAGE, + number=119, + message="ProductDimension", + ) + product_length: "ProductDimension" = proto.Field( + proto.MESSAGE, + number=120, + message="ProductDimension", + ) + product_width: "ProductDimension" = proto.Field( + proto.MESSAGE, + number=121, + message="ProductDimension", + ) + product_weight: "ProductWeight" = proto.Field( + proto.MESSAGE, + number=122, + message="ProductWeight", + ) + shipping: MutableSequence["Shipping"] = proto.RepeatedField( + proto.MESSAGE, + number=39, + message="Shipping", + ) + free_shipping_threshold: MutableSequence[ + "FreeShippingThreshold" + ] = proto.RepeatedField( + proto.MESSAGE, + number=135, + message="FreeShippingThreshold", + ) + shipping_weight: "ShippingWeight" = proto.Field( + proto.MESSAGE, + number=40, + message="ShippingWeight", + ) + shipping_length: "ShippingDimension" = proto.Field( + proto.MESSAGE, + number=41, + message="ShippingDimension", + ) + shipping_width: "ShippingDimension" = proto.Field( + proto.MESSAGE, + number=42, + message="ShippingDimension", + ) + shipping_height: "ShippingDimension" = proto.Field( + proto.MESSAGE, + number=43, + message="ShippingDimension", + ) + max_handling_time: int = proto.Field( + proto.INT64, + number=44, + optional=True, + ) + min_handling_time: int = proto.Field( + proto.INT64, + number=45, + optional=True, + ) + shipping_label: str = proto.Field( + proto.STRING, + number=46, + optional=True, + ) + transit_time_label: str = proto.Field( + proto.STRING, + number=47, + optional=True, + ) + size: str = proto.Field( + proto.STRING, + number=48, + optional=True, + ) + size_system: str = proto.Field( + proto.STRING, + number=49, + optional=True, + ) + size_types: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=50, + ) + taxes: MutableSequence["Tax"] = proto.RepeatedField( + proto.MESSAGE, + number=51, + message="Tax", + ) + tax_category: str = proto.Field( + proto.STRING, + number=52, + optional=True, + ) + energy_efficiency_class: str = proto.Field( + proto.STRING, + number=53, + optional=True, + ) + min_energy_efficiency_class: str = proto.Field( + proto.STRING, + number=54, + optional=True, + ) + max_energy_efficiency_class: str = proto.Field( + proto.STRING, + number=55, + optional=True, + ) + unit_pricing_measure: "UnitPricingMeasure" = proto.Field( + proto.MESSAGE, + number=56, + message="UnitPricingMeasure", + ) + unit_pricing_base_measure: "UnitPricingBaseMeasure" = proto.Field( + proto.MESSAGE, + number=57, + message="UnitPricingBaseMeasure", + ) + multipack: int = proto.Field( + proto.INT64, + number=58, + optional=True, + ) + ads_grouping: str = proto.Field( + proto.STRING, + number=59, + optional=True, + ) + ads_labels: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=60, + ) + ads_redirect: str = proto.Field( + proto.STRING, + number=61, + optional=True, + ) + cost_of_goods_sold: types.Price = proto.Field( + proto.MESSAGE, + number=62, + message=types.Price, + ) + product_details: MutableSequence["ProductDetail"] = proto.RepeatedField( + proto.MESSAGE, + number=63, + message="ProductDetail", + ) + product_highlights: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=64, + ) + display_ads_id: str = proto.Field( + proto.STRING, + number=65, + 
optional=True, + ) + display_ads_similar_ids: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=66, + ) + display_ads_title: str = proto.Field( + proto.STRING, + number=67, + optional=True, + ) + display_ads_link: str = proto.Field( + proto.STRING, + number=68, + optional=True, + ) + display_ads_value: float = proto.Field( + proto.DOUBLE, + number=69, + optional=True, + ) + promotion_ids: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=70, + ) + pickup_method: str = proto.Field( + proto.STRING, + number=80, + optional=True, + ) + pickup_sla: str = proto.Field( + proto.STRING, + number=81, + optional=True, + ) + link_template: str = proto.Field( + proto.STRING, + number=82, + optional=True, + ) + mobile_link_template: str = proto.Field( + proto.STRING, + number=83, + optional=True, + ) + custom_label_0: str = proto.Field( + proto.STRING, + number=71, + optional=True, + ) + custom_label_1: str = proto.Field( + proto.STRING, + number=72, + optional=True, + ) + custom_label_2: str = proto.Field( + proto.STRING, + number=73, + optional=True, + ) + custom_label_3: str = proto.Field( + proto.STRING, + number=74, + optional=True, + ) + custom_label_4: str = proto.Field( + proto.STRING, + number=75, + optional=True, + ) + included_destinations: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=76, + ) + excluded_destinations: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=77, + ) + shopping_ads_excluded_countries: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=78, + ) + external_seller_id: str = proto.Field( + proto.STRING, + number=1, + optional=True, + ) + pause: str = proto.Field( + proto.STRING, + number=13, + optional=True, + ) + lifestyle_image_links: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=14, + ) + cloud_export_additional_properties: MutableSequence[ + "CloudExportAdditionalProperties" + ] = proto.RepeatedField( + proto.MESSAGE, + number=84, + message="CloudExportAdditionalProperties", + ) + virtual_model_link: str = proto.Field( + proto.STRING, + number=130, + optional=True, + ) + certifications: MutableSequence["Certification"] = proto.RepeatedField( + proto.MESSAGE, + number=123, + message="Certification", + ) + structured_title: "ProductStructuredTitle" = proto.Field( + proto.MESSAGE, + number=132, + optional=True, + message="ProductStructuredTitle", + ) + structured_description: "ProductStructuredDescription" = proto.Field( + proto.MESSAGE, + number=133, + optional=True, + message="ProductStructuredDescription", + ) + auto_pricing_min_price: types.Price = proto.Field( + proto.MESSAGE, + number=124, + message=types.Price, + ) + + +class Tax(proto.Message): + r"""The Tax of the product. + + Attributes: + rate (float): + The percentage of tax rate that applies to + the item price. + country (str): + The country within which the item is taxed, specified as a + `CLDR territory + code `__. + region (str): + The geographic region to which the tax rate + applies. + tax_ship (bool): + Set to true if tax is charged on shipping. + location_id (int): + The numeric ID of a location that the tax rate applies to as + defined in the `AdWords + API `__. + postal_code (str): + The postal code range that the tax rate applies to, + represented by a ZIP code, a ZIP code prefix using \* + wildcard, a range between two ZIP codes or two ZIP code + prefixes of equal length. Examples: 94114, 94*, 94002-95460, + 94*-95*. 
+ """ + + rate: float = proto.Field( + proto.DOUBLE, + number=1, + ) + country: str = proto.Field( + proto.STRING, + number=2, + ) + region: str = proto.Field( + proto.STRING, + number=3, + ) + tax_ship: bool = proto.Field( + proto.BOOL, + number=4, + ) + location_id: int = proto.Field( + proto.INT64, + number=5, + ) + postal_code: str = proto.Field( + proto.STRING, + number=6, + ) + + +class ShippingWeight(proto.Message): + r"""The ShippingWeight of the product. + + Attributes: + value (float): + The weight of the product used to calculate + the shipping cost of the item. + unit (str): + The unit of value. + """ + + value: float = proto.Field( + proto.DOUBLE, + number=1, + ) + unit: str = proto.Field( + proto.STRING, + number=2, + ) + + +class ShippingDimension(proto.Message): + r"""The ShippingDimension of the product. + + Attributes: + value (float): + The dimension of the product used to + calculate the shipping cost of the item. + unit (str): + The unit of value. + """ + + value: float = proto.Field( + proto.DOUBLE, + number=1, + ) + unit: str = proto.Field( + proto.STRING, + number=2, + ) + + +class UnitPricingBaseMeasure(proto.Message): + r"""The UnitPricingBaseMeasure of the product. + + Attributes: + value (int): + The denominator of the unit price. + unit (str): + The unit of the denominator. + """ + + value: int = proto.Field( + proto.INT64, + number=1, + ) + unit: str = proto.Field( + proto.STRING, + number=2, + ) + + +class UnitPricingMeasure(proto.Message): + r"""The UnitPricingMeasure of the product. + + Attributes: + value (float): + The measure of an item. + unit (str): + The unit of the measure. + """ + + value: float = proto.Field( + proto.DOUBLE, + number=1, + ) + unit: str = proto.Field( + proto.STRING, + number=2, + ) + + +class SubscriptionCost(proto.Message): + r"""The SubscriptionCost of the product. + + Attributes: + period (google.shopping.merchant_products_v1beta.types.SubscriptionPeriod): + The type of subscription period. Supported values are: + + - "``month``" + - "``year``". + period_length (int): + The number of subscription periods the buyer + has to pay. + amount (google.shopping.type.types.Price): + The amount the buyer has to pay per + subscription period. + """ + + period: "SubscriptionPeriod" = proto.Field( + proto.ENUM, + number=1, + enum="SubscriptionPeriod", + ) + period_length: int = proto.Field( + proto.INT64, + number=2, + ) + amount: types.Price = proto.Field( + proto.MESSAGE, + number=3, + message=types.Price, + ) + + +class Installment(proto.Message): + r"""A message that represents installment. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + months (int): + The number of installments the buyer has to + pay. + amount (google.shopping.type.types.Price): + The amount the buyer has to pay per month. + downpayment (google.shopping.type.types.Price): + The up-front down payment amount the buyer + has to pay. + + This field is a member of `oneof`_ ``_downpayment``. + credit_type (str): + Type of installment payments. Supported values are: + + - "``finance``" + - "``lease``". + + This field is a member of `oneof`_ ``_credit_type``. 
+ """ + + months: int = proto.Field( + proto.INT64, + number=1, + ) + amount: types.Price = proto.Field( + proto.MESSAGE, + number=2, + message=types.Price, + ) + downpayment: types.Price = proto.Field( + proto.MESSAGE, + number=3, + optional=True, + message=types.Price, + ) + credit_type: str = proto.Field( + proto.STRING, + number=4, + optional=True, + ) + + +class LoyaltyPoints(proto.Message): + r"""A message that represents loyalty points. + + Attributes: + name (str): + Name of loyalty points program. It is + recommended to limit the name to 12 full-width + characters or 24 Roman characters. + points_value (int): + The retailer's loyalty points in absolute + value. + ratio (float): + The ratio of a point when converted to + currency. Google assumes currency based on + Merchant Center settings. If ratio is left out, + it defaults to 1.0. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + points_value: int = proto.Field( + proto.INT64, + number=2, + ) + ratio: float = proto.Field( + proto.DOUBLE, + number=3, + ) + + +class LoyaltyProgram(proto.Message): + r"""A message that represents loyalty program. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + program_label (str): + The label of the loyalty program. This is an + internal label that uniquely identifies the + relationship between a merchant entity and a + loyalty program entity. The label must be + provided so that the system can associate the + assets below (for example, price and points) + with a merchant. The corresponding program must + be linked to the merchant account. + + This field is a member of `oneof`_ ``_program_label``. + tier_label (str): + The label of the tier within the loyalty + program. Must match one of the labels within the + program. + + This field is a member of `oneof`_ ``_tier_label``. + price (google.shopping.type.types.Price): + The price for members of the given tier, that + is, the instant discount price. Must be smaller + or equal to the regular price. + + This field is a member of `oneof`_ ``_price``. + cashback_for_future_use (google.shopping.type.types.Price): + The cashback that can be used for future + purchases. + + This field is a member of `oneof`_ ``_cashback_for_future_use``. + loyalty_points (int): + The amount of loyalty points earned on a + purchase. + + This field is a member of `oneof`_ ``_loyalty_points``. + """ + + program_label: str = proto.Field( + proto.STRING, + number=1, + optional=True, + ) + tier_label: str = proto.Field( + proto.STRING, + number=2, + optional=True, + ) + price: types.Price = proto.Field( + proto.MESSAGE, + number=3, + optional=True, + message=types.Price, + ) + cashback_for_future_use: types.Price = proto.Field( + proto.MESSAGE, + number=4, + optional=True, + message=types.Price, + ) + loyalty_points: int = proto.Field( + proto.INT64, + number=5, + optional=True, + ) + + +class Shipping(proto.Message): + r"""The Shipping of the product. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + price (google.shopping.type.types.Price): + Fixed shipping price, represented as a + number. + country (str): + The `CLDR territory + code `__ + of the country to which an item will ship. + region (str): + The geographic region to which a shipping rate applies. 
See + `region `__ + for more information. + service (str): + A free-form description of the service class + or delivery speed. + location_id (int): + The numeric ID of a location that the shipping rate applies + to as defined in the `AdWords + API `__. + location_group_name (str): + The location where the shipping is + applicable, represented by a location group + name. + postal_code (str): + The postal code range that the shipping rate applies to, + represented by a postal code, a postal code prefix followed + by a \* wildcard, a range between two postal codes or two + postal code prefixes of equal length. + min_handling_time (int): + Minimum handling time (inclusive) between when the order is + received and shipped in business days. 0 means that the + order is shipped on the same day as it is received if it + happens before the cut-off time. + [minHandlingTime][google.shopping.content.bundles.Products.Shipping.min_handling_time] + can only be present together with + [maxHandlingTime][google.shopping.content.bundles.Products.Shipping.max_handling_time]; + but it is not required if + [maxHandlingTime][google.shopping.content.bundles.Products.Shipping.max_handling_time] + is present. + + This field is a member of `oneof`_ ``_min_handling_time``. + max_handling_time (int): + Maximum handling time (inclusive) between when the order is + received and shipped in business days. 0 means that the + order is shipped on the same day as it is received if it + happens before the cut-off time. Both + [maxHandlingTime][google.shopping.content.bundles.Products.Shipping.max_handling_time] + and + [maxTransitTime][google.shopping.content.bundles.Products.Shipping.max_transit_time] + are required if providing shipping speeds. + [minHandlingTime][google.shopping.content.bundles.Products.Shipping.min_handling_time] + is optional if + [maxHandlingTime][google.shopping.content.bundles.Products.Shipping.max_handling_time] + is present. + + This field is a member of `oneof`_ ``_max_handling_time``. + min_transit_time (int): + Minimum transit time (inclusive) between when the order has + shipped and when it is delivered in business days. 0 means + that the order is delivered on the same day as it ships. + [minTransitTime][google.shopping.content.bundles.Products.Shipping.min_transit_time] + can only be present together with + [maxTransitTime][google.shopping.content.bundles.Products.Shipping.max_transit_time]; + but it is not required if + [maxTransitTime][google.shopping.content.bundles.Products.Shipping.max_transit_time] + is present. + + This field is a member of `oneof`_ ``_min_transit_time``. + max_transit_time (int): + Maximum transit time (inclusive) between when the order has + shipped and when it is delivered in business days. 0 means + that the order is delivered on the same day as it ships. + Both + [maxHandlingTime][google.shopping.content.bundles.Products.Shipping.max_handling_time] + and + [maxTransitTime][google.shopping.content.bundles.Products.Shipping.max_transit_time] + are required if providing shipping speeds. + [minTransitTime][google.shopping.content.bundles.Products.Shipping.min_transit_time] + is optional if + [maxTransitTime][google.shopping.content.bundles.Products.Shipping.max_transit_time] + is present. + + This field is a member of `oneof`_ ``_max_transit_time``. 
+ """ + + price: types.Price = proto.Field( + proto.MESSAGE, + number=1, + message=types.Price, + ) + country: str = proto.Field( + proto.STRING, + number=2, + ) + region: str = proto.Field( + proto.STRING, + number=3, + ) + service: str = proto.Field( + proto.STRING, + number=4, + ) + location_id: int = proto.Field( + proto.INT64, + number=5, + ) + location_group_name: str = proto.Field( + proto.STRING, + number=6, + ) + postal_code: str = proto.Field( + proto.STRING, + number=7, + ) + min_handling_time: int = proto.Field( + proto.INT64, + number=8, + optional=True, + ) + max_handling_time: int = proto.Field( + proto.INT64, + number=9, + optional=True, + ) + min_transit_time: int = proto.Field( + proto.INT64, + number=10, + optional=True, + ) + max_transit_time: int = proto.Field( + proto.INT64, + number=11, + optional=True, + ) + + +class FreeShippingThreshold(proto.Message): + r"""Conditions to be met for a product to have free shipping. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + country (str): + The `CLDR territory + code `__ + of the country to which an item will ship. + + This field is a member of `oneof`_ ``_country``. + price_threshold (google.shopping.type.types.Price): + The minimum product price for the shipping + cost to become free. Represented as a number. + + This field is a member of `oneof`_ ``_price_threshold``. + """ + + country: str = proto.Field( + proto.STRING, + number=1, + optional=True, + ) + price_threshold: types.Price = proto.Field( + proto.MESSAGE, + number=2, + optional=True, + message=types.Price, + ) + + +class ProductDetail(proto.Message): + r"""The product details. + + Attributes: + section_name (str): + The section header used to group a set of + product details. + attribute_name (str): + The name of the product detail. + attribute_value (str): + The value of the product detail. + """ + + section_name: str = proto.Field( + proto.STRING, + number=1, + ) + attribute_name: str = proto.Field( + proto.STRING, + number=2, + ) + attribute_value: str = proto.Field( + proto.STRING, + number=3, + ) + + +class Certification(proto.Message): + r"""Product + `certification `__, + initially introduced for EU energy efficiency labeling compliance + using the EU EPREL database. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + certification_authority (str): + The certification authority, for example + "European_Commission". Maximum length is 2000 characters. + + This field is a member of `oneof`_ ``_certification_authority``. + certification_name (str): + The name of the certification, for example + "EPREL". Maximum length is 2000 characters. + + This field is a member of `oneof`_ ``_certification_name``. + certification_code (str): + The certification code. + Maximum length is 2000 characters. + + This field is a member of `oneof`_ ``_certification_code``. + certification_value (str): + The certification value (also known as class, + level or grade), for example "A+", "C", "gold". + Maximum length is 2000 characters. + + This field is a member of `oneof`_ ``_certification_value``. 
+ """ + + certification_authority: str = proto.Field( + proto.STRING, + number=1, + optional=True, + ) + certification_name: str = proto.Field( + proto.STRING, + number=2, + optional=True, + ) + certification_code: str = proto.Field( + proto.STRING, + number=3, + optional=True, + ) + certification_value: str = proto.Field( + proto.STRING, + number=4, + optional=True, + ) + + +class ProductStructuredTitle(proto.Message): + r"""Structured title, for algorithmically (AI)-generated titles. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + digital_source_type (str): + The digital source type, for example + "trained_algorithmic_media". Following + `IPTC `__. + Maximum length is 40 characters. + + This field is a member of `oneof`_ ``_digital_source_type``. + content (str): + The title text + Maximum length is 150 characters + + This field is a member of `oneof`_ ``_content``. + """ + + digital_source_type: str = proto.Field( + proto.STRING, + number=1, + optional=True, + ) + content: str = proto.Field( + proto.STRING, + number=2, + optional=True, + ) + + +class ProductStructuredDescription(proto.Message): + r"""Structured description, for algorithmically (AI)-generated + descriptions. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + digital_source_type (str): + The digital source type, for example + "trained_algorithmic_media". Following + `IPTC `__. + Maximum length is 40 characters. + + This field is a member of `oneof`_ ``_digital_source_type``. + content (str): + The description text + Maximum length is 5000 characters + + This field is a member of `oneof`_ ``_content``. + """ + + digital_source_type: str = proto.Field( + proto.STRING, + number=1, + optional=True, + ) + content: str = proto.Field( + proto.STRING, + number=2, + optional=True, + ) + + +class ProductDimension(proto.Message): + r"""The dimension of the product. + + Attributes: + value (float): + Required. The dimension value represented as + a number. The value can have a maximum precision + of four decimal places. + unit (str): + Required. The dimension units. Acceptable values are: + + - "``in``" + - "``cm``". + """ + + value: float = proto.Field( + proto.DOUBLE, + number=1, + ) + unit: str = proto.Field( + proto.STRING, + number=2, + ) + + +class ProductWeight(proto.Message): + r"""The weight of the product. + + Attributes: + value (float): + Required. The weight represented as a number. + The weight can have a maximum precision of four + decimal places. + unit (str): + Required. The weight unit. Acceptable values are: + + - "``g``" + - "``kg``" + - "``oz``" + - "``lb``". + """ + + value: float = proto.Field( + proto.DOUBLE, + number=1, + ) + unit: str = proto.Field( + proto.STRING, + number=2, + ) + + +class ProductStatus(proto.Message): + r"""The status of a product, data validation issues, that is, + information about a product computed asynchronously. + + Attributes: + destination_statuses (MutableSequence[google.shopping.merchant_products_v1beta.types.ProductStatus.DestinationStatus]): + The intended destinations for the product. + item_level_issues (MutableSequence[google.shopping.merchant_products_v1beta.types.ProductStatus.ItemLevelIssue]): + A list of all issues associated with the + product. 
+ creation_date (google.protobuf.timestamp_pb2.Timestamp): + Date on which the item has been created, in `ISO + 8601 `__ format. + last_update_date (google.protobuf.timestamp_pb2.Timestamp): + Date on which the item has been last updated, in `ISO + 8601 `__ format. + google_expiration_date (google.protobuf.timestamp_pb2.Timestamp): + Date on which the item expires, in `ISO + 8601 `__ format. + """ + + class DestinationStatus(proto.Message): + r"""The destination status of the product status. + + Attributes: + reporting_context (google.shopping.type.types.ReportingContext.ReportingContextEnum): + The name of the reporting context. + approved_countries (MutableSequence[str]): + List of country codes (ISO 3166-1 alpha-2) + where the offer is approved. + pending_countries (MutableSequence[str]): + List of country codes (ISO 3166-1 alpha-2) + where the offer is pending approval. + disapproved_countries (MutableSequence[str]): + List of country codes (ISO 3166-1 alpha-2) + where the offer is disapproved. + """ + + reporting_context: types.ReportingContext.ReportingContextEnum = proto.Field( + proto.ENUM, + number=1, + enum=types.ReportingContext.ReportingContextEnum, + ) + approved_countries: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=2, + ) + pending_countries: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=3, + ) + disapproved_countries: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=4, + ) + + class ItemLevelIssue(proto.Message): + r"""The ItemLevelIssue of the product status. + + Attributes: + code (str): + The error code of the issue. + severity (google.shopping.merchant_products_v1beta.types.ProductStatus.ItemLevelIssue.Severity): + How this issue affects serving of the offer. + resolution (str): + Whether the issue can be resolved by the + merchant. + attribute (str): + The attribute's name, if the issue is caused + by a single attribute. + reporting_context (google.shopping.type.types.ReportingContext.ReportingContextEnum): + The reporting context the issue applies to. + description (str): + A short issue description in English. + detail (str): + A detailed issue description in English. + documentation (str): + The URL of a web page to help with resolving + this issue. + applicable_countries (MutableSequence[str]): + List of country codes (ISO 3166-1 alpha-2) + where issue applies to the offer. + """ + + class Severity(proto.Enum): + r"""How the issue affects the serving of the product. + + Values: + SEVERITY_UNSPECIFIED (0): + Not specified. + NOT_IMPACTED (1): + This issue represents a warning and does not + have a direct affect on the product. + DEMOTED (2): + The product is demoted and most likely have + limited performance in search results + DISAPPROVED (3): + Issue disapproves the product. 
+ """ + SEVERITY_UNSPECIFIED = 0 + NOT_IMPACTED = 1 + DEMOTED = 2 + DISAPPROVED = 3 + + code: str = proto.Field( + proto.STRING, + number=1, + ) + severity: "ProductStatus.ItemLevelIssue.Severity" = proto.Field( + proto.ENUM, + number=2, + enum="ProductStatus.ItemLevelIssue.Severity", + ) + resolution: str = proto.Field( + proto.STRING, + number=3, + ) + attribute: str = proto.Field( + proto.STRING, + number=4, + ) + reporting_context: types.ReportingContext.ReportingContextEnum = proto.Field( + proto.ENUM, + number=5, + enum=types.ReportingContext.ReportingContextEnum, + ) + description: str = proto.Field( + proto.STRING, + number=6, + ) + detail: str = proto.Field( + proto.STRING, + number=7, + ) + documentation: str = proto.Field( + proto.STRING, + number=8, + ) + applicable_countries: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=9, + ) + + destination_statuses: MutableSequence[DestinationStatus] = proto.RepeatedField( + proto.MESSAGE, + number=3, + message=DestinationStatus, + ) + item_level_issues: MutableSequence[ItemLevelIssue] = proto.RepeatedField( + proto.MESSAGE, + number=4, + message=ItemLevelIssue, + ) + creation_date: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=5, + message=timestamp_pb2.Timestamp, + ) + last_update_date: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=6, + message=timestamp_pb2.Timestamp, + ) + google_expiration_date: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=7, + message=timestamp_pb2.Timestamp, + ) + + +class CloudExportAdditionalProperties(proto.Message): + r"""Product property for the Cloud Retail API. + For example, properties for a TV product could be + "Screen-Resolution" or "Screen-Size". + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + property_name (str): + Name of the given property. For example, + "Screen-Resolution" for a TV product. Maximum + string size is 256 characters. + + This field is a member of `oneof`_ ``_property_name``. + text_value (MutableSequence[str]): + Text value of the given property. For + example, "8K(UHD)" could be a text value for a + TV product. Maximum repeatedness of this value + is 400. Values are stored in an arbitrary but + consistent order. Maximum string size is 256 + characters. + bool_value (bool): + Boolean value of the given property. For + example for a TV product, "True" or "False" if + the screen is UHD. + + This field is a member of `oneof`_ ``_bool_value``. + int_value (MutableSequence[int]): + Integer values of the given property. For + example, 1080 for a TV product's Screen + Resolution. Maximum repeatedness of this value + is 400. Values are stored in an arbitrary but + consistent order. + float_value (MutableSequence[float]): + Float values of the given property. For + example for a TV product 1.2345. Maximum + repeatedness of this value is 400. Values are + stored in an arbitrary but consistent order. + min_value (float): + Minimum float value of the given property. + For example for a TV product 1.00. + + This field is a member of `oneof`_ ``_min_value``. + max_value (float): + Maximum float value of the given property. + For example for a TV product 100.00. + + This field is a member of `oneof`_ ``_max_value``. + unit_code (str): + Unit of the given property. For example, + "Pixels" for a TV product. Maximum string size + is 256B. 
+ + This field is a member of `oneof`_ ``_unit_code``. + """ + + property_name: str = proto.Field( + proto.STRING, + number=1, + optional=True, + ) + text_value: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=2, + ) + bool_value: bool = proto.Field( + proto.BOOL, + number=3, + optional=True, + ) + int_value: MutableSequence[int] = proto.RepeatedField( + proto.INT64, + number=4, + ) + float_value: MutableSequence[float] = proto.RepeatedField( + proto.FLOAT, + number=5, + ) + min_value: float = proto.Field( + proto.FLOAT, + number=6, + optional=True, + ) + max_value: float = proto.Field( + proto.FLOAT, + number=7, + optional=True, + ) + unit_code: str = proto.Field( + proto.STRING, + number=8, + optional=True, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-shopping-merchant-products/mypy.ini b/packages/google-shopping-merchant-products/mypy.ini new file mode 100644 index 000000000000..574c5aed394b --- /dev/null +++ b/packages/google-shopping-merchant-products/mypy.ini @@ -0,0 +1,3 @@ +[mypy] +python_version = 3.7 +namespace_packages = True diff --git a/packages/google-shopping-merchant-products/noxfile.py b/packages/google-shopping-merchant-products/noxfile.py new file mode 100644 index 000000000000..1e6cd48d0529 --- /dev/null +++ b/packages/google-shopping-merchant-products/noxfile.py @@ -0,0 +1,428 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Generated by synthtool. DO NOT EDIT! 
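# Typical invocations for the sessions defined below (requires nox, for
# example `python3 -m pip install nox`); sessions parametrized by Python
# version are addressed as `<session>-<version>`:
#
#   nox -s lint            # black --check plus flake8 over the lint paths
#   nox -s unit-3.12       # unit tests under Python 3.12 with coverage
#   nox -s system-3.12     # system tests (skipped when RUN_SYSTEM_TESTS=false)
#   nox -s docs            # build the Sphinx HTML documentation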
+ +from __future__ import absolute_import + +import os +import pathlib +import re +import shutil +from typing import Dict, List +import warnings + +import nox + +BLACK_VERSION = "black[jupyter]==23.7.0" +ISORT_VERSION = "isort==5.11.0" + +LINT_PATHS = ["docs", "google", "tests", "noxfile.py", "setup.py"] + + +DEFAULT_PYTHON_VERSION = "3.10" + +UNIT_TEST_PYTHON_VERSIONS: List[str] = ["3.7", "3.8", "3.9", "3.10", "3.11", "3.12"] +UNIT_TEST_STANDARD_DEPENDENCIES = [ + "mock", + "asyncmock", + "pytest", + "pytest-cov", + "pytest-asyncio", +] +UNIT_TEST_EXTERNAL_DEPENDENCIES: List[str] = [] +UNIT_TEST_LOCAL_DEPENDENCIES: List[str] = [] +UNIT_TEST_DEPENDENCIES: List[str] = [] +UNIT_TEST_EXTRAS: List[str] = [] +UNIT_TEST_EXTRAS_BY_PYTHON: Dict[str, List[str]] = {} + +SYSTEM_TEST_PYTHON_VERSIONS: List[str] = ["3.8", "3.9", "3.10", "3.11", "3.12"] +SYSTEM_TEST_STANDARD_DEPENDENCIES = [ + "mock", + "pytest", + "google-cloud-testutils", +] +SYSTEM_TEST_EXTERNAL_DEPENDENCIES: List[str] = [] +SYSTEM_TEST_LOCAL_DEPENDENCIES: List[str] = [] +SYSTEM_TEST_DEPENDENCIES: List[str] = [] +SYSTEM_TEST_EXTRAS: List[str] = [] +SYSTEM_TEST_EXTRAS_BY_PYTHON: Dict[str, List[str]] = {} + +CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute() + +# 'docfx' is excluded since it only needs to run in 'docs-presubmit' +nox.options.sessions = [ + "unit", + "system", + "cover", + "lint", + "lint_setup_py", + "blacken", + "docs", +] + +# Error if a python version is missing +nox.options.error_on_missing_interpreters = True + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def lint(session): + """Run linters. + + Returns a failure if the linters find linting errors or sufficiently + serious code quality issues. + """ + session.install("flake8", BLACK_VERSION) + session.run( + "black", + "--check", + *LINT_PATHS, + ) + + session.run("flake8", "google", "tests") + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def blacken(session): + """Run black. Format code to uniform standard.""" + session.install(BLACK_VERSION) + session.run( + "black", + *LINT_PATHS, + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def format(session): + """ + Run isort to sort imports. Then run black + to format code to uniform standard. + """ + session.install(BLACK_VERSION, ISORT_VERSION) + # Use the --fss option to sort imports using strict alphabetical order. + # See https://pycqa.github.io/isort/docs/configuration/options.html#force-sort-within-sections + session.run( + "isort", + "--fss", + *LINT_PATHS, + ) + session.run( + "black", + *LINT_PATHS, + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def lint_setup_py(session): + """Verify that setup.py is valid (including RST check).""" + session.install("docutils", "pygments") + session.run("python", "setup.py", "check", "--restructuredtext", "--strict") + + +def install_unittest_dependencies(session, *constraints): + standard_deps = UNIT_TEST_STANDARD_DEPENDENCIES + UNIT_TEST_DEPENDENCIES + session.install(*standard_deps, *constraints) + + if UNIT_TEST_EXTERNAL_DEPENDENCIES: + warnings.warn( + "'unit_test_external_dependencies' is deprecated. 
Instead, please " + "use 'unit_test_dependencies' or 'unit_test_local_dependencies'.", + DeprecationWarning, + ) + session.install(*UNIT_TEST_EXTERNAL_DEPENDENCIES, *constraints) + + if UNIT_TEST_LOCAL_DEPENDENCIES: + session.install(*UNIT_TEST_LOCAL_DEPENDENCIES, *constraints) + + if UNIT_TEST_EXTRAS_BY_PYTHON: + extras = UNIT_TEST_EXTRAS_BY_PYTHON.get(session.python, []) + elif UNIT_TEST_EXTRAS: + extras = UNIT_TEST_EXTRAS + else: + extras = [] + + if extras: + session.install("-e", f".[{','.join(extras)}]", *constraints) + else: + session.install("-e", ".", *constraints) + + +def default(session): + # Install all test dependencies, then install this package in-place. + + constraints_path = str( + CURRENT_DIRECTORY / "testing" / f"constraints-{session.python}.txt" + ) + install_unittest_dependencies(session, "-c", constraints_path) + + # Run py.test against the unit tests. + session.run( + "py.test", + "--quiet", + f"--junitxml=unit_{session.python}_sponge_log.xml", + "--cov=google", + "--cov=tests/unit", + "--cov-append", + "--cov-config=.coveragerc", + "--cov-report=", + "--cov-fail-under=0", + os.path.join("tests", "unit"), + *session.posargs, + ) + + +@nox.session(python=UNIT_TEST_PYTHON_VERSIONS) +def unit(session): + """Run the unit test suite.""" + default(session) + + +def install_systemtest_dependencies(session, *constraints): + # Use pre-release gRPC for system tests. + # Exclude version 1.52.0rc1 which has a known issue. + # See https://github.com/grpc/grpc/issues/32163 + session.install("--pre", "grpcio!=1.52.0rc1") + + session.install(*SYSTEM_TEST_STANDARD_DEPENDENCIES, *constraints) + + if SYSTEM_TEST_EXTERNAL_DEPENDENCIES: + session.install(*SYSTEM_TEST_EXTERNAL_DEPENDENCIES, *constraints) + + if SYSTEM_TEST_LOCAL_DEPENDENCIES: + session.install("-e", *SYSTEM_TEST_LOCAL_DEPENDENCIES, *constraints) + + if SYSTEM_TEST_DEPENDENCIES: + session.install("-e", *SYSTEM_TEST_DEPENDENCIES, *constraints) + + if SYSTEM_TEST_EXTRAS_BY_PYTHON: + extras = SYSTEM_TEST_EXTRAS_BY_PYTHON.get(session.python, []) + elif SYSTEM_TEST_EXTRAS: + extras = SYSTEM_TEST_EXTRAS + else: + extras = [] + + if extras: + session.install("-e", f".[{','.join(extras)}]", *constraints) + else: + session.install("-e", ".", *constraints) + + +@nox.session(python=SYSTEM_TEST_PYTHON_VERSIONS) +def system(session): + """Run the system test suite.""" + constraints_path = str( + CURRENT_DIRECTORY / "testing" / f"constraints-{session.python}.txt" + ) + system_test_path = os.path.join("tests", "system.py") + system_test_folder_path = os.path.join("tests", "system") + + # Check the value of `RUN_SYSTEM_TESTS` env var. It defaults to true. + if os.environ.get("RUN_SYSTEM_TESTS", "true") == "false": + session.skip("RUN_SYSTEM_TESTS is set to false, skipping") + # Install pyopenssl for mTLS testing. + if os.environ.get("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") == "true": + session.install("pyopenssl") + + system_test_exists = os.path.exists(system_test_path) + system_test_folder_exists = os.path.exists(system_test_folder_path) + # Sanity check: only run tests if found. + if not system_test_exists and not system_test_folder_exists: + session.skip("System tests were not found") + + install_systemtest_dependencies(session, "-c", constraints_path) + + # Run py.test against the system tests. 
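    # Anything passed after `--` on the nox command line is forwarded to
    # py.test below through `session.posargs`, for example:
    #   nox -s system-3.12 -- -k my_test_name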
+ if system_test_exists: + session.run( + "py.test", + "--quiet", + f"--junitxml=system_{session.python}_sponge_log.xml", + system_test_path, + *session.posargs, + ) + if system_test_folder_exists: + session.run( + "py.test", + "--quiet", + f"--junitxml=system_{session.python}_sponge_log.xml", + system_test_folder_path, + *session.posargs, + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def cover(session): + """Run the final coverage report. + + This outputs the coverage report aggregating coverage from the unit + test runs (not system test runs), and then erases coverage data. + """ + session.install("coverage", "pytest-cov") + session.run("coverage", "report", "--show-missing", "--fail-under=100") + + session.run("coverage", "erase") + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def docs(session): + """Build the docs for this library.""" + + session.install("-e", ".") + session.install( + # We need to pin to specific versions of the `sphinxcontrib-*` packages + # which still support sphinx 4.x. + # See https://github.com/googleapis/sphinx-docfx-yaml/issues/344 + # and https://github.com/googleapis/sphinx-docfx-yaml/issues/345. + "sphinxcontrib-applehelp==1.0.4", + "sphinxcontrib-devhelp==1.0.2", + "sphinxcontrib-htmlhelp==2.0.1", + "sphinxcontrib-qthelp==1.0.3", + "sphinxcontrib-serializinghtml==1.1.5", + "sphinx==4.5.0", + "alabaster", + "recommonmark", + ) + + shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) + session.run( + "sphinx-build", + "-W", # warnings as errors + "-T", # show full traceback on exception + "-N", # no colors + "-b", + "html", + "-d", + os.path.join("docs", "_build", "doctrees", ""), + os.path.join("docs", ""), + os.path.join("docs", "_build", "html", ""), + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def docfx(session): + """Build the docfx yaml files for this library.""" + + session.install("-e", ".") + session.install( + # We need to pin to specific versions of the `sphinxcontrib-*` packages + # which still support sphinx 4.x. + # See https://github.com/googleapis/sphinx-docfx-yaml/issues/344 + # and https://github.com/googleapis/sphinx-docfx-yaml/issues/345. 
+ "sphinxcontrib-applehelp==1.0.4", + "sphinxcontrib-devhelp==1.0.2", + "sphinxcontrib-htmlhelp==2.0.1", + "sphinxcontrib-qthelp==1.0.3", + "sphinxcontrib-serializinghtml==1.1.5", + "gcp-sphinx-docfx-yaml", + "alabaster", + "recommonmark", + ) + + shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) + session.run( + "sphinx-build", + "-T", # show full traceback on exception + "-N", # no colors + "-D", + ( + "extensions=sphinx.ext.autodoc," + "sphinx.ext.autosummary," + "docfx_yaml.extension," + "sphinx.ext.intersphinx," + "sphinx.ext.coverage," + "sphinx.ext.napoleon," + "sphinx.ext.todo," + "sphinx.ext.viewcode," + "recommonmark" + ), + "-b", + "html", + "-d", + os.path.join("docs", "_build", "doctrees", ""), + os.path.join("docs", ""), + os.path.join("docs", "_build", "html", ""), + ) + + +@nox.session(python="3.12") +def prerelease_deps(session): + """Run all tests with prerelease versions of dependencies installed.""" + + # Install all dependencies + session.install("-e", ".[all, tests, tracing]") + unit_deps_all = UNIT_TEST_STANDARD_DEPENDENCIES + UNIT_TEST_EXTERNAL_DEPENDENCIES + session.install(*unit_deps_all) + system_deps_all = ( + SYSTEM_TEST_STANDARD_DEPENDENCIES + + SYSTEM_TEST_EXTERNAL_DEPENDENCIES + + SYSTEM_TEST_EXTRAS + ) + session.install(*system_deps_all) + + # Because we test minimum dependency versions on the minimum Python + # version, the first version we test with in the unit tests sessions has a + # constraints file containing all dependencies and extras. + with open( + CURRENT_DIRECTORY + / "testing" + / f"constraints-{UNIT_TEST_PYTHON_VERSIONS[0]}.txt", + encoding="utf-8", + ) as constraints_file: + constraints_text = constraints_file.read() + + # Ignore leading whitespace and comment lines. + constraints_deps = [ + match.group(1) + for match in re.finditer( + r"^\s*(\S+)(?===\S+)", constraints_text, flags=re.MULTILINE + ) + ] + + session.install(*constraints_deps) + + prerel_deps = [ + "protobuf", + # dependency of grpc + "six", + "googleapis-common-protos", + # Exclude version 1.52.0rc1 which has a known issue. 
See https://github.com/grpc/grpc/issues/32163 + "grpcio!=1.52.0rc1", + "grpcio-status", + "google-api-core", + "google-auth", + "proto-plus", + "google-cloud-testutils", + # dependencies of google-cloud-testutils" + "click", + ] + + for dep in prerel_deps: + session.install("--pre", "--no-deps", "--upgrade", dep) + + # Remaining dependencies + other_deps = [ + "requests", + ] + session.install(*other_deps) + + # Print out prerelease package versions + session.run( + "python", "-c", "import google.protobuf; print(google.protobuf.__version__)" + ) + session.run("python", "-c", "import grpc; print(grpc.__version__)") + session.run("python", "-c", "import google.auth; print(google.auth.__version__)") + + session.run("py.test", "tests/unit") diff --git a/packages/google-shopping-merchant-products/samples/generated_samples/merchantapi_v1beta_generated_product_inputs_service_delete_product_input_async.py b/packages/google-shopping-merchant-products/samples/generated_samples/merchantapi_v1beta_generated_product_inputs_service_delete_product_input_async.py new file mode 100644 index 000000000000..6674700d5aad --- /dev/null +++ b/packages/google-shopping-merchant-products/samples/generated_samples/merchantapi_v1beta_generated_product_inputs_service_delete_product_input_async.py @@ -0,0 +1,51 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteProductInput +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-shopping-merchant-products + + +# [START merchantapi_v1beta_generated_ProductInputsService_DeleteProductInput_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.shopping import merchant_products_v1beta + + +async def sample_delete_product_input(): + # Create a client + client = merchant_products_v1beta.ProductInputsServiceAsyncClient() + + # Initialize request argument(s) + request = merchant_products_v1beta.DeleteProductInputRequest( + name="name_value", + data_source="data_source_value", + ) + + # Make the request + await client.delete_product_input(request=request) + + +# [END merchantapi_v1beta_generated_ProductInputsService_DeleteProductInput_async] diff --git a/packages/google-shopping-merchant-products/samples/generated_samples/merchantapi_v1beta_generated_product_inputs_service_delete_product_input_sync.py b/packages/google-shopping-merchant-products/samples/generated_samples/merchantapi_v1beta_generated_product_inputs_service_delete_product_input_sync.py new file mode 100644 index 000000000000..9124f5eb48f6 --- /dev/null +++ b/packages/google-shopping-merchant-products/samples/generated_samples/merchantapi_v1beta_generated_product_inputs_service_delete_product_input_sync.py @@ -0,0 +1,51 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteProductInput +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-shopping-merchant-products + + +# [START merchantapi_v1beta_generated_ProductInputsService_DeleteProductInput_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.shopping import merchant_products_v1beta + + +def sample_delete_product_input(): + # Create a client + client = merchant_products_v1beta.ProductInputsServiceClient() + + # Initialize request argument(s) + request = merchant_products_v1beta.DeleteProductInputRequest( + name="name_value", + data_source="data_source_value", + ) + + # Make the request + client.delete_product_input(request=request) + + +# [END merchantapi_v1beta_generated_ProductInputsService_DeleteProductInput_sync] diff --git a/packages/google-shopping-merchant-products/samples/generated_samples/merchantapi_v1beta_generated_product_inputs_service_insert_product_input_async.py b/packages/google-shopping-merchant-products/samples/generated_samples/merchantapi_v1beta_generated_product_inputs_service_insert_product_input_async.py new file mode 100644 index 000000000000..f42f63bd9437 --- /dev/null +++ b/packages/google-shopping-merchant-products/samples/generated_samples/merchantapi_v1beta_generated_product_inputs_service_insert_product_input_async.py @@ -0,0 +1,60 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for InsertProductInput +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-shopping-merchant-products + + +# [START merchantapi_v1beta_generated_ProductInputsService_InsertProductInput_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.shopping import merchant_products_v1beta + + +async def sample_insert_product_input(): + # Create a client + client = merchant_products_v1beta.ProductInputsServiceAsyncClient() + + # Initialize request argument(s) + product_input = merchant_products_v1beta.ProductInput() + product_input.channel = "LOCAL" + product_input.offer_id = "offer_id_value" + product_input.content_language = "content_language_value" + product_input.feed_label = "feed_label_value" + + request = merchant_products_v1beta.InsertProductInputRequest( + parent="parent_value", + product_input=product_input, + data_source="data_source_value", + ) + + # Make the request + response = await client.insert_product_input(request=request) + + # Handle the response + print(response) + +# [END merchantapi_v1beta_generated_ProductInputsService_InsertProductInput_async] diff --git a/packages/google-shopping-merchant-products/samples/generated_samples/merchantapi_v1beta_generated_product_inputs_service_insert_product_input_sync.py b/packages/google-shopping-merchant-products/samples/generated_samples/merchantapi_v1beta_generated_product_inputs_service_insert_product_input_sync.py new file mode 100644 index 000000000000..2a339ed09b3c --- /dev/null +++ b/packages/google-shopping-merchant-products/samples/generated_samples/merchantapi_v1beta_generated_product_inputs_service_insert_product_input_sync.py @@ -0,0 +1,60 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for InsertProductInput +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-shopping-merchant-products + + +# [START merchantapi_v1beta_generated_ProductInputsService_InsertProductInput_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.shopping import merchant_products_v1beta + + +def sample_insert_product_input(): + # Create a client + client = merchant_products_v1beta.ProductInputsServiceClient() + + # Initialize request argument(s) + product_input = merchant_products_v1beta.ProductInput() + product_input.channel = "LOCAL" + product_input.offer_id = "offer_id_value" + product_input.content_language = "content_language_value" + product_input.feed_label = "feed_label_value" + + request = merchant_products_v1beta.InsertProductInputRequest( + parent="parent_value", + product_input=product_input, + data_source="data_source_value", + ) + + # Make the request + response = client.insert_product_input(request=request) + + # Handle the response + print(response) + +# [END merchantapi_v1beta_generated_ProductInputsService_InsertProductInput_sync] diff --git a/packages/google-shopping-merchant-products/samples/generated_samples/merchantapi_v1beta_generated_products_service_get_product_async.py b/packages/google-shopping-merchant-products/samples/generated_samples/merchantapi_v1beta_generated_products_service_get_product_async.py new file mode 100644 index 000000000000..6f176867a7f1 --- /dev/null +++ b/packages/google-shopping-merchant-products/samples/generated_samples/merchantapi_v1beta_generated_products_service_get_product_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetProduct +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-shopping-merchant-products + + +# [START merchantapi_v1beta_generated_ProductsService_GetProduct_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.shopping import merchant_products_v1beta + + +async def sample_get_product(): + # Create a client + client = merchant_products_v1beta.ProductsServiceAsyncClient() + + # Initialize request argument(s) + request = merchant_products_v1beta.GetProductRequest( + name="name_value", + ) + + # Make the request + response = await client.get_product(request=request) + + # Handle the response + print(response) + +# [END merchantapi_v1beta_generated_ProductsService_GetProduct_async] diff --git a/packages/google-shopping-merchant-products/samples/generated_samples/merchantapi_v1beta_generated_products_service_get_product_sync.py b/packages/google-shopping-merchant-products/samples/generated_samples/merchantapi_v1beta_generated_products_service_get_product_sync.py new file mode 100644 index 000000000000..9cbb8a0c4bba --- /dev/null +++ b/packages/google-shopping-merchant-products/samples/generated_samples/merchantapi_v1beta_generated_products_service_get_product_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetProduct +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-shopping-merchant-products + + +# [START merchantapi_v1beta_generated_ProductsService_GetProduct_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.shopping import merchant_products_v1beta + + +def sample_get_product(): + # Create a client + client = merchant_products_v1beta.ProductsServiceClient() + + # Initialize request argument(s) + request = merchant_products_v1beta.GetProductRequest( + name="name_value", + ) + + # Make the request + response = client.get_product(request=request) + + # Handle the response + print(response) + +# [END merchantapi_v1beta_generated_ProductsService_GetProduct_sync] diff --git a/packages/google-shopping-merchant-products/samples/generated_samples/merchantapi_v1beta_generated_products_service_list_products_async.py b/packages/google-shopping-merchant-products/samples/generated_samples/merchantapi_v1beta_generated_products_service_list_products_async.py new file mode 100644 index 000000000000..2c618af2956b --- /dev/null +++ b/packages/google-shopping-merchant-products/samples/generated_samples/merchantapi_v1beta_generated_products_service_list_products_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListProducts +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-shopping-merchant-products + + +# [START merchantapi_v1beta_generated_ProductsService_ListProducts_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.shopping import merchant_products_v1beta + + +async def sample_list_products(): + # Create a client + client = merchant_products_v1beta.ProductsServiceAsyncClient() + + # Initialize request argument(s) + request = merchant_products_v1beta.ListProductsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_products(request=request) + + # Handle the response + async for response in page_result: + print(response) + +# [END merchantapi_v1beta_generated_ProductsService_ListProducts_async] diff --git a/packages/google-shopping-merchant-products/samples/generated_samples/merchantapi_v1beta_generated_products_service_list_products_sync.py b/packages/google-shopping-merchant-products/samples/generated_samples/merchantapi_v1beta_generated_products_service_list_products_sync.py new file mode 100644 index 000000000000..b76624178a59 --- /dev/null +++ b/packages/google-shopping-merchant-products/samples/generated_samples/merchantapi_v1beta_generated_products_service_list_products_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListProducts +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-shopping-merchant-products + + +# [START merchantapi_v1beta_generated_ProductsService_ListProducts_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.shopping import merchant_products_v1beta + + +def sample_list_products(): + # Create a client + client = merchant_products_v1beta.ProductsServiceClient() + + # Initialize request argument(s) + request = merchant_products_v1beta.ListProductsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_products(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END merchantapi_v1beta_generated_ProductsService_ListProducts_sync] diff --git a/packages/google-shopping-merchant-products/samples/generated_samples/snippet_metadata_google.shopping.merchant.products.v1beta.json b/packages/google-shopping-merchant-products/samples/generated_samples/snippet_metadata_google.shopping.merchant.products.v1beta.json new file mode 100644 index 000000000000..10f94f8cadc7 --- /dev/null +++ b/packages/google-shopping-merchant-products/samples/generated_samples/snippet_metadata_google.shopping.merchant.products.v1beta.json @@ -0,0 +1,645 @@ +{ + "clientLibrary": { + "apis": [ + { + "id": "google.shopping.merchant.products.v1beta", + "version": "v1beta" + } + ], + "language": "PYTHON", + "name": "google-shopping-merchant-products", + "version": "0.1.0" + }, + "snippets": [ + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.shopping.merchant_products_v1beta.ProductInputsServiceAsyncClient", + "shortName": "ProductInputsServiceAsyncClient" + }, + "fullName": "google.shopping.merchant_products_v1beta.ProductInputsServiceAsyncClient.delete_product_input", + "method": { + "fullName": "google.shopping.merchant.products.v1beta.ProductInputsService.DeleteProductInput", + "service": { + "fullName": "google.shopping.merchant.products.v1beta.ProductInputsService", + "shortName": "ProductInputsService" + }, + "shortName": "DeleteProductInput" + }, + "parameters": [ + { + "name": "request", + "type": "google.shopping.merchant_products_v1beta.types.DeleteProductInputRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "shortName": "delete_product_input" + }, + "description": "Sample for DeleteProductInput", + "file": "merchantapi_v1beta_generated_product_inputs_service_delete_product_input_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "merchantapi_v1beta_generated_ProductInputsService_DeleteProductInput_async", + "segments": [ + { + "end": 50, + "start": 27, + "type": "FULL" + }, + { + "end": 50, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "merchantapi_v1beta_generated_product_inputs_service_delete_product_input_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.shopping.merchant_products_v1beta.ProductInputsServiceClient", + "shortName": "ProductInputsServiceClient" + }, + "fullName": 
"google.shopping.merchant_products_v1beta.ProductInputsServiceClient.delete_product_input", + "method": { + "fullName": "google.shopping.merchant.products.v1beta.ProductInputsService.DeleteProductInput", + "service": { + "fullName": "google.shopping.merchant.products.v1beta.ProductInputsService", + "shortName": "ProductInputsService" + }, + "shortName": "DeleteProductInput" + }, + "parameters": [ + { + "name": "request", + "type": "google.shopping.merchant_products_v1beta.types.DeleteProductInputRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "shortName": "delete_product_input" + }, + "description": "Sample for DeleteProductInput", + "file": "merchantapi_v1beta_generated_product_inputs_service_delete_product_input_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "merchantapi_v1beta_generated_ProductInputsService_DeleteProductInput_sync", + "segments": [ + { + "end": 50, + "start": 27, + "type": "FULL" + }, + { + "end": 50, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "merchantapi_v1beta_generated_product_inputs_service_delete_product_input_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.shopping.merchant_products_v1beta.ProductInputsServiceAsyncClient", + "shortName": "ProductInputsServiceAsyncClient" + }, + "fullName": "google.shopping.merchant_products_v1beta.ProductInputsServiceAsyncClient.insert_product_input", + "method": { + "fullName": "google.shopping.merchant.products.v1beta.ProductInputsService.InsertProductInput", + "service": { + "fullName": "google.shopping.merchant.products.v1beta.ProductInputsService", + "shortName": "ProductInputsService" + }, + "shortName": "InsertProductInput" + }, + "parameters": [ + { + "name": "request", + "type": "google.shopping.merchant_products_v1beta.types.InsertProductInputRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.shopping.merchant_products_v1beta.types.ProductInput", + "shortName": "insert_product_input" + }, + "description": "Sample for InsertProductInput", + "file": "merchantapi_v1beta_generated_product_inputs_service_insert_product_input_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "merchantapi_v1beta_generated_ProductInputsService_InsertProductInput_async", + "segments": [ + { + "end": 59, + "start": 27, + "type": "FULL" + }, + { + "end": 59, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 53, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 56, + "start": 54, + "type": "REQUEST_EXECUTION" + }, + { + "end": 60, + "start": 57, + "type": "RESPONSE_HANDLING" + } + ], + "title": "merchantapi_v1beta_generated_product_inputs_service_insert_product_input_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": 
"google.shopping.merchant_products_v1beta.ProductInputsServiceClient", + "shortName": "ProductInputsServiceClient" + }, + "fullName": "google.shopping.merchant_products_v1beta.ProductInputsServiceClient.insert_product_input", + "method": { + "fullName": "google.shopping.merchant.products.v1beta.ProductInputsService.InsertProductInput", + "service": { + "fullName": "google.shopping.merchant.products.v1beta.ProductInputsService", + "shortName": "ProductInputsService" + }, + "shortName": "InsertProductInput" + }, + "parameters": [ + { + "name": "request", + "type": "google.shopping.merchant_products_v1beta.types.InsertProductInputRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.shopping.merchant_products_v1beta.types.ProductInput", + "shortName": "insert_product_input" + }, + "description": "Sample for InsertProductInput", + "file": "merchantapi_v1beta_generated_product_inputs_service_insert_product_input_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "merchantapi_v1beta_generated_ProductInputsService_InsertProductInput_sync", + "segments": [ + { + "end": 59, + "start": 27, + "type": "FULL" + }, + { + "end": 59, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 53, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 56, + "start": 54, + "type": "REQUEST_EXECUTION" + }, + { + "end": 60, + "start": 57, + "type": "RESPONSE_HANDLING" + } + ], + "title": "merchantapi_v1beta_generated_product_inputs_service_insert_product_input_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.shopping.merchant_products_v1beta.ProductsServiceAsyncClient", + "shortName": "ProductsServiceAsyncClient" + }, + "fullName": "google.shopping.merchant_products_v1beta.ProductsServiceAsyncClient.get_product", + "method": { + "fullName": "google.shopping.merchant.products.v1beta.ProductsService.GetProduct", + "service": { + "fullName": "google.shopping.merchant.products.v1beta.ProductsService", + "shortName": "ProductsService" + }, + "shortName": "GetProduct" + }, + "parameters": [ + { + "name": "request", + "type": "google.shopping.merchant_products_v1beta.types.GetProductRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.shopping.merchant_products_v1beta.types.Product", + "shortName": "get_product" + }, + "description": "Sample for GetProduct", + "file": "merchantapi_v1beta_generated_products_service_get_product_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "merchantapi_v1beta_generated_ProductsService_GetProduct_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "merchantapi_v1beta_generated_products_service_get_product_async.py" + }, + { + "canonical": 
true, + "clientMethod": { + "client": { + "fullName": "google.shopping.merchant_products_v1beta.ProductsServiceClient", + "shortName": "ProductsServiceClient" + }, + "fullName": "google.shopping.merchant_products_v1beta.ProductsServiceClient.get_product", + "method": { + "fullName": "google.shopping.merchant.products.v1beta.ProductsService.GetProduct", + "service": { + "fullName": "google.shopping.merchant.products.v1beta.ProductsService", + "shortName": "ProductsService" + }, + "shortName": "GetProduct" + }, + "parameters": [ + { + "name": "request", + "type": "google.shopping.merchant_products_v1beta.types.GetProductRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.shopping.merchant_products_v1beta.types.Product", + "shortName": "get_product" + }, + "description": "Sample for GetProduct", + "file": "merchantapi_v1beta_generated_products_service_get_product_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "merchantapi_v1beta_generated_ProductsService_GetProduct_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "merchantapi_v1beta_generated_products_service_get_product_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.shopping.merchant_products_v1beta.ProductsServiceAsyncClient", + "shortName": "ProductsServiceAsyncClient" + }, + "fullName": "google.shopping.merchant_products_v1beta.ProductsServiceAsyncClient.list_products", + "method": { + "fullName": "google.shopping.merchant.products.v1beta.ProductsService.ListProducts", + "service": { + "fullName": "google.shopping.merchant.products.v1beta.ProductsService", + "shortName": "ProductsService" + }, + "shortName": "ListProducts" + }, + "parameters": [ + { + "name": "request", + "type": "google.shopping.merchant_products_v1beta.types.ListProductsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.shopping.merchant_products_v1beta.services.products_service.pagers.ListProductsAsyncPager", + "shortName": "list_products" + }, + "description": "Sample for ListProducts", + "file": "merchantapi_v1beta_generated_products_service_list_products_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "merchantapi_v1beta_generated_ProductsService_ListProducts_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": 
"merchantapi_v1beta_generated_products_service_list_products_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.shopping.merchant_products_v1beta.ProductsServiceClient", + "shortName": "ProductsServiceClient" + }, + "fullName": "google.shopping.merchant_products_v1beta.ProductsServiceClient.list_products", + "method": { + "fullName": "google.shopping.merchant.products.v1beta.ProductsService.ListProducts", + "service": { + "fullName": "google.shopping.merchant.products.v1beta.ProductsService", + "shortName": "ProductsService" + }, + "shortName": "ListProducts" + }, + "parameters": [ + { + "name": "request", + "type": "google.shopping.merchant_products_v1beta.types.ListProductsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.shopping.merchant_products_v1beta.services.products_service.pagers.ListProductsPager", + "shortName": "list_products" + }, + "description": "Sample for ListProducts", + "file": "merchantapi_v1beta_generated_products_service_list_products_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "merchantapi_v1beta_generated_ProductsService_ListProducts_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "merchantapi_v1beta_generated_products_service_list_products_sync.py" + } + ] +} diff --git a/packages/google-shopping-merchant-products/scripts/decrypt-secrets.sh b/packages/google-shopping-merchant-products/scripts/decrypt-secrets.sh new file mode 100755 index 000000000000..0018b421ddf8 --- /dev/null +++ b/packages/google-shopping-merchant-products/scripts/decrypt-secrets.sh @@ -0,0 +1,46 @@ +#!/bin/bash + +# Copyright 2023 Google LLC All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )" +ROOT=$( dirname "$DIR" ) + +# Work from the project root. +cd $ROOT + +# Prevent it from overriding files. +# We recommend that sample authors use their own service account files and cloud project. +# In that case, they are supposed to prepare these files by themselves. +if [[ -f "testing/test-env.sh" ]] || \ + [[ -f "testing/service-account.json" ]] || \ + [[ -f "testing/client-secrets.json" ]]; then + echo "One or more target files exist, aborting." + exit 1 +fi + +# Use SECRET_MANAGER_PROJECT if set, fallback to cloud-devrel-kokoro-resources. 
+PROJECT_ID="${SECRET_MANAGER_PROJECT:-cloud-devrel-kokoro-resources}" + +gcloud secrets versions access latest --secret="python-docs-samples-test-env" \ + --project="${PROJECT_ID}" \ + > testing/test-env.sh +gcloud secrets versions access latest \ + --secret="python-docs-samples-service-account" \ + --project="${PROJECT_ID}" \ + > testing/service-account.json +gcloud secrets versions access latest \ + --secret="python-docs-samples-client-secrets" \ + --project="${PROJECT_ID}" \ + > testing/client-secrets.json diff --git a/packages/google-maps-mapsplatformdatasets/scripts/fixup_mapsplatformdatasets_v1alpha_keywords.py b/packages/google-shopping-merchant-products/scripts/fixup_merchant_products_v1beta_keywords.py similarity index 90% rename from packages/google-maps-mapsplatformdatasets/scripts/fixup_mapsplatformdatasets_v1alpha_keywords.py rename to packages/google-shopping-merchant-products/scripts/fixup_merchant_products_v1beta_keywords.py index b2c5c4e62f19..9ec72e193341 100644 --- a/packages/google-maps-mapsplatformdatasets/scripts/fixup_mapsplatformdatasets_v1alpha_keywords.py +++ b/packages/google-shopping-merchant-products/scripts/fixup_merchant_products_v1beta_keywords.py @@ -36,16 +36,13 @@ def partition( return results[1], results[0] -class mapsplatformdatasetsCallTransformer(cst.CSTTransformer): +class merchant_productsCallTransformer(cst.CSTTransformer): CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata') METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { - 'create_dataset': ('parent', 'dataset', ), - 'delete_dataset': ('name', 'force', ), - 'delete_dataset_version': ('name', ), - 'get_dataset': ('name', 'published_usage', ), - 'list_datasets': ('parent', 'page_size', 'page_token', ), - 'list_dataset_versions': ('name', 'page_size', 'page_token', ), - 'update_dataset_metadata': ('dataset', 'update_mask', ), + 'delete_product_input': ('name', 'data_source', ), + 'get_product': ('name', ), + 'insert_product_input': ('parent', 'product_input', 'data_source', ), + 'list_products': ('parent', 'page_size', 'page_token', ), } def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode: @@ -94,7 +91,7 @@ def fix_files( in_dir: pathlib.Path, out_dir: pathlib.Path, *, - transformer=mapsplatformdatasetsCallTransformer(), + transformer=merchant_productsCallTransformer(), ): """Duplicate the input dir to the output dir, fixing file method calls. @@ -127,7 +124,7 @@ def fix_files( if __name__ == '__main__': parser = argparse.ArgumentParser( - description="""Fix up source that uses the mapsplatformdatasets client library. + description="""Fix up source that uses the merchant_products client library. The existing sources are NOT overwritten but are copied to output_dir with changes made. diff --git a/packages/google-shopping-merchant-products/setup.py b/packages/google-shopping-merchant-products/setup.py new file mode 100644 index 000000000000..ae34425da881 --- /dev/null +++ b/packages/google-shopping-merchant-products/setup.py @@ -0,0 +1,96 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import io +import os +import re + +import setuptools # type: ignore + +package_root = os.path.abspath(os.path.dirname(__file__)) + +name = "google-shopping-merchant-products" + + +description = "Google Shopping Merchant Products API client library" + +version = None + +with open( + os.path.join(package_root, "google/shopping/merchant_products/gapic_version.py") +) as fp: + version_candidates = re.findall(r"(?<=\")\d+.\d+.\d+(?=\")", fp.read()) + assert len(version_candidates) == 1 + version = version_candidates[0] + +if version[0] == "0": + release_status = "Development Status :: 4 - Beta" +else: + release_status = "Development Status :: 5 - Production/Stable" + +dependencies = [ + "google-api-core[grpc] >= 1.34.1, <3.0.0dev,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*,!=2.8.*,!=2.9.*,!=2.10.*", + # Exclude incompatible versions of `google-auth` + # See https://github.com/googleapis/google-cloud-python/issues/12364 + "google-auth >= 2.14.1, <3.0.0dev,!=2.24.0,!=2.25.0", + "proto-plus >= 1.22.3, <2.0.0dev", + "protobuf>=3.19.5,<5.0.0dev,!=3.20.0,!=3.20.1,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", + "google-shopping-type >= 0.1.0, <1.0.0dev", +] +url = "https://github.com/googleapis/google-cloud-python/tree/main/packages/google-shopping-merchant-products" + +package_root = os.path.abspath(os.path.dirname(__file__)) + +readme_filename = os.path.join(package_root, "README.rst") +with io.open(readme_filename, encoding="utf-8") as readme_file: + readme = readme_file.read() + +packages = [ + package + for package in setuptools.find_namespace_packages() + if package.startswith("google") +] + +setuptools.setup( + name=name, + version=version, + description=description, + long_description=readme, + author="Google LLC", + author_email="googleapis-packages@google.com", + license="Apache 2.0", + url=url, + classifiers=[ + release_status, + "Intended Audience :: Developers", + "License :: OSI Approved :: Apache Software License", + "Programming Language :: Python", + "Programming Language :: Python :: 3", + "Programming Language :: Python :: 3.7", + "Programming Language :: Python :: 3.8", + "Programming Language :: Python :: 3.9", + "Programming Language :: Python :: 3.10", + "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", + "Operating System :: OS Independent", + "Topic :: Internet", + ], + platforms="Posix; MacOS X; Windows", + packages=packages, + python_requires=">=3.7", + install_requires=dependencies, + include_package_data=True, + zip_safe=False, +) diff --git a/packages/google-shopping-merchant-products/testing/.gitignore b/packages/google-shopping-merchant-products/testing/.gitignore new file mode 100644 index 000000000000..b05fbd630881 --- /dev/null +++ b/packages/google-shopping-merchant-products/testing/.gitignore @@ -0,0 +1,3 @@ +test-env.sh +service-account.json +client-secrets.json \ No newline at end of file diff --git 
a/packages/google-shopping-merchant-products/testing/constraints-3.10.txt b/packages/google-shopping-merchant-products/testing/constraints-3.10.txt new file mode 100644 index 000000000000..4cae520d02b2 --- /dev/null +++ b/packages/google-shopping-merchant-products/testing/constraints-3.10.txt @@ -0,0 +1,7 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf +google-shopping-type diff --git a/packages/google-shopping-merchant-products/testing/constraints-3.11.txt b/packages/google-shopping-merchant-products/testing/constraints-3.11.txt new file mode 100644 index 000000000000..4cae520d02b2 --- /dev/null +++ b/packages/google-shopping-merchant-products/testing/constraints-3.11.txt @@ -0,0 +1,7 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf +google-shopping-type diff --git a/packages/google-shopping-merchant-products/testing/constraints-3.12.txt b/packages/google-shopping-merchant-products/testing/constraints-3.12.txt new file mode 100644 index 000000000000..4cae520d02b2 --- /dev/null +++ b/packages/google-shopping-merchant-products/testing/constraints-3.12.txt @@ -0,0 +1,7 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf +google-shopping-type diff --git a/packages/google-shopping-merchant-products/testing/constraints-3.7.txt b/packages/google-shopping-merchant-products/testing/constraints-3.7.txt new file mode 100644 index 000000000000..26e4a9895f74 --- /dev/null +++ b/packages/google-shopping-merchant-products/testing/constraints-3.7.txt @@ -0,0 +1,11 @@ +# This constraints file is used to check that lower bounds +# are correct in setup.py +# List all library dependencies and extras in this file. +# Pin the version to the lower bound. +# e.g., if setup.py has "google-cloud-foo >= 1.14.0, < 2.0.0dev", +# Then this file should have google-cloud-foo==1.14.0 +google-api-core==1.34.1 +google-auth==2.14.1 +proto-plus==1.22.3 +protobuf==3.19.5 +google-shopping-type==0.1.0 diff --git a/packages/google-shopping-merchant-products/testing/constraints-3.8.txt b/packages/google-shopping-merchant-products/testing/constraints-3.8.txt new file mode 100644 index 000000000000..4cae520d02b2 --- /dev/null +++ b/packages/google-shopping-merchant-products/testing/constraints-3.8.txt @@ -0,0 +1,7 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf +google-shopping-type diff --git a/packages/google-shopping-merchant-products/testing/constraints-3.9.txt b/packages/google-shopping-merchant-products/testing/constraints-3.9.txt new file mode 100644 index 000000000000..4cae520d02b2 --- /dev/null +++ b/packages/google-shopping-merchant-products/testing/constraints-3.9.txt @@ -0,0 +1,7 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. 
+google-api-core +proto-plus +protobuf +google-shopping-type diff --git a/packages/google-shopping-merchant-products/tests/__init__.py b/packages/google-shopping-merchant-products/tests/__init__.py new file mode 100644 index 000000000000..8f6cf068242c --- /dev/null +++ b/packages/google-shopping-merchant-products/tests/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/packages/google-shopping-merchant-products/tests/unit/__init__.py b/packages/google-shopping-merchant-products/tests/unit/__init__.py new file mode 100644 index 000000000000..8f6cf068242c --- /dev/null +++ b/packages/google-shopping-merchant-products/tests/unit/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/packages/google-shopping-merchant-products/tests/unit/gapic/__init__.py b/packages/google-shopping-merchant-products/tests/unit/gapic/__init__.py new file mode 100644 index 000000000000..8f6cf068242c --- /dev/null +++ b/packages/google-shopping-merchant-products/tests/unit/gapic/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# diff --git a/packages/google-shopping-merchant-products/tests/unit/gapic/merchant_products_v1beta/__init__.py b/packages/google-shopping-merchant-products/tests/unit/gapic/merchant_products_v1beta/__init__.py new file mode 100644 index 000000000000..8f6cf068242c --- /dev/null +++ b/packages/google-shopping-merchant-products/tests/unit/gapic/merchant_products_v1beta/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/packages/google-shopping-merchant-products/tests/unit/gapic/merchant_products_v1beta/test_product_inputs_service.py b/packages/google-shopping-merchant-products/tests/unit/gapic/merchant_products_v1beta/test_product_inputs_service.py new file mode 100644 index 000000000000..cf2e5d896113 --- /dev/null +++ b/packages/google-shopping-merchant-products/tests/unit/gapic/merchant_products_v1beta/test_product_inputs_service.py @@ -0,0 +1,3562 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import os + +# try/except added for compatibility with python < 3.8 +try: + from unittest import mock + from unittest.mock import AsyncMock # pragma: NO COVER +except ImportError: # pragma: NO COVER + import mock + +from collections.abc import Iterable +import json +import math + +from google.api_core import gapic_v1, grpc_helpers, grpc_helpers_async, path_template +from google.api_core import api_core_version, client_options +from google.api_core import exceptions as core_exceptions +import google.auth +from google.auth import credentials as ga_credentials +from google.auth.exceptions import MutualTLSChannelError +from google.oauth2 import service_account +from google.protobuf import json_format +from google.protobuf import timestamp_pb2 # type: ignore +from google.shopping.type.types import types +from google.type import interval_pb2 # type: ignore +import grpc +from grpc.experimental import aio +from proto.marshal.rules import wrappers +from proto.marshal.rules.dates import DurationRule, TimestampRule +import pytest +from requests import PreparedRequest, Request, Response +from requests.sessions import Session + +from google.shopping.merchant_products_v1beta.services.product_inputs_service import ( + ProductInputsServiceAsyncClient, + ProductInputsServiceClient, + transports, +) +from google.shopping.merchant_products_v1beta.types import ( + productinputs, + products_common, +) + + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + + +# If default endpoint is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. +def modify_default_endpoint(client): + return ( + "foo.googleapis.com" + if ("localhost" in client.DEFAULT_ENDPOINT) + else client.DEFAULT_ENDPOINT + ) + + +# If default endpoint template is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint template so the client can produce a different +# mtls endpoint for endpoint testing purposes. 
+def modify_default_endpoint_template(client): + return ( + "test.{UNIVERSE_DOMAIN}" + if ("localhost" in client._DEFAULT_ENDPOINT_TEMPLATE) + else client._DEFAULT_ENDPOINT_TEMPLATE + ) + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert ProductInputsServiceClient._get_default_mtls_endpoint(None) is None + assert ( + ProductInputsServiceClient._get_default_mtls_endpoint(api_endpoint) + == api_mtls_endpoint + ) + assert ( + ProductInputsServiceClient._get_default_mtls_endpoint(api_mtls_endpoint) + == api_mtls_endpoint + ) + assert ( + ProductInputsServiceClient._get_default_mtls_endpoint(sandbox_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + ProductInputsServiceClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + ProductInputsServiceClient._get_default_mtls_endpoint(non_googleapi) + == non_googleapi + ) + + +def test__read_environment_variables(): + assert ProductInputsServiceClient._read_environment_variables() == ( + False, + "auto", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + assert ProductInputsServiceClient._read_environment_variables() == ( + True, + "auto", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + assert ProductInputsServiceClient._read_environment_variables() == ( + False, + "auto", + None, + ) + + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError) as excinfo: + ProductInputsServiceClient._read_environment_variables() + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + assert ProductInputsServiceClient._read_environment_variables() == ( + False, + "never", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + assert ProductInputsServiceClient._read_environment_variables() == ( + False, + "always", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}): + assert ProductInputsServiceClient._read_environment_variables() == ( + False, + "auto", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + ProductInputsServiceClient._read_environment_variables() + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + with mock.patch.dict(os.environ, {"GOOGLE_CLOUD_UNIVERSE_DOMAIN": "foo.com"}): + assert ProductInputsServiceClient._read_environment_variables() == ( + False, + "auto", + "foo.com", + ) + + +def test__get_client_cert_source(): + mock_provided_cert_source = mock.Mock() + mock_default_cert_source = mock.Mock() + + assert ProductInputsServiceClient._get_client_cert_source(None, False) is None + assert ( + ProductInputsServiceClient._get_client_cert_source( + mock_provided_cert_source, False + ) + is None + ) + assert ( + ProductInputsServiceClient._get_client_cert_source( + mock_provided_cert_source, True + ) + == mock_provided_cert_source + ) + + with mock.patch( + 
"google.auth.transport.mtls.has_default_client_cert_source", return_value=True + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=mock_default_cert_source, + ): + assert ( + ProductInputsServiceClient._get_client_cert_source(None, True) + is mock_default_cert_source + ) + assert ( + ProductInputsServiceClient._get_client_cert_source( + mock_provided_cert_source, "true" + ) + is mock_provided_cert_source + ) + + +@mock.patch.object( + ProductInputsServiceClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(ProductInputsServiceClient), +) +@mock.patch.object( + ProductInputsServiceAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(ProductInputsServiceAsyncClient), +) +def test__get_api_endpoint(): + api_override = "foo.com" + mock_client_cert_source = mock.Mock() + default_universe = ProductInputsServiceClient._DEFAULT_UNIVERSE + default_endpoint = ProductInputsServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=default_universe + ) + mock_universe = "bar.com" + mock_endpoint = ProductInputsServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=mock_universe + ) + + assert ( + ProductInputsServiceClient._get_api_endpoint( + api_override, mock_client_cert_source, default_universe, "always" + ) + == api_override + ) + assert ( + ProductInputsServiceClient._get_api_endpoint( + None, mock_client_cert_source, default_universe, "auto" + ) + == ProductInputsServiceClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + ProductInputsServiceClient._get_api_endpoint( + None, None, default_universe, "auto" + ) + == default_endpoint + ) + assert ( + ProductInputsServiceClient._get_api_endpoint( + None, None, default_universe, "always" + ) + == ProductInputsServiceClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + ProductInputsServiceClient._get_api_endpoint( + None, mock_client_cert_source, default_universe, "always" + ) + == ProductInputsServiceClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + ProductInputsServiceClient._get_api_endpoint(None, None, mock_universe, "never") + == mock_endpoint + ) + assert ( + ProductInputsServiceClient._get_api_endpoint( + None, None, default_universe, "never" + ) + == default_endpoint + ) + + with pytest.raises(MutualTLSChannelError) as excinfo: + ProductInputsServiceClient._get_api_endpoint( + None, mock_client_cert_source, mock_universe, "auto" + ) + assert ( + str(excinfo.value) + == "mTLS is not supported in any universe other than googleapis.com." + ) + + +def test__get_universe_domain(): + client_universe_domain = "foo.com" + universe_domain_env = "bar.com" + + assert ( + ProductInputsServiceClient._get_universe_domain( + client_universe_domain, universe_domain_env + ) + == client_universe_domain + ) + assert ( + ProductInputsServiceClient._get_universe_domain(None, universe_domain_env) + == universe_domain_env + ) + assert ( + ProductInputsServiceClient._get_universe_domain(None, None) + == ProductInputsServiceClient._DEFAULT_UNIVERSE + ) + + with pytest.raises(ValueError) as excinfo: + ProductInputsServiceClient._get_universe_domain("", None) + assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
+ + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + ( + ProductInputsServiceClient, + transports.ProductInputsServiceGrpcTransport, + "grpc", + ), + ( + ProductInputsServiceClient, + transports.ProductInputsServiceRestTransport, + "rest", + ), + ], +) +def test__validate_universe_domain(client_class, transport_class, transport_name): + client = client_class( + transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) + ) + assert client._validate_universe_domain() == True + + # Test the case when universe is already validated. + assert client._validate_universe_domain() == True + + if transport_name == "grpc": + # Test the case where credentials are provided by the + # `local_channel_credentials`. The default universes in both match. + channel = grpc.secure_channel( + "http://localhost/", grpc.local_channel_credentials() + ) + client = client_class(transport=transport_class(channel=channel)) + assert client._validate_universe_domain() == True + + # Test the case where credentials do not exist: e.g. a transport is provided + # with no credentials. Validation should still succeed because there is no + # mismatch with non-existent credentials. + channel = grpc.secure_channel( + "http://localhost/", grpc.local_channel_credentials() + ) + transport = transport_class(channel=channel) + transport._credentials = None + client = client_class(transport=transport) + assert client._validate_universe_domain() == True + + # TODO: This is needed to cater for older versions of google-auth + # Make this test unconditional once the minimum supported version of + # google-auth becomes 2.23.0 or higher. + google_auth_major, google_auth_minor = [ + int(part) for part in google.auth.__version__.split(".")[0:2] + ] + if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): + credentials = ga_credentials.AnonymousCredentials() + credentials._universe_domain = "foo.com" + # Test the case when there is a universe mismatch from the credentials. + client = client_class(transport=transport_class(credentials=credentials)) + with pytest.raises(ValueError) as excinfo: + client._validate_universe_domain() + assert ( + str(excinfo.value) + == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." + ) + + # Test the case when there is a universe mismatch from the client. + # + # TODO: Make this test unconditional once the minimum supported version of + # google-api-core becomes 2.15.0 or higher. + api_core_major, api_core_minor = [ + int(part) for part in api_core_version.__version__.split(".")[0:2] + ] + if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): + client = client_class( + client_options={"universe_domain": "bar.com"}, + transport=transport_class( + credentials=ga_credentials.AnonymousCredentials(), + ), + ) + with pytest.raises(ValueError) as excinfo: + client._validate_universe_domain() + assert ( + str(excinfo.value) + == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." 
+ ) + + # Test that ValueError is raised if universe_domain is provided via client options and credentials is None + with pytest.raises(ValueError): + client._compare_universes("foo.bar", None) + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (ProductInputsServiceClient, "grpc"), + (ProductInputsServiceAsyncClient, "grpc_asyncio"), + (ProductInputsServiceClient, "rest"), + ], +) +def test_product_inputs_service_client_from_service_account_info( + client_class, transport_name +): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_info" + ) as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info, transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + "merchantapi.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://merchantapi.googleapis.com" + ) + + +@pytest.mark.parametrize( + "transport_class,transport_name", + [ + (transports.ProductInputsServiceGrpcTransport, "grpc"), + (transports.ProductInputsServiceGrpcAsyncIOTransport, "grpc_asyncio"), + (transports.ProductInputsServiceRestTransport, "rest"), + ], +) +def test_product_inputs_service_client_service_account_always_use_jwt( + transport_class, transport_name +): + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) + + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=False) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (ProductInputsServiceClient, "grpc"), + (ProductInputsServiceAsyncClient, "grpc_asyncio"), + (ProductInputsServiceClient, "rest"), + ], +) +def test_product_inputs_service_client_from_service_account_file( + client_class, transport_name +): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_file" + ) as factory: + factory.return_value = creds + client = client_class.from_service_account_file( + "dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + client = client_class.from_service_account_json( + "dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + "merchantapi.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://merchantapi.googleapis.com" + ) + + +def test_product_inputs_service_client_get_transport_class(): + transport = ProductInputsServiceClient.get_transport_class() + available_transports = [ + transports.ProductInputsServiceGrpcTransport, + transports.ProductInputsServiceRestTransport, + ] + assert transport in available_transports + + transport = ProductInputsServiceClient.get_transport_class("grpc") + assert transport == 
transports.ProductInputsServiceGrpcTransport + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + ( + ProductInputsServiceClient, + transports.ProductInputsServiceGrpcTransport, + "grpc", + ), + ( + ProductInputsServiceAsyncClient, + transports.ProductInputsServiceGrpcAsyncIOTransport, + "grpc_asyncio", + ), + ( + ProductInputsServiceClient, + transports.ProductInputsServiceRestTransport, + "rest", + ), + ], +) +@mock.patch.object( + ProductInputsServiceClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(ProductInputsServiceClient), +) +@mock.patch.object( + ProductInputsServiceAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(ProductInputsServiceAsyncClient), +) +def test_product_inputs_service_client_client_options( + client_class, transport_class, transport_name +): + # Check that if channel is provided we won't create a new one. + with mock.patch.object(ProductInputsServiceClient, "get_transport_class") as gtc: + transport = transport_class(credentials=ga_credentials.AnonymousCredentials()) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object(ProductInputsServiceClient, "get_transport_class") as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. + options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name, client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. 
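+    # Any value other than "never", "auto", or "always" should be rejected:
+    # constructing the client below is expected to raise MutualTLSChannelError.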
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + client = client_class(transport=transport_name) + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError) as excinfo: + client = client_class(transport=transport_name) + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + # Check the case api_endpoint is provided + options = client_options.ClientOptions( + api_audience="https://language.googleapis.com" + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience="https://language.googleapis.com", + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,use_client_cert_env", + [ + ( + ProductInputsServiceClient, + transports.ProductInputsServiceGrpcTransport, + "grpc", + "true", + ), + ( + ProductInputsServiceAsyncClient, + transports.ProductInputsServiceGrpcAsyncIOTransport, + "grpc_asyncio", + "true", + ), + ( + ProductInputsServiceClient, + transports.ProductInputsServiceGrpcTransport, + "grpc", + "false", + ), + ( + ProductInputsServiceAsyncClient, + transports.ProductInputsServiceGrpcAsyncIOTransport, + "grpc_asyncio", + "false", + ), + ( + ProductInputsServiceClient, + transports.ProductInputsServiceRestTransport, + "rest", + "true", + ), + ( + ProductInputsServiceClient, + transports.ProductInputsServiceRestTransport, + "rest", + "false", + ), + ], +) +@mock.patch.object( + ProductInputsServiceClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(ProductInputsServiceClient), +) +@mock.patch.object( + ProductInputsServiceAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(ProductInputsServiceAsyncClient), +) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_product_inputs_service_client_mtls_env_auto( + client_class, transport_class, transport_name, use_client_cert_env +): + # This tests the endpoint autoswitch behavior. 
Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. + + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + options = client_options.ClientOptions( + client_cert_source=client_cert_source_callback + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ) + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=client_cert_source_callback, + ): + if use_client_cert_env == "false": + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ) + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case client_cert_source and ADC client cert are not provided. 
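+    # Without either certificate source, the client should fall back to the
+    # regular endpoint even when GOOGLE_API_USE_CLIENT_CERTIFICATE is "true".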
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class", [ProductInputsServiceClient, ProductInputsServiceAsyncClient] +) +@mock.patch.object( + ProductInputsServiceClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(ProductInputsServiceClient), +) +@mock.patch.object( + ProductInputsServiceAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(ProductInputsServiceAsyncClient), +) +def test_product_inputs_service_client_get_mtls_endpoint_and_cert_source(client_class): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. 
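+    # When a default client certificate is available, "auto" should resolve to the
+    # mTLS endpoint and surface the default certificate source.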
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=mock_client_cert_source, + ): + ( + api_endpoint, + cert_source, + ) = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + client_class.get_mtls_endpoint_and_cert_source() + + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError) as excinfo: + client_class.get_mtls_endpoint_and_cert_source() + + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + + +@pytest.mark.parametrize( + "client_class", [ProductInputsServiceClient, ProductInputsServiceAsyncClient] +) +@mock.patch.object( + ProductInputsServiceClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(ProductInputsServiceClient), +) +@mock.patch.object( + ProductInputsServiceAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(ProductInputsServiceAsyncClient), +) +def test_product_inputs_service_client_client_api_endpoint(client_class): + mock_client_cert_source = client_cert_source_callback + api_override = "foo.com" + default_universe = ProductInputsServiceClient._DEFAULT_UNIVERSE + default_endpoint = ProductInputsServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=default_universe + ) + mock_universe = "bar.com" + mock_endpoint = ProductInputsServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=mock_universe + ) + + # If ClientOptions.api_endpoint is set and GOOGLE_API_USE_CLIENT_CERTIFICATE="true", + # use ClientOptions.api_endpoint as the api endpoint regardless. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ): + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=api_override + ) + client = client_class( + client_options=options, + credentials=ga_credentials.AnonymousCredentials(), + ) + assert client.api_endpoint == api_override + + # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="never", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + client = client_class(credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == default_endpoint + + # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="always", + # use the DEFAULT_MTLS_ENDPOINT as the api endpoint. 
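+    # No client certificate is configured here; with "always" the mTLS endpoint is
+    # expected to be selected unconditionally.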
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + client = client_class(credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + + # If ClientOptions.api_endpoint is not set, GOOGLE_API_USE_MTLS_ENDPOINT="auto" (default), + # GOOGLE_API_USE_CLIENT_CERTIFICATE="false" (default), default cert source doesn't exist, + # and ClientOptions.universe_domain="bar.com", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with universe domain as the api endpoint. + options = client_options.ClientOptions() + universe_exists = hasattr(options, "universe_domain") + if universe_exists: + options = client_options.ClientOptions(universe_domain=mock_universe) + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + else: + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + assert client.api_endpoint == ( + mock_endpoint if universe_exists else default_endpoint + ) + assert client.universe_domain == ( + mock_universe if universe_exists else default_universe + ) + + # If ClientOptions does not have a universe domain attribute and GOOGLE_API_USE_MTLS_ENDPOINT="never", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. + options = client_options.ClientOptions() + if hasattr(options, "universe_domain"): + delattr(options, "universe_domain") + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + assert client.api_endpoint == default_endpoint + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + ( + ProductInputsServiceClient, + transports.ProductInputsServiceGrpcTransport, + "grpc", + ), + ( + ProductInputsServiceAsyncClient, + transports.ProductInputsServiceGrpcAsyncIOTransport, + "grpc_asyncio", + ), + ( + ProductInputsServiceClient, + transports.ProductInputsServiceRestTransport, + "rest", + ), + ], +) +def test_product_inputs_service_client_client_options_scopes( + client_class, transport_class, transport_name +): + # Check the case scopes are provided. + options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + ProductInputsServiceClient, + transports.ProductInputsServiceGrpcTransport, + "grpc", + grpc_helpers, + ), + ( + ProductInputsServiceAsyncClient, + transports.ProductInputsServiceGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + ( + ProductInputsServiceClient, + transports.ProductInputsServiceRestTransport, + "rest", + None, + ), + ], +) +def test_product_inputs_service_client_client_options_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. 
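+    # The file path should be forwarded to the transport as credentials_file,
+    # with credentials left as None, as asserted below.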
+ options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +def test_product_inputs_service_client_client_options_from_dict(): + with mock.patch( + "google.shopping.merchant_products_v1beta.services.product_inputs_service.transports.ProductInputsServiceGrpcTransport.__init__" + ) as grpc_transport: + grpc_transport.return_value = None + client = ProductInputsServiceClient( + client_options={"api_endpoint": "squid.clam.whelk"} + ) + grpc_transport.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + ProductInputsServiceClient, + transports.ProductInputsServiceGrpcTransport, + "grpc", + grpc_helpers, + ), + ( + ProductInputsServiceAsyncClient, + transports.ProductInputsServiceGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + ], +) +def test_product_inputs_service_client_create_channel_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. + options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # test that the credentials from file are saved and used as the credentials. 
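+    # load_credentials_from_file is mocked so no file is actually read; the
+    # credentials it returns are expected to be passed through to create_channel.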
+ with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel" + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + file_creds = ga_credentials.AnonymousCredentials() + load_creds.return_value = (file_creds, None) + adc.return_value = (creds, None) + client = client_class(client_options=options, transport=transport_name) + create_channel.assert_called_with( + "merchantapi.googleapis.com:443", + credentials=file_creds, + credentials_file=None, + quota_project_id=None, + default_scopes=("https://www.googleapis.com/auth/content",), + scopes=None, + default_host="merchantapi.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "request_type", + [ + productinputs.InsertProductInputRequest, + dict, + ], +) +def test_insert_product_input(request_type, transport: str = "grpc"): + client = ProductInputsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.insert_product_input), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = productinputs.ProductInput( + name="name_value", + product="product_value", + channel=types.Channel.ChannelEnum.ONLINE, + offer_id="offer_id_value", + content_language="content_language_value", + feed_label="feed_label_value", + version_number=1518, + ) + response = client.insert_product_input(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = productinputs.InsertProductInputRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, productinputs.ProductInput) + assert response.name == "name_value" + assert response.product == "product_value" + assert response.channel == types.Channel.ChannelEnum.ONLINE + assert response.offer_id == "offer_id_value" + assert response.content_language == "content_language_value" + assert response.feed_label == "feed_label_value" + assert response.version_number == 1518 + + +def test_insert_product_input_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ProductInputsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.insert_product_input), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.insert_product_input() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == productinputs.InsertProductInputRequest() + + +def test_insert_product_input_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = ProductInputsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = productinputs.InsertProductInputRequest( + parent="parent_value", + data_source="data_source_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.insert_product_input), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.insert_product_input(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == productinputs.InsertProductInputRequest( + parent="parent_value", + data_source="data_source_value", + ) + + +def test_insert_product_input_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ProductInputsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.insert_product_input in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.insert_product_input + ] = mock_rpc + request = {} + client.insert_product_input(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.insert_product_input(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_insert_product_input_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ProductInputsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.insert_product_input), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
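+        # FakeUnaryUnaryCall wraps the fake response so the mocked stub can be
+        # awaited like a real async gRPC call.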
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + productinputs.ProductInput( + name="name_value", + product="product_value", + channel=types.Channel.ChannelEnum.ONLINE, + offer_id="offer_id_value", + content_language="content_language_value", + feed_label="feed_label_value", + version_number=1518, + ) + ) + response = await client.insert_product_input() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == productinputs.InsertProductInputRequest() + + +@pytest.mark.asyncio +async def test_insert_product_input_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = ProductInputsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.insert_product_input + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[ + client._client._transport.insert_product_input + ] = mock_object + + request = {} + await client.insert_product_input(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + await client.insert_product_input(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + +@pytest.mark.asyncio +async def test_insert_product_input_async( + transport: str = "grpc_asyncio", + request_type=productinputs.InsertProductInputRequest, +): + client = ProductInputsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.insert_product_input), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + productinputs.ProductInput( + name="name_value", + product="product_value", + channel=types.Channel.ChannelEnum.ONLINE, + offer_id="offer_id_value", + content_language="content_language_value", + feed_label="feed_label_value", + version_number=1518, + ) + ) + response = await client.insert_product_input(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = productinputs.InsertProductInputRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, productinputs.ProductInput) + assert response.name == "name_value" + assert response.product == "product_value" + assert response.channel == types.Channel.ChannelEnum.ONLINE + assert response.offer_id == "offer_id_value" + assert response.content_language == "content_language_value" + assert response.feed_label == "feed_label_value" + assert response.version_number == 1518 + + +@pytest.mark.asyncio +async def test_insert_product_input_async_from_dict(): + await test_insert_product_input_async(request_type=dict) + + +def test_insert_product_input_field_headers(): + client = ProductInputsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = productinputs.InsertProductInputRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.insert_product_input), "__call__" + ) as call: + call.return_value = productinputs.ProductInput() + client.insert_product_input(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_insert_product_input_field_headers_async(): + client = ProductInputsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = productinputs.InsertProductInputRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.insert_product_input), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + productinputs.ProductInput() + ) + await client.insert_product_input(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.parametrize( + "request_type", + [ + productinputs.DeleteProductInputRequest, + dict, + ], +) +def test_delete_product_input(request_type, transport: str = "grpc"): + client = ProductInputsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_product_input), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.delete_product_input(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = productinputs.DeleteProductInputRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_product_input_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ProductInputsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_product_input), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.delete_product_input() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == productinputs.DeleteProductInputRequest() + + +def test_delete_product_input_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = ProductInputsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = productinputs.DeleteProductInputRequest( + name="name_value", + data_source="data_source_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_product_input), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.delete_product_input(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == productinputs.DeleteProductInputRequest( + name="name_value", + data_source="data_source_value", + ) + + +def test_delete_product_input_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ProductInputsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.delete_product_input in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.delete_product_input + ] = mock_rpc + request = {} + client.delete_product_input(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.delete_product_input(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_delete_product_input_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. 
request == None and no flattened fields passed, work. + client = ProductInputsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_product_input), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_product_input() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == productinputs.DeleteProductInputRequest() + + +@pytest.mark.asyncio +async def test_delete_product_input_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = ProductInputsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.delete_product_input + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[ + client._client._transport.delete_product_input + ] = mock_object + + request = {} + await client.delete_product_input(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + await client.delete_product_input(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + +@pytest.mark.asyncio +async def test_delete_product_input_async( + transport: str = "grpc_asyncio", + request_type=productinputs.DeleteProductInputRequest, +): + client = ProductInputsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_product_input), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_product_input(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = productinputs.DeleteProductInputRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_delete_product_input_async_from_dict(): + await test_delete_product_input_async(request_type=dict) + + +def test_delete_product_input_field_headers(): + client = ProductInputsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
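+    # The request's `name` field is expected to be propagated into the
+    # x-goog-request-params routing header, as asserted below.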
+ request = productinputs.DeleteProductInputRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_product_input), "__call__" + ) as call: + call.return_value = None + client.delete_product_input(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_delete_product_input_field_headers_async(): + client = ProductInputsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = productinputs.DeleteProductInputRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_product_input), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_product_input(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_delete_product_input_flattened(): + client = ProductInputsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_product_input), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = None + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.delete_product_input( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_delete_product_input_flattened_error(): + client = ProductInputsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_product_input( + productinputs.DeleteProductInputRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_delete_product_input_flattened_async(): + client = ProductInputsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_product_input), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = None + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
+ response = await client.delete_product_input( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_delete_product_input_flattened_error_async(): + client = ProductInputsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.delete_product_input( + productinputs.DeleteProductInputRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + productinputs.InsertProductInputRequest, + dict, + ], +) +def test_insert_product_input_rest(request_type): + client = ProductInputsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "accounts/sample1"} + request_init["product_input"] = { + "name": "name_value", + "product": "product_value", + "channel": 1, + "offer_id": "offer_id_value", + "content_language": "content_language_value", + "feed_label": "feed_label_value", + "version_number": 1518, + "attributes": { + "identifier_exists": True, + "is_bundle": True, + "title": "title_value", + "description": "description_value", + "link": "link_value", + "mobile_link": "mobile_link_value", + "canonical_link": "canonical_link_value", + "image_link": "image_link_value", + "additional_image_links": [ + "additional_image_links_value1", + "additional_image_links_value2", + ], + "expiration_date": {"seconds": 751, "nanos": 543}, + "disclosure_date": {}, + "adult": True, + "age_group": "age_group_value", + "availability": "availability_value", + "availability_date": {}, + "brand": "brand_value", + "color": "color_value", + "condition": "condition_value", + "gender": "gender_value", + "google_product_category": "google_product_category_value", + "gtin": "gtin_value", + "item_group_id": "item_group_id_value", + "material": "material_value", + "mpn": "mpn_value", + "pattern": "pattern_value", + "price": {"amount_micros": 1408, "currency_code": "currency_code_value"}, + "installment": { + "months": 665, + "amount": {}, + "downpayment": {}, + "credit_type": "credit_type_value", + }, + "subscription_cost": {"period": 1, "period_length": 1380, "amount": {}}, + "loyalty_points": { + "name": "name_value", + "points_value": 1305, + "ratio": 0.543, + }, + "loyalty_programs": [ + { + "program_label": "program_label_value", + "tier_label": "tier_label_value", + "price": {}, + "cashback_for_future_use": {}, + "loyalty_points": 1546, + } + ], + "product_types": ["product_types_value1", "product_types_value2"], + "sale_price": {}, + "sale_price_effective_date": {"start_time": {}, "end_time": {}}, + "sell_on_google_quantity": 2470, + "product_height": {"value": 0.541, "unit": "unit_value"}, + "product_length": {}, + "product_width": {}, + "product_weight": {"value": 0.541, "unit": "unit_value"}, + "shipping": [ + { + "price": {}, + "country": "country_value", + "region": "region_value", + "service": "service_value", + "location_id": 1157, + "location_group_name": "location_group_name_value", + "postal_code": "postal_code_value", + "min_handling_time": 1782, + "max_handling_time": 1784, + "min_transit_time": 1718, + "max_transit_time": 1720, + } + ], + 
"free_shipping_threshold": [ + {"country": "country_value", "price_threshold": {}} + ], + "shipping_weight": {"value": 0.541, "unit": "unit_value"}, + "shipping_length": {"value": 0.541, "unit": "unit_value"}, + "shipping_width": {}, + "shipping_height": {}, + "max_handling_time": 1784, + "min_handling_time": 1782, + "shipping_label": "shipping_label_value", + "transit_time_label": "transit_time_label_value", + "size": "size_value", + "size_system": "size_system_value", + "size_types": ["size_types_value1", "size_types_value2"], + "taxes": [ + { + "rate": 0.428, + "country": "country_value", + "region": "region_value", + "tax_ship": True, + "location_id": 1157, + "postal_code": "postal_code_value", + } + ], + "tax_category": "tax_category_value", + "energy_efficiency_class": "energy_efficiency_class_value", + "min_energy_efficiency_class": "min_energy_efficiency_class_value", + "max_energy_efficiency_class": "max_energy_efficiency_class_value", + "unit_pricing_measure": {"value": 0.541, "unit": "unit_value"}, + "unit_pricing_base_measure": {"value": 541, "unit": "unit_value"}, + "multipack": 970, + "ads_grouping": "ads_grouping_value", + "ads_labels": ["ads_labels_value1", "ads_labels_value2"], + "ads_redirect": "ads_redirect_value", + "cost_of_goods_sold": {}, + "product_details": [ + { + "section_name": "section_name_value", + "attribute_name": "attribute_name_value", + "attribute_value": "attribute_value_value", + } + ], + "product_highlights": [ + "product_highlights_value1", + "product_highlights_value2", + ], + "display_ads_id": "display_ads_id_value", + "display_ads_similar_ids": [ + "display_ads_similar_ids_value1", + "display_ads_similar_ids_value2", + ], + "display_ads_title": "display_ads_title_value", + "display_ads_link": "display_ads_link_value", + "display_ads_value": 0.1801, + "promotion_ids": ["promotion_ids_value1", "promotion_ids_value2"], + "pickup_method": "pickup_method_value", + "pickup_sla": "pickup_sla_value", + "link_template": "link_template_value", + "mobile_link_template": "mobile_link_template_value", + "custom_label_0": "custom_label_0_value", + "custom_label_1": "custom_label_1_value", + "custom_label_2": "custom_label_2_value", + "custom_label_3": "custom_label_3_value", + "custom_label_4": "custom_label_4_value", + "included_destinations": [ + "included_destinations_value1", + "included_destinations_value2", + ], + "excluded_destinations": [ + "excluded_destinations_value1", + "excluded_destinations_value2", + ], + "shopping_ads_excluded_countries": [ + "shopping_ads_excluded_countries_value1", + "shopping_ads_excluded_countries_value2", + ], + "external_seller_id": "external_seller_id_value", + "pause": "pause_value", + "lifestyle_image_links": [ + "lifestyle_image_links_value1", + "lifestyle_image_links_value2", + ], + "cloud_export_additional_properties": [ + { + "property_name": "property_name_value", + "text_value": ["text_value_value1", "text_value_value2"], + "bool_value": True, + "int_value": [968, 969], + "float_value": [0.11710000000000001, 0.11720000000000001], + "min_value": 0.96, + "max_value": 0.962, + "unit_code": "unit_code_value", + } + ], + "virtual_model_link": "virtual_model_link_value", + "certifications": [ + { + "certification_authority": "certification_authority_value", + "certification_name": "certification_name_value", + "certification_code": "certification_code_value", + "certification_value": "certification_value_value", + } + ], + "structured_title": { + "digital_source_type": "digital_source_type_value", + "content": 
"content_value", + }, + "structured_description": { + "digital_source_type": "digital_source_type_value", + "content": "content_value", + }, + "auto_pricing_min_price": {}, + }, + "custom_attributes": [ + {"name": "name_value", "value": "value_value", "group_values": {}} + ], + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = productinputs.InsertProductInputRequest.meta.fields["product_input"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["product_input"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["product_input"][field])): + del request_init["product_input"][field][i][subfield] + else: + del request_init["product_input"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = productinputs.ProductInput( + name="name_value", + product="product_value", + channel=types.Channel.ChannelEnum.ONLINE, + offer_id="offer_id_value", + content_language="content_language_value", + feed_label="feed_label_value", + version_number=1518, + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = productinputs.ProductInput.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.insert_product_input(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, productinputs.ProductInput) + assert response.name == "name_value" + assert response.product == "product_value" + assert response.channel == types.Channel.ChannelEnum.ONLINE + assert response.offer_id == "offer_id_value" + assert response.content_language == "content_language_value" + assert response.feed_label == "feed_label_value" + assert response.version_number == 1518 + + +def test_insert_product_input_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ProductInputsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.insert_product_input in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.insert_product_input + ] = mock_rpc + + request = {} + client.insert_product_input(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.insert_product_input(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_insert_product_input_rest_required_fields( + request_type=productinputs.InsertProductInputRequest, +): + transport_class = transports.ProductInputsServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request_init["data_source"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + assert "dataSource" not in jsonified_request + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).insert_product_input._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + assert "dataSource" in jsonified_request + assert jsonified_request["dataSource"] == request_init["data_source"] + + jsonified_request["parent"] = "parent_value" + jsonified_request["dataSource"] = "data_source_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).insert_product_input._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("data_source",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + assert "dataSource" in jsonified_request + assert jsonified_request["dataSource"] == "data_source_value" + + client = ProductInputsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = productinputs.ProductInput() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
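+            # ("v1/sample_method" below is an arbitrary placeholder URI; any value
+            # works here because transcode() itself is mocked and the real HTTP
+            # routes are never consulted.)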
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = productinputs.ProductInput.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.insert_product_input(request) + + expected_params = [ + ( + "dataSource", + "", + ), + ("$alt", "json;enum-encoding=int"), + ] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_insert_product_input_rest_unset_required_fields(): + transport = transports.ProductInputsServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.insert_product_input._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(("dataSource",)) + & set( + ( + "parent", + "productInput", + "dataSource", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_insert_product_input_rest_interceptors(null_interceptor): + transport = transports.ProductInputsServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.ProductInputsServiceRestInterceptor(), + ) + client = ProductInputsServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ProductInputsServiceRestInterceptor, "post_insert_product_input" + ) as post, mock.patch.object( + transports.ProductInputsServiceRestInterceptor, "pre_insert_product_input" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = productinputs.InsertProductInputRequest.pb( + productinputs.InsertProductInputRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = productinputs.ProductInput.to_json( + productinputs.ProductInput() + ) + + request = productinputs.InsertProductInputRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = productinputs.ProductInput() + + client.insert_product_input( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_insert_product_input_rest_bad_request( + transport: str = "rest", request_type=productinputs.InsertProductInputRequest +): + client = ProductInputsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "accounts/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
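+    # A 400 status on the mocked session is enough for the client to raise
+    # core_exceptions.BadRequest; no response body is required.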
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.insert_product_input(request) + + +def test_insert_product_input_rest_error(): + client = ProductInputsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + productinputs.DeleteProductInputRequest, + dict, + ], +) +def test_delete_product_input_rest(request_type): + client = ProductInputsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "accounts/sample1/productInputs/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.delete_product_input(request) + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_product_input_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ProductInputsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.delete_product_input in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.delete_product_input + ] = mock_rpc + + request = {} + client.delete_product_input(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.delete_product_input(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_delete_product_input_rest_required_fields( + request_type=productinputs.DeleteProductInputRequest, +): + transport_class = transports.ProductInputsServiceRestTransport + + request_init = {} + request_init["name"] = "" + request_init["data_source"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + assert "dataSource" not in jsonified_request + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_product_input._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + assert "dataSource" in jsonified_request + assert jsonified_request["dataSource"] == request_init["data_source"] + + jsonified_request["name"] = "name_value" + jsonified_request["dataSource"] = "data_source_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_product_input._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("data_source",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + assert "dataSource" in jsonified_request + assert jsonified_request["dataSource"] == "data_source_value" + + client = ProductInputsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = None + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "delete", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.delete_product_input(request) + + expected_params = [ + ( + "dataSource", + "", + ), + ("$alt", "json;enum-encoding=int"), + ] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_delete_product_input_rest_unset_required_fields(): + transport = transports.ProductInputsServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.delete_product_input._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(("dataSource",)) + & set( + ( + "name", + "dataSource", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_product_input_rest_interceptors(null_interceptor): + transport = transports.ProductInputsServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.ProductInputsServiceRestInterceptor(), + ) + client = ProductInputsServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ProductInputsServiceRestInterceptor, "pre_delete_product_input" + ) as pre: + pre.assert_not_called() + pb_message = productinputs.DeleteProductInputRequest.pb( + productinputs.DeleteProductInputRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + + request = productinputs.DeleteProductInputRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + + client.delete_product_input( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + + +def test_delete_product_input_rest_bad_request( + transport: str = "rest", request_type=productinputs.DeleteProductInputRequest +): + client = ProductInputsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "accounts/sample1/productInputs/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_product_input(request) + + +def test_delete_product_input_rest_flattened(): + client = ProductInputsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
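+        # The delete RPC has no response payload, so an empty JSON body and a
+        # None return value are all that is needed here.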
+ return_value = None + + # get arguments that satisfy an http rule for this method + sample_request = {"name": "accounts/sample1/productInputs/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.delete_product_input(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/products/v1beta/{name=accounts/*/productInputs/*}" + % client.transport._host, + args[1], + ) + + +def test_delete_product_input_rest_flattened_error(transport: str = "rest"): + client = ProductInputsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_product_input( + productinputs.DeleteProductInputRequest(), + name="name_value", + ) + + +def test_delete_product_input_rest_error(): + client = ProductInputsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.ProductInputsServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = ProductInputsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.ProductInputsServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = ProductInputsServiceClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. + transport = transports.ProductInputsServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = ProductInputsServiceClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = ProductInputsServiceClient( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.ProductInputsServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = ProductInputsServiceClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. 
+ transport = transports.ProductInputsServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = ProductInputsServiceClient(transport=transport) + assert client.transport is transport + + +def test_transport_get_channel(): + # A client may be instantiated with a custom transport instance. + transport = transports.ProductInputsServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + transport = transports.ProductInputsServiceGrpcAsyncIOTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.ProductInputsServiceGrpcTransport, + transports.ProductInputsServiceGrpcAsyncIOTransport, + transports.ProductInputsServiceRestTransport, + ], +) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "rest", + ], +) +def test_transport_kind(transport_name): + transport = ProductInputsServiceClient.get_transport_class(transport_name)( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert transport.kind == transport_name + + +def test_transport_grpc_default(): + # A client should use the gRPC transport by default. + client = ProductInputsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert isinstance( + client.transport, + transports.ProductInputsServiceGrpcTransport, + ) + + +def test_product_inputs_service_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.ProductInputsServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json", + ) + + +def test_product_inputs_service_base_transport(): + # Instantiate the base transport. + with mock.patch( + "google.shopping.merchant_products_v1beta.services.product_inputs_service.transports.ProductInputsServiceTransport.__init__" + ) as Transport: + Transport.return_value = None + transport = transports.ProductInputsServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. 
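+    # (The abstract base transport only defines the interface; the gRPC and REST
+    # transports exercised elsewhere in this file supply the concrete implementations.)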
+ methods = ( + "insert_product_input", + "delete_product_input", + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + with pytest.raises(NotImplementedError): + transport.close() + + # Catch all for all remaining methods and properties + remainder = [ + "kind", + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + + +def test_product_inputs_service_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch( + "google.shopping.merchant_products_v1beta.services.product_inputs_service.transports.ProductInputsServiceTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.ProductInputsServiceTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with( + "credentials.json", + scopes=None, + default_scopes=("https://www.googleapis.com/auth/content",), + quota_project_id="octopus", + ) + + +def test_product_inputs_service_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. + with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( + "google.shopping.merchant_products_v1beta.services.product_inputs_service.transports.ProductInputsServiceTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.ProductInputsServiceTransport() + adc.assert_called_once() + + +def test_product_inputs_service_auth_adc(): + # If no credentials are provided, we should use ADC credentials. + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + ProductInputsServiceClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=("https://www.googleapis.com/auth/content",), + quota_project_id=None, + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.ProductInputsServiceGrpcTransport, + transports.ProductInputsServiceGrpcAsyncIOTransport, + ], +) +def test_product_inputs_service_transport_auth_adc(transport_class): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
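+    # google.auth.default should receive the caller-supplied scopes and quota
+    # project, while the Content API scope is passed separately via default_scopes.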
+ with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + adc.assert_called_once_with( + scopes=["1", "2"], + default_scopes=("https://www.googleapis.com/auth/content",), + quota_project_id="octopus", + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.ProductInputsServiceGrpcTransport, + transports.ProductInputsServiceGrpcAsyncIOTransport, + transports.ProductInputsServiceRestTransport, + ], +) +def test_product_inputs_service_transport_auth_gdch_credentials(transport_class): + host = "https://language.com" + api_audience_tests = [None, "https://language2.com"] + api_audience_expect = [host, "https://language2.com"] + for t, e in zip(api_audience_tests, api_audience_expect): + with mock.patch.object(google.auth, "default", autospec=True) as adc: + gdch_mock = mock.MagicMock() + type(gdch_mock).with_gdch_audience = mock.PropertyMock( + return_value=gdch_mock + ) + adc.return_value = (gdch_mock, None) + transport_class(host=host, api_audience=t) + gdch_mock.with_gdch_audience.assert_called_once_with(e) + + +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.ProductInputsServiceGrpcTransport, grpc_helpers), + (transports.ProductInputsServiceGrpcAsyncIOTransport, grpc_helpers_async), + ], +) +def test_product_inputs_service_transport_create_channel(transport_class, grpc_helpers): + # If credentials and host are not provided, the transport class should use + # ADC credentials. + with mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + + create_channel.assert_called_with( + "merchantapi.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + default_scopes=("https://www.googleapis.com/auth/content",), + scopes=["1", "2"], + default_host="merchantapi.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.ProductInputsServiceGrpcTransport, + transports.ProductInputsServiceGrpcAsyncIOTransport, + ], +) +def test_product_inputs_service_grpc_transport_client_cert_source_for_mtls( + transport_class, +): + cred = ga_credentials.AnonymousCredentials() + + # Check ssl_channel_credentials is used if provided. 
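+    # create_channel is mocked so the test can inspect the channel arguments
+    # without opening a real gRPC connection.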
+ with mock.patch.object(transport_class, "create_channel") as mock_create_channel: + mock_ssl_channel_creds = mock.Mock() + transport_class( + host="squid.clam.whelk", + credentials=cred, + ssl_channel_credentials=mock_ssl_channel_creds, + ) + mock_create_channel.assert_called_once_with( + "squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_channel_creds, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls + # is used. + with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): + with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: + transport_class( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback, + ) + expected_cert, expected_key = client_cert_source_callback() + mock_ssl_cred.assert_called_once_with( + certificate_chain=expected_cert, private_key=expected_key + ) + + +def test_product_inputs_service_http_transport_client_cert_source_for_mtls(): + cred = ga_credentials.AnonymousCredentials() + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ) as mock_configure_mtls_channel: + transports.ProductInputsServiceRestTransport( + credentials=cred, client_cert_source_for_mtls=client_cert_source_callback + ) + mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + "rest", + ], +) +def test_product_inputs_service_host_no_port(transport_name): + client = ProductInputsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="merchantapi.googleapis.com" + ), + transport=transport_name, + ) + assert client.transport._host == ( + "merchantapi.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://merchantapi.googleapis.com" + ) + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + "rest", + ], +) +def test_product_inputs_service_host_with_port(transport_name): + client = ProductInputsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="merchantapi.googleapis.com:8000" + ), + transport=transport_name, + ) + assert client.transport._host == ( + "merchantapi.googleapis.com:8000" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://merchantapi.googleapis.com:8000" + ) + + +@pytest.mark.parametrize( + "transport_name", + [ + "rest", + ], +) +def test_product_inputs_service_client_transport_session_collision(transport_name): + creds1 = ga_credentials.AnonymousCredentials() + creds2 = ga_credentials.AnonymousCredentials() + client1 = ProductInputsServiceClient( + credentials=creds1, + transport=transport_name, + ) + client2 = ProductInputsServiceClient( + credentials=creds2, + transport=transport_name, + ) + session1 = client1.transport.insert_product_input._session + session2 = client2.transport.insert_product_input._session + assert session1 != session2 + session1 = client1.transport.delete_product_input._session + session2 = client2.transport.delete_product_input._session + assert session1 != session2 + + +def 
test_product_inputs_service_grpc_transport_channel():
+    channel = grpc.secure_channel("http://localhost/", grpc.local_channel_credentials())
+
+    # Check that channel is used if provided.
+    transport = transports.ProductInputsServiceGrpcTransport(
+        host="squid.clam.whelk",
+        channel=channel,
+    )
+    assert transport.grpc_channel == channel
+    assert transport._host == "squid.clam.whelk:443"
+    assert transport._ssl_channel_credentials is None
+
+
+def test_product_inputs_service_grpc_asyncio_transport_channel():
+    channel = aio.secure_channel("http://localhost/", grpc.local_channel_credentials())
+
+    # Check that channel is used if provided.
+    transport = transports.ProductInputsServiceGrpcAsyncIOTransport(
+        host="squid.clam.whelk",
+        channel=channel,
+    )
+    assert transport.grpc_channel == channel
+    assert transport._host == "squid.clam.whelk:443"
+    assert transport._ssl_channel_credentials is None
+
+
+# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are
+# removed from grpc/grpc_asyncio transport constructor.
+@pytest.mark.parametrize(
+    "transport_class",
+    [
+        transports.ProductInputsServiceGrpcTransport,
+        transports.ProductInputsServiceGrpcAsyncIOTransport,
+    ],
+)
+def test_product_inputs_service_transport_channel_mtls_with_client_cert_source(
+    transport_class,
+):
+    with mock.patch(
+        "grpc.ssl_channel_credentials", autospec=True
+    ) as grpc_ssl_channel_cred:
+        with mock.patch.object(
+            transport_class, "create_channel"
+        ) as grpc_create_channel:
+            mock_ssl_cred = mock.Mock()
+            grpc_ssl_channel_cred.return_value = mock_ssl_cred
+
+            mock_grpc_channel = mock.Mock()
+            grpc_create_channel.return_value = mock_grpc_channel
+
+            cred = ga_credentials.AnonymousCredentials()
+            with pytest.warns(DeprecationWarning):
+                with mock.patch.object(google.auth, "default") as adc:
+                    adc.return_value = (cred, None)
+                    transport = transport_class(
+                        host="squid.clam.whelk",
+                        api_mtls_endpoint="mtls.squid.clam.whelk",
+                        client_cert_source=client_cert_source_callback,
+                    )
+                    adc.assert_called_once()
+
+            grpc_ssl_channel_cred.assert_called_once_with(
+                certificate_chain=b"cert bytes", private_key=b"key bytes"
+            )
+            grpc_create_channel.assert_called_once_with(
+                "mtls.squid.clam.whelk:443",
+                credentials=cred,
+                credentials_file=None,
+                scopes=None,
+                ssl_credentials=mock_ssl_cred,
+                quota_project_id=None,
+                options=[
+                    ("grpc.max_send_message_length", -1),
+                    ("grpc.max_receive_message_length", -1),
+                ],
+            )
+            assert transport.grpc_channel == mock_grpc_channel
+            assert transport._ssl_channel_credentials == mock_ssl_cred
+
+
+# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are
+# removed from grpc/grpc_asyncio transport constructor.
+@pytest.mark.parametrize( + "transport_class", + [ + transports.ProductInputsServiceGrpcTransport, + transports.ProductInputsServiceGrpcAsyncIOTransport, + ], +) +def test_product_inputs_service_transport_channel_mtls_with_adc(transport_class): + mock_ssl_cred = mock.Mock() + with mock.patch.multiple( + "google.auth.transport.grpc.SslCredentials", + __init__=mock.Mock(return_value=None), + ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), + ): + with mock.patch.object( + transport_class, "create_channel" + ) as grpc_create_channel: + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + mock_cred = mock.Mock() + + with pytest.warns(DeprecationWarning): + transport = transport_class( + host="squid.clam.whelk", + credentials=mock_cred, + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=None, + ) + + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=mock_cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + + +def test_product_path(): + account = "squid" + product = "clam" + expected = "accounts/{account}/products/{product}".format( + account=account, + product=product, + ) + actual = ProductInputsServiceClient.product_path(account, product) + assert expected == actual + + +def test_parse_product_path(): + expected = { + "account": "whelk", + "product": "octopus", + } + path = ProductInputsServiceClient.product_path(**expected) + + # Check that the path construction is reversible. + actual = ProductInputsServiceClient.parse_product_path(path) + assert expected == actual + + +def test_product_input_path(): + account = "oyster" + productinput = "nudibranch" + expected = "accounts/{account}/productInputs/{productinput}".format( + account=account, + productinput=productinput, + ) + actual = ProductInputsServiceClient.product_input_path(account, productinput) + assert expected == actual + + +def test_parse_product_input_path(): + expected = { + "account": "cuttlefish", + "productinput": "mussel", + } + path = ProductInputsServiceClient.product_input_path(**expected) + + # Check that the path construction is reversible. + actual = ProductInputsServiceClient.parse_product_input_path(path) + assert expected == actual + + +def test_common_billing_account_path(): + billing_account = "winkle" + expected = "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + actual = ProductInputsServiceClient.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "nautilus", + } + path = ProductInputsServiceClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. + actual = ProductInputsServiceClient.parse_common_billing_account_path(path) + assert expected == actual + + +def test_common_folder_path(): + folder = "scallop" + expected = "folders/{folder}".format( + folder=folder, + ) + actual = ProductInputsServiceClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "abalone", + } + path = ProductInputsServiceClient.common_folder_path(**expected) + + # Check that the path construction is reversible. 
+ actual = ProductInputsServiceClient.parse_common_folder_path(path) + assert expected == actual + + +def test_common_organization_path(): + organization = "squid" + expected = "organizations/{organization}".format( + organization=organization, + ) + actual = ProductInputsServiceClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "clam", + } + path = ProductInputsServiceClient.common_organization_path(**expected) + + # Check that the path construction is reversible. + actual = ProductInputsServiceClient.parse_common_organization_path(path) + assert expected == actual + + +def test_common_project_path(): + project = "whelk" + expected = "projects/{project}".format( + project=project, + ) + actual = ProductInputsServiceClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "octopus", + } + path = ProductInputsServiceClient.common_project_path(**expected) + + # Check that the path construction is reversible. + actual = ProductInputsServiceClient.parse_common_project_path(path) + assert expected == actual + + +def test_common_location_path(): + project = "oyster" + location = "nudibranch" + expected = "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) + actual = ProductInputsServiceClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "cuttlefish", + "location": "mussel", + } + path = ProductInputsServiceClient.common_location_path(**expected) + + # Check that the path construction is reversible. + actual = ProductInputsServiceClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object( + transports.ProductInputsServiceTransport, "_prep_wrapped_messages" + ) as prep: + client = ProductInputsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object( + transports.ProductInputsServiceTransport, "_prep_wrapped_messages" + ) as prep: + transport_class = ProductInputsServiceClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + +@pytest.mark.asyncio +async def test_transport_close_async(): + client = ProductInputsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + with mock.patch.object( + type(getattr(client.transport, "grpc_channel")), "close" + ) as close: + async with client: + close.assert_not_called() + close.assert_called_once() + + +def test_transport_close(): + transports = { + "rest": "_session", + "grpc": "_grpc_channel", + } + + for transport, close_name in transports.items(): + client = ProductInputsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + with mock.patch.object( + type(getattr(client.transport, close_name)), "close" + ) as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +def test_client_ctx(): + transports = [ + "rest", + "grpc", + ] + for transport in transports: + client = ProductInputsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), 
transport=transport + ) + # Test client calls underlying transport. + with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() + + +@pytest.mark.parametrize( + "client_class,transport_class", + [ + (ProductInputsServiceClient, transports.ProductInputsServiceGrpcTransport), + ( + ProductInputsServiceAsyncClient, + transports.ProductInputsServiceGrpcAsyncIOTransport, + ), + ], +) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) diff --git a/packages/google-shopping-merchant-products/tests/unit/gapic/merchant_products_v1beta/test_products_service.py b/packages/google-shopping-merchant-products/tests/unit/gapic/merchant_products_v1beta/test_products_service.py new file mode 100644 index 000000000000..94fa07a52734 --- /dev/null +++ b/packages/google-shopping-merchant-products/tests/unit/gapic/merchant_products_v1beta/test_products_service.py @@ -0,0 +1,3537 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import os + +# try/except added for compatibility with python < 3.8 +try: + from unittest import mock + from unittest.mock import AsyncMock # pragma: NO COVER +except ImportError: # pragma: NO COVER + import mock + +from collections.abc import Iterable +import json +import math + +from google.api_core import gapic_v1, grpc_helpers, grpc_helpers_async, path_template +from google.api_core import api_core_version, client_options +from google.api_core import exceptions as core_exceptions +import google.auth +from google.auth import credentials as ga_credentials +from google.auth.exceptions import MutualTLSChannelError +from google.oauth2 import service_account +from google.protobuf import json_format +from google.shopping.type.types import types +import grpc +from grpc.experimental import aio +from proto.marshal.rules import wrappers +from proto.marshal.rules.dates import DurationRule, TimestampRule +import pytest +from requests import PreparedRequest, Request, Response +from requests.sessions import Session + +from google.shopping.merchant_products_v1beta.services.products_service import ( + ProductsServiceAsyncClient, + ProductsServiceClient, + pagers, + transports, +) +from google.shopping.merchant_products_v1beta.types import products, products_common + + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + + +# If default endpoint is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. +def modify_default_endpoint(client): + return ( + "foo.googleapis.com" + if ("localhost" in client.DEFAULT_ENDPOINT) + else client.DEFAULT_ENDPOINT + ) + + +# If default endpoint template is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint template so the client can produce a different +# mtls endpoint for endpoint testing purposes. 
+def modify_default_endpoint_template(client): + return ( + "test.{UNIVERSE_DOMAIN}" + if ("localhost" in client._DEFAULT_ENDPOINT_TEMPLATE) + else client._DEFAULT_ENDPOINT_TEMPLATE + ) + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert ProductsServiceClient._get_default_mtls_endpoint(None) is None + assert ( + ProductsServiceClient._get_default_mtls_endpoint(api_endpoint) + == api_mtls_endpoint + ) + assert ( + ProductsServiceClient._get_default_mtls_endpoint(api_mtls_endpoint) + == api_mtls_endpoint + ) + assert ( + ProductsServiceClient._get_default_mtls_endpoint(sandbox_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + ProductsServiceClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + ProductsServiceClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi + ) + + +def test__read_environment_variables(): + assert ProductsServiceClient._read_environment_variables() == (False, "auto", None) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + assert ProductsServiceClient._read_environment_variables() == ( + True, + "auto", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + assert ProductsServiceClient._read_environment_variables() == ( + False, + "auto", + None, + ) + + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError) as excinfo: + ProductsServiceClient._read_environment_variables() + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + assert ProductsServiceClient._read_environment_variables() == ( + False, + "never", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + assert ProductsServiceClient._read_environment_variables() == ( + False, + "always", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}): + assert ProductsServiceClient._read_environment_variables() == ( + False, + "auto", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + ProductsServiceClient._read_environment_variables() + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + with mock.patch.dict(os.environ, {"GOOGLE_CLOUD_UNIVERSE_DOMAIN": "foo.com"}): + assert ProductsServiceClient._read_environment_variables() == ( + False, + "auto", + "foo.com", + ) + + +def test__get_client_cert_source(): + mock_provided_cert_source = mock.Mock() + mock_default_cert_source = mock.Mock() + + assert ProductsServiceClient._get_client_cert_source(None, False) is None + assert ( + ProductsServiceClient._get_client_cert_source(mock_provided_cert_source, False) + is None + ) + assert ( + ProductsServiceClient._get_client_cert_source(mock_provided_cert_source, True) + == mock_provided_cert_source + ) + + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", return_value=True + ): + with mock.patch( + 
"google.auth.transport.mtls.default_client_cert_source", + return_value=mock_default_cert_source, + ): + assert ( + ProductsServiceClient._get_client_cert_source(None, True) + is mock_default_cert_source + ) + assert ( + ProductsServiceClient._get_client_cert_source( + mock_provided_cert_source, "true" + ) + is mock_provided_cert_source + ) + + +@mock.patch.object( + ProductsServiceClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(ProductsServiceClient), +) +@mock.patch.object( + ProductsServiceAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(ProductsServiceAsyncClient), +) +def test__get_api_endpoint(): + api_override = "foo.com" + mock_client_cert_source = mock.Mock() + default_universe = ProductsServiceClient._DEFAULT_UNIVERSE + default_endpoint = ProductsServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=default_universe + ) + mock_universe = "bar.com" + mock_endpoint = ProductsServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=mock_universe + ) + + assert ( + ProductsServiceClient._get_api_endpoint( + api_override, mock_client_cert_source, default_universe, "always" + ) + == api_override + ) + assert ( + ProductsServiceClient._get_api_endpoint( + None, mock_client_cert_source, default_universe, "auto" + ) + == ProductsServiceClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + ProductsServiceClient._get_api_endpoint(None, None, default_universe, "auto") + == default_endpoint + ) + assert ( + ProductsServiceClient._get_api_endpoint(None, None, default_universe, "always") + == ProductsServiceClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + ProductsServiceClient._get_api_endpoint( + None, mock_client_cert_source, default_universe, "always" + ) + == ProductsServiceClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + ProductsServiceClient._get_api_endpoint(None, None, mock_universe, "never") + == mock_endpoint + ) + assert ( + ProductsServiceClient._get_api_endpoint(None, None, default_universe, "never") + == default_endpoint + ) + + with pytest.raises(MutualTLSChannelError) as excinfo: + ProductsServiceClient._get_api_endpoint( + None, mock_client_cert_source, mock_universe, "auto" + ) + assert ( + str(excinfo.value) + == "mTLS is not supported in any universe other than googleapis.com." + ) + + +def test__get_universe_domain(): + client_universe_domain = "foo.com" + universe_domain_env = "bar.com" + + assert ( + ProductsServiceClient._get_universe_domain( + client_universe_domain, universe_domain_env + ) + == client_universe_domain + ) + assert ( + ProductsServiceClient._get_universe_domain(None, universe_domain_env) + == universe_domain_env + ) + assert ( + ProductsServiceClient._get_universe_domain(None, None) + == ProductsServiceClient._DEFAULT_UNIVERSE + ) + + with pytest.raises(ValueError) as excinfo: + ProductsServiceClient._get_universe_domain("", None) + assert str(excinfo.value) == "Universe Domain cannot be an empty string." + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (ProductsServiceClient, transports.ProductsServiceGrpcTransport, "grpc"), + (ProductsServiceClient, transports.ProductsServiceRestTransport, "rest"), + ], +) +def test__validate_universe_domain(client_class, transport_class, transport_name): + client = client_class( + transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) + ) + assert client._validate_universe_domain() == True + + # Test the case when universe is already validated. 
+ assert client._validate_universe_domain() == True + + if transport_name == "grpc": + # Test the case where credentials are provided by the + # `local_channel_credentials`. The default universes in both match. + channel = grpc.secure_channel( + "http://localhost/", grpc.local_channel_credentials() + ) + client = client_class(transport=transport_class(channel=channel)) + assert client._validate_universe_domain() == True + + # Test the case where credentials do not exist: e.g. a transport is provided + # with no credentials. Validation should still succeed because there is no + # mismatch with non-existent credentials. + channel = grpc.secure_channel( + "http://localhost/", grpc.local_channel_credentials() + ) + transport = transport_class(channel=channel) + transport._credentials = None + client = client_class(transport=transport) + assert client._validate_universe_domain() == True + + # TODO: This is needed to cater for older versions of google-auth + # Make this test unconditional once the minimum supported version of + # google-auth becomes 2.23.0 or higher. + google_auth_major, google_auth_minor = [ + int(part) for part in google.auth.__version__.split(".")[0:2] + ] + if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): + credentials = ga_credentials.AnonymousCredentials() + credentials._universe_domain = "foo.com" + # Test the case when there is a universe mismatch from the credentials. + client = client_class(transport=transport_class(credentials=credentials)) + with pytest.raises(ValueError) as excinfo: + client._validate_universe_domain() + assert ( + str(excinfo.value) + == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." + ) + + # Test the case when there is a universe mismatch from the client. + # + # TODO: Make this test unconditional once the minimum supported version of + # google-api-core becomes 2.15.0 or higher. + api_core_major, api_core_minor = [ + int(part) for part in api_core_version.__version__.split(".")[0:2] + ] + if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): + client = client_class( + client_options={"universe_domain": "bar.com"}, + transport=transport_class( + credentials=ga_credentials.AnonymousCredentials(), + ), + ) + with pytest.raises(ValueError) as excinfo: + client._validate_universe_domain() + assert ( + str(excinfo.value) + == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." 
+ ) + + # Test that ValueError is raised if universe_domain is provided via client options and credentials is None + with pytest.raises(ValueError): + client._compare_universes("foo.bar", None) + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (ProductsServiceClient, "grpc"), + (ProductsServiceAsyncClient, "grpc_asyncio"), + (ProductsServiceClient, "rest"), + ], +) +def test_products_service_client_from_service_account_info( + client_class, transport_name +): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_info" + ) as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info, transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + "merchantapi.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://merchantapi.googleapis.com" + ) + + +@pytest.mark.parametrize( + "transport_class,transport_name", + [ + (transports.ProductsServiceGrpcTransport, "grpc"), + (transports.ProductsServiceGrpcAsyncIOTransport, "grpc_asyncio"), + (transports.ProductsServiceRestTransport, "rest"), + ], +) +def test_products_service_client_service_account_always_use_jwt( + transport_class, transport_name +): + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) + + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=False) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (ProductsServiceClient, "grpc"), + (ProductsServiceAsyncClient, "grpc_asyncio"), + (ProductsServiceClient, "rest"), + ], +) +def test_products_service_client_from_service_account_file( + client_class, transport_name +): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_file" + ) as factory: + factory.return_value = creds + client = client_class.from_service_account_file( + "dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + client = client_class.from_service_account_json( + "dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + "merchantapi.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://merchantapi.googleapis.com" + ) + + +def test_products_service_client_get_transport_class(): + transport = ProductsServiceClient.get_transport_class() + available_transports = [ + transports.ProductsServiceGrpcTransport, + transports.ProductsServiceRestTransport, + ] + assert transport in available_transports + + transport = ProductsServiceClient.get_transport_class("grpc") + assert transport == transports.ProductsServiceGrpcTransport + + +@pytest.mark.parametrize( + 
"client_class,transport_class,transport_name", + [ + (ProductsServiceClient, transports.ProductsServiceGrpcTransport, "grpc"), + ( + ProductsServiceAsyncClient, + transports.ProductsServiceGrpcAsyncIOTransport, + "grpc_asyncio", + ), + (ProductsServiceClient, transports.ProductsServiceRestTransport, "rest"), + ], +) +@mock.patch.object( + ProductsServiceClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(ProductsServiceClient), +) +@mock.patch.object( + ProductsServiceAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(ProductsServiceAsyncClient), +) +def test_products_service_client_client_options( + client_class, transport_class, transport_name +): + # Check that if channel is provided we won't create a new one. + with mock.patch.object(ProductsServiceClient, "get_transport_class") as gtc: + transport = transport_class(credentials=ga_credentials.AnonymousCredentials()) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object(ProductsServiceClient, "get_transport_class") as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. + options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name, client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + client = client_class(transport=transport_name) + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError) as excinfo: + client = client_class(transport=transport_name) + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + # Check the case api_endpoint is provided + options = client_options.ClientOptions( + api_audience="https://language.googleapis.com" + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience="https://language.googleapis.com", + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,use_client_cert_env", + [ + ( + ProductsServiceClient, + transports.ProductsServiceGrpcTransport, + "grpc", + "true", + ), + ( + ProductsServiceAsyncClient, + transports.ProductsServiceGrpcAsyncIOTransport, + "grpc_asyncio", + "true", + ), + ( + ProductsServiceClient, + transports.ProductsServiceGrpcTransport, + "grpc", + "false", + ), + ( + ProductsServiceAsyncClient, + transports.ProductsServiceGrpcAsyncIOTransport, + "grpc_asyncio", + "false", + ), + ( + ProductsServiceClient, + transports.ProductsServiceRestTransport, + "rest", + "true", + ), + ( + ProductsServiceClient, + transports.ProductsServiceRestTransport, + "rest", + "false", + ), + ], +) +@mock.patch.object( + ProductsServiceClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(ProductsServiceClient), +) +@mock.patch.object( + ProductsServiceAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(ProductsServiceAsyncClient), +) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_products_service_client_mtls_env_auto( + client_class, transport_class, transport_name, use_client_cert_env +): + # This tests the endpoint autoswitch behavior. 
Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. + + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + options = client_options.ClientOptions( + client_cert_source=client_cert_source_callback + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ) + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=client_cert_source_callback, + ): + if use_client_cert_env == "false": + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ) + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case client_cert_source and ADC client cert are not provided. 
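+ # With neither an explicit client_cert_source nor a discoverable ADC default
+ # certificate, no mTLS material reaches the transport and the plain endpoint
+ # is used, regardless of the GOOGLE_API_USE_CLIENT_CERTIFICATE value.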
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class", [ProductsServiceClient, ProductsServiceAsyncClient] +) +@mock.patch.object( + ProductsServiceClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(ProductsServiceClient), +) +@mock.patch.object( + ProductsServiceAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(ProductsServiceAsyncClient), +) +def test_products_service_client_get_mtls_endpoint_and_cert_source(client_class): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. 
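+ # In "auto" mode the helper only switches to the mTLS endpoint when a default
+ # client certificate can be discovered. Minimal usage sketch (return values
+ # depend on the environment, so this is illustrative only):
+ #
+ #     endpoint, cert_source = ProductsServiceClient.get_mtls_endpoint_and_cert_source()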
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=mock_client_cert_source, + ): + ( + api_endpoint, + cert_source, + ) = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + client_class.get_mtls_endpoint_and_cert_source() + + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError) as excinfo: + client_class.get_mtls_endpoint_and_cert_source() + + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + + +@pytest.mark.parametrize( + "client_class", [ProductsServiceClient, ProductsServiceAsyncClient] +) +@mock.patch.object( + ProductsServiceClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(ProductsServiceClient), +) +@mock.patch.object( + ProductsServiceAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(ProductsServiceAsyncClient), +) +def test_products_service_client_client_api_endpoint(client_class): + mock_client_cert_source = client_cert_source_callback + api_override = "foo.com" + default_universe = ProductsServiceClient._DEFAULT_UNIVERSE + default_endpoint = ProductsServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=default_universe + ) + mock_universe = "bar.com" + mock_endpoint = ProductsServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=mock_universe + ) + + # If ClientOptions.api_endpoint is set and GOOGLE_API_USE_CLIENT_CERTIFICATE="true", + # use ClientOptions.api_endpoint as the api endpoint regardless. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ): + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=api_override + ) + client = client_class( + client_options=options, + credentials=ga_credentials.AnonymousCredentials(), + ) + assert client.api_endpoint == api_override + + # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="never", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + client = client_class(credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == default_endpoint + + # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="always", + # use the DEFAULT_MTLS_ENDPOINT as the api endpoint. 
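+ # Resolution order exercised by this test: an explicit ClientOptions.api_endpoint
+ # always wins; otherwise GOOGLE_API_USE_MTLS_ENDPOINT and, where the installed
+ # api_core supports it, ClientOptions.universe_domain decide which endpoint
+ # template is used. Illustrative sketch (hypothetical domain):
+ #
+ #     options = client_options.ClientOptions(universe_domain="example.com")
+ #     client = ProductsServiceClient(
+ #         client_options=options, credentials=ga_credentials.AnonymousCredentials()
+ #     )
+ #     # client.api_endpoint is the default template populated with "example.com"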
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + client = client_class(credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + + # If ClientOptions.api_endpoint is not set, GOOGLE_API_USE_MTLS_ENDPOINT="auto" (default), + # GOOGLE_API_USE_CLIENT_CERTIFICATE="false" (default), default cert source doesn't exist, + # and ClientOptions.universe_domain="bar.com", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with universe domain as the api endpoint. + options = client_options.ClientOptions() + universe_exists = hasattr(options, "universe_domain") + if universe_exists: + options = client_options.ClientOptions(universe_domain=mock_universe) + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + else: + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + assert client.api_endpoint == ( + mock_endpoint if universe_exists else default_endpoint + ) + assert client.universe_domain == ( + mock_universe if universe_exists else default_universe + ) + + # If ClientOptions does not have a universe domain attribute and GOOGLE_API_USE_MTLS_ENDPOINT="never", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. + options = client_options.ClientOptions() + if hasattr(options, "universe_domain"): + delattr(options, "universe_domain") + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + assert client.api_endpoint == default_endpoint + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (ProductsServiceClient, transports.ProductsServiceGrpcTransport, "grpc"), + ( + ProductsServiceAsyncClient, + transports.ProductsServiceGrpcAsyncIOTransport, + "grpc_asyncio", + ), + (ProductsServiceClient, transports.ProductsServiceRestTransport, "rest"), + ], +) +def test_products_service_client_client_options_scopes( + client_class, transport_class, transport_name +): + # Check the case scopes are provided. + options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + ProductsServiceClient, + transports.ProductsServiceGrpcTransport, + "grpc", + grpc_helpers, + ), + ( + ProductsServiceAsyncClient, + transports.ProductsServiceGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + (ProductsServiceClient, transports.ProductsServiceRestTransport, "rest", None), + ], +) +def test_products_service_client_client_options_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. 
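+ # ClientOptions(credentials_file=...) hands the path through to the transport,
+ # which loads the credentials itself. Minimal sketch, assuming a hypothetical
+ # service-account JSON path:
+ #
+ #     options = client_options.ClientOptions(credentials_file="sa.json")
+ #     client = ProductsServiceClient(client_options=options)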
+ options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +def test_products_service_client_client_options_from_dict(): + with mock.patch( + "google.shopping.merchant_products_v1beta.services.products_service.transports.ProductsServiceGrpcTransport.__init__" + ) as grpc_transport: + grpc_transport.return_value = None + client = ProductsServiceClient( + client_options={"api_endpoint": "squid.clam.whelk"} + ) + grpc_transport.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + ProductsServiceClient, + transports.ProductsServiceGrpcTransport, + "grpc", + grpc_helpers, + ), + ( + ProductsServiceAsyncClient, + transports.ProductsServiceGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + ], +) +def test_products_service_client_create_channel_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. + options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # test that the credentials from file are saved and used as the credentials. 
+ with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel" + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + file_creds = ga_credentials.AnonymousCredentials() + load_creds.return_value = (file_creds, None) + adc.return_value = (creds, None) + client = client_class(client_options=options, transport=transport_name) + create_channel.assert_called_with( + "merchantapi.googleapis.com:443", + credentials=file_creds, + credentials_file=None, + quota_project_id=None, + default_scopes=("https://www.googleapis.com/auth/content",), + scopes=None, + default_host="merchantapi.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "request_type", + [ + products.GetProductRequest, + dict, + ], +) +def test_get_product(request_type, transport: str = "grpc"): + client = ProductsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_product), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = products.Product( + name="name_value", + channel=types.Channel.ChannelEnum.ONLINE, + offer_id="offer_id_value", + content_language="content_language_value", + feed_label="feed_label_value", + data_source="data_source_value", + version_number=1518, + ) + response = client.get_product(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = products.GetProductRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, products.Product) + assert response.name == "name_value" + assert response.channel == types.Channel.ChannelEnum.ONLINE + assert response.offer_id == "offer_id_value" + assert response.content_language == "content_language_value" + assert response.feed_label == "feed_label_value" + assert response.data_source == "data_source_value" + assert response.version_number == 1518 + + +def test_get_product_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ProductsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_product), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.get_product() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == products.GetProductRequest() + + +def test_get_product_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. 
+ client = ProductsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = products.GetProductRequest( + name="name_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_product), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.get_product(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == products.GetProductRequest( + name="name_value", + ) + + +def test_get_product_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ProductsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_product in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get_product] = mock_rpc + request = {} + client.get_product(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.get_product(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_get_product_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ProductsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_product), "__call__") as call: + # Designate an appropriate return value for the call. 
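+ # The asyncio transport returns awaitable call objects, so the mocked stub
+ # wraps the response in grpc_helpers_async.FakeUnaryUnaryCall to keep
+ # `await client.get_product()` working against the fake.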
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + products.Product( + name="name_value", + channel=types.Channel.ChannelEnum.ONLINE, + offer_id="offer_id_value", + content_language="content_language_value", + feed_label="feed_label_value", + data_source="data_source_value", + version_number=1518, + ) + ) + response = await client.get_product() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == products.GetProductRequest() + + +@pytest.mark.asyncio +async def test_get_product_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = ProductsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.get_product + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[ + client._client._transport.get_product + ] = mock_object + + request = {} + await client.get_product(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + await client.get_product(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + +@pytest.mark.asyncio +async def test_get_product_async( + transport: str = "grpc_asyncio", request_type=products.GetProductRequest +): + client = ProductsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_product), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + products.Product( + name="name_value", + channel=types.Channel.ChannelEnum.ONLINE, + offer_id="offer_id_value", + content_language="content_language_value", + feed_label="feed_label_value", + data_source="data_source_value", + version_number=1518, + ) + ) + response = await client.get_product(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = products.GetProductRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, products.Product) + assert response.name == "name_value" + assert response.channel == types.Channel.ChannelEnum.ONLINE + assert response.offer_id == "offer_id_value" + assert response.content_language == "content_language_value" + assert response.feed_label == "feed_label_value" + assert response.data_source == "data_source_value" + assert response.version_number == 1518 + + +@pytest.mark.asyncio +async def test_get_product_async_from_dict(): + await test_get_product_async(request_type=dict) + + +def test_get_product_field_headers(): + client = ProductsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = products.GetProductRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_product), "__call__") as call: + call.return_value = products.Product() + client.get_product(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_product_field_headers_async(): + client = ProductsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = products.GetProductRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_product), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(products.Product()) + await client.get_product(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_get_product_flattened(): + client = ProductsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_product), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = products.Product() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_product( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_get_product_flattened_error(): + client = ProductsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
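+ # The client accepts either a request object or flattened keyword arguments,
+ # but not both. Illustrative, using the sample resource name from the REST
+ # tests below:
+ #
+ #     client.get_product(request=products.GetProductRequest(name="accounts/sample1/products/sample2"))
+ #     client.get_product(name="accounts/sample1/products/sample2")  # flattened form
+ #
+ # Passing both at once raises ValueError, as asserted next.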
+ with pytest.raises(ValueError): + client.get_product( + products.GetProductRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_get_product_flattened_async(): + client = ProductsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_product), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = products.Product() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(products.Product()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_product( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_get_product_flattened_error_async(): + client = ProductsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.get_product( + products.GetProductRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + products.ListProductsRequest, + dict, + ], +) +def test_list_products(request_type, transport: str = "grpc"): + client = ProductsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_products), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = products.ListProductsResponse( + next_page_token="next_page_token_value", + ) + response = client.list_products(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = products.ListProductsRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListProductsPager) + assert response.next_page_token == "next_page_token_value" + + +def test_list_products_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ProductsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_products), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.list_products() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == products.ListProductsRequest() + + +def test_list_products_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. 
+ client = ProductsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = products.ListProductsRequest( + parent="parent_value", + page_token="page_token_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_products), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.list_products(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == products.ListProductsRequest( + parent="parent_value", + page_token="page_token_value", + ) + + +def test_list_products_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ProductsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_products in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.list_products] = mock_rpc + request = {} + client.list_products(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_products(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_list_products_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ProductsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_products), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + products.ListProductsResponse( + next_page_token="next_page_token_value", + ) + ) + response = await client.list_products() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == products.ListProductsRequest() + + +@pytest.mark.asyncio +async def test_list_products_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = ProductsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.list_products + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[ + client._client._transport.list_products + ] = mock_object + + request = {} + await client.list_products(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + await client.list_products(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + +@pytest.mark.asyncio +async def test_list_products_async( + transport: str = "grpc_asyncio", request_type=products.ListProductsRequest +): + client = ProductsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_products), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + products.ListProductsResponse( + next_page_token="next_page_token_value", + ) + ) + response = await client.list_products(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = products.ListProductsRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListProductsAsyncPager) + assert response.next_page_token == "next_page_token_value" + + +@pytest.mark.asyncio +async def test_list_products_async_from_dict(): + await test_list_products_async(request_type=dict) + + +def test_list_products_field_headers(): + client = ProductsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = products.ListProductsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
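+ # GAPIC clients copy URI path parameters into the "x-goog-request-params"
+ # request metadata so the backend can route the call; this test pins that
+ # behavior for the `parent` field.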
+ with mock.patch.object(type(client.transport.list_products), "__call__") as call: + call.return_value = products.ListProductsResponse() + client.list_products(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_products_field_headers_async(): + client = ProductsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = products.ListProductsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_products), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + products.ListProductsResponse() + ) + await client.list_products(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_list_products_flattened(): + client = ProductsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_products), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = products.ListProductsResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_products( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +def test_list_products_flattened_error(): + client = ProductsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_products( + products.ListProductsRequest(), + parent="parent_value", + ) + + +@pytest.mark.asyncio +async def test_list_products_flattened_async(): + client = ProductsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_products), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = products.ListProductsResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + products.ListProductsResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_products( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_list_products_flattened_error_async(): + client = ProductsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.list_products( + products.ListProductsRequest(), + parent="parent_value", + ) + + +def test_list_products_pager(transport_name: str = "grpc"): + client = ProductsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_products), "__call__") as call: + # Set the response to a series of pages. + call.side_effect = ( + products.ListProductsResponse( + products=[ + products.Product(), + products.Product(), + products.Product(), + ], + next_page_token="abc", + ), + products.ListProductsResponse( + products=[], + next_page_token="def", + ), + products.ListProductsResponse( + products=[ + products.Product(), + ], + next_page_token="ghi", + ), + products.ListProductsResponse( + products=[ + products.Product(), + products.Product(), + ], + ), + RuntimeError, + ) + + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + ) + pager = client.list_products(request={}) + + assert pager._metadata == expected_metadata + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, products.Product) for i in results) + + +def test_list_products_pages(transport_name: str = "grpc"): + client = ProductsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_products), "__call__") as call: + # Set the response to a series of pages. + call.side_effect = ( + products.ListProductsResponse( + products=[ + products.Product(), + products.Product(), + products.Product(), + ], + next_page_token="abc", + ), + products.ListProductsResponse( + products=[], + next_page_token="def", + ), + products.ListProductsResponse( + products=[ + products.Product(), + ], + next_page_token="ghi", + ), + products.ListProductsResponse( + products=[ + products.Product(), + products.Product(), + ], + ), + RuntimeError, + ) + pages = list(client.list_products(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_list_products_async_pager(): + client = ProductsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_products), "__call__", new_callable=mock.AsyncMock + ) as call: + # Set the response to a series of pages. 
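+ # Each response in side_effect below is served for one underlying RPC, so the
+ # pager transparently fetches four pages while being iterated. Typical usage
+ # sketch (hypothetical request values):
+ #
+ #     pager = await client.list_products(request={"parent": "accounts/sample1"})
+ #     async for product in pager:
+ #         ...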
+ call.side_effect = ( + products.ListProductsResponse( + products=[ + products.Product(), + products.Product(), + products.Product(), + ], + next_page_token="abc", + ), + products.ListProductsResponse( + products=[], + next_page_token="def", + ), + products.ListProductsResponse( + products=[ + products.Product(), + ], + next_page_token="ghi", + ), + products.ListProductsResponse( + products=[ + products.Product(), + products.Product(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_products( + request={}, + ) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, products.Product) for i in responses) + + +@pytest.mark.asyncio +async def test_list_products_async_pages(): + client = ProductsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_products), "__call__", new_callable=mock.AsyncMock + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + products.ListProductsResponse( + products=[ + products.Product(), + products.Product(), + products.Product(), + ], + next_page_token="abc", + ), + products.ListProductsResponse( + products=[], + next_page_token="def", + ), + products.ListProductsResponse( + products=[ + products.Product(), + ], + next_page_token="ghi", + ), + products.ListProductsResponse( + products=[ + products.Product(), + products.Product(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_products(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + products.GetProductRequest, + dict, + ], +) +def test_get_product_rest(request_type): + client = ProductsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "accounts/sample1/products/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = products.Product( + name="name_value", + channel=types.Channel.ChannelEnum.ONLINE, + offer_id="offer_id_value", + content_language="content_language_value", + feed_label="feed_label_value", + data_source="data_source_value", + version_number=1518, + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = products.Product.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_product(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, products.Product) + assert response.name == "name_value" + assert response.channel == types.Channel.ChannelEnum.ONLINE + assert response.offer_id == "offer_id_value" + assert response.content_language == "content_language_value" + assert response.feed_label == "feed_label_value" + assert response.data_source == "data_source_value" + assert response.version_number == 1518 + + +def test_get_product_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ProductsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_product in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get_product] = mock_rpc + + request = {} + client.get_product(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.get_product(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_get_product_rest_required_fields(request_type=products.GetProductRequest): + transport_class = transports.ProductsServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_product._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_product._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = ProductsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = products.Product() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = products.Product.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_product(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_product_rest_unset_required_fields(): + transport = transports.ProductsServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_product._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_product_rest_interceptors(null_interceptor): + transport = transports.ProductsServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.ProductsServiceRestInterceptor(), + ) + client = ProductsServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ProductsServiceRestInterceptor, "post_get_product" + ) as post, mock.patch.object( + transports.ProductsServiceRestInterceptor, "pre_get_product" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = products.GetProductRequest.pb(products.GetProductRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = products.Product.to_json(products.Product()) + + request = products.GetProductRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = products.Product() + + client.get_product( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_product_rest_bad_request( + transport: str = "rest", request_type=products.GetProductRequest +): + client = ProductsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "accounts/sample1/products/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_product(request) + + +def test_get_product_rest_flattened(): + client = ProductsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = products.Product() + + # get arguments that satisfy an http rule for this method + sample_request = {"name": "accounts/sample1/products/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = products.Product.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_product(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/products/v1beta/{name=accounts/*/products/*}" % client.transport._host, + args[1], + ) + + +def test_get_product_rest_flattened_error(transport: str = "rest"): + client = ProductsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_product( + products.GetProductRequest(), + name="name_value", + ) + + +def test_get_product_rest_error(): + client = ProductsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + products.ListProductsRequest, + dict, + ], +) +def test_list_products_rest(request_type): + client = ProductsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "accounts/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = products.ListProductsResponse( + next_page_token="next_page_token_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = products.ListProductsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_products(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListProductsPager) + assert response.next_page_token == "next_page_token_value" + + +def test_list_products_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ProductsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_products in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.list_products] = mock_rpc + + request = {} + client.list_products(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_products(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_list_products_rest_required_fields(request_type=products.ListProductsRequest): + transport_class = transports.ProductsServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_products._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_products._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "page_size", + "page_token", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = ProductsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = products.ListProductsResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = products.ListProductsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_products(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_products_rest_unset_required_fields(): + transport = transports.ProductsServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.list_products._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_products_rest_interceptors(null_interceptor): + transport = transports.ProductsServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.ProductsServiceRestInterceptor(), + ) + client = ProductsServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ProductsServiceRestInterceptor, "post_list_products" + ) as post, mock.patch.object( + transports.ProductsServiceRestInterceptor, "pre_list_products" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = products.ListProductsRequest.pb(products.ListProductsRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = products.ListProductsResponse.to_json( + products.ListProductsResponse() + ) + + request = products.ListProductsRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = products.ListProductsResponse() + + client.list_products( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_products_rest_bad_request( + transport: str = "rest", request_type=products.ListProductsRequest +): + client = ProductsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "accounts/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
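+    # The REST transport surfaces the mocked 400 status as
+    # google.api_core.exceptions.BadRequest, which pytest.raises expects below.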
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_products(request) + + +def test_list_products_rest_flattened(): + client = ProductsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = products.ListProductsResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "accounts/sample1"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = products.ListProductsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.list_products(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/products/v1beta/{parent=accounts/*}/products" % client.transport._host, + args[1], + ) + + +def test_list_products_rest_flattened_error(transport: str = "rest"): + client = ProductsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_products( + products.ListProductsRequest(), + parent="parent_value", + ) + + +def test_list_products_rest_pager(transport: str = "rest"): + client = ProductsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + products.ListProductsResponse( + products=[ + products.Product(), + products.Product(), + products.Product(), + ], + next_page_token="abc", + ), + products.ListProductsResponse( + products=[], + next_page_token="def", + ), + products.ListProductsResponse( + products=[ + products.Product(), + ], + next_page_token="ghi", + ), + products.ListProductsResponse( + products=[ + products.Product(), + products.Product(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(products.ListProductsResponse.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "accounts/sample1"} + + pager = client.list_products(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, products.Product) for i in results) + + pages = list(client.list_products(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.ProductsServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = ProductsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.ProductsServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = ProductsServiceClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. + transport = transports.ProductsServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = ProductsServiceClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = ProductsServiceClient( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.ProductsServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = ProductsServiceClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.ProductsServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = ProductsServiceClient(transport=transport) + assert client.transport is transport + + +def test_transport_get_channel(): + # A client may be instantiated with a custom transport instance. 
+ transport = transports.ProductsServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + transport = transports.ProductsServiceGrpcAsyncIOTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.ProductsServiceGrpcTransport, + transports.ProductsServiceGrpcAsyncIOTransport, + transports.ProductsServiceRestTransport, + ], +) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "rest", + ], +) +def test_transport_kind(transport_name): + transport = ProductsServiceClient.get_transport_class(transport_name)( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert transport.kind == transport_name + + +def test_transport_grpc_default(): + # A client should use the gRPC transport by default. + client = ProductsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert isinstance( + client.transport, + transports.ProductsServiceGrpcTransport, + ) + + +def test_products_service_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.ProductsServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json", + ) + + +def test_products_service_base_transport(): + # Instantiate the base transport. + with mock.patch( + "google.shopping.merchant_products_v1beta.services.products_service.transports.ProductsServiceTransport.__init__" + ) as Transport: + Transport.return_value = None + transport = transports.ProductsServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. 
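+    # The base transport only declares the RPC surface; the concrete gRPC and REST
+    # transports are expected to override each of these methods.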
+ methods = ( + "get_product", + "list_products", + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + with pytest.raises(NotImplementedError): + transport.close() + + # Catch all for all remaining methods and properties + remainder = [ + "kind", + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + + +def test_products_service_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch( + "google.shopping.merchant_products_v1beta.services.products_service.transports.ProductsServiceTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.ProductsServiceTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with( + "credentials.json", + scopes=None, + default_scopes=("https://www.googleapis.com/auth/content",), + quota_project_id="octopus", + ) + + +def test_products_service_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. + with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( + "google.shopping.merchant_products_v1beta.services.products_service.transports.ProductsServiceTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.ProductsServiceTransport() + adc.assert_called_once() + + +def test_products_service_auth_adc(): + # If no credentials are provided, we should use ADC credentials. + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + ProductsServiceClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=("https://www.googleapis.com/auth/content",), + quota_project_id=None, + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.ProductsServiceGrpcTransport, + transports.ProductsServiceGrpcAsyncIOTransport, + ], +) +def test_products_service_transport_auth_adc(transport_class): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
+ with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + adc.assert_called_once_with( + scopes=["1", "2"], + default_scopes=("https://www.googleapis.com/auth/content",), + quota_project_id="octopus", + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.ProductsServiceGrpcTransport, + transports.ProductsServiceGrpcAsyncIOTransport, + transports.ProductsServiceRestTransport, + ], +) +def test_products_service_transport_auth_gdch_credentials(transport_class): + host = "https://language.com" + api_audience_tests = [None, "https://language2.com"] + api_audience_expect = [host, "https://language2.com"] + for t, e in zip(api_audience_tests, api_audience_expect): + with mock.patch.object(google.auth, "default", autospec=True) as adc: + gdch_mock = mock.MagicMock() + type(gdch_mock).with_gdch_audience = mock.PropertyMock( + return_value=gdch_mock + ) + adc.return_value = (gdch_mock, None) + transport_class(host=host, api_audience=t) + gdch_mock.with_gdch_audience.assert_called_once_with(e) + + +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.ProductsServiceGrpcTransport, grpc_helpers), + (transports.ProductsServiceGrpcAsyncIOTransport, grpc_helpers_async), + ], +) +def test_products_service_transport_create_channel(transport_class, grpc_helpers): + # If credentials and host are not provided, the transport class should use + # ADC credentials. + with mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + + create_channel.assert_called_with( + "merchantapi.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + default_scopes=("https://www.googleapis.com/auth/content",), + scopes=["1", "2"], + default_host="merchantapi.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.ProductsServiceGrpcTransport, + transports.ProductsServiceGrpcAsyncIOTransport, + ], +) +def test_products_service_grpc_transport_client_cert_source_for_mtls(transport_class): + cred = ga_credentials.AnonymousCredentials() + + # Check ssl_channel_credentials is used if provided. + with mock.patch.object(transport_class, "create_channel") as mock_create_channel: + mock_ssl_channel_creds = mock.Mock() + transport_class( + host="squid.clam.whelk", + credentials=cred, + ssl_channel_credentials=mock_ssl_channel_creds, + ) + mock_create_channel.assert_called_once_with( + "squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_channel_creds, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls + # is used. 
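+    # grpc.ssl_channel_credentials should be built from the certificate/key pair
+    # produced by the client_cert_source_for_mtls callback.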
+ with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): + with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: + transport_class( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback, + ) + expected_cert, expected_key = client_cert_source_callback() + mock_ssl_cred.assert_called_once_with( + certificate_chain=expected_cert, private_key=expected_key + ) + + +def test_products_service_http_transport_client_cert_source_for_mtls(): + cred = ga_credentials.AnonymousCredentials() + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ) as mock_configure_mtls_channel: + transports.ProductsServiceRestTransport( + credentials=cred, client_cert_source_for_mtls=client_cert_source_callback + ) + mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + "rest", + ], +) +def test_products_service_host_no_port(transport_name): + client = ProductsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="merchantapi.googleapis.com" + ), + transport=transport_name, + ) + assert client.transport._host == ( + "merchantapi.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://merchantapi.googleapis.com" + ) + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + "rest", + ], +) +def test_products_service_host_with_port(transport_name): + client = ProductsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="merchantapi.googleapis.com:8000" + ), + transport=transport_name, + ) + assert client.transport._host == ( + "merchantapi.googleapis.com:8000" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://merchantapi.googleapis.com:8000" + ) + + +@pytest.mark.parametrize( + "transport_name", + [ + "rest", + ], +) +def test_products_service_client_transport_session_collision(transport_name): + creds1 = ga_credentials.AnonymousCredentials() + creds2 = ga_credentials.AnonymousCredentials() + client1 = ProductsServiceClient( + credentials=creds1, + transport=transport_name, + ) + client2 = ProductsServiceClient( + credentials=creds2, + transport=transport_name, + ) + session1 = client1.transport.get_product._session + session2 = client2.transport.get_product._session + assert session1 != session2 + session1 = client1.transport.list_products._session + session2 = client2.transport.list_products._session + assert session1 != session2 + + +def test_products_service_grpc_transport_channel(): + channel = grpc.secure_channel("http://localhost/", grpc.local_channel_credentials()) + + # Check that channel is used if provided. + transport = transports.ProductsServiceGrpcTransport( + host="squid.clam.whelk", + channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None + + +def test_products_service_grpc_asyncio_transport_channel(): + channel = aio.secure_channel("http://localhost/", grpc.local_channel_credentials()) + + # Check that channel is used if provided. 
+ transport = transports.ProductsServiceGrpcAsyncIOTransport( + host="squid.clam.whelk", + channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. +@pytest.mark.parametrize( + "transport_class", + [ + transports.ProductsServiceGrpcTransport, + transports.ProductsServiceGrpcAsyncIOTransport, + ], +) +def test_products_service_transport_channel_mtls_with_client_cert_source( + transport_class, +): + with mock.patch( + "grpc.ssl_channel_credentials", autospec=True + ) as grpc_ssl_channel_cred: + with mock.patch.object( + transport_class, "create_channel" + ) as grpc_create_channel: + mock_ssl_cred = mock.Mock() + grpc_ssl_channel_cred.return_value = mock_ssl_cred + + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + + cred = ga_credentials.AnonymousCredentials() + with pytest.warns(DeprecationWarning): + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (cred, None) + transport = transport_class( + host="squid.clam.whelk", + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=client_cert_source_callback, + ) + adc.assert_called_once() + + grpc_ssl_channel_cred.assert_called_once_with( + certificate_chain=b"cert bytes", private_key=b"key bytes" + ) + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + assert transport._ssl_channel_credentials == mock_ssl_cred + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. 
+@pytest.mark.parametrize( + "transport_class", + [ + transports.ProductsServiceGrpcTransport, + transports.ProductsServiceGrpcAsyncIOTransport, + ], +) +def test_products_service_transport_channel_mtls_with_adc(transport_class): + mock_ssl_cred = mock.Mock() + with mock.patch.multiple( + "google.auth.transport.grpc.SslCredentials", + __init__=mock.Mock(return_value=None), + ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), + ): + with mock.patch.object( + transport_class, "create_channel" + ) as grpc_create_channel: + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + mock_cred = mock.Mock() + + with pytest.warns(DeprecationWarning): + transport = transport_class( + host="squid.clam.whelk", + credentials=mock_cred, + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=None, + ) + + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=mock_cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + + +def test_product_path(): + account = "squid" + product = "clam" + expected = "accounts/{account}/products/{product}".format( + account=account, + product=product, + ) + actual = ProductsServiceClient.product_path(account, product) + assert expected == actual + + +def test_parse_product_path(): + expected = { + "account": "whelk", + "product": "octopus", + } + path = ProductsServiceClient.product_path(**expected) + + # Check that the path construction is reversible. + actual = ProductsServiceClient.parse_product_path(path) + assert expected == actual + + +def test_common_billing_account_path(): + billing_account = "oyster" + expected = "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + actual = ProductsServiceClient.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "nudibranch", + } + path = ProductsServiceClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. + actual = ProductsServiceClient.parse_common_billing_account_path(path) + assert expected == actual + + +def test_common_folder_path(): + folder = "cuttlefish" + expected = "folders/{folder}".format( + folder=folder, + ) + actual = ProductsServiceClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "mussel", + } + path = ProductsServiceClient.common_folder_path(**expected) + + # Check that the path construction is reversible. + actual = ProductsServiceClient.parse_common_folder_path(path) + assert expected == actual + + +def test_common_organization_path(): + organization = "winkle" + expected = "organizations/{organization}".format( + organization=organization, + ) + actual = ProductsServiceClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "nautilus", + } + path = ProductsServiceClient.common_organization_path(**expected) + + # Check that the path construction is reversible. 
+ actual = ProductsServiceClient.parse_common_organization_path(path) + assert expected == actual + + +def test_common_project_path(): + project = "scallop" + expected = "projects/{project}".format( + project=project, + ) + actual = ProductsServiceClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "abalone", + } + path = ProductsServiceClient.common_project_path(**expected) + + # Check that the path construction is reversible. + actual = ProductsServiceClient.parse_common_project_path(path) + assert expected == actual + + +def test_common_location_path(): + project = "squid" + location = "clam" + expected = "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) + actual = ProductsServiceClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "whelk", + "location": "octopus", + } + path = ProductsServiceClient.common_location_path(**expected) + + # Check that the path construction is reversible. + actual = ProductsServiceClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object( + transports.ProductsServiceTransport, "_prep_wrapped_messages" + ) as prep: + client = ProductsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object( + transports.ProductsServiceTransport, "_prep_wrapped_messages" + ) as prep: + transport_class = ProductsServiceClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + +@pytest.mark.asyncio +async def test_transport_close_async(): + client = ProductsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + with mock.patch.object( + type(getattr(client.transport, "grpc_channel")), "close" + ) as close: + async with client: + close.assert_not_called() + close.assert_called_once() + + +def test_transport_close(): + transports = { + "rest": "_session", + "grpc": "_grpc_channel", + } + + for transport, close_name in transports.items(): + client = ProductsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + with mock.patch.object( + type(getattr(client.transport, close_name)), "close" + ) as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +def test_client_ctx(): + transports = [ + "rest", + "grpc", + ] + for transport in transports: + client = ProductsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + # Test client calls underlying transport. 
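+        # Using the client as a context manager should trigger transport.close() on exit.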
+ with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() + + +@pytest.mark.parametrize( + "client_class,transport_class", + [ + (ProductsServiceClient, transports.ProductsServiceGrpcTransport), + (ProductsServiceAsyncClient, transports.ProductsServiceGrpcAsyncIOTransport), + ], +) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) diff --git a/packages/google-shopping-merchant-promotions/.OwlBot.yaml b/packages/google-shopping-merchant-promotions/.OwlBot.yaml new file mode 100644 index 000000000000..adcecd153ff2 --- /dev/null +++ b/packages/google-shopping-merchant-promotions/.OwlBot.yaml @@ -0,0 +1,18 @@ +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +deep-copy-regex: + - source: /google/shopping/merchant/promotions/(v.*)/.*-py + dest: /owl-bot-staging/google-shopping-merchant-promotions/$1 +api-name: google-shopping-merchant-promotions diff --git a/packages/google-shopping-merchant-promotions/.coveragerc b/packages/google-shopping-merchant-promotions/.coveragerc new file mode 100644 index 000000000000..dafc77479871 --- /dev/null +++ b/packages/google-shopping-merchant-promotions/.coveragerc @@ -0,0 +1,13 @@ +[run] +branch = True + +[report] +show_missing = True +omit = + google/shopping/merchant_promotions/__init__.py + google/shopping/merchant_promotions/gapic_version.py +exclude_lines = + # Re-enable the standard pragma + pragma: NO COVER + # Ignore debug-only repr + def __repr__ diff --git a/packages/google-shopping-merchant-promotions/.flake8 b/packages/google-shopping-merchant-promotions/.flake8 new file mode 100644 index 000000000000..87f6e408c47d --- /dev/null +++ b/packages/google-shopping-merchant-promotions/.flake8 @@ -0,0 +1,33 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Generated by synthtool. DO NOT EDIT! +[flake8] +ignore = E203, E231, E266, E501, W503 +exclude = + # Exclude generated code. + **/proto/** + **/gapic/** + **/services/** + **/types/** + *_pb2.py + + # Standard linting exemptions. + **/.nox/** + __pycache__, + .git, + *.pyc, + conf.py diff --git a/packages/google-shopping-merchant-promotions/.gitignore b/packages/google-shopping-merchant-promotions/.gitignore new file mode 100644 index 000000000000..b4243ced74e4 --- /dev/null +++ b/packages/google-shopping-merchant-promotions/.gitignore @@ -0,0 +1,63 @@ +*.py[cod] +*.sw[op] + +# C extensions +*.so + +# Packages +*.egg +*.egg-info +dist +build +eggs +.eggs +parts +bin +var +sdist +develop-eggs +.installed.cfg +lib +lib64 +__pycache__ + +# Installer logs +pip-log.txt + +# Unit test / coverage reports +.coverage +.nox +.cache +.pytest_cache + + +# Mac +.DS_Store + +# JetBrains +.idea + +# VS Code +.vscode + +# emacs +*~ + +# Built documentation +docs/_build +bigquery/docs/generated +docs.metadata + +# Virtual environment +env/ + +# Test logs +coverage.xml +*sponge_log.xml + +# System test environment variables. +system_tests/local_test_setup + +# Make sure a generated file isn't accidentally committed. +pylintrc +pylintrc.test diff --git a/packages/google-shopping-merchant-promotions/.repo-metadata.json b/packages/google-shopping-merchant-promotions/.repo-metadata.json new file mode 100644 index 000000000000..df13ffb47a11 --- /dev/null +++ b/packages/google-shopping-merchant-promotions/.repo-metadata.json @@ -0,0 +1,17 @@ +{ + "name": "google-shopping-merchant-promotions", + "name_pretty": "Merchant API", + "api_description": "Programmatically manage your Merchant Center accounts.", + "product_documentation": "https://developers.google.com/merchant/api", + "client_documentation": "https://googleapis.dev/python/google-shopping-merchant-promotions/latest", + "issue_tracker": "https://github.com/googleapis/google-cloud-python/issues", + "release_level": "preview", + "language": "python", + "library_type": "GAPIC_AUTO", + "repo": "googleapis/google-cloud-python", + "distribution_name": "google-shopping-merchant-promotions", + "api_id": "promotions.googleapis.com", + "default_version": "v1beta", + "codeowner_team": "", + "api_shortname": "promotions" +} diff --git a/packages/google-shopping-merchant-promotions/CHANGELOG.md b/packages/google-shopping-merchant-promotions/CHANGELOG.md new file mode 100644 index 000000000000..3e27f04279af --- /dev/null +++ b/packages/google-shopping-merchant-promotions/CHANGELOG.md @@ -0,0 +1,10 @@ +# Changelog + +## 0.1.0 (2024-06-05) + + +### Features + +* add initial files for google.shopping.merchant.promotions.v1beta ([#12775](https://github.com/googleapis/google-cloud-python/issues/12775)) 
([f1cdefb](https://github.com/googleapis/google-cloud-python/commit/f1cdefb64b5b47e25e900333dbaebaa94d859f37)) + +## Changelog diff --git a/packages/google-shopping-merchant-promotions/CODE_OF_CONDUCT.md b/packages/google-shopping-merchant-promotions/CODE_OF_CONDUCT.md new file mode 100644 index 000000000000..039f43681204 --- /dev/null +++ b/packages/google-shopping-merchant-promotions/CODE_OF_CONDUCT.md @@ -0,0 +1,95 @@ + +# Code of Conduct + +## Our Pledge + +In the interest of fostering an open and welcoming environment, we as +contributors and maintainers pledge to making participation in our project and +our community a harassment-free experience for everyone, regardless of age, body +size, disability, ethnicity, gender identity and expression, level of +experience, education, socio-economic status, nationality, personal appearance, +race, religion, or sexual identity and orientation. + +## Our Standards + +Examples of behavior that contributes to creating a positive environment +include: + +* Using welcoming and inclusive language +* Being respectful of differing viewpoints and experiences +* Gracefully accepting constructive criticism +* Focusing on what is best for the community +* Showing empathy towards other community members + +Examples of unacceptable behavior by participants include: + +* The use of sexualized language or imagery and unwelcome sexual attention or + advances +* Trolling, insulting/derogatory comments, and personal or political attacks +* Public or private harassment +* Publishing others' private information, such as a physical or electronic + address, without explicit permission +* Other conduct which could reasonably be considered inappropriate in a + professional setting + +## Our Responsibilities + +Project maintainers are responsible for clarifying the standards of acceptable +behavior and are expected to take appropriate and fair corrective action in +response to any instances of unacceptable behavior. + +Project maintainers have the right and responsibility to remove, edit, or reject +comments, commits, code, wiki edits, issues, and other contributions that are +not aligned to this Code of Conduct, or to ban temporarily or permanently any +contributor for other behaviors that they deem inappropriate, threatening, +offensive, or harmful. + +## Scope + +This Code of Conduct applies both within project spaces and in public spaces +when an individual is representing the project or its community. Examples of +representing a project or community include using an official project e-mail +address, posting via an official social media account, or acting as an appointed +representative at an online or offline event. Representation of a project may be +further defined and clarified by project maintainers. + +This Code of Conduct also applies outside the project spaces when the Project +Steward has a reasonable belief that an individual's behavior may have a +negative impact on the project or its community. + +## Conflict Resolution + +We do not believe that all conflict is bad; healthy debate and disagreement +often yield positive results. However, it is never okay to be disrespectful or +to engage in behavior that violates the project’s code of conduct. + +If you see someone violating the code of conduct, you are encouraged to address +the behavior directly with those involved. Many issues can be resolved quickly +and easily, and this gives people more control over the outcome of their +dispute. 
If you are unable to resolve the matter for any reason, or if the +behavior is threatening or harassing, report it. We are dedicated to providing +an environment where participants feel welcome and safe. + + +Reports should be directed to *googleapis-stewards@google.com*, the +Project Steward(s) for *Google Cloud Client Libraries*. It is the Project Steward’s duty to +receive and address reported violations of the code of conduct. They will then +work with a committee consisting of representatives from the Open Source +Programs Office and the Google Open Source Strategy team. If for any reason you +are uncomfortable reaching out to the Project Steward, please email +opensource@google.com. + +We will investigate every complaint, but you may not receive a direct response. +We will use our discretion in determining when and how to follow up on reported +incidents, which may range from not taking action to permanent expulsion from +the project and project-sponsored spaces. We will notify the accused of the +report and provide them an opportunity to discuss it before any action is taken. +The identity of the reporter will be omitted from the details of the report +supplied to the accused. In potentially harmful situations, such as ongoing +harassment or threats to anyone's safety, we may take action without notice. + +## Attribution + +This Code of Conduct is adapted from the Contributor Covenant, version 1.4, +available at +https://www.contributor-covenant.org/version/1/4/code-of-conduct.html \ No newline at end of file diff --git a/packages/google-shopping-merchant-promotions/CONTRIBUTING.rst b/packages/google-shopping-merchant-promotions/CONTRIBUTING.rst new file mode 100644 index 000000000000..db38f3e5c288 --- /dev/null +++ b/packages/google-shopping-merchant-promotions/CONTRIBUTING.rst @@ -0,0 +1,271 @@ +.. Generated by synthtool. DO NOT EDIT! +############ +Contributing +############ + +#. **Please sign one of the contributor license agreements below.** +#. Fork the repo, develop and test your code changes, add docs. +#. Make sure that your commit messages clearly describe the changes. +#. Send a pull request. (Please Read: `Faster Pull Request Reviews`_) + +.. _Faster Pull Request Reviews: https://github.com/kubernetes/community/blob/master/contributors/guide/pull-requests.md#best-practices-for-faster-reviews + +.. contents:: Here are some guidelines for hacking on the Google Cloud Client libraries. + +*************** +Adding Features +*************** + +In order to add a feature: + +- The feature must be documented in both the API and narrative + documentation. + +- The feature must work fully on the following CPython versions: + 3.7, 3.8, 3.9, 3.10, 3.11 and 3.12 on both UNIX and Windows. + +- The feature must not add unnecessary dependencies (where + "unnecessary" is of course subjective, but new dependencies should + be discussed). + +**************************** +Using a Development Checkout +**************************** + +You'll have to create a development environment using a Git checkout: + +- While logged into your GitHub account, navigate to the + ``google-cloud-python`` `repo`_ on GitHub. + +- Fork and clone the ``google-cloud-python`` repository to your GitHub account by + clicking the "Fork" button. 
+ +- Clone your fork of ``google-cloud-python`` from your GitHub account to your local + computer, substituting your account username and specifying the destination + as ``hack-on-google-cloud-python``. E.g.:: + + $ cd ${HOME} + $ git clone git@github.com:USERNAME/google-cloud-python.git hack-on-google-cloud-python + $ cd hack-on-google-cloud-python + # Configure remotes such that you can pull changes from the googleapis/google-cloud-python + # repository into your local repository. + $ git remote add upstream git@github.com:googleapis/google-cloud-python.git + # fetch and merge changes from upstream into main + $ git fetch upstream + $ git merge upstream/main + +Now your local repo is set up such that you will push changes to your GitHub +repo, from which you can submit a pull request. + +To work on the codebase and run the tests, we recommend using ``nox``, +but you can also use a ``virtualenv`` of your own creation. + +.. _repo: https://github.com/googleapis/google-cloud-python + +Using ``nox`` +============= + +We use `nox `__ to instrument our tests. + +- To test your changes, run unit tests with ``nox``:: + $ nox -s unit + +- To run a single unit test:: + + $ nox -s unit-3.12 -- -k + + + .. note:: + + The unit tests and system tests are described in the + ``noxfile.py`` files in each directory. + +.. nox: https://pypi.org/project/nox/ + +***************************************** +I'm getting weird errors... Can you help? +***************************************** + +If the error mentions ``Python.h`` not being found, +install ``python-dev`` and try again. +On Debian/Ubuntu:: + + $ sudo apt-get install python-dev + +************ +Coding Style +************ +- We use the automatic code formatter ``black``. You can run it using + the nox session ``blacken``. This will eliminate many lint errors. Run via:: + + $ nox -s blacken + +- PEP8 compliance is required, with exceptions defined in the linter configuration. + If you have ``nox`` installed, you can test that you have not introduced + any non-compliant code via:: + + $ nox -s lint + +- In order to make ``nox -s lint`` run faster, you can set some environment + variables:: + + export GOOGLE_CLOUD_TESTING_REMOTE="upstream" + export GOOGLE_CLOUD_TESTING_BRANCH="main" + + By doing this, you are specifying the location of the most up-to-date + version of ``google-cloud-python``. The + remote name ``upstream`` should point to the official ``googleapis`` + checkout and the branch should be the default branch on that remote (``main``). + +- This repository contains configuration for the + `pre-commit `__ tool, which automates checking + our linters during a commit. If you have it installed on your ``$PATH``, + you can enable enforcing those checks via: + +.. code-block:: bash + + $ pre-commit install + pre-commit installed at .git/hooks/pre-commit + +Exceptions to PEP8: + +- Many unit tests use a helper method, ``_call_fut`` ("FUT" is short for + "Function-Under-Test"), which is PEP8-incompliant, but more readable. + Some also use a local variable, ``MUT`` (short for "Module-Under-Test"). + +******************** +Running System Tests +******************** + +- To run system tests, you can execute:: + + # Run all system tests + $ nox -s system + + # Run a single system test + $ nox -s system-3.12 -- -k + + + .. note:: + + System tests are only configured to run under Python 3.8, 3.9, 3.10, 3.11 and 3.12. 
+ For expediency, we do not run them in older versions of Python 3. + + This alone will not run the tests. You'll need to change some local + auth settings and change some configuration in your project to + run all the tests. + +- System tests will be run against an actual project. You should use local credentials from gcloud when possible. See `Best practices for application authentication `__. Some tests require a service account. For those tests see `Authenticating as a service account `__. + +************* +Test Coverage +************* + +- The codebase *must* have 100% test statement coverage after each commit. + You can test coverage via ``nox -s cover``. + +****************************************************** +Documentation Coverage and Building HTML Documentation +****************************************************** + +If you fix a bug, and the bug requires an API or behavior modification, all +documentation in this package which references that API or behavior must be +changed to reflect the bug fix, ideally in the same commit that fixes the bug +or adds the feature. + +Build the docs via: + + $ nox -s docs + +************************* +Samples and code snippets +************************* + +Code samples and snippets live in the `samples/` catalogue. Feel free to +provide more examples, but make sure to write tests for those examples. +Each folder containing example code requires its own `noxfile.py` script +which automates testing. If you decide to create a new folder, you can +base it on the `samples/snippets` folder (providing `noxfile.py` and +the requirements files). + +The tests will run against a real Google Cloud Project, so you should +configure them just like the System Tests. + +- To run sample tests, you can execute:: + + # Run all tests in a folder + $ cd samples/snippets + $ nox -s py-3.8 + + # Run a single sample test + $ cd samples/snippets + $ nox -s py-3.8 -- -k + +******************************************** +Note About ``README`` as it pertains to PyPI +******************************************** + +The `description on PyPI`_ for the project comes directly from the +``README``. Due to the reStructuredText (``rst``) parser used by +PyPI, relative links which will work on GitHub (e.g. ``CONTRIBUTING.rst`` +instead of +``https://github.com/googleapis/google-cloud-python/blob/main/CONTRIBUTING.rst``) +may cause problems creating links or rendering the description. + +.. _description on PyPI: https://pypi.org/project/google-shopping-merchant-promotions + + +************************* +Supported Python Versions +************************* + +We support: + +- `Python 3.7`_ +- `Python 3.8`_ +- `Python 3.9`_ +- `Python 3.10`_ +- `Python 3.11`_ +- `Python 3.12`_ + +.. _Python 3.7: https://docs.python.org/3.7/ +.. _Python 3.8: https://docs.python.org/3.8/ +.. _Python 3.9: https://docs.python.org/3.9/ +.. _Python 3.10: https://docs.python.org/3.10/ +.. _Python 3.11: https://docs.python.org/3.11/ +.. _Python 3.12: https://docs.python.org/3.12/ + + +Supported versions can be found in our ``noxfile.py`` `config`_. + +.. 
_config: https://github.com/googleapis/google-cloud-python/blob/main/packages/google-shopping-merchant-promotions/noxfile.py + + +********** +Versioning +********** + +This library follows `Semantic Versioning`_. + +.. _Semantic Versioning: http://semver.org/ + +Some packages are currently in major version zero (``0.y.z``), which means that +anything may change at any time and the public API should not be considered +stable. + +****************************** +Contributor License Agreements +****************************** + +Before we can accept your pull requests you'll need to sign a Contributor +License Agreement (CLA): + +- **If you are an individual writing original source code** and **you own the + intellectual property**, then you'll need to sign an + `individual CLA `__. +- **If you work for a company that wants to allow you to contribute your work**, + then you'll need to sign a + `corporate CLA `__. + +You can sign these electronically (just scroll to the bottom). After that, +we'll be able to accept your pull requests. diff --git a/packages/google-shopping-merchant-promotions/LICENSE b/packages/google-shopping-merchant-promotions/LICENSE new file mode 100644 index 000000000000..d64569567334 --- /dev/null +++ b/packages/google-shopping-merchant-promotions/LICENSE @@ -0,0 +1,202 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. 
For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/packages/google-shopping-merchant-promotions/MANIFEST.in b/packages/google-shopping-merchant-promotions/MANIFEST.in new file mode 100644 index 000000000000..e0a66705318e --- /dev/null +++ b/packages/google-shopping-merchant-promotions/MANIFEST.in @@ -0,0 +1,25 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +# Generated by synthtool. DO NOT EDIT! +include README.rst LICENSE +recursive-include google *.json *.proto py.typed +recursive-include tests * +global-exclude *.py[co] +global-exclude __pycache__ + +# Exclude scripts for samples readmegen +prune scripts/readme-gen diff --git a/packages/google-shopping-merchant-promotions/README.rst b/packages/google-shopping-merchant-promotions/README.rst new file mode 100644 index 000000000000..1674328fa8ac --- /dev/null +++ b/packages/google-shopping-merchant-promotions/README.rst @@ -0,0 +1,108 @@ +Python Client for Merchant API +============================== + +|preview| |pypi| |versions| + +`Merchant API`_: Programmatically manage your Merchant Center accounts. + +- `Client Library Documentation`_ +- `Product Documentation`_ + +.. |preview| image:: https://img.shields.io/badge/support-preview-orange.svg + :target: https://github.com/googleapis/google-cloud-python/blob/main/README.rst#stability-levels +.. |pypi| image:: https://img.shields.io/pypi/v/google-shopping-merchant-promotions.svg + :target: https://pypi.org/project/google-shopping-merchant-promotions/ +.. |versions| image:: https://img.shields.io/pypi/pyversions/google-shopping-merchant-promotions.svg + :target: https://pypi.org/project/google-shopping-merchant-promotions/ +.. _Merchant API: https://developers.google.com/merchant/api +.. _Client Library Documentation: https://googleapis.dev/python/google-shopping-merchant-promotions/latest +.. _Product Documentation: https://developers.google.com/merchant/api + +Quick Start +----------- + +In order to use this library, you first need to go through the following steps: + +1. `Select or create a Cloud Platform project.`_ +2. `Enable billing for your project.`_ +3. `Enable the Merchant API.`_ +4. `Setup Authentication.`_ + +.. _Select or create a Cloud Platform project.: https://console.cloud.google.com/project +.. _Enable billing for your project.: https://cloud.google.com/billing/docs/how-to/modify-project#enable_billing_for_a_project +.. _Enable the Merchant API.: https://developers.google.com/merchant/api +.. _Setup Authentication.: https://googleapis.dev/python/google-api-core/latest/auth.html + +Installation +~~~~~~~~~~~~ + +Install this library in a virtual environment using `venv`_. `venv`_ is a tool that +creates isolated Python environments. These isolated environments can have separate +versions of Python packages, which allows you to isolate one project's dependencies +from the dependencies of other projects. + +With `venv`_, it's possible to install this library without needing system +install permissions, and without clashing with the installed system +dependencies. + +.. 
_`venv`: https://docs.python.org/3/library/venv.html + + +Code samples and snippets +~~~~~~~~~~~~~~~~~~~~~~~~~ + +Code samples and snippets live in the `samples/`_ folder. + +.. _samples/: https://github.com/googleapis/google-cloud-python/tree/main/packages/google-shopping-merchant-promotions/samples + + +Supported Python Versions +^^^^^^^^^^^^^^^^^^^^^^^^^ +Our client libraries are compatible with all current `active`_ and `maintenance`_ versions of +Python. + +Python >= 3.7 + +.. _active: https://devguide.python.org/devcycle/#in-development-main-branch +.. _maintenance: https://devguide.python.org/devcycle/#maintenance-branches + +Unsupported Python Versions +^^^^^^^^^^^^^^^^^^^^^^^^^^^ +Python <= 3.6 + +If you are using an `end-of-life`_ +version of Python, we recommend that you update as soon as possible to an actively supported version. + +.. _end-of-life: https://devguide.python.org/devcycle/#end-of-life-branches + +Mac/Linux +^^^^^^^^^ + +.. code-block:: console + + python3 -m venv + source /bin/activate + pip install google-shopping-merchant-promotions + + +Windows +^^^^^^^ + +.. code-block:: console + + py -m venv + .\\Scripts\activate + pip install google-shopping-merchant-promotions + +Next Steps +~~~~~~~~~~ + +- Read the `Client Library Documentation`_ for Merchant API + to see other available methods on the client. +- Read the `Merchant API Product documentation`_ to learn + more about the product and see How-to Guides. +- View this `README`_ to see the full list of Cloud + APIs that we cover. + +.. _Merchant API Product documentation: https://developers.google.com/merchant/api +.. 
_README: https://github.com/googleapis/google-cloud-python/blob/main/README.rst diff --git a/packages/google-shopping-merchant-promotions/docs/CHANGELOG.md b/packages/google-shopping-merchant-promotions/docs/CHANGELOG.md new file mode 120000 index 000000000000..04c99a55caae --- /dev/null +++ b/packages/google-shopping-merchant-promotions/docs/CHANGELOG.md @@ -0,0 +1 @@ +../CHANGELOG.md \ No newline at end of file diff --git a/packages/google-shopping-merchant-promotions/docs/README.rst b/packages/google-shopping-merchant-promotions/docs/README.rst new file mode 120000 index 000000000000..89a0106941ff --- /dev/null +++ b/packages/google-shopping-merchant-promotions/docs/README.rst @@ -0,0 +1 @@ +../README.rst \ No newline at end of file diff --git a/packages/google-shopping-merchant-promotions/docs/_static/custom.css b/packages/google-shopping-merchant-promotions/docs/_static/custom.css new file mode 100644 index 000000000000..b0a295464b23 --- /dev/null +++ b/packages/google-shopping-merchant-promotions/docs/_static/custom.css @@ -0,0 +1,20 @@ +div#python2-eol { + border-color: red; + border-width: medium; +} + +/* Ensure minimum width for 'Parameters' / 'Returns' column */ +dl.field-list > dt { + min-width: 100px +} + +/* Insert space between methods for readability */ +dl.method { + padding-top: 10px; + padding-bottom: 10px +} + +/* Insert empty space between classes */ +dl.class { + padding-bottom: 50px +} diff --git a/packages/google-shopping-merchant-promotions/docs/_templates/layout.html b/packages/google-shopping-merchant-promotions/docs/_templates/layout.html new file mode 100644 index 000000000000..6316a537f72b --- /dev/null +++ b/packages/google-shopping-merchant-promotions/docs/_templates/layout.html @@ -0,0 +1,50 @@ + +{% extends "!layout.html" %} +{%- block content %} +{%- if theme_fixed_sidebar|lower == 'true' %} +
+ {{ sidebar() }} + {%- block document %} +
+ {%- if render_sidebar %} +
+ {%- endif %} + + {%- block relbar_top %} + {%- if theme_show_relbar_top|tobool %} + + {%- endif %} + {% endblock %} + +
+
+ As of January 1, 2020 this library no longer supports Python 2 on the latest released version. + Library versions released prior to that date will continue to be available. For more information please + visit Python 2 support on Google Cloud. +
+ {% block body %} {% endblock %} +
+ + {%- block relbar_bottom %} + {%- if theme_show_relbar_bottom|tobool %} + + {%- endif %} + {% endblock %} + + {%- if render_sidebar %} +
+ {%- endif %} +
+ {%- endblock %} +
+
+{%- else %} +{{ super() }} +{%- endif %} +{%- endblock %} diff --git a/packages/google-shopping-merchant-promotions/docs/conf.py b/packages/google-shopping-merchant-promotions/docs/conf.py new file mode 100644 index 000000000000..507599599eaa --- /dev/null +++ b/packages/google-shopping-merchant-promotions/docs/conf.py @@ -0,0 +1,384 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# google-shopping-merchant-promotions documentation build configuration file +# +# This file is execfile()d with the current directory set to its +# containing dir. +# +# Note that not all possible configuration values are present in this +# autogenerated file. +# +# All configuration values have a default; values that are commented out +# serve to show the default. + +import os +import shlex +import sys + +# If extensions (or modules to document with autodoc) are in another directory, +# add these directories to sys.path here. If the directory is relative to the +# documentation root, use os.path.abspath to make it absolute, like shown here. +sys.path.insert(0, os.path.abspath("..")) + +# For plugins that can not read conf.py. +# See also: https://github.com/docascode/sphinx-docfx-yaml/issues/85 +sys.path.insert(0, os.path.abspath(".")) + +__version__ = "" + +# -- General configuration ------------------------------------------------ + +# If your documentation needs a minimal Sphinx version, state it here. +needs_sphinx = "1.5.5" + +# Add any Sphinx extension module names here, as strings. They can be +# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom +# ones. +extensions = [ + "sphinx.ext.autodoc", + "sphinx.ext.autosummary", + "sphinx.ext.intersphinx", + "sphinx.ext.coverage", + "sphinx.ext.doctest", + "sphinx.ext.napoleon", + "sphinx.ext.todo", + "sphinx.ext.viewcode", + "recommonmark", +] + +# autodoc/autosummary flags +autoclass_content = "both" +autodoc_default_options = {"members": True} +autosummary_generate = True + + +# Add any paths that contain templates here, relative to this directory. +templates_path = ["_templates"] + +# The suffix(es) of source filenames. +# You can specify multiple suffix as a list of string: +# source_suffix = ['.rst', '.md'] +source_suffix = [".rst", ".md"] + +# The encoding of source files. +# source_encoding = 'utf-8-sig' + +# The root toctree document. +root_doc = "index" + +# General information about the project. +project = "google-shopping-merchant-promotions" +copyright = "2019, Google" +author = "Google APIs" + +# The version info for the project you're documenting, acts as replacement for +# |version| and |release|, also used in various other places throughout the +# built documents. +# +# The full version, including alpha/beta/rc tags. +release = __version__ +# The short X.Y version. +version = ".".join(release.split(".")[0:2]) + +# The language for content autogenerated by Sphinx. 
Refer to documentation +# for a list of supported languages. +# +# This is also used if you do content translation via gettext catalogs. +# Usually you set "language" from the command line for these cases. +language = None + +# There are two options for replacing |today|: either, you set today to some +# non-false value, then it is used: +# today = '' +# Else, today_fmt is used as the format for a strftime call. +# today_fmt = '%B %d, %Y' + +# List of patterns, relative to source directory, that match files and +# directories to ignore when looking for source files. +exclude_patterns = [ + "_build", + "**/.nox/**/*", + "samples/AUTHORING_GUIDE.md", + "samples/CONTRIBUTING.md", + "samples/snippets/README.rst", +] + +# The reST default role (used for this markup: `text`) to use for all +# documents. +# default_role = None + +# If true, '()' will be appended to :func: etc. cross-reference text. +# add_function_parentheses = True + +# If true, the current module name will be prepended to all description +# unit titles (such as .. function::). +# add_module_names = True + +# If true, sectionauthor and moduleauthor directives will be shown in the +# output. They are ignored by default. +# show_authors = False + +# The name of the Pygments (syntax highlighting) style to use. +pygments_style = "sphinx" + +# A list of ignored prefixes for module index sorting. +# modindex_common_prefix = [] + +# If true, keep warnings as "system message" paragraphs in the built documents. +# keep_warnings = False + +# If true, `todo` and `todoList` produce output, else they produce nothing. +todo_include_todos = True + + +# -- Options for HTML output ---------------------------------------------- + +# The theme to use for HTML and HTML Help pages. See the documentation for +# a list of builtin themes. +html_theme = "alabaster" + +# Theme options are theme-specific and customize the look and feel of a theme +# further. For a list of options available for each theme, see the +# documentation. +html_theme_options = { + "description": "Google Cloud Client Libraries for google-shopping-merchant-promotions", + "github_user": "googleapis", + "github_repo": "google-cloud-python", + "github_banner": True, + "font_family": "'Roboto', Georgia, sans", + "head_font_family": "'Roboto', Georgia, serif", + "code_font_family": "'Roboto Mono', 'Consolas', monospace", +} + +# Add any paths that contain custom themes here, relative to this directory. +# html_theme_path = [] + +# The name for this set of Sphinx documents. If None, it defaults to +# " v documentation". +# html_title = None + +# A shorter title for the navigation bar. Default is the same as html_title. +# html_short_title = None + +# The name of an image file (relative to this directory) to place at the top +# of the sidebar. +# html_logo = None + +# The name of an image file (within the static path) to use as favicon of the +# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 +# pixels large. +# html_favicon = None + +# Add any paths that contain custom static files (such as style sheets) here, +# relative to this directory. They are copied after the builtin static files, +# so a file named "default.css" will overwrite the builtin "default.css". +html_static_path = ["_static"] + +# Add any extra paths that contain custom files (such as robots.txt or +# .htaccess) here, relative to this directory. These files are copied +# directly to the root of the documentation. 
+# html_extra_path = [] + +# If not '', a 'Last updated on:' timestamp is inserted at every page bottom, +# using the given strftime format. +# html_last_updated_fmt = '%b %d, %Y' + +# If true, SmartyPants will be used to convert quotes and dashes to +# typographically correct entities. +# html_use_smartypants = True + +# Custom sidebar templates, maps document names to template names. +# html_sidebars = {} + +# Additional templates that should be rendered to pages, maps page names to +# template names. +# html_additional_pages = {} + +# If false, no module index is generated. +# html_domain_indices = True + +# If false, no index is generated. +# html_use_index = True + +# If true, the index is split into individual pages for each letter. +# html_split_index = False + +# If true, links to the reST sources are added to the pages. +# html_show_sourcelink = True + +# If true, "Created using Sphinx" is shown in the HTML footer. Default is True. +# html_show_sphinx = True + +# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. +# html_show_copyright = True + +# If true, an OpenSearch description file will be output, and all pages will +# contain a tag referring to it. The value of this option must be the +# base URL from which the finished HTML is served. +# html_use_opensearch = '' + +# This is the file name suffix for HTML files (e.g. ".xhtml"). +# html_file_suffix = None + +# Language to be used for generating the HTML full-text search index. +# Sphinx supports the following languages: +# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja' +# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr' +# html_search_language = 'en' + +# A dictionary with options for the search language support, empty by default. +# Now only 'ja' uses this config value +# html_search_options = {'type': 'default'} + +# The name of a javascript file (relative to the configuration directory) that +# implements a search results scorer. If empty, the default will be used. +# html_search_scorer = 'scorer.js' + +# Output file base name for HTML help builder. +htmlhelp_basename = "google-shopping-merchant-promotions-doc" + +# -- Options for warnings ------------------------------------------------------ + + +suppress_warnings = [ + # Temporarily suppress this to avoid "more than one target found for + # cross-reference" warning, which are intractable for us to avoid while in + # a mono-repo. + # See https://github.com/sphinx-doc/sphinx/blob + # /2a65ffeef5c107c19084fabdd706cdff3f52d93c/sphinx/domains/python.py#L843 + "ref.python" +] + +# -- Options for LaTeX output --------------------------------------------- + +latex_elements = { + # The paper size ('letterpaper' or 'a4paper'). + #'papersize': 'letterpaper', + # The font size ('10pt', '11pt' or '12pt'). + #'pointsize': '10pt', + # Additional stuff for the LaTeX preamble. + #'preamble': '', + # Latex figure (float) alignment + #'figure_align': 'htbp', +} + +# Grouping the document tree into LaTeX files. List of tuples +# (source start file, target name, title, +# author, documentclass [howto, manual, or own class]). +latex_documents = [ + ( + root_doc, + "google-shopping-merchant-promotions.tex", + "google-shopping-merchant-promotions Documentation", + author, + "manual", + ) +] + +# The name of an image file (relative to this directory) to place at the top of +# the title page. +# latex_logo = None + +# For "manual" documents, if this is true, then toplevel headings are parts, +# not chapters. 
+# latex_use_parts = False + +# If true, show page references after internal links. +# latex_show_pagerefs = False + +# If true, show URL addresses after external links. +# latex_show_urls = False + +# Documents to append as an appendix to all manuals. +# latex_appendices = [] + +# If false, no module index is generated. +# latex_domain_indices = True + + +# -- Options for manual page output --------------------------------------- + +# One entry per manual page. List of tuples +# (source start file, name, description, authors, manual section). +man_pages = [ + ( + root_doc, + "google-shopping-merchant-promotions", + "google-shopping-merchant-promotions Documentation", + [author], + 1, + ) +] + +# If true, show URL addresses after external links. +# man_show_urls = False + + +# -- Options for Texinfo output ------------------------------------------- + +# Grouping the document tree into Texinfo files. List of tuples +# (source start file, target name, title, author, +# dir menu entry, description, category) +texinfo_documents = [ + ( + root_doc, + "google-shopping-merchant-promotions", + "google-shopping-merchant-promotions Documentation", + author, + "google-shopping-merchant-promotions", + "google-shopping-merchant-promotions Library", + "APIs", + ) +] + +# Documents to append as an appendix to all manuals. +# texinfo_appendices = [] + +# If false, no module index is generated. +# texinfo_domain_indices = True + +# How to display URL addresses: 'footnote', 'no', or 'inline'. +# texinfo_show_urls = 'footnote' + +# If true, do not generate a @detailmenu in the "Top" node's menu. +# texinfo_no_detailmenu = False + + +# Example configuration for intersphinx: refer to the Python standard library. +intersphinx_mapping = { + "python": ("https://python.readthedocs.org/en/latest/", None), + "google-auth": ("https://googleapis.dev/python/google-auth/latest/", None), + "google.api_core": ( + "https://googleapis.dev/python/google-api-core/latest/", + None, + ), + "grpc": ("https://grpc.github.io/grpc/python/", None), + "proto-plus": ("https://proto-plus-python.readthedocs.io/en/latest/", None), + "protobuf": ("https://googleapis.dev/python/protobuf/latest/", None), +} + + +# Napoleon settings +napoleon_google_docstring = True +napoleon_numpy_docstring = True +napoleon_include_private_with_doc = False +napoleon_include_special_with_doc = True +napoleon_use_admonition_for_examples = False +napoleon_use_admonition_for_notes = False +napoleon_use_admonition_for_references = False +napoleon_use_ivar = False +napoleon_use_param = True +napoleon_use_rtype = True diff --git a/packages/google-shopping-merchant-promotions/docs/index.rst b/packages/google-shopping-merchant-promotions/docs/index.rst new file mode 100644 index 000000000000..f62a455bad54 --- /dev/null +++ b/packages/google-shopping-merchant-promotions/docs/index.rst @@ -0,0 +1,23 @@ +.. include:: README.rst + +.. include:: multiprocessing.rst + + +API Reference +------------- +.. toctree:: + :maxdepth: 2 + + merchant_promotions_v1beta/services_ + merchant_promotions_v1beta/types_ + + +Changelog +--------- + +For a list of all ``google-shopping-merchant-promotions`` releases: + +.. 
toctree:: + :maxdepth: 2 + + CHANGELOG diff --git a/packages/google-shopping-merchant-promotions/docs/merchant_promotions_v1beta/promotions_service.rst b/packages/google-shopping-merchant-promotions/docs/merchant_promotions_v1beta/promotions_service.rst new file mode 100644 index 000000000000..229477aebf39 --- /dev/null +++ b/packages/google-shopping-merchant-promotions/docs/merchant_promotions_v1beta/promotions_service.rst @@ -0,0 +1,10 @@ +PromotionsService +----------------------------------- + +.. automodule:: google.shopping.merchant_promotions_v1beta.services.promotions_service + :members: + :inherited-members: + +.. automodule:: google.shopping.merchant_promotions_v1beta.services.promotions_service.pagers + :members: + :inherited-members: diff --git a/packages/google-shopping-merchant-promotions/docs/merchant_promotions_v1beta/services_.rst b/packages/google-shopping-merchant-promotions/docs/merchant_promotions_v1beta/services_.rst new file mode 100644 index 000000000000..befbacf009f8 --- /dev/null +++ b/packages/google-shopping-merchant-promotions/docs/merchant_promotions_v1beta/services_.rst @@ -0,0 +1,6 @@ +Services for Google Shopping Merchant Promotions v1beta API +=========================================================== +.. toctree:: + :maxdepth: 2 + + promotions_service diff --git a/packages/google-shopping-merchant-promotions/docs/merchant_promotions_v1beta/types_.rst b/packages/google-shopping-merchant-promotions/docs/merchant_promotions_v1beta/types_.rst new file mode 100644 index 000000000000..a95c03a1e319 --- /dev/null +++ b/packages/google-shopping-merchant-promotions/docs/merchant_promotions_v1beta/types_.rst @@ -0,0 +1,6 @@ +Types for Google Shopping Merchant Promotions v1beta API +======================================================== + +.. automodule:: google.shopping.merchant_promotions_v1beta.types + :members: + :show-inheritance: diff --git a/packages/google-shopping-merchant-promotions/docs/multiprocessing.rst b/packages/google-shopping-merchant-promotions/docs/multiprocessing.rst new file mode 100644 index 000000000000..536d17b2ea65 --- /dev/null +++ b/packages/google-shopping-merchant-promotions/docs/multiprocessing.rst @@ -0,0 +1,7 @@ +.. note:: + + Because this client uses :mod:`grpc` library, it is safe to + share instances across threads. In multiprocessing scenarios, the best + practice is to create client instances *after* the invocation of + :func:`os.fork` by :class:`multiprocessing.pool.Pool` or + :class:`multiprocessing.Process`. diff --git a/packages/google-shopping-merchant-promotions/google/shopping/merchant_promotions/__init__.py b/packages/google-shopping-merchant-promotions/google/shopping/merchant_promotions/__init__.py new file mode 100644 index 000000000000..4b4a72118e56 --- /dev/null +++ b/packages/google-shopping-merchant-promotions/google/shopping/merchant_promotions/__init__.py @@ -0,0 +1,59 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +from google.shopping.merchant_promotions import gapic_version as package_version + +__version__ = package_version.__version__ + + +from google.shopping.merchant_promotions_v1beta.services.promotions_service.async_client import ( + PromotionsServiceAsyncClient, +) +from google.shopping.merchant_promotions_v1beta.services.promotions_service.client import ( + PromotionsServiceClient, +) +from google.shopping.merchant_promotions_v1beta.types.promotions import ( + GetPromotionRequest, + InsertPromotionRequest, + ListPromotionsRequest, + ListPromotionsResponse, + Promotion, +) +from google.shopping.merchant_promotions_v1beta.types.promotions_common import ( + Attributes, + CouponValueType, + OfferType, + ProductApplicability, + PromotionStatus, + RedemptionChannel, + StoreApplicability, +) + +__all__ = ( + "PromotionsServiceClient", + "PromotionsServiceAsyncClient", + "GetPromotionRequest", + "InsertPromotionRequest", + "ListPromotionsRequest", + "ListPromotionsResponse", + "Promotion", + "Attributes", + "PromotionStatus", + "CouponValueType", + "OfferType", + "ProductApplicability", + "RedemptionChannel", + "StoreApplicability", +) diff --git a/packages/google-shopping-merchant-promotions/google/shopping/merchant_promotions/gapic_version.py b/packages/google-shopping-merchant-promotions/google/shopping/merchant_promotions/gapic_version.py new file mode 100644 index 000000000000..33d37a7b677b --- /dev/null +++ b/packages/google-shopping-merchant-promotions/google/shopping/merchant_promotions/gapic_version.py @@ -0,0 +1,16 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +__version__ = "0.1.0" # {x-release-please-version} diff --git a/packages/google-shopping-merchant-promotions/google/shopping/merchant_promotions/py.typed b/packages/google-shopping-merchant-promotions/google/shopping/merchant_promotions/py.typed new file mode 100644 index 000000000000..53f2425ea9b3 --- /dev/null +++ b/packages/google-shopping-merchant-promotions/google/shopping/merchant_promotions/py.typed @@ -0,0 +1,2 @@ +# Marker file for PEP 561. +# The google-shopping-merchant-promotions package uses inline types. diff --git a/packages/google-shopping-merchant-promotions/google/shopping/merchant_promotions_v1beta/__init__.py b/packages/google-shopping-merchant-promotions/google/shopping/merchant_promotions_v1beta/__init__.py new file mode 100644 index 000000000000..0c6e254cc8f3 --- /dev/null +++ b/packages/google-shopping-merchant-promotions/google/shopping/merchant_promotions_v1beta/__init__.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from google.shopping.merchant_promotions_v1beta import gapic_version as package_version + +__version__ = package_version.__version__ + + +from .services.promotions_service import ( + PromotionsServiceAsyncClient, + PromotionsServiceClient, +) +from .types.promotions import ( + GetPromotionRequest, + InsertPromotionRequest, + ListPromotionsRequest, + ListPromotionsResponse, + Promotion, +) +from .types.promotions_common import ( + Attributes, + CouponValueType, + OfferType, + ProductApplicability, + PromotionStatus, + RedemptionChannel, + StoreApplicability, +) + +__all__ = ( + "PromotionsServiceAsyncClient", + "Attributes", + "CouponValueType", + "GetPromotionRequest", + "InsertPromotionRequest", + "ListPromotionsRequest", + "ListPromotionsResponse", + "OfferType", + "ProductApplicability", + "Promotion", + "PromotionStatus", + "PromotionsServiceClient", + "RedemptionChannel", + "StoreApplicability", +) diff --git a/packages/google-shopping-merchant-promotions/google/shopping/merchant_promotions_v1beta/gapic_metadata.json b/packages/google-shopping-merchant-promotions/google/shopping/merchant_promotions_v1beta/gapic_metadata.json new file mode 100644 index 000000000000..0f9f123e2e18 --- /dev/null +++ b/packages/google-shopping-merchant-promotions/google/shopping/merchant_promotions_v1beta/gapic_metadata.json @@ -0,0 +1,73 @@ + { + "comment": "This file maps proto services/RPCs to the corresponding library clients/methods", + "language": "python", + "libraryPackage": "google.shopping.merchant_promotions_v1beta", + "protoPackage": "google.shopping.merchant.promotions.v1beta", + "schema": "1.0", + "services": { + "PromotionsService": { + "clients": { + "grpc": { + "libraryClient": "PromotionsServiceClient", + "rpcs": { + "GetPromotion": { + "methods": [ + "get_promotion" + ] + }, + "InsertPromotion": { + "methods": [ + "insert_promotion" + ] + }, + "ListPromotions": { + "methods": [ + "list_promotions" + ] + } + } + }, + "grpc-async": { + "libraryClient": "PromotionsServiceAsyncClient", + "rpcs": { + "GetPromotion": { + "methods": [ + "get_promotion" + ] + }, + "InsertPromotion": { + "methods": [ + "insert_promotion" + ] + }, + "ListPromotions": { + "methods": [ + "list_promotions" + ] + } + } + }, + "rest": { + "libraryClient": "PromotionsServiceClient", + "rpcs": { + "GetPromotion": { + "methods": [ + "get_promotion" + ] + }, + "InsertPromotion": { + "methods": [ + "insert_promotion" + ] + }, + "ListPromotions": { + "methods": [ + "list_promotions" + ] + } + } + } + } + } + } +} diff --git a/packages/google-shopping-merchant-promotions/google/shopping/merchant_promotions_v1beta/gapic_version.py b/packages/google-shopping-merchant-promotions/google/shopping/merchant_promotions_v1beta/gapic_version.py new file mode 100644 index 000000000000..33d37a7b677b --- /dev/null +++ b/packages/google-shopping-merchant-promotions/google/shopping/merchant_promotions_v1beta/gapic_version.py @@ -0,0 +1,16 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may 
not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +__version__ = "0.1.0" # {x-release-please-version} diff --git a/packages/google-shopping-merchant-promotions/google/shopping/merchant_promotions_v1beta/py.typed b/packages/google-shopping-merchant-promotions/google/shopping/merchant_promotions_v1beta/py.typed new file mode 100644 index 000000000000..53f2425ea9b3 --- /dev/null +++ b/packages/google-shopping-merchant-promotions/google/shopping/merchant_promotions_v1beta/py.typed @@ -0,0 +1,2 @@ +# Marker file for PEP 561. +# The google-shopping-merchant-promotions package uses inline types. diff --git a/packages/google-shopping-merchant-promotions/google/shopping/merchant_promotions_v1beta/services/__init__.py b/packages/google-shopping-merchant-promotions/google/shopping/merchant_promotions_v1beta/services/__init__.py new file mode 100644 index 000000000000..8f6cf068242c --- /dev/null +++ b/packages/google-shopping-merchant-promotions/google/shopping/merchant_promotions_v1beta/services/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/packages/google-shopping-merchant-promotions/google/shopping/merchant_promotions_v1beta/services/promotions_service/__init__.py b/packages/google-shopping-merchant-promotions/google/shopping/merchant_promotions_v1beta/services/promotions_service/__init__.py new file mode 100644 index 000000000000..2fd1725e5e75 --- /dev/null +++ b/packages/google-shopping-merchant-promotions/google/shopping/merchant_promotions_v1beta/services/promotions_service/__init__.py @@ -0,0 +1,22 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
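The ``__init__.py`` that follows only re-exports the two clients, and nothing else in this patch shows the service being exercised. As a hedged illustration of the RPCs mapped in ``gapic_metadata.json`` above, here is a minimal sketch of calling ``ListPromotions`` with the synchronous client; the account ID is invented and Application Default Credentials are assumed.

```python
# Illustrative sketch only, not part of the generated package.
# Assumes the package is installed and Application Default Credentials are set up.
from google.shopping import merchant_promotions_v1beta


def list_promotions_example() -> None:
    # The no-argument constructor falls back to Application Default Credentials.
    client = merchant_promotions_v1beta.PromotionsServiceClient()
    request = merchant_promotions_v1beta.ListPromotionsRequest(
        parent="accounts/123456",  # hypothetical Merchant Center account
    )
    # list_promotions returns a pager that transparently fetches further pages.
    for promotion in client.list_promotions(request=request):
        print(promotion.name)
```

``GetPromotion`` and ``InsertPromotion`` follow the same request-object pattern with their corresponding request types.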
+# +from .async_client import PromotionsServiceAsyncClient +from .client import PromotionsServiceClient + +__all__ = ( + "PromotionsServiceClient", + "PromotionsServiceAsyncClient", +) diff --git a/packages/google-shopping-merchant-promotions/google/shopping/merchant_promotions_v1beta/services/promotions_service/async_client.py b/packages/google-shopping-merchant-promotions/google/shopping/merchant_promotions_v1beta/services/promotions_service/async_client.py new file mode 100644 index 000000000000..8550aff0a580 --- /dev/null +++ b/packages/google-shopping-merchant-promotions/google/shopping/merchant_promotions_v1beta/services/promotions_service/async_client.py @@ -0,0 +1,632 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +import functools +import re +from typing import ( + Callable, + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, +) + +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry_async as retries +from google.api_core.client_options import ClientOptions +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.shopping.merchant_promotions_v1beta import gapic_version as package_version + +try: + OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.AsyncRetry, object, None] # type: ignore + +from google.shopping.type.types import types + +from google.shopping.merchant_promotions_v1beta.services.promotions_service import ( + pagers, +) +from google.shopping.merchant_promotions_v1beta.types import ( + promotions, + promotions_common, +) + +from .client import PromotionsServiceClient +from .transports.base import DEFAULT_CLIENT_INFO, PromotionsServiceTransport +from .transports.grpc_asyncio import PromotionsServiceGrpcAsyncIOTransport + + +class PromotionsServiceAsyncClient: + """Service to manage promotions for products.""" + + _client: PromotionsServiceClient + + # Copy defaults from the synchronous client for use here. + # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. 
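Before the attribute block below, which mirrors the synchronous client, a hedged sketch of driving ``PromotionsServiceAsyncClient`` from an event loop; the key-file path and resource name are placeholders rather than values taken from this patch.

```python
# Illustrative sketch only: a hypothetical asyncio entry point for the async client.
# The service-account path and promotion resource name are placeholders.
import asyncio

from google.shopping.merchant_promotions_v1beta import (
    GetPromotionRequest,
    PromotionsServiceAsyncClient,
)


async def main() -> None:
    # Constructing the client inside the coroutine keeps the underlying
    # grpc.aio channel bound to the running event loop.
    client = PromotionsServiceAsyncClient.from_service_account_file(
        "service-account.json"  # placeholder path
    )
    request = GetPromotionRequest(
        name="accounts/123456/promotions/789"  # placeholder resource name
    )
    promotion = await client.get_promotion(request=request)
    print(promotion.name)


if __name__ == "__main__":
    asyncio.run(main())
```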
+ DEFAULT_ENDPOINT = PromotionsServiceClient.DEFAULT_ENDPOINT + DEFAULT_MTLS_ENDPOINT = PromotionsServiceClient.DEFAULT_MTLS_ENDPOINT + _DEFAULT_ENDPOINT_TEMPLATE = PromotionsServiceClient._DEFAULT_ENDPOINT_TEMPLATE + _DEFAULT_UNIVERSE = PromotionsServiceClient._DEFAULT_UNIVERSE + + promotion_path = staticmethod(PromotionsServiceClient.promotion_path) + parse_promotion_path = staticmethod(PromotionsServiceClient.parse_promotion_path) + common_billing_account_path = staticmethod( + PromotionsServiceClient.common_billing_account_path + ) + parse_common_billing_account_path = staticmethod( + PromotionsServiceClient.parse_common_billing_account_path + ) + common_folder_path = staticmethod(PromotionsServiceClient.common_folder_path) + parse_common_folder_path = staticmethod( + PromotionsServiceClient.parse_common_folder_path + ) + common_organization_path = staticmethod( + PromotionsServiceClient.common_organization_path + ) + parse_common_organization_path = staticmethod( + PromotionsServiceClient.parse_common_organization_path + ) + common_project_path = staticmethod(PromotionsServiceClient.common_project_path) + parse_common_project_path = staticmethod( + PromotionsServiceClient.parse_common_project_path + ) + common_location_path = staticmethod(PromotionsServiceClient.common_location_path) + parse_common_location_path = staticmethod( + PromotionsServiceClient.parse_common_location_path + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + PromotionsServiceAsyncClient: The constructed client. + """ + return PromotionsServiceClient.from_service_account_info.__func__(PromotionsServiceAsyncClient, info, *args, **kwargs) # type: ignore + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + PromotionsServiceAsyncClient: The constructed client. + """ + return PromotionsServiceClient.from_service_account_file.__func__(PromotionsServiceAsyncClient, filename, *args, **kwargs) # type: ignore + + from_service_account_json = from_service_account_file + + @classmethod + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[ClientOptions] = None + ): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. 
+ (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + return PromotionsServiceClient.get_mtls_endpoint_and_cert_source(client_options) # type: ignore + + @property + def transport(self) -> PromotionsServiceTransport: + """Returns the transport used by the client instance. + + Returns: + PromotionsServiceTransport: The transport used by the client instance. + """ + return self._client.transport + + @property + def api_endpoint(self): + """Return the API endpoint used by the client instance. + + Returns: + str: The API endpoint used by the client instance. + """ + return self._client._api_endpoint + + @property + def universe_domain(self) -> str: + """Return the universe domain used by the client instance. + + Returns: + str: The universe domain used + by the client instance. + """ + return self._client._universe_domain + + get_transport_class = functools.partial( + type(PromotionsServiceClient).get_transport_class, type(PromotionsServiceClient) + ) + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[ + Union[ + str, + PromotionsServiceTransport, + Callable[..., PromotionsServiceTransport], + ] + ] = "grpc_asyncio", + client_options: Optional[ClientOptions] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the promotions service async client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Optional[Union[str,PromotionsServiceTransport,Callable[..., PromotionsServiceTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport to use. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the PromotionsServiceTransport constructor. + If set to None, a transport is chosen automatically. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): + Custom options for the client. + + 1. The ``api_endpoint`` property can be used to override the + default endpoint provided by the client when ``transport`` is + not explicitly provided. Only if this property is not set and + ``transport`` was not explicitly provided, the endpoint is + determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment + variable, which have one of the following values: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto-switch to the + default mTLS endpoint if client certificate is present; this is + the default value). 
+ + 2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide a client certificate for mTLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + 3. The ``universe_domain`` property can be used to override the + default "googleapis.com" universe. Note that ``api_endpoint`` + property still takes precedence; and ``universe_domain`` is + currently not supported for mTLS. + + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + """ + self._client = PromotionsServiceClient( + credentials=credentials, + transport=transport, + client_options=client_options, + client_info=client_info, + ) + + async def insert_promotion( + self, + request: Optional[Union[promotions.InsertPromotionRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> promotions.Promotion: + r"""Inserts a promotion for your Merchant Center account. + If the promotion already exists, then it updates the + promotion instead. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.shopping import merchant_promotions_v1beta + + async def sample_insert_promotion(): + # Create a client + client = merchant_promotions_v1beta.PromotionsServiceAsyncClient() + + # Initialize request argument(s) + promotion = merchant_promotions_v1beta.Promotion() + promotion.promotion_id = "promotion_id_value" + promotion.content_language = "content_language_value" + promotion.target_country = "target_country_value" + promotion.redemption_channel = ['ONLINE'] + + request = merchant_promotions_v1beta.InsertPromotionRequest( + parent="parent_value", + promotion=promotion, + data_source="data_source_value", + ) + + # Make the request + response = await client.insert_promotion(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.shopping.merchant_promotions_v1beta.types.InsertPromotionRequest, dict]]): + The request object. Request message for the ``InsertPromotion`` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.shopping.merchant_promotions_v1beta.types.Promotion: + Represents a promotion. See the following articles for + more details. 
+ + Required promotion input attributes to pass data + validation checks are primarily defined below: + + \* [Promotions data + specification](\ https://support.google.com/merchants/answer/2906014) + \* [Local promotions data + specification](\ https://support.google.com/merchants/answer/10146130) + + After inserting, updating a promotion input, it may + take several minutes before the final promotion can + be retrieved. + + """ + # Create or coerce a protobuf request object. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, promotions.InsertPromotionRequest): + request = promotions.InsertPromotionRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.insert_promotion + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_promotion( + self, + request: Optional[Union[promotions.GetPromotionRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> promotions.Promotion: + r"""Retrieves the promotion from your Merchant Center + account. + After inserting or updating a promotion input, it may + take several minutes before the updated promotion can be + retrieved. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.shopping import merchant_promotions_v1beta + + async def sample_get_promotion(): + # Create a client + client = merchant_promotions_v1beta.PromotionsServiceAsyncClient() + + # Initialize request argument(s) + request = merchant_promotions_v1beta.GetPromotionRequest( + name="name_value", + ) + + # Make the request + response = await client.get_promotion(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.shopping.merchant_promotions_v1beta.types.GetPromotionRequest, dict]]): + The request object. Request message for the ``GetPromotion`` method. + name (:class:`str`): + Required. The name of the promotion to retrieve. Format: + ``accounts/{account}/promotions/{promotions}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.shopping.merchant_promotions_v1beta.types.Promotion: + Represents a promotion. See the following articles for + more details. + + Required promotion input attributes to pass data + validation checks are primarily defined below: + + \* [Promotions data + specification](\ https://support.google.com/merchants/answer/2906014) + \* [Local promotions data + specification](\ https://support.google.com/merchants/answer/10146130) + + After inserting, updating a promotion input, it may + take several minutes before the final promotion can + be retrieved. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, promotions.GetPromotionRequest): + request = promotions.GetPromotionRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.get_promotion + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_promotions( + self, + request: Optional[Union[promotions.ListPromotionsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListPromotionsAsyncPager: + r"""Lists the promotions in your Merchant Center account. The + response might contain fewer items than specified by + ``pageSize``. Rely on ``pageToken`` to determine if there are + more items to be requested. + + After inserting or updating a promotion, it may take several + minutes before the updated processed promotion can be retrieved. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.shopping import merchant_promotions_v1beta + + async def sample_list_promotions(): + # Create a client + client = merchant_promotions_v1beta.PromotionsServiceAsyncClient() + + # Initialize request argument(s) + request = merchant_promotions_v1beta.ListPromotionsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_promotions(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.shopping.merchant_promotions_v1beta.types.ListPromotionsRequest, dict]]): + The request object. Request message for the ``ListPromotions`` method. + parent (:class:`str`): + Required. The account to list processed promotions for. + Format: ``accounts/{account}`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.shopping.merchant_promotions_v1beta.services.promotions_service.pagers.ListPromotionsAsyncPager: + Response message for the ListPromotions method. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, promotions.ListPromotionsRequest): + request = promotions.ListPromotionsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.list_promotions + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListPromotionsAsyncPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def __aenter__(self) -> "PromotionsServiceAsyncClient": + return self + + async def __aexit__(self, exc_type, exc, tb): + await self.transport.close() + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("PromotionsServiceAsyncClient",) diff --git a/packages/google-shopping-merchant-promotions/google/shopping/merchant_promotions_v1beta/services/promotions_service/client.py b/packages/google-shopping-merchant-promotions/google/shopping/merchant_promotions_v1beta/services/promotions_service/client.py new file mode 100644 index 000000000000..027e378e0efa --- /dev/null +++ b/packages/google-shopping-merchant-promotions/google/shopping/merchant_promotions_v1beta/services/promotions_service/client.py @@ -0,0 +1,1047 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +import os +import re +from typing import ( + Callable, + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, + cast, +) +import warnings + +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.shopping.merchant_promotions_v1beta import gapic_version as package_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + +from google.shopping.type.types import types + +from google.shopping.merchant_promotions_v1beta.services.promotions_service import ( + pagers, +) +from google.shopping.merchant_promotions_v1beta.types import ( + promotions, + promotions_common, +) + +from .transports.base import DEFAULT_CLIENT_INFO, PromotionsServiceTransport +from .transports.grpc import PromotionsServiceGrpcTransport +from .transports.grpc_asyncio import PromotionsServiceGrpcAsyncIOTransport +from .transports.rest import PromotionsServiceRestTransport + + +class PromotionsServiceClientMeta(type): + """Metaclass for the PromotionsService client. + + This provides class-level methods for building and retrieving + support objects (e.g. transport) without polluting the client instance + objects. 
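The `__aenter__`/`__aexit__` pair shown above closes the client's transport when an `async with` block exits, so the client should not be shared with code that outlives the block. A hedged usage sketch, again assuming Application Default Credentials; `accounts/123` is a placeholder parent resource.

```python
# Sketch only: assumes Application Default Credentials; "accounts/123" is a
# hypothetical parent resource.
import asyncio

from google.shopping import merchant_promotions_v1beta


async def main() -> None:
    # Exiting the block closes the underlying transport, so do not share this
    # client with code that outlives it.
    async with merchant_promotions_v1beta.PromotionsServiceAsyncClient() as client:
        pager = await client.list_promotions(parent="accounts/123")
        async for promotion in pager:
            print(promotion.name)


asyncio.run(main())
```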
+        """
+
+    _transport_registry = (
+        OrderedDict()
+    )  # type: Dict[str, Type[PromotionsServiceTransport]]
+    _transport_registry["grpc"] = PromotionsServiceGrpcTransport
+    _transport_registry["grpc_asyncio"] = PromotionsServiceGrpcAsyncIOTransport
+    _transport_registry["rest"] = PromotionsServiceRestTransport
+
+    def get_transport_class(
+        cls,
+        label: Optional[str] = None,
+    ) -> Type[PromotionsServiceTransport]:
+        """Returns an appropriate transport class.
+
+        Args:
+            label: The name of the desired transport. If none is
+                provided, then the first transport in the registry is used.
+
+        Returns:
+            The transport class to use.
+        """
+        # If a specific transport is requested, return that one.
+        if label:
+            return cls._transport_registry[label]
+
+        # No transport is requested; return the default (that is, the first one
+        # in the dictionary).
+        return next(iter(cls._transport_registry.values()))
+
+
+class PromotionsServiceClient(metaclass=PromotionsServiceClientMeta):
+    """Service to manage promotions for products."""
+
+    @staticmethod
+    def _get_default_mtls_endpoint(api_endpoint):
+        """Converts api endpoint to mTLS endpoint.
+
+        Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to
+        "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively.
+        Args:
+            api_endpoint (Optional[str]): the api endpoint to convert.
+        Returns:
+            str: converted mTLS api endpoint.
+        """
+        if not api_endpoint:
+            return api_endpoint
+
+        mtls_endpoint_re = re.compile(
+            r"(?P<name>[^.]+)(?P<mtls>\.mtls)?(?P<sandbox>\.sandbox)?(?P<googledomain>\.googleapis\.com)?"
+        )
+
+        m = mtls_endpoint_re.match(api_endpoint)
+        name, mtls, sandbox, googledomain = m.groups()
+        if mtls or not googledomain:
+            return api_endpoint
+
+        if sandbox:
+            return api_endpoint.replace(
+                "sandbox.googleapis.com", "mtls.sandbox.googleapis.com"
+            )
+
+        return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com")
+
+    # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead.
+    DEFAULT_ENDPOINT = "merchantapi.googleapis.com"
+    DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__(  # type: ignore
+        DEFAULT_ENDPOINT
+    )
+
+    _DEFAULT_ENDPOINT_TEMPLATE = "merchantapi.{UNIVERSE_DOMAIN}"
+    _DEFAULT_UNIVERSE = "googleapis.com"
+
+    @classmethod
+    def from_service_account_info(cls, info: dict, *args, **kwargs):
+        """Creates an instance of this client using the provided credentials
+            info.
+
+        Args:
+            info (dict): The service account private key info.
+            args: Additional arguments to pass to the constructor.
+            kwargs: Additional arguments to pass to the constructor.
+
+        Returns:
+            PromotionsServiceClient: The constructed client.
+        """
+        credentials = service_account.Credentials.from_service_account_info(info)
+        kwargs["credentials"] = credentials
+        return cls(*args, **kwargs)
+
+    @classmethod
+    def from_service_account_file(cls, filename: str, *args, **kwargs):
+        """Creates an instance of this client using the provided credentials
+            file.
+
+        Args:
+            filename (str): The path to the service account private key json
+                file.
+            args: Additional arguments to pass to the constructor.
+            kwargs: Additional arguments to pass to the constructor.
+
+        Returns:
+            PromotionsServiceClient: The constructed client.
+        """
+        credentials = service_account.Credentials.from_service_account_file(filename)
+        kwargs["credentials"] = credentials
+        return cls(*args, **kwargs)
+
+    from_service_account_json = from_service_account_file
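The `from_service_account_*` constructors above simply load `service_account.Credentials` and forward them, along with any other keyword arguments, to `__init__`. A minimal sketch, assuming a downloaded service-account key file; the path is a placeholder.

```python
# Sketch only: the key path is a placeholder; extra kwargs such as
# transport="rest" are forwarded unchanged to PromotionsServiceClient().
from google.shopping import merchant_promotions_v1beta

client = merchant_promotions_v1beta.PromotionsServiceClient.from_service_account_file(
    "/path/to/service-account.json",
    transport="rest",
)

# from_service_account_info accepts already-parsed key material instead:
# import json
# info = json.load(open("/path/to/service-account.json"))
# client = merchant_promotions_v1beta.PromotionsServiceClient.from_service_account_info(info)
```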
+
+    @property
+    def transport(self) -> PromotionsServiceTransport:
+        """Returns the transport used by the client instance.
+
+        Returns:
+            PromotionsServiceTransport: The transport used by the client
+                instance.
+        """
+        return self._transport
+
+    @staticmethod
+    def promotion_path(
+        account: str,
+        promotion: str,
+    ) -> str:
+        """Returns a fully-qualified promotion string."""
+        return "accounts/{account}/promotions/{promotion}".format(
+            account=account,
+            promotion=promotion,
+        )
+
+    @staticmethod
+    def parse_promotion_path(path: str) -> Dict[str, str]:
+        """Parses a promotion path into its component segments."""
+        m = re.match(
+            r"^accounts/(?P<account>.+?)/promotions/(?P<promotion>.+?)$", path
+        )
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_billing_account_path(
+        billing_account: str,
+    ) -> str:
+        """Returns a fully-qualified billing_account string."""
+        return "billingAccounts/{billing_account}".format(
+            billing_account=billing_account,
+        )
+
+    @staticmethod
+    def parse_common_billing_account_path(path: str) -> Dict[str, str]:
+        """Parse a billing_account path into its component segments."""
+        m = re.match(r"^billingAccounts/(?P<billing_account>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_folder_path(
+        folder: str,
+    ) -> str:
+        """Returns a fully-qualified folder string."""
+        return "folders/{folder}".format(
+            folder=folder,
+        )
+
+    @staticmethod
+    def parse_common_folder_path(path: str) -> Dict[str, str]:
+        """Parse a folder path into its component segments."""
+        m = re.match(r"^folders/(?P<folder>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_organization_path(
+        organization: str,
+    ) -> str:
+        """Returns a fully-qualified organization string."""
+        return "organizations/{organization}".format(
+            organization=organization,
+        )
+
+    @staticmethod
+    def parse_common_organization_path(path: str) -> Dict[str, str]:
+        """Parse a organization path into its component segments."""
+        m = re.match(r"^organizations/(?P<organization>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_project_path(
+        project: str,
+    ) -> str:
+        """Returns a fully-qualified project string."""
+        return "projects/{project}".format(
+            project=project,
+        )
+
+    @staticmethod
+    def parse_common_project_path(path: str) -> Dict[str, str]:
+        """Parse a project path into its component segments."""
+        m = re.match(r"^projects/(?P<project>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_location_path(
+        project: str,
+        location: str,
+    ) -> str:
+        """Returns a fully-qualified location string."""
+        return "projects/{project}/locations/{location}".format(
+            project=project,
+            location=location,
+        )
+
+    @staticmethod
+    def parse_common_location_path(path: str) -> Dict[str, str]:
+        """Parse a location path into its component segments."""
+        m = re.match(
+            r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)$", path
+        )
+        return m.groupdict() if m else {}
+
+    @classmethod
+    def get_mtls_endpoint_and_cert_source(
+        cls, client_options: Optional[client_options_lib.ClientOptions] = None
+    ):
+        """Deprecated. Return the API endpoint and client cert source for mutual TLS.
+
+        The client cert source is determined in the following order:
+        (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the
+        client cert source is None.
+        (2) if `client_options.client_cert_source` is provided, use the provided one; if the
+        default client cert source exists, use the default one; otherwise the client cert
+        source is None.
+
+        The API endpoint is determined in the following order:
+        (1) if `client_options.api_endpoint` if provided, use the provided one.
+ (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + + warnings.warn( + "get_mtls_endpoint_and_cert_source is deprecated. Use the api_endpoint property instead.", + DeprecationWarning, + ) + if client_options is None: + client_options = client_options_lib.ClientOptions() + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_client_cert not in ("true", "false"): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError( + "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Figure out the client cert source to use. + client_cert_source = None + if use_client_cert == "true": + if client_options.client_cert_source: + client_cert_source = client_options.client_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + + # Figure out which api endpoint to use. + if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + elif use_mtls_endpoint == "always" or ( + use_mtls_endpoint == "auto" and client_cert_source + ): + api_endpoint = cls.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = cls.DEFAULT_ENDPOINT + + return api_endpoint, client_cert_source + + @staticmethod + def _read_environment_variables(): + """Returns the environment variables used by the client. + + Returns: + Tuple[bool, str, str]: returns the GOOGLE_API_USE_CLIENT_CERTIFICATE, + GOOGLE_API_USE_MTLS_ENDPOINT, and GOOGLE_CLOUD_UNIVERSE_DOMAIN environment variables. + + Raises: + ValueError: If GOOGLE_API_USE_CLIENT_CERTIFICATE is not + any of ["true", "false"]. + google.auth.exceptions.MutualTLSChannelError: If GOOGLE_API_USE_MTLS_ENDPOINT + is not any of ["auto", "never", "always"]. + """ + use_client_cert = os.getenv( + "GOOGLE_API_USE_CLIENT_CERTIFICATE", "false" + ).lower() + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto").lower() + universe_domain_env = os.getenv("GOOGLE_CLOUD_UNIVERSE_DOMAIN") + if use_client_cert not in ("true", "false"): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError( + "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + return use_client_cert == "true", use_mtls_endpoint, universe_domain_env + + @staticmethod + def _get_client_cert_source(provided_cert_source, use_cert_flag): + """Return the client cert source to be used by the client. 
+ + Args: + provided_cert_source (bytes): The client certificate source provided. + use_cert_flag (bool): A flag indicating whether to use the client certificate. + + Returns: + bytes or None: The client cert source to be used by the client. + """ + client_cert_source = None + if use_cert_flag: + if provided_cert_source: + client_cert_source = provided_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + return client_cert_source + + @staticmethod + def _get_api_endpoint( + api_override, client_cert_source, universe_domain, use_mtls_endpoint + ): + """Return the API endpoint used by the client. + + Args: + api_override (str): The API endpoint override. If specified, this is always + the return value of this function and the other arguments are not used. + client_cert_source (bytes): The client certificate source used by the client. + universe_domain (str): The universe domain used by the client. + use_mtls_endpoint (str): How to use the mTLS endpoint, which depends also on the other parameters. + Possible values are "always", "auto", or "never". + + Returns: + str: The API endpoint to be used by the client. + """ + if api_override is not None: + api_endpoint = api_override + elif use_mtls_endpoint == "always" or ( + use_mtls_endpoint == "auto" and client_cert_source + ): + _default_universe = PromotionsServiceClient._DEFAULT_UNIVERSE + if universe_domain != _default_universe: + raise MutualTLSChannelError( + f"mTLS is not supported in any universe other than {_default_universe}." + ) + api_endpoint = PromotionsServiceClient.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = PromotionsServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=universe_domain + ) + return api_endpoint + + @staticmethod + def _get_universe_domain( + client_universe_domain: Optional[str], universe_domain_env: Optional[str] + ) -> str: + """Return the universe domain used by the client. + + Args: + client_universe_domain (Optional[str]): The universe domain configured via the client options. + universe_domain_env (Optional[str]): The universe domain configured via the "GOOGLE_CLOUD_UNIVERSE_DOMAIN" environment variable. + + Returns: + str: The universe domain to be used by the client. + + Raises: + ValueError: If the universe domain is an empty string. + """ + universe_domain = PromotionsServiceClient._DEFAULT_UNIVERSE + if client_universe_domain is not None: + universe_domain = client_universe_domain + elif universe_domain_env is not None: + universe_domain = universe_domain_env + if len(universe_domain.strip()) == 0: + raise ValueError("Universe Domain cannot be an empty string.") + return universe_domain + + @staticmethod + def _compare_universes( + client_universe: str, credentials: ga_credentials.Credentials + ) -> bool: + """Returns True iff the universe domains used by the client and credentials match. + + Args: + client_universe (str): The universe domain configured via the client options. + credentials (ga_credentials.Credentials): The credentials being used in the client. + + Returns: + bool: True iff client_universe matches the universe in credentials. + + Raises: + ValueError: when client_universe does not match the universe in credentials. 
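The helpers above resolve the endpoint from the `merchantapi.{UNIVERSE_DOMAIN}` template unless an explicit `api_endpoint` override or the mTLS endpoint applies, and they reject non-default universes for mTLS. A hedged sketch of the effect is below; it uses anonymous credentials so no RPC is sent, the universe domain is a made-up value, and it assumes a google-api-core version whose `ClientOptions` accepts `universe_domain`.

```python
# Sketch only: anonymous credentials, no RPCs are made. The universe domain
# below is fictitious and only illustrates the endpoint template.
from google.api_core.client_options import ClientOptions
from google.auth.credentials import AnonymousCredentials
from google.shopping import merchant_promotions_v1beta

client = merchant_promotions_v1beta.PromotionsServiceClient(
    credentials=AnonymousCredentials(),
    client_options=ClientOptions(universe_domain="example-universe.test"),
)

# _DEFAULT_ENDPOINT_TEMPLATE ("merchantapi.{UNIVERSE_DOMAIN}") filled in with
# the configured domain; GOOGLE_CLOUD_UNIVERSE_DOMAIN would have the same effect.
print(client.universe_domain)  # example-universe.test
print(client.api_endpoint)     # merchantapi.example-universe.test
```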
+ """ + + default_universe = PromotionsServiceClient._DEFAULT_UNIVERSE + credentials_universe = getattr(credentials, "universe_domain", default_universe) + + if client_universe != credentials_universe: + raise ValueError( + "The configured universe domain " + f"({client_universe}) does not match the universe domain " + f"found in the credentials ({credentials_universe}). " + "If you haven't configured the universe domain explicitly, " + f"`{default_universe}` is the default." + ) + return True + + def _validate_universe_domain(self): + """Validates client's and credentials' universe domains are consistent. + + Returns: + bool: True iff the configured universe domain is valid. + + Raises: + ValueError: If the configured universe domain is not valid. + """ + self._is_universe_domain_valid = ( + self._is_universe_domain_valid + or PromotionsServiceClient._compare_universes( + self.universe_domain, self.transport._credentials + ) + ) + return self._is_universe_domain_valid + + @property + def api_endpoint(self): + """Return the API endpoint used by the client instance. + + Returns: + str: The API endpoint used by the client instance. + """ + return self._api_endpoint + + @property + def universe_domain(self) -> str: + """Return the universe domain used by the client instance. + + Returns: + str: The universe domain used by the client instance. + """ + return self._universe_domain + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[ + Union[ + str, + PromotionsServiceTransport, + Callable[..., PromotionsServiceTransport], + ] + ] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the promotions service client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Optional[Union[str,PromotionsServiceTransport,Callable[..., PromotionsServiceTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the PromotionsServiceTransport constructor. + If set to None, a transport is chosen automatically. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): + Custom options for the client. + + 1. The ``api_endpoint`` property can be used to override the + default endpoint provided by the client when ``transport`` is + not explicitly provided. Only if this property is not set and + ``transport`` was not explicitly provided, the endpoint is + determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment + variable, which have one of the following values: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto-switch to the + default mTLS endpoint if client certificate is present; this is + the default value). + + 2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide a client certificate for mTLS transport. If + not provided, the default SSL client certificate will be used if + present. 
If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + 3. The ``universe_domain`` property can be used to override the + default "googleapis.com" universe. Note that the ``api_endpoint`` + property still takes precedence; and ``universe_domain`` is + currently not supported for mTLS. + + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + """ + self._client_options = client_options + if isinstance(self._client_options, dict): + self._client_options = client_options_lib.from_dict(self._client_options) + if self._client_options is None: + self._client_options = client_options_lib.ClientOptions() + self._client_options = cast( + client_options_lib.ClientOptions, self._client_options + ) + + universe_domain_opt = getattr(self._client_options, "universe_domain", None) + + ( + self._use_client_cert, + self._use_mtls_endpoint, + self._universe_domain_env, + ) = PromotionsServiceClient._read_environment_variables() + self._client_cert_source = PromotionsServiceClient._get_client_cert_source( + self._client_options.client_cert_source, self._use_client_cert + ) + self._universe_domain = PromotionsServiceClient._get_universe_domain( + universe_domain_opt, self._universe_domain_env + ) + self._api_endpoint = None # updated below, depending on `transport` + + # Initialize the universe domain validation. + self._is_universe_domain_valid = False + + api_key_value = getattr(self._client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError( + "client_options.api_key and credentials are mutually exclusive" + ) + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + transport_provided = isinstance(transport, PromotionsServiceTransport) + if transport_provided: + # transport is a PromotionsServiceTransport instance. + if credentials or self._client_options.credentials_file or api_key_value: + raise ValueError( + "When providing a transport instance, " + "provide its credentials directly." + ) + if self._client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." 
+ ) + self._transport = cast(PromotionsServiceTransport, transport) + self._api_endpoint = self._transport.host + + self._api_endpoint = ( + self._api_endpoint + or PromotionsServiceClient._get_api_endpoint( + self._client_options.api_endpoint, + self._client_cert_source, + self._universe_domain, + self._use_mtls_endpoint, + ) + ) + + if not transport_provided: + import google.auth._default # type: ignore + + if api_key_value and hasattr( + google.auth._default, "get_api_key_credentials" + ): + credentials = google.auth._default.get_api_key_credentials( + api_key_value + ) + + transport_init: Union[ + Type[PromotionsServiceTransport], + Callable[..., PromotionsServiceTransport], + ] = ( + type(self).get_transport_class(transport) + if isinstance(transport, str) or transport is None + else cast(Callable[..., PromotionsServiceTransport], transport) + ) + # initialize with the provided callable or the passed in class + self._transport = transport_init( + credentials=credentials, + credentials_file=self._client_options.credentials_file, + host=self._api_endpoint, + scopes=self._client_options.scopes, + client_cert_source_for_mtls=self._client_cert_source, + quota_project_id=self._client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + api_audience=self._client_options.api_audience, + ) + + def insert_promotion( + self, + request: Optional[Union[promotions.InsertPromotionRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> promotions.Promotion: + r"""Inserts a promotion for your Merchant Center account. + If the promotion already exists, then it updates the + promotion instead. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.shopping import merchant_promotions_v1beta + + def sample_insert_promotion(): + # Create a client + client = merchant_promotions_v1beta.PromotionsServiceClient() + + # Initialize request argument(s) + promotion = merchant_promotions_v1beta.Promotion() + promotion.promotion_id = "promotion_id_value" + promotion.content_language = "content_language_value" + promotion.target_country = "target_country_value" + promotion.redemption_channel = ['ONLINE'] + + request = merchant_promotions_v1beta.InsertPromotionRequest( + parent="parent_value", + promotion=promotion, + data_source="data_source_value", + ) + + # Make the request + response = client.insert_promotion(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.shopping.merchant_promotions_v1beta.types.InsertPromotionRequest, dict]): + The request object. Request message for the ``InsertPromotion`` method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.shopping.merchant_promotions_v1beta.types.Promotion: + Represents a promotion. 
See the following articles for + more details. + + Required promotion input attributes to pass data + validation checks are primarily defined below: + + \* [Promotions data + specification](\ https://support.google.com/merchants/answer/2906014) + \* [Local promotions data + specification](\ https://support.google.com/merchants/answer/10146130) + + After inserting, updating a promotion input, it may + take several minutes before the final promotion can + be retrieved. + + """ + # Create or coerce a protobuf request object. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, promotions.InsertPromotionRequest): + request = promotions.InsertPromotionRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.insert_promotion] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_promotion( + self, + request: Optional[Union[promotions.GetPromotionRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> promotions.Promotion: + r"""Retrieves the promotion from your Merchant Center + account. + After inserting or updating a promotion input, it may + take several minutes before the updated promotion can be + retrieved. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.shopping import merchant_promotions_v1beta + + def sample_get_promotion(): + # Create a client + client = merchant_promotions_v1beta.PromotionsServiceClient() + + # Initialize request argument(s) + request = merchant_promotions_v1beta.GetPromotionRequest( + name="name_value", + ) + + # Make the request + response = client.get_promotion(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.shopping.merchant_promotions_v1beta.types.GetPromotionRequest, dict]): + The request object. Request message for the ``GetPromotion`` method. + name (str): + Required. The name of the promotion to retrieve. Format: + ``accounts/{account}/promotions/{promotions}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.shopping.merchant_promotions_v1beta.types.Promotion: + Represents a promotion. See the following articles for + more details. + + Required promotion input attributes to pass data + validation checks are primarily defined below: + + \* [Promotions data + specification](\ https://support.google.com/merchants/answer/2906014) + \* [Local promotions data + specification](\ https://support.google.com/merchants/answer/10146130) + + After inserting, updating a promotion input, it may + take several minutes before the final promotion can + be retrieved. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, promotions.GetPromotionRequest): + request = promotions.GetPromotionRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_promotion] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def list_promotions( + self, + request: Optional[Union[promotions.ListPromotionsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListPromotionsPager: + r"""Lists the promotions in your Merchant Center account. The + response might contain fewer items than specified by + ``pageSize``. Rely on ``pageToken`` to determine if there are + more items to be requested. + + After inserting or updating a promotion, it may take several + minutes before the updated processed promotion can be retrieved. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.shopping import merchant_promotions_v1beta + + def sample_list_promotions(): + # Create a client + client = merchant_promotions_v1beta.PromotionsServiceClient() + + # Initialize request argument(s) + request = merchant_promotions_v1beta.ListPromotionsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_promotions(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.shopping.merchant_promotions_v1beta.types.ListPromotionsRequest, dict]): + The request object. Request message for the ``ListPromotions`` method. + parent (str): + Required. The account to list processed promotions for. + Format: ``accounts/{account}`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.shopping.merchant_promotions_v1beta.services.promotions_service.pagers.ListPromotionsPager: + Response message for the ListPromotions method. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, promotions.ListPromotionsRequest): + request = promotions.ListPromotionsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_promotions] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListPromotionsPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def __enter__(self) -> "PromotionsServiceClient": + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! 
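As the `has_flattened_params` check above enforces, each RPC accepts either a fully populated request object or the flattened convenience argument (`parent`, `name`), never both at once. A sketch of the three call shapes; the account id is a placeholder and real credentials are assumed for the calls that actually reach the API.

```python
# Sketch only: "accounts/123" is a placeholder; real credentials are assumed
# for the successful calls.
from google.shopping import merchant_promotions_v1beta

client = merchant_promotions_v1beta.PromotionsServiceClient()

# 1) Flattened argument only.
pager = client.list_promotions(parent="accounts/123")

# 2) Request object only (equivalent to the call above).
request = merchant_promotions_v1beta.ListPromotionsRequest(parent="accounts/123")
pager = client.list_promotions(request=request)

# 3) Mixing both is rejected locally, before any RPC is sent.
try:
    client.list_promotions(request=request, parent="accounts/123")
except ValueError as exc:
    print(exc)  # individual field arguments must not be combined with `request`
```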
Exiting the with block will CLOSE the transport + and may cause errors in other clients! + """ + self.transport.close() + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("PromotionsServiceClient",) diff --git a/packages/google-shopping-merchant-promotions/google/shopping/merchant_promotions_v1beta/services/promotions_service/pagers.py b/packages/google-shopping-merchant-promotions/google/shopping/merchant_promotions_v1beta/services/promotions_service/pagers.py new file mode 100644 index 000000000000..e263299df628 --- /dev/null +++ b/packages/google-shopping-merchant-promotions/google/shopping/merchant_promotions_v1beta/services/promotions_service/pagers.py @@ -0,0 +1,155 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import ( + Any, + AsyncIterator, + Awaitable, + Callable, + Iterator, + Optional, + Sequence, + Tuple, +) + +from google.shopping.merchant_promotions_v1beta.types import promotions + + +class ListPromotionsPager: + """A pager for iterating through ``list_promotions`` requests. + + This class thinly wraps an initial + :class:`google.shopping.merchant_promotions_v1beta.types.ListPromotionsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``promotions`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListPromotions`` requests and continue to iterate + through the ``promotions`` field on the + corresponding responses. + + All the usual :class:`google.shopping.merchant_promotions_v1beta.types.ListPromotionsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., promotions.ListPromotionsResponse], + request: promotions.ListPromotionsRequest, + response: promotions.ListPromotionsResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.shopping.merchant_promotions_v1beta.types.ListPromotionsRequest): + The initial request object. + response (google.shopping.merchant_promotions_v1beta.types.ListPromotionsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = promotions.ListPromotionsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[promotions.ListPromotionsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[promotions.Promotion]: + for page in self.pages: + yield from page.promotions + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListPromotionsAsyncPager: + """A pager for iterating through ``list_promotions`` requests. + + This class thinly wraps an initial + :class:`google.shopping.merchant_promotions_v1beta.types.ListPromotionsResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``promotions`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListPromotions`` requests and continue to iterate + through the ``promotions`` field on the + corresponding responses. + + All the usual :class:`google.shopping.merchant_promotions_v1beta.types.ListPromotionsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., Awaitable[promotions.ListPromotionsResponse]], + request: promotions.ListPromotionsRequest, + response: promotions.ListPromotionsResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.shopping.merchant_promotions_v1beta.types.ListPromotionsRequest): + The initial request object. + response (google.shopping.merchant_promotions_v1beta.types.ListPromotionsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = promotions.ListPromotionsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[promotions.ListPromotionsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + + def __aiter__(self) -> AsyncIterator[promotions.Promotion]: + async def async_generator(): + async for page in self.pages: + for response in page.promotions: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) diff --git a/packages/google-shopping-merchant-promotions/google/shopping/merchant_promotions_v1beta/services/promotions_service/transports/__init__.py b/packages/google-shopping-merchant-promotions/google/shopping/merchant_promotions_v1beta/services/promotions_service/transports/__init__.py new file mode 100644 index 000000000000..4a1ef25a1981 --- /dev/null +++ b/packages/google-shopping-merchant-promotions/google/shopping/merchant_promotions_v1beta/services/promotions_service/transports/__init__.py @@ -0,0 +1,36 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +from typing import Dict, Type + +from .base import PromotionsServiceTransport +from .grpc import PromotionsServiceGrpcTransport +from .grpc_asyncio import PromotionsServiceGrpcAsyncIOTransport +from .rest import PromotionsServiceRestInterceptor, PromotionsServiceRestTransport + +# Compile a registry of transports. 
+_transport_registry = OrderedDict() # type: Dict[str, Type[PromotionsServiceTransport]] +_transport_registry["grpc"] = PromotionsServiceGrpcTransport +_transport_registry["grpc_asyncio"] = PromotionsServiceGrpcAsyncIOTransport +_transport_registry["rest"] = PromotionsServiceRestTransport + +__all__ = ( + "PromotionsServiceTransport", + "PromotionsServiceGrpcTransport", + "PromotionsServiceGrpcAsyncIOTransport", + "PromotionsServiceRestTransport", + "PromotionsServiceRestInterceptor", +) diff --git a/packages/google-shopping-merchant-promotions/google/shopping/merchant_promotions_v1beta/services/promotions_service/transports/base.py b/packages/google-shopping-merchant-promotions/google/shopping/merchant_promotions_v1beta/services/promotions_service/transports/base.py new file mode 100644 index 000000000000..9a5b6991cc00 --- /dev/null +++ b/packages/google-shopping-merchant-promotions/google/shopping/merchant_promotions_v1beta/services/promotions_service/transports/base.py @@ -0,0 +1,190 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import abc +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union + +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.shopping.merchant_promotions_v1beta import gapic_version as package_version +from google.shopping.merchant_promotions_v1beta.types import promotions + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +class PromotionsServiceTransport(abc.ABC): + """Abstract transport class for PromotionsService.""" + + AUTH_SCOPES = ("https://www.googleapis.com/auth/content",) + + DEFAULT_HOST: str = "merchantapi.googleapis.com" + + def __init__( + self, + *, + host: str = DEFAULT_HOST, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + **kwargs, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'merchantapi.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. 
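The registry above is what lets callers pick a transport by name instead of importing a transport class. A short sketch of the usual spellings (the string is resolved through this registry; ``grpc`` is the default for the sync client, while ``grpc_asyncio`` is used by the async client):

```python
from google.shopping import merchant_promotions_v1beta

# Default: gRPC over HTTP/2.
grpc_client = merchant_promotions_v1beta.PromotionsServiceClient(transport="grpc")

# REST: JSON over HTTP/1.1, useful where gRPC egress is blocked.
rest_client = merchant_promotions_v1beta.PromotionsServiceClient(transport="rest")
```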
+ credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A list of scopes. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + """ + + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} + + # Save the scopes. + self._scopes = scopes + + # If no credentials are provided, then determine the appropriate + # defaults. + if credentials and credentials_file: + raise core_exceptions.DuplicateCredentialArgs( + "'credentials_file' and 'credentials' are mutually exclusive" + ) + + if credentials_file is not None: + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, **scopes_kwargs, quota_project_id=quota_project_id + ) + elif credentials is None: + credentials, _ = google.auth.default( + **scopes_kwargs, quota_project_id=quota_project_id + ) + # Don't apply audience if the credentials file passed from user. + if hasattr(credentials, "with_gdch_audience"): + credentials = credentials.with_gdch_audience( + api_audience if api_audience else host + ) + + # If the credentials are service account credentials, then always try to use self signed JWT. + if ( + always_use_jwt_access + and isinstance(credentials, service_account.Credentials) + and hasattr(service_account.Credentials, "with_always_use_jwt_access") + ): + credentials = credentials.with_always_use_jwt_access(True) + + # Save the credentials. + self._credentials = credentials + + # Save the hostname. Default to port 443 (HTTPS) if none is specified. + if ":" not in host: + host += ":443" + self._host = host + + @property + def host(self): + return self._host + + def _prep_wrapped_messages(self, client_info): + # Precompute the wrapped methods. + self._wrapped_methods = { + self.insert_promotion: gapic_v1.method.wrap_method( + self.insert_promotion, + default_timeout=None, + client_info=client_info, + ), + self.get_promotion: gapic_v1.method.wrap_method( + self.get_promotion, + default_timeout=None, + client_info=client_info, + ), + self.list_promotions: gapic_v1.method.wrap_method( + self.list_promotions, + default_timeout=None, + client_info=client_info, + ), + } + + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! 
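As the constructor shows, ``credentials`` and ``credentials_file`` are mutually exclusive, and the transport falls back to Application Default Credentials when neither is given. A sketch of the two explicit options, with a hypothetical key path and the scope taken from ``AUTH_SCOPES``:

```python
import google.auth
from google.oauth2 import service_account

from google.shopping import merchant_promotions_v1beta

SCOPES = ["https://www.googleapis.com/auth/content"]

# Option 1: resolve Application Default Credentials yourself.
adc_credentials, _ = google.auth.default(scopes=SCOPES)
client = merchant_promotions_v1beta.PromotionsServiceClient(credentials=adc_credentials)

# Option 2: load a service account key explicitly ("key.json" is hypothetical).
sa_credentials = service_account.Credentials.from_service_account_file(
    "key.json", scopes=SCOPES
)
client = merchant_promotions_v1beta.PromotionsServiceClient(credentials=sa_credentials)
```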
+ """ + raise NotImplementedError() + + @property + def insert_promotion( + self, + ) -> Callable[ + [promotions.InsertPromotionRequest], + Union[promotions.Promotion, Awaitable[promotions.Promotion]], + ]: + raise NotImplementedError() + + @property + def get_promotion( + self, + ) -> Callable[ + [promotions.GetPromotionRequest], + Union[promotions.Promotion, Awaitable[promotions.Promotion]], + ]: + raise NotImplementedError() + + @property + def list_promotions( + self, + ) -> Callable[ + [promotions.ListPromotionsRequest], + Union[ + promotions.ListPromotionsResponse, + Awaitable[promotions.ListPromotionsResponse], + ], + ]: + raise NotImplementedError() + + @property + def kind(self) -> str: + raise NotImplementedError() + + +__all__ = ("PromotionsServiceTransport",) diff --git a/packages/google-shopping-merchant-promotions/google/shopping/merchant_promotions_v1beta/services/promotions_service/transports/grpc.py b/packages/google-shopping-merchant-promotions/google/shopping/merchant_promotions_v1beta/services/promotions_service/transports/grpc.py new file mode 100644 index 000000000000..eabb86f93ac1 --- /dev/null +++ b/packages/google-shopping-merchant-promotions/google/shopping/merchant_promotions_v1beta/services/promotions_service/transports/grpc.py @@ -0,0 +1,336 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, grpc_helpers +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +import grpc # type: ignore + +from google.shopping.merchant_promotions_v1beta.types import promotions + +from .base import DEFAULT_CLIENT_INFO, PromotionsServiceTransport + + +class PromotionsServiceGrpcTransport(PromotionsServiceTransport): + """gRPC backend transport for PromotionsService. + + Service to manage promotions for products. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. 
+ """ + + _stubs: Dict[str, Callable] + + def __init__( + self, + *, + host: str = "merchantapi.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'merchantapi.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if a ``channel`` instance is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if a ``channel`` instance is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if a ``channel`` instance is provided. + channel (Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]]): + A ``Channel`` instance through which to make calls, or a Callable + that constructs and returns one. If set to None, ``self.create_channel`` + is used to create the channel. If a Callable is given, it will be called + with the same arguments as used in ``self.create_channel``. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if a ``channel`` instance is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. 
+ google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if isinstance(channel, grpc.Channel): + # Ignore credentials if a channel was passed. + credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + # initialize with the provided callable or the default channel + channel_init = channel or type(self).create_channel + self._grpc_channel = channel_init( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @classmethod + def create_channel( + cls, + host: str = "merchantapi.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> grpc.Channel: + """Create and return a gRPC channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. 
+ kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + grpc.Channel: A gRPC channel object. + + Raises: + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + + return grpc_helpers.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs, + ) + + @property + def grpc_channel(self) -> grpc.Channel: + """Return the channel designed to connect to this service.""" + return self._grpc_channel + + @property + def insert_promotion( + self, + ) -> Callable[[promotions.InsertPromotionRequest], promotions.Promotion]: + r"""Return a callable for the insert promotion method over gRPC. + + Inserts a promotion for your Merchant Center account. + If the promotion already exists, then it updates the + promotion instead. + + Returns: + Callable[[~.InsertPromotionRequest], + ~.Promotion]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "insert_promotion" not in self._stubs: + self._stubs["insert_promotion"] = self.grpc_channel.unary_unary( + "/google.shopping.merchant.promotions.v1beta.PromotionsService/InsertPromotion", + request_serializer=promotions.InsertPromotionRequest.serialize, + response_deserializer=promotions.Promotion.deserialize, + ) + return self._stubs["insert_promotion"] + + @property + def get_promotion( + self, + ) -> Callable[[promotions.GetPromotionRequest], promotions.Promotion]: + r"""Return a callable for the get promotion method over gRPC. + + Retrieves the promotion from your Merchant Center + account. + After inserting or updating a promotion input, it may + take several minutes before the updated promotion can be + retrieved. + + Returns: + Callable[[~.GetPromotionRequest], + ~.Promotion]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_promotion" not in self._stubs: + self._stubs["get_promotion"] = self.grpc_channel.unary_unary( + "/google.shopping.merchant.promotions.v1beta.PromotionsService/GetPromotion", + request_serializer=promotions.GetPromotionRequest.serialize, + response_deserializer=promotions.Promotion.deserialize, + ) + return self._stubs["get_promotion"] + + @property + def list_promotions( + self, + ) -> Callable[ + [promotions.ListPromotionsRequest], promotions.ListPromotionsResponse + ]: + r"""Return a callable for the list promotions method over gRPC. + + Lists the promotions in your Merchant Center account. The + response might contain fewer items than specified by + ``pageSize``. Rely on ``pageToken`` to determine if there are + more items to be requested. + + After inserting or updating a promotion, it may take several + minutes before the updated processed promotion can be retrieved. + + Returns: + Callable[[~.ListPromotionsRequest], + ~.ListPromotionsResponse]: + A function that, when called, will call the underlying RPC + on the server. 
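Because ``__init__`` accepts a ready-made channel (in which case the credential arguments are ignored), callers who need non-default channel behaviour can build one through ``create_channel`` and inject it. A sketch, assuming extra keyword arguments such as ``options`` flow through to gRPC channel creation; the keepalive value is illustrative only:

```python
from google.shopping import merchant_promotions_v1beta
from google.shopping.merchant_promotions_v1beta.services.promotions_service.transports import (
    PromotionsServiceGrpcTransport,
)

# Build the channel explicitly, for example to tune gRPC channel arguments.
channel = PromotionsServiceGrpcTransport.create_channel(
    "merchantapi.googleapis.com",
    options=[("grpc.keepalive_time_ms", 30000)],
)

# A transport constructed around an existing channel ignores credential
# arguments; the channel above already carries them.
transport = PromotionsServiceGrpcTransport(channel=channel)
client = merchant_promotions_v1beta.PromotionsServiceClient(transport=transport)
```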
+ """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_promotions" not in self._stubs: + self._stubs["list_promotions"] = self.grpc_channel.unary_unary( + "/google.shopping.merchant.promotions.v1beta.PromotionsService/ListPromotions", + request_serializer=promotions.ListPromotionsRequest.serialize, + response_deserializer=promotions.ListPromotionsResponse.deserialize, + ) + return self._stubs["list_promotions"] + + def close(self): + self.grpc_channel.close() + + @property + def kind(self) -> str: + return "grpc" + + +__all__ = ("PromotionsServiceGrpcTransport",) diff --git a/packages/google-shopping-merchant-promotions/google/shopping/merchant_promotions_v1beta/services/promotions_service/transports/grpc_asyncio.py b/packages/google-shopping-merchant-promotions/google/shopping/merchant_promotions_v1beta/services/promotions_service/transports/grpc_asyncio.py new file mode 100644 index 000000000000..36184dc8e6c9 --- /dev/null +++ b/packages/google-shopping-merchant-promotions/google/shopping/merchant_promotions_v1beta/services/promotions_service/transports/grpc_asyncio.py @@ -0,0 +1,356 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1, grpc_helpers_async +from google.api_core import retry_async as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +import grpc # type: ignore +from grpc.experimental import aio # type: ignore + +from google.shopping.merchant_promotions_v1beta.types import promotions + +from .base import DEFAULT_CLIENT_INFO, PromotionsServiceTransport +from .grpc import PromotionsServiceGrpcTransport + + +class PromotionsServiceGrpcAsyncIOTransport(PromotionsServiceTransport): + """gRPC AsyncIO backend transport for PromotionsService. + + Service to manage promotions for products. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + + _grpc_channel: aio.Channel + _stubs: Dict[str, Callable] = {} + + @classmethod + def create_channel( + cls, + host: str = "merchantapi.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> aio.Channel: + """Create and return a gRPC AsyncIO channel object. 
+ Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + aio.Channel: A gRPC AsyncIO channel object. + """ + + return grpc_helpers_async.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs, + ) + + def __init__( + self, + *, + host: str = "merchantapi.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[Union[aio.Channel, Callable[..., aio.Channel]]] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'merchantapi.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if a ``channel`` instance is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if a ``channel`` instance is provided. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + channel (Optional[Union[aio.Channel, Callable[..., aio.Channel]]]): + A ``Channel`` instance through which to make calls, or a Callable + that constructs and returns one. If set to None, ``self.create_channel`` + is used to create the channel. If a Callable is given, it will be called + with the same arguments as used in ``self.create_channel``. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. 
A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if a ``channel`` instance is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if isinstance(channel, aio.Channel): + # Ignore credentials if a channel was passed. + credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + # initialize with the provided callable or the default channel + channel_init = channel or type(self).create_channel + self._grpc_channel = channel_init( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. 
This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @property + def grpc_channel(self) -> aio.Channel: + """Create the channel designed to connect to this service. + + This property caches on the instance; repeated calls return + the same channel. + """ + # Return the channel from cache. + return self._grpc_channel + + @property + def insert_promotion( + self, + ) -> Callable[[promotions.InsertPromotionRequest], Awaitable[promotions.Promotion]]: + r"""Return a callable for the insert promotion method over gRPC. + + Inserts a promotion for your Merchant Center account. + If the promotion already exists, then it updates the + promotion instead. + + Returns: + Callable[[~.InsertPromotionRequest], + Awaitable[~.Promotion]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "insert_promotion" not in self._stubs: + self._stubs["insert_promotion"] = self.grpc_channel.unary_unary( + "/google.shopping.merchant.promotions.v1beta.PromotionsService/InsertPromotion", + request_serializer=promotions.InsertPromotionRequest.serialize, + response_deserializer=promotions.Promotion.deserialize, + ) + return self._stubs["insert_promotion"] + + @property + def get_promotion( + self, + ) -> Callable[[promotions.GetPromotionRequest], Awaitable[promotions.Promotion]]: + r"""Return a callable for the get promotion method over gRPC. + + Retrieves the promotion from your Merchant Center + account. + After inserting or updating a promotion input, it may + take several minutes before the updated promotion can be + retrieved. + + Returns: + Callable[[~.GetPromotionRequest], + Awaitable[~.Promotion]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_promotion" not in self._stubs: + self._stubs["get_promotion"] = self.grpc_channel.unary_unary( + "/google.shopping.merchant.promotions.v1beta.PromotionsService/GetPromotion", + request_serializer=promotions.GetPromotionRequest.serialize, + response_deserializer=promotions.Promotion.deserialize, + ) + return self._stubs["get_promotion"] + + @property + def list_promotions( + self, + ) -> Callable[ + [promotions.ListPromotionsRequest], Awaitable[promotions.ListPromotionsResponse] + ]: + r"""Return a callable for the list promotions method over gRPC. + + Lists the promotions in your Merchant Center account. The + response might contain fewer items than specified by + ``pageSize``. Rely on ``pageToken`` to determine if there are + more items to be requested. + + After inserting or updating a promotion, it may take several + minutes before the updated processed promotion can be retrieved. + + Returns: + Callable[[~.ListPromotionsRequest], + Awaitable[~.ListPromotionsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "list_promotions" not in self._stubs: + self._stubs["list_promotions"] = self.grpc_channel.unary_unary( + "/google.shopping.merchant.promotions.v1beta.PromotionsService/ListPromotions", + request_serializer=promotions.ListPromotionsRequest.serialize, + response_deserializer=promotions.ListPromotionsResponse.deserialize, + ) + return self._stubs["list_promotions"] + + def _prep_wrapped_messages(self, client_info): + """Precompute the wrapped methods, overriding the base class method to use async wrappers.""" + self._wrapped_methods = { + self.insert_promotion: gapic_v1.method_async.wrap_method( + self.insert_promotion, + default_timeout=None, + client_info=client_info, + ), + self.get_promotion: gapic_v1.method_async.wrap_method( + self.get_promotion, + default_timeout=None, + client_info=client_info, + ), + self.list_promotions: gapic_v1.method_async.wrap_method( + self.list_promotions, + default_timeout=None, + client_info=client_info, + ), + } + + def close(self): + return self.grpc_channel.close() + + +__all__ = ("PromotionsServiceGrpcAsyncIOTransport",) diff --git a/packages/google-shopping-merchant-promotions/google/shopping/merchant_promotions_v1beta/services/promotions_service/transports/rest.py b/packages/google-shopping-merchant-promotions/google/shopping/merchant_promotions_v1beta/services/promotions_service/transports/rest.py new file mode 100644 index 000000000000..c0fc68529e1d --- /dev/null +++ b/packages/google-shopping-merchant-promotions/google/shopping/merchant_promotions_v1beta/services/promotions_service/transports/rest.py @@ -0,0 +1,588 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +import dataclasses +import json # type: ignore +import re +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, path_template, rest_helpers, rest_streaming +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.transport.requests import AuthorizedSession # type: ignore +from google.protobuf import json_format +import grpc # type: ignore +from requests import __version__ as requests_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + + +from google.shopping.merchant_promotions_v1beta.types import promotions + +from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO +from .base import PromotionsServiceTransport + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) + + +class PromotionsServiceRestInterceptor: + """Interceptor for PromotionsService. + + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. + Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the PromotionsServiceRestTransport. + + .. code-block:: python + class MyCustomPromotionsServiceInterceptor(PromotionsServiceRestInterceptor): + def pre_get_promotion(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_promotion(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_insert_promotion(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_insert_promotion(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_promotions(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_promotions(self, response): + logging.log(f"Received response: {response}") + return response + + transport = PromotionsServiceRestTransport(interceptor=MyCustomPromotionsServiceInterceptor()) + client = PromotionsServiceClient(transport=transport) + + + """ + + def pre_get_promotion( + self, + request: promotions.GetPromotionRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[promotions.GetPromotionRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_promotion + + Override in a subclass to manipulate the request or metadata + before they are sent to the PromotionsService server. + """ + return request, metadata + + def post_get_promotion( + self, response: promotions.Promotion + ) -> promotions.Promotion: + """Post-rpc interceptor for get_promotion + + Override in a subclass to manipulate the response + after it is returned by the PromotionsService server but before + it is returned to user code. 
+ """ + return response + + def pre_insert_promotion( + self, + request: promotions.InsertPromotionRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[promotions.InsertPromotionRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for insert_promotion + + Override in a subclass to manipulate the request or metadata + before they are sent to the PromotionsService server. + """ + return request, metadata + + def post_insert_promotion( + self, response: promotions.Promotion + ) -> promotions.Promotion: + """Post-rpc interceptor for insert_promotion + + Override in a subclass to manipulate the response + after it is returned by the PromotionsService server but before + it is returned to user code. + """ + return response + + def pre_list_promotions( + self, + request: promotions.ListPromotionsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[promotions.ListPromotionsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_promotions + + Override in a subclass to manipulate the request or metadata + before they are sent to the PromotionsService server. + """ + return request, metadata + + def post_list_promotions( + self, response: promotions.ListPromotionsResponse + ) -> promotions.ListPromotionsResponse: + """Post-rpc interceptor for list_promotions + + Override in a subclass to manipulate the response + after it is returned by the PromotionsService server but before + it is returned to user code. + """ + return response + + +@dataclasses.dataclass +class PromotionsServiceRestStub: + _session: AuthorizedSession + _host: str + _interceptor: PromotionsServiceRestInterceptor + + +class PromotionsServiceRestTransport(PromotionsServiceTransport): + """REST backend transport for PromotionsService. + + Service to manage promotions for products. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends JSON representations of protocol buffers over HTTP/1.1 + + """ + + def __init__( + self, + *, + host: str = "merchantapi.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", + interceptor: Optional[PromotionsServiceRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'merchantapi.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. It is ignored + if ``channel`` is provided. 
+ quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. + """ + # Run the base constructor + # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. + # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the + # credentials object + maybe_url_match = re.match("^(?Phttp(?:s)?://)?(?P.*)$", host) + if maybe_url_match is None: + raise ValueError( + f"Unexpected hostname structure: {host}" + ) # pragma: NO COVER + + url_match_items = maybe_url_match.groupdict() + + host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host + + super().__init__( + host=host, + credentials=credentials, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + self._session = AuthorizedSession( + self._credentials, default_host=self.DEFAULT_HOST + ) + if client_cert_source_for_mtls: + self._session.configure_mtls_channel(client_cert_source_for_mtls) + self._interceptor = interceptor or PromotionsServiceRestInterceptor() + self._prep_wrapped_messages(client_info) + + class _GetPromotion(PromotionsServiceRestStub): + def __hash__(self): + return hash("GetPromotion") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: promotions.GetPromotionRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> promotions.Promotion: + r"""Call the get promotion method over HTTP. + + Args: + request (~.promotions.GetPromotionRequest): + The request object. Request message for the ``GetPromotion`` method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.promotions.Promotion: + Represents a promotion. See the following articles for + more details. + + Required promotion input attributes to pass data + validation checks are primarily defined below: + + - `Promotions data + specification `__ + - `Local promotions data + specification `__ + + After inserting, updating a promotion input, it may take + several minutes before the final promotion can be + retrieved. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/promotions/v1beta/{name=accounts/*/promotions/*}", + }, + ] + request, metadata = self._interceptor.pre_get_promotion(request, metadata) + pb_request = promotions.GetPromotionRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = promotions.Promotion() + pb_resp = promotions.Promotion.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_promotion(resp) + return resp + + class _InsertPromotion(PromotionsServiceRestStub): + def __hash__(self): + return hash("InsertPromotion") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: promotions.InsertPromotionRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> promotions.Promotion: + r"""Call the insert promotion method over HTTP. + + Args: + request (~.promotions.InsertPromotionRequest): + The request object. Request message for the ``InsertPromotion`` method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.promotions.Promotion: + Represents a promotion. See the following articles for + more details. + + Required promotion input attributes to pass data + validation checks are primarily defined below: + + - `Promotions data + specification `__ + - `Local promotions data + specification `__ + + After inserting, updating a promotion input, it may take + several minutes before the final promotion can be + retrieved. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/promotions/v1beta/{parent=accounts/*}/promotions:insert", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_insert_promotion( + request, metadata + ) + pb_request = promotions.InsertPromotionRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = promotions.Promotion() + pb_resp = promotions.Promotion.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_insert_promotion(resp) + return resp + + class _ListPromotions(PromotionsServiceRestStub): + def __hash__(self): + return hash("ListPromotions") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: promotions.ListPromotionsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> promotions.ListPromotionsResponse: + r"""Call the list promotions method over HTTP. + + Args: + request (~.promotions.ListPromotionsRequest): + The request object. Request message for the ``ListPromotions`` method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.promotions.ListPromotionsResponse: + Response message for the ``ListPromotions`` method. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/promotions/v1beta/{parent=accounts/*}/promotions", + }, + ] + request, metadata = self._interceptor.pre_list_promotions(request, metadata) + pb_request = promotions.ListPromotionsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = promotions.ListPromotionsResponse() + pb_resp = promotions.ListPromotionsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_promotions(resp) + return resp + + @property + def get_promotion( + self, + ) -> Callable[[promotions.GetPromotionRequest], promotions.Promotion]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetPromotion(self._session, self._host, self._interceptor) # type: ignore + + @property + def insert_promotion( + self, + ) -> Callable[[promotions.InsertPromotionRequest], promotions.Promotion]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._InsertPromotion(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_promotions( + self, + ) -> Callable[ + [promotions.ListPromotionsRequest], promotions.ListPromotionsResponse + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListPromotions(self._session, self._host, self._interceptor) # type: ignore + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__ = ("PromotionsServiceRestTransport",) diff --git a/packages/google-shopping-merchant-promotions/google/shopping/merchant_promotions_v1beta/types/__init__.py b/packages/google-shopping-merchant-promotions/google/shopping/merchant_promotions_v1beta/types/__init__.py new file mode 100644 index 000000000000..7010ede74537 --- /dev/null +++ b/packages/google-shopping-merchant-promotions/google/shopping/merchant_promotions_v1beta/types/__init__.py @@ -0,0 +1,46 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from .promotions import ( + GetPromotionRequest, + InsertPromotionRequest, + ListPromotionsRequest, + ListPromotionsResponse, + Promotion, +) +from .promotions_common import ( + Attributes, + CouponValueType, + OfferType, + ProductApplicability, + PromotionStatus, + RedemptionChannel, + StoreApplicability, +) + +__all__ = ( + "GetPromotionRequest", + "InsertPromotionRequest", + "ListPromotionsRequest", + "ListPromotionsResponse", + "Promotion", + "Attributes", + "PromotionStatus", + "CouponValueType", + "OfferType", + "ProductApplicability", + "RedemptionChannel", + "StoreApplicability", +) diff --git a/packages/google-shopping-merchant-promotions/google/shopping/merchant_promotions_v1beta/types/promotions.py b/packages/google-shopping-merchant-promotions/google/shopping/merchant_promotions_v1beta/types/promotions.py new file mode 100644 index 000000000000..acdc6bbb8a23 --- /dev/null +++ b/packages/google-shopping-merchant-promotions/google/shopping/merchant_promotions_v1beta/types/promotions.py @@ -0,0 +1,278 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +from google.shopping.type.types import types +import proto # type: ignore + +from google.shopping.merchant_promotions_v1beta.types import promotions_common + +__protobuf__ = proto.module( + package="google.shopping.merchant.promotions.v1beta", + manifest={ + "Promotion", + "InsertPromotionRequest", + "GetPromotionRequest", + "ListPromotionsRequest", + "ListPromotionsResponse", + }, +) + + +class Promotion(proto.Message): + r"""Represents a promotion. See the following articles for more details. + + Required promotion input attributes to pass data validation checks + are primarily defined below: + + - `Promotions data + specification `__ + - `Local promotions data + specification `__ + + After inserting, updating a promotion input, it may take several + minutes before the final promotion can be retrieved. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + name (str): + Identifier. The name of the promotion. Format: + ``accounts/{account}/promotions/{promotion}`` + promotion_id (str): + Required. The user provided promotion ID to uniquely + identify the promotion. Follow `minimum + requirements `__ + to prevent promotion disapprovals. 
+ content_language (str): + Required. The two-letter `ISO + 639-1 `__ language + code for the promotion. + + Promotions is only for `selected + languages `__. + target_country (str): + Required. The target country used as part of the unique + identifier. Represented as a `CLDR territory + code `__. + + Promotions are only available in selected countries, `Free + Listings and Shopping + ads `__ + `Local Inventory + ads `__ + redemption_channel (MutableSequence[google.shopping.merchant_promotions_v1beta.types.RedemptionChannel]): + Required. `Redemption + channel `__ + for the promotion. At least one channel is required. + data_source (str): + Output only. The primary data source of the + promotion. + attributes (google.shopping.merchant_promotions_v1beta.types.Attributes): + Optional. A list of promotion attributes. + custom_attributes (MutableSequence[google.shopping.type.types.CustomAttribute]): + Optional. A list of custom (merchant-provided) attributes. + It can also be used for submitting any attribute of the data + specification in its generic form (for example, + ``{ "name": "size type", "value": "regular" }``). This is + useful for submitting attributes not explicitly exposed by + the API. + promotion_status (google.shopping.merchant_promotions_v1beta.types.PromotionStatus): + Output only. The `status of a + promotion `__, + data validation issues, that is, information about a + promotion computed asynchronously. + version_number (int): + Optional. Represents the existing version (freshness) of the + promotion, which can be used to preserve the right order + when multiple updates are done at the same time. + + If set, the insertion is prevented when version number is + lower than the current version number of the existing + promotion. Re-insertion (for example, promotion refresh + after 30 days) can be performed with the current + ``version_number``. + + If the operation is prevented, the aborted exception will be + thrown. + + This field is a member of `oneof`_ ``_version_number``. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + promotion_id: str = proto.Field( + proto.STRING, + number=2, + ) + content_language: str = proto.Field( + proto.STRING, + number=3, + ) + target_country: str = proto.Field( + proto.STRING, + number=4, + ) + redemption_channel: MutableSequence[ + promotions_common.RedemptionChannel + ] = proto.RepeatedField( + proto.ENUM, + number=5, + enum=promotions_common.RedemptionChannel, + ) + data_source: str = proto.Field( + proto.STRING, + number=6, + ) + attributes: promotions_common.Attributes = proto.Field( + proto.MESSAGE, + number=7, + message=promotions_common.Attributes, + ) + custom_attributes: MutableSequence[types.CustomAttribute] = proto.RepeatedField( + proto.MESSAGE, + number=8, + message=types.CustomAttribute, + ) + promotion_status: promotions_common.PromotionStatus = proto.Field( + proto.MESSAGE, + number=9, + message=promotions_common.PromotionStatus, + ) + version_number: int = proto.Field( + proto.INT64, + number=10, + optional=True, + ) + + +class InsertPromotionRequest(proto.Message): + r"""Request message for the ``InsertPromotion`` method. + + Attributes: + parent (str): + Required. The account where the promotion + will be inserted. Format: accounts/{account} + promotion (google.shopping.merchant_promotions_v1beta.types.Promotion): + Required. The promotion to insert. + data_source (str): + Required. The data source of the + `promotion `__ + Format: ``accounts/{account}/dataSources/{datasource}``. 
+ """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + promotion: "Promotion" = proto.Field( + proto.MESSAGE, + number=2, + message="Promotion", + ) + data_source: str = proto.Field( + proto.STRING, + number=3, + ) + + +class GetPromotionRequest(proto.Message): + r"""Request message for the ``GetPromotion`` method. + + Attributes: + name (str): + Required. The name of the promotion to retrieve. Format: + ``accounts/{account}/promotions/{promotions}`` + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class ListPromotionsRequest(proto.Message): + r"""Request message for the ``ListPromotions`` method. + + Attributes: + parent (str): + Required. The account to list processed promotions for. + Format: ``accounts/{account}`` + page_size (int): + Output only. The maximum number of promotions + to return. The service may return fewer than + this value. The maximum value is 1000; values + above 1000 will be coerced to 1000. If + unspecified, the maximum number of promotions + will be returned. + page_token (str): + Output only. A page token, received from a previous + ``ListPromotions`` call. Provide this to retrieve the + subsequent page. + + When paginating, all other parameters provided to + ``ListPromotions`` must match the call that provided the + page token. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + + +class ListPromotionsResponse(proto.Message): + r"""Response message for the ``ListPromotions`` method. + + Attributes: + promotions (MutableSequence[google.shopping.merchant_promotions_v1beta.types.Promotion]): + The processed promotions from the specified + account. + next_page_token (str): + A token, which can be sent as ``page_token`` to retrieve the + next page. If this field is omitted, there are no subsequent + pages. + """ + + @property + def raw_page(self): + return self + + promotions: MutableSequence["Promotion"] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="Promotion", + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-shopping-merchant-promotions/google/shopping/merchant_promotions_v1beta/types/promotions_common.py b/packages/google-shopping-merchant-promotions/google/shopping/merchant_promotions_v1beta/types/promotions_common.py new file mode 100644 index 000000000000..9e75787da5d1 --- /dev/null +++ b/packages/google-shopping-merchant-promotions/google/shopping/merchant_promotions_v1beta/types/promotions_common.py @@ -0,0 +1,673 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +from google.protobuf import timestamp_pb2 # type: ignore +from google.shopping.type.types import types +from google.type import interval_pb2 # type: ignore +import proto # type: ignore + +__protobuf__ = proto.module( + package="google.shopping.merchant.promotions.v1beta", + manifest={ + "ProductApplicability", + "StoreApplicability", + "OfferType", + "RedemptionChannel", + "CouponValueType", + "Attributes", + "PromotionStatus", + }, +) + + +class ProductApplicability(proto.Enum): + r"""Which product or list of products the promotion applies to. + + Values: + PRODUCT_APPLICABILITY_UNSPECIFIED (0): + Which products the promotion applies to is + unknown. + ALL_PRODUCTS (1): + Applicable to all products. + SPECIFIC_PRODUCTS (2): + Applicable to only a single product or list + of products. + """ + PRODUCT_APPLICABILITY_UNSPECIFIED = 0 + ALL_PRODUCTS = 1 + SPECIFIC_PRODUCTS = 2 + + +class StoreApplicability(proto.Enum): + r"""Store codes or list of store codes the promotion applies to. + Only for Local inventory ads promotions. + + Values: + STORE_APPLICABILITY_UNSPECIFIED (0): + Which store codes the promotion applies to is + unknown. + ALL_STORES (1): + Promotion applies to all stores. + SPECIFIC_STORES (2): + Promotion applies to only the specified + stores. + """ + STORE_APPLICABILITY_UNSPECIFIED = 0 + ALL_STORES = 1 + SPECIFIC_STORES = 2 + + +class OfferType(proto.Enum): + r"""Offer type of a promotion. + + Values: + OFFER_TYPE_UNSPECIFIED (0): + Unknown offer type. + NO_CODE (1): + Offer type without a code. + GENERIC_CODE (2): + Offer type with a code. Generic redemption code for the + promotion is required when ``offerType`` = ``GENERIC_CODE``. + """ + OFFER_TYPE_UNSPECIFIED = 0 + NO_CODE = 1 + GENERIC_CODE = 2 + + +class RedemptionChannel(proto.Enum): + r"""Channel of a promotion. + + Values: + REDEMPTION_CHANNEL_UNSPECIFIED (0): + Indicates that the channel is unspecified. + IN_STORE (1): + Indicates that the channel is in store. This is same as + ``local`` channel used for ``products``. + ONLINE (2): + Indicates that the channel is online. + """ + REDEMPTION_CHANNEL_UNSPECIFIED = 0 + IN_STORE = 1 + ONLINE = 2 + + +class CouponValueType(proto.Enum): + r"""`Coupon value + type `__ + of a promotion. + + Values: + COUPON_VALUE_TYPE_UNSPECIFIED (0): + Indicates that the coupon value type is + unspecified. + MONEY_OFF (1): + Money off coupon value type. + PERCENT_OFF (2): + Percent off coupon value type. + BUY_M_GET_N_MONEY_OFF (3): + Buy M quantity, get N money off coupon value type. + ``minimum_purchase_quantity`` and + ``get_this_quantity_discounted`` must be present. + ``money_off_amount`` must also be present. + BUY_M_GET_N_PERCENT_OFF (4): + Buy M quantity, get N percent off coupon value type. + ``minimum_purchase_quantity`` and + ``get_this_quantity_discounted`` must be present. + ``percent_off_percentage`` must also be present. + BUY_M_GET_MONEY_OFF (5): + Buy M quantity, get money off. ``minimum_purchase_quantity`` + and ``money_off_amount`` must be present. + BUY_M_GET_PERCENT_OFF (6): + Buy M quantity, get money off. ``minimum_purchase_quantity`` + and ``percent_off_percentage`` must be present. + FREE_GIFT (7): + Free gift with description only. + FREE_GIFT_WITH_VALUE (8): + Free gift with monetary value. + FREE_GIFT_WITH_ITEM_ID (9): + Free gift with item ID. + FREE_SHIPPING_STANDARD (10): + Standard free shipping coupon value type. 
+ FREE_SHIPPING_OVERNIGHT (11): + Overnight free shipping coupon value type. + FREE_SHIPPING_TWO_DAY (12): + Two day free shipping coupon value type. + """ + COUPON_VALUE_TYPE_UNSPECIFIED = 0 + MONEY_OFF = 1 + PERCENT_OFF = 2 + BUY_M_GET_N_MONEY_OFF = 3 + BUY_M_GET_N_PERCENT_OFF = 4 + BUY_M_GET_MONEY_OFF = 5 + BUY_M_GET_PERCENT_OFF = 6 + FREE_GIFT = 7 + FREE_GIFT_WITH_VALUE = 8 + FREE_GIFT_WITH_ITEM_ID = 9 + FREE_SHIPPING_STANDARD = 10 + FREE_SHIPPING_OVERNIGHT = 11 + FREE_SHIPPING_TWO_DAY = 12 + + +class Attributes(proto.Message): + r"""Attributes. + + Attributes: + product_applicability (google.shopping.merchant_promotions_v1beta.types.ProductApplicability): + Required. Applicability of the promotion to either all + products or `only specific + products `__. + offer_type (google.shopping.merchant_promotions_v1beta.types.OfferType): + Required. + `Type `__ + of the promotion. Use this attribute to indicate whether or + not customers need a coupon code to redeem your promotion. + generic_redemption_code (str): + Optional. Generic redemption code for the promotion. To be + used with the ``offerType`` field and must meet the `minimum + requirements `__. + long_title (str): + Required. `Long + title `__ + for the promotion. + coupon_value_type (google.shopping.merchant_promotions_v1beta.types.CouponValueType): + Required. The [coupon value type] + (https://support.google.com/merchants/answer/13861986?ref_topic=13773355&sjid=17642868584668136159-NC) + attribute to signal the type of promotion that you are + running. Depending on type of the selected coupon value + `some attributes are + required `__. + promotion_destinations (MutableSequence[google.shopping.type.types.Destination.DestinationEnum]): + Required. The list of destinations where the promotion + applies to. If you don't specify a destination by including + a supported value in your data source, your promotion will + display in Shopping ads and free listings by default. + + You may have previously submitted the following values as + destinations for your products: Shopping Actions, Surfaces + across Google, Local surfaces across Google. To represent + these values use ``FREE_LISTINGS``, ``FREE_LOCAL_LISTINGS``, + ``LOCAL_INVENTORY_ADS``. For more details see `Promotion + destination `__ + item_id_inclusion (MutableSequence[str]): + Optional. Product filter by `item + ID `__ + for the promotion. The product filter attributes only + applies when the products eligible for promotion product + applicability ``product_applicability`` attribute is set to + `specific_products `__. + brand_inclusion (MutableSequence[str]): + Optional. Product filter by brand for the promotion. The + product filter attributes only applies when the products + eligible for promotion product applicability + ``product_applicability`` attribute is set to + `specific_products `__. + item_group_id_inclusion (MutableSequence[str]): + Optional. Product filter by item group ID for the promotion. + The product filter attributes only applies when the products + eligible for promotion product applicability + [product_applicability] attribute is set to + `specific_products `__. + product_type_inclusion (MutableSequence[str]): + Optional. Product filter by product type for the promotion. + The product filter attributes only applies when the products + eligible for promotion product applicability + ``product_applicability`` attribute is set to + `specific_products `__. 
+ item_id_exclusion (MutableSequence[str]): + Optional. Product filter by `item ID + exclusion `__ + for the promotion. The product filter attributes only + applies when the products eligible for promotion product + applicability ``product_applicability`` attribute is set to + `specific_products `__. + brand_exclusion (MutableSequence[str]): + Optional. Product filter by `brand + exclusion `__ + for the promotion. The product filter attributes only + applies when the products eligible for promotion product + applicability ``product_applicability`` attribute is set to + `specific_products `__. + item_group_id_exclusion (MutableSequence[str]): + Optional. Product filter by `item group + ID `__. + The product filter attributes only applies when the products + eligible for promotion product applicability + ``product_applicability`` attribute is set to + `specific_products `__. + exclusion for the promotion. + product_type_exclusion (MutableSequence[str]): + Optional. Product filter by `product type + exclusion `__ + for the promotion. The product filter attributes only + applies when the products eligible for promotion product + applicability ``product_applicability`` attribute is set to + `specific_products `__. + minimum_purchase_amount (google.shopping.type.types.Price): + Optional. `Minimum purchase + amount `__ + for the promotion. + minimum_purchase_quantity (int): + Optional. `Minimum purchase + quantity `__ + for the promotion. + limit_quantity (int): + Optional. `Maximum purchase + quantity `__ + for the promotion. + limit_value (google.shopping.type.types.Price): + Optional. `Maximum product + price `__ + for promotion. + percent_off (int): + Optional. The `percentage + discount `__ + offered in the promotion. + money_off_amount (google.shopping.type.types.Price): + Optional. The `money off + amount `__ + offered in the promotion. + get_this_quantity_discounted (int): + Optional. The number of items discounted in the promotion. + The attribute is set when ``couponValueType`` is equal to + ``buy_m_get_n_money_off`` or ``buy_m_get_n_percent_off``. + free_gift_value (google.shopping.type.types.Price): + Optional. `Free gift + value `__ + for the promotion. + free_gift_description (str): + Optional. `Free gift + description `__ + for the promotion. + free_gift_item_id (str): + Optional. `Free gift item + ID `__ + for the promotion. + promotion_effective_time_period (google.type.interval_pb2.Interval): + Required. ``TimePeriod`` representation of the promotion's + effective dates. This attribute specifies that the promotion + can be tested on your online store during this time period. + promotion_display_time_period (google.type.interval_pb2.Interval): + Optional. ``TimePeriod`` representation of the promotion's + display dates. This attribute specifies the date and time + frame when the promotion will be live on Google.com and + Shopping ads. If the display time period for promotion + ``promotion_display_time_period`` attribute is not + specified, the promotion effective time period + ``promotion_effective_time_period`` determines the date and + time frame when the promotion will be live on Google.com and + Shopping ads. + store_applicability (google.shopping.merchant_promotions_v1beta.types.StoreApplicability): + Optional. Whether the promotion applies to `all stores, or + only specified + stores `__. + Local Inventory ads promotions throw an error if no store + applicability is included. 
An ``INVALID_ARGUMENT`` error is + thrown if ``store_applicability`` is set to ``ALL_STORES`` + and ``store_codes_inclusion`` or ``score_code_exclusion`` is + set to a value. + store_codes_inclusion (MutableSequence[str]): + Optional. `Store codes to + include `__ + for the promotion. The store filter attributes only applies + when the ``store_applicability`` attribute is set to + `specific_stores `__. + + Store code (the store ID from your Business Profile) of the + physical store the product is sold in. See the `Local + product inventory data + specification `__ + for more information. + store_codes_exclusion (MutableSequence[str]): + Optional. `Store codes to + exclude `__ + for the promotion. The store filter attributes only applies + when the ``store_applicability`` attribute is set to + `specific_stores `__. + promotion_url (str): + Optional. URL to the page on the merchant's site where the + promotion shows. Local Inventory ads promotions throw an + error if no ``promotion_url`` is included. URL is used to + confirm that the promotion is valid and can be redeemed. + """ + + product_applicability: "ProductApplicability" = proto.Field( + proto.ENUM, + number=1, + enum="ProductApplicability", + ) + offer_type: "OfferType" = proto.Field( + proto.ENUM, + number=2, + enum="OfferType", + ) + generic_redemption_code: str = proto.Field( + proto.STRING, + number=3, + ) + long_title: str = proto.Field( + proto.STRING, + number=4, + ) + coupon_value_type: "CouponValueType" = proto.Field( + proto.ENUM, + number=5, + enum="CouponValueType", + ) + promotion_destinations: MutableSequence[ + types.Destination.DestinationEnum + ] = proto.RepeatedField( + proto.ENUM, + number=6, + enum=types.Destination.DestinationEnum, + ) + item_id_inclusion: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=7, + ) + brand_inclusion: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=8, + ) + item_group_id_inclusion: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=9, + ) + product_type_inclusion: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=10, + ) + item_id_exclusion: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=11, + ) + brand_exclusion: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=12, + ) + item_group_id_exclusion: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=13, + ) + product_type_exclusion: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=14, + ) + minimum_purchase_amount: types.Price = proto.Field( + proto.MESSAGE, + number=15, + message=types.Price, + ) + minimum_purchase_quantity: int = proto.Field( + proto.INT64, + number=16, + ) + limit_quantity: int = proto.Field( + proto.INT64, + number=17, + ) + limit_value: types.Price = proto.Field( + proto.MESSAGE, + number=18, + message=types.Price, + ) + percent_off: int = proto.Field( + proto.INT64, + number=19, + ) + money_off_amount: types.Price = proto.Field( + proto.MESSAGE, + number=20, + message=types.Price, + ) + get_this_quantity_discounted: int = proto.Field( + proto.INT64, + number=21, + ) + free_gift_value: types.Price = proto.Field( + proto.MESSAGE, + number=22, + message=types.Price, + ) + free_gift_description: str = proto.Field( + proto.STRING, + number=23, + ) + free_gift_item_id: str = proto.Field( + proto.STRING, + number=24, + ) + promotion_effective_time_period: interval_pb2.Interval = proto.Field( + proto.MESSAGE, + number=25, + message=interval_pb2.Interval, + ) + 
promotion_display_time_period: interval_pb2.Interval = proto.Field( + proto.MESSAGE, + number=26, + message=interval_pb2.Interval, + ) + store_applicability: "StoreApplicability" = proto.Field( + proto.ENUM, + number=28, + enum="StoreApplicability", + ) + store_codes_inclusion: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=29, + ) + store_codes_exclusion: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=30, + ) + promotion_url: str = proto.Field( + proto.STRING, + number=31, + ) + + +class PromotionStatus(proto.Message): + r"""The status of the promotion. + + Attributes: + destination_statuses (MutableSequence[google.shopping.merchant_promotions_v1beta.types.PromotionStatus.DestinationStatus]): + Output only. The intended destinations for + the promotion. + item_level_issues (MutableSequence[google.shopping.merchant_promotions_v1beta.types.PromotionStatus.ItemLevelIssue]): + Output only. A list of issues associated with + the promotion. + creation_date (google.protobuf.timestamp_pb2.Timestamp): + Output only. Date on which the promotion has been created in + `ISO 8601 `__ format: + Date, time, and offset, for example + ``2020-01-02T09:00:00+01:00`` or ``2020-01-02T09:00:00Z`` + last_update_date (google.protobuf.timestamp_pb2.Timestamp): + Output only. Date on which the promotion status has been + last updated in `ISO + 8601 `__ format: + Date, time, and offset, for example + ``2020-01-02T09:00:00+01:00`` or ``2020-01-02T09:00:00Z`` + """ + + class DestinationStatus(proto.Message): + r"""The status for the specified destination. + + Attributes: + reporting_context (google.shopping.type.types.ReportingContext.ReportingContextEnum): + Output only. The name of the promotion + destination. + status (google.shopping.merchant_promotions_v1beta.types.PromotionStatus.DestinationStatus.State): + Output only. The status for the specified + destination. + """ + + class State(proto.Enum): + r"""The current state of the promotion. + + Values: + STATE_UNSPECIFIED (0): + Unknown promotion state. + IN_REVIEW (1): + The promotion is under review. + REJECTED (2): + The promotion is disapproved. + LIVE (3): + The promotion is approved and active. + STOPPED (4): + The promotion is stopped by merchant. + EXPIRED (5): + The promotion is no longer active. + PENDING (6): + The promotion is not stopped, and all reviews + are approved, but the active date is in the + future. + """ + STATE_UNSPECIFIED = 0 + IN_REVIEW = 1 + REJECTED = 2 + LIVE = 3 + STOPPED = 4 + EXPIRED = 5 + PENDING = 6 + + reporting_context: types.ReportingContext.ReportingContextEnum = proto.Field( + proto.ENUM, + number=1, + enum=types.ReportingContext.ReportingContextEnum, + ) + status: "PromotionStatus.DestinationStatus.State" = proto.Field( + proto.ENUM, + number=2, + enum="PromotionStatus.DestinationStatus.State", + ) + + class ItemLevelIssue(proto.Message): + r"""The issue associated with the promotion. + + Attributes: + code (str): + Output only. The error code of the issue. + severity (google.shopping.merchant_promotions_v1beta.types.PromotionStatus.ItemLevelIssue.Severity): + Output only. How this issue affects serving + of the promotion. + resolution (str): + Output only. Whether the issue can be + resolved by the merchant. + attribute (str): + Output only. The attribute's name, if the + issue is caused by a single attribute. + reporting_context (google.shopping.type.types.ReportingContext.ReportingContextEnum): + Output only. The destination the issue + applies to. 
+ description (str): + Output only. A short issue description in + English. + detail (str): + Output only. A detailed issue description in + English. + documentation (str): + Output only. The URL of a web page to help + with resolving this issue. + applicable_countries (MutableSequence[str]): + Output only. List of country codes (ISO + 3166-1 alpha-2) where issue applies to the + offer. + """ + + class Severity(proto.Enum): + r"""The severity of the issue. + + Values: + SEVERITY_UNSPECIFIED (0): + Not specified. + NOT_IMPACTED (1): + This issue represents a warning and does not + have a direct affect on the promotion. + DEMOTED (2): + The promotion is demoted and most likely have + limited performance in search results + DISAPPROVED (3): + Issue disapproves the promotion. + """ + SEVERITY_UNSPECIFIED = 0 + NOT_IMPACTED = 1 + DEMOTED = 2 + DISAPPROVED = 3 + + code: str = proto.Field( + proto.STRING, + number=1, + ) + severity: "PromotionStatus.ItemLevelIssue.Severity" = proto.Field( + proto.ENUM, + number=2, + enum="PromotionStatus.ItemLevelIssue.Severity", + ) + resolution: str = proto.Field( + proto.STRING, + number=3, + ) + attribute: str = proto.Field( + proto.STRING, + number=4, + ) + reporting_context: types.ReportingContext.ReportingContextEnum = proto.Field( + proto.ENUM, + number=5, + enum=types.ReportingContext.ReportingContextEnum, + ) + description: str = proto.Field( + proto.STRING, + number=6, + ) + detail: str = proto.Field( + proto.STRING, + number=7, + ) + documentation: str = proto.Field( + proto.STRING, + number=8, + ) + applicable_countries: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=9, + ) + + destination_statuses: MutableSequence[DestinationStatus] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=DestinationStatus, + ) + item_level_issues: MutableSequence[ItemLevelIssue] = proto.RepeatedField( + proto.MESSAGE, + number=2, + message=ItemLevelIssue, + ) + creation_date: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=3, + message=timestamp_pb2.Timestamp, + ) + last_update_date: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=4, + message=timestamp_pb2.Timestamp, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-shopping-merchant-promotions/mypy.ini b/packages/google-shopping-merchant-promotions/mypy.ini new file mode 100644 index 000000000000..574c5aed394b --- /dev/null +++ b/packages/google-shopping-merchant-promotions/mypy.ini @@ -0,0 +1,3 @@ +[mypy] +python_version = 3.7 +namespace_packages = True diff --git a/packages/google-shopping-merchant-promotions/noxfile.py b/packages/google-shopping-merchant-promotions/noxfile.py new file mode 100644 index 000000000000..1e6cd48d0529 --- /dev/null +++ b/packages/google-shopping-merchant-promotions/noxfile.py @@ -0,0 +1,428 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Generated by synthtool. DO NOT EDIT! 
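As an illustrative aside to the `PromotionStatus`, `DestinationStatus`, and `ItemLevelIssue` types defined just above (before this test configuration): those fields are output-only and computed asynchronously, so a caller typically reads them back after a `GetPromotion` call. The sketch below is not part of the generated library; the account and promotion identifiers are placeholders, and it assumes Application Default Credentials are already configured.

```python
# Minimal sketch: fetch a promotion and inspect its asynchronously computed status.
# Placeholder IDs; assumes Application Default Credentials are configured.
from google.shopping import merchant_promotions_v1beta


def check_promotion_status(account_id: str, promotion_id: str) -> None:
    client = merchant_promotions_v1beta.PromotionsServiceClient()

    request = merchant_promotions_v1beta.GetPromotionRequest(
        # Resource name format: accounts/{account}/promotions/{promotion}
        name=f"accounts/{account_id}/promotions/{promotion_id}",
    )
    promotion = client.get_promotion(request=request)

    # Review state per intended destination (e.g. IN_REVIEW, LIVE, REJECTED).
    for destination in promotion.promotion_status.destination_statuses:
        print(destination.reporting_context, destination.status)

    # Data-validation issues reported for the promotion, if any.
    for issue in promotion.promotion_status.item_level_issues:
        print(issue.code, issue.severity, issue.description, issue.documentation)
```

The generated samples further down in this diff show the same client calls (`get_promotion`, `insert_promotion`, `list_promotions`) in isolation; this sketch only adds the status-inspection step described by the type docstrings.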
+ +from __future__ import absolute_import + +import os +import pathlib +import re +import shutil +from typing import Dict, List +import warnings + +import nox + +BLACK_VERSION = "black[jupyter]==23.7.0" +ISORT_VERSION = "isort==5.11.0" + +LINT_PATHS = ["docs", "google", "tests", "noxfile.py", "setup.py"] + + +DEFAULT_PYTHON_VERSION = "3.10" + +UNIT_TEST_PYTHON_VERSIONS: List[str] = ["3.7", "3.8", "3.9", "3.10", "3.11", "3.12"] +UNIT_TEST_STANDARD_DEPENDENCIES = [ + "mock", + "asyncmock", + "pytest", + "pytest-cov", + "pytest-asyncio", +] +UNIT_TEST_EXTERNAL_DEPENDENCIES: List[str] = [] +UNIT_TEST_LOCAL_DEPENDENCIES: List[str] = [] +UNIT_TEST_DEPENDENCIES: List[str] = [] +UNIT_TEST_EXTRAS: List[str] = [] +UNIT_TEST_EXTRAS_BY_PYTHON: Dict[str, List[str]] = {} + +SYSTEM_TEST_PYTHON_VERSIONS: List[str] = ["3.8", "3.9", "3.10", "3.11", "3.12"] +SYSTEM_TEST_STANDARD_DEPENDENCIES = [ + "mock", + "pytest", + "google-cloud-testutils", +] +SYSTEM_TEST_EXTERNAL_DEPENDENCIES: List[str] = [] +SYSTEM_TEST_LOCAL_DEPENDENCIES: List[str] = [] +SYSTEM_TEST_DEPENDENCIES: List[str] = [] +SYSTEM_TEST_EXTRAS: List[str] = [] +SYSTEM_TEST_EXTRAS_BY_PYTHON: Dict[str, List[str]] = {} + +CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute() + +# 'docfx' is excluded since it only needs to run in 'docs-presubmit' +nox.options.sessions = [ + "unit", + "system", + "cover", + "lint", + "lint_setup_py", + "blacken", + "docs", +] + +# Error if a python version is missing +nox.options.error_on_missing_interpreters = True + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def lint(session): + """Run linters. + + Returns a failure if the linters find linting errors or sufficiently + serious code quality issues. + """ + session.install("flake8", BLACK_VERSION) + session.run( + "black", + "--check", + *LINT_PATHS, + ) + + session.run("flake8", "google", "tests") + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def blacken(session): + """Run black. Format code to uniform standard.""" + session.install(BLACK_VERSION) + session.run( + "black", + *LINT_PATHS, + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def format(session): + """ + Run isort to sort imports. Then run black + to format code to uniform standard. + """ + session.install(BLACK_VERSION, ISORT_VERSION) + # Use the --fss option to sort imports using strict alphabetical order. + # See https://pycqa.github.io/isort/docs/configuration/options.html#force-sort-within-sections + session.run( + "isort", + "--fss", + *LINT_PATHS, + ) + session.run( + "black", + *LINT_PATHS, + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def lint_setup_py(session): + """Verify that setup.py is valid (including RST check).""" + session.install("docutils", "pygments") + session.run("python", "setup.py", "check", "--restructuredtext", "--strict") + + +def install_unittest_dependencies(session, *constraints): + standard_deps = UNIT_TEST_STANDARD_DEPENDENCIES + UNIT_TEST_DEPENDENCIES + session.install(*standard_deps, *constraints) + + if UNIT_TEST_EXTERNAL_DEPENDENCIES: + warnings.warn( + "'unit_test_external_dependencies' is deprecated. 
Instead, please " + "use 'unit_test_dependencies' or 'unit_test_local_dependencies'.", + DeprecationWarning, + ) + session.install(*UNIT_TEST_EXTERNAL_DEPENDENCIES, *constraints) + + if UNIT_TEST_LOCAL_DEPENDENCIES: + session.install(*UNIT_TEST_LOCAL_DEPENDENCIES, *constraints) + + if UNIT_TEST_EXTRAS_BY_PYTHON: + extras = UNIT_TEST_EXTRAS_BY_PYTHON.get(session.python, []) + elif UNIT_TEST_EXTRAS: + extras = UNIT_TEST_EXTRAS + else: + extras = [] + + if extras: + session.install("-e", f".[{','.join(extras)}]", *constraints) + else: + session.install("-e", ".", *constraints) + + +def default(session): + # Install all test dependencies, then install this package in-place. + + constraints_path = str( + CURRENT_DIRECTORY / "testing" / f"constraints-{session.python}.txt" + ) + install_unittest_dependencies(session, "-c", constraints_path) + + # Run py.test against the unit tests. + session.run( + "py.test", + "--quiet", + f"--junitxml=unit_{session.python}_sponge_log.xml", + "--cov=google", + "--cov=tests/unit", + "--cov-append", + "--cov-config=.coveragerc", + "--cov-report=", + "--cov-fail-under=0", + os.path.join("tests", "unit"), + *session.posargs, + ) + + +@nox.session(python=UNIT_TEST_PYTHON_VERSIONS) +def unit(session): + """Run the unit test suite.""" + default(session) + + +def install_systemtest_dependencies(session, *constraints): + # Use pre-release gRPC for system tests. + # Exclude version 1.52.0rc1 which has a known issue. + # See https://github.com/grpc/grpc/issues/32163 + session.install("--pre", "grpcio!=1.52.0rc1") + + session.install(*SYSTEM_TEST_STANDARD_DEPENDENCIES, *constraints) + + if SYSTEM_TEST_EXTERNAL_DEPENDENCIES: + session.install(*SYSTEM_TEST_EXTERNAL_DEPENDENCIES, *constraints) + + if SYSTEM_TEST_LOCAL_DEPENDENCIES: + session.install("-e", *SYSTEM_TEST_LOCAL_DEPENDENCIES, *constraints) + + if SYSTEM_TEST_DEPENDENCIES: + session.install("-e", *SYSTEM_TEST_DEPENDENCIES, *constraints) + + if SYSTEM_TEST_EXTRAS_BY_PYTHON: + extras = SYSTEM_TEST_EXTRAS_BY_PYTHON.get(session.python, []) + elif SYSTEM_TEST_EXTRAS: + extras = SYSTEM_TEST_EXTRAS + else: + extras = [] + + if extras: + session.install("-e", f".[{','.join(extras)}]", *constraints) + else: + session.install("-e", ".", *constraints) + + +@nox.session(python=SYSTEM_TEST_PYTHON_VERSIONS) +def system(session): + """Run the system test suite.""" + constraints_path = str( + CURRENT_DIRECTORY / "testing" / f"constraints-{session.python}.txt" + ) + system_test_path = os.path.join("tests", "system.py") + system_test_folder_path = os.path.join("tests", "system") + + # Check the value of `RUN_SYSTEM_TESTS` env var. It defaults to true. + if os.environ.get("RUN_SYSTEM_TESTS", "true") == "false": + session.skip("RUN_SYSTEM_TESTS is set to false, skipping") + # Install pyopenssl for mTLS testing. + if os.environ.get("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") == "true": + session.install("pyopenssl") + + system_test_exists = os.path.exists(system_test_path) + system_test_folder_exists = os.path.exists(system_test_folder_path) + # Sanity check: only run tests if found. + if not system_test_exists and not system_test_folder_exists: + session.skip("System tests were not found") + + install_systemtest_dependencies(session, "-c", constraints_path) + + # Run py.test against the system tests. 
+ if system_test_exists: + session.run( + "py.test", + "--quiet", + f"--junitxml=system_{session.python}_sponge_log.xml", + system_test_path, + *session.posargs, + ) + if system_test_folder_exists: + session.run( + "py.test", + "--quiet", + f"--junitxml=system_{session.python}_sponge_log.xml", + system_test_folder_path, + *session.posargs, + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def cover(session): + """Run the final coverage report. + + This outputs the coverage report aggregating coverage from the unit + test runs (not system test runs), and then erases coverage data. + """ + session.install("coverage", "pytest-cov") + session.run("coverage", "report", "--show-missing", "--fail-under=100") + + session.run("coverage", "erase") + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def docs(session): + """Build the docs for this library.""" + + session.install("-e", ".") + session.install( + # We need to pin to specific versions of the `sphinxcontrib-*` packages + # which still support sphinx 4.x. + # See https://github.com/googleapis/sphinx-docfx-yaml/issues/344 + # and https://github.com/googleapis/sphinx-docfx-yaml/issues/345. + "sphinxcontrib-applehelp==1.0.4", + "sphinxcontrib-devhelp==1.0.2", + "sphinxcontrib-htmlhelp==2.0.1", + "sphinxcontrib-qthelp==1.0.3", + "sphinxcontrib-serializinghtml==1.1.5", + "sphinx==4.5.0", + "alabaster", + "recommonmark", + ) + + shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) + session.run( + "sphinx-build", + "-W", # warnings as errors + "-T", # show full traceback on exception + "-N", # no colors + "-b", + "html", + "-d", + os.path.join("docs", "_build", "doctrees", ""), + os.path.join("docs", ""), + os.path.join("docs", "_build", "html", ""), + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def docfx(session): + """Build the docfx yaml files for this library.""" + + session.install("-e", ".") + session.install( + # We need to pin to specific versions of the `sphinxcontrib-*` packages + # which still support sphinx 4.x. + # See https://github.com/googleapis/sphinx-docfx-yaml/issues/344 + # and https://github.com/googleapis/sphinx-docfx-yaml/issues/345. 
+ "sphinxcontrib-applehelp==1.0.4", + "sphinxcontrib-devhelp==1.0.2", + "sphinxcontrib-htmlhelp==2.0.1", + "sphinxcontrib-qthelp==1.0.3", + "sphinxcontrib-serializinghtml==1.1.5", + "gcp-sphinx-docfx-yaml", + "alabaster", + "recommonmark", + ) + + shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) + session.run( + "sphinx-build", + "-T", # show full traceback on exception + "-N", # no colors + "-D", + ( + "extensions=sphinx.ext.autodoc," + "sphinx.ext.autosummary," + "docfx_yaml.extension," + "sphinx.ext.intersphinx," + "sphinx.ext.coverage," + "sphinx.ext.napoleon," + "sphinx.ext.todo," + "sphinx.ext.viewcode," + "recommonmark" + ), + "-b", + "html", + "-d", + os.path.join("docs", "_build", "doctrees", ""), + os.path.join("docs", ""), + os.path.join("docs", "_build", "html", ""), + ) + + +@nox.session(python="3.12") +def prerelease_deps(session): + """Run all tests with prerelease versions of dependencies installed.""" + + # Install all dependencies + session.install("-e", ".[all, tests, tracing]") + unit_deps_all = UNIT_TEST_STANDARD_DEPENDENCIES + UNIT_TEST_EXTERNAL_DEPENDENCIES + session.install(*unit_deps_all) + system_deps_all = ( + SYSTEM_TEST_STANDARD_DEPENDENCIES + + SYSTEM_TEST_EXTERNAL_DEPENDENCIES + + SYSTEM_TEST_EXTRAS + ) + session.install(*system_deps_all) + + # Because we test minimum dependency versions on the minimum Python + # version, the first version we test with in the unit tests sessions has a + # constraints file containing all dependencies and extras. + with open( + CURRENT_DIRECTORY + / "testing" + / f"constraints-{UNIT_TEST_PYTHON_VERSIONS[0]}.txt", + encoding="utf-8", + ) as constraints_file: + constraints_text = constraints_file.read() + + # Ignore leading whitespace and comment lines. + constraints_deps = [ + match.group(1) + for match in re.finditer( + r"^\s*(\S+)(?===\S+)", constraints_text, flags=re.MULTILINE + ) + ] + + session.install(*constraints_deps) + + prerel_deps = [ + "protobuf", + # dependency of grpc + "six", + "googleapis-common-protos", + # Exclude version 1.52.0rc1 which has a known issue. 
See https://github.com/grpc/grpc/issues/32163 + "grpcio!=1.52.0rc1", + "grpcio-status", + "google-api-core", + "google-auth", + "proto-plus", + "google-cloud-testutils", + # dependencies of google-cloud-testutils" + "click", + ] + + for dep in prerel_deps: + session.install("--pre", "--no-deps", "--upgrade", dep) + + # Remaining dependencies + other_deps = [ + "requests", + ] + session.install(*other_deps) + + # Print out prerelease package versions + session.run( + "python", "-c", "import google.protobuf; print(google.protobuf.__version__)" + ) + session.run("python", "-c", "import grpc; print(grpc.__version__)") + session.run("python", "-c", "import google.auth; print(google.auth.__version__)") + + session.run("py.test", "tests/unit") diff --git a/packages/google-shopping-merchant-promotions/samples/generated_samples/merchantapi_v1beta_generated_promotions_service_get_promotion_async.py b/packages/google-shopping-merchant-promotions/samples/generated_samples/merchantapi_v1beta_generated_promotions_service_get_promotion_async.py new file mode 100644 index 000000000000..66bd0db4ca0d --- /dev/null +++ b/packages/google-shopping-merchant-promotions/samples/generated_samples/merchantapi_v1beta_generated_promotions_service_get_promotion_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetPromotion +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-shopping-merchant-promotions + + +# [START merchantapi_v1beta_generated_PromotionsService_GetPromotion_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.shopping import merchant_promotions_v1beta + + +async def sample_get_promotion(): + # Create a client + client = merchant_promotions_v1beta.PromotionsServiceAsyncClient() + + # Initialize request argument(s) + request = merchant_promotions_v1beta.GetPromotionRequest( + name="name_value", + ) + + # Make the request + response = await client.get_promotion(request=request) + + # Handle the response + print(response) + +# [END merchantapi_v1beta_generated_PromotionsService_GetPromotion_async] diff --git a/packages/google-shopping-merchant-promotions/samples/generated_samples/merchantapi_v1beta_generated_promotions_service_get_promotion_sync.py b/packages/google-shopping-merchant-promotions/samples/generated_samples/merchantapi_v1beta_generated_promotions_service_get_promotion_sync.py new file mode 100644 index 000000000000..6b8e9dade3e9 --- /dev/null +++ b/packages/google-shopping-merchant-promotions/samples/generated_samples/merchantapi_v1beta_generated_promotions_service_get_promotion_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetPromotion +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-shopping-merchant-promotions + + +# [START merchantapi_v1beta_generated_PromotionsService_GetPromotion_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.shopping import merchant_promotions_v1beta + + +def sample_get_promotion(): + # Create a client + client = merchant_promotions_v1beta.PromotionsServiceClient() + + # Initialize request argument(s) + request = merchant_promotions_v1beta.GetPromotionRequest( + name="name_value", + ) + + # Make the request + response = client.get_promotion(request=request) + + # Handle the response + print(response) + +# [END merchantapi_v1beta_generated_PromotionsService_GetPromotion_sync] diff --git a/packages/google-shopping-merchant-promotions/samples/generated_samples/merchantapi_v1beta_generated_promotions_service_insert_promotion_async.py b/packages/google-shopping-merchant-promotions/samples/generated_samples/merchantapi_v1beta_generated_promotions_service_insert_promotion_async.py new file mode 100644 index 000000000000..d9245ecabde7 --- /dev/null +++ b/packages/google-shopping-merchant-promotions/samples/generated_samples/merchantapi_v1beta_generated_promotions_service_insert_promotion_async.py @@ -0,0 +1,60 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for InsertPromotion +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-shopping-merchant-promotions + + +# [START merchantapi_v1beta_generated_PromotionsService_InsertPromotion_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.shopping import merchant_promotions_v1beta + + +async def sample_insert_promotion(): + # Create a client + client = merchant_promotions_v1beta.PromotionsServiceAsyncClient() + + # Initialize request argument(s) + promotion = merchant_promotions_v1beta.Promotion() + promotion.promotion_id = "promotion_id_value" + promotion.content_language = "content_language_value" + promotion.target_country = "target_country_value" + promotion.redemption_channel = ['ONLINE'] + + request = merchant_promotions_v1beta.InsertPromotionRequest( + parent="parent_value", + promotion=promotion, + data_source="data_source_value", + ) + + # Make the request + response = await client.insert_promotion(request=request) + + # Handle the response + print(response) + +# [END merchantapi_v1beta_generated_PromotionsService_InsertPromotion_async] diff --git a/packages/google-shopping-merchant-promotions/samples/generated_samples/merchantapi_v1beta_generated_promotions_service_insert_promotion_sync.py b/packages/google-shopping-merchant-promotions/samples/generated_samples/merchantapi_v1beta_generated_promotions_service_insert_promotion_sync.py new file mode 100644 index 000000000000..2fd44913b58c --- /dev/null +++ b/packages/google-shopping-merchant-promotions/samples/generated_samples/merchantapi_v1beta_generated_promotions_service_insert_promotion_sync.py @@ -0,0 +1,60 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for InsertPromotion +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-shopping-merchant-promotions + + +# [START merchantapi_v1beta_generated_PromotionsService_InsertPromotion_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.shopping import merchant_promotions_v1beta + + +def sample_insert_promotion(): + # Create a client + client = merchant_promotions_v1beta.PromotionsServiceClient() + + # Initialize request argument(s) + promotion = merchant_promotions_v1beta.Promotion() + promotion.promotion_id = "promotion_id_value" + promotion.content_language = "content_language_value" + promotion.target_country = "target_country_value" + promotion.redemption_channel = ['ONLINE'] + + request = merchant_promotions_v1beta.InsertPromotionRequest( + parent="parent_value", + promotion=promotion, + data_source="data_source_value", + ) + + # Make the request + response = client.insert_promotion(request=request) + + # Handle the response + print(response) + +# [END merchantapi_v1beta_generated_PromotionsService_InsertPromotion_sync] diff --git a/packages/google-shopping-merchant-promotions/samples/generated_samples/merchantapi_v1beta_generated_promotions_service_list_promotions_async.py b/packages/google-shopping-merchant-promotions/samples/generated_samples/merchantapi_v1beta_generated_promotions_service_list_promotions_async.py new file mode 100644 index 000000000000..ae88a1355d74 --- /dev/null +++ b/packages/google-shopping-merchant-promotions/samples/generated_samples/merchantapi_v1beta_generated_promotions_service_list_promotions_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListPromotions +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-shopping-merchant-promotions + + +# [START merchantapi_v1beta_generated_PromotionsService_ListPromotions_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.shopping import merchant_promotions_v1beta + + +async def sample_list_promotions(): + # Create a client + client = merchant_promotions_v1beta.PromotionsServiceAsyncClient() + + # Initialize request argument(s) + request = merchant_promotions_v1beta.ListPromotionsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_promotions(request=request) + + # Handle the response + async for response in page_result: + print(response) + +# [END merchantapi_v1beta_generated_PromotionsService_ListPromotions_async] diff --git a/packages/google-shopping-merchant-promotions/samples/generated_samples/merchantapi_v1beta_generated_promotions_service_list_promotions_sync.py b/packages/google-shopping-merchant-promotions/samples/generated_samples/merchantapi_v1beta_generated_promotions_service_list_promotions_sync.py new file mode 100644 index 000000000000..d2d3f2a247df --- /dev/null +++ b/packages/google-shopping-merchant-promotions/samples/generated_samples/merchantapi_v1beta_generated_promotions_service_list_promotions_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListPromotions +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-shopping-merchant-promotions + + +# [START merchantapi_v1beta_generated_PromotionsService_ListPromotions_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.shopping import merchant_promotions_v1beta + + +def sample_list_promotions(): + # Create a client + client = merchant_promotions_v1beta.PromotionsServiceClient() + + # Initialize request argument(s) + request = merchant_promotions_v1beta.ListPromotionsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_promotions(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END merchantapi_v1beta_generated_PromotionsService_ListPromotions_sync] diff --git a/packages/google-shopping-merchant-promotions/samples/generated_samples/snippet_metadata_google.shopping.merchant.promotions.v1beta.json b/packages/google-shopping-merchant-promotions/samples/generated_samples/snippet_metadata_google.shopping.merchant.promotions.v1beta.json new file mode 100644 index 000000000000..4b7f108a5403 --- /dev/null +++ b/packages/google-shopping-merchant-promotions/samples/generated_samples/snippet_metadata_google.shopping.merchant.promotions.v1beta.json @@ -0,0 +1,490 @@ +{ + "clientLibrary": { + "apis": [ + { + "id": "google.shopping.merchant.promotions.v1beta", + "version": "v1beta" + } + ], + "language": "PYTHON", + "name": "google-shopping-merchant-promotions", + "version": "0.1.0" + }, + "snippets": [ + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.shopping.merchant_promotions_v1beta.PromotionsServiceAsyncClient", + "shortName": "PromotionsServiceAsyncClient" + }, + "fullName": "google.shopping.merchant_promotions_v1beta.PromotionsServiceAsyncClient.get_promotion", + "method": { + "fullName": "google.shopping.merchant.promotions.v1beta.PromotionsService.GetPromotion", + "service": { + "fullName": "google.shopping.merchant.promotions.v1beta.PromotionsService", + "shortName": "PromotionsService" + }, + "shortName": "GetPromotion" + }, + "parameters": [ + { + "name": "request", + "type": "google.shopping.merchant_promotions_v1beta.types.GetPromotionRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.shopping.merchant_promotions_v1beta.types.Promotion", + "shortName": "get_promotion" + }, + "description": "Sample for GetPromotion", + "file": "merchantapi_v1beta_generated_promotions_service_get_promotion_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "merchantapi_v1beta_generated_PromotionsService_GetPromotion_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "merchantapi_v1beta_generated_promotions_service_get_promotion_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.shopping.merchant_promotions_v1beta.PromotionsServiceClient", + "shortName": "PromotionsServiceClient" + }, + "fullName": 
"google.shopping.merchant_promotions_v1beta.PromotionsServiceClient.get_promotion", + "method": { + "fullName": "google.shopping.merchant.promotions.v1beta.PromotionsService.GetPromotion", + "service": { + "fullName": "google.shopping.merchant.promotions.v1beta.PromotionsService", + "shortName": "PromotionsService" + }, + "shortName": "GetPromotion" + }, + "parameters": [ + { + "name": "request", + "type": "google.shopping.merchant_promotions_v1beta.types.GetPromotionRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.shopping.merchant_promotions_v1beta.types.Promotion", + "shortName": "get_promotion" + }, + "description": "Sample for GetPromotion", + "file": "merchantapi_v1beta_generated_promotions_service_get_promotion_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "merchantapi_v1beta_generated_PromotionsService_GetPromotion_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "merchantapi_v1beta_generated_promotions_service_get_promotion_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.shopping.merchant_promotions_v1beta.PromotionsServiceAsyncClient", + "shortName": "PromotionsServiceAsyncClient" + }, + "fullName": "google.shopping.merchant_promotions_v1beta.PromotionsServiceAsyncClient.insert_promotion", + "method": { + "fullName": "google.shopping.merchant.promotions.v1beta.PromotionsService.InsertPromotion", + "service": { + "fullName": "google.shopping.merchant.promotions.v1beta.PromotionsService", + "shortName": "PromotionsService" + }, + "shortName": "InsertPromotion" + }, + "parameters": [ + { + "name": "request", + "type": "google.shopping.merchant_promotions_v1beta.types.InsertPromotionRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.shopping.merchant_promotions_v1beta.types.Promotion", + "shortName": "insert_promotion" + }, + "description": "Sample for InsertPromotion", + "file": "merchantapi_v1beta_generated_promotions_service_insert_promotion_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "merchantapi_v1beta_generated_PromotionsService_InsertPromotion_async", + "segments": [ + { + "end": 59, + "start": 27, + "type": "FULL" + }, + { + "end": 59, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 53, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 56, + "start": 54, + "type": "REQUEST_EXECUTION" + }, + { + "end": 60, + "start": 57, + "type": "RESPONSE_HANDLING" + } + ], + "title": "merchantapi_v1beta_generated_promotions_service_insert_promotion_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.shopping.merchant_promotions_v1beta.PromotionsServiceClient", + 
"shortName": "PromotionsServiceClient" + }, + "fullName": "google.shopping.merchant_promotions_v1beta.PromotionsServiceClient.insert_promotion", + "method": { + "fullName": "google.shopping.merchant.promotions.v1beta.PromotionsService.InsertPromotion", + "service": { + "fullName": "google.shopping.merchant.promotions.v1beta.PromotionsService", + "shortName": "PromotionsService" + }, + "shortName": "InsertPromotion" + }, + "parameters": [ + { + "name": "request", + "type": "google.shopping.merchant_promotions_v1beta.types.InsertPromotionRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.shopping.merchant_promotions_v1beta.types.Promotion", + "shortName": "insert_promotion" + }, + "description": "Sample for InsertPromotion", + "file": "merchantapi_v1beta_generated_promotions_service_insert_promotion_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "merchantapi_v1beta_generated_PromotionsService_InsertPromotion_sync", + "segments": [ + { + "end": 59, + "start": 27, + "type": "FULL" + }, + { + "end": 59, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 53, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 56, + "start": 54, + "type": "REQUEST_EXECUTION" + }, + { + "end": 60, + "start": 57, + "type": "RESPONSE_HANDLING" + } + ], + "title": "merchantapi_v1beta_generated_promotions_service_insert_promotion_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.shopping.merchant_promotions_v1beta.PromotionsServiceAsyncClient", + "shortName": "PromotionsServiceAsyncClient" + }, + "fullName": "google.shopping.merchant_promotions_v1beta.PromotionsServiceAsyncClient.list_promotions", + "method": { + "fullName": "google.shopping.merchant.promotions.v1beta.PromotionsService.ListPromotions", + "service": { + "fullName": "google.shopping.merchant.promotions.v1beta.PromotionsService", + "shortName": "PromotionsService" + }, + "shortName": "ListPromotions" + }, + "parameters": [ + { + "name": "request", + "type": "google.shopping.merchant_promotions_v1beta.types.ListPromotionsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.shopping.merchant_promotions_v1beta.services.promotions_service.pagers.ListPromotionsAsyncPager", + "shortName": "list_promotions" + }, + "description": "Sample for ListPromotions", + "file": "merchantapi_v1beta_generated_promotions_service_list_promotions_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "merchantapi_v1beta_generated_PromotionsService_ListPromotions_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "merchantapi_v1beta_generated_promotions_service_list_promotions_async.py" + }, + { + "canonical": true, + 
"clientMethod": { + "client": { + "fullName": "google.shopping.merchant_promotions_v1beta.PromotionsServiceClient", + "shortName": "PromotionsServiceClient" + }, + "fullName": "google.shopping.merchant_promotions_v1beta.PromotionsServiceClient.list_promotions", + "method": { + "fullName": "google.shopping.merchant.promotions.v1beta.PromotionsService.ListPromotions", + "service": { + "fullName": "google.shopping.merchant.promotions.v1beta.PromotionsService", + "shortName": "PromotionsService" + }, + "shortName": "ListPromotions" + }, + "parameters": [ + { + "name": "request", + "type": "google.shopping.merchant_promotions_v1beta.types.ListPromotionsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.shopping.merchant_promotions_v1beta.services.promotions_service.pagers.ListPromotionsPager", + "shortName": "list_promotions" + }, + "description": "Sample for ListPromotions", + "file": "merchantapi_v1beta_generated_promotions_service_list_promotions_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "merchantapi_v1beta_generated_PromotionsService_ListPromotions_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "merchantapi_v1beta_generated_promotions_service_list_promotions_sync.py" + } + ] +} diff --git a/packages/google-shopping-merchant-promotions/scripts/decrypt-secrets.sh b/packages/google-shopping-merchant-promotions/scripts/decrypt-secrets.sh new file mode 100755 index 000000000000..0018b421ddf8 --- /dev/null +++ b/packages/google-shopping-merchant-promotions/scripts/decrypt-secrets.sh @@ -0,0 +1,46 @@ +#!/bin/bash + +# Copyright 2023 Google LLC All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )" +ROOT=$( dirname "$DIR" ) + +# Work from the project root. +cd $ROOT + +# Prevent it from overriding files. +# We recommend that sample authors use their own service account files and cloud project. +# In that case, they are supposed to prepare these files by themselves. +if [[ -f "testing/test-env.sh" ]] || \ + [[ -f "testing/service-account.json" ]] || \ + [[ -f "testing/client-secrets.json" ]]; then + echo "One or more target files exist, aborting." + exit 1 +fi + +# Use SECRET_MANAGER_PROJECT if set, fallback to cloud-devrel-kokoro-resources. 
+PROJECT_ID="${SECRET_MANAGER_PROJECT:-cloud-devrel-kokoro-resources}" + +gcloud secrets versions access latest --secret="python-docs-samples-test-env" \ + --project="${PROJECT_ID}" \ + > testing/test-env.sh +gcloud secrets versions access latest \ + --secret="python-docs-samples-service-account" \ + --project="${PROJECT_ID}" \ + > testing/service-account.json +gcloud secrets versions access latest \ + --secret="python-docs-samples-client-secrets" \ + --project="${PROJECT_ID}" \ + > testing/client-secrets.json diff --git a/packages/google-shopping-merchant-promotions/scripts/fixup_merchant_promotions_v1beta_keywords.py b/packages/google-shopping-merchant-promotions/scripts/fixup_merchant_promotions_v1beta_keywords.py new file mode 100644 index 000000000000..398615e0afb5 --- /dev/null +++ b/packages/google-shopping-merchant-promotions/scripts/fixup_merchant_promotions_v1beta_keywords.py @@ -0,0 +1,178 @@ +#! /usr/bin/env python3 +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import argparse +import os +import libcst as cst +import pathlib +import sys +from typing import (Any, Callable, Dict, List, Sequence, Tuple) + + +def partition( + predicate: Callable[[Any], bool], + iterator: Sequence[Any] +) -> Tuple[List[Any], List[Any]]: + """A stable, out-of-place partition.""" + results = ([], []) + + for i in iterator: + results[int(predicate(i))].append(i) + + # Returns trueList, falseList + return results[1], results[0] + + +class merchant_promotionsCallTransformer(cst.CSTTransformer): + CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata') + METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { + 'get_promotion': ('name', ), + 'insert_promotion': ('parent', 'promotion', 'data_source', ), + 'list_promotions': ('parent', 'page_size', 'page_token', ), + } + + def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode: + try: + key = original.func.attr.value + kword_params = self.METHOD_TO_PARAMS[key] + except (AttributeError, KeyError): + # Either not a method from the API or too convoluted to be sure. + return updated + + # If the existing code is valid, keyword args come after positional args. + # Therefore, all positional args must map to the first parameters. + args, kwargs = partition(lambda a: not bool(a.keyword), updated.args) + if any(k.keyword.value == "request" for k in kwargs): + # We've already fixed this file, don't fix it again. 
+ return updated + + kwargs, ctrl_kwargs = partition( + lambda a: a.keyword.value not in self.CTRL_PARAMS, + kwargs + ) + + args, ctrl_args = args[:len(kword_params)], args[len(kword_params):] + ctrl_kwargs.extend(cst.Arg(value=a.value, keyword=cst.Name(value=ctrl)) + for a, ctrl in zip(ctrl_args, self.CTRL_PARAMS)) + + request_arg = cst.Arg( + value=cst.Dict([ + cst.DictElement( + cst.SimpleString("'{}'".format(name)), +cst.Element(value=arg.value) + ) + # Note: the args + kwargs looks silly, but keep in mind that + # the control parameters had to be stripped out, and that + # those could have been passed positionally or by keyword. + for name, arg in zip(kword_params, args + kwargs)]), + keyword=cst.Name("request") + ) + + return updated.with_changes( + args=[request_arg] + ctrl_kwargs + ) + + +def fix_files( + in_dir: pathlib.Path, + out_dir: pathlib.Path, + *, + transformer=merchant_promotionsCallTransformer(), +): + """Duplicate the input dir to the output dir, fixing file method calls. + + Preconditions: + * in_dir is a real directory + * out_dir is a real, empty directory + """ + pyfile_gen = ( + pathlib.Path(os.path.join(root, f)) + for root, _, files in os.walk(in_dir) + for f in files if os.path.splitext(f)[1] == ".py" + ) + + for fpath in pyfile_gen: + with open(fpath, 'r') as f: + src = f.read() + + # Parse the code and insert method call fixes. + tree = cst.parse_module(src) + updated = tree.visit(transformer) + + # Create the path and directory structure for the new file. + updated_path = out_dir.joinpath(fpath.relative_to(in_dir)) + updated_path.parent.mkdir(parents=True, exist_ok=True) + + # Generate the updated source file at the corresponding path. + with open(updated_path, 'w') as f: + f.write(updated.code) + + +if __name__ == '__main__': + parser = argparse.ArgumentParser( + description="""Fix up source that uses the merchant_promotions client library. + +The existing sources are NOT overwritten but are copied to output_dir with changes made. + +Note: This tool operates at a best-effort level at converting positional + parameters in client method calls to keyword based parameters. + Cases where it WILL FAIL include + A) * or ** expansion in a method call. + B) Calls via function or method alias (includes free function calls) + C) Indirect or dispatched calls (e.g. the method is looked up dynamically) + + These all constitute false negatives. The tool will also detect false + positives when an API method shares a name with another method. 
+""") + parser.add_argument( + '-d', + '--input-directory', + required=True, + dest='input_dir', + help='the input directory to walk for python files to fix up', + ) + parser.add_argument( + '-o', + '--output-directory', + required=True, + dest='output_dir', + help='the directory to output files fixed via un-flattening', + ) + args = parser.parse_args() + input_dir = pathlib.Path(args.input_dir) + output_dir = pathlib.Path(args.output_dir) + if not input_dir.is_dir(): + print( + f"input directory '{input_dir}' does not exist or is not a directory", + file=sys.stderr, + ) + sys.exit(-1) + + if not output_dir.is_dir(): + print( + f"output directory '{output_dir}' does not exist or is not a directory", + file=sys.stderr, + ) + sys.exit(-1) + + if os.listdir(output_dir): + print( + f"output directory '{output_dir}' is not empty", + file=sys.stderr, + ) + sys.exit(-1) + + fix_files(input_dir, output_dir) diff --git a/packages/google-shopping-merchant-promotions/setup.py b/packages/google-shopping-merchant-promotions/setup.py new file mode 100644 index 000000000000..fa22aaf8da2d --- /dev/null +++ b/packages/google-shopping-merchant-promotions/setup.py @@ -0,0 +1,96 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import io +import os +import re + +import setuptools # type: ignore + +package_root = os.path.abspath(os.path.dirname(__file__)) + +name = "google-shopping-merchant-promotions" + + +description = "Google Shopping Merchant Promotions API client library" + +version = None + +with open( + os.path.join(package_root, "google/shopping/merchant_promotions/gapic_version.py") +) as fp: + version_candidates = re.findall(r"(?<=\")\d+.\d+.\d+(?=\")", fp.read()) + assert len(version_candidates) == 1 + version = version_candidates[0] + +if version[0] == "0": + release_status = "Development Status :: 4 - Beta" +else: + release_status = "Development Status :: 5 - Production/Stable" + +dependencies = [ + "google-api-core[grpc] >= 1.34.1, <3.0.0dev,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*,!=2.8.*,!=2.9.*,!=2.10.*", + # Exclude incompatible versions of `google-auth` + # See https://github.com/googleapis/google-cloud-python/issues/12364 + "google-auth >= 2.14.1, <3.0.0dev,!=2.24.0,!=2.25.0", + "proto-plus >= 1.22.3, <2.0.0dev", + "protobuf>=3.19.5,<5.0.0dev,!=3.20.0,!=3.20.1,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", + "google-shopping-type >= 0.1.0, <1.0.0dev", +] +url = "https://github.com/googleapis/google-cloud-python/tree/main/packages/google-shopping-merchant-promotions" + +package_root = os.path.abspath(os.path.dirname(__file__)) + +readme_filename = os.path.join(package_root, "README.rst") +with io.open(readme_filename, encoding="utf-8") as readme_file: + readme = readme_file.read() + +packages = [ + package + for package in setuptools.find_namespace_packages() + if package.startswith("google") +] + +setuptools.setup( + name=name, + version=version, + description=description, + long_description=readme, + author="Google LLC", + author_email="googleapis-packages@google.com", + license="Apache 2.0", + url=url, + classifiers=[ + release_status, + "Intended Audience :: Developers", + "License :: OSI Approved :: Apache Software License", + "Programming Language :: Python", + "Programming Language :: Python :: 3", + "Programming Language :: Python :: 3.7", + "Programming Language :: Python :: 3.8", + "Programming Language :: Python :: 3.9", + "Programming Language :: Python :: 3.10", + "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", + "Operating System :: OS Independent", + "Topic :: Internet", + ], + platforms="Posix; MacOS X; Windows", + packages=packages, + python_requires=">=3.7", + install_requires=dependencies, + include_package_data=True, + zip_safe=False, +) diff --git a/packages/google-shopping-merchant-promotions/testing/.gitignore b/packages/google-shopping-merchant-promotions/testing/.gitignore new file mode 100644 index 000000000000..b05fbd630881 --- /dev/null +++ b/packages/google-shopping-merchant-promotions/testing/.gitignore @@ -0,0 +1,3 @@ +test-env.sh +service-account.json +client-secrets.json \ No newline at end of file diff --git a/packages/google-shopping-merchant-promotions/testing/constraints-3.10.txt b/packages/google-shopping-merchant-promotions/testing/constraints-3.10.txt new file mode 100644 index 000000000000..4cae520d02b2 --- /dev/null +++ b/packages/google-shopping-merchant-promotions/testing/constraints-3.10.txt @@ -0,0 +1,7 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. 
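A short illustration of the version handling in the setup.py above: a lookbehind/lookahead regex pulls the quoted semantic version out of gapic_version.py, and a leading "0" selects the Beta trove classifier. The file content here is a stand-in, not the actual gapic_version.py from this change:

```python
import re

# Stand-in for google/shopping/merchant_promotions/gapic_version.py.
sample = '__version__ = "0.1.0"\n'

version_candidates = re.findall(r"(?<=\")\d+.\d+.\d+(?=\")", sample)
assert version_candidates == ["0.1.0"]

version = version_candidates[0]
release_status = (
    "Development Status :: 4 - Beta"
    if version.startswith("0")
    else "Development Status :: 5 - Production/Stable"
)
print(version, release_status)
```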
+google-api-core +proto-plus +protobuf +google-shopping-type diff --git a/packages/google-shopping-merchant-promotions/testing/constraints-3.11.txt b/packages/google-shopping-merchant-promotions/testing/constraints-3.11.txt new file mode 100644 index 000000000000..4cae520d02b2 --- /dev/null +++ b/packages/google-shopping-merchant-promotions/testing/constraints-3.11.txt @@ -0,0 +1,7 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf +google-shopping-type diff --git a/packages/google-shopping-merchant-promotions/testing/constraints-3.12.txt b/packages/google-shopping-merchant-promotions/testing/constraints-3.12.txt new file mode 100644 index 000000000000..4cae520d02b2 --- /dev/null +++ b/packages/google-shopping-merchant-promotions/testing/constraints-3.12.txt @@ -0,0 +1,7 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf +google-shopping-type diff --git a/packages/google-shopping-merchant-promotions/testing/constraints-3.7.txt b/packages/google-shopping-merchant-promotions/testing/constraints-3.7.txt new file mode 100644 index 000000000000..26e4a9895f74 --- /dev/null +++ b/packages/google-shopping-merchant-promotions/testing/constraints-3.7.txt @@ -0,0 +1,11 @@ +# This constraints file is used to check that lower bounds +# are correct in setup.py +# List all library dependencies and extras in this file. +# Pin the version to the lower bound. +# e.g., if setup.py has "google-cloud-foo >= 1.14.0, < 2.0.0dev", +# Then this file should have google-cloud-foo==1.14.0 +google-api-core==1.34.1 +google-auth==2.14.1 +proto-plus==1.22.3 +protobuf==3.19.5 +google-shopping-type==0.1.0 diff --git a/packages/google-shopping-merchant-promotions/testing/constraints-3.8.txt b/packages/google-shopping-merchant-promotions/testing/constraints-3.8.txt new file mode 100644 index 000000000000..4cae520d02b2 --- /dev/null +++ b/packages/google-shopping-merchant-promotions/testing/constraints-3.8.txt @@ -0,0 +1,7 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf +google-shopping-type diff --git a/packages/google-shopping-merchant-promotions/testing/constraints-3.9.txt b/packages/google-shopping-merchant-promotions/testing/constraints-3.9.txt new file mode 100644 index 000000000000..4cae520d02b2 --- /dev/null +++ b/packages/google-shopping-merchant-promotions/testing/constraints-3.9.txt @@ -0,0 +1,7 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf +google-shopping-type diff --git a/packages/google-shopping-merchant-promotions/tests/__init__.py b/packages/google-shopping-merchant-promotions/tests/__init__.py new file mode 100644 index 000000000000..8f6cf068242c --- /dev/null +++ b/packages/google-shopping-merchant-promotions/tests/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/packages/google-shopping-merchant-promotions/tests/unit/__init__.py b/packages/google-shopping-merchant-promotions/tests/unit/__init__.py new file mode 100644 index 000000000000..8f6cf068242c --- /dev/null +++ b/packages/google-shopping-merchant-promotions/tests/unit/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/packages/google-shopping-merchant-promotions/tests/unit/gapic/__init__.py b/packages/google-shopping-merchant-promotions/tests/unit/gapic/__init__.py new file mode 100644 index 000000000000..8f6cf068242c --- /dev/null +++ b/packages/google-shopping-merchant-promotions/tests/unit/gapic/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/packages/google-shopping-merchant-promotions/tests/unit/gapic/merchant_promotions_v1beta/__init__.py b/packages/google-shopping-merchant-promotions/tests/unit/gapic/merchant_promotions_v1beta/__init__.py new file mode 100644 index 000000000000..8f6cf068242c --- /dev/null +++ b/packages/google-shopping-merchant-promotions/tests/unit/gapic/merchant_promotions_v1beta/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# diff --git a/packages/google-shopping-merchant-promotions/tests/unit/gapic/merchant_promotions_v1beta/test_promotions_service.py b/packages/google-shopping-merchant-promotions/tests/unit/gapic/merchant_promotions_v1beta/test_promotions_service.py new file mode 100644 index 000000000000..b28f3b66b12e --- /dev/null +++ b/packages/google-shopping-merchant-promotions/tests/unit/gapic/merchant_promotions_v1beta/test_promotions_service.py @@ -0,0 +1,4168 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import os + +# try/except added for compatibility with python < 3.8 +try: + from unittest import mock + from unittest.mock import AsyncMock # pragma: NO COVER +except ImportError: # pragma: NO COVER + import mock + +from collections.abc import Iterable +import json +import math + +from google.api_core import gapic_v1, grpc_helpers, grpc_helpers_async, path_template +from google.api_core import api_core_version, client_options +from google.api_core import exceptions as core_exceptions +import google.auth +from google.auth import credentials as ga_credentials +from google.auth.exceptions import MutualTLSChannelError +from google.oauth2 import service_account +from google.protobuf import json_format +from google.protobuf import timestamp_pb2 # type: ignore +from google.shopping.type.types import types +from google.type import interval_pb2 # type: ignore +import grpc +from grpc.experimental import aio +from proto.marshal.rules import wrappers +from proto.marshal.rules.dates import DurationRule, TimestampRule +import pytest +from requests import PreparedRequest, Request, Response +from requests.sessions import Session + +from google.shopping.merchant_promotions_v1beta.services.promotions_service import ( + PromotionsServiceAsyncClient, + PromotionsServiceClient, + pagers, + transports, +) +from google.shopping.merchant_promotions_v1beta.types import ( + promotions, + promotions_common, +) + + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + + +# If default endpoint is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. +def modify_default_endpoint(client): + return ( + "foo.googleapis.com" + if ("localhost" in client.DEFAULT_ENDPOINT) + else client.DEFAULT_ENDPOINT + ) + + +# If default endpoint template is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint template so the client can produce a different +# mtls endpoint for endpoint testing purposes. 
+def modify_default_endpoint_template(client): + return ( + "test.{UNIVERSE_DOMAIN}" + if ("localhost" in client._DEFAULT_ENDPOINT_TEMPLATE) + else client._DEFAULT_ENDPOINT_TEMPLATE + ) + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert PromotionsServiceClient._get_default_mtls_endpoint(None) is None + assert ( + PromotionsServiceClient._get_default_mtls_endpoint(api_endpoint) + == api_mtls_endpoint + ) + assert ( + PromotionsServiceClient._get_default_mtls_endpoint(api_mtls_endpoint) + == api_mtls_endpoint + ) + assert ( + PromotionsServiceClient._get_default_mtls_endpoint(sandbox_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + PromotionsServiceClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + PromotionsServiceClient._get_default_mtls_endpoint(non_googleapi) + == non_googleapi + ) + + +def test__read_environment_variables(): + assert PromotionsServiceClient._read_environment_variables() == ( + False, + "auto", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + assert PromotionsServiceClient._read_environment_variables() == ( + True, + "auto", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + assert PromotionsServiceClient._read_environment_variables() == ( + False, + "auto", + None, + ) + + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError) as excinfo: + PromotionsServiceClient._read_environment_variables() + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + assert PromotionsServiceClient._read_environment_variables() == ( + False, + "never", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + assert PromotionsServiceClient._read_environment_variables() == ( + False, + "always", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}): + assert PromotionsServiceClient._read_environment_variables() == ( + False, + "auto", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + PromotionsServiceClient._read_environment_variables() + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + with mock.patch.dict(os.environ, {"GOOGLE_CLOUD_UNIVERSE_DOMAIN": "foo.com"}): + assert PromotionsServiceClient._read_environment_variables() == ( + False, + "auto", + "foo.com", + ) + + +def test__get_client_cert_source(): + mock_provided_cert_source = mock.Mock() + mock_default_cert_source = mock.Mock() + + assert PromotionsServiceClient._get_client_cert_source(None, False) is None + assert ( + PromotionsServiceClient._get_client_cert_source( + mock_provided_cert_source, False + ) + is None + ) + assert ( + PromotionsServiceClient._get_client_cert_source(mock_provided_cert_source, True) + == mock_provided_cert_source + ) + + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", 
return_value=True + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=mock_default_cert_source, + ): + assert ( + PromotionsServiceClient._get_client_cert_source(None, True) + is mock_default_cert_source + ) + assert ( + PromotionsServiceClient._get_client_cert_source( + mock_provided_cert_source, "true" + ) + is mock_provided_cert_source + ) + + +@mock.patch.object( + PromotionsServiceClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(PromotionsServiceClient), +) +@mock.patch.object( + PromotionsServiceAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(PromotionsServiceAsyncClient), +) +def test__get_api_endpoint(): + api_override = "foo.com" + mock_client_cert_source = mock.Mock() + default_universe = PromotionsServiceClient._DEFAULT_UNIVERSE + default_endpoint = PromotionsServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=default_universe + ) + mock_universe = "bar.com" + mock_endpoint = PromotionsServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=mock_universe + ) + + assert ( + PromotionsServiceClient._get_api_endpoint( + api_override, mock_client_cert_source, default_universe, "always" + ) + == api_override + ) + assert ( + PromotionsServiceClient._get_api_endpoint( + None, mock_client_cert_source, default_universe, "auto" + ) + == PromotionsServiceClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + PromotionsServiceClient._get_api_endpoint(None, None, default_universe, "auto") + == default_endpoint + ) + assert ( + PromotionsServiceClient._get_api_endpoint( + None, None, default_universe, "always" + ) + == PromotionsServiceClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + PromotionsServiceClient._get_api_endpoint( + None, mock_client_cert_source, default_universe, "always" + ) + == PromotionsServiceClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + PromotionsServiceClient._get_api_endpoint(None, None, mock_universe, "never") + == mock_endpoint + ) + assert ( + PromotionsServiceClient._get_api_endpoint(None, None, default_universe, "never") + == default_endpoint + ) + + with pytest.raises(MutualTLSChannelError) as excinfo: + PromotionsServiceClient._get_api_endpoint( + None, mock_client_cert_source, mock_universe, "auto" + ) + assert ( + str(excinfo.value) + == "mTLS is not supported in any universe other than googleapis.com." + ) + + +def test__get_universe_domain(): + client_universe_domain = "foo.com" + universe_domain_env = "bar.com" + + assert ( + PromotionsServiceClient._get_universe_domain( + client_universe_domain, universe_domain_env + ) + == client_universe_domain + ) + assert ( + PromotionsServiceClient._get_universe_domain(None, universe_domain_env) + == universe_domain_env + ) + assert ( + PromotionsServiceClient._get_universe_domain(None, None) + == PromotionsServiceClient._DEFAULT_UNIVERSE + ) + + with pytest.raises(ValueError) as excinfo: + PromotionsServiceClient._get_universe_domain("", None) + assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
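The assertions above pin down how the client resolves its universe domain: an explicit client option wins, then the GOOGLE_CLOUD_UNIVERSE_DOMAIN environment variable, then the default `googleapis.com`, with an empty string rejected outright. A minimal sketch of that precedence (a stand-in helper, not the client's private method):

```python
def resolve_universe_domain(client_option, env_value, default="googleapis.com"):
    # Stand-in for the precedence asserted in test__get_universe_domain.
    if client_option == "":
        raise ValueError("Universe Domain cannot be an empty string.")
    return client_option or env_value or default


assert resolve_universe_domain("foo.com", "bar.com") == "foo.com"
assert resolve_universe_domain(None, "bar.com") == "bar.com"
assert resolve_universe_domain(None, None) == "googleapis.com"
```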
+ + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (PromotionsServiceClient, transports.PromotionsServiceGrpcTransport, "grpc"), + (PromotionsServiceClient, transports.PromotionsServiceRestTransport, "rest"), + ], +) +def test__validate_universe_domain(client_class, transport_class, transport_name): + client = client_class( + transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) + ) + assert client._validate_universe_domain() == True + + # Test the case when universe is already validated. + assert client._validate_universe_domain() == True + + if transport_name == "grpc": + # Test the case where credentials are provided by the + # `local_channel_credentials`. The default universes in both match. + channel = grpc.secure_channel( + "http://localhost/", grpc.local_channel_credentials() + ) + client = client_class(transport=transport_class(channel=channel)) + assert client._validate_universe_domain() == True + + # Test the case where credentials do not exist: e.g. a transport is provided + # with no credentials. Validation should still succeed because there is no + # mismatch with non-existent credentials. + channel = grpc.secure_channel( + "http://localhost/", grpc.local_channel_credentials() + ) + transport = transport_class(channel=channel) + transport._credentials = None + client = client_class(transport=transport) + assert client._validate_universe_domain() == True + + # TODO: This is needed to cater for older versions of google-auth + # Make this test unconditional once the minimum supported version of + # google-auth becomes 2.23.0 or higher. + google_auth_major, google_auth_minor = [ + int(part) for part in google.auth.__version__.split(".")[0:2] + ] + if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): + credentials = ga_credentials.AnonymousCredentials() + credentials._universe_domain = "foo.com" + # Test the case when there is a universe mismatch from the credentials. + client = client_class(transport=transport_class(credentials=credentials)) + with pytest.raises(ValueError) as excinfo: + client._validate_universe_domain() + assert ( + str(excinfo.value) + == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." + ) + + # Test the case when there is a universe mismatch from the client. + # + # TODO: Make this test unconditional once the minimum supported version of + # google-api-core becomes 2.15.0 or higher. + api_core_major, api_core_minor = [ + int(part) for part in api_core_version.__version__.split(".")[0:2] + ] + if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): + client = client_class( + client_options={"universe_domain": "bar.com"}, + transport=transport_class( + credentials=ga_credentials.AnonymousCredentials(), + ), + ) + with pytest.raises(ValueError) as excinfo: + client._validate_universe_domain() + assert ( + str(excinfo.value) + == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." 
+ ) + + # Test that ValueError is raised if universe_domain is provided via client options and credentials is None + with pytest.raises(ValueError): + client._compare_universes("foo.bar", None) + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (PromotionsServiceClient, "grpc"), + (PromotionsServiceAsyncClient, "grpc_asyncio"), + (PromotionsServiceClient, "rest"), + ], +) +def test_promotions_service_client_from_service_account_info( + client_class, transport_name +): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_info" + ) as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info, transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + "merchantapi.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://merchantapi.googleapis.com" + ) + + +@pytest.mark.parametrize( + "transport_class,transport_name", + [ + (transports.PromotionsServiceGrpcTransport, "grpc"), + (transports.PromotionsServiceGrpcAsyncIOTransport, "grpc_asyncio"), + (transports.PromotionsServiceRestTransport, "rest"), + ], +) +def test_promotions_service_client_service_account_always_use_jwt( + transport_class, transport_name +): + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) + + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=False) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (PromotionsServiceClient, "grpc"), + (PromotionsServiceAsyncClient, "grpc_asyncio"), + (PromotionsServiceClient, "rest"), + ], +) +def test_promotions_service_client_from_service_account_file( + client_class, transport_name +): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_file" + ) as factory: + factory.return_value = creds + client = client_class.from_service_account_file( + "dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + client = client_class.from_service_account_json( + "dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + "merchantapi.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://merchantapi.googleapis.com" + ) + + +def test_promotions_service_client_get_transport_class(): + transport = PromotionsServiceClient.get_transport_class() + available_transports = [ + transports.PromotionsServiceGrpcTransport, + transports.PromotionsServiceRestTransport, + ] + assert transport in available_transports + + transport = PromotionsServiceClient.get_transport_class("grpc") + assert transport == transports.PromotionsServiceGrpcTransport + + 
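For context on the factory methods these tests exercise, this is roughly how an application would build a client from a service-account key and pick a transport. The path is a placeholder and a real key file is needed at runtime; `from_service_account_json` is an alias for the same call.

```python
from google.shopping import merchant_promotions_v1beta

# Placeholder path; substitute a real service-account key file.
client = merchant_promotions_v1beta.PromotionsServiceClient.from_service_account_file(
    "service-account.json",
    transport="rest",  # "grpc" is the default for the sync client
)
# Per the assertions above, the transport host resolves to
# "merchantapi.googleapis.com:443" for gRPC and "https://merchantapi.googleapis.com" for REST.
```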
+@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (PromotionsServiceClient, transports.PromotionsServiceGrpcTransport, "grpc"), + ( + PromotionsServiceAsyncClient, + transports.PromotionsServiceGrpcAsyncIOTransport, + "grpc_asyncio", + ), + (PromotionsServiceClient, transports.PromotionsServiceRestTransport, "rest"), + ], +) +@mock.patch.object( + PromotionsServiceClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(PromotionsServiceClient), +) +@mock.patch.object( + PromotionsServiceAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(PromotionsServiceAsyncClient), +) +def test_promotions_service_client_client_options( + client_class, transport_class, transport_name +): + # Check that if channel is provided we won't create a new one. + with mock.patch.object(PromotionsServiceClient, "get_transport_class") as gtc: + transport = transport_class(credentials=ga_credentials.AnonymousCredentials()) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object(PromotionsServiceClient, "get_transport_class") as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. + options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name, client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + client = client_class(transport=transport_name) + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError) as excinfo: + client = client_class(transport=transport_name) + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + # Check the case api_endpoint is provided + options = client_options.ClientOptions( + api_audience="https://language.googleapis.com" + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience="https://language.googleapis.com", + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,use_client_cert_env", + [ + ( + PromotionsServiceClient, + transports.PromotionsServiceGrpcTransport, + "grpc", + "true", + ), + ( + PromotionsServiceAsyncClient, + transports.PromotionsServiceGrpcAsyncIOTransport, + "grpc_asyncio", + "true", + ), + ( + PromotionsServiceClient, + transports.PromotionsServiceGrpcTransport, + "grpc", + "false", + ), + ( + PromotionsServiceAsyncClient, + transports.PromotionsServiceGrpcAsyncIOTransport, + "grpc_asyncio", + "false", + ), + ( + PromotionsServiceClient, + transports.PromotionsServiceRestTransport, + "rest", + "true", + ), + ( + PromotionsServiceClient, + transports.PromotionsServiceRestTransport, + "rest", + "false", + ), + ], +) +@mock.patch.object( + PromotionsServiceClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(PromotionsServiceClient), +) +@mock.patch.object( + PromotionsServiceAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(PromotionsServiceAsyncClient), +) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_promotions_service_client_mtls_env_auto( + client_class, transport_class, transport_name, use_client_cert_env +): + # This tests the endpoint autoswitch behavior. 
Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. + + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + options = client_options.ClientOptions( + client_cert_source=client_cert_source_callback + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ) + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=client_cert_source_callback, + ): + if use_client_cert_env == "false": + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ) + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case client_cert_source and ADC client cert are not provided. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class", [PromotionsServiceClient, PromotionsServiceAsyncClient] +) +@mock.patch.object( + PromotionsServiceClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(PromotionsServiceClient), +) +@mock.patch.object( + PromotionsServiceAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(PromotionsServiceAsyncClient), +) +def test_promotions_service_client_get_mtls_endpoint_and_cert_source(client_class): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. 
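+ # GOOGLE_API_USE_MTLS_ENDPOINT is left unset here, so the mode defaults to "auto";
+ # with a default client certificate available, the mTLS endpoint and that
+ # certificate source should be returned.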
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=mock_client_cert_source, + ): + ( + api_endpoint, + cert_source, + ) = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + client_class.get_mtls_endpoint_and_cert_source() + + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError) as excinfo: + client_class.get_mtls_endpoint_and_cert_source() + + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + + +@pytest.mark.parametrize( + "client_class", [PromotionsServiceClient, PromotionsServiceAsyncClient] +) +@mock.patch.object( + PromotionsServiceClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(PromotionsServiceClient), +) +@mock.patch.object( + PromotionsServiceAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(PromotionsServiceAsyncClient), +) +def test_promotions_service_client_client_api_endpoint(client_class): + mock_client_cert_source = client_cert_source_callback + api_override = "foo.com" + default_universe = PromotionsServiceClient._DEFAULT_UNIVERSE + default_endpoint = PromotionsServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=default_universe + ) + mock_universe = "bar.com" + mock_endpoint = PromotionsServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=mock_universe + ) + + # If ClientOptions.api_endpoint is set and GOOGLE_API_USE_CLIENT_CERTIFICATE="true", + # use ClientOptions.api_endpoint as the api endpoint regardless. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ): + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=api_override + ) + client = client_class( + client_options=options, + credentials=ga_credentials.AnonymousCredentials(), + ) + assert client.api_endpoint == api_override + + # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="never", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + client = client_class(credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == default_endpoint + + # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="always", + # use the DEFAULT_MTLS_ENDPOINT as the api endpoint. 
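+ # "always" should select the mTLS endpoint even though no client certificate
+ # is configured for this client.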
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + client = client_class(credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + + # If ClientOptions.api_endpoint is not set, GOOGLE_API_USE_MTLS_ENDPOINT="auto" (default), + # GOOGLE_API_USE_CLIENT_CERTIFICATE="false" (default), default cert source doesn't exist, + # and ClientOptions.universe_domain="bar.com", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with universe domain as the api endpoint. + options = client_options.ClientOptions() + universe_exists = hasattr(options, "universe_domain") + if universe_exists: + options = client_options.ClientOptions(universe_domain=mock_universe) + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + else: + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + assert client.api_endpoint == ( + mock_endpoint if universe_exists else default_endpoint + ) + assert client.universe_domain == ( + mock_universe if universe_exists else default_universe + ) + + # If ClientOptions does not have a universe domain attribute and GOOGLE_API_USE_MTLS_ENDPOINT="never", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. + options = client_options.ClientOptions() + if hasattr(options, "universe_domain"): + delattr(options, "universe_domain") + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + assert client.api_endpoint == default_endpoint + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (PromotionsServiceClient, transports.PromotionsServiceGrpcTransport, "grpc"), + ( + PromotionsServiceAsyncClient, + transports.PromotionsServiceGrpcAsyncIOTransport, + "grpc_asyncio", + ), + (PromotionsServiceClient, transports.PromotionsServiceRestTransport, "rest"), + ], +) +def test_promotions_service_client_client_options_scopes( + client_class, transport_class, transport_name +): + # Check the case scopes are provided. + options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + PromotionsServiceClient, + transports.PromotionsServiceGrpcTransport, + "grpc", + grpc_helpers, + ), + ( + PromotionsServiceAsyncClient, + transports.PromotionsServiceGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + ( + PromotionsServiceClient, + transports.PromotionsServiceRestTransport, + "rest", + None, + ), + ], +) +def test_promotions_service_client_client_options_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. 
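+ # The path should be forwarded to the transport as credentials_file while
+ # credentials stays None, leaving the actual credential loading to the transport.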
+ options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +def test_promotions_service_client_client_options_from_dict(): + with mock.patch( + "google.shopping.merchant_promotions_v1beta.services.promotions_service.transports.PromotionsServiceGrpcTransport.__init__" + ) as grpc_transport: + grpc_transport.return_value = None + client = PromotionsServiceClient( + client_options={"api_endpoint": "squid.clam.whelk"} + ) + grpc_transport.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + PromotionsServiceClient, + transports.PromotionsServiceGrpcTransport, + "grpc", + grpc_helpers, + ), + ( + PromotionsServiceAsyncClient, + transports.PromotionsServiceGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + ], +) +def test_promotions_service_client_create_channel_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. + options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # test that the credentials from file are saved and used as the credentials. 
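+ # Unlike the block above, the transport here is real; only google.auth and
+ # grpc_helpers.create_channel are mocked, so the credentials loaded from the
+ # file should be the ones handed to create_channel.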
+ with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel" + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + file_creds = ga_credentials.AnonymousCredentials() + load_creds.return_value = (file_creds, None) + adc.return_value = (creds, None) + client = client_class(client_options=options, transport=transport_name) + create_channel.assert_called_with( + "merchantapi.googleapis.com:443", + credentials=file_creds, + credentials_file=None, + quota_project_id=None, + default_scopes=("https://www.googleapis.com/auth/content",), + scopes=None, + default_host="merchantapi.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "request_type", + [ + promotions.InsertPromotionRequest, + dict, + ], +) +def test_insert_promotion(request_type, transport: str = "grpc"): + client = PromotionsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.insert_promotion), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = promotions.Promotion( + name="name_value", + promotion_id="promotion_id_value", + content_language="content_language_value", + target_country="target_country_value", + redemption_channel=[promotions_common.RedemptionChannel.IN_STORE], + data_source="data_source_value", + version_number=1518, + ) + response = client.insert_promotion(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = promotions.InsertPromotionRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, promotions.Promotion) + assert response.name == "name_value" + assert response.promotion_id == "promotion_id_value" + assert response.content_language == "content_language_value" + assert response.target_country == "target_country_value" + assert response.redemption_channel == [promotions_common.RedemptionChannel.IN_STORE] + assert response.data_source == "data_source_value" + assert response.version_number == 1518 + + +def test_insert_promotion_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = PromotionsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.insert_promotion), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.insert_promotion() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == promotions.InsertPromotionRequest() + + +def test_insert_promotion_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = PromotionsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = promotions.InsertPromotionRequest( + parent="parent_value", + data_source="data_source_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.insert_promotion), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.insert_promotion(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == promotions.InsertPromotionRequest( + parent="parent_value", + data_source="data_source_value", + ) + + +def test_insert_promotion_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = PromotionsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.insert_promotion in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.insert_promotion + ] = mock_rpc + request = {} + client.insert_promotion(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.insert_promotion(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_insert_promotion_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = PromotionsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.insert_promotion), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + promotions.Promotion( + name="name_value", + promotion_id="promotion_id_value", + content_language="content_language_value", + target_country="target_country_value", + redemption_channel=[promotions_common.RedemptionChannel.IN_STORE], + data_source="data_source_value", + version_number=1518, + ) + ) + response = await client.insert_promotion() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == promotions.InsertPromotionRequest() + + +@pytest.mark.asyncio +async def test_insert_promotion_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = PromotionsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.insert_promotion + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[ + client._client._transport.insert_promotion + ] = mock_object + + request = {} + await client.insert_promotion(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + await client.insert_promotion(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + +@pytest.mark.asyncio +async def test_insert_promotion_async( + transport: str = "grpc_asyncio", request_type=promotions.InsertPromotionRequest +): + client = PromotionsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.insert_promotion), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + promotions.Promotion( + name="name_value", + promotion_id="promotion_id_value", + content_language="content_language_value", + target_country="target_country_value", + redemption_channel=[promotions_common.RedemptionChannel.IN_STORE], + data_source="data_source_value", + version_number=1518, + ) + ) + response = await client.insert_promotion(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = promotions.InsertPromotionRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, promotions.Promotion) + assert response.name == "name_value" + assert response.promotion_id == "promotion_id_value" + assert response.content_language == "content_language_value" + assert response.target_country == "target_country_value" + assert response.redemption_channel == [promotions_common.RedemptionChannel.IN_STORE] + assert response.data_source == "data_source_value" + assert response.version_number == 1518 + + +@pytest.mark.asyncio +async def test_insert_promotion_async_from_dict(): + await test_insert_promotion_async(request_type=dict) + + +def test_insert_promotion_field_headers(): + client = PromotionsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = promotions.InsertPromotionRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.insert_promotion), "__call__") as call: + call.return_value = promotions.Promotion() + client.insert_promotion(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_insert_promotion_field_headers_async(): + client = PromotionsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = promotions.InsertPromotionRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.insert_promotion), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + promotions.Promotion() + ) + await client.insert_promotion(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.parametrize( + "request_type", + [ + promotions.GetPromotionRequest, + dict, + ], +) +def test_get_promotion(request_type, transport: str = "grpc"): + client = PromotionsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_promotion), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = promotions.Promotion( + name="name_value", + promotion_id="promotion_id_value", + content_language="content_language_value", + target_country="target_country_value", + redemption_channel=[promotions_common.RedemptionChannel.IN_STORE], + data_source="data_source_value", + version_number=1518, + ) + response = client.get_promotion(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = promotions.GetPromotionRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, promotions.Promotion) + assert response.name == "name_value" + assert response.promotion_id == "promotion_id_value" + assert response.content_language == "content_language_value" + assert response.target_country == "target_country_value" + assert response.redemption_channel == [promotions_common.RedemptionChannel.IN_STORE] + assert response.data_source == "data_source_value" + assert response.version_number == 1518 + + +def test_get_promotion_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = PromotionsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_promotion), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.get_promotion() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == promotions.GetPromotionRequest() + + +def test_get_promotion_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = PromotionsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = promotions.GetPromotionRequest( + name="name_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_promotion), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.get_promotion(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == promotions.GetPromotionRequest( + name="name_value", + ) + + +def test_get_promotion_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = PromotionsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_promotion in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get_promotion] = mock_rpc + request = {} + client.get_promotion(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.get_promotion(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_get_promotion_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = PromotionsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_promotion), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + promotions.Promotion( + name="name_value", + promotion_id="promotion_id_value", + content_language="content_language_value", + target_country="target_country_value", + redemption_channel=[promotions_common.RedemptionChannel.IN_STORE], + data_source="data_source_value", + version_number=1518, + ) + ) + response = await client.get_promotion() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == promotions.GetPromotionRequest() + + +@pytest.mark.asyncio +async def test_get_promotion_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = PromotionsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.get_promotion + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[ + client._client._transport.get_promotion + ] = mock_object + + request = {} + await client.get_promotion(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_object.call_count == 1 + + await client.get_promotion(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + +@pytest.mark.asyncio +async def test_get_promotion_async( + transport: str = "grpc_asyncio", request_type=promotions.GetPromotionRequest +): + client = PromotionsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_promotion), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + promotions.Promotion( + name="name_value", + promotion_id="promotion_id_value", + content_language="content_language_value", + target_country="target_country_value", + redemption_channel=[promotions_common.RedemptionChannel.IN_STORE], + data_source="data_source_value", + version_number=1518, + ) + ) + response = await client.get_promotion(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = promotions.GetPromotionRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, promotions.Promotion) + assert response.name == "name_value" + assert response.promotion_id == "promotion_id_value" + assert response.content_language == "content_language_value" + assert response.target_country == "target_country_value" + assert response.redemption_channel == [promotions_common.RedemptionChannel.IN_STORE] + assert response.data_source == "data_source_value" + assert response.version_number == 1518 + + +@pytest.mark.asyncio +async def test_get_promotion_async_from_dict(): + await test_get_promotion_async(request_type=dict) + + +def test_get_promotion_field_headers(): + client = PromotionsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = promotions.GetPromotionRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_promotion), "__call__") as call: + call.return_value = promotions.Promotion() + client.get_promotion(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_promotion_field_headers_async(): + client = PromotionsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = promotions.GetPromotionRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.get_promotion), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + promotions.Promotion() + ) + await client.get_promotion(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_get_promotion_flattened(): + client = PromotionsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_promotion), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = promotions.Promotion() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_promotion( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_get_promotion_flattened_error(): + client = PromotionsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_promotion( + promotions.GetPromotionRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_get_promotion_flattened_async(): + client = PromotionsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_promotion), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = promotions.Promotion() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + promotions.Promotion() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_promotion( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_get_promotion_flattened_error_async(): + client = PromotionsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.get_promotion( + promotions.GetPromotionRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + promotions.ListPromotionsRequest, + dict, + ], +) +def test_list_promotions(request_type, transport: str = "grpc"): + client = PromotionsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.list_promotions), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = promotions.ListPromotionsResponse( + next_page_token="next_page_token_value", + ) + response = client.list_promotions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = promotions.ListPromotionsRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListPromotionsPager) + assert response.next_page_token == "next_page_token_value" + + +def test_list_promotions_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = PromotionsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_promotions), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.list_promotions() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == promotions.ListPromotionsRequest() + + +def test_list_promotions_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = PromotionsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = promotions.ListPromotionsRequest( + parent="parent_value", + page_token="page_token_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_promotions), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.list_promotions(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == promotions.ListPromotionsRequest( + parent="parent_value", + page_token="page_token_value", + ) + + +def test_list_promotions_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = PromotionsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_promotions in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.list_promotions] = mock_rpc + request = {} + client.list_promotions(request) + + # Establish that the underlying gRPC stub method was called. 
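+ # The wrapped RPC was built once at client construction; the repeat call below
+ # must not cause wrap_method to run again, only the mock's call count grows.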
+ assert mock_rpc.call_count == 1 + + client.list_promotions(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_list_promotions_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = PromotionsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_promotions), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + promotions.ListPromotionsResponse( + next_page_token="next_page_token_value", + ) + ) + response = await client.list_promotions() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == promotions.ListPromotionsRequest() + + +@pytest.mark.asyncio +async def test_list_promotions_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = PromotionsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.list_promotions + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[ + client._client._transport.list_promotions + ] = mock_object + + request = {} + await client.list_promotions(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + await client.list_promotions(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + +@pytest.mark.asyncio +async def test_list_promotions_async( + transport: str = "grpc_asyncio", request_type=promotions.ListPromotionsRequest +): + client = PromotionsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_promotions), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + promotions.ListPromotionsResponse( + next_page_token="next_page_token_value", + ) + ) + response = await client.list_promotions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = promotions.ListPromotionsRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListPromotionsAsyncPager) + assert response.next_page_token == "next_page_token_value" + + +@pytest.mark.asyncio +async def test_list_promotions_async_from_dict(): + await test_list_promotions_async(request_type=dict) + + +def test_list_promotions_field_headers(): + client = PromotionsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = promotions.ListPromotionsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_promotions), "__call__") as call: + call.return_value = promotions.ListPromotionsResponse() + client.list_promotions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_promotions_field_headers_async(): + client = PromotionsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = promotions.ListPromotionsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_promotions), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + promotions.ListPromotionsResponse() + ) + await client.list_promotions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_list_promotions_flattened(): + client = PromotionsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_promotions), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = promotions.ListPromotionsResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_promotions( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +def test_list_promotions_flattened_error(): + client = PromotionsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
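+ # Supplying both a request message and flattened keyword fields is ambiguous,
+ # so the client should raise ValueError without attempting an RPC.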
+ with pytest.raises(ValueError): + client.list_promotions( + promotions.ListPromotionsRequest(), + parent="parent_value", + ) + + +@pytest.mark.asyncio +async def test_list_promotions_flattened_async(): + client = PromotionsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_promotions), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = promotions.ListPromotionsResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + promotions.ListPromotionsResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_promotions( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_list_promotions_flattened_error_async(): + client = PromotionsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.list_promotions( + promotions.ListPromotionsRequest(), + parent="parent_value", + ) + + +def test_list_promotions_pager(transport_name: str = "grpc"): + client = PromotionsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_promotions), "__call__") as call: + # Set the response to a series of pages. + call.side_effect = ( + promotions.ListPromotionsResponse( + promotions=[ + promotions.Promotion(), + promotions.Promotion(), + promotions.Promotion(), + ], + next_page_token="abc", + ), + promotions.ListPromotionsResponse( + promotions=[], + next_page_token="def", + ), + promotions.ListPromotionsResponse( + promotions=[ + promotions.Promotion(), + ], + next_page_token="ghi", + ), + promotions.ListPromotionsResponse( + promotions=[ + promotions.Promotion(), + promotions.Promotion(), + ], + ), + RuntimeError, + ) + + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + ) + pager = client.list_promotions(request={}) + + assert pager._metadata == expected_metadata + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, promotions.Promotion) for i in results) + + +def test_list_promotions_pages(transport_name: str = "grpc"): + client = PromotionsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_promotions), "__call__") as call: + # Set the response to a series of pages. 
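+ # Four fake pages are queued, followed by RuntimeError as a guard that fails
+ # the test if the pager were to request more pages than expected.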
+ call.side_effect = ( + promotions.ListPromotionsResponse( + promotions=[ + promotions.Promotion(), + promotions.Promotion(), + promotions.Promotion(), + ], + next_page_token="abc", + ), + promotions.ListPromotionsResponse( + promotions=[], + next_page_token="def", + ), + promotions.ListPromotionsResponse( + promotions=[ + promotions.Promotion(), + ], + next_page_token="ghi", + ), + promotions.ListPromotionsResponse( + promotions=[ + promotions.Promotion(), + promotions.Promotion(), + ], + ), + RuntimeError, + ) + pages = list(client.list_promotions(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_list_promotions_async_pager(): + client = PromotionsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_promotions), "__call__", new_callable=mock.AsyncMock + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + promotions.ListPromotionsResponse( + promotions=[ + promotions.Promotion(), + promotions.Promotion(), + promotions.Promotion(), + ], + next_page_token="abc", + ), + promotions.ListPromotionsResponse( + promotions=[], + next_page_token="def", + ), + promotions.ListPromotionsResponse( + promotions=[ + promotions.Promotion(), + ], + next_page_token="ghi", + ), + promotions.ListPromotionsResponse( + promotions=[ + promotions.Promotion(), + promotions.Promotion(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_promotions( + request={}, + ) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, promotions.Promotion) for i in responses) + + +@pytest.mark.asyncio +async def test_list_promotions_async_pages(): + client = PromotionsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_promotions), "__call__", new_callable=mock.AsyncMock + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + promotions.ListPromotionsResponse( + promotions=[ + promotions.Promotion(), + promotions.Promotion(), + promotions.Promotion(), + ], + next_page_token="abc", + ), + promotions.ListPromotionsResponse( + promotions=[], + next_page_token="def", + ), + promotions.ListPromotionsResponse( + promotions=[ + promotions.Promotion(), + ], + next_page_token="ghi", + ), + promotions.ListPromotionsResponse( + promotions=[ + promotions.Promotion(), + promotions.Promotion(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_promotions(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + promotions.InsertPromotionRequest, + dict, + ], +) +def test_insert_promotion_rest(request_type): + client = PromotionsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "accounts/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = promotions.Promotion( + name="name_value", + promotion_id="promotion_id_value", + content_language="content_language_value", + target_country="target_country_value", + redemption_channel=[promotions_common.RedemptionChannel.IN_STORE], + data_source="data_source_value", + version_number=1518, + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = promotions.Promotion.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.insert_promotion(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, promotions.Promotion) + assert response.name == "name_value" + assert response.promotion_id == "promotion_id_value" + assert response.content_language == "content_language_value" + assert response.target_country == "target_country_value" + assert response.redemption_channel == [promotions_common.RedemptionChannel.IN_STORE] + assert response.data_source == "data_source_value" + assert response.version_number == 1518 + + +def test_insert_promotion_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = PromotionsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.insert_promotion in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.insert_promotion + ] = mock_rpc + + request = {} + client.insert_promotion(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.insert_promotion(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_insert_promotion_rest_required_fields( + request_type=promotions.InsertPromotionRequest, +): + transport_class = transports.PromotionsServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request_init["data_source"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).insert_promotion._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + jsonified_request["dataSource"] = "data_source_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).insert_promotion._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + assert "dataSource" in jsonified_request + assert jsonified_request["dataSource"] == "data_source_value" + + client = PromotionsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = promotions.Promotion() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = promotions.Promotion.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.insert_promotion(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_insert_promotion_rest_unset_required_fields(): + transport = transports.PromotionsServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.insert_promotion._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(()) + & set( + ( + "parent", + "promotion", + "dataSource", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_insert_promotion_rest_interceptors(null_interceptor): + transport = transports.PromotionsServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.PromotionsServiceRestInterceptor(), + ) + client = PromotionsServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.PromotionsServiceRestInterceptor, "post_insert_promotion" + ) as post, mock.patch.object( + transports.PromotionsServiceRestInterceptor, "pre_insert_promotion" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = promotions.InsertPromotionRequest.pb( + promotions.InsertPromotionRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = promotions.Promotion.to_json(promotions.Promotion()) + + request = promotions.InsertPromotionRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = promotions.Promotion() + + client.insert_promotion( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_insert_promotion_rest_bad_request( + transport: str = "rest", request_type=promotions.InsertPromotionRequest +): + client = PromotionsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "accounts/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.insert_promotion(request) + + +def test_insert_promotion_rest_error(): + client = PromotionsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + promotions.GetPromotionRequest, + dict, + ], +) +def test_get_promotion_rest(request_type): + client = PromotionsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "accounts/sample1/promotions/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = promotions.Promotion( + name="name_value", + promotion_id="promotion_id_value", + content_language="content_language_value", + target_country="target_country_value", + redemption_channel=[promotions_common.RedemptionChannel.IN_STORE], + data_source="data_source_value", + version_number=1518, + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = promotions.Promotion.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_promotion(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, promotions.Promotion) + assert response.name == "name_value" + assert response.promotion_id == "promotion_id_value" + assert response.content_language == "content_language_value" + assert response.target_country == "target_country_value" + assert response.redemption_channel == [promotions_common.RedemptionChannel.IN_STORE] + assert response.data_source == "data_source_value" + assert response.version_number == 1518 + + +def test_get_promotion_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = PromotionsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_promotion in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get_promotion] = mock_rpc + + request = {} + client.get_promotion(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get_promotion(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_get_promotion_rest_required_fields( + request_type=promotions.GetPromotionRequest, +): + transport_class = transports.PromotionsServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_promotion._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_promotion._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = PromotionsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = promotions.Promotion() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
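+ # GetPromotion transcodes to a plain GET with no request body, so the faked transcode result below carries the request solely in query_params.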
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = promotions.Promotion.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_promotion(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_promotion_rest_unset_required_fields(): + transport = transports.PromotionsServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_promotion._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_promotion_rest_interceptors(null_interceptor): + transport = transports.PromotionsServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.PromotionsServiceRestInterceptor(), + ) + client = PromotionsServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.PromotionsServiceRestInterceptor, "post_get_promotion" + ) as post, mock.patch.object( + transports.PromotionsServiceRestInterceptor, "pre_get_promotion" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = promotions.GetPromotionRequest.pb(promotions.GetPromotionRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = promotions.Promotion.to_json(promotions.Promotion()) + + request = promotions.GetPromotionRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = promotions.Promotion() + + client.get_promotion( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_promotion_rest_bad_request( + transport: str = "rest", request_type=promotions.GetPromotionRequest +): + client = PromotionsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "accounts/sample1/promotions/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_promotion(request) + + +def test_get_promotion_rest_flattened(): + client = PromotionsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = promotions.Promotion() + + # get arguments that satisfy an http rule for this method + sample_request = {"name": "accounts/sample1/promotions/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = promotions.Promotion.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_promotion(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/promotions/v1beta/{name=accounts/*/promotions/*}" + % client.transport._host, + args[1], + ) + + +def test_get_promotion_rest_flattened_error(transport: str = "rest"): + client = PromotionsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_promotion( + promotions.GetPromotionRequest(), + name="name_value", + ) + + +def test_get_promotion_rest_error(): + client = PromotionsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + promotions.ListPromotionsRequest, + dict, + ], +) +def test_list_promotions_rest(request_type): + client = PromotionsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "accounts/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = promotions.ListPromotionsResponse( + next_page_token="next_page_token_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = promotions.ListPromotionsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_promotions(request) + + # Establish that the response is the type that we expect. 
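+ # list_promotions returns a ListPromotionsPager; the pager proxies attributes such as next_page_token through to the underlying ListPromotionsResponse, which is what the assertions below rely on.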
+ assert isinstance(response, pagers.ListPromotionsPager) + assert response.next_page_token == "next_page_token_value" + + +def test_list_promotions_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = PromotionsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_promotions in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.list_promotions] = mock_rpc + + request = {} + client.list_promotions(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_promotions(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_list_promotions_rest_required_fields( + request_type=promotions.ListPromotionsRequest, +): + transport_class = transports.PromotionsServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_promotions._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_promotions._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "page_size", + "page_token", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = PromotionsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = promotions.ListPromotionsResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = promotions.ListPromotionsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_promotions(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_promotions_rest_unset_required_fields(): + transport = transports.PromotionsServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.list_promotions._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_promotions_rest_interceptors(null_interceptor): + transport = transports.PromotionsServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.PromotionsServiceRestInterceptor(), + ) + client = PromotionsServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.PromotionsServiceRestInterceptor, "post_list_promotions" + ) as post, mock.patch.object( + transports.PromotionsServiceRestInterceptor, "pre_list_promotions" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = promotions.ListPromotionsRequest.pb( + promotions.ListPromotionsRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = promotions.ListPromotionsResponse.to_json( + promotions.ListPromotionsResponse() + ) + + request = promotions.ListPromotionsRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = promotions.ListPromotionsResponse() + + client.list_promotions( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_promotions_rest_bad_request( + transport: str = "rest", request_type=promotions.ListPromotionsRequest +): + client = PromotionsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "accounts/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_promotions(request) + + +def test_list_promotions_rest_flattened(): + client = PromotionsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = promotions.ListPromotionsResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "accounts/sample1"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = promotions.ListPromotionsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.list_promotions(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/promotions/v1beta/{parent=accounts/*}/promotions" + % client.transport._host, + args[1], + ) + + +def test_list_promotions_rest_flattened_error(transport: str = "rest"): + client = PromotionsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_promotions( + promotions.ListPromotionsRequest(), + parent="parent_value", + ) + + +def test_list_promotions_rest_pager(transport: str = "rest"): + client = PromotionsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
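+ # The REST pager issues one HTTP request per page, so a separate mocked Response is queued below for every page, and the page sequence is doubled to cover both full iterations of the results.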
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + promotions.ListPromotionsResponse( + promotions=[ + promotions.Promotion(), + promotions.Promotion(), + promotions.Promotion(), + ], + next_page_token="abc", + ), + promotions.ListPromotionsResponse( + promotions=[], + next_page_token="def", + ), + promotions.ListPromotionsResponse( + promotions=[ + promotions.Promotion(), + ], + next_page_token="ghi", + ), + promotions.ListPromotionsResponse( + promotions=[ + promotions.Promotion(), + promotions.Promotion(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(promotions.ListPromotionsResponse.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "accounts/sample1"} + + pager = client.list_promotions(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, promotions.Promotion) for i in results) + + pages = list(client.list_promotions(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.PromotionsServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = PromotionsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.PromotionsServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = PromotionsServiceClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. + transport = transports.PromotionsServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = PromotionsServiceClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = PromotionsServiceClient( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.PromotionsServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = PromotionsServiceClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. 
+ transport = transports.PromotionsServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = PromotionsServiceClient(transport=transport) + assert client.transport is transport + + +def test_transport_get_channel(): + # A client may be instantiated with a custom transport instance. + transport = transports.PromotionsServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + transport = transports.PromotionsServiceGrpcAsyncIOTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.PromotionsServiceGrpcTransport, + transports.PromotionsServiceGrpcAsyncIOTransport, + transports.PromotionsServiceRestTransport, + ], +) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "rest", + ], +) +def test_transport_kind(transport_name): + transport = PromotionsServiceClient.get_transport_class(transport_name)( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert transport.kind == transport_name + + +def test_transport_grpc_default(): + # A client should use the gRPC transport by default. + client = PromotionsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert isinstance( + client.transport, + transports.PromotionsServiceGrpcTransport, + ) + + +def test_promotions_service_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.PromotionsServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json", + ) + + +def test_promotions_service_base_transport(): + # Instantiate the base transport. + with mock.patch( + "google.shopping.merchant_promotions_v1beta.services.promotions_service.transports.PromotionsServiceTransport.__init__" + ) as Transport: + Transport.return_value = None + transport = transports.PromotionsServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. 
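+ # The abstract base transport only defines the RPC surface; the concrete gRPC and REST transports are expected to override each of the methods listed below.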
+ methods = ( + "insert_promotion", + "get_promotion", + "list_promotions", + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + with pytest.raises(NotImplementedError): + transport.close() + + # Catch all for all remaining methods and properties + remainder = [ + "kind", + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + + +def test_promotions_service_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch( + "google.shopping.merchant_promotions_v1beta.services.promotions_service.transports.PromotionsServiceTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.PromotionsServiceTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with( + "credentials.json", + scopes=None, + default_scopes=("https://www.googleapis.com/auth/content",), + quota_project_id="octopus", + ) + + +def test_promotions_service_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. + with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( + "google.shopping.merchant_promotions_v1beta.services.promotions_service.transports.PromotionsServiceTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.PromotionsServiceTransport() + adc.assert_called_once() + + +def test_promotions_service_auth_adc(): + # If no credentials are provided, we should use ADC credentials. + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + PromotionsServiceClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=("https://www.googleapis.com/auth/content",), + quota_project_id=None, + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.PromotionsServiceGrpcTransport, + transports.PromotionsServiceGrpcAsyncIOTransport, + ], +) +def test_promotions_service_transport_auth_adc(transport_class): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
+ with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + adc.assert_called_once_with( + scopes=["1", "2"], + default_scopes=("https://www.googleapis.com/auth/content",), + quota_project_id="octopus", + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.PromotionsServiceGrpcTransport, + transports.PromotionsServiceGrpcAsyncIOTransport, + transports.PromotionsServiceRestTransport, + ], +) +def test_promotions_service_transport_auth_gdch_credentials(transport_class): + host = "https://language.com" + api_audience_tests = [None, "https://language2.com"] + api_audience_expect = [host, "https://language2.com"] + for t, e in zip(api_audience_tests, api_audience_expect): + with mock.patch.object(google.auth, "default", autospec=True) as adc: + gdch_mock = mock.MagicMock() + type(gdch_mock).with_gdch_audience = mock.PropertyMock( + return_value=gdch_mock + ) + adc.return_value = (gdch_mock, None) + transport_class(host=host, api_audience=t) + gdch_mock.with_gdch_audience.assert_called_once_with(e) + + +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.PromotionsServiceGrpcTransport, grpc_helpers), + (transports.PromotionsServiceGrpcAsyncIOTransport, grpc_helpers_async), + ], +) +def test_promotions_service_transport_create_channel(transport_class, grpc_helpers): + # If credentials and host are not provided, the transport class should use + # ADC credentials. + with mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + + create_channel.assert_called_with( + "merchantapi.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + default_scopes=("https://www.googleapis.com/auth/content",), + scopes=["1", "2"], + default_host="merchantapi.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.PromotionsServiceGrpcTransport, + transports.PromotionsServiceGrpcAsyncIOTransport, + ], +) +def test_promotions_service_grpc_transport_client_cert_source_for_mtls(transport_class): + cred = ga_credentials.AnonymousCredentials() + + # Check ssl_channel_credentials is used if provided. 
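+ # An explicitly supplied ssl_channel_credentials object is expected to take precedence; the fallback to client_cert_source_for_mtls is exercised in the second block of this test.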
+ with mock.patch.object(transport_class, "create_channel") as mock_create_channel: + mock_ssl_channel_creds = mock.Mock() + transport_class( + host="squid.clam.whelk", + credentials=cred, + ssl_channel_credentials=mock_ssl_channel_creds, + ) + mock_create_channel.assert_called_once_with( + "squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_channel_creds, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls + # is used. + with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): + with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: + transport_class( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback, + ) + expected_cert, expected_key = client_cert_source_callback() + mock_ssl_cred.assert_called_once_with( + certificate_chain=expected_cert, private_key=expected_key + ) + + +def test_promotions_service_http_transport_client_cert_source_for_mtls(): + cred = ga_credentials.AnonymousCredentials() + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ) as mock_configure_mtls_channel: + transports.PromotionsServiceRestTransport( + credentials=cred, client_cert_source_for_mtls=client_cert_source_callback + ) + mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + "rest", + ], +) +def test_promotions_service_host_no_port(transport_name): + client = PromotionsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="merchantapi.googleapis.com" + ), + transport=transport_name, + ) + assert client.transport._host == ( + "merchantapi.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://merchantapi.googleapis.com" + ) + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + "rest", + ], +) +def test_promotions_service_host_with_port(transport_name): + client = PromotionsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="merchantapi.googleapis.com:8000" + ), + transport=transport_name, + ) + assert client.transport._host == ( + "merchantapi.googleapis.com:8000" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://merchantapi.googleapis.com:8000" + ) + + +@pytest.mark.parametrize( + "transport_name", + [ + "rest", + ], +) +def test_promotions_service_client_transport_session_collision(transport_name): + creds1 = ga_credentials.AnonymousCredentials() + creds2 = ga_credentials.AnonymousCredentials() + client1 = PromotionsServiceClient( + credentials=creds1, + transport=transport_name, + ) + client2 = PromotionsServiceClient( + credentials=creds2, + transport=transport_name, + ) + session1 = client1.transport.insert_promotion._session + session2 = client2.transport.insert_promotion._session + assert session1 != session2 + session1 = client1.transport.get_promotion._session + session2 = client2.transport.get_promotion._session + assert session1 != session2 + session1 = client1.transport.list_promotions._session + session2 = 
client2.transport.list_promotions._session + assert session1 != session2 + + +def test_promotions_service_grpc_transport_channel(): + channel = grpc.secure_channel("http://localhost/", grpc.local_channel_credentials()) + + # Check that channel is used if provided. + transport = transports.PromotionsServiceGrpcTransport( + host="squid.clam.whelk", + channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None + + +def test_promotions_service_grpc_asyncio_transport_channel(): + channel = aio.secure_channel("http://localhost/", grpc.local_channel_credentials()) + + # Check that channel is used if provided. + transport = transports.PromotionsServiceGrpcAsyncIOTransport( + host="squid.clam.whelk", + channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. +@pytest.mark.parametrize( + "transport_class", + [ + transports.PromotionsServiceGrpcTransport, + transports.PromotionsServiceGrpcAsyncIOTransport, + ], +) +def test_promotions_service_transport_channel_mtls_with_client_cert_source( + transport_class, +): + with mock.patch( + "grpc.ssl_channel_credentials", autospec=True + ) as grpc_ssl_channel_cred: + with mock.patch.object( + transport_class, "create_channel" + ) as grpc_create_channel: + mock_ssl_cred = mock.Mock() + grpc_ssl_channel_cred.return_value = mock_ssl_cred + + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + + cred = ga_credentials.AnonymousCredentials() + with pytest.warns(DeprecationWarning): + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (cred, None) + transport = transport_class( + host="squid.clam.whelk", + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=client_cert_source_callback, + ) + adc.assert_called_once() + + grpc_ssl_channel_cred.assert_called_once_with( + certificate_chain=b"cert bytes", private_key=b"key bytes" + ) + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + assert transport._ssl_channel_credentials == mock_ssl_cred + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. 
+@pytest.mark.parametrize( + "transport_class", + [ + transports.PromotionsServiceGrpcTransport, + transports.PromotionsServiceGrpcAsyncIOTransport, + ], +) +def test_promotions_service_transport_channel_mtls_with_adc(transport_class): + mock_ssl_cred = mock.Mock() + with mock.patch.multiple( + "google.auth.transport.grpc.SslCredentials", + __init__=mock.Mock(return_value=None), + ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), + ): + with mock.patch.object( + transport_class, "create_channel" + ) as grpc_create_channel: + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + mock_cred = mock.Mock() + + with pytest.warns(DeprecationWarning): + transport = transport_class( + host="squid.clam.whelk", + credentials=mock_cred, + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=None, + ) + + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=mock_cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + + +def test_promotion_path(): + account = "squid" + promotion = "clam" + expected = "accounts/{account}/promotions/{promotion}".format( + account=account, + promotion=promotion, + ) + actual = PromotionsServiceClient.promotion_path(account, promotion) + assert expected == actual + + +def test_parse_promotion_path(): + expected = { + "account": "whelk", + "promotion": "octopus", + } + path = PromotionsServiceClient.promotion_path(**expected) + + # Check that the path construction is reversible. + actual = PromotionsServiceClient.parse_promotion_path(path) + assert expected == actual + + +def test_common_billing_account_path(): + billing_account = "oyster" + expected = "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + actual = PromotionsServiceClient.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "nudibranch", + } + path = PromotionsServiceClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. + actual = PromotionsServiceClient.parse_common_billing_account_path(path) + assert expected == actual + + +def test_common_folder_path(): + folder = "cuttlefish" + expected = "folders/{folder}".format( + folder=folder, + ) + actual = PromotionsServiceClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "mussel", + } + path = PromotionsServiceClient.common_folder_path(**expected) + + # Check that the path construction is reversible. + actual = PromotionsServiceClient.parse_common_folder_path(path) + assert expected == actual + + +def test_common_organization_path(): + organization = "winkle" + expected = "organizations/{organization}".format( + organization=organization, + ) + actual = PromotionsServiceClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "nautilus", + } + path = PromotionsServiceClient.common_organization_path(**expected) + + # Check that the path construction is reversible. 
+ actual = PromotionsServiceClient.parse_common_organization_path(path) + assert expected == actual + + +def test_common_project_path(): + project = "scallop" + expected = "projects/{project}".format( + project=project, + ) + actual = PromotionsServiceClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "abalone", + } + path = PromotionsServiceClient.common_project_path(**expected) + + # Check that the path construction is reversible. + actual = PromotionsServiceClient.parse_common_project_path(path) + assert expected == actual + + +def test_common_location_path(): + project = "squid" + location = "clam" + expected = "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) + actual = PromotionsServiceClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "whelk", + "location": "octopus", + } + path = PromotionsServiceClient.common_location_path(**expected) + + # Check that the path construction is reversible. + actual = PromotionsServiceClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object( + transports.PromotionsServiceTransport, "_prep_wrapped_messages" + ) as prep: + client = PromotionsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object( + transports.PromotionsServiceTransport, "_prep_wrapped_messages" + ) as prep: + transport_class = PromotionsServiceClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + +@pytest.mark.asyncio +async def test_transport_close_async(): + client = PromotionsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + with mock.patch.object( + type(getattr(client.transport, "grpc_channel")), "close" + ) as close: + async with client: + close.assert_not_called() + close.assert_called_once() + + +def test_transport_close(): + transports = { + "rest": "_session", + "grpc": "_grpc_channel", + } + + for transport, close_name in transports.items(): + client = PromotionsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + with mock.patch.object( + type(getattr(client.transport, close_name)), "close" + ) as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +def test_client_ctx(): + transports = [ + "rest", + "grpc", + ] + for transport in transports: + client = PromotionsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + # Test client calls underlying transport. 
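+ # Using the client as a context manager should leave the transport open for the duration of the block and close it exactly once on exit.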
+ with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() + + +@pytest.mark.parametrize( + "client_class,transport_class", + [ + (PromotionsServiceClient, transports.PromotionsServiceGrpcTransport), + ( + PromotionsServiceAsyncClient, + transports.PromotionsServiceGrpcAsyncIOTransport, + ), + ], +) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) diff --git a/packages/google-shopping-merchant-quota/tests/unit/gapic/merchant_quota_v1beta/test_quota_service.py b/packages/google-shopping-merchant-quota/tests/unit/gapic/merchant_quota_v1beta/test_quota_service.py index 4d5e38e0293b..fc750f21b01a 100644 --- a/packages/google-shopping-merchant-quota/tests/unit/gapic/merchant_quota_v1beta/test_quota_service.py +++ b/packages/google-shopping-merchant-quota/tests/unit/gapic/merchant_quota_v1beta/test_quota_service.py @@ -1523,13 +1523,13 @@ def test_list_quota_groups_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_quota_groups(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 diff --git a/packages/google-shopping-merchant-reports/CHANGELOG.md b/packages/google-shopping-merchant-reports/CHANGELOG.md index 822fb3ca6378..84d90537bc85 100644 --- a/packages/google-shopping-merchant-reports/CHANGELOG.md +++ b/packages/google-shopping-merchant-reports/CHANGELOG.md @@ -1,5 +1,14 @@ # Changelog +## [0.1.7](https://github.com/googleapis/google-cloud-python/compare/google-shopping-merchant-reports-v0.1.6...google-shopping-merchant-reports-v0.1.7) (2024-05-27) + + +### Features + +* add a new enum `Effectiveness` ([0da7370](https://github.com/googleapis/google-cloud-python/commit/0da7370d5d21557bb0c04b8c9b1c46c9a583ad1d)) +* add a new field `effectiveness` to message `.google.shopping.merchant.reports.v1beta.PriceInsightsProductView` ([0da7370](https://github.com/googleapis/google-cloud-python/commit/0da7370d5d21557bb0c04b8c9b1c46c9a583ad1d)) +* add non_product_performance_view table to Reports sub-API ([0da7370](https://github.com/googleapis/google-cloud-python/commit/0da7370d5d21557bb0c04b8c9b1c46c9a583ad1d)) + ## 
[0.1.6](https://github.com/googleapis/google-cloud-python/compare/google-shopping-merchant-reports-v0.1.5...google-shopping-merchant-reports-v0.1.6) (2024-04-15) diff --git a/packages/google-shopping-merchant-reports/google/shopping/merchant_reports/__init__.py b/packages/google-shopping-merchant-reports/google/shopping/merchant_reports/__init__.py index fcc16b288b5d..253f2a989bc4 100644 --- a/packages/google-shopping-merchant-reports/google/shopping/merchant_reports/__init__.py +++ b/packages/google-shopping-merchant-reports/google/shopping/merchant_reports/__init__.py @@ -31,6 +31,7 @@ CompetitiveVisibilityCompetitorView, CompetitiveVisibilityTopMerchantView, MarketingMethod, + NonProductPerformanceView, PriceCompetitivenessProductView, PriceInsightsProductView, ProductPerformanceView, @@ -53,6 +54,7 @@ "CompetitiveVisibilityCompetitorView", "CompetitiveVisibilityTopMerchantView", "MarketingMethod", + "NonProductPerformanceView", "PriceCompetitivenessProductView", "PriceInsightsProductView", "ProductPerformanceView", diff --git a/packages/google-shopping-merchant-reports/google/shopping/merchant_reports_v1beta/__init__.py b/packages/google-shopping-merchant-reports/google/shopping/merchant_reports_v1beta/__init__.py index 062bf4dc42e6..fde3aaf91244 100644 --- a/packages/google-shopping-merchant-reports/google/shopping/merchant_reports_v1beta/__init__.py +++ b/packages/google-shopping-merchant-reports/google/shopping/merchant_reports_v1beta/__init__.py @@ -26,6 +26,7 @@ CompetitiveVisibilityCompetitorView, CompetitiveVisibilityTopMerchantView, MarketingMethod, + NonProductPerformanceView, PriceCompetitivenessProductView, PriceInsightsProductView, ProductPerformanceView, @@ -47,6 +48,7 @@ "CompetitiveVisibilityCompetitorView", "CompetitiveVisibilityTopMerchantView", "MarketingMethod", + "NonProductPerformanceView", "PriceCompetitivenessProductView", "PriceInsightsProductView", "ProductPerformanceView", diff --git a/packages/google-shopping-merchant-reports/google/shopping/merchant_reports_v1beta/types/__init__.py b/packages/google-shopping-merchant-reports/google/shopping/merchant_reports_v1beta/types/__init__.py index 0315bd1b3df5..64015432c659 100644 --- a/packages/google-shopping-merchant-reports/google/shopping/merchant_reports_v1beta/types/__init__.py +++ b/packages/google-shopping-merchant-reports/google/shopping/merchant_reports_v1beta/types/__init__.py @@ -20,6 +20,7 @@ CompetitiveVisibilityCompetitorView, CompetitiveVisibilityTopMerchantView, MarketingMethod, + NonProductPerformanceView, PriceCompetitivenessProductView, PriceInsightsProductView, ProductPerformanceView, @@ -40,6 +41,7 @@ "CompetitiveVisibilityCompetitorView", "CompetitiveVisibilityTopMerchantView", "MarketingMethod", + "NonProductPerformanceView", "PriceCompetitivenessProductView", "PriceInsightsProductView", "ProductPerformanceView", diff --git a/packages/google-shopping-merchant-reports/google/shopping/merchant_reports_v1beta/types/reports.py b/packages/google-shopping-merchant-reports/google/shopping/merchant_reports_v1beta/types/reports.py index 0476d1003ce6..ab35339ba41f 100644 --- a/packages/google-shopping-merchant-reports/google/shopping/merchant_reports_v1beta/types/reports.py +++ b/packages/google-shopping-merchant-reports/google/shopping/merchant_reports_v1beta/types/reports.py @@ -34,6 +34,7 @@ "PriceInsightsProductView", "BestSellersProductClusterView", "BestSellersBrandView", + "NonProductPerformanceView", "CompetitiveVisibilityCompetitorView", 
"CompetitiveVisibilityTopMerchantView", "CompetitiveVisibilityBenchmarkView", @@ -127,6 +128,9 @@ class ReportRow(proto.Message): product_performance_view (google.shopping.merchant_reports_v1beta.types.ProductPerformanceView): Fields available for query in ``product_performance_view`` table. + non_product_performance_view (google.shopping.merchant_reports_v1beta.types.NonProductPerformanceView): + Fields available for query in + ``non_product_performance_view`` table. product_view (google.shopping.merchant_reports_v1beta.types.ProductView): Fields available for query in ``product_view`` table. price_competitiveness_product_view (google.shopping.merchant_reports_v1beta.types.PriceCompetitivenessProductView): @@ -157,6 +161,11 @@ class ReportRow(proto.Message): number=1, message="ProductPerformanceView", ) + non_product_performance_view: "NonProductPerformanceView" = proto.Field( + proto.MESSAGE, + number=7, + message="NonProductPerformanceView", + ) product_view: "ProductView" = proto.Field( proto.MESSAGE, number=2, @@ -643,7 +652,7 @@ class ProductView(proto.Message): shipping_label (str): Normalized `shipping label `__ - specified in the feed. + specified in the data source. This field is a member of `oneof`_ ``_shipping_label``. gtin (MutableSequence[str]): @@ -694,7 +703,7 @@ class AggregatedReportingContextStatus(proto.Enum): Here's an example of how the aggregated status is computed: - Free listings \| Shopping Ads \| Status + Free listings \| Shopping ads \| Status --------------|--------------|------------------------------ Approved \| Approved \| ELIGIBLE Approved \| Pending \| ELIGIBLE Approved \| Disapproved \| ELIGIBLE_LIMITED Pending \| Pending \| @@ -1368,8 +1377,36 @@ class PriceInsightsProductView(proto.Message): 5% predicted increase in conversions). This field is a member of `oneof`_ ``_predicted_conversions_change_fraction``. + effectiveness (google.shopping.merchant_reports_v1beta.types.PriceInsightsProductView.Effectiveness): + The predicted effectiveness of applying the + price suggestion, bucketed. """ + class Effectiveness(proto.Enum): + r"""Predicted effectiveness bucket. + + Effectiveness indicates which products would benefit most from price + changes. This rating takes into consideration the performance boost + predicted by adjusting the sale price and the difference between + your current price and the suggested price. Price suggestions with + ``HIGH`` effectiveness are predicted to drive the largest increase + in performance. + + Values: + EFFECTIVENESS_UNSPECIFIED (0): + Effectiveness is unknown. + LOW (1): + Effectiveness is low. + MEDIUM (2): + Effectiveness is medium. + HIGH (3): + Effectiveness is high. + """ + EFFECTIVENESS_UNSPECIFIED = 0 + LOW = 1 + MEDIUM = 2 + HIGH = 3 + id: str = proto.Field( proto.STRING, number=1, @@ -1465,6 +1502,11 @@ class PriceInsightsProductView(proto.Message): number=19, optional=True, ) + effectiveness: Effectiveness = proto.Field( + proto.ENUM, + number=22, + enum=Effectiveness, + ) class BestSellersProductClusterView(proto.Message): @@ -1563,9 +1605,9 @@ class BestSellersProductClusterView(proto.Message): cluster. inventory_status (google.shopping.merchant_reports_v1beta.types.BestSellersProductClusterView.InventoryStatus): Whether the product cluster is ``IN_STOCK`` in your product - feed in at least one of the countries, ``OUT_OF_STOCK`` in - your product feed in all countries, or ``NOT_IN_INVENTORY`` - at all. 
+ data source in at least one of the countries, + ``OUT_OF_STOCK`` in your product data source in all + countries, or ``NOT_IN_INVENTORY`` at all. The field doesn't take the Best sellers report country filter into account. @@ -1573,9 +1615,10 @@ class BestSellersProductClusterView(proto.Message): This field is a member of `oneof`_ ``_inventory_status``. brand_inventory_status (google.shopping.merchant_reports_v1beta.types.BestSellersProductClusterView.InventoryStatus): Whether there is at least one product of the brand currently - ``IN_STOCK`` in your product feed in at least one of the - countries, all products are ``OUT_OF_STOCK`` in your product - feed in all countries, or ``NOT_IN_INVENTORY``. + ``IN_STOCK`` in your product data source in at least one of + the countries, all products are ``OUT_OF_STOCK`` in your + product data source in all countries, or + ``NOT_IN_INVENTORY``. The field doesn't take the Best sellers report country filter into account. @@ -1882,6 +1925,80 @@ class BestSellersBrandView(proto.Message): ) +class NonProductPerformanceView(proto.Message): + r"""Fields available for query in ``non_product_performance_view`` + table. + + Performance data on images and online store links leading to your + non-product pages. This includes performance metrics (for example, + ``clicks``) and dimensions according to which performance metrics + are segmented (for example, ``date``). + + Segment fields cannot be selected in queries without also selecting + at least one metric field. + + Values are only set for fields requested explicitly in the request's + search query. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + date (google.type.date_pb2.Date): + Date in the merchant timezone to which metrics apply. + Segment. + + Condition on ``date`` is required in the ``WHERE`` clause. + week (google.type.date_pb2.Date): + First day of the week (Monday) of the metrics + date in the merchant timezone. Segment. + clicks (int): + Number of clicks on images and online store + links leading to your non-product pages. Metric. + + This field is a member of `oneof`_ ``_clicks``. + impressions (int): + Number of times images and online store links + leading to your non-product pages were shown. + Metric. + + This field is a member of `oneof`_ ``_impressions``. + click_through_rate (float): + Click-through rate - the number of clicks (``clicks``) + divided by the number of impressions (``impressions``) of + images and online store links leading to your non-product + pages. Metric. + + This field is a member of `oneof`_ ``_click_through_rate``. + """ + + date: date_pb2.Date = proto.Field( + proto.MESSAGE, + number=1, + message=date_pb2.Date, + ) + week: date_pb2.Date = proto.Field( + proto.MESSAGE, + number=2, + message=date_pb2.Date, + ) + clicks: int = proto.Field( + proto.INT64, + number=3, + optional=True, + ) + impressions: int = proto.Field( + proto.INT64, + number=4, + optional=True, + ) + click_through_rate: float = proto.Field( + proto.DOUBLE, + number=5, + optional=True, + ) + + class CompetitiveVisibilityCompetitorView(proto.Message): r"""Fields available for query in ``competitive_visibility_competitor_view`` table. 
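The hunk above adds the `NonProductPerformanceView` message and wires it into `ReportRow` as field number 7. The sketch below is not part of the diff; it only illustrates how the new table might be queried through the generated `ReportServiceClient`/`SearchRequest` surface of `google-shopping-merchant-reports` once this change ships. The account ID, date range, and filter syntax are illustrative assumptions, not values taken from this patch.

```python
# Minimal sketch, assuming the generated GAPIC surface of
# google-shopping-merchant-reports (ReportServiceClient, SearchRequest)
# and a placeholder account ID; the date-filter syntax is an assumption
# and should be checked against the Merchant reporting query language.
from google.shopping import merchant_reports_v1beta


def sample_non_product_performance(account_id: str = "1234") -> None:
    client = merchant_reports_v1beta.ReportServiceClient()

    # Per the docstring above: a condition on ``date`` is required in the
    # WHERE clause, and segment fields (date, week) can only be selected
    # together with at least one metric field (clicks, impressions,
    # click_through_rate).
    request = merchant_reports_v1beta.SearchRequest(
        parent=f"accounts/{account_id}",
        query=(
            "SELECT date, clicks, impressions, click_through_rate "
            "FROM non_product_performance_view "
            "WHERE date BETWEEN '2024-05-01' AND '2024-05-07'"
        ),
    )

    # search() returns a pager; each ReportRow exposes the new
    # non_product_performance_view field added in this change.
    for row in client.search(request=request):
        print(row.non_product_performance_view)
```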
diff --git a/packages/google-shopping-merchant-reports/tests/unit/gapic/merchant_reports_v1beta/test_report_service.py b/packages/google-shopping-merchant-reports/tests/unit/gapic/merchant_reports_v1beta/test_report_service.py index 785334453b67..ec34aaa7841d 100644 --- a/packages/google-shopping-merchant-reports/tests/unit/gapic/merchant_reports_v1beta/test_report_service.py +++ b/packages/google-shopping-merchant-reports/tests/unit/gapic/merchant_reports_v1beta/test_report_service.py @@ -1520,13 +1520,13 @@ def test_search_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.search(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 diff --git a/packages/grafeas/tests/unit/gapic/grafeas_v1/test_grafeas.py b/packages/grafeas/tests/unit/gapic/grafeas_v1/test_grafeas.py index 2b8ab5ef34d7..270553bf5142 100644 --- a/packages/grafeas/tests/unit/gapic/grafeas_v1/test_grafeas.py +++ b/packages/grafeas/tests/unit/gapic/grafeas_v1/test_grafeas.py @@ -889,13 +889,13 @@ def test_list_occurrences_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_occurrences(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -3857,13 +3857,13 @@ def test_list_notes_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_notes(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -6013,13 +6013,13 @@ def test_list_note_occurrences_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("name", ""),)), ) pager = client.list_note_occurrences(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 diff --git a/release-please-config.json b/release-please-config.json index f58500b99c35..12df79457ef3 100644 --- a/release-please-config.json +++ b/release-please-config.json @@ -1203,6 +1203,21 @@ ], "release-type": "python" }, + "packages/google-cloud-developerconnect": { + "bump-minor-pre-major": true, + "bump-patch-for-minor-pre-major": true, + "component": "google-cloud-developerconnect", + "extra-files": [ + "google/cloud/developerconnect/gapic_version.py", + "google/cloud/developerconnect_v1/gapic_version.py", + { + "jsonpath": "$.clientLibrary.version", + "path": "samples/generated_samples/snippet_metadata_google.cloud.developerconnect.v1.json", + "type": "json" + } + ], + "release-type": "python" + }, "packages/google-cloud-dialogflow": { "bump-minor-pre-major": true, "bump-patch-for-minor-pre-major": true, @@ -1476,6 +1491,21 @@ ], "release-type": "python" }, + 
"packages/google-cloud-gdchardwaremanagement": { + "bump-minor-pre-major": true, + "bump-patch-for-minor-pre-major": true, + "component": "google-cloud-gdchardwaremanagement", + "extra-files": [ + "google/cloud/gdchardwaremanagement/gapic_version.py", + "google/cloud/gdchardwaremanagement_v1alpha/gapic_version.py", + { + "jsonpath": "$.clientLibrary.version", + "path": "samples/generated_samples/snippet_metadata_google.cloud.gdchardwaremanagement.v1alpha.json", + "type": "json" + } + ], + "release-type": "python" + }, "packages/google-cloud-gke-backup": { "bump-minor-pre-major": true, "bump-patch-for-minor-pre-major": true, @@ -1721,6 +1751,21 @@ ], "release-type": "python" }, + "packages/google-cloud-managedkafka": { + "bump-minor-pre-major": true, + "bump-patch-for-minor-pre-major": true, + "component": "google-cloud-managedkafka", + "extra-files": [ + "google/cloud/managedkafka/gapic_version.py", + "google/cloud/managedkafka_v1/gapic_version.py", + { + "jsonpath": "$.clientLibrary.version", + "path": "samples/generated_samples/snippet_metadata_google.cloud.managedkafka.v1.json", + "type": "json" + } + ], + "release-type": "python" + }, "packages/google-cloud-media-translation": { "bump-minor-pre-major": true, "bump-patch-for-minor-pre-major": true, @@ -3138,16 +3183,10 @@ "extra-files": [ "google/maps/mapsplatformdatasets/gapic_version.py", "google/maps/mapsplatformdatasets_v1/gapic_version.py", - "google/maps/mapsplatformdatasets_v1alpha/gapic_version.py", { "jsonpath": "$.clientLibrary.version", "path": "samples/generated_samples/snippet_metadata_google.maps.mapsplatformdatasets.v1.json", "type": "json" - }, - { - "jsonpath": "$.clientLibrary.version", - "path": "samples/generated_samples/snippet_metadata_google.maps.mapsplatformdatasets.v1alpha.json", - "type": "json" } ], "release-type": "python" @@ -3227,6 +3266,21 @@ ], "release-type": "python" }, + "packages/google-shopping-merchant-accounts": { + "bump-minor-pre-major": true, + "bump-patch-for-minor-pre-major": true, + "component": "google-shopping-merchant-accounts", + "extra-files": [ + "google/shopping/merchant_accounts/gapic_version.py", + "google/shopping/merchant_accounts_v1beta/gapic_version.py", + { + "jsonpath": "$.clientLibrary.version", + "path": "samples/generated_samples/snippet_metadata_google.shopping.merchant.accounts.v1beta.json", + "type": "json" + } + ], + "release-type": "python" + }, "packages/google-shopping-merchant-conversions": { "bump-minor-pre-major": true, "bump-patch-for-minor-pre-major": true, @@ -3242,6 +3296,21 @@ ], "release-type": "python" }, + "packages/google-shopping-merchant-datasources": { + "bump-minor-pre-major": true, + "bump-patch-for-minor-pre-major": true, + "component": "google-shopping-merchant-datasources", + "extra-files": [ + "google/shopping/merchant_datasources/gapic_version.py", + "google/shopping/merchant_datasources_v1beta/gapic_version.py", + { + "jsonpath": "$.clientLibrary.version", + "path": "samples/generated_samples/snippet_metadata_google.shopping.merchant.datasources.v1beta.json", + "type": "json" + } + ], + "release-type": "python" + }, "packages/google-shopping-merchant-inventories": { "bump-minor-pre-major": true, "bump-patch-for-minor-pre-major": true, @@ -3287,6 +3356,36 @@ ], "release-type": "python" }, + "packages/google-shopping-merchant-products": { + "bump-minor-pre-major": true, + "bump-patch-for-minor-pre-major": true, + "component": "google-shopping-merchant-products", + "extra-files": [ + "google/shopping/merchant_products/gapic_version.py", + 
"google/shopping/merchant_products_v1beta/gapic_version.py", + { + "jsonpath": "$.clientLibrary.version", + "path": "samples/generated_samples/snippet_metadata_google.shopping.merchant.products.v1beta.json", + "type": "json" + } + ], + "release-type": "python" + }, + "packages/google-shopping-merchant-promotions": { + "bump-minor-pre-major": true, + "bump-patch-for-minor-pre-major": true, + "component": "google-shopping-merchant-promotions", + "extra-files": [ + "google/shopping/merchant_promotions/gapic_version.py", + "google/shopping/merchant_promotions_v1beta/gapic_version.py", + { + "jsonpath": "$.clientLibrary.version", + "path": "samples/generated_samples/snippet_metadata_google.shopping.merchant.promotions.v1beta.json", + "type": "json" + } + ], + "release-type": "python" + }, "packages/google-shopping-merchant-quota": { "bump-minor-pre-major": true, "bump-patch-for-minor-pre-major": true, diff --git a/scripts/requirements.txt b/scripts/requirements.txt index 2c24336eb316..62121bf2e035 100644 --- a/scripts/requirements.txt +++ b/scripts/requirements.txt @@ -1 +1 @@ -requests==2.31.0 +requests==2.32.0 diff --git a/scripts/updateapilist.py b/scripts/updateapilist.py index bb497c274a3e..d3f231c05cb8 100644 --- a/scripts/updateapilist.py +++ b/scripts/updateapilist.py @@ -16,9 +16,14 @@ import os import requests +import logging from typing import List, Optional from dataclasses import dataclass +# Configure logging to output messages to console +logging.basicConfig(level=logging.INFO) # Set the desired logging level + +import re class MissingGithubToken(ValueError): """Raised when the GITHUB_TOKEN environment variable is not set""" @@ -57,9 +62,20 @@ class MissingGithubToken(ValueError): # BASE_API defines the base API for Github. BASE_API = "https://api.github.com" +# GITHUB_ISSUES defines the issues URL for a repository on GitHub. +GITHUB_ISSUES = "https://github.com/{repo}/issues" +# BASE_ISSUE_TRACKER defines the base URL for issue tracker. +BASE_ISSUE_TRACKER = "https://issuetracker.google.com" +# This issue-tracker component is part of some saved searches for listing API-side issues. +# However, when we construct URLs for filing new issues (which in some cases we do by analyzing +# the query string for a saved search), we want to ensure we DON'T file a new issue against +# this generic component but against a more specific one. +GENERIC_ISSUE_TRACKER_COMPONENT = "187065" +# This sentinel value is used to mark cache fields that have not been computed yet. 
+NOT_COMPUTED = -1 class CloudClient: repo: str = None @@ -68,6 +84,7 @@ class CloudClient: distribution_name: str = None issue_tracker: str = None + def __init__(self, repo: dict): self.repo = repo["repo"] # For now, strip out "Google Cloud" to standardize the titles @@ -75,6 +92,89 @@ def __init__(self, repo: dict): self.release_level = repo["release_level"] self.distribution_name = repo["distribution_name"] self.issue_tracker = repo.get("issue_tracker") + self._cached_component_id = NOT_COMPUTED + self._cached_template_id = NOT_COMPUTED + self._cached_saved_search_id = NOT_COMPUTED + + @property + def saved_search_id(self): + if self._cached_saved_search_id != NOT_COMPUTED: + return self._cached_saved_search_id + if not self.issue_tracker: + self._cached_saved_search_id = None + else: + match = re.search(r'savedsearches/(\d+)', self.issue_tracker) + self._cached_saved_search_id = match.group(1) if match else None + return self._cached_saved_search_id + + @property + def saved_search_response_text(self): + if not self.saved_search_id: + return None + url = f"{BASE_ISSUE_TRACKER}/action/saved_searches/{self.saved_search_id}" + response = _fetch_response(url) + return response.text if response else None + + @property + def issue_tracker_component_id(self): + if self._cached_component_id != NOT_COMPUTED: + return self._cached_component_id + + # First, check if the issue tracker is a saved search: + query_string = self.saved_search_response_text or self.issue_tracker + if not query_string: + self._cached_component_id = None + else: + # Try to match 'component=' in the query string + query_match = re.search(r'\bcomponent=(\d+)', query_string) + if query_match: + self._cached_component_id = query_match.group(1) + else: + # If not found, try to match 'componentid:' in the query string + query_match = re.findall(r'\bcomponentid:(\d+)', query_string) + for component_id in query_match: + if component_id == GENERIC_ISSUE_TRACKER_COMPONENT: + continue + if self._cached_component_id != NOT_COMPUTED: + self._cached_component_id = None + logging.error(f"More than one component ID found for issue tracker: {self.issue_tracker}") + break + self._cached_component_id = component_id + self._cached_component_id = self._cached_component_id if self._cached_component_id != NOT_COMPUTED else None + return self._cached_component_id + + @property + def issue_tracker_template_id(self): + if self._cached_template_id != NOT_COMPUTED: + return self._cached_template_id + if not self.issue_tracker: + self._cached_template_id = None + else: + match = re.search(r'(?:\?|&)template=(\d+)', self.issue_tracker) + self._cached_template_id = match.group(1) if match else None + return self._cached_template_id + + @property + def show_client_issues(self): + return GITHUB_ISSUES.format(repo=self.repo) + + @property + def file_api_issue(self): + if self.issue_tracker_component_id: + link = f"{BASE_ISSUE_TRACKER}/issues/new?component={self.issue_tracker_component_id}" + if self.issue_tracker_template_id: + link += f"&template={self.issue_tracker_template_id}" + return link + return None + + @property + def show_api_issues(self): + if self.saved_search_id: + # Return the original issue_tracker content, which already links to the saved search. 
+ return self.issue_tracker + elif self.issue_tracker_component_id: + return f"{BASE_ISSUE_TRACKER}/issues?q=componentid:{self.issue_tracker_component_id}" + return None # For sorting, we want to sort by release level, then API pretty_name def __lt__(self, other): @@ -95,15 +195,35 @@ class Extractor: def client_for_repo(self, repo_slug) -> Optional[CloudClient]: path = self.path_format.format(repo_slug=repo_slug) url = f"{RAW_CONTENT_BASE_URL}/{path}/{REPO_METADATA_FILENAME}" - response = requests.get(url) - if response.status_code != requests.codes.ok: - return - - return CloudClient(response.json()) + _, metadata = _fetch_and_parse_response(url) + if not metadata: + return None + return CloudClient(metadata) def get_clients_from_batch_response(self, response_json) -> List[CloudClient]: return [self.client_for_repo(repo[self.response_key]) for repo in response_json if allowed_repo(repo)] +def _fetch_response(url: str, headers:dict = None, params:Optional[dict] = None) -> Optional[requests.Response]: + try: + response = requests.get(url, headers=headers, params=params) + response.raise_for_status() + return response + except requests.RequestException as e: + logging.error(f"Request failed for URL {url}: {e}") + return None + +def _parse_response(response: requests.Response) -> Optional[dict]: + try: + return response.json() + except ValueError as e: + logging.error(f"JSON decoding failed for URL {response.url}: {e}") + return None + +def _fetch_and_parse_response(url: str, headers:dict = None, params:Optional[dict] = None): + response = _fetch_response(url, headers, params) + if not response: + return None, None + return response, _parse_response(response) def replace_content_in_readme(content_rows: List[str]) -> None: START_MARKER = ".. API_TABLE_START" @@ -135,16 +255,17 @@ def client_row(client: CloudClient) -> str: url = f"https://github.com/{client.repo}" if client.repo == MONO_REPO: url += f"/tree/main/packages/{client.distribution_name}" - + _show_api_issues = client.show_api_issues + _file_api_issue = client.file_api_issue content_row = [ f" * - `{client.title} <{url}>`_\n", - f" - " + client.release_level + "\n", - f" - |PyPI-{client.distribution_name}|\n", + f" - {client.release_level}\n", + f" - |PyPI-{client.distribution_name}|\n", + f" - `API Issues <{_show_api_issues}>`_\n" if _show_api_issues else " -\n", + f" - `File an API Issue <{_file_api_issue}>`_\n" if _file_api_issue else " -\n", + f" - `Client Library Issues <{client.show_client_issues}>`_\n" ] - if client.issue_tracker: - content_row.append(f" - `API Issues <{client.issue_tracker}>`_\n") - return (content_row, pypi_badge) @@ -157,7 +278,9 @@ def generate_table_contents(clients: List[CloudClient]) -> List[str]: " * - Client\n", " - Release Level\n", " - Version\n", - " - API Issue Tracker\n", + " - API Issues\n", + " - File an API Issue\n", + " - Client Library Issues\n", ] pypi_links = ["\n"] @@ -181,30 +304,33 @@ def mono_repo_clients(token: str) -> List[CloudClient]: # all mono repo clients url = f"{BASE_API}/repos/{MONO_REPO}/contents/packages" headers = {'Authorization': f'token {token}'} - response = requests.get(url=url, headers=headers) + _, packages = _fetch_and_parse_response(url, headers) + if not packages: + return [] mono_repo_extractor = Extractor(path_format=MONO_REPO_PATH_FORMAT, response_key=PACKAGE_RESPONSE_KEY) - - return mono_repo_extractor.get_clients_from_batch_response(response.json()) + return 
mono_repo_extractor.get_clients_from_batch_response(packages) def split_repo_clients(token: str) -> List[CloudClient]: - - first_request = True - while first_request or 'next' in response.links: - if first_request: - url = f"{BASE_API}/search/repositories?page=1" - first_request = False - else: - url = response.links['next']['url'] - headers = {'Authorization': f'token {token}'} - params = {'per_page': 100, "q": "python- in:name org:googleapis"} - response = requests.get(url=url, params=params, headers=headers) - repositories = response.json().get("items", []) + clients = [] + url = f"{BASE_API}/search/repositories?page=1" + headers = {'Authorization': f'token {token}'} + params = {'per_page': 100, "q": "python- in:name org:googleapis"} + + while url: + response, metadata = _fetch_and_parse_response(url, headers, params) + if not metadata: + break + repositories = metadata.get("items", []) if len(repositories) == 0: break - split_repo_extractor = Extractor(path_format=SPLIT_REPO_PATH_FORMAT, response_key=REPO_RESPONSE_KEY) - return split_repo_extractor.get_clients_from_batch_response(repositories) + clients.extend(split_repo_extractor.get_clients_from_batch_response(repositories)) + + # Check for the 'next' link in the response headers for pagination + url = response.links.get('next', {}).get('url') + + return clients def get_token():